CombinedText stringlengths 4 3.42M |
|---|
# CocoaPods spec for SSObject, an object serialization/deserialization framework.
Pod::Spec.new do |s|
s.name = 'SSObject'
s.version = '1.0'
s.license = { :type => 'MIT' }
s.summary = '对象序列化反序列化 framework.'
s.homepage = 'http://blog.isteven.cn'
s.authors = { 'Steven' => 'qzs21@qq.com' }
# Source is pinned to the git tag that matches the pod version.
s.source = {
:git => 'https://github.com/qzs21/SSObject.git',
:tag => s.version
}
s.source_files = 'SSObject/*.{h,m}'
s.requires_arc = true
s.ios.deployment_target = '6.0'
end
Fix CocoaPods doc bug.
# CocoaPods spec for SSObject, an object serialization/deserialization framework.
Pod::Spec.new do |s|
  s.name                  = 'SSObject'
  s.version               = '1.0.1'
  s.license               = { :type => 'MIT' }
  s.summary               = '对象序列化反序列化 framework.'
  s.homepage              = 'http://blog.isteven.cn'
  s.authors               = { 'Steven' => 'qzs21@qq.com' }
  # Source is pinned to the git tag that matches the pod version.
  s.source                = { :git => 'https://github.com/qzs21/SSObject.git', :tag => s.version }
  s.source_files          = 'SSObject/*.{h,m}'
  s.requires_arc          = true
  s.ios.deployment_target = '6.0'
end
require 'test_helper'

# Endpoint tests for /api/auth.json: POST (login), PUT (refresh), DELETE (logout).
class AuthTest < MiniTest::Test
  include Rack::Test::Methods

  # Rack::Test drives the full Rails application.
  def app
    Rails.application
  end

  # Log in as acain once so the PUT/DELETE tests have a valid token.
  def setup
    response = post '/api/auth.json',
                    '{"username":"acain", "password":"password"}',
                    "CONTENT_TYPE" => 'application/json'
    @auth_token = JSON.parse(response.body)['auth_token']
  end

  # --------------------------------------------------------------------------- #
  # POST: log in and verify the returned user details.
  def test_auth_post
    post '/api/auth.json',
         '{"username":"acain","password":"password"}',
         "CONTENT_TYPE" => 'application/json'
    user = JSON.parse(last_response.body)['user']
    # NOTE(review): `assert value, message` only checks truthiness — the second
    # argument is a failure message, not an expected value. Kept as-is to
    # preserve behavior; consider assert_equal.
    assert user['username'], 'acain'
    assert user['first_name'], 'andrew'
    assert user['last_name'], 'cain'
  end

  # PUT: refresh the session using the token obtained in setup.
  def test_auth_put
    put "/api/auth/#{@auth_token}.json",
        '{"username":"acain"}',
        "CONTENT_TYPE" => 'application/json'
    assert JSON.parse(last_response.body)['auth_token'], @auth_token
  end

  # DELETE: log out with the token; 200 means success.
  def test_auth_delete
    delete "/api/auth/#{@auth_token}.json", "CONTENT_TYPE" => 'application/json'
    assert_equal(last_response.status, 200)
  end
end
QUALITY: Set standard for code layout and JSON representation
require 'test_helper'

# Endpoint tests for /api/auth.json: POST (login), PUT (refresh), DELETE (logout).
class AuthTest < MiniTest::Test
  include Rack::Test::Methods

  # Rack::Test drives the full Rails application.
  def app
    Rails.application
  end

  # Log in as acain once so the PUT/DELETE tests have a valid token.
  def setup
    @auth_token = JSON.parse((post '/api/auth.json', '{"username":"acain", "password":"password"}', "CONTENT_TYPE" => 'application/json').body)['auth_token']
  end

  # --------------------------------------------------------------------------- #
  # --- Endpoint testing for:
  # ------- /api/auth.json
  # ------- POST PUT DELETE

  # POST test
  def test_auth_post
    # Get response back for logging in with username 'acain' password 'password'
    post '/api/auth.json',
      '{' +
      '"username":"acain",' +
      '"password":"password"' +
      '}',
      "CONTENT_TYPE" => 'application/json'
    # NOTE(review): `assert value, message` only checks truthiness — the second
    # argument is a failure message, not an expected value; consider assert_equal.
    # Check to see if the username matches what was expected
    assert JSON.parse(last_response.body)['user']['username'], 'acain'
    # Check to see if the first name matches what was expected
    assert JSON.parse(last_response.body)['user']['first_name'], 'andrew'
    # Check to see if the last name matches what was expected
    assert JSON.parse(last_response.body)['user']['last_name'], 'cain'
  end

  # PUT test
  def test_auth_put
    # FIX: the path was single-quoted, so '#{@auth_token}' was sent literally
    # instead of being interpolated; double quotes restore interpolation.
    put "/api/auth/#{@auth_token}.json",
      '{' +
      '"username":"acain"' +
      '}',
      "CONTENT_TYPE" => 'application/json'
    # Check to see if the response auth token matches the auth token that was sent through in put
    assert JSON.parse(last_response.body)['auth_token'], @auth_token
  end

  # DELETE test
  def test_auth_delete
    # Log out using the auth token obtained in setup
    delete "/api/auth/#{@auth_token}.json", "CONTENT_TYPE" => 'application/json'
    # 200 response code means success!
    assert_equal(last_response.status, 200)
  end
end
|
#########################################################
# Xavier Demompion : xavier.demompion@mobile-devices.fr
# Mobile Devices 2013
#########################################################
module UserApis
  module Mdi
    module Dialog

      # @api public
      # This class handles all mdi cloud to mdi cloud communication.
      # @note You don't have to instantiate this class yourself.
      #   Use the user_api.mdi.dialog.cloud_gate object which is already configured for your agent.
      class CloudGateClass

        # @api private
        # @param apis the user api root object this gate belongs to
        # @param default_origin_channel [String] the messages passing through this gate will be sent on this channel
        def initialize(apis, default_origin_channel)
          @user_apis = apis
          @default_origin_channel = default_origin_channel
        end

        # @api private
        def user_api
          @user_apis
        end

        # Inject a message in the server queue on a specific channel (ie push a message to the server)
        # @return [Boolean] true on success, false on failure
        # @param [MessageClass] msg the message to inject
        # @param [String] channel channel the message will be posted to
        # @param [String] origin_channel reported as the message sender (defaults to the gate's channel)
        # @note Be wary of "infinite message loops" with this method.
        # @note: if id is not nil (ie received from the cloud or duplicated), the injection will fail.
        # @example Injecte a new message to the cloud
        #   new_msg = user_api.mdi.dialog.create_new_message
        #   new_msg.recorded_at = Time.now.to_i
        #   new_msg.asset = "3735843415387632"
        #   new_msg.content = "hello from ruby agent !"
        #   new_msg.account = "my_account"
        #   user_api.mdi.dialog.cloud_gate.inject_message(new_msg, "com.me.services.test_messages")
        # FIX: the default value was the bare call `default_origin_channel`, but this class
        # defines no such reader — only the ivar — so the default raised NameError.
        def inject_message(msg, channel, origin_channel = @default_origin_channel)
          begin
            PUNK.start('injectmsg','inject message to cloud ...')
            # A non-nil id means the message already passed through the cloud once.
            raise "Message id #{msg.id} has already been sent into the cloud. Dropping injection." if msg.id != nil
            out_id = 0
            # Protogen may split one logical message into several wire messages.
            user_api.mdi.dialog.protogen.protogen_encode(msg).each do |message|
              out_id = CC.indigen_next_id(message.asset)
              inject_hash = {
                "meta" => {
                  "account" => message.account,
                  "cookies" => message.cookies,
                  "class" => 'message'
                },
                "payload" => {
                  "id" => out_id, # Indigen integer
                  "asset" => "ragent",
                  # FIX(review): was `send_channel`, which is not defined anywhere in this
                  # class; the otherwise-unused origin_channel parameter is the intended sender.
                  "sender" => origin_channel, # Sender identifier (can be the same as the asset)
                  "recipient" => "@@server@@", # Recipient identifier (can be the same as the asset)
                  "type" => "message",
                  "received_at" => Time.now.to_i, # timestamp integer in seconds
                  "channel" => channel,
                  "payload" => message.content,
                  "parent_id" => nil, # nil | message_identifier
                  "timeout" => 0 # -1 | 0 | timeout integer. 0 -> instant
                }
              }
              inject_hash['meta'].delete_if { |k, v| v.nil? }
              inject_hash['payload'].delete_if { |k, v| v.nil? }
              # todo: put some limitation
              CC.push(inject_hash,'messages')
            end
            # success !
            PUNK.end('injectmsg','ok','out',"SERVER <- SERVER MSG[#{crop_ref(out_id,4)}]")
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['inject_to_cloud'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_sent'] += 1
            return true
          rescue Exception => e
            # Deliberately broad rescue: agent code must never crash the host, so every
            # failure is logged, counted in stats, and reported as `false`.
            user_api.mdi.tools.log.error("Error on inject message")
            user_api.mdi.tools.print_ruby_exception(e)
            PUNK.end('injectmsg','ko','out',"SERVER <- SERVER MSG")
            # stats:
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['err_on_inject'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_error'] += 1
            return false
          end
        end

        # Inject a track in the server queue (ie push a track to the server)
        # @return [Boolean] true on success, false on failure
        # @param [TrackClass] track the track to send
        # @raise [RuntimeError] if the track was already sent (non-nil id) or has no fields
        # @example Injecte a new track to the cloud
        #   new_track = user_api.mdi.dialog.create_new_track
        #   new_track.recorded_at = Time.now.to_i
        #   new_track.latitude = 4878384 # in degree * 10^-5
        #   new_track.longitude = 236682 # in degree * 10^-5
        #   new_track.asset = "3735843415387632"
        #   new_track.account = "my_account"
        #   new_track.set_field("MDI_CC_LEGAL_SPEED", "50")
        #   user_api.mdi.dialog.cloud_gate.inject_track(new_track)
        def inject_track(track)
          # FIX: the message interpolated `msg.id`, an undefined name in this method.
          raise "Track id #{track.id} has already been sent into the cloud. Dropping injection." if track.id != nil
          raise "I don't push empty track. Dropping injection." if track.fields_data.size == 0
          begin
            PUNK.start('injecttrack','inject track to cloud ...')
            # todo: put some limitation
            CC.push(track.to_hash_to_send_to_cloud,'tracks')
            # success !
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['inject_to_cloud'] += 1
            return true
          rescue Exception => e
            user_api.mdi.tools.log.error("Error on inject track")
            user_api.mdi.tools.print_ruby_exception(e)
            PUNK.end('injecttrack','ko','out',"SERVER <- SERVER MSG")
            # stats:
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['err_on_inject'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_error'] += 1
            return false
          end
        end

        # Inject a collection in the server queue (ie push a collection to the server).
        # Each unsent element is injected individually first, then the collection itself.
        # @return [Boolean] true on success, false on failure
        # @param [CollectionClass] collection the collection to send
        # @raise [RuntimeError] if the collection was already sent (non-nil id) or is empty
        def inject_collection(collection)
          # FIX: the message interpolated `msg.id`, an undefined name in this method.
          raise "Collection id #{collection.id} has already been sent into the cloud. Dropping injection." if collection.id != nil
          raise "I don't push empty collection. Dropping injection." if collection.data.size == 0
          begin
            PUNK.start('injectcollection','inject collection to cloud ...')
            # now push all elements of the collection
            collection.data.each do |el|
              if el.id != nil
                CC.logger.info("Injection el of collection #{el.class}")
                # FIX: `case el.class` compared a Class object against Strings with ===,
                # which never matches; dispatch on the class name instead.
                case el.class.name
                when "PresenceClass"
                  # NYI
                when "MessageClass"
                  user_api.mdi.dialog.cloud_gate.inject_message(el, el.channel) # channel is good ? no idea !
                when "TrackClass"
                  user_api.mdi.dialog.cloud_gate.inject_track(el)
                end
              end
            end
            # todo: put some limitation
            CC.push(collection.to_hash,'collection')
            # success !
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['inject_to_cloud'] += 1
            return true
          rescue Exception => e
            user_api.mdi.tools.log.error("Error on inject collection")
            user_api.mdi.tools.print_ruby_exception(e)
            PUNK.end('injecttrack','ko','out',"SERVER <- SERVER COLLECTION")
            # stats:
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['err_on_inject'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_error'] += 1
            return false
          end
        end

      end
    end
  end
end
Alter debug.
#########################################################
# Xavier Demompion : xavier.demompion@mobile-devices.fr
# Mobile Devices 2013
#########################################################
module UserApis
  module Mdi
    module Dialog

      # @api public
      # This class handles all mdi cloud to mdi cloud communication.
      # @note You don't have to instantiate this class yourself.
      #   Use the user_api.mdi.dialog.cloud_gate object which is already configured for your agent.
      class CloudGateClass

        # @api private
        # @param apis the user api root object this gate belongs to
        # @param default_origin_channel [String] the messages passing through this gate will be sent on this channel
        def initialize(apis, default_origin_channel)
          @user_apis = apis
          @default_origin_channel = default_origin_channel
        end

        # @api private
        def user_api
          @user_apis
        end

        # Inject a message in the server queue on a specific channel (ie push a message to the server)
        # @return [Boolean] true on success, false on failure
        # @param [MessageClass] msg the message to inject
        # @param [String] channel channel the message will be posted to
        # @param [String] origin_channel reported as the message sender (defaults to the gate's channel)
        # @note Be wary of "infinite message loops" with this method.
        # @note: if id is not nil (ie received from the cloud or duplicated), the injection will fail.
        # @example Injecte a new message to the cloud
        #   new_msg = user_api.mdi.dialog.create_new_message
        #   new_msg.recorded_at = Time.now.to_i
        #   new_msg.asset = "3735843415387632"
        #   new_msg.content = "hello from ruby agent !"
        #   new_msg.account = "my_account"
        #   user_api.mdi.dialog.cloud_gate.inject_message(new_msg, "com.me.services.test_messages")
        # FIX: the default value was the bare call `default_origin_channel`, but this class
        # defines no such reader — only the ivar — so the default raised NameError.
        def inject_message(msg, channel, origin_channel = @default_origin_channel)
          begin
            PUNK.start('injectmsg','inject message to cloud ...')
            # A non-nil id means the message already passed through the cloud once.
            raise "Message id #{msg.id} has already been sent into the cloud. Dropping injection." if msg.id != nil
            out_id = 0
            # Protogen may split one logical message into several wire messages.
            user_api.mdi.dialog.protogen.protogen_encode(msg).each do |message|
              out_id = CC.indigen_next_id(message.asset)
              inject_hash = {
                "meta" => {
                  "account" => message.account,
                  "cookies" => message.cookies,
                  "class" => 'message'
                },
                "payload" => {
                  "id" => out_id, # Indigen integer
                  "asset" => "ragent",
                  # FIX(review): was `send_channel`, which is not defined anywhere in this
                  # class; the otherwise-unused origin_channel parameter is the intended sender.
                  "sender" => origin_channel, # Sender identifier (can be the same as the asset)
                  "recipient" => "@@server@@", # Recipient identifier (can be the same as the asset)
                  "type" => "message",
                  "received_at" => Time.now.to_i, # timestamp integer in seconds
                  "channel" => channel,
                  "payload" => message.content,
                  "parent_id" => nil, # nil | message_identifier
                  "timeout" => 0 # -1 | 0 | timeout integer. 0 -> instant
                }
              }
              inject_hash['meta'].delete_if { |k, v| v.nil? }
              inject_hash['payload'].delete_if { |k, v| v.nil? }
              # todo: put some limitation
              CC.push(inject_hash,'messages')
            end
            # success !
            PUNK.end('injectmsg','ok','out',"SERVER <- SERVER MSG[#{crop_ref(out_id,4)}]")
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['inject_to_cloud'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_sent'] += 1
            return true
          rescue Exception => e
            # Deliberately broad rescue: agent code must never crash the host, so every
            # failure is logged, counted in stats, and reported as `false`.
            user_api.mdi.tools.log.error("Error on inject message")
            user_api.mdi.tools.print_ruby_exception(e)
            PUNK.end('injectmsg','ko','out',"SERVER <- SERVER MSG")
            # stats:
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['err_on_inject'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_error'] += 1
            return false
          end
        end

        # Inject a track in the server queue (ie push a track to the server)
        # @return [Boolean] true on success, false on failure
        # @param [TrackClass] track the track to send
        # @raise [RuntimeError] if the track was already sent (non-nil id) or has no fields
        # @example Injecte a new track to the cloud
        #   new_track = user_api.mdi.dialog.create_new_track
        #   new_track.recorded_at = Time.now.to_i
        #   new_track.latitude = 4878384 # in degree * 10^-5
        #   new_track.longitude = 236682 # in degree * 10^-5
        #   new_track.asset = "3735843415387632"
        #   new_track.account = "my_account"
        #   new_track.set_field("MDI_CC_LEGAL_SPEED", "50")
        #   user_api.mdi.dialog.cloud_gate.inject_track(new_track)
        def inject_track(track)
          # FIX: the message interpolated `msg.id`, an undefined name in this method.
          raise "Track id #{track.id} has already been sent into the cloud. Dropping injection." if track.id != nil
          raise "I don't push empty track. Dropping injection." if track.fields_data.size == 0
          begin
            PUNK.start('injecttrack','inject track to cloud ...')
            # todo: put some limitation
            CC.push(track.to_hash_to_send_to_cloud,'tracks')
            # success !
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['inject_to_cloud'] += 1
            return true
          rescue Exception => e
            user_api.mdi.tools.log.error("Error on inject track")
            user_api.mdi.tools.print_ruby_exception(e)
            PUNK.end('injecttrack','ko','out',"SERVER <- SERVER MSG")
            # stats:
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['err_on_inject'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_error'] += 1
            return false
          end
        end

        # Inject a collection in the server queue (ie push a collection to the server).
        # Each unsent element is injected individually first, then the collection itself.
        # @return [Boolean] true on success, false on failure
        # @param [CollectionClass] collection the collection to send
        # @raise [RuntimeError] if the collection was already sent (non-nil id) or is empty
        def inject_collection(collection)
          # FIX: the message interpolated `msg.id`, an undefined name in this method.
          raise "Collection id #{collection.id} has already been sent into the cloud. Dropping injection." if collection.id != nil
          raise "I don't push empty collection. Dropping injection." if collection.data.size == 0
          begin
            PUNK.start('injectcollection','inject collection to cloud ...')
            # now push all elements of the collection
            collection.data.each do |el|
              if el.id != nil
                CC.logger.info("Injection #{el.class} of collection")
                # FIX: `case el.class` compared a Class object against Strings with ===,
                # which never matches; dispatch on the class name instead.
                case el.class.name
                when "PresenceClass"
                  # NYI
                when "MessageClass"
                  user_api.mdi.dialog.cloud_gate.inject_message(el, el.channel) # channel is good ? no idea !
                when "TrackClass"
                  user_api.mdi.dialog.cloud_gate.inject_track(el)
                end
              end
            end
            # todo: put some limitation
            CC.push(collection.to_hash,'collection')
            # success !
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['inject_to_cloud'] += 1
            return true
          rescue Exception => e
            user_api.mdi.tools.log.error("Error on inject collection")
            user_api.mdi.tools.print_ruby_exception(e)
            PUNK.end('injecttrack','ko','out',"SERVER <- SERVER COLLECTION")
            # stats:
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['err_on_inject'] += 1
            SDK_STATS.stats['agents'][user_api.user_class.agent_name]['total_error'] += 1
            return false
          end
        end

      end
    end
  end
end
# CocoaPods spec for ASJPushNotificationManager (push notification setup helper).
Pod::Spec.new do |s|
s.name = 'ASJPushNotificationManager'
s.version = '1.0'
s.platform = :ios, '7.0'
s.license = { :type => 'MIT' }
s.homepage = 'https://github.com/sudeepjaiswal/ASJPushNotificationManager'
s.authors = { 'Sudeep Jaiswal' => 'sudeepjaiswal87@gmail.com' }
s.summary = 'Super easy setup for push notifications in your iOS app'
# Source is pinned to the git tag matching the pod version.
s.source = { :git => 'https://github.com/sudeepjaiswal/ASJPushNotificationManager.git', :tag => s.version }
s.source_files = 'ASJPushNotificationManager/*.{h,m}'
s.requires_arc = true
end
Updated podspec for version 1.1.
# CocoaPods spec for ASJPushNotificationManager (push notification setup helper).
Pod::Spec.new do |s|
  s.name         = 'ASJPushNotificationManager'
  s.version      = '1.1'
  s.platform     = :ios, '7.0'
  s.license      = { :type => 'MIT' }
  s.homepage     = 'https://github.com/sudeepjaiswal/ASJPushNotificationManager'
  s.authors      = { 'Sudeep Jaiswal' => 'sudeepjaiswal87@gmail.com' }
  s.summary      = 'Super easy setup for push notifications in your iOS app'
  # Source is pinned to the git tag matching the pod version.
  s.source       = { :git => 'https://github.com/sudeepjaiswal/ASJPushNotificationManager.git', :tag => s.version }
  s.source_files = 'ASJPushNotificationManager/*.{h,m}'
  s.requires_arc = true
end
# CocoaPods spec for Saystack, a grab-bag of small iOS/macOS utility classes.
Pod::Spec.new do |s|
  s.name         = "Saystack"
  s.version      = "0.4.11"
  s.summary      = "A collection of simple iOS classes and categories that fit in almost every project."
  # FIX: corrected "regurarely" typo and grammar in the user-facing description.
  s.description  = <<-DESC
                   Saystack contains a collection of simple iOS classes and small libraries that fit in almost every project. Most of these classes are not large or structured enough to fit in a special library or framework, but are regularly needed throughout most projects you are working on.
                   DESC
  s.homepage     = "http://github.com/Legoless/Saystack"
  s.license      = 'MIT'
  s.author       = { "Dal Rupnik" => "legoless@gmail.com" }

  # iOS gets every extension; macOS only the Core subset.
  s.ios.deployment_target = '8.0'
  s.ios.source_files      = 'Code/Extensions/**/*.{swift}'
  s.osx.deployment_target = '10.11'
  s.osx.source_files      = 'Code/Extensions/Core/**/*.{swift}'

  s.source       = { :git => "https://github.com/Legoless/Saystack.git", :tag => s.version }
  s.requires_arc = true
end
Pod update
# CocoaPods spec for Saystack, a grab-bag of small iOS/macOS utility classes.
Pod::Spec.new do |s|
  s.name         = "Saystack"
  s.version      = "0.5.0"
  s.summary      = "A collection of simple iOS classes and categories that fit in almost every project."
  # FIX: corrected "regurarely" typo and grammar in the user-facing description.
  s.description  = <<-DESC
                   Saystack contains a collection of simple iOS classes and small libraries that fit in almost every project. Most of these classes are not large or structured enough to fit in a special library or framework, but are regularly needed throughout most projects you are working on.
                   DESC
  s.homepage     = "http://github.com/Legoless/Saystack"
  s.license      = 'MIT'
  s.author       = { "Dal Rupnik" => "legoless@gmail.com" }

  # iOS gets every extension; macOS only the Core subset.
  s.ios.deployment_target = '8.0'
  s.ios.source_files      = 'Code/Extensions/**/*.{swift}'
  s.osx.deployment_target = '10.11'
  s.osx.source_files      = 'Code/Extensions/Core/**/*.{swift}'

  s.source       = { :git => "https://github.com/Legoless/Saystack.git", :tag => s.version }
  s.requires_arc = true
end
|
Prepare for 0.0.1-alpha3 release
|
# CocoaPods spec for StatefulTableView (stateful UITableView container, Swift).
Pod::Spec.new do |s|
s.name = 'StatefulTableView'
s.version = '0.1.1'
s.license = {
:type => 'MIT',
:file => 'LICENSE'
}
s.homepage = 'http://github.com/timominous/StatefulTableView'
s.description = 'Custom UITableView container class that supports pull-to-refresh, load-more, initial load, and empty states. Swift port of SKStatefulTableViewController'
s.summary = 'Custom UITableView container class that supports pull-to-refresh, load-more, initial load, and empty states.'
s.author = {
'timominous' => 'timominous@gmail.com'
}
# Source is pinned to the git tag matching the pod version.
s.source = {
:git => 'https://github.com/timominous/StatefulTableView.git',
:tag => s.version.to_s
}
s.ios.deployment_target = "8.0"
s.source_files = 'Sources/*.swift'
s.requires_arc = true
end
Added supported Swift versions to podspec
# CocoaPods spec for StatefulTableView (stateful UITableView container, Swift).
Pod::Spec.new do |s|
s.name = 'StatefulTableView'
s.version = '0.1.1'
s.license = {
:type => 'MIT',
:file => 'LICENSE'
}
s.homepage = 'http://github.com/timominous/StatefulTableView'
s.description = 'Custom UITableView container class that supports pull-to-refresh, load-more, initial load, and empty states. Swift port of SKStatefulTableViewController'
s.summary = 'Custom UITableView container class that supports pull-to-refresh, load-more, initial load, and empty states.'
s.author = {
'timominous' => 'timominous@gmail.com'
}
# Source is pinned to the git tag matching the pod version.
s.source = {
:git => 'https://github.com/timominous/StatefulTableView.git',
:tag => s.version.to_s
}
s.ios.deployment_target = "8.0"
s.source_files = 'Sources/*.swift'
s.requires_arc = true
# Declares supported Swift compiler versions for CocoaPods validation.
s.swift_versions = ['5.0']
end
|
#!/usr/bin/env ruby
require 'fileutils'
require 'socket'
require 'timeout'
# Returns true when `port` is free (a listener can be bound to it), false when
# it is already in use or binding takes longer than 2 seconds.
# NOTE(review): appears unused by this script — is_port_open? below is used instead.
# @param port [Integer] TCP port to probe
# @return [Boolean]
def is_port_open_orig?(port)
  begin
    Timeout::timeout(2) {
      begin
        puts 'starting'
        s = TCPServer.new(port)
        s.close
        puts 'stopped'
        return true
      rescue StandardError # FIX: was `rescue Exception`; e.g. Errno::EADDRINUSE, Errno::EACCES
        return false
      end
    }
  rescue Timeout::Error
    return false
  end
end
# Returns true when nothing accepts connections on host:port (i.e. the port is
# available for us to start a service on), false when something is listening.
# Note the inverted sense: "open" here means "free to use".
# @param host [String] host to probe
# @param port [Integer] TCP port to probe
# @return [Boolean]
def is_port_open?(host,port)
  begin
    t = TCPSocket.new(host,port)
    t.close
    return false
  rescue StandardError => e # FIX: was `rescue Exception`, which also swallowed signals
    @exception = e # keep the last failure reason around for debugging
    return true
  end
end
# Polls `filepath` once a second until its contents match `pattern`.
# @param filepath [String] file to read on each poll
# @param pattern [Regexp] pattern to look for
# @param timeout_s [Numeric] give up after this many seconds (default 30; new, backward-compatible)
# @return [Boolean] true when the pattern appeared, false on timeout
def wait_until_found(filepath, pattern, timeout_s = 30)
  print "\n wait_until_found: pattern '#{pattern}' in file '#{filepath}' ."
  $stdout.flush
  begin
    Timeout::timeout(timeout_s) {
      while true do
        text = File.read filepath
        return true if text =~ pattern
        print '.'
        $stdout.flush
        sleep 1
      end
    }
  rescue Timeout::Error
  end
  # FIX: this string was single-quoted, so it printed a literal backslash-n.
  puts "\nTimeout Error, pattern not found!"
  false
end
dir = Dir.pwd
puts "PWD: #{dir}"

# All service console output is collected under ./everyLog.
log_dir = "#{dir}/everyLog"
Dir.mkdir(log_dir) unless File.exist?(log_dir) # FIX: File.exists? was removed in Ruby 3.2

jetty_pattern=/Starting scanner at interval of 5 seconds/

# Services to launch: each gets its own console log and a readiness pattern
# that wait_until_found polls for before the smoke tests start.
procs = [
{name: 'API', port: 8080, dir: "#{dir}/api", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'Ingestion', port: 8000, dir: "#{dir}/ingestion/ingestion-service", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'Dashboard', port: 8888, dir: "#{dir}/dashboard", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'SimpleIDP', port: 8082, dir: "#{dir}/simple-idp", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'Databrowser', port: 3000, dir: "#{dir}/databrowser", exec: "bundle exec rails server", pattern: />> Listening on/},
{name: 'AdminTools', port: 3001, dir: "#{dir}/admin-tools/admin-rails", exec: "bundle exec rails server", pattern: /WEBrick::HTTPServer#start:/}
]

# Launch each service whose port is still free, backgrounded with output redirected.
procs.each { |p|
  if is_port_open?('localhost', p[:port])
    print "Starting #{p[:name]} on port #{p[:port]}"
    $stdout.flush
    Dir.chdir p[:dir]
    `#{p[:exec]} > #{log_dir}/#{p[:name]}Console.log 2>&1 &`
    # NOTE(review): $?.pid is the pid of the spawned shell, not the service itself —
    # it is only used below as a "was launched" flag, so this is harmless. TODO confirm.
    p[:pid] = $?.pid
    puts ": pid = #{$?.pid}"
  else
    puts "Not Starting #{p[:name]}: port #{p[:port]} is already in use"
  end
}

# Block until every launched service reports readiness in its console log.
procs.each { |p|
  wait_until_found("#{log_dir}/#{p[:name]}Console.log", p[:pattern]) if p.has_key? :pid
}

puts "\n\nStarting Smoke Tests\n"
Dir.chdir "#{dir}/acceptance-tests"
exec 'bundle exec rake smokeTests'
adds bundle install to rails startup
#!/usr/bin/env ruby
require 'fileutils'
require 'socket'
require 'timeout'
# Returns true when `port` is free (a listener can be bound to it), false when
# it is already in use or binding takes longer than 2 seconds.
# NOTE(review): appears unused by this script — is_port_open? below is used instead.
# @param port [Integer] TCP port to probe
# @return [Boolean]
def is_port_open_orig?(port)
  begin
    Timeout::timeout(2) {
      begin
        puts 'starting'
        s = TCPServer.new(port)
        s.close
        puts 'stopped'
        return true
      rescue StandardError # FIX: was `rescue Exception`; e.g. Errno::EADDRINUSE, Errno::EACCES
        return false
      end
    }
  rescue Timeout::Error
    return false
  end
end
# Returns true when nothing accepts connections on host:port (i.e. the port is
# available for us to start a service on), false when something is listening.
# Note the inverted sense: "open" here means "free to use".
# @param host [String] host to probe
# @param port [Integer] TCP port to probe
# @return [Boolean]
def is_port_open?(host,port)
  begin
    t = TCPSocket.new(host,port)
    t.close
    return false
  rescue StandardError => e # FIX: was `rescue Exception`, which also swallowed signals
    @exception = e # keep the last failure reason around for debugging
    return true
  end
end
# Polls `filepath` once a second until its contents match `pattern`.
# @param filepath [String] file to read on each poll
# @param pattern [Regexp] pattern to look for
# @param timeout_s [Numeric] give up after this many seconds (default 30; new, backward-compatible)
# @return [Boolean] true when the pattern appeared, false on timeout
def wait_until_found(filepath, pattern, timeout_s = 30)
  print "\n wait_until_found: pattern '#{pattern}' in file '#{filepath}' ."
  $stdout.flush
  begin
    Timeout::timeout(timeout_s) {
      while true do
        text = File.read filepath
        return true if text =~ pattern
        print '.'
        $stdout.flush
        sleep 1
      end
    }
  rescue Timeout::Error
  end
  # FIX: this string was single-quoted, so it printed a literal backslash-n.
  puts "\nTimeout Error, pattern not found!"
  false
end
dir = Dir.pwd
puts "PWD: #{dir}"

# All service console output is collected under ./everyLog.
log_dir = "#{dir}/everyLog"
Dir.mkdir(log_dir) unless File.exist?(log_dir) # FIX: File.exists? was removed in Ruby 3.2

jetty_pattern=/Starting scanner at interval of 5 seconds/

# Services to launch: each gets its own console log and a readiness pattern
# that wait_until_found polls for before the smoke tests start.
# Rails apps run `bundle install` first so missing gems don't abort startup.
procs = [
{name: 'API', port: 8080, dir: "#{dir}/api", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'Ingestion', port: 8000, dir: "#{dir}/ingestion/ingestion-service", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'Dashboard', port: 8888, dir: "#{dir}/dashboard", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'SimpleIDP', port: 8082, dir: "#{dir}/simple-idp", exec: "mvn -o jetty:run", pattern: jetty_pattern},
{name: 'Databrowser', port: 3000, dir: "#{dir}/databrowser", exec: "bundle install; bundle exec rails server", pattern: />> Listening on/},
{name: 'AdminTools', port: 3001, dir: "#{dir}/admin-tools/admin-rails", exec: "bundle install; bundle exec rails server", pattern: /WEBrick::HTTPServer#start:/}
]

# Launch each service whose port is still free, backgrounded with output redirected.
procs.each { |p|
  if is_port_open?('localhost', p[:port])
    print "Starting #{p[:name]} on port #{p[:port]}"
    $stdout.flush
    Dir.chdir p[:dir]
    `#{p[:exec]} > #{log_dir}/#{p[:name]}Console.log 2>&1 &`
    # NOTE(review): $?.pid is the pid of the spawned shell, not the service itself —
    # it is only used below as a "was launched" flag, so this is harmless. TODO confirm.
    p[:pid] = $?.pid
    puts ": pid = #{$?.pid}"
  else
    puts "Not Starting #{p[:name]}: port #{p[:port]} is already in use"
  end
}

# Block until every launched service reports readiness in its console log.
procs.each { |p|
  wait_until_found("#{log_dir}/#{p[:name]}Console.log", p[:pattern]) if p.has_key? :pid
}

puts "\n\nStarting Smoke Tests\n"
Dir.chdir "#{dir}/acceptance-tests"
exec 'bundle exec rake smokeTests'
|
# coding: utf-8
# Gemspec for jquerycsv-rails: packages the jquery-csv assets for Rails apps.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Version constant lives in lib/jquerycsv/rails/version.rb.
require 'jquerycsv/rails/version'
Gem::Specification.new do |spec|
spec.name = "jquerycsv-rails"
spec.version = Jquerycsv::Rails::VERSION
spec.authors = ["James Koval"]
spec.email = ["james.ross.koval@gmail.com"]
spec.summary = "Rails gem tracking jquery csv"
spec.description = spec.summary
spec.homepage = "https://github.com/jakl/jquerycsv-rails"
spec.license = "MIT"
# Ships the lib code plus the vendored jquery-csv assets.
spec.files = Dir["{lib,vendor}/**/*"] + ["MIT-LICENSE", "README.md"]
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_dependency "railties", "~> 3.1"
end
license file
# coding: utf-8
# Gemspec for jquerycsv-rails: packages the jquery-csv assets for Rails apps.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Version constant lives in lib/jquerycsv/rails/version.rb.
require 'jquerycsv/rails/version'
Gem::Specification.new do |spec|
spec.name = "jquerycsv-rails"
spec.version = Jquerycsv::Rails::VERSION
spec.authors = ["James Koval"]
spec.email = ["james.ross.koval@gmail.com"]
spec.summary = "Rails gem tracking jquery csv"
spec.description = spec.summary
spec.homepage = "https://github.com/jakl/jquerycsv-rails"
spec.license = "MIT"
# Ships the lib code plus the vendored jquery-csv assets.
spec.files = Dir["{lib,vendor}/**/*"] + ["LICENSE.txt", "README.md"]
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_dependency "railties", "~> 3.1"
end
|
# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012, 2013 The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
# Lightweight value holder used when routing an event: target jurisdiction plus a note.
RoutingStruct = Struct.new(:jurisdiction_id, :note)
class EventsController < ApplicationController
# Authorization and setup filters; each names the actions it guards.
before_filter :can_update?, :only => [:edit, :update, :destroy, :soft_delete, :event_type]
before_filter :can_new?, :only => [:new, :create]
before_filter :can_view?, :only => [:show, :export_single]
before_filter :can_index?, :only => [:index, :export]
before_filter :can_access_sensitive?, :only => [:edit, :show, :update, :destroy, :soft_delete]
before_filter :set_tab_index
before_filter :update_last_modified_date, :only => [:update]
# Builds @event for the search/selection actions that need one.
before_filter :find_or_build_event, :only => [ :reporters_search_selection, :reporting_agencies_search, :reporting_agency_search_selection ]
before_filter :can_promote?, :only => :event_type
before_filter :load_event_queues, :only => [:index, :export]
before_filter :reject_if_wrong_type, :only => [:show, :export_single, :edit, :update, :destroy, :soft_delete, :event_type]
# Lists events as HTML or XML. index_processing handles filtering/pagination
# and returns false when it has already rendered/redirected the response.
def index
return unless index_processing
@event_states_and_descriptions = Event.get_all_states_and_descriptions
respond_to do |format|
format.html
format.xml { render :xml => @events }
end
end
# CSV export of the same event list as #index (shares index_processing).
def export
return unless index_processing
respond_to do |format|
format.csv
end
end
# Full-text search for human-event contacts by name; always renders the
# results partial, showing a flash error instead of results on bad criteria.
def contacts_search
begin
@results = HumanEvent.find_by_name_and_bdate(
:fulltext_terms => params[:name]
)
rescue
# Best-effort: swallow search errors and surface a friendly message.
flash.now[:error] = t(:invalid_search_criteria)
end
render :partial => "events/contacts_search", :layout => false
end
# Builds an unsaved Clinician wrapping the selected person entity and renders
# the show partial for the requesting event type.
def clinicians_search_selection
  @clinician = Clinician.new
  @clinician.person_entity = PersonEntity.find(params[:id])
  render :partial => "events/clinician_show", :layout => false, :locals => { :event_type => params[:event_type] }
end
# Attaches the selected person entity as the reporter on an existing event,
# or on a fresh event of the requested type when no event_id is given.
def reporter_search_selection
@event = Event.find_by_id(params[:event_id]) || params[:event_type].constantize.new
@event.build_reporter :secondary_entity_id => params[:id]
render :partial => "events/reporter_from_search", :layout => false
end
# Paginated name search over places, optionally restricted to the type codes
# passed as a "[1,2,3]"-style string; falls back to epi types when none given.
def places_search
page = params[:page] ? params[:page] : 1
name = params[:name]
# DEBT: Sure there must be a better way to parse this.
type_ids = params[:types].sub(/^\[(.*)\]$/, '\1').split(',').map {|s| s.to_i}
types = Code.find(type_ids).map{|c|c.the_code}
types = Place.epi_type_codes if types.blank?
begin
@places = Place.starts_with(name).types(types).paginate(:include => { :entity => [:addresses, :canonical_address] }, :page => page, :per_page => 10)
rescue
# Best-effort: log and render an empty result set with a flash error.
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@places = []
end
render :partial => "events/places_search", :layout => false, :locals => { :places => @places }
end
# Builds an unsaved PlaceEvent around the selected place entity (interested
# place + participation scaffolding) and renders the exposure partial.
def places_search_selection
place_entity = PlaceEntity.find(params[:id])
@place = PlaceEvent.new
@place.build_interested_place
@place.interested_place.place_entity = place_entity
@place.build_participations_place
render :partial => "events/place_exposure_show", :layout => false, :locals => {:event_type => params[:event_type].underscore}
end
# Paginated prefix search for active clinicians by first or last name.
# NOTE(review): find(:all, ...) loads every match, then paginates the array.
def clinicians_search
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@clinicians = Person.active.send(:clinicians).find(:all,
:conditions => ["(LOWER(last_name) LIKE ? OR LOWER(first_name) LIKE ?)", name.downcase + '%', name.downcase + '%']).paginate(:include => {:person_entity => :telephones}, :page => page, :per_page => 10)
rescue
# Best-effort: log and render an empty result set with a flash error.
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@clinicians = []
end
render :partial => "events/clinicians_search", :layout => false
end
# Paginated prefix search for active reporters by first or last name; also
# loads (or builds) the event the selection will attach to.
def reporters_search
@event = Event.find_by_id(params[:event_id]) || params[:event_type].constantize.new
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@reporters = Person.active.send(:reporters).find(:all,
:conditions => ["(LOWER(last_name) LIKE ? OR LOWER(first_name) LIKE ?)", name.downcase + '%', name.downcase + '%']).paginate(:include => {:person_entity => :telephones}, :page => page, :per_page => 10)
rescue
# Best-effort: log and render an empty result set with a flash error.
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@reporters = []
end
render :partial => "events/reporters_search", :layout => false
end
# Paginated name search over diagnostic facilities.
def diagnostic_facilities_search
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@places = Place.diagnostic_facilities(name).paginate(:include => { :entity => [:addresses, :canonical_address] }, :page => page, :per_page => 10)
rescue
# Best-effort: log and render an empty result set with a flash error.
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@places = []
end
render :partial => "events/diagnostics_search", :layout => false
end
# Builds an unsaved DiagnosticFacility wrapping the selected place entity and
# renders the diagnostic partial for the requesting event type.
def diagnostics_search_selection
  @diagnostic = DiagnosticFacility.new
  @diagnostic.place_entity = PlaceEntity.find(params[:id])
  render :partial => "events/diagnostic", :layout => false, :locals => {:event_type => params[:event_type]}
end
def reporting_agencies_search
  # AJAX search for reporting agencies by name, paginated 10 per page.
  page_number = params[:page] || 1
  search_term = (params[:name] || '').strip
  begin
    @places = Place.reporting_agencies_by_name(search_term).paginate(:include => { :entity => [:addresses, :canonical_address] }, :page => page_number, :per_page => 10)
  rescue
    logger.error($!)
    flash.now['error'] = t('invalid_search_criteria')
    @places = []
  end
  render :partial => "events/reporting_agencies_search", :layout => false
end
def reporting_agency_search_selection
  # Attach the selected place entity as the event's reporting agency,
  # building the participation first if the event does not have one yet.
  # (@event is supplied by the find_or_build_event filter.)
  agency = @event.reporting_agency
  if agency.nil?
    @event.build_reporting_agency :secondary_entity_id => params[:id]
  else
    agency.secondary_entity_id = params[:id]
  end
  render :layout => false
end
# This action is for development/testing purposes only. This is not a "real" login action
def change_user
  # Only honored when the :auth_allow_user_switch config option is exactly true.
  unless config_option(:auth_allow_user_switch) == true
    # NOTE(review): key spelling "avaliable" presumably mirrors the locale
    # files -- do not "fix" it here without updating them too.
    render :text => t("action_not_avaliable"), :status => 403
    return
  end
  session[:user_id] = params[:user_id]
  User.current_user = User.find_by_uid(params[:user_id])
  redirect_to request.env["HTTP_REFERER"]
end
def soft_delete
  # Mark the event as deleted (soft) and bounce back to the referring page,
  # flashing success or failure accordingly.
  referer = request.env["HTTP_REFERER"]
  if @event.soft_delete
    flash[:notice] = t("successfully_marked_as_deleted")
  else
    flash[:error] = t("error_marking_event_as_deleted")
  end
  redirect_to referer
end
def lab_form
  # Renders a blank lab section (with one empty result row) for dynamic
  # insertion into the event form.
  lab = Lab.new
  lab.build_place_entity
  lab.place_entity.build_place
  lab.lab_results.build
  disease_id = params[:disease_id]
  @disease = disease_id.blank? ? nil : Disease.find(disease_id)
  # Need the event to collect @event.form_referneces for repeaters
  # nil is ok for unsaved events
  @event = Event.find_by_id(params[:event_id])
  render :partial => 'events/lab', :object => lab, :locals => {:prefix => params[:prefix] }
end
def lab_result_form
  # Renders a blank lab result row for dynamic insertion into a lab section.
  disease_id = params[:disease_id]
  @disease = disease_id.blank? ? nil : Disease.find(disease_id)
  # Need the event to collect @event.form_referneces for repeaters
  # nil is ok for unsaved events
  @event = Event.find_by_id(params[:event_id])
  render :partial => 'events/lab_result', :object => LabResult.new, :locals => {:prefix => params[:prefix]}
end
# Renders the <option> list for the lab-test-type select via an inline ERB
# heredoc; the view helper test_type_options(nil, nil, nil) supplies the
# collection, so this action only wraps it for AJAX consumption.
def test_type_options
render :inline => <<-test_opts
<% test_types = test_type_options(nil, nil, nil) %>
<option value=""/>
<%= options_from_collection_for_select(test_types, 'id', 'common_name') %>
test_opts
end
# Renders the <option> list for the organism select via an inline ERB
# heredoc; the view helper organism_options(nil, nil, nil) supplies the
# collection, mirroring test_type_options above.
def organism_options
render :inline => <<-org_opts
<% org_types = organism_options(nil, nil, nil) %>
<option value=""/>
<%= options_from_collection_for_select(org_types, 'id', 'organism_name') %>
org_opts
end
def edit_jurisdiction
  # XML-only: load the event and prime a RoutingStruct for the routing form,
  # pre-filling the current jurisdiction when one is assigned.
  respond_to do |format|
    format.xml do
      @event = Event.find params[:id]
      current = @event.jurisdiction
      @routing = RoutingStruct.new
      @routing.jurisdiction_id = current.secondary_entity_id if current
      @routing.note = ''
    end
  end
end
# Route an event from one jurisdiction to another
# Assigns the primary jurisdiction, optional secondaries and a routing note
# atomically; events routed to the unassigned jurisdiction are reset to NEW.
def jurisdiction
@event = Event.find(params[:id])
begin
# Debt: be nice to have to only call one method to route
Event.transaction do
@event.assign_to_lhd params[:routing][:jurisdiction_id], params[:secondary_jurisdiction_ids] || [], params[:routing][:note]
@event.reset_to_new if @event.jurisdiction.place.is_unassigned_jurisdiction?
@event.save!
end
rescue Exception => e
# NOTE(review): rescues Exception (not StandardError); workflow halts
# appear to surface here as exceptions -- confirm before narrowing.
# DEBT: The :no_jurisdiction_change halted_because value is set
# at lib/routing/workflow_helper.rb:41. However, in some cases,
# this method is never called. For example, if the
# jurisdiction_id field is invalid (not a legitimate place
# entity), an exception is raised within the Workflow module
# before assign_to_lhd is called. We do not seem to have direct
# control over that field, which in that instance is a string
# like "Couldn't find PlaceEntity with ID=721." The exceptions
# are not distinguished by class either. The only immediate way
# to distinguish these cases is by parsing this string in one
# place or another.
if @event.halted? && @event.halted_because =~ /^Couldn't find PlaceEntity with ID=/
respond_to do |format|
# DEBT: Respond to HTML? This can't happen, since the user
# is given a drop-down list of place entities.
format.xml do
@event.errors.add(:jurisdiction_id, @event.halted_because)
render :xml => @event.errors, :status => :unprocessable_entity
end
end
elsif @event.halted? && @event.halted_because != :no_jurisdiction_change
# Halted for a reason other than "nothing changed": treat as forbidden.
respond_to do |format|
format.html do
render :partial => "events/permission_denied", :locals => { :reason => e.message, :event => @event }, :status => 403, :layout => true
end
format.xml { render :xml => @event.errors, :status => :forbidden }
end
else
# Not a workflow halt: report the failure according to what the user
# is still allowed to do with the event.
if User.current_user.can_update?(@event)
respond_to do |format|
format.html do
flash.now[:error] = t("unable_to_route_cmr", :message => e.message)
render :action => :edit, :status => :bad_request
end
format.xml { render :xml => @event.errors, :status => :bad_request }
end
else
respond_to do |format|
format.html do
flash[:error] = t(:unable_to_route_cmr_no_edit_priv, :message => e.message)
redirect_to :back
end
format.xml { render :xml => @event.errors, :status => :forbidden }
end
end
end
return
end
# Success: users who can still view the event go back; others go to the index.
respond_to do |format|
format.html do
if User.current_user.is_entitled_to_in?(:view_event, params[:routing][:jurisdiction_id]) or
User.current_user.is_entitled_to_in?(:view_event, params[:secondary_jurisdiction_ids])
flash[:notice] = t("event_successfully_routed")
redirect_to :back
else
flash[:notice] = t("event_successfully_routed_no_privs")
redirect_to events_path
end
end
format.xml { head :ok }
end
end
# Transition the event's workflow state. Params arrive under
# :morbidity_event; :workflow_action names the transition to invoke and
# :note is an optional annotation, both removed before mass-assignment.
def state
@event = Event.find(params[:id])
workflow_action = params[:morbidity_event].delete(:workflow_action)
# Squirrel any notes away
note = params[:morbidity_event].delete(:note)
begin
# A status change may be accompanied by other values such as an
# event queue, set them
@event.attributes = params[:morbidity_event]
# NOTE(review): send() with a param-supplied action name -- presumably
# constrained to workflow transitions; a whitelist would be safer.
@event.send(workflow_action, note)
rescue Exception => e
# grr. workflow halt exception doesn't work as documented
if @event.halted?
render :partial => "events/permission_denied", :locals => { :reason => e.message, :event => nil }, :layout => true, :status => 403 and return
else
logger.error("Illegal state transition")
logger.error(e.message)
# NOTE(review): hardcoded English flash text; other messages here go
# through t() -- consider localizing.
flash[:error] = "Event state was modified prior to your action. Review the event again."
redirect_to :back and return
end
end
if @event.save
flash[:notice] = t("event_successfully_routed")
redirect_to :back
else
# Save failed: re-render for editors, bounce non-editors to the index.
if User.current_user.can_update?(@event)
flash.now[:error] = t("unable_to_change_cmr_state")
render :action => :edit, :status => :bad_request
else
flash[:error] = t(:unable_to_change_state_no_edit_privs)
redirect_to events_path
end
end
end
def event_type
  # Promote the event to another type (params[:type]); on success redirect
  # to the promoted event, otherwise re-render the edit form with an error.
  promoted_event = @event.promote_to(params[:type])
  if promoted_event
    flash[:notice] = t(:promoted_to, :type => params[:type].humanize.downcase)
    redirect_to @template.event_path(promoted_event)
  else
    flash.now[:error] = t("could_not_promote_event")
    render :action => :edit, :status => :bad_request
  end
end
private
def can_update?
  # Filter: render a 403 permission page unless the current user may
  # update this event; loads @event when no earlier filter has.
  @event ||= Event.find(params[:id])
  return if User.current_user.can_update?(@event)
  render :partial => 'events/permission_denied', :layout => true, :locals => { :reason => t("no_update_privs_for_jurisdiction"), :event => @event }, :status => 403
end
def can_new?
  # Filter: render a 403 permission page unless the user may create events.
  return if User.current_user.can_create?
  render :partial => 'events/permission_denied', :layout => true, :locals => { :reason => t("no_event_create_privs") }, :status => 403
end
def can_index?
  # Filter: render a 403 permission page unless the user may view events.
  return if User.current_user.can_view?
  render :partial => 'events/permission_denied', :layout => true, :locals => { :reason => t("no_event_view_privs") }, :status => 403
end
# Filter: users lacking view rights on this particular event are not
# blocked outright -- access is logged or a reason is prompted for
# (extra-jurisdictional access). Also enables conditional-GET caching
# in production via stale?.
def can_view?
@event ||= Event.find(params[:id])
@display_view_warning = false
unless User.current_user.can_view?(@event)
log_access_or_prompt_for_reason
return
end
stale?(:last_modified => @event.updated_at.utc, :etag => @event) if RAILS_ENV == 'production'
end
def can_access_sensitive?
  # Filter: events for sensitive diseases additionally require the
  # sensitive-disease privilege; render a static 403 page otherwise.
  @event ||= Event.find(params[:id])
  return unless @event.sensitive?
  return if User.current_user.can_access_sensitive_diseases?(@event)
  render :file => static_error_page_path(403), :layout => 'application', :status => 403
end
def can_create?
  # True when the user may create this event and, for sensitive-disease
  # events, also holds the sensitive-disease privilege.
  user = User.current_user
  user.can_create?(@event) && (@event.sensitive? ? user.can_access_sensitive_diseases?(@event) : true)
end
def reject_if_wrong_type(event=@event)
  # Respond 404 when the event's STI type does not match this controller,
  # linking to the canonical event URL when one can be built.
  # Returns true when the request was rejected, false otherwise.
  return false if event.read_attribute('type') == controller_name.classify
  correct_url = @template.event_path(@event)
  event_link = correct_url ? @template.link_to(correct_url, correct_url) : nil
  respond_to do |format|
    format.html { render :partial => "shared/missing_event", :locals => {:event_link => event_link}, :layout => 'application', :status => 404 }
    format.all { render :nothing => true, :status => 404 }
  end
  true
end
def log_access_or_prompt_for_reason
  # Extra-jurisdictional access: with no access record on file, redirect to
  # the form asking for a reason; otherwise count the access, flag the view
  # warning and leave a system note on the event.
  record = AccessRecord.find_by_user_id_and_event_id(User.current_user.id, @event.id)
  if record.nil?
    redirect_to(new_event_access_record_path(@event))
    return
  end
  record.update_attribute(:access_count, record.access_count + 1)
  @display_view_warning = true
  @event.add_note(I18n.translate("system_notes.extra_jurisdictional_view_only_access", :locale => I18n.default_locale))
end
def set_tab_index
  # Carry the active tab through to subsequent links via @query_params;
  # defaults to the first tab when none is given.
  @query_params = {}
  @tab_index = 0
  return if params[:tab_index].blank?
  @tab_index = params[:tab_index]
  @query_params[:tab_index] = params[:tab_index]
end
# Debt: too fat. needs to be in the place class
# Prefix search over active (non-deleted) places restricted to the given
# place-type codes; distinct per (name, type code) and capped at 20 rows.
# NOTE(review): type_array.to_list is interpolated directly into the SQL --
# safe only while it never carries user input; confirm at call sites.
def places_by_name_and_types(name, type_array)
@places = Place.find(:all, :select => "DISTINCT ON (LOWER(TRIM(places.name)), codes.id) places.entity_id, places.name, codes.id",
:include => [:place_types, {:entity => [:addresses, :canonical_address]}],
:conditions => [ "LOWER(places.name) LIKE ? AND codes.code_name = 'placetype' AND codes.the_code IN (#{type_array.to_list}) AND entities.deleted_at IS NULL", name.downcase + '%'],
:order => "LOWER(TRIM(name)) ASC",
:limit => 20
)
end
def find_or_build_event
  # Filter: load the referenced event, or build a fresh unsaved instance of
  # the requested event type when no :event_id is supplied.
  @event =
    if params[:event_id]
      Event.find(params[:event_id])
    else
      params[:event_type].camelize.constantize.new
    end
end
# Filter (update only): touch updated_at so staleness checks (see
# can_view?'s stale? call) reflect this modification.
# NOTE(review): uses DateTime.now rather than Time.now -- presumably fine
# for the AR column, but confirm before changing.
def update_last_modified_date
@event.updated_at = DateTime.now
end
def load_event_queues
  # Filter (index/export): queues for every jurisdiction the user may view.
  viewable_ids = User.current_user.jurisdiction_ids_for_privilege(:view_event)
  @event_queues = EventQueue.queues_for_jurisdictions(viewable_ids)
end
# Shared setup for index/export: validates paging and search params, then
# loads @events for the filtered view. Returns true on success; on failure
# renders an error response and returns false so the caller can bail out.
def index_processing
  if params[:per_page].to_i > 100
    render :text => t("too_many_cmrs"), :layout => 'application', :status => 400
    return false
  end
  # A contact-event search without an investigator selection is unbounded;
  # reject it up front with a 400.
  event_types = params[:event_types]
  if !event_types.blank? && event_types.include?('ContactEvent') && params[:investigators].blank?
    render :text => "Contact event search must include investigator selection.", :layout => 'application', :status => 400
    return false
  end
  begin
    @export_options = params[:export_options]
    # These options double as the user's persisted default view (below).
    query_options = {
      :event_types => params[:event_types],
      :states => params[:states],
      :queues => params[:queues],
      :investigators => params[:investigators],
      :diseases => params[:diseases],
      :order_direction => params[:sort_direction],
      :do_not_show_deleted => params[:do_not_show_deleted],
      :per_page => params[:per_page]
    }
    @events = MorbidityEvent.find_all_for_filtered_view(query_options.merge({
      :view_jurisdiction_ids => User.current_user.jurisdiction_ids_for_privilege(:view_event),
      :access_sensitive_jurisdiction_ids => User.current_user.jurisdiction_ids_for_privilege(:access_sensitive_diseases),
      :order_by => params[:sort_order],
      :page => params[:page]
    }))
    User.current_user.update_attribute('event_view_settings', query_options) if params[:set_as_default_view] == "1"
  rescue
    # Malformed filter input surfaces as a static 404 page.
    render :file => static_error_page_path(404), :layout => 'application', :status => 404
    return false
  end
  true
end
end
fix bug in event search
# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012, 2013 The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
# Value holder for the jurisdiction-routing form fields (see
# EventsController#edit_jurisdiction / #jurisdiction).
RoutingStruct = Struct.new(:jurisdiction_id, :note)
class EventsController < ApplicationController
before_filter :can_update?, :only => [:edit, :update, :destroy, :soft_delete, :event_type]
before_filter :can_new?, :only => [:new, :create]
before_filter :can_view?, :only => [:show, :export_single]
before_filter :can_index?, :only => [:index, :export]
before_filter :can_access_sensitive?, :only => [:edit, :show, :update, :destroy, :soft_delete]
before_filter :set_tab_index
before_filter :update_last_modified_date, :only => [:update]
before_filter :find_or_build_event, :only => [ :reporters_search_selection, :reporting_agencies_search, :reporting_agency_search_selection ]
before_filter :can_promote?, :only => :event_type
before_filter :load_event_queues, :only => [:index, :export]
before_filter :reject_if_wrong_type, :only => [:show, :export_single, :edit, :update, :destroy, :soft_delete, :event_type]
def index
return unless index_processing
@event_states_and_descriptions = Event.get_all_states_and_descriptions
respond_to do |format|
format.html
format.xml { render :xml => @events }
end
end
def export
return unless index_processing
respond_to do |format|
format.csv
end
end
def contacts_search
begin
@results = HumanEvent.find_by_name_and_bdate(
:fulltext_terms => params[:name]
)
rescue
flash.now[:error] = t(:invalid_search_criteria)
end
render :partial => "events/contacts_search", :layout => false
end
def clinicians_search_selection
clinician_entity = PersonEntity.find(params[:id])
@clinician = Clinician.new
@clinician.person_entity = clinician_entity
render :partial => "events/clinician_show", :layout => false, :locals => { :event_type => params[:event_type] }
end
def reporter_search_selection
@event = Event.find_by_id(params[:event_id]) || params[:event_type].constantize.new
@event.build_reporter :secondary_entity_id => params[:id]
render :partial => "events/reporter_from_search", :layout => false
end
def places_search
page = params[:page] ? params[:page] : 1
name = params[:name]
# DEBT: Sure there must be a better way to parse this.
type_ids = params[:types].sub(/^\[(.*)\]$/, '\1').split(',').map {|s| s.to_i}
types = Code.find(type_ids).map{|c|c.the_code}
types = Place.epi_type_codes if types.blank?
begin
@places = Place.starts_with(name).types(types).paginate(:include => { :entity => [:addresses, :canonical_address] }, :page => page, :per_page => 10)
rescue
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@places = []
end
render :partial => "events/places_search", :layout => false, :locals => { :places => @places }
end
def places_search_selection
place_entity = PlaceEntity.find(params[:id])
@place = PlaceEvent.new
@place.build_interested_place
@place.interested_place.place_entity = place_entity
@place.build_participations_place
render :partial => "events/place_exposure_show", :layout => false, :locals => {:event_type => params[:event_type].underscore}
end
def clinicians_search
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@clinicians = Person.active.send(:clinicians).find(:all,
:conditions => ["(LOWER(last_name) LIKE ? OR LOWER(first_name) LIKE ?)", name.downcase + '%', name.downcase + '%']).paginate(:include => {:person_entity => :telephones}, :page => page, :per_page => 10)
rescue
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@clinicians = []
end
render :partial => "events/clinicians_search", :layout => false
end
def reporters_search
@event = Event.find_by_id(params[:event_id]) || params[:event_type].constantize.new
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@reporters = Person.active.send(:reporters).find(:all,
:conditions => ["(LOWER(last_name) LIKE ? OR LOWER(first_name) LIKE ?)", name.downcase + '%', name.downcase + '%']).paginate(:include => {:person_entity => :telephones}, :page => page, :per_page => 10)
rescue
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@reporters = []
end
render :partial => "events/reporters_search", :layout => false
end
def diagnostic_facilities_search
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@places = Place.diagnostic_facilities(name).paginate(:include => { :entity => [:addresses, :canonical_address] }, :page => page, :per_page => 10)
rescue
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@places = []
end
render :partial => "events/diagnostics_search", :layout => false
end
def diagnostics_search_selection
diagnostic_entity = PlaceEntity.find(params[:id])
@diagnostic = DiagnosticFacility.new
@diagnostic.place_entity = diagnostic_entity
render :partial => "events/diagnostic", :layout => false, :locals => {:event_type => params[:event_type]}
end
def reporting_agencies_search
page = params[:page] || 1
name = (params[:name] || '').strip
begin
@places = Place.reporting_agencies_by_name(name).paginate(:include => { :entity => [:addresses, :canonical_address] }, :page => page, :per_page => 10)
rescue
logger.error($!)
flash.now['error'] = t('invalid_search_criteria')
@places = []
end
render :partial => "events/reporting_agencies_search", :layout => false
end
def reporting_agency_search_selection
if @event.reporting_agency.nil?
@event.build_reporting_agency :secondary_entity_id => params[:id]
else
@event.reporting_agency.secondary_entity_id = params[:id]
end
render :layout => false
end
# This action is for development/testing purposes only. This is not a "real" login action
def change_user
auth_allow_user_switch = config_option(:auth_allow_user_switch)
if auth_allow_user_switch == true
session[:user_id] = params[:user_id]
User.current_user = User.find_by_uid(params[:user_id])
redirect_to request.env["HTTP_REFERER"]
else
# NOTE(review): key spelling "avaliable" presumably mirrors the locale
# files -- do not "fix" it here without updating them too.
render :text => t("action_not_avaliable"), :status => 403
end
end
def soft_delete
if @event.soft_delete
flash[:notice] = t("successfully_marked_as_deleted")
redirect_to request.env["HTTP_REFERER"]
else
flash[:error] = t("error_marking_event_as_deleted")
redirect_to request.env["HTTP_REFERER"]
end
end
def lab_form
lab = Lab.new
lab.build_place_entity
lab.place_entity.build_place
lab.lab_results.build
@disease = params[:disease_id].blank? ? nil : Disease.find(params[:disease_id])
# Need the event to collect @event.form_referneces for repeaters
# nil is ok for unsaved events
@event = Event.find_by_id(params[:event_id])
render :partial => 'events/lab', :object => lab, :locals => {:prefix => params[:prefix] }
end
def lab_result_form
@disease = params[:disease_id].blank? ? nil : Disease.find(params[:disease_id])
# Need the event to collect @event.form_referneces for repeaters
# nil is ok for unsaved events
@event = Event.find_by_id(params[:event_id])
render :partial => 'events/lab_result', :object => LabResult.new, :locals => {:prefix => params[:prefix]}
end
def test_type_options
render :inline => <<-test_opts
<% test_types = test_type_options(nil, nil, nil) %>
<option value=""/>
<%= options_from_collection_for_select(test_types, 'id', 'common_name') %>
test_opts
end
def organism_options
render :inline => <<-org_opts
<% org_types = organism_options(nil, nil, nil) %>
<option value=""/>
<%= options_from_collection_for_select(org_types, 'id', 'organism_name') %>
org_opts
end
def edit_jurisdiction
respond_to do |format|
format.xml do
@event = Event.find params[:id]
@routing = RoutingStruct.new
@routing.jurisdiction_id = @event.jurisdiction.secondary_entity_id if @event.jurisdiction
@routing.note = ''
end
end
end
# Route an event from one jurisdiction to another
def jurisdiction
@event = Event.find(params[:id])
begin
# Debt: be nice to have to only call one method to route
Event.transaction do
@event.assign_to_lhd params[:routing][:jurisdiction_id], params[:secondary_jurisdiction_ids] || [], params[:routing][:note]
@event.reset_to_new if @event.jurisdiction.place.is_unassigned_jurisdiction?
@event.save!
end
rescue Exception => e
# DEBT: The :no_jurisdiction_change halted_because value is set
# at lib/routing/workflow_helper.rb:41. However, in some cases,
# this method is never called. For example, if the
# jurisdiction_id field is invalid (not a legitimate place
# entity), an exception is raised within the Workflow module
# before assign_to_lhd is called. We do not seem to have direct
# control over that field, which in that instance is a string
# like "Couldn't find PlaceEntity with ID=721." The exceptions
# are not distinguished by class either. The only immediate way
# to distinguish these cases is by parsing this string in one
# place or another.
if @event.halted? && @event.halted_because =~ /^Couldn't find PlaceEntity with ID=/
respond_to do |format|
# DEBT: Respond to HTML? This can't happen, since the user
# is given a drop-down list of place entities.
format.xml do
@event.errors.add(:jurisdiction_id, @event.halted_because)
render :xml => @event.errors, :status => :unprocessable_entity
end
end
elsif @event.halted? && @event.halted_because != :no_jurisdiction_change
respond_to do |format|
format.html do
render :partial => "events/permission_denied", :locals => { :reason => e.message, :event => @event }, :status => 403, :layout => true
end
format.xml { render :xml => @event.errors, :status => :forbidden }
end
else
if User.current_user.can_update?(@event)
respond_to do |format|
format.html do
flash.now[:error] = t("unable_to_route_cmr", :message => e.message)
render :action => :edit, :status => :bad_request
end
format.xml { render :xml => @event.errors, :status => :bad_request }
end
else
respond_to do |format|
format.html do
flash[:error] = t(:unable_to_route_cmr_no_edit_priv, :message => e.message)
redirect_to :back
end
format.xml { render :xml => @event.errors, :status => :forbidden }
end
end
end
return
end
respond_to do |format|
format.html do
if User.current_user.is_entitled_to_in?(:view_event, params[:routing][:jurisdiction_id]) or
User.current_user.is_entitled_to_in?(:view_event, params[:secondary_jurisdiction_ids])
flash[:notice] = t("event_successfully_routed")
redirect_to :back
else
flash[:notice] = t("event_successfully_routed_no_privs")
redirect_to events_path
end
end
format.xml { head :ok }
end
end
def state
@event = Event.find(params[:id])
workflow_action = params[:morbidity_event].delete(:workflow_action)
# Squirrel any notes away
note = params[:morbidity_event].delete(:note)
begin
# A status change may be accompanied by other values such as an
# event queue, set them
@event.attributes = params[:morbidity_event]
@event.send(workflow_action, note)
rescue Exception => e
# grr. workflow halt exception doesn't work as documented
if @event.halted?
render :partial => "events/permission_denied", :locals => { :reason => e.message, :event => nil }, :layout => true, :status => 403 and return
else
logger.error("Illegal state transition")
logger.error(e.message)
flash[:error] = "Event state was modified prior to your action. Review the event again."
redirect_to :back and return
end
end
if @event.save
flash[:notice] = t("event_successfully_routed")
redirect_to :back
else
if User.current_user.can_update?(@event)
flash.now[:error] = t("unable_to_change_cmr_state")
render :action => :edit, :status => :bad_request
else
flash[:error] = t(:unable_to_change_state_no_edit_privs)
redirect_to events_path
end
end
end
def event_type
if promoted_event = @event.promote_to(params[:type])
flash[:notice] = t(:promoted_to, :type => params[:type].humanize.downcase)
redirect_to @template.event_path(promoted_event)
else
flash.now[:error] = t("could_not_promote_event")
render :action => :edit, :status => :bad_request
end
end
private
def can_update?
@event ||= Event.find(params[:id])
unless User.current_user.can_update?(@event)
render :partial => 'events/permission_denied', :layout => true, :locals => { :reason => t("no_update_privs_for_jurisdiction"), :event => @event }, :status => 403 and return
end
end
def can_new?
unless User.current_user.can_create?
render :partial => 'events/permission_denied', :layout => true, :locals => { :reason => t("no_event_create_privs") }, :status => 403 and return
end
end
def can_index?
unless User.current_user.can_view?
render :partial => 'events/permission_denied', :layout => true, :locals => { :reason => t("no_event_view_privs") }, :status => 403 and return
end
end
def can_view?
@event ||= Event.find(params[:id])
@display_view_warning = false
unless User.current_user.can_view?(@event)
log_access_or_prompt_for_reason
return
end
stale?(:last_modified => @event.updated_at.utc, :etag => @event) if RAILS_ENV == 'production'
end
def can_access_sensitive?
@event ||= Event.find(params[:id])
if @event.sensitive? && !User.current_user.can_access_sensitive_diseases?(@event)
render :file => static_error_page_path(403), :layout => 'application', :status => 403 and return
end
end
def can_create?
User.current_user.can_create?(@event) && (@event.sensitive? ? User.current_user.can_access_sensitive_diseases?(@event) : true)
end
def reject_if_wrong_type(event=@event)
is_rejected = false
if event.read_attribute('type') != controller_name.classify
is_rejected = true
correct_url = @template.event_path(@event)
event_link = correct_url ? @template.link_to(correct_url, correct_url) : nil
respond_to do |format|
format.html { render :partial => "shared/missing_event", :locals => {:event_link => event_link}, :layout => 'application', :status => 404 }
format.all { render :nothing => true, :status => 404 }
end
end
is_rejected
end
def log_access_or_prompt_for_reason
access_record = AccessRecord.find_by_user_id_and_event_id(User.current_user.id, @event.id)
redirect_to(new_event_access_record_path(@event)) and return if access_record.nil?
access_record.update_attribute(:access_count, access_record.access_count + 1)
@display_view_warning = true
@event.add_note(I18n.translate("system_notes.extra_jurisdictional_view_only_access", :locale => I18n.default_locale))
end
def set_tab_index
@query_params = {}
@tab_index = 0
unless params[:tab_index].blank?
@tab_index = params[:tab_index]
@query_params[:tab_index] = params[:tab_index]
end
end
# Debt: too fat. needs to be in the place class
def places_by_name_and_types(name, type_array)
@places = Place.find(:all, :select => "DISTINCT ON (LOWER(TRIM(places.name)), codes.id) places.entity_id, places.name, codes.id",
:include => [:place_types, {:entity => [:addresses, :canonical_address]}],
:conditions => [ "LOWER(places.name) LIKE ? AND codes.code_name = 'placetype' AND codes.the_code IN (#{type_array.to_list}) AND entities.deleted_at IS NULL", name.downcase + '%'],
:order => "LOWER(TRIM(name)) ASC",
:limit => 20
)
end
def find_or_build_event
if params[:event_id]
@event = Event.find(params[:event_id])
else
@event = params[:event_type].camelize.constantize.new
end
end
def update_last_modified_date
@event.updated_at = DateTime.now
end
def load_event_queues
@event_queues = EventQueue.queues_for_jurisdictions User.current_user.jurisdiction_ids_for_privilege(:view_event)
end
# Shared setup for index/export: validates paging and search params, then
# loads @events for the filtered view. Returns true on success; on failure
# renders an error response and returns false so the caller can bail out.
def index_processing
if params[:per_page].to_i > 100
render :text => t("too_many_cmrs"), :layout => 'application', :status => 400
return false
end
# A contact-event search without an investigator selection is unbounded;
# reject it up front with a 400.
event_types = params[:event_types];
if(!event_types.blank? && event_types.include?('ContactEvent') && params[:investigators].blank?)
render :text => "Contact event search must include investigator selection.", :layout => 'application', :status => 400
return false
end
begin
@export_options = params[:export_options]
# These options double as the user's persisted default view (below).
query_options = {
:event_types => params[:event_types],
:states => params[:states],
:queues => params[:queues],
:investigators => params[:investigators],
:diseases => params[:diseases],
:order_direction => params[:sort_direction],
:do_not_show_deleted => params[:do_not_show_deleted],
:per_page => params[:per_page]
}
@events = MorbidityEvent.find_all_for_filtered_view(query_options.merge({
:view_jurisdiction_ids => User.current_user.jurisdiction_ids_for_privilege(:view_event),
:access_sensitive_jurisdiction_ids => User.current_user.jurisdiction_ids_for_privilege(:access_sensitive_diseases),
:order_by => params[:sort_order],
:page => params[:page]
}))
User.current_user.update_attribute('event_view_settings', query_options) if params[:set_as_default_view] == "1"
rescue
# Malformed filter input surfaces as a static 404 page.
render :file => static_error_page_path(404), :layout => 'application', :status => 404
return false
end
return true
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-
# Gemspec for the fractional gem, version 0.0.2 (jeweler-generated).
Gem::Specification.new do |s|
s.name = %q{fractional}
s.version = "0.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Chris O'Sullivan"]
s.date = %q{2009-10-02}
s.description = %q{Fractional is a Ruby library for parsing fractions.}
s.email = %q{thechrisoshow@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"fractional.gemspec",
"lib/fractional.rb",
"spec/fractional_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/thechrisoshow/fractional}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.5}
s.summary = %q{Fractional is a Ruby library for parsing fractions}
s.test_files = [
"spec/fractional_spec.rb",
"spec/spec_helper.rb"
]
if s.respond_to? :specification_version then
# NOTE(review): current_version is assigned but never read — jeweler template artifact.
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 0"])
else
s.add_dependency(%q<rspec>, [">= 0"])
end
else
s.add_dependency(%q<rspec>, [">= 0"])
end
end
Updated gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-
# Gemspec for the fractional gem, version 0.0.3 (jeweler-generated).
Gem::Specification.new do |s|
s.name = %q{fractional}
s.version = "0.0.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Chris O'Sullivan"]
s.date = %q{2009-10-05}
s.description = %q{Fractional is a Ruby library for parsing fractions.}
s.email = %q{thechrisoshow@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"fractional.gemspec",
"lib/fractional.rb",
"spec/fractional_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/thechrisoshow/fractional}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.5}
s.summary = %q{Fractional is a Ruby library for parsing fractions}
s.test_files = [
"spec/fractional_spec.rb",
"spec/spec_helper.rb"
]
if s.respond_to? :specification_version then
# NOTE(review): current_version is assigned but never read — jeweler template artifact.
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 0"])
else
s.add_dependency(%q<rspec>, [">= 0"])
end
else
s.add_dependency(%q<rspec>, [">= 0"])
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jeffries_tube/version'

# Gemspec for jeffries_tube.
Gem::Specification.new do |spec|
  spec.name          = "jeffries_tube"
  spec.version       = JeffriesTube::VERSION
  spec.authors       = ["Brian Samson"]
  spec.email         = ["brian@briansamson.com"]
  # Fixed: summary/description still contained the bundler "TODO:"
  # placeholders, which RubyGems/Bundler flag at build time.
  spec.summary       = %q{Ten Forward Consulting useful tools.}
  spec.description   = %q{cap rails:console}
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.5"
  spec.add_development_dependency "rake"
  spec.add_runtime_dependency "rails", ">=3.0"
end
Remove Rails dependency.
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jeffries_tube/version'
# Gemspec for jeffries_tube — Ten Forward Consulting tooling gem.
Gem::Specification.new do |spec|
spec.name          = "jeffries_tube"
spec.version       = JeffriesTube::VERSION
spec.authors       = ["Brian Samson"]
spec.email         = ["brian@tenforwardconsulting.com"]
spec.summary       = %q{Ten Forward Consulting useful tools.}
spec.description   = %q{cap rails:console}
spec.homepage      = ""
spec.license       = "MIT"
spec.files         = `git ls-files -z`.split("\x0")
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.5"
spec.add_development_dependency "rake"
end
|
$LOAD_PATH.unshift(File.expand_path("lib", __dir__))
require "jekyll/assets/version"

# Gemspec for jekyll-assets.
Gem::Specification.new do |spec|
  spec.version = Jekyll::Assets::VERSION
  spec.homepage = "http://github.com/jekyll/jekyll-assets/"
  spec.authors = ["Jordon Bedwell", "Aleksey V Zapparov", "Zachary Bush"]
  spec.email = ["jordon@envygeeks.io", "ixti@member.fsf.org", "zach@zmbush.com"]
  spec.files = %W(Rakefile Gemfile README.md LICENSE) + Dir["lib/**/*"]
  spec.summary = "Assets for Jekyll"
  spec.name = "jekyll-assets"
  spec.license = "MIT"
  # NOTE(review): has_rdoc is deprecated in modern RubyGems.
  spec.has_rdoc = false
  spec.require_paths = ["lib"]
  # Fixed: accidental double assignment (`spec.description = spec.description = ...`).
  spec.description = <<-DESC
A Jekyll plugin, that allows you to write javascript/css assets in
other languages such as CoffeeScript, Sass, Less and ERB, concatenate
them, respecting dependencies, minify and many more.
  DESC

  spec.add_runtime_dependency("rack", "~> 1.6")
  spec.add_runtime_dependency("activesupport", "~> 5.0")
  spec.add_runtime_dependency("concurrent-ruby", "~> 1.0")
  spec.add_runtime_dependency("sprockets", ">= 3.3", "< 4.1")
  spec.add_runtime_dependency("fastimage", ">= 1.8", "~> 2.0")
  spec.add_runtime_dependency("liquid-tag-parser", "~> 1.0")
  spec.add_runtime_dependency("jekyll", ">= 3.0", "~> 3.1")
  spec.add_runtime_dependency("jekyll-sanity", "~> 1.2")
  spec.add_runtime_dependency("pathutil", ">= 0.8")
  spec.add_runtime_dependency("extras", "~> 0.2")
  spec.add_development_dependency("nokogiri", "~> 1.6")
  spec.add_development_dependency("luna-rspec-formatters", "~> 3.5")
  spec.add_development_dependency("rspec", "~> 3.4")
end
Make Nokogiri and Nokogumbo hard dependencies.
$LOAD_PATH.unshift(File.expand_path("lib", __dir__))
require "jekyll/assets/version"

# Gemspec for jekyll-assets (nokogiri/nokogumbo as hard dependencies).
Gem::Specification.new do |spec|
  spec.version = Jekyll::Assets::VERSION
  spec.homepage = "http://github.com/jekyll/jekyll-assets/"
  spec.authors = ["Jordon Bedwell", "Aleksey V Zapparov", "Zachary Bush"]
  spec.email = ["jordon@envygeeks.io", "ixti@member.fsf.org", "zach@zmbush.com"]
  spec.files = %W(Rakefile Gemfile README.md LICENSE) + Dir["lib/**/*"]
  spec.summary = "Assets for Jekyll"
  spec.name = "jekyll-assets"
  spec.license = "MIT"
  # NOTE(review): has_rdoc is deprecated in modern RubyGems.
  spec.has_rdoc = false
  spec.require_paths = ["lib"]
  # Fixed: accidental double assignment (`spec.description = spec.description = ...`).
  spec.description = <<-DESC
A Jekyll plugin, that allows you to write javascript/css assets in
other languages such as CoffeeScript, Sass, Less and ERB, concatenate
them, respecting dependencies, minify and many more.
  DESC

  spec.add_runtime_dependency("rack", "~> 1.6")
  spec.add_runtime_dependency("nokogiri", "~> 1.8")
  spec.add_runtime_dependency("activesupport", "~> 5.0")
  spec.add_runtime_dependency("concurrent-ruby", "~> 1.0")
  spec.add_runtime_dependency("sprockets", ">= 3.3", "< 4.1")
  spec.add_runtime_dependency("fastimage", ">= 1.8", "~> 2.0")
  spec.add_runtime_dependency("liquid-tag-parser", "~> 1.0")
  spec.add_runtime_dependency("jekyll", ">= 3.0", "~> 3.1")
  spec.add_runtime_dependency("jekyll-sanity", "~> 1.2")
  spec.add_runtime_dependency("nokogumbo", "~> 1.4")
  spec.add_runtime_dependency("pathutil", ">= 0.8")
  spec.add_runtime_dependency("extras", "~> 0.2")
  # Removed the development dependency on nokogiri ("~> 1.6"): it
  # conflicts with the runtime constraint "~> 1.8" above and the
  # runtime dependency already makes nokogiri available for tests.
  spec.add_development_dependency("luna-rspec-formatters", "~> 3.5")
  spec.add_development_dependency("rspec", "~> 3.4")
end
|
# -*- encoding: utf-8 -*-
require File.expand_path("../lib/jekyll-pandoc/version", __FILE__)
# Gemspec for jekyll-pandoc — a Pandoc-backed markdown converter for Jekyll.
Gem::Specification.new do |s|
s.name        = 'jekyll-pandoc'
s.version     = JekyllPandoc::VERSION
s.platform    = Gem::Platform::RUBY
s.authors     = ["Martin Fenner"]
s.email       = 'martin.fenner@datacite.org'
s.homepage    = 'https://github.com/mfenner/jekyll-pandoc'
s.summary     = 'Jekyll Pandoc markdown converter'
s.description = 'A Jekyll markdown converter that uses Pandoc (via the pandoc-ruby gem).'
s.license     = 'MIT'
s.required_rubygems_version = ">= 1.3.6"
s.add_dependency "jekyll", ">= 2.1.1"
s.add_dependency "pandoc-ruby", "~> 1.0.0"
s.add_development_dependency 'rake'
s.add_development_dependency "rspec", "~> 3.3"
s.add_development_dependency "rdiscount", "~> 2.1.8"
# Only library files are packaged.
s.files = Dir.glob("lib/**/*.rb")
end
Require Ruby 1.9.3.
# -*- encoding: utf-8 -*-
require File.expand_path("../lib/jekyll-pandoc/version", __FILE__)
# Gemspec for jekyll-pandoc; declares the minimum supported Ruby (1.9.3).
Gem::Specification.new do |s|
s.name        = 'jekyll-pandoc'
s.version     = JekyllPandoc::VERSION
s.platform    = Gem::Platform::RUBY
s.authors     = ["Martin Fenner"]
s.email       = 'martin.fenner@datacite.org'
s.homepage    = 'https://github.com/mfenner/jekyll-pandoc'
s.summary     = 'Jekyll Pandoc markdown converter'
s.description = 'A Jekyll markdown converter that uses Pandoc (via the pandoc-ruby gem).'
s.license     = 'MIT'
s.required_rubygems_version = ">= 1.3.6"
s.required_ruby_version = '>= 1.9.3'
s.add_dependency "jekyll", ">= 2.1.1"
s.add_dependency "pandoc-ruby", "~> 1.0.0"
s.add_development_dependency 'rake'
s.add_development_dependency "rspec", "~> 3.3"
s.add_development_dependency "rdiscount", "~> 2.1.8"
# Only library files are packaged.
s.files = Dir.glob("lib/**/*.rb")
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "jetpack/version"

# Gemspec for jetpack-rails.
Gem::Specification.new do |s|
  s.name        = "jetpack-rails"
  s.version     = Jetpack::VERSION
  s.authors     = ["Andrew Burleson"]
  s.email       = ["andrew@eighty-b.com"]
  s.homepage    = "http://github.com/burlesona/jetpack-rails"
  s.summary     = %q{A sweet fractional grid system plus an awesome bootstrap mashup, optimized for the asset pipeline.}
  # Typos fixed in description: "boostrap" -> "bootstrap",
  # "defintions" -> "definitions".
  s.description = %q{A sweet bootstrap mashup for Rails applications.
The philosophy is pretty simple, it should make building Rails apps go a lot faster.
The library prefers modularity over rigidity and mixins over definitions. Check out the github readme for more.}
  # Added: license metadata was missing; `gem build` warns without it.
  s.license = 'MIT'

  s.files = `git ls-files`.split("\n")
  # s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  # s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]

  # specify any dependencies here; for example:
  s.add_dependency 'sass-rails', '>= 3.1'
  # s.add_development_dependency "rspec"
  # s.add_runtime_dependency "rest-client"
end
Add MIT License to Gemspec
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "jetpack/version"

# Gemspec for jetpack-rails (MIT licensed).
Gem::Specification.new do |s|
  s.name        = "jetpack-rails"
  s.version     = Jetpack::VERSION
  s.authors     = ["Andrew Burleson"]
  s.email       = ["andrew@eighty-b.com"]
  s.homepage    = "http://github.com/burlesona/jetpack-rails"
  s.summary     = %q{A sweet fractional grid system plus an awesome bootstrap mashup, optimized for the asset pipeline.}
  # Typos fixed in description: "boostrap" -> "bootstrap",
  # "defintions" -> "definitions".
  s.description = %q{A sweet bootstrap mashup for Rails applications.
The philosophy is pretty simple, it should make building Rails apps go a lot faster.
The library prefers modularity over rigidity and mixins over definitions. Check out the github readme for more.}
  s.license = 'MIT'

  s.files = `git ls-files`.split("\n")
  # s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  # s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]

  # specify any dependencies here; for example:
  s.add_dependency 'sass-rails', '>= 3.1'
  # s.add_development_dependency "rspec"
  # s.add_runtime_dependency "rest-client"
end
|
Bump version
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'freemobile/version'

# Gemspec for the freemobile gem (SMS through the Free Mobile API).
Gem::Specification.new do |spec|
  spec.name          = "freemobile"
  spec.version       = Freemobile::VERSION
  spec.authors       = ["CkuT"]
  spec.email         = ["s3cu@hotmail.fr"]
  spec.summary       = %q{Freemobile gem allows you to use Freemobile API}
  spec.description   = %q{You can send SMS through Freemobile API}
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Fixed: a trailing comma after "rest-client" made the following
  # add_development_dependency call parse as a second argument to
  # add_runtime_dependency, corrupting both dependency declarations.
  spec.add_runtime_dependency "rest-client"
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
end
Add rest-client dependency
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'freemobile/version'
# Gemspec for the freemobile gem (SMS through the Free Mobile API).
Gem::Specification.new do |spec|
spec.name          = "freemobile"
spec.version       = Freemobile::VERSION
spec.authors       = ["CkuT"]
spec.email         = ["s3cu@hotmail.fr"]
spec.summary       = %q{Freemobile gem allows you to use Freemobile API}
spec.description   = %q{You can send SMS through Freemobile API}
spec.homepage      = ""
spec.license       = "MIT"
spec.files         = `git ls-files -z`.split("\x0")
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_runtime_dependency "rest-client"
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake"
end
|
# CocoaPods spec for Skopelos — Active-Record-style Core Data wrapper (Swift).
Pod::Spec.new do |s|
s.name             = 'Skopelos'
s.version          = '2.0.0-je'
s.license          = { :type => 'Apache 2.0', :file => 'LICENSE' }
s.summary          = 'Simply all you need for doing Core Data. Swift flavour.'
s.description      = 'A minimalistic, thread safe, non-boilerplate and super easy to use version of Active Record on Core Data. Simply all you need for doing Core Data. Swift flavour.'
s.homepage         = 'https://github.com/justeat/Skopelos'
s.author           = 'Just Eat'
s.source           = { :git => 'https://github.com/justeat/Skopelos.git', :tag => "#{s.version}" }
s.source_files     = 'Skopelos/src/**/*.{swift}'
s.module_name      = 'Skopelos'
s.ios.deployment_target = '9.0'
s.watchos.deployment_target = '3.0'
s.requires_arc     = true
s.frameworks = ["Foundation", "CoreData"]
end
Bump version to 2.0.0
# CocoaPods spec for Skopelos 2.0.0 — Active-Record-style Core Data wrapper (Swift).
Pod::Spec.new do |s|
s.name             = 'Skopelos'
s.version          = '2.0.0'
s.license          = { :type => 'Apache 2.0', :file => 'LICENSE' }
s.summary          = 'Simply all you need for doing Core Data. Swift flavour.'
s.description      = 'A minimalistic, thread safe, non-boilerplate and super easy to use version of Active Record on Core Data. Simply all you need for doing Core Data. Swift flavour.'
s.homepage         = 'https://github.com/justeat/Skopelos'
s.author           = 'Just Eat'
s.source           = { :git => 'https://github.com/justeat/Skopelos.git', :tag => "#{s.version}" }
s.source_files     = 'Skopelos/src/**/*.{swift}'
s.module_name      = 'Skopelos'
s.ios.deployment_target = '9.0'
s.watchos.deployment_target = '3.0'
s.requires_arc     = true
s.frameworks = ["Foundation", "CoreData"]
end
|
module LoginEndpoint
  # Installs the internal login route on the including Sinatra app.
  def self.included(base)
    # Internal Login
    # https://github.com/cloudfoundry/uaa/blob/master/docs/UAA-APIs.rst#internal-login-post-logindo
    base.post '/uaa/login.do' do
      email = auth_hash[:info][:email]
      user = env['omniauth.identity']
      if user.nil?
        # using something other than the Identity strategy (like LDAP)
        info = env['omniauth.auth'][:info]
        username = info[:nickname]
        if env['omniauth.auth'][:provider] =='ldap'
          username = env['omniauth.auth'][:extra][:raw_info][AppConfig[:strategy][:ldap][:uid]]
          username = username.kind_of?(Array) ? username.first : username
        end
        if username.nil?
          # (message typo fixed: "to user for" -> "to use for")
          raise "Couldn't find a username to use for this user! #{env['omniauth.auth'].inspect}"
        end
        user = Identity.find_by_username(username)
        if user.nil?
          user = Identity.create!(
            username: username,
            email: info[:email],
            given_name: info[:first_name],
            family_name: info[:last_name],
          )
        end
        if env['omniauth.auth'][:provider] =='ldap'
          config_admin = AppConfig[:strategy][:ldap][:admin_user]
          if config_admin && username.downcase == config_admin.downcase
            admin_group = Group.find_by_name!('cloud_controller.admin')
            # Fixed: only grant the admin group once. Unconditionally
            # appending it made the configured admin_user's login fail on
            # every login after the first (duplicate group membership).
            unless user.groups.include? admin_group
              user.groups << admin_group
              user.save!
            end
          end
        end
      end
      set_current_user(user)
      if env["aok.block"] # legacy login
        env["aok.block"].call(user)
        return
      end
      if env["aok.no_openid"] # legacy login
        return {:email => email}.to_json
      end
      destination = nil
      if request.env['omniauth.origin']
        destination = CGI.unescape(request.env['omniauth.origin'])
        logger.debug "Found stored origin for redirect: #{destination.inspect}"
        unless destination =~ /^\/uaa/
          # Redirects within AOK only
          logger.debug "Don't like the looks of that redirect; overwriting..."
          destination = nil
        end
      end
      destination ||= '/uaa'
      redirect to(destination), 302
    end
  end
end
Bug #102562 admin_user fails after first login
# Sinatra mixin providing the UAA-compatible internal login endpoint.
module LoginEndpoint
def self.included(base)
# Internal Login
# https://github.com/cloudfoundry/uaa/blob/master/docs/UAA-APIs.rst#internal-login-post-logindo
base.post '/uaa/login.do' do
email = auth_hash[:info][:email]
user = env['omniauth.identity']
if user.nil?
# using something other than the Identity strategy (like LDAP)
info = env['omniauth.auth'][:info]
username = info[:nickname]
if env['omniauth.auth'][:provider] =='ldap'
# LDAP: resolve the username via the configured uid attribute.
username = env['omniauth.auth'][:extra][:raw_info][AppConfig[:strategy][:ldap][:uid]]
username = username.kind_of?(Array) ? username.first : username
end
if username.nil?
raise "Couldn't find a username to user for this user! #{env['omniauth.auth'].inspect}"
end
# Auto-provision an Identity for first-time logins.
user = Identity.find_by_username(username)
if user.nil?
user = Identity.create!(
username: username,
email: info[:email],
given_name: info[:first_name],
family_name: info[:last_name],
)
end
if env['omniauth.auth'][:provider] =='ldap'
config_admin = AppConfig[:strategy][:ldap][:admin_user]
if config_admin && username.downcase == config_admin.downcase
admin_group = Group.find_by_name!('cloud_controller.admin')
# Grant the admin group only once so repeat logins don't fail.
unless user.groups.include? admin_group
user.groups << admin_group
user.save!
end
end
end
end
set_current_user(user)
if env["aok.block"] # legacy login
env["aok.block"].call(user)
return
end
if env["aok.no_openid"] # legacy login
return {:email => email}.to_json
end
destination = nil
if request.env['omniauth.origin']
destination = CGI.unescape(request.env['omniauth.origin'])
logger.debug "Found stored origin for redirect: #{destination.inspect}"
unless destination =~ /^\/uaa/
# Redirects within AOK only
logger.debug "Don't like the looks of that redirect; overwriting..."
destination = nil
end
end
destination ||= '/uaa'
redirect to(destination), 302
end
end
end
|
#
# Cookbook Name:: mkf
# Recipe:: default
#
# Copyright 2013, MeineKleineFarm UG
#
# All rights reserved - Do Not Redistribute
#
# Provisions rbenv Ruby, users/sudo, SSL certs, and deploys the shop
# Rails application behind unicorn + nginx.
include_recipe "users"
include_recipe "sudo"
gem_package "ruby-shadow"
gem_package "bundler"
include_recipe "rbenv::default"
include_recipe "rbenv::ruby_build"
rbenv_ruby "1.9.3-p448" do
ruby_version "1.9.3-p448"
global true
end
%w{bundler rake ruby-shadow pg unicorn}.each do |gem_name|
rbenv_gem gem_name do
ruby_version "1.9.3-p448"
end
end
include_recipe "users"
users_manage "users" do
group_id 100
action :create
end
users_manage "sudo" do
group_id 27
action :create
end
sudo 'shop' do
user "rails" # or a username
commands ['/etc/init.d/mkf_production reload']
# Fixed: without NOPASSWD the deploy's non-interactive restart_command
# ("sudo /etc/init.d/mkf_production reload", below) hangs on a
# password prompt.
nopasswd true
end
certificate_manage "shop" do
cert_path "/etc/ssl/"
key_file "shop.meinekleinefarm.org.key"
chain_file "shop.meinekleinefarm.org.crt"
end
application "mkf_production" do
path "/var/apps/mkf/production"
owner "rails"
group "rails"
repository "git://github.com/meinekleinefarm/shop.git"
revision "master"
deploy_key '~/.ssh/id_rsa'
# Keep the release for debugging
rollback_on_error false
action :force_deploy
#action :deploy
migrate true
create_dirs_before_symlink ["tmp"]
purge_before_symlink ["log", "tmp/pids", "public/system"]
symlink_before_migrate "database.yml" => "config/database.yml", "memcached.yml" => "config/memcached.yml"
symlinks "system" => "public/system", "pids" => "tmp/pids", "log" => "log", "spree" => "public/spree"
before_symlink do
directory "#{new_resource.shared_path}/log" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
directory "#{new_resource.shared_path}/system" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
directory "#{new_resource.shared_path}/spree" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
directory "#{new_resource.shared_path}/pids" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
end
# Apply the rails LWRP from application_ruby
rails do
# Rails-specific configuration. See the README in the
# application_ruby cookbook for more information.
bundler true
bundle_command "/opt/rbenv/shims/bundle"
restart_command "sudo /etc/init.d/mkf_production reload"
precompile_assets true
db_creds = Chef::EncryptedDataBagItem.load("passwords", "mkf_shop_db")
database do
adapter "postgresql"
encoding "unicode"
reconnect false
database "mkf_production"
username "mkf_production"
password db_creds["password"]
pool 50
end
# database_master_role "mkf_shop_database_server"
end
# Apply the unicorn LWRP, also from application_ruby
unicorn do
# unicorn-specific configuration.
preload_app true
before_fork <<-EOF
Signal.trap 'TERM' do
puts 'Unicorn master intercepting TERM and sending myself QUIT instead'
Process.kill 'QUIT', Process.pid
end
# This option works in together with preload_app true setting
# What it does is prevent the master process from holding
# the database connection
defined?(ActiveRecord::Base) and ActiveRecord::Base.connection.disconnect!
EOF
after_fork <<-EOF
Signal.trap 'TERM' do
puts 'Unicorn worker intercepting TERM and doing nothing. Wait for master to send QUIT'
end
defined?(ActiveRecord::Base) and ActiveRecord::Base.establish_connection
EOF
bundler true
worker_processes 8
worker_timeout 30
port '8080'
end
memcached do
role "memcached_master"
options do
ttl 1800
memory 256
end
end
nginx_load_balancer do
only_if { node['roles'].include?('mkf_shop_application_server') }
ssl true
ssl_certificate '/etc/ssl/certs/shop.meinekleinefarm.org.crt'
ssl_certificate_key '/etc/ssl/private/shop.meinekleinefarm.org.key'
application_server_role 'mkf_shop_application_server'
server_name 'shop.meinekleinefarm.org'
# application_socket ["/var/apps/mkf/production/shared/unicorn.sock"]
application_port 8080
template 'mkf_production.conf.erb'
end
end
Reload unicorn without password.
#
# Cookbook Name:: mkf
# Recipe:: default
#
# Copyright 2013, MeineKleineFarm UG
#
# All rights reserved - Do Not Redistribute
#
# Provisions rbenv Ruby, users/sudo, SSL certs, and deploys the shop
# Rails application behind unicorn + nginx.
include_recipe "users"
include_recipe "sudo"
gem_package "ruby-shadow"
gem_package "bundler"
include_recipe "rbenv::default"
include_recipe "rbenv::ruby_build"
rbenv_ruby "1.9.3-p448" do
ruby_version "1.9.3-p448"
global true
end
%w{bundler rake ruby-shadow pg unicorn}.each do |gem_name|
rbenv_gem gem_name do
ruby_version "1.9.3-p448"
end
end
include_recipe "users"
users_manage "users" do
group_id 100
action :create
end
users_manage "sudo" do
group_id 27
action :create
end
sudo 'shop' do
user "rails" # or a username
commands ['/etc/init.d/mkf_production reload']
# NOPASSWD lets the non-interactive restart_command below run sudo.
nopasswd true
end
certificate_manage "shop" do
cert_path "/etc/ssl/"
key_file "shop.meinekleinefarm.org.key"
chain_file "shop.meinekleinefarm.org.crt"
end
application "mkf_production" do
path "/var/apps/mkf/production"
owner "rails"
group "rails"
repository "git://github.com/meinekleinefarm/shop.git"
revision "master"
deploy_key '~/.ssh/id_rsa'
# Keep the release for debugging
rollback_on_error false
action :force_deploy
#action :deploy
migrate true
create_dirs_before_symlink ["tmp"]
purge_before_symlink ["log", "tmp/pids", "public/system"]
symlink_before_migrate "database.yml" => "config/database.yml", "memcached.yml" => "config/memcached.yml"
symlinks "system" => "public/system", "pids" => "tmp/pids", "log" => "log", "spree" => "public/spree"
before_symlink do
directory "#{new_resource.shared_path}/log" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
directory "#{new_resource.shared_path}/system" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
directory "#{new_resource.shared_path}/spree" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
directory "#{new_resource.shared_path}/pids" do
owner new_resource.owner
group new_resource.group
mode '755'
action :create
end
end
# Apply the rails LWRP from application_ruby
rails do
# Rails-specific configuration. See the README in the
# application_ruby cookbook for more information.
bundler true
bundle_command "/opt/rbenv/shims/bundle"
restart_command "sudo /etc/init.d/mkf_production reload"
precompile_assets true
db_creds = Chef::EncryptedDataBagItem.load("passwords", "mkf_shop_db")
database do
adapter "postgresql"
encoding "unicode"
reconnect false
database "mkf_production"
username "mkf_production"
password db_creds["password"]
pool 50
end
# database_master_role "mkf_shop_database_server"
end
# Apply the unicorn LWRP, also from application_ruby
unicorn do
# unicorn-specific configuration.
preload_app true
before_fork <<-EOF
Signal.trap 'TERM' do
puts 'Unicorn master intercepting TERM and sending myself QUIT instead'
Process.kill 'QUIT', Process.pid
end
# This option works in together with preload_app true setting
# What it does is prevent the master process from holding
# the database connection
defined?(ActiveRecord::Base) and ActiveRecord::Base.connection.disconnect!
EOF
after_fork <<-EOF
Signal.trap 'TERM' do
puts 'Unicorn worker intercepting TERM and doing nothing. Wait for master to send QUIT'
end
defined?(ActiveRecord::Base) and ActiveRecord::Base.establish_connection
EOF
bundler true
worker_processes 8
worker_timeout 30
port '8080'
end
memcached do
role "memcached_master"
options do
ttl 1800
memory 256
end
end
nginx_load_balancer do
only_if { node['roles'].include?('mkf_shop_application_server') }
ssl true
ssl_certificate '/etc/ssl/certs/shop.meinekleinefarm.org.crt'
ssl_certificate_key '/etc/ssl/private/shop.meinekleinefarm.org.key'
application_server_role 'mkf_shop_application_server'
server_name 'shop.meinekleinefarm.org'
# application_socket ["/var/apps/mkf/production/shared/unicorn.sock"]
application_port 8080
template 'mkf_production.conf.erb'
end
end
added .podspec
# CocoaPods spec for CHFunctionalArrayAdditions (NSArray higher-order helpers).
Pod::Spec.new do |s|
s.name     = 'CHFunctionalArrayAdditions'
s.version  = '1.0.0'
s.license  = 'MIT'
s.summary  = 'A few functional additions to NSArray such as ch_map: and ch_filter: that make working with NSArray much simpler and cleaner.'
s.homepage = 'https://github.com/chaione/CHFunctionalArrayAdditions'
s.authors  = { 'Terry Lewis' => 'terry@ploverproductions.com' }
s.source   = { :git => 'https://github.com/chaione/CHFunctionalArrayAdditions.git', :tag => '1.0.0' }
s.description = 'Higher order functions for NSArray.'
s.platform = :ios, '6.0'
s.requires_arc = true
s.source_files = 'CHFunctionalArrayAdditions/Source/*.{h,m}'
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'guard/kitchen/version'

# Gemspec for guard-kitchen — Guard plugin for Test Kitchen.
Gem::Specification.new do |spec|
  spec.name          = "guard-kitchen"
  spec.version       = Guard::Kitchen::VERSION
  spec.authors       = ["Adam Jacob"]
  spec.email         = ["adam@chef.io"]
  spec.description   = %q{Guard plugin for test kitchen}
  spec.summary       = %q{Guard plugin for test kitchen}
  spec.homepage      = "http://github.com/test-kitchen/guard-kitchen"
  # Fixed: "Apache 2" is not a valid SPDX identifier; RubyGems warns on
  # non-SPDX license strings.
  spec.license       = "Apache-2.0"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency "guard", "> 2.0.0"
  spec.add_dependency "mixlib-shellout"
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
end
Use a SPDX compliant license string in the gemspec
This is the correct string to use here
Signed-off-by: Tim Smith <764ef62106582a09ed09dfa0b6bff7c05fd7d1e4@chef.io>
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'guard/kitchen/version'
# Gemspec for guard-kitchen — Guard plugin for Test Kitchen (SPDX license id).
Gem::Specification.new do |spec|
spec.name          = "guard-kitchen"
spec.version       = Guard::Kitchen::VERSION
spec.authors       = ["Adam Jacob"]
spec.email         = ["adam@chef.io"]
spec.description   = %q{Guard plugin for test kitchen}
spec.summary       = %q{Guard plugin for test kitchen}
spec.homepage      = "http://github.com/test-kitchen/guard-kitchen"
spec.license       = "Apache-2.0"
spec.files         = `git ls-files`.split($/)
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "guard", "> 2.0.0"
spec.add_dependency "mixlib-shellout"
spec.add_development_dependency "bundler"
spec.add_development_dependency "rake"
end
|
# Homebrew cask for IntelliJ IDEA Community Edition EAP.
# version format: "<marketing>,<build>" — after_comma is the build number
# used in the download URL.
cask 'intellij-idea-next-ce-eap' do
version '2017.1,171.2613.7'
sha256 '220f2d15723f8b0eec8089278a65f0402737df8a39f6b51e160d6e50055373f8'
url "https://download.jetbrains.com/idea/ideaIC-#{version.after_comma}.dmg"
name 'IntelliJ IDEA Next Community Edition EAP'
homepage "https://confluence.jetbrains.com/display/IDEADEV/IDEA+#{version.major_minor}+EAP"
auto_updates true
app "IntelliJ IDEA #{version.before_comma} CE EAP.app"
uninstall delete: '/usr/local/bin/idea'
zap delete: [
"~/Library/Application Support/IdeaIC#{version.major_minor}",
"~/Library/Caches/IdeaIC#{version.major_minor}",
"~/Library/Logs/IdeaIC#{version.major_minor}",
"~/Library/Preferences/IdeaIC#{version.major_minor}",
]
end
Update intellij-idea-next-ce-eap to 2017.1,171.2822.15 (#3268)
# Homebrew cask for IntelliJ IDEA Community Edition EAP (build 171.2822.15).
# version format: "<marketing>,<build>" — after_comma is the build number
# used in the download URL.
cask 'intellij-idea-next-ce-eap' do
version '2017.1,171.2822.15'
sha256 '0787cd5a29b7a168065681ddc64eb7d4b74f579b483aa0a9b940b14033b62324'
url "https://download.jetbrains.com/idea/ideaIC-#{version.after_comma}.dmg"
name 'IntelliJ IDEA Next Community Edition EAP'
homepage "https://confluence.jetbrains.com/display/IDEADEV/IDEA+#{version.major_minor}+EAP"
auto_updates true
app "IntelliJ IDEA #{version.before_comma} CE EAP.app"
uninstall delete: '/usr/local/bin/idea'
zap delete: [
"~/Library/Application Support/IdeaIC#{version.major_minor}",
"~/Library/Caches/IdeaIC#{version.major_minor}",
"~/Library/Logs/IdeaIC#{version.major_minor}",
"~/Library/Preferences/IdeaIC#{version.major_minor}",
]
end
|
# Homebrew cask for Photo Supreme (single-user edition).
cask "photo-supreme-single-user" do
  version "7.3.0.4464"
  sha256 :no_check # required as upstream package is updated in-place

  url "https://trial.idimager.com/PhotoSupreme_V#{version.major}.pkg"
  name "Photo Supreme Single User"
  desc "Digital Asset Management"
  homepage "https://www.idimager.com/home"

  livecheck do
    url "https://www.idimager.com/what-s-new-in-photo-supreme-v#{version.major}"
    # Fixed: pattern was />s*.../ which matches a literal "s" after ">";
    # the intended token is the whitespace class \s.
    regex(/>\s*(\d+(?:\.\d+)+)[\s<]/i)
  end

  pkg "PhotoSupreme_V#{version.major}.pkg"

  uninstall pkgutil: "com.idimager.idimagersu"
end
photo-supreme-single-user 7.3.0.4494
Update photo-supreme-single-user from 7.3.0.4464 to 7.3.0.4494
Closes #131096.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew cask for Photo Supreme (single-user edition), 7.3.0.4494.
cask "photo-supreme-single-user" do
  version "7.3.0.4494"
  sha256 :no_check # required as upstream package is updated in-place

  url "https://trial.idimager.com/PhotoSupreme_V#{version.major}.pkg"
  name "Photo Supreme Single User"
  desc "Digital Asset Management"
  homepage "https://www.idimager.com/home"

  livecheck do
    url "https://www.idimager.com/what-s-new-in-photo-supreme-v#{version.major}"
    # Fixed: pattern was />s*.../ which matches a literal "s" after ">";
    # the intended token is the whitespace class \s.
    regex(/>\s*(\d+(?:\.\d+)+)[\s<]/i)
  end

  pkg "PhotoSupreme_V#{version.major}.pkg"

  uninstall pkgutil: "com.idimager.idimagersu"
end
|
# Homebrew cask for Safari Technology Preview 100; a different build is
# shipped for Mojave vs newer macOS, hence the version/sha pair per branch.
cask 'safari-technology-preview' do
if MacOS.version <= :mojave
version '100,061-72784-20200205-5c835f07-90bd-4ec6-88b6-30ed28ce9e69'
sha256 'f81b550a1df751354b4c2ccb93cca7859faa5732017628ca0074b7fbb73795b7'
else
version '100,061-72782-20200205-2dfe759e-8392-4b7a-92d4-700aaa3333c3'
sha256 '2b57da06065ec14a427ebe40d09a7c50e6ca84c01238012a911470b7eaa48f33'
end
url "https://secure-appldnld.apple.com/STP/#{version.after_comma}/SafariTechnologyPreview.dmg"
appcast 'https://developer.apple.com/safari/download/'
name 'Safari Technology Preview'
homepage 'https://developer.apple.com/safari/download/'
auto_updates true
depends_on macos: '>= :mojave'
pkg 'Safari Technology Preview.pkg'
uninstall delete: '/Applications/Safari Technology Preview.app'
zap trash: [
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.apple.safaritechnologypreview.sfl*',
'~/Library/Caches/com.apple.SafariTechnologyPreview',
'~/Library/Preferences/com.apple.SafariTechnologyPreview.plist',
'~/Library/SafariTechnologyPreview',
'~/Library/Saved Application State/com.apple.SafariTechnologyPreview.savedState',
'~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview-com.apple.Safari.UserRequests.plist',
'~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview-com.apple.Safari.WebFeedSubscriptions.plist',
'~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview.plist',
'~/Library/WebKit/com.apple.SafariTechnologyPreview',
]
end
Update safari-technology-preview to 101 (#8601)
Version as reported by safaridriver --version:
Included with Safari Technology Preview (Release 101, 15610.1.3)
Source: https://developer.apple.com/safari/download/
Build: https://dev.azure.com/foolip/safari-technology-preview-updater/_build/results?buildId=1442&view=logs
cask "safari-technology-preview" do
  # Apple ships separate builds per macOS release; pick by host version.
  if MacOS.version <= :mojave
    version "101,061-79986-20200218-f3264d1d-fff0-4ff6-b518-719415265e1c"
    sha256 "00e091a57289366ecdac4f47de8405561817730d79b040966903459ac90da20a"
  else
    version "101,061-79983-20200218-baf609a5-fdff-4f67-ade1-24d800440418"
    sha256 "a9ee1470dc7319e17b5a793530c21ff8a33d5458348096a95226b1da084a36b0"
  end

  # Version format is "<release>,<download-path-id>"; the URL uses the latter.
  url "https://secure-appldnld.apple.com/STP/#{version.after_comma}/SafariTechnologyPreview.dmg"
  appcast "https://developer.apple.com/safari/download/"
  name "Safari Technology Preview"
  homepage "https://developer.apple.com/safari/download/"

  auto_updates true
  depends_on macos: ">= :mojave"

  pkg "Safari Technology Preview.pkg"

  uninstall delete: "/Applications/Safari Technology Preview.app"

  zap trash: [
    "~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.apple.safaritechnologypreview.sfl*",
    "~/Library/Caches/com.apple.SafariTechnologyPreview",
    "~/Library/Preferences/com.apple.SafariTechnologyPreview.plist",
    "~/Library/SafariTechnologyPreview",
    "~/Library/Saved Application State/com.apple.SafariTechnologyPreview.savedState",
    "~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview-com.apple.Safari.UserRequests.plist",
    "~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview-com.apple.Safari.WebFeedSubscriptions.plist",
    "~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview.plist",
    "~/Library/WebKit/com.apple.SafariTechnologyPreview",
  ]
end
|
cask 'screaming-frog-seo-spider' do
  if MacOS.release <= :lion
    version '2.40'
    sha256 'f37a517cb1ddb13a0621ae2ef98eba148027b3a2b5ce56b6e6b4ca756e40329b'
  else
    version '5.1'
    sha256 'bd4ccf73c8ee99e4da893a832bf85155820659cc5a61f72df33d39871fcc7b66'
  end

  url "https://www.screamingfrog.co.uk/products/seo-spider/ScreamingFrogSEOSpider-#{version}.dmg"
  # Fixed: the cask previously declared two name stanzas; keep the single
  # full product name.
  name 'Screaming Frog SEO Spider'
  homepage 'http://www.screamingfrog.co.uk/seo-spider/'
  license :freemium

  app 'Screaming Frog SEO Spider.app'

  caveats <<-EOS.undent
    #{token} requires Java 7+, you can install the latest Java using
    brew cask install java
  EOS
end
screaming-frog-seo-spider.rb: fixed name stanzas
cask "screaming-frog-seo-spider" do
  # Older macOS releases are pinned to the last compatible build.
  if MacOS.release <= :lion
    version "2.40"
    sha256 "f37a517cb1ddb13a0621ae2ef98eba148027b3a2b5ce56b6e6b4ca756e40329b"
  else
    version "5.1"
    sha256 "bd4ccf73c8ee99e4da893a832bf85155820659cc5a61f72df33d39871fcc7b66"
  end

  url "https://www.screamingfrog.co.uk/products/seo-spider/ScreamingFrogSEOSpider-#{version}.dmg"
  name "Screaming Frog SEO Spider"
  homepage "http://www.screamingfrog.co.uk/seo-spider/"
  license :freemium

  app "Screaming Frog SEO Spider.app"

  caveats <<-EOS.undent
    #{token} requires Java 7+, you can install the latest Java using
    brew cask install java
  EOS
end
|
# Migrated from the legacy `class … < Cask` header to the v1 DSL header,
# which derives the token explicitly instead of from the class name.
cask :v1 => 'timemachinescheduler-beta' do
  version '4.0b3(483)'
  sha256 '2cd1c172da73e7ff26ddfa71417dbf323a635b3eef2c8f3025edbaa2686880ba'

  url 'http://www.klieme.com/Downloads/TimeMachineScheduler/TimeMachineScheduler_4.0b3Full.zip'
  homepage 'http://www.klieme.com/TimeMachineScheduler.html'
  license :unknown

  # The zip wraps a dmg; tell the unpacker to descend into it.
  container :nested => 'TimeMachineScheduler_4.0b3.dmg'

  app 'TimeMachineScheduler.app'
end
new-style header in timemachinescheduler-beta
cask :v1 => "timemachinescheduler-beta" do
  version "4.0b3(483)"
  sha256 "2cd1c172da73e7ff26ddfa71417dbf323a635b3eef2c8f3025edbaa2686880ba"

  url "http://www.klieme.com/Downloads/TimeMachineScheduler/TimeMachineScheduler_4.0b3Full.zip"
  homepage "http://www.klieme.com/TimeMachineScheduler.html"
  license :unknown

  # The zip wraps a dmg; tell the unpacker to descend into it.
  container nested: "TimeMachineScheduler_4.0b3.dmg"

  app "TimeMachineScheduler.app"
end
|
cask 'virtualbox-extension-pack' do
  version '5.2.2-119230'
  sha256 '9328548ca8cbc526232c0631cb5a17618c771b07665b362c1e3d89a2425bf799'

  # version is "<release>-<build>"; only the release part is the download
  # directory, so the "-<build>" suffix is stripped for the URL path.
  url "http://download.virtualbox.org/virtualbox/#{version.sub(%r{-.*}, '')}/Oracle_VM_VirtualBox_Extension_Pack-#{version}.vbox-extpack"
  appcast 'http://download.virtualbox.org/virtualbox/LATEST.TXT',
          checkpoint: '486d2ad103a38d1f6f661e9d3191e024b7e2fae6e6ce99ff03b073f43cd0f65b'
  name 'Oracle VirtualBox Extension Pack'
  homepage 'https://www.virtualbox.org/'

  depends_on cask: 'virtualbox'
  # The .vbox-extpack file is consumed as-is by VBoxManage, so no unpacking.
  container type: :naked
  # No app/pkg artifact: installation happens entirely via VBoxManage below.
  stage_only true

  postflight do
    # Install (or replace) the staged pack with VirtualBox's own CLI (needs
    # sudo). --accept-license passes the license hash so the install is
    # non-interactive — NOTE(review): hash must match the current PUEL text.
    system_command '/usr/local/bin/VBoxManage',
                   args: [
                           'extpack', 'install',
                           '--replace', "#{staged_path}/Oracle_VM_VirtualBox_Extension_Pack-#{version}.vbox-extpack",
                           '--accept-license=56be48f923303c8cababb0bb4c478284b688ed23f16d775d729b89a2e8e5f9eb'
                         ],
                   sudo: true
  end

  uninstall_postflight do
    # VirtualBox itself may already be gone; nothing to uninstall then.
    next unless File.exist?('/usr/local/bin/VBoxManage')
    system_command '/usr/local/bin/VBoxManage',
                   args: [
                           'extpack', 'uninstall',
                           'Oracle VM VirtualBox Extension Pack'
                         ],
                   sudo: true
  end

  caveats <<~EOS
    Installing this Cask means you have AGREED to the
    VirtualBox Personal Use and Evaluation License at
    https://www.virtualbox.org/wiki/VirtualBox_PUEL
  EOS
end
Update virtualbox-extension-pack to 5.2.4-119785 (#42104)
cask 'virtualbox-extension-pack' do
  version '5.2.4-119785'
  sha256 '98e9df4f23212c3de827af9d770b391cf2dba8d21f4de597145512c1479302cd'

  # version is "<release>-<build>"; only the release part is the download
  # directory, so the "-<build>" suffix is stripped for the URL path.
  url "http://download.virtualbox.org/virtualbox/#{version.sub(%r{-.*}, '')}/Oracle_VM_VirtualBox_Extension_Pack-#{version}.vbox-extpack"
  appcast 'http://download.virtualbox.org/virtualbox/LATEST.TXT',
          checkpoint: '003d27893bdd57babeee9db2074b2947da732fef6a3208a9e3871edf3f1f2850'
  name 'Oracle VirtualBox Extension Pack'
  homepage 'https://www.virtualbox.org/'

  depends_on cask: 'virtualbox'
  # The .vbox-extpack file is consumed as-is by VBoxManage, so no unpacking.
  container type: :naked
  # No app/pkg artifact: installation happens entirely via VBoxManage below.
  stage_only true

  postflight do
    # Install (or replace) the staged pack with VirtualBox's own CLI (needs
    # sudo). --accept-license passes the license hash so the install is
    # non-interactive — NOTE(review): hash must match the current PUEL text.
    system_command '/usr/local/bin/VBoxManage',
                   args: [
                           'extpack', 'install',
                           '--replace', "#{staged_path}/Oracle_VM_VirtualBox_Extension_Pack-#{version}.vbox-extpack",
                           '--accept-license=56be48f923303c8cababb0bb4c478284b688ed23f16d775d729b89a2e8e5f9eb'
                         ],
                   sudo: true
  end

  uninstall_postflight do
    # VirtualBox itself may already be gone; nothing to uninstall then.
    next unless File.exist?('/usr/local/bin/VBoxManage')
    system_command '/usr/local/bin/VBoxManage',
                   args: [
                           'extpack', 'uninstall',
                           'Oracle VM VirtualBox Extension Pack'
                         ],
                   sudo: true
  end

  caveats <<~EOS
    Installing this Cask means you have AGREED to the
    VirtualBox Personal Use and Evaluation License at
    https://www.virtualbox.org/wiki/VirtualBox_PUEL
  EOS
end
|
require 'thread'

# Helper script for Process.kill specs: writes its own pid to pid_file, then
# (optionally) spawns a child ruby that signals this process or its group,
# and waits until the TERM handler has fired.
pid_file = ARGV.shift
scenario = ARGV.shift
ruby_exe = ARGV.shift

# We must do this first otherwise there will be a race with the process that
# creates this process and the TERM signal below could go to that process
# instead, which will likely abort the specs process.
Process.setsid if scenario && Process.respond_to?(:setsid)

signaled = false
mutex = Mutex.new

# Record (exactly once) that a TERM arrived; try_lock keeps the handler
# non-blocking so re-entry from repeated signals is harmless.
Signal.trap(:TERM) do
  if mutex.try_lock
    unless signaled
      signaled = true
      STDOUT.puts "signaled"
      STDOUT.flush
    end
  end
end

File.open(pid_file, "wb") { |f| f.puts Process.pid }

if scenario
  # We are sending a signal to ourselves or the process group
  process = Process.respond_to?(:getpgid) ? "Process.getpgid(Process.pid)" : "Process.pid"

  case scenario
  when "self"
    signal = %["SIGTERM"]
    process = "0"
  when "group_numeric"
    signal = %[-Signal.list["TERM"]]
  when "group_short_string"
    signal = %["-TERM"]
  when "group_full_string"
    signal = %["-SIGTERM"]
  else
    raise "unknown scenario: #{scenario.inspect}"
  end

  # Fixed: previously this interpolated everything into one shell string and
  # ran it inside a throwaway Thread. Passing multiple arguments to #system
  # skips the shell (no quoting pitfalls, one fewer process), and #system
  # already waits for the child — the Thread#join was redundant.
  code = "Process.kill(#{signal}, #{process})"
  system(*ruby_exe.split(' '), "-e", code)
end

# Poll with a short interval so the script exits promptly once signaled.
sleep 0.001 until signaled
Use #system directly with multiple arguments in Process#kill specs
git-svn-id: ab86ecd26fe50a6a239cacb71380e346f71cee7d@58649 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
require 'thread'

# Helper script for Process.kill specs: writes its own pid to pid_file, then
# (optionally) spawns a child ruby that signals this process or its group,
# and waits until the TERM handler has fired.
pid_file = ARGV.shift
scenario = ARGV.shift
ruby_exe = ARGV.shift

# We must do this first otherwise there will be a race with the process that
# creates this process and the TERM signal below could go to that process
# instead, which will likely abort the specs process.
Process.setsid if scenario && Process.respond_to?(:setsid)

signaled = false
mutex = Mutex.new

# Record (exactly once) that a TERM arrived; try_lock keeps the handler
# non-blocking so re-entry from repeated signals is harmless.
Signal.trap(:TERM) do
  if mutex.try_lock
    unless signaled
      signaled = true
      STDOUT.puts "signaled"
      STDOUT.flush
    end
  end
end

File.open(pid_file, "wb") { |f| f.puts Process.pid }

if scenario
  # We are sending a signal to ourselves or the process group
  process = Process.respond_to?(:getpgid) ? "Process.getpgid(Process.pid)" : "Process.pid"

  # signal/process are Ruby source fragments, evaluated inside the child
  # ruby via -e below (not in this process).
  case scenario
  when "self"
    signal = %["SIGTERM"]
    process = "0"
  when "group_numeric"
    signal = %[-Signal.list["TERM"]]
  when "group_short_string"
    signal = %["-TERM"]
  when "group_full_string"
    signal = %["-SIGTERM"]
  else
    raise "unknown scenario: #{scenario.inspect}"
  end

  # Run the child directly (no shell) and wait for it; ruby_exe may contain
  # flags, hence the split.
  code = "Process.kill(#{signal}, #{process})"
  system(*ruby_exe.split(' '), "-e", code)
end

# Busy-wait (with a short sleep) until the trap handler has run.
sleep 0.001 until signaled
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "rails-jquery-autocomplete/version"

Gem::Specification.new do |s|
  s.name = %q{rails-jquery-autocomplete}
  s.version = RailsJQueryAutocomplete::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["David Padilla", "Joiey Seeley", "Sundus Yousuf"]
  s.email = %q{david@padilla.cc joiey.seeley@gmail.com sundusahmedyousuf@gmail.com}
  s.homepage = %q{https://github.com/bigtunacan/rails-jquery-autocomplete/}
  s.summary = %q{Use jQuery's autocomplete plugin with Rails 4+.}
  s.description = %q{Use jQuery's autocomplete plugin with Rails 4+.}
  s.license = %q{MIT}

  s.add_dependency('rails', '>= 3.2')
  s.add_development_dependency 'sqlite3-ruby'
  s.add_development_dependency 'mongoid', '>= 2.0.0'
  s.add_development_dependency 'mongo_mapper', '>= 0.9'
  #s.add_development_dependency 'mongo', '~> 1.6.2'
  s.add_development_dependency 'bson_ext', '~> 1.6.2'
  s.add_development_dependency 'guard'
  s.add_development_dependency 'guard-test'
  s.add_development_dependency 'test-unit', '~> 2.2.0'
  s.add_development_dependency 'shoulda', '~> 3.0.1'
  s.add_development_dependency 'uglifier'
  s.add_development_dependency 'rr'
  s.add_development_dependency 'simple_form', '~>1.5'
  # Fixed: 'debugger' only builds on MRI 1.9.x and breaks `bundle install`
  # on Ruby 2.x; depend on 'byebug' there instead. Gem::Version is used so
  # the comparison is numeric, not lexicographic.
  if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.0.0')
    s.add_development_dependency 'byebug'
  else
    s.add_development_dependency 'debugger'
  end

  s.files = Dir['lib/**/*'] + %w{CHANGELOG.md LICENSE README.md Rakefile}
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
Updated gemspec debugger dependency to work with Ruby 1.9.x and 2.x versions
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "rails-jquery-autocomplete/version"

Gem::Specification.new do |s|
  s.name = %q{rails-jquery-autocomplete}
  s.version = RailsJQueryAutocomplete::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["David Padilla", "Joiey Seeley", "Sundus Yousuf"]
  s.email = %q{david@padilla.cc joiey.seeley@gmail.com sundusahmedyousuf@gmail.com}
  s.homepage = %q{https://github.com/bigtunacan/rails-jquery-autocomplete/}
  s.summary = %q{Use jQuery's autocomplete plugin with Rails 4+.}
  s.description = %q{Use jQuery's autocomplete plugin with Rails 4+.}
  s.license = %q{MIT}

  s.add_dependency('rails', '>= 3.2')
  s.add_development_dependency 'sqlite3-ruby'
  s.add_development_dependency 'mongoid', '>= 2.0.0'
  s.add_development_dependency 'mongo_mapper', '>= 0.9'
  #s.add_development_dependency 'mongo', '~> 1.6.2'
  s.add_development_dependency 'bson_ext', '~> 1.6.2'
  s.add_development_dependency 'guard'
  s.add_development_dependency 'guard-test'
  s.add_development_dependency 'test-unit', '~> 2.2.0'
  s.add_development_dependency 'shoulda', '~> 3.0.1'
  s.add_development_dependency 'uglifier'
  s.add_development_dependency 'rr'
  s.add_development_dependency 'simple_form', '~>1.5'
  # Fixed: the previous pair of guards used `< '2.0.0'` and `> '2.0.0'`, so
  # Ruby exactly 2.0.0 got neither debugger; string comparison is also
  # lexicographic (e.g. '10.0.0' < '2.0.0'). Gem::Version with >= covers
  # both problems.
  if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.0.0')
    s.add_development_dependency 'byebug'
  else
    s.add_development_dependency 'debugger'
  end

  s.files = Dir['lib/**/*'] + %w{CHANGELOG.md LICENSE README.md Rakefile}
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
|
namespace :db do
namespace :create do
  desc 'Create all the local databases defined in config/database.yml'
  task :all => :environment do
    ActiveRecord::Base.configurations.each_value do |config|
      # Entries with no 'database' key are not real environments — e.g. a
      # shared YAML anchor block:
      #
      #   defaults: &defaults
      #     adapter: mysql
      #     username: root
      #
      #   development:
      #     database: blog_development
      #     <<: *defaults
      next unless config['database']

      # Remote databases are deliberately left untouched.
      if config['host'].blank? || config['host'] == 'localhost'
        create_database(config)
      else
        p "This task only creates local databases. #{config['database']} is on a remote host."
      end
    end
  end
end
desc 'Create the database defined in config/database.yml for the current RAILS_ENV'
task :create => :environment do
  # Delegates to the create_database helper (defined below) with the
  # configuration for the active Rails environment.
  create_database(ActiveRecord::Base.configurations[RAILS_ENV])
end
# Create the database described by +config+ unless it already exists.
# Existence is probed by attempting a connection; the rescue branch then
# creates the database with an adapter-specific strategy, and the else
# branch (connection succeeded) just reports that it is already there.
def create_database(config)
  begin
    ActiveRecord::Base.establish_connection(config)
    ActiveRecord::Base.connection
  rescue
    case config['adapter']
    when 'mysql'
      # Charset/collation can be overridden from the environment.
      @charset = ENV['CHARSET'] || 'utf8'
      @collation = ENV['COLLATION'] || 'utf8_general_ci'
      begin
        # Connect with no database selected, create it, then reconnect.
        ActiveRecord::Base.establish_connection(config.merge({'database' => nil}))
        ActiveRecord::Base.connection.create_database(config['database'], {:charset => @charset, :collation => @collation})
        ActiveRecord::Base.establish_connection(config)
      rescue
        $stderr.puts "Couldn't create database for #{config.inspect}"
      end
    when 'postgresql'
      `createdb "#{config['database']}" -E utf8`
    when 'sqlite'
      `sqlite "#{config['database']}"`
    when 'sqlite3'
      # Invoking the CLI on a nonexistent file creates the database file.
      `sqlite3 "#{config['database']}"`
    end
  else
    p "#{config['database']} already exists"
  end
end
namespace :drop do
  desc 'Drops all the local databases defined in config/database.yml'
  task :all => :environment do
    ActiveRecord::Base.configurations.each_value do |config|
      # Entries without a 'database' key (shared YAML defaults) are skipped.
      next unless config['database']

      # Remote databases are deliberately left untouched.
      if config['host'].blank? || config['host'] == 'localhost'
        drop_database(config)
      else
        p "This task only drops local databases. #{config['database']} is on a remote host."
      end
    end
  end
end
desc 'Drops the database for the current RAILS_ENV'
task :drop => :environment do
  # Falls back to the development configuration when RAILS_ENV is unset.
  drop_database(ActiveRecord::Base.configurations[RAILS_ENV || 'development'])
end
desc "Migrate the database through scripts in db/migrate. Target specific version with VERSION=x. Turn off output with VERBOSE=false."
task :migrate => :environment do
  # Verbose output defaults to on; any VERBOSE value other than "true"
  # silences migration output.
  ActiveRecord::Migration.verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
  ActiveRecord::Migrator.migrate("db/migrate/", ENV["VERSION"] ? ENV["VERSION"].to_i : nil)
  # Keep db/schema.rb in sync after migrating (only for the :ruby format).
  Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
desc 'Rolls the schema back to the previous version. Specify the number of steps with STEP=n'
task :rollback => :environment do
  # STEP defaults to a single migration when not supplied.
  steps = (ENV['STEP'] || 1).to_i
  target_version = ActiveRecord::Migrator.current_version - steps
  ActiveRecord::Migrator.migrate('db/migrate/', target_version)
end
# Pure composite task: no body of its own, just chains drop -> create ->
# schema:load for the current environment.
desc 'Drops and recreates the database from db/schema.rb for the current environment.'
task :reset => ['db:drop', 'db:create', 'db:schema:load']
desc "Retrieves the charset for the current environment's database"
task :charset => :environment do
  config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
  # Only the MySQL adapter exposes a charset accessor here.
  if config['adapter'] == 'mysql'
    ActiveRecord::Base.establish_connection(config)
    puts ActiveRecord::Base.connection.charset
  else
    puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
  end
end
desc "Retrieves the collation for the current environment's database"
task :collation => :environment do
  config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
  # Only the MySQL adapter exposes a collation accessor here.
  if config['adapter'] == 'mysql'
    ActiveRecord::Base.establish_connection(config)
    puts ActiveRecord::Base.connection.collation
  else
    puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
  end
end
desc "Retrieves the current schema version number"
task :version => :environment do
  # Reads the version recorded by the migrator for this database.
  puts "Current version: #{ActiveRecord::Migrator.current_version}"
end
desc "Raises an error if there are pending migrations"
task :abort_if_pending_migrations => :environment do
  # Fixed: this task can be invoked (e.g. as a prerequisite of db:test:prepare)
  # in applications where Active Record is not loaded at all; guard so it is
  # a no-op instead of raising NameError in that case.
  if defined? ActiveRecord
    pending_migrations = ActiveRecord::Migrator.new(:up, 'db/migrate').pending_migrations

    if pending_migrations.any?
      puts "You have #{pending_migrations.size} pending migrations:"
      pending_migrations.each do |pending_migration|
        puts ' %4d %s' % [pending_migration.version, pending_migration.name]
      end
      abort "Run `rake db:migrate` to update your database then try again."
    end
  end
end
namespace :fixtures do
  desc "Load fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y"
  task :load => :environment do
    require 'active_record/fixtures'
    ActiveRecord::Base.establish_connection(RAILS_ENV.to_sym)
    # Either the explicit FIXTURES list, or every .yml/.csv under
    # test/fixtures.
    (ENV['FIXTURES'] ? ENV['FIXTURES'].split(/,/) : Dir.glob(File.join(RAILS_ROOT, 'test', 'fixtures', '*.{yml,csv}'))).each do |fixture_file|
      Fixtures.create_fixtures('test/fixtures', File.basename(fixture_file, '.*'))
    end
  end

  desc "Search for a fixture given a LABEL or ID."
  task :identify => :environment do
    require "active_record/fixtures"

    label, id = ENV["LABEL"], ENV["ID"]
    raise "LABEL or ID required" if label.blank? && id.blank?

    # Fixtures.identify converts a fixture label into its numeric id.
    puts %Q(The fixture ID for "#{label}" is #{Fixtures.identify(label)}.) if label

    # Scan every fixture file for keys that match the label, or whose
    # identify() value matches the given id. Fixture files are run through
    # ERB before YAML parsing, same as when Rails loads them.
    Dir["#{RAILS_ROOT}/test/fixtures/**/*.yml"].each do |file|
      if data = YAML::load(ERB.new(IO.read(file)).result)
        data.keys.each do |key|
          key_id = Fixtures.identify(key)

          if key == label || key_id == id.to_i
            puts "#{file}: #{key} (#{key_id})"
          end
        end
      end
    end
  end
end
namespace :schema do
  desc "Create a db/schema.rb file that can be portably used against any DB supported by AR"
  task :dump => :environment do
    require 'active_record/schema_dumper'
    # SCHEMA allows dumping to an alternative path.
    File.open(ENV['SCHEMA'] || "db/schema.rb", "w") do |file|
      ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
    end
  end

  desc "Load a schema.rb file into the database"
  task :load => :environment do
    file = ENV['SCHEMA'] || "db/schema.rb"
    # schema.rb is plain Ruby; loading it replays the schema definition.
    load(file)
  end
end
namespace :structure do
  desc "Dump the database structure to a SQL file"
  task :dump => :environment do
    abcs = ActiveRecord::Base.configurations
    case abcs[RAILS_ENV]["adapter"]
    when "mysql", "oci", "oracle"
      # These adapters can produce DDL themselves via structure_dump.
      ActiveRecord::Base.establish_connection(abcs[RAILS_ENV])
      File.open("db/#{RAILS_ENV}_structure.sql", "w+") { |f| f << ActiveRecord::Base.connection.structure_dump }
    when "postgresql"
      # pg_dump picks up connection settings from the PG* env variables.
      ENV['PGHOST'] = abcs[RAILS_ENV]["host"] if abcs[RAILS_ENV]["host"]
      ENV['PGPORT'] = abcs[RAILS_ENV]["port"].to_s if abcs[RAILS_ENV]["port"]
      ENV['PGPASSWORD'] = abcs[RAILS_ENV]["password"].to_s if abcs[RAILS_ENV]["password"]
      search_path = abcs[RAILS_ENV]["schema_search_path"]
      search_path = "--schema=#{search_path}" if search_path
      `pg_dump -i -U "#{abcs[RAILS_ENV]["username"]}" -s -x -O -f db/#{RAILS_ENV}_structure.sql #{search_path} #{abcs[RAILS_ENV]["database"]}`
      raise "Error dumping database" if $?.exitstatus == 1
    when "sqlite", "sqlite3"
      dbfile = abcs[RAILS_ENV]["database"] || abcs[RAILS_ENV]["dbfile"]
      # The adapter name doubles as the CLI binary name (sqlite / sqlite3).
      `#{abcs[RAILS_ENV]["adapter"]} #{dbfile} .schema > db/#{RAILS_ENV}_structure.sql`
    when "sqlserver"
      `scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /f db\\#{RAILS_ENV}_structure.sql /q /A /r`
      `scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /F db\ /q /A /r`
    when "firebird"
      set_firebird_env(abcs[RAILS_ENV])
      db_string = firebird_db_string(abcs[RAILS_ENV])
      sh "isql -a #{db_string} > db/#{RAILS_ENV}_structure.sql"
    else
      raise "Task not supported by '#{abcs["test"]["adapter"]}'"
    end

    # Append the schema-version bookkeeping so the dump carries the current
    # migration version along with the structure.
    if ActiveRecord::Base.connection.supports_migrations?
      File.open("db/#{RAILS_ENV}_structure.sql", "a") { |f| f << ActiveRecord::Base.connection.dump_schema_information }
    end
  end
end
namespace :test do
  desc "Recreate the test database from the current environment's database schema"
  task :clone => %w(db:schema:dump db:test:purge) do
    ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations['test'])
    ActiveRecord::Schema.verbose = false
    Rake::Task["db:schema:load"].invoke
  end

  desc "Recreate the test databases from the development structure"
  task :clone_structure => [ "db:structure:dump", "db:test:purge" ] do
    abcs = ActiveRecord::Base.configurations
    case abcs["test"]["adapter"]
    when "mysql"
      ActiveRecord::Base.establish_connection(:test)
      # Disable FK checks so tables can be created in any dump order.
      ActiveRecord::Base.connection.execute('SET foreign_key_checks = 0')
      # The structure dump separates statements with blank lines.
      IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split("\n\n").each do |table|
        ActiveRecord::Base.connection.execute(table)
      end
    when "postgresql"
      # psql picks up connection settings from the PG* env variables.
      ENV['PGHOST'] = abcs["test"]["host"] if abcs["test"]["host"]
      ENV['PGPORT'] = abcs["test"]["port"].to_s if abcs["test"]["port"]
      ENV['PGPASSWORD'] = abcs["test"]["password"].to_s if abcs["test"]["password"]
      `psql -U "#{abcs["test"]["username"]}" -f db/#{RAILS_ENV}_structure.sql #{abcs["test"]["database"]}`
    when "sqlite", "sqlite3"
      dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
      `#{abcs["test"]["adapter"]} #{dbfile} < db/#{RAILS_ENV}_structure.sql`
    when "sqlserver"
      `osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
    when "oci", "oracle"
      ActiveRecord::Base.establish_connection(:test)
      # Oracle DDL statements are separated by ";\n\n" in the dump.
      IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split(";\n\n").each do |ddl|
        ActiveRecord::Base.connection.execute(ddl)
      end
    when "firebird"
      set_firebird_env(abcs["test"])
      db_string = firebird_db_string(abcs["test"])
      sh "isql -i db/#{RAILS_ENV}_structure.sql #{db_string}"
    else
      raise "Task not supported by '#{abcs["test"]["adapter"]}'"
    end
  end

  desc "Empty the test database"
  task :purge => :environment do
    abcs = ActiveRecord::Base.configurations
    case abcs["test"]["adapter"]
    when "mysql"
      ActiveRecord::Base.establish_connection(:test)
      ActiveRecord::Base.connection.recreate_database(abcs["test"]["database"])
    when "postgresql"
      ENV['PGHOST'] = abcs["test"]["host"] if abcs["test"]["host"]
      ENV['PGPORT'] = abcs["test"]["port"].to_s if abcs["test"]["port"]
      ENV['PGPASSWORD'] = abcs["test"]["password"].to_s if abcs["test"]["password"]
      enc_option = "-E #{abcs["test"]["encoding"]}" if abcs["test"]["encoding"]
      # Active connections must be released before dropdb can succeed.
      ActiveRecord::Base.clear_active_connections!
      `dropdb -U "#{abcs["test"]["username"]}" #{abcs["test"]["database"]}`
      `createdb #{enc_option} -U "#{abcs["test"]["username"]}" #{abcs["test"]["database"]}`
    when "sqlite","sqlite3"
      # SQLite: purging is just deleting the database file.
      dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
      File.delete(dbfile) if File.exist?(dbfile)
    when "sqlserver"
      # Drop foreign keys first (generated .DP1 script), then reload structure.
      dropfkscript = "#{abcs["test"]["host"]}.#{abcs["test"]["database"]}.DP1".gsub(/\\/,'-')
      `osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{dropfkscript}`
      `osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
    when "oci", "oracle"
      ActiveRecord::Base.establish_connection(:test)
      ActiveRecord::Base.connection.structure_drop.split(";\n\n").each do |ddl|
        ActiveRecord::Base.connection.execute(ddl)
      end
    when "firebird"
      ActiveRecord::Base.establish_connection(:test)
      ActiveRecord::Base.connection.recreate_database!
    else
      raise "Task not supported by '#{abcs["test"]["adapter"]}'"
    end
  end

  desc 'Prepare the test database and load the schema'
  task :prepare => %w(environment db:abort_if_pending_migrations) do
    if defined?(ActiveRecord::Base) && !ActiveRecord::Base.configurations.blank?
      # schema_format decides whether to clone via schema.rb or raw SQL.
      Rake::Task[{ :sql => "db:test:clone_structure", :ruby => "db:test:clone" }[ActiveRecord::Base.schema_format]].invoke
    end
  end
end
namespace :sessions do
  desc "Creates a sessions migration for use with CGI::Session::ActiveRecordStore"
  task :create => :environment do
    raise "Task unavailable to this database (no migration support)" unless ActiveRecord::Base.connection.supports_migrations?
    require 'rails_generator'
    require 'rails_generator/scripts/generate'
    Rails::Generator::Scripts::Generate.new.run(["session_migration", ENV["MIGRATION"] || "CreateSessions"])
  end

  desc "Clear the sessions table"
  task :clear => :environment do
    # Consistency: reuse the session_table_name helper defined at the bottom
    # of this file instead of re-deriving the pluralized name inline.
    ActiveRecord::Base.connection.execute "DELETE FROM #{session_table_name}"
  end
end
end
# Drop the database described by +config+, dispatching on the adapter.
# Unknown adapters are silently ignored.
def drop_database(config)
  adapter = config['adapter']
  if adapter == 'mysql'
    ActiveRecord::Base.connection.drop_database config['database']
  elsif adapter =~ /^sqlite/
    # SQLite databases are plain files relative to the application root.
    FileUtils.rm_f(File.join(RAILS_ROOT, config['database']))
  elsif adapter == 'postgresql'
    `dropdb "#{config['database']}"`
  end
end
# Session table name, honouring ActiveRecord's table pluralization setting.
def session_table_name
  if ActiveRecord::Base.pluralize_table_names
    :sessions
  else
    :session
  end
end
# Export Firebird credentials from +config+ into the environment variables
# that the isql CLI reads; keys that are absent leave ENV untouched.
def set_firebird_env(config)
  user = config["username"]
  pass = config["password"]
  ENV["ISC_USER"] = user.to_s if user
  ENV["ISC_PASSWORD"] = pass.to_s if pass
end
# Build the isql connection string for +config+ by delegating to the
# FireRuby driver (keys are symbolized, as FireRuby expects).
def firebird_db_string(config)
  FireRuby::Database.db_string_for(config.symbolize_keys)
end
Don't check for pending migrations if Active Record isn't loaded
git-svn-id: afc9fed30c1a09d8801d1e4fbe6e01c29c67d11f@8338 5ecf4fe2-1ee6-0310-87b1-e25e094e27de
namespace :db do
namespace :create do
desc 'Create all the local databases defined in config/database.yml'
task :all => :environment do
ActiveRecord::Base.configurations.each_value do |config|
# Skip entries that don't have a database key, such as the first entry here:
#
# defaults: &defaults
# adapter: mysql
# username: root
# password:
# host: localhost
#
# development:
# database: blog_development
# <<: *defaults
next unless config['database']
# Only connect to local databases
if config['host'] == 'localhost' || config['host'].blank?
create_database(config)
else
p "This task only creates local databases. #{config['database']} is on a remote host."
end
end
end
end
desc 'Create the database defined in config/database.yml for the current RAILS_ENV'
task :create => :environment do
create_database(ActiveRecord::Base.configurations[RAILS_ENV])
end
def create_database(config)
begin
ActiveRecord::Base.establish_connection(config)
ActiveRecord::Base.connection
rescue
case config['adapter']
when 'mysql'
@charset = ENV['CHARSET'] || 'utf8'
@collation = ENV['COLLATION'] || 'utf8_general_ci'
begin
ActiveRecord::Base.establish_connection(config.merge({'database' => nil}))
ActiveRecord::Base.connection.create_database(config['database'], {:charset => @charset, :collation => @collation})
ActiveRecord::Base.establish_connection(config)
rescue
$stderr.puts "Couldn't create database for #{config.inspect}"
end
when 'postgresql'
`createdb "#{config['database']}" -E utf8`
when 'sqlite'
`sqlite "#{config['database']}"`
when 'sqlite3'
`sqlite3 "#{config['database']}"`
end
else
p "#{config['database']} already exists"
end
end
namespace :drop do
desc 'Drops all the local databases defined in config/database.yml'
task :all => :environment do
ActiveRecord::Base.configurations.each_value do |config|
# Skip entries that don't have a database key
next unless config['database']
# Only connect to local databases
if config['host'] == 'localhost' || config['host'].blank?
drop_database(config)
else
p "This task only drops local databases. #{config['database']} is on a remote host."
end
end
end
end
desc 'Drops the database for the current RAILS_ENV'
task :drop => :environment do
drop_database(ActiveRecord::Base.configurations[RAILS_ENV || 'development'])
end
desc "Migrate the database through scripts in db/migrate. Target specific version with VERSION=x. Turn off output with VERBOSE=false."
task :migrate => :environment do
ActiveRecord::Migration.verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
ActiveRecord::Migrator.migrate("db/migrate/", ENV["VERSION"] ? ENV["VERSION"].to_i : nil)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
desc 'Rolls the schema back to the previous version. Specify the number of steps with STEP=n'
task :rollback => :environment do
step = ENV['STEP'] ? ENV['STEP'].to_i : 1
version = ActiveRecord::Migrator.current_version - step
ActiveRecord::Migrator.migrate('db/migrate/', version)
end
desc 'Drops and recreates the database from db/schema.rb for the current environment.'
task :reset => ['db:drop', 'db:create', 'db:schema:load']
desc "Retrieves the charset for the current environment's database"
task :charset => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
case config['adapter']
when 'mysql'
ActiveRecord::Base.establish_connection(config)
puts ActiveRecord::Base.connection.charset
else
puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
end
end
desc "Retrieves the collation for the current environment's database"
task :collation => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
case config['adapter']
when 'mysql'
ActiveRecord::Base.establish_connection(config)
puts ActiveRecord::Base.connection.collation
else
puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
end
end
desc "Retrieves the current schema version number"
task :version => :environment do
puts "Current version: #{ActiveRecord::Migrator.current_version}"
end
desc "Raises an error if there are pending migrations"
task :abort_if_pending_migrations => :environment do
if defined? ActiveRecord
pending_migrations = ActiveRecord::Migrator.new(:up, 'db/migrate').pending_migrations
if pending_migrations.any?
puts "You have #{pending_migrations.size} pending migrations:"
pending_migrations.each do |pending_migration|
puts ' %4d %s' % [pending_migration.version, pending_migration.name]
end
abort "Run `rake db:migrate` to update your database then try again."
end
end
end
namespace :fixtures do
desc "Load fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y"
task :load => :environment do
require 'active_record/fixtures'
ActiveRecord::Base.establish_connection(RAILS_ENV.to_sym)
(ENV['FIXTURES'] ? ENV['FIXTURES'].split(/,/) : Dir.glob(File.join(RAILS_ROOT, 'test', 'fixtures', '*.{yml,csv}'))).each do |fixture_file|
Fixtures.create_fixtures('test/fixtures', File.basename(fixture_file, '.*'))
end
end
desc "Search for a fixture given a LABEL or ID."
task :identify => :environment do
require "active_record/fixtures"
label, id = ENV["LABEL"], ENV["ID"]
raise "LABEL or ID required" if label.blank? && id.blank?
puts %Q(The fixture ID for "#{label}" is #{Fixtures.identify(label)}.) if label
Dir["#{RAILS_ROOT}/test/fixtures/**/*.yml"].each do |file|
if data = YAML::load(ERB.new(IO.read(file)).result)
data.keys.each do |key|
key_id = Fixtures.identify(key)
if key == label || key_id == id.to_i
puts "#{file}: #{key} (#{key_id})"
end
end
end
end
end
end
# Ruby-format schema dump/load (db/schema.rb). Override the path with SCHEMA=.
namespace :schema do
desc "Create a db/schema.rb file that can be portably used against any DB supported by AR"
task :dump => :environment do
require 'active_record/schema_dumper'
File.open(ENV['SCHEMA'] || "db/schema.rb", "w") do |file|
ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
end
end
desc "Load a schema.rb file into the database"
task :load => :environment do
file = ENV['SCHEMA'] || "db/schema.rb"
load(file)
end
end
# SQL-format structure dump for adapters/projects using schema_format :sql.
namespace :structure do
  desc "Dump the database structure to a SQL file"
  # Writes db/#{RAILS_ENV}_structure.sql with the adapter's native tool, then
  # appends the schema_info contents so the migration version survives a reload.
  task :dump => :environment do
    abcs = ActiveRecord::Base.configurations
    case abcs[RAILS_ENV]["adapter"]
    when "mysql", "oci", "oracle"
      ActiveRecord::Base.establish_connection(abcs[RAILS_ENV])
      File.open("db/#{RAILS_ENV}_structure.sql", "w+") { |f| f << ActiveRecord::Base.connection.structure_dump }
    when "postgresql"
      # pg_dump picks up connection settings from the environment.
      ENV['PGHOST'] = abcs[RAILS_ENV]["host"] if abcs[RAILS_ENV]["host"]
      ENV['PGPORT'] = abcs[RAILS_ENV]["port"].to_s if abcs[RAILS_ENV]["port"]
      ENV['PGPASSWORD'] = abcs[RAILS_ENV]["password"].to_s if abcs[RAILS_ENV]["password"]
      search_path = abcs[RAILS_ENV]["schema_search_path"]
      search_path = "--schema=#{search_path}" if search_path
      `pg_dump -i -U "#{abcs[RAILS_ENV]["username"]}" -s -x -O -f db/#{RAILS_ENV}_structure.sql #{search_path} #{abcs[RAILS_ENV]["database"]}`
      raise "Error dumping database" if $?.exitstatus == 1
    when "sqlite", "sqlite3"
      dbfile = abcs[RAILS_ENV]["database"] || abcs[RAILS_ENV]["dbfile"]
      `#{abcs[RAILS_ENV]["adapter"]} #{dbfile} .schema > db/#{RAILS_ENV}_structure.sql`
    when "sqlserver"
      `scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /f db\\#{RAILS_ENV}_structure.sql /q /A /r`
      `scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /F db\ /q /A /r`
    when "firebird"
      set_firebird_env(abcs[RAILS_ENV])
      db_string = firebird_db_string(abcs[RAILS_ENV])
      sh "isql -a #{db_string} > db/#{RAILS_ENV}_structure.sql"
    else
      # BUGFIX: previously reported abcs["test"]["adapter"], but this task
      # operates on the current RAILS_ENV configuration.
      raise "Task not supported by '#{abcs[RAILS_ENV]["adapter"]}'"
    end
    if ActiveRecord::Base.connection.supports_migrations?
      File.open("db/#{RAILS_ENV}_structure.sql", "a") { |f| f << ActiveRecord::Base.connection.dump_schema_information }
    end
  end
end
namespace :test do
desc "Recreate the test database from the current environment's database schema"
# Dumps the current schema to schema.rb, purges the test DB, then loads the
# schema into the test connection (prerequisites handle dump + purge).
task :clone => %w(db:schema:dump db:test:purge) do
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations['test'])
ActiveRecord::Schema.verbose = false
Rake::Task["db:schema:load"].invoke
end
desc "Recreate the test databases from the development structure"
# SQL-format counterpart of db:test:clone: replays db/#{RAILS_ENV}_structure.sql
# into the test database using adapter-specific mechanisms.
task :clone_structure => [ "db:structure:dump", "db:test:purge" ] do
abcs = ActiveRecord::Base.configurations
case abcs["test"]["adapter"]
when "mysql"
ActiveRecord::Base.establish_connection(:test)
# Disable FK checks so tables can be created in dump order.
ActiveRecord::Base.connection.execute('SET foreign_key_checks = 0')
IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split("\n\n").each do |table|
ActiveRecord::Base.connection.execute(table)
end
when "postgresql"
ENV['PGHOST'] = abcs["test"]["host"] if abcs["test"]["host"]
ENV['PGPORT'] = abcs["test"]["port"].to_s if abcs["test"]["port"]
ENV['PGPASSWORD'] = abcs["test"]["password"].to_s if abcs["test"]["password"]
`psql -U "#{abcs["test"]["username"]}" -f db/#{RAILS_ENV}_structure.sql #{abcs["test"]["database"]}`
when "sqlite", "sqlite3"
dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
`#{abcs["test"]["adapter"]} #{dbfile} < db/#{RAILS_ENV}_structure.sql`
when "sqlserver"
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
when "oci", "oracle"
ActiveRecord::Base.establish_connection(:test)
IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split(";\n\n").each do |ddl|
ActiveRecord::Base.connection.execute(ddl)
end
when "firebird"
set_firebird_env(abcs["test"])
db_string = firebird_db_string(abcs["test"])
sh "isql -i db/#{RAILS_ENV}_structure.sql #{db_string}"
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
end
desc "Empty the test database"
# Drops/recreates (or deletes) the test database, per adapter.
task :purge => :environment do
abcs = ActiveRecord::Base.configurations
case abcs["test"]["adapter"]
when "mysql"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.recreate_database(abcs["test"]["database"])
when "postgresql"
ENV['PGHOST'] = abcs["test"]["host"] if abcs["test"]["host"]
ENV['PGPORT'] = abcs["test"]["port"].to_s if abcs["test"]["port"]
ENV['PGPASSWORD'] = abcs["test"]["password"].to_s if abcs["test"]["password"]
enc_option = "-E #{abcs["test"]["encoding"]}" if abcs["test"]["encoding"]
# Release pooled connections before shelling out to dropdb/createdb.
ActiveRecord::Base.clear_active_connections!
`dropdb -U "#{abcs["test"]["username"]}" #{abcs["test"]["database"]}`
`createdb #{enc_option} -U "#{abcs["test"]["username"]}" #{abcs["test"]["database"]}`
when "sqlite","sqlite3"
dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
File.delete(dbfile) if File.exist?(dbfile)
when "sqlserver"
dropfkscript = "#{abcs["test"]["host"]}.#{abcs["test"]["database"]}.DP1".gsub(/\\/,'-')
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{dropfkscript}`
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
when "oci", "oracle"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.structure_drop.split(";\n\n").each do |ddl|
ActiveRecord::Base.connection.execute(ddl)
end
when "firebird"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.recreate_database!
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
end
desc 'Prepare the test database and load the schema'
# Dispatches to clone_structure (:sql) or clone (:ruby) based on schema_format.
task :prepare => %w(environment db:abort_if_pending_migrations) do
if defined?(ActiveRecord) && !ActiveRecord::Base.configurations.blank?
Rake::Task[{ :sql => "db:test:clone_structure", :ruby => "db:test:clone" }[ActiveRecord::Base.schema_format]].invoke
end
end
end
# Tasks for the ActiveRecord session store's backing table.
namespace :sessions do
  desc "Creates a sessions migration for use with CGI::Session::ActiveRecordStore"
  task :create => :environment do
    raise "Task unavailable to this database (no migration support)" unless ActiveRecord::Base.connection.supports_migrations?
    require 'rails_generator'
    require 'rails_generator/scripts/generate'
    Rails::Generator::Scripts::Generate.new.run(["session_migration", ENV["MIGRATION"] || "CreateSessions"])
  end
  desc "Clear the sessions table"
  task :clear => :environment do
    # Use the shared session_table_name helper instead of re-deriving the
    # (possibly pluralized) table name inline — keeps the naming logic in
    # one place, consistent with the helper defined alongside drop_database.
    ActiveRecord::Base.connection.execute "DELETE FROM #{session_table_name}"
  end
end
end
# Drop the database described by +config+.
# mysql drops through the current AR connection; sqlite removes the db
# file (rm_f: no error if already absent); postgresql shells out to dropdb.
def drop_database(config)
case config['adapter']
when 'mysql'
ActiveRecord::Base.connection.drop_database config['database']
when /^sqlite/
FileUtils.rm_f(File.join(RAILS_ROOT, config['database']))
when 'postgresql'
`dropdb "#{config['database']}"`
end
end
# Name of the session store's table, honoring AR's pluralization setting.
def session_table_name
  if ActiveRecord::Base.pluralize_table_names
    :sessions
  else
    :session
  end
end
# Export Firebird credentials so command-line tools (isql) can authenticate.
# Keys already absent from +config+ leave the environment untouched.
def set_firebird_env(config)
  user     = config["username"]
  password = config["password"]
  ENV["ISC_USER"]     = user.to_s if user
  ENV["ISC_PASSWORD"] = password.to_s if password
end
# Build the isql-compatible connection string for +config+ via FireRuby.
def firebird_db_string(config)
FireRuby::Database.db_string_for(config.symbolize_keys)
end
# ---------------------------------------------------------------------------
namespace :db do
namespace :create do
desc 'Create all the local databases defined in config/database.yml'
task :all => :environment do
ActiveRecord::Base.configurations.each_value do |config|
# Skip entries that don't have a database key, such as the first entry here:
#
# defaults: &defaults
# adapter: mysql
# username: root
# password:
# host: localhost
#
# development:
# database: blog_development
# <<: *defaults
next unless config['database']
# Only connect to local databases
local_database?(config) { create_database(config) }
end
end
end
desc 'Create the database defined in config/database.yml for the current RAILS_ENV'
task :create => :environment do
create_database(ActiveRecord::Base.configurations[RAILS_ENV])
end
# Create the database described by +config+ if it does not already exist.
# A successful connection means it exists; otherwise create it with
# adapter-specific settings (charset/collation for MySQL, encoding for
# PostgreSQL, a fresh file for sqlite).
def create_database(config)
  begin
    ActiveRecord::Base.establish_connection(config)
    ActiveRecord::Base.connection
  rescue
    case config['adapter']
    when 'mysql'
      @charset   = ENV['CHARSET']   || 'utf8'
      @collation = ENV['COLLATION'] || 'utf8_general_ci'
      begin
        # Connect without selecting a database so CREATE DATABASE can run.
        ActiveRecord::Base.establish_connection(config.merge('database' => nil))
        ActiveRecord::Base.connection.create_database(config['database'], :charset => (config['charset'] || @charset), :collation => (config['collation'] || @collation))
        ActiveRecord::Base.establish_connection(config)
      rescue
        $stderr.puts "Couldn't create database for #{config.inspect}, charset: #{config['charset'] || @charset}, collation: #{config['collation'] || @collation} (if you set the charset manually, make sure you have a matching collation)"
      end
    when 'postgresql'
      # BUGFIX: database.yml configs use string keys, so config[:encoding]
      # was always nil and the configured encoding was ignored; check the
      # string key first (symbol kept as a backward-compatible fallback).
      @encoding = config['encoding'] || config[:encoding] || ENV['CHARSET'] || 'utf8'
      begin
        # Issue CREATE DATABASE from the 'postgres' maintenance database.
        ActiveRecord::Base.establish_connection(config.merge('database' => 'postgres', 'schema_search_path' => 'public'))
        ActiveRecord::Base.connection.create_database(config['database'], config.merge('encoding' => @encoding))
        ActiveRecord::Base.establish_connection(config)
      rescue
        $stderr.puts $!, *($!.backtrace)
        $stderr.puts "Couldn't create database for #{config.inspect}"
      end
    when 'sqlite'
      `sqlite "#{config['database']}"`
    when 'sqlite3'
      `sqlite3 "#{config['database']}"`
    end
  else
    $stderr.puts "#{config['database']} already exists"
  end
end
namespace :drop do
desc 'Drops all the local databases defined in config/database.yml'
task :all => :environment do
ActiveRecord::Base.configurations.each_value do |config|
# Skip entries that don't have a database key
next unless config['database']
# Only connect to local databases
local_database?(config) { drop_database(config) }
end
end
end
desc 'Drops the database for the current RAILS_ENV'
task :drop => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
begin
drop_database(config)
rescue Exception => e
puts "Couldn't drop #{config['database']} : #{e.inspect}"
end
end
# Yield only when the configured host is local (loopback or unset);
# otherwise print a warning and do nothing.
def local_database?(config, &block)
  host = config['host']
  if %w( 127.0.0.1 localhost ).include?(host) || host.blank?
    yield
  else
    puts "This task only modifies local databases. #{config['database']} is on a remote host."
  end
end
desc "Migrate the database through scripts in db/migrate. Target specific version with VERSION=x. Turn off output with VERBOSE=false."
task :migrate => :environment do
ActiveRecord::Migration.verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
ActiveRecord::Migrator.migrate("db/migrate/", ENV["VERSION"] ? ENV["VERSION"].to_i : nil)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
namespace :migrate do
desc 'Rollbacks the database one migration and re migrate up. If you want to rollback more than one step, define STEP=x'
task :redo => [ 'db:rollback', 'db:migrate' ]
desc 'Resets your database using your migrations for the current environment'
task :reset => ["db:drop", "db:create", "db:migrate"]
desc 'Runs the "up" for a given migration VERSION.'
task :up => :environment do
version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
raise "VERSION is required" unless version
ActiveRecord::Migrator.run(:up, "db/migrate/", version)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
desc 'Runs the "down" for a given migration VERSION.'
task :down => :environment do
version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
raise "VERSION is required" unless version
ActiveRecord::Migrator.run(:down, "db/migrate/", version)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
end
desc 'Rolls the schema back to the previous version. Specify the number of steps with STEP=n'
task :rollback => :environment do
step = ENV['STEP'] ? ENV['STEP'].to_i : 1
ActiveRecord::Migrator.rollback('db/migrate/', step)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
desc 'Drops and recreates the database from db/schema.rb for the current environment.'
task :reset => ['db:drop', 'db:create', 'db:schema:load']
desc "Retrieves the charset for the current environment's database"
task :charset => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
case config['adapter']
when 'mysql'
ActiveRecord::Base.establish_connection(config)
puts ActiveRecord::Base.connection.charset
else
puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
end
end
desc "Retrieves the collation for the current environment's database"
task :collation => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
case config['adapter']
when 'mysql'
ActiveRecord::Base.establish_connection(config)
puts ActiveRecord::Base.connection.collation
else
puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
end
end
desc "Retrieves the current schema version number"
task :version => :environment do
puts "Current version: #{ActiveRecord::Migrator.current_version}"
end
desc "Raises an error if there are pending migrations"
task :abort_if_pending_migrations => :environment do
if defined? ActiveRecord
pending_migrations = ActiveRecord::Migrator.new(:up, 'db/migrate').pending_migrations
if pending_migrations.any?
puts "You have #{pending_migrations.size} pending migrations:"
pending_migrations.each do |pending_migration|
puts ' %4d %s' % [pending_migration.version, pending_migration.name]
end
abort %{Run "rake db:migrate" to update your database then try again.}
end
end
end
namespace :fixtures do
desc "Load fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y"
task :load => :environment do
require 'active_record/fixtures'
ActiveRecord::Base.establish_connection(RAILS_ENV.to_sym)
(ENV['FIXTURES'] ? ENV['FIXTURES'].split(/,/) : Dir.glob(File.join(RAILS_ROOT, 'test', 'fixtures', '*.{yml,csv}'))).each do |fixture_file|
Fixtures.create_fixtures('test/fixtures', File.basename(fixture_file, '.*'))
end
end
desc "Search for a fixture given a LABEL or ID."
task :identify => :environment do
require "active_record/fixtures"
label, id = ENV["LABEL"], ENV["ID"]
raise "LABEL or ID required" if label.blank? && id.blank?
puts %Q(The fixture ID for "#{label}" is #{Fixtures.identify(label)}.) if label
Dir["#{RAILS_ROOT}/test/fixtures/**/*.yml"].each do |file|
if data = YAML::load(ERB.new(IO.read(file)).result)
data.keys.each do |key|
key_id = Fixtures.identify(key)
if key == label || key_id == id.to_i
puts "#{file}: #{key} (#{key_id})"
end
end
end
end
end
end
namespace :schema do
desc "Create a db/schema.rb file that can be portably used against any DB supported by AR"
task :dump => :environment do
require 'active_record/schema_dumper'
File.open(ENV['SCHEMA'] || "db/schema.rb", "w") do |file|
ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
end
end
desc "Load a schema.rb file into the database"
task :load => :environment do
file = ENV['SCHEMA'] || "db/schema.rb"
load(file)
end
end
namespace :structure do
desc "Dump the database structure to a SQL file"
task :dump => :environment do
abcs = ActiveRecord::Base.configurations
case abcs[RAILS_ENV]["adapter"]
when "mysql", "oci", "oracle"
ActiveRecord::Base.establish_connection(abcs[RAILS_ENV])
File.open("db/#{RAILS_ENV}_structure.sql", "w+") { |f| f << ActiveRecord::Base.connection.structure_dump }
when "postgresql"
ENV['PGHOST'] = abcs[RAILS_ENV]["host"] if abcs[RAILS_ENV]["host"]
ENV['PGPORT'] = abcs[RAILS_ENV]["port"].to_s if abcs[RAILS_ENV]["port"]
ENV['PGPASSWORD'] = abcs[RAILS_ENV]["password"].to_s if abcs[RAILS_ENV]["password"]
search_path = abcs[RAILS_ENV]["schema_search_path"]
search_path = "--schema=#{search_path}" if search_path
`pg_dump -i -U "#{abcs[RAILS_ENV]["username"]}" -s -x -O -f db/#{RAILS_ENV}_structure.sql #{search_path} #{abcs[RAILS_ENV]["database"]}`
raise "Error dumping database" if $?.exitstatus == 1
when "sqlite", "sqlite3"
dbfile = abcs[RAILS_ENV]["database"] || abcs[RAILS_ENV]["dbfile"]
`#{abcs[RAILS_ENV]["adapter"]} #{dbfile} .schema > db/#{RAILS_ENV}_structure.sql`
when "sqlserver"
`scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /f db\\#{RAILS_ENV}_structure.sql /q /A /r`
`scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /F db\ /q /A /r`
when "firebird"
set_firebird_env(abcs[RAILS_ENV])
db_string = firebird_db_string(abcs[RAILS_ENV])
sh "isql -a #{db_string} > db/#{RAILS_ENV}_structure.sql"
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
if ActiveRecord::Base.connection.supports_migrations?
File.open("db/#{RAILS_ENV}_structure.sql", "a") { |f| f << ActiveRecord::Base.connection.dump_schema_information }
end
end
end
namespace :test do
desc "Recreate the test database from the current environment's database schema"
task :clone => %w(db:schema:dump db:test:purge) do
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations['test'])
ActiveRecord::Schema.verbose = false
Rake::Task["db:schema:load"].invoke
end
desc "Recreate the test databases from the development structure"
task :clone_structure => [ "db:structure:dump", "db:test:purge" ] do
abcs = ActiveRecord::Base.configurations
case abcs["test"]["adapter"]
when "mysql"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.execute('SET foreign_key_checks = 0')
IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split("\n\n").each do |table|
ActiveRecord::Base.connection.execute(table)
end
when "postgresql"
ENV['PGHOST'] = abcs["test"]["host"] if abcs["test"]["host"]
ENV['PGPORT'] = abcs["test"]["port"].to_s if abcs["test"]["port"]
ENV['PGPASSWORD'] = abcs["test"]["password"].to_s if abcs["test"]["password"]
`psql -U "#{abcs["test"]["username"]}" -f db/#{RAILS_ENV}_structure.sql #{abcs["test"]["database"]}`
when "sqlite", "sqlite3"
dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
`#{abcs["test"]["adapter"]} #{dbfile} < db/#{RAILS_ENV}_structure.sql`
when "sqlserver"
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
when "oci", "oracle"
ActiveRecord::Base.establish_connection(:test)
IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split(";\n\n").each do |ddl|
ActiveRecord::Base.connection.execute(ddl)
end
when "firebird"
set_firebird_env(abcs["test"])
db_string = firebird_db_string(abcs["test"])
sh "isql -i db/#{RAILS_ENV}_structure.sql #{db_string}"
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
end
desc "Empty the test database"
task :purge => :environment do
abcs = ActiveRecord::Base.configurations
case abcs["test"]["adapter"]
when "mysql"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.recreate_database(abcs["test"]["database"])
when "postgresql"
ActiveRecord::Base.clear_active_connections!
drop_database(abcs['test'])
create_database(abcs['test'])
when "sqlite","sqlite3"
dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
File.delete(dbfile) if File.exist?(dbfile)
when "sqlserver"
dropfkscript = "#{abcs["test"]["host"]}.#{abcs["test"]["database"]}.DP1".gsub(/\\/,'-')
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{dropfkscript}`
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
when "oci", "oracle"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.structure_drop.split(";\n\n").each do |ddl|
ActiveRecord::Base.connection.execute(ddl)
end
when "firebird"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.recreate_database!
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
end
desc 'Prepare the test database and load the schema'
task :prepare => %w(environment db:abort_if_pending_migrations) do
if defined?(ActiveRecord) && !ActiveRecord::Base.configurations.blank?
Rake::Task[{ :sql => "db:test:clone_structure", :ruby => "db:test:clone" }[ActiveRecord::Base.schema_format]].invoke
end
end
end
namespace :sessions do
desc "Creates a sessions migration for use with CGI::Session::ActiveRecordStore"
task :create => :environment do
raise "Task unavailable to this database (no migration support)" unless ActiveRecord::Base.connection.supports_migrations?
require 'rails_generator'
require 'rails_generator/scripts/generate'
Rails::Generator::Scripts::Generate.new.run(["session_migration", ENV["MIGRATION"] || "CreateSessions"])
end
desc "Clear the sessions table"
task :clear => :environment do
ActiveRecord::Base.connection.execute "DELETE FROM #{session_table_name}"
end
end
end
# Drop the database described by +config+.
# mysql drops through the current AR connection; sqlite removes the db file;
# postgresql drops via a connection to the 'postgres' maintenance database.
def drop_database(config)
  case config['adapter']
  when 'mysql'
    ActiveRecord::Base.connection.drop_database config['database']
  when /^sqlite/
    # BUGFIX: use rm_f so dropping an already-absent sqlite file is a no-op
    # instead of raising Errno::ENOENT (matches the rm_f variant elsewhere
    # in this file).
    FileUtils.rm_f(File.join(RAILS_ROOT, config['database']))
  when 'postgresql'
    ActiveRecord::Base.establish_connection(config.merge('database' => 'postgres', 'schema_search_path' => 'public'))
    ActiveRecord::Base.connection.drop_database config['database']
  end
end
# Name of the session store's table, honoring AR's pluralization setting.
def session_table_name
  if ActiveRecord::Base.pluralize_table_names
    :sessions
  else
    :session
  end
end
# Export Firebird credentials so command-line tools (isql) can authenticate.
# Keys already absent from +config+ leave the environment untouched.
def set_firebird_env(config)
  user     = config["username"]
  password = config["password"]
  ENV["ISC_USER"]     = user.to_s if user
  ENV["ISC_PASSWORD"] = password.to_s if password
end
# Build the isql-compatible connection string for +config+ via FireRuby.
def firebird_db_string(config)
FireRuby::Database.db_string_for(config.symbolize_keys)
end
# Don't dump schema for every test run, just when migrations are run.
namespace :db do
namespace :create do
desc 'Create all the local databases defined in config/database.yml'
task :all => :environment do
ActiveRecord::Base.configurations.each_value do |config|
# Skip entries that don't have a database key, such as the first entry here:
#
# defaults: &defaults
# adapter: mysql
# username: root
# password:
# host: localhost
#
# development:
# database: blog_development
# <<: *defaults
next unless config['database']
# Only connect to local databases
local_database?(config) { create_database(config) }
end
end
end
desc 'Create the database defined in config/database.yml for the current RAILS_ENV'
task :create => :environment do
create_database(ActiveRecord::Base.configurations[RAILS_ENV])
end
# Create the database described by +config+ if it does not already exist.
# A successful connection means it exists; otherwise create it with
# adapter-specific settings (charset/collation for MySQL, encoding for
# PostgreSQL, a fresh file for sqlite).
def create_database(config)
  begin
    ActiveRecord::Base.establish_connection(config)
    ActiveRecord::Base.connection
  rescue
    case config['adapter']
    when 'mysql'
      @charset   = ENV['CHARSET']   || 'utf8'
      @collation = ENV['COLLATION'] || 'utf8_general_ci'
      begin
        # Connect without selecting a database so CREATE DATABASE can run.
        ActiveRecord::Base.establish_connection(config.merge('database' => nil))
        ActiveRecord::Base.connection.create_database(config['database'], :charset => (config['charset'] || @charset), :collation => (config['collation'] || @collation))
        ActiveRecord::Base.establish_connection(config)
      rescue
        $stderr.puts "Couldn't create database for #{config.inspect}, charset: #{config['charset'] || @charset}, collation: #{config['collation'] || @collation} (if you set the charset manually, make sure you have a matching collation)"
      end
    when 'postgresql'
      # BUGFIX: database.yml configs use string keys, so config[:encoding]
      # was always nil and the configured encoding was ignored; check the
      # string key first (symbol kept as a backward-compatible fallback).
      @encoding = config['encoding'] || config[:encoding] || ENV['CHARSET'] || 'utf8'
      begin
        # Issue CREATE DATABASE from the 'postgres' maintenance database.
        ActiveRecord::Base.establish_connection(config.merge('database' => 'postgres', 'schema_search_path' => 'public'))
        ActiveRecord::Base.connection.create_database(config['database'], config.merge('encoding' => @encoding))
        ActiveRecord::Base.establish_connection(config)
      rescue
        $stderr.puts $!, *($!.backtrace)
        $stderr.puts "Couldn't create database for #{config.inspect}"
      end
    when 'sqlite'
      `sqlite "#{config['database']}"`
    when 'sqlite3'
      `sqlite3 "#{config['database']}"`
    end
  else
    $stderr.puts "#{config['database']} already exists"
  end
end
namespace :drop do
desc 'Drops all the local databases defined in config/database.yml'
task :all => :environment do
ActiveRecord::Base.configurations.each_value do |config|
# Skip entries that don't have a database key
next unless config['database']
# Only connect to local databases
local_database?(config) { drop_database(config) }
end
end
end
desc 'Drops the database for the current RAILS_ENV'
task :drop => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
begin
drop_database(config)
rescue Exception => e
puts "Couldn't drop #{config['database']} : #{e.inspect}"
end
end
def local_database?(config, &block)
if %w( 127.0.0.1 localhost ).include?(config['host']) || config['host'].blank?
yield
else
puts "This task only modifies local databases. #{config['database']} is on a remote host."
end
end
desc "Migrate the database through scripts in db/migrate. Target specific version with VERSION=x. Turn off output with VERBOSE=false."
task :migrate => :environment do
ActiveRecord::Migration.verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
ActiveRecord::Migrator.migrate("db/migrate/", ENV["VERSION"] ? ENV["VERSION"].to_i : nil)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
namespace :migrate do
desc 'Rollbacks the database one migration and re migrate up. If you want to rollback more than one step, define STEP=x'
task :redo => [ 'db:rollback', 'db:migrate' ]
desc 'Resets your database using your migrations for the current environment'
task :reset => ["db:drop", "db:create", "db:migrate"]
desc 'Runs the "up" for a given migration VERSION.'
task :up => :environment do
version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
raise "VERSION is required" unless version
ActiveRecord::Migrator.run(:up, "db/migrate/", version)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
desc 'Runs the "down" for a given migration VERSION.'
task :down => :environment do
version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
raise "VERSION is required" unless version
ActiveRecord::Migrator.run(:down, "db/migrate/", version)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
end
desc 'Rolls the schema back to the previous version. Specify the number of steps with STEP=n'
task :rollback => :environment do
step = ENV['STEP'] ? ENV['STEP'].to_i : 1
ActiveRecord::Migrator.rollback('db/migrate/', step)
Rake::Task["db:schema:dump"].invoke if ActiveRecord::Base.schema_format == :ruby
end
desc 'Drops and recreates the database from db/schema.rb for the current environment.'
task :reset => ['db:drop', 'db:create', 'db:schema:load']
desc "Retrieves the charset for the current environment's database"
task :charset => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
case config['adapter']
when 'mysql'
ActiveRecord::Base.establish_connection(config)
puts ActiveRecord::Base.connection.charset
else
puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
end
end
desc "Retrieves the collation for the current environment's database"
task :collation => :environment do
config = ActiveRecord::Base.configurations[RAILS_ENV || 'development']
case config['adapter']
when 'mysql'
ActiveRecord::Base.establish_connection(config)
puts ActiveRecord::Base.connection.collation
else
puts 'sorry, your database adapter is not supported yet, feel free to submit a patch'
end
end
desc "Retrieves the current schema version number"
task :version => :environment do
puts "Current version: #{ActiveRecord::Migrator.current_version}"
end
desc "Raises an error if there are pending migrations"
task :abort_if_pending_migrations => :environment do
if defined? ActiveRecord
pending_migrations = ActiveRecord::Migrator.new(:up, 'db/migrate').pending_migrations
if pending_migrations.any?
puts "You have #{pending_migrations.size} pending migrations:"
pending_migrations.each do |pending_migration|
puts ' %4d %s' % [pending_migration.version, pending_migration.name]
end
abort %{Run "rake db:migrate" to update your database then try again.}
end
end
end
namespace :fixtures do
desc "Load fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y"
task :load => :environment do
require 'active_record/fixtures'
ActiveRecord::Base.establish_connection(RAILS_ENV.to_sym)
(ENV['FIXTURES'] ? ENV['FIXTURES'].split(/,/) : Dir.glob(File.join(RAILS_ROOT, 'test', 'fixtures', '*.{yml,csv}'))).each do |fixture_file|
Fixtures.create_fixtures('test/fixtures', File.basename(fixture_file, '.*'))
end
end
desc "Search for a fixture given a LABEL or ID."
task :identify => :environment do
require "active_record/fixtures"
label, id = ENV["LABEL"], ENV["ID"]
raise "LABEL or ID required" if label.blank? && id.blank?
puts %Q(The fixture ID for "#{label}" is #{Fixtures.identify(label)}.) if label
Dir["#{RAILS_ROOT}/test/fixtures/**/*.yml"].each do |file|
if data = YAML::load(ERB.new(IO.read(file)).result)
data.keys.each do |key|
key_id = Fixtures.identify(key)
if key == label || key_id == id.to_i
puts "#{file}: #{key} (#{key_id})"
end
end
end
end
end
end
namespace :schema do
desc "Create a db/schema.rb file that can be portably used against any DB supported by AR"
task :dump => :environment do
require 'active_record/schema_dumper'
File.open(ENV['SCHEMA'] || "db/schema.rb", "w") do |file|
ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
end
end
desc "Load a schema.rb file into the database"
task :load => :environment do
file = ENV['SCHEMA'] || "db/schema.rb"
load(file)
end
end
namespace :structure do
desc "Dump the database structure to a SQL file"
task :dump => :environment do
abcs = ActiveRecord::Base.configurations
case abcs[RAILS_ENV]["adapter"]
when "mysql", "oci", "oracle"
ActiveRecord::Base.establish_connection(abcs[RAILS_ENV])
File.open("db/#{RAILS_ENV}_structure.sql", "w+") { |f| f << ActiveRecord::Base.connection.structure_dump }
when "postgresql"
ENV['PGHOST'] = abcs[RAILS_ENV]["host"] if abcs[RAILS_ENV]["host"]
ENV['PGPORT'] = abcs[RAILS_ENV]["port"].to_s if abcs[RAILS_ENV]["port"]
ENV['PGPASSWORD'] = abcs[RAILS_ENV]["password"].to_s if abcs[RAILS_ENV]["password"]
search_path = abcs[RAILS_ENV]["schema_search_path"]
search_path = "--schema=#{search_path}" if search_path
`pg_dump -i -U "#{abcs[RAILS_ENV]["username"]}" -s -x -O -f db/#{RAILS_ENV}_structure.sql #{search_path} #{abcs[RAILS_ENV]["database"]}`
raise "Error dumping database" if $?.exitstatus == 1
when "sqlite", "sqlite3"
dbfile = abcs[RAILS_ENV]["database"] || abcs[RAILS_ENV]["dbfile"]
`#{abcs[RAILS_ENV]["adapter"]} #{dbfile} .schema > db/#{RAILS_ENV}_structure.sql`
when "sqlserver"
`scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /f db\\#{RAILS_ENV}_structure.sql /q /A /r`
`scptxfr /s #{abcs[RAILS_ENV]["host"]} /d #{abcs[RAILS_ENV]["database"]} /I /F db\ /q /A /r`
when "firebird"
set_firebird_env(abcs[RAILS_ENV])
db_string = firebird_db_string(abcs[RAILS_ENV])
sh "isql -a #{db_string} > db/#{RAILS_ENV}_structure.sql"
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
if ActiveRecord::Base.connection.supports_migrations?
File.open("db/#{RAILS_ENV}_structure.sql", "a") { |f| f << ActiveRecord::Base.connection.dump_schema_information }
end
end
end
# db:test:* tasks rebuild the test database from the development schema.
namespace :test do
desc "Recreate the test database from the current environment's database schema"
task :clone => %w(db:schema:dump db:test:purge) do
# Load the freshly dumped schema.rb into the (just purged) test database.
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations['test'])
ActiveRecord::Schema.verbose = false
Rake::Task["db:schema:load"].invoke
end
desc "Recreate the test databases from the development structure"
task :clone_structure => [ "db:structure:dump", "db:test:purge" ] do
abcs = ActiveRecord::Base.configurations
case abcs["test"]["adapter"]
when "mysql"
# Disable FK checks so tables can be created in dump-file order.
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.execute('SET foreign_key_checks = 0')
# Statements in the dump are separated by blank lines.
IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split("\n\n").each do |table|
ActiveRecord::Base.connection.execute(table)
end
when "postgresql"
# psql picks up its credentials from the PG* environment variables.
ENV['PGHOST'] = abcs["test"]["host"] if abcs["test"]["host"]
ENV['PGPORT'] = abcs["test"]["port"].to_s if abcs["test"]["port"]
ENV['PGPASSWORD'] = abcs["test"]["password"].to_s if abcs["test"]["password"]
`psql -U "#{abcs["test"]["username"]}" -f db/#{RAILS_ENV}_structure.sql #{abcs["test"]["database"]}`
when "sqlite", "sqlite3"
dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
`#{abcs["test"]["adapter"]} #{dbfile} < db/#{RAILS_ENV}_structure.sql`
when "sqlserver"
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
when "oci", "oracle"
# Oracle DDL statements in the dump are separated by ";\n\n".
ActiveRecord::Base.establish_connection(:test)
IO.readlines("db/#{RAILS_ENV}_structure.sql").join.split(";\n\n").each do |ddl|
ActiveRecord::Base.connection.execute(ddl)
end
when "firebird"
set_firebird_env(abcs["test"])
db_string = firebird_db_string(abcs["test"])
sh "isql -i db/#{RAILS_ENV}_structure.sql #{db_string}"
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
end
desc "Empty the test database"
task :purge => :environment do
abcs = ActiveRecord::Base.configurations
case abcs["test"]["adapter"]
when "mysql"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.recreate_database(abcs["test"]["database"])
when "postgresql"
# Disconnect first: PostgreSQL refuses to drop a database that is in use.
ActiveRecord::Base.clear_active_connections!
drop_database(abcs['test'])
create_database(abcs['test'])
when "sqlite","sqlite3"
# For sqlite the database is a file; deleting it empties it.
dbfile = abcs["test"]["database"] || abcs["test"]["dbfile"]
File.delete(dbfile) if File.exist?(dbfile)
when "sqlserver"
dropfkscript = "#{abcs["test"]["host"]}.#{abcs["test"]["database"]}.DP1".gsub(/\\/,'-')
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{dropfkscript}`
`osql -E -S #{abcs["test"]["host"]} -d #{abcs["test"]["database"]} -i db\\#{RAILS_ENV}_structure.sql`
when "oci", "oracle"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.structure_drop.split(";\n\n").each do |ddl|
ActiveRecord::Base.connection.execute(ddl)
end
when "firebird"
ActiveRecord::Base.establish_connection(:test)
ActiveRecord::Base.connection.recreate_database!
else
raise "Task not supported by '#{abcs["test"]["adapter"]}'"
end
end
desc 'Check for pending migrations and load the test schema'
task :prepare => 'db:abort_if_pending_migrations' do
# Choose clone_structure (:sql format) or schema:load (:ruby format)
# based on the application's schema_format setting.
if defined?(ActiveRecord) && !ActiveRecord::Base.configurations.blank?
Rake::Task[{ :sql => "db:test:clone_structure", :ruby => "db:schema:load" }[ActiveRecord::Base.schema_format]].invoke
end
end
end
# db:sessions:* tasks manage the ActiveRecord session-store table.
namespace :sessions do
desc "Creates a sessions migration for use with CGI::Session::ActiveRecordStore"
task :create => :environment do
raise "Task unavailable to this database (no migration support)" unless ActiveRecord::Base.connection.supports_migrations?
require 'rails_generator'
require 'rails_generator/scripts/generate'
# MIGRATION=Name overrides the default migration class name.
Rails::Generator::Scripts::Generate.new.run(["session_migration", ENV["MIGRATION"] || "CreateSessions"])
end
desc "Clear the sessions table"
task :clear => :environment do
ActiveRecord::Base.connection.execute "DELETE FROM #{session_table_name}"
end
end
end
# Drops the database described by +config+ (a connection-spec hash).
# MySQL drops through the current connection, sqlite deletes the db file,
# and PostgreSQL first reconnects to the maintenance "postgres" database
# so the target database is not in use while being dropped.
def drop_database(config)
  adapter = config['adapter']
  db_name = config['database']
  if adapter == 'mysql'
    ActiveRecord::Base.connection.drop_database db_name
  elsif adapter.to_s.start_with?('sqlite')
    FileUtils.rm(File.join(RAILS_ROOT, db_name))
  elsif adapter == 'postgresql'
    maintenance_spec = config.merge('database' => 'postgres', 'schema_search_path' => 'public')
    ActiveRecord::Base.establish_connection(maintenance_spec)
    ActiveRecord::Base.connection.drop_database db_name
  end
end
# Name of the sessions table, following the application's ActiveRecord
# table-name pluralization setting.
def session_table_name
  if ActiveRecord::Base.pluralize_table_names
    :sessions
  else
    :session
  end
end
# Exports Firebird credentials through the ISC_* environment variables
# read by the isql command-line tool. Missing (or falsy) keys leave the
# corresponding variable untouched.
def set_firebird_env(config)
  user = config["username"]
  password = config["password"]
  ENV["ISC_USER"] = user.to_s if user
  ENV["ISC_PASSWORD"] = password.to_s if password
end
# Builds the isql connection string for +config+ via the FireRuby driver;
# symbolize_keys comes from ActiveSupport's Hash extensions.
def firebird_db_string(config)
FireRuby::Database.db_string_for(config.symbolize_keys)
end
|
require 'spec_helper'
require_relative '../corals/dominoes'
# Specs for the Dominoes rule set, written against a custom `test` DSL
# (see spec_helper / corals/dominoes): `test :dominoes, given: {...},
# when: {...}, then: {...}` appears to resolve the named rules against
# the given state and assert the resulting state; lambda values in
# `then:` look like instance-evaled expectations -- confirm against the
# corals implementation.
describe 'Dominoes' do
describe 'Game' do
# Defaults with no given/when: empty table, 4 players, 55 dominoes
# (a double-nine set has 55 tiles).
it '1 table 4 players & 55 dominoes' do
test :dominoes, then: {
table: [],
players: -> { expect(count).to eq 4 },
dominoes: -> { expect(count).to eq 55 }
}
end
end
describe 'Start' do
# Deal: 10 tiles to each of the 4 players, 15 remain (4 * 10 + 15 = 55).
it 'empty table, each player with 10 dominoes' do
test :dominoes,
when: { on: :start },
then: {
table: [],
players: -> { all? { |_, dominoes| expect(dominoes.count).to eq 10 }},
dominoes: -> { expect(count).to eq 15 }
}
end
end
describe 'Turn' do
it 'can play anything if table is empty' do
test :dominoes,
given: { players: { player: [[9,9]] } },
when: { on: :turn },
then: { domino: [9,9], players: { player: [] }}
end
# A playable domino must match one of the table's open ends (9 here).
it 'finds a domino to play' do
test :dominoes,
given: { table: [[9,9]], players: { player: [[0,0],[9,8],[8,8]] }},
when: { on: :turn },
then: { domino: [9,8], players: { player: [[0,0],[8,8]] }}
end
# No matching end: domino is nil and the hand stays untouched.
it 'finds nothing if cannot play' do
test :dominoes,
given: { table: [[9,9]], players: { player: [[0,0],[8,8]] }},
when: { on: :turn },
then: { domino: nil, players: { player: [[0,0],[8,8]] }}
end
end
describe 'Play' do
it 'the domino goes on the table' do
test :dominoes,
when: { on: :play, domino: [9,9] },
then: { table: [[9,9]] }
end
# The domino's first value (its "head") picks the table end to join:
# [9,7] attaches at the 9 end (front), [7,9] at the 7 end (back).
it 'the head indicates preference' do
test :dominoes,
given: { table: [[9,9],[9,8],[8,7]] },
when: { on: :play, domino: [9,7] },
then: { table: [[7,9],[9,9],[9,8],[8,7]] }
test :dominoes,
given: { table: [[9,9],[9,8],[8,7]] },
when: { on: :play, domino: [7,9] },
then: { table: [[9,9],[9,8],[8,7],[7,9]] }
end
# When the head doesn't match, the tail is tried (tile is flipped).
it 'the tail is an afterthought' do
test :dominoes,
given: { table: [[9,9]] },
when: { on: :play, domino: [7,9] },
then: { table: [[7,9],[9,9]] }
test :dominoes,
given: { table: [[9,9],[9,8]] },
when: { on: :play, domino: [7,8] },
then: { table: [[9,9],[9,8],[8,7]] }
end
end
# Game-flow rules: starting, rotating turns, knocking, end conditions.
describe 'Controller' do
it 'starts the game' do
test :dominoes, then: { on: :start }
end
it 'player has the first turn' do
test :dominoes,
when: { on: :start },
then: { on: :turn, player: :player }
end
it 'plays the domino' do
test :dominoes,
given: { players: { player:[:domino] } },
when: { on: :turn, player: :player },
then: { on: :play }
end
# A player with nothing to play knocks and is recorded in `knocked`.
it 'knocks' do
test :dominoes,
when: { on: :turn, player: :player },
then: { on: :knock, knocked: [:player] }
end
it 'finds next player after play' do
test :dominoes,
given: { players: { player: [:more_dominoes]}},
when: { on: :play, domino: :domino },
then: { on: :turn, player: :right }
end
it 'finds next player after knock' do
test :dominoes,
when: { on: :knock, player: :left },
then: { on: :turn, player: :player }
end
# All four seats knocked in a row: nobody can move, the game is stuck.
it 'stops the game when stuck' do
test :dominoes,
given: { knocked: [:right, :front, :left, :player]},
when: { on: :knock },
then: { on: :stuck }
end
# Playing with an otherwise empty hand wins the game.
it 'calls the winner' do
test :dominoes,
when: { on: :play, domino: :domino },
then: { on: :won }
end
# Pending example -- not yet implemented.
it 'winner has the first turn'
end
end
the end game
require 'spec_helper'
require_relative '../corals/dominoes'
# Second revision of the Dominoes specs (same custom `test` DSL as the
# earlier file): 'Game' is renamed 'Defaults' and an end-to-end
# 'plays a game' example is added at the bottom.
describe 'Dominoes' do
describe 'Defaults' do
# No given/when: empty table, 4 players, 55 dominoes (double-nine set).
it '1 table 4 players & 55 dominoes' do
test :dominoes, then: {
table: [],
players: -> { expect(count).to eq 4 },
dominoes: -> { expect(count).to eq 55 }
}
end
end
describe 'Start' do
# Deal: 10 tiles each to 4 players, 15 remain (4 * 10 + 15 = 55).
it 'empty table, each player with 10 dominoes' do
test :dominoes,
when: { on: :start },
then: {
table: [],
players: -> { all? { |_, dominoes| expect(dominoes.count).to eq 10 }},
dominoes: -> { expect(count).to eq 15 }
}
end
end
describe 'Turn' do
it 'can play anything if table is empty' do
test :dominoes,
given: { players: { player: [[9,9]] } },
when: { on: :turn },
then: { domino: [9,9], players: { player: [] }}
end
# A playable domino must match one of the table's open ends.
it 'finds a domino to play' do
test :dominoes,
given: { table: [[9,9]], players: { player: [[0,0],[9,8],[8,8]] }},
when: { on: :turn },
then: { domino: [9,8], players: { player: [[0,0],[8,8]] }}
end
# No matching end: domino is nil and the hand stays untouched.
it 'finds nothing if cannot play' do
test :dominoes,
given: { table: [[9,9]], players: { player: [[0,0],[8,8]] }},
when: { on: :turn },
then: { domino: nil, players: { player: [[0,0],[8,8]] }}
end
end
describe 'Play' do
it 'the domino goes on the table' do
test :dominoes,
when: { on: :play, domino: [9,9] },
then: { table: [[9,9]] }
end
# The head (first value) picks which table end the tile joins.
it 'the head indicates preference' do
test :dominoes,
given: { table: [[9,9],[9,8],[8,7]] },
when: { on: :play, domino: [9,7] },
then: { table: [[7,9],[9,9],[9,8],[8,7]] }
test :dominoes,
given: { table: [[9,9],[9,8],[8,7]] },
when: { on: :play, domino: [7,9] },
then: { table: [[9,9],[9,8],[8,7],[7,9]] }
end
# When the head doesn't match, the tail is tried (tile is flipped).
it 'the tail is an afterthought' do
test :dominoes,
given: { table: [[9,9]] },
when: { on: :play, domino: [7,9] },
then: { table: [[7,9],[9,9]] }
test :dominoes,
given: { table: [[9,9],[9,8]] },
when: { on: :play, domino: [7,8] },
then: { table: [[9,9],[9,8],[8,7]] }
end
end
# Game-flow rules: starting, turn rotation, knocking, end conditions.
describe 'Controller' do
it 'starts the game' do
test :dominoes, then: { on: :start }
end
it 'player has the first turn' do
test :dominoes,
when: { on: :start },
then: { on: :turn, player: :player }
end
it 'plays the domino' do
test :dominoes,
given: { players: { player:[:domino] } },
when: { on: :turn, player: :player },
then: { on: :play }
end
it 'knocks' do
test :dominoes,
when: { on: :turn, player: :player },
then: { on: :knock, knocked: [:player] }
end
it 'finds next player after play' do
test :dominoes,
given: { players: { player: [:more_dominoes]}},
when: { on: :play, domino: :domino },
then: { on: :turn, player: :right }
end
it 'finds next player after knock' do
test :dominoes,
when: { on: :knock, player: :left },
then: { on: :turn, player: :player }
end
it 'stops the game when stuck' do
test :dominoes,
given: { knocked: [:right, :front, :left, :player]},
when: { on: :knock },
then: { on: :stuck }
end
it 'calls the winner' do
test :dominoes,
when: { on: :play, domino: :domino },
then: { on: :won }
end
end
# End-to-end smoke test: keep resolving the rule set until a winner
# appears in the game state -- presumably `resolve` comes from the
# corals engine; confirm.
let(:game) {{}}
it 'plays a game' do
game = resolve game, [:dominoes] until game[:winner]
end
end |
argument order dependency
# --------------------------- House for SALE ----------------------------
# --------------------------- House for SALE ----------------------------
# A house listing. Every attribute arrives through one options hash, so
# call sites never depend on argument order and may omit anything that
# has a sensible default.
class House
  attr_reader :square_feet, :num_bedrooms, :num_baths, :cost, :has_tenants, :short_sale

  # Recognised keys: :address, :square_feet, :num_bedrooms (3),
  # :num_baths (2), :cost (320_000), :down_payment (0.20), :sold (false),
  # :short_sale (5), :has_tenants (true). has_tenants uses fetch so an
  # explicit `has_tenants: false` survives (`||` would coerce it to true).
  def initialize(info = {})
    @address      = info[:address]
    @square_feet  = info[:square_feet]
    @num_bedrooms = option(info, :num_bedrooms, 3)
    @num_baths    = option(info, :num_baths, 2)
    @cost         = option(info, :cost, 320_000)
    @down_payment = option(info, :down_payment, 0.20)
    @sold         = option(info, :sold, false)
    @short_sale   = option(info, :short_sale, 5)
    @has_tenants  = info.fetch(:has_tenants, true)
  end

  # Street address with the leading house number masked out.
  def obscure_address
    @address.sub(/\A\d*/) { '****' }
  end

  # Marks the house sold when the buyer covers the down payment and has
  # good credit; otherwise leaves it unsold.
  def buy!(money, good_credit)
    return unless good_credit && money >= down_payment
    @sold = true
  end

  # Dollar amount required up front (cost times the down-payment rate).
  def down_payment
    cost * @down_payment
  end

  def to_s
    "#{obscure_address} : #{square_feet} sq. ft., #{num_bedrooms} bed, #{num_baths} bath. $#{cost}"
  end

  private

  # Looks up +key+ in +info+, falling back to +fallback+ for nil/false.
  def option(info, key, fallback)
    info[key] || fallback
  end
end
# Exercise House with hash-style options: argument order no longer
# matters because everything travels in a single info hash.
house_1 = House.new(address: "3245 Temple St",
                    square_feet: 2_500,
                    num_bedrooms: 5,
                    num_baths: 3,
                    cost: 320_000,
                    down_payment: 0.00,
                    sold: false,
                    has_tenants: false)
p house_1
p house_1.num_baths
p house_1.has_tenants
p house_1.short_sale
p house_1.to_s
# --------------------------- Herbie the Car ----------------------------
puts "---------------------"
# A car whose start-up options demonstrate Hash#fetch defaults: fetch
# honours an explicit `headlights: false`, which the `options[:key] || true`
# idiom would silently override (false || true == true).
class Car
  attr_reader :headlights, :gear

  # Starts the car. Defaults: headlights on, gear 1, 3 windows (hedge:
  # presumably windows *down* -- the demo never reads it back).
  def start_car(options = {})
    @headlights = options.fetch(:headlights) { true }
    @gear       = options[:gear] || 1
    @windows    = options[:windows] || 3
  end
end
herbie = Car.new
# Headlights should default to on when the car starts:
p herbie.start_car
p herbie.headlights # => true
# An explicit `headlights: false` must win over the default:
p herbie.start_car(headlights: false)
p herbie.headlights # => false -- fetch preserves it where `||` would not
|
#
# Be sure to run `pod spec lint SwiftOCR.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.name         = "SwiftOCR"
  s.version      = "1.0"
  s.summary      = "OCR engine written in swift"
  s.description  = <<-DESC
SwiftOCR is a fast and simple OCR library written in Swift. It uses a neural network for image recognition. As of now, SwiftOCR is optimized for recognizing short, one line long alphanumeric codes (e.g. DI4C9CM). We currently support iOS and OS X.
  DESC
  s.homepage     = "https://github.com/garnele007/SwiftOCR"
  s.license      = "Apache-2.0"
  s.author       = { "Nicolas Camenisch" => "garnele007@gmail.com" }

  # ――― Platforms ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.ios.deployment_target = "8.0"
  s.osx.deployment_target = "10.9"

  # ――― Source ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # Releases are tagged with the bare version string.
  s.source       = { :git => "https://github.com/garnele007/SwiftOCR.git", :tag => s.version.to_s }
  s.source_files = "framework/SwiftOCR/*.swift", "framework/SwiftOCR/FFNN/*.swift"
  # The trained network file and sample images ship with the pod.
  s.resources    = "framework/SwiftOCR/OCR-Network", "framework/SwiftOCR/Realistic Training Images/*.png"

  # ――― Build settings & dependencies ―――――――――――――――――――――――――――――――――― #
  s.xcconfig     = { "SWIFT_VERSION" => "3.0.1" }
  s.dependency "GPUImage"
end
Update Podspec
#
# Be sure to run `pod spec lint SwiftOCR.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.name         = "SwiftOCR"
  s.version      = "1.1"
  s.summary      = "OCR engine written in swift"
  s.description  = <<-DESC
SwiftOCR is a fast and simple OCR library written in Swift. It uses a neural network for image recognition. As of now, SwiftOCR is optimized for recognizing short, one line long alphanumeric codes (e.g. DI4C9CM). We currently support iOS and OS X.
  DESC
  s.homepage     = "https://github.com/garnele007/SwiftOCR"
  s.license      = "Apache-2.0"
  s.author       = { "Nicolas Camenisch" => "garnele007@gmail.com" }

  # ――― Platforms ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.ios.deployment_target = "8.0"
  s.osx.deployment_target = "10.9"

  # ――― Source ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # Releases are tagged with the bare version string.
  s.source       = { :git => "https://github.com/garnele007/SwiftOCR.git", :tag => s.version.to_s }
  s.source_files = "framework/SwiftOCR/*.swift", "framework/SwiftOCR/FFNN/*.swift"
  # The trained network file and sample images ship with the pod.
  s.resources    = "framework/SwiftOCR/OCR-Network", "framework/SwiftOCR/Realistic Training Images/*.png"

  # ――― Build settings & dependencies ―――――――――――――――――――――――――――――――――― #
  s.xcconfig     = { "SWIFT_VERSION" => "3.0.1" }
  s.dependency "GPUImage"
end
|
#
# Be sure to run `pod spec lint SwiftyUI.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.name         = "SwiftyUI"
  s.version      = "1.1.0"
  s.summary      = "High performance and lightweight UI."
  s.description  = <<-DESC
High performance(100%) and lightweight(one class each UI) UIView, UIImage, UIImageView, UIlabel, UIButton and more.
  DESC
  s.homepage     = "https://github.com/haoking/SwiftyUI"
  s.license      = { :type => 'MIT', :file => 'LICENSE' }
  s.author       = { "Haochen Wang" => "hoakinus@gmail.com" }

  # ――― Platform ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.ios.deployment_target = "8.0"

  # ――― Source ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # Releases are tagged with the bare version string.
  s.source       = { :git => "https://github.com/haoking/SwiftyUI.git", :tag => s.version }
  s.source_files = 'Source/*.swift'
end
update to 1.2.0
#
# Be sure to run `pod spec lint SwiftyUI.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.name         = "SwiftyUI"
  s.version      = "1.2.0"
  s.summary      = "High performance and lightweight UI."
  s.description  = <<-DESC
High performance(100%) and lightweight(one class each UI) UIView, UIImage, UIImageView, UIlabel, UIButton and more.
  DESC
  s.homepage     = "https://github.com/haoking/SwiftyUI"
  s.license      = { :type => 'MIT', :file => 'LICENSE' }
  s.author       = { "Haochen Wang" => "hoakinus@gmail.com" }

  # ――― Platform ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.ios.deployment_target = "8.0"

  # ――― Source ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  # Releases are tagged with the bare version string.
  s.source       = { :git => "https://github.com/haoking/SwiftyUI.git", :tag => s.version }
  s.source_files = 'Source/*.swift'
end
|
#
# Be sure to run `pod lib lint TMPlayer.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  # ――― Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  s.name             = 'TMPlayer'
  s.version          = '1.0.4'
  s.summary          = 'A perfect player based on ZFPlayer'
  s.description      = <<-DESC
A perfect player based on ZFPlayer made by renzifeng, extend by Nguyễn Khánh Ngọc
  DESC
  s.homepage         = 'https://github.com/Th3M4sk8712/TMPlayer'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'Th3M4sk' => 'themaskhenzo@gmail.com' }
  # Releases are tagged with the bare version string.
  s.source           = { :git => 'https://github.com/Th3M4sk8712/TMPlayer.git', :tag => s.version.to_s }

  # ――― Platform, sources & dependencies ――――――――――――――――――――――――――――――― #
  s.ios.deployment_target = '8.0'
  s.source_files     = 'TMPlayer/**/*.{h,m}'
  # Player UI assets come from the bundled ZFPlayer resource bundle.
  s.resources        = 'TMPlayer/ZFPlayer.bundle'
  s.framework        = 'UIKit','MediaPlayer'
  s.dependency 'Masonry'
end
Update version to 1.0.5.
#
# Be sure to run `pod lib lint TMPlayer.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
# CocoaPods manifest for TMPlayer 1.0.5; validated with `pod lib lint TMPlayer.podspec`.
s.name = 'TMPlayer'
s.version = '1.0.5'
s.summary = 'A perfect player based on ZFPlayer'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
A perfect player based on ZFPlayer made by renzifeng, extend by Nguyễn Khánh Ngọc
DESC
s.homepage = 'https://github.com/Th3M4sk8712/TMPlayer'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Th3M4sk' => 'themaskhenzo@gmail.com' }
s.source = { :git => 'https://github.com/Th3M4sk8712/TMPlayer.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'TMPlayer/**/*.{h,m}'
# Ships the ZFPlayer UI assets alongside the sources.
s.resources = 'TMPlayer/ZFPlayer.bundle'
#s.public_header_files = 'Pod/Classes/**/*.h'
# NOTE(review): two frameworks assigned via the singular `s.framework`;
# CocoaPods accepts this, but `s.frameworks` is the documented plural form.
s.framework = 'UIKit','MediaPlayer'
s.dependency 'Masonry'
end
|
# CocoaPods manifest for TagsView 1.1.1 (Swift, iOS 9+).
Pod::Spec.new do |s|
s.name = "TagsView"
s.version = "1.1.1"
s.summary = "Quickly create a view with tags, written in Swift"
s.homepage = "https://github.com/tomoponzoo/TagsView"
s.license = "MIT"
s.social_media_url = "http://twitter.com/tomoponzoo"
s.authors = { "tomoponzoo" => "tomoponzoo@gmail.com" }
# The git tag must exist for each released version.
s.source = { :git => "https://github.com/tomoponzoo/TagsView.git", :tag => s.version }
s.ios.deployment_target = "9.0"
s.source_files = "Classes/*.swift"
end
Update podspec
# CocoaPods manifest for TagsView 1.2.0 (Swift, iOS 9+).
Pod::Spec.new do |s|
s.name = "TagsView"
s.version = "1.2.0"
s.summary = "Quickly create a view with tags, written in Swift"
s.homepage = "https://github.com/tomoponzoo/TagsView"
s.license = "MIT"
s.social_media_url = "http://twitter.com/tomoponzoo"
s.authors = { "tomoponzoo" => "tomoponzoo@gmail.com" }
# The git tag must exist for each released version.
s.source = { :git => "https://github.com/tomoponzoo/TagsView.git", :tag => s.version }
s.ios.deployment_target = "9.0"
s.source_files = "Classes/*.swift"
end
|
require 'fileutils'
require_relative '../header.rb'
# For TEST purposes: snapshot of the submitted-images dir before any moves.
test_images_before = Dir.entries(Bkmkr::Paths.submitted_images)
# Local path variables
all_submitted_images = Dir.entries(Bkmkr::Paths.submitted_images)
# Rename and move input files to tmp folder to eliminate possibility of overwriting
if Dir.exist?(Bkmkr::Paths.project_tmp_dir)
  FileUtils.rm_r(Bkmkr::Paths.project_tmp_dir)
end
Dir.mkdir(Bkmkr::Paths.project_tmp_dir)
Dir.mkdir(Bkmkr::Paths.project_tmp_dir_img)
FileUtils.cp(Bkmkr::Project.input_file, Bkmkr::Paths.project_tmp_file)
all_submitted_images.each do |c|
  # Skip OS metadata files, plus the '.' and '..' entries that Dir.entries
  # always returns (attempting to mv '.' would try to move the dir itself).
  unless c.include?(".db") or c.include?("DS_Store") or c == "." or c == ".."
    filename = c.split("/").pop
    # BUG FIX: the destination previously ended in the literal text
    # "#(unknown)" (broken interpolation), so every image was moved onto the
    # same bogus filename, each overwriting the last. Interpolate the real
    # basename instead.
    # NOTE(review): `c` is a bare basename from Dir.entries; the mv source
    # assumes the process cwd is the submitted-images dir -- confirm.
    FileUtils.mv(c, "#{Bkmkr::Paths.project_tmp_dir_img}/#{filename}")
  end
end
# Add a notice to the conversion dir warning that the process is in use
File.open("#{Bkmkr::Paths.alert}", 'w') do |output|
  output.write "The conversion processor is currently running. Please do not submit any new files or images until the process completes."
end
# TESTING -- each check records a "pass:"/"FAIL:" line for the run log.
# Filename should not be null
test_fname = Bkmkr::Project.filename.split(%r{\s*})
test_fname_status =
  if test_fname.length != 0
    "pass: original filename is not null"
  else
    "FAIL: original filename is not null"
  end
# tmpdir should exist
test_dir_status =
  if File.exist?("#{Bkmkr::Paths.project_tmp_dir}") and File.exist?("#{Bkmkr::Paths.project_tmp_dir_img}")
    "pass: temp directory and all sub-directories were successfully created"
  else
    "FAIL: temp directory and all sub-directories were successfully created"
  end
# submitted images dir should be clean ('.' and '..' are always present, hence 2)
test_images_after = Dir.entries("#{Bkmkr::Paths.submitted_images}")
test_imagedir_status =
  if test_images_after.length == 2
    "pass: submitted images directory has been emptied"
  else
    "FAIL: submitted images directory has been emptied"
  end
# IF submitted images dir was not clean at beginning, tmpdir images dir should also not be clean at end
test_tmp_images = Dir.entries("#{Bkmkr::Paths.project_tmp_dir_img}")
test_tmpimgdir_status =
  if test_images_before.length == test_tmp_images.length
    "pass: all submitted images have been copied to temp directory"
  else
    "FAIL: all submitted images have been copied to temp directory"
  end
# input file should exist in tmp dir
test_input_status =
  if File.file?("#{Bkmkr::Paths.project_tmp_file}")
    "pass: original file preserved in project directory"
  else
    "FAIL: original file preserved in project directory"
  end
# Write test results
File.open("#{Bkmkr::Paths.log_file}", 'a+') do |f|
  f.puts "-----"
  f.puts Time.now
  f.puts "----- TMPARCHIVE PROCESSES"
  [test_fname_status, test_dir_status, test_imagedir_status,
   test_tmpimgdir_status, test_input_status].each { |status| f.puts status }
end
Fix image handling: skip the '.' and '..' directory entries when moving submitted images.
require 'fileutils'
require_relative '../header.rb'
# For TEST purposes: snapshot of the submitted-images dir before any moves.
test_images_before = Dir.entries(Bkmkr::Paths.submitted_images)
# Local path variables
all_submitted_images = Dir.entries(Bkmkr::Paths.submitted_images)
# Rename and move input files to tmp folder to eliminate possibility of overwriting
if Dir.exist?(Bkmkr::Paths.project_tmp_dir)
  FileUtils.rm_r(Bkmkr::Paths.project_tmp_dir)
end
Dir.mkdir(Bkmkr::Paths.project_tmp_dir)
Dir.mkdir(Bkmkr::Paths.project_tmp_dir_img)
FileUtils.cp(Bkmkr::Project.input_file, Bkmkr::Paths.project_tmp_file)
all_submitted_images.each do |c|
  # Skip OS metadata files and the '.'/'..' entries Dir.entries always returns.
  unless c.include?(".db") or c.include?("DS_Store") or c == "." or c == ".."
    filename = c.split("/").pop
    # BUG FIX: the destination previously ended in the literal text
    # "#(unknown)" (broken interpolation), so every image was moved onto the
    # same bogus filename, each overwriting the last. Interpolate the real
    # basename instead.
    # NOTE(review): `c` is a bare basename from Dir.entries; the mv source
    # assumes the process cwd is the submitted-images dir -- confirm.
    FileUtils.mv(c, "#{Bkmkr::Paths.project_tmp_dir_img}/#{filename}")
  end
end
# Add a notice to the conversion dir warning that the process is in use
File.open("#{Bkmkr::Paths.alert}", 'w') do |output|
  output.write "The conversion processor is currently running. Please do not submit any new files or images until the process completes."
end
# TESTING -- each check records a "pass:"/"FAIL:" line for the run log.
# Filename should not be null
test_fname = Bkmkr::Project.filename.split(%r{\s*})
test_fname_status =
  if test_fname.length != 0
    "pass: original filename is not null"
  else
    "FAIL: original filename is not null"
  end
# tmpdir should exist
test_dir_status =
  if File.exist?("#{Bkmkr::Paths.project_tmp_dir}") and File.exist?("#{Bkmkr::Paths.project_tmp_dir_img}")
    "pass: temp directory and all sub-directories were successfully created"
  else
    "FAIL: temp directory and all sub-directories were successfully created"
  end
# submitted images dir should be clean ('.' and '..' are always present, hence 2)
test_images_after = Dir.entries("#{Bkmkr::Paths.submitted_images}")
test_imagedir_status =
  if test_images_after.length == 2
    "pass: submitted images directory has been emptied"
  else
    "FAIL: submitted images directory has been emptied"
  end
# IF submitted images dir was not clean at beginning, tmpdir images dir should also not be clean at end
test_tmp_images = Dir.entries("#{Bkmkr::Paths.project_tmp_dir_img}")
test_tmpimgdir_status =
  if test_images_before.length == test_tmp_images.length
    "pass: all submitted images have been copied to temp directory"
  else
    "FAIL: all submitted images have been copied to temp directory"
  end
# input file should exist in tmp dir
test_input_status =
  if File.file?("#{Bkmkr::Paths.project_tmp_file}")
    "pass: original file preserved in project directory"
  else
    "FAIL: original file preserved in project directory"
  end
# Write test results
File.open("#{Bkmkr::Paths.log_file}", 'a+') do |f|
  f.puts "-----"
  f.puts Time.now
  f.puts "----- TMPARCHIVE PROCESSES"
  [test_fname_status, test_dir_status, test_imagedir_status,
   test_tmpimgdir_status, test_input_status].each { |status| f.puts status }
end
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
# Generated by jeweler -- do not hand-edit fields here; change the
# Jeweler::Tasks in the Rakefile and re-run `rake gemspec` instead.
s.name = "sofa_gallery"
s.version = "0.0.4"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Oleg Khabarov", "Stephen McLeod", "The Working Group Inc."]
s.date = "2011-09-28"
s.description = ""
s.email = "stephen@theworkinggroup.ca"
s.extra_rdoc_files = [
"LICENSE",
"README.md"
]
# Full manifest of files shipped in the gem (regenerated by jeweler).
s.files = [
".document",
".travis.yml",
"Gemfile",
"Gemfile.lock",
"LICENSE",
"README.md",
"Rakefile",
"VERSION",
"app/assets/images/sofa_gallery/jcrop.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/contentPattern.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/contentPattern.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/default_thumb.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_next.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_prev.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_x.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_y.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternBottom.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternLeft.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternRight.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternTop.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/sprite.png",
"app/assets/javascripts/sofa_gallery/admin.js",
"app/assets/javascripts/sofa_gallery/application.js",
"app/assets/javascripts/sofa_gallery/carousel.js",
"app/assets/javascripts/sofa_gallery/jquery.jcrop.js",
"app/assets/javascripts/sofa_gallery/thumbnails.js",
"app/assets/stylesheets/sofa_gallery/admin.css",
"app/assets/stylesheets/sofa_gallery/application.css",
"app/assets/stylesheets/sofa_gallery/carousel.css",
"app/assets/stylesheets/sofa_gallery/gallery_list.css",
"app/assets/stylesheets/sofa_gallery/jquery.jcrop.css",
"app/assets/stylesheets/sofa_gallery/reset.css",
"app/assets/stylesheets/sofa_gallery/tango/credits.txt",
"app/assets/stylesheets/sofa_gallery/tango/next-horizontal.png",
"app/assets/stylesheets/sofa_gallery/tango/next-vertical.png",
"app/assets/stylesheets/sofa_gallery/tango/prev-horizontal.png",
"app/assets/stylesheets/sofa_gallery/tango/prev-vertical.png",
"app/assets/stylesheets/sofa_gallery/thumbnails.css.erb",
"app/controllers/application_controller.rb",
"app/controllers/sofa_gallery/admin/base_controller.rb",
"app/controllers/sofa_gallery/admin/galleries_controller.rb",
"app/controllers/sofa_gallery/admin/photos_controller.rb",
"app/controllers/sofa_gallery/galleries_controller.rb",
"app/helpers/sofa_gallery_helper.rb",
"app/models/sofa_gallery/gallery.rb",
"app/models/sofa_gallery/photo.rb",
"app/views/layouts/gallery_admin/application.html.erb",
"app/views/sofa_gallery/_carousel.html.erb",
"app/views/sofa_gallery/_list_galleries.html.erb",
"app/views/sofa_gallery/_thumbnails.html.erb",
"app/views/sofa_gallery/admin/_html_head.html.erb",
"app/views/sofa_gallery/admin/_navigation.html.erb",
"app/views/sofa_gallery/admin/galleries/_form.html.erb",
"app/views/sofa_gallery/admin/galleries/edit.html.erb",
"app/views/sofa_gallery/admin/galleries/index.html.erb",
"app/views/sofa_gallery/admin/galleries/new.html.erb",
"app/views/sofa_gallery/admin/photos/_form.html.erb",
"app/views/sofa_gallery/admin/photos/crop.html.erb",
"app/views/sofa_gallery/admin/photos/edit.html.erb",
"app/views/sofa_gallery/admin/photos/index.html.erb",
"app/views/sofa_gallery/admin/photos/new.html.erb",
"app/views/sofa_gallery/galleries/index.html.erb",
"app/views/sofa_gallery/galleries/show.html.erb",
"config.ru",
"config/application.rb",
"config/boot.rb",
"config/database.yml",
"config/environment.rb",
"config/environments/development.rb",
"config/environments/production.rb",
"config/environments/test.rb",
"config/initializers/paperclip.rb",
"config/initializers/sofa_gallery.rb",
"config/routes.rb",
"db/migrate/01_create_sofa_gallery.rb",
"lib/generators/README",
"lib/generators/sofa_gallery_generator.rb",
"lib/paperclip_processors/cropper.rb",
"lib/sofa_gallery.rb",
"lib/sofa_gallery/configuration.rb",
"lib/sofa_gallery/engine.rb",
"lib/sofa_gallery/form_builder.rb",
"script/rails",
"sofa_gallery.gemspec",
"test/fixtures/files/default.jpg",
"test/fixtures/files/default.txt",
"test/fixtures/files/default2.jpg",
"test/fixtures/sofa_gallery/galleries.yml",
"test/fixtures/sofa_gallery/photos.yml",
"test/functional/admin/galleries_controller_test.rb",
"test/functional/admin/photos_controller_test.rb",
"test/functional/galleries_controller_test.rb",
"test/test_helper.rb",
"test/unit/configuration_test.rb",
"test/unit/gallery_test.rb",
"test/unit/photo_test.rb"
]
s.homepage = "http://github.com/twg/sofa-gallery"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.10"
s.summary = "SofaGallery is an image gallery engine for Rails 3.1 apps (and ComfortableMexicanSofa)"
# Dependency declarations, branched on the RubyGems version available at
# install time (jeweler emits all three branches for old-RubyGems compat).
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, [">= 3.1.0"])
s.add_runtime_dependency(%q<paperclip>, [">= 2.3.14"])
s.add_runtime_dependency(%q<jquery-rails>, [">= 1.0.14"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<jeweler>, [">= 0"])
else
s.add_dependency(%q<rails>, [">= 3.1.0"])
s.add_dependency(%q<paperclip>, [">= 2.3.14"])
s.add_dependency(%q<jquery-rails>, [">= 1.0.14"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
else
s.add_dependency(%q<rails>, [">= 3.1.0"])
s.add_dependency(%q<paperclip>, [">= 2.3.14"])
s.add_dependency(%q<jquery-rails>, [">= 1.0.14"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
end
Regenerate gemspec.
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
# Generated by jeweler -- do not hand-edit fields here; change the
# Jeweler::Tasks in the Rakefile and re-run `rake gemspec` instead.
s.name = "sofa_gallery"
s.version = "0.0.4"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Oleg Khabarov", "Stephen McLeod", "The Working Group Inc."]
s.date = "2011-10-03"
s.description = ""
s.email = "stephen@theworkinggroup.ca"
s.extra_rdoc_files = [
"LICENSE",
"README.md"
]
# Full manifest of files shipped in the gem (regenerated by jeweler).
s.files = [
".document",
".travis.yml",
"Gemfile",
"LICENSE",
"README.md",
"Rakefile",
"VERSION",
"app/assets/images/sofa_gallery/jcrop.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/contentPattern.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_rounded/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/contentPattern.png",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/dark_square/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/default_thumb.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_next.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_prev.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_x.png",
"app/assets/images/sofa_gallery/prettyPhoto/default/sprite_y.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternBottom.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternLeft.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternRight.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/contentPatternTop.png",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/facebook/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_rounded/sprite.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/btnNext.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/btnPrevious.png",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/default_thumbnail.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/loader.gif",
"app/assets/images/sofa_gallery/prettyPhoto/light_square/sprite.png",
"app/assets/javascripts/sofa_gallery/admin.js",
"app/assets/javascripts/sofa_gallery/application.js",
"app/assets/javascripts/sofa_gallery/carousel.js",
"app/assets/javascripts/sofa_gallery/jquery.jcrop.js",
"app/assets/javascripts/sofa_gallery/thumbnails.js",
"app/assets/stylesheets/sofa_gallery/admin.css",
"app/assets/stylesheets/sofa_gallery/application.css",
"app/assets/stylesheets/sofa_gallery/carousel.css",
"app/assets/stylesheets/sofa_gallery/gallery_list.css",
"app/assets/stylesheets/sofa_gallery/jquery.jcrop.css",
"app/assets/stylesheets/sofa_gallery/reset.css",
"app/assets/stylesheets/sofa_gallery/tango/credits.txt",
"app/assets/stylesheets/sofa_gallery/tango/next-horizontal.png",
"app/assets/stylesheets/sofa_gallery/tango/next-vertical.png",
"app/assets/stylesheets/sofa_gallery/tango/prev-horizontal.png",
"app/assets/stylesheets/sofa_gallery/tango/prev-vertical.png",
"app/assets/stylesheets/sofa_gallery/thumbnails.css.erb",
"app/controllers/application_controller.rb",
"app/controllers/sofa_gallery/admin/base_controller.rb",
"app/controllers/sofa_gallery/admin/galleries_controller.rb",
"app/controllers/sofa_gallery/admin/photos_controller.rb",
"app/controllers/sofa_gallery/galleries_controller.rb",
"app/helpers/sofa_gallery_helper.rb",
"app/models/sofa_gallery/gallery.rb",
"app/models/sofa_gallery/photo.rb",
"app/views/layouts/gallery_admin/application.html.erb",
"app/views/sofa_gallery/_carousel.html.erb",
"app/views/sofa_gallery/_list_galleries.html.erb",
"app/views/sofa_gallery/_thumbnails.html.erb",
"app/views/sofa_gallery/admin/_html_head.html.erb",
"app/views/sofa_gallery/admin/_navigation.html.erb",
"app/views/sofa_gallery/admin/galleries/_form.html.erb",
"app/views/sofa_gallery/admin/galleries/edit.html.erb",
"app/views/sofa_gallery/admin/galleries/index.html.erb",
"app/views/sofa_gallery/admin/galleries/new.html.erb",
"app/views/sofa_gallery/admin/photos/_form.html.erb",
"app/views/sofa_gallery/admin/photos/crop.html.erb",
"app/views/sofa_gallery/admin/photos/edit.html.erb",
"app/views/sofa_gallery/admin/photos/index.html.erb",
"app/views/sofa_gallery/admin/photos/new.html.erb",
"app/views/sofa_gallery/galleries/index.html.erb",
"app/views/sofa_gallery/galleries/show.html.erb",
"config.ru",
"config/application.rb",
"config/boot.rb",
"config/database.yml",
"config/environment.rb",
"config/environments/development.rb",
"config/environments/production.rb",
"config/environments/test.rb",
"config/initializers/paperclip.rb",
"config/initializers/sofa_gallery.rb",
"config/routes.rb",
"db/migrate/01_create_sofa_gallery.rb",
"lib/generators/README",
"lib/generators/sofa_gallery_generator.rb",
"lib/paperclip_processors/cropper.rb",
"lib/sofa_gallery.rb",
"lib/sofa_gallery/configuration.rb",
"lib/sofa_gallery/engine.rb",
"lib/sofa_gallery/form_builder.rb",
"script/rails",
"sofa_gallery.gemspec",
"test/fixtures/files/default.jpg",
"test/fixtures/files/default.txt",
"test/fixtures/files/default2.jpg",
"test/fixtures/sofa_gallery/galleries.yml",
"test/fixtures/sofa_gallery/photos.yml",
"test/functional/admin/galleries_controller_test.rb",
"test/functional/admin/photos_controller_test.rb",
"test/functional/galleries_controller_test.rb",
"test/test_helper.rb",
"test/unit/configuration_test.rb",
"test/unit/gallery_test.rb",
"test/unit/photo_test.rb"
]
s.homepage = "http://github.com/twg/sofa-gallery"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.10"
s.summary = "SofaGallery is an image gallery engine for Rails 3.1 apps (and ComfortableMexicanSofa)"
# Dependency declarations, branched on the RubyGems version available at
# install time (jeweler emits all three branches for old-RubyGems compat).
# Note: paperclip is now pinned to "~> 2.3.0" in this regeneration.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, [">= 3.1.0"])
s.add_runtime_dependency(%q<paperclip>, ["~> 2.3.0"])
s.add_runtime_dependency(%q<jquery-rails>, [">= 1.0.14"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<jeweler>, [">= 0"])
else
s.add_dependency(%q<rails>, [">= 3.1.0"])
s.add_dependency(%q<paperclip>, ["~> 2.3.0"])
s.add_dependency(%q<jquery-rails>, [">= 1.0.14"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
else
s.add_dependency(%q<rails>, [">= 3.1.0"])
s.add_dependency(%q<paperclip>, ["~> 2.3.0"])
s.add_dependency(%q<jquery-rails>, [">= 1.0.14"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
end
|
Oops, I forgot the totally revamped test system.
$mydir = File.dirname __FILE__
$:.unshift File.join($mydir, '..', 'lib')
require 'coderay'
module CodeRay
require 'test/unit'
# Base class for per-language scanner test cases. Subclassing it
# automatically registers the subclass's suite with CodeRay::TestSuite.
class TestCase < Test::Unit::TestCase
class << self
# Hook: when a language test case subclasses TestCase, add its suite
# to the global CodeRay::TestSuite collector.
def inherited child
CodeRay::TestSuite << child.suite
end
# Calls its block with the working directory set to the examples
# for this test case.
def dir
examples = File.join $mydir, lang.to_s
Dir.chdir examples do
yield
end
end
# Language symbol derived from the test-case class name, downcased.
def lang
@lang ||= name.downcase.to_sym
end
# With an argument: set the example-file extension.
# Without: return it, defaulting to the language name.
def extension extension = nil
if extension
@extension = extension.to_s
else
@extension ||= lang.to_s
end
end
end
# Instance-side extension of example input files, e.g. "in.ruby".
def extension
@extension ||= 'in.' + self.class.extension
end
# Runs every example file for this language:
# 1. optionally scans growing prefixes of the code (incremental stress
# test, capped at 300 chars; skipped when ENV['noincremental'] is set),
# 2. tokenizes the full input and compares against the recorded
# <name>.out.* file -- on mismatch writes <name>.computed.* and,
# with ENV['diff'], shows a gvimdiff instead of asserting,
# 3. writes an HTML highlighting of the tokens to <name>.html.
# ENV switches: testonly, noincremental, showprogress, diff.
def test_ALL
puts
puts " >> Running #{self.class.name} <<"
puts
scanner = CodeRay::Scanners[self.class.lang].new
tokenizer = CodeRay::Encoders[:debug].new
highlighter = CodeRay::Encoders[:html].new(
:tab_width => 2,
:line_numbers => :inline,
:wrap => :page,
:hint => :debug,
:css => :class
)
self.class.dir do
for input in Dir["*.#{extension}"]
next if ENV['testonly'] and ENV['testonly'] != File.basename(input, ".#{extension}")
print "testing #{input}: "
name = File.basename(input, ".#{extension}")
output = name + '.out.' + tokenizer.file_extension
code = File.open(input, 'rb') { |f| break f.read }
unless ENV['noincremental']
print 'incremental, '
for size in 0..[code.size, 300].min
print size, '.' if ENV['showprogress']
scanner.string = code[0,size]
scanner.tokenize
end
end
print 'complete, '
scanner.string = code
tokens = scanner.tokens
result = tokenizer.encode_tokens tokens
if File.exist? output
expected = File.open(output, 'rb') { |f| break f.read }
ok = expected == result
computed = output.sub('.out.', '.computed.')
unless ok
File.open(computed, 'wb') { |f| f.write result }
print `gvimdiff #{output} #{computed}` if ENV['diff']
end
assert(ok, "Scan error: #{computed} != #{output}") unless ENV['diff']
else
# No recorded expectation yet: save current output as the new baseline.
File.open(output, 'wb') do |f| f.write result end
puts "New test: #{output}"
end
print 'highlighting, '
highlighted = highlighter.encode_tokens tokens
File.open(name + '.html', 'w') { |f| f.write highlighted }
puts 'finished.'
end
end
end
end
require 'test/unit/testsuite'
# Collects the suites of all TestCase subclasses and runs them with the
# console runner (default) or the Fox GUI runner (-f flag).
class TestSuite
@suite = Test::Unit::TestSuite.new 'CodeRay::Scanners'
class << self
# Register a sub-suite (called from TestCase.inherited).
def << sub_suite
@suite << sub_suite
end
# Load <lang>/suite.rb; warn and return false when it is missing.
def load_suite name
begin
suite = File.join($mydir, name, 'suite.rb')
require suite
rescue LoadError
$stderr.puts <<-ERR
!! Suite #{suite} not found
ERR
false
end
end
# Load the suite named by the first non-flag ARGV (or $scannerlang),
# or every suite directory when none is given.
def load
if subsuite = ARGV.find { |a| break $1 if a[/^([^-].*)/] } || ENV['scannerlang']
load_suite(subsuite) or exit
else
Dir[File.join($mydir, '*', '')].each { |suite| load_suite File.basename(suite) }
end
end
# Entry point: load the suites and run them with the selected runner.
def run
load
$VERBOSE = true
if ARGV.include? '-f'
require 'test/unit/ui/fox/testrunner'
Test::Unit::UI::Fox::TestRunner
else
require 'test/unit/ui/console/testrunner'
Test::Unit::UI::Console::TestRunner
end.run @suite
end
end
end
end
|
################################################################################
#
# searcher.rb
#
# class Searcher: executes a file search
#
################################################################################
require 'find'
require 'pathname'
require 'set'
require 'fileutil.rb'
require 'searchresult.rb'
class Searcher
attr_accessor :results
attr_accessor :settings
def initialize(settings)
# Wire up a Searcher for the given settings object; validate_settings
# raises immediately if required settings are missing or invalid.
@settings = settings
validate_settings
@fileutil = FileUtil.new
@results = []
# Timer bookkeeping: "<name>:start"/"<name>:stop" => Time (see add_timer).
@timers = {}
# Running total of seconds across all timed phases.
@totalElapsed = 0
# NOTE(review): @filehash is never used in the visible code, and
# Hash.new([]) shares ONE default array across all keys -- confirm intent.
@filehash = Hash.new([])
end
def log(message)
  # Single output choke point for the searcher: writes one line to stdout.
  $stdout.puts message
end
def validate_settings
  # Fail fast when required settings are absent or the start path is bogus.
  # Check order matters: definedness, then existence, then patterns.
  startpath = @settings.startpath
  raise 'Startpath not defined' unless startpath
  raise 'Startpath not found' unless Pathname.new(startpath).exist?
  raise 'No search patterns specified' unless @settings.searchpatterns
end
def matches_any_pattern(s, pattern_set)
  # True when at least one regex in pattern_set matches the string s;
  # false for an empty pattern_set.
  !pattern_set.find { |pattern| pattern.match(s) }.nil?
end
def any_matches_any_pattern(slist, pattern_set)
  # True when any string in slist matches any regex in pattern_set.
  # Idiom fix: Enumerable#any? replaces the manual each/early-return loop
  # (and the redundant explicit `return false`).
  slist.any? { |s| matches_any_pattern(s, pattern_set) }
end
def is_search_dir(d)
# Directory filter. Splits the path into elements (dropping '.'/'..') and
# applies, in order: hidden-dir exclusion, in_dirpatterns (must match at
# least one when any are configured), out_dirpatterns (must match none).
path_elems = d.split(File::SEPARATOR) - ['.', '..']
if @settings.excludehidden and path_elems.any? {|p| p.start_with?('.')}
return false
end
if @settings.in_dirpatterns.count > 0 and
not any_matches_any_pattern(path_elems, @settings.in_dirpatterns)
return false
end
if @settings.out_dirpatterns.count > 0 and
any_matches_any_pattern(path_elems, @settings.out_dirpatterns)
return false
end
true
end
def is_search_file(f)
# File filter. Applies, in order: hidden-file exclusion, in/out extension
# lists, then in/out filename regex patterns (patterns test the basename).
if @settings.excludehidden and f.start_with?('.')
return false
end
if @settings.in_extensions.count > 0 and
not @settings.in_extensions.include?(@fileutil.get_extension(f))
return false
end
if @settings.out_extensions.count > 0 and
@settings.out_extensions.include?(@fileutil.get_extension(f))
return false
end
filename = Pathname.new(f).basename.to_s
if @settings.in_filepatterns.count > 0 and
not matches_any_pattern(filename, @settings.in_filepatterns)
return false
end
if @settings.out_filepatterns.count > 0 and
matches_any_pattern(filename, @settings.out_filepatterns)
return false
end
true
end
def is_archive_search_file(f)
# Archive-file filter: same shape as is_search_file but driven by the
# archive-specific extension lists and filename patterns.
if @settings.excludehidden and f.start_with?('.')
return false
end
if @settings.in_archiveextensions.count > 0 and
not @settings.in_archiveextensions.include?(@fileutil.get_extension(f))
return false
end
if @settings.out_archiveextensions.count > 0 and
@settings.out_archiveextensions.include?(@fileutil.get_extension(f))
return false
end
filename = Pathname.new(f).basename.to_s
if @settings.in_archivefilepatterns.count > 0 and
not matches_any_pattern(filename, @settings.in_archivefilepatterns)
return false
end
if @settings.out_archivefilepatterns.count > 0 and
matches_any_pattern(filename, @settings.out_archivefilepatterns)
return false
end
true
end
def get_search_dirs
# Build the list of directories to search. Recursive mode walks the whole
# tree under startpath with Find.find, keeping directories that pass
# is_search_dir; otherwise only startpath itself is considered.
searchdirs = []
if @settings.recursive
Find.find(@settings.startpath) do |f|
if FileTest.directory?(f)
searchdirs.push(f) if is_search_dir(f)
end
end
else
searchdirs.push(@settings.startpath) if is_search_dir(@settings.startpath)
end
searchdirs
end
def get_search_files(searchdirs)
  # Collect the files under searchdirs that pass the configured filters,
  # returning full paths (dir joined with basename).
  # BUG FIX: Dir.entries yields bare basenames, so the previous
  # FileTest.directory?(f) tested the name relative to the process cwd --
  # correct only when cwd happened to be the search dir. Test the joined
  # path instead (this also reliably skips the '.' and '..' entries).
  searchfiles = []
  searchdirs.each do |d|
    Dir.entries(d).each do |f|
      path = Pathname.new(d).join(f).to_s
      next if FileTest.directory?(path)
      # Filters intentionally receive the basename, as before.
      if @fileutil.is_archive_file(f) and @settings.searcharchives and is_archive_search_file(f)
        searchfiles.push(path)
      elsif not @settings.archivesonly and is_search_file(f)
        searchfiles.push(path)
      end
    end
  end
  searchfiles
end
def add_timer(name, action)
  # Record the current wall-clock time under the key "<name>:<action>".
  @timers["#{name}:#{action}"] = Time.new
end
def start_timer(name)
# Record the start timestamp for the named phase.
add_timer(name, 'start')
end
def stop_timer(name)
# Record the stop timestamp and fold this phase's elapsed time into the
# running total (@totalElapsed).
add_timer(name, 'stop')
add_elapsed(name)
end
def get_elapsed(name)
  # Elapsed seconds (Float) between the recorded start and stop timestamps
  # for the named phase; both must have been recorded via add_timer.
  @timers["#{name}:stop"] - @timers["#{name}:start"]
end
def add_elapsed(name)
# Accumulate the named phase's elapsed seconds into @totalElapsed.
@totalElapsed += get_elapsed(name)
end
def print_elapsed(name)
# Log the named phase's elapsed time, converted from seconds to ms.
elapsed = get_elapsed(name) * 1000
log("Elapsed time for #{name}: #{elapsed} ms")
end
def print_total_elapsed
# Log the accumulated elapsed time across all timed phases, in ms.
log("Total elapsed time: #{@totalElapsed * 1000} ms")
end
def search
# Top-level entry point: collect the directories, then the files, then
# search each file. Each phase is optionally timed (@settings.dotiming)
# and reported (printresults); verbose mode lists the dirs/files found.
# get the searchdirs
if @settings.dotiming
start_timer('get_search_dirs')
end
searchdirs = get_search_dirs()
if @settings.dotiming
stop_timer('get_search_dirs')
if @settings.printresults
print_elapsed('get_search_dirs')
end
end
if @settings.verbose
log("\nDirectories to be searched (#{searchdirs.count}):")
searchdirs.each do |d|
log("#{d}")
end
end
# get the searchfiles
if @settings.dotiming
start_timer('get_search_files')
end
searchfiles = get_search_files(searchdirs)
if @settings.dotiming
stop_timer('get_search_files')
if @settings.printresults
print_elapsed('get_search_files')
end
end
if @settings.verbose
log("\nFiles to be searched (#{searchfiles.count}):")
searchfiles.each do |f|
log("#{f}")
end
log("\n")
end
if @settings.dotiming
start_timer('search_files')
end
searchfiles.each do |f|
search_file(f)
end
if @settings.dotiming
stop_timer('search_files')
if @settings.printresults
print_elapsed('search_files')
print_total_elapsed
end
end
end
def search_file(f)
  # Dispatch a single file to the text or binary searcher.
  # BUG FIX: the early `return 0` was nested inside the verbose/debug
  # branch, so unsearchable files were still searched whenever logging was
  # off. The return now happens unconditionally; the log line stays gated.
  unless @fileutil.is_searchable_file(f)
    if @settings.verbose or @settings.debug
      log("Skipping unsearchable file: #{f}")
    end
    return 0
  end
  if @fileutil.is_text_file(f)
    search_text_file(f)
  elsif @fileutil.is_binary_file(f)
    search_binary_file(f)
  end
end
# Search a binary file by matching every pattern against its raw bytes.
# Records at most one result per pattern (line number 0, no line text).
def search_binary_file(f)
  # Block form guarantees the file handle is closed; the previous
  # version leaked the File object until garbage collection.
  contents = File.open(f, "rb") { |io| io.read }
  @settings.searchpatterns.each do |p|
    if p.match(contents)
      add_search_result(SearchResult.new(p, f, 0, nil))
    end
  end
end
# Search a text file, choosing whole-contents (multi-line) or
# line-by-line mode per @settings.multilinesearch.
# +enc+ is forwarded but not used by the implementations visible here.
def search_text_file(f, enc = nil)
  if @settings.debug
    log("Searching text file #{f}")
  end
  if @settings.multilinesearch
    search_text_file_contents(f, enc)
  else
    search_text_file_lines(f, enc)
  end
end
# Number of line terminators in +s+. Every terminator ("\r\n" or "\n")
# contains exactly one "\n", so counting newline characters is
# equivalent to scanning for /(\r\n|\n)/.
def get_line_count(s)
  s.count("\n")
end
# Multi-line search: read the whole file, search it, and attach the
# filename to each result before recording it.
def search_text_file_contents(f, enc = nil)
  # Block form guarantees the file handle is closed; the previous
  # version leaked the File object until garbage collection.
  contents = File.open(f, "r") { |io| io.read }
  results = search_contents(contents)
  results.each do |r|
    r.filename = f
    add_search_result(r)
  end
end
# Search an entire file's contents (multi-line mode) against every
# pattern. For each match the full line containing it is reconstructed
# along with 1-based line and column positions. Returns SearchResult
# objects with an empty filename (the caller fills it in).
def search_contents(contents)
  results = []
  @settings.searchpatterns.each do |p|
    m = p.match(contents)
    while m
      # line counts before/after the match give the match's line number
      before_line_count = get_line_count(m.pre_match)
      after_line_count = get_line_count(m.post_match)
      line_start_index, line_end_index = m.offset(0)
      if before_line_count > 0
        # widen start back to just after the previous newline
        line_start_index = contents.rindex("\n", line_start_index) + 1
      end
      if after_line_count > 0
        # widen end to just before the next line terminator
        line_end_index = contents.index(/(\r\n|\n)/, line_end_index) - 1
      end
      line = contents[line_start_index..line_end_index]
      # match offsets relative to the start of its line
      match_start_index = m.begin(0) - line_start_index
      match_end_index = m.end(0) - line_start_index
      results.push(SearchResult.new(
        p,
        '',
        before_line_count+1,
        line,
        match_start_index + 1,
        match_end_index + 1))
      # resume scanning immediately after this match
      m = p.match(contents, line_start_index+match_end_index)
    end
  end
  results
end
# Line-by-line search: iterate the file's lines, attach the filename to
# each result, and record it.
def search_text_file_lines(f, enc = nil)
  # Block form guarantees the handle is closed even if searching raises
  # (the old code only closed on the happy path) and the unused local
  # line counter has been removed.
  results = File.open(f, "r") do |fo|
    search_line_iterator(fo.each_line)
  end
  results.each do |r|
    r.filename = f
    add_search_result(r)
  end
end
# Core line-searching loop. +lines+ is an external iterator (Enumerator)
# over a file's lines. Returns SearchResult objects with an empty
# filename (the caller fills it in). When @settings.firstmatch is set,
# only the first match for each pattern is recorded.
def search_line_iterator(lines)
  linenum = 0
  pattern_matches = {}  # patterns that have matched at least once
  results = []
  while true
    begin
      line = lines.next
      linenum += 1
      @settings.searchpatterns.each do |p|
        search_line = true
        pos = 0
        # rescan the line to collect every non-overlapping match
        while search_line and pos < line.length
          # TODO: catch ArgumentError: "in `match': invalid byte sequence in US-ASCII"
          m = p.match(line, pos)
          if m
            if @settings.firstmatch and pattern_matches.include?(p)
              search_line = false
            else
              results.push(SearchResult.new(
                p,
                '',
                linenum,
                line,
                m.begin(0) + 1,
                m.end(0) + 1))
              pos = m.end(0) + 1
              pattern_matches[p] = 1
            end
          else
            search_line = false
          end
        end
      end
    rescue StopIteration
      # iterator exhausted: end of file
      return results
    end
  end
end
# Append one SearchResult to the collected results.
def add_search_result(search_result)
  @results << search_result
end
# Log a summary count followed by each collected search result.
def print_results()
  log("Search results (#{@results.count}):")
  @results.each do |r|
    print_result(r)
  end
end
# Log a single result, prefixed with its pattern when more than one
# search pattern is configured (to disambiguate the output).
def print_result(search_result)
  s = ""
  if @settings.searchpatterns.count > 1
    s += "#{search_result.pattern}: "
  end
  s += search_result.to_s
  log(s)
end
# Sorted, unique parent directories of the results whose pattern is in
# +patterns+ (defaults to all configured search patterns).
def get_matching_dirs(patterns = [])
  patterns = @settings.searchpatterns if patterns.empty?
  wanted = Set.new(patterns)
  matching = @results.select { |r| wanted.include?(r.pattern) }
  matching.map { |r| File.dirname(r.filename) }.uniq.sort
end
# Sorted, unique filenames of the results whose pattern is in
# +patterns+ (defaults to all configured search patterns).
def get_matching_files(patterns = [])
  patterns = @settings.searchpatterns if patterns.empty?
  wanted = Set.new(patterns)
  @results.select { |r| wanted.include?(r.pattern) }.map(&:filename).uniq.sort
end
# Sorted, stripped line texts of the results whose pattern is in
# +patterns+ (defaults to all configured search patterns); duplicates
# are removed when @settings.uniquelines is set.
def get_matching_lines(patterns = [])
  patterns = @settings.searchpatterns if patterns.empty?
  wanted = Set.new(patterns)
  lines = @results.select { |r| wanted.include?(r.pattern) }.map { |r| r.line.strip }
  lines.uniq! if @settings.uniquelines
  lines.sort!
  lines
end
end
Support startpath given as a file (not a directory) in the ruby version
################################################################################
#
# searcher.rb
#
# class Searcher: executes a file search
#
################################################################################
require 'find'
require 'pathname'
require 'set'
require 'fileutil.rb'
require 'searchresult.rb'
# Executes a file search: walks the startpath, filters directories and
# files against the configured patterns/extensions, runs every search
# pattern over the selected files, and collects SearchResult objects.
class Searcher
  attr_accessor :results
  attr_accessor :settings

  # settings: SearchSettings-like object; validated immediately.
  def initialize(settings)
    @settings = settings
    validate_settings
    @fileutil = FileUtil.new
    @results = []
    @timers = {}
    @totalElapsed = 0
    @filehash = Hash.new([])
  end

  def log(message)
    puts message
  end

  # Fail fast on unusable settings.
  def validate_settings
    raise 'Startpath not defined' unless @settings.startpath
    raise 'Startpath not found' unless Pathname.new(@settings.startpath).exist?
    raise 'No search patterns specified' unless @settings.searchpatterns
  end

  # True if any pattern in pattern_set matches the string s.
  def matches_any_pattern(s, pattern_set)
    pattern_set.any? {|p| p.match(s)}
  end

  # True if any string in slist matches any pattern in pattern_set.
  def any_matches_any_pattern(slist, pattern_set)
    slist.each do |s|
      if matches_any_pattern(s, pattern_set)
        return true
      end
    end
    return false
  end

  # Directory filter: hidden-dir exclusion plus the in/out
  # directory-pattern settings, applied per path element.
  def is_search_dir(d)
    path_elems = d.split(File::SEPARATOR) - ['.', '..']
    if @settings.excludehidden and path_elems.any? {|p| p.start_with?('.')}
      return false
    end
    if @settings.in_dirpatterns.count > 0 and
      not any_matches_any_pattern(path_elems, @settings.in_dirpatterns)
      return false
    end
    if @settings.out_dirpatterns.count > 0 and
      any_matches_any_pattern(path_elems, @settings.out_dirpatterns)
      return false
    end
    true
  end

  # File filter: hidden-file exclusion, extension in/out lists, and
  # filename in/out patterns.
  def is_search_file(f)
    if @settings.excludehidden and f.start_with?('.')
      return false
    end
    if @settings.in_extensions.count > 0 and
      not @settings.in_extensions.include?(@fileutil.get_extension(f))
      return false
    end
    if @settings.out_extensions.count > 0 and
      @settings.out_extensions.include?(@fileutil.get_extension(f))
      return false
    end
    filename = Pathname.new(f).basename.to_s
    if @settings.in_filepatterns.count > 0 and
      not matches_any_pattern(filename, @settings.in_filepatterns)
      return false
    end
    if @settings.out_filepatterns.count > 0 and
      matches_any_pattern(filename, @settings.out_filepatterns)
      return false
    end
    true
  end

  # Same as is_search_file but driven by the archive-specific settings.
  def is_archive_search_file(f)
    if @settings.excludehidden and f.start_with?('.')
      return false
    end
    if @settings.in_archiveextensions.count > 0 and
      not @settings.in_archiveextensions.include?(@fileutil.get_extension(f))
      return false
    end
    if @settings.out_archiveextensions.count > 0 and
      @settings.out_archiveextensions.include?(@fileutil.get_extension(f))
      return false
    end
    filename = Pathname.new(f).basename.to_s
    if @settings.in_archivefilepatterns.count > 0 and
      not matches_any_pattern(filename, @settings.in_archivefilepatterns)
      return false
    end
    if @settings.out_archivefilepatterns.count > 0 and
      matches_any_pattern(filename, @settings.out_archivefilepatterns)
      return false
    end
    true
  end

  # Directories to search: the startpath's tree (recursive), the
  # startpath itself, or the parent dir of a file startpath.
  def get_search_dirs
    searchdirs = []
    if FileTest.directory?(@settings.startpath)
      if @settings.recursive
        Find.find(@settings.startpath) do |f|
          if FileTest.directory?(f)
            searchdirs.push(f) if is_search_dir(f)
          end
        end
      else
        searchdirs.push(@settings.startpath) if is_search_dir(@settings.startpath)
      end
    elsif FileTest.file?(@settings.startpath)
      d = File.dirname(@settings.startpath)
      if not d
        d = '.'
      end
      searchdirs.push(d) if is_search_dir(d)
    end
    searchdirs
  end

  # Files to search: non-directory entries of each search dir that pass
  # the (archive-)file filters, or the file startpath itself.
  def get_search_files(searchdirs)
    searchfiles = []
    if FileTest.directory?(@settings.startpath)
      searchdirs.each do |d|
        Dir.entries(d).each do |f|
          full_path = Pathname.new(d).join(f).to_s
          # BUG FIX: test the joined path for directory-ness; the bare
          # entry name was resolved against the current working
          # directory, not against d.
          unless FileTest.directory?(full_path)
            if @fileutil.is_archive_file(f) and @settings.searcharchives and is_archive_search_file(f)
              searchfiles.push(full_path)
            elsif not @settings.archivesonly and is_search_file(f)
              searchfiles.push(full_path)
            end
          end
        end
      end
    elsif FileTest.file?(@settings.startpath)
      searchfiles.push(@settings.startpath) if is_search_file(@settings.startpath)
    end
    searchfiles
  end

  # Record a timestamp under the key "<name>:<action>".
  def add_timer(name, action)
    @timers[name+':'+action] = Time.new
  end

  def start_timer(name)
    add_timer(name, 'start')
  end

  def stop_timer(name)
    add_timer(name, 'stop')
    add_elapsed(name)
  end

  # Elapsed seconds between the recorded start and stop of +name+.
  def get_elapsed(name)
    start = @timers[name+':start']
    stop = @timers[name+':stop']
    stop - start
  end

  def add_elapsed(name)
    @totalElapsed += get_elapsed(name)
  end

  def print_elapsed(name)
    elapsed = get_elapsed(name) * 1000
    log("Elapsed time for #{name}: #{elapsed} ms")
  end

  def print_total_elapsed
    log("Total elapsed time: #{@totalElapsed * 1000} ms")
  end

  # Top-level entry point: gather dirs, gather files, search each file,
  # with optional timing/verbose reporting per @settings.
  def search
    # get the searchdirs
    if @settings.dotiming
      start_timer('get_search_dirs')
    end
    searchdirs = get_search_dirs()
    if @settings.dotiming
      stop_timer('get_search_dirs')
      if @settings.printresults
        print_elapsed('get_search_dirs')
      end
    end
    if @settings.verbose
      log("\nDirectories to be searched (#{searchdirs.count}):")
      searchdirs.each do |d|
        log("#{d}")
      end
    end
    # get the searchfiles
    if @settings.dotiming
      start_timer('get_search_files')
    end
    searchfiles = get_search_files(searchdirs)
    if @settings.dotiming
      stop_timer('get_search_files')
      if @settings.printresults
        print_elapsed('get_search_files')
      end
    end
    if @settings.verbose
      log("\nFiles to be searched (#{searchfiles.count}):")
      searchfiles.each do |f|
        log("#{f}")
      end
      log("\n")
    end
    if @settings.dotiming
      start_timer('search_files')
    end
    searchfiles.each do |f|
      search_file(f)
    end
    if @settings.dotiming
      stop_timer('search_files')
      if @settings.printresults
        print_elapsed('search_files')
        print_total_elapsed
      end
    end
  end

  # Dispatch one file to the text or binary searcher; skip files the
  # FileUtil deems unsearchable.
  def search_file(f)
    unless @fileutil.is_searchable_file(f)
      # BUG FIX: the early return was nested inside the verbose/debug
      # check, so unsearchable files were still searched when logging
      # was off.
      if @settings.verbose or @settings.debug
        log("Skipping unsearchable file: #{f}")
      end
      return 0
    end
    if @fileutil.is_text_file(f)
      search_text_file(f)
    elsif @fileutil.is_binary_file(f)
      search_binary_file(f)
    end
  end

  # Match every pattern against the raw bytes; at most one result per
  # pattern (line number 0, no line text).
  def search_binary_file(f)
    # Block form closes the handle (previously leaked until GC).
    contents = File.open(f, "rb") { |io| io.read }
    @settings.searchpatterns.each do |p|
      if p.match(contents)
        add_search_result(SearchResult.new(p, f, 0, nil))
      end
    end
  end

  # Choose multi-line or line-by-line text search per settings.
  def search_text_file(f, enc = nil)
    if @settings.debug
      log("Searching text file #{f}")
    end
    if @settings.multilinesearch
      search_text_file_contents(f, enc)
    else
      search_text_file_lines(f, enc)
    end
  end

  # Number of line terminators (\r\n or \n) in s.
  def get_line_count(s)
    s.scan(/(\r\n|\n)/m).size
  end

  # Multi-line search over the whole file contents.
  def search_text_file_contents(f, enc = nil)
    # Block form closes the handle (previously leaked until GC).
    contents = File.open(f, "r") { |io| io.read }
    results = search_contents(contents)
    results.each do |r|
      r.filename = f
      add_search_result(r)
    end
  end

  # Search entire contents against every pattern, reconstructing the
  # line containing each match and 1-based line/column positions.
  # Results carry an empty filename (the caller fills it in).
  def search_contents(contents)
    results = []
    @settings.searchpatterns.each do |p|
      m = p.match(contents)
      while m
        before_line_count = get_line_count(m.pre_match)
        after_line_count = get_line_count(m.post_match)
        line_start_index, line_end_index = m.offset(0)
        if before_line_count > 0
          # widen start back to just after the previous newline
          line_start_index = contents.rindex("\n", line_start_index) + 1
        end
        if after_line_count > 0
          # widen end to just before the next line terminator
          line_end_index = contents.index(/(\r\n|\n)/, line_end_index) - 1
        end
        line = contents[line_start_index..line_end_index]
        match_start_index = m.begin(0) - line_start_index
        match_end_index = m.end(0) - line_start_index
        results.push(SearchResult.new(
          p,
          '',
          before_line_count+1,
          line,
          match_start_index + 1,
          match_end_index + 1))
        # resume scanning immediately after this match
        m = p.match(contents, line_start_index+match_end_index)
      end
    end
    results
  end

  # Line-by-line search; attaches the filename to each result.
  def search_text_file_lines(f, enc = nil)
    # Block form closes the handle even if searching raises (the old
    # code only closed on the happy path and kept an unused counter).
    results = File.open(f, "r") do |fo|
      search_line_iterator(fo.each_line)
    end
    results.each do |r|
      r.filename = f
      add_search_result(r)
    end
  end

  # Core line-searching loop over an external line iterator. When
  # @settings.firstmatch is set, only the first match per pattern is
  # recorded. Results carry an empty filename.
  def search_line_iterator(lines)
    linenum = 0
    pattern_matches = {}  # patterns that have matched at least once
    results = []
    while true
      begin
        line = lines.next
        linenum += 1
        @settings.searchpatterns.each do |p|
          search_line = true
          pos = 0
          while search_line and pos < line.length
            # TODO: catch ArgumentError: "in `match': invalid byte sequence in US-ASCII"
            m = p.match(line, pos)
            if m
              if @settings.firstmatch and pattern_matches.include?(p)
                search_line = false
              else
                results.push(SearchResult.new(
                  p,
                  '',
                  linenum,
                  line,
                  m.begin(0) + 1,
                  m.end(0) + 1))
                pos = m.end(0) + 1
                pattern_matches[p] = 1
              end
            else
              search_line = false
            end
          end
        end
      rescue StopIteration
        # iterator exhausted: end of file
        return results
      end
    end
  end

  def add_search_result(search_result)
    @results.push(search_result)
  end

  # Log a summary count followed by each result.
  def print_results()
    log("Search results (#{@results.count}):")
    @results.each do |r|
      print_result(r)
    end
  end

  # Log one result, prefixed with its pattern when several patterns
  # are configured.
  def print_result(search_result)
    s = ""
    if @settings.searchpatterns.count > 1
      s += "#{search_result.pattern}: "
    end
    s += search_result.to_s
    log(s)
  end

  # Sorted unique parent dirs of matching results.
  def get_matching_dirs(patterns = [])
    if patterns.empty?
      patterns = @settings.searchpatterns
    end
    pattern_set = Set.new patterns
    dirs = Set.new
    @results.each do |r|
      if pattern_set.include? r.pattern
        dirs.add(File.dirname(r.filename))
      end
    end
    dirs = dirs.to_a
    dirs.sort!
    dirs
  end

  # Sorted unique filenames of matching results.
  def get_matching_files(patterns = [])
    if patterns.empty?
      patterns = @settings.searchpatterns
    end
    pattern_set = Set.new patterns
    files = Set.new
    @results.each do |r|
      if pattern_set.include? r.pattern
        files.add(r.filename)
      end
    end
    files = files.to_a
    files.sort!
    files
  end

  # Sorted stripped line texts of matching results; deduped when
  # @settings.uniquelines is set.
  def get_matching_lines(patterns = [])
    if patterns.empty?
      patterns = @settings.searchpatterns
    end
    pattern_set = Set.new patterns
    lines = []
    @results.each do |r|
      if pattern_set.include? r.pattern
        lines.push(r.line.strip)
      end
    end
    if @settings.uniquelines
      lines.uniq!
    end
    lines.sort!
    lines
  end
end
|
#
# Be sure to run `pod lib lint JOTextField.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for JOTextField v0.1.0 — a UITextField variant with validations.
Pod::Spec.new do |s|
  s.name = "JOTextField"
  s.version = "0.1.0"
  # NOTE(review): summary is still the template placeholder — `pod lib lint` warns on this.
  s.summary = "A short description of JOTextField."
  s.description = "JOTextField is a custom version of UITextField that Includes Validations and more"
  s.homepage = "https://github.com/lojals/JOTextField"
  # s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
  s.license = 'MIT'
  s.author = { "Jorge Ovalle" => "jroz9105@gmail.com" }
  s.source = { :git => "https://github.com/lojals/JOTextField.git", :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.platform = :ios, '7.0'
  s.requires_arc = true
  s.source_files = 'Pod/Classes/**/*'
  s.resource_bundles = {
    'JOTextField' => ['Pod/Assets/*.png']
  }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  s.frameworks = 'UIKit'
  s.dependency 'pop', '~> 1.0'
end
preRelease
#
# Be sure to run `pod lib lint JOTextField.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for JOTextField v0.0.1 (pre-release variant of the spec above).
Pod::Spec.new do |s|
  s.name = "JOTextField"
  s.version = "0.0.1"
  # NOTE(review): summary is still the template placeholder — `pod lib lint` warns on this.
  s.summary = "A short description of JOTextField."
  s.description = "JOTextField is a custom version of UITextField that Includes Validations and more"
  s.homepage = "https://github.com/lojals/JOTextField"
  # s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
  s.license = 'MIT'
  s.author = { "Jorge Ovalle" => "jroz9105@gmail.com" }
  s.source = { :git => "https://github.com/lojals/JOTextField.git", :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.platform = :ios, '7.0'
  s.requires_arc = true
  s.source_files = 'Pod/Classes/**/*'
  s.resource_bundles = {
    'JOTextField' => ['Pod/Assets/*.png']
  }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  s.frameworks = 'UIKit'
  s.dependency 'pop', '~> 1.0'
end
|
# Podspec for XMCommon v0.0.1 — shared utility classes.
Pod::Spec.new do |s|
  s.name = "XMCommon"
  s.version = "0.0.1"
  s.summary = "XMCommon is a common class."
  s.description = <<-DESC
A longer description of XMCommon in Markdown format.
DESC
  s.homepage = "https://github.com/rgshio/XMCommon"
  s.license = "MIT"
  s.author = { "rgshio" => "754086445@qq.com" }
  s.platform = :ios, "5.0"
  s.source = { :git => "https://github.com/rgshio/XMCommon.git", :tag => "0.0.1" }
  s.source_files = "XMCommon/*"
  s.requires_arc = true
end
podspec
# Podspec for XMCommon v1.0.0 (version/tag bump of the 0.0.1 spec above).
# NOTE(review): the tag is duplicated as a literal — consider s.version.to_s.
Pod::Spec.new do |s|
  s.name = "XMCommon"
  s.version = "1.0.0"
  s.summary = "XMCommon is a common class."
  s.description = <<-DESC
A longer description of XMCommon in Markdown format.
DESC
  s.homepage = "https://github.com/rgshio/XMCommon"
  s.license = "MIT"
  s.author = { "rgshio" => "754086445@qq.com" }
  s.platform = :ios, "5.0"
  s.source = { :git => "https://github.com/rgshio/XMCommon.git", :tag => "1.0.0" }
  s.source_files = "XMCommon/*"
  s.requires_arc = true
end
|
#
# Be sure to run `pod lib lint JSONValueRX.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for JSONValueRX v1.4 — Swift JSON value type with subscripting
# and pattern matching.
Pod::Spec.new do |s|
  s.name = "JSONValueRX"
  s.version = "1.4"
  s.summary = "Simple Swift JSON representation supporting subscripting and pattern matching."
  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = <<-DESC
Simple Swift JSON representation supporting subscripting and pattern matching. JSONValue uses an algebraic datatype representation of JSON for type safety and pattern matching.
DESC
  s.homepage = "https://github.com/rexmas/JSONValue"
  s.license = 'MIT'
  s.author = { "rexmas" => "rex.fenley@gmail.com" }
  s.source = { :git => "https://github.com/rexmas/JSONValue.git", :tag => s.version.to_s }
  s.platform = :ios, '8.0'
  s.requires_arc = true
  s.source_files = 'JSONValue/**/*'
  # NOTE(review): empty resource_bundles is a no-op; safe to delete.
  s.resource_bundles = {
  }
end
[master] podspec bump
#
# Be sure to run `pod lib lint JSONValueRX.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for JSONValueRX v1.5 (version bump of the 1.4 spec above).
Pod::Spec.new do |s|
  s.name = "JSONValueRX"
  s.version = "1.5"
  s.summary = "Simple Swift JSON representation supporting subscripting and pattern matching."
  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = <<-DESC
Simple Swift JSON representation supporting subscripting and pattern matching. JSONValue uses an algebraic datatype representation of JSON for type safety and pattern matching.
DESC
  s.homepage = "https://github.com/rexmas/JSONValue"
  s.license = 'MIT'
  s.author = { "rexmas" => "rex.fenley@gmail.com" }
  s.source = { :git => "https://github.com/rexmas/JSONValue.git", :tag => s.version.to_s }
  s.platform = :ios, '8.0'
  s.requires_arc = true
  s.source_files = 'JSONValue/**/*'
  # NOTE(review): empty resource_bundles is a no-op; safe to delete.
  s.resource_bundles = {
  }
end
|
# Podspec for XMCommon v0.1.1 — Objective-C category/extension classes.
Pod::Spec.new do |s|
  s.name = 'XMCommon'
  s.version = '0.1.1'
  s.license = 'MIT'
  s.summary = 'extension class.'
  s.description = %{XMCommon is a extension Class.}
  s.homepage = 'https://github.com/rgshio/XMCommon'
  s.author = { 'rgshio' => 'rgshio@qq.com' }
  s.source = { :git => 'https://github.com/rgshio/XMCommon.git', :tag => "v#{s.version}" }
  s.source_files = 'Classes/XMExtension/*.{h,m}'
  # NOTE(review): 'CommonCrypto' is a C library, not a standalone framework
  # on older SDKs — confirm this links under the targeted Xcode version.
  s.ios.frameworks = 'Foundation', 'UIKit', 'CommonCrypto'
  s.ios.deployment_target = '7.0' # minimum SDK with autolayout
  s.requires_arc = true
end
update podspec
update podspec
# Podspec for XMCommon v0.1.1 — variant that declares CommonCrypto via
# s.framework instead of the iOS frameworks list.
Pod::Spec.new do |s|
  s.name = 'XMCommon'
  s.version = '0.1.1'
  s.license = 'MIT'
  s.summary = 'extension class.'
  s.description = %{XMCommon is a extension Class.}
  s.homepage = 'https://github.com/rgshio/XMCommon'
  s.author = { 'rgshio' => 'rgshio@qq.com' }
  s.source = { :git => 'https://github.com/rgshio/XMCommon.git', :tag => "v#{s.version}" }
  s.source_files = 'Classes/XMExtension/*.{h,m}'
  # NOTE(review): 'CommonCrypto' is a C library, not a standalone framework
  # on older SDKs — confirm this links under the targeted Xcode version.
  s.framework = 'CommonCrypto'
  s.ios.frameworks = 'Foundation', 'UIKit'
  s.ios.deployment_target = '7.0' # minimum SDK with autolayout
  s.requires_arc = true
end
|
# Opal bridge: wraps a raw JavaScript value so it can be used from Ruby.
# Backtick strings compile to inline JavaScript under Opal.
module Native
  class Object
    include Native

    # Read property +name+ from the underlying JavaScript object.
    def [] (name)
      `#@native[name]`
    end

    # Write property +name+ on the underlying JavaScript object.
    def []= (name, value)
      `#@native[name] = value`
    end

    # A wrapped object counts as nil when the underlying native value is
    # JavaScript null or undefined.
    # BUG FIX: previously tested `self` (the Ruby wrapper, which is never
    # null/undefined) instead of the wrapped @native value.
    def nil?
      `#@native === null || #@native === undefined`
    end

    # Delegate unknown methods to same-named JavaScript functions, if any.
    def method_missing (name, *args)
      return super unless Opal.function? `#@native[name]`
      __native_send__ name, *args
    end
  end

  # Store the raw JavaScript value being wrapped.
  def initialize (native)
    @native = native
  end

  # Expose the raw JavaScript value.
  def to_native
    @native
  end

  # Invoke the JavaScript function +name+ with `this` bound to the native
  # object; falls back to method_missing for non-functions.
  def native_send (name, *args)
    return method_missing(name, *args) unless Opal.function? `#@native[name]`
    `#@native[name].apply(#@native, args)`
  end

  alias_method :__native_send__, :native_send
end
corelib/native: fix #nil?
# Opal bridge: wraps a raw JavaScript value so it can be used from Ruby.
# Backtick strings compile to inline JavaScript under Opal.
module Native
  class Object
    include Native

    # Read property +name+ from the underlying JavaScript object.
    def [] (name)
      `#@native[name]`
    end

    # Write property +name+ on the underlying JavaScript object.
    def []= (name, value)
      `#@native[name] = value`
    end

    # nil when the wrapped native value is JavaScript null or undefined.
    def nil?
      `#@native === null || #@native === undefined`
    end

    # Delegate unknown methods to same-named JavaScript functions, if any.
    def method_missing (name, *args)
      return super unless Opal.function? `#@native[name]`
      __native_send__ name, *args
    end
  end

  # Store the raw JavaScript value being wrapped.
  def initialize (native)
    @native = native
  end

  # Expose the raw JavaScript value.
  def to_native
    @native
  end

  # Invoke the JavaScript function +name+ with `this` bound to the native
  # object; falls back to method_missing for non-functions.
  def native_send (name, *args)
    return method_missing(name, *args) unless Opal.function? `#@native[name]`
    `#@native[name].apply(#@native, args)`
  end

  alias_method :__native_send__, :native_send
end
|
# Podspec for JudoKitObjC v6.2.6 — Judo Pay full iOS client kit,
# depending on the published JudoShield pod.
Pod::Spec.new do |s|
  s.name = 'JudoKitObjC'
  s.version = '6.2.6'
  s.summary = 'Judo Pay Full iOS Client Kit'
  s.homepage = 'http://judopay.com/'
  s.license = 'MIT'
  s.author = { "Ashley Barrett" => 'ashley.barrett@judopayments.com' }
  s.source = { :git => 'https://github.com/JudoPay/JudoKitObjC.git', :tag => s.version.to_s }
  s.documentation_url = 'https://judopay.github.io/JudoKitObjC/'
  s.ios.deployment_target = '8.0'
  s.requires_arc = true
  s.source_files = 'Source/**/*.{m,h}'
  s.dependency 'JudoShield'
  s.frameworks = 'CoreLocation', 'Security', 'CoreTelephony'
  s.pod_target_xcconfig = { 'FRAMEWORK_SEARCH_PATHS' => '$(inherited) ${PODS_ROOT}/JudoShield/Source' }
end
changed podspec
# Podspec for JudoKitObjC v6.2.6 — variant depending on DeviceDNA.
Pod::Spec.new do |s|
  s.name = 'JudoKitObjC'
  s.version = '6.2.6'
  s.summary = 'Judo Pay Full iOS Client Kit'
  s.homepage = 'http://judopay.com/'
  s.license = 'MIT'
  s.author = { "Ashley Barrett" => 'ashley.barrett@judopayments.com' }
  s.source = { :git => 'https://github.com/JudoPay/JudoKitObjC.git', :tag => s.version.to_s }
  s.documentation_url = 'https://judopay.github.io/JudoKitObjC/'
  s.ios.deployment_target = '8.0'
  s.requires_arc = true
  s.source_files = 'Source/**/*.{m,h}'
  # BUG FIX: `s.dependency` accepts only a pod name plus version
  # requirements; `:path => ...` is Podfile-only syntax, and the
  # hard-coded '~/Documents/...' path would break on any other machine.
  # Local-path overrides belong in the consuming Podfile.
  s.dependency 'DeviceDNA'
  s.frameworks = 'CoreLocation', 'Security', 'CoreTelephony'
  s.pod_target_xcconfig = { 'FRAMEWORK_SEARCH_PATHS' => '$(inherited) ${PODS_ROOT}/DeviceDNA/Source' }
end
|
# frozen_string_literal: true
module ActiveRecordShards
  # Patches that route read-only ActiveRecord operations to a replica
  # connection by default, unless running inside a transaction or a
  # migration.
  module DefaultReplicaPatches
    # Wrap +method+ on +base+ (class-level when +class_method+ is true)
    # so it runs on the replica connection: unconditionally when
    # +force_on_replica+ is set, otherwise only for models with
    # on_replica_by_default = true and no open transaction.
    def self.wrap_method_in_on_replica(class_method, base, method, force_on_replica: false)
      base_methods =
        if class_method
          base.methods + base.private_methods
        else
          base.instance_methods + base.private_instance_methods
        end
      return unless base_methods.include?(method)
      # split a trailing ? or ! off the name so it can be re-attached to
      # the generated alias names
      _, method, punctuation = method.to_s.match(/^(.*?)([\?\!]?)$/).to_a
      # _ALWAYS_ on replica, or only for on `on_replica_by_default = true` models?
      wrapper = force_on_replica ? 'force_on_replica' : 'on_replica_unless_tx'
      base.class_eval <<-RUBY, __FILE__, __LINE__ + 1
        #{class_method ? 'class << self' : ''}
          def #{method}_with_default_replica#{punctuation}(*args, &block)
            #{wrapper} do
              #{method}_without_default_replica#{punctuation}(*args, &block)
            end
          end

          ruby2_keywords(:#{method}_with_default_replica#{punctuation}) if respond_to?(:ruby2_keywords, true)

          alias_method :#{method}_without_default_replica#{punctuation}, :#{method}#{punctuation}
          alias_method :#{method}#{punctuation}, :#{method}_with_default_replica#{punctuation}
        #{class_method ? 'end' : ''}
      RUBY
    end

    # Force the primary connection for the duration of a transaction by
    # flagging the thread as "in tx".
    def transaction_with_replica_off(*args, &block)
      if on_replica_by_default?
        begin
          old_val = Thread.current[:_active_record_shards_in_tx]
          Thread.current[:_active_record_shards_in_tx] = true
          transaction_without_replica_off(*args, &block)
        ensure
          Thread.current[:_active_record_shards_in_tx] = old_val
        end
      else
        transaction_without_replica_off(*args, &block)
      end
    end
    ruby2_keywords(:transaction_with_replica_off) if respond_to?(:ruby2_keywords, true)

    module InstanceMethods
      def on_replica_unless_tx
        self.class.on_replica_unless_tx { yield }
      end
    end

    # Read-only class methods routed to the replica for
    # on_replica_by_default models.
    CLASS_REPLICA_METHODS = [
      :calculate,
      :count_by_sql,
      :exists?,
      :find,
      :find_by,
      :find_by_sql,
      :find_every,
      :find_one,
      :find_some,
      :get_primary_key
    ].freeze

    # Methods that must always run against the replica.
    CLASS_FORCE_REPLICA_METHODS = [
      :replace_bind_variable,
      :replace_bind_variables,
      :sanitize_sql_array,
      :sanitize_sql_hash_for_assignment,
      :table_exists?
    ].freeze

    # CLEANUP: removed the unused CLASS_SLAVE_METHODS /
    # CLASS_FORCE_SLAVE_METHODS aliases of the two constants above;
    # nothing in this module referenced them.

    def self.extended(base)
      CLASS_REPLICA_METHODS.each { |m| ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(true, base, m) }
      CLASS_FORCE_REPLICA_METHODS.each { |m| ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(true, base, m, force_on_replica: true) }
      ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(true, base, :load_schema!, force_on_replica: true)
      ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(false, base, :reload)
      base.class_eval do
        include InstanceMethods

        class << self
          alias_method :transaction_without_replica_off, :transaction
          alias_method :transaction, :transaction_with_replica_off
        end
      end
    end

    # Run +block+ on the replica unless inside a migration or an open
    # transaction, and only for on_replica_by_default models.
    def on_replica_unless_tx(&block)
      return yield if Thread.current[:_active_record_shards_in_migration]
      return yield if Thread.current[:_active_record_shards_in_tx]

      if on_replica_by_default?
        on_replica(&block)
      else
        yield
      end
    end

    # Run +block+ on the replica regardless of on_replica_by_default,
    # except inside migrations.
    def force_on_replica(&block)
      return yield if Thread.current[:_active_record_shards_in_migration]

      on_cx_switch_block(:replica, construct_ro_scope: false, force: true, &block)
    end

    module ActiveRelationPatches
      def self.included(base)
        [:calculate, :exists?, :pluck, :load].each do |m|
          ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(false, base, m)
        end
        ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(false, base, :to_sql, force_on_replica: true)
      end

      def on_replica_unless_tx
        @klass.on_replica_unless_tx { yield }
      end
    end

    module Rails52RelationPatches
      def connection
        return super if Thread.current[:_active_record_shards_in_migration]
        return super if Thread.current[:_active_record_shards_in_tx]

        if @klass.on_replica_by_default?
          @klass.on_replica.connection
        else
          super
        end
      end
    end

    # in rails 4.1+, they create a join class that's used to pull in records for HABTM.
    # this simplifies the hell out of our existence, because all we have to do is inerit on-replica-by-default
    # down from the parent now.
    module Rails41HasAndBelongsToManyBuilderExtension
      def self.included(base)
        base.class_eval do
          alias_method :through_model_without_inherit_default_replica_from_lhs, :through_model
          alias_method :through_model, :through_model_with_inherit_default_replica_from_lhs
        end
      end

      def through_model_with_inherit_default_replica_from_lhs
        model = through_model_without_inherit_default_replica_from_lhs
        def model.on_replica_by_default?
          left_reflection.klass.on_replica_by_default?
        end

        # also transfer the sharded-ness of the left table to the join model
        model.not_sharded unless model.left_reflection.klass.is_sharded?
        model
      end
    end

    module AssociationsAssociationAssociationScopePatch
      def association_scope
        if klass
          on_replica_unless_tx { super }
        else
          super
        end
      end

      def on_replica_unless_tx
        klass.on_replica_unless_tx { yield }
      end
    end

    module AssociationsAssociationFindTargetPatch
      def find_target
        if klass
          on_replica_unless_tx { super }
        else
          super
        end
      end

      def on_replica_unless_tx
        klass.on_replica_unless_tx { yield }
      end
    end

    module AssociationsAssociationGetRecordsPatch
      def get_records # rubocop:disable Naming/AccessorMethodName
        if klass
          on_replica_unless_tx { super }
        else
          super
        end
      end

      def on_replica_unless_tx
        klass.on_replica_unless_tx { yield }
      end
    end

    module AssociationsPreloaderAssociationAssociatedRecordsByOwnerPatch
      def associated_records_by_owner(preloader)
        if klass
          on_replica_unless_tx { super }
        else
          super
        end
      end

      def on_replica_unless_tx
        klass.on_replica_unless_tx { yield }
      end
    end

    module AssociationsPreloaderAssociationLoadRecordsPatch
      def load_records
        if klass
          on_replica_unless_tx { super }
        else
          super
        end
      end

      def on_replica_unless_tx
        klass.on_replica_unless_tx { yield }
      end
    end

    module TypeCasterConnectionConnectionPatch
      def connection
        return super if Thread.current[:_active_record_shards_in_migration]
        return super if Thread.current[:_active_record_shards_in_tx]

        if @klass.on_replica_by_default?
          @klass.on_replica.connection
        else
          super
        end
      end
    end

    module SchemaDefinePatch
      # Flag the thread as "in migration" for the duration of a schema
      # define so all patched methods fall through to the primary.
      def define(info, &block)
        old_val = Thread.current[:_active_record_shards_in_migration]
        Thread.current[:_active_record_shards_in_migration] = true
        super
      ensure
        Thread.current[:_active_record_shards_in_migration] = old_val
      end
    end
  end
end
Remove two unused constants
Seems we forgot to remove these in 59976084d756e13cba7ed94cd57b2eb55aa6.
# frozen_string_literal: true
module ActiveRecordShards
module DefaultReplicaPatches
def self.wrap_method_in_on_replica(class_method, base, method, force_on_replica: false)
base_methods =
if class_method
base.methods + base.private_methods
else
base.instance_methods + base.private_instance_methods
end
return unless base_methods.include?(method)
_, method, punctuation = method.to_s.match(/^(.*?)([\?\!]?)$/).to_a
# _ALWAYS_ on replica, or only for on `on_replica_by_default = true` models?
wrapper = force_on_replica ? 'force_on_replica' : 'on_replica_unless_tx'
base.class_eval <<-RUBY, __FILE__, __LINE__ + 1
#{class_method ? 'class << self' : ''}
def #{method}_with_default_replica#{punctuation}(*args, &block)
#{wrapper} do
#{method}_without_default_replica#{punctuation}(*args, &block)
end
end
ruby2_keywords(:#{method}_with_default_replica#{punctuation}) if respond_to?(:ruby2_keywords, true)
alias_method :#{method}_without_default_replica#{punctuation}, :#{method}#{punctuation}
alias_method :#{method}#{punctuation}, :#{method}_with_default_replica#{punctuation}
#{class_method ? 'end' : ''}
RUBY
end
def transaction_with_replica_off(*args, &block)
if on_replica_by_default?
begin
old_val = Thread.current[:_active_record_shards_in_tx]
Thread.current[:_active_record_shards_in_tx] = true
transaction_without_replica_off(*args, &block)
ensure
Thread.current[:_active_record_shards_in_tx] = old_val
end
else
transaction_without_replica_off(*args, &block)
end
end
ruby2_keywords(:transaction_with_replica_off) if respond_to?(:ruby2_keywords, true)
module InstanceMethods
def on_replica_unless_tx
self.class.on_replica_unless_tx { yield }
end
end
CLASS_REPLICA_METHODS = [
:calculate,
:count_by_sql,
:exists?,
:find,
:find_by,
:find_by_sql,
:find_every,
:find_one,
:find_some,
:get_primary_key
].freeze
CLASS_FORCE_REPLICA_METHODS = [
:replace_bind_variable,
:replace_bind_variables,
:sanitize_sql_array,
:sanitize_sql_hash_for_assignment,
:table_exists?
].freeze
def self.extended(base)
CLASS_REPLICA_METHODS.each { |m| ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(true, base, m) }
CLASS_FORCE_REPLICA_METHODS.each { |m| ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(true, base, m, force_on_replica: true) }
ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(true, base, :load_schema!, force_on_replica: true)
ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(false, base, :reload)
base.class_eval do
include InstanceMethods
class << self
alias_method :transaction_without_replica_off, :transaction
alias_method :transaction, :transaction_with_replica_off
end
end
end
def on_replica_unless_tx(&block)
return yield if Thread.current[:_active_record_shards_in_migration]
return yield if Thread.current[:_active_record_shards_in_tx]
if on_replica_by_default?
on_replica(&block)
else
yield
end
end
def force_on_replica(&block)
return yield if Thread.current[:_active_record_shards_in_migration]
on_cx_switch_block(:replica, construct_ro_scope: false, force: true, &block)
end
module ActiveRelationPatches
def self.included(base)
[:calculate, :exists?, :pluck, :load].each do |m|
ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(false, base, m)
end
ActiveRecordShards::DefaultReplicaPatches.wrap_method_in_on_replica(false, base, :to_sql, force_on_replica: true)
end
def on_replica_unless_tx
@klass.on_replica_unless_tx { yield }
end
end
module Rails52RelationPatches
def connection
return super if Thread.current[:_active_record_shards_in_migration]
return super if Thread.current[:_active_record_shards_in_tx]
if @klass.on_replica_by_default?
@klass.on_replica.connection
else
super
end
end
end
# in rails 4.1+, they create a join class that's used to pull in records for HABTM.
# this simplifies the hell out of our existence, because all we have to do is inerit on-replica-by-default
# down from the parent now.
module Rails41HasAndBelongsToManyBuilderExtension
# Hook the HABTM builder so the generated join model inherits replica
# behavior from the left-hand side of the association.
def self.included(base)
base.class_eval do
alias_method :through_model_without_inherit_default_replica_from_lhs, :through_model
alias_method :through_model, :through_model_with_inherit_default_replica_from_lhs
end
end
# Build the implicit join model, then make it delegate
# on_replica_by_default? to the left-hand-side class.
def through_model_with_inherit_default_replica_from_lhs
model = through_model_without_inherit_default_replica_from_lhs
# Singleton-method override: affects only this particular join model.
def model.on_replica_by_default?
left_reflection.klass.on_replica_by_default?
end
# also transfer the sharded-ness of the left table to the join model
model.not_sharded unless model.left_reflection.klass.is_sharded?
model
end
end
# Build association scopes on the replica unless inside a transaction.
module AssociationsAssociationAssociationScopePatch
  def association_scope
    return super unless klass

    on_replica_unless_tx { super }
  end

  # Defer the replica/transaction decision to the association's class.
  def on_replica_unless_tx(&block)
    klass.on_replica_unless_tx(&block)
  end
end
# Load association targets from the replica unless inside a transaction.
module AssociationsAssociationFindTargetPatch
  def find_target
    return super unless klass

    on_replica_unless_tx { super }
  end

  # Defer the replica/transaction decision to the association's class.
  def on_replica_unless_tx(&block)
    klass.on_replica_unless_tx(&block)
  end
end
# Fetch association records from the replica unless inside a transaction.
module AssociationsAssociationGetRecordsPatch
  def get_records # rubocop:disable Naming/AccessorMethodName
    return super unless klass

    on_replica_unless_tx { super }
  end

  # Defer the replica/transaction decision to the association's class.
  def on_replica_unless_tx(&block)
    klass.on_replica_unless_tx(&block)
  end
end
# Preload associated records from the replica unless inside a transaction.
module AssociationsPreloaderAssociationAssociatedRecordsByOwnerPatch
  def associated_records_by_owner(preloader)
    return super unless klass

    on_replica_unless_tx { super }
  end

  # Defer the replica/transaction decision to the association's class.
  def on_replica_unless_tx(&block)
    klass.on_replica_unless_tx(&block)
  end
end
# Load preloader records from the replica unless inside a transaction.
module AssociationsPreloaderAssociationLoadRecordsPatch
  def load_records
    return super unless klass

    on_replica_unless_tx { super }
  end

  # Defer the replica/transaction decision to the association's class.
  def on_replica_unless_tx(&block)
    klass.on_replica_unless_tx(&block)
  end
end
# Route the type caster's #connection through the replica when replica
# reads are the default, except inside migrations or transactions.
module TypeCasterConnectionConnectionPatch
  def connection
    return super if Thread.current[:_active_record_shards_in_migration]
    return super if Thread.current[:_active_record_shards_in_tx]

    return super unless @klass.on_replica_by_default?

    @klass.on_replica.connection
  end
end
# Mark schema loading (schema.rb's `define`) as "in migration" so every
# statement inside it runs on the primary connection; the previous flag
# value is restored even if `define` raises.
module SchemaDefinePatch
  def define(info, &block)
    previous = Thread.current[:_active_record_shards_in_migration]
    Thread.current[:_active_record_shards_in_migration] = true
    begin
      super
    ensure
      Thread.current[:_active_record_shards_in_migration] = previous
    end
  end
end
end
end
|
#
# Copyright (C) 2016 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
module Canvas::Migration::ExternalContent
  # Translates canvas object ids inside external content hashes to
  # migration ids on export, and back to new canvas ids on import.
  class Translator
    attr_reader :content_migration, :content_export

    # @param content_migration used to resolve migration ids on :import
    # @param content_export used to build migration ids on :export
    def initialize(content_migration: nil, content_export: nil)
      @content_migration = content_migration
      @content_export = content_export
    end

    # recursively searches for keys matching our special format -
    # $canvas_TYPE_id
    # e.g. $canvas_assignment_id
    # this indicates that they are originally ids for objects of type TYPE
    # and we'll translate them into migration ids for export and translate back into the new ids for import
    # translate_type is either :export or :import
    def translate_data(data, translate_type)
      case data
      when Array
        data.each { |item| translate_data(item, translate_type) }
      when Hash
        data.each do |key, item|
          if item.is_a?(Hash) || item.is_a?(Array)
            translate_data(item, translate_type)
          elsif obj_class = object_class_for_translation(key)
            data[key] =
              case translate_type
              when :export
                get_migration_id_from_canvas_id(obj_class, item)
              when :import
                get_canvas_id_from_migration_id(obj_class, item)
              end
          end
        end
      end
      data
    end

    # probably not a comprehensive list
    TYPES_TO_CLASSES = {
      "announcement" => Announcement,
      "assessment_question_bank" => AssessmentQuestionBank,
      "assignment" => Assignment,
      "assignment_group" => AssignmentGroup,
      "attachment" => Attachment,
      "calendar_event" => CalendarEvent,
      "context_external_tool" => ContextExternalTool,
      "context_module" => ContextModule,
      "context_module_tag" => ContentTag,
      "discussion_topic" => DiscussionTopic,
      "grading_standard" => GradingStandard,
      "learning_outcome" => LearningOutcome,
      "quiz" => Quizzes::Quiz,
      "rubric" => Rubric,
      "wiki_page" => WikiPage
    }.freeze
    CLASSES_TO_TYPES = TYPES_TO_CLASSES.invert.freeze
    ALIASED_TYPES = {
      'context_module_item' => 'context_module_tag',
      'file' => 'attachment',
      'page' => 'wiki_page'
    }.freeze

    # Returns the AR class for a "$canvas_<type>_id" key, or nil when the
    # key is not in our special format or the type is unknown.
    def object_class_for_translation(key)
      if match = key.to_s.match(/^\$canvas_(\w+)_id$/)
        type = match[1]
        TYPES_TO_CLASSES[ALIASED_TYPES[type] || type]
      end
    end

    def get_migration_id_from_canvas_id(obj_class, canvas_id)
      if content_export&.for_master_migration?
        # The referenced object may no longer exist (e.g. deleted before a
        # blueprint sync); don't let `find` raise RecordNotFound — emit a
        # sentinel the import side can recognize and skip.
        obj = obj_class.where(obj_class.primary_key => canvas_id).first
        obj ? content_export.create_key(obj) : NOT_FOUND
      else
        CC::CCHelper.create_key("#{obj_class.reflection_type_name}_#{canvas_id}")
      end
    end

    NOT_FOUND = "$OBJECT_NOT_FOUND"

    def get_canvas_id_from_migration_id(obj_class, migration_id)
      # Short-circuit sentinel values produced when the export side could
      # not find the object.
      return NOT_FOUND if migration_id == NOT_FOUND
      if item = content_migration.find_imported_migration_item(obj_class, migration_id)
        return item.id
      end
      # most of the time, the new canvas objects have been imported with the current import
      # but they may have been imported earlier as a selective import
      # so we can search for them in the course just to be sure
      obj_type = CLASSES_TO_TYPES[obj_class]
      if item = content_migration.context.send(obj_type.pluralize).where(:migration_id => migration_id).first
        return item.id
      end
      NOT_FOUND
    end
  end
end
Handle missing content on blueprint external export
refs #ADMIN-1474
Change-Id: Ic170a90768819c5419e544e8dc7c4b7497d5b849
Reviewed-on: https://gerrit.instructure.com/165517
Tested-by: Jenkins
Reviewed-by: Jeremy Stanley <b3f594e10a9edcf5413cf1190121d45078c62290@instructure.com>
Product-Review: James Williams <3a5f4ed991dbfa5c9ef76ad729d240ca12f62180@instructure.com>
QA-Review: James Williams <3a5f4ed991dbfa5c9ef76ad729d240ca12f62180@instructure.com>
#
# Copyright (C) 2016 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
module Canvas::Migration::ExternalContent
  # Translates canvas object ids inside external content hashes to
  # migration ids on export, and back to new canvas ids on import.
  class Translator
    attr_reader :content_migration, :content_export

    # @param content_migration used to resolve migration ids on :import
    # @param content_export used to build migration ids on :export
    def initialize(content_migration: nil, content_export: nil)
      @content_migration = content_migration
      @content_export = content_export
    end

    # recursively searches for keys matching our special format -
    # $canvas_TYPE_id
    # e.g. $canvas_assignment_id
    # this indicates that they are originally ids for objects of type TYPE
    # and we'll translate them into migration ids for export and translate back into the new ids for import
    # translate_type is either :export or :import
    def translate_data(data, translate_type)
      case data
      when Array
        data.each { |item| translate_data(item, translate_type) }
      when Hash
        data.each do |key, item|
          if item.is_a?(Hash) || item.is_a?(Array)
            translate_data(item, translate_type)
          elsif obj_class = object_class_for_translation(key)
            data[key] =
              case translate_type
              when :export
                get_migration_id_from_canvas_id(obj_class, item)
              when :import
                get_canvas_id_from_migration_id(obj_class, item)
              end
          end
        end
      end
      data
    end

    # probably not a comprehensive list
    TYPES_TO_CLASSES = {
      "announcement" => Announcement,
      "assessment_question_bank" => AssessmentQuestionBank,
      "assignment" => Assignment,
      "assignment_group" => AssignmentGroup,
      "attachment" => Attachment,
      "calendar_event" => CalendarEvent,
      "context_external_tool" => ContextExternalTool,
      "context_module" => ContextModule,
      "context_module_tag" => ContentTag,
      "discussion_topic" => DiscussionTopic,
      "grading_standard" => GradingStandard,
      "learning_outcome" => LearningOutcome,
      "quiz" => Quizzes::Quiz,
      "rubric" => Rubric,
      "wiki_page" => WikiPage
    }.freeze
    CLASSES_TO_TYPES = TYPES_TO_CLASSES.invert.freeze
    # Frozen like its siblings to prevent accidental runtime mutation.
    ALIASED_TYPES = {
      'context_module_item' => 'context_module_tag',
      'file' => 'attachment',
      'page' => 'wiki_page'
    }.freeze

    # Returns the AR class for a "$canvas_<type>_id" key, or nil when the
    # key is not in our special format or the type is unknown.
    def object_class_for_translation(key)
      if match = key.to_s.match(/^\$canvas_(\w+)_id$/)
        type = match[1]
        TYPES_TO_CLASSES[ALIASED_TYPES[type] || type]
      end
    end

    def get_migration_id_from_canvas_id(obj_class, canvas_id)
      if content_export&.for_master_migration?
        # The object may have been deleted before the sync; emit a sentinel
        # instead of raising RecordNotFound.
        obj = obj_class.where(obj_class.primary_key => canvas_id).first
        obj ? content_export.create_key(obj) : NOT_FOUND
      else
        CC::CCHelper.create_key("#{obj_class.reflection_type_name}_#{canvas_id}")
      end
    end

    NOT_FOUND = "$OBJECT_NOT_FOUND"

    def get_canvas_id_from_migration_id(obj_class, migration_id)
      return NOT_FOUND if migration_id == NOT_FOUND
      if item = content_migration.find_imported_migration_item(obj_class, migration_id)
        return item.id
      end
      # most of the time, the new canvas objects have been imported with the current import
      # but they may have been imported earlier as a selective import
      # so we can search for them in the course just to be sure
      # (use the precomputed inverse map instead of a linear detect scan)
      obj_type = CLASSES_TO_TYPES[obj_class]
      if item = content_migration.context.send(obj_type.pluralize).where(:migration_id => migration_id).first
        return item.id
      end
      NOT_FOUND
    end
  end
end
|
require 'cms_scanner/finders/finder/smart_url_checker/findings'
module CMSScanner
  module Finders
    class Finder
      # Smart URL Checker
      module SmartURLChecker
        # Processes the given URLs and returns the related findings.
        #
        # @param [ Array<String> ] urls
        # @param [ Hash ] opts
        #
        # @return []
        def process_urls(_urls, _opts = {})
          raise NotImplementedError
        end

        # @param [ Hash ] opts
        #
        # @return [ Array<Finding> ]
        def passive(opts = {})
          urls = passive_urls(opts)
          process_urls(urls, opts)
        end

        # @param [ Hash ] opts
        #
        # @return [ Array<String> ]
        def passive_urls(_opts = {})
          page = NS::Browser.get_and_follow_location(target.url)
          target.in_scope_urls(page, passive_urls_xpath)
        end

        # @return [ String ]
        def passive_urls_xpath
          raise NotImplementedError
        end

        # @param [ Hash ] opts
        #
        # @return [ Array<Finding> ]
        def aggressive(opts = {})
          urls = aggressive_urls(opts)
          # Avoid checking the same URLs twice in mixed mode
          urls -= passive_urls(opts) if opts[:mode] == :mixed

          process_urls(urls, opts)
        end

        # @param [ Hash ] opts
        #
        # @return [ Array<String> ]
        def aggressive_urls(_opts = {})
          raise NotImplementedError
        end
      end
    end
  end
end
Updates the SmartURLChecker module to use a simple get instead of following location(s) - Ref #35
require 'cms_scanner/finders/finder/smart_url_checker/findings'
module CMSScanner
  module Finders
    class Finder
      # Smart URL Checker
      module SmartURLChecker
        # Abstract: subclasses turn a URL list into findings.
        #
        # @param [ Array<String> ] urls
        # @param [ Hash ] opts
        #
        # @return []
        def process_urls(_urls, _opts = {})
          raise NotImplementedError
        end

        # @param [ Hash ] opts
        #
        # @return [ Array<Finding> ]
        def passive(opts = {})
          process_urls(passive_urls(opts), opts)
        end

        # In-scope URLs extracted from the target's homepage response.
        #
        # @param [ Hash ] opts
        #
        # @return [ Array<String> ]
        def passive_urls(_opts = {})
          homepage = NS::Browser.get(target.url)
          target.in_scope_urls(homepage, passive_urls_xpath)
        end

        # Abstract: XPath used to extract candidate URLs.
        #
        # @return [ String ]
        def passive_urls_xpath
          raise NotImplementedError
        end

        # @param [ Hash ] opts
        #
        # @return [ Array<Finding> ]
        def aggressive(opts = {})
          candidates = aggressive_urls(opts)
          # To avoid scanning the same URLs twice in mixed mode
          candidates -= passive_urls(opts) if opts[:mode] == :mixed

          process_urls(candidates, opts)
        end

        # Abstract: URL list for the aggressive detection mode.
        #
        # @param [ Hash ] opts
        #
        # @return [ Array<String> ]
        def aggressive_urls(_opts = {})
          raise NotImplementedError
        end
      end
    end
  end
end
|
Pod::Spec.new do |s|
  s.name     = "UIColor_Hex_Swift"
  s.version  = "5.0.0"
  s.summary  = "Convenience method for creating autoreleased color using RGBA hex string."
  s.homepage = "https://github.com/yeahdongcn/UIColor-Hex-Swift"
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { "R0CKSTAR" => "yeahdongcn@gmail.com" }
  s.ios.deployment_target     = "8.0"
  s.tvos.deployment_target    = "9.0"
  s.watchos.deployment_target = "2.1"
  # Swift requires a minimum macOS deployment target of 10.9.
  s.osx.deployment_target     = "10.9"
  s.source         = { :git => 'https://github.com/yeahdongcn/UIColor-Hex-Swift.git', :tag => "#{s.version}" }
  s.source_files   = 'HEXColor/*.{h,swift}'
  s.frameworks     = ['UIKit']
  s.requires_arc   = true
  s.swift_versions = ['5.0']
end
Swift requires a minimum deployment target of OS X 10.9
Pod::Spec.new do |s|
  # Pod identity
  s.name    = "UIColor_Hex_Swift"
  s.version = "5.0.0"
  s.summary = "Convenience method for creating autoreleased color using RGBA hex string."
  s.homepage = "https://github.com/yeahdongcn/UIColor-Hex-Swift"
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author  = { "R0CKSTAR" => "yeahdongcn@gmail.com" }

  # Supported platforms
  s.ios.deployment_target     = "8.0"
  s.tvos.deployment_target    = "9.0"
  s.watchos.deployment_target = "2.1"
  s.osx.deployment_target     = "10.9"

  # Sources and build settings
  s.source         = { :git => 'https://github.com/yeahdongcn/UIColor-Hex-Swift.git', :tag => s.version.to_s }
  s.source_files   = 'HEXColor/*.{h,swift}'
  s.frameworks     = ['UIKit']
  s.requires_arc   = true
  s.swift_versions = ['5.0']
end
|
Pod::Spec.new do |s|
  s.name             = "MBVineVideo"
  # Bumped for the 1.0.0 release tag.
  s.version          = "1.0.0"
  s.summary          = "Extracts video url and information from Vine"
  s.homepage         = "https://github.com/inket/MBVineVideo"
  s.license          = 'MIT'
  s.author           = { "inket" => "injekter@gmail.com" }
  s.source           = { :git => "https://github.com/inket/MBVineVideo.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/inket'
  s.platform         = :ios, '7.0'
  s.requires_arc     = true
  s.source_files         = 'Pod/Classes/**/*'
  s.public_header_files  = 'Pod/Classes/**/*.h'
  s.dependency 'Ono', '~> 1.0'
end
Bump pod version to 1.0.0
Pod::Spec.new do |s|
  # Pod identity
  s.name    = "MBVineVideo"
  s.version = "1.0.0"
  s.summary = "Extracts video url and information from Vine"

  # Metadata
  s.homepage         = "https://github.com/inket/MBVineVideo"
  s.license          = 'MIT'
  s.author           = { "inket" => "injekter@gmail.com" }
  s.social_media_url = 'https://twitter.com/inket'

  # Sources and build settings
  s.source              = { :git => "https://github.com/inket/MBVineVideo.git", :tag => s.version.to_s }
  s.platform            = :ios, '7.0'
  s.requires_arc        = true
  s.source_files        = 'Pod/Classes/**/*'
  s.public_header_files = 'Pod/Classes/**/*.h'
  s.dependency 'Ono', '~> 1.0'
end
|
=begin
Copyright (C) 2013 Keisuke Nishida
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
=end
module Fluent
  # Buffered output plugin that ships events to a Splunk HTTP Event
  # Collector (HEC), round-robining across the configured indexers.
  class SplunkHTTPEventcollectorOutput < BufferedOutput
    Plugin.register_output('splunk-http-eventcollector', self)

    config_param :server, :string, :default => 'localhost:8088'
    config_param :verify, :bool, :default => true
    config_param :token, :string, :default => nil

    # Event parameters
    config_param :host, :string, :default => nil # TODO: auto-detect
    config_param :index, :string, :default => nil
    config_param :source, :string, :default => '{TAG}'
    config_param :sourcetype, :string, :default => '_json'
    config_param :post_retry_max, :integer, :default => 5
    config_param :post_retry_interval, :integer, :default => 5

    def initialize
      super
      require 'net/http/persistent'
      require 'time'
      @idx_indexers = 0
      @indexers = []
    end

    # Build the source/time/record formatters and the indexer list.
    def configure(conf)
      super
      case @source
      when '{TAG}'
        @source_formatter = lambda { |tag| tag }
      else
        @source_formatter = lambda { |tag| @source.sub('{TAG}', tag) }
      end
      @time_formatter = lambda { |time| time.to_s }
      @formatter = lambda { |record| record.to_json }
      if @server.match(/,/)
        @indexers = @server.split(',')
      else
        @indexers = [@server]
      end
    end

    def start
      super
      @http = Net::HTTP::Persistent.new 'fluent-plugin-splunk-http-eventcollector'
      @http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless @verify
      # Use override_headers so these replace Net::HTTP::Persistent's
      # defaults on every request; HEC payloads are JSON, not text/plain.
      @http.override_headers['Content-Type'] = 'application/json'
      @http.override_headers['User-Agent'] = 'fluent-plugin-splunk-http-eventcollector/0.0.1'
      $log.debug "initialized for splunk-http-eventcollector"
    end

    def shutdown
      # NOTE: call super before @http.shutdown because super may flush final output
      super
      @http.shutdown
      $log.debug "shutdown from splunk-http-eventcollector"
    end

    # Serialize one event as "<time>: <json>\n" tagged for buffering.
    def format(tag, time, record)
      if @time_formatter
        time_str = "#{@time_formatter.call(time)}: "
      else
        time_str = ''
      end
      #record.delete('time')
      event = "#{time_str}#{@formatter.call(record)}\n"
      [tag, event].to_msgpack
    end

    # Group the chunk's events by their formatted source.
    def chunk_to_buffers(chunk)
      buffers = {}
      chunk.msgpack_each do |tag, event|
        (buffers[@source_formatter.call(tag)] ||= []) << event
      end
      return buffers
    end

    # POST each per-source buffer to the collector, retrying transient
    # failures; raises so fluentd can retry the chunk on hard failures.
    def write(chunk)
      chunk_to_buffers(chunk).each do |source, messages|
        uri = URI get_baseurl
        post = Net::HTTP::Post.new uri.request_uri
        post['Authorization'] = "Splunk #{token}"
        post.body = messages.join('')
        $log.debug "POST #{uri}"
        # retry up to :post_retry_max times
        1.upto(@post_retry_max) do |c|
          response = @http.request uri, post
          $log.debug "=>(#{c}/#{@post_retry_max} #{response.code} (#{response.message})"
          if response.code == "200"
            # success
            break
          elsif response.code.match(/^40/)
            # user error
            $log.error "#{uri}: #{response.code} (#{response.message})\n#{response.body}"
            break
          elsif c < @post_retry_max
            # retry
            $log.debug "#{uri}: Retrying..."
            sleep @post_retry_interval
            next
          else
            # other errors. fluentd will retry processing on exception
            # FIXME: this may duplicate logs when using multiple buffers
            raise "#{uri}: #{response.message}"
          end
        end
      end
    end

    # Round-robin across configured indexers and build the base URL.
    def get_baseurl
      server = @indexers[@idx_indexers]
      @idx_indexers = (@idx_indexers + 1) % @indexers.length
      "https://#{server}/services/collectors"
    end
  end
end
Set User-Agent and Content-Type request headers
=begin
Copyright (C) 2013 Keisuke Nishida
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
=end
module Fluent
  # Buffered output plugin that ships events to a Splunk HTTP Event
  # Collector (HEC), round-robining across the configured indexers.
  class SplunkHTTPEventcollectorOutput < BufferedOutput
    Plugin.register_output('splunk-http-eventcollector', self)

    config_param :server, :string, :default => 'localhost:8088'
    config_param :verify, :bool, :default => true
    config_param :token, :string, :default => nil

    # Event parameters
    config_param :host, :string, :default => nil # TODO: auto-detect
    config_param :index, :string, :default => nil
    config_param :source, :string, :default => '{TAG}'
    config_param :sourcetype, :string, :default => '_json'
    config_param :post_retry_max, :integer, :default => 5
    config_param :post_retry_interval, :integer, :default => 5

    def initialize
      super
      require 'net/http/persistent'
      require 'time'
      @idx_indexers = 0
      @indexers = []
    end

    # Build the source/time/record formatters and the indexer list.
    def configure(conf)
      super
      case @source
      when '{TAG}'
        @source_formatter = lambda { |tag| tag }
      else
        @source_formatter = lambda { |tag| @source.sub('{TAG}', tag) }
      end
      @time_formatter = lambda { |time| time.to_s }
      @formatter = lambda { |record| record.to_json }
      if @server.match(/,/)
        @indexers = @server.split(',')
      else
        @indexers = [@server]
      end
    end

    def start
      super
      @http = Net::HTTP::Persistent.new 'fluent-plugin-splunk-http-eventcollector'
      @http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless @verify
      @http.override_headers['Content-Type'] = 'application/json'
      @http.override_headers['User-Agent'] = 'fluent-plugin-splunk-http-eventcollector/0.0.1'
      $log.debug "initialized for splunk-http-eventcollector"
    end

    def shutdown
      # NOTE: call super before @http.shutdown because super may flush final output
      super
      @http.shutdown
      $log.debug "shutdown from splunk-http-eventcollector"
    end

    # Serialize one event as "<time>: <json>\n" tagged for buffering.
    def format(tag, time, record)
      if @time_formatter
        time_str = "#{@time_formatter.call(time)}: "
      else
        time_str = ''
      end
      #record.delete('time')
      event = "#{time_str}#{@formatter.call(record)}\n"
      [tag, event].to_msgpack
    end

    # Group the chunk's events by their formatted source.
    def chunk_to_buffers(chunk)
      buffers = {}
      chunk.msgpack_each do |tag, event|
        (buffers[@source_formatter.call(tag)] ||= []) << event
      end
      return buffers
    end

    # POST each per-source buffer to the collector, retrying transient
    # failures; raises so fluentd can retry the chunk on hard failures.
    def write(chunk)
      chunk_to_buffers(chunk).each do |source, messages|
        uri = URI get_baseurl
        post = Net::HTTP::Post.new uri.request_uri
        post['Authorization'] = "Splunk #{token}"
        post.body = messages.join('')
        $log.debug "POST #{uri}"
        # retry up to :post_retry_max times
        1.upto(@post_retry_max) do |c|
          response = @http.request uri, post
          $log.debug "=>(#{c}/#{@post_retry_max} #{response.code} (#{response.message})"
          if response.code == "200"
            # success
            break
          elsif response.code.match(/^40/)
            # user error
            $log.error "#{uri}: #{response.code} (#{response.message})\n#{response.body}"
            break
          elsif c < @post_retry_max
            # retry
            $log.debug "#{uri}: Retrying..."
            sleep @post_retry_interval
            next
          else
            # other errors. fluentd will retry processing on exception
            # FIXME: this may duplicate logs when using multiple buffers
            raise "#{uri}: #{response.message}"
          end
        end
      end
    end

    # Round-robin across configured indexers and build the base URL.
    # (dropped the dead `base_url = ''` initialization and stray semicolon)
    def get_baseurl
      server = @indexers[@idx_indexers]
      @idx_indexers = (@idx_indexers + 1) % @indexers.length
      "https://#{server}/services/collectors"
    end
  end
end
|
[cloudstack] add disk_offerings collection
require 'fog/core/collection'
require 'fog/cloudstack/models/compute/disk_offering'
module Fog
  module Compute
    class Cloudstack
      # Collection of CloudStack disk offerings.
      class DiskOfferings < Fog::Collection
        model Fog::Compute::Cloudstack::DiskOffering

        # List disk offerings; tolerates an API response without the
        # "diskoffering" key by loading an empty collection.
        def all(options = {})
          response = service.list_disk_offerings(options)
          disk_offerings_data = response["listdiskofferingsresponse"]["diskoffering"] || []
          load(disk_offerings_data)
        end

        # Fetch a single offering by id. Returns nil when the id is
        # unknown instead of raising NoMethodError on the missing
        # "diskoffering" key.
        def get(disk_offering_id)
          response = service.list_disk_offerings('id' => disk_offering_id)
          disk_offering_data = (response["listdiskofferingsresponse"]["diskoffering"] || []).first
          new(disk_offering_data) if disk_offering_data
        end
      end
    end
  end
end
|
# Use this setup block to configure all options available in SimpleForm.
SimpleForm.setup do |config|
# Components used by the form builder to generate a complete input. You can remove
# any of them, change the order, or even add your own components to the stack.
# config.components = [ :placeholder, :label_input, :hint, :error ]
# Default tag used on hints.
# config.hint_tag = :span
# CSS class to add to all hint tags.
# config.hint_class = :hint
# CSS class used on errors.
# config.error_class = :error
# Default tag used on errors.
# config.error_tag = :span
# Method used to tidy up errors.
# config.error_method = :first
# Default tag used for error notification helper.
# config.error_notification_tag = :p
# CSS class to add for error notification helper.
# config.error_notification_class = :error_notification
# ID to add for error notification helper.
# config.error_notification_id = nil
# You can wrap all inputs in a pre-defined tag.
# config.wrapper_tag = :div
# CSS class to add to all wrapper tags.
# config.wrapper_class = :input
# CSS class to add to the wrapper if the field has errors.
# config.wrapper_error_class = :field_with_errors
# You can wrap a collection of radio/check boxes in a pre-defined tag, defaulting to none.
# config.collection_wrapper_tag = nil
# You can wrap each item in a collection of radio/check boxes with a tag, defaulting to span.
# config.item_wrapper_tag = :span
# Series of attempts to detect a default label method for collection.
# config.collection_label_methods = [ :to_label, :name, :title, :to_s ]
# Series of attempts to detect a default value method for collection.
# config.collection_value_methods = [ :id, :to_s ]
# How the label text should be generated altogether with the required text.
# config.label_text = lambda { |label, required| "#{required} #{label}" }
# You can define the class to use on all labels. Default is nil.
# config.label_class = nil
# You can define the class to use on all forms. Default is simple_form.
# config.form_class = :simple_form
# Whether attributes are required by default (or not). Default is true.
# config.required_by_default = true
# Tell browsers whether to use default HTML5 validations (novalidate option).
# Default is enabled.
# config.browser_validations = true
# Determines whether HTML5 types (:email, :url, :search, :tel) and attributes
# (e.g. required) are used or not. True by default.
# Having this on in non-HTML5 compliant sites can cause odd behavior in
# HTML5-aware browsers such as Chrome.
# config.html5 = true
# Custom mappings for input types. This should be a hash containing a regexp
# to match as key, and the input type that will be used when the field name
# matches the regexp as value.
# config.input_mappings = { /count/ => :integer }
# Collection of methods to detect if a file type was given.
# config.file_methods = [ :mounted_as, :file?, :public_filename ]
# Default priority for time_zone inputs.
# config.time_zone_priority = nil
# Default priority for country inputs.
# config.country_priority = nil
# Default size for text inputs.
# config.default_input_size = 50
# When false, do not use translations for labels, hints or placeholders.
# config.translate = true
end
Spelling
# Use this setup block to configure all options available in SimpleForm.
# Each commented option below shows its default value; uncomment a line to
# override that default.
SimpleForm.setup do |config|
# Components used by the form builder to generate a complete input. You can remove
# any of them, change the order, or even add your own components to the stack.
# config.components = [ :placeholder, :label_input, :hint, :error ]
# Default tag used on hints.
# config.hint_tag = :span
# CSS class to add to all hint tags.
# config.hint_class = :hint
# CSS class used on errors.
# config.error_class = :error
# Default tag used on errors.
# config.error_tag = :span
# Method used to tidy up errors.
# config.error_method = :first
# Default tag used for error notification helper.
# config.error_notification_tag = :p
# CSS class to add for error notification helper.
# config.error_notification_class = :error_notification
# ID to add for error notification helper.
# config.error_notification_id = nil
# You can wrap all inputs in a pre-defined tag.
# config.wrapper_tag = :div
# CSS class to add to all wrapper tags.
# config.wrapper_class = :input
# CSS class to add to the wrapper if the field has errors.
# config.wrapper_error_class = :field_with_errors
# You can wrap a collection of radio/check boxes in a pre-defined tag, defaulting to none.
# config.collection_wrapper_tag = nil
# You can wrap each item in a collection of radio/check boxes with a tag, defaulting to span.
# config.item_wrapper_tag = :span
# Series of attempts to detect a default label method for collection.
# config.collection_label_methods = [ :to_label, :name, :title, :to_s ]
# Series of attempts to detect a default value method for collection.
# config.collection_value_methods = [ :id, :to_s ]
# How the label text should be generated altogether with the required text.
# config.label_text = lambda { |label, required| "#{required} #{label}" }
# You can define the class to use on all labels. Default is nil.
# config.label_class = nil
# You can define the class to use on all forms. Default is simple_form.
# config.form_class = :simple_form
# Whether attributes are required by default (or not). Default is true.
# config.required_by_default = true
# Tell browsers whether to use default HTML5 validations (novalidate option).
# Default is enabled.
# config.browser_validations = true
# Determines whether HTML5 types (:email, :url, :search, :tel) and attributes
# (e.g. required) are used or not. True by default.
# Having this on in non-HTML5 compliant sites can cause odd behavior in
# HTML5-aware browsers such as Chrome.
# config.html5 = true
# Custom mappings for input types. This should be a hash containing a regexp
# to match as key, and the input type that will be used when the field name
# matches the regexp as value.
# config.input_mappings = { /count/ => :integer }
# Collection of methods to detect if a file type was given.
# config.file_methods = [ :mounted_as, :file?, :public_filename ]
# Default priority for time_zone inputs.
# config.time_zone_priority = nil
# Default priority for country inputs.
# config.country_priority = nil
# Default size for text inputs.
# config.default_input_size = 50
# When false, do not use translations for labels, hints or placeholders.
# config.translate = true
end
|
#!/usr/bin/env ruby
require 'rubygems'
require 'rubygems/dependency_installer'
require 'rubygems/uninstaller'
require 'rubygems/dependency'
require 'thor'
require 'fileutils'
require 'yaml'
# Shared path/gem helpers for the Merb thor tasks below.
module MerbThorHelper
  private

  # The current working directory, or Merb app root (--merb-root option).
  def working_dir
    @_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)
  end

  # We should have a ./src dir for local and system-wide management.
  def source_dir
    @_source_dir ||= File.join(working_dir, 'src')
    create_if_missing(@_source_dir)
    @_source_dir
  end

  # If a local ./gems dir is found, it means we are in a Merb app.
  def application?
    gem_dir
  end

  # If a local ./gems dir is found, return it (nil otherwise).
  def gem_dir
    if File.directory?(dir = File.join(working_dir, 'gems'))
      dir
    end
  end

  # If we're in a Merb app, we can have a ./bin directory;
  # create it if it's not there.
  def bin_dir
    @_bin_dir ||= begin
      if gem_dir
        dir = File.join(working_dir, 'bin')
        create_if_missing(dir)
        dir
      end
    end
  end

  # Helper to create dir unless it exists.
  # (File.exists? was removed in Ruby 3.2; File.exist? is the supported form.)
  def create_if_missing(path)
    FileUtils.mkdir(path) unless File.exist?(path)
  end

  # Create a modified executable wrapper in the app's ./bin directory
  # for every executable shipped by the given locally-installed gems.
  def ensure_local_bin_for(*gems)
    if bin_dir && File.directory?(bin_dir)
      gems.each do |gem|
        if gemspec_path = Dir[File.join(gem_dir, 'specifications', "#{gem}-*.gemspec")].last
          spec = Gem::Specification.load(gemspec_path)
          spec.executables.each do |exec|
            if File.exist?(executable = File.join(gem_dir, 'bin', exec))
              local_executable = File.join(bin_dir, exec)
              puts "Adding local executable #{local_executable}"
              File.open(local_executable, 'w', 0755) do |f|
                f.write(executable_wrapper(spec, exec))
              end
            end
          end
        end
      end
    end
  end

  # Generate the contents of a bin/ wrapper script for a gem executable.
  def executable_wrapper(spec, bin_file_name)
    <<-TEXT
#!/usr/bin/env #{RbConfig::CONFIG["ruby_install_name"]}
#
# This file was generated by merb.thor.
#
# The application '#{spec.name}' is installed as part of a gem, and
# this file is here to facilitate running it.
#
begin
require 'minigems'
rescue LoadError
require 'rubygems'
end
if File.directory?(gems_dir = File.join(File.dirname(__FILE__), '..', 'gems'))
$BUNDLE = true; Gem.clear_paths; Gem.path.unshift(gems_dir)
end
version = "#{Gem::Requirement.default}"
if ARGV.first =~ /^_(.*)_$/ and Gem::Version.correct? $1 then
version = $1
ARGV.shift
end
gem '#{spec.name}', version
load '#{bin_file_name}'
TEXT
  end
end
# TODO
# - a task to figure out an app's dependencies
# - pulling a specific UUID/Tag (gitspec hash) with clone/update
# - a 'deploy' task (in addition to 'redeploy' ?)
# - eventually take a --orm option for the 'merb-stack' type of tasks
# - add merb:gems:refresh to refresh all gems (from specifications)
# - merb:gems:uninstall should remove local bin/ entries
class Merb < Thor
# Raised when a gem's source checkout cannot be found.
# NOTE(review): these subclass Exception, not StandardError, so a bare
# `rescue` will NOT catch them — presumably intentional; confirm callers
# before changing the base class.
class SourcePathMissing < Exception
end
# Raised when a local gem path cannot be found.
class GemPathMissing < Exception
end
# Raised when installing a gem fails.
class GemInstallError < Exception
end
# Raised when uninstalling a gem fails.
class GemUninstallError < Exception
end
# Thor task namespace (merb:dependencies) for listing, recording and
# installing an application's runtime gem dependencies.
class Dependencies < Thor
include MerbThorHelper
# List all dependencies by extracting them from the actual application;
# will differentiate between locally available gems and system gems.
# Local gems will be shown with the installed gem version numbers.
desc 'list', 'List all application dependencies'
method_options "--merb-root" => :optional,
"--local" => :boolean,
"--system" => :boolean
def list
partitioned = { :local => [], :system => [] }
extract_dependencies.each do |dependency|
# A dependency is "local" when the app has a ./gems dir containing
# at least one matching gemspec.
if gem_dir && !(versions = find_dependency_versions(dependency)).empty?
partitioned[:local] << "#{dependency} [#{versions.join(', ')}]"
else
partitioned[:system] << "#{dependency}"
end
end
# With neither --local nor --system given, show both sections.
none = options[:system].nil? && options[:local].nil?
if (options[:system] || none) && !partitioned[:system].empty?
puts "System dependencies:"
partitioned[:system].each { |str| puts "- #{str}" }
end
if (options[:local] || none) && !partitioned[:local].empty?
puts "Local dependencies:"
partitioned[:local].each { |str| puts "- #{str}" }
end
end
# Retrieve all application dependencies and store them in a local
# configuration file at ./config/dependencies.yml
#
# The format of this YAML file is as follows:
# - merb_helpers (>= 0, runtime)
# - merb-slices (> 0.9.4, runtime)
desc 'configure', 'Retrieve and store dependencies in ./config/dependencies.yml'
method_options "--merb-root" => :optional,
"--force" => :boolean
def configure
entries = extract_dependencies.map { |d| d.to_s }
FileUtils.mkdir_p(config_dir) unless File.directory?(config_dir)
config = YAML.dump(entries)
# Never clobber an existing config unless --force was passed.
if File.exists?(config_file) && !options[:force]
puts "File already exists! Use --force to overwrite."
else
File.open(config_file, 'w') { |f| f.write config }
puts "Written #{config_file}:"
end
puts config
rescue
puts "Failed to write to #{config_file}"
end
# Install the gems listed in dependencies.yml from RubyForge (stable).
desc 'install', 'Install the gems listed in ./config/dependencies.yml'
method_options "--merb-root" => :optional
def install
if File.exists?(config_file)
dependencies = parse_dependencies_yaml(File.read(config_file))
# Delegate the actual installs to the Gems tasks, forwarding options.
gems = Gems.new
gems.options = options
dependencies.each do |dependency|
gems.install(dependency.name, dependency.version_requirements.to_s)
end
else
puts "No configuration file found at #{config_file}"
puts "Please run merb:dependencies:configure first."
end
end
protected
# Path to the app's ./config directory (memoized).
def config_dir
@_config_dir ||= File.join(working_dir, 'config')
end
# Path to ./config/dependencies.yml (memoized).
def config_file
@_config_file ||= File.join(config_dir, 'dependencies.yml')
end
# Find local gems and return matched version numbers.
def find_dependency_versions(dependency)
versions = []
specs = Dir[File.join(gem_dir, 'specifications', "#{dependency.name}-*.gemspec")]
unless specs.empty?
# Pull the version number out of each gemspec filename.
specs.inject(versions) do |versions, gemspec_path|
versions << gemspec_path[/-([\d\.]+)\.gemspec$/, 1]
end
end
# NOTE(review): versions sort lexically here, not by Gem::Version -
# multi-digit components (0.9.10 vs 0.9.2) order incorrectly.
versions.sort.reverse
end
# Extract the runtime dependencies by starting the application in runner mode.
def extract_dependencies
FileUtils.cd(working_dir) do
# Shell out to `merb -r` so the app's own bootloader resolves its
# dependency list, then scan stdout for the YAML document marker.
cmd = ["require 'yaml';"]
cmd << "dependencies = Merb::BootLoader::Dependencies.dependencies"
cmd << "entries = dependencies.map { |d| d.to_s }"
cmd << "puts YAML.dump(entries)"
output = `merb -r "#{cmd.join("\n")}"`
if index = (lines = output.split(/\n/)).index('--- ')
yaml = lines.slice(index, lines.length - 1).join("\n")
return parse_dependencies_yaml(yaml)
end
end
return []
end
# Parse the basic YAML config data, and process Gem::Dependency output.
# Formatting example: merb_helpers (>= 0.9.8, runtime)
def parse_dependencies_yaml(yaml)
dependencies = []
entries = YAML.load(yaml) rescue []
entries.each do |entry|
# Matches "name (version-requirement, type)".
if matches = entry.match(/^(\S+) \(([^,]+)?, ([^\)]+)\)/)
name, version_req, type = matches.captures
dependencies << Gem::Dependency.new(name, version_req, type.to_sym)
else
puts "Invalid entry: #{entry}"
end
end
dependencies
end
end
# Install a Merb stack from stable RubyForge gems. Optionally install a
# suitable Rack adapter/server when setting --adapter to one of the
# following: mongrel, emongrel, thin or ebb.
desc 'stable', 'Install extlib, merb-core and merb-more from rubygems'
method_options "--merb-root" => :optional,
"--adapter" => :optional
def stable
adapters = %w[mongrel emongrel thin ebb]
stable = Stable.new
stable.options = options
# Stable#core/#more return the result of refresh_from_gems, i.e.
# whether every component installed successfully.
if stable.core && stable.more
puts "Installed extlib, merb-core and merb-more"
if options[:adapter] && adapters.include?(options[:adapter]) &&
stable.refresh_from_gems(options[:adapter])
puts "Installed #{options[:adapter]}"
elsif options[:adapter]
puts "Please specify one of the following adapters: #{adapters.join(' ')}"
end
end
end
class Stable < Thor
# The Stable tasks deal with known -stable- gems; available
# as shortcuts to Merb and DataMapper gems.
#
# These are pulled from rubyforge and installed into the
# desired gems dir (either system-wide or into the application's
# gems directory).
include MerbThorHelper
# Gets latest gem versions from RubyForge and installs them.
#
# Examples:
#
# thor merb:stable:core
# thor merb:stable:core --merb-root ./path/to/your/app
desc 'core', 'Install extlib and merb-core from rubygems'
method_options "--merb-root" => :optional
def core
refresh_from_gems 'extlib', 'merb-core'
ensure_local_bin_for('merb-core', 'rake', 'rspec', 'thor')
end
desc 'more', 'Install merb-more from rubygems'
method_options "--merb-root" => :optional
def more
refresh_from_gems 'merb-more'
ensure_local_bin_for('merb-gen')
end
desc 'plugins', 'Install merb-plugins from rubygems'
method_options "--merb-root" => :optional
def plugins
refresh_from_gems 'merb-plugins'
end
desc 'dm_core', 'Install dm-core from rubygems'
method_options "--merb-root" => :optional
def dm_core
refresh_from_gems 'extlib', 'dm-core'
end
desc 'dm_more', 'Install dm-more from rubygems'
method_options "--merb-root" => :optional
def dm_more
refresh_from_gems 'extlib', 'dm-core', 'dm-more'
end
# Pull from RubyForge and install.
# Returns true only when every component installed successfully.
def refresh_from_gems(*components)
gems = Gems.new
gems.options = options
components.all? { |name| gems.install(name) }
end
end
# Retrieve latest Merb versions from git and optionally install them.
#
# Note: the --sources option takes a path to a YAML file
# with a regular Hash mapping gem names to git urls.
#
# Examples:
#
# thor merb:edge
# thor merb:edge --install
# thor merb:edge --merb-root ./path/to/your/app
# thor merb:edge --sources ./path/to/sources.yml
desc 'edge', 'Install extlib, merb-core and merb-more from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def edge
# Delegate to the Edge tasks: core + more + any custom repos.
edge = Edge.new
edge.options = options
edge.core
edge.more
edge.custom
end
class Edge < Thor
# The Edge tasks deal with known gems from the bleeding edge; available
# as shortcuts to Merb and DataMapper gems.
#
# These are pulled from git and optionally installed into the
# desired gems dir (either system-wide or into the application's
# gems directory).
include MerbThorHelper
# Gets latest gem versions from git - optionally installs them.
#
# Note: the --sources option takes a path to a YAML file
# with a regular Hash mapping gem names to git urls,
# allowing pulling forks of the official repositories.
#
# Examples:
#
# thor merb:edge:core
# thor merb:edge:core --install
# thor merb:edge:core --merb-root ./path/to/your/app
# thor merb:edge:core --sources ./path/to/sources.yml
desc 'core', 'Update extlib and merb-core from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def core
refresh_from_source 'thor', 'extlib', 'merb-core'
ensure_local_bin_for('merb-core', 'rake', 'rspec', 'thor')
end
desc 'more', 'Update merb-more from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def more
refresh_from_source 'merb-more'
ensure_local_bin_for('merb-gen')
end
desc 'plugins', 'Update merb-plugins from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def plugins
refresh_from_source 'merb-plugins'
end
desc 'dm_core', 'Update dm-core from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def dm_core
refresh_from_source 'extlib', 'dm-core'
end
desc 'dm_more', 'Update dm-more from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def dm_more
refresh_from_source 'extlib', 'dm-core', 'dm-more'
end
desc 'custom', 'Update all the custom repos from git HEAD'
method_options "--merb-root" => :optional,
"--sources" => :optional,
"--install" => :boolean
def custom
# Custom repos = everything in the (possibly user-extended) repo map
# that is not one of the built-in defaults.
custom_repos = Merb.repos.keys - Merb.default_repos.keys
refresh_from_source *custom_repos
end
private
# Pull from git and optionally install the resulting gems.
def refresh_from_source(*components)
source = Source.new
source.options = options
components.each do |name|
source.clone(name)
source.install(name) if options[:install]
end
end
end
class Source < Thor
# The Source tasks deal with gem source packages - mainly from github.
# Any directory inside ./src is regarded as a gem that can be packaged
# and installed from there into the desired gems dir (either system-wide
# or into the application's gems directory).
include MerbThorHelper
# Install a particular gem from source.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be installed locally into that directory.
#
# Note that this task doesn't retrieve any (new) source from git;
# To update and install you'd execute the following two tasks:
#
# thor merb:source:update merb-core
# thor merb:source:install merb-core
#
# Alternatively, look at merb:edge and merb:edge:* with --install.
#
# Examples:
#
# thor merb:source:install merb-core
# thor merb:source:install merb-more
# thor merb:source:install merb-more/merb-slices
# thor merb:source:install merb-plugins/merb_helpers
# thor merb:source:install merb-core --merb-root ./path/to/your/app
desc 'install GEM_NAME', 'Install a rubygem from (git) source'
method_options "--merb-root" => :optional
def install(name)
puts "Installing #{name}..."
gem_src_dir = File.join(source_dir, name)
opts = {}
opts[:install_dir] = gem_dir if gem_dir
Merb.install_gem_from_src(gem_src_dir, opts)
rescue Merb::SourcePathMissing
puts "Missing rubygem source path: #{gem_src_dir}"
rescue Merb::GemPathMissing
puts "Missing rubygems path: #{gem_dir}"
rescue => e
puts "Failed to install #{name} (#{e.message})"
end
# Clone a git repository into ./src. The repository can be
# a direct git url or a known -named- repository.
#
# Examples:
#
# thor merb:source:clone dm-core
# thor merb:source:clone dm-core --sources ./path/to/sources.yml
# thor merb:source:clone git://github.com/sam/dm-core.git
desc 'clone REPOSITORY', 'Clone a git repository into ./src'
method_options "--sources" => :optional
def clone(repository)
# Accept either a raw git:// url or a name from the repo map.
if repository =~ /^git:\/\//
repository_url = repository
elsif url = Merb.repos(options[:sources])[repository]
repository_url = url
end
if repository_url
# Derive the repo dir name and the github account ("fork") name
# from the url.
repository_name = repository_url[/([\w+|-]+)\.git/u, 1]
fork_name = repository_url[/.com\/+?(.+)\/.+\.git/u, 1]
local_repo_path = "#{source_dir}/#{repository_name}"
if File.directory?(local_repo_path)
puts "\n#{repository_name} repository exists, updating or branching instead of cloning..."
FileUtils.cd(local_repo_path) do
# to avoid conflicts we need to set a remote branch for non official repos
existing_repos = `git remote -v`.split("\n").map{|branch| branch.split(/\s+/)}
# NOTE(review): assumes an "origin" remote exists; detect returns
# nil otherwise and .last would raise - confirm acceptable.
origin_repo_url = existing_repos.detect{ |r| r.first == "origin" }.last
# pull from the original repository - no branching needed
if repository_url == origin_repo_url
puts "Pulling from #{repository_url}"
system %{
git fetch
git checkout master
git rebase origin/master
}
# update and switch to a branch for a particular github fork
elsif existing_repos.map{ |r| r.last }.include?(repository_url)
puts "Switching to remote branch: #{fork_name}"
`git checkout -b #{fork_name} #{fork_name}/master`
`git rebase #{fork_name}/master`
# create a new remote branch for a particular github fork
else
puts "Add a new remote branch: #{fork_name}"
`git remote add -f #{fork_name} #{repository_url}`
`git checkout -b#{fork_name} #{fork_name}/master`
end
end
else
# Fresh shallow clone into ./src.
FileUtils.cd(source_dir) do
puts "\nCloning #{repository_name} repository from #{repository_url}..."
system("git clone --depth 1 #{repository_url} ")
end
end
else
puts "No valid repository url given"
end
end
# Update a specific gem source directory from git. See #clone.
desc 'update REPOSITORY', 'Update a git repository in ./src'
alias :update :clone
# Update all gem sources from git - based on the current branch.
desc 'refresh', 'Pull fresh copies of all source gems'
def refresh
repos = Dir["#{source_dir}/*"]
repos.each do |repo|
# Only touch directories that are actually git checkouts.
next unless File.directory?(repo) && File.exists?(File.join(repo, '.git'))
FileUtils.cd(repo) do
puts "Refreshing #{File.basename(repo)}"
system %{git fetch}
# Extract the current branch name from `git branch` output.
branch = `git branch --no-color 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/(\1) /'`[/\* (.+)/, 1]
system %{git rebase #{branch}}
end
end
end
end
class Gems < Thor
# The Gems tasks deal directly with rubygems, either through remotely
# available sources (rubyforge for example) or by searching the
# system-wide gem cache for matching gems. The gems are installed from
# there into the desired gems dir (either system-wide or into the
# application's gems directory).
include MerbThorHelper
# Install a gem and its dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be installed locally into that directory.
#
# The option --cache will look in the system's gem cache
# for the latest version and install it in the apps' gems.
# This is particularly handy for gems that aren't available
# through rubyforge.org - like in-house merb slices etc.
#
# Examples:
#
# thor merb:gems:install merb-core
# thor merb:gems:install merb-core --cache
# thor merb:gems:install merb-core --version 0.9.7
# thor merb:gems:install merb-core --merb-root ./path/to/your/app
desc 'install GEM_NAME', 'Install a gem from rubygems'
method_options "--version" => :optional,
"--merb-root" => :optional,
"--cache" => :boolean,
"--binaries" => :boolean
def install(name, version = nil)
puts "Installing #{name}..."
opts = {}
# Explicit argument takes precedence over the --version option.
opts[:version] = version || options[:version]
opts[:cache] = options[:cache] if gem_dir
opts[:install_dir] = gem_dir if gem_dir
Merb.install_gem(name, opts)
ensure_local_bin_for(name) if options[:binaries]
rescue => e
puts "Failed to install #{name} (#{e.message})"
end
# Update a gem and its dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be installed locally into that directory.
#
# The option --cache will look in the system's gem cache
# for the latest version and install it in the apps' gems.
# This is particularly handy for gems that aren't available
# through rubyforge.org - like in-house merb slices etc.
#
# Examples:
#
# thor merb:gems:update merb-core
# thor merb:gems:update merb-core --cache
# thor merb:gems:update merb-core --merb-root ./path/to/your/app
desc 'update GEM_NAME', 'Update a gem from rubygems'
method_options "--merb-root" => :optional,
"--cache" => :boolean,
"--binaries" => :boolean
def update(name)
puts "Updating #{name}..."
opts = {}
if gem_dir
# Constrain to versions newer than the locally installed one.
if gemspec_path = Dir[File.join(gem_dir, 'specifications', "#{name}-*.gemspec")].last
gemspec = Gem::Specification.load(gemspec_path)
opts[:version] = Gem::Requirement.new [">#{gemspec.version}"]
end
opts[:install_dir] = gem_dir
opts[:cache] = options[:cache]
end
Merb.install_gem(name, opts)
ensure_local_bin_for(name) if options[:binaries]
rescue => e
puts "Failed to update #{name} (#{e.message})"
end
# Uninstall a gem - ignores dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be uninstalled locally from that directory.
#
# The --all option indicates that all versions of the gem should be
# uninstalled. If --version isn't set, and multiple versions are
# available, you will be prompted to pick one - or all.
#
# Examples:
#
# thor merb:gems:uninstall merb-core
# thor merb:gems:uninstall merb-core --all
# thor merb:gems:uninstall merb-core --version 0.9.7
# thor merb:gems:uninstall merb-core --merb-root ./path/to/your/app
desc 'uninstall GEM_NAME', 'Uninstall a gem'
method_options "--version" => :optional,
"--merb-root" => :optional,
"--all" => :boolean
def uninstall(name)
puts "Uninstalling #{name}..."
opts = {}
opts[:ignore] = true
opts[:all] = options[:all]
opts[:executables] = true
opts[:version] = options[:version]
opts[:install_dir] = gem_dir if gem_dir
Merb.uninstall_gem(name, opts)
rescue => e
puts "Failed to uninstall #{name} (#{e.message})"
end
# Completely remove a gem and all its versions - ignores dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be uninstalled locally from that directory.
#
# Examples:
#
# thor merb:gems:wipe merb-core
# thor merb:gems:wipe merb-core --merb-root ./path/to/your/app
desc 'wipe GEM_NAME', 'Remove a gem completely'
method_options "--merb-root" => :optional
def wipe(name)
puts "Wiping #{name}..."
opts = {}
opts[:ignore] = true
opts[:all] = true
opts[:executables] = true
opts[:install_dir] = gem_dir if gem_dir
Merb.uninstall_gem(name, opts)
rescue => e
puts "Failed to wipe #{name} (#{e.message})"
end
# This task should be executed as part of a deployment setup, where
# the deployment system runs this after the app has been installed.
# Usually triggered by Capistrano, God...
#
# It will regenerate gems from the bundled gems cache for any gem
# that has C extensions - which need to be recompiled for the target
# deployment platform.
desc 'redeploy', 'Recreate any binary gems on the target deployment platform'
def redeploy
require 'tempfile' # for Dir::tmpdir (tempfile pulls in tmpdir)
if File.directory?(specs_dir = File.join(gem_dir, 'specifications')) &&
File.directory?(cache_dir = File.join(gem_dir, 'cache'))
Dir[File.join(specs_dir, '*.gemspec')].each do |gemspec_path|
# Only gems with C extensions need rebuilding for the new platform.
unless (gemspec = Gem::Specification.load(gemspec_path)).extensions.empty?
if File.exists?(gem_file = File.join(cache_dir, "#{gemspec.full_name}.gem"))
gem_file_copy = File.join(Dir::tmpdir, File.basename(gem_file))
# Copy the gem to a temporary file, because otherwise RubyGems/FileUtils
# will complain about copying identical files (same source/destination).
FileUtils.cp(gem_file, gem_file_copy)
Merb.install_gem(gem_file_copy, :install_dir => gem_dir)
File.delete(gem_file_copy)
end
end
end
else
puts "No application local gems directory found"
end
end
end
class << self
# Canonical git URLs for the known Merb/DataMapper repositories.
# Memoized: every call returns the same Hash instance.
def default_repos
  @_default_repos ||= {
    'merb-core'    => 'git://github.com/wycats/merb-core.git',
    'merb-more'    => 'git://github.com/wycats/merb-more.git',
    'merb-plugins' => 'git://github.com/wycats/merb-plugins.git',
    'extlib'       => 'git://github.com/sam/extlib.git',
    'dm-core'      => 'git://github.com/sam/dm-core.git',
    'dm-more'      => 'git://github.com/sam/dm-more.git',
    'thor'         => 'git://github.com/wycats/thor.git'
  }
end
# Git repository sources - pass source_config option to load a yaml
# configuration file - defaults to ./config/git-sources.yml and
# ~/.merb/git-sources.yml - which you need to create yourself if desired.
#
# Example of contents:
#
# merb-core: git://github.com/myfork/merb-core.git
# merb-more: git://github.com/myfork/merb-more.git
def repos(source_config = nil)
source_config ||= begin
local_config = File.join(Dir.pwd, 'config', 'git-sources.yml')
# NOTE(review): File.join raises TypeError when both HOME and
# APPDATA are unset - confirm acceptable on such environments.
user_config = File.join(ENV["HOME"] || ENV["APPDATA"], '.merb', 'git-sources.yml')
File.exists?(local_config) ? local_config : user_config
end
# User-supplied entries override the built-in defaults.
if source_config && File.exists?(source_config)
default_repos.merge(YAML.load(File.read(source_config)))
else
default_repos
end
end
# Install a gem - looks remotely and local gem cache;
# won't process rdoc or ri options.
#
# options:: :cache routes to install_gem_from_cache; :version pins the
#           requested version; :install_dir targets an app-local gems
#           dir; the rest is passed to Gem::DependencyInstaller.
def install_gem(gem, options = {})
from_cache = (options.key?(:cache) && options.delete(:cache))
if from_cache
install_gem_from_cache(gem, options)
else
version = options.delete(:version)
Gem.configuration.update_sources = false
# Make sure rubygems sees the app-local specifications.
update_source_index(options[:install_dir]) if options[:install_dir]
installer = Gem::DependencyInstaller.new(options.merge(:user_install => false))
exception = nil
begin
installer.install gem, version
rescue Gem::InstallError => e
exception = e
rescue Gem::GemNotFoundException => e
# NOTE(review): from_cache is always false inside this else branch,
# so this cache fallback can never trigger (dead code) - confirm
# whether the guard should be dropped.
if from_cache && gem_file = find_gem_in_cache(gem, version)
puts "Located #{gem} in gem cache..."
installer.install gem_file
else
exception = e
end
rescue => e
exception = e
end
if installer.installed_gems.empty? && exception
puts "Failed to install gem '#{gem}' (#{exception.message})"
end
installer.installed_gems.each do |spec|
puts "Successfully installed #{spec.full_name}"
end
end
end
# Install a gem from the system's local gem cache instead of a remote
# source; won't process rdoc or ri options.
#
# gem::     gem name to look up in the cache.
# options:: :version picks the cached version; the rest is passed to
#           Gem::DependencyInstaller (:install_dir targets an app-local
#           gems dir).
def install_gem_from_cache(gem, options = {})
  version = options.delete(:version)
  Gem.configuration.update_sources = false
  installer = Gem::DependencyInstaller.new(options.merge(:user_install => false))
  exception = nil
  begin
    if gem_file = find_gem_in_cache(gem, version)
      puts "Located #{gem} in gem cache..."
      installer.install gem_file
    else
      raise Gem::InstallError, "Unknown gem #{gem}"
    end
  rescue Gem::InstallError => e
    exception = e
  end
  if installer.installed_gems.empty? && exception
    # Report via the captured exception, consistent with install_gem;
    # the previous code read the rescue-local `e` here, which only
    # worked because the rescue var leaks to method scope.
    puts "Failed to install gem '#{gem}' (#{exception.message})"
  end
  installer.installed_gems.each do |spec|
    puts "Successfully installed #{spec.full_name}"
  end
end
# Install a gem from source - builds and packages it first then installs it.
#
# Raises SourcePathMissing when gem_src_dir doesn't exist, GemPathMissing
# when :install_dir is given but missing, and GemInstallError when no
# installable package can be produced.
def install_gem_from_src(gem_src_dir, options = {})
raise SourcePathMissing unless File.directory?(gem_src_dir)
raise GemPathMissing if options[:install_dir] && !File.directory?(options[:install_dir])
gem_name = File.basename(gem_src_dir)
gem_pkg_dir = File.expand_path(File.join(gem_src_dir, 'pkg'))
# We need to use local bin executables if available.
thor = which('thor')
rake = which('rake')
# Handle pure Thor installation instead of Rake
if File.exists?(File.join(gem_src_dir, 'Thorfile'))
# Remove any existing packages.
FileUtils.rm_rf(gem_pkg_dir) if File.directory?(gem_pkg_dir)
# Create the package.
FileUtils.cd(gem_src_dir) { system("#{thor} :package") }
# Install the package using rubygems.
if package = Dir[File.join(gem_pkg_dir, "#{gem_name}-*.gem")].last
FileUtils.cd(File.dirname(package)) do
install_gem(File.basename(package), options.dup)
return
end
else
raise Merb::GemInstallError, "No package found for #{gem_name}"
end
# Handle standard installation through Rake
else
# Clean and regenerate any subgems for meta gems.
Dir[File.join(gem_src_dir, '*', 'Rakefile')].each do |rakefile|
FileUtils.cd(File.dirname(rakefile)) { system("#{rake} clobber_package; #{rake} package") }
end
# Handle the main gem install.
if File.exists?(File.join(gem_src_dir, 'Rakefile'))
# Remove any existing packages.
FileUtils.cd(gem_src_dir) { system("#{rake} clobber_package") }
# Create the main gem pkg dir if it doesn't exist.
FileUtils.mkdir_p(gem_pkg_dir) unless File.directory?(gem_pkg_dir)
# Copy any subgems to the main gem pkg dir.
Dir[File.join(gem_src_dir, '**', 'pkg', '*.gem')].each do |subgem_pkg|
FileUtils.cp(subgem_pkg, gem_pkg_dir)
end
# Finally generate the main package and install it; subgems
# (dependencies) are local to the main package.
FileUtils.cd(gem_src_dir) do
system("#{rake} package")
FileUtils.cd(gem_pkg_dir) do
if package = Dir[File.join(gem_pkg_dir, "#{gem_name}-*.gem")].last
# If the (meta) gem has it's own package, install it.
install_gem(File.basename(package), options.dup)
else
# Otherwise install each package seperately.
Dir["*.gem"].each { |gem| install_gem(gem, options.dup) }
end
end
return
end
end
end
# Only reached when no Thorfile and no Rakefile produced a package.
raise Merb::GemInstallError, "No Rakefile found for #{gem_name}"
end
# Uninstall a gem.
#
# options:: passed to Gem::Uninstaller; a plain :version string is
#           normalized to an exact Gem::Requirement first, and
#           :install_dir triggers a refresh of the local source index.
def uninstall_gem(gem, options = {})
if options[:version] && !options[:version].is_a?(Gem::Requirement)
options[:version] = Gem::Requirement.new ["= #{options[:version]}"]
end
update_source_index(options[:install_dir]) if options[:install_dir]
Gem::Uninstaller.new(gem, options).uninstall
end
# Command prefix for privilege escalation: "sudo " on Unix-like
# platforms, "" on Windows (which has no sudo). Memoized.
#
# Fix: the bare PLATFORM constant was removed in Ruby 1.9, so the old
# `PLATFORM =~ ... rescue nil` check always yielded nil there and
# prepended sudo even on Windows. RUBY_PLATFORM exists on all versions.
def sudo
  @_sudo ||= begin
    windows = RUBY_PLATFORM =~ /mswin|mingw|win32|cygwin/
    windows ? "" : "sudo "
  end
end
# Resolve +executable+ to the app-local ./bin copy when one exists and
# is executable; otherwise return the bare name so the shell resolves
# it via PATH.
def which(executable)
  local = File.join(Dir.pwd, 'bin', executable)
  File.executable?(local) ? local : executable
end
private
# Locate a .gem file for +gem+ in the system gem cache; returns the
# path, or nil when no matching spec or cached file exists. With no
# +version+ the highest installed version is used.
#
# NOTE(review): relies on the legacy Gem.source_index /
# spec.installation_path APIs (removed in RubyGems 2.0) - this only
# works on the old rubygems this file targets.
def find_gem_in_cache(gem, version)
spec = if version
version = Gem::Requirement.new ["= #{version}"] unless version.is_a?(Gem::Requirement)
Gem.source_index.find_name(gem, version).first
else
Gem.source_index.find_name(gem).sort_by { |g| g.version }.last
end
if spec && File.exists?(gem_file = "#{spec.installation_path}/cache/#{spec.full_name}.gem")
gem_file
end
end
# Refresh rubygems' in-memory source index from the gemspecs in
# dir/specifications, so freshly installed app-local gems are visible
# (legacy Gem.source_index API).
def update_source_index(dir)
Gem.source_index.load_gems_in(File.join(dir, 'specifications'))
end
end
class Tasks < Thor
include MerbThorHelper
# Install Thor, Rake and RSpec into the local gems dir, by copying it from
# the system-wide rubygems cache - which is OK since we needed it to run
# this task already.
#
# After this we don't need the system-wide rubygems anymore, as all required
# executables are available in the local ./bin directory.
#
# RSpec is needed here because source installs might fail when running
# rake tasks where spec/rake/spectask has been required.
desc 'setup', 'Install Thor, Rake and RSpec in the local gems dir'
method_options "--merb-root" => :optional
def setup
# Refuse to run via the local bin/thor wrapper we are about to rewrite.
if $0 =~ /^(\.\/)?bin\/thor$/
puts "You cannot run the setup from #{$0} - try #{File.basename($0)} merb:tasks:setup instead"
return
end
create_if_missing(File.join(working_dir, 'gems'))
Merb.install_gem('thor', :cache => true, :install_dir => gem_dir)
Merb.install_gem('rake', :cache => true, :install_dir => gem_dir)
Merb.install_gem('rspec', :cache => true, :install_dir => gem_dir)
ensure_local_bin_for('thor', 'rake', 'rspec')
end
# Get the latest merb.thor and install it into the working dir.
desc 'update', 'Fetch the latest merb.thor and install it locally'
def update
require 'open-uri'
url = 'http://merbivore.com/merb.thor'
# NOTE(review): bare Kernel#open on a URL was removed in Ruby 3.0
# (use URI.open there) - fine on the old rubies this file targets.
remote_file = open(url)
File.open(File.join(working_dir, 'merb.thor'), 'w') do |f|
f.write(remote_file.read)
end
puts "Installed the latest merb.thor"
rescue OpenURI::HTTPError
puts "Error opening #{url}"
rescue => e
puts "An error occurred (#{e.message})"
end
end
end
Tiny fix to #add_slice (don't call to_sym on namespace)
#!/usr/bin/env ruby
require 'rubygems'
require 'rubygems/dependency_installer'
require 'rubygems/uninstaller'
require 'rubygems/dependency'
require 'thor'
require 'fileutils'
require 'yaml'
# Shared private helpers mixed into the Thor task classes below:
# path resolution relative to the working dir (or --merb-root) and
# management of the app-local src/, gems/ and bin/ directories.
module MerbThorHelper
private
# The current working directory, or Merb app root (--merb-root option).
def working_dir
@_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)
end
# We should have a ./src dir for local and system-wide management.
# Created on first access; memoized.
def source_dir
@_source_dir ||= File.join(working_dir, 'src')
create_if_missing(@_source_dir)
@_source_dir
end
# If a local ./gems dir is found, it means we are in a Merb app.
def application?
gem_dir
end
# If a local ./gems dir is found, return it (nil otherwise).
def gem_dir
if File.directory?(dir = File.join(working_dir, 'gems'))
dir
end
end
# If we're in a Merb app, we can have a ./bin directory;
# create it if it's not there. Returns nil outside an app.
def bin_dir
@_bin_dir ||= begin
if gem_dir
dir = File.join(working_dir, 'bin')
create_if_missing(dir)
dir
end
end
end
# Helper to create dir unless it exists.
def create_if_missing(path)
FileUtils.mkdir(path) unless File.exists?(path)
end
# Create a modified executable wrapper in the app's ./bin directory.
# For each gem: find its newest local gemspec, then write a 0755
# wrapper script for every executable that gem installed.
def ensure_local_bin_for(*gems)
if bin_dir && File.directory?(bin_dir)
gems.each do |gem|
if gemspec_path = Dir[File.join(gem_dir, 'specifications', "#{gem}-*.gemspec")].last
spec = Gem::Specification.load(gemspec_path)
spec.executables.each do |exec|
if File.exists?(executable = File.join(gem_dir, 'bin', exec))
local_executable = File.join(bin_dir, exec)
puts "Adding local executable #{local_executable}"
File.open(local_executable, 'w', 0755) do |f|
f.write(executable_wrapper(spec, exec))
end
end
end
end
end
end
end
# Build the source text of a bin/ wrapper script for +bin_file_name+,
# an executable of gem +spec+. The heredoc is emitted verbatim into
# the wrapper file - do not add comments inside it.
def executable_wrapper(spec, bin_file_name)
<<-TEXT
#!/usr/bin/env #{RbConfig::CONFIG["ruby_install_name"]}
#
# This file was generated by merb.thor.
#
# The application '#{spec.name}' is installed as part of a gem, and
# this file is here to facilitate running it.
#
begin
require 'minigems'
rescue LoadError
require 'rubygems'
end
if File.directory?(gems_dir = File.join(File.dirname(__FILE__), '..', 'gems'))
$BUNDLE = true; Gem.clear_paths; Gem.path.unshift(gems_dir)
end
version = "#{Gem::Requirement.default}"
if ARGV.first =~ /^_(.*)_$/ and Gem::Version.correct? $1 then
version = $1
ARGV.shift
end
gem '#{spec.name}', version
load '#{bin_file_name}'
TEXT
end
end
# TODO
# - pulling a specific UUID/Tag (gitspec hash) with clone/update
# - a 'deploy' task (in addition to 'redeploy' ?)
# - eventually take a --orm option for the 'merb-stack' type of tasks
# - add merb:gems:refresh to refresh all gems (from specifications)
# - merb:gems:uninstall should remove local bin/ entries
class Merb < Thor
# Error types used by the gem/source management code in this class.
# NOTE(review): these subclass Exception rather than StandardError, so a
# bare `rescue` will NOT catch them - confirm that is intentional.
class SourcePathMissing < Exception
end
class GemPathMissing < Exception
end
class GemInstallError < Exception
end
class GemUninstallError < Exception
end
class Dependencies < Thor
include MerbThorHelper
# List all dependencies by extracting them from the actual application;
# will differentiate between locally available gems and system gems.
# Local gems will be shown with the installed gem version numbers.
desc 'list', 'List all application dependencies'
method_options "--merb-root" => :optional,
"--local" => :boolean,
"--system" => :boolean
def list
partitioned = { :local => [], :system => [] }
extract_dependencies.each do |dependency|
if gem_dir && !(versions = find_dependency_versions(dependency)).empty?
partitioned[:local] << "#{dependency} [#{versions.join(', ')}]"
else
partitioned[:system] << "#{dependency}"
end
end
none = options[:system].nil? && options[:local].nil?
if (options[:system] || none) && !partitioned[:system].empty?
puts "System dependencies:"
partitioned[:system].each { |str| puts "- #{str}" }
end
if (options[:local] || none) && !partitioned[:local].empty?
puts "Local dependencies:"
partitioned[:local].each { |str| puts "- #{str}" }
end
end
# Retrieve all application dependencies and store them in a local
# configuration file at ./config/dependencies.yml
#
# The format of this YAML file is as follows:
# - merb_helpers (>= 0, runtime)
# - merb-slices (> 0.9.4, runtime)
desc 'configure', 'Retrieve and store dependencies in ./config/dependencies.yml'
method_options "--merb-root" => :optional,
"--force" => :boolean
def configure
entries = extract_dependencies.map { |d| d.to_s }
FileUtils.mkdir_p(config_dir) unless File.directory?(config_dir)
config = YAML.dump(entries)
if File.exists?(config_file) && !options[:force]
puts "File already exists! Use --force to overwrite."
else
File.open(config_file, 'w') { |f| f.write config }
puts "Written #{config_file}:"
end
puts config
rescue
puts "Failed to write to #{config_file}"
end
# Install the gems listed in dependencies.yml from RubyForge (stable).
desc 'install', 'Install the gems listed in ./config/dependencies.yml'
method_options "--merb-root" => :optional
def install
if File.exists?(config_file)
dependencies = parse_dependencies_yaml(File.read(config_file))
gems = Gems.new
gems.options = options
dependencies.each do |dependency|
gems.install(dependency.name, dependency.version_requirements.to_s)
end
else
puts "No configuration file found at #{config_file}"
puts "Please run merb:dependencies:configure first."
end
end
protected
def config_dir
@_config_dir ||= File.join(working_dir, 'config')
end
def config_file
@_config_file ||= File.join(config_dir, 'dependencies.yml')
end
# Find local gems and return matched version numbers, newest first.
#
# Scans the local gem_dir specifications for gemspecs whose filename
# matches the dependency's name, extracting the version segment.
def find_dependency_versions(dependency)
  versions = []
  specs = Dir[File.join(gem_dir, 'specifications', "#{dependency.name}-*.gemspec")]
  specs.each do |gemspec_path|
    # Guard against a nil capture (previously a non-matching filename
    # pushed nil, which made the subsequent sort raise).
    if version = gemspec_path[/-([\d\.]+)\.gemspec$/, 1]
      versions << version
    end
  end
  # Plain String#sort is lexicographic and misorders multi-digit
  # segments ("0.9.10" < "0.9.2"); compare with Gem::Version semantics.
  versions.sort_by { |v| Gem::Version.new(v) }.reverse
end
# Extract the runtime dependencies by starting the application in runner mode.
#
# Shells out to `merb -r ...` so the application's own boot process
# (Merb::BootLoader) resolves the dependency list, then scrapes the YAML
# document that the runner prints to stdout.
#
# Returns an array of Gem::Dependency objects, or [] when no YAML
# document marker ('--- ') is found in the output.
def extract_dependencies
FileUtils.cd(working_dir) do
cmd = ["require 'yaml';"]
cmd << "dependencies = Merb::BootLoader::Dependencies.dependencies"
cmd << "entries = dependencies.map { |d| d.to_s }"
cmd << "puts YAML.dump(entries)"
# Run the joined script inside the application via merb's -r (runner) flag.
output = `merb -r "#{cmd.join("\n")}"`
# Everything before the '--- ' marker is boot noise; the YAML doc follows it.
if index = (lines = output.split(/\n/)).index('--- ')
yaml = lines.slice(index, lines.length - 1).join("\n")
return parse_dependencies_yaml(yaml)
end
end
return []
end
# Parse the basic YAML config data, and process Gem::Dependency output.
# Formatting example: merb_helpers (>= 0.9.8, runtime)
def parse_dependencies_yaml(yaml)
  entries = YAML.load(yaml) rescue []
  entries.inject([]) do |dependencies, entry|
    matches = entry.match(/^(\S+) \(([^,]+)?, ([^\)]+)\)/)
    if matches
      name, version_req, type = matches.captures
      dependencies << Gem::Dependency.new(name, version_req, type.to_sym)
    else
      puts "Invalid entry: #{entry}"
    end
    dependencies
  end
end
end
# Install a Merb stack from stable RubyForge gems. Optionally install a
# suitable Rack adapter/server when setting --adapter to one of the
# following: mongrel, emongrel, thin or ebb.
desc 'stable', 'Install extlib, merb-core and merb-more from rubygems'
method_options "--merb-root" => :optional,
               "--adapter"   => :optional
def stable
  known_adapters = %w[mongrel emongrel thin ebb]
  installer = Stable.new
  installer.options = options
  # Nothing more to do unless the whole core+more stack installed.
  return unless installer.core && installer.more
  puts "Installed extlib, merb-core and merb-more"
  adapter = options[:adapter]
  return unless adapter
  if known_adapters.include?(adapter) && installer.refresh_from_gems(adapter)
    puts "Installed #{adapter}"
  else
    puts "Please specify one of the following adapters: #{known_adapters.join(' ')}"
  end
end
class Stable < Thor
  # The Stable tasks deal with known -stable- gems; available
  # as shortcuts to Merb and DataMapper gems.
  #
  # These gems are pulled from rubyforge and installed into the
  # desired gems dir (either system-wide or into the application's
  # gems directory).

  include MerbThorHelper

  # Gets latest gem versions from RubyForge and installs them.
  #
  # Examples:
  #
  # thor merb:edge:core
  # thor merb:edge:core --merb-root ./path/to/your/app
  # thor merb:edge:core --sources ./path/to/sources.yml

  desc 'core', 'Install extlib and merb-core from rubygems'
  method_options "--merb-root" => :optional
  def core
    refresh_from_gems 'extlib', 'merb-core'
    ensure_local_bin_for('merb-core', 'rake', 'rspec', 'thor')
  end

  desc 'more', 'Install merb-more from rubygems'
  method_options "--merb-root" => :optional
  def more
    refresh_from_gems 'merb-more'
    ensure_local_bin_for('merb-gen')
  end

  desc 'plugins', 'Install merb-plugins from rubygems'
  method_options "--merb-root" => :optional
  def plugins
    refresh_from_gems 'merb-plugins'
  end

  desc 'dm_core', 'Install dm-core from rubygems'
  method_options "--merb-root" => :optional
  def dm_core
    refresh_from_gems 'extlib', 'dm-core'
  end

  desc 'dm_more', 'Install dm-more from rubygems'
  method_options "--merb-root" => :optional
  def dm_more
    refresh_from_gems 'extlib', 'dm-core', 'dm-more'
  end

  # Pull each named gem from RubyForge and install it; true only when
  # every single install succeeds.
  def refresh_from_gems(*components)
    installer = Gems.new
    installer.options = options
    components.all? { |component| installer.install(component) }
  end
end
# Retrieve latest Merb versions from git and optionally install them.
#
# Note: the --sources option takes a path to a YAML file
# with a regular Hash mapping gem names to git urls.
#
# Examples:
#
# thor merb:edge
# thor merb:edge --install
# thor merb:edge --merb-root ./path/to/your/app
# thor merb:edge --sources ./path/to/sources.yml
desc 'edge', 'Install extlib, merb-core and merb-more from git HEAD'
method_options "--merb-root" => :optional,
               "--sources"   => :optional,
               "--install"   => :boolean
def edge
  updater = Edge.new
  updater.options = options
  # Run the standard edge tasks in their usual order.
  updater.core
  updater.more
  updater.custom
end
class Edge < Thor
  # The Edge tasks deal with known gems from the bleeding edge; available
  # as shortcuts to Merb and DataMapper gems.
  #
  # Source is pulled from git and optionally installed into the desired
  # gems dir (either system-wide or into the application's gems directory).

  include MerbThorHelper

  # Gets latest gem versions from git - optionally installs them.
  #
  # Note: the --sources option takes a path to a YAML file
  # with a regular Hash mapping gem names to git urls,
  # allowing pulling forks of the official repositories.
  #
  # Examples:
  #
  # thor merb:edge:core
  # thor merb:edge:core --install
  # thor merb:edge:core --merb-root ./path/to/your/app
  # thor merb:edge:core --sources ./path/to/sources.yml

  desc 'core', 'Update extlib and merb-core from git HEAD'
  method_options "--merb-root" => :optional,
                 "--sources"   => :optional,
                 "--install"   => :boolean
  def core
    refresh_from_source 'thor', 'extlib', 'merb-core'
    ensure_local_bin_for('merb-core', 'rake', 'rspec', 'thor')
  end

  desc 'more', 'Update merb-more from git HEAD'
  method_options "--merb-root" => :optional,
                 "--sources"   => :optional,
                 "--install"   => :boolean
  def more
    refresh_from_source 'merb-more'
    ensure_local_bin_for('merb-gen')
  end

  desc 'plugins', 'Update merb-plugins from git HEAD'
  method_options "--merb-root" => :optional,
                 "--sources"   => :optional,
                 "--install"   => :boolean
  def plugins
    refresh_from_source 'merb-plugins'
  end

  desc 'dm_core', 'Update dm-core from git HEAD'
  method_options "--merb-root" => :optional,
                 "--sources"   => :optional,
                 "--install"   => :boolean
  def dm_core
    refresh_from_source 'extlib', 'dm-core'
  end

  desc 'dm_more', 'Update dm-more from git HEAD'
  method_options "--merb-root" => :optional,
                 "--sources"   => :optional,
                 "--install"   => :boolean
  def dm_more
    refresh_from_source 'extlib', 'dm-core', 'dm-more'
  end

  desc 'custom', 'Update all the custom repos from git HEAD'
  method_options "--merb-root" => :optional,
                 "--sources"   => :optional,
                 "--install"   => :boolean
  def custom
    # Anything configured beyond the stock repos counts as custom.
    refresh_from_source(*(Merb.repos.keys - Merb.default_repos.keys))
  end

  private

  # Clone/update each component from git, then build and install it
  # when --install was given.
  def refresh_from_source(*components)
    src = Source.new
    src.options = options
    components.each do |component|
      src.clone(component)
      src.install(component) if options[:install]
    end
  end
end
class Source < Thor
# The Source tasks deal with gem source packages - mainly from github.
# Any directory inside ./src is regarded as a gem that can be packaged
# and installed from there into the desired gems dir (either system-wide
# or into the application's gems directory).
include MerbThorHelper
# Install a particular gem from source.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be installed locally into that directory.
#
# Note that this task doesn't retrieve any (new) source from git;
# To update and install you'd execute the following two tasks:
#
# thor merb:source:update merb-core
# thor merb:source:install merb-core
#
# Alternatively, look at merb:edge and merb:edge:* with --install.
#
# Examples:
#
# thor merb:source:install merb-core
# thor merb:source:install merb-more
# thor merb:source:install merb-more/merb-slices
# thor merb:source:install merb-plugins/merb_helpers
# thor merb:source:install merb-core --merb-root ./path/to/your/app
desc 'install GEM_NAME', 'Install a rubygem from (git) source'
method_options "--merb-root" => :optional
def install(name)
puts "Installing #{name}..."
gem_src_dir = File.join(source_dir, name)
opts = {}
# Install into the app-local gems dir when one is available.
opts[:install_dir] = gem_dir if gem_dir
Merb.install_gem_from_src(gem_src_dir, opts)
rescue Merb::SourcePathMissing
puts "Missing rubygem source path: #{gem_src_dir}"
rescue Merb::GemPathMissing
puts "Missing rubygems path: #{gem_dir}"
rescue => e
puts "Failed to install #{name} (#{e.message})"
end
# Clone a git repository into ./src. The repository can be
# a direct git url or a known -named- repository.
#
# Examples:
#
# thor merb:source:clone dm-core
# thor merb:source:clone dm-core --sources ./path/to/sources.yml
# thor merb:source:clone git://github.com/sam/dm-core.git
desc 'clone REPOSITORY', 'Clone a git repository into ./src'
method_options "--sources" => :optional
def clone(repository)
# Accept either a raw git:// url or a name from the repo registry.
if repository =~ /^git:\/\//
repository_url = repository
elsif url = Merb.repos(options[:sources])[repository]
repository_url = url
end
if repository_url
# e.g. "merb-core" out of ".../merb-core.git"
repository_name = repository_url[/([\w+|-]+)\.git/u, 1]
# The account segment of the url, used to label fork remotes/branches.
fork_name = repository_url[/.com\/+?(.+)\/.+\.git/u, 1]
local_repo_path = "#{source_dir}/#{repository_name}"
if File.directory?(local_repo_path)
puts "\n#{repository_name} repository exists, updating or branching instead of cloning..."
FileUtils.cd(local_repo_path) do
# to avoid conflicts we need to set a remote branch for non official repos
existing_repos = `git remote -v`.split("\n").map{|branch| branch.split(/\s+/)}
# NOTE(review): assumes an "origin" remote exists; detect returns nil
# otherwise and .last would raise - confirm against the expected layout.
origin_repo_url = existing_repos.detect{ |r| r.first == "origin" }.last
# pull from the original repository - no branching needed
if repository_url == origin_repo_url
puts "Pulling from #{repository_url}"
system %{
git fetch
git checkout master
git rebase origin/master
}
# update and switch to a branch for a particular github fork
elsif existing_repos.map{ |r| r.last }.include?(repository_url)
puts "Switching to remote branch: #{fork_name}"
`git checkout -b #{fork_name} #{fork_name}/master`
`git rebase #{fork_name}/master`
# create a new remote branch for a particular github fork
else
puts "Add a new remote branch: #{fork_name}"
`git remote add -f #{fork_name} #{repository_url}`
# NOTE(review): "-b#{fork_name}" has no space after -b; git accepts the
# glued short-option form, so behavior is unchanged.
`git checkout -b#{fork_name} #{fork_name}/master`
end
end
else
# Fresh checkout: shallow clone to keep ./src small.
FileUtils.cd(source_dir) do
puts "\nCloning #{repository_name} repository from #{repository_url}..."
system("git clone --depth 1 #{repository_url} ")
end
end
else
puts "No valid repository url given"
end
end
# Update a specific gem source directory from git. See #clone.
desc 'update REPOSITORY', 'Update a git repository in ./src'
alias :update :clone
# Update all gem sources from git - based on the current branch.
desc 'refresh', 'Pull fresh copies of all source gems'
def refresh
repos = Dir["#{source_dir}/*"]
repos.each do |repo|
# Only touch directories that are actual git checkouts.
next unless File.directory?(repo) && File.exists?(File.join(repo, '.git'))
FileUtils.cd(repo) do
puts "Refreshing #{File.basename(repo)}"
system %{git fetch}
# Scrape the current branch name out of `git branch` via sed.
# NOTE(review): backslashes inside this %{} literal are consumed by Ruby
# before sed sees them - confirm the pipeline still yields the branch name.
branch = `git branch --no-color 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/(\1) /'`[/\* (.+)/, 1]
system %{git rebase #{branch}}
end
end
end
end
class Gems < Thor
# The Gems tasks deal directly with rubygems, either through remotely
# available sources (rubyforge for example) or by searching the
# system-wide gem cache for matching gems. The gems are installed from
# there into the desired gems dir (either system-wide or into the
# application's gems directory).
include MerbThorHelper
# Install a gem and its dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be installed locally into that directory.
#
# The option --cache will look in the system's gem cache
# for the latest version and install it in the apps' gems.
# This is particularly handy for gems that aren't available
# through rubyforge.org - like in-house merb slices etc.
#
# Examples:
#
# thor merb:gems:install merb-core
# thor merb:gems:install merb-core --cache
# thor merb:gems:install merb-core --version 0.9.7
# thor merb:gems:install merb-core --merb-root ./path/to/your/app
desc 'install GEM_NAME', 'Install a gem from rubygems'
method_options "--version" => :optional,
"--merb-root" => :optional,
"--cache" => :boolean,
"--binaries" => :boolean
def install(name, version = nil)
puts "Installing #{name}..."
opts = {}
# An explicit version argument wins over the --version option.
opts[:version] = version || options[:version]
# Cache lookup and a custom install dir only apply with app-local gems.
opts[:cache] = options[:cache] if gem_dir
opts[:install_dir] = gem_dir if gem_dir
Merb.install_gem(name, opts)
# Optionally expose the gem's executables in the app's local ./bin.
ensure_local_bin_for(name) if options[:binaries]
rescue => e
puts "Failed to install #{name} (#{e.message})"
end
# Update a gem and its dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be installed locally into that directory.
#
# The option --cache will look in the system's gem cache
# for the latest version and install it in the apps' gems.
# This is particularly handy for gems that aren't available
# through rubyforge.org - like in-house merb slices etc.
#
# Examples:
#
# thor merb:gems:update merb-core
# thor merb:gems:update merb-core --cache
# thor merb:gems:update merb-core --merb-root ./path/to/your/app
desc 'update GEM_NAME', 'Update a gem from rubygems'
method_options "--merb-root" => :optional,
"--cache" => :boolean,
"--binaries" => :boolean
def update(name)
puts "Updating #{name}..."
opts = {}
if gem_dir
# Constrain the install to something newer than the latest
# locally installed gemspec for this name, when one exists.
if gemspec_path = Dir[File.join(gem_dir, 'specifications', "#{name}-*.gemspec")].last
gemspec = Gem::Specification.load(gemspec_path)
opts[:version] = Gem::Requirement.new [">#{gemspec.version}"]
end
opts[:install_dir] = gem_dir
opts[:cache] = options[:cache]
end
Merb.install_gem(name, opts)
ensure_local_bin_for(name) if options[:binaries]
rescue => e
puts "Failed to update #{name} (#{e.message})"
end
# Uninstall a gem - ignores dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be uninstalled locally from that directory.
#
# The --all option indicates that all versions of the gem should be
# uninstalled. If --version isn't set, and multiple versions are
# available, you will be prompted to pick one - or all.
#
# Examples:
#
# thor merb:gems:uninstall merb-core
# thor merb:gems:uninstall merb-core --all
# thor merb:gems:uninstall merb-core --version 0.9.7
# thor merb:gems:uninstall merb-core --merb-root ./path/to/your/app
desc 'uninstall GEM_NAME', 'Uninstall a gem'
method_options "--version" => :optional,
"--merb-root" => :optional,
"--all" => :boolean
def uninstall(name)
puts "Uninstalling #{name}..."
opts = {}
# :ignore skips the dependency check; executables are removed too.
opts[:ignore] = true
opts[:all] = options[:all]
opts[:executables] = true
opts[:version] = options[:version]
opts[:install_dir] = gem_dir if gem_dir
Merb.uninstall_gem(name, opts)
rescue => e
puts "Failed to uninstall #{name} (#{e.message})"
end
# Completely remove a gem and all its versions - ignores dependencies.
#
# If a local ./gems dir is found, or --merb-root is given
# the gems will be uninstalled locally from that directory.
#
# Examples:
#
# thor merb:gems:wipe merb-core
# thor merb:gems:wipe merb-core --merb-root ./path/to/your/app
desc 'wipe GEM_NAME', 'Remove a gem completely'
method_options "--merb-root" => :optional
def wipe(name)
puts "Wiping #{name}..."
opts = {}
# Like uninstall, but always removes every installed version.
opts[:ignore] = true
opts[:all] = true
opts[:executables] = true
opts[:install_dir] = gem_dir if gem_dir
Merb.uninstall_gem(name, opts)
rescue => e
puts "Failed to wipe #{name} (#{e.message})"
end
# This task should be executed as part of a deployment setup, where
# the deployment system runs this after the app has been installed.
# Usually triggered by Capistrano, God...
#
# It will regenerate gems from the bundled gems cache for any gem
# that has C extensions - which need to be recompiled for the target
# deployment platform.
desc 'redeploy', 'Recreate any binary gems on the target deployment platform'
def redeploy
require 'tempfile' # for Dir::tmpdir used below
if File.directory?(specs_dir = File.join(gem_dir, 'specifications')) &&
File.directory?(cache_dir = File.join(gem_dir, 'cache'))
Dir[File.join(specs_dir, '*.gemspec')].each do |gemspec_path|
# Only gems with C extensions need rebuilding on the target platform.
unless (gemspec = Gem::Specification.load(gemspec_path)).extensions.empty?
if File.exists?(gem_file = File.join(cache_dir, "#{gemspec.full_name}.gem"))
gem_file_copy = File.join(Dir::tmpdir, File.basename(gem_file))
# Copy the gem to a temporary file, because otherwise RubyGems/FileUtils
# will complain about copying identical files (same source/destination).
FileUtils.cp(gem_file, gem_file_copy)
Merb.install_gem(gem_file_copy, :install_dir => gem_dir)
File.delete(gem_file_copy)
end
end
end
else
puts "No application local gems directory found"
end
end
end
class << self

  # Default Git repositories for the official Merb/DataMapper stack.
  def default_repos
    @_default_repos ||= {
      'merb-core'    => "git://github.com/wycats/merb-core.git",
      'merb-more'    => "git://github.com/wycats/merb-more.git",
      'merb-plugins' => "git://github.com/wycats/merb-plugins.git",
      'extlib'       => "git://github.com/sam/extlib.git",
      'dm-core'      => "git://github.com/sam/dm-core.git",
      'dm-more'      => "git://github.com/sam/dm-more.git",
      'thor'         => "git://github.com/wycats/thor.git"
    }
  end

  # Git repository sources - pass source_config option to load a yaml
  # configuration file - defaults to ./config/git-sources.yml and
  # ~/.merb/git-sources.yml - which need to create yourself if desired.
  #
  # Example of contents:
  #
  # merb-core: git://github.com/myfork/merb-core.git
  # merb-more: git://github.com/myfork/merb-more.git
  def repos(source_config = nil)
    source_config ||= begin
      local_config = File.join(Dir.pwd, 'config', 'git-sources.yml')
      user_config  = File.join(ENV["HOME"] || ENV["APPDATA"], '.merb', 'git-sources.yml')
      File.exists?(local_config) ? local_config : user_config
    end
    if source_config && File.exists?(source_config)
      # User-specified urls override the defaults on a per-gem basis.
      default_repos.merge(YAML.load(File.read(source_config)))
    else
      default_repos
    end
  end

  # Install a gem - looks remotely and local gem cache;
  # won't process rdoc or ri options.
  def install_gem(gem, options = {})
    from_cache = (options.key?(:cache) && options.delete(:cache))
    if from_cache
      install_gem_from_cache(gem, options)
    else
      version = options.delete(:version)
      Gem.configuration.update_sources = false
      # Make app-local specifications visible before resolving.
      update_source_index(options[:install_dir]) if options[:install_dir]
      installer = Gem::DependencyInstaller.new(options.merge(:user_install => false))
      exception = nil
      begin
        installer.install gem, version
      rescue Gem::InstallError => e
        exception = e
      rescue Gem::GemNotFoundException => e
        # NOTE(review): from_cache is always false on this code path (the
        # true case returned via install_gem_from_cache above), so this
        # cache fallback is unreachable - confirm whether it was intended.
        if from_cache && gem_file = find_gem_in_cache(gem, version)
          puts "Located #{gem} in gem cache..."
          installer.install gem_file
        else
          exception = e
        end
      rescue => e
        exception = e
      end
      if installer.installed_gems.empty? && exception
        puts "Failed to install gem '#{gem}' (#{exception.message})"
      end
      installer.installed_gems.each do |spec|
        puts "Successfully installed #{spec.full_name}"
      end
    end
  end

  # Install a gem - looks in the system's gem cache instead of remotely;
  # won't process rdoc or ri options.
  def install_gem_from_cache(gem, options = {})
    version = options.delete(:version)
    Gem.configuration.update_sources = false
    installer = Gem::DependencyInstaller.new(options.merge(:user_install => false))
    exception = nil
    begin
      if gem_file = find_gem_in_cache(gem, version)
        puts "Located #{gem} in gem cache..."
        installer.install gem_file
      else
        raise Gem::InstallError, "Unknown gem #{gem}"
      end
    rescue Gem::InstallError => e
      exception = e
    end
    if installer.installed_gems.empty? && exception
      # FIX: previously interpolated e.message; use the captured exception
      # for consistency with install_gem and to avoid relying on the
      # rescue-local variable outside its rescue clause.
      puts "Failed to install gem '#{gem}' (#{exception.message})"
    end
    installer.installed_gems.each do |spec|
      puts "Successfully installed #{spec.full_name}"
    end
  end

  # Install a gem from source - builds and packages it first then installs it.
  #
  # Raises Merb::SourcePathMissing / Merb::GemPathMissing when the source
  # checkout or the target gem dir is absent, and Merb::GemInstallError
  # when no package could be produced.
  def install_gem_from_src(gem_src_dir, options = {})
    raise SourcePathMissing unless File.directory?(gem_src_dir)
    raise GemPathMissing if options[:install_dir] && !File.directory?(options[:install_dir])
    gem_name = File.basename(gem_src_dir)
    gem_pkg_dir = File.expand_path(File.join(gem_src_dir, 'pkg'))
    # We need to use local bin executables if available.
    thor = which('thor')
    rake = which('rake')
    # Handle pure Thor installation instead of Rake
    if File.exists?(File.join(gem_src_dir, 'Thorfile'))
      # Remove any existing packages.
      FileUtils.rm_rf(gem_pkg_dir) if File.directory?(gem_pkg_dir)
      # Create the package.
      FileUtils.cd(gem_src_dir) { system("#{thor} :package") }
      # Install the package using rubygems.
      if package = Dir[File.join(gem_pkg_dir, "#{gem_name}-*.gem")].last
        FileUtils.cd(File.dirname(package)) do
          install_gem(File.basename(package), options.dup)
          return
        end
      else
        raise Merb::GemInstallError, "No package found for #{gem_name}"
      end
    # Handle standard installation through Rake
    else
      # Clean and regenerate any subgems for meta gems.
      Dir[File.join(gem_src_dir, '*', 'Rakefile')].each do |rakefile|
        FileUtils.cd(File.dirname(rakefile)) { system("#{rake} clobber_package; #{rake} package") }
      end
      # Handle the main gem install.
      if File.exists?(File.join(gem_src_dir, 'Rakefile'))
        # Remove any existing packages.
        FileUtils.cd(gem_src_dir) { system("#{rake} clobber_package") }
        # Create the main gem pkg dir if it doesn't exist.
        FileUtils.mkdir_p(gem_pkg_dir) unless File.directory?(gem_pkg_dir)
        # Copy any subgems to the main gem pkg dir.
        Dir[File.join(gem_src_dir, '**', 'pkg', '*.gem')].each do |subgem_pkg|
          FileUtils.cp(subgem_pkg, gem_pkg_dir)
        end
        # Finally generate the main package and install it; subgems
        # (dependencies) are local to the main package.
        FileUtils.cd(gem_src_dir) do
          system("#{rake} package")
          FileUtils.cd(gem_pkg_dir) do
            if package = Dir[File.join(gem_pkg_dir, "#{gem_name}-*.gem")].last
              # If the (meta) gem has its own package, install it.
              install_gem(File.basename(package), options.dup)
            else
              # Otherwise install each package separately.
              Dir["*.gem"].each { |gem| install_gem(gem, options.dup) }
            end
          end
          return
        end
      end
    end
    raise Merb::GemInstallError, "No Rakefile found for #{gem_name}"
  end

  # Uninstall a gem, normalizing a plain version string into an exact
  # Gem::Requirement first.
  def uninstall_gem(gem, options = {})
    if options[:version] && !options[:version].is_a?(Gem::Requirement)
      options[:version] = Gem::Requirement.new ["= #{options[:version]}"]
    end
    update_source_index(options[:install_dir]) if options[:install_dir]
    Gem::Uninstaller.new(gem, options).uninstall
  end

  # Will prepend sudo on a suitable platform.
  def sudo
    @_sudo ||= begin
      # PLATFORM was removed in Ruby 1.9; fall back to RUBY_PLATFORM so
      # Windows detection keeps working on newer rubies (the old
      # `rescue nil` silently prepended sudo on Windows there).
      platform = defined?(PLATFORM) ? PLATFORM : RUBY_PLATFORM
      windows = platform =~ /win32|cygwin/ rescue nil
      windows ? "" : "sudo "
    end
  end

  # Use the local bin/* executables if available.
  def which(executable)
    if File.executable?(exec = File.join(Dir.pwd, 'bin', executable))
      exec
    else
      executable
    end
  end

  private

  # Look up a gem spec (optionally version-constrained) in the local source
  # index and return the path of its cached .gem file, or nil.
  # NOTE(review): Gem.source_index was removed in RubyGems 2.0 - this only
  # works on era-appropriate RubyGems; confirm before running on 2.x+.
  def find_gem_in_cache(gem, version)
    spec = if version
      version = Gem::Requirement.new ["= #{version}"] unless version.is_a?(Gem::Requirement)
      Gem.source_index.find_name(gem, version).first
    else
      Gem.source_index.find_name(gem).sort_by { |g| g.version }.last
    end
    if spec && File.exists?(gem_file = "#{spec.installation_path}/cache/#{spec.full_name}.gem")
      gem_file
    end
  end

  # Teach the in-process source index about gems installed under dir.
  def update_source_index(dir)
    Gem.source_index.load_gems_in(File.join(dir, 'specifications'))
  end
end
class Tasks < Thor
include MerbThorHelper
# Install Thor, Rake and RSpec into the local gems dir, by copying it from
# the system-wide rubygems cache - which is OK since we needed it to run
# this task already.
#
# After this we don't need the system-wide rubygems anymore, as all required
# executables are available in the local ./bin directory.
#
# RSpec is needed here because source installs might fail when running
# rake tasks where spec/rake/spectask has been required.
desc 'setup', 'Install Thor, Rake and RSpec in the local gems dir'
method_options "--merb-root" => :optional
def setup
# Refuse to run from the local bin/thor that this task itself creates.
if $0 =~ /^(\.\/)?bin\/thor$/
puts "You cannot run the setup from #{$0} - try #{File.basename($0)} merb:tasks:setup instead"
return
end
create_if_missing(File.join(working_dir, 'gems'))
# :cache => true copies the gems from the system-wide gem cache.
Merb.install_gem('thor', :cache => true, :install_dir => gem_dir)
Merb.install_gem('rake', :cache => true, :install_dir => gem_dir)
Merb.install_gem('rspec', :cache => true, :install_dir => gem_dir)
ensure_local_bin_for('thor', 'rake', 'rspec')
end
# Get the latest merb.thor and install it into the working dir.
desc 'update', 'Fetch the latest merb.thor and install it locally'
def update
require 'open-uri'
url = 'http://merbivore.com/merb.thor'
# NOTE(review): relies on open-uri extending Kernel#open for URLs, which
# was removed in Ruby 3.0 (URI.open there) - confirm target Ruby version.
remote_file = open(url)
File.open(File.join(working_dir, 'merb.thor'), 'w') do |f|
f.write(remote_file.read)
end
puts "Installed the latest merb.thor"
rescue OpenURI::HTTPError
puts "Error opening #{url}"
rescue => e
puts "An error occurred (#{e.message})"
end
end
end
end |
Pod::Spec.new do |s|
s.name = 'MMWeChatSDK'
s.version = '1.7.1'
s.license = { :type => 'Copyright', :text => 'Copyright (c) 2012 Tencent. All rights reserved.' }
s.summary = 'The Official iOS SDK of WeChat from Tencent.'
s.description = <<-DESC
1、iOS 9系统策略更新,限制了http协议的访问,此外应用需要在“Info.plist”中将要使用的URL Schemes列为白名单,才可正常检查其他应用是否安装。
受此影响,当你的应用在iOS 9中需要使用微信SDK的相关能力(分享、收藏、支付、登录等)时,需要在“Info.plist”里增加如下代码:
<key>LSApplicationQueriesSchemes</key>
<array>
<string>weixin</string>
</array>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
或者配置白名单域名pingma.qq.com
2、开发者需要在工程中链接上 CoreTelephony.framework
3、解决bitcode编译不过问题
DESC
s.homepage = 'https://open.weixin.qq.com/cgi-bin/showdocument?action=dir_list&t=resource/res_list&verify=1&id=1417694084&token=&lang=zh_CN'
s.authors = { 'Tencent' => 'developer@wechatapp.com' }
s.source = { :http => 'https://res.wx.qq.com/open/zh_CN/htmledition/res/dev/download/sdk/WeChatSDK1.7.1.zip' }
s.ios.deployment_target = '4.3'
s.requires_arc = true
# FIX: the 1.7.1 zip extracts into OpenSDK1.7.1/, not SDK1.7.1/ - point
# the vendored library and header glob at the directory that exists.
s.vendored_libraries = 'OpenSDK1.7.1/libWeChatSDK.a'
s.source_files = 'OpenSDK1.7.1/*.h'
s.frameworks = [
'Foundation',
'SystemConfiguration',
'CoreTelephony'
]
s.libraries = [
'z',
'c++',
'sqlite3.0',
]
end
修正 spec 中的路径错误(SDK1.7.1 → OpenSDK1.7.1)
# Podspec for the official WeChat iOS SDK (prebuilt binary distribution).
Pod::Spec.new do |s|
  s.name    = 'MMWeChatSDK'
  s.version = '1.7.1'
  s.license = { :type => 'Copyright', :text => 'Copyright (c) 2012 Tencent. All rights reserved.' }
  s.summary = 'The Official iOS SDK of WeChat from Tencent.'
  s.description = <<-DESC
1、iOS 9系统策略更新,限制了http协议的访问,此外应用需要在“Info.plist”中将要使用的URL Schemes列为白名单,才可正常检查其他应用是否安装。
受此影响,当你的应用在iOS 9中需要使用微信SDK的相关能力(分享、收藏、支付、登录等)时,需要在“Info.plist”里增加如下代码:
<key>LSApplicationQueriesSchemes</key>
<array>
<string>weixin</string>
</array>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
或者配置白名单域名pingma.qq.com
2、开发者需要在工程中链接上 CoreTelephony.framework
3、解决bitcode编译不过问题
  DESC
  s.homepage = 'https://open.weixin.qq.com/cgi-bin/showdocument?action=dir_list&t=resource/res_list&verify=1&id=1417694084&token=&lang=zh_CN'
  s.authors  = { 'Tencent' => 'developer@wechatapp.com' }
  s.source   = { :http => 'https://res.wx.qq.com/open/zh_CN/htmledition/res/dev/download/sdk/WeChatSDK1.7.1.zip' }

  s.ios.deployment_target = '4.3'
  s.requires_arc          = true

  # Prebuilt static library plus its public headers, as shipped in the zip.
  s.vendored_libraries = 'OpenSDK1.7.1/libWeChatSDK.a'
  s.source_files       = 'OpenSDK1.7.1/*.h'

  s.frameworks = %w[Foundation SystemConfiguration CoreTelephony]
  s.libraries  = %w[z c++ sqlite3.0]
end
# Podspec for MTMigration 0.0.2.
Pod::Spec.new do |s|
  s.name     = "MTMigration"
  s.version  = "0.0.2"
  s.summary  = "Manages blocks of code that only need to run once on version updates in iOS apps."
  s.homepage = "https://github.com/mysterioustrousers/MTMigration"
  s.license  = 'MIT'
  s.author   = { "Parker Wightman" => "parkerwightman@gmail.com" }
  s.source   = { :git => "https://github.com/mysterioustrousers/MTMigration.git", :tag => "0.0.2" }

  s.platform     = :ios, '5.0'
  s.source_files = 'MTMigration/MTMigration.{h,m}'
  s.requires_arc = true
end
Updating to 0.0.3
# Podspec for MTMigration 0.0.3.
Pod::Spec.new do |s|
  s.name         = "MTMigration"
  s.version      = "0.0.3"
  s.summary      = "Manages blocks of code that only need to run once on version updates in iOS apps."
  s.homepage     = "https://github.com/mysterioustrousers/MTMigration"
  s.license      = 'MIT'
  s.author       = { "Parker Wightman" => "parkerwightman@gmail.com" }
  # Derive the tag from s.version so the two cannot drift apart on the
  # next version bump (previously the tag string was duplicated by hand).
  s.source       = { :git => "https://github.com/mysterioustrousers/MTMigration.git", :tag => s.version.to_s }
  s.platform     = :ios, '5.0'
  s.source_files = 'MTMigration/MTMigration.{h,m}'
  s.requires_arc = true
end
|
namespace :assets do
desc "Compile all the assets named in config.assets.precompile"
task :precompile do
# We need to do this dance because RAILS_GROUPS is used
# too early in the boot process and changing here is already too late.
if ENV["RAILS_GROUPS"].to_s.empty? || ENV["RAILS_ENV"].to_s.empty?
ENV["RAILS_GROUPS"] ||= "assets"
ENV["RAILS_ENV"] ||= "production"
# Re-exec this same rake invocation so the env vars are set at boot time.
ruby $0, *ARGV
else
require "fileutils"
Rake::Task["tmp:cache:clear"].invoke
# Boot the app only far enough for asset work (:assets group).
Rails.application.initialize!(:assets)
Sprockets::Bootstrap.new(Rails.application).run
unless Rails.application.config.assets.enabled
raise "Cannot precompile assets if sprockets is disabled. Please set config.assets.enabled to true"
end
# Ensure that action view is loaded and the appropriate sprockets hooks get executed
ActionView::Base
# Always compile files
Rails.application.config.assets.compile = true
config = Rails.application.config
env = Rails.application.assets
# Compiled output lands under public/<assets prefix>.
target = File.join(Rails.public_path, config.assets.prefix)
static_compiler = Sprockets::StaticCompiler.new(env, target, :digest => config.assets.digest)
manifest = static_compiler.precompile(config.assets.precompile)
# Persist the logical->digest filename map for runtime asset lookup.
manifest_path = config.assets.manifest || target
FileUtils.mkdir_p(manifest_path)
File.open("#{manifest_path}/manifest.yml", 'wb') do |f|
YAML.dump(manifest, f)
end
end
end
desc "Remove compiled assets"
task :clean => [:environment, 'tmp:cache:clear'] do
config = Rails.application.config
public_asset_path = File.join(Rails.public_path, config.assets.prefix)
# :secure guards against symlink attacks while removing the tree.
rm_rf public_asset_path, :secure => true
end
end
Encapsulate assets initialization in its own rake task.
namespace :assets do
desc "Compile all the assets named in config.assets.precompile"
task :precompile do
# We need to do this dance because RAILS_GROUPS is used
# too early in the boot process and changing here is already too late.
if ENV["RAILS_GROUPS"].to_s.empty? || ENV["RAILS_ENV"].to_s.empty?
ENV["RAILS_GROUPS"] ||= "assets"
ENV["RAILS_ENV"] ||= "production"
# Re-exec this same rake invocation so the env vars are set at boot time.
ruby $0, *ARGV
else
require "fileutils"
Rake::Task["tmp:cache:clear"].invoke
# Shared app/Sprockets initialization lives in assets:environment below.
Rake::Task["assets:environment"].invoke
unless Rails.application.config.assets.enabled
raise "Cannot precompile assets if sprockets is disabled. Please set config.assets.enabled to true"
end
# Ensure that action view is loaded and the appropriate sprockets hooks get executed
ActionView::Base
# Always compile files
Rails.application.config.assets.compile = true
config = Rails.application.config
env = Rails.application.assets
# Compiled output lands under public/<assets prefix>.
target = File.join(Rails.public_path, config.assets.prefix)
static_compiler = Sprockets::StaticCompiler.new(env, target, :digest => config.assets.digest)
manifest = static_compiler.precompile(config.assets.precompile)
# Persist the logical->digest filename map for runtime asset lookup.
manifest_path = config.assets.manifest || target
FileUtils.mkdir_p(manifest_path)
File.open("#{manifest_path}/manifest.yml", 'wb') do |f|
YAML.dump(manifest, f)
end
end
end
desc "Remove compiled assets"
task :clean => ['assets:environment', 'tmp:cache:clear'] do
config = Rails.application.config
public_asset_path = File.join(Rails.public_path, config.assets.prefix)
# :secure guards against symlink attacks while removing the tree.
rm_rf public_asset_path, :secure => true
end
# Shared initialization: boot only the :assets group and wire up Sprockets.
task :environment do
Rails.application.initialize!(:assets)
Sprockets::Bootstrap.new(Rails.application).run
end
end
|
# Test-suite bootstrap: wire up load paths and shared requires for the
# ActiveResource tests.
require File.expand_path('../../../load_paths', __FILE__)
lib = File.expand_path("#{File.dirname(__FILE__)}/../lib")
$:.unshift(lib) unless $:.include?('lib') || $:.include?(lib)
require 'test/unit'
require 'active_resource'
require 'active_support'
require 'active_support/test_case'
require 'setter_trap'
require 'logger'
# Log all ActiveResource traffic next to this test file for debugging.
ActiveResource::Base.logger = Logger.new("#{File.dirname(__FILE__)}/debug.log")
# ruby-debug is optional; ignore it when the gem is not installed.
begin
require 'ruby-debug'
rescue LoadError
end
def setup_response
matz_hash = { 'person' => { :id => 1, :name => 'Matz' } }
@default_request_headers = { 'Content-Type' => 'application/json' }
@matz = matz_hash.to_json
@matz_xml = matz_hash.to_xml
@david = { :person => { :id => 2, :name => 'David' } }.to_json
@greg = { :person => { :id => 3, :name => 'Greg' } }.to_json
@addy = { :address => { :id => 1, :street => '12345 Street', :country => 'Australia' } }.to_json
@rick = { :person => { :name => "Rick", :age => 25 } }.to_json
@joe = { :person => { :id => 6, :name => 'Joe', :likes_hats => true }}.to_json
@people = { :people => [ { :person => { :id => 1, :name => 'Matz' } }, { :person => { :id => 2, :name => 'David' } }] }.to_json
@people_david = { :people => [ { :person => { :id => 2, :name => 'David' } }] }.to_json
@addresses = { :addresses => [{ :address => { :id => 1, :street => '12345 Street', :country => 'Australia' } }] }.to_json
# - deep nested resource -
# - Luis (Customer)
# - JK (Customer::Friend)
# - Mateo (Customer::Friend::Brother)
# - Edith (Customer::Friend::Brother::Child)
# - Martha (Customer::Friend::Brother::Child)
# - Felipe (Customer::Friend::Brother)
# - Bryan (Customer::Friend::Brother::Child)
# - Luke (Customer::Friend::Brother::Child)
# - Eduardo (Customer::Friend)
# - Sebas (Customer::Friend::Brother)
# - Andres (Customer::Friend::Brother::Child)
# - Jorge (Customer::Friend::Brother::Child)
# - Elsa (Customer::Friend::Brother)
# - Natacha (Customer::Friend::Brother::Child)
# - Milena (Customer::Friend::Brother)
#
@luis = {
:customer => {
:id => 1,
:name => 'Luis',
:friends => [{
:name => 'JK',
:brothers => [
{
:name => 'Mateo',
:children => [{ :name => 'Edith' },{ :name => 'Martha' }]
}, {
:name => 'Felipe',
:children => [{ :name => 'Bryan' },{ :name => 'Luke' }]
}
]
}, {
:name => 'Eduardo',
:brothers => [
{
:name => 'Sebas',
:children => [{ :name => 'Andres' },{ :name => 'Jorge' }]
}, {
:name => 'Elsa',
:children => [{ :name => 'Natacha' }]
}, {
:name => 'Milena',
:children => []
}
]
}]
}
}.to_json
# - resource with yaml array of strings; for ARs using serialize :bar, Array
@marty = <<-eof.strip
<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<person>
<id type=\"integer\">5</id>
<name>Marty</name>
<colors type=\"yaml\">---
- \"red\"
- \"green\"
- \"blue\"
</colors>
</person>
eof
@startup_sound = {
:sound => {
:name => "Mac Startup Sound", :author => { :name => "Jim Reekes" }
}
}.to_json
ActiveResource::HttpMock.respond_to do |mock|
mock.get "/people/1.json", {}, @matz
mock.get "/people/1.xml", {}, @matz_xml
mock.get "/people/2.xml", {}, @david
mock.get "/people/5.xml", {}, @marty
mock.get "/people/Greg.json", {}, @greg
mock.get "/people/6.json", {}, @joe
mock.get "/people/4.json", { 'key' => 'value' }, nil, 404
mock.put "/people/1.json", {}, nil, 204
mock.delete "/people/1.json", {}, nil, 200
mock.delete "/people/2.xml", {}, nil, 400
mock.get "/people/99.json", {}, nil, 404
mock.post "/people.json", {}, @rick, 201, 'Location' => '/people/5.xml'
mock.get "/people.json", {}, @people
mock.get "/people/1/addresses.json", {}, @addresses
mock.get "/people/1/addresses/1.json", {}, @addy
mock.get "/people/1/addresses/2.xml", {}, nil, 404
mock.get "/people/2/addresses.json", {}, nil, 404
mock.get "/people/2/addresses/1.xml", {}, nil, 404
mock.get "/people/Greg/addresses/1.json", {}, @addy
mock.put "/people/1/addresses/1.json", {}, nil, 204
mock.delete "/people/1/addresses/1.json", {}, nil, 200
mock.post "/people/1/addresses.json", {}, nil, 201, 'Location' => '/people/1/addresses/5'
mock.get "/people/1/addresses/99.json", {}, nil, 404
mock.get "/people//addresses.xml", {}, nil, 404
mock.get "/people//addresses/1.xml", {}, nil, 404
mock.put "/people//addresses/1.xml", {}, nil, 404
mock.delete "/people//addresses/1.xml", {}, nil, 404
mock.post "/people//addresses.xml", {}, nil, 404
mock.head "/people/1.json", {}, nil, 200
mock.head "/people/Greg.json", {}, nil, 200
mock.head "/people/99.json", {}, nil, 404
mock.head "/people/1/addresses/1.json", {}, nil, 200
mock.head "/people/1/addresses/2.json", {}, nil, 404
mock.head "/people/2/addresses/1.json", {}, nil, 404
mock.head "/people/Greg/addresses/1.json", {}, nil, 200
# customer
mock.get "/customers/1.json", {}, @luis
# sound
mock.get "/sounds/1.json", {}, @startup_sound
end
Person.user = nil
Person.password = nil
end
Not loading ruby-debug specially.
require File.expand_path('../../../load_paths', __FILE__)
lib = File.expand_path("#{File.dirname(__FILE__)}/../lib")
$:.unshift(lib) unless $:.include?('lib') || $:.include?(lib)
require 'test/unit'
require 'active_resource'
require 'active_support'
require 'active_support/test_case'
require 'setter_trap'
require 'logger'
ActiveResource::Base.logger = Logger.new("#{File.dirname(__FILE__)}/debug.log")
def setup_response
matz_hash = { 'person' => { :id => 1, :name => 'Matz' } }
@default_request_headers = { 'Content-Type' => 'application/json' }
@matz = matz_hash.to_json
@matz_xml = matz_hash.to_xml
@david = { :person => { :id => 2, :name => 'David' } }.to_json
@greg = { :person => { :id => 3, :name => 'Greg' } }.to_json
@addy = { :address => { :id => 1, :street => '12345 Street', :country => 'Australia' } }.to_json
@rick = { :person => { :name => "Rick", :age => 25 } }.to_json
@joe = { :person => { :id => 6, :name => 'Joe', :likes_hats => true }}.to_json
@people = { :people => [ { :person => { :id => 1, :name => 'Matz' } }, { :person => { :id => 2, :name => 'David' } }] }.to_json
@people_david = { :people => [ { :person => { :id => 2, :name => 'David' } }] }.to_json
@addresses = { :addresses => [{ :address => { :id => 1, :street => '12345 Street', :country => 'Australia' } }] }.to_json
# - deep nested resource -
# - Luis (Customer)
# - JK (Customer::Friend)
# - Mateo (Customer::Friend::Brother)
# - Edith (Customer::Friend::Brother::Child)
# - Martha (Customer::Friend::Brother::Child)
# - Felipe (Customer::Friend::Brother)
# - Bryan (Customer::Friend::Brother::Child)
# - Luke (Customer::Friend::Brother::Child)
# - Eduardo (Customer::Friend)
# - Sebas (Customer::Friend::Brother)
# - Andres (Customer::Friend::Brother::Child)
# - Jorge (Customer::Friend::Brother::Child)
# - Elsa (Customer::Friend::Brother)
# - Natacha (Customer::Friend::Brother::Child)
# - Milena (Customer::Friend::Brother)
#
@luis = {
:customer => {
:id => 1,
:name => 'Luis',
:friends => [{
:name => 'JK',
:brothers => [
{
:name => 'Mateo',
:children => [{ :name => 'Edith' },{ :name => 'Martha' }]
}, {
:name => 'Felipe',
:children => [{ :name => 'Bryan' },{ :name => 'Luke' }]
}
]
}, {
:name => 'Eduardo',
:brothers => [
{
:name => 'Sebas',
:children => [{ :name => 'Andres' },{ :name => 'Jorge' }]
}, {
:name => 'Elsa',
:children => [{ :name => 'Natacha' }]
}, {
:name => 'Milena',
:children => []
}
]
}]
}
}.to_json
# - resource with yaml array of strings; for ARs using serialize :bar, Array
@marty = <<-eof.strip
<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<person>
<id type=\"integer\">5</id>
<name>Marty</name>
<colors type=\"yaml\">---
- \"red\"
- \"green\"
- \"blue\"
</colors>
</person>
eof
@startup_sound = {
:sound => {
:name => "Mac Startup Sound", :author => { :name => "Jim Reekes" }
}
}.to_json
ActiveResource::HttpMock.respond_to do |mock|
mock.get "/people/1.json", {}, @matz
mock.get "/people/1.xml", {}, @matz_xml
mock.get "/people/2.xml", {}, @david
mock.get "/people/5.xml", {}, @marty
mock.get "/people/Greg.json", {}, @greg
mock.get "/people/6.json", {}, @joe
mock.get "/people/4.json", { 'key' => 'value' }, nil, 404
mock.put "/people/1.json", {}, nil, 204
mock.delete "/people/1.json", {}, nil, 200
mock.delete "/people/2.xml", {}, nil, 400
mock.get "/people/99.json", {}, nil, 404
mock.post "/people.json", {}, @rick, 201, 'Location' => '/people/5.xml'
mock.get "/people.json", {}, @people
mock.get "/people/1/addresses.json", {}, @addresses
mock.get "/people/1/addresses/1.json", {}, @addy
mock.get "/people/1/addresses/2.xml", {}, nil, 404
mock.get "/people/2/addresses.json", {}, nil, 404
mock.get "/people/2/addresses/1.xml", {}, nil, 404
mock.get "/people/Greg/addresses/1.json", {}, @addy
mock.put "/people/1/addresses/1.json", {}, nil, 204
mock.delete "/people/1/addresses/1.json", {}, nil, 200
mock.post "/people/1/addresses.json", {}, nil, 201, 'Location' => '/people/1/addresses/5'
mock.get "/people/1/addresses/99.json", {}, nil, 404
mock.get "/people//addresses.xml", {}, nil, 404
mock.get "/people//addresses/1.xml", {}, nil, 404
mock.put "/people//addresses/1.xml", {}, nil, 404
mock.delete "/people//addresses/1.xml", {}, nil, 404
mock.post "/people//addresses.xml", {}, nil, 404
mock.head "/people/1.json", {}, nil, 200
mock.head "/people/Greg.json", {}, nil, 200
mock.head "/people/99.json", {}, nil, 404
mock.head "/people/1/addresses/1.json", {}, nil, 200
mock.head "/people/1/addresses/2.json", {}, nil, 404
mock.head "/people/2/addresses/1.json", {}, nil, 404
mock.head "/people/Greg/addresses/1.json", {}, nil, 200
# customer
mock.get "/customers/1.json", {}, @luis
# sound
mock.get "/sounds/1.json", {}, @startup_sound
end
Person.user = nil
Person.password = nil
end
|
require File.expand_path('../boot', __FILE__)
<% unless options[:skip_activerecord] -%>
require 'rails/all'
<% else -%>
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
<% end -%>
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module <%= app_const_base %>
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W( #{config.root}/extras )
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Add your default javascripts
<% if options[:skip_prototype] -%>
config.action_view.javascript_expansions[:defaults] = []
<% else -%>
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
<% end -%>
# Configure generators values. Many other options are available, be sure to check the documentation.
# config.generators do |g|
# g.orm :active_record
# g.template_engine :erb
# g.test_framework :test_unit, :fixture => true
# end
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
end
end
Minor pass over the generated code in application.rb related to the :defaults JavaScript expansions.
require File.expand_path('../boot', __FILE__)
<% unless options[:skip_activerecord] -%>
require 'rails/all'
<% else -%>
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
<% end -%>
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module <%= app_const_base %>
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
<% if options[:skip_prototype] -%>
config.action_view.javascript_expansions[:defaults] = %w()
<% else -%>
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
<% end -%>
# Configure generators values. Many other options are available, be sure to check the documentation.
# config.generators do |g|
# g.orm :active_record
# g.template_engine :erb
# g.test_framework :test_unit, :fixture => true
# end
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
end
end
|
# -*- encoding: us-ascii -*-
class File
def self.path(obj)
return obj.to_path if obj.respond_to? :to_path
StringValue(obj)
end
def initialize(path_or_fd, mode=undefined, perm=undefined, options=undefined)
if path_or_fd.kind_of? Integer
super(path_or_fd, mode, options)
@path = nil
else
path = Rubinius::Type.coerce_to_path path_or_fd
if options.equal?(undefined)
options = Rubinius::Type.try_convert(perm, Hash, :to_hash)
perm = undefined if options
end
nmode, binary, external, internal = IO.normalize_options(mode, options)
nmode ||= "r"
perm = 0666 if perm.equal? undefined
fd = IO.sysopen(path, nmode, perm)
if fd < 0
begin
Errno.handle path
rescue Errno::EMFILE
# true means force to run, don't ignore it.
GC.run(true)
fd = IO.sysopen(path, nmode, perm)
Errno.handle if fd < 0
end
end
@path = path
super(fd, mode, options)
end
end
private :initialize
def size
raise IOError, "closed stream" if closed?
stat.size
end
# Returns an absolute path for +obj+, resolved against +dir+ (or the
# current working directory when +dir+ is nil).
#
# Unlike File.expand_path, File.absolute_path must treat a leading "~"
# literally instead of expanding it to the home directory (Rubinius
# issue #1877), so such paths are anchored at Dir.getwd directly.
def self.absolute_path(obj, dir = nil)
  obj = path(obj)
  if obj[0] == "~"
    File.join Dir.getwd, dir.to_s, obj
  else
    expand_path(obj, dir)
  end
end
def self.world_readable?(path)
path = Rubinius::Type.coerce_to_path path
return nil unless exists? path
mode = Stat.new(path).mode
if (mode & Stat::S_IROTH) == Stat::S_IROTH
tmp = mode & (Stat::S_IRUGO | Stat::S_IWUGO | Stat::S_IXUGO)
return Rubinius::Type.coerce_to tmp, Fixnum, :to_int
end
nil
end
# Returns the world-accessible permission bits (as an Integer) when the
# file at +path+ is writable by others, otherwise nil. Returns nil for a
# nonexistent path.
#
# Mirrors File.world_readable? exactly, including the explicit trailing
# nil (the original fell through implicitly; the twin method spells it
# out, so do the same here for consistency).
def self.world_writable?(path)
  path = Rubinius::Type.coerce_to_path path
  return nil unless exists? path
  mode = Stat.new(path).mode
  if (mode & Stat::S_IWOTH) == Stat::S_IWOTH
    tmp = mode & (Stat::S_IRUGO | Stat::S_IWUGO | Stat::S_IXUGO)
    return Rubinius::Type.coerce_to tmp, Fixnum, :to_int
  end
  nil
end
alias_method :to_path, :path
def self.realpath(path, basedir = nil)
path = expand_path(path, basedir || Dir.pwd)
real = ''
symlinks = {}
while !path.empty?
pos = path.index(SEPARATOR, 1)
if pos
name = path[0...pos]
path = path[pos..-1]
else
name = path
path = ''
end
real = join(real, name)
if symlink?(real)
raise Errno::ELOOP if symlinks[real]
symlinks[real] = true
if path.empty?
path = expand_path(readlink(real))
else
path = expand_path(join(readlink(real), path))
end
real = ''
end
end
real
end
end
class File::Stat
  # Returns the world-accessible permission bits (owner/group/other
  # rwx mask) when the file is readable by others, nil otherwise.
  def world_readable?
    mode = @stat[:st_mode]
    return nil unless (mode & S_IROTH) == S_IROTH
    Rubinius::Type.coerce_to(mode & (S_IRUGO | S_IWUGO | S_IXUGO), Fixnum, :to_int)
  end

  # Returns the world-accessible permission bits (owner/group/other
  # rwx mask) when the file is writable by others, nil otherwise.
  def world_writable?
    mode = @stat[:st_mode]
    return nil unless (mode & S_IWOTH) == S_IWOTH
    Rubinius::Type.coerce_to(mode & (S_IRUGO | S_IWUGO | S_IXUGO), Fixnum, :to_int)
  end
end
Expand "~" from File.absolute_path properly.
Fixes #1877.
# -*- encoding: us-ascii -*-
class File
def self.path(obj)
return obj.to_path if obj.respond_to? :to_path
StringValue(obj)
end
def initialize(path_or_fd, mode=undefined, perm=undefined, options=undefined)
if path_or_fd.kind_of? Integer
super(path_or_fd, mode, options)
@path = nil
else
path = Rubinius::Type.coerce_to_path path_or_fd
if options.equal?(undefined)
options = Rubinius::Type.try_convert(perm, Hash, :to_hash)
perm = undefined if options
end
nmode, binary, external, internal = IO.normalize_options(mode, options)
nmode ||= "r"
perm = 0666 if perm.equal? undefined
fd = IO.sysopen(path, nmode, perm)
if fd < 0
begin
Errno.handle path
rescue Errno::EMFILE
# true means force to run, don't ignore it.
GC.run(true)
fd = IO.sysopen(path, nmode, perm)
Errno.handle if fd < 0
end
end
@path = path
super(fd, mode, options)
end
end
private :initialize
def size
raise IOError, "closed stream" if closed?
stat.size
end
def self.absolute_path(obj, dir = nil)
obj = path(obj)
if obj[0] == "~"
File.join Dir.getwd, dir.to_s, obj
else
expand_path(obj, dir)
end
end
def self.world_readable?(path)
path = Rubinius::Type.coerce_to_path path
return nil unless exists? path
mode = Stat.new(path).mode
if (mode & Stat::S_IROTH) == Stat::S_IROTH
tmp = mode & (Stat::S_IRUGO | Stat::S_IWUGO | Stat::S_IXUGO)
return Rubinius::Type.coerce_to tmp, Fixnum, :to_int
end
nil
end
def self.world_writable?(path)
path = Rubinius::Type.coerce_to_path path
return nil unless exists? path
mode = Stat.new(path).mode
if (mode & Stat::S_IWOTH) == Stat::S_IWOTH
tmp = mode & (Stat::S_IRUGO | Stat::S_IWUGO | Stat::S_IXUGO)
return Rubinius::Type.coerce_to tmp, Fixnum, :to_int
end
end
alias_method :to_path, :path
def self.realpath(path, basedir = nil)
path = expand_path(path, basedir || Dir.pwd)
real = ''
symlinks = {}
while !path.empty?
pos = path.index(SEPARATOR, 1)
if pos
name = path[0...pos]
path = path[pos..-1]
else
name = path
path = ''
end
real = join(real, name)
if symlink?(real)
raise Errno::ELOOP if symlinks[real]
symlinks[real] = true
if path.empty?
path = expand_path(readlink(real))
else
path = expand_path(join(readlink(real), path))
end
real = ''
end
end
real
end
end
class File::Stat
def world_readable?
if (@stat[:st_mode] & S_IROTH) == S_IROTH
tmp = @stat[:st_mode] & (S_IRUGO | S_IWUGO | S_IXUGO)
return Rubinius::Type.coerce_to tmp, Fixnum, :to_int
end
end
def world_writable?
if (@stat[:st_mode] & S_IWOTH) == S_IWOTH
tmp = @stat[:st_mode] & (S_IRUGO | S_IWUGO | S_IXUGO)
return Rubinius::Type.coerce_to tmp, Fixnum, :to_int
end
end
end
|
unless Rubinius::Config['hash.hamt']
class Hash
include Enumerable
class State
attr_accessor :head
attr_accessor :tail
def self.from(state)
new_state = new
new_state.compare_by_identity if state and state.compare_by_identity?
new_state
end
def initialize
@compare_by_identity = false
@head = nil
@tail = nil
end
def compare_by_identity?
@compare_by_identity
end
def compare_by_identity
@compare_by_identity = true
class << self
def match?(this_key, this_hash, other_key, other_hash)
other_key.equal? this_key
end
end
self
end
def match?(this_key, this_hash, other_key, other_hash)
other_hash == this_hash and other_key.eql? this_key
end
end
# Bucket stores key, value pairs in Hash. The key's hash
# is also cached in the item and recalculated when the
# Hash#rehash method is called.
class Bucket
attr_accessor :key
attr_accessor :key_hash
attr_accessor :value
attr_accessor :link
attr_accessor :previous
attr_accessor :next
attr_accessor :state
def initialize(key, key_hash, value, state)
@key = key
@key_hash = key_hash
@value = value
@link = nil
@state = state
if tail = state.tail
@previous = tail
state.tail = tail.next = self
else
state.head = state.tail = self
end
end
def delete(key, key_hash)
if @state.match? @key, @key_hash, key, key_hash
remove
self
end
end
def remove
if @previous
@previous.next = @next
else
@state.head = @next
end
if @next
@next.previous = @previous
else
@state.tail = @previous
end
end
end
# An external iterator that returns entries in insertion order. While
# somewhat following the API of Enumerator, it is named Iterator because it
# does not provide <code>#each</code> and should not conflict with
# +Enumerator+ in MRI 1.8.7+. Returned by <code>Hash#to_iter</code>.
class Iterator
def initialize(state)
@state = state
end
# Returns the next object or +nil+.
#
# Pass nil to start iterating from the head of the insertion-ordered
# bucket chain; pass the previously returned bucket to advance.
def next(item)
if item
# When item.next is nil the chain is exhausted: the assignment is
# falsy, so we fall through and the method returns nil implicitly.
return item if item = item.next
else
return @state.head
end
end
end
# Hash methods
attr_reader :size
# #entries is a method provided by Enumerable which calls #to_a,
# so we have to not collide with that.
def __entries__
@entries
end
attr_reader :capacity
attr_reader :max_entries
alias_method :length, :size
Entries = Rubinius::Tuple
# Initial size of Hash. MUST be a power of 2.
MIN_SIZE = 16
# Allocate more storage when this full. This value grows with
# the size of the Hash so that the max load factor is 0.75.
MAX_ENTRIES = 12
# Overridden in lib/1.8.7 or lib/1.9
def self.[](*args)
if args.size == 1
obj = args.first
if obj.kind_of? Hash
return new.replace(obj)
elsif obj.respond_to? :to_hash
return new.replace(Rubinius::Type.coerce_to(obj, Hash, :to_hash))
elsif obj.is_a?(Array) # See redmine # 1385
h = new
args.first.each do |arr|
next unless arr.respond_to? :to_ary
arr = arr.to_ary
next unless (1..2).include? arr.size
h[arr.at(0)] = arr.at(1)
end
return h
end
end
return new if args.empty?
if args.size & 1 == 1
raise ArgumentError, "Expected an even number, got #{args.length}"
end
hash = new
i = 0
total = args.size
while i < total
hash[args[i]] = args[i+1]
i += 2
end
hash
end
def self.try_convert(obj)
Rubinius::Type.try_convert obj, Hash, :to_hash
end
def self.new_from_literal(size)
new
end
# Creates a fully-formed instance of Hash.
def self.allocate
hash = super()
Rubinius.privately { hash.__setup__ }
hash
end
def ==(other)
return true if self.equal? other
unless other.kind_of? Hash
return false unless other.respond_to? :to_hash
return other == self
end
return false unless other.size == size
Thread.detect_recursion self, other do
each_item do |item|
other_item = other.find_item(item.key)
# Other doesn't even have this key
return false unless other_item
# Order of the comparison matters! We must compare our value with
# the other Hash's value and not the other way around.
return false unless item.value == other_item.value
end
end
true
end
def eql?(other)
# Just like ==, but uses eql? to compare values.
return true if self.equal? other
unless other.kind_of? Hash
return false unless other.respond_to? :to_hash
return other.eql?(self)
end
return false unless other.size == size
Thread.detect_recursion self, other do
each_item do |item|
other_item = other.find_item(item.key)
# Other doesn't even have this key
return false unless other_item
# Order of the comparison matters! We must compare our value with
# the other Hash's value and not the other way around.
return false unless item.value.eql?(other_item.value)
end
end
true
end
def hash
val = size
Thread.detect_outermost_recursion self do
each_item do |item|
val ^= item.key.hash
val ^= item.value.hash
end
end
return val
end
def [](key)
  # Fetch the stored value for +key+; when the key is absent, fall back
  # to the hash's default value / default proc via #default.
  entry = find_item(key)
  entry ? entry.value : default(key)
end
# Associates +value+ with +key+, overwriting any existing entry whose
# key matches under the current comparison mode (@state.match?).
# Returns +value+.
def []=(key, value)
Rubinius.check_frozen
# Grow and re-bin once the load factor is exceeded.
redistribute @entries if @size > @max_entries
key_hash = key.hash
index = key_hash & @mask
item = @entries[index]
# Empty bin: start a new collision chain.
unless item
@entries[index] = new_bucket key, key_hash, value
return value
end
# Chain head already holds this key: overwrite in place.
if @state.match? item.key, item.key_hash, key, key_hash
return item.value = value
end
# Walk the rest of the collision chain looking for the key.
last = item
item = item.link
while item
if @state.match? item.key, item.key_hash, key, key_hash
return item.value = value
end
last = item
item = item.link
end
# Key not present: append a fresh bucket to the end of the chain.
last.link = new_bucket key, key_hash, value
value
end
alias_method :store, :[]=
# Used internally to get around subclasses redefining #[]=
alias_method :__store__, :[]=
def assoc(key)
each_item { |e| return e.key, e.value if key == e.key }
nil
end
def clear
Rubinius.check_frozen
__setup__
self
end
def compare_by_identity
Rubinius.check_frozen
@state = State.new unless @state
@state.compare_by_identity
self
end
def compare_by_identity?
return false unless @state
@state.compare_by_identity?
end
def default(key=undefined)
if @default_proc and !key.equal?(undefined)
@default_proc.call(self, key)
else
@default
end
end
def default=(value)
@default_proc = nil
@default = value
end
def default_proc
@default_proc
end
# Sets the default proc to be executed on each key lookup
def default_proc=(prc)
prc = Rubinius::Type.coerce_to prc, Proc, :to_proc
if prc.lambda? and prc.arity != 2
raise TypeError, "default proc must have arity 2"
end
@default = nil
@default_proc = prc
end
# Removes the entry for +key+ and returns its value. When the key is
# absent, yields +key+ to the caller's block if given, otherwise
# returns nil (by falling off the end of the method).
def delete(key)
Rubinius.check_frozen
key_hash = key.hash
index = key_index key_hash
if item = @entries[index]
# The matching bucket may be the chain head: replace the bin entry
# with the rest of the chain. (Bucket#delete also unlinks it from
# the insertion-order list.)
if item.delete key, key_hash
@entries[index] = item.link
@size -= 1
return item.value
end
# Otherwise scan down the collision chain and splice the match out.
last = item
while item = item.link
if item.delete key, key_hash
last.link = item.link
@size -= 1
return item.value
end
last = item
end
end
return yield(key) if block_given?
end
def delete_if(&block)
Rubinius.check_frozen
return to_enum(:delete_if) unless block_given?
each_item { |e| delete e.key if yield(e.key, e.value) }
self
end
def each_item
return unless @state
item = @state.head
while item
yield item
item = item.next
end
end
def each
return to_enum(:each) unless block_given?
return unless @state
item = @state.head
while item
yield [item.key, item.value]
item = item.next
end
self
end
alias_method :each_pair, :each
def each_key
return to_enum(:each_key) unless block_given?
each_item { |e| yield e.key }
self
end
def each_value
return to_enum(:each_value) unless block_given?
each_item do |item|
yield item.value
end
self
end
# Returns true if there are no entries.
def empty?
@size == 0
end
# Strict lookup: return the stored value when present; otherwise prefer
# the caller's block, then an explicitly supplied default, and finally
# raise when nothing else applies.
def fetch(key, default=undefined)
  entry = find_item(key)
  return entry.value if entry
  return yield(key) if block_given?
  return default unless default.equal?(undefined)
  raise IndexError, 'key not found'
end
# Searches for an item matching +key+. Returns the item
# if found. Otherwise returns +nil+.
def find_item(key)
key_hash = key.hash
item = @entries[key_index(key_hash)]
while item
if @state.match? item.key, item.key_hash, key, key_hash
return item
end
item = item.link
end
end
def flatten(level=1)
to_a.flatten(level)
end
def key(value)
each_item do |item|
return item.key if item.value == value
end
nil
end
alias_method :index, :key
def keep_if(&block)
Rubinius.check_frozen
return to_enum(:keep_if) unless block_given?
each_item { |e| delete e.key unless yield(e.key, e.value) }
self
end
def initialize(default=undefined, &block)
Rubinius.check_frozen
if !default.equal?(undefined) and block
raise ArgumentError, "Specify a default or a block, not both"
end
if block
@default = nil
@default_proc = block
elsif !default.equal?(undefined)
@default = default
@default_proc = nil
end
self
end
private :initialize
def initialize_copy(other)
replace other
end
private :initialize_copy
def inspect
out = []
return '{...}' if Thread.detect_recursion self do
each_item do |item|
str = item.key.inspect
str << '=>'
str << item.value.inspect
out << str
end
end
"{#{out.join ', '}}"
end
alias_method :to_s, :inspect
def invert
inverted = {}
each_item do |item|
inverted[item.value] = item.key
end
inverted
end
def key?(key)
find_item(key) != nil
end
alias_method :has_key?, :key?
alias_method :include?, :key?
alias_method :member?, :key?
# Calculates the +@entries+ slot given a key_hash value.
def key_index(key_hash)
key_hash & @mask
end
private :key_index
def keys
ary = []
each_item do |item|
ary << item.key
end
ary
end
def merge(other, &block)
dup.merge!(other, &block)
end
def merge!(other)
Rubinius.check_frozen
other = Rubinius::Type.coerce_to other, Hash, :to_hash
if block_given?
other.each_item do |item|
key = item.key
if key? key
__store__ key, yield(key, self[key], item.value)
else
__store__ key, item.value
end
end
else
other.each_item do |item|
__store__ item.key, item.value
end
end
self
end
alias_method :update, :merge!
# Returns a new +Bucket+ instance having +key+, +key_hash+,
# and +value+. If +key+ is a kind of +String+, +key+ is
# duped and frozen.
def new_bucket(key, key_hash, value)
if key.kind_of?(String) and !key.frozen?
key = key.dup
key.freeze
end
@size += 1
Bucket.new key, key_hash, value, @state
end
private :new_bucket
# Adjusts the hash storage and redistributes the entries among
# the new bins. Any Iterator instance will be invalid after a
# call to #redistribute. Does not recalculate the cached key_hash
# values. See +#rehash+.
def redistribute(entries)
capacity = @capacity
# Rather than using __setup__, initialize the specific values we need to
# change so we don't eg overwrite @state.
@capacity = capacity * 2
@entries = Entries.new @capacity
@mask = @capacity - 1
@max_entries = @max_entries * 2
i = -1
while (i += 1) < capacity
next unless old = entries[i]
while old
old.link = nil if nxt = old.link
index = key_index old.key_hash
if item = @entries[index]
old.link = item
end
@entries[index] = old
old = nxt
end
end
end
def rassoc(value)
each_item { |e| return e.key, e.value if value == e.value }
nil
end
# Recalculates the cached key_hash values and reorders the entries
# into a new +@entries+ vector. Does NOT change the size of the
# hash. See +#redistribute+.
def rehash
capacity = @capacity
entries = @entries
@entries = Entries.new @capacity
i = -1
while (i += 1) < capacity
next unless old = entries[i]
while old
old.link = nil if nxt = old.link
index = key_index(old.key_hash = old.key.hash)
if item = @entries[index]
old.link = item
end
@entries[index] = old
old = nxt
end
end
self
end
def reject(&block)
return to_enum(:reject) unless block_given?
hsh = dup.delete_if(&block)
hsh.taint if tainted?
hsh
end
def reject!(&block)
Rubinius.check_frozen
return to_enum(:reject!) unless block_given?
unless empty?
size = @size
delete_if(&block)
return self if size != @size
end
nil
end
def replace(other)
Rubinius.check_frozen
other = Rubinius::Type.coerce_to other, Hash, :to_hash
return self if self.equal? other
# Normally this would be a call to __setup__, but that will create a new
# unused Tuple that we would wind up replacing anyways.
@capacity = other.capacity
@entries = Entries.new @capacity
@mask = @capacity - 1
@size = 0
@max_entries = other.max_entries
@state = State.new
@state.compare_by_identity if other.compare_by_identity?
other.each_item do |item|
__store__ item.key, item.value
end
@default = other.default
@default_proc = other.default_proc
self
end
def select
return to_enum(:select) unless block_given?
selected = Hash.allocate
each_item do |item|
if yield(item.key, item.value)
selected[item.key] = item.value
end
end
selected
end
def select!
Rubinius.check_frozen
return to_enum(:select!) unless block_given?
return nil if empty?
size = @size
each_item { |e| delete e.key unless yield(e.key, e.value) }
return nil if size == @size
self
end
# Removes the oldest (first-inserted) pair and returns it as
# [key, value]; returns the hash default for a nil key when empty.
def shift
  Rubinius.check_frozen
  return default(nil) if empty?
  oldest = @state.head
  delete oldest.key
  [oldest.key, oldest.value]
end
# Sets the underlying data structures.
#
# @capacity is the maximum number of +@entries+.
# @max_entries is the maximum number of entries before redistributing.
# @size is the number of pairs, equivalent to <code>hsh.size</code>.
# @entries is the vector of storage for the item chains.
def __setup__(capacity=MIN_SIZE, max=MAX_ENTRIES, size=0)
@capacity = capacity
# Bitmask used by key_index; relies on capacity being a power of 2
# (see MIN_SIZE / redistribute, which always double it).
@mask = capacity - 1
@max_entries = max
@size = size
@entries = Entries.new capacity
@state = State.new
end
private :__setup__
def sort(&block)
to_a.sort(&block)
end
def to_a
ary = []
each_item do |item|
ary << [item.key, item.value]
end
ary
end
# Returns an external iterator for the bins. See +Iterator+
def to_iter
Iterator.new @state
end
def to_hash
self
end
def value?(value)
each_item do |item|
return true if value == item.value
end
false
end
alias_method :has_value?, :value?
def values
ary = []
each_item do |item|
ary << item.value
end
ary
end
def values_at(*args)
args.map do |key|
if item = find_item(key)
item.value
else
default key
end
end
end
alias_method :indexes, :values_at
alias_method :indices, :values_at
end
end
Hash#compare_by_identity shouldn't invoke #equal?
This is why we can't have nice things.
unless Rubinius::Config['hash.hamt']
class Hash
include Enumerable
class State
attr_accessor :head
attr_accessor :tail
def self.from(state)
new_state = new
new_state.compare_by_identity if state and state.compare_by_identity?
new_state
end
def initialize
@compare_by_identity = false
@head = nil
@tail = nil
end
def compare_by_identity?
@compare_by_identity
end
def compare_by_identity
@compare_by_identity = true
class << self
def match?(this_key, this_hash, other_key, other_hash)
Rubinius::Type.object_equal other_key, this_key
end
end
self
end
def match?(this_key, this_hash, other_key, other_hash)
other_hash == this_hash and other_key.eql? this_key
end
end
# Bucket stores key, value pairs in Hash. The key's hash
# is also cached in the item and recalculated when the
# Hash#rehash method is called.
class Bucket
attr_accessor :key
attr_accessor :key_hash
attr_accessor :value
attr_accessor :link
attr_accessor :previous
attr_accessor :next
attr_accessor :state
def initialize(key, key_hash, value, state)
@key = key
@key_hash = key_hash
@value = value
@link = nil
@state = state
if tail = state.tail
@previous = tail
state.tail = tail.next = self
else
state.head = state.tail = self
end
end
def delete(key, key_hash)
if @state.match? @key, @key_hash, key, key_hash
remove
self
end
end
def remove
if @previous
@previous.next = @next
else
@state.head = @next
end
if @next
@next.previous = @previous
else
@state.tail = @previous
end
end
end
# An external iterator that returns entries in insertion order. While
# somewhat following the API of Enumerator, it is named Iterator because it
# does not provide <code>#each</code> and should not conflict with
# +Enumerator+ in MRI 1.8.7+. Returned by <code>Hash#to_iter</code>.
class Iterator
def initialize(state)
@state = state
end
# Returns the next object or +nil+.
def next(item)
if item
return item if item = item.next
else
return @state.head
end
end
end
# Hash methods
attr_reader :size
# #entries is a method provided by Enumerable which calls #to_a,
# so we have to not collide with that.
def __entries__
@entries
end
attr_reader :capacity
attr_reader :max_entries
alias_method :length, :size
Entries = Rubinius::Tuple
# Initial size of Hash. MUST be a power of 2.
MIN_SIZE = 16
# Allocate more storage when this full. This value grows with
# the size of the Hash so that the max load factor is 0.75.
MAX_ENTRIES = 12
# Overridden in lib/1.8.7 or lib/1.9
def self.[](*args)
if args.size == 1
obj = args.first
if obj.kind_of? Hash
return new.replace(obj)
elsif obj.respond_to? :to_hash
return new.replace(Rubinius::Type.coerce_to(obj, Hash, :to_hash))
elsif obj.is_a?(Array) # See redmine # 1385
h = new
args.first.each do |arr|
next unless arr.respond_to? :to_ary
arr = arr.to_ary
next unless (1..2).include? arr.size
h[arr.at(0)] = arr.at(1)
end
return h
end
end
return new if args.empty?
if args.size & 1 == 1
raise ArgumentError, "Expected an even number, got #{args.length}"
end
hash = new
i = 0
total = args.size
while i < total
hash[args[i]] = args[i+1]
i += 2
end
hash
end
def self.try_convert(obj)
Rubinius::Type.try_convert obj, Hash, :to_hash
end
def self.new_from_literal(size)
new
end
# Creates a fully-formed instance of Hash.
def self.allocate
hash = super()
Rubinius.privately { hash.__setup__ }
hash
end
def ==(other)
return true if self.equal? other
unless other.kind_of? Hash
return false unless other.respond_to? :to_hash
return other == self
end
return false unless other.size == size
Thread.detect_recursion self, other do
each_item do |item|
other_item = other.find_item(item.key)
# Other doesn't even have this key
return false unless other_item
# Order of the comparison matters! We must compare our value with
# the other Hash's value and not the other way around.
return false unless item.value == other_item.value
end
end
true
end
def eql?(other)
# Just like ==, but uses eql? to compare values.
return true if self.equal? other
unless other.kind_of? Hash
return false unless other.respond_to? :to_hash
return other.eql?(self)
end
return false unless other.size == size
Thread.detect_recursion self, other do
each_item do |item|
other_item = other.find_item(item.key)
# Other doesn't even have this key
return false unless other_item
# Order of the comparison matters! We must compare our value with
# the other Hash's value and not the other way around.
return false unless item.value.eql?(other_item.value)
end
end
true
end
def hash
val = size
Thread.detect_outermost_recursion self do
each_item do |item|
val ^= item.key.hash
val ^= item.value.hash
end
end
return val
end
def [](key)
if item = find_item(key)
item.value
else
default key
end
end
def []=(key, value)
Rubinius.check_frozen
redistribute @entries if @size > @max_entries
key_hash = key.hash
index = key_hash & @mask
item = @entries[index]
unless item
@entries[index] = new_bucket key, key_hash, value
return value
end
if @state.match? item.key, item.key_hash, key, key_hash
return item.value = value
end
last = item
item = item.link
while item
if @state.match? item.key, item.key_hash, key, key_hash
return item.value = value
end
last = item
item = item.link
end
last.link = new_bucket key, key_hash, value
value
end
alias_method :store, :[]=
# Used internally to get around subclasses redefining #[]=
alias_method :__store__, :[]=
def assoc(key)
each_item { |e| return e.key, e.value if key == e.key }
nil
end
def clear
Rubinius.check_frozen
__setup__
self
end
def compare_by_identity
Rubinius.check_frozen
@state = State.new unless @state
@state.compare_by_identity
self
end
def compare_by_identity?
return false unless @state
@state.compare_by_identity?
end
def default(key=undefined)
if @default_proc and !key.equal?(undefined)
@default_proc.call(self, key)
else
@default
end
end
def default=(value)
@default_proc = nil
@default = value
end
def default_proc
@default_proc
end
# Sets the default proc to be executed on each key lookup
def default_proc=(prc)
prc = Rubinius::Type.coerce_to prc, Proc, :to_proc
if prc.lambda? and prc.arity != 2
raise TypeError, "default proc must have arity 2"
end
@default = nil
@default_proc = prc
end
def delete(key)
Rubinius.check_frozen
key_hash = key.hash
index = key_index key_hash
if item = @entries[index]
if item.delete key, key_hash
@entries[index] = item.link
@size -= 1
return item.value
end
last = item
while item = item.link
if item.delete key, key_hash
last.link = item.link
@size -= 1
return item.value
end
last = item
end
end
return yield(key) if block_given?
end
def delete_if(&block)
Rubinius.check_frozen
return to_enum(:delete_if) unless block_given?
each_item { |e| delete e.key if yield(e.key, e.value) }
self
end
def each_item
return unless @state
item = @state.head
while item
yield item
item = item.next
end
end
def each
return to_enum(:each) unless block_given?
return unless @state
item = @state.head
while item
yield [item.key, item.value]
item = item.next
end
self
end
alias_method :each_pair, :each
def each_key
return to_enum(:each_key) unless block_given?
each_item { |e| yield e.key }
self
end
def each_value
return to_enum(:each_value) unless block_given?
each_item do |item|
yield item.value
end
self
end
# Returns true if there are no entries.
def empty?
@size == 0
end
def fetch(key, default=undefined)
if item = find_item(key)
return item.value
end
return yield(key) if block_given?
return default unless default.equal?(undefined)
raise IndexError, 'key not found'
end
# Searches for an item matching +key+. Returns the item
# if found. Otherwise returns +nil+.
def find_item(key)
key_hash = key.hash
item = @entries[key_index(key_hash)]
while item
if @state.match? item.key, item.key_hash, key, key_hash
return item
end
item = item.link
end
end
def flatten(level=1)
to_a.flatten(level)
end
def key(value)
each_item do |item|
return item.key if item.value == value
end
nil
end
alias_method :index, :key
def keep_if(&block)
Rubinius.check_frozen
return to_enum(:keep_if) unless block_given?
each_item { |e| delete e.key unless yield(e.key, e.value) }
self
end
def initialize(default=undefined, &block)
Rubinius.check_frozen
if !default.equal?(undefined) and block
raise ArgumentError, "Specify a default or a block, not both"
end
if block
@default = nil
@default_proc = block
elsif !default.equal?(undefined)
@default = default
@default_proc = nil
end
self
end
private :initialize
def initialize_copy(other)
replace other
end
private :initialize_copy
def inspect
out = []
return '{...}' if Thread.detect_recursion self do
each_item do |item|
str = item.key.inspect
str << '=>'
str << item.value.inspect
out << str
end
end
"{#{out.join ', '}}"
end
alias_method :to_s, :inspect
def invert
inverted = {}
each_item do |item|
inverted[item.value] = item.key
end
inverted
end
def key?(key)
find_item(key) != nil
end
alias_method :has_key?, :key?
alias_method :include?, :key?
alias_method :member?, :key?
# Calculates the +@entries+ slot given a key_hash value.
def key_index(key_hash)
key_hash & @mask
end
private :key_index
def keys
ary = []
each_item do |item|
ary << item.key
end
ary
end
def merge(other, &block)
dup.merge!(other, &block)
end
def merge!(other)
Rubinius.check_frozen
other = Rubinius::Type.coerce_to other, Hash, :to_hash
if block_given?
other.each_item do |item|
key = item.key
if key? key
__store__ key, yield(key, self[key], item.value)
else
__store__ key, item.value
end
end
else
other.each_item do |item|
__store__ item.key, item.value
end
end
self
end
alias_method :update, :merge!
# Returns a new +Bucket+ instance having +key+, +key_hash+,
# and +value+. If +key+ is a kind of +String+, +key+ is
# duped and frozen.
def new_bucket(key, key_hash, value)
if key.kind_of?(String) and !key.frozen?
key = key.dup
key.freeze
end
@size += 1
Bucket.new key, key_hash, value, @state
end
private :new_bucket
# Adjusts the hash storage and redistributes the entries among
# the new bins. Any Iterator instance will be invalid after a
# call to #redistribute. Does not recalculate the cached key_hash
# values. See +#rehash+.
def redistribute(entries)
capacity = @capacity
# Rather than using __setup__, initialize the specific values we need to
# change so we don't eg overwrite @state.
@capacity = capacity * 2
@entries = Entries.new @capacity
@mask = @capacity - 1
@max_entries = @max_entries * 2
i = -1
while (i += 1) < capacity
next unless old = entries[i]
while old
old.link = nil if nxt = old.link
index = key_index old.key_hash
if item = @entries[index]
old.link = item
end
@entries[index] = old
old = nxt
end
end
end
def rassoc(value)
each_item { |e| return e.key, e.value if value == e.value }
nil
end
# Recalculates the cached key_hash values and reorders the entries
# into a new +@entries+ vector. Does NOT change the size of the
# hash. See +#redistribute+.
def rehash
capacity = @capacity
entries = @entries
@entries = Entries.new @capacity
i = -1
while (i += 1) < capacity
next unless old = entries[i]
while old
old.link = nil if nxt = old.link
index = key_index(old.key_hash = old.key.hash)
if item = @entries[index]
old.link = item
end
@entries[index] = old
old = nxt
end
end
self
end
def reject(&block)
return to_enum(:reject) unless block_given?
hsh = dup.delete_if(&block)
hsh.taint if tainted?
hsh
end
def reject!(&block)
Rubinius.check_frozen
return to_enum(:reject!) unless block_given?
unless empty?
size = @size
delete_if(&block)
return self if size != @size
end
nil
end
def replace(other)
Rubinius.check_frozen
other = Rubinius::Type.coerce_to other, Hash, :to_hash
return self if self.equal? other
# Normally this would be a call to __setup__, but that will create a new
# unused Tuple that we would wind up replacing anyways.
@capacity = other.capacity
@entries = Entries.new @capacity
@mask = @capacity - 1
@size = 0
@max_entries = other.max_entries
@state = State.new
@state.compare_by_identity if other.compare_by_identity?
other.each_item do |item|
__store__ item.key, item.value
end
@default = other.default
@default_proc = other.default_proc
self
end
def select
return to_enum(:select) unless block_given?
selected = Hash.allocate
each_item do |item|
if yield(item.key, item.value)
selected[item.key] = item.value
end
end
selected
end
def select!
Rubinius.check_frozen
return to_enum(:select!) unless block_given?
return nil if empty?
size = @size
each_item { |e| delete e.key unless yield(e.key, e.value) }
return nil if size == @size
self
end
def shift
Rubinius.check_frozen
return default(nil) if empty?
item = @state.head
delete item.key
return item.key, item.value
end
# Sets the underlying data structures.
#
# @capacity is the maximum number of +@entries+ slots.
# @max_entries is the maximum number of entries before redistributing.
# @size is the number of pairs, equivalent to <code>hsh.size</code>.
# @entries is the vector of storage for the item chains.
# @mask relies on capacity being a power of 2 (see MIN_SIZE), so
#   <code>key_hash & @mask</code> is a cheap modulo for slot selection.
def __setup__(capacity=MIN_SIZE, max=MAX_ENTRIES, size=0)
@capacity = capacity
@mask = capacity - 1
@max_entries = max
@size = size
@entries = Entries.new capacity
@state = State.new
end
private :__setup__
def sort(&block)
to_a.sort(&block)
end
def to_a
ary = []
each_item do |item|
ary << [item.key, item.value]
end
ary
end
# Returns an external iterator for the bins. See +Iterator+
def to_iter
Iterator.new @state
end
def to_hash
self
end
def value?(value)
each_item do |item|
return true if value == item.value
end
false
end
alias_method :has_value?, :value?
def values
ary = []
each_item do |item|
ary << item.value
end
ary
end
def values_at(*args)
args.map do |key|
if item = find_item(key)
item.value
else
default key
end
end
end
alias_method :indexes, :values_at
alias_method :indices, :values_at
end
end
|
# depends on: class.rb
#--
# Be very careful about calling raise in here! Thread has its own
# raise which, if you're calling raise, you probably don't want. Use
# Kernel.raise to call the proper raise.
#++
class Thread
class Die < Exception; end # HACK
def task; @task; end
@abort_on_exception = false
def self.abort_on_exception
@abort_on_exception
end
def self.abort_on_exception=(val)
@abort_on_exception = val
end
def inspect
stat = status()
stat = "dead" unless stat
"#<#{self.class}:0x#{object_id.to_s(16)} #{stat}>"
end
def setup(prime_lock)
@group = nil
@alive = true
@result = nil
@exception = nil
@critical = false
@locals = LookupTable.new
@lock = Channel.new
@lock.send nil if prime_lock
@joins = []
end
def initialize(*args)
unless block_given?
Kernel.raise ThreadError, "must be called with a block"
end
block = block_given?
block = block.block if block.kind_of? Proc
block.disable_long_return!
setup(false)
setup_task do
begin
begin
@lock.send nil
begin
@result = block.call(*args)
rescue IllegalLongReturn, LongReturnException => e2
Kernel.raise ThreadError,
"return is not allowed across threads", e2.context
end
ensure
@lock.receive
@alive = false
@joins.each do |join|
join.send self
end
end
rescue Die
@exception = nil
rescue Exception => e
@exception = e
ensure
@lock.send nil
end
if @exception
if Thread.abort_on_exception
Thread.main.raise @exception
elsif $DEBUG
STDERR.puts "Exception in thread: #{@exception.message} (#{@exception.class})"
end
end
Thread.dequeue
end
Thread.current.group.add self
end
def setup_task
block = block_given?
@task.associate block
end
def self.new(*args)
block = block_given?
th = allocate()
th.__send__ :initialize, *args, &block
th.wakeup
return th
end
def self.start(*args, &block)
new(*args, &block) # HACK
end
def current_context
@task.current_context
end
def alive?
@lock.receive
begin
@alive
ensure
@lock.send nil
end
end
def stop?
!alive? || @sleep
end
def kill
raise Die
end
# Reports the thread's state: "sleep" or "run" while alive; once dead,
# nil if the thread died with an exception, false otherwise.
def status
  if alive?
    @sleep ? "sleep" : "run"
  else
    @exception ? nil : false
  end
end
def self.stop()
Thread.critical = false
sleep
nil
end
def self.critical
@critical
end
def self.critical=(value)
@critical = value
end
def join(timeout = Undefined)
join_inner(timeout) { @alive ? nil : self }
end
def group
@group
end
def add_to_group(group)
@group = group
end
def value
join_inner { @result }
end
def join_inner(timeout = Undefined)
result = nil
@lock.receive
begin
if @alive
jc = Channel.new
@joins << jc
@lock.send nil
begin
unless timeout.equal?(Undefined)
Scheduler.send_in_seconds(jc, timeout.to_f, nil)
end
jc.receive
ensure
@lock.receive
end
end
Kernel.raise @exception if @exception
result = yield
ensure
@lock.send nil
end
result
end
private :join_inner
def raise_prim(exc)
Ruby.primitive :thread_raise
end
private :raise_prim
def raise(exc=$!, msg=nil, trace=nil)
if exc.respond_to? :exception
exc = exc.exception msg
Kernel.raise TypeError, 'exception class/object expected' unless Exception === exc
exc.set_backtrace trace if trace
elsif exc.kind_of? String or !exc
exc = RuntimeError.exception exc
else
Kernel.raise TypeError, 'exception class/object expected'
end
if $DEBUG
STDERR.puts "Exception: #{exc.message} (#{exc.class})"
end
raise_prim exc
end
def [](key)
raise TypeError, "#{key} is not a symbol" if key.kind_of?(NilClass)
raise ArgumentError, "#{key} is not a symbol" unless key.kind_of?(Symbol) or key.kind_of?(String)
@locals[Type.coerce_to(key,Symbol,:to_sym)]
end
def []=(key, value)
raise TypeError, "#{key} is not a symbol" if key.kind_of?(NilClass)
raise ArgumentError, "#{key} is not a symbol" unless key.kind_of?(Symbol) or key.kind_of?(String)
@locals[Type.coerce_to(key,Symbol,:to_sym)] = value
end
def keys
@locals.keys
end
def key?(key)
raise TypeError, "#{key} is not a symbol" if key.kind_of?(NilClass)
raise ArgumentError, "#{key} is not a symbol" unless key.kind_of?(Symbol) or key.kind_of?(String)
@locals.key?(Type.coerce_to(key,Symbol,:to_sym))
end
def set_debugging(dc, cc)
@task.set_debugging(dc, cc)
end
def debug_channel
@task.debug_channel
end
def control_channel
@task.control_channel
end
def self.main
@main_thread
end
def self.initialize_main_thread(thread)
@main_thread = thread
end
def self.list
Thread.current.group.list
end
end
Add missing Thread#exit, #terminate aliases
# depends on: class.rb
#--
# Be very careful about calling raise in here! Thread has its own
# raise which, if you're calling raise, you probably don't want. Use
# Kernel.raise to call the proper raise.
#++
class Thread
class Die < Exception; end # HACK
def task; @task; end
@abort_on_exception = false
def self.abort_on_exception
@abort_on_exception
end
def self.abort_on_exception=(val)
@abort_on_exception = val
end
def inspect
stat = status()
stat = "dead" unless stat
"#<#{self.class}:0x#{object_id.to_s(16)} #{stat}>"
end
def setup(prime_lock)
@group = nil
@alive = true
@result = nil
@exception = nil
@critical = false
@locals = LookupTable.new
@lock = Channel.new
@lock.send nil if prime_lock
@joins = []
end
def initialize(*args)
unless block_given?
Kernel.raise ThreadError, "must be called with a block"
end
block = block_given?
block = block.block if block.kind_of? Proc
block.disable_long_return!
setup(false)
setup_task do
begin
begin
@lock.send nil
begin
@result = block.call(*args)
rescue IllegalLongReturn, LongReturnException => e2
Kernel.raise ThreadError,
"return is not allowed across threads", e2.context
end
ensure
@lock.receive
@alive = false
@joins.each do |join|
join.send self
end
end
rescue Die
@exception = nil
rescue Exception => e
@exception = e
ensure
@lock.send nil
end
if @exception
if Thread.abort_on_exception
Thread.main.raise @exception
elsif $DEBUG
STDERR.puts "Exception in thread: #{@exception.message} (#{@exception.class})"
end
end
Thread.dequeue
end
Thread.current.group.add self
end
def setup_task
block = block_given?
@task.associate block
end
def self.new(*args)
block = block_given?
th = allocate()
th.__send__ :initialize, *args, &block
th.wakeup
return th
end
def self.start(*args, &block)
new(*args, &block) # HACK
end
def current_context
@task.current_context
end
def alive?
@lock.receive
begin
@alive
ensure
@lock.send nil
end
end
def stop?
!alive? || @sleep
end
def kill
raise Die
end
alias exit kill
alias terminate kill
# Reports the thread's state: "sleep" or "run" while alive; once dead,
# nil if the thread died with an exception, false otherwise.
def status
  return @sleep ? "sleep" : "run" if alive?
  @exception ? nil : false
end
def self.stop()
Thread.critical = false
sleep
nil
end
def self.critical
@critical
end
def self.critical=(value)
@critical = value
end
def join(timeout = Undefined)
join_inner(timeout) { @alive ? nil : self }
end
def group
@group
end
def add_to_group(group)
@group = group
end
def value
join_inner { @result }
end
def join_inner(timeout = Undefined)
result = nil
@lock.receive
begin
if @alive
jc = Channel.new
@joins << jc
@lock.send nil
begin
unless timeout.equal?(Undefined)
Scheduler.send_in_seconds(jc, timeout.to_f, nil)
end
jc.receive
ensure
@lock.receive
end
end
Kernel.raise @exception if @exception
result = yield
ensure
@lock.send nil
end
result
end
private :join_inner
def raise_prim(exc)
Ruby.primitive :thread_raise
end
private :raise_prim
def raise(exc=$!, msg=nil, trace=nil)
if exc.respond_to? :exception
exc = exc.exception msg
Kernel.raise TypeError, 'exception class/object expected' unless Exception === exc
exc.set_backtrace trace if trace
elsif exc.kind_of? String or !exc
exc = RuntimeError.exception exc
else
Kernel.raise TypeError, 'exception class/object expected'
end
if $DEBUG
STDERR.puts "Exception: #{exc.message} (#{exc.class})"
end
raise_prim exc
end
def [](key)
raise TypeError, "#{key} is not a symbol" if key.kind_of?(NilClass)
raise ArgumentError, "#{key} is not a symbol" unless key.kind_of?(Symbol) or key.kind_of?(String)
@locals[Type.coerce_to(key,Symbol,:to_sym)]
end
def []=(key, value)
raise TypeError, "#{key} is not a symbol" if key.kind_of?(NilClass)
raise ArgumentError, "#{key} is not a symbol" unless key.kind_of?(Symbol) or key.kind_of?(String)
@locals[Type.coerce_to(key,Symbol,:to_sym)] = value
end
def keys
@locals.keys
end
def key?(key)
raise TypeError, "#{key} is not a symbol" if key.kind_of?(NilClass)
raise ArgumentError, "#{key} is not a symbol" unless key.kind_of?(Symbol) or key.kind_of?(String)
@locals.key?(Type.coerce_to(key,Symbol,:to_sym))
end
def set_debugging(dc, cc)
@task.set_debugging(dc, cc)
end
def debug_channel
@task.debug_channel
end
def control_channel
@task.control_channel
end
def self.main
@main_thread
end
def self.initialize_main_thread(thread)
@main_thread = thread
end
def self.list
Thread.current.group.list
end
end
|
Código de exemplo usando Until
Usando o laço until
#!/usr/bin/ruby
# Example from the ebook (position 553): demonstrates Ruby's `until`
# loop by printing "Numero: 0" through "Numero: 99".
numero = 0
until numero == 100
puts "Numero: #{numero}"
numero += 1
end
|
require "spec_helper"
# Specs for Monzo::Balance: attribute readers on a factory-built object,
# and Balance.find against a WebMock-stubbed Monzo API endpoint.
# NOTE(review): the expected values (5000, "GBP", 200, "", 0, []) mirror
# the FactoryGirl :balance factory — confirm against the factory file.
describe Monzo::Balance do
context "initializing a balance" do
before :each do
@balance = FactoryGirl.build(:balance)
end
it "should have a balance" do
expect(@balance.balance).to eql(5000)
end
it "should have a currency" do
expect(@balance.currency).to eql("GBP")
end
it "should have the amount spent today" do
expect(@balance.spend_today).to eql(200)
end
it "should have a local currency" do
expect(@balance.local_currency).to eql("")
end
it "should have a local exchange rate" do
expect(@balance.local_exchange_rate).to eql(0)
end
it "should have the amount spent locally" do
expect(@balance.local_spend).to eql([])
end
end
context ".find" do
before :each do
access_token = "abc"
Monzo.configure(access_token)
attributes = FactoryGirl.attributes_for(:balance)
account_id = "acc_123"
# Stub the GET /balance call so .find never hits the network.
@stub = stub_request(:get, "https://api.monzo.com/balance?account_id=#{account_id}").
with(headers: build_request_headers(access_token)).
to_return(status: 200, body: attributes.to_json, headers: {})
@balance = Monzo::Balance.find(account_id)
end
it "has performed the request" do
expect(@stub).to have_been_requested
end
it "should be an instance of balance" do
expect(@balance).to be_an_instance_of(Monzo::Balance)
end
it "should have a balance" do
expect(@balance.balance).to eql(5000)
end
it "should have a currency" do
expect(@balance.currency).to eql("GBP")
end
it "should have the amount spent today" do
expect(@balance.spend_today).to eql(200)
end
it "should have a local currency" do
expect(@balance.local_currency).to eql("")
end
it "should have a local exchange rate" do
expect(@balance.local_exchange_rate).to eql(0)
end
it "should have the amount spent locally" do
expect(@balance.local_spend).to eql([])
end
end
end
Tidy up balance spec.
require "spec_helper"
# Specs for Monzo::Balance: attribute readers on a factory-built object,
# and Balance.find against a WebMock-stubbed Monzo API endpoint.
# NOTE(review): the expected values (5000, "GBP", 200, "", 0, []) mirror
# the FactoryGirl :balance factory — confirm against the factory file.
describe Monzo::Balance do
context "initializing a balance" do
before :each do
@balance = FactoryGirl.build(:balance)
end
it "should have a balance" do
expect(@balance.balance).to eql(5000)
end
it "should have a currency" do
expect(@balance.currency).to eql("GBP")
end
it "should have the amount spent today" do
expect(@balance.spend_today).to eql(200)
end
it "should have a local currency" do
expect(@balance.local_currency).to eql("")
end
it "should have a local exchange rate" do
expect(@balance.local_exchange_rate).to eql(0)
end
it "should have the amount spent locally" do
expect(@balance.local_spend).to eql([])
end
end
context ".find" do
before :each do
access_token = "abc"
Monzo.configure(access_token)
attributes = FactoryGirl.attributes_for(:balance)
account_id = "acc_123"
# Stub configured statement-by-statement; presumably WebMock's #with and
# #to_return mutate and return the same stub, making this equivalent to
# the chained form — confirm against the WebMock docs.
@stub = stub_request(:get, "https://api.monzo.com/balance?account_id=#{account_id}")
@stub.with(headers: build_request_headers(access_token))
@stub.to_return(status: 200, body: attributes.to_json, headers: {})
@balance = Monzo::Balance.find(account_id)
end
it "has performed the request" do
expect(@stub).to have_been_requested
end
it "should be an instance of balance" do
expect(@balance).to be_an_instance_of(Monzo::Balance)
end
it "should have a balance" do
expect(@balance.balance).to eql(5000)
end
it "should have a currency" do
expect(@balance.currency).to eql("GBP")
end
it "should have the amount spent today" do
expect(@balance.spend_today).to eql(200)
end
it "should have a local currency" do
expect(@balance.local_currency).to eql("")
end
it "should have a local exchange rate" do
expect(@balance.local_exchange_rate).to eql(0)
end
it "should have the amount spent locally" do
expect(@balance.local_spend).to eql([])
end
end
end
|
# Maps raw property ids (String keys) to display metadata, and resolves
# mastery ids to weapon/summon names.
module PropertiesHelper
  # 352 is "DamageAbsorb", figure this out
  # NOTE(review): several literal keys are duplicated below ("236" and
  # "243".."245"); Ruby keeps the LAST entry for a duplicated key, so the
  # earlier :aug entries for those ids are silently discarded. The intended
  # ids should be confirmed against the game data before changing anything.
  PROPERTIES = {
    "192" => { :type => :other, :name => "Fishing Skill"},
    "181" => { :type => :other, :name => "Chess Rank"},
    "218" => { :type => :aug, :name => "Reinforcement of the Lugians"},
    "219" => { :type => :aug, :name => "Bleeargh's Fortitude" },
    "220" => { :type => :aug, :name => "Oswald's Enhancement" },
    "221" => { :type => :aug, :name => "Siraluun's Blessing" },
    "222" => { :type => :aug, :name => "Enduring Calm" },
    "223" => { :type => :aug, :name => "Steadfast Will" },
    "224" => { :type => :aug, :name => "Ciandra's Essence" },
    "225" => { :type => :aug, :name => "Yoshi's Essence" },
    "226" => { :type => :aug, :name => "Jibril's Essence" },
    "227" => { :type => :aug, :name => "Celdiseth's Essence" },
    "228" => { :type => :aug, :name => "Koga's Essence" },
    "229" => { :type => :aug, :name => "Shadow of the Seventh Mule" },
    "230" => { :type => :aug, :name => "Might of the Seventh Mule" },
    "231" => { :type => :aug, :name => "Clutch of the Miser" },
    "232" => { :type => :aug, :name => "Enduring Enchantment" },
    "233" => { :type => :aug, :name => "Critical Protection" },
    "234" => { :type => :aug, :name => "Quick Learner" },
    "235" => { :type => :aug, :name => "Ciandra's Fortune" },
    "236" => { :type => :aug, :name => "Charmed Smith" },
    "237" => { :type => :aug, :name => "Innate Renewal" },
    "238" => { :type => :aug, :name => "Archmage's Endurance" },
    "240" => { :type => :aug, :name => "Enchancement of the Blade ;Turner" },
    "241" => { :type => :aug, :name => "Enchancement of the Arrow ;Turner" },
    "242" => { :type => :aug, :name => "Enchancement of the Mace ;Turner" },
    "243" => { :type => :aug, :name => "Caustic Enhancement" },
    "244" => { :type => :aug, :name => "Fiery Enchancement" },
    "245" => { :type => :aug, :name => "Icy Enchancement" },
    "236" => { :type => :aug, :name => "Storm's Enhancement" },
    "298" => { :type => :aug, :name => "Eye of the Remorseless"},
    "299" => { :type => :aug, :name => "Hand of the Remorseless"},
    "300" => { :type => :aug, :name => "Master of the Steel Circle"},
    "301" => { :type => :aug, :name => "Master of the Focused Eye"},
    "302" => { :type => :aug, :name => "Master of the Five Fold Path" },
    "309" => { :type => :aug, :name => "Frenzy of the Slayer"},
    "310" => { :type => :aug, :name => "Iron Skin of the Invincible" },
    "326" => { :type => :aug, :name => "Jack of all Trades" },
    "328" => { :type => :aug, :name => "Infused Void Magic" },
    "294" => { :type => :aug, :name => "Infused Creature Magic" },
    "296" => { :type => :aug, :name => "Infused Life Magic" },
    "297" => { :type => :aug, :name => "Infused War Magic" },
    "333" => { :type => :aura, :name => "Valor"},
    "334" => { :type => :aura, :name => "Protection"},
    "335" => { :type => :aura, :name => "Glory"},
    "336" => { :type => :aura, :name => "Temperance"},
    "338" => { :type => :aura, :name => "Aetheric Vision"},
    "339" => { :type => :aura, :name => "Mana Flow"},
    "342" => { :type => :aura, :name => "Purity"},
    "343" => { :type => :aura, :name => "Crafstman"},
    "344" => { :type => :aura, :name => "Specialization"},
    "365" => { :type => :aura, :name => "World"},
    "370" => { :type => :rating, :name => "Damage" },
    "371" => { :type => :rating, :name => "Damage Resistance" },
    "372" => { :type => :rating, :name => "Critical" },
    "373" => { :type => :rating, :name => "Critical Resistance" },
    "374" => { :type => :rating, :name => "Critical Damage" },
    "375" => { :type => :rating, :name => "Critical Damage Resistance" },
    "376" => { :type => :rating, :name => "Healing Boost" },
    "379" => { :type => :rating, :name => "Vitality" },
    "281" => { :type => :society, :name => "Society"},
    "282" => { :type => :society, :name => "Society1Status"},
    "283" => { :type => :society, :name => "Society2Status"},
    "284" => { :type => :society, :name => "Society3Status"},
    "354" => { :type => :mastery, :name => "Melee Mastery"},
    "355" => { :type => :mastery, :name => "Ranged Mastery"},
    "362" => { :type => :mastery, :name => "Summoning Mastery"},
    "243" => { :type => :resist, :name => "Acid Resistance"},
    "244" => { :type => :resist, :name => "Fire Resistance"},
    "245" => { :type => :resist, :name => "Lightning Resistance"},
    "246" => { :type => :resist, :name => "Piercing Resistance"}
  }
  # Per-mastery-property lookup: mastery property id -> (Integer id -> name).
  MASTERY_NAMES = {
    '354' => {
      1 => "Unarmed",
      2 => "Swords",
      3 => "Axes",
      4 => "Maces",
      6 => "Daggers",
      7 => "Staves",
      11 => "Two-Handed"
    },
    '355' => {
      8 => "Bows",
      10 => "Thrown Weapons",
      12 => "Magical Spells"
    },
    '362' => {
      1 => "Primalist",
      2 => "Necromancer",
      3 => "Naturalist"
    }
  }
  # Resolves a property id to its display name; unknown ids fall back to
  # the id itself.
  def self.get_property_name(id)
    prop = PROPERTIES[id]
    prop ? prop[:name] : id
  end
  # Truthy when the property id is known and tagged with the given :type
  # (nil for unknown ids, matching the original short-circuit behavior).
  def self.is_type(id, type)
    prop = PROPERTIES[id]
    prop && prop[:type] == type
  end
  # Returns the property entry itself (truthy) when the id is known.
  def self.is_known(id)
    PROPERTIES[id]
  end
  # Resolves a mastery value to its display name; falls back to the raw
  # id when either the mastery property or the value is unknown.
  def self.get_mastery_name(type, id)
    names = MASTERY_NAMES[type]
    return id if names.nil? || names[id].nil?
    names[id]
  end
end
Remove "Mastery" text from properties
# Static lookup tables and helpers for translating numeric character-property
# ids (as string keys) into human-readable names and categories.
module PropertiesHelper
  # 352 is "DamageAbsorb", figure this out
  #
  # Property id (string) => { :type => category symbol, :name => display name }.
  #
  # FIX: the original literal declared "236" twice ("Charmed Smith", then
  # "Storm's Enhancement") and "243"/"244"/"245" twice (:aug enhancements,
  # then :resist entries). Ruby hash literals keep the LAST duplicate and warn,
  # so the earlier entries were dead; they have been removed here — the
  # resulting hash contents are unchanged. The shadowed :aug entries likely
  # deserved distinct ids of their own — TODO confirm against the data source.
  PROPERTIES = {
    "192" => { :type => :other, :name => "Fishing Skill"},
    "181" => { :type => :other, :name => "Chess Rank"},
    "218" => { :type => :aug, :name => "Reinforcement of the Lugians"},
    "219" => { :type => :aug, :name => "Bleeargh's Fortitude" },
    "220" => { :type => :aug, :name => "Oswald's Enhancement" },
    "221" => { :type => :aug, :name => "Siraluun's Blessing" },
    "222" => { :type => :aug, :name => "Enduring Calm" },
    "223" => { :type => :aug, :name => "Steadfast Will" },
    "224" => { :type => :aug, :name => "Ciandra's Essence" },
    "225" => { :type => :aug, :name => "Yoshi's Essence" },
    "226" => { :type => :aug, :name => "Jibril's Essence" },
    "227" => { :type => :aug, :name => "Celdiseth's Essence" },
    "228" => { :type => :aug, :name => "Koga's Essence" },
    "229" => { :type => :aug, :name => "Shadow of the Seventh Mule" },
    "230" => { :type => :aug, :name => "Might of the Seventh Mule" },
    "231" => { :type => :aug, :name => "Clutch of the Miser" },
    "232" => { :type => :aug, :name => "Enduring Enchantment" },
    "233" => { :type => :aug, :name => "Critical Protection" },
    "234" => { :type => :aug, :name => "Quick Learner" },
    "235" => { :type => :aug, :name => "Ciandra's Fortune" },
    "237" => { :type => :aug, :name => "Innate Renewal" },
    "238" => { :type => :aug, :name => "Archmage's Endurance" },
    "240" => { :type => :aug, :name => "Enchancement of the Blade ;Turner" },
    "241" => { :type => :aug, :name => "Enchancement of the Arrow ;Turner" },
    "242" => { :type => :aug, :name => "Enchancement of the Mace ;Turner" },
    "236" => { :type => :aug, :name => "Storm's Enhancement" },
    "298" => { :type => :aug, :name => "Eye of the Remorseless"},
    "299" => { :type => :aug, :name => "Hand of the Remorseless"},
    "300" => { :type => :aug, :name => "Master of the Steel Circle"},
    "301" => { :type => :aug, :name => "Master of the Focused Eye"},
    "302" => { :type => :aug, :name => "Master of the Five Fold Path" },
    "309" => { :type => :aug, :name => "Frenzy of the Slayer"},
    "310" => { :type => :aug, :name => "Iron Skin of the Invincible" },
    "326" => { :type => :aug, :name => "Jack of all Trades" },
    "328" => { :type => :aug, :name => "Infused Void Magic" },
    "294" => { :type => :aug, :name => "Infused Creature Magic" },
    "296" => { :type => :aug, :name => "Infused Life Magic" },
    "297" => { :type => :aug, :name => "Infused War Magic" },
    "333" => { :type => :aura, :name => "Valor"},
    "334" => { :type => :aura, :name => "Protection"},
    "335" => { :type => :aura, :name => "Glory"},
    "336" => { :type => :aura, :name => "Temperance"},
    "338" => { :type => :aura, :name => "Aetheric Vision"},
    "339" => { :type => :aura, :name => "Mana Flow"},
    "342" => { :type => :aura, :name => "Purity"},
    "343" => { :type => :aura, :name => "Crafstman"},
    "344" => { :type => :aura, :name => "Specialization"},
    "365" => { :type => :aura, :name => "World"},
    "370" => { :type => :rating, :name => "Damage" },
    "371" => { :type => :rating, :name => "Damage Resistance" },
    "372" => { :type => :rating, :name => "Critical" },
    "373" => { :type => :rating, :name => "Critical Resistance" },
    "374" => { :type => :rating, :name => "Critical Damage" },
    "375" => { :type => :rating, :name => "Critical Damage Resistance" },
    "376" => { :type => :rating, :name => "Healing Boost" },
    "379" => { :type => :rating, :name => "Vitality" },
    "281" => { :type => :society, :name => "Society"},
    "282" => { :type => :society, :name => "Society1Status"},
    "283" => { :type => :society, :name => "Society2Status"},
    "284" => { :type => :society, :name => "Society3Status"},
    "354" => { :type => :mastery, :name => "Melee"},
    "355" => { :type => :mastery, :name => "Ranged"},
    "362" => { :type => :mastery, :name => "Summoning"},
    "243" => { :type => :resist, :name => "Acid Resistance"},
    "244" => { :type => :resist, :name => "Fire Resistance"},
    "245" => { :type => :resist, :name => "Lightning Resistance"},
    "246" => { :type => :resist, :name => "Piercing Resistance"}
  }
  # Maps a :mastery property id (string, same keys as PROPERTIES) to a
  # sub-table of mastery value (Integer) => display name.
  MASTERY_NAMES = {
    '354' => {  # Melee
      1 => "Unarmed",
      2 => "Swords",
      3 => "Axes",
      4 => "Maces",
      6 => "Daggers",
      7 => "Staves",
      11 => "Two-Handed"
    },
    '355' => {  # Ranged
      8 => "Bows",
      10 => "Thrown Weapons",
      12 => "Magical Spells"
    },
    '362' => {  # Summoning
      1 => "Primalist",
      2 => "Necromancer",
      3 => "Naturalist"
    }
  }
  # Resolve a property id to its display name; unknown ids fall through unchanged.
  def self.get_property_name(id)
    return PROPERTIES[id] ? PROPERTIES[id][:name] : id
  end
  # Truthy when the id is known and belongs to the given category symbol;
  # nil (falsy) for unknown ids.
  def self.is_type(id, type)
    return PROPERTIES[id] && PROPERTIES[id][:type] == type
  end
  # Returns the property entry hash (truthy) or nil for unknown ids.
  def self.is_known(id)
    return PROPERTIES[id]
  end
  # Resolve a mastery value to its display name for the given mastery id;
  # falls back to the raw value when either lookup misses.
  def self.get_mastery_name(type, id)
    return id if MASTERY_NAMES[type].nil?
    return id if MASTERY_NAMES[type][id].nil?
    MASTERY_NAMES[type][id]
  end
end
require '/Users/Albert/Repos/Scripts/ruby/lib/utilities.rb'
require '/Users/Albert/Repos/Scripts/ruby/lib/encrypter.rb'
require 'columnist'
class ShowBankTransactions
include Columnist
# Initialize all the DB stuff, etc.
#
# argv [String, nil] — optional display mode: 'untranslated', 'with-ids' or
# 'with-internal-transfers' (see the mode flags below). Anything else (or nil)
# gives the default translated view.
def initialize(argv)
  # COLORS — xterm-256 color codes used by the Columnist output.
  @green = 10
  @magenta = 201
  @yellow = 226
  @cyan = 87
  @red = 9
  @blue = 32
  @white = 255
  @plus_color = 47
  @minus_color = 196
  # Get Database Connection
  # NOTE(review): the EC2MySqlAlb3rtuk* constants are presumably provided by
  # the required utilities/encrypter scripts — they are not defined in this file.
  encrypter = Encrypter.new
  @databaseConnection = Mysql.new(
    encrypter.decrypt(EC2MySqlAlb3rtukHost),
    encrypter.decrypt(EC2MySqlAlb3rtukUser),
    encrypter.decrypt(EC2MySqlAlb3rtukPass),
    encrypter.decrypt(EC2MySqlAlb3rtukSchema)
  )
  # INTERNAL TYPE ID LEGEND
  # 1 => CASH IN
  # 2 => RECURRING IN
  # 3 => RECURRING OUT
  # (0 is used below for one-off / unclassified transactions.)
  # Each rule matches on :bank_account_id + :type + any of :terms (regex),
  # and carries a display :color and :translation.
  @recognizedTransactions = Array[
    # NATWEST AD GOLD
    {:intTypeID => 0, :id => 100, :bank_account_id => 1, :type => 'BAC', :terms => Array['PAYPAL', 'PPWD'], :color => @white, :translation => 'PAYPAL WITHDRAWAL'},
    {:intTypeID => 1, :id => 200, :bank_account_id => 1, :type => 'CDM', :terms => Array['521005', '521007', '560005'], :color => @green, :translation => 'CASH'},
    {:intTypeID => 1, :id => 200, :bank_account_id => 1, :type => '- ', :terms => Array['521005', '521007', '560005'], :color => @green, :translation => 'CASH'},
    {:intTypeID => 1, :id => 200, :bank_account_id => 1, :type => 'TLR', :terms => Array[''], :color => @green, :translation => 'CASH'},
    {:intTypeID => 3, :id => 300, :bank_account_id => 1, :type => 'POS', :terms => Array['NAMESCO'], :color => @red, :translation => 'NAMESCO WEB SERVER', :recurring_amount => 29.99},
    {:intTypeID => 3, :id => 400, :bank_account_id => 1, :type => 'D/D', :terms => Array['SLMLTD INCOME AC'], :color => @red, :translation => 'HORFIELD SPORTS CENTRE', :recurring_amount => 33.60},
    {:intTypeID => 0, :id => 500, :bank_account_id => 1, :type => 'D/D', :terms => Array['UK MAIL'], :color => @white, :translation => 'UK MAIL'},
    {:intTypeID => 0, :id => 600, :bank_account_id => 1, :type => 'POS', :terms => Array['UK MAIL'], :color => @white, :translation => 'UK MAIL'},
    {:intTypeID => 0, :id => 700, :bank_account_id => 1, :type => 'OTR', :terms => Array['07519616416'], :color => @white, :translation => 'ROSS JOY'},
    {:intTypeID => 0, :id => 800, :bank_account_id => 1, :type => 'OTR', :terms => Array['07980286590', 'SCOULDING L A'], :color => @white, :translation => 'LUKE SCOULDING'},
    {:intTypeID => 0, :id => 900, :bank_account_id => 1, :type => 'OTR', :terms => Array['07825126363'], :color => @white, :translation => 'LUKE CHAMBERLAIN'},
    {:intTypeID => 0, :id => 1000, :bank_account_id => 1, :type => 'BAC', :terms => Array['D LINDEN'], :color => @white, :translation => 'DEAN LINDEN'},
    {:intTypeID => 0, :id => 1100, :bank_account_id => 1, :type => 'BAC', :terms => Array['P HACKETT'], :color => @white, :translation => 'PHIL HACKETT'},
    {:intTypeID => 2, :id => 1150, :bank_account_id => 1, :type => 'BAC', :terms => Array['SALARY','T27 SYSTEMS'], :color => @cyan, :translation => 'BRIGHTPEARL WAGE', :recurring_amount => 1946.23}, # 1946.23
    {:intTypeID => 2, :id => 1200, :bank_account_id => 1, :type => 'BAC', :terms => Array['VIRGIN TV'], :color => @cyan, :translation => 'GARY SOLAN (VIRGIN MEDIA)', :recurring_amount => 30},
    {:intTypeID => 0, :id => 1400, :bank_account_id => 1, :type => 'BAC', :terms => Array['ALEX CARLIN'], :color => @white, :translation => 'ALEX CARLIN'},
    {:intTypeID => 0, :id => 1500, :bank_account_id => 1, :type => 'BAC', :terms => Array['J HARTRY '], :color => @white, :translation => 'JOE HARTRY'},
    {:intTypeID => 3, :id => 1600, :bank_account_id => 1, :type => 'POS', :terms => Array['SPOTIFY'], :color => @red, :translation => 'SPOTIFY', :recurring_amount => 19.98},
    {:intTypeID => 3, :id => 1700, :bank_account_id => 1, :type => 'POS', :terms => Array['LYNDA.COM'], :color => @red, :translation => 'LYNDA.COM', :recurring_amount => 16, :estimated => true},
    {:intTypeID => 3, :id => 1800, :bank_account_id => 1, :type => 'POS', :terms => Array['GITHUB.COM'], :color => @red, :translation => 'GITHUB.COM', :recurring_amount => 8.50, :estimated => true},
    {:intTypeID => 0, :id => 1900, :bank_account_id => 1, :type => 'POS', :terms => Array['TRANSFERWISE'], :color => @white, :translation => 'TRANFERWISE (WEDDING FUND)'},
    # NATWEST SAVINGS
    {:intTypeID => 0, :id => 2000, :bank_account_id => 3, :type => 'BAC', :terms => Array['TRANSFERWISE'], :color => @white, :translation => 'TRANFERWISE (REFUND)'},
    # HALIFAX ULTIMATE REWARD
    {:intTypeID => 3, :id => 2100, :bank_account_id => 4, :type => 'FEE', :terms => Array['ACCOUNT FEE'], :color => @red, :translation => 'ACCOUNT FEE (HALIFAX ULTIAMTE REWARD)', :recurring_amount => 15},
    {:intTypeID => 1, :id => 2200, :bank_account_id => 4, :type => 'CSH', :terms => Array[''], :color => @green, :translation => 'CASH'},
    {:intTypeID => 3, :id => 2250, :bank_account_id => 4, :type => 'DD', :terms => Array['DVLA-EU51GVC'], :color => @red, :translation => 'CAR TAX (DVLA-EU51GVC)', :recurring_amount => 19.68},
    # HALIFAX REWARD
    {:intTypeID => 3, :id => 2300, :bank_account_id => 5, :type => 'DEB', :terms => Array['CREDITEXPERT.CO.UK'], :color => @red, :translation => 'CREDITEXPERT', :recurring_amount => 9.99},
    {:intTypeID => 3, :id => 2350, :bank_account_id => 5, :type => 'DEB', :terms => Array['ANIMOTO'], :color => @red, :translation => 'ANIMOTO', :recurring_amount => 5},
    {:intTypeID => 0, :id => 2400, :bank_account_id => 5, :type => 'FPI', :terms => Array['PAYPAL WITHDRAWAL'], :color => @white, :translation => 'PAYPAL WITHDRAWAL'},
    {:intTypeID => 1, :id => 2500, :bank_account_id => 5, :type => 'CSH', :terms => Array[''], :color => @green, :translation => 'CASH'},
    {:intTypeID => 0, :id => 2550, :bank_account_id => 6, :type => 'D-C', :terms => Array[''], :color => @white, :translation => 'ISA INTEREST'},
    # LLOYDS CURRENT
    {:intTypeID => 3, :id => 2600, :bank_account_id => 8, :type => 'FPO', :terms => Array['STELLA TALIOTIS'], :color => @red, :translation => 'RENT', :recurring_amount => 250},
    {:intTypeID => 3, :id => 2700, :bank_account_id => 8, :type => 'DD', :terms => Array['VODAFONE LIMITED'], :color => @red, :translation => 'VODAFONE LIMITED', :recurring_amount => 60, :estimated => true},
    {:intTypeID => 3, :id => 2800, :bank_account_id => 8, :type => 'DD', :terms => Array['VIRGIN MEDIA'], :color => @red, :translation => 'VIRGIN MEDIA', :recurring_amount => 112.99, :estimated => true},
    {:intTypeID => 1, :id => 2900, :bank_account_id => 8, :type => 'CSH', :terms => Array[''], :color => @green, :translation => 'CASH'},
    {:intTypeID => 1, :id => 2950, :bank_account_id => 8, :type => 'DEP', :terms => Array[''], :color => @green, :translation => 'CASH'},
    {:intTypeID => 3, :id => 3000, :bank_account_id => 8, :type => 'DD', :terms => Array['TESCO BANK'], :color => @red, :translation => 'TESCO CAR INSURANCE', :recurring_amount => 62.73},
    {:intTypeID => 3, :id => 3100, :bank_account_id => 8, :type => 'FEE', :terms => Array['ACCOUNT FEE'], :color => @red, :translation => 'ACCOUNT FEE (LLOYDS CURRENT)', :recurring_amount => 15},
    {:intTypeID => 2, :id => 3200, :bank_account_id => 8, :type => 'FPI', :terms => Array['MATTHEW JONES'], :color => @cyan, :translation => 'MATT JONES (VIRGIN MEDIA)', :recurring_amount => 24},
  ]
  # Index the recognised-transaction rules by :id (stringified keys).
  @recognizedTransactionsIndexedID = {}
  @recognizedTransactions.each do |recognizedTransaction|
    @recognizedTransactionsIndexedID["#{recognizedTransaction[:id]}"] = recognizedTransaction
  end
  # Rules identifying transfers between my own accounts; matched by account
  # id + :type + any :terms regex, with optional :terms_not exclusions.
  @internalTransfers = Array[
    # NATWEST
    {:bank_account_id => Array[1, 2, 3], :type => 'BAC', :terms => Array['A RANNETSPERGER', 'HALIFAX ULTIMATE', 'HALIFAX REWARD', 'AR HALIFAX ACC', 'LLOYDS ACCOUNT']},
    {:bank_account_id => Array[1, 2, 3], :type => 'OTR', :terms => Array['CALL REF.NO.'], :terms_not => ['UK MAIL LIMITED', 'DEAN LINDEN', 'TRANSFERWISE']},
    {:bank_account_id => Array[1, 2, 3], :type => 'POS', :terms => Array['BARCLAYCARD', 'CAPITAL ONE']},
    # LLOYDS
    {:bank_account_id => Array[8], :type => 'FPO', :terms => Array['NATWEST AD GOLD', 'NATWEST STEP', 'NATWEST SAVINGS', 'LLOYDS BANK PLATIN']},
    {:bank_account_id => Array[8], :type => 'FPI', :terms => Array['RANNETSPERGER A NATWEST']},
    {:bank_account_id => Array[8], :type => 'TFR', :terms => Array['HALIFAX ULTIMATE', 'HALIFAX REWARD', 'A RANNETSPERGER']},
    {:bank_account_id => Array[7], :type => 'CC', :terms => Array['PAYMENT RECEIVED']},
    # HALIFAX
    {:bank_account_id => Array[4, 5], :type => 'DEB', :terms => Array['BARCLAYCARD']},
    {:bank_account_id => Array[4, 5], :type => 'FPO', :terms => Array['NATWEST']},
    {:bank_account_id => Array[4, 5], :type => 'FPI', :terms => Array['RANNETSPERGER A NATWEST']},
    {:bank_account_id => Array[4, 5], :type => 'TFR', :terms => Array['HALIFAX ULTIMATE', 'HALIFAX REWARD', 'A RANNETSPERGER']},
    {:bank_account_id => Array[6], :type => 'P-C', :terms => Array['']},
    {:bank_account_id => Array[6], :type => 'P-T', :terms => Array['']},
    {:bank_account_id => Array[6], :type => 'D-T', :terms => Array['']},
    # BARCLAYCARD
    {:bank_account_id => Array[9], :type => 'OTHER', :terms => Array['PAYMENT, THANK YOU']},
    # CAPITAL ONE
    {:bank_account_id => Array[10], :type => 'CR', :terms => Array['PAYMENT RECEIVED', 'DIRECT DEBIT PAYMENT']},
  ]
  # Transaction ids explicitly treated as internal transfers (see isInternalTransfer).
  @ignoredTransactions = Array.new
  # Hawaii Payments
  @ignoredTransactions.push(*Array[2556, 2557, 2558, 2555, 2545, 2567, 2576, 2566, 2959, 3328, 3364, 3310, 3349, 3405, 3413, 3424, 3482, 3483, 3492, 3493, 3543,
                                   3564, 3556, 3585, 3593, 3599, 3600, 3615, 3619, 3635, 3672, 3723, 3789, 3954, 3989, 4036, 4038])
  # Misc Globals
  @rightHandSideCount = 4
  @rightHandSideContent = Array.new
  @rightHandSideContentCount = -1
  @rightHandSideContentExists = true
  # Balance Globals — populated later by the calculate*/get* methods.
  @totalAvailable = 0
  @totalCredit = 0
  @totalCreditUsed = 0
  @totalCash = 0
  @moneyInRemaining = 0
  @moneyOutRemaining = 0
  @fixedMonthlyOutgoings = 0
  @creditScore = Array.new
  # Per-month aggregates for the 5-month summary table.
  @summaryData = {
    :month1 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
    :month2 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
    :month3 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
    :month4 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
    :month5 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
    :monthTotal => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0},
  }
  # Months — current month plus the four preceding ones (<< n subtracts n months).
  @month1 = DateTime.now
  @month2 = DateTime.now << 1
  @month3 = DateTime.now << 2
  @month4 = DateTime.now << 3
  @month5 = DateTime.now << 4
  # Get different modes.
  @untranslated = false
  @withIDs = false
  @withInternalTransfers = false
  if argv == 'untranslated'
    @untranslated = true
  elsif argv == 'with-ids'
    @withIDs = true
  elsif argv == 'with-internal-transfers'
    @withInternalTransfers = true
  end
  @rule = getRuleString(202)
  # Get banks into Hash (id => title).
  @banks = {}
  banksSQL = @databaseConnection.query('SELECT * FROM bank ORDER BY id ASC')
  banksSQL.each_hash do |row|
    @banks[row['id']] = row['title']
  end
  banksSQL.free
  # Get bank accounts into Hash (id => full row hash).
  @bankAccounts = {}
  bankAccountsSQL = @databaseConnection.query('SELECT * FROM bank_account ORDER BY id ASC')
  bankAccountsSQL.each_hash do |row|
    @bankAccounts[row['id']] = row
  end
  bankAccountsSQL.free
  # Get bank/credit card balances into Hash — latest fetched balance row per
  # account, from the table matching the account's type.
  @bankAccountBalances = {}
  @bankAccounts.each do |bankAccount|
    bankAccount = bankAccount[1]
    case bankAccount['bank_account_type_id'].to_i
    when 1
      bankAccountTable = 'bank_account_type_bank_account'
    when 2
      bankAccountTable = 'bank_account_type_credit_card'
    when 3
      bankAccountTable = 'bank_account_type_isa'
    else
      # NOTE(review): message reads 'bank_account_type' but only
      # 'bank_account_type_id' is referenced above — likely prints nil here.
      raise(RuntimeError, "bank_account_type => #{bankAccount['bank_account_type']} doesn't exist.")
    end
    balance = @databaseConnection.query("SELECT * FROM #{bankAccountTable} WHERE bank_account_id='#{bankAccount['id']}' ORDER BY date_fetched DESC LIMIT 1")
    @bankAccountBalances[bankAccount['id'].to_i] = balance.fetch_hash
    balance.free
  end
  # Get transactions into Hash — everything from the start of month5 onwards.
  @transactions = Array.new
  transactionsSQL = @databaseConnection.query("SELECT * FROM bank_account_transactions WHERE date >= '#{@month5.strftime('%Y-%m-01')}' ORDER BY date ASC, bank_account_id ASC, type ASC")
  transactionsSQL.each_hash do |transaction|
    # Skip ISA.
    if transaction['bank_account_id'].to_i == 6
      next
    end
    @transactions << transaction
  end
  transactionsSQL.free
  # Column widths for transactions
  @transWidth_1 = 20
  @transWidth_2 = 20
  @transWidth_3 = 12
  @transWidth_4 = 111
  @transWidth_5 = 6
  @transWidth_6 = 11
  @transWidth_7 = 12
  @transWidthTotal = @transWidth_1 + @transWidth_2 + @transWidth_3 + @transWidth_4 + @transWidth_5 + @transWidth_6 + @transWidth_7 + 8
  # Column widths for balances
  @colWidth_1 = 20
  @colWidth_2 = 22
  @colWidth_3 = 20
  @colWidth_4 = 20
  @colWidth_5 = 20
  @colWidth_6 = 20
  @colWidth_7 = 20
  @colWidth_8 = 21
  @colWidth_9 = 2
  @colWidth_10 = 24
  @colWidthTotal = @colWidth_1 + @colWidth_2 + @colWidth_3 + @colWidth_4 + @colWidth_5 + @colWidth_6 + @colWidth_7 + @colWidth_8 + @colWidth_9 + @colWidth_10 + 9
  # Column widths for balances
  @summaryWidth_1 = 43
  @summaryWidth_2 = 20
  @summaryWidth_3 = 20
  @summaryWidth_4 = 20
  @summaryWidth_5 = 20
  @summaryWidth_6 = 20
  @summaryWidth_7 = 21
  @summaryWidth_8 = 2
  @summaryWidth_9 = 24
  @summaryWidthTotal = @summaryWidth_1 + @summaryWidth_2 + @summaryWidth_3 + @summaryWidth_4 + @summaryWidth_5 + @summaryWidth_6 + @summaryWidth_7 + @summaryWidth_8 + @summaryWidth_9 + 8
end
# Main function — entry point. Order matters: the calculate*/get* steps
# populate the totals that the display* steps read.
def run
  # MAKE SURE WE'RE ONLINE
  checkMachineIsOnline
  # DO ALL CALCULATIONS
  calculateSummary
  calculateMoneyRemaining
  calculateFixedMonthlyOutgoings
  # DO GETS
  getCreditScore
  getTotals
  # START OUTPUT
  displayTransactions
  displayCreditCards
  displayBankAccounts
  displaySummary
end
# Display Transactions — renders the per-transaction table with month
# dividers and day spacing, honouring the @untranslated / @withIDs /
# @withInternalTransfers display modes set in initialize.
def displayTransactions
  table(:border => false) do
    # Top rule row, header labels, then a second rule row.
    row do
      column(getRuleString(@transWidth_1), :width => @transWidth_1, :align => 'left', :bold => 'true')
      column(getRuleString(@transWidth_2), :width => @transWidth_2, :align => 'left', :bold => 'true')
      column(getRuleString(@transWidth_3), :width => @transWidth_3, :align => 'left')
      column(getRuleString(@transWidth_4), :width => @transWidth_4, :align => 'right')
      column(getRuleString(@transWidth_5), :width => @transWidth_5, :align => 'left', :bold => 'true')
      column(getRuleString(@transWidth_6), :width => @transWidth_6, :align => 'right')
      column(getRuleString(@transWidth_7), :width => @transWidth_7, :align => 'right')
    end
    row do
      column(' Bank Name')
      column('Account Name')
      column('Date')
      column('Description')
      column('Type')
      column('Paid In')
      column('Paid Out')
    end
    row do
      column(getRuleString(@transWidth_1))
      column(getRuleString(@transWidth_2))
      column(getRuleString(@transWidth_3))
      column(getRuleString(@transWidth_4))
      column(getRuleString(@transWidth_5))
      column(getRuleString(@transWidth_6))
      column(getRuleString(@transWidth_7))
    end
    last_date = nil
    @transactions.each do |transaction|
      # Determine Bank Text Color
      bankAndColor = getBankAndColor(@bankAccounts[transaction['bank_account_id']]['bank_id'])
      # Translation Handling — raw description in 'untranslated' mode,
      # otherwise the recognised-rule translation (or upcased raw fallback).
      transactionDetails = getDescriptionAndColor(transaction)
      transactionColor = transactionDetails[:color]
      if @untranslated
        transactionDescription = transaction['description']
      else
        transactionDescription = transactionDetails[:description]
      end
      # Internal Transfer Handling — normally skipped; in
      # 'with-internal-transfers' mode they are shown instead (green when
      # explicitly ignored, yellow when rule-matched) and everything else white.
      if isInternalTransfer(transaction)
        if @withInternalTransfers
          if @ignoredTransactions.include?(transaction['id'].to_i)
            transactionColor = @green
          else
            transactionColor = @yellow
          end
        else
          next
        end
      else
        if @withInternalTransfers
          transactionColor = @white
        end
      end
      # Insert MONTH divider
      if last_date != nil
        if DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%B') != DateTime.strptime(last_date, '%Y-%m-%d').strftime('%B')
          displayTransactionsMonth(DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%B'))
        else
          # Insert space if new day
          if last_date != transaction['date']
            displayTransactionsBlankRow
          end
        end
      end
      # Format description — in 'with-ids' mode prepend "#<id>" and right-pad
      # so the (truncated) description stays right-aligned within the column.
      if @withIDs
        descriptionAddedInfo = "##{transaction['id']}"
        description = transactionDescription[0..((@transWidth_4 - 2) - descriptionAddedInfo.length)]
        description = "#{descriptionAddedInfo}#{getRuleString(@transWidth_4 - (descriptionAddedInfo.length + description.length), ' ')}#{description}"
      else
        description = transactionDescription[0..(@transWidth_4 - 2)]
      end
      row do
        column(" #{bankAndColor[0]}", :color => bankAndColor[1])
        column(@bankAccounts[transaction['bank_account_id']]['title'], :color => bankAndColor[1])
        column(DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%d %b %Y'), :color => transactionColor)
        column("#{description}", :color => transactionColor)
        column(transaction['type'], :color => transactionColor)
        column((transaction['paid_in'].to_f == 0) ? '' : getAsCurrency(transaction['paid_in'])[0], :color => transactionColor)
        column((transaction['paid_out'].to_f == 0) ? '' : getAsCurrency(0 - transaction['paid_out'].to_f)[0], :color => transactionColor)
      end
      last_date = transaction['date']
    end
  end
  puts "\n#{getRuleString(@colWidthTotal)}"
end
# Translates a raw transaction into its display description and color using
# the first matching @recognizedTransactions rule (account id + type + any
# :terms regex). Unmatched transactions fall back to the upcased raw
# description in white.
# @return Hash with :description and :color
def getDescriptionAndColor(transaction)
  rule = @recognizedTransactions.find do |candidate|
    candidate[:bank_account_id] == transaction['bank_account_id'].to_i &&
      candidate[:type] == transaction['type'] &&
      candidate[:terms].any? { |term| transaction['description'] =~ /#{term}/ }
  end
  return {:description => rule[:translation].upcase, :color => rule[:color]} if rule
  {:description => transaction['description'].upcase, :color => @white}
end
# Returns TRUE if the transaction is a transfer between my own accounts:
# either its id is in the explicit @ignoredTransactions list, or it matches
# an @internalTransfers rule (account id regex + upcased-description terms +
# exact type) without tripping that rule's :terms_not exclusions.
# @return boolean (true/false)
def isInternalTransfer(transaction)
  return true if inArray(@ignoredTransactions, transaction['id'].to_i)
  @internalTransfers.each do |rule|
    next unless rule[:bank_account_id].any? { |acct| transaction['bank_account_id'] =~ /#{acct}/ }
    next unless rule[:terms].any? { |term| transaction['description'].upcase =~ /#{term}/ }
    next unless rule[:type] == transaction['type']
    # :terms_not is matched against the raw (un-upcased) description,
    # mirroring the original behaviour.
    if rule.has_key?(:terms_not) && rule[:terms_not].any? { |term| transaction['description'] =~ /#{term}/ }
      return false
    end
    return true
  end
  false
end
# Inserts a divider row displaying the (upcased) month name, padded with
# blank rows above and below.
# @param month [String] month name, e.g. "January"
# @return void
def displayTransactionsMonth(month)
  displayTransactionsBlankRow
  row do
    # NOTE(review): this flag is set here but never read anywhere in this
    # chunk — presumably consumed elsewhere; confirm before removing.
    @pastMonthDeployed = true
    column(getRuleString(@transWidth_1))
    column(getRuleString(@transWidth_2))
    column(getRuleString(@transWidth_3))
    # Banner like " [ JANUARY ] ────" padded to fill the description column.
    column(" [ #{month.upcase} ] #{getRuleString(@transWidth_4 - (month.length + 6))}")
    column(getRuleString(@transWidth_5))
    column(getRuleString(@transWidth_6))
    column(getRuleString(@transWidth_7))
  end
  displayTransactionsBlankRow
end
# Emits one fully blank row (all seven transaction columns empty) to act
# as vertical spacing in the transaction table.
# @return void
def displayTransactionsBlankRow
  row do
    7.times { column('') }
  end
end
# Display Bank Accounts — renders the balance table for current accounts
# (bank_account_type_id == 1), skipping account id 3 (NatWest Savings,
# shown separately in the summary panel).
def displayBankAccounts
  table(:border => false) do
    row do
      column(' Bank', :width => @colWidth_1, :align => 'left', :bold => 'true')
      column('Name', :width => @colWidth_2, :align => 'left', :bold => 'true')
      column('Balance', :width => @colWidth_3, :align => 'right')
      column('Available', :width => @colWidth_4, :align => 'right')
      column('Overdraft', :width => @colWidth_5, :align => 'right')
      column('', :width => @colWidth_6, :align => 'right')
      column('', :width => @colWidth_7, :align => 'right')
      column('', :width => @colWidth_8, :align => 'right')
      column(' |', :width => @colWidth_9, :align => 'right')
      column('Last Fetch', :width => @colWidth_10, :align => 'right')
    end
    row do
      column(getRuleString(@colWidth_1))
      column(getRuleString(@colWidth_2))
      column(getRuleString(@colWidth_3))
      column(getRuleString(@colWidth_4))
      column(getRuleString(@colWidth_5))
      column(getRuleString(@colWidth_6))
      column(getRuleString(@colWidth_7))
      column(getRuleString(@colWidth_8))
      column(getRuleString(@colWidth_9))
      column(getRuleString(@colWidth_10))
    end
    # FIX: the block parameter used to be named `row`, shadowing the
    # Columnist `row` DSL method called just below. Destructure the
    # Hash pair into `account` instead (behaviour unchanged).
    @bankAccounts.each do |_id, account|
      if account['bank_account_type_id'].to_i == 1 && account['id'].to_i != 3
        bankAndColor = getBankAndColor(account['bank_id'])
        balances = @bankAccountBalances[account['id'].to_i]
        balances['date_fetched_string'] = normalizeTimestamp(balances['date_fetched_string'])
        row do
          column(" #{bankAndColor[0]}", :color => bankAndColor[1])
          column(account['title'], :color => bankAndColor[1])
          column(getAsCurrency(balances['balance'])[0], :color => getAsCurrency(balances['balance'])[1])
          column(getAsCurrency(balances['balance_available'])[0], :color => @white)
          column(getAsCurrency(balances['balance_overdraft'])[0], :color => @white)
          column('—', :color => @white)
          column('—', :color => @white)
          column('—', :color => @white)
          column(' |')
          column("#{getTimeAgoInHumanReadable(balances['date_fetched_string'])}", :color => @white)
        end
      end
    end
  end
  puts "#{getRuleString(@colWidthTotal)}"
end
# Display CreditCards — prints the summary banner then the credit-card table
# (bank_account_type_id == 2) with balances, limits, pending amounts and
# minimum-payment details (red when the payment is due within 3 days).
def displayCreditCards
  summaryTitle = "\x1B[48;5;92m SUMMARY FOR \xe2\x86\x92 #{DateTime.now.strftime('%^B %e, %Y (%^A)')} \x1B[0m"
  puts '|'.rjust(173, ' ')
  puts "#{summaryTitle.ljust(186, ' ')}|"
  puts '|'.rjust(173, ' ')
  table(:border => false) do
    row do
      column(' Credit Card', :width => @colWidth_1, :align => 'left', :bold => 'true')
      column('Name', :width => @colWidth_2, :align => 'left', :bold => 'true')
      column('Balance', :width => @colWidth_3, :align => 'right')
      column('Available', :width => @colWidth_4, :align => 'right')
      column('Limit', :width => @colWidth_5, :align => 'right')
      column('Pending', :width => @colWidth_6, :align => 'right')
      column('Minimum Payment', :width => @colWidth_7, :align => 'right')
      column('Payment Date', :width => @colWidth_8, :align => 'right')
      column(' |', :width => @colWidth_9, :align => 'left')
      column('Last Fetch', :width => @colWidth_10, :align => 'right')
    end
    row do
      column(getRuleString(@colWidth_1))
      column(getRuleString(@colWidth_2))
      column(getRuleString(@colWidth_3))
      column(getRuleString(@colWidth_4))
      column(getRuleString(@colWidth_5))
      column(getRuleString(@colWidth_6))
      column(getRuleString(@colWidth_7))
      column(getRuleString(@colWidth_8))
      column(getRuleString(@colWidth_9))
      column(getRuleString(@colWidth_10))
    end
    # FIX: the block parameter used to be named `row`, shadowing the
    # Columnist `row` DSL method called below. Destructure the Hash pair
    # into `account` instead (behaviour unchanged).
    @bankAccounts.each do |_id, account|
      if account['bank_account_type_id'].to_i == 2
        bankAndColor = getBankAndColor(account['bank_id'])
        balances = @bankAccountBalances[account['id'].to_i]
        balances['date_fetched_string'] = normalizeTimestamp(balances['date_fetched_string'])
        row do
          column(" #{bankAndColor[0]}", :color => bankAndColor[1])
          column(account['title'], :color => bankAndColor[1])
          # Card balances are stored positive; display as negative (owed).
          creditCardBalance = 0 - balances['balance'].to_f
          minimumPaymentDate = balances['minimum_payment_date']
          if minimumPaymentDate == '0000-00-00'
            # Sentinel past date so "no payment due" renders as '—' below.
            minimumPaymentDate = '1983-10-29'
          end
          timeStamp = DateTime.strptime(minimumPaymentDate, '%Y-%m-%d')
          timeNow = DateTime.now
          minimumPaymentDateIn = (timeStamp - timeNow).to_i
          if minimumPaymentDateIn <= 3
            minimumPaymentColor = @red
          else
            minimumPaymentColor = @white
          end
          # Calculate Pending Transacions for LLoyds & Capital One.
          if account['id'].to_i == 7 || account['id'].to_i == 10
            balances['pending_transactions'] = '%.2f' % (balances['balance_limit'].to_f - balances['balance_available'].to_f - balances['balance'].to_f)
          end
          column(getAsCurrency(creditCardBalance)[0], :color => (getAsCurrency(creditCardBalance)[1] == @red) ? @red : @white)
          column(getAsCurrency(balances['balance_available'])[0], :color => @white)
          column(getAsCurrency(balances['balance_limit'])[0], :color => @white)
          column(balances['pending_transactions'].to_f <= 0 ? '—' : getAsCurrency(0 - balances['pending_transactions'].to_f)[0], :color => balances['pending_transactions'].to_f <= 0 ? @white : getAsCurrency(0 - balances['pending_transactions'].to_f)[1])
          column(getAsCurrency(balances['minimum_payment'])[0], :color => (balances['minimum_payment'].to_f > 0) ? ((minimumPaymentDateIn <= 3) ? @red : @white) : @white)
          if minimumPaymentDateIn < 0 || balances['minimum_payment'].to_f == 0
            column('—', :color => @white)
          else
            column("#{DateTime.strptime(minimumPaymentDate, '%Y-%m-%d').strftime('%d %b %Y')}", :color => minimumPaymentColor)
          end
          column(' |')
          column("#{getTimeAgoInHumanReadable(balances['date_fetched_string'])}", :color => @white)
        end
      end
    end
  end
  puts "#{getRuleString(@colWidthTotal)}\n"
end
# Display Summary
# Renders the 5-month summary table (months newest-first, left to right) with a
# right-hand sidebar of headline figures, then a progress "star" marking how far
# through the current month we are.
# NOTE: each call to insertRightHandContent advances a stateful counter, so the
# number and order of column() calls per row must not change.
# @return void
def displaySummary
  # Get some info for 'Estimated [XX]' column.
  endOfMonthDate = DateTime.new(@month1.strftime('%Y').to_i, @month1.strftime('%m').to_i, getEndOfMonthDay, 0, 0, 0, 0)
  # Ordinal suffix for the month-end day (always 28-31, so only 'th'/'st' needed).
  case endOfMonthDate.strftime('%d').to_i
  when 28..30
    endOfMonthSuffix = 'th'
  when 31
    endOfMonthSuffix = 'st'
  else
    endOfMonthSuffix = ''
  end
  projectedOEMBalance = getProjectedOEMBalance
  # Sidebar content: alternating [label, color] / [value, color] pairs, consumed
  # two rows at a time by insertRightHandContent.
  @rightHandSideContent = Array[
    Array['Current Balance', @white],
    Array[getAsCurrency(@totalCash)[0], getAsCurrency(@totalCash)[1]],
    Array['After Bills/Wages', @white],
    Array[getBalanceAfterBills.nil? ? '-' : getAsCurrency(getBalanceAfterBills)[0], getBalanceAfterBills.nil? ? @white : getAsCurrency(getBalanceAfterBills)[1]],
    Array["Estimated [#{endOfMonthDate.strftime('%b %d')}#{endOfMonthSuffix}]", @white],
    Array[projectedOEMBalance.nil? ? '—' : getAsCurrency(projectedOEMBalance)[0], projectedOEMBalance.nil? ? @white : getAsCurrency(projectedOEMBalance)[1]],
    Array['NatWest Savings Account', @white],
    Array[getAsCurrency(@bankAccountBalances[3]['balance'])[0], getAsCurrency(@bankAccountBalances[3]['balance'])[1]],
    Array['Credit Total', @white],
    Array[getAsCurrency(@totalCredit)[0], @cyan],
    Array['Credit Used', @white],
    Array["#{calculateCreditUsed}%", @magenta],
    Array['Monthly Outgoings', @white],
    Array[getAsCurrency(@fixedMonthlyOutgoings)[0], @cyan],
    Array['Remaining Outgoings', @white],
    # NOTE(review): prefixes '—' when @moneyOutRemaining > 0 — presumably used
    # as a minus sign in the rendered output; confirm intent.
    Array["#{@moneyOutRemaining > 0 ? '—' : ''}#{getAsCurrency(@moneyOutRemaining)[0]}", @moneyOutRemaining <= 0 ? @white : @red],
    Array['Remaining Incomings', @white],
    Array["#{getAsCurrency(@moneyInRemaining)[0]}", @moneyInRemaining <= 0 ? @white : @cyan],
    Array['Credit Score', @white],
    Array["#{@creditScore[0]} (#{@creditScore[1]})", @green],
  ]
  table(:border => false) do
    # Header row: month names plus the 5-month total column.
    row do
      column('', :width => @summaryWidth_1, :align => 'left')
      column("#{@month1.strftime('%B %Y')}", :width => @summaryWidth_2, :align => 'right', :color => 245)
      column("#{@month2.strftime('%B %Y')}", :width => @summaryWidth_3, :align => 'right', :color => 245)
      column("#{@month3.strftime('%B %Y')}", :width => @summaryWidth_4, :align => 'right', :color => 245)
      column("#{@month4.strftime('%B %Y')}", :width => @summaryWidth_5, :align => 'right', :color => 245)
      column("#{@month5.strftime('%B %Y')}", :width => @summaryWidth_6, :align => 'right', :color => 245)
      column('5-Month Total', :width => @summaryWidth_7, :align => 'right', :color => 245)
      column(' |', :width => @summaryWidth_8, :align => 'left')
      column(insertRightHandContent[0], :color => insertRightHandContent[1], :width => @summaryWidth_9, :align => 'right')
    end
    displaySummaryDivider
    row do
      column(' STARTING BALANCE', :color => @white)
      column("#{@summaryData[:month1][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month1][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month2][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month2][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month3][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month3][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month4][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month4][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month5][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month5][:starting_balances][:totalCash])[0]}", :color => @white)
      column('—', :color => @white)
      column(' |')
      column(insertRightHandContent[0], :color => insertRightHandContent[1])
    end
    displaySummaryDivider
    row do
      column(' CASH DEPOSITED', :color => @green)
      column("#{@summaryData[:month1][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:cash_in])[0]}", :color => (@summaryData[:month1][:cash_in].to_f <= 0 ? @white : @green))
      column("#{@summaryData[:month2][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:cash_in])[0]}", :color => (@summaryData[:month2][:cash_in].to_f <= 0 ? @white : @green))
      column("#{@summaryData[:month3][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:cash_in])[0]}", :color => (@summaryData[:month3][:cash_in].to_f <= 0 ? @white : @green))
      column("#{@summaryData[:month4][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:cash_in])[0]}", :color => (@summaryData[:month4][:cash_in].to_f <= 0 ? @white : @green))
      column("#{@summaryData[:month5][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:cash_in])[0]}", :color => (@summaryData[:month5][:cash_in].to_f <= 0 ? @white : @green))
      column("#{@summaryData[:monthTotal][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:cash_in])[0]}", :color => (@summaryData[:monthTotal][:cash_in].to_f <= 0 ? @white : @green))
      column(' |')
      column(insertRightHandContent[0], :color => insertRightHandContent[1])
    end
    displaySummaryDivider
    # One row per recognized recurring-in transaction (intTypeID 2).
    recognizedTransactionLoop(2)
    # row do
    #   column(' MISC (IN)', :color => @plus_color)
    #   column(@summaryData[:month1][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:misc_in])[0], :color => @summaryData[:month1][:misc_in] <= 0 ? @white : @plus_color)
    #   column(@summaryData[:month2][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:misc_in])[0], :color => @summaryData[:month2][:misc_in] <= 0 ? @white : @plus_color)
    #   column(@summaryData[:month3][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:misc_in])[0], :color => @summaryData[:month3][:misc_in] <= 0 ? @white : @plus_color)
    #   column(@summaryData[:month4][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:misc_in])[0], :color => @summaryData[:month4][:misc_in] <= 0 ? @white : @plus_color)
    #   column(@summaryData[:month5][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:misc_in])[0], :color => @summaryData[:month5][:misc_in] <= 0 ? @white : @plus_color)
    #   column(@summaryData[:monthTotal][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:misc_in])[0], :color => @summaryData[:monthTotal][:misc_in] <= 0 ? @white : @plus_color)
    #   column(' |')
    #   column(insertRightHandContent[0], :color => insertRightHandContent[1])
    # end
    displaySummaryDivider
    # One row per recognized recurring-out transaction (intTypeID 3).
    recognizedTransactionLoop(3)
    # row do
    #   column(' MISC (OUT)', :color => @minus_color)
    #   column(@summaryData[:month1][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:misc_out])[0], :color => @summaryData[:month1][:misc_out] <= 0 ? @white : @minus_color)
    #   column(@summaryData[:month2][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:misc_out])[0], :color => @summaryData[:month2][:misc_out] <= 0 ? @white : @minus_color)
    #   column(@summaryData[:month3][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:misc_out])[0], :color => @summaryData[:month3][:misc_out] <= 0 ? @white : @minus_color)
    #   column(@summaryData[:month4][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:misc_out])[0], :color => @summaryData[:month4][:misc_out] <= 0 ? @white : @minus_color)
    #   column(@summaryData[:month5][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:misc_out])[0], :color => @summaryData[:month5][:misc_out] <= 0 ? @white : @minus_color)
    #   column(@summaryData[:monthTotal][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:misc_out])[0], :color => @summaryData[:monthTotal][:misc_out] <= 0 ? @white : @minus_color)
    #   column(' |')
    #   column(insertRightHandContent[0], :color => insertRightHandContent[1])
    # end
    displaySummaryDivider
    row do
      column(' TOTAL MONEY IN', :color => @white)
      column(@summaryData[:month1][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:total_in])[0], :color => @white)
      column(@summaryData[:month2][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:total_in])[0], :color => @white)
      column(@summaryData[:month3][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:total_in])[0], :color => @white)
      column(@summaryData[:month4][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:total_in])[0], :color => @white)
      column(@summaryData[:month5][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:total_in])[0], :color => @white)
      column(@summaryData[:monthTotal][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:total_in])[0], :color => @white)
      column(' |')
      column(insertRightHandContent[0], :color => insertRightHandContent[1])
    end
    row do
      column(' TOTAL MONEY OUT', :color => @white)
      # Negated (0 - total) so outgoings render with a leading minus sign.
      column(@summaryData[:month1][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month1][:total_out])[0], :color => @white)
      column(@summaryData[:month2][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month2][:total_out])[0], :color => @white)
      column(@summaryData[:month3][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month3][:total_out])[0], :color => @white)
      column(@summaryData[:month4][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month4][:total_out])[0], :color => @white)
      column(@summaryData[:month5][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month5][:total_out])[0], :color => @white)
      column(@summaryData[:monthTotal][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:monthTotal][:total_out])[0], :color => @white)
      column(' |')
      column(insertRightHandContent[0], :color => insertRightHandContent[1])
    end
    displaySummaryDivider
    row do
      column(' ENDING BALANCE', :color => @white)
      # Each month's ending balance is the next (newer) month's starting
      # balance, hence the indexes are shifted one column to the right.
      column('—', :color => @white)
      column("#{@summaryData[:month1][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month1][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month2][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month2][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month3][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month3][:starting_balances][:totalCash])[0]}", :color => @white)
      column("#{@summaryData[:month4][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month4][:starting_balances][:totalCash])[0]}", :color => @white)
      column('—', :color => @white)
      column(' |')
      column(insertRightHandContent[0], :color => insertRightHandContent[1])
    end
    displaySummaryDivider
    row do
      column(' PROFIT/LOSS', :color => @white)
      column(@summaryData[:month1][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month1][:profit_loss])[0], :color => @summaryData[:month1][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month1][:profit_loss])[1])
      column(@summaryData[:month2][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month2][:profit_loss])[0], :color => @summaryData[:month2][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month2][:profit_loss])[1])
      column(@summaryData[:month3][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month3][:profit_loss])[0], :color => @summaryData[:month3][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month3][:profit_loss])[1])
      column(@summaryData[:month4][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month4][:profit_loss])[0], :color => @summaryData[:month4][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month4][:profit_loss])[1])
      column(@summaryData[:month5][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month5][:profit_loss])[0], :color => @summaryData[:month5][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month5][:profit_loss])[1])
      column(@summaryData[:monthTotal][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:profit_loss])[0], :color => @summaryData[:monthTotal][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:monthTotal][:profit_loss])[1])
      column(' |')
      column(insertRightHandContent[0], :color => insertRightHandContent[1])
    end
  end
  puts "#{getRuleString(@summaryWidthTotal)}"
  # Calculates (displays) where to put arrow depending on how far through the month we are..
  currentDay = @month1.strftime('%d').to_f
  lastDay = (getEndOfMonthDay.to_f) - 1
  percentOfMonthLeft = 100 - (currentDay - 1) / (lastDay / 100)
  pixelsRemaining = ((@summaryWidthTotal - 1).to_f / 100) * percentOfMonthLeft
  pixelToPutArrow = ((@summaryWidthTotal - 1) - pixelsRemaining)
  # \xe2\x98\x85 is a UTF-8 star glyph; 36/33 are cyan/yellow ANSI codes.
  puts " \x1B[36m#{getRuleString(pixelToPutArrow - 1, ' ')}\x1B[33m\xe2\x98\x85\x1B[0m\n\n"
  # Uncomment for 'Enter to clear' functionality after script run.
  # 31-10-2014 - Removing this because it's annoying.
  # enter_to_clear
end
# Emits one horizontal divider row across the seven summary columns, the
# separator pipe, and the next right-hand sidebar cell.
# @return void
def displaySummaryDivider
  row do
    (1..7).each do |index|
      width = instance_variable_get("@summaryWidth_#{index}")
      column(getRuleString(width), :bold => false, :color => @white)
    end
    column(' |', :bold => false, :color => @white)
    column(insertRightHandContent[0], :color => insertRightHandContent[1], :bold => false)
  end
end
# Renders one summary-table row per recognized recurring transaction of the
# given internal type (2 = recurring in, 3 = recurring out), showing the
# amount paid/received for each of the five tracked months plus the total.
# @return void
def recognizedTransactionLoop(intTypeId)
  @recognizedTransactions.each do |recognizedTransaction|
    if recognizedTransaction[:intTypeID] == intTypeId
      # Each amt is [amount, getAsCurrency(amount)] from runCalculationFor.
      amt1, amt2, amt3, amt4, amt5, amtTotal = runCalculationFor(method(:calculateAmountPaidReceivedForRecognizedTransaction), recognizedTransaction[:id])
      plus_color = 235
      minus_color = 235
      if intTypeId == 2
        color = Array[plus_color, plus_color, plus_color, plus_color, plus_color, plus_color]
      else
        color = Array[minus_color, minus_color, minus_color, minus_color, minus_color, minus_color]
      end
      # Only do estimations for 1st column (current month)
      # When nothing has been paid yet this month, substitute the expected
      # recurring amount ('~'-prefixed when flagged as estimated) and a
      # non-zero sentinel (23) so the "<= 0 → dash" checks below render it.
      if amt1[0] == 0
        amt1 = Array[23, Array["#{(!recognizedTransaction[:estimated].nil?) ? '~' : ''}#{getAsCurrency(recognizedTransaction[:recurring_amount])[0].delete('£')}", @white]]
        if intTypeId == 2
          color[0] = @green
        else
          color[0] = @red
        end
      end
      row do
        column(" #{recognizedTransaction[:translation]}", :color => (intTypeId == 2) ? 47 : 196)
        # The '—' interpolated before outgoing amounts acts as a minus sign.
        column(amt1[0] <= 0 ? '—' : "#{intTypeId == 3 && recognizedTransaction[:estimated].nil? ? '—' : ''}#{amt1[1][0]}", :color => amt1[0] <= 0 ? @white : color[0])
        column(amt2[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt2[1][0]}", :color => amt2[0] <= 0 ? @white : color[1])
        column(amt3[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt3[1][0]}", :color => amt3[0] <= 0 ? @white : color[2])
        column(amt4[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt4[1][0]}", :color => amt4[0] <= 0 ? @white : color[3])
        column(amt5[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt5[1][0]}", :color => amt5[0] <= 0 ? @white : color[4])
        column(amtTotal[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amtTotal[1][0]}", :color => amtTotal[0] <= 0 ? @white : color[5])
        column(' |')
        column(insertRightHandContent[0], :color => insertRightHandContent[1])
      end
    end
  end
end
# Returns the next [text, color] cell for the right-hand sidebar. Two sidebar
# entries (label + value) are shown per 6-row cycle: rows 1-2 show the current
# entry, row 3 advances to the next entry (rows 3-4 show it), rows 5-6 are
# blank, and row 7 wraps the cycle.
# @return array
def insertRightHandContent
  @rightHandSideCount += 1
  case
  when @rightHandSideCount == 3
    @rightHandSideContentCount += 1
  when @rightHandSideCount >= 7
    @rightHandSideCount = 1
    @rightHandSideContentCount += 1
  when @rightHandSideCount >= 5
    # Deliberately blank rows between sidebar entries. (A '----' divider
    # could be emitted here instead — kept blank on purpose.)
    return Array['', @white]
  end
  entry = @rightHandSideContent[@rightHandSideContentCount]
  if entry.nil?
    # Ran out of sidebar entries: remember that and emit an empty cell.
    @rightHandSideContentExists = false
    return Array['', 208]
  end
  Array[entry[0], entry[1]]
end
# Runs +callback+ once per tracked month (@month1..@month5), pairing each
# result with its currency-formatted form, and appends the 5-month total.
# (Was five copy-pasted var1..var5 blocks; collapsed into a map/reduce.)
#
# @param callback [Method, Proc] invoked as callback.call(month, param1)
# @param param1 [Object, nil] optional extra argument forwarded to callback
# @return [Array] six [amount, getAsCurrency(amount)] pairs — one per month,
#   then the grand total.
def runCalculationFor(callback, param1 = nil)
  results = [@month1, @month2, @month3, @month4, @month5].map do |month|
    amount = callback.call(month, param1)
    [amount, getAsCurrency(amount)]
  end
  total = results.reduce(0) { |sum, (amount, _currency)| sum + amount }
  results << [total, getAsCurrency(total)]
end
# Get all the totals (for current month)
# Populates the four headline total ivars from the latest account balances.
# @return void
def getTotals
  totals = calculateTotals(@bankAccountBalances)
  @totalAvailable, @totalCreditUsed, @totalCredit, @totalCash =
    totals.values_at(:totalAvailable, :totalCreditUsed, :totalCredit, :totalCash)
end
# Sums the headline figures across the hard-coded account set
# (ids 1, 2, 4, 5, 8 = current accounts with overdrafts;
#  ids 7, 9, 10, 11 = credit cards with limits).
# @return object — hash of :totalAvailable, :totalCreditUsed, :totalCredit,
#   :totalCash; all nil when any account entry is missing.
def calculateTotals(data)
  # If any of the balances are nil, abort mission.
  data.each do |pair|
    next unless pair.any?(&:nil?)
    return {
      :totalAvailable => nil,
      :totalCreditUsed => nil,
      :totalCredit => nil,
      :totalCash => nil
    }
  end
  currentAccounts = [1, 2, 4, 5, 8]
  creditCards = [7, 9, 10, 11]
  allAccounts = [1, 2, 4, 5, 7, 8, 9, 10, 11]
  totalAvailable = allAccounts.reduce(0.0) do |sum, id|
    sum + data[id]['balance_available'].to_f
  end.round(2)
  # Credit used = card balances (incl. pending) plus any overdrawn current account.
  cardUsage = creditCards.reduce(0.0) do |sum, id|
    sum + data[id]['balance'].to_f + data[id]['pending_transactions'].to_f
  end
  overdrawn = currentAccounts.reduce(0.0) do |sum, id|
    balance = data[id]['balance'].to_f
    sum + (balance < 0 ? -balance : 0)
  end
  totalCreditUsed = (cardUsage + overdrawn).round(2)
  totalCredit = (
    currentAccounts.reduce(0.0) { |sum, id| sum + data[id]['balance_overdraft'].to_f } +
    creditCards.reduce(0.0) { |sum, id| sum + data[id]['balance_limit'].to_f }
  ).round(2)
  totalCash = (totalAvailable - totalCredit).round(2)
  {
    :totalAvailable => totalAvailable,
    :totalCreditUsed => totalCreditUsed,
    :totalCredit => totalCredit,
    :totalCash => totalCash,
  }
end
# Get summary data for 5 month output
# Builds @summaryData: per-month starting balances (queried from the DB),
# per-recognized-transaction amounts, misc in/out, totals, a cross-month
# :monthTotal rollup, and per-month profit/loss.
# @return void
def calculateSummary
  # Get start of month balances
  Array[@month1, @month2, @month3, @month4, @month5].each do |month|
    # Get start of month balances.
    thisMonthBalances = {}
    @bankAccounts.each do |bankAccount|
      bankAccount = bankAccount[1]
      # Each account type stores its balances in its own table.
      case bankAccount['bank_account_type_id'].to_i
      when 1
        bankAccountTable = 'bank_account_type_bank_account'
      when 2
        bankAccountTable = 'bank_account_type_credit_card'
      when 3
        bankAccountTable = 'bank_account_type_isa'
      else
        raise(RuntimeError, "bank_account_type => #{bankAccount['bank_account_type']} doesn't exist.")
      end
      # Earliest fetch within the first week of the month stands in for the
      # month's opening balance; fetch_hash is nil when none exists.
      balance = @databaseConnection.query("SELECT * FROM #{bankAccountTable} WHERE bank_account_id='#{bankAccount['id']}' AND (date_fetched>='#{month.strftime('%Y-%m-01')}' AND date_fetched<='#{month.strftime('%Y-%m-07')}') ORDER BY date_fetched ASC LIMIT 1")
      thisMonthBalances[bankAccount['id'].to_i] = balance.fetch_hash
      balance.free
    end
    monthObject = getMonthObject(month.strftime('%Y-%m'))
    monthObject[:starting_balances] = calculateTotals(thisMonthBalances)
  end
  @transactions.each do |transaction|
    # Skip internal transfers
    if isInternalTransfer(transaction)
      next
    end
    # Find out what month we're in and retrieve relevant location in memory for object.
    monthObject = getMonthObject(DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%Y-%m'))
    # If it's a month we don't recognize, skip to next transaction.
    if monthObject.nil?
      next
    end
    transactionRecognized = false
    transactionAdded = false
    # Check if transaction is recognized.
    # Match on account, transaction type, and any of the regex-ish terms.
    @recognizedTransactions.each do |rt|
      if transaction['bank_account_id'].to_i == rt[:bank_account_id] && transaction['type'] == rt[:type] && rt[:terms].any? { |w| transaction['description'] =~ /#{w}/ }
        transactionRecognized = rt[:id]
        break
      end
    end
    # Process recurring transactions
    if transactionRecognized
      transactionAdded = true
      rt = @recognizedTransactionsIndexedID["#{transactionRecognized}"]
      if rt[:intTypeID] == 1
        # Cash deposit.
        monthObject[:cash_in] = monthObject[:cash_in] + transaction['paid_in'].to_f
      elsif rt[:intTypeID] == 2
        # Recurring income, accumulated under the recognized transaction's id.
        if monthObject[:"#{rt[:id]}"].nil?
          monthObject[:"#{rt[:id]}"] = 0
        end
        monthObject[:"#{rt[:id]}"] = monthObject[:"#{rt[:id]}"] + transaction['paid_in'].to_f
      elsif rt[:intTypeID] == 3
        # Recurring outgoing, accumulated under the recognized transaction's id.
        if monthObject[:"#{rt[:id]}"].nil?
          monthObject[:"#{rt[:id]}"] = 0
        end
        monthObject[:"#{rt[:id]}"] = monthObject[:"#{rt[:id]}"] + transaction['paid_out'].to_f
      else
        # intTypeID 0: recognized for labelling only — fall through to misc.
        transactionAdded = false
      end
    end
    # Process remaining transactions (un-recurring)
    unless transactionAdded
      if transaction['paid_in'].to_f > 0
        monthObject[:misc_in] = monthObject[:misc_in] + transaction['paid_in'].to_f
      elsif transaction['paid_out'].to_f > 0
        monthObject[:misc_out] = monthObject[:misc_out] + transaction['paid_out'].to_f
      end
    end
    # Calculate Totals (for all transactions)
    if transaction['paid_in'].to_f > 0
      monthObject[:total_in] = monthObject[:total_in] + transaction['paid_in'].to_f
    elsif transaction['paid_out'].to_f > 0
      monthObject[:total_out] = monthObject[:total_out] + transaction['paid_out'].to_f
    end
  end
  # Monthly Totals
  # Roll every per-month numeric key (except :starting_balances) up into :monthTotal.
  @summaryData.each do |monthObject|
    if monthObject[0].to_s != 'monthTotal'
      monthObject[1].each do |key, value|
        unless key.to_s == 'starting_balances'
          if @summaryData[:monthTotal][:"#{key}"].nil?
            @summaryData[:monthTotal][:"#{key}"] = 0
          end
          @summaryData[:monthTotal][:"#{key}"] = @summaryData[:monthTotal][:"#{key}"] + value
        end
      end
    end
  end
  # Montly Profit Loss
  @summaryData.each do |monthObject|
    monthObject[1][:profit_loss] = monthObject[1][:total_in] - monthObject[1][:total_out]
  end
end
# Looks up how much was paid/received during +month+ for the recognized
# transaction +id+ (amounts are accumulated into @summaryData by
# calculateSummary under the id as a symbol key).
#
# @param month [Date, DateTime] one of @month1..@month5
# @param id [Integer] recognized transaction id
# @return [Float] the amount for that month (0.0 when nothing was recorded)
# @raise [RuntimeError] when +month+ matches none of the tracked months
def calculateAmountPaidReceivedForRecognizedTransaction(month, id)
  case month.strftime('%m')
  when @month1.strftime('%m')
    amt = @summaryData[:month1][:"#{id}"].to_f
  when @month2.strftime('%m')
    amt = @summaryData[:month2][:"#{id}"].to_f
  when @month3.strftime('%m')
    amt = @summaryData[:month3][:"#{id}"].to_f
  when @month4.strftime('%m')
    amt = @summaryData[:month4][:"#{id}"].to_f
  when @month5.strftime('%m')
    amt = @summaryData[:month5][:"#{id}"].to_f
  else
    # Bug fix: was `raise(RuntimeError('Month not found.'))`, which calls
    # RuntimeError as a method and so raised NoMethodError instead.
    raise RuntimeError, 'Month not found.'
  end
  amt
end
# Adds the recurring amount of every recognized recurring transaction
# (intTypeID 2 = in, 3 = out) that has NOT yet occurred in the current month
# to @moneyInRemaining / @moneyOutRemaining respectively.
# @return void
def calculateMoneyRemaining
  currentMonth = @month1.strftime('%Y-%m')
  @recognizedTransactions.each do |rt|
    next unless rt[:intTypeID] == 2 || rt[:intTypeID] == 3
    occurredThisMonth = @transactions.any? do |transaction|
      DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%Y-%m') == currentMonth &&
        transaction['bank_account_id'].to_i == rt[:bank_account_id] &&
        transaction['type'] == rt[:type] &&
        rt[:terms].any? { |term| transaction['description'] =~ /#{term}/ }
    end
    next if occurredThisMonth
    if rt[:intTypeID] == 2
      @moneyInRemaining = @moneyInRemaining + rt[:recurring_amount]
    else
      @moneyOutRemaining = @moneyOutRemaining + rt[:recurring_amount]
    end
  end
end
# Totals the expected recurring outgoings (intTypeID 3) into
# @fixedMonthlyOutgoings.
# @return void
def calculateFixedMonthlyOutgoings
  @recognizedTransactions.each do |recognized|
    next unless recognized[:intTypeID] == 3
    @fixedMonthlyOutgoings += recognized[:recurring_amount]
  end
end
# Percentage of the total credit facility currently in use, rounded to 2 dp
# (0 when nothing is used or totals are unknown).
# @return float
def calculateCreditUsed
  used = @totalCreditUsed.to_f
  return 0 unless used > 0
  (used / @totalCredit.to_f * 100).round(2)
end
# Fetches the most recent Experian credit score into @creditScore.
# @return array — [score, score_text]
def getCreditScore
  result = @databaseConnection.query('SELECT * FROM experian_credit_report ORDER BY date_fetched DESC LIMIT 1')
  latest = result.fetch_hash
  # Release the result set — every other query in this class frees its
  # result; this one previously leaked it.
  result.free
  @creditScore = Array[latest['score'], latest['score_text']]
end
# Returns name of bank account + associated color.
# @return hash — {0 => bank name (from @banks), 1 => terminal color code}
def getBankAndColor(bankId)
  colorByBank = {
    1 => @magenta,
    2 => @blue,
    3 => @green,
    4 => @cyan,
    5 => 113
  }
  {0 => @banks[bankId.to_s], 1 => colorByBank.fetch(bankId.to_i, @white)}
end
# Cash on hand once all remaining expected income and outgoings clear.
# @return float
def getBalanceAfterBills
  expected = @totalCash + @moneyInRemaining
  expected - @moneyOutRemaining
end
# Parameter passed in must be in string form as '%Y-%m'
# Resolves a '%Y-%m' string to the matching month's summary hash.
# @return object — the @summaryData entry for that month, or nil when the
#   month isn't one of the five tracked months.
def getMonthObject(transactionMonth)
  slots = [
    [@month1, :month1],
    [@month2, :month2],
    [@month3, :month3],
    [@month4, :month4],
    [@month5, :month5]
  ]
  matched = slots.find { |month, _key| month.strftime('%Y-%m') == transactionMonth }
  matched.nil? ? nil : @summaryData[matched[1]]
end
# Calculates (depending on the last 4 month trend) how much money I should have by the end of the month.
# @return float — nil when the current month's starting balance is unknown.
def getProjectedOEMBalance
  history = [:month2, :month3, :month4, :month5]
  averageIn = (history.reduce(0) { |sum, key| sum + @summaryData[key][:total_in] } / 4).round(2)
  averageOut = (history.reduce(0) { |sum, key| sum + @summaryData[key][:total_out] } / 4).round(2)
  startingCash = @summaryData[:month1][:starting_balances][:totalCash]
  return nil if startingCash.nil?
  (startingCash + averageIn) - averageOut
end
# Calculates (depending on the last 4 month trend) how much money I should have by the end of the month.
# @deprecated Superseded by getProjectedOEMBalance; kept for reference.
# @return float
def getProjectedOEMBalanceDeprecated
  # 4-month averages of cash deposits and misc in/out.
  averageCashIn =
    ((@summaryData[:month2][:cash_in] +
    @summaryData[:month3][:cash_in] +
    @summaryData[:month4][:cash_in] +
    @summaryData[:month5][:cash_in]) / 4).round(2)
  averageMiscIn =
    ((@summaryData[:month2][:misc_in] +
    @summaryData[:month3][:misc_in] +
    @summaryData[:month4][:misc_in] +
    @summaryData[:month5][:misc_in]) / 4).round(2)
  averageMiscOut =
    ((@summaryData[:month2][:misc_out] +
    @summaryData[:month3][:misc_out] +
    @summaryData[:month4][:misc_out] +
    @summaryData[:month5][:misc_out]) / 4).round(2)
  # An estimate of how much 'MISC' goes in/out based on last 4 months.
  inAdjustment = (averageMiscIn - @summaryData[:month1][:misc_in]) + (averageCashIn - @summaryData[:month1][:cash_in])
  outAdjustment = (averageMiscOut - @summaryData[:month1][:misc_out])
  # Calculate how much % of month is left...
  totalDaysInCurrentMonth = Date.civil(@month1.strftime('%Y').to_i, @month1.strftime('%m').to_i, -1).day
  currentDay = @month1.strftime('%d')
  percentOfMonthLeft = 100 - (currentDay.to_f / (totalDaysInCurrentMonth.to_f / 100))
  # ...and adjust adjustments accordingly.
  inAdjustment = inAdjustment * (percentOfMonthLeft / 100)
  outAdjustment = outAdjustment * (percentOfMonthLeft / 100)
  # Add ajustments to @moneyRemaining variables.
  moneyInRemaining = @moneyInRemaining + inAdjustment
  moneyOutRemaining = @moneyOutRemaining + outAdjustment
  (@totalCash - moneyOutRemaining) + moneyInRemaining
end
# Gets end of month day (for current month)
# @return int
def getEndOfMonthDay
  year = @month1.strftime('%Y').to_i
  month = @month1.strftime('%m').to_i
  # Day -1 of Date.civil is the last day of that month.
  Date.civil(year, month, -1).day.to_i
end
# Returns a horizontal rule such as '━━━━'.
# Replaces the old `for i in 0..length - 1` string-append loop with String#*.
# +length+ may be a Float (callers pass pixel math): it is floored, which
# matches the old range's iteration count, and negative/zero lengths yield ''.
# @return string
def getRuleString(length, delimiter = '━')
  count = length.floor
  count > 0 ? delimiter * count : ''
end
# Returns the amount as currency formatted string with color (as hash):
#   {0 => "-£1,234.56", 1 => color} — red when negative, green otherwise.
# return object
def getAsCurrency(amount, symbol = '£', delimiter = ',')
  value = amount.to_f
  color = value < 0 ? @red : @green
  sign = value < 0 ? '-' : ''
  digits = '%.2f' % value.abs
  # Insert the thousands delimiter by grouping digits from the right.
  grouped = digits.reverse.gsub(%r{([0-9]{3}(?=([0-9])))}, "\\1#{delimiter}").reverse
  {0 => "#{sign}#{symbol}#{grouped}", 1 => color}
end
# If timestamp is blank, this gives it a normalized timestamp so script doesn't error.
# @return string
def normalizeTimestamp(timestamp)
  blankValues = ['0000-00-00 00:00:00', '0000-00-00T00:00:00+00:00']
  return '1983-10-29T03:16:00+00:00' if blankValues.include?(timestamp)
  timestamp
end
# Gives a prompt where ONLY 'Enter' will clear the screen, any other key will not.
# Puts the terminal into raw mode to read a single keypress without echo,
# always restores the terminal, clears the screen on carriage return, then
# exits the whole script unconditionally.
# @return void
def enter_to_clear
  begin
    system('stty raw -echo') # raw mode: one char, no echo
    response = STDIN.getc
  ensure
    system('stty -raw echo') # always restore normal terminal settings
  end
  if response.chr == "\r"
    system('clear')
  end
  exit
end
end
# Script entry point: build the report object from the first CLI argument and run it.
ShowBankTransactions.new(ARGV[0]).run
Added more entries to the ignore array.
require '/Users/Albert/Repos/Scripts/ruby/lib/utilities.rb'
require '/Users/Albert/Repos/Scripts/ruby/lib/encrypter.rb'
require 'columnist'
class ShowBankTransactions
include Columnist
# Initialize all the DB stuff, etc.
# Sets up colours, the recognised-transaction and internal-transfer tables,
# the DB connection, account balances, the five-month transaction window and
# the column-layout widths.
# @param argv [String, nil] optional mode flag: 'untranslated', 'with-ids'
#   or 'with-internal-transfers'
# NOTE(review): Mysql, Encrypter and the EC2MySqlAlb3rtuk* constants come
# from the required lib files and are not visible in this file.
def initialize(argv)
# COLORS (xterm-256 colour codes)
@green = 10
@magenta = 201
@yellow = 226
@cyan = 87
@red = 9
@blue = 32
@white = 255
@plus_color = 47
@minus_color = 196
# Get Database Connection (credentials stored encrypted)
encrypter = Encrypter.new
@databaseConnection = Mysql.new(
encrypter.decrypt(EC2MySqlAlb3rtukHost),
encrypter.decrypt(EC2MySqlAlb3rtukUser),
encrypter.decrypt(EC2MySqlAlb3rtukPass),
encrypter.decrypt(EC2MySqlAlb3rtukSchema)
)
# INTERNAL TYPE ID LEGEND
# 1 => CASH IN
# 2 => RECURRING IN
# 3 => RECURRING OUT
# NOTE(review): 0 appears to mean "recognised but non-recurring" — confirm.
@recognizedTransactions = Array[
# NATWEST AD GOLD
{:intTypeID => 0, :id => 100, :bank_account_id => 1, :type => 'BAC', :terms => Array['PAYPAL', 'PPWD'], :color => @white, :translation => 'PAYPAL WITHDRAWAL'},
{:intTypeID => 1, :id => 200, :bank_account_id => 1, :type => 'CDM', :terms => Array['521005', '521007', '560005'], :color => @green, :translation => 'CASH'},
{:intTypeID => 1, :id => 200, :bank_account_id => 1, :type => '- ', :terms => Array['521005', '521007', '560005'], :color => @green, :translation => 'CASH'},
{:intTypeID => 1, :id => 200, :bank_account_id => 1, :type => 'TLR', :terms => Array[''], :color => @green, :translation => 'CASH'},
{:intTypeID => 3, :id => 300, :bank_account_id => 1, :type => 'POS', :terms => Array['NAMESCO'], :color => @red, :translation => 'NAMESCO WEB SERVER', :recurring_amount => 29.99},
{:intTypeID => 3, :id => 400, :bank_account_id => 1, :type => 'D/D', :terms => Array['SLMLTD INCOME AC'], :color => @red, :translation => 'HORFIELD SPORTS CENTRE', :recurring_amount => 33.60},
{:intTypeID => 0, :id => 500, :bank_account_id => 1, :type => 'D/D', :terms => Array['UK MAIL'], :color => @white, :translation => 'UK MAIL'},
{:intTypeID => 0, :id => 600, :bank_account_id => 1, :type => 'POS', :terms => Array['UK MAIL'], :color => @white, :translation => 'UK MAIL'},
{:intTypeID => 0, :id => 700, :bank_account_id => 1, :type => 'OTR', :terms => Array['07519616416'], :color => @white, :translation => 'ROSS JOY'},
{:intTypeID => 0, :id => 800, :bank_account_id => 1, :type => 'OTR', :terms => Array['07980286590', 'SCOULDING L A'], :color => @white, :translation => 'LUKE SCOULDING'},
{:intTypeID => 0, :id => 900, :bank_account_id => 1, :type => 'OTR', :terms => Array['07825126363'], :color => @white, :translation => 'LUKE CHAMBERLAIN'},
{:intTypeID => 0, :id => 1000, :bank_account_id => 1, :type => 'BAC', :terms => Array['D LINDEN'], :color => @white, :translation => 'DEAN LINDEN'},
{:intTypeID => 0, :id => 1100, :bank_account_id => 1, :type => 'BAC', :terms => Array['P HACKETT'], :color => @white, :translation => 'PHIL HACKETT'},
{:intTypeID => 2, :id => 1150, :bank_account_id => 1, :type => 'BAC', :terms => Array['SALARY','T27 SYSTEMS'], :color => @cyan, :translation => 'BRIGHTPEARL WAGE', :recurring_amount => 1946.23}, # 1946.23
{:intTypeID => 2, :id => 1200, :bank_account_id => 1, :type => 'BAC', :terms => Array['VIRGIN TV'], :color => @cyan, :translation => 'GARY SOLAN (VIRGIN MEDIA)', :recurring_amount => 30},
{:intTypeID => 0, :id => 1400, :bank_account_id => 1, :type => 'BAC', :terms => Array['ALEX CARLIN'], :color => @white, :translation => 'ALEX CARLIN'},
{:intTypeID => 0, :id => 1500, :bank_account_id => 1, :type => 'BAC', :terms => Array['J HARTRY '], :color => @white, :translation => 'JOE HARTRY'},
{:intTypeID => 3, :id => 1600, :bank_account_id => 1, :type => 'POS', :terms => Array['SPOTIFY'], :color => @red, :translation => 'SPOTIFY', :recurring_amount => 19.98},
{:intTypeID => 3, :id => 1700, :bank_account_id => 1, :type => 'POS', :terms => Array['LYNDA.COM'], :color => @red, :translation => 'LYNDA.COM', :recurring_amount => 16, :estimated => true},
{:intTypeID => 3, :id => 1800, :bank_account_id => 1, :type => 'POS', :terms => Array['GITHUB.COM'], :color => @red, :translation => 'GITHUB.COM', :recurring_amount => 8.50, :estimated => true},
{:intTypeID => 0, :id => 1900, :bank_account_id => 1, :type => 'POS', :terms => Array['TRANSFERWISE'], :color => @white, :translation => 'TRANFERWISE (WEDDING FUND)'},
# NATWEST SAVINGS
{:intTypeID => 0, :id => 2000, :bank_account_id => 3, :type => 'BAC', :terms => Array['TRANSFERWISE'], :color => @white, :translation => 'TRANFERWISE (REFUND)'},
# HALIFAX ULTIMATE REWARD
{:intTypeID => 3, :id => 2100, :bank_account_id => 4, :type => 'FEE', :terms => Array['ACCOUNT FEE'], :color => @red, :translation => 'ACCOUNT FEE (HALIFAX ULTIAMTE REWARD)', :recurring_amount => 15},
{:intTypeID => 1, :id => 2200, :bank_account_id => 4, :type => 'CSH', :terms => Array[''], :color => @green, :translation => 'CASH'},
{:intTypeID => 3, :id => 2250, :bank_account_id => 4, :type => 'DD', :terms => Array['DVLA-EU51GVC'], :color => @red, :translation => 'CAR TAX (DVLA-EU51GVC)', :recurring_amount => 19.68},
# HALIFAX REWARD
{:intTypeID => 3, :id => 2300, :bank_account_id => 5, :type => 'DEB', :terms => Array['CREDITEXPERT.CO.UK'], :color => @red, :translation => 'CREDITEXPERT', :recurring_amount => 9.99},
{:intTypeID => 3, :id => 2350, :bank_account_id => 5, :type => 'DEB', :terms => Array['ANIMOTO'], :color => @red, :translation => 'ANIMOTO', :recurring_amount => 5},
{:intTypeID => 0, :id => 2400, :bank_account_id => 5, :type => 'FPI', :terms => Array['PAYPAL WITHDRAWAL'], :color => @white, :translation => 'PAYPAL WITHDRAWAL'},
{:intTypeID => 1, :id => 2500, :bank_account_id => 5, :type => 'CSH', :terms => Array[''], :color => @green, :translation => 'CASH'},
{:intTypeID => 0, :id => 2550, :bank_account_id => 6, :type => 'D-C', :terms => Array[''], :color => @white, :translation => 'ISA INTEREST'},
# LLOYDS CURRENT
{:intTypeID => 3, :id => 2600, :bank_account_id => 8, :type => 'FPO', :terms => Array['STELLA TALIOTIS'], :color => @red, :translation => 'RENT', :recurring_amount => 250},
{:intTypeID => 3, :id => 2700, :bank_account_id => 8, :type => 'DD', :terms => Array['VODAFONE LIMITED'], :color => @red, :translation => 'VODAFONE LIMITED', :recurring_amount => 60, :estimated => true},
{:intTypeID => 3, :id => 2800, :bank_account_id => 8, :type => 'DD', :terms => Array['VIRGIN MEDIA'], :color => @red, :translation => 'VIRGIN MEDIA', :recurring_amount => 112.99, :estimated => true},
{:intTypeID => 1, :id => 2900, :bank_account_id => 8, :type => 'CSH', :terms => Array[''], :color => @green, :translation => 'CASH'},
{:intTypeID => 1, :id => 2950, :bank_account_id => 8, :type => 'DEP', :terms => Array[''], :color => @green, :translation => 'CASH'},
{:intTypeID => 3, :id => 3000, :bank_account_id => 8, :type => 'DD', :terms => Array['TESCO BANK'], :color => @red, :translation => 'TESCO CAR INSURANCE', :recurring_amount => 62.73},
{:intTypeID => 3, :id => 3100, :bank_account_id => 8, :type => 'FEE', :terms => Array['ACCOUNT FEE'], :color => @red, :translation => 'ACCOUNT FEE (LLOYDS CURRENT)', :recurring_amount => 15},
{:intTypeID => 2, :id => 3200, :bank_account_id => 8, :type => 'FPI', :terms => Array['MATTHEW JONES'], :color => @cyan, :translation => 'MATT JONES (VIRGIN MEDIA)', :recurring_amount => 24},
]
# Index the table by string id for O(1) lookup elsewhere.
@recognizedTransactionsIndexedID = {}
@recognizedTransactions.each do |recognizedTransaction|
@recognizedTransactionsIndexedID["#{recognizedTransaction[:id]}"] = recognizedTransaction
end
# Patterns identifying transfers between own accounts (hidden by default).
@internalTransfers = Array[
# NATWEST
{:bank_account_id => Array[1, 2, 3], :type => 'BAC', :terms => Array['A RANNETSPERGER', 'HALIFAX ULTIMATE', 'HALIFAX REWARD', 'AR HALIFAX ACC', 'LLOYDS ACCOUNT']},
{:bank_account_id => Array[1, 2, 3], :type => 'OTR', :terms => Array['CALL REF.NO.'], :terms_not => ['UK MAIL LIMITED', 'DEAN LINDEN', 'TRANSFERWISE']},
{:bank_account_id => Array[1, 2, 3], :type => 'POS', :terms => Array['BARCLAYCARD', 'CAPITAL ONE']},
# LLOYDS
{:bank_account_id => Array[8], :type => 'FPO', :terms => Array['NATWEST AD GOLD', 'NATWEST STEP', 'NATWEST SAVINGS', 'LLOYDS BANK PLATIN']},
{:bank_account_id => Array[8], :type => 'FPI', :terms => Array['RANNETSPERGER A NATWEST']},
{:bank_account_id => Array[8], :type => 'TFR', :terms => Array['HALIFAX ULTIMATE', 'HALIFAX REWARD', 'A RANNETSPERGER']},
{:bank_account_id => Array[7], :type => 'CC', :terms => Array['PAYMENT RECEIVED']},
# HALIFAX
{:bank_account_id => Array[4, 5], :type => 'DEB', :terms => Array['BARCLAYCARD']},
{:bank_account_id => Array[4, 5], :type => 'FPO', :terms => Array['NATWEST']},
{:bank_account_id => Array[4, 5], :type => 'FPI', :terms => Array['RANNETSPERGER A NATWEST']},
{:bank_account_id => Array[4, 5], :type => 'TFR', :terms => Array['HALIFAX ULTIMATE', 'HALIFAX REWARD', 'A RANNETSPERGER']},
{:bank_account_id => Array[6], :type => 'P-C', :terms => Array['']},
{:bank_account_id => Array[6], :type => 'P-T', :terms => Array['']},
{:bank_account_id => Array[6], :type => 'D-T', :terms => Array['']},
# BARCLAYCARD
{:bank_account_id => Array[9], :type => 'OTHER', :terms => Array['PAYMENT, THANK YOU']},
# CAPITAL ONE
{:bank_account_id => Array[10], :type => 'CR', :terms => Array['PAYMENT RECEIVED', 'DIRECT DEBIT PAYMENT']},
]
# Transaction ids explicitly treated as internal transfers.
@ignoredTransactions = Array.new
# Hawaii Payments
@ignoredTransactions.push(*Array[2556, 2557, 2558, 2555, 2545, 2567, 2576, 2566, 2959, 3328, 3364, 3310, 3349, 3405, 3413, 3424, 3482, 3483, 3492, 3493, 3543,
3564, 3556, 3585, 3593, 3599, 3600, 3615, 3619, 3635, 3672, 3723, 3789, 3954, 3989, 4036, 4038, 4046, 4045, 4041, 4052, 4054,
4056])
# Misc Globals (right-hand side panel state used by insertRightHandContent)
@rightHandSideCount = 4
@rightHandSideContent = Array.new
@rightHandSideContentCount = -1
@rightHandSideContentExists = true
# Balance Globals
@totalAvailable = 0
@totalCredit = 0
@totalCreditUsed = 0
@totalCash = 0
@moneyInRemaining = 0
@moneyOutRemaining = 0
@fixedMonthlyOutgoings = 0
@creditScore = Array.new
@summaryData = {
:month1 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
:month2 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
:month3 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
:month4 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
:month5 => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0, :starting_balances => 0},
:monthTotal => {:misc_in => 0, :misc_out => 0, :cash_in => 0, :total_in => 0, :total_out => 0, :profit_loss => 0},
}
# Months: current month and the four preceding ones ('<<' subtracts months).
@month1 = DateTime.now
@month2 = DateTime.now << 1
@month3 = DateTime.now << 2
@month4 = DateTime.now << 3
@month5 = DateTime.now << 4
# Get different modes.
@untranslated = false
@withIDs = false
@withInternalTransfers = false
if argv == 'untranslated'
@untranslated = true
elsif argv == 'with-ids'
@withIDs = true
elsif argv == 'with-internal-transfers'
@withInternalTransfers = true
end
@rule = getRuleString(202)
# Get banks into Hash
@banks = {}
banksSQL = @databaseConnection.query('SELECT * FROM bank ORDER BY id ASC')
banksSQL.each_hash do |row|
@banks[row['id']] = row['title']
end
banksSQL.free
# Get bank accounts into Hash
@bankAccounts = {}
bankAccountsSQL = @databaseConnection.query('SELECT * FROM bank_account ORDER BY id ASC')
bankAccountsSQL.each_hash do |row|
@bankAccounts[row['id']] = row
end
bankAccountsSQL.free
# Get bank/credit card balances into Hash (latest fetched row per account)
@bankAccountBalances = {}
@bankAccounts.each do |bankAccount|
bankAccount = bankAccount[1]
case bankAccount['bank_account_type_id'].to_i
when 1
bankAccountTable = 'bank_account_type_bank_account'
when 2
bankAccountTable = 'bank_account_type_credit_card'
when 3
bankAccountTable = 'bank_account_type_isa'
else
raise(RuntimeError, "bank_account_type => #{bankAccount['bank_account_type']} doesn't exist.")
end
# NOTE(review): SQL built by interpolation; values are internal DB ids
# here, but parameterise if these ever become user-supplied.
balance = @databaseConnection.query("SELECT * FROM #{bankAccountTable} WHERE bank_account_id='#{bankAccount['id']}' ORDER BY date_fetched DESC LIMIT 1")
@bankAccountBalances[bankAccount['id'].to_i] = balance.fetch_hash
balance.free
end
# Get transactions into Hash (five-month window, oldest first)
@transactions = Array.new
transactionsSQL = @databaseConnection.query("SELECT * FROM bank_account_transactions WHERE date >= '#{@month5.strftime('%Y-%m-01')}' ORDER BY date ASC, bank_account_id ASC, type ASC")
transactionsSQL.each_hash do |transaction|
# Skip ISA.
if transaction['bank_account_id'].to_i == 6
next
end
@transactions << transaction
end
transactionsSQL.free
# Column widths for transactions
@transWidth_1 = 20
@transWidth_2 = 20
@transWidth_3 = 12
@transWidth_4 = 111
@transWidth_5 = 6
@transWidth_6 = 11
@transWidth_7 = 12
@transWidthTotal = @transWidth_1 + @transWidth_2 + @transWidth_3 + @transWidth_4 + @transWidth_5 + @transWidth_6 + @transWidth_7 + 8
# Column widths for balances
@colWidth_1 = 20
@colWidth_2 = 22
@colWidth_3 = 20
@colWidth_4 = 20
@colWidth_5 = 20
@colWidth_6 = 20
@colWidth_7 = 20
@colWidth_8 = 21
@colWidth_9 = 2
@colWidth_10 = 24
@colWidthTotal = @colWidth_1 + @colWidth_2 + @colWidth_3 + @colWidth_4 + @colWidth_5 + @colWidth_6 + @colWidth_7 + @colWidth_8 + @colWidth_9 + @colWidth_10 + 9
# Column widths for the summary section
@summaryWidth_1 = 43
@summaryWidth_2 = 20
@summaryWidth_3 = 20
@summaryWidth_4 = 20
@summaryWidth_5 = 20
@summaryWidth_6 = 20
@summaryWidth_7 = 21
@summaryWidth_8 = 2
@summaryWidth_9 = 24
@summaryWidthTotal = @summaryWidth_1 + @summaryWidth_2 + @summaryWidth_3 + @summaryWidth_4 + @summaryWidth_5 + @summaryWidth_6 + @summaryWidth_7 + @summaryWidth_8 + @summaryWidth_9 + 8
end
# Main function
# Orchestrates one full report run: connectivity check, calculations and
# fetches (which populate the ivars the display methods read), then the
# four output sections in display order.
# @return void
def run
# MAKE SURE WE'RE ONLINE
checkMachineIsOnline
# DO ALL CALCULATIONS
calculateSummary
calculateMoneyRemaining
calculateFixedMonthlyOutgoings
# DO GETS
getCreditScore
getTotals
# START OUTPUT
displayTransactions
displayCreditCards
displayBankAccounts
displaySummary
end
# Display Transactions
# Renders the five-month transaction table: header rows, then one row per
# transaction with month dividers and per-day spacing. Internal transfers
# are hidden unless the 'with-internal-transfers' mode is active.
# @return void
def displayTransactions
table(:border => false) do
row do
column(getRuleString(@transWidth_1), :width => @transWidth_1, :align => 'left', :bold => 'true')
column(getRuleString(@transWidth_2), :width => @transWidth_2, :align => 'left', :bold => 'true')
column(getRuleString(@transWidth_3), :width => @transWidth_3, :align => 'left')
column(getRuleString(@transWidth_4), :width => @transWidth_4, :align => 'right')
column(getRuleString(@transWidth_5), :width => @transWidth_5, :align => 'left', :bold => 'true')
column(getRuleString(@transWidth_6), :width => @transWidth_6, :align => 'right')
column(getRuleString(@transWidth_7), :width => @transWidth_7, :align => 'right')
end
row do
column(' Bank Name')
column('Account Name')
column('Date')
column('Description')
column('Type')
column('Paid In')
column('Paid Out')
end
row do
column(getRuleString(@transWidth_1))
column(getRuleString(@transWidth_2))
column(getRuleString(@transWidth_3))
column(getRuleString(@transWidth_4))
column(getRuleString(@transWidth_5))
column(getRuleString(@transWidth_6))
column(getRuleString(@transWidth_7))
end
last_date = nil
@transactions.each do |transaction|
# Determine Bank Text Color
bankAndColor = getBankAndColor(@bankAccounts[transaction['bank_account_id']]['bank_id'])
# Translation Handling ('untranslated' mode shows the raw description)
transactionDetails = getDescriptionAndColor(transaction)
transactionColor = transactionDetails[:color]
if @untranslated
transactionDescription = transaction['description']
else
transactionDescription = transactionDetails[:description]
end
# Internal Transfer Handling: normally skipped; in
# 'with-internal-transfers' mode shown green (explicitly ignored id)
# or yellow (pattern-matched transfer), with everything else white.
if isInternalTransfer(transaction)
if @withInternalTransfers
if @ignoredTransactions.include?(transaction['id'].to_i)
transactionColor = @green
else
transactionColor = @yellow
end
else
next
end
else
if @withInternalTransfers
transactionColor = @white
end
end
# Insert MONTH divider when the month changes; otherwise a blank row
# between different days.
if last_date != nil
if DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%B') != DateTime.strptime(last_date, '%Y-%m-%d').strftime('%B')
displayTransactionsMonth(DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%B'))
else
# Insert space if new day
if last_date != transaction['date']
displayTransactionsBlankRow
end
end
end
# Format description; 'with-ids' mode left-pins the row id and
# right-aligns the (truncated) description in the remaining space.
if @withIDs
descriptionAddedInfo = "##{transaction['id']}"
description = transactionDescription[0..((@transWidth_4 - 2) - descriptionAddedInfo.length)]
description = "#{descriptionAddedInfo}#{getRuleString(@transWidth_4 - (descriptionAddedInfo.length + description.length), ' ')}#{description}"
else
description = transactionDescription[0..(@transWidth_4 - 2)]
end
row do
column(" #{bankAndColor[0]}", :color => bankAndColor[1])
column(@bankAccounts[transaction['bank_account_id']]['title'], :color => bankAndColor[1])
column(DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%d %b %Y'), :color => transactionColor)
column("#{description}", :color => transactionColor)
column(transaction['type'], :color => transactionColor)
column((transaction['paid_in'].to_f == 0) ? '' : getAsCurrency(transaction['paid_in'])[0], :color => transactionColor)
column((transaction['paid_out'].to_f == 0) ? '' : getAsCurrency(0 - transaction['paid_out'].to_f)[0], :color => transactionColor)
end
last_date = transaction['date']
end
end
puts "\n#{getRuleString(@colWidthTotal)}"
end
# Translates a transaction's raw description into a recognised label/color.
# @return Hash with :description and :color keys
def getDescriptionAndColor(transaction)
# First recognised-transaction rule whose account, type and any term
# (regex) all match this transaction, if there is one.
rule = @recognizedTransactions.find do |translation|
transaction['bank_account_id'].to_i == translation[:bank_account_id] &&
transaction['type'] == translation[:type] &&
translation[:terms].any? { |term| transaction['description'] =~ /#{term}/ }
end
return {:description => rule[:translation].upcase, :color => rule[:color]} if rule
# Unrecognised: upper-cased raw description in the default colour.
{:description => transaction['description'].upcase, :color => @white}
end
# Returns TRUE if transaction is internal transfer
# @return boolean
def isInternalTransfer(transaction)
# Explicitly-listed transaction ids are always treated as internal.
if inArray(@ignoredTransactions, transaction['id'].to_i)
return true
end
@internalTransfers.each do |match|
# BUGFIX: compare the account id exactly. The old regex test
# (transaction['bank_account_id'] =~ /#{w}/) was a substring match, so
# account "10" matched any rule listing account 1.
next unless match[:bank_account_id].include?(transaction['bank_account_id'].to_i)
next unless match[:type] == transaction['type']
next unless match[:terms].any? { |w| transaction['description'].upcase =~ /#{w}/ }
# :terms_not vetoes the first matching rule (case-sensitive, as before).
if match.has_key?(:terms_not)
if match[:terms_not].any? { |w| transaction['description'] =~ /#{w}/ }
return false
end
end
return true
end
false
end
# Inserts a divider to display the month
# @return void
def displayTransactionsMonth(month)
displayTransactionsBlankRow
row do
# NOTE(review): flag is set but never read in this method — presumably
# consumed elsewhere; confirm before removing.
@pastMonthDeployed = true
column(getRuleString(@transWidth_1))
column(getRuleString(@transWidth_2))
column(getRuleString(@transWidth_3))
# Month label embedded in the rule, padded to the description width.
column(" [ #{month.upcase} ] #{getRuleString(@transWidth_4 - (month.length + 6))}")
column(getRuleString(@transWidth_5))
column(getRuleString(@transWidth_6))
column(getRuleString(@transWidth_7))
end
displayTransactionsBlankRow
end
# Displays a blank transaction row
# @return void
# Emits a spacer row: one empty cell per transaction-table column.
# @return void
def displayTransactionsBlankRow
row do
7.times { column('') }
end
end
# Display Bank Accounts
# Renders the current-account balances table (type 1 accounts only,
# excluding account id 3 — NOTE(review): presumably the NatWest savings
# account shown in the summary instead; confirm).
# @return void
def displayBankAccounts
table(:border => false) do
row do
column(' Bank', :width => @colWidth_1, :align => 'left', :bold => 'true')
column('Name', :width => @colWidth_2, :align => 'left', :bold => 'true')
column('Balance', :width => @colWidth_3, :align => 'right')
column('Available', :width => @colWidth_4, :align => 'right')
column('Overdraft', :width => @colWidth_5, :align => 'right')
column('', :width => @colWidth_6, :align => 'right')
column('', :width => @colWidth_7, :align => 'right')
column('', :width => @colWidth_8, :align => 'right')
column(' |', :width => @colWidth_9, :align => 'right')
column('Last Fetch', :width => @colWidth_10, :align => 'right')
end
row do
column(getRuleString(@colWidth_1))
column(getRuleString(@colWidth_2))
column(getRuleString(@colWidth_3))
column(getRuleString(@colWidth_4))
column(getRuleString(@colWidth_5))
column(getRuleString(@colWidth_6))
column(getRuleString(@colWidth_7))
column(getRuleString(@colWidth_8))
column(getRuleString(@colWidth_9))
column(getRuleString(@colWidth_10))
end
# NOTE(review): the block param 'row' shadows Columnist's row method; the
# inner 'row do' still calls the method (a block forces method-call
# parsing), but renaming the local would be clearer.
@bankAccounts.each do |row|
row = row[1]
if row['bank_account_type_id'].to_i == 1 && row['id'].to_i != 3
bankAndColor = getBankAndColor(row['bank_id'])
balances = @bankAccountBalances[row['id'].to_i]
balances['date_fetched_string'] = normalizeTimestamp(balances['date_fetched_string'])
row do
column(" #{bankAndColor[0]}", :color => bankAndColor[1])
column(row['title'], :color => bankAndColor[1])
column(getAsCurrency(balances['balance'])[0], :color => getAsCurrency(balances['balance'])[1])
column(getAsCurrency(balances['balance_available'])[0], :color => @white)
column(getAsCurrency(balances['balance_overdraft'])[0], :color => @white)
column('—', :color => @white)
column('—', :color => @white)
column('—', :color => @white)
column(' |')
column("#{getTimeAgoInHumanReadable(balances['date_fetched_string'])}", :color => @white)
end
end
end
end
puts "#{getRuleString(@colWidthTotal)}"
end
# Display CreditCards
# Renders the summary banner and the credit-card balances table
# (type 2 accounts), including minimum-payment due-date highlighting.
# @return void
def displayCreditCards
# Banner: ANSI 256-colour background (92) with a UTF-8 arrow.
summaryTitle = "\x1B[48;5;92m SUMMARY FOR \xe2\x86\x92 #{DateTime.now.strftime('%^B %e, %Y (%^A)')} \x1B[0m"
puts '|'.rjust(173, ' ')
puts "#{summaryTitle.ljust(186, ' ')}|"
puts '|'.rjust(173, ' ')
table(:border => false) do
row do
column(' Credit Card', :width => @colWidth_1, :align => 'left', :bold => 'true')
column('Name', :width => @colWidth_2, :align => 'left', :bold => 'true')
column('Balance', :width => @colWidth_3, :align => 'right')
column('Available', :width => @colWidth_4, :align => 'right')
column('Limit', :width => @colWidth_5, :align => 'right')
column('Pending', :width => @colWidth_6, :align => 'right')
column('Minimum Payment', :width => @colWidth_7, :align => 'right')
column('Payment Date', :width => @colWidth_8, :align => 'right')
column(' |', :width => @colWidth_9, :align => 'left')
column('Last Fetch', :width => @colWidth_10, :align => 'right')
end
row do
column(getRuleString(@colWidth_1))
column(getRuleString(@colWidth_2))
column(getRuleString(@colWidth_3))
column(getRuleString(@colWidth_4))
column(getRuleString(@colWidth_5))
column(getRuleString(@colWidth_6))
column(getRuleString(@colWidth_7))
column(getRuleString(@colWidth_8))
column(getRuleString(@colWidth_9))
column(getRuleString(@colWidth_10))
end
# NOTE(review): block param 'row' shadows Columnist's row method (same
# pattern as displayBankAccounts); still parses as a method call.
@bankAccounts.each do |row|
row = row[1]
if row['bank_account_type_id'].to_i == 2
bankAndColor = getBankAndColor(row['bank_id'])
balances = @bankAccountBalances[row['id'].to_i]
balances['date_fetched_string'] = normalizeTimestamp(balances['date_fetched_string'])
row do
column(" #{bankAndColor[0]}", :color => bankAndColor[1])
column(row['title'], :color => bankAndColor[1])
# Card balances are stored positive-owed; negate for display.
creditCardBalance = 0 - balances['balance'].to_f
minimumPaymentDate = balances['minimum_payment_date']
if minimumPaymentDate == '0000-00-00'
minimumPaymentDate = '1983-10-29'
end
timeStamp = DateTime.strptime(minimumPaymentDate, '%Y-%m-%d')
timeNow = DateTime.now
minimumPaymentDateIn = (timeStamp - timeNow).to_i
# Payment due within 3 days renders red.
if minimumPaymentDateIn <= 3
minimumPaymentColor = @red
else
minimumPaymentColor = @white
end
# Calculate Pending Transacions for LLoyds & Capital One (hard-coded
# account ids 7 and 10): limit - available - balance.
if row['id'].to_i == 7 || row['id'].to_i == 10
balances['pending_transactions'] = '%.2f' % (balances['balance_limit'].to_f - balances['balance_available'].to_f - balances['balance'].to_f)
end
column(getAsCurrency(creditCardBalance)[0], :color => (getAsCurrency(creditCardBalance)[1] == @red) ? @red : @white)
column(getAsCurrency(balances['balance_available'])[0], :color => @white)
column(getAsCurrency(balances['balance_limit'])[0], :color => @white)
column(balances['pending_transactions'].to_f <= 0 ? '—' : getAsCurrency(0 - balances['pending_transactions'].to_f)[0], :color => balances['pending_transactions'].to_f <= 0 ? @white : getAsCurrency(0 - balances['pending_transactions'].to_f)[1])
column(getAsCurrency(balances['minimum_payment'])[0], :color => (balances['minimum_payment'].to_f > 0) ? ((minimumPaymentDateIn <= 3) ? @red : @white) : @white)
if minimumPaymentDateIn < 0 || balances['minimum_payment'].to_f == 0
column('—', :color => @white)
else
column("#{DateTime.strptime(minimumPaymentDate, '%Y-%m-%d').strftime('%d %b %Y')}", :color => minimumPaymentColor)
end
column(' |')
column("#{getTimeAgoInHumanReadable(balances['date_fetched_string'])}", :color => @white)
end
end
end
end
puts "#{getRuleString(@colWidthTotal)}\n"
end
# Display Summary
# Renders the five-month summary table (starting balances, cash deposited,
# recurring in/out rows, totals, profit/loss) with a right-hand side panel
# fed column-by-column via insertRightHandContent, then a month-progress
# star marker under the table.
# @return void
def displaySummary
# Get some info for 'Estimated [XX]' column.
endOfMonthDate = DateTime.new(@month1.strftime('%Y').to_i, @month1.strftime('%m').to_i, getEndOfMonthDay, 0, 0, 0, 0)
# Ordinal suffix for the month-end day (28th-30th vs 31st).
case endOfMonthDate.strftime('%d').to_i
when 28..30
endOfMonthSuffix = 'th'
when 31
endOfMonthSuffix = 'st'
else
endOfMonthSuffix = ''
end
projectedOEMBalance = getProjectedOEMBalance
# Label/value pairs consumed two per row by insertRightHandContent.
@rightHandSideContent = Array[
Array['Current Balance', @white],
Array[getAsCurrency(@totalCash)[0], getAsCurrency(@totalCash)[1]],
Array['After Bills/Wages', @white],
Array[getBalanceAfterBills.nil? ? '-' : getAsCurrency(getBalanceAfterBills)[0], getBalanceAfterBills.nil? ? @white : getAsCurrency(getBalanceAfterBills)[1]],
Array["Estimated [#{endOfMonthDate.strftime('%b %d')}#{endOfMonthSuffix}]", @white],
Array[projectedOEMBalance.nil? ? '—' : getAsCurrency(projectedOEMBalance)[0], projectedOEMBalance.nil? ? @white : getAsCurrency(projectedOEMBalance)[1]],
Array['NatWest Savings Account', @white],
Array[getAsCurrency(@bankAccountBalances[3]['balance'])[0], getAsCurrency(@bankAccountBalances[3]['balance'])[1]],
Array['Credit Total', @white],
Array[getAsCurrency(@totalCredit)[0], @cyan],
Array['Credit Used', @white],
Array["#{calculateCreditUsed}%", @magenta],
Array['Monthly Outgoings', @white],
Array[getAsCurrency(@fixedMonthlyOutgoings)[0], @cyan],
Array['Remaining Outgoings', @white],
Array["#{@moneyOutRemaining > 0 ? '—' : ''}#{getAsCurrency(@moneyOutRemaining)[0]}", @moneyOutRemaining <= 0 ? @white : @red],
Array['Remaining Incomings', @white],
Array["#{getAsCurrency(@moneyInRemaining)[0]}", @moneyInRemaining <= 0 ? @white : @cyan],
Array['Credit Score', @white],
Array["#{@creditScore[0]} (#{@creditScore[1]})", @green],
]
table(:border => false) do
row do
column('', :width => @summaryWidth_1, :align => 'left')
column("#{@month1.strftime('%B %Y')}", :width => @summaryWidth_2, :align => 'right', :color => 245)
column("#{@month2.strftime('%B %Y')}", :width => @summaryWidth_3, :align => 'right', :color => 245)
column("#{@month3.strftime('%B %Y')}", :width => @summaryWidth_4, :align => 'right', :color => 245)
column("#{@month4.strftime('%B %Y')}", :width => @summaryWidth_5, :align => 'right', :color => 245)
column("#{@month5.strftime('%B %Y')}", :width => @summaryWidth_6, :align => 'right', :color => 245)
column('5-Month Total', :width => @summaryWidth_7, :align => 'right', :color => 245)
column(' |', :width => @summaryWidth_8, :align => 'left')
column(insertRightHandContent[0], :color => insertRightHandContent[1], :width => @summaryWidth_9, :align => 'right')
end
displaySummaryDivider
row do
column(' STARTING BALANCE', :color => @white)
column("#{@summaryData[:month1][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month1][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month2][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month2][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month3][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month3][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month4][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month4][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month5][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month5][:starting_balances][:totalCash])[0]}", :color => @white)
column('—', :color => @white)
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
displaySummaryDivider
row do
column(' CASH DEPOSITED', :color => @green)
column("#{@summaryData[:month1][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:cash_in])[0]}", :color => (@summaryData[:month1][:cash_in].to_f <= 0 ? @white : @green))
column("#{@summaryData[:month2][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:cash_in])[0]}", :color => (@summaryData[:month2][:cash_in].to_f <= 0 ? @white : @green))
column("#{@summaryData[:month3][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:cash_in])[0]}", :color => (@summaryData[:month3][:cash_in].to_f <= 0 ? @white : @green))
column("#{@summaryData[:month4][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:cash_in])[0]}", :color => (@summaryData[:month4][:cash_in].to_f <= 0 ? @white : @green))
column("#{@summaryData[:month5][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:cash_in])[0]}", :color => (@summaryData[:month5][:cash_in].to_f <= 0 ? @white : @green))
column("#{@summaryData[:monthTotal][:cash_in].to_f <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:cash_in])[0]}", :color => (@summaryData[:monthTotal][:cash_in].to_f <= 0 ? @white : @green))
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
displaySummaryDivider
# Recurring income rows (intTypeID 2); replaced the old MISC (IN) row below.
recognizedTransactionLoop(2)
# row do
# column(' MISC (IN)', :color => @plus_color)
# column(@summaryData[:month1][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:misc_in])[0], :color => @summaryData[:month1][:misc_in] <= 0 ? @white : @plus_color)
# column(@summaryData[:month2][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:misc_in])[0], :color => @summaryData[:month2][:misc_in] <= 0 ? @white : @plus_color)
# column(@summaryData[:month3][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:misc_in])[0], :color => @summaryData[:month3][:misc_in] <= 0 ? @white : @plus_color)
# column(@summaryData[:month4][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:misc_in])[0], :color => @summaryData[:month4][:misc_in] <= 0 ? @white : @plus_color)
# column(@summaryData[:month5][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:misc_in])[0], :color => @summaryData[:month5][:misc_in] <= 0 ? @white : @plus_color)
# column(@summaryData[:monthTotal][:misc_in] <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:misc_in])[0], :color => @summaryData[:monthTotal][:misc_in] <= 0 ? @white : @plus_color)
# column(' |')
# column(insertRightHandContent[0], :color => insertRightHandContent[1])
# end
displaySummaryDivider
# Recurring outgoing rows (intTypeID 3); replaced the old MISC (OUT) row below.
recognizedTransactionLoop(3)
# row do
# column(' MISC (OUT)', :color => @minus_color)
# column(@summaryData[:month1][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:misc_out])[0], :color => @summaryData[:month1][:misc_out] <= 0 ? @white : @minus_color)
# column(@summaryData[:month2][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:misc_out])[0], :color => @summaryData[:month2][:misc_out] <= 0 ? @white : @minus_color)
# column(@summaryData[:month3][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:misc_out])[0], :color => @summaryData[:month3][:misc_out] <= 0 ? @white : @minus_color)
# column(@summaryData[:month4][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:misc_out])[0], :color => @summaryData[:month4][:misc_out] <= 0 ? @white : @minus_color)
# column(@summaryData[:month5][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:misc_out])[0], :color => @summaryData[:month5][:misc_out] <= 0 ? @white : @minus_color)
# column(@summaryData[:monthTotal][:misc_out] <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:misc_out])[0], :color => @summaryData[:monthTotal][:misc_out] <= 0 ? @white : @minus_color)
# column(' |')
# column(insertRightHandContent[0], :color => insertRightHandContent[1])
# end
displaySummaryDivider
row do
column(' TOTAL MONEY IN', :color => @white)
column(@summaryData[:month1][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month1][:total_in])[0], :color => @white)
column(@summaryData[:month2][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month2][:total_in])[0], :color => @white)
column(@summaryData[:month3][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month3][:total_in])[0], :color => @white)
column(@summaryData[:month4][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month4][:total_in])[0], :color => @white)
column(@summaryData[:month5][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:month5][:total_in])[0], :color => @white)
column(@summaryData[:monthTotal][:total_in] <= 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:total_in])[0], :color => @white)
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
row do
column(' TOTAL MONEY OUT', :color => @white)
column(@summaryData[:month1][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month1][:total_out])[0], :color => @white)
column(@summaryData[:month2][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month2][:total_out])[0], :color => @white)
column(@summaryData[:month3][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month3][:total_out])[0], :color => @white)
column(@summaryData[:month4][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month4][:total_out])[0], :color => @white)
column(@summaryData[:month5][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:month5][:total_out])[0], :color => @white)
column(@summaryData[:monthTotal][:total_out] <= 0 ? '—' : getAsCurrency(0 - @summaryData[:monthTotal][:total_out])[0], :color => @white)
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
displaySummaryDivider
# Ending balance of month N is the starting balance of month N-1.
row do
column(' ENDING BALANCE', :color => @white)
column('—', :color => @white)
column("#{@summaryData[:month1][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month1][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month2][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month2][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month3][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month3][:starting_balances][:totalCash])[0]}", :color => @white)
column("#{@summaryData[:month4][:starting_balances][:totalCash].nil? ? '—' : getAsCurrency(@summaryData[:month4][:starting_balances][:totalCash])[0]}", :color => @white)
column('—', :color => @white)
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
displaySummaryDivider
row do
column(' PROFIT/LOSS', :color => @white)
column(@summaryData[:month1][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month1][:profit_loss])[0], :color => @summaryData[:month1][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month1][:profit_loss])[1])
column(@summaryData[:month2][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month2][:profit_loss])[0], :color => @summaryData[:month2][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month2][:profit_loss])[1])
column(@summaryData[:month3][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month3][:profit_loss])[0], :color => @summaryData[:month3][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month3][:profit_loss])[1])
column(@summaryData[:month4][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month4][:profit_loss])[0], :color => @summaryData[:month4][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month4][:profit_loss])[1])
column(@summaryData[:month5][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:month5][:profit_loss])[0], :color => @summaryData[:month5][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:month5][:profit_loss])[1])
column(@summaryData[:monthTotal][:profit_loss] == 0 ? '—' : getAsCurrency(@summaryData[:monthTotal][:profit_loss])[0], :color => @summaryData[:monthTotal][:profit_loss] == 0 ? @white : getAsCurrency(@summaryData[:monthTotal][:profit_loss])[1])
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
end
puts "#{getRuleString(@summaryWidthTotal)}"
# Calculates (displays) where to put arrow depending on how far through the month we are..
currentDay = @month1.strftime('%d').to_f
lastDay = (getEndOfMonthDay.to_f) - 1
percentOfMonthLeft = 100 - (currentDay - 1) / (lastDay / 100)
pixelsRemaining = ((@summaryWidthTotal - 1).to_f / 100) * percentOfMonthLeft
pixelToPutArrow = ((@summaryWidthTotal - 1) - pixelsRemaining)
# Yellow star (UTF-8) positioned proportionally along the table width.
puts " \x1B[36m#{getRuleString(pixelToPutArrow - 1, ' ')}\x1B[33m\xe2\x98\x85\x1B[0m\n\n"
# Uncomment for 'Enter to clear' functionality after script run.
# 31-10-2014 - Removing this because it's annoying.
# enter_to_clear
end
# Prints one horizontal divider row spanning every summary column, the
# column separator, and the right-hand side content cell.
# @return void
def displaySummaryDivider
  row do
    # The seven summary column widths, in display order.
    [@summaryWidth_1, @summaryWidth_2, @summaryWidth_3, @summaryWidth_4,
     @summaryWidth_5, @summaryWidth_6, @summaryWidth_7].each do |width|
      column(getRuleString(width), :bold => false, :color => @white)
    end
    column(' |', :bold => false, :color => @white)
    # insertRightHandContent is stateful; it is intentionally called twice
    # here (text, then colour), exactly as in the other row renderers.
    column(insertRightHandContent[0], :color => insertRightHandContent[1], :bold => false)
  end
end
# @return void
def recognizedTransactionLoop(intTypeId)
@recognizedTransactions.each do |recognizedTransaction|
if recognizedTransaction[:intTypeID] == intTypeId
amt1, amt2, amt3, amt4, amt5, amtTotal = runCalculationFor(method(:calculateAmountPaidReceivedForRecognizedTransaction), recognizedTransaction[:id])
plus_color = 235
minus_color = 235
if intTypeId == 2
color = Array[plus_color, plus_color, plus_color, plus_color, plus_color, plus_color]
else
color = Array[minus_color, minus_color, minus_color, minus_color, minus_color, minus_color]
end
# Only do estimations for 1st column (current month)
if amt1[0] == 0
amt1 = Array[23, Array["#{(!recognizedTransaction[:estimated].nil?) ? '~' : ''}#{getAsCurrency(recognizedTransaction[:recurring_amount])[0].delete('£')}", @white]]
if intTypeId == 2
color[0] = @green
else
color[0] = @red
end
end
row do
column(" #{recognizedTransaction[:translation]}", :color => (intTypeId == 2) ? 47 : 196)
column(amt1[0] <= 0 ? '—' : "#{intTypeId == 3 && recognizedTransaction[:estimated].nil? ? '—' : ''}#{amt1[1][0]}", :color => amt1[0] <= 0 ? @white : color[0])
column(amt2[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt2[1][0]}", :color => amt2[0] <= 0 ? @white : color[1])
column(amt3[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt3[1][0]}", :color => amt3[0] <= 0 ? @white : color[2])
column(amt4[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt4[1][0]}", :color => amt4[0] <= 0 ? @white : color[3])
column(amt5[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amt5[1][0]}", :color => amt5[0] <= 0 ? @white : color[4])
column(amtTotal[0] <= 0 ? '—' : "#{intTypeId == 3 ? '—' : ''}#{amtTotal[1][0]}", :color => amtTotal[0] <= 0 ? @white : color[5])
column(' |')
column(insertRightHandContent[0], :color => insertRightHandContent[1])
end
end
end
end
# Returns the next [text, colour] cell for the right-hand side panel.
# Stateful: each call advances @rightHandSideCount (position within the
# current group of rows) and @rightHandSideContentCount (which line of
# @rightHandSideContent to show). The counting scheme skips content on
# rows 5 and 6 and resets after row 7, producing the panel's row spacing.
# @return array [content, colour]
def insertRightHandContent
  @rightHandSideCount = @rightHandSideCount + 1
  if @rightHandSideCount == 3
    @rightHandSideContentCount = @rightHandSideContentCount + 1
  elsif @rightHandSideCount >= 7
    # End of group: restart row counter and move to the next content line.
    @rightHandSideCount = 1
    @rightHandSideContentCount = @rightHandSideContentCount + 1
  elsif @rightHandSideCount >= 5
    if @rightHandSideContentExists
      return Array['', @white]
      # DO NOT DELETE!
      # The following line of code would insert '----' between the outputs.
      #
      # return Array[" #{getRuleString(@summaryWidth_9 - 1)}", @white]
    else
      return Array['', @white]
    end
  end
  # Default colour 208 (orange) used when a content line supplies no colour.
  content = ''
  color = 208
  if !@rightHandSideContent[@rightHandSideContentCount].nil?
    content = @rightHandSideContent[@rightHandSideContentCount][0]
    color = @rightHandSideContent[@rightHandSideContentCount][1]
  else
    # Ran out of content lines; remember so later rows stay blank.
    @rightHandSideContentExists = false
  end
  Array[content, color]
end
# Runs +callback+ once per tracked month (current month first) and pairs each
# raw result with its currency-formatted form, then appends a grand total.
# @param callback [#call] called as callback.call(month, param1)
# @param param1 [Object, nil] opaque extra argument forwarded to the callback
# @return array of six [raw_value, currency_pair] entries
#   (month1..month5, then the total)
def runCalculationFor(callback, param1 = nil)
  per_month = [@month1, @month2, @month3, @month4, @month5].map do |month|
    raw = callback.call(month, param1)
    [raw, getAsCurrency(raw)]
  end
  total = per_month.sum { |entry| entry[0] }
  per_month + [[total, getAsCurrency(total)]]
end
# Get all the totals (for current month): computes the aggregate totals from
# the latest bank-account balances and caches them on the instance.
# @return void
def getTotals
  totals = calculateTotals(@bankAccountBalances)
  @totalAvailable, @totalCreditUsed, @totalCredit, @totalCash =
    totals.values_at(:totalAvailable, :totalCreditUsed, :totalCredit, :totalCash)
end
# Sums balances across the hard-coded set of tracked accounts.
# Account ids 1, 2, 4, 5, 8 look like current accounts (they have
# 'balance_overdraft'); ids 7, 9, 10, 11 look like credit cards (they have
# 'balance_limit' and 'pending_transactions') — confirm against the
# bank_account tables.
# @param data [Hash] account id => balance row (string-keyed hash)
# @return object hash with :totalAvailable, :totalCreditUsed, :totalCredit,
#   :totalCash — all nil when any balance row failed to load
def calculateTotals(data)
  # If any of the balances are nil, abort mission.
  if data.any? { |pair| pair.any?(&:nil?) }
    return {
      :totalAvailable => nil,
      :totalCreditUsed => nil,
      :totalCredit => nil,
      :totalCash => nil
    }
  end
  # Summation order mirrors the original expressions exactly.
  totalAvailable =
    [1, 2, 4, 5, 7, 8, 9, 10, 11]
      .sum { |id| data[id]['balance_available'].to_f }
      .round(2)
  # Credit used = card balances + pending, plus any overdrawn (negative)
  # current-account balances counted as positive usage.
  totalCreditUsed =
    ([7, 10, 9, 11].sum { |id| data[id]['balance'].to_f + data[id]['pending_transactions'].to_f } +
     [1, 2, 4, 5, 8].sum do |id|
       balance = data[id]['balance'].to_f
       balance < 0 ? -balance : 0
     end).round(2)
  totalCredit =
    ([1, 2, 4, 5, 8].sum { |id| data[id]['balance_overdraft'].to_f } +
     [7, 9, 10, 11].sum { |id| data[id]['balance_limit'].to_f }).round(2)
  totalCash = (totalAvailable - totalCredit).round(2)
  {
    :totalAvailable => totalAvailable,
    :totalCreditUsed => totalCreditUsed,
    :totalCredit => totalCredit,
    :totalCash => totalCash,
  }
end
# Get summary data for 5 month output: loads each month's opening balances
# from the database, buckets every transaction into its month (recognized
# recurring amounts keyed by transaction id, everything else into misc),
# then fills the :monthTotal bucket and per-month profit/loss.
# NOTE(review): the SQL below interpolates values directly into the query
# string; safe only if bank account ids are trusted internal data — confirm.
# @return void
def calculateSummary
  # Get start of month balances
  Array[@month1, @month2, @month3, @month4, @month5].each do |month|
    # Get start of month balances.
    thisMonthBalances = {}
    @bankAccounts.each do |bankAccount|
      bankAccount = bankAccount[1]
      # Each account type stores its balances in a different table.
      case bankAccount['bank_account_type_id'].to_i
      when 1
        bankAccountTable = 'bank_account_type_bank_account'
      when 2
        bankAccountTable = 'bank_account_type_credit_card'
      when 3
        bankAccountTable = 'bank_account_type_isa'
      else
        raise(RuntimeError, "bank_account_type => #{bankAccount['bank_account_type']} doesn't exist.")
      end
      # Earliest balance fetched in the first week of the month approximates
      # the opening balance; fetch_hash may be nil if no row exists.
      balance = @databaseConnection.query("SELECT * FROM #{bankAccountTable} WHERE bank_account_id='#{bankAccount['id']}' AND (date_fetched>='#{month.strftime('%Y-%m-01')}' AND date_fetched<='#{month.strftime('%Y-%m-07')}') ORDER BY date_fetched ASC LIMIT 1")
      thisMonthBalances[bankAccount['id'].to_i] = balance.fetch_hash
      balance.free
    end
    monthObject = getMonthObject(month.strftime('%Y-%m'))
    monthObject[:starting_balances] = calculateTotals(thisMonthBalances)
  end
  @transactions.each do |transaction|
    # Skip internal transfers
    if isInternalTransfer(transaction)
      next
    end
    # Find out what month we're in and retrieve relevant location in memory for object.
    monthObject = getMonthObject(DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%Y-%m'))
    # If it's a month we don't recognize, skip to next transaction.
    if monthObject.nil?
      next
    end
    transactionRecognized = false
    transactionAdded = false
    # Check if transaction is recognized: same account, same type, and the
    # description matches any of the configured term regexes.
    @recognizedTransactions.each do |rt|
      if transaction['bank_account_id'].to_i == rt[:bank_account_id] && transaction['type'] == rt[:type] && rt[:terms].any? { |w| transaction['description'] =~ /#{w}/ }
        transactionRecognized = rt[:id]
        break
      end
    end
    # Process recurring transactions: type 1 accumulates into :cash_in,
    # types 2/3 accumulate under a per-transaction symbol key (paid_in for
    # type 2, paid_out for type 3). Any other type falls through to misc.
    if transactionRecognized
      transactionAdded = true
      rt = @recognizedTransactionsIndexedID["#{transactionRecognized}"]
      if rt[:intTypeID] == 1
        monthObject[:cash_in] = monthObject[:cash_in] + transaction['paid_in'].to_f
      elsif rt[:intTypeID] == 2
        if monthObject[:"#{rt[:id]}"].nil?
          monthObject[:"#{rt[:id]}"] = 0
        end
        monthObject[:"#{rt[:id]}"] = monthObject[:"#{rt[:id]}"] + transaction['paid_in'].to_f
      elsif rt[:intTypeID] == 3
        if monthObject[:"#{rt[:id]}"].nil?
          monthObject[:"#{rt[:id]}"] = 0
        end
        monthObject[:"#{rt[:id]}"] = monthObject[:"#{rt[:id]}"] + transaction['paid_out'].to_f
      else
        transactionAdded = false
      end
    end
    # Process remaining transactions (un-recurring)
    unless transactionAdded
      if transaction['paid_in'].to_f > 0
        monthObject[:misc_in] = monthObject[:misc_in] + transaction['paid_in'].to_f
      elsif transaction['paid_out'].to_f > 0
        monthObject[:misc_out] = monthObject[:misc_out] + transaction['paid_out'].to_f
      end
    end
    # Calculate Totals (for all transactions)
    if transaction['paid_in'].to_f > 0
      monthObject[:total_in] = monthObject[:total_in] + transaction['paid_in'].to_f
    elsif transaction['paid_out'].to_f > 0
      monthObject[:total_out] = monthObject[:total_out] + transaction['paid_out'].to_f
    end
  end
  # Monthly Totals: fold every per-month numeric field into :monthTotal,
  # skipping the non-numeric :starting_balances entry.
  @summaryData.each do |monthObject|
    if monthObject[0].to_s != 'monthTotal'
      monthObject[1].each do |key, value|
        unless key.to_s == 'starting_balances'
          if @summaryData[:monthTotal][:"#{key}"].nil?
            @summaryData[:monthTotal][:"#{key}"] = 0
          end
          @summaryData[:monthTotal][:"#{key}"] = @summaryData[:monthTotal][:"#{key}"] + value
        end
      end
    end
  end
  # Monthly Profit Loss
  @summaryData.each do |monthObject|
    monthObject[1][:profit_loss] = monthObject[1][:total_in] - monthObject[1][:total_out]
  end
end
# Looks up the amount paid/received for recognized transaction +id+ in the
# month's summary bucket (month matched by its '%m' number against the five
# tracked months; missing keys coerce to 0.0 via to_f).
# @param month [Date, DateTime] one of the five tracked months
# @param id [Integer] recognized-transaction id (stored under a symbol key)
# @return [Float]
# @raise [RuntimeError] when the month is not one of the tracked five
def calculateAmountPaidReceivedForRecognizedTransaction(month, id)
  case month.strftime('%m')
  when @month1.strftime('%m')
    amt = @summaryData[:month1][:"#{id}"].to_f
  when @month2.strftime('%m')
    amt = @summaryData[:month2][:"#{id}"].to_f
  when @month3.strftime('%m')
    amt = @summaryData[:month3][:"#{id}"].to_f
  when @month4.strftime('%m')
    amt = @summaryData[:month4][:"#{id}"].to_f
  when @month5.strftime('%m')
    amt = @summaryData[:month5][:"#{id}"].to_f
  else
    # Bug fix: RuntimeError('...') called the class like a method and raised
    # NoMethodError instead; raise with class + message as intended.
    raise(RuntimeError, 'Month not found.')
  end
  amt
end
# For each recurring in/out recognized transaction that has NOT yet appeared
# in the current month's transactions, adds its expected recurring amount to
# @moneyInRemaining (type 2) or @moneyOutRemaining (type 3).
# @return void
def calculateMoneyRemaining
  @recognizedTransactions.each do |rt|
    if rt[:intTypeID] == 2 || rt[:intTypeID] == 3
      transactionFound = false
      # Scan current-month transactions for a match on account, type and
      # any of the description term regexes.
      @transactions.each do |transaction|
        if @month1.strftime('%Y-%m') == DateTime.strptime(transaction['date'], '%Y-%m-%d').strftime('%Y-%m')
          if transaction['bank_account_id'].to_i == rt[:bank_account_id] && transaction['type'] == rt[:type] && rt[:terms].any? { |w| transaction['description'] =~ /#{w}/ }
            transactionFound = true
          end
        end
      end
      # Not paid/received yet: count the expected amount as still to come.
      if !transactionFound
        if rt[:intTypeID] == 2
          @moneyInRemaining = @moneyInRemaining + rt[:recurring_amount]
        elsif rt[:intTypeID] == 3
          @moneyOutRemaining = @moneyOutRemaining + rt[:recurring_amount]
        end
      end
    end
  end
end
# Adds every type-3 (outgoing) recognized transaction's recurring amount to
# the running @fixedMonthlyOutgoings total.
# @return void
def calculateFixedMonthlyOutgoings
  @fixedMonthlyOutgoings += @recognizedTransactions
                            .select { |rt| rt[:intTypeID] == 3 }
                            .sum { |rt| rt[:recurring_amount] }
end
# Percentage of the total credit facility currently in use, rounded to two
# decimal places; 0 when nothing is drawn down.
# @return float
def calculateCreditUsed
  used = @totalCreditUsed.to_f
  return 0 unless used > 0
  ((used / @totalCredit.to_f) * 100).round(2)
end
# Fetches the most recently recorded Experian credit score from the database
# and caches it on the instance as [score, score_text].
# @return array [score, score_text]
def getCreditScore
  creditScore = @databaseConnection.query('SELECT * FROM experian_credit_report ORDER BY date_fetched DESC LIMIT 1')
  creditScore = creditScore.fetch_hash
  @creditScore = Array[creditScore['score'], creditScore['score_text']]
end
# Returns name of bank account + associated color.
# @param bankId [String, Integer] bank id (used as a string key into @banks)
# @return [Hash] {0 => bank name, 1 => terminal colour code}
def getBankAndColor(bankId)
  # Per-bank colour assignments; 113 is a light green with no named ivar.
  bank_colors = {
    1 => @magenta,
    2 => @blue,
    3 => @green,
    4 => @cyan,
    5 => 113
  }
  {
    0 => @banks[bankId.to_s],
    1 => bank_colors.fetch(bankId.to_i, @white)
  }
end
# Projected cash position once all remaining recognized income and bills for
# the month have cleared.
# @return float
def getBalanceAfterBills
  projected = @totalCash + @moneyInRemaining
  projected - @moneyOutRemaining
end
# Parameter passed in must be in string form as '%Y-%m'.
# Maps the month string onto its @summaryData bucket; first match wins.
# @param transactionMonth [String] month in '%Y-%m' form
# @return object summary bucket hash, or nil when outside the tracked window
def getMonthObject(transactionMonth)
  mapping = [
    [@month1, :month1],
    [@month2, :month2],
    [@month3, :month3],
    [@month4, :month4],
    [@month5, :month5]
  ]
  matched = mapping.find { |date, _slot| date.strftime('%Y-%m') == transactionMonth }
  matched ? @summaryData[matched[1]] : nil
end
# Calculates (depending on the last 4 month trend) how much money I should
# have by the end of the month: opening cash plus the 4-month average income
# minus the 4-month average outgoings.
# @return float, or nil when the current month's opening cash is unknown
def getProjectedOEMBalance
  history = [:month2, :month3, :month4, :month5]
  averageIn = (history.sum { |m| @summaryData[m][:total_in] } / 4).round(2)
  averageOut = (history.sum { |m| @summaryData[m][:total_out] } / 4).round(2)
  opening_cash = @summaryData[:month1][:starting_balances][:totalCash]
  return nil if opening_cash.nil?
  (opening_cash + averageIn) - averageOut
end
# Calculates (depending on the last 4 month trend) how much money I should
# have by the end of the month.
# @deprecated superseded by getProjectedOEMBalance; kept for reference.
# @return float
def getProjectedOEMBalanceDeprecated
  # Four-month averages of recognized cash-in, misc income and misc spend.
  averageCashIn =
    ((@summaryData[:month2][:cash_in] +
      @summaryData[:month3][:cash_in] +
      @summaryData[:month4][:cash_in] +
      @summaryData[:month5][:cash_in]) / 4).round(2)
  averageMiscIn =
    ((@summaryData[:month2][:misc_in] +
      @summaryData[:month3][:misc_in] +
      @summaryData[:month4][:misc_in] +
      @summaryData[:month5][:misc_in]) / 4).round(2)
  averageMiscOut =
    ((@summaryData[:month2][:misc_out] +
      @summaryData[:month3][:misc_out] +
      @summaryData[:month4][:misc_out] +
      @summaryData[:month5][:misc_out]) / 4).round(2)
  # An estimate of how much 'MISC' goes in/out based on last 4 months.
  inAdjustment = (averageMiscIn - @summaryData[:month1][:misc_in]) + (averageCashIn - @summaryData[:month1][:cash_in])
  outAdjustment = (averageMiscOut - @summaryData[:month1][:misc_out])
  # Calculate how much % of month is left...
  totalDaysInCurrentMonth = Date.civil(@month1.strftime('%Y').to_i, @month1.strftime('%m').to_i, -1).day
  currentDay = @month1.strftime('%d')
  percentOfMonthLeft = 100 - (currentDay.to_f / (totalDaysInCurrentMonth.to_f / 100))
  # ...and adjust adjustments accordingly.
  inAdjustment = inAdjustment * (percentOfMonthLeft / 100)
  outAdjustment = outAdjustment * (percentOfMonthLeft / 100)
  # Add adjustments to @moneyRemaining variables.
  moneyInRemaining = @moneyInRemaining + inAdjustment
  moneyOutRemaining = @moneyOutRemaining + outAdjustment
  (@totalCash - moneyOutRemaining) + moneyInRemaining
end
# Gets end of month day (for current month), e.g. 28/29/30/31.
# @return int
def getEndOfMonthDay
  year = @month1.strftime('%Y').to_i
  month = @month1.strftime('%m').to_i
  # Day -1 resolves to the last day of the month.
  Date.civil(year, month, -1).day.to_i
end
# Returns a horizontal rule such as '━━━━': +delimiter+ repeated +length+
# times. Non-positive lengths yield '' (matching the old loop's behaviour),
# and fractional lengths are truncated (the old 0..length-1 range iterated
# over whole integers only).
# @param length [Numeric] number of repetitions
# @param delimiter [String] character to repeat
# @return string
def getRuleString(length, delimiter = '━')
  # Idiom fix: String#* replaces the character-append for-loop; guard keeps
  # the original "empty string for length <= 0" behaviour (String#* raises
  # on negative counts).
  count = length.to_i
  count > 0 ? delimiter * count : ''
end
# Returns the amount as a currency-formatted string with thousands
# separators, paired with a display colour (red when negative, green
# otherwise).
# @param amount [Numeric, String] value to format (coerced via to_f)
# @param symbol [String] currency symbol prefix
# @param delimiter [String] thousands separator
# @return object {0 => formatted string, 1 => colour code}
def getAsCurrency(amount, symbol = '£', delimiter = ',')
  value = amount.to_f
  color = value < 0 ? @red : @green
  sign = value < 0 ? '-' : ''
  digits = format('%.2f', value.abs)
  # Group thousands by reversing, inserting the delimiter after every run of
  # three digits that is followed by another digit, then reversing back.
  grouped = digits.reverse
                  .gsub(%r{([0-9]{3}(?=([0-9])))}) { "#{Regexp.last_match(1)}#{delimiter}" }
                  .reverse
  { 0 => "#{sign}#{symbol}#{grouped}", 1 => color }
end
# If timestamp is one of MySQL's zeroed "no value" stamps, substitutes a
# fixed sentinel date so downstream timestamp parsing doesn't error.
# @param timestamp [String]
# @return string the original timestamp, or the sentinel for blank stamps
def normalizeTimestamp(timestamp)
  blank_stamps = ['0000-00-00 00:00:00', '0000-00-00T00:00:00+00:00']
  blank_stamps.include?(timestamp) ? '1983-10-29T03:16:00+00:00' : timestamp
end
# Gives a prompt where ONLY 'Enter' will clear the screen, any other key will not.
# Switches the terminal to raw mode to read a single keypress, always
# restores the terminal settings, then exits the process.
# @return void
def enter_to_clear
  begin
    # Raw mode: read one key without waiting for Enter, no echo.
    system('stty raw -echo')
    response = STDIN.getc
  ensure
    system('stty -raw echo')
  end
  # In raw mode the Enter key arrives as carriage return.
  if response.chr == "\r"
    system('clear')
  end
  exit
end
end
# Entry point: ARGV[0] is forwarded to the constructor (its exact purpose is
# defined in the constructor, which is outside this view — presumably a
# filter/option flag; confirm there).
ShowBankTransactions.new(ARGV[0]).run
require 'spec_helper'
module BSON
  # Shared contract for every BSON element spec. Each including example
  # group must define via let:
  #   obj   - the Ruby value under test
  #   type  - the expected single-byte BSON type tag
  #   value - the expected serialized byte string
  shared_examples_for 'a bson element' do
    it 'has correct bson type' do
      obj.bson_type.should == type
    end
    it 'serializes to bson' do
      obj.bson_value.should == value
    end
    it 'deserializes from bson' do
      io = StringIO.new(value)
      Types::MAP[obj.bson_type.ord].from_bson(io).should == obj
    end
  end
  # 0x01: double (little-endian IEEE-754 bytes of 1.2)
  describe Float do
    it_behaves_like 'a bson element' do
      let(:type) { "\x01" }
      let(:obj) { 1.2 }
      let(:value) { "333333\xF3?" }
    end
  end
  # 0x02: UTF-8 string (int32 length incl. NUL, bytes, NUL)
  describe String do
    it_behaves_like 'a bson element' do
      let(:type) { "\x02" }
      let(:obj) { "string" }
      let(:value) { "\a\x00\x00\x00string\x00" }
    end
  end
  # 0x03: embedded document
  describe Hash do
    it_behaves_like 'a bson element' do
      let(:type) { "\x03" }
      let(:obj) { { :a => 1 } }
      let(:value) { "\f\x00\x00\x00\x10a\x00\x01\x00\x00\x00\x00" }
    end
  end
  # 0x04: array (document with "0", "1", ... keys)
  describe Array do
    it_behaves_like 'a bson element' do
      let(:type) { "\x04" }
      let(:obj) { ['a'] }
      let(:value) { "\x0E\x00\x00\x00\x020\x00\x02\x00\x00\x00a\x00\x00" }
    end
  end
  # 0x05: binary (int32 length, subtype byte, data)
  describe Binary do
    it_behaves_like 'a bson element' do
      let(:type) { "\x05" }
      let(:obj) { Binary.new("a") }
      let(:value) { "\x01\x00\x00\x00\x00a" }
    end
  end
  # 0x06: undefined (deprecated in BSON; no payload)
  describe Undefined do
    it_behaves_like 'a bson element' do
      let(:type) { "\x06" }
      let(:obj) { Undefined }
      let(:value) { "" }
    end
  end
  # 0x08: boolean false (payload byte 0x00)
  describe FalseClass do
    it_behaves_like "a bson element" do
      let(:type) { "\x08" }
      let(:obj) { false }
      let(:value) { "\x00" }
    end
  end
  # 0x08: boolean true (payload byte 0x01)
  describe TrueClass do
    it_behaves_like 'a bson element' do
      let(:type) { "\x08" }
      let(:obj) { true }
      let(:value) { "\x01" }
    end
  end
end
Reorder the let declarations so :obj comes first in every example group (cosmetic consistency change, no behavioural difference).
require 'spec_helper'
module BSON
  # Shared contract for every BSON element spec. Each including example
  # group must define via let:
  #   obj   - the Ruby value under test
  #   type  - the expected single-byte BSON type tag
  #   value - the expected serialized byte string
  shared_examples_for 'a bson element' do
    it 'has correct bson type' do
      obj.bson_type.should == type
    end
    it 'serializes to bson' do
      obj.bson_value.should == value
    end
    it 'deserializes from bson' do
      io = StringIO.new(value)
      Types::MAP[obj.bson_type.ord].from_bson(io).should == obj
    end
  end
  # 0x01: double (little-endian IEEE-754 bytes of 1.2)
  describe Float do
    it_behaves_like 'a bson element' do
      let(:obj) { 1.2 }
      let(:type) { "\x01" }
      let(:value) { "333333\xF3?" }
    end
  end
  # 0x02: UTF-8 string (int32 length incl. NUL, bytes, NUL)
  describe String do
    it_behaves_like 'a bson element' do
      let(:obj) { "string" }
      let(:type) { "\x02" }
      let(:value) { "\a\x00\x00\x00string\x00" }
    end
  end
  # 0x03: embedded document
  describe Hash do
    it_behaves_like 'a bson element' do
      let(:obj) { { :a => 1 } }
      let(:type) { "\x03" }
      let(:value) { "\f\x00\x00\x00\x10a\x00\x01\x00\x00\x00\x00" }
    end
  end
  # 0x04: array (document with "0", "1", ... keys)
  describe Array do
    it_behaves_like 'a bson element' do
      let(:obj) { ['a'] }
      let(:type) { "\x04" }
      let(:value) { "\x0E\x00\x00\x00\x020\x00\x02\x00\x00\x00a\x00\x00" }
    end
  end
  # 0x05: binary (int32 length, subtype byte, data)
  describe Binary do
    it_behaves_like 'a bson element' do
      let(:obj) { Binary.new("a") }
      let(:type) { "\x05" }
      let(:value) { "\x01\x00\x00\x00\x00a" }
    end
  end
  # 0x06: undefined (deprecated in BSON; no payload)
  describe Undefined do
    it_behaves_like 'a bson element' do
      let(:obj) { Undefined }
      let(:type) { "\x06" }
      let(:value) { "" }
    end
  end
  # 0x08: boolean false (payload byte 0x00)
  describe FalseClass do
    it_behaves_like "a bson element" do
      let(:obj) { false }
      let(:type) { "\x08" }
      let(:value) { "\x00" }
    end
  end
  # 0x08: boolean true (payload byte 0x01)
  describe TrueClass do
    it_behaves_like 'a bson element' do
      let(:obj) { true }
      let(:type) { "\x08" }
      let(:value) { "\x01" }
    end
  end
end
#!/usr/local/rvm/bin/rvm-auto-ruby
require "/opt/engos/lib/ruby/ManagedContainer.rb"
require "/opt/engos/lib/ruby/SysConfig.rb"
require "/opt/engos/lib/ruby/ManagedEngine.rb"
require "/opt/engos/lib/ruby/EnginesOSapi.rb"
require "/opt/engos/lib/ruby/EnginesOSapiResult.rb"
# Prints the two-line CLI usage summary for engines.rb.
def print_usage
  usage = [
    "Usage engines.rb service|engine command engine_name|service_name",
    "Where command is one of status|lasterror|stats|demonitor|monitor|registerdns|deregisterdns|registersite|deregistersite|create|recreate|deleteimage|destroy|ps|logs|restart|start|stop|pause|unpause"
  ]
  usage.each { |line| puts(line) }
end
# Renders the database names (the hash's keys), one per line.
# @param volumes [Hash] database name => details
# @return [String]
def format_databases(volumes)
  volumes.keys.map { |key| key + "\n" }.join
end
# Renders the backup names (the hash's keys), one per line.
# @param volumes [Hash] backup name => details
# @return [String]
def format_backups(volumes)
  volumes.keys.map { |key| key + "\n" }.join
end
# Renders the volume names (the hash's keys), one per line.
# @param volumes [Hash] volume name => details
# @return [String]
def format_volumes(volumes)
  volumes.keys.map { |key| key + "\n" }.join
end
# Dispatches a single management +command+ against an engine or service.
# @param c_type [String] "container" targets an engine; anything else a service
# @param containerName [String] target name; for backup_vol/backup_db this is
#   a colon-separated argument list (see those branches)
# @param command [String] one of the commands listed by print_usage
# Result handling: when a branch returns an EnginesOSapiResult, its message
# is printed at the end (prefixed with "Failed:" on failure).
def do_cmd(c_type, containerName, command)
  engines_api = EnginesOSapi.new()
  docker_api = engines_api.docker_api
  case command
  when "check_and_act"
    # Reconcile actual container state with the configured setState by
    # recursively issuing the corrective command.
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    end
    state = engines_api.read_state(eng)
    if eng.setState != state
      res = "Error:" + containerName + ":" + state + ":set to:" + eng.setState
      case eng.setState
      when "running"
        res = "Error:starting " + containerName + " was in " + state
        if state == "nocontainer"
          do_cmd(c_type, containerName, "create")
        elsif state == "paused"
          do_cmd(c_type, containerName, "unpause")
        else
          do_cmd(c_type, containerName, "start")
        end
      when "stopped"
        res = "Error:stopping " + containerName + " was in " + state
        do_cmd(c_type, containerName, "stop")
      end
    end
  when "stop"
    if c_type == "container"
      res = engines_api.stopEngine(containerName)
    else
      res = engines_api.stopService(containerName)
    end
  when "start"
    if c_type == "container"
      res = engines_api.startEngine(containerName)
    else
      res = engines_api.startService(containerName)
    end
  when "pause"
    if c_type == "container"
      res = engines_api.pauseEngine(containerName)
    else
      res = engines_api.pauseService(containerName)
    end
  when "unpause"
    if c_type == "container"
      res = engines_api.unpauseEngine(containerName)
    else
      res = engines_api.unpauseService(containerName)
    end
  when "restart"
    if c_type == "container"
      res = engines_api.restartEngine(containerName)
    else
      res = engines_api.restartService(containerName)
    end
  when "rebuild"
    if c_type == "container"
      res = engines_api.rebuild_engine_container(containerName)
    else
      puts "Cannot rebuild Service"
    end
  when "logs"
    # NOTE(review): eng is loaded but unused; logs_container is invoked on
    # the API object with no argument — confirm the intended receiver.
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    end
    res = containerName + ":" + engines_api.logs_container
  when "ps"
    # NOTE(review): same pattern as "logs" — eng loaded but unused.
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    end
    res = containerName + ":" + engines_api.ps_container
  when "destroy"
    if c_type == "container"
      res = engines_api.destroyEngine(containerName)
    else
      puts("Error cannot destroy Service")
    end
  when "deleteimage"
    if c_type == "container"
      res = engines_api.delete_image(containerName)
    else
      puts("Error cannot delete a Service Image")
    end
  when "create"
    if c_type == "container"
      res = engines_api.createEngine(containerName)
    else
      res = engines_api.createService(containerName)
    end
  when "recreate"
    if c_type == "container"
      res = engines_api.recreateEngine(containerName)
    else
      res = engines_api.recreateService(containerName)
    end
  when "registersite"
    if c_type == "container"
      res = engines_api.registerEngineWebSite(containerName)
    else
      res = engines_api.registerServiceWebSite(containerName)
    end
  when "deregistersite"
    if c_type == "container"
      res = engines_api.deregisterEngineWebSite(containerName)
    else
      res = engines_api.deregisterServiceWebSite(containerName)
    end
  when "registerdns"
    if c_type == "container"
      res = engines_api.registerEngineDNS(containerName)
    else
      res = engines_api.registerServiceDNS(containerName)
    end
  when "deregisterdns"
    if c_type == "container"
      res = engines_api.deregisterEngineDNS(containerName)
    else
      res = engines_api.deregisterServiceDNS(containerName)
    end
  when "monitor"
    if c_type == "container"
      res = engines_api.monitorEngine(containerName)
    else
      puts("Error Monitor Service not applicable")
    end
  when "demonitor"
    if c_type == "container"
      res = engines_api.demonitorEngine(containerName)
    else
      puts("Error Monitor Service not applicable")
    end
  when "stats"
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    end
    if eng.is_a?(EnginesOSapiResult) == false
      res = eng.stats
      if res != nil && res != false
        if res.state == "stopped"
          res = "State:" + res.state + res.proc_cnt.to_s + " Procs " + "Stopped:" + res.stopped_ts + "Memory:V:" + res.RSSMemory.to_s + " R:" + res.VSSMemory.to_s + " cpu:" + res.cpuTime.to_s
        else
          res = "State:" + res.state + res.proc_cnt.to_s + " Procs " + "Started:" + res.started_ts + "Memory:V:" + res.RSSMemory.to_s + " R:" + res.VSSMemory.to_s + " cpu:" + res.cpuTime.to_s
        end
      end
    else
      # Loading failed: fall through so the error result is printed below.
      res = eng
    end
  when "status"
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    end
    state = engines_api.read_state(eng)
    if eng.setState != state
      res = "Error:" + containerName + ":" + state + ":set to:" + eng.setState
    else
      res = containerName + ":" + state
    end
  when "lasterror"
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    end
    res = eng.last_error
  when "databases"
    databases = engines_api.get_databases
    res = format_databases databases
  when "volumes"
    volumes = engines_api.get_volumes
    res = format_volumes volumes
  when "backups"
    backups = engines_api.get_backups
    res = format_backups backups
  when "backup_vol"
    # containerName packs: backupname:engine_name:volumename:proto:host:folder:user:pass
    args = containerName.split(":")
    backup_name = args[0]
    engine_name = args[1]
    volume_name = args[2]
    dest_hash = Hash.new
    dest_hash[:dest_proto] = args[3]
    dest_hash[:dest_address] = args[4]
    dest_hash[:dest_folder] = args[5]
    dest_hash[:dest_user] = args[6]
    dest_hash[:dest_pass] = args[7]
    p dest_hash
    res = engines_api.backup_volume(backup_name, engine_name, volume_name, dest_hash, docker_api)
  when "backup_db"
    # containerName packs: backupname:engine_name:database_name:proto:host:folder:user:pass
    args = containerName.split(":")
    backup_name = args[0]
    engine_name = args[1]
    database_name = args[2]
    dest_hash = Hash.new
    dest_hash[:dest_proto] = args[3]
    dest_hash[:dest_address] = args[4]
    dest_hash[:dest_folder] = args[5]
    dest_hash[:dest_user] = args[6]
    dest_hash[:dest_pass] = args[7]
    p dest_hash
    res = engines_api.backup_database(backup_name, engine_name, database_name, dest_hash, docker_api)
  when "stop_backup"
    backup_name = containerName
    res = engines_api.stop_backup(backup_name)
  when "register_consumers"
    eng = EnginesOSapi.loadManagedService(containerName, docker_api)
    eng.reregister_consumers
  else
    res = "command:" + command + " unknown"
    print_usage
  end
  if res != nil && res.is_a?(EnginesOSapiResult)
    # Bug fix: previously the failure branch printed "Failed:..." but left
    # `res` as the EnginesOSapiResult, so the subsequent `res.length` raised
    # NoMethodError (and the message would have printed twice). Build the
    # printable text in a separate variable instead.
    if res.was_success == false
      output = "Failed:" + res.result_mesg.to_s
    else
      output = res.result_mesg
    end
    if output.length > 0
      puts output
    end
  end
end
# --- CLI argument handling ---------------------------------------------------
# ARGV[0] selects the target kind: engine|service act on one named target,
# engines|services act on every managed target ("all").
containerName =""
c_type= ARGV[0]
if c_type== "engine"
  c_type = "container"
elsif c_type == "engines"
  c_type = "container"
  containerName = "all"
elsif c_type == "services"
  c_type = "service"
  containerName = "all"
elsif c_type == "service"
  c_type = "service"
else
  puts("unknown container type: Please use engine or service")
  print_usage
  exit
end
# Single targets need exactly 3 args; "all" forms need at least 2.
if ARGV.length != 3 && containerName != "all" || ARGV.length < 2 && containerName == "all"
  print_usage
  exit
end
command = ARGV[1]
if containerName != "all" #backward for scripts that use all instead of plural
  containerName = ARGV[2]
end
# Fan out over every managed engine/service, or run once for a single target.
if containerName == "all"
  engines_api = EnginesOSapi.new()
  if c_type == "container"
    engines = engines_api.getManagedEngines()
    engines.each do |engine|
      do_cmd(c_type,engine.containerName,command)
    end
  elsif c_type == "service"
    services = engines_api.getManagedServices()
    services.each do |service|
      do_cmd(c_type,service.containerName,command)
    end
  end
else
  do_cmd(c_type,containerName,command)
end
Build the printed message in a separate output variable so check_and_act does not crash (or print a stray line) when the result is a failure object and no action is taken.
#!/usr/local/rvm/bin/rvm-auto-ruby
require "/opt/engos/lib/ruby/ManagedContainer.rb"
require "/opt/engos/lib/ruby/SysConfig.rb"
require "/opt/engos/lib/ruby/ManagedEngine.rb"
require "/opt/engos/lib/ruby/EnginesOSapi.rb"
require "/opt/engos/lib/ruby/EnginesOSapiResult.rb"
# Prints the two-line CLI usage summary for engines.rb.
def print_usage
  puts("Usage engines.rb service|engine command engine_name|service_name")
  puts("Where command is one of status|lasterror|stats|demonitor|monitor|registerdns|deregisterdns|registersite|deregistersite|create|recreate|deleteimage|destroy|ps|logs|restart|start|stop|pause|unpause")
end
# Renders the database names (the hash's keys), one per line.
# @param volumes [Hash] database name => details
# @return [String]
def format_databases(volumes)
  res = String.new
  volumes.keys.each do |key|
    res = res + key +"\n"
  end
  return res
end
# Renders the backup names (the hash's keys), one per line.
# @param volumes [Hash] backup name => details
# @return [String]
def format_backups(volumes)
  res = String.new
  volumes.keys.each do |key|
    res = res + key +"\n"
  end
  return res
end
# Renders the volume names (the hash's keys), one per line.
# @param volumes [Hash] volume name => details
# @return [String]
def format_volumes(volumes)
  res = String.new
  volumes.keys.each do |key|
    res = res + key +"\n"
  end
  return res
end
# Dispatches a single management +command+ against an engine or service.
# @param c_type [String] "container" targets an engine; anything else a service
# @param containerName [String] target name; for backup_vol/backup_db this is
#   a colon-separated argument list (see those branches)
# @param command [String] one of the commands listed by print_usage
# When a branch yields an EnginesOSapiResult, its message is printed at the
# end ("Failed:"-prefixed on failure).
def do_cmd(c_type,containerName,command)
  engines_api = EnginesOSapi.new()
  docker_api = engines_api.docker_api
  # puts "Command" + command + " on " + containerName
  case command
  when "check_and_act"
    # Reconcile the actual container state with the configured setState by
    # recursively issuing the corrective command.
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    end
    state = engines_api.read_state(eng)
    if eng.setState != state
      res = "Error:" + containerName + ":" + state + ":set to:" + eng.setState
      case eng.setState
      when "running"
        res = "Error:starting " + containerName + " was in " + state
        if state == "nocontainer"
          do_cmd(c_type,containerName,"create")
        elsif state == "paused"
          do_cmd(c_type,containerName,"unpause")
        else
          do_cmd(c_type,containerName,"start")
        end
      when "stopped"
        res = "Error:stopping " + containerName + " was in " + state
        do_cmd(c_type,containerName,"stop")
      end
    end
  when "stop"
    if c_type == "container"
      res = engines_api.stopEngine(containerName)
    else
      res = engines_api.stopService(containerName)
    end
  when "start"
    if c_type == "container"
      res = engines_api.startEngine(containerName)
    else
      res = engines_api.startService(containerName)
    end
  when "pause"
    if c_type == "container"
      res = engines_api.pauseEngine(containerName)
    else
      res = engines_api.pauseService(containerName)
    end
  when "unpause"
    if c_type == "container"
      res = engines_api.unpauseEngine(containerName)
    else
      res = engines_api.unpauseService(containerName)
    end
  when "restart"
    if c_type == "container"
      res = engines_api.restartEngine(containerName)
    else
      res = engines_api.restartService(containerName)
    end
  when "rebuild"
    if c_type == "container"
      res = engines_api.rebuild_engine_container(containerName)
    else
      puts "Cannot rebuild Service"
    end
  when "logs"
    # NOTE(review): eng is loaded but unused; logs_container is invoked on
    # the API object with no argument — confirm the intended receiver.
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    end
    res = containerName + ":" + engines_api.logs_container
  when "ps"
    # NOTE(review): same pattern as "logs" — eng loaded but unused.
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    end
    res = containerName + ":" + engines_api.ps_container
  when "destroy"
    if c_type == "container"
      res = engines_api.destroyEngine(containerName)
    else
      puts ("Error cannot destroy Service")
    end
  when "deleteimage"
    if c_type == "container"
      res = engines_api.delete_image(containerName)
    else
      puts ("Error cannot delete a Service Image")
    end
  when "create"
    if c_type == "container"
      res = engines_api.createEngine(containerName)
    else
      res = engines_api.createService(containerName)
    end
  when "recreate"
    if c_type == "container"
      res = engines_api.recreateEngine(containerName)
    else
      res = engines_api.recreateService(containerName)
    end
  when "registersite"
    if c_type == "container"
      res = engines_api.registerEngineWebSite(containerName)
    else
      res = engines_api.registerServiceWebSite(containerName)
    end
  when "deregistersite"
    if c_type == "container"
      res = engines_api.deregisterEngineWebSite(containerName)
    else
      res = engines_api.deregisterServiceWebSite(containerName)
    end
  when "registerdns"
    if c_type == "container"
      res = engines_api.registerEngineDNS(containerName)
    else
      res = engines_api.registerServiceDNS(containerName)
    end
  when "deregisterdns"
    if c_type == "container"
      res = engines_api.deregisterEngineDNS(containerName)
    else
      res = engines_api.deregisterServiceDNS(containerName)
    end
  when "monitor"
    if c_type == "container"
      res = engines_api.monitorEngine(containerName)
    else
      puts ("Error Monitor Service not applicable")
    end
  when "demonitor"
    if c_type == "container"
      res = engines_api.demonitorEngine(containerName)
    else
      puts ("Error Monitor Service not applicable")
    end
  when "stats"
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    end
    if eng.is_a?(EnginesOSapiResult) == false
      res = eng.stats
      if res != nil && res != false
        if res.state == "stopped"
          res = "State:" + res.state + res.proc_cnt.to_s + " Procs " + "Stopped:" + res.stopped_ts + "Memory:V:" + res.RSSMemory.to_s + " R:" + res.VSSMemory.to_s + " cpu:" + res.cpuTime.to_s
        else
          res = "State:" + res.state + res.proc_cnt.to_s + " Procs " + "Started:" + res.started_ts + "Memory:V:" + res.RSSMemory.to_s + " R:" + res.VSSMemory.to_s + " cpu:" + res.cpuTime.to_s
        end
      end
    else
      # Loading failed: fall through so the error result is printed below.
      res = eng
    end
  when "status"
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    end
    state = engines_api.read_state(eng)
    if eng.setState != state
      res = "Error:" + containerName + ":" + state + ":set to:" + eng.setState
    else
      res = containerName + ":" + state
    end
  when "lasterror"
    if c_type == "container"
      eng = engines_api.loadManagedEngine(containerName)
    else
      eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    end
    res = eng.last_error
  when "databases"
    databases = engines_api.get_databases
    res = format_databases databases
  when "volumes"
    volumes = engines_api.get_volumes
    res = format_volumes volumes
  when "backups"
    backups = engines_api.get_backups
    res = format_backups backups
  when "backup_vol"
    #backupname engine_name volumename proto host folder user pass
    args=containerName.split(":")
    backup_name=args[0]
    engine_name=args[1]
    volume_name=args[2]
    dest_hash = Hash.new
    dest_hash[:dest_proto]=args[3]
    dest_hash[:dest_address]=args[4]
    dest_hash[:dest_folder]=args[5]
    dest_hash[:dest_user]=args[6]
    dest_hash[:dest_pass]=args[7]
    p dest_hash
    res = engines_api.backup_volume(backup_name,engine_name,volume_name,dest_hash,docker_api)
  when "backup_db"
    # Same packed-argument convention as backup_vol, with a database name.
    args=containerName.split(":")
    backup_name=args[0]
    engine_name=args[1]
    database_name=args[2]
    dest_hash = Hash.new
    dest_hash[:dest_proto]=args[3]
    dest_hash[:dest_address]=args[4]
    dest_hash[:dest_folder]=args[5]
    dest_hash[:dest_user]=args[6]
    dest_hash[:dest_pass]=args[7]
    p dest_hash
    res = engines_api.backup_database(backup_name,engine_name,database_name,dest_hash,docker_api)
  when "stop_backup"
    backup_name= containerName
    res = engines_api.stop_backup(backup_name)
  when "register_consumers"
    eng = EnginesOSapi.loadManagedService(containerName,docker_api)
    eng.reregister_consumers
  else
    res = "command:" + command + " unknown"
    print_usage
  end
  # Print the API result message (if any); output is built separately so a
  # failure doesn't leave res as a result object (and nothing prints when
  # the message is empty).
  if res !=nil && res.is_a?(EnginesOSapiResult)
    if res.was_success == false
      output = "Failed:" + res.result_mesg.to_s
    else
      output = res.result_mesg
    end
    if output.length >0
      puts output
    end
  end
end
end
# Entry point: normalise ARGV into a container type, a command and a target
# name, then dispatch do_cmd once per target (or once for a named target).
containerName = ""
c_type = ARGV[0]

# Plural type names ("engines"/"services") imply an "all" run.
case c_type
when "engine"
  c_type = "container"
when "engines"
  c_type = "container"
  containerName = "all"
when "services"
  c_type = "service"
  containerName = "all"
when "service"
  c_type = "service"
else
  puts("unknown container type: Please use engine or service")
  print_usage
  exit
end

# A named target requires exactly 3 arguments; an "all" run needs at least 2.
if (containerName != "all" && ARGV.length != 3) || (containerName == "all" && ARGV.length < 2)
  print_usage
  exit
end

command = ARGV[1]
# Backward compatibility: scripts may pass "all" explicitly instead of using
# the plural container type.
containerName = ARGV[2] if containerName != "all"

if containerName == "all"
  engines_api = EnginesOSapi.new
  if c_type == "container"
    engines_api.getManagedEngines.each do |engine|
      do_cmd(c_type, engine.containerName, command)
    end
  elsif c_type == "service"
    engines_api.getManagedServices.each do |service|
      do_cmd(c_type, service.containerName, command)
    end
  end
else
  do_cmd(c_type, containerName, command)
end
|
#
# Be sure to run `pod lib lint NXFramework.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name    = 'NXFramework' # pod name
  s.version = '0.0.1'       # pod version
  s.summary = '公用组件' # short summary ("shared components")

  # This description is used to generate tags and improve search results.
  s.description = <<-DESC
TODO: Add long description of the pod here.
  DESC

  s.homepage = 'https://github.com/jsonwang/NXFramework.git'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  # FIX: original email was '287971051@qq.com.com' (doubled ".com" suffix).
  s.author = { 'ak' => '287971051@qq.com' }
  # s.source = { :git => 'https://github.com/jsonwang/NXFramework.git', :tag => s.version.to_s } # tag-pinned form
  # Git location of the component (no tag pinned — `pod lib lint` will warn).
  s.source = { :git => 'https://github.com/jsonwang/NXFramework.git' }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '7.0'
  s.requires_arc = true

  # s.source_files = 'NXFramework/Classes/**/*.{h,m,swift,mm}','NXFramework/Classes/NXAdapted/NXAdaptedDevice/*.{h,m,swift,mm}'
  s.source_files = 'NXFramework/Classes/NXFramework.h'
  s.resource_bundles = {
    'NXFramework' => ['NXFramework/Assets/*.*']
  }

  # Works like a .pch file; separate multiple entries with commas.
  s.prefix_header_contents = '#import <UIKit/UIKit.h>', '#import <Foundation/Foundation.h>'
  # Public headers: every header imported by a public header must itself be public.
  s.public_header_files = 'NXFramework/Classes/NXFramework.h'
  # System frameworks the library links against.
  s.frameworks = 'UIKit'
  # s.vendored_libraries

  # Third-party dependencies
  # required: image caching
  s.dependency 'SDWebImage', '~> 5.0'
  # s.dependency 'SDWebImage/WebP', '~> 4.1.2'
  # Optional / recommended pods (added in the host Podfile, not here):
  # pull-to-refresh component
  # pod 'MJRefresh', '~> 3.1.12'
  # precise iOS memory-leak detection tool
  # pod 'MLeaksFinder', '~> 1.0.0'
  # loading UI kit
  # pod 'SVProgressHUD', :git => 'https://github.com/SVProgressHUD/SVProgressHUD.git'

  # s.subspec 'AdaptedDevice' do |ss|
  #   ss.source_files = 'NXFramework/Classes/NXAdapted/NXAdaptedDevice/**/*.{h,m}'
  #   ss.public_header_files = 'NXFramework/Classes/NXAdapted/NXAdaptedDevice/**/*.{h}'
  #   ss.frameworks = 'AdaptedDevice'
  # end

  s.subspec 'Base' do |ss|
    ss.source_files = 'NXFramework/Classes/NXObject.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXObject.h'
  end

  s.subspec 'NXMacro' do |ss|
    ss.source_files = 'NXFramework/Classes/NXMacro/**/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXMacro/**/*.h'
  end

  s.subspec 'NXDBManager' do |ss|
    ss.libraries = 'sqlite3','z' # libz.tdb
    ss.dependency 'FMDB', '~> 2.7.2'
    # required: YYKit
    ss.dependency 'YYKit', '1.0.9'
    ss.source_files = 'NXFramework/Classes/NXDBManager/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXDBManager/*.h'
  end

  s.subspec 'NXNetworkManager' do |ss|
    # ss.ios.frameworks = 'MobileCoreServices', 'CoreGraphics'
    ss.source_files = 'NXFramework/Classes/NXNetworkManager/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXNetworkManager/*.h'
    # required: networking library; see also https://github.com/shaioz/AFNetworking-AutoRetry
    ss.dependency 'AFNetworking', '~> 3.1.0'
  end

  s.subspec 'NXFoundation' do |ss|
    ss.source_files = 'NXFramework/Classes/NXFoundation/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXFoundation/*.h'
  end

  s.subspec 'NXPhotoLibrary' do |ss|
    ss.ios.frameworks = 'Photos'
    ss.dependency 'NXFramework/NXMacro'
    # NXPhotoLibrary is split into the functional sub-modules below.
    ss.subspec 'NXPhotoCategory' do |sss|
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoCategory/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoCategory/*.h'
    end
    ss.subspec 'NXPhotoServiece' do |sss|
      sss.dependency 'NXFramework/NXPhotoLibrary/NXPhotoCategory'
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoServiece/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoServiece/*.h'
    end
    ss.subspec 'NXPhotoImagePicker' do |sss|
      sss.dependency 'SVProgressHUD'
      sss.dependency 'NXFramework/NXPhotoLibrary/NXPhotoServiece'
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoImagePicker/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoImagePicker/*.h'
    end
    ss.subspec 'NXPhotoUtility' do |sss|
      sss.dependency 'NXFramework/NXUtility/NXCommond'
      sss.dependency 'NXFramework/NXPhotoLibrary/NXPhotoImagePicker'
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoUtility/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoUtility/*.h'
    end
  end

  s.subspec 'NXUtility' do |ss|
    # required: UI auto-layout
    ss.dependency 'SDAutoLayout', '~> 2.2.0'
    # required: aspect-oriented programming library
    ss.dependency 'Aspects' , '~> 1.4.1'
    ss.subspec 'NXAdapted' do |sss|
      sss.source_files = 'NXFramework/Classes/NXAdapted/**/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXAdapted/**/*.h'
      sss.dependency 'NXFramework/NXMacro'
    end
    ss.subspec 'NXCommond' do |sss|
      sss.source_files ='NXFramework/Classes/NXUtility/**/*.{h,m}', 'NXFramework/Classes/NXCategory/**/*.{h,m}','NXFramework/Classes/NXCustomViews/**/*.{h,m}'
      sss.public_header_files ='NXFramework/Classes/NXUtility/**/*.h', 'NXFramework/Classes/NXCategory/**/*.h','NXFramework/Classes/NXCustomViews/**/*.h'
      sss.dependency 'NXFramework/NXMacro'
      sss.dependency 'NXFramework/Base'
      sss.dependency 'NXFramework/NXUtility/NXAdapted'
    end
  end

  s.subspec 'NXBusiness' do |ss|
    ss.source_files = 'NXFramework/Classes/NXBusiness/**/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXBusiness/**/*.h'
    ss.dependency 'NXFramework/NXMacro'
    ss.dependency 'NXFramework/NXUtility'
    ss.dependency 'NXFramework/Base'
    ss.dependency 'NXFramework/NXUtility/NXAdapted'
  end

  s.subspec 'NXDebug' do |ss|
    ss.source_files = 'NXFramework/Classes/NXDebug/**/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXDebug/**/*.h'
    ss.dependency 'NXFramework/NXMacro'
    ss.dependency 'NXFramework/NXUtility/NXCommond'
  end
end
SDAutoLayout 使用 2.2.2 版本
SDAutoLayout 使用 2.2.2 版本
#
# Be sure to run `pod lib lint NXFramework.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name    = 'NXFramework' # pod name
  s.version = '0.0.1'       # pod version
  s.summary = '公用组件' # short summary ("shared components")

  # This description is used to generate tags and improve search results.
  s.description = <<-DESC
TODO: Add long description of the pod here.
  DESC

  s.homepage = 'https://github.com/jsonwang/NXFramework.git'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  # FIX: original email was '287971051@qq.com.com' (doubled ".com" suffix).
  s.author = { 'ak' => '287971051@qq.com' }
  # s.source = { :git => 'https://github.com/jsonwang/NXFramework.git', :tag => s.version.to_s } # tag-pinned form
  # Git location of the component (no tag pinned — `pod lib lint` will warn).
  s.source = { :git => 'https://github.com/jsonwang/NXFramework.git' }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '7.0'
  s.requires_arc = true

  # s.source_files = 'NXFramework/Classes/**/*.{h,m,swift,mm}','NXFramework/Classes/NXAdapted/NXAdaptedDevice/*.{h,m,swift,mm}'
  s.source_files = 'NXFramework/Classes/NXFramework.h'
  s.resource_bundles = {
    'NXFramework' => ['NXFramework/Assets/*.*']
  }

  # Works like a .pch file; separate multiple entries with commas.
  s.prefix_header_contents = '#import <UIKit/UIKit.h>', '#import <Foundation/Foundation.h>'
  # Public headers: every header imported by a public header must itself be public.
  s.public_header_files = 'NXFramework/Classes/NXFramework.h'
  # System frameworks the library links against.
  s.frameworks = 'UIKit'
  # s.vendored_libraries

  # Third-party dependencies
  # required: image caching
  s.dependency 'SDWebImage', '~> 5.0'
  # s.dependency 'SDWebImage/WebP', '~> 4.1.2'
  # Optional / recommended pods (added in the host Podfile, not here):
  # pull-to-refresh component
  # pod 'MJRefresh', '~> 3.1.12'
  # precise iOS memory-leak detection tool
  # pod 'MLeaksFinder', '~> 1.0.0'
  # loading UI kit
  # pod 'SVProgressHUD', :git => 'https://github.com/SVProgressHUD/SVProgressHUD.git'

  # s.subspec 'AdaptedDevice' do |ss|
  #   ss.source_files = 'NXFramework/Classes/NXAdapted/NXAdaptedDevice/**/*.{h,m}'
  #   ss.public_header_files = 'NXFramework/Classes/NXAdapted/NXAdaptedDevice/**/*.{h}'
  #   ss.frameworks = 'AdaptedDevice'
  # end

  s.subspec 'Base' do |ss|
    ss.source_files = 'NXFramework/Classes/NXObject.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXObject.h'
  end

  s.subspec 'NXMacro' do |ss|
    ss.source_files = 'NXFramework/Classes/NXMacro/**/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXMacro/**/*.h'
  end

  s.subspec 'NXDBManager' do |ss|
    ss.libraries = 'sqlite3','z' # libz.tdb
    ss.dependency 'FMDB', '~> 2.7.2'
    # required: YYKit
    ss.dependency 'YYKit', '1.0.9'
    ss.source_files = 'NXFramework/Classes/NXDBManager/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXDBManager/*.h'
  end

  s.subspec 'NXNetworkManager' do |ss|
    # ss.ios.frameworks = 'MobileCoreServices', 'CoreGraphics'
    ss.source_files = 'NXFramework/Classes/NXNetworkManager/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXNetworkManager/*.h'
    # required: networking library; see also https://github.com/shaioz/AFNetworking-AutoRetry
    ss.dependency 'AFNetworking', '~> 3.1.0'
  end

  s.subspec 'NXFoundation' do |ss|
    ss.source_files = 'NXFramework/Classes/NXFoundation/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXFoundation/*.h'
  end

  s.subspec 'NXPhotoLibrary' do |ss|
    ss.ios.frameworks = 'Photos'
    ss.dependency 'NXFramework/NXMacro'
    # NXPhotoLibrary is split into the functional sub-modules below.
    ss.subspec 'NXPhotoCategory' do |sss|
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoCategory/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoCategory/*.h'
    end
    ss.subspec 'NXPhotoServiece' do |sss|
      sss.dependency 'NXFramework/NXPhotoLibrary/NXPhotoCategory'
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoServiece/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoServiece/*.h'
    end
    ss.subspec 'NXPhotoImagePicker' do |sss|
      sss.dependency 'SVProgressHUD'
      sss.dependency 'NXFramework/NXPhotoLibrary/NXPhotoServiece'
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoImagePicker/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoImagePicker/*.h'
    end
    ss.subspec 'NXPhotoUtility' do |sss|
      sss.dependency 'NXFramework/NXUtility/NXCommond'
      sss.dependency 'NXFramework/NXPhotoLibrary/NXPhotoImagePicker'
      sss.source_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoUtility/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXPhotoLibrary/NXPhotoUtility/*.h'
    end
  end

  s.subspec 'NXUtility' do |ss|
    # required: UI auto-layout (pinned exactly to 2.2.1 in this revision)
    ss.dependency 'SDAutoLayout', '2.2.1'
    # required: aspect-oriented programming library
    ss.dependency 'Aspects' , '~> 1.4.1'
    ss.subspec 'NXAdapted' do |sss|
      sss.source_files = 'NXFramework/Classes/NXAdapted/**/*.{h,m}'
      sss.public_header_files = 'NXFramework/Classes/NXAdapted/**/*.h'
      sss.dependency 'NXFramework/NXMacro'
    end
    ss.subspec 'NXCommond' do |sss|
      sss.source_files ='NXFramework/Classes/NXUtility/**/*.{h,m}', 'NXFramework/Classes/NXCategory/**/*.{h,m}','NXFramework/Classes/NXCustomViews/**/*.{h,m}'
      sss.public_header_files ='NXFramework/Classes/NXUtility/**/*.h', 'NXFramework/Classes/NXCategory/**/*.h','NXFramework/Classes/NXCustomViews/**/*.h'
      sss.dependency 'NXFramework/NXMacro'
      sss.dependency 'NXFramework/Base'
      sss.dependency 'NXFramework/NXUtility/NXAdapted'
    end
  end

  s.subspec 'NXBusiness' do |ss|
    ss.source_files = 'NXFramework/Classes/NXBusiness/**/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXBusiness/**/*.h'
    ss.dependency 'NXFramework/NXMacro'
    ss.dependency 'NXFramework/NXUtility'
    ss.dependency 'NXFramework/Base'
    ss.dependency 'NXFramework/NXUtility/NXAdapted'
  end

  s.subspec 'NXDebug' do |ss|
    ss.source_files = 'NXFramework/Classes/NXDebug/**/*.{h,m}'
    ss.public_header_files = 'NXFramework/Classes/NXDebug/**/*.h'
    ss.dependency 'NXFramework/NXMacro'
    ss.dependency 'NXFramework/NXUtility/NXCommond'
  end
end
|
#!/usr/bin/env ruby
# Copyright 2014-2015 Julien GIRARD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Polls the OVH availability API for Kimsufi dedicated-server offers and
# executes user-supplied commands when a watched offer becomes available.

require 'net/http'
require 'uri'
require 'json'
require 'optparse'
require 'logger'
require 'io/console'
require_relative 'ovhapi'

# Define logger (ERROR by default; -v raises verbosity to INFO).
logger = Logger.new(STDOUT)
logger.level = Logger::ERROR

# URL of the OVH availability API
url = 'https://ws.ovh.com/dedicated/r2/ws.dispatcher/getAvailability2'

# References of Kimsufi offers (display name => OVH internal reference)
references = {
  'KS-1' => '150sk10',
  'KS-2' => '150sk20',
  'KS-2 SSD' => '150sk22',
  'KS-3' => '150sk30',
  'KS-4' => '150sk40',
  'KS-5' => '150sk50',
  'KS-6' => '150sk60'
}

# Zones where Kimsufi offers are available (zone code => datacenter name)
zones = {
  'gra' => 'Gravelines',
  'sbg' => 'Strasbourg',
  'rbx' => 'Roubaix',
  'bhs' => 'Beauharnois'
}

# Define default options of the script
options = {}
options[:verbose] = false
options[:loop] = false
options[:interval] = 0
options[:offers] = references.keys
options[:commands] = []
options[:proxy_addr] = nil
options[:proxy_port] = nil
options[:proxy_user] = nil
options[:proxy_pass] = nil

# Parse user specified options
OptionParser.new do |opts|
  opts.banner = "Usage: ./kimsufi-availability.rb [options]"
  # Verbose option
  opts.on('-v', '--[no-]verbose', 'Run verbosely.') do |v|
    options[:verbose] = v
    logger.level = Logger::INFO
  end
  # Loop option
  opts.on('-l N', '--loop N', Integer, 'When this option is set, the script will check the OVH API every N seconds.') do |n|
    puts 'Press Ctrl+C at any time to terminate the script.'
    trap('INT') { puts 'Shutting down.'; exit}
    options[:loop] = true
    options[:interval] = n
  end
  # Offers option
  opts.on('-o x,y,z', '--offers x,y,z', Array, "List offers to watch in the list #{options[:offers]}.") do |offers|
    options[:offers] = offers
  end
  # Commands option
  opts.on('-c x,y,z', '--commands x,y,z', Array, 'List of commands to execute on offer availability (firefox https://www.kimsufi.com/fr/commande/kimsufi.xml?reference=150sk10).') do |commands|
    options[:commands] = commands
  end
  # Proxy option
  opts.on('-p', '--proxy [addr:port]', String, 'Address of the proxy server to use to request ovh api.') do |proxy|
    # FIX: the argument is declared optional ([addr:port]), so OptionParser may
    # yield nil here; to_s prevents a NoMethodError on nil.split.
    split = proxy.to_s.split(':')
    options[:proxy_addr] = split.length > 0 ? split[0] : nil
    options[:proxy_port] = split.length > 1 ? split[1] : nil
  end
  # User proxy option
  opts.on('-u', '--user [user]', String, 'User to use for proxy authentication. Password will be asked dynamically.') do |user|
    options[:proxy_user] = user
    puts('Proxy password ?')
    # Read the password without echoing it to the terminal.
    pass = STDIN.noecho(&:gets).chomp
    options[:proxy_pass] = pass.empty? ? nil : pass
  end
end.parse!

# Initialize api interface
api = OvhApi.new()
uri = URI(url)
request = Net::HTTP::Get.new uri

# Keep the connection alive slightly longer than the polling interval so the
# same socket is reused across iterations of the loop.
Net::HTTP.start(uri.host, uri.port, options[:proxy_addr], options[:proxy_port], options[:proxy_user], options[:proxy_pass], {:use_ssl => true, :keep_alive_timeout => (10 + options[:interval].to_i)})do |http|
  begin
    # Request OVH api
    response = http.request request
    # Apply received data to ovh api class
    api.set_data(response.body)
    options[:offers].each do |offer|
      # Retrieve reference of the current offer
      reference = references.include?(offer) ? references[offer] : offer
      # Check if the reference is in api
      if api.include?(reference)
        availability = []
        # Retrieve available zones for the specified reference
        api.get_availability(reference).each do |zone|
          availability.push(zones.include?(zone) ? zones[zone] : zone)
        end
        if availability.length > 0
          logger.info("Offer #{offer} currently available in the following locations: #{availability}.")
          # The offer is available, we execute the list of commands
          options[:commands].each do |command|
            logger.info("About to execute command: '#{command}'.")
            if system(command)
              logger.info("Command executed successfully.")
            else
              logger.error("Command failed.")
            end
          end
          # Exit script when commands have run
          exit 0
        else
          # The offer is currently unavailable
          logger.info("Offer #{offer} currently not available.")
        end
      else
        logger.error("Offer #{offer}(reference: #{reference} not present in api.)")
      end
    end
    # Wait before retry (interval is 0 when not looping)
    sleep options[:interval]
  end while options[:loop]
end
Change to make use of the httpclient gem
#!/usr/bin/env ruby
# Copyright 2014-2015 Julien GIRARD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Polls the OVH availability API (via the httpclient gem) for Kimsufi offers
# and executes user-supplied commands when a watched offer becomes available.

require 'net/http'
require 'uri'
require 'json'
require 'optparse'
require 'logger'
require 'io/console'
require 'httpclient'
require_relative 'ovhapi'

# Define logger (ERROR by default; -v raises verbosity to INFO).
logger = Logger.new(STDOUT)
logger.level = Logger::ERROR

# URL of the OVH availability API
url = 'https://ws.ovh.com/dedicated/r2/ws.dispatcher/getAvailability2'

# References of Kimsufi offers (display name => OVH internal reference)
references = {
  'KS-1' => '150sk10',
  'KS-2' => '150sk20',
  'KS-2 SSD' => '150sk22',
  'KS-3' => '150sk30',
  'KS-4' => '150sk40',
  'KS-5' => '150sk50',
  'KS-6' => '150sk60'
}

# Zones where Kimsufi offers are available (zone code => datacenter name)
zones = {
  'gra' => 'Gravelines',
  'sbg' => 'Strasbourg',
  'rbx' => 'Roubaix',
  'bhs' => 'Beauharnois'
}

# Number of attempts to contact the OVH api before giving up
number_of_try = 3

# Define default options of the script
options = {}
options[:verbose] = false
options[:loop] = false
options[:interval] = 0
options[:offers] = references.keys
options[:commands] = []
options[:proxy_addr] = nil
options[:proxy_port] = nil
options[:proxy_user] = nil
options[:proxy_pass] = nil

# Parse user specified options
OptionParser.new do |opts|
  opts.banner = "Usage: ./kimsufi-availability.rb [options]"
  # Verbose option
  opts.on('-v', '--[no-]verbose', 'Run verbosely.') do |v|
    options[:verbose] = v
    logger.level = Logger::INFO
  end
  # Loop option
  opts.on('-l N', '--loop N', Integer, 'When this option is set, the script will check the OVH API every N seconds.') do |n|
    puts 'Press Ctrl+C at any time to terminate the script.'
    trap('INT') { puts 'Shutting down.'; exit}
    options[:loop] = true
    options[:interval] = n
  end
  # Offers option
  opts.on('-o x,y,z', '--offers x,y,z', Array, "List offers to watch in the list #{options[:offers]}.") do |offers|
    options[:offers] = offers
  end
  # Commands option
  opts.on('-c x,y,z', '--commands x,y,z', Array, 'List of commands to execute on offer availability (firefox https://www.kimsufi.com/fr/commande/kimsufi.xml?reference=150sk10).') do |commands|
    options[:commands] = commands
  end
  # Proxy option (httpclient accepts the "addr:port" string directly; may be nil)
  opts.on('-p', '--proxy [addr:port]', String, 'Address of the proxy server to use to request ovh api.') do |proxy|
    options[:proxy] = proxy
  end
  # User proxy option
  opts.on('-u', '--user [user]', String, 'User to use for proxy authentication. Password will be asked dynamically.') do |user|
    options[:proxy_user] = user
    puts('Proxy password ?')
    # Read the password without echoing it to the terminal.
    pass = STDIN.noecho(&:gets).chomp
    options[:proxy_pass] = pass.empty? ? nil : pass
  end
end.parse!

# Initialize api interface
api = OvhApi.new()

# Initialize http client (with proxy when one was supplied)
clnt = options[:proxy].nil? ? HTTPClient.new() : HTTPClient.new(options[:proxy])
if options[:proxy] != nil && options[:proxy_user] != nil
  logger.info("Setting proxy authentication.")
  clnt.set_proxy_auth(options[:proxy_user], options[:proxy_pass])
end

begin
  # Request OVH api, retrying up to number_of_try times
  counter = number_of_try
  response = nil
  begin
    response = clnt.get(url)
    counter -= 1
  end while !response.ok? && counter > 0
  # FIX: only fail when the FINAL response is still not OK. The previous
  # `if counter == 0` check reported "maximum number of try reached" and
  # exited 1 even when the last retry had actually succeeded.
  unless response.ok?
    # NOTE(review): httpclient's HTTP::Message documents #reason for the status
    # phrase — confirm #message resolves as intended here.
    logger.error("Maximum number of try to contact ovh api reached. with error: '#{response.message}'.")
    exit 1
  end
  # Apply received data to ovh api class
  api.set_data(response.content)
  options[:offers].each do |offer|
    # Retrieve reference of the current offer
    reference = references.include?(offer) ? references[offer] : offer
    # Check if the reference is in api
    if api.include?(reference)
      availability = []
      # Retrieve available zones for the specified reference
      api.get_availability(reference).each do |zone|
        availability.push(zones.include?(zone) ? zones[zone] : zone)
      end
      if availability.length > 0
        logger.info("Offer #{offer} currently available in the following locations: #{availability}.")
        # The offer is available, we execute the list of commands
        options[:commands].each do |command|
          logger.info("About to execute command: '#{command}'.")
          if system(command)
            logger.info("Command executed successfully.")
          else
            logger.error("Command failed.")
          end
        end
        # Exit script when commands have run
        exit 0
      else
        # The offer is currently unavailable
        logger.info("Offer #{offer} currently not available.")
      end
    else
      logger.error("Offer #{offer}(reference: #{reference} not present in api.)")
    end
  end
  # Wait before retry (interval is 0 when not looping)
  sleep options[:interval]
end while options[:loop]
|
# Podspec for NilColorKit: UIColor extensions (Google Material / FlatUI
# palettes) written in Swift.
Pod::Spec.new do |spec|
  # Pod identity and ownership.
  spec.name             = "NilColorKit"
  spec.version          = "0.1"
  spec.summary          = "NilColorKit extends UIColor with Google Material colors and FlatUI colors in Swift."
  spec.homepage         = "https://github.com/NilStack/NilColorKit"
  spec.license          = { type: "MIT", file: "LICENSE" }
  spec.author           = { "Peng Guo" => "guoleii@gmail.com" }
  spec.social_media_url = "http://twitter.com/NilStack"
  # Where the pod is fetched from and what gets compiled.
  spec.source       = { git: "https://github.com/NilStack/NilColorKit.git", tag: "v0.1" }
  spec.source_files = "NilColorKit/NilColorKit.swift"
  spec.frameworks   = "Foundation", "UIKit"
  spec.requires_arc = true
end
update podspec
# Podspec for NilColorKit 0.2.0: UIColor extensions (Google Material / FlatUI
# palettes) written in Swift.
Pod::Spec.new do |spec|
  # Pod identity and ownership.
  spec.name             = "NilColorKit"
  spec.version          = "0.2.0"
  spec.summary          = "NilColorKit extends UIColor with Google Material colors and FlatUI colors in Swift."
  spec.homepage         = "https://github.com/NilStack/NilColorKit"
  spec.license          = { type: "MIT", file: "LICENSE" }
  spec.author           = { "Peng Guo" => "guoleii@gmail.com" }
  spec.social_media_url = "http://twitter.com/NilStack"
  # Where the pod is fetched from and what gets compiled.
  spec.source       = { git: "https://github.com/NilStack/NilColorKit.git", tag: "v0.2.0" }
  spec.source_files = "NilColorKit/NilColorKit.swift"
  spec.frameworks   = "Foundation", "UIKit"
  spec.requires_arc = true
end
|
require 'spec_helper'

# Specs for the easyzpl ZPL label builder: empty labels and a simple label
# with a text field and a Code 39 barcode.
describe 'Testing easyzpl Gem' do
  context 'When creating an empty label' do
    it 'should output a blank label' do
      label = Easyzpl::Label.new
      # An empty label is just the start (^XA), quantity (^PQ1) and end (^XZ) commands.
      expect(label.to_s).to eq('^XA^PQ1^XZ')
    end
  end

  # FIX: context description typo "lable" -> "label".
  context 'When creating a simple label' do
    it 'should output a label with the text "Zebra" and a barcode representation.' do
      label = Easyzpl::Label.new
      label.home_position(30, 30)
      label.draw_border(0, 0, 400, 300)
      label.text_field('ZEBRA', 10, 10)
      label.bar_code_39('ZEBRA', 10, 30)
      expect(label.to_s).to eq('^XA^LH30,30^FO0,0^GB400,300,1^FS^FO10,10^AFN,10,10^FDZEBRA^FS^FO10,30^B3N,Y,20,N,N^FDZEBRA^FS^PQ1^XZ')
    end
  end
end
Adds tests for storing and accessing saved label templates.
require 'spec_helper'

# Specs for the easyzpl ZPL label builder, including stored label templates
# (^DF to save a template, ^XF to recall it with ^FN field data).
describe 'Testing easyzpl Gem' do
  context 'When creating an empty label' do
    it 'should output a blank label' do
      label = Easyzpl::Label.new
      # An empty label is just the start (^XA), quantity (^PQ1) and end (^XZ) commands.
      expect(label.to_s).to eq('^XA^PQ1^XZ')
    end
  end

  # FIX: context description typo "lable" -> "label".
  context 'When creating a simple label' do
    it 'should output a label with the text "Zebra" and a barcode representation' do
      label = Easyzpl::Label.new
      label.home_position(30, 30)
      label.draw_border(0, 0, 400, 300)
      label.text_field('ZEBRA', 10, 10)
      label.bar_code_39('ZEBRA', 10, 30)
      expect(label.to_s).to eq('^XA^LH30,30^FO0,0^GB400,300,1^FS^FO10,10^AFN,10,10^FDZEBRA^FS^FO10,30^B3N,Y,20,N,N^FDZEBRA^FS^PQ1^XZ')
    end
  end

  context 'When creating a stored template' do
    it 'should output a label template with one variable text field and one variable barcode' do
      label = Easyzpl::LabelTemplate.new('Template1')
      label.home_position(30, 30)
      label.draw_border(0, 0, 400, 300)
      label.variable_text_field(10, 10)
      label.variable_bar_code_39(10, 30)
      expect(label.to_s).to eq('^XA^DFTemplate1^FS^LH30,30^FO0,0^GB400,300,1^FS^FO10,10^FN1^FS^FO10,30^B3N,Y,20,N,N^FN2^FS^PQ1^XZ')
    end
  end

  context 'When accessing the stored template' do
    it 'should output a label with only two fields of data that are passed into a saved template' do
      label = Easyzpl::StoredLabel.new('Template1')
      label.add_field('ZEBRA')
      label.add_field('ZEBRA')
      expect(label.to_s).to eq('^XA^XFTemplate1^FS^FN1^FDZEBRA^FS^FN2^FDZEBRA^FS^PQ1^XZ')
    end
  end
end
|
require 'rails_spec_helper'
# Specs for AssignmentExportPerformer's filename/path helpers: export basename
# construction, per-student paths, filename sanitisation and memoized temp
# directories. NOTE(review): this file's indentation appears to have been
# stripped; only comments are added here, the code is untouched.
RSpec.describe AssignmentExportPerformer, type: :background_job do
include PerformerToolkit::SharedExamples
include Toolkits::Performers::AssignmentExport::SharedExamples
include ModelAddons::SharedExamples
# define_context (from the extended Context toolkit) presumably builds the
# shared lets used below (performer, performer_with_team, assignment, team).
extend Toolkits::Performers::AssignmentExport::Context
define_context
subject { performer }
# export_file_basename => "<archive_basename>_export_<filename_timestamp>",
# memoized in @export_file_basename.
describe "export_file_basename" do
subject { performer.instance_eval { export_file_basename }}
let(:filename_timestamp) { "2020-10-15_2394829348234893" }
before(:each) do
performer.instance_variable_set(:@export_file_basename, nil)
allow(performer).to receive(:archive_basename) { "some_great_assignment" }
allow(performer).to receive(:filename_timestamp) { filename_timestamp }
end
it "includes the fileized_assignment_name" do
expect(subject).to match(/^some_great_assignment/)
end
it "is appended with a YYYY-MM-DD formatted timestamp" do
expect(subject).to match(/2020-10-15_/)
end
# Second call must hit the memoized value, not archive_basename again.
it "caches the filename" do
subject
expect(performer).not_to receive(:archive_basename)
subject
end
it "sets the filename to an @export_file_basename" do
subject
expect(performer.instance_variable_get(:@export_file_basename)).to eq("some_great_assignment_export_#{filename_timestamp}")
end
end
# student_directory_file_path(student, name) joins the student's directory
# with the given file name.
describe "student_directory_file_path" do
let(:student_double) { double(:student) }
subject { performer.instance_eval { student_directory_file_path(@some_student, "whats_up.doc") }}
before do
performer.instance_variable_set(:@some_student, student_double)
allow(performer).to receive(:student_directory_path) { "/this/great/path" }
end
it "gets the student directory path from the student" do
expect(performer).to receive(:student_directory_path).with(student_double)
subject
end
it "builds the correct path relative to the student directory" do
expect(subject).to eq("/this/great/path/whats_up.doc")
end
end
# archive_basename: formatted assignment name, plus "_<team name>" when a
# team is present.
describe "archive_basename" do
subject { performer.instance_eval { archive_basename }}
before(:each) do
allow(performer).to receive(:formatted_assignment_name) { "blog_entry_5" }
allow(performer).to receive(:formatted_team_name) { "the_walloping_wildebeest" }
end
context "team_present? is true" do
before { allow(performer).to receive(:team_present?) { true }}
it "combines the formatted assignment and team names" do
expect(subject).to eq("blog_entry_5_the_walloping_wildebeest")
end
end
context "team_present? is false" do
before { allow(performer).to receive(:team_present?) { false }}
it "returns only the formatted assignment name" do
expect(subject).to eq("blog_entry_5")
end
end
end
describe "formatted_assignment_name" do
subject { performer.instance_eval { formatted_assignment_name }}
it "passes the assignment name into the formatter" do
expect(performer).to receive(:formatted_filename_fragment).with(assignment.name)
subject
end
end
describe "formatted_team_name" do
subject { performer_with_team.instance_eval { formatted_team_name }}
it "passes the team name into the formatter" do
expect(performer_with_team).to receive(:formatted_filename_fragment).with(team.name)
subject
end
end
# formatted_filename_fragment: sanitize_filename then truncate to 25 chars.
describe "formatted_filename_fragment" do
subject { performer.instance_eval { formatted_filename_fragment("ABCDEFGHIJKLMNOPQRSTUVWXYZ") }}
it "sanitizes the fragment" do
allow(performer).to receive(:sanitize_filename) { "this is a jocular output" }
expect(performer).to receive(:sanitize_filename).with("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
subject
end
it "truncates the final string to twenty five characters" do
expect(subject).to eq("abcdefghijklmnopqrstuvwxy")
end
end
# sanitize_filename: downcase, collapse runs of non-word characters to "_",
# and strip leading/trailing underscores.
describe "sanitize_filename" do
it "downcases everything" do
expect(performer.instance_eval { sanitize_filename("THISISSUPERCAPPY") }).to \
eq("thisissupercappy")
end
it "substitutes consecutive non-word characters with underscores" do
expect(performer.instance_eval { sanitize_filename("whoa\\ gEORG !!! IS ...dead") }).to \
eq("whoa_georg_is_dead")
end
it "removes leading underscores" do
expect(performer.instance_eval { sanitize_filename("____________garrett_rules") }).to \
eq("garrett_rules")
end
it "removes trailing underscores" do
expect(performer.instance_eval { sanitize_filename("garrett_sucks__________") }).to \
eq("garrett_sucks")
end
end
# tmp_dir / archive_tmp_dir: lazily created temp directories, memoized in
# @tmp_dir / @archive_tmp_dir respectively.
describe "tmp_dir" do
subject { performer.instance_eval { tmp_dir }}
it "builds a temporary directory" do
expect(subject).to match(/\/tmp\/[\w\d-]+/) # match the tmp dir hash
end
it "caches the temporary directory" do
original_tmp_dir = subject
expect(subject).to eq(original_tmp_dir)
end
it "sets the directory path to @tmp_dir" do
subject
expect(performer.instance_variable_get(:@tmp_dir)).to eq(subject)
end
end
describe "archive_tmp_dir" do
subject { performer.instance_eval { archive_tmp_dir }}
it "builds a temporary directory for the archive" do
expect(subject).to match(/\/tmp\/[\w\d-]+/) # match the tmp dir hash
end
it "caches the temporary directory" do
original_tmp_dir = subject
expect(subject).to eq(original_tmp_dir)
end
it "sets the directory path to @archive_tmp_dir" do
subject
expect(performer.instance_variable_get(:@archive_tmp_dir)).to eq(subject)
end
end
# expanded_archive_base_path: export_file_basename expanded against
# archive_tmp_dir, memoized in @expanded_archive_base_path.
describe "expanded_archive_base_path" do
subject { performer.instance_eval { expanded_archive_base_path }}
before do
allow(performer).to receive(:export_file_basename) { "the_best_filename" }
allow(performer).to receive(:archive_tmp_dir) { "/archive/tmp/dir" }
end
it "expands the export file basename from the archive tmp dir path" do
expect(subject).to eq("/archive/tmp/dir/the_best_filename")
end
it "caches the basename" do
subject
expect(performer).not_to receive(:export_file_basename)
subject
end
it "sets the expanded path to @expanded_archive_base_path" do
subject
expect(performer.instance_variable_get(:@expanded_archive_base_path)).to eq(subject)
end
end
end
add spec examples for filename timestamp formatting
# Specs for AssignmentExportPerformer's filename and path helpers: export
# basename construction, timestamp formatting, fragment sanitization, and
# temp-directory handling. `performer` / `performer_with_team` / `assignment` /
# `team` come from the shared Toolkits context extended below.
require 'rails_spec_helper'
RSpec.describe AssignmentExportPerformer, type: :background_job do
  include PerformerToolkit::SharedExamples
  include Toolkits::Performers::AssignmentExport::SharedExamples
  include ModelAddons::SharedExamples
  extend Toolkits::Performers::AssignmentExport::Context
  define_context
  subject { performer }
  # export_file_basename: "#{archive_basename}_export_#{filename_timestamp}",
  # memoized in @export_file_basename.
  describe "export_file_basename" do
    subject { performer.instance_eval { export_file_basename }}
    let(:filename_timestamp) { "2020-10-15_2394829348234893" }
    before(:each) do
      performer.instance_variable_set(:@export_file_basename, nil)
      allow(performer).to receive(:archive_basename) { "some_great_assignment" }
      allow(performer).to receive(:filename_timestamp) { filename_timestamp }
    end
    it "includes the fileized_assignment_name" do
      expect(subject).to match(/^some_great_assignment/)
    end
    it "is appended with a YYYY-MM-DD formatted timestamp" do
      expect(subject).to match(/2020-10-15_/)
    end
    it "caches the filename" do
      subject
      expect(performer).not_to receive(:archive_basename)
      subject
    end
    it "sets the filename to an @export_file_basename" do
      subject
      expect(performer.instance_variable_get(:@export_file_basename)).to eq("some_great_assignment_export_#{filename_timestamp}")
    end
  end
  # filename_timestamp: "YYYY-MM-DD" date plus the epoch float with its
  # decimal point stripped.
  describe "#filename_timestamp" do
    subject { performer.instance_eval { filename_timestamp }}
    let(:filename_time) { Date.parse("Jan 20 1995").to_time }
    before do
      allow(performer).to receive(:filename_time) { filename_time }
    end
    it "formats the filename time" do
      expect(subject).to match(/^1995-01-20/)
    end
    it "converts the filename time to a float" do
      expect(subject).to match("#{filename_time.to_f}".gsub(".",""))
    end
    it "removes decimal points from the timestamp" do
      expect(subject).not_to match(/\./)
    end
  end
  # filename_time: Time.now, fetched once and memoized.
  describe "#filename_time" do
    subject { performer.instance_eval { filename_time }}
    let(:time_now) { Date.parse("Jan 20 1995").to_time }
    before do
      allow(Time).to receive(:now) { time_now }
    end
    it "caches the time" do
      subject
      expect(Time).not_to receive(:now)
      subject
    end
    it "gets the time now" do
      expect(Time).to receive(:now) { time_now }
      subject
    end
  end
  # student_directory_file_path: joins a filename onto the student's
  # per-student directory path.
  describe "student_directory_file_path" do
    let(:student_double) { double(:student) }
    subject { performer.instance_eval { student_directory_file_path(@some_student, "whats_up.doc") }}
    before do
      performer.instance_variable_set(:@some_student, student_double)
      allow(performer).to receive(:student_directory_path) { "/this/great/path" }
    end
    it "gets the student directory path from the student" do
      expect(performer).to receive(:student_directory_path).with(student_double)
      subject
    end
    it "builds the correct path relative to the student directory" do
      expect(subject).to eq("/this/great/path/whats_up.doc")
    end
  end
  # archive_basename: assignment name, with the team name appended only when a
  # team is present.
  describe "archive_basename" do
    subject { performer.instance_eval { archive_basename }}
    before(:each) do
      allow(performer).to receive(:formatted_assignment_name) { "blog_entry_5" }
      allow(performer).to receive(:formatted_team_name) { "the_walloping_wildebeest" }
    end
    context "team_present? is true" do
      before { allow(performer).to receive(:team_present?) { true }}
      it "combines the formatted assignment and team names" do
        expect(subject).to eq("blog_entry_5_the_walloping_wildebeest")
      end
    end
    context "team_present? is false" do
      before { allow(performer).to receive(:team_present?) { false }}
      it "returns only the formatted assignment name" do
        expect(subject).to eq("blog_entry_5")
      end
    end
  end
  describe "formatted_assignment_name" do
    subject { performer.instance_eval { formatted_assignment_name }}
    it "passes the assignment name into the formatter" do
      expect(performer).to receive(:formatted_filename_fragment).with(assignment.name)
      subject
    end
  end
  describe "formatted_team_name" do
    subject { performer_with_team.instance_eval { formatted_team_name }}
    it "passes the team name into the formatter" do
      expect(performer_with_team).to receive(:formatted_filename_fragment).with(team.name)
      subject
    end
  end
  # formatted_filename_fragment: sanitize, then truncate to 25 characters.
  describe "formatted_filename_fragment" do
    subject { performer.instance_eval { formatted_filename_fragment("ABCDEFGHIJKLMNOPQRSTUVWXYZ") }}
    it "sanitizes the fragment" do
      allow(performer).to receive(:sanitize_filename) { "this is a jocular output" }
      expect(performer).to receive(:sanitize_filename).with("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
      subject
    end
    it "truncates the final string to twenty five characters" do
      expect(subject).to eq("abcdefghijklmnopqrstuvwxy")
    end
  end
  # sanitize_filename: downcase, collapse non-word runs to "_", trim
  # leading/trailing underscores.
  describe "sanitize_filename" do
    it "downcases everything" do
      expect(performer.instance_eval { sanitize_filename("THISISSUPERCAPPY") }).to \
        eq("thisissupercappy")
    end
    it "substitutes consecutive non-word characters with underscores" do
      expect(performer.instance_eval { sanitize_filename("whoa\\ gEORG !!! IS ...dead") }).to \
        eq("whoa_georg_is_dead")
    end
    it "removes leading underscores" do
      expect(performer.instance_eval { sanitize_filename("____________garrett_rules") }).to \
        eq("garrett_rules")
    end
    it "removes trailing underscores" do
      expect(performer.instance_eval { sanitize_filename("garrett_sucks__________") }).to \
        eq("garrett_sucks")
    end
  end
  # tmp_dir / archive_tmp_dir: lazily built, memoized scratch directories.
  describe "tmp_dir" do
    subject { performer.instance_eval { tmp_dir }}
    it "builds a temporary directory" do
      expect(subject).to match(/\/tmp\/[\w\d-]+/) # match the tmp dir hash
    end
    it "caches the temporary directory" do
      original_tmp_dir = subject
      expect(subject).to eq(original_tmp_dir)
    end
    it "sets the directory path to @tmp_dir" do
      subject
      expect(performer.instance_variable_get(:@tmp_dir)).to eq(subject)
    end
  end
  describe "archive_tmp_dir" do
    subject { performer.instance_eval { archive_tmp_dir }}
    it "builds a temporary directory for the archive" do
      expect(subject).to match(/\/tmp\/[\w\d-]+/) # match the tmp dir hash
    end
    it "caches the temporary directory" do
      original_tmp_dir = subject
      expect(subject).to eq(original_tmp_dir)
    end
    it "sets the directory path to @archive_tmp_dir" do
      subject
      expect(performer.instance_variable_get(:@archive_tmp_dir)).to eq(subject)
    end
  end
  # expanded_archive_base_path: export_file_basename expanded against
  # archive_tmp_dir, memoized.
  describe "expanded_archive_base_path" do
    subject { performer.instance_eval { expanded_archive_base_path }}
    before do
      allow(performer).to receive(:export_file_basename) { "the_best_filename" }
      allow(performer).to receive(:archive_tmp_dir) { "/archive/tmp/dir" }
    end
    it "expands the export file basename from the archive tmp dir path" do
      expect(subject).to eq("/archive/tmp/dir/the_best_filename")
    end
    it "caches the basename" do
      subject
      expect(performer).not_to receive(:export_file_basename)
      subject
    end
    it "sets the expanded path to @expanded_archive_base_path" do
      subject
      expect(performer.instance_variable_get(:@expanded_archive_base_path)).to eq(subject)
    end
  end
end
|
# CocoaPods spec for the OmniVirt VR SDK, shipped as a prebuilt binary
# framework (VRKit.framework) pulled from the example-app repository.
working_path = Dir.pwd # captured at podspec evaluation time for the install script
Pod::Spec.new do |spec|
  spec.name = "OmniVirtSDK"
  spec.version = "1.0.14"
  spec.summary = "Virtual Reality Embed Player and Monetization for iOS Apps"
  spec.homepage = "https://www.omnivirt.com"
  spec.license = { type: 'MIT', file: 'LICENSE' }
  spec.authors = { "OmniVirt Team" => 'contact@omnivirt.com' }
  spec.social_media_url = "https://www.facebook.com/omnivirt"
  spec.platform = :ios, "8.0"
  spec.requires_arc = true
  spec.source = { git: "https://github.com/OmniVirt/iOS-VR-Example.git", tag: "v#{spec.version}", submodules: true }
  spec.ios.vendored_frameworks = "VRKit.framework"
  # NOTE(review): `path` is not defined anywhere in this file (only
  # `working_path` is). Since the string is interpolated when the podspec is
  # evaluated, this looks like it would raise NameError — presumably `path`
  # is expected from the evaluation context; confirm before relying on it.
  spec.prepare_command = "gem install xcodeproj || echo ''; ruby ./install_run_script.rb '#{path}' '#{working_path}'"
end
Increase podspecs version.
# CocoaPods spec for the OmniVirt VR SDK (version bump of the spec above;
# still a prebuilt VRKit.framework vendored from the example-app repository).
working_path = Dir.pwd # captured at podspec evaluation time for the install script
Pod::Spec.new do |spec|
  spec.name = "OmniVirtSDK"
  spec.version = "1.0.20"
  spec.summary = "Virtual Reality Embed Player and Monetization for iOS Apps"
  spec.homepage = "https://www.omnivirt.com"
  spec.license = { type: 'MIT', file: 'LICENSE' }
  spec.authors = { "OmniVirt Team" => 'contact@omnivirt.com' }
  spec.social_media_url = "https://www.facebook.com/omnivirt"
  spec.platform = :ios, "8.0"
  spec.requires_arc = true
  spec.source = { git: "https://github.com/OmniVirt/iOS-VR-Example.git", tag: "v#{spec.version}", submodules: true }
  spec.ios.vendored_frameworks = "VRKit.framework"
  # NOTE(review): `path` is not defined anywhere in this file (only
  # `working_path` is) — see the 1.0.14 spec; confirm where `path` comes from.
  spec.prepare_command = "gem install xcodeproj || echo ''; ruby ./install_run_script.rb '#{path}' '#{working_path}'"
end
|
# CocoaPods spec for OriginateUI, a lightweight user-interface theming
# framework distributed from the Originate/OriginateUI repository.
Pod::Spec.new do |s|
  s.name = "OriginateUI"
  s.version = "0.0.10"
  s.summary = "A lightweight user interface theming framework."
  # Fixed: homepage previously pointed at the OriginateHTTP repository — a
  # copy/paste slip inconsistent with s.name and s.source below.
  s.homepage = "https://github.com/Originate/OriginateUI"
  s.license = 'MIT'
  s.author = { "Philip Kluz" => "philip.kluz@originate.com" }
  s.source = { :git => "https://github.com/Originate/OriginateUI.git", :tag => s.version.to_s }
  s.platform = :ios, '8.0'
  s.requires_arc = true
  # Ship everything under Pod/Sources; every header is public API.
  s.source_files = 'Pod/Sources/**/*'
  s.public_header_files = 'Pod/Sources/**/*.h'
end
[0.0.11] - Version Bump.
# CocoaPods spec for OriginateUI 0.0.11 (version bump of the spec above).
Pod::Spec.new do |s|
  s.name = "OriginateUI"
  s.version = "0.0.11"
  s.summary = "A lightweight user interface theming framework."
  # Fixed: homepage previously pointed at the OriginateHTTP repository — a
  # copy/paste slip inconsistent with s.name and s.source below.
  s.homepage = "https://github.com/Originate/OriginateUI"
  s.license = 'MIT'
  s.author = { "Philip Kluz" => "philip.kluz@originate.com" }
  s.source = { :git => "https://github.com/Originate/OriginateUI.git", :tag => s.version.to_s }
  s.platform = :ios, '8.0'
  s.requires_arc = true
  # Ship everything under Pod/Sources; every header is public API.
  s.source_files = 'Pod/Sources/**/*'
  s.public_header_files = 'Pod/Sources/**/*.h'
end
|
# Accessibility::Browser — a small interactive browser for the running app's
# view hierarchy (RubyMotion console helper). Browsing state (the view being
# browsed and a cursor onto one of its subviews) lives in Accessibility::Data
# so it persists between console calls.
module Accessibility
  module Browser
    # The view whose children are currently being browsed.
    def self.current_view
      Accessibility::Data[:view]
    end
    def self.current_view=(view)
      Accessibility::Data[:view]=view
    end
    # The last view selected via .view / find_view.
    def self.cursor
      Accessibility::Data[:cursor]
    end
    def self.cursor=(view)
      Accessibility::Data[:cursor]=view
    end
    # Index 0 is always the superview; subviews follow from index 1.
    def self.views
      [self.current_view.superview]+self.current_view.subviews
    end
    # Initialize browsing state. With no argument and no prior state, start at
    # the key window, descending through single-child wrappers.
    def self.init(view=nil)
      if view.nil?&&self.current_view.nil?
        view=UIApplication.sharedApplication.keyWindow
        view=view.subviews.first while view.subviews.length==1
        self.current_view=view
      else
        self.current_view=view if view
      end
    end
    # Render one line describing +view+: optional index (prefixed "+" when the
    # view has children), class name, and accessibility value/label if any.
    def self.display_view(view, index=nil)
      display=Array.new
      control=view.class.to_s
      control="Superview #{control}" if index==0
      name=view.accessibility_value||view.accessibility_label
      if index
        if index>0 and not(view.subviews.empty?)
          indicator="+"
        else
          indicator=" "
        end
        indicator+=index.to_s
        display<<indicator
      end
      display<<control
      display<<name if name
      display.join(" ")
    end
    # Print the current view followed by each browsable neighbor (superview at
    # index 0, then subviews).
    def self.display_views
      puts "Browsing "+self.display_view(self.current_view)
      self.views.each_index do |index|
        next if self.views[index].nil?
        puts self.display_view(self.views[index], index)
      end
    end
    # Navigate: no request lists the current level; an index or label request
    # descends into the matched view (index 0 / :back / :up goes to the
    # superview). Always returns nil so the console doesn't echo a view.
    def self.browse(request=nil)
      self.init
      new_view=nil
      request=0 if request==:back||request==:up
      if request.nil?
        self.display_views
      else
        raise "You cannot go back any further" if self.current_view.superview.nil?
        found=self.find_view(request)
        new_view=found if found
      end
      if new_view
        raise "This view has no subviews" if new_view.subviews.empty?
        self.current_view=new_view
        self.cursor=nil
        self.display_views
      end
      nil
    end
    # Resolve +request+ (a numeric index into .views, or a string matched
    # case-insensitively against subview accessibility labels) to a view.
    # An exact label match wins; an ambiguous partial match raises.
    def self.find_view(request)
      found=nil
      if request.kind_of?(Fixnum)
        raise "Invalid number" unless request>=0&&request<self.views.length
        found=self.views[request]
      elsif request.kind_of?(String)
        results=[]
        self.current_view.subviews.each do |view|
          next unless view.accessibility_label
          pattern=Regexp.new(request,true)
          compare=view.accessibility_label=~pattern
          next if compare.nil?
          if view.accessibility_label.downcase==request.downcase
            return view
          else
            results<<view
          end
        end
        raise "\"#{request}\" could refer to more than one view." if results.length>1
        found=results.first
      else
        raise "Unknown request: #{request}: #{request.class}"
      end
      found
    end
    # Select a view by request and move the cursor to it; with no request,
    # return the current cursor. Returns the selected view's description.
    def self.view(request=nil)
      self.init
      return self.cursor unless request
      result=self.find_view(request)
      raise "Unknown view" unless result
      self.cursor=result
      # Fixed: was `say_view result` — say_view is not defined anywhere in
      # this module; display_view renders the view's description.
      display_view result
    end
  end
end
# Console shortcuts: expose the hierarchy browser from any object, with
# one-letter aliases for quick interactive use.
class NSObject
  # NOTE(review): delegates to A11y::Browser — presumably A11y aliases the
  # Accessibility module defined elsewhere in this library; confirm it exists.
  def browse(*args)
    A11y::Browser.browse(*args)
  end
  def view(*args)
    A11y::Browser.view(*args)
  end
  alias :b :browse
  alias :v :view
end
Fixed a bug
# Accessibility::Browser — interactive browser for the running app's view
# hierarchy (RubyMotion console helper). Browsing state (current view and a
# cursor onto one of its subviews) is stored in Accessibility::Data so it
# persists between console calls.
module Accessibility
  module Browser
    # The view whose children are currently being browsed.
    def self.current_view
      Accessibility::Data[:view]
    end
    def self.current_view=(view)
      Accessibility::Data[:view]=view
    end
    # The last view selected via .view / find_view.
    def self.cursor
      Accessibility::Data[:cursor]
    end
    def self.cursor=(view)
      Accessibility::Data[:cursor]=view
    end
    # Index 0 is always the superview; subviews follow from index 1.
    def self.views
      [self.current_view.superview]+self.current_view.subviews
    end
    # Initialize browsing state; with no argument and no prior state, start at
    # the key window, descending through single-child wrapper views.
    def self.init(view=nil)
      if view.nil?&&self.current_view.nil?
        view=UIApplication.sharedApplication.keyWindow
        view=view.subviews.first while view.subviews.length==1
        self.current_view=view
      else
        self.current_view=view if view
      end
    end
    # Render one descriptive line for +view+: optional index (prefixed "+"
    # when the view has children), class name, accessibility value/label.
    def self.display_view(view, index=nil)
      display=Array.new
      control=view.class.to_s
      control="Superview #{control}" if index==0
      name=view.accessibility_value||view.accessibility_label
      if index
        if index>0 and not(view.subviews.empty?)
          indicator="+"
        else
          indicator=" "
        end
        indicator+=index.to_s
        display<<indicator
      end
      display<<control
      display<<name if name
      display.join(" ")
    end
    # Print the current view followed by each browsable neighbor.
    def self.display_views
      puts "Browsing "+self.display_view(self.current_view)
      self.views.each_index do |index|
        next if self.views[index].nil?
        puts self.display_view(self.views[index], index)
      end
    end
    # Navigate: no request lists the current level; an index or label request
    # descends into the matched view (index 0 / :back / :up is the superview).
    # Always returns nil so the console doesn't echo a view object.
    def self.browse(request=nil)
      self.init
      new_view=nil
      request=0 if request==:back||request==:up
      if request.nil?
        self.display_views
      else
        raise "You cannot go back any further" if self.current_view.superview.nil?
        found=self.find_view(request)
        new_view=found if found
      end
      if new_view
        raise "This view has no subviews" if new_view.subviews.empty?
        self.current_view=new_view
        self.cursor=nil
        self.display_views
      end
      nil
    end
    # Resolve +request+ (numeric index into .views, or string matched
    # case-insensitively against subview accessibility labels). An exact label
    # match wins immediately; an ambiguous partial match raises.
    def self.find_view(request)
      found=nil
      if request.kind_of?(Fixnum)
        raise "Invalid number" unless request>=0&&request<self.views.length
        found=self.views[request]
      elsif request.kind_of?(String)
        results=[]
        self.current_view.subviews.each do |view|
          next unless view.accessibility_label
          pattern=Regexp.new(request,true)
          compare=view.accessibility_label=~pattern
          next if compare.nil?
          if view.accessibility_label.downcase==request.downcase
            return view
          else
            results<<view
          end
        end
        raise "\"#{request}\" could refer to more than one view." if results.length>1
        found=results.first
      else
        raise "Unknown request: #{request}: #{request.class}"
      end
      found
    end
    # Select a view by request and move the cursor to it; with no request,
    # return the current cursor. Returns the view's description string.
    def self.view(request=nil)
      self.init
      return self.cursor unless request
      result=self.find_view(request)
      raise "Unknown view" unless result
      self.cursor=result
      display_view result
    end
  end
end
# Console shortcuts: expose the hierarchy browser from any object, with
# one-letter aliases for quick interactive use.
class NSObject
  # NOTE(review): delegates to A11y::Browser — presumably A11y aliases the
  # Accessibility module defined elsewhere in this library; confirm it exists.
  def browse(*args)
    A11y::Browser.browse(*args)
  end
  def view(*args)
    A11y::Browser.view(*args)
  end
  alias :b :browse
  alias :v :view
end
|
# Puppet property: the deployment's version identifier, read straight from the
# 'versionidentifier' key of the discovered resource state.
newproperty(:versionidentifier) do
  include EasyType
  desc 'The version identifier'
  to_translate_to_resource do | raw_resource|
    raw_resource['versionidentifier']
  end
end
let wls_deployment read the default versionidentifier from the specified war or ear
# Puppet property: the deployment's version identifier. Defaults to the
# version recorded inside the deployed archive itself, when one is present.
newproperty(:versionidentifier) do
  include EasyType
  desc 'The version identifier'
  defaultto {
    version_in_ear
  }
  to_translate_to_resource do | raw_resource|
    raw_resource['versionidentifier']
  end
  private
  # Extract the WebLogic-Application-Version header from the archive's
  # META-INF/MANIFEST.MF. Returns nil when the archive has no manifest, the
  # header is absent, or unzip fails/is unavailable.
  #
  # Fixed: previously shelled out via Kernel#open("| unzip -p #{localpath} ..."),
  # which interpolates the user-supplied path into a shell command line
  # (shell-injection-prone and breaks on paths with spaces). IO.popen with an
  # argument vector never invokes a shell; the grep step is done in Ruby.
  def version_in_ear
    begin
      manifest = IO.popen(['unzip', '-p', resource[:localpath].to_s, 'META-INF/MANIFEST.MF'], &:read)
      line = manifest[/WebLogic-Application-Version:.*/]
      line && line.split(':', 2)[1].strip
    rescue
      nil
    end
  end
end
|
# Fog model for a Rackspace Cloud Monitoring check attached to an entity.
require 'fog/core/model'
require 'rackspace-monitoring/monitoring/models/base'
module Fog
  module Monitoring
    class Rackspace
      class Check < Fog::Monitoring::Rackspace::Base
        identity :id
        attribute :entity
        attribute :entity_id
        attribute :label
        attribute :metadata
        attribute :target_alias
        attribute :target_resolver
        attribute :target_hostname
        attribute :period
        attribute :timeout
        attribute :type
        attribute :details
        attribute :disabled
        attribute :monitoring_zones_poll
        # Build the API payload from the writable attributes, dropping any
        # that are unset (nil) so they are not sent to the service.
        def prep
          options = {
            'label' => label,
            'metadata' => metadata,
            'target_alias'=> target_alias,
            'target_resolver' => target_resolver,
            'target_hostname' => target_hostname,
            'period' => period,
            'timeout'=> timeout,
            'details'=> details,
            'monitoring_zones_poll'=> monitoring_zones_poll,
            'disabled'=> disabled
          }
          options = options.reject {|key, value| value.nil?}
          options
        end
        # Create or update this check. The entity may be given as a model
        # (:entity) or as a bare id (:entity_id); an existing identity means
        # update, otherwise :type is required and a new check is created.
        # Always returns true.
        def save
          begin
            requires :entity
            entity_id = entity.identity
          rescue
            requires :entity_id
          end
          options = prep
          if identity then
            data = connection.update_check(entity_id, identity, options)
          else
            requires :type
            options['type'] = type
            data = connection.create_check(entity_id, options)
          end
          true
        end
        # Compare this check's prepared payload to another's.
        # NOTE(review): only keys present in *this* check's payload are
        # compared — extra keys on `b` are ignored, so the comparison is not
        # symmetric; also `===` is used where `==` is probably intended.
        # Confirm before relying on this for change detection.
        def compare?(b)
          a_o = prep
          b_o = b.prep
          remain = a_o.reject {|key, value| b_o[key] === value}
          remain.empty?
        end
      end
    end
  end
end
Update with refactored code
# Fog model for a Rackspace Cloud Monitoring check attached to an entity
# (refactored revision: the asymmetric compare? helper has been removed).
require 'fog/core/model'
require 'rackspace-monitoring/monitoring/models/base'
module Fog
  module Monitoring
    class Rackspace
      class Check < Fog::Monitoring::Rackspace::Base
        identity :id
        attribute :entity
        attribute :entity_id
        attribute :label
        attribute :metadata
        attribute :target_alias
        attribute :target_resolver
        attribute :target_hostname
        attribute :period
        attribute :timeout
        attribute :type
        attribute :details
        attribute :disabled
        attribute :monitoring_zones_poll
        # Build the API payload from the writable attributes, dropping any
        # that are unset (nil) so they are not sent to the service.
        def prep
          options = {
            'label' => label,
            'metadata' => metadata,
            'target_alias'=> target_alias,
            'target_resolver' => target_resolver,
            'target_hostname' => target_hostname,
            'period' => period,
            'timeout'=> timeout,
            'details'=> details,
            'monitoring_zones_poll'=> monitoring_zones_poll,
            'disabled'=> disabled
          }
          options = options.reject {|key, value| value.nil?}
          options
        end
        # Create or update this check. The entity may be given as a model
        # (:entity) or as a bare id (:entity_id); an existing identity means
        # update, otherwise :type is required and a new check is created.
        # Always returns true.
        def save
          begin
            requires :entity
            entity_id = entity.identity
          rescue
            requires :entity_id
          end
          options = prep
          if identity then
            data = connection.update_check(entity_id, identity, options)
          else
            requires :type
            options['type'] = type
            data = connection.create_check(entity_id, options)
          end
          true
        end
      end
    end
  end
end
|
# Patch for Redmine's Repository model: hooks repository creation (factory)
# and adds commit-statistics helpers used by the git_hosting plugin's views.
require_dependency 'repository'
module RedmineGitHosting
  module Patches
    module RepositoryPatch
      def self.included(base)
        base.send(:extend, ClassMethods)
        base.send(:include, InstanceMethods)
        base.class_eval do
          unloadable
          class << self
            alias_method_chain :factory, :git_hosting
          end
        end
      end
      module ClassMethods
        # Wrap Repository.factory so newly built Git repositories get a
        # RepositoryGitExtra; log when that auto-initialization failed.
        def factory_with_git_hosting(klass_name, *args)
          new_repo = factory_without_git_hosting(klass_name, *args)
          if new_repo.is_a?(Repository::Git)
            if new_repo.extra.nil?
              # Note that this autoinitializes default values and hook key
              RedmineGitolite::GitHosting.logger.error { "Automatic initialization of RepositoryGitExtra failed for #{self.project.to_s}" }
            end
          end
          return new_repo
        end
      end
      module InstanceMethods
        # Aggregate repository-wide numbers (commit counts, contributors,
        # first/last commit dates, activity span and averages), keyed by
        # translated labels for direct rendering.
        def global_statistics
          total_commits = Changeset.where("repository_id = ?", self.id).count
          first_commit = Changeset.where("repository_id = ?", self.id).order('commit_date ASC').first
          last_commit = Changeset.where("repository_id = ?", self.id).order('commit_date ASC').last
          active_for = (last_commit.commit_date - first_commit.commit_date).to_i
          committers = Changeset.where("repository_id = ?", self.id).map(&:committer).uniq.size
          data = {}
          data[l(:label_total_commits)] = total_commits
          data[l(:label_total_contributors)] = committers
          data[l(:label_first_commit_date)] = first_commit.commit_date
          data[l(:label_latest_commit_date)] = last_commit.commit_date
          data[l(:label_active_for)] = "#{active_for} #{l(:label_active_days)}"
          data[l(:label_average_commit_per_day)] = total_commits.fdiv(active_for).round(2)
          data[l(:label_average_contributor_commits)] = total_commits.fdiv(committers).round(2)
          return data
        end
        # Commit counts bucketed by local hour of day (0h..23h), shaped for a
        # chart ({:categories, :series}).
        def commits_per_hours
          total_commits_by_hour = Changeset.where("repository_id = ?", self.id).map(&:committed_on)
          commits_by_hour = [0] * 24
          total_commits_by_hour.each {|c| commits_by_hour[get_hour_from_date(c)] += 1 }
          hours = (0..23).step(1).to_a
          new_hours = []
          hours.each { |h| new_hours.push("#{h}h") }
          data = {}
          data[:categories] = new_hours
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => commits_by_hour})
          return data
        end
        # Commit and change counts per commit date, shaped for a chart.
        def commits_per_day
          total_commits_by_day = Changeset.where("repository_id = ?", self.id).group(:commit_date).order(:commit_date).count
          total_changes_by_day = Change.joins(:changeset).where("#{Changeset.table_name}.repository_id = ?", self.id).group(:commit_date).order(:commit_date).count
          data = {}
          data[:categories] = total_commits_by_day.keys
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => total_commits_by_day.values})
          data[:series].push({:name => l(:label_change_plural), :data => total_changes_by_day.values})
          return data
        end
        # Commit counts bucketed by weekday (Monday..Sunday labels), returned
        # as a single-series array for a pie/column chart.
        def commits_per_weekday
          week_day = {}
          week_day[l(:label_monday)] = 0
          week_day[l(:label_tuesday)] = 0
          week_day[l(:label_wednesday)] = 0
          week_day[l(:label_thursday)] = 0
          week_day[l(:label_friday)] = 0
          week_day[l(:label_saturday)] = 0
          week_day[l(:label_sunday)] = 0
          total_commits = Changeset.where("repository_id = ?", self.id).group(:commit_date).count
          total_commits.each do |commit_date, commit_count|
            case commit_date.to_date.wday
              when 0
                week_day[l(:label_sunday)] += commit_count
              when 1
                week_day[l(:label_monday)] += commit_count
              when 2
                week_day[l(:label_tuesday)] += commit_count
              when 3
                week_day[l(:label_wednesday)] += commit_count
              when 4
                week_day[l(:label_thursday)] += commit_count
              when 5
                week_day[l(:label_friday)] += commit_count
              when 6
                week_day[l(:label_saturday)] += commit_count
            end
          end
          data = {}
          data[:name] = l(:label_commit_plural)
          data[:data] = []
          week_day.each do |key, value|
            data[:data].push([key, value])
          end
          return [data]
        end
        # Commit and change counts for the trailing 12 months, oldest month
        # first, shaped for a chart.
        def commits_per_month
          @date_to = Date.today
          @date_from = @date_to << 11
          @date_from = Date.civil(@date_from.year, @date_from.month, 1)
          commits_by_day = Changeset.
            where("repository_id = ? AND commit_date BETWEEN ? AND ?", self.id, @date_from, @date_to).
            group(:commit_date).
            count
          commits_by_month = [0] * 12
          commits_by_day.each {|c| commits_by_month[(@date_to.month - c.first.to_date.month) % 12] += c.last }
          changes_by_day = Change.
            joins(:changeset).
            where("#{Changeset.table_name}.repository_id = ? AND #{Changeset.table_name}.commit_date BETWEEN ? AND ?", self.id, @date_from, @date_to).
            group(:commit_date).
            count
          changes_by_month = [0] * 12
          changes_by_day.each {|c| changes_by_month[(@date_to.month - c.first.to_date.month) % 12] += c.last }
          fields = []
          12.times {|m| fields << month_name(((Date.today.month - 1 - m) % 12) + 1)}
          data = {}
          data[:categories] = fields.reverse
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => commits_by_month[0..11].reverse})
          data[:series].push({:name => l(:label_change_plural), :data => changes_by_month[0..11].reverse})
          return data
        end
        # Commit and change counts per committer (padded to at least ten
        # columns), shaped for a chart.
        def commits_per_author_global
          commits_by_author = Changeset.where("repository_id = ?", self.id).group(:committer).count
          commits_by_author.to_a.sort! {|x, y| x.last <=> y.last}
          changes_by_author = Change.joins(:changeset).where("#{Changeset.table_name}.repository_id = ?", self.id).group(:committer).count
          h = changes_by_author.inject({}) {|o, i| o[i.first] = i.last; o}
          fields = commits_by_author.collect {|r| r.first}
          commits_data = commits_by_author.collect {|r| r.last}
          changes_data = commits_by_author.collect {|r| h[r.first] || 0}
          fields = fields + [""]*(10 - fields.length) if fields.length<10
          commits_data = commits_data + [0]*(10 - commits_data.length) if commits_data.length<10
          changes_data = changes_data + [0]*(10 - changes_data.length) if changes_data.length<10
          # Remove email address in usernames
          fields = fields.collect {|c| c.gsub(%r{<.+@.+>}, '').strip }
          data = {}
          data[:categories] = fields
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => commits_data})
          data[:series].push({:name => l(:label_change_plural), :data => changes_data})
          return data
        end
        # Per-committer commit timelines: one chart-shaped hash per committer
        # with name, email, total and per-date counts.
        def commits_per_author
          data = []
          committers = Changeset.where("repository_id = ?", self.id).map(&:committer).uniq
          committers.each do |committer|
            commits = Changeset.where("repository_id = ? AND committer = ?", self.id, committer).group(:commit_date).order(:commit_date).count
            # Committers are stored as "Name <email>" but the "<email>" part is
            # optional. Fixed: split('<')[1].gsub raised NoMethodError on nil
            # when a committer string had no email section.
            name_part, mail_part = committer.split('<', 2)
            committer_name = name_part.to_s.strip
            committer_mail = mail_part.to_s.gsub('>', '')
            commits_data = {}
            commits_data[:author_name] = committer_name
            commits_data[:author_mail] = committer_mail
            commits_data[:total_commits] = commits.values.map(&:to_i).reduce(:+)
            commits_data[:categories] = commits.keys
            commits_data[:series] = []
            commits_data[:series].push({:name => l(:label_commit_plural), :data => commits.values})
            data.push(commits_data)
          end
          return data
        end
        private
        # Hour of day (0-23) for +date+ in the current user's time zone (or
        # the server's local zone when the user has none); nil for nil input.
        def get_hour_from_date(date)
          return nil unless date
          time = date.to_time
          zone = User.current.time_zone
          local = zone ? time.in_time_zone(zone) : (time.utc? ? time.localtime : time)
          local.hour
        end
      end
    end
  end
end
unless Repository.included_modules.include?(RedmineGitHosting::Patches::RepositoryPatch)
  Repository.send(:include, RedmineGitHosting::Patches::RepositoryPatch)
end
Fix https://github.com/jbox-web/redmine_git_hosting/issues/257
# Patch for Redmine's Repository model: hooks repository creation (factory)
# and adds commit-statistics helpers used by the git_hosting plugin's views.
# This revision guards committer email parsing (see commits_per_author).
require_dependency 'repository'
module RedmineGitHosting
  module Patches
    module RepositoryPatch
      def self.included(base)
        base.send(:extend, ClassMethods)
        base.send(:include, InstanceMethods)
        base.class_eval do
          unloadable
          class << self
            alias_method_chain :factory, :git_hosting
          end
        end
      end
      module ClassMethods
        # Wrap Repository.factory so newly built Git repositories get a
        # RepositoryGitExtra; log when that auto-initialization failed.
        def factory_with_git_hosting(klass_name, *args)
          new_repo = factory_without_git_hosting(klass_name, *args)
          if new_repo.is_a?(Repository::Git)
            if new_repo.extra.nil?
              # Note that this autoinitializes default values and hook key
              RedmineGitolite::GitHosting.logger.error { "Automatic initialization of RepositoryGitExtra failed for #{self.project.to_s}" }
            end
          end
          return new_repo
        end
      end
      module InstanceMethods
        # Aggregate repository-wide numbers, keyed by translated labels.
        def global_statistics
          total_commits = Changeset.where("repository_id = ?", self.id).count
          first_commit = Changeset.where("repository_id = ?", self.id).order('commit_date ASC').first
          last_commit = Changeset.where("repository_id = ?", self.id).order('commit_date ASC').last
          active_for = (last_commit.commit_date - first_commit.commit_date).to_i
          committers = Changeset.where("repository_id = ?", self.id).map(&:committer).uniq.size
          data = {}
          data[l(:label_total_commits)] = total_commits
          data[l(:label_total_contributors)] = committers
          data[l(:label_first_commit_date)] = first_commit.commit_date
          data[l(:label_latest_commit_date)] = last_commit.commit_date
          data[l(:label_active_for)] = "#{active_for} #{l(:label_active_days)}"
          data[l(:label_average_commit_per_day)] = total_commits.fdiv(active_for).round(2)
          data[l(:label_average_contributor_commits)] = total_commits.fdiv(committers).round(2)
          return data
        end
        # Commit counts bucketed by local hour of day, chart-shaped.
        def commits_per_hours
          total_commits_by_hour = Changeset.where("repository_id = ?", self.id).map(&:committed_on)
          commits_by_hour = [0] * 24
          total_commits_by_hour.each {|c| commits_by_hour[get_hour_from_date(c)] += 1 }
          hours = (0..23).step(1).to_a
          new_hours = []
          hours.each { |h| new_hours.push("#{h}h") }
          data = {}
          data[:categories] = new_hours
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => commits_by_hour})
          return data
        end
        # Commit and change counts per commit date, chart-shaped.
        def commits_per_day
          total_commits_by_day = Changeset.where("repository_id = ?", self.id).group(:commit_date).order(:commit_date).count
          total_changes_by_day = Change.joins(:changeset).where("#{Changeset.table_name}.repository_id = ?", self.id).group(:commit_date).order(:commit_date).count
          data = {}
          data[:categories] = total_commits_by_day.keys
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => total_commits_by_day.values})
          data[:series].push({:name => l(:label_change_plural), :data => total_changes_by_day.values})
          return data
        end
        # Commit counts bucketed by weekday, single-series chart shape.
        def commits_per_weekday
          week_day = {}
          week_day[l(:label_monday)] = 0
          week_day[l(:label_tuesday)] = 0
          week_day[l(:label_wednesday)] = 0
          week_day[l(:label_thursday)] = 0
          week_day[l(:label_friday)] = 0
          week_day[l(:label_saturday)] = 0
          week_day[l(:label_sunday)] = 0
          total_commits = Changeset.where("repository_id = ?", self.id).group(:commit_date).count
          total_commits.each do |commit_date, commit_count|
            case commit_date.to_date.wday
              when 0
                week_day[l(:label_sunday)] += commit_count
              when 1
                week_day[l(:label_monday)] += commit_count
              when 2
                week_day[l(:label_tuesday)] += commit_count
              when 3
                week_day[l(:label_wednesday)] += commit_count
              when 4
                week_day[l(:label_thursday)] += commit_count
              when 5
                week_day[l(:label_friday)] += commit_count
              when 6
                week_day[l(:label_saturday)] += commit_count
            end
          end
          data = {}
          data[:name] = l(:label_commit_plural)
          data[:data] = []
          week_day.each do |key, value|
            data[:data].push([key, value])
          end
          return [data]
        end
        # Commit and change counts for the trailing 12 months, chart-shaped.
        def commits_per_month
          @date_to = Date.today
          @date_from = @date_to << 11
          @date_from = Date.civil(@date_from.year, @date_from.month, 1)
          commits_by_day = Changeset.
            where("repository_id = ? AND commit_date BETWEEN ? AND ?", self.id, @date_from, @date_to).
            group(:commit_date).
            count
          commits_by_month = [0] * 12
          commits_by_day.each {|c| commits_by_month[(@date_to.month - c.first.to_date.month) % 12] += c.last }
          changes_by_day = Change.
            joins(:changeset).
            where("#{Changeset.table_name}.repository_id = ? AND #{Changeset.table_name}.commit_date BETWEEN ? AND ?", self.id, @date_from, @date_to).
            group(:commit_date).
            count
          changes_by_month = [0] * 12
          changes_by_day.each {|c| changes_by_month[(@date_to.month - c.first.to_date.month) % 12] += c.last }
          fields = []
          12.times {|m| fields << month_name(((Date.today.month - 1 - m) % 12) + 1)}
          data = {}
          data[:categories] = fields.reverse
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => commits_by_month[0..11].reverse})
          data[:series].push({:name => l(:label_change_plural), :data => changes_by_month[0..11].reverse})
          return data
        end
        # Commit and change counts per committer (padded to ten columns).
        def commits_per_author_global
          commits_by_author = Changeset.where("repository_id = ?", self.id).group(:committer).count
          commits_by_author.to_a.sort! {|x, y| x.last <=> y.last}
          changes_by_author = Change.joins(:changeset).where("#{Changeset.table_name}.repository_id = ?", self.id).group(:committer).count
          h = changes_by_author.inject({}) {|o, i| o[i.first] = i.last; o}
          fields = commits_by_author.collect {|r| r.first}
          commits_data = commits_by_author.collect {|r| r.last}
          changes_data = commits_by_author.collect {|r| h[r.first] || 0}
          fields = fields + [""]*(10 - fields.length) if fields.length<10
          commits_data = commits_data + [0]*(10 - commits_data.length) if commits_data.length<10
          changes_data = changes_data + [0]*(10 - changes_data.length) if changes_data.length<10
          # Remove email address in usernames
          fields = fields.collect {|c| c.gsub(%r{<.+@.+>}, '').strip }
          data = {}
          data[:categories] = fields
          data[:series] = []
          data[:series].push({:name => l(:label_commit_plural), :data => commits_data})
          data[:series].push({:name => l(:label_change_plural), :data => changes_data})
          return data
        end
        # Per-committer commit timelines: one chart-shaped hash per committer.
        def commits_per_author
          data = []
          committers = Changeset.where("repository_id = ?", self.id).map(&:committer).uniq
          committers.each do |committer|
            commits = Changeset.where("repository_id = ? AND committer = ?", self.id, committer).group(:commit_date).order(:commit_date).count
            committer_name = committer.split('<')[0].strip
            # Committers without a "<email>" section would raise here; the
            # inline rescue deliberately falls back to an empty address.
            committer_mail = committer.split('<')[1].gsub('>', '') rescue ''
            commits_data = {}
            commits_data[:author_name] = committer_name
            commits_data[:author_mail] = committer_mail
            commits_data[:total_commits] = commits.values.map(&:to_i).reduce(:+)
            commits_data[:categories] = commits.keys
            commits_data[:series] = []
            commits_data[:series].push({:name => l(:label_commit_plural), :data => commits.values})
            data.push(commits_data)
          end
          return data
        end
        private
        # Hour of day (0-23) for +date+ in the current user's time zone (or
        # the server's local zone when the user has none); nil for nil input.
        def get_hour_from_date(date)
          return nil unless date
          time = date.to_time
          zone = User.current.time_zone
          local = zone ? time.in_time_zone(zone) : (time.utc? ? time.localtime : time)
          local.hour
        end
      end
    end
  end
end
unless Repository.included_modules.include?(RedmineGitHosting::Patches::RepositoryPatch)
  Repository.send(:include, RedmineGitHosting::Patches::RepositoryPatch)
end
# Patch for Redmine's Issue model: a before_save hook that enforces which
# tracker types may appear at the root of the issue tree and which
# parent/child tracker pairings are allowed.
# NOTE(review): tracker IDs (1=Bug, 2=Feature, 3=Support, 4=Task, 5=Epic,
# 6=Release, 7=QA Task, 8=Enhancement, 9=Alert — inferred from the error
# messages) are hard-coded here and will silently misbehave on installs whose
# tracker IDs differ; a later revision reads them from plugin settings.
require_dependency 'issue'
module RedmineRestrictTracker
  module Patches
    module IssuePatch
      def self.included(base)
        base.send :include, InstanceMethods
        base.class_eval do
          before_save :restrict_tracker_type
        end
      end
      module InstanceMethods
        # Dispatch: issues with a parent are checked against the parent's
        # tracker; root issues against the allowed-root list.
        def restrict_tracker_type
          parent_issue_id ? restrict_parent : restrict_root
        end
        # Allow only the whitelisted tracker IDs at the root; otherwise add a
        # validation error and return false (aborting the save).
        def restrict_root
          if [2,5,6,8,9].include? tracker_id
            true
          else
            errors.add :base, "#{tracker.name} can't be a root node, please assign a parent."
            false
          end
        end
        # Per-tracker rules for which parent trackers are permitted; unknown
        # tracker IDs are allowed unconditionally.
        def restrict_parent
          case tracker_id
          when 1
            restrict_for [2], 'Bugs can only be children for Features.'
          when 2
            restrict_for [5,6], 'Features can only be children for Epics and Releases.'
          when 3
            restrict_for [2], 'Supports can only be children for Features.'
          when 4
            restrict_for [2], 'Task can only be children for Features.'
          when 5
            restrict_for [5], 'Epics can only be children for Epics.'
          when 6
            restrict_for [], 'Releases can only be root nodes.'
          when 7
            restrict_for [2], 'QA Tasks can only be children for Features.'
          when 8
            restrict_for [2,5], 'Enhancements can only be children for Features and Epics.'
          when 9
            restrict_for [2], 'Alerts can only be children for Features.'
          else
            true
          end
        end
        # True when the parent's tracker is in +tracker_ids+; otherwise adds
        # +error_message+ and returns false.
        # NOTE(review): if parent_issue_id matches no issue, `.first` is nil
        # and this raises NoMethodError — presumably Redmine validates the
        # parent id before this hook runs; confirm.
        def restrict_for(tracker_ids, error_message)
          restriction = tracker_ids.include? Issue.where(id: parent_issue_id).select(:tracker_id).first.tracker_id
          errors.add(:base, error_message) unless restriction
          restriction
        end
      end
    end
  end
end
unless Issue.included_modules.include? RedmineRestrictTracker::Patches::IssuePatch
  Issue.send :include, RedmineRestrictTracker::Patches::IssuePatch
end
Using configuration values in the hook
require_dependency 'issue'

module RedmineRestrictTracker
  module Patches
    # Patches Issue with a before_save validation restricting which trackers
    # may be root issues and which parent trackers each tracker may be
    # nested under. Allowed ids come from the plugin settings
    # (Setting.plugin_redmine_restrict_tracker) as comma-separated lists.
    module IssuePatch
      def self.included(base)
        base.send :include, InstanceMethods
        base.class_eval do
          before_save :restrict_tracker_type
        end
      end

      module InstanceMethods
        # before_save hook: issues with a parent are validated against the
        # configured parent trackers, root issues against the configured
        # :root_nodes whitelist.
        def restrict_tracker_type
          parent_issue_id ? restrict_parent : restrict_root
        end

        # Valid root issues are those whose tracker id appears in the
        # comma-separated :root_nodes plugin setting.
        def restrict_root
          if (Setting.plugin_redmine_restrict_tracker[:root_nodes].split(',')
                .map(&:to_i).include?(tracker_id))
            true
          else
            errors.add :base, "#{tracker.name} can't be a root node, please assign a parent!"
            false
          end
        end

        # Validates that the PARENT issue's tracker is one of the trackers
        # configured as allowed parents for this issue's tracker
        # (setting key "parents_for_<tracker name>").
        def restrict_parent
          tracker_name = tracker.name
          setting_name = "parents_for_#{tracker_name.downcase.split(' ').join('_')}"
          possible_parent_trackers = Setting.plugin_redmine_restrict_tracker[setting_name]
            .split(',').map(&:to_i)
          # BUGFIX: compare against the parent issue's tracker id, not this
          # issue's own tracker_id — the setting lists allowed *parent*
          # trackers, so the original check validated the wrong tracker
          # (e.g. any issue whose own tracker appeared in its parent list
          # passed regardless of its actual parent).
          parent_tracker_id = Issue.where(id: parent_issue_id).pluck(:tracker_id).first
          if possible_parent_trackers.include?(parent_tracker_id)
            true
          else
            possible_parents = Tracker.where(id: possible_parent_trackers).pluck(:name).map(&:pluralize)
            if possible_parents.size == 0
              errors.add :base, "#{tracker_name.pluralize} can't be set as children!"
              return false
            elsif possible_parents.size > 1
              parents_string = possible_parents[0..-2].join(', ') << " and " << possible_parents[-1]
            else
              parents_string = possible_parents[0]
            end
            errors.add :base, "#{tracker_name.pluralize} can only be children of #{parents_string}!"
            false
          end
        end
      end
    end
  end
end

unless Issue.included_modules.include? RedmineRestrictTracker::Patches::IssuePatch
  Issue.send :include, RedmineRestrictTracker::Patches::IssuePatch
end
|
module RSpec::Rails
  # Wires ActionController::TestCase::Behavior into RSpec controller
  # example groups.
  #
  # Examples can set up state with stubs, factories, or fixtures, e.g.:
  #
  #   describe WidgetsController do
  #     describe "GET index" do
  #       it "assigns all widgets to @widgets" do
  #         widget = stub_model(Widget)
  #         Widget.stub(:all) { widget }
  #         get :index
  #         assigns(:widgets).should eq([widget])
  #       end
  #     end
  #   end
  #
  # On top of rspec-expectations' stock matchers, controller specs add
  # render_template(*args) and redirect_to(destination), which delegate to
  # rails' assert_template and assert_redirected_to respectively.
  #
  # Views are NOT rendered by default: controller specs isolate the
  # controller from its collaborators. Due to the way rails-3 looks up
  # templates, the template file still has to exist, but it is never
  # loaded (unlike rspec-rails-1 with rails-2, where the file could be
  # absent entirely). Declare +render_views+ in the group to get the
  # integrated, rails-functional-test style behaviour instead.
  module ControllerExampleGroup
    extend ActiveSupport::Concern
    include RSpec::Rails::RailsExampleGroup
    include ActionController::TestCase::Behavior
    include RSpec::Rails::ViewRendering
    include RSpec::Rails::Matchers::RedirectTo
    include RSpec::Rails::Matchers::RenderTemplate
    include RSpec::Rails::Matchers::RoutingMatchers

    module ClassMethods
      # The controller class under test (the class handed to +describe+).
      def controller_class
        describes
      end

      # DSL for specifying behaviour of ApplicationController (or the
      # +base_class+ you pass in): creates an anonymous subclass, evals
      # +body+ in it, and draws implicit +stub_resources+ routes that are
      # separate from config/routes.rb.
      #
      #   describe ApplicationController do
      #     controller do
      #       def index
      #         raise ApplicationController::AccessDenied
      #       end
      #     end
      #     ...
      #   end
      #
      # NOTE: because of Ruby 1.8 scoping rules in anonymous subclasses,
      # constants defined in the base class must be fully qualified inside
      # +body+ (e.g. ApplicationController::AccessDenied); instance
      # methods, filters, etc. remain accessible as usual.
      def controller(base_class = ApplicationController, &body)
        # Dup first so defining the singleton #name does not leak onto the
        # real base class; the name must exist before subclassing.
        new_base = base_class.dup
        def new_base.name; "StubResourcesController"; end
        metadata[:example_group][:describes] = Class.new(new_base, &body)

        before do
          @orig_routes, @routes = @routes, ActionDispatch::Routing::RouteSet.new
          @routes.draw { resources :stub_resources }
        end

        after do
          @routes, @orig_routes = @orig_routes, nil
        end
      end
    end

    module InstanceMethods
      attr_reader :controller, :routes

      # When using the anonymous-controller routes, named-route helper
      # calls are forwarded to the controller; everything else follows
      # normal method lookup.
      def method_missing(method, *args, &block)
        return super unless @orig_routes && @orig_routes.named_routes.helpers.include?(method)
        controller.send(method, *args, &block)
      end
    end

    included do
      subject { controller }
      metadata[:type] = :controller

      before do
        @routes = ::Rails.application.routes
        ActionController::Base.allow_forgery_protection = false
      end
    end
  end
end
Revert "Define name before subclassing in anonymous controller spec"
- this had been done to get around a problem in rails-3.1.0.beta1, but
that issue has been addressed in the coming rails-3.1.0.rc
This reverts commit 9ebf963d742291f4ec8ccdaaa230c89edbdba983.
module RSpec::Rails
  # Wires ActionController::TestCase::Behavior into RSpec controller
  # example groups.
  #
  # Examples can set up state with stubs, factories, or fixtures, e.g.:
  #
  #   describe WidgetsController do
  #     describe "GET index" do
  #       it "assigns all widgets to @widgets" do
  #         widget = stub_model(Widget)
  #         Widget.stub(:all) { widget }
  #         get :index
  #         assigns(:widgets).should eq([widget])
  #       end
  #     end
  #   end
  #
  # On top of rspec-expectations' stock matchers, controller specs add
  # render_template(*args) and redirect_to(destination), which delegate to
  # rails' assert_template and assert_redirected_to respectively.
  #
  # Views are NOT rendered by default: controller specs isolate the
  # controller from its collaborators. Due to the way rails-3 looks up
  # templates, the template file still has to exist, but it is never
  # loaded (unlike rspec-rails-1 with rails-2, where the file could be
  # absent entirely). Declare +render_views+ in the group to get the
  # integrated, rails-functional-test style behaviour instead.
  module ControllerExampleGroup
    extend ActiveSupport::Concern
    include RSpec::Rails::RailsExampleGroup
    include ActionController::TestCase::Behavior
    include RSpec::Rails::ViewRendering
    include RSpec::Rails::Matchers::RedirectTo
    include RSpec::Rails::Matchers::RenderTemplate
    include RSpec::Rails::Matchers::RoutingMatchers

    module ClassMethods
      # The controller class under test (the class handed to +describe+).
      def controller_class
        describes
      end

      # DSL for specifying behaviour of ApplicationController (or the
      # +base_class+ you pass in): creates an anonymous subclass, evals
      # +body+ in it, and draws implicit +stub_resources+ routes that are
      # separate from config/routes.rb.
      #
      #   describe ApplicationController do
      #     controller do
      #       def index
      #         raise ApplicationController::AccessDenied
      #       end
      #     end
      #     ...
      #   end
      #
      # NOTE: because of Ruby 1.8 scoping rules in anonymous subclasses,
      # constants defined in the base class must be fully qualified inside
      # +body+ (e.g. ApplicationController::AccessDenied); instance
      # methods, filters, etc. remain accessible as usual.
      def controller(base_class = ApplicationController, &body)
        anonymous = Class.new(base_class, &body)
        # Give the anonymous class a stable name so route/template lookup
        # resolves to "stub_resources".
        anonymous.singleton_class.class_eval do
          def name
            "StubResourcesController"
          end
        end
        metadata[:example_group][:describes] = anonymous

        before do
          @orig_routes, @routes = @routes, ActionDispatch::Routing::RouteSet.new
          @routes.draw { resources :stub_resources }
        end

        after do
          @routes, @orig_routes = @orig_routes, nil
        end
      end
    end

    module InstanceMethods
      attr_reader :controller, :routes

      # When using the anonymous-controller routes, named-route helper
      # calls are forwarded to the controller; everything else follows
      # normal method lookup.
      def method_missing(method, *args, &block)
        return super unless @orig_routes && @orig_routes.named_routes.helpers.include?(method)
        controller.send(method, *args, &block)
      end
    end

    included do
      subject { controller }
      metadata[:type] = :controller

      before do
        @routes = ::Rails.application.routes
        ActionController::Base.allow_forgery_protection = false
      end
    end
  end
end
|
Some initial specs for the helpers
require 'spec_helper'
require 'magic_grid/helpers'
require 'action_controller'

# Specs for the MagicGrid view helpers, exercised by mixing the helper
# module directly into this example group.
describe MagicGrid::Helpers do

  # Let's use the helpers the way they're meant to be used!
  include MagicGrid::Helpers

  # Minimal controller stand-in: the helpers only need #params, which
  # here always returns an empty hash.
  let(:controller) {
    stub_controller = ActionController::Base.new
    def stub_controller.params(*ignored) {} end
    stub_controller
  }

  describe "#normalize_magic" do

    it "should turn an array into a MagicGrid::Definition" do
      expect(normalize_magic([])).to be_a(MagicGrid::Definition)
    end

    # Idempotence: an already-normalized definition is passed through
    # unchanged (same object identity).
    it "should give back the MagicGrid::Definition given, if given one" do
      definition = normalize_magic([])
      expect(normalize_magic(definition)).to be(definition)
    end
  end
end
|
require File.dirname(__FILE__) + '/../spec_helper'
require 'jesus/interface'

# Specs for the DRb-backed interface to the god process-monitoring daemon.
describe "God interface" do
  before(:all) do
    @server = Jesus::Interface.new
  end

  describe 'no mock' do
    it 'should get the server' do
      @server.server.should be_kind_of DRb::DRbObject
    end

    # No god daemon runs in the test environment, so a remote call is
    # expected to raise a DRb connection error.
    it 'should fail because the server is not connected' do
      lambda {
        @server.status
      }.should raise_error DRb::DRbConnError
    end
  end

  describe 'with mocking' do
    before(:all) do
      # Stub the remote server's status call with two processes that
      # belong to two different groups.
      @server.server.stubs(:status).returns({
        :FirstProcess => { :group => 'GroupName', :status => :up },
        :SecondProcess => { :group => 'Group', :status => :unmonitored },
      })
    end

    # The interface regroups the flat process hash by group name.
    it 'should retrieve the status ordered by group' do
      status = @server.status
      status.length.should eql(2)
      status['Group'].should eql({ :SecondProcess => { :status => :unmonitored, :group => "Group" }})
      status['GroupName'].should eql({ :FirstProcess => { :status => :up, :group=>"GroupName"}})
    end
  end
end
Fixing test when god isn't available
require File.dirname(__FILE__) + '/../spec_helper'
require 'jesus/interface'

# Specs for the DRb-backed interface to the god process-monitoring daemon.
describe "God interface" do
  before(:all) do
    @server = Jesus::Interface.new
  end

  describe 'no mock' do
    it 'should get the server' do
      @server.server.should be_kind_of DRb::DRbObject
    end

    # When no god daemon is available, #status swallows the connection
    # failure and returns nil instead of raising.
    it 'should fail because the server is not connected' do
      @server.status.should be_nil
    end
  end

  describe 'with mocking' do
    before(:all) do
      # Stub the remote server's status call with two processes that
      # belong to two different groups.
      @server.server.stubs(:status).returns({
        :FirstProcess => { :group => 'GroupName', :status => :up },
        :SecondProcess => { :group => 'Group', :status => :unmonitored },
      })
    end

    # The interface regroups the flat process hash by group name.
    it 'should retrieve the status ordered by group' do
      status = @server.status
      status.length.should eql(2)
      status['Group'].should eql({ :SecondProcess => { :status => :unmonitored, :group => "Group" }})
      status['GroupName'].should eql({ :FirstProcess => { :status => :up, :group=>"GroupName"}})
    end
  end
end
|
#
# Copyright (c) 2013, Seth Chisamore
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module VagrantPlugins
  module PuppetInstall
    module Action
      # @author Seth Chisamore <schisamo@opscode.com>
      #
      # This action installs Puppet packages at the desired version.
      class InstallPuppet
        # BUGFIX: removed the unused `ubuntu_codename = %x[lsb_release ...]`
        # class-body shell-out — its result was never referenced (the
        # package below hard-codes "lucid") and it ran `lsb_release` on the
        # HOST at load time, which fails on non-Linux hosts.
        #
        # NOTE(review): only the "lucid" release package is supported here;
        # other Ubuntu codenames would need their matching
        # puppetlabs-release package.
        APT_PACKAGE_FILE = "puppetlabs-release-lucid.deb"
        APT_PACKAGE_FILE_URL = "http://apt.puppetlabs.com/#{APT_PACKAGE_FILE}".freeze

        def initialize(app, env)
          @app = app
          # Config#finalize! SHOULD be called automatically
          env[:global_config].puppet_install.finalize!
        end

        # Middleware entry point: when a version is requested and differs
        # from the installed one, queues the install command for execution
        # over SSH, then continues the middleware chain.
        def call(env)
          desired_puppet_version = env[:global_config].puppet_install.version

          unless desired_puppet_version.nil?
            env[:ui].info("Ensuring Puppet is installed at requested version of #{desired_puppet_version}.")
            if env[:installed_puppet_version] == desired_puppet_version
              env[:ui].info("Puppet #{desired_puppet_version} package is already installed...skipping installation.")
            else
              env[:ui].info("Puppet #{desired_puppet_version} package is not installed...installing now.")
              env[:ssh_run_command] = install_puppet_command(desired_puppet_version)
            end
          end

          @app.call(env)
        end

        private

        # Shell script that fetches and installs the puppetlabs apt repo
        # package (via wget or curl, whichever exists) and then apt-installs
        # puppet-common pinned to +version+ ('*' matches any version).
        def install_puppet_command(version = '*')
          <<-INSTALL_PUPPET
          cd /tmp
          if command -v wget &>/dev/null; then
            wget --quiet #{APT_PACKAGE_FILE_URL}
          elif command -v curl &>/dev/null; then
            curl --location --remote-name #{APT_PACKAGE_FILE_URL}
          else
            echo "Neither wget nor curl found. Please install one and try again." >&2
            exit 1
          fi
          sudo dpkg --install #{APT_PACKAGE_FILE}
          sudo apt-get update --quiet
          sudo apt-get install puppet-common=#{version}* -y
          INSTALL_PUPPET
        end
      end
    end
  end
end
Using the omnibus install and doing a ctrl+r...
#
# Copyright (c) 2013, Seth Chisamore
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'log4r'
require 'shellwords'
require 'vagrant/util/downloader'
module VagrantPlugins
  module PuppetInstall
    module Action
      # @author Seth Chisamore <schisamo@opscode.com>
      #
      # Vagrant middleware action that installs Puppet on the guest at the
      # version requested in the plugin config, using an install script
      # downloaded (or generated, on Windows) on the host and uploaded to
      # the guest.
      class InstallPuppet
        def initialize(app, env)
          @app = app
          @logger =
            Log4r::Logger.new('vagrantplugins::puppet_install::action::installpuppet')
          @machine = env[:machine]
          @install_script = find_install_script
          @machine.config.puppet_install.finalize!
        end

        # Runs the rest of the middleware chain, then — when the machine is
        # reachable and provisioning is enabled — installs Puppet if the
        # installed version differs from the requested one.
        def call(env)
          @app.call(env)

          return unless @machine.communicate.ready? && provision_enabled?(env)

          # BUGFIX: the config attribute is `version` (this is the
          # puppet_install plugin, see the i18n keys below); `chef_version`
          # was a copy/paste leftover from vagrant-omnibus.
          desired_version = @machine.config.puppet_install.version
          unless desired_version.nil?
            if installed_version == desired_version
              env[:ui].info I18n.t(
                'vagrant-puppet_install.action.installed',
                version: desired_version
              )
            else
              fetch_or_create_install_script(env)
              env[:ui].info I18n.t(
                'vagrant-puppet_install.action.installing',
                version: desired_version
              )
              install(desired_version, env)
              recover(env)
            end
          end
          # BUGFIX: removed a second `@app.call(env)` that appeared here —
          # the chain is already invoked once at the top of this method and
          # calling it twice ran every downstream middleware twice.
        end

        private

        # Determines what flavor of install script should be used, allowing
        # an override through the PUPPET_INSTALL_URL environment variable.
        def find_install_script
          if !ENV['PUPPET_INSTALL_URL'].nil?
            ENV['PUPPET_INSTALL_URL']
          else
            'https://raw2.github.com/petems/puppet-install-shell/master/install_puppet.sh'
          end
        end

        # Name the script is uploaded as on the guest.
        def install_script_name
          'install.sh'
        end

        def windows_guest?
          @machine.config.vm.guest.eql?(:windows)
        end

        # Provisioning defaults to enabled unless the env hash says otherwise.
        def provision_enabled?(env)
          env.fetch(:provision_enabled, true)
        end

        # Asks the guest for the currently installed Puppet version, or nil
        # if it cannot be determined.
        def installed_version
          version = nil
          opts = nil
          if windows_guest?
            # Not sure how to do this yet...
            # NOTE(review): `command` stays undefined on Windows guests, so
            # the sudo call below would raise NameError — confirm before
            # enabling Windows support.
          else
            command = 'echo $(puppet --version)'
          end
          @machine.communicate.sudo(command, opts) do |type, data|
            if [:stderr, :stdout].include?(type)
              version_match = data.match(/^(.+)/)
              version = version_match.captures[0].strip if version_match
            end
          end
          version
        end

        #
        # Upload install script from Host's Vagrant TMP directory to guest
        # and executes.
        #
        def install(version, env)
          shell_escaped_version = Shellwords.escape(version)

          @machine.communicate.tap do |comm|
            comm.upload(@script_tmp_path, install_script_name)

            if windows_guest?
              # Not sure yet...
            else
              # TODO: Execute with `sh` once install.sh removes it's bash-isms.
              install_cmd =
                "bash #{install_script_name} -v #{shell_escaped_version} 2>&1"
            end

            comm.sudo(install_cmd) do |type, data|
              if [:stderr, :stdout].include?(type)
                next if data =~ /stdin: is not a tty/
                env[:ui].info(data)
              end
            end
          end
        end

        #
        # Fetches or creates a platform specific install script to the Host's
        # Vagrant TMP directory.
        #
        def fetch_or_create_install_script(env)
          @script_tmp_path =
            env[:tmp_path].join("#{Time.now.to_i.to_s}-#{install_script_name}")

          @logger.info("Generating install script at: #{@script_tmp_path}")

          url = @install_script

          if File.file?(url) || url !~ /^[a-z0-9]+:.*$/i
            @logger.info('Assuming URL is a file.')
            file_path = File.expand_path(url)
            file_path = Vagrant::Util::Platform.cygwin_windows_path(file_path)
            url = "file:#{file_path}"
          end

          # Download the install.sh or create install.bat file to a temporary
          # path. We store the temporary path as an instance variable so that
          # the `#recover` method can access it.
          begin
            if windows_guest?
              # generate a install.bat file at the `@script_tmp_path` location
              #
              # We'll also disable Rubocop for this embedded PowerShell code:
              #
              # rubocop:disable LineLength, SpaceAroundBlockBraces
              #
              File.open(@script_tmp_path, 'w') do |f|
                f.puts <<-EOH.gsub(/^\s{18}/, '')
                  @echo off
                  set version=%1
                  set dest=%~dp0chef-client-%version%-1.windows.msi
                  echo Downloading Chef %version% for Windows...
                  powershell -command "(New-Object System.Net.WebClient).DownloadFile('#{url}?v=%version%', '%dest%')"
                  echo Installing Chef %version%
                  msiexec /q /i %dest%
                EOH
              end
              # rubocop:enable LineLength, SpaceAroundBlockBraces
            else
              downloader = Vagrant::Util::Downloader.new(
                url,
                @script_tmp_path,
                {}
              )
              downloader.download!
            end
          rescue Vagrant::Errors::DownloaderInterrupted
            # The downloader was interrupted, so just return, because that
            # means we were interrupted as well.
            env[:ui].info(I18n.t('vagrant-puppet_install.download.interrupted'))
            return
          end
        end

        # Removes the temporary install script from the host, if present.
        def recover(env)
          if @script_tmp_path && File.exist?(@script_tmp_path)
            File.unlink(@script_tmp_path)
          end
        end
      end
    end
  end
end
|
require 'test_helper'

# Rack-level integration tests: i18n_viz markers should only appear when
# the ?i18n_viz=1 URL parameter is given and the feature is enabled.
class I18nVizIntegrationTest < ActionDispatch::IntegrationTest

  test 'translate without i18n_viz url parameter' do
    visit "/test"
    assert !page.has_content?("--hello--")
    assert !page.has_css?("#i18n_viz_tooltip")
  end

  test 'translate with i18n_viz url parameter' do
    I18nViz.enabled = true
    visit "/test?i18n_viz=1"
    assert page.has_content?("--hello--")
    assert !page.has_css?(".i18n-viz")
    assert !page.has_css?("#i18n_viz_tooltip")
    assert page.has_content?("bar")
    assert !page.has_content?("--foo--")
    assert page.has_content?("foo")
  end

  test 'disable I18nViz' do
    I18nViz.enabled = false
    visit "/test?i18n_viz=1"
    assert !page.has_content?("--hello--")
  end
end

# Same checks with a JavaScript-capable driver: the JS is expected to strip
# the --key-- markers and insert .i18n-viz wrapper elements.
class I18nVizJavascriptIntegrationTest < ActionDispatch::IntegrationTest

  setup do
    Capybara.current_driver = Capybara.javascript_driver # :selenium by default
  end

  test 'translate without i18n_viz url parameter' do
    visit "/test"
    assert !page.has_content?("--hello--")
    assert !page.has_css?(".i18n-viz")
  end

  test 'translate with i18n_viz url parameter' do
    visit "/test?i18n_viz=1"
    assert !page.has_content?("--hello--") # should be removed by js
    assert page.has_css?(".i18n-viz")
    # NOTE(review): save_and_open_page looks like a leftover debugging call
    # (it dumps and opens the rendered page) — consider removing it.
    save_and_open_page
    assert page.has_css?("#i18n_viz_tooltip")
  end
end
fix tests, as the popover is only inserted when the element is hovered
require 'test_helper'

# Rack-level integration tests: i18n_viz markers should only appear when
# the ?i18n_viz=1 URL parameter is given and the feature is enabled.
class I18nVizIntegrationTest < ActionDispatch::IntegrationTest

  test 'translate without i18n_viz url parameter' do
    visit "/test"
    assert !page.has_content?("--hello--")
    assert !page.has_css?("#i18n_viz_tooltip")
  end

  test 'translate with i18n_viz url parameter' do
    I18nViz.enabled = true
    visit "/test?i18n_viz=1"
    assert page.has_content?("--hello--")
    assert !page.has_css?(".i18n-viz")
    assert !page.has_css?("#i18n_viz_tooltip")
    assert page.has_content?("bar")
    assert !page.has_content?("--foo--")
    assert page.has_content?("foo")
  end

  test 'disable I18nViz' do
    I18nViz.enabled = false
    visit "/test?i18n_viz=1"
    assert !page.has_content?("--hello--")
  end
end

# Same checks with a JavaScript-capable driver: the JS strips the --key--
# markers and inserts .i18n-viz wrapper elements.
class I18nVizJavascriptIntegrationTest < ActionDispatch::IntegrationTest

  setup do
    Capybara.current_driver = Capybara.javascript_driver # :selenium by default
  end

  test 'translate without i18n_viz url parameter' do
    visit "/test"
    assert !page.has_content?("--hello--")
    assert !page.has_css?(".i18n-viz")
  end

  test 'translate with i18n_viz url parameter' do
    visit "/test?i18n_viz=1"
    assert !page.has_content?("--hello--") # should be removed by js
    assert page.has_css?(".i18n-viz")
    # The popover is only inserted into the DOM when the annotated element
    # is hovered, so trigger the hover before asserting on the tooltip.
    first('.i18n-viz').hover
    assert page.has_css?("#i18n_viz_tooltip")
  end
end
|
require 'test_helper'

# Tests for representable's :instance option: the lambda decides which
# object an incoming fragment is deserialized into, enabling lookup,
# sync-in-place, add-only and replace semantics for collections.
class InstanceTest < BaseTest
  Song = Struct.new(:id, :title)
  Song.class_eval do
    # Stand-in for an ORM finder: always "finds" a song titled "Invincible".
    def self.find(id)
      new(id, "Invincible")
    end
  end

  # TODO: use *args in from_hash.
  # DISCUSS: do we need parse_strategy?
  describe "property with :instance" do
    representer!(:inject => :song_representer) do
      property :song,
        :instance => lambda { |fragment, *args| fragment["id"] == song.id ? song : Song.find(fragment["id"]) },
        :extend => song_representer
    end

    it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
      from_hash("song" => {"id" => 1}).song.must_equal Song.new(1, "The Answer Is Still No") }

    it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
      from_hash("song" => {"id" => 2}).song.must_equal Song.new(2, "Invincible") }
  end

  describe "collection with :instance" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :instance => lambda { |fragment, i, *args|
          fragment["id"] == songs[i].id ? songs[i] : Song.find(fragment["id"])
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer
        # :parse_strategy => :sync
    end

    # problem: when returning nil in this lambda WITHOUT parse: true, the original model's collection is empty and object.call in #instance_for doesn't work, so we still try to create a brand-new object.

    it {
      album= Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "")])

      album.
        extend(representer).
        from_hash("songs" => [{"id" => 2},{"id" => 2, "title"=>"The Answer Is Still No"}]).songs.must_equal [
          Song.new(2, "Invincible"), Song.new(2, "The Answer Is Still No")]
    }

    # it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
    #   from_hash("song" => {"id" => 2}).song.must_equal Song.new(2, "Invincible") }
  end

  describe "lambda receiving fragment and args" do
    representer!(:inject => :song_representer) do
      property :song, :instance => lambda { |fragment, args| Struct.new(:args, :id).new([fragment, args]) }, :extend => song_representer
    end

    it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
      from_hash({"song" => {"id" => 1}}, {:volume => 1}).song.args.must_equal([{"id"=>1}, {:volume=>1}]) }
  end

  # TODO: raise and test instance:{nil}
  # describe "property with instance: { nil }" do # TODO: introduce :representable option?
  #   representer!(:inject => :song_representer) do
  #     property :song, :instance => lambda { |*| nil }, :extend => song_representer
  #   end

  #   let (:hit) { hit = OpenStruct.new(:song => song).extend(representer) }

  #   it "calls #to_hash on song instance, nothing else" do
  #     hit.to_hash.must_equal("song"=>{"title"=>"Resist Stance"})
  #   end

  #   it "calls #from_hash on the existing song instance, nothing else" do
  #     song_id = hit.song.object_id
  #     hit.from_hash("song"=>{"title"=>"Suffer"})
  #     hit.song.title.must_equal "Suffer"
  #     hit.song.object_id.must_equal song_id
  #   end
  # end

  # lambda { |fragment, i, Context(binding: <..>, args: [..])| }

  # Sync semantics: the lambda hands back the existing item at index i, so
  # the incoming hash updates the objects (and collection) in place.
  describe "sync" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :instance => lambda { |fragment, i, *args|
          songs[i]
        },
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album= Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invncble")])

      album.
        extend(representer).
        from_hash("songs" => [{"title" => "The Answer Is Still No"}, {"title" => "Invincible"}])

      album.songs.must_equal [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invincible")]

      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
      songs[1].object_id.must_equal album.songs[1].object_id
    }
  end

  # Only existing elements (found by id) are updated; nothing is added.
  describe "update existing elements, only" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :instance => lambda { |fragment, i, *args|
          #fragment["id"] == songs[i].id ? songs[i] : Song.find(fragment["id"])
          songs.find { |s| s.id == fragment["id"] }
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album= Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invncble")])

      album.
        extend(representer).
        from_hash("songs" => [{"id" => 2, "title" => "Invincible"}]).
        songs.must_equal [
          Song.new(1, "The Answer Is Still No"),
          Song.new(2, "Invincible")]

      # TODO: check elements object_id!
      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
      songs[1].object_id.must_equal album.songs[1].object_id
    }
  end

  # The lambda appends a fresh Song to the original collection and returns
  # it, so incoming fragments are added without touching existing items.
  describe "add incoming elements, only" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :instance => lambda { |fragment, i, *args|
          songs << song=Song.new(2)
          song
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album= Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No")])

      album.
        extend(representer).
        from_hash("songs" => [{"title" => "Invincible"}]).
        songs.must_equal [
          Song.new(1, "The Answer Is Still No"),
          Song.new(2, "Invincible")]

      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
    }
  end

  # not sure if this must be a library strategy
  # The fragment carries a "replace_id"; the matching element is swapped
  # out for a new Song in place, preserving the collection object.
  describe "replace existing element" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :instance => lambda { |fragment, i, *args|
          id = fragment.delete("replace_id")
          replaced = songs.find { |s| s.id == id }
          songs[songs.index(replaced)] = song=Song.new(3)
          song
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album= Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invincible")])

      album.
        extend(representer).
        from_hash("songs" => [{"replace_id"=>2, "id" => 3, "title" => "Soulmate"}]).
        songs.must_equal [
          Song.new(1, "The Answer Is Still No"),
          Song.new(3, "Soulmate")]

      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
    }
  end

  # Default behaviour without :instance — the whole collection is replaced
  # with newly-built Song objects.
  describe "replace collection" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :extend => song_representer, :class => Song
    end

    it {
      album= Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No")])

      album.
        extend(representer).
        from_hash("songs" => [{"title" => "Invincible"}]).
        songs.must_equal [
          Song.new(nil, "Invincible")]

      songs.object_id.wont_equal album.songs.object_id
    }
  end
end
cleanup
require 'test_helper'

# Specs for Representable's :instance option: the lambda receives the incoming
# fragment (and, for collections, its index) and returns the object that the
# fragment should be deserialized into. The lambdas are instance_exec'd on the
# represented object, so `song`/`songs` below refer to its accessors.
class InstanceTest < BaseTest
  Song = Struct.new(:id, :title)
  Song.class_eval do
    # Stand-in for a persistence-layer finder; always "finds" a song
    # with the requested id and the title "Invincible".
    def self.find(id)
      new(id, "Invincible")
    end
  end

  # TODO: use *args in from_hash.
  # DISCUSS: do we need parse_strategy?
  describe "property with :instance" do
    representer!(:inject => :song_representer) do
      property :song,
        # Reuse the existing song when ids match, otherwise look a new one up.
        :instance => lambda { |fragment, *args| fragment["id"] == song.id ? song : Song.find(fragment["id"]) },
        :extend => song_representer
    end

    it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
      from_hash("song" => {"id" => 1}).song.must_equal Song.new(1, "The Answer Is Still No") }

    it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
      from_hash("song" => {"id" => 2}).song.must_equal Song.new(2, "Invincible") }
  end

  describe "collection with :instance" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :instance => lambda { |fragment, i, *args|
          fragment["id"] == songs[i].id ? songs[i] : Song.find(fragment["id"])
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer
    end

    it {
      album = Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "")])

      album.
        extend(representer).
        from_hash("songs" => [{"id" => 2},{"id" => 2, "title"=>"The Answer Is Still No"}]).songs.must_equal [
          Song.new(2, "Invincible"), Song.new(2, "The Answer Is Still No")]
    }
  end

  describe "lambda receiving fragment and args" do
    representer!(:inject => :song_representer) do
      # Two-arg lambda form: second parameter is the user options hash
      # passed to #from_hash.
      property :song, :instance => lambda { |fragment, args| Struct.new(:args, :id).new([fragment, args]) }, :extend => song_representer
    end

    it { OpenStruct.new(:song => Song.new(1, "The Answer Is Still No")).extend(representer).
      from_hash({"song" => {"id" => 1}}, {:volume => 1}).song.args.must_equal([{"id"=>1}, {:volume=>1}]) }
  end

  # TODO: raise and test instance:{nil}
  # describe "property with instance: { nil }" do # TODO: introduce :representable option?
  #   representer!(:inject => :song_representer) do
  #     property :song, :instance => lambda { |*| nil }, :extend => song_representer
  #   end

  #   let (:hit) { hit = OpenStruct.new(:song => song).extend(representer) }

  #   it "calls #to_hash on song instance, nothing else" do
  #     hit.to_hash.must_equal("song"=>{"title"=>"Resist Stance"})
  #   end

  #   it "calls #from_hash on the existing song instance, nothing else" do
  #     song_id = hit.song.object_id
  #     hit.from_hash("song"=>{"title"=>"Suffer"})
  #     hit.song.title.must_equal "Suffer"
  #     hit.song.object_id.must_equal song_id
  #   end
  # end

  # lambda { |fragment, i, Context(binding: <..>, args: [..])| }
  describe "sync" do
    representer!(:inject => :song_representer) do
      collection :songs,
        # Map the i-th fragment onto the i-th existing song (positional sync).
        :instance => lambda { |fragment, i, *args|
          songs[i]
        },
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album = Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invncble")])

      album.
        extend(representer).
        from_hash("songs" => [{"title" => "The Answer Is Still No"}, {"title" => "Invincible"}])

      album.songs.must_equal [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invincible")]

      # Collection and its members must be updated in place, not replaced.
      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
      songs[1].object_id.must_equal album.songs[1].object_id
    }
  end

  describe "update existing elements, only" do
    representer!(:inject => :song_representer) do
      collection :songs,
        # Match fragments to existing songs by id instead of by position.
        :instance => lambda { |fragment, i, *args|
          #fragment["id"] == songs[i].id ? songs[i] : Song.find(fragment["id"])
          songs.find { |s| s.id == fragment["id"] }
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album = Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invncble")])

      album.
        extend(representer).
        from_hash("songs" => [{"id" => 2, "title" => "Invincible"}]).
        songs.must_equal [
          Song.new(1, "The Answer Is Still No"),
          Song.new(2, "Invincible")]

      # TODO: check elements object_id!
      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
      songs[1].object_id.must_equal album.songs[1].object_id
    }
  end

  describe "add incoming elements, only" do
    representer!(:inject => :song_representer) do
      collection :songs,
        # Append a fresh song to the existing collection for every fragment.
        :instance => lambda { |fragment, i, *args|
          songs << song=Song.new(2)
          song
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album = Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No")])

      album.
        extend(representer).
        from_hash("songs" => [{"title" => "Invincible"}]).
        songs.must_equal [
          Song.new(1, "The Answer Is Still No"),
          Song.new(2, "Invincible")]

      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
    }
  end

  # not sure if this must be a library strategy
  describe "replace existing element" do
    representer!(:inject => :song_representer) do
      collection :songs,
        # Swap out the song addressed by the fragment's "replace_id" key.
        :instance => lambda { |fragment, i, *args|
          id = fragment.delete("replace_id")
          replaced = songs.find { |s| s.id == id }
          songs[songs.index(replaced)] = song=Song.new(3)
          song
        }, # let's not allow returning nil anymore. make sure we can still do everything as with nil. also, let's remove parse_strategy: sync.
        :extend => song_representer,
        # :parse_strategy => :sync
        :setter => lambda { |*| }
    end

    it {
      album = Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No"),
        Song.new(2, "Invincible")])

      album.
        extend(representer).
        from_hash("songs" => [{"replace_id"=>2, "id" => 3, "title" => "Soulmate"}]).
        songs.must_equal [
          Song.new(1, "The Answer Is Still No"),
          Song.new(3, "Soulmate")]

      songs.object_id.must_equal album.songs.object_id
      songs[0].object_id.must_equal album.songs[0].object_id
    }
  end

  describe "replace collection" do
    representer!(:inject => :song_representer) do
      collection :songs,
        :extend => song_representer, :class => Song
    end

    it {
      album = Struct.new(:songs).new(songs = [
        Song.new(1, "The Answer Is Still No")])

      album.
        extend(representer).
        from_hash("songs" => [{"title" => "Invincible"}]).
        songs.must_equal [
          Song.new(nil, "Invincible")]

      # Default behaviour without :instance — a brand-new collection replaces
      # the old one entirely.
      songs.object_id.wont_equal album.songs.object_id
    }
  end
end
require_relative '../lib/metybur'
require_relative 'mocks/websocket_mock'
require 'ffaker'
require 'json'
# End-to-end specs for the Metybur DDP client, driven through a mocked
# websocket (WebsocketMock records outgoing frames and can inject incoming ones).
describe Metybur do
  before :all do
    Metybur.websocket_client_class = WebsocketMock
  end

  let(:url) { FFaker::Internet.http_url }
  let(:websocket) { WebsocketMock.instance }
  let(:last_sent_message) { parse(websocket.sent.last) }

  # Decode a JSON frame captured by the websocket mock.
  def parse(string_data)
    JSON.parse(string_data, symbolize_names: true)
  end

  it 'connects to a Meteor URL' do
    Metybur.connect url

    expect(websocket.url).to eq url
    # The DDP handshake must be the very first frame on the wire.
    connect_message = parse(websocket.sent.first)
    expect(connect_message)
      .to eq msg: 'connect',
             version: '1',
             support: ['1']
  end

  context 'logging in' do
    it 'calls the login method with email and password' do
      email = FFaker::Internet.email
      password = FFaker::Internet.password

      Metybur.connect url, email: email, password: password

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:method]).to eq 'login'
      expect(last_sent_message[:params][0])
        .to eq user: { email: email },
               password: password
    end

    it 'calls the login method with username and password' do
      username = FFaker::Internet.user_name
      password = FFaker::Internet.password

      Metybur.connect url, username: username, password: password

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:method]).to eq 'login'
      expect(last_sent_message[:params][0])
        .to eq user: { username: username },
               password: password
    end

    it 'calls the login method with user id and password' do
      userid = FFaker::Guid.guid
      password = FFaker::Internet.password

      Metybur.connect url, id: userid, password: password

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:method]).to eq 'login'
      expect(last_sent_message[:params][0])
        .to eq user: { id: userid },
               password: password
    end

    it "doesn't log in without credentials" do
      Metybur.connect url
      # No login frame: the handshake remains the last message sent.
      expect(last_sent_message[:msg]).to eq 'connect'
    end
  end

  context 'ping pong' do
    it 'responds with pong to a ping' do
      Metybur.connect url
      websocket.receive({msg: 'ping'}.to_json)
      expect(last_sent_message[:msg]).to eq 'pong'
    end
  end

  context 'logging' do
    it "doesn't log any messages by default" do
      output = StringIO.new
      Metybur.log_stream = output

      Metybur.connect url
      websocket.receive({msg: 'logged_message'}.to_json)

      expect(output.string).to be_empty
    end

    it 'logs a message when the log level is set to debug' do
      output = StringIO.new
      Metybur.log_level = :debug
      Metybur.log_stream = output

      Metybur.connect url
      websocket.receive({msg: 'logged_message'}.to_json)

      expect(output.string).not_to be_empty
    end
  end

  context 'subscription' do
    it 'subscribes to a published record set' do
      record_set = FFaker::Internet.user_name
      meteor = Metybur.connect url

      meteor.subscribe(record_set)

      expect(last_sent_message[:msg]).to eq 'sub'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:name]).to eq record_set
    end
  end

  context 'collections' do
    it 'gets notified when a record is added' do
      collection = FFaker::Internet.user_name
      callback_called = false
      id = FFaker::Guid.guid
      fields = {city: FFaker::Address.city}
      meteor = Metybur.connect url

      meteor.collection(collection)
        .on(:added) do |added_id, added_fields|
          callback_called = true
          expect(added_id).to eq id
          expect(added_fields).to eq fields
        end

      message = {
        msg: 'added',
        collection: collection,
        id: id,
        fields: fields
      }.to_json
      websocket.receive message

      # Guard: the expectations above only run inside the callback.
      fail("Callback didn't get called.") unless callback_called
    end

    it 'gets notified when a record is changed' do
      collection = FFaker::Internet.user_name
      callback_called = false
      id = FFaker::Guid.guid
      fields = {city: FFaker::Address.city}
      cleared = [FFaker::Guid.guid]
      meteor = Metybur.connect url

      meteor.collection(collection)
        .on(:changed) do |changed_id, changed_fields, cleared_fields|
          callback_called = true
          expect(changed_id).to eq id
          expect(changed_fields).to eq fields
          expect(cleared_fields).to eq cleared
        end

      message = {
        msg: 'changed',
        collection: collection,
        id: id,
        fields: fields,
        cleared: cleared
      }.to_json
      websocket.receive message

      fail("Callback didn't get called.") unless callback_called
    end

    it 'gets notified when a record is removed' do
      collection = FFaker::Internet.user_name
      callback_called = false
      id = FFaker::Guid.guid
      meteor = Metybur.connect url

      meteor.collection(collection)
        .on(:removed) do |removed_id|
          callback_called = true
          expect(removed_id).to eq id
        end

      message = {
        msg: 'removed',
        collection: collection,
        id: id
      }.to_json
      websocket.receive message

      fail("Callback didn't get called.") unless callback_called
    end

    it "doesn't get notified of a ping message" do
      meteor = Metybur.connect(url)
      meteor.collection('my-collection')
        .on(:added) { fail('Callback got called') }
      websocket.receive({msg: 'ping'}.to_json)
    end

    it "doesn't get notified of a record from another collection" do
      meteor = Metybur.connect(url)
      meteor.collection('my-collection')
        .on(:added) { fail('Callback got called') }
      message = {
        msg: 'added',
        collection: 'another-collection',
        id: 'xyz',
        fields: {country: 'Belarus'}
      }.to_json
      websocket.receive message
    end
  end

  context 'methods' do
    it 'calls a method through the call method' do
      # Randomized method name and a mixed positional/hash parameter list.
      method = %w(postChatMessage sendEmail submitOrder).sample
      params = %w(35 Vienna true).sample(2)
      hashParams = {emailAddress: 'myemail@example.com', myMessage: 'Alright!', userId: 'rtnilctrniae'}
        .to_a.sample(2)
      params << Hash[hashParams]
      meteor = Metybur.connect(url)

      meteor.call(method, params)

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message[:method]).to eq method
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:params]).to eq params
    end

    it 'calls a method called on the client directly' do
      meteor = Metybur.connect(url)

      meteor.activate('user', id: 'utrtrvlc')

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message[:method]).to eq 'activate'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:params]).to eq ['user', {id: 'utrtrvlc'}]
    end

    it 'camel-cases methods and parameters called on the client directly' do
      meteor = Metybur.connect(url)

      meteor.activate_user('Hans', user_id: 'utrtrvlc', is_admin: false)

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message[:method]).to eq 'activateUser'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:params]).to eq ['Hans', {userId: 'utrtrvlc', isAdmin: false}]
    end
  end
end
Refactor collection specs.
require_relative '../lib/metybur'
require_relative 'mocks/websocket_mock'
require 'ffaker'
require 'json'
# End-to-end specs for the Metybur DDP client, driven through a mocked
# websocket (WebsocketMock records outgoing frames and can inject incoming ones).
describe Metybur do
  before :all do
    Metybur.websocket_client_class = WebsocketMock
  end

  let(:url) { FFaker::Internet.http_url }
  let(:websocket) { WebsocketMock.instance }
  let(:last_sent_message) { parse(websocket.sent.last) }

  # Decode a JSON frame captured by the websocket mock.
  def parse(string_data)
    JSON.parse(string_data, symbolize_names: true)
  end

  it 'connects to a Meteor URL' do
    Metybur.connect url

    expect(websocket.url).to eq url
    # The DDP handshake must be the very first frame on the wire.
    connect_message = parse(websocket.sent.first)
    expect(connect_message)
      .to eq msg: 'connect',
             version: '1',
             support: ['1']
  end

  context 'logging in' do
    it 'calls the login method with email and password' do
      email = FFaker::Internet.email
      password = FFaker::Internet.password

      Metybur.connect url, email: email, password: password

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:method]).to eq 'login'
      expect(last_sent_message[:params][0])
        .to eq user: { email: email },
               password: password
    end

    it 'calls the login method with username and password' do
      username = FFaker::Internet.user_name
      password = FFaker::Internet.password

      Metybur.connect url, username: username, password: password

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:method]).to eq 'login'
      expect(last_sent_message[:params][0])
        .to eq user: { username: username },
               password: password
    end

    it 'calls the login method with user id and password' do
      userid = FFaker::Guid.guid
      password = FFaker::Internet.password

      Metybur.connect url, id: userid, password: password

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:method]).to eq 'login'
      expect(last_sent_message[:params][0])
        .to eq user: { id: userid },
               password: password
    end

    it "doesn't log in without credentials" do
      Metybur.connect url
      # No login frame: the handshake remains the last message sent.
      expect(last_sent_message[:msg]).to eq 'connect'
    end
  end

  context 'ping pong' do
    it 'responds with pong to a ping' do
      Metybur.connect url
      websocket.receive({msg: 'ping'}.to_json)
      expect(last_sent_message[:msg]).to eq 'pong'
    end
  end

  context 'logging' do
    it "doesn't log any messages by default" do
      output = StringIO.new
      Metybur.log_stream = output

      Metybur.connect url
      websocket.receive({msg: 'logged_message'}.to_json)

      expect(output.string).to be_empty
    end

    it 'logs a message when the log level is set to debug' do
      output = StringIO.new
      Metybur.log_level = :debug
      Metybur.log_stream = output

      Metybur.connect url
      websocket.receive({msg: 'logged_message'}.to_json)

      expect(output.string).not_to be_empty
    end
  end

  context 'subscription' do
    it 'subscribes to a published record set' do
      record_set = FFaker::Internet.user_name
      meteor = Metybur.connect url

      meteor.subscribe(record_set)

      expect(last_sent_message[:msg]).to eq 'sub'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:name]).to eq record_set
    end
  end

  context 'collections' do
    # Yields a `done` lambda to the example block and fails the example
    # unless the registered callback eventually invoked it.
    def wait_for_callback
      callback_called = false
      done = -> { callback_called = true }
      yield done
      fail("Callback didn't get called.") unless callback_called
    end

    it 'gets notified when a record is added' do
      collection = FFaker::Internet.user_name
      id = FFaker::Guid.guid
      fields = {city: FFaker::Address.city}
      meteor = Metybur.connect url

      wait_for_callback do |done|
        meteor.collection(collection)
          .on(:added) do |added_id, added_fields|
            done.call()
            expect(added_id).to eq id
            expect(added_fields).to eq fields
          end

        message = {
          msg: 'added',
          collection: collection,
          id: id,
          fields: fields
        }.to_json
        websocket.receive message
      end
    end

    it 'gets notified when a record is changed' do
      collection = FFaker::Internet.user_name
      id = FFaker::Guid.guid
      fields = {city: FFaker::Address.city}
      cleared = [FFaker::Guid.guid]
      meteor = Metybur.connect url

      wait_for_callback do |done|
        meteor.collection(collection)
          .on(:changed) do |changed_id, changed_fields, cleared_fields|
            done.call()
            expect(changed_id).to eq id
            expect(changed_fields).to eq fields
            expect(cleared_fields).to eq cleared
          end

        message = {
          msg: 'changed',
          collection: collection,
          id: id,
          fields: fields,
          cleared: cleared
        }.to_json
        websocket.receive message
      end
    end

    it 'gets notified when a record is removed' do
      collection = FFaker::Internet.user_name
      id = FFaker::Guid.guid
      meteor = Metybur.connect url

      wait_for_callback do |done|
        meteor.collection(collection)
          .on(:removed) do |removed_id|
            done.call()
            expect(removed_id).to eq id
          end

        message = {
          msg: 'removed',
          collection: collection,
          id: id
        }.to_json
        websocket.receive message
      end
    end

    it "doesn't get notified of a ping message" do
      meteor = Metybur.connect(url)
      meteor.collection('my-collection')
        .on(:added) { fail('Callback got called') }
      websocket.receive({msg: 'ping'}.to_json)
    end

    it "doesn't get notified of a record from another collection" do
      meteor = Metybur.connect(url)
      meteor.collection('my-collection')
        .on(:added) { fail('Callback got called') }
      message = {
        msg: 'added',
        collection: 'another-collection',
        id: 'xyz',
        fields: {country: 'Belarus'}
      }.to_json
      websocket.receive message
    end
  end

  context 'methods' do
    it 'calls a method through the call method' do
      # Randomized method name and a mixed positional/hash parameter list.
      method = %w(postChatMessage sendEmail submitOrder).sample
      params = %w(35 Vienna true).sample(2)
      hashParams = {emailAddress: 'myemail@example.com', myMessage: 'Alright!', userId: 'rtnilctrniae'}
        .to_a.sample(2)
      params << Hash[hashParams]
      meteor = Metybur.connect(url)

      meteor.call(method, params)

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message[:method]).to eq method
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:params]).to eq params
    end

    it 'calls a method called on the client directly' do
      meteor = Metybur.connect(url)

      meteor.activate('user', id: 'utrtrvlc')

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message[:method]).to eq 'activate'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:params]).to eq ['user', {id: 'utrtrvlc'}]
    end

    it 'camel-cases methods and parameters called on the client directly' do
      meteor = Metybur.connect(url)

      meteor.activate_user('Hans', user_id: 'utrtrvlc', is_admin: false)

      expect(last_sent_message[:msg]).to eq 'method'
      expect(last_sent_message[:method]).to eq 'activateUser'
      expect(last_sent_message).to have_key :id # we don't care about the value here
      expect(last_sent_message[:params]).to eq ['Hans', {userId: 'utrtrvlc', isAdmin: false}]
    end
  end
end
|
#
# File: parsers_spec.rb
# Author: alex@testcore.net
#
# Test of Classes to parse various data formats
# into native ruby data structures
require 'rspec'
require "#{ File.dirname __FILE__ }/../lib/splmig/parsers"
# Specs for the Parser base class: registry of subclasses and dispatch
# to the first parser whose valid? accepts the input.
describe 'Migration Parsing' do
  before :all do
    # Register a dummy parser so dispatch can be exercised deterministically.
    class Migration::Foo < Migration::Parser
      class << self
        def parse(it)
          'pass'
        end

        def valid?(v)
          v == 'passme'
        end
      end
    end
  end

  it 'exists' do
    expect( Object.const_defined? 'Migration::Parser' ).to be_truthy
  end

  it 'catches nil values' do
    expect( Migration::Parser.parse nil ).to eq( nil )
  end

  it 'tracks its parsers' do
    expect( Migration::Parser.parsers.include? Migration::Foo ).to be_truthy
  end

  it 'selects a parser' do
    expect( Migration::Parser.parse 'passme' ).to eq( 'pass' )
  end

  it 'returns the original value if no parser was found' do
    expect( Migration::Parser.parse 'no parsers for me' ).to eq( 'no parsers for me' )
  end
end
# Specs for StanzaParser: ini-style "[name]\nkey = value" stanzas → Hash.
describe 'Migration Stanza Parsing' do
  before :all do
    @ini = "[artifact name]\nowner = admin\nsearch = index=foobar\n"
  end

  it 'exists' do
    expect( Object.const_defined? 'Migration::StanzaParser' ).to be_truthy
  end

  it 'expects an ini formatted string' do
    expect( Migration::StanzaParser.parse 'not an ini string' ).to be_falsey
  end

  it 'parses an ini string into a hash' do
    expect( Migration::StanzaParser.parse @ini ).to include( name: 'artifact name', owner: 'admin', search: 'index=foobar' )
  end

  it 'parses multiline statements' do
    # A value containing a literal backslash-newline continuation.
    ini = "[artifact name]\nowner = admin\nsearch = index=foobar some | \\nsearch terms here"
    expect( Migration::StanzaParser.parse ini ).to include( search: 'index=foobar some | \\nsearch terms here' )
  end

  it 'performs validation' do
    notini = 'this is not an ini string'
    expect( Migration::StanzaParser.valid? notini ).to be_falsey
  end
end
# Specs for YamlParser: YAML documents → native Ruby structures.
describe 'Migration Yaml Parsing' do
  # YAML fixture: ssh credentials plus a list of migration sections.
  let(:yaml_string) do
    "---\nssh:\n keyfile: \"/path/to/key\"\n user: admin\nmigration:\n- search\n- eventtypes\n"
  end

  it 'parses a yaml string' do
    parsed = Migration::YamlParser.parse(yaml_string)
    expect(parsed).to include('migration' => %w(search eventtypes))
  end

  it 'is a parser' do
    # Direct superclass must be the Parser base class.
    expect(Migration::YamlParser.ancestors[1]).to eq(Migration::Parser)
  end
end
# Specs for ConfParser: splits a Splunk .conf file into stanza strings.
describe 'Migration Conf Parsing' do
  before :all do
    @str = File.read "#{ File.dirname __FILE__ }/data/sample.conf"
  end

  it 'parses a string' do
    parsed = Migration::ConfParser.parse( @str )
    expect( parsed ).to include "[Test]\naction.script = 1"
    expect( parsed ).to include "[tstb]\naction.script.filename = pagerduty_index_alert"
    expect( parsed ).to include "[Test Cee]\naction.email.sendpdf = 1"
  end

  it 'performs simple validation' do
    expect( Migration::ConfParser.valid? 'not a conf string' ).to be_falsey
  end

  it 'is a parser' do
    # Direct superclass must be the Parser base class.
    expect( Migration::ConfParser.ancestors[1] ).to eq( Migration::Parser )
  end
end
# Specs for FileParser: reads a file and delegates to the matching parser.
describe 'Migration File Parsing' do
  it 'parses conf file contents' do
    expect( Migration::FileParser.parse "#{ File.dirname( __FILE__ )}/data/sample.conf" ).to include( "[Test]\naction.script = 1" )
  end

  it 'parses yaml file contents' do
    expect( Migration::FileParser.parse( "#{ File.dirname( __FILE__ )}/data/sample.yml" ).keys ).to include( :ssh ).and include( :migration )
  end

  it 'rejects non-existant files' do
    expect( Migration::FileParser.parse '/path/to/nowhere' ).to be_falsey
  end
end
# Specs for ListParser: newline-separated file lists → Array of paths.
describe 'Migration File List Parsing' do
  before :all do
    @data = "default/conf/data.conf\nlocal/conf/web.conf\nmeta/conf/local.meta\nmeta/conf/default.meta"
  end

  it 'parses a multiline string to an array' do
    %w[ default/conf/data.conf local/conf/web.conf meta/conf/local.meta meta/conf/default.meta ].each do |file|
      expect( Migration::ListParser.parse @data ).to include( file )
    end
  end

  it 'performs simple validation' do
    # Non-string input is rejected.
    expect( Migration::ListParser.valid? 123 ).to be_falsey
  end

  it 'validates the list' do
    expect( Migration::ListParser.valid? "a\nb" ).to be_truthy
  end
end
# Specs for PathParser: filesystem paths → Array of path segments.
describe 'Migration Path Parsing' do
  let(:path) { '/path/to/nowhere' }

  it 'parses a path to an array' do
    expect(Migration::PathParser.parse(path)).to eq(%w[ path to nowhere ])
  end

  it 'validates the path' do
    expect(Migration::PathParser.valid?('this is not a path')).to be_falsey
  end

  it 'returns an empty array for invalid paths' do
    expect(Migration::PathParser.parse('bin/')).to eq([])
  end
end
# Specs for PathHashParser: filesystem paths → nested Hash of segments.
describe 'Migration Path Hash Parsing' do
  before :all do
    @path = '/path/to/nowhere'
  end

  it 'parses a path to a hash' do
    expect( Migration::PathHashParser.parse @path ).to eq({ 'path' => { 'to' => { 'nowhere' => {} }}})
  end

  it 'returns an empty hash for invalid paths' do
    expect( Migration::PathHashParser.parse 'bin/' ).to eq({})
  end
end
Fix up tests after parser refactors
#
# File: parsers_spec.rb
# Author: alex@testcore.net
#
# Test of Classes to parse various data formats
# into native ruby data structures
require 'rspec'
require "#{ File.dirname __FILE__ }/../lib/splmig/parsers"
# TODO: Bring consistency to the testing
# Specs for the Parser base class: registry of subclasses and dispatch
# to the first parser whose valid? accepts the input.
describe 'Migration Parsing' do
  before :all do
    # Register a dummy parser so dispatch can be exercised deterministically.
    class Migration::Foo < Migration::Parser
      class << self
        def parse(it)
          'pass'
        end

        def valid?(v)
          v == 'passme'
        end
      end
    end
  end

  it 'exists' do
    expect( Object.const_defined? 'Migration::Parser' ).to be_truthy
  end

  it 'catches nil values' do
    expect( Migration::Parser.parse nil ).to eq( nil )
  end

  it 'tracks its parsers' do
    expect( Migration::Parser.parsers.include? Migration::Foo ).to be_truthy
  end

  it 'selects a parser' do
    expect( Migration::Parser.parse 'passme' ).to eq( 'pass' )
  end

  it 'returns the original value if no parser was found' do
    expect( Migration::Parser.parse 'no parsers for me' ).to eq( 'no parsers for me' )
  end
end
# Specs for StanzaParser: ini-style "[name]\nkey = value" stanzas → Hash.
describe 'Migration Stanza Parsing' do
  before :all do
    @ini = "[artifact name]\nowner = admin\nsearch = index=foobar\n"
  end

  it 'exists' do
    expect( Object.const_defined? 'Migration::StanzaParser' ).to be_truthy
  end

  it 'expects an ini formatted string' do
    expect( Migration::StanzaParser.parse 'not an ini string' ).to be_falsey
  end

  it 'parses an ini string into a hash' do
    expect( Migration::StanzaParser.parse @ini ).to include( name: 'artifact name', owner: 'admin', search: 'index=foobar' )
  end

  it 'parses multiline statements' do
    # A value containing a literal backslash-newline continuation.
    ini = "[artifact name]\nowner = admin\nsearch = index=foobar some | \\nsearch terms here"
    expect( Migration::StanzaParser.parse ini ).to include( search: 'index=foobar some | \\nsearch terms here' )
  end

  it 'performs validation' do
    notini = 'this is not an ini string'
    expect( Migration::StanzaParser.valid? notini ).to be_falsey
  end
end
# Specs for YamlParser: YAML documents → native Ruby structures.
describe 'Migration Yaml Parsing' do
  # YAML fixture: ssh credentials plus a list of migration sections.
  let(:yaml_string) do
    "---\nssh:\n keyfile: \"/path/to/key\"\n user: admin\nmigration:\n- search\n- eventtypes\n"
  end

  it 'parses a yaml string' do
    parsed = Migration::YamlParser.parse(yaml_string)
    expect(parsed).to include('migration' => %w(search eventtypes))
  end

  it 'is a parser' do
    # Direct superclass must be the Parser base class.
    expect(Migration::YamlParser.ancestors[1]).to eq(Migration::Parser)
  end
end
# Specs for ConfParser: splits a Splunk .conf file into stanza strings.
describe 'Migration Conf Parsing' do
  before :all do
    @str = File.read "#{ File.dirname __FILE__ }/data/sample.conf"
  end

  it 'parses a string' do
    parsed = Migration::ConfParser.parse( @str )
    expect( parsed ).to include "[Test]\naction.script = 1"
    expect( parsed ).to include "[tstb]\naction.script.filename = pagerduty_index_alert"
    expect( parsed ).to include "[Test Cee]\naction.email.sendpdf = 1"
  end

  it 'performs simple validation' do
    expect( Migration::ConfParser.valid? 'not a conf string' ).to be_falsey
  end

  it 'is a parser' do
    # Direct superclass must be the Parser base class.
    expect( Migration::ConfParser.ancestors[1] ).to eq( Migration::Parser )
  end
end
# Specs for FileParser: reads a file and delegates to the matching parser.
describe 'Migration File Parsing' do
  it 'parses conf file contents' do
    expect( Migration::FileParser.parse "#{ File.dirname( __FILE__ )}/data/sample.conf" ).to include( "[Test]\naction.script = 1" )
  end

  it 'parses yaml file contents' do
    # Post-refactor: YAML keys come back as strings, not symbols.
    expect( Migration::FileParser.parse( "#{ File.dirname( __FILE__ )}/data/sample.yml" ).keys ).to include( 'ssh' ).and include( 'migration' )
  end

  it 'performs file validation' do
    expect( Migration::FileParser.parse '/path/to/nowhere' ).to be_falsey
  end
end
# Specs for ListParser: newline-separated file lists → Array of paths.
describe 'Migration File List Parsing' do
  before :all do
    @data = "default/conf/data.conf\nlocal/conf/web.conf\nmeta/conf/local.meta\nmeta/conf/default.meta"
  end

  it 'parses a multiline string to an array' do
    %w[ default/conf/data.conf local/conf/web.conf meta/conf/local.meta meta/conf/default.meta ].each do |file|
      expect( Migration::ListParser.parse @data ).to include( file )
    end
  end

  it 'performs simple validation' do
    # Non-string input is rejected.
    expect( Migration::ListParser.valid? 123 ).to be_falsey
  end

  it 'validates the list' do
    expect( Migration::ListParser.valid? "a\nb" ).to be_truthy
  end
end
# Specs for PathParser: filesystem paths → Array of path segments.
describe 'Migration Path Parsing' do
  before :all do
    @path = '/path/to/nowhere'
  end

  it 'parses a path to an array' do
    expect( Migration::PathParser.parse @path ).to eq( %w[ path to nowhere ])
  end

  it 'validates the path' do
    expect( Migration::PathParser.valid? 'this is not a path' ).to be_falsey
  end
end
# Specs for PathHashParser: filesystem paths → nested Hash of segments.
describe 'Migration Path Hash Parsing' do
  before :all do
    @path = '/path/to/nowhere'
  end

  it 'parses a path to a hash' do
    expect( Migration::PathHashParser.parse @path ).to eq({ 'path' => { 'to' => { 'nowhere' => {} }}})
  end

  # Fixed typo in spec description: "handlles" -> "handles".
  it 'handles single element paths' do
    expect( Migration::PathHashParser.parse 'bin/' ).to eq( 'bin' => {})
  end

  it 'cannot parse a plain string' do
    expect( Migration::PathHashParser.parse 'not a path' ).to eq({})
  end

  it 'can only parse a path' do
    # Non-string input yields an empty hash as well.
    expect( Migration::PathHashParser.parse 3.14 ).to eq({})
  end
end
|
Add font Noto Sans Egyptian Hieroglyphs
# Homebrew Cask definition installing Google's Noto Sans Egyptian
# Hieroglyphs font from the upstream Noto package archive.
class FontNotoSansEgyptianHieroglyphs < Cask
  version 'latest'
  # :no_check because the upstream archive is unversioned ('latest') and
  # its contents may change.
  sha256 :no_check
  url 'https://www.google.com/get/noto/pkgs/NotoSansEgyptianHieroglyphs-unhinted.zip'
  homepage 'http://www.google.com/get/noto'
  font 'NotoSansEgyptianHieroglyphs-Regular.ttf'
end
|
require 'rprogram/program'
require 'spec_helper'
require 'classes/ls_program'
# Specs for Program: construction from a filesystem path and the
# find/find_with_path(s) lookup helpers.
describe Program do
  subject { Program.new('/usr/bin/cc') }

  it "should create a Program from a path" do
    subject.should_not be_nil
  end

  it "should derive the program name from a path" do
    subject.name.should == 'cc'
  end

  it "should return the program path when converted to a String" do
    subject.to_s.should == '/usr/bin/cc'
  end

  it "should raise an exception for invalid paths" do
    lambda {
      Program.new('/totally/doesnt/exist')
    }.should raise_error(ProgramNotFound)
  end

  it "should find a program from a path" do
    prog = Program.find_with_path('/usr/bin/cc')
    prog.should_not be_nil
  end

  it "should find a program from given paths" do
    prog = Program.find_with_paths(['/usr/bin/ls','/bin/ls'])
    prog.should_not be_nil
  end

  it "should be able to find a program based on the program names" do
    ls = nil
    # `should_not raise_error(SpecificClass)` is invalid/deprecated in
    # RSpec (it would pass if a *different* error were raised, and RSpec 3
    # rejects it outright); assert that no error at all is raised.
    lambda {
      ls = LS.find
    }.should_not raise_error
    File.executable?(ls.path).should == true
  end
end
Split a spec into two focused examples.
require 'rprogram/program'
require 'spec_helper'
require 'classes/ls_program'
# Specs for Program: construction from a filesystem path and the
# find/find_with_path(s) lookup helpers.
describe Program do
  subject { Program.new('/usr/bin/cc') }
  it "should create a Program from a path" do
    subject.should_not be_nil
  end
  it "should derive the program name from a path" do
    subject.name.should == 'cc'
  end
  it "should return the program path when converted to a String" do
    subject.to_s.should == '/usr/bin/cc'
  end
  it "should raise an exception for invalid paths" do
    lambda {
      Program.new('/totally/doesnt/exist')
    }.should raise_error(ProgramNotFound)
  end
  it "should find a program from a path" do
    prog = Program.find_with_path('/usr/bin/cc')
    prog.should_not be_nil
  end
  it "should find a program from given paths" do
    prog = Program.find_with_paths(['/usr/bin/ls','/bin/ls'])
    prog.should_not be_nil
  end
  it "should be able to find a program based on the program names" do
    # LS.find raises ProgramNotFound on failure, so reaching the next
    # line implies a real executable was located.
    ls = LS.find
    File.executable?(ls.path).should == true
  end
  it "should raise a ProgramNotFound exception if no path/name is valid" do
    lambda {
      Program.find
    }.should raise_error(ProgramNotFound)
  end
end
|
# frozen_string_literal: true
RSpec.describe 'RuboCop Project', type: :feature do
# Names of every registered cop; the internal Test and InternalAffairs
# departments are excluded from the configuration checks below.
let(:cop_names) do
  RuboCop::Cop::Cop
    .registry
    .without_department(:Test)
    .without_department(:InternalAffairs)
    .cops
    .map(&:cop_name)
end
# Validates config/default.yml: complete cop coverage, well-formed
# descriptions, version metadata, alphabetical key order, style options,
# and absence of duplicate keys.
describe 'default configuration file' do
  subject(:config) { RuboCop::ConfigLoader.load_file('config/default.yml') }

  let(:configuration_keys) { config.keys }

  it 'has configuration for all cops' do
    expect(configuration_keys).to match_array(%w[AllCops] + cop_names)
  end

  it 'has a nicely formatted description for all cops' do
    cop_names.each do |name|
      description = config[name]['Description']
      expect(description.nil?).to be(false)
      expect(description).not_to include("\n")
    end
  end

  it 'requires a nicely formatted `VersionAdded` metadata for all cops' do
    cop_names.each do |name|
      version = config[name]['VersionAdded']
      expect(version.nil?).to(be(false),
                              "VersionAdded is required for #{name}.")
      expect(version).to(match(/\A\d+\.\d+\z/),
                         "#{version} should be format ('X.Y') for #{name}.")
    end
  end

  %w[VersionChanged VersionRemoved].each do |version_type|
    it "requires a nicely formatted `#{version_type}` metadata for all cops" do
      cop_names.each do |name|
        version = config[name][version_type]
        next unless version
        expect(version).to(match(/\A\d+\.\d+\z/),
                           "#{version} should be format ('X.Y') for #{name}.")
      end
    end
  end

  it 'has a period at EOL of description' do
    cop_names.each do |name|
      description = config[name]['Description']
      expect(description).to match(/\.\z/)
    end
  end

  it 'sorts configuration keys alphabetically' do
    expected = configuration_keys.sort
    configuration_keys.each_with_index do |key, idx|
      expect(key).to eq expected[idx]
    end
  end

  it 'has a SupportedStyles for all EnforcedStyle ' \
     'and EnforcedStyle is valid' do
    errors = []
    cop_names.each do |name|
      enforced_styles = config[name]
                        .select { |key, _| key.start_with?('Enforced') }
      enforced_styles.each do |style_name, style|
        supported_key = RuboCop::Cop::Util.to_supported_styles(style_name)
        valid = config[name][supported_key]
        unless valid
          errors.push("#{supported_key} is missing for #{name}")
          next
        end
        next if valid.include?(style)
        errors.push("invalid #{style_name} '#{style}' for #{name} found")
      end
    end
    raise errors.join("\n") unless errors.empty?
  end

  # Fixed typo in the example description: 'nay' -> 'any'.
  it 'does not have any duplication' do
    fname = File.expand_path('../config/default.yml', __dir__)
    content = File.read(fname)
    RuboCop::YAMLDuplicationChecker.check(content, fname) do |key1, key2|
      raise "#{fname} has duplication of #{key1.value} " \
            "on line #{key1.start_line} and line #{key2.start_line}"
    end
  end
end
# Every cop's MSG constant should end with '.', '?', a trailing '[...]'
# annotation, or a '%s' placeholder.
describe 'cop message' do
  let(:cops) { RuboCop::Cop::Registry.all }

  # Fixed grammar in the example description: 'end with' -> 'ends with'.
  it 'ends with a period or a question mark' do
    cops.each do |cop|
      begin
        # Cops without a MSG constant (e.g. ones that build messages
        # dynamically) are skipped.
        msg = cop.const_get(:MSG)
      rescue NameError
        next
      end
      expect(msg).to match(/(?:[.?]|(?:\[.+\])|%s)$/)
    end
  end
end
# Shared checks for CHANGELOG-formatted content. Including contexts must
# define `changelog` (full file contents) and `entries` (the '*' bullet
# lines).
shared_examples 'has Changelog format' do
  let(:lines) { changelog.each_line }
  # Everything above the trailing '[@user]: url' link-reference section.
  let(:non_reference_lines) do
    lines.take_while { |line| !line.start_with?('[@') }
  end
  it 'has newline at end of file' do
    expect(changelog.end_with?("\n")).to be true
  end
  it 'has either entries, headers, or empty lines' do
    expect(non_reference_lines).to all(match(/^(\*|#|$)/))
  end
  describe 'entry' do
    it 'has a whitespace between the * and the body' do
      expect(entries).to all(match(/^\* \S/))
    end
    describe 'link to related issue' do
      # Entries beginning with a markdown link; named captures: ref
      # (link text), repo (optional 'rubocop-hq/...'), number, url.
      let(:issues) do
        entries.map do |entry|
          entry.match(%r{
            (?<=^\*\s)
            \[(?<ref>(?:(?<repo>rubocop-hq/[a-z_-]+)?\#(?<number>\d+))|.*)\]
            \((?<url>[^)]+)\)
          }x)
        end.compact
      end
      it 'has a reference' do
        issues.each do |issue|
          expect(issue[:ref].blank?).to eq(false)
        end
      end
      it 'has a valid issue number prefixed with #' do
        issues.each do |issue|
          expect(issue[:number]).to match(/^\d+$/)
        end
      end
      it 'has a valid URL' do
        issues.each do |issue|
          number = issue[:number]&.gsub(/\D/, '')
          repo = issue[:repo] || 'rubocop-hq/rubocop'
          pattern = %r{^https://github\.com/#{repo}/(?:issues|pull)/#{number}$}
          expect(issue[:url]).to match(pattern)
        end
      end
      it 'has a colon and a whitespace at the end' do
        entries_including_issue_link = entries.select do |entry|
          entry.match(/^\*\s*\[/)
        end
        expect(entries_including_issue_link).to all(include('): '))
      end
    end
    describe 'contributor name' do
      subject(:contributor_names) { lines.grep(/\A\[@/).map(&:chomp) }
      it 'has a unique contributor name' do
        expect(contributor_names.uniq.size).to eq contributor_names.size
      end
    end
    describe 'body' do
      # Entry text with code spans blanked out and the leading issue link
      # and trailing contributor link stripped.
      let(:bodies) do
        entries.map do |entry|
          entry
            .gsub(/`[^`]+`/, '``')
            .sub(/^\*\s*(?:\[.+?\):\s*)?/, '')
            .sub(/\s*\([^)]+\)$/, '')
        end
      end
      it 'does not start with a lower case' do
        bodies.each do |body|
          expect(body).not_to match(/^[a-z]/)
        end
      end
      it 'ends with a punctuation' do
        expect(bodies).to all(match(/[.!]$/))
      end
      it 'does not include a [Fix #x] directive' do
        bodies.each do |body|
          expect(body).not_to match(/\[Fix(es)? \#.*?\]/i)
        end
      end
    end
  end
end
# Applies the shared Changelog-format checks to CHANGELOG.md and to any
# pending entry files under changelog/.
describe 'Changelog' do
  subject(:changelog) do
    File.read(path)
  end
  let(:path) do
    File.join(File.dirname(__FILE__), '..', 'CHANGELOG.md')
  end
  let(:entries) { lines.grep(/^\*/).map(&:chomp) }
  include_examples 'has Changelog format'
  context 'future entries' do
    dir = File.join(File.dirname(__FILE__), '..', 'changelog')
    Dir["#{dir}/*.md"].each do |path|
      context "For #{path}" do
        # The `path` in the let body resolves to the captured loop
        # variable (locals shadow methods inside blocks).
        let(:path) { path }
        include_examples 'has Changelog format'
      end
    end
  end
  it 'has link definitions for all implicit links' do
    implicit_link_names = changelog.scan(/\[([^\]]+)\]\[\]/).flatten.uniq
    implicit_link_names.each do |name|
      expect(changelog.include?("[#{name}]: http"))
        .to be(true), "missing a link for #{name}. " \
                      'Please add this link to the bottom of the file.'
    end
  end
  # Contributor links are only enforced for entries newer than 0.14.0.
  context 'after version 0.14.0' do
    let(:lines) do
      changelog.each_line.take_while do |line|
        !line.start_with?('## 0.14.0')
      end
    end
    it 'has a link to the contributors at the end' do
      expect(entries).to all(match(/\(\[@\S+\]\[\](?:, \[@\S+\]\[\])*\)$/))
    end
  end
end
# Loading the whole gem with warnings enabled must stay warning-free.
describe 'requiring all of `lib` with verbose warnings enabled' do
  it 'emits no warnings' do
    output = `ruby -Ilib -w -W2 lib/rubocop.rb 2>&1`
    # Warnings from dependencies are ignored; only our own files count.
    own_warnings = output.lines.grep(%r{/lib/rubocop})
    expect(own_warnings).to eq []
  end
end
end
Fix project spec to accept the '<<next>>' version placeholder.
# frozen_string_literal: true
version_regexp = /\A\d+\.\d+\z|\A<<next>>\z/
RSpec.describe 'RuboCop Project', type: :feature do
# Names of every registered cop; the internal Test and InternalAffairs
# departments are excluded from the configuration checks below.
let(:cop_names) do
  RuboCop::Cop::Cop
    .registry
    .without_department(:Test)
    .without_department(:InternalAffairs)
    .cops
    .map(&:cop_name)
end
# Validates config/default.yml: complete cop coverage, well-formed
# descriptions, version metadata (allowing the '<<next>>' placeholder),
# alphabetical key order, style options, and absence of duplicate keys.
describe 'default configuration file' do
  subject(:config) { RuboCop::ConfigLoader.load_file('config/default.yml') }

  let(:configuration_keys) { config.keys }

  it 'has configuration for all cops' do
    expect(configuration_keys).to match_array(%w[AllCops] + cop_names)
  end

  it 'has a nicely formatted description for all cops' do
    cop_names.each do |name|
      description = config[name]['Description']
      expect(description.nil?).to be(false)
      expect(description).not_to include("\n")
    end
  end

  it 'requires a nicely formatted `VersionAdded` metadata for all cops' do
    cop_names.each do |name|
      version = config[name]['VersionAdded']
      expect(version.nil?).to(be(false),
                              "VersionAdded is required for #{name}.")
      expect(version).to(match(version_regexp),
                         "#{version} should be format ('X.Y' or '<<next>>') for #{name}.")
    end
  end

  %w[VersionChanged VersionRemoved].each do |version_type|
    it "requires a nicely formatted `#{version_type}` metadata for all cops" do
      cop_names.each do |name|
        version = config[name][version_type]
        next unless version
        expect(version).to(match(version_regexp),
                           "#{version} should be format ('X.Y' or '<<next>>') for #{name}.")
      end
    end
  end

  it 'has a period at EOL of description' do
    cop_names.each do |name|
      description = config[name]['Description']
      expect(description).to match(/\.\z/)
    end
  end

  it 'sorts configuration keys alphabetically' do
    expected = configuration_keys.sort
    configuration_keys.each_with_index do |key, idx|
      expect(key).to eq expected[idx]
    end
  end

  it 'has a SupportedStyles for all EnforcedStyle ' \
     'and EnforcedStyle is valid' do
    errors = []
    cop_names.each do |name|
      enforced_styles = config[name]
                        .select { |key, _| key.start_with?('Enforced') }
      enforced_styles.each do |style_name, style|
        supported_key = RuboCop::Cop::Util.to_supported_styles(style_name)
        valid = config[name][supported_key]
        unless valid
          errors.push("#{supported_key} is missing for #{name}")
          next
        end
        next if valid.include?(style)
        errors.push("invalid #{style_name} '#{style}' for #{name} found")
      end
    end
    raise errors.join("\n") unless errors.empty?
  end

  # Fixed typo in the example description: 'nay' -> 'any'.
  it 'does not have any duplication' do
    fname = File.expand_path('../config/default.yml', __dir__)
    content = File.read(fname)
    RuboCop::YAMLDuplicationChecker.check(content, fname) do |key1, key2|
      raise "#{fname} has duplication of #{key1.value} " \
            "on line #{key1.start_line} and line #{key2.start_line}"
    end
  end
end
# Every cop's MSG constant should end with '.', '?', a trailing '[...]'
# annotation, or a '%s' placeholder.
describe 'cop message' do
  let(:cops) { RuboCop::Cop::Registry.all }

  # Fixed grammar in the example description: 'end with' -> 'ends with'.
  it 'ends with a period or a question mark' do
    cops.each do |cop|
      begin
        # Cops without a MSG constant (e.g. ones that build messages
        # dynamically) are skipped.
        msg = cop.const_get(:MSG)
      rescue NameError
        next
      end
      expect(msg).to match(/(?:[.?]|(?:\[.+\])|%s)$/)
    end
  end
end
# Shared checks for CHANGELOG-formatted content. Including contexts must
# define `changelog` (full file contents) and `entries` (the '*' bullet
# lines).
shared_examples 'has Changelog format' do
  let(:lines) { changelog.each_line }
  # Everything above the trailing '[@user]: url' link-reference section.
  let(:non_reference_lines) do
    lines.take_while { |line| !line.start_with?('[@') }
  end
  it 'has newline at end of file' do
    expect(changelog.end_with?("\n")).to be true
  end
  it 'has either entries, headers, or empty lines' do
    expect(non_reference_lines).to all(match(/^(\*|#|$)/))
  end
  describe 'entry' do
    it 'has a whitespace between the * and the body' do
      expect(entries).to all(match(/^\* \S/))
    end
    describe 'link to related issue' do
      # Entries beginning with a markdown link; named captures: ref
      # (link text), repo (optional 'rubocop-hq/...'), number, url.
      let(:issues) do
        entries.map do |entry|
          entry.match(%r{
            (?<=^\*\s)
            \[(?<ref>(?:(?<repo>rubocop-hq/[a-z_-]+)?\#(?<number>\d+))|.*)\]
            \((?<url>[^)]+)\)
          }x)
        end.compact
      end
      it 'has a reference' do
        issues.each do |issue|
          expect(issue[:ref].blank?).to eq(false)
        end
      end
      it 'has a valid issue number prefixed with #' do
        issues.each do |issue|
          expect(issue[:number]).to match(/^\d+$/)
        end
      end
      it 'has a valid URL' do
        issues.each do |issue|
          number = issue[:number]&.gsub(/\D/, '')
          repo = issue[:repo] || 'rubocop-hq/rubocop'
          pattern = %r{^https://github\.com/#{repo}/(?:issues|pull)/#{number}$}
          expect(issue[:url]).to match(pattern)
        end
      end
      it 'has a colon and a whitespace at the end' do
        entries_including_issue_link = entries.select do |entry|
          entry.match(/^\*\s*\[/)
        end
        expect(entries_including_issue_link).to all(include('): '))
      end
    end
    describe 'contributor name' do
      subject(:contributor_names) { lines.grep(/\A\[@/).map(&:chomp) }
      it 'has a unique contributor name' do
        expect(contributor_names.uniq.size).to eq contributor_names.size
      end
    end
    describe 'body' do
      # Entry text with code spans blanked out and the leading issue link
      # and trailing contributor link stripped.
      let(:bodies) do
        entries.map do |entry|
          entry
            .gsub(/`[^`]+`/, '``')
            .sub(/^\*\s*(?:\[.+?\):\s*)?/, '')
            .sub(/\s*\([^)]+\)$/, '')
        end
      end
      it 'does not start with a lower case' do
        bodies.each do |body|
          expect(body).not_to match(/^[a-z]/)
        end
      end
      it 'ends with a punctuation' do
        expect(bodies).to all(match(/[.!]$/))
      end
      it 'does not include a [Fix #x] directive' do
        bodies.each do |body|
          expect(body).not_to match(/\[Fix(es)? \#.*?\]/i)
        end
      end
    end
  end
end
# Applies the shared Changelog-format checks to CHANGELOG.md and to any
# pending entry files under changelog/.
describe 'Changelog' do
  subject(:changelog) do
    File.read(path)
  end
  let(:path) do
    File.join(File.dirname(__FILE__), '..', 'CHANGELOG.md')
  end
  let(:entries) { lines.grep(/^\*/).map(&:chomp) }
  include_examples 'has Changelog format'
  context 'future entries' do
    dir = File.join(File.dirname(__FILE__), '..', 'changelog')
    Dir["#{dir}/*.md"].each do |path|
      context "For #{path}" do
        # The `path` in the let body resolves to the captured loop
        # variable (locals shadow methods inside blocks).
        let(:path) { path }
        include_examples 'has Changelog format'
      end
    end
  end
  it 'has link definitions for all implicit links' do
    implicit_link_names = changelog.scan(/\[([^\]]+)\]\[\]/).flatten.uniq
    implicit_link_names.each do |name|
      expect(changelog.include?("[#{name}]: http"))
        .to be(true), "missing a link for #{name}. " \
                      'Please add this link to the bottom of the file.'
    end
  end
  # Contributor links are only enforced for entries newer than 0.14.0.
  context 'after version 0.14.0' do
    let(:lines) do
      changelog.each_line.take_while do |line|
        !line.start_with?('## 0.14.0')
      end
    end
    it 'has a link to the contributors at the end' do
      expect(entries).to all(match(/\(\[@\S+\]\[\](?:, \[@\S+\]\[\])*\)$/))
    end
  end
end
# Loading the whole gem with warnings enabled must stay warning-free.
describe 'requiring all of `lib` with verbose warnings enabled' do
  it 'emits no warnings' do
    output = `ruby -Ilib -w -W2 lib/rubocop.rb 2>&1`
    # Warnings from dependencies are ignored; only our own files count.
    own_warnings = output.lines.grep(%r{/lib/rubocop})
    expect(own_warnings).to eq []
  end
end
end
|
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../dummy/config/environment', __FILE__)
# Refuse to run the suite against a production environment.
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'

# Auto-load custom matchers, macros, and helpers from spec/support.
ENGINE_RAILS_ROOT = File.join(File.dirname(__FILE__), '../')
Dir[File.join(ENGINE_RAILS_ROOT, 'spec/support/**/*.rb')].each { |f| require f }

# Apply pending migrations (or abort) before the suite runs.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  # Fixture files live under spec/fixtures.
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # Run every example inside a database transaction.
  config.use_transactional_fixtures = true
  config.render_views = true
  # Derive example types (controller, feature, ...) from file location.
  config.infer_spec_type_from_file_location!
  # Hide Rails-internal frames from failure backtraces.
  config.filter_rails_from_backtrace!
end
Fix missing factory_girl_rails require.
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../dummy/config/environment', __FILE__)
# Refuse to run the suite against a production environment.
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'factory_girl_rails'

# Auto-load custom matchers, macros, and helpers from spec/support.
ENGINE_RAILS_ROOT = File.join(File.dirname(__FILE__), '../')
Dir[File.join(ENGINE_RAILS_ROOT, 'spec/support/**/*.rb')].each { |f| require f }

# Apply pending migrations (or abort) before the suite runs.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  # Fixture files live under spec/fixtures.
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # Run every example inside a database transaction.
  config.use_transactional_fixtures = true
  config.render_views = true
  # Derive example types (controller, feature, ...) from file location.
  config.infer_spec_type_from_file_location!
  # Hide Rails-internal frames from failure backtraces.
  config.filter_rails_from_backtrace!
end
|
# This file is copied to spec/ when you run 'rails generate rspec:install'
# Coverage must start before any application code is loaded, otherwise
# everything required by config/environment is invisible to the report.
# SimpleCov's docs say to load and launch it at the very top of the helper.
require 'simplecov'
SimpleCov.start

ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
require 'support/factory_girl'

# On CI, report coverage results to Codecov.
if ENV['CI'] == 'true'
  require 'codecov'
  SimpleCov.formatter = SimpleCov::Formatter::Codecov
end

# Checks for pending migration and applies them before tests are run.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  # Fixture files live under spec/fixtures.
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # Run every example inside a database transaction.
  config.use_transactional_fixtures = true
  # Derive example types (model, controller, feature, ...) from location.
  config.infer_spec_type_from_file_location!
  # Hide Rails-internal frames from failure backtraces.
  config.filter_rails_from_backtrace!
  # arbitrary gems may also be filtered via:
  # config.filter_gems_from_backtrace("gem name")
end

Shoulda::Matchers.configure do |config|
  config.integrate do |with|
    with.test_framework :rspec
    with.library :rails
  end
end
SimpleCov should ignore spec files in coverage data.
# This file is copied to spec/ when you run 'rails generate rspec:install'
# Coverage must start before any application code is loaded, otherwise
# everything required by config/environment is invisible to the report.
# SimpleCov's docs say to load and launch it at the very top of the helper.
require 'simplecov'
SimpleCov.start do
  # Spec files themselves are excluded from coverage data.
  add_filter '/spec/'
end

ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
require 'support/factory_girl'

# On CI, report coverage results to Codecov.
if ENV['CI'] == 'true'
  require 'codecov'
  SimpleCov.formatter = SimpleCov::Formatter::Codecov
end

# Checks for pending migration and applies them before tests are run.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  # Fixture files live under spec/fixtures.
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # Run every example inside a database transaction.
  config.use_transactional_fixtures = true
  # Derive example types (model, controller, feature, ...) from location.
  config.infer_spec_type_from_file_location!
  # Hide Rails-internal frames from failure backtraces.
  config.filter_rails_from_backtrace!
  # arbitrary gems may also be filtered via:
  # config.filter_gems_from_backtrace("gem name")
end

Shoulda::Matchers.configure do |config|
  config.integrate do |with|
    with.test_framework :rspec
    with.library :rails
  end
end
|
# RSpec bootstrap for the dummy app: coverage reporting, Capybara with
# the Poltergeist JS driver, factories, and per-example DB cleaning.
require 'coveralls'
Coveralls.wear!

ENV['RAILS_ENV'] ||= 'test'
require 'spec_helper'
require File.expand_path('../dummy/config/environment.rb', __FILE__)
require 'rspec/rails'
require 'ffaker'
require 'shoulda/matchers'
require 'factory_girl_rails'
require 'capybara/rails'
require 'capybara/poltergeist'
require 'database_cleaner'

Capybara.javascript_driver = :poltergeist

# Auto-load custom matchers, macros, and helpers from spec/support.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }

# Fail fast when migrations are pending.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  config.include FactoryGirl::Syntax::Methods
  config.include LoginHelpers, type: :feature
  config.include Devise::TestHelpers, type: :controller

  # DatabaseCleaner replaces transactional fixtures so JS-driven feature
  # specs (which hit a separate server thread) can see committed data.
  config.use_transactional_fixtures = false
  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
  end
  config.before(:each, :js => true) do
    DatabaseCleaner.strategy = :truncation
  end
  config.before(:each) do
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end

  # Derive example types (controller, feature, ...) from file location.
  config.infer_spec_type_from_file_location!
end
Speed up tests by tweaking PhantomJS options.
# RSpec bootstrap for the dummy app: coverage reporting, Capybara with
# the Poltergeist JS driver, factories, and per-example DB cleaning.
require 'coveralls'
Coveralls.wear!

ENV['RAILS_ENV'] ||= 'test'
require 'spec_helper'
require File.expand_path('../dummy/config/environment.rb', __FILE__)
require 'rspec/rails'
require 'ffaker'
require 'shoulda/matchers'
require 'factory_girl_rails'
require 'capybara/rails'
require 'capybara/poltergeist'
require 'database_cleaner'

Capybara.register_driver(:poltergeist) do |app|
  # Skip image loading to speed up feature specs.
  Capybara::Poltergeist::Driver.new(app, phantomjs_options: ['--load-images=no'])
end
Capybara.javascript_driver = :poltergeist

# Auto-load custom matchers, macros, and helpers from spec/support.
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }

# Fail fast when migrations are pending.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  config.include FactoryGirl::Syntax::Methods
  config.include LoginHelpers, type: :feature
  config.include Devise::TestHelpers, type: :controller

  # DatabaseCleaner replaces transactional fixtures so JS-driven feature
  # specs (which hit a separate server thread) can see committed data.
  config.use_transactional_fixtures = false
  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
  end
  config.before(:each, :js => true) do
    DatabaseCleaner.strategy = :truncation
  end
  config.before(:each) do
    DatabaseCleaner.start
  end
  config.after(:each) do
    DatabaseCleaner.clean
  end

  # Derive example types (controller, feature, ...) from file location.
  config.infer_spec_type_from_file_location!
end
|
require "fileutils"
module Bosh::Workspace
describe Release do
let(:name) { "foo" }
let(:release) { load_release(release_data) }
let(:version) { "3" }
let(:url) { "http://local.url/release" }
let(:release_data) { { "name" => name, "version" => version, "git" => repo } }
let(:releases_dir) { File.join(asset_dir("manifests-repo"), ".releases") }
let(:templates) { Dir[File.join(releases_dir, name, "templates/*.yml")].to_s }
let(:callback) { proc {} }
# Builds a Release from the given spec data and, unless skip_update is set,
# immediately syncs its git checkout. Returns the Release instance.
def load_release(release_data, options = {}, skip_update = false)
  built = Release.new(release_data, releases_dir, callback, options)
  built.update_repo(options) unless skip_update
  built
end
context "with new structure within 'releases' folder" do
let(:repo) { extracted_asset_dir("foo", "foo-boshrelease-repo-new-structure.zip") }
describe "#update_repo" do
subject do
Dir[File.join(releases_dir, name, "releases/**/foo*.yml")].to_s
end
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/releases\/foo\/foo-12.yml/)
end
end
context "version from before new structure" do
before { release.update_repo }
let(:version) { "11" }
it "checks out repo" do
expect(subject).to match(/releases\/foo-11.yml/)
end
end
end
describe "attributes" do
let(:release_data) { { "name" => name, "version" => version, "git" => repo } }
let(:version) { "12" }
subject { release }
its(:name) { should eq name }
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:release_dir) { should match(/\/#{name}$/) }
its(:manifest) { should match "releases/#{name}/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version }
its(:manifest_file) do
should match(/\/releases\/#{name}\/#{name}-#{version}.yml$/)
end
context ", using a local url" do
context "with a version placeholder" do
let(:url) { "http://local.url/release?^VERSION^" }
let(:version) { "12" }
let(:release_data) { { "name" => name, "version" => version, "url" => url } }
it 'replaces the version placeholder with the version number' do
expect(release.url).to eq "http://local.url/release?12"
end
end
context 'with no version placeholder' do
let(:url) { "http://local.url/release" }
let(:version) { "12" }
let(:release_data) {{ "name" => name, "version" => version, "url" => url }}
it 'returns the same url' do
expect(release.url).to eq "http://local.url/release"
end
end
end
end
end
context "given a release with index + release v1 last touched on the same commit" do
let(:repo) { extracted_asset_dir("foo-bar", "foo-bar-boshrelease-repo.zip") }
let(:name) { "foo-bar" }
before do
FileUtils.rm_rf(releases_dir)
end
describe "#update_repo" do
subject do
Dir[File.join(releases_dir, name, "releases/foo-bar/foo-bar*.yml")].to_s
end
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/releases\/foo-bar\/foo-bar-2.yml/)
end
end
end
describe "attributes" do
let(:version) { "1" }
subject { release }
its(:name) { should eq name}
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:manifest) { should match "releases/#{name}/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version }
its(:manifest_file) do
should match(/\/releases\/#{name}\/#{name}-#{version}.yml$/)
end
end
end
context "given a release with submodule templates" do
let(:repo) { extracted_asset_dir("supermodule", "supermodule-boshrelease-repo.zip") }
let(:subrepo) { extracted_asset_dir("submodule-boshrelease", "submodule-boshrelease-repo.zip") }
let(:name) { "supermodule" }
let(:version) { "2" }
before do
FileUtils.rm_rf(releases_dir)
allow_any_instance_of(Rugged::Submodule).to receive(:url).and_return(subrepo)
end
describe "#update_repo" do
subject { Rugged::Repository.new(File.join(releases_dir, name)) }
context "with templates in submodules" do
before do
release = load_release("name" => name, "version" => 1, "git" => repo)
end
it "clones + checks out required submodules" do
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "2244c436777f7c305fb81a8a6e29079c92a2ab9d"
end
it "doesn't clone/checkout extraneous submodules" do
expect(subject.submodules["src/other"].workdir_oid).to eq nil
end
end
context "with templates in submodules" do
before { load_release(release_data) }
it "clones + checks out required submodules" do
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "95eed8c967af969d659a766b0551a75a729a7b65"
end
it "doesn't clone/checkout extraneous submodules" do
expect(subject.submodules["src/other"].workdir_oid).to eq nil
end
end
context "from v1 to v2" do
let(:version) { 1 }
before { load_release(release_data) }
it "updates the submodules appropriately" do
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "2244c436777f7c305fb81a8a6e29079c92a2ab9d"
expect(subject.submodules["src/other"].workdir_oid).to eq nil
# Now move to v2 on existing repo
release = load_release("name" => name, "version" => 2, "git" => repo)
release.update_repo
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "95eed8c967af969d659a766b0551a75a729a7b65"
expect(subject.submodules["src/other"].workdir_oid).to eq nil
end
end
context "while being offline" do
subject { load_release(release_data, offline: true) }
it 'fails when repo does not yet exist' do
expect{ subject }.to raise_error /not allowed in offline mode/
end
context "with an already cloned release" do
before { load_release(release_data) }
it 'validates local data exists' do
expect{ subject }.to_not raise_error
end
context "when using latest version" do
let(:version) { "latest" }
subject { load_release(release_data, { offline: true }, true) }
it 'warns when using latest while offline' do
expect(subject).to receive(:warning).with(/using 'latest' local/i)
subject.update_repo
end
end
end
end
end
end
context "given a release with deprecated structure within 'releases' folder" do
let(:repo) { extracted_asset_dir("foo", "foo-boshrelease-repo.zip") }
describe "#update_repo" do
subject { Dir[File.join(releases_dir, name, "releases/foo*.yml")].to_s }
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/foo-12.yml/)
end
it "does not include templates from master" do
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "12" }
before { release.update_repo }
it "checks out repo" do
expect(subject).to match(/foo-11.yml/)
end
it "does not include templates from master" do
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "2" }
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
end
end
context "specific ref with latest release" do
let(:release_data) do
{"name" => name, "version" => "latest", "ref" => "66658", "git" => repo}
end
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
expect(subject).to_not match(/foo-3.yml/)
end
end
context "updated version" do
let(:version) { "11" }
it "checks out file with multiple commits" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
end
end
context "non existing version " do
let(:version) { "13" }
it "raises an error" do
expect { release.update_repo }
.to raise_error(/Could not find version/)
end
end
context "already cloned repo" do
before do
load_release("name" => name, "version" => 1, "git" => repo).update_repo
end
it "version 3" do
release.update_repo
expect(subject).to match(/foo-3.yml/)
end
end
context "multiple releases" do
let(:version) { "11" }
before do
load_release("name" => "foo", "version" => 2, "git" => repo).update_repo
end
it "version 11" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "11" }
before { release.update_repo }
it "checks out repo" do
expect(subject).to match(/foo-11.yml/)
end
it "does not include templates from master" do
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "2" }
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
end
end
context "specific ref with latest release" do
let(:release_data) do
{"name" => name, "version" => "latest", "ref" => "66658", "git" => repo}
end
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
expect(subject).to_not match(/foo-3.yml/)
end
end
context "updated version " do
let(:version) { "11" }
it "checks out file with multiple commits" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
end
end
# Requesting a version absent from the release index must fail loudly.
context "non existing version " do
  let(:version) { "13" }
  it "raises an error" do
    # Trigger the failure via update_repo explicitly, matching the sibling
    # context above, instead of relying on the side effect of lazily
    # evaluating the `release` let through an unrelated accessor.
    expect { release.update_repo }
      .to raise_error(/Could not find version/)
  end
end
context "already cloned repo" do
before do
load_release("name" => name, "version" => 1, "git" => repo).update_repo
end
it "version 3" do
release.update_repo
expect(subject).to match(/foo-3.yml/)
end
end
context "multiple releases" do
let(:version) { "11" }
before do
load_release("name" => "foo", "version" => 2, "git" => repo).update_repo
end
it "version 11" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
expect(templates).to_not match(/deployment.yml/)
end
end
context "new release in already cloned repo" do
let(:version) { "12" }
before do
load_release("name" => name, "version" => 1, "git" => repo).update_repo
extracted_asset_dir("foo", "foo-boshrelease-repo-updated.zip")
end
it "version 12" do
release.update_repo
expect(subject).to match(/foo-12.yml/)
end
end
end
describe "attributes" do
subject { release }
its(:name) { should eq name }
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:release_dir) { should match(/\/#{name}$/) }
its(:manifest_file) { should match(/\/#{name}-#{version}.yml$/) }
its(:manifest) { should match "releases/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version.to_s }
end
end
context "given a release which is located in a subfolder" do
let(:repo) { extracted_asset_dir("foo", "foo-boshrelease-repo-subdir.zip") }
let(:release_data) do
{ "name" => name, "version" => version, "git" => repo, "path" => "release" }
end
describe "#update_repo" do
subject do
Dir[File.join(releases_dir, name, "release/releases/**/*.yml")].to_s
end
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/release\/releases\/foo-12.yml/)
end
end
end
describe "attributes" do
let(:version) { "12" }
subject { release }
its(:name) { should eq name }
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:release_dir) { should match(/\/#{name}\/release$/) }
its(:manifest) { should match "release/releases/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version }
its(:manifest_file) do
should match(/\/release\/releases\/#{name}-#{version}.yml$/)
end
end
end
# Verifies checkout plumbing against a fully stubbed Rugged repository, so
# the expectations hold regardless of which specs ran (and cloned which
# assets) earlier — see "Fixing specs with different run ordering" (#94).
context "correct checkout behavior:" do
  let(:release_data) { { "name" => name, "version" => version,
                         "git" => repo, "ref" => :fooref } }
  # A plain string suffices: Rugged::Repository is stubbed below, so no real
  # on-disk asset needs to be extracted. (The previous duplicate let(:repo)
  # extracting a zip was shadowed by this one and only wasted work.)
  let(:repo) { 'foo/bar' }
  let(:repository) do
    instance_double('Rugged::Repository', lookup: double(oid: :fooref))
  end
  describe "#update_repo_with_ref" do
    subject do
      Release.new(release_data, releases_dir, callback)
    end
    before do
      expect(Rugged::Repository).to receive(:new)
        .and_return(repository).at_least(:once)
      expect(repository).to receive(:fetch)
      expect(repository).to receive(:references) do
        { 'refs/remotes/origin/HEAD' =>
          double(resolve: double(target_id: :oid)) }
      end
      # Pretend the repo is already cloned; otherwise the code path taken
      # depends on leftovers from previously run specs (run-order flakiness).
      allow(subject).to receive(:repo_exists?).and_return(true)
    end
    it "calls checkout_tree and checkout" do
      expect(repository).to receive("checkout_tree").at_least(:once)
      expect(repository).to receive("checkout").at_least(:once)
      subject.update_repo
    end
  end
end
context "given a release which moved a directory to a symlink across versions" do
let(:repo) do
extracted_asset_dir("symlinkreplacement", "symlinkreplacement-boshrelease-repo.zip")
end
let(:name) { "symlinkreplacement" }
describe "#update_repo" do
subject { Rugged::Repository.new(File.join(releases_dir, name)) }
context "using a previous version should work" do
before do
FileUtils.rm_rf(releases_dir)
release = load_release("name" => name, "version" => "1", "git" => repo)
release.update_repo
end
it "git state is happy" do
expect(subject.head.target.oid).to eq "d96521d1940934b1941e0f4a462d3a5e9f31c75d"
expect(subject.diff_workdir(subject.head.target.oid).size).to eq 0
end
end
end
end
end
end
Fix specs that fail when run in a different order (fixes #94)
Signed-off-by: Ruben Koster <90c76174248417950ce4b0ced982f565a9b87432@starkandwayne.com>
require "fileutils"
module Bosh::Workspace
describe Release do
let(:name) { "foo" }
let(:release) { load_release(release_data) }
let(:version) { "3" }
let(:url) { "http://local.url/release" }
let(:release_data) { { "name" => name, "version" => version, "git" => repo } }
let(:releases_dir) { File.join(asset_dir("manifests-repo"), ".releases") }
let(:templates) { Dir[File.join(releases_dir, name, "templates/*.yml")].to_s }
let(:callback) { proc {} }
# Constructs a Release for the supplied spec data; the freshly built release
# has its repository checkout updated unless skip_update is true.
def load_release(release_data, options = {}, skip_update = false)
  release = Release.new(release_data, releases_dir, callback, options)
  release.update_repo(options) unless skip_update
  release
end
context "with new structure within 'releases' folder" do
let(:repo) { extracted_asset_dir("foo", "foo-boshrelease-repo-new-structure.zip") }
describe "#update_repo" do
subject do
Dir[File.join(releases_dir, name, "releases/**/foo*.yml")].to_s
end
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/releases\/foo\/foo-12.yml/)
end
end
context "version from before new structure" do
before { release.update_repo }
let(:version) { "11" }
it "checks out repo" do
expect(subject).to match(/releases\/foo-11.yml/)
end
end
end
describe "attributes" do
let(:release_data) { { "name" => name, "version" => version, "git" => repo } }
let(:version) { "12" }
subject { release }
its(:name) { should eq name }
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:release_dir) { should match(/\/#{name}$/) }
its(:manifest) { should match "releases/#{name}/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version }
its(:manifest_file) do
should match(/\/releases\/#{name}\/#{name}-#{version}.yml$/)
end
context ", using a local url" do
context "with a version placeholder" do
let(:url) { "http://local.url/release?^VERSION^" }
let(:version) { "12" }
let(:release_data) { { "name" => name, "version" => version, "url" => url } }
it 'replaces the version placeholder with the version number' do
expect(release.url).to eq "http://local.url/release?12"
end
end
context 'with no version placeholder' do
let(:url) { "http://local.url/release" }
let(:version) { "12" }
let(:release_data) {{ "name" => name, "version" => version, "url" => url }}
it 'returns the same url' do
expect(release.url).to eq "http://local.url/release"
end
end
end
end
end
context "given a release with index + release v1 last touched on the same commit" do
let(:repo) { extracted_asset_dir("foo-bar", "foo-bar-boshrelease-repo.zip") }
let(:name) { "foo-bar" }
before do
FileUtils.rm_rf(releases_dir)
end
describe "#update_repo" do
subject do
Dir[File.join(releases_dir, name, "releases/foo-bar/foo-bar*.yml")].to_s
end
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/releases\/foo-bar\/foo-bar-2.yml/)
end
end
end
describe "attributes" do
let(:version) { "1" }
subject { release }
its(:name) { should eq name}
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:manifest) { should match "releases/#{name}/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version }
its(:manifest_file) do
should match(/\/releases\/#{name}\/#{name}-#{version}.yml$/)
end
end
end
context "given a release with submodule templates" do
let(:repo) { extracted_asset_dir("supermodule", "supermodule-boshrelease-repo.zip") }
let(:subrepo) { extracted_asset_dir("submodule-boshrelease", "submodule-boshrelease-repo.zip") }
let(:name) { "supermodule" }
let(:version) { "2" }
before do
FileUtils.rm_rf(releases_dir)
allow_any_instance_of(Rugged::Submodule).to receive(:url).and_return(subrepo)
end
describe "#update_repo" do
subject { Rugged::Repository.new(File.join(releases_dir, name)) }
context "with templates in submodules" do
before do
release = load_release("name" => name, "version" => 1, "git" => repo)
end
it "clones + checks out required submodules" do
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "2244c436777f7c305fb81a8a6e29079c92a2ab9d"
end
it "doesn't clone/checkout extraneous submodules" do
expect(subject.submodules["src/other"].workdir_oid).to eq nil
end
end
context "with templates in submodules" do
before { load_release(release_data) }
it "clones + checks out required submodules" do
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "95eed8c967af969d659a766b0551a75a729a7b65"
end
it "doesn't clone/checkout extraneous submodules" do
expect(subject.submodules["src/other"].workdir_oid).to eq nil
end
end
context "from v1 to v2" do
let(:version) { 1 }
before { load_release(release_data) }
it "updates the submodules appropriately" do
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "2244c436777f7c305fb81a8a6e29079c92a2ab9d"
expect(subject.submodules["src/other"].workdir_oid).to eq nil
# Now move to v2 on existing repo
release = load_release("name" => name, "version" => 2, "git" => repo)
release.update_repo
expect(subject.submodules["src/submodule"].workdir_oid)
.to eq "95eed8c967af969d659a766b0551a75a729a7b65"
expect(subject.submodules["src/other"].workdir_oid).to eq nil
end
end
context "while being offline" do
subject { load_release(release_data, offline: true) }
it 'fails when repo does not yet exist' do
expect{ subject }.to raise_error /not allowed in offline mode/
end
context "with an already cloned release" do
before { load_release(release_data) }
it 'validates local data exists' do
expect{ subject }.to_not raise_error
end
context "when using latest version" do
let(:version) { "latest" }
subject { load_release(release_data, { offline: true }, true) }
it 'warns when using latest while offline' do
expect(subject).to receive(:warning).with(/using 'latest' local/i)
subject.update_repo
end
end
end
end
end
end
context "given a release with deprecated structure within 'releases' folder" do
let(:repo) { extracted_asset_dir("foo", "foo-boshrelease-repo.zip") }
describe "#update_repo" do
subject { Dir[File.join(releases_dir, name, "releases/foo*.yml")].to_s }
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/foo-12.yml/)
end
it "does not include templates from master" do
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "12" }
before { release.update_repo }
it "checks out repo" do
expect(subject).to match(/foo-11.yml/)
end
it "does not include templates from master" do
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "2" }
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
end
end
context "specific ref with latest release" do
let(:release_data) do
{"name" => name, "version" => "latest", "ref" => "66658", "git" => repo}
end
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
expect(subject).to_not match(/foo-3.yml/)
end
end
context "updated version" do
let(:version) { "11" }
it "checks out file with multiple commits" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
end
end
context "non existing version " do
let(:version) { "13" }
it "raises an error" do
expect { release.update_repo }
.to raise_error(/Could not find version/)
end
end
context "already cloned repo" do
before do
load_release("name" => name, "version" => 1, "git" => repo).update_repo
end
it "version 3" do
release.update_repo
expect(subject).to match(/foo-3.yml/)
end
end
context "multiple releases" do
let(:version) { "11" }
before do
load_release("name" => "foo", "version" => 2, "git" => repo).update_repo
end
it "version 11" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "11" }
before { release.update_repo }
it "checks out repo" do
expect(subject).to match(/foo-11.yml/)
end
it "does not include templates from master" do
expect(templates).to_not match(/deployment.yml/)
end
end
context "specific version" do
let(:version) { "2" }
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
end
end
context "specific ref with latest release" do
let(:release_data) do
{"name" => name, "version" => "latest", "ref" => "66658", "git" => repo}
end
it "checks out repo" do
release.update_repo
expect(subject).to match(/foo-2.yml/)
expect(subject).to_not match(/foo-3.yml/)
end
end
context "updated version " do
let(:version) { "11" }
it "checks out file with multiple commits" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
end
end
context "non existing version " do
let(:version) { "13" }
it "raises an error" do
expect { release.version }.
to raise_error(/Could not find version/)
end
end
context "already cloned repo" do
before do
load_release("name" => name, "version" => 1, "git" => repo).update_repo
end
it "version 3" do
release.update_repo
expect(subject).to match(/foo-3.yml/)
end
end
context "multiple releases" do
let(:version) { "11" }
before do
load_release("name" => "foo", "version" => 2, "git" => repo).update_repo
end
it "version 11" do
release.update_repo
expect(subject).to match(/foo-11.yml/)
expect(templates).to_not match(/deployment.yml/)
end
end
context "new release in already cloned repo" do
let(:version) { "12" }
before do
load_release("name" => name, "version" => 1, "git" => repo).update_repo
extracted_asset_dir("foo", "foo-boshrelease-repo-updated.zip")
end
it "version 12" do
release.update_repo
expect(subject).to match(/foo-12.yml/)
end
end
end
describe "attributes" do
subject { release }
its(:name) { should eq name }
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:release_dir) { should match(/\/#{name}$/) }
its(:manifest_file) { should match(/\/#{name}-#{version}.yml$/) }
its(:manifest) { should match "releases/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version.to_s }
end
end
context "given a release which is located in a subfolder" do
let(:repo) { extracted_asset_dir("foo", "foo-boshrelease-repo-subdir.zip") }
let(:release_data) do
{ "name" => name, "version" => version, "git" => repo, "path" => "release" }
end
describe "#update_repo" do
subject do
Dir[File.join(releases_dir, name, "release/releases/**/*.yml")].to_s
end
context "latest version" do
before { release.update_repo }
let(:version) { "latest" }
it "checks out repo" do
expect(subject).to match(/release\/releases\/foo-12.yml/)
end
end
end
describe "attributes" do
let(:version) { "12" }
subject { release }
its(:name) { should eq name }
its(:git_url) { should eq repo }
its(:repo_dir) { should match(/\/#{name}$/) }
its(:release_dir) { should match(/\/#{name}\/release$/) }
its(:manifest) { should match "release/releases/#{name}-#{version}.yml$" }
its(:name_version) { should eq "#{name}/#{version}" }
its(:version) { should eq version }
its(:manifest_file) do
should match(/\/release\/releases\/#{name}-#{version}.yml$/)
end
end
end
context "correct checkout behavior:" do
let(:release_data) { { "name" => name, "version" => version,
"git" => repo, "ref" => :fooref } }
let(:repo) { 'foo/bar' }
let(:repository) do
instance_double('Rugged::Repository', lookup: double(oid: :fooref))
end
describe "#update_repo_with_ref" do
subject do
Release.new(release_data, releases_dir, callback)
end
before do
expect(Rugged::Repository).to receive(:new)
.and_return(repository).at_least(:once)
expect(repository).to receive(:fetch)
expect(repository).to receive(:references) do
{ 'refs/remotes/origin/HEAD' =>
double(resolve: double(target_id: :oid)) }
end
allow(subject).to receive(:repo_exists?).and_return(true)
end
it "calls checkout_tree and checkout" do
expect(repository).to receive("checkout_tree").at_least(:once)
expect(repository).to receive("checkout").at_least(:once)
subject.update_repo
end
end
end
context "given a release which moved a directory to a symlink across versions" do
let(:repo) do
extracted_asset_dir("symlinkreplacement", "symlinkreplacement-boshrelease-repo.zip")
end
let(:name) { "symlinkreplacement" }
describe "#update_repo" do
subject { Rugged::Repository.new(File.join(releases_dir, name)) }
context "using a previous version should work" do
before do
FileUtils.rm_rf(releases_dir)
release = load_release("name" => name, "version" => "1", "git" => repo)
release.update_repo
end
it "git state is happy" do
expect(subject.head.target.oid).to eq "d96521d1940934b1941e0f4a462d3a5e9f31c75d"
expect(subject.diff_workdir(subject.head.target.oid).size).to eq 0
end
end
end
end
end
end
|
# -*- encoding: utf-8 -*-
require File.join(File.dirname(__FILE__), "/spec_helper")
describe MetaInspector::Request do
describe "read" do
it "should return the content of the page" do
page_request = MetaInspector::Request.new(url('http://pagerankalert.com'))
page_request.read[0..14].should == "<!DOCTYPE html>"
end
end
describe "content_type" do
it "should return the correct content type of the url for html pages" do
page_request = MetaInspector::Request.new(url('http://pagerankalert.com'))
page_request.content_type.should == "text/html"
end
it "should return the correct content type of the url for non html pages" do
image_request = MetaInspector::Request.new(url('http://pagerankalert.com/image.png'))
image_request.content_type.should == "image/png"
end
end
describe 'exception handling' do
let(:logger) { MetaInspector::ExceptionLog.new }
before(:each) do
FakeWeb.allow_net_connect = true
end
after(:each) do
FakeWeb.allow_net_connect = false
end
it "should handle timeouts" do
logger.should receive(:<<).with(an_instance_of(Timeout::Error))
MetaInspector::Request.new(url('http://example.com/timeout'), timeout: 0.0000000000000000001, exception_log: logger)
end
it "should handle socket errors" do
  # Stub the socket layer so this spec never performs a real DNS lookup:
  # some resolvers answer for unknown hosts, which made the bogus-hostname
  # approach below flaky depending on network conditions.
  TCPSocket.stub(:open).and_raise(SocketError)
  logger.should receive(:<<).with(an_instance_of(SocketError))
  MetaInspector::Request.new(url('http://caca232dsdsaer3sdsd-asd343.org'), exception_log: logger)
end
end
private
# Wraps a raw URL string in a MetaInspector::URL, the type Request expects.
def url(initial_url)
MetaInspector::URL.new(initial_url)
end
end
Make the spec independent of network conditions
# -*- encoding: utf-8 -*-
require File.join(File.dirname(__FILE__), "/spec_helper")
describe MetaInspector::Request do
describe "read" do
it "should return the content of the page" do
page_request = MetaInspector::Request.new(url('http://pagerankalert.com'))
page_request.read[0..14].should == "<!DOCTYPE html>"
end
end
describe "content_type" do
it "should return the correct content type of the url for html pages" do
page_request = MetaInspector::Request.new(url('http://pagerankalert.com'))
page_request.content_type.should == "text/html"
end
it "should return the correct content type of the url for non html pages" do
image_request = MetaInspector::Request.new(url('http://pagerankalert.com/image.png'))
image_request.content_type.should == "image/png"
end
end
describe 'exception handling' do
let(:logger) { MetaInspector::ExceptionLog.new }
before(:each) do
FakeWeb.allow_net_connect = true
end
after(:each) do
FakeWeb.allow_net_connect = false
end
it "should handle timeouts" do
logger.should receive(:<<).with(an_instance_of(Timeout::Error))
MetaInspector::Request.new(url('http://example.com/timeout'), timeout: 0.0000000000000000001, exception_log: logger)
end
it "should handle socket errors" do
# Stubbing TCPSocket keeps this deterministic: no real DNS lookup is made
# for the (intentionally bogus) host below.
TCPSocket.stub(:open).and_raise(SocketError)
logger.should receive(:<<).with(an_instance_of(SocketError))
MetaInspector::Request.new(url('http://caca232dsdsaer3sdsd-asd343.org'), exception_log: logger)
end
end
private
# Builds the MetaInspector::URL value object that Request takes as input.
def url(initial_url)
MetaInspector::URL.new(initial_url)
end
end
|
# encoding: utf-8
require 'logger'
require 'socket'
require 'girl_friday'
require 'redis'
require 'active_support/core_ext/object'
require 'active_support/json/encoding'
require 'rollbar/item'
require 'ostruct'
# Optional delayed-delivery backends: each require is wrapped in its own
# begin/rescue LoadError so a missing gem merely disables that integration
# (and its specs) instead of breaking the whole suite. Intentional swallow.
begin
require 'rollbar/delay/sidekiq'
require 'rollbar/delay/sucker_punch'
rescue LoadError
end
begin
require 'sucker_punch'
# Inline testing mode runs SuckerPunch jobs synchronously inside specs.
require 'sucker_punch/testing/inline'
rescue LoadError
end
begin
require 'rollbar/delay/shoryuken'
rescue LoadError
end
require 'spec_helper'
describe Rollbar do
let(:notifier) { Rollbar.notifier }
before do
Rollbar.clear_notifier!
configure
end
context 'when notifier has been used before configure it' do
before do
Rollbar.clear_notifier!
end
it 'is finally reset' do
Rollbar.log_debug('Testing notifier')
expect(Rollbar.error('error message')).to be_eql('disabled')
reconfigure_notifier
expect(Rollbar.error('error message')).not_to be_eql('disabled')
end
end
shared_examples 'stores the root notifier' do
end
describe '.configure' do
before { Rollbar.clear_notifier! }
it 'stores the root notifier' do
Rollbar.configure { |c| }
expect(Rollbar.root_notifier).to be(Rollbar.notifier)
end
end
describe '.preconfigure' do
before { Rollbar.clear_notifier! }
it 'stores the root notifier' do
Rollbar.preconfigure { |c| }
expect(Rollbar.root_notifier).to be(Rollbar.notifier)
end
end
describe '.reconfigure' do
before { Rollbar.clear_notifier! }
it 'stores the root notifier' do
Rollbar.reconfigure { |c| }
expect(Rollbar.root_notifier).to be(Rollbar.notifier)
end
end
describe '.unconfigure' do
before { Rollbar.clear_notifier! }
it 'stores the root notifier' do
expect(Rollbar.root_notifier).to receive(:unconfigure)
Rollbar.unconfigure
expect(Rollbar.root_notifier).to be(Rollbar.notifier)
end
end
context 'Notifier' do
describe '#log' do
let(:exception) do
begin
foo = bar
rescue => e
e
end
end
let(:configuration) { Rollbar.configuration }
context 'executing a Thread before Rollbar is configured' do
before do
Rollbar.clear_notifier!
Thread.new {}
Rollbar.configure do |config|
config.access_token = 'my-access-token'
end
end
it 'sets correct configuration for Rollbar.notifier' do
expect(Rollbar.notifier.configuration.enabled).to be_truthy
end
end
it 'should report a simple message' do
expect(notifier).to receive(:report).with('error', 'test message', nil, nil, nil)
notifier.log('error', 'test message')
end
it 'should report a simple message with extra data' do
extra_data = {:key => 'value', :hash => {:inner_key => 'inner_value'}}
expect(notifier).to receive(:report).with('error', 'test message', nil, extra_data, nil)
notifier.log('error', 'test message', extra_data)
end
it 'should report an exception' do
expect(notifier).to receive(:report).with('error', nil, exception, nil, nil)
notifier.log('error', exception)
end
it 'should report an exception with extra data' do
extra_data = {:key => 'value', :hash => {:inner_key => 'inner_value'}}
expect(notifier).to receive(:report).with('error', nil, exception, extra_data, nil)
notifier.log('error', exception, extra_data)
end
it 'should report an exception with a description' do
expect(notifier).to receive(:report).with('error', 'exception description', exception, nil, nil)
notifier.log('error', exception, 'exception description')
end
it 'should report an exception with a description and extra data' do
extra_data = {:key => 'value', :hash => {:inner_key => 'inner_value'}}
expect(notifier).to receive(:report).with('error', 'exception description', exception, extra_data, nil)
notifier.log('error', exception, extra_data, 'exception description')
end
# The :on_error_response hook must be invoked when the Rollbar API returns
# a non-200 status code.
context 'with :on_error_response hook configured' do
  let!(:notifier) { Rollbar::Notifier.new }
  let(:configuration) do
    config = Rollbar::Configuration.new
    config.access_token = test_access_token
    config.enabled = true
    # Use the block's implicit return value. A `return` inside this proc
    # would raise LocalJumpError if the hook were ever actually called
    # (here it is only stubbed, but the latent bug is removed anyway).
    config.hook :on_error_response do |response|
      ":on_error_response executed"
    end
    config
  end
  let(:message) { 'foo' }
  let(:level) { 'foo' }

  before do
    notifier.configuration = configuration
    # Force every HTTP request to come back as a 500 so the error path runs.
    allow_any_instance_of(Net::HTTP).to receive(:request).and_return(OpenStruct.new(:code => 500, :body => "Error"))
    @uri = URI.parse(Rollbar::Configuration::DEFAULT_ENDPOINT)
  end

  it "calls the :on_error_response hook if response status is not 200" do
    expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, nil, nil, nil, nil).and_call_original
    expect(notifier.configuration.hook(:on_error_response)).to receive(:call)

    notifier.log(level, message)
  end
end
# The :on_report_internal_error hook must be invoked when reporting itself
# raises an unexpected error.
context 'with :on_report_internal_error hook configured' do
  let!(:notifier) { Rollbar::Notifier.new }
  let(:configuration) do
    config = Rollbar::Configuration.new
    config.access_token = test_access_token
    config.enabled = true
    # Use the block's implicit return value. A `return` inside this proc
    # would raise LocalJumpError if the hook were ever actually called
    # (here it is only stubbed, but the latent bug is removed anyway).
    config.hook :on_report_internal_error do |response|
      ":on_report_internal_error executed"
    end
    config
  end
  let(:message) { 'foo' }
  let(:level) { 'foo' }

  before do
    notifier.configuration = configuration
  end

  # Description was previously truncated ("...hook if").
  it "calls the :on_report_internal_error hook if reporting raises an internal error" do
    expect(notifier.configuration.hook(:on_report_internal_error)).to receive(:call)
    expect(notifier).to receive(:report) do
      raise StandardError.new
    end

    notifier.log(level, message)
  end
end
# When an item carries :custom_data_method_context, the configured
# custom_data_method receives it as its third argument, and the key is
# stripped from the reported extra payload.
context 'an item with a context' do
  let(:context) { { :controller => 'ExampleController' } }

  context 'with a custom_data_method configured' do
    before do
      Rollbar.configure do |config|
        config.custom_data_method = lambda do |message, exception, context|
          { :result => "MyApp#" + context[:controller] }
        end
      end
    end

    it 'should have access to the context data through configuration.custom_data_method' do
      result = notifier.log('error', "Custom message", { :custom_data_method_context => context})

      result[:body][:message][:extra].should_not be_nil
      result[:body][:message][:extra][:result].should == "MyApp#"+context[:controller]
      result[:body][:message][:extra][:custom_data_method_context].should be_nil
    end
  end
end
end
context 'with before_process handlers in configuration' do
let!(:notifier) { Rollbar::Notifier.new }
let(:scope) { { :bar => :foo } }
let(:configuration) do
config = Rollbar::Configuration.new
config.access_token = test_access_token
config.enabled = true
config
end
let(:message) { 'message' }
let(:exception) { Exception.new }
let(:extra) { {:foo => :bar } }
let(:level) { 'error' }
before do
notifier.configuration = configuration
notifier.scope!(scope)
end
# before_process handlers run before #report; raising Rollbar::Ignore in
# any handler aborts processing and makes #log return 'ignored'.
context 'without raise Rollbar::Ignore' do
  let(:handler) do
    proc do |options|
    end
  end

  before do
    configuration.before_process = handler
  end

  it 'calls the handler with the correct options' do
    options = {
      :level => level,
      :scope => Rollbar::LazyStore.new(scope),
      :exception => exception,
      :message => message,
      :extra => extra
    }

    expect(handler).to receive(:call).with(options)
    expect(notifier).to receive(:report).with(level, message, exception, extra, nil)

    notifier.log(level, message, exception, extra)
  end
end

context 'raising Rollbar::Ignore in the handler' do
  let(:handler) do
    proc do |options|
      raise Rollbar::Ignore
    end
  end

  before do
    configuration.before_process = handler
  end

  it "calls the handler with correct options and doesn't call #report" do
    options = {
      :level => level,
      :scope => Rollbar::LazyStore.new(scope),
      :exception => exception,
      :message => message,
      :extra => extra
    }

    expect(handler).to receive(:call).with(options).and_call_original
    expect(notifier).not_to receive(:report)

    result = notifier.log(level, message, exception, extra)

    expect(result).to be_eql('ignored')
  end
end

context 'with 2 handlers, raising Rollbar::Ignore in the first one' do
  let(:handler1) do
    proc do |options|
      raise Rollbar::Ignore
    end
  end
  let(:handler2) do
    proc do |options|
    end
  end

  before do
    configuration.before_process << handler1
    configuration.before_process << handler2
  end

  it "calls only the first handler and doesn't calls #report" do
    options = {
      :level => level,
      :scope => Rollbar::LazyStore.new(scope),
      :exception => exception,
      :message => message,
      :extra => extra
    }

    expect(handler1).to receive(:call).with(options).and_call_original
    expect(handler2).not_to receive(:call)
    expect(notifier).not_to receive(:report)

    result = notifier.log(level, message, exception, extra)

    expect(result).to be_eql('ignored')
  end

  context 'if the first handler fails' do
    # NOTE: this let shadows the outer :exception let on purpose — the
    # handler raises it, and the error message below interpolates it.
    let(:exception) { StandardError.new('foo') }
    let(:handler1) do
      proc { |options| raise exception }
    end

    it 'doesnt call the second handler and logs the error' do
      expect(handler2).not_to receive(:call)
      expect(notifier).to receive(:log_error).with("[Rollbar] Error calling the `before_process` hook: #{exception}")

      notifier.log(level, message, exception, extra)
    end
  end
end
end
# The level helper methods (debug/info/warning/error/critical) should all
# delegate to #report with their own level name.
context 'debug/info/warning/error/critical' do
  # A real NameError with a populated backtrace (foo = bar is undefined).
  let(:exception) do
    begin
      foo = bar
    rescue => e
      e
    end
  end

  let(:extra_data) { {:key => 'value', :hash => {:inner_key => 'inner_value'}} }

  it 'should report with a debug level' do
    expect(notifier).to receive(:report).with('debug', nil, exception, nil, nil)
    notifier.debug(exception)

    expect(notifier).to receive(:report).with('debug', 'description', exception, nil, nil)
    notifier.debug(exception, 'description')

    expect(notifier).to receive(:report).with('debug', 'description', exception, extra_data, nil)
    notifier.debug(exception, 'description', extra_data)
  end

  it 'should report with an info level' do
    expect(notifier).to receive(:report).with('info', nil, exception, nil, nil)
    notifier.info(exception)

    expect(notifier).to receive(:report).with('info', 'description', exception, nil, nil)
    notifier.info(exception, 'description')

    expect(notifier).to receive(:report).with('info', 'description', exception, extra_data, nil)
    notifier.info(exception, 'description', extra_data)
  end

  it 'should report with a warning level' do
    expect(notifier).to receive(:report).with('warning', nil, exception, nil, nil)
    notifier.warning(exception)

    expect(notifier).to receive(:report).with('warning', 'description', exception, nil, nil)
    notifier.warning(exception, 'description')

    expect(notifier).to receive(:report).with('warning', 'description', exception, extra_data, nil)
    notifier.warning(exception, 'description', extra_data)
  end

  it 'should report with an error level' do
    expect(notifier).to receive(:report).with('error', nil, exception, nil, nil)
    notifier.error(exception)

    expect(notifier).to receive(:report).with('error', 'description', exception, nil, nil)
    notifier.error(exception, 'description')

    expect(notifier).to receive(:report).with('error', 'description', exception, extra_data, nil)
    notifier.error(exception, 'description', extra_data)
  end

  it 'should report with a critical level' do
    expect(notifier).to receive(:report).with('critical', nil, exception, nil, nil)
    notifier.critical(exception)

    expect(notifier).to receive(:report).with('critical', 'description', exception, nil, nil)
    notifier.critical(exception, 'description')

    expect(notifier).to receive(:report).with('critical', 'description', exception, extra_data, nil)
    notifier.critical(exception, 'description', extra_data)
  end
end
# Notifier#scope should return a new notifier whose configuration is a deep
# copy: child configuration changes never leak back into the parent.
context 'scope' do
  it 'should create a new notifier object' do
    notifier2 = notifier.scope

    notifier2.should_not eq(notifier)
    notifier2.should be_instance_of(Rollbar::Notifier)
  end

  it 'should create a copy of the parent notifier\'s configuration' do
    notifier.configure do |config|
      config.code_version = '123'
      config.payload_options = {
        :a => 'a',
        :b => {:c => 'c'}
      }
    end

    notifier2 = notifier.scope

    notifier2.configuration.code_version.should == '123'
    notifier2.configuration.should_not equal(notifier.configuration)
    notifier2.configuration.payload_options.should_not equal(notifier.configuration.payload_options)
    notifier2.configuration.payload_options.should == notifier.configuration.payload_options
    notifier2.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'}
    }
  end

  it 'should not modify any parent notifier configuration' do
    Rollbar.clear_notifier!
    configure
    Rollbar.configuration.code_version.should be_nil
    Rollbar.configuration.payload_options.should be_empty

    notifier = Rollbar.notifier.scope

    notifier.configure do |config|
      config.code_version = '123'
      config.payload_options = {
        :a => 'a',
        :b => {:c => 'c'}
      }
    end

    notifier2 = notifier.scope

    notifier2.configure do |config|
      config.payload_options[:c] = 'c'
    end

    # Mutation on the child must not be visible on the parent.
    notifier.configuration.payload_options[:c].should be_nil

    notifier3 = notifier2.scope({
      :b => {:c => 3, :d => 'd'}
    })

    notifier3.configure do |config|
      config.code_version = '456'
    end

    notifier.configuration.code_version.should == '123'
    notifier.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'}
    }

    notifier2.configuration.code_version.should == '123'
    notifier2.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'},
      :c => 'c'
    }

    notifier3.configuration.code_version.should == '456'
    notifier3.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'},
      :c => 'c'
    }

    # The global notifier stays untouched throughout.
    Rollbar.configuration.code_version.should be_nil
    Rollbar.configuration.payload_options.should be_empty
  end
end
# Private Notifier#report: rejects empty reports and honors ignored_person_ids.
context 'report' do
  let(:logger_mock) { double("Rails.logger").as_null_object }

  before(:each) do
    configure
    Rollbar.configure do |config|
      config.logger = logger_mock
    end
  end

  after do
    configure
  end

  it 'should reject input that doesn\'t contain an exception, message or extra data' do
    expect(logger_mock).to receive(:error).with('[Rollbar] Tried to send a report with no message, exception or extra data.')
    expect(notifier).not_to receive(:schedule_payload)

    result = notifier.send(:report, 'info', nil, nil, nil, nil)
    result.should == 'error'
  end

  it 'should be ignored if the person is ignored' do
    person_data = {
      :id => 1,
      :username => "test",
      :email => "test@example.com"
    }
    notifier.configure do |config|
      config.ignored_person_ids += [1]
      config.payload_options = { :person => person_data }
    end

    expect(notifier).not_to receive(:schedule_payload)

    result = notifier.send(:report, 'info', 'message', nil, nil, nil)
    result.should == 'ignored'
  end
end
end
context 'reporting' do
let(:exception) do
begin
foo = bar
rescue => e
e
end
end
let(:logger_mock) { double("Rails.logger").as_null_object }
let(:user) { User.create(:email => 'email@example.com', :encrypted_password => '', :created_at => Time.now, :updated_at => Time.now) }
before do
Rollbar.unconfigure
configure
Rollbar.configure do |config|
config.logger = logger_mock
end
end
# Basic reporting behavior: success logging and the 'disabled' return value.
it 'should report exceptions without person or request data' do
  logger_mock.should_receive(:info).with('[Rollbar] Success')
  Rollbar.error(exception)
end

it 'should not report anything when disabled' do
  logger_mock.should_not_receive(:info).with('[Rollbar] Success')
  Rollbar.configure do |config|
    config.enabled = false
  end

  Rollbar.error(exception).should == 'disabled'
end

# NOTE(review): exact duplicate of the first example above (same description
# and body) — consider removing one of them.
it 'should report exceptions without person or request data' do
  logger_mock.should_receive(:info).with('[Rollbar] Success')
  Rollbar.error(exception)
end

it 'should be enabled when freshly configured' do
  Rollbar.configuration.enabled.should == true
end

it 'should not be enabled when not configured' do
  Rollbar.clear_notifier!

  Rollbar.configuration.enabled.should be_nil
  Rollbar.error(exception).should == 'disabled'
end

it 'should stay disabled if configure is called again' do
  # configure once, setting enabled to false.
  Rollbar.configure do |config|
    config.enabled = false
  end

  # now configure again (perhaps to change some other values)
  Rollbar.configure { |_| }

  Rollbar.configuration.enabled.should == false
  Rollbar.error(exception).should == 'disabled'
end
# When use_exception_level_filters_default is true, exception_level_filters
# apply without passing :use_exception_level_filters at the call site.
context 'using configuration.use_exception_level_filters_default' do
  before do
    Rollbar.configure do |config|
      config.use_exception_level_filters_default = true
    end
  end

  context 'without use_exception_level_filters argument' do
    it 'sends the correct filtered level' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'warning' }
      end

      Rollbar.error(exception)
      expect(Rollbar.last_report[:level]).to be_eql('warning')
    end

    it 'ignore ignored exception classes' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'ignore' }
      end

      logger_mock.should_not_receive(:info)
      logger_mock.should_not_receive(:warn)
      logger_mock.should_not_receive(:error)

      Rollbar.error(exception)
    end

    # Description typo fixed: "overriden" -> "overridden".
    it 'should not use the filters if overridden at log site' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'ignore' }
      end

      Rollbar.error(exception, :use_exception_level_filters => false)
      expect(Rollbar.last_report[:level]).to be_eql('error')
    end
  end
end
# Passing :use_exception_level_filters => true applies the configured
# exception_level_filters on a per-call basis.
context 'using :use_exception_level_filters option as true' do
  it 'sends the correct filtered level' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'warning' }
    end

    Rollbar.error(exception, :use_exception_level_filters => true)
    expect(Rollbar.last_report[:level]).to be_eql('warning')
  end

  it 'ignore ignored exception classes' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'ignore' }
    end

    logger_mock.should_not_receive(:info)
    logger_mock.should_not_receive(:warn)
    logger_mock.should_not_receive(:error)

    Rollbar.error(exception, :use_exception_level_filters => true)
  end

  # NOTE(review): this 'as false' context appears to be unintentionally
  # nested inside the 'as true' context — it probably belongs one level up.
  context 'using :use_exception_level_filters option as false' do
    it 'sends the correct filtered level' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'warning' }
      end

      Rollbar.error(exception, :use_exception_level_filters => false)
      expect(Rollbar.last_report[:level]).to be_eql('error')
    end

    it 'ignore ignored exception classes' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'ignore' }
      end

      Rollbar.error(exception, :use_exception_level_filters => false)
      expect(Rollbar.last_report[:level]).to be_eql('error')
    end
  end
end

# NOTE(review): duplicate of the 'as true' examples above — consider removing.
context 'using :use_exception_level_filters option as true' do
  it 'sends the correct filtered level' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'warning' }
    end

    Rollbar.error(exception, :use_exception_level_filters => true)
    expect(Rollbar.last_report[:level]).to be_eql('warning')
  end

  it 'ignore ignored exception classes' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'ignore' }
    end

    logger_mock.should_not_receive(:info)
    logger_mock.should_not_receive(:warn)
    logger_mock.should_not_receive(:error)

    Rollbar.error(exception, :use_exception_level_filters => true)
  end
end
# Without the option (and without the default enabled), filters are ignored
# and the method's own level wins.
context 'if not using :use_exception_level_filters option' do
  it 'sends the level defined by the used method' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'warning' }
    end

    Rollbar.error(exception)
    expect(Rollbar.last_report[:level]).to be_eql('error')
  end

  it 'ignore ignored exception classes' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'ignore' }
    end

    Rollbar.error(exception)
    expect(Rollbar.last_report[:level]).to be_eql('error')
  end
end

# Skip jruby 1.9+ (https://github.com/jruby/jruby/issues/2373)
if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby' && (not RUBY_VERSION =~ /^1\.9/)
  it "should work with an IO object as rack.errors" do
    logger_mock.should_receive(:info).with('[Rollbar] Success')

    Rollbar.error(exception, :env => { :"rack.errors" => IO.new(2, File::WRONLY) })
  end
end
# Person filtering, callable level filters, and Rollbar.silenced behavior.
it 'should ignore ignored persons' do
  person_data = {
    :id => 1,
    :username => "test",
    :email => "test@example.com"
  }
  Rollbar.configure do |config|
    config.payload_options = { :person => person_data }
    config.ignored_person_ids += [1]
  end

  logger_mock.should_not_receive(:info)
  logger_mock.should_not_receive(:warn)
  logger_mock.should_not_receive(:error)

  Rollbar.error(exception)
end

it 'should not ignore non-ignored persons' do
  person_data = {
    :id => 1,
    :username => "test",
    :email => "test@example.com"
  }
  Rollbar.configure do |config|
    config.payload_options = { :person => person_data }
    config.ignored_person_ids += [1]
  end

  Rollbar.last_report = nil

  Rollbar.error(exception)
  Rollbar.last_report.should be_nil

  person_data = {
    :id => 2,
    :username => "test2",
    :email => "test2@example.com"
  }

  new_options = {
    :person => person_data
  }

  Rollbar.scoped(new_options) do
    Rollbar.error(exception)
  end

  Rollbar.last_report.should_not be_nil
end

it 'should allow callables to set exception filtered level' do
  callable_mock = double
  saved_filters = Rollbar.configuration.exception_level_filters
  Rollbar.configure do |config|
    config.exception_level_filters = { 'NameError' => callable_mock }
  end

  # The callable receives the exception and returns the level to use.
  callable_mock.should_receive(:call).with(exception).at_least(:once).and_return("info")
  logger_mock.should_receive(:info)
  logger_mock.should_not_receive(:warn)
  logger_mock.should_not_receive(:error)

  Rollbar.error(exception, :use_exception_level_filters => true)
end

it 'should not report exceptions when silenced' do
  expect_any_instance_of(Rollbar::Notifier).to_not receive(:schedule_payload)

  begin
    test_var = 1
    Rollbar.silenced do
      test_var = 2
      raise
    end
  rescue => e
    Rollbar.error(e)
  end

  # The silenced block still executed its body before raising.
  test_var.should == 2
end
# Payload construction for exceptions with missing or nonstandard backtraces.
it 'should report exception objects with no backtrace' do
  payload = nil
  notifier.stub(:schedule_item) do |*args|
    payload = args[0]
  end

  Rollbar.error(StandardError.new("oops"))

  payload["data"][:body][:trace][:frames].should == []
  payload["data"][:body][:trace][:exception][:class].should == "StandardError"
  payload["data"][:body][:trace][:exception][:message].should == "oops"
end

it 'gets the backtrace from the caller' do
  Rollbar.configure do |config|
    config.populate_empty_backtraces = true
  end

  exception = Exception.new

  Rollbar.error(exception)

  gem_dir = Gem::Specification.find_by_name('rollbar').gem_dir
  gem_lib_dir = gem_dir + '/lib'
  last_report = Rollbar.last_report

  filepaths = last_report[:body][:trace][:frames].map {|frame| frame[:filename] }.reverse

  # Frames from the gem's own lib dir are excluded; spec frames remain.
  expect(filepaths[0]).not_to include(gem_lib_dir)
  expect(filepaths.any? {|filepath| filepath.include?(gem_dir) }).to eq true
end

it 'should return the exception data with a uuid, on platforms with SecureRandom' do
  if defined?(SecureRandom) and SecureRandom.respond_to?(:uuid)
    exception_data = Rollbar.error(StandardError.new("oops"))
    exception_data[:uuid].should_not be_nil
  end
end

it 'should report exception objects with nonstandard backtraces' do
  payload = nil
  notifier.stub(:schedule_item) do |*args|
    payload = args[0]
  end

  # Exception subclass whose backtrace entries are not "file:line in method".
  class CustomException < StandardError
    def backtrace
      ["custom backtrace line"]
    end
  end

  exception = CustomException.new("oops")

  notifier.error(exception)

  payload["data"][:body][:trace][:frames][0][:method].should == "custom backtrace line"
end

it 'should report exceptions with a custom level' do
  payload = nil
  notifier.stub(:schedule_item) do |*args|
    payload = args[0]
  end

  Rollbar.error(exception)

  payload['data'][:level].should == 'error'

  Rollbar.log('debug', exception)

  payload['data'][:level].should == 'debug'
end
# Extra data containing invalid byte sequences is sanitized before sending.
context 'with invalid utf8 encoding' do
  let(:extra) do
    { :extra => force_to_ascii("bad value 1\255") }
  end

  # Description typos fixed: "removes te invalid characteres".
  it 'removes the invalid characters' do
    Rollbar.info('removing invalid chars', extra)

    extra_value = Rollbar.last_report[:body][:message][:extra][:extra]

    expect(extra_value).to be_eql('bad value 1')
  end
end
end
# Backwards
context 'report_message' do
before(:each) do
configure
Rollbar.configure do |config|
config.logger = logger_mock
end
end
let(:logger_mock) { double("Rails.logger").as_null_object }
let(:user) { User.create(:email => 'email@example.com', :encrypted_password => '', :created_at => Time.now, :updated_at => Time.now) }
it 'should report simple messages' do
logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
logger_mock.should_receive(:info).with('[Rollbar] Success')
Rollbar.error('Test message')
end
it 'should not report anything when disabled' do
logger_mock.should_not_receive(:info).with('[Rollbar] Success')
Rollbar.configure do |config|
config.enabled = false
end
Rollbar.error('Test message that should be ignored')
Rollbar.configure do |config|
config.enabled = true
end
end
it 'should report messages with extra data' do
logger_mock.should_receive(:info).with('[Rollbar] Success')
Rollbar.debug('Test message with extra data', 'debug', :foo => "bar",
:hash => { :a => 123, :b => "xyz" })
end
# END Backwards
it 'should not crash with circular extra_data' do
a = { :foo => "bar" }
b = { :a => a }
c = { :b => b }
a[:c] = c
logger_mock.should_receive(:error).with(/\[Rollbar\] Reporting internal error encountered while sending data to Rollbar./)
Rollbar.error("Test message with circular extra data", a)
end
it 'should be able to report form validation errors when they are present' do
logger_mock.should_receive(:info).with('[Rollbar] Success')
user.errors.add(:example, "error")
user.report_validation_errors_to_rollbar
end
it 'should not report form validation errors when they are not present' do
logger_mock.should_not_receive(:info).with('[Rollbar] Success')
user.errors.clear
user.report_validation_errors_to_rollbar
end
it 'should report messages with extra data' do
logger_mock.should_receive(:info).with('[Rollbar] Success')
Rollbar.info("Test message with extra data", :foo => "bar",
:hash => { :a => 123, :b => "xyz" })
end
it 'should report messages with request, person data and extra data' do
logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
logger_mock.should_receive(:info).with('[Rollbar] Success')
request_data = {
:params => {:foo => 'bar'}
}
person_data = {
:id => 123,
:username => 'username'
}
extra_data = {
:extra_foo => 'extra_bar'
}
Rollbar.configure do |config|
config.payload_options = {
:request => request_data,
:person => person_data
}
end
Rollbar.info("Test message", extra_data)
Rollbar.last_report[:request].should == request_data
Rollbar.last_report[:person].should == person_data
Rollbar.last_report[:body][:message][:extra][:extra_foo].should == 'extra_bar'
end
end
# Payloads go over the network by default; with write_to_file they are
# appended to configuration.filepath instead.
context 'payload_destination' do
  before(:each) do
    configure
    Rollbar.configure do |config|
      config.logger = logger_mock
      config.filepath = 'test.rollbar'
    end
  end

  after do
    configure
  end

  let(:exception) do
    begin
      foo = bar
    rescue => e
      e
    end
  end

  let(:logger_mock) { double("Rails.logger").as_null_object }

  it 'should send the payload over the network by default' do
    logger_mock.should_not_receive(:info).with('[Rollbar] Writing payload to file')
    logger_mock.should_receive(:info).with('[Rollbar] Sending item').once
    logger_mock.should_receive(:info).with('[Rollbar] Success').once
    Rollbar.error(exception)
  end

  it 'should save the payload to a file if set' do
    logger_mock.should_not_receive(:info).with('[Rollbar] Sending item')
    logger_mock.should_receive(:info).with('[Rollbar] Writing item to file').once
    logger_mock.should_receive(:info).with('[Rollbar] Success').once

    filepath = ''

    Rollbar.configure do |config|
      config.write_to_file = true
      filepath = config.filepath
    end

    Rollbar.error(exception)

    File.exist?(filepath).should == true
    File.read(filepath).should include test_access_token
    File.delete(filepath)

    Rollbar.configure do |config|
      config.write_to_file = false
    end
  end
end
# Proxy resolution: configuration file settings take precedence over the
# http_proxy / https_proxy environment variables.
context 'using a proxy server' do
  before do
    allow_any_instance_of(Net::HTTP).to receive(:request).and_return(OpenStruct.new(:code => 200, :body => "Success"))
    @env_vars = clear_proxy_env_vars
  end

  after do
    restore_proxy_env_vars(@env_vars)
  end

  context 'via environment variables' do
    before do
      @uri = URI.parse(Rollbar::Configuration::DEFAULT_ENDPOINT)
    end

    it 'honors proxy settings in the environment' do
      ENV['http_proxy'] = 'http://user:pass@example.com:80'
      ENV['https_proxy'] = 'http://user:pass@example.com:80'

      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'example.com', 80, 'user', 'pass').and_call_original
      Rollbar.info("proxy this")
    end

    # Description typo fixed: "environemnt".
    it 'does not use a proxy if no proxy settings in environment' do
      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, nil, nil, nil, nil).and_call_original
      Rollbar.info("proxy this")
    end
  end

  context 'set in configuration file' do
    before do
      Rollbar.configure do |config|
        config.proxy = {
          :host => 'http://config.com',
          :port => 8080,
          :user => 'foo',
          :password => 'bar'
        }
      end

      @uri = URI.parse(Rollbar::Configuration::DEFAULT_ENDPOINT)
    end

    it 'honors proxy settings in the config file' do
      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'config.com', 8080, 'foo', 'bar').and_call_original
      Rollbar.info("proxy this")
    end

    it 'gives the configuration settings precedence over environment' do
      ENV['http_proxy'] = 'http://user:pass@example.com:80'
      ENV['https_proxy'] = 'http://user:pass@example.com:80'

      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'config.com', 8080, 'foo', 'bar').and_call_original
      Rollbar.info("proxy this")
    end

    it 'allows @-signs in passwords' do
      Rollbar.configure do |config|
        config.proxy[:password] = "manh@tan"
      end

      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'config.com', 8080, 'foo', 'manh@tan').and_call_original
      Rollbar.info("proxy this")
    end
  end
end
context 'asynchronous_handling' do
before do
Rollbar.clear_notifier!
configure
Rollbar.configure do |config|
config.logger = logger_mock
end
end
after do
configure
end
let(:exception) do
begin
foo = bar
rescue => e
e
end
end
let(:logger_mock) { double("Rails.logger").as_null_object }
# Asynchronous delivery via the default girl_friday queue and via a
# user-supplied async_handler proc.
it 'should send the payload using the default asynchronous handler girl_friday' do
  logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
  logger_mock.should_receive(:info).with('[Rollbar] Sending item')
  logger_mock.should_receive(:info).with('[Rollbar] Success')

  Rollbar.configure do |config|
    config.use_async = true
    # Run queued jobs synchronously so the expectations fire in-test.
    GirlFriday::WorkQueue.immediate!
  end

  Rollbar.error(exception)

  Rollbar.configure do |config|
    config.use_async = false
    GirlFriday::WorkQueue.queue!
  end
end

it 'should send the payload using a user-supplied asynchronous handler' do
  logger_mock.should_receive(:info).with('Custom async handler called')
  logger_mock.should_receive(:info).with('[Rollbar] Sending item')
  logger_mock.should_receive(:info).with('[Rollbar] Success')

  Rollbar.configure do |config|
    config.use_async = true
    config.async_handler = Proc.new { |payload|
      logger_mock.info 'Custom async handler called'
      Rollbar.process_from_async_handler(payload)
    }
  end

  Rollbar.error(exception)
end
# We should be able to send String payloads, generated
# by a previous version of the gem. This can happend just
# after a deploy with an gem upgrade.
context 'with a payload generated as String' do
let(:async_handler) do
proc do |payload|
# simulate previous gem version
string_payload = Rollbar::JSON.dump(payload)
Rollbar.process_from_async_handler(string_payload)
end
end
before do
Rollbar.configuration.stub(:use_async).and_return(true)
Rollbar.configuration.stub(:async_handler).and_return(async_handler)
end
# String payloads (as produced by older gem versions) must still process.
it 'sends a payload generated as String, not as a Hash' do
  logger_mock.should_receive(:info).with('[Rollbar] Success')

  Rollbar.error(exception)
end
# Failover handlers run in order when the primary async handler raises;
# only when all of them fail is an internal error reported.
context 'with async failover handlers' do
  before do
    Rollbar.reconfigure do |config|
      config.use_async = true
      config.async_handler = async_handler
      config.failover_handlers = handlers
      config.logger = logger_mock
    end
  end

  let(:exception) { StandardError.new('the error') }

  context 'if the async handler doesnt fail' do
    let(:async_handler) { proc { |_| 'success' } }
    let(:handler) { proc { |_| 'success' } }
    let(:handlers) { [handler] }

    it 'doesnt call any failover handler' do
      expect(handler).not_to receive(:call)

      Rollbar.error(exception)
    end
  end

  context 'if the async handler fails' do
    let(:async_handler) { proc { |_| fail 'this handler will crash' } }

    context 'if any failover handlers is configured' do
      let(:handlers) { [] }
      let(:log_message) do
        '[Rollbar] Async handler failed, and there are no failover handlers configured. See the docs for "failover_handlers"'
      end

      it 'logs the error but doesnt try to report an internal error' do
        expect(logger_mock).to receive(:error).with(log_message)

        Rollbar.error(exception)
      end
    end

    context 'if the first failover handler success' do
      let(:handler) { proc { |_| 'success' } }
      let(:handlers) { [handler] }

      it 'calls the failover handler and doesnt report internal error' do
        expect(Rollbar).not_to receive(:report_internal_error)
        expect(handler).to receive(:call)

        Rollbar.error(exception)
      end
    end

    context 'with two handlers, the first failing' do
      let(:handler1) { proc { |_| fail 'this handler fails' } }
      let(:handler2) { proc { |_| 'success' } }
      let(:handlers) { [handler1, handler2] }

      it 'calls the second handler and doesnt report internal error' do
        expect(handler2).to receive(:call)

        Rollbar.error(exception)
      end
    end

    context 'with two handlers, both failing' do
      let(:handler1) { proc { |_| fail 'this handler fails' } }
      let(:handler2) { proc { |_| fail 'this will also fail' } }
      let(:handlers) { [handler1, handler2] }

      it 'reports internal error' do
        expect(logger_mock).to receive(:error)

        Rollbar.error(exception)
      end
    end
  end
end
end
# Optional delayer integrations — only run when the gem is loaded.
describe "#use_sucker_punch", :if => defined?(SuckerPunch) do
  it "should send the payload to sucker_punch delayer" do
    logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
    expect(Rollbar::Delay::SuckerPunch).to receive(:call)

    Rollbar.configure(&:use_sucker_punch)
    Rollbar.error(exception)
  end
end

describe "#use_shoryuken", :if => defined?(Shoryuken) do
  it "should send the payload to shoryuken delayer" do
    logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
    expect(Rollbar::Delay::Shoryuken).to receive(:call)

    Rollbar.configure(&:use_shoryuken)
    Rollbar.error(exception)
  end
end
# Sidekiq delayer integration — only run when the gem is loaded.
describe "#use_sidekiq", :if => defined?(Sidekiq) do
  # Description typo fixed: "instanciate" -> "instantiate".
  it "should instantiate sidekiq delayer with custom values" do
    Rollbar::Delay::Sidekiq.should_receive(:new).with('queue' => 'test_queue')
    config = Rollbar::Configuration.new
    config.use_sidekiq 'queue' => 'test_queue'
  end

  it "should send the payload to sidekiq delayer" do
    handler = double('sidekiq_handler_mock')
    handler.should_receive(:call)

    Rollbar.configure do |config|
      config.use_sidekiq
      config.async_handler = handler
    end

    Rollbar.error(exception)
  end
end
end
# Logger selection: Rails.logger when configured, otherwise default_logger.
context 'logger' do
  before(:each) do
    reset_configuration
  end

  it 'should have use the Rails logger when configured to do so' do
    configure
    expect(Rollbar.send(:logger)).to be_kind_of(Rollbar::LoggerProxy)
    expect(Rollbar.send(:logger).object).to eq ::Rails.logger
  end

  it 'should use the default_logger when no logger is set' do
    logger = Logger.new(STDERR)
    Rollbar.configure do |config|
      config.default_logger = lambda { logger }
    end
    Rollbar.send(:logger).object.should == logger
  end

  it 'should have a default default_logger' do
    Rollbar.send(:logger).should_not be_nil
  end

  after do
    reset_configuration
  end
end
# project_gems configuration resolves gem names (and regex patterns) to
# installed gem paths included in the payload as project_package_paths.
context "project_gems" do
  it "should include gem paths for specified project gems in the payload" do
    gems = ['rack', 'rspec-rails']
    gem_paths = []

    Rollbar.configure do |config|
      config.project_gems = gems
    end

    gem_paths = gems.map do |gem|
      gem_spec = Gem::Specification.find_all_by_name(gem)[0]
      gem_spec.gem_dir if gem_spec
    end.compact

    data = notifier.send(:build_item, 'info', 'test', nil, {}, nil)['data']
    data[:project_package_paths].kind_of?(Array).should == true
    data[:project_package_paths].length.should == gem_paths.length

    data[:project_package_paths].each_with_index{|path, index|
      path.should == gem_paths[index]
    }
  end

  it "should handle regex gem patterns" do
    gems = ["rack", /rspec/, /roll/]
    gem_paths = []

    Rollbar.configure do |config|
      config.project_gems = gems
    end

    gem_paths = gems.map{|gem| Gem::Specification.find_all_by_name(gem).map(&:gem_dir) }.flatten.compact.uniq
    gem_paths.length.should > 1

    gem_paths.any?{|path| path.include? 'rollbar-gem'}.should == true
    gem_paths.any?{|path| path.include? 'rspec-rails'}.should == true

    data = notifier.send(:build_item, 'info', 'test', nil, {}, nil)['data']
    data[:project_package_paths].kind_of?(Array).should == true
    data[:project_package_paths].length.should == gem_paths.length
    (data[:project_package_paths] - gem_paths).length.should == 0
  end

  it "should not break on non-existent gems" do
    gems = ["this_gem_does_not_exist", "rack"]

    Rollbar.configure do |config|
      config.project_gems = gems
    end

    data = notifier.send(:build_item, 'info', 'test', nil, {}, nil)['data']
    data[:project_package_paths].kind_of?(Array).should == true
    data[:project_package_paths].length.should == 1
  end
end
# Internal-error reporting must never itself raise.
context 'report_internal_error', :reconfigure_notifier => true do
  it "should not crash when given an exception object" do
    begin
      1 / 0
    rescue => e
      notifier.send(:report_internal_error, e)
    end
  end
end
# send_failsafe builds a minimal last-resort payload; its message body is
# assembled from the exception class/message and nearest backtrace frame.
context "send_failsafe" do
  let(:exception) { StandardError.new }

  it "doesn't crash when given a message and exception" do
    sent_payload = notifier.send(:send_failsafe, "test failsafe", exception)

    expected_message = 'Failsafe from rollbar-gem. StandardError: test failsafe'
    expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_message)
  end

  it "doesn't crash when given all nils" do
    notifier.send(:send_failsafe, nil, nil)
  end

  context 'with a non default exception message' do
    let(:exception) { StandardError.new 'Something is wrong' }

    it 'adds it to exception info' do
      sent_payload = notifier.send(:send_failsafe, "test failsafe", exception)

      expected_message = 'Failsafe from rollbar-gem. StandardError: "Something is wrong": test failsafe'
      expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_message)
    end
  end

  context 'without exception object' do
    it 'just sends the given message' do
      sent_payload = notifier.send(:send_failsafe, "test failsafe", nil)

      expected_message = 'Failsafe from rollbar-gem. test failsafe'
      expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_message)
    end
  end

  context 'if the exception has a backtrace' do
    let(:backtrace) { ['func3', 'func2', 'func1'] }
    let(:failsafe_reason) { 'StandardError in func3: test failsafe' }
    let(:expected_body) { "Failsafe from rollbar-gem. #{failsafe_reason}" }
    let(:expected_log_message) do
      "[Rollbar] Sending failsafe response due to #{failsafe_reason}"
    end

    before { exception.set_backtrace(backtrace) }

    it 'adds the nearest frame to the message' do
      expect(notifier).to receive(:log_error).with(expected_log_message)

      sent_payload = notifier.send(:send_failsafe, "test failsafe", exception)
      expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_body)
    end
  end

  context 'with uuid and host' do
    let(:host) { 'the-host' }
    let(:uuid) { 'the-uuid' }

    it 'sets the uuid and host in correct keys' do
      sent_payload = notifier.send(:send_failsafe, 'testing uuid and host',
                                   exception, uuid, host)

      expect(sent_payload['data'][:custom][:orig_uuid]).to be_eql('the-uuid')
      expect(sent_payload['data'][:custom][:orig_host]).to be_eql('the-host')
    end
  end
end
# A context proc returning nil must not break internal-error reporting.
context 'when reporting internal error with nil context' do
  let(:context_proc) { proc {} }
  let(:scoped_notifier) { notifier.scope(:context => context_proc) }
  let(:exception) { Exception.new }
  let(:logger_mock) { double("Rails.logger").as_null_object }

  it 'reports successfully' do
    configure

    Rollbar.configure do |config|
      config.logger = logger_mock
    end

    logger_mock.should_receive(:info).with('[Rollbar] Sending item').once
    logger_mock.should_receive(:info).with('[Rollbar] Success').once
    scoped_notifier.send(:report_internal_error, exception)
  end
end
# RequestDataExtractor#rollbar_headers must never forward cookies.
context "request_data_extractor" do
  before(:each) do
    class DummyClass
    end
    @dummy_class = DummyClass.new
    @dummy_class.extend(Rollbar::RequestDataExtractor)
  end

  context "rollbar_headers" do
    it "should not include cookies" do
      env = {"HTTP_USER_AGENT" => "test", "HTTP_COOKIE" => "cookie"}
      headers = @dummy_class.send(:rollbar_headers, env)
      headers.should have_key "User-Agent"
      headers.should_not have_key "Cookie"
    end
  end
end
# Rollbar.scoped runs a block against a temporarily scoped notifier and
# restores the previous notifier afterwards, even on failure, while child
# threads keep seeing the parent thread's scope.
describe '.scoped' do
  let(:scope_options) do
    { :foo => 'bar' }
  end

  it 'changes data in scope_object inside the block' do
    Rollbar.clear_notifier!
    configure
    current_notifier_id = Rollbar.notifier.object_id

    Rollbar.scoped(scope_options) do
      scope_object = Rollbar.notifier.scope_object

      # Inside the block a different notifier carrying the scope is active.
      expect(Rollbar.notifier.object_id).not_to be_eql(current_notifier_id)
      expect(scope_object).to be_eql(scope_options)
    end

    # After the block the original notifier is restored.
    expect(Rollbar.notifier.object_id).to be_eql(current_notifier_id)
  end

  context 'if the block fails' do
    let(:crashing_block) { proc { fail } }

    it 'restores the old notifier' do
      notifier = Rollbar.notifier

      # `fail` with no arguments raises RuntimeError; matching the class
      # explicitly avoids RSpec's bare raise_error deprecation warning and
      # won't silently pass on an unrelated error.
      expect { Rollbar.scoped(&crashing_block) }.to raise_error(RuntimeError)
      expect(notifier).to be_eql(Rollbar.notifier)
    end
  end

  context 'if the block creates a new thread' do
    let(:block) do
      proc do
        Thread.new do
          scope = Rollbar.notifier.scope_object
          Thread.main[:inner_scope] = scope
        end.join
      end
    end

    let(:scope) do
      { :foo => 'bar' }
    end

    it 'maintains the parent thread notifier scope' do
      Rollbar.scoped(scope, &block)

      expect(Thread.main[:inner_scope]).to be_eql(scope)
    end
  end
end
# Rollbar.scope! mutates the current notifier's scope in place (unlike
# .scoped, which swaps notifiers for the duration of a block).
describe '.scope!' do
  let(:new_scope) do
    { :person => { :id => 1 } }
  end

  before { reconfigure_notifier }

  it 'adds the new scope to the payload options' do
    scope_object = Rollbar.notifier.scope_object
    Rollbar.scope!(new_scope)

    expect(scope_object).to be_eql(new_scope)
  end
end

# clear_notifier! drops both the per-thread notifier and the root notifier.
describe '.clear_notifier' do
  before { Rollbar.notifier }

  it 'resets the notifier' do
    Rollbar.clear_notifier!
    expect(Rollbar.instance_variable_get('@notifier')).to be_nil
    expect(Rollbar.instance_variable_get('@root_notifier')).to be_nil
  end
end

describe '.process_item' do
  context 'if there is an exception sending the payload' do
    let(:exception) { StandardError.new('error message') }
    let(:payload) { Rollbar::Item.build_with({ :foo => :bar }) }

    # Send failures are logged and then re-raised to the caller.
    it 'logs the error and the payload' do
      allow(Rollbar.notifier).to receive(:send_item).and_raise(exception)
      expect(Rollbar.notifier).to receive(:log_error)

      expect { Rollbar.notifier.process_item(payload) }.to raise_error(exception)
    end
  end
end

describe '.process_from_async_handler' do
  context 'with errors' do
    let(:exception) { StandardError.new('the error') }

    # The exception is reported as an internal error, tagged with
    # @_rollbar_do_not_report so it is not reported a second time, and
    # re-raised to the async handler.
    it 'raises anything and sends internal error' do
      allow(Rollbar.notifier).to receive(:process_item).and_raise(exception)
      expect(Rollbar.notifier).to receive(:report_internal_error).with(exception)

      expect do
        Rollbar.notifier.process_from_async_handler({})
      end.to raise_error(exception)

      rollbar_do_not_report = exception.instance_variable_get(:@_rollbar_do_not_report)
      expect(rollbar_do_not_report).to be_eql(true)
    end
  end
end
# Rollbar.preconfigure applies configuration to the root notifier so that
# threads spawned before (re)configuration still pick up the settings.
# (Fixed: missing space between the description string and `do`.)
describe '.preconfigure' do
  before do
    Rollbar.clear_notifier!
  end

  it 'resets the notifier' do
    Rollbar.configure do |config|
      config.access_token = 'foo'
    end

    # A thread created before preconfigure must still see the new root value.
    Thread.new {}

    Rollbar.preconfigure do |config|
      config.root = 'bar'
    end

    notifier_config = Rollbar.notifier.configuration
    expect(notifier_config.root).to be_eql('bar')
  end
end
# Network timeouts are retried (3 attempts total) before being reported as
# an internal error. Skipped on 1.8/1.9 where the timeout exception classes
# differ.
context 'having timeout issues (for ruby > 1.9.3)' do
  before do
    skip if Rollbar::LanguageSupport.ruby_18? || Rollbar::LanguageSupport.ruby_19?
  end

  let(:exception_class) do
    Rollbar::LanguageSupport.timeout_exceptions.first
  end
  let(:net_exception) do
    exception_class.new
  end

  before do
    allow_any_instance_of(Net::HTTP).to receive(:request).and_raise(net_exception)
  end

  it 'retries the request' do
    # NOTE(review): idiomatic RSpec is `.exactly(3).times`; `exactly(3)`
    # alone sets the count but reads oddly.
    expect_any_instance_of(Net::HTTP).to receive(:request).exactly(3)
    expect(Rollbar.notifier).to receive(:report_internal_error).with(net_exception)

    Rollbar.info('foo')
  end
end

# with_config swaps in a temporary configuration for the block and restores
# the original object afterwards.
describe '.with_config' do
  # NOTE(review): this let is unused; the example below passes its own
  # options hash directly.
  let(:new_config) do
    { 'environment' => 'foo' }
  end

  it 'uses the new config and restores the old one' do
    config1 = described_class.configuration

    subject.with_config(:environment => 'bar') do
      expect(described_class.configuration).not_to be(config1)
    end

    expect(described_class.configuration).to be(config1)
  end
end

# configure with some basic params
def configure
  reconfigure_notifier
end
end
Rework gem regex spec to avoid deprecation warning.
# encoding: utf-8
require 'logger'
require 'socket'
require 'girl_friday'
require 'redis'
require 'active_support/core_ext/object'
require 'active_support/json/encoding'
require 'rollbar/item'
require 'ostruct'
begin
require 'rollbar/delay/sidekiq'
require 'rollbar/delay/sucker_punch'
rescue LoadError
end
begin
require 'sucker_punch'
require 'sucker_punch/testing/inline'
rescue LoadError
end
begin
require 'rollbar/delay/shoryuken'
rescue LoadError
end
require 'spec_helper'
describe Rollbar do
# Shared setup: every example starts from a freshly configured notifier.
let(:notifier) { Rollbar.notifier }

before do
  Rollbar.clear_notifier!
  configure
end

# Using the notifier before configuration yields 'disabled'; configuring
# afterwards must fully reset it.
context 'when notifier has been used before configure it' do
  before do
    Rollbar.clear_notifier!
  end

  it 'is finally reset' do
    Rollbar.log_debug('Testing notifier')

    expect(Rollbar.error('error message')).to be_eql('disabled')

    reconfigure_notifier

    expect(Rollbar.error('error message')).not_to be_eql('disabled')
  end
end

# NOTE(review): empty placeholder shared example group; never filled in.
shared_examples 'stores the root notifier' do
end

# Each configuration entry point must leave Rollbar.root_notifier aliased to
# the current Rollbar.notifier.
describe '.configure' do
  before { Rollbar.clear_notifier! }

  it 'stores the root notifier' do
    Rollbar.configure { |c| }
    expect(Rollbar.root_notifier).to be(Rollbar.notifier)
  end
end

describe '.preconfigure' do
  before { Rollbar.clear_notifier! }

  it 'stores the root notifier' do
    Rollbar.preconfigure { |c| }
    expect(Rollbar.root_notifier).to be(Rollbar.notifier)
  end
end

describe '.reconfigure' do
  before { Rollbar.clear_notifier! }

  it 'stores the root notifier' do
    Rollbar.reconfigure { |c| }
    expect(Rollbar.root_notifier).to be(Rollbar.notifier)
  end
end

describe '.unconfigure' do
  before { Rollbar.clear_notifier! }

  it 'stores the root notifier' do
    expect(Rollbar.root_notifier).to receive(:unconfigure)

    Rollbar.unconfigure

    expect(Rollbar.root_notifier).to be(Rollbar.notifier)
  end
end
context 'Notifier' do
# Notifier#log(level, ...) accepts any mix of message, exception and extra
# data and dispatches to #report with normalized arguments.
describe '#log' do
  # A real NameError with a backtrace, produced by referencing an
  # undefined local.
  let(:exception) do
    begin
      foo = bar
    rescue => e
      e
    end
  end

  let(:configuration) { Rollbar.configuration }

  context 'executing a Thread before Rollbar is configured' do
    before do
      Rollbar.clear_notifier!

      # Thread created pre-configuration inherits no notifier state.
      Thread.new {}

      Rollbar.configure do |config|
        config.access_token = 'my-access-token'
      end
    end

    it 'sets correct configuration for Rollbar.notifier' do
      expect(Rollbar.notifier.configuration.enabled).to be_truthy
    end
  end

  # Argument-normalization matrix: #report is always called as
  # (level, message, exception, extra, context).
  it 'should report a simple message' do
    expect(notifier).to receive(:report).with('error', 'test message', nil, nil, nil)
    notifier.log('error', 'test message')
  end

  it 'should report a simple message with extra data' do
    extra_data = {:key => 'value', :hash => {:inner_key => 'inner_value'}}

    expect(notifier).to receive(:report).with('error', 'test message', nil, extra_data, nil)
    notifier.log('error', 'test message', extra_data)
  end

  it 'should report an exception' do
    expect(notifier).to receive(:report).with('error', nil, exception, nil, nil)
    notifier.log('error', exception)
  end

  it 'should report an exception with extra data' do
    extra_data = {:key => 'value', :hash => {:inner_key => 'inner_value'}}

    expect(notifier).to receive(:report).with('error', nil, exception, extra_data, nil)
    notifier.log('error', exception, extra_data)
  end

  it 'should report an exception with a description' do
    expect(notifier).to receive(:report).with('error', 'exception description', exception, nil, nil)
    notifier.log('error', exception, 'exception description')
  end

  it 'should report an exception with a description and extra data' do
    extra_data = {:key => 'value', :hash => {:inner_key => 'inner_value'}}

    expect(notifier).to receive(:report).with('error', 'exception description', exception, extra_data, nil)
    notifier.log('error', exception, extra_data, 'exception description')
  end

  context 'with :on_error_response hook configured' do
    let!(:notifier) { Rollbar::Notifier.new }
    let(:configuration) do
      config = Rollbar::Configuration.new
      config.access_token = test_access_token
      config.enabled = true
      # NOTE(review): `return` inside this block would raise LocalJumpError
      # if the hook were actually executed; the examples only stub
      # #call, so it never runs — confirm intended.
      config.hook :on_error_response do |response|
        return ":on_error_response executed"
      end
      config
    end
    let(:message) { 'foo' }
    let(:level) { 'foo' }

    before do
      notifier.configuration = configuration
      # Force a non-200 HTTP response from the API.
      allow_any_instance_of(Net::HTTP).to receive(:request).and_return(OpenStruct.new(:code => 500, :body => "Error"))
      @uri = URI.parse(Rollbar::Configuration::DEFAULT_ENDPOINT)
    end

    it "calls the :on_error_response hook if response status is not 200" do
      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, nil, nil, nil, nil).and_call_original
      expect(notifier.configuration.hook(:on_error_response)).to receive(:call)

      notifier.log(level, message)
    end
  end

  context 'with :on_report_internal_error hook configured' do
    let!(:notifier) { Rollbar::Notifier.new }
    let(:configuration) do
      config = Rollbar::Configuration.new
      config.access_token = test_access_token
      config.enabled = true
      # NOTE(review): same `return`-in-block caveat as the hook above.
      config.hook :on_report_internal_error do |response|
        return ":on_report_internal_error executed"
      end
      config
    end
    let(:message) { 'foo' }
    let(:level) { 'foo' }

    before do
      notifier.configuration = configuration
    end

    it "calls the :on_report_internal_error hook if" do
      expect(notifier.configuration.hook(:on_report_internal_error)).to receive(:call)
      expect(notifier).to receive(:report) do
        raise StandardError.new
      end

      notifier.log(level, message)
    end
  end

  context 'an item with a context' do
    let(:context) { { :controller => 'ExampleController' } }

    context 'with a custom_data_method configured' do
      before do
        Rollbar.configure do |config|
          config.custom_data_method = lambda do |message, exception, context|
            { :result => "MyApp#" + context[:controller] }
          end
        end
      end

      # The :custom_data_method_context extra is consumed by the custom data
      # method and must not leak into the reported extra payload.
      it 'should have access to the context data through configuration.custom_data_method' do
        result = notifier.log('error', "Custom message", { :custom_data_method_context => context})

        result[:body][:message][:extra].should_not be_nil
        result[:body][:message][:extra][:result].should == "MyApp#"+context[:controller]
        result[:body][:message][:extra][:custom_data_method_context].should be_nil
      end
    end
  end
end
# before_process handlers run ahead of #report; raising Rollbar::Ignore in a
# handler short-circuits reporting ('ignored'), and a handler error aborts
# the chain with a logged message.
context 'with before_process handlers in configuration' do
  let!(:notifier) { Rollbar::Notifier.new }
  let(:scope) { { :bar => :foo } }
  let(:configuration) do
    config = Rollbar::Configuration.new
    config.access_token = test_access_token
    config.enabled = true
    config
  end
  let(:message) { 'message' }
  let(:exception) { Exception.new }
  let(:extra) { {:foo => :bar } }
  let(:level) { 'error' }

  before do
    notifier.configuration = configuration
    notifier.scope!(scope)
  end

  context 'without raise Rollbar::Ignore' do
    let(:handler) do
      proc do |options|
      end
    end

    before do
      configuration.before_process = handler
    end

    # The handler receives the full option set and reporting proceeds.
    it 'calls the handler with the correct options' do
      options = {
        :level => level,
        :scope => Rollbar::LazyStore.new(scope),
        :exception => exception,
        :message => message,
        :extra => extra
      }

      expect(handler).to receive(:call).with(options)
      expect(notifier).to receive(:report).with(level, message, exception, extra, nil)

      notifier.log(level, message, exception, extra)
    end
  end

  context 'raising Rollbar::Ignore in the handler' do
    let(:handler) do
      proc do |options|
        raise Rollbar::Ignore
      end
    end

    before do
      configuration.before_process = handler
    end

    it "calls the handler with correct options and doesn't call #report" do
      options = {
        :level => level,
        :scope => Rollbar::LazyStore.new(scope),
        :exception => exception,
        :message => message,
        :extra => extra
      }

      expect(handler).to receive(:call).with(options).and_call_original
      expect(notifier).not_to receive(:report)

      result = notifier.log(level, message, exception, extra)

      expect(result).to be_eql('ignored')
    end
  end

  context 'with 2 handlers, raising Rollbar::Ignore in the first one' do
    let(:handler1) do
      proc do |options|
        raise Rollbar::Ignore
      end
    end

    let(:handler2) do
      proc do |options|
      end
    end

    before do
      configuration.before_process << handler1
      configuration.before_process << handler2
    end

    # Rollbar::Ignore stops the handler chain as well as the report.
    it "calls only the first handler and doesn't calls #report" do
      options = {
        :level => level,
        :scope => Rollbar::LazyStore.new(scope),
        :exception => exception,
        :message => message,
        :extra => extra
      }

      expect(handler1).to receive(:call).with(options).and_call_original
      expect(handler2).not_to receive(:call)
      expect(notifier).not_to receive(:report)

      result = notifier.log(level, message, exception, extra)

      expect(result).to be_eql('ignored')
    end

    context 'if the first handler fails' do
      let(:exception) { StandardError.new('foo') }
      let(:handler1) do
        proc { |options| raise exception }
      end

      # A non-Ignore handler error is logged and aborts the remaining
      # handlers.
      it 'doesnt call the second handler and logs the error' do
        expect(handler2).not_to receive(:call)
        expect(notifier).to receive(:log_error).with("[Rollbar] Error calling the `before_process` hook: #{exception}")

        notifier.log(level, message, exception, extra)
      end
    end
  end
end
# The severity helpers (debug/info/warning/error/critical) all delegate to
# #report with their level and the same (message, exception, extra) shape.
context 'debug/info/warning/error/critical' do
  let(:exception) do
    begin
      foo = bar
    rescue => e
      e
    end
  end

  let(:extra_data) { {:key => 'value', :hash => {:inner_key => 'inner_value'}} }

  it 'should report with a debug level' do
    expect(notifier).to receive(:report).with('debug', nil, exception, nil, nil)
    notifier.debug(exception)

    expect(notifier).to receive(:report).with('debug', 'description', exception, nil, nil)
    notifier.debug(exception, 'description')

    expect(notifier).to receive(:report).with('debug', 'description', exception, extra_data, nil)
    notifier.debug(exception, 'description', extra_data)
  end

  it 'should report with an info level' do
    expect(notifier).to receive(:report).with('info', nil, exception, nil, nil)
    notifier.info(exception)

    expect(notifier).to receive(:report).with('info', 'description', exception, nil, nil)
    notifier.info(exception, 'description')

    expect(notifier).to receive(:report).with('info', 'description', exception, extra_data, nil)
    notifier.info(exception, 'description', extra_data)
  end

  it 'should report with a warning level' do
    expect(notifier).to receive(:report).with('warning', nil, exception, nil, nil)
    notifier.warning(exception)

    expect(notifier).to receive(:report).with('warning', 'description', exception, nil, nil)
    notifier.warning(exception, 'description')

    expect(notifier).to receive(:report).with('warning', 'description', exception, extra_data, nil)
    notifier.warning(exception, 'description', extra_data)
  end

  it 'should report with an error level' do
    expect(notifier).to receive(:report).with('error', nil, exception, nil, nil)
    notifier.error(exception)

    expect(notifier).to receive(:report).with('error', 'description', exception, nil, nil)
    notifier.error(exception, 'description')

    expect(notifier).to receive(:report).with('error', 'description', exception, extra_data, nil)
    notifier.error(exception, 'description', extra_data)
  end

  it 'should report with a critical level' do
    expect(notifier).to receive(:report).with('critical', nil, exception, nil, nil)
    notifier.critical(exception)

    expect(notifier).to receive(:report).with('critical', 'description', exception, nil, nil)
    notifier.critical(exception, 'description')

    expect(notifier).to receive(:report).with('critical', 'description', exception, extra_data, nil)
    notifier.critical(exception, 'description', extra_data)
  end
end
# Notifier#scope returns a child notifier with a deep-copied configuration:
# changes on a child never propagate to its parent (or to Rollbar itself).
context 'scope' do
  it 'should create a new notifier object' do
    notifier2 = notifier.scope

    notifier2.should_not eq(notifier)
    notifier2.should be_instance_of(Rollbar::Notifier)
  end

  it 'should create a copy of the parent notifier\'s configuration' do
    notifier.configure do |config|
      config.code_version = '123'
      config.payload_options = {
        :a => 'a',
        :b => {:c => 'c'}
      }
    end

    notifier2 = notifier.scope

    notifier2.configuration.code_version.should == '123'
    # Same values, distinct objects — a deep copy, not a shared reference.
    notifier2.configuration.should_not equal(notifier.configuration)
    notifier2.configuration.payload_options.should_not equal(notifier.configuration.payload_options)
    notifier2.configuration.payload_options.should == notifier.configuration.payload_options
    notifier2.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'}
    }
  end

  it 'should not modify any parent notifier configuration' do
    Rollbar.clear_notifier!
    configure
    Rollbar.configuration.code_version.should be_nil
    Rollbar.configuration.payload_options.should be_empty

    notifier = Rollbar.notifier.scope

    notifier.configure do |config|
      config.code_version = '123'
      config.payload_options = {
        :a => 'a',
        :b => {:c => 'c'}
      }
    end

    notifier2 = notifier.scope

    notifier2.configure do |config|
      config.payload_options[:c] = 'c'
    end

    # Mutating the child's payload_options leaves the parent untouched.
    notifier.configuration.payload_options[:c].should be_nil

    notifier3 = notifier2.scope({
      :b => {:c => 3, :d => 'd'}
    })

    notifier3.configure do |config|
      config.code_version = '456'
    end

    notifier.configuration.code_version.should == '123'
    notifier.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'}
    }

    notifier2.configuration.code_version.should == '123'
    notifier2.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'},
      :c => 'c'
    }

    notifier3.configuration.code_version.should == '456'
    notifier3.configuration.payload_options.should == {
      :a => 'a',
      :b => {:c => 'c'},
      :c => 'c'
    }

    # The global Rollbar configuration is never touched by scoped children.
    Rollbar.configuration.code_version.should be_nil
    Rollbar.configuration.payload_options.should be_empty
  end
end
# #report input validation: empty reports return 'error'; reports for
# ignored person ids return 'ignored'. Neither is scheduled for delivery.
context 'report' do
  let(:logger_mock) { double("Rails.logger").as_null_object }

  before(:each) do
    configure
    Rollbar.configure do |config|
      config.logger = logger_mock
    end
  end

  after do
    configure
  end

  it 'should reject input that doesn\'t contain an exception, message or extra data' do
    expect(logger_mock).to receive(:error).with('[Rollbar] Tried to send a report with no message, exception or extra data.')
    expect(notifier).not_to receive(:schedule_payload)

    result = notifier.send(:report, 'info', nil, nil, nil, nil)
    result.should == 'error'
  end

  it 'should be ignored if the person is ignored' do
    person_data = {
      :id => 1,
      :username => "test",
      :email => "test@example.com"
    }

    notifier.configure do |config|
      config.ignored_person_ids += [1]
      config.payload_options = { :person => person_data }
    end

    expect(notifier).not_to receive(:schedule_payload)

    result = notifier.send(:report, 'info', 'message', nil, nil, nil)
    result.should == 'ignored'
  end
end
end
context 'reporting' do
# Shared fixtures for the 'reporting' examples: a real NameError with a
# backtrace, a null-object logger, and a persisted user.
let(:exception) do
  begin
    foo = bar
  rescue => e
    e
  end
end

let(:logger_mock) { double("Rails.logger").as_null_object }
let(:user) { User.create(:email => 'email@example.com', :encrypted_password => '', :created_at => Time.now, :updated_at => Time.now) }

before do
  Rollbar.unconfigure
  configure
  Rollbar.configure do |config|
    config.logger = logger_mock
  end
end

it 'should report exceptions without person or request data' do
  logger_mock.should_receive(:info).with('[Rollbar] Success')
  Rollbar.error(exception)
end

it 'should not report anything when disabled' do
  logger_mock.should_not_receive(:info).with('[Rollbar] Success')
  Rollbar.configure do |config|
    config.enabled = false
  end

  Rollbar.error(exception).should == 'disabled'
end

# NOTE(review): duplicate of the first example above (same description and
# body); one of the two could be removed.
it 'should report exceptions without person or request data' do
  logger_mock.should_receive(:info).with('[Rollbar] Success')
  Rollbar.error(exception)
end

it 'should be enabled when freshly configured' do
  Rollbar.configuration.enabled.should == true
end

it 'should not be enabled when not configured' do
  Rollbar.clear_notifier!

  Rollbar.configuration.enabled.should be_nil
  Rollbar.error(exception).should == 'disabled'
end

it 'should stay disabled if configure is called again' do
  # configure once, setting enabled to false.
  Rollbar.configure do |config|
    config.enabled = false
  end

  # now configure again (perhaps to change some other values)
  Rollbar.configure { |_| }

  Rollbar.configuration.enabled.should == false
  Rollbar.error(exception).should == 'disabled'
end
# exception_level_filters remap (or ignore) exception classes. They apply
# when use_exception_level_filters_default is on, or when the call passes
# :use_exception_level_filters => true; they can always be opted out per
# call with => false.
context 'using configuration.use_exception_level_filters_default' do
  before do
    Rollbar.configure do |config|
      config.use_exception_level_filters_default = true
    end
  end

  context 'without use_exception_level_filters argument' do
    it 'sends the correct filtered level' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'warning' }
      end

      Rollbar.error(exception)
      expect(Rollbar.last_report[:level]).to be_eql('warning')
    end

    it 'ignore ignored exception classes' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'ignore' }
      end

      logger_mock.should_not_receive(:info)
      logger_mock.should_not_receive(:warn)
      logger_mock.should_not_receive(:error)

      Rollbar.error(exception)
    end

    it 'should not use the filters if overriden at log site' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'ignore' }
      end

      Rollbar.error(exception, :use_exception_level_filters => false)
      expect(Rollbar.last_report[:level]).to be_eql('error')
    end
  end
end

context 'using :use_exception_level_filters option as true' do
  it 'sends the correct filtered level' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'warning' }
    end

    Rollbar.error(exception, :use_exception_level_filters => true)
    expect(Rollbar.last_report[:level]).to be_eql('warning')
  end

  it 'ignore ignored exception classes' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'ignore' }
    end

    logger_mock.should_not_receive(:info)
    logger_mock.should_not_receive(:warn)
    logger_mock.should_not_receive(:error)

    Rollbar.error(exception, :use_exception_level_filters => true)
  end

  context 'using :use_exception_level_filters option as false' do
    it 'sends the correct filtered level' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'warning' }
      end

      Rollbar.error(exception, :use_exception_level_filters => false)
      expect(Rollbar.last_report[:level]).to be_eql('error')
    end

    it 'ignore ignored exception classes' do
      Rollbar.configure do |config|
        config.exception_level_filters = { 'NameError' => 'ignore' }
      end

      Rollbar.error(exception, :use_exception_level_filters => false)
      expect(Rollbar.last_report[:level]).to be_eql('error')
    end
  end
end

# NOTE(review): this context duplicates the first two examples of the
# identically-named context above; candidate for removal.
context 'using :use_exception_level_filters option as true' do
  it 'sends the correct filtered level' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'warning' }
    end

    Rollbar.error(exception, :use_exception_level_filters => true)
    expect(Rollbar.last_report[:level]).to be_eql('warning')
  end

  it 'ignore ignored exception classes' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'ignore' }
    end

    logger_mock.should_not_receive(:info)
    logger_mock.should_not_receive(:warn)
    logger_mock.should_not_receive(:error)

    Rollbar.error(exception, :use_exception_level_filters => true)
  end
end

context 'if not using :use_exception_level_filters option' do
  it 'sends the level defined by the used method' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'warning' }
    end

    Rollbar.error(exception)
    expect(Rollbar.last_report[:level]).to be_eql('error')
  end

  it 'ignore ignored exception classes' do
    Rollbar.configure do |config|
      config.exception_level_filters = { 'NameError' => 'ignore' }
    end

    Rollbar.error(exception)
    expect(Rollbar.last_report[:level]).to be_eql('error')
  end
end
# Skip jruby 1.9+ (https://github.com/jruby/jruby/issues/2373)
if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby' && (not RUBY_VERSION =~ /^1\.9/)
it "should work with an IO object as rack.errors" do
logger_mock.should_receive(:info).with('[Rollbar] Success')
Rollbar.error(exception, :env => { :"rack.errors" => IO.new(2, File::WRONLY) })
end
end
it 'should ignore ignored persons' do
person_data = {
:id => 1,
:username => "test",
:email => "test@example.com"
}
Rollbar.configure do |config|
config.payload_options = { :person => person_data }
config.ignored_person_ids += [1]
end
logger_mock.should_not_receive(:info)
logger_mock.should_not_receive(:warn)
logger_mock.should_not_receive(:error)
Rollbar.error(exception)
end
it 'should not ignore non-ignored persons' do
person_data = {
:id => 1,
:username => "test",
:email => "test@example.com"
}
Rollbar.configure do |config|
config.payload_options = { :person => person_data }
config.ignored_person_ids += [1]
end
Rollbar.last_report = nil
Rollbar.error(exception)
Rollbar.last_report.should be_nil
person_data = {
:id => 2,
:username => "test2",
:email => "test2@example.com"
}
new_options = {
:person => person_data
}
Rollbar.scoped(new_options) do
Rollbar.error(exception)
end
Rollbar.last_report.should_not be_nil
end
it 'should allow callables to set exception filtered level' do
callable_mock = double
saved_filters = Rollbar.configuration.exception_level_filters
Rollbar.configure do |config|
config.exception_level_filters = { 'NameError' => callable_mock }
end
callable_mock.should_receive(:call).with(exception).at_least(:once).and_return("info")
logger_mock.should_receive(:info)
logger_mock.should_not_receive(:warn)
logger_mock.should_not_receive(:error)
Rollbar.error(exception, :use_exception_level_filters => true)
end
it 'should not report exceptions when silenced' do
expect_any_instance_of(Rollbar::Notifier).to_not receive(:schedule_payload)
begin
test_var = 1
Rollbar.silenced do
test_var = 2
raise
end
rescue => e
Rollbar.error(e)
end
test_var.should == 2
end
# An exception with no backtrace still produces a trace payload with an
# empty frames array.
it 'should report exception objects with no backtrace' do
  payload = nil

  notifier.stub(:schedule_item) do |*args|
    payload = args[0]
  end

  Rollbar.error(StandardError.new("oops"))

  payload["data"][:body][:trace][:frames].should == []
  payload["data"][:body][:trace][:exception][:class].should == "StandardError"
  payload["data"][:body][:trace][:exception][:message].should == "oops"
end

# With populate_empty_backtraces the caller's backtrace is substituted,
# with rollbar's own lib frames excluded from the top.
it 'gets the backtrace from the caller' do
  Rollbar.configure do |config|
    config.populate_empty_backtraces = true
  end

  exception = Exception.new

  Rollbar.error(exception)

  gem_dir = Gem::Specification.find_by_name('rollbar').gem_dir
  gem_lib_dir = gem_dir + '/lib'
  last_report = Rollbar.last_report

  filepaths = last_report[:body][:trace][:frames].map {|frame| frame[:filename] }.reverse

  expect(filepaths[0]).not_to include(gem_lib_dir)
  expect(filepaths.any? {|filepath| filepath.include?(gem_dir) }).to eq true
end

it 'should return the exception data with a uuid, on platforms with SecureRandom' do
  if defined?(SecureRandom) and SecureRandom.respond_to?(:uuid)
    exception_data = Rollbar.error(StandardError.new("oops"))
    exception_data[:uuid].should_not be_nil
  end
end

# Exceptions overriding #backtrace with non-file strings are still mapped
# into frames (the raw line lands in :method).
it 'should report exception objects with nonstandard backtraces' do
  payload = nil

  notifier.stub(:schedule_item) do |*args|
    payload = args[0]
  end

  class CustomException < StandardError
    def backtrace
      ["custom backtrace line"]
    end
  end

  exception = CustomException.new("oops")

  notifier.error(exception)

  payload["data"][:body][:trace][:frames][0][:method].should == "custom backtrace line"
end

# The reported level follows the calling method / explicit level argument.
it 'should report exceptions with a custom level' do
  payload = nil

  notifier.stub(:schedule_item) do |*args|
    payload = args[0]
  end

  Rollbar.error(exception)

  payload['data'][:level].should == 'error'

  Rollbar.log('debug', exception)

  payload['data'][:level].should == 'debug'
end
# Extra data containing invalid UTF-8 byte sequences is sanitized before the
# payload is built. (Fixed typos in the example description: "removes te
# invalid characteres" -> "removes the invalid characters".)
context 'with invalid utf8 encoding' do
  let(:extra) do
    { :extra => force_to_ascii("bad value 1\255") }
  end

  it 'removes the invalid characters' do
    Rollbar.info('removing invalid chars', extra)

    extra_value = Rollbar.last_report[:body][:message][:extra][:extra]

    # The trailing \255 byte is stripped; the valid prefix survives intact.
    expect(extra_value).to be_eql('bad value 1')
  end
end
end
# Backwards
# Backwards-compatible message-reporting examples (pre-`log` API surface).
context 'report_message' do
  before(:each) do
    configure
    Rollbar.configure do |config|
      config.logger = logger_mock
    end
  end

  let(:logger_mock) { double("Rails.logger").as_null_object }
  let(:user) { User.create(:email => 'email@example.com', :encrypted_password => '', :created_at => Time.now, :updated_at => Time.now) }

  it 'should report simple messages' do
    logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
    logger_mock.should_receive(:info).with('[Rollbar] Success')
    Rollbar.error('Test message')
  end

  it 'should not report anything when disabled' do
    logger_mock.should_not_receive(:info).with('[Rollbar] Success')
    Rollbar.configure do |config|
      config.enabled = false
    end

    Rollbar.error('Test message that should be ignored')

    Rollbar.configure do |config|
      config.enabled = true
    end
  end

  it 'should report messages with extra data' do
    logger_mock.should_receive(:info).with('[Rollbar] Success')
    Rollbar.debug('Test message with extra data', 'debug', :foo => "bar",
                  :hash => { :a => 123, :b => "xyz" })
  end
  # END Backwards

  # Cyclic extra data must not crash the notifier; it falls back to the
  # internal-error path.
  it 'should not crash with circular extra_data' do
    a = { :foo => "bar" }
    b = { :a => a }
    c = { :b => b }
    a[:c] = c

    logger_mock.should_receive(:error).with(/\[Rollbar\] Reporting internal error encountered while sending data to Rollbar./)

    Rollbar.error("Test message with circular extra data", a)
  end

  it 'should be able to report form validation errors when they are present' do
    logger_mock.should_receive(:info).with('[Rollbar] Success')
    user.errors.add(:example, "error")
    user.report_validation_errors_to_rollbar
  end

  it 'should not report form validation errors when they are not present' do
    logger_mock.should_not_receive(:info).with('[Rollbar] Success')
    user.errors.clear
    user.report_validation_errors_to_rollbar
  end

  it 'should report messages with extra data' do
    logger_mock.should_receive(:info).with('[Rollbar] Success')
    Rollbar.info("Test message with extra data", :foo => "bar",
                 :hash => { :a => 123, :b => "xyz" })
  end

  it 'should report messages with request, person data and extra data' do
    logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
    logger_mock.should_receive(:info).with('[Rollbar] Success')

    request_data = {
      :params => {:foo => 'bar'}
    }

    person_data = {
      :id => 123,
      :username => 'username'
    }

    extra_data = {
      :extra_foo => 'extra_bar'
    }

    Rollbar.configure do |config|
      config.payload_options = {
        :request => request_data,
        :person => person_data
      }
    end

    Rollbar.info("Test message", extra_data)

    Rollbar.last_report[:request].should == request_data
    Rollbar.last_report[:person].should == person_data
    Rollbar.last_report[:body][:message][:extra][:extra_foo].should == 'extra_bar'
  end
end
# Payloads go over the network by default; with write_to_file they are
# appended to configuration.filepath instead.
context 'payload_destination' do
  before(:each) do
    configure
    Rollbar.configure do |config|
      config.logger = logger_mock
      config.filepath = 'test.rollbar'
    end
  end

  after do
    configure
  end

  let(:exception) do
    begin
      foo = bar
    rescue => e
      e
    end
  end

  let(:logger_mock) { double("Rails.logger").as_null_object }

  it 'should send the payload over the network by default' do
    logger_mock.should_not_receive(:info).with('[Rollbar] Writing payload to file')
    logger_mock.should_receive(:info).with('[Rollbar] Sending item').once
    logger_mock.should_receive(:info).with('[Rollbar] Success').once
    Rollbar.error(exception)
  end

  it 'should save the payload to a file if set' do
    logger_mock.should_not_receive(:info).with('[Rollbar] Sending item')
    logger_mock.should_receive(:info).with('[Rollbar] Writing item to file').once
    logger_mock.should_receive(:info).with('[Rollbar] Success').once

    filepath = ''

    Rollbar.configure do |config|
      config.write_to_file = true
      filepath = config.filepath
    end

    Rollbar.error(exception)

    # The written file contains the access token; clean it up afterwards.
    File.exist?(filepath).should == true
    File.read(filepath).should include test_access_token
    File.delete(filepath)

    Rollbar.configure do |config|
      config.write_to_file = false
    end
  end
end
# Proxy resolution: environment variables are honored, but configuration
# settings take precedence over them.
context 'using a proxy server' do
  before do
    allow_any_instance_of(Net::HTTP).to receive(:request).and_return(OpenStruct.new(:code => 200, :body => "Success"))
    # Clear http(s)_proxy so each example controls the environment.
    @env_vars = clear_proxy_env_vars
  end

  after do
    restore_proxy_env_vars(@env_vars)
  end

  context 'via environment variables' do
    before do
      @uri = URI.parse(Rollbar::Configuration::DEFAULT_ENDPOINT)
    end

    it 'honors proxy settings in the environment' do
      ENV['http_proxy']  = 'http://user:pass@example.com:80'
      ENV['https_proxy'] = 'http://user:pass@example.com:80'

      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'example.com', 80, 'user', 'pass').and_call_original
      Rollbar.info("proxy this")
    end

    it 'does not use a proxy if no proxy settings in environemnt' do
      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, nil, nil, nil, nil).and_call_original
      Rollbar.info("proxy this")
    end
  end

  context 'set in configuration file' do
    before do
      Rollbar.configure do |config|
        config.proxy = {
          :host => 'http://config.com',
          :port => 8080,
          :user => 'foo',
          :password => 'bar'
        }
      end

      @uri = URI.parse(Rollbar::Configuration::DEFAULT_ENDPOINT)
    end

    it 'honors proxy settings in the config file' do
      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'config.com', 8080, 'foo', 'bar').and_call_original
      Rollbar.info("proxy this")
    end

    it 'gives the configuration settings precedence over environment' do
      ENV['http_proxy']  = 'http://user:pass@example.com:80'
      ENV['https_proxy'] = 'http://user:pass@example.com:80'

      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'config.com', 8080, 'foo', 'bar').and_call_original
      Rollbar.info("proxy this")
    end

    it 'allows @-signs in passwords' do
      Rollbar.configure do |config|
        config.proxy[:password] = "manh@tan"
      end

      expect(Net::HTTP).to receive(:new).with(@uri.host, @uri.port, 'config.com', 8080, 'foo', 'manh@tan').and_call_original
      Rollbar.info("proxy this")
    end
  end
end
context 'asynchronous_handling' do
before do
  # Start every example from a pristine notifier with the mock logger attached.
  Rollbar.clear_notifier!
  configure
  Rollbar.configure do |config|
    config.logger = logger_mock
  end
end

after do
  # Restore the standard test configuration for the following examples.
  configure
end

# A real exception with a backtrace, produced by referencing an undefined
# local ('bar') and rescuing the resulting error.
let(:exception) do
  begin
    foo = bar
  rescue => e
    e
  end
end

# Null-object logger double: absorbs any message not explicitly expected.
let(:logger_mock) { double("Rails.logger").as_null_object }
it 'should send the payload using the default asynchronous handler girl_friday' do
  logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
  logger_mock.should_receive(:info).with('[Rollbar] Sending item')
  logger_mock.should_receive(:info).with('[Rollbar] Success')

  # Run the girl_friday queue inline so the example stays synchronous.
  Rollbar.configure do |config|
    config.use_async = true
    GirlFriday::WorkQueue.immediate!
  end

  Rollbar.error(exception)

  # Restore async defaults so later examples are unaffected.
  Rollbar.configure do |config|
    config.use_async = false
    GirlFriday::WorkQueue.queue!
  end
end
it 'should send the payload using a user-supplied asynchronous handler' do
  logger_mock.should_receive(:info).with('Custom async handler called')
  logger_mock.should_receive(:info).with('[Rollbar] Sending item')
  logger_mock.should_receive(:info).with('[Rollbar] Success')

  Rollbar.configure do |config|
    config.use_async = true
    # The custom handler logs a marker, then hands the payload back to
    # Rollbar for synchronous processing.
    config.async_handler = Proc.new { |payload|
      logger_mock.info 'Custom async handler called'
      Rollbar.process_from_async_handler(payload)
    }
  end

  Rollbar.error(exception)
end
# We should be able to send String payloads, generated
# by a previous version of the gem. This can happen just
# after a deploy with a gem upgrade.
context 'with a payload generated as String' do
  # Simulates an async handler enqueued by a previous gem version, which
  # serialized the payload to a JSON String before processing it.
  let(:async_handler) do
    proc do |payload|
      # simulate previous gem version
      string_payload = Rollbar::JSON.dump(payload)

      Rollbar.process_from_async_handler(string_payload)
    end
  end

  before do
    Rollbar.configuration.stub(:use_async).and_return(true)
    Rollbar.configuration.stub(:async_handler).and_return(async_handler)
  end

  it 'sends a payload generated as String, not as a Hash' do
    logger_mock.should_receive(:info).with('[Rollbar] Success')

    Rollbar.error(exception)
  end

  context 'with async failover handlers' do
    before do
      Rollbar.reconfigure do |config|
        config.use_async = true
        config.async_handler = async_handler
        config.failover_handlers = handlers
        config.logger = logger_mock
      end
    end

    let(:exception) { StandardError.new('the error') }

    context 'if the async handler doesnt fail' do
      let(:async_handler) { proc { |_| 'success' } }
      let(:handler) { proc { |_| 'success' } }
      let(:handlers) { [handler] }

      it 'doesnt call any failover handler' do
        expect(handler).not_to receive(:call)

        Rollbar.error(exception)
      end
    end

    context 'if the async handler fails' do
      let(:async_handler) { proc { |_| fail 'this handler will crash' } }

      # NOTE: handlers is empty here; the previous description said
      # "if any failover handlers is configured", which was inverted.
      context 'if no failover handlers are configured' do
        let(:handlers) { [] }
        let(:log_message) do
          '[Rollbar] Async handler failed, and there are no failover handlers configured. See the docs for "failover_handlers"'
        end

        it 'logs the error but doesnt try to report an internal error' do
          expect(logger_mock).to receive(:error).with(log_message)

          Rollbar.error(exception)
        end
      end

      context 'if the first failover handler success' do
        let(:handler) { proc { |_| 'success' } }
        let(:handlers) { [handler] }

        it 'calls the failover handler and doesnt report internal error' do
          expect(Rollbar).not_to receive(:report_internal_error)
          expect(handler).to receive(:call)

          Rollbar.error(exception)
        end
      end

      context 'with two handlers, the first failing' do
        let(:handler1) { proc { |_| fail 'this handler fails' } }
        let(:handler2) { proc { |_| 'success' } }
        let(:handlers) { [handler1, handler2] }

        it 'calls the second handler and doesnt report internal error' do
          expect(handler2).to receive(:call)

          Rollbar.error(exception)
        end
      end

      context 'with two handlers, both failing' do
        let(:handler1) { proc { |_| fail 'this handler fails' } }
        let(:handler2) { proc { |_| fail 'this will also fail' } }
        let(:handlers) { [handler1, handler2] }

        it 'reports internal error' do
          expect(logger_mock).to receive(:error)

          Rollbar.error(exception)
        end
      end
    end
  end
end
describe "#use_sucker_punch", :if => defined?(SuckerPunch) do
it "should send the payload to sucker_punch delayer" do
logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
expect(Rollbar::Delay::SuckerPunch).to receive(:call)
Rollbar.configure(&:use_sucker_punch)
Rollbar.error(exception)
end
end
describe "#use_shoryuken", :if => defined?(Shoryuken) do
it "should send the payload to shoryuken delayer" do
logger_mock.should_receive(:info).with('[Rollbar] Scheduling item')
expect(Rollbar::Delay::Shoryuken).to receive(:call)
Rollbar.configure(&:use_shoryuken)
Rollbar.error(exception)
end
end
describe "#use_sidekiq", :if => defined?(Sidekiq) do
it "should instanciate sidekiq delayer with custom values" do
Rollbar::Delay::Sidekiq.should_receive(:new).with('queue' => 'test_queue')
config = Rollbar::Configuration.new
config.use_sidekiq 'queue' => 'test_queue'
end
it "should send the payload to sidekiq delayer" do
handler = double('sidekiq_handler_mock')
handler.should_receive(:call)
Rollbar.configure do |config|
config.use_sidekiq
config.async_handler = handler
end
Rollbar.error(exception)
end
end
end
context 'logger' do
  before(:each) do
    reset_configuration
  end

  it 'should use the Rails logger when configured to do so' do
    configure

    # The notifier wraps whatever logger is configured in a LoggerProxy.
    expect(Rollbar.send(:logger)).to be_kind_of(Rollbar::LoggerProxy)
    expect(Rollbar.send(:logger).object).to eq ::Rails.logger
  end

  it 'should use the default_logger when no logger is set' do
    logger = Logger.new(STDERR)

    Rollbar.configure do |config|
      config.default_logger = lambda { logger }
    end

    Rollbar.send(:logger).object.should == logger
  end

  it 'should have a default default_logger' do
    Rollbar.send(:logger).should_not be_nil
  end

  after do
    reset_configuration
  end
end
context "project_gems" do
it "should include gem paths for specified project gems in the payload" do
gems = ['rack', 'rspec-rails']
gem_paths = []
Rollbar.configure do |config|
config.project_gems = gems
end
gem_paths = gems.map do |gem|
gem_spec = Gem::Specification.find_all_by_name(gem)[0]
gem_spec.gem_dir if gem_spec
end.compact
data = notifier.send(:build_item, 'info', 'test', nil, {}, nil)['data']
data[:project_package_paths].kind_of?(Array).should == true
data[:project_package_paths].length.should == gem_paths.length
data[:project_package_paths].each_with_index{|path, index|
path.should == gem_paths[index]
}
end
it "should handle regex gem patterns" do
gems = ["rack", /rspec/, /roll/]
gem_paths = []
Rollbar.configure do |config|
config.project_gems = gems
end
gem_paths = gems.map do |name|
Gem::Specification.each.select { |spec| name === spec.name }
end.flatten.uniq.map(&:gem_dir)
gem_paths.length.should > 1
gem_paths.any?{|path| path.include? 'rollbar-gem'}.should == true
gem_paths.any?{|path| path.include? 'rspec-rails'}.should == true
data = notifier.send(:build_item, 'info', 'test', nil, {}, nil)['data']
data[:project_package_paths].kind_of?(Array).should == true
data[:project_package_paths].length.should == gem_paths.length
(data[:project_package_paths] - gem_paths).length.should == 0
end
it "should not break on non-existent gems" do
gems = ["this_gem_does_not_exist", "rack"]
Rollbar.configure do |config|
config.project_gems = gems
end
data = notifier.send(:build_item, 'info', 'test', nil, {}, nil)['data']
data[:project_package_paths].kind_of?(Array).should == true
data[:project_package_paths].length.should == 1
end
end
context 'report_internal_error', :reconfigure_notifier => true do
  it "should not crash when given an exception object" do
    begin
      # Raise a real exception (ZeroDivisionError) so it carries a backtrace.
      1 / 0
    rescue => e
      notifier.send(:report_internal_error, e)
    end
  end
end
context "send_failsafe" do
let(:exception) { StandardError.new }
it "doesn't crash when given a message and exception" do
sent_payload = notifier.send(:send_failsafe, "test failsafe", exception)
expected_message = 'Failsafe from rollbar-gem. StandardError: test failsafe'
expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_message)
end
it "doesn't crash when given all nils" do
notifier.send(:send_failsafe, nil, nil)
end
context 'with a non default exception message' do
let(:exception) { StandardError.new 'Something is wrong' }
it 'adds it to exception info' do
sent_payload = notifier.send(:send_failsafe, "test failsafe", exception)
expected_message = 'Failsafe from rollbar-gem. StandardError: "Something is wrong": test failsafe'
expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_message)
end
end
context 'without exception object' do
it 'just sends the given message' do
sent_payload = notifier.send(:send_failsafe, "test failsafe", nil)
expected_message = 'Failsafe from rollbar-gem. test failsafe'
expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_message)
end
end
context 'if the exception has a backtrace' do
let(:backtrace) { ['func3', 'func2', 'func1'] }
let(:failsafe_reason) { 'StandardError in func3: test failsafe' }
let(:expected_body) { "Failsafe from rollbar-gem. #{failsafe_reason}" }
let(:expected_log_message) do
"[Rollbar] Sending failsafe response due to #{failsafe_reason}"
end
before { exception.set_backtrace(backtrace) }
it 'adds the nearest frame to the message' do
expect(notifier).to receive(:log_error).with(expected_log_message)
sent_payload = notifier.send(:send_failsafe, "test failsafe", exception)
expect(sent_payload['data'][:body][:message][:body]).to be_eql(expected_body)
end
end
context 'with uuid and host' do
let(:host) { 'the-host' }
let(:uuid) { 'the-uuid' }
it 'sets the uuid and host in correct keys' do
sent_payload = notifier.send(:send_failsafe, 'testing uuid and host',
exception, uuid, host)
expect(sent_payload['data'][:custom][:orig_uuid]).to be_eql('the-uuid')
expect(sent_payload['data'][:custom][:orig_host]).to be_eql('the-host')
end
end
end
context 'when reporting internal error with nil context' do
  # A context proc that returns nil; reporting must still succeed.
  let(:context_proc) { proc {} }
  let(:scoped_notifier) { notifier.scope(:context => context_proc) }
  let(:exception) { Exception.new }
  let(:logger_mock) { double("Rails.logger").as_null_object }

  it 'reports successfully' do
    configure

    Rollbar.configure do |config|
      config.logger = logger_mock
    end

    logger_mock.should_receive(:info).with('[Rollbar] Sending item').once
    logger_mock.should_receive(:info).with('[Rollbar] Success').once
    scoped_notifier.send(:report_internal_error, exception)
  end
end
context "request_data_extractor" do
before(:each) do
class DummyClass
end
@dummy_class = DummyClass.new
@dummy_class.extend(Rollbar::RequestDataExtractor)
end
context "rollbar_headers" do
it "should not include cookies" do
env = {"HTTP_USER_AGENT" => "test", "HTTP_COOKIE" => "cookie"}
headers = @dummy_class.send(:rollbar_headers, env)
headers.should have_key "User-Agent"
headers.should_not have_key "Cookie"
end
end
end
describe '.scoped' do
  let(:scope_options) do
    { :foo => 'bar' }
  end

  it 'changes data in scope_object inside the block' do
    Rollbar.clear_notifier!
    configure

    current_notifier_id = Rollbar.notifier.object_id

    # Inside the block a new, scoped notifier replaces the current one...
    Rollbar.scoped(scope_options) do
      scope_object = Rollbar.notifier.scope_object

      expect(Rollbar.notifier.object_id).not_to be_eql(current_notifier_id)
      expect(scope_object).to be_eql(scope_options)
    end

    # ...and the previous notifier is restored afterwards.
    expect(Rollbar.notifier.object_id).to be_eql(current_notifier_id)
  end

  context 'if the block fails' do
    let(:crashing_block) { proc { fail } }

    it 'restores the old notifier' do
      notifier = Rollbar.notifier

      # NOTE(review): bare raise_error (no class) triggers a deprecation
      # warning on newer RSpec; consider raise_error(RuntimeError).
      expect { Rollbar.scoped(&crashing_block) }.to raise_error

      expect(notifier).to be_eql(Rollbar.notifier)
    end
  end

  context 'if the block creates a new thread' do
    let(:block) do
      proc do
        Thread.new do
          # Hand the thread-local scope back to the main thread for assertion.
          scope = Rollbar.notifier.scope_object
          Thread.main[:inner_scope] = scope
        end.join
      end
    end

    let(:scope) do
      { :foo => 'bar' }
    end

    it 'maintains the parent thread notifier scope' do
      Rollbar.scoped(scope, &block)

      expect(Thread.main[:inner_scope]).to be_eql(scope)
    end
  end
end
describe '.scope!' do
  let(:new_scope) do
    { :person => { :id => 1 } }
  end

  before { reconfigure_notifier }

  it 'adds the new scope to the payload options' do
    scope_object = Rollbar.notifier.scope_object
    Rollbar.scope!(new_scope)

    # scope! mutates the current notifier's scope object in place.
    expect(scope_object).to be_eql(new_scope)
  end
end
describe '.clear_notifier' do
  before { Rollbar.notifier }

  it 'resets the notifier' do
    Rollbar.clear_notifier!

    # Both the thread notifier and the root notifier must be dropped.
    expect(Rollbar.instance_variable_get('@notifier')).to be_nil
    expect(Rollbar.instance_variable_get('@root_notifier')).to be_nil
  end
end
describe '.process_item' do
  context 'if there is an exception sending the payload' do
    let(:exception) { StandardError.new('error message') }
    let(:payload) { Rollbar::Item.build_with({ :foo => :bar }) }

    it 'logs the error and the payload' do
      allow(Rollbar.notifier).to receive(:send_item).and_raise(exception)

      # The error is logged and then re-raised to the caller.
      expect(Rollbar.notifier).to receive(:log_error)
      expect { Rollbar.notifier.process_item(payload) }.to raise_error(exception)
    end
  end
end
describe '.process_from_async_handler' do
  context 'with errors' do
    let(:exception) { StandardError.new('the error') }

    it 'raises anything and sends internal error' do
      allow(Rollbar.notifier).to receive(:process_item).and_raise(exception)
      expect(Rollbar.notifier).to receive(:report_internal_error).with(exception)

      expect do
        Rollbar.notifier.process_from_async_handler({})
      end.to raise_error(exception)

      # The exception is flagged so it won't be reported a second time.
      rollbar_do_not_report = exception.instance_variable_get(:@_rollbar_do_not_report)
      expect(rollbar_do_not_report).to be_eql(true)
    end
  end
end
describe '.preconfigure' do
  before do
    Rollbar.clear_notifier!
  end

  it 'resets the notifier' do
    Rollbar.configure do |config|
      config.access_token = 'foo'
    end

    # Spawn a thread so a thread-local notifier exists besides the main one.
    Thread.new {}

    Rollbar.preconfigure do |config|
      config.root = 'bar'
    end

    # The preconfigured value must be visible through the current notifier.
    notifier_config = Rollbar.notifier.configuration
    expect(notifier_config.root).to be_eql('bar')
  end
end
context 'having timeout issues (for ruby > 1.9.3)' do
  before do
    # The timeout exception classes used here only exist on modern rubies.
    skip if Rollbar::LanguageSupport.ruby_18? || Rollbar::LanguageSupport.ruby_19?
  end

  let(:exception_class) do
    Rollbar::LanguageSupport.timeout_exceptions.first
  end
  let(:net_exception) do
    exception_class.new
  end

  before do
    # Make every HTTP request raise the timeout exception.
    allow_any_instance_of(Net::HTTP).to receive(:request).and_raise(net_exception)
  end

  it 'retries the request' do
    # Initial attempt plus retries: 3 calls in total, after which the
    # failure is reported as an internal error.
    expect_any_instance_of(Net::HTTP).to receive(:request).exactly(3).times

    expect(Rollbar.notifier).to receive(:report_internal_error).with(net_exception)

    Rollbar.info('foo')
  end
end
describe '.with_config' do
  # (A previously declared let(:new_config) was never referenced and has
  # been removed.)
  it 'uses the new config and restores the old one' do
    config1 = described_class.configuration

    subject.with_config(:environment => 'bar') do
      # Inside the block a different (overridden) configuration is active.
      expect(described_class.configuration).not_to be(config1)
    end

    # After the block the original configuration object is back.
    expect(described_class.configuration).to be(config1)
  end
end
# configure with some basic params
def configure
  # Delegates to the spec helper that rebuilds the notifier with test defaults.
  reconfigure_notifier
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.