CombinedText stringlengths 4 3.42M |
|---|
ENV["RAILS_ENV"] ||= 'test'
require 'rubygems'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'rspec-given'
require 'database_cleaner'
require 'webmock/rspec'
WebMock.enable!
# save to CircleCI's artifacts directory if we're on CircleCI
if ENV['CIRCLE_ARTIFACTS']
require 'simplecov'
dir = File.join("tmp", "coverage")
SimpleCov.coverage_dir(dir)
SimpleCov.start do
add_filter "/spec/"
end
end
allowed_hosts = [/codeclimate\.com/]
WebMock.disable_net_connect!(allow_localhost: true, allow: allowed_hosts)
load File.join(Rails.root, 'Rakefile')
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
RSpec.configure do |config|
config.run_all_when_everything_filtered = true
config.filter_run :focus
config.infer_spec_type_from_file_location!
config.order = 'random'
config.include FactoryGirl::Syntax::Methods
config.expect_with :rspec do |c|
c.syntax = [:expect]
end
config.before(:suite) do
DatabaseCleaner.strategy = :transaction
end
config.before :each do |example|
DatabaseCleaner.start unless example.metadata[:nodb]
end
config.after(:each) do |example|
DatabaseCleaner.clean unless example.metadata[:nodb]
end
config.before(:all) do
FactoryGirl.reload
end
# Fakes
config.before :each do
stub_request(:any, /passthepopcorn.me/).to_rack(FakePTP)
stub_request(:any, /api.themoviedb.org/).to_rack(FakeTmdb)
stub_request(:any, /trakt.tv/).to_rack(FakeTrakt)
stub_request(:any, /spoiled.mskog.com/).to_rack(FakeSpoiled)
end
end
The CIRCLE_ARTIFACTS environment variable no longer exists in CircleCI 2.0, so gate coverage on CIRCLECI instead.
# RSpec bootstrap: boots the Rails test env, stubs all HTTP via WebMock,
# wraps examples in DatabaseCleaner transactions, and routes external APIs
# to in-process rack fakes.
ENV["RAILS_ENV"] ||= 'test'
require 'rubygems'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'rspec-given'
require 'database_cleaner'
require 'webmock/rspec'
WebMock.enable!
# save to CircleCI's artifacts directory if we're on CircleCI
if ENV['CIRCLECI']
require 'simplecov'
dir = File.join("tmp", "coverage")
SimpleCov.coverage_dir(dir)
SimpleCov.start do
# Exclude the specs themselves from the coverage report.
add_filter "/spec/"
end
end
# Forbid real network access except localhost and the coverage reporter.
allowed_hosts = [/codeclimate\.com/]
WebMock.disable_net_connect!(allow_localhost: true, allow: allowed_hosts)
# Load rake task definitions so specs can invoke them.
load File.join(Rails.root, 'Rakefile')
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
RSpec.configure do |config|
config.run_all_when_everything_filtered = true
config.filter_run :focus
config.infer_spec_type_from_file_location!
config.order = 'random'
config.include FactoryGirl::Syntax::Methods
config.expect_with :rspec do |c|
c.syntax = [:expect]
end
config.before(:suite) do
DatabaseCleaner.strategy = :transaction
end
# Examples tagged :nodb opt out of the per-example DB transaction.
config.before :each do |example|
DatabaseCleaner.start unless example.metadata[:nodb]
end
config.after(:each) do |example|
DatabaseCleaner.clean unless example.metadata[:nodb]
end
config.before(:all) do
FactoryGirl.reload
end
# Fakes
# Route outbound API calls to rack fakes (presumably defined in
# spec/support — NOTE(review): confirm FakePTP etc. live there).
config.before :each do
stub_request(:any, /passthepopcorn.me/).to_rack(FakePTP)
stub_request(:any, /api.themoviedb.org/).to_rack(FakeTmdb)
stub_request(:any, /trakt.tv/).to_rack(FakeTrakt)
stub_request(:any, /spoiled.mskog.com/).to_rack(FakeSpoiled)
end
end
|
require 'rubygems'
require 'pathname'
require 'fakeweb'
require 'dm-validations'
# Support running specs with 'rake spec' and 'spec'
$LOAD_PATH.unshift('lib') unless $LOAD_PATH.include?('lib')
require 'dm-rest-adapter'
ROOT = Pathname(__FILE__).dirname.parent
DataMapper.setup(:default, 'rest://admin:secret@localhost:4000/?format=xml')
Dir[ROOT / 'spec' / 'fixtures' / '**' / '*.rb'].each { |rb| require rb }
FakeWeb.allow_net_connect = false
Remove Pathname#/ usage.
# Spec bootstrap for dm-rest-adapter: points DataMapper at a local REST
# endpoint and blocks all real HTTP via FakeWeb.
require 'rubygems'
require 'pathname'
require 'fakeweb'
require 'dm-validations'
# Support running specs with 'rake spec' and 'spec'
$LOAD_PATH.unshift('lib') unless $LOAD_PATH.include?('lib')
require 'dm-rest-adapter'
# Project root (parent directory of spec/).
ROOT = Pathname(__FILE__).dirname.parent
# Test repository: XML over REST against a server expected on localhost:4000.
DataMapper.setup(:default, 'rest://admin:secret@localhost:4000/?format=xml')
# Load every fixture definition under spec/fixtures.
Pathname.glob((ROOT + 'spec/fixtures/**/*.rb').to_s).each { |file| require file }
# Any unstubbed HTTP request raises instead of hitting the network.
FakeWeb.allow_net_connect = false
|
require 'rubygems'
require 'spork'
Spork.prefork do
# Loading more in this block will cause your tests to run faster. However,
# if you change any configuration or code from libraries loaded here, you'll
# need to restart spork for it take effect.
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require "paperclip/matchers"
require 'factory_girl'
require 'ffaker'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
begin
Dir[('factories/*.rb')].each {|f| require f }
rescue Factory::DuplicateDefinitionError
nil
end
RSpec.configure do |config|
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
config.include Paperclip::Shoulda::Matchers
config.include(ControllerMacros, :type => :controller)
config.include(UserMocks)
end
end
Spork.each_run do
# This code will be run each time you run your specs.
#Load all factories and report if there is duplicate definition
end
#Enable to have coverage tool
#require 'simplecov'
#SimpleCov.start
# Helpers shared by user specs: canonical attribute hashes for creating
# users and for faking an OmniAuth (Facebook) authentication payload.
module UserSpecHelper
  # Minimal attributes for a valid user record.
  def valid_user_attributes
    { :provider => 'facebook', :uid => '111111111' }
  end

  # Hash mimicking the payload OmniAuth hands to the auth callback.
  def valid_omniauth_hash
    info = { "name" => "Slobodan Kovacevic", "nickname" => "basti" }
    {
      "provider"    => 'facebook',
      "uid"         => '111111111',
      "user_info"   => info,
      "credentials" => { "token" => "111111111" },
      "extra"       => { "email" => "testing@test.net" }
    }
  end
end
Spec helpers
# Spork-based RSpec bootstrap: heavy Rails/gem loading happens once in
# prefork; each_run is intentionally empty so reruns stay fast.
require 'rubygems'
require 'spork'
Spork.prefork do
# Loading more in this block will cause your tests to run faster. However,
# if you change any configuration or code from libraries loaded here, you'll
# need to restart spork for it take effect.
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require "paperclip/matchers"
require 'factory_girl'
require 'ffaker'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
# NOTE(review): 'factories/*.rb' is relative to the working directory, and a
# DuplicateDefinitionError aborts the remaining requires silently — confirm
# this is intentional.
begin
Dir[('factories/*.rb')].each {|f| require f }
rescue Factory::DuplicateDefinitionError
nil
end
RSpec.configure do |config|
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
config.include Paperclip::Shoulda::Matchers
config.include(ControllerMacros, :type => :controller)
config.include(UserMocks)
end
end
Spork.each_run do
# This code will be run each time you run your specs.
#Load all factories and report if there is duplicate definition
end
#Enable to have coverage tool
#require 'simplecov'
#SimpleCov.start
# Shared user-spec helpers: attribute hashes for a valid user and for a
# simulated OmniAuth (Facebook) callback payload.
module UserSpecHelper
  # Minimal attributes for a valid user record.
  def valid_user_attributes
    { :provider => 'facebook', :uid => '111111111' }
  end

  # Hash shaped like the payload OmniAuth delivers on authentication.
  def valid_omniauth_hash
    {
      "provider" => 'facebook',
      "uid" => '111111111',
      "user_info" => {
        "name" => "Slobodan Kovacevic",
        "nickname" => "basti"
      }
    }.merge(
      "credentials" => { "token" => "111111111" },
      "extra" => { "email" => "testing@test.net" }
    )
  end
end
# RSpec-1 era convenience: stub an association on the receiver with a mock
# whose listed methods return canned values.
module Spec
module Mocks
module Methods
# association_name: name of the association to stub on self.
# methods_to_be_stubbed: {method => return_value} pairs stubbed on the mock.
# Builds a named mock, stubs each requested method on it, then stubs the
# association reader on self to return that mock.
def stub_association!(association_name, methods_to_be_stubbed = {})
mock_association = Spec::Mocks::Mock.new(association_name.to_s)
methods_to_be_stubbed.each do |method, return_value|
mock_association.stub!(method).and_return(return_value)
end
self.stub!(association_name).and_return(mock_association)
end
end
end
end
|
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/autorun'
require 'nokogiri'
%w{spec/support lib}.each do |dir|
Dir[Rails.root.join("#{dir}/**/*.rb")].each {|f| require f}
end
RSpec.configure do |config|
config.mock_with :rspec
config.order = "random"
config.include FactoryGirl::Syntax::Methods
end
# Matches a record (or array of records) against expected record(s) by their
# persisted attributes, ignoring '_id'.
RSpec::Matchers.define :have_same_attributes_as do |expected|
  match do |actual|
    # Normalize the scalar case so both sides are arrays.
    if actual.class != Array
      actual = [actual]
      expected = [expected]
    end
    # BUG FIX 1: attribute hashes are string-keyed, so excluding the Symbol
    # :_id never removed anything — use the String key.
    ignored = ['_id']
    # BUG FIX 2: each_with_index returns its receiver (always truthy), so the
    # matcher could never fail. all? makes the comparisons decide the result.
    actual.each_with_index.all? do |c, i|
      c.attributes.except(*ignored) == expected[i].attributes.except(*ignored)
    end
  end
end
def xml_fixture file_name
f = File.open("#{Rails.root}/spec/fixtures/import/#{file_name}.xml")
Nokogiri::XML(f)
end
fix custom matcher
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/autorun'
require 'nokogiri'
%w{spec/support lib}.each do |dir|
Dir[Rails.root.join("#{dir}/**/*.rb")].each {|f| require f}
end
RSpec.configure do |config|
config.mock_with :rspec
config.order = "random"
config.include FactoryGirl::Syntax::Methods
end
# Matches a record (or array of records) against expected record(s) by their
# persisted attributes, ignoring '_id'. Raises if the collections differ in
# size so a length mismatch is reported loudly rather than silently passing.
RSpec::Matchers.define :have_same_attributes_as do |expected|
  match do |actual|
    # Normalize the scalar case so both sides are arrays.
    if actual.class != Array
      actual = [actual]
      expected = [expected]
    end
    raise "different size: expected: #{expected.size} actual: #{actual.size}" if expected.size != actual.size
    ignored = ['_id']
    # BUG FIX: each_with_index returns its receiver (always truthy), so the
    # per-element comparison was discarded and the matcher could never fail.
    # all? makes the comparisons actually determine the match result.
    actual.each_with_index.all? do |c, i|
      c.attributes.except(*ignored) == expected[i].attributes.except(*ignored)
    end
  end
end
# Loads spec/fixtures/import/<file_name>.xml and returns it parsed as a
# Nokogiri XML document.
def xml_fixture(file_name)
  path = "#{Rails.root}/spec/fixtures/import/#{file_name}.xml"
  Nokogiri::XML(File.open(path))
end
# -*- encoding : utf-8 -*-
require 'rubygems'
require 'rspec'
require File.expand_path(File.join(File.dirname(__FILE__), "..", "lib", "saulabs", "trueskill.rb"))
require File.expand_path(File.join(File.dirname(__FILE__), "true_skill_matchers.rb"))
include Saulabs
Spec::Runner.configure do |config|
config.include(TrueSkillMatchers)
end
# Permitted absolute error when comparing floating-point skill values.
def tolerance
  1e-3
end
def create_teams
[
[
TrueSkill::Rating.new(25, 4.1)
],
[
TrueSkill::Rating.new(27, 3.1),
TrueSkill::Rating.new(10, 1.0)
],
[
TrueSkill::Rating.new(32, 0.2)
]
]
end
New rspec module for spec_helper
# -*- encoding : utf-8 -*-
# Spec bootstrap for saulabs-trueskill: loads the library and its custom
# matchers directly by path (no Rails, no bundler setup).
require 'rubygems'
require 'rspec'
require File.expand_path(File.join(File.dirname(__FILE__), "..", "lib", "saulabs", "trueskill.rb"))
require File.expand_path(File.join(File.dirname(__FILE__), "true_skill_matchers.rb"))
# Pull Saulabs into the top level so specs can reference TrueSkill directly.
include Saulabs
RSpec.configure do |config|
config.include(TrueSkillMatchers)
end
# Permitted absolute error when comparing floating-point skill values.
def tolerance
  1e-3
end
# Builds the three-team fixture used across the specs: a solo 25/4.1 player,
# a duo (27/3.1 and 10/1.0), and a solo 32/0.2 player.
def create_teams
  specs = [
    [[25, 4.1]],
    [[27, 3.1], [10, 1.0]],
    [[32, 0.2]]
  ]
  specs.map do |team|
    team.map { |mean, deviation| TrueSkill::Rating.new(mean, deviation) }
  end
end
|
require "bundler/setup"
require "rspec"
require "active_record"
require "active_support"
require "sqlite3"
require "merchant_sidekick"
require "ruby-debug"
RSpec.configure do |config|
# config.use_transactional_fixtures = true
# config.use_instantiated_fixtures = false
# config.fixture_path = File.dirname(__FILE__) + '/fixtures'
end
# If you want to see the ActiveRecord log, invoke the tests using `rake test LOG=true`
if ENV["LOG"]
require "logger"
ActiveRecord::Base.logger = Logger.new($stdout)
end
ActiveRecord::Base.establish_connection :adapter => "sqlite3", :database => ":memory:"
ActiveRecord::Migration.verbose = false
require "schema"
at_exit {ActiveRecord::Base.connection.disconnect!}
Money.default_currency = Money::Currency.wrap("USD")
#--- Sudo fixtures
def transaction
ActiveRecord::Base.connection.transaction do
send(:setup) if respond_to?(:setup)
yield
raise ActiveRecord::Rollback
end
end
def users(key, options = {})
values = YAML::load_file(File.expand_path("../fixtures/users.yml", __FILE__))
(values[key.to_s]["type"] || "User").constantize.create! values[key.to_s].merge(options)
end
def products(key, options = {})
values = YAML::load_file(File.expand_path("../fixtures/products.yml", __FILE__))
(values[key.to_s]["type"] || "Product").constantize.create! values[key.to_s].merge(options)
end
def addresses(key, options = {})
values = YAML::load_file(File.expand_path("../fixtures/addresses.yml", __FILE__))
(values[key.to_s]["type"] || "MerchantSidekick::Addressable::Address").constantize.create! values[key.to_s].merge(options)
end
def orders(key, options = {})
values = YAML::load_file(File.expand_path("../fixtures/orders.yml", __FILE__))
(values[key.to_s]["type"] || "MerchantSidekick::Order").constantize.create! values[key.to_s].merge(options)
end
def payments(key, options = {})
values = YAML::load_file(File.expand_path("../fixtures/payments.yml", __FILE__))
(values[key.to_s]["type"] || "MerchantSidekick::Payment").constantize.create! values[key.to_s].merge(options)
end
def line_items(key, options = {})
values = YAML::load_file(File.expand_path("../fixtures/line_items.yml", __FILE__))
(values[key.to_s]["type"] || "MerchantSidekick::LineItem").constantize.create! values[key.to_s].merge(options)
end
#--- MerchantSidekick::Addressable test models
class MerchantSidekick::Addressable::Address
# extends to_s to add name for testing purposes
def to_s_with_name
name = []
name << self.first_name
name << self.middle_name if MerchantSidekick::Addressable::Address.middle_name?
name << self.last_name
name = name.reject(&:blank?).join(" ")
[name, to_s_without_name].reject(&:blank?).join(", ")
end
alias_method_chain :to_s, :name
end
class Addressable < ActiveRecord::Base
end
class HasOneSingleAddressModel < Addressable
# acts_as_addressable :has_one => true
has_address
end
class HasManySingleAddressModel < Addressable
# acts_as_addressable :has_many => true
has_addresses
end
class HasOneMultipleAddressModel < Addressable
# acts_as_addressable :billing, :shipping, :has_one => true
has_address :billing, :shipping
end
class HasManyMultipleAddressModel < Addressable
# acts_as_addressable :billing, :shipping, :has_many => true
has_addresses :billing, :shipping
end
def valid_address_attributes(attributes = {})
{
:first_name => "George",
:last_name => "Bush",
:gender => 'm',
:street => "100 Washington St.",
:postal_code => "95065",
:city => "Santa Cruz",
:province_code => "CA",
:province => "California",
:company_name => "Exxon",
:phone => "+1 831 123-4567",
:mobile => "+1 831 223-4567",
:fax => "+1 831 323-4567",
:country_code => "US",
:country => "United States of America"
}.merge(MerchantSidekick::Addressable::Address.middle_name? ? { :middle_name => "W." } : {}).merge(attributes)
end
#--- MerchantSidekick generic test models
class Product < ActiveRecord::Base
money :price, :cents => :price_cents, :currency => :price_currency
acts_as_sellable
# TODO weird cart serialization workaround
def target; true; end
end
class ProductWithNameAndSku < Product
def name; "A beautiful name"; end
def sku; "PR1234"; end
def description; "Wonderful name!"; end
def taxable; true; end
end
class ProductWithTitleAndNumber < Product
def title; "A beautiful title"; end
def number; "PR1234"; end
def description; "Wonderful title!"; end
def new_record?; true; end
end
class ProductWithCopy < Product
def copy_name(options={}); "customized name"; end
def copy_item_number(options = {}); "customized item number"; end
def copy_description(options = {}); "customized description"; end
def copy_price(options = {}); Money.new(9999, "USD"); end
end
class User < ActiveRecord::Base
# acts_as_addressable :billing, :shipping
has_address :billing, :shipping
end
# TODO rename to just "Buyer"
class BuyingUser < User
acts_as_buyer
end
# TODO rename to just "Seller"
class SellingUser < User
acts_as_seller
end
#--- MerchantSidekick shopping cart
def valid_cart_line_item_attributes(attributes = {})
{:quantity => 5}.merge(attributes)
end
#--- ActiveMerchant related helpers
def valid_credit_card_attributes(attributes = {})
{
:number => "1", #"4242424242424242",
:first_name => "Claudio",
:last_name => "Almende",
:month => "8",
:year => "#{ Time.now.year + 1 }",
:verification_value => '123',
:type => 'visa'
}.merge(attributes)
end
def invalid_credit_card_attributes(attributes = {})
{
:first_name => "Bad",
:last_name => "Boy",
:month => "8",
:year => Time.now.year + 1,
:number => "2",
:type => "bogus"
}.merge(attributes)
end
def credit_card(options={})
ActiveMerchant::Billing::CreditCard.new(valid_credit_card_attributes(options))
end
def valid_credit_card(options={})
credit_card(valid_credit_card_attributes(options))
end
def invalid_credit_card(options={})
credit_card(invalid_credit_card_attributes(options))
end
module ActiveMerchant
module Billing
class BogusGateway < Gateway
# Transfers money to one or multiple recipients (bulk transfer).
#
# Overloaded activemerchant bogus gateways to support transfers, similar
# to Paypal Website Payments Pro functionality.
#
# E.g.
#
# @gateway.transfer 1000, "bob@example.com",
# :subject => "The money I owe you", :note => "Sorry, it's coming in late."
#
# gateway.transfer [1000, 'fred@example.com'],
# [2450, 'wilma@example.com', :note => 'You will receive an extra payment on March 24.'],
# [2000, 'barney@example.com'],
# :subject => "Salary January.", :note => "Thanks for your hard work."
def transfer(money, paypal_account, options={})
if paypal_account == 'fail@error.tst'
Response.new(false, FAILURE_MESSAGE, {:paid_amount => money.to_s, :error => FAILURE_MESSAGE },:test => true)
elsif paypal_account == 'error@error.tst'
raise Error, ERROR_MESSAGE
elsif /[\w-]+(?:\.[\w-]+)*@(?:[\w-]+\.)+[a-zA-Z]{2,7}$/i.match(paypal_account)
Response.new(true, SUCCESS_MESSAGE, {:paid_amount => money.to_s}, :test => true)
else
raise Error, ERROR_MESSAGE
end
end
end
end
end
ActiveMerchant::Billing::Base.mode = :test
MerchantSidekick::Gateways::Gateway.default_gateway = ActiveMerchant::Billing::BogusGateway.new
ActiveMerchant::Billing::CreditCard.require_verification_value = true
Removed commented acts_as_addressable definitions
# Spec bootstrap for merchant_sidekick: in-memory SQLite, schema loaded from
# spec/schema.rb, USD as default currency.
require "bundler/setup"
require "rspec"
require "active_record"
require "active_support"
require "sqlite3"
require "merchant_sidekick"
require "ruby-debug"
RSpec.configure do |config|
# config.use_transactional_fixtures = true
# config.use_instantiated_fixtures = false
# config.fixture_path = File.dirname(__FILE__) + '/fixtures'
end
# If you want to see the ActiveRecord log, invoke the tests using `rake test LOG=true`
if ENV["LOG"]
require "logger"
ActiveRecord::Base.logger = Logger.new($stdout)
end
# Fresh throwaway database per run.
ActiveRecord::Base.establish_connection :adapter => "sqlite3", :database => ":memory:"
ActiveRecord::Migration.verbose = false
require "schema"
at_exit {ActiveRecord::Base.connection.disconnect!}
Money.default_currency = Money::Currency.wrap("USD")
#--- Sudo fixtures
# Runs the given block inside a DB transaction that is always rolled back,
# invoking the example's setup hook first if one is defined.
def transaction
ActiveRecord::Base.connection.transaction do
send(:setup) if respond_to?(:setup)
yield
# Undo everything the block did so examples remain isolated.
raise ActiveRecord::Rollback
end
end
# --- YAML-backed pseudo fixtures ------------------------------------------
# Each public helper loads spec/fixtures/<file>.yml, looks up the entry for
# +key+, and create!s it via the class named by the entry's "type" field
# (falling back to +default_type+), with +options+ merged over the entry.
# Extracted into one shared helper to remove six-fold copy/paste.
def fixture_record(file, key, default_type, options = {})
  values = YAML::load_file(File.expand_path("../fixtures/#{file}.yml", __FILE__))
  attributes = values[key.to_s]
  (attributes["type"] || default_type).constantize.create! attributes.merge(options)
end

def users(key, options = {})
  fixture_record("users", key, "User", options)
end

def products(key, options = {})
  fixture_record("products", key, "Product", options)
end

def addresses(key, options = {})
  fixture_record("addresses", key, "MerchantSidekick::Addressable::Address", options)
end

def orders(key, options = {})
  fixture_record("orders", key, "MerchantSidekick::Order", options)
end

def payments(key, options = {})
  fixture_record("payments", key, "MerchantSidekick::Payment", options)
end

def line_items(key, options = {})
  fixture_record("line_items", key, "MerchantSidekick::LineItem", options)
end
#--- MerchantSidekick::Addressable test models
class MerchantSidekick::Addressable::Address
# extends to_s to add name for testing purposes
# Prefixes the plain address string with "First [Middle] Last"; the middle
# name is included only when the Address class is configured with one.
def to_s_with_name
name = []
name << self.first_name
name << self.middle_name if MerchantSidekick::Addressable::Address.middle_name?
name << self.last_name
name = name.reject(&:blank?).join(" ")
[name, to_s_without_name].reject(&:blank?).join(", ")
end
# NOTE(review): alias_method_chain is a pre-Rails-5 API — fine for the
# Rails version this suite targets, but won't survive an upgrade.
alias_method_chain :to_s, :name
end
# Base AR model the four address-macro variants below inherit from.
class Addressable < ActiveRecord::Base; end
# has_address with no keys: a single generic address.
class HasOneSingleAddressModel < Addressable
has_address
end
# has_addresses with no keys: many generic addresses.
class HasManySingleAddressModel < Addressable
has_addresses
end
# has_address with keys: one billing and one shipping address.
class HasOneMultipleAddressModel < Addressable
has_address :billing, :shipping
end
# has_addresses with keys: many billing and shipping addresses.
class HasManyMultipleAddressModel < Addressable
has_addresses :billing, :shipping
end
# Canonical US address attributes; a middle name is merged in only when the
# Address class supports one, then caller overrides are applied last.
def valid_address_attributes(attributes = {})
{
:first_name => "George",
:last_name => "Bush",
:gender => 'm',
:street => "100 Washington St.",
:postal_code => "95065",
:city => "Santa Cruz",
:province_code => "CA",
:province => "California",
:company_name => "Exxon",
:phone => "+1 831 123-4567",
:mobile => "+1 831 223-4567",
:fax => "+1 831 323-4567",
:country_code => "US",
:country => "United States of America"
}.merge(MerchantSidekick::Addressable::Address.middle_name? ? { :middle_name => "W." } : {}).merge(attributes)
end
#--- MerchantSidekick generic test models
# Sellable product with a Money-mapped price column.
class Product < ActiveRecord::Base
money :price, :cents => :price_cents, :currency => :price_currency
acts_as_sellable
# TODO weird cart serialization workaround
def target; true; end
end
# Variant exposing name/sku/description/taxable readers.
class ProductWithNameAndSku < Product
def name; "A beautiful name"; end
def sku; "PR1234"; end
def description; "Wonderful name!"; end
def taxable; true; end
end
# Variant exposing title/number instead of name/sku; always reports itself
# as a new record.
class ProductWithTitleAndNumber < Product
def title; "A beautiful title"; end
def number; "PR1234"; end
def description; "Wonderful title!"; end
def new_record?; true; end
end
# Variant overriding the copy_* hooks to supply customized line-item values.
class ProductWithCopy < Product
def copy_name(options={}); "customized name"; end
def copy_item_number(options = {}); "customized item number"; end
def copy_description(options = {}); "customized description"; end
def copy_price(options = {}); Money.new(9999, "USD"); end
end
# Addressable user with billing and shipping addresses.
class User < ActiveRecord::Base
has_address :billing, :shipping
end
# TODO rename to just "Buyer"
class BuyingUser < User
acts_as_buyer
end
# TODO rename to just "Seller"
class SellingUser < User
acts_as_seller
end
#--- MerchantSidekick shopping cart
# Default attributes for a cart line item; pass overrides via +attributes+.
def valid_cart_line_item_attributes(attributes = {})
  defaults = { :quantity => 5 }
  defaults.merge(attributes)
end
#--- ActiveMerchant related helpers
# Default attributes for a card the bogus gateway accepts (number "1");
# caller overrides are merged over the defaults.
def valid_credit_card_attributes(attributes = {})
  defaults = {
    :number             => "1", # real-looking alternative: "4242424242424242"
    :first_name         => "Claudio",
    :last_name          => "Almende",
    :month              => "8",
    :year               => (Time.now.year + 1).to_s,
    :verification_value => '123',
    :type               => 'visa'
  }
  defaults.merge(attributes)
end
# Attributes the bogus gateway rejects (number "2", type "bogus"); caller
# overrides are merged over the defaults.
def invalid_credit_card_attributes(attributes = {})
  defaults = {
    :number     => "2",
    :type       => "bogus",
    :first_name => "Bad",
    :last_name  => "Boy",
    :month      => "8",
    :year       => Time.now.year + 1 # note: Integer here, unlike the valid set
  }
  defaults.merge(attributes)
end
# Builds an ActiveMerchant credit card from the valid defaults merged with
# +options+.
def credit_card(options={})
ActiveMerchant::Billing::CreditCard.new(valid_credit_card_attributes(options))
end
# NOTE(review): credit_card already applies valid_credit_card_attributes, so
# the extra wrap here merges the defaults twice — harmless but redundant.
def valid_credit_card(options={})
credit_card(valid_credit_card_attributes(options))
end
# Routes the invalid set through credit_card, so the invalid values override
# the valid defaults (number "2", type "bogus") while keeping the rest.
def invalid_credit_card(options={})
credit_card(invalid_credit_card_attributes(options))
end
# Reopens ActiveMerchant's BogusGateway to add a fake transfer API so specs
# can exercise payout behavior without a real gateway.
module ActiveMerchant
module Billing
class BogusGateway < Gateway
# Transfers money to one or multiple recipients (bulk transfer).
#
# Overloaded activemerchant bogus gateways to support transfers, similar
# to Paypal Website Payments Pro functionality.
#
# E.g.
#
# @gateway.transfer 1000, "bob@example.com",
# :subject => "The money I owe you", :note => "Sorry, it's coming in late."
#
# gateway.transfer [1000, 'fred@example.com'],
# [2450, 'wilma@example.com', :note => 'You will receive an extra payment on March 24.'],
# [2000, 'barney@example.com'],
# :subject => "Salary January.", :note => "Thanks for your hard work."
#
# Behavior by recipient:
#   'fail@error.tst'    -> unsuccessful Response
#   'error@error.tst'   -> raises Error
#   any email-shaped    -> successful Response
#   anything else       -> raises Error
# NOTE(review): the regex is only end-anchored ($), so any string *ending*
# in an email-like suffix also matches — acceptable for a test double.
def transfer(money, paypal_account, options={})
if paypal_account == 'fail@error.tst'
Response.new(false, FAILURE_MESSAGE, {:paid_amount => money.to_s, :error => FAILURE_MESSAGE },:test => true)
elsif paypal_account == 'error@error.tst'
raise Error, ERROR_MESSAGE
elsif /[\w-]+(?:\.[\w-]+)*@(?:[\w-]+\.)+[a-zA-Z]{2,7}$/i.match(paypal_account)
Response.new(true, SUCCESS_MESSAGE, {:paid_amount => money.to_s}, :test => true)
else
raise Error, ERROR_MESSAGE
end
end
end
end
end
# Run ActiveMerchant in test mode against the bogus gateway, and require a
# verification value (CVV) on cards so validation paths are exercised.
ActiveMerchant::Billing::Base.mode = :test
MerchantSidekick::Gateways::Gateway.default_gateway = ActiveMerchant::Billing::BogusGateway.new
ActiveMerchant::Billing::CreditCard.require_verification_value = true
|
require "collectnik"
SEARCH_BIRDS = JSON.parse(%q<{
"nyplAPI":{
"request":{
"page":"1",
"perPage":"10",
"search_text":"birds",
"totalPages":"1199"
},
"response":{
"headers":{
"code":"200",
"message":"OK",
"status":"success"
},
"numResults":"11985",
"result":[
{
"imageID":"820261",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820261",
"title":" Wading Birds.",
"uuid":"510d47e0-d4fd-a3d9-e040-e00a18064a99"
},
{
"imageID":"821523",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?821523",
"title":" Toxostoma Curvirostris.",
"uuid":"510d47e1-0424-a3d9-e040-e00a18064a99"
},
{
"imageID":"820999",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820999",
"title":"The Communities Of Birds.",
"uuid":"510d47e0-dac3-a3d9-e040-e00a18064a99"
},
{
"imageID":"821048",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?821048",
"title":" Orchard Oriole.",
"uuid":"510d47e1-28aa-a3d9-e040-e00a18064a99"
},
{
"imageID":"820568",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820568",
"title":" Frigate, And Tropic-Birds.",
"uuid":"510d47e0-d92c-a3d9-e040-e00a18064a99"
},
{
"imageID":"820808",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820808",
"title":" Great Blue Heron.",
"uuid":"510d47e0-d999-a3d9-e040-e00a18064a99"
},
{
"imageID":"821616",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?821616",
"title":" Tailor-Bird -- Orthot\u00f3mus Longicaudus.",
"uuid":"510d47e1-0404-a3d9-e040-e00a18064a99"
},
{
"imageID":"820090",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820090",
"title":" Various Birds On Attached Cards.]",
"uuid":"510d47e1-3f27-a3d9-e040-e00a18064a99"
},
{
"imageID":"820091",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820091",
"title":" Various Birds On Attached Cards.]",
"uuid":"510d47e1-3f34-a3d9-e040-e00a18064a99"
},
{
"imageID":"820129",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820129",
"title":" Various Kinds Of Birds.",
"uuid":"510d47e1-3f72-a3d9-e040-e00a18064a99"
}
]
}
}
}>)
ITEMS_510d47e2 = JSON.parse(%q<{
"nyplAPI": {
"request": {
"page": "0",
"perPage": "0",
"totalPages": "0",
"uuid": "510d47e2-8e15-a3d9-e040-e00a18064a99"
},
"response": {
"headers": {
"code": "200",
"message": "ok",
"status": "success"
},
"numResults": "0"
}
}
}>)
ITEMS_5fa75050 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"uuid":"5fa75050-c6c7-012f-e24b-58d385a7bc34",
"perPage":"10",
"page":"1",
"totalPages":"13"
},
"response":{
"headers":{
"status":"success",
"code":"200",
"message":"ok"
},
"numResults":"125",
"capture":[
{
"uuid":"510d47e3-6c3e-a3d9-e040-e00a18064a99",
"imageID":"1582665",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582665",
"title":" Fra Angelico, Windsor, 163 verso. [St. Lawrence, a woman holding a child, and a youth with clasped hands.]"
},
{
"uuid":"510d47e3-6c3c-a3d9-e040-e00a18064a99",
"imageID":"1582663",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582663",
"title":" Fra Angelico, British museum, 162. [The youthful David with a crown on his curly head, sitting on a settle, singing and playing on the psaltery.]"
},
{
"uuid":"510d47e3-6c3a-a3d9-e040-e00a18064a99",
"imageID":"1582661",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582661",
"title":" Title page.]"
},
{
"uuid":"510d47e3-6c3f-a3d9-e040-e00a18064a99",
"imageID":"1582666",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582666",
"title":" Domenico di Michelino, Louvre, 1752. [Group of saints.]"
},
{
"uuid":"510d47e3-6c3b-a3d9-e040-e00a18064a99",
"imageID":"1582662",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582662",
"title":" Agnolo Gaddi, Louvre, 758. [The blessed virgin ascending steps of a temple.]"
},
{
"uuid":"510d47e3-6c41-a3d9-e040-e00a18064a99",
"imageID":"1582668",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582668",
"title":" Benozzo, British museum, 540. [The Madonna appearing to a girl saint, who sits up in her bed.]"
},
{
"uuid":"510d47e3-6c3d-a3d9-e040-e00a18064a99",
"imageID":"1582664",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582664",
"title":" Fra Angelico, Windsor, 163. [Bust of St. Lawrence.]"
},
{
"uuid":"510d47e3-6c40-a3d9-e040-e00a18064a99",
"imageID":"1582667",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582667",
"title":" Benozzo, Dresden, 532. [The full-length figure of St. Michael holding the hilt of a sword in his right hand, and a globe in his left..]"
},
{
"uuid":"510d47e3-6c42-a3d9-e040-e00a18064a99",
"imageID":"1582669",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582669",
"title":" Benozzo, Uffizi, 537. [A cardinal followed by an acolyte.]"
},
{
"uuid":"510d47e3-6c43-a3d9-e040-e00a18064a99",
"imageID":"1582670",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582670",
"title":" Benozzo, British museum, 542. [A bare-headed young man, in a cloak, pointing with his right hand as he walks.]"
}
]
}
}
}>)
ITEM_1582665 = JSON.parse(%q<{
"uuid":"510d47e3-6c3e-a3d9-e040-e00a18064a99",
"imageID":"1582665",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582665",
"title":"Fra Angelico, Windsor, 163 verso. [St. Lawrence, a woman holding a child, and a youth with clasped hands.]"
}>)
MODS_1582665 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"uuid":{
"$":"510d47e3-6c3e-a3d9-e040-e00a18064a99"
}
},
"response":{
"headers":{
"status":{
"$":"success"
},
"code":{
"$":"200"
},
"message":{
"$":"OK"
}
},
"mods":{
"version":"3.4",
"schemaLocation":"http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-4.xsd",
"titleInfo":[
{
"lang":"eng",
"supplied":"no",
"usage":"primary",
"title":{
"$":"Drawings of Florentine painters, classified, criticised, and studied as documents in the history and appreciation of Tucson [sic] Art. With a copious Catalogue raissonn\u00e9."
}
},
{
"supplied":"no",
"usage":"primary",
"title":{
"$":"Fra Angelico, Windsor, 163 verso. [St. Lawrence, a woman holding a child, and a youth with clasped hands.]"
}
}
],
"typeOfResource":{
"$":"still image"
},
"originInfo":{
"dateIssued":{
"encoding":"w3cdtf",
"keyDate":"yes",
"$":"1903"
},
"place":{
"placeTerm":{
"$":"New York"
}
},
"publisher":{
"$":"E.P. Dutton and Co."
}
},
"identifier":[
{
"displayLabel":"RLIN/OCLC",
"type":"local_other",
"$":"40687640"
},
{
"displayLabel":"CATNYP ID (legacy)",
"type":"local_catnyp",
"$":"b5572501"
},
{
"displayLabel":"NYPL catalog ID (B-number)",
"type":"local_bnumber",
"$":"b13987862"
},
{
"displayLabel":"Hades struc ID (legacy)",
"type":"local_hades",
"$":"1067384"
}
],
"location":[
{
"physicalLocation":[
{
"authority":"marcorg",
"type":"repository",
"$":"nn"
},
{
"type":"division",
"$":"Wallach Division: Art & Architecture Collection"
}
],
"shelfLocator":{
"$":"MBH++ (Berenson, B. Drawings of Floretine painters, classified, criticised, and studied)"
}
},
{
"physicalLocation":{
"type":"division",
"$":"Wallach Division: Art & Architecture Collection"
}
}
],
"relatedItem":{
"type":"host",
"titleInfo":{
"title":{
"$":"Drawings of Florentine painters, classified, criticised, and studied as documents in the history and appreciation of Tucson [sic] Art. With a copious Catalogue raissonn\u00e9."
}
},
"identifier":{
"type":"uuid",
"$":"5fa75050-c6c7-012f-e24b-58d385a7bc34"
}
}
}
}
}
}>)
MODS_510d47e2 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"uuid":{
"$":"510d47e2-8e15-a3d9-e040-e00a18064a99"
}
},
"response":{
"headers":{
"code":{
"$":"200"
},
"message":{
"$":"OK"
},
"status":{
"$":"success"
}
},
"mods":{
"genre":{
"$":"Pochoir prints"
},
"identifier":[
{
"$":"1617433",
"displayLabel":"Dynix",
"type":"local_other"
},
{
"$":"b17540797",
"displayLabel":"NYPL catalog ID (B-number)",
"type":"local_bnumber"
},
{
"$":"1017240",
"displayLabel":"Hades struc ID (legacy)",
"type":"local_hades"
},
{
"$":"33333201354160",
"displayLabel":"Barcode",
"type":"local_barcode"
}
],
"location":[
{
"physicalLocation":[
{
"$":"nn",
"authority":"marcorg",
"type":"repository"
},
{
"$":"Art and Picture Collection",
"type":"division"
}
]
},
{
"shelfLocator":{
"$":"PC NEW YC-Skyl"
}
},
{
"physicalLocation":{
"$":"Art and Picture Collection",
"type":"division"
}
}
],
"name":{
"affiliation":{
},
"authority":"",
"namePart":{
"$":"Franz Huld"
},
"role":{
"roleTerm":[
{
"$":"pbl",
"authority":"marcrelator",
"type":"code",
"valueURI":"http://id.loc.gov/vocabulary/relators/pbl"
},
{
"$":"Publisher",
"authority":"marcrelator",
"type":"text",
"valueURI":"http://id.loc.gov/vocabulary/relators/pbl"
}
]
},
"type":"corporate",
"valueURI":""
},
"originInfo":{
"place":{
"placeTerm":{
"$":"New York"
}
}
},
"relatedItem":{
"identifier":{
"$":"e6b85000-c55f-012f-27fc-58d385a7bc34",
"type":"uuid"
},
"relatedItem":{
"identifier":{
"$":"79d4a650-c52e-012f-67ad-58d385a7bc34",
"type":"uuid"
},
"titleInfo":{
"title":{
"$":"Mid-Manhattan Picture Collection"
}
},
"type":"host"
},
"titleInfo":{
"title":{
"$":"New York City -- skyline"
}
},
"type":"host"
},
"schemaLocation":"http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-4.xsd",
"subject":[
{
"topic":{
"$":"Sunrises & sunsets -- New York (State) -- New York",
"authority":"lctgm"
}
},
{
"topic":{
"$":"Skyscrapers -- New York (State) -- New York -- 1900-1909",
"authority":"lctgm"
}
},
{
"cartographics":{
},
"geographic":{
"$":"New York (N.Y.) -- 1900-1909",
"authority":"naf"
}
}
],
"titleInfo":[
{
"lang":"eng",
"supplied":"no",
"title":{
"$":"Mid-Manhattan Picture Collection"
},
"usage":"primary"
},
{
"supplied":"no",
"title":{
"$":"New York City -- Skyline"
},
"usage":"primary"
},
{
"lang":"eng",
"supplied":"no",
"title":{
"$":"High buildings in New York at night."
},
"usage":"primary"
}
],
"typeOfResource":{
"$":"still image"
},
"version":"3.4"
}
}
}
}>)
Add second page of results for 'birds' search
require "collectnik"
SEARCH_BIRDS = JSON.parse(%q<{
"nyplAPI":{
"request":{
"page":"1",
"perPage":"10",
"search_text":"birds",
"totalPages":"1199"
},
"response":{
"headers":{
"code":"200",
"message":"OK",
"status":"success"
},
"numResults":"11985",
"result":[
{
"imageID":"820261",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820261",
"title":" Wading Birds.",
"uuid":"510d47e0-d4fd-a3d9-e040-e00a18064a99"
},
{
"imageID":"821523",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?821523",
"title":" Toxostoma Curvirostris.",
"uuid":"510d47e1-0424-a3d9-e040-e00a18064a99"
},
{
"imageID":"820999",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820999",
"title":"The Communities Of Birds.",
"uuid":"510d47e0-dac3-a3d9-e040-e00a18064a99"
},
{
"imageID":"821048",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?821048",
"title":" Orchard Oriole.",
"uuid":"510d47e1-28aa-a3d9-e040-e00a18064a99"
},
{
"imageID":"820568",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820568",
"title":" Frigate, And Tropic-Birds.",
"uuid":"510d47e0-d92c-a3d9-e040-e00a18064a99"
},
{
"imageID":"820808",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820808",
"title":" Great Blue Heron.",
"uuid":"510d47e0-d999-a3d9-e040-e00a18064a99"
},
{
"imageID":"821616",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?821616",
"title":" Tailor-Bird -- Orthot\u00f3mus Longicaudus.",
"uuid":"510d47e1-0404-a3d9-e040-e00a18064a99"
},
{
"imageID":"820090",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820090",
"title":" Various Birds On Attached Cards.]",
"uuid":"510d47e1-3f27-a3d9-e040-e00a18064a99"
},
{
"imageID":"820091",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820091",
"title":" Various Birds On Attached Cards.]",
"uuid":"510d47e1-3f34-a3d9-e040-e00a18064a99"
},
{
"imageID":"820129",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?820129",
"title":" Various Kinds Of Birds.",
"uuid":"510d47e1-3f72-a3d9-e040-e00a18064a99"
}
]
}
}
}>)
SEARCH_BIRDS2 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"page":"2",
"perPage":"10",
"search_text":"birds",
"totalPages":"1199"
},
"response":{
"headers":{
"code":"200",
"message":"OK",
"status":"success"
},
"numResults":"11985",
"result":[
{
"imageID":"1140323",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1140323",
"title":" Blackcap.",
"typeOfResource":"still image",
"uuid":"510d47da-c080-a3d9-e040-e00a18064a99"
},
{
"imageID":"1133231",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1133231",
"title":" Bearded tit.",
"typeOfResource":"still image",
"uuid":"510d47da-c083-a3d9-e040-e00a18064a99"
},
{
"imageID":"1133204",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1133204",
"title":"The ptarmign.",
"typeOfResource":"still image",
"uuid":"510d47da-c04d-a3d9-e040-e00a18064a99"
},
{
"imageID":"1133213",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1133213",
"title":" Jackdaw.",
"typeOfResource":"still image",
"uuid":"510d47da-c05f-a3d9-e040-e00a18064a99"
},
{
"imageID":"1133222",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1133222",
"title":" Common tern.",
"typeOfResource":"still image",
"uuid":"510d47da-c071-a3d9-e040-e00a18064a99"
},
{
"imageID":"1140322",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1140322",
"title":" Blue titmouse.",
"typeOfResource":"still image",
"uuid":"510d47da-c07e-a3d9-e040-e00a18064a99"
},
{
"imageID":"1140304",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1140304",
"title":" Linnet.",
"typeOfResource":"still image",
"uuid":"510d47da-c05a-a3d9-e040-e00a18064a99"
},
{
"imageID":"1140313",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1140313",
"title":" Golden eagle.",
"typeOfResource":"still image",
"uuid":"510d47da-c06c-a3d9-e040-e00a18064a99"
},
{
"imageID":"1133230",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1133230",
"title":" Blackbird.",
"typeOfResource":"still image",
"uuid":"510d47da-c081-a3d9-e040-e00a18064a99"
},
{
"imageID":"1133203",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1133203",
"title":" Mouse sparrow.",
"typeOfResource":"still image",
"uuid":"510d47da-c04b-a3d9-e040-e00a18064a99"
}
]
}
}
}>)
ITEMS_510d47e2 = JSON.parse(%q<{
"nyplAPI": {
"request": {
"page": "0",
"perPage": "0",
"totalPages": "0",
"uuid": "510d47e2-8e15-a3d9-e040-e00a18064a99"
},
"response": {
"headers": {
"code": "200",
"message": "ok",
"status": "success"
},
"numResults": "0"
}
}
}>)
ITEMS_5fa75050 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"uuid":"5fa75050-c6c7-012f-e24b-58d385a7bc34",
"perPage":"10",
"page":"1",
"totalPages":"13"
},
"response":{
"headers":{
"status":"success",
"code":"200",
"message":"ok"
},
"numResults":"125",
"capture":[
{
"uuid":"510d47e3-6c3e-a3d9-e040-e00a18064a99",
"imageID":"1582665",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582665",
"title":" Fra Angelico, Windsor, 163 verso. [St. Lawrence, a woman holding a child, and a youth with clasped hands.]"
},
{
"uuid":"510d47e3-6c3c-a3d9-e040-e00a18064a99",
"imageID":"1582663",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582663",
"title":" Fra Angelico, British museum, 162. [The youthful David with a crown on his curly head, sitting on a settle, singing and playing on the psaltery.]"
},
{
"uuid":"510d47e3-6c3a-a3d9-e040-e00a18064a99",
"imageID":"1582661",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582661",
"title":" Title page.]"
},
{
"uuid":"510d47e3-6c3f-a3d9-e040-e00a18064a99",
"imageID":"1582666",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582666",
"title":" Domenico di Michelino, Louvre, 1752. [Group of saints.]"
},
{
"uuid":"510d47e3-6c3b-a3d9-e040-e00a18064a99",
"imageID":"1582662",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582662",
"title":" Agnolo Gaddi, Louvre, 758. [The blessed virgin ascending steps of a temple.]"
},
{
"uuid":"510d47e3-6c41-a3d9-e040-e00a18064a99",
"imageID":"1582668",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582668",
"title":" Benozzo, British museum, 540. [The Madonna appearing to a girl saint, who sits up in her bed.]"
},
{
"uuid":"510d47e3-6c3d-a3d9-e040-e00a18064a99",
"imageID":"1582664",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582664",
"title":" Fra Angelico, Windsor, 163. [Bust of St. Lawrence.]"
},
{
"uuid":"510d47e3-6c40-a3d9-e040-e00a18064a99",
"imageID":"1582667",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582667",
"title":" Benozzo, Dresden, 532. [The full-length figure of St. Michael holding the hilt of a sword in his right hand, and a globe in his left..]"
},
{
"uuid":"510d47e3-6c42-a3d9-e040-e00a18064a99",
"imageID":"1582669",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582669",
"title":" Benozzo, Uffizi, 537. [A cardinal followed by an acolyte.]"
},
{
"uuid":"510d47e3-6c43-a3d9-e040-e00a18064a99",
"imageID":"1582670",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582670",
"title":" Benozzo, British museum, 542. [A bare-headed young man, in a cloak, pointing with his right hand as he walks.]"
}
]
}
}
}>)
ITEM_1582665 = JSON.parse(%q<{
"uuid":"510d47e3-6c3e-a3d9-e040-e00a18064a99",
"imageID":"1582665",
"itemLink":"http://digitalgallery.nypl.org/nypldigital/id?1582665",
"title":"Fra Angelico, Windsor, 163 verso. [St. Lawrence, a woman holding a child, and a youth with clasped hands.]"
}>)
MODS_1582665 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"uuid":{
"$":"510d47e3-6c3e-a3d9-e040-e00a18064a99"
}
},
"response":{
"headers":{
"status":{
"$":"success"
},
"code":{
"$":"200"
},
"message":{
"$":"OK"
}
},
"mods":{
"version":"3.4",
"schemaLocation":"http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-4.xsd",
"titleInfo":[
{
"lang":"eng",
"supplied":"no",
"usage":"primary",
"title":{
"$":"Drawings of Florentine painters, classified, criticised, and studied as documents in the history and appreciation of Tucson [sic] Art. With a copious Catalogue raissonn\u00e9."
}
},
{
"supplied":"no",
"usage":"primary",
"title":{
"$":"Fra Angelico, Windsor, 163 verso. [St. Lawrence, a woman holding a child, and a youth with clasped hands.]"
}
}
],
"typeOfResource":{
"$":"still image"
},
"originInfo":{
"dateIssued":{
"encoding":"w3cdtf",
"keyDate":"yes",
"$":"1903"
},
"place":{
"placeTerm":{
"$":"New York"
}
},
"publisher":{
"$":"E.P. Dutton and Co."
}
},
"identifier":[
{
"displayLabel":"RLIN/OCLC",
"type":"local_other",
"$":"40687640"
},
{
"displayLabel":"CATNYP ID (legacy)",
"type":"local_catnyp",
"$":"b5572501"
},
{
"displayLabel":"NYPL catalog ID (B-number)",
"type":"local_bnumber",
"$":"b13987862"
},
{
"displayLabel":"Hades struc ID (legacy)",
"type":"local_hades",
"$":"1067384"
}
],
"location":[
{
"physicalLocation":[
{
"authority":"marcorg",
"type":"repository",
"$":"nn"
},
{
"type":"division",
"$":"Wallach Division: Art & Architecture Collection"
}
],
"shelfLocator":{
"$":"MBH++ (Berenson, B. Drawings of Floretine painters, classified, criticised, and studied)"
}
},
{
"physicalLocation":{
"type":"division",
"$":"Wallach Division: Art & Architecture Collection"
}
}
],
"relatedItem":{
"type":"host",
"titleInfo":{
"title":{
"$":"Drawings of Florentine painters, classified, criticised, and studied as documents in the history and appreciation of Tucson [sic] Art. With a copious Catalogue raissonn\u00e9."
}
},
"identifier":{
"type":"uuid",
"$":"5fa75050-c6c7-012f-e24b-58d385a7bc34"
}
}
}
}
}
}>)
MODS_510d47e2 = JSON.parse(%q<{
"nyplAPI":{
"request":{
"uuid":{
"$":"510d47e2-8e15-a3d9-e040-e00a18064a99"
}
},
"response":{
"headers":{
"code":{
"$":"200"
},
"message":{
"$":"OK"
},
"status":{
"$":"success"
}
},
"mods":{
"genre":{
"$":"Pochoir prints"
},
"identifier":[
{
"$":"1617433",
"displayLabel":"Dynix",
"type":"local_other"
},
{
"$":"b17540797",
"displayLabel":"NYPL catalog ID (B-number)",
"type":"local_bnumber"
},
{
"$":"1017240",
"displayLabel":"Hades struc ID (legacy)",
"type":"local_hades"
},
{
"$":"33333201354160",
"displayLabel":"Barcode",
"type":"local_barcode"
}
],
"location":[
{
"physicalLocation":[
{
"$":"nn",
"authority":"marcorg",
"type":"repository"
},
{
"$":"Art and Picture Collection",
"type":"division"
}
]
},
{
"shelfLocator":{
"$":"PC NEW YC-Skyl"
}
},
{
"physicalLocation":{
"$":"Art and Picture Collection",
"type":"division"
}
}
],
"name":{
"affiliation":{
},
"authority":"",
"namePart":{
"$":"Franz Huld"
},
"role":{
"roleTerm":[
{
"$":"pbl",
"authority":"marcrelator",
"type":"code",
"valueURI":"http://id.loc.gov/vocabulary/relators/pbl"
},
{
"$":"Publisher",
"authority":"marcrelator",
"type":"text",
"valueURI":"http://id.loc.gov/vocabulary/relators/pbl"
}
]
},
"type":"corporate",
"valueURI":""
},
"originInfo":{
"place":{
"placeTerm":{
"$":"New York"
}
}
},
"relatedItem":{
"identifier":{
"$":"e6b85000-c55f-012f-27fc-58d385a7bc34",
"type":"uuid"
},
"relatedItem":{
"identifier":{
"$":"79d4a650-c52e-012f-67ad-58d385a7bc34",
"type":"uuid"
},
"titleInfo":{
"title":{
"$":"Mid-Manhattan Picture Collection"
}
},
"type":"host"
},
"titleInfo":{
"title":{
"$":"New York City -- skyline"
}
},
"type":"host"
},
"schemaLocation":"http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-4.xsd",
"subject":[
{
"topic":{
"$":"Sunrises & sunsets -- New York (State) -- New York",
"authority":"lctgm"
}
},
{
"topic":{
"$":"Skyscrapers -- New York (State) -- New York -- 1900-1909",
"authority":"lctgm"
}
},
{
"cartographics":{
},
"geographic":{
"$":"New York (N.Y.) -- 1900-1909",
"authority":"naf"
}
}
],
"titleInfo":[
{
"lang":"eng",
"supplied":"no",
"title":{
"$":"Mid-Manhattan Picture Collection"
},
"usage":"primary"
},
{
"supplied":"no",
"title":{
"$":"New York City -- Skyline"
},
"usage":"primary"
},
{
"lang":"eng",
"supplied":"no",
"title":{
"$":"High buildings in New York at night."
},
"usage":"primary"
}
],
"typeOfResource":{
"$":"still image"
},
"version":"3.4"
}
}
}
}>)
|
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start
require 'squib'
RSpec.configure do |config|
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
end
def layout_file(str)
"#{File.expand_path(File.dirname(__FILE__))}/data/layouts/#{str}"
end
def sample_file(file)
"#{File.expand_path(File.dirname(__FILE__))}/../samples/#{file}"
end
def sample_regression_file(file)
"#{File.expand_path(File.dirname(__FILE__))}/data/samples/#{file}.txt"
end
def csv_file(file)
"#{File.expand_path(File.dirname(__FILE__))}/data/csv/#{file}"
end
def overwrite_sample(sample_name, log)
# Use this to overwrite the regression with current state
File.open(sample_regression_file(sample_name), 'w+:UTF-8') do |f|
f.write(log.string)
end
end
def scrub_hex(str)
str.gsub(/0x\w{1,8}/,'')
.gsub(/ptr=\w{1,8}/,'')
.gsub(/#<Pango::FontDescription:.*>/,'')
.gsub(/#<Cairo::ImageSurface:.*>/,'ImageSurface')
.gsub(/#<Cairo::LinearPattern:.*>/,'LinearPattern')
.gsub(/#<Cairo::Matrix:.*>/,'Matrix')
.gsub(/#<RSVG::Handle.*>/,'RSVG::Handle')
.gsub(/#<RSpec::Mocks::Double:.*>/,'MockDouble')
.gsub(/RGB:\w{1,8}/,'RGB:')
end
# Build a mock cairo instance that allows basically any method
# and logs that call to the string buffer
def mock_cairo(strio)
cxt = double(Cairo::Context)
surface = double(Cairo::ImageSurface)
pango = double(Pango::Layout)
allow(Squib.logger).to receive(:warn) {}
allow(ProgressBar).to receive(:create).and_return(Squib::DoNothing.new)
allow(Cairo::ImageSurface).to receive(:new).and_return(surface)
allow(surface).to receive(:width).and_return(100)
allow(surface).to receive(:height).and_return(101)
allow(Cairo::Context).to receive(:new).and_return(cxt)
allow(cxt).to receive(:create_pango_layout).and_return(pango)
allow(cxt).to receive(:target).and_return(surface)
allow(pango).to receive(:height).and_return(25)
allow(pango).to receive(:width).and_return(25)
allow(pango).to receive(:extents).and_return([Pango::Rectangle.new(0,0,0,0)]*2)
%w(save set_source_color paint restore translate rotate move_to
update_pango_layout width height show_pango_layout rounded_rectangle
set_line_width stroke fill set_source scale render_rsvg_handle circle
triangle line_to operator= show_page clip transform mask).each do |m|
allow(cxt).to receive(m) { |*args| strio << scrub_hex("cairo: #{m}(#{args})\n") }
end
%w(font_description= text= width= height= wrap= ellipsize= alignment=
justify= spacing= markup=).each do |m|
allow(pango).to receive(m) {|*args| strio << scrub_hex("pango: #{m}(#{args})\n") }
end
%w(write_to_png).each do |m|
allow(surface).to receive(m) { |*args| strio << scrub_hex("surface: #{m}(#{args})\n") }
end
end
# Refine Squib to allow setting the logger and progress bar
module Squib
def logger=(l)
@logger = l
end
module_function 'logger='
class Deck
attr_accessor :progress_bar
end
end
def mock_squib_logger(old_logger)
old_logger = Squib.logger
Squib.logger = instance_double(Logger)
yield
Squib.logger = old_logger
end
def output_dir
File.expand_path('../samples/_output', File.dirname(__FILE__))
end
Scrubbing hex for regression testing
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start
require 'squib'
RSpec.configure do |config|
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
end
def layout_file(str)
"#{File.expand_path(File.dirname(__FILE__))}/data/layouts/#{str}"
end
def sample_file(file)
"#{File.expand_path(File.dirname(__FILE__))}/../samples/#{file}"
end
def sample_regression_file(file)
"#{File.expand_path(File.dirname(__FILE__))}/data/samples/#{file}.txt"
end
def csv_file(file)
"#{File.expand_path(File.dirname(__FILE__))}/data/csv/#{file}"
end
def overwrite_sample(sample_name, log)
# Use this to overwrite the regression with current state
File.open(sample_regression_file(sample_name), 'w+:UTF-8') do |f|
f.write(log.string)
end
end
def scrub_hex(str)
str.gsub(/0x\w{1,8}/,'')
.gsub(/ptr=\w{1,8}/,'')
.gsub(/#<Pango::FontDescription:.*>/,'')
.gsub(/#<Cairo::ImageSurface:.*>/,'ImageSurface')
.gsub(/#<Cairo::LinearPattern:.*>/,'LinearPattern')
.gsub(/#<Cairo::RadialPattern:.*>/,'RadialPattern')
.gsub(/#<Cairo::Matrix:.*>/,'Matrix')
.gsub(/#<RSVG::Handle.*>/,'RSVG::Handle')
.gsub(/#<RSpec::Mocks::Double:.*>/,'MockDouble')
.gsub(/RGB:\w{1,8}/,'RGB:')
end
# Build a mock cairo instance that allows basically any method
# and logs that call to the string buffer
def mock_cairo(strio)
cxt = double(Cairo::Context)
surface = double(Cairo::ImageSurface)
pango = double(Pango::Layout)
allow(Squib.logger).to receive(:warn) {}
allow(ProgressBar).to receive(:create).and_return(Squib::DoNothing.new)
allow(Cairo::ImageSurface).to receive(:new).and_return(surface)
allow(surface).to receive(:width).and_return(100)
allow(surface).to receive(:height).and_return(101)
allow(Cairo::Context).to receive(:new).and_return(cxt)
allow(cxt).to receive(:create_pango_layout).and_return(pango)
allow(cxt).to receive(:target).and_return(surface)
allow(pango).to receive(:height).and_return(25)
allow(pango).to receive(:width).and_return(25)
allow(pango).to receive(:extents).and_return([Pango::Rectangle.new(0,0,0,0)]*2)
%w(save set_source_color paint restore translate rotate move_to
update_pango_layout width height show_pango_layout rounded_rectangle
set_line_width stroke fill set_source scale render_rsvg_handle circle
triangle line_to operator= show_page clip transform mask).each do |m|
allow(cxt).to receive(m) { |*args| strio << scrub_hex("cairo: #{m}(#{args})\n") }
end
%w(font_description= text= width= height= wrap= ellipsize= alignment=
justify= spacing= markup=).each do |m|
allow(pango).to receive(m) {|*args| strio << scrub_hex("pango: #{m}(#{args})\n") }
end
%w(write_to_png).each do |m|
allow(surface).to receive(m) { |*args| strio << scrub_hex("surface: #{m}(#{args})\n") }
end
end
# Refine Squib to allow setting the logger and progress bar
module Squib
def logger=(l)
@logger = l
end
module_function 'logger='
class Deck
attr_accessor :progress_bar
end
end
def mock_squib_logger(old_logger)
old_logger = Squib.logger
Squib.logger = instance_double(Logger)
yield
Squib.logger = old_logger
end
def output_dir
File.expand_path('../samples/_output', File.dirname(__FILE__))
end
|
require 'machinist/active_record'
require 'sham'
require 'faker'
require 'ransack'
require 'pry'
I18n.enforce_available_locales = false
Time.zone = 'Eastern Time (US & Canada)'
I18n.load_path += Dir[File.join(File.dirname(__FILE__), 'support', '*.yml')]
Dir[File.expand_path('../{helpers,support,blueprints}/*.rb', __FILE__)]
.each { |f| require f }
Sham.define do
name { Faker::Name.name }
title { Faker::Lorem.sentence }
body { Faker::Lorem.paragraph }
salary { |index| 30000 + (index * 1000) }
tag_name { Faker::Lorem.words(3).join(' ') }
note { Faker::Lorem.words(7).join(' ') }
only_admin { Faker::Lorem.words(3).join(' ') }
only_search { Faker::Lorem.words(3).join(' ') }
only_sort { Faker::Lorem.words(3).join(' ') }
notable_id { |id| id }
end
RSpec.configure do |config|
config.alias_it_should_behave_like_to :it_has_behavior, 'has behavior'
config.before(:suite) do
puts '=' * 80
connection_name = ActiveRecord::Base.connection.adapter_name
puts "Running specs against #{connection_name}, ActiveRecord #{
ActiveRecord::VERSION::STRING} and ARel #{Arel::VERSION}..."
puts '=' * 80
Schema.create
end
config.before(:all) { Sham.reset(:before_all) }
config.before(:each) { Sham.reset(:before_each) }
config.include RansackHelper
end
RSpec::Matchers.define :be_like do |expected|
match do |actual|
actual.gsub(/^\s+|\s+$/, '').gsub(/\s+/, ' ').strip ==
expected.gsub(/^\s+|\s+$/, '').gsub(/\s+/, ' ').strip
end
end
RSpec::Matchers.define :have_attribute_method do |expected|
match do |actual|
actual.attribute_method?(expected)
end
end
Improve message when running tests
require 'machinist/active_record'
require 'sham'
require 'faker'
require 'ransack'
require 'pry'
I18n.enforce_available_locales = false
Time.zone = 'Eastern Time (US & Canada)'
I18n.load_path += Dir[File.join(File.dirname(__FILE__), 'support', '*.yml')]
Dir[File.expand_path('../{helpers,support,blueprints}/*.rb', __FILE__)]
.each { |f| require f }
Sham.define do
name { Faker::Name.name }
title { Faker::Lorem.sentence }
body { Faker::Lorem.paragraph }
salary { |index| 30000 + (index * 1000) }
tag_name { Faker::Lorem.words(3).join(' ') }
note { Faker::Lorem.words(7).join(' ') }
only_admin { Faker::Lorem.words(3).join(' ') }
only_search { Faker::Lorem.words(3).join(' ') }
only_sort { Faker::Lorem.words(3).join(' ') }
notable_id { |id| id }
end
RSpec.configure do |config|
config.alias_it_should_behave_like_to :it_has_behavior, 'has behavior'
config.before(:suite) do
connection_name = ActiveRecord::Base.connection.adapter_name
message = "Running specs against #{connection_name}, Active Record #{
ActiveRecord::VERSION::STRING} and Arel #{Arel::VERSION}..."
puts '=' * message.length, message, '=' * message.length
Schema.create
end
config.before(:all) { Sham.reset(:before_all) }
config.before(:each) { Sham.reset(:before_each) }
config.include RansackHelper
end
RSpec::Matchers.define :be_like do |expected|
match do |actual|
actual.gsub(/^\s+|\s+$/, '').gsub(/\s+/, ' ').strip ==
expected.gsub(/^\s+|\s+$/, '').gsub(/\s+/, ' ').strip
end
end
RSpec::Matchers.define :have_attribute_method do |expected|
match do |actual|
actual.attribute_method?(expected)
end
end
|
# encoding: UTF-8
PROJECT_ROOT = File.expand_path("../..", __FILE__)
$LOAD_PATH << File.join(PROJECT_ROOT, "lib")
Bundler.require
Dir[File.join(PROJECT_ROOT,"spec/support/**/*.rb")].each {|f| require f}
require 'riik'
require 'ripple'
require 'vcr'
VCR.config do |c|
c.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
c.stub_with :webmock
c.default_cassette_options = { :record => :new_episodes }
end
# Test class.
#
module Riik
class Person
include Riik::Document
property :first_name
property :last_name
end
end
# Ripple configuration.
#
if ENV['TDDIUM_RIAK_HOST']
Ripple.client = Riak::Client.new(:nodes => [
{:host => ENV['TDDIUM_RIAK_HOST'], :http_port => ENV['TDDIUM_RIAK_HTTP_PORT']}
])
end
Fix bad ripple configuration.
# encoding: UTF-8
PROJECT_ROOT = File.expand_path("../..", __FILE__)
$LOAD_PATH << File.join(PROJECT_ROOT, "lib")
Bundler.require
Dir[File.join(PROJECT_ROOT,"spec/support/**/*.rb")].each {|f| require f}
require 'riik'
require 'ripple'
require 'vcr'
VCR.config do |c|
c.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
c.stub_with :webmock
c.default_cassette_options = { :record => :new_episodes }
end
# Test class.
#
module Riik
class Person
include Riik::Document
property :first_name
property :last_name
end
end
# Ripple configuration.
#
if ENV['TDDIUM_RIAK_HOST']
Ripple.client = Riak::Client.new({:host => ENV['TDDIUM_RIAK_HOST'], :http_port => ENV['TDDIUM_RIAK_HTTP_PORT']})
end
|
require 'coveralls'
Coveralls.wear!('rails')
RSpec.configure do |config|
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# Enable only the newer, non-monkey-patching expect syntax.
# For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
expectations.syntax = :expect
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Enable only the newer, non-monkey-patching expect syntax.
# For more details, see:
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
mocks.syntax = :expect
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended.
mocks.verify_partial_doubles = true
end
end
Add local coverage reports as well.
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start
RSpec.configure do |config|
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# Enable only the newer, non-monkey-patching expect syntax.
# For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
expectations.syntax = :expect
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Enable only the newer, non-monkey-patching expect syntax.
# For more details, see:
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
mocks.syntax = :expect
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended.
mocks.verify_partial_doubles = true
end
end
|
$LOAD_PATH << "models"
require 'database'
require 'expense'
Database.environment = 'test'
def run_ltk_with_input(*inputs)
shell_output = ""
IO.popen('ENVIRONMENT=test ./budget', 'r+') do |pipe|
inputs.each do |input|
pipe.puts input
end
pipe.close_write
shell_output << pipe.read
end
shell_output
end
RSpec.configure do |config|
config.treat_symbols_as_metadata_keys_with_true_values = true
config.run_all_when_everything_filtered = true
config.filter_run :focus
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = 'random'
config.after(:each) do
Expense.destroy_all
end
end
Menu integration tests
$LOAD_PATH << "models"
require 'database'
require 'expense'
Database.environment = 'test'
# Spawns the budget CLI in the test environment, feeds each input line
# to its stdin, then returns everything the process wrote to stdout as
# a single string (IO.popen's block form returns the block's value).
def run_budget_with_input(*inputs)
  IO.popen('ENVIRONMENT=test ./budget', 'r+') do |io|
    inputs.each { |line| io.puts(line) }
    io.close_write
    io.read
  end
end
# RSpec configuration for the budget CLI specs.
RSpec.configure do |config|
# RSpec 2.x opt-in: lets a bare `:focus` symbol in metadata mean `focus: true`.
config.treat_symbols_as_metadata_keys_with_true_values = true
# If everything is filtered out (nothing tagged :focus), run the full suite.
config.run_all_when_everything_filtered = true
config.filter_run :focus
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = 'random'
# Wipe persisted expenses so examples cannot leak state into each other.
config.after(:each) do
Expense.destroy_all
end
end
|
# rspec-mocks in standalone mode (usable outside the rspec-core runner).
require 'rspec/mocks/standalone'
require 'simplecov'
# Coverage output: local HTML report only.
# NOTE(review): `MultiFormatter[...]` is the pre-0.9 SimpleCov API; newer
# releases expect `MultiFormatter.new([...])` -- confirm the pinned gem version.
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter
]
# Coverage tracking must start before application code is loaded.
SimpleCov.start do
# Exclude the specs themselves from the coverage report.
add_filter 'spec'
end
RSpec.configure do |config|
end
Add spec settings
# rspec-mocks in standalone mode (usable outside the rspec-core runner).
require 'rspec/mocks/standalone'
require 'simplecov'
# Coverage output: local HTML report only.
# NOTE(review): `MultiFormatter[...]` is the pre-0.9 SimpleCov API; newer
# releases expect `MultiFormatter.new([...])` -- confirm the pinned gem version.
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter
]
# Coverage tracking must start before application code is loaded.
SimpleCov.start do
# Exclude the specs themselves from the coverage report.
add_filter 'spec'
end
RSpec.configure do |config|
# Together these allow running only examples tagged :focus, falling back
# to the entire suite when nothing is focused.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# More verbose output is useful when running a single spec file.
if config.files_to_run.one?
config.full_backtrace = true
config.default_formatter = 'doc'
end
config.order = :random
# Seeding Kernel's RNG lets `--seed` deterministically reproduce
# failures related to random ordering.
Kernel.srand config.seed
end
|
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'pathname'
ENV['TEST_DOMAIN'] ||= 'test.nginx-acme.site'
ENV['FORCE_RENEW'] ||= 'false'
RootPath = Pathname(File.expand_path('../..', __FILE__))
CompositionsPath = RootPath.join('spec/compositions')
Dir[RootPath.join('spec/support/**/*.rb')].each { |f| require f }
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => 'be bigger than 2 and smaller than 4'
# ...rather than:
# # => 'be bigger than 2'
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = 'spec/examples.txt'
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Intentional configure RSpec to run ours specs in defined order.
# This is important since some example groups reuse existing containers
# created by previous example group.
config.order = :defined
config.include PortalHelpers
config.before :suite do
puts "TEST_DOMAIN: #{ENV['TEST_DOMAIN']}"
# Ensure the build process have existing cached layers to reuse, by
# explicitly rebuild the docker image for spec.
puts 'Rebuilding docker image for spec...'
Dir.chdir CompositionsPath.children.first do
PortalHelpers.docker_compose :build
end
end
# Each example group names its docker-compose project via `:composition`
# metadata; the whole group runs from that composition's directory.
config.around :all do |example|
Dir.chdir CompositionsPath.join(example.metadata[:composition]) do
# Start from a clean slate unless the group explicitly opts into reusing
# containers left behind by the previous group (`:reuse_container`).
purge_existing_containers unless example.metadata[:reuse_container]
example.run
end
end
# Stop (but do not remove) the composition's containers after every example.
config.after :each do
docker_compose :stop
end
end
Correctly purge containers before an example group
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'pathname'
ENV['TEST_DOMAIN'] ||= 'test.nginx-acme.site'
ENV['FORCE_RENEW'] ||= 'false'
RootPath = Pathname(File.expand_path('../..', __FILE__))
CompositionsPath = RootPath.join('spec/compositions')
Dir[RootPath.join('spec/support/**/*.rb')].each { |f| require f }
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => 'be bigger than 2 and smaller than 4'
# ...rather than:
# # => 'be bigger than 2'
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = 'spec/examples.txt'
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Intentional configure RSpec to run ours specs in defined order.
# This is important since some example groups reuse existing containers
# created by previous example group.
config.order = :defined
config.include PortalHelpers
config.before :suite do
puts "TEST_DOMAIN: #{ENV['TEST_DOMAIN']}"
# Ensure the build process have existing cached layers to reuse, by
# explicitly rebuild the docker image for spec.
puts 'Rebuilding docker image for spec...'
Dir.chdir CompositionsPath.children.first do
PortalHelpers.docker_compose :build
end
end
# Purge leftover containers once per example group, unless the group opts
# into reusing containers via `:reuse_container` metadata. Note the hook
# reads group metadata through `example.class.metadata` here.
config.before :all do |example|
unless example.class.metadata[:reuse_container]
Dir.chdir CompositionsPath.join(example.class.metadata[:composition]) do
purge_existing_containers
end
end
end
# Run every example from its composition's docker-compose directory.
config.around :example do |example|
Dir.chdir CompositionsPath.join(example.metadata[:composition]) do
example.run
end
end
# Stop (but do not remove) the composition's containers after every example.
config.after :example do
docker_compose :stop
end
end
|
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require "grease"
require "sprockets"
require "tilt"
Setup SimpleCov
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "simplecov"
require "codeclimate-test-reporter"
SimpleCov.start do
add_filter "/spec/"
formatter SimpleCov::Formatter::MultiFormatter.new([
SimpleCov::Formatter::HTMLFormatter,
CodeClimate::TestReporter::Formatter
])
end
require "grease"
require "sprockets"
require "tilt"
|
# $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'ubiregi'
require 'vcr'
require 'webmock/rspec'
require 'coveralls'
require 'simplecov'
VCR.configure do |c|
c.cassette_library_dir = 'spec/vcr_cassettes'
c.hook_into :webmock
c.allow_http_connections_when_no_cassette = true
end
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
add_filter '/spec/'
end
include WebMock::API
# WebMock assertion helpers: `a_<verb>(path)` builds a request matcher for
# `path` relative to the Ubiregi API endpoint, for use with `should have_been_made`.
def a_get(path)
a_request(:get, Ubiregi::Connect::Client::ENDPOINT + path)
end
def a_post(path)
a_request(:post, Ubiregi::Connect::Client::ENDPOINT + path)
end
def a_put(path)
a_request(:put, Ubiregi::Connect::Client::ENDPOINT + path)
end
def a_delete(path)
a_request(:delete, Ubiregi::Connect::Client::ENDPOINT + path)
end
# WebMock stubbing helpers: `stub_<verb>(path)` registers a stub for `path`
# relative to the Ubiregi API endpoint.
def stub_get(path)
stub_request(:get, Ubiregi::Connect::Client::ENDPOINT + path)
end
def stub_post(path)
stub_request(:post, Ubiregi::Connect::Client::ENDPOINT + path)
end
def stub_put(path)
stub_request(:put, Ubiregi::Connect::Client::ENDPOINT + path)
end
def stub_delete(path)
stub_request(:delete, Ubiregi::Connect::Client::ENDPOINT + path)
end
Add Coveralls test coverage tool code
# $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'ubiregi'
require 'vcr'
require 'webmock/rspec'
require 'coveralls'
require 'simplecov'
VCR.configure do |c|
c.cassette_library_dir = 'spec/vcr_cassettes'
c.hook_into :webmock
c.allow_http_connections_when_no_cassette = true
end
# Send coverage both to a local HTML report and to Coveralls.
# FIX: `Coveralls.wear!` had been pasted inside the MultiFormatter
# argument list, which is a SyntaxError (two expressions with no comma)
# and redundant besides -- the Coveralls formatter is already registered
# below and SimpleCov.start is invoked explicitly afterwards.
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
add_filter '/spec/'
end
include WebMock::API
# WebMock assertion helpers: `a_<verb>(path)` builds a request matcher for
# `path` relative to the Ubiregi API endpoint, for use with `should have_been_made`.
def a_get(path)
a_request(:get, Ubiregi::Connect::Client::ENDPOINT + path)
end
def a_post(path)
a_request(:post, Ubiregi::Connect::Client::ENDPOINT + path)
end
def a_put(path)
a_request(:put, Ubiregi::Connect::Client::ENDPOINT + path)
end
def a_delete(path)
a_request(:delete, Ubiregi::Connect::Client::ENDPOINT + path)
end
# WebMock stubbing helpers: `stub_<verb>(path)` registers a stub for `path`
# relative to the Ubiregi API endpoint.
def stub_get(path)
stub_request(:get, Ubiregi::Connect::Client::ENDPOINT + path)
end
def stub_post(path)
stub_request(:post, Ubiregi::Connect::Client::ENDPOINT + path)
end
def stub_put(path)
stub_request(:put, Ubiregi::Connect::Client::ENDPOINT + path)
end
def stub_delete(path)
stub_request(:delete, Ubiregi::Connect::Client::ENDPOINT + path)
end
|
require 'codeclimate-test-reporter'
CodeClimate::TestReporter.start
ENV['RAILS_ENV'] = 'test'
begin
require 'pry'
rescue LoadError
end
require_relative '../config/environment'
require 'rspec/rails'
require 'factory_girl'
require 'feature/testing'
require 'html_validation'
require 'webmock/rspec'
require 'database_cleaner'
require 'shoulda-matchers'
require 'timecop'
Time.zone = 'London'
# this seems to be required for the CI to work properly
ENV['TZ'] = 'Europe/London'
DatabaseCleaner.strategy = :deletion
Draper::ViewContext.test_strategy :fast
FactoryGirl.definition_file_paths << File.join(File.dirname(__FILE__), '..', 'features', 'factories')
FactoryGirl.find_definitions
I18n.available_locales = [:en, :cy]
PageValidations::HTMLValidation.ignored_attribute_errors = %w(tabindex itemscope itemtype itemprop)
PageValidations::HTMLValidation.ignored_tag_errors = %w(main svg symbol polygon use)
PageValidations::HTMLValidation.ignored_errors = ['letter not allowed here']
VCR.configure do |c|
c.cassette_library_dir = 'spec/cassettes'
c.hook_into :webmock
c.ignore_hosts 'codeclimate.com'
end
WebMock.disable_net_connect!(allow: 'codeclimate.com')
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
RSpec.configure do |c|
c.include FactoryGirl::Syntax::Methods
c.include Devise::TestHelpers, type: :controller
c.include PageValidations
c.include Rails.application.routes.url_helpers
c.infer_base_class_for_anonymous_controllers = false
c.alias_it_should_behave_like_to :it_has_behavior, 'exhibits behaviour of an'
c.use_transactional_fixtures = true
c.order = 'random'
c.run_all_when_everything_filtered = true
c.disable_monkey_patching!
c.around(:example) do |example|
action_plan_repository = Core::Registry::Repository[:action_plan]
article_repository = Core::Registry::Repository[:article]
category_repository = Core::Registry::Repository[:category]
corporate_repository = Core::Registry::Repository[:corporate]
search_repository = Core::Registry::Repository[:search]
Core::Registry::Repository[:action_plan] = Core::Repository::VCR.new(action_plan_repository)
Core::Registry::Repository[:article] = Core::Repository::VCR.new(article_repository)
Core::Registry::Repository[:category] = Core::Repository::VCR.new(category_repository)
Core::Registry::Repository[:corporate] = Core::Repository::VCR.new(corporate_repository)
Core::Registry::Repository[:search] = Core::Repository::VCR.new(search_repository)
if example.metadata[:features]
Feature.run_with_activated(*example.metadata[:features]) do
Rails.application.reload_routes!
Devise.regenerate_helpers!
Devise.class_variable_set(:@@warden_configured, false)
Devise.configure_warden!
example.run
end
Rails.application.reload_routes!
Devise.regenerate_helpers!
Devise.class_variable_set(:@@warden_configured, false)
Devise.configure_warden!
else
example.run
end
Core::Registry::Repository[:action_plan] = action_plan_repository
Core::Registry::Repository[:article] = article_repository
Core::Registry::Repository[:category] = category_repository
Core::Registry::Repository[:corporate] = corporate_repository
Core::Registry::Repository[:search] = search_repository
end
c.before(:suite) do
DatabaseCleaner.clean
ActiveRecord::Tasks::DatabaseTasks.load_schema(:ruby, ENV['SCHEMA'])
Core::Registry::Repository[:customer] = Core::Repository::Customers::Fake.new
end
c.before :each do
I18n.locale = :en
Core::Registry::Repository[:customer].clear
end
end
Add SVG rect to validator ignore list
require 'codeclimate-test-reporter'
CodeClimate::TestReporter.start
ENV['RAILS_ENV'] = 'test'
begin
require 'pry'
rescue LoadError
end
require_relative '../config/environment'
require 'rspec/rails'
require 'factory_girl'
require 'feature/testing'
require 'html_validation'
require 'webmock/rspec'
require 'database_cleaner'
require 'shoulda-matchers'
require 'timecop'
Time.zone = 'London'
# this seems to be required for the CI to work properly
ENV['TZ'] = 'Europe/London'
DatabaseCleaner.strategy = :deletion
Draper::ViewContext.test_strategy :fast
FactoryGirl.definition_file_paths << File.join(File.dirname(__FILE__), '..', 'features', 'factories')
FactoryGirl.find_definitions
I18n.available_locales = [:en, :cy]
PageValidations::HTMLValidation.ignored_attribute_errors = %w(tabindex itemscope itemtype itemprop)
PageValidations::HTMLValidation.ignored_tag_errors = %w(main svg symbol polygon use rect)
PageValidations::HTMLValidation.ignored_errors = ['letter not allowed here']
VCR.configure do |c|
c.cassette_library_dir = 'spec/cassettes'
c.hook_into :webmock
c.ignore_hosts 'codeclimate.com'
end
WebMock.disable_net_connect!(allow: 'codeclimate.com')
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
RSpec.configure do |c|
c.include FactoryGirl::Syntax::Methods
c.include Devise::TestHelpers, type: :controller
c.include PageValidations
c.include Rails.application.routes.url_helpers
c.infer_base_class_for_anonymous_controllers = false
c.alias_it_should_behave_like_to :it_has_behavior, 'exhibits behaviour of an'
c.use_transactional_fixtures = true
c.order = 'random'
c.run_all_when_everything_filtered = true
c.disable_monkey_patching!
c.around(:example) do |example|
action_plan_repository = Core::Registry::Repository[:action_plan]
article_repository = Core::Registry::Repository[:article]
category_repository = Core::Registry::Repository[:category]
corporate_repository = Core::Registry::Repository[:corporate]
search_repository = Core::Registry::Repository[:search]
Core::Registry::Repository[:action_plan] = Core::Repository::VCR.new(action_plan_repository)
Core::Registry::Repository[:article] = Core::Repository::VCR.new(article_repository)
Core::Registry::Repository[:category] = Core::Repository::VCR.new(category_repository)
Core::Registry::Repository[:corporate] = Core::Repository::VCR.new(corporate_repository)
Core::Registry::Repository[:search] = Core::Repository::VCR.new(search_repository)
if example.metadata[:features]
Feature.run_with_activated(*example.metadata[:features]) do
Rails.application.reload_routes!
Devise.regenerate_helpers!
Devise.class_variable_set(:@@warden_configured, false)
Devise.configure_warden!
example.run
end
Rails.application.reload_routes!
Devise.regenerate_helpers!
Devise.class_variable_set(:@@warden_configured, false)
Devise.configure_warden!
else
example.run
end
Core::Registry::Repository[:action_plan] = action_plan_repository
Core::Registry::Repository[:article] = article_repository
Core::Registry::Repository[:category] = category_repository
Core::Registry::Repository[:corporate] = corporate_repository
Core::Registry::Repository[:search] = search_repository
end
c.before(:suite) do
DatabaseCleaner.clean
ActiveRecord::Tasks::DatabaseTasks.load_schema(:ruby, ENV['SCHEMA'])
Core::Registry::Repository[:customer] = Core::Repository::Customers::Fake.new
end
c.before :each do
I18n.locale = :en
Core::Registry::Repository[:customer].clear
end
end
|
require 'neo-tmdb'
api_key = ENV["TMDB_API_KEY"] or raise "You must set the TMDB_API_KEY environment variable to run these tests."
TMDb.configure do |config|
config.api_key = api_key
end
Add basic vcr config to spec_helper
require 'neo-tmdb'
require 'vcr'
VCR.configure do |config|
config.cassette_library_dir = 'spec/cassettes'
config.hook_into :faraday
config.configure_rspec_metadata!
end
RSpec.configure do |config|
# In RSpec 3 this will no longer be necessary.
config.treat_symbols_as_metadata_keys_with_true_values = true
end
api_key = ENV["TMDB_API_KEY"] or raise "You must set the TMDB_API_KEY environment variable to run these tests."
TMDb.configure do |config|
config.api_key = api_key
end
|
require "rubygems"
require "spec"
require "ruby-plsql"
# Establish connection to database where tests will be performed.
# Change according to your needs.
DATABASE_USER = "hr"
DATABASE_PASSWORD = "hr"
DATABASE_NAME = "orcl"
DATABASE_HOST = "localhost" # necessary for JDBC connection
DATABASE_PORT = 1521 # necessary for JDBC connection
unless defined?(JRUBY_VERSION)
plsql.connection = OCI8.new DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME
else
plsql.connection = java.sql.DriverManager.getConnection("jdbc:oracle:thin:@#{DATABASE_HOST}:#{DATABASE_PORT}:#{DATABASE_NAME}",
DATABASE_USER, DATABASE_PASSWORD)
end
# Set autocommit to false so that automatic commits after each statement are _not_ performed
plsql.connection.autocommit = false
# reduce network traffic in case of large resultsets
plsql.connection.prefetch_rows = 100
# uncomment to log DBMS_OUTPUT to standard output
# plsql.dbms_output_stream = STDOUT
# Legacy RSpec 1.x runner configuration: isolate each example by setting an
# Oracle savepoint before it and rolling back to that savepoint afterwards
# (autocommit is disabled above, so nothing is persisted between examples).
Spec::Runner.configure do |config|
config.before(:each) do
plsql.execute "SAVEPOINT before_each"
end
config.after(:each) do
# Always perform rollback to savepoint after each test
plsql.execute "ROLLBACK TO before_each"
end
config.after(:all) do
# Always perform rollback after each describe block
plsql.rollback
end
end
# require all helper methods which are located in any helpers subdirectories
Dir[File.dirname(__FILE__) + '/**/helpers/*.rb'].each {|f| require f}
# require all factory modules which are located in any factories subdirectories
Dir[File.dirname(__FILE__) + '/**/factories/*.rb'].each {|f| require f}
# Add source directory to load path where PL/SQL example procedures are defined.
# It is not required if PL/SQL procedures are already loaded in test database in some other way.
$:.push File.dirname(__FILE__) + '/../source'
added connection logoff when exiting
require "rubygems"
require "spec"
require "ruby-plsql"
# Establish connection to database where tests will be performed.
# Change according to your needs.
DATABASE_USER = "hr"
DATABASE_PASSWORD = "hr"
DATABASE_NAME = "orcl"
DATABASE_HOST = "localhost" # necessary for JDBC connection
DATABASE_PORT = 1521 # necessary for JDBC connection
unless defined?(JRUBY_VERSION)
plsql.connection = OCI8.new DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME
else
plsql.connection = java.sql.DriverManager.getConnection("jdbc:oracle:thin:@#{DATABASE_HOST}:#{DATABASE_PORT}:#{DATABASE_NAME}",
DATABASE_USER, DATABASE_PASSWORD)
end
# Set autocommit to false so that automatic commits after each statement are _not_ performed
plsql.connection.autocommit = false
# reduce network traffic in case of large resultsets
plsql.connection.prefetch_rows = 100
# uncomment to log DBMS_OUTPUT to standard output
# plsql.dbms_output_stream = STDOUT
# Do logoff when exiting to ensure that session temporary tables
# (used when calling procedures with table types defined in packages)
at_exit do
plsql.logoff
end
Spec::Runner.configure do |config|
config.before(:each) do
plsql.execute "SAVEPOINT before_each"
end
config.after(:each) do
# Always perform rollback to savepoint after each test
plsql.execute "ROLLBACK TO before_each"
end
config.after(:all) do
# Always perform rollback after each describe block
plsql.rollback
end
end
# require all helper methods which are located in any helpers subdirectories
Dir[File.dirname(__FILE__) + '/**/helpers/*.rb'].each {|f| require f}
# require all factory modules which are located in any factories subdirectories
Dir[File.dirname(__FILE__) + '/**/factories/*.rb'].each {|f| require f}
# Add source directory to load path where PL/SQL example procedures are defined.
# It is not required if PL/SQL procedures are already loaded in test database in some other way.
$:.push File.dirname(__FILE__) + '/../source'
|
require 'coveralls'
Coveralls.wear!
require 'rubygems'
require 'bundler/setup'
# Require in order, so both CELLULOID_TEST and CELLULOID_DEBUG are true
require 'celluloid/test'
# Shared constants/helpers for the Celluloid specs.
module CelluloidSpecs
# The module whose behavior the shared specs exercise; downstream projects
# override this to point at their own module.
def self.included_module
# Celluloid::IO implements this with 'Celluloid::IO'
Celluloid
end
# Timer accuracy enforced by the tests (50ms)
TIMER_QUANTUM = 0.05
end
$CELLULOID_DEBUG = true
$CELLULOID_BYPASS_FLAKY = ENV['CLLLD_BYPASS_FLAKY'] != "false" # defaults to bypass
require 'celluloid/probe'
require 'rspec/log_split'
Celluloid.shutdown_timeout = 1
Dir['./spec/support/*.rb'].map {|f| require f }
RSpec.configure do |config|
config.filter_run :focus => true
config.run_all_when_everything_filtered = true
config.disable_monkey_patching!
config.log_split_dir = File.expand_path("../../log/#{Time.now.iso8601}", __FILE__)
config.log_split_module = Celluloid
config.around do |ex|
Celluloid.actor_system = nil
Thread.list.each do |thread|
next if thread == Thread.current
if defined?(JRUBY_VERSION)
# Avoid disrupting jRuby's "fiber" threads.
next if /Fiber/ =~ thread.to_java.getNativeThread.get_name
end
thread.kill
end
ex.run
end
config.around actor_system: :global do |ex|
Celluloid.boot
ex.run
Celluloid.shutdown
end
config.around actor_system: :within do |ex|
Celluloid::ActorSystem.new.within do
ex.run
end
end
config.filter_gems_from_backtrace(*%w(rspec-expectations rspec-core rspec-mocks))
config.mock_with :rspec do |mocks|
mocks.verify_doubled_constant_names = true
mocks.verify_partial_doubles = true
end
config.around(:each) do |example|
config.default_retry_count = example.metadata[:flaky] ? 3 : 1
if example.metadata[:flaky] and $CELLULOID_BYPASS_FLAKY
example.run broken: true
else
example.run
end
end
# Must be *after* the around hook above
require 'rspec/retry'
config.verbose_retry = true
config.default_sleep_interval = 3
end
remove rspec-retry from backtraces
require 'coveralls'
Coveralls.wear!
require 'rubygems'
require 'bundler/setup'
# Require in order, so both CELLULOID_TEST and CELLULOID_DEBUG are true
require 'celluloid/test'
module CelluloidSpecs
def self.included_module
# Celluloid::IO implements this with with 'Celluloid::IO'
Celluloid
end
# Timer accuracy enforced by the tests (50ms)
TIMER_QUANTUM = 0.05
end
$CELLULOID_DEBUG = true
$CELLULOID_BYPASS_FLAKY = ENV['CLLLD_BYPASS_FLAKY'] != "false" # defaults to bypass
require 'celluloid/probe'
require 'rspec/log_split'
Celluloid.shutdown_timeout = 1
Dir['./spec/support/*.rb'].map {|f| require f }
RSpec.configure do |config|
config.filter_run :focus => true
config.run_all_when_everything_filtered = true
config.disable_monkey_patching!
config.log_split_dir = File.expand_path("../../log/#{Time.now.iso8601}", __FILE__)
config.log_split_module = Celluloid
config.around do |ex|
Celluloid.actor_system = nil
Thread.list.each do |thread|
next if thread == Thread.current
if defined?(JRUBY_VERSION)
# Avoid disrupting jRuby's "fiber" threads.
next if /Fiber/ =~ thread.to_java.getNativeThread.get_name
end
thread.kill
end
ex.run
end
config.around actor_system: :global do |ex|
Celluloid.boot
ex.run
Celluloid.shutdown
end
config.around actor_system: :within do |ex|
Celluloid::ActorSystem.new.within do
ex.run
end
end
config.filter_gems_from_backtrace(*%w(rspec-expectations rspec-core rspec-mocks rspec-retry))
config.mock_with :rspec do |mocks|
mocks.verify_doubled_constant_names = true
mocks.verify_partial_doubles = true
end
config.around(:each) do |example|
config.default_retry_count = example.metadata[:flaky] ? 3 : 1
if example.metadata[:flaky] and $CELLULOID_BYPASS_FLAKY
example.run broken: true
else
example.run
end
end
# Must be *after* the around hook above
require 'rspec/retry'
config.verbose_retry = true
config.default_sleep_interval = 3
end |
VCR.configure do |config|
config.cassette_library_dir = 'spec/cassettes'
config.hook_into :webmock
config.filter_sensitive_data('<API_KEY>') { ENV['BNET_API_KEY'] }
end
RSpec.configure do |c|
c.around(:each, :vcr) do |example|
name = example.metadata[:full_description].split(/\s+/, 2).join("/").downcase.gsub(/\s+/,"_")
VCR.use_cassette(name) { example.call }
end
end
Try to solve CI problem with vcr.
VCR.configure do |config|
config.cassette_library_dir = 'spec/cassettes'
config.hook_into :webmock
config.filter_sensitive_data('API_KEY') { ENV['BNET_API_KEY'] }
end
RSpec.configure do |c|
c.around(:each, :vcr) do |example|
name = example.metadata[:full_description].split(/\s+/, 2).join("/").downcase.gsub(/\s+/,"_")
VCR.use_cassette(name) { example.call }
end
end
|
# Copyright (c) 2011 - 2013, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
# Schmidt
require 'minitest/autorun'
require 'minitest/spec'
require 'minitest/mock'
require 'mocha/mini_test'
require 'pathname'
require 'lhm'
$project = Pathname.new(File.dirname(__FILE__) + '/..').cleanpath
$spec = $project.join('spec')
$fixtures = $spec.join('fixtures')
require 'active_record'
begin
require 'mysql2'
rescue LoadError
require 'mysql'
end
logger = Logger.new STDOUT
logger.level = Logger::WARN
Lhm.logger = logger
# Temporarily silences Ruby warnings ($VERBOSE = nil) while the given block
# runs, restoring the previous setting even if the block raises.
def without_verbose(&block)
  saved = $VERBOSE
  $VERBOSE = nil
  yield
ensure
  $VERBOSE = saved
end
Fix mocha/mini_test deprecation warning
# Copyright (c) 2011 - 2013, SoundCloud Ltd., Rany Keddo, Tobias Bielohlawek, Tobias
# Schmidt
require 'minitest/autorun'
require 'minitest/spec'
require 'minitest/mock'
require 'mocha/minitest'
require 'pathname'
require 'lhm'
$project = Pathname.new(File.dirname(__FILE__) + '/..').cleanpath
$spec = $project.join('spec')
$fixtures = $spec.join('fixtures')
require 'active_record'
begin
require 'mysql2'
rescue LoadError
require 'mysql'
end
logger = Logger.new STDOUT
logger.level = Logger::WARN
Lhm.logger = logger
# Temporarily silences Ruby's verbose warnings for the duration of the block.
# The original $VERBOSE value is reinstated in the ensure clause, and the
# block's return value is passed through.
def without_verbose(&block)
  previous_setting, $VERBOSE = $VERBOSE, nil
  block.call
ensure
  $VERBOSE = previous_setting
end
|
require 'spec_helper'

# Specs for the Vmstat native extension: boot time, device filtering, and a
# memory-leak smoke test for each native call.
describe Vmstat do
  context "#boot_time" do
    let(:boot_time) { Vmstat.boot_time }

    # Description fixed: the matcher checks for a Time, not an Array.
    it "should be a time" do
      boot_time.should be_a(Time)
    end

    it "has to be a time before now" do
      boot_time.should < Time.now
    end
  end

  context "Vmstat#filter_devices" do
    it "should filter ethernet devices" do
      # The host may have any number of NICs; require at least the two the
      # test environment guarantees instead of an exact, brittle count.
      Vmstat.ethernet_devices.size.should >= 2
    end

    it "should filter loopback devices" do
      Vmstat.loopback_devices.size.should == 1
    end
  end

  context "performance" do
    # Calls the method 10_000 times and asserts resident memory grew by less
    # than 10%, i.e. the native extension does not leak per call.
    shared_examples "a not memory leaking method" do |method_name, *args|
      it "should not grow the memory in method #{method_name} more than 10% " do
        mem_before = Vmstat.task.resident_size
        10000.times { Vmstat.send(method_name, *args) }
        mem_after = Vmstat.task.resident_size
        mem_after.should < (mem_before * 1.10)
      end
    end

    it_should_behave_like "a not memory leaking method", :network_interfaces
    it_should_behave_like "a not memory leaking method", :cpu
    it_should_behave_like "a not memory leaking method", :memory
    it_should_behave_like "a not memory leaking method", :disk, "/"
    it_should_behave_like "a not memory leaking method", :boot_time
    it_should_behave_like "a not memory leaking method", :load_average
  end
end
Accept more than one ethernet device
require 'spec_helper'

# Specs for the Vmstat native extension: boot time, device filtering, and a
# memory-leak smoke test for each native call.
describe Vmstat do
  context "#boot_time" do
    let(:boot_time) { Vmstat.boot_time }

    # Description fixed: the matcher checks for a Time, not an Array.
    it "should be a time" do
      boot_time.should be_a(Time)
    end

    it "has to be a time before now" do
      boot_time.should < Time.now
    end
  end

  context "Vmstat#filter_devices" do
    it "should filter ethernet devices" do
      # At least two NICs are expected; hosts may have more.
      Vmstat.ethernet_devices.size.should >= 2
    end

    it "should filter loopback devices" do
      Vmstat.loopback_devices.size.should == 1
    end
  end

  context "performance" do
    # Calls the method 10_000 times and asserts resident memory grew by less
    # than 10%, i.e. the native extension does not leak per call.
    shared_examples "a not memory leaking method" do |method_name, *args|
      it "should not grow the memory in method #{method_name} more than 10% " do
        mem_before = Vmstat.task.resident_size
        10000.times { Vmstat.send(method_name, *args) }
        mem_after = Vmstat.task.resident_size
        mem_after.should < (mem_before * 1.10)
      end
    end

    it_should_behave_like "a not memory leaking method", :network_interfaces
    it_should_behave_like "a not memory leaking method", :cpu
    it_should_behave_like "a not memory leaking method", :memory
    it_should_behave_like "a not memory leaking method", :disk, "/"
    it_should_behave_like "a not memory leaking method", :boot_time
    it_should_behave_like "a not memory leaking method", :load_average
  end
end
|
# -*- coding:utf-8; mode:ruby; -*-
# Specs for ActiveWindowX::Window. All Xlib calls are stubbed with
# rspec-mocks, so no X server is required to run them.
require 'active_window_x'

include ActiveWindowX

describe Window do
  before do
    @raw_display = mock Xlib::Display
    @display = mock Display
    @display.stub(:raw){@raw_display}
    @display.stub(:kind_of?).with(Display).and_return(true)
    @id = 123456
    @window = Window.new @display, @id
  end

  describe '#root' do
    context ', which recive a window from x_query_tree,' do
      before do
        @root = 111
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([@root, nil, []])
      end
      it 'should return the root Window' do
        @window.root.id.should == @root
      end
    end
    context ', which recive nil from x_query_tree,' do
      before do
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([nil, nil, []])
      end
      it 'should return nil' do
        @window.root.should be_nil
      end
    end
  end

  describe '#parent' do
    context ', which recive a window from x_query_tree,' do
      before do
        @parent = 111
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([nil, @parent, []])
      end
      it 'should return the parent Window' do
        @window.parent.id.should == @parent
      end
    end
    context ', which recive nil from x_query_tree,' do
      before do
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([nil, nil, []])
      end
      it 'should return nil' do
        # Fixed copy-paste bug: this example belongs to #parent but asserted
        # on @window.root, so #parent's nil path was never actually tested.
        @window.parent.should be_nil
      end
    end
  end

  describe '#children' do
    before do
      @children = []
      @children.push 111
      @children.push 222
      @children.push 333
      Xlib.should_receive(:x_query_tree).
        with(@display.raw, @window.id).
        and_return([nil, nil, @children])
    end
    it 'should return the child Windows' do
      c = @window.children
      c[0].id.should == @children[0]
      c[1].id.should == @children[1]
      c[2].id.should == @children[2]
    end
  end

  describe '#prop' do
    before do
      @prop_id = 1234
      @prop_name = 'FOO'
      @display.should_receive(:intern_atom).with(@prop_name).and_return(@prop_id)
    end
    context 'with a property name, which does not exist for the window,' do
      before do
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([nil, 0, 0, 0, nil])
      end
      it 'should return nil' do
        @window.prop(@prop_name).should be_nil
      end
    end
    # format 32 => values unpacked as native longs
    context 'with a property name, which exist for the window and is the long type,' do
      before do
        @prop = [123, 456]
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([@prop_id, 32, @prop.length, 0, @prop.pack('l!*')])
      end
      it 'should return a Array of Numeric' do
        @window.prop(@prop_name).should == @prop
      end
    end
    # format 16 => values unpacked as shorts
    context 'with a property name, which exist for the window and is the short type,' do
      before do
        @prop = [12, 34, 46]
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([@prop_id, 16, @prop.length, 0, @prop.pack('s*')])
      end
      it 'should return a Array of Numeric' do
        @window.prop(@prop_name).should == @prop
      end
    end
    # format 8 => raw byte string returned as-is (may contain NULs)
    context 'with a property name, which exist for the window and is the char type,' do
      before do
        @prop = "abcdefg\0hijklmn"
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([@prop_id, 8, @prop.length, 0, @prop])
      end
      it 'should return String' do
        @window.prop(@prop_name).should == @prop
      end
    end
  end

  describe '#prop_atom_ids' do
    context ', which recieve property atoms(Numeric),' do
      before do
        @prop_list = [000, 111, 222]
        Xlib.should_receive(:x_list_properties).and_return(@prop_list)
      end
      it 'shuold return the atoms' do
        @window.prop_atom_ids.should == @prop_list
      end
    end
    context ', which recieve no property atoms,' do
      before do
        @prop_list = nil
        Xlib.should_receive(:x_list_properties){ @prop_list }
      end
      it 'shuold return an empty array' do
        @window.prop_atom_ids.should == []
      end
    end
  end

  describe '#select_input' do
    context 'with a valid mask' do
      before do
        @mask = Xlib::PropertyChangeMask
        Xlib.should_receive(:x_select_input).with(@display.raw, @id, @mask).and_return(1)
      end
      it 'should call Xlib::x_select_input' do
        @window.select_input @mask
      end
    end
  end
end
Add examples for Window#title
# -*- coding:utf-8; mode:ruby; -*-
# Specs for ActiveWindowX::Window. All Xlib calls are stubbed with
# rspec-mocks, so no X server is required to run them.
require 'active_window_x'

include ActiveWindowX

describe Window do
  before do
    @raw_display = mock Xlib::Display
    @display = mock Display
    @display.stub(:raw){@raw_display}
    @display.stub(:kind_of?).with(Display).and_return(true)
    @id = 123456
    @window = Window.new @display, @id
  end

  describe '#root' do
    context ', which recive a window from x_query_tree,' do
      before do
        @root = 111
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([@root, nil, []])
      end
      it 'should return the root Window' do
        @window.root.id.should == @root
      end
    end
    context ', which recive nil from x_query_tree,' do
      before do
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([nil, nil, []])
      end
      it 'should return nil' do
        @window.root.should be_nil
      end
    end
  end

  describe '#parent' do
    context ', which recive a window from x_query_tree,' do
      before do
        @parent = 111
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([nil, @parent, []])
      end
      it 'should return the parent Window' do
        @window.parent.id.should == @parent
      end
    end
    context ', which recive nil from x_query_tree,' do
      before do
        Xlib.should_receive(:x_query_tree).
          with(@display.raw, @window.id).
          and_return([nil, nil, []])
      end
      it 'should return nil' do
        # Fixed copy-paste bug: this example belongs to #parent but asserted
        # on @window.root, so #parent's nil path was never actually tested.
        @window.parent.should be_nil
      end
    end
  end

  describe '#children' do
    before do
      @children = []
      @children.push 111
      @children.push 222
      @children.push 333
      Xlib.should_receive(:x_query_tree).
        with(@display.raw, @window.id).
        and_return([nil, nil, @children])
    end
    it 'should return the child Windows' do
      c = @window.children
      c[0].id.should == @children[0]
      c[1].id.should == @children[1]
      c[2].id.should == @children[2]
    end
  end

  # _NET_WM_NAME (EWMH) takes precedence over the legacy WM_NAME property.
  describe '#title' do
    context ', when the _NET_WM_NAME and WM_NAME prop was not found,' do
      before do
        @window.stub(:prop).with('_NET_WM_NAME').and_return(nil)
        @window.stub(:prop).with('WM_NAME').and_return(nil)
      end
      it 'should return nil' do
        @window.title.should be_nil
      end
    end
    context ', when the WM_NAME prop was found,' do
      before do
        @title = 'foo bar baz'
        @window.stub(:prop).with('_NET_WM_NAME').and_return(nil)
        @window.stub(:prop).with('WM_NAME').and_return(@title)
      end
      it 'should return the value' do
        @window.title.should == @title
      end
    end
    context ', when the _NET_WM_NAME prop was found,' do
      before do
        @title = 'foo bar baz'
        @window.stub(:prop).with('_NET_WM_NAME').and_return(@title)
      end
      it 'should return the value' do
        @window.title.should == @title
      end
    end
  end

  # Pending examples (no bodies yet): documented behavior still to specify.
  describe '#name' do
    context ', when the name prop was not found,'
    context ', when the name prop was found,'
  end

  describe '#class' do
    context ', when the class prop was not found,'
    context ', when the class prop was found,'
  end

  describe '#pid' do
    context ', when the pid prop was not found,'
    context ', when the pid prop was found,'
  end

  describe '#command' do
    context ', when the pid prop was not found,'
    context ', when the pid prop was found,'
  end

  describe '#prop' do
    before do
      @prop_id = 1234
      @prop_name = 'FOO'
      @display.should_receive(:intern_atom).with(@prop_name).and_return(@prop_id)
    end
    context 'with a property name, which does not exist for the window,' do
      before do
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([nil, 0, 0, 0, nil])
      end
      it 'should return nil' do
        @window.prop(@prop_name).should be_nil
      end
    end
    # format 32 => values unpacked as native longs
    context 'with a property name, which exist for the window and is the long type,' do
      before do
        @prop = [123, 456]
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([@prop_id, 32, @prop.length, 0, @prop.pack('l!*')])
      end
      it 'should return a Array of Numeric' do
        @window.prop(@prop_name).should == @prop
      end
    end
    # format 16 => values unpacked as shorts
    context 'with a property name, which exist for the window and is the short type,' do
      before do
        @prop = [12, 34, 46]
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([@prop_id, 16, @prop.length, 0, @prop.pack('s*')])
      end
      it 'should return a Array of Numeric' do
        @window.prop(@prop_name).should == @prop
      end
    end
    # format 8 => raw byte string returned as-is (may contain NULs)
    context 'with a property name, which exist for the window and is the char type,' do
      before do
        @prop = "abcdefg\0hijklmn"
        Xlib.should_receive(:x_get_window_property).
          with(@display.raw, @window.id, @prop_id, 0, Window::READ_BUFF_LENGTH, false, Xlib::AnyPropertyType).
          and_return([@prop_id, 8, @prop.length, 0, @prop])
      end
      it 'should return String' do
        @window.prop(@prop_name).should == @prop
      end
    end
  end

  describe '#prop_atom_ids' do
    context ', which recieve property atoms(Numeric),' do
      before do
        @prop_list = [000, 111, 222]
        Xlib.should_receive(:x_list_properties).and_return(@prop_list)
      end
      it 'shuold return the atoms' do
        @window.prop_atom_ids.should == @prop_list
      end
    end
    context ', which recieve no property atoms,' do
      before do
        @prop_list = nil
        Xlib.should_receive(:x_list_properties){ @prop_list }
      end
      it 'shuold return an empty array' do
        @window.prop_atom_ids.should == []
      end
    end
  end

  describe '#select_input' do
    context 'with a valid mask' do
      before do
        @mask = Xlib::PropertyChangeMask
        Xlib.should_receive(:x_select_input).with(@display.raw, @id, @mask).and_return(1)
      end
      it 'should call Xlib::x_select_input' do
        @window.select_input @mask
      end
    end
  end
end
|
#!/usr/bin/env ruby
# frozen_string_literal: true

require 'sinatra/base'
require 'slim'
require 'pathname'
require_relative '../lib/meetup_generator'

ROOT = Pathname.new(__FILE__).dirname.parent
# Removed the unused LIB constant: it was defined but never referenced
# anywhere in this script.
MG = MeetupGenerator.new

# The Meetup Generator. Work is done in lib/meetup_generator.rb
#
class MeetupGeneratorWebApp < Sinatra::Base
  # Whitelist of generator methods exposed through the JSON API.
  def api_methods
    %w[talk title talker role company refreshment agenda date location]
  end

  set :root, ROOT

  get '/api/:item?/?*' do
    content_type :json
    # Reject anything not on the whitelist so arbitrary methods cannot be
    # invoked via send().
    return [404, 'not found'] unless api_methods.include?(params[:item])
    MG.send(params[:item]).to_json.chomp
  end

  get '*' do
    @agenda = MG.agenda(5)
    slim :default
  end

  run! if $PROGRAM_NAME == __FILE__
end
Remove unused LIB constant definition
#!/usr/bin/env ruby
# frozen_string_literal: true

require 'sinatra/base'
require 'slim'
require 'pathname'
require_relative '../lib/meetup_generator'

ROOT = Pathname.new(__FILE__).dirname.parent
MG = MeetupGenerator.new

# The Meetup Generator. Work is done in lib/meetup_generator.rb
#
class MeetupGeneratorWebApp < Sinatra::Base
  # Whitelist of generator methods exposed through the JSON API.
  def api_methods
    %w[talk title talker role company refreshment agenda date location]
  end

  set :root, ROOT

  get '/api/:item?/?*' do
    content_type :json
    # Reject anything not on the whitelist so arbitrary methods cannot be
    # invoked via send().
    return [404, 'not found'] unless api_methods.include?(params[:item])
    MG.send(params[:item]).to_json.chomp
  end

  get '*' do
    @agenda = MG.agenda(5)
    slim :default
  end

  run! if $PROGRAM_NAME == __FILE__
end
|
=begin
Copyright (c) 2006-2011 Ruby-GNOME2 Project Team
This program is licenced under the same licence as Ruby-GNOME2.
=end

require 'glib2'
require 'pango'
require 'gdk_pixbuf2'

# Make bundled DLLs discoverable before loading the native extension
# (relevant for the Windows binary gem layout under vendor/local).
base_dir = Pathname.new(__FILE__).dirname.dirname.dirname.expand_path
vendor_dir = base_dir + "vendor" + "local"
vendor_bin_dir = vendor_dir + "bin"
# GLib renamed prepend_environment_path to prepend_dll_path; use the current
# API so the vendored DLL directory is actually registered.
GLib.prepend_dll_path(vendor_bin_dir)

# Prefer the extension built for this exact Ruby MAJOR.MINOR version.
begin
  major, minor, _ = RUBY_VERSION.split(/\./)
  require "#{major}.#{minor}/gdk3.so"
rescue LoadError
  require "gdk3.so"
end

if vendor_dir.exist?
  begin
    require "gobject-introspection"
    vendor_girepository_dir = vendor_dir + "lib" + "girepository-1.0"
    GObjectIntrospection.prepend_typelib_path(vendor_girepository_dir)
  rescue LoadError
    # gobject-introspection is optional; ignore when unavailable.
  end
end

module Gdk
  LOG_DOMAIN = "Gdk"
end

if Gdk.cairo_available?
  module Cairo
    class Context
      if method_defined?(:set_source_color)
        alias_method :set_source_not_gdk_color, :set_source_color
        # Accept a Gdk::Color in addition to cairo's own color arguments.
        def set_source_color(color)
          if color.is_a?(Gdk::Color)
            set_source_gdk_color(color)
          else
            set_source_not_gdk_color(color)
          end
        end
      else
        alias_method :set_source_color, :set_source_gdk_color
      end

      def source_color=(color)
        set_source_color(color)
        color
      end
    end
  end
end

GLib::Log.set_log_domain(Gdk::LOG_DOMAIN)
gdk3: follow prepend DLL path method name change
=begin
Copyright (c) 2006-2011 Ruby-GNOME2 Project Team
This program is licenced under the same licence as Ruby-GNOME2.
=end

require 'glib2'
require 'pango'
require 'gdk_pixbuf2'

# Make bundled DLLs discoverable before loading the native extension
# (relevant for the Windows binary gem layout under vendor/local).
base_dir = Pathname.new(__FILE__).dirname.dirname.dirname.expand_path
vendor_dir = base_dir + "vendor" + "local"
vendor_bin_dir = vendor_dir + "bin"
GLib.prepend_dll_path(vendor_bin_dir)

# Prefer the extension built for this exact Ruby MAJOR.MINOR version.
begin
  major, minor, _ = RUBY_VERSION.split(/\./)
  require "#{major}.#{minor}/gdk3.so"
rescue LoadError
  require "gdk3.so"
end

if vendor_dir.exist?
  begin
    require "gobject-introspection"
    vendor_girepository_dir = vendor_dir + "lib" + "girepository-1.0"
    GObjectIntrospection.prepend_typelib_path(vendor_girepository_dir)
  rescue LoadError
    # gobject-introspection is optional; ignore when unavailable.
  end
end

module Gdk
  LOG_DOMAIN = "Gdk"
end

if Gdk.cairo_available?
  module Cairo
    class Context
      if method_defined?(:set_source_color)
        alias_method :set_source_not_gdk_color, :set_source_color
        # Accept a Gdk::Color in addition to cairo's own color arguments.
        def set_source_color(color)
          if color.is_a?(Gdk::Color)
            set_source_gdk_color(color)
          else
            set_source_not_gdk_color(color)
          end
        end
      else
        alias_method :set_source_color, :set_source_gdk_color
      end

      def source_color=(color)
        set_source_color(color)
        color
      end
    end
  end
end

GLib::Log.set_log_domain(Gdk::LOG_DOMAIN)
|
# CocoaPods specification for PopOverMenu, a popover-style menu for iOS.
Pod::Spec.new do |s|
  s.name = 'PopOverMenu'
  s.version = '1.6'
  s.license = 'MIT'
  s.summary = 'PopOverMenu is a PopOver style menu.'
  s.homepage = 'https://github.com/tichise/PopOverMenu'
  s.social_media_url = 'http://twitter.com/tichise'
  s.author = "Takuya Ichise"
  # The spec version doubles as the git tag to fetch.
  s.source = { :git => 'https://github.com/tichise/PopOverMenu.git', :tag => s.version }
  s.ios.deployment_target = '8.0'
  s.source_files = 'Sources/*.swift'
  s.requires_arc = true
  # Ship storyboards in a named resource bundle to avoid asset collisions.
  s.resource_bundles = {
    'Storyboards' => [
      'Storyboards/*.storyboard'
    ]
  }
end
update version to 1.7
# CocoaPods specification for PopOverMenu, a popover-style menu for iOS.
Pod::Spec.new do |s|
  s.name = 'PopOverMenu'
  s.version = '1.7'
  s.license = 'MIT'
  s.summary = 'PopOverMenu is a PopOver style menu.'
  s.homepage = 'https://github.com/tichise/PopOverMenu'
  s.social_media_url = 'http://twitter.com/tichise'
  s.author = "Takuya Ichise"
  # The spec version doubles as the git tag to fetch.
  s.source = { :git => 'https://github.com/tichise/PopOverMenu.git', :tag => s.version }
  s.ios.deployment_target = '8.0'
  s.source_files = 'Sources/*.swift'
  s.requires_arc = true
  # Ship storyboards in a named resource bundle to avoid asset collisions.
  s.resource_bundles = {
    'Storyboards' => [
      'Storyboards/*.storyboard'
    ]
  }
end
# CocoaPods specification for PopupDialog, a custom popup dialog
# view controller for iOS.
Pod::Spec.new do |s|
  s.name             = 'PopupDialog'
  s.version          = '0.5.1'
  s.summary          = 'A simple custom popup dialog view controller'
  s.homepage         = 'https://github.com/orderella/PopupDialog'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'Martin Wildfeuer' => 'mwfire@mwfire.de' }
  # The spec version doubles as the git tag to fetch.
  s.source           = { :git => 'https://github.com/orderella/PopupDialog.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/orderella'
  s.ios.deployment_target = '8.0'
  s.source_files = 'PopupDialog/Classes/**/*'
end
Version bump
# CocoaPods specification for PopupDialog, a custom popup dialog
# view controller for iOS.
Pod::Spec.new do |s|
  s.name             = 'PopupDialog'
  s.version          = '0.5.2'
  s.summary          = 'A simple custom popup dialog view controller'
  s.homepage         = 'https://github.com/orderella/PopupDialog'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'Martin Wildfeuer' => 'mwfire@mwfire.de' }
  # The spec version doubles as the git tag to fetch.
  s.source           = { :git => 'https://github.com/orderella/PopupDialog.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/orderella'
  s.ios.deployment_target = '8.0'
  s.source_files = 'PopupDialog/Classes/**/*'
end
|
require 'selenium-webdriver'
require 'appium_lib'
require 'yaml'
require_relative './spec_helper.rb'
require_relative './app_spec_helpers.rb'
require_relative './pages/login_page.rb'
require_relative './pages/me_page.rb'
require_relative './pages/page.rb'
require_relative './api_helpers.rb'
CONFIG = YAML.load_file('config.yml')
require_relative "#{CONFIG['app_name']}/custom_spec.rb"
shared_examples 'app' do |webdriver_url, browser_capabilities|
# Helpers
include AppSpecHelpers
include ApiHelpers
before :all do
@wait = Selenium::WebDriver::Wait.new(timeout: 10)
@email = "sysops+#{Time.now.to_i}#{Process.pid}@meedan.com"
@password = '12345678'
@source_name = 'Iron Maiden'
@source_url = 'https://twitter.com/ironmaiden?timestamp=' + Time.now.to_i.to_s
@media_url = 'https://twitter.com/meedan/status/773947372527288320/?t=' + Time.now.to_i.to_s
@config = CONFIG
$source_id = nil
$media_id = nil
@team1_slug = 'team1'+Time.now.to_i.to_s
@user_mail = 'sysops_' + Time.now.to_i.to_s + '@meedan.com'
begin
FileUtils.cp('./config.js', '../build/web/js/config.js')
rescue
puts "Could not copy local ./config.js to ../build/web/js/"
end
#EXTRACT USER:PWD FROM URL FOR CHROME
if ((browser_capabilities == :chrome) and (@config['self_url'].include? "@" and @config['self_url'].include? ":"))
@config['self_url'] = @config['self_url'][0..(@config['self_url'].index('//')+1)] + @config['self_url'][(@config['self_url'].index('@')+1)..-1]
end
@driver = new_driver(webdriver_url,browser_capabilities)
api_create_team_project_and_claim(true)
end
after :all do
FileUtils.cp('../config.js', '../build/web/js/config.js')
end
before :each do |example|
$caller_name = example.metadata[:description_args]
p $caller_name
@driver = new_driver(webdriver_url,browser_capabilities)
end
after :each do |example|
if example.exception
link = save_screenshot("Test failed: #{example.description}")
print " [Test \"#{example.description}\" failed! Check screenshot at #{link}] "
end
@driver.quit
end
# The tests themselves start here
context "web" do
include_examples "custom"
it "should filter by medias or sources", binx6: true do
api_create_team_project_and_link 'https://twitter.com/TheWho/status/890135323216367616'
@driver.navigate.to @config['self_url']
wait_for_selector("card-with-border", :class)
expect(@driver.page_source.include?("The Who's official Twitter page")).to be(false)
expect(@driver.page_source.include?('Happy birthday Mick')).to be(true)
el = wait_for_selector("//span[contains(text(), 'Sources')]", :xpath)
el.click
wait_for_selector("source-card", :class)
expect(@driver.page_source.include?("The Who's official Twitter page")).to be(true)
expect(@driver.page_source.include?('Happy birthday Mick')).to be(true)
old = @driver.find_elements(:class, "medias__item").length
el = wait_for_selector("//span[contains(text(), 'Media')]", :xpath)
el.click
wait_for_size_change(old, "medias__item", :class)
expect(@driver.page_source.include?("The Who's official Twitter page")).to be(true)
expect(@driver.page_source.include?('Happy birthday Mick')).to be(false)
end
it "should register and create a claim", bin5: true do
page = LoginPage.new(config: @config, driver: @driver).load
page = page.register_and_login_with_email(email: "sysops+#{Time.now.to_i}#{rand(1000)}@meedan.com", password: @password)
page
.create_team
.create_project
.create_media(input: 'Claim')
.logout
end
it "should redirect to access denied page", binx6: true do
user = api_register_and_login_with_email
api_logout
api_register_and_login_with_email
me_pg = MePage.new(config: @config, driver: @driver).load
sleep 3
expect(@driver.page_source.include?('Access Denied')).to be(false)
expect((@driver.current_url.to_s =~ /\/forbidden$/).nil?).to be(true)
unauthorized_pg = SourcePage.new(id: user.dbid, config: @config, driver: @driver).load
sleep 3
expect(@driver.page_source.include?('Access Denied')).to be(true)
expect((@driver.current_url.to_s =~ /\/forbidden$/).nil?).to be(false)
end
it "should edit the title of a media", bin1 : true do
url = 'https://twitter.com/softlandscapes/status/834385935240462338'
media_pg = api_create_team_project_and_link_and_redirect_to_media_page url
media_pg.wait_for_element('.media-detail')
media_pg.toggle_card # Make sure the card is closed
expect(media_pg.primary_heading.text).to eq('https://t.co/i17DJNqiWX')
media_pg.toggle_card # Expand the card so the edit button is accessible
media_pg.wait_for_element('.media-actions')
sleep 3 # Clicks can misfire if pender iframe moves the button position at the wrong moment
media_pg.set_title('Edited media title')
expect(media_pg.primary_heading.text).to eq('Edited media title')
project_pg = media_pg.go_to_project
project_pg.wait_for_element('.media__heading')
expect(project_pg.elements('.media__heading').map(&:text).include?('Edited media title')).to be(true)
end
# This test fails ~ 30% of the time for some reason.
# Todo: consider fixing it
# CGB 2017-9-29
#
# it "should not add a duplicated tag from tags list", binx3: true, quick: true do
# page = api_create_team_project_and_claim_and_redirect_to_media_page
# new_tag = Time.now.to_i.to_s
# # Validate assumption that tag does not exist
# expect(page.has_tag?(new_tag)).to be(false)
# # Add tag from tags list
# page.add_tag(new_tag)
# expect(page.has_tag?(new_tag)).to be(true)
# # Try to add duplicate
# page.add_tag(new_tag)
# sleep 20
# # Verify that tag is not added and that error message is displayed
# expect(page.tags.count(new_tag)).to be(1)
# expect(page.contains_string?('Tag already exists')).to be(true)
# end
it "should display a default title for new media", bin1 : true, quick:true do
# Tweets
media_pg = api_create_team_project_and_link_and_redirect_to_media_page('https://twitter.com/firstdraftnews/status/835587295394869249')
media_pg.toggle_card # Collapse card to show the title
expect(media_pg.primary_heading.text.include?('In a chat about getting')).to be(true)
project_pg = media_pg.go_to_project
sleep 1
@wait.until {
element = @driver.find_element(:partial_link_text, 'In a chat about getting')
expect(element.displayed?).to be(true)
}
# YouTube
media_pg = api_create_team_project_and_link_and_redirect_to_media_page('https://www.youtube.com/watch?v=ykLgjhBnik0')
media_pg.toggle_card # Collapse card to show the title
expect(media_pg.primary_heading.text).to eq("How To Check An Account's Authenticity")
project_pg = media_pg.go_to_project
sleep 5
expect(project_pg.elements('.media__heading').map(&:text).include?("How To Check An Account's Authenticity")).to be(true)
# Facebook
media_pg = api_create_team_project_and_link_and_redirect_to_media_page('https://www.facebook.com/FirstDraftNews/posts/1808121032783161')
media_pg.toggle_card # Collapse card to show the title
wait_for_selector('.media__heading')
expect(media_pg.primary_heading.text).to eq('First Draft on Facebook')
project_pg = media_pg.go_to_project
wait_for_selector('.media__heading')
expect(project_pg.elements('.media__heading').map(&:text).include?('First Draft on Facebook')).to be(true)
end
it "should login using Slack", bin5: true, quick:true do
login_with_slack
@driver.navigate.to @config['self_url'] + '/check/me'
displayed_name = get_element('h1.source__name').text.upcase
expected_name = @config['slack_name'].upcase
expect(displayed_name == expected_name).to be(true)
end
it "should localize interface based on browser language", binx6: true do
unless browser_capabilities['appiumVersion']
caps = Selenium::WebDriver::Remote::Capabilities.chrome(chromeOptions: { prefs: { 'intl.accept_languages' => 'fr' } })
driver = Selenium::WebDriver.for(:remote, url: webdriver_url, desired_capabilities: caps)
driver.navigate.to @config['self_url']
sleep 1
expect(driver.find_element(:css, '.login__heading span').text == 'Connexion').to be(true)
driver.quit
caps = Selenium::WebDriver::Remote::Capabilities.chrome(chromeOptions: { prefs: { 'intl.accept_languages' => 'pt' } })
driver = Selenium::WebDriver.for(:remote, url: webdriver_url, desired_capabilities: caps)
driver.navigate.to @config['self_url']
sleep 1
expect(driver.find_element(:css, '.login__heading span').text == 'Entrar').to be(true)
driver.quit
end
end
it "should access user confirmed page", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/user/confirmed'
title = get_element('.main-title')
expect(title.text == 'Account Confirmed').to be(true)
end
it "should access user unconfirmed page", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/user/unconfirmed'
title = get_element('.main-title')
expect(title.text == 'Error').to be(true)
end
it "should access user already confirmed page", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/user/already-confirmed'
title = get_element('.main-title')
expect(title.text == 'Account Already Confirmed').to be(true)
end
it "should login using Facebook", bin5: true, quick:true do
login_pg = LoginPage.new(config: @config, driver: @driver).load
login_pg.login_with_facebook
me_pg = MePage.new(config: @config, driver: login_pg.driver).load
displayed_name = me_pg.title
expected_name = @config['facebook_name']
expect(displayed_name).to eq(expected_name)
end
it "should register and login using e-mail", bin5: true, quick:true do
login_pg = LoginPage.new(config: @config, driver: @driver).load
email, password = ['sysops+' + Time.now.to_i.to_s + '@meedan.com', '22345678']
login_pg.register_and_login_with_email(email: email, password: password)
me_pg = MePage.new(config: @config, driver: login_pg.driver).load # reuse tab
displayed_name = me_pg.title
expect(displayed_name == 'User With Email').to be(true)
end
it "should create a project for a team", binx3: true do
team = api_create_team
@driver.navigate.to @config['self_url']
project_name = "Project #{Time.now}"
project_pg = TeamPage.new(config: @config, driver: @driver).create_project(name: project_name)
expect(project_pg.driver.current_url.to_s.match(/\/project\/[0-9]+$/).nil?).to be(false)
team_pg = project_pg.click_team_link
team_pg.click_projects_tab
sleep 2
element = @driver.find_element(:partial_link_text, project_name)
expect(element.displayed?).to be(true)
end
it "should create project media", bin1 : true do
api_create_team_and_project
page = ProjectPage.new(config: @config, driver: @driver).load
expect(page.contains_string?('This is a test')).to be(false)
page.create_media(input: 'https://twitter.com/marcouza/status/771009514732650497?t=' + Time.now.to_i.to_s)
page.driver.navigate.to @config['self_url']
page.wait_for_element('.project .media-detail')
expect(page.contains_string?('This is a test')).to be(true)
end
# Disable because it is flaky.
# Todo:
# it "should search for image", binx2: true do
# api_create_team_and_project
# sleep 2
# page = ProjectPage.new(config: @config, driver: @driver).load
# .create_image_media(File.join(File.dirname(__FILE__), 'test.png'))
# sleep 10 # wait for Sidekiq
# @driver.navigate.to @config['self_url'] + '/' + get_team + '/search'
# sleep 5
# expect(@driver.find_element(:link_text, 'test.png').nil?).to be(false)
# end
it "should redirect to 404 page", bin4: true do
@driver.navigate.to @config['self_url'] + '/something-that/does-not-exist'
title = get_element('.main-title')
expect(title.text == 'Not Found').to be(true)
end
it "should redirect to login screen if not logged in", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/teams'
title = get_element('.login__heading')
expect(title.text == 'Sign in').to be(true)
end
it "should login using Twitter", bin5: true, quick:true do
login_with_twitter
@driver.navigate.to @config['self_url'] + '/check/me'
displayed_name = get_element('h1.source__name').text.upcase
expected_name = @config['twitter_name'].upcase
expect(displayed_name == expected_name).to be(true)
end
it "should go to source page through user/:id", binx6: true do
user = api_register_and_login_with_email
@driver.navigate.to @config['self_url'] + '/check/user/' + user.dbid.to_s
sleep 1
title = get_element('.source__name')
expect(title.text == 'User With Email').to be(true)
end
it "should go back and forward in the history", bin4: true do
@driver.navigate.to @config['self_url']
expect((@driver.current_url.to_s =~ /\/$/).nil?).to be(false)
@driver.navigate.to @config['self_url'] + '/check/tos'
expect((@driver.current_url.to_s =~ /\/tos$/).nil?).to be(false)
@driver.navigate.back
expect((@driver.current_url.to_s =~ /\/$/).nil?).to be(false)
@driver.navigate.forward
expect((@driver.current_url.to_s =~ /\/tos$/).nil?).to be(false)
end
it "should create source and redirect to newly created source", binx6: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
sleep 15
@driver.find_element(:css, '#create-media__source').click
sleep 1
fill_field('#create-media-source-name-input', @source_name)
fill_field('#create-media-source-url-input', @source_url)
sleep 1
press_button('#create-media-submit')
sleep 15
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(false)
title = get_element('.source__name').text
expect(title == @source_name).to be(true)
end
# Submitting a source URL that already exists should redirect to the
# existing source (same numeric id) rather than creating a duplicate.
it "should not create duplicated source", binx6: true do
api_create_team_project_and_source_and_redirect_to_source('Megadeth', 'https://twitter.com/megadeth')
# Extract the numeric id from .../source/:id.
id1 = @driver.current_url.to_s.gsub(/^.*\/source\//, '').to_i
expect(id1 > 0).to be(true)
# Go back to the project page (strip the /source/:id suffix).
@driver.navigate.to @driver.current_url.to_s.gsub(/\/source\/[0-9]+$/, '')
sleep 5
@driver.find_element(:css, '#create-media__source').click
sleep 1
fill_field('#create-media-source-name-input', 'Megadeth')
fill_field('#create-media-source-url-input', 'https://twitter.com/megadeth')
sleep 1
press_button('#create-media-submit')
sleep 10
# Redirect target should be the pre-existing source.
id2 = @driver.current_url.to_s.gsub(/^.*\/source\//, '').to_i
expect(id2 > 0).to be(true)
expect(id1 == id2).to be(true)
end
# This test is flaky
# Todo: consider fixing it or removing it
#
# CGB 2017-9-29
#
# it "should tag source as a command", binx6: true do
# api_create_team_project_and_source_and_redirect_to_source('ACDC', 'https://twitter.com/acdc')
# sleep 3
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('Tagged #command')).to be(false)
# fill_field('#cmd-input', '/tag command')
# @driver.action.send_keys(:enter).perform
# sleep 5
# expect(@driver.page_source.include?('Tagged #command')).to be(true)
# @driver.navigate.refresh
# sleep 5
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('Tagged #command')).to be(true)
# end
# This test is flaky
# Todo: consider fixing it or removing it
#
# CGB 2017-10-2
#
# it "should comment source as a command", binx6: true do
# api_create_team_project_and_source_and_redirect_to_source('The Beatles', 'https://twitter.com/thebeatles')
# sleep 3
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('This is my comment')).to be(false)
# fill_field('#cmd-input', '/comment This is my comment')
# @driver.action.send_keys(:enter).perform
# sleep 5
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# @driver.navigate.refresh
# sleep 5
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# end
# Pasting an item URL (a tweet, not a profile) into the source dialog
# must be rejected with a "not a profile" message instead of creating
# a source.
it "should not create report as source", binx6: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
sleep 5
@driver.find_element(:css, '#create-media__source').click
sleep 1
fill_field('#create-media-source-url-input', 'https://twitter.com/IronMaiden/status/832726327459446784')
sleep 1
press_button('#create-media-submit')
sleep 15
# No redirect to a /source/:id page should have happened.
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(true)
message = get_element('.message').text
expect(message.match(/Sorry, this is not a profile/).nil?).to be(false)
end
# "/tag foo, bar" should create one tag per comma-separated term.
it "should tag source multiple times with commas with command", binx6: true do
api_create_team_project_and_source_and_redirect_to_source('Motorhead', 'https://twitter.com/mymotorhead')
sleep 5
@driver.find_element(:css, '.source__tab-button-notes').click
fill_field('#cmd-input', '/tag foo, bar')
@driver.action.send_keys(:enter).perform
sleep 5
expect(@driver.page_source.include?('Tagged #foo')).to be(true)
expect(@driver.page_source.include?('Tagged #bar')).to be(true)
end
# Edits the source's name, bio and avatar in one pass and verifies the
# edited name is displayed afterwards.
it "should edit basic source data (name, description/bio, avatar)", binx6: true do
api_create_team_project_and_source_and_redirect_to_source('ACDC', 'https://twitter.com/acdc')
sleep 5
element = @driver.find_element(:class, "source__edit-button")
element.click
input = @driver.find_element(:id, 'source__name-container')
input.send_keys(" - EDIT ACDC")
input = @driver.find_element(:id, 'source__bio-container')
input.send_keys(" - EDIT DESC")
@driver.find_element(:class, "source__edit-avatar-button").click
sleep 1
# Upload the fixture image that lives next to this spec file.
input = @driver.find_element(:css, 'input[type=file]')
input.send_keys(File.join(File.dirname(__FILE__), 'test.png'))
sleep 1
@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
displayed_name = get_element('h1.source__name').text
expect(displayed_name.include? "EDIT").to be(true)
end
# This test is unreliable
# Todo: fix methods that deal with hidden React menus
#
# ccx 2017-10-13
=begin
it "should add and remove accounts to sources", binx6: true do
api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
#sleep 5
element = wait_for_selector("source__edit-button", :class)
element.click
#sleep 3
element = wait_for_selector("source__edit-addinfo-button",:class)
element.click
#sleep 1
element = wait_for_selector("source__add-link",:class)
element.click
sleep 1
fill_field("source__link-input0", "www.acdc.com", :id)
sleep 2
element = wait_for_selector('source__edit-save-button',:class)
element.click
#@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
expect(@driver.page_source.include?('AC/DC Official Website')).to be(true)
#networks tab
element = wait_for_selector("source__tab-button-account",:class)
element.click
sleep 5
expect(@driver.page_source.include?('The Official AC/DC website and store')).to be(true)
#delete
element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 3
list = @driver.find_elements(:css => "svg[class='create-task__remove-option-button']")
list[1].click
sleep 1
@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
expect(@driver.page_source.include?('AC/DC Official Website')).to be(false)
end
=end
# Adds phone, organization, location and a free-form "other" field to a
# source via the edit form, then checks all four values render.
# The metadata inputs get generated DOM ids of the form
# "undefined-undefined-<Kind>-<suffix>", so each id is scraped out of
# the raw page source (up to the closing quote) before filling.
it "should edit source metadata (contact, phone, location, organization, other)", binx6: true do
api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
sleep 5
element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 1
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-phone").click
# Scrape the generated phone-input id out of the HTML.
str= @driver.page_source
str = str[str.index('undefined-undefined-Phone-')..str.length]
str = str[0..(str.index('"')-1)]
element = @driver.find_element(:id, str)
fill_field(str, "989898989", :id)
sleep 1
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-organization").click
# Same scraping trick for the organization input id.
str= @driver.page_source
str = str[str.index('undefined-undefined-Organization-')..str.length]
str = str[0..(str.index('"')-1)]
element = @driver.find_element(:id, str)
fill_field(str, "ORGANIZATION", :id)
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-location").click
# And for the location input id.
str= @driver.page_source
str = str[str.index('undefined-undefined-Location-')..str.length]
str = str[0..(str.index('"')-1)]
fill_field(str, "Location 123", :id)
sleep 1
#source__add-other
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-other").click
sleep 1
fill_field("source__other-label-input", "label", :id)
fill_field("source__other-value-input", "value", :id)
# Tab away twice then press Enter to commit the custom field.
@driver.action.send_keys("\t").perform
@driver.action.send_keys("\t").perform
@driver.action.send_keys("\n").perform
sleep 2
@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
expect(@driver.page_source.include?('label: value')).to be(true)
expect(@driver.page_source.include?('Location 123')).to be(true)
expect(@driver.page_source.include?('ORGANIZATION')).to be(true)
expect(@driver.page_source.include?('989898989')).to be(true)
end
# Adds two tags to a source, reloads to confirm persistence, then
# deletes one and confirms only TAG1 survives.
it "should add and remove source tags", binx6: true do
api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
sleep 5
element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 1
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-tags").click
sleep 1
fill_field("sourceTagInput", "TAG1", :id)
@driver.action.send_keys("\n").perform
fill_field("sourceTagInput", "TAG2", :id)
@driver.action.send_keys("\n").perform
sleep 3
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('TAG1')).to be(true)
expect(@driver.page_source.include?('TAG2')).to be(true)
#delete
# NOTE(review): find_element raises when the element is absent, so this
# "sleep 1 until" never actually loops — it assigns on success or raises.
sleep 1 until element = @driver.find_element(:class, "source__edit-button")
element.click
# The first delete icon corresponds to TAG2 (per the assertions below).
list = @driver.find_elements(:css => "div.source-tags__tag svg")
list[0].click
sleep 1
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('TAG1')).to be(true)
expect(@driver.page_source.include?('TAG2')).to be(false)
end
# Adds a language ('Acoli') via the autocomplete, reloads to confirm
# persistence, then removes it and confirms it is gone.
it "should add and remove source languages",binx6: true do
api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
sleep 5
element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 1
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-languages").click
fill_field("sourceLanguageInput", "Acoli", :id)
# Arrow-down + Return picks the first autocomplete suggestion.
@driver.action.send_keys(:down).perform
@driver.action.send_keys(:return).perform
sleep 2
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('Acoli')).to be(true)
# NOTE(review): find_element raises when the element is absent, so this
# "sleep 1 until" never actually loops — it assigns on success or raises.
sleep 1 until element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 1
list = @driver.find_elements(:css => "div.source-tags__tag svg")
list[0].click
sleep 1
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('Acoli')).to be(false)
end
# Adding the same tag twice via "/tag" must not duplicate it and must
# surface a 'Tag already exists' error.
it "should not add a duplicated tag from command line", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
# Timestamp makes the tag name unique for this run.
new_tag = Time.now.to_i.to_s
# Validate assumption that tag does not exist
expect(media_pg.has_tag?(new_tag)).to be(false)
# Try to add from command line
media_pg.add_annotation("/tag #{new_tag}")
Selenium::WebDriver::Wait.new(timeout: 10).until { media_pg.has_tag?(new_tag) } # TODO: wait inside MediaPage
expect(media_pg.has_tag?(new_tag)).to be(true)
# Try to add duplicate from command line
media_pg.add_annotation("/tag #{new_tag}")
# Verify that tag is not added and that error message is displayed
expect(media_pg.tags.count(new_tag)).to be(1)
expect(@driver.page_source.include?('Tag already exists')).to be(true)
end
# Submitting the same URL twice must not create a second media item:
# the app should redirect to the already-existing media page instead.
# Fix: `bin1 : true` (space before the colon) is invalid Ruby label
# syntax and fails to parse; it must be `bin1: true`.
it "should not create duplicated media", bin1: true do
api_create_team_project_and_link_and_redirect_to_media_page @media_url
# Capture the numeric id of the media created via the API.
id1 = @driver.current_url.to_s.gsub(/^.*\/media\//, '').to_i
expect(id1 > 0).to be(true)
# Go back to the project page (strip the /media/:id suffix).
@driver.navigate.to @driver.current_url.to_s.gsub(/\/media\/[0-9]+$/, '')
sleep 3
fill_field('#create-media-input', @media_url)
sleep 2
press_button('#create-media-submit')
sleep 10
# Redirect target should be the pre-existing media, same id.
id2 = @driver.current_url.to_s.gsub(/^.*\/media\//, '').to_i
expect(id2 > 0).to be(true)
expect(id1 == id2).to be(true)
end
# This test is flaky
# Todo: consider fixing it or removing it
#
# CGB 2017-10-2
#
# it "should tag media from tags list", binx3: true do
# page = api_create_team_project_and_claim_and_redirect_to_media_page
# new_tag = Time.now.to_i.to_s
# expect(page.contains_string?("Tagged \##{new_tag}")).to be(false)
# page.add_tag(new_tag)
# expect(page.has_tag?(new_tag)).to be(true)
# sleep 2
# expect(page.contains_string?("Tagged \##{new_tag}")).to be(true)
# page.driver.navigate.refresh
# page.wait_for_element('.media')
# expect(page.has_tag?(new_tag)).to be(true)
# expect(page.contains_string?("Tagged \##{new_tag}")).to be(true)
# end
# Adds a tag via the "/tag" command and verifies it appears in both the
# tags list and the annotations log, surviving a page reload.
# Fix: the previous version wrapped the whole body in
# `begin ... rescue => e; p e; end`, which swallowed RSpec expectation
# failures (StandardError subclasses) so the example could never fail;
# it also left `p "21".."2a"` debug prints behind. Both removed.
it "should tag media as a command", bin: true, bin4: true do
page = api_create_team_project_and_claim_and_redirect_to_media_page
expect(page.has_tag?('command')).to be(false)
expect(page.contains_string?('Tagged #command')).to be(false)
# Add a tag as a command
page.add_annotation('/tag command')
# Verify that tag was added to tags list and annotations list
expect(page.has_tag?('command')).to be(true)
expect(page.contains_string?('Tagged #command')).to be(true)
# Reload the page and verify that tags are still there
page.driver.navigate.refresh
page.wait_for_element('.media')
expect(page.has_tag?('command')).to be(true)
expect(page.contains_string?('Tagged #command')).to be(true)
end
# "/comment <text>" should create a comment annotation that survives a
# page reload.
it "should comment media as a command", bin4: true, quick:true do
api_create_team_project_and_claim_and_redirect_to_media_page
# First, verify that there isn't any comment
expect(@driver.page_source.include?('This is my comment')).to be(false)
# Add a comment as a command
fill_field('#cmd-input', '/comment This is my comment')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that comment was added to annotations list
expect(@driver.page_source.include?('This is my comment')).to be(true)
# Reload the page and verify that comment is still there
@driver.navigate.refresh
sleep 5
expect(@driver.page_source.include?('This is my comment')).to be(true)
end
# "/flag Spam" should add a flag annotation that survives a reload.
it "should flag media as a command", bin4: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
expect(media_pg.contains_string?('Flag')).to be(false)
media_pg.fill_input('#cmd-input', '/flag Spam')
media_pg.element('#cmd-input').submit
sleep 5
expect(media_pg.contains_string?('Flag')).to be(true)
media_pg.driver.navigate.refresh
media_pg.wait_for_element('.media')
expect(media_pg.contains_string?('Flag')).to be(true)
end
# This test needs a little work.
# See the `edit` method in in project_page.rb
#
# @chris 2017-10-19
#
# it "should edit project", bin4: true do
# api_create_team_and_project
# project_pg = ProjectPage.new(config: @config, driver: @driver).load
# new_title = "Changed title #{Time.now.to_i}"
# new_description = "Set description #{Time.now.to_i}"
# expect(project_pg.contains_string?(new_title)).to be(false)
# expect(project_pg.contains_string?(new_description)).to be(false)
# project_pg.edit(title: new_title, description: new_description)
# expect(@driver.page_source.include?(new_title)).to be(true)
# expect(@driver.page_source.include?(new_description)).to be(true)
# end
# Requesting a project id that does not exist should redirect to /404
# and render the 'Not Found' title.
it "should redirect to 404 page if id does not exist", bin4: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
sleep 3
url = @driver.current_url.to_s
# Swap the real project id for one that should not exist.
@driver.navigate.to url.gsub(/project\/([0-9]+).*/, 'project/999')
title = get_element('.main-title')
expect(title.text == 'Not Found').to be(true)
expect((@driver.current_url.to_s =~ /\/404$/).nil?).to be(false)
end
# Logs out via the project page and expects the sign-in screen.
it "should logout", bin5: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
project_page = ProjectPage.new(config: @config, driver: @driver)
signed_out_page = project_page.logout
expect(signed_out_page.contains_string?('Sign in')).to be(true)
end
# it "should delete annotation from annotations list (for media, source and project)" do
# skip("Needs to be implemented")
# end
# it "should delete tag from tags list (for media and source)" do
# skip("Needs to be implemented")
# end
# it "should edit team" do
# skip("Needs to be implemented")
# end
# it "should show 'manage team' link only to team owners" do
# skip("Needs to be implemented")
# end
# it "should show 'edit project' link only to users with 'update project' permission" do
# skip("Needs to be implemented")
# end
# it "should edit team logo" do
# skip("Needs to be implemented")
# end
# End-to-end team membership flow in one example: creates two teams,
# switches between them, then exercises join-request approval,
# rejection, and member removal. Different personas are simulated by
# re-seeding the API session cookie (/test/session?email=...) with a
# freshly restarted driver.
it "should navigate between teams", bin5: true, quick: true do
# setup
@user_mail = "test" +Time.now.to_i.to_s+rand(9999).to_s + @user_mail
@team1_slug = 'team1'+Time.now.to_i.to_s+rand(9999).to_s
user = api_register_and_login_with_email(email: @user_mail, password: @password)
team = request_api 'team', { name: 'Team 1', email: user.email, slug: @team1_slug }
request_api 'project', { title: 'Team 1 Project', team_id: team.dbid }
team = request_api 'team', { name: 'Team 2', email: user.email, slug: "team-2-#{rand(9999)}#{Time.now.to_i}" }
request_api 'project', { title: 'Team 2 Project', team_id: team.dbid }
# Each team's project list must contain only its own project.
page = MePage.new(config: @config, driver: @driver).load.select_team(name: 'Team 1')
page.click_projects_tab
expect(page.team_name).to eq('Team 1')
expect(page.project_titles.include?('Team 1 Project')).to be(true)
expect(page.project_titles.include?('Team 2 Project')).to be(false)
page = MePage.new(config: @config, driver: @driver).load.select_team(name: 'Team 2')
page.click_projects_tab
expect(page.team_name).to eq('Team 2')
expect(page.project_titles.include?('Team 2 Project')).to be(true)
expect(page.project_titles.include?('Team 1 Project')).to be(false)
#As a different user, request to join one team and be accepted.
user = api_register_and_login_with_email(email: "new"+@user_mail, password: @password)
page = MePage.new(config: @config, driver: @driver).load
page.ask_join_team(subdomain: @team1_slug)
@wait.until {
expect(@driver.find_element(:class, "message").nil?).to be(false)
}
api_logout
# Restart the browser to switch back to the team creator's session.
@driver.quit
@driver = new_driver(webdriver_url,browser_capabilities)
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email='+@user_mail)
#As the group creator, go to the members page and approve the joining request.
page = MePage.new(config: @config, driver: @driver).load
page.go(@config['self_url'] + '/check/me')
page.approve_join_team(subdomain: @team1_slug)
@wait.until {
elems = @driver.find_elements(:css => ".team-members__list > div")
expect(elems.size).to be > 1
}
# "should redirect to team page if user asking to join a team is already a member"
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email=new'+@user_mail)
#page = MePage.new(config: @config, driver: @driver).load
@driver.navigate.to @config['self_url'] + "/"+@team1_slug+"/join"
sleep 3
@wait.until {
expect(@driver.current_url.eql? @config['self_url']+"/"+@team1_slug ).to be(true)
}
# "should reject member to join team"
user = api_register_and_login_with_email
page = MePage.new(config: @config, driver: @driver).load
page.ask_join_team(subdomain: @team1_slug)
@wait.until {
expect(@driver.find_element(:class, "message").nil?).to be(false)
}
api_logout
@driver.quit
@driver = new_driver(webdriver_url,browser_capabilities)
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email='+@user_mail)
#As the group creator, go to the members page and approve the joining request.
page = MePage.new(config: @config, driver: @driver).load
.disapprove_join_team(subdomain: @team1_slug)
@wait.until {
expect(@driver.page_source.include?('Requests to join')).to be(false)
}
# "should delete member from team"
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email='+@user_mail)
#As the group creator, go to the members page and approve the joining request.
sleep 1
page = MePage.new(config: @config, driver: @driver).load
@driver.navigate.to @config['self_url'] + '/'+@team1_slug
sleep 2
wait_for_selector('team-members__member',:class)
el = wait_for_selector('team-members__edit-button',:class)
el.click
sleep 5
# NOTE(review): assumes the second-to-last <button> on the page is a
# member "remove" control; removal is confirmed only by the total
# button count shrinking — confirm if the layout changes.
l = wait_for_selector_list('//button',:xpath)
old = l.length
expect(l.length > 4).to be(true)
l[l.length-2].click
sleep 1
expect(wait_for_selector_list('//button',:xpath).length < old).to be(true)
end
#As a different user, request to join one team.
# it "should join team", bin4:true, quick: true do
# api_register_and_login_with_email
# page = MePage.new(config: @config, driver: @driver).load
# page.ask_join_team(subdomain: @team1_slug)
# @wait.until {
# expect(@driver.find_element(:class, "message").nil?).to be(false)
# }
# api_logout
# @driver = new_driver(webdriver_url,browser_capabilities)
# page = Page.new(config: @config, driver: @driver)
# page.go(@config['api_path'] + '/test/session?email='+@user_mail)
# #As the group creator, go to the members page and approve the joining request.
# page = MePage.new(config: @config, driver: @driver).load
# .approve_join_team(subdomain: @team1_slug)
# @wait.until {
# elems = @driver.find_elements(:css => ".team-members__list > div")
# expect(elems.size).to be > 1
# }
# end
# Deletes a comment annotation and checks the notes count plus the
# 'Comment deleted' marker.
it "should update notes count after delete annotation", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
media_pg.fill_input('#cmd-input', 'Test')
media_pg.element('#cmd-input').submit
sleep 3
notes_count = get_element('.media-detail__check-notes-count')
expect(notes_count.text == '2 notes').to be(true)
expect(@driver.page_source.include?('Comment deleted')).to be(false)
media_pg.delete_annotation
wait_for_selector('.annotation__deleted')
# NOTE(review): the count is expected to stay at '2 notes' after the
# deletion — presumably the deletion itself is logged as a note; confirm.
expect(notes_count.text == '2 notes').to be(true)
expect(@driver.page_source.include?('Comment deleted')).to be(true)
end
# Opens the project in a second window, creates a media there, closes
# it, and expects the first window to show the new item without a
# manual reload (auto-refresh).
# Fix: `bin1 : true` (space before the colon) is invalid Ruby label
# syntax and fails to parse; it must be `bin1: true`.
it "should auto refresh project when media is created", bin1: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
url = @driver.current_url
sleep 3
expect(@driver.page_source.include?('Auto-Refresh')).to be(false)
# Remember the original window so we can switch back after the popup.
current_window = @driver.window_handles.last
@driver.execute_script("window.open('#{url}')")
@driver.switch_to.window(@driver.window_handles.last)
fill_field('#create-media-input', 'Auto-Refresh')
press_button('#create-media-submit')
sleep 5
@driver.execute_script('window.close()')
@driver.switch_to.window(current_window)
sleep 5
expect(@driver.page_source.include?('Auto-Refresh')).to be(true)
end
# Opens the same media in a second window, adds an annotation there,
# closes it, and expects the first window to pick up the change via
# auto-refresh.
it "should auto refresh media when annotation is created", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
url = media_pg.driver.current_url
sleep 3
expect(@driver.page_source.include?('Auto-Refresh')).to be(false)
# Remember the original window so we can switch back after the popup.
current_window = @driver.window_handles.last
@driver.execute_script("window.open('#{url}')")
@driver.switch_to.window(@driver.window_handles.last)
media_pg.fill_input('#cmd-input', 'Auto-Refresh')
media_pg.element('#cmd-input').submit
sleep 5
@driver.execute_script('window.close()')
@driver.switch_to.window(current_window)
sleep 5
expect(@driver.page_source.include?('Auto-Refresh')).to be(true)
end
# it "should cancel request through switch teams" do
# skip("Needs to be implemented")
# end
# it "should give 404 when trying to access a media that is not related to the project on the URL" do
# skip("Needs to be implemented")
# end
# it "should linkify URLs on comments" do
# skip("Needs to be implemented")
# end
# it "should add and remove suggested tags" do
# skip("Needs to be implemented")
# end
# it "should find all medias with an empty search" do
# skip("Needs to be implemented")
# end
# it "should find medias when searching by keyword" do
# skip("Needs to be implemented")
# end
# it "should find medias when searching by status" do
# skip("Needs to be implemented")
# end
# it "should find medias when searching by tag" do
# skip("Needs to be implemented")
# end
# it "should move media to another project" do
# skip("Needs to be implemented")
# end
# Full CRUD lifecycle of a "short answer" task on a media item:
# create, answer, edit the question, edit the answer, delete.
# wait_all_elements(n, ...) is used after each mutation to wait until
# the annotations list reaches the expected length.
it "should add, edit, answer, update answer and delete short answer task", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
wait_for_selector('.create-task__add-button')
# Create a task
expect(@driver.page_source.include?('Foo or bar?')).to be(false)
expect(@driver.page_source.include?('Task "Foo or bar?" created by')).to be(false)
el = wait_for_selector('.create-task__add-button', :css)
el.click
el = wait_for_selector('.create-task__add-short-answer', :css)
el.click
wait_for_selector('#task-label-input', :css)
fill_field('#task-label-input', 'Foo or bar?')
el = wait_for_selector('.create-task__dialog-submit-button', :css)
el.click
media_pg.wait_all_elements(2, "annotations__list-item", :class) #Wait for refresh page
expect(@driver.page_source.include?('Foo or bar?')).to be(true)
expect(@driver.page_source.include?('Task "Foo or bar?" created by')).to be(true)
# Answer task
expect(@driver.page_source.include?('Task "Foo or bar?" answered by')).to be(false)
fill_field('textarea[name="response"]', 'Foo')
@driver.find_element(:css, '.task__save').click
media_pg.wait_all_elements(3, "annotations__list-item", :class)
expect(@driver.page_source.include?('Task "Foo or bar?" answered by')).to be(true)
# Edit task
expect(@driver.page_source.include?('Task "Foo or bar?" edited to "Foo or bar???" by')).to be(false)
el = wait_for_selector('.task-actions__icon', :css)
el.click
media_pg.wait_all_elements(6, "annotations__list-item", :class)
editbutton = wait_for_selector('.task-actions__edit', :css)
editbutton.location_once_scrolled_into_view
editbutton.click
# Appending '??' turns 'Foo or bar?' into 'Foo or bar???'.
fill_field('textarea[name="label"]', '??')
editbutton = wait_for_selector('.task__save', :css)
editbutton.click
media_pg.wait_all_elements(7, "annotations__list-item", :class)
expect(@driver.page_source.include?('Task "Foo or bar?" edited to "Foo or bar???" by')).to be(true)
# Edit task answer
# Strip HTML tags before matching, since the sentence spans markup.
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Foo or bar???" answered by User With Email: "Foo edited"')).to be(false)
el = wait_for_selector('.task-actions__icon', :css)
el.click
el = wait_for_selector('.task-actions__edit-response', :css)
el.click
# Ensure menu closes and textarea is focused...
el = wait_for_selector('textarea[name="response"]', :css)
el.click
fill_field('textarea[name="response"]', ' edited')
@driver.find_element(:css, '.task__save').click
media_pg.wait_all_elements(8, "annotations__list-item", :class) #Wait for refresh page
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Foo or bar???" answered by User With Email: "Foo edited"')).to be(true)
# Delete task
delete_task('Foo')
end
# it "should add, edit, answer, update answer and delete single_choice task" do
# skip("Needs to be implemented")
# end
# it "should add, edit, answer, update answer and delete multiple_choice task" do
# skip("Needs to be implemented")
# end
# Media containing an image should offer a Google reverse-image search
# that opens in a new window.
it "should search for reverse images", binx2: true do
page = api_create_team_project_and_link_and_redirect_to_media_page 'https://www.instagram.com/p/BRYob0dA1SC/'
wait_for_selector('.annotation__reverse-image')
expect(@driver.page_source.include?('This item contains at least one image. Click Search to look for potential duplicates on Google.')).to be(true)
expect((@driver.current_url.to_s =~ /google/).nil?).to be(true)
current_window = @driver.window_handles.last
@driver.find_element(:css, '.annotation__reverse-image-search').click
sleep 3
# The search opens in a new window pointing at Google.
@driver.switch_to.window(@driver.window_handles.last)
expect((@driver.current_url.to_s =~ /google/).nil?).to be(false)
@driver.switch_to.window(current_window)
end
# The refresh action should re-fetch the media. The target URL serves a
# random title on every request, so a changed <title> proves a real
# refresh happened.
# Fix: `bin1 : true` (space before the colon) is invalid Ruby label
# syntax and fails to parse; it must be `bin1: true`.
it "should refresh media", bin1: true do
page = api_create_team_project_and_link_and_redirect_to_media_page 'http://ca.ios.ba/files/meedan/random.php'
wait_for_selector("add-annotation", :class)
title1 = @driver.title
expect((title1 =~ /Random/).nil?).to be(false)
el = wait_for_selector('.media-actions__icon')
el.click
sleep 1
@driver.find_element(:css, '.media-actions__refresh').click
sleep 10 #Needs to wait the refresh
wait_for_selector("add-annotation", :class)
title2 = @driver.title
expect((title2 =~ /Random/).nil?).to be(false)
# A differing title shows the media was actually re-fetched.
expect(title1 != title2).to be(true)
end
# Toggling the 'Project' filter should add and then remove the project
# facet in both the URL and the page title.
it "should search by project", binx2: true do
api_create_claim_and_go_to_search_page
expect((@driver.current_url.to_s.match(/project/)).nil?).to be(true)
@driver.find_element(:xpath, "//div[contains(text(), 'Project')]").click
sleep 10
expect((@driver.current_url.to_s.match(/project/)).nil?).to be(false)
expect((@driver.title =~ /Project/).nil?).to be(false)
# Click again to deselect the filter.
@driver.find_element(:xpath, "//div[contains(text(), 'Project')]").click
sleep 10
expect((@driver.title =~ /Project/).nil?).to be(true)
end
# Switching between 'Recent activity' and 'Created' sort criteria
# should update the URL accordingly and keep the result visible.
it "should search and change sort criteria", binx2: true do
api_create_claim_and_go_to_search_page
expect((@driver.current_url.to_s.match(/recent_activity/)).nil?).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Recent activity')]").click
sleep 10
expect((@driver.current_url.to_s.match(/recent_activity/)).nil?).to be(false)
expect((@driver.current_url.to_s.match(/recent_added/)).nil?).to be(true)
expect(@driver.page_source.include?('My search result')).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Created')]").click
sleep 10
expect((@driver.current_url.to_s.match(/recent_activity/)).nil?).to be(true)
expect((@driver.current_url.to_s.match(/recent_added/)).nil?).to be(false)
expect(@driver.page_source.include?('My search result')).to be(true)
end
# Switching between 'Newest' and 'Oldest' should toggle DESC/ASC in the
# URL while keeping the result visible.
it "should search and change sort order", binx2: true do
api_create_claim_and_go_to_search_page
expect((@driver.current_url.to_s.match(/ASC|DESC/)).nil?).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Newest')]").click
sleep 10
expect((@driver.current_url.to_s.match(/DESC/)).nil?).to be(false)
expect((@driver.current_url.to_s.match(/ASC/)).nil?).to be(true)
expect(@driver.page_source.include?('My search result')).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Oldest')]").click
sleep 20
expect((@driver.current_url.to_s.match(/DESC/)).nil?).to be(true)
expect((@driver.current_url.to_s.match(/ASC/)).nil?).to be(false)
expect(@driver.page_source.include?('My search result')).to be(true)
end
# Filters can be set via a URL-encoded JSON query ({"projects":[0]}).
# Project id 0 matches nothing, so the result must disappear while
# three filter chips show as selected.
it "should search by project through URL", binx2: true do
api_create_claim_and_go_to_search_page
@driver.navigate.to @config['self_url'] + '/' + get_team + '/search/%7B"projects"%3A%5B0%5D%7D'
sleep 10
expect(@driver.page_source.include?('My search result')).to be(false)
selected = @driver.find_elements(:css, '.media-tags__suggestion--selected')
expect(selected.size == 3).to be(true)
end
# Sort criteria can be set via a URL-encoded JSON query
# ({"sort":"recent_activity"}): the matching chips must be selected and
# the result still shown.
it "should change search sort criteria through URL", binx2: true do
api_create_claim_and_go_to_search_page
@driver.navigate.to @config['self_url'] + '/' + get_team + '/search/%7B"sort"%3A"recent_activity"%7D'
sleep 10
expect(@driver.page_source.include?('My search result')).to be(true)
selected = @driver.find_elements(:css, '.media-tags__suggestion--selected').map(&:text).sort
expect(selected == ['Recent activity', 'Newest first', 'Media'].sort).to be(true)
end
# Sort order can be set via a URL-encoded JSON query
# ({"sort_type":"ASC"}): the matching chips must be selected and the
# result still shown.
it "should change search sort order through URL", binx2: true do
api_create_claim_and_go_to_search_page
@driver.navigate.to @config['self_url'] + '/' + get_team + '/search/%7B"sort_type"%3A"ASC"%7D'
sleep 10
expect(@driver.page_source.include?('My search result')).to be(true)
selected = @driver.find_elements(:css, '.media-tags__suggestion--selected').map(&:text).sort
expect(selected == ['Created', 'Oldest first', 'Media'].sort).to be(true)
end
# Requesting a reset for an unregistered email must show "Email not
# found" and must not claim a reset message was sent.
it "should not reset password", bin5: true do
login_page = LoginPage.new(config: @config, driver: @driver)
login_page.reset_password('test@meedan.com')
sleep 2
rendered = @driver.page_source
expect(rendered.include?('Email not found')).to be(true)
expect(rendered.include?('Password reset sent')).to be(false)
end
# A registered, confirmed email should trigger the 'Password reset
# sent' confirmation.
it "should reset password", bin5: true do
user = api_create_and_confirm_user
page = LoginPage.new(config: @config, driver: @driver)
page.reset_password(user.email)
sleep 2
expect(@driver.page_source.include?('Email not found')).to be(false)
expect(@driver.page_source.include?('Password reset sent')).to be(true)
end
# After the team is made public, the media page should render Twitter
# card metatags: twitter:site = app name, twitter:title = claim text.
it "should set metatags", bin5: true do
api_create_team_project_and_link_and_redirect_to_media_page 'https://twitter.com/marcouza/status/875424957613920256'
sleep 2
request_api('make_team_public', { slug: get_team })
sleep 1
url = @driver.current_url.to_s
# Re-navigate so the server renders the now-public metatags.
@driver.navigate.to url
site = @driver.find_element(:css, 'meta[name="twitter\\:site"]').attribute('content')
expect(site == @config['app_name']).to be(true)
twitter_title = @driver.find_element(:css, 'meta[name="twitter\\:title"]').attribute('content')
expect(twitter_title == 'This is a test').to be(true)
end
# Embedding differs by product: Bridge exposes no embed option (and
# /embed shows 'Not available'), while Check exposes an embed page with
# customization checkboxes plus a copy-to-clipboard action that is
# verified by pasting into a textarea on an external page.
# Fix: `bin1 : true` (space before the colon) is invalid Ruby label
# syntax and fails to parse; it must be `bin1: true`.
it "should embed", bin1: true do
api_create_team_project_and_claim_and_redirect_to_media_page
sleep 2
request_api('make_team_public', { slug: get_team })
@driver.navigate.refresh
sleep 5
@driver.find_element(:css, '.media-actions__icon').click
sleep 1
if @config['app_name'] == 'bridge'
expect(@driver.page_source.include?('Embed')).to be(false)
@driver.navigate.to "#{@driver.current_url}/embed"
sleep 2
expect(@driver.page_source.include?('Not available')).to be(true)
elsif @config['app_name'] == 'check'
expect(@driver.page_source.include?('Embed')).to be(true)
url = @driver.current_url.to_s
@driver.find_element(:css, '.media-actions__embed').click
sleep 2
expect(@driver.current_url.to_s == "#{url}/embed").to be(true)
expect(@driver.page_source.include?('Not available')).to be(false)
@driver.find_element(:css, '#media-embed__actions-customize').click
sleep 1
# Toggle every customization checkbox, then dismiss the menu.
@driver.find_elements(:css, '#media-embed__customization-menu input[type=checkbox]').map(&:click)
sleep 1
@driver.find_elements(:css, 'body').map(&:click)
sleep 1
@driver.find_element(:css, '#media-embed__actions-copy').click
sleep 1
# Paste the copied embed code into an external page to inspect it.
@driver.navigate.to 'https://pastebinx.mozilla.org/'
@driver.find_element(:css, '#code').send_keys(' ')
@driver.action.send_keys(:control, 'v').perform
sleep 1
# All three hide_* options must be encoded in the copied embed URL.
expect((@driver.find_element(:css, '#code').attribute('value') =~ /hide_open_tasks%3D1%26hide_tasks%3D1%26hide_notes%3D1/).nil?).to be(false)
sleep 5
end
end
# Add slack notifications to a team: enable the toggle in team settings, enter
# a webhook URL, save, and expect a confirmation message element to appear.
it "should add slack notifications to a team", binx3:true, quick: true do
team = "testteam#{Time.now.to_i}"
api_create_team(team:team)
p = Page.new(config: @config, driver: @driver)
p.go(@config['self_url'] + '/' + team)
sleep 5
el = wait_for_selector("team__edit-button", :class)
el.click
el = wait_for_selector("team__settings-slack-notifications-enabled", :id)
el.click
el = wait_for_selector("team__settings-slack-webhook", :id)
el.click
el = wait_for_selector("team__settings-slack-webhook", :id)
# Dummy webhook URL; only the UI round-trip is verified, not Slack delivery.
el.send_keys "https://hooks.slack.com/services/T02528QUL/BBBBBBBBB/AAAAAAAAAAAAAAAAAAAAAAAA"
el = wait_for_selector("team__save-button", :class)
el.click
sleep 2
expect(@driver.find_element(:class, "message").nil?).to be(false)
end
# Infinite-scroll pagination on the project page: the first page shows 40
# items; scrolling the last one into view loads the remaining 2.
# Assumes the helper creates 21 claims + 21 sources = 42 items — TODO confirm.
it "should paginate project page", binx2: true do
page = api_create_team_project_claims_sources_and_redirect_to_project_page 21
page.load
el = wait_for_selector("//span[contains(text(), 'Sources')]", :xpath)
el.click
wait_for_selector("source-card", :class)
results = @driver.find_elements(:css, '.medias__item')
expect(results.size == 40).to be(true)
old = results.size
results.last.location_once_scrolled_into_view
size = wait_for_size_change(old, '.medias__item')
expect(size == 42).to be(true)
end
=begin
***Unstable
it "should show teams at /check/teams", bin4: true do
api_create_team
@driver.navigate.to @config['self_url'] + '/check/teams'
sleep 2
expect(@driver.find_elements(:css, '.teams').empty?).to be(false)
end
it "should add, edit, answer, update answer and delete geolocation task", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
wait_for_selector('.create-task__add-button')
# Create a task
expect(@driver.page_source.include?('Where?')).to be(false)
expect(@driver.page_source.include?('Task "Where?" created by')).to be(false)
@driver.find_element(:css, '.create-task__add-button').click
sleep 1
@driver.find_element(:css, '.create-task__add-geolocation').click
sleep 1
fill_field('#task-label-input', 'Where?')
@driver.find_element(:css, '.create-task__dialog-submit-button').click
sleep 2
expect(@driver.page_source.include?('Where?')).to be(true)
expect(@driver.page_source.include?('Task "Where?" created by')).to be(true)
# Answer task
expect(@driver.page_source.include?('Task "Where?" answered by')).to be(false)
fill_field('textarea[name="response"]', 'Salvador')
fill_field('textarea[name="coordinates"]', '-12.9015866, -38.560239')
@driver.action.send_keys(:enter).perform
wait_for_selector('.annotation--task_response_geolocation')
expect(@driver.page_source.include?('Task "Where?" answered by')).to be(true)
# Edit task
expect(@driver.page_source.include?('Task "Where?" edited to "Where was it?" by')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
sleep 2
@driver.find_element(:css, '.task-actions__edit').click
update_field('textarea[name="label"]', 'Where was it?')
@driver.find_element(:css, '.task__save').click
sleep 2
expect(@driver.page_source.include?('Task "Where?" edited to "Where was it?" by')).to be(true)
# Edit task answer
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Where was it?" answered by User With Email: "Vancouver"')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
@driver.find_element(:css, '.task-actions__edit-response').click
update_field('textarea[name="response"]', 'Vancouver')
update_field('textarea[name="coordinates"]', '49.2577142, -123.1941156')
@driver.action.send_keys(:enter).perform
sleep 2
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Where was it?" answered by User With Email: "Vancouver"')).to be(true)
# Delete task
delete_task('Where was it')
end
=end
=begin
# This tests is unreliable
# Todo: locally prints 1 / 2 and remote 0 / 1
#
# ccx 2017-10-13
it "should add image to media comment", binx3: true do
p "----------"
api_create_team_project_and_claim_and_redirect_to_media_page
# First, verify that there isn't any comment with image
expect(@driver.page_source.include?('This is my comment with image')).to be(false)
old = @driver.find_elements(:class, "annotations__list-item").length
# Add a comment as a command
fill_field('#cmd-input', 'This is my comment with image')
el = wait_for_selector('.add-annotation__insert-photo')
el.click
wait_for_selector('input[type=file]')
input = wait_for_selector('input[type=file]')
input.send_keys(File.join(File.dirname(__FILE__), 'test.png'))
el = wait_for_selector('.add-annotation__buttons button')
el.click
p old
p wait_for_size_change(old, "annotations__list-item", :class)
# Verify that comment was added to annotations list
expect(@driver.page_source.include?('This is my comment with image')).to be(true)
imgsrc = @driver.find_element(:css, '.annotation__card-thumbnail').attribute('src')
expect(imgsrc.match(/test\.png$/).nil?).to be(false)
# Zoom image
expect(@driver.find_elements(:css, '.image-current').empty?).to be(true)
@driver.find_element(:css, '.annotation__card-thumbnail').click
expect(@driver.find_elements(:css, '.image-current').empty?).to be(false)
@driver.action.send_keys(:escape).perform
expect(@driver.find_elements(:css, '.image-current').empty?).to be(true)
# Reload the page and verify that comment is still there
@driver.navigate.refresh
wait_for_selector('add-annotation__buttons', :class)
expect(@driver.page_source.include?('This is my comment with image')).to be(true)
imgsrc = @driver.find_element(:css, '.annotation__card-thumbnail').attribute('src')
expect(imgsrc.match(/test\.png$/).nil?).to be(false)
p "----------"
end
=end
=begin
***Unstable***
it "should add, edit, answer, update answer and delete datetime task", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
wait_for_selector('.create-task__add-button')
# Create a task
expect(@driver.page_source.include?('When?')).to be(false)
expect(@driver.page_source.include?('Task "When?" created by')).to be(false)
@driver.find_element(:css, '.create-task__add-button').click
sleep 1
@driver.find_element(:css, '.create-task__add-datetime').click
sleep 1
fill_field('#task-label-input', 'When?')
@driver.find_element(:css, '.create-task__dialog-submit-button').click
sleep 2
expect(@driver.page_source.include?('When?')).to be(true)
expect(@driver.page_source.include?('Task "When?" created by')).to be(true)
# Answer task
expect(@driver.page_source.include?('Task "When?" answered by')).to be(false)
fill_field('input[name="hour"]', '23')
fill_field('input[name="minute"]', '59')
@driver.find_element(:css, '#task__response-date').click
sleep 2
@driver.find_elements(:css, 'button').last.click
sleep 1
fill_field('textarea[name="note"]', 'Test')
@driver.action.send_keys(:enter).perform
sleep 2
expect(@driver.page_source.include?('Task "When?" answered by')).to be(true)
# Edit task
expect(@driver.page_source.include?('Task "When?" edited to "When was it?" by')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
sleep 2
@driver.find_element(:css, '.task-actions__edit').click
update_field('textarea[name="label"]', 'When was it?')
@driver.find_element(:css, '.task__save').click
sleep 2
expect(@driver.page_source.include?('Task "When?" edited to "When was it?" by')).to be(true)
# Edit task response
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('12:34')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
@driver.find_element(:css, '.task-actions__edit-response').click
update_field('input[name="hour"]', '12')
update_field('input[name="minute"]', '34')
update_field('textarea[name="note"]', '')
@driver.action.send_keys(:enter).perform
sleep 2
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('12:34')).to be(true)
# Delete task
delete_task('When was it')
end
=end
# Disabled because the functionality changed to use a
# background image in CSS instead of an <img> element.
#
# I think we could do something like this instead:
# https://stackoverflow.com/questions/11198882/how-do-you-test-if-a-div-has-a-certain-css-style-in-rspec-capybara
#
# it "should upload image when registering", bin5 : true do
# email, password, avatar = ["test-#{Time.now.to_i}@example.com", '12345678', File.join(File.dirname(__FILE__), 'test.png')]
# page = LoginPage.new(config: @config, driver: @driver).load
# .register_and_login_with_email(email: email, password: password, file: avatar)
# me_page = MePage.new(config: @config, driver: page.driver).load
# avatar = me_page.avatar
# expect(avatar.attribute('src').match(/test\.png/).nil?).to be(false)
# end
end
end
update bin4 + 5 + 1 + 6
# Test-suite entry requirements: selenium/appium drivers, shared page objects
# and helpers, plus app-specific examples chosen by config.yml's app_name.
require 'selenium-webdriver'
require 'appium_lib'
require 'yaml'
require_relative './spec_helper.rb'
require_relative './app_spec_helpers.rb'
require_relative './pages/login_page.rb'
require_relative './pages/me_page.rb'
require_relative './pages/page.rb'
require_relative './api_helpers.rb'
# CONFIG must be loaded before the app-specific spec below, which depends on it.
CONFIG = YAML.load_file('config.yml')
require_relative "#{CONFIG['app_name']}/custom_spec.rb"
shared_examples 'app' do |webdriver_url, browser_capabilities|
# Helpers
include AppSpecHelpers
include ApiHelpers
# Suite-wide setup: unique fixture values (timestamped emails/URLs defeat
# caching and collisions), a local config.js swap, and one seeded claim.
before :all do
@wait = Selenium::WebDriver::Wait.new(timeout: 10)
@email = "sysops+#{Time.now.to_i}#{Process.pid}@meedan.com"
@password = '12345678'
@source_name = 'Iron Maiden'
@source_url = 'https://twitter.com/ironmaiden?timestamp=' + Time.now.to_i.to_s
@media_url = 'https://twitter.com/meedan/status/773947372527288320/?t=' + Time.now.to_i.to_s
@config = CONFIG
$source_id = nil
$media_id = nil
@team1_slug = 'team1'+Time.now.to_i.to_s
@user_mail = 'sysops_' + Time.now.to_i.to_s + '@meedan.com'
begin
# Swap in the local config; restored by the after(:all) hook.
FileUtils.cp('./config.js', '../build/web/js/config.js')
rescue
puts "Could not copy local ./config.js to ../build/web/js/"
end
#EXTRACT USER:PWD FROM URL FOR CHROME
# Chrome rejects user:password@ credentials embedded in URLs, so strip them.
if ((browser_capabilities == :chrome) and (@config['self_url'].include? "@" and @config['self_url'].include? ":"))
@config['self_url'] = @config['self_url'][0..(@config['self_url'].index('//')+1)] + @config['self_url'][(@config['self_url'].index('@')+1)..-1]
end
@driver = new_driver(webdriver_url,browser_capabilities)
api_create_team_project_and_claim(true)
end
# Restore the original config.js swapped out by before(:all).
after :all do
FileUtils.cp('../config.js', '../build/web/js/config.js')
end
# Fresh browser per example; the example name is stashed globally for helpers.
before :each do |example|
$caller_name = example.metadata[:description_args]
p $caller_name
@driver = new_driver(webdriver_url,browser_capabilities)
end
# On failure, capture a screenshot and report its link; always quit the driver.
after :each do |example|
if example.exception
link = save_screenshot("Test failed: #{example.description}")
print " [Test \"#{example.description}\" failed! Check screenshot at #{link}] "
end
@driver.quit
end
# The tests themselves start here
context "web" do
include_examples "custom"
# The Sources/Media tabs must filter the list: the source bio appears only
# under Sources (and persists), the tweet text only under Media.
it "should filter by medias or sources", bin6: true do
api_create_team_project_and_link 'https://twitter.com/TheWho/status/890135323216367616'
@driver.navigate.to @config['self_url']
wait_for_selector("card-with-border", :class)
expect(@driver.page_source.include?("The Who's official Twitter page")).to be(false)
expect(@driver.page_source.include?('Happy birthday Mick')).to be(true)
el = wait_for_selector("//span[contains(text(), 'Sources')]", :xpath)
el.click
wait_for_selector("source-card", :class)
expect(@driver.page_source.include?("The Who's official Twitter page")).to be(true)
expect(@driver.page_source.include?('Happy birthday Mick')).to be(true)
old = @driver.find_elements(:class, "medias__item").length
el = wait_for_selector("//span[contains(text(), 'Media')]", :xpath)
el.click
wait_for_size_change(old, "medias__item", :class)
expect(@driver.page_source.include?("The Who's official Twitter page")).to be(true)
expect(@driver.page_source.include?('Happy birthday Mick')).to be(false)
end
# End-to-end smoke test: register via email, create team, project and a claim,
# then log out. Assertions live inside the page objects.
it "should register and create a claim", bin5: true do
page = LoginPage.new(config: @config, driver: @driver).load
page = page.register_and_login_with_email(email: "sysops+#{Time.now.to_i}#{rand(1000)}@meedan.com", password: @password)
page
.create_team
.create_project
.create_media(input: 'Claim')
.logout
end
# A user viewing their own profile is allowed; viewing another user's source
# page must redirect to the /forbidden "Access Denied" page.
it "should redirect to access denied page", bin6: true do
user = api_register_and_login_with_email
api_logout
api_register_and_login_with_email
me_pg = MePage.new(config: @config, driver: @driver).load
sleep 3
expect(@driver.page_source.include?('Access Denied')).to be(false)
expect((@driver.current_url.to_s =~ /\/forbidden$/).nil?).to be(true)
unauthorized_pg = SourcePage.new(id: user.dbid, config: @config, driver: @driver).load
sleep 3
expect(@driver.page_source.include?('Access Denied')).to be(true)
expect((@driver.current_url.to_s =~ /\/forbidden$/).nil?).to be(false)
end
# Editing a media title must update both the media page heading and the
# media listing on the project page.
it "should edit the title of a media", bin1: true do
url = 'https://twitter.com/softlandscapes/status/834385935240462338'
media_pg = api_create_team_project_and_link_and_redirect_to_media_page url
media_pg.wait_for_element('.media-detail')
media_pg.toggle_card # Make sure the card is closed
expect(media_pg.primary_heading.text).to eq('https://t.co/i17DJNqiWX')
media_pg.toggle_card # Expand the card so the edit button is accessible
media_pg.wait_for_element('.media-actions')
sleep 3 # Clicks can misfire if pender iframe moves the button position at the wrong moment
media_pg.set_title('Edited media title')
expect(media_pg.primary_heading.text).to eq('Edited media title')
project_pg = media_pg.go_to_project
project_pg.wait_for_element('.media__heading')
expect(project_pg.elements('.media__heading').map(&:text).include?('Edited media title')).to be(true)
end
# This test fails ~ 30% of the time for some reason.
# Todo: consider fixing it
# CGB 2017-9-29
#
# it "should not add a duplicated tag from tags list", binx3: true, quick: true do
# page = api_create_team_project_and_claim_and_redirect_to_media_page
# new_tag = Time.now.to_i.to_s
# # Validate assumption that tag does not exist
# expect(page.has_tag?(new_tag)).to be(false)
# # Add tag from tags list
# page.add_tag(new_tag)
# expect(page.has_tag?(new_tag)).to be(true)
# # Try to add duplicate
# page.add_tag(new_tag)
# sleep 20
# # Verify that tag is not added and that error message is displayed
# expect(page.tags.count(new_tag)).to be(1)
# expect(page.contains_string?('Tag already exists')).to be(true)
# end
# New media items must derive a default title from the source platform:
# tweet text for Twitter, video title for YouTube, page title for Facebook;
# the same title must then appear in the project listing.
it "should display a default title for new media", bin1: true, quick:true do
# Tweets
media_pg = api_create_team_project_and_link_and_redirect_to_media_page('https://twitter.com/firstdraftnews/status/835587295394869249')
media_pg.toggle_card # Collapse card to show the title
expect(media_pg.primary_heading.text.include?('In a chat about getting')).to be(true)
project_pg = media_pg.go_to_project
sleep 1
@wait.until {
element = @driver.find_element(:partial_link_text, 'In a chat about getting')
expect(element.displayed?).to be(true)
}
# YouTube
media_pg = api_create_team_project_and_link_and_redirect_to_media_page('https://www.youtube.com/watch?v=ykLgjhBnik0')
media_pg.toggle_card # Collapse card to show the title
expect(media_pg.primary_heading.text).to eq("How To Check An Account's Authenticity")
project_pg = media_pg.go_to_project
sleep 5
expect(project_pg.elements('.media__heading').map(&:text).include?("How To Check An Account's Authenticity")).to be(true)
# Facebook
media_pg = api_create_team_project_and_link_and_redirect_to_media_page('https://www.facebook.com/FirstDraftNews/posts/1808121032783161')
media_pg.toggle_card # Collapse card to show the title
wait_for_selector('.media__heading')
expect(media_pg.primary_heading.text).to eq('First Draft on Facebook')
project_pg = media_pg.go_to_project
wait_for_selector('.media__heading')
expect(project_pg.elements('.media__heading').map(&:text).include?('First Draft on Facebook')).to be(true)
end
# OAuth login via Slack; the profile name shown on /check/me must match the
# configured slack_name (compared case-insensitively via upcase).
it "should login using Slack", bin5: true, quick:true do
login_with_slack
@driver.navigate.to @config['self_url'] + '/check/me'
displayed_name = get_element('h1.source__name').text.upcase
expected_name = @config['slack_name'].upcase
expect(displayed_name == expected_name).to be(true)
end
# The login heading must follow the browser's Accept-Language preference
# (fr -> "Connexion", pt -> "Entrar"). Skipped on Appium, where Chrome
# preference capabilities are not available.
it "should localize interface based on browser language", bin6: true do
unless browser_capabilities['appiumVersion']
caps = Selenium::WebDriver::Remote::Capabilities.chrome(chromeOptions: { prefs: { 'intl.accept_languages' => 'fr' } })
driver = Selenium::WebDriver.for(:remote, url: webdriver_url, desired_capabilities: caps)
driver.navigate.to @config['self_url']
sleep 1
expect(driver.find_element(:css, '.login__heading span').text == 'Connexion').to be(true)
driver.quit
caps = Selenium::WebDriver::Remote::Capabilities.chrome(chromeOptions: { prefs: { 'intl.accept_languages' => 'pt' } })
driver = Selenium::WebDriver.for(:remote, url: webdriver_url, desired_capabilities: caps)
driver.navigate.to @config['self_url']
sleep 1
expect(driver.find_element(:css, '.login__heading span').text == 'Entrar').to be(true)
driver.quit
end
end
# Static account-confirmation pages must render the expected titles.
it "should access user confirmed page", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/user/confirmed'
title = get_element('.main-title')
expect(title.text == 'Account Confirmed').to be(true)
end
it "should access user unconfirmed page", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/user/unconfirmed'
title = get_element('.main-title')
expect(title.text == 'Error').to be(true)
end
it "should access user already confirmed page", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/user/already-confirmed'
title = get_element('.main-title')
expect(title.text == 'Account Already Confirmed').to be(true)
end
# OAuth login via Facebook; the profile name must match the configured one.
it "should login using Facebook", bin5: true, quick:true do
login_pg = LoginPage.new(config: @config, driver: @driver).load
login_pg.login_with_facebook
me_pg = MePage.new(config: @config, driver: login_pg.driver).load
displayed_name = me_pg.title
expected_name = @config['facebook_name']
expect(displayed_name).to eq(expected_name)
end
# Email registration + login; fixtures use a timestamped address to stay unique.
it "should register and login using e-mail", bin5: true, quick:true do
login_pg = LoginPage.new(config: @config, driver: @driver).load
email, password = ['sysops+' + Time.now.to_i.to_s + '@meedan.com', '22345678']
login_pg.register_and_login_with_email(email: email, password: password)
me_pg = MePage.new(config: @config, driver: login_pg.driver).load # reuse tab
displayed_name = me_pg.title
expect(displayed_name == 'User With Email').to be(true)
end
# Creating a project must land on a /project/<id> URL and list the project
# under the team's Projects tab.
it "should create a project for a team", binx3: true do
team = api_create_team
@driver.navigate.to @config['self_url']
project_name = "Project #{Time.now}"
project_pg = TeamPage.new(config: @config, driver: @driver).create_project(name: project_name)
expect(project_pg.driver.current_url.to_s.match(/\/project\/[0-9]+$/).nil?).to be(false)
team_pg = project_pg.click_team_link
team_pg.click_projects_tab
sleep 2
element = @driver.find_element(:partial_link_text, project_name)
expect(element.displayed?).to be(true)
end
# Adding a tweet link as project media must surface its text on the project page.
it "should create project media", bin1: true do
api_create_team_and_project
page = ProjectPage.new(config: @config, driver: @driver).load
expect(page.contains_string?('This is a test')).to be(false)
page.create_media(input: 'https://twitter.com/marcouza/status/771009514732650497?t=' + Time.now.to_i.to_s)
page.driver.navigate.to @config['self_url']
page.wait_for_element('.project .media-detail')
expect(page.contains_string?('This is a test')).to be(true)
end
# Disable because it is flaky.
# Todo:
# it "should search for image", binx2: true do
# api_create_team_and_project
# sleep 2
# page = ProjectPage.new(config: @config, driver: @driver).load
# .create_image_media(File.join(File.dirname(__FILE__), 'test.png'))
# sleep 10 # wait for Sidekiq
# @driver.navigate.to @config['self_url'] + '/' + get_team + '/search'
# sleep 5
# expect(@driver.find_element(:link_text, 'test.png').nil?).to be(false)
# end
# Unknown routes must render the 404 page.
it "should redirect to 404 page", bin4: true do
@driver.navigate.to @config['self_url'] + '/something-that/does-not-exist'
title = get_element('.main-title')
expect(title.text == 'Not Found').to be(true)
end
# Authenticated-only routes must bounce anonymous visitors to the login screen.
it "should redirect to login screen if not logged in", bin5: true do
@driver.navigate.to @config['self_url'] + '/check/teams'
title = get_element('.login__heading')
expect(title.text == 'Sign in').to be(true)
end
# OAuth login via Twitter; profile name compared case-insensitively.
it "should login using Twitter", bin5: true, quick:true do
login_with_twitter
@driver.navigate.to @config['self_url'] + '/check/me'
displayed_name = get_element('h1.source__name').text.upcase
expected_name = @config['twitter_name'].upcase
expect(displayed_name == expected_name).to be(true)
end
# /check/user/:id must resolve to the user's source page.
it "should go to source page through user/:id", bin6: true do
user = api_register_and_login_with_email
@driver.navigate.to @config['self_url'] + '/check/user/' + user.dbid.to_s
sleep 1
title = get_element('.source__name')
expect(title.text == 'User With Email').to be(true)
end
# Browser back/forward must round-trip between the root and /check/tos URLs.
it "should go back and forward in the history", bin4: true do
@driver.navigate.to @config['self_url']
expect((@driver.current_url.to_s =~ /\/$/).nil?).to be(false)
@driver.navigate.to @config['self_url'] + '/check/tos'
expect((@driver.current_url.to_s =~ /\/tos$/).nil?).to be(false)
@driver.navigate.back
expect((@driver.current_url.to_s =~ /\/$/).nil?).to be(false)
@driver.navigate.forward
expect((@driver.current_url.to_s =~ /\/tos$/).nil?).to be(false)
end
# Creating a source must redirect to /source/<id> showing the given name.
it "should create source and redirect to newly created source", bin6: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
sleep 15
@driver.find_element(:css, '#create-media__source').click
sleep 1
fill_field('#create-media-source-name-input', @source_name)
fill_field('#create-media-source-url-input', @source_url)
sleep 1
press_button('#create-media-submit')
sleep 15
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(false)
title = get_element('.source__name').text
expect(title == @source_name).to be(true)
end
# Submitting the same source URL twice must resolve to the same source id
# rather than creating a duplicate record.
it "should not create duplicated source", bin6: true do
api_create_team_project_and_source_and_redirect_to_source('Megadeth', 'https://twitter.com/megadeth')
id1 = @driver.current_url.to_s.gsub(/^.*\/source\//, '').to_i
expect(id1 > 0).to be(true)
@driver.navigate.to @driver.current_url.to_s.gsub(/\/source\/[0-9]+$/, '')
sleep 5
@driver.find_element(:css, '#create-media__source').click
sleep 1
fill_field('#create-media-source-name-input', 'Megadeth')
fill_field('#create-media-source-url-input', 'https://twitter.com/megadeth')
sleep 1
press_button('#create-media-submit')
sleep 10
id2 = @driver.current_url.to_s.gsub(/^.*\/source\//, '').to_i
expect(id2 > 0).to be(true)
expect(id1 == id2).to be(true)
end
# This test is flaky
# Todo: consider fixing it or removing it
#
# CGB 2017-9-29
#
# it "should tag source as a command", bin6: true do
# api_create_team_project_and_source_and_redirect_to_source('ACDC', 'https://twitter.com/acdc')
# sleep 3
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('Tagged #command')).to be(false)
# fill_field('#cmd-input', '/tag command')
# @driver.action.send_keys(:enter).perform
# sleep 5
# expect(@driver.page_source.include?('Tagged #command')).to be(true)
# @driver.navigate.refresh
# sleep 5
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('Tagged #command')).to be(true)
# end
# This test is flaky
# Todo: consider fixing it or removing it
#
# CGB 2017-10-2
#
# it "should comment source as a command", bin6: true do
# api_create_team_project_and_source_and_redirect_to_source('The Beatles', 'https://twitter.com/thebeatles')
# sleep 3
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('This is my comment')).to be(false)
# fill_field('#cmd-input', '/comment This is my comment')
# @driver.action.send_keys(:enter).perform
# sleep 5
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# @driver.navigate.refresh
# sleep 5
# @driver.find_element(:css, '.source__tab-button-notes').click
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# end
# Pasting a tweet (report) URL into the source form must be rejected with a
# "not a profile" message instead of creating a source.
it "should not create report as source", bin6: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
sleep 5
@driver.find_element(:css, '#create-media__source').click
sleep 1
fill_field('#create-media-source-url-input', 'https://twitter.com/IronMaiden/status/832726327459446784')
sleep 1
press_button('#create-media-submit')
sleep 15
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(true)
message = get_element('.message').text
expect(message.match(/Sorry, this is not a profile/).nil?).to be(false)
end
# The "/tag a, b" command must create one tag per comma-separated value.
it "should tag source multiple times with commas with command", bin6: true do
api_create_team_project_and_source_and_redirect_to_source('Motorhead', 'https://twitter.com/mymotorhead')
sleep 5
@driver.find_element(:css, '.source__tab-button-notes').click
fill_field('#cmd-input', '/tag foo, bar')
@driver.action.send_keys(:enter).perform
sleep 5
expect(@driver.page_source.include?('Tagged #foo')).to be(true)
expect(@driver.page_source.include?('Tagged #bar')).to be(true)
end
# Edits name, bio and avatar through the source edit form; only the edited
# name is asserted afterwards.
it "should edit basic source data (name, description/bio, avatar)", bin6: true do
api_create_team_project_and_source_and_redirect_to_source('ACDC', 'https://twitter.com/acdc')
sleep 5
element = @driver.find_element(:class, "source__edit-button")
element.click
input = @driver.find_element(:id, 'source__name-container')
input.send_keys(" - EDIT ACDC")
input = @driver.find_element(:id, 'source__bio-container')
input.send_keys(" - EDIT DESC")
@driver.find_element(:class, "source__edit-avatar-button").click
sleep 1
# Upload the avatar through the hidden file input.
input = @driver.find_element(:css, 'input[type=file]')
input.send_keys(File.join(File.dirname(__FILE__), 'test.png'))
sleep 1
@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
displayed_name = get_element('h1.source__name').text
expect(displayed_name.include? "EDIT").to be(true)
end
# This tests is unreliable
# Todo: Methods that deal react with hidden menus
#
# ccx 2017-10-13
=begin
it "should add and remove accounts to sources", bin6: true do
api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
#sleep 5
element = wait_for_selector("source__edit-button", :class)
element.click
#sleep 3
element = wait_for_selector("source__edit-addinfo-button",:class)
element.click
#sleep 1
element = wait_for_selector("source__add-link",:class)
element.click
sleep 1
fill_field("source__link-input0", "www.acdc.com", :id)
sleep 2
element = wait_for_selector('source__edit-save-button',:class)
element.click
#@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
expect(@driver.page_source.include?('AC/DC Official Website')).to be(true)
#networks tab
element = wait_for_selector("source__tab-button-account",:class)
element.click
sleep 5
expect(@driver.page_source.include?('The Official AC/DC website and store')).to be(true)
#delete
element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 3
list = @driver.find_elements(:css => "svg[class='create-task__remove-option-button']")
list[1].click
sleep 1
@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
expect(@driver.page_source.include?('AC/DC Official Website')).to be(false)
end
=end
# Adds phone, organization, location and a custom "other" field to a source.
# The dynamic input ids look like "undefined-undefined-<Kind>-<suffix>", so
# each one is scraped out of the page source before filling the field.
it "should edit source metadata (contact, phone, location, organization, other)", bin6: true do
api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
sleep 5
element = @driver.find_element(:class, "source__edit-button")
element.click
sleep 1
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-phone").click
# Extract the generated input id: everything from the marker up to the closing quote.
str= @driver.page_source
str = str[str.index('undefined-undefined-Phone-')..str.length]
str = str[0..(str.index('"')-1)]
element = @driver.find_element(:id, str)
fill_field(str, "989898989", :id)
sleep 1
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-organization").click
str= @driver.page_source
str = str[str.index('undefined-undefined-Organization-')..str.length]
str = str[0..(str.index('"')-1)]
element = @driver.find_element(:id, str)
fill_field(str, "ORGANIZATION", :id)
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-location").click
str= @driver.page_source
str = str[str.index('undefined-undefined-Location-')..str.length]
str = str[0..(str.index('"')-1)]
fill_field(str, "Location 123", :id)
sleep 1
#source__add-other
@driver.find_element(:class, "source__edit-addinfo-button").click
sleep 1
@driver.find_element(:class, "source__add-other").click
sleep 1
fill_field("source__other-label-input", "label", :id)
fill_field("source__other-value-input", "value", :id)
# Tab twice out of the inputs, then Enter to commit the "other" field.
@driver.action.send_keys("\t").perform
@driver.action.send_keys("\t").perform
@driver.action.send_keys("\n").perform
sleep 2
@driver.find_element(:class, 'source__edit-save-button').click
sleep 5
expect(@driver.page_source.include?('label: value')).to be(true)
expect(@driver.page_source.include?('Location 123')).to be(true)
expect(@driver.page_source.include?('ORGANIZATION')).to be(true)
expect(@driver.page_source.include?('989898989')).to be(true)
end
# Adds two tags to a source via the edit dialog, then removes the first and
# verifies both operations survive a page reload.
it "should add and remove source tags", bin6: true do
  api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
  sleep 5
  element = @driver.find_element(:class, "source__edit-button")
  element.click
  sleep 1
  @driver.find_element(:class, "source__edit-addinfo-button").click
  sleep 1
  @driver.find_element(:class, "source__add-tags").click
  sleep 1
  fill_field("sourceTagInput", "TAG1", :id)
  @driver.action.send_keys("\n").perform
  fill_field("sourceTagInput", "TAG2", :id)
  @driver.action.send_keys("\n").perform
  sleep 3
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source.include?('TAG1')).to be(true)
  expect(@driver.page_source.include?('TAG2')).to be(true)
  #delete
  # Fixed: `sleep 1 until element = @driver.find_element(...)` never retried,
  # because find_element raises NoSuchElementError instead of returning nil.
  # Use the polling helper already used throughout this suite.
  element = wait_for_selector("source__edit-button", :class)
  element.click
  list = @driver.find_elements(:css => "div.source-tags__tag svg")
  list[0].click
  sleep 1
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source.include?('TAG1')).to be(true)
  expect(@driver.page_source.include?('TAG2')).to be(false)
end
# Adds a language ("Acoli") to a source via the autocomplete, then removes it,
# checking each state after a page reload.
it "should add and remove source languages",bin6: true do
  api_create_team_project_and_source_and_redirect_to_source('GOT', 'https://twitter.com/GameOfThrones')
  sleep 5
  element = @driver.find_element(:class, "source__edit-button")
  element.click
  sleep 1
  @driver.find_element(:class, "source__edit-addinfo-button").click
  sleep 1
  @driver.find_element(:class, "source__add-languages").click
  fill_field("sourceLanguageInput", "Acoli", :id)
  # Pick the first autocomplete suggestion.
  @driver.action.send_keys(:down).perform
  @driver.action.send_keys(:return).perform
  sleep 2
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source.include?('Acoli')).to be(true)
  # Fixed: `sleep 1 until element = @driver.find_element(...)` never retried,
  # because find_element raises NoSuchElementError instead of returning nil.
  # Use the polling helper already used throughout this suite.
  element = wait_for_selector("source__edit-button", :class)
  element.click
  sleep 1
  list = @driver.find_elements(:css => "div.source-tags__tag svg")
  list[0].click
  sleep 1
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source.include?('Acoli')).to be(false)
end
# Adding the same tag twice via the "/tag" command must keep a single tag and
# surface a "Tag already exists" error.
it "should not add a duplicated tag from command line", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
new_tag = Time.now.to_i.to_s
# Validate assumption that tag does not exist
expect(media_pg.has_tag?(new_tag)).to be(false)
# Try to add from command line
media_pg.add_annotation("/tag #{new_tag}")
Selenium::WebDriver::Wait.new(timeout: 10).until { media_pg.has_tag?(new_tag) } # TODO: wait inside MediaPage
expect(media_pg.has_tag?(new_tag)).to be(true)
# Try to add duplicate from command line
media_pg.add_annotation("/tag #{new_tag}")
# Verify that tag is not added and that error message is displayed
expect(media_pg.tags.count(new_tag)).to be(1)
expect(@driver.page_source.include?('Tag already exists')).to be(true)
end
# Re-submitting an existing media URL must land back on the same project page
# (no new /media/<id> created).
it "should not create duplicated media", bin4: true do
api_create_team_project_and_link_and_redirect_to_media_page @media_url
id1 = @driver.current_url.to_s.gsub(/^.*\/media\//, '').to_i
expect(id1 > 0).to be(true)
# Strip the /media/<id> suffix to get back to the project page.
@driver.navigate.to @driver.current_url.to_s.gsub(/\/media\/[0-9]+$/, '')
sleep 3
wait_for_selector("medias__item",:class)
media_url = @driver.current_url.to_s.gsub(/\/media\/[0-9]+$/, '')
fill_field('#create-media-input', @media_url)
sleep 2
press_button('#create-media-submit')
sleep 3
wait_for_selector("add-annotation__insert-photo",:class)
expect(@driver.current_url.to_s.gsub(/\/media\/[0-9]+$/, '') == media_url).to be(true)
end
it "should tag media from tags list", bin5: true do
page = api_create_team_project_and_claim_and_redirect_to_media_page
sleep 5
wait_for_selector("add-annotation__buttons", :class)
new_tag = Time.now.to_i.to_s
expect(page.contains_string?("Tagged \##{new_tag}")).to be(false)
page.add_tag(new_tag)
expect(page.has_tag?(new_tag)).to be(true)
sleep 2
expect(page.contains_string?("Tagged \##{new_tag}")).to be(true)
page.driver.navigate.refresh
page.wait_for_element('.media')
expect(page.has_tag?(new_tag)).to be(true)
expect(page.contains_string?("Tagged \##{new_tag}")).to be(true)
end
it "should tag media as a command", bin4: true do
page = api_create_team_project_and_claim_and_redirect_to_media_page
expect(page.has_tag?('command')).to be(false)
expect(page.contains_string?('Tagged #command')).to be(false)
# Add a tag as a command
page.add_annotation('/tag command')
# Verify that tag was added to tags list and annotations list
expect(page.has_tag?('command')).to be(true)
expect(page.contains_string?('Tagged #command')).to be(true)
# Reload the page and verify that tags are still there
page.driver.navigate.refresh
page.wait_for_element('.media')
expect(page.has_tag?('command')).to be(true)
expect(page.contains_string?('Tagged #command')).to be(true)
end
it "should comment media as a command", bin4: true, quick:true do
api_create_team_project_and_claim_and_redirect_to_media_page
# First, verify that there isn't any comment
expect(@driver.page_source.include?('This is my comment')).to be(false)
# Add a comment as a command
fill_field('#cmd-input', '/comment This is my comment')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that comment was added to annotations list
expect(@driver.page_source.include?('This is my comment')).to be(true)
# Reload the page and verify that comment is still there
@driver.navigate.refresh
sleep 5
expect(@driver.page_source.include?('This is my comment')).to be(true)
end
it "should flag media as a command", bin4: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
expect(media_pg.contains_string?('Flag')).to be(false)
media_pg.fill_input('#cmd-input', '/flag Spam')
media_pg.element('#cmd-input').submit
sleep 5
expect(media_pg.contains_string?('Flag')).to be(true)
media_pg.driver.navigate.refresh
media_pg.wait_for_element('.media')
expect(media_pg.contains_string?('Flag')).to be(true)
end
# This test needs a little work.
# See the `edit` method in in project_page.rb
#
# @chris 2017-10-19
#
# it "should edit project", bin4: true do
# api_create_team_and_project
# project_pg = ProjectPage.new(config: @config, driver: @driver).load
# new_title = "Changed title #{Time.now.to_i}"
# new_description = "Set description #{Time.now.to_i}"
# expect(project_pg.contains_string?(new_title)).to be(false)
# expect(project_pg.contains_string?(new_description)).to be(false)
# project_pg.edit(title: new_title, description: new_description)
# expect(@driver.page_source.include?(new_title)).to be(true)
# expect(@driver.page_source.include?(new_description)).to be(true)
# end
it "should redirect to 404 page if id does not exist", bin4: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
sleep 3
url = @driver.current_url.to_s
@driver.navigate.to url.gsub(/project\/([0-9]+).*/, 'project/999')
title = get_element('.main-title')
expect(title.text == 'Not Found').to be(true)
expect((@driver.current_url.to_s =~ /\/404$/).nil?).to be(false)
end
it "should logout", bin5: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
page = ProjectPage.new(config: @config, driver: @driver).logout
expect(page.contains_string?('Sign in')).to be(true)
end
# it "should delete annotation from annotations list (for media, source and project)" do
# skip("Needs to be implemented")
# end
# it "should delete tag from tags list (for media and source)" do
# skip("Needs to be implemented")
# end
# it "should edit team" do
# skip("Needs to be implemented")
# end
# it "should show 'manage team' link only to team owners" do
# skip("Needs to be implemented")
# end
# it "should show 'edit project' link only to users with 'update project' permission" do
# skip("Needs to be implemented")
# end
# it "should edit team logo" do
# skip("Needs to be implemented")
# end
it "should navigate between teams", bin5: true, quick: true do
# setup
@user_mail = "test" +Time.now.to_i.to_s+rand(9999).to_s + @user_mail
@team1_slug = 'team1'+Time.now.to_i.to_s+rand(9999).to_s
user = api_register_and_login_with_email(email: @user_mail, password: @password)
team = request_api 'team', { name: 'Team 1', email: user.email, slug: @team1_slug }
request_api 'project', { title: 'Team 1 Project', team_id: team.dbid }
team = request_api 'team', { name: 'Team 2', email: user.email, slug: "team-2-#{rand(9999)}#{Time.now.to_i}" }
request_api 'project', { title: 'Team 2 Project', team_id: team.dbid }
page = MePage.new(config: @config, driver: @driver).load.select_team(name: 'Team 1')
page.click_projects_tab
expect(page.team_name).to eq('Team 1')
expect(page.project_titles.include?('Team 1 Project')).to be(true)
expect(page.project_titles.include?('Team 2 Project')).to be(false)
page = MePage.new(config: @config, driver: @driver).load.select_team(name: 'Team 2')
page.click_projects_tab
expect(page.team_name).to eq('Team 2')
expect(page.project_titles.include?('Team 2 Project')).to be(true)
expect(page.project_titles.include?('Team 1 Project')).to be(false)
#As a different user, request to join one team and be accepted.
user = api_register_and_login_with_email(email: "new"+@user_mail, password: @password)
page = MePage.new(config: @config, driver: @driver).load
page.ask_join_team(subdomain: @team1_slug)
@wait.until {
expect(@driver.find_element(:class, "message").nil?).to be(false)
}
api_logout
@driver.quit
@driver = new_driver(webdriver_url,browser_capabilities)
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email='+@user_mail)
#As the group creator, go to the members page and approve the joining request.
page = MePage.new(config: @config, driver: @driver).load
page.go(@config['self_url'] + '/check/me')
page.approve_join_team(subdomain: @team1_slug)
@wait.until {
elems = @driver.find_elements(:css => ".team-members__list > div")
expect(elems.size).to be > 1
}
# "should redirect to team page if user asking to join a team is already a member"
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email=new'+@user_mail)
#page = MePage.new(config: @config, driver: @driver).load
@driver.navigate.to @config['self_url'] + "/"+@team1_slug+"/join"
sleep 3
@wait.until {
expect(@driver.current_url.eql? @config['self_url']+"/"+@team1_slug ).to be(true)
}
# "should reject member to join team"
user = api_register_and_login_with_email
page = MePage.new(config: @config, driver: @driver).load
page.ask_join_team(subdomain: @team1_slug)
@wait.until {
expect(@driver.find_element(:class, "message").nil?).to be(false)
}
api_logout
@driver.quit
@driver = new_driver(webdriver_url,browser_capabilities)
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email='+@user_mail)
#As the group creator, go to the members page and approve the joining request.
page = MePage.new(config: @config, driver: @driver).load
.disapprove_join_team(subdomain: @team1_slug)
@wait.until {
expect(@driver.page_source.include?('Requests to join')).to be(false)
}
# "should delete member from team"
page = Page.new(config: @config, driver: @driver)
page.go(@config['api_path'] + '/test/session?email='+@user_mail)
#As the group creator, go to the members page and approve the joining request.
sleep 1
page = MePage.new(config: @config, driver: @driver).load
@driver.navigate.to @config['self_url'] + '/'+@team1_slug
sleep 2
wait_for_selector('team-members__member',:class)
el = wait_for_selector('team-members__edit-button',:class)
el.click
sleep 5
l = wait_for_selector_list('//button',:xpath)
old = l.length
expect(l.length > 4).to be(true)
l[l.length-2].click
sleep 1
expect(wait_for_selector_list('//button',:xpath).length < old).to be(true)
end
#As a different user, request to join one team.
# it "should join team", bin4:true, quick: true do
# api_register_and_login_with_email
# page = MePage.new(config: @config, driver: @driver).load
# page.ask_join_team(subdomain: @team1_slug)
# @wait.until {
# expect(@driver.find_element(:class, "message").nil?).to be(false)
# }
# api_logout
# @driver = new_driver(webdriver_url,browser_capabilities)
# page = Page.new(config: @config, driver: @driver)
# page.go(@config['api_path'] + '/test/session?email='+@user_mail)
# #As the group creator, go to the members page and approve the joining request.
# page = MePage.new(config: @config, driver: @driver).load
# .approve_join_team(subdomain: @team1_slug)
# @wait.until {
# elems = @driver.find_elements(:css => ".team-members__list > div")
# expect(elems.size).to be > 1
# }
# end
it "should update notes count after delete annotation", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
media_pg.fill_input('#cmd-input', 'Test')
media_pg.element('#cmd-input').submit
sleep 3
notes_count = get_element('.media-detail__check-notes-count')
expect(notes_count.text == '2 notes').to be(true)
expect(@driver.page_source.include?('Comment deleted')).to be(false)
media_pg.delete_annotation
wait_for_selector('.annotation__deleted')
expect(notes_count.text == '2 notes').to be(true)
expect(@driver.page_source.include?('Comment deleted')).to be(true)
end
it "should auto refresh project when media is created", bin1: true do
api_create_team_and_project
@driver.navigate.to @config['self_url']
url = @driver.current_url
sleep 3
expect(@driver.page_source.include?('Auto-Refresh')).to be(false)
current_window = @driver.window_handles.last
@driver.execute_script("window.open('#{url}')")
@driver.switch_to.window(@driver.window_handles.last)
fill_field('#create-media-input', 'Auto-Refresh')
press_button('#create-media-submit')
sleep 5
@driver.execute_script('window.close()')
@driver.switch_to.window(current_window)
sleep 5
expect(@driver.page_source.include?('Auto-Refresh')).to be(true)
end
it "should auto refresh media when annotation is created", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
url = media_pg.driver.current_url
sleep 3
expect(@driver.page_source.include?('Auto-Refresh')).to be(false)
current_window = @driver.window_handles.last
@driver.execute_script("window.open('#{url}')")
@driver.switch_to.window(@driver.window_handles.last)
media_pg.fill_input('#cmd-input', 'Auto-Refresh')
media_pg.element('#cmd-input').submit
sleep 5
@driver.execute_script('window.close()')
@driver.switch_to.window(current_window)
sleep 5
expect(@driver.page_source.include?('Auto-Refresh')).to be(true)
end
# it "should cancel request through switch teams" do
# skip("Needs to be implemented")
# end
# it "should give 404 when trying to acess a media that is not related to the project on the URL" do
# skip("Needs to be implemented")
# end
# it "should linkify URLs on comments" do
# skip("Needs to be implemented")
# end
# it "should add and remove suggested tags" do
# skip("Needs to be implemented")
# end
# it "should find all medias with an empty search" do
# skip("Needs to be implemented")
# end
# it "should find medias when searching by keyword" do
# skip("Needs to be implemented")
# end
# it "should find medias when searching by status" do
# skip("Needs to be implemented")
# end
# it "should find medias when searching by tag" do
# skip("Needs to be implemented")
# end
# it "should move media to another project" do
# skip("Needs to be implemented")
# end
it "should add, edit, answer, update answer and delete short answer task", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
wait_for_selector('.create-task__add-button')
# Create a task
expect(@driver.page_source.include?('Foo or bar?')).to be(false)
expect(@driver.page_source.include?('Task "Foo or bar?" created by')).to be(false)
el = wait_for_selector('.create-task__add-button', :css)
el.click
el = wait_for_selector('.create-task__add-short-answer', :css)
el.click
wait_for_selector('#task-label-input', :css)
fill_field('#task-label-input', 'Foo or bar?')
el = wait_for_selector('.create-task__dialog-submit-button', :css)
el.click
media_pg.wait_all_elements(2, "annotations__list-item", :class) #Wait for refresh page
expect(@driver.page_source.include?('Foo or bar?')).to be(true)
expect(@driver.page_source.include?('Task "Foo or bar?" created by')).to be(true)
# Answer task
expect(@driver.page_source.include?('Task "Foo or bar?" answered by')).to be(false)
fill_field('textarea[name="response"]', 'Foo')
@driver.find_element(:css, '.task__save').click
media_pg.wait_all_elements(3, "annotations__list-item", :class)
expect(@driver.page_source.include?('Task "Foo or bar?" answered by')).to be(true)
# Edit task
expect(@driver.page_source.include?('Task "Foo or bar?" edited to "Foo or bar???" by')).to be(false)
el = wait_for_selector('.task-actions__icon', :css)
el.click
media_pg.wait_all_elements(6, "annotations__list-item", :class)
editbutton = wait_for_selector('.task-actions__edit', :css)
editbutton.location_once_scrolled_into_view
editbutton.click
fill_field('textarea[name="label"]', '??')
editbutton = wait_for_selector('.task__save', :css)
editbutton.click
media_pg.wait_all_elements(7, "annotations__list-item", :class)
expect(@driver.page_source.include?('Task "Foo or bar?" edited to "Foo or bar???" by')).to be(true)
# Edit task answer
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Foo or bar???" answered by User With Email: "Foo edited"')).to be(false)
el = wait_for_selector('.task-actions__icon', :css)
el.click
el = wait_for_selector('.task-actions__edit-response', :css)
el.click
# Ensure menu closes and textarea is focused...
el = wait_for_selector('textarea[name="response"]', :css)
el.click
fill_field('textarea[name="response"]', ' edited')
@driver.find_element(:css, '.task__save').click
media_pg.wait_all_elements(8, "annotations__list-item", :class) #Wait for refresh page
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Foo or bar???" answered by User With Email: "Foo edited"')).to be(true)
# Delete task
delete_task('Foo')
end
# it "should add, edit, answer, update answer and delete single_choice task" do
# skip("Needs to be implemented")
# end
# it "should add, edit, answer, update answer and delete multiple_choice task" do
# skip("Needs to be implemented")
# end
it "should search for reverse images", binx2: true do
page = api_create_team_project_and_link_and_redirect_to_media_page 'https://www.instagram.com/p/BRYob0dA1SC/'
wait_for_selector('.annotation__reverse-image')
expect(@driver.page_source.include?('This item contains at least one image. Click Search to look for potential duplicates on Google.')).to be(true)
expect((@driver.current_url.to_s =~ /google/).nil?).to be(true)
current_window = @driver.window_handles.last
@driver.find_element(:css, '.annotation__reverse-image-search').click
sleep 3
@driver.switch_to.window(@driver.window_handles.last)
expect((@driver.current_url.to_s =~ /google/).nil?).to be(false)
@driver.switch_to.window(current_window)
end
it "should refresh media", bin1: true do
page = api_create_team_project_and_link_and_redirect_to_media_page 'http://ca.ios.ba/files/meedan/random.php'
wait_for_selector("add-annotation", :class)
title1 = @driver.title
expect((title1 =~ /Random/).nil?).to be(false)
el = wait_for_selector('.media-actions__icon')
el.click
sleep 1
@driver.find_element(:css, '.media-actions__refresh').click
sleep 10 #Needs to wait the refresh
wait_for_selector("add-annotation", :class)
title2 = @driver.title
expect((title2 =~ /Random/).nil?).to be(false)
expect(title1 != title2).to be(true)
end
it "should search by project", binx2: true do
api_create_claim_and_go_to_search_page
expect((@driver.current_url.to_s.match(/project/)).nil?).to be(true)
@driver.find_element(:xpath, "//div[contains(text(), 'Project')]").click
sleep 10
expect((@driver.current_url.to_s.match(/project/)).nil?).to be(false)
expect((@driver.title =~ /Project/).nil?).to be(false)
@driver.find_element(:xpath, "//div[contains(text(), 'Project')]").click
sleep 10
expect((@driver.title =~ /Project/).nil?).to be(true)
end
it "should search and change sort criteria", binx2: true do
api_create_claim_and_go_to_search_page
expect((@driver.current_url.to_s.match(/recent_activity/)).nil?).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Recent activity')]").click
sleep 10
expect((@driver.current_url.to_s.match(/recent_activity/)).nil?).to be(false)
expect((@driver.current_url.to_s.match(/recent_added/)).nil?).to be(true)
expect(@driver.page_source.include?('My search result')).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Created')]").click
sleep 10
expect((@driver.current_url.to_s.match(/recent_activity/)).nil?).to be(true)
expect((@driver.current_url.to_s.match(/recent_added/)).nil?).to be(false)
expect(@driver.page_source.include?('My search result')).to be(true)
end
it "should search and change sort order", binx2: true do
api_create_claim_and_go_to_search_page
expect((@driver.current_url.to_s.match(/ASC|DESC/)).nil?).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Newest')]").click
sleep 10
expect((@driver.current_url.to_s.match(/DESC/)).nil?).to be(false)
expect((@driver.current_url.to_s.match(/ASC/)).nil?).to be(true)
expect(@driver.page_source.include?('My search result')).to be(true)
@driver.find_element(:xpath, "//span[contains(text(), 'Oldest')]").click
sleep 20
expect((@driver.current_url.to_s.match(/DESC/)).nil?).to be(true)
expect((@driver.current_url.to_s.match(/ASC/)).nil?).to be(false)
expect(@driver.page_source.include?('My search result')).to be(true)
end
it "should search by project through URL", binx2: true do
api_create_claim_and_go_to_search_page
@driver.navigate.to @config['self_url'] + '/' + get_team + '/search/%7B"projects"%3A%5B0%5D%7D'
sleep 10
expect(@driver.page_source.include?('My search result')).to be(false)
selected = @driver.find_elements(:css, '.media-tags__suggestion--selected')
expect(selected.size == 3).to be(true)
end
it "should change search sort criteria through URL", binx2: true do
api_create_claim_and_go_to_search_page
@driver.navigate.to @config['self_url'] + '/' + get_team + '/search/%7B"sort"%3A"recent_activity"%7D'
sleep 10
expect(@driver.page_source.include?('My search result')).to be(true)
selected = @driver.find_elements(:css, '.media-tags__suggestion--selected').map(&:text).sort
expect(selected == ['Recent activity', 'Newest first', 'Media'].sort).to be(true)
end
it "should change search sort order through URL", binx2: true do
api_create_claim_and_go_to_search_page
@driver.navigate.to @config['self_url'] + '/' + get_team + '/search/%7B"sort_type"%3A"ASC"%7D'
sleep 10
expect(@driver.page_source.include?('My search result')).to be(true)
selected = @driver.find_elements(:css, '.media-tags__suggestion--selected').map(&:text).sort
expect(selected == ['Created', 'Oldest first', 'Media'].sort).to be(true)
end
it "should not reset password", bin5: true do
page = LoginPage.new(config: @config, driver: @driver)
page.reset_password('test@meedan.com')
sleep 2
expect(@driver.page_source.include?('Email not found')).to be(true)
expect(@driver.page_source.include?('Password reset sent')).to be(false)
end
it "should reset password", bin5: true do
user = api_create_and_confirm_user
page = LoginPage.new(config: @config, driver: @driver)
page.reset_password(user.email)
sleep 2
expect(@driver.page_source.include?('Email not found')).to be(false)
expect(@driver.page_source.include?('Password reset sent')).to be(true)
end
it "should set metatags", bin5: true do
api_create_team_project_and_link_and_redirect_to_media_page 'https://twitter.com/marcouza/status/875424957613920256'
sleep 2
request_api('make_team_public', { slug: get_team })
sleep 1
url = @driver.current_url.to_s
@driver.navigate.to url
site = @driver.find_element(:css, 'meta[name="twitter\\:site"]').attribute('content')
expect(site == @config['app_name']).to be(true)
twitter_title = @driver.find_element(:css, 'meta[name="twitter\\:title"]').attribute('content')
expect(twitter_title == 'This is a test').to be(true)
end
it "should embed", bin1: true do
api_create_team_project_and_claim_and_redirect_to_media_page
sleep 2
request_api('make_team_public', { slug: get_team })
@driver.navigate.refresh
sleep 5
@driver.find_element(:css, '.media-actions__icon').click
sleep 1
if @config['app_name'] == 'bridge'
expect(@driver.page_source.include?('Embed')).to be(false)
@driver.navigate.to "#{@driver.current_url}/embed"
sleep 2
expect(@driver.page_source.include?('Not available')).to be(true)
elsif @config['app_name'] == 'check'
expect(@driver.page_source.include?('Embed')).to be(true)
url = @driver.current_url.to_s
@driver.find_element(:css, '.media-actions__embed').click
sleep 2
expect(@driver.current_url.to_s == "#{url}/embed").to be(true)
expect(@driver.page_source.include?('Not available')).to be(false)
@driver.find_element(:css, '#media-embed__actions-customize').click
sleep 1
@driver.find_elements(:css, '#media-embed__customization-menu input[type=checkbox]').map(&:click)
sleep 1
@driver.find_elements(:css, 'body').map(&:click)
sleep 1
@driver.find_element(:css, '#media-embed__actions-copy').click
sleep 1
@driver.navigate.to 'https://pastebinx.mozilla.org/'
@driver.find_element(:css, '#code').send_keys(' ')
@driver.action.send_keys(:control, 'v').perform
sleep 1
expect((@driver.find_element(:css, '#code').attribute('value') =~ /hide_open_tasks%3D1%26hide_tasks%3D1%26hide_notes%3D1/).nil?).to be(false)
sleep 5
end
end
#Add slack notifications to a team
it "should add slack notifications to a team", binx3:true, quick: true do
team = "testteam#{Time.now.to_i}"
api_create_team(team:team)
p = Page.new(config: @config, driver: @driver)
p.go(@config['self_url'] + '/' + team)
sleep 5
el = wait_for_selector("team__edit-button", :class)
el.click
el = wait_for_selector("team__settings-slack-notifications-enabled", :id)
el.click
el = wait_for_selector("team__settings-slack-webhook", :id)
el.click
el = wait_for_selector("team__settings-slack-webhook", :id)
el.send_keys "https://hooks.slack.com/services/T02528QUL/BBBBBBBBB/AAAAAAAAAAAAAAAAAAAAAAAA"
el = wait_for_selector("team__save-button", :class)
el.click
sleep 2
expect(@driver.find_element(:class, "message").nil?).to be(false)
end
it "should paginate project page", binx2: true do
page = api_create_team_project_claims_sources_and_redirect_to_project_page 21
page.load
el = wait_for_selector("//span[contains(text(), 'Sources')]", :xpath)
el.click
wait_for_selector("source-card", :class)
results = @driver.find_elements(:css, '.medias__item')
expect(results.size == 40).to be(true)
old = results.size
results.last.location_once_scrolled_into_view
size = wait_for_size_change(old, '.medias__item')
expect(size == 42).to be(true)
end
=begin
***Unstable
it "should show teams at /check/teams", bin4: true do
api_create_team
@driver.navigate.to @config['self_url'] + '/check/teams'
sleep 2
expect(@driver.find_elements(:css, '.teams').empty?).to be(false)
end
it "should add, edit, answer, update answer and delete geolocation task", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
wait_for_selector('.create-task__add-button')
# Create a task
expect(@driver.page_source.include?('Where?')).to be(false)
expect(@driver.page_source.include?('Task "Where?" created by')).to be(false)
@driver.find_element(:css, '.create-task__add-button').click
sleep 1
@driver.find_element(:css, '.create-task__add-geolocation').click
sleep 1
fill_field('#task-label-input', 'Where?')
@driver.find_element(:css, '.create-task__dialog-submit-button').click
sleep 2
expect(@driver.page_source.include?('Where?')).to be(true)
expect(@driver.page_source.include?('Task "Where?" created by')).to be(true)
# Answer task
expect(@driver.page_source.include?('Task "Where?" answered by')).to be(false)
fill_field('textarea[name="response"]', 'Salvador')
fill_field('textarea[name="coordinates"]', '-12.9015866, -38.560239')
@driver.action.send_keys(:enter).perform
wait_for_selector('.annotation--task_response_geolocation')
expect(@driver.page_source.include?('Task "Where?" answered by')).to be(true)
# Edit task
expect(@driver.page_source.include?('Task "Where?" edited to "Where was it?" by')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
sleep 2
@driver.find_element(:css, '.task-actions__edit').click
update_field('textarea[name="label"]', 'Where was it?')
@driver.find_element(:css, '.task__save').click
sleep 2
expect(@driver.page_source.include?('Task "Where?" edited to "Where was it?" by')).to be(true)
# Edit task answer
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Where was it?" answered by User With Email: "Vancouver"')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
@driver.find_element(:css, '.task-actions__edit-response').click
update_field('textarea[name="response"]', 'Vancouver')
update_field('textarea[name="coordinates"]', '49.2577142, -123.1941156')
@driver.action.send_keys(:enter).perform
sleep 2
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('Task "Where was it?" answered by User With Email: "Vancouver"')).to be(true)
# Delete task
delete_task('Where was it')
end
=end
=begin
# This tests is unreliable
# Todo: locally prints 1 / 2 and remote 0 / 1
#
# ccx 2017-10-13
it "should add image to media comment", binx3: true do
p "----------"
api_create_team_project_and_claim_and_redirect_to_media_page
# First, verify that there isn't any comment with image
expect(@driver.page_source.include?('This is my comment with image')).to be(false)
old = @driver.find_elements(:class, "annotations__list-item").length
# Add a comment as a command
fill_field('#cmd-input', 'This is my comment with image')
el = wait_for_selector('.add-annotation__insert-photo')
el.click
wait_for_selector('input[type=file]')
input = wait_for_selector('input[type=file]')
input.send_keys(File.join(File.dirname(__FILE__), 'test.png'))
el = wait_for_selector('.add-annotation__buttons button')
el.click
p old
p wait_for_size_change(old, "annotations__list-item", :class)
# Verify that comment was added to annotations list
expect(@driver.page_source.include?('This is my comment with image')).to be(true)
imgsrc = @driver.find_element(:css, '.annotation__card-thumbnail').attribute('src')
expect(imgsrc.match(/test\.png$/).nil?).to be(false)
# Zoom image
expect(@driver.find_elements(:css, '.image-current').empty?).to be(true)
@driver.find_element(:css, '.annotation__card-thumbnail').click
expect(@driver.find_elements(:css, '.image-current').empty?).to be(false)
@driver.action.send_keys(:escape).perform
expect(@driver.find_elements(:css, '.image-current').empty?).to be(true)
# Reload the page and verify that comment is still there
@driver.navigate.refresh
wait_for_selector('add-annotation__buttons', :class)
expect(@driver.page_source.include?('This is my comment with image')).to be(true)
imgsrc = @driver.find_element(:css, '.annotation__card-thumbnail').attribute('src')
expect(imgsrc.match(/test\.png$/).nil?).to be(false)
p "----------"
end
=end
=begin
***Unstable***
it "should add, edit, answer, update answer and delete datetime task", binx3: true do
media_pg = api_create_team_project_and_claim_and_redirect_to_media_page
wait_for_selector('.create-task__add-button')
# Create a task
expect(@driver.page_source.include?('When?')).to be(false)
expect(@driver.page_source.include?('Task "When?" created by')).to be(false)
@driver.find_element(:css, '.create-task__add-button').click
sleep 1
@driver.find_element(:css, '.create-task__add-datetime').click
sleep 1
fill_field('#task-label-input', 'When?')
@driver.find_element(:css, '.create-task__dialog-submit-button').click
sleep 2
expect(@driver.page_source.include?('When?')).to be(true)
expect(@driver.page_source.include?('Task "When?" created by')).to be(true)
# Answer task
expect(@driver.page_source.include?('Task "When?" answered by')).to be(false)
fill_field('input[name="hour"]', '23')
fill_field('input[name="minute"]', '59')
@driver.find_element(:css, '#task__response-date').click
sleep 2
@driver.find_elements(:css, 'button').last.click
sleep 1
fill_field('textarea[name="note"]', 'Test')
@driver.action.send_keys(:enter).perform
sleep 2
expect(@driver.page_source.include?('Task "When?" answered by')).to be(true)
# Edit task
expect(@driver.page_source.include?('Task "When?" edited to "When was it?" by')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
sleep 2
@driver.find_element(:css, '.task-actions__edit').click
update_field('textarea[name="label"]', 'When was it?')
@driver.find_element(:css, '.task__save').click
sleep 2
expect(@driver.page_source.include?('Task "When?" edited to "When was it?" by')).to be(true)
# Edit task response
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('12:34')).to be(false)
@driver.find_element(:css, '.task-actions__icon').click
@driver.find_element(:css, '.task-actions__edit-response').click
update_field('input[name="hour"]', '12')
update_field('input[name="minute"]', '34')
update_field('textarea[name="note"]', '')
@driver.action.send_keys(:enter).perform
sleep 2
expect(@driver.page_source.gsub(/<\/?[^>]*>/, '').include?('12:34')).to be(true)
# Delete task
delete_task('When was it')
end
=end
# Disabled because the functionality changed to use a
# background image in CSS instead of an <img> element.
#
# I think we could do something like this instead:
# https://stackoverflow.com/questions/11198882/how-do-you-test-if-a-div-has-a-certain-css-style-in-rspec-capybara
#
# it "should upload image when registering", bin5 : true do
# email, password, avatar = ["test-#{Time.now.to_i}@example.com", '12345678', File.join(File.dirname(__FILE__), 'test.png')]
# page = LoginPage.new(config: @config, driver: @driver).load
# .register_and_login_with_email(email: email, password: password, file: avatar)
# me_page = MePage.new(config: @config, driver: page.driver).load
# avatar = me_page.avatar
# expect(avatar.attribute('src').match(/test\.png/).nil?).to be(false)
# end
end
end
|
require 'selenium-webdriver'
require 'yaml'
require File.join(File.expand_path(File.dirname(__FILE__)), 'spec_helper')
require File.join(File.expand_path(File.dirname(__FILE__)), 'app_spec_helpers')
require_relative './pages/login_page.rb'
require_relative './pages/me_page.rb'
describe 'app' do
# Helpers
include AppSpecHelpers
# Start a webserver for the web app before the tests
# One-time suite setup: shared wait/config/fixture values, plus a seed pass
# through the UI (register, team, project, media) so later tests have data.
before :all do
  @wait = Selenium::WebDriver::Wait.new(timeout: 5)
  # Timestamped values keep fixtures unique across runs
  @email = 'sysops+' + Time.now.to_i.to_s + '@meedan.com'
  @password = '12345678'
  @source_url = 'https://twitter.com/ironmaiden?timestamp=' + Time.now.to_i.to_s
  @media_url = 'https://twitter.com/meedan/status/773947372527288320/?t=' + Time.now.to_i.to_s
  @config = YAML.load_file('config.yml')
  $source_id = nil
  $media_id = nil
  # Optionally swap in a test config.js for the built web app
  FileUtils.cp(@config['config_file_path'], '../build/web/js/config.js') unless @config['config_file_path'].nil?
  LoginPage.new(config: @config)
  .register_and_login_with_email(email: @email, password: @password)
  .create_team
  .create_project
  .create_media(input: @media_url)
  .logout_and_close
end
# Close the testing webserver after all tests run
# Restore the original config.js after the suite finishes
after :all do
  FileUtils.cp('../config.js', '../build/web/js/config.js')
end
# Start Google Chrome before each test
# Fresh remote Chrome session (via chromedriver_url) for every example
before :each do
  @driver = Selenium::WebDriver.for :remote, url: @config['chromedriver_url'], :desired_capabilities => :chrome
end
# Close the browser after each test; on failure, upload a screenshot to
# file.io and print its link together with the browser console output.
after :each do |example|
  if example.exception
    require 'rest-client' # lazy-load: only needed on the failure path
    path = '/tmp/' + (0...8).map { (65 + rand(26)).chr }.join + '.png'
    @driver.save_screenshot(path) # TODO: fix for page model tests
    response = RestClient.post('https://file.io?expires=2', file: File.new(path))
    link = JSON.parse(response.body)['link']
    # `example.description` is the human-readable test name; the previous
    # `example.to_s` printed the Example object's default representation.
    puts "Test \"#{example.description}\" failed! Check screenshot at #{link} and following browser output: #{console_logs}"
  end
  @driver.quit
end
# The tests themselves start here
context "web" do
# NOTE: assertions use expect(actual).to eq/match(expected) so failures report
# the actual value instead of an opaque "expected true, got false".
it "should access user confirmed page" do
  @driver.navigate.to @config['self_url'] + '/user/confirmed'
  expect(get_element('.main-title').text).to eq('Account Confirmed')
end
it "should access user unconfirmed page" do
  @driver.navigate.to @config['self_url'] + '/user/unconfirmed'
  expect(get_element('.main-title').text).to eq('Error')
end
it "should login using Facebook" do
  login_pg = LoginPage.new(config: @config)
  login_pg.login_with_facebook
  me_pg = MePage.new(config: @config, driver: login_pg.driver).load
  expect(me_pg.title).to eq(@config['facebook_name'])
end
it "should register and login using e-mail" do
  login_pg = LoginPage.new(config: @config)
  email, password = ['sysops+' + Time.now.to_i.to_s + '@meedan.com', '22345678']
  login_pg.register_and_login_with_email(email: email, password: password)
  me_pg = MePage.new(config: @config, driver: login_pg.driver).load # reuse tab
  expect(me_pg.title).to eq('User With Email')
end
it "should create a project for a team" do
  project_name = "Project #{Time.now}"
  project_pg = LoginPage.new(config: @config)
    .register_and_login_with_email(email: 'sysops+' + Time.now.to_i.to_s + '@meedan.com', password: Time.now.to_i.to_s)
    .create_team
    .create_project(name: project_name)
  expect(project_pg.driver.current_url.to_s).to match(/\/project\/[0-9]+$/)
  expect(project_pg.element('.team-sidebar__project-link').text).to eq(project_name)
end
it "should create project media" do
  page = LoginPage.new(config: @config)
    .login_with_email(email: @email, password: @password)
    .create_media(input: 'https://twitter.com/marcouza/status/771009514732650497?t=' + Time.now.to_i.to_s)
  expect(page.contains_string?('Added')).to be(true)
  expect(page.contains_string?('User With Email')).to be(true)
  expect(page.status_label).to eq('UNSTARTED')
  # The new media must also appear on the project page.
  page.driver.navigate.to @config['self_url']
  page.wait_for_element('.project')
  expect(page.contains_string?('Added')).to be(true)
  expect(page.contains_string?('User With Email')).to be(true)
  expect(page.status_label).to eq('UNSTARTED')
end
it "should register and redirect to newly created media" do
  page = LoginPage.new(config: @config)
    .login_with_email(email: @email, password: @password)
    .create_media(input: @media_url)
  expect(page.contains_string?('Added')).to be(true)
  expect(page.contains_string?('User With Email')).to be(true)
  expect(page.status_label).to eq('UNSTARTED')
  # Remember the media id for later examples (order-dependent global).
  $media_id = page.driver.current_url.to_s.match(/\/media\/([0-9]+)$/)[1]
  expect($media_id).not_to be_nil
end
it "should upload image when registering" do
  email, password, avatar = [@email + '.br', '12345678', File.join(File.dirname(__FILE__), 'test.png')]
  page = LoginPage.new(config: @config)
    .register_and_login_with_email(email: email, password: password, file: avatar)
  me_page = MePage.new(config: @config, driver: page.driver).load
  expect(me_page.avatar.attribute('src')).to match(/test\.png$/)
end
# NOTE: assertions use expect(actual).to eq/match(expected) for diagnostic
# failure messages instead of boolean-wrapped comparisons.
it "should redirect to 404 page" do
  @driver.navigate.to @config['self_url'] + '/something-that-does-not-exist'
  expect(get_element('.main-title').text).to eq('Not Found')
end
it "should click to go to Terms of Service" do
  @driver.navigate.to @config['self_url'] + '/tos'
  expect(get_element('.main-title').text).to eq('Terms of Service')
end
it "should redirect to login screen if not logged in" do
  @driver.navigate.to @config['self_url'] + '/teams'
  expect(get_element('.login-menu__heading').text).to eq('SIGN IN')
end
it "should login using Twitter" do
  login_with_twitter
  @driver.navigate.to @config['self_url'] + '/me'
  displayed_name = get_element('h2.source-name').text.upcase
  expect(displayed_name).to eq(@config['twitter_name'].upcase)
end
it "should login using Slack" do
  login_with_slack
  @driver.navigate.to @config['self_url'] + '/me'
  displayed_name = get_element('h2.source-name').text.upcase
  expect(displayed_name).to eq(@config['slack_name'].upcase)
end
it "should show team options at /teams" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/teams'
  sleep 3
  # find_elements returns [] when nothing matches, so be_empty is safe here.
  expect(@driver.find_elements(:css, '.teams')).not_to be_empty
end
it "should go to user page" do
  login_with_email
  @driver.find_element(:css, '.fa-ellipsis-h').click
  sleep 1
  @driver.find_element(:xpath, "//a[@id='link-me']").click
  expect(@driver.current_url.to_s).to match(/\/me$/)
  expect(get_element('.source-name').text).to eq('User With Email')
end
it "should go to source page through source/:id" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 5
  # Also publishes the id for later examples (order-dependent global).
  source_id = $source_id = @driver.find_element(:css, '.source').attribute('data-id')
  @driver.navigate.to team_url('source/' + source_id.to_s)
  sleep 1
  expect(get_element('.source-name').text).to eq('User With Email')
end
it "should go to source page through user/:id" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 5
  user_id = @driver.find_element(:css, '.source').attribute('data-user-id')
  @driver.navigate.to @config['self_url'] + '/user/' + user_id.to_s
  sleep 1
  expect(get_element('.source-name').text).to eq('User With Email')
end
it "should go back and forward in the history" do
  @driver.navigate.to @config['self_url']
  expect(@driver.current_url.to_s).to match(/\/$/)
  @driver.navigate.to @config['self_url'] + '/tos'
  expect(@driver.current_url.to_s).to match(/\/tos$/)
  @driver.navigate.back
  expect(@driver.current_url.to_s).to match(/\/$/)
  @driver.navigate.forward
  expect(@driver.current_url.to_s).to match(/\/tos$/)
end
# NOTE: include/eq matchers replace page_source.include?(...)).to be(true)
# so a failure shows what was (not) found.
it "should tag source from tags list" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 1
  # First, verify that there isn't any tag
  expect(@driver.find_elements(:css, '.ReactTags__tag')).to be_empty
  expect(@driver.page_source).not_to include('Tagged #selenium')
  # Add a tag from tags list
  fill_field('.ReactTags__tagInput input', 'selenium')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tag was added to tags list and annotations list
  expect(get_element('.ReactTags__tag span').text).to eq('selenium')
  expect(@driver.page_source).to include('Tagged #selenium')
  # Reload the page and verify that tags are still there
  @driver.navigate.refresh
  sleep 1
  expect(get_element('.ReactTags__tag span').text).to eq('selenium')
  expect(@driver.page_source).to include('Tagged #selenium')
end
it "should tag source as a command" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 1
  # First, verify that there isn't any tag
  expect(@driver.page_source).not_to include('Tagged #command')
  # Add a tag as a command
  fill_field('#cmd-input', '/tag command')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tag was added to tags list and annotations list
  expect(get_element('.ReactTags__tag span').text).to eq('command')
  expect(@driver.page_source).to include('Tagged #command')
  # Reload the page and verify that tags are still there
  @driver.navigate.refresh
  sleep 1
  expect(get_element('.ReactTags__tag span').text).to eq('command')
  expect(@driver.page_source).to include('Tagged #command')
end
it "should redirect to access denied page" do
  # First user creates a source...
  user_1 = { email: 'sysops+' + Time.now.to_i.to_s + '@meedan.com', password: '12345678' }
  login_pg = LoginPage.new(config: @config)
  login_pg.register_and_login_with_email(email: user_1[:email], password: user_1[:password])
  me_pg = MePage.new(config: @config, driver: login_pg.driver).load
  user_1_source_id = me_pg.source_id
  me_pg.logout_and_close
  # ...then a second user must be denied access to it.
  user_2 = { email: 'sysops+' + Time.now.to_i.to_s + '@meedan.com', password: '22345678' }
  login_pg = LoginPage.new(config: @config)
  login_pg.register_and_login_with_email(email: user_2[:email], password: user_2[:password])
  unauthorized_pg = SourcePage.new(id: user_1_source_id, config: @config, driver: login_pg.driver).load
  @wait.until { unauthorized_pg.contains_string?('Access Denied') }
  expect(unauthorized_pg.contains_string?('Access Denied')).to be(true)
  expect(unauthorized_pg.driver.current_url.to_s).to match(/\/forbidden$/)
end
it "should comment source as a command" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 1
  # First, verify that there isn't any comment
  expect(@driver.page_source).not_to include('This is my comment')
  # Add a comment as a command
  fill_field('#cmd-input', '/comment This is my comment')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that comment was added to annotations list
  expect(@driver.page_source).to include('This is my comment')
  # Reload the page and verify that comment is still there
  @driver.navigate.refresh
  sleep 1
  expect(@driver.page_source).to include('This is my comment')
end
it "should create source and redirect to newly created source" do
  login_with_email
  @driver.navigate.to team_url('sources/new')
  sleep 1
  fill_field('#create-account-url', @source_url)
  sleep 1
  press_button('#create-account-submit')
  sleep 15
  expect(@driver.current_url.to_s).to match(/\/source\/[0-9]+$/)
  expect(get_element('.source-name').text).to eq('Iron Maiden')
end
# NOTE: eq/match/include/be_empty matchers replace boolean-wrapped assertions
# so failures report the actual value.
it "should not create duplicated source" do
  login_with_email
  @driver.navigate.to team_url('sources/new')
  sleep 1
  fill_field('#create-account-url', @source_url)
  sleep 1
  press_button('#create-account-submit')
  sleep 10
  # Should land on the existing source page instead of creating a new one.
  expect(@driver.current_url.to_s).to match(/\/source\/[0-9]+$/)
  expect(get_element('.source-name').text).to eq('Iron Maiden')
end
it "should not create report as source" do
  login_with_email
  @driver.navigate.to team_url('sources/new')
  sleep 1
  fill_field('#create-account-url', 'https://www.youtube.com/watch?v=b708rEG7spI')
  sleep 1
  press_button('#create-account-submit')
  sleep 10
  expect(@driver.current_url.to_s).not_to match(/\/source\/[0-9]+$/)
  expect(get_element('.create-account .message').text).to eq('Validation failed: Sorry, this is not a profile')
end
it "should tag source multiple times with commas with command" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 1
  # Add tags as a command
  fill_field('#cmd-input', '/tag foo, bar')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tags were added to tags list and annotations list
  tags = @driver.find_elements(:css, '.ReactTags__tag span')
  expect(tags.select { |s| s.text == 'foo' }).not_to be_empty
  expect(@driver.page_source).to include('Tagged #foo')
  expect(tags.select { |s| s.text == 'bar' }).not_to be_empty
  expect(@driver.page_source).to include('Tagged #bar')
end
it "should tag source multiple times with commas from tags list" do
  login_with_email
  @driver.navigate.to @config['self_url'] + '/me'
  sleep 1
  # Add tags from tags list
  fill_field('.ReactTags__tagInput input', 'bla,bli')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tags were added to tags list and annotations list
  tags = @driver.find_elements(:css, '.ReactTags__tag span')
  expect(tags.select { |s| s.text == 'bla' }).not_to be_empty
  expect(@driver.page_source).to include('Tagged #bla')
  expect(tags.select { |s| s.text == 'bli' }).not_to be_empty
  expect(@driver.page_source).to include('Tagged #bli')
end
it "should not add a duplicated tag from tags list" do
  login_with_email
  @driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
  sleep 1
  # Validate assumption that tag does not exist
  get_element('.media-actions').click
  get_element('.media-actions__menu-item').click
  tag = @driver.find_elements(:css, '.ReactTags__tag span').select { |s| s.text == 'bla' }
  expect(tag).to be_empty
  # Add tag from tags list
  fill_field('.ReactTags__tagInput input', 'bla')
  @driver.action.send_keys(:enter).perform
  expect(get_element('.ReactTags__tag span').text).to eq('bla')
  # Try to add duplicate
  fill_field('.ReactTags__tagInput input', 'bla')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tag is not added and that error message is displayed
  tag = @driver.find_elements(:css, '.ReactTags__tag span').select { |s| s.text == 'bla' }
  expect(tag.size).to eq(1)
  expect(@driver.page_source).to include('This tag already exists')
end
# NOTE: eq/match/include matchers replace boolean-wrapped assertions so
# failures report the actual value.
it "should not add a duplicated tag from command line" do
  media_pg = LoginPage.new(config: @config)
    .login_with_email(email: @email, password: @password)
    .click_media
  new_tag = Time.now.to_i.to_s
  # Validate assumption that tag does not exist
  expect(media_pg.has_tag?(new_tag)).to be(false)
  # Try to add from command line
  media_pg.add_annotation("/tag #{new_tag}")
  Selenium::WebDriver::Wait.new(timeout: 10).until { media_pg.has_tag?(new_tag) } # TODO: wait inside MediaPage
  expect(media_pg.has_tag?(new_tag)).to be(true)
  # Try to add duplicate from command line
  media_pg.add_annotation("/tag #{new_tag}")
  # Verify that tag is not added and that error message is displayed
  expect(media_pg.tags.count(new_tag)).to eq(1)
  expect(media_pg.contains_string?('This tag already exists')).to be(true)
end
it "should not create duplicated media if registered" do
  login_with_email
  sleep 3
  fill_field('#create-media-input', @media_url)
  sleep 2
  press_button('#create-media-submit')
  sleep 10
  # Should be redirected to the existing media instead of creating a new one.
  expect(@driver.current_url.to_s).to match(/\/media\/[0-9]+$/)
end
it "should not create source as media if registered" do
  login_with_email
  sleep 3
  fill_field('#create-media-input', 'https://www.facebook.com/ironmaidenbeer/?fref=ts')
  sleep 1
  press_button('#create-media-submit')
  sleep 10
  expect(@driver.current_url.to_s).not_to match(/\/media\/[0-9]+$/)
  message = get_element('.create-media .message').text
  expect(message).to eq('Something went wrong! Try pasting the text of this post instead, or adding a different link.')
end
it "should tag media from tags list" do
  login_with_email
  @driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
  sleep 1
  # First, verify that there isn't any tag
  expect(@driver.page_source).not_to include('Tagged #tellurium')
  # Add a tag from tags list
  get_element('.media-actions').click
  get_element('.media-actions__menu-item').click
  fill_field('.ReactTags__tagInput input', 'tellurium')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tag was added to tags list and annotations list
  expect(get_element('.ReactTags__tag span').text).to eq('tellurium')
  expect(@driver.page_source).to include('Tagged #tellurium')
  # Reload the page and verify that tags are still there
  @driver.navigate.refresh
  sleep 1
  get_element('.media-actions').click
  get_element('.media-actions__menu-item').click
  expect(get_element('.ReactTags__tag span').text).to eq('tellurium')
  expect(@driver.page_source).to include('Tagged #tellurium')
end
it "should tag media as a command" do
  login_with_email
  @driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
  sleep 1
  # First, verify that there isn't any tag
  expect(@driver.page_source).not_to include('Tagged #command')
  # Add a tag as a command
  fill_field('#cmd-input', '/tag command')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that tag was added to tags list and annotations list
  expect(get_element('.media-tags__tag').text).to eq('command')
  expect(@driver.page_source).to include('Tagged #command')
  # Reload the page and verify that tags are still there
  @driver.navigate.refresh
  sleep 1
  expect(get_element('.media-tags__tag').text).to eq('command')
  expect(@driver.page_source).to include('Tagged #command')
end
# NOTE: include matchers replace page_source.include?(...)).to be(true) so a
# failure shows what was (not) found.
it "should comment media as a command" do
  login_with_email
  @driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
  sleep 1
  # First, verify that there isn't any comment
  expect(@driver.page_source).not_to include('This is my comment')
  # Add a comment as a command
  fill_field('#cmd-input', '/comment This is my comment')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that comment was added to annotations list
  expect(@driver.page_source).to include('This is my comment')
  # Reload the page and verify that comment is still there
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source).to include('This is my comment')
end
it "should set status to media as a command" do
  login_with_email
  @driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
  sleep 1
  # Add a status as a command
  fill_field('#cmd-input', '/status In Progress')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that status was added to annotations list
  expect(@driver.page_source).to include('Status')
  # Reload the page and verify that status is still there
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source).to include('Status')
end
it "should flag media as a command" do
  login_with_email
  @driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
  sleep 1
  # First, verify that there isn't any flag
  expect(@driver.page_source).not_to include('Flag')
  # Add a flag as a command
  fill_field('#cmd-input', '/flag Spam')
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Verify that flag was added to annotations list
  expect(@driver.page_source).to include('Flag')
  # Reload the page and verify that flag is still there
  @driver.navigate.refresh
  sleep 3
  expect(@driver.page_source).to include('Flag')
end
it "should edit project" do
  login_with_email
  @driver.navigate.to @config['self_url']
  sleep 1
  title = "Project #{Time.now}"
  fill_field('#create-project-title', title)
  @driver.action.send_keys(:enter).perform
  sleep 5
  # Cancelling the edit form must discard the changes...
  @driver.find_element(:css, '.project-header__project-settings-icon').click
  sleep 1
  @driver.find_element(:css, '.project-header__project-setting--edit').click
  sleep 1
  fill_field('.project-header__project-name-input', 'Changed title')
  fill_field('.project-header__project-description-input', 'Set description')
  @driver.find_element(:css, '.project-header__project-editing-button--cancel').click
  sleep 3
  expect(@driver.page_source).not_to include('Changed title')
  expect(@driver.page_source).not_to include('Set description')
  # ...while saving must persist them.
  @driver.find_element(:css, '.project-header__project-settings-icon').click
  sleep 1
  @driver.find_element(:css, '.project-header__project-setting--edit').click
  sleep 1
  fill_field('.project-header__project-name-input', 'Changed title')
  fill_field('.project-header__project-description-input', 'Set description')
  @driver.find_element(:css, '.project-header__project-editing-button--save').click
  sleep 3
  expect(@driver.page_source).to include('Changed title')
  expect(@driver.page_source).to include('Set description')
end
# it "should comment project as a command" do
# login_with_email
# @driver.navigate.to @config['self_url']
# sleep 1
# title = "Project #{Time.now}"
# fill_field('#create-project-title', title)
# @driver.action.send_keys(:enter).perform
# sleep 5
# # First, verify that there isn't any comment
# expect(@driver.page_source.include?('This is my comment')).to be(false)
# # Add a comment as a command
# fill_field('#cmd-input', '/comment This is my comment')
# @driver.action.send_keys(:enter).perform
# sleep 5
# # Verify that comment was added to annotations list
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# # Reload the page and verify that comment is still there
# @driver.navigate.refresh
# sleep 3
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# end
it "should redirect to 404 page if id does not exist" do
  login_with_email
  url = @driver.current_url.to_s
  @driver.navigate.to url.gsub(/project\/([0-9]+).*/, 'project/999')
  expect(get_element('.main-title').text).to eq('Not Found')
  expect(@driver.current_url.to_s).to match(/\/404$/)
end
it "should change a media status via the dropdown menu" do
  register_with_email(true, 'sysops+' + Time.now.to_i.to_s + '@meedan.com')
  wait = Selenium::WebDriver::Wait.new(timeout: 10)
  wait.until { @driver.find_element(:css, '.team') }
  create_project
  wait.until { @driver.find_element(:css, '.project') }
  create_media('https://twitter.com/marcouza/status/771009514732650497?t=' + Time.now.to_i.to_s)
  wait.until { @driver.find_element(:css, '.media') }
  current_status = @driver.find_element(:css, '.media-status__label')
  expect(current_status.text).to eq('UNSTARTED')
  current_status.click
  verified_menu_item = (wait.until { @driver.find_element(:css, '.media-status__menu-item--verified') })
  verified_menu_item.click
  sleep 3
  current_status = @driver.find_element(:css, '.media-status__label')
  expect(current_status.text).to eq('VERIFIED')
  # find_elements (plural) returns [] when missing, so the expectation fails
  # cleanly; the old `!!find_element` could only raise or be true.
  expect(@driver.find_elements(:css, '.annotation__status--verified')).not_to be_empty
end
# NOTE: eq/include/be_empty matchers replace boolean-wrapped assertions so
# failures report the actual value.
it "should logout" do
  unless login_or_register_with_email
    create_team
    create_project
  end
  @driver.navigate.to @config['self_url']
  menu = @wait.until { @driver.find_element(:css, '.fa-ellipsis-h') }
  menu.click
  logout = @wait.until { @driver.find_element(:css, '.project-header__logout') }
  logout.click
  @wait.until { @driver.find_element(:css, '#login-menu') }
  expect(@driver.page_source).to include('Sign in')
end
it "should ask to join team" do
  skip("Needs to be implemented")
end
it "should redirect to team page if user asking to join a team is already a member" do
  skip("Needs to be implemented")
end
it "should reject member to join team" do
  skip("Needs to be implemented")
end
it "should accept member to join team" do
  skip("Needs to be implemented")
end
it "should change member role" do
  skip("Needs to be implemented")
end
it "should delete member from team" do
  skip("Needs to be implemented")
end
it "should delete annotation from annotations list (for media, source and project)" do
  skip("Needs to be implemented")
end
it "should delete tag from tags list (for media and source)" do
  skip("Needs to be implemented")
end
it "should edit team" do
  skip("Needs to be implemented")
end
it "should show 'manage team' link only to team owners" do
  skip("Needs to be implemented")
end
it "should show 'edit project' link only to users with 'update project' permission" do
  skip("Needs to be implemented")
end
it "should switch teams" do
  login_or_register_with_email
  # Build two teams, each with one project containing one media item.
  @driver.navigate.to "#{@config['self_url']}/teams/new"
  create_team
  wait = Selenium::WebDriver::Wait.new(timeout: 5)
  team_1_name = @driver.find_element(:css, '.team__name').text
  create_project
  project_1_id = (wait.until { @driver.current_url.to_s.match(/\/project\/([0-9]+)$/) })[1]
  create_media(@media_url)
  wait.until { @driver.find_element(:css, '.media') }
  @driver.navigate.to "#{@config['self_url']}/teams/new"
  wait.until { @driver.find_element(:css, '.create-team') }
  create_team
  team_2_name = @driver.find_element(:css, '.team__name').text
  create_project
  project_2_id = (wait.until { @driver.current_url.to_s.match(/\/project\/([0-9]+)$/) })[1]
  create_media(@media_url)
  wait.until { @driver.find_element(:css, '.media') }
  # Switching to team 1 must show team 1's project and media.
  @driver.navigate.to @config['self_url'] + '/teams'
  wait.until { @driver.find_element(:css, '.teams') }
  (wait.until { @driver.find_element(:xpath, "//*[contains(text(), '#{team_1_name}')]") }).click
  wait.until { @driver.find_element(:css, '.team') }
  expect(@driver.find_element(:css, '.team__name').text).to eq(team_1_name)
  @driver.find_element(:css, '.team__project-link').click
  wait.until { @driver.find_element(:css, '.project') }
  media_1_url = @driver.find_element(:css, '.media-detail__check-timestamp').attribute('href')
  expect(media_1_url).to include("/project/#{project_1_id}/media/")
  # Switching to team 2 must show team 2's project and media.
  @driver.navigate.to @config['self_url'] + '/teams'
  wait.until { @driver.find_element(:css, '.teams') }
  (wait.until { @driver.find_element(:xpath, "//*[contains(text(), '#{team_2_name}')]") }).click
  wait.until { @driver.find_element(:css, '.team') }
  expect(@driver.find_element(:css, '.team__name').text).to eq(team_2_name)
  @driver.find_element(:css, '.team__project-link').click
  wait.until { @driver.find_element(:css, '.project') }
  media_2_url = @driver.find_element(:css, '.media-detail__check-timestamp').attribute('href')
  # Leading slash added for consistency (and strictness) with the team-1 check.
  expect(media_2_url).to include("/project/#{project_2_id}/media/")
end
it "should cancel request through switch teams" do
  skip("Needs to be implemented")
end
it "should auto refresh project page when media is created remotely" do
  skip("Needs to be implemented")
end
it "should give 404 when trying to acess a media that is not related to the project on the URL" do
  skip("Needs to be implemented")
end
it "should linkify URLs on comments" do
  skip("Needs to be implemented")
end
it "should add and remove suggested tags" do
  skip("Needs to be implemented")
end
it "should find all medias with an empty search" do
  skip("Needs to be implemented")
end
it "should find medias when searching by keyword" do
  skip("Needs to be implemented")
end
it "should find medias when searching by status" do
  skip("Needs to be implemented")
end
it "should find medias when searching by tag" do
  skip("Needs to be implemented")
end
it "should edit the title of a media" do
  skip("Needs to be implemented")
end
end
end
# Refactor /teams page test
require 'selenium-webdriver'
require 'yaml'
require File.join(File.expand_path(File.dirname(__FILE__)), 'spec_helper')
require File.join(File.expand_path(File.dirname(__FILE__)), 'app_spec_helpers')
require_relative './pages/login_page.rb'
require_relative './pages/me_page.rb'
describe 'app' do
# Helpers
include AppSpecHelpers
# Start a webserver for the web app before the tests
before :all do
@wait = Selenium::WebDriver::Wait.new(timeout: 5)
@email = 'sysops+' + Time.now.to_i.to_s + '@meedan.com'
@password = '12345678'
@source_url = 'https://twitter.com/ironmaiden?timestamp=' + Time.now.to_i.to_s
@media_url = 'https://twitter.com/meedan/status/773947372527288320/?t=' + Time.now.to_i.to_s
@config = YAML.load_file('config.yml')
$source_id = nil
$media_id = nil
FileUtils.cp(@config['config_file_path'], '../build/web/js/config.js') unless @config['config_file_path'].nil?
LoginPage.new(config: @config)
.register_and_login_with_email(email: @email, password: @password)
.create_team
.create_project
.create_media(input: @media_url)
.logout_and_close
end
# Close the testing webserver after all tests run
after :all do
FileUtils.cp('../config.js', '../build/web/js/config.js')
end
# Start Google Chrome before each test
before :each do
@driver = Selenium::WebDriver.for :remote, url: @config['chromedriver_url'], :desired_capabilities => :chrome
end
# Close Google Chrome after each test
after :each do |example|
if example.exception
require 'rest-client'
path = '/tmp/' + (0...8).map{ (65 + rand(26)).chr }.join + '.png'
@driver.save_screenshot(path) # TODO: fix for page model tests
response = RestClient.post('https://file.io?expires=2', file: File.new(path))
link = JSON.parse(response.body)['link']
puts "Test \"#{example.to_s}\" failed! Check screenshot at #{link} and following browser output: #{console_logs}"
end
@driver.quit
end
# The tests themselves start here
context "web" do
it "should access user confirmed page" do
@driver.navigate.to @config['self_url'] + '/user/confirmed'
title = get_element('.main-title')
expect(title.text == 'Account Confirmed').to be(true)
end
it "should access user unconfirmed page" do
@driver.navigate.to @config['self_url'] + '/user/unconfirmed'
title = get_element('.main-title')
expect(title.text == 'Error').to be(true)
end
it "should login using Facebook" do
login_pg = LoginPage.new(config: @config)
login_pg.login_with_facebook
me_pg = MePage.new(config: @config, driver: login_pg.driver).load
displayed_name = me_pg.title
expected_name = @config['facebook_name']
expect(displayed_name).to eq(expected_name)
end
it "should register and login using e-mail" do
login_pg = LoginPage.new(config: @config)
email, password = ['sysops+' + Time.now.to_i.to_s + '@meedan.com', '22345678']
login_pg.register_and_login_with_email(email: email, password: password)
me_pg = MePage.new(config: @config, driver: login_pg.driver).load # reuse tab
displayed_name = me_pg.title
expect(displayed_name == 'User With Email').to be(true)
end
it "should create a project for a team" do
project_name = "Project #{Time.now}"
project_pg = LoginPage.new(config: @config)
.register_and_login_with_email(email: 'sysops+' + Time.now.to_i.to_s + '@meedan.com', password: Time.now.to_i.to_s)
.create_team
.create_project(name: project_name)
expect(project_pg.driver.current_url.to_s.match(/\/project\/[0-9]+$/).nil?).to be(false)
expect(project_pg.element('.team-sidebar__project-link').text == project_name).to be(true)
end
it "should create project media" do
page = LoginPage.new(config: @config)
.login_with_email(email: @email, password: @password)
.create_media(input: 'https://twitter.com/marcouza/status/771009514732650497?t=' + Time.now.to_i.to_s)
expect(page.contains_string?('Added')).to be(true)
expect(page.contains_string?('User With Email')).to be(true)
expect(page.status_label == 'UNSTARTED').to be(true)
page.driver.navigate.to @config['self_url']
page.wait_for_element('.project')
expect(page.contains_string?('Added')).to be(true)
expect(page.contains_string?('User With Email')).to be(true)
expect(page.status_label == 'UNSTARTED').to be(true)
end
it "should register and redirect to newly created media" do
page = LoginPage.new(config: @config)
.login_with_email(email: @email, password: @password)
.create_media(input: @media_url)
expect(page.contains_string?('Added')).to be(true)
expect(page.contains_string?('User With Email')).to be(true)
expect(page.status_label == 'UNSTARTED').to be(true)
$media_id = page.driver.current_url.to_s.match(/\/media\/([0-9]+)$/)[1]
expect($media_id.nil?).to be(false)
end
it "should upload image when registering" do
email, password, avatar = [@email + '.br', '12345678', File.join(File.dirname(__FILE__), 'test.png')]
page = LoginPage.new(config: @config)
.register_and_login_with_email(email: email, password: password, file: avatar)
me_page = MePage.new(config: @config, driver: page.driver).load
avatar = me_page.avatar
expect(avatar.attribute('src').match(/test\.png$/).nil?).to be(false)
end
it "should redirect to 404 page" do
@driver.navigate.to @config['self_url'] + '/something-that-does-not-exist'
title = get_element('.main-title')
expect(title.text == 'Not Found').to be(true)
end
it "should click to go to Terms of Service" do
@driver.navigate.to @config['self_url'] + '/tos'
title = get_element('.main-title')
expect(title.text == 'Terms of Service').to be(true)
end
it "should redirect to login screen if not logged in" do
@driver.navigate.to @config['self_url'] + '/teams'
title = get_element('.login-menu__heading')
expect(title.text == 'SIGN IN').to be(true)
end
it "should login using Twitter" do
login_with_twitter
@driver.navigate.to @config['self_url'] + '/me'
displayed_name = get_element('h2.source-name').text.upcase
expected_name = @config['twitter_name'].upcase
expect(displayed_name == expected_name).to be(true)
end
it "should login using Slack" do
login_with_slack
@driver.navigate.to @config['self_url'] + '/me'
displayed_name = get_element('h2.source-name').text.upcase
expected_name = @config['slack_name'].upcase
expect(displayed_name == expected_name).to be(true)
end
it "should show team options at /teams" do
page = LoginPage.new(config: @config).login_with_email(email: @email, password: @password)
page.driver.navigate.to @config['self_url'] + '/teams'
page.wait_for_element('.teams')
expect(page.driver.find_elements(:css, '.teams').empty?).to be(false)
end
it "should go to user page" do
login_with_email
@driver.find_element(:css, '.fa-ellipsis-h').click
sleep 1
@driver.find_element(:xpath, "//a[@id='link-me']").click
expect((@driver.current_url.to_s =~ /\/me$/).nil?).to be(false)
title = get_element('.source-name')
expect(title.text == 'User With Email').to be(true)
end
it "should go to source page through source/:id" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 5
source_id = $source_id = @driver.find_element(:css, '.source').attribute('data-id')
@driver.navigate.to team_url('source/' + source_id.to_s)
sleep 1
title = get_element('.source-name')
expect(title.text == 'User With Email').to be(true)
end
it "should go to source page through user/:id" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 5
user_id = @driver.find_element(:css, '.source').attribute('data-user-id')
@driver.navigate.to @config['self_url'] + '/user/' + user_id.to_s
sleep 1
title = get_element('.source-name')
expect(title.text == 'User With Email').to be(true)
end
it "should go back and forward in the history" do
@driver.navigate.to @config['self_url']
expect((@driver.current_url.to_s =~ /\/$/).nil?).to be(false)
@driver.navigate.to @config['self_url'] + '/tos'
expect((@driver.current_url.to_s =~ /\/tos$/).nil?).to be(false)
@driver.navigate.back
expect((@driver.current_url.to_s =~ /\/$/).nil?).to be(false)
@driver.navigate.forward
expect((@driver.current_url.to_s =~ /\/tos$/).nil?).to be(false)
end
it "should tag source from tags list" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 1
# First, verify that there isn't any tag
expect(@driver.find_elements(:css, '.ReactTags__tag').empty?).to be(true)
expect(@driver.page_source.include?('Tagged #selenium')).to be(false)
# Add a tag from tags list
fill_field('.ReactTags__tagInput input', 'selenium')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tag was added to tags list and annotations list
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'selenium').to be(true)
expect(@driver.page_source.include?('Tagged #selenium')).to be(true)
# Reload the page and verify that tags are still there
@driver.navigate.refresh
sleep 1
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'selenium').to be(true)
expect(@driver.page_source.include?('Tagged #selenium')).to be(true)
end
it "should tag source as a command" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 1
# First, verify that there isn't any tag
expect(@driver.page_source.include?('Tagged #command')).to be(false)
# Add a tag as a command
fill_field('#cmd-input', '/tag command')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tag was added to tags list and annotations list
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'command').to be(true)
expect(@driver.page_source.include?('Tagged #command')).to be(true)
# Reload the page and verify that tags are still there
@driver.navigate.refresh
sleep 1
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'command').to be(true)
expect(@driver.page_source.include?('Tagged #command')).to be(true)
end
it "should redirect to access denied page" do
user_1 = {email: 'sysops+' + Time.now.to_i.to_s + '@meedan.com', password: '12345678'}
login_pg = LoginPage.new(config: @config)
login_pg.register_and_login_with_email(email: user_1[:email], password: user_1[:password])
me_pg = MePage.new(config: @config, driver: login_pg.driver).load
user_1_source_id = me_pg.source_id
me_pg.logout_and_close
user_2 = {email: 'sysops+' + Time.now.to_i.to_s + '@meedan.com', password: '22345678'}
login_pg = LoginPage.new(config: @config)
login_pg.register_and_login_with_email(email: user_2[:email], password: user_2[:password])
unauthorized_pg = SourcePage.new(id: user_1_source_id, config: @config, driver: login_pg.driver).load
@wait.until { unauthorized_pg.contains_string?('Access Denied') }
expect(unauthorized_pg.contains_string?('Access Denied')).to be(true)
expect((unauthorized_pg.driver.current_url.to_s =~ /\/forbidden$/).nil?).to be(false)
end
it "should comment source as a command" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 1
# First, verify that there isn't any comment
expect(@driver.page_source.include?('This is my comment')).to be(false)
# Add a comment as a command
fill_field('#cmd-input', '/comment This is my comment')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that comment was added to annotations list
expect(@driver.page_source.include?('This is my comment')).to be(true)
# Reload the page and verify that comment is still there
@driver.navigate.refresh
sleep 1
expect(@driver.page_source.include?('This is my comment')).to be(true)
end
it "should create source and redirect to newly created source" do
login_with_email
@driver.navigate.to team_url('sources/new')
sleep 1
fill_field('#create-account-url', @source_url)
sleep 1
press_button('#create-account-submit')
sleep 15
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(false)
title = get_element('.source-name').text
expect(title == 'Iron Maiden').to be(true)
end
it "should not create duplicated source" do
login_with_email
@driver.navigate.to team_url('sources/new')
sleep 1
fill_field('#create-account-url', @source_url)
sleep 1
press_button('#create-account-submit')
sleep 10
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(false)
title = get_element('.source-name').text
expect(title == 'Iron Maiden').to be(true)
end
it "should not create report as source" do
login_with_email
@driver.navigate.to team_url('sources/new')
sleep 1
fill_field('#create-account-url', 'https://www.youtube.com/watch?v=b708rEG7spI')
sleep 1
press_button('#create-account-submit')
sleep 10
expect(@driver.current_url.to_s.match(/\/source\/[0-9]+$/).nil?).to be(true)
message = get_element('.create-account .message').text
expect(message == 'Validation failed: Sorry, this is not a profile').to be(true)
end
it "should tag source multiple times with commas with command" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 1
# Add tags as a command
fill_field('#cmd-input', '/tag foo, bar')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tags were added to tags list and annotations list
tag = @driver.find_elements(:css, '.ReactTags__tag span').select{ |s| s.text == 'foo' }
expect(tag.empty?).to be(false)
expect(@driver.page_source.include?('Tagged #foo')).to be(true)
tag = @driver.find_elements(:css, '.ReactTags__tag span').select{ |s| s.text == 'bar' }
expect(tag.empty?).to be(false)
expect(@driver.page_source.include?('Tagged #bar')).to be(true)
end
it "should tag source multiple times with commas from tags list" do
login_with_email
@driver.navigate.to @config['self_url'] + '/me'
sleep 1
# Add tags from tags list
fill_field('.ReactTags__tagInput input', 'bla,bli')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tags were added to tags list and annotations list
tag = @driver.find_elements(:css, '.ReactTags__tag span').select{ |s| s.text == 'bla' }
expect(tag.empty?).to be(false)
expect(@driver.page_source.include?('Tagged #bla')).to be(true)
tag = @driver.find_elements(:css, '.ReactTags__tag span').select{ |s| s.text == 'bli' }
expect(tag.empty?).to be(false)
expect(@driver.page_source.include?('Tagged #bli')).to be(true)
end
it "should not add a duplicated tag from tags list" do
login_with_email
@driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
sleep 1
# Validate assumption that tag does not exist
get_element('.media-actions').click
get_element('.media-actions__menu-item').click
tag = @driver.find_elements(:css, '.ReactTags__tag span').select{ |s| s.text == 'bla' }
expect(tag.size == 0).to be(true)
# Add tag from tags list
fill_field('.ReactTags__tagInput input', 'bla')
@driver.action.send_keys(:enter).perform
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'bla').to be(true)
# Try to add duplicate
fill_field('.ReactTags__tagInput input', 'bla')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tag is not added and that error message is displayed
tag = @driver.find_elements(:css, '.ReactTags__tag span').select{ |s| s.text == 'bla' }
expect(tag.size == 1).to be(true)
expect(@driver.page_source.include?('This tag already exists')).to be(true)
end
it "should not add a duplicated tag from command line" do
media_pg = LoginPage.new(config: @config)
.login_with_email(email: @email, password: @password)
.click_media
new_tag = Time.now.to_i.to_s
# Validate assumption that tag does not exist
expect(media_pg.has_tag?(new_tag)).to be(false)
# Try to add from command line
media_pg.add_annotation("/tag #{new_tag}")
Selenium::WebDriver::Wait.new(timeout: 10).until { media_pg.has_tag?(new_tag) } # TODO: wait inside MediaPage
expect(media_pg.has_tag?(new_tag)).to be(true)
# Try to add duplicate from command line
media_pg.add_annotation("/tag #{new_tag}")
# Verify that tag is not added and that error message is displayed
expect(media_pg.tags.count(new_tag)).to be(1)
expect(media_pg.contains_string?('This tag already exists')).to be(true)
end
it "should not create duplicated media if registered" do
login_with_email
sleep 3
fill_field('#create-media-input', @media_url)
sleep 2
press_button('#create-media-submit')
sleep 10
expect(@driver.current_url.to_s.match(/\/media\/[0-9]+$/).nil?).to be(false)
end
it "should not create source as media if registered" do
login_with_email
sleep 3
fill_field('#create-media-input', 'https://www.facebook.com/ironmaidenbeer/?fref=ts')
sleep 1
press_button('#create-media-submit')
sleep 10
expect(@driver.current_url.to_s.match(/\/media\/[0-9]+$/).nil?).to be(true)
message = get_element('.create-media .message').text
expect(message == 'Something went wrong! Try pasting the text of this post instead, or adding a different link.').to be(true)
end
it "should tag media from tags list" do
login_with_email
@driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
sleep 1
# First, verify that there isn't any tag
expect(@driver.page_source.include?('Tagged #tellurium')).to be(false)
# Add a tag from tags list
get_element('.media-actions').click
get_element('.media-actions__menu-item').click
fill_field('.ReactTags__tagInput input', 'tellurium')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tag was added to tags list and annotations list
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'tellurium').to be(true)
expect(@driver.page_source.include?('Tagged #tellurium')).to be(true)
# Reload the page and verify that tags are still there
@driver.navigate.refresh
sleep 1
get_element('.media-actions').click
get_element('.media-actions__menu-item').click
tag = get_element('.ReactTags__tag span')
expect(tag.text == 'tellurium').to be(true)
expect(@driver.page_source.include?('Tagged #tellurium')).to be(true)
end
it "should tag media as a command" do
login_with_email
@driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
sleep 1
# First, verify that there isn't any tag
expect(@driver.page_source.include?('Tagged #command')).to be(false)
# Add a tag as a command
fill_field('#cmd-input', '/tag command')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that tag was added to tags list and annotations list
tag = get_element('.media-tags__tag')
expect(tag.text == 'command').to be(true)
expect(@driver.page_source.include?('Tagged #command')).to be(true)
# Reload the page and verify that tags are still there
@driver.navigate.refresh
sleep 1
tag = get_element('.media-tags__tag')
expect(tag.text == 'command').to be(true)
expect(@driver.page_source.include?('Tagged #command')).to be(true)
end
it "should comment media as a command" do
login_with_email
@driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
sleep 1
# First, verify that there isn't any comment
expect(@driver.page_source.include?('This is my comment')).to be(false)
# Add a comment as a command
fill_field('#cmd-input', '/comment This is my comment')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that comment was added to annotations list
expect(@driver.page_source.include?('This is my comment')).to be(true)
# Reload the page and verify that comment is still there
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('This is my comment')).to be(true)
end
it "should set status to media as a command" do
login_with_email
@driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
sleep 1
# Add a status as a command
fill_field('#cmd-input', '/status In Progress')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that status was added to annotations list
expect(@driver.page_source.include?('Status')).to be(true)
# Reload the page and verify that status is still there
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('Status')).to be(true)
end
it "should flag media as a command" do
login_with_email
@driver.navigate.to team_url('project/' + get_project + '/media/' + $media_id)
sleep 1
# First, verify that there isn't any flag
expect(@driver.page_source.include?('Flag')).to be(false)
# Add a flag as a command
fill_field('#cmd-input', '/flag Spam')
@driver.action.send_keys(:enter).perform
sleep 5
# Verify that flag was added to annotations list
expect(@driver.page_source.include?('Flag')).to be(true)
# Reload the page and verify that flag is still there
@driver.navigate.refresh
sleep 3
expect(@driver.page_source.include?('Flag')).to be(true)
end
it "should edit project" do
login_with_email
@driver.navigate.to @config['self_url']
sleep 1
title = "Project #{Time.now}"
fill_field('#create-project-title', title)
@driver.action.send_keys(:enter).perform
sleep 5
@driver.find_element(:css, '.project-header__project-settings-icon').click
sleep 1
@driver.find_element(:css, '.project-header__project-setting--edit').click
sleep 1
fill_field('.project-header__project-name-input', 'Changed title')
fill_field('.project-header__project-description-input', 'Set description')
@driver.find_element(:css, '.project-header__project-editing-button--cancel').click
sleep 3
expect(@driver.page_source.include?('Changed title')).to be(false)
expect(@driver.page_source.include?('Set description')).to be(false)
@driver.find_element(:css, '.project-header__project-settings-icon').click
sleep 1
@driver.find_element(:css, '.project-header__project-setting--edit').click
sleep 1
fill_field('.project-header__project-name-input', 'Changed title')
fill_field('.project-header__project-description-input', 'Set description')
@driver.find_element(:css, '.project-header__project-editing-button--save').click
sleep 3
expect(@driver.page_source.include?('Changed title')).to be(true)
expect(@driver.page_source.include?('Set description')).to be(true)
end
# it "should comment project as a command" do
# login_with_email
# @driver.navigate.to @config['self_url']
# sleep 1
# title = "Project #{Time.now}"
# fill_field('#create-project-title', title)
# @driver.action.send_keys(:enter).perform
# sleep 5
# # First, verify that there isn't any comment
# expect(@driver.page_source.include?('This is my comment')).to be(false)
# # Add a comment as a command
# fill_field('#cmd-input', '/comment This is my comment')
# @driver.action.send_keys(:enter).perform
# sleep 5
# # Verify that comment was added to annotations list
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# # Reload the page and verify that comment is still there
# @driver.navigate.refresh
# sleep 3
# expect(@driver.page_source.include?('This is my comment')).to be(true)
# end
it "should redirect to 404 page if id does not exist" do
login_with_email
url = @driver.current_url.to_s
@driver.navigate.to url.gsub(/project\/([0-9]+).*/, 'project/999')
title = get_element('.main-title')
expect(title.text == 'Not Found').to be(true)
expect((@driver.current_url.to_s =~ /\/404$/).nil?).to be(false)
end
it "should change a media status via the dropdown menu" do
register_with_email(true, 'sysops+' + Time.now.to_i.to_s + '@meedan.com')
wait = Selenium::WebDriver::Wait.new(timeout: 10)
wait.until { @driver.find_element(:css, '.team') }
create_project
wait.until { @driver.find_element(:css, '.project') }
create_media('https://twitter.com/marcouza/status/771009514732650497?t=' + Time.now.to_i.to_s)
wait.until { @driver.find_element(:css, '.media') }
current_status = @driver.find_element(:css, '.media-status__label')
expect(current_status.text == 'UNSTARTED').to be(true)
current_status.click
verified_menu_item = (wait.until { @driver.find_element(:css, '.media-status__menu-item--verified') })
verified_menu_item.click
sleep 3
current_status = @driver.find_element(:css, '.media-status__label')
expect(current_status.text == 'VERIFIED').to be(true)
expect(!!@driver.find_element(:css, '.annotation__status--verified')).to be(true)
end
it "should logout" do
unless login_or_register_with_email
create_team
create_project
end
@driver.navigate.to @config['self_url']
menu = @wait.until { @driver.find_element(:css, '.fa-ellipsis-h') }
menu.click
logout = @wait.until { @driver.find_element(:css, '.project-header__logout') }
logout.click
@wait.until { @driver.find_element(:css, '#login-menu') }
expect(@driver.page_source.include? 'Sign in').to be(true)
end
it "should ask to join team" do
skip("Needs to be implemented")
end
it "should redirect to team page if user asking to join a team is already a member" do
skip("Needs to be implemented")
end
it "should reject member to join team" do
skip("Needs to be implemented")
end
it "should accept member to join team" do
skip("Needs to be implemented")
end
it "should change member role" do
skip("Needs to be implemented")
end
it "should delete member from team" do
skip("Needs to be implemented")
end
it "should delete annotation from annotations list (for media, source and project)" do
skip("Needs to be implemented")
end
it "should delete tag from tags list (for media and source)" do
skip("Needs to be implemented")
end
it "should edit team" do
skip("Needs to be implemented")
end
it "should show 'manage team' link only to team owners" do
skip("Needs to be implemented")
end
it "should show 'edit project' link only to users with 'update project' permission" do
skip("Needs to be implemented")
end
it "should switch teams" do
login_or_register_with_email
@driver.navigate.to "#{@config['self_url']}/teams/new"
create_team
wait = Selenium::WebDriver::Wait.new(timeout: 5)
team_1_name = @driver.find_element(:css, '.team__name').text
create_project
project_1_id = (wait.until { @driver.current_url.to_s.match(/\/project\/([0-9]+)$/) })[1]
create_media(@media_url)
wait.until { @driver.find_element(:css, '.media') }
@driver.navigate.to "#{@config['self_url']}/teams/new"
wait.until { @driver.find_element(:css, '.create-team') }
create_team
team_2_name = @driver.find_element(:css, '.team__name').text
create_project
project_2_id = (wait.until { @driver.current_url.to_s.match(/\/project\/([0-9]+)$/) })[1]
create_media(@media_url)
wait.until { @driver.find_element(:css, '.media') }
@driver.navigate.to @config['self_url'] + '/teams'
wait.until { @driver.find_element(:css, '.teams') }
(wait.until { @driver.find_element(:xpath, "//*[contains(text(), '#{team_1_name}')]") }).click
wait.until { @driver.find_element(:css, '.team') }
expect(@driver.find_element(:css, '.team__name').text == team_1_name).to be(true)
@driver.find_element(:css, '.team__project-link').click
wait.until { @driver.find_element(:css, '.project') }
url = @driver.current_url.to_s
media_1_url = @driver.find_element(:css, '.media-detail__check-timestamp').attribute('href')
expect(media_1_url.include?("/project/#{project_1_id}/media/")).to be(true)
@driver.navigate.to @config['self_url'] + '/teams'
wait.until { @driver.find_element(:css, '.teams') }
(wait.until { @driver.find_element(:xpath, "//*[contains(text(), '#{team_2_name}')]") }).click
wait.until { @driver.find_element(:css, '.team') }
expect(@driver.find_element(:css, '.team__name').text == team_2_name).to be(true)
@driver.find_element(:css, '.team__project-link').click
wait.until { @driver.find_element(:css, '.project') }
url = @driver.current_url.to_s
media_2_url = @driver.find_element(:css, '.media-detail__check-timestamp').attribute('href')
expect(media_2_url.include?("project/#{project_2_id}/media/")).to be(true)
end
it "should cancel request through switch teams" do
skip("Needs to be implemented")
end
it "should auto refresh project page when media is created remotely" do
skip("Needs to be implemented")
end
it "should give 404 when trying to acess a media that is not related to the project on the URL" do
skip("Needs to be implemented")
end
it "should linkify URLs on comments" do
skip("Needs to be implemented")
end
it "should add and remove suggested tags" do
skip("Needs to be implemented")
end
it "should find all medias with an empty search" do
skip("Needs to be implemented")
end
it "should find medias when searching by keyword" do
skip("Needs to be implemented")
end
it "should find medias when searching by status" do
skip("Needs to be implemented")
end
it "should find medias when searching by tag" do
skip("Needs to be implemented")
end
it "should edit the title of a media" do
skip("Needs to be implemented")
end
end
end
|
require_relative 'matchers'
require_relative 'test_interface'
module Byebug
#
# Misc tools for the test suite
#
module TestUtils
#
# Adds commands to the input queue, so they will be later retrieved by
# Processor, i.e., it emulates user's input.
#
# If a command is a Proc object, it will be executed before being retrieved
# by Processor. May be handy when you need build a command depending on the
# current context/state.
#
# Usage:
# enter 'b 12'
# enter 'b 12', 'cont'
# enter ['b 12', 'cont']
# enter 'b 12', ->{"disable #{breakpoint.id}"}, 'cont'
#
def enter(*messages)
messages = messages.first.is_a?(Array) ? messages.first : messages
interface.input_queue.concat(messages)
end
#
# Runs the provided Proc
#
# You also can specify a block, which will be executed when Processor
# extracts all the commands from the input queue. You can use that for
# making assertions on the current test. If you specified the block and it
# was never executed, the test will fail.
#
# Usage:
# debug_proc -> { byebug; puts 'Hello' }
#
# enter 'b 4', 'cont'
# code = -> do
# byebug
# puts 'hello'
# end
# debug_proc(code) { assert_equal 4, state.line }
#
def debug_proc(program, &block)
Byebug.stubs(:run_init_script)
interface.test_block = block
begin
program.call
ensure
# Run the assertion block even if the debugged program raised.
interface.test_block.call if interface.test_block
end
end
#
# Checks the output of byebug.
#
# By default it checks output queue of the current interface, but you can
# check again any queue by providing it as a second argument.
#
# Usage:
# enter 'break 4', 'cont'
# debug 'ex1'
# check_output "Breakpoint 1 at #{fullpath('ex1')}:4"
#
def check_output(check_method, queue, *args)
queue_messages = queue.map(&:strip)
# Strip only String messages; regexps and other matchers pass through as-is.
messages = Array(args).map { |msg| msg.is_a?(String) ? msg.strip : msg }
send(check_method, messages, queue_messages)
end
# Defines check_<queue>_includes / check_<queue>_doesnt_include helpers for
# the output, error and confirm queues of the test interface.
%w(output error confirm).each do |queue_name|
define_method(:"check_#{queue_name}_includes") do |*args|
queue = interface.send(:"#{queue_name}_queue")
send(:check_output, :assert_includes_in_order, queue, *args)
end
define_method(:"check_#{queue_name}_doesnt_include") do |*args|
queue = interface.send(:"#{queue_name}_queue")
send(:check_output, :refute_includes_in_order, queue, *args)
end
end
#
# Set default settings for testing
#
def set_defaults
Byebug::Setting.load
Byebug::Setting[:autolist] = false
Byebug::Setting[:autosave] = false
Byebug::Setting[:testing] = true
Byebug::Setting[:width] = 80
end
# Current test interface (input/output queues) attached to Byebug's handler.
def interface
Byebug.handler.interface
end
# Debugger state for the current thread, stored as a thread variable.
def state
Thread.current.thread_variable_get('state')
end
# Execution context of the current debugger state.
def context
state.context
end
# Sets a constant on +klass+, removing any previous definition first to
# avoid "already initialized constant" warnings.
def force_set_const(klass, const, value)
force_unset_const(klass, const)
klass.const_set(const, value)
end
# Removes a constant from +klass+ if it is defined there.
def force_unset_const(klass, const)
klass.send(:remove_const, const) if klass.const_defined?(const)
end
# Replaces line number +line+ (1-based) of +file+ with +new_line_content+,
# rewriting the file in place. Used by tests that edit fixture sources.
def change_line_in_file(file, line, new_line_content)
old_content = File.read(file)
new_content = old_content.split("\n")
.tap { |c| c[line - 1] = new_line_content }
.join("\n") + "\n"
File.open(file, 'w') { |f| f.write(new_content) }
end
end
end
Remove unnecessary method
require_relative 'matchers'
require_relative 'test_interface'
module Byebug
#
# Misc tools for the test suite
#
module TestUtils
#
# Adds commands to the input queue, so they will be later retrieved by
# Processor, i.e., it emulates user's input.
#
# If a command is a Proc object, it will be executed before being retrieved
# by Processor. May be handy when you need build a command depending on the
# current context/state.
#
# Usage:
# enter 'b 12'
# enter 'b 12', 'cont'
# enter ['b 12', 'cont']
# enter 'b 12', ->{"disable #{breakpoint.id}"}, 'cont'
#
def enter(*messages)
messages = messages.first.is_a?(Array) ? messages.first : messages
interface.input_queue.concat(messages)
end
#
# Runs the provided Proc
#
# You also can specify a block, which will be executed when Processor
# extracts all the commands from the input queue. You can use that for
# making assertions on the current test. If you specified the block and it
# was never executed, the test will fail.
#
# Usage:
# debug_proc -> { byebug; puts 'Hello' }
#
# enter 'b 4', 'cont'
# code = -> do
# byebug
# puts 'hello'
# end
# debug_proc(code) { assert_equal 4, state.line }
#
def debug_proc(program, &block)
Byebug.stubs(:run_init_script)
interface.test_block = block
begin
program.call
ensure
# Run the assertion block even if the debugged program raised.
interface.test_block.call if interface.test_block
end
end
#
# Checks the output of byebug.
#
# By default it checks output queue of the current interface, but you can
# check again any queue by providing it as a second argument.
#
# Usage:
# enter 'break 4', 'cont'
# debug 'ex1'
# check_output "Breakpoint 1 at #{fullpath('ex1')}:4"
#
def check_output(check_method, queue, *args)
queue_messages = queue.map(&:strip)
# Strip only String messages; regexps and other matchers pass through as-is.
messages = Array(args).map { |msg| msg.is_a?(String) ? msg.strip : msg }
send(check_method, messages, queue_messages)
end
# Defines check_<queue>_includes / check_<queue>_doesnt_include helpers for
# the output, error and confirm queues of the test interface.
%w(output error confirm).each do |queue_name|
define_method(:"check_#{queue_name}_includes") do |*args|
queue = interface.send(:"#{queue_name}_queue")
send(:check_output, :assert_includes_in_order, queue, *args)
end
define_method(:"check_#{queue_name}_doesnt_include") do |*args|
queue = interface.send(:"#{queue_name}_queue")
send(:check_output, :refute_includes_in_order, queue, *args)
end
end
#
# Set default settings for testing
#
def set_defaults
Byebug::Setting.load
Byebug::Setting[:autolist] = false
Byebug::Setting[:autosave] = false
Byebug::Setting[:testing] = true
Byebug::Setting[:width] = 80
end
# Current test interface (input/output queues) attached to Byebug's handler.
def interface
Byebug.handler.interface
end
# Debugger state for the current thread, stored as a thread variable.
def state
Thread.current.thread_variable_get('state')
end
# Execution context of the current debugger state.
def context
state.context
end
# Sets a constant on +klass+, removing any previous definition first to
# avoid "already initialized constant" warnings.
def force_set_const(klass, const, value)
klass.send(:remove_const, const) if klass.const_defined?(const)
klass.const_set(const, value)
end
# Replaces line number +line+ (1-based) of +file+ with +new_line_content+,
# rewriting the file in place. Used by tests that edit fixture sources.
def change_line_in_file(file, line, new_line_content)
old_content = File.read(file)
new_content = old_content.split("\n")
.tap { |c| c[line - 1] = new_line_content }
.join("\n") + "\n"
File.open(file, 'w') { |f| f.write(new_content) }
end
end
end
|
#!/usr/bin/env ruby
require "test/unit"
require "fileutils"
# require "rubygems"
# require "ruby-debug"
# Debugger.start
SRC_DIR = File.expand_path(File.dirname(__FILE__)) + "/" unless
defined?(SRC_DIR)
require File.join(SRC_DIR, "helper.rb")
include TestHelper
# Test annotate commands.
# NOTE: this class was mistakenly named TestBreakpoints (cut-and-paste
# error) — it actually exercises annotate.cmd, and the wrong name collides
# with the real breakpoints test class, triggering "method redefined"
# clashes under test/unit. Renamed to TestAnnotate.
class TestAnnotate < Test::Unit::TestCase
  require 'stringio'

  # Runs the debugger over gcd.rb with the annotate command script and
  # expects the recorded session to pass comparison.
  def test_basic
    Dir.chdir(SRC_DIR) do
      assert_equal(true,
                   run_debugger("annotate",
                                "--script annotate.cmd -- gcd.rb 3 5"))
    end
  end
end
My old cut-and-paste error. Gotta get my "method redefined" patch into rake's test/unit.
#!/usr/bin/env ruby
require "test/unit"
require "fileutils"
# require "rubygems"
# require "ruby-debug"
# Debugger.start
SRC_DIR = File.expand_path(File.dirname(__FILE__)) + "/" unless
defined?(SRC_DIR)
require File.join(SRC_DIR, "helper.rb")
include TestHelper
# Test annotate commands (runs the debugger with annotate.cmd over gcd.rb).
class TestAnnotate < Test::Unit::TestCase
require 'stringio'
# Runs the debugger with the annotate command script and expects the
# recorded session to pass comparison.
def test_basic
Dir.chdir(SRC_DIR) do
assert_equal(true,
run_debugger("annotate",
"--script annotate.cmd -- gcd.rb 3 5"))
end
end
end
|
module FelyneBot
  module Commands
    # Relays role-play messages to the #mhodiscussion channel.
    module RolePlay
      extend Discordrb::Commands::CommandContainer

      # !rp <phrase> — confirms to the invoking channel and forwards the
      # phrase to the channel ID below.
      command(:rp) do |event, *phrase|
        # BUG FIX: the previous `cmdcount += 1` raised NameError because
        # `cmdcount` was never defined in this scope; removed.
        phrase = phrase.join(' ')
        event << "sent **#{phrase}** to mhodiscussion"
        # 122526505606709257 is presumably the #mhodiscussion channel ID;
        # `bot` is assumed to be provided by the enclosing bot setup — TODO confirm.
        bot.send_message(122526505606709257, phrase)
        puts 'CMD: roleplay'
      end
    end
  end
end
Added Files
module FelyneBot
module Commands
# Relays role-play messages to the #mhodiscussion channel.
module RolePlay
extend Discordrb::Commands::CommandContainer
# !rp <phrase> — confirms to the invoking channel and forwards the phrase
# to the fixed channel ID below (presumably #mhodiscussion — TODO confirm).
command(:rp) do |event, *phrase|
phrase = phrase.join(' ')
event << "sent **#{phrase}** to mhodiscussion"
# `bot` is assumed to be provided by the enclosing bot setup — TODO confirm.
bot.send_message(122526505606709257, phrase)
puts 'CMD: roleplay'
end
end
end
end
require 'helper'
require 'rack/test'
require 'jim/rack'
# Tests for the Jim::Rack middleware: requests under the bundle URI are
# served by Jim, everything else falls through to the downstream app.
class TestJimRack < Test::Unit::TestCase
  include Rack::Test::Methods

  def setup
    @bundle_uri = '/javascripts/'
  end

  # Builds the Rack app under test.
  # BUG FIX: the fallthrough endpoint returned a bare String body ('okay'),
  # but the Rack SPEC requires the response body to respond to #each
  # (String does not on Ruby 1.9+) — wrap it in an Array.
  def app
    jimfile = fixture_path('jimfile')
    Rack::Builder.new {
      use Jim::Rack, :bundle_uri => @bundle_uri,
                     :jimfile => jimfile,
                     :jimhome => File.join(File.dirname(__FILE__), 'tmp')
      run lambda { |env| [200, {'Content-Type' => 'text/plain'}, ['okay']] }
    }
  end

  context "Jim::Rack" do
    setup do
      # Start from a clean jimhome and install the fixture libs.
      FileUtils.rm_rf(tmp_path) if File.directory?(tmp_path)
      root = File.dirname(__FILE__)
      Jim::Installer.new(fixture_path('jquery-1.4.1.js'), File.join(root, 'tmp', 'lib')).install
      Jim::Installer.new(fixture_path('infoincomments.js'), File.join(root, 'tmp', 'lib')).install
      Jim::Installer.new(fixture_path('localfile.js'), File.join(root, 'tmp', 'lib')).install
    end

    should "get individual bundle" do
      # Bundling itself is stubbed; this only checks routing and headers.
      Jim::Bundler.any_instance.expects(:bundle!).with('default').once.returns('jQuery')
      get "#{@bundle_uri}default.js"
      assert last_response
      assert_equal 'jQuery', last_response.body
      assert_equal 'text/javascript', last_response.headers['Content-Type']
    end

    should "get individual compressed bundle" do
      Jim::Bundler.any_instance.expects(:compress!).with('default').once.returns('jQuery')
      get "#{@bundle_uri}default.min.js"
      assert last_response
      assert_equal 'jQuery', last_response.body
      assert_equal 'text/javascript', last_response.headers['Content-Type']
    end
  end
end
Don't need any of this setup
require 'helper'
require 'rack/test'
require 'jim/rack'
# Tests for the Jim::Rack middleware: requests under the bundle URI are
# served by Jim (bundling itself is stubbed out via Mocha expectations).
class TestJimRack < Test::Unit::TestCase
include Rack::Test::Methods
def setup
@bundle_uri = '/javascripts/'
end
# Builds the Rack app under test; the fallthrough endpoint wraps its body
# in an Array, as the Rack SPEC requires bodies to respond to #each.
def app
jimfile = fixture_path('jimfile')
Rack::Builder.new {
use Jim::Rack, :bundle_uri => @bundle_uri,
:jimfile => jimfile,
:jimhome => File.join(File.dirname(__FILE__), 'tmp')
run lambda { |env| [200, {'Content-Type' => 'text/plain'}, ['okay']] }
}
end
context "Jim::Rack" do
should "get individual bundle" do
# bundle! is stubbed; this checks routing and response headers only.
Jim::Bundler.any_instance.expects(:bundle!).with('default').once.returns('jQuery')
get "#{@bundle_uri}default.js"
assert last_response
assert_equal 'jQuery', last_response.body
assert_equal 'text/javascript', last_response.headers['Content-Type']
end
should "get individual compressed bundle" do
Jim::Bundler.any_instance.expects(:compress!).with('default').once.returns('jQuery')
get "#{@bundle_uri}default.min.js"
assert last_response
assert_equal 'jQuery', last_response.body
assert_equal 'text/javascript', last_response.headers['Content-Type']
end
end
end
|
require 'rubygems'
require 'bundler/setup'
require 'test/unit'
require File.expand_path('../lib/turnpike', File.dirname(__FILE__))

# Exercises the Turnpike Redis-backed queue: push/pop/shift semantics,
# enumeration, named queues, blocking reads, and the configurable blocking
# timeout. Requires a reachable Redis server; the whole DB is flushed
# before every test.
class TestTurnpike < Test::Unit::TestCase
  def setup
    Redis.current.flushall
  end

  # Runs the block with Turnpike.timeout temporarily set to `seconds`,
  # restoring the previous value afterwards.
  # NOTE(review): the original timeout is NOT restored if the block raises;
  # moving the restore into an `ensure` clause would make this leak-proof.
  def time_out_in(seconds, &block)
    original_timeout = Turnpike.timeout
    Turnpike.configure { |c| c.timeout = seconds }
    block.call
    Turnpike.configure { |c| c.timeout = original_timeout }
  end

  def test_bracket_method
    # Turnpike[] namespaces the queue name and memoizes per name.
    assert_equal("turnpike:foo", Turnpike["foo"].name)
    assert(Turnpike["foo"] == Turnpike["foo"])
  end

  def test_emptiness
    queue = Turnpike.new
    assert(queue.empty?)
    queue << 1
    assert(!queue.empty?)
    queue.clear
    assert(queue.empty?)
  end

  # Values come back as strings (the assertions below compare against '1',
  # '2', ... rather than Integers).
  def test_pushing_items
    queue = Turnpike.new
    queue.push(1)
    assert_equal(1, queue.length)
    queue.push(2, 3)
    assert_equal(3, queue.length)
    assert_equal(['1', '2', '3'], queue.peek(0, 3))
  end

  def test_unshifting_items
    queue = Turnpike.new
    queue.unshift(1)
    assert_equal(1, queue.length)
    queue.unshift(2, 3)
    assert_equal(3, queue.length)
    assert_equal(['3', '2', '1'], queue.peek(0, 3))
  end

  def test_popping_items
    queue = Turnpike.new
    queue.push(1, 2)
    assert_equal('2', queue.pop)
    assert_equal('1', queue.pop)
    assert_equal(nil, queue.pop)
  end

  def test_shifting_items
    queue = Turnpike.new
    queue.push(1, 2)
    assert_equal('1', queue.shift)
    assert_equal('2', queue.shift)
    assert_equal(nil, queue.shift)
  end

  # each/each_slice consume the queue, so it is refilled between passes.
  def test_enumeration
    queue = Turnpike.new
    queue.push(1, 2)
    items = []
    queue.each { |item| items << item }
    assert_equal(['1', '2'], items)
    queue.push(1, 2, 3, 4)
    slices = []
    queue.each_slice(3) { |slice| slices << slice }
    assert_equal([['1', '2', '3'], ['4']], slices)
    queue.push(1, 2)
    slices = []
    queue.each_slice(2) { |slice| slices << slice }
    assert_equal([['1', '2']], slices)
  end

  def test_aliases
    queue = Turnpike.new
    queue << 1
    assert_equal(1, queue.size)
  end

  def test_multiple_queues
    queue1 = Turnpike.new("foo")
    queue2 = Turnpike.new("bar")
    queue1.push(1)
    queue2.push(2, 3)
    assert_equal(1, queue1.length)
    assert_equal(2, queue2.length)
  end

  # pop(true) blocks until another thread pushes an item.
  def test_blocking_pop
    queue = Turnpike.new
    started_at = Time.now.to_i
    Thread.new do
      sleep(1)
      queue.push(1)
    end
    assert_equal(0, queue.length)
    assert_equal('1', queue.pop(true))
    assert(Time.now.to_i - started_at > 0)
  end

  def test_blocking_shift
    queue = Turnpike.new
    started_at = Time.now.to_i
    Thread.new do
      sleep(1)
      queue.push(1)
    end
    assert_equal(0, queue.length)
    assert_equal('1', queue.shift(true))
    assert(Time.now.to_i - started_at > 0)
  end

  # A blocking shift gives up (returns nil) once the configured timeout
  # elapses before the producer thread pushes.
  def test_timeout
    time_out_in 1 do
      queue = Turnpike.new
      thread = Thread.new do
        sleep(2)
        queue.push(1)
      end
      assert_equal(0, queue.length)
      assert_equal(nil, queue.shift(true))
      thread.join
      assert_equal(1, queue.length)
    end
  end
end
fix indentation
require 'rubygems'
require 'bundler/setup'
require 'test/unit'
require File.expand_path('../lib/turnpike', File.dirname(__FILE__))

# Exercises the Turnpike Redis-backed queue: push/pop/shift semantics,
# enumeration, named queues, blocking reads, and the configurable blocking
# timeout. Requires a reachable Redis server; the whole DB is flushed
# before every test.
class TestTurnpike < Test::Unit::TestCase
  def setup
    Redis.current.flushall
  end

  # Runs the block with Turnpike.timeout temporarily set to `seconds`.
  # The previous value is restored in an `ensure` clause so a failing
  # assertion (or any other exception) inside the block cannot leak the
  # temporary timeout into later tests.
  def time_out_in(seconds, &block)
    original_timeout = Turnpike.timeout
    Turnpike.configure { |c| c.timeout = seconds }
    block.call
  ensure
    Turnpike.configure { |c| c.timeout = original_timeout }
  end

  def test_bracket_method
    # Turnpike[] namespaces the queue name and memoizes per name.
    assert_equal("turnpike:foo", Turnpike["foo"].name)
    assert(Turnpike["foo"] == Turnpike["foo"])
  end

  def test_emptiness
    queue = Turnpike.new
    assert(queue.empty?)
    queue << 1
    assert(!queue.empty?)
    queue.clear
    assert(queue.empty?)
  end

  # Values come back as strings (the assertions below compare against '1',
  # '2', ... rather than Integers).
  def test_pushing_items
    queue = Turnpike.new
    queue.push(1)
    assert_equal(1, queue.length)
    queue.push(2, 3)
    assert_equal(3, queue.length)
    assert_equal(['1', '2', '3'], queue.peek(0, 3))
  end

  def test_unshifting_items
    queue = Turnpike.new
    queue.unshift(1)
    assert_equal(1, queue.length)
    queue.unshift(2, 3)
    assert_equal(3, queue.length)
    assert_equal(['3', '2', '1'], queue.peek(0, 3))
  end

  def test_popping_items
    queue = Turnpike.new
    queue.push(1, 2)
    assert_equal('2', queue.pop)
    assert_equal('1', queue.pop)
    assert_equal(nil, queue.pop)
  end

  def test_shifting_items
    queue = Turnpike.new
    queue.push(1, 2)
    assert_equal('1', queue.shift)
    assert_equal('2', queue.shift)
    assert_equal(nil, queue.shift)
  end

  # each/each_slice consume the queue, so it is refilled between passes.
  def test_enumeration
    queue = Turnpike.new
    queue.push(1, 2)
    items = []
    queue.each { |item| items << item }
    assert_equal(['1', '2'], items)
    queue.push(1, 2, 3, 4)
    slices = []
    queue.each_slice(3) { |slice| slices << slice }
    assert_equal([['1', '2', '3'], ['4']], slices)
    queue.push(1, 2)
    slices = []
    queue.each_slice(2) { |slice| slices << slice }
    assert_equal([['1', '2']], slices)
  end

  def test_aliases
    queue = Turnpike.new
    queue << 1
    assert_equal(1, queue.size)
  end

  def test_multiple_queues
    queue1 = Turnpike.new("foo")
    queue2 = Turnpike.new("bar")
    queue1.push(1)
    queue2.push(2, 3)
    assert_equal(1, queue1.length)
    assert_equal(2, queue2.length)
  end

  # pop(true) blocks until another thread pushes an item.
  def test_blocking_pop
    queue = Turnpike.new
    started_at = Time.now.to_i
    Thread.new do
      sleep(1)
      queue.push(1)
    end
    assert_equal(0, queue.length)
    assert_equal('1', queue.pop(true))
    assert(Time.now.to_i - started_at > 0)
  end

  def test_blocking_shift
    queue = Turnpike.new
    started_at = Time.now.to_i
    Thread.new do
      sleep(1)
      queue.push(1)
    end
    assert_equal(0, queue.length)
    assert_equal('1', queue.shift(true))
    assert(Time.now.to_i - started_at > 0)
  end

  # A blocking shift gives up (returns nil) once the configured timeout
  # elapses before the producer thread pushes.
  def test_timeout
    time_out_in 1 do
      queue = Turnpike.new
      thread = Thread.new do
        sleep(2)
        queue.push(1)
      end
      assert_equal(0, queue.length)
      assert_equal(nil, queue.shift(true))
      thread.join
      assert_equal(1, queue.length)
    end
  end
end
|
require 'test_helper'

# Model tests for Sop: project association, validation, versioning,
# policies, trash/restore, UUIDs and content-type helpers.
class SopTest < ActiveSupport::TestCase
  fixtures :all

  test "project" do
    s=sops(:editable_sop)
    p=projects(:sysmo_project)
    assert_equal p,s.projects.first
  end

  test "sort by updated_at" do
    last = 9999999999999 #safe until the year 318857 !
    # Timestamps are disabled so the factory-supplied updated_at survives save.
    Sop.record_timestamps = false
    Factory(:sop,:title=>"8 day old SOP",:updated_at=>8.day.ago)
    Factory(:sop,:title=>"20 day old SOP",:updated_at=>20.days.ago)
    Sop.record_timestamps = true
    sops = Sop.find(:all)
    sops.each do |sop|
      assert sop.updated_at.to_i <= last
      last=sop.updated_at.to_i
    end
  end

  def test_title_trimmed
    sop=Factory(:sop, :title => " test sop")
    assert_equal("test sop",sop.title)
  end

  # A Sop needs both a title and at least one project to be valid.
  test "validation" do
    asset=Sop.new :title=>"fred",:projects=>[projects(:sysmo_project)]
    assert asset.valid?
    asset=Sop.new :projects=>[projects(:sysmo_project)]
    assert !asset.valid?
    asset=Sop.new :title=>"fred"
    assert !asset.valid?
  end

  test "assay association" do
    sop = sops(:sop_with_fully_public_policy)
    assay = assays(:modelling_assay_with_data_and_relationship)
    assay_asset = assay_assets(:metabolomics_assay_asset1)
    assert_not_equal assay_asset.asset, sop
    assert_not_equal assay_asset.assay, assay
    assay_asset.asset = sop
    assay_asset.assay = assay
    User.with_current_user(assay.contributor.user){assay_asset.save!}
    assay_asset.reload
    assert assay_asset.valid?
    assert_equal assay_asset.asset, sop
    assert_equal assay_asset.assay, assay
  end

  def test_avatar_key
    assert_nil sops(:editable_sop).avatar_key
    assert sops(:editable_sop).use_mime_type_for_avatar?
    assert_nil sop_versions(:my_first_sop_v1).avatar_key
    assert sop_versions(:my_first_sop_v1).use_mime_type_for_avatar?
  end

  def test_defaults_to_private_policy
    sop=Sop.new Factory.attributes_for(:sop).tap{|h|h[:policy] = nil}
    sop.save!
    sop.reload
    assert_not_nil sop.policy
    assert_equal Policy::PRIVATE, sop.policy.sharing_scope
    assert_equal Policy::NO_ACCESS, sop.policy.access_type
    assert_equal false,sop.policy.use_whitelist
    assert_equal false,sop.policy.use_blacklist
    assert sop.policy.permissions.empty?
  end

  def test_version_created_for_new_sop
    sop=Factory(:sop)
    assert sop.save
    sop=Sop.find(sop.id)
    # Fixed: the original `assert 1,sop.version` only asserted the
    # truthiness of 1 (with sop.version as the failure message) and
    # could never fail; assert_equal performs the intended comparison.
    assert_equal 1,sop.version
    assert_equal 1,sop.versions.size
    assert_equal sop,sop.versions.last.sop
    assert_equal sop.title,sop.versions.first.title
  end

  #really just to test the fixtures for versions, but may as well leave here.
  def test_version_from_fixtures
    sop_version=sop_versions(:my_first_sop_v1)
    assert_equal 1,sop_version.version
    assert_equal users(:owner_of_my_first_sop),sop_version.contributor
    assert_equal content_blobs(:content_blob_with_little_file2),sop_version.content_blob
    sop=sops(:my_first_sop)
    assert_equal sop.id,sop_version.sop_id
    assert_equal 1,sop.version
    assert_equal sop.title,sop.versions.first.title
  end

  # Plain saves must not bump the version; only save_as_new_version does.
  def test_create_new_version
    sop=sops(:my_first_sop)
    User.current_user = sop.contributor
    sop.save!
    sop=Sop.find(sop.id)
    assert_equal 1,sop.version
    assert_equal 1,sop.versions.size
    assert_equal "My First Favourite SOP",sop.title
    sop.save!
    sop=Sop.find(sop.id)
    assert_equal 1,sop.version
    assert_equal 1,sop.versions.size
    assert_equal "My First Favourite SOP",sop.title
    sop.title="Updated Sop"
    sop.save_as_new_version("Updated sop as part of a test")
    sop=Sop.find(sop.id)
    assert_equal 2,sop.version
    assert_equal 2,sop.versions.size
    assert_equal "Updated Sop",sop.title
    assert_equal "Updated Sop",sop.versions.last.title
    assert_equal "Updated sop as part of a test",sop.versions.last.revision_comments
    assert_equal "My First Favourite SOP",sop.versions.first.title
    assert_equal "My First Favourite SOP",sop.find_version(1).title
    assert_equal "Updated Sop",sop.find_version(2).title
  end

  def test_project_for_sop_and_sop_version_match
    sop=sops(:my_first_sop)
    project=projects(:sysmo_project)
    assert_equal project,sop.projects.first
    assert_equal project,sop.latest_version.projects.first
  end

  test "sop with no contributor" do
    sop=sops(:sop_with_no_contributor)
    assert_nil sop.contributor
  end

  test "versions destroyed as dependent" do
    sop = sops(:my_first_sop)
    assert_equal 1,sop.versions.size,"There should be 1 version of this SOP"
    assert_difference(["Sop.count","Sop::Version.count"],-1) do
      User.current_user = sop.contributor
      sop.destroy
    end
  end

  test "make sure content blob is preserved after deletion" do
    sop = sops(:my_first_sop)
    assert_not_nil sop.content_blob,"Must have an associated content blob for this test to work"
    cb=sop.content_blob
    assert_difference("Sop.count",-1) do
      assert_no_difference("ContentBlob.count") do
        User.current_user = sop.contributor
        sop.destroy
      end
    end
    assert_not_nil ContentBlob.find(cb.id)
  end

  test "is restorable after destroy" do
    sop = Factory :sop, :policy => Factory(:all_sysmo_viewable_policy), :title => 'is it restorable?'
    User.current_user = sop.contributor
    assert_difference("Sop.count",-1) do
      sop.destroy
    end
    assert_nil Sop.find_by_title 'is it restorable?'
    assert_difference("Sop.count",1) do
      disable_authorization_checks {Sop.restore_trash!(sop.id)}
    end
    assert_not_nil Sop.find_by_title 'is it restorable?'
  end

  test 'failing to delete due to can_delete does not create trash' do
    sop = Factory :sop, :policy => Factory(:private_policy)
    assert_no_difference("Sop.count") do
      sop.destroy
    end
    assert_nil Sop.restore_trash(sop.id)
  end

  test "test uuid generated" do
    x = sops(:my_first_sop)
    assert_nil x.attributes["uuid"]
    x.save
    assert_not_nil x.attributes["uuid"]
  end

  test "uuid doesn't change" do
    x = sops(:my_first_sop)
    x.save
    uuid = x.attributes["uuid"]
    x.save
    assert_equal x.uuid, uuid
  end

  test "contributing_user" do
    sop = Factory :sop
    assert sop.contributor
    assert_equal sop.contributor, sop.contributing_user
    sop_without_contributor = Factory :sop, :contributor => nil
    assert_equal nil, sop_without_contributor.contributing_user
  end

  test 'is_downloadable_pdf?' do
    sop_with_pdf_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>"application/pdf", :data => File.new("#{Rails.root}/test/fixtures/files/a_pdf_file.pdf","rb").read))
    User.with_current_user sop_with_pdf_format.contributor do
      assert sop_with_pdf_format.is_pdf?
      assert sop_with_pdf_format.is_downloadable_pdf?
    end
    sop_with_no_pdf_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>"text/plain", :data => File.new("#{Rails.root}/test/fixtures/files/little_file.txt","rb").read))
    User.with_current_user sop_with_no_pdf_format.contributor do
      assert !sop_with_no_pdf_format.is_pdf?
      assert !sop_with_no_pdf_format.is_downloadable_pdf?
    end
  end

  test 'is_content_viewable?' do
    viewable_formats= %w[application/pdf application/msword application/vnd.ms-powerpoint application/vnd.oasis.opendocument.presentation application/vnd.oasis.opendocument.text]
    viewable_formats.each do |viewable_format|
      sop_with_content_viewable_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>viewable_format, :data => File.new("#{Rails.root}/test/fixtures/files/a_pdf_file.pdf","rb").read))
      User.with_current_user sop_with_content_viewable_format.contributor do
        assert sop_with_content_viewable_format.is_viewable_format?
        assert sop_with_content_viewable_format.is_content_viewable?
      end
    end
    sop_with_no_viewable_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>"text/plain", :data => File.new("#{Rails.root}/test/fixtures/files/little_file.txt","rb").read))
    User.with_current_user sop_with_no_viewable_format.contributor do
      assert !sop_with_no_viewable_format.is_viewable_format?
      assert !sop_with_no_viewable_format.is_content_viewable?
    end
  end

  test 'filter_text_content' do
    ms_word_sop = Factory(:ms_word_sop)
    content = "test \n content \f only"
    # filter_text_content is a private method, so it must be invoked via send.
    filtered_content = ms_word_sop.send(:filter_text_content,content)
    assert !filtered_content.include?('\n')
    assert !filtered_content.include?('\f')
  end

  test 'pdf_contents_for_search' do
    ms_word_sop = Factory(:ms_word_sop)
    assert ms_word_sop.is_viewable_format?
    content = ms_word_sop.pdf_contents_for_search
    assert_equal 'This is a ms word file', content
  end
end
use send to call a private method
require 'test_helper'

# Model tests for Sop: project association, validation, versioning,
# policies, trash/restore, UUIDs and content-type helpers.
class SopTest < ActiveSupport::TestCase
  fixtures :all

  test "project" do
    s=sops(:editable_sop)
    p=projects(:sysmo_project)
    assert_equal p,s.projects.first
  end

  test "sort by updated_at" do
    last = 9999999999999 #safe until the year 318857 !
    # Timestamps are disabled so the factory-supplied updated_at survives save.
    Sop.record_timestamps = false
    Factory(:sop,:title=>"8 day old SOP",:updated_at=>8.day.ago)
    Factory(:sop,:title=>"20 day old SOP",:updated_at=>20.days.ago)
    Sop.record_timestamps = true
    sops = Sop.find(:all)
    sops.each do |sop|
      assert sop.updated_at.to_i <= last
      last=sop.updated_at.to_i
    end
  end

  def test_title_trimmed
    sop=Factory(:sop, :title => " test sop")
    assert_equal("test sop",sop.title)
  end

  # A Sop needs both a title and at least one project to be valid.
  test "validation" do
    asset=Sop.new :title=>"fred",:projects=>[projects(:sysmo_project)]
    assert asset.valid?
    asset=Sop.new :projects=>[projects(:sysmo_project)]
    assert !asset.valid?
    asset=Sop.new :title=>"fred"
    assert !asset.valid?
  end

  test "assay association" do
    sop = sops(:sop_with_fully_public_policy)
    assay = assays(:modelling_assay_with_data_and_relationship)
    assay_asset = assay_assets(:metabolomics_assay_asset1)
    assert_not_equal assay_asset.asset, sop
    assert_not_equal assay_asset.assay, assay
    assay_asset.asset = sop
    assay_asset.assay = assay
    User.with_current_user(assay.contributor.user){assay_asset.save!}
    assay_asset.reload
    assert assay_asset.valid?
    assert_equal assay_asset.asset, sop
    assert_equal assay_asset.assay, assay
  end

  def test_avatar_key
    assert_nil sops(:editable_sop).avatar_key
    assert sops(:editable_sop).use_mime_type_for_avatar?
    assert_nil sop_versions(:my_first_sop_v1).avatar_key
    assert sop_versions(:my_first_sop_v1).use_mime_type_for_avatar?
  end

  def test_defaults_to_private_policy
    sop=Sop.new Factory.attributes_for(:sop).tap{|h|h[:policy] = nil}
    sop.save!
    sop.reload
    assert_not_nil sop.policy
    assert_equal Policy::PRIVATE, sop.policy.sharing_scope
    assert_equal Policy::NO_ACCESS, sop.policy.access_type
    assert_equal false,sop.policy.use_whitelist
    assert_equal false,sop.policy.use_blacklist
    assert sop.policy.permissions.empty?
  end

  def test_version_created_for_new_sop
    sop=Factory(:sop)
    assert sop.save
    sop=Sop.find(sop.id)
    # NOTE(review): `assert 1,sop.version` only asserts the truthiness of 1
    # (sop.version is treated as the failure message) and can never fail;
    # assert_equal was almost certainly intended here and on the next line.
    assert 1,sop.version
    assert 1,sop.versions.size
    assert_equal sop,sop.versions.last.sop
    assert_equal sop.title,sop.versions.first.title
  end

  #really just to test the fixtures for versions, but may as well leave here.
  def test_version_from_fixtures
    sop_version=sop_versions(:my_first_sop_v1)
    assert_equal 1,sop_version.version
    assert_equal users(:owner_of_my_first_sop),sop_version.contributor
    assert_equal content_blobs(:content_blob_with_little_file2),sop_version.content_blob
    sop=sops(:my_first_sop)
    assert_equal sop.id,sop_version.sop_id
    assert_equal 1,sop.version
    assert_equal sop.title,sop.versions.first.title
  end

  # Plain saves must not bump the version; only save_as_new_version does.
  def test_create_new_version
    sop=sops(:my_first_sop)
    User.current_user = sop.contributor
    sop.save!
    sop=Sop.find(sop.id)
    assert_equal 1,sop.version
    assert_equal 1,sop.versions.size
    assert_equal "My First Favourite SOP",sop.title
    sop.save!
    sop=Sop.find(sop.id)
    assert_equal 1,sop.version
    assert_equal 1,sop.versions.size
    assert_equal "My First Favourite SOP",sop.title
    sop.title="Updated Sop"
    sop.save_as_new_version("Updated sop as part of a test")
    sop=Sop.find(sop.id)
    assert_equal 2,sop.version
    assert_equal 2,sop.versions.size
    assert_equal "Updated Sop",sop.title
    assert_equal "Updated Sop",sop.versions.last.title
    assert_equal "Updated sop as part of a test",sop.versions.last.revision_comments
    assert_equal "My First Favourite SOP",sop.versions.first.title
    assert_equal "My First Favourite SOP",sop.find_version(1).title
    assert_equal "Updated Sop",sop.find_version(2).title
  end

  def test_project_for_sop_and_sop_version_match
    sop=sops(:my_first_sop)
    project=projects(:sysmo_project)
    assert_equal project,sop.projects.first
    assert_equal project,sop.latest_version.projects.first
  end

  test "sop with no contributor" do
    sop=sops(:sop_with_no_contributor)
    assert_nil sop.contributor
  end

  test "versions destroyed as dependent" do
    sop = sops(:my_first_sop)
    assert_equal 1,sop.versions.size,"There should be 1 version of this SOP"
    assert_difference(["Sop.count","Sop::Version.count"],-1) do
      User.current_user = sop.contributor
      sop.destroy
    end
  end

  test "make sure content blob is preserved after deletion" do
    sop = sops(:my_first_sop)
    assert_not_nil sop.content_blob,"Must have an associated content blob for this test to work"
    cb=sop.content_blob
    assert_difference("Sop.count",-1) do
      assert_no_difference("ContentBlob.count") do
        User.current_user = sop.contributor
        sop.destroy
      end
    end
    assert_not_nil ContentBlob.find(cb.id)
  end

  test "is restorable after destroy" do
    sop = Factory :sop, :policy => Factory(:all_sysmo_viewable_policy), :title => 'is it restorable?'
    User.current_user = sop.contributor
    assert_difference("Sop.count",-1) do
      sop.destroy
    end
    assert_nil Sop.find_by_title 'is it restorable?'
    assert_difference("Sop.count",1) do
      disable_authorization_checks {Sop.restore_trash!(sop.id)}
    end
    assert_not_nil Sop.find_by_title 'is it restorable?'
  end

  test 'failing to delete due to can_delete does not create trash' do
    sop = Factory :sop, :policy => Factory(:private_policy)
    assert_no_difference("Sop.count") do
      sop.destroy
    end
    assert_nil Sop.restore_trash(sop.id)
  end

  test "test uuid generated" do
    x = sops(:my_first_sop)
    assert_nil x.attributes["uuid"]
    x.save
    assert_not_nil x.attributes["uuid"]
  end

  test "uuid doesn't change" do
    x = sops(:my_first_sop)
    x.save
    uuid = x.attributes["uuid"]
    x.save
    assert_equal x.uuid, uuid
  end

  test "contributing_user" do
    sop = Factory :sop
    assert sop.contributor
    assert_equal sop.contributor, sop.contributing_user
    sop_without_contributor = Factory :sop, :contributor => nil
    assert_equal nil, sop_without_contributor.contributing_user
  end

  test 'is_downloadable_pdf?' do
    sop_with_pdf_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>"application/pdf", :data => File.new("#{Rails.root}/test/fixtures/files/a_pdf_file.pdf","rb").read))
    User.with_current_user sop_with_pdf_format.contributor do
      assert sop_with_pdf_format.is_pdf?
      assert sop_with_pdf_format.is_downloadable_pdf?
    end
    sop_with_no_pdf_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>"text/plain", :data => File.new("#{Rails.root}/test/fixtures/files/little_file.txt","rb").read))
    User.with_current_user sop_with_no_pdf_format.contributor do
      assert !sop_with_no_pdf_format.is_pdf?
      assert !sop_with_no_pdf_format.is_downloadable_pdf?
    end
  end

  test 'is_content_viewable?' do
    viewable_formats= %w[application/pdf application/msword application/vnd.ms-powerpoint application/vnd.oasis.opendocument.presentation application/vnd.oasis.opendocument.text]
    viewable_formats.each do |viewable_format|
      sop_with_content_viewable_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>viewable_format, :data => File.new("#{Rails.root}/test/fixtures/files/a_pdf_file.pdf","rb").read))
      User.with_current_user sop_with_content_viewable_format.contributor do
        assert sop_with_content_viewable_format.is_viewable_format?
        assert sop_with_content_viewable_format.is_content_viewable?
      end
    end
    sop_with_no_viewable_format = Factory(:sop, :content_blob => Factory(:content_blob, :content_type=>"text/plain", :data => File.new("#{Rails.root}/test/fixtures/files/little_file.txt","rb").read))
    User.with_current_user sop_with_no_viewable_format.contributor do
      assert !sop_with_no_viewable_format.is_viewable_format?
      assert !sop_with_no_viewable_format.is_content_viewable?
    end
  end

  test 'filter_text_content' do
    ms_word_sop = Factory(:ms_word_sop)
    content = "test \n content \f only"
    # filter_text_content is private, hence the send.
    filtered_content = ms_word_sop.send(:filter_text_content,content)
    assert !filtered_content.include?('\n')
    assert !filtered_content.include?('\f')
  end

  test 'pdf_contents_for_search' do
    ms_word_sop = Factory(:ms_word_sop)
    assert ms_word_sop.is_viewable_format?
    content = ms_word_sop.pdf_contents_for_search
    assert_equal 'This is a ms word file', content
  end
end
|
#! ruby -Ku
# coding: utf-8

require "test_helper"
require "stringio"
require "msgpack_pure/unpacker"

# Byte-level tests for MessagePackPure::Unpacker, covering every MessagePack
# type tag: fixnums, sized ints/uints, nil/bool, float/double, raw strings,
# arrays and maps (both the fix* forms and the 16/32-bit sized forms).
class UnpackerTest < Test::Unit::TestCase
  def setup
    @module = MessagePackPure::Unpacker
  end

  def test_unpack__positive_fixnum
    assert_equal(+0x00, unpack("\x00"))
    assert_equal(+0x7F, unpack("\x7F"))
  end

  def test_unpack__negative_fixnum
    assert_equal(-0x01, unpack("\xFF"))
    assert_equal(-0x20, unpack("\xE0"))
  end

  def test_unpack__uint8
    assert_equal(+0x00, unpack("\xCC\x00"))
    assert_equal(+0xFF, unpack("\xCC\xFF"))
  end

  def test_unpack__uint16
    assert_equal(+0x0000, unpack("\xCD\x00\x00"))
    assert_equal(+0x0001, unpack("\xCD\x00\x01"))
    assert_equal(+0xFFFF, unpack("\xCD\xFF\xFF"))
  end

  def test_unpack__uint32
    assert_equal(+0x00000000, unpack("\xCE\x00\x00\x00\x00"))
    assert_equal(+0x00000001, unpack("\xCE\x00\x00\x00\x01"))
    assert_equal(+0xFFFFFFFF, unpack("\xCE\xFF\xFF\xFF\xFF"))
  end

  def test_unpack__uint64
    assert_equal(+0x0000000000000000, unpack("\xCF\x00\x00\x00\x00\x00\x00\x00\x00"))
    assert_equal(+0x0000000000000001, unpack("\xCF\x00\x00\x00\x00\x00\x00\x00\x01"))
    assert_equal(+0xFFFFFFFFFFFFFFFF, unpack("\xCF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"))
  end

  def test_unpack__int8
    assert_equal(+0x00, unpack("\xD0\x00"))
    assert_equal(+0x7F, unpack("\xD0\x7F"))
    assert_equal(-0x01, unpack("\xD0\xFF"))
    assert_equal(-0x80, unpack("\xD0\x80"))
  end

  def test_unpack__int16
    assert_equal(+0x0000, unpack("\xD1\x00\x00"))
    assert_equal(+0x7FFF, unpack("\xD1\x7F\xFF"))
    assert_equal(-0x0001, unpack("\xD1\xFF\xFF"))
    assert_equal(-0x8000, unpack("\xD1\x80\x00"))
  end

  def test_unpack__int32
    assert_equal(+0x00000000, unpack("\xD2\x00\x00\x00\x00"))
    assert_equal(+0x7FFFFFFF, unpack("\xD2\x7F\xFF\xFF\xFF"))
    assert_equal(-0x00000001, unpack("\xD2\xFF\xFF\xFF\xFF"))
    assert_equal(-0x80000000, unpack("\xD2\x80\x00\x00\x00"))
  end

  def test_unpack__int64
    assert_equal(+0x0000000000000000, unpack("\xD3\x00\x00\x00\x00\x00\x00\x00\x00"))
    assert_equal(+0x7FFFFFFFFFFFFFFF, unpack("\xD3\x7F\xFF\xFF\xFF\xFF\xFF\xFF\xFF"))
    assert_equal(-0x0000000000000001, unpack("\xD3\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"))
    assert_equal(-0x8000000000000000, unpack("\xD3\x80\x00\x00\x00\x00\x00\x00\x00"))
  end

  def test_unpack__nil
    assert_equal(nil, unpack("\xC0"))
  end

  def test_unpack__true
    assert_equal(true, unpack("\xC3"))
  end

  def test_unpack__false
    assert_equal(false, unpack("\xC2"))
  end

  def test_unpack__float
    assert_equal(+0.0, unpack("\xCA\x00\x00\x00\x00"))
    assert_equal(+0.5, unpack("\xCA\x3F\x00\x00\x00"))
    assert_equal(-0.5, unpack("\xCA\xBF\x00\x00\x00"))
  end

  def test_unpack__double
    assert_equal(+0.0, unpack("\xCB\x00\x00\x00\x00\x00\x00\x00\x00"))
    assert_equal(+0.5, unpack("\xCB\x3F\xE0\x00\x00\x00\x00\x00\x00"))
    assert_equal(-0.5, unpack("\xCB\xBF\xE0\x00\x00\x00\x00\x00\x00"))
  end

  def test_unpack__fixraw
    assert_equal("", unpack("\xA0"))
    assert_equal("ABC", unpack("\xA3ABC"))
    assert_equal("A" * 31, unpack("\xBF" + "A" * 31))
  end

  def test_unpack__raw16
    assert_equal("", unpack("\xDA\x00\x00"))
    assert_equal("ABC", unpack("\xDA\x00\x03ABC"))
    assert_equal(
      "A" * 0xFFFF,
      unpack("\xDA\xFF\xFF" + "A" * 0xFFFF))
  end

  def test_unpack__raw32
    assert_equal("", unpack("\xDB\x00\x00\x00\x00"))
    assert_equal("ABC", unpack("\xDB\x00\x00\x00\x03ABC"))
    assert_equal(
      "A" * 0x10000,
      unpack("\xDB\x00\x01\x00\x00" + "A" * 0x10000))
  end

  # The large-payload cases below build the binary incrementally in a
  # StringIO (header first, then one uint16 element per iteration) while
  # simultaneously collecting the expected Ruby value.
  def test_unpack__fixarray
    assert_equal([], unpack("\x90"))
    assert_equal([0, 1, 2], unpack("\x93\x00\x01\x02"))
    io = StringIO.new
    io.write("\x9F")
    array = 15.times.map { |i|
      io.write("\xCD") # uint16: i
      io.write([i].pack("n"))
      i
    }
    io.rewind
    assert_equal(array, @module.unpack(io))
  end

  def test_unpack__array16
    assert_equal([], unpack("\xDC\x00\x00"))
    assert_equal([0, 1, 2], unpack("\xDC\x00\x03\x00\x01\x02"))
    io = StringIO.new
    io.write("\xDC\xFF\xFF")
    array = 0xFFFF.times.map { |i|
      io.write("\xCD") # uint16: i
      io.write([i].pack("n"))
      i
    }
    io.rewind
    assert_equal(array, @module.unpack(io))
  end

  def test_unpack__array32
    assert_equal([], unpack("\xDD\x00\x00\x00\x00"))
    assert_equal([0, 1, 2], unpack("\xDD\x00\x00\x00\x03\x00\x01\x02"))
    io = StringIO.new
    io.write("\xDD\x00\x01\x00\x00")
    array = 0x10000.times.map { |i|
      io.write("\xCD") # uint16: i
      io.write([i].pack("n"))
      i
    }
    io.rewind
    assert_equal(array, @module.unpack(io))
  end

  def test_unpack__fixmap
    assert_equal({}, unpack("\x80"))
    assert_equal(
      {0 => 1, 2 => 3},
      unpack("\x82\x00\x01\x02\x03"))
    io = StringIO.new
    io.write("\x8F")
    hash = 15.times.inject({}) { |memo, i|
      io.write("\xCD") # uint16: i
      io.write([i].pack("n"))
      io.write("\x00") # fixnum: 0
      memo[i] = 0
      memo
    }
    io.rewind
    assert_equal(hash, @module.unpack(io))
  end

  def test_unpack__map16
    assert_equal({}, unpack("\xDE\x00\x00"))
    assert_equal(
      {0 => 1, 2 => 3},
      unpack("\xDE\x00\x02\x00\x01\x02\x03"))
    io = StringIO.new
    io.write("\xDE\xFF\xFF")
    hash = 0xFFFF.times.inject({}) { |memo, i|
      io.write("\xCD") # uint16: i
      io.write([i].pack("n"))
      io.write("\x00") # fixnum: 0
      memo[i] = 0
      memo
    }
    io.rewind
    assert_equal(hash, @module.unpack(io))
  end

  def test_unpack__map32
    assert_equal({}, unpack("\xDF\x00\x00\x00\x00"))
    assert_equal(
      {0 => 1, 2 => 3},
      unpack("\xDF\x00\x00\x00\x02\x00\x01\x02\x03"))
    io = StringIO.new
    io.write("\xDF\x00\x01\x00\x00")
    hash = 0x10000.times.inject({}) { |memo, i|
      io.write("\xCD") # uint16: i
      io.write([i].pack("n"))
      io.write("\x00") # fixnum: 0
      memo[i] = 0
      memo
    }
    io.rewind
    assert_equal(hash, @module.unpack(io))
  end

  private

  # Wraps a raw binary string in a StringIO and feeds it to the unpacker.
  def unpack(binary)
    return @module.unpack(StringIO.new(binary))
  end
end
リファクタリング
#! ruby -Ku
# coding: utf-8
require "test_helper"
require "stringio"
require "msgpack_pure/unpacker"
class UnpackerTest < Test::Unit::TestCase
def setup
@module = MessagePackPure::Unpacker
end
def test_unpack__positive_fixnum
assert_equal(+0x00, unpack("\x00"))
assert_equal(+0x7F, unpack("\x7F"))
end
def test_unpack__negative_fixnum
assert_equal(-0x01, unpack("\xFF"))
assert_equal(-0x20, unpack("\xE0"))
end
def test_unpack__uint8
assert_equal(+0x00, unpack("\xCC\x00"))
assert_equal(+0xFF, unpack("\xCC\xFF"))
end
def test_unpack__uint16
assert_equal(+0x0000, unpack("\xCD\x00\x00"))
assert_equal(+0x0001, unpack("\xCD\x00\x01"))
assert_equal(+0xFFFF, unpack("\xCD\xFF\xFF"))
end
def test_unpack__uint32
assert_equal(+0x00000000, unpack("\xCE\x00\x00\x00\x00"))
assert_equal(+0x00000001, unpack("\xCE\x00\x00\x00\x01"))
assert_equal(+0xFFFFFFFF, unpack("\xCE\xFF\xFF\xFF\xFF"))
end
def test_unpack__uint64
assert_equal(+0x0000000000000000, unpack("\xCF\x00\x00\x00\x00\x00\x00\x00\x00"))
assert_equal(+0x0000000000000001, unpack("\xCF\x00\x00\x00\x00\x00\x00\x00\x01"))
assert_equal(+0xFFFFFFFFFFFFFFFF, unpack("\xCF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"))
end
def test_unpack__int8
assert_equal(+0x00, unpack("\xD0\x00"))
assert_equal(+0x7F, unpack("\xD0\x7F"))
assert_equal(-0x01, unpack("\xD0\xFF"))
assert_equal(-0x80, unpack("\xD0\x80"))
end
def test_unpack__int16
assert_equal(+0x0000, unpack("\xD1\x00\x00"))
assert_equal(+0x7FFF, unpack("\xD1\x7F\xFF"))
assert_equal(-0x0001, unpack("\xD1\xFF\xFF"))
assert_equal(-0x8000, unpack("\xD1\x80\x00"))
end
def test_unpack__int32
assert_equal(+0x00000000, unpack("\xD2\x00\x00\x00\x00"))
assert_equal(+0x7FFFFFFF, unpack("\xD2\x7F\xFF\xFF\xFF"))
assert_equal(-0x00000001, unpack("\xD2\xFF\xFF\xFF\xFF"))
assert_equal(-0x80000000, unpack("\xD2\x80\x00\x00\x00"))
end
def test_unpack__int64
assert_equal(+0x0000000000000000, unpack("\xD3\x00\x00\x00\x00\x00\x00\x00\x00"))
assert_equal(+0x7FFFFFFFFFFFFFFF, unpack("\xD3\x7F\xFF\xFF\xFF\xFF\xFF\xFF\xFF"))
assert_equal(-0x0000000000000001, unpack("\xD3\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"))
assert_equal(-0x8000000000000000, unpack("\xD3\x80\x00\x00\x00\x00\x00\x00\x00"))
end
def test_unpack__nil
assert_equal(nil, unpack("\xC0"))
end
def test_unpack__true
assert_equal(true, unpack("\xC3"))
end
def test_unpack__false
assert_equal(false, unpack("\xC2"))
end
def test_unpack__float
assert_equal(+0.0, unpack("\xCA\x00\x00\x00\x00"))
assert_equal(+0.5, unpack("\xCA\x3F\x00\x00\x00"))
assert_equal(-0.5, unpack("\xCA\xBF\x00\x00\x00"))
end
def test_unpack__double
assert_equal(+0.0, unpack("\xCB\x00\x00\x00\x00\x00\x00\x00\x00"))
assert_equal(+0.5, unpack("\xCB\x3F\xE0\x00\x00\x00\x00\x00\x00"))
assert_equal(-0.5, unpack("\xCB\xBF\xE0\x00\x00\x00\x00\x00\x00"))
end
def test_unpack__fixraw
assert_equal("", unpack("\xA0"))
assert_equal("ABC", unpack("\xA3ABC"))
assert_equal("A" * 31, unpack("\xBF" + "A" * 31))
end
def test_unpack__raw16
assert_equal("", unpack("\xDA\x00\x00"))
assert_equal("ABC", unpack("\xDA\x00\x03ABC"))
assert_equal(
"A" * 0xFFFF,
unpack("\xDA\xFF\xFF" + "A" * 0xFFFF))
end
def test_unpack__raw32
assert_equal("", unpack("\xDB\x00\x00\x00\x00"))
assert_equal("ABC", unpack("\xDB\x00\x00\x00\x03ABC"))
assert_equal(
"A" * 0x10000,
unpack("\xDB\x00\x01\x00\x00" + "A" * 0x10000))
end
def test_unpack__fixarray
assert_equal([], unpack("\x90"))
assert_equal([0, 1, 2], unpack("\x93\x00\x01\x02"))
io = StringIO.new("\x9F", "a+")
array = 15.times.map { |i|
io.write("\xCD") # uint16: i
io.write([i].pack("n"))
i
}
io.rewind
assert_equal(array, @module.unpack(io))
end
def test_unpack__array16
assert_equal([], unpack("\xDC\x00\x00"))
assert_equal([0, 1, 2], unpack("\xDC\x00\x03\x00\x01\x02"))
io = StringIO.new("\xDC\xFF\xFF", "a+")
array = 0xFFFF.times.map { |i|
io.write("\xCD") # uint16: i
io.write([i].pack("n"))
i
}
io.rewind
assert_equal(array, @module.unpack(io))
end
def test_unpack__array32
assert_equal([], unpack("\xDD\x00\x00\x00\x00"))
assert_equal([0, 1, 2], unpack("\xDD\x00\x00\x00\x03\x00\x01\x02"))
io = StringIO.new("\xDD\x00\x01\x00\x00", "a+")
array = 0x10000.times.map { |i|
io.write("\xCD") # uint16: i
io.write([i].pack("n"))
i
}
io.rewind
assert_equal(array, @module.unpack(io))
end
def test_unpack__fixmap
assert_equal({}, unpack("\x80"))
assert_equal(
{0 => 1, 2 => 3},
unpack("\x82\x00\x01\x02\x03"))
io = StringIO.new("\x8F", "a+")
hash = 15.times.inject({}) { |memo, i|
io.write("\xCD") # uint16: i
io.write([i].pack("n"))
io.write("\x00") # fixnum: 0
memo[i] = 0
memo
}
io.rewind
assert_equal(hash, @module.unpack(io))
end
# map16: 0xDE followed by a big-endian uint16 pair count.
def test_unpack__map16
assert_equal({}, unpack("\xDE\x00\x00"))
assert_equal(
{0 => 1, 2 => 3},
unpack("\xDE\x00\x02\x00\x01\x02\x03"))
# Maximum count (0xFFFF): pairs of (uint16 key i => fixnum 0).
io = StringIO.new("\xDE\xFF\xFF", "a+")
hash = 0xFFFF.times.inject({}) { |memo, i|
io.write("\xCD") # uint16: i
io.write([i].pack("n"))
io.write("\x00") # fixnum: 0
memo[i] = 0
memo
}
io.rewind
assert_equal(hash, @module.unpack(io))
end
# map32: 0xDF followed by a big-endian uint32 pair count.
def test_unpack__map32
assert_equal({}, unpack("\xDF\x00\x00\x00\x00"))
assert_equal(
{0 => 1, 2 => 3},
unpack("\xDF\x00\x00\x00\x02\x00\x01\x02\x03"))
# 0x10000 is the smallest count that requires the 32-bit header form.
io = StringIO.new("\xDF\x00\x01\x00\x00", "a+")
hash = 0x10000.times.inject({}) { |memo, i|
io.write("\xCD") # uint16: i
io.write([i].pack("n"))
io.write("\x00") # fixnum: 0
memo[i] = 0
memo
}
io.rewind
assert_equal(hash, @module.unpack(io))
end
private

# Decode a single MessagePack value from +binary+ by wrapping it in a
# read-only StringIO and delegating to the module under test.
def unpack(binary)
  @module.unpack(StringIO.new(binary, "r"))
end
end
|
require 'test_helper'
# Validates a Reform::Contract directly against the wrapped model's current
# state: #validate takes no incoming params here.
class ContractValidateTest < MiniTest::Spec
Song = Struct.new(:title, :album, :composer)
Album = Struct.new(:name, :songs, :artist)
Artist = Struct.new(:name)
# Contract mirroring the Album object graph; nested blocks declare the
# properties and validations for each nested object.
class AlbumForm < Reform::Contract
property :name
validates :name, presence: true
collection :songs do
property :title
validates :title, presence: true
property :composer do
validates :name, presence: true
property :name
end
end
property :artist do
property :name
end
end
let (:song) { Song.new("Broken") }
let (:song_with_composer) { Song.new("Resist Stance", nil, composer) }
let (:composer) { Artist.new("Greg Graffin") }
let (:artist) { Artist.new("Bad Religion") }
let (:album) { Album.new("The Dissent Of Man", [song, song_with_composer], artist) }
let (:form) { AlbumForm.new(album) }
# valid
it do
form.validate.must_equal true
form.errors.messages.inspect.must_equal "{}"
end
# invalid
it do
album.songs[1].composer.name = nil
album.name = nil
form.validate.must_equal false
# Nested errors surface under dotted, symbolized keys ("songs.composer.name").
form.errors.messages.inspect.must_equal "{:name=>[\"can't be blank\"], :\"songs.composer.name\"=>[\"can't be blank\"]}"
end
end
# no configuration results in "sync" (formerly known as parse_strategy: :sync).
# Validates a Reform::Form with no populator configuration: nested forms are
# "sync"ed from the existing model objects, so object identity must survive
# #validate and the model must remain untouched until #sync.
class ValidateWithoutConfigurationTest < MiniTest::Spec
  Song = Struct.new(:title, :album, :composer)
  Album = Struct.new(:name, :songs, :artist)
  Artist = Struct.new(:name)

  class AlbumForm < Reform::Form
    property :name
    validates :name, presence: true

    collection :songs do
      property :title
      validates :title, presence: true

      property :composer do
        property :name
        validates :name, presence: true
      end
    end

    property :artist do
      property :name
    end
  end

  let (:song) { Song.new("Broken") }
  let (:song_with_composer) { Song.new("Resist Stance", nil, composer) }
  let (:composer) { Artist.new("Greg Graffin") }
  let (:artist) { Artist.new("Bad Religion") }
  let (:album) { Album.new("The Dissent Of Man", [song, song_with_composer], artist) }
  let (:form) { AlbumForm.new(album) }

  # valid.
  it do
    object_ids = {song: form.songs[0].object_id, song_with_composer: form.songs[1].object_id,
      artist: form.artist.object_id, composer: form.songs[1].composer.object_id}

    form.validate(
      "name" => "Best Of",
      "songs" => [{"title" => "Fallout"}, {"title" => "Roxanne", "composer" => {"name" => "Sting"}}],
      "artist" => {"name" => "The Police"},
    ).must_equal true

    form.errors.messages.inspect.must_equal "{}"

    # form has updated.
    form.name.must_equal "Best Of"
    form.songs[0].title.must_equal "Fallout"
    form.songs[1].title.must_equal "Roxanne"
    form.songs[1].composer.name.must_equal "Sting"
    form.artist.name.must_equal "The Police"

    # objects are still the same.
    form.songs[0].object_id.must_equal object_ids[:song]
    form.songs[1].object_id.must_equal object_ids[:song_with_composer]
    form.songs[1].composer.object_id.must_equal object_ids[:composer]
    form.artist.object_id.must_equal object_ids[:artist]

    # model has not changed, yet.
    album.name.must_equal "The Dissent Of Man"
    album.songs[0].title.must_equal "Broken"
    album.songs[1].title.must_equal "Resist Stance"
    album.songs[1].composer.name.must_equal "Greg Graffin"
    album.artist.name.must_equal "Bad Religion"
  end

  # with symbols.
  it do
    form.validate(
      name: "Best Of",
      songs: [{title: "The X-Creep"}, {title: "Trudging", composer: {name: "SNFU"}}],
      artist: {name: "The Police"},
    ).must_equal true

    form.name.must_equal "Best Of"
    form.songs[0].title.must_equal "The X-Creep"
    form.songs[1].title.must_equal "Trudging"
    form.songs[1].composer.name.must_equal "SNFU"
    form.artist.name.must_equal "The Police"
  end

  # throws exception when no populators.
  it do
    album = Album.new("The Dissent Of Man", [])
    # Assert the specific error class rather than a bare assert_raises: the
    # bare form accepts any StandardError and can mask unrelated failures.
    # The updated copy of this suite pins RuntimeError explicitly.
    assert_raises RuntimeError do
      AlbumForm.new(album).validate(songs: {title: "Resist-Stance"})
    end
  end
end
# Exercises the :deserializer option: :instance lambdas reuse an existing
# nested object when present, otherwise build a fresh Song/Artist, which lets
# #validate grow the songs collection from incoming params.
class ValidateWithDeserializerOptionTest < MiniTest::Spec
Song = Struct.new(:title, :album, :composer)
Album = Struct.new(:name, :songs, :artist)
Artist = Struct.new(:name)
class AlbumForm < Reform::Form
property :name
validates :name, presence: true
collection :songs,
deserializer: {instance: lambda { |fragment, index, options|
collection = options.binding.get
(item = collection[index]) ? item : collection.insert(index, Song.new) },
setter: nil} do
property :title
validates :title, presence: true
property :composer, deserializer: { instance: lambda { |fragment, options| (item = options.binding.get) ? item : Artist.new } } do
property :name
validates :name, presence: true
end
end
property :artist, deserializer: { instance: lambda { |fragment, options| (item = options.binding.get) ? item : Artist.new } } do
property :name
validates :name, presence: true
end
end
let (:song) { Song.new("Broken") }
let (:song_with_composer) { Song.new("Resist Stance", nil, composer) }
let (:composer) { Artist.new("Greg Graffin") }
let (:artist) { Artist.new("Bad Religion") }
let (:album) { Album.new("The Dissent Of Man", [song, song_with_composer], artist) }
let (:form) { AlbumForm.new(album) }
# valid.
it("xxx") do
form.validate(
"name" => "Best Of",
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne", "composer" => {"name" => "Sting"}}],
"artist" => {"name" => "The Police"},
).must_equal true
form.errors.messages.inspect.must_equal "{}"
# form has updated.
form.name.must_equal "Best Of"
form.songs[0].title.must_equal "Fallout"
form.songs[1].title.must_equal "Roxanne"
form.songs[1].composer.name.must_equal "Sting"
form.artist.name.must_equal "The Police"
# model has not changed, yet.
album.name.must_equal "The Dissent Of Man"
album.songs[0].title.must_equal "Broken"
album.songs[1].title.must_equal "Resist Stance"
album.songs[1].composer.name.must_equal "Greg Graffin"
album.artist.name.must_equal "Bad Religion"
end
# invalid.
it do
form.validate(
"name" => "",
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne", "composer" => {"name" => ""}}],
"artist" => {"name" => ""},
).must_equal false
form.errors.messages.inspect.must_equal "{:name=>[\"can't be blank\"], :\"songs.composer.name\"=>[\"can't be blank\"], :\"artist.name\"=>[\"can't be blank\"]}"
end
# adding to collection via :instance.
# valid.
it do
# A third fragment has no model counterpart, so the :instance lambda
# inserts a new Song at index 2.
form.validate(
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne"}, {"title" => "Rime Of The Ancient Mariner"}],
).must_equal true
form.errors.messages.inspect.must_equal "{}"
# form has updated.
form.name.must_equal "The Dissent Of Man"
form.songs[0].title.must_equal "Fallout"
form.songs[1].title.must_equal "Roxanne"
form.songs[1].composer.name.must_equal "Greg Graffin"
form.songs[1].title.must_equal "Roxanne"
form.songs[2].title.must_equal "Rime Of The Ancient Mariner" # new song added.
form.songs.size.must_equal 3
form.artist.name.must_equal "Bad Religion"
# model has not changed, yet.
album.name.must_equal "The Dissent Of Man"
album.songs[0].title.must_equal "Broken"
album.songs[1].title.must_equal "Resist Stance"
album.songs[1].composer.name.must_equal "Greg Graffin"
album.songs.size.must_equal 2
album.artist.name.must_equal "Bad Religion"
end
# allow writeable: false even in the deserializer.
class SongForm < Reform::Form
property :title, deserializer: {writeable: false}
end
it do
form = SongForm.new(song = Song.new)
form.validate("title" => "Ignore me!")
form.title.must_equal nil
form.title = "Unopened"
form.sync # only the deserializer is marked as not-writeable.
song.title.must_equal "Unopened"
end
end
# # not sure if we should catch that in Reform or rather do that in disposable. this is https://github.com/apotonick/reform/pull/104
# # describe ":populator with :empty" do
# # let (:form) {
# # Class.new(Reform::Form) do
# # collection :songs, :empty => true, :populator => lambda { |fragment, index, args|
# # songs[index] = args.binding[:form].new(Song.new)
# # } do
# # property :title
# # end
# # end
# # }
# # let (:params) {
# # {
# # "songs" => [{"title" => "Fallout"}, {"title" => "Roxanne"}]
# # }
# # }
# # subject { form.new(Album.new("Hits", [], [])) }
# # before { subject.validate(params) }
# # it { subject.songs[0].title.must_equal "Fallout" }
# # it { subject.songs[1].title.must_equal "Roxanne" }
# # end
# # test cardinalities.
# describe "with empty collection and cardinality" do
# let (:album) { Album.new }
# subject { Class.new(Reform::Form) do
# include Reform::Form::ActiveModel
# model :album
# collection :songs do
# property :title
# end
# property :hit do
# property :title
# end
# validates :songs, :length => {:minimum => 1}
# validates :hit, :presence => true
# end.new(album) }
# describe "invalid" do
# before { subject.validate({}).must_equal false }
# it do
# # ensure that only hit and songs keys are present
# subject.errors.messages.keys.sort.must_equal([:hit, :songs])
# # validate content of hit and songs keys
# subject.errors.messages[:hit].must_equal(["can't be blank"])
# subject.errors.messages[:songs].first.must_match(/\Ais too short \(minimum is 1 characters?\)\z/)
# end
# end
# describe "valid" do
# let (:album) { Album.new(nil, Song.new, [Song.new("Urban Myth")]) }
# before {
# subject.validate({"songs" => [{"title"=>"Daddy, Brother, Lover, Little Boy"}], "hit" => {"title"=>"The Horse"}}).
# must_equal true
# }
# it { subject.errors.messages.must_equal({}) }
# end
# end
# # providing manual validator method allows accessing form's API.
# describe "with ::validate" do
# let (:form) {
# Class.new(Reform::Form) do
# property :title
# validate :title?
# def title?
# errors.add :title, "not lowercase" if title == "Fallout"
# end
# end
# }
# let (:params) { {"title" => "Fallout"} }
# let (:song) { Song.new("Englishman") }
# subject { form.new(song) }
# before { @res = subject.validate(params) }
# it { @res.must_equal false }
# it { subject.errors.messages.must_equal({:title=>["not lowercase"]}) }
# end
# # overriding the reader for a nested form should only be considered when rendering.
# describe "with overridden reader for nested form" do
# let (:form) {
# Class.new(Reform::Form) do
# property :band, :populate_if_empty => lambda { |*| Band.new } do
# property :label
# end
# collection :songs, :populate_if_empty => lambda { |*| Song.new } do
# property :title
# end
# def band
# raise "only call me when rendering the form!"
# end
# def songs
# raise "only call me when rendering the form!"
# end
# end.new(album)
# }
# let (:album) { Album.new }
# # don't use #artist when validating!
# it do
# form.validate("band" => {"label" => "Hellcat"}, "songs" => [{"title" => "Stand Your Ground"}, {"title" => "Otherside"}])
# form.sync
# album.band.label.must_equal "Hellcat"
# album.songs.first.title.must_equal "Stand Your Ground"
# end
# end
# end
fix tests for 4.0.
require 'test_helper'
# Validates a Reform::Contract directly against the wrapped model's current
# state: #validate takes no incoming params here.
class ContractValidateTest < MiniTest::Spec
Song = Struct.new(:title, :album, :composer)
Album = Struct.new(:name, :songs, :artist)
Artist = Struct.new(:name)
# Contract mirroring the Album object graph; nested blocks declare the
# properties and validations for each nested object.
class AlbumForm < Reform::Contract
property :name
validates :name, presence: true
collection :songs do
property :title
validates :title, presence: true
property :composer do
validates :name, presence: true
property :name
end
end
property :artist do
property :name
end
end
let (:song) { Song.new("Broken") }
let (:song_with_composer) { Song.new("Resist Stance", nil, composer) }
let (:composer) { Artist.new("Greg Graffin") }
let (:artist) { Artist.new("Bad Religion") }
let (:album) { Album.new("The Dissent Of Man", [song, song_with_composer], artist) }
let (:form) { AlbumForm.new(album) }
# valid
it do
form.validate.must_equal true
form.errors.messages.inspect.must_equal "{}"
end
# invalid
it do
album.songs[1].composer.name = nil
album.name = nil
form.validate.must_equal false
# Nested errors surface under dotted, symbolized keys ("songs.composer.name").
form.errors.messages.inspect.must_equal "{:name=>[\"can't be blank\"], :\"songs.composer.name\"=>[\"can't be blank\"]}"
end
end
# no configuration results in "sync" (formerly known as parse_strategy: :sync).
# Validates a Reform::Form with no populator configuration: nested forms are
# synced from the model's existing objects, so object identity must survive
# #validate and the model must remain untouched until #sync.
class ValidateWithoutConfigurationTest < MiniTest::Spec
Song = Struct.new(:title, :album, :composer)
Album = Struct.new(:name, :songs, :artist)
Artist = Struct.new(:name)
class AlbumForm < Reform::Form
property :name
validates :name, presence: true
collection :songs do
property :title
validates :title, presence: true
property :composer do
property :name
validates :name, presence: true
end
end
property :artist do
property :name
end
end
let (:song) { Song.new("Broken") }
let (:song_with_composer) { Song.new("Resist Stance", nil, composer) }
let (:composer) { Artist.new("Greg Graffin") }
let (:artist) { Artist.new("Bad Religion") }
let (:album) { Album.new("The Dissent Of Man", [song, song_with_composer], artist) }
let (:form) { AlbumForm.new(album) }
# valid.
it do
# Capture object ids up front so identity preservation can be asserted below.
object_ids = {song: form.songs[0].object_id, song_with_composer: form.songs[1].object_id,
artist: form.artist.object_id, composer: form.songs[1].composer.object_id}
form.validate(
"name" => "Best Of",
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne", "composer" => {"name" => "Sting"}}],
"artist" => {"name" => "The Police"},
).must_equal true
form.errors.messages.inspect.must_equal "{}"
# form has updated.
form.name.must_equal "Best Of"
form.songs[0].title.must_equal "Fallout"
form.songs[1].title.must_equal "Roxanne"
form.songs[1].composer.name.must_equal "Sting"
form.artist.name.must_equal "The Police"
# objects are still the same.
form.songs[0].object_id.must_equal object_ids[:song]
form.songs[1].object_id.must_equal object_ids[:song_with_composer]
form.songs[1].composer.object_id.must_equal object_ids[:composer]
form.artist.object_id.must_equal object_ids[:artist]
# model has not changed, yet.
album.name.must_equal "The Dissent Of Man"
album.songs[0].title.must_equal "Broken"
album.songs[1].title.must_equal "Resist Stance"
album.songs[1].composer.name.must_equal "Greg Graffin"
album.artist.name.must_equal "Bad Religion"
end
# with symbols.
it do
form.validate(
name: "Best Of",
songs: [{title: "The X-Creep"}, {title: "Trudging", composer: {name: "SNFU"}}],
artist: {name: "The Police"},
).must_equal true
form.name.must_equal "Best Of"
form.songs[0].title.must_equal "The X-Creep"
form.songs[1].title.must_equal "Trudging"
form.songs[1].composer.name.must_equal "SNFU"
form.artist.name.must_equal "The Police"
end
# throws exception when no populators.
it do
album = Album.new("The Dissent Of Man", [])
assert_raises RuntimeError do
AlbumForm.new(album).validate(songs: {title: "Resist-Stance"})
end
end
end
# Exercises the :deserializer option: :instance lambdas reuse an existing
# nested object when present, otherwise build a fresh Song/Artist, which lets
# #validate grow the songs collection from incoming params.
class ValidateWithDeserializerOptionTest < MiniTest::Spec
Song = Struct.new(:title, :album, :composer)
Album = Struct.new(:name, :songs, :artist)
Artist = Struct.new(:name)
class AlbumForm < Reform::Form
property :name
validates :name, presence: true
collection :songs,
deserializer: {instance: lambda { |fragment, index, options|
collection = options.binding.get
(item = collection[index]) ? item : collection.insert(index, Song.new) },
setter: nil} do
property :title
validates :title, presence: true
property :composer, deserializer: { instance: lambda { |fragment, options| (item = options.binding.get) ? item : Artist.new } } do
property :name
validates :name, presence: true
end
end
property :artist, deserializer: { instance: lambda { |fragment, options| (item = options.binding.get) ? item : Artist.new } } do
property :name
validates :name, presence: true
end
end
let (:song) { Song.new("Broken") }
let (:song_with_composer) { Song.new("Resist Stance", nil, composer) }
let (:composer) { Artist.new("Greg Graffin") }
let (:artist) { Artist.new("Bad Religion") }
let (:album) { Album.new("The Dissent Of Man", [song, song_with_composer], artist) }
let (:form) { AlbumForm.new(album) }
# valid.
it("xxx") do
form.validate(
"name" => "Best Of",
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne", "composer" => {"name" => "Sting"}}],
"artist" => {"name" => "The Police"},
).must_equal true
form.errors.messages.inspect.must_equal "{}"
# form has updated.
form.name.must_equal "Best Of"
form.songs[0].title.must_equal "Fallout"
form.songs[1].title.must_equal "Roxanne"
form.songs[1].composer.name.must_equal "Sting"
form.artist.name.must_equal "The Police"
# model has not changed, yet.
album.name.must_equal "The Dissent Of Man"
album.songs[0].title.must_equal "Broken"
album.songs[1].title.must_equal "Resist Stance"
album.songs[1].composer.name.must_equal "Greg Graffin"
album.artist.name.must_equal "Bad Religion"
end
# invalid.
it do
form.validate(
"name" => "",
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne", "composer" => {"name" => ""}}],
"artist" => {"name" => ""},
).must_equal false
form.errors.messages.inspect.must_equal "{:name=>[\"can't be blank\"], :\"songs.composer.name\"=>[\"can't be blank\"], :\"artist.name\"=>[\"can't be blank\"]}"
end
# adding to collection via :instance.
# valid.
it do
# A third fragment has no model counterpart, so the :instance lambda
# inserts a new Song at index 2.
form.validate(
"songs" => [{"title" => "Fallout"}, {"title" => "Roxanne"}, {"title" => "Rime Of The Ancient Mariner"}],
).must_equal true
form.errors.messages.inspect.must_equal "{}"
# form has updated.
form.name.must_equal "The Dissent Of Man"
form.songs[0].title.must_equal "Fallout"
form.songs[1].title.must_equal "Roxanne"
form.songs[1].composer.name.must_equal "Greg Graffin"
form.songs[1].title.must_equal "Roxanne"
form.songs[2].title.must_equal "Rime Of The Ancient Mariner" # new song added.
form.songs.size.must_equal 3
form.artist.name.must_equal "Bad Religion"
# model has not changed, yet.
album.name.must_equal "The Dissent Of Man"
album.songs[0].title.must_equal "Broken"
album.songs[1].title.must_equal "Resist Stance"
album.songs[1].composer.name.must_equal "Greg Graffin"
album.songs.size.must_equal 2
album.artist.name.must_equal "Bad Religion"
end
# allow writeable: false even in the deserializer.
class SongForm < Reform::Form
property :title, deserializer: {writeable: false}
end
it do
form = SongForm.new(song = Song.new)
form.validate("title" => "Ignore me!")
form.title.must_equal nil
form.title = "Unopened"
form.sync # only the deserializer is marked as not-writeable.
song.title.must_equal "Unopened"
end
end
# # not sure if we should catch that in Reform or rather do that in disposable. this is https://github.com/apotonick/reform/pull/104
# # describe ":populator with :empty" do
# # let (:form) {
# # Class.new(Reform::Form) do
# # collection :songs, :empty => true, :populator => lambda { |fragment, index, args|
# # songs[index] = args.binding[:form].new(Song.new)
# # } do
# # property :title
# # end
# # end
# # }
# # let (:params) {
# # {
# # "songs" => [{"title" => "Fallout"}, {"title" => "Roxanne"}]
# # }
# # }
# # subject { form.new(Album.new("Hits", [], [])) }
# # before { subject.validate(params) }
# # it { subject.songs[0].title.must_equal "Fallout" }
# # it { subject.songs[1].title.must_equal "Roxanne" }
# # end
# # test cardinalities.
# describe "with empty collection and cardinality" do
# let (:album) { Album.new }
# subject { Class.new(Reform::Form) do
# include Reform::Form::ActiveModel
# model :album
# collection :songs do
# property :title
# end
# property :hit do
# property :title
# end
# validates :songs, :length => {:minimum => 1}
# validates :hit, :presence => true
# end.new(album) }
# describe "invalid" do
# before { subject.validate({}).must_equal false }
# it do
# # ensure that only hit and songs keys are present
# subject.errors.messages.keys.sort.must_equal([:hit, :songs])
# # validate content of hit and songs keys
# subject.errors.messages[:hit].must_equal(["can't be blank"])
# subject.errors.messages[:songs].first.must_match(/\Ais too short \(minimum is 1 characters?\)\z/)
# end
# end
# describe "valid" do
# let (:album) { Album.new(nil, Song.new, [Song.new("Urban Myth")]) }
# before {
# subject.validate({"songs" => [{"title"=>"Daddy, Brother, Lover, Little Boy"}], "hit" => {"title"=>"The Horse"}}).
# must_equal true
# }
# it { subject.errors.messages.must_equal({}) }
# end
# end
# # providing manual validator method allows accessing form's API.
# describe "with ::validate" do
# let (:form) {
# Class.new(Reform::Form) do
# property :title
# validate :title?
# def title?
# errors.add :title, "not lowercase" if title == "Fallout"
# end
# end
# }
# let (:params) { {"title" => "Fallout"} }
# let (:song) { Song.new("Englishman") }
# subject { form.new(song) }
# before { @res = subject.validate(params) }
# it { @res.must_equal false }
# it { subject.errors.messages.must_equal({:title=>["not lowercase"]}) }
# end
# # overriding the reader for a nested form should only be considered when rendering.
# describe "with overridden reader for nested form" do
# let (:form) {
# Class.new(Reform::Form) do
# property :band, :populate_if_empty => lambda { |*| Band.new } do
# property :label
# end
# collection :songs, :populate_if_empty => lambda { |*| Song.new } do
# property :title
# end
# def band
# raise "only call me when rendering the form!"
# end
# def songs
# raise "only call me when rendering the form!"
# end
# end.new(album)
# }
# let (:album) { Album.new }
# # don't use #artist when validating!
# it do
# form.validate("band" => {"label" => "Hellcat"}, "songs" => [{"title" => "Stand Your Ground"}, {"title" => "Otherside"}])
# form.sync
# album.band.label.must_equal "Hellcat"
# album.songs.first.title.must_equal "Stand Your Ground"
# end
# end
# end |
# -*- coding:utf-8; mode:ruby; -*-
## user settings
# If your keyboard already has a left Ctrl key to the left of the "A" key,
# set this to false (no CapsLock/Ctrl swap needed).
@swap_left_ctrl_with_caps = true
# Swap left Option with left Command (for Apple keyboards).
@swap_left_opt_with_left_cmd = true
# Load an optional per-user local config that can override the settings above.
@local_config = File.join "#{ENV['HOME']}", ".rbindkeys.local.rb"
if File.file? @local_config
  puts "load a local config"
  # File.read avoids the file handle that File.new(...).read left open.
  eval File.read(@local_config)
end
##
# Apply the low-level key swaps selected above; pre_bind_key remaps a
# physical key before the combination binds below are resolved.
if @swap_left_ctrl_with_caps
pre_bind_key KEY_CAPSLOCK, KEY_LEFTCTRL
pre_bind_key KEY_LEFTCTRL, KEY_CAPSLOCK
end
if @swap_left_opt_with_left_cmd
pre_bind_key KEY_LEFTMETA, KEY_LEFTALT
pre_bind_key KEY_LEFTALT, KEY_LEFTMETA
end
# Emacs-style cursor movement: C-f/C-b/C-p/C-n; M-f/M-b for word motion.
bind_key [KEY_LEFTCTRL, KEY_F], KEY_RIGHT
bind_key [KEY_LEFTCTRL, KEY_B], KEY_LEFT
bind_key [KEY_LEFTCTRL, KEY_P], KEY_UP
bind_key [KEY_LEFTCTRL, KEY_N], KEY_DOWN
bind_key [KEY_LEFTALT, KEY_F], [KEY_LEFTCTRL, KEY_RIGHT]
bind_key [KEY_LEFTALT, KEY_B], [KEY_LEFTCTRL, KEY_LEFT]
# Line/page motion and Emacs-style editing keys.
bind_key [KEY_LEFTCTRL, KEY_A], KEY_HOME
bind_key [KEY_LEFTCTRL, KEY_E], KEY_END
bind_key [KEY_LEFTCTRL, KEY_V], KEY_PAGEDOWN
bind_key [KEY_LEFTALT, KEY_V], KEY_PAGEUP
bind_key [KEY_LEFTCTRL, KEY_D], KEY_DELETE
bind_key [KEY_LEFTCTRL, KEY_H], KEY_BACKSPACE
bind_key [KEY_LEFTCTRL, KEY_M], KEY_ENTER
bind_key [KEY_LEFTCTRL, KEY_I], KEY_TAB
bind_key [KEY_LEFTCTRL, KEY_LEFTBRACE], KEY_ESC
bind_key [KEY_LEFTCTRL, KEY_S], [KEY_LEFTCTRL, KEY_F]
# give a block sample
# Toggle the CapsLock LED on every CapsLock press: @caps_led_state flips
# between 0 and 1 via XOR and is emitted as an LED event.
@caps_led_state = 0
bind_key KEY_CAPSLOCK do |event, operator|
@caps_led_state = @caps_led_state ^ 1
operator.send_event EV_LED, LED_CAPSL, @caps_led_state
end
# binds related kill-ring
# Map Emacs cut/copy/yank onto conventional C-x/C-c/C-v shortcuts.
bind_key [KEY_LEFTCTRL, KEY_W], [KEY_LEFTCTRL,KEY_X]
bind_key [KEY_LEFTALT, KEY_W], [KEY_LEFTCTRL,KEY_C]
bind_key [KEY_LEFTCTRL, KEY_Y], [KEY_LEFTCTRL,KEY_V]
# kill line
# C-k: emulate kill-line by shift-selecting to end of line, then cutting.
bind_key [KEY_LEFTCTRL, KEY_K] do |event, operator|
# select to end of line
operator.press_key KEY_LEFTSHIFT
operator.press_key KEY_END
operator.release_key KEY_END
operator.release_key KEY_LEFTSHIFT
# cut
operator.press_key KEY_LEFTCTRL
operator.press_key KEY_X
operator.release_key KEY_X
operator.release_key KEY_LEFTCTRL
end
# region mode
# Emulated Emacs region (selection) mode; toggled by the handlers below.
@region_mode = false
# Leave region (selection) mode: release the held shift, tap right-arrow once
# to collapse the selection, and clear the mode flag.
def cancel_region(operator)
  operator.release_key KEY_LEFTSHIFT
  [KEY_RIGHT].each do |key|
    operator.press_key key
    operator.release_key key
  end
  @region_mode = false
end
# Enter region (selection) mode: hold shift so subsequent movement keys extend
# the selection, and set the mode flag.
# Fix: the body referenced the undefined name +operator+ instead of the +op+
# parameter, which raised NameError the first time the bind fired.
def start_region op
  op.press_key KEY_LEFTSHIFT
  @region_mode = true
end
# C-SPC: start a new region, cancelling any region already in progress.
bind_key [KEY_LEFTCTRL, KEY_SPACE] do |event, operator|
  cancel_region operator if @region_mode
  start_region operator
end

# C-g: cancel an active region (consuming the key), otherwise let it through.
# Fix: `return :through` inside a block evaluated at the config's top level
# raises LocalJumpError; the handler's result must be the block's value.
bind_key [KEY_LEFTCTRL, KEY_G] do |event, operator|
  if @region_mode
    cancel_region operator
    :ignore
  else
    :through
  end
end
# 2 stroke binds
# C-x prefix map: Emacs-style two-stroke bindings.
bind_prefix_key [KEY_LEFTCTRL, KEY_X] do
bind_key KEY_K, [KEY_LEFTCTRL, KEY_W]
bind_key KEY_S, [KEY_LEFTCTRL, KEY_S]
bind_key KEY_B, [KEY_LEFTCTRL, KEY_TAB]
bind_key KEY_H, [KEY_LEFTCTRL, KEY_A]
bind_key [KEY_LEFTCTRL, KEY_G], :ignore
bind_key [KEY_LEFTCTRL, KEY_C], [KEY_LEFTALT, KEY_F4]
end
# settings per window class (or title)
# through all key inputs if active
window(:through, :class => /gnome-terminal/)
# add new bind_key to default binds
window(@default_bind_resolver, :class => /google-chrome/) do
# search
bind_key [KEY_LEFTCTRL, KEY_S], [KEY_LEFTCTRL, KEY_F]
end
fix bug
# -*- coding:utf-8; mode:ruby; -*-
## user settings
# If your keyboard already has a left Ctrl key to the left of the "A" key,
# set this to false (no CapsLock/Ctrl swap needed).
@swap_left_ctrl_with_caps = true
# Swap left Option with left Command (for Apple keyboards).
@swap_left_opt_with_left_cmd = true
# Load an optional per-user local config that can override the settings above.
@local_config = File.join "#{ENV['HOME']}", ".rbindkeys.local.rb"
if File.file? @local_config
  puts "load a local config"
  # File.read avoids the file handle that File.new(...).read left open.
  eval File.read(@local_config)
end
##
# Apply the low-level key swaps selected above; pre_bind_key remaps a
# physical key before the combination binds below are resolved.
if @swap_left_ctrl_with_caps
pre_bind_key KEY_CAPSLOCK, KEY_LEFTCTRL
pre_bind_key KEY_LEFTCTRL, KEY_CAPSLOCK
end
if @swap_left_opt_with_left_cmd
pre_bind_key KEY_LEFTMETA, KEY_LEFTALT
pre_bind_key KEY_LEFTALT, KEY_LEFTMETA
end
# Emacs-style cursor movement: C-f/C-b/C-p/C-n; M-f/M-b for word motion.
bind_key [KEY_LEFTCTRL, KEY_F], KEY_RIGHT
bind_key [KEY_LEFTCTRL, KEY_B], KEY_LEFT
bind_key [KEY_LEFTCTRL, KEY_P], KEY_UP
bind_key [KEY_LEFTCTRL, KEY_N], KEY_DOWN
bind_key [KEY_LEFTALT, KEY_F], [KEY_LEFTCTRL, KEY_RIGHT]
bind_key [KEY_LEFTALT, KEY_B], [KEY_LEFTCTRL, KEY_LEFT]
# Line/page motion and Emacs-style editing keys.
bind_key [KEY_LEFTCTRL, KEY_A], KEY_HOME
bind_key [KEY_LEFTCTRL, KEY_E], KEY_END
bind_key [KEY_LEFTCTRL, KEY_V], KEY_PAGEDOWN
bind_key [KEY_LEFTALT, KEY_V], KEY_PAGEUP
bind_key [KEY_LEFTCTRL, KEY_D], KEY_DELETE
bind_key [KEY_LEFTCTRL, KEY_H], KEY_BACKSPACE
bind_key [KEY_LEFTCTRL, KEY_M], KEY_ENTER
bind_key [KEY_LEFTCTRL, KEY_I], KEY_TAB
bind_key [KEY_LEFTCTRL, KEY_LEFTBRACE], KEY_ESC
bind_key [KEY_LEFTCTRL, KEY_S], [KEY_LEFTCTRL, KEY_F]
# give a block sample
# Toggle the CapsLock LED on every CapsLock press: @caps_led_state flips
# between 0 and 1 via XOR and is emitted as an LED event.
@caps_led_state = 0
bind_key KEY_CAPSLOCK do |event, operator|
@caps_led_state = @caps_led_state ^ 1
operator.send_event EV_LED, LED_CAPSL, @caps_led_state
end
# binds related kill-ring
# Map Emacs cut/copy/yank onto conventional C-x/C-c/C-v shortcuts.
bind_key [KEY_LEFTCTRL, KEY_W], [KEY_LEFTCTRL,KEY_X]
bind_key [KEY_LEFTALT, KEY_W], [KEY_LEFTCTRL,KEY_C]
bind_key [KEY_LEFTCTRL, KEY_Y], [KEY_LEFTCTRL,KEY_V]
# kill line
# C-k: emulate kill-line by shift-selecting to end of line, then cutting.
bind_key [KEY_LEFTCTRL, KEY_K] do |event, operator|
# select to end of line
operator.press_key KEY_LEFTSHIFT
operator.press_key KEY_END
operator.release_key KEY_END
operator.release_key KEY_LEFTSHIFT
# cut
operator.press_key KEY_LEFTCTRL
operator.press_key KEY_X
operator.release_key KEY_X
operator.release_key KEY_LEFTCTRL
end
# region mode
# Emulated Emacs region (selection) mode; toggled by the handlers below.
@region_mode = false
# Leave region (selection) mode: release the held shift, tap right-arrow once
# to collapse the selection, and clear the mode flag.
def cancel_region(operator)
  operator.release_key KEY_LEFTSHIFT
  [KEY_RIGHT].each do |key|
    operator.press_key key
    operator.release_key key
  end
  @region_mode = false
end
# Enter region (selection) mode: hold shift so subsequent movement keys extend
# the selection, and set the mode flag.
# Fix: the body referenced the undefined name +operator+ instead of the +op+
# parameter, which raised NameError the first time the bind fired.
def start_region op
  op.press_key KEY_LEFTSHIFT
  @region_mode = true
end
# C-SPC: set mark / begin a selection; restart if one is already active.
bind_key [KEY_LEFTCTRL, KEY_SPACE] do |event, operator|
  cancel_region operator if @region_mode
  start_region operator
end

# C-g: cancel an active region; otherwise pass the keystroke through.
bind_key [KEY_LEFTCTRL, KEY_G] do |event, operator|
  if not @region_mode
    :through
  else
    cancel_region operator
    :ignore
  end
end

# 2 stroke binds
# C-x prefix map (Emacs-style two-stroke commands).
bind_prefix_key [KEY_LEFTCTRL, KEY_X] do
  bind_key KEY_K, [KEY_LEFTCTRL, KEY_W]                  # C-x k   -> cut (close)
  bind_key KEY_S, [KEY_LEFTCTRL, KEY_S]                  # C-x s   -> save
  bind_key KEY_B, [KEY_LEFTCTRL, KEY_TAB]                # C-x b   -> switch tab
  bind_key KEY_H, [KEY_LEFTCTRL, KEY_A]                  # C-x h   -> select all
  bind_key [KEY_LEFTCTRL, KEY_G], :ignore                # C-x C-g -> abort prefix
  bind_key [KEY_LEFTCTRL, KEY_C], [KEY_LEFTALT, KEY_F4]  # C-x C-c -> close app
end

# settings per window class (or title)
# through all key inputs if active
window(:through, :class => /gnome-terminal/)
# add new bind_key to default binds
window(@default_bind_resolver, :class => /google-chrome/) do
  # search
  bind_key [KEY_LEFTCTRL, KEY_S], [KEY_LEFTCTRL, KEY_F]
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gemspec for drafter 0.2.8 — an ActiveRecord draft/versioning helper.
Gem::Specification.new do |s|
  s.name = "drafter"
  s.version = "0.2.8"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["futurechimp"]
  s.date = "2012-08-29"
  # NOTE(review): description and summary look truncated ("A"/"Simple") —
  # probably lost during generation; verify against the Rakefile.
  s.description = "A"
  s.email = "dave.hrycyszyn@headlondon.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "drafter.gemspec",
    "lib/drafter.rb",
    "lib/drafter/apply.rb",
    "lib/drafter/creation.rb",
    "lib/drafter/diffing.rb",
    "lib/drafter/draft.rb",
    "lib/drafter/draft_upload.rb",
    "lib/drafter/draft_uploader.rb",
    "lib/drafter/draftable.rb",
    "test/drafter/test_apply.rb",
    "test/drafter/test_creation.rb",
    "test/drafter/test_diffing.rb",
    "test/drafter/test_draft.rb",
    "test/drafter/test_draft_upload.rb",
    "test/drafter/test_draftable.rb",
    "test/fixtures/bar.txt",
    "test/fixtures/foo.txt",
    "test/helper.rb",
    "test/support/data.rb",
    "test/support/models.rb",
    "test/support/schema.rb",
    "test/support/uploader.rb",
    "test/test_drafter.rb",
    "test/watchr.rb"
  ]
  s.homepage = "http://github.com/futurechimp/drafter"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.17"
  s.summary = "Simple"

  # Jeweler emits the dependency list three times for compatibility with
  # pre-1.2 RubyGems (no specification_version / no runtime-vs-development
  # distinction); all three branches must be kept in sync.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activerecord>, [">= 0"])
      s.add_runtime_dependency(%q<diffy>, [">= 0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<debugger>, [">= 0"])
      s.add_development_dependency(%q<minitest>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<turn>, [">= 0"])
      s.add_development_dependency(%q<carrierwave>, [">= 0"])
      s.add_development_dependency(%q<minitest-rails-shoulda>, [">= 0"])
    else
      s.add_dependency(%q<activerecord>, [">= 0"])
      s.add_dependency(%q<diffy>, [">= 0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<debugger>, [">= 0"])
      s.add_dependency(%q<minitest>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<turn>, [">= 0"])
      s.add_dependency(%q<carrierwave>, [">= 0"])
      s.add_dependency(%q<minitest-rails-shoulda>, [">= 0"])
    end
  else
    s.add_dependency(%q<activerecord>, [">= 0"])
    s.add_dependency(%q<diffy>, [">= 0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<debugger>, [">= 0"])
    s.add_dependency(%q<minitest>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<turn>, [">= 0"])
    s.add_dependency(%q<carrierwave>, [">= 0"])
    s.add_dependency(%q<minitest-rails-shoulda>, [">= 0"])
  end
end
Regenerate gemspec for version 0.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gemspec for drafter 0.3.0 — adds id_hash/subdrafts support and the
# database_cleaner development dependency over 0.2.8.
Gem::Specification.new do |s|
  s.name = "drafter"
  s.version = "0.3.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["futurechimp"]
  s.date = "2012-09-05"
  # NOTE(review): description and summary look truncated ("A"/"Simple") —
  # probably lost during generation; verify against the Rakefile.
  s.description = "A"
  s.email = "dave.hrycyszyn@headlondon.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "drafter.gemspec",
    "lib/drafter.rb",
    "lib/drafter/apply.rb",
    "lib/drafter/creation.rb",
    "lib/drafter/diffing.rb",
    "lib/drafter/draft.rb",
    "lib/drafter/draft_upload.rb",
    "lib/drafter/draft_uploader.rb",
    "lib/drafter/draftable.rb",
    "lib/drafter/id_hash.rb",
    "lib/drafter/subdrafts.rb",
    "test/drafter/test_apply.rb",
    "test/drafter/test_creation.rb",
    "test/drafter/test_diffing.rb",
    "test/drafter/test_draft.rb",
    "test/drafter/test_draft_upload.rb",
    "test/drafter/test_draftable.rb",
    "test/drafter/test_id_hash.rb",
    "test/drafter/test_subdrafts.rb",
    "test/fixtures/bar.txt",
    "test/fixtures/foo.txt",
    "test/helper.rb",
    "test/support/data.rb",
    "test/support/models.rb",
    "test/support/schema.rb",
    "test/support/uploader.rb",
    "test/test_drafter.rb",
    "test/watchr.rb"
  ]
  s.homepage = "http://github.com/futurechimp/drafter"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.17"
  s.summary = "Simple"

  # Jeweler emits the dependency list three times for compatibility with
  # pre-1.2 RubyGems; all three branches must be kept in sync.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activerecord>, [">= 0"])
      s.add_runtime_dependency(%q<diffy>, [">= 0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<database_cleaner>, [">= 0"])
      s.add_development_dependency(%q<debugger>, [">= 0"])
      s.add_development_dependency(%q<minitest>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<turn>, [">= 0"])
      s.add_development_dependency(%q<carrierwave>, [">= 0"])
      s.add_development_dependency(%q<minitest-rails-shoulda>, [">= 0"])
    else
      s.add_dependency(%q<activerecord>, [">= 0"])
      s.add_dependency(%q<diffy>, [">= 0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<database_cleaner>, [">= 0"])
      s.add_dependency(%q<debugger>, [">= 0"])
      s.add_dependency(%q<minitest>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<turn>, [">= 0"])
      s.add_dependency(%q<carrierwave>, [">= 0"])
      s.add_dependency(%q<minitest-rails-shoulda>, [">= 0"])
    end
  else
    s.add_dependency(%q<activerecord>, [">= 0"])
    s.add_dependency(%q<diffy>, [">= 0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<database_cleaner>, [">= 0"])
    s.add_dependency(%q<debugger>, [">= 0"])
    s.add_dependency(%q<minitest>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<turn>, [">= 0"])
    s.add_dependency(%q<carrierwave>, [">= 0"])
    s.add_dependency(%q<minitest-rails-shoulda>, [">= 0"])
  end
end
|
# -*- encoding: utf-8 -*-

# Gemspec for gotime-cassandra_object 2.10.7 — ActiveModel-compatible
# object mapper for Apache Cassandra.
Gem::Specification.new do |s|
  s.name = 'gotime-cassandra_object'
  s.version = '2.10.7'
  s.description = 'Cassandra ActiveModel'
  s.summary = 'Cassandra ActiveModel'

  s.required_ruby_version = '>= 1.9.2'
  s.required_rubygems_version = '>= 1.3.5'

  s.authors = ["Michael Koziarski", "gotime"]
  s.email = 'gems@gotime.com'
  s.homepage = 'http://github.com/gotime/cassandra_object'

  s.extra_rdoc_files = ["README.rdoc"]
  # File lists are taken from git, so the gem must be built from a checkout.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test}/*`.split("\n")
  s.require_paths = ['lib']

  s.add_runtime_dependency('activemodel', ">= 3.0")
  s.add_runtime_dependency('cassandra', ">= 0.12.0")
  s.add_development_dependency('bundler')
end
new version
# -*- encoding: utf-8 -*-

# Gemspec for gotime-cassandra_object 2.10.8 (version bump only; otherwise
# identical to 2.10.7).
Gem::Specification.new do |s|
  s.name = 'gotime-cassandra_object'
  s.version = '2.10.8'
  s.description = 'Cassandra ActiveModel'
  s.summary = 'Cassandra ActiveModel'

  s.required_ruby_version = '>= 1.9.2'
  s.required_rubygems_version = '>= 1.3.5'

  s.authors = ["Michael Koziarski", "gotime"]
  s.email = 'gems@gotime.com'
  s.homepage = 'http://github.com/gotime/cassandra_object'

  s.extra_rdoc_files = ["README.rdoc"]
  # File lists are taken from git, so the gem must be built from a checkout.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test}/*`.split("\n")
  s.require_paths = ['lib']

  s.add_runtime_dependency('activemodel', ">= 3.0")
  s.add_runtime_dependency('cassandra', ">= 0.12.0")
  s.add_development_dependency('bundler')
end
|
# frozen_string_literal: true

require_relative 'lib/dyndnsd/version'

# Gemspec for dyndnsd — a DynDNS server built on Rack; version is sourced
# from lib/dyndnsd/version.rb.
Gem::Specification.new do |s|
  s.name = 'dyndnsd'
  s.version = Dyndnsd::VERSION
  s.summary = 'dyndnsd.rb'
  s.description = 'A small, lightweight and extensible DynDNS server written with Ruby and Rack.'
  s.author = 'Christian Nicolai'
  s.homepage = 'https://github.com/cmur2/dyndnsd'
  s.license = 'Apache-2.0'
  s.metadata = {
    'bug_tracker_uri' => "#{s.homepage}/issues",
    'changelog_uri' => "#{s.homepage}/blob/master/CHANGELOG.md",
    'source_code_uri' => s.homepage
  }

  # Ship only init.d/ and lib/ from the git checkout.
  s.files = `git ls-files -z`.split("\x0").select do |f|
    f.match(%r{^(init.d|lib)/})
  end
  s.require_paths = ['lib']
  s.bindir = 'exe'
  s.executables = ['dyndnsd']
  s.extra_rdoc_files = Dir['README.md', 'CHANGELOG.md', 'LICENSE']

  s.required_ruby_version = '>= 2.5'

  s.add_runtime_dependency 'async-dns', '~> 1.3.0'
  s.add_runtime_dependency 'metriks'
  s.add_runtime_dependency 'opentelemetry-exporter-jaeger', '~> 0.20.0'
  s.add_runtime_dependency 'opentelemetry-instrumentation-rack', '~> 0.20.0'
  s.add_runtime_dependency 'opentelemetry-sdk', '~> 1.0.0.rc2'
  s.add_runtime_dependency 'rack', '~> 2.0'
  s.add_runtime_dependency 'webrick', '>= 1.6.1'

  s.add_development_dependency 'bundler'
  s.add_development_dependency 'bundler-audit', '~> 0.9.0'
  s.add_development_dependency 'rack-test'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'rspec'
  s.add_development_dependency 'rubocop', '~> 1.24.0'
  s.add_development_dependency 'rubocop-rake', '~> 0.6.0'
  s.add_development_dependency 'rubocop-rspec', '~> 2.6.0'
  s.add_development_dependency 'solargraph', '~> 0.44.0'
end
gems: pin async dependency to pre v2 version
- https://rubygems.org/gems/async-io/versions/1.32.2 does not limit the version range
- async v2 requires very modern Ruby versions
# frozen_string_literal: true

require_relative 'lib/dyndnsd/version'

# Gemspec for dyndnsd — identical to the previous revision except that
# async is now pinned below 2.0 (async-dns does not limit the range and
# async 2.x needs much newer Rubies).
Gem::Specification.new do |s|
  s.name = 'dyndnsd'
  s.version = Dyndnsd::VERSION
  s.summary = 'dyndnsd.rb'
  s.description = 'A small, lightweight and extensible DynDNS server written with Ruby and Rack.'
  s.author = 'Christian Nicolai'
  s.homepage = 'https://github.com/cmur2/dyndnsd'
  s.license = 'Apache-2.0'
  s.metadata = {
    'bug_tracker_uri' => "#{s.homepage}/issues",
    'changelog_uri' => "#{s.homepage}/blob/master/CHANGELOG.md",
    'source_code_uri' => s.homepage
  }

  # Ship only init.d/ and lib/ from the git checkout.
  s.files = `git ls-files -z`.split("\x0").select do |f|
    f.match(%r{^(init.d|lib)/})
  end
  s.require_paths = ['lib']
  s.bindir = 'exe'
  s.executables = ['dyndnsd']
  s.extra_rdoc_files = Dir['README.md', 'CHANGELOG.md', 'LICENSE']

  s.required_ruby_version = '>= 2.5'

  # Transitive pin: keep async on the 1.x line compatible with Ruby 2.5.
  s.add_runtime_dependency 'async', '~> 1.30.0'
  s.add_runtime_dependency 'async-dns', '~> 1.3.0'
  s.add_runtime_dependency 'metriks'
  s.add_runtime_dependency 'opentelemetry-exporter-jaeger', '~> 0.20.0'
  s.add_runtime_dependency 'opentelemetry-instrumentation-rack', '~> 0.20.0'
  s.add_runtime_dependency 'opentelemetry-sdk', '~> 1.0.0.rc2'
  s.add_runtime_dependency 'rack', '~> 2.0'
  s.add_runtime_dependency 'webrick', '>= 1.6.1'

  s.add_development_dependency 'bundler'
  s.add_development_dependency 'bundler-audit', '~> 0.9.0'
  s.add_development_dependency 'rack-test'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'rspec'
  s.add_development_dependency 'rubocop', '~> 1.24.0'
  s.add_development_dependency 'rubocop-rake', '~> 0.6.0'
  s.add_development_dependency 'rubocop-rspec', '~> 2.6.0'
  s.add_development_dependency 'solargraph', '~> 0.44.0'
end
|
# encoding: utf-8
require File.expand_path("../spec_helper", __FILE__)

# WatirSpec suite for the generic Element interface: locating elements via
# selector hashes and xpath, equality, data-* attributes, event firing,
# visibility and existence. Written in legacy RSpec `.should` syntax; the
# `bug` helper guards examples against known driver issues.
describe "Element" do
  before :each do
    browser.goto(WatirSpec.files + "/forms_with_input_elements.html")
  end

  describe ".new" do
    it "finds elements matching the conditions when given a hash of :how => 'what' arguments" do
      browser.checkbox(:name => 'new_user_interests', :title => 'Dancing is fun!').value.should == 'dancing'
      browser.text_field(:class_name => 'name', :index => 1).id.should == 'new_user_last_name'
    end

    it "raises UnknownObjectException with a sane error message when given a hash of :how => 'what' arguments (non-existing object)" do
      lambda { browser.text_field(:index => 100, :name => "foo").id }.should raise_error(UnknownObjectException)
    end

    it "raises ArgumentError if given the wrong number of arguments" do
      # NOTE(review): RSpec 2 replaced mock(..., :null_object => true) with
      # mock(...).as_null_object — this form only works on RSpec 1.
      container = mock("container", :null_object => true)
      lambda { Element.new(container, 1,2,3,4) }.should raise_error(ArgumentError)
      lambda { Element.new(container, "foo") }.should raise_error(ArgumentError)
    end
  end

  describe "#== and #eql?" do
    before { browser.goto(WatirSpec.files + "/definition_lists.html") }

    it "returns true if the two elements point to the same DOM element" do
      a = browser.dl(:id => "experience-list")
      b = browser.dl

      a.should == b
      a.should eql(b)
    end

    it "returns false if the two elements are not the same" do
      a = browser.dls[0]
      b = browser.dls[1]

      a.should_not == b
      a.should_not eql(b)
    end

    it "returns false if the other object is not an Element" do
      browser.dl.should_not == 1
    end
  end

  describe "data-* attributes" do
    before { browser.goto("file://" + File.expand_path("html/data_attributes.html", File.dirname(__FILE__))) }

    bug "http://github.com/jarib/celerity/issues#issue/27", :celerity do
      it "finds elements by a data-* attribute" do
        browser.p(:data_type => "ruby-library").should exist
      end

      it "returns the value of a data-* attribute" do
        browser.p.data_type.should == "ruby-library"
      end
    end
  end

  describe "finding with unknown tag name" do
    it "finds an element by xpath" do
      browser.element(:xpath => "//*[@for='new_user_first_name']").should exist
    end

    it "finds an element by arbitrary attribute" do
      browser.element(:id => "new_user").should exist
    end

    it "finds several elements by xpath" do
      browser.elements(:xpath => "//a").length.should == 1
    end

    it "finds finds several elements by arbitrary attribute" do
      browser.elements(:name => /^new_user/).length.should == 30
    end
  end

  describe "#to_subtype" do
    it "returns a more precise subtype of Element" do
      el = browser.element(:xpath => "//input[@type='radio']").to_subtype
      el.should be_kind_of(Watir::Radio)
    end
  end

  describe "#focus" do
    bug "http://github.com/jarib/watir-webdriver/issues/issue/20", [:webdriver, :firefox] do
      it "fires the onfocus event for the given element" do
        tf = browser.text_field(:id, "new_user_occupation")
        tf.value.should == "Developer"
        tf.focus
        browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
      end
    end
  end

  describe "#fire_event" do
    it "should fire the given event" do
      browser.div(:id, "onfocus_test").text.should be_empty
      browser.text_field(:id, "new_user_occupation").fire_event('onfocus')
      browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
    end
  end

  describe "#parent" do
    bug "http://github.com/jarib/celerity/issues#issue/28", :celerity do
      it "gets the parent of this element" do
        browser.text_field(:id, "new_user_email").parent.should be_instance_of(FieldSet)
      end
    end
  end

  describe "#visible?" do
    it "returns true if the element is visible" do
      browser.text_field(:id, "new_user_email").should be_visible
    end

    it "returns false if the element is input element where type == 'hidden'" do
      browser.text_field(:id, "new_user_interests_dolls").should_not be_visible
    end

    it "returns false if the element has style='display: none;'" do
      browser.div(:id, 'changed_language').should_not be_visible
    end

    it "returns false if the element has style='visibility: hidden;" do
      browser.div(:id, 'wants_newsletter').should_not be_visible
    end

    it "returns false if one of the parent elements is hidden" do
      browser.div(:id, 'hidden_parent').should_not be_visible
    end
  end

  describe "#exist?" do
    it "doesn't raise when called on nested elements" do
      browser.div(:id, 'no_such_div').link(:id, 'no_such_id').should_not exist
    end

    it "raises ArgumentError error if selector hash with :xpath has multiple entries" do
      lambda { browser.div(:xpath => "//div", :class => "foo").exists? }.should raise_error(ArgumentError)
    end
  end
end
Fix for RSpec 2.0
# encoding: utf-8
require File.expand_path("../spec_helper", __FILE__)

# WatirSpec suite for the generic Element interface (RSpec 2-compatible
# revision: uses mock(...).as_null_object). Covers locating elements via
# selector hashes and xpath, equality, data-* attributes, event firing,
# visibility and existence; `bug` guards known driver issues.
describe "Element" do
  before :each do
    browser.goto(WatirSpec.files + "/forms_with_input_elements.html")
  end

  describe ".new" do
    it "finds elements matching the conditions when given a hash of :how => 'what' arguments" do
      browser.checkbox(:name => 'new_user_interests', :title => 'Dancing is fun!').value.should == 'dancing'
      browser.text_field(:class_name => 'name', :index => 1).id.should == 'new_user_last_name'
    end

    it "raises UnknownObjectException with a sane error message when given a hash of :how => 'what' arguments (non-existing object)" do
      lambda { browser.text_field(:index => 100, :name => "foo").id }.should raise_error(UnknownObjectException)
    end

    it "raises ArgumentError if given the wrong number of arguments" do
      container = mock("container").as_null_object
      lambda { Element.new(container, 1,2,3,4) }.should raise_error(ArgumentError)
      lambda { Element.new(container, "foo") }.should raise_error(ArgumentError)
    end
  end

  describe "#== and #eql?" do
    before { browser.goto(WatirSpec.files + "/definition_lists.html") }

    it "returns true if the two elements point to the same DOM element" do
      a = browser.dl(:id => "experience-list")
      b = browser.dl

      a.should == b
      a.should eql(b)
    end

    it "returns false if the two elements are not the same" do
      a = browser.dls[0]
      b = browser.dls[1]

      a.should_not == b
      a.should_not eql(b)
    end

    it "returns false if the other object is not an Element" do
      browser.dl.should_not == 1
    end
  end

  describe "data-* attributes" do
    before { browser.goto("file://" + File.expand_path("html/data_attributes.html", File.dirname(__FILE__))) }

    bug "http://github.com/jarib/celerity/issues#issue/27", :celerity do
      it "finds elements by a data-* attribute" do
        browser.p(:data_type => "ruby-library").should exist
      end

      it "returns the value of a data-* attribute" do
        browser.p.data_type.should == "ruby-library"
      end
    end
  end

  describe "finding with unknown tag name" do
    it "finds an element by xpath" do
      browser.element(:xpath => "//*[@for='new_user_first_name']").should exist
    end

    it "finds an element by arbitrary attribute" do
      browser.element(:id => "new_user").should exist
    end

    it "finds several elements by xpath" do
      browser.elements(:xpath => "//a").length.should == 1
    end

    it "finds finds several elements by arbitrary attribute" do
      browser.elements(:name => /^new_user/).length.should == 30
    end
  end

  describe "#to_subtype" do
    it "returns a more precise subtype of Element" do
      el = browser.element(:xpath => "//input[@type='radio']").to_subtype
      el.should be_kind_of(Watir::Radio)
    end
  end

  describe "#focus" do
    bug "http://github.com/jarib/watir-webdriver/issues/issue/20", [:webdriver, :firefox] do
      it "fires the onfocus event for the given element" do
        tf = browser.text_field(:id, "new_user_occupation")
        tf.value.should == "Developer"
        tf.focus
        browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
      end
    end
  end

  describe "#fire_event" do
    it "should fire the given event" do
      browser.div(:id, "onfocus_test").text.should be_empty
      browser.text_field(:id, "new_user_occupation").fire_event('onfocus')
      browser.div(:id, "onfocus_test").text.should == "changed by onfocus event"
    end
  end

  describe "#parent" do
    bug "http://github.com/jarib/celerity/issues#issue/28", :celerity do
      it "gets the parent of this element" do
        browser.text_field(:id, "new_user_email").parent.should be_instance_of(FieldSet)
      end
    end
  end

  describe "#visible?" do
    it "returns true if the element is visible" do
      browser.text_field(:id, "new_user_email").should be_visible
    end

    it "returns false if the element is input element where type == 'hidden'" do
      browser.text_field(:id, "new_user_interests_dolls").should_not be_visible
    end

    it "returns false if the element has style='display: none;'" do
      browser.div(:id, 'changed_language').should_not be_visible
    end

    it "returns false if the element has style='visibility: hidden;" do
      browser.div(:id, 'wants_newsletter').should_not be_visible
    end

    it "returns false if one of the parent elements is hidden" do
      browser.div(:id, 'hidden_parent').should_not be_visible
    end
  end

  describe "#exist?" do
    it "doesn't raise when called on nested elements" do
      browser.div(:id, 'no_such_div').link(:id, 'no_such_id').should_not exist
    end

    it "raises ArgumentError error if selector hash with :xpath has multiple entries" do
      lambda { browser.div(:xpath => "//div", :class => "foo").exists? }.should raise_error(ArgumentError)
    end
  end
end
|
# Gemspec for gitlab-gollum-lib — GitLab's fork of the gollum wiki library.
Gem::Specification.new do |s|
  s.name = 'gitlab-gollum-lib'
  # NOTE(review): `cat VERSION` includes the file's trailing newline;
  # RubyGems tolerates it, but File.read('VERSION').strip would be cleaner.
  s.version = `cat VERSION`
  s.date = Time.now.strftime("%Y-%m-%d")
  s.license = 'MIT'

  s.summary = "A simple, Git-powered wiki."
  s.description = "A simple, Git-powered wiki with a sweet API and local frontend."

  s.authors = ["Tom Preston-Werner", "Rick Olson", "Dmitriy Zaporozhets"]
  s.email = 'dmitriy.zaporozhets@gmail.com'
  s.homepage = 'https://gitlab.com/gitlab-org/gollum-lib'

  #s.require_paths = %w[lib]
  #s.rdoc_options = ["--charset=UTF-8"]
  #s.extra_rdoc_files = %w[README.md LICENSE]

  s.add_dependency('gitlab-grit', '~> 2.6.1')
  s.add_dependency('github-markup', ['>= 0.7.5', '< 1.0.0'])
  s.add_dependency('github-markdown', '~> 0.5.3')
  s.add_dependency('sanitize', '~> 2.0.3')
  s.add_dependency('nokogiri', '~> 1.5.9')
  s.add_dependency('stringex', '~> 1.5.1')

  # = MANIFEST =
  s.files = %w[
    VERSION
    Gemfile
    CHANGELOG
    LICENSE
    README.md
    Rakefile
    docs/sanitization.md
    gollum-lib.gemspec
    lib/gollum-lib.rb
    lib/gollum-lib/blob_entry.rb
    lib/gollum-lib/committer.rb
    lib/gollum-lib/file.rb
    lib/gollum-lib/file_view.rb
    lib/gollum-lib/git_access.rb
    lib/gollum-lib/gitcode.rb
    lib/gollum-lib/grit_ext.rb
    lib/gollum-lib/helpers.rb
    lib/gollum-lib/markup.rb
    lib/gollum-lib/markups.rb
    lib/gollum-lib/page.rb
    lib/gollum-lib/pagination.rb
    lib/gollum-lib/remote_code.rb
    lib/gollum-lib/sanitization.rb
    lib/gollum-lib/web_sequence_diagram.rb
    lib/gollum-lib/wiki.rb
    licenses/licenses.txt
  ]
  # = MANIFEST =
  s.test_files = s.files.select { |path| path =~ /^test\/test_.*\.rb/ }
end
Cleanup gemspec
Signed-off-by: Dmitriy Zaporozhets <be23d75b156792e5acab51b196a2deb155d35d6a@gmail.com>
# Gemspec for gitlab-gollum-lib (cleaned-up revision: commented-out rdoc
# settings and MANIFEST markers removed).
Gem::Specification.new do |s|
  s.name = 'gitlab-gollum-lib'
  # NOTE(review): `cat VERSION` includes the file's trailing newline;
  # RubyGems tolerates it, but File.read('VERSION').strip would be cleaner.
  s.version = `cat VERSION`
  s.date = Time.now.strftime("%Y-%m-%d")
  s.license = 'MIT'

  s.summary = "A simple, Git-powered wiki."
  s.description = "A simple, Git-powered wiki with a sweet API and local frontend."

  s.authors = ["Tom Preston-Werner", "Rick Olson", "Dmitriy Zaporozhets"]
  s.email = 'dmitriy.zaporozhets@gmail.com'
  s.homepage = 'https://gitlab.com/gitlab-org/gollum-lib'

  s.add_dependency('gitlab-grit', '~> 2.6.1')
  s.add_dependency('github-markup', ['>= 0.7.5', '< 1.0.0'])
  s.add_dependency('github-markdown', '~> 0.5.3')
  s.add_dependency('sanitize', '~> 2.0.3')
  s.add_dependency('nokogiri', '~> 1.5.9')
  s.add_dependency('stringex', '~> 1.5.1')

  s.files = %w[
    VERSION
    Gemfile
    CHANGELOG
    LICENSE
    README.md
    Rakefile
    docs/sanitization.md
    gollum-lib.gemspec
    lib/gollum-lib.rb
    lib/gollum-lib/blob_entry.rb
    lib/gollum-lib/committer.rb
    lib/gollum-lib/file.rb
    lib/gollum-lib/file_view.rb
    lib/gollum-lib/git_access.rb
    lib/gollum-lib/gitcode.rb
    lib/gollum-lib/grit_ext.rb
    lib/gollum-lib/helpers.rb
    lib/gollum-lib/markup.rb
    lib/gollum-lib/markups.rb
    lib/gollum-lib/page.rb
    lib/gollum-lib/pagination.rb
    lib/gollum-lib/remote_code.rb
    lib/gollum-lib/sanitization.rb
    lib/gollum-lib/web_sequence_diagram.rb
    lib/gollum-lib/wiki.rb
    licenses/licenses.txt
  ]
  s.test_files = s.files.select { |path| path =~ /^test\/test_.*\.rb/ }
end
|
# Homebrew cask for Android Studio Canary 4.2.0.9; version is
# "ide_version,build_number" split via before_comma/after_comma.
cask "android-studio-preview-canary" do
  version "4.2.0.9,202.6795674"
  sha256 "11505a2cc661ae10572711ca0c3b3622311c21a483cedddfa6206a562a1b4901"

  # dl.google.com/dl/android/studio/ was verified as official when first introduced to the cask
  url "https://dl.google.com/dl/android/studio/ide-zips/#{version.before_comma}/android-studio-ide-#{version.after_comma}-mac.zip"
  name "Android Studio Preview (Canary)"
  homepage "https://developer.android.com/studio/preview/"

  conflicts_with cask: "android-studio-preview-beta"

  app "Android Studio #{version.major_minor} Preview.app"

  # `brew uninstall --zap` also removes user-level IDE/SDK state.
  zap trash: [
    "~/Library/Android/sdk",
    "~/Library/Application Support/AndroidStudio#{version.major_minor}",
    "~/Library/Caches/AndroidStudio#{version.major_minor}",
    "~/Library/Logs/AndroidStudio#{version.major_minor}",
    "~/Library/Preferences/AndroidStudio#{version.major_minor}",
    "~/Library/Preferences/com.android.Emulator.plist",
    "~/Library/Saved Application State/com.google.android.studio.savedState",
    "~/.android",
  ],
      rmdir: [
        "~/AndroidStudioProjects",
        "~/Library/Android",
      ]
end
Update android-studio-preview-canary from 4.2.0.9,202.6795674 to 4.2.0.10,202.6811877 (#9593)
# Homebrew cask for Android Studio Canary 4.2.0.10; version is
# "ide_version,build_number" split via before_comma/after_comma.
cask "android-studio-preview-canary" do
  version "4.2.0.10,202.6811877"
  sha256 "8222060377a14cdc0461b2d3e31a0765c0fd924fc58053a0c00a227f9cbcea77"

  # dl.google.com/dl/android/studio/ was verified as official when first introduced to the cask
  url "https://dl.google.com/dl/android/studio/ide-zips/#{version.before_comma}/android-studio-ide-#{version.after_comma}-mac.zip"
  name "Android Studio Preview (Canary)"
  homepage "https://developer.android.com/studio/preview/"

  conflicts_with cask: "android-studio-preview-beta"

  app "Android Studio #{version.major_minor} Preview.app"

  # `brew uninstall --zap` also removes user-level IDE/SDK state.
  zap trash: [
    "~/Library/Android/sdk",
    "~/Library/Application Support/AndroidStudio#{version.major_minor}",
    "~/Library/Caches/AndroidStudio#{version.major_minor}",
    "~/Library/Logs/AndroidStudio#{version.major_minor}",
    "~/Library/Preferences/AndroidStudio#{version.major_minor}",
    "~/Library/Preferences/com.android.Emulator.plist",
    "~/Library/Saved Application State/com.google.android.studio.savedState",
    "~/.android",
  ],
      rmdir: [
        "~/AndroidStudioProjects",
        "~/Library/Android",
      ]
end
|
# -*- encoding: utf-8 -*-

# Gemspec for greengreen 0.0.2 (generated for the RubyGems 1.3.x era).
Gem::Specification.new do |s|
  s.name = %q{greengreen}
  s.version = "0.0.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Jason Morrison"]
  s.date = %q{2009-04-01}
  s.email = %q{jmorrison@thoughtbot.com}
  s.extra_rdoc_files = ["README.rdoc", "LICENSE"]
  s.files = ["README.rdoc", "VERSION.yml", "lib/greengreen", "lib/greengreen/autotest.rb", "lib/greengreen.rb", "LICENSE"]
  # NOTE(review): has_rdoc is obsolete and ignored by modern RubyGems.
  s.has_rdoc = true
  s.homepage = %q{http://github.com/jasonm/greengreen}
  s.rdoc_options = ["--inline-source", "--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.1}
  s.summary = %q{GreenGreen is a tool for assuring 100% product quality.}

  # Generator compatibility scaffolding; this gem declares no dependencies,
  # so every branch is empty.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 2

    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
    else
    end
  else
  end
end
bump gemspec
# -*- encoding: utf-8 -*-

# Gemspec for greengreen 0.0.3 (version bump only; otherwise identical
# to 0.0.2).
Gem::Specification.new do |s|
  s.name = %q{greengreen}
  s.version = "0.0.3"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Jason Morrison"]
  s.date = %q{2009-04-01}
  s.email = %q{jmorrison@thoughtbot.com}
  s.extra_rdoc_files = ["README.rdoc", "LICENSE"]
  s.files = ["README.rdoc", "VERSION.yml", "lib/greengreen", "lib/greengreen/autotest.rb", "lib/greengreen.rb", "LICENSE"]
  # NOTE(review): has_rdoc is obsolete and ignored by modern RubyGems.
  s.has_rdoc = true
  s.homepage = %q{http://github.com/jasonm/greengreen}
  s.rdoc_options = ["--inline-source", "--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.1}
  s.summary = %q{GreenGreen is a tool for assuring 100% product quality.}

  # Generator compatibility scaffolding; this gem declares no dependencies,
  # so every branch is empty.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 2

    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
    else
    end
  else
  end
end
|
#Gui written with Shoes to control the Hammer
# Small Shoes desktop GUI that remote-controls a "Hammer" call generator
# over DRb: query status, start calls, stop calls.
Shoes.app :width => 200, :height => 250, :title => 'Hammer Controls' do
  @hostname = nil
  # NOTE(review): @default_drb_port is never used — the user must type
  # "host:port" explicitly in the text field.
  @default_drb_port = 9050

  #Set some background colors
  background '#FFFFFF'

  #Use this method to connect to the DRb service
  # Connects to the DRb endpoint entered as "host:port" and performs the
  # requested +action+ ('start', 'stop' or 'status'). Returns a hash with
  # :status ('ok' or 'error') and :message (remote result or exception).
  def connect_to_drb(url, action)
    # BUG FIX: DRbObject.new_with_uri requires a full dRuby URI
    # ("druby://host:port"), but the user enters a bare "host:port" —
    # prefix the scheme before connecting.
    url = "druby://" + url
    begin
      hammer = DRbObject.new_with_uri url
      case action
      when 'start'
        result = hammer.start_calls
      when 'stop'
        result = hammer.stop_calls
      when 'status'
        result = hammer.hammer_status
      end
      return { :status => 'ok', :message => result }
    rescue => err
      # Surface connection/remote failures to the caller instead of crashing
      # the GUI event handler.
      return { :status => 'error', :message => err }
    end
  end

  #Create a 'stack' of elements that are grouped together
  stack :center => true, :width => 200, :margin => 30 do
    @hostname = edit_line :width => 120

    button 'Hammer Status' do
      if @hostname.text == ''
        alert 'Please enter in a hostname and port in the text field (ie - localhost:9050)'
      else
        result = connect_to_drb(@hostname.text, 'status')
        alert "Hammer Status is... #{result[:message]} @ #{@hostname.text}"
      end
    end

    button 'Start Hammer' do
      if @hostname.text == ''
        alert 'Please enter in a hostname and port in the text field (ie - localhost:9050)'
      else
        if confirm("Are you sure you want to start the hammer?")
          result = connect_to_drb(@hostname.text, 'start')
          alert "Starting the hammer @ #{@hostname.text}"
        end
      end
    end

    button 'Stop Hammer' do
      if @hostname.text == ''
        alert 'Please enter in a hostname and port in the text field (ie - localhost:9050)'
      else
        if confirm("Are you sure you want to stop the hammer?")
          result = connect_to_drb(@hostname.text, 'stop')
          alert "Stopping the hammer @ #{@hostname.text}. Result: #{result[:message]}"
        end
      end
    end

    button 'Quit' do
      if confirm("Are you sure you want to quit?")
        exit
      end
    end
  end

  para 'Copyright (C) 2008 Jason Goecke', :align => 'center', :size => 8
end
Properly formatted the druby uri
#Gui written with Shoes to control the Hammer
# Small Shoes desktop GUI that remote-controls a "Hammer" call generator
# over DRb: query status, start calls, stop calls.
Shoes.app :width => 200, :height => 250, :title => 'Hammer Controls' do
  @hostname = nil
  # NOTE(review): @default_drb_port is never used — the user must type
  # "host:port" explicitly in the text field.
  @default_drb_port = 9050

  #Set some background colors
  background '#FFFFFF'

  #Use this method to connect to the DRb service
  # Connects to the DRb endpoint entered as "host:port" and performs the
  # requested +action+ ('start', 'stop' or 'status'). Returns a hash with
  # :status ('ok' or 'error') and :message (remote result or exception).
  def connect_to_drb(url, action)
    # DRbObject.new_with_uri needs the full dRuby URI; the user only types
    # "host:port", so the scheme is prepended here.
    url = "druby://" + url
    begin
      hammer = DRbObject.new_with_uri url
      case action
      when 'start'
        result = hammer.start_calls
      when 'stop'
        result = hammer.stop_calls
      when 'status'
        result = hammer.hammer_status
      end
      return { :status => 'ok', :message => result }
    rescue => err
      return { :status => 'error', :message => err }
    end
  end

  #Create a 'stack' of elements that are grouped together
  stack :center => true, :width => 200, :margin => 30 do
    @hostname = edit_line :width => 120

    button 'Hammer Status' do
      if @hostname.text == ''
        alert 'Please enter in a hostname and port in the text field (ie - localhost:9050)'
      else
        result = connect_to_drb(@hostname.text, 'status')
        alert "Hammer Status is... #{result[:message]} @ #{@hostname.text}"
      end
    end

    button 'Start Hammer' do
      if @hostname.text == ''
        alert 'Please enter in a hostname and port in the text field (ie - localhost:9050)'
      else
        if confirm("Are you sure you want to start the hammer?")
          result = connect_to_drb(@hostname.text, 'start')
          alert "Starting the hammer @ #{@hostname.text}"
        end
      end
    end

    button 'Stop Hammer' do
      if @hostname.text == ''
        alert 'Please enter in a hostname and port in the text field (ie - localhost:9050)'
      else
        if confirm("Are you sure you want to stop the hammer?")
          result = connect_to_drb(@hostname.text, 'stop')
          alert "Stopping the hammer @ #{@hostname.text}. Result: #{result[:message]}"
        end
      end
    end

    button 'Quit' do
      if confirm("Are you sure you want to quit?")
        exit
      end
    end
  end

  para 'Copyright (C) 2008 Jason Goecke', :align => 'center', :size => 8
end |
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require 'habberdash/version'

# Gemspec for habberdash — a Rails engine for embeddable dashboard widgets;
# version is sourced from lib/habberdash/version.rb.
Gem::Specification.new do |s|
  # General Gem Information
  s.name = 'habberdash'
  s.date = '2012-06-20'
  s.version = Habberdash::VERSION
  s.authors = ['Factory Design Labs']
  s.email = ['is.team@factorylabs.com']
  s.homepage = 'http://github.com/factory/habberdash'
  s.summary = %Q{Habberdash: The Rails Dashboard Engine}
  s.description = %Q{Habberdash: Write dashboard widgets, and embed them within your application.}
  s.licenses = ['MIT']

  # Runtime Dependencies
  s.add_dependency 'rails', '>= 3.2'
  s.add_dependency 'coffee-rails'
  s.add_dependency 'haml_coffee_assets' # '>= 1.1.1'

  # Testing dependencies
  s.add_development_dependency 'rspec-core', '>= 2.8.0'
  s.add_development_dependency 'evergreen', '>= 1.0.0'
  s.add_development_dependency 'selenium-webdriver', '>= 2.20.0'
  #s.add_development_dependency 'cucumber-rails', '>= 1.3.0'
  s.add_development_dependency 'capybara'
  s.add_development_dependency 'capybara-firebug', '>= 1.1.0'
  s.add_development_dependency 'aruba'
  s.add_development_dependency 'database_cleaner'

  # Gem Files
  s.extra_rdoc_files = %w(LICENSE POST_INSTALL)
  # = MANIFEST =
  # Test/executable lists come from git, so the gem must be built from a
  # checkout.
  s.files = Dir['lib/**/*', 'app/**/*', 'config/routes.rb']
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  # = MANIFEST =
  s.require_paths = %w(lib)
end
Add rspec-rails as a development dependency
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)

require 'habberdash/version'

# Gem specification for Habberdash, the Rails dashboard engine.
Gem::Specification.new do |spec|
  # General Gem Information
  spec.name     = 'habberdash'
  spec.date     = '2012-06-20'
  spec.version  = Habberdash::VERSION
  spec.authors  = ['Factory Design Labs']
  spec.email    = ['is.team@factorylabs.com']
  spec.homepage = 'http://github.com/factory/habberdash'
  spec.summary     = 'Habberdash: The Rails Dashboard Engine'
  spec.description = 'Habberdash: Write dashboard widgets, and embed them within your application.'
  spec.licenses = ['MIT']

  # Runtime Dependencies
  spec.add_dependency 'rails', '>= 3.2'
  spec.add_dependency 'coffee-rails'
  spec.add_dependency 'haml_coffee_assets' # '>= 1.1.1'

  # Testing dependencies
  spec.add_development_dependency 'rspec-rails', '~> 2.10.1'
  spec.add_development_dependency 'evergreen', '>= 1.0.0'
  spec.add_development_dependency 'selenium-webdriver', '>= 2.20.0'
  #spec.add_development_dependency 'cucumber-rails', '>= 1.3.0'
  spec.add_development_dependency 'capybara'
  spec.add_development_dependency 'capybara-firebug', '>= 1.1.0'
  spec.add_development_dependency 'aruba'
  spec.add_development_dependency 'database_cleaner'

  # Gem Files
  spec.extra_rdoc_files = ['LICENSE', 'POST_INSTALL']
  # = MANIFEST =
  spec.files = Dir['lib/**/*', 'app/**/*', 'config/routes.rb']
  spec.test_files  = `git ls-files -- {test,spec,features}/*`.split("\n")
  spec.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  # = MANIFEST =
  spec.require_paths = ['lib']
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dataly/version'

# Gem specification for Dataly, a CSV data-import helper.
Gem::Specification.new do |spec|
  spec.name          = "dataly"
  spec.version       = Dataly::VERSION
  spec.authors       = ["Andrew McNamara"]
  spec.email         = ["andrewm@jobready.com.au"]
  # Real summary/description instead of the generator's "TODO" placeholder,
  # which RubyGems would otherwise publish verbatim.
  spec.summary       = %q{Simple data import from CSV.}
  spec.description   = %q{This gem allows you to define a mapper class, and a creator class to import data into your application via CSV.}
  spec.homepage      = ""
  spec.license       = "MIT"

  # Ship everything tracked by git; expose bin/ entries as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "fakefs"
  spec.add_development_dependency "pry-byebug"

  spec.add_dependency 'activesupport', "~> 4.0"
end
Update description and summary.
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dataly/version'

# Gem specification for Dataly, a mapper/creator based CSV importer.
Gem::Specification.new do |s|
  s.name    = "dataly"
  s.version = Dataly::VERSION
  s.authors = ["Andrew McNamara"]
  s.email   = ["andrewm@jobready.com.au"]
  s.summary     = "Simple data import from CSV."
  s.description = "This gem allows you to define a mapper class, and a creator class to import data into your application via CSV."
  s.homepage = ""
  s.license  = "MIT"

  # Everything tracked by git ships in the gem; bin/ entries become executables.
  s.files         = `git ls-files -z`.split("\x0")
  s.executables   = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
  s.test_files    = s.files.grep(%r{^(test|spec|features)/})
  s.require_paths = ["lib"]

  s.add_development_dependency "bundler", "~> 1.6"
  s.add_development_dependency "rake"
  s.add_development_dependency "rspec"
  s.add_development_dependency "fakefs"
  s.add_development_dependency "pry-byebug"

  s.add_dependency "activesupport", "~> 4.0"
end
|
# Gem specification for bing_translator, a client for the Bing translation
# HTTP API.
Gem::Specification.new do |spec|
  spec.name        = 'bing_translator'
  spec.version     = '5.0.0'
  spec.date        = '2017-03-01'
  spec.homepage    = 'https://www.github.com/relrod/bing_translator-gem'
  spec.summary     = 'Translate using the Bing HTTP API'
  spec.description = 'Translate strings using the Bing HTTP API. Requires that you have a Client ID and Secret. See README.md for information.'
  spec.authors     = ['Ricky Elrod']
  spec.email       = 'ricky@elrod.me'
  spec.files       = ['lib/bing_translator.rb']
  spec.licenses    = ['MIT']

  spec.add_dependency 'nokogiri', '~> 1.8.1'
  spec.add_dependency 'json', '~> 1.8.0'
  spec.add_dependency 'savon', '~> 2.10.0'
end
5.1.0 release
Signed-off-by: Ricky Elrod <3de8762d49a778edd8b1aa9f381ea5a9ccb62944@elrod.me>
# Gem specification for bing_translator 5.1.0, a client for the Bing
# translation HTTP API.
Gem::Specification.new do |spec|
  spec.name        = 'bing_translator'
  spec.version     = '5.1.0'
  spec.date        = '2017-12-18'
  spec.homepage    = 'https://www.github.com/relrod/bing_translator-gem'
  spec.summary     = 'Translate using the Bing HTTP API'
  spec.description = 'Translate strings using the Bing HTTP API. Requires that you have a Client ID and Secret. See README.md for information.'
  spec.authors     = ['Ricky Elrod']
  spec.email       = 'ricky@elrod.me'
  spec.files       = ['lib/bing_translator.rb']
  spec.licenses    = ['MIT']

  spec.add_dependency 'nokogiri', '~> 1.8.1'
  spec.add_dependency 'json', '~> 1.8.0'
  spec.add_dependency 'savon', '~> 2.10.0'
end
|
require "aws-sdk"
require "sanitize"
require "RMagick"
require "uuidtools"
require_relative "../server"
require_relative "../utils/routes_utils"
require_relative "../utils/string_utils"
# JSON API routes for hikes: list, create, search, fetch, update, and photo
# upload. Image files live on S3 in production and under public/ in
# development; uploaded/deleted photos are staged under tmp/ prefixes.
class HikeApp < Sinatra::Base

  # GET /api/v1/hikes — all hikes ordered by id, optionally restricted to the
  # fields named in the ?fields= query parameter.
  get "/api/v1/hikes", :provides => "json" do
    array_as_json(Hike.order(:id).all, get_fields_filter)
  end

  # POST /api/v1/hikes — create a hike (admin only).
  # 403 unless admin, 400 on malformed or invalid JSON, 409 when a hike with
  # the same derived string id already exists.
  post "/api/v1/hikes", :provides => "json" do
    return 403 if not is_admin?
    json = JSON.parse request.body.read rescue return 400
    return 400 if not is_valid_hike_input? json
    return 409 if Hike[:string_id => Hike.create_string_id_from_name(json["name"])]
    hike = Hike.create_from_json json
    hike.update_keywords
    hike.save
    hike.as_json
  end

  # GET /api/v1/hikes/search?q=... — keyword search. When the executor flags a
  # single best result, only that hit is returned.
  get "/api/v1/hikes/search", :provides => "json" do
    query = params[:q]
    return 400 if not query
    search_executor = SearchExecutor.new
    search_executor.logger = logger
    search_executor.query = query
    search_results = search_executor.execute
    if (search_executor.has_best_result)
      array_as_json([search_results[0]], get_fields_filter)
    else
      array_as_json(search_results, get_fields_filter)
    end
  end

  # GET /api/v1/hikes/:hike_id — a single hike, or 404.
  get "/api/v1/hikes/:hike_id", :provides => "json" do
    hike = RoutesUtils.get_hike_from_id params[:hike_id]
    return 404 if not hike
    # NOTE(review): the trailing `if hike` is redundant after the 404 guard.
    hike.as_json get_fields_filter if hike
  end

  # PUT /api/v1/hikes/:hike_id — update a hike (admin only).
  # Reconciles the named photo slots and the generic photo list against the
  # payload; photos dropped from the hike are deleted and their image files
  # parked under tmp/deleted/ (S3 in production, local disk otherwise).
  put "/api/v1/hikes/:hike_id", :provides => "json" do
    return 403 if not is_admin?
    hike = RoutesUtils.get_hike_from_id params[:hike_id]
    return 404 if not hike
    json = JSON.parse request.body.read rescue return 400
    return 400 if not is_valid_hike_input? json
    hike.update_from_json json
    # NOTE(review): update_from_json has already run, so json["name"] and
    # hike.name may already agree here — confirm the intended comparison.
    hike.update_keywords if json["name"] != hike.name
    removed_photos = []
    # Sync each named photo slot with the payload: replace, keep, or clear.
    hike.each_photo do |photo_key|
      existing_photo = hike.send(photo_key)
      if json[photo_key] != nil
        hike.send "#{photo_key}=", Photo.find(:id => json[photo_key]["id"])
        move_photo_if_needed existing_photo, hike if existing_photo
      elsif existing_photo
        removed_photos.push existing_photo
        hike.send "#{photo_key}=", nil
      end
    end
    if json["photos_generic"]
      new_generic_photos = []
      # NOTE(review): Array#each yields one element per iteration, so `index`
      # is always nil here — harmless, but each_with_index was likely meant.
      json["photos_generic"].each do |photo, index|
        photo = Photo.find(:id => photo["id"])
        new_generic_photos.push(photo) if photo
      end
      # Diff the incoming list against the current associations.
      added_generic_photos = new_generic_photos - hike.photos_generic
      removed_generic_photos = hike.photos_generic - new_generic_photos
      added_generic_photos.each do |photo|
        hike.add_photos_generic(photo)
        move_photo_if_needed photo, hike
      end
      removed_photos += removed_generic_photos
      removed_generic_photos.each do |photo|
        hike.remove_photos_generic(photo)
      end
    end
    hike.edit_time = Time.now
    hike.location.save_changes
    hike.save_changes
    # Delete removed photo rows and move their rendition files to tmp/deleted/.
    removed_photos.each do |photo|
      photo.delete
      if settings.production?
        bucket = s3.buckets["assets.hike.io"]
        src = "hike-images/" + photo.string_id
        dst = "hike-images/tmp/deleted/" + photo.string_id
        bucket.objects[src + "-original.jpg"].move_to(dst + "-original.jpg")
        bucket.objects[src + "-large.jpg"].move_to(dst + "-large.jpg")
        bucket.objects[src + "-medium.jpg"].move_to(dst + "-medium.jpg")
        bucket.objects[src + "-small.jpg"].move_to(dst + "-small.jpg")
        bucket.objects[src + "-thumb.jpg"].move_to(dst + "-thumb.jpg")
      else
        src = self.root + "/public/hike-images/" + photo.string_id
        dst_dir = self.root + "/public/hike-images/tmp/deleted/"
        FileUtils.mkdir_p(dst_dir)
        FileUtils.mv(src + "-original.jpg", dst_dir)
        FileUtils.mv(src + "-large.jpg", dst_dir)
        FileUtils.mv(src + "-medium.jpg", dst_dir)
        FileUtils.mv(src + "-small.jpg", dst_dir)
        FileUtils.mv(src + "-thumb.jpg", dst_dir)
      end
    end
    hike.as_json
  end

  # POST /api/v1/hikes/:hike_id/photos — upload one photo (admin only).
  # Normalizes the image, renders the size variants, writes them to the
  # tmp/uploading/ staging area, and records a Photo row.
  post "/api/v1/hikes/:hike_id/photos", :provides => "json" do
    return 403 if not is_admin?
    hike = RoutesUtils.get_hike_from_id params[:hike_id]
    uploaded_file = params[:file]
    return 404 if not hike
    return 400 if not uploaded_file
    name = UUIDTools::UUID.random_create.to_s
    # Cap at 2400px, drop EXIF/profiles, then sharpen for web display.
    original_image = Magick::Image.read(uploaded_file[:tempfile].path).first
    original_image.resize_to_fit!(2400, 2400)
    original_image.strip!
    original_image.profile!("*", nil)
    sharpened_image = original_image.unsharp_mask(2, 0.5, 0.7, 0) #http://even.li/imagemagick-sharp-web-sized-photographs/
    # Landscape images are constrained by width only; portrait by both axes.
    if original_image.columns > original_image.rows
      large_image = sharpened_image.resize_to_fit(1200)
      medium_image = sharpened_image.resize_to_fit(800)
      small_image = sharpened_image.resize_to_fit(400)
      tiny_image = sharpened_image.resize_to_fit(200)
    else
      large_image = sharpened_image.resize_to_fit(1200, 2400)
      medium_image = sharpened_image.resize_to_fit(800, 1600)
      small_image = sharpened_image.resize_to_fit(400, 800)
      tiny_image = sharpened_image.resize_to_fit(200, 400)
    end
    thumb_image = sharpened_image.crop_resized(400, 400)
    tiny_thumb_image = sharpened_image.crop_resized(200, 200)
    if settings.production?
      bucket = s3.buckets["assets.hike.io"]
      dst_dir = "hike-images/tmp/uploading/"
      bucket.objects[dst_dir + name + "-original.jpg"].write(original_image.to_blob)
      bucket.objects[dst_dir + name + "-large.jpg"].write(large_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-medium.jpg"].write(medium_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-small.jpg"].write(small_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-tiny.jpg"].write(tiny_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-thumb.jpg"].write(thumb_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-thumb-tiny.jpg"].write(tiny_thumb_image.to_blob { self.quality = 87 })
    else
      dst_dir = self.root + "/public/hike-images/tmp/uploading/"
      FileUtils.mkdir_p(dst_dir)
      original_image.write(dst_dir + name + "-original.jpg") { self.quality = 87 }
      large_image.write(dst_dir + name + "-large.jpg") { self.quality = 87 }
      medium_image.write(dst_dir + name + "-medium.jpg") { self.quality = 87 }
      small_image.write(dst_dir + name + "-small.jpg") { self.quality = 87 }
      tiny_image.write(dst_dir + name + "-tiny.jpg") { self.quality = 87 }
      thumb_image.write(dst_dir + name + "-thumb.jpg") { self.quality = 87 }
      tiny_thumb_image.write(dst_dir + name + "-thumb-tiny.jpg") { self.quality = 87 }
    end
    photo = Photo.create({
      :string_id => "tmp/uploading/" + name,
      :width => original_image.columns,
      :height => original_image.rows
    })
    photo.as_json
  end

  # Move a photo's rendition files out of tmp/ staging into the hike's own
  # image directory and update the Photo row's string_id accordingly.
  def move_photo_if_needed photo, hike
    if photo.string_id.start_with? "tmp/"
      src = "hike-images/" + photo.string_id
      photo_id = photo.string_id["tmp/uploading/".length..-1]
      dst_dir = "hike-images/" + hike.string_id + "/"
      dst = dst_dir + photo_id
      if settings.production?
        s3.buckets["assets.hike.io"].objects[src + "-original.jpg"].move_to(dst + "-original.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-large.jpg"].move_to(dst + "-large.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-medium.jpg"].move_to(dst + "-medium.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-small.jpg"].move_to(dst + "-small.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-thumb.jpg"].move_to(dst + "-thumb.jpg")
      else
        FileUtils.mkdir_p(self.root + "/public/" + dst_dir)
        FileUtils.mv(self.root + "/public/" + src + "-original.jpg", self.root + "/public/" + dst + "-original.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-large.jpg", self.root + "/public/" + dst + "-large.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-medium.jpg", self.root + "/public/" + dst + "-medium.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-small.jpg", self.root + "/public/" + dst + "-small.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-thumb.jpg", self.root + "/public/" + dst + "-thumb.jpg")
      end
      photo.string_id = hike.string_id + "/" + photo_id
      photo.save_changes
    end
  end

  # Lazily build and memoize the S3 client from environment credentials.
  # NOTE(review): `@s3 = @s3 || ...; @s3` is equivalent to `@s3 ||= ...`.
  def s3
    @s3 = @s3 || AWS::S3.new(
      :access_key_id => ENV["S3_ACCESS_KEY_ID"],
      :secret_access_key => ENV["S3_SECRET_ACCESS_KEY"]
    )
    @s3
  end

  # A hike payload is valid when it carries a name, locality, numeric distance
  # and elevation_max, and an in-range numeric latitude/longitude.
  def is_valid_hike_input? json
    json["name"] &&
    json["locality"] &&
    json["distance"] &&
    json["elevation_max"] &&
    json["location"] &&
    json["location"]["latitude"] &&
    json["location"]["longitude"] &&
    StringUtils.is_numeric?(json["distance"]) &&
    StringUtils.is_numeric?(json["elevation_max"]) &&
    StringUtils.is_numeric?(json["location"]["latitude"]) &&
    StringUtils.is_numeric?(json["location"]["longitude"]) &&
    is_valid_latitude?(json["location"]["latitude"]) &&
    is_valid_longitude?(json["location"]["longitude"])
  end

  # Latitude must lie in [-90, 90] degrees.
  def is_valid_latitude? latitude
    latitude = latitude.to_f
    latitude >= -90 and latitude <= 90
  end

  # Longitude must lie in [-180, 180] degrees.
  def is_valid_longitude? longitude
    longitude = longitude.to_f
    longitude >= -180 and longitude <= 180
  end

  # Comma-separated ?fields= list, or nil for "all fields".
  def get_fields_filter
    params[:fields] ? params[:fields].split(",") : nil
  end
end
Use to_json instead of as_json for photo.
require "aws-sdk"
require "sanitize"
require "RMagick"
require "uuidtools"
require_relative "../server"
require_relative "../utils/routes_utils"
require_relative "../utils/string_utils"
# JSON API routes for hikes: list, create, search, fetch, update, and photo
# upload. Image files live on S3 in production and under public/ in
# development; uploaded/deleted photos are staged under tmp/ prefixes.
class HikeApp < Sinatra::Base

  # GET /api/v1/hikes — all hikes ordered by id, optionally restricted to the
  # fields named in the ?fields= query parameter.
  get "/api/v1/hikes", :provides => "json" do
    array_as_json(Hike.order(:id).all, get_fields_filter)
  end

  # POST /api/v1/hikes — create a hike (admin only).
  # 403 unless admin, 400 on malformed or invalid JSON, 409 when a hike with
  # the same derived string id already exists.
  post "/api/v1/hikes", :provides => "json" do
    return 403 if not is_admin?
    json = JSON.parse request.body.read rescue return 400
    return 400 if not is_valid_hike_input? json
    return 409 if Hike[:string_id => Hike.create_string_id_from_name(json["name"])]
    hike = Hike.create_from_json json
    hike.update_keywords
    hike.save
    hike.as_json
  end

  # GET /api/v1/hikes/search?q=... — keyword search. When the executor flags a
  # single best result, only that hit is returned.
  get "/api/v1/hikes/search", :provides => "json" do
    query = params[:q]
    return 400 if not query
    search_executor = SearchExecutor.new
    search_executor.logger = logger
    search_executor.query = query
    search_results = search_executor.execute
    if (search_executor.has_best_result)
      array_as_json([search_results[0]], get_fields_filter)
    else
      array_as_json(search_results, get_fields_filter)
    end
  end

  # GET /api/v1/hikes/:hike_id — a single hike, or 404.
  get "/api/v1/hikes/:hike_id", :provides => "json" do
    hike = RoutesUtils.get_hike_from_id params[:hike_id]
    return 404 if not hike
    # NOTE(review): the trailing `if hike` is redundant after the 404 guard.
    hike.as_json get_fields_filter if hike
  end

  # PUT /api/v1/hikes/:hike_id — update a hike (admin only).
  # Reconciles the named photo slots and the generic photo list against the
  # payload; photos dropped from the hike are deleted and their image files
  # parked under tmp/deleted/ (S3 in production, local disk otherwise).
  put "/api/v1/hikes/:hike_id", :provides => "json" do
    return 403 if not is_admin?
    hike = RoutesUtils.get_hike_from_id params[:hike_id]
    return 404 if not hike
    json = JSON.parse request.body.read rescue return 400
    return 400 if not is_valid_hike_input? json
    hike.update_from_json json
    # NOTE(review): update_from_json has already run, so json["name"] and
    # hike.name may already agree here — confirm the intended comparison.
    hike.update_keywords if json["name"] != hike.name
    removed_photos = []
    # Sync each named photo slot with the payload: replace, keep, or clear.
    hike.each_photo do |photo_key|
      existing_photo = hike.send(photo_key)
      if json[photo_key] != nil
        hike.send "#{photo_key}=", Photo.find(:id => json[photo_key]["id"])
        move_photo_if_needed existing_photo, hike if existing_photo
      elsif existing_photo
        removed_photos.push existing_photo
        hike.send "#{photo_key}=", nil
      end
    end
    if json["photos_generic"]
      new_generic_photos = []
      # NOTE(review): Array#each yields one element per iteration, so `index`
      # is always nil here — harmless, but each_with_index was likely meant.
      json["photos_generic"].each do |photo, index|
        photo = Photo.find(:id => photo["id"])
        new_generic_photos.push(photo) if photo
      end
      # Diff the incoming list against the current associations.
      added_generic_photos = new_generic_photos - hike.photos_generic
      removed_generic_photos = hike.photos_generic - new_generic_photos
      added_generic_photos.each do |photo|
        hike.add_photos_generic(photo)
        move_photo_if_needed photo, hike
      end
      removed_photos += removed_generic_photos
      removed_generic_photos.each do |photo|
        hike.remove_photos_generic(photo)
      end
    end
    hike.edit_time = Time.now
    hike.location.save_changes
    hike.save_changes
    # Delete removed photo rows and move their rendition files to tmp/deleted/.
    removed_photos.each do |photo|
      photo.delete
      if settings.production?
        bucket = s3.buckets["assets.hike.io"]
        src = "hike-images/" + photo.string_id
        dst = "hike-images/tmp/deleted/" + photo.string_id
        bucket.objects[src + "-original.jpg"].move_to(dst + "-original.jpg")
        bucket.objects[src + "-large.jpg"].move_to(dst + "-large.jpg")
        bucket.objects[src + "-medium.jpg"].move_to(dst + "-medium.jpg")
        bucket.objects[src + "-small.jpg"].move_to(dst + "-small.jpg")
        bucket.objects[src + "-thumb.jpg"].move_to(dst + "-thumb.jpg")
      else
        src = self.root + "/public/hike-images/" + photo.string_id
        dst_dir = self.root + "/public/hike-images/tmp/deleted/"
        FileUtils.mkdir_p(dst_dir)
        FileUtils.mv(src + "-original.jpg", dst_dir)
        FileUtils.mv(src + "-large.jpg", dst_dir)
        FileUtils.mv(src + "-medium.jpg", dst_dir)
        FileUtils.mv(src + "-small.jpg", dst_dir)
        FileUtils.mv(src + "-thumb.jpg", dst_dir)
      end
    end
    hike.as_json
  end

  # POST /api/v1/hikes/:hike_id/photos — upload one photo (admin only).
  # Normalizes the image, renders the size variants, writes them to the
  # tmp/uploading/ staging area, and records a Photo row.
  post "/api/v1/hikes/:hike_id/photos", :provides => "json" do
    return 403 if not is_admin?
    hike = RoutesUtils.get_hike_from_id params[:hike_id]
    uploaded_file = params[:file]
    return 404 if not hike
    return 400 if not uploaded_file
    name = UUIDTools::UUID.random_create.to_s
    # Cap at 2400px, drop EXIF/profiles, then sharpen for web display.
    original_image = Magick::Image.read(uploaded_file[:tempfile].path).first
    original_image.resize_to_fit!(2400, 2400)
    original_image.strip!
    original_image.profile!("*", nil)
    sharpened_image = original_image.unsharp_mask(2, 0.5, 0.7, 0) #http://even.li/imagemagick-sharp-web-sized-photographs/
    # Landscape images are constrained by width only; portrait by both axes.
    if original_image.columns > original_image.rows
      large_image = sharpened_image.resize_to_fit(1200)
      medium_image = sharpened_image.resize_to_fit(800)
      small_image = sharpened_image.resize_to_fit(400)
      tiny_image = sharpened_image.resize_to_fit(200)
    else
      large_image = sharpened_image.resize_to_fit(1200, 2400)
      medium_image = sharpened_image.resize_to_fit(800, 1600)
      small_image = sharpened_image.resize_to_fit(400, 800)
      tiny_image = sharpened_image.resize_to_fit(200, 400)
    end
    thumb_image = sharpened_image.crop_resized(400, 400)
    tiny_thumb_image = sharpened_image.crop_resized(200, 200)
    if settings.production?
      bucket = s3.buckets["assets.hike.io"]
      dst_dir = "hike-images/tmp/uploading/"
      bucket.objects[dst_dir + name + "-original.jpg"].write(original_image.to_blob)
      bucket.objects[dst_dir + name + "-large.jpg"].write(large_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-medium.jpg"].write(medium_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-small.jpg"].write(small_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-tiny.jpg"].write(tiny_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-thumb.jpg"].write(thumb_image.to_blob { self.quality = 87 })
      bucket.objects[dst_dir + name + "-thumb-tiny.jpg"].write(tiny_thumb_image.to_blob { self.quality = 87 })
    else
      dst_dir = self.root + "/public/hike-images/tmp/uploading/"
      FileUtils.mkdir_p(dst_dir)
      original_image.write(dst_dir + name + "-original.jpg") { self.quality = 87 }
      large_image.write(dst_dir + name + "-large.jpg") { self.quality = 87 }
      medium_image.write(dst_dir + name + "-medium.jpg") { self.quality = 87 }
      small_image.write(dst_dir + name + "-small.jpg") { self.quality = 87 }
      tiny_image.write(dst_dir + name + "-tiny.jpg") { self.quality = 87 }
      thumb_image.write(dst_dir + name + "-thumb.jpg") { self.quality = 87 }
      tiny_thumb_image.write(dst_dir + name + "-thumb-tiny.jpg") { self.quality = 87 }
    end
    photo = Photo.create({
      :string_id => "tmp/uploading/" + name,
      :width => original_image.columns,
      :height => original_image.rows
    })
    # Serializes the photo record to a JSON string for the response body.
    photo.to_json
  end

  # Move a photo's rendition files out of tmp/ staging into the hike's own
  # image directory and update the Photo row's string_id accordingly.
  def move_photo_if_needed photo, hike
    if photo.string_id.start_with? "tmp/"
      src = "hike-images/" + photo.string_id
      photo_id = photo.string_id["tmp/uploading/".length..-1]
      dst_dir = "hike-images/" + hike.string_id + "/"
      dst = dst_dir + photo_id
      if settings.production?
        s3.buckets["assets.hike.io"].objects[src + "-original.jpg"].move_to(dst + "-original.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-large.jpg"].move_to(dst + "-large.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-medium.jpg"].move_to(dst + "-medium.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-small.jpg"].move_to(dst + "-small.jpg")
        s3.buckets["assets.hike.io"].objects[src + "-thumb.jpg"].move_to(dst + "-thumb.jpg")
      else
        FileUtils.mkdir_p(self.root + "/public/" + dst_dir)
        FileUtils.mv(self.root + "/public/" + src + "-original.jpg", self.root + "/public/" + dst + "-original.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-large.jpg", self.root + "/public/" + dst + "-large.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-medium.jpg", self.root + "/public/" + dst + "-medium.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-small.jpg", self.root + "/public/" + dst + "-small.jpg")
        FileUtils.mv(self.root + "/public/" + src + "-thumb.jpg", self.root + "/public/" + dst + "-thumb.jpg")
      end
      photo.string_id = hike.string_id + "/" + photo_id
      photo.save_changes
    end
  end

  # Lazily build and memoize the S3 client from environment credentials.
  # NOTE(review): `@s3 = @s3 || ...; @s3` is equivalent to `@s3 ||= ...`.
  def s3
    @s3 = @s3 || AWS::S3.new(
      :access_key_id => ENV["S3_ACCESS_KEY_ID"],
      :secret_access_key => ENV["S3_SECRET_ACCESS_KEY"]
    )
    @s3
  end

  # A hike payload is valid when it carries a name, locality, numeric distance
  # and elevation_max, and an in-range numeric latitude/longitude.
  def is_valid_hike_input? json
    json["name"] &&
    json["locality"] &&
    json["distance"] &&
    json["elevation_max"] &&
    json["location"] &&
    json["location"]["latitude"] &&
    json["location"]["longitude"] &&
    StringUtils.is_numeric?(json["distance"]) &&
    StringUtils.is_numeric?(json["elevation_max"]) &&
    StringUtils.is_numeric?(json["location"]["latitude"]) &&
    StringUtils.is_numeric?(json["location"]["longitude"]) &&
    is_valid_latitude?(json["location"]["latitude"]) &&
    is_valid_longitude?(json["location"]["longitude"])
  end

  # Latitude must lie in [-90, 90] degrees.
  def is_valid_latitude? latitude
    latitude = latitude.to_f
    latitude >= -90 and latitude <= 90
  end

  # Longitude must lie in [-180, 180] degrees.
  def is_valid_longitude? longitude
    longitude = longitude.to_f
    longitude >= -180 and longitude <= 180
  end

  # Comma-separated ?fields= list, or nil for "all fields".
  def get_fields_filter
    params[:fields] ? params[:fields].split(",") : nil
  end
end
# Homebrew formula for svcat, the Kubernetes Service Catalog CLI (v0.3.1).
class KubernetesServiceCatalogClient < Formula
  desc "Consume Services in k8s using the OSB API"
  homepage "https://svc-cat.io/"
  url "https://github.com/kubernetes-sigs/service-catalog/archive/v0.3.1.tar.gz"
  sha256 "5b463be2102b32bd5a5fed5d433ef53da4d1f70bf007b5a4b78eee7024ca52e3"
  license "Apache-2.0"

  bottle do
    cellar :any_skip_relocation
    sha256 "a6f26e163ee15f601fef1b974e3c55f22a4c7333aea3ddf6ce009f386b58db18" => :catalina
    sha256 "9d29ae7fed57216e663459a4964c9946475329bdd4a6aa0666d69019840c6abf" => :mojave
    sha256 "a6b37292f716de1ba860d6e38905aa80063120ca8018d58b0bd05bca7475a253" => :high_sierra
  end

  depends_on "go" => :build

  def install
    # Upstream's build tooling wants Docker; disable for a plain `go build`.
    ENV["NO_DOCKER"] = "1"
    # Strip symbols and stamp the release version into the binary.
    ldflags = %W[
      -s -w
      -X github.com/kubernetes-sigs/service-catalog/pkg.VERSION=v#{version}
    ]
    system "go", "build", "-ldflags", ldflags.join(" "), "-o",
           bin/"svcat", "./cmd/svcat"
    prefix.install_metafiles
  end

  test do
    # No kubeconfig exists in the test sandbox, so the command exits 1;
    # assert on the expected error text.
    version_output = shell_output("#{bin}/svcat version --client 2>&1", 1)
    assert_match "Error: could not get Kubernetes config for context", version_output
  end
end
kubernetes-service-catalog-client: update 0.3.1 bottle.
# Homebrew formula for svcat, the Kubernetes Service Catalog CLI (v0.3.1,
# with a rebuilt bottle set that adds a Linux bottle).
class KubernetesServiceCatalogClient < Formula
  desc "Consume Services in k8s using the OSB API"
  homepage "https://svc-cat.io/"
  url "https://github.com/kubernetes-sigs/service-catalog/archive/v0.3.1.tar.gz"
  sha256 "5b463be2102b32bd5a5fed5d433ef53da4d1f70bf007b5a4b78eee7024ca52e3"
  license "Apache-2.0"

  bottle do
    cellar :any_skip_relocation
    sha256 "a6f26e163ee15f601fef1b974e3c55f22a4c7333aea3ddf6ce009f386b58db18" => :catalina
    sha256 "9d29ae7fed57216e663459a4964c9946475329bdd4a6aa0666d69019840c6abf" => :mojave
    sha256 "a6b37292f716de1ba860d6e38905aa80063120ca8018d58b0bd05bca7475a253" => :high_sierra
    sha256 "ccdeb0fce202364b94bbb2cde41d7f77637eea083527721fe961b384ab8e70ea" => :x86_64_linux
  end

  depends_on "go" => :build

  def install
    # Upstream's build tooling wants Docker; disable for a plain `go build`.
    ENV["NO_DOCKER"] = "1"
    # Strip symbols and stamp the release version into the binary.
    ldflags = %W[
      -s -w
      -X github.com/kubernetes-sigs/service-catalog/pkg.VERSION=v#{version}
    ]
    system "go", "build", "-ldflags", ldflags.join(" "), "-o",
           bin/"svcat", "./cmd/svcat"
    prefix.install_metafiles
  end

  test do
    # No kubeconfig exists in the test sandbox, so the command exits 1;
    # assert on the expected error text.
    version_output = shell_output("#{bin}/svcat version --client 2>&1", 1)
    assert_match "Error: could not get Kubernetes config for context", version_output
  end
end
|
# Homebrew formula for svcat, the Kubernetes Service Catalog CLI (v0.2.3,
# built from a pinned git tag/revision).
class KubernetesServiceCatalogClient < Formula
  desc "Consume Services in k8s using the OSB API"
  homepage "https://svc-cat.io/"
  url "https://github.com/kubernetes-sigs/service-catalog.git",
      :tag => "v0.2.3",
      :revision => "62201e94f74962ff94dba0664e5e9e757c58188a"

  bottle do
    cellar :any_skip_relocation
    sha256 "fab55f3ff9b603beae7a4f8a5c0ea79cd3c4a24a821b4249dc2f9d2c345fa094" => :catalina
    sha256 "c1107702caae510cb024520cd793a999581d7133159a03d84e727198e34fda8e" => :mojave
    sha256 "dc5468299aea99767c0fdd5d7da91fdde53bee5b44d012476ead31c5d2288e2a" => :high_sierra
  end

  depends_on "go" => :build

  def install
    # Pre-modules Go build: stage the sources at the canonical import path
    # under a throwaway GOPATH, then build from there.
    ENV["GOPATH"] = buildpath
    # Upstream's build tooling wants Docker; disable for a plain `go build`.
    ENV["NO_DOCKER"] = "1"
    dir = buildpath/"src/github.com/kubernetes-sigs/service-catalog"
    dir.install buildpath.children
    cd dir do
      # Strip symbols and stamp the release version into the binary.
      ldflags = %W[
        -s -w -X
        github.com/kubernetes-sigs/service-catalog/pkg.VERSION=v#{version}
      ]
      system "go", "build", "-ldflags", ldflags.join(" "), "-o",
             bin/"svcat", "./cmd/svcat"
      prefix.install_metafiles
    end
  end

  test do
    # `svcat version --client` succeeds without a cluster at this version and
    # prints the stamped client version.
    version_output = shell_output("#{bin}/svcat version --client 2>&1")
    assert_match "Client Version: v#{version}", version_output
  end
end
kubernetes-service-catalog-client 0.3.0
- remove gopath
- update to use tarball/sha256
- update test
Closes #55316.
Signed-off-by: chenrui <5fd29470147430022ff146db88de16ee91dea376@gmail.com>
# Homebrew formula for svcat, the Kubernetes Service Catalog CLI (v0.3.0,
# switched from a git checkout to a release tarball; no GOPATH staging
# needed now that upstream uses Go modules).
class KubernetesServiceCatalogClient < Formula
  desc "Consume Services in k8s using the OSB API"
  homepage "https://svc-cat.io/"
  url "https://github.com/kubernetes-sigs/service-catalog/archive/v0.3.0.tar.gz"
  sha256 "3f79365cbe9c63513ddf3ad836177f0bd9ecee5b36cad015a3e9353e47cc874e"

  bottle do
    cellar :any_skip_relocation
    sha256 "fab55f3ff9b603beae7a4f8a5c0ea79cd3c4a24a821b4249dc2f9d2c345fa094" => :catalina
    sha256 "c1107702caae510cb024520cd793a999581d7133159a03d84e727198e34fda8e" => :mojave
    sha256 "dc5468299aea99767c0fdd5d7da91fdde53bee5b44d012476ead31c5d2288e2a" => :high_sierra
  end

  depends_on "go" => :build

  def install
    # Upstream's build tooling wants Docker; disable for a plain `go build`.
    ENV["NO_DOCKER"] = "1"
    # Strip symbols and stamp the release version into the binary.
    ldflags = %W[
      -s -w
      -X github.com/kubernetes-sigs/service-catalog/pkg.VERSION=v#{version}
    ]
    system "go", "build", "-ldflags", ldflags.join(" "), "-o",
           bin/"svcat", "./cmd/svcat"
    prefix.install_metafiles
  end

  test do
    # No kubeconfig exists in the test sandbox, so the command exits 1;
    # assert on the expected error text.
    version_output = shell_output("#{bin}/svcat version --client 2>&1", 1)
    assert_match "Error: could not get Kubernetes config for context", version_output
  end
end
|
# A simple integer calculator to answer the question about how to do
# left and right associativity in parslet (PEG) once and for all.
$:.unshift File.dirname(__FILE__) + "/../lib"
require 'rspec'
require 'parslet'
require 'parslet/rig/rspec'
# PEG grammar for integer arithmetic. Left associativity is obtained by
# parsing each precedence level as "operand (op operand)+" and folding the
# resulting flat list later (see CalcTransform / Seq).
class CalcParser < Parslet::Parser
  root :addition

  # Addition/subtraction level: a left-folded chain, or a bare multiplication.
  rule(:addition) {
    multiplication.as(:l) >> (add_op >> multiplication.as(:r)).repeat(1) |
    multiplication
  }

  # Multiplication/division binds tighter than addition.
  rule(:multiplication) {
    integer.as(:l) >> (mult_op >> integer.as(:r)).repeat(1) |
    integer }

  # Integers capture their digits as :i and swallow trailing whitespace.
  rule(:integer) { digit.repeat(1).as(:i) >> space? }

  rule(:mult_op) { match['*/'].as(:o) >> space? }
  rule(:add_op) { match['+-'].as(:o) >> space? }

  rule(:digit) { match['0-9'] }
  rule(:space?) { match['\s'].repeat }
end
# Integer leaf of the calculator's expression tree.
Int = Struct.new(:int) {
  # A leaf evaluates to itself.
  def eval; self end

  # Apply the binary operator +operation+ ('+', '-', '*', '/') to this value
  # and +other+, returning a new Int. '/' is Integer division.
  def op(operation, other)
    a = int
    b = other.int
    result =
      case operation
      when '+' then a + b
      when '-' then a - b
      when '*' then a * b
      when '/' then a / b
      end
    Int.new(result)
  end

  # The wrapped Integer.
  def to_i
    int
  end
}
# A left-fold over [initial_value, op, op, ...]: the first element seeds the
# accumulator and each following callable transforms it in turn.
Seq = Struct.new(:sequence) {
  def eval
    acc = sequence.first
    sequence.drop(1).each { |operation| acc = operation.call(acc) }
    acc
  end
}
# One (operator, right-operand) step of a left-associative chain: given the
# already-accumulated +left+ expression, evaluate both sides and combine them.
LeftOp = Struct.new(:operation, :right) {
  def call(left)
    left.eval.op(operation, right.eval)
  end
}
# Tree rewriting: digit captures become Int values, each (op, right) pair
# becomes a LeftOp step, :l wrappers unwrap, and a flat chain becomes a Seq
# that folds left — yielding left associativity.
class CalcTransform < Parslet::Transform
  rule(i: simple(:i)) { Int.new(Integer(i)) }
  rule(o: simple(:o), r: simple(:i)) { LeftOp.new(o, i) }
  rule(l: simple(:i)) { i }
  rule(sequence(:seq)) { Seq.new(seq) }
end
# Parse +str+, transform the parse tree into an evaluable expression, and
# reduce it to an Integer result.
def calculate(str)
  ast = CalcTransform.new.apply(CalcParser.new.parse(str))
  ast.eval.to_i
end
# A test suite for the above parser
describe CalcParser do
  let(:p) { described_class.new }

  describe '#integer' do
    let(:i) { p.integer }
    it "parses integers" do
      i.should parse('1')
      i.should parse('123')
    end
    it "consumes trailing white space" do
      i.should parse('123 ')
    end
    it "doesn't parse floats" do
      i.should_not parse('1.3')
    end
  end

  describe '#multiplication' do
    let(:m) { p.multiplication }
    it "parses simple multiplication" do
      m.should parse('1*2')
    end
    it "parses division" do
      m.should parse('1/2')
    end
  end

  describe '#addition' do
    let(:a) { p.addition }
    it "parses simple addition" do
      a.should parse('1+2')
      a.should parse('1+2+3-4')
    end
  end

  describe 'whole computation specs' do
    # Defines one example per (input, expected result) pair.
    def self.result_of(str, int)
      it(str) { calculate(str).should == int }
    end

    result_of '1+1', 2
    result_of '1-1-1', -1
    result_of '1+1+3*5/2', 9
  end
end

describe CalcTransform do
  # Apply the transform to a plain Ruby tree fragment.
  def t(obj)
    described_class.new.apply(obj)
  end

  it "transforms integers" do
    t(i: '1').should == Int.new(1)
  end
  it "unwraps left operand" do
    t(l: :obj).should == :obj
  end
end
# Run the in-file spec suite, then evaluate the command line as an expression.
RSpec::Core::Runner.run([], $stderr, $stdout)

str = ARGV.join
# NOTE(review): ARGV.join concatenates without separators, so this guard only
# skips evaluation when an individual argument itself contains whitespace —
# confirm that is the intended condition.
unless str.match(/\s+/)
  puts
  print "You're asking me for the result of #{str}: -> "
  puts calculate(ARGV.join)
end
. nicer running
# A simple integer calculator to answer the question about how to do
# left and right associativity in parslet (PEG) once and for all.
$:.unshift File.dirname(__FILE__) + "/../lib"
require 'rspec'
require 'parslet'
require 'parslet/rig/rspec'
# PEG grammar for integer arithmetic. Left associativity is obtained by
# parsing each precedence level as "operand (op operand)+" and folding the
# resulting flat list later (see CalcTransform / Seq).
class CalcParser < Parslet::Parser
  root :addition

  # Addition/subtraction level: a left-folded chain, or a bare multiplication.
  rule(:addition) {
    multiplication.as(:l) >> (add_op >> multiplication.as(:r)).repeat(1) |
    multiplication
  }

  # Multiplication/division binds tighter than addition.
  rule(:multiplication) {
    integer.as(:l) >> (mult_op >> integer.as(:r)).repeat(1) |
    integer }

  # Integers capture their digits as :i and swallow trailing whitespace.
  rule(:integer) { digit.repeat(1).as(:i) >> space? }

  rule(:mult_op) { match['*/'].as(:o) >> space? }
  rule(:add_op) { match['+-'].as(:o) >> space? }

  rule(:digit) { match['0-9'] }
  rule(:space?) { match['\s'].repeat }
end
# Integer leaf of the calculator's expression tree.
Int = Struct.new(:int) {
  # A leaf evaluates to itself.
  def eval; self end

  # Apply the binary operator +operation+ ('+', '-', '*', '/') to this value
  # and +other+, returning a new Int. '/' is Integer division.
  def op(operation, other)
    a = int
    b = other.int
    result =
      case operation
      when '+' then a + b
      when '-' then a - b
      when '*' then a * b
      when '/' then a / b
      end
    Int.new(result)
  end

  # The wrapped Integer.
  def to_i
    int
  end
}
# A left-fold over [initial_value, op, op, ...]: the first element seeds the
# accumulator and each following callable transforms it in turn.
Seq = Struct.new(:sequence) {
  def eval
    acc = sequence.first
    sequence.drop(1).each { |operation| acc = operation.call(acc) }
    acc
  end
}
# One (operator, right-operand) step of a left-associative chain: given the
# already-accumulated +left+ expression, evaluate both sides and combine them.
LeftOp = Struct.new(:operation, :right) {
  def call(left)
    left.eval.op(operation, right.eval)
  end
}
# Tree rewriting: digit captures become Int values, each (op, right) pair
# becomes a LeftOp step, :l wrappers unwrap, and a flat chain becomes a Seq
# that folds left — yielding left associativity.
class CalcTransform < Parslet::Transform
  rule(i: simple(:i)) { Int.new(Integer(i)) }
  rule(o: simple(:o), r: simple(:i)) { LeftOp.new(o, i) }
  rule(l: simple(:i)) { i }
  rule(sequence(:seq)) { Seq.new(seq) }
end
# Parse +str+, transform the parse tree into an evaluable expression, and
# reduce it to an Integer result.
def calculate(str)
  ast = CalcTransform.new.apply(CalcParser.new.parse(str))
  ast.eval.to_i
end
# A test suite for the above parser
describe CalcParser do
  let(:p) { described_class.new }

  describe '#integer' do
    let(:i) { p.integer }
    it "parses integers" do
      i.should parse('1')
      i.should parse('123')
    end
    it "consumes trailing white space" do
      i.should parse('123 ')
    end
    it "doesn't parse floats" do
      i.should_not parse('1.3')
    end
  end

  describe '#multiplication' do
    let(:m) { p.multiplication }
    it "parses simple multiplication" do
      m.should parse('1*2')
    end
    it "parses division" do
      m.should parse('1/2')
    end
  end

  describe '#addition' do
    let(:a) { p.addition }
    it "parses simple addition" do
      a.should parse('1+2')
      a.should parse('1+2+3-4')
    end
  end
end

describe CalcTransform do
  # Apply the transform to a plain Ruby tree fragment.
  def t(obj)
    described_class.new.apply(obj)
  end

  it "transforms integers" do
    t(i: '1').should == Int.new(1)
  end
  it "unwraps left operand" do
    t(l: :obj).should == :obj
  end
end

# End-to-end examples: one generated spec per (input, expected result) pair.
describe 'whole computation specs' do
  def self.result_of(str, int)
    it(str) { calculate(str).should == int }
  end

  result_of '1+1', 2
  result_of '1-1-1', -1
  result_of '1+1+3*5/2', 9
  result_of '123*2', 246
end
# Enable these if you want to change the code.
# RSpec::Core::Runner.run([], $stderr, $stdout)
# Command-line driver: evaluates ARGV joined into one expression,
# falling back to a sample expression when no non-blank args are given.
str = ARGV.join
str = '123*2' if str.match(/^\s*$/)
print "#{str} (command line): -> "
puts calculate(str)
|
Regenerate gemspec for version 0.1.0
|
# Placeholder specs generated before the behavior was reviewed.
require File.dirname(__FILE__) + '/../../../spec_helper'
require 'net/ftp'
describe "Net::FTP#passive" do
it "needs to be reviewed for spec completeness"
end
describe "Net::FTP#passive=" do
it "needs to be reviewed for spec completeness"
end
Added specs for Net::FTP#passive and Net::FTP#passive=.
require File.dirname(__FILE__) + '/../../../spec_helper'
require 'net/ftp'
describe "Net::FTP#passive" do
it "returns true when self is in passive mode" do
# NOTE(review): assumes Net::FTP.new starts in active mode; the
# default became passive in Ruby 2.3 — confirm the target Ruby here.
ftp = Net::FTP.new
ftp.passive.should be_false
ftp.passive = true
ftp.passive.should be_true
end
end
describe "Net::FTP#passive=" do
it "sets self to passive mode when passed true" do
ftp = Net::FTP.new
ftp.passive = true
ftp.passive.should be_true
ftp.passive = false
ftp.passive.should be_false
end
end
|
require File.expand_path('../../../spec_helper', __FILE__)
require 'rational'
describe :rational_exponent, :shared => true do
describe "when passed Rational" do
conflicts_with :Prime do
ruby_version_is ""..."1.9" do
it "converts self to a Float and returns it raised to the passed argument" do
(Rational(3, 4) ** Rational(4, 3)).should be_close(0.681420222312052, TOLERANCE)
(Rational(3, 4) ** Rational(-4, 3)).should be_close(1.46752322173095, TOLERANCE)
(Rational(3, 4) ** Rational(4, -3)).should be_close(1.46752322173095, TOLERANCE)
(Rational(3, 4) ** Rational(0, 3)).should eql(1.0)
(Rational(-3, 4) ** Rational(0, 3)).should eql(1.0)
(Rational(3, -4) ** Rational(0, 3)).should eql(1.0)
(Rational(3, 4) ** Rational(0, -3)).should eql(1.0)
(Rational(bignum_value, 4) ** Rational(0, 3)).should eql(1.0)
(Rational(3, -bignum_value) ** Rational(0, 3)).should eql(1.0)
(Rational(3, 4) ** Rational(0, bignum_value)).should eql(1.0)
(Rational(3, 4) ** Rational(0, -bignum_value)).should eql(1.0)
end
end
ruby_version_is "1.9" do
it "returns Rational(1) if the exponent is Rational(0)" do
(Rational(0) ** Rational(0)).should eql(Rational(1))
(Rational(1) ** Rational(0)).should eql(Rational(1))
(Rational(3, 4) ** Rational(0)).should eql(Rational(1))
(Rational(-1) ** Rational(0)).should eql(Rational(1))
(Rational(-3, 4) ** Rational(0)).should eql(Rational(1))
(Rational(bignum_value) ** Rational(0)).should eql(Rational(1))
(Rational(-bignum_value) ** Rational(0)).should eql(Rational(1))
end
it "returns self raised to the argument as a Rational if the exponent's denominator is 1" do
(Rational(3, 4) ** Rational(1, 1)).should eql(Rational(3, 4))
(Rational(3, 4) ** Rational(2, 1)).should eql(Rational(9, 16))
(Rational(3, 4) ** Rational(-1, 1)).should eql(Rational(4, 3))
(Rational(3, 4) ** Rational(-2, 1)).should eql(Rational(16, 9))
end
it "returns self raised to the argument as a Float if the exponent's denominator is not 1" do
(Rational(3, 4) ** Rational(4, 3)).should be_close(0.681420222312052, TOLERANCE)
(Rational(3, 4) ** Rational(-4, 3)).should be_close(1.46752322173095, TOLERANCE)
(Rational(3, 4) ** Rational(4, -3)).should be_close(1.46752322173095, TOLERANCE)
end
it "returns a complex number when self is negative and the passed argument is not 0" do
(Rational(-3, 4) ** Rational(-4, 3)).should == Complex(
-0.7337616108654732, 1.2709123906625817)
end
end
ruby_version_is ""..."1.9" do
it "returns NaN when self is negative and the passed argument is not 0" do
(Rational(-3, 4) ** Rational(-4, 3)).nan?.should be_true
end
end
end
end
describe "when passed Integer" do
it "returns the Rational value of self raised to the passed argument" do
(Rational(3, 4) ** 4).should == Rational(81, 256)
(Rational(3, 4) ** -4).should == Rational(256, 81)
(Rational(-3, 4) ** -4).should == Rational(256, 81)
(Rational(3, -4) ** -4).should == Rational(256, 81)
(Rational(bignum_value, 4) ** 4).should == Rational(28269553036454149273332760011886696253239742350009903329945699220681916416, 1)
(Rational(3, bignum_value) ** -4).should == Rational(7237005577332262213973186563042994240829374041602535252466099000494570602496, 81)
(Rational(-bignum_value, 4) ** -4).should == Rational(1, 28269553036454149273332760011886696253239742350009903329945699220681916416)
(Rational(3, -bignum_value) ** -4).should == Rational(7237005577332262213973186563042994240829374041602535252466099000494570602496, 81)
end
conflicts_with :Prime do
it "returns Rational(1, 1) when the passed argument is 0" do
(Rational(3, 4) ** 0).should eql(Rational(1, 1))
(Rational(-3, 4) ** 0).should eql(Rational(1, 1))
(Rational(3, -4) ** 0).should eql(Rational(1, 1))
(Rational(bignum_value, 4) ** 0).should eql(Rational(1, 1))
(Rational(3, -bignum_value) ** 0).should eql(Rational(1, 1))
end
end
end
describe "when passed Bignum" do
ruby_version_is ""..."1.9" do
it "returns Rational(0) when self is Rational(0) and the exponent is positive" do
(Rational(0) ** bignum_value).should eql(Rational(0))
end
it "returns Rational(1, 0) when self is Rational(0) and the exponent is negative" do
result = (Rational(0) ** -bignum_value)
result.numerator.should eql(1)
result.denominator.should eql(0)
end
it "returns Rational(1) when self is Rational(1)" do
(Rational(1) ** bignum_value).should eql(Rational(1))
end
it "returns Rational(1) when self is Rational(-1) and the exponent is even" do
(Rational(-1) ** bignum_value(0)).should eql(Rational(1))
end
it "returns Rational(-1) when self is Rational(-1) and the exponent is odd" do
(Rational(-1) ** bignum_value(1)).should eql(Rational(-1))
end
it "raises FloatDomainError when self is > 1 or < -1" do
lambda { Rational(2) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(-2) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_max) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_min) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(2) ** -bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(-2) ** -bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_max) ** -bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_min) ** -bignum_value }.should raise_error(FloatDomainError)
end
end
ruby_version_is "1.9" do
ruby_bug "#5713", "2.0" do
it "returns Rational(0) when self is Rational(0) and the exponent is positive" do
(Rational(0) ** bignum_value).should eql(Rational(0))
end
it "raises ZeroDivisionError when self is Rational(0) and the exponent is negative" do
lambda { Rational(0) ** -bignum_value }.should raise_error(ZeroDivisionError)
end
it "returns Rational(1) when self is Rational(1)" do
(Rational(1) ** bignum_value).should eql(Rational(1))
(Rational(1) ** -bignum_value).should eql(Rational(1))
end
it "returns Rational(1) when self is Rational(-1) and the exponent is positive and even" do
(Rational(-1) ** bignum_value(0)).should eql(Rational(1))
(Rational(-1) ** bignum_value(2)).should eql(Rational(1))
end
it "returns Rational(-1) when self is Rational(-1) and the exponent is positive and odd" do
(Rational(-1) ** bignum_value(1)).should eql(Rational(-1))
(Rational(-1) ** bignum_value(3)).should eql(Rational(-1))
end
end
it "returns positive Infinity when self is > 1" do
(Rational(2) ** bignum_value).infinite?.should == 1
(Rational(fixnum_max) ** bignum_value).infinite?.should == 1
end
it "returns 0.0 when self is > 1 and the exponent is negative" do
(Rational(2) ** -bignum_value).should eql(0.0)
(Rational(fixnum_max) ** -bignum_value).should eql(0.0)
end
# Fails on linux due to pow() bugs in glibc: http://sources.redhat.com/bugzilla/show_bug.cgi?id=3866
platform_is_not :linux do
it "returns positive Infinity when self < -1" do
(Rational(-2) ** bignum_value).infinite?.should == 1
(Rational(-2) ** (bignum_value + 1)).infinite?.should == 1
(Rational(fixnum_min) ** bignum_value).infinite?.should == 1
end
it "returns 0.0 when self is < -1 and the exponent is negative" do
(Rational(-2) ** -bignum_value).should eql(0.0)
(Rational(fixnum_min) ** -bignum_value).should eql(0.0)
end
end
end
end
describe "when passed Float" do
it "returns self converted to Float and raised to the passed argument" do
(Rational(3, 1) ** 3.0).should eql(27.0)
(Rational(3, 1) ** 1.5).should be_close(5.19615242270663, TOLERANCE)
(Rational(3, 1) ** -1.5).should be_close(0.192450089729875, TOLERANCE)
end
ruby_version_is ""..."1.9" do
it "returns NaN if self is negative and the passed argument is not 0" do
(Rational(-3, 2) ** 1.5).nan?.should be_true
(Rational(3, -2) ** 1.5).nan?.should be_true
(Rational(3, -2) ** -1.5).nan?.should be_true
end
it "returns 1.0 when the passed argument is 0.0" do
(Rational(3, 4) ** 0.0).should eql(1.0)
(Rational(-3, 4) ** 0.0).should eql(1.0)
(Rational(-3, 4) ** 0.0).should eql(1.0)
end
end
ruby_version_is "1.9" do
it "returns a complex number if self is negative and the passed argument is not 0" do
(Rational(-3, 2) ** 1.5).should be_close( Complex(
-3.374618290464398e-16, -1.8371173070873836), TOLERANCE)
(Rational(3, -2) ** 1.5).should be_close( Complex(
-3.374618290464398e-16, -1.8371173070873836), TOLERANCE)
(Rational(3, -2) ** -1.5).should be_close( Complex(
-9.998869008783402e-17, 0.5443310539518174), TOLERANCE)
end
it "returns Complex(1.0) when the passed argument is 0.0" do
(Rational(3, 4) ** 0.0).should == Complex(1.0)
(Rational(-3, 4) ** 0.0).should == Complex(1.0)
(Rational(-3, 4) ** 0.0).should == Complex(1.0)
end
end
end
it "calls #coerce on the passed argument with self" do
rational = Rational(3, 4)
obj = mock("Object")
obj.should_receive(:coerce).with(rational).and_return([1, 2])
rational ** obj
end
it "calls #** on the coerced Rational with the coerced Object" do
rational = Rational(3, 4)
coerced_rational = mock("Coerced Rational")
coerced_rational.should_receive(:**).and_return(:result)
coerced_obj = mock("Coerced Object")
obj = mock("Object")
obj.should_receive(:coerce).and_return([coerced_rational, coerced_obj])
(rational ** obj).should == :result
end
ruby_version_is ""..."1.9" do
it "returns Rational(1, 0) for Rational(0, 1) passed a negative Integer" do
[-1, -4, -9999].each do |exponent|
result = (Rational(0, 1) ** exponent)
result.numerator.should eql(1)
result.denominator.should eql(0)
end
end
conflicts_with :Prime do
it "returns Infinity for Rational(0, 1) passed a negative Rational" do
[Rational(-1, 1), Rational(-3, 1), Rational(-3, 2)].each do |exponent|
(Rational(0, 1) ** exponent).infinite?.should == 1
end
end
end
end
ruby_version_is "1.9" do
it "raises ZeroDivisionError for Rational(0, 1) passed a negative Integer" do
[-1, -4, -9999].each do |exponent|
lambda { Rational(0, 1) ** exponent }.should raise_error(ZeroDivisionError, "divided by 0")
end
end
it "raises ZeroDivisionError for Rational(0, 1) passed a negative Rational with denominator 1" do
[Rational(-1, 1), Rational(-3, 1)].each do |exponent|
lambda { Rational(0, 1) ** exponent }.should raise_error(ZeroDivisionError, "divided by 0")
end
end
it "returns Infinity for Rational(0, 1) passed a negative Rational with denominator not 1" do
(Rational(0, 1) ** Rational(-3, 2)).infinite?.should == 1
end
end
it "returns Infinity for Rational(0, 1) passed a negative Float" do
[-1.0, -3.0, -3.14].each do |exponent|
(Rational(0, 1) ** exponent).infinite?.should == 1
end
end
end
Fix specs for Rational(0) raised to a negative exact (Integer or Rational) exponent
require File.expand_path('../../../spec_helper', __FILE__)
require 'rational'
describe :rational_exponent, :shared => true do
describe "when passed Rational" do
conflicts_with :Prime do
ruby_version_is ""..."1.9" do
it "converts self to a Float and returns it raised to the passed argument" do
(Rational(3, 4) ** Rational(4, 3)).should be_close(0.681420222312052, TOLERANCE)
(Rational(3, 4) ** Rational(-4, 3)).should be_close(1.46752322173095, TOLERANCE)
(Rational(3, 4) ** Rational(4, -3)).should be_close(1.46752322173095, TOLERANCE)
(Rational(3, 4) ** Rational(0, 3)).should eql(1.0)
(Rational(-3, 4) ** Rational(0, 3)).should eql(1.0)
(Rational(3, -4) ** Rational(0, 3)).should eql(1.0)
(Rational(3, 4) ** Rational(0, -3)).should eql(1.0)
(Rational(bignum_value, 4) ** Rational(0, 3)).should eql(1.0)
(Rational(3, -bignum_value) ** Rational(0, 3)).should eql(1.0)
(Rational(3, 4) ** Rational(0, bignum_value)).should eql(1.0)
(Rational(3, 4) ** Rational(0, -bignum_value)).should eql(1.0)
end
end
ruby_version_is "1.9" do
it "returns Rational(1) if the exponent is Rational(0)" do
(Rational(0) ** Rational(0)).should eql(Rational(1))
(Rational(1) ** Rational(0)).should eql(Rational(1))
(Rational(3, 4) ** Rational(0)).should eql(Rational(1))
(Rational(-1) ** Rational(0)).should eql(Rational(1))
(Rational(-3, 4) ** Rational(0)).should eql(Rational(1))
(Rational(bignum_value) ** Rational(0)).should eql(Rational(1))
(Rational(-bignum_value) ** Rational(0)).should eql(Rational(1))
end
it "returns self raised to the argument as a Rational if the exponent's denominator is 1" do
(Rational(3, 4) ** Rational(1, 1)).should eql(Rational(3, 4))
(Rational(3, 4) ** Rational(2, 1)).should eql(Rational(9, 16))
(Rational(3, 4) ** Rational(-1, 1)).should eql(Rational(4, 3))
(Rational(3, 4) ** Rational(-2, 1)).should eql(Rational(16, 9))
end
it "returns self raised to the argument as a Float if the exponent's denominator is not 1" do
(Rational(3, 4) ** Rational(4, 3)).should be_close(0.681420222312052, TOLERANCE)
(Rational(3, 4) ** Rational(-4, 3)).should be_close(1.46752322173095, TOLERANCE)
(Rational(3, 4) ** Rational(4, -3)).should be_close(1.46752322173095, TOLERANCE)
end
it "returns a complex number when self is negative and the passed argument is not 0" do
(Rational(-3, 4) ** Rational(-4, 3)).should == Complex(
-0.7337616108654732, 1.2709123906625817)
end
end
ruby_version_is ""..."1.9" do
it "returns NaN when self is negative and the passed argument is not 0" do
(Rational(-3, 4) ** Rational(-4, 3)).nan?.should be_true
end
end
end
end
describe "when passed Integer" do
it "returns the Rational value of self raised to the passed argument" do
(Rational(3, 4) ** 4).should == Rational(81, 256)
(Rational(3, 4) ** -4).should == Rational(256, 81)
(Rational(-3, 4) ** -4).should == Rational(256, 81)
(Rational(3, -4) ** -4).should == Rational(256, 81)
(Rational(bignum_value, 4) ** 4).should == Rational(28269553036454149273332760011886696253239742350009903329945699220681916416, 1)
(Rational(3, bignum_value) ** -4).should == Rational(7237005577332262213973186563042994240829374041602535252466099000494570602496, 81)
(Rational(-bignum_value, 4) ** -4).should == Rational(1, 28269553036454149273332760011886696253239742350009903329945699220681916416)
(Rational(3, -bignum_value) ** -4).should == Rational(7237005577332262213973186563042994240829374041602535252466099000494570602496, 81)
end
conflicts_with :Prime do
it "returns Rational(1, 1) when the passed argument is 0" do
(Rational(3, 4) ** 0).should eql(Rational(1, 1))
(Rational(-3, 4) ** 0).should eql(Rational(1, 1))
(Rational(3, -4) ** 0).should eql(Rational(1, 1))
(Rational(bignum_value, 4) ** 0).should eql(Rational(1, 1))
(Rational(3, -bignum_value) ** 0).should eql(Rational(1, 1))
end
end
end
describe "when passed Bignum" do
ruby_version_is ""..."1.9" do
it "returns Rational(0) when self is Rational(0) and the exponent is positive" do
(Rational(0) ** bignum_value).should eql(Rational(0))
end
it "returns Rational(1, 0) when self is Rational(0) and the exponent is negative" do
result = (Rational(0) ** -bignum_value)
result.numerator.should eql(1)
result.denominator.should eql(0)
end
it "returns Rational(1) when self is Rational(1)" do
(Rational(1) ** bignum_value).should eql(Rational(1))
end
it "returns Rational(1) when self is Rational(-1) and the exponent is even" do
(Rational(-1) ** bignum_value(0)).should eql(Rational(1))
end
it "returns Rational(-1) when self is Rational(-1) and the exponent is odd" do
(Rational(-1) ** bignum_value(1)).should eql(Rational(-1))
end
it "raises FloatDomainError when self is > 1 or < -1" do
lambda { Rational(2) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(-2) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_max) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_min) ** bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(2) ** -bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(-2) ** -bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_max) ** -bignum_value }.should raise_error(FloatDomainError)
lambda { Rational(fixnum_min) ** -bignum_value }.should raise_error(FloatDomainError)
end
end
ruby_version_is "1.9" do
ruby_bug "#5713", "2.0" do
it "returns Rational(0) when self is Rational(0) and the exponent is positive" do
(Rational(0) ** bignum_value).should eql(Rational(0))
end
it "raises ZeroDivisionError when self is Rational(0) and the exponent is negative" do
lambda { Rational(0) ** -bignum_value }.should raise_error(ZeroDivisionError)
end
it "returns Rational(1) when self is Rational(1)" do
(Rational(1) ** bignum_value).should eql(Rational(1))
(Rational(1) ** -bignum_value).should eql(Rational(1))
end
it "returns Rational(1) when self is Rational(-1) and the exponent is positive and even" do
(Rational(-1) ** bignum_value(0)).should eql(Rational(1))
(Rational(-1) ** bignum_value(2)).should eql(Rational(1))
end
it "returns Rational(-1) when self is Rational(-1) and the exponent is positive and odd" do
(Rational(-1) ** bignum_value(1)).should eql(Rational(-1))
(Rational(-1) ** bignum_value(3)).should eql(Rational(-1))
end
end
it "returns positive Infinity when self is > 1" do
(Rational(2) ** bignum_value).infinite?.should == 1
(Rational(fixnum_max) ** bignum_value).infinite?.should == 1
end
it "returns 0.0 when self is > 1 and the exponent is negative" do
(Rational(2) ** -bignum_value).should eql(0.0)
(Rational(fixnum_max) ** -bignum_value).should eql(0.0)
end
# Fails on linux due to pow() bugs in glibc: http://sources.redhat.com/bugzilla/show_bug.cgi?id=3866
platform_is_not :linux do
it "returns positive Infinity when self < -1" do
(Rational(-2) ** bignum_value).infinite?.should == 1
(Rational(-2) ** (bignum_value + 1)).infinite?.should == 1
(Rational(fixnum_min) ** bignum_value).infinite?.should == 1
end
it "returns 0.0 when self is < -1 and the exponent is negative" do
(Rational(-2) ** -bignum_value).should eql(0.0)
(Rational(fixnum_min) ** -bignum_value).should eql(0.0)
end
end
end
end
describe "when passed Float" do
it "returns self converted to Float and raised to the passed argument" do
(Rational(3, 1) ** 3.0).should eql(27.0)
(Rational(3, 1) ** 1.5).should be_close(5.19615242270663, TOLERANCE)
(Rational(3, 1) ** -1.5).should be_close(0.192450089729875, TOLERANCE)
end
ruby_version_is ""..."1.9" do
it "returns NaN if self is negative and the passed argument is not 0" do
(Rational(-3, 2) ** 1.5).nan?.should be_true
(Rational(3, -2) ** 1.5).nan?.should be_true
(Rational(3, -2) ** -1.5).nan?.should be_true
end
it "returns 1.0 when the passed argument is 0.0" do
(Rational(3, 4) ** 0.0).should eql(1.0)
(Rational(-3, 4) ** 0.0).should eql(1.0)
(Rational(-3, 4) ** 0.0).should eql(1.0)
end
end
ruby_version_is "1.9" do
it "returns a complex number if self is negative and the passed argument is not 0" do
(Rational(-3, 2) ** 1.5).should be_close( Complex(
-3.374618290464398e-16, -1.8371173070873836), TOLERANCE)
(Rational(3, -2) ** 1.5).should be_close( Complex(
-3.374618290464398e-16, -1.8371173070873836), TOLERANCE)
(Rational(3, -2) ** -1.5).should be_close( Complex(
-9.998869008783402e-17, 0.5443310539518174), TOLERANCE)
end
it "returns Complex(1.0) when the passed argument is 0.0" do
(Rational(3, 4) ** 0.0).should == Complex(1.0)
(Rational(-3, 4) ** 0.0).should == Complex(1.0)
(Rational(-3, 4) ** 0.0).should == Complex(1.0)
end
end
end
it "calls #coerce on the passed argument with self" do
rational = Rational(3, 4)
obj = mock("Object")
obj.should_receive(:coerce).with(rational).and_return([1, 2])
rational ** obj
end
it "calls #** on the coerced Rational with the coerced Object" do
rational = Rational(3, 4)
coerced_rational = mock("Coerced Rational")
coerced_rational.should_receive(:**).and_return(:result)
coerced_obj = mock("Coerced Object")
obj = mock("Object")
obj.should_receive(:coerce).and_return([coerced_rational, coerced_obj])
(rational ** obj).should == :result
end
ruby_version_is ""..."1.9" do
it "returns Rational(1, 0) for Rational(0, 1) passed a negative Integer" do
[-1, -4, -9999].each do |exponent|
result = (Rational(0, 1) ** exponent)
result.numerator.should eql(1)
result.denominator.should eql(0)
end
end
conflicts_with :Prime do
it "returns Infinity for Rational(0, 1) passed a negative Rational" do
[Rational(-1, 1), Rational(-3, 1), Rational(-3, 2)].each do |exponent|
(Rational(0, 1) ** exponent).infinite?.should == 1
end
end
end
end
ruby_version_is "1.9" do
it "raises ZeroDivisionError for Rational(0, 1) passed a negative Integer" do
[-1, -4, -9999].each do |exponent|
lambda { Rational(0, 1) ** exponent }.should raise_error(ZeroDivisionError, "divided by 0")
end
end
it "raises ZeroDivisionError for Rational(0, 1) passed a negative Rational with denominator 1" do
[Rational(-1, 1), Rational(-3, 1)].each do |exponent|
lambda { Rational(0, 1) ** exponent }.should raise_error(ZeroDivisionError, "divided by 0")
end
end
ruby_bug "#7513", "2.0.0" do
it "raises ZeroDivisionError for Rational(0, 1) passed a negative Rational" do
lambda { Rational(0, 1) ** Rational(-3, 2) }.should raise_error(ZeroDivisionError, "divided by 0")
end
end
end
it "returns Infinity for Rational(0, 1) passed a negative Float" do
[-1.0, -3.0, -3.14].each do |exponent|
(Rational(0, 1) ** exponent).infinite?.should == 1
end
end
end
|
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sinatra-websocketio/version'
Gem::Specification.new do |gem|
gem.name = "sinatra-websocketio"
gem.version = Sinatra::WebSocketIO::VERSION
gem.authors = ["Sho Hashimoto"]
gem.email = ["hashimoto@shokai.org"]
gem.description = %q{Node.js like WebSocket I/O plugin for Sinatra.}
gem.summary = gem.description
gem.homepage = "http://shokai.github.com/sinatra-websocketio"
gem.files = `git ls-files`.split($/).reject{|i| i=="Gemfile.lock" }
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
gem.add_dependency 'rack'
gem.add_dependency 'sinatra', '>= 1.3.3'
gem.add_dependency 'eventmachine', '>= 1.0.0'
gem.add_dependency 'sinatra-contrib', '>= 1.3.2'
gem.add_dependency 'json', '>= 1.7.0'
gem.add_dependency 'event_emitter', '>= 0.2.0'
gem.add_dependency 'em-websocket'
end
fix gem.homepage
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sinatra-websocketio/version'
# Gem specification for sinatra-websocketio (WebSocket I/O plugin for Sinatra).
Gem::Specification.new do |gem|
gem.name = "sinatra-websocketio"
gem.version = Sinatra::WebSocketIO::VERSION
gem.authors = ["Sho Hashimoto"]
gem.email = ["hashimoto@shokai.org"]
gem.description = %q{Node.js like WebSocket I/O plugin for Sinatra.}
gem.summary = gem.description
gem.homepage = "https://github.com/shokai/sinatra-websocketio"
# Ship every tracked file except the lockfile.
gem.files = `git ls-files`.split($/).reject{|i| i=="Gemfile.lock" }
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
gem.add_dependency 'rack'
gem.add_dependency 'sinatra', '>= 1.3.3'
gem.add_dependency 'eventmachine', '>= 1.0.0'
gem.add_dependency 'sinatra-contrib', '>= 1.3.2'
gem.add_dependency 'json', '>= 1.7.0'
gem.add_dependency 'event_emitter', '>= 0.2.0'
gem.add_dependency 'em-websocket'
end
|
module SinatraTemplate
  # Shared Sinatra helper methods: id generation, logging, JSON:API
  # request validation and SPARQL query/update utilities.
  module Helpers
    # Returns a new unique identifier (stringified BSON ObjectId).
    def generate_uuid
      BSON::ObjectId.new.to_s
    end

    # Logger configured on the Sinatra settings object.
    def log
      settings.log
    end

    # Value of the MU-SESSION-ID request header, if any.
    def session_id_header(request)
      log.debug "Get HTTP_MU_SESSION_ID request header from #{request.env.inspect}"
      request.env['HTTP_MU_SESSION_ID']
    end

    # Value of the X-REWRITE-URL request header, if any.
    def rewrite_url_header(request)
      log.debug "Get HTTP_X_REWRITE_URL request header from #{request.env.inspect}"
      request.env['HTTP_X_REWRITE_URL']
    end

    # Logs the error and halts the request with a JSON:API-style error body.
    def error(title, status = 400)
      log.error "HTTP status #{status}: #{title}"
      halt status, { errors: [{ title: title }] }.to_json
    end

    # Halts with 400 unless the request declares the JSON:API content type.
    def validate_json_api_content_type(request)
      error("Content-Type must be application/vnd.api+json instead of #{request.env['CONTENT_TYPE']}.") if not request.env['CONTENT_TYPE'] =~ /^application\/vnd\.api\+json/
    end

    # Halts with 409 when the payload's resource type does not match.
    def validate_resource_type(expected_type, data)
      error("Incorrect type. Type must be #{expected_type}, instead of #{data['type']}.", 409) if data['type'] != expected_type
    end

    # Executes a SPARQL query via the configured client.
    def query(query)
      log.info "Executing query: #{query}"
      settings.sparql_client.query query
    end

    # Executes a SPARQL update via the configured client.
    def update(query)
      log.info "Executing query: #{query}"
      settings.sparql_client.update query
    end

    # Replaces the dct:modified triple of +subject+ in the configured graph.
    def update_modified(subject, modified = DateTime.now.xmlschema)
      query = " WITH <#{settings.graph}> "
      query += " DELETE {"
      query += " <#{subject}> <#{RDF::Vocab::DC.modified}> ?modified ."
      query += " }"
      query += " WHERE {"
      query += " <#{subject}> <#{RDF::Vocab::DC.modified}> ?modified ."
      query += " }"
      update(query)
      query = " INSERT DATA {"
      query += " GRAPH <#{settings.graph}> {"
      query += " <#{subject}> <#{RDF::Vocab::DC.modified}> \"#{modified}\"^^xsd:dateTime ."
      query += " }"
      query += " }"
      update(query)
    end

    # Backslash-escapes \, " and ' in a string parameter.
    # Returns nil for non-string input.
    def escape_string_parameter(parameter)
      if parameter and parameter.is_a? String
        parameter.gsub(/[\\"']/){|s|'\\'+s}
      end
    end

    # Raises when the parameter contains a (case-insensitive) SPARQL
    # update keyword. NOTE(review): plain substring matching also flags
    # innocent words such as "address" (contains "add") — confirm intent.
    def verify_string_parameter(parameter)
      if parameter and parameter.is_a? String
        raise "unauthorized insert in string parameter" if parameter.downcase.include? "insert"
        raise "unauthorized delete in string parameter" if parameter.downcase.include? "delete"
        raise "unauthorized load in string parameter" if parameter.downcase.include? "load"
        raise "unauthorized clear in string parameter" if parameter.downcase.include? "clear"
        raise "unauthorized create in string parameter" if parameter.downcase.include? "create"
        raise "unauthorized drop in string parameter" if parameter.downcase.include? "drop"
        raise "unauthorized copy in string parameter" if parameter.downcase.include? "copy"
        raise "unauthorized move in string parameter" if parameter.downcase.include? "move"
        raise "unauthorized add in string parameter" if parameter.downcase.include? "add"
      end
    end
  end
end # FIX: this closing `end` for `module SinatraTemplate` was missing
fix syntax in helpers
module SinatraTemplate
module Helpers
def generate_uuid
BSON::ObjectId.new.to_s
end
def log
settings.log
end
def session_id_header(request)
log.debug "Get HTTP_MU_SESSION_ID request header from #{request.env.inspect}"
request.env['HTTP_MU_SESSION_ID']
end
def rewrite_url_header(request)
log.debug "Get HTTP_X_REWRITE_URL request header from #{request.env.inspect}"
request.env['HTTP_X_REWRITE_URL']
end
def error(title, status = 400)
log.error "HTTP status #{status}: #{title}"
halt status, { errors: [{ title: title }] }.to_json
end
def validate_json_api_content_type(request)
error("Content-Type must be application/vnd.api+json instead of #{request.env['CONTENT_TYPE']}.") if not request.env['CONTENT_TYPE'] =~ /^application\/vnd\.api\+json/
end
def validate_resource_type(expected_type, data)
error("Incorrect type. Type must be #{expected_type}, instead of #{data['type']}.", 409) if data['type'] != expected_type
end
def query(query)
log.info "Executing query: #{query}"
settings.sparql_client.query query
end
def update(query)
log.info "Executing query: #{query}"
settings.sparql_client.update query
end
def update_modified(subject, modified = DateTime.now.xmlschema)
query = " WITH <#{settings.graph}> "
query += " DELETE {"
query += " <#{subject}> <#{RDF::Vocab::DC.modified}> ?modified ."
query += " }"
query += " WHERE {"
query += " <#{subject}> <#{RDF::Vocab::DC.modified}> ?modified ."
query += " }"
update(query)
query = " INSERT DATA {"
query += " GRAPH <#{settings.graph}> {"
query += " <#{subject}> <#{RDF::Vocab::DC.modified}> \"#{modified}\"^^xsd:dateTime ."
query += " }"
query += " }"
update(query)
end
def escape_string_parameter (parameter)
if parameter and parameter.is_a? String
parameter.gsub(/[\\"']/){|s|'\\'+s}
end
end
def verify_string_parameter (parameter)
if parameter and parameter.is_a? String
raise "unauthorized insert in string parameter" if parameter.downcase.include? "insert"
raise "unauthorized delete in string parameter" if parameter.downcase.include? "delete"
raise "unauthorized load in string parameter" if parameter.downcase.include? "load"
raise "unauthorized clear in string parameter" if parameter.downcase.include? "clear"
raise "unauthorized create in string parameter" if parameter.downcase.include? "create"
raise "unauthorized drop in string parameter" if parameter.downcase.include? "drop"
raise "unauthorized copy in string parameter" if parameter.downcase.include? "copy"
raise "unauthorized move in string parameter" if parameter.downcase.include? "move"
raise "unauthorized add in string parameter" if parameter.downcase.include? "add"
end
end
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'i_love_sudoku/version'

# Gem specification for i_love_sudoku, a sudoku-solving gem.
Gem::Specification.new do |spec|
  spec.name          = "i_love_sudoku"
  spec.version       = ILoveSudoku::VERSION
  spec.authors       = ["Dan Berger"]
  spec.email         = ["dsberger@gmail.com"]
  spec.summary       = "A gem to solve any sudoku puzzle."
  # FIX: replace the generator's "TODO" placeholder — an invalid URI
  # here makes `gem build` validation warn/fail.
  spec.homepage      = "https://github.com/dsberger/i_love_sudoku"
  spec.license       = "MIT"

  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "https://rubygems.org"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end

  # Package all tracked files except tests/specs/features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.required_ruby_version = ">= 2.1.0"

  spec.add_development_dependency "bundler", "~> 1.10"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec"
end
updates URL
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'i_love_sudoku/version'
Gem::Specification.new do |spec|
spec.name = "i_love_sudoku"
spec.version = ILoveSudoku::VERSION
spec.authors = ["Dan Berger"]
spec.email = ["dsberger@gmail.com"]
spec.summary = "A gem to solve any sudoku puzzle."
spec.homepage = "http://github.com/dsberger/i_love_sudoku"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
# delete this section to allow pushing this gem to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "https://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.required_ruby_version = ">= 2.1.0"
spec.add_development_dependency "bundler", "~> 1.10"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec"
end
|
# Homebrew formula for Ignition Transport 0.7.0, the messaging layer
# used by the Ignition/Gazebo robotics stack.
class IgnitionTransport < Formula
homepage "http://ignitionrobotics.org"
url "http://gazebosim.org/distributions/ign-transport/releases/ignition-transport-0.7.0.tar.bz2"
sha256 "93629936bf1de3fe8168f97028d76d5c34ad1ecb0869d1a2bbfc7ede0797dc61"
# Development builds track the Mercurial "default" branch on Bitbucket.
head "https://bitbucket.org/ignitionrobotics/ign-transport", :branch => "default", :using => :hg
depends_on "cmake" => :build
depends_on "doxygen" => [:build, :optional]
depends_on "pkg-config" => :build
depends_on "protobuf"
depends_on "protobuf-c" => :build
depends_on "ossp-uuid"
depends_on "zeromq"
# cppzmq header comes from a third-party tap rather than homebrew-core.
depends_on "bertjwregeer/compat/cppzmq"
# Standard in-tree CMake build and install.
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
end
# NOTE(review): `system "false"` is the formula-template placeholder and
# always exits non-zero, so `brew test` will always fail. Replace with a
# real smoke test of an installed binary or header.
test do
system "false"
end
end
ignition-transport 0.8.1
# Homebrew formula for Ignition Transport 0.8.1, the messaging layer
# used by the Ignition/Gazebo robotics stack.
class IgnitionTransport < Formula
homepage "http://ignitionrobotics.org"
url "http://gazebosim.org/distributions/ign-transport/releases/ignition-transport-0.8.1.tar.bz2"
sha256 "f5ec419a47a730391a9ed7a137ec5241edafdd39c2215d81f3e5f7963c7cb67a"
# Development builds track the Mercurial "default" branch on Bitbucket.
head "https://bitbucket.org/ignitionrobotics/ign-transport", :branch => "default", :using => :hg
depends_on "cmake" => :build
depends_on "doxygen" => [:build, :optional]
depends_on "pkg-config" => :build
depends_on "protobuf"
depends_on "protobuf-c" => :build
depends_on "ossp-uuid"
depends_on "zeromq"
# cppzmq header comes from a third-party tap rather than homebrew-core.
depends_on "bertjwregeer/compat/cppzmq"
# Standard in-tree CMake build and install.
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
end
# NOTE(review): `system "false"` is the formula-template placeholder and
# always exits non-zero, so `brew test` will always fail. Replace with a
# real smoke test of an installed binary or header.
test do
system "false"
end
end
|
# -*- encoding: utf-8 -*-
# Gem specification for RestShifter, a declarative REST mock service.
$:.push File.expand_path("../lib", __FILE__)
require "RestShifter/version"

Gem::Specification.new do |s|
  s.name        = "RestShifter"
  s.version     = RestShifter::VERSION
  s.authors     = ["Camilo Ribeiro"]
  s.email       = ["camilo@camiloribeiro.com"]
  s.homepage    = "http://github.com/camiloribeiro/RestShifter"
  s.license     = "Apache 2.0"
  s.summary     = %q{Simple Rest Mock service}
  s.description = %q{RestShifter is a simple rest service that can shift into any kind of service, allowing you to mock rest services with declarative configuration}
  s.rubyforge_project = "RestShifter"

  # Package every git-tracked file; tests and executables derived from git too.
  s.files       = `git ls-files`.split("\n")
  s.test_files  = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  # Fix: the default executable must be this gem's own binstub; 'cello'
  # was a leftover from another project and does not exist in bin/.
  s.default_executable = 'RestShifter'
  s.require_paths = ["lib"]

  # Development-only dependencies.
  s.add_development_dependency 'simplecov'
  s.add_development_dependency 'pry'
  s.add_development_dependency 'cucumber'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'rest-client'
  s.add_development_dependency 'rspec'
  s.add_development_dependency 'coveralls'
  s.add_development_dependency 'nyan-cat-formatter'
  s.add_development_dependency 'rack-test'

  # Runtime dependencies.
  s.add_dependency 'icecream'
  s.add_dependency 'sinatra'
end
Fix gemspec: set default_executable to 'RestShifter' instead of the leftover 'cello'
# -*- encoding: utf-8 -*-
# Gem specification for RestShifter, a declarative REST mock service.
$:.push File.expand_path("../lib", __FILE__)
require "RestShifter/version"

Gem::Specification.new do |s|
  s.name     = "RestShifter"
  s.version  = RestShifter::VERSION
  s.authors  = ["Camilo Ribeiro"]
  s.email    = ["camilo@camiloribeiro.com"]
  s.homepage = "http://github.com/camiloribeiro/RestShifter"
  s.license  = "Apache 2.0"

  s.summary     = "Simple Rest Mock service"
  s.description = "RestShifter is a simple rest service that can shift into any kind of service, allowing you to mock rest services with declarative configuration"

  s.rubyforge_project = "RestShifter"

  # Ship every git-tracked file; test files and executables are likewise
  # derived from the git index.
  s.files       = `git ls-files`.split("\n")
  s.test_files  = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map { |path| File.basename(path) }
  s.default_executable = 'RestShifter'
  s.require_paths      = ["lib"]

  # Development-only dependencies, registered in the original order.
  %w[simplecov pry cucumber rake rest-client rspec coveralls
     nyan-cat-formatter rack-test].each do |gem_name|
    s.add_development_dependency gem_name
  end

  # Runtime dependencies.
  s.add_dependency 'icecream'
  s.add_dependency 'sinatra'
end
|
# CocoaPods specification for SLFramework 0.0.2, an iOS helper framework.
Pod::Spec.new do |s|
  s.name = 'SLFramework'
  s.version = '0.0.2'
  s.license = 'MIT'
  s.summary = 'Framework for boosting iOS application development.'
  s.homepage = 'https://github.com/anlaital/SLFramework'
  s.social_media_url = 'https://twitter.com/anlaital'
  s.authors = {
    'Antti Laitala' => 'antti.o.laitala@gmail.com'
  }
  # Source is pinned to the git tag matching s.version.
  s.source = {
    :git => 'https://github.com/anlaital/SLFramework.git',
    :tag => '0.0.2',
    :submodules => false
  }
  s.requires_arc = true
  s.ios.deployment_target = '6.0'
  s.header_dir = 'SLFramework'
  s.public_header_files = 'SLFramework/*.h'
  # Fix: source_files must include the headers as well — public_header_files
  # patterns act as a filter over source_files, so with only '*.m' listed
  # no headers were shipped (the 0.0.1 spec correctly used '*.{h,m}').
  s.source_files = 'SLFramework/*.{h,m}'
  s.dependency 'ISO8601DateFormatter'
end
0.0.1 tagged.
# CocoaPods specification for SLFramework 0.0.1, an iOS helper framework.
Pod::Spec.new do |s|
s.name = 'SLFramework'
s.version = '0.0.1'
s.license = 'MIT'
s.summary = 'Framework for boosting iOS application development.'
s.homepage = 'https://github.com/anlaital/SLFramework'
s.social_media_url = 'https://twitter.com/anlaital'
s.authors = {
'Antti Laitala' => 'antti.o.laitala@gmail.com'
}
# Source is pinned to the git tag matching s.version.
s.source = {
:git => 'https://github.com/anlaital/SLFramework.git',
:tag => '0.0.1',
:submodules => false
}
s.requires_arc = true
s.ios.deployment_target = '6.0'
# Both headers and implementation files are compiled into the pod.
s.source_files = 'SLFramework/*.{h,m}'
s.dependency 'ISO8601DateFormatter'
end
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Domgen
module Gwt
# A GWT entrypoint owned by a repository's gwt facet; the entrypoint name
# is PascalCased to derive the generated Java artifact names.
class Entrypoint < Domgen.ParentedElement(:gwt_repository)
  def initialize(gwt_repository, name, options = {}, &block)
    @name = name
    super(gwt_repository, options, &block)
  end

  include Domgen::Java::BaseJavaGenerator

  java_artifact :entrypoint, nil, :client, :gwt, '#{qualified_name}'
  java_artifact :entrypoint_module, :ioc, :client, :gwt, '#{qualified_name}EntrypointModule'
  java_artifact :gwt_module, :modules, nil, :gwt, '#{qualified_name}EntrypointSupport'

  # Fix: previously delegated via a non-existent 'entrypoint' accessor on
  # self ('entrypoint.gwt_repository...'), raising NoMethodError at runtime.
  # Delegate directly to the parent repository facet.
  def modules_package
    gwt_repository.modules_package
  end

  def qualified_application_name
    "#{gwt_repository.repository.gwt.client_package}.#{qualified_name}App"
  end

  def qualified_name
    Domgen::Naming.pascal_case(name)
  end

  attr_reader :name
end
end
FacetManager.facet(:gwt => [:java, :json]) do |facet|
# Repository-level GWT configuration: module naming, generated artifact
# locations and entrypoint management.
facet.enhance(Repository) do
  include Domgen::Java::BaseJavaGenerator
  include Domgen::Java::JavaClientServerApplication

  attr_writer :module_name

  # GWT module name; defaults to the underscored repository name.
  def module_name
    @module_name || Domgen::Naming.underscore(repository.name)
  end

  attr_writer :client_event_package

  def client_event_package
    @client_event_package || "#{client_package}.event"
  end

  java_artifact :async_callback, :service, :client, :gwt, '#{repository.name}AsyncCallback'
  java_artifact :async_error_callback, :service, :client, :gwt, '#{repository.name}AsyncErrorCallback'
  java_artifact :abstract_application, nil, :client, :gwt, 'Abstract#{repository.name}App'
  java_artifact :aggregate_module, :ioc, :client, :gwt, '#{repository.name}Module'
  java_artifact :dev_module, :modules, nil, :gwt, '#{repository.name}DevSupport'
  java_artifact :prod_module, :modules, nil, :gwt, '#{repository.name}ProdSupport'
  # Fix: generate the app support module into the :modules directory,
  # consistent with the dev/prod support modules above.
  java_artifact :app_module, :modules, nil, :gwt, '#{repository.name}AppSupport'

  attr_writer :modules_package

  def modules_package
    @modules_package || "#{repository.java.base_package}.modules"
  end

  attr_writer :client_ioc_package

  def client_ioc_package
    @client_ioc_package || "#{client_package}.ioc"
  end

  attr_writer :enable_entrypoints

  # Entrypoint generation is enabled unless explicitly turned off.
  def enable_entrypoints?
    @enable_entrypoints.nil? ? true : !!@enable_entrypoints
  end

  # Lazily creates, then returns, the entrypoint named after the repository.
  def default_entrypoint
    key = Domgen::Naming.underscore(repository.name.to_s)
    entrypoint(key) unless entrypoint_by_name?(key)
    entrypoint_by_key(key)
  end

  def entrypoint_by_name?(name)
    !!entrypoint_map[name.to_s]
  end

  def entrypoint_by_key(name)
    raise "No gwt entrypoint with name #{name} defined." unless entrypoint_map[name.to_s]
    entrypoint_map[name.to_s]
  end

  # Registers a new entrypoint; duplicate keys are an error.
  def entrypoint(name, options = {}, &block)
    raise "Gwt entrypoint with key #{name} already defined." if entrypoint_map[name.to_s]
    entrypoint_map[name.to_s] = Domgen::Gwt::Entrypoint.new(self, name, options, &block)
  end

  def entrypoints
    return [] unless enable_entrypoints?
    entrypoint_map.values
  end

  TargetManager.register_target('gwt.entrypoint', :repository, :gwt, :entrypoints)

  # Exclude generated GWT client classes from CDI bean scanning.
  def pre_complete
    repository.ee.beans_xml_content_fragments << <<XML
<!-- gwt fragment is auto-generated -->
<scan>
<exclude name="#{repository.gwt.client_package}.**"/>
</scan>
<!-- gwt fragment end -->
XML
  end

  protected

  def facet_key
    :gwt
  end

  private

  # Lazily builds the name=>Entrypoint map, seeding the default entrypoint.
  def entrypoint_map
    raise "Attempted to retrieve gwt entrypoints on #{repository.name} when entrypoints not defined." unless enable_entrypoints?
    unless @entrypoints
      @entrypoints = {}
      default_entrypoint
    end
    @entrypoints
  end
end
# Per-data-module GWT package configuration.
facet.enhance(DataModule) do
include Domgen::Java::ClientServerJavaPackage
attr_writer :client_data_type_package
def client_data_type_package
@client_data_type_package || resolve_package(:client_data_type_package)
end
attr_writer :client_event_package
def client_event_package
@client_event_package || resolve_package(:client_event_package)
end
protected
def facet_key
:gwt
end
end
# Each message maps to a generated client-side GWT event class.
facet.enhance(Message) do
include Domgen::Java::BaseJavaGenerator
java_artifact :event, :event, :client, :gwt, '#{message.name}Event'
end
facet.enhance(MessageParameter) do
include Domgen::Java::ImitJavaCharacteristic
protected
# The underlying characteristic whose java type is imitated.
def characteristic
parameter
end
end
# Structs generate an interface plus JSO/Java implementations and a factory.
facet.enhance(Struct) do
include Domgen::Java::BaseJavaGenerator
# Needed to hook into standard java type resolution code
def qualified_name
self.qualified_interface_name
end
attr_writer :generate_overlay
# JavaScript overlay (JSO) generation is on by default.
def generate_overlay?
@generate_overlay.nil? ? true : !!@generate_overlay
end
java_artifact :interface, :data_type, :client, :gwt, '#{struct.name}'
java_artifact :jso, :data_type, :client, :gwt, 'Jso#{struct.name}'
java_artifact :java, :data_type, :client, :gwt, 'Java#{struct.name}'
java_artifact :factory, :data_type, :client, :gwt, '#{struct.name}Factory'
end
facet.enhance(StructField) do
include Domgen::Java::ImitJavaCharacteristic
def name
field.name
end
protected
# The underlying characteristic whose java type is imitated.
def characteristic
field
end
end
# Enumerations map straight to a generated client data type.
facet.enhance(EnumerationSet) do
include Domgen::Java::BaseJavaGenerator
java_artifact :name, :data_type, :client, :gwt, '#{enumeration.name}'
end
end
end
Generate module into modules directory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Domgen
module Gwt
# A GWT entrypoint owned by a repository's gwt facet; the entrypoint name
# is PascalCased to derive the generated Java artifact names.
class Entrypoint < Domgen.ParentedElement(:gwt_repository)
  def initialize(gwt_repository, name, options = {}, &block)
    @name = name
    super(gwt_repository, options, &block)
  end

  include Domgen::Java::BaseJavaGenerator

  java_artifact :entrypoint, nil, :client, :gwt, '#{qualified_name}'
  java_artifact :entrypoint_module, :ioc, :client, :gwt, '#{qualified_name}EntrypointModule'
  java_artifact :gwt_module, :modules, nil, :gwt, '#{qualified_name}EntrypointSupport'

  # Fix: previously delegated via a non-existent 'entrypoint' accessor on
  # self ('entrypoint.gwt_repository...'), raising NoMethodError at runtime.
  # Delegate directly to the parent repository facet.
  def modules_package
    gwt_repository.modules_package
  end

  def qualified_application_name
    "#{gwt_repository.repository.gwt.client_package}.#{qualified_name}App"
  end

  def qualified_name
    Domgen::Naming.pascal_case(name)
  end

  attr_reader :name
end
end
FacetManager.facet(:gwt => [:java, :json]) do |facet|
# Repository-level GWT configuration: module naming, generated artifact
# locations and entrypoint management.
facet.enhance(Repository) do
include Domgen::Java::BaseJavaGenerator
include Domgen::Java::JavaClientServerApplication
attr_writer :module_name
# GWT module name; defaults to the underscored repository name.
def module_name
@module_name || Domgen::Naming.underscore(repository.name)
end
attr_writer :client_event_package
def client_event_package
@client_event_package || "#{client_package}.event"
end
java_artifact :async_callback, :service, :client, :gwt, '#{repository.name}AsyncCallback'
java_artifact :async_error_callback, :service, :client, :gwt, '#{repository.name}AsyncErrorCallback'
java_artifact :abstract_application, nil, :client, :gwt, 'Abstract#{repository.name}App'
java_artifact :aggregate_module, :ioc, :client, :gwt, '#{repository.name}Module'
java_artifact :dev_module, :modules, nil, :gwt, '#{repository.name}DevSupport'
java_artifact :prod_module, :modules, nil, :gwt, '#{repository.name}ProdSupport'
java_artifact :app_module, :modules, nil, :gwt, '#{repository.name}AppSupport'
attr_writer :modules_package
def modules_package
@modules_package || "#{repository.java.base_package}.modules"
end
attr_writer :client_ioc_package
def client_ioc_package
@client_ioc_package || "#{client_package}.ioc"
end
attr_writer :enable_entrypoints
# Entrypoint generation is enabled unless explicitly turned off.
def enable_entrypoints?
@enable_entrypoints.nil? ? true : !!@enable_entrypoints
end
# Lazily creates, then returns, the entrypoint named after the repository.
def default_entrypoint
key = Domgen::Naming.underscore(repository.name.to_s)
entrypoint(key) unless entrypoint_by_name?(key)
entrypoint_by_key(key)
end
def entrypoint_by_name?(name)
!!entrypoint_map[name.to_s]
end
def entrypoint_by_key(name)
raise "No gwt entrypoint with name #{name} defined." unless entrypoint_map[name.to_s]
entrypoint_map[name.to_s]
end
# Registers a new entrypoint; duplicate keys are an error.
def entrypoint(name, options = {}, &block)
raise "Gwt entrypoint with key #{name} already defined." if entrypoint_map[name.to_s]
entrypoint_map[name.to_s] = Domgen::Gwt::Entrypoint.new(self, name, options, &block)
end
def entrypoints
return [] unless enable_entrypoints?
entrypoint_map.values
end
TargetManager.register_target('gwt.entrypoint', :repository, :gwt, :entrypoints)
# Exclude generated GWT client classes from CDI bean scanning.
def pre_complete
repository.ee.beans_xml_content_fragments << <<XML
<!-- gwt fragment is auto-generated -->
<scan>
<exclude name="#{repository.gwt.client_package}.**"/>
</scan>
<!-- gwt fragment end -->
XML
end
protected
def facet_key
:gwt
end
private
# Lazily builds the name=>Entrypoint map, seeding the default entrypoint.
def entrypoint_map
raise "Attempted to retrieve gwt entrypoints on #{repository.name} when entrypoints not defined." unless enable_entrypoints?
unless @entrypoints
@entrypoints = {}
default_entrypoint
end
@entrypoints
end
end
# Per-data-module GWT package configuration.
facet.enhance(DataModule) do
include Domgen::Java::ClientServerJavaPackage
attr_writer :client_data_type_package
def client_data_type_package
@client_data_type_package || resolve_package(:client_data_type_package)
end
attr_writer :client_event_package
def client_event_package
@client_event_package || resolve_package(:client_event_package)
end
protected
def facet_key
:gwt
end
end
# Each message maps to a generated client-side GWT event class.
facet.enhance(Message) do
include Domgen::Java::BaseJavaGenerator
java_artifact :event, :event, :client, :gwt, '#{message.name}Event'
end
facet.enhance(MessageParameter) do
include Domgen::Java::ImitJavaCharacteristic
protected
# The underlying characteristic whose java type is imitated.
def characteristic
parameter
end
end
# Structs generate an interface plus JSO/Java implementations and a factory.
facet.enhance(Struct) do
include Domgen::Java::BaseJavaGenerator
# Needed to hook into standard java type resolution code
def qualified_name
self.qualified_interface_name
end
attr_writer :generate_overlay
# JavaScript overlay (JSO) generation is on by default.
def generate_overlay?
@generate_overlay.nil? ? true : !!@generate_overlay
end
java_artifact :interface, :data_type, :client, :gwt, '#{struct.name}'
java_artifact :jso, :data_type, :client, :gwt, 'Jso#{struct.name}'
java_artifact :java, :data_type, :client, :gwt, 'Java#{struct.name}'
java_artifact :factory, :data_type, :client, :gwt, '#{struct.name}Factory'
end
facet.enhance(StructField) do
include Domgen::Java::ImitJavaCharacteristic
def name
field.name
end
protected
# The underlying characteristic whose java type is imitated.
def characteristic
field
end
end
# Enumerations map straight to a generated client data type.
facet.enhance(EnumerationSet) do
include Domgen::Java::BaseJavaGenerator
java_artifact :name, :data_type, :client, :gwt, '#{enumeration.name}'
end
end
end
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Domgen
module Sql
# A database sequence belonging to a schema; registers itself with the
# owning schema on construction.
class Sequence < Domgen.ParentedElement(:schema)
def initialize(schema, name, options, &block)
@name = name
# register_sequence is schema-private, hence the send.
schema.send(:register_sequence, name, self)
super(schema, options, &block)
end
attr_reader :name
attr_accessor :sql_type
def quoted_sequence_name
schema.dialect.quote(self.name)
end
# Schema-qualified, dialect-quoted name for use in generated DDL.
def qualified_sequence_name
"#{schema.quoted_schema}.#{quoted_sequence_name}"
end
end
# Models a database index on a table: clustered, GiST or normal (b-tree),
# optionally unique, covering (include columns) or partial (filter).
class Index < Domgen.ParentedElement(:table)
attr_accessor :attribute_names
# Non-key columns carried in the index (covering index).
attr_accessor :include_attribute_names
# Optional predicate making this a partial/filtered index.
attr_accessor :filter
def initialize(table, attribute_names, options, &block)
@attribute_names = attribute_names
@include_attribute_names = []
super(table, options, &block)
end
def to_s
"Index[#{self.qualified_index_name}]"
end
attr_reader :index_type
def index_type=(index_type)
Domgen.error("index_type #{index_type} on #{qualified_index_name} is invalid") unless self.class.valid_index_types.include?(index_type)
@index_type = index_type
end
attr_writer :index_name
# Derives and memoizes a name <prefix>_<Entity>_<attrs>, where the
# prefix encodes the index kind (CL/UQ/GS/IX).
def index_name
if @index_name.nil?
prefix = cluster? ? 'CL' : unique? ? 'UQ' : gist? ? 'GS' : 'IX'
suffix = attribute_names.join('_')
@index_name = "#{prefix}_#{table.entity.name}_#{suffix}"
end
@index_name
end
def quoted_index_name
table.dialect.quote(self.index_name)
end
def qualified_index_name
"#{table.entity.data_module.sql.quoted_schema}.#{quoted_index_name}"
end
# GiST indexes are unordered; every other kind is ordered.
def ordered?
!gist?
end
def cluster?
index_type == :cluster
end
def gist?
index_type == :gist
end
def normal?
index_type == :normal
end
attr_writer :unique
def unique?
@unique.nil? ? false : @unique
end
# Defaults to true; presumably maps to a SQL Server index option — confirm.
attr_writer :allow_page_locks
def allow_page_locks?
@allow_page_locks.nil? ? true : !!@allow_page_locks
end
def partial?
!self.filter.nil?
end
private
# NOTE(review): 'private' does not apply to singleton (class) methods;
# this stays reachable via self.class.valid_index_types above.
def self.valid_index_types
[:cluster, :gist, :normal]
end
end
# A foreign key from this table to a referenced entity's table, with
# configurable ON UPDATE / ON DELETE referential actions.
class ForeignKey < Domgen.ParentedElement(:table)
# Maps the symbolic referential action to its SQL keyword.
ACTION_MAP =
{
:cascade => 'CASCADE',
:set_null => 'SET NULL',
:set_default => 'SET DEFAULT',
:no_action => 'NO ACTION'
}.freeze
attr_accessor :attribute_names
attr_accessor :referenced_entity_name
attr_accessor :referenced_attribute_names
def initialize(table, attribute_names, referenced_entity_name, referenced_attribute_names, options, &block)
@attribute_names, @referenced_entity_name, @referenced_attribute_names =
attribute_names, referenced_entity_name, referenced_attribute_names
super(table, options, &block)
# Ensure that the attributes exist
attribute_names.each { |a| table.entity.attribute_by_name(a) }
# Ensure that the remote attributes exist on remote type
referenced_attribute_names.each { |a| referenced_entity.attribute_by_name(a) }
end
attr_writer :name
# Defaults to the joined local attribute names.
def name
if @name.nil?
@name = "#{attribute_names.join('_')}"
end
@name
end
def referenced_entity
table.entity.data_module.entity_by_name(referenced_entity_name)
end
def on_update=(on_update)
Domgen.error("on_update #{on_update} on #{name} is invalid") unless ACTION_MAP.keys.include?(on_update)
@on_update = on_update
end
def on_update
@on_update || :no_action
end
def on_delete=(on_delete)
Domgen.error("on_delete #{on_delete} on #{name} is invalid") unless ACTION_MAP.keys.include?(on_delete)
@on_delete = on_delete
end
def on_delete
@on_delete || :no_action
end
# 's' is presumably a sanitizing helper inherited from a parent — confirm.
def foreign_key_name
"FK_#{s(table.entity.name)}_#{s(name)}"
end
def quoted_foreign_key_name
table.dialect.quote(self.foreign_key_name)
end
def qualified_foreign_key_name
"#{table.entity.data_module.sql.quoted_schema}.#{quoted_foreign_key_name}"
end
def constraint_name
foreign_key_name
end
def quoted_constraint_name
quoted_foreign_key_name
end
def to_s
"ForeignKey[#{self.qualified_foreign_key_name}]"
end
end
# A named check constraint on a table whose body is supplied as raw SQL.
class Constraint < Domgen.ParentedElement(:table)
attr_reader :name
attr_accessor :sql
def initialize(table, name, options = {}, &block)
@name = name
super(table, options, &block)
end
attr_writer :standard
# Return true if this was defined by domgen or is derivable via rules.
# standard constraints do not typically need to be tested
def standard?
@standard.nil? ? false : @standard
end
attr_writer :invariant
# Return true if this constraint should always be true, not just on insert or update.
def invariant?
@invariant.nil? ? true : @invariant
end
def constraint_name
"CK_#{s(table.entity.name)}_#{s(name)}"
end
def quoted_constraint_name
table.dialect.quote(self.constraint_name)
end
def qualified_constraint_name
"#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_constraint_name}"
end
def to_s
"Constraint[#{self.qualified_constraint_name}]"
end
# The raw SQL body of the constraint.
def constraint_sql
@sql
end
end
# A check constraint implemented by calling a generated SQL function with
# selected columns; the constraint holds when the function returns 1 or
# any of the extra or_conditions is true.
class FunctionConstraint < Domgen.ParentedElement(:table)
attr_reader :name
# The SQL that is part of function invoked
attr_accessor :positive_sql
# Attribute names whose columns are passed to the function.
attr_accessor :parameters
attr_accessor :common_table_expression
# Extra SQL conditions OR-ed with the function call.
attr_accessor :or_conditions
def initialize(table, name, parameters, options = {}, & block)
@name = name
@parameters = parameters
@or_conditions = []
super(table, options, & block)
end
attr_writer :standard
# Return true if this was defined by domgen or is derivable via rules.
# standard constraints do not typically need to be tested
def standard?
@standard.nil? ? false : @standard
end
attr_writer :invariant
# Return true if this constraint should always be true, not just on insert or update.
def invariant?
@invariant.nil? ? true : @invariant
end
def constraint_name
"CK_#{s(table.entity.name)}_#{s(name)}"
end
def quoted_constraint_name
table.dialect.quote(self.constraint_name)
end
def qualified_constraint_name
"#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_constraint_name}"
end
def function_name
"#{table.entity.name}_#{name}"
end
def quoted_function_name
table.dialect.quote(self.function_name)
end
def qualified_function_name
"#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_function_name}"
end
# The SQL generated in constraint
def constraint_sql
parameter_string = parameters.collect { |parameter_name| " #{table.entity.attribute_by_name(parameter_name).sql.column_name}" }.join(',')
function_call = "#{self.qualified_function_name}(#{parameter_string}) = 1"
(self.or_conditions + [function_call]).join(' OR ')
end
def to_s
"FunctionConstraint[#{self.qualified_constraint_name}]"
end
end
# Base class for table-attached SQL elements (validations, actions,
# triggers) that fire after and/or instead of DML statements.
class SequencedSqlElement < Domgen.ParentedElement(:table)
  # The DML timings an element may be attached to.
  VALID_AFTER = [:insert, :update, :delete]

  attr_reader :name
  attr_reader :after
  attr_reader :instead_of

  def initialize(table, name, options = {}, & block)
    @name = name
    # Default: fire after insert and update, but not delete.
    @after = [:insert, :update]
    @instead_of = []
    super(table, options, & block)
  end

  def after=(after)
    @after = scope('after', after)
  end

  def instead_of=(instead_of)
    @instead_of = scope('instead_of', instead_of)
  end

  private

  # Normalizes nil/scalar/array input to an array and validates each
  # entry against VALID_AFTER.
  def scope(label, scope)
    if scope.nil?
      scope = []
    elsif !scope.is_a?(Array)
      scope = [scope]
    end
    scope.each do |a|
      # Fix: corrected typo in the error message ('specififier').
      Domgen.error("Unknown #{label} specifier #{a}") unless VALID_AFTER.include?(a)
    end
    scope
  end
end
# A data validation attached to DML timings; negative_sql presumably
# selects violating rows — confirm against the SQL generation templates.
class Validation < SequencedSqlElement
attr_accessor :negative_sql
attr_accessor :invariant_negative_sql
attr_accessor :common_table_expression
# Optional guard expression limiting when the validation runs.
attr_accessor :guard
attr_writer :priority
attr_writer :standard
# Return true if this was defined by domgen or is derivable via rules.
# standard constraints do not typically need to be tested
def standard?
@standard.nil? ? false : @standard
end
# Execution ordering among validations; lower runs first — TODO confirm.
def priority
@priority || 1
end
def to_s
"Validation[#{self.name}]"
end
end
# An arbitrary SQL action attached to DML timings, with an optional
# guard and a priority for ordering.
class Action < SequencedSqlElement
attr_accessor :sql
attr_accessor :guard
attr_writer :priority
def priority
@priority || 1
end
def to_s
"Action[#{self.name}]"
end
end
# A database trigger on the owning entity's table.
class Trigger < SequencedSqlElement
  attr_accessor :sql

  # Derived (memoized) trigger name incorporating the owning entity.
  def trigger_name
    @trigger_name ||= sql_name(:trigger, "#{table.entity.name}#{self.name}")
  end

  def quoted_trigger_name
    table.dialect.quote(self.trigger_name)
  end

  def qualified_trigger_name
    "#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_trigger_name}"
  end

  # Fix: previously labelled itself 'Action[...]' (copy-paste from the
  # Action class), which made log/error output misleading.
  def to_s
    "Trigger[#{self.qualified_trigger_name}]"
  end
end
end
FacetManager.facet(:sql) do |facet|
# Repository-level SQL configuration: dialect selection, error handling
# and JDBC driver defaults.
facet.enhance(Repository) do
# Picks the dialect from whichever database facet (mssql/pgsql) is enabled.
def dialect
@dialect ||= (repository.mssql? ? Domgen::Mssql::MssqlDialect.new : repository.pgsql? ? Domgen::Pgsql::PgsqlDialect.new : (Domgen.error('Unable to determine the dialect in use')))
end
# Default error handler raises via the dialect; replaceable below.
def error_handler
@error_handler ||= Proc.new do |error_message|
self.dialect.raise_error_sql(error_message)
end
end
def define_error_handler(&block)
@error_handler = block
end
def emit_error(error_message)
error_handler.call(error_message)
end
# Falls back to mssql when no database facet was chosen.
def pre_complete
# TODO: This will re-enable disabled sql facets which seems sub-par
self.repository.enable_facet(:mssql) if !self.repository.mssql? && !self.repository.pgsql?
end
# Rejects two data modules mapping to the same database schema.
def perform_verify
self.repository.data_modules.select { |data_module| data_module.sql? }.each do |dm|
self.repository.data_modules.select { |data_module| data_module.sql? }.each do |other|
if dm != other && dm.sql.schema.to_s == other.sql.schema.to_s
Domgen.error("Multiple data modules (#{dm.name} && #{other.name}) are mapped to the same schema #{other.sql.schema}")
end
end
end
end
attr_writer :sql_driver
# JDBC driver class; defaults per database facet, otherwise must be set.
def sql_driver
if @sql_driver.nil?
@sql_driver =
if self.repository.pgsql?
'org.postgresql.Driver'
elsif self.repository.mssql?
'net.sourceforge.jtds.jdbc.Driver'
else
Domgen.error('No default SQL driver available, specify one with repository.sql.sql_driver = "your.driver.here"')
end
end
@sql_driver
end
end
# Per-data-module SQL configuration: schema naming and sequence registry.
facet.enhance(DataModule) do
def dialect
data_module.repository.sql.dialect
end
attr_writer :schema
# Database schema name; defaults to the data module name.
def schema
@schema || data_module.name
end
def quoted_schema
self.dialect.quote(self.schema)
end
# Defines a sequence in this schema (registers itself via Sequence#initialize).
def sequence(name, options = {}, &block)
Domgen::Sql::Sequence.new(self, name, options, &block)
end
def sequences
sequence_map.values
end
def sequence_by_name(name)
sequence = sequence_map[name.to_s]
Domgen.error("Unable to locate sequence #{name} in #{data_module.name}") unless sequence
sequence
end
def sequence_by_name?(name)
!!sequence_map[name.to_s]
end
protected
def sequence_map
@sequences ||= {}
end
# Called by Sequence#initialize via send.
def register_sequence(name, sequence)
sequence_map[name.to_s] = sequence
end
end
facet.enhance(Entity) do
def dialect
entity.data_module.sql.dialect
end
def load_from_fixture=(load_from_fixture)
@load_from_fixture = load_from_fixture
end
def load_from_fixture?
@load_from_fixture.nil? ? false : !!@load_from_fixture
end
def sequence_table=(sequence_table)
@sequence_table = sequence_table
end
def sequence_table?
@sequence_table.nil? ? false : !!@sequence_table
end
attr_accessor :partition_scheme
#+force_overflow_for_large_objects+ if set to true will force the native *VARCHAR(max) and XML datatypes (i.e.
# text attributes to always be stored in overflow page by database engine. Otherwise they will be stored inline
# as long as the data fits into a 8,060 byte row. It is a performance hit to access the overflow table so this
# should be set to false unless the data columns are infrequently accessed relative to the other columns
# TODO: MSSQL Specific
attr_accessor :force_overflow_for_large_objects
def table_name=(table_name)
Domgen.error("sql.table_name= invoked on abstract entity #{entity.qualified_name}") if entity.abstract?
@table_name = table_name
end
def table_name
Domgen.error("sql.table_name invoked on abstract entity #{entity.qualified_name}") if entity.abstract?
@table_name || sql_name(:table, entity.name)
end
def quoted_table_name
self.dialect.quote(table_name)
end
def qualified_table_name
"#{entity.data_module.sql.quoted_schema}.#{quoted_table_name}"
end
def view?
entity.direct_subtypes.size != 0
end
# A view is created for any entity that has subtypes, and the view abstracts over all subclasses
def view_name=(view_name)
Domgen.error("sql.view_name= invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
@view_name = view_name
end
def view_name
Domgen.error("sql.view_name invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
@view_name || sql_name(:view, entity.name)
end
def view_insert_trigger
Domgen.error("sql.view_insert_trigger invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
sql_name(:trigger, "#{entity.name}Insert")
end
def quoted_view_insert_trigger
self.dialect.quote(view_insert_trigger)
end
def qualified_view_insert_trigger
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_insert_trigger}"
end
def view_update_trigger
Domgen.error("sql.view_update_trigger invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
sql_name(:trigger, "#{entity.name}Update")
end
def quoted_view_update_trigger
self.dialect.quote(view_update_trigger)
end
def qualified_view_update_trigger
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_update_trigger}"
end
def view_delete_trigger
Domgen.error("sql.view_delete_trigger invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
sql_name(:trigger, "#{entity.name}Delete")
end
def quoted_view_delete_trigger
self.dialect.quote(view_delete_trigger)
end
def qualified_view_delete_trigger
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_delete_trigger}"
end
def quoted_view_name
self.dialect.quote(view_name)
end
def qualified_view_name
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_name}"
end
def discriminator=(discriminator)
Domgen.error("Attempted to call 'sql.discriminator=' on non-subclass #{entity.qualified_name}") if entity.extends.nil?
@discriminator = discriminator
end
def discriminator
Domgen.error("Attempted to call 'sql.discriminator' on non-subclass #{entity.qualified_name}") if entity.extends.nil?
@discriminator || entity.qualified_name.to_s
end
def constraint_values
@constraint_values ||= {}
end
def constraints
constraint_values.values
end
def constraint_by_name(name)
constraint_values[name.to_s]
end
def constraint(name, options = {}, &block)
existing = constraint_by_name(name)
Domgen.error("Constraint named #{name} already defined on table #{qualified_table_name}") if existing
constraint = Domgen::Sql::Constraint.new(self, name, options, &block)
constraint_values[name.to_s] = constraint
constraint
end
def function_constraint_values
@function_constraint_values ||= {}
end
def function_constraints
function_constraint_values.values
end
def function_constraint_by_name(name)
function_constraint = function_constraint_values[name.to_s]
Domgen.error("No Function Constraint named #{name} defined on table #{qualified_table_name}") unless function_constraint
function_constraint
end
def function_constraint?(name)
!!function_constraint_values[name.to_s]
end
def function_constraint(name, parameters, options = {}, &block)
Domgen.error("Function Constraint named #{name} already defined on table #{qualified_table_name}") if function_constraint?(name)
function_constraint = Domgen::Sql::FunctionConstraint.new(self, name, parameters, options, &block)
function_constraint_values[name.to_s] = function_constraint
function_constraint
end
def validation_values
@validation_values ||= {}
end
def validations
validation_values.values
end
def validation_by_name(name)
validation = validation_values[name.to_s]
Domgen.error("No validation named #{name} defined on table #{qualified_table_name}") unless validation
validation
end
def validation?(name)
!!validation_values[name.to_s]
end
def validation(name, options = {}, &block)
Domgen.error("Validation named #{name} already defined on table #{qualified_table_name}") if validation?(name)
validation = Domgen::Sql::Validation.new(self, name, options, &block)
validation_values[name.to_s] = validation
validation
end
def action_values
@action_values ||= {}
end
def actions
action_values.values
end
def action_by_name(name)
action = action_values[name.to_s]
Domgen.error("No action named #{name} defined on table #{qualified_table_name}") unless action
action
end
def action?(name)
!!action_values[name.to_s]
end
def action(name, options = {}, &block)
Domgen.error("Action named #{name} already defined on table #{qualified_table_name}") if action?(name)
action = Action.new(self, name, options, &block)
action_values[name.to_s] = action
action
end
# Registry of triggers keyed by trigger name.
def trigger_values
  @trigger_values ||= {}
end

# All triggers defined on this table.
def triggers
  trigger_values.values
end

# Look up a trigger, failing with a Domgen error when absent.
def trigger_by_name(name)
  found = trigger_values[name.to_s]
  Domgen.error("No trigger named #{name} on table #{qualified_table_name}") unless found
  found
end

# True when a trigger with the supplied name exists.
def trigger?(name)
  trigger_values.key?(name.to_s)
end

# Define a new trigger; duplicate names are an error. Returns the trigger.
def trigger(name, options = {}, &block)
  Domgen.error("Trigger named #{name} already defined on table #{qualified_table_name}") if trigger?(name)
  trigger_values[name.to_s] = Domgen::Sql::Trigger.new(self, name, options, &block)
end
# Declare a clustered index over the supplied attributes.
def cluster(attribute_names, options = {}, &block)
  index(attribute_names, options.merge(:index_type => :cluster), &block)
end

# Registry of indexes keyed by derived index name.
def index_values
  @index_values ||= {}
end

# All indexes defined on this table.
def indexes
  index_values.values
end

# Define an index over attribute_names. When skip_if_present is true an
# existing index with the same derived name is returned instead of raising.
def index(attribute_names, options = {}, skip_if_present = false, &block)
  new_index = Domgen::Sql::Index.new(self, attribute_names, options, &block)
  existing = index_values[new_index.index_name]
  if existing
    return existing if skip_if_present
    Domgen.error("Index named #{new_index.index_name} already defined on table #{qualified_table_name}")
  end
  # Every indexed attribute must exist on the owning entity.
  attribute_names.each do |attribute_name|
    unless entity.attribute_by_name?(attribute_name)
      Domgen.error("Index named #{new_index.index_name} declares attribute name #{attribute_name} that does not exist on containing entity #{entity.qualified_name}")
    end
  end
  index_values[new_index.index_name] = new_index
end
# Registry of foreign keys keyed by derived foreign key name.
def foreign_key_values
  @foreign_key_values ||= {}
end

# All foreign keys defined on this table.
def foreign_keys
  foreign_key_values.values
end

# Define a foreign key to referenced_entity_name. When skip_if_present is true
# and a key with the same name exists, the existing foreign key is returned
# (previously nil was returned — now consistent with #index). Duplicate names
# otherwise raise; the message now uses qualified_table_name like the sibling
# registries rather than the bare table_name.
def foreign_key(attribute_names, referenced_entity_name, referenced_attribute_names, options = {}, skip_if_present = false, &block)
  foreign_key = Domgen::Sql::ForeignKey.new(self, attribute_names, referenced_entity_name, referenced_attribute_names, options, &block)
  existing = foreign_key_values[foreign_key.name]
  return existing if existing && skip_if_present
  Domgen.error("Foreign Key named #{foreign_key.name} already defined on table #{qualified_table_name}") if existing
  foreign_key_values[foreign_key.name] = foreign_key
  foreign_key
end
# Derives the standard database artifacts (indexes, check constraints,
# validations, triggers and foreign keys) from the entity model. Invoked by
# the framework once the model is populated; section ordering matters as the
# trigger-bundling step near the end consumes validations/actions registered
# by earlier sections.
def post_verify
# A partition scheme requires a clustered index; clustered indexes must not
# be partial and there may be at most one.
if self.partition_scheme && indexes.select { |index| index.cluster? }.empty?
Domgen.error('Must specify a clustered index if using a partition scheme')
end
self.indexes.each do |index|
if index.cluster? && index.partial?
Domgen.error("Must not specify a partial clustered index. Index = #{index.qualified_index_name}")
end
end
if indexes.select { |i| i.cluster? }.size > 1
Domgen.error("#{qualified_table_name} defines multiple clustering indexes")
end
# Unique constraints become unique indexes (skipped when already defined).
entity.unique_constraints.each do |c|
index(c.attribute_names, { :unique => true }, true)
end
# Relationship constraints become CHECK constraints comparing two columns;
# NULL in a nullable operand satisfies the constraint.
entity.relationship_constraints.each do |c|
lhs = entity.attribute_by_name(c.lhs_operand)
rhs = entity.attribute_by_name(c.rhs_operand)
op = c.class.operators[c.operator]
constraint_sql = []
constraint_sql << "#{lhs.sql.quoted_column_name} IS NULL" if lhs.nullable?
constraint_sql << "#{rhs.sql.quoted_column_name} IS NULL" if rhs.nullable?
constraint_sql << "#{lhs.sql.quoted_column_name} #{op} #{rhs.sql.quoted_column_name}"
constraint(c.name, :standard => true, :sql => constraint_sql.join(' OR ')) unless constraint_by_name(c.name)
copy_tags(c, constraint_by_name(c.name))
end
# xor constraints: exactly one of the attributes must be non-NULL.
entity.xor_constraints.each do |c|
unless constraint_by_name(c.name)
sql = []
c.attribute_names.each_with_index do |name, index|
s = "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL AND " +
"#{c.attribute_names.select { |n| n != name }.collect do |n|
"#{entity.attribute_by_name(n).sql.quoted_column_name} IS NULL"
end.join(' AND ')}"
sql << "(#{s})"
end
constraint(c.name, :standard => true, :sql => sql.join(' OR '))
end
copy_tags(c, constraint_by_name(c.name))
end
# Dependency constraints: when the attribute is set, its dependent
# attributes must all be set.
entity.dependency_constraints.each do |c|
constraint(c.name, :standard => true, :sql => <<SQL) unless constraint_by_name(c.name)
#{entity.attribute_by_name(c.attribute_name).sql.quoted_column_name} IS NULL OR
( #{c.dependent_attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL" }.join(' AND ') } )
SQL
copy_tags(c, constraint_by_name(c.name))
end
# Codependent constraints: the attributes are all NULL or all non-NULL.
entity.codependent_constraints.each do |c|
constraint(c.name, :standard => true, :sql => <<SQL) unless constraint_by_name(c.name)
( #{c.attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL" }.join(' AND ')} ) OR
( #{c.attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NULL" }.join(' AND ') } )
SQL
copy_tags(c, constraint_by_name(c.name))
end
# NOTE(review): this loop duplicates the dependency_constraints loop above;
# the second pass is a no-op (guarded by constraint_by_name) and could be
# removed.
entity.dependency_constraints.each do |c|
constraint(c.name, :standard => true, :sql => <<SQL) unless constraint_by_name(c.name)
#{entity.attribute_by_name(c.attribute_name).sql.quoted_column_name} IS NULL OR
( #{c.dependent_attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL" }.join(' AND ') } )
SQL
copy_tags(c, constraint_by_name(c.name))
end
# Incompatible constraints: at most one attribute may be non-NULL.
# NOTE(review): the range 0..size gives one extra iteration where candidate
# is nil, emitting an "all NULL" disjunct — presumably intentional so that
# none of the attributes need be set; confirm.
entity.incompatible_constraints.each do |c|
sql = (0..(c.attribute_names.size)).collect do |i|
candidate = c.attribute_names[i]
str = c.attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS#{(candidate == name) ? ' NOT' : ''} NULL" }.join(' AND ')
"(#{str})"
end.join(' OR ')
constraint(c.name, :standard => true, :sql => sql) unless constraint_by_name(c.name)
copy_tags(c, constraint_by_name(c.name))
end
# Range-check numeric enumerations.
# NOTE(review): sorted_values spans 0..length inclusive, so the generated
# upper bound is values.length rather than values.length - 1 — looks like an
# off-by-one; confirm against how enumeration ordinals are assigned.
entity.attributes.select { |a| a.enumeration? && a.enumeration.numeric_values? }.each do |a|
sorted_values = (0..(a.enumeration.values.length)).collect { |v| v }
constraint_name = "#{a.name}_Enum"
constraint(constraint_name, :standard => true, :sql => <<SQL) unless constraint_by_name(constraint_name)
#{a.sql.quoted_column_name} >= #{sorted_values[0]} AND
#{a.sql.quoted_column_name} <= #{sorted_values[sorted_values.size - 1]}
SQL
end
# Restrict textual enumerations to the declared literal values.
entity.attributes.select { |a| a.attribute_type == :enumeration && a.enumeration.textual_values? }.each do |a|
constraint_name = "#{a.name}_Enum"
constraint(constraint_name, :standard => true, :sql => <<SQL) unless constraint_by_name(constraint_name)
#{a.sql.quoted_column_name} IN (#{a.enumeration.values.collect { |v| "'#{v.value}'" }.join(',')})
SQL
end
# Length-bearing attributes that disallow blanks get a dialect CHECK.
entity.attributes.select { |a| (a.allows_length?) && !a.allow_blank? }.each do |a|
constraint_name = "#{a.name}_NotEmpty"
sql = self.dialect.disallow_blank_constraint(a.sql.column_name)
constraint(constraint_name, :standard => true, :sql => sql) unless constraint_by_name(constraint_name)
end
# set_once attributes may not be modified once set (update-time validation).
entity.attributes.select { |a| a.set_once? }.each do |a|
validation_name = "#{a.name}_SetOnce"
validation(validation_name, :standard => true, :negative_sql => self.dialect.set_once_sql(a), :after => :update) unless validation?(validation_name)
end
# Cycle constraints: walk the attribute path across entities and verify the
# scoping attribute matches, via a generated constraint function.
entity.cycle_constraints.each do |c|
target_attribute = entity.attribute_by_name(c.attribute_name)
target_entity = entity.attribute_by_name(c.attribute_name).referenced_entity
scoping_attribute = target_entity.attribute_by_name(c.scoping_attribute)
attribute_name_path = c.attribute_name_path
object_path = []
# NOTE: this local shadows the entity method for the rest of the
# iteration; it is rebound from self.entity on each pass.
entity = self.entity
attribute_name_path.each do |attribute_name_path_element|
object_path << entity
other = entity.attribute_by_name(attribute_name_path_element)
entity = other.referenced_entity
end
# Build the LEFT JOIN chain along the attribute path.
joins = []
next_id = "@#{target_attribute.sql.column_name}"
last_name = '@'
attribute_name_path.each_with_index do |attribute_name, index|
ot = object_path[index]
name = "C#{index}"
if index != 0
joins << "LEFT JOIN #{ot.sql.qualified_table_name} #{name} ON #{last_name}#{object_path[index - 1].attribute_by_name(attribute_name_path[index - 1]).sql.column_name} = #{name}.#{ot.primary_key.sql.column_name}"
last_name = "#{name}."
end
next_id = "#{last_name}#{ot.attribute_by_name(attribute_name).sql.column_name}"
end
comparison_id = "C0.#{scoping_attribute.sql.column_name}"
functional_constraint_name = "#{c.name}_Scope"
unless function_constraint?(functional_constraint_name)
function_constraint(functional_constraint_name, [c.attribute_name, c.attribute_name_path[0]], :standard => true) do |constraint|
constraint.invariant = true
start_attribute = self.entity.attribute_by_name(c.attribute_name)
sql = ''
# NULL links short-circuit the check for nullable endpoints.
if start_attribute.nullable?
sql += "SELECT 1 AS Result WHERE @#{start_attribute.sql.column_name} IS NULL\nUNION\n"
end
first_attribute_step = self.entity.attribute_by_name(c.attribute_name_path[0])
if first_attribute_step.nullable?
sql += "SELECT 1 AS Result WHERE @#{first_attribute_step.sql.column_name} IS NULL\nUNION\n"
end
sql += <<SQL
SELECT 1 AS Result
FROM
#{target_entity.sql.qualified_table_name} C0
#{joins.join("\n")}
WHERE #{comparison_id} = #{next_id} AND C0.#{target_entity.primary_key.sql.quoted_column_name} = @#{start_attribute.sql.column_name}
SQL
constraint.positive_sql = sql
end
copy_tags(c, function_constraint_by_name(functional_constraint_name))
end
end
# Immutable (non-primary-key) attributes are enforced by an update-time
# validation supplied by the dialect.
immutable_attributes = self.entity.attributes.select { |a| a.immutable? && !a.primary_key? }
if immutable_attributes.size > 0
validation_name = 'Immuter'
unless validation?(validation_name)
guard = self.dialect.immuter_guard(self.entity, immutable_attributes)
guard_sql = self.dialect.immuter_sql(self.entity, immutable_attributes)
validation(validation_name, :standard => true, :negative_sql => guard_sql, :after => :update, :guard => guard)
end
end
# References to abstract entities cannot use real FKs; validate that the
# referenced id exists in exactly one concrete subtype table.
abstract_relationships = self.entity.attributes.select { |a| a.reference? && a.referenced_entity.abstract? }
if abstract_relationships.size > 0
abstract_relationships.each do |attribute|
concrete_subtypes = {}
attribute.referenced_entity.concrete_subtypes.each_with_index do |subtype, index|
concrete_subtypes["C#{index}"] = subtype
end
names = concrete_subtypes.keys
validation_name = "#{attribute.name}ForeignKey"
#TODO: Turn this into a functional validation
unless validation?(validation_name)
guard = "UPDATE(#{attribute.sql.quoted_column_name})"
sql = <<SQL
SELECT I.#{self.entity.primary_key.sql.quoted_column_name}
FROM
inserted I
SQL
concrete_subtypes.each_pair do |name, subtype|
sql << " LEFT JOIN #{subtype.sql.qualified_table_name} #{name} ON #{name}.#{self.dialect.quote('Id')} = I.#{attribute.sql.quoted_column_name}"
end
sql << " WHERE (#{names.collect { |name| "#{name}.#{self.dialect.quote('Id')} IS NULL" }.join(' AND ') })"
(0..(names.size - 2)).each do |index|
sql << " OR\n (#{names[index] }.#{self.dialect.quote('Id')} IS NOT NULL AND (#{((index + 1)..(names.size - 1)).collect { |index2| "#{names[index2]}.#{self.dialect.quote('Id')} IS NOT NULL" }.join(' OR ') }))"
end
validation(validation_name, :negative_sql => sql, :guard => guard) unless validation?(validation_name)
end
end
end
# Read-only entities reject all mutations via an INSTEAD OF trigger.
if self.entity.read_only?
trigger_name = 'ReadOnlyCheck'
unless trigger?(trigger_name)
trigger(trigger_name) do |trigger|
trigger.description("Ensure that #{self.entity.name} is read only.")
trigger.after = []
trigger.instead_of = [:insert, :update, :delete]
trigger.sql = self.entity.data_module.repository.sql.emit_error("#{self.entity.name} is read only")
end
end
end
# Bundle the validations/actions registered for each event into a single
# AFTER trigger, ordered by descending priority, with a generated
# description listing what it enforces.
Domgen::Sql::Trigger::VALID_AFTER.each do |after|
desc = "Trigger after #{after} on #{self.entity.name}\n\n"
validations = self.validations.select { |v| v.after.include?(after) }.sort { |a, b| b.priority <=> a.priority }
actions = self.actions.select { |a| a.after.include?(after) }.sort { |a, b| b.priority <=> a.priority }
if !validations.empty? || !actions.empty?
trigger_name = "After#{after.to_s.capitalize}"
trigger(trigger_name) do |trigger|
sql = self.dialect.validations_trigger_sql(self.entity, validations, actions)
if !validations.empty?
desc += "Enforce following validations:\n"
validations.each do |validation|
desc += "* #{validation.name}#{validation.tags[:Description] ? ': ' : ''}#{validation.tags[:Description]}\n"
end
desc += "\n"
end
if !actions.empty?
desc += "Performing the following actions:\n"
actions.each do |action|
desc += "* #{action.name}#{action.tags[:Description] ? ': ' : ''}#{action.tags[:Description]}\n"
end
end
trigger.description(desc)
trigger.sql = sql
trigger.after = after
end
end
end
# Emit real foreign keys for concrete, non-polymorphic references
# (skipped if an equivalent key was already defined).
self.entity.attributes.select { |a| a.reference? && !a.abstract? && !a.polymorphic? }.each do |a|
foreign_key([a.name],
a.referenced_entity.qualified_name,
[a.referenced_entity.primary_key.name],
{ :on_update => a.sql.on_update, :on_delete => a.sql.on_delete },
true)
end
# Allow the dialect a final pass over the table definition.
self.dialect.post_verify_table_customization(self)
end
# Copy every tag entry from +from+ onto +to+, overwriting duplicate keys.
def copy_tags(from, to)
  from.tags.each_pair { |key, value| to.tags[key] = value }
end
end
# SQL-level customization of an attribute: column naming, column type,
# value generation (identity/sequence) and referential actions.
facet.enhance(Attribute) do
def dialect
attribute.entity.sql.dialect
end
# NOTE(review): the reader generated here is immediately overridden by the
# custom column_name below; only the generated writer survives.
attr_accessor :column_name
# Column name defaults to the referencing link name for (remote) references,
# otherwise the attribute name. Memoized on first access.
def column_name
if @column_name.nil?
if attribute.reference? || attribute.remote_reference?
@column_name = attribute.referencing_link_name
else
@column_name = attribute.name
end
end
@column_name
end
def quoted_column_name
self.dialect.quote(self.column_name)
end
attr_writer :sql_type
# SQL column type, derived from the dialect unless explicitly set.
def sql_type
@sql_type ||= self.dialect.column_type(self)
end
# Generated primary keys default to :identity; otherwise :none unless set.
def generator_type
return :identity if @generator_type.nil? && attribute.generated_value? && attribute.primary_key?
@generator_type || :none
end
# Setting a generator also marks the attribute value as generated.
def generator_type=(generator_type)
Domgen.error("generator_type supplied #{generator_type} not valid") unless [:none, :identity, :sequence].include?(generator_type)
attribute.generated_value = true
@generator_type = generator_type
end
def sequence?
self.generator_type == :sequence
end
def identity?
self.generator_type == :identity
end
# Sequence name defaults to <TableName><AttributeName>Seq.
def sequence_name
Domgen.error("sequence_name called on #{attribute.qualified_name} when not a sequence") unless self.sequence?
@sequence_name || "#{attribute.entity.abstract? ? sql_name(:table, attribute.entity.name) : attribute.entity.sql.table_name}#{attribute.name}Seq"
end
def sequence_name=(sequence_name)
Domgen.error("sequence_name= called on #{attribute.qualified_name} when not a sequence") if !@generator_type.nil? && !self.sequence?
@sequence_name = sequence_name
end
# Locate the backing sequence on the data module, defining it lazily with
# this attribute's sql_type when absent.
def sequence
Domgen.error("sequence called on #{attribute.qualified_name} when not a sequence") unless self.sequence?
if attribute.entity.data_module.sql.sequence_by_name?(self.sequence_name)
attribute.entity.data_module.sql.sequence_by_name(self.sequence_name)
else
attribute.entity.data_module.sql.sequence(self.sequence_name, 'sql_type' => self.sql_type)
end
end
# TODO: MSSQL Specific
attr_writer :sparse
def sparse?
@sparse.nil? ? false : @sparse
end
# The calculation to create column
attr_accessor :calculation
def persistent_calculation=(persistent_calculation)
Domgen.error('Non calculated column can not be persistent') unless @calculation
@persistent_calculation = persistent_calculation
end
def persistent_calculation?
@persistent_calculation.nil? ? false : @persistent_calculation
end
# Referential action applied on update of the referenced row.
def on_update=(on_update)
Domgen.error("on_update on #{column_name} is invalid as attribute is not a reference") unless attribute.reference?
Domgen.error("on_update #{on_update} on #{column_name} is invalid") unless self.class.change_actions.include?(on_update)
@on_update = on_update
end
# NOTE(review): this error message interpolates name rather than column_name
# (as the setter does) — verify name resolves in this scope.
def on_update
Domgen.error("on_update on #{name} is invalid as attribute is not a reference") unless attribute.reference?
@on_update.nil? ? :no_action : @on_update
end
# Referential action applied on delete of the referenced row.
def on_delete=(on_delete)
Domgen.error("on_delete on #{column_name} is invalid as attribute is not a reference") unless attribute.reference?
Domgen.error("on_delete #{on_delete} on #{column_name} is invalid") unless self.class.change_actions.include?(on_delete)
@on_delete = on_delete
end
# NOTE(review): same name vs column_name question as the on_update getter.
def on_delete
Domgen.error("on_delete on #{name} is invalid as attribute is not a reference") unless attribute.reference?
@on_delete.nil? ? :no_action : @on_delete
end
# Valid referential actions for on_update/on_delete.
def self.change_actions
[:cascade, :restrict, :set_null, :set_default, :no_action]
end
attr_accessor :default_value
# Ensure the backing sequence is defined once the model is complete.
def perform_complete
self.sequence if self.sequence?
end
end
# SQL customization of an attribute on a remote entity; the dialect comes
# from the data module that owns the remote entity.
facet.enhance(RemoteEntityAttribute) do
  def dialect
    attribute.remote_entity.data_module.sql.dialect
  end

  attr_accessor :column_name

  # Column name defaults to the attribute name unless explicitly assigned.
  def column_name
    @column_name.nil? ? attribute.name : @column_name
  end

  def quoted_column_name
    dialect.quote(column_name)
  end

  attr_writer :sql_type

  # SQL column type, derived from the dialect unless explicitly set.
  def sql_type
    @sql_type ||= dialect.column_type(self)
  end
end
end
end
Derive the primary key rather than hardcoding it.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Domgen
module Sql
# A named database sequence attached to a schema.
class Sequence < Domgen.ParentedElement(:schema)
  attr_reader :name
  attr_accessor :sql_type

  def initialize(schema, name, options, &block)
    @name = name
    # Register with the owning schema before running the configuration block.
    schema.send(:register_sequence, name, self)
    super(schema, options, &block)
  end

  # Sequence name quoted for the active SQL dialect.
  def quoted_sequence_name
    schema.dialect.quote(name)
  end

  # Schema-qualified, quoted sequence name.
  def qualified_sequence_name
    [schema.quoted_schema, quoted_sequence_name].join('.')
  end
end
# An index on a table. Derives a conventional name (CL_/UQ_/GS_/IX_ prefix
# for clustered/unique/gist/normal) from the indexed attributes unless one
# is supplied.
class Index < Domgen.ParentedElement(:table)
attr_accessor :attribute_names
attr_accessor :include_attribute_names
# Optional predicate; when present the index is partial.
attr_accessor :filter
def initialize(table, attribute_names, options, &block)
@attribute_names = attribute_names
@include_attribute_names = []
super(table, options, &block)
end
def to_s
"Index[#{self.qualified_index_name}]"
end
attr_reader :index_type
def index_type=(index_type)
Domgen.error("index_type #{index_type} on #{qualified_index_name} is invalid") unless self.class.valid_index_types.include?(index_type)
@index_type = index_type
end
attr_writer :index_name
# Memoized; prefix encodes the index kind, suffix joins the attribute names.
def index_name
if @index_name.nil?
prefix = cluster? ? 'CL' : unique? ? 'UQ' : gist? ? 'GS' : 'IX'
suffix = attribute_names.join('_')
@index_name = "#{prefix}_#{table.entity.name}_#{suffix}"
end
@index_name
end
def quoted_index_name
table.dialect.quote(self.index_name)
end
def qualified_index_name
"#{table.entity.data_module.sql.quoted_schema}.#{quoted_index_name}"
end
# GiST indexes are the only unordered kind.
def ordered?
!gist?
end
def cluster?
index_type == :cluster
end
def gist?
index_type == :gist
end
def normal?
index_type == :normal
end
attr_writer :unique
def unique?
@unique.nil? ? false : @unique
end
attr_writer :allow_page_locks
def allow_page_locks?
@allow_page_locks.nil? ? true : !!@allow_page_locks
end
def partial?
!self.filter.nil?
end
# NOTE(review): private has no effect on the class method defined below;
# self.valid_index_types remains publicly callable (which index_type=
# relies on via self.class.valid_index_types).
private
def self.valid_index_types
[:cluster, :gist, :normal]
end
end
# A foreign key from this table to another entity's table. Validates at
# construction time that both the local and referenced attributes exist.
class ForeignKey < Domgen.ParentedElement(:table)
# Maps symbolic referential actions to their SQL keywords.
ACTION_MAP =
{
:cascade => 'CASCADE',
:set_null => 'SET NULL',
:set_default => 'SET DEFAULT',
:no_action => 'NO ACTION'
}.freeze
attr_accessor :attribute_names
attr_accessor :referenced_entity_name
attr_accessor :referenced_attribute_names
def initialize(table, attribute_names, referenced_entity_name, referenced_attribute_names, options, &block)
@attribute_names, @referenced_entity_name, @referenced_attribute_names =
attribute_names, referenced_entity_name, referenced_attribute_names
super(table, options, &block)
# Ensure that the attributes exist
attribute_names.each { |a| table.entity.attribute_by_name(a) }
# Ensure that the remote attributes exist on remote type
referenced_attribute_names.each { |a| referenced_entity.attribute_by_name(a) }
end
attr_writer :name
# Logical name, defaulting to the joined local attribute names (memoized).
def name
if @name.nil?
@name = "#{attribute_names.join('_')}"
end
@name
end
# Resolved through this table's data module.
def referenced_entity
table.entity.data_module.entity_by_name(referenced_entity_name)
end
def on_update=(on_update)
Domgen.error("on_update #{on_update} on #{name} is invalid") unless ACTION_MAP.keys.include?(on_update)
@on_update = on_update
end
def on_update
@on_update || :no_action
end
def on_delete=(on_delete)
Domgen.error("on_delete #{on_delete} on #{name} is invalid") unless ACTION_MAP.keys.include?(on_delete)
@on_delete = on_delete
end
def on_delete
@on_delete || :no_action
end
def foreign_key_name
"FK_#{s(table.entity.name)}_#{s(name)}"
end
def quoted_foreign_key_name
table.dialect.quote(self.foreign_key_name)
end
def qualified_foreign_key_name
"#{table.entity.data_module.sql.quoted_schema}.#{quoted_foreign_key_name}"
end
# Foreign keys are surfaced as constraints under the same name.
def constraint_name
foreign_key_name
end
def quoted_constraint_name
quoted_foreign_key_name
end
def to_s
"ForeignKey[#{self.qualified_foreign_key_name}]"
end
end
# A plain SQL CHECK constraint on a table.
class Constraint < Domgen.ParentedElement(:table)
  attr_reader :name
  attr_accessor :sql
  attr_writer :standard
  attr_writer :invariant

  def initialize(table, name, options = {}, &block)
    @name = name
    super(table, options, &block)
  end

  # True when this constraint was defined by domgen or is derivable via
  # rules; standard constraints do not typically need to be tested.
  def standard?
    return false if @standard.nil?
    @standard
  end

  # True when the constraint must always hold, not just on insert/update.
  def invariant?
    return true if @invariant.nil?
    @invariant
  end

  def constraint_name
    "CK_#{s(table.entity.name)}_#{s(name)}"
  end

  def quoted_constraint_name
    table.dialect.quote(constraint_name)
  end

  def qualified_constraint_name
    "#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_constraint_name}"
  end

  def to_s
    "Constraint[#{self.qualified_constraint_name}]"
  end

  # The SQL body emitted into the CHECK clause.
  def constraint_sql
    @sql
  end
end
# A constraint implemented via a generated SQL function; the CHECK invokes
# the function and requires it to return 1.
class FunctionConstraint < Domgen.ParentedElement(:table)
attr_reader :name
# The SQL that is part of function invoked
attr_accessor :positive_sql
# Attribute names passed to the function as parameters.
attr_accessor :parameters
attr_accessor :common_table_expression
# Extra disjuncts OR-ed in front of the function call in constraint_sql.
attr_accessor :or_conditions
def initialize(table, name, parameters, options = {}, & block)
@name = name
@parameters = parameters
@or_conditions = []
super(table, options, & block)
end
attr_writer :standard
# Return true if this was defined by domgen or is derivable via rules.
# standard constraints do not typically need to be tested
def standard?
@standard.nil? ? false : @standard
end
attr_writer :invariant
# Return true if this constraint should always be true, not just on insert or update.
def invariant?
@invariant.nil? ? true : @invariant
end
def constraint_name
"CK_#{s(table.entity.name)}_#{s(name)}"
end
def quoted_constraint_name
table.dialect.quote(self.constraint_name)
end
def qualified_constraint_name
"#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_constraint_name}"
end
# Function name is <EntityName>_<ConstraintName>.
def function_name
"#{table.entity.name}_#{name}"
end
def quoted_function_name
table.dialect.quote(self.function_name)
end
def qualified_function_name
"#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_function_name}"
end
# The SQL generated in constraint
def constraint_sql
parameter_string = parameters.collect { |parameter_name| " #{table.entity.attribute_by_name(parameter_name).sql.column_name}" }.join(',')
function_call = "#{self.qualified_function_name}(#{parameter_string}) = 1"
(self.or_conditions + [function_call]).join(' OR ')
end
def to_s
"FunctionConstraint[#{self.qualified_constraint_name}]"
end
end
# Base class for SQL elements sequenced relative to table events:
# validations, actions and triggers.
class SequencedSqlElement < Domgen.ParentedElement(:table)
  VALID_AFTER = [:insert, :update, :delete]

  attr_reader :name
  attr_reader :after
  attr_reader :instead_of

  def initialize(table, name, options = {}, & block)
    @name = name
    # By default fire after insert and update but not delete.
    @after = [:insert, :update]
    @instead_of = []
    super(table, options, & block)
  end

  # Accepts a single event symbol, an array of symbols or nil.
  def after=(after)
    @after = scope('after', after)
  end

  def instead_of=(instead_of)
    @instead_of = scope('instead_of', instead_of)
  end

  private

  # Normalize the supplied value to an array and validate each entry
  # against VALID_AFTER.
  def scope(label, scope)
    if scope.nil?
      scope = []
    elsif !scope.is_a?(Array)
      scope = [scope]
    end
    scope.each do |a|
      # Typo fixed in the error message: was "specififier".
      Domgen.error("Unknown #{label} specifier #{a}") unless VALID_AFTER.include?(a)
    end
    scope
  end
end
# A validation run from a generated trigger; negative_sql selects rows that
# violate the rule.
class Validation < SequencedSqlElement
  attr_accessor :negative_sql
  attr_accessor :invariant_negative_sql
  attr_accessor :common_table_expression
  attr_accessor :guard
  attr_writer :priority
  attr_writer :standard

  # True when this validation was defined by domgen or is derivable via
  # rules; standard validations do not typically need to be tested.
  def standard?
    return false if @standard.nil?
    @standard
  end

  # Higher-priority validations are emitted first in generated triggers.
  def priority
    @priority.nil? ? 1 : @priority
  end

  def to_s
    "Validation[#{self.name}]"
  end
end
# An arbitrary SQL action executed from a generated trigger.
class Action < SequencedSqlElement
  attr_accessor :sql
  attr_accessor :guard
  attr_writer :priority

  # Higher-priority actions are emitted first in generated triggers.
  def priority
    @priority.nil? ? 1 : @priority
  end

  def to_s
    "Action[#{self.name}]"
  end
end
# A database trigger attached to the owning table.
class Trigger < SequencedSqlElement
  attr_accessor :sql

  # Derived lazily from the table's entity name plus the trigger's logical name.
  def trigger_name
    @trigger_name ||= sql_name(:trigger, "#{table.entity.name}#{self.name}")
  end

  def quoted_trigger_name
    table.dialect.quote(self.trigger_name)
  end

  def qualified_trigger_name
    "#{table.entity.data_module.sql.quoted_schema}.#{self.quoted_trigger_name}"
  end

  def to_s
    # Fixed copy-paste from Action: previously reported itself as "Action[...]".
    "Trigger[#{self.qualified_trigger_name}]"
  end
end
end
FacetManager.facet(:sql) do |facet|
facet.enhance(Repository) do
  # Resolve the SQL dialect implementation from whichever db facet is enabled.
  def dialect
    @dialect ||=
      if repository.mssql?
        Domgen::Mssql::MssqlDialect.new
      elsif repository.pgsql?
        Domgen::Pgsql::PgsqlDialect.new
      else
        Domgen.error('Unable to determine the dialect in use')
      end
  end

  # Proc invoked to emit an error from generated SQL; defaults to the
  # dialect's raise_error_sql.
  def error_handler
    @error_handler ||= Proc.new { |error_message| self.dialect.raise_error_sql(error_message) }
  end

  # Install a custom error handler block.
  def define_error_handler(&block)
    @error_handler = block
  end

  def emit_error(error_message)
    error_handler.call(error_message)
  end

  def pre_complete
    # TODO: This will re-enable disabled sql facets which seems sub-par
    unless self.repository.mssql? || self.repository.pgsql?
      self.repository.enable_facet(:mssql)
    end
  end

  # Ensure no two sql-enabled data modules map onto the same database schema.
  def perform_verify
    sql_modules = self.repository.data_modules.select { |data_module| data_module.sql? }
    sql_modules.each do |dm|
      sql_modules.each do |other|
        next if dm == other || dm.sql.schema.to_s != other.sql.schema.to_s
        Domgen.error("Multiple data modules (#{dm.name} && #{other.name}) are mapped to the same schema #{other.sql.schema}")
      end
    end
  end

  attr_writer :sql_driver

  # JDBC driver class name, defaulting based on the enabled db facet.
  def sql_driver
    @sql_driver ||=
      if self.repository.pgsql?
        'org.postgresql.Driver'
      elsif self.repository.mssql?
        'net.sourceforge.jtds.jdbc.Driver'
      else
        Domgen.error('No default SQL driver available, specify one with repository.sql.sql_driver = "your.driver.here"')
      end
  end
end
facet.enhance(DataModule) do
  def dialect
    data_module.repository.sql.dialect
  end

  attr_writer :schema

  # Database schema name; defaults to the data module name.
  def schema
    @schema || data_module.name
  end

  def quoted_schema
    self.dialect.quote(self.schema)
  end

  # Define a sequence within this schema.
  def sequence(name, options = {}, &block)
    Domgen::Sql::Sequence.new(self, name, options, &block)
  end

  # All sequences registered against this schema.
  def sequences
    sequence_map.values
  end

  # Look up a sequence, failing with a Domgen error when absent.
  def sequence_by_name(name)
    found = sequence_map[name.to_s]
    Domgen.error("Unable to locate sequence #{name} in #{data_module.name}") unless found
    found
  end

  def sequence_by_name?(name)
    sequence_map.key?(name.to_s)
  end

  protected

  # Registry of sequences keyed by name.
  def sequence_map
    @sequences ||= {}
  end

  # Invoked by Sequence#initialize to register itself.
  def register_sequence(name, sequence)
    sequence_map[name.to_s] = sequence
  end
end
facet.enhance(Entity) do
# Dialect used when emitting SQL for this entity.
def dialect
  entity.data_module.sql.dialect
end

# When true, fixture data is loaded for this table.
def load_from_fixture=(load_from_fixture)
  @load_from_fixture = load_from_fixture
end

def load_from_fixture?
  !!@load_from_fixture
end

# When true, this table is treated as a sequence table.
def sequence_table=(sequence_table)
  @sequence_table = sequence_table
end

def sequence_table?
  !!@sequence_table
end
# Partition scheme for the table; when set, post_verify requires a clustered
# index to be defined. TODO: MSSQL Specific
attr_accessor :partition_scheme
#+force_overflow_for_large_objects+ if set to true will force the native *VARCHAR(max) and XML datatypes (i.e.
# text attributes to always be stored in overflow page by database engine. Otherwise they will be stored inline
# as long as the data fits into a 8,060 byte row. It is a performance hit to access the overflow table so this
# should be set to false unless the data columns are infrequently accessed relative to the other columns
# TODO: MSSQL Specific
attr_accessor :force_overflow_for_large_objects
# Explicitly set the table name; abstract entities have no backing table.
def table_name=(table_name)
  Domgen.error("sql.table_name= invoked on abstract entity #{entity.qualified_name}") if entity.abstract?
  @table_name = table_name
end

# Table name, defaulting to the conventional sql name for the entity.
def table_name
  Domgen.error("sql.table_name invoked on abstract entity #{entity.qualified_name}") if entity.abstract?
  @table_name || sql_name(:table, entity.name)
end

def quoted_table_name
  dialect.quote(table_name)
end

# Schema-qualified, quoted table name.
def qualified_table_name
  [entity.data_module.sql.quoted_schema, quoted_table_name].join('.')
end
# True when the entity has direct subtypes; such entities are fronted by a view.
def view?
entity.direct_subtypes.size != 0
end
# A view is created for any entity that has subtypes, and the view abstracts over all subclasses
def view_name=(view_name)
Domgen.error("sql.view_name= invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
@view_name = view_name
end
def view_name
Domgen.error("sql.view_name invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
@view_name || sql_name(:view, entity.name)
end
# Names of the per-DML triggers generated for the view (presumably INSTEAD OF
# handlers routing DML to the subtype tables — confirm in the templates).
def view_insert_trigger
Domgen.error("sql.view_insert_trigger invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
sql_name(:trigger, "#{entity.name}Insert")
end
def quoted_view_insert_trigger
self.dialect.quote(view_insert_trigger)
end
def qualified_view_insert_trigger
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_insert_trigger}"
end
def view_update_trigger
Domgen.error("sql.view_update_trigger invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
sql_name(:trigger, "#{entity.name}Update")
end
def quoted_view_update_trigger
self.dialect.quote(view_update_trigger)
end
def qualified_view_update_trigger
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_update_trigger}"
end
def view_delete_trigger
Domgen.error("sql.view_delete_trigger invoked on entity #{entity.qualified_name} with no subtypes") if entity.direct_subtypes.size == 0
sql_name(:trigger, "#{entity.name}Delete")
end
def quoted_view_delete_trigger
self.dialect.quote(view_delete_trigger)
end
def qualified_view_delete_trigger
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_delete_trigger}"
end
def quoted_view_name
self.dialect.quote(view_name)
end
def qualified_view_name
"#{entity.data_module.sql.quoted_schema}.#{quoted_view_name}"
end
# Discriminator value stored for subclass rows; only valid on subclasses.
def discriminator=(value)
  Domgen.error("Attempted to call 'sql.discriminator=' on non-subclass #{entity.qualified_name}") if entity.extends.nil?
  @discriminator = value
end

# Defaults to the entity's qualified name when not explicitly set.
def discriminator
  Domgen.error("Attempted to call 'sql.discriminator' on non-subclass #{entity.qualified_name}") if entity.extends.nil?
  @discriminator || entity.qualified_name.to_s
end
# Registry of check constraints keyed by constraint name.
def constraint_values
  @constraint_values ||= {}
end

# All check constraints defined on this table.
def constraints
  constraint_values.values
end

# Returns the constraint or nil; unlike the other registries this lookup
# does not raise for unknown names.
def constraint_by_name(name)
  constraint_values[name.to_s]
end

# Define a new check constraint; duplicate names are an error.
# Returns the constraint.
def constraint(name, options = {}, &block)
  Domgen.error("Constraint named #{name} already defined on table #{qualified_table_name}") if constraint_by_name(name)
  constraint_values[name.to_s] = Domgen::Sql::Constraint.new(self, name, options, &block)
end
# Registry of function-backed constraints keyed by constraint name.
def function_constraint_values
  @function_constraint_values ||= {}
end

# All function constraints defined on this table.
def function_constraints
  function_constraint_values.values
end

# Look up a function constraint, failing with a Domgen error when absent.
def function_constraint_by_name(name)
  found = function_constraint_values[name.to_s]
  Domgen.error("No Function Constraint named #{name} defined on table #{qualified_table_name}") unless found
  found
end

# True when a function constraint with the supplied name exists.
def function_constraint?(name)
  function_constraint_values.key?(name.to_s)
end

# Define a function-backed constraint; duplicate names are an error.
# Returns the constraint.
def function_constraint(name, parameters, options = {}, &block)
  Domgen.error("Function Constraint named #{name} already defined on table #{qualified_table_name}") if function_constraint?(name)
  function_constraint_values[name.to_s] = Domgen::Sql::FunctionConstraint.new(self, name, parameters, options, &block)
end
# Registry of row-level validations keyed by validation name.
def validation_values
  @validation_values ||= {}
end

# All validations defined on this table.
def validations
  validation_values.values
end

# Look up a validation, failing with a Domgen error when absent.
def validation_by_name(name)
  found = validation_values[name.to_s]
  Domgen.error("No validation named #{name} defined on table #{qualified_table_name}") unless found
  found
end

# True when a validation with the supplied name exists.
def validation?(name)
  validation_values.key?(name.to_s)
end

# Define a new validation; duplicate names are an error. Returns the validation.
def validation(name, options = {}, &block)
  Domgen.error("Validation named #{name} already defined on table #{qualified_table_name}") if validation?(name)
  validation_values[name.to_s] = Domgen::Sql::Validation.new(self, name, options, &block)
end
# Registry of SQL actions keyed by action name.
def action_values
  @action_values ||= {}
end

# All actions defined on this table.
def actions
  action_values.values
end

# Look up an action, failing with a Domgen error when absent.
def action_by_name(name)
  action = action_values[name.to_s]
  Domgen.error("No action named #{name} defined on table #{qualified_table_name}") unless action
  action
end

# True when an action with the supplied name exists.
def action?(name)
  !!action_values[name.to_s]
end

# Define a new action; duplicate names are an error. Returns the action.
def action(name, options = {}, &block)
  Domgen.error("Action named #{name} already defined on table #{qualified_table_name}") if action?(name)
  # Fully qualify the constant: the lexical scope here is module Domgen, not
  # Domgen::Sql, and every sibling factory (validation/trigger) qualifies its
  # class the same way. The bare Action constant may not resolve.
  action = Domgen::Sql::Action.new(self, name, options, &block)
  action_values[name.to_s] = action
  action
end
def trigger_values
@trigger_values ||= {}
end
def triggers
trigger_values.values
end
def trigger_by_name(name)
trigger = trigger_values[name.to_s]
Domgen.error("No trigger named #{name} on table #{qualified_table_name}") unless trigger
trigger
end
def trigger?(name)
!!trigger_values[name.to_s]
end
def trigger(name, options = {}, &block)
Domgen.error("Trigger named #{name} already defined on table #{qualified_table_name}") if trigger?(name)
trigger = Domgen::Sql::Trigger.new(self, name, options, &block)
trigger_values[name.to_s] = trigger
trigger
end
def cluster(attribute_names, options = {}, &block)
index(attribute_names, options.merge(:index_type => :cluster), &block)
end
def index_values
@index_values ||= {}
end
def indexes
index_values.values
end
def index(attribute_names, options = {}, skip_if_present = false, &block)
index = Domgen::Sql::Index.new(self, attribute_names, options, &block)
return index_values[index.index_name] if index_values[index.index_name] && skip_if_present
Domgen.error("Index named #{index.index_name} already defined on table #{qualified_table_name}") if index_values[index.index_name]
attribute_names.each do |attribute_name|
Domgen.error("Index named #{index.index_name} declares attribute name #{attribute_name} that does not exist on containing entity #{entity.qualified_name}") unless entity.attribute_by_name?(attribute_name)
end
index_values[index.index_name] = index
index
end
def foreign_key_values
@foreign_key_values ||= {}
end
def foreign_keys
foreign_key_values.values
end
def foreign_key(attribute_names, referenced_entity_name, referenced_attribute_names, options = {}, skip_if_present = false, &block)
foreign_key = Domgen::Sql::ForeignKey.new(self, attribute_names, referenced_entity_name, referenced_attribute_names, options, &block)
return if foreign_key_values[foreign_key.name] && skip_if_present
Domgen.error("Foreign Key named #{foreign_key.name} already defined on table #{table_name}") if foreign_key_values[foreign_key.name]
foreign_key_values[foreign_key.name] = foreign_key
foreign_key
end
def post_verify
if self.partition_scheme && indexes.select { |index| index.cluster? }.empty?
Domgen.error('Must specify a clustered index if using a partition scheme')
end
self.indexes.each do |index|
if index.cluster? && index.partial?
Domgen.error("Must not specify a partial clustered index. Index = #{index.qualified_index_name}")
end
end
if indexes.select { |i| i.cluster? }.size > 1
Domgen.error("#{qualified_table_name} defines multiple clustering indexes")
end
entity.unique_constraints.each do |c|
index(c.attribute_names, { :unique => true }, true)
end
entity.relationship_constraints.each do |c|
lhs = entity.attribute_by_name(c.lhs_operand)
rhs = entity.attribute_by_name(c.rhs_operand)
op = c.class.operators[c.operator]
constraint_sql = []
constraint_sql << "#{lhs.sql.quoted_column_name} IS NULL" if lhs.nullable?
constraint_sql << "#{rhs.sql.quoted_column_name} IS NULL" if rhs.nullable?
constraint_sql << "#{lhs.sql.quoted_column_name} #{op} #{rhs.sql.quoted_column_name}"
constraint(c.name, :standard => true, :sql => constraint_sql.join(' OR ')) unless constraint_by_name(c.name)
copy_tags(c, constraint_by_name(c.name))
end
entity.xor_constraints.each do |c|
unless constraint_by_name(c.name)
sql = []
c.attribute_names.each_with_index do |name, index|
s = "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL AND " +
"#{c.attribute_names.select { |n| n != name }.collect do |n|
"#{entity.attribute_by_name(n).sql.quoted_column_name} IS NULL"
end.join(' AND ')}"
sql << "(#{s})"
end
constraint(c.name, :standard => true, :sql => sql.join(' OR '))
end
copy_tags(c, constraint_by_name(c.name))
end
entity.dependency_constraints.each do |c|
constraint(c.name, :standard => true, :sql => <<SQL) unless constraint_by_name(c.name)
#{entity.attribute_by_name(c.attribute_name).sql.quoted_column_name} IS NULL OR
( #{c.dependent_attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL" }.join(' AND ') } )
SQL
copy_tags(c, constraint_by_name(c.name))
end
entity.codependent_constraints.each do |c|
constraint(c.name, :standard => true, :sql => <<SQL) unless constraint_by_name(c.name)
( #{c.attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL" }.join(' AND ')} ) OR
( #{c.attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NULL" }.join(' AND ') } )
SQL
copy_tags(c, constraint_by_name(c.name))
end
entity.dependency_constraints.each do |c|
constraint(c.name, :standard => true, :sql => <<SQL) unless constraint_by_name(c.name)
#{entity.attribute_by_name(c.attribute_name).sql.quoted_column_name} IS NULL OR
( #{c.dependent_attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS NOT NULL" }.join(' AND ') } )
SQL
copy_tags(c, constraint_by_name(c.name))
end
entity.incompatible_constraints.each do |c|
sql = (0..(c.attribute_names.size)).collect do |i|
candidate = c.attribute_names[i]
str = c.attribute_names.collect { |name| "#{entity.attribute_by_name(name).sql.quoted_column_name} IS#{(candidate == name) ? ' NOT' : ''} NULL" }.join(' AND ')
"(#{str})"
end.join(' OR ')
constraint(c.name, :standard => true, :sql => sql) unless constraint_by_name(c.name)
copy_tags(c, constraint_by_name(c.name))
end
entity.attributes.select { |a| a.enumeration? && a.enumeration.numeric_values? }.each do |a|
sorted_values = (0..(a.enumeration.values.length)).collect { |v| v }
constraint_name = "#{a.name}_Enum"
constraint(constraint_name, :standard => true, :sql => <<SQL) unless constraint_by_name(constraint_name)
#{a.sql.quoted_column_name} >= #{sorted_values[0]} AND
#{a.sql.quoted_column_name} <= #{sorted_values[sorted_values.size - 1]}
SQL
end
entity.attributes.select { |a| a.attribute_type == :enumeration && a.enumeration.textual_values? }.each do |a|
constraint_name = "#{a.name}_Enum"
constraint(constraint_name, :standard => true, :sql => <<SQL) unless constraint_by_name(constraint_name)
#{a.sql.quoted_column_name} IN (#{a.enumeration.values.collect { |v| "'#{v.value}'" }.join(',')})
SQL
end
entity.attributes.select { |a| (a.allows_length?) && !a.allow_blank? }.each do |a|
constraint_name = "#{a.name}_NotEmpty"
sql = self.dialect.disallow_blank_constraint(a.sql.column_name)
constraint(constraint_name, :standard => true, :sql => sql) unless constraint_by_name(constraint_name)
end
entity.attributes.select { |a| a.set_once? }.each do |a|
validation_name = "#{a.name}_SetOnce"
validation(validation_name, :standard => true, :negative_sql => self.dialect.set_once_sql(a), :after => :update) unless validation?(validation_name)
end
entity.cycle_constraints.each do |c|
target_attribute = entity.attribute_by_name(c.attribute_name)
target_entity = entity.attribute_by_name(c.attribute_name).referenced_entity
scoping_attribute = target_entity.attribute_by_name(c.scoping_attribute)
attribute_name_path = c.attribute_name_path
object_path = []
entity = self.entity
attribute_name_path.each do |attribute_name_path_element|
object_path << entity
other = entity.attribute_by_name(attribute_name_path_element)
entity = other.referenced_entity
end
joins = []
next_id = "@#{target_attribute.sql.column_name}"
last_name = '@'
attribute_name_path.each_with_index do |attribute_name, index|
ot = object_path[index]
name = "C#{index}"
if index != 0
joins << "LEFT JOIN #{ot.sql.qualified_table_name} #{name} ON #{last_name}#{object_path[index - 1].attribute_by_name(attribute_name_path[index - 1]).sql.column_name} = #{name}.#{ot.primary_key.sql.column_name}"
last_name = "#{name}."
end
next_id = "#{last_name}#{ot.attribute_by_name(attribute_name).sql.column_name}"
end
comparison_id = "C0.#{scoping_attribute.sql.column_name}"
functional_constraint_name = "#{c.name}_Scope"
unless function_constraint?(functional_constraint_name)
function_constraint(functional_constraint_name, [c.attribute_name, c.attribute_name_path[0]], :standard => true) do |constraint|
constraint.invariant = true
start_attribute = self.entity.attribute_by_name(c.attribute_name)
sql = ''
if start_attribute.nullable?
sql += "SELECT 1 AS Result WHERE @#{start_attribute.sql.column_name} IS NULL\nUNION\n"
end
first_attribute_step = self.entity.attribute_by_name(c.attribute_name_path[0])
if first_attribute_step.nullable?
sql += "SELECT 1 AS Result WHERE @#{first_attribute_step.sql.column_name} IS NULL\nUNION\n"
end
sql += <<SQL
SELECT 1 AS Result
FROM
#{target_entity.sql.qualified_table_name} C0
#{joins.join("\n")}
WHERE #{comparison_id} = #{next_id} AND C0.#{target_entity.primary_key.sql.quoted_column_name} = @#{start_attribute.sql.column_name}
SQL
constraint.positive_sql = sql
end
copy_tags(c, function_constraint_by_name(functional_constraint_name))
end
end
immutable_attributes = self.entity.attributes.select { |a| a.immutable? && !a.primary_key? }
if immutable_attributes.size > 0
validation_name = 'Immuter'
unless validation?(validation_name)
guard = self.dialect.immuter_guard(self.entity, immutable_attributes)
guard_sql = self.dialect.immuter_sql(self.entity, immutable_attributes)
validation(validation_name, :standard => true, :negative_sql => guard_sql, :after => :update, :guard => guard)
end
end
abstract_relationships = self.entity.attributes.select { |a| a.reference? && a.referenced_entity.abstract? }
if abstract_relationships.size > 0
abstract_relationships.each do |attribute|
pk_name = attribute.referenced_entity.primary_key.sql.column_name
concrete_subtypes = {}
attribute.referenced_entity.concrete_subtypes.each_with_index do |subtype, index|
concrete_subtypes["C#{index}"] = subtype
end
names = concrete_subtypes.keys
validation_name = "#{attribute.name}ForeignKey"
#TODO: Turn this into a functional validation
unless validation?(validation_name)
guard = "UPDATE(#{attribute.sql.quoted_column_name})"
sql = <<SQL
SELECT I.#{self.entity.primary_key.sql.quoted_column_name}
FROM
inserted I
SQL
concrete_subtypes.each_pair do |name, subtype|
sql << " LEFT JOIN #{subtype.sql.qualified_table_name} #{name} ON #{name}.#{self.dialect.quote(pk_name)} = I.#{attribute.sql.quoted_column_name}\n"
end
sql << " WHERE (#{names.collect { |name| "#{name}.#{self.dialect.quote(pk_name)} IS NULL" }.join(' AND ') })"
(0..(names.size - 2)).each do |index|
sql << " OR\n (#{names[index] }.#{self.dialect.quote(pk_name)} IS NOT NULL AND (#{((index + 1)..(names.size - 1)).collect { |index2| "#{names[index2]}.#{self.dialect.quote(pk_name)} IS NOT NULL" }.join(' OR ') }))"
end
validation(validation_name, :negative_sql => sql, :guard => guard) unless validation?(validation_name)
end
end
end
if self.entity.read_only?
trigger_name = 'ReadOnlyCheck'
unless trigger?(trigger_name)
trigger(trigger_name) do |trigger|
trigger.description("Ensure that #{self.entity.name} is read only.")
trigger.after = []
trigger.instead_of = [:insert, :update, :delete]
trigger.sql = self.entity.data_module.repository.sql.emit_error("#{self.entity.name} is read only")
end
end
end
Domgen::Sql::Trigger::VALID_AFTER.each do |after|
desc = "Trigger after #{after} on #{self.entity.name}\n\n"
validations = self.validations.select { |v| v.after.include?(after) }.sort { |a, b| b.priority <=> a.priority }
actions = self.actions.select { |a| a.after.include?(after) }.sort { |a, b| b.priority <=> a.priority }
if !validations.empty? || !actions.empty?
trigger_name = "After#{after.to_s.capitalize}"
trigger(trigger_name) do |trigger|
sql = self.dialect.validations_trigger_sql(self.entity, validations, actions)
if !validations.empty?
desc += "Enforce following validations:\n"
validations.each do |validation|
desc += "* #{validation.name}#{validation.tags[:Description] ? ': ' : ''}#{validation.tags[:Description]}\n"
end
desc += "\n"
end
if !actions.empty?
desc += "Performing the following actions:\n"
actions.each do |action|
desc += "* #{action.name}#{action.tags[:Description] ? ': ' : ''}#{action.tags[:Description]}\n"
end
end
trigger.description(desc)
trigger.sql = sql
trigger.after = after
end
end
end
self.entity.attributes.select { |a| a.reference? && !a.abstract? && !a.polymorphic? }.each do |a|
foreign_key([a.name],
a.referenced_entity.qualified_name,
[a.referenced_entity.primary_key.name],
{ :on_update => a.sql.on_update, :on_delete => a.sql.on_delete },
true)
end
self.dialect.post_verify_table_customization(self)
end
def copy_tags(from, to)
from.tags.each_pair do |k, v|
to.tags[k] = v
end
end
end
facet.enhance(Attribute) do
def dialect
attribute.entity.sql.dialect
end
attr_accessor :column_name
def column_name
if @column_name.nil?
if attribute.reference? || attribute.remote_reference?
@column_name = attribute.referencing_link_name
else
@column_name = attribute.name
end
end
@column_name
end
def quoted_column_name
self.dialect.quote(self.column_name)
end
attr_writer :sql_type
def sql_type
@sql_type ||= self.dialect.column_type(self)
end
def generator_type
return :identity if @generator_type.nil? && attribute.generated_value? && attribute.primary_key?
@generator_type || :none
end
def generator_type=(generator_type)
Domgen.error("generator_type supplied #{generator_type} not valid") unless [:none, :identity, :sequence].include?(generator_type)
attribute.generated_value = true
@generator_type = generator_type
end
def sequence?
self.generator_type == :sequence
end
def identity?
self.generator_type == :identity
end
def sequence_name
Domgen.error("sequence_name called on #{attribute.qualified_name} when not a sequence") unless self.sequence?
@sequence_name || "#{attribute.entity.abstract? ? sql_name(:table, attribute.entity.name) : attribute.entity.sql.table_name}#{attribute.name}Seq"
end
def sequence_name=(sequence_name)
Domgen.error("sequence_name= called on #{attribute.qualified_name} when not a sequence") if !@generator_type.nil? && !self.sequence?
@sequence_name = sequence_name
end
def sequence
Domgen.error("sequence called on #{attribute.qualified_name} when not a sequence") unless self.sequence?
if attribute.entity.data_module.sql.sequence_by_name?(self.sequence_name)
attribute.entity.data_module.sql.sequence_by_name(self.sequence_name)
else
attribute.entity.data_module.sql.sequence(self.sequence_name, 'sql_type' => self.sql_type)
end
end
# TODO: MSSQL Specific
attr_writer :sparse
def sparse?
@sparse.nil? ? false : @sparse
end
# The calculation to create column
attr_accessor :calculation
def persistent_calculation=(persistent_calculation)
Domgen.error('Non calculated column can not be persistent') unless @calculation
@persistent_calculation = persistent_calculation
end
def persistent_calculation?
@persistent_calculation.nil? ? false : @persistent_calculation
end
def on_update=(on_update)
Domgen.error("on_update on #{column_name} is invalid as attribute is not a reference") unless attribute.reference?
Domgen.error("on_update #{on_update} on #{column_name} is invalid") unless self.class.change_actions.include?(on_update)
@on_update = on_update
end
def on_update
Domgen.error("on_update on #{name} is invalid as attribute is not a reference") unless attribute.reference?
@on_update.nil? ? :no_action : @on_update
end
def on_delete=(on_delete)
Domgen.error("on_delete on #{column_name} is invalid as attribute is not a reference") unless attribute.reference?
Domgen.error("on_delete #{on_delete} on #{column_name} is invalid") unless self.class.change_actions.include?(on_delete)
@on_delete = on_delete
end
def on_delete
Domgen.error("on_delete on #{name} is invalid as attribute is not a reference") unless attribute.reference?
@on_delete.nil? ? :no_action : @on_delete
end
def self.change_actions
[:cascade, :restrict, :set_null, :set_default, :no_action]
end
attr_accessor :default_value
def perform_complete
self.sequence if self.sequence?
end
end
facet.enhance(RemoteEntityAttribute) do
def dialect
attribute.remote_entity.data_module.sql.dialect
end
attr_accessor :column_name
def column_name
@column_name || attribute.name
end
def quoted_column_name
self.dialect.quote(self.column_name)
end
attr_writer :sql_type
def sql_type
@sql_type ||= self.dialect.column_type(self)
end
end
end
end
|
module Dooby
class CLIHelper
DEL_TASKS = "Sure you want to delete all the taks???".red_on_white.bold
TRASH = "Sure you want to delete the .dooby directory???".red_on_white.bold
def self.flush?
agree DEL_TASKS, true
end
def self.trash?
agree TRASH, true
end
def self.keep_asking(question, autocompletion = nil)
Readline.completion_append_character = " "
Readline.completion_proc = proc { |s| autocompletion.grep( /^#{Regexp.escape(s)}/ ) }
stty_save = `stty -g`.chomp
while value = Readline.readline(question, true)
exit if value == 'q'
yield value.chomp.strip
end
rescue Interrupt
system("stty", stty_save)
exit
end
end
end
avoid errors if autocompletion is not set in CLIHelper#keep_asking
module Dooby
class CLIHelper
DEL_TASKS = "Sure you want to delete all the taks???".red_on_white.bold
TRASH = "Sure you want to delete the .dooby directory???".red_on_white.bold
def self.flush?
agree DEL_TASKS, true
end
def self.trash?
agree TRASH, true
end
def self.keep_asking(question, autocompletion = [])
Readline.completion_append_character = " "
Readline.completion_proc = proc { |s| autocompletion.grep( /^#{Regexp.escape(s)}/ ) }
stty_save = `stty -g`.chomp
while value = Readline.readline(question, true)
exit if value == 'q'
yield value.chomp.strip
end
rescue Interrupt
system("stty", stty_save)
exit
end
end
end |
module Dslimple
VERSION = '1.1.0'.freeze
end
v2.0.0
module Dslimple
VERSION = '2.0.0'.freeze
end
|
require 'dumb_down_viewer/version'
require 'fileutils'
require 'find'
require 'nokogiri'
module DumbDownViewer
def self.collect_directories_and_files(path)
entries = Dir.entries(path) - ['.', '..']
entries.partition do |entry|
entry_path = File.expand_path(File.join(path, entry))
File.directory? entry_path
end
end
def self.build_node_tree(dir)
dirname, filename = File.split(dir)
DirNode.new(dirname, filename, 0).tap {|dir| dir.collect_entries }
end
class Node
attr_reader :sub_nodes, :directory, :name, :depth
def initialize(pwd, name, depth)
@directory = pwd
@name = name
@depth = depth
@name_with_path = pwd.empty? ? @name : File.join(pwd, name)
setup
end
def setup
end
def accept(visitor, memo)
visitor.visit(self, memo)
end
def to_s
@name
end
end
class DirNode < Node
attr_reader :directories, :files
def collect_entries
dirs, files = DumbDownViewer.collect_directories_and_files(@name_with_path)
depth = @depth + 1
@directories = entry_nodes(dirs, DirNode, depth)
@directories.each {|dir| dir.collect_entries }
@files = entry_nodes(files, FileNode, depth)
@sub_nodes = @files + @directories
end
def entry_nodes(nodes, node_class, depth)
nodes.map {|node| node_class.new(@name_with_path, node, depth) }
end
end
class FileNode < Node
attr_reader :extention
def setup
extract_extention
@sub_nodes = []
end
private
def extract_extention
m = /\.([^.]+)\Z/.match(@name)
@extention = m ? m[1] : ''
end
end
end
redefine DirNode#sub_nodes
require 'dumb_down_viewer/version'
require 'fileutils'
require 'find'
require 'nokogiri'
module DumbDownViewer
def self.collect_directories_and_files(path)
entries = Dir.entries(path) - ['.', '..']
entries.partition do |entry|
entry_path = File.expand_path(File.join(path, entry))
File.directory? entry_path
end
end
def self.build_node_tree(dir)
dirname, filename = File.split(dir)
DirNode.new(dirname, filename, 0).tap {|dir| dir.collect_entries }
end
class Node
attr_reader :sub_nodes, :directory, :name, :depth
def initialize(pwd, name, depth)
@directory = pwd
@name = name
@depth = depth
@name_with_path = pwd.empty? ? @name : File.join(pwd, name)
setup
end
def setup
end
def accept(visitor, memo)
visitor.visit(self, memo)
end
def to_s
@name
end
end
class DirNode < Node
attr_reader :directories, :files
def sub_nodes
@files + @directories
end
def collect_entries
dirs, files = DumbDownViewer.collect_directories_and_files(@name_with_path)
depth = @depth + 1
@directories = entry_nodes(dirs, DirNode, depth)
@directories.each {|dir| dir.collect_entries }
@files = entry_nodes(files, FileNode, depth)
end
def entry_nodes(nodes, node_class, depth)
nodes.map {|node| node_class.new(@name_with_path, node, depth) }
end
end
class FileNode < Node
attr_reader :extention
def setup
extract_extention
@sub_nodes = []
end
private
def extract_extention
m = /\.([^.]+)\Z/.match(@name)
@extention = m ? m[1] : ''
end
end
end
|
# require only 'concurrent/atom' once this issue is resolved:
# https://github.com/ruby-concurrency/concurrent-ruby/pull/377
require 'concurrent'
# encoding: utf-8
module Dynamoid
# Adapter's value-add:
# 1) For the rest of Dynamoid, the gateway to DynamoDB.
# 2) Allows switching `config.adapter` to ease development of a new adapter.
# 3) Caches the list of tables Dynamoid knows about.
class Adapter
def initialize
@adapter_ = Concurrent::Atom.new(nil)
@tables_ = Concurrent::Atom.new(nil)
end
def tables
if !@tables_.value
@tables_.swap{|value, args| benchmark('Cache Tables') {list_tables|| []}}
end
@tables_.value
end
# The actual adapter currently in use.
#
# @since 0.2.0
def adapter
if !@adapter_.value
adapter = self.class.adapter_plugin_class.new
adapter.connect! if adapter.respond_to?(:connect!)
@adapter_.compare_and_set(nil, adapter)
clear_cache!
end
@adapter_.value
end
def clear_cache!
@tables_.swap{|value, args| nil}
end
# Shows how long it takes a method to run on the adapter. Useful for generating logged output.
#
# @param [Symbol] method the name of the method to appear in the log
# @param [Array] args the arguments to the method to appear in the log
# @yield the actual code to benchmark
#
# @return the result of the yield
#
# @since 0.2.0
def benchmark(method, *args)
start = Time.now
result = yield
Dynamoid.logger.info "(#{((Time.now - start) * 1000.0).round(2)} ms) #{method.to_s.split('_').collect(&:upcase).join(' ')}#{ " - #{args.inspect}" unless args.nil? || args.empty? }"
return result
end
# Write an object to the adapter.
#
# @param [String] table the name of the table to write the object to
# @param [Object] object the object itself
# @param [Hash] options Options that are passed to the put_item call
#
# @return [Object] the persisted object
#
# @since 0.2.0
def write(table, object, options = nil)
put_item(table, object, options)
end
# Read one or many keys from the selected table.
# This method intelligently calls batch_get or get on the underlying adapter
# depending on whether ids is a range or a single key.
# If a range key is present, it will also interpolate that into the ids so
# that the batch get will acquire the correct record.
#
# @param [String] table the name of the table to write the object to
# @param [Array] ids to fetch, can also be a string of just one id
# @param [Hash] options: Passed to the underlying query. The :range_key option is required whenever the table has a range key,
# unless multiple ids are passed in.
#
# @since 0.2.0
def read(table, ids, options = {})
range_key = options.delete(:range_key)
if ids.respond_to?(:each)
ids = ids.collect{|id| range_key ? [id, range_key] : id}
batch_get_item({table => ids}, options)
else
options[:range_key] = range_key if range_key
get_item(table, ids, options)
end
end
# Delete an item from a table.
#
# @param [String] table the name of the table to write the object to
# @param [Array] ids to delete, can also be a string of just one id
# @param [Array] range_key of the record to delete, can also be a string of just one range_key
#
def delete(table, ids, options = {})
range_key = options[:range_key] #array of range keys that matches the ids passed in
if ids.respond_to?(:each)
if range_key.respond_to?(:each)
#turn ids into array of arrays each element being hash_key, range_key
ids = ids.each_with_index.map{|id,i| [id,range_key[i]]}
else
ids = range_key ? [[ids, range_key]] : ids
end
batch_delete_item(table => ids)
else
delete_item(table, ids, options)
end
end
# Scans a table. Generally quite slow; try to avoid using scan if at all possible.
#
# @param [String] table the name of the table to write the object to
# @param [Hash] scan_hash a hash of attributes: matching records will be returned by the scan
#
# @since 0.2.0
def scan(table, query, opts = {})
benchmark('Scan', table, query) {adapter.scan(table, query, opts)}
end
def create_table(table_name, key, options = {})
if !tables.include?(table_name)
benchmark('Create Table') { adapter.create_table(table_name, key, options) }
tables << table_name
end
end
# @since 0.2.0
def delete_table(table_name, options = {})
if tables.include?(table_name)
benchmark('Delete Table') { adapter.delete_table(table_name, options) }
idx = tables.index(table_name)
tables.delete_at(idx)
end
end
[:batch_get_item, :delete_item, :get_item, :list_tables, :put_item, :truncate, :batch_write_item, :batch_delete_item].each do |m|
# Method delegation with benchmark to the underlying adapter. Faster than relying on method_missing.
#
# @since 0.2.0
define_method(m) do |*args|
benchmark("#{m.to_s}", args) {adapter.send(m, *args)}
end
end
# Delegate all methods that aren't defind here to the underlying adapter.
#
# @since 0.2.0
def method_missing(method, *args, &block)
return benchmark(method, *args) {adapter.send(method, *args, &block)} if adapter.respond_to?(method)
super
end
# Query the DynamoDB table. This employs DynamoDB's indexes so is generally faster than scanning, but is
# only really useful for range queries, since it can only find by one hash key at once. Only provide
# one range key to the hash.
#
# @param [String] table_name the name of the table
# @param [Hash] opts the options to query the table with
# @option opts [String] :hash_value the value of the hash key to find
# @option opts [Range] :range_value find the range key within this range
# @option opts [Number] :range_greater_than find range keys greater than this
# @option opts [Number] :range_less_than find range keys less than this
# @option opts [Number] :range_gte find range keys greater than or equal to this
# @option opts [Number] :range_lte find range keys less than or equal to this
#
# @return [Array] an array of all matching items
#
def query(table_name, opts = {})
adapter.query(table_name, opts)
end
private
def self.adapter_plugin_class
unless Dynamoid.const_defined?(:AdapterPlugin) && Dynamoid::AdapterPlugin.const_defined?(Dynamoid::Config.adapter.camelcase)
require "dynamoid/adapter_plugin/#{Dynamoid::Config.adapter}"
end
Dynamoid::AdapterPlugin.const_get(Dynamoid::Config.adapter.camelcase)
end
end
end
revert spacing [ci skip]
# require only 'concurrent/atom' once this issue is resolved:
# https://github.com/ruby-concurrency/concurrent-ruby/pull/377
require 'concurrent'
# encoding: utf-8
module Dynamoid
# Adapter's value-add:
# 1) For the rest of Dynamoid, the gateway to DynamoDB.
# 2) Allows switching `config.adapter` to ease development of a new adapter.
# 3) Caches the list of tables Dynamoid knows about.
class Adapter
def initialize
@adapter_ = Concurrent::Atom.new(nil)
@tables_ = Concurrent::Atom.new(nil)
end
def tables
if !@tables_.value
@tables_.swap{|value, args| benchmark('Cache Tables') { list_tables || [] } }
end
@tables_.value
end
# The actual adapter currently in use.
#
# @since 0.2.0
def adapter
if !@adapter_.value
adapter = self.class.adapter_plugin_class.new
adapter.connect! if adapter.respond_to?(:connect!)
@adapter_.compare_and_set(nil, adapter)
clear_cache!
end
@adapter_.value
end
def clear_cache!
@tables_.swap{|value, args| nil}
end
# Shows how long it takes a method to run on the adapter. Useful for generating logged output.
#
# @param [Symbol] method the name of the method to appear in the log
# @param [Array] args the arguments to the method to appear in the log
# @yield the actual code to benchmark
#
# @return the result of the yield
#
# @since 0.2.0
def benchmark(method, *args)
start = Time.now
result = yield
Dynamoid.logger.info "(#{((Time.now - start) * 1000.0).round(2)} ms) #{method.to_s.split('_').collect(&:upcase).join(' ')}#{ " - #{args.inspect}" unless args.nil? || args.empty? }"
return result
end
# Write an object to the adapter.
#
# @param [String] table the name of the table to write the object to
# @param [Object] object the object itself
# @param [Hash] options Options that are passed to the put_item call
#
# @return [Object] the persisted object
#
# @since 0.2.0
def write(table, object, options = nil)
put_item(table, object, options)
end
# Read one or many keys from the selected table.
# This method intelligently calls batch_get or get on the underlying adapter
# depending on whether ids is a range or a single key.
# If a range key is present, it will also interpolate that into the ids so
# that the batch get will acquire the correct record.
#
# @param [String] table the name of the table to write the object to
# @param [Array] ids to fetch, can also be a string of just one id
# @param [Hash] options: Passed to the underlying query. The :range_key option is required whenever the table has a range key,
# unless multiple ids are passed in.
#
# @since 0.2.0
def read(table, ids, options = {})
range_key = options.delete(:range_key)
if ids.respond_to?(:each)
ids = ids.collect{|id| range_key ? [id, range_key] : id}
batch_get_item({table => ids}, options)
else
options[:range_key] = range_key if range_key
get_item(table, ids, options)
end
end
# Delete an item from a table.
#
# @param [String] table the name of the table to write the object to
# @param [Array] ids to delete, can also be a string of just one id
# @param [Array] range_key of the record to delete, can also be a string of just one range_key
#
def delete(table, ids, options = {})
range_key = options[:range_key] #array of range keys that matches the ids passed in
if ids.respond_to?(:each)
if range_key.respond_to?(:each)
#turn ids into array of arrays each element being hash_key, range_key
ids = ids.each_with_index.map{|id,i| [id,range_key[i]]}
else
ids = range_key ? [[ids, range_key]] : ids
end
batch_delete_item(table => ids)
else
delete_item(table, ids, options)
end
end
# Scans a table. Generally quite slow; try to avoid using scan if at all possible.
#
# @param [String] table the name of the table to write the object to
# @param [Hash] scan_hash a hash of attributes: matching records will be returned by the scan
#
# @since 0.2.0
def scan(table, query, opts = {})
benchmark('Scan', table, query) {adapter.scan(table, query, opts)}
end
def create_table(table_name, key, options = {})
if !tables.include?(table_name)
benchmark('Create Table') { adapter.create_table(table_name, key, options) }
tables << table_name
end
end
# @since 0.2.0
def delete_table(table_name, options = {})
if tables.include?(table_name)
benchmark('Delete Table') { adapter.delete_table(table_name, options) }
idx = tables.index(table_name)
tables.delete_at(idx)
end
end
[:batch_get_item, :delete_item, :get_item, :list_tables, :put_item, :truncate, :batch_write_item, :batch_delete_item].each do |m|
# Method delegation with benchmark to the underlying adapter. Faster than relying on method_missing.
#
# @since 0.2.0
define_method(m) do |*args|
benchmark("#{m.to_s}", args) {adapter.send(m, *args)}
end
end
# Delegate all methods that aren't defind here to the underlying adapter.
#
# @since 0.2.0
def method_missing(method, *args, &block)
return benchmark(method, *args) {adapter.send(method, *args, &block)} if adapter.respond_to?(method)
super
end
# Query the DynamoDB table. This employs DynamoDB's indexes so is generally faster than scanning, but is
# only really useful for range queries, since it can only find by one hash key at once. Only provide
# one range key to the hash.
#
# @param [String] table_name the name of the table
# @param [Hash] opts the options to query the table with
# @option opts [String] :hash_value the value of the hash key to find
# @option opts [Range] :range_value find the range key within this range
# @option opts [Number] :range_greater_than find range keys greater than this
# @option opts [Number] :range_less_than find range keys less than this
# @option opts [Number] :range_gte find range keys greater than or equal to this
# @option opts [Number] :range_lte find range keys less than or equal to this
#
# @return [Array] an array of all matching items
#
def query(table_name, opts = {})
adapter.query(table_name, opts)
end
private
def self.adapter_plugin_class
unless Dynamoid.const_defined?(:AdapterPlugin) && Dynamoid::AdapterPlugin.const_defined?(Dynamoid::Config.adapter.camelcase)
require "dynamoid/adapter_plugin/#{Dynamoid::Config.adapter}"
end
Dynamoid::AdapterPlugin.const_get(Dynamoid::Config.adapter.camelcase)
end
end
end
|
class EApp
module Setup
# set the baseurl for assets.
# by default, assets URL is empty.
#
# @example assets_url not set
# script_tag 'master.js'
# => <script src="master.js"
# style_tag 'theme.css'
# => <link href="theme.css"
#
# @example assets_url set to /assets
#
# script_tag 'master.js'
# => <script src="/assets/master.js"
# style_tag 'theme.css'
# => <link href="/assets/theme.css"
#
# @note
# if second argument given, Espresso will reserve given URL for serving assets,
# so make sure it does not interfere with your actions.
#
# @example
#
# class App < E
# map '/'
#
# # actions inside controller are not affected
# # cause app is not set to serve assets, thus no URLs are reserved.
#
# end
#
# app = EApp.new do
# assets_url '/'
# mount App
# end
# app.run
#
#
# @example
#
# class App < E
# map '/'
#
# # no action here will work cause "/" URL is reserved for assets
#
# end
#
# app = EApp.new do
# assets_url '/', :serve
# mount App
# end
# app.run
#
# @example
#
# class App < E
# map '/'
#
# def assets
# # this action wont work cause "/assets" URL is reserved for assets
# end
#
# # other actions will work normally
#
# end
#
# app = EApp.new do
# assets_url '/assets', :serve
# mount App
# end
# app.run
#
def assets_url url = nil, serve = nil
if url
assets_url = rootify_url(url)
@assets_url = (assets_url =~ /\/\Z/ ? assets_url : assets_url << '/').freeze
@assets_server = true if serve
end
@assets_url ||= ''
end
alias assets_map assets_url
def assets_server?
@assets_server
end
# used when app is serving assets.
# by default, Espresso will serve files found under public/ folder inside app root.
# use `assets_path` at class level to set custom path.
#
# @note `assets_path` is used to set paths relative to app root.
# to set absolute path to assets, use `assets_fullpath` instead.
#
def assets_path path = nil
@assets_path = root + normalize_path(path).freeze if path
@assets_path ||= '' << root << 'public/'.freeze
end
def assets_fullpath path = nil
@assets_fullpath = normalize_path(path).freeze if path
@assets_fullpath
end
end
class AssetsHelper
def initialize app
@app = app
@wd = ''.freeze
end
def path
(fullpath = @app.assets_fullpath) ? fullpath : @app.assets_path
end
def url path = nil
'' << @app.assets_url << @wd << (path||'')
end
# please note that all controllers share same assets instance,
# so if you do `assets.chdir` in one controller
# the change will be reflected in all controllers
#
# @example Slim engine
#
# - assets.chdir 'vendor'
# == script_tag 'jquery.js'
#
# - assets.chdir :bootstrap
# == script_tag 'js/bootstrap.min.js'
# == style_tag 'css/bootstrap.min.css'
# == style_tag 'css/bootstrap-responsive.min.css'
#
# - assets.chdir '../google-code-prettify'
# == script_tag 'prettify.js'
# == style_tag 'tomorrow-night-eighties.css'
#
# - assets.chdir '../noty'
# == script_tag 'jquery.noty.js'
# == script_tag 'layouts/top.js'
# == script_tag 'layouts/topRight.js'
# == script_tag 'themes/default.js'
#
# - assets.chdir '/'
# == script_tag 'master.js'
# == script_tag 'e-crudify-bootstrap.js'
# == style_tag 'master.css'
#
# will result in:
#
# <script src="/vendor/jquery.js" type="text/javascript"></script>
#
# <script src="/vendor/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
# <link href="/vendor/bootstrap/css/bootstrap.min.css" rel="stylesheet" />
# <link href="/vendor/bootstrap/css/bootstrap-responsive.min.css" rel="stylesheet" />
#
# <script src="/vendor/google-code-prettify/prettify.js" type="text/javascript"></script>
# <link href="/vendor/google-code-prettify/tomorrow-night-eighties.css" rel="stylesheet" />
#
# <script src="/vendor/noty/jquery.noty.js" type="text/javascript"></script>
# <script src="/vendor/noty/layouts/top.js" type="text/javascript"></script>
# <script src="/vendor/noty/layouts/topRight.js" type="text/javascript"></script>
# <script src="/vendor/noty/themes/default.js" type="text/javascript"></script>
#
# <script src="/master.js" type="text/javascript"></script>
# <script src="/e-crudify-bootstrap.js" type="text/javascript"></script>
# <link href="/master.css" rel="stylesheet" />
#
def chdir path
path = path.to_s
return @wd = '' if path == '/'
wd, path = @wd.split('/'), path.split('/')
path.each do |c|
c.empty? && next
c == '..' && wd.pop && next
wd << c
end
@wd = (wd.size > 0 ? wd.reject { |c| c.empty? }.join('/') << '/' : '').freeze
end
alias cd chdir
def opts_to_s opts
(@assets_opts ||= {})[opts] = opts.keys.inject([]) do |f, k|
f << '%s="%s"' % [k, ::CGI.escapeHTML(opts[k])]
end.join(' ')
end
end
def assets
@assets_helper ||= AssetsHelper.new(self)
end
end
class E
def assets
self.class.app.assets
end
def image_tag src = nil, opts = {}
src.is_a?(Hash) && (opts = src.dup) && (src = nil)
opted_src = opts.delete(:src)
src ||= opted_src || raise('Please provide image URL as first argument or :src option')
opts[:alt] ||= ::File.basename(src, ::File.extname(src))
"<img src=\"%s\" %s />\n" % [
opted_src ? opted_src : assets.url(src),
assets.opts_to_s(opts)
]
end
alias img_tag image_tag
def script_tag src = nil, opts = {}, &proc
src.is_a?(Hash) && (opts = src.dup) && (src = nil)
opts[:type] ||= 'text/javascript'
if proc
"<script %s>\n%s\n</script>\n" % [assets.opts_to_s(opts), proc.call]
else
opted_src = opts.delete(:src)
src ||= opted_src || raise('Please provide script URL as first argument or via :src option')
"<script src=\"%s\" %s></script>\n" % [
opted_src ? opted_src : assets.url(src),
assets.opts_to_s(opts)
]
end
end
def style_tag src = nil, opts = {}, &proc
src.is_a?(Hash) && (opts = src.dup) && (src = nil)
if proc
opts[:type] ||= 'text/css'
"<style %s>\n%s\n</style>\n" % [assets.opts_to_s(opts), proc.call]
else
opts[:rel] = 'stylesheet'
opted_src = opts.delete(:href) || opts.delete(:src)
src ||= opted_src || raise('Please URL as first argument or :href option')
"<link href=\"%s\" %s />\n" % [
opted_src ? opted_src : assets.url(src),
assets.opts_to_s(opts)
]
end
end
end
assets - do not rootify URLs starting with a protocol
class EApp
module Setup
# set the baseurl for assets.
# by default, assets URL is empty.
#
# @example assets_url not set
# script_tag 'master.js'
# => <script src="master.js"
# style_tag 'theme.css'
# => <link href="theme.css"
#
# @example assets_url set to /assets
#
# script_tag 'master.js'
# => <script src="/assets/master.js"
# style_tag 'theme.css'
# => <link href="/assets/theme.css"
#
# @note
# if second argument given, Espresso will reserve given URL for serving assets,
# so make sure it does not interfere with your actions.
#
# @example
#
# class App < E
# map '/'
#
# # actions inside controller are not affected
# # cause app is not set to serve assets, thus no URLs are reserved.
#
# end
#
# app = EApp.new do
# assets_url '/'
# mount App
# end
# app.run
#
#
# @example
#
# class App < E
# map '/'
#
# # no action here will work cause "/" URL is reserved for assets
#
# end
#
# app = EApp.new do
# assets_url '/', :serve
# mount App
# end
# app.run
#
# @example
#
# class App < E
# map '/'
#
# def assets
# # this action wont work cause "/assets" URL is reserved for assets
# end
#
# # other actions will work normally
#
# end
#
# app = EApp.new do
# assets_url '/assets', :serve
# mount App
# end
# app.run
#
def assets_url url = nil, serve = nil
if url
assets_url = url =~ /\A[\w|\d]+\:\/\// ? url : rootify_url(url)
@assets_url = (assets_url =~ /\/\Z/ ? assets_url : assets_url << '/').freeze
@assets_server = true if serve
end
@assets_url ||= ''
end
alias assets_map assets_url
def assets_server?
@assets_server
end
# used when app is serving assets.
# by default, Espresso will serve files found under public/ folder inside app root.
# use `assets_path` at class level to set custom path.
#
# @note `assets_path` is used to set paths relative to app root.
# to set absolute path to assets, use `assets_fullpath` instead.
#
def assets_path path = nil
@assets_path = root + normalize_path(path).freeze if path
@assets_path ||= '' << root << 'public/'.freeze
end
def assets_fullpath path = nil
@assets_fullpath = normalize_path(path).freeze if path
@assets_fullpath
end
end
class AssetsHelper
def initialize app
@app = app
@wd = ''.freeze
end
def path
(fullpath = @app.assets_fullpath) ? fullpath : @app.assets_path
end
def url path = nil
'' << @app.assets_url << @wd << (path||'')
end
# please note that all controllers share same assets instance,
# so if you do `assets.chdir` in one controller
# the change will be reflected in all controllers
#
# @example Slim engine
#
# - assets.chdir 'vendor'
# == script_tag 'jquery.js'
#
# - assets.chdir :bootstrap
# == script_tag 'js/bootstrap.min.js'
# == style_tag 'css/bootstrap.min.css'
# == style_tag 'css/bootstrap-responsive.min.css'
#
# - assets.chdir '../google-code-prettify'
# == script_tag 'prettify.js'
# == style_tag 'tomorrow-night-eighties.css'
#
# - assets.chdir '../noty'
# == script_tag 'jquery.noty.js'
# == script_tag 'layouts/top.js'
# == script_tag 'layouts/topRight.js'
# == script_tag 'themes/default.js'
#
# - assets.chdir '/'
# == script_tag 'master.js'
# == script_tag 'e-crudify-bootstrap.js'
# == style_tag 'master.css'
#
# will result in:
#
# <script src="/vendor/jquery.js" type="text/javascript"></script>
#
# <script src="/vendor/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
# <link href="/vendor/bootstrap/css/bootstrap.min.css" rel="stylesheet" />
# <link href="/vendor/bootstrap/css/bootstrap-responsive.min.css" rel="stylesheet" />
#
# <script src="/vendor/google-code-prettify/prettify.js" type="text/javascript"></script>
# <link href="/vendor/google-code-prettify/tomorrow-night-eighties.css" rel="stylesheet" />
#
# <script src="/vendor/noty/jquery.noty.js" type="text/javascript"></script>
# <script src="/vendor/noty/layouts/top.js" type="text/javascript"></script>
# <script src="/vendor/noty/layouts/topRight.js" type="text/javascript"></script>
# <script src="/vendor/noty/themes/default.js" type="text/javascript"></script>
#
# <script src="/master.js" type="text/javascript"></script>
# <script src="/e-crudify-bootstrap.js" type="text/javascript"></script>
# <link href="/master.css" rel="stylesheet" />
#
def chdir path
path = path.to_s
return @wd = '' if path == '/'
wd, path = @wd.split('/'), path.split('/')
path.each do |c|
c.empty? && next
c == '..' && wd.pop && next
wd << c
end
@wd = (wd.size > 0 ? wd.reject { |c| c.empty? }.join('/') << '/' : '').freeze
end
alias cd chdir
def opts_to_s opts
(@assets_opts ||= {})[opts] = opts.keys.inject([]) do |f, k|
f << '%s="%s"' % [k, ::CGI.escapeHTML(opts[k])]
end.join(' ')
end
end
def assets
@assets_helper ||= AssetsHelper.new(self)
end
end
class E
def assets
self.class.app.assets
end
def image_tag src = nil, opts = {}
src.is_a?(Hash) && (opts = src.dup) && (src = nil)
opted_src = opts.delete(:src)
src ||= opted_src || raise('Please provide image URL as first argument or :src option')
opts[:alt] ||= ::File.basename(src, ::File.extname(src))
"<img src=\"%s\" %s />\n" % [
opted_src ? opted_src : assets.url(src),
assets.opts_to_s(opts)
]
end
alias img_tag image_tag
def script_tag src = nil, opts = {}, &proc
src.is_a?(Hash) && (opts = src.dup) && (src = nil)
opts[:type] ||= 'text/javascript'
if proc
"<script %s>\n%s\n</script>\n" % [assets.opts_to_s(opts), proc.call]
else
opted_src = opts.delete(:src)
src ||= opted_src || raise('Please provide script URL as first argument or via :src option')
"<script src=\"%s\" %s></script>\n" % [
opted_src ? opted_src : assets.url(src),
assets.opts_to_s(opts)
]
end
end
def style_tag src = nil, opts = {}, &proc
src.is_a?(Hash) && (opts = src.dup) && (src = nil)
if proc
opts[:type] ||= 'text/css'
"<style %s>\n%s\n</style>\n" % [assets.opts_to_s(opts), proc.call]
else
opts[:rel] = 'stylesheet'
opted_src = opts.delete(:href) || opts.delete(:src)
src ||= opted_src || raise('Please URL as first argument or :href option')
"<link href=\"%s\" %s />\n" % [
opted_src ? opted_src : assets.url(src),
assets.opts_to_s(opts)
]
end
end
end
|
module Ecm
  module Core
    # Gem version. Frozen so the shared constant cannot be mutated.
    VERSION = '0.0.1'.freeze
  end
end
Bumped version to 0.0.2
module Ecm
  module Core
    # Gem version. Frozen so the shared constant cannot be mutated.
    VERSION = '0.0.2'.freeze
  end
end
|
require 'sequel'
module ETL::Input
# Input class that uses Sequel connection for accessing data. Currently it
# just supports raw SQL with query param replacement.
# https://github.com/jeremyevans/sequel
class Sequel < Base
attr_accessor :params, :sql, :sql_params, :test_connection
# Construct reader based on Sequel connection params and SQL query
def initialize(params, sql = nil, sql_params = nil)
super()
@params = params
@sql = sql
@sql_params = sql_params
@conn = nil
@test_connection = false
end
def conn
@conn ||= ::Sequel.connect(@params)
@conn.test_connection if @test_connection
end
# Display connection string for this input
# TODO: Add table name to this - easier if we're given a Sequel dataset
def name
"Sequel #{@params[:adapter]}:#{@params[:user]}@#{@params[:host]}/#{@params[:database]}"
end
def query_debug_str
str = "Executing Sequel query #{@sql}"
unless @sql_params.nil? || @sql_params.empty?
if @sql_params.respond_to?(:join)
param_str = @sql_params.join(", ")
elsif
param_str = @sql_params.to_s
end
str += " with params #{param_str}"
else
str += " with no params"
end
str
end
# Reads each row from the query and passes it to the specified block.
def each_row(batch = ETL::Batch.new)
log.debug(query_debug_str)
# block used to process each row
row_proc = Proc.new do |row_in|
row = {}
# Sequel returns columns as symbols so we need to translate to strings
row_in.each do |k, v|
row[k.to_s] = v
end
transform_row!(row)
yield row
@rows_processed += 1
end
@rows_processed = 0
# need to splat differently depending on params type
if @sql_params.is_a?(Hash)
conn.fetch(sql, **@sql_params, &row_proc)
else
conn.fetch(sql, *@sql_params, &row_proc)
end
end
end
end
Fix sequel input
require 'sequel'
module ETL::Input
# Input class that uses Sequel connection for accessing data. Currently it
# just supports raw SQL with query param replacement.
# https://github.com/jeremyevans/sequel
class Sequel < Base
attr_accessor :params, :sql, :sql_params, :test_connection
# Construct reader based on Sequel connection params and SQL query
def initialize(params, sql = nil, sql_params = nil)
super()
@params = params
@sql = sql
@sql_params = sql_params
@conn = nil
@test_connection = false
end
def conn
@conn ||= ::Sequel.connect(@params)
@conn.test_connection if @test_connection
@conn
end
# Display connection string for this input
# TODO: Add table name to this - easier if we're given a Sequel dataset
def name
"Sequel #{@params[:adapter]}:#{@params[:user]}@#{@params[:host]}/#{@params[:database]}"
end
def query_debug_str
str = "Executing Sequel query #{@sql}"
unless @sql_params.nil? || @sql_params.empty?
if @sql_params.respond_to?(:join)
param_str = @sql_params.join(", ")
elsif
param_str = @sql_params.to_s
end
str += " with params #{param_str}"
else
str += " with no params"
end
str
end
# Reads each row from the query and passes it to the specified block.
def each_row(batch = ETL::Batch.new)
log.debug(query_debug_str)
# block used to process each row
row_proc = Proc.new do |row_in|
row = {}
# Sequel returns columns as symbols so we need to translate to strings
row_in.each do |k, v|
row[k.to_s] = v
end
transform_row!(row)
yield row
@rows_processed += 1
end
@rows_processed = 0
# need to splat differently depending on params type
if @sql_params.is_a?(Hash)
conn.fetch(sql, **@sql_params, &row_proc)
else
conn.fetch(sql, *@sql_params, &row_proc)
end
end
end
end
|
require 'forwardable'
module Everything
  # A piece of writing identified by its directory under Everything.path;
  # exposes lazily-built content and metadata wrappers.
  class Piece
    extend Forwardable
    attr_reader :full_path
    def initialize(full_path)
      @full_path = full_path
    end
    # Absolute directory of the piece under Everything.path.
    def absolute_dir
      @absolute_dir ||= File.join(Everything.path, dir)
    end
    # Absolute path to the piece's content file.
    def absolute_path
      @absolute_path ||= File.join(absolute_dir, file_name)
    end
    # Lazily-built content wrapper for this piece.
    def content
      @content ||= Content.new(full_path)
    end
    # Directory of the piece relative to Everything.path.
    def dir
      @dir ||= calculated_dir
    end
    def_delegators :content, :body, :file_name, :raw_markdown, :raw_markdown=, :title
    # Lazily-built metadata wrapper for this piece.
    def metadata
      @metadata ||= Metadata.new(full_path)
    end
    # Whether the piece's metadata marks it as public.
    def public?
      metadata['public']
    end
    def_delegators :metadata, :raw_yaml, :raw_yaml=
    # Basename of the piece's directory.
    def name
      @name ||= File.basename(full_path)
    end
    # Path of the content file relative to Everything.path.
    def path
      @path ||= File.join(dir, content.file_name)
    end
    # Persists both content and metadata to disk.
    def save
      content.save
      metadata.save
    end
    private
    # Computes the piece's directory relative to Everything.path.
    def calculated_dir
      everything_pathname = Pathname.new(Everything.path)
      full_pathname = Pathname.new(full_path)
      relative_pathname = full_pathname.relative_path_from(everything_pathname)
      relative_pathname.to_s
    end
  end
end
require 'everything/piece/content'
require 'everything/piece/metadata'
Remove TODOs that are done
require 'forwardable'
module Everything
class Piece
extend Forwardable
attr_reader :full_path
def initialize(full_path)
@full_path = full_path
end
def absolute_dir
@absolute_dir ||= File.join(Everything.path, dir)
end
def absolute_path
@absolute_path ||= File.join(absolute_dir, file_name)
end
def content
@content ||= Content.new(full_path)
end
def dir
@dir ||= calculated_dir
end
def_delegators :content, :body, :file_name, :raw_markdown, :raw_markdown=, :title
def metadata
@metadata ||= Metadata.new(full_path)
end
def public?
metadata['public']
end
def_delegators :metadata, :raw_yaml, :raw_yaml=
def name
@name ||= File.basename(full_path)
end
def path
@path ||= File.join(dir, content.file_name)
end
def save
content.save
metadata.save
end
private
def calculated_dir
everything_pathname = Pathname.new(Everything.path)
full_pathname = Pathname.new(full_path)
relative_pathname = full_pathname.relative_path_from(everything_pathname)
relative_pathname.to_s
end
end
end
require 'everything/piece/content'
require 'everything/piece/metadata'
|
module Excon
class Connection
attr_reader :connection, :proxy
# Initializes a new Connection instance
# @param [String] url The destination URL
# @param [Hash<Symbol, >] params One or more optional params
# @option params [String] :body Default text to be sent over a socket. Only used if :body absent in Connection#request params
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request. Only used if params[:headers] is not supplied to Connection#request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path Default path; appears after 'scheme://host:port/'. Only used if params[:path] is not supplied to Connection#request
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query Default query; appended to the 'scheme://host:port/path/' in the form of '?key=value'. Will only be used if params[:query] is not supplied to Connection#request
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
# @option params [String] :proxy Proxy server; e.g. 'http://myproxy.com:8888'
# @option params [Fixnum] :retry_limit Set how many times we'll retry a failed request. (Default 4)
# @option params [Class] :instrumentor Responds to #instrument as in ActiveSupport::Notifications
# @option params [String] :instrumentor_name Name prefix for #instrument events. Defaults to 'excon'
def initialize(url, params = {})
uri = URI.parse(url)
@connection = Excon.defaults.merge({
:host => uri.host,
:host_port => '' << uri.host << ':' << uri.port.to_s,
:path => uri.path,
:port => uri.port.to_s,
:query => uri.query,
:scheme => uri.scheme,
}).merge!(params)
# merge does not deep-dup, so make sure headers is not the original
@connection[:headers] = @connection[:headers].dup
@proxy = nil
if @connection[:scheme] == HTTPS && (ENV.has_key?('https_proxy') || ENV.has_key?('HTTPS_PROXY'))
@proxy = setup_proxy(ENV['https_proxy'] || ENV['HTTPS_PROXY'])
elsif (ENV.has_key?('http_proxy') || ENV.has_key?('HTTP_PROXY'))
@proxy = setup_proxy(ENV['http_proxy'] || ENV['HTTP_PROXY'])
elsif @connection.has_key?(:proxy)
@proxy = setup_proxy(@connection[:proxy])
end
if @proxy
@connection[:headers]['Proxy-Connection'] ||= 'Keep-Alive'
# https credentials happen in handshake
if @connection[:scheme] == 'http' && (@proxy[:user] || @proxy[:password])
auth = ['' << @proxy[:user].to_s << ':' << @proxy[:password].to_s].pack('m').delete(Excon::CR_NL)
@connection[:headers]['Proxy-Authorization'] = 'Basic ' << auth
end
end
if ENV.has_key?('EXCON_DEBUG') || ENV.has_key?('EXCON_STANDARD_INSTRUMENTOR')
@connection[:instrumentor] = Excon::StandardInstrumentor
end
# Use Basic Auth if url contains a login
if uri.user || uri.password
@connection[:headers]['Authorization'] ||= 'Basic ' << ['' << uri.user.to_s << ':' << uri.password.to_s].pack('m').delete(Excon::CR_NL)
end
@socket_key = '' << @connection[:host_port]
reset
end
# Sends the supplied request to the destination host.
# @yield [chunk] @see Response#self.parse
# @param [Hash<Symbol, >] params One or more optional params, override defaults set in Connection.new
# @option params [String] :body text to be sent over a socket
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path appears after 'scheme://host:port/'
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query appended to the 'scheme://host:port/path/' in the form of '?key=value'
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
def request(params, &block)
# connection has defaults, merge in new params to override
params = @connection.merge(params)
params[:host_port] = '' << params[:host] << ':' << params[:port].to_s
params[:headers] = @connection[:headers].merge(params[:headers] || {})
params[:headers]['Host'] ||= '' << params[:host_port]
# if path is empty or doesn't start with '/', insert one
unless params[:path][0, 1] == '/'
params[:path].insert(0, '/')
end
if block_given?
$stderr.puts("Excon requests with a block are deprecated, pass :response_block instead (#{caller.first})")
params[:response_block] = Proc.new
end
if params.has_key?(:instrumentor)
if (retries_remaining ||= params[:retry_limit]) < params[:retry_limit]
event_name = "#{params[:instrumentor_name]}.retry"
else
event_name = "#{params[:instrumentor_name]}.request"
end
response = params[:instrumentor].instrument(event_name, params) do
request_kernel(params)
end
params[:instrumentor].instrument("#{params[:instrumentor_name]}.response", response.attributes)
response
else
request_kernel(params)
end
rescue => request_error
if params[:idempotent] && [Excon::Errors::Timeout, Excon::Errors::SocketError,
Excon::Errors::HTTPStatusError].any? {|ex| request_error.kind_of? ex }
retries_remaining ||= params[:retry_limit]
retries_remaining -= 1
if retries_remaining > 0
retry
else
if params.has_key?(:instrumentor)
params[:instrumentor].instrument("#{params[:instrumentor_name]}.error", :error => request_error)
end
raise(request_error)
end
else
if params.has_key?(:instrumentor)
params[:instrumentor].instrument("#{params[:instrumentor_name]}.error", :error => request_error)
end
raise(request_error)
end
end
def reset
(old_socket = sockets.delete(@socket_key)) && old_socket.close
end
# Generate HTTP request verb methods
Excon::HTTP_VERBS.each do |method|
class_eval <<-DEF, __FILE__, __LINE__ + 1
def #{method}(params={}, &block)
request(params.merge!(:method => :#{method}), &block)
end
DEF
end
def retry_limit=(new_retry_limit)
$stderr.puts("Excon::Connection#retry_limit= is deprecated, pass :retry_limit to the initializer (#{caller.first})")
@connection[:retry_limit] = new_retry_limit
end
def retry_limit
$stderr.puts("Excon::Connection#retry_limit is deprecated, pass :retry_limit to the initializer (#{caller.first})")
@connection[:retry_limit] ||= DEFAULT_RETRY_LIMIT
end
def inspect
vars = instance_variables.inject({}) do |accum, var|
accum.merge!(var.to_sym => instance_variable_get(var))
end
if vars[:'@connection'][:headers].has_key?('Authorization')
vars[:'@connection'] = vars[:'@connection'].dup
vars[:'@connection'][:headers] = vars[:'@connection'][:headers].dup
vars[:'@connection'][:headers]['Authorization'] = REDACTED
end
inspection = '#<Excon::Connection:'
inspection << (object_id << 1).to_s(16)
vars.each do |key, value|
inspection << ' ' << key.to_s << '=' << value.inspect
end
inspection << '>'
inspection
end
private
def detect_content_length(body)
if body.is_a?(String)
if FORCE_ENC
body.force_encoding('BINARY')
end
body.length
elsif body.respond_to?(:size)
# IO object: File, Tempfile, etc.
body.size
else
begin
File.size(body) # for 1.8.7 where file does not have size
rescue
0
end
end
end
def request_kernel(params)
begin
response = if params[:mock]
invoke_stub(params)
else
socket.params = params
# start with "METHOD /path"
request = params[:method].to_s.upcase << ' '
if @proxy
request << params[:scheme] << '://' << params[:host_port]
end
request << params[:path]
# add query to path, if there is one
case params[:query]
when String
request << '?' << params[:query]
when Hash
request << '?'
params[:query].each do |key, values|
if values.nil?
request << key.to_s << '&'
else
[*values].each do |value|
request << key.to_s << '=' << CGI.escape(value.to_s) << '&'
end
end
end
request.chop! # remove trailing '&'
end
# finish first line with "HTTP/1.1\r\n"
request << HTTP_1_1
if params.has_key?(:request_block)
params[:headers]['Transfer-Encoding'] = 'chunked'
elsif ! (params[:method].to_s.casecmp('GET') == 0 && params[:body].nil?)
# The HTTP spec isn't clear on it, but specifically, GET requests don't usually send bodies;
# if they don't, sending Content-Length:0 can cause issues.
params[:headers]['Content-Length'] = detect_content_length(params[:body])
end
# add headers to request
params[:headers].each do |key, values|
[*values].each do |value|
request << key.to_s << ': ' << value.to_s << CR_NL
end
end
# add additional "\r\n" to indicate end of headers
request << CR_NL
# write out the request, sans body
socket.write(request)
# write out the body
if params.has_key?(:request_block)
while true
chunk = params[:request_block].call
if FORCE_ENC
chunk.force_encoding('BINARY')
end
if chunk.length > 0
socket.write(chunk.length.to_s(16) << CR_NL << chunk << CR_NL)
else
socket.write('0' << CR_NL << CR_NL)
break
end
end
elsif !params[:body].nil?
if params[:body].is_a?(String)
unless params[:body].empty?
socket.write(params[:body])
end
else
if params[:body].respond_to?(:binmode)
params[:body].binmode
end
if params[:body].respond_to?(:pos=)
params[:body].pos = 0
end
while chunk = params[:body].read(params[:chunk_size])
socket.write(chunk)
end
end
end
# read the response
response = Excon::Response.parse(socket, params)
if response.headers['Connection'] == 'close'
reset
end
response
end
rescue Excon::Errors::StubNotFound, Excon::Errors::Timeout => error
raise(error)
rescue => socket_error
reset
raise(Excon::Errors::SocketError.new(socket_error))
end
if params.has_key?(:expects) && ![*params[:expects]].include?(response.status)
reset
raise(Excon::Errors.status_error(params, response))
else
response
end
end
def invoke_stub(params)
# convert File/Tempfile body to string before matching:
unless params[:body].nil? || params[:body].is_a?(String)
if params[:body].respond_to?(:binmode)
params[:body].binmode
end
if params[:body].respond_to?(:rewind)
params[:body].rewind
end
params[:body] = params[:body].read
end
params[:captures] = {:headers => {}} # setup data to hold captures
Excon.stubs.each do |stub, response|
headers_match = !stub.has_key?(:headers) || stub[:headers].keys.all? do |key|
case value = stub[:headers][key]
when Regexp
if match = value.match(params[:headers][key])
params[:captures][:headers][key] = match.captures
end
match
else
value == params[:headers][key]
end
end
non_headers_match = (stub.keys - [:headers]).all? do |key|
case value = stub[key]
when Regexp
if match = value.match(params[key])
params[:captures][key] = match.captures
end
match
else
value == params[key]
end
end
if headers_match && non_headers_match
response_attributes = case response
when Proc
response.call(params)
else
response
end
if params[:expects] && ![*params[:expects]].include?(response_attributes[:status])
# don't pass stuff into a block if there was an error
elsif params.has_key?(:response_block) && response_attributes.has_key?(:body)
body = response_attributes.delete(:body)
content_length = remaining = body.bytesize
i = 0
while i < body.length
params[:response_block].call(body[i, params[:chunk_size]], [remaining - params[:chunk_size], 0].max, content_length)
remaining -= params[:chunk_size]
i += params[:chunk_size]
end
end
return Excon::Response.new(response_attributes)
end
end
# if we reach here no stubs matched
raise(Excon::Errors::StubNotFound.new('no stubs matched ' << params.inspect))
end
def socket
sockets[@socket_key] ||= if @connection[:scheme] == HTTPS
Excon::SSLSocket.new(@connection, @proxy)
else
Excon::Socket.new(@connection, @proxy)
end
end
def sockets
Thread.current[:_excon_sockets] ||= {}
end
def setup_proxy(proxy)
uri = URI.parse(proxy)
unless uri.host and uri.port and uri.scheme
raise Excon::Errors::ProxyParseError, "Proxy is invalid"
end
{
:host => uri.host,
:host_port => '' << uri.host << ':' << uri.port.to_s,
:password => uri.password,
:port => uri.port,
:scheme => uri.scheme,
:user => uri.user
}
end
end
end
headers/query [*values].each should be consistent between 1.8.7 and 1.9.3
Fixes #174
module Excon
class Connection
attr_reader :connection, :proxy
# Initializes a new Connection instance
# @param [String] url The destination URL
# @param [Hash<Symbol, >] params One or more optional params
# @option params [String] :body Default text to be sent over a socket. Only used if :body absent in Connection#request params
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request. Only used if params[:headers] is not supplied to Connection#request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path Default path; appears after 'scheme://host:port/'. Only used if params[:path] is not supplied to Connection#request
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query Default query; appended to the 'scheme://host:port/path/' in the form of '?key=value'. Will only be used if params[:query] is not supplied to Connection#request
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
# @option params [String] :proxy Proxy server; e.g. 'http://myproxy.com:8888'
# @option params [Fixnum] :retry_limit Set how many times we'll retry a failed request. (Default 4)
# @option params [Class] :instrumentor Responds to #instrument as in ActiveSupport::Notifications
# @option params [String] :instrumentor_name Name prefix for #instrument events. Defaults to 'excon'
def initialize(url, params = {})
uri = URI.parse(url)
# Excon defaults, overridden by URL-derived values, overridden in turn by
# caller-supplied params (params win on conflict).
@connection = Excon.defaults.merge({
:host => uri.host,
:host_port => '' << uri.host << ':' << uri.port.to_s,
:path => uri.path,
:port => uri.port.to_s,
:query => uri.query,
:scheme => uri.scheme,
}).merge!(params)
# merge does not deep-dup, so make sure headers is not the original
@connection[:headers] = @connection[:headers].dup
@proxy = nil
# Proxy resolution order: https_proxy env vars (HTTPS connections only),
# then http_proxy env vars, then the :proxy param.
if @connection[:scheme] == HTTPS && (ENV.has_key?('https_proxy') || ENV.has_key?('HTTPS_PROXY'))
@proxy = setup_proxy(ENV['https_proxy'] || ENV['HTTPS_PROXY'])
elsif (ENV.has_key?('http_proxy') || ENV.has_key?('HTTP_PROXY'))
@proxy = setup_proxy(ENV['http_proxy'] || ENV['HTTP_PROXY'])
elsif @connection.has_key?(:proxy)
@proxy = setup_proxy(@connection[:proxy])
end
if @proxy
@connection[:headers]['Proxy-Connection'] ||= 'Keep-Alive'
# https credentials happen in handshake
if @connection[:scheme] == 'http' && (@proxy[:user] || @proxy[:password])
# pack('m') base64-encodes; delete strips the newline pack appends
auth = ['' << @proxy[:user].to_s << ':' << @proxy[:password].to_s].pack('m').delete(Excon::CR_NL)
@connection[:headers]['Proxy-Authorization'] = 'Basic ' << auth
end
end
if ENV.has_key?('EXCON_DEBUG') || ENV.has_key?('EXCON_STANDARD_INSTRUMENTOR')
@connection[:instrumentor] = Excon::StandardInstrumentor
end
# Use Basic Auth if url contains a login
if uri.user || uri.password
@connection[:headers]['Authorization'] ||= 'Basic ' << ['' << uri.user.to_s << ':' << uri.password.to_s].pack('m').delete(Excon::CR_NL)
end
# Sockets are cached per host:port (see #sockets)
@socket_key = '' << @connection[:host_port]
reset
end
# Sends the supplied request to the destination host.
# @yield [chunk] @see Response#self.parse
# @param [Hash<Symbol, >] params One or more optional params, override defaults set in Connection.new
# @option params [String] :body text to be sent over a socket
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path appears after 'scheme://host:port/'
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query appended to the 'scheme://host:port/path/' in the form of '?key=value'
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
def request(params, &block)
# connection has defaults, merge in new params to override
params = @connection.merge(params)
params[:host_port] = '' << params[:host] << ':' << params[:port].to_s
params[:headers] = @connection[:headers].merge(params[:headers] || {})
params[:headers]['Host'] ||= '' << params[:host_port]
# if path is empty or doesn't start with '/', insert one
unless params[:path][0, 1] == '/'
params[:path].insert(0, '/')
end
if block_given?
$stderr.puts("Excon requests with a block are deprecated, pass :response_block instead (#{caller.first})")
# Proc.new with no explicit block captures the block given to this method
params[:response_block] = Proc.new
end
if params.has_key?(:instrumentor)
# retries_remaining is only decremented by the rescue below, so a value
# lower than :retry_limit here means this pass is a retry
if (retries_remaining ||= params[:retry_limit]) < params[:retry_limit]
event_name = "#{params[:instrumentor_name]}.retry"
else
event_name = "#{params[:instrumentor_name]}.request"
end
response = params[:instrumentor].instrument(event_name, params) do
request_kernel(params)
end
params[:instrumentor].instrument("#{params[:instrumentor_name]}.response", response.attributes)
response
else
request_kernel(params)
end
rescue => request_error
# Retry (re-entering the method body via `retry`) only for :idempotent
# requests that failed with a timeout, socket, or HTTP status error.
if params[:idempotent] && [Excon::Errors::Timeout, Excon::Errors::SocketError,
Excon::Errors::HTTPStatusError].any? {|ex| request_error.kind_of? ex }
retries_remaining ||= params[:retry_limit]
retries_remaining -= 1
if retries_remaining > 0
retry
else
if params.has_key?(:instrumentor)
params[:instrumentor].instrument("#{params[:instrumentor_name]}.error", :error => request_error)
end
raise(request_error)
end
else
if params.has_key?(:instrumentor)
params[:instrumentor].instrument("#{params[:instrumentor_name]}.error", :error => request_error)
end
raise(request_error)
end
end
def reset
  # Drop this connection's cached socket (if any) and close it, so the
  # next request opens a fresh one.
  old_socket = sockets.delete(@socket_key)
  old_socket.close if old_socket
end
# Generate HTTP request verb methods
# Defines one convenience method per entry in Excon::HTTP_VERBS, each
# delegating to #request with :method merged in.
# NOTE: merge! mutates the caller-supplied params hash.
Excon::HTTP_VERBS.each do |method|
class_eval <<-DEF, __FILE__, __LINE__ + 1
def #{method}(params={}, &block)
request(params.merge!(:method => :#{method}), &block)
end
DEF
end
def retry_limit=(new_retry_limit)
  # Deprecated writer; prefer passing :retry_limit to the initializer.
  warning = "Excon::Connection#retry_limit= is deprecated, pass :retry_limit to the initializer (#{caller.first})"
  $stderr.puts(warning)
  @connection[:retry_limit] = new_retry_limit
end
def retry_limit
  # Deprecated reader; prefer passing :retry_limit to the initializer.
  # Lazily falls back to DEFAULT_RETRY_LIMIT when unset.
  warning = "Excon::Connection#retry_limit is deprecated, pass :retry_limit to the initializer (#{caller.first})"
  $stderr.puts(warning)
  @connection[:retry_limit] ||= DEFAULT_RETRY_LIMIT
end
def inspect
vars = instance_variables.inject({}) do |accum, var|
accum.merge!(var.to_sym => instance_variable_get(var))
end
if vars[:'@connection'][:headers].has_key?('Authorization')
vars[:'@connection'] = vars[:'@connection'].dup
vars[:'@connection'][:headers] = vars[:'@connection'][:headers].dup
vars[:'@connection'][:headers]['Authorization'] = REDACTED
end
inspection = '#<Excon::Connection:'
inspection << (object_id << 1).to_s(16)
vars.each do |key, value|
inspection << ' ' << key.to_s << '=' << value.inspect
end
inspection << '>'
inspection
end
private
def detect_content_length(body)
  # Compute the Content-Length for a request body, which may be a String,
  # an IO-like object (File, Tempfile, StringIO), or a path-like value.
  case body
  when String
    # Forcing BINARY makes #length equal the byte count of what will
    # actually be written to the socket (1.9+ only, per FORCE_ENC).
    body.force_encoding('BINARY') if FORCE_ENC
    body.length
  else
    if body.respond_to?(:size)
      # IO object: File, Tempfile, etc.
      body.size
    else
      begin
        # 1.8.7 File objects lack #size; fall back to File.size, or 0.
        File.size(body)
      rescue
        0
      end
    end
  end
end
# Perform one HTTP exchange: build the request line, headers and body,
# write them to the socket, then parse the response. Honors :mock
# (dispatches to stubs) and :expects (raises on unexpected status).
# Socket-level failures reset the cached socket and are re-raised as
# Excon::Errors::SocketError.
def request_kernel(params)
begin
response = if params[:mock]
invoke_stub(params)
else
socket.params = params
# start with "METHOD /path"
request = params[:method].to_s.upcase << ' '
if @proxy
# proxied requests use the absolute-URI request form
request << params[:scheme] << '://' << params[:host_port]
end
request << params[:path]
# add query to path, if there is one
case params[:query]
when String
request << '?' << params[:query]
when Hash
request << '?'
params[:query].each do |key, values|
if values.nil?
# nil value means "bare key" (?key&), not key=
request << key.to_s << '&'
else
[values].flatten.each do |value|
request << key.to_s << '=' << CGI.escape(value.to_s) << '&'
end
end
end
request.chop! # remove trailing '&'
end
# finish first line with "HTTP/1.1\r\n"
request << HTTP_1_1
if params.has_key?(:request_block)
params[:headers]['Transfer-Encoding'] = 'chunked'
elsif ! (params[:method].to_s.casecmp('GET') == 0 && params[:body].nil?)
# The HTTP spec isn't clear on it, but specifically, GET requests don't usually send bodies;
# if they don't, sending Content-Length:0 can cause issues.
params[:headers]['Content-Length'] = detect_content_length(params[:body])
end
# add headers to request
params[:headers].each do |key, values|
[values].flatten.each do |value|
request << key.to_s << ': ' << value.to_s << CR_NL
end
end
# add additional "\r\n" to indicate end of headers
request << CR_NL
# write out the request, sans body
socket.write(request)
# write out the body
if params.has_key?(:request_block)
# chunked transfer: hex size line, chunk, CRLF; empty chunk terminates
while true
chunk = params[:request_block].call
if FORCE_ENC
chunk.force_encoding('BINARY')
end
if chunk.length > 0
socket.write(chunk.length.to_s(16) << CR_NL << chunk << CR_NL)
else
socket.write('0' << CR_NL << CR_NL)
break
end
end
elsif !params[:body].nil?
if params[:body].is_a?(String)
unless params[:body].empty?
socket.write(params[:body])
end
else
# IO-ish body: rewind and stream it in :chunk_size pieces
if params[:body].respond_to?(:binmode)
params[:body].binmode
end
if params[:body].respond_to?(:pos=)
params[:body].pos = 0
end
while chunk = params[:body].read(params[:chunk_size])
socket.write(chunk)
end
end
end
# read the response
response = Excon::Response.parse(socket, params)
if response.headers['Connection'] == 'close'
reset
end
response
end
rescue Excon::Errors::StubNotFound, Excon::Errors::Timeout => error
raise(error)
rescue => socket_error
reset
raise(Excon::Errors::SocketError.new(socket_error))
end
if params.has_key?(:expects) && ![*params[:expects]].include?(response.status)
reset
raise(Excon::Errors.status_error(params, response))
else
response
end
end
# Match +params+ against registered stubs (Excon.stubs) and return the
# first matching stubbed Excon::Response. Regexp stub values record
# their captures into params[:captures]. Raises StubNotFound when
# nothing matches.
def invoke_stub(params)
# convert File/Tempfile body to string before matching:
unless params[:body].nil? || params[:body].is_a?(String)
if params[:body].respond_to?(:binmode)
params[:body].binmode
end
if params[:body].respond_to?(:rewind)
params[:body].rewind
end
params[:body] = params[:body].read
end
params[:captures] = {:headers => {}} # setup data to hold captures
Excon.stubs.each do |stub, response|
# headers are matched key-by-key; other stub keys are compared whole
headers_match = !stub.has_key?(:headers) || stub[:headers].keys.all? do |key|
case value = stub[:headers][key]
when Regexp
if match = value.match(params[:headers][key])
params[:captures][:headers][key] = match.captures
end
match
else
value == params[:headers][key]
end
end
non_headers_match = (stub.keys - [:headers]).all? do |key|
case value = stub[key]
when Regexp
if match = value.match(params[key])
params[:captures][key] = match.captures
end
match
else
value == params[key]
end
end
if headers_match && non_headers_match
# a stub's response may be a Proc, invoked with the request params
response_attributes = case response
when Proc
response.call(params)
else
response
end
if params[:expects] && ![*params[:expects]].include?(response_attributes[:status])
# don't pass stuff into a block if there was an error
elsif params.has_key?(:response_block) && response_attributes.has_key?(:body)
# stream the stubbed body to the block in :chunk_size pieces
body = response_attributes.delete(:body)
content_length = remaining = body.bytesize
i = 0
while i < body.length
params[:response_block].call(body[i, params[:chunk_size]], [remaining - params[:chunk_size], 0].max, content_length)
remaining -= params[:chunk_size]
i += params[:chunk_size]
end
end
return Excon::Response.new(response_attributes)
end
end
# if we reach here no stubs matched
raise(Excon::Errors::StubNotFound.new('no stubs matched ' << params.inspect))
end
def socket
  # Lazily create (and cache per-thread, see #sockets) the socket for
  # this connection, choosing the SSL variant when the scheme is HTTPS.
  sockets[@socket_key] ||= begin
    socket_class = @connection[:scheme] == HTTPS ? Excon::SSLSocket : Excon::Socket
    socket_class.new(@connection, @proxy)
  end
end
def sockets
  # Per-thread socket cache keyed by socket key, so sockets are never
  # shared across threads.
  cache = Thread.current[:_excon_sockets]
  cache || (Thread.current[:_excon_sockets] = {})
end
def setup_proxy(proxy)
  # Parse a proxy URL string into the hash form used internally.
  # @raise [Excon::Errors::ProxyParseError] if host, port or scheme is missing
  uri = URI.parse(proxy)
  if uri.host.nil? || uri.port.nil? || uri.scheme.nil?
    raise Excon::Errors::ProxyParseError, "Proxy is invalid"
  end
  {
    :host      => uri.host,
    :host_port => "#{uri.host}:#{uri.port}",
    :password  => uri.password,
    :port      => uri.port,
    :scheme    => uri.scheme,
    :user      => uri.user
  }
end
end
end
|
module Excon
class Connection
# Whitelist of option keys accepted by #initialize and #request; any
# other key raises ArgumentError via #assert_valid_keys_for_argument!.
VALID_CONNECTION_KEYS = [:body, :headers, :host, :path, :port, :query, :scheme, :user, :password,
:instrumentor, :instrumentor_name, :ssl_ca_file, :ssl_verify_peer, :chunk_size,
:nonblock, :retry_limit, :connect_timeout, :read_timeout, :write_timeout, :captures,
:exception, :expects, :mock, :proxy, :method, :idempotent, :request_block, :response_block,
:middlewares, :retries_remaining, :connection, :stack, :response, :pipeline]
# Connection data: Excon defaults merged with URL- and caller-supplied values.
attr_reader :data
def params
  # Deprecated reader; use #data instead.
  warning = "Excon::Connection#params is deprecated use Excon::Connection#data instead (#{caller.first})"
  $stderr.puts(warning)
  @data
end
def params=(new_params)
  # Deprecated writer; use #data= instead.
  warning = "Excon::Connection#params= is deprecated use Excon::Connection#data= instead (#{caller.first})"
  $stderr.puts(warning)
  @data = new_params
end
def proxy
  # Deprecated reader; use #data[:proxy] instead.
  warning = "Excon::Connection#proxy is deprecated use Excon::Connection#data[:proxy] instead (#{caller.first})"
  $stderr.puts(warning)
  @data[:proxy]
end
def proxy=(new_proxy)
  # Deprecated writer; use #data[:proxy]= instead.
  warning = "Excon::Connection#proxy= is deprecated use Excon::Connection#data[:proxy]= instead (#{caller.first})"
  $stderr.puts(warning)
  @data[:proxy] = new_proxy
end
# Validate that +argument+ (a Hash) contains no keys outside +valid_keys+.
# Returns true when everything is valid; otherwise raises ArgumentError
# naming every offending key.
def assert_valid_keys_for_argument!(argument, valid_keys)
  unknown_keys = argument.keys.reject { |key| valid_keys.include?(key) }
  if unknown_keys.empty?
    true
  else
    raise ArgumentError, "The following keys are invalid: #{unknown_keys.map(&:inspect).join(', ')}"
  end
end
private :assert_valid_keys_for_argument!
# Initializes a new Connection instance
# @param [String] url The destination URL
# @param [Hash<Symbol, >] params One or more optional params
# @option params [String] :body Default text to be sent over a socket. Only used if :body absent in Connection#request params
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request. Only used if params[:headers] is not supplied to Connection#request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path Default path; appears after 'scheme://host:port/'. Only used if params[:path] is not supplied to Connection#request
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query Default query; appended to the 'scheme://host:port/path/' in the form of '?key=value'. Will only be used if params[:query] is not supplied to Connection#request
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
# @option params [String] :proxy Proxy server; e.g. 'http://myproxy.com:8888'
# @option params [Fixnum] :retry_limit Set how many times we'll retry a failed request. (Default 4)
# @option params [Class] :instrumentor Responds to #instrument as in ActiveSupport::Notifications
# @option params [String] :instrumentor_name Name prefix for #instrument events. Defaults to 'excon'
def initialize(url, params = {})
assert_valid_keys_for_argument!(params, VALID_CONNECTION_KEYS)
uri = URI.parse(url)
# Excon defaults, overridden by URL-derived values, overridden in turn by
# caller-supplied params (params win on conflict).
@data = Excon.defaults.merge({
:host => uri.host,
:path => uri.path,
:port => uri.port.to_s,
:query => uri.query,
:scheme => uri.scheme,
:user => (URI.decode(uri.user) if uri.user),
:password => (URI.decode(uri.password) if uri.password),
}).merge!(params)
# merge does not deep-dup, so make sure headers is not the original
@data[:headers] = @data[:headers].dup
# Proxy resolution order: https_proxy env vars (HTTPS connections only),
# then http_proxy env vars, then the :proxy param.
if @data[:scheme] == HTTPS && (ENV.has_key?('https_proxy') || ENV.has_key?('HTTPS_PROXY'))
@data[:proxy] = setup_proxy(ENV['https_proxy'] || ENV['HTTPS_PROXY'])
elsif (ENV.has_key?('http_proxy') || ENV.has_key?('HTTP_PROXY'))
@data[:proxy] = setup_proxy(ENV['http_proxy'] || ENV['HTTP_PROXY'])
elsif @data.has_key?(:proxy)
@data[:proxy] = setup_proxy(@data[:proxy])
end
if @data[:proxy]
@data[:headers]['Proxy-Connection'] ||= 'Keep-Alive'
# https credentials happen in handshake
if @data[:scheme] == 'http' && (@data[:proxy][:user] || @data[:proxy][:password])
# pack('m') base64-encodes; delete strips the newline pack appends
auth = ['' << @data[:proxy][:user].to_s << ':' << @data[:proxy][:password].to_s].pack('m').delete(Excon::CR_NL)
@data[:headers]['Proxy-Authorization'] = 'Basic ' << auth
end
end
if ENV.has_key?('EXCON_DEBUG') || ENV.has_key?('EXCON_STANDARD_INSTRUMENTOR')
@data[:instrumentor] = Excon::StandardInstrumentor
end
# Use Basic Auth if url contains a login
if uri.user || uri.password
@data[:headers]['Authorization'] ||= 'Basic ' << ['' << uri.user.to_s << ':' << uri.password.to_s].pack('m').delete(Excon::CR_NL)
end
# Sockets are cached per host:port (see #sockets)
@socket_key = '' << @data[:host] << ':' << @data[:port]
reset
end
# Middleware entry point for the request phase: serializes +datum+ into
# an HTTP request (request line, headers, body) and writes it to the
# socket. If a middleware earlier in the stack already populated
# datum[:response], the write is skipped entirely.
def request_call(datum)
begin
if datum.has_key?(:response)
# we already have data from a middleware, so bail
return datum
else
socket.data = datum
# start with "METHOD /path"
request = datum[:method].to_s.upcase << ' '
if @data[:proxy]
# proxied requests use the absolute-URI request form
request << datum[:scheme] << '://' << @data[:host] << ':' << @data[:port].to_s
end
request << datum[:path]
# add query to path, if there is one
case datum[:query]
when String
request << '?' << datum[:query]
when Hash
request << '?'
datum[:query].each do |key, values|
if values.nil?
# nil value means "bare key" (?key&), not key=
request << key.to_s << '&'
else
[values].flatten.each do |value|
request << key.to_s << '=' << CGI.escape(value.to_s) << '&'
end
end
end
request.chop! # remove trailing '&'
end
# finish first line with "HTTP/1.1\r\n"
request << HTTP_1_1
if datum.has_key?(:request_block)
datum[:headers]['Transfer-Encoding'] = 'chunked'
elsif ! (datum[:method].to_s.casecmp('GET') == 0 && datum[:body].nil?)
# The HTTP spec isn't clear on it, but specifically, GET requests don't usually send bodies;
# if they don't, sending Content-Length:0 can cause issues.
datum[:headers]['Content-Length'] = detect_content_length(datum[:body])
end
# add headers to request
datum[:headers].each do |key, values|
[values].flatten.each do |value|
request << key.to_s << ': ' << value.to_s << CR_NL
end
end
# add additional "\r\n" to indicate end of headers
request << CR_NL
# write out the request, sans body
socket.write(request)
# write out the body
if datum.has_key?(:request_block)
# chunked transfer: hex size line, chunk, CRLF; empty chunk terminates
while true
chunk = datum[:request_block].call
if FORCE_ENC
chunk.force_encoding('BINARY')
end
if chunk.length > 0
socket.write(chunk.length.to_s(16) << CR_NL << chunk << CR_NL)
else
socket.write('0' << CR_NL << CR_NL)
break
end
end
elsif !datum[:body].nil?
if datum[:body].is_a?(String)
unless datum[:body].empty?
socket.write(datum[:body])
end
else
# IO-ish body: rewind and stream it in :chunk_size pieces
if datum[:body].respond_to?(:binmode)
datum[:body].binmode
end
if datum[:body].respond_to?(:pos=)
datum[:body].pos = 0
end
while chunk = datum[:body].read(datum[:chunk_size])
socket.write(chunk)
end
end
end
end
rescue => error
# StubNotFound/Timeout pass through; everything else is wrapped as a
# SocketError so callers see a uniform failure type
case error
when Excon::Errors::StubNotFound, Excon::Errors::Timeout
raise(error)
else
raise(Excon::Errors::SocketError.new(error))
end
end
datum
end
# Terminal middleware hook for the response phase: the connection itself
# performs no response processing, so the datum passes through unchanged.
def response_call(datum)
datum
end
# Sends the supplied request to the destination host.
# @yield [chunk] @see Response#self.parse
# @param [Hash<Symbol, >] params One or more optional params, override defaults set in Connection.new
# @option params [String] :body text to be sent over a socket
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path appears after 'scheme://host:port/'
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query appended to the 'scheme://host:port/path/' in the form of '?key=value'
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
def request(params, &block)
# @data has defaults, merge in new params to override
datum = @data.merge(params)
assert_valid_keys_for_argument!(params, VALID_CONNECTION_KEYS)
datum[:headers] = @data[:headers].merge(datum[:headers] || {})
datum[:headers]['Host'] ||= '' << datum[:host] << ':' << datum[:port]
datum[:retries_remaining] ||= datum[:retry_limit]
# if path is empty or doesn't start with '/', insert one
unless datum[:path][0, 1] == '/'
datum[:path].insert(0, '/')
end
if block_given?
$stderr.puts("Excon requests with a block are deprecated, pass :response_block instead (#{caller.first})")
# Proc.new with no explicit block captures the block given to this method
datum[:response_block] = Proc.new
end
datum[:connection] = self
# Build the middleware stack innermost-first, with this connection as
# the terminal link (its request_call/response_call do the real work).
datum[:stack] = datum[:middlewares].map do |middleware|
lambda {|stack| middleware.new(stack)}
end.reverse.inject(self) do |middlewares, middleware|
middleware.call(middlewares)
end
datum = datum[:stack].request_call(datum)
unless datum[:pipeline]
# non-pipelined: read the response immediately
datum = response(datum)
if datum[:response][:headers]['Connection'] == 'close'
reset
end
Excon::Response.new(datum[:response])
else
datum
end
rescue => request_error
reset
# Retry (by recursing with the decremented datum) only for :idempotent
# requests that failed with a timeout, socket, or HTTP status error.
if datum[:idempotent] && [Excon::Errors::Timeout, Excon::Errors::SocketError,
Excon::Errors::HTTPStatusError].any? {|ex| request_error.kind_of? ex } && datum[:retries_remaining] > 1
datum[:retries_remaining] -= 1
request(datum, &block)
else
if datum.has_key?(:instrumentor)
datum[:instrumentor].instrument("#{datum[:instrumentor_name]}.error", :error => request_error)
end
raise(request_error)
end
end
# Sends the supplied requests to the destination host using pipelining.
# @pipeline_params [Array<Hash>] pipeline_params An array of one or more optional params, override defaults set in Connection.new, see #request for details
def requests(pipeline_params)
  # Phase 1: write every request up front (:pipeline => true makes
  # #request return the datum without reading a response).
  request_data = pipeline_params.map do |params|
    request(params.merge!(:pipeline => true))
  end
  # Phase 2: read the responses back in the same order.
  request_data.map do |datum|
    Excon::Response.new(response(datum)[:response])
  end
end
def reset
  # Drop this connection's cached socket (if any) and close it, so the
  # next request opens a fresh one.
  old_socket = sockets.delete(@socket_key)
  old_socket.close if old_socket
end
# Generate HTTP request verb methods
# Defines one convenience method per entry in Excon::HTTP_VERBS, each
# delegating to #request with :method merged in.
# NOTE: merge! mutates the caller-supplied params hash.
Excon::HTTP_VERBS.each do |method|
class_eval <<-DEF, __FILE__, __LINE__ + 1
def #{method}(params={}, &block)
request(params.merge!(:method => :#{method}), &block)
end
DEF
end
def retry_limit=(new_retry_limit)
  # Deprecated writer; prefer passing :retry_limit to the initializer.
  warning = "Excon::Connection#retry_limit= is deprecated, pass :retry_limit to the initializer (#{caller.first})"
  $stderr.puts(warning)
  @data[:retry_limit] = new_retry_limit
end
def retry_limit
  # Deprecated reader; prefer passing :retry_limit to the initializer.
  # Lazily falls back to DEFAULT_RETRY_LIMIT when unset.
  warning = "Excon::Connection#retry_limit is deprecated, pass :retry_limit to the initializer (#{caller.first})"
  $stderr.puts(warning)
  @data[:retry_limit] ||= DEFAULT_RETRY_LIMIT
end
def inspect
vars = instance_variables.inject({}) do |accum, var|
accum.merge!(var.to_sym => instance_variable_get(var))
end
if vars[:'@data'][:headers].has_key?('Authorization')
vars[:'@data'] = vars[:'@data'].dup
vars[:'@data'][:headers] = vars[:'@data'][:headers].dup
vars[:'@data'][:headers]['Authorization'] = REDACTED
end
inspection = '#<Excon::Connection:'
inspection << (object_id << 1).to_s(16)
vars.each do |key, value|
inspection << ' ' << key.to_s << '=' << value.inspect
end
inspection << '>'
inspection
end
private
def detect_content_length(body)
  # Compute the Content-Length for a request body, which may be a String,
  # an IO-like object (File, Tempfile, StringIO), or a path-like value.
  case body
  when String
    # Forcing BINARY makes #length equal the byte count of what will
    # actually be written to the socket (1.9+ only, per FORCE_ENC).
    body.force_encoding('BINARY') if FORCE_ENC
    body.length
  else
    if body.respond_to?(:size)
      # IO object: File, Tempfile, etc.
      body.size
    else
      begin
        # 1.8.7 File objects lack #size; fall back to File.size, or 0.
        File.size(body)
      rescue
        0
      end
    end
  end
end
# Read and parse an HTTP response from the socket into
# datum[:response] ({:body, :headers, :status, :remote_ip}), then hand
# the datum to the middleware stack's response_call. Skips the read
# entirely if a middleware already supplied :response. Supports both
# Content-Length and chunked transfer decoding, with optional streaming
# through datum[:response_block].
def response(datum={})
unless datum.has_key?(:response)
datum[:response] = {
:body => '',
:headers => {},
# status code lives at a fixed offset in "HTTP/1.1 NNN ..."
:status => socket.read(12)[9, 11].to_i,
:remote_ip => socket.remote_ip
}
socket.readline # read the rest of the status line and CRLF
until ((data = socket.readline).chop!).empty?
key, value = data.split(/:\s*/, 2)
# repeated headers are folded into one comma-separated value
datum[:response][:headers][key] = ([*datum[:response][:headers][key]] << value).compact.join(', ')
if key.casecmp('Content-Length') == 0
content_length = value.to_i
elsif (key.casecmp('Transfer-Encoding') == 0) && (value.casecmp('chunked') == 0)
transfer_encoding_chunked = true
end
end
# HEAD/CONNECT and NO_ENTITY statuses carry no body at all
unless (['HEAD', 'CONNECT'].include?(datum[:method].to_s.upcase)) || NO_ENTITY.include?(datum[:response][:status])
# check to see if expects was set and matched
expected_status = !datum.has_key?(:expects) || [*datum[:expects]].include?(datum[:response][:status])
# if expects matched and there is a block, use it
if expected_status && datum.has_key?(:response_block)
if transfer_encoding_chunked
# 2 == "/r/n".length
while (chunk_size = socket.readline.chop!.to_i(16)) > 0
datum[:response_block].call(socket.read(chunk_size + 2).chop!, nil, nil)
end
socket.read(2)
elsif remaining = content_length
while remaining > 0
datum[:response_block].call(socket.read([datum[:chunk_size], remaining].min), [remaining - datum[:chunk_size], 0].max, content_length)
remaining -= datum[:chunk_size]
end
else
# no framing info: read until the connection closes
while remaining = socket.read(datum[:chunk_size])
datum[:response_block].call(remaining, remaining.length, content_length)
end
end
else # no block or unexpected status
if transfer_encoding_chunked
while (chunk_size = socket.readline.chop!.to_i(16)) > 0
datum[:response][:body] << socket.read(chunk_size + 2).chop! # 2 == "/r/n".length
end
socket.read(2) # 2 == "/r/n".length
elsif remaining = content_length
while remaining > 0
datum[:response][:body] << socket.read([datum[:chunk_size], remaining].min)
remaining -= datum[:chunk_size]
end
else
datum[:response][:body] << socket.read
end
end
end
end
datum[:stack].response_call(datum)
rescue => error
# HTTPStatusError/Timeout pass through; everything else is wrapped as a
# SocketError so callers see a uniform failure type
case error
when Excon::Errors::HTTPStatusError, Excon::Errors::Timeout
raise(error)
else
raise(Excon::Errors::SocketError.new(error))
end
end
def socket
  # Lazily create (and cache per-thread, see #sockets) the socket for
  # this connection, choosing the SSL variant when the scheme is HTTPS.
  sockets[@socket_key] ||= begin
    socket_class = @data[:scheme] == HTTPS ? Excon::SSLSocket : Excon::Socket
    socket_class.new(@data)
  end
end
def sockets
  # Per-thread socket cache keyed by socket key, so sockets are never
  # shared across threads.
  cache = Thread.current[:_excon_sockets]
  cache || (Thread.current[:_excon_sockets] = {})
end
def setup_proxy(proxy)
  # Normalize a proxy setting: a URL String is parsed into the internal
  # hash form; anything else (e.g. an already-built hash) passes through.
  # @raise [Excon::Errors::ProxyParseError] if host, port or scheme is missing
  return proxy unless proxy.is_a?(String)
  uri = URI.parse(proxy)
  if uri.host.nil? || uri.port.nil? || uri.scheme.nil?
    raise Excon::Errors::ProxyParseError, "Proxy is invalid"
  end
  {
    :host     => uri.host,
    :password => uri.password,
    :port     => uri.port.to_s,
    :scheme   => uri.scheme,
    :user     => uri.user
  }
end
end
end
Add :family to the list of known connection keys
module Excon
class Connection
# Whitelist of option keys accepted by #initialize and #request; any
# other key raises ArgumentError via #assert_valid_keys_for_argument!.
# :family was added here — presumably the socket address family
# (e.g. ::Socket::Constants::AF_INET); confirm against Excon::Socket.
VALID_CONNECTION_KEYS = [:body, :family, :headers, :host, :path, :port, :query, :scheme, :user, :password,
:instrumentor, :instrumentor_name, :ssl_ca_file, :ssl_verify_peer, :chunk_size,
:nonblock, :retry_limit, :connect_timeout, :read_timeout, :write_timeout, :captures,
:exception, :expects, :mock, :proxy, :method, :idempotent, :request_block, :response_block,
:middlewares, :retries_remaining, :connection, :stack, :response, :pipeline]
# Connection data: Excon defaults merged with URL- and caller-supplied values.
attr_reader :data
def params
$stderr.puts("Excon::Connection#params is deprecated use Excon::Connection#data instead (#{caller.first})")
@data
end
def params=(new_params)
$stderr.puts("Excon::Connection#params= is deprecated use Excon::Connection#data= instead (#{caller.first})")
@data = new_params
end
def proxy
$stderr.puts("Excon::Connection#proxy is deprecated use Excon::Connection#data[:proxy] instead (#{caller.first})")
@data[:proxy]
end
def proxy=(new_proxy)
$stderr.puts("Excon::Connection#proxy= is deprecated use Excon::Connection#data[:proxy]= instead (#{caller.first})")
@data[:proxy] = new_proxy
end
def assert_valid_keys_for_argument!(argument, valid_keys)
invalid_keys = argument.keys - valid_keys
return true if invalid_keys.empty?
raise ArgumentError, "The following keys are invalid: #{invalid_keys.map(&:inspect).join(', ')}"
end
private :assert_valid_keys_for_argument!
# Initializes a new Connection instance
# @param [String] url The destination URL
# @param [Hash<Symbol, >] params One or more optional params
# @option params [String] :body Default text to be sent over a socket. Only used if :body absent in Connection#request params
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request. Only used if params[:headers] is not supplied to Connection#request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path Default path; appears after 'scheme://host:port/'. Only used if params[:path] is not supplied to Connection#request
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query Default query; appended to the 'scheme://host:port/path/' in the form of '?key=value'. Will only be used if params[:query] is not supplied to Connection#request
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
# @option params [String] :proxy Proxy server; e.g. 'http://myproxy.com:8888'
# @option params [Fixnum] :retry_limit Set how many times we'll retry a failed request. (Default 4)
# @option params [Class] :instrumentor Responds to #instrument as in ActiveSupport::Notifications
# @option params [String] :instrumentor_name Name prefix for #instrument events. Defaults to 'excon'
def initialize(url, params = {})
assert_valid_keys_for_argument!(params, VALID_CONNECTION_KEYS)
uri = URI.parse(url)
@data = Excon.defaults.merge({
:host => uri.host,
:path => uri.path,
:port => uri.port.to_s,
:query => uri.query,
:scheme => uri.scheme,
:user => (URI.decode(uri.user) if uri.user),
:password => (URI.decode(uri.password) if uri.password),
}).merge!(params)
# merge does not deep-dup, so make sure headers is not the original
@data[:headers] = @data[:headers].dup
if @data[:scheme] == HTTPS && (ENV.has_key?('https_proxy') || ENV.has_key?('HTTPS_PROXY'))
@data[:proxy] = setup_proxy(ENV['https_proxy'] || ENV['HTTPS_PROXY'])
elsif (ENV.has_key?('http_proxy') || ENV.has_key?('HTTP_PROXY'))
@data[:proxy] = setup_proxy(ENV['http_proxy'] || ENV['HTTP_PROXY'])
elsif @data.has_key?(:proxy)
@data[:proxy] = setup_proxy(@data[:proxy])
end
if @data[:proxy]
@data[:headers]['Proxy-Connection'] ||= 'Keep-Alive'
# https credentials happen in handshake
if @data[:scheme] == 'http' && (@data[:proxy][:user] || @data[:proxy][:password])
auth = ['' << @data[:proxy][:user].to_s << ':' << @data[:proxy][:password].to_s].pack('m').delete(Excon::CR_NL)
@data[:headers]['Proxy-Authorization'] = 'Basic ' << auth
end
end
if ENV.has_key?('EXCON_DEBUG') || ENV.has_key?('EXCON_STANDARD_INSTRUMENTOR')
@data[:instrumentor] = Excon::StandardInstrumentor
end
# Use Basic Auth if url contains a login
if uri.user || uri.password
@data[:headers]['Authorization'] ||= 'Basic ' << ['' << uri.user.to_s << ':' << uri.password.to_s].pack('m').delete(Excon::CR_NL)
end
@socket_key = '' << @data[:host] << ':' << @data[:port]
reset
end
def request_call(datum)
begin
if datum.has_key?(:response)
# we already have data from a middleware, so bail
return datum
else
socket.data = datum
# start with "METHOD /path"
request = datum[:method].to_s.upcase << ' '
if @data[:proxy]
request << datum[:scheme] << '://' << @data[:host] << ':' << @data[:port].to_s
end
request << datum[:path]
# add query to path, if there is one
case datum[:query]
when String
request << '?' << datum[:query]
when Hash
request << '?'
datum[:query].each do |key, values|
if values.nil?
request << key.to_s << '&'
else
[values].flatten.each do |value|
request << key.to_s << '=' << CGI.escape(value.to_s) << '&'
end
end
end
request.chop! # remove trailing '&'
end
# finish first line with "HTTP/1.1\r\n"
request << HTTP_1_1
if datum.has_key?(:request_block)
datum[:headers]['Transfer-Encoding'] = 'chunked'
elsif ! (datum[:method].to_s.casecmp('GET') == 0 && datum[:body].nil?)
# The HTTP spec isn't clear on it, but specifically, GET requests don't usually send bodies;
# if they don't, sending Content-Length:0 can cause issues.
datum[:headers]['Content-Length'] = detect_content_length(datum[:body])
end
# add headers to request
datum[:headers].each do |key, values|
[values].flatten.each do |value|
request << key.to_s << ': ' << value.to_s << CR_NL
end
end
# add additional "\r\n" to indicate end of headers
request << CR_NL
# write out the request, sans body
socket.write(request)
# write out the body
if datum.has_key?(:request_block)
while true
chunk = datum[:request_block].call
if FORCE_ENC
chunk.force_encoding('BINARY')
end
if chunk.length > 0
socket.write(chunk.length.to_s(16) << CR_NL << chunk << CR_NL)
else
socket.write('0' << CR_NL << CR_NL)
break
end
end
elsif !datum[:body].nil?
if datum[:body].is_a?(String)
unless datum[:body].empty?
socket.write(datum[:body])
end
else
if datum[:body].respond_to?(:binmode)
datum[:body].binmode
end
if datum[:body].respond_to?(:pos=)
datum[:body].pos = 0
end
while chunk = datum[:body].read(datum[:chunk_size])
socket.write(chunk)
end
end
end
end
rescue => error
case error
when Excon::Errors::StubNotFound, Excon::Errors::Timeout
raise(error)
else
raise(Excon::Errors::SocketError.new(error))
end
end
datum
end
def response_call(datum)
datum
end
# Sends the supplied request to the destination host.
# @yield [chunk] @see Response#self.parse
# @param [Hash<Symbol, >] params One or more optional params, override defaults set in Connection.new
# @option params [String] :body text to be sent over a socket
# @option params [Hash<Symbol, String>] :headers The default headers to supply in a request
# @option params [String] :host The destination host's reachable DNS name or IP, in the form of a String
# @option params [String] :path appears after 'scheme://host:port/'
# @option params [Fixnum] :port The port on which to connect, to the destination host
# @option params [Hash] :query appended to the 'scheme://host:port/path/' in the form of '?key=value'
# @option params [String] :scheme The protocol; 'https' causes OpenSSL to be used
def request(params, &block)
# @data has defaults, merge in new params to override
datum = @data.merge(params)
assert_valid_keys_for_argument!(params, VALID_CONNECTION_KEYS)
datum[:headers] = @data[:headers].merge(datum[:headers] || {})
datum[:headers]['Host'] ||= '' << datum[:host] << ':' << datum[:port]
datum[:retries_remaining] ||= datum[:retry_limit]
# if path is empty or doesn't start with '/', insert one
unless datum[:path][0, 1] == '/'
datum[:path].insert(0, '/')
end
if block_given?
$stderr.puts("Excon requests with a block are deprecated, pass :response_block instead (#{caller.first})")
datum[:response_block] = Proc.new
end
datum[:connection] = self
datum[:stack] = datum[:middlewares].map do |middleware|
lambda {|stack| middleware.new(stack)}
end.reverse.inject(self) do |middlewares, middleware|
middleware.call(middlewares)
end
datum = datum[:stack].request_call(datum)
unless datum[:pipeline]
datum = response(datum)
if datum[:response][:headers]['Connection'] == 'close'
reset
end
Excon::Response.new(datum[:response])
else
datum
end
rescue => request_error
reset
if datum[:idempotent] && [Excon::Errors::Timeout, Excon::Errors::SocketError,
Excon::Errors::HTTPStatusError].any? {|ex| request_error.kind_of? ex } && datum[:retries_remaining] > 1
datum[:retries_remaining] -= 1
request(datum, &block)
else
if datum.has_key?(:instrumentor)
datum[:instrumentor].instrument("#{datum[:instrumentor_name]}.error", :error => request_error)
end
raise(request_error)
end
end
# Sends the supplied requests to the destination host using pipelining.
# @pipeline_params [Array<Hash>] pipeline_params An array of one or more optional params, override defaults set in Connection.new, see #request for details
def requests(pipeline_params)
pipeline_params.map do |params|
request(params.merge!(:pipeline => true))
end.map do |datum|
Excon::Response.new(response(datum)[:response])
end
end
def reset
(old_socket = sockets.delete(@socket_key)) && old_socket.close
end
# Generate HTTP request verb methods
Excon::HTTP_VERBS.each do |method|
class_eval <<-DEF, __FILE__, __LINE__ + 1
def #{method}(params={}, &block)
request(params.merge!(:method => :#{method}), &block)
end
DEF
end
def retry_limit=(new_retry_limit)
$stderr.puts("Excon::Connection#retry_limit= is deprecated, pass :retry_limit to the initializer (#{caller.first})")
@data[:retry_limit] = new_retry_limit
end
def retry_limit
$stderr.puts("Excon::Connection#retry_limit is deprecated, pass :retry_limit to the initializer (#{caller.first})")
@data[:retry_limit] ||= DEFAULT_RETRY_LIMIT
end
def inspect
vars = instance_variables.inject({}) do |accum, var|
accum.merge!(var.to_sym => instance_variable_get(var))
end
if vars[:'@data'][:headers].has_key?('Authorization')
vars[:'@data'] = vars[:'@data'].dup
vars[:'@data'][:headers] = vars[:'@data'][:headers].dup
vars[:'@data'][:headers]['Authorization'] = REDACTED
end
inspection = '#<Excon::Connection:'
inspection << (object_id << 1).to_s(16)
vars.each do |key, value|
inspection << ' ' << key.to_s << '=' << value.inspect
end
inspection << '>'
inspection
end
private
def detect_content_length(body)
if body.is_a?(String)
if FORCE_ENC
body.force_encoding('BINARY')
end
body.length
elsif body.respond_to?(:size)
# IO object: File, Tempfile, etc.
body.size
else
begin
File.size(body) # for 1.8.7 where file does not have size
rescue
0
end
end
end
def response(datum={})
unless datum.has_key?(:response)
datum[:response] = {
:body => '',
:headers => {},
:status => socket.read(12)[9, 11].to_i,
:remote_ip => socket.remote_ip
}
socket.readline # read the rest of the status line and CRLF
until ((data = socket.readline).chop!).empty?
key, value = data.split(/:\s*/, 2)
datum[:response][:headers][key] = ([*datum[:response][:headers][key]] << value).compact.join(', ')
if key.casecmp('Content-Length') == 0
content_length = value.to_i
elsif (key.casecmp('Transfer-Encoding') == 0) && (value.casecmp('chunked') == 0)
transfer_encoding_chunked = true
end
end
unless (['HEAD', 'CONNECT'].include?(datum[:method].to_s.upcase)) || NO_ENTITY.include?(datum[:response][:status])
# check to see if expects was set and matched
expected_status = !datum.has_key?(:expects) || [*datum[:expects]].include?(datum[:response][:status])
# if expects matched and there is a block, use it
if expected_status && datum.has_key?(:response_block)
if transfer_encoding_chunked
# 2 == "/r/n".length
while (chunk_size = socket.readline.chop!.to_i(16)) > 0
datum[:response_block].call(socket.read(chunk_size + 2).chop!, nil, nil)
end
socket.read(2)
elsif remaining = content_length
while remaining > 0
datum[:response_block].call(socket.read([datum[:chunk_size], remaining].min), [remaining - datum[:chunk_size], 0].max, content_length)
remaining -= datum[:chunk_size]
end
else
while remaining = socket.read(datum[:chunk_size])
datum[:response_block].call(remaining, remaining.length, content_length)
end
end
else # no block or unexpected status
if transfer_encoding_chunked
while (chunk_size = socket.readline.chop!.to_i(16)) > 0
datum[:response][:body] << socket.read(chunk_size + 2).chop! # 2 == "/r/n".length
end
socket.read(2) # 2 == "/r/n".length
elsif remaining = content_length
while remaining > 0
datum[:response][:body] << socket.read([datum[:chunk_size], remaining].min)
remaining -= datum[:chunk_size]
end
else
datum[:response][:body] << socket.read
end
end
end
end
datum[:stack].response_call(datum)
rescue => error
case error
when Excon::Errors::HTTPStatusError, Excon::Errors::Timeout
raise(error)
else
raise(Excon::Errors::SocketError.new(error))
end
end
def socket
sockets[@socket_key] ||= if @data[:scheme] == HTTPS
Excon::SSLSocket.new(@data)
else
Excon::Socket.new(@data)
end
end
def sockets
Thread.current[:_excon_sockets] ||= {}
end
def setup_proxy(proxy)
case proxy
when String
uri = URI.parse(proxy)
unless uri.host and uri.port and uri.scheme
raise Excon::Errors::ProxyParseError, "Proxy is invalid"
end
{
:host => uri.host,
:password => uri.password,
:port => uri.port.to_s,
:scheme => uri.scheme,
:user => uri.user
}
else
proxy
end
end
end
end
|
# -*- coding: utf-8 -*-
#
require 'ostruct'
module GoogleDrive
mattr_accessor :email, :password
def self.open_spreadsheet key
@last_session = login email, password
if key=~/google.com/
@last_session.spreadsheet_by_url key
else
@last_session.spreadsheet_by_key key
end
end
class SuperRow
attr_accessor :config, :row_num, :groups
delegate :columns, :params_h, :ws, :to => :config
def initialize _config, _row_num
self.config = _config
self.row_num = _row_num
self.groups = {}
config.params_h.keys.each do |key|
if key=~/:/
group, name = key.split ':'
groups[group] ||= {}
groups[group][name] = {
:row => row_num,
:col => config.params_h[key]
}
else
class_eval do
define_method key do
get key
end
define_method "#{key}=" do |value|
set key, value
end
end
end
end
after_init
end
def get *args
key = args.join(':')
raise "No such column name '#{key}'" unless params_h.has_key? key
value = ws[ row_num, params_h[key] ]
config.opts.strip_values ? value.strip : value
end
def set key, value
key = key.join(':') if key.is_a? Array
ws[ row_num, params_h[key] ] = value
end
def after_init
end
end
class SuperConfig
attr_accessor :params_a, :params_h, :ws, :opts
delegate :row_class, :first_content_row, :to => :opts
delegate :save, :num_rows, :num_cols, :to => :ws
def initialize _ws, _opts={}
self.params_a = []
self.params_h = {}
self.ws = _ws
self.opts = OpenStruct.new _opts.reverse_merge(
:row_class => SuperRow,
:first_content_row => 2,
:strip_values => true
)
init_params
after_init
end
def after_init
end
def super_row row_num
row_class.new self, row_num
end
def columns
params_h.keys.map &:to_sym
end
def each_content_row &block
for row in first_content_row..num_rows
block.call super_row(row)
end
end
protected
def init_params
ws.rows[0].each_with_index do |v,i|
params_a[i] = v
next if v.blank?
params_h[v] = i+1
end
end
end
end
minor
# -*- coding: utf-8 -*-
#
require 'ostruct'
module GoogleDrive
mattr_accessor :email, :password
def self.open_spreadsheet key
@last_session = login email, password
if key=~/google.com/
@last_session.spreadsheet_by_url key
else
@last_session.spreadsheet_by_key key
end
end
class SuperRow
attr_accessor :config, :row_num, :groups
delegate :columns, :params_h, :ws, :to => :config
def initialize _config, _row_num
self.config = _config
self.row_num = _row_num
self.groups = {}
config.params_h.keys.each do |key|
if key=~/:/
group, name = key.split ':'
groups[group] ||= {}
groups[group][name] = {
:row => row_num,
:col => config.params_h[key]
}
else
class_eval do
define_method key do
get key
end
define_method "#{key}=" do |value|
set key, value
end
end
end
end
after_init
end
def get *args
key = args.join(':')
raise "No such column name '#{key}'" unless params_h.has_key? key
value = ws[ row_num, params_h[key] ]
config.opts.strip_values ? value.strip : value
end
def set key, value
key = key.join(':') if key.is_a? Array
ws[ row_num, params_h[key.to_s] ] = value
end
def after_init
end
end
class SuperConfig
attr_accessor :params_a, :params_h, :ws, :opts
delegate :row_class, :first_content_row, :to => :opts
delegate :save, :num_rows, :num_cols, :to => :ws
def initialize _ws, _opts={}
self.params_a = []
self.params_h = {}
self.ws = _ws
self.opts = OpenStruct.new _opts.reverse_merge(
:row_class => SuperRow,
:first_content_row => 2,
:strip_values => true
)
init_params
after_init
end
def after_init
end
def super_row row_num
row_class.new self, row_num
end
def columns
params_h.keys.map &:to_sym
end
def each_content_row &block
for row in first_content_row..num_rows
block.call super_row(row)
end
end
protected
def init_params
ws.rows[0].each_with_index do |v,i|
params_a[i] = v
next if v.blank?
params_h[v] = i+1
end
end
end
end
|
module Concurrent
# @!visibility private
def self.allow_c_extensions?
defined?(RUBY_ENGINE) && RUBY_ENGINE == 'ruby'
end
# @!visibility private
def self.allow_c_native_class?(clazz)
allow_c_extensions? && defined?(Kernel.const_get("Concurrent::#{clazz}"))
rescue
false
end
# @!visibility private
def self.safe_require_c_extensions
require 'concurrent_ruby_ext' if allow_c_extensions?
rescue LoadError
warn 'Attempted to load C extensions on unsupported platform. Continuing with pure-Ruby.'
end
# @!visibility private
def self.safe_require_java_extensions
require 'concurrent_ruby_ext' if RUBY_PLATFORM == 'java'
rescue LoadError
warn 'Attempted to load Java extensions on unsupported platform. Continuing with pure-Ruby.'
end
end
Fixed buggy extension guard.
module Concurrent
# @!visibility private
def self.allow_c_extensions?
defined?(RUBY_ENGINE) && RUBY_ENGINE == 'ruby'
end
# @!visibility private
def self.allow_c_native_class?(clazz)
allow_c_extensions? && Concurrent.const_defined?(clazz)
rescue
false
end
# @!visibility private
def self.safe_require_c_extensions
require 'concurrent_ruby_ext' if allow_c_extensions?
rescue LoadError
warn 'Attempted to load C extensions on unsupported platform. Continuing with pure-Ruby.'
end
# @!visibility private
def self.safe_require_java_extensions
require 'concurrent_ruby_ext' if RUBY_PLATFORM == 'java'
rescue LoadError
warn 'Attempted to load Java extensions on unsupported platform. Continuing with pure-Ruby.'
end
end
|
module FakeDelete
module Core
def self.included(base)
base.send :extend, Query
end
module Query
def fake_delete?
true
end
def only_deleted
scoped.tap { |x| x.default_scoped = false }.where("#{self.table_name}.deleted != 0")
end
def with_deleted
scoped.tap { |x| x.default_scoped = false }
end
end
def destroy
run_callbacks(:destroy) { delete }
end
def delete
return if new_record? || destroyed?
update_column :deleted, Time.now.to_i
end
def recover
update_column :deleted, 0
end
def destroyed?
self.deleted != 0
end
alias :deleted? :destroyed?
end
end
Use <> instead of != (SQL standards)
module FakeDelete
module Core
def self.included(base)
base.send :extend, Query
end
module Query
def fake_delete?
true
end
def only_deleted
scoped.tap { |x| x.default_scoped = false }.where("#{self.table_name}.deleted <> 0")
end
def with_deleted
scoped.tap { |x| x.default_scoped = false }
end
end
def destroy
run_callbacks(:destroy) { delete }
end
def delete
return if new_record? || destroyed?
update_column :deleted, Time.now.to_i
end
def recover
update_column :deleted, 0
end
def destroyed?
self.deleted != 0
end
alias :deleted? :destroyed?
end
end
|
require 'socket'
require 'active_support/core_ext'
Thread::abort_on_exception = true
module FakeSmtp
class Server
def initialize(options={})
@port = options[:port]
end
def run
server = TCPServer.open(@port)
puts "FakeSmtp: Listening on port #{@port}"
loop {Thread.start(server.accept) {|socket| SmtpSession.new(socket).run}}
end
end
class Session
attr_accessor :done
attr_accessor :cmd
class_attribute :handlers
self.handlers = {}
def initialize(socket)
@done = false
@socket = socket
end
def self.on(regular_expression, &action)
handlers[regular_expression] = action
end
def run
log("New session: #{@socket}")
handlers[:start].call if handlers[:start]
until done
break unless cmd = read_line
handle_command cmd
end
handlers[:end].call if handlers[:end]
@socket.close
log("Session finished: #{@socket}")
end
def handle_command(command)
self.cmd = command
handlers.keys.each do |re|
next unless re.kind_of? Regexp
if re =~ command
instance_eval &handlers[re]
break
end
end
end
def log(*args)
puts args.join(' ')
end
def respond_with(msg)
@socket.puts msg
end
def read_line
line = @socket.gets
line = line ? line.rstrip : line
log('READ: ', line)
line
end
end
class SmtpSession < Session
on /^HELO|^EHLO/ do
words = cmd.split(' ')
respond_with "Hello #{words[1]} I am glad to meet you"
end
on /^QUIT/ do
respond_with('221 Bye')
self.done = true
end
on /^DATA/ do
respond_with '354 End data with <CR><LF>.<CR><LF>'
while line = read_line
break if line == '.'
end
respond_with("250 OK: queued as #{rand(1000)}")
end
on /.*/ do
respond_with("250 OK")
end
end
end
Fixed some bugs, added outgoing message logging. This version really does work with rails
require 'socket'
require 'active_support/core_ext'
Thread::abort_on_exception = true
module FakeSmtp
class Server
def initialize(options={})
@port = options[:port]
end
def run
server = TCPServer.open(@port)
puts "FakeSmtp: Listening on port #{@port}"
loop {Thread.start(server.accept) {|socket| SmtpSession.new(socket).run}}
end
end
class Session
attr_accessor :done
attr_accessor :cmd
class_attribute :handlers
self.handlers = {}
def initialize(socket)
@done = false
@socket = socket
end
def self.on(regular_expression, &action)
handlers[regular_expression] = action
end
def run
log("New session: #{@socket}")
instance_eval &handlers[:start] if handlers[:start]
until done
break unless cmd = read_line
handle_command cmd
end
instance_eval &handlers[:end] if handlers[:end]
@socket.close
log("Session finished: #{@socket}")
end
def handle_command(command)
self.cmd = command
handlers.keys.each do |re|
next unless re.kind_of? Regexp
if re =~ command
instance_eval &handlers[re]
break
end
end
end
def log(*args)
puts args.join(' ')
end
def respond_with(msg)
log "Response: #{msg}"
@socket.puts msg
end
def read_line
line = @socket.gets
line = line ? line.rstrip : line
log('READ: ', line)
line
end
end
class SmtpSession < Session
on :start do
respond_with '220 smtp.example.com SMTP FakeSmtp'
end
on /^HELO|^EHLO/ do
words = cmd.split(' ')
respond_with "250: Hello #{words[1]} I am glad to meet you"
end
on /^QUIT/ do
respond_with('221 Bye')
self.done = true
end
on /^DATA/ do
respond_with '354 End data with <CR><LF>.<CR><LF>'
while line = read_line
break if line == '.'
end
respond_with("250 OK: queued as #{rand(1000)}")
end
on /.*/ do
respond_with("250 OK")
end
end
end
|
module Firehose
VERSION = "0.0.9"
CODENAME = "Garden Hose"
end
Bumped version
module Firehose
VERSION = "0.0.10"
CODENAME = "Garden Hose"
end |
module Fixation
VERSION = "2.1.0"
end
tag v2.2.0
module Fixation
VERSION = "2.2.0"
end
|
require 'flt/dec_num'
module Flt
# Trigonometry functions. The angular units used by these functions can be specified
# with the +angle+ attribute of the context. The accepted values are:
# * :rad for radians
# * :deg for degrees
# * :grad for gradians
#
# These functions are injected in Context objects.
module Trigonometry
# Cosine of an angle given in the units specified by the context +angle+ attribute.
def cos(x)
cos_base(num_class[x])
end
# Sine of an angle given in the units specified by the context +angle+ attribute.
def sin(x)
sin_base(num_class[x])
end
# Tangent of an angle given in the units specified by the context +angle+ attribute.
def tan(x)
tan_base(num_class[x])
end
# Arc-tangent. The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi/2, pi/2]; it is in [-90,90] in degrees.
def atan(x)
atan_base(num_class[x])
end
# Arc-tangent with two arguments (principal value of the argument of the complex number x+i*y).
# The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi, pi]; it is in [-180,180] in degrees.
def atan2(y, x)
atan2_base(num_class[y], num_class[x])
end
# Arc-sine. The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi/2, pi/2]; it is in [-90,90] in degrees.
def asin(x)
asin_base(num_class[x])
end
# Arc-cosine. The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi/2, pi/2]; it is in [-90,90] in degrees.
def acos(x)
acos_base(num_class[x])
end
# Length of the hypotenuse of a right-angle triangle (modulus or absolute value of the complex x+i*y).
def hypot(x, y)
hypot_base(num_class[x], num_class[y])
end
# Pi
def pi(round_digits=nil)
unless defined?(@pi_cache)
@pi_value = nil
@pi_digits = 0
end
round_digits ||= self.precision
if @pi_digits < round_digits
# provisional implementation (very slow)
lasts = 0
t, s, n, na, d, da = [num_class[3], 3, 1, 0, 0, 24]
num_class.context(self) do |local_context|
local_context.precision = round_digits + 6
while s != lasts
lasts = s
n, na = n+na, na+8
d, da = d+da, da+32
t = (t * n) / d
s += t
end
end
@pi_value = s
@pi_digits = round_digits
end
num_class.context(self, :precision=>round_digits){+@pi_value}
end
def e(digits=nil)
num_class.context(self) do |local_context|
local_context.precision = digits if digits
num_class.Num(1).exp
end
end
def half
num_class['0.5']
end
protected
def cos_base(x)
x = x.copy_sign(+1) # note that abs rounds; copy_sign does not.
rev_sign = false
s = nil
num_class.context(self) do |local_context|
local_context.precision += 3 # extra digits for intermediate steps
x,k,pi_2 = local_context.reduce_angle2(x,2)
rev_sign = true if k>1
if k % 2 == 0
x = pi_2 - x
else
rev_sign = !rev_sign
end
x = local_context.to_rad(x)
i, lasts, fact, num = 1, 0, 1, num_class[x]
s = num
x2 = -x*x
while s != lasts
lasts = s
i += 2
fact *= i * (i-1)
num *= x2
s += num / fact
end
end
return rev_sign ? minus(s) : plus(s)
end
def sin_base(x)
sign = x.sign
s = nil
num_class.context(self) do |local_context|
local_context.precision += 3 # extra digits for intermediate steps
x = x.copy_sign(+1) if sign<0
x,k,pi_2 = local_context.reduce_angle2(x,2)
sign = -sign if k>1
x = pi_2 - x if k % 2 == 1
x = local_context.to_rad(x)
i, lasts, fact, num = 1, 0, 1, num_class[x]
s = num
x2 = -x*x
while s != lasts
lasts = s
i += 2
fact *= i * (i-1)
num *= x2
s += num / fact
end
end
return plus(s).copy_sign(sign)
end
def tan_base(x)
plus(num_class.context(self) do |local_context|
local_context.precision += 2 # extra digits for intermediate steps
s,c = local_context.sin(x), local_context.cos(x)
s/c
end)
end
def atan_base(x)
s = nil
conversion = true
extra_prec = num_class.radix==2 ? 4 : 2
num_class.context(self) do |local_context|
local_context.precision += extra_prec
if x == 0
return num_class.zero
elsif x.abs > 1
if x.infinite?
s = local_context.quarter_cycle.copy_sign(x)
conversion = false
break
else
# c = (quarter_cycle).copy_sign(x)
c = (half*local_context.pi).copy_sign(x)
x = 1 / x
end
end
local_context.precision += extra_prec
x_squared = x ** 2
if x_squared.zero? || x_squared.subnormal?
s = x
s = c - s if c && c!=0
break
end
y = x_squared / (1 + x_squared)
y_over_x = y / x
i = num_class.zero; lasts = 0; s = y_over_x; coeff = 1; num = y_over_x
while s != lasts
lasts = s
i += 2
coeff *= i / (i + 1)
num *= y
s += coeff * num
end
if c && c!= 0
s = c - s
end
end
return conversion ? rad_to(s) : plus(s)
end
def atan2_base(y, x)
abs_y = y.abs
abs_x = x.abs
y_is_real = !x.infinite?
if x != 0
if y_is_real
a = y!=0 ? atan(y / x) : num_class.zero
a += half_cycle.copy_sign(y) if x < 0
return a
elsif abs_y == abs_x
one = num_class[1]
x = one.copy_sign(x)
y = one.copy_sign(y)
return half_cycle * (2 - x) / (4 * y)
end
end
if y != 0
return atan(num_class.infinity(y.sign))
elsif x < 0
return half_cycle.copy_sign(x)
else
return num_class.zero
end
end
def asin_base(x)
x = +x
return self.exception(Num::InvalidOperation, 'asin needs -1 <= x <= 1') if x.abs > 1
if x == -1
return -quarter_cycle
elsif x == 0
return num_class.zero
elsif x == 1
return quarter_cycle
end
num_class.context(self) do |local_context|
local_context.precision += 3
x = x/(1-x*x).sqrt
x = local_context.atan(x)
end
+x
end
def acos_base(x)
return self.exception(Num::InvalidOperation, 'acos needs -1 <= x <= 2') if x.abs > 1
if x == -1
return half_cycle
elsif x == 0
return quarter_cycle
elsif x == 1
return num_class.zero
end
required_precision = self.precision
if x < half
num_class.context(self, :precision=>required_precision+2) do
x = x/(1-x*x).sqrt
x = num_class.context.quarter_cycle - num_class.context.atan(x)
end
else
# valid for x>=0
num_class.context(self, :precision=>required_precision+3) do
# x = (1-x*x).sqrt # x*x may require double precision if x*x is near 1
x = (1-num_class.context(self, :precision=>required_precision*2){x*x}).sqrt
x = num_class.context.asin(x)
end
end
+x
end
def hypot_base(x, y)
num_class.context(self) do |local_context|
local_context.precision += 3
(x*x + y*y).sqrt
end
end
def pi2(decimals=nil)
num_class.context(self, :precision=>decimals) do |local_context|
local_context.pi*2
end
end
def invpi(decimals=nil)
num_class.context(self, :precision=>decimals) do |local_context|
num_class[1]/local_context.pi
end
end
def inv2pi(decimals=nil)
num_class.context(self, :precision=>decimals) do |local_context|
num_class.Num(1)/local_context.pi2
end
end
# class <<self
# private
def modtwopi(x)
return plus(num_class.context(self, :precision=>self.precision*3){x.modulo(one_cycle)})
end
# Reduce angle to [0,2Pi)
def reduce_angle(a)
modtwopi(a)
end
# Reduce angle to [0,Pi/k0) (result is not rounded to precision)
def reduce_angle2(a,k0=nil) # divisor of pi or nil for pi*2
# we could reduce first to pi*2 to avoid the mod k0 operation
k,r,divisor = num_class.context do
num_class.context.precision *= 3
m = k0.nil? ? one_cycle : half_cycle/k0
a.divmod(m)+[m]
end
[r, k.modulo(k0*2).to_i, divisor]
end
def one_cycle
case self.angle
when :rad
pi2
when :deg
num_class.Num(360)
when :grad
num_class.Num(400)
end
end
def half_cycle
case self.angle
when :rad
pi(num_class.context.precision)
when :deg
num_class.Num(180)
when :grad
num_class.Num(200)
end
end
def quarter_cycle
case self.angle
when :rad
half*pi(num_class.context.precision)
when :deg
num_class.Num(90)
when :grad
num_class.Num(100)
end
end
def to_rad(x)
case self.angle
when :rad
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){|lc| x*lc.pi/half_cycle})
end
end
def to_deg(x)
case self.angle
when :deg
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*num_class[180]/half_cycle})
end
end
def to_grad(x)
case self.angle
when :deg
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*num_class[200]/half_cycle})
end
end
def to_angle(angular_units, x)
return plus(x) if angular_units == self.angle
case angular_units
when :rad
to_rad(x)
when :deg
to_deg(x)
when :grad
to_grad(x)
end
end
def rad_to(x)
case self.angle
when :rad
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){|lc| x*half_cycle/lc.pi})
end
end
def deg_to(x)
case self.angle
when :deg
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*half_cycle/num_class[180]})
end
end
def grad_to(x)
case self.angle
when :grad
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*half_cycle/num_class[200]})
end
end
def angle_to(x, angular_units)
return plus(x) if angular_units == self.angle
case angular_units
when :rad
rad_to(x)
when :deg
deg_to(x)
when :grad
grad_to(x)
end
end
#end
end # Trigonometry
Num::ContextBase.class_eval{include Trigonometry}
class DecNum
module Trigonometry
# Pi
@pi_cache = nil # truncated pi digits as a string
@pi_cache_digits = 0
PI_MARGIN = 10
class <<self
attr_accessor :pi_cache, :pi_cache_digits
end
def pi(round_digits=nil)
round_digits ||= self.precision
digits = round_digits
if Trigonometry.pi_cache_digits <= digits # we need at least one more truncated digit
continue = true
while continue
margin = PI_MARGIN # margin to reduce recomputing with more digits to avoid ending in 0 or 5
digits += margin + 1
fudge = 10
unity = 10**(digits+fudge)
v = 4*(4*Trigonometry.iarccot(5, unity) - Trigonometry.iarccot(239, unity))
v = v.to_s[0,digits]
# if the last digit is 0 or 5 the truncated value may not be good for rounding
loop do
#last_digit = v%10
last_digit = v[-1,1].to_i
continue = (last_digit==5 || last_digit==0)
if continue && margin>0
# if we have margin we back-up one digit
margin -= 1
v = v[0...-1]
else
break
end
end
end
Trigonometry.pi_cache_digits = digits + margin - PI_MARGIN # @pi_cache.size
Trigonometry.pi_cache = v # DecNum(+1, v, 1-digits) # cache truncated value
end
# Now we avoid rounding too much because it is slow
l = round_digits + 1
while (l<Trigonometry.pi_cache_digits) && [0,5].include?(Trigonometry.pi_cache[l-1,1].to_i)
l += 1
end
v = Trigonometry.pi_cache[0,l]
num_class.context(self, :precision=>round_digits){+num_class.Num(+1,v.to_i,1-l)}
end
private
class <<self
def iarccot(x, unity)
xpow = unity / x
n = 1
sign = 1
sum = 0
loop do
term = xpow / n
break if term == 0
sum += sign * (xpow/n)
xpow /= x*x
n += 2
sign = -sign
end
sum
end
end
end # DecNum::Trigonometry
DecNum::Context.class_eval{include DecNum::Trigonometry}
end # DecNum
end # Flt
In the generic pi computation, the state is kept to be resumed to compute more digits; to achieve this Rational is used for computation.
require 'flt/dec_num'
module Flt
# Trigonometry functions. The angular units used by these functions can be specified
# with the +angle+ attribute of the context. The accepted values are:
# * :rad for radians
# * :deg for degrees
# * :grad for gradians
#
# These functions are injected in Context objects.
module Trigonometry
# Cosine of an angle given in the units specified by the context +angle+ attribute.
def cos(x)
cos_base(num_class[x])
end
# Sine of an angle given in the units specified by the context +angle+ attribute.
def sin(x)
sin_base(num_class[x])
end
# Tangent of an angle given in the units specified by the context +angle+ attribute.
def tan(x)
tan_base(num_class[x])
end
# Arc-tangent. The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi/2, pi/2]; it is in [-90,90] in degrees.
def atan(x)
atan_base(num_class[x])
end
# Arc-tangent with two arguments (principal value of the argument of the complex number x+i*y).
# The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi, pi]; it is in [-180,180] in degrees.
def atan2(y, x)
atan2_base(num_class[y], num_class[x])
end
# Arc-sine. The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi/2, pi/2]; it is in [-90,90] in degrees.
def asin(x)
asin_base(num_class[x])
end
# Arc-cosine. The result is in the units specified by the context +angle+ attribute.
# If the angular units are radians the result is in [-pi/2, pi/2]; it is in [-90,90] in degrees.
def acos(x)
acos_base(num_class[x])
end
# Length of the hypotenuse of a right-angle triangle (modulus or absolute value of the complex x+i*y).
def hypot(x, y)
hypot_base(num_class[x], num_class[y])
end
# Pi
def pi(round_digits=nil)
round_digits ||= self.precision
if Trigonometry.pi_digits < round_digits
# provisional implementation (very slow)
lasts = 0
t, s, n, na, d, da = Trigonometry.pi_cache
num_class.context(self) do |local_context|
local_context.precision = round_digits + 6
tol = Rational(1,num_class.int_radix_power(local_context.precision+1))
while (s-lasts)>tol
lasts = s
n, na = n+na, na+8
d, da = d+da, da+32
t = (t * n) / d
s += t
end
Trigonometry.pi_value = num_class[s]
Trigonometry.pi_digits = round_digits
Trigonometry.pi_cache = [t, s, n, na, d, da]
end
end
num_class.context(self, :precision=>round_digits){+Trigonometry.pi_value}
end
def e(digits=nil)
num_class.context(self) do |local_context|
local_context.precision = digits if digits
num_class.Num(1).exp
end
end
def half
num_class['0.5']
end
protected
@pi_value = nil
@pi_digits = 0
@pi_cache = [Rational(3), Rational(3), 1, 0, 0, 24]
class <<self
attr_accessor :pi_value, :pi_digits, :pi_cache
end
def cos_base(x)
x = x.copy_sign(+1) # note that abs rounds; copy_sign does not.
rev_sign = false
s = nil
num_class.context(self) do |local_context|
local_context.precision += 3 # extra digits for intermediate steps
x,k,pi_2 = local_context.reduce_angle2(x,2)
rev_sign = true if k>1
if k % 2 == 0
x = pi_2 - x
else
rev_sign = !rev_sign
end
x = local_context.to_rad(x)
i, lasts, fact, num = 1, 0, 1, num_class[x]
s = num
x2 = -x*x
while s != lasts
lasts = s
i += 2
fact *= i * (i-1)
num *= x2
s += num / fact
end
end
return rev_sign ? minus(s) : plus(s)
end
def sin_base(x)
sign = x.sign
s = nil
num_class.context(self) do |local_context|
local_context.precision += 3 # extra digits for intermediate steps
x = x.copy_sign(+1) if sign<0
x,k,pi_2 = local_context.reduce_angle2(x,2)
sign = -sign if k>1
x = pi_2 - x if k % 2 == 1
x = local_context.to_rad(x)
i, lasts, fact, num = 1, 0, 1, num_class[x]
s = num
x2 = -x*x
while s != lasts
lasts = s
i += 2
fact *= i * (i-1)
num *= x2
s += num / fact
end
end
return plus(s).copy_sign(sign)
end
def tan_base(x)
plus(num_class.context(self) do |local_context|
local_context.precision += 2 # extra digits for intermediate steps
s,c = local_context.sin(x), local_context.cos(x)
s/c
end)
end
def atan_base(x)
s = nil
conversion = true
extra_prec = num_class.radix==2 ? 4 : 2
num_class.context(self) do |local_context|
local_context.precision += extra_prec
if x == 0
return num_class.zero
elsif x.abs > 1
if x.infinite?
s = local_context.quarter_cycle.copy_sign(x)
conversion = false
break
else
# c = (quarter_cycle).copy_sign(x)
c = (half*local_context.pi).copy_sign(x)
x = 1 / x
end
end
local_context.precision += extra_prec
x_squared = x ** 2
if x_squared.zero? || x_squared.subnormal?
s = x
s = c - s if c && c!=0
break
end
y = x_squared / (1 + x_squared)
y_over_x = y / x
i = num_class.zero; lasts = 0; s = y_over_x; coeff = 1; num = y_over_x
while s != lasts
lasts = s
i += 2
coeff *= i / (i + 1)
num *= y
s += coeff * num
end
if c && c!= 0
s = c - s
end
end
return conversion ? rad_to(s) : plus(s)
end
def atan2_base(y, x)
abs_y = y.abs
abs_x = x.abs
y_is_real = !x.infinite?
if x != 0
if y_is_real
a = y!=0 ? atan(y / x) : num_class.zero
a += half_cycle.copy_sign(y) if x < 0
return a
elsif abs_y == abs_x
one = num_class[1]
x = one.copy_sign(x)
y = one.copy_sign(y)
return half_cycle * (2 - x) / (4 * y)
end
end
if y != 0
return atan(num_class.infinity(y.sign))
elsif x < 0
return half_cycle.copy_sign(x)
else
return num_class.zero
end
end
def asin_base(x)
x = +x
return self.exception(Num::InvalidOperation, 'asin needs -1 <= x <= 1') if x.abs > 1
if x == -1
return -quarter_cycle
elsif x == 0
return num_class.zero
elsif x == 1
return quarter_cycle
end
num_class.context(self) do |local_context|
local_context.precision += 3
x = x/(1-x*x).sqrt
x = local_context.atan(x)
end
+x
end
def acos_base(x)
return self.exception(Num::InvalidOperation, 'acos needs -1 <= x <= 2') if x.abs > 1
if x == -1
return half_cycle
elsif x == 0
return quarter_cycle
elsif x == 1
return num_class.zero
end
required_precision = self.precision
if x < half
num_class.context(self, :precision=>required_precision+2) do
x = x/(1-x*x).sqrt
x = num_class.context.quarter_cycle - num_class.context.atan(x)
end
else
# valid for x>=0
num_class.context(self, :precision=>required_precision+3) do
# x = (1-x*x).sqrt # x*x may require double precision if x*x is near 1
x = (1-num_class.context(self, :precision=>required_precision*2){x*x}).sqrt
x = num_class.context.asin(x)
end
end
+x
end
def hypot_base(x, y)
num_class.context(self) do |local_context|
local_context.precision += 3
(x*x + y*y).sqrt
end
end
def pi2(decimals=nil)
num_class.context(self, :precision=>decimals) do |local_context|
local_context.pi*2
end
end
def invpi(decimals=nil)
num_class.context(self, :precision=>decimals) do |local_context|
num_class[1]/local_context.pi
end
end
def inv2pi(decimals=nil)
num_class.context(self, :precision=>decimals) do |local_context|
num_class.Num(1)/local_context.pi2
end
end
# class <<self
# private
def modtwopi(x)
return plus(num_class.context(self, :precision=>self.precision*3){x.modulo(one_cycle)})
end
# Reduce angle to [0,2Pi)
def reduce_angle(a)
modtwopi(a)
end
# Reduce angle to [0,Pi/k0) (result is not rounded to precision).
# Returns [r, k, m] where a = k*m + r, with m = half_cycle/k0
# (or m = one_cycle when k0 is nil) and k reduced modulo 2*k0.
# NOTE(review): with k0 == nil the final k.modulo(k0*2) would raise;
# callers appear to always pass k0 — confirm before using the nil path.
def reduce_angle2(a,k0=nil) # divisor of pi or nil for pi*2
# we could reduce first to pi*2 to avoid the mod k0 operation
k,r,divisor = num_class.context do
num_class.context.precision *= 3
m = k0.nil? ? one_cycle : half_cycle/k0
a.divmod(m)+[m]
end
[r, k.modulo(k0*2).to_i, divisor]
end
# One full turn in the context's angular units: 2*pi for :rad,
# 360 for :deg, 400 for :grad (nil for any other setting).
def one_cycle
  units = self.angle
  if units == :rad
    pi2
  elsif units == :deg
    num_class.Num(360)
  elsif units == :grad
    num_class.Num(400)
  end
end
# Half a turn in the context's angular units: pi (at the current
# context precision) for :rad, 180 for :deg, 200 for :grad.
def half_cycle
case self.angle
when :rad
pi(num_class.context.precision)
when :deg
num_class.Num(180)
when :grad
num_class.Num(200)
end
end
# A quarter turn in the context's angular units: pi/2 for :rad,
# 90 for :deg, 100 for :grad.
def quarter_cycle
case self.angle
when :rad
half*pi(num_class.context.precision)
when :deg
num_class.Num(90)
when :grad
num_class.Num(100)
end
end
# Convert +x+ from the context's angular units to radians
# (3 extra working digits; plus() rounds the result).
def to_rad(x)
case self.angle
when :rad
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){|lc| x*lc.pi/half_cycle})
end
end
# Convert +x+ from the context's angular units to degrees
# (3 extra working digits; plus() rounds the result).
def to_deg(x)
case self.angle
when :deg
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*num_class[180]/half_cycle})
end
end
# Convert +x+ from the context's angular units to gradians
# (3 extra working digits; plus() rounds the result).
def to_grad(x)
  case self.angle
  # Fix: the pass-through case must match :grad. It previously matched
  # :deg, so degree input was returned unconverted and gradian input was
  # needlessly rescaled. Mirrors to_rad/to_deg and the inverse grad_to.
  when :grad
    plus(x)
  else
    plus(num_class.context(self, :extra_precision=>3){x*num_class[200]/half_cycle})
  end
end
# Convert +x+ from the context's angular units to +angular_units+
# (:rad, :deg or :grad); no-op (rounding only) when they already match.
def to_angle(angular_units, x)
return plus(x) if angular_units == self.angle
case angular_units
when :rad
to_rad(x)
when :deg
to_deg(x)
when :grad
to_grad(x)
end
end
# Convert +x+ from radians to the context's angular units
# (3 extra working digits; plus() rounds the result).
def rad_to(x)
case self.angle
when :rad
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){|lc| x*half_cycle/lc.pi})
end
end
# Convert +x+ from degrees to the context's angular units
# (3 extra working digits; plus() rounds the result).
def deg_to(x)
case self.angle
when :deg
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*half_cycle/num_class[180]})
end
end
# Convert +x+ from gradians to the context's angular units
# (3 extra working digits; plus() rounds the result).
def grad_to(x)
case self.angle
when :grad
plus(x)
else
plus(num_class.context(self, :extra_precision=>3){x*half_cycle/num_class[200]})
end
end
# Convert +x+ from +angular_units+ (:rad, :deg or :grad) to the
# context's angular units; rounding-only when they already match.
def angle_to(x, angular_units)
  return plus(x) if angular_units == self.angle
  if angular_units == :rad
    rad_to(x)
  elsif angular_units == :deg
    deg_to(x)
  elsif angular_units == :grad
    grad_to(x)
  end
end
#end
end # Trigonometry
Num::ContextBase.class_eval{include Trigonometry}
class DecNum
module Trigonometry
# Pi
@pi_cache = nil # truncated pi digits as a string
@pi_cache_digits = 0
PI_MARGIN = 10
class <<self
attr_accessor :pi_cache, :pi_cache_digits
end
# Pi rounded to +round_digits+ digits (context precision by default).
# Uses Machin's formula 4*(4*arccot(1/5) - arccot(1/239)) on scaled
# integers, then caches the truncated digit string module-wide so
# later calls at up-to the cached precision avoid recomputation.
def pi(round_digits=nil)
round_digits ||= self.precision
digits = round_digits
if Trigonometry.pi_cache_digits <= digits # we need at least one more truncated digit
continue = true
while continue
margin = PI_MARGIN # margin to reduce recomputing with more digits to avoid ending in 0 or 5
digits += margin + 1
fudge = 10
unity = 10**(digits+fudge)
v = 4*(4*Trigonometry.iarccot(5, unity) - Trigonometry.iarccot(239, unity))
v = v.to_s[0,digits]
# if the last digit is 0 or 5 the truncated value may not be good for rounding
loop do
#last_digit = v%10
last_digit = v[-1,1].to_i
continue = (last_digit==5 || last_digit==0)
if continue && margin>0
# if we have margin we back-up one digit
margin -= 1
v = v[0...-1]
else
break
end
end
end
Trigonometry.pi_cache_digits = digits + margin - PI_MARGIN # @pi_cache.size
Trigonometry.pi_cache = v # DecNum(+1, v, 1-digits) # cache truncated value
end
# Now we avoid rounding too much because it is slow
l = round_digits + 1
while (l<Trigonometry.pi_cache_digits) && [0,5].include?(Trigonometry.pi_cache[l-1,1].to_i)
l += 1
end
v = Trigonometry.pi_cache[0,l]
num_class.context(self, :precision=>round_digits){+num_class.Num(+1,v.to_i,1-l)}
end
private
class <<self
# Integer Taylor series for unity * arccot(x) = unity * arctan(1/x):
# sum over k of (-1)^k * unity / (x^(2k+1) * (2k+1)), stopping when a
# term truncates to zero. Used by pi() via Machin's formula.
def iarccot(x, unity)
  xpow = unity / x
  n = 1
  sign = 1
  sum = 0
  loop do
    term = xpow / n
    break if term == 0
    # Reuse the already-computed term (previously xpow/n was divided twice).
    sum += sign * term
    xpow /= x*x
    n += 2
    sign = -sign
  end
  sum
end
end
end # DecNum::Trigonometry
DecNum::Context.class_eval{include DecNum::Trigonometry}
end # DecNum
end # Flt |
module Flume
  # Logger device backed by Redis: each line is LPUSHed onto a capped
  # list (for `tail`) and published on a pub/sub channel (for `tailf`).
  class LogDevice
    lazy_accessor :redis
    lazy_accessor :cap
    lazy_accessor :step
    lazy_accessor :cycle
    lazy_accessor :list

    # Accepts an options hash and/or a configuration block; the block
    # receives an OpenStruct on which redis/cap/step/cycle/list are set.
    def initialize(*args, &block)
      options = args.last.is_a?(Hash) ? args.pop : {}
      @config = OpenStruct.new(options)
      block.call(@config) if block
      # Fix: dereference OpenStruct fields with method access instead of
      # hash-style @config[:key] (not supported on older OpenStructs).
      @redis = @config.redis || proc { Redis.new }
      @cap = @config.cap || (2 ** 16)
      @step = @config.step || 0
      @cycle = @config.cycle || (2 ** 8)
      @list = @config.list || 'flume:log'
    end

    # Pub/sub channel name derived from the list name.
    def channel
      "flume:#{list}"
    end

    # Append +message+ to the list; push failures are reported on STDERR
    # but never raised. The message is always published, and the list is
    # trimmed back to +cap+ once per +cycle+ writes.
    def write(message)
      begin
        redis.lpush(list, message)
      rescue Object => e
        # NOTE(review): rescuing Object is broader than StandardError;
        # kept as-is to preserve the original best-effort behavior.
        error = "#{ e.message } (#{ e.class })\n#{ Array(e.backtrace).join(10.chr) }"
        STDERR.puts(error)
        STDERR.puts(message)
      end
    ensure
      redis.publish(channel, message)
      if (step % cycle).zero?
        truncate(cap) rescue nil
      end
      self.step = (step + 1) % cycle
    end

    def close
      redis.quit rescue nil
    end

    # Most recent +n+ entries, oldest first.
    def tail(n = 80)
      redis.lrange(list, 0, n - 1).reverse
    end

    # Block forever, yielding each published message to +block+;
    # reconnects after a 1s pause on connection errors.
    def tailf(&block)
      begin
        redis.subscribe(channel) do |on|
          on.message do |channel, message|
            block.call(message)
          end
        end
      rescue Redis::BaseConnectionError => error
        puts "#{error}, retrying in 1s"
        sleep 1
        retry
      end
    end

    # Trim the list down to its newest +n+ entries.
    def truncate(n)
      redis.ltrim(list, 0, n - 1)
    end

    def size
      redis.llen(list)
    end
  end
end
Fix: dereference OpenStruct config properties as methods (e.g. `@config.redis`) instead of hash-style access (`@config[:redis]`).
module Flume
# Logger device backed by Redis: each line is LPUSHed onto a capped
# list (for tail) and published on a pub/sub channel (for tailf).
class LogDevice
lazy_accessor :redis
lazy_accessor :cap
lazy_accessor :step
lazy_accessor :cycle
lazy_accessor :list
# Accepts an options hash and/or a configuration block; the block
# receives an OpenStruct on which redis/cap/step/cycle/list are set.
# Config fields are read with method access (OpenStruct style).
def initialize(*args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
@config = OpenStruct.new(options)
block.call(@config) if block
@redis = @config.redis || proc { Redis.new }
@cap = @config.cap || (2 ** 16)
@step = @config.step || 0
@cycle = @config.cycle || (2 ** 8)
@list = @config.list || 'flume:log'
end
# Pub/sub channel name derived from the list name.
def channel
"flume:#{list}"
end
# Append +message+ to the list; push failures are reported on STDERR
# but never raised. The message is always published, and the list is
# trimmed back to +cap+ once per +cycle+ writes.
def write(message)
begin
redis.lpush(list, message)
rescue Object => e
error = "#{ e.message } (#{ e.class })\n#{ Array(e.backtrace).join(10.chr) }"
STDERR.puts(error)
STDERR.puts(message)
end
ensure
redis.publish(channel, message)
if (step % cycle).zero?
truncate(cap) rescue nil
end
self.step = (step + 1) % cycle
end
def close
redis.quit rescue nil
end
# Most recent +n+ entries, oldest first.
def tail(n = 80)
redis.lrange(list, 0, n - 1).reverse
end
# Block forever, yielding each published message to +block+;
# reconnects after a 1s pause on connection errors.
def tailf(&block)
begin
redis.subscribe(channel) do |on|
on.message do |channel, message|
block.call(message)
end
end
rescue Redis::BaseConnectionError => error
puts "#{error}, retrying in 1s"
sleep 1
retry
end
end
# Trim the list down to its newest +n+ entries.
def truncate(n)
redis.ltrim(list, 0, n - 1)
end
def size
redis.llen(list)
end
end
end
|
# This file contains all of the rules that ship with foodcritic.
#
# * Foodcritic rules perform static code analysis - rather than the cookbook
# code being loaded by the interpreter it is parsed into a tree (AST) that is
# then passed to each rule.
# * Rules can use a number of API functions that ship with foodcritic to make
# sense of the parse tree.
# * Rules can also use XPath to query the AST. A rule can consist of a XPath
# query only, as any nodes returned from a `recipe` block will be converted
# into warnings.
rule 'FC001',
'Use strings in preference to symbols to access node attributes' do
tags %w(style attributes)
recipe do |ast|
attribute_access(ast, type: :symbol)
end
end
rule 'FC002', 'Avoid string interpolation where not required' do
tags %w(style strings)
recipe do |ast|
ast.xpath(%q{//*[self::string_literal | self::assoc_new]/string_add[
count(descendant::string_embexpr) = 1 and
count(string_add) = 0]})
end
end
rule 'FC003',
'Check whether you are running with chef server before using'\
' server-specific features' do
tags %w(portability solo)
recipe do |ast, filename|
unless checks_for_chef_solo?(ast) || chef_solo_search_supported?(filename)
searches(ast)
end
end
end
rule 'FC004', 'Use a service resource to start and stop services' do
tags %w(style services)
recipe do |ast|
find_resources(ast, type: 'execute').find_all do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
(cmd_str.include?('/etc/init.d') || ['service ', '/sbin/service ',
'start ', 'stop ', 'invoke-rc.d '].any? do |service_cmd|
cmd_str.start_with?(service_cmd)
end) && %w(start stop restart reload).any? { |a| cmd_str.include?(a) }
end
end
end
rule 'FC005', 'Avoid repetition of resource declarations' do
tags %w(style)
recipe do |ast|
resources = find_resources(ast).map do |res|
resource_attributes(res).merge({ type: resource_type(res),
ast: res })
end.chunk do |res|
res[:type] +
res[:ast].xpath("ancestor::*[self::if | self::unless | self::elsif |
self::else | self::when | self::method_add_block/call][position() = 1]/
descendant::pos[position() = 1]").to_s +
res[:ast].xpath("ancestor::method_add_block/command[
ident/@value='action']/args_add_block/descendant::ident/@value").to_s
end.reject { |res| res[1].size < 3 }
resources.map do |cont_res|
first_resource = cont_res[1][0][:ast]
# we have contiguous resources of the same type, but do they share the
# same attributes?
sorted_atts = cont_res[1].map do |atts|
atts.delete_if { |k| k == :ast }.to_a.sort do |x, y|
x.first.to_s <=> y.first.to_s
end
end
first_resource if sorted_atts.all? do |att|
(att - sorted_atts.inject { |atts, a| atts & a }).length == 1
end
end.compact
end
end
rule 'FC006',
'Mode should be quoted or fully specified when '\
'setting file permissions' do
tags %w(correctness files)
recipe do |ast|
ast.xpath(%q{//ident[@value='mode']/parent::command/
descendant::int[string-length(@value) < 5
and not(starts-with(@value, "0")
and string-length(@value) = 4)][count(ancestor::aref) = 0]/
ancestor::method_add_block})
end
end
rule 'FC007', 'Ensure recipe dependencies are reflected '\
'in cookbook metadata' do
tags %w(correctness metadata)
recipe do |ast, filename|
metadata_path = Pathname.new(
File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
next unless File.exist? metadata_path
actual_included = included_recipes(ast, with_partial_names: false)
undeclared = actual_included.keys.map do |recipe|
recipe.split('::').first
end - [cookbook_name(filename)] -
declared_dependencies(read_ast(metadata_path))
actual_included.map do |recipe, include_stmts|
if undeclared.include?(recipe) ||
undeclared.any? { |u| recipe.start_with?("#{u}::") }
include_stmts
end
end.flatten.compact
end
end
rule 'FC008', 'Generated cookbook metadata needs updating' do
tags %w(style metadata)
metadata do |ast, filename|
{
'maintainer' => 'YOUR_COMPANY_NAME',
'maintainer_email' => 'YOUR_EMAIL'
}.map do |field, value|
ast.xpath(%Q(//command[ident/@value='#{field}']/
descendant::tstring_content[@value='#{value}']))
end
end
end
rule 'FC009', 'Resource attribute not recognised' do
tags %w(correctness)
recipe do |ast|
matches = []
resource_attributes_by_type(ast).each do |type, resources|
resources.each do |resource|
resource.keys.map(&:to_sym).reject do |att|
resource_attribute?(type.to_sym, att)
end.each do |invalid_att|
matches << find_resources(ast, type: type).find do |res|
resource_attributes(res).include?(invalid_att.to_s)
end
end
end
end
matches
end
end
rule 'FC010', 'Invalid search syntax' do
tags %w(correctness search)
recipe do |ast|
# This only works for literal search strings
literal_searches(ast).reject { |search| valid_query?(search['value']) }
end
end
rule 'FC011', 'Missing README in markdown format' do
tags %w(style readme)
cookbook do |filename|
unless File.exist?(File.join(filename, 'README.md'))
[file_match(File.join(filename, 'README.md'))]
end
end
end
rule 'FC012', 'Use Markdown for README rather than RDoc' do
tags %w(style readme)
cookbook do |filename|
if File.exist?(File.join(filename, 'README.rdoc'))
[file_match(File.join(filename, 'README.rdoc'))]
end
end
end
rule 'FC013', 'Use file_cache_path rather than hard-coding tmp paths' do
tags %w(style files)
recipe do |ast|
find_resources(ast, type: 'remote_file').find_all do |download|
path = (resource_attribute(download, 'path') ||
resource_name(download)).to_s
path.start_with?('/tmp/')
end
end
end
rule 'FC014', 'Consider extracting long ruby_block to library' do
tags %w(style libraries)
recipe do |ast|
find_resources(ast, type: 'ruby_block').find_all do |rb|
lines = rb.xpath("descendant::fcall[ident/@value='block']/../../
descendant::*[@line]/@line").map { |n| n.value.to_i }.sort
(!lines.empty?) && (lines.last - lines.first) > 15
end
end
end
rule 'FC015', 'Consider converting definition to a Custom Resource' do
tags %w(style definitions lwrp)
applies_to { |version| version >= gem_version('0.7.12') }
cookbook do |dir|
Dir[File.join(dir, 'definitions', '*.rb')].reject do |entry|
['.', '..'].include? entry
end.map { |entry| file_match(entry) }
end
end
rule 'FC016', 'LWRP does not declare a default action' do
tags %w(correctness lwrp)
applies_to { |version| version >= gem_version('0.7.12') }
resource do |ast, filename|
unless ["//ident/@value='default_action'",
"//def/bodystmt/descendant::assign/
var_field/ivar/@value='@action'"].any? { |expr| ast.xpath(expr) }
[file_match(filename)]
end
end
end
rule 'FC017', 'LWRP does not notify when updated' do
tags %w(correctness lwrp)
applies_to do |version|
version >= gem_version('0.7.12')
end
provider do |ast, filename|
use_inline_resources = !ast.xpath('//*[self::vcall or self::var_ref]/ident
[@value="use_inline_resources"]').empty?
unless use_inline_resources
actions = ast.xpath('//method_add_block/command[ident/@value="action"]/
args_add_block/descendant::symbol/ident')
actions.reject do |action|
blk = action.xpath('ancestor::command[1]/
following-sibling::*[self::do_block or self::brace_block]')
empty = !blk.xpath('stmts_add/void_stmt').empty?
converge_by = !blk.xpath('descendant::*[self::command or self::fcall]
/ident[@value="converge_by"]').empty?
updated_by_last_action = !blk.xpath('descendant::*[self::call or
self::command_call]/*[self::vcall or self::var_ref/ident/
@value="new_resource"]/../ident[@value="updated_by_last_action"]
').empty?
empty || converge_by || updated_by_last_action
end
end
end
end
rule 'FC018', 'LWRP uses deprecated notification syntax' do
tags %w(style lwrp deprecated)
applies_to { |version| version >= gem_version('0.9.10') }
provider do |ast|
ast.xpath("//assign/var_field/ivar[@value='@updated']").map do |class_var|
match(class_var)
end + ast.xpath(%q{//assign/field/*[self::vcall or self::var_ref/ident/
@value='new_resource']/../ident[@value='updated']})
end
end
rule 'FC019', 'Access node attributes in a consistent manner' do
tags %w(style attributes)
cookbook do |cookbook_dir|
asts = {}; files = Dir["#{cookbook_dir}/*/*.rb"].reject do |file|
relative_path = Pathname.new(file).relative_path_from(
Pathname.new(cookbook_dir))
relative_path.to_s.split(File::SEPARATOR).include?('spec')
end.map do |file|
{ path: file, ast: read_ast(file) }
end
types = [:string, :symbol, :vivified].map do |type|
{
access_type: type, count: files.map do |file|
attribute_access(file[:ast], type: type, ignore_calls: true,
cookbook_dir: cookbook_dir, ignore: 'run_state').tap do |ast|
unless ast.empty?
(asts[type] ||= []) << { ast: ast, path: file[:path] }
end
end.size
end.inject(:+)
}
end.reject { |type| type[:count] == 0 }
if asts.size > 1
least_used = asts[types.min do |a, b|
a[:count] <=> b[:count]
end[:access_type]]
least_used.map do |file|
file[:ast].map do |ast|
match(ast).merge(filename: file[:path])
end.flatten
end
end
end
end
rule 'FC021', 'Resource condition in provider may not behave as expected' do
tags %w(correctness lwrp)
applies_to { |version| version >= gem_version('0.10.6') }
provider do |ast|
find_resources(ast).map do |resource|
condition = resource.xpath(%q{//method_add_block/
descendant::ident[@value='not_if' or @value='only_if']/
ancestor::*[self::method_add_block or self::command][1][descendant::
ident/@value='new_resource']/ancestor::stmts_add[2]/method_add_block/
command[count(descendant::string_embexpr) = 0]})
condition
end.compact
end
end
rule 'FC022', 'Resource condition within loop may not behave as expected' do
tags %w(correctness)
applies_to { |version| version >= gem_version('0.10.6') }
recipe do |ast|
ast.xpath("//call[ident/@value='each']/../do_block[count(ancestor::
method_add_block/method_add_arg/fcall/ident[@value='only_if' or
@value = 'not_if']) = 0]").map do |lp|
block_vars = lp.xpath('block_var/params/child::*').map do |n|
n.name.sub(/^ident/, '')
end + lp.xpath('block_var/params/child::*/descendant::ident').map do |v|
v['value']
end
find_resources(lp).map do |resource|
# if any of the parameters to the block are used in a condition then we
# have a match
unless (block_vars &
(resource.xpath(%q{descendant::ident[@value='not_if' or
@value='only_if']/ancestor::*[self::method_add_block or
self::command][1]/descendant::ident/@value}).map do |a|
a.value
end)).empty?
c = resource.xpath('command[count(descendant::string_embexpr) = 0]')
if resource.xpath('command/ident/@value').first.value == 'define'
next
end
resource unless c.empty? || block_vars.any? do |var|
!resource.xpath(%Q(command/args_add_block/args_add/
var_ref/ident[@value='#{var}'])).empty?
end
end
end
end.flatten.compact
end
end
rule 'FC023', 'Prefer conditional attributes' do
tags %w(style)
recipe do |ast|
ast.xpath(%q{//method_add_block[command/ident][count(descendant::ident
[@value='only_if' or @value='not_if']) = 0]/ancestor::*[self::if or
self::unless][count(descendant::method_add_block[command/ident]) = 1]
[count(stmts_add/method_add_block/call) = 0]
[count(stmts_add/stmts_add) = 0]
[count(descendant::*[self::else or self::elsif]) = 0]})
end
end
rule 'FC024', 'Consider adding platform equivalents' do
tags %w(portability)
RHEL = %w(amazon centos redhat scientific oracle)
recipe do |ast, filename|
next if Pathname.new(filename).basename.to_s == 'metadata.rb'
metadata_path = Pathname.new(
File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
md_platforms = if File.exist?(metadata_path)
supported_platforms(read_ast(
metadata_path)).map { |p| p[:platform] }
else
[]
end
md_platforms = RHEL if md_platforms.empty?
['//method_add_arg[fcall/ident/@value="platform?"]/
arg_paren/args_add_block',
'//when'].map do |expr|
ast.xpath(expr).map do |whn|
platforms = whn.xpath('args_add/
descendant::tstring_content').map do |p|
p['value']
end.sort
unless platforms.size == 1 || (md_platforms & platforms).empty?
whn unless (platforms & RHEL).empty? ||
((md_platforms & RHEL) - (platforms & RHEL)).empty?
end
end.compact
end.flatten
end
end
rule 'FC025', 'Prefer chef_gem to compile-time gem install' do
tags %w(style deprecated)
applies_to { |version| version >= gem_version('0.10.10') }
recipe do |ast|
gem_install = ast.xpath("//stmts_add/assign[method_add_block[command/ident/
@value='gem_package'][do_block/stmts_add/command[ident/@value='action']
[descendant::ident/@value='nothing']]]")
gem_install.map do |install|
gem_var = install.xpath('var_field/ident/@value')
unless ast.xpath("//method_add_arg[call/
var_ref/ident/@value='#{gem_var}']
[arg_paren/descendant::ident/@value='install' or
arg_paren/descendant::ident/@value='upgrade']").empty?
gem_install
end
end
end
end
rule 'FC026', 'Conditional execution block attribute contains only string' do
tags %w(correctness)
applies_to { |version| version >= gem_version('0.7.4') }
recipe do |ast|
find_resources(ast).map { |r| resource_attributes(r) }.map do |resource|
[resource['not_if'], resource['only_if']]
end.flatten.compact.select do |condition|
condition.respond_to?(:xpath) &&
!condition.xpath('descendant::string_literal').empty? &&
!condition.xpath('stmts_add/string_literal').empty? &&
condition.xpath('descendant::stmts_add[count(ancestor::
string_literal) = 0]').size == 1
end
end
end
rule 'FC027', 'Resource sets internal attribute' do
tags %w(correctness)
recipe do |ast|
find_resources(ast, type: :service).map do |service|
service unless (resource_attributes(service).keys &
['enabled', 'running']).empty?
end.compact
end
end
rule 'FC028', 'Incorrect #platform? usage' do
tags %w(correctness)
recipe do |ast|
ast.xpath(%q{//*[self::call | self::command_call]
[(var_ref|vcall)/ident/@value='node']
[ident/@value="platform?"]})
end
end
rule 'FC029', 'No leading cookbook name in recipe metadata' do
tags %w(correctness metadata)
metadata do |ast, filename|
ast.xpath('//command[ident/@value="recipe"]').map do |declared_recipe|
next unless declared_recipe.xpath('count(//vcall|//var_ref)').to_i == 0
recipe_name = declared_recipe.xpath('args_add_block/
descendant::tstring_content[1]/@value').to_s
unless recipe_name.empty? ||
recipe_name.split('::').first == cookbook_name(filename.to_s)
declared_recipe
end
end.compact
end
end
rule 'FC030', 'Cookbook contains debugger breakpoints' do
tags %w(annoyances)
def pry_bindings(ast)
ast.xpath('//call[(vcall|var_ref)/ident/@value="binding"]
[ident/@value="pry"]')
end
recipe { |ast| pry_bindings(ast) }
library { |ast| pry_bindings(ast) }
metadata { |ast| pry_bindings(ast) }
template { |ast| pry_bindings(ast) }
end
rule 'FC031', 'Cookbook without metadata file' do
  tags %w(correctness metadata)
  cookbook do |filename|
    # Idiom: `unless` instead of `if !` for the negative existence check.
    unless File.exist?(File.join(filename, 'metadata.rb'))
      [file_match(File.join(filename, 'metadata.rb'))]
    end
  end
end
rule 'FC032', 'Invalid notification timing' do
tags %w(correctness notifications)
recipe do |ast|
find_resources(ast).select do |resource|
notifications(resource).any? do |notification|
! [:delayed, :immediate].include? notification[:timing]
end
end
end
end
rule 'FC033', 'Missing template' do
tags %w(correctness)
recipe do |ast, filename|
find_resources(ast, type: :template).reject do |resource|
resource_attributes(resource)['local'] ||
resource_attributes(resource)['cookbook']
end.map do |resource|
file = template_file(resource_attributes(resource,
return_expressions: true))
{ resource: resource, file: file }
end.reject do |resource|
resource[:file].respond_to?(:xpath)
end.select do |resource|
template_paths(filename).none? do |path|
relative_path = []
Pathname.new(path).ascend do |template_path|
relative_path << template_path.basename
break if template_path.dirname.dirname.basename.to_s == 'templates'
end
File.join(relative_path.reverse) == resource[:file]
end
end.map { |resource| resource[:resource] }
end
end
rule 'FC034', 'Unused template variables' do
tags %w(correctness)
recipe do |ast, filename|
Array(resource_attributes_by_type(ast)['template']).select do |t|
t['variables'] && t['variables'].respond_to?(:xpath)
end.map do |resource|
all_templates = template_paths(filename)
template_paths = all_templates.select do |path|
File.basename(path) == template_file(resource)
end
next unless template_paths.any?
passed_vars = resource['variables'].xpath(
'symbol/ident/@value').map { |tv| tv.to_s }
unused_vars_exist = template_paths.all? do |template_path|
begin
template_vars = templates_included(
all_templates, template_path).map do |template|
read_ast(template).xpath('//var_ref/ivar/@value').map do |v|
v.to_s.sub(/^@/, '')
end
end.flatten
! (passed_vars - template_vars).empty?
rescue RecursedTooFarError
false
end
end
file_match(template_paths.first) if unused_vars_exist
end.compact
end
end
rule 'FC037', 'Invalid notification action' do
tags %w(correctness)
recipe do |ast|
find_resources(ast).select do |resource|
notifications(resource).any? do |n|
type = case n[:type]
when :notifies then n[:resource_type]
when :subscribes then resource_type(resource).to_sym
end
n[:action].size > 0 && !resource_action?(type, n[:action])
end
end
end
end
rule 'FC038', 'Invalid resource action' do
tags %w(correctness)
recipe do |ast|
find_resources(ast).select do |resource|
actions = resource_attributes(resource)['action']
if actions.respond_to?(:xpath)
actions = actions.xpath('descendant::array/
descendant::symbol/ident/@value')
else
actions = Array(actions)
end
actions.reject { |a| a.to_s.empty? }.any? do |action|
!resource_action?(resource_type(resource), action)
end
end
end
end
rule 'FC039', 'Node method cannot be accessed with key' do
tags %w(correctness)
recipe do |ast|
[{ type: :string, path: '@value' },
{ type: :symbol, path: 'ident/@value' }].map do |access_type|
attribute_access(ast, type: access_type[:type]).select do |att|
att_name = att.xpath(access_type[:path]).to_s.to_sym
att_name != :tags && chef_node_methods.include?(att_name)
end.select do |att|
!att.xpath('ancestor::args_add_block[position() = 1]
[preceding-sibling::vcall | preceding-sibling::var_ref]').empty?
end.select do |att|
att_type = att.xpath('ancestor::args_add_block[position() = 1]
/../var_ref/ident/@value').to_s
ast.xpath("//assign/var_field/ident[@value='#{att_type}']").empty?
end
end.flatten
end
end
rule 'FC040', 'Execute resource used to run git commands' do
tags %w(style recipe etsy)
recipe do |ast|
possible_git_commands = %w( clone fetch pull checkout reset )
find_resources(ast, type: 'execute').select do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
actual_git_commands = cmd_str.scan(/git ([a-z]+)/).map { |c| c.first }
(possible_git_commands & actual_git_commands).any?
end
end
end
rule 'FC041', 'Execute resource used to run curl or wget commands' do
tags %w(style recipe etsy)
recipe do |ast|
find_resources(ast, type: 'execute').select do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
(cmd_str.match(/^curl.*(-o|>|--output).*$/) || cmd_str.include?('wget '))
end
end
end
rule 'FC042', 'Prefer include_recipe to require_recipe' do
tags %w(deprecated)
recipe do |ast|
ast.xpath('//command[ident/@value="require_recipe"]')
end
end
rule 'FC043', 'Prefer new notification syntax' do
tags %w(style notifications deprecated)
applies_to { |version| version >= gem_version('0.9.10') }
recipe do |ast|
find_resources(ast).select do |resource|
notifications(resource).any? { |notify| notify[:style] == :old }
end
end
end
rule 'FC044', 'Avoid bare attribute keys' do
tags %w(style)
attributes do |ast|
declared = ast.xpath('//descendant::var_field/ident/@value').map do |v|
v.to_s
end
ast.xpath('//assign/*[self::vcall or self::var_ref]
[count(child::kw) = 0]/ident').select do |v|
local_declared = v.xpath("ancestor::*[self::brace_block or self::do_block]
/block_var/descendant::ident/@value").map do |v|
v.to_s
end
(v['value'] != 'secure_password') &&
!(declared + local_declared).uniq.include?(v['value']) &&
!v.xpath("ancestor::*[self::brace_block or self::do_block]/block_var/
descendant::ident/@value='#{v['value']}'")
end
end
end
rule 'FC045', 'Metadata does not contain cookbook name' do
tags %w(correctness metadata chef12)
metadata do |ast, filename|
unless ast.xpath('descendant::stmts_add/command/ident/@value="name"')
[file_match(filename)]
end
end
cookbook do |filename|
if !File.exist?(File.join(filename, 'metadata.rb'))
[file_match(File.join(filename, 'metadata.rb'))]
end
end
end
rule 'FC046', 'Attribute assignment uses assign unless nil' do
  # Consistency fix: every other rule in this file declares tags; FC046
  # had none, so it was invisible to tag-based selection (e.g. -t style).
  tags %w(style attributes)
  attributes do |ast|
    attribute_access(ast).map do |a|
      a.xpath('ancestor::opassign/op[@value="||="]')
    end
  end
end
rule 'FC047', 'Attribute assignment does not specify precedence' do
tags %w(attributes correctness chef11)
recipe do |ast|
attribute_access(ast).map do |att|
exclude_att_types = '[count(following-sibling::ident[
is_att_type(@value) or @value = "run_state"]) = 0]'
att.xpath(%Q(ancestor::assign[*[self::field | self::aref_field]
[descendant::*[self::vcall | self::var_ref][ident/@value="node"]
#{exclude_att_types}]]), AttFilter.new) +
att.xpath(%Q{ancestor::binary[@value="<<"]/*[position() = 1]
[self::aref]
[descendant::*[self::vcall | self::var_ref]#{exclude_att_types}
/ident/@value="node"]}, AttFilter.new)
end
end
end
rule 'FC048', 'Prefer Mixlib::ShellOut' do
tags %w(style processes)
recipe do |ast|
xstring_literal=ast.xpath('//xstring_literal')
next xstring_literal if xstring_literal.any?
ast.xpath('//*[self::command or self::fcall]/ident[@value="system"]').select do |x|
resource_name = x.xpath('ancestor::do_block/preceding-sibling::command/ident/@value')
if resource_name.any? && resource_attribute?(resource_name.to_s, 'system')
next false
end
next x.xpath('count(following-sibling::args_add_block/descendant::kw[@value="true" or @value="false"]) = 0')
end
end
end
rule 'FC049', 'Role name does not match containing file name' do
tags %w(style roles)
role do |ast, filename|
role_name_specified = field_value(ast, :name)
role_name_file = Pathname.new(filename).basename.sub_ext('').to_s
if role_name_specified && role_name_specified != role_name_file
field(ast, :name)
end
end
end
rule 'FC050', 'Name includes invalid characters' do
tags %w(correctness environments roles)
def invalid_name(ast)
field(ast, :name) unless field_value(ast, :name) =~ /^[a-zA-Z0-9_\-]+$/
end
environment { |ast| invalid_name(ast) }
role { |ast| invalid_name(ast) }
end
# Flag templates whose chain of partial includes never terminates.
rule 'FC051', 'Template partials loop indefinitely' do
  tags %w(correctness)
  recipe do |_, filename|
    templates = template_paths(filename)
    looping = templates.select do |template|
      begin
        # templates_included raises when it recurses too deeply — that is
        # how an include cycle manifests during static analysis.
        templates_included(templates, template)
        false
      rescue RecursedTooFarError
        true
      end
    end
    looping.map { |path| file_match(path) }
  end
end
# "suggests" parses in metadata.rb but has never been implemented by Chef.
rule 'FC052', 'Metadata uses the unimplemented "suggests" keyword' do
  tags %w(style metadata)
  metadata do |ast, _filename|
    ast.xpath("//command[ident/@value='suggests']")
  end
end
# "recommends" parses in metadata.rb but has never been implemented by Chef.
rule 'FC053', 'Metadata uses the unimplemented "recommends" keyword' do
  tags %w(style metadata)
  metadata do |ast, _filename|
    ast.xpath("//command[ident/@value='recommends']")
  end
end
# NOTE: FC054 was yanked and should be considered reserved, do not reuse it
# metadata.rb should declare a maintainer.
rule 'FC055', 'Ensure maintainer is set in metadata' do
  tags %w(correctness metadata)
  metadata do |ast, filename|
    maintainer_declared = field(ast, 'maintainer').any?
    [file_match(filename)] unless maintainer_declared
  end
end
# metadata.rb should declare a maintainer_email.
rule 'FC056', 'Ensure maintainer_email is set in metadata' do
  tags %w(correctness metadata)
  metadata do |ast, filename|
    email_declared = field(ast, 'maintainer_email').any?
    [file_match(filename)] unless email_declared
  end
end
# Providers built on Chef::Provider::LWRPBase in library code should call
# use_inline_resources so their embedded resources report updates.
rule 'FC057', 'Library provider does not declare use_inline_resources' do
  tags %w(correctness)
  library do |ast, filename|
    # Select class definitions referencing ...LWRPBase ... Provider..., then
    # keep those with no use_inline_resources call.
    # NOTE(review): the inner query starts with // so it searches the whole
    # document rather than just the matched class subtree — confirm intended.
    ast.xpath('//const_path_ref/const[@value="LWRPBase"]/..//const[@value="Provider"]/../../..').select do |x|
      x.xpath('//*[self::vcall or self::var_ref]/ident[@value="use_inline_resources"]').empty?
    end
  end
end
# Declaring use_inline_resources while also defining #action_<name> methods
# is contradictory: inline resource compilation only applies to `action`
# blocks, so such methods indicate a mixed, likely broken, provider style.
rule 'FC058', 'Library provider declares use_inline_resources and declares #action_<name> methods' do
  tags %w(correctness)
  library do |ast, filename|
    ast.xpath('//const_path_ref/const[@value="LWRPBase"]/..//const[@value="Provider"]/../../..').select do |x|
      # A Nokogiri NodeSet is truthy even when empty, so the previous
      # `xpath(...) && xpath(...)` selected every LWRPBase provider class.
      # Check emptiness explicitly so the rule fires only when both a
      # use_inline_resources call and an action_* method are present.
      !x.xpath('//*[self::vcall or self::var_ref]/ident[@value="use_inline_resources"]').empty? &&
        !x.xpath(%Q(//def[ident[contains(@value, 'action_')]])).empty?
    end
  end
end
# LWRP providers should opt in to inline resource compilation so embedded
# resources can notify correctly.
rule 'FC059', 'LWRP provider does not declare use_inline_resources' do
  tags %w(correctness)
  provider do |ast, filename|
    declares_inline = !ast.xpath('//*[self::vcall or self::var_ref]/ident
      [@value="use_inline_resources"]').empty?
    [file_match(filename)] unless declares_inline
  end
end
# use_inline_resources changes how actions are compiled; combining it with
# def action_<name> methods (instead of `action` blocks) is inconsistent.
rule 'FC060', 'LWRP provider declares use_inline_resources and declares #action_<name> methods' do
  tags %w(correctness)
  provider do |ast, filename|
    # True when the provider calls use_inline_resources anywhere.
    use_inline_resources = !ast.xpath('//*[self::vcall or self::var_ref]/ident
      [@value="use_inline_resources"]').empty?
    if use_inline_resources
      # Flag every method definition whose name contains "action_".
      ast.xpath(%Q(//def[ident[contains(@value, 'action_')]]))
    end
  end
end
# Cookbook versions must look like "1.2" or "1.2.3".
rule 'FC061', 'Valid cookbook versions are of the form x.y or x.y.z' do
  tags %w{metadata correctness}
  metadata do |ast, filename|
    # Only literal, non-interpolated version strings can be checked statically.
    version = ast.xpath("//command[ident/@value='version']//string_literal[not(.//string_embexpr)]//tstring_content/@value")
    next if version.empty?
    [file_match(filename)] unless version.to_s =~ /\A\d+\.\d+(\.\d+)?\z/
  end
end
# metadata.rb should declare a version.
rule 'FC062', 'Cookbook should have version metadata' do
  tags %w{metadata}
  metadata do |ast, filename|
    version_declared = field(ast, 'version').any?
    [file_match(filename)] unless version_declared
  end
end
# use_inline_resources checks apply to Chef 11+ (commit message merged into
# the file during dataset extraction; not Ruby code)
# This file contains all of the rules that ship with foodcritic.
#
# * Foodcritic rules perform static code analysis - rather than the cookbook
# code being loaded by the interpreter it is parsed into a tree (AST) that is
# then passed to each rule.
# * Rules can use a number of API functions that ship with foodcritic to make
# sense of the parse tree.
# * Rules can also use XPath to query the AST. A rule can consist of a XPath
# query only, as any nodes returned from a `recipe` block will be converted
# into warnings.
rule 'FC001',
'Use strings in preference to symbols to access node attributes' do
tags %w(style attributes)
recipe do |ast|
attribute_access(ast, type: :symbol)
end
end
rule 'FC002', 'Avoid string interpolation where not required' do
tags %w(style strings)
recipe do |ast|
ast.xpath(%q{//*[self::string_literal | self::assoc_new]/string_add[
count(descendant::string_embexpr) = 1 and
count(string_add) = 0]})
end
end
rule 'FC003',
'Check whether you are running with chef server before using'\
' server-specific features' do
tags %w(portability solo)
recipe do |ast, filename|
unless checks_for_chef_solo?(ast) || chef_solo_search_supported?(filename)
searches(ast)
end
end
end
rule 'FC004', 'Use a service resource to start and stop services' do
tags %w(style services)
recipe do |ast|
find_resources(ast, type: 'execute').find_all do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
(cmd_str.include?('/etc/init.d') || ['service ', '/sbin/service ',
'start ', 'stop ', 'invoke-rc.d '].any? do |service_cmd|
cmd_str.start_with?(service_cmd)
end) && %w(start stop restart reload).any? { |a| cmd_str.include?(a) }
end
end
end
rule 'FC005', 'Avoid repetition of resource declarations' do
tags %w(style)
recipe do |ast|
resources = find_resources(ast).map do |res|
resource_attributes(res).merge({ type: resource_type(res),
ast: res })
end.chunk do |res|
res[:type] +
res[:ast].xpath("ancestor::*[self::if | self::unless | self::elsif |
self::else | self::when | self::method_add_block/call][position() = 1]/
descendant::pos[position() = 1]").to_s +
res[:ast].xpath("ancestor::method_add_block/command[
ident/@value='action']/args_add_block/descendant::ident/@value").to_s
end.reject { |res| res[1].size < 3 }
resources.map do |cont_res|
first_resource = cont_res[1][0][:ast]
# we have contiguous resources of the same type, but do they share the
# same attributes?
sorted_atts = cont_res[1].map do |atts|
atts.delete_if { |k| k == :ast }.to_a.sort do |x, y|
x.first.to_s <=> y.first.to_s
end
end
first_resource if sorted_atts.all? do |att|
(att - sorted_atts.inject { |atts, a| atts & a }).length == 1
end
end.compact
end
end
rule 'FC006',
'Mode should be quoted or fully specified when '\
'setting file permissions' do
tags %w(correctness files)
recipe do |ast|
ast.xpath(%q{//ident[@value='mode']/parent::command/
descendant::int[string-length(@value) < 5
and not(starts-with(@value, "0")
and string-length(@value) = 4)][count(ancestor::aref) = 0]/
ancestor::method_add_block})
end
end
rule 'FC007', 'Ensure recipe dependencies are reflected '\
'in cookbook metadata' do
tags %w(correctness metadata)
recipe do |ast, filename|
metadata_path = Pathname.new(
File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
next unless File.exist? metadata_path
actual_included = included_recipes(ast, with_partial_names: false)
undeclared = actual_included.keys.map do |recipe|
recipe.split('::').first
end - [cookbook_name(filename)] -
declared_dependencies(read_ast(metadata_path))
actual_included.map do |recipe, include_stmts|
if undeclared.include?(recipe) ||
undeclared.any? { |u| recipe.start_with?("#{u}::") }
include_stmts
end
end.flatten.compact
end
end
rule 'FC008', 'Generated cookbook metadata needs updating' do
tags %w(style metadata)
metadata do |ast, filename|
{
'maintainer' => 'YOUR_COMPANY_NAME',
'maintainer_email' => 'YOUR_EMAIL'
}.map do |field, value|
ast.xpath(%Q(//command[ident/@value='#{field}']/
descendant::tstring_content[@value='#{value}']))
end
end
end
rule 'FC009', 'Resource attribute not recognised' do
tags %w(correctness)
recipe do |ast|
matches = []
resource_attributes_by_type(ast).each do |type, resources|
resources.each do |resource|
resource.keys.map(&:to_sym).reject do |att|
resource_attribute?(type.to_sym, att)
end.each do |invalid_att|
matches << find_resources(ast, type: type).find do |res|
resource_attributes(res).include?(invalid_att.to_s)
end
end
end
end
matches
end
end
rule 'FC010', 'Invalid search syntax' do
tags %w(correctness search)
recipe do |ast|
# This only works for literal search strings
literal_searches(ast).reject { |search| valid_query?(search['value']) }
end
end
rule 'FC011', 'Missing README in markdown format' do
tags %w(style readme)
cookbook do |filename|
unless File.exist?(File.join(filename, 'README.md'))
[file_match(File.join(filename, 'README.md'))]
end
end
end
rule 'FC012', 'Use Markdown for README rather than RDoc' do
tags %w(style readme)
cookbook do |filename|
if File.exist?(File.join(filename, 'README.rdoc'))
[file_match(File.join(filename, 'README.rdoc'))]
end
end
end
rule 'FC013', 'Use file_cache_path rather than hard-coding tmp paths' do
tags %w(style files)
recipe do |ast|
find_resources(ast, type: 'remote_file').find_all do |download|
path = (resource_attribute(download, 'path') ||
resource_name(download)).to_s
path.start_with?('/tmp/')
end
end
end
rule 'FC014', 'Consider extracting long ruby_block to library' do
tags %w(style libraries)
recipe do |ast|
find_resources(ast, type: 'ruby_block').find_all do |rb|
lines = rb.xpath("descendant::fcall[ident/@value='block']/../../
descendant::*[@line]/@line").map { |n| n.value.to_i }.sort
(!lines.empty?) && (lines.last - lines.first) > 15
end
end
end
rule 'FC015', 'Consider converting definition to a Custom Resource' do
tags %w(style definitions lwrp)
applies_to { |version| version >= gem_version('0.7.12') }
cookbook do |dir|
Dir[File.join(dir, 'definitions', '*.rb')].reject do |entry|
['.', '..'].include? entry
end.map { |entry| file_match(entry) }
end
end
rule 'FC016', 'LWRP does not declare a default action' do
tags %w(correctness lwrp)
applies_to { |version| version >= gem_version('0.7.12') }
resource do |ast, filename|
unless ["//ident/@value='default_action'",
"//def/bodystmt/descendant::assign/
var_field/ivar/@value='@action'"].any? { |expr| ast.xpath(expr) }
[file_match(filename)]
end
end
end
rule 'FC017', 'LWRP does not notify when updated' do
tags %w(correctness lwrp)
applies_to do |version|
version >= gem_version('0.7.12')
end
provider do |ast, filename|
use_inline_resources = !ast.xpath('//*[self::vcall or self::var_ref]/ident
[@value="use_inline_resources"]').empty?
unless use_inline_resources
actions = ast.xpath('//method_add_block/command[ident/@value="action"]/
args_add_block/descendant::symbol/ident')
actions.reject do |action|
blk = action.xpath('ancestor::command[1]/
following-sibling::*[self::do_block or self::brace_block]')
empty = !blk.xpath('stmts_add/void_stmt').empty?
converge_by = !blk.xpath('descendant::*[self::command or self::fcall]
/ident[@value="converge_by"]').empty?
updated_by_last_action = !blk.xpath('descendant::*[self::call or
self::command_call]/*[self::vcall or self::var_ref/ident/
@value="new_resource"]/../ident[@value="updated_by_last_action"]
').empty?
empty || converge_by || updated_by_last_action
end
end
end
end
rule 'FC018', 'LWRP uses deprecated notification syntax' do
tags %w(style lwrp deprecated)
applies_to { |version| version >= gem_version('0.9.10') }
provider do |ast|
ast.xpath("//assign/var_field/ivar[@value='@updated']").map do |class_var|
match(class_var)
end + ast.xpath(%q{//assign/field/*[self::vcall or self::var_ref/ident/
@value='new_resource']/../ident[@value='updated']})
end
end
rule 'FC019', 'Access node attributes in a consistent manner' do
tags %w(style attributes)
cookbook do |cookbook_dir|
asts = {}; files = Dir["#{cookbook_dir}/*/*.rb"].reject do |file|
relative_path = Pathname.new(file).relative_path_from(
Pathname.new(cookbook_dir))
relative_path.to_s.split(File::SEPARATOR).include?('spec')
end.map do |file|
{ path: file, ast: read_ast(file) }
end
types = [:string, :symbol, :vivified].map do |type|
{
access_type: type, count: files.map do |file|
attribute_access(file[:ast], type: type, ignore_calls: true,
cookbook_dir: cookbook_dir, ignore: 'run_state').tap do |ast|
unless ast.empty?
(asts[type] ||= []) << { ast: ast, path: file[:path] }
end
end.size
end.inject(:+)
}
end.reject { |type| type[:count] == 0 }
if asts.size > 1
least_used = asts[types.min do |a, b|
a[:count] <=> b[:count]
end[:access_type]]
least_used.map do |file|
file[:ast].map do |ast|
match(ast).merge(filename: file[:path])
end.flatten
end
end
end
end
rule 'FC021', 'Resource condition in provider may not behave as expected' do
tags %w(correctness lwrp)
applies_to { |version| version >= gem_version('0.10.6') }
provider do |ast|
find_resources(ast).map do |resource|
condition = resource.xpath(%q{//method_add_block/
descendant::ident[@value='not_if' or @value='only_if']/
ancestor::*[self::method_add_block or self::command][1][descendant::
ident/@value='new_resource']/ancestor::stmts_add[2]/method_add_block/
command[count(descendant::string_embexpr) = 0]})
condition
end.compact
end
end
rule 'FC022', 'Resource condition within loop may not behave as expected' do
tags %w(correctness)
applies_to { |version| version >= gem_version('0.10.6') }
recipe do |ast|
ast.xpath("//call[ident/@value='each']/../do_block[count(ancestor::
method_add_block/method_add_arg/fcall/ident[@value='only_if' or
@value = 'not_if']) = 0]").map do |lp|
block_vars = lp.xpath('block_var/params/child::*').map do |n|
n.name.sub(/^ident/, '')
end + lp.xpath('block_var/params/child::*/descendant::ident').map do |v|
v['value']
end
find_resources(lp).map do |resource|
# if any of the parameters to the block are used in a condition then we
# have a match
unless (block_vars &
(resource.xpath(%q{descendant::ident[@value='not_if' or
@value='only_if']/ancestor::*[self::method_add_block or
self::command][1]/descendant::ident/@value}).map do |a|
a.value
end)).empty?
c = resource.xpath('command[count(descendant::string_embexpr) = 0]')
if resource.xpath('command/ident/@value').first.value == 'define'
next
end
resource unless c.empty? || block_vars.any? do |var|
!resource.xpath(%Q(command/args_add_block/args_add/
var_ref/ident[@value='#{var}'])).empty?
end
end
end
end.flatten.compact
end
end
rule 'FC023', 'Prefer conditional attributes' do
tags %w(style)
recipe do |ast|
ast.xpath(%q{//method_add_block[command/ident][count(descendant::ident
[@value='only_if' or @value='not_if']) = 0]/ancestor::*[self::if or
self::unless][count(descendant::method_add_block[command/ident]) = 1]
[count(stmts_add/method_add_block/call) = 0]
[count(stmts_add/stmts_add) = 0]
[count(descendant::*[self::else or self::elsif]) = 0]})
end
end
rule 'FC024', 'Consider adding platform equivalents' do
tags %w(portability)
RHEL = %w(amazon centos redhat scientific oracle)
recipe do |ast, filename|
next if Pathname.new(filename).basename.to_s == 'metadata.rb'
metadata_path = Pathname.new(
File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
md_platforms = if File.exist?(metadata_path)
supported_platforms(read_ast(
metadata_path)).map { |p| p[:platform] }
else
[]
end
md_platforms = RHEL if md_platforms.empty?
['//method_add_arg[fcall/ident/@value="platform?"]/
arg_paren/args_add_block',
'//when'].map do |expr|
ast.xpath(expr).map do |whn|
platforms = whn.xpath('args_add/
descendant::tstring_content').map do |p|
p['value']
end.sort
unless platforms.size == 1 || (md_platforms & platforms).empty?
whn unless (platforms & RHEL).empty? ||
((md_platforms & RHEL) - (platforms & RHEL)).empty?
end
end.compact
end.flatten
end
end
rule 'FC025', 'Prefer chef_gem to compile-time gem install' do
tags %w(style deprecated)
applies_to { |version| version >= gem_version('0.10.10') }
recipe do |ast|
gem_install = ast.xpath("//stmts_add/assign[method_add_block[command/ident/
@value='gem_package'][do_block/stmts_add/command[ident/@value='action']
[descendant::ident/@value='nothing']]]")
gem_install.map do |install|
gem_var = install.xpath('var_field/ident/@value')
unless ast.xpath("//method_add_arg[call/
var_ref/ident/@value='#{gem_var}']
[arg_paren/descendant::ident/@value='install' or
arg_paren/descendant::ident/@value='upgrade']").empty?
gem_install
end
end
end
end
rule 'FC026', 'Conditional execution block attribute contains only string' do
tags %w(correctness)
applies_to { |version| version >= gem_version('0.7.4') }
recipe do |ast|
find_resources(ast).map { |r| resource_attributes(r) }.map do |resource|
[resource['not_if'], resource['only_if']]
end.flatten.compact.select do |condition|
condition.respond_to?(:xpath) &&
!condition.xpath('descendant::string_literal').empty? &&
!condition.xpath('stmts_add/string_literal').empty? &&
condition.xpath('descendant::stmts_add[count(ancestor::
string_literal) = 0]').size == 1
end
end
end
rule 'FC027', 'Resource sets internal attribute' do
tags %w(correctness)
recipe do |ast|
find_resources(ast, type: :service).map do |service|
service unless (resource_attributes(service).keys &
['enabled', 'running']).empty?
end.compact
end
end
rule 'FC028', 'Incorrect #platform? usage' do
tags %w(correctness)
recipe do |ast|
ast.xpath(%q{//*[self::call | self::command_call]
[(var_ref|vcall)/ident/@value='node']
[ident/@value="platform?"]})
end
end
rule 'FC029', 'No leading cookbook name in recipe metadata' do
tags %w(correctness metadata)
metadata do |ast, filename|
ast.xpath('//command[ident/@value="recipe"]').map do |declared_recipe|
next unless declared_recipe.xpath('count(//vcall|//var_ref)').to_i == 0
recipe_name = declared_recipe.xpath('args_add_block/
descendant::tstring_content[1]/@value').to_s
unless recipe_name.empty? ||
recipe_name.split('::').first == cookbook_name(filename.to_s)
declared_recipe
end
end.compact
end
end
rule 'FC030', 'Cookbook contains debugger breakpoints' do
tags %w(annoyances)
def pry_bindings(ast)
ast.xpath('//call[(vcall|var_ref)/ident/@value="binding"]
[ident/@value="pry"]')
end
recipe { |ast| pry_bindings(ast) }
library { |ast| pry_bindings(ast) }
metadata { |ast| pry_bindings(ast) }
template { |ast| pry_bindings(ast) }
end
rule 'FC031', 'Cookbook without metadata file' do
tags %w(correctness metadata)
cookbook do |filename|
if !File.exist?(File.join(filename, 'metadata.rb'))
[file_match(File.join(filename, 'metadata.rb'))]
end
end
end
rule 'FC032', 'Invalid notification timing' do
tags %w(correctness notifications)
recipe do |ast|
find_resources(ast).select do |resource|
notifications(resource).any? do |notification|
! [:delayed, :immediate].include? notification[:timing]
end
end
end
end
rule 'FC033', 'Missing template' do
tags %w(correctness)
recipe do |ast, filename|
find_resources(ast, type: :template).reject do |resource|
resource_attributes(resource)['local'] ||
resource_attributes(resource)['cookbook']
end.map do |resource|
file = template_file(resource_attributes(resource,
return_expressions: true))
{ resource: resource, file: file }
end.reject do |resource|
resource[:file].respond_to?(:xpath)
end.select do |resource|
template_paths(filename).none? do |path|
relative_path = []
Pathname.new(path).ascend do |template_path|
relative_path << template_path.basename
break if template_path.dirname.dirname.basename.to_s == 'templates'
end
File.join(relative_path.reverse) == resource[:file]
end
end.map { |resource| resource[:resource] }
end
end
rule 'FC034', 'Unused template variables' do
tags %w(correctness)
recipe do |ast, filename|
Array(resource_attributes_by_type(ast)['template']).select do |t|
t['variables'] && t['variables'].respond_to?(:xpath)
end.map do |resource|
all_templates = template_paths(filename)
template_paths = all_templates.select do |path|
File.basename(path) == template_file(resource)
end
next unless template_paths.any?
passed_vars = resource['variables'].xpath(
'symbol/ident/@value').map { |tv| tv.to_s }
unused_vars_exist = template_paths.all? do |template_path|
begin
template_vars = templates_included(
all_templates, template_path).map do |template|
read_ast(template).xpath('//var_ref/ivar/@value').map do |v|
v.to_s.sub(/^@/, '')
end
end.flatten
! (passed_vars - template_vars).empty?
rescue RecursedTooFarError
false
end
end
file_match(template_paths.first) if unused_vars_exist
end.compact
end
end
rule 'FC037', 'Invalid notification action' do
tags %w(correctness)
recipe do |ast|
find_resources(ast).select do |resource|
notifications(resource).any? do |n|
type = case n[:type]
when :notifies then n[:resource_type]
when :subscribes then resource_type(resource).to_sym
end
n[:action].size > 0 && !resource_action?(type, n[:action])
end
end
end
end
rule 'FC038', 'Invalid resource action' do
tags %w(correctness)
recipe do |ast|
find_resources(ast).select do |resource|
actions = resource_attributes(resource)['action']
if actions.respond_to?(:xpath)
actions = actions.xpath('descendant::array/
descendant::symbol/ident/@value')
else
actions = Array(actions)
end
actions.reject { |a| a.to_s.empty? }.any? do |action|
!resource_action?(resource_type(resource), action)
end
end
end
end
rule 'FC039', 'Node method cannot be accessed with key' do
tags %w(correctness)
recipe do |ast|
[{ type: :string, path: '@value' },
{ type: :symbol, path: 'ident/@value' }].map do |access_type|
attribute_access(ast, type: access_type[:type]).select do |att|
att_name = att.xpath(access_type[:path]).to_s.to_sym
att_name != :tags && chef_node_methods.include?(att_name)
end.select do |att|
!att.xpath('ancestor::args_add_block[position() = 1]
[preceding-sibling::vcall | preceding-sibling::var_ref]').empty?
end.select do |att|
att_type = att.xpath('ancestor::args_add_block[position() = 1]
/../var_ref/ident/@value').to_s
ast.xpath("//assign/var_field/ident[@value='#{att_type}']").empty?
end
end.flatten
end
end
rule 'FC040', 'Execute resource used to run git commands' do
tags %w(style recipe etsy)
recipe do |ast|
possible_git_commands = %w( clone fetch pull checkout reset )
find_resources(ast, type: 'execute').select do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
actual_git_commands = cmd_str.scan(/git ([a-z]+)/).map { |c| c.first }
(possible_git_commands & actual_git_commands).any?
end
end
end
rule 'FC041', 'Execute resource used to run curl or wget commands' do
tags %w(style recipe etsy)
recipe do |ast|
find_resources(ast, type: 'execute').select do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
(cmd_str.match(/^curl.*(-o|>|--output).*$/) || cmd_str.include?('wget '))
end
end
end
rule 'FC042', 'Prefer include_recipe to require_recipe' do
tags %w(deprecated)
recipe do |ast|
ast.xpath('//command[ident/@value="require_recipe"]')
end
end
rule 'FC043', 'Prefer new notification syntax' do
tags %w(style notifications deprecated)
applies_to { |version| version >= gem_version('0.9.10') }
recipe do |ast|
find_resources(ast).select do |resource|
notifications(resource).any? { |notify| notify[:style] == :old }
end
end
end
rule 'FC044', 'Avoid bare attribute keys' do
tags %w(style)
attributes do |ast|
declared = ast.xpath('//descendant::var_field/ident/@value').map do |v|
v.to_s
end
ast.xpath('//assign/*[self::vcall or self::var_ref]
[count(child::kw) = 0]/ident').select do |v|
local_declared = v.xpath("ancestor::*[self::brace_block or self::do_block]
/block_var/descendant::ident/@value").map do |v|
v.to_s
end
(v['value'] != 'secure_password') &&
!(declared + local_declared).uniq.include?(v['value']) &&
!v.xpath("ancestor::*[self::brace_block or self::do_block]/block_var/
descendant::ident/@value='#{v['value']}'")
end
end
end
rule 'FC045', 'Metadata does not contain cookbook name' do
tags %w(correctness metadata chef12)
metadata do |ast, filename|
unless ast.xpath('descendant::stmts_add/command/ident/@value="name"')
[file_match(filename)]
end
end
cookbook do |filename|
if !File.exist?(File.join(filename, 'metadata.rb'))
[file_match(File.join(filename, 'metadata.rb'))]
end
end
end
rule 'FC046', 'Attribute assignment uses assign unless nil' do
attributes do |ast|
attribute_access(ast).map do |a|
a.xpath('ancestor::opassign/op[@value="||="]')
end
end
end
rule 'FC047', 'Attribute assignment does not specify precedence' do
tags %w(attributes correctness chef11)
recipe do |ast|
attribute_access(ast).map do |att|
exclude_att_types = '[count(following-sibling::ident[
is_att_type(@value) or @value = "run_state"]) = 0]'
att.xpath(%Q(ancestor::assign[*[self::field | self::aref_field]
[descendant::*[self::vcall | self::var_ref][ident/@value="node"]
#{exclude_att_types}]]), AttFilter.new) +
att.xpath(%Q{ancestor::binary[@value="<<"]/*[position() = 1]
[self::aref]
[descendant::*[self::vcall | self::var_ref]#{exclude_att_types}
/ident/@value="node"]}, AttFilter.new)
end
end
end
rule 'FC048', 'Prefer Mixlib::ShellOut' do
tags %w(style processes)
recipe do |ast|
xstring_literal=ast.xpath('//xstring_literal')
next xstring_literal if xstring_literal.any?
ast.xpath('//*[self::command or self::fcall]/ident[@value="system"]').select do |x|
resource_name = x.xpath('ancestor::do_block/preceding-sibling::command/ident/@value')
if resource_name.any? && resource_attribute?(resource_name.to_s, 'system')
next false
end
next x.xpath('count(following-sibling::args_add_block/descendant::kw[@value="true" or @value="false"]) = 0')
end
end
end
rule 'FC049', 'Role name does not match containing file name' do
tags %w(style roles)
role do |ast, filename|
role_name_specified = field_value(ast, :name)
role_name_file = Pathname.new(filename).basename.sub_ext('').to_s
if role_name_specified && role_name_specified != role_name_file
field(ast, :name)
end
end
end
# Warn when an environment or role declares a name containing characters
# outside [a-zA-Z0-9_-].
rule 'FC050', 'Name includes invalid characters' do
  tags %w(correctness environments roles)
  # Returns the name field node when the declared name is invalid (nil,
  # i.e. no name declared at all, never matches the pattern and so warns).
  def invalid_name(ast)
    # \A/\z anchor the whole string. The previous ^/$ anchors match
    # per-line, so a name with an embedded newline (e.g. "good\nbad chars")
    # would incorrectly pass validation.
    field(ast, :name) unless field_value(ast, :name) =~ /\A[a-zA-Z0-9_\-]+\z/
  end
  environment { |ast| invalid_name(ast) }
  role { |ast| invalid_name(ast) }
end
rule 'FC051', 'Template partials loop indefinitely' do
tags %w(correctness)
recipe do |_, filename|
cbk_templates = template_paths(filename)
cbk_templates.select do |template|
begin
templates_included(cbk_templates, template)
false
rescue RecursedTooFarError
true
end
end.map { |t| file_match(t) }
end
end
rule 'FC052', 'Metadata uses the unimplemented "suggests" keyword' do
tags %w(style metadata)
metadata do |ast, filename|
ast.xpath(%Q(//command[ident/@value='suggests']))
end
end
rule 'FC053', 'Metadata uses the unimplemented "recommends" keyword' do
tags %w(style metadata)
metadata do |ast, filename|
ast.xpath(%Q(//command[ident/@value='recommends']))
end
end
# NOTE: FC054 was yanked and should be considered reserved, do not reuse it
rule 'FC055', 'Ensure maintainer is set in metadata' do
tags %w(correctness metadata)
metadata do |ast, filename|
[file_match(filename)] unless field(ast, 'maintainer').any?
end
end
rule 'FC056', 'Ensure maintainer_email is set in metadata' do
tags %w(correctness metadata)
metadata do |ast, filename|
[file_match(filename)] unless field(ast, 'maintainer_email').any?
end
end
rule 'FC057', 'Library provider does not declare use_inline_resources' do
tags %w(correctness)
applies_to do |version|
version >= gem_version('11.0.0')
end
library do |ast, filename|
ast.xpath('//const_path_ref/const[@value="LWRPBase"]/..//const[@value="Provider"]/../../..').select do |x|
x.xpath('//*[self::vcall or self::var_ref]/ident[@value="use_inline_resources"]').empty?
end
end
end
# Declaring use_inline_resources while also defining #action_<name> methods
# is contradictory: inline resource compilation only applies to `action`
# blocks, so such methods indicate a mixed, likely broken, provider style.
rule 'FC058', 'Library provider declares use_inline_resources and declares #action_<name> methods' do
  tags %w(correctness)
  applies_to do |version|
    version >= gem_version('11.0.0')
  end
  library do |ast, filename|
    ast.xpath('//const_path_ref/const[@value="LWRPBase"]/..//const[@value="Provider"]/../../..').select do |x|
      # A Nokogiri NodeSet is truthy even when empty, so the previous
      # `xpath(...) && xpath(...)` selected every LWRPBase provider class.
      # Check emptiness explicitly so the rule fires only when both a
      # use_inline_resources call and an action_* method are present.
      !x.xpath('//*[self::vcall or self::var_ref]/ident[@value="use_inline_resources"]').empty? &&
        !x.xpath(%Q(//def[ident[contains(@value, 'action_')]])).empty?
    end
  end
end
rule 'FC059', 'LWRP provider does not declare use_inline_resources' do
tags %w(correctness)
applies_to do |version|
version >= gem_version('11.0.0')
end
provider do |ast, filename|
use_inline_resources = !ast.xpath('//*[self::vcall or self::var_ref]/ident
[@value="use_inline_resources"]').empty?
unless use_inline_resources
[file_match(filename)]
end
end
end
rule 'FC060', 'LWRP provider declares use_inline_resources and declares #action_<name> methods' do
tags %w(correctness)
applies_to do |version|
version >= gem_version('11.0.0')
end
provider do |ast, filename|
use_inline_resources = !ast.xpath('//*[self::vcall or self::var_ref]/ident
[@value="use_inline_resources"]').empty?
if use_inline_resources
ast.xpath(%Q(//def[ident[contains(@value, 'action_')]]))
end
end
end
rule 'FC061', 'Valid cookbook versions are of the form x.y or x.y.z' do
tags %w{metadata correctness}
metadata do |ast, filename|
# matches a version method with a string literal with no interpolation
ver = ast.xpath(%Q(//command[ident/@value='version']//string_literal[not(.//string_embexpr)]//tstring_content/@value))
if !ver.empty? && ver.to_s !~ /\A\d+\.\d+(\.\d+)?\z/
[file_match(filename)]
end
end
end
rule 'FC062', 'Cookbook should have version metadata' do
tags %w{metadata}
metadata do |ast, filename|
[file_match(filename)] unless field(ast, 'version').any?
end
end
# | (stray table-separator artifact from dataset extraction)
# This file contains all of the rules that ship with foodcritic.
#
# * Foodcritic rules perform static code analysis - rather than the cookbook code
# being loaded by the interpreter it is parsed into a tree (AST) that is then
# passed to each rule.
# * Rules can use a number of API functions that ship with foodcritic to make
# sense of the parse tree.
# * Rules can also use XPath to query the AST. A rule can consist of a XPath
# query only, as any nodes returned from a `recipe` block will be converted
# into warnings.
rule "FC002", "Avoid string interpolation where not required" do
tags %w{style strings}
recipe do |ast|
ast.xpath(%q{//*[self::string_literal | self::assoc_new]/string_add[
count(descendant::string_embexpr) = 1 and
count(tstring_content|string_add/tstring_content) = 0]})
end
end
rule "FC003",
"Check whether you are running with chef server before using" +
" server-specific features" do
tags %w{portability solo}
recipe do |ast,filename|
unless checks_for_chef_solo?(ast) or chef_solo_search_supported?(filename)
searches(ast)
end
end
end
rule "FC004", "Use a service resource to start and stop services" do
tags %w{style services}
recipe do |ast|
find_resources(ast, :type => 'execute').find_all do |cmd|
cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
(cmd_str.include?('/etc/init.d') || ['service ', '/sbin/service ',
'start ', 'stop ', 'invoke-rc.d '].any? do |service_cmd|
cmd_str.start_with?(service_cmd)
end) && %w{start stop restart reload}.any?{|a| cmd_str.include?(a)}
end
end
end
rule "FC005", "Avoid repetition of resource declarations" do
tags %w{style}
recipe do |ast|
resources = find_resources(ast).map do |res|
resource_attributes(res).merge({:type => resource_type(res),
:ast => res})
end.chunk do |res|
res[:type] +
res[:ast].xpath("ancestor::*[self::if | self::unless | self::elsif |
self::else | self::when | self::method_add_block/call][position() = 1]/
descendant::pos[position() = 1]").to_s +
res[:ast].xpath("ancestor::method_add_block/command[
ident/@value='action']/args_add_block/descendant::ident/@value").to_s
end.reject{|res| res[1].size < 3}
resources.map do |cont_res|
first_resource = cont_res[1][0][:ast]
# we have contiguous resources of the same type, but do they share the
# same attributes?
sorted_atts = cont_res[1].map do |atts|
atts.delete_if{|k| k == :ast}.to_a.sort do |x,y|
x.first.to_s <=> y.first.to_s
end
end
first_resource if sorted_atts.all? do |att|
(att - sorted_atts.inject{|atts,a| atts & a}).length == 1
end
end.compact
end
end
rule "FC006",
"Mode should be quoted or fully specified when setting file permissions" do
tags %w{correctness files}
recipe do |ast|
ast.xpath(%q{//ident[@value='mode']/parent::command/
descendant::int[string-length(@value) < 5 and not(starts-with(@value, "0")
and string-length(@value) = 4)]/ancestor::method_add_block})
end
end
# Every cookbook referenced by include_recipe must be declared with
# `depends` in metadata.rb (references to the cookbook itself excepted).
rule "FC007", "Ensure recipe dependencies are reflected in cookbook metadata" do
  tags %w{correctness metadata}
  recipe do |ast, filename|
    metadata_path = Pathname.new(
      File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
    # File.exists? is a deprecated alias; File.exist? is the supported name.
    next unless File.exist? metadata_path
    actual_included = included_recipes(ast, :with_partial_names => false)
    # Cookbooks that are included but neither this cookbook nor declared.
    undeclared = actual_included.keys.map do |recipe|
      recipe.split('::').first
    end - [cookbook_name(filename)] -
      declared_dependencies(read_ast(metadata_path))
    # Warn on each include statement that pulls in an undeclared cookbook.
    actual_included.map do |recipe, include_stmts|
      if undeclared.include?(recipe) ||
          undeclared.any?{|u| recipe.start_with?("#{u}::")}
        include_stmts
      end
    end.flatten.compact
  end
end
rule "FC008", "Generated cookbook metadata needs updating" do
tags %w{style metadata}
metadata do |ast,filename|
{'maintainer' => 'YOUR_COMPANY_NAME',
'maintainer_email' => 'YOUR_EMAIL'}.map do |field,value|
ast.xpath(%Q{//command[ident/@value='#{field}']/
descendant::tstring_content[@value='#{value}']})
end
end
end
rule "FC009", "Resource attribute not recognised" do
tags %w{correctness}
recipe do |ast|
matches = []
resource_attributes_by_type(ast).each do |type,resources|
resources.each do |resource|
resource.keys.map(&:to_sym).reject do |att|
resource_attribute?(type.to_sym, att)
end.each do |invalid_att|
matches << find_resources(ast, :type => type).find do |res|
resource_attributes(res).include?(invalid_att.to_s)
end
end
end
end
matches
end
end
rule "FC010", "Invalid search syntax" do
tags %w{correctness search}
recipe do |ast|
# This only works for literal search strings
literal_searches(ast).reject{|search| valid_query?(search['value'])}
end
end
# Cookbooks should ship a README.md in their root directory.
rule "FC011", "Missing README in markdown format" do
  tags %w{style readme}
  cookbook do |filename|
    # File.exists? is a deprecated alias; File.exist? is the supported name.
    unless File.exist?(File.join(filename, 'README.md'))
      [file_match(File.join(filename, 'README.md'))]
    end
  end
end
# A README.rdoc indicates legacy RDoc documentation; prefer Markdown.
rule "FC012", "Use Markdown for README rather than RDoc" do
  tags %w{style readme}
  cookbook do |filename|
    # File.exists? is a deprecated alias; File.exist? is the supported name.
    if File.exist?(File.join(filename, 'README.rdoc'))
      [file_match(File.join(filename, 'README.rdoc'))]
    end
  end
end
rule "FC013", "Use file_cache_path rather than hard-coding tmp paths" do
tags %w{style files}
recipe do |ast|
find_resources(ast, :type => 'remote_file').find_all do |download|
path = (resource_attribute(download, 'path') ||
resource_name(download)).to_s
path.start_with?('/tmp/')
end
end
end
rule "FC014", "Consider extracting long ruby_block to library" do
tags %w{style libraries}
recipe do |ast|
find_resources(ast, :type => 'ruby_block').find_all do |rb|
! rb.xpath("descendant::fcall[ident/@value='block' and
count(ancestor::*) = 8]/../../
do_block[count(descendant::*) > 100]").empty?
end
end
end
# Foodcritic rules FC015-FC021: definition/LWRP hygiene and attribute-access
# consistency checks.
rule "FC015", "Consider converting definition to a LWRP" do
  tags %w{style definitions lwrp}
  applies_to {|version| version >= gem_version("0.7.12")}
  cookbook do |dir|
    # Any file under definitions/ warrants the suggestion.
    Dir[File.join(dir, 'definitions', '*.rb')].reject do |entry|
      ['.', '..'].include? entry
    end.map{|entry| file_match(entry)}
  end
end
rule "FC016", "LWRP does not declare a default action" do
  tags %w{correctness lwrp}
  applies_to {|version| version >= gem_version("0.7.12")}
  resource do |ast, filename|
    # Accept either the `default_action` DSL or assignment to @action.
    unless ["//ident/@value='default_action'",
      "//def/bodystmt/descendant::assign/
        var_field/ivar/@value='@action'"].any? {|expr| ast.xpath(expr)}
      [file_match(filename)]
    end
  end
end
rule "FC017", "LWRP does not notify when updated" do
  tags %w{correctness lwrp}
  applies_to do |version|
    version >= gem_version("0.7.12")
  end
  provider do |ast, filename|
    # Look for any new_resource.updated_by_last_action call in the provider.
    if ast.xpath(%q{//*[self::call or self::command_call]/
      *[self::vcall or self::var_ref/ident/
      @value='new_resource']/../
      ident[@value='updated_by_last_action']}).empty?
      [file_match(filename)]
    end
  end
end
rule "FC018", "LWRP uses deprecated notification syntax" do
  tags %w{style lwrp deprecated}
  applies_to {|version| version >= gem_version("0.9.10")}
  provider do |ast|
    # Both `@updated = true` and `new_resource.updated = true` are deprecated.
    ast.xpath("//assign/var_field/ivar[@value='@updated']").map do |class_var|
      match(class_var)
    end + ast.xpath(%q{//assign/field/*[self::vcall or self::var_ref/ident/
      @value='new_resource']/../ident[@value='updated']})
  end
end
rule "FC019", "Access node attributes in a consistent manner" do
  tags %w{style attributes}
  cookbook do |cookbook_dir|
    # Parse every ruby file in the cookbook (skipping spec/) and tally how
    # often each attribute access style (string / symbol / vivified) is used;
    # warn on uses of the least common style when styles are mixed.
    asts = {}; files = Dir["#{cookbook_dir}/*/*.rb"].reject do |file|
      relative_path = Pathname.new(file).relative_path_from(Pathname.new(cookbook_dir))
      relative_path.to_s.split(File::SEPARATOR).include?('spec')
    end.map do |file|
      {:path => file, :ast => read_ast(file)}
    end
    types = [:string, :symbol, :vivified].map do |type|
      {:access_type => type, :count => files.map do |file|
        attribute_access(file[:ast], :type => type, :ignore_calls => true,
          :cookbook_dir => cookbook_dir).tap do |ast|
          unless ast.empty?
            (asts[type] ||= []) << {:ast => ast, :path => file[:path]}
          end
        end.size
      end.inject(:+)}
    end.reject{|type| type[:count] == 0}
    if asts.size > 1
      least_used = asts[types.min{|a,b| a[:count] <=> b[:count]}[:access_type]]
      least_used.map do |file|
        file[:ast].map{|ast| match(ast).merge(:filename => file[:path])}.flatten
      end
    end
  end
end
rule "FC021", "Resource condition in provider may not behave as expected" do
  tags %w{correctness lwrp}
  applies_to {|version| version >= gem_version("0.10.6")}
  provider do |ast|
    find_resources(ast).map do |resource|
      condition = resource.xpath(%q{//method_add_block/
        descendant::ident[@value='not_if' or @value='only_if']/
        ancestor::*[self::method_add_block or self::command][1][descendant::
        ident/@value='new_resource']/ancestor::stmts_add[2]/method_add_block/
        command[count(descendant::string_embexpr) = 0]})
      condition
    end.compact
  end
end
# Foodcritic rules FC022-FC026: conditional-execution and gem-install checks.
rule "FC022", "Resource condition within loop may not behave as expected" do
  tags %w{correctness}
  applies_to {|version| version >= gem_version("0.10.6")}
  recipe do |ast|
    ast.xpath("//call[ident/@value='each']/../do_block").map do |loop|
      block_vars = loop.xpath("block_var/params/child::*").map do |n|
        n.name.sub(/^ident/, '')
      end
      find_resources(loop).map do |resource|
        # if any of the parameters to the block are used in a condition then we
        # have a match
        unless (block_vars &
          (resource.xpath(%q{descendant::ident[@value='not_if' or
          @value='only_if']/ancestor::*[self::method_add_block or
          self::command][1]/descendant::ident/@value}).map{|a| a.value})).empty?
          c = resource.xpath('command[count(descendant::string_embexpr) = 0]')
          resource unless c.empty? || block_vars.any? do |var|
            ! resource.xpath(%Q{command/args_add_block/args_add/
              var_ref/ident[@value='#{var}']}).empty?
          end
        end
      end
    end.flatten.compact
  end
end
rule "FC023", "Prefer conditional attributes" do
  tags %w{style}
  recipe do |ast|
    # Match resources wrapped in a bare if/unless (no else/elsif, single
    # resource) that could use not_if/only_if instead.
    ast.xpath(%q{//method_add_block[command/ident][count(descendant::ident
      [@value='only_if' or @value='not_if']) = 0]/ancestor::*[self::if or
      self::unless][count(descendant::method_add_block[command/ident]) = 1]
      [count(stmts_add/method_add_block/call) = 0]
      [count(stmts_add/stmts_add) = 0]
      [count(descendant::*[self::else or self::elsif]) = 0]})
  end
end
rule "FC024", "Consider adding platform equivalents" do
  tags %w{portability}
  RHEL = %w{amazon centos redhat scientific}
  recipe do |ast, filename|
    next if Pathname.new(filename).basename.to_s == 'metadata.rb'
    metadata_path = Pathname.new(
      File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
    md_platforms = if File.exists?(metadata_path)
      supported_platforms(read_ast(metadata_path)).map{|p| p[:platform]}
    else
      []
    end
    md_platforms = RHEL if md_platforms.empty?
    # Check both platform?(...) calls and case/when branches for RHEL-family
    # platforms that are supported in metadata but missing from the branch.
    ['//method_add_arg[fcall/ident/@value="platform?"]/arg_paren/args_add_block',
     "//when"].map do |expr|
      ast.xpath(expr).map do |whn|
        platforms = whn.xpath("args_add/descendant::tstring_content").map do |p|
          p['value']
        end.sort
        unless platforms.size == 1 || (md_platforms & platforms).empty?
          whn unless (platforms & RHEL).empty? ||
            ((md_platforms & RHEL) - (platforms & RHEL)).empty?
        end
      end.compact
    end.flatten
  end
end
rule "FC025", "Prefer chef_gem to compile-time gem install" do
  tags %w{style deprecated}
  applies_to {|version| version >= gem_version("0.10.10")}
  recipe do |ast|
    # gem_package with action :nothing followed by a run-time install/upgrade
    # via the saved resource variable is the compile-time install idiom.
    gem_install = ast.xpath("//stmts_add/assign[method_add_block[command/ident/
      @value='gem_package'][do_block/stmts_add/command[ident/@value='action']
      [descendant::ident/@value='nothing']]]")
    gem_install.map do |install|
      gem_var = install.xpath("var_field/ident/@value")
      unless ast.xpath("//method_add_arg[call/var_ref/ident/@value='#{gem_var}']
        [arg_paren/descendant::ident/@value='install' or
        arg_paren/descendant::ident/@value='upgrade']").empty?
        gem_install
      end
    end
  end
end
rule "FC026", "Conditional execution block attribute contains only string" do
  tags %w{correctness}
  applies_to {|version| version >= gem_version("0.7.4")}
  recipe do |ast|
    find_resources(ast).map{|r| resource_attributes(r)}.map do |resource|
      [resource['not_if'], resource['only_if']]
    end.flatten.compact.select do |condition|
      condition.respond_to?(:xpath) and
      ! condition.xpath('descendant::string_literal').empty? and
      ! condition.xpath('stmts_add/string_literal').empty? and
      condition.xpath('descendant::stmts_add[count(ancestor::
        string_literal) = 0]').size == 1
    end
  end
end
# Foodcritic rules FC027-FC034: service attributes, platform? usage, metadata
# naming, debugger breakpoints, notification timing and template checks.
rule "FC027", "Resource sets internal attribute" do
  tags %w{correctness}
  recipe do |ast|
    # `enabled`/`running` are state attributes on the service resource, not
    # intended to be set from recipes.
    find_resources(ast, :type => :service).map do |service|
      service unless (resource_attributes(service).keys &
        ['enabled', 'running']).empty?
    end.compact
  end
end
rule "FC028", "Incorrect #platform? usage" do
  tags %w{correctness}
  recipe do |ast|
    # platform? is a top-level helper, not a method on node.
    ast.xpath(%q{//*[self::call | self::command_call]
      [(var_ref|vcall)/ident/@value='node']
      [ident/@value="platform?"]})
  end
end
rule "FC029", "No leading cookbook name in recipe metadata" do
  tags %w{correctness metadata}
  metadata do |ast,filename|
    ast.xpath('//command[ident/@value="recipe"]').map do |declared_recipe|
      next unless declared_recipe.xpath('count(//vcall|//var_ref)').to_i == 0
      recipe_name = declared_recipe.xpath('args_add_block/
        descendant::tstring_content[1]/@value').to_s
      unless recipe_name.empty? ||
        recipe_name.split('::').first == cookbook_name(filename.to_s)
        declared_recipe
      end
    end.compact
  end
end
rule "FC030", "Cookbook contains debugger breakpoints" do
  tags %w{annoyances}
  # Helper shared by all file-type blocks below: find binding.pry calls.
  def pry_bindings(ast)
    ast.xpath('//call[(vcall|var_ref)/ident/@value="binding"]
      [ident/@value="pry"]')
  end
  recipe{|ast| pry_bindings(ast)}
  library{|ast| pry_bindings(ast)}
  metadata{|ast| pry_bindings(ast)}
  template{|ast| pry_bindings(ast)}
end
rule "FC031", "Cookbook without metadata file" do
  tags %w{correctness metadata}
  cookbook do |filename|
    if ! File.exists?(File.join(filename, 'metadata.rb'))
      [file_match(File.join(filename, 'metadata.rb'))]
    end
  end
end
rule "FC032", "Invalid notification timing" do
  tags %w{correctness notifications}
  recipe do |ast|
    find_resources(ast).select do |resource|
      notifications(resource).any? do |notification|
        ! [:delayed, :immediate].include? notification[:timing]
      end
    end
  end
end
rule "FC033", "Missing template" do
  tags %w{correctness}
  recipe do |ast,filename|
    # Skip local/cross-cookbook templates and dynamic (expression) sources,
    # then check the remaining template sources exist under templates/.
    find_resources(ast, :type => :template).reject do |resource|
      resource_attributes(resource)['local'] ||
        resource_attributes(resource)['cookbook']
    end.map do |resource|
      file = template_file(resource_attributes(resource,
        :return_expressions => true))
      {:resource => resource, :file => file}
    end.reject do |resource|
      resource[:file].respond_to?(:xpath)
    end.select do |resource|
      template_paths(filename).none? do |path|
        relative_path = []
        Pathname.new(path).ascend do |template_path|
          relative_path << template_path.basename
          break if template_path.dirname.dirname.basename.to_s == 'templates'
        end
        File.join(relative_path.reverse) == resource[:file]
      end
    end.map{|resource| resource[:resource]}
  end
end
rule "FC034", "Unused template variables" do
  tags %w{correctness}
  recipe do |ast,filename|
    Array(resource_attributes_by_type(ast)['template']).select do
      |t| t['variables'] and t['variables'].respond_to?(:xpath)
    end.map do |resource|
      template_paths = Dir[Pathname.new(filename).dirname.dirname +
        'templates' + '**/*.erb']
      template_path = template_paths.find{|p| File.basename(p) == resource['source']}
      next unless template_path
      passed_vars = resource['variables'].xpath('symbol/ident/@value').map{|tv| tv.to_s}
      template_vars = read_ast(template_path).xpath('//var_ref/ivar/' +
        '@value').map{|v| v.to_s.sub(/^@/, '')}
      file_match(template_path) unless (passed_vars - template_vars).empty?
    end.compact
  end
end
# Foodcritic rules FC037-FC039: action validation and node-method access.
rule "FC037", "Invalid notification action" do
  tags %w{correctness}
  recipe do |ast|
    find_resources(ast).select do |resource|
      notifications(resource).any? do |n|
        # notifies names the target resource type; subscribes acts on self.
        type = case n[:type]
          when :notifies then n[:resource_type]
          when :subscribes then resource_type(resource).to_sym
        end
        n[:action].size > 0 and ! resource_action?(type, n[:action])
      end
    end
  end
end
rule "FC038", "Invalid resource action" do
  tags %w{correctness}
  recipe do |ast|
    find_resources(ast).select do |resource|
      actions = resource_attributes(resource)['action']
      if actions.respond_to?(:xpath)
        actions = actions.xpath('descendant::array/descendant::symbol/ident/@value')
      else
        actions = Array(actions)
      end
      actions.reject{|a| a.to_s.empty?}.any? do |action|
        ! resource_action?(resource_type(resource), action)
      end
    end
  end
end
rule "FC039", "Node method cannot be accessed with key" do
  tags %w{correctness}
  recipe do |ast|
    [{:type => :string, :path => '@value'},
     {:type => :symbol, :path => 'ident/@value'}].map do |access_type|
      attribute_access(ast, :type => access_type[:type]).select do |att|
        att_name = att.xpath(access_type[:path]).to_s.to_sym
        att_name != :tags && chef_node_methods.include?(att_name)
      end.select do |att|
        ! att.xpath('ancestor::args_add_block[position() = 1]
          [preceding-sibling::vcall | preceding-sibling::var_ref]').empty?
      end.select do |att|
        att_type = att.xpath('ancestor::args_add_block[position() = 1]
          /../var_ref/ident/@value').to_s
        ast.xpath("//assign/var_field/ident[@value='#{att_type}']").empty?
      end
    end.flatten
  end
end
# FC040: warn when an execute resource shells out to git; the git resource
# should be used instead.
rule "FC040", "Execute resource used to run git commands" do
  tags %w{style recipe etsy}
  recipe do |ast|
    possible_git_commands = %w{ clone fetch pull checkout reset }
    find_resources(ast, :type => 'execute').select do |cmd|
      cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
      git_cmd = cmd_str.match(/git ([a-z]+)/)
      # Use `next false`, not `break false`: break would abort the whole
      # select and make the recipe block return false, silently discarding
      # any git matches found in earlier resources.
      next false if git_cmd.nil?
      !git_cmd.captures.nil? && possible_git_commands.include?(git_cmd.captures[0])
    end
  end
end
# Foodcritic rules FC041-FC046: curl/wget execs, deprecated syntax and
# attribute-file hygiene.
rule "FC041", "Execute resource used to run curl or wget commands" do
  tags %w{style recipe etsy}
  recipe do |ast|
    find_resources(ast, :type => 'execute').select do |cmd|
      cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
      (cmd_str.include?('curl ') || cmd_str.include?('wget '))
    end
  end
end
rule "FC042", "Prefer include_recipe to require_recipe" do
  tags %w{deprecated}
  recipe do |ast|
    ast.xpath('//command[ident/@value="require_recipe"]')
  end
end
rule "FC043", "Prefer new notification syntax" do
  tags %w{style notifications deprecated}
  applies_to {|version| version >= gem_version("0.9.10")}
  recipe do |ast|
    find_resources(ast).select do |resource|
      notifications(resource).any?{|notify| notify[:style] == :old}
    end
  end
end
rule "FC044", "Avoid bare attribute keys" do
  tags %w{style}
  attributes do |ast|
    # Bare identifiers that are not local variables and not block parameters
    # are probably meant to be node attribute keys.
    declared = ast.xpath('//descendant::var_field/ident/@value').map{|v| v.to_s}
    ast.xpath('//assign/*[self::vcall or self::var_ref]
      [count(child::kw) = 0]/ident').select do |v|
      (v['value'] != 'secure_password') &&
      ! declared.include?(v['value']) &&
      ! v.xpath("ancestor::*[self::brace_block or self::do_block]/block_var/
        descendant::ident/@value='#{v['value']}'")
    end
  end
end
rule "FC045", "Consider setting cookbook name in metadata" do
  tags %w{annoyances metadata}
  metadata do |ast, filename|
    unless ast.xpath('descendant::stmts_add/command/ident/@value="name"')
      [file_match(filename)]
    end
  end
  cookbook do |filename|
    if ! File.exists?(File.join(filename, 'metadata.rb'))
      [file_match(File.join(filename, 'metadata.rb'))]
    end
  end
end
rule "FC046", "Attribute assignment uses assign unless nil" do
  attributes do |ast|
    attribute_access(ast).map{|a| a.xpath('ancestor::opassign/op[@value="||="]')}
  end
end
FC014: Remove the arbitrary ancestor-depth check (`count(ancestor::*) = 8`) from the ruby_block length query, so long blocks are detected at any nesting depth.
# This file contains all of the rules that ship with foodcritic.
#
# * Foodcritic rules perform static code analysis - rather than the cookbook code
# being loaded by the interpreter it is parsed into a tree (AST) that is then
# passed to each rule.
# * Rules can use a number of API functions that ship with foodcritic to make
# sense of the parse tree.
# * Rules can also use XPath to query the AST. A rule can consist of a XPath
# query only, as any nodes returned from a `recipe` block will be converted
# into warnings.
# Foodcritic rules FC002-FC006: interpolation, chef-solo, service and file
# permission checks.
rule "FC002", "Avoid string interpolation where not required" do
  tags %w{style strings}
  recipe do |ast|
    # A string literal whose only content is a single #{...} interpolation.
    ast.xpath(%q{//*[self::string_literal | self::assoc_new]/string_add[
      count(descendant::string_embexpr) = 1 and
      count(tstring_content|string_add/tstring_content) = 0]})
  end
end
rule "FC003",
  "Check whether you are running with chef server before using" +
  " server-specific features" do
  tags %w{portability solo}
  recipe do |ast,filename|
    unless checks_for_chef_solo?(ast) or chef_solo_search_supported?(filename)
      searches(ast)
    end
  end
end
rule "FC004", "Use a service resource to start and stop services" do
  tags %w{style services}
  recipe do |ast|
    find_resources(ast, :type => 'execute').find_all do |cmd|
      cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
      (cmd_str.include?('/etc/init.d') || ['service ', '/sbin/service ',
        'start ', 'stop ', 'invoke-rc.d '].any? do |service_cmd|
        cmd_str.start_with?(service_cmd)
      end) && %w{start stop restart reload}.any?{|a| cmd_str.include?(a)}
    end
  end
end
rule "FC005", "Avoid repetition of resource declarations" do
  tags %w{style}
  recipe do |ast|
    # Group resources by type, enclosing conditional position and action;
    # runs of three or more are candidates for consolidation.
    resources = find_resources(ast).map do |res|
      resource_attributes(res).merge({:type => resource_type(res),
        :ast => res})
    end.chunk do |res|
      res[:type] +
      res[:ast].xpath("ancestor::*[self::if | self::unless | self::elsif |
        self::else | self::when | self::method_add_block/call][position() = 1]/
        descendant::pos[position() = 1]").to_s +
      res[:ast].xpath("ancestor::method_add_block/command[
        ident/@value='action']/args_add_block/descendant::ident/@value").to_s
    end.reject{|res| res[1].size < 3}
    resources.map do |cont_res|
      first_resource = cont_res[1][0][:ast]
      # we have contiguous resources of the same type, but do they share the
      # same attributes?
      sorted_atts = cont_res[1].map do |atts|
        atts.delete_if{|k| k == :ast}.to_a.sort do |x,y|
          x.first.to_s <=> y.first.to_s
        end
      end
      first_resource if sorted_atts.all? do |att|
        (att - sorted_atts.inject{|atts,a| atts & a}).length == 1
      end
    end.compact
  end
end
rule "FC006",
  "Mode should be quoted or fully specified when setting file permissions" do
  tags %w{correctness files}
  recipe do |ast|
    # Integer modes must be 4 digits starting with 0 (octal); anything else
    # is ambiguous and should be quoted.
    ast.xpath(%q{//ident[@value='mode']/parent::command/
      descendant::int[string-length(@value) < 5 and not(starts-with(@value, "0")
      and string-length(@value) = 4)]/ancestor::method_add_block})
  end
end
# Foodcritic rules FC007-FC014: metadata dependencies, generated metadata,
# resource attributes, search syntax, README and ruby_block size checks.
rule "FC007", "Ensure recipe dependencies are reflected in cookbook metadata" do
  tags %w{correctness metadata}
  recipe do |ast,filename|
    metadata_path =Pathname.new(
      File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
    next unless File.exists? metadata_path
    actual_included = included_recipes(ast, :with_partial_names => false)
    # Cookbooks included but neither self nor declared as dependencies.
    undeclared = actual_included.keys.map do |recipe|
      recipe.split('::').first
    end - [cookbook_name(filename)] -
      declared_dependencies(read_ast(metadata_path))
    actual_included.map do |recipe, include_stmts|
      if undeclared.include?(recipe) ||
        undeclared.any?{|u| recipe.start_with?("#{u}::")}
        include_stmts
      end
    end.flatten.compact
  end
end
rule "FC008", "Generated cookbook metadata needs updating" do
  tags %w{style metadata}
  metadata do |ast,filename|
    # Placeholder values left in by the cookbook generator.
    {'maintainer' => 'YOUR_COMPANY_NAME',
     'maintainer_email' => 'YOUR_EMAIL'}.map do |field,value|
      ast.xpath(%Q{//command[ident/@value='#{field}']/
        descendant::tstring_content[@value='#{value}']})
    end
  end
end
rule "FC009", "Resource attribute not recognised" do
  tags %w{correctness}
  recipe do |ast|
    matches = []
    resource_attributes_by_type(ast).each do |type,resources|
      resources.each do |resource|
        resource.keys.map(&:to_sym).reject do |att|
          resource_attribute?(type.to_sym, att)
        end.each do |invalid_att|
          matches << find_resources(ast, :type => type).find do |res|
            resource_attributes(res).include?(invalid_att.to_s)
          end
        end
      end
    end
    matches
  end
end
rule "FC010", "Invalid search syntax" do
  tags %w{correctness search}
  recipe do |ast|
    # This only works for literal search strings
    literal_searches(ast).reject{|search| valid_query?(search['value'])}
  end
end
rule "FC011", "Missing README in markdown format" do
  tags %w{style readme}
  cookbook do |filename|
    unless File.exists?(File.join(filename, 'README.md'))
      [file_match(File.join(filename, 'README.md'))]
    end
  end
end
rule "FC012", "Use Markdown for README rather than RDoc" do
  tags %w{style readme}
  cookbook do |filename|
    if File.exists?(File.join(filename, 'README.rdoc'))
      [file_match(File.join(filename, 'README.rdoc'))]
    end
  end
end
rule "FC013", "Use file_cache_path rather than hard-coding tmp paths" do
  tags %w{style files}
  recipe do |ast|
    find_resources(ast, :type => 'remote_file').find_all do |download|
      path = (resource_attribute(download, 'path') ||
              resource_name(download)).to_s
      path.start_with?('/tmp/')
    end
  end
end
rule "FC014", "Consider extracting long ruby_block to library" do
  tags %w{style libraries}
  recipe do |ast|
    # Blocks with more than 100 AST nodes, at any nesting depth.
    find_resources(ast, :type => 'ruby_block').find_all do |rb|
      ! rb.xpath("descendant::fcall[ident/@value='block']
        /../../do_block[count(descendant::*) > 100]").empty?
    end
  end
end
# Foodcritic rules FC015-FC023: LWRP hygiene, attribute-access consistency
# and conditional-execution checks.
rule "FC015", "Consider converting definition to a LWRP" do
  tags %w{style definitions lwrp}
  applies_to {|version| version >= gem_version("0.7.12")}
  cookbook do |dir|
    Dir[File.join(dir, 'definitions', '*.rb')].reject do |entry|
      ['.', '..'].include? entry
    end.map{|entry| file_match(entry)}
  end
end
rule "FC016", "LWRP does not declare a default action" do
  tags %w{correctness lwrp}
  applies_to {|version| version >= gem_version("0.7.12")}
  resource do |ast, filename|
    # Accept either the `default_action` DSL or assignment to @action.
    unless ["//ident/@value='default_action'",
      "//def/bodystmt/descendant::assign/
        var_field/ivar/@value='@action'"].any? {|expr| ast.xpath(expr)}
      [file_match(filename)]
    end
  end
end
rule "FC017", "LWRP does not notify when updated" do
  tags %w{correctness lwrp}
  applies_to do |version|
    version >= gem_version("0.7.12")
  end
  provider do |ast, filename|
    if ast.xpath(%q{//*[self::call or self::command_call]/
      *[self::vcall or self::var_ref/ident/
      @value='new_resource']/../
      ident[@value='updated_by_last_action']}).empty?
      [file_match(filename)]
    end
  end
end
rule "FC018", "LWRP uses deprecated notification syntax" do
  tags %w{style lwrp deprecated}
  applies_to {|version| version >= gem_version("0.9.10")}
  provider do |ast|
    ast.xpath("//assign/var_field/ivar[@value='@updated']").map do |class_var|
      match(class_var)
    end + ast.xpath(%q{//assign/field/*[self::vcall or self::var_ref/ident/
      @value='new_resource']/../ident[@value='updated']})
  end
end
rule "FC019", "Access node attributes in a consistent manner" do
  tags %w{style attributes}
  cookbook do |cookbook_dir|
    # Tally attribute access styles across the cookbook and flag uses of the
    # least common style when more than one style is in play.
    asts = {}; files = Dir["#{cookbook_dir}/*/*.rb"].reject do |file|
      relative_path = Pathname.new(file).relative_path_from(Pathname.new(cookbook_dir))
      relative_path.to_s.split(File::SEPARATOR).include?('spec')
    end.map do |file|
      {:path => file, :ast => read_ast(file)}
    end
    types = [:string, :symbol, :vivified].map do |type|
      {:access_type => type, :count => files.map do |file|
        attribute_access(file[:ast], :type => type, :ignore_calls => true,
          :cookbook_dir => cookbook_dir).tap do |ast|
          unless ast.empty?
            (asts[type] ||= []) << {:ast => ast, :path => file[:path]}
          end
        end.size
      end.inject(:+)}
    end.reject{|type| type[:count] == 0}
    if asts.size > 1
      least_used = asts[types.min{|a,b| a[:count] <=> b[:count]}[:access_type]]
      least_used.map do |file|
        file[:ast].map{|ast| match(ast).merge(:filename => file[:path])}.flatten
      end
    end
  end
end
rule "FC021", "Resource condition in provider may not behave as expected" do
  tags %w{correctness lwrp}
  applies_to {|version| version >= gem_version("0.10.6")}
  provider do |ast|
    find_resources(ast).map do |resource|
      condition = resource.xpath(%q{//method_add_block/
        descendant::ident[@value='not_if' or @value='only_if']/
        ancestor::*[self::method_add_block or self::command][1][descendant::
        ident/@value='new_resource']/ancestor::stmts_add[2]/method_add_block/
        command[count(descendant::string_embexpr) = 0]})
      condition
    end.compact
  end
end
rule "FC022", "Resource condition within loop may not behave as expected" do
  tags %w{correctness}
  applies_to {|version| version >= gem_version("0.10.6")}
  recipe do |ast|
    ast.xpath("//call[ident/@value='each']/../do_block").map do |loop|
      block_vars = loop.xpath("block_var/params/child::*").map do |n|
        n.name.sub(/^ident/, '')
      end
      find_resources(loop).map do |resource|
        # if any of the parameters to the block are used in a condition then we
        # have a match
        unless (block_vars &
          (resource.xpath(%q{descendant::ident[@value='not_if' or
          @value='only_if']/ancestor::*[self::method_add_block or
          self::command][1]/descendant::ident/@value}).map{|a| a.value})).empty?
          c = resource.xpath('command[count(descendant::string_embexpr) = 0]')
          resource unless c.empty? || block_vars.any? do |var|
            ! resource.xpath(%Q{command/args_add_block/args_add/
              var_ref/ident[@value='#{var}']}).empty?
          end
        end
      end
    end.flatten.compact
  end
end
rule "FC023", "Prefer conditional attributes" do
  tags %w{style}
  recipe do |ast|
    ast.xpath(%q{//method_add_block[command/ident][count(descendant::ident
      [@value='only_if' or @value='not_if']) = 0]/ancestor::*[self::if or
      self::unless][count(descendant::method_add_block[command/ident]) = 1]
      [count(stmts_add/method_add_block/call) = 0]
      [count(stmts_add/stmts_add) = 0]
      [count(descendant::*[self::else or self::elsif]) = 0]})
  end
end
# Foodcritic rules FC024-FC034: platform equivalents, gem installs, service
# attributes, metadata naming, breakpoints, notification and template checks.
rule "FC024", "Consider adding platform equivalents" do
  tags %w{portability}
  RHEL = %w{amazon centos redhat scientific}
  recipe do |ast, filename|
    next if Pathname.new(filename).basename.to_s == 'metadata.rb'
    metadata_path = Pathname.new(
      File.join(File.dirname(filename), '..', 'metadata.rb')).cleanpath
    md_platforms = if File.exists?(metadata_path)
      supported_platforms(read_ast(metadata_path)).map{|p| p[:platform]}
    else
      []
    end
    md_platforms = RHEL if md_platforms.empty?
    ['//method_add_arg[fcall/ident/@value="platform?"]/arg_paren/args_add_block',
     "//when"].map do |expr|
      ast.xpath(expr).map do |whn|
        platforms = whn.xpath("args_add/descendant::tstring_content").map do |p|
          p['value']
        end.sort
        unless platforms.size == 1 || (md_platforms & platforms).empty?
          whn unless (platforms & RHEL).empty? ||
            ((md_platforms & RHEL) - (platforms & RHEL)).empty?
        end
      end.compact
    end.flatten
  end
end
rule "FC025", "Prefer chef_gem to compile-time gem install" do
  tags %w{style deprecated}
  applies_to {|version| version >= gem_version("0.10.10")}
  recipe do |ast|
    gem_install = ast.xpath("//stmts_add/assign[method_add_block[command/ident/
      @value='gem_package'][do_block/stmts_add/command[ident/@value='action']
      [descendant::ident/@value='nothing']]]")
    gem_install.map do |install|
      gem_var = install.xpath("var_field/ident/@value")
      unless ast.xpath("//method_add_arg[call/var_ref/ident/@value='#{gem_var}']
        [arg_paren/descendant::ident/@value='install' or
        arg_paren/descendant::ident/@value='upgrade']").empty?
        gem_install
      end
    end
  end
end
rule "FC026", "Conditional execution block attribute contains only string" do
  tags %w{correctness}
  applies_to {|version| version >= gem_version("0.7.4")}
  recipe do |ast|
    find_resources(ast).map{|r| resource_attributes(r)}.map do |resource|
      [resource['not_if'], resource['only_if']]
    end.flatten.compact.select do |condition|
      condition.respond_to?(:xpath) and
      ! condition.xpath('descendant::string_literal').empty? and
      ! condition.xpath('stmts_add/string_literal').empty? and
      condition.xpath('descendant::stmts_add[count(ancestor::
        string_literal) = 0]').size == 1
    end
  end
end
rule "FC027", "Resource sets internal attribute" do
  tags %w{correctness}
  recipe do |ast|
    find_resources(ast, :type => :service).map do |service|
      service unless (resource_attributes(service).keys &
        ['enabled', 'running']).empty?
    end.compact
  end
end
rule "FC028", "Incorrect #platform? usage" do
  tags %w{correctness}
  recipe do |ast|
    ast.xpath(%q{//*[self::call | self::command_call]
      [(var_ref|vcall)/ident/@value='node']
      [ident/@value="platform?"]})
  end
end
rule "FC029", "No leading cookbook name in recipe metadata" do
  tags %w{correctness metadata}
  metadata do |ast,filename|
    ast.xpath('//command[ident/@value="recipe"]').map do |declared_recipe|
      next unless declared_recipe.xpath('count(//vcall|//var_ref)').to_i == 0
      recipe_name = declared_recipe.xpath('args_add_block/
        descendant::tstring_content[1]/@value').to_s
      unless recipe_name.empty? ||
        recipe_name.split('::').first == cookbook_name(filename.to_s)
        declared_recipe
      end
    end.compact
  end
end
rule "FC030", "Cookbook contains debugger breakpoints" do
  tags %w{annoyances}
  # Helper shared by all the file-type blocks below: find binding.pry calls.
  def pry_bindings(ast)
    ast.xpath('//call[(vcall|var_ref)/ident/@value="binding"]
      [ident/@value="pry"]')
  end
  recipe{|ast| pry_bindings(ast)}
  library{|ast| pry_bindings(ast)}
  metadata{|ast| pry_bindings(ast)}
  template{|ast| pry_bindings(ast)}
end
rule "FC031", "Cookbook without metadata file" do
  tags %w{correctness metadata}
  cookbook do |filename|
    if ! File.exists?(File.join(filename, 'metadata.rb'))
      [file_match(File.join(filename, 'metadata.rb'))]
    end
  end
end
rule "FC032", "Invalid notification timing" do
  tags %w{correctness notifications}
  recipe do |ast|
    find_resources(ast).select do |resource|
      notifications(resource).any? do |notification|
        ! [:delayed, :immediate].include? notification[:timing]
      end
    end
  end
end
rule "FC033", "Missing template" do
  tags %w{correctness}
  recipe do |ast,filename|
    find_resources(ast, :type => :template).reject do |resource|
      resource_attributes(resource)['local'] ||
        resource_attributes(resource)['cookbook']
    end.map do |resource|
      file = template_file(resource_attributes(resource,
        :return_expressions => true))
      {:resource => resource, :file => file}
    end.reject do |resource|
      resource[:file].respond_to?(:xpath)
    end.select do |resource|
      template_paths(filename).none? do |path|
        relative_path = []
        Pathname.new(path).ascend do |template_path|
          relative_path << template_path.basename
          break if template_path.dirname.dirname.basename.to_s == 'templates'
        end
        File.join(relative_path.reverse) == resource[:file]
      end
    end.map{|resource| resource[:resource]}
  end
end
rule "FC034", "Unused template variables" do
  tags %w{correctness}
  recipe do |ast,filename|
    Array(resource_attributes_by_type(ast)['template']).select do
      |t| t['variables'] and t['variables'].respond_to?(:xpath)
    end.map do |resource|
      template_paths = Dir[Pathname.new(filename).dirname.dirname +
        'templates' + '**/*.erb']
      template_path = template_paths.find{|p| File.basename(p) == resource['source']}
      next unless template_path
      passed_vars = resource['variables'].xpath('symbol/ident/@value').map{|tv| tv.to_s}
      template_vars = read_ast(template_path).xpath('//var_ref/ivar/' +
        '@value').map{|v| v.to_s.sub(/^@/, '')}
      file_match(template_path) unless (passed_vars - template_vars).empty?
    end.compact
  end
end
# Foodcritic rules FC037-FC039: action validation and node-method access.
rule "FC037", "Invalid notification action" do
  tags %w{correctness}
  recipe do |ast|
    find_resources(ast).select do |resource|
      notifications(resource).any? do |n|
        # notifies names the target resource type; subscribes acts on self.
        type = case n[:type]
          when :notifies then n[:resource_type]
          when :subscribes then resource_type(resource).to_sym
        end
        n[:action].size > 0 and ! resource_action?(type, n[:action])
      end
    end
  end
end
rule "FC038", "Invalid resource action" do
  tags %w{correctness}
  recipe do |ast|
    find_resources(ast).select do |resource|
      actions = resource_attributes(resource)['action']
      if actions.respond_to?(:xpath)
        actions = actions.xpath('descendant::array/descendant::symbol/ident/@value')
      else
        actions = Array(actions)
      end
      actions.reject{|a| a.to_s.empty?}.any? do |action|
        ! resource_action?(resource_type(resource), action)
      end
    end
  end
end
rule "FC039", "Node method cannot be accessed with key" do
  tags %w{correctness}
  recipe do |ast|
    [{:type => :string, :path => '@value'},
     {:type => :symbol, :path => 'ident/@value'}].map do |access_type|
      attribute_access(ast, :type => access_type[:type]).select do |att|
        att_name = att.xpath(access_type[:path]).to_s.to_sym
        att_name != :tags && chef_node_methods.include?(att_name)
      end.select do |att|
        ! att.xpath('ancestor::args_add_block[position() = 1]
          [preceding-sibling::vcall | preceding-sibling::var_ref]').empty?
      end.select do |att|
        att_type = att.xpath('ancestor::args_add_block[position() = 1]
          /../var_ref/ident/@value').to_s
        ast.xpath("//assign/var_field/ident[@value='#{att_type}']").empty?
      end
    end.flatten
  end
end
# FC040: warn when an execute resource shells out to git; the git resource
# should be used instead.
rule "FC040", "Execute resource used to run git commands" do
  tags %w{style recipe etsy}
  recipe do |ast|
    possible_git_commands = %w{ clone fetch pull checkout reset }
    find_resources(ast, :type => 'execute').select do |cmd|
      cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
      git_cmd = cmd_str.match(/git ([a-z]+)/)
      # Use `next false`, not `break false`: break would abort the whole
      # select and make the recipe block return false, silently discarding
      # any git matches found in earlier resources.
      next false if git_cmd.nil?
      !git_cmd.captures.nil? && possible_git_commands.include?(git_cmd.captures[0])
    end
  end
end
# Foodcritic rules FC041-FC046: curl/wget execs, deprecated syntax and
# attribute-file hygiene.
rule "FC041", "Execute resource used to run curl or wget commands" do
  tags %w{style recipe etsy}
  recipe do |ast|
    find_resources(ast, :type => 'execute').select do |cmd|
      cmd_str = (resource_attribute(cmd, 'command') || resource_name(cmd)).to_s
      (cmd_str.include?('curl ') || cmd_str.include?('wget '))
    end
  end
end
rule "FC042", "Prefer include_recipe to require_recipe" do
  tags %w{deprecated}
  recipe do |ast|
    ast.xpath('//command[ident/@value="require_recipe"]')
  end
end
rule "FC043", "Prefer new notification syntax" do
  tags %w{style notifications deprecated}
  applies_to {|version| version >= gem_version("0.9.10")}
  recipe do |ast|
    find_resources(ast).select do |resource|
      notifications(resource).any?{|notify| notify[:style] == :old}
    end
  end
end
rule "FC044", "Avoid bare attribute keys" do
  tags %w{style}
  attributes do |ast|
    # Bare identifiers that are not local variables and not block parameters
    # are probably meant to be node attribute keys.
    declared = ast.xpath('//descendant::var_field/ident/@value').map{|v| v.to_s}
    ast.xpath('//assign/*[self::vcall or self::var_ref]
      [count(child::kw) = 0]/ident').select do |v|
      (v['value'] != 'secure_password') &&
      ! declared.include?(v['value']) &&
      ! v.xpath("ancestor::*[self::brace_block or self::do_block]/block_var/
        descendant::ident/@value='#{v['value']}'")
    end
  end
end
rule "FC045", "Consider setting cookbook name in metadata" do
  tags %w{annoyances metadata}
  metadata do |ast, filename|
    unless ast.xpath('descendant::stmts_add/command/ident/@value="name"')
      [file_match(filename)]
    end
  end
  cookbook do |filename|
    if ! File.exists?(File.join(filename, 'metadata.rb'))
      [file_match(File.join(filename, 'metadata.rb'))]
    end
  end
end
rule "FC046", "Attribute assignment uses assign unless nil" do
  attributes do |ast|
    attribute_access(ast).map{|a| a.xpath('ancestor::opassign/op[@value="||="]')}
  end
end
|
# Support for multi-step forms.
class FormInput
# Turn this form into multi-step form using given steps.
# +steps+ is a hash mapping step symbols to display names (name may be nil
# for steps hidden from the sidebar). The hash is frozen and stored.
def self.define_steps( steps )
@steps = steps = steps.to_hash.dup.freeze
self.send( :include, StepMethods )
# The filter maps an incoming string back to one of the known step symbols,
# so arbitrary user-supplied values cannot become symbols.
opts = { filter: ->{ steps.keys.find{ |x| x.to_s == self } }, class: Symbol }
param :step, opts, type: :hidden
param :next, opts, type: :ignore
param :last, opts, type: :hidden
param :seen, opts, type: :hidden
end
# Get hash mapping defined steps to their names, or nil if there are none.
def self.form_steps
@steps
end
# Additional methods used for multi-step form processing.
module StepMethods
# Initialize new instance.
def initialize( *args )
super
# Track the furthest step the user has seen so far.
self.seen = last_step( seen, step )
self.step ||= steps.first
self.next ||= step
self.last ||= step
# Only advance to the requested step when the current step's data is valid.
if valid?( current_params )
self.step = self.next
self.seen = last_step( seen, previous_step( step ) )
end
self.last = last_step( step, last )
end
# Make all steps instantly available.
# Returns self for chaining.
def unlock_steps
self.last = self.seen = steps.last
self
end
# Get the parameters relevant for the current step.
def current_params
tagged_params( step )
end
# Get the parameters irrelevant for the current step.
def other_params
untagged_params( step )
end
# Get hash mapping defined steps to their names.
def form_steps
self.class.form_steps
end
# Get allowed form steps as list of symbols.
def steps
form_steps.keys
end
# Get name of current or given step, if any.
def step_name( step = self.step )
form_steps[ step ]
end
# Get hash of steps along with their names, for use as sidebar.
def step_names
form_steps.reject{ |k,v| v.nil? }
end
# Get index of given/current step among all steps.
# NOTE(review): the default 'self.steps' is the whole step array, so
# steps.index( step ) returns nil when called without arguments —
# this was probably meant to be 'self.step'; confirm against callers.
def step_index( step = self.steps )
steps.index( step )
end
# Get first step, or first step among given list of steps, if any.
def first_step( *args )
if args.empty?
steps.first
else
args.flatten.compact.min_by{ |x| step_index( x ) }
end
end
# Get last step, or last step among given list of steps, if any.
def last_step( *args )
if args.empty?
steps.last
else
args.flatten.compact.max_by{ |x| step_index( x ) }
end
end
# Get steps before given/current step.
def previous_steps( step = self.step )
index = steps.index( step ) || 0
steps.first( index )
end
# Get steps after given/current step.
def next_steps( step = self.step )
index = steps.index( step ) || -1
steps[ index + 1 .. -1 ]
end
# Get the next step, or nil.
def next_step( step = self.step )
next_steps( step ).first
end
# Get the previous step, or nil.
def previous_step( step = self.step )
previous_steps( step ).last
end
# Get steps with some parameters defined.
def regular_steps
steps.reject{ |step| tagged_params( step ).empty? }
end
# Get steps with no parameters defined.
def extra_steps
steps.select{ |step| tagged_params( step ).empty? }
end
# Filter steps by testing their corresponding parameters with given block. Excludes steps without parameters.
def filter_steps
steps.select do |step|
params = tagged_params( step )
yield params unless params.empty?
end
end
# Get steps which have required parameters. Obviously excludes steps without parameters.
def required_steps
filter_steps{ |params| params.any?{ |p| p.required? } }
end
# Get steps which have no required parameters. Excludes steps without parameters.
def optional_steps
filter_steps{ |params| params.none?{ |p| p.required? } }
end
# Get steps which have some data filled in. Obviously excludes steps without parameters.
def filled_steps
filter_steps{ |params| params.any?{ |p| p.filled? } }
end
# Get steps which have no data filled in. Excludes steps without parameters.
def unfilled_steps
filter_steps{ |params| params.none?{ |p| p.filled? } }
end
# Get steps which have only valid data filled in. Excludes steps without parameters.
def valid_steps
filter_steps{ |params| valid?( params ) }
end
# Get steps which have some invalid data filled in. Obviously excludes steps without parameters.
def invalid_steps
filter_steps{ |params| invalid?( params ) }
end
# Get first step with invalid data, or nil if there is none.
def invalid_step
invalid_steps.first
end
# Test if given/current step is invalid.
def invalid_step?( step = self.step )
invalid_steps.include?( step )
end
# Get steps which are enabled.
def enabled_steps
filter_steps{ |params| params.all?{ |p| p.enabled? } }
end
# Get steps which are disabled.
def disabled_steps
filter_steps{ |params| params.any?{ |p| p.disabled? } }
end
# Test if given/current step is disabled.
def disabled_step?( step = self.step )
disabled_steps.include?( step )
end
# Get unfinished steps, those we have not yet visited or visited for the first time.
def unfinished_steps
next_steps( seen )
end
# Get finished steps, those we have visited or skipped over before.
def finished_steps
steps - unfinished_steps
end
# Test if given/current step is finished.
def finished_step?( step = self.step )
finished_steps.include?( step )
end
# Get inaccessible steps, excluding the last accessed step.
def inaccessible_steps
next_steps( last )
end
# Get accessible steps, including the last accessed step.
def accessible_steps
steps - inaccessible_steps
end
# Get valid finished steps. Excludes steps without parameters.
def complete_steps
valid_steps & finished_steps
end
# Get invalid finished steps. Excludes steps without parameters.
def incomplete_steps
invalid_steps & finished_steps
end
# Get steps which shall be checked off as ok in the sidebar.
def good_steps
complete_steps & filled_steps
end
# Get steps which shall be marked as having errors in the sidebar.
def bad_steps
incomplete_steps
end
end
end
# EOF #
Added a few more boolean testers and improved the meaning of enabled and disabled steps.
# Support for multi-step forms.
class FormInput
# Turn this form into multi-step form using given steps.
# +steps+ is a hash mapping step symbols to display names (name may be nil
# for steps hidden from the sidebar). The hash is frozen and stored.
def self.define_steps( steps )
@steps = steps = steps.to_hash.dup.freeze
self.send( :include, StepMethods )
# The filter maps an incoming string back to one of the known step symbols,
# so arbitrary user-supplied values cannot become symbols.
opts = { filter: ->{ steps.keys.find{ |x| x.to_s == self } }, class: Symbol }
param :step, opts, type: :hidden
param :next, opts, type: :ignore
param :last, opts, type: :hidden
param :seen, opts, type: :hidden
end
# Get hash mapping defined steps to their names, or nil if there are none.
def self.form_steps
@steps
end
# Additional methods used for multi-step form processing.
module StepMethods
# Initialize new instance.
def initialize( *args )
super
# Track the furthest step the user has seen so far.
self.seen = last_step( seen, step )
self.step ||= steps.first
self.next ||= step
self.last ||= step
# Only advance to the requested step when the current step's data is valid.
if valid_step?
self.step = self.next
self.seen = last_step( seen, previous_step( step ) )
end
self.last = last_step( step, last )
end
# Make all steps instantly available.
# Returns self for chaining.
def unlock_steps
self.last = self.seen = steps.last
self
end
# Get parameters relevant for given step.
def step_params( step )
tagged_params( step )
end
# Get the parameters relevant for the current step.
def current_params
tagged_params( step )
end
# Get the parameters irrelevant for the current step.
def other_params
untagged_params( step )
end
# Get hash mapping defined steps to their names.
def form_steps
self.class.form_steps
end
# Get allowed form steps as list of symbols.
def steps
form_steps.keys
end
# Get name of current or given step, if any.
def step_name( step = self.step )
form_steps[ step ]
end
# Get hash of steps along with their names, for use as sidebar.
def step_names
form_steps.reject{ |k,v| v.nil? }
end
# Get index of given/current step among all steps.
# NOTE(review): the default 'self.steps' is the whole step array, so
# steps.index( step ) returns nil when called without arguments —
# this was probably meant to be 'self.step'; confirm against callers.
def step_index( step = self.steps )
steps.index( step )
end
# Get first step, or first step among given list of steps, if any.
def first_step( *args )
if args.empty?
steps.first
else
args.flatten.compact.min_by{ |x| step_index( x ) }
end
end
# Get last step, or last step among given list of steps, if any.
def last_step( *args )
if args.empty?
steps.last
else
args.flatten.compact.max_by{ |x| step_index( x ) }
end
end
# Get steps before given/current step.
def previous_steps( step = self.step )
index = steps.index( step ) || 0
steps.first( index )
end
# Get steps after given/current step.
def next_steps( step = self.step )
index = steps.index( step ) || -1
steps[ index + 1 .. -1 ]
end
# Get the next step, or nil.
def next_step( step = self.step )
next_steps( step ).first
end
# Get the previous step, or nil.
def previous_step( step = self.step )
previous_steps( step ).last
end
# Test if the current/given step has no parameters defined.
def extra_step?( step = self.step )
step_params( step ).empty?
end
# Test if the current/given step has some parameters defined.
def regular_step?( step = self.step )
not extra_step?( step )
end
# Get steps with no parameters defined.
def extra_steps
steps.select{ |step| extra_step?( step ) }
end
# Get steps with some parameters defined.
def regular_steps
steps.select{ |step| regular_step?( step ) }
end
# Filter steps by testing their corresponding parameters with given block. Excludes steps without parameters.
def filter_steps
steps.select do |step|
params = step_params( step )
yield params unless params.empty?
end
end
# Get steps which have required parameters. Excludes steps without parameters.
def required_steps
filter_steps{ |params| params.any?{ |p| p.required? } }
end
# Get steps which have no required parameters. Excludes steps without parameters.
def optional_steps
filter_steps{ |params| params.none?{ |p| p.required? } }
end
# Get steps which have some data filled in. Excludes steps without parameters.
def filled_steps
filter_steps{ |params| params.any?{ |p| p.filled? } }
end
# Get steps which have no data filled in. Excludes steps without parameters.
def unfilled_steps
filter_steps{ |params| params.none?{ |p| p.filled? } }
end
# Get steps which have all data filled in correctly. Excludes steps without parameters.
def valid_steps
filter_steps{ |params| valid?( params ) }
end
# Test if the current/given step has all data filled in correctly. Considered true for steps without parameters.
def valid_step?( step = self.step )
valid?( step_params( step ) )
end
# Get steps which have some invalid data filled in. Excludes steps without parameters.
def invalid_steps
filter_steps{ |params| invalid?( params ) }
end
# Get first step with invalid data, or nil if there is none.
def invalid_step
invalid_steps.first
end
# Test if the current/given step has some invalid data filled in. False for steps without parameters.
def invalid_step?( step = self.step )
invalid?( step_params( step ) )
end
# Get steps with some parameters enabled. Excludes steps without parameters.
def enabled_steps
filter_steps{ |params| params.any?{ |p| p.enabled? } }
end
# Test if given/current step has some parameters enabled. Considered true for steps without parameters.
def enabled_step?( step = self.step )
params = step_params( step )
params.empty? or params.any?{ |p| p.enabled? }
end
# Get steps with all parameters disabled. Excludes steps without parameters.
def disabled_steps
filter_steps{ |params| params.all?{ |p| p.disabled? } }
end
# Test if given/current step has all parameters disabled. False for steps without parameters.
def disabled_step?( step = self.step )
not enabled_step?( step )
end
# Get unfinished steps, those we have not yet visited or visited for the first time.
def unfinished_steps
next_steps( seen )
end
# Get finished steps, those we have visited or skipped over before.
def finished_steps
steps - unfinished_steps
end
# Test if given/current step is finished.
def finished_step?( step = self.step )
finished_steps.include?( step )
end
# Get inaccessible steps, excluding the last accessed step.
def inaccessible_steps
next_steps( last )
end
# Get accessible steps, including the last accessed step.
def accessible_steps
steps - inaccessible_steps
end
# Get valid finished steps. Excludes steps without parameters.
def complete_steps
valid_steps & finished_steps
end
# Get invalid finished steps. Excludes steps without parameters.
def incomplete_steps
invalid_steps & finished_steps
end
# Get steps which shall be checked off as ok in the sidebar.
def good_steps
complete_steps & filled_steps
end
# Get steps which shall be marked as having errors in the sidebar.
def bad_steps
incomplete_steps
end
end
end
# EOF #
|
require "fpm/namespace"
require "fpm/package"
require "fpm/util"
require "fileutils"
require "find"
# Builds packages from CPAN (Perl) modules: fetches metadata and tarballs
# from metacpan, builds with Makefile.PL or Build.PL, and stages the result.
class FPM::Package::CPAN < FPM::Package
# Flags '--foo' will be accessible as attributes[:cpan_foo]
option "--perl-bin", "PERL_EXECUTABLE",
"The path to the perl executable you wish to run.", :default => "perl"
option "--cpanm-bin", "CPANM_EXECUTABLE",
"The path to the cpanm executable you wish to run.", :default => "cpanm"
option "--mirror", "CPAN_MIRROR",
"The CPAN mirror to use instead of the default."
option "--mirror-only", :flag,
"Only use the specified mirror for metadata.", :default => false
option "--package-name-prefix", "NAME_PREFIX",
"Name to prefix the package name with.", :default => "perl"
option "--test", :flag,
"Run the tests before packaging?", :default => true
option "--perl-lib-path", "PERL_LIB_PATH",
"Path of target Perl Libraries"
private
# Main entry point: +package+ is either a module name to fetch from CPAN
# or (with --local-module) a directory containing the module source.
def input(package)
#if RUBY_VERSION =~ /^1\.8/
#raise FPM::Package::InvalidArgument,
#"Sorry, CPAN support requires ruby 1.9 or higher. You have " \
#"#{RUBY_VERSION}. If this negatively impacts you, please let " \
#"me know by filing an issue: " \
#"https://github.com/jordansissel/fpm/issues"
#end
#require "ftw" # for http access
require "net/http"
require "json"
if (attributes[:cpan_local_module?])
moduledir = package
else
result = search(package)
tarball = download(result, version)
moduledir = unpack(tarball)
end
# Read package metadata (name, version, etc)
if File.exists?(File.join(moduledir, "META.json"))
metadata = JSON.parse(File.read(File.join(moduledir, ("META.json"))))
elsif File.exists?(File.join(moduledir, ("META.yml")))
require "yaml"
metadata = YAML.load_file(File.join(moduledir, ("META.yml")))
elsif File.exists?(File.join(moduledir, "MYMETA.json"))
metadata = JSON.parse(File.read(File.join(moduledir, ("MYMETA.json"))))
elsif File.exists?(File.join(moduledir, ("MYMETA.yml")))
require "yaml"
metadata = YAML.load_file(File.join(moduledir, ("MYMETA.yml")))
else
raise FPM::InvalidPackageConfiguration,
"Could not find package metadata. Checked for META.json and META.yml"
end
self.version = metadata["version"]
self.description = metadata["abstract"]
# 'license' may be a string, an array, or absent.
self.license = case metadata["license"]
when Array; metadata["license"].first
when nil; "unknown"
else; metadata["license"]
end
unless metadata["distribution"].nil?
@logger.info("Setting package name from 'distribution'",
:distribution => metadata["distribution"])
self.name = fix_name(metadata["distribution"])
else
@logger.info("Setting package name from 'name'",
:name => metadata["name"])
self.name = fix_name(metadata["name"])
end
# Not all things have 'author' listed.
# NOTE(review): 'author' can also be a plain string in some dists; this
# version assumes an array and would fail on String#join — fixed later.
self.vendor = metadata["author"].join(", ") unless metadata["author"].nil?
self.url = metadata["resources"]["homepage"] rescue "unknown"
# TODO(sissel): figure out if this perl module compiles anything
# and set the architecture appropriately.
self.architecture = "all"
# Install any build/configure dependencies with cpanm.
# We'll install to a temporary directory.
@logger.info("Installing any build or configure dependencies")
cpanm_flags = ["-L", build_path("cpan"), moduledir]
cpanm_flags += ["-n"] if attributes[:cpan_test?]
cpanm_flags += ["--mirror", "#{attributes[:cpan_mirror]}"] if !attributes[:cpan_mirror].nil?
cpanm_flags += ["--mirror-only"] if attributes[:cpan_mirror_only?] && !attributes[:cpan_mirror].nil?
safesystem(attributes[:cpan_cpanm_bin], *cpanm_flags)
if !attributes[:no_auto_depends?]
unless metadata["requires"].nil?
metadata["requires"].each do |dep_name, version|
# Special case for representing perl core as a version.
if dep_name == "perl"
self.dependencies << "#{dep_name} >= #{version}"
next
end
dep = search(dep_name)
if dep.include?("distribution")
name = fix_name(dep["distribution"])
else
name = fix_name(dep_name)
end
if version.to_s == "0"
# Assume 'Foo = 0' means any version?
self.dependencies << "#{name}"
else
# The 'version' string can be something complex like:
# ">= 0, != 1.0, != 1.2"
if version.is_a?(String)
version.split(/\s*,\s*/).each do |v|
if v =~ /\s*[><=]/
self.dependencies << "#{name} #{v}"
else
self.dependencies << "#{name} = #{v}"
end
end
else
self.dependencies << "#{name} >= #{version}"
end
end
end
end
end #no_auto_depends
::Dir.chdir(moduledir) do
# TODO(sissel): install build and config dependencies to resolve
# build/configure requirements.
# META.yml calls it 'configure_requires' and 'build_requires'
# META.json calls it prereqs/build and prereqs/configure
prefix = attributes[:prefix] || "/usr/local"
# TODO(sissel): Set default INSTALL path?
# Try Makefile.PL, Build.PL
#
if File.exists?("Makefile.PL")
if attributes[:cpan_perl_lib_path]
perl_lib_path = attributes[:cpan_perl_lib_path]
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"Makefile.PL", "PREFIX=#{prefix}", "LIB=#{perl_lib_path}",
# Empty install_base to avoid local::lib being used.
"INSTALL_BASE=")
else
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"Makefile.PL", "PREFIX=#{prefix}",
# Empty install_base to avoid local::lib being used.
"INSTALL_BASE=")
end
if attributes[:cpan_test?]
make = [ "env", "PERL5LIB=#{build_path("cpan/lib/perl5")}", "make" ]
else
make = [ "make" ]
end
safesystem(*make)
safesystem(*(make + ["test"])) if attributes[:cpan_test?]
safesystem(*(make + ["DESTDIR=#{staging_path}", "install"]))
elsif File.exists?("Build.PL")
# Module::Build is in use here; different actions required.
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"Build.PL")
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"./Build")
if attributes[:cpan_test?]
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"./Build", "test")
end
if attributes[:cpan_perl_lib_path]
perl_lib_path = attributes[:cpan_perl_lib_path]
# NOTE(review): '--destdir #{staging_path}' appears twice in this
# command line — harmless but redundant; confirm and drop one.
safesystem("./Build install --install_path lib=#{perl_lib_path} \
--destdir #{staging_path} --prefix #{prefix} --destdir #{staging_path}")
else
safesystem("./Build", "install",
"--prefix", prefix, "--destdir", staging_path,
# Empty install_base to avoid local::lib being used.
"--install_base", "")
end
else
raise FPM::InvalidPackageConfiguration,
"I don't know how to build #{name}. No Makefile.PL nor " \
"Build.PL found"
end
# Fix any files likely to cause conflicts that are duplicated
# across packages.
# https://github.com/jordansissel/fpm/issues/443
# https://github.com/jordansissel/fpm/issues/510
::Dir.glob(File.join(staging_path, prefix, "**/perllocal.pod")).each do |path|
@logger.debug("Removing useless file.",
:path => path.gsub(staging_path, ""))
File.unlink(path)
end
end
# TODO(sissel): figure out if this perl module compiles anything
# and set the architecture appropriately.
self.architecture = "all"
# Find any shared objects in the staging directory to set architecture as
# native if found; otherwise keep the 'all' default.
Find.find(staging_path) do |path|
if path =~ /\.so$/
@logger.info("Found shared library, setting architecture=native",
:path => path)
self.architecture = "native"
end
end
end
# Extract +tarball+ into a fresh build directory, stripping the top-level
# directory of the archive. Returns the extraction directory.
def unpack(tarball)
directory = build_path("module")
::Dir.mkdir(directory)
args = [ "-C", directory, "-zxf", tarball,
"--strip-components", "1" ]
safesystem("tar", *args)
return directory
end
# Download the distribution tarball described by +metadata+ (a metacpan
# module result). Also sets self.version. Returns the local tarball path.
def download(metadata, cpan_version=nil)
distribution = metadata["distribution"]
author = metadata["author"]
@logger.info("Downloading perl module",
:distribution => distribution,
:version => cpan_version)
# Default to the latest version unless we specify one.
if cpan_version.nil?
self.version = metadata["version"]
else
# Preserve the 'v' prefix convention of the distribution's versioning.
if metadata["version"] =~ /^v\d/
self.version = "v#{cpan_version}"
else
self.version = cpan_version
end
end
metacpan_release_url = "http://api.metacpan.org/v0/release/#{author}/#{distribution}-#{self.version}"
begin
release_response = httpfetch(metacpan_release_url)
rescue Net::HTTPServerException => e
@logger.error("metacpan release query failed.", :error => e.message,
:module => package, :url => metacpan_release_url)
raise FPM::InvalidPackageConfiguration, "metacpan release query failed"
end
data = release_response.body
release_metadata = JSON.parse(data)
archive = release_metadata["archive"]
# should probably be basepathed from the url
tarball = File.basename(archive)
url_base = "http://www.cpan.org/"
url_base = "#{attributes[:cpan_mirror]}" if !attributes[:cpan_mirror].nil?
#url = "http://www.cpan.org/CPAN/authors/id/#{author[0,1]}/#{author[0,2]}/#{author}/#{tarball}"
url = "#{url_base}/authors/id/#{author[0,1]}/#{author[0,2]}/#{author}/#{archive}"
@logger.debug("Fetching perl module", :url => url)
begin
response = httpfetch(url)
rescue Net::HTTPServerException => e
#@logger.error("Download failed", :error => response.status_line,
#:url => url)
@logger.error("Download failed", :error => e, :url => url)
raise FPM::InvalidPackageConfiguration, "metacpan query failed"
end
File.open(build_path(tarball), "w") do |fd|
#response.read_body { |c| fd.write(c) }
fd.write(response.body)
end
return build_path(tarball)
end # def download
# Query metacpan for a module's metadata; returns the parsed JSON hash.
def search(package)
@logger.info("Asking metacpan about a module", :module => package)
metacpan_url = "http://api.metacpan.org/v0/module/" + package
begin
response = httpfetch(metacpan_url)
rescue Net::HTTPServerException => e
#@logger.error("metacpan query failed.", :error => response.status_line,
#:module => package, :url => metacpan_url)
@logger.error("metacpan query failed.", :error => e.message,
:module => package, :url => metacpan_url)
raise FPM::InvalidPackageConfiguration, "metacpan query failed"
end
#data = ""
#response.read_body { |c| p c; data << c }
data = response.body
metadata = JSON.parse(data)
return metadata
end # def metadata
# Map a Perl module name to a package name: prefix it and replace '::'
# separators with '-' (perl itself is passed through unchanged).
def fix_name(name)
case name
when "perl"; return "perl"
else; return [attributes[:cpan_package_name_prefix], name].join("-").gsub("::", "-")
end
end # def fix_name
# Fetch +url+ (honouring $http_proxy), following redirects recursively.
# Returns the successful response; raises on HTTP errors.
def httpfetch(url)
uri = URI.parse(url)
if ENV['http_proxy']
proxy = URI.parse(ENV['http_proxy'])
http = Net::HTTP.Proxy(proxy.host,proxy.port,proxy.user,proxy.password).new(uri.host, uri.port)
else
http = Net::HTTP.new(uri.host, uri.port)
end
response = http.request(Net::HTTP::Get.new(uri.request_uri))
case response
when Net::HTTPSuccess; return response
when Net::HTTPRedirection; return httpfetch(response["location"])
else; response.error!
end
end
public(:input)
end # class FPM::Package::CPAN
Added type checks for CPAN authors
require "fpm/namespace"
require "fpm/package"
require "fpm/util"
require "fileutils"
require "find"
# Builds packages from CPAN (Perl) modules: fetches metadata and tarballs
# from metacpan, builds with Makefile.PL or Build.PL, and stages the result.
class FPM::Package::CPAN < FPM::Package
# Flags '--foo' will be accessible as attributes[:cpan_foo]
option "--perl-bin", "PERL_EXECUTABLE",
"The path to the perl executable you wish to run.", :default => "perl"
option "--cpanm-bin", "CPANM_EXECUTABLE",
"The path to the cpanm executable you wish to run.", :default => "cpanm"
option "--mirror", "CPAN_MIRROR",
"The CPAN mirror to use instead of the default."
option "--mirror-only", :flag,
"Only use the specified mirror for metadata.", :default => false
option "--package-name-prefix", "NAME_PREFIX",
"Name to prefix the package name with.", :default => "perl"
option "--test", :flag,
"Run the tests before packaging?", :default => true
option "--perl-lib-path", "PERL_LIB_PATH",
"Path of target Perl Libraries"
private
# Main entry point: +package+ is either a module name to fetch from CPAN
# or (with --local-module) a directory containing the module source.
def input(package)
#if RUBY_VERSION =~ /^1\.8/
#raise FPM::Package::InvalidArgument,
#"Sorry, CPAN support requires ruby 1.9 or higher. You have " \
#"#{RUBY_VERSION}. If this negatively impacts you, please let " \
#"me know by filing an issue: " \
#"https://github.com/jordansissel/fpm/issues"
#end
#require "ftw" # for http access
require "net/http"
require "json"
if (attributes[:cpan_local_module?])
moduledir = package
else
result = search(package)
tarball = download(result, version)
moduledir = unpack(tarball)
end
# Read package metadata (name, version, etc)
if File.exists?(File.join(moduledir, "META.json"))
metadata = JSON.parse(File.read(File.join(moduledir, ("META.json"))))
elsif File.exists?(File.join(moduledir, ("META.yml")))
require "yaml"
metadata = YAML.load_file(File.join(moduledir, ("META.yml")))
elsif File.exists?(File.join(moduledir, "MYMETA.json"))
metadata = JSON.parse(File.read(File.join(moduledir, ("MYMETA.json"))))
elsif File.exists?(File.join(moduledir, ("MYMETA.yml")))
require "yaml"
metadata = YAML.load_file(File.join(moduledir, ("MYMETA.yml")))
else
raise FPM::InvalidPackageConfiguration,
"Could not find package metadata. Checked for META.json and META.yml"
end
self.version = metadata["version"]
self.description = metadata["abstract"]
# 'license' may be a string, an array, or absent.
self.license = case metadata["license"]
when Array; metadata["license"].first
when nil; "unknown"
else; metadata["license"]
end
unless metadata["distribution"].nil?
@logger.info("Setting package name from 'distribution'",
:distribution => metadata["distribution"])
self.name = fix_name(metadata["distribution"])
else
@logger.info("Setting package name from 'name'",
:name => metadata["name"])
self.name = fix_name(metadata["name"])
end
# author is not always set or it may be a string instead of an array
unless metadata["author"].nil?
if metadata["author"].respond_to?(:to_str)
self.vendor = metadata["author"]
elsif metadata["author"].respond_to?(:to_ary)
self.vendor = metadata["author"].join(", ")
end
end
self.url = metadata["resources"]["homepage"] rescue "unknown"
# TODO(sissel): figure out if this perl module compiles anything
# and set the architecture appropriately.
self.architecture = "all"
# Install any build/configure dependencies with cpanm.
# We'll install to a temporary directory.
@logger.info("Installing any build or configure dependencies")
cpanm_flags = ["-L", build_path("cpan"), moduledir]
cpanm_flags += ["-n"] if attributes[:cpan_test?]
cpanm_flags += ["--mirror", "#{attributes[:cpan_mirror]}"] if !attributes[:cpan_mirror].nil?
cpanm_flags += ["--mirror-only"] if attributes[:cpan_mirror_only?] && !attributes[:cpan_mirror].nil?
safesystem(attributes[:cpan_cpanm_bin], *cpanm_flags)
if !attributes[:no_auto_depends?]
unless metadata["requires"].nil?
metadata["requires"].each do |dep_name, version|
# Special case for representing perl core as a version.
if dep_name == "perl"
self.dependencies << "#{dep_name} >= #{version}"
next
end
dep = search(dep_name)
if dep.include?("distribution")
name = fix_name(dep["distribution"])
else
name = fix_name(dep_name)
end
if version.to_s == "0"
# Assume 'Foo = 0' means any version?
self.dependencies << "#{name}"
else
# The 'version' string can be something complex like:
# ">= 0, != 1.0, != 1.2"
if version.is_a?(String)
version.split(/\s*,\s*/).each do |v|
if v =~ /\s*[><=]/
self.dependencies << "#{name} #{v}"
else
self.dependencies << "#{name} = #{v}"
end
end
else
self.dependencies << "#{name} >= #{version}"
end
end
end
end
end #no_auto_depends
::Dir.chdir(moduledir) do
# TODO(sissel): install build and config dependencies to resolve
# build/configure requirements.
# META.yml calls it 'configure_requires' and 'build_requires'
# META.json calls it prereqs/build and prereqs/configure
prefix = attributes[:prefix] || "/usr/local"
# TODO(sissel): Set default INSTALL path?
# Try Makefile.PL, Build.PL
#
if File.exists?("Makefile.PL")
if attributes[:cpan_perl_lib_path]
perl_lib_path = attributes[:cpan_perl_lib_path]
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"Makefile.PL", "PREFIX=#{prefix}", "LIB=#{perl_lib_path}",
# Empty install_base to avoid local::lib being used.
"INSTALL_BASE=")
else
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"Makefile.PL", "PREFIX=#{prefix}",
# Empty install_base to avoid local::lib being used.
"INSTALL_BASE=")
end
if attributes[:cpan_test?]
make = [ "env", "PERL5LIB=#{build_path("cpan/lib/perl5")}", "make" ]
else
make = [ "make" ]
end
safesystem(*make)
safesystem(*(make + ["test"])) if attributes[:cpan_test?]
safesystem(*(make + ["DESTDIR=#{staging_path}", "install"]))
elsif File.exists?("Build.PL")
# Module::Build is in use here; different actions required.
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"Build.PL")
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"./Build")
if attributes[:cpan_test?]
safesystem(attributes[:cpan_perl_bin],
"-Mlocal::lib=#{build_path("cpan")}",
"./Build", "test")
end
if attributes[:cpan_perl_lib_path]
perl_lib_path = attributes[:cpan_perl_lib_path]
# NOTE(review): '--destdir #{staging_path}' appears twice in this
# command line — harmless but redundant; confirm and drop one.
safesystem("./Build install --install_path lib=#{perl_lib_path} \
--destdir #{staging_path} --prefix #{prefix} --destdir #{staging_path}")
else
safesystem("./Build", "install",
"--prefix", prefix, "--destdir", staging_path,
# Empty install_base to avoid local::lib being used.
"--install_base", "")
end
else
raise FPM::InvalidPackageConfiguration,
"I don't know how to build #{name}. No Makefile.PL nor " \
"Build.PL found"
end
# Fix any files likely to cause conflicts that are duplicated
# across packages.
# https://github.com/jordansissel/fpm/issues/443
# https://github.com/jordansissel/fpm/issues/510
::Dir.glob(File.join(staging_path, prefix, "**/perllocal.pod")).each do |path|
@logger.debug("Removing useless file.",
:path => path.gsub(staging_path, ""))
File.unlink(path)
end
end
# TODO(sissel): figure out if this perl module compiles anything
# and set the architecture appropriately.
self.architecture = "all"
# Find any shared objects in the staging directory to set architecture as
# native if found; otherwise keep the 'all' default.
Find.find(staging_path) do |path|
if path =~ /\.so$/
@logger.info("Found shared library, setting architecture=native",
:path => path)
self.architecture = "native"
end
end
end
# Extract +tarball+ into a fresh build directory, stripping the top-level
# directory of the archive. Returns the extraction directory.
def unpack(tarball)
directory = build_path("module")
::Dir.mkdir(directory)
args = [ "-C", directory, "-zxf", tarball,
"--strip-components", "1" ]
safesystem("tar", *args)
return directory
end
# Download the distribution tarball described by +metadata+ (a metacpan
# module result). Also sets self.version. Returns the local tarball path.
def download(metadata, cpan_version=nil)
distribution = metadata["distribution"]
author = metadata["author"]
@logger.info("Downloading perl module",
:distribution => distribution,
:version => cpan_version)
# Default to the latest version unless we specify one.
if cpan_version.nil?
self.version = metadata["version"]
else
# Preserve the 'v' prefix convention of the distribution's versioning.
if metadata["version"] =~ /^v\d/
self.version = "v#{cpan_version}"
else
self.version = cpan_version
end
end
metacpan_release_url = "http://api.metacpan.org/v0/release/#{author}/#{distribution}-#{self.version}"
begin
release_response = httpfetch(metacpan_release_url)
rescue Net::HTTPServerException => e
@logger.error("metacpan release query failed.", :error => e.message,
:module => package, :url => metacpan_release_url)
raise FPM::InvalidPackageConfiguration, "metacpan release query failed"
end
data = release_response.body
release_metadata = JSON.parse(data)
archive = release_metadata["archive"]
# should probably be basepathed from the url
tarball = File.basename(archive)
url_base = "http://www.cpan.org/"
url_base = "#{attributes[:cpan_mirror]}" if !attributes[:cpan_mirror].nil?
#url = "http://www.cpan.org/CPAN/authors/id/#{author[0,1]}/#{author[0,2]}/#{author}/#{tarball}"
url = "#{url_base}/authors/id/#{author[0,1]}/#{author[0,2]}/#{author}/#{archive}"
@logger.debug("Fetching perl module", :url => url)
begin
response = httpfetch(url)
rescue Net::HTTPServerException => e
#@logger.error("Download failed", :error => response.status_line,
#:url => url)
@logger.error("Download failed", :error => e, :url => url)
raise FPM::InvalidPackageConfiguration, "metacpan query failed"
end
File.open(build_path(tarball), "w") do |fd|
#response.read_body { |c| fd.write(c) }
fd.write(response.body)
end
return build_path(tarball)
end # def download
# Query metacpan for a module's metadata; returns the parsed JSON hash.
def search(package)
@logger.info("Asking metacpan about a module", :module => package)
metacpan_url = "http://api.metacpan.org/v0/module/" + package
begin
response = httpfetch(metacpan_url)
rescue Net::HTTPServerException => e
#@logger.error("metacpan query failed.", :error => response.status_line,
#:module => package, :url => metacpan_url)
@logger.error("metacpan query failed.", :error => e.message,
:module => package, :url => metacpan_url)
raise FPM::InvalidPackageConfiguration, "metacpan query failed"
end
#data = ""
#response.read_body { |c| p c; data << c }
data = response.body
metadata = JSON.parse(data)
return metadata
end # def metadata
# Map a Perl module name to a package name: prefix it and replace '::'
# separators with '-' (perl itself is passed through unchanged).
def fix_name(name)
case name
when "perl"; return "perl"
else; return [attributes[:cpan_package_name_prefix], name].join("-").gsub("::", "-")
end
end # def fix_name
# Fetch +url+ (honouring $http_proxy), following redirects recursively.
# Returns the successful response; raises on HTTP errors.
def httpfetch(url)
uri = URI.parse(url)
if ENV['http_proxy']
proxy = URI.parse(ENV['http_proxy'])
http = Net::HTTP.Proxy(proxy.host,proxy.port,proxy.user,proxy.password).new(uri.host, uri.port)
else
http = Net::HTTP.new(uri.host, uri.port)
end
response = http.request(Net::HTTP::Get.new(uri.request_uri))
case response
when Net::HTTPSuccess; return response
when Net::HTTPRedirection; return httpfetch(response["location"])
else; response.error!
end
end
public(:input)
end # class FPM::Package::CPAN
|
class Frenetic
  # Gem version string (semantic versioning).
  VERSION = '0.0.10'
end
Version bump to 0.0.11
class Frenetic
  # Gem version string (semantic versioning).
  VERSION = '0.0.11'
end
|
module Fudge
  module Tasks
    # Allow use of Cane complexity and style analyser
    class Cane < Shell
      include Helpers::BundleAware

      private

      # Full shell command to run, optionally wrapped by bundler.
      def cmd(options={})
        bundle_cmd((["cane"] + tty_options).join(' '), options)
      end

      # Cane prints nothing on success, so match empty output.
      def check_for
        /\A\Z/
      end

      # Translate task options into cane command-line switches.
      def tty_options
        flags = []
        flags << "--no-doc" unless options.fetch(:doc, true)
        flags << "--no-style" unless options.fetch(:style, true)
        flags << "--style-measure #{options[:max_width]}" if options.has_key?(:max_width)
        flags
      end
    end

    register Cane
  end
end
Rephrased in less than 80 chars
module Fudge
  module Tasks
    # Allow use of Cane complexity and style analyser
    class Cane < Shell
      include Helpers::BundleAware

      private

      # Full shell command to run, optionally wrapped by bundler.
      def cmd(options={})
        cmd = ["cane"] + tty_options
        bundle_cmd(cmd.join(' '), options)
      end

      # Cane prints nothing on success, so match empty output.
      def check_for
        /\A\Z/
      end

      # Translate task options into cane command-line switches.
      def tty_options
        args = []
        args << "--no-doc" unless options.fetch(:doc, true)
        args << "--no-style" unless options.fetch(:style, true)
        # Fix: Hash has no #fetch? method, so the previous one-liner raised
        # NoMethodError; #key? keeps the check on one short line.
        args << "--style-measure #{options[:max_width]}" if options.key?(:max_width)
        args
      end
    end

    register Cane
  end
end
|
module FxPotato
  # Gem version string (semantic versioning).
  VERSION = '2.0.0'
end
Bump to 2.0.1
module FxPotato
  # Gem version string (semantic versioning).
  VERSION = '2.0.1'
end
|
module Galaxy
# Registry and dispatcher for URL-scheme-specific transports.
# Concrete transports register an instance; class-level calls are routed
# to the first registered transport whose pattern matches the URL.
class Transport
  # NOTE(review): a class variable, so the registry is shared across all
  # Transport subclasses process-wide.
  @@transports = []

  def self.register transport
    @@transports << transport
  end

  # Look up the remote object at url via the matching transport.
  def self.locate url, log=nil
    handler_for(url).locate url, log
  end

  # Expose object at url via the matching transport.
  def self.publish url, object, log=nil
    handler_for(url).publish url, object, log
  end

  def self.unpublish url
    handler_for(url).unpublish url
  end

  # First registered transport matching url; raises if none matches.
  def self.handler_for url
    @@transports.select { |t| t.can_handle? url }.first or raise "No handler found for #{url}"
  end

  def initialize pattern
    @pattern = pattern
  end

  def can_handle? url
    @pattern =~ url
  end

  # Block until the server published at url terminates.
  def self.join url
    handler_for(url).join url
  end
end
# Transport backed by Ruby's distributed-object library (druby:// URLs).
class DRbTransport < Transport
  require 'drb'

  def initialize
    super(/^druby:.*/)
    @servers = {}
  end

  # Returns a DRb proxy for the object served at url.
  def locate url, log=nil
    DRbObject.new_with_uri url
  end

  def publish url, object, log=nil
    @servers[url] = DRb.start_service url, object
  end

  def unpublish url
    @servers[url].stop_service
    # NOTE(review): entry is nilled rather than deleted, so the key stays
    # in the hash.
    @servers[url] = nil
  end

  def join url
    @servers[url].thread.join
  end
end
# In-process transport for local: URLs — published objects are held in a
# simple url-keyed hash, which makes this transport handy for tests.
class LocalTransport < Transport
  def initialize
    @servers = {}
    super(/^local:/)
  end

  def locate url, log=nil
    @servers.fetch(url, nil)
  end

  def publish url, object, log=nil
    @servers.store(url, object)
  end

  def unpublish url
    @servers.store(url, nil)
  end

  def join url
    raise "Not yet implemented"
  end
end
# This http transport isn't used in Galaxy 2.4, which uses http only for announcements. However, this code shows
# how announcements could be merged via transport. The unit test for this class shows one-directional communication
# (eg, for announcements). To do two-way communication, servers (eg, locate()) would be needed on both sides.
# Note that the console code assumes that the transport initialize blocks, so the calling code (eg console) waits
# for an explicit 'join'. But the Announcer class used here starts a server without blocking and returns immediately.
# Therefore, an explicit join is not necessary. So to use this, make the console work like the agent: track the main
# polling thread started in initialize() and kill/join it when done.
#
# Transport for http:// URLs, built on the announcements HTTP server.
class HttpTransport < Transport
  require 'galaxy/announcements'

  def initialize
    super(/^http:.*/)
    @servers = {}
    @log = nil
  end

  # get object (ie announce fn)
  # - install announce() callback
  def locate url, log=nil
    HTTPAnnouncementSender.new url, log
  end

  # make object available (ie console)
  def publish url, obj, log=nil
    if !obj.respond_to?('process_get') || !obj.respond_to?('process_post')
      raise TypeError.new("#{obj.class.name} doesn't contain 'process_post' and 'process_get' methods")
    end
    return @servers[url] if @servers[url]
    begin
      @servers[url] = Galaxy::HTTPServer.new(url, obj)
    rescue NameError
      # Galaxy::HTTPServer depends on mongrel; surface a helpful hint
      # instead of a bare uninitialized-constant error.
      raise NameError.new("Unable to create the http server. Is mongrel installed?")
    end
  end

  def unpublish url
    @servers[url].shutdown
    @servers[url] = nil
  end

  def join url
    #nop
  end
end
end
# Register the built-in transports (order matters: first match wins).
Galaxy::Transport.register Galaxy::DRbTransport.new
Galaxy::Transport.register Galaxy::LocalTransport.new
Galaxy::Transport.register Galaxy::HttpTransport.new

# Disable DRb persistent connections (monkey patch)
module DRb
  class DRbConn
    # A pool size of zero forces a fresh connection per call instead of
    # keeping idle sockets open between calls.
    remove_const :POOL_SIZE
    POOL_SIZE = 0
  end
end
http: better error message if Mongrel is not installed
Signed-off-by: Pierre-Alexandre Meyer <ff019a5748a52b5641624af88a54a2f0e46a9fb5@ning.com>
module Galaxy
# Registry and dispatcher for URL-scheme-specific transports.
# Concrete transports register an instance; class-level calls are routed
# to the first registered transport whose pattern matches the URL.
class Transport
  # NOTE(review): a class variable, so the registry is shared across all
  # Transport subclasses process-wide.
  @@transports = []

  def self.register transport
    @@transports << transport
  end

  # Look up the remote object at url via the matching transport.
  def self.locate url, log=nil
    handler_for(url).locate url, log
  end

  # Expose object at url via the matching transport.
  def self.publish url, object, log=nil
    handler_for(url).publish url, object, log
  end

  def self.unpublish url
    handler_for(url).unpublish url
  end

  # First registered transport matching url; raises if none matches.
  def self.handler_for url
    @@transports.select { |t| t.can_handle? url }.first or raise "No handler found for #{url}"
  end

  def initialize pattern
    @pattern = pattern
  end

  def can_handle? url
    @pattern =~ url
  end

  # Block until the server published at url terminates.
  def self.join url
    handler_for(url).join url
  end
end
# Transport backed by Ruby's distributed-object library (druby:// URLs).
class DRbTransport < Transport
  require 'drb'

  def initialize
    super(/^druby:.*/)
    @servers = {}
  end

  # Returns a DRb proxy for the object served at url.
  def locate url, log=nil
    DRbObject.new_with_uri url
  end

  def publish url, object, log=nil
    @servers[url] = DRb.start_service url, object
  end

  def unpublish url
    @servers[url].stop_service
    # NOTE(review): entry is nilled rather than deleted, so the key stays
    # in the hash.
    @servers[url] = nil
  end

  def join url
    @servers[url].thread.join
  end
end
# In-process transport for local: URLs — published objects are held in a
# simple url-keyed hash, which makes this transport handy for tests.
class LocalTransport < Transport
  def initialize
    @servers = {}
    super(/^local:/)
  end

  def locate url, log=nil
    @servers.fetch(url, nil)
  end

  def publish url, object, log=nil
    @servers.store(url, object)
  end

  def unpublish url
    @servers.store(url, nil)
  end

  def join url
    raise "Not yet implemented"
  end
end
# This http transport isn't used in Galaxy 2.4, which uses http only for announcements. However, this code shows
# how announcements could be merged via transport. The unit test for this class shows one-directional communication
# (eg, for announcements). To do two-way communication, servers (eg, locate()) would be needed on both sides.
# Note that the console code assumes that the transport initialize blocks, so the calling code (eg console) waits
# for an explicit 'join'. But the Announcer class used here starts a server without blocking and returns immediately.
# Therefore, an explicit join is not necessary. So to use this, make the console work like the agent: track the main
# polling thread started in initialize() and kill/join it when done.
#
# Transport for http:// URLs, built on the announcements HTTP server.
class HttpTransport < Transport
  require 'galaxy/announcements'

  def initialize
    super(/^http:.*/)
    @servers = {}
    @log = nil
  end

  # get object (ie announce fn)
  # - install announce() callback
  def locate url, log=nil
    HTTPAnnouncementSender.new url, log
  end

  # make object available (ie console)
  def publish url, obj, log=nil
    if !obj.respond_to?('process_get') || !obj.respond_to?('process_post')
      raise TypeError.new("#{obj.class.name} doesn't contain 'process_post' and 'process_get' methods")
    end
    return @servers[url] if @servers[url]
    begin
      @servers[url] = Galaxy::HTTPServer.new(url, obj)
    rescue NameError => e
      # Keep the underlying failure detail so an unrelated NameError is
      # not misdiagnosed as a missing mongrel gem.
      raise NameError.new("Unable to create the http server (#{e.message}). Is mongrel installed?")
    end
    return @servers[url]
  end

  def unpublish url
    @servers[url].shutdown
    @servers[url] = nil
  end

  def join url
    #nop
  end
end
end
# Register the built-in transports (order matters: first match wins).
Galaxy::Transport.register Galaxy::DRbTransport.new
Galaxy::Transport.register Galaxy::LocalTransport.new
Galaxy::Transport.register Galaxy::HttpTransport.new

# Disable DRb persistent connections (monkey patch)
module DRb
  class DRbConn
    # A pool size of zero forces a fresh connection per call instead of
    # keeping idle sockets open between calls.
    remove_const :POOL_SIZE
    POOL_SIZE = 0
  end
end
|
require 'uri'
begin
require 'moped'
rescue LoadError
begin
require 'mongo'
rescue LoadError
raise "[activr] Can't find any suitable mongodb driver: please install 'mongo' or 'moped' gem"
end
end
#
# Generic Mongodb driver
#
# This is the main interface with the underlying MongoDB driver, which can be either the official `mongo` driver or `moped` (the driver used by `mongoid`).
#
class Activr::Storage::MongoDriver
# Detect the installed MongoDB driver gem and validate configuration.
def initialize
  # check settings
  raise "Missing setting :uri in config: #{self.config.inspect}" if self.config[:uri].blank?
  # NOTE(review): @collections looks like a per-name cache but is never
  # read in this class — confirm before removing.
  @collections = { }
  # Prefer moped when both gems are present.
  @kind = if defined?(::Moped)
    if defined?(::Moped::BSON)
      # moped < 2.0.0
      :moped_1
    else
      # moped driver
      :moped
    end
  elsif defined?(::Mongo::MongoClient)
    # mongo ruby driver < 2.0.0
    :mongo
  elsif defined?(::Mongo::Client)
    raise "Sorry, mongo gem >= 2.0 is not supported yet"
  else
    raise "Can't find any suitable mongodb driver: please install 'mongo' or 'moped' gem"
  end
  # Activr.logger.info("Using mongodb driver: #{@kind}")
  # The legacy mongo client needs the database name extracted from the URI.
  if @kind == :mongo
    uri = URI.parse(self.config[:uri])
    @db_name = uri.path[1..-1]
    raise "Missing database name in setting uri: #{config[:uri]}" if @db_name.blank?
  end
end
# MongoDB config
#
# @api private
#
# @return [Hash] Config
def config
  Activr.config.mongodb
end

# Mongodb connection/session (created lazily and memoized)
#
# @api private
#
# @return [Mongo::MongoClient, Mongo::MongoReplicaSetClient, Moped::Session] Connection handler
def conn
  @conn ||= begin
    case @kind
    when :moped_1, :moped
      ::Moped::Session.connect(self.config[:uri])
    when :mongo
      ::Mongo::MongoClient.from_uri(self.config[:uri])
    end
  end
end
# Mongodb collection
#
# @api private
#
# @param col_name [String] Collection name
# @return [Mongo::Collection, Moped::Collection] Collection handler
def collection(col_name)
  case @kind
  when :mongo
    self.conn.db(@db_name).collection(col_name)
  when :moped_1, :moped
    self.conn[col_name]
  end
end
# Insert a document into given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param doc [Hash] Document hash to insert
# @return [BSON::ObjectId, Moped::BSON::ObjectId] Inserted document id
def insert(col, doc)
  case @kind
  when :moped_1, :moped
    # Moped does not return the generated _id, so generate one up front
    # when the document doesn't already carry one, and return it ourselves.
    doc_id = doc[:_id] || doc['_id']
    if doc_id.nil?
      doc_id = case @kind
      when :moped_1
        # Moped < 2.0.0 uses a custom BSON implementation
        ::Moped::BSON::ObjectId.new
      when :moped
        # Moped >= 2.0.0 uses bson gem
        ::BSON::ObjectId.new
      end
      doc['_id'] = doc_id
    end
    col.insert(doc)
    doc_id
  when :mongo
    # The legacy mongo driver returns the inserted id itself.
    col.insert(doc)
  end
end
# Find a document by id
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
# @return [Hash, OrderedHash, Nil] Document
def find_one(col, selector)
  case @kind
  when :mongo
    col.find_one(selector)
  when :moped_1, :moped
    col.find(selector).one
  end
end
# Find documents in given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
# @param limit [Integer] Maximum number of documents to find
# @param skip [Integer] Number of documents to skip
# @param sort_field [Symbol,String] The field to use to sort documents in descending order
# @return [Enumerable] An enumerable on found documents
def find(col, selector, limit, skip, sort_field = nil)
  case @kind
  when :moped_1, :moped
    result = col.find(selector).skip(skip).limit(limit)
    # NOTE(review): assumes Moped::Query#sort modifies the query in place,
    # since the return value is discarded — verify against the moped API.
    result.sort(sort_field => -1) if sort_field
    result
  when :mongo
    # compute options hash
    options = {
      :limit => limit,
      :skip => skip,
    }
    options[:sort] = [ sort_field, ::Mongo::DESCENDING ] if sort_field
    # Stream large result sets in batches instead of one big fetch.
    options[:batch_size] = 100 if (limit > 100)
    col.find(selector, options)
  end
end
# Count documents in given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
# @return [Integer] Number of documents in collections that satisfy given selector
def count(col, selector)
  # Both drivers share the same query-then-count API.
  return col.find(selector).count if [ :moped_1, :moped, :mongo ].include?(@kind)
end

# Delete documents in given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
def delete(col, selector)
  case @kind
  when :mongo
    col.remove(selector)
  when :moped_1, :moped
    col.find(selector).remove_all
  end
end
# Add index to given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param index_spec [Array] Array of [ {String}, {Integer} ] tuplets with {String} being a field to index and {Integer} the order (`-1` for DESC and `1` for ASC)
# @param options [Hash] Options hash
# @option options [Boolean] :background Background indexing ? (default: `true`)
# @option options [Boolean] :sparse Is it a sparse index ? (default: `false`)
# @return [String] Index created
def add_index(col, index_spec, options = { })
  options = {
    :background => true,
    :sparse => false,
  }.merge(options)
  case @kind
  when :moped_1, :moped
    # Moped expects an ordered hash of field => order rather than an
    # array of [ field, order ] tuplets.
    index_spec = index_spec.inject(ActiveSupport::OrderedHash.new) do |memo, field_spec|
      memo[field_spec[0]] = field_spec[1]
      memo
    end
    col.indexes.create(index_spec, options)
    index_spec
  when :mongo
    col.create_index(index_spec, options)
  end
end
# Get all indexes for given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @return [Array<String>] Indexes names
def indexes(col)
  names = [ ]
  case @kind
  when :mongo
    names = col.index_information.keys
  when :moped_1, :moped
    col.indexes.each { |index_spec| names << index_spec["name"] }
  end
  names
end

# Drop all indexes for given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
def drop_indexes(col)
  if @kind == :mongo
    col.drop_indexes
  elsif (@kind == :moped_1) || (@kind == :moped)
    col.indexes.drop
  end
end
# Get handler for `activities` collection (memoized)
#
# @api private
#
# @return [Mongo::Collection, Moped::Collection] Collection handler
def activity_collection
  @activity_collection ||= begin
    col_name = self.config[:activities_col]
    if col_name.nil?
      # Default name, with optional configured prefix.
      col_name = "activities"
      col_name = "#{self.config[:col_prefix]}_#{col_name}" unless self.config[:col_prefix].blank?
    end
    self.collection(col_name)
  end
end

# Get handler for a `<kind>_timelines` collection (memoized per kind)
#
# NOTE(review): when the :timelines_col setting is present, every timeline
# kind maps to that single collection — confirm this is intended.
#
# @api private
#
# @param kind [String] Timeline kind
# @return [Mongo::Collection, Moped::Collection] Collection handler
def timeline_collection(kind)
  @timeline_collection ||= { }
  @timeline_collection[kind] ||= begin
    col_name = self.config[:timelines_col]
    if col_name.nil?
      col_name = "#{kind}_timelines"
      col_name = "#{self.config[:col_prefix]}_#{col_name}" unless self.config[:col_prefix].blank?
    end
    self.collection(col_name)
  end
end
#
# Main interface with the Storage
#

# (see Activr::Storage#valid_id?)
def valid_id?(doc_id)
  case @kind
  when :mongo, :moped
    doc_id.is_a?(String) || doc_id.is_a?(::BSON::ObjectId)
  when :moped_1
    doc_id.is_a?(String) || doc_id.is_a?(::Moped::BSON::ObjectId)
  end
end

# Is it a serialized document id (ie. with format { '$oid' => ... })
#
# @return [true,false]
def serialized_id?(doc_id)
  return false unless doc_id.is_a?(Hash)
  !doc_id['$oid'].blank?
end
# Unserialize a document id
#
# @param doc_id [String,Hash] Document id
# @return [BSON::ObjectId,Moped::BSON::ObjectId] Unserialized document id
def unserialize_id(doc_id)
  # get string representation
  doc_id = self.serialized_id?(doc_id) ? doc_id['$oid'] : doc_id
  if @kind == :moped_1
    # Moped < 2.0.0 uses a custom BSON implementation
    if doc_id.is_a?(::Moped::BSON::ObjectId)
      doc_id
    else
      ::Moped::BSON::ObjectId(doc_id)
    end
  else
    # Already an ObjectId? Pass it through untouched.
    if doc_id.is_a?(::BSON::ObjectId)
      doc_id
    else
      ::BSON::ObjectId.from_string(doc_id)
    end
  end
end
#
# Activities
#

# Insert an activity document
#
# @api private
#
# @param activity_hash [Hash] Activity document to insert
# @return [BSON::ObjectId, Moped::BSON::ObjectId] Inserted activity id
def insert_activity(activity_hash)
  insert(activity_collection, activity_hash)
end

# Find an activity document
#
# @api private
#
# @param activity_id [BSON::ObjectId, Moped::BSON::ObjectId] The activity id
# @return [Hash, OrderedHash, Nil] Activity document
def find_activity(activity_id)
  find_one(activity_collection, { '_id' => activity_id })
end
# Compute selector for querying `activities` collection
#
# @api private
#
# @param options [Hash] Options when querying `activities` collection
# @return [Hash] The computed selector
def activities_selector(options)
  result = { }
  # compute selector
  if options[:before]
    result['at'] ||= { }
    result['at']["$lt"] = options[:before]
  end
  if options[:after]
    result['at'] ||= { }
    result['at']["$gt"] = options[:after]
  end
  # entity filters live at the document root, keyed by entity name
  (options[:entities] || { }).each do |name, value|
    result[name.to_s] = value
  end
  if !options[:only].blank?
    result['kind'] ||= { }
    result['kind']['$in'] = options[:only].map(&:kind)
  end
  if !options[:except].blank?
    result['kind'] ||= { }
    result['kind']['$nin'] = options[:except].map(&:kind)
  end
  result
end
# (see Storage#find_activities)
#
# @api private
#
# @option options [String,Symbol] :mongo_sort_field Field to sort on
#   (descending); defaults to the activity timestamp field 'at'
def find_activities(limit, options = { })
  selector = options[:mongo_selector] || self.activities_selector(options)
  # Allow callers to override the sort field; backward compatible since
  # the default is the previously hard-coded 'at'.
  sort_field = options[:mongo_sort_field] || 'at'
  self.find(self.activity_collection, selector, limit, options[:skip], sort_field)
end
# (see Storage#count_activities)
#
# @api private
def count_activities(options = { })
  self.count(self.activity_collection, options[:mongo_selector] || self.activities_selector(options))
end

# (see Storage#delete_activities)
#
# @api private
def delete_activities(options = { })
  self.delete(self.activity_collection, options[:mongo_selector] || self.activities_selector(options))
end
# Add index for activities
#
# @api private
#
# @param index [String,Array<String>] Field or array of fields
# @param options [Hash] Options hash
# @option options (see Activr::Storage::MongoDriver#add_index)
# @return [String] Index created
def add_activity_index(index, options = { })
  fields = index.is_a?(Array) ? index : [ index ]
  spec = fields.map { |field| [ field, 1 ] }
  self.add_index(self.activity_collection, spec, options)
end
#
# Timeline entries
#

# Insert a timeline entry document
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param timeline_entry_hash [Hash] Timeline entry document to insert
def insert_timeline_entry(timeline_kind, timeline_entry_hash)
  insert(timeline_collection(timeline_kind), timeline_entry_hash)
end

# Find a timeline entry document
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param tl_entry_id [BSON::ObjectId, Moped::BSON::ObjectId] Timeline entry document id
# @return [Hash, OrderedHash, Nil] Timeline entry document
def find_timeline_entry(timeline_kind, tl_entry_id)
  find_one(timeline_collection(timeline_kind), { '_id' => tl_entry_id })
end
# Compute selector for querying a `*_timelines` collection
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId] Recipient id
# @param options (see Storage#find_timeline)
# @return [Hash] The computed selector
def timeline_selector(timeline_kind, recipient_id, options = { })
  result = { }
  # compute selector
  # a nil recipient_id means "all recipients"
  result['rcpt'] = recipient_id unless recipient_id.nil?
  if options[:before]
    result['activity.at'] = { "$lt" => options[:before] }
  end
  (options[:entities] || { }).each do |name, value|
    result["activity.#{name}"] = value
  end
  if !options[:only].blank?
    # match any of the allowed (routing kind, activity kind) pairs
    result['$or'] = options[:only].map do |route|
      { 'routing' => route.routing_kind, 'activity.kind' => route.activity_class.kind }
    end
  end
  result
end
# Find several timeline entry documents
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId] Recipient id
# @param limit [Integer] Max number of entries to find
# @param options (see Storage#find_timeline)
# @return [Array<Hash>] An array of timeline entry documents
def find_timeline_entries(timeline_kind, recipient_id, limit, options = { })
  col = self.timeline_collection(timeline_kind)
  selector = options[:mongo_selector]
  selector ||= self.timeline_selector(timeline_kind, recipient_id, options)
  self.find(col, selector, limit, options[:skip], 'activity.at')
end

# Count number of timeline entry documents
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId] Recipient id
# @param options (see Storage#count_timeline)
# @return [Integer] Number of documents in given timeline
def count_timeline_entries(timeline_kind, recipient_id, options = { })
  col = self.timeline_collection(timeline_kind)
  selector = options[:mongo_selector]
  selector ||= self.timeline_selector(timeline_kind, recipient_id, options)
  self.count(col, selector)
end
# Delete timeline entry documents
#
# @api private
#
# WARNING: If recipient_id is `nil` then documents are deleted for ALL recipients
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId, nil] Recipient id
# @param options (see Storage#delete_timeline)
def delete_timeline_entries(timeline_kind, recipient_id, options = { })
  selector = options[:mongo_selector] || self.timeline_selector(timeline_kind, recipient_id, options)
  # "end of the world" check: refuse to wipe the whole collection when
  # the computed selector is empty.
  raise "Deleting everything is not the solution" if selector.blank?
  self.delete(self.timeline_collection(timeline_kind), selector)
end
# Add index for timeline entries
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param index [String,Array<String>] Field or array of fields
# @param options [Hash] Options hash
# @option options (see Activr::Storage::MongoDriver#add_index)
# @return [String] Index created
def add_timeline_index(timeline_kind, index, options = { })
  fields = index.is_a?(Array) ? index : [ index ]
  spec = fields.map { |field| [ field, 1 ] }
  self.add_index(self.timeline_collection(timeline_kind), spec, options)
end
#
# Indexes
#

# (see Storage#create_indexes)
#
# @api private
#
# Yields a human-readable description of each created index when a block
# is given.
def create_indexes
  # Create indexes on 'activities' collection for models that includes Activr::Entity::ModelMixin
  #
  # eg: activities
  # [['actor', Mongo::ASCENDING], ['at', Mongo::ASCENDING]]
  # [['album', Mongo::ASCENDING], ['at', Mongo::ASCENDING]]
  # [['picture', Mongo::ASCENDING], ['at', Mongo::ASCENDING]]
  Activr.registry.models.each do |model_class|
    if !model_class.activr_entity_settings[:feed_index]
      # @todo Output a warning to remove the index if it exists
    else
      fields = [ model_class.activr_entity_feed_actual_name.to_s, 'at' ]
      index_name = self.add_activity_index(fields)
      yield("activity / #{index_name}") if block_given?
    end
  end
  # Create indexes on '*_timelines' collections for defined timeline classes
  #
  # eg: user_news_feed_timelines
  # [['rcpt', Mongo::ASCENDING], ['activity.at', Mongo::ASCENDING]]
  Activr.registry.timelines.each do |timeline_kind, timeline_class|
    fields = [ 'rcpt', 'activity.at' ]
    index_name = self.add_timeline_index(timeline_kind, fields)
    yield("#{timeline_kind} timeline / #{index_name}") if block_given?
  end
  # Create sparse indexes to remove activities and timeline entries when entity is deleted
  #
  # eg: activities
  # [['actor', Mongo::ASCENDING]], :sparse => true
  #
  # eg: user_news_feed_timelines
  # [['activity.actor', Mongo::ASCENDING]], :sparse => true
  # [['activity.album', Mongo::ASCENDING]], :sparse => true
  # [['activity.picture', Mongo::ASCENDING]], :sparse => true
  Activr.registry.models.each do |model_class|
    if model_class.activr_entity_settings[:deletable]
      # create sparse index on `activities`
      Activr.registry.activity_entities_for_model(model_class).each do |entity_name|
        # if entity activity feed is enabled and this is the entity name used to fetch that feed then we can use the existing index...
        if !model_class.activr_entity_settings[:feed_index] || (entity_name != model_class.activr_entity_feed_actual_name)
          # ... else we create an index
          index_name = self.add_activity_index(entity_name.to_s, :sparse => true)
          yield("activity / #{index_name}") if block_given?
        end
      end
      # create sparse index on timeline classes where that entity can be present
      Activr.registry.timeline_entities_for_model(model_class).each do |timeline_class, entities|
        entities.each do |entity_name|
          index_name = self.add_timeline_index(timeline_class.kind, "activity.#{entity_name}", :sparse => true)
          yield("#{timeline_class.kind} timeline / #{index_name}") if block_given?
        end
      end
    end
  end
end
end
end # class Storage::MongoDriver
Adds :mongo_sort_field option to storage driver
require 'uri'
begin
require 'moped'
rescue LoadError
begin
require 'mongo'
rescue LoadError
raise "[activr] Can't find any suitable mongodb driver: please install 'mongo' or 'moped' gem"
end
end
#
# Generic Mongodb driver
#
# This is the main interface with the underlying MongoDB driver, which can be either the official `mongo` driver or `moped` (the driver used by `mongoid`).
#
class Activr::Storage::MongoDriver
def initialize
# check settings
raise "Missing setting :uri in config: #{self.config.inspect}" if self.config[:uri].blank?
@collections = { }
@kind = if defined?(::Moped)
if defined?(::Moped::BSON)
# moped < 2.0.0
:moped_1
else
# moped driver
:moped
end
elsif defined?(::Mongo::MongoClient)
# mongo ruby driver < 2.0.0
:mongo
elsif defined?(::Mongo::Client)
raise "Sorry, mongo gem >= 2.0 is not supported yet"
else
raise "Can't find any suitable mongodb driver: please install 'mongo' or 'moped' gem"
end
# Activr.logger.info("Using mongodb driver: #{@kind}")
if @kind == :mongo
uri = URI.parse(self.config[:uri])
@db_name = uri.path[1..-1]
raise "Missing database name in setting uri: #{config[:uri]}" if @db_name.blank?
end
end
# MongoDB config
#
# @api private
#
# @return [hash] Config
def config
Activr.config.mongodb
end
# Mongodb connection/session
#
# @api private
#
# @return [Mongo::MongoClient, Mongo::MongoReplicaSetClient, Moped::Session] Connection handler
def conn
@conn ||= begin
case @kind
when :moped_1, :moped
::Moped::Session.connect(self.config[:uri])
when :mongo
::Mongo::MongoClient.from_uri(self.config[:uri])
end
end
end
# Mongodb collection
#
# @api private
#
# @param col_name [String] Collection name
# @return [Mongo::Collection, Moped::Collection] Collection handler
def collection(col_name)
case @kind
when :moped_1, :moped
self.conn[col_name]
when :mongo
self.conn.db(@db_name).collection(col_name)
end
end
# Insert a document into given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param doc [Hash] Document hash to insert
# @return [BSON::ObjectId, Moped::BSON::ObjectId] Inserted document id
def insert(col, doc)
case @kind
when :moped_1, :moped
doc_id = doc[:_id] || doc['_id']
if doc_id.nil?
doc_id = case @kind
when :moped_1
# Moped < 2.0.0 uses a custom BSON implementation
::Moped::BSON::ObjectId.new
when :moped
# Moped >= 2.0.0 uses bson gem
::BSON::ObjectId.new
end
doc['_id'] = doc_id
end
col.insert(doc)
doc_id
when :mongo
col.insert(doc)
end
end
# Find a document by id
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
# @return [Hash, OrderedHash, Nil] Document
def find_one(col, selector)
case @kind
when :moped_1, :moped
col.find(selector).one
when :mongo
col.find_one(selector)
end
end
# Find documents in given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
# @param limit [Integer] Maximum number of documents to find
# @param skip [Integer] Number of documents to skip
# @param sort_field [Symbol,String] The field to use to sort documents in descending order
# @return [Enumerable] An enumerable on found documents
def find(col, selector, limit, skip, sort_field = nil)
case @kind
when :moped_1, :moped
result = col.find(selector).skip(skip).limit(limit)
result.sort(sort_field => -1) if sort_field
result
when :mongo
# compute options hash
options = {
:limit => limit,
:skip => skip,
}
options[:sort] = [ sort_field, ::Mongo::DESCENDING ] if sort_field
options[:batch_size] = 100 if (limit > 100)
col.find(selector, options)
end
end
# Count documents in given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
# @return [Integer] Number of documents in collections that satisfy given selector
def count(col, selector)
case @kind
when :moped_1, :moped, :mongo
col.find(selector).count()
end
end
# Delete documents in given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param selector [Hash] Selector hash
def delete(col, selector)
case @kind
when :moped_1, :moped
col.find(selector).remove_all
when :mongo
col.remove(selector)
end
end
# Add index to given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @param index_spec [Array] Array of [ {String}, {Integer} ] tuplets with {String} being a field to index and {Integer} the order (`-1` of DESC and `1` for ASC)
# @param options [Hash] Options hash
# @option options [Boolean] :background Background indexing ? (default: `true`)
# @option options [Boolean] :sparse Is it a sparse index ? (default: `false`)
# @return [String] Index created
def add_index(col, index_spec, options = { })
options = {
:background => true,
:sparse => false,
}.merge(options)
case @kind
when :moped_1, :moped
index_spec = index_spec.inject(ActiveSupport::OrderedHash.new) do |memo, field_spec|
memo[field_spec[0]] = field_spec[1]
memo
end
col.indexes.create(index_spec, options)
index_spec
when :mongo
col.create_index(index_spec, options)
end
end
# Get all indexes for given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
# @return [Array<String>] Indexes names
def indexes(col)
result = [ ]
case @kind
when :moped_1, :moped
col.indexes.each do |index_spec|
result << index_spec["name"]
end
when :mongo
result = col.index_information.keys
end
result
end
# Drop all indexes for given collection
#
# @api private
#
# @param col [Mongo::Collection, Moped::Collection] Collection handler
def drop_indexes(col)
case @kind
when :moped_1, :moped
col.indexes.drop
when :mongo
col.drop_indexes
end
end
# Get handler for `activities` collection
#
# @api private
#
# @return [Mongo::Collection, Moped::Collection] Collection handler
def activity_collection
@activity_collection ||= begin
col_name = self.config[:activities_col]
if col_name.nil?
col_name = "activities"
col_name = "#{self.config[:col_prefix]}_#{col_name}" unless self.config[:col_prefix].blank?
end
self.collection(col_name)
end
end
# Get handler for a `<kind>_timelines` collection
#
# @api private
#
# @param kind [String] Timeline kind
# @return [Mongo::Collection, Moped::Collection] Collection handler
def timeline_collection(kind)
@timeline_collection ||= { }
@timeline_collection[kind] ||= begin
col_name = self.config[:timelines_col]
if col_name.nil?
col_name = "#{kind}_timelines"
col_name = "#{self.config[:col_prefix]}_#{col_name}" unless self.config[:col_prefix].blank?
end
self.collection(col_name)
end
end
#
# Main interface with the Storage
#
# (see Activr::Storage#valid_id?)
def valid_id?(doc_id)
case @kind
when :moped_1
doc_id.is_a?(String) || doc_id.is_a?(::Moped::BSON::ObjectId)
when :mongo, :moped
doc_id.is_a?(String) || doc_id.is_a?(::BSON::ObjectId)
end
end
# Is it a serialized document id (ie. with format { '$oid' => ... })
#
# @return [true,false]
def serialized_id?(doc_id)
doc_id.is_a?(Hash) && !doc_id['$oid'].blank?
end
# Unserialize a document id
#
# @param doc_id [String,Hash] Document id
# @return [BSON::ObjectId,Moped::BSON::ObjectId] Unserialized document id
def unserialize_id(doc_id)
# get string representation
doc_id = self.serialized_id?(doc_id) ? doc_id['$oid'] : doc_id
if @kind == :moped_1
# Moped < 2.0.0 uses a custom BSON implementation
if doc_id.is_a?(::Moped::BSON::ObjectId)
doc_id
else
::Moped::BSON::ObjectId(doc_id)
end
else
if doc_id.is_a?(::BSON::ObjectId)
doc_id
else
::BSON::ObjectId.from_string(doc_id)
end
end
end
#
# Activities
#
# Insert an activity document
#
# @api private
#
# @param activity_hash [Hash] Activity document to insert
# @return [BSON::ObjectId, Moped::BSON::ObjectId] Inserted activity id
def insert_activity(activity_hash)
self.insert(self.activity_collection, activity_hash)
end
# Find an activity document
#
# @api private
#
# @param activity_id [BSON::ObjectId, Moped::BSON::ObjectId] The activity id
# @return [Hash, OrderedHash, Nil] Activity document
def find_activity(activity_id)
self.find_one(self.activity_collection, { '_id' => activity_id })
end
# Compute selector for querying `activities` collection
#
# @api private
#
# @param options [Hash] Options when querying `activities` collection
# @option options [Time] :before Match activities strictly older than this
# @option options [Time] :after Match activities strictly newer than this
# @option options [Hash] :entities Entity name => value constraints
# @option options [Array] :only Whitelist of activity classes
# @option options [Array] :except Blacklist of activity classes
# @return [Hash] The computed selector
def activities_selector(options)
  selector = { }

  # time window constraints on the 'at' field
  at = { }
  at["$lt"] = options[:before] if options[:before]
  at["$gt"] = options[:after] if options[:after]
  selector['at'] = at unless at.empty?

  # entity constraints live at the document root
  (options[:entities] || { }).each do |name, value|
    selector[name.to_s] = value
  end

  # activity kind whitelist / blacklist
  unless options[:only].blank?
    (selector['kind'] ||= { })['$in'] = options[:only].map(&:kind)
  end
  unless options[:except].blank?
    (selector['kind'] ||= { })['$nin'] = options[:except].map(&:kind)
  end

  selector
end
# (see Storage#find_activities)
#
# Callers may override the computed selector/sort with :mongo_selector and
# :mongo_sort_field.
#
# @api private
def find_activities(limit, options = { })
  selector = options[:mongo_selector] || self.activities_selector(options)
  sort_by = options[:mongo_sort_field] || 'at'
  self.find(self.activity_collection, selector, limit, options[:skip], sort_by)
end
# (see Storage#count_activities)
#
# @api private
def count_activities(options = { })
  selector = options[:mongo_selector] || self.activities_selector(options)
  self.count(self.activity_collection, selector)
end
# (see Storage#delete_activities)
#
# @api private
def delete_activities(options = { })
  selector = options[:mongo_selector] || self.activities_selector(options)
  self.delete(self.activity_collection, selector)
end
# Add index for activities
#
# @api private
#
# @param index [String,Array<String>] Field or array of fields
# @param options [Hash] Options hash
# @option options (see Activr::Storage::MongoDriver#add_index)
# @return [String] Index created
def add_activity_index(index, options = { })
  # ascending index over every listed field
  spec = (index.is_a?(Array) ? index : [ index ]).map { |field| [ field, 1 ] }
  self.add_index(self.activity_collection, spec, options)
end
#
# Timeline entries
#
# Insert a timeline entry document
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param timeline_entry_hash [Hash] Timeline entry document to insert
def insert_timeline_entry(timeline_kind, timeline_entry_hash)
  col = self.timeline_collection(timeline_kind)
  self.insert(col, timeline_entry_hash)
end
# Find a timeline entry document
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param tl_entry_id [BSON::ObjectId, Moped::BSON::ObjectId] Timeline entry document id
# @return [Hash, OrderedHash, Nil] Timeline entry document
def find_timeline_entry(timeline_kind, tl_entry_id)
  selector = { '_id' => tl_entry_id }
  self.find_one(self.timeline_collection(timeline_kind), selector)
end
# Compute selector for querying a `*_timelines` collection
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId] Recipient id
# @param options (see Storage#find_timeline)
# @return [Hash] The computed selector
def timeline_selector(timeline_kind, recipient_id, options = { })
  selector = { }

  # nil recipient means "all recipients"
  selector['rcpt'] = recipient_id unless recipient_id.nil?
  selector['activity.at'] = { "$lt" => options[:before] } if options[:before]

  # entity constraints live under the embedded activity document
  (options[:entities] || { }).each do |name, value|
    selector["activity.#{name}"] = value
  end

  # routing whitelist: match (routing kind, activity kind) pairs
  unless options[:only].blank?
    selector['$or'] = options[:only].map do |route|
      { 'routing' => route.routing_kind, 'activity.kind' => route.activity_class.kind }
    end
  end

  selector
end
# Find several timeline entry documents
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId] Recipient id
# @param limit [Integer] Max number of entries to find
# @param options (see Storage#find_timeline)
# @return [Array<Hash>] An array of timeline entry documents
def find_timeline_entries(timeline_kind, recipient_id, limit, options = { })
  selector = options[:mongo_selector] || self.timeline_selector(timeline_kind, recipient_id, options)
  sort_by = options[:mongo_sort_field] || 'activity.at'
  self.find(self.timeline_collection(timeline_kind), selector, limit, options[:skip], sort_by)
end
# Count number of timeline entry documents
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId] Recipient id
# @param options (see Storage#count_timeline)
# @return [Integer] Number of documents in given timeline
def count_timeline_entries(timeline_kind, recipient_id, options = { })
  selector = options[:mongo_selector] || self.timeline_selector(timeline_kind, recipient_id, options)
  self.count(self.timeline_collection(timeline_kind), selector)
end
# Delete timeline entry documents
#
# @api private
#
# WARNING: If recipient_id is `nil` then documents are deleted for ALL recipients
#
# @param timeline_kind [String] Timeline kind
# @param recipient_id [String, BSON::ObjectId, Moped::BSON::ObjectId, nil] Recipient id
# @param options (see Storage#delete_timeline)
def delete_timeline_entries(timeline_kind, recipient_id, options = { })
  selector = options[:mongo_selector] || self.timeline_selector(timeline_kind, recipient_id, options)

  # "end of the world" check: never issue an unrestricted delete
  raise "Deleting everything is not the solution" if selector.blank?

  self.delete(self.timeline_collection(timeline_kind), selector)
end
# Add index for timeline entries
#
# @api private
#
# @param timeline_kind [String] Timeline kind
# @param index [String,Array<String>] Field or array of fields
# @param options [Hash] Options hash
# @option options (see Activr::Storage::MongoDriver#add_index)
# @return [String] Index created
def add_timeline_index(timeline_kind, index, options = { })
  # ascending index over every listed field
  spec = (index.is_a?(Array) ? index : [ index ]).map { |field| [ field, 1 ] }
  self.add_index(self.timeline_collection(timeline_kind), spec, options)
end
#
# Indexes
#
# (see Storage#create_indexes)
#
# @api private
#
# Creates three families of indexes, yielding a human-readable progress
# string for each index created (when a block is given).
def create_indexes
# Create indexes on 'activities' collection for models that includes Activr::Entity::ModelMixin
#
# eg: activities
# [['actor', Mongo::ASCENDING], ['at', Mongo::ASCENDING]]
# [['album', Mongo::ASCENDING], ['at', Mongo::ASCENDING]]
# [['picture', Mongo::ASCENDING], ['at', Mongo::ASCENDING]]
Activr.registry.models.each do |model_class|
if !model_class.activr_entity_settings[:feed_index]
# @todo Output a warning to remove the index if it exists
else
fields = [ model_class.activr_entity_feed_actual_name.to_s, 'at' ]
index_name = self.add_activity_index(fields)
yield("activity / #{index_name}") if block_given?
end
end
# Create indexes on '*_timelines' collections for defined timeline classes
#
# eg: user_news_feed_timelines
# [['rcpt', Mongo::ASCENDING], ['activity.at', Mongo::ASCENDING]]
Activr.registry.timelines.each do |timeline_kind, timeline_class|
fields = [ 'rcpt', 'activity.at' ]
index_name = self.add_timeline_index(timeline_kind, fields)
yield("#{timeline_kind} timeline / #{index_name}") if block_given?
end
# Create sparse indexes to remove activities and timeline entries when entity is deleted
#
# eg: activities
# [['actor', Mongo::ASCENDING]], :sparse => true
#
# eg: user_news_feed_timelines
# [['activity.actor', Mongo::ASCENDING]], :sparse => true
# [['activity.album', Mongo::ASCENDING]], :sparse => true
# [['activity.picture', Mongo::ASCENDING]], :sparse => true
Activr.registry.models.each do |model_class|
if model_class.activr_entity_settings[:deletable]
# create sparse index on `activities`
Activr.registry.activity_entities_for_model(model_class).each do |entity_name|
# if entity activity feed is enabled and this is the entity name used to fetch that feed then we can use the existing index...
if !model_class.activr_entity_settings[:feed_index] || (entity_name != model_class.activr_entity_feed_actual_name)
# ... else we create an index
index_name = self.add_activity_index(entity_name.to_s, :sparse => true)
yield("activity / #{index_name}") if block_given?
end
end
# create sparse index on timeline classes where that entity can be present
Activr.registry.timeline_entities_for_model(model_class).each do |timeline_class, entities|
entities.each do |entity_name|
index_name = self.add_timeline_index(timeline_class.kind, "activity.#{entity_name}", :sparse => true)
yield("#{timeline_class.kind} timeline / #{index_name}") if block_given?
end
end
end
end
end
end # class Storage::MongoDriver
|
require 'date'
require 'thread'
# Minimal pool of worker threads that run queued blocks at (or after) their
# scheduled time. Workers poll the shared queue once per second.
module AdminUI
class ScheduledThreadPool
# @param logger object responding to #debug, used for error reporting
# @param number_threads [Integer] number of worker threads to spawn
def initialize(logger, number_threads)
@logger = logger
# Entries are hashes { :key, :time, :block }, kept sorted by ascending :time.
@queue = []
@mutex = Mutex.new
number_threads.times do
Thread.new do
loop do
entry = nil
now = Time.now
# Pop the head entry only when it is due; @mutex guards all queue access.
@mutex.synchronize do
first = @queue.first
entry = @queue.shift if first && first[:time] <= now
end
if entry
begin
entry[:block].call
rescue => error
# Log and keep the worker alive. Only StandardError is rescued here.
@logger.debug("Error during #{ entry[:key] }: #{ error.inspect }")
@logger.debug(error.backtrace.join("\n"))
end
end
# Polling granularity: a due entry may run up to ~1s late.
sleep 1
end
end
end
end
# Queue +block+ to run at +time+ under the identifier +key+.
# Any previously scheduled entry with the same key is discarded.
def schedule(key, time, &block)
return if key.nil? || time.nil? || block.nil?
entry = { :key => key, :time => time, :block => block }
@mutex.synchronize do
# Intentionally overwrite any existing entry for this key
@queue.reject! { |existing_entry| key == existing_entry[:key] }
@queue.push(entry)
@queue.sort! { |a, b| a[:time] <=> b[:time] }
end
end
end
end
Change scheduled_thread_pool to replace an existing keyed entry only if the
new entry is scheduled sooner than the existing one
require 'date'
require 'thread'
# Minimal pool of worker threads that run queued blocks at (or after) their
# scheduled time. Workers poll the shared queue once per second.
module AdminUI
class ScheduledThreadPool
# @param logger object responding to #debug, used for error reporting
# @param number_threads [Integer] number of worker threads to spawn
def initialize(logger, number_threads)
@logger = logger
# Entries are hashes { :key, :time, :block }, kept sorted by ascending :time.
@queue = []
@mutex = Mutex.new
number_threads.times do
Thread.new do
loop do
entry = nil
now = Time.now
# Pop the head entry only when it is due; @mutex guards all queue access.
@mutex.synchronize do
first = @queue.first
entry = @queue.shift if first && first[:time] <= now
end
if entry
begin
entry[:block].call
rescue => error
# Log and keep the worker alive. Only StandardError is rescued here.
@logger.debug("Error during #{ entry[:key] }: #{ error.inspect }")
@logger.debug(error.backtrace.join("\n"))
end
end
# Polling granularity: a due entry may run up to ~1s late.
sleep 1
end
end
end
end
# Queue +block+ to run at +time+ under the identifier +key+.
# An existing entry for the same key wins unless the new time is strictly
# sooner, in which case it is replaced.
def schedule(key, time, &block)
return if key.nil? || time.nil? || block.nil?
@mutex.synchronize do
index = @queue.index { |entry| key == entry[:key] }
if index
# Keep the earlier of the two schedules for this key.
return if @queue.at(index)[:time] <= time
@queue.delete_at(index)
end
@queue.push(:key => key, :time => time, :block => block)
@queue.sort! { |a, b| a[:time] <=> b[:time] }
end
end
end
end
|
require './lib/airbrake/version'
# Gem packaging metadata for the airbrake gem.
Gem::Specification.new do |s|
s.name = 'airbrake'
s.version = Airbrake::AIRBRAKE_VERSION.dup
# NOTE(review): stamped at package time, so builds are not byte-reproducible.
s.date = Time.now.strftime('%Y-%m-%d')
s.summary = <<SUMMARY
Airbrake is an online tool that provides robust exception tracking in any of
your Ruby applications.
SUMMARY
s.description = <<DESC
Airbrake is an online tool that provides robust exception tracking in any of
your Ruby applications. In doing so, it allows you to easily review errors, tie
an error to an individual piece of code, and trace the cause back to recent
changes. The Airbrake dashboard provides easy categorization, searching, and
prioritization of exceptions so that when errors occur, your team can quickly
determine the root cause.
Additionally, this gem includes integrations with such popular libraries and
frameworks as Rails, Sinatra, Resque, Sidekiq, Delayed Job, Shoryuken,
ActiveJob and many more.
DESC
s.author = 'Airbrake Technologies, Inc.'
s.email = 'support@airbrake.io'
s.homepage = 'https://airbrake.io'
s.license = 'MIT'
s.require_path = 'lib'
s.files = ['lib/airbrake.rb', *Dir.glob('lib/**/*')]
s.required_ruby_version = '>= 2.1'
# Runtime dependency: the core notifier library.
s.add_dependency 'airbrake-ruby', '~> 4.5'
# Everything below is development/test only.
s.add_development_dependency 'rspec', '~> 3'
s.add_development_dependency 'rspec-wait', '~> 0'
s.add_development_dependency 'rake', '~> 12'
s.add_development_dependency 'pry', '~> 0'
s.add_development_dependency 'appraisal', '~> 2'
s.add_development_dependency 'rack', '~> 1'
s.add_development_dependency 'webmock', '~> 2'
s.add_development_dependency 'sneakers', '~> 2'
# We still support Ruby 2.1.0+, but sneakers 2 wants 2.2+.
s.add_development_dependency 'amq-protocol', '= 2.2.0'
s.add_development_dependency 'rack-test', '= 0.6.3'
s.add_development_dependency 'redis', '= 3.3.3'
# Fixes build failure with public_suffix v3
# https://circleci.com/gh/airbrake/airbrake-ruby/889
s.add_development_dependency 'public_suffix', '~> 2.0', '< 3.0'
# Newer versions don't support Ruby 2.2.0 and lower.
s.add_development_dependency 'nokogiri', '= 1.9.1'
# Parallel above v1.13.0 doesn't support Ruby v2.1 and lower (and we do).
s.add_development_dependency 'parallel', '= 1.13.0'
# sidekiq 5 is only exercised on Rubies >= 2.2.2.
if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.2.2')
s.add_development_dependency 'sidekiq', '~> 5'
end
# curb is a C extension, so MRI only.
s.add_development_dependency 'curb', '~> 0.9' if RUBY_ENGINE == 'ruby'
s.add_development_dependency 'excon', '~> 0.64'
s.add_development_dependency 'http', '~> 2.2'
s.add_development_dependency 'httpclient', '~> 2.8'
s.add_development_dependency 'typhoeus', '~> 1.3'
end
gemspec: depend on airbrake-ruby '~> 4.6'
require './lib/airbrake/version'
# Gem packaging metadata for the airbrake gem.
Gem::Specification.new do |s|
s.name = 'airbrake'
s.version = Airbrake::AIRBRAKE_VERSION.dup
# NOTE(review): stamped at package time, so builds are not byte-reproducible.
s.date = Time.now.strftime('%Y-%m-%d')
s.summary = <<SUMMARY
Airbrake is an online tool that provides robust exception tracking in any of
your Ruby applications.
SUMMARY
s.description = <<DESC
Airbrake is an online tool that provides robust exception tracking in any of
your Ruby applications. In doing so, it allows you to easily review errors, tie
an error to an individual piece of code, and trace the cause back to recent
changes. The Airbrake dashboard provides easy categorization, searching, and
prioritization of exceptions so that when errors occur, your team can quickly
determine the root cause.
Additionally, this gem includes integrations with such popular libraries and
frameworks as Rails, Sinatra, Resque, Sidekiq, Delayed Job, Shoryuken,
ActiveJob and many more.
DESC
s.author = 'Airbrake Technologies, Inc.'
s.email = 'support@airbrake.io'
s.homepage = 'https://airbrake.io'
s.license = 'MIT'
s.require_path = 'lib'
s.files = ['lib/airbrake.rb', *Dir.glob('lib/**/*')]
s.required_ruby_version = '>= 2.1'
# Runtime dependency: the core notifier library.
s.add_dependency 'airbrake-ruby', '~> 4.6'
# Everything below is development/test only.
s.add_development_dependency 'rspec', '~> 3'
s.add_development_dependency 'rspec-wait', '~> 0'
s.add_development_dependency 'rake', '~> 12'
s.add_development_dependency 'pry', '~> 0'
s.add_development_dependency 'appraisal', '~> 2'
s.add_development_dependency 'rack', '~> 1'
s.add_development_dependency 'webmock', '~> 2'
s.add_development_dependency 'sneakers', '~> 2'
# We still support Ruby 2.1.0+, but sneakers 2 wants 2.2+.
s.add_development_dependency 'amq-protocol', '= 2.2.0'
s.add_development_dependency 'rack-test', '= 0.6.3'
s.add_development_dependency 'redis', '= 3.3.3'
# Fixes build failure with public_suffix v3
# https://circleci.com/gh/airbrake/airbrake-ruby/889
s.add_development_dependency 'public_suffix', '~> 2.0', '< 3.0'
# Newer versions don't support Ruby 2.2.0 and lower.
s.add_development_dependency 'nokogiri', '= 1.9.1'
# Parallel above v1.13.0 doesn't support Ruby v2.1 and lower (and we do).
s.add_development_dependency 'parallel', '= 1.13.0'
# sidekiq 5 is only exercised on Rubies >= 2.2.2.
if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.2.2')
s.add_development_dependency 'sidekiq', '~> 5'
end
# curb is a C extension, so MRI only.
s.add_development_dependency 'curb', '~> 0.9' if RUBY_ENGINE == 'ruby'
s.add_development_dependency 'excon', '~> 0.64'
s.add_development_dependency 'http', '~> 2.2'
s.add_development_dependency 'httpclient', '~> 2.8'
s.add_development_dependency 'typhoeus', '~> 1.3'
end
|
#! /usr/bin/env ruby -I.
require 'gosu_enhanced'
require 'pp'
require 'resources'
require 'puzloader'
require 'grid'
require 'drawer'
module Crossword
# Crossword!
#
# Gosu window that renders the grid plus the across/down clue columns and
# handles all keyboard/mouse input.
class Game < Gosu::Window
include Constants
attr_reader :width, :height, :font, :grid
# Input bindings. The lambdas are executed via instance_exec (see
# #button_down), so they can read and write the window's instance state.
KEY_FUNCS = {
Gosu::KbEscape => -> { close },
Gosu::KbSpace => -> { @position = @current.gpos },
Gosu::KbTab => -> { next_clue },
Gosu::KbDown => -> { @position = @grid.cell_down( @current.gpos ) },
Gosu::KbUp => -> { @position = @grid.cell_up( @current.gpos ) },
Gosu::KbLeft => -> { @position = @grid.cell_left( @current.gpos ) },
Gosu::KbRight => -> { @position = @grid.cell_right( @current.gpos ) },
Gosu::MsLeft => -> { @position = GridPoint.from_xy( mouse_x, mouse_y ) }
}
# @param grid [Grid] loaded crossword grid
# @param title [String] used in the window caption
def initialize( grid, title )
@grid = grid
@width = BASE_WIDTH + grid.width * CELL_SIZE.width
@height = BASE_HEIGHT + grid.height * CELL_SIZE.height
super( @width, @height, false, 100 )
self.caption = "Ankh #{title}"
# Clue columns sit to the right of the grid: across first, then down.
@down_left = @width - (MARGIN * 2 + CLUE_COLUMN_WIDTH)
@across_left = @down_left - (MARGIN * 2 + CLUE_COLUMN_WIDTH)
@font = ResourceLoader.fonts( self )
@drawer = Drawer.new( self )
initial_highlight
end
# Gosu callback: show the system mouse cursor.
def needs_cursor?
true
end
# Gosu per-frame update: apply any queued character (@char) or queued cell
# selection (@position), then refresh the highlights.
def update
update_cell unless @char.nil?
update_current unless @position.nil?
highlight_word
highlight_current
end
# Gosu per-frame render.
def draw
@drawer.background
@drawer.grid
draw_clues
end
# Dispatch bound keys via KEY_FUNCS; letters queue a fill-in for the current
# cell, backspace queues an empty string (see #update_cell).
def button_down( btn_id )
instance_exec( &KEY_FUNCS[btn_id] ) if KEY_FUNCS.key? btn_id
char = button_id_to_char( btn_id )
@char = char.upcase unless char.nil? || !char.between?( 'a', 'z' )
@char = '' if btn_id == Gosu::KbBackspace
end
private
# Select the first across clue on startup.
def initial_highlight
number = @grid.first_clue( :across )
cur_word = @grid.word_cells( number, :across )
@current = CurrentState.new( cur_word[0], number, :across )
end
def highlight_word
cells = @grid.word_cells( @current.number, @current.dir )
cells.each { |gpoint| @grid.cell_at( gpoint ).highlight = :word }
end
def highlight_current
@grid.cell_at( @current.gpos ).highlight = :current
end
# Clear both the word highlight and the current-cell highlight.
def unhighlight
cells = @grid.word_cells( @current.number, @current.dir )
cells.each { |gpoint| @grid.cell_at( gpoint ).highlight = :none }
@grid.cell_at( @current.gpos ).highlight = :none
end
# Apply the queued character: '' (backspace) erases, any letter fills the
# current cell and advances along the word.
def update_cell
unhighlight
if @char.empty?
# Backspace: if the cell is already empty, step back first so repeated
# presses keep deleting; otherwise clear the cell, then step back.
cell_empty = @grid.cell_at( @current.gpos ).user.empty?
@grid.prev_word_cell( @current ) if cell_empty
@grid.cell_at( @current.gpos ).user = ''
@grid.prev_word_cell( @current ) unless cell_empty
else
@grid.cell_at( @current.gpos ).user = @char
@grid.next_word_cell( @current )
end
@char = nil
end
# Apply the queued cell selection (@position) from mouse or arrow keys.
def update_current
unhighlight
new_num = @grid.word_num_from_pos( @position, @current.dir )
unless new_num == 0
if @position == @current.gpos # Click on current == swap direction
@current.swap_direction
new_num = @grid.word_num_from_pos( @position, @current.dir )
end
@current.gpos, @current.number = @position, new_num
end
@position = nil
end
# Advance to the next clue in the current direction (Tab).
def next_clue
unhighlight
number = @grid.next_clue( @current.number, @current.dir )
@current.new_word( number, @grid.cell_number( number, @current.dir ) )
end
def draw_clues
across_point = Point.new( @across_left, MARGIN * 2 )
down_point = Point.new( @down_left, MARGIN * 2 )
draw_clue_header( across_point, 'Across' )
draw_clue_header( down_point, 'Down' )
draw_clue_list_with_current( across_point, @grid.across_clues,
@current.dir == :across )
draw_clue_list_with_current( down_point, @grid.down_clues,
@current.dir == :down )
end
def draw_clue_header( pos, header )
@font[:header].draw( header, pos.x, pos.y, 1, 1, 1, WHITE )
pos.move_by!( 0, @font[:header].height )
end
# Render the clue list off screen first if it's the list with the current clue,
# then redraw it where asked, potentially not from the start if the current
# clue wouldn't be displayed
def draw_clue_list_with_current( pos, list, current_list )
if current_list
off_screen = pos.offset( width, 0 )
skip = draw_clue_list( off_screen, list, current_list )
else
skip = 0
end
draw_clue_list( pos, list[skip..-1], current_list )
end
# Draw the list of clues, ensuring that the current one is on screen
#
# Returns how many leading clues to skip so the current one shows (0 when it
# is already visible, or when this isn't the current list).
# NOTE(review): the skip only kicks in when the current clue was not drawn at
# all, so it can still sit right at the bottom edge of the column.
def draw_clue_list( pos, list, current_list )
found = !current_list # Not current, found OK. Current, still looking
shown = 0
list.each do |clue|
is_current = current_list && @current.number == clue.number
found ||= is_current
lh = clue.draw( self, pos, CLUE_COLUMN_WIDTH, is_current )
shown += 1
break if pos.y >= height - (MARGIN + lh)
end
current_list && !found ? (list.size - shown) : 0
end
end
# Hold the current state: The cell position, and word number and direction
# that it's a part of.
#
# Assigned rather than subclassed: `class X < Struct.new(...)` creates a
# superfluous anonymous intermediate class; assigning the Struct directly is
# the idiomatic form and keeps the same public interface.
CurrentState = Struct.new( :gpos, :number, :dir ) do
  # Toggle the direction between :across and :down.
  def swap_direction
    self.dir = dir == :across ? :down : :across
  end

  # Move to a new word, given its clue number and starting cell position.
  def new_word( clue_number, pos )
    self.number, self.gpos = clue_number, pos
  end
end
end
# Script entry point: load the crossword named on the command line (or a
# bundled sample .puz), print its metadata, then launch the game window.
filename = ARGV[0] || '2014-4-22-LosAngelesTimes.puz'
puz = PuzzleLoader.new( filename )
puts "Size: #{puz.width} x #{puz.height}"
puts "Clues: #{puz.num_clues}"
puts 'Scrambled!' if puz.scrambled?
puts %(
Title: #{puz.title}
Author: #{puz.author}
Copyright: #{puz.copyright}
)
cgrid = Crossword::Grid.new( puz.rows, puz.clues )
Crossword::Game.new( cgrid, "#{puz.title} - #{puz.author}" ).show
Keep the current clue from scrolling off the bottom of the clue list.
Extensively refactored.
#! /usr/bin/env ruby -I.
require 'gosu_enhanced'
require 'pp'
require 'resources'
require 'puzloader'
require 'grid'
require 'drawer'
module Crossword
# Crossword!
#
# Gosu window that renders the grid plus the across/down clue columns and
# handles all keyboard/mouse input.
class Game < Gosu::Window
include Constants
attr_reader :width, :height, :font, :grid
# Input bindings. The lambdas are executed via instance_exec (see
# #button_down), so they can read and write the window's instance state.
KEY_FUNCS = {
Gosu::KbEscape => -> { close },
Gosu::KbSpace => -> { @position = @current.gpos },
Gosu::KbTab => -> { next_clue },
Gosu::KbDown => -> { @position = @grid.cell_down( @current.gpos ) },
Gosu::KbUp => -> { @position = @grid.cell_up( @current.gpos ) },
Gosu::KbLeft => -> { @position = @grid.cell_left( @current.gpos ) },
Gosu::KbRight => -> { @position = @grid.cell_right( @current.gpos ) },
Gosu::MsLeft => -> { @position = GridPoint.from_xy( mouse_x, mouse_y ) }
}
# @param grid [Grid] loaded crossword grid
# @param title [String] used in the window caption
def initialize( grid, title )
@grid = grid
@width = BASE_WIDTH + grid.width * CELL_SIZE.width
@height = BASE_HEIGHT + grid.height * CELL_SIZE.height
super( @width, @height, false, 100 )
self.caption = "Ankh #{title}"
# Clue columns sit to the right of the grid: across first, then down.
@down_left = @width - (MARGIN * 2 + CLUE_COLUMN_WIDTH)
@across_left = @down_left - (MARGIN * 2 + CLUE_COLUMN_WIDTH)
@font = ResourceLoader.fonts( self )
@drawer = Drawer.new( self )
initial_highlight
end
# Gosu callback: show the system mouse cursor.
def needs_cursor?
true
end
# Gosu per-frame update: apply any queued character (@char) or queued cell
# selection (@position), then refresh the highlights.
def update
update_cell unless @char.nil?
update_current unless @position.nil?
highlight_word
highlight_current
end
# Gosu per-frame render.
def draw
@drawer.background
@drawer.grid
draw_clues
end
# Dispatch bound keys via KEY_FUNCS; letters queue a fill-in for the current
# cell, backspace queues an empty string (see #update_cell).
def button_down( btn_id )
instance_exec( &KEY_FUNCS[btn_id] ) if KEY_FUNCS.key? btn_id
char = button_id_to_char( btn_id )
@char = char.upcase unless char.nil? || !char.between?( 'a', 'z' )
@char = '' if btn_id == Gosu::KbBackspace
end
private
# Select the first across clue on startup.
def initial_highlight
number = @grid.first_clue( :across )
cur_word = @grid.word_cells( number, :across )
@current = CurrentState.new( cur_word[0], number, :across )
end
def highlight_word
cells = @grid.word_cells( @current.number, @current.dir )
cells.each { |gpoint| @grid.cell_at( gpoint ).highlight = :word }
end
def highlight_current
@grid.cell_at( @current.gpos ).highlight = :current
end
# Clear both the word highlight and the current-cell highlight.
def unhighlight
cells = @grid.word_cells( @current.number, @current.dir )
cells.each { |gpoint| @grid.cell_at( gpoint ).highlight = :none }
@grid.cell_at( @current.gpos ).highlight = :none
end
# Apply the queued character: '' (backspace) erases via #empty_cell, any
# letter fills the current cell and advances along the word.
def update_cell
unhighlight
if @char.empty?
empty_cell
else
@grid.cell_at( @current.gpos ).user = @char
@grid.next_word_cell( @current )
end
@char = nil
end
# Backspace: if the cell is already empty, step back first so repeated
# presses keep deleting; otherwise clear the cell, then step back.
def empty_cell
cell_empty = @grid.cell_at( @current.gpos ).user.empty?
@grid.prev_word_cell( @current ) if cell_empty
@grid.cell_at( @current.gpos ).user = ''
@grid.prev_word_cell( @current ) unless cell_empty
end
# Apply the queued selection (@position): clicks outside the grid are
# treated as clicks in the clue lists.
def update_current
unhighlight
return current_from_clue if @position.out_of_range?( @grid )
current_from_cell
@position = nil
end
# Select the word belonging to the clue under the mouse, if any.
def current_from_clue
mouse_pos = Point.new( mouse_x, mouse_y )
@grid.all_clues.each do |clue|
next if clue.region.nil?
if clue.region.contains?( mouse_pos )
@current = CurrentState.new(
@grid.cell_number( clue.number, clue.direction ),
clue.number, clue.direction )
break
end
end
@position = nil
end
# Select the word at the clicked/selected grid cell.
def current_from_cell
new_num = @grid.word_num_from_pos( @position, @current.dir )
unless new_num == 0 # Blank square, most likely
if @position == @current.gpos # Click on current == swap direction
@current.swap_direction
new_num = @grid.word_num_from_pos( @position, @current.dir )
end
@current.gpos, @current.number = @position, new_num
end
end
# Advance to the next clue in the current direction (Tab).
def next_clue
unhighlight
number = @grid.next_clue( @current.number, @current.dir )
@current.new_word( number, @grid.cell_number( number, @current.dir ) )
end
def draw_clues
across_point = Point.new( @across_left, MARGIN * 2 )
down_point = Point.new( @down_left, MARGIN * 2 )
draw_clue_header( across_point, 'Across' )
draw_clue_header( down_point, 'Down' )
draw_clue_list_with_current( across_point, @grid.across_clues,
@current.dir == :across )
draw_clue_list_with_current( down_point, @grid.down_clues,
@current.dir == :down )
end
def draw_clue_header( pos, header )
@font[:header].draw( header, pos.x, pos.y, 1, 1, 1, WHITE )
pos.move_by!( 0, @font[:header].height )
end
# Render the clue list off screen first if it's the list with the current clue,
# then redraw it where asked, potentially not from the start if the current
# clue wouldn't be displayed
def draw_clue_list_with_current( pos, list, current_list )
skip = 0
if current_list
off_screen = pos.offset( width, 0 )
skip = draw_clue_list( off_screen, list, current_list )
end
draw_clue_list( pos, list[skip..-1], current_list )
end
# Draw the list of clues, ensuring that the current one is on screen
#
# Returns how many leading clues to skip so the current one is comfortably
# visible; 0 when no adjustment is needed.
def draw_clue_list( pos, list, current_list )
found = -1
shown = 0
list.each_with_index do |clue, idx|
is_current = current_list && @current.number == clue.number
found = idx if is_current
lh = clue.draw( self, pos, CLUE_COLUMN_WIDTH, is_current )
shown += 1
break if pos.y >= height - (MARGIN + lh)
end
# If it's not the current list, we just show the beginning
return 0 unless current_list
# If it's not there, show the end
return list.size - shown if found == -1
# If we're nearing the bottom, move it up a bit
return ((list.size - shown) / 2).floor if (shown - found) < 4
# Otherwise, everything's hunky-dory
0
end
end
# Hold the current state: The cell position, and word number and direction
# that it's a part of.
#
# Assigned rather than subclassed: `class X < Struct.new(...)` creates a
# superfluous anonymous intermediate class; assigning the Struct directly is
# the idiomatic form and keeps the same public interface.
CurrentState = Struct.new( :gpos, :number, :dir ) do
  # Toggle the direction between :across and :down.
  def swap_direction
    self.dir = dir == :across ? :down : :across
  end

  # Move to a new word, given its clue number and starting cell position.
  def new_word( clue_number, pos )
    self.number, self.gpos = clue_number, pos
  end
end
end
# Script entry point: load the crossword named on the command line (or a
# bundled sample .puz), print its metadata, then launch the game window.
filename = ARGV[0] || '2014-4-22-LosAngelesTimes.puz'
puz = PuzzleLoader.new( filename )
puts "Size: #{puz.width} x #{puz.height}"
puts "Clues: #{puz.num_clues}"
puts 'Scrambled!' if puz.scrambled?
puts %(
Title: #{puz.title}
Author: #{puz.author}
Copyright: #{puz.copyright}
)
cgrid = Crossword::Grid.new( puz.rows, puz.clues )
Crossword::Game.new( cgrid, "#{puz.title} - #{puz.author}" ).show
|
# frozen_string_literal: true
#
# Copyright (C) 2011 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require 'active_support/callbacks/suspension'
class ActiveRecord::Base
# Microsecond-resolution timestamps in cache keys, so closely-spaced updates
# still produce distinct keys.
self.cache_timestamp_format = :usec
# NOTE(review): write_attribute made public — presumably called from outside
# the models themselves; confirm before re-privatizing.
public :write_attribute
class << self
# Relation-level helpers exposed directly on the class.
delegate :distinct_on, :find_ids_in_batches, :explain, to: :all
# Batched id-range iteration over the whole table; :loose => true by default.
def find_ids_in_ranges(opts={}, &block)
opts.reverse_merge!(:loose => true)
all.find_ids_in_ranges(opts, &block)
end
attr_accessor :in_migration
# determines if someone started a transaction in addition to the spec fixture transaction
# impossible to just count open transactions, cause by default it won't nest a transaction
# unless specifically requested
def in_transaction_in_test?
return false unless Rails.env.test?
stacktrace = caller
# Locate the stack frames for: any adapter-level transaction call, the spec
# wrapper's transaction (when defined), and after-commit callback execution.
transaction_index, wrap_index, after_index = [
ActiveRecord::ConnectionAdapters::DatabaseStatements.instance_method(:transaction),
defined?(SpecTransactionWrapper) && SpecTransactionWrapper.method(:wrap_block_in_transaction),
AfterTransactionCommit::Transaction.instance_method(:commit_records)
].map do |method|
if method
# Match the frame by the method's source file and name.
regex = /\A#{Regexp.escape(method.source_location.first)}:\d+:in `#{Regexp.escape(method.name)}'\z/.freeze
stacktrace.index{|s| s =~ regex}
end
end
if transaction_index
# we wrap a transaction around controller actions, so try to see if this call came from that
if wrap_index && (transaction_index..wrap_index).all?{|i| stacktrace[i].match?(/transaction|synchronize/)}
false
else
# check if this is being run through an after_transaction_commit since the last transaction
!(after_index && after_index < transaction_index)
end
else
false
end
end
# Deliberately disabled project-wide.
def default_scope(*)
raise "please don't ever use default_scope. it may seem like a great solution, but I promise, it isn't"
end
# VACUUM ANALYZE this model's table, using the deploy connection role.
def vacuum
GuardRail.activate(:deploy) do
connection.vacuum(table_name, analyze: true)
end
end
end
# Read +attr_name+, first writing +default_value+ into it when the current
# value is nil/false.
#
# NOTE(review): relies on write_attribute returning a truthy value — a falsy
# default short-circuits the `&&` and the method yields nil; confirm callers
# never pass false as the default.
def read_or_initialize_attribute(attr_name, default_value)
# have to read the attribute again because serialized attributes in Rails 4.2 get duped
read_attribute(attr_name) || (write_attribute(attr_name, default_value) && read_attribute(attr_name))
end
# NOTE(review): clone is aliased to dup, so clone no longer copies frozen
# state or singleton methods — presumably deliberate; verify callers expect
# dup semantics.
alias :clone :dup
# Serialize the record to a hash, escaping any fields listed in
# options[:user_content] and optionally nesting the result under the model's
# element name when options[:include_root] is set.
#
# @param options [Hash, nil] standard serializable_hash options plus
#   :user_content (Array of field names) and :include_root
# @return [Hash]
def serializable_hash(options = nil)
  result = super
  if result.present?
    result = result.with_indifferent_access
    # Fix: guard against a nil options hash here, as the include_root check
    # below already does; previously `options[:user_content]` raised
    # NoMethodError when serializable_hash was called with no arguments.
    user_content_fields = (options && options[:user_content]) || []
    result.keys.each do |name|
      if user_content_fields.include?(name.to_s)
        result[name] = UserContent.escape(result[name])
      end
    end
  end
  if options && options[:include_root]
    result = {self.class.base_class.model_name.element => result}
  end
  result
end
# See ActiveModel#serializable_add_includes
#
# Propagates the :include_root option down into each included association's
# serialization options.
def serializable_add_includes(options = {}, &block)
super(options) do |association, records, opts|
yield association, records, opts.reverse_merge(:include_root => options[:include_root])
end
end
# Identifier used in feed URLs: "<reflection_type_name>_<uuid-or-id>".
#
# NOTE(review): `self.uuid rescue self.id` swallows *any* StandardError from
# #uuid — not just NoMethodError, but e.g. missing-attribute errors too —
# and falls back to id. Presumably intentional best-effort; confirm before
# tightening.
def feed_code
id = self.uuid rescue self.id
"#{self.class.reflection_type_name}_#{id}"
end
# All concrete ActiveRecord models in the app (plus Version), excluding
# tableless and abstract classes. Memoized after the first full scan.
def self.all_models
return @all_models if @all_models.present?
@all_models = (ActiveRecord::Base.models_from_files +
[Version]).compact.uniq.reject { |model|
(model < Tableless) ||
model.abstract_class?
}
end
# Force-load every model file (app, vendored plugins, gem plugins) so that
# ActiveRecord::Base.descendants is complete, then return the descendants.
# Memoized after the first scan.
def self.models_from_files
@from_files ||= begin
Dir[
"#{Rails.root}/app/models/**/*.rb",
"#{Rails.root}/vendor/plugins/*/app/models/**/*.rb",
"#{Rails.root}/gems/plugins/*/app/models/**/*.rb",
].sort.each do |file|
# skip files whose constant (derived from the path) is already loaded
next if const_defined?(file.sub(%r{.*/app/models/(.*)\.rb$}, '\1').camelize)
ActiveSupport::Dependencies.require_or_load(file)
end
ActiveRecord::Base.descendants
end
end
# Upper bound for values destined for a `text` column (64KB - 1).
def self.maximum_text_length
@maximum_text_length ||= 64.kilobytes-1
end
# Upper bound for values destined for a long text column (500KB - 1).
def self.maximum_long_text_length
@maximum_long_text_length ||= 500.kilobytes-1
end
# Upper bound for `string` (varchar) columns.
def self.maximum_string_length
255
end
# Find the first record matching an asset string such as "course_5".
#
# @param asset_types [Array<String>, nil] optional class-name whitelist
def self.find_by_asset_string(string, asset_types=nil)
  find_all_by_asset_string([string], asset_types).first
end
# Find all records for the given asset strings, optionally restricted to the
# listed class names. Strings are grouped per class so each class is queried
# once with all its ids.
#
# NOTE(review): the inline `rescue []` silently drops classes that fail to
# constantize or query — presumably intentional best-effort; confirm.
def self.find_all_by_asset_string(strings, asset_types=nil)
# TODO: start checking asset_types, if provided
strings.map{ |str| parse_asset_string(str) }.group_by(&:first).inject([]) do |result, (klass, id_pairs)|
next result if asset_types && !asset_types.include?(klass)
result.concat((klass.constantize.where(id: id_pairs.map(&:last)).to_a rescue []))
end
end
# takes an asset string list, like "course_5,user_7" and turns it into an
# array of [class_name, id] like [ ["Course", 5], ["User", 7] ]
def self.parse_asset_string_list(asset_string_list)
  asset_string_list.to_s.split(",").map do |str|
    parse_asset_string(str)
  end
end
# Split one asset string (e.g. "course_5") into [class_name, integer_id].
def self.parse_asset_string(str)
  type, id = asset_string_components(str)
  [convert_class_name(type), id.try(:to_i)]
end
# Split "wiki_page_123" into ["wiki_page", "123"]: the id is whatever follows
# the last underscore (nil when that segment is empty).
def self.asset_string_components(str)
  *type_parts, id = str.split('_', -1)
  [type_parts.join('_'), id.presence]
end
# Convert a namespaced asset-string type like "quizzes:quiz" into a class
# name like "Quizzes::Quiz".
def self.convert_class_name(str)
  *namespaces, class_name = str.split(':')
  (namespaces.map(&:camelize) + [class_name.try(:classify)]).join('::')
end
# Build the asset string for an id, e.g. "course_5".
def self.asset_string(id)
  [reflection_type_name, id].join('_')
end
# Asset string for this record, memoized per shard (the record's id differs
# depending on the current shard).
def asset_string
@asset_string ||= {}
@asset_string[Shard.current] ||= self.class.asset_string(id)
end
# Asset string built from the globalized (cross-shard) id; memoized.
def global_asset_string
  @global_asset_string ||= begin
    type = self.class.reflection_type_name
    "#{type}_#{global_id}"
  end
end
# little helper to keep checks concise and avoid a db lookup
#
# Returns true when +asset+ is the record referenced by this record's
# polymorphic "#{field}_id"/"#{field}_type" pair (default field: :context).
#
# Fix: the old `asset&.id == send(...)` form still fell through to
# `asset.class.base_class` when +asset+ was nil and the foreign key was also
# nil (nil == nil), raising NoMethodError on NilClass; bail out early instead.
def has_asset?(asset, field = :context)
  return false if asset.nil?
  asset.id == send("#{field}_id") && asset.class.base_class.name == send("#{field}_type")
end
# Asset string of the record referenced by the polymorphic `field`
# ("course_5"), built from the raw columns; nil when the type column is nil.
def context_string(field = :context)
send("#{field}_type").underscore + "_" + send("#{field}_id").to_s if send("#{field}_type")
end
# Lazily creates (and prepends) a module to host the backcompat accessors so
# their overrides can call `super` into the real attribute readers.
def self.asset_string_backcompat_module
@asset_string_backcompat_module ||= Module.new.tap { |m| prepend(m) }
end
# Defines lazy-migration accessors for a legacy asset-string column: when the
# new #{association}_id/_type columns are nil but the old string column holds
# a value, the string is parsed, the new attributes are written in-memory,
# and the read is retried. Called without `method` it defines both the id and
# type variants (probing the db first and bailing out quietly if the table
# isn't there yet).
def self.define_asset_string_backcompat_method(string_version_name, association_version_name = string_version_name, method = nil)
# just chain to the two methods
unless method
# this is weird, but gets the instance methods defined so they can be chained
begin
self.new.send("#{association_version_name}_id")
rescue
# the db doesn't exist yet; no need to bother with backcompat methods anyway
return
end
define_asset_string_backcompat_method(string_version_name, association_version_name, 'id')
define_asset_string_backcompat_method(string_version_name, association_version_name, 'type')
return
end
asset_string_backcompat_module.class_eval <<-CODE, __FILE__, __LINE__ + 1
def #{association_version_name}_#{method}
res = super
if !res && #{string_version_name}.present?
type, id = ActiveRecord::Base.parse_asset_string(#{string_version_name})
write_attribute(:#{association_version_name}_type, type)
write_attribute(:#{association_version_name}_id, id)
res = super
end
res
end
CODE
end
# Column names included when exporting rows; `format` is accepted so
# subclasses can vary the set, but is unused here.
def export_columns(format = nil)
self.class.content_columns.map(&:name)
end
# This record's values for export_columns, in the same order.
def to_row(format = nil)
export_columns(format).map { |c| self.send(c) }
end
# Overridden to true by models that can act as a context (e.g. courses,
# accounts).
def is_a_context?
false
end
# Short name of this record's context, cached both in-process and in
# Rails.cache (keyed by the context's asset string). A failure to load the
# context caches "". Raises when the model has no context association.
def cached_context_short_name
if self.respond_to?(:context)
code = self.respond_to?(:context_code) ? self.context_code : self.context.asset_string
@cached_context_name ||= Rails.cache.fetch(['short_name_lookup', code].cache_key) do
self.context.short_name rescue ""
end
else
raise "Can only call cached_context_short_name on items with a context"
end
end
# Globally enables/disables touch_context. NOTE: class variable, so the flag
# is shared across the whole inheritance tree, not just this class.
def self.skip_touch_context(skip=true)
@@skip_touch_context = skip
end
# Saves the record while suppressing the touch_context callback for the
# duration of the save. Returns the result of #save.
def save_without_touching_context
  @skip_touch_context = true
  self.save
ensure
  # BUGFIX: reset the flag in an ensure block so an exception raised during
  # save can no longer leave this instance permanently skipping context
  # touches. (The old version also returned the literal false from the flag
  # reset; the save result is now returned instead.)
  @skip_touch_context = false
end
# Bumps updated_at on this record's polymorphic context after the current
# transaction commits. No-op while touching is suspended either globally
# (skip_touch_context) or for this instance (save_without_touching_context).
# Failures are reported to Canvas::Errors, never raised to the caller.
def touch_context
  # BUGFIX: the old single-expression guard parsed as
  #   @@skip_touch_context ||= (false || (@skip_touch_context ||= false))
  # so one instance with @skip_touch_context == true latched the CLASS
  # variable to true, permanently disabling context touches for everything.
  @@skip_touch_context ||= false
  @skip_touch_context ||= false
  return if @@skip_touch_context || @skip_touch_context
  if self.respond_to?(:context_type) && self.respond_to?(:context_id) && self.context_type && self.context_id
    self.class.connection.after_transaction_commit do
      self.context_type.constantize.where(id: self.context_id).update_all(updated_at: Time.now.utc)
    end
  end
rescue
  Canvas::Errors.capture_exception(:touch_context, $ERROR_INFO)
end
# Bumps updated_at on the associated user (if any) after commit. Returns
# true on success, false on error; failures are reported to Canvas::Errors,
# never raised.
def touch_user
if self.respond_to?(:user_id) && self.user_id
User.connection.after_transaction_commit do
User.where(:id => self.user_id).update_all(:updated_at => Time.now.utc)
end
end
true
rescue
Canvas::Errors.capture_exception(:touch_user, $ERROR_INFO)
false
end
# URL path prefix for this record's context, e.g. "courses/5".
def context_url_prefix
"#{self.context_type.downcase.pluralize}/#{self.context_id}"
end
# Serialization wrapper layering Canvas conventions on Rails' as_json:
# class- and instance-level serialization_excludes / serialization_methods,
# optional per-user attribute filtering, an optional "permissions" sub-hash
# computed via rights_status, and (legacy) a root key included by default.
# Returns a HashWithIndifferentAccess.
# Example:
# obj.to_json(:permissions => {:user => u, :policies => [:read, :write, :update]})
def as_json(options = nil)
options = options.try(:dup) || {}
self.set_serialization_options if self.respond_to?(:set_serialization_options)
except = options.delete(:except) || []
except = Array(except).dup
except.concat(self.class.serialization_excludes) if self.class.respond_to?(:serialization_excludes)
except.concat(self.serialization_excludes) if self.respond_to?(:serialization_excludes)
except.uniq!
methods = options.delete(:methods) || []
methods = Array(methods).dup
methods.concat(self.class.serialization_methods) if self.class.respond_to?(:serialization_methods)
methods.concat(self.serialization_methods) if self.respond_to?(:serialization_methods)
methods.uniq!
options[:except] = except unless except.empty?
options[:methods] = methods unless methods.empty?
# We include a root in all the association json objects (if it's a
# collection), which is different than the rails behavior of just including
# the root in the base json object. Hence the hackies.
#
# We are in the process of migrating away from including the root in all our
# json serializations at all. Once that's done, we can remove this and the
# monkey patch to Serialzer, below.
# ^hahahahahahaha
unless options.key?(:include_root)
options[:include_root] = true
end
hash = serializable_hash(options)
if options[:permissions]
obj_hash = options[:include_root] ? hash[self.class.base_class.model_name.element] : hash
if self.respond_to?(:filter_attributes_for_user)
self.filter_attributes_for_user(obj_hash, options[:permissions][:user], options[:permissions][:session])
end
unless options[:permissions][:include_permissions] == false
permissions_hash = self.rights_status(options[:permissions][:user], options[:permissions][:session], *options[:permissions][:policies])
if self.respond_to?(:serialize_permissions)
permissions_hash = self.serialize_permissions(permissions_hash, options[:permissions][:user], options[:permissions][:session])
end
obj_hash["permissions"] = permissions_hash
end
end
self.revert_from_serialization_options if self.respond_to?(:revert_from_serialization_options)
hash.with_indifferent_access
end
# The record's class name as a String (handy in views/serializers).
def class_name
self.class.to_s
end
# Exposes the class-level (protected) SQL condition sanitizer to instances.
def sanitize_sql(*args)
self.class.send :sanitize_sql_for_conditions, *args
end
# Underscored base-class name; used as the asset-string type prefix.
def self.reflection_type_name
base_class.name.underscore
end
# Instance-level convenience for the class-level LIKE-condition builder.
def wildcard(*args)
self.class.wildcard(*args)
end
# Builds a sanitized SQL LIKE condition over one or more columns. The last
# non-hash argument is the match value; the remaining args are column
# expressions, OR'd together. options: :type (:full default, :right, :left),
# :case_sensitive, and :delimiter (wraps both value and columns so
# whole-token matches work; forces :full).
def self.wildcard(*args)
options = args.last.is_a?(Hash) ? args.pop : {}
options[:type] ||= :full
value = args.pop
if options[:delimiter]
options[:type] = :full
value = options[:delimiter] + value + options[:delimiter]
delimiter = connection.quote(options[:delimiter])
column_str = "#{delimiter} || %s || #{delimiter}"
args = args.map{ |a| column_str % a.to_s }
end
value = wildcard_pattern(value, options)
cols = args.map{ |col| like_condition(col, '?', !options[:case_sensitive]) }
sanitize_sql_array ["(" + cols.join(" OR ") + ")", *([value] * cols.size)]
end
# Builds a SQL LIKE pattern from +value+: lowercases it (unless
# :case_sensitive), escapes the LIKE metacharacters (backslash, %, _), then
# adds leading/trailing wildcards per options[:type] (:full default, :right
# anchors the left edge, :left anchors the right edge).
def self.wildcard_pattern(value, options = {})
  pattern = value.to_s
  pattern = pattern.downcase unless options[:case_sensitive]
  pattern = pattern.gsub('\\', '\\\\\\\\').gsub('%', '\\%').gsub('_', '\\_')
  pattern = "%#{pattern}" unless options[:type] == :right
  pattern = "#{pattern}%" unless options[:type] == :left
  pattern
end
# Case-insensitive LIKE over the space-joined, NULL-safe concatenation of
# several columns; the last argument is the match value.
def self.coalesced_wildcard(*args)
value = args.pop
value = wildcard_pattern(value)
cols = coalesce_chain(args)
sanitize_sql_array ["(#{like_condition(cols, '?', false)})", value]
end
# Concatenates NULL-safe, lowercased column expressions with single spaces,
# for matching across several columns at once.
def self.coalesce_chain(cols)
  pieces = cols.map { |column| coalesce_clause(column) }
  "(#{pieces.join(" || ' ' || ")})"
end
# NULL-safe, lowercased SQL expression for a single column.
def self.coalesce_clause(column)
  format("COALESCE(LOWER(%s), '')", column)
end
# SQL fragment "<value> LIKE <pattern>", optionally wrapping the left-hand
# side in LOWER() for case-insensitive matching.
def self.like_condition(value, pattern = '?', downcase = true)
  lhs = downcase ? "LOWER(#{value})" : value
  "#{lhs} LIKE #{pattern}"
end
# Returns an Arel expression producing a locale-correct sort key for `col`.
# On PostgreSQL prefers pg_collkey's collkey() (full ICU collation); falls
# back to a bytea cast of the lowercased value for platform-stable ordering.
# Non-Postgres adapters just sort on the raw column.
def self.best_unicode_collation_key(col)
val = if ActiveRecord::Base.configurations[Rails.env]['adapter'] == 'postgresql'
# For PostgreSQL, we can't trust a simple LOWER(column), with any collation, since
# Postgres just defers to the C library which is different for each platform. The best
# choice is the collkey function from pg_collkey which uses ICU to get a full unicode sort.
# If that extension isn't around, casting to a bytea sucks for international characters,
# but at least it's consistent, and orders commas before letters so you don't end up with
# Johnson, Bob sorting before Johns, Jimmy
unless @collkey&.key?(Shard.current.database_server.id)
@collkey ||= {}
@collkey[Shard.current.database_server.id] = connection.extension(:pg_collkey)&.schema
end
# NOTE(review): the trailing `&& false` makes this branch unreachable (and
# assigns false to `collation`) — presumably a deliberate kill-switch for the
# pg12 ICU-collation path; confirm intent before removing.
if (collation = Canvas::ICU.choose_pg12_collation(connection.icu_collations) && false)
"(#{col} COLLATE #{collation})"
elsif (schema = @collkey[Shard.current.database_server.id])
# The collation level of 3 is the default, but is explicitly specified here and means that
# case, accents and base characters are all taken into account when creating a collation key
# for a string - more at https://pgxn.org/dist/pg_collkey/0.5.1/
# if you change these arguments, you need to rebuild all db indexes that use them,
# and you should also match the settings with Canvas::ICU::Collator and natcompare.js
"#{schema}.collkey(#{col}, '#{Canvas::ICU.locale_for_collation}', false, 3, true)"
else
"CAST(LOWER(replace(#{col}, '\\', '\\\\')) AS bytea)"
end
else
col
end
Arel.sql(val)
end
# Counts rows per day (in the current Rails time zone) over a window of
# :num_days (default 20) ending at :max_date (default now), counting on
# :column (default created_at). Returns a hash of Date => count.
def self.count_by_date(options = {})
column = options[:column] || "created_at"
max_date = (options[:max_date] || Time.zone.now).midnight
num_days = options[:num_days] || 20
min_date = (options[:min_date] || max_date.advance(:days => -(num_days-1))).midnight
offset = max_date.utc_offset
# shift the stored (UTC) timestamps into the local zone before truncating to a date
expression = "((#{column} || '-00')::TIMESTAMPTZ AT TIME ZONE '#{Time.zone.tzinfo.name}')::DATE"
result = where(
"#{column} >= ? AND #{column} < ?",
min_date,
max_date.advance(:days => 1)
).
group(expression).
order(Arel.sql(expression)).
count
# some adapters return Date keys directly; others return strings
return result if result.keys.first.is_a?(Date)
Hash[result.map { |date, count|
[Time.zone.parse(date).to_date, count]
}]
end
# CASE expression ranking `col` by its position in `ary` (each entry may be a
# single value or an array of equally-ranked values); values not listed rank
# last. Values are quoted; result is an Arel literal for ORDER BY.
def self.rank_sql(ary, col)
sql = ary.each_with_index.inject(+'CASE '){ |string, (values, i)|
string << "WHEN #{col} IN (" << Array(values).map{ |value| connection.quote(value) }.join(', ') << ") THEN #{i} "
} << "ELSE #{ary.size} END"
Arel.sql(sql)
end
# Maps each value in `ary` (entries may be single values or arrays of
# equally-ranked values) to its 1-based rank; unknown values default to
# ary.size + 1 via the hash's default value.
def self.rank_hash(ary)
  ranks = Hash.new(ary.size + 1)
  ary.each_with_index do |values, index|
    Array(values).each { |value| ranks[value] = index + 1 }
  end
  ranks
end
# Distinct non-NULL values of `column`, ordered ascending. On PostgreSQL uses
# a recursive "loose index scan" CTE (fast when the column has few distinct
# values relative to row count); include_nil: true prepends NULL if present.
def self.distinct_values(column, include_nil: false)
column = column.to_s
result = if ActiveRecord::Base.configurations[Rails.env]['adapter'] == 'postgresql'
sql = +''
sql << "SELECT NULL AS #{column} WHERE EXISTS (SELECT * FROM #{quoted_table_name} WHERE #{column} IS NULL) UNION ALL (" if include_nil
sql << <<~SQL
WITH RECURSIVE t AS (
SELECT MIN(#{column}) AS #{column} FROM #{quoted_table_name}
UNION ALL
SELECT (SELECT MIN(#{column}) FROM #{quoted_table_name} WHERE #{column} > t.#{column})
FROM t
WHERE t.#{column} IS NOT NULL
)
SELECT #{column} FROM t WHERE #{column} IS NOT NULL
SQL
sql << ")" if include_nil
find_by_sql(sql)
else
# NOTE(review): this fallback uses the Rails-2-era find(:all, :select ...)
# API, which no longer exists on modern Rails — presumably dead code since
# production only runs PostgreSQL; confirm before relying on it.
conditions = "#{column} IS NOT NULL" unless include_nil
find(:all, :select => "DISTINCT #{column}", :conditions => conditions, :order => column)
end
result.map(&column.to_sym)
end
# ORDER BY fragment with explicit NULL placement. On PostgreSQL emits NULLS
# FIRST/LAST (only when it differs from the default for the direction);
# elsewhere emulates placement with an "IS [NOT] NULL" sort key.
# direction is nil, :asc, or :desc
def self.nulls(first_or_last, column, direction = nil)
if connection.adapter_name == 'PostgreSQL'
clause = if first_or_last == :first && direction != :desc
" NULLS FIRST"
elsif first_or_last == :last && direction == :desc
" NULLS LAST"
end
Arel.sql("#{column} #{direction.to_s.upcase}#{clause}".strip)
else
Arel.sql("#{column} IS#{" NOT" unless first_or_last == :last} NULL, #{column} #{direction.to_s.upcase}".strip)
end
end
# Overrides belongs_to to require an explicit list of valid classes for
# polymorphic associations (rejecting bare `polymorphic: true`), to install
# per-type accessors via add_polymorph_methods, and to wire in Canvas
# caching shims for developer_key and root_account associations.
# set up class-specific getters/setters for a polymorphic association, e.g.
# belongs_to :context, polymorphic: [:course, :account]
def self.belongs_to(name, scope = nil, **options)
if options[:polymorphic] == true
raise "Please pass an array of valid types for polymorphic associations. Use exhaustive: false if you really don't want to validate them"
end
polymorphic_prefix = options.delete(:polymorphic_prefix)
exhaustive = options.delete(:exhaustive)
reflection = super[name.to_s]
if name.to_s == 'developer_key'
reflection.instance_eval do
def association_class
DeveloperKey::CacheOnAssociation
end
end
end
include Canvas::RootAccountCacher if name.to_s == 'root_account'
Canvas::AccountCacher.apply_to_reflections(self)
if reflection.options[:polymorphic].is_a?(Array) ||
reflection.options[:polymorphic].is_a?(Hash)
reflection.options[:exhaustive] = exhaustive
reflection.options[:polymorphic_prefix] = polymorphic_prefix
add_polymorph_methods(reflection)
end
reflection
end
# Normalizes a polymorphic association spec into [association_name,
# class_name] pairs: hashes map names to explicit class names, while bare
# names are camelized.
def self.canonicalize_polymorph_list(list)
  Array.wrap(list).flat_map do |name|
    name.is_a?(Hash) ? name.to_a : [[name, name.to_s.camelize]]
  end
end
# For a polymorphic reflection, installs a type-checking setter plus
# per-type reader/writer pairs (e.g. #course / #course= for a polymorphic
# :context), optionally prefixed. Readers return nil when the association
# currently holds a different type; writers refuse to nil-out a value of a
# different type and raise on type mismatch.
def self.add_polymorph_methods(reflection)
unless @polymorph_module
@polymorph_module = Module.new
include(@polymorph_module)
end
specifics = canonicalize_polymorph_list(reflection.options[:polymorphic])
unless reflection.options[:exhaustive] == false
specific_classes = specifics.map(&:last).sort
validates reflection.foreign_type, inclusion: { in: specific_classes }, allow_nil: true
@polymorph_module.class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{reflection.name}=(record)
if record && [#{specific_classes.join(', ')}].none? { |klass| record.is_a?(klass) }
message = "one of #{specific_classes.join(', ')} expected, got \#{record.class}"
raise ActiveRecord::AssociationTypeMismatch, message
end
super
end
RUBY
end
if reflection.options[:polymorphic_prefix] == true
prefix = "#{reflection.name}_"
elsif reflection.options[:polymorphic_prefix]
prefix = "#{reflection.options[:polymorphic_prefix]}_"
end
specifics.each do |(name, class_name)|
# ensure we capture this class's table name
table_name = self.table_name
belongs_to :"#{prefix}#{name}", -> { where(table_name => { reflection.foreign_type => class_name }) },
foreign_key: reflection.foreign_key,
class_name: class_name
correct_type = "#{reflection.foreign_type} && self.class.send(:compute_type, #{reflection.foreign_type}) <= #{class_name}"
@polymorph_module.class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{prefix}#{name}
#{reflection.name} if #{correct_type}
end
def #{prefix}#{name}=(record)
# we don't want to unset it if it's currently some other type, i.e.
# foo.bar = Bar.new
# foo.baz = nil
# foo.bar.should_not be_nil
return if record.nil? && !(#{correct_type})
association(:#{prefix}#{name}).send(:raise_on_type_mismatch!, record) if record
self.#{reflection.name} = record
end
RUBY
end
end
# Runs the block in a nested transaction (savepoint), retrying up to
# `retries` times on unique-constraint violations; the final attempt runs on
# the primary and lets any RecordNotUnique propagate. The block receives the
# zero-based attempt number.
def self.unique_constraint_retry(retries = 1)
# runs the block in a (possibly nested) transaction. if a unique constraint
# violation occurs, it will run it "retries" more times. the nested
# transaction (savepoint) ensures we don't mess up things for the outer
# transaction. useful for possible race conditions where we don't want to
# take a lock (e.g. when we create a submission).
retries.times do |retry_count|
begin
result = transaction(:requires_new => true) { uncached { yield(retry_count) } }
connection.clear_query_cache
return result
rescue ActiveRecord::RecordNotUnique
end
end
GuardRail.activate(:primary) do
result = transaction(:requires_new => true) { uncached { yield(retries) } }
connection.clear_query_cache
result
end
end
# Current WAL write position on the primary; pair with wait_for_replication.
# Refuses to run inside a transaction, where the value would be misleading.
def self.current_xlog_location
Shard.current(shard_category).database_server.unguard do
GuardRail.activate(:primary) do
if Rails.env.test? ? self.in_transaction_in_test? : connection.open_transactions > 0
raise "don't run current_xlog_location in a transaction"
else
connection.current_wal_lsn
end
end
end
end
# Blocks (polling every 0.1s) until the secondary has replayed past `start`
# (default: the current WAL position). Returns false if `timeout` seconds
# elapse first; immediately true when there is no read-only secondary.
def self.wait_for_replication(start: nil, timeout: nil)
return true unless GuardRail.activate(:secondary) { connection.readonly? }
start ||= current_xlog_location
GuardRail.activate(:secondary) do
# positive == first value greater, negative == second value greater
start_time = Time.now.utc
while connection.wal_lsn_diff(start, :last_replay) >= 0
return false if timeout && Time.now.utc > start_time + timeout
sleep 0.1
end
end
true
end
# Bulk-inserts a homogeneous array of AR objects via bulk_insert, bypassing
# callbacks and validations. excluded_columns lists attribute names to strip
# from each row; the sentinel 'primary_key' expands to the model's actual
# primary key column. Serialized attribute types are serialized explicitly
# since bulk_insert works on raw values.
def self.bulk_insert_objects(objects, excluded_columns: ['primary_key'])
  return if objects.empty?
  # dup so we never mutate an array the caller passed in
  excluded_columns = excluded_columns.dup
  excluded_columns << objects.first.class.primary_key if excluded_columns.delete('primary_key')
  hashed_objects = objects.map do |object|
    # BUGFIX: splat the column names. The old code called
    # except(excluded_columns.join(',')) — i.e. except("a,b") — which matched
    # no key, so nothing was excluded when more than one column was given.
    object.attributes.except(*excluded_columns).map do |(name, value)|
      if (type = object.class.attribute_types[name]).is_a?(ActiveRecord::Type::Serialized)
        value = type.serialize(value)
      end
      [name, value]
    end.to_h
  end
  objects.first.class.bulk_insert(hashed_objects)
end
# Low-level multi-row insert of attribute hashes (no callbacks/validations).
# Serializes array-valued columns through their cast types, and for
# partitioned models routes each row to its partition's own INSERT.
def self.bulk_insert(records)
return if records.empty?
array_columns = records.first.select{|k, v| v.is_a?(Array)}.map(&:first)
array_columns.each do |column_name|
cast_type = connection.send(:lookup_cast_type_from_column, self.columns_hash[column_name.to_s])
records.each do |row|
row[column_name] = cast_type.serialize(row[column_name])
end
end
if self.respond_to?(:attrs_in_partition_groups)
# this model is partitioned, we need to send a separate
# insert statement for each partition represented
# in the input records
self.attrs_in_partition_groups(records) do |partition_name, partition_records|
transaction do
connection.bulk_insert(partition_name, partition_records)
end
end
else
transaction do
connection.bulk_insert(table_name, records)
end
end
end
# Provides suspend_callbacks / suspended_callback? for temporarily disabling callbacks.
include ActiveSupport::Callbacks::Suspension
# Bumps updated_at on every row of the table, in primary-key ranges so each
# UPDATE stays bounded.
def self.touch_all_records
self.find_ids_in_ranges do |min_id, max_id|
self.where(primary_key => min_id..max_id).touch_all
end
end
# Excludes cross-shard "shadow" rows: local ids fall in (0, IDS_PER_SHARD].
scope :non_shadow, ->(key = primary_key) { where("#{key}<=? AND #{key}>?", Shard::IDS_PER_SHARD, 0) }
# skips validations, callbacks, and a transaction
# do _NOT_ improve in the future to handle validations and callbacks - make
# it a separate method or optional functionality. some callers explicitly
# rely on no callbacks or validations
# touch: false leaves created_at/updated_at untouched. No-op when the record
# has no pending changes.
def save_without_transaction(touch: true)
return unless changed?
self.updated_at = Time.now.utc if touch
if new_record?
self.created_at = updated_at if touch
# _insert_record is AR's internal raw INSERT; bypasses callbacks entirely
self.id = self.class._insert_record(attributes_with_values(attribute_names_for_partial_writes))
@new_record = false
else
update_columns(attributes_with_values(attribute_names_for_partial_writes))
end
changes_applied
end
# Runs the block with a PostgreSQL statement_timeout (milliseconds) scoped to
# the wrapping transaction; a resulting query cancellation is re-raised as
# ActiveRecord::QueryTimeout.
def self.with_statement_timeout(timeout = 30_000)
raise ArgumentError.new("timeout must be an integer") unless timeout.is_a? Integer
transaction do
connection.execute "SET LOCAL statement_timeout = #{timeout}"
yield
rescue ActiveRecord::StatementInvalid => e
raise ActiveRecord::QueryTimeout if e.cause.is_a? PG::QueryCanceled
raise e
end
end
end
module ActiveRecord
# Raised by with_statement_timeout when PostgreSQL cancels a statement due
# to statement_timeout.
class QueryTimeout < ActiveRecord::StatementInvalid; end
end
# Replacement batching layer for ActiveRecord::Relation. Adds a :strategy
# option to find_each / find_in_batches / in_batches and automatically picks
# the cheapest workable strategy: :copy (PG COPY streaming), :cursor
# (server-side cursor), :temp_table (materialized scope), or plain :id
# batching (Rails' own implementation).
module UsefulFindInBatches
# add the strategy param
def find_each(start: nil, finish: nil, **kwargs)
if block_given?
find_in_batches(start: start, finish: finish, **kwargs) do |records|
records.each { |record| yield record }
end
else
enum_for(:find_each, start: start, finish: finish, **kwargs) do
relation = self
apply_limits(relation, start, finish).size
end
end
end
# add the strategy param
def find_in_batches(batch_size: 1000, start: nil, finish: nil, **kwargs)
relation = self
unless block_given?
return to_enum(:find_in_batches, start: start, finish: finish, batch_size: batch_size, **kwargs) do
total = apply_limits(relation, start, finish).size
(total - 1).div(batch_size) + 1
end
end
in_batches(of: batch_size, start: start, finish: finish, load: true, **kwargs) do |batch|
yield batch.to_a
end
end
# Dispatches to in_batches_with_<strategy>; :id falls through to Rails' own
# in_batches via call_super. Without a block, returns a BatchEnumerator.
def in_batches(strategy: nil, start: nil, finish: nil, **kwargs, &block)
unless block_given?
return ActiveRecord::Batches::BatchEnumerator.new(strategy: strategy, start: start, relation: self, **kwargs)
end
strategy ||= infer_in_batches_strategy
if strategy == :id
raise ArgumentError, "GROUP BY is incompatible with :id batches strategy" unless group_values.empty?
return activate { |r| r.call_super(:in_batches, UsefulFindInBatches, start: start, finish: finish, **kwargs, &block) }
end
kwargs.delete(:error_on_ignore)
activate { |r| r.send("in_batches_with_#{strategy}", start: start, finish: finish, **kwargs, &block); nil }
end
# Cheapest strategy first; :id only when nothing fancier is required.
def infer_in_batches_strategy
strategy ||= :copy if in_batches_can_use_copy?
strategy ||= :cursor if in_batches_can_use_cursor?
strategy ||= :temp_table if in_batches_needs_temp_table?
strategy || :id
end
private
# COPY monopolizes the connection, so it's off inside transactions/migrations.
def in_batches_can_use_copy?
connection.open_transactions == 0 && eager_load_values.empty? && !ActiveRecord::Base.in_migration
end
# cursors are safe on read-only/secondary connections
def in_batches_can_use_cursor?
eager_load_values.empty? && (GuardRail.environment == :secondary || connection.readonly?)
end
# ordering/grouping/distinct (or selects missing the pk) can't be id-paged
def in_batches_needs_temp_table?
order_values.any? ||
group_values.any? ||
select_values.to_s =~ /DISTINCT/i ||
distinct_value ||
in_batches_select_values_necessitate_temp_table?
end
# true when a custom select omits the primary key (so id-paging would break)
def in_batches_select_values_necessitate_temp_table?
return false if select_values.blank?
selects = select_values.flat_map { |sel| sel.to_s.split(",").map(&:strip) }
id_keys = [primary_key, "*", "#{table_name}.#{primary_key}", "#{table_name}.*"]
id_keys.all? { |k| !selects.include?(k) }
end
# Server-side cursor: fetches `of` rows (or ids) at a time inside a single
# transaction; the cursor is closed unless the connection already errored.
def in_batches_with_cursor(of: 1000, start: nil, finish: nil, load: false)
klass.transaction do
relation = apply_limits(clone, start, finish)
relation.skip_query_cache!
unless load
relation = relation.except(:select).select(primary_key)
end
sql = relation.to_sql
cursor = "#{table_name}_in_batches_cursor_#{sql.hash.abs.to_s(36)}"
connection.execute("DECLARE #{cursor} CURSOR FOR #{sql}")
loop do
if load
records = connection.uncached { klass.find_by_sql("FETCH FORWARD #{of} FROM #{cursor}") }
ids = records.map(&:id)
preload_associations(records)
yielded_relation = where(primary_key => ids).preload(includes_values + preload_values)
yielded_relation.send(:load_records, records)
else
ids = connection.uncached { connection.select_values("FETCH FORWARD #{of} FROM #{cursor}") }
yielded_relation = where(primary_key => ids).preload(includes_values + preload_values)
yielded_relation = yielded_relation.extending(BatchWithColumnsPreloaded).set_values(ids)
end
break if ids.empty?
yield yielded_relation
break if ids.size < of
end
ensure
unless $!.is_a?(ActiveRecord::StatementInvalid)
connection.execute("CLOSE #{cursor}")
end
end
end
# Streams ids (or whole rows) out via COPY, removing the connection from the
# pool so the caller's block can freely use a fresh connection meanwhile.
def in_batches_with_copy(of: 1000, start: nil, finish: nil, load: false)
limited_query = limit(0).to_sql
relation = self
relation_for_copy = apply_limits(relation, start, finish)
unless load
relation_for_copy = relation_for_copy.except(:select).select(primary_key)
end
full_query = "COPY (#{relation_for_copy.to_sql}) TO STDOUT"
conn = connection
full_query = conn.annotate_sql(full_query) if defined?(Marginalia)
pool = conn.pool
# remove the connection from the pool so that any queries executed
# while we're running this will get a new connection
pool.remove(conn)
checkin = -> do
pool&.restore_connection(conn)
pool = nil
end
# make sure to log _something_, even if the dbtime is totally off
conn.send(:log, full_query, "#{klass.name} Load") do
decoder = if load
# set up all our metadata based on a dummy query (COPY doesn't return any metadata)
result = conn.raw_connection.exec(limited_query)
type_map = conn.raw_connection.type_map_for_results.build_column_map(result)
# see PostgreSQLAdapter#exec_query
types = {}
fields = result.fields
fields.each_with_index do |fname, i|
ftype = result.ftype i
fmod = result.fmod i
types[fname] = conn.send(:get_oid_type, ftype, fmod, fname)
end
column_types = types.dup
columns_hash.each_key { |k| column_types.delete k }
PG::TextDecoder::CopyRow.new(type_map: type_map)
else
pkey_oid = columns_hash[primary_key].sql_type_metadata.oid
# this is really dumb that we have to manually search through this, but
# PG::TypeMapByOid doesn't have a direct lookup method
coder = conn.raw_connection.type_map_for_results.coders.find { |c| c.oid == pkey_oid }
PG::TextDecoder::CopyRow.new(type_map: PG::TypeMapByColumn.new([coder]))
end
rows = []
build_relation = -> do
if load
records = ActiveRecord::Result.new(fields, rows, types).map { |record| instantiate(record, column_types) }
ids = records.map(&:id)
yielded_relation = relation.where(primary_key => ids)
preload_associations(records)
yielded_relation.send(:load_records, records)
else
ids = rows.map(&:first)
yielded_relation = relation.where(primary_key => ids)
yielded_relation = yielded_relation.extending(BatchWithColumnsPreloaded).set_values(ids)
end
yielded_relation
end
conn.raw_connection.copy_data(full_query, decoder) do
while (row = conn.raw_connection.get_copy_data)
rows << row
if rows.size == of
yield build_relation.call
rows = []
end
end
end
# return the connection now, in case there was only 1 batch, we can avoid a separate connection if the block needs it
checkin.call
unless rows.empty?
yield build_relation.call
end
end
nil
ensure
# put the connection back in the pool for reuse
checkin&.call
end
# in some cases we're doing a lot of work inside
# the yielded block, and holding open a transaction
# or even a connection while we do all that work can
# be a problem for the database, especially if a lot
# of these are happening at once. This strategy
# makes one query to hold onto all the IDs needed for the
# iteration (make sure they'll fit in memory, or you could be sad)
# and yields the objects in batches in the same order as the scope specified
# so the DB connection can be fully recycled during each block.
def in_batches_with_pluck_ids(of: 1000, start: nil, finish: nil, load: false)
relation = apply_limits(self, start, finish)
all_object_ids = relation.pluck(:id)
current_order_values = order_values
all_object_ids.in_groups_of(of) do |id_batch|
object_batch = klass.unscoped.where(id: id_batch).order(current_order_values).preload(includes_values + preload_values)
yield object_batch
end
end
# Materializes the scope into a temporary table and pages through it,
# adding a pk/serial index when the result is bigger than one batch.
def in_batches_with_temp_table(of: 1000, start: nil, finish: nil, load: false, ignore_transaction: false)
Shard.current.database_server.unguard do
can_do_it = ignore_transaction ||
Rails.env.production? ||
ActiveRecord::Base.in_migration ||
GuardRail.environment == :deploy ||
(!Rails.env.test? && connection.open_transactions > 0) ||
ActiveRecord::Base.in_transaction_in_test?
unless can_do_it
raise ArgumentError, "in_batches with temp_table probably won't work outside a migration
and outside a transaction. Unfortunately, it's impossible to automatically
determine a better way to do it that will work correctly. You can try
switching to secondary first (then switching to primary if you modify anything
inside your loop), wrapping in a transaction (but be wary of locking records
for the duration of your query if you do any writes in your loop), or not
forcing in_batches to use a temp table (avoiding custom selects,
group, or order)."
end
relation = apply_limits(self, start, finish)
sql = relation.to_sql
table = "#{table_name}_in_batches_temp_table_#{sql.hash.abs.to_s(36)}"
table = table[-63..-1] if table.length > 63
remaining = connection.update("CREATE TEMPORARY TABLE #{table} AS #{sql}")
begin
return if remaining.zero?
if remaining > of
begin
old_proc = connection.raw_connection.set_notice_processor {}
index = if (select_values.empty? || select_values.any? { |v| v.to_s == primary_key.to_s }) && order_values.empty?
connection.execute(%{CREATE INDEX "temp_primary_key" ON #{connection.quote_local_table_name(table)}(#{connection.quote_column_name(primary_key)})})
primary_key.to_s
else
connection.execute "ALTER TABLE #{table} ADD temp_primary_key SERIAL PRIMARY KEY"
'temp_primary_key'
end
ensure
connection.raw_connection.set_notice_processor(&old_proc) if old_proc
end
end
klass.unscoped do
batch_relation = klass.from(table).select("*").limit(of).preload(includes_values + preload_values)
batch_relation = batch_relation.order(Arel.sql(connection.quote_column_name(index))) if index
yielded_relation = batch_relation
loop do
yield yielded_relation
remaining -= of
break if remaining <= 0
last_value = if yielded_relation.loaded?
yielded_relation.last[index]
else
yielded_relation.offset(of - 1).limit(1).pluck(index).first
end
break if last_value.nil?
yielded_relation = batch_relation.where("#{connection.quote_column_name(index)} > ?", last_value)
end
end
ensure
if !$!.is_a?(ActiveRecord::StatementInvalid) || connection.open_transactions == 0
connection.execute "DROP TABLE #{table}"
end
end
end
end
end
# Install the custom batching strategies on every relation.
ActiveRecord::Relation.prepend(UsefulFindInBatches)
# Extensions to ActiveRecord::Batches::BatchEnumerator that thread the custom
# batching strategy (see UsefulFindInBatches) through the enumerator API.
module UsefulBatchEnumerator
  def initialize(strategy: nil, **kwargs)
    @strategy = strategy
    @kwargs = kwargs.except(:relation)
    super(**kwargs.slice(:of, :start, :finish, :relation))
  end

  # Yields each record, loading batches with the configured strategy.
  def each_record
    return to_enum(:each_record) unless block_given?
    @relation.to_enum(:in_batches, strategy: @strategy, load: true, **@kwargs).each do |relation|
      relation.records.each { |record| yield record }
    end
  end

  # Deletes in batches. The :id fast path loops plain LIMITed deletes and
  # returns the number of rows deleted.
  def delete_all
    if @strategy.nil? && (strategy = @relation.infer_in_batches_strategy) == :id
      sum = 0
      loop do
        current = @relation.limit(@of).delete_all
        sum += current
        break unless current == @of
      end
      return sum
    end
    # BUGFIX: honor an explicitly requested strategy. Previously, when
    # @strategy was set, the (never-assigned) local `strategy` was nil here,
    # so the caller's choice was dropped and re-inferred by in_batches —
    # inconsistent with update_all/destroy_all, which pass @strategy.
    @relation.in_batches(strategy: @strategy || strategy, load: false, **@kwargs, &:delete_all)
  end

  def update_all(*args)
    @relation.in_batches(strategy: @strategy, load: false, **@kwargs) do |relation|
      relation.update_all(*args)
    end
  end

  def destroy_all
    @relation.in_batches(strategy: @strategy, load: true, **@kwargs, &:destroy_all)
  end

  def each
    enum = @relation.to_enum(:in_batches, strategy: @strategy, load: true, **@kwargs)
    return enum.each { |relation| yield relation } if block_given?
    enum
  end

  # Plucks in batches, yielding each batch's values; requires a block.
  def pluck(*args)
    return to_enum(:pluck, *args) unless block_given?
    @relation.except(:select)
             .select(*args)
             .in_batches(strategy: @strategy, load: false, **@kwargs) do |relation|
      yield relation.pluck(*args)
    end
  end
end
# Thread the strategy option through relation.in_batches enumerators.
ActiveRecord::Batches::BatchEnumerator.prepend(UsefulBatchEnumerator)
# Mixed into relations produced by the cursor/copy batch strategies: the ids
# were already fetched, so pluck(:<primary_key>) can be answered from memory
# without another query.
module BatchWithColumnsPreloaded
def set_values(values)
@loaded_values = values
self
end
def pluck(*args)
return @loaded_values if args == [primary_key.to_sym] && @loaded_values
super
end
end
# Extends Relation#lock with a :no_key_update symbol that maps to
# PostgreSQL's weaker "FOR NO KEY UPDATE" row lock; any other lock_type is
# passed through untouched.
module LockForNoKeyUpdate
  def lock(lock_type = true)
    resolved = lock_type == :no_key_update ? 'FOR NO KEY UPDATE' : lock_type
    super(resolved)
  end
end
# Teach Relation#lock about :no_key_update.
ActiveRecord::Relation.prepend(LockForNoKeyUpdate)
# Canvas-wide Relation extensions: guard rails against surprising query
# patterns (includes/uniq/where!), polymorphic condition building, ordered
# locking helpers, UNION, and primary-key-range batching utilities.
ActiveRecord::Relation.class_eval do
# preload/eager_load must always be chosen explicitly in this codebase
def includes(*args)
return super if args.empty? || args == [nil]
raise "Use preload or eager_load instead of includes"
end
def where!(*args)
raise "where!.not doesn't work in Rails 4.2" if args.empty?
super
end
def uniq(*)
raise "use #distinct instead of #uniq on relations (Rails 5.1 will delegate uniq to to_a)"
end
# Builds "(col_id=? AND col_type=?) OR ..." for a polymorphic column from a
# hash of { column => value(s) }; a nil among the values adds an IS NULL arm.
def polymorphic_where(args)
raise ArgumentError unless args.length == 1
column = args.first.first
values = Array(args.first.last)
original_length = values.length
values = values.compact
raise ArgumentError, "need to call polymorphic_where with at least one object" if values.empty?
sql = (["(#{column}_id=? AND #{column}_type=?)"] * values.length).join(" OR ")
sql << " OR (#{column}_id IS NULL AND #{column}_type IS NULL)" if values.length < original_length
where(sql, *values.map { |value| [value, value.class.base_class.name] }.flatten)
end
# Skips records touched within the configured "personal space" window, to
# avoid redundant touch churn.
def not_recently_touched
scope = self
if((personal_space = Setting.get('touch_personal_space', 0).to_i) != 0)
personal_space -= 1
# truncate to seconds
bound = Time.at(Time.now.to_i - personal_space).utc
scope = scope.where("#{connection.quote_local_table_name(table_name)}.updated_at<?", bound)
end
scope
end
# UPDATEs rows locked in primary-key order (FOR NO KEY UPDATE) to avoid
# deadlocks between concurrent bulk updates.
def update_all_locked_in_order(updates)
locked_scope = lock(:no_key_update).order(primary_key.to_sym)
if Setting.get("update_all_locked_in_order_subquery", "true") == "true"
unscoped.where(primary_key => locked_scope).update_all(updates)
else
transaction do
ids = locked_scope.pluck(primary_key)
unscoped.where(primary_key => ids).update_all(updates) unless ids.empty?
end
end
end
def touch_all
self.activate do |relation|
relation.update_all_locked_in_order(updated_at: Time.now.utc)
end
end
# PostgreSQL DISTINCT ON over the given columns, preserving any existing
# select list.
def distinct_on(*args)
args.map! do |column_name|
if column_name.is_a?(Symbol) && column_names.include?(column_name.to_s)
"#{connection.quote_local_table_name(table_name)}.#{connection.quote_column_name(column_name)}"
else
column_name.to_s
end
end
relation = clone
old_select = relation.select_values
relation.select_values = [+"DISTINCT ON (#{args.join(', ')}) "]
relation.distinct_value = false
relation.select_values.first << (old_select.empty? ? "*" : old_select.uniq.join(', '))
relation
end
# if this sql is constructed on one shard then executed on another it wont work
# dont use it for cross shard queries
def union(*scopes)
uniq_identifier = "#{table_name}.#{primary_key}"
scopes << self
sub_query = (scopes).map {|s| s.except(:select, :order).select(uniq_identifier).to_sql}.join(" UNION ALL ")
unscoped.where("#{uniq_identifier} IN (#{sub_query})")
end
# returns batch_size ids at a time, working through the primary key from
# smallest to largest.
#
# note this does a raw connection.select_values, so it doesn't work with scopes
def find_ids_in_batches(options = {})
batch_size = options[:batch_size] || 1000
key = "#{quoted_table_name}.#{primary_key}"
scope = except(:select).select(key).reorder(Arel.sql(key)).limit(batch_size)
ids = connection.select_values(scope.to_sql)
ids = ids.map(&:to_i) unless options[:no_integer_cast]
while ids.present?
yield ids
break if ids.size < batch_size
last_value = ids.last
ids = connection.select_values(scope.where("#{key}>?", last_value).to_sql)
ids = ids.map(&:to_i) unless options[:no_integer_cast]
end
end
# returns 2 ids at a time (the min and the max of a range), working through
# the primary key from smallest to largest.
def find_ids_in_ranges(options = {})
is_integer = columns_hash[primary_key.to_s].type == :integer
loose_mode = options[:loose] && is_integer
# loose_mode: if we don't care about getting exactly batch_size ids in between
# don't get the max - just get the min and add batch_size so we get that many _at most_
values = loose_mode ? "MIN(id)" : "MIN(id), MAX(id)"
batch_size = options[:batch_size].try(:to_i) || 1000
quoted_primary_key = "#{klass.connection.quote_local_table_name(table_name)}.#{klass.connection.quote_column_name(primary_key)}"
as_id = " AS id" unless primary_key == 'id'
subquery_scope = except(:select).select("#{quoted_primary_key}#{as_id}").reorder(primary_key.to_sym).limit(loose_mode ? 1 : batch_size)
subquery_scope = subquery_scope.where("#{quoted_primary_key} <= ?", options[:end_at]) if options[:end_at]
first_subquery_scope = options[:start_at] ? subquery_scope.where("#{quoted_primary_key} >= ?", options[:start_at]) : subquery_scope
ids = connection.select_rows("SELECT #{values} FROM (#{first_subquery_scope.to_sql}) AS subquery").first
while ids.first.present?
ids.map!(&:to_i) if is_integer
ids << ids.first + batch_size if loose_mode
yield(*ids)
last_value = ids.last
next_subquery_scope = subquery_scope.where(["#{quoted_primary_key}>?", last_value])
ids = connection.select_rows("SELECT #{values} FROM (#{next_subquery_scope.to_sql}) AS subquery").first
end
end
end
# PostgreSQL doesn't support JOIN in UPDATE/DELETE; rewrite joined relations
# into "UPDATE ... FROM ..." / "DELETE ... USING ..." statements.
module UpdateAndDeleteWithJoins
  # Splits a rendered SQL join clause into [table names, ON conditions].
  # Only INNER JOINs can be expressed via the FROM/USING rewrite.
  def deconstruct_joins(joins_sql=nil)
    unless joins_sql
      joins_sql = ''
      add_joins!(joins_sql, nil)
    end
    tables = []
    join_conditions = []
    joins_sql.strip.split('INNER JOIN')[1..-1].each do |join|
      # this could probably be improved
      raise "PostgreSQL update_all/delete_all only supports INNER JOIN" unless join.strip =~ /([a-zA-Z0-9'"_\.]+(?:(?:\s+[aA][sS])?\s+[a-zA-Z0-9'"_]+)?)\s+ON\s+(.*)/m
      tables << $1
      join_conditions << $2
    end
    [tables, join_conditions]
  end

  def update_all(updates, *args)
    db = Shard.current(klass.shard_category).database_server
    if joins_values.empty?
      # no joins: defer to stock update_all, escaping GuardRail's read-only
      # environment if we're currently in one
      if ::GuardRail.environment != db.guard_rail_environment
        Shard.current.database_server.unguard {return super }
      else
        return super
      end
    end
    stmt = Arel::UpdateManager.new

    stmt.set Arel.sql(@klass.send(:sanitize_sql_for_assignment, updates))
    from = from_clause.value
    stmt.table(from ? Arel::Nodes::SqlLiteral.new(from) : table)
    stmt.key = table[primary_key]
    sql = stmt.to_sql

    # render the JOIN clauses so they can be re-parsed into FROM + WHERE parts
    collector = connection.send(:collector)
    arel.join_sources.each do |node|
      connection.visitor.accept(node, collector)
    end
    join_sql = collector.value

    tables, join_conditions = deconstruct_joins(join_sql)

    unless tables.empty?
      sql.concat(' FROM ')
      sql.concat(tables.join(', '))
      sql.concat(' ')
    end

    scope = self
    join_conditions.each { |join| scope = scope.where(join) }

    # skip any binds that are used in the join
    collector = connection.send(:collector)
    scope.arel.constraints.each do |node|
      connection.visitor.accept(node, collector)
    end
    where_sql = collector.value
    sql.concat('WHERE ' + where_sql)
    if ::GuardRail.environment != db.guard_rail_environment
      Shard.current.database_server.unguard {connection.update(sql, "#{name} Update")}
    else
      connection.update(sql, "#{name} Update")
    end
  end

  def delete_all
    return super if joins_values.empty?

    sql = +"DELETE FROM #{quoted_table_name} "

    join_sql = arel.join_sources.map(&:to_sql).join(" ")
    tables, join_conditions = deconstruct_joins(join_sql)

    sql.concat('USING ')
    sql.concat(tables.join(', '))
    sql.concat(' ')

    scope = self
    join_conditions.each { |join| scope = scope.where(join) }

    collector = connection.send(:collector)
    scope.arel.constraints.each do |node|
      connection.visitor.accept(node, collector)
    end
    where_sql = collector.value
    sql.concat('WHERE ' + where_sql)

    connection.delete(sql, "SQL", [])
  end
end
ActiveRecord::Relation.prepend(UpdateAndDeleteWithJoins)
# SQL can't express UPDATE/DELETE with LIMIT/OFFSET directly; funnel the
# limited/offset relation through a primary-key subquery instead.
module UpdateAndDeleteAllWithLimit
  def delete_all(*args)
    return super unless limit_value || offset_value

    id_scope = except(:select).select(primary_key)
    unscoped.where(primary_key => id_scope).delete_all
  end

  def update_all(updates, *args)
    return super unless limit_value || offset_value

    id_scope = except(:select).select(primary_key)
    unscoped.where(primary_key => id_scope).update_all(updates)
  end
end
ActiveRecord::Relation.prepend(UpdateAndDeleteAllWithLimit)
ActiveRecord::Associations::CollectionProxy.class_eval do
  def respond_to?(name, include_private = false)
    # don't load the target just to answer marshalling probes
    return super if [:marshal_dump, :_dump, 'marshal_dump', '_dump'].include?(name)
    super ||
      (load_target && target.respond_to?(name, include_private)) ||
      proxy_association.klass.respond_to?(name, include_private)
  end

  def temp_record(*args)
    # creates a record with attributes like a child record but is not added to the collection for autosaving
    record = klass.unscoped.merge(scope).new(*args)
    @association.set_inverse_instance(record)
    record
  end

  # guard against Rails 5.1's uniq-delegates-to-to_a behavior change
  def uniq(*args)
    raise "use #distinct instead of #uniq on relations (Rails 5.1 will delegate uniq to to_a)"
  end
end
ActiveRecord::ConnectionAdapters::AbstractAdapter.class_eval do
  # Inserts each record (a Hash of column => value) into table_name, one
  # INSERT statement per record. The column list is taken from the first
  # record; keys missing from later records insert NULL.
  def bulk_insert(table_name, records)
    # guard: previously raised NoMethodError (nil.keys) for an empty array
    return if records.empty?

    keys = records.first.keys
    quoted_keys = keys.map { |k| quote_column_name(k) }.join(', ')
    records.each do |record|
      execute <<~SQL
        INSERT INTO #{quote_table_name(table_name)}
        (#{quoted_keys})
        VALUES
        (#{keys.map { |k| quote(record[k]) }.join(', ')})
      SQL
    end
  end
end
class ActiveRecord::ConnectionAdapters::AbstractAdapter
  # for functions that differ from one adapter to the next, use the following
  # method (overriding as needed in non-standard adapters), e.g.
  #
  #   connection.func(:group_concat, :name, '|') ->
  #     group_concat(name, '|')           (default)
  #     group_concat(name SEPARATOR '|')  (mysql)
  #     string_agg(name::text, '|')       (postgres)
  def func(name, *args)
    "#{name}(#{args.map{ |arg| func_arg_esc(arg) }.join(', ')})"
  end

  # symbols pass through as identifiers; everything else is quoted as a value
  def func_arg_esc(arg)
    arg.is_a?(Symbol) ? arg : quote(arg)
  end

  def group_by(*columns)
    # the first item should be the primary key(s) that the other columns are
    # functionally dependent on. alternatively, it can be a class, and all
    # columns will be inferred from it. this is useful for cases where you want
    # to select all columns from one table, and an aggregate from another.
    Array(infer_group_by_columns(columns).first).join(", ")
  end

  # expands any AR classes in columns into fully-qualified, quoted column lists
  def infer_group_by_columns(columns)
    columns.map { |col|
      col.respond_to?(:columns) ?
        col.columns.map { |c|
          "#{col.quoted_table_name}.#{quote_column_name(c.name)}"
        } :
        col
    }
  end
end
ActiveRecord::Associations::HasOneAssociation.class_eval do
  # Attributes used to build the associated record; drops the primary key
  # from the create scope unless it doubles as the association's foreign key.
  def create_scope
    attrs = scope.scope_for_create.stringify_keys
    pk_is_fk = klass.primary_key.to_s == reflection.foreign_key.to_s
    pk_is_fk ? attrs : attrs.except(klass.primary_key)
  end
end
class ActiveRecord::Migration
  # at least one of these tags is required
  DEPLOY_TAGS = [:predeploy, :postdeploy]

  class << self
    def is_postgres?
      connection.adapter_name == 'PostgreSQL'
    end

    # NOTE(review): procname is interpolated directly into SQL; only call
    # with trusted, internal proc names
    def has_postgres_proc?(procname)
      connection.select_value("SELECT COUNT(*) FROM pg_proc WHERE proname='#{procname}'").to_i != 0
    end
  end

  def connection
    if self.class.respond_to?(:connection)
      return self.class.connection
    else
      @connection || ActiveRecord::Base.connection
    end
  end

  def tags
    self.class.tags
  end
end
class ActiveRecord::MigrationProxy
  delegate :connection, :cassandra_cluster, to: :migration

  def initialize(*)
    super
    # 14 digits == timestamped migration; reject timestamps later than "now"
    if version&.to_s&.length == 14 && version.to_s > Time.now.utc.strftime("%Y%m%d%H%M%S")
      raise "please don't create migrations with a version number in the future: #{name} #{version}"
    end
  end

  # migrations may define #runnable? to opt out on certain shards/envs
  def runnable?
    !migration.respond_to?(:runnable?) || migration.runnable?
  end

  def load_migration
    load(filename)
    @migration = name.constantize
    # every migration must declare exactly one deploy phase
    raise "#{self.name} (#{self.version}) is not tagged as exactly one of predeploy or postdeploy!" unless (@migration.tags & ActiveRecord::Migration::DEPLOY_TAGS).length == 1
    @migration
  end
end
# memoize migration discovery; scanning the migration paths is expensive and
# the result can't change within a single process
module MigratorCache
  def migrations(paths)
    @@migrations_hash ||= {}
    @@migrations_hash[paths] ||= super
  end

  def migrations_paths
    @@migrations_paths ||= [File.join(Rails.root, "db/migrate")]
  end
end
ActiveRecord::Migrator.singleton_class.prepend(MigratorCache)
module Migrator
  # pending migrations whose #runnable? hook says they shouldn't run here
  def skipped_migrations
    pending_migrations(call_super: true).reject(&:runnable?)
  end

  def pending_migrations(call_super: false)
    return super() if call_super
    super().select(&:runnable?)
  end

  def runnable
    super.select(&:runnable?)
  end

  def execute_migration_in_transaction(migration, direct)
    old_in_migration, ActiveRecord::Base.in_migration = ActiveRecord::Base.in_migration, true
    if defined?(Marginalia)
      # tag SQL query comments with the migration name while it runs
      old_migration_name, Marginalia::Comment.migration = Marginalia::Comment.migration, migration.name
    end
    # interactively confirm down-migrations outside of tests ('a' = all)
    if down? && !Rails.env.test? && !$confirmed_migrate_down
      require 'highline'
      if HighLine.new.ask("Revert migration #{migration.name} (#{migration.version}) ? [y/N/a] > ") !~ /^([ya])/i
        raise("Revert not confirmed")
      end
      $confirmed_migrate_down = true if $1.downcase == 'a'
    end

    super
  ensure
    ActiveRecord::Base.in_migration = old_in_migration
    Marginalia::Comment.migration = old_migration_name if defined?(Marginalia)
  end
end
ActiveRecord::Migrator.prepend(Migrator)

# also pick up migrations shipped inside plugin gems
ActiveRecord::Migrator.migrations_paths.concat Dir[Rails.root.join('gems', 'plugins', '*', 'db', 'migrate')]
ActiveRecord::Tasks::DatabaseTasks.migrations_paths = ActiveRecord::Migrator.migrations_paths
ActiveRecord::ConnectionAdapters::SchemaStatements.class_eval do
  # Returns the name of the FK from from_table to to_table, or nil.
  # The column defaults to the conventional "<to_table singular>_id".
  def find_foreign_key(from_table, to_table, column: nil)
    column ||= "#{to_table.to_s.singularize}_id"
    foreign_keys(from_table).find do |key|
      key.to_table == to_table.to_s && key.column == column.to_s
    end&.name
  end

  # Renames a constraint and/or toggles its DEFERRABLE option.
  def alter_constraint(table, constraint, new_name: nil, deferrable: nil)
    raise ArgumentError, "must specify deferrable or a new name" if new_name.nil? && deferrable.nil?
    # can't rename and alter options in the same statement, so do the rename first
    if new_name && new_name != constraint
      execute("ALTER TABLE #{quote_table_name(table)}
          RENAME CONSTRAINT #{quote_column_name(constraint)} TO #{quote_column_name(new_name)}")
      constraint = new_name
    end
    unless deferrable.nil?
      options = deferrable ? "DEFERRABLE" : "NOT DEFERRABLE"
      execute("ALTER TABLE #{quote_table_name(table)}
          ALTER CONSTRAINT #{quote_column_name(constraint)} #{options}")
    end
  end

  # Finds the FK matching options, preferring one on the conventionally-named column.
  def foreign_key_for(from_table, **options)
    return unless supports_foreign_keys?
    fks = foreign_keys(from_table).select { |fk| fk.defined_for?(options) }
    # prefer a FK on a column named after the table
    if options[:to_table]
      column = foreign_key_column_for(options[:to_table])
      return fks.find { |fk| fk.column == column } || fks.first
    end
    fks.first
  end

  # Like stock remove_foreign_key, but supports if_exists: true.
  def remove_foreign_key(from_table, to_table = nil, **options)
    return unless supports_foreign_keys?

    if options.delete(:if_exists)
      fk_name_to_delete = foreign_key_for(from_table, to_table: to_table, **options)&.name
      return if fk_name_to_delete.nil?
    else
      fk_name_to_delete = foreign_key_for!(from_table, to_table: to_table, **options).name
    end

    at = create_alter_table from_table
    at.drop_foreign_key fk_name_to_delete

    execute schema_creation.accept(at)
  end

  # Makes (column_name, pk) a NOT NULL unique index and uses it as the
  # table's replica identity (for logical replication); backfills NULLs first.
  def add_replica_identity(model_name, column_name, default_value)
    klass = model_name.constantize
    if columns(klass.table_name).find { |c| c.name == column_name.to_s }.null
      DataFixup::BackfillNulls.run(klass, column_name, default_value: default_value)
    end
    change_column_null klass.table_name, column_name, false
    primary_column = klass.primary_key
    index_name = "index_#{klass.table_name}_replica_identity"
    add_index klass.table_name, [column_name, primary_column], name: index_name, algorithm: :concurrently, unique: true, if_not_exists: true
    set_replica_identity klass.table_name, index_name
  end

  # Reverts add_replica_identity back to the default (primary key) identity.
  def remove_replica_identity(model_name)
    klass = model_name.constantize
    set_replica_identity klass.table_name, :default
    remove_index klass.table_name, name: "index_#{klass.table_name}_replica_identity", if_exists: true
  end
end
# yes, various versions of rails supports various if_exists/if_not_exists options,
# but _none_ of them (as of writing) will invert them on reversion. Some will
# purposely strip the option, but most don't do anything.
#
# i.e. add_x(..., if_not_exists: true) reverts to remove_x(..., if_exists: true)
# and vice versa.
module ExistenceInversions
  %w{index foreign_key column}.each do |type|
    # these methods purposely pull the flag from the incoming args,
    # and assign to the outgoing args, not relying on it getting
    # passed through. and sometimes they even modify args.
    class_eval <<-RUBY, __FILE__, __LINE__ + 1
      def invert_add_#{type}(args)
        orig_args = args.dup
        result = super
        if orig_args.last.is_a?(Hash) && orig_args.last[:if_not_exists]
          result[1] << {} unless result[1].last.is_a?(Hash)
          result[1].last[:if_exists] = orig_args.last[:if_not_exists]
          result[1].last.delete(:if_not_exists)
        end
        result
      end

      def invert_remove_#{type}(args)
        orig_args = args.dup
        result = super
        if orig_args.last.is_a?(Hash) && orig_args.last[:if_exists]
          result[1] << {} unless result[1].last.is_a?(Hash)
          result[1].last[:if_not_exists] = orig_args.last[:if_exists]
          result[1].last.delete(:if_exists)
        end
        result
      end
    RUBY
  end
end

ActiveRecord::Migration::CommandRecorder.prepend(ExistenceInversions)
ActiveRecord::Associations::CollectionAssociation.class_eval do
  # CollectionAssociation implements uniq for :uniq option, in its
  # own special way. re-implement, but as a relation
  def distinct
    scope.distinct
  end
end
if CANVAS_RAILS6_0
  module UnscopeCallbacks
    def run_callbacks(*args)
      # in rails 6.1, we can get rid of this entire monkeypatch
      #
      # don't allocate an unscoped scope if there are no callbacks to worry
      # about, as is the common case, especially for :find or :initialize,
      # which are also the most frequently run
      if __callbacks[args.first.to_sym].empty?
        super
      else
        scope = self.class.current_scope&.clone || self.class.default_scoped
        scope = scope.klass.unscoped
        scope.scoping { super }
      end
    end
  end

  ActiveRecord::Base.send(:include, UnscopeCallbacks)
end
# Disable all of Rails' dynamic matchers except find_by_*; anything else
# that would have matched returns nil (so it raises NoMethodError).
module MatchWithDiscard
  def match(model, name)
    result = super
    return result if !result || result.is_a?(ActiveRecord::DynamicMatchers::FindBy)

    nil
  end
end
ActiveRecord::DynamicMatchers::Method.singleton_class.prepend(MatchWithDiscard)
# see https://github.com/rails/rails/issues/18659
class AttributesDefiner
  # defines attribute methods when loaded through Marshal
  def initialize(klass)
    @klass = klass
  end

  # only the class itself needs to be serialized
  def marshal_dump
    @klass
  end

  # on load, make sure the class's attribute methods exist before storing it
  def marshal_load(loaded_klass)
    loaded_klass.define_attribute_methods
    @klass = loaded_klass
  end
end
# stashes an AttributesDefiner in every record, so that un-marshalling a
# record forces its class's attribute methods to be defined first
module DefineAttributeMethods
  def init_internals
    @define_attributes_helper = AttributesDefiner.new(self.class)
    super
  end
end
ActiveRecord::Base.include(DefineAttributeMethods)
module SkipTouchCallbacks
  module Base
    # Suppresses belongs_to touch: callbacks for the named association for
    # the duration of the block. Re-entrant: a nested call for the same name
    # just yields without clearing the flag early.
    def skip_touch_callbacks(name)
      @skip_touch_callbacks ||= Set.new
      if @skip_touch_callbacks.include?(name)
        yield
      else
        @skip_touch_callbacks << name
        yield
        @skip_touch_callbacks.delete(name)
      end
    end

    # true if skipped on this class or any superclass below ActiveRecord::Base
    def touch_callbacks_skipped?(name)
      (@skip_touch_callbacks && @skip_touch_callbacks.include?(name)) ||
        (self.superclass < ActiveRecord::Base && self.superclass.touch_callbacks_skipped?(name))
    end
  end

  module BelongsTo
    def touch_record(o, _changes, _foreign_key, name, *)
      return if o.class.touch_callbacks_skipped?(name)
      super
    end
  end
end
ActiveRecord::Base.singleton_class.include(SkipTouchCallbacks::Base)
ActiveRecord::Associations::Builder::BelongsTo.singleton_class.prepend(SkipTouchCallbacks::BelongsTo)
module ReadonlyCloning
  def calculate_changes_from_defaults
    # skip the default-change calculation (which can be slow with serialized
    # columns) when making a readonly clone; just dup changed_attributes so
    # changes to the clone can't dirty the original
    return super unless @readonly_clone

    @changed_attributes = @changed_attributes.dup if @changed_attributes
  end
end
# hook the readonly-clone shortcut into all models
ActiveRecord::Base.prepend(ReadonlyCloning)
module DupArraysInMutationTracker
  # setting a serialized attribute to an array of hashes shouldn't change all the hashes to indifferent access
  # when the array gets stored in the indifferent access hash inside the mutation tracker
  # not that it really matters too much but having some consistency is nice
  def change_to_attribute(*args)
    super.tap do |change|
      next unless change

      new_value = change[1]
      change[1] = new_value.dup if new_value.is_a?(Array)
    end
  end
end
# keep arrays stored in the mutation tracker independent of the originals
ActiveModel::AttributeMutationTracker.prepend(DupArraysInMutationTracker)
# lets a generated migration keep "today's" number even if migrations with
# later (out-of-sequence) timestamps already exist in the tree
module IgnoreOutOfSequenceMigrationDates
  def current_migration_number(dirname)
    migration_lookup_at(dirname).map do |file|
      digits = File.basename(file).split("_").first
      # ignore non-14-digit (non-timestamp) version numbers
      next if ActiveRecord::Base.timestamped_migrations && digits.length != 14
      digits.to_i
    end.compact.max.to_i
  end
end

# Thor doesn't call `super` in its `inherited` method, so hook in so that we can hook in later :)
Thor::Group.singleton_class.prepend(Autoextend::ClassMethods)
Autoextend.hook(:"ActiveRecord::Generators::MigrationGenerator",
                IgnoreOutOfSequenceMigrationDates,
                singleton: true,
                method: :prepend,
                optional: true)
# always stamp new migrations with the current UTC time, ignoring the
# passed-in (possibly out-of-sequence) number
module AlwaysUseMigrationDates
  def next_migration_number(number)
    if ActiveRecord::Base.timestamped_migrations
      Time.now.utc.strftime("%Y%m%d%H%M%S")
    else
      SchemaMigration.normalize_migration_number(number)
    end
  end
end
ActiveRecord::Migration.prepend(AlwaysUseMigrationDates)
# adds EXPLAIN ANALYZE support to Relation#explain
module ExplainAnalyze
  def exec_explain(queries, analyze: false) # :nodoc:
    str = queries.map do |sql, binds|
      msg = "EXPLAIN #{"ANALYZE " if analyze}for: #{sql}"
      unless binds.empty?
        msg << " "
        msg << binds.map { |attr| render_bind(attr) }.inspect
      end
      msg << "\n"
      msg << connection.explain(sql, binds, analyze: analyze)
    end.join("\n")

    # Overriding inspect to be more human readable, especially in the console.
    def str.inspect
      self
    end

    str
  end

  def explain(analyze: false)
    #TODO: Fix for binds.
    exec_explain(collecting_queries_for_explain do
      if block_given?
        yield
      else
        # fold in switchman's override
        self.activate { |relation| relation.send(:exec_queries) }
      end
    end, analyze: analyze)
  end
end
ActiveRecord::Relation.prepend(ExplainAnalyze)
# fake Rails into grabbing correct column information for a table rename in-progress
module TableRename
  RENAMES = { 'authentication_providers' => 'account_authorization_configs' }.freeze

  def columns(table_name)
    # until the rename has actually happened, read columns from the old table
    if (old_name = RENAMES[table_name])
      table_name = old_name if data_source_exists?(old_name)
    end
    super
  end
end
# #inspect/#pretty_print memoize an inspection filter in @inspection_filter;
# nil it back out afterwards so the record stays marshallable.
module DefeatInspectionFilterMarshalling
  def inspect
    super.tap { @inspection_filter = nil }
  end

  def pretty_print(printer)
    super
    @inspection_filter = nil
  end
end
ActiveRecord::ConnectionAdapters::SchemaCache.prepend(TableRename)
ActiveRecord::Base.prepend(DefeatInspectionFilterMarshalling)
# wire up CacheRegister's model- and relation-level cache key tracking
ActiveRecord::Base.prepend(ActiveRecord::CacheRegister::Base)
ActiveRecord::Base.singleton_class.prepend(ActiveRecord::CacheRegister::Base::ClassMethods)
ActiveRecord::Relation.prepend(ActiveRecord::CacheRegister::Relation)
# see https://github.com/rails/rails/issues/37745
module DontExplicitlyNameColumnsBecauseOfIgnores
  def build_select(arel)
    if select_values.any?
      arel.project(*arel_columns(select_values.uniq))
    elsif !from_clause.value && klass.ignored_columns.any? && !(klass.ignored_columns & klass.column_names).empty?
      # an ignored column actually exists in the table: enumerate the
      # non-ignored columns explicitly instead of SELECT *
      arel.project(*klass.column_names.map { |field| arel_attribute(field) })
    else
      arel.project(table[Arel.star])
    end
  end
end
ActiveRecord::Relation.prepend(DontExplicitlyNameColumnsBecauseOfIgnores)
# after_transaction_commit callbacks run against the shards that were active
# when they were registered, not whatever is active at commit time
module PreserveShardAfterTransaction
  def after_transaction_commit(&block)
    shards = Shard.send(:active_shards)
    shards[:delayed_jobs] = Shard.current.delayed_jobs_shard if ::ActiveRecord::Migration.open_migrations.positive?
    super { Shard.activate(shards, &block) }
  end
end
ActiveRecord::ConnectionAdapters::Transaction.prepend(PreserveShardAfterTransaction)
# tracks how long a connection has been open, so the pool can retire old ones
module ConnectionWithMaxRuntime
  def initialize(*)
    super
    @created_at = Concurrent.monotonic_time
  end

  # seconds since this connection was created (monotonic clock)
  def runtime
    Concurrent.monotonic_time - @created_at
  end
end

ActiveRecord::ConnectionAdapters::AbstractAdapter.prepend(ConnectionWithMaxRuntime)
module RestoreConnectionConnectionPool
  # Re-adopts conn as the calling thread's cached connection, checking in any
  # connection the thread happened to check out in the meantime.
  def restore_connection(conn)
    synchronize do
      adopt_connection(conn)
      # check if a new connection was checked out in the meantime, and check it back in
      if (old_conn = @thread_cached_conns[connection_cache_key(current_thread)]) && old_conn != conn
        # this is just the necessary parts of #checkin
        old_conn.lock.synchronize do
          old_conn._run_checkin_callbacks do
            old_conn.expire
          end

          @available.add old_conn
        end
      end
      @thread_cached_conns[connection_cache_key(current_thread)] = conn
    end
  end
end

ActiveRecord::ConnectionAdapters::ConnectionPool.prepend(RestoreConnectionConnectionPool)
# retires pooled connections that have been open longer than the configured
# :max_runtime (seconds) instead of reusing them indefinitely
module MaxRuntimeConnectionPool
  def max_runtime
    # TODO: Rails 6.1 uses a PoolConfig object instead
    if CANVAS_RAILS6_0
      @spec.config[:max_runtime]
    else
      db_config.configuration_hash[:max_runtime]
    end
  end

  # drop over-age connections at checkout time until a young one is found
  def acquire_connection(*)
    loop do
      conn = super
      return conn unless max_runtime && conn.runtime >= max_runtime

      @connections.delete(conn)
      conn.disconnect!
    end
  end

  # over-age connections are destroyed at checkin instead of being returned
  def checkin(conn)
    return super unless max_runtime && conn.runtime >= max_runtime

    conn.lock.synchronize do
      synchronize do
        remove_connection_from_thread_cache conn

        @connections.delete(conn)
        conn.disconnect!
      end
    end
  end

  # also purge idle over-age connections during periodic pool maintenance
  def flush(*)
    super
    return unless max_runtime

    old_connections = synchronize do
      # TODO: Rails 6.1 adds a `discarded?` method instead of checking this directly
      return unless @connections
      @connections.select do |conn|
        !conn.in_use? && conn.runtime >= max_runtime
      end.each do |conn|
        conn.lease
        @available.delete conn
        @connections.delete conn
      end
    end

    old_connections.each(&:disconnect!)
  end
end

ActiveRecord::ConnectionAdapters::ConnectionPool.prepend(MaxRuntimeConnectionPool)
Rails.application.config.after_initialize do
  ActiveSupport.on_load(:active_record) do
    # warm the schema cache from MultiCache (if populated) so each process
    # doesn't have to re-query the schema at boot
    cache = MultiCache.fetch("schema_cache")
    next if cache.nil?

    connection_pool.set_schema_cache(cache)
    LoadAccount.schema_cache_loaded!
  end
end
Don't allocate an unscoped scope when there are no callbacks to worry about,
as is the common case — especially for :find and :initialize, which are also
the most frequently run callback kinds.
Change-Id: Icbf4eff99914b2dda685604829ee8c31bffdd355
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/270032
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Jacob Burroughs <8ecea6e385af5cf9f53123f5ca17fb5fd6a6d4b2@instructure.com>
QA-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
Product-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
# frozen_string_literal: true
#
# Copyright (C) 2011 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require 'active_support/callbacks/suspension'
class ActiveRecord::Base
# microsecond precision in cache keys, so rapid successive touches still bust caches
self.cache_timestamp_format = :usec

# several helpers below (and callers elsewhere) write attributes directly
public :write_attribute
class << self
  delegate :distinct_on, :find_ids_in_batches, :explain, to: :all

  # class-level convenience for Relation#find_ids_in_ranges; defaults to
  # loose (min-only) mode
  def find_ids_in_ranges(opts={}, &block)
    opts.reverse_merge!(:loose => true)
    all.find_ids_in_ranges(opts, &block)
  end

  attr_accessor :in_migration

  # determines if someone started a transaction in addition to the spec fixture transaction
  # impossible to just count open transactions, cause by default it won't nest a transaction
  # unless specifically requested
  def in_transaction_in_test?
    return false unless Rails.env.test?
    stacktrace = caller

    # find the caller-stack index of each marker method (nil if absent)
    transaction_index, wrap_index, after_index = [
      ActiveRecord::ConnectionAdapters::DatabaseStatements.instance_method(:transaction),
      defined?(SpecTransactionWrapper) && SpecTransactionWrapper.method(:wrap_block_in_transaction),
      AfterTransactionCommit::Transaction.instance_method(:commit_records)
    ].map do |method|
      if method
        regex = /\A#{Regexp.escape(method.source_location.first)}:\d+:in `#{Regexp.escape(method.name)}'\z/.freeze
        stacktrace.index{|s| s =~ regex}
      end
    end

    if transaction_index
      # we wrap a transaction around controller actions, so try to see if this call came from that
      if wrap_index && (transaction_index..wrap_index).all?{|i| stacktrace[i].match?(/transaction|synchronize/)}
        false
      else
        # check if this is being run through an after_transaction_commit since the last transaction
        !(after_index && after_index < transaction_index)
      end
    else
      false
    end
  end

  def default_scope(*)
    raise "please don't ever use default_scope. it may seem like a great solution, but I promise, it isn't"
  end

  def vacuum
    GuardRail.activate(:deploy) do
      connection.vacuum(table_name, analyze: true)
    end
  end
end
# Reads attr_name, first writing default_value into it when it is nil/false.
def read_or_initialize_attribute(attr_name, default_value)
  # have to read the attribute again because serialized attributes in Rails 4.2 get duped
  read_attribute(attr_name) || (write_attribute(attr_name, default_value) && read_attribute(attr_name))
end
# make #clone behave like #dup (AR's #clone shares attribute state)
alias :clone :dup
# Serializes the record, HTML-escaping any fields listed in
# options[:user_content] and optionally nesting under the model's root key.
def serializable_hash(options = nil)
  result = super
  if result.present?
    result = result.with_indifferent_access
    # guard: options defaults to nil (e.g. a bare serializable_hash call),
    # which previously raised NoMethodError on nil[:user_content]
    user_content_fields = (options && options[:user_content]) || []
    result.keys.each do |name|
      if user_content_fields.include?(name.to_s)
        result[name] = UserContent.escape(result[name])
      end
    end
  end
  if options && options[:include_root]
    result = {self.class.base_class.model_name.element => result}
  end
  result
end
# See ActiveModel#serializable_add_includes
# propagates :include_root down into nested association serialization
def serializable_add_includes(options = {}, &block)
  super(options) do |association, records, opts|
    yield association, records, opts.reverse_merge(:include_root => options[:include_root])
  end
end
# Identifier used for feeds; prefers uuid when available (the rescue
# modifier deliberately falls back to the database id on any error).
def feed_code
  id = self.uuid rescue self.id
  "#{self.class.reflection_type_name}_#{id}"
end
# Every concrete AR model in the app (plus Version), excluding tableless and
# abstract classes. Memoized for the life of the process.
def self.all_models
  return @all_models if @all_models.present?
  @all_models = (ActiveRecord::Base.models_from_files +
    [Version]).compact.uniq.reject { |model|
    (model < Tableless) ||
      model.abstract_class?
  }
end
# Force-loads every model file (app plus vendored/gem plugins) and returns
# all ActiveRecord::Base descendants. Memoized.
def self.models_from_files
  @from_files ||= begin
    Dir[
      "#{Rails.root}/app/models/**/*.rb",
      "#{Rails.root}/vendor/plugins/*/app/models/**/*.rb",
      "#{Rails.root}/gems/plugins/*/app/models/**/*.rb",
    ].sort.each do |file|
      # skip anything autoloading has already pulled in
      next if const_defined?(file.sub(%r{.*/app/models/(.*)\.rb$}, '\1').camelize)
      ActiveSupport::Dependencies.require_or_load(file)
    end
    ActiveRecord::Base.descendants
  end
end
# size limits used when validating text/string column values
def self.maximum_text_length
  @maximum_text_length ||= 64.kilobytes-1
end

def self.maximum_long_text_length
  @maximum_long_text_length ||= 500.kilobytes-1
end

def self.maximum_string_length
  255
end
def self.find_by_asset_string(string, asset_types=nil)
  find_all_by_asset_string([string], asset_types)[0]
end

# Looks up records for asset strings like "course_5". asset_types, if given,
# whitelists the class-name portion. Per-class lookup failures are swallowed.
def self.find_all_by_asset_string(strings, asset_types=nil)
  # TODO: start checking asset_types, if provided
  strings.map{ |str| parse_asset_string(str) }.group_by(&:first).inject([]) do |result, (klass, id_pairs)|
    next result if asset_types && !asset_types.include?(klass)
    result.concat((klass.constantize.where(id: id_pairs.map(&:last)).to_a rescue []))
  end
end
# takes an asset string list, like "course_5,user_7" and turns it into an
# array of [class_name, id] like [ ["Course", 5], ["User", 7] ]
def self.parse_asset_string_list(asset_string_list)
  asset_string_list.to_s.split(",").map { |str| parse_asset_string(str) }
end
# "course_5" -> ["Course", 5]; the id is nil when absent
def self.parse_asset_string(str)
  code = asset_string_components(str)
  [convert_class_name(code.first), code.last.try(:to_i)]
end
# Splits "course_123" into ["course", "123"]; the id component is nil when
# blank (relies on ActiveSupport's #presence).
def self.asset_string_components(str)
  *type_parts, id = str.split('_', -1)
  [type_parts.join('_'), id&.presence]
end
# "quizzes:quiz" -> "Quizzes::Quiz": namespaces camelized, class classified
def self.convert_class_name(str)
  namespaces = str.split(':')
  class_name = namespaces.pop
  (namespaces.map(&:camelize) + [class_name.try(:classify)]).join('::')
end
def self.asset_string(id)
  "#{self.reflection_type_name}_#{id}"
end

# memoized per shard, since the local id differs across shards
def asset_string
  @asset_string ||= {}
  @asset_string[Shard.current] ||= self.class.asset_string(id)
end
# asset string built from the cross-shard global id
def global_asset_string
  @global_asset_string ||= "#{self.class.reflection_type_name}_#{global_id}"
end
# little helper to keep checks concise and avoid a db lookup
def has_asset?(asset, field = :context)
  asset&.id == send("#{field}_id") && asset.class.base_class.name == send("#{field}_type")
end

# e.g. "course_5" for the record's context (or other polymorphic field)
def context_string(field = :context)
  send("#{field}_type").underscore + "_" + send("#{field}_id").to_s if send("#{field}_type")
end
def self.asset_string_backcompat_module
  @asset_string_backcompat_module ||= Module.new.tap { |m| prepend(m) }
end

# For columns migrating from a string asset-string column to real
# <name>_id/<name>_type columns: lazily backfills the new columns from the
# old asset string the first time either new column is read.
def self.define_asset_string_backcompat_method(string_version_name, association_version_name = string_version_name, method = nil)
  # just chain to the two methods
  unless method
    # this is weird, but gets the instance methods defined so they can be chained
    begin
      self.new.send("#{association_version_name}_id")
    rescue
      # the db doesn't exist yet; no need to bother with backcompat methods anyway
      return
    end
    define_asset_string_backcompat_method(string_version_name, association_version_name, 'id')
    define_asset_string_backcompat_method(string_version_name, association_version_name, 'type')
    return
  end

  asset_string_backcompat_module.class_eval <<-CODE, __FILE__, __LINE__ + 1
    def #{association_version_name}_#{method}
      res = super
      if !res && #{string_version_name}.present?
        type, id = ActiveRecord::Base.parse_asset_string(#{string_version_name})
        write_attribute(:#{association_version_name}_type, type)
        write_attribute(:#{association_version_name}_id, id)
        res = super
      end
      res
    end
  CODE
end
# column names included in row exports (format is currently unused here)
def export_columns(format = nil)
  self.class.content_columns.map(&:name)
end

# the record's export_columns values, in order
def to_row(format = nil)
  export_columns(format).map { |c| self.send(c) }
end
# overridden by models that can act as a context (e.g. courses, accounts)
def is_a_context?
  false
end
# The context's short name, memoized on the instance and cached in
# Rails.cache by asset code; raises unless the record has a context.
def cached_context_short_name
  if self.respond_to?(:context)
    code = self.respond_to?(:context_code) ? self.context_code : self.context.asset_string
    @cached_context_name ||= Rails.cache.fetch(['short_name_lookup', code].cache_key) do
      # best effort: any lookup error caches an empty name
      self.context.short_name rescue ""
    end
  else
    raise "Can only call cached_context_short_name on items with a context"
  end
end
# globally (process-wide, across all models) disable/enable context touching
def self.skip_touch_context(skip=true)
  @@skip_touch_context = skip
end
# Saves the record without bumping its context's updated_at.
# Returns false (preserving this method's historical return value).
def save_without_touching_context
  @skip_touch_context = true
  self.save
  false
ensure
  # previously the flag leaked (stayed true) if save raised, silently
  # disabling context touching for all later saves on this instance
  @skip_touch_context = false
end
# Bumps the context's updated_at after commit, unless touching is disabled
# globally (@@skip_touch_context) or for this instance (@skip_touch_context).
def touch_context
  return if (@@skip_touch_context ||= false || @skip_touch_context ||= false)
  if self.respond_to?(:context_type) && self.respond_to?(:context_id) && self.context_type && self.context_id
    self.class.connection.after_transaction_commit do
      self.context_type.constantize.where(id: self.context_id).update_all(updated_at: Time.now.utc)
    end
  end
rescue
  # best effort: never let a touch failure break the save; just report it
  Canvas::Errors.capture_exception(:touch_context, $ERROR_INFO)
end
# Bumps the associated user's updated_at after commit. Best effort: returns
# true on success, or reports the error and returns false instead of raising.
def touch_user
  if self.respond_to?(:user_id) && self.user_id
    User.connection.after_transaction_commit do
      User.where(:id => self.user_id).update_all(:updated_at => Time.now.utc)
    end
  end
  true
rescue
  Canvas::Errors.capture_exception(:touch_user, $ERROR_INFO)
  false
end
# e.g. "courses/5", for building context-scoped URLs
def context_url_prefix
  "#{self.context_type.downcase.pluralize}/#{self.context_id}"
end
# Example:
#   obj.to_json(:permissions => {:user => u, :policies => [:read, :write, :update]})
def as_json(options = nil)
  options = options.try(:dup) || {}

  self.set_serialization_options if self.respond_to?(:set_serialization_options)

  # merge class- and instance-level serialization excludes into :except
  except = options.delete(:except) || []
  except = Array(except).dup
  except.concat(self.class.serialization_excludes) if self.class.respond_to?(:serialization_excludes)
  except.concat(self.serialization_excludes) if self.respond_to?(:serialization_excludes)
  except.uniq!

  # likewise for extra serialized methods
  methods = options.delete(:methods) || []
  methods = Array(methods).dup
  methods.concat(self.class.serialization_methods) if self.class.respond_to?(:serialization_methods)
  methods.concat(self.serialization_methods) if self.respond_to?(:serialization_methods)
  methods.uniq!

  options[:except] = except unless except.empty?
  options[:methods] = methods unless methods.empty?

  # We include a root in all the association json objects (if it's a
  # collection), which is different than the rails behavior of just including
  # the root in the base json object. Hence the hackies.
  #
  # We are in the process of migrating away from including the root in all our
  # json serializations at all. Once that's done, we can remove this and the
  # monkey patch to Serialzer, below.
  # ^hahahahahahaha
  unless options.key?(:include_root)
    options[:include_root] = true
  end

  hash = serializable_hash(options)

  # optionally filter attributes for the viewing user and attach a
  # "permissions" sub-hash computed from the requested policies
  if options[:permissions]
    obj_hash = options[:include_root] ? hash[self.class.base_class.model_name.element] : hash
    if self.respond_to?(:filter_attributes_for_user)
      self.filter_attributes_for_user(obj_hash, options[:permissions][:user], options[:permissions][:session])
    end
    unless options[:permissions][:include_permissions] == false
      permissions_hash = self.rights_status(options[:permissions][:user], options[:permissions][:session], *options[:permissions][:policies])
      if self.respond_to?(:serialize_permissions)
        permissions_hash = self.serialize_permissions(permissions_hash, options[:permissions][:user], options[:permissions][:session])
      end
      obj_hash["permissions"] = permissions_hash
    end
  end

  self.revert_from_serialization_options if self.respond_to?(:revert_from_serialization_options)

  hash.with_indifferent_access
end
# Returns the receiver's class name as a String.
def class_name
kls = self.class
kls.to_s
end
# Instance-level access to Rails' protected class-side SQL sanitizer,
# so instances can sanitize raw condition fragments too.
def sanitize_sql(*conditions)
klass = self.class
klass.send(:sanitize_sql_for_conditions, *conditions)
end
# Underscored name of the STI base class (e.g. "assignment"), used for
# polymorphic *_type lookups.
def self.reflection_type_name
root = base_class
root.name.underscore
end
# Instance-level convenience wrapper around the class-level .wildcard builder.
def wildcard(*args)
owner = self.class
owner.wildcard(*args)
end
# Builds a parenthesized SQL fragment matching +value+ (the last non-hash
# argument) against one or more column expressions with LIKE.
# options: :type (:full/:left/:right wildcarding), :case_sensitive,
# :delimiter (wraps both the value and each column in the delimiter so only
# delimiter-bounded matches hit).
def self.wildcard(*args)
options = args.last.is_a?(Hash) ? args.pop : {}
options[:type] ||= :full
value = args.pop
if options[:delimiter]
# a delimiter search is only meaningful as a substring (full) match
options[:type] = :full
value = options[:delimiter] + value + options[:delimiter]
delimiter = connection.quote(options[:delimiter])
column_str = "#{delimiter} || %s || #{delimiter}"
args = args.map{ |a| column_str % a.to_s }
end
value = wildcard_pattern(value, options)
cols = args.map{ |col| like_condition(col, '?', !options[:case_sensitive]) }
# one bind value per column, OR'd together
sanitize_sql_array ["(" + cols.join(" OR ") + ")", *([value] * cols.size)]
end
# Turns +value+ into a LIKE pattern: escapes the LIKE metacharacters
# (backslash first so later escapes aren't doubled), downcases unless
# :case_sensitive, and adds '%' wildcards per :type (:full/:left/:right).
def self.wildcard_pattern(value, options = {})
pattern = value.to_s
pattern = pattern.downcase unless options[:case_sensitive]
pattern = pattern.gsub('\\', '\\\\\\\\').gsub('%', '\\%').gsub('_', '\\_')
pattern = "%#{pattern}" unless options[:type] == :right
pattern = "#{pattern}%" unless options[:type] == :left
pattern
end
# Like .wildcard, but concatenates the given columns (NULL-safe, lowercased)
# into a single string and matches the pattern once against the whole chain.
def self.coalesced_wildcard(*args)
value = args.pop
pattern = wildcard_pattern(value)
chained = coalesce_chain(args)
sanitize_sql_array ["(#{like_condition(chained, '?', false)})", pattern]
end
# Joins NULL-safe, lowercased column expressions with ' ' separators into a
# single parenthesized SQL string expression.
def self.coalesce_chain(cols)
parts = cols.map { |col| coalesce_clause(col) }
"(#{parts.join(" || ' ' || ")})"
end
# NULL-safe lowercase expression for a single column.
def self.coalesce_clause(column)
format("COALESCE(LOWER(%s), '')", column)
end
# Builds "<value> LIKE <pattern>", lowercasing the left-hand side unless
# +downcase+ is false.
def self.like_condition(value, pattern = '?', downcase = true)
lhs = downcase ? "LOWER(#{value})" : value
"#{lhs} LIKE #{pattern}"
end
# Returns an Arel SQL expression that sorts +col+ in a platform-consistent,
# unicode-aware order. On PostgreSQL it prefers pg_collkey's collkey(), and
# otherwise falls back to a bytea cast; on other adapters the column is used
# as-is.
def self.best_unicode_collation_key(col)
val = if ActiveRecord::Base.configurations[Rails.env]['adapter'] == 'postgresql'
# For PostgreSQL, we can't trust a simple LOWER(column), with any collation, since
# Postgres just defers to the C library which is different for each platform. The best
# choice is the collkey function from pg_collkey which uses ICU to get a full unicode sort.
# If that extension isn't around, casting to a bytea sucks for international characters,
# but at least it's consistent, and orders commas before letters so you don't end up with
# Johnson, Bob sorting before Johns, Jimmy
unless @collkey&.key?(Shard.current.database_server.id)
@collkey ||= {}
# memoized per database server; nil schema means pg_collkey isn't installed
@collkey[Shard.current.database_server.id] = connection.extension(:pg_collkey)&.schema
end
# NOTE(review): the trailing "&& false" makes `collation` always falsy, so this
# branch is permanently disabled — presumably a deliberate kill-switch for
# pg12 ICU collations; confirm before re-enabling or removing.
if (collation = Canvas::ICU.choose_pg12_collation(connection.icu_collations) && false)
"(#{col} COLLATE #{collation})"
elsif (schema = @collkey[Shard.current.database_server.id])
# The collation level of 3 is the default, but is explicitly specified here and means that
# case, accents and base characters are all taken into account when creating a collation key
# for a string - more at https://pgxn.org/dist/pg_collkey/0.5.1/
# if you change these arguments, you need to rebuild all db indexes that use them,
# and you should also match the settings with Canvas::ICU::Collator and natcompare.js
"#{schema}.collkey(#{col}, '#{Canvas::ICU.locale_for_collation}', false, 3, true)"
else
"CAST(LOWER(replace(#{col}, '\\', '\\\\')) AS bytea)"
end
else
col
end
Arel.sql(val)
end
# Counts rows per calendar day (in the current Time.zone) over a window of
# :num_days ending at :max_date. Returns a Hash of Date => count.
# options: :column (timestamp column, default created_at), :max_date,
# :min_date, :num_days.
def self.count_by_date(options = {})
column = options[:column] || "created_at"
max_date = (options[:max_date] || Time.zone.now).midnight
num_days = options[:num_days] || 20
min_date = (options[:min_date] || max_date.advance(:days => -(num_days-1))).midnight
offset = max_date.utc_offset
# bucket timestamps by local date on the database side
expression = "((#{column} || '-00')::TIMESTAMPTZ AT TIME ZONE '#{Time.zone.tzinfo.name}')::DATE"
result = where(
"#{column} >= ? AND #{column} < ?",
min_date,
max_date.advance(:days => 1)
).
group(expression).
order(Arel.sql(expression)).
count
# some adapters already return Date keys; otherwise parse the strings
return result if result.keys.first.is_a?(Date)
Hash[result.map { |date, count|
[Time.zone.parse(date).to_date, count]
}]
end
# Builds a SQL CASE expression ranking rows by which group of +ary+ the
# value of +col+ falls into (0-based); unmatched rows rank last (ary.size).
def self.rank_sql(ary, col)
clauses = ary.each_with_index.map do |values, i|
quoted = Array(values).map { |value| connection.quote(value) }.join(', ')
"WHEN #{col} IN (#{quoted}) THEN #{i} "
end
Arel.sql("CASE #{clauses.join}ELSE #{ary.size} END")
end
# Builds a lookup Hash mapping each value in +ary+ (values may be grouped in
# sub-arrays) to its 1-based rank; unknown keys default to ary.size + 1.
def self.rank_hash(ary)
ranks = Hash.new(ary.size + 1)
ary.each_with_index do |values, i|
Array(values).each { |value| ranks[value] = i + 1 }
end
ranks
end
# Returns the sorted distinct values of +column+. On PostgreSQL this uses a
# recursive "loose index scan" CTE, which is far cheaper than DISTINCT when
# the number of distinct values is small relative to the table size.
# include_nil: prepend a NULL entry if any row has a NULL in the column.
def self.distinct_values(column, include_nil: false)
column = column.to_s
result = if ActiveRecord::Base.configurations[Rails.env]['adapter'] == 'postgresql'
sql = +''
sql << "SELECT NULL AS #{column} WHERE EXISTS (SELECT * FROM #{quoted_table_name} WHERE #{column} IS NULL) UNION ALL (" if include_nil
sql << <<~SQL
WITH RECURSIVE t AS (
SELECT MIN(#{column}) AS #{column} FROM #{quoted_table_name}
UNION ALL
SELECT (SELECT MIN(#{column}) FROM #{quoted_table_name} WHERE #{column} > t.#{column})
FROM t
WHERE t.#{column} IS NOT NULL
)
SELECT #{column} FROM t WHERE #{column} IS NOT NULL
SQL
sql << ")" if include_nil
find_by_sql(sql)
else
conditions = "#{column} IS NOT NULL" unless include_nil
find(:all, :select => "DISTINCT #{column}", :conditions => conditions, :order => column)
end
result.map(&column.to_sym)
end
# Returns an Arel ordering fragment placing NULLs :first or :last for
# +column+. direction is nil, :asc, or :desc. PostgreSQL gets native
# NULLS FIRST/LAST (only emitted when it differs from the default);
# other adapters get an "IS [NOT] NULL" pre-sort key.
def self.nulls(first_or_last, column, direction = nil)
dir_sql = direction.to_s.upcase
if connection.adapter_name == 'PostgreSQL'
nulls_sql =
if first_or_last == :first && direction != :desc
" NULLS FIRST"
elsif first_or_last == :last && direction == :desc
" NULLS LAST"
end
Arel.sql("#{column} #{dir_sql}#{nulls_sql}".strip)
else
Arel.sql("#{column} IS#{" NOT" unless first_or_last == :last} NULL, #{column} #{dir_sql}".strip)
end
end
# set up class-specific getters/setters for a polymorphic association, e.g.
# belongs_to :context, polymorphic: [:course, :account]
#
# Wraps Rails' belongs_to: requires polymorphic associations to enumerate
# their valid types (unless exhaustive: false), wires up per-type accessors
# via add_polymorph_methods, and installs caching shims for developer_key /
# root_account / account associations.
def self.belongs_to(name, scope = nil, **options)
if options[:polymorphic] == true
raise "Please pass an array of valid types for polymorphic associations. Use exhaustive: false if you really don't want to validate them"
end
# these two options are ours, not Rails' — strip before calling super
polymorphic_prefix = options.delete(:polymorphic_prefix)
exhaustive = options.delete(:exhaustive)
reflection = super[name.to_s]
if name.to_s == 'developer_key'
# swap in a caching association class for developer keys
reflection.instance_eval do
def association_class
DeveloperKey::CacheOnAssociation
end
end
end
include Canvas::RootAccountCacher if name.to_s == 'root_account'
Canvas::AccountCacher.apply_to_reflections(self)
if reflection.options[:polymorphic].is_a?(Array) ||
reflection.options[:polymorphic].is_a?(Hash)
# stash our stripped options on the reflection for add_polymorph_methods
reflection.options[:exhaustive] = exhaustive
reflection.options[:polymorphic_prefix] = polymorphic_prefix
add_polymorph_methods(reflection)
end
reflection
end
# Normalizes a polymorphic type list into [association_name, class_name]
# pairs; hash entries supply explicit class names, bare symbols are
# camelized.
def self.canonicalize_polymorph_list(list)
Array.wrap(list).flat_map do |name|
name.is_a?(Hash) ? name.to_a : [[name, name.to_s.camelize]]
end
end
# Generates per-type accessors for a polymorphic belongs_to: for
# `belongs_to :context, polymorphic: [:course]` you get #course / #course=
# that only read/write when the foreign type matches, plus (unless
# exhaustive: false) a type-checked writer and inclusion validation on the
# *_type column.
def self.add_polymorph_methods(reflection)
unless @polymorph_module
# one anonymous module per class holds all generated methods, so they
# can still be overridden by the class body
@polymorph_module = Module.new
include(@polymorph_module)
end
specifics = canonicalize_polymorph_list(reflection.options[:polymorphic])
unless reflection.options[:exhaustive] == false
specific_classes = specifics.map(&:last).sort
validates reflection.foreign_type, inclusion: { in: specific_classes }, allow_nil: true
# strict writer: reject any record whose class isn't in the allowed list
@polymorph_module.class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{reflection.name}=(record)
if record && [#{specific_classes.join(', ')}].none? { |klass| record.is_a?(klass) }
message = "one of #{specific_classes.join(', ')} expected, got \#{record.class}"
raise ActiveRecord::AssociationTypeMismatch, message
end
super
end
RUBY
end
if reflection.options[:polymorphic_prefix] == true
prefix = "#{reflection.name}_"
elsif reflection.options[:polymorphic_prefix]
prefix = "#{reflection.options[:polymorphic_prefix]}_"
end
specifics.each do |(name, class_name)|
# ensure we capture this class's table name
table_name = self.table_name
# a real scoped belongs_to per specific type, sharing the foreign key
belongs_to :"#{prefix}#{name}", -> { where(table_name => { reflection.foreign_type => class_name }) },
foreign_key: reflection.foreign_key,
class_name: class_name
correct_type = "#{reflection.foreign_type} && self.class.send(:compute_type, #{reflection.foreign_type}) <= #{class_name}"
@polymorph_module.class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{prefix}#{name}
#{reflection.name} if #{correct_type}
end
def #{prefix}#{name}=(record)
# we don't want to unset it if it's currently some other type, i.e.
# foo.bar = Bar.new
# foo.baz = nil
# foo.bar.should_not be_nil
return if record.nil? && !(#{correct_type})
association(:#{prefix}#{name}).send(:raise_on_type_mismatch!, record) if record
self.#{reflection.name} = record
end
RUBY
end
end
# Runs the block in a savepoint, retrying on unique-constraint violations.
# Yields the 0-based attempt number; the final attempt runs on the primary
# and lets RecordNotUnique propagate.
def self.unique_constraint_retry(retries = 1)
# runs the block in a (possibly nested) transaction. if a unique constraint
# violation occurs, it will run it "retries" more times. the nested
# transaction (savepoint) ensures we don't mess up things for the outer
# transaction. useful for possible race conditions where we don't want to
# take a lock (e.g. when we create a submission).
retries.times do |retry_count|
begin
result = transaction(:requires_new => true) { uncached { yield(retry_count) } }
connection.clear_query_cache
return result
rescue ActiveRecord::RecordNotUnique
# deliberately swallowed: loop around and retry the block
end
end
# last attempt: no rescue, so a persistent violation surfaces to the caller
GuardRail.activate(:primary) do
result = transaction(:requires_new => true) { uncached { yield(retries) } }
connection.clear_query_cache
result
end
end
# Returns the primary database's current WAL LSN (write position), for later
# comparison in wait_for_replication. Refuses to run inside a transaction,
# since the LSN would be meaningless until commit.
def self.current_xlog_location
Shard.current(shard_category).database_server.unguard do
GuardRail.activate(:primary) do
if Rails.env.test? ? self.in_transaction_in_test? : connection.open_transactions > 0
raise "don't run current_xlog_location in a transaction"
else
connection.current_wal_lsn
end
end
end
end
# Blocks until the secondary (replica) has replayed WAL up to +start+
# (defaults to the primary's current position). Returns true once caught up,
# false if +timeout+ (seconds) elapses first; returns true immediately when
# there is no read-only secondary.
def self.wait_for_replication(start: nil, timeout: nil)
return true unless GuardRail.activate(:secondary) { connection.readonly? }
start ||= current_xlog_location
GuardRail.activate(:secondary) do
# positive == first value greater, negative == second value greater
start_time = Time.now.utc
while connection.wal_lsn_diff(start, :last_replay) >= 0
return false if timeout && Time.now.utc > start_time + timeout
sleep 0.1
end
end
true
end
# Bulk-inserts already-built AR objects in a single pass: serializes any
# serialized-type attributes, drops excluded columns, and hands the rows to
# the class's bulk_insert. All objects are assumed to be of the same class.
#
# excluded_columns: attribute names to omit; the sentinel 'primary_key' is
# replaced by the class's actual primary key column.
def self.bulk_insert_objects(objects, excluded_columns: ['primary_key'])
return if objects.empty?
# don't mutate the caller's array (the original code deleted/appended in place)
excluded_columns = excluded_columns.dup
excluded_columns << objects.first.class.primary_key if excluded_columns.delete('primary_key')
hashed_objects = objects.map do |object|
# pass column names individually: joining them into one comma-separated
# string meant nothing was excluded when more than one column was given
object.attributes.except(*excluded_columns).map do |(name, value)|
if (type = object.class.attribute_types[name]).is_a?(ActiveRecord::Type::Serialized)
value = type.serialize(value)
end
[name, value]
end.to_h
end
objects.first.class.bulk_insert(hashed_objects)
end
# Inserts an array of attribute hashes. Array-valued columns are serialized
# via their column cast type first; partitioned models get one insert batch
# per partition.
def self.bulk_insert(records)
return if records.empty?
# assumes all records share the shape of the first row
array_columns = records.first.select{|k, v| v.is_a?(Array)}.map(&:first)
array_columns.each do |column_name|
cast_type = connection.send(:lookup_cast_type_from_column, self.columns_hash[column_name.to_s])
records.each do |row|
row[column_name] = cast_type.serialize(row[column_name])
end
end
if self.respond_to?(:attrs_in_partition_groups)
# this model is partitioned, we need to send a separate
# insert statement for each partition represented
# in the input records
self.attrs_in_partition_groups(records) do |partition_name, partition_records|
transaction do
connection.bulk_insert(partition_name, partition_records)
end
end
else
transaction do
connection.bulk_insert(table_name, records)
end
end
end
include ActiveSupport::Callbacks::Suspension
# Touches (bumps updated_at on) every record of the model, walking the
# primary key in id ranges to keep each UPDATE bounded.
def self.touch_all_records
find_ids_in_ranges { |min_id, max_id| where(primary_key => min_id..max_id).touch_all }
end
# excludes shadow records: ids in (0, IDS_PER_SHARD] are native to the
# current shard, anything above is a cross-shard copy
scope :non_shadow, ->(key = primary_key) { where("#{key}<=? AND #{key}>?", Shard::IDS_PER_SHARD, 0) }
# skips validations, callbacks, and a transaction
# do _NOT_ improve in the future to handle validations and callbacks - make
# it a separate method or optional functionality. some callers explicitly
# rely on no callbacks or validations
#
# touch: when true, stamps updated_at (and created_at for new records)
# before writing. No-ops when the record has no pending changes.
def save_without_transaction(touch: true)
return unless changed?
self.updated_at = Time.now.utc if touch
if new_record?
self.created_at = updated_at if touch
# raw single-row INSERT via AR internals; marks the record persisted
self.id = self.class._insert_record(attributes_with_values(attribute_names_for_partial_writes))
@new_record = false
else
# update_columns bypasses callbacks/validations by design
update_columns(attributes_with_values(attribute_names_for_partial_writes))
end
changes_applied
end
# Runs the block in a transaction with a PostgreSQL statement_timeout
# (milliseconds; LOCAL so it resets on commit/rollback). A query killed by
# the timeout is re-raised as ActiveRecord::QueryTimeout.
def self.with_statement_timeout(timeout = 30_000)
raise ArgumentError.new("timeout must be an integer") unless timeout.is_a? Integer
transaction do
connection.execute "SET LOCAL statement_timeout = #{timeout}"
yield
rescue ActiveRecord::StatementInvalid => e
# only translate timeout cancellations; other statement errors pass through
raise ActiveRecord::QueryTimeout if e.cause.is_a? PG::QueryCanceled
raise e
end
end
end
module ActiveRecord
# Raised in place of the generic StatementInvalid when a query is cancelled
# by the statement_timeout set in with_statement_timeout.
class QueryTimeout < ActiveRecord::StatementInvalid; end
end
module UsefulFindInBatches
# Same as Rails' find_each, but forwards the extra batching kwargs
# (notably :strategy) through to find_in_batches.
def find_each(start: nil, finish: nil, **kwargs)
unless block_given?
return enum_for(:find_each, start: start, finish: finish, **kwargs) do
relation = self
apply_limits(relation, start, finish).size
end
end
find_in_batches(start: start, finish: finish, **kwargs) do |records|
records.each { |record| yield record }
end
end
# Same as Rails' find_in_batches, but forwards the extra batching kwargs
# (notably :strategy) through to in_batches; always loads records.
def find_in_batches(batch_size: 1000, start: nil, finish: nil, **kwargs)
unless block_given?
relation = self
return to_enum(:find_in_batches, start: start, finish: finish, batch_size: batch_size, **kwargs) do
# number of batches = ceil(total / batch_size)
(apply_limits(relation, start, finish).size - 1).div(batch_size) + 1
end
end
in_batches(of: batch_size, start: start, finish: finish, load: true, **kwargs) do |batch|
yield batch.to_a
end
end
# Dispatches to a strategy-specific batching implementation (:copy, :cursor,
# :temp_table, or the stock Rails :id walk). Without a block, returns a
# BatchEnumerator carrying the chosen strategy.
def in_batches(strategy: nil, start: nil, finish: nil, **kwargs, &block)
unless block_given?
return ActiveRecord::Batches::BatchEnumerator.new(strategy: strategy, start: start, relation: self, **kwargs)
end
strategy ||= infer_in_batches_strategy
if strategy == :id
raise ArgumentError, "GROUP BY is incompatible with :id batches strategy" unless group_values.empty?
# hand the plain primary-key walk back to Rails' own in_batches
return activate { |r| r.call_super(:in_batches, UsefulFindInBatches, start: start, finish: finish, **kwargs, &block) }
end
# the custom strategies don't understand :error_on_ignore
kwargs.delete(:error_on_ignore)
activate { |r| r.send("in_batches_with_#{strategy}", start: start, finish: finish, **kwargs, &block); nil }
end
# Picks the cheapest batching strategy the current relation/connection can
# support: COPY, then a server-side cursor, then a temp table, falling back
# to the plain :id walk.
def infer_in_batches_strategy
if in_batches_can_use_copy?
:copy
elsif in_batches_can_use_cursor?
:cursor
elsif in_batches_needs_temp_table?
:temp_table
else
:id
end
end
private
# COPY monopolizes the raw connection, so it's only usable outside
# transactions and migrations, and without eager loading.
def in_batches_can_use_copy?
connection.open_transactions.zero? &&
eager_load_values.empty? &&
!ActiveRecord::Base.in_migration
end
# A server-side cursor works when there's no eager loading and the
# connection is read-only (or we're on the secondary).
def in_batches_can_use_cursor?
return false unless eager_load_values.empty?
GuardRail.environment == :secondary || connection.readonly?
end
# Custom ordering/grouping/distinct or non-id selects can't be batched by
# walking the primary key, so a temp table is required.
def in_batches_needs_temp_table?
return true if order_values.any?
return true if group_values.any?
return true if select_values.to_s =~ /DISTINCT/i
return true if distinct_value
in_batches_select_values_necessitate_temp_table?
end
# True when an explicit select list omits every form of the primary key,
# meaning the id-walk strategies couldn't page through the results.
def in_batches_select_values_necessitate_temp_table?
return false if select_values.blank?
selects = select_values.flat_map { |sel| sel.to_s.split(",").map(&:strip) }
id_keys = [primary_key, "*", "#{table_name}.#{primary_key}", "#{table_name}.*"]
(id_keys & selects).empty?
end
# Batches via a server-side cursor: declares a cursor for the relation's SQL
# inside a transaction and FETCHes +of+ rows at a time. When load is false,
# only primary keys flow through the cursor.
def in_batches_with_cursor(of: 1000, start: nil, finish: nil, load: false)
klass.transaction do
relation = apply_limits(clone, start, finish)
relation.skip_query_cache!
unless load
relation = relation.except(:select).select(primary_key)
end
sql = relation.to_sql
# cursor name derived from the SQL so concurrent scopes don't collide
cursor = "#{table_name}_in_batches_cursor_#{sql.hash.abs.to_s(36)}"
connection.execute("DECLARE #{cursor} CURSOR FOR #{sql}")
loop do
if load
records = connection.uncached { klass.find_by_sql("FETCH FORWARD #{of} FROM #{cursor}") }
ids = records.map(&:id)
preload_associations(records)
yielded_relation = where(primary_key => ids).preload(includes_values + preload_values)
# hand the already-loaded records to the relation so it isn't re-queried
yielded_relation.send(:load_records, records)
else
ids = connection.uncached { connection.select_values("FETCH FORWARD #{of} FROM #{cursor}") }
yielded_relation = where(primary_key => ids).preload(includes_values + preload_values)
yielded_relation = yielded_relation.extending(BatchWithColumnsPreloaded).set_values(ids)
end
break if ids.empty?
yield yielded_relation
# a short fetch means the cursor is exhausted
break if ids.size < of
end
ensure
# the connection is unusable mid-error; only CLOSE when it's still sane
unless $!.is_a?(ActiveRecord::StatementInvalid)
connection.execute("CLOSE #{cursor}")
end
end
end
# Batches via PostgreSQL COPY TO STDOUT on the raw connection. The
# connection is removed from the pool for the duration (COPY monopolizes
# it), rows are decoded with the pg gem's text decoders, and batches are
# yielded as relations. When load is false only primary keys are streamed.
def in_batches_with_copy(of: 1000, start: nil, finish: nil, load: false)
# zero-row query used purely to learn the result's column metadata
limited_query = limit(0).to_sql
relation = self
relation_for_copy = apply_limits(relation, start, finish)
unless load
relation_for_copy = relation_for_copy.except(:select).select(primary_key)
end
full_query = "COPY (#{relation_for_copy.to_sql}) TO STDOUT"
conn = connection
full_query = conn.annotate_sql(full_query) if defined?(Marginalia)
pool = conn.pool
# remove the connection from the pool so that any queries executed
# while we're running this will get a new connection
pool.remove(conn)
# idempotent: restores the connection exactly once
checkin = -> do
pool&.restore_connection(conn)
pool = nil
end
# make sure to log _something_, even if the dbtime is totally off
conn.send(:log, full_query, "#{klass.name} Load") do
decoder = if load
# set up all our metadata based on a dummy query (COPY doesn't return any metadata)
result = conn.raw_connection.exec(limited_query)
type_map = conn.raw_connection.type_map_for_results.build_column_map(result)
# see PostgreSQLAdapter#exec_query
types = {}
fields = result.fields
fields.each_with_index do |fname, i|
ftype = result.ftype i
fmod = result.fmod i
types[fname] = conn.send(:get_oid_type, ftype, fmod, fname)
end
column_types = types.dup
columns_hash.each_key { |k| column_types.delete k }
PG::TextDecoder::CopyRow.new(type_map: type_map)
else
pkey_oid = columns_hash[primary_key].sql_type_metadata.oid
# this is really dumb that we have to manually search through this, but
# PG::TypeMapByOid doesn't have a direct lookup method
coder = conn.raw_connection.type_map_for_results.coders.find { |c| c.oid == pkey_oid }
PG::TextDecoder::CopyRow.new(type_map: PG::TypeMapByColumn.new([coder]))
end
rows = []
# converts the accumulated raw rows into the relation to yield
build_relation = -> do
if load
records = ActiveRecord::Result.new(fields, rows, types).map { |record| instantiate(record, column_types) }
ids = records.map(&:id)
yielded_relation = relation.where(primary_key => ids)
preload_associations(records)
yielded_relation.send(:load_records, records)
else
ids = rows.map(&:first)
yielded_relation = relation.where(primary_key => ids)
yielded_relation = yielded_relation.extending(BatchWithColumnsPreloaded).set_values(ids)
end
yielded_relation
end
conn.raw_connection.copy_data(full_query, decoder) do
while (row = conn.raw_connection.get_copy_data)
rows << row
if rows.size == of
yield build_relation.call
rows = []
end
end
end
# return the connection now, in case there was only 1 batch, we can avoid a separate connection if the block needs it
checkin.call
unless rows.empty?
yield build_relation.call
end
end
nil
ensure
# put the connection back in the pool for reuse
checkin&.call
end
# in some cases we're doing a lot of work inside
# the yielded block, and holding open a transaction
# or even a connection while we do all that work can
# be a problem for the database, especially if a lot
# of these are happening at once. This strategy
# makes one query to hold onto all the IDs needed for the
# iteration (make sure they'll fit in memory, or you could be sad)
# and yields the objects in batches in the same order as the scope specified
# so the DB connection can be fully recycled during each block.
#
# NOTE(review): the load: parameter is accepted for interface parity but is
# not used here — every batch is a loaded unscoped relation.
def in_batches_with_pluck_ids(of: 1000, start: nil, finish: nil, load: false)
relation = apply_limits(self, start, finish)
all_object_ids = relation.pluck(:id)
current_order_values = order_values
all_object_ids.in_groups_of(of) do |id_batch|
# unscoped + explicit order so each batch query is fresh and cheap
object_batch = klass.unscoped.where(id: id_batch).order(current_order_values).preload(includes_values + preload_values)
yield object_batch
end
end
# Batches by materializing the scope into a session-local temp table, adding
# a batching key (the primary key if selected and unordered, otherwise a
# SERIAL column), then paging through the temp table. Requires a context
# where the temp table will survive (migration/transaction/deploy) unless
# ignore_transaction is set.
def in_batches_with_temp_table(of: 1000, start: nil, finish: nil, load: false, ignore_transaction: false)
Shard.current.database_server.unguard do
can_do_it = ignore_transaction ||
Rails.env.production? ||
ActiveRecord::Base.in_migration ||
GuardRail.environment == :deploy ||
(!Rails.env.test? && connection.open_transactions > 0) ||
ActiveRecord::Base.in_transaction_in_test?
unless can_do_it
raise ArgumentError, "in_batches with temp_table probably won't work outside a migration
and outside a transaction. Unfortunately, it's impossible to automatically
determine a better way to do it that will work correctly. You can try
switching to secondary first (then switching to primary if you modify anything
inside your loop), wrapping in a transaction (but be wary of locking records
for the duration of your query if you do any writes in your loop), or not
forcing in_batches to use a temp table (avoiding custom selects,
group, or order)."
end
relation = apply_limits(self, start, finish)
sql = relation.to_sql
table = "#{table_name}_in_batches_temp_table_#{sql.hash.abs.to_s(36)}"
# PostgreSQL identifier limit
table = table[-63..-1] if table.length > 63
remaining = connection.update("CREATE TEMPORARY TABLE #{table} AS #{sql}")
begin
return if remaining.zero?
if remaining > of
begin
# silence NOTICEs while adding the batching key
old_proc = connection.raw_connection.set_notice_processor {}
index = if (select_values.empty? || select_values.any? { |v| v.to_s == primary_key.to_s }) && order_values.empty?
connection.execute(%{CREATE INDEX "temp_primary_key" ON #{connection.quote_local_table_name(table)}(#{connection.quote_column_name(primary_key)})})
primary_key.to_s
else
# no usable key in the select list: add a surrogate that
# preserves the scope's insertion order
connection.execute "ALTER TABLE #{table} ADD temp_primary_key SERIAL PRIMARY KEY"
'temp_primary_key'
end
ensure
connection.raw_connection.set_notice_processor(&old_proc) if old_proc
end
end
klass.unscoped do
batch_relation = klass.from(table).select("*").limit(of).preload(includes_values + preload_values)
batch_relation = batch_relation.order(Arel.sql(connection.quote_column_name(index))) if index
yielded_relation = batch_relation
loop do
yield yielded_relation
remaining -= of
break if remaining <= 0
# keyset pagination: next batch starts after the last yielded key
last_value = if yielded_relation.loaded?
yielded_relation.last[index]
else
yielded_relation.offset(of - 1).limit(1).pluck(index).first
end
break if last_value.nil?
yielded_relation = batch_relation.where("#{connection.quote_column_name(index)} > ?", last_value)
end
end
ensure
# the connection is unusable mid-error unless we're outside a transaction
if !$!.is_a?(ActiveRecord::StatementInvalid) || connection.open_transactions == 0
connection.execute "DROP TABLE #{table}"
end
end
end
end
end
ActiveRecord::Relation.prepend(UsefulFindInBatches)
# Extends Rails' BatchEnumerator so it carries the custom batching :strategy
# chosen in UsefulFindInBatches#in_batches, and routes the bulk operations
# (delete_all/update_all/destroy_all/pluck) through it.
module UsefulBatchEnumerator
def initialize(strategy: nil, **kwargs)
@strategy = strategy
# keep everything except the relation for re-dispatch into in_batches
@kwargs = kwargs.except(:relation)
super(**kwargs.slice(:of, :start, :finish, :relation))
end
def each_record
return to_enum(:each_record) unless block_given?
@relation.to_enum(:in_batches, strategy: @strategy, load: true, **@kwargs).each do |relation|
relation.records.each { |record| yield record }
end
end
def delete_all
# fast path: plain :id strategy can just repeatedly delete LIMITed chunks
if @strategy.nil? && (strategy = @relation.infer_in_batches_strategy) == :id
sum = 0
loop do
current = @relation.limit(@of).delete_all
sum += current
break unless current == @of
end
return sum
end
# NOTE(review): when @strategy was given, the local `strategy` here is nil,
# so the explicit strategy is re-inferred rather than forwarded — confirm
# this is intended.
@relation.in_batches(strategy: strategy, load: false, **@kwargs, &:delete_all)
end
def update_all(*args)
@relation.in_batches(strategy: @strategy, load: false, **@kwargs) do |relation|
relation.update_all(*args)
end
end
def destroy_all
@relation.in_batches(strategy: @strategy, load: true, **@kwargs, &:destroy_all)
end
def each
enum = @relation.to_enum(:in_batches, strategy: @strategy, load: true, **@kwargs)
return enum.each { |relation| yield relation } if block_given?
enum
end
def pluck(*args)
return to_enum(:pluck, *args) unless block_given?
# narrow the select to the plucked columns before batching
@relation.except(:select)
.select(*args)
.in_batches(strategy: @strategy, load: false, **@kwargs) do |relation|
yield relation.pluck(*args)
end
end
end
ActiveRecord::Batches::BatchEnumerator.prepend(UsefulBatchEnumerator)
# Mixed into batch relations whose primary keys were already fetched by the
# batching strategy, so a subsequent pluck of just the primary key can be
# answered from memory instead of re-querying.
module BatchWithColumnsPreloaded
def set_values(values)
@loaded_values = values
self
end
def pluck(*args)
if @loaded_values && args == [primary_key.to_sym]
@loaded_values
else
super
end
end
end
# Adds support for lock(:no_key_update), translating it to PostgreSQL's
# weaker "FOR NO KEY UPDATE" row lock; all other lock arguments pass through
# unchanged.
module LockForNoKeyUpdate
def lock(lock_type = true)
super(lock_type == :no_key_update ? 'FOR NO KEY UPDATE' : lock_type)
end
end
ActiveRecord::Relation.prepend(LockForNoKeyUpdate)
ActiveRecord::Relation.class_eval do
# Forbids .includes in this codebase: callers must pick .preload or
# .eager_load explicitly. No-arg/nil-arg calls still defer to Rails.
def includes(*args)
raise "Use preload or eager_load instead of includes" unless args.empty? || args == [nil]
super
end
# Guards against the argument-less where!.not form that silently misbehaved
# on Rails 4.2.
def where!(*args)
if args.empty?
raise "where!.not doesn't work in Rails 4.2"
end
super
end
# Forbidden on relations: always raises, pointing callers at #distinct.
def uniq(*_args)
raise "use #distinct instead of #uniq on relations (Rails 5.1 will delegate uniq to to_a)"
end
# Builds a where clause for a polymorphic column from a one-entry hash,
# e.g. polymorphic_where(context: [course, account]). nil entries add an
# "IS NULL" branch; at least one non-nil object is required.
def polymorphic_where(args)
raise ArgumentError unless args.length == 1
column, raw_values = args.first
values = Array(raw_values)
original_length = values.length
values = values.compact
raise ArgumentError, "need to call polymorphic_where with at least one object" if values.empty?
clauses = Array.new(values.length, "(#{column}_id=? AND #{column}_type=?)")
sql = clauses.join(" OR ")
sql += " OR (#{column}_id IS NULL AND #{column}_type IS NULL)" if values.length < original_length
binds = values.flat_map { |value| [value, value.class.base_class.name] }
where(sql, *binds)
end
# Filters out rows touched within the last `touch_personal_space` seconds
# (a Setting; 0/unset disables the filter), to avoid re-touching records in
# rapid succession.
def not_recently_touched
scope = self
if((personal_space = Setting.get('touch_personal_space', 0).to_i) != 0)
personal_space -= 1
# truncate to seconds
bound = Time.at(Time.now.to_i - personal_space).utc
scope = scope.where("#{connection.quote_local_table_name(table_name)}.updated_at<?", bound)
end
scope
end
# update_all that first row-locks the target rows in primary-key order
# (FOR NO KEY UPDATE) to avoid deadlocks between concurrent bulk updates.
# The Setting toggles between a single subquery form and an explicit
# pluck-then-update transaction.
def update_all_locked_in_order(updates)
locked_scope = lock(:no_key_update).order(primary_key.to_sym)
if Setting.get("update_all_locked_in_order_subquery", "true") == "true"
unscoped.where(primary_key => locked_scope).update_all(updates)
else
transaction do
ids = locked_scope.pluck(primary_key)
unscoped.where(primary_key => ids).update_all(updates) unless ids.empty?
end
end
end
# Bumps updated_at on every row in the relation, using the deadlock-safe
# ordered-lock update.
def touch_all
activate { |relation| relation.update_all_locked_in_order(updated_at: Time.now.utc) }
end
# PostgreSQL DISTINCT ON (...) support: rewrites the relation's select list
# to "DISTINCT ON (cols) <original select or *>". Symbol args naming real
# columns are table-qualified and quoted; anything else is used verbatim.
def distinct_on(*args)
args.map! do |column_name|
if column_name.is_a?(Symbol) && column_names.include?(column_name.to_s)
"#{connection.quote_local_table_name(table_name)}.#{connection.quote_column_name(column_name)}"
else
column_name.to_s
end
end
relation = clone
old_select = relation.select_values
# build the DISTINCT ON prefix, then append the original projection;
# distinct_value must be cleared so Rails doesn't also emit DISTINCT
relation.select_values = [+"DISTINCT ON (#{args.join(', ')}) "]
relation.distinct_value = false
relation.select_values.first << (old_select.empty? ? "*" : old_select.uniq.join(', '))
relation
end
# if this sql is constructed on one shard then executed on another it wont work
# dont use it for cross shard queries
#
# Returns an unscoped relation selecting rows whose primary key appears in
# any of the given scopes (or self), via UNION ALL of id subqueries.
def union(*scopes)
key = "#{table_name}.#{primary_key}"
subqueries = (scopes + [self]).map do |scope|
scope.except(:select, :order).select(key).to_sql
end
unscoped.where("#{key} IN (#{subqueries.join(" UNION ALL ")})")
end
# returns batch_size ids at a time, working through the primary key from
# smallest to largest.
#
# note this does a raw connection.select_values, so it doesn't work with scopes
#
# options: :batch_size (default 1000), :no_integer_cast (yield raw strings).
def find_ids_in_batches(options = {})
batch_size = options[:batch_size] || 1000
key = "#{quoted_table_name}.#{primary_key}"
scope = except(:select).select(key).reorder(Arel.sql(key)).limit(batch_size)
ids = connection.select_values(scope.to_sql)
ids = ids.map(&:to_i) unless options[:no_integer_cast]
while ids.present?
yield ids
# a short batch means we've run out of rows
break if ids.size < batch_size
last_value = ids.last
# keyset pagination: next page starts after the last id seen
ids = connection.select_values(scope.where("#{key}>?", last_value).to_sql)
ids = ids.map(&:to_i) unless options[:no_integer_cast]
end
end
# returns 2 ids at a time (the min and the max of a range), working through
# the primary key from smallest to largest.
#
# options: :batch_size (default 1000), :start_at / :end_at (id bounds),
# :loose (integer pks only: skip computing the exact max and yield
# min..min+batch_size instead — cheaper, but ranges may be sparse).
def find_ids_in_ranges(options = {})
is_integer = columns_hash[primary_key.to_s].type == :integer
loose_mode = options[:loose] && is_integer
# loose_mode: if we don't care about getting exactly batch_size ids in between
# don't get the max - just get the min and add batch_size so we get that many _at most_
values = loose_mode ? "MIN(id)" : "MIN(id), MAX(id)"
batch_size = options[:batch_size].try(:to_i) || 1000
quoted_primary_key = "#{klass.connection.quote_local_table_name(table_name)}.#{klass.connection.quote_column_name(primary_key)}"
as_id = " AS id" unless primary_key == 'id'
subquery_scope = except(:select).select("#{quoted_primary_key}#{as_id}").reorder(primary_key.to_sym).limit(loose_mode ? 1 : batch_size)
subquery_scope = subquery_scope.where("#{quoted_primary_key} <= ?", options[:end_at]) if options[:end_at]
first_subquery_scope = options[:start_at] ? subquery_scope.where("#{quoted_primary_key} >= ?", options[:start_at]) : subquery_scope
ids = connection.select_rows("SELECT #{values} FROM (#{first_subquery_scope.to_sql}) AS subquery").first
while ids.first.present?
ids.map!(&:to_i) if is_integer
# synthesize the range end in loose mode
ids << ids.first + batch_size if loose_mode
yield(*ids)
last_value = ids.last
next_subquery_scope = subquery_scope.where(["#{quoted_primary_key}>?", last_value])
ids = connection.select_rows("SELECT #{values} FROM (#{next_subquery_scope.to_sql}) AS subquery").first
end
end
end
module UpdateAndDeleteWithJoins
def deconstruct_joins(joins_sql=nil)
unless joins_sql
joins_sql = ''
add_joins!(joins_sql, nil)
end
tables = []
join_conditions = []
joins_sql.strip.split('INNER JOIN')[1..-1].each do |join|
# this could probably be improved
raise "PostgreSQL update_all/delete_all only supports INNER JOIN" unless join.strip =~ /([a-zA-Z0-9'"_\.]+(?:(?:\s+[aA][sS])?\s+[a-zA-Z0-9'"_]+)?)\s+ON\s+(.*)/m
tables << $1
join_conditions << $2
end
[tables, join_conditions]
end
# update_all that supports joined relations on PostgreSQL by rewriting the
# joins into UPDATE ... FROM ... WHERE join_conditions AND scope_conditions.
# Join-free relations defer to Rails (unguarding when the current GuardRail
# environment differs from the database server's).
def update_all(updates, *args)
db = Shard.current(klass.shard_category).database_server
if joins_values.empty?
if ::GuardRail.environment != db.guard_rail_environment
Shard.current.database_server.unguard {return super }
else
return super
end
end
stmt = Arel::UpdateManager.new
stmt.set Arel.sql(@klass.send(:sanitize_sql_for_assignment, updates))
from = from_clause.value
stmt.table(from ? Arel::Nodes::SqlLiteral.new(from) : table)
stmt.key = table[primary_key]
sql = stmt.to_sql
# render the join nodes to SQL so they can be deconstructed
collector = connection.send(:collector)
arel.join_sources.each do |node|
connection.visitor.accept(node, collector)
end
join_sql = collector.value
tables, join_conditions = deconstruct_joins(join_sql)
unless tables.empty?
sql.concat(' FROM ')
sql.concat(tables.join(', '))
sql.concat(' ')
end
# fold the ON conditions into the WHERE clause alongside the scope's own
scope = self
join_conditions.each { |join| scope = scope.where(join) }
# skip any binds that are used in the join
collector = connection.send(:collector)
scope.arel.constraints.each do |node|
connection.visitor.accept(node, collector)
end
where_sql = collector.value
sql.concat('WHERE ' + where_sql)
if ::GuardRail.environment != db.guard_rail_environment
Shard.current.database_server.unguard {connection.update(sql, "#{name} Update")}
else
connection.update(sql, "#{name} Update")
end
end
# delete_all that supports joined relations on PostgreSQL by rewriting the
# joins into DELETE ... USING ... WHERE join_conditions AND scope_conditions.
# Join-free relations defer to Rails.
def delete_all
return super if joins_values.empty?
sql = +"DELETE FROM #{quoted_table_name} "
join_sql = arel.join_sources.map(&:to_sql).join(" ")
tables, join_conditions = deconstruct_joins(join_sql)
sql.concat('USING ')
sql.concat(tables.join(', '))
sql.concat(' ')
# fold the ON conditions into the WHERE clause alongside the scope's own
scope = self
join_conditions.each { |join| scope = scope.where(join) }
collector = connection.send(:collector)
scope.arel.constraints.each do |node|
connection.visitor.accept(node, collector)
end
where_sql = collector.value
sql.concat('WHERE ' + where_sql)
connection.delete(sql, "SQL", [])
end
end
ActiveRecord::Relation.prepend(UpdateAndDeleteWithJoins)
# Makes delete_all/update_all honor limit/offset by routing through a
# primary-key subquery (Rails otherwise ignores or rejects them).
module UpdateAndDeleteAllWithLimit
def delete_all(*args)
return super unless limit_value || offset_value
id_scope = except(:select).select(primary_key)
unscoped.where(primary_key => id_scope).delete_all
end
def update_all(updates, *args)
return super unless limit_value || offset_value
id_scope = except(:select).select(primary_key)
unscoped.where(primary_key => id_scope).update_all(updates)
end
end
ActiveRecord::Relation.prepend(UpdateAndDeleteAllWithLimit)
ActiveRecord::Associations::CollectionProxy.class_eval do
# CollectionProxy delegates unknown methods to the loaded target array and
# to the association's class, so respond_to? must mirror that. Marshal
# probes are answered by super alone to avoid loading the association.
def respond_to?(name, include_private = false)
return super if [:marshal_dump, :_dump, 'marshal_dump', '_dump'].include?(name)
super ||
(load_target && target.respond_to?(name, include_private)) ||
proxy_association.klass.respond_to?(name, include_private)
end
# Builds a record shaped like a member of this collection (scope defaults
# applied, inverse association set) WITHOUT adding it to the target, so
# autosave won't persist it.
def temp_record(*args)
# creates a record with attributes like a child record but is not added to the collection for autosaving
record = klass.unscoped.merge(scope).new(*args)
@association.set_inverse_instance(record)
record
end
def uniq(*args)
raise "use #distinct instead of #uniq on relations (Rails 5.1 will delegate uniq to to_a)"
end
end
ActiveRecord::ConnectionAdapters::AbstractAdapter.class_eval do
  # Insert +records+ (an array of column-name => value hashes) into
  # +table_name+. The column list is taken from the first record; keys
  # missing from later records are inserted as NULL (quote(nil)).
  #
  # Fixes: the previous version raised NoMethodError on an empty array
  # (`records.first.keys` on nil) and issued one INSERT round-trip per
  # record; this no-ops on empty input and emits a single multi-row INSERT.
  def bulk_insert(table_name, records)
    return if records.empty?
    keys = records.first.keys
    quoted_keys = keys.map { |k| quote_column_name(k) }.join(', ')
    tuples = records.map do |record|
      "(#{keys.map { |k| quote(record[k]) }.join(', ')})"
    end
    execute <<~SQL
      INSERT INTO #{quote_table_name(table_name)}
      (#{quoted_keys})
      VALUES
      #{tuples.join(",\n")}
    SQL
  end
end
class ActiveRecord::ConnectionAdapters::AbstractAdapter
# for functions that differ from one adapter to the next, use the following
# method (overriding as needed in non-standard adapters), e.g.
#
# connection.func(:group_concat, :name, '|') ->
# group_concat(name, '|') (default)
# group_concat(name SEPARATOR '|') (mysql)
# string_agg(name::text, '|') (postgres)
def func(name, *args)
"#{name}(#{args.map{ |arg| func_arg_esc(arg) }.join(', ')})"
end
# Symbols pass through as raw identifiers; anything else is quoted as a value.
def func_arg_esc(arg)
arg.is_a?(Symbol) ? arg : quote(arg)
end
def group_by(*columns)
# the first item should be the primary key(s) that the other columns are
# functionally dependent on. alternatively, it can be a class, and all
# columns will be inferred from it. this is useful for cases where you want
# to select all columns from one table, and an aggregate from another.
Array(infer_group_by_columns(columns).first).join(", ")
end
# Expand any AR-class entries into their fully-qualified quoted column
# names; plain entries (strings/symbols) are passed through untouched.
def infer_group_by_columns(columns)
columns.map { |col|
col.respond_to?(:columns) ?
col.columns.map { |c|
"#{col.quoted_table_name}.#{quote_column_name(c.name)}"
} :
col
}
end
end
ActiveRecord::Associations::HasOneAssociation.class_eval do
# Attributes used when building/creating the associated record. Drop the
# primary key from the creation scope unless it doubles as the foreign key
# (so we never try to force a specific id onto the new record).
def create_scope
scope = self.scope.scope_for_create.stringify_keys
scope = scope.except(klass.primary_key) unless klass.primary_key.to_s == reflection.foreign_key.to_s
scope
end
end
class ActiveRecord::Migration
  # at least one of these tags is required
  DEPLOY_TAGS = [:predeploy, :postdeploy]
  class << self
    def is_postgres?
      connection.adapter_name == 'PostgreSQL'
    end

    # True if a Postgres function named +procname+ exists.
    # Fixes: the name is now quoted via the adapter instead of being
    # interpolated raw into the SQL string (injection/syntax hazard for
    # unusual names).
    def has_postgres_proc?(procname)
      connection.select_value("SELECT COUNT(*) FROM pg_proc WHERE proname=#{connection.quote(procname)}").to_i != 0
    end
  end

  # Prefer the class-level connection when one is available (set up by the
  # migrator), otherwise fall back to an instance-assigned connection or
  # the global ActiveRecord::Base connection.
  def connection
    if self.class.respond_to?(:connection)
      return self.class.connection
    else
      @connection || ActiveRecord::Base.connection
    end
  end

  def tags
    self.class.tags
  end
end
class ActiveRecord::MigrationProxy
delegate :connection, :cassandra_cluster, to: :migration
# Reject timestamped migration versions dated in the future — they would
# sort after not-yet-written migrations and break ordering on deploy.
def initialize(*)
super
if version&.to_s&.length == 14 && version.to_s > Time.now.utc.strftime("%Y%m%d%H%M%S")
raise "please don't create migrations with a version number in the future: #{name} #{version}"
end
end
# A migration may opt out of running by defining #runnable?.
def runnable?
!migration.respond_to?(:runnable?) || migration.runnable?
end
# Load the migration file and enforce that it carries exactly one of the
# required deploy tags (predeploy XOR postdeploy).
def load_migration
load(filename)
@migration = name.constantize
raise "#{self.name} (#{self.version}) is not tagged as exactly one of predeploy or postdeploy!" unless (@migration.tags & ActiveRecord::Migration::DEPLOY_TAGS).length == 1
@migration
end
end
# Cache migration lookups per path set — scanning the migration directories
# is expensive and the result doesn't change within a process.
module MigratorCache
def migrations(paths)
@@migrations_hash ||= {}
@@migrations_hash[paths] ||= super
end
def migrations_paths
@@migrations_paths ||= [File.join(Rails.root, "db/migrate")]
end
end
ActiveRecord::Migrator.singleton_class.prepend(MigratorCache)
module Migrator
# Migrations that are pending but declare themselves not runnable.
def skipped_migrations
pending_migrations(call_super: true).reject(&:runnable?)
end
# By default only report pending migrations that are actually runnable;
# call_super: true returns the unfiltered Rails list.
def pending_migrations(call_super: false)
return super() if call_super
super().select(&:runnable?)
end
def runnable
super.select(&:runnable?)
end
# Wrap each migration run: track "in migration" state for the app, tag
# queries via Marginalia, and interactively confirm destructive reverts.
def execute_migration_in_transaction(migration, direct)
old_in_migration, ActiveRecord::Base.in_migration = ActiveRecord::Base.in_migration, true
if defined?(Marginalia)
old_migration_name, Marginalia::Comment.migration = Marginalia::Comment.migration, migration.name
end
# Prompt before migrating down outside of tests; answering 'a' confirms
# all remaining reverts for this process (tracked in a global).
if down? && !Rails.env.test? && !$confirmed_migrate_down
require 'highline'
if HighLine.new.ask("Revert migration #{migration.name} (#{migration.version}) ? [y/N/a] > ") !~ /^([ya])/i
raise("Revert not confirmed")
end
# $1 is the capture from the regex match on the answer above.
$confirmed_migrate_down = true if $1.downcase == 'a'
end
super
ensure
ActiveRecord::Base.in_migration = old_in_migration
Marginalia::Comment.migration = old_migration_name if defined?(Marginalia)
end
end
ActiveRecord::Migrator.prepend(Migrator)
# Also pick up migrations shipped inside plugin gems, and keep the rake
# task path list in sync with the migrator's.
ActiveRecord::Migrator.migrations_paths.concat Dir[Rails.root.join('gems', 'plugins', '*', 'db', 'migrate')]
ActiveRecord::Tasks::DatabaseTasks.migrations_paths = ActiveRecord::Migrator.migrations_paths
ActiveRecord::ConnectionAdapters::SchemaStatements.class_eval do
# Name of the FK from +from_table+ to +to_table+ on +column+ (defaults to
# the conventional "<singular to_table>_id"), or nil if none exists.
def find_foreign_key(from_table, to_table, column: nil)
column ||= "#{to_table.to_s.singularize}_id"
foreign_keys(from_table).find do |key|
key.to_table == to_table.to_s && key.column == column.to_s
end&.name
end
# Rename a constraint and/or toggle its DEFERRABLE option (Postgres).
def alter_constraint(table, constraint, new_name: nil, deferrable: nil)
raise ArgumentError, "must specify deferrable or a new name" if new_name.nil? && deferrable.nil?
# can't rename and alter options in the same statement, so do the rename first
if new_name && new_name != constraint
execute("ALTER TABLE #{quote_table_name(table)}
RENAME CONSTRAINT #{quote_column_name(constraint)} TO #{quote_column_name(new_name)}")
constraint = new_name
end
unless deferrable.nil?
options = deferrable ? "DEFERRABLE" : "NOT DEFERRABLE"
execute("ALTER TABLE #{quote_table_name(table)}
ALTER CONSTRAINT #{quote_column_name(constraint)} #{options}")
end
end
# Find a FK matching +options+; when multiple match a :to_table, prefer
# the one on the conventionally-named column.
def foreign_key_for(from_table, **options)
return unless supports_foreign_keys?
fks = foreign_keys(from_table).select { |fk| fk.defined_for?(options) }
# prefer a FK on a column named after the table
if options[:to_table]
column = foreign_key_column_for(options[:to_table])
return fks.find { |fk| fk.column == column } || fks.first
end
fks.first
end
# Like Rails' remove_foreign_key, but supports :if_exists to no-op when
# the FK isn't there (instead of raising).
def remove_foreign_key(from_table, to_table = nil, **options)
return unless supports_foreign_keys?
if options.delete(:if_exists)
fk_name_to_delete = foreign_key_for(from_table, to_table: to_table, **options)&.name
return if fk_name_to_delete.nil?
else
fk_name_to_delete = foreign_key_for!(from_table, to_table: to_table, **options).name
end
at = create_alter_table from_table
at.drop_foreign_key fk_name_to_delete
execute schema_creation.accept(at)
end
# Make (column_name, pk) the table's replica identity for logical
# replication: backfill NULLs, enforce NOT NULL, and add a unique index.
def add_replica_identity(model_name, column_name, default_value)
klass = model_name.constantize
if columns(klass.table_name).find { |c| c.name == column_name.to_s }.null
DataFixup::BackfillNulls.run(klass, column_name, default_value: default_value)
end
change_column_null klass.table_name, column_name, false
primary_column = klass.primary_key
index_name = "index_#{klass.table_name}_replica_identity"
add_index klass.table_name, [column_name, primary_column], name: index_name, algorithm: :concurrently, unique: true, if_not_exists: true
set_replica_identity klass.table_name, index_name
end
# Undo add_replica_identity: revert to the default identity and drop the index.
def remove_replica_identity(model_name)
klass = model_name.constantize
set_replica_identity klass.table_name, :default
remove_index klass.table_name, name: "index_#{klass.table_name}_replica_identity", if_exists: true
end
end
# yes, various versions of rails supports various if_exists/if_not_exists options,
# but _none_ of them (as of writing) will invert them on reversion. Some will
# purposely strip the option, but most don't do anything.
module ExistenceInversions
%w{index foreign_key column}.each do |type|
# these methods purposely pull the flag from the incoming args,
# and assign to the outgoing args, not relying on it getting
# passed through. and sometimes they even modify args.
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def invert_add_#{type}(args)
orig_args = args.dup
result = super
# swap if_not_exists -> if_exists on the generated remove_* command
if orig_args.last.is_a?(Hash) && orig_args.last[:if_not_exists]
result[1] << {} unless result[1].last.is_a?(Hash)
result[1].last[:if_exists] = orig_args.last[:if_not_exists]
result[1].last.delete(:if_not_exists)
end
result
end
def invert_remove_#{type}(args)
orig_args = args.dup
result = super
# swap if_exists -> if_not_exists on the generated add_* command
if orig_args.last.is_a?(Hash) && orig_args.last[:if_exists]
result[1] << {} unless result[1].last.is_a?(Hash)
result[1].last[:if_not_exists] = orig_args.last[:if_exists]
result[1].last.delete(:if_exists)
end
result
end
RUBY
end
end
ActiveRecord::Migration::CommandRecorder.prepend(ExistenceInversions)
ActiveRecord::Associations::CollectionAssociation.class_eval do
# CollectionAssociation implements uniq for :uniq option, in its
# own special way. re-implement, but as a relation
def distinct
scope.distinct
end
end
if CANVAS_RAILS6_0
# Run model callbacks outside the current relation scope, so callbacks
# don't accidentally inherit default/current scoping (fixed upstream in 6.1).
module UnscopeCallbacks
def run_callbacks(kind)
# no callbacks, or it's the Switchman initialize callback that is safe for scoping; avoid
# all the object allocations of creating a new scope
return super if __callbacks[kind].empty? || kind == :initialize && __callbacks[kind].send(:chain).length == 1
# in rails 6.1, we can get rid of this entire monkeypatch
scope = self.class.current_scope&.clone || self.class.default_scoped
scope = scope.klass.unscoped
scope.scoping { super }
end
end
ActiveRecord::Base.send(:include, UnscopeCallbacks)
end
# Restrict dynamic finders: only the find_by_* family of dynamic matchers
# is allowed; every other matcher kind is discarded (treated as no match).
module MatchWithDiscard
def match(model, name)
matcher = super
# nil stays nil; non-FindBy matchers are dropped.
matcher if matcher.is_a?(ActiveRecord::DynamicMatchers::FindBy)
end
end
ActiveRecord::DynamicMatchers::Method.singleton_class.prepend(MatchWithDiscard)
# see https://github.com/rails/rails/issues/18659
# Serializes as just the model class; when loaded back through Marshal it
# forces that class to define its attribute methods before the owning
# record is reconstructed.
class AttributesDefiner
  def initialize(klass)
    @klass = klass
  end

  def marshal_dump
    @klass
  end

  def marshal_load(klass)
    @klass = klass.tap(&:define_attribute_methods)
  end
end
module DefineAttributeMethods
# Stash an AttributesDefiner on every record so that unmarshalling the
# record also defines the class's attribute methods (rails/rails#18659).
def init_internals
@define_attributes_helper = AttributesDefiner.new(self.class)
super
end
end
ActiveRecord::Base.include(DefineAttributeMethods)
module SkipTouchCallbacks
  module Base
    # Temporarily suppress belongs_to :touch callbacks for +name+ while the
    # block runs. Re-entrant: a nested call for the same name does not clear
    # the flag early.
    #
    # Fixes: the flag is now removed in an ensure block, so an exception
    # raised inside the block no longer leaves the callback permanently
    # skipped for the rest of the process.
    def skip_touch_callbacks(name)
      @skip_touch_callbacks ||= Set.new
      if @skip_touch_callbacks.include?(name)
        yield
      else
        @skip_touch_callbacks << name
        begin
          yield
        ensure
          @skip_touch_callbacks.delete(name)
        end
      end
    end

    # True if +name+ is currently being skipped on this class or any
    # ancestor model class.
    def touch_callbacks_skipped?(name)
      (@skip_touch_callbacks && @skip_touch_callbacks.include?(name)) ||
        (self.superclass < ActiveRecord::Base && self.superclass.touch_callbacks_skipped?(name))
    end
  end

  module BelongsTo
    # Short-circuit the touch when the owner's class has it skipped.
    def touch_record(o, _changes, _foreign_key, name, *)
      return if o.class.touch_callbacks_skipped?(name)
      super
    end
  end
end
# Expose skip_touch_callbacks on all models and hook belongs_to touch.
ActiveRecord::Base.singleton_class.include(SkipTouchCallbacks::Base)
ActiveRecord::Associations::Builder::BelongsTo.singleton_class.prepend(SkipTouchCallbacks::BelongsTo)
# Skip the expensive default-diff computation when making a readonly clone;
# just dup the changed-attributes bookkeeping so the clone can't dirty the
# original.
module ReadonlyCloning
def calculate_changes_from_defaults
if @readonly_clone
@changed_attributes = @changed_attributes.dup if @changed_attributes # otherwise changes to the clone will dirty the original
else
super # no reason to do this if we're creating a readonly clone - can take a long time with serialized columns
end
end
end
ActiveRecord::Base.prepend(ReadonlyCloning)
module DupArraysInMutationTracker
# setting a serialized attribute to an array of hashes shouldn't change all the hashes to indifferent access
# when the array gets stored in the indifferent access hash inside the mutation tracker
# not that it really matters too much but having some consistency is nice
def change_to_attribute(*args)
change = super
# change is [old, new]; dup the new value if it's an Array so the tracker
# holds its own copy.
if change
val = change[1]
change[1] = val.dup if val.is_a?(Array)
end
change
end
end
ActiveModel::AttributeMutationTracker.prepend(DupArraysInMutationTracker)
# When computing the "current" migration number for generators, ignore any
# file whose version isn't a 14-digit timestamp (e.g. out-of-sequence or
# hand-numbered migrations), so new migrations always get a fresh timestamp.
module IgnoreOutOfSequenceMigrationDates
def current_migration_number(dirname)
migration_lookup_at(dirname).map do |file|
digits = File.basename(file).split("_").first
next if ActiveRecord::Base.timestamped_migrations && digits.length != 14
digits.to_i
end.compact.max.to_i
end
end
# Thor doesn't call `super` in its `inherited` method, so hook in so that we can hook in later :)
Thor::Group.singleton_class.prepend(Autoextend::ClassMethods)
# Hook lazily: the generator class may not be loaded yet (optional: true).
Autoextend.hook(:"ActiveRecord::Generators::MigrationGenerator",
IgnoreOutOfSequenceMigrationDates,
singleton: true,
method: :prepend,
optional: true)
# Always stamp new migrations with the current UTC time rather than
# max(existing) + 1, so out-of-sequence versions don't inflate new numbers.
module AlwaysUseMigrationDates
def next_migration_number(number)
return SchemaMigration.normalize_migration_number(number) unless ActiveRecord::Base.timestamped_migrations

Time.now.utc.strftime("%Y%m%d%H%M%S")
end
end
ActiveRecord::Migration.prepend(AlwaysUseMigrationDates)
# Extend Relation#explain with an analyze: option (EXPLAIN ANALYZE), and
# fold in Switchman's shard activation when executing the queries.
module ExplainAnalyze
def exec_explain(queries, analyze: false) # :nodoc:
str = queries.map do |sql, binds|
msg = "EXPLAIN #{"ANALYZE " if analyze}for: #{sql}"
unless binds.empty?
msg << " "
msg << binds.map { |attr| render_bind(attr) }.inspect
end
msg << "\n"
msg << connection.explain(sql, binds, analyze: analyze)
end.join("\n")
# Overriding inspect to be more human readable, especially in the console.
def str.inspect
self
end
str
end
def explain(analyze: false)
#TODO: Fix for binds.
exec_explain(collecting_queries_for_explain do
if block_given?
yield
else
# fold in switchman's override
self.activate { |relation| relation.send(:exec_queries) }
end
end, analyze: analyze)
end
end
ActiveRecord::Relation.prepend(ExplainAnalyze)
# fake Rails into grabbing correct column information for a table rename in-progress
module TableRename
# new_name => old_name; while the old table still exists, read its columns.
RENAMES = { 'authentication_providers' => 'account_authorization_configs' }.freeze
def columns(table_name)
if (old_name = RENAMES[table_name])
table_name = old_name if data_source_exists?(old_name)
end
super
end
end
# Drop the memoized inspection filter after inspect/pretty_print.
# NOTE(review): presumably @inspection_filter caches an object that can't be
# marshalled, so clearing it keeps records dumpable — confirm against the
# Rails version in use.
module DefeatInspectionFilterMarshalling
def inspect
result = super
@inspection_filter = nil
result
end
def pretty_print(_pp)
super
@inspection_filter = nil
end
end
ActiveRecord::ConnectionAdapters::SchemaCache.prepend(TableRename)
ActiveRecord::Base.prepend(DefeatInspectionFilterMarshalling)
# Wire up the cache-register extensions for models and relations.
ActiveRecord::Base.prepend(ActiveRecord::CacheRegister::Base)
ActiveRecord::Base.singleton_class.prepend(ActiveRecord::CacheRegister::Base::ClassMethods)
ActiveRecord::Relation.prepend(ActiveRecord::CacheRegister::Relation)
# see https://github.com/rails/rails/issues/37745
module DontExplicitlyNameColumnsBecauseOfIgnores
def build_select(arel)
if select_values.any?
arel.project(*arel_columns(select_values.uniq))
elsif !from_clause.value && klass.ignored_columns.any? && !(klass.ignored_columns & klass.column_names).empty?
# only enumerate columns when an ignored column actually still exists
# in the table; otherwise fall through to SELECT * below.
arel.project(*klass.column_names.map { |field| arel_attribute(field) })
else
arel.project(table[Arel.star])
end
end
end
ActiveRecord::Relation.prepend(DontExplicitlyNameColumnsBecauseOfIgnores)
# after_transaction_commit callbacks should run against the shards that were
# active when they were registered, not whatever is active at commit time.
module PreserveShardAfterTransaction
def after_transaction_commit(&block)
shards = Shard.send(:active_shards)
shards[:delayed_jobs] = Shard.current.delayed_jobs_shard if ::ActiveRecord::Migration.open_migrations.positive?
super { Shard.activate(shards, &block) }
end
end
ActiveRecord::ConnectionAdapters::Transaction.prepend(PreserveShardAfterTransaction)
# Stamp every connection with a monotonic creation time so pools can retire
# connections that have been alive longer than a configured max_runtime.
module ConnectionWithMaxRuntime
def initialize(*)
super
@connected_at = Concurrent.monotonic_time
end

# Seconds this connection has existed (monotonic clock).
def runtime
Concurrent.monotonic_time - @connected_at
end
end
ActiveRecord::ConnectionAdapters::AbstractAdapter.prepend(ConnectionWithMaxRuntime)
# Re-adopt a previously checked-out connection into this pool, displacing
# (and checking in) any connection the current thread acquired meanwhile.
module RestoreConnectionConnectionPool
def restore_connection(conn)
synchronize do
adopt_connection(conn)
# check if a new connection was checked out in the meantime, and check it back in
if (old_conn = @thread_cached_conns[connection_cache_key(current_thread)]) && old_conn != conn
# this is just the necessary parts of #checkin
old_conn.lock.synchronize do
old_conn._run_checkin_callbacks do
old_conn.expire
end
@available.add old_conn
end
end
@thread_cached_conns[connection_cache_key(current_thread)] = conn
end
end
end
ActiveRecord::ConnectionAdapters::ConnectionPool.prepend(RestoreConnectionConnectionPool)
# Enforce a per-connection maximum lifetime: connections older than
# max_runtime are disconnected on acquire, checkin, and flush.
module MaxRuntimeConnectionPool
def max_runtime
# TODO: Rails 6.1 uses a PoolConfig object instead
if CANVAS_RAILS6_0
@spec.config[:max_runtime]
else
db_config.configuration_hash[:max_runtime]
end
end
# Keep acquiring until we get a connection that hasn't aged out; expired
# ones are removed from the pool and disconnected.
def acquire_connection(*)
loop do
conn = super
return conn unless max_runtime && conn.runtime >= max_runtime
@connections.delete(conn)
conn.disconnect!
end
end
# On checkin, discard the connection instead of returning it to the pool
# if it has exceeded its lifetime.
def checkin(conn)
return super unless max_runtime && conn.runtime >= max_runtime
conn.lock.synchronize do
synchronize do
remove_connection_from_thread_cache conn
@connections.delete(conn)
conn.disconnect!
end
end
end
# Also reap idle-but-expired connections during a normal flush. Leasing
# inside the synchronize prevents another thread grabbing them before the
# disconnect (done outside the lock).
def flush(*)
super
return unless max_runtime
old_connections = synchronize do
# TODO: Rails 6.1 adds a `discarded?` method instead of checking this directly
return unless @connections
@connections.select do |conn|
!conn.in_use? && conn.runtime >= max_runtime
end.each do |conn|
conn.lease
@available.delete conn
@connections.delete conn
end
end
old_connections.each(&:disconnect!)
end
end
ActiveRecord::ConnectionAdapters::ConnectionPool.prepend(MaxRuntimeConnectionPool)
# On boot, seed the connection pool's schema cache from the shared
# MultiCache copy (if present) so each process skips re-introspecting
# the database schema.
Rails.application.config.after_initialize do
ActiveSupport.on_load(:active_record) do
cache = MultiCache.fetch("schema_cache")
next if cache.nil?
connection_pool.set_schema_cache(cache)
LoadAccount.schema_cache_loaded!
end
end
|
Ensure node classes are loaded at app boot.
# Eagerly require every model at boot so that all descendants of Node are
# registered and the full set of Node types is known at runtime (STI
# subclasses aren't visible until their files are loaded).
# FIXME: Put nodes in a subdir so we can load just them
Dir['./app/models/*.rb'].each { |model_file| require model_file }
|
# Deliver mail through SendGrid's SMTP relay using Heroku-provided creds.
# NOTE(review): applied unconditionally — when SENDGRID_USERNAME/PASSWORD
# are unset these options carry nils; consider guarding on their presence.
Pony.options = {
:via => :smtp,
:via_options => {
:address => 'smtp.sendgrid.net',
:port => '587',
:domain => 'heroku.com',
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:authentication => :plain,
:enable_starttls_auto => true
}
}
Only try to configure SendGrid if the SendGrid environment variables are present.
# Configure Pony to deliver through SendGrid's SMTP relay, but only when
# both SendGrid credentials are actually present in the environment.
if ENV['SENDGRID_USERNAME'].present? &&
   ENV['SENDGRID_PASSWORD'].present?
  Pony.options = {
    via: :smtp,
    via_options: {
      address: 'smtp.sendgrid.net',
      port: '587',
      domain: 'heroku.com',
      user_name: ENV['SENDGRID_USERNAME'],
      password: ENV['SENDGRID_PASSWORD'],
      authentication: :plain,
      enable_starttls_auto: true
    }
  }
end
# Be sure to restart your server when you modify this file.
# Store the session in a signed cookie under this key.
RacingOnRails::Application.config.session_store :cookie_store, :key => '_racing_on_rails_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rails generate session_migration")
# RacingOnRails::Application.config.session_store :active_record_store
Ditch comments
RacingOnRails::Application.config.session_store :cookie_store, :key => '_racing_on_rails_session'
|
# Be sure to restart your server when you modify this file.
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# NOTE(review): this secret is committed to source control; acceptable for
# a sample app, but a real app should load it from the environment.
ActionController::Base.session = {
:key => '_rails_authlogic_rpx_sample_session',
:secret => '7c1b646c73c6cbe9fb7f21f7b4c66ddb832265d3b54afeda45e5356464e266f1d45c5dc059feacadec5fd4a575e35544f44cba53b5e9a5e055d8825315790521'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rake db:sessions:create")
# ActionController::Base.session_store = :active_record_store
Differentiate the session key for the Rails 2 version of the sample application.
# Be sure to restart your server when you modify this file.
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# NOTE(review): distinct key/secret from the Rails 3 sample so the two
# sample apps don't share cookies; still committed to source control —
# fine only for a sample app.
ActionController::Base.session = {
:key => '_rails2_authlogic_rpx_sample_session',
:secret => '8c1b646c73c6cbe9fb7f21f7b4c66ddb832265d3b54afeda45e5356464e266f1d45c5dc059feacadec5fd4a575e35544f44cba53b5e9a5e055d8825315790521'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rake db:sessions:create")
# ActionController::Base.session_store = :active_record_store
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.