CombinedText stringlengths 4 3.42M |
|---|
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
require File.join(File.dirname(__FILE__), 'boot')
require 'radius'
Radiant::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
config.frameworks -= [ :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Only load the extensions named here, in the order given. By default all
# extensions in vendor/extensions are loaded, in alphabetical order. :all
# can be used as a placeholder for all extensions not explicitly named.
# config.extensions = [ :all ]
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_radiant_session',
:secret => 'asdfqwerfxcoivswqenadfasdfqewpfioutyqwel'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
config.action_controller.session_store = :cookie_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enable page/fragment caching by setting a file-based store
# (remember to create the caching directory and make it readable to the application)
# config.action_controller.fragment_cache_store = :file_store, "#{RAILS_ROOT}/fragment_cache"
config.action_controller.page_cache_directory = "#{RAILS_ROOT}/cache"
# Activate observers that should always be running
config.active_record.observers = :user_action_observer
# Make Active Record use UTC-base instead of local time
config.active_record.default_timezone = :utc
# config.time_zone = 'UTC'
# Set the default field error proc
config.action_view.field_error_proc = Proc.new do |html, instance|
if html !~ /label/
%{<div class="error-with-field">#{html} <small class="error">• #{[instance.error_message].flatten.first}</small></div>}
else
html
end
end
config.after_initialize do
# Add new inflection rules using the following format:
ActiveSupport::Inflector.inflections do |inflect|
inflect.uncountable 'config'
end
# Auto-require text filters
Dir["#{RADIANT_ROOT}/app/models/*_filter.rb"].each do |filter|
require_dependency File.basename(filter).sub(/\.rb$/, '')
end
# Response Caching Defaults
ResponseCache.defaults[:directory] = ActionController::Base.page_cache_directory
ResponseCache.defaults[:logger] = ActionController::Base.logger
end
end
Radiant::Config['mailer.post_to_page?'] = true
Enable action mailer.
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
require File.join(File.dirname(__FILE__), 'boot')
require 'radius'
Radiant::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
#config.frameworks -= [ :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Only load the extensions named here, in the order given. By default all
# extensions in vendor/extensions are loaded, in alphabetical order. :all
# can be used as a placeholder for all extensions not explicitly named.
# config.extensions = [ :all ]
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_radiant_session',
:secret => 'asdfqwerfxcoivswqenadfasdfqewpfioutyqwel'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
config.action_controller.session_store = :cookie_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enable page/fragment caching by setting a file-based store
# (remember to create the caching directory and make it readable to the application)
# config.action_controller.fragment_cache_store = :file_store, "#{RAILS_ROOT}/fragment_cache"
config.action_controller.page_cache_directory = "#{RAILS_ROOT}/cache"
# Activate observers that should always be running
config.active_record.observers = :user_action_observer
# Make Active Record use UTC-base instead of local time
config.active_record.default_timezone = :utc
# config.time_zone = 'UTC'
# Set the default field error proc
config.action_view.field_error_proc = Proc.new do |html, instance|
if html !~ /label/
%{<div class="error-with-field">#{html} <small class="error">• #{[instance.error_message].flatten.first}</small></div>}
else
html
end
end
config.after_initialize do
# Add new inflection rules using the following format:
ActiveSupport::Inflector.inflections do |inflect|
inflect.uncountable 'config'
end
# Auto-require text filters
Dir["#{RADIANT_ROOT}/app/models/*_filter.rb"].each do |filter|
require_dependency File.basename(filter).sub(/\.rb$/, '')
end
# Response Caching Defaults
ResponseCache.defaults[:directory] = ActionController::Base.page_cache_directory
ResponseCache.defaults[:logger] = ActionController::Base.logger
end
end
Radiant::Config['mailer.post_to_page?'] = true |
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
# RAILS_GEM_VERSION = '2.0.2' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
if Gem::VERSION >= "1.3.6"
module Rails
class GemDependency
def requirement
r = super
(r == Gem::Requirement.default) ? nil : r
end
end
end
end
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_lexicon_session',
:secret => '4763b5d1c3ae629e268f1e6098ae8ce4be0606c93c7e8fcdd94d9e0b6021bf343c0dc0ae0d7f00a8e11316bc1599ef1f2333f9d5050291f174c0e432736faac0'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
# config.action_controller.session_store = :active_record_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector
# Make Active Record use UTC-base instead of local time
# config.active_record.default_timezone = :utc
config.gem 'mislav-will_paginate', :version => '~> 2.3.8', :lib => 'will_paginate',
:source => 'http://gems.github.com'
end
Fix deprecation warnings: rename session option :session_key to :key
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
# RAILS_GEM_VERSION = '2.0.2' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
if Gem::VERSION >= "1.3.6"
module Rails
class GemDependency
def requirement
r = super
(r == Gem::Requirement.default) ? nil : r
end
end
end
end
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:key => '_lexicon_session',
:secret => '4763b5d1c3ae629e268f1e6098ae8ce4be0606c93c7e8fcdd94d9e0b6021bf343c0dc0ae0d7f00a8e11316bc1599ef1f2333f9d5050291f174c0e432736faac0'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
# config.action_controller.session_store = :active_record_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector
# Make Active Record use UTC-base instead of local time
# config.active_record.default_timezone = :utc
config.gem 'mislav-will_paginate', :version => '~> 2.3.8', :lib => 'will_paginate',
:source => 'http://gems.github.com'
end |
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.10' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
# Specify gems that this application depends on and have them installed with rake gems:install
config.gem 'aasm', :version => '2.2.0'
config.gem 'authlogic', :version => '2.1.6'
config.gem 'super_exception_notifier', :version => '3.0.13', :lib => "exception_notification"
config.gem 'json', :version => '1.5.1'
if ['development', 'test'].include? RAILS_ENV
config.gem 'factory_girl', :version => '1.3.3', :lib => false
config.gem 'rspec-rails', :version => '1.3.3', :lib => false unless File.directory?(File.join(Rails.root, 'vendor/plugins/rspec-rails'))
end
# Libraries
require 'digest/md5'
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names.
config.time_zone = 'UTC'
# Load custom libraries before "config/initializers" run.
$LOAD_PATH.unshift("#{RAILS_ROOT}/lib")
end
Remove obsolete code from when rspec-rails was vendored.
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.10' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
# Specify gems that this application depends on and have them installed with rake gems:install
config.gem 'aasm', :version => '2.2.0'
config.gem 'authlogic', :version => '2.1.6'
config.gem 'super_exception_notifier', :version => '3.0.13', :lib => "exception_notification"
config.gem 'json', :version => '1.5.1'
if ['development', 'test'].include? RAILS_ENV
config.gem 'factory_girl', :version => '1.3.3', :lib => false
config.gem 'rspec-rails', :version => '1.3.3', :lib => false
end
# Libraries
require 'digest/md5'
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names.
config.time_zone = 'UTC'
# Load custom libraries before "config/initializers" run.
$LOAD_PATH.unshift("#{RAILS_ROOT}/lib")
end
|
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.10' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
# rails 3 requires i18n gem which will load
# a version incompatible with 2.3.8.
# It will generate several of ...
# The {{key}} interpolation syntax in I18n
# messages is deprecated. Please use %{key} instead.
# This must be called early, or someone else will load a newer version.
config.gem "i18n", :version => '=0.3.7'
config.gem 'jakewendt-calnet_authenticated',
:lib => 'calnet_authenticated'
config.gem 'ryanb-acts-as-list',
:lib => 'acts_as_list'
config.plugin_paths = [
File.expand_path(File.join(File.dirname(__FILE__),'../..')),
File.expand_path(File.join(File.dirname(__FILE__),'../../..','peter'))
]
config.plugins = [ :authorized, :html_test, :html_test_extension]
config.frameworks -= [:active_resource]
config.routes_configuration_file = File.expand_path(
File.join(File.dirname(__FILE__),'..','test/config/routes.rb'))
config.autoload_paths += [
File.expand_path(
File.join(File.dirname(__FILE__),'..','test/app/models')),
File.expand_path(
File.join(File.dirname(__FILE__),'..','test/app/controllers'))
]
# config.eager_load_paths += [
# File.expand_path(
# File.join(File.dirname(__FILE__),'..','test/app/models')),
# File.expand_path(
# File.join(File.dirname(__FILE__),'..','test/app/controllers'))
# ]
#
# config.controller_paths += [
# File.expand_path(
# File.join(File.dirname(__FILE__),'..','test/app/controllers'))
# ]
config.view_path = [
File.expand_path(
File.join(File.dirname(__FILE__),'..','test/app/views'))
]
if RUBY_PLATFORM =~ /java/
# I'm surprised that I don't need this in my apps.
config.gem 'activerecord-jdbcsqlite3-adapter',
:lib => 'active_record/connection_adapters/jdbcsqlite3_adapter',
:version => '~>0.9'
# 1.0.1 is for rails 3 I think
config.gem 'jdbc-sqlite3', :lib => 'jdbc/sqlite3'
config.gem 'jruby-openssl', :lib => 'openssl'
else
config.gem "sqlite3-ruby", :lib => "sqlite3"
end
config.action_mailer.default_url_options = {
:host => "localhost:3000" }
end
require 'user'
Removed some stuff from config/environment.
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.10' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
# rails 3 requires i18n gem which will load
# a version incompatible with 2.3.8.
# It will generate several of ...
# The {{key}} interpolation syntax in I18n
# messages is deprecated. Please use %{key} instead.
# This must be called early, or someone else will load a newer version.
config.gem "i18n", :version => '=0.3.7'
config.gem 'jakewendt-calnet_authenticated',
:lib => 'calnet_authenticated'
config.gem 'ryanb-acts-as-list',
:lib => 'acts_as_list'
config.plugin_paths = [
File.expand_path(File.join(File.dirname(__FILE__),'../..')),
File.expand_path(File.join(File.dirname(__FILE__),'../../..','peter'))
]
config.plugins = [ :authorized, :html_test, :html_test_extension]
config.frameworks -= [:active_resource]
config.routes_configuration_file = File.expand_path(
File.join(File.dirname(__FILE__),'..','test/config/routes.rb'))
config.autoload_paths += [
File.expand_path(
File.join(File.dirname(__FILE__),'..','test/app/models')),
File.expand_path(
File.join(File.dirname(__FILE__),'..','test/app/controllers'))
]
# config.eager_load_paths += [
# File.expand_path(
# File.join(File.dirname(__FILE__),'..','test/app/models')),
# File.expand_path(
# File.join(File.dirname(__FILE__),'..','test/app/controllers'))
# ]
#
# config.controller_paths += [
# File.expand_path(
# File.join(File.dirname(__FILE__),'..','test/app/controllers'))
# ]
config.view_path = [
File.expand_path(
File.join(File.dirname(__FILE__),'..','test/app/views'))
]
if RUBY_PLATFORM =~ /java/
# I'm surprised that I don't need this in my apps.
config.gem 'activerecord-jdbcsqlite3-adapter',
:lib => 'active_record/connection_adapters/jdbcsqlite3_adapter',
:version => '~>0.9'
# 1.0.1 is for rails 3 I think
config.gem 'jdbc-sqlite3', :lib => 'jdbc/sqlite3'
config.gem 'jruby-openssl', :lib => 'openssl'
else
config.gem "sqlite3-ruby", :lib => "sqlite3"
end
# config.action_mailer.default_url_options = {
# :host => "localhost:3000" }
end
require 'user'
|
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.5' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
# Load Prey Fetcher settings
require File.join(File.dirname(__FILE__), 'prey_fetcher')
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Specify gems that this application depends on and have them installed with rake gems:install
# config.gem "bj"
# config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
# config.gem "sqlite3-ruby", :lib => "sqlite3"
# config.gem "aws-s3", :lib => "aws/s3"
config.gem 'haml'
config.gem 'soauth', :lib => false
config.gem 'twitter-login', :lib => 'twitter/login'
config.gem 'typhoeus', :lib => false
config.gem 'whenever', :lib => false
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Skip frameworks you're not going to use. To use Rails without a database,
# you must remove the Active Record framework.
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names.
config.time_zone = 'UTC'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}')]
# config.i18n.default_locale = :de
# Middleware to load
config.middleware.use "Twitter::Login", :consumer_key => OAUTH_SETTINGS['consumer_key'], :secret => OAUTH_SETTINGS['consumer_secret']
HOST = "0.0.0.0:3000"
end
Update required gems
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.5' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
# Load Prey Fetcher settings
require File.join(File.dirname(__FILE__), 'prey_fetcher')
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Specify gems that this application depends on and have them installed with rake gems:install
# config.gem "bj"
# config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
# config.gem "sqlite3-ruby", :lib => "sqlite3"
# config.gem "aws-s3", :lib => "aws/s3"
config.gem 'haml'
config.gem 'fastprowl', :lib => false
config.gem 'soauth', :lib => false
config.gem 'twitter', :lib => false
config.gem 'twitter-login', :lib => 'twitter/login'
config.gem 'typhoeus', :lib => false
config.gem 'whenever', :lib => false
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Skip frameworks you're not going to use. To use Rails without a database,
# you must remove the Active Record framework.
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names.
config.time_zone = 'UTC'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}')]
# config.i18n.default_locale = :de
# Middleware to load
config.middleware.use "Twitter::Login", :consumer_key => OAUTH_SETTINGS['consumer_key'], :secret => OAUTH_SETTINGS['consumer_secret']
HOST = "0.0.0.0:3000"
end
|
RAILS_GEM_VERSION = '2.2.2' unless defined? RAILS_GEM_VERSION
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
config_file_path = File.join(RAILS_ROOT, *%w(config settings.yml))
if File.exist?(config_file_path)
config = YAML.load_file(config_file_path)
APP_CONFIG = config.has_key?(RAILS_ENV) ? config[RAILS_ENV] : {}
else
puts "WARNING: configuration file #{config_file_path} not found."
APP_CONFIG = {}
end
DEFAULT_HOST = APP_CONFIG[:default_host] || "localhost:3000"
Rails::Initializer.run do |config|
config.gem "haml", :version => '>=2.0.6'
config.gem "fastercsv"
config.gem 'thoughtbot-factory_girl', :lib => 'factory_girl', :source => 'http://gems.github.com'
config.gem "rubyist-aasm", :lib => "aasm", :version => '>=2.0.5', :source => 'http://gems.github.com'
config.gem 'mislav-will_paginate', :lib => 'will_paginate', :source => 'http://gems.github.com/'
config.time_zone = 'UTC'
config.load_paths += %W( #{RAILS_ROOT}/app/sweepers )
DEFAULT_SECRET = "552e024ba5bbf493d1ae37aacb875359804da2f1002fa908f304c7b0746ef9ab67875b69e66361eb9484fc0308cabdced715f7e97f02395874934d401a07d3e0"
secret = APP_CONFIG[:action_controller][:session][:secret] rescue DEFAULT_SECRET
config.action_controller.session = { :session_key => '_spotus_session', :secret => secret }
end
Add mirror subdomains for the main site
RAILS_GEM_VERSION = '2.2.2' unless defined? RAILS_GEM_VERSION
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
config_file_path = File.join(RAILS_ROOT, *%w(config settings.yml))
if File.exist?(config_file_path)
config = YAML.load_file(config_file_path)
APP_CONFIG = config.has_key?(RAILS_ENV) ? config[RAILS_ENV] : {}
else
puts "WARNING: configuration file #{config_file_path} not found."
APP_CONFIG = {}
end
DEFAULT_HOST = APP_CONFIG[:default_host] || "localhost:3000"
Rails::Initializer.run do |config|
config.gem "haml", :version => '>=2.0.6'
config.gem "fastercsv"
config.gem 'thoughtbot-factory_girl', :lib => 'factory_girl', :source => 'http://gems.github.com'
config.gem "rubyist-aasm", :lib => "aasm", :version => '>=2.0.5', :source => 'http://gems.github.com'
config.gem 'mislav-will_paginate', :lib => 'will_paginate', :source => 'http://gems.github.com/'
config.time_zone = 'UTC'
config.load_paths += %W( #{RAILS_ROOT}/app/sweepers )
DEFAULT_SECRET = "552e024ba5bbf493d1ae37aacb875359804da2f1002fa908f304c7b0746ef9ab67875b69e66361eb9484fc0308cabdced715f7e97f02395874934d401a07d3e0"
secret = APP_CONFIG[:action_controller][:session][:secret] rescue DEFAULT_SECRET
config.action_controller.session = { :session_key => '_spotus_session', :secret => secret }
end
# These are the subdomains that will be equivalent to no subdomain
SubdomainFu.mirrors = ["www", "spotus"]
|
$environment ||= (ENV['RACK_ENV'] || :development).to_sym
$token = ENV['API_TOKEN'] || 'billbo'
# Include lib
%w{app lib config}.each do |dir|
$: << File.expand_path("../../#{dir}", __FILE__)
end
# Bundle (gems)
require 'boot'
I18n.config.enforce_available_locales = true
# Require needed active support bits
require 'active_support/core_ext/integer'
# Load Environment variables from env files
Dotenv.load(
File.expand_path("../../.env.#{$environment}", __FILE__),
File.expand_path('../../.env', __FILE__)
)
# Configuration
require 'configuration_service'
require 'configuration'
Configuration.from_env
# Connect to database
Configuration.db = Sequel.connect(Configuration.database_url)
# Serialize models into JSON
Sequel::Model.plugin :json_serializer
# Configure Stripe
Stripe.api_key = Configuration.stripe_secret_key
Stripe.api_version = '2019-12-03'
# Configure Shrimp
Shrimp.configure do |config|
config.format = 'A4'
config.zoom = 1
config.orientation = 'portrait'
end
# Configure Money
Money.locale_backend = :currency
Money.default_bank = EuCentralBank.new
Money.rounding_mode= BigDecimal::ROUND_HALF_EVEN
# Configure Timeouts for VIES checks
{
open_timeout: 10,
read_timeout: 10
}.each do |key, d|
Valvat::Lookup.client.globals[key] = d
end
# Configure Rumor
require 'rumor/async/sucker_punch'
# DB schema
require 'schema'
# Models
require 'invoice'
# Invoice generation.
require 'invoice_file_uploader'
require 'invoice_cloud_uploader'
# Services
require 'vat_service'
require 'stripe_service'
require 'invoice_service'
require 'pdf_service'
# The Apis
require 'base'
require 'hooks'
require 'app'
# Load plugins
require 'plugins/sentry' if Configuration.sentry?
require 'plugins/segmentio' if Configuration.segmentio?
require 'plugins/s3' if Configuration.s3?
# The Rack app
require 'rack_app'
# Cronjob
require 'job'
# Preload and validate configuration
Configuration.preload
raise 'configuration not valid' unless Configuration.valid?
# Disconnect before forking.
Configuration.db.disconnect
Add Money.default_currency configuration (fixes deprecation warning)
$environment ||= (ENV['RACK_ENV'] || :development).to_sym
$token = ENV['API_TOKEN'] || 'billbo'
# Include lib
%w{app lib config}.each do |dir|
$: << File.expand_path("../../#{dir}", __FILE__)
end
# Bundle (gems)
require 'boot'
I18n.config.enforce_available_locales = true
# Require needed active support bits
require 'active_support/core_ext/integer'
# Load Environment variables from env files
Dotenv.load(
File.expand_path("../../.env.#{$environment}", __FILE__),
File.expand_path('../../.env', __FILE__)
)
# Configuration
require 'configuration_service'
require 'configuration'
Configuration.from_env
# Connect to database
Configuration.db = Sequel.connect(Configuration.database_url)
# Serialize models into JSON
Sequel::Model.plugin :json_serializer
# Configure Stripe
Stripe.api_key = Configuration.stripe_secret_key
Stripe.api_version = '2019-12-03'
# Configure Shrimp
Shrimp.configure do |config|
config.format = 'A4'
config.zoom = 1
config.orientation = 'portrait'
end
# Configure Money
Money.locale_backend = :currency
Money.default_bank = EuCentralBank.new
Money.rounding_mode = BigDecimal::ROUND_HALF_EVEN
Money.default_currency = Configuration.default_currency
# Configure Timeouts for VIES checks
{
open_timeout: 10,
read_timeout: 10
}.each do |key, d|
Valvat::Lookup.client.globals[key] = d
end
# Configure Rumor
require 'rumor/async/sucker_punch'
# DB schema
require 'schema'
# Models
require 'invoice'
# Invoice generation.
require 'invoice_file_uploader'
require 'invoice_cloud_uploader'
# Services
require 'vat_service'
require 'stripe_service'
require 'invoice_service'
require 'pdf_service'
# The Apis
require 'base'
require 'hooks'
require 'app'
# Load plugins
require 'plugins/sentry' if Configuration.sentry?
require 'plugins/segmentio' if Configuration.segmentio?
require 'plugins/s3' if Configuration.s3?
# The Rack app
require 'rack_app'
# Cronjob
require 'job'
# Preload and validate configuration
Configuration.preload
raise 'configuration not valid' unless Configuration.valid?
# Disconnect before forking.
Configuration.db.disconnect
|
# Be sure to restart your server when you modify this file
# environment.rb for a Rails 2.3 application: pins the Rails gem version,
# boots the framework, declares gem dependencies (RDF stack, parsers,
# geocoding), and defines application-wide API-key constants.
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.2' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Specify gems that this application depends on and have them installed with rake gems:install
# RDF support: the activerdf gem's require name differs from the gem name
# (:lib => 'active_rdf'); :lib => false means the gem is installed/verified
# but no file is required at boot.
config.gem "activerdf", :lib => 'active_rdf'
config.gem "activerdf_sparql", :lib => false
config.gem "nokogiri"
config.gem "graticule"
config.gem "reddy"
# config.gem "bj"
# config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
# config.gem "sqlite3-ruby", :lib => "sqlite3"
# config.gem "aws-s3", :lib => "aws/s3"
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Skip frameworks you're not going to use. To use Rails without a database,
# you must remove the Active Record framework.
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names.
config.time_zone = 'UTC'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}')]
# config.i18n.default_locale = :de
end
#require 'zipcode'
# NOTE(review): third-party API keys are committed in source control.
# Consider moving them to environment variables or an untracked config file.
SUNLIGHT_API_KEY = '4882ac4cb40d0ef34e4ee6576a63e5c7'
GOOGLE_MAPS_API_KEY = 'ABQIAAAAYCj92a1XA1huILESx2GjSxRi4oveorZe7mwxtUZRjxycUeOp9xTJXvhqFw-0v0c7PCORiYak8XdvMA'
removing reddy
# environment.rb for a Rails 2.3 application (revision with the "reddy"
# gem dependency removed): boots the framework, declares gem dependencies,
# and defines application-wide API-key constants.
# Be sure to restart your server when you modify this file
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.3.2' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Specify gems that this application depends on and have them installed with rake gems:install
# :lib => 'active_rdf' because the require name differs from the gem name;
# :lib => false installs/verifies the gem without requiring a file at boot.
config.gem "activerdf", :lib => 'active_rdf'
config.gem "activerdf_sparql", :lib => false
config.gem "nokogiri"
config.gem "graticule"
# config.gem "bj"
# config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
# config.gem "sqlite3-ruby", :lib => "sqlite3"
# config.gem "aws-s3", :lib => "aws/s3"
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Skip frameworks you're not going to use. To use Rails without a database,
# you must remove the Active Record framework.
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names.
config.time_zone = 'UTC'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}')]
# config.i18n.default_locale = :de
end
#require 'zipcode'
# NOTE(review): third-party API keys are committed in source control.
# Consider moving them to environment variables or an untracked config file.
SUNLIGHT_API_KEY = '4882ac4cb40d0ef34e4ee6576a63e5c7'
GOOGLE_MAPS_API_KEY = 'ABQIAAAAYCj92a1XA1huILESx2GjSxRi4oveorZe7mwxtUZRjxycUeOp9xTJXvhqFw-0v0c7PCORiYak8XdvMA'
|
# Radiant CMS environment configuration (Rails 2.x style initializer).
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
require File.join(File.dirname(__FILE__), 'boot')
require 'radius'
Radiant::Initializer.run do |config|
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
config.frameworks -= [ :action_mailer ]
# Load the settings extension first, then all remaining extensions.
# FIX: the previous value [ :settings ] omitted the :all placeholder and
# therefore silently disabled every other extension in vendor/extensions.
config.extensions = [ :settings, :all ]
# Only load the extensions named here, in the order given. By default all
# extensions in vendor/extensions are loaded, in alphabetical order. :all
# can be used as a placeholder for all extensions not explicitly named.
# config.extensions = [ :all ]
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_ignite_session',
:secret => 'e951fe6bf094201eee45a8d38ea6dae3353f6115'
}
# Comment out this line if you want to turn off all caching, or
# add options to modify the behavior. In the majority of deployment
# scenarios it is desirable to leave Radiant's cache enabled and in
# the default configuration.
#
# Additional options:
# :use_x_sendfile => true
# Turns on X-Sendfile support for Apache with mod_xsendfile or lighttpd.
# :use_x_accel_redirect => '/some/virtual/path'
# Turns on X-Accel-Redirect support for nginx. You have to provide
# a path that corresponds to a virtual location in your webserver
# configuration.
# :entitystore => "radiant:tmp/cache/entity"
# Sets the entity store type (preceding the colon) and storage
# location (following the colon, relative to Rails.root).
# We recommend you use radiant: since this will enable manual expiration.
# :metastore => "radiant:tmp/cache/meta"
# Sets the meta store type and storage location. We recommend you use
# radiant: since this will enable manual expiration and acceleration headers.
config.middleware.use ::Radiant::Cache
# Session storage: cookie-based (the default). Note cookies shouldn't be
# used to store highly confidential information; switch to
# :active_record_store and 'rake db:sessions:create' if that changes.
config.action_controller.session_store = :cookie_store
# Activate observers that should always be running
config.active_record.observers = :user_action_observer
# Make Active Record use UTC-base instead of local time
config.time_zone = 'UTC'
# Set the default field error proc: wrap invalid fields (but not their
# labels) in an error div with the first validation message inline.
config.action_view.field_error_proc = Proc.new do |html, instance|
if html !~ /label/
%{<div class="error-with-field">#{html} <small class="error">&bull; #{[instance.error_message].flatten.first}</small></div>}
else
html
end
end
config.after_initialize do
# Add new inflection rules using the following format:
ActiveSupport::Inflector.inflections do |inflect|
inflect.uncountable 'config'
end
end
end
work on config
# Radiant CMS environment configuration: loads the settings extension
# first, then all remaining extensions in vendor/extensions.
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
require File.join(File.dirname(__FILE__), 'boot')
require 'radius'
Radiant::Initializer.run do |config|
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
config.frameworks -= [ :action_mailer ]
config.extensions = [ :settings, :all ]
# Only load the extensions named here, in the order given. By default all
# extensions in vendor/extensions are loaded, in alphabetical order. :all
# can be used as a placeholder for all extensions not explicitly named.
# config.extensions = [ :all ]
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_ignite_session',
:secret => 'e951fe6bf094201eee45a8d38ea6dae3353f6115'
}
# Comment out this line if you want to turn off all caching, or
# add options to modify the behavior. In the majority of deployment
# scenarios it is desirable to leave Radiant's cache enabled and in
# the default configuration.
#
# Additional options:
# :use_x_sendfile => true
# Turns on X-Sendfile support for Apache with mod_xsendfile or lighttpd.
# :use_x_accel_redirect => '/some/virtual/path'
# Turns on X-Accel-Redirect support for nginx. You have to provide
# a path that corresponds to a virtual location in your webserver
# configuration.
# :entitystore => "radiant:tmp/cache/entity"
# Sets the entity store type (preceding the colon) and storage
# location (following the colon, relative to Rails.root).
# We recommend you use radiant: since this will enable manual expiration.
# :metastore => "radiant:tmp/cache/meta"
# Sets the meta store type and storage location. We recommend you use
# radiant: since this will enable manual expiration and acceleration headers.
config.middleware.use ::Radiant::Cache
# Session storage: cookie-based (the default). Note cookies shouldn't be
# used to store highly confidential information; switch to
# :active_record_store and 'rake db:sessions:create' if that changes.
config.action_controller.session_store = :cookie_store
# Activate observers that should always be running
config.active_record.observers = :user_action_observer
# Make Active Record use UTC-base instead of local time
config.time_zone = 'UTC'
# Set the default field error proc: wrap invalid fields (but not their
# labels) in an error div with the first validation message inline.
config.action_view.field_error_proc = Proc.new do |html, instance|
if html !~ /label/
%{<div class="error-with-field">#{html} <small class="error">&bull; #{[instance.error_message].flatten.first}</small></div>}
else
html
end
end
config.after_initialize do
# Add new inflection rules using the following format:
ActiveSupport::Inflector.inflections do |inflect|
inflect.uncountable 'config'
end
end
# FIX: removed a stray " |" artifact that was fused onto the closing
# "end" and made the file a syntax error.
end
# Load the Rails application.
require File.expand_path('../application', __FILE__)
# Initialize the Rails application.
Rails.application.initialize!
# Deliver outgoing mail through SendGrid's SMTP relay; credentials come
# from the environment (as provided by the Heroku SendGrid add-on).
ActionMailer::Base.smtp_settings = {
:address => 'smtp.sendgrid.net',
:port => '587',
:authentication => :plain,
# FIX: env var names were misspelled SENDGIRD_*, so both credentials
# resolved to nil and SMTP auth always failed.
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => 'heroku.com',
# FIX: option was misspelled :enable_startstls_auto; the misspelled key
# is ignored by ActionMailer, so STARTTLS was never negotiated and
# credentials went over the wire unencrypted.
:enable_starttls_auto => true
}
fix typo
# Load the Rails application.
require File.expand_path('../application', __FILE__)
# Initialize the Rails application.
Rails.application.initialize!
# Deliver outgoing mail through SendGrid's SMTP relay; credentials come
# from the environment (as provided by the Heroku SendGrid add-on).
ActionMailer::Base.smtp_settings = {
:address => 'smtp.sendgrid.net',
:port => '587',
:authentication => :plain,
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => 'heroku.com',
# FIX: option was misspelled :enable_startstls_auto; ActionMailer ignores
# unknown keys, so STARTTLS was never negotiated and credentials went
# over the wire unencrypted. Also removed a stray " |" artifact that was
# fused onto the closing brace.
:enable_starttls_auto => true
}
# FactoryGirl factories for email objects used in inbound-receipt parsing
# specs. :email builds a plain OpenStruct (no DB); :inbound_email builds
# the persisted model. Traits override sender/subject per grocery vendor.
FactoryGirl.define do
factory :email, class: OpenStruct do
to ["my-pushcart-address@#{EMAIL_URI}"]
from 'notices@some_grocery_email.com'
subject 'email subject'
body 'Hello!'
attachments {[]}
trait :fresh_direct_receipt_one do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
# NOTE(review): the fixture bodies below are commented out, so these
# traits currently yield emails without raw_html/raw_text — confirm
# this is intentional before relying on them in specs.
# raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
# raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
end
trait :fresh_direct_receipt_two do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
# raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
# raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
end
trait :instacart_receipt do
from 'orders@instacart.com'
subject 'Fwd: Your Order with Instacart'
# raw_html File.read(Rails.root.to_s + '/lib/sample_emails/instacart/instacart_receipt.eml')
end
trait :peapod_receipt_one do
from 'yourfriends@peapod.com'
subject 'Peapod by Stop & Shop Order Confirmation j50570360'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/peapod/receipt_one.eml')
end
trait :peapod_receipt_two do
from 'yourfriends@peapod.com'
subject 'Fwd: Peapod by Stop & Shop Order Confirmation j49123888'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/peapod/receipt_two.eml')
end
trait :peapod_receipt_three do
from 'yourfriends@peapod.com'
subject 'Fwd: Peapod by Stop & Shop Order Confirmation j48340159'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/peapod/receipt_three.eml')
end
end
# Persisted inbound email; `user` builds the associated user factory.
factory :inbound_email do
user
to ["my-pushcart-address@#{EMAIL_URI}"]
from 'notices@some_grocery_email.com'
subject 'email subject'
raw_html ''
raw_text ''
trait :fresh_direct_receipt_one do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
# raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
# raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
end
trait :fresh_direct_receipt_two do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
# raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
# raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
end
trait :instacart_receipt do
from 'orders@instacart.com'
subject 'Fwd: Your Order with Instacart'
# raw_html File.read(Rails.root.to_s + '/lib/sample_emails/instacart/instacart_receipt.eml')
end
end
end
uncommenting comments
# FactoryGirl factories for email objects used in inbound-receipt parsing
# specs. :email builds a plain OpenStruct (no DB); :inbound_email builds
# the persisted model. Traits set per-vendor sender/subject and load raw
# fixture bodies from lib/sample_emails at factory-evaluation time.
FactoryGirl.define do
factory :email, class: OpenStruct do
to ["my-pushcart-address@#{EMAIL_URI}"]
from 'notices@some_grocery_email.com'
subject 'email subject'
body 'Hello!'
attachments {[]}
trait :fresh_direct_receipt_one do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
end
trait :fresh_direct_receipt_two do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
end
trait :instacart_receipt do
from 'orders@instacart.com'
subject 'Fwd: Your Order with Instacart'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/instacart/instacart_receipt.eml')
end
trait :peapod_receipt_one do
from 'yourfriends@peapod.com'
subject 'Peapod by Stop & Shop Order Confirmation j50570360'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/peapod/receipt_one.eml')
end
trait :peapod_receipt_two do
from 'yourfriends@peapod.com'
subject 'Fwd: Peapod by Stop & Shop Order Confirmation j49123888'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/peapod/receipt_two.eml')
end
trait :peapod_receipt_three do
from 'yourfriends@peapod.com'
subject 'Fwd: Peapod by Stop & Shop Order Confirmation j48340159'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/peapod/receipt_three.eml')
end
end
# Persisted inbound email; `user` builds the associated user factory.
factory :inbound_email do
user
to ["my-pushcart-address@#{EMAIL_URI}"]
from 'notices@some_grocery_email.com'
subject 'email subject'
raw_html ''
raw_text ''
trait :fresh_direct_receipt_one do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_one.eml')
end
trait :fresh_direct_receipt_two do
from 'receipt@freshdirect.com'
subject 'Your order for Sunday, Jan 26 2014'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
raw_text File.read(Rails.root.to_s + '/lib/sample_emails/fresh_direct/receipt_two.eml')
end
trait :instacart_receipt do
from 'orders@instacart.com'
subject 'Fwd: Your Order with Instacart'
raw_html File.read(Rails.root.to_s + '/lib/sample_emails/instacart/instacart_receipt.eml')
end
end
end
|
# Calagator environment.rb (Rails 2.1): pins the Rails version, declares
# gem dependencies, configures sessions/observers, adds app/mixins to the
# load path, and sets up a per-environment file-store cache directory.
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.1.0' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
config.gem "htmlentities"
config.gem "vpim"
config.gem "lucene_query"
config.gem "rubyzip", :lib => "zip/zip"
config.gem "has_many_polymorphs"
config.time_zone = "Pacific Time (US & Canada)"
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_calagator_session',
:secret => '7da1bbbbda1fbe53f8e845ccb07a0cff6951f9bad8b2cd9a3f80321ac842ffd801a746fecf8fcc2cf495041553be02c39e7cdc6c0a0d9710db19fd7d73a03802'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
# config.action_controller.session_store = :active_record_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector
config.active_record.observers = :janitor_observer
# Make Active Record use UTC-base instead of local time
# config.active_record.default_timezone = :utc
config.load_paths += %W[
#{RAILS_ROOT}/app/mixins
]
# File-store cache under tmp/cache/<env>; the directory is created
# eagerly at boot so the store never hits a missing path.
cache_path = "#{RAILS_ROOT}/tmp/cache/#{RAILS_ENV}"
config.cache_store = :file_store, cache_path
FileUtils.mkdir_p(cache_path)
end
# NOTE: See config/initializers/ directory for additional code loaded at start-up
Updated rails gem version.
git-svn-id: 8ae0a6b396effb9e2d04f216330b631e689e8608@1247 54c6411d-1744-0410-a9eb-b99409f43359
# Calagator environment.rb (revision: Rails version loosened to '> 2.1.0'
# and hpricot added): declares gem dependencies, configures sessions and
# observers, extends the load path, and sets up the file-store cache.
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '> 2.1.0' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
Rails::Initializer.run do |config|
config.gem "htmlentities"
config.gem "vpim"
config.gem "lucene_query"
config.gem "rubyzip", :lib => "zip/zip"
config.gem "has_many_polymorphs"
config.gem "hpricot"
config.time_zone = "Pacific Time (US & Canada)"
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Skip frameworks you're not going to use (only works if using vendor/rails).
# To use Rails without a database, you must remove the Active Record framework
# config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
# Only load the plugins named here, in the order given. By default, all plugins
# in vendor/plugins are loaded in alphabetical order.
# :all can be used as a placeholder for all plugins not explicitly named
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Add additional load paths for your own custom dirs
# config.load_paths += %W( #{RAILS_ROOT}/extras )
# Force all environments to use the same logger level
# (by default production uses :info, the others :debug)
# config.log_level = :debug
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.action_controller.session = {
:session_key => '_calagator_session',
:secret => '7da1bbbbda1fbe53f8e845ccb07a0cff6951f9bad8b2cd9a3f80321ac842ffd801a746fecf8fcc2cf495041553be02c39e7cdc6c0a0d9710db19fd7d73a03802'
}
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with 'rake db:sessions:create')
# config.action_controller.session_store = :active_record_store
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Activate observers that should always be running
# config.active_record.observers = :cacher, :garbage_collector
config.active_record.observers = :janitor_observer
# Make Active Record use UTC-base instead of local time
# config.active_record.default_timezone = :utc
config.load_paths += %W[
#{RAILS_ROOT}/app/mixins
]
# File-store cache under tmp/cache/<env>; the directory is created
# eagerly at boot so the store never hits a missing path.
cache_path = "#{RAILS_ROOT}/tmp/cache/#{RAILS_ENV}"
config.cache_store = :file_store, cache_path
FileUtils.mkdir_p(cache_path)
end
# NOTE: See config/initializers/ directory for additional code loaded at start-up
|
# Read about factories at https://github.com/thoughtbot/factory_girl
# {:category => 'life',:name =>'marriage',:description => 'was married', :effect => '{|a,b| a.marry b }'},
# FactoryGirl factories for Event records. :event is the baseline no-op
# event; :marriage inherits from it and overrides name and effect.
# The `effect` attribute stores the block source as a string.
FactoryGirl.define do
factory :event do
name "nothing"
category "personal"
description ""
effect '{|a,b| a,b }'
end
factory :marriage, :parent => :event do
name "marriage"
effect '{|a,b| a.marry b }'
end
end
removed comments
# FactoryGirl factories for Event records.
FactoryGirl.define do
  # Baseline event: a no-op "nothing" occurrence in the personal category.
  # The `effect` attribute stores the block source as a string.
  factory :event do
    category    { "personal" }
    description { "" }
    name        { "nothing" }
    effect      { '{|a,b| a,b }' }
  end

  # Marriage event: inherits everything from :event, overriding only the
  # name and the effect hook.
  factory :marriage, :parent => :event do
    effect { '{|a,b| a.marry b }' }
    name   { "marriage" }
  end
end
|
# Spot.Us environment.rb (Rails 2.3): loads per-environment settings from
# config/settings.yml into APP_CONFIG, declares gem dependencies, and
# configures sessions, cookies, and SubdomainFu.
RAILS_GEM_VERSION = '2.3.2' unless defined? RAILS_GEM_VERSION
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
config_file_path = File.join(RAILS_ROOT, *%w(config settings.yml))
if File.exist?(config_file_path)
config = YAML.load_file(config_file_path)
APP_CONFIG = config.has_key?(RAILS_ENV) ? config[RAILS_ENV] : {}
else
# Missing settings file is tolerated: warn and fall back to an empty hash.
puts "WARNING: configuration file #{config_file_path} not found."
APP_CONFIG = {}
end
DEFAULT_HOST = APP_CONFIG[:default_host] || "spotus.local"
Rails::Initializer.run do |config|
config.gem "haml", :version => '>=2.0.6'
config.gem "fastercsv"
config.gem "mysql", :version => '2.7'
config.gem 'thoughtbot-factory_girl', :lib => 'factory_girl', :source => 'http://gems.github.com'
config.gem "rubyist-aasm", :lib => "aasm", :version => '>=2.0.5', :source => 'http://gems.github.com'
config.gem 'mislav-will_paginate', :lib => 'will_paginate', :version => '>=2.3.7', :source => 'http://gems.github.com/'
config.gem "rspec-rails", :lib => false, :version => "= 1.2.2"
config.gem "rspec", :lib => false, :version => "= 1.2.2"
config.gem "cucumber", :lib => false, :version => "= 0.1.16"
config.gem "webrat", :lib => false, :version => ">= 0.4.4"
config.gem "money", :version => ">=2.1.3"
config.time_zone = 'UTC'
config.load_paths += %W( #{RAILS_ROOT}/app/sweepers )
# NOTE(review): a real secret is committed in source as the fallback, and
# the inline `rescue` modifier below silently swallows ANY error while
# digging into APP_CONFIG — consider explicit nil-checks instead.
DEFAULT_SECRET = "552e024ba5bbf493d1ae37aacb875359804da2f1002fa908f304c7b0746ef9ab67875b69e66361eb9484fc0308cabdced715f7e97f02395874934d401a07d3e0"
secret = APP_CONFIG[:action_controller][:session][:secret] rescue DEFAULT_SECRET
config.action_controller.session = { :session_key => '_spotus_session', :secret => secret }
end
# use this domain for cookies so switching networks doesn't drop cookies
ActionController::Base.session_options[:domain] = DEFAULT_HOST
# These are the sizes of the domain (i.e. 0 for localhost, 1 for something.com)
# for each of your environments
SubdomainFu.tld_sizes = { :development => 1,
:test => 1,
:staging => 2,
:production => 1 }
# These are the subdomains that will be equivalent to no subdomain
SubdomainFu.mirrors = %w(www spotus)
# This is the "preferred mirror" if you would rather show this subdomain
# in the URL than no subdomain at all.
# SubdomainFu.preferred_mirror = "www"
Before removing environment.rb from repo
# Spot.Us environment.rb (revision: mysql gem dependency commented out):
# loads per-environment settings from config/settings.yml into APP_CONFIG,
# declares gem dependencies, and configures sessions, cookies, SubdomainFu.
RAILS_GEM_VERSION = '2.3.2' unless defined? RAILS_GEM_VERSION
require File.join(File.dirname(__FILE__), 'boot')
require 'yaml'
config_file_path = File.join(RAILS_ROOT, *%w(config settings.yml))
if File.exist?(config_file_path)
config = YAML.load_file(config_file_path)
APP_CONFIG = config.has_key?(RAILS_ENV) ? config[RAILS_ENV] : {}
else
# Missing settings file is tolerated: warn and fall back to an empty hash.
puts "WARNING: configuration file #{config_file_path} not found."
APP_CONFIG = {}
end
DEFAULT_HOST = APP_CONFIG[:default_host] || "spotus.local"
Rails::Initializer.run do |config|
config.gem "haml", :version => '>=2.0.6'
config.gem "fastercsv"
#config.gem "mysql", :version => '2.7'
config.gem 'thoughtbot-factory_girl', :lib => 'factory_girl', :source => 'http://gems.github.com'
config.gem "rubyist-aasm", :lib => "aasm", :version => '>=2.0.5', :source => 'http://gems.github.com'
config.gem 'mislav-will_paginate', :lib => 'will_paginate', :version => '>=2.3.7', :source => 'http://gems.github.com/'
config.gem "rspec-rails", :lib => false, :version => "= 1.2.2"
config.gem "rspec", :lib => false, :version => "= 1.2.2"
config.gem "cucumber", :lib => false, :version => "= 0.1.16"
config.gem "webrat", :lib => false, :version => ">= 0.4.4"
config.gem "money", :version => ">=2.1.3"
config.time_zone = 'UTC'
config.load_paths += %W( #{RAILS_ROOT}/app/sweepers )
# NOTE(review): a real secret is committed in source as the fallback, and
# the inline `rescue` modifier below silently swallows ANY error while
# digging into APP_CONFIG — consider explicit nil-checks instead.
DEFAULT_SECRET = "552e024ba5bbf493d1ae37aacb875359804da2f1002fa908f304c7b0746ef9ab67875b69e66361eb9484fc0308cabdced715f7e97f02395874934d401a07d3e0"
secret = APP_CONFIG[:action_controller][:session][:secret] rescue DEFAULT_SECRET
config.action_controller.session = { :session_key => '_spotus_session', :secret => secret }
end
# use this domain for cookies so switching networks doesn't drop cookies
ActionController::Base.session_options[:domain] = DEFAULT_HOST
# These are the sizes of the domain (i.e. 0 for localhost, 1 for something.com)
# for each of your environments
SubdomainFu.tld_sizes = { :development => 1,
:test => 1,
:staging => 2,
:production => 1 }
# These are the subdomains that will be equivalent to no subdomain
SubdomainFu.mirrors = %w(www spotus)
# This is the "preferred mirror" if you would rather show this subdomain
# in the URL than no subdomain at all.
# SubdomainFu.preferred_mirror = "www"
|
# FactoryGirl factories for Event records: baseline events, scheduled and
# multi-day/multi-room variants, suggestion/approval states, IceCube-based
# recurring events, and fixed-date events for sort-order specs.
FactoryGirl.define do
factory :event do |f|
f.name "Eventname"
f.description "Eventdescription"
f.participant_count 15
f.starts_at Time.now
f.ends_at Time.now + 7200
f.is_private false
f.user_id 122
f.rooms { build_list :room, 1 }
end
factory :upcoming_event, :class => Event do
sequence(:name) { |n| "Eventname#{n}" }
description "Eventdescription"
participant_count 15
starts_at DateTime.now.advance(:days => +1)
ends_at DateTime.now.advance(:days => +1, :hours => +1)
is_private true
user_id 122
rooms { build_list :room, 1 }
end
factory :my_upcoming_event, :class => Event do
sequence(:name) { |n| "Eventname#{n}" }
description "Eventdescription"
participant_count 15
starts_at Date.new(9999, 9, 10)
ends_at Date.new(9999, 10, 10)
is_private true
sequence(:user_id) { |id| id }
rooms { build_list :room, 1 }
end
factory :standardEvent, parent: :event, :class => Event do
sequence(:name) { |n| "Party#{n}" }
description "All night long glühwein for free"
participant_count 80
rooms { build_list :room, 3 }
end
factory :scheduledEvent, parent: :event do
starts_at_date (Time.now).strftime("%Y-%m-%d")
ends_at_date (Time.now + 7200).strftime("%Y-%m-%d") # + 2h
starts_at_time (Time.now).strftime("%H:%M:%S")
ends_at_time (Time.now + 7200).strftime("%H:%M:%S")
room_ids ['1']
is_private false
end
factory :event_on_multiple_days_with_multiple_rooms, parent: :scheduledEvent do
ends_at_date (Time.now + 86400).strftime("%Y-%m-%d") # + 24h
ends_at_time (Time.now + 86400).strftime("%H:%M:%S")
room_ids ['1', '2']
end
factory :event_on_one_day_with_multiple_rooms, parent: :scheduledEvent do
ends_at_date (Time.now).strftime("%Y-%m-%d")
ends_at_time (Time.now).strftime("%H:%M:%S")
room_ids ['1', '2']
end
factory :event_on_multiple_days_with_one_room, parent: :scheduledEvent do
ends_at_date (Time.now + 86400).strftime("%Y-%m-%d") # + 24h
ends_at_time (Time.now + 86400).strftime("%H:%M:%S")
rooms { create_list :room, 1 }
end
factory :event_on_one_day_with_one_room, parent: :scheduledEvent do
ends_at_date (Time.now).strftime("%Y-%m-%d")
ends_at_time (Time.now).strftime("%H:%M:%S")
rooms { create_list :room, 1 }
end
factory :event_suggestion, :class => Event do
starts_at Time.now + 1
ends_at Time.now + 2
user_id 122
name 'Test'
description 'Event Suggestion test instance'
participant_count 12
status 'suggested'
rooms { build_list :room, 3 }
end
factory :declined_event_suggestion, parent: :event_suggestion do
status 'rejected_suggestion'
end
trait :with_assignments do
after :create do |event|
FactoryGirl.create_list :task, 2, :event => event
end
end
trait :with_assignments_that_have_attachments do
after :create do |event|
FactoryGirl.create_list :task_with_attachment, 2, :event => event
end
end
factory :event_today, parent: :event do
starts_at DateTime.current.advance(:minutes => +2)
ends_at DateTime.current.advance(:minutes => +3)
end
factory :declined_event, parent: :event_today do
status 'declined'
end
factory :approved_event, parent: :event_today do
status 'approved'
end
# Recurring events build an IceCube schedule spanning starts_at..ends_at.
factory :daily_recurring_event, :class => Event do |f|
f.name "Daily recurring"
f.description "Eventdescription"
f.participant_count 15
f.is_private false
#starts_at = Time.local(2015, 8, 1, 8, 0, 0)
starts_at = Time.now
f.starts_at starts_at
#ends_at = Time.local(2015, 8, 1, 9, 30, 0)
ends_at = starts_at + (60*60)
f.ends_at ends_at
schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
s.add_recurrence_rule(IceCube::Rule.daily)
end
f.schedule schedule
f.rooms { build_list :room, 1 }
end
factory :weekly_recurring_event, :class => Event do |f|
f.name "Weekly recurring"
f.description "Eventdescription"
f.participant_count 15
f.is_private false
# FIX: removed commented-out merge-conflict markers (<<<<<<< HEAD /
# ======= / >>>>>>> dev) that were left behind here; the dev-branch
# Time.now-based values below are the ones in effect.
starts_at = Time.now
f.starts_at starts_at
ends_at = Time.now + 90.minutes
f.ends_at ends_at
schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
s.add_recurrence_rule(IceCube::Rule.weekly)
end
f.schedule schedule
f.rooms { build_list :room, 1 }
end
factory :upcoming_daily_recurring_event, parent: :daily_recurring_event do |f|
starts_at = Time.now + 1.hours
f.starts_at starts_at
ends_at = starts_at + 1.hours
f.ends_at ends_at
schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
s.add_recurrence_rule(IceCube::Rule.daily)
end
f.schedule schedule
end
factory :upcoming_daily_recurring_event2, parent: :daily_recurring_event do |f|
starts_at = Time.now + 90.minutes
f.starts_at starts_at
ends_at = starts_at + 1.hours
f.ends_at ends_at
schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
s.add_recurrence_rule(IceCube::Rule.daily)
end
f.schedule schedule
end
# Fixed far-future dates and prefixed statuses used by sorting specs.
factory :sortEvent1, parent: :event do
name "A1"
starts_at Date.new(2111,1,1)
ends_at Date.new(2333,1,1)
status "AIn Bearbeitung"
end
factory :sortEvent2, parent: :event do
name "Z2"
starts_at Date.new(2112,1,1)
ends_at Date.new(2333,1,1)
status "CIn Bearbeitung"
end
factory :sortEvent3, parent: :event do
name "M3"
starts_at Date.new(2110,1,1)
ends_at Date.new(2111,1,1)
status "BIn Bearbeitung"
end
factory :invalid_event_without_rooms, parent: :event do
room_ids []
rooms []
end
factory :conflictingEvent, parent: :event do
starts_at_date Time.now.strftime("%Y-%m-%d")
ends_at_date (Time.now + 3600).strftime("%Y-%m-%d")
starts_at_time Time.now.strftime("%H:%M:%S")
ends_at_time (Time.now + 3600).strftime("%H:%M:%S")
end
end
removed commented code from merge
FactoryGirl.define do
factory :event do |f|
f.name "Eventname"
f.description "Eventdescription"
f.participant_count 15
f.starts_at Time.now
f.ends_at Time.now + 7200
f.is_private false
f.user_id 122
f.rooms { build_list :room, 1 }
end
factory :upcoming_event, :class => Event do
sequence(:name) { |n| "Eventname#{n}" }
description "Eventdescription"
participant_count 15
starts_at DateTime.now.advance(:days => +1)
ends_at DateTime.now.advance(:days => +1, :hours => +1)
is_private true
user_id 122
rooms { build_list :room, 1 }
end
factory :my_upcoming_event, :class => Event do
sequence(:name) { |n| "Eventname#{n}" }
description "Eventdescription"
participant_count 15
starts_at Date.new(9999, 9, 10)
ends_at Date.new(9999, 10, 10)
is_private true
sequence(:user_id) { |id| id }
rooms { build_list :room, 1 }
end
factory :standardEvent, parent: :event, :class => Event do
sequence(:name) { |n| "Party#{n}" }
description "All night long glühwein for free"
participant_count 80
rooms { build_list :room, 3 }
end
factory :scheduledEvent, parent: :event do
starts_at_date (Time.now).strftime("%Y-%m-%d")
ends_at_date (Time.now + 7200).strftime("%Y-%m-%d") # + 2h
starts_at_time (Time.now).strftime("%H:%M:%S")
ends_at_time (Time.now + 7200).strftime("%H:%M:%S")
room_ids ['1']
is_private false
end
factory :event_on_multiple_days_with_multiple_rooms, parent: :scheduledEvent do
ends_at_date (Time.now + 86400).strftime("%Y-%m-%d") # + 24h
ends_at_time (Time.now + 86400).strftime("%H:%M:%S")
room_ids ['1', '2']
end
factory :event_on_one_day_with_multiple_rooms, parent: :scheduledEvent do
ends_at_date (Time.now).strftime("%Y-%m-%d")
ends_at_time (Time.now).strftime("%H:%M:%S")
room_ids ['1', '2']
end
factory :event_on_multiple_days_with_one_room, parent: :scheduledEvent do
ends_at_date (Time.now + 86400).strftime("%Y-%m-%d") # + 24h
ends_at_time (Time.now + 86400).strftime("%H:%M:%S")
rooms { create_list :room, 1 }
end
factory :event_on_one_day_with_one_room, parent: :scheduledEvent do
ends_at_date (Time.now).strftime("%Y-%m-%d")
ends_at_time (Time.now).strftime("%H:%M:%S")
rooms { create_list :room, 1 }
end
factory :event_suggestion, :class => Event do
starts_at Time.now + 1
ends_at Time.now + 2
user_id 122
name 'Test'
description 'Event Suggestion test instance'
participant_count 12
status 'suggested'
rooms { build_list :room, 3 }
end
factory :declined_event_suggestion, parent: :event_suggestion do
status 'rejected_suggestion'
end
# Attaches two generic tasks to the event once it has been persisted.
trait :with_assignments do
  after(:create) do |created_event|
    FactoryGirl.create_list(:task, 2, :event => created_event)
  end
end
# Attaches two tasks carrying attachments to the event after creation.
trait :with_assignments_that_have_attachments do
  after(:create) do |created_event|
    FactoryGirl.create_list(:task_with_attachment, 2, :event => created_event)
  end
end
# An event starting two minutes from build time (so it happens "today").
# BUG FIX: static attributes captured DateTime.current once at factory
# load; lazy blocks recompute it per build.
factory :event_today, parent: :event do
  starts_at { DateTime.current.advance(:minutes => +2) }
  ends_at { DateTime.current.advance(:minutes => +3) }
end
# Today's event in 'declined' state.
# FIX: deprecated static attribute converted to a lazy block.
factory :declined_event, parent: :event_today do
  status { 'declined' }
end
# Today's event in 'approved' state.
# FIX: deprecated static attribute converted to a lazy block.
factory :approved_event, parent: :event_today do
  status { 'approved' }
end
# Event recurring every day, starting now and lasting one hour.
# NOTE: starts_at/ends_at and the IceCube schedule are computed once, at
# factory *definition* time; the local variables deliberately keep the
# schedule's time span identical to the starts_at/ends_at attributes,
# so every built instance shares the same absolute times.
factory :daily_recurring_event, :class => Event do |f|
  f.name "Daily recurring"
  f.description "Eventdescription"
  f.participant_count 15
  f.is_private false
  #starts_at = Time.local(2015, 8, 1, 8, 0, 0)
  starts_at = Time.now
  f.starts_at starts_at
  #ends_at = Time.local(2015, 8, 1, 9, 30, 0)
  ends_at = starts_at + (60*60)
  f.ends_at ends_at
  # Daily recurrence rule over the event's one-hour span.
  schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
    s.add_recurrence_rule(IceCube::Rule.daily)
  end
  f.schedule schedule
  f.rooms { build_list :room, 1 }
end
# Event recurring weekly; like :daily_recurring_event, times and schedule
# are fixed at definition time.  Note ends_at calls Time.now a second
# time, so it can drift fractionally from starts_at's base instant.
factory :weekly_recurring_event, :class => Event do |f|
  f.name "Weekly recurring"
  f.description "Eventdescription"
  f.participant_count 15
  f.is_private false
  starts_at = Time.now
  f.starts_at starts_at
  ends_at = Time.now + 90.minutes
  f.ends_at ends_at
  # Weekly recurrence rule over the 90-minute span.
  schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
    s.add_recurrence_rule(IceCube::Rule.weekly)
  end
  f.schedule schedule
  f.rooms { build_list :room, 1 }
end
# Daily recurring event whose first occurrence is one hour after the
# factories were loaded (definition-time evaluation, as in the parent).
factory :upcoming_daily_recurring_event, parent: :daily_recurring_event do |f|
  starts_at = Time.now + 1.hours
  f.starts_at starts_at
  ends_at = starts_at + 1.hours
  f.ends_at ends_at
  # Rebuild the schedule so it matches the shifted start/end times.
  schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
    s.add_recurrence_rule(IceCube::Rule.daily)
  end
  f.schedule schedule
end
# Second upcoming daily event, offset 90 minutes from load time so it
# does not coincide with :upcoming_daily_recurring_event.
factory :upcoming_daily_recurring_event2, parent: :daily_recurring_event do |f|
  starts_at = Time.now + 90.minutes
  f.starts_at starts_at
  ends_at = starts_at + 1.hours
  f.ends_at ends_at
  # Rebuild the schedule so it matches the shifted start/end times.
  schedule = IceCube::Schedule.new(starts_at, end_time: ends_at) do |s|
    s.add_recurrence_rule(IceCube::Rule.daily)
  end
  f.schedule schedule
end
# Sort-order fixture: first by name ("A1") and by status ("AIn ...").
# FIX: deprecated static attributes converted to lazy blocks.
factory :sortEvent1, parent: :event do
  name { "A1" }
  starts_at { Date.new(2111, 1, 1) }
  ends_at { Date.new(2333, 1, 1) }
  status { "AIn Bearbeitung" }
end
# Sort-order fixture: last by name ("Z2") and by status ("CIn ...").
# FIX: deprecated static attributes converted to lazy blocks.
factory :sortEvent2, parent: :event do
  name { "Z2" }
  starts_at { Date.new(2112, 1, 1) }
  ends_at { Date.new(2333, 1, 1) }
  status { "CIn Bearbeitung" }
end
# Sort-order fixture: middle by name ("M3"), earliest by start date.
# FIX: deprecated static attributes converted to lazy blocks.
factory :sortEvent3, parent: :event do
  name { "M3" }
  starts_at { Date.new(2110, 1, 1) }
  ends_at { Date.new(2111, 1, 1) }
  status { "BIn Bearbeitung" }
end
# An event with no rooms at all, used to exercise validation failures.
# FIX: deprecated static attributes converted to lazy blocks.
factory :invalid_event_without_rooms, parent: :event do
  room_ids { [] }
  rooms { [] }
end
# An event overlapping "now" for one hour, used to provoke booking
# conflicts.
# BUG FIX: static Time.now attributes were evaluated once at factory
# load; lazy blocks compute the conflict window at build time.
factory :conflictingEvent, parent: :event do
  starts_at_date { Time.now.strftime("%Y-%m-%d") }
  ends_at_date { (Time.now + 3600).strftime("%Y-%m-%d") }
  starts_at_time { Time.now.strftime("%H:%M:%S") }
  ends_at_time { (Time.now + 3600).strftime("%H:%M:%S") }
end
end
|
#
# Cookbook Name:: travel
# Recipe:: default
#
# Copyright 2012, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
#
include_recipe "travel::common"
include_recipe "tomcat"

# Staging directory for the downloaded WAR.
directory node["travel"]["war_directory"] do
  mode "0755"
end

tarball = ::File.join(node["travel"]["war_directory"], node["travel"]["war_name"])
webapp_dir = ::File.join(node["tomcat"]["webapp_dir"], "travel")

# Download the application WAR; on change, unpack it immediately.
remote_file tarball do
  #source "http://repository.cloudifysource.org/org/cloudifysource/2.0.0/travel-mongo-example.war"
  source node["travel"]["war_url"]
  checksum node["travel"]["war_checksum"]
  mode "0644"
  action :create
  notifies :run, "execute[unzip -o #{tarball} -d #{webapp_dir}]", :immediately
end

# Only runs when notified by the remote_file above.
execute "unzip -o #{tarball} -d #{webapp_dir}" do
  notifies :restart, "service[tomcat]"
  action :nothing
end

# BUG FIX: the search query string had a stray trailing quote
# ( ...server\]'' ) which made this line a Ruby syntax error.
mysql_host = search(:node, 'run_list:recipe\[mysql\:\:server\]').first.ipaddress

# Render jdbc.properties pointing the webapp at the discovered MySQL node.
template ::File.join(webapp_dir, "WEB-INF", "classes", "jdbc.properties") do
  mode "0644"
  variables :mysql_host => mysql_host,
            :mysql_password => node["travel"]["db_pass"],
            :mysql_user => node["travel"]["db_user"]
  notifies :restart, "service[tomcat]"
end
Fixed the node search query (removed a stray trailing quote that caused a syntax error) in the travel default recipe
#
# Cookbook Name:: travel
# Recipe:: default
#
# Copyright 2012, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
#
include_recipe "travel::common"
include_recipe "tomcat"

# Make sure the WAR staging directory exists.
directory node["travel"]["war_directory"] do
  mode "0755"
end

war_archive = ::File.join(node["travel"]["war_directory"], node["travel"]["war_name"])
travel_webapp = ::File.join(node["tomcat"]["webapp_dir"], "travel")

# Fetch the application WAR; unpack right away whenever it changes.
remote_file war_archive do
  #source "http://repository.cloudifysource.org/org/cloudifysource/2.0.0/travel-mongo-example.war"
  source node["travel"]["war_url"]
  checksum node["travel"]["war_checksum"]
  mode "0644"
  action :create
  notifies :run, "execute[unzip -o #{war_archive} -d #{travel_webapp}]", :immediately
end

# Unpack step; inert unless notified by the download above.
execute "unzip -o #{war_archive} -d #{travel_webapp}" do
  notifies :restart, "service[tomcat]"
  action :nothing
end

# Locate the node running the MySQL server recipe via Chef search.
mysql_host = search(:node, 'run_list:recipe\[mysql\:\:server\]').first.ipaddress

# Point the webapp's JDBC configuration at that MySQL node.
template ::File.join(travel_webapp, "WEB-INF", "classes", "jdbc.properties") do
  mode "0644"
  variables :mysql_host => mysql_host,
            :mysql_password => node["travel"]["db_pass"],
            :mysql_user => node["travel"]["db_user"]
  notifies :restart, "service[tomcat]"
end
|
require 'rubygems'
require 'active_record'
require 'yaml'
require 'load_path'

# Extend the load path so the sibling lib/, lib/models/, db/ and config/
# directories are requireable from this script.
LoadPath.configure do
  add parent_directory('.', up: 1)
  add path_builder { sibling_directory('lib') }
  add path_builder { sibling_directory('lib').child_directory('models') }
  add path_builder { sibling_directory('db') }
  add path_builder { sibling_directory('config') }
end

require 'model'
require 'output_file'
require 'input_file'
require 'dictionary_table_migration'
require 'dictionary_uniq_const_migration'
require 'dictionary_view_migration'
require 'muti_io'
require 'output'
require 'joined'

# Load the database connection settings (database.yml next to this script)
dbconfig = YAML::load(File.open(File.join(File.dirname(__FILE__), 'database.yml')))
# Route DB logging to ../log/db.log.  NOTE(review): the original comment
# said output goes to the console, but a file logger is configured here.
ActiveRecord::Base.logger = Logger.new(File.expand_path(File.join(File.dirname(__FILE__), "../log/db.log"))) # Simple logging utility. logger.rb -- standard lib
ActiveRecord::Base.logger.level = Logger::DEBUG
# Connect to the database
ActiveRecord::Base.establish_connection(dbconfig)
Logger configuration changed: raised the ActiveRecord log level from DEBUG to INFO
require 'rubygems'
require 'active_record'
require 'yaml'
require 'load_path'

# Extend the load path so the sibling lib/, lib/models/, db/ and config/
# directories are requireable from this script.
LoadPath.configure do
  add parent_directory('.', up: 1)
  add path_builder { sibling_directory('lib') }
  add path_builder { sibling_directory('lib').child_directory('models') }
  add path_builder { sibling_directory('db') }
  add path_builder { sibling_directory('config') }
end

require 'model'
require 'output_file'
require 'input_file'
require 'dictionary_table_migration'
require 'dictionary_uniq_const_migration'
require 'dictionary_view_migration'
require 'muti_io'
require 'output'
require 'joined'

# Load the database connection settings (database.yml next to this script)
dbconfig = YAML::load(File.open(File.join(File.dirname(__FILE__), 'database.yml')))
# Route DB logging to ../log/db.log at INFO level.  NOTE(review): the
# original comment said output goes to the console, but a file logger is
# configured here.
ActiveRecord::Base.logger = Logger.new(File.expand_path(File.join(File.dirname(__FILE__), "../log/db.log"))) # Simple logging utility. logger.rb -- standard lib
ActiveRecord::Base.logger.level = Logger::INFO
# Connect to the database
ActiveRecord::Base.establish_connection(dbconfig)
|
FactoryGirl.define do
  # A fully static cupping score sheet with fixed component scores,
  # an associated grader (user), cupping and cupped_coffee.
  factory :score do
    roast_level 1
    aroma 8
    aftertaste 7.25
    acidity 9
    body 6.5
    flavor 7.75
    uniformity 8
    balance 9.5
    clean_cup 6.5
    sweetness 8.25
    overall 9
    defects 1
    total_score 71
    final_score 85.50
    notes "tea-like"
    association :grader, factory: :user
    cupping
    cupped_coffee
  end

  # Helper: random value between min and max in 0.25 increments.
  # NOTE(review): defined here but not referenced by the static
  # attributes above.
  def score_range(min, max)
    (min..max).step(0.25).to_a.sample
  end
end
Update scores factory: randomize component scores via score_range and derive total/final scores from them
FactoryGirl.define do
  # Randomized cupping score sheet: each component is sampled via the
  # top-level score_range helper, and the totals are derived from the
  # sampled components.
  factory :score do
    roast_level { score_range(1, 4, 1) }
    aroma { score_range(6, 10) }
    acidity { score_range(6, 10) }
    body { score_range(6, 10) }
    flavor { score_range(6, 10) }
    sweetness { score_range(6, 10) }
    clean_cup { score_range(6, 10) }
    balance { score_range(6, 10) }
    uniformity { score_range(6, 10) }
    aftertaste { score_range(6, 10) }
    overall { score_range(6, 10) }
    defects { score_range(0, 10, 2) }
    # Sum of the ten individual quality scores.
    total_score do
      [aroma, acidity, body, flavor, sweetness, clean_cup,
       balance, uniformity, aftertaste, overall].inject(:+)
    end
    final_score { total_score - defects }
    # BUG FIX: was a static attribute (`notes Faker::Coffee.notes`), which
    # evaluated Faker once at load time so every built score shared the
    # same note; the lazy block samples a fresh note per build.
    notes { Faker::Coffee.notes }
    association :grader, factory: :user
    cupping
    cupped_coffee
  end
end
# Pick a random value between min and max (inclusive), quantized to the
# given step size (defaults to quarter-point increments).
def score_range(min, max, step = 0.25)
  candidates = min.step(max, step).to_a
  candidates.sample
end
|
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.1.1' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
# Rails 2.1 application configuration for the echowaves app.
Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.

  # Skip frameworks you're not going to use. To use Rails without a database
  # you must remove the Active Record framework.
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]

  # Specify gems that this application depends on.
  # They can then be installed with "rake gems:install" on new installations.
  # config.gem "bj"
  # config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
  # config.gem "aws-s3", :lib => "aws/s3"
  config.gem "gravtastic"
  # config.gem "BlueCloth"
  config.gem "paperclip"

  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug

  # Make Time.zone default to the specified zone, and make Active Record store time values
  # in the database in UTC, and return them converted to the specified local zone.
  # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
  config.time_zone = 'UTC'

  # Your secret key for verifying cookie session data integrity.
  # If you change this key, all old sessions will become invalid!
  # Make sure the secret is at least 30 characters and all random,
  # no regular words or you'll be exposed to dictionary attacks.
  # NOTE(review): this secret is committed to source control; anyone with
  # repo access can forge session cookies.  Rotate it and load it from an
  # untracked, environment-specific location.
  config.action_controller.session = {
    :session_key => '_echowaves_session',
    :secret => '66ff24ef9207ebaa90231b6b28bfac105da30650f733823e77de8afffa0297d7933eff5d83fa941bb12fb7cd78a46cf3636d5f855b7a39c1c0033f6156290a27'
  }

  # Use the database for sessions instead of the cookie-based default,
  # which shouldn't be used to store highly confidential information
  # (create the session table with "rake db:sessions:create")
  config.action_controller.session_store = :active_record_store

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector
  # Second, add the :user_observer
  # Rails::Initializer.run do |config|
  # The user observer goes inside the Rails::Initializer block
  # !!!!!!!!!!!!!the following line must be uncommented in production for users to be able to register
  # config.active_record.observers = :user_observer
end
# active mailer configuration
# First, specify the Host that we will be using later for user_notifier.rb
HOST = 'http://localhost:3000'
# Third, add your SMTP settings
# NOTE(review): real SMTP credentials are committed here; load them from
# ENV or an untracked config file instead.
ActionMailer::Base.delivery_method = :smtp
ActionMailer::Base.smtp_settings = {
  :address => "mail.echowaves.com",
  :port => 25,
  :domain => "localhost:3000",
  :user_name => "carmelyne@echowaves.com",
  :password => "yourrailsapp",
  :authentication => :login
}
# Orbited / STOMP endpoints (presumably the comet/message-push servers
# used by the app -- confirm against the consuming code).
ORBITED_HOST = 'localhost'
ORBITED_PORT = '8500'
ORBITED_DOMAIN = 'localhost'
STOMP_HOST = 'localhost'
STOMP_PORT = '61613'
# NOTE(review): constant name has a typo ("FREQUINCY") but is kept because
# other code references it by this name.  Units presumably seconds -- confirm.
REFRESH_FREQUINCY = 3600
More deployment config: reduced REFRESH_FREQUINCY from 3600 to 60
# Be sure to restart your server when you modify this file
# Uncomment below to force Rails into production mode when
# you don't control web/app server and can't set it the proper way
# ENV['RAILS_ENV'] ||= 'production'
# Specifies gem version of Rails to use when vendor/rails is not present
RAILS_GEM_VERSION = '2.1.1' unless defined? RAILS_GEM_VERSION
# Bootstrap the Rails environment, frameworks, and default configuration
require File.join(File.dirname(__FILE__), 'boot')
# Rails 2.1 application configuration for the echowaves app.
Rails::Initializer.run do |config|
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.

  # Skip frameworks you're not going to use. To use Rails without a database
  # you must remove the Active Record framework.
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]

  # Specify gems that this application depends on.
  # They can then be installed with "rake gems:install" on new installations.
  # config.gem "bj"
  # config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
  # config.gem "aws-s3", :lib => "aws/s3"
  config.gem "gravtastic"
  # config.gem "BlueCloth"
  config.gem "paperclip"

  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{RAILS_ROOT}/extras )

  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug

  # Make Time.zone default to the specified zone, and make Active Record store time values
  # in the database in UTC, and return them converted to the specified local zone.
  # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
  config.time_zone = 'UTC'

  # Your secret key for verifying cookie session data integrity.
  # If you change this key, all old sessions will become invalid!
  # Make sure the secret is at least 30 characters and all random,
  # no regular words or you'll be exposed to dictionary attacks.
  # NOTE(review): this secret is committed to source control; anyone with
  # repo access can forge session cookies.  Rotate it and load it from an
  # untracked, environment-specific location.
  config.action_controller.session = {
    :session_key => '_echowaves_session',
    :secret => '66ff24ef9207ebaa90231b6b28bfac105da30650f733823e77de8afffa0297d7933eff5d83fa941bb12fb7cd78a46cf3636d5f855b7a39c1c0033f6156290a27'
  }

  # Use the database for sessions instead of the cookie-based default,
  # which shouldn't be used to store highly confidential information
  # (create the session table with "rake db:sessions:create")
  config.action_controller.session_store = :active_record_store

  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql

  # Activate observers that should always be running
  # config.active_record.observers = :cacher, :garbage_collector
  # Second, add the :user_observer
  # Rails::Initializer.run do |config|
  # The user observer goes inside the Rails::Initializer block
  # !!!!!!!!!!!!!the following line must be uncommented in production for users to be able to register
  # config.active_record.observers = :user_observer
end
# active mailer configuration
# First, specify the Host that we will be using later for user_notifier.rb
HOST = 'http://localhost:3000'
# Third, add your SMTP settings
# NOTE(review): real SMTP credentials are committed here; load them from
# ENV or an untracked config file instead.
ActionMailer::Base.delivery_method = :smtp
ActionMailer::Base.smtp_settings = {
  :address => "mail.echowaves.com",
  :port => 25,
  :domain => "localhost:3000",
  :user_name => "carmelyne@echowaves.com",
  :password => "yourrailsapp",
  :authentication => :login
}
# Orbited / STOMP endpoints (presumably the comet/message-push servers
# used by the app -- confirm against the consuming code).
ORBITED_HOST = 'localhost'
ORBITED_PORT = '8500'
ORBITED_DOMAIN = 'localhost'
STOMP_HOST = 'localhost'
STOMP_PORT = '61613'
# NOTE(review): constant name has a typo ("FREQUINCY") but is kept because
# other code references it by this name.  Units presumably seconds -- confirm.
REFRESH_FREQUINCY = 60
|
I accidentally deleted my initial specs :( Recreating them.
require 'spec_helper'
# Minimal loaded class for the fire_double specs below: its single real
# method blows up if the spec ever calls through to it instead of a mock.
class TestObject
  def defined_method
    raise RuntimeError, "Y U NO MOCK?"
  end
end
# Specs for rspec-fire's #fire_double: stubbing/mocking on the double is
# only constrained to real methods when the doubled constant is loaded.
describe '#fire_double' do
  # Macro: generates an example asserting that `method_under_test`
  # (let-defined per describe block below) accepts `method_name` on a
  # double of a constant that is never defined ("UnloadedObject").
  def self.should_allow(method_name)
    it "should allow #{method_name}" do
      object = fire_double("UnloadedObject")
      lambda {
        object.send(method_under_test, method_name)
      }.should_not raise_error
      object.rspec_reset
    end
  end

  # Macro: generates an example asserting that a double of the loaded
  # TestObject rejects methods the real class does not implement.
  def self.should_not_allow(method_name)
    it "should not allow #{method_name}" do
      object = fire_double("TestObject")
      lambda {
        object.send(method_under_test, method_name)
      }.should fail_matching("#{method_name} does not implement", method_name)
    end
  end

  # Shared behaviour exercised once per mocking method below.
  shared_examples_for 'a fire-enhanced double' do
    describe 'doubled class is not loaded' do
      should_allow(:undefined_method)
    end

    describe 'doubled class is loaded' do
      should_allow(:defined_method)
      should_not_allow(:undefined_method)
    end
  end

  describe '#should_receive' do
    let(:method_under_test) { :should_receive }
    it_should_behave_like 'a fire-enhanced double'
  end

  describe '#should_not_receive' do
    let(:method_under_test) { :should_not_receive }
    it_should_behave_like 'a fire-enhanced double'
  end

  describe '#stub' do
    let(:method_under_test) { :stub }
    it_should_behave_like 'a fire-enhanced double'
  end
end
|
require 'active_support/all'
require 'mumukit/bridge'
describe 'Server' do
let(:bridge) { Mumukit::Bridge::Runner.new('http://localhost:4568') }
before(:all) do
@pid = Process.spawn 'rackup -p 4568', err: '/dev/null'
sleep 8
end
after(:all) { Process.kill 'TERM', @pid }
let(:test) {
%q{
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
final_board: |
GBB/1.0
size 3 3
head 0 1
- initial_board: |
GBB/1.0
size 2 2
head 0 0
final_board: |
GBB/1.0
size 2 2
head 0 1
}}
it 'answers a valid hash when submission passes' do
response = bridge.run_tests!(test: test, extra: '', content: %q{
program {
Mover(Norte)
}}, expectations: [])
expect(response[:status]).to eq :passed
expect(response[:test_results].size).to eq 2
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when submission passes and boards do not have a GBB spec' do
response = bridge.run_tests!(test: %q{
examples:
- initial_board: |
size 3 3
head 0 0
final_board: |
size 3 3
head 0 1}, extra: '', content: %q{
program {
Mover(Norte)
}}, expectations: [])
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when submission passes, with expectations' do
response = bridge.run_tests!(
content: '
procedure PonerUnaDeCada() {
Poner (Rojo)
Poner (Azul)
Poner (Negro)
Poner (Verde)
}',
extra: '',
expectations: [
{binding: 'program', inspection: 'HasUsage:PonerUnaDecada'},
{binding: 'program', inspection: 'Not:HasBinding'},
],
test: '
check_head_position: true
subject: PonerUnaDeCada
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 1 Rojo 1 Verde 1 Negro 1
head 0 0
- initial_board: |
GBB/1.0
size 5 5
head 3 3
final_board: |
GBB/1.0
size 5 5
cell 3 3 Azul 1 Rojo 1 Verde 1 Negro 1
head 3 3')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed_with_warnings,
feedback: '',
expectation_results: [
{binding: "program", inspection: "Uses:=PonerUnaDecada", result: :failed},
{binding: "*", inspection: "Not:Declares:=program", result: :passed},
{binding: "*", inspection: "Declares:=PonerUnaDeCada", result: :passed}
],
result: ''
expect(response[:test_results].size).to eq 2
end
it 'answers a valid hash when submission is aborted and expected' do
response = bridge.run_tests!(
content: '
procedure HastaElInfinito() {
while (puedeMover(Este)) {
Poner(Rojo)
}
}',
extra: '',
expectations: [
{binding: "program", inspection: "Not:HasBinding"},
],
test: '
subject: HastaElInfinito
expect_endless_while: true
examples:
- initial_board: |
GBB/1.0
size 2 2
head 0 0
final_board: |
GBB/1.0
size 2 2
head 0 0')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed,
feedback: '',
expectation_results: [
{binding: "*", inspection: "Not:Declares:=program", result: :passed},
{binding: "*", inspection: "Declares:=HastaElInfinito", result: :passed}
],
result: ''
end
it 'answers a valid hash when the expected boom type is wrong_arguments_type' do
response = bridge.run_tests!(
content: "program {\nDibujarLinea3(Este, Verde)\nMover(Este)\nDibujarLinea3(Norte, Rojo)\nMover(Norte)\nDibujarLinea3(Oeste, Negro)\nMover(Oeste)\nDibujarLinea3(Sur, Azul)\n}",
extra: "procedure DibujarLinea3(color, direccion) {\n Poner(color)\n Mover(direccion)\n Poner(color)\n Mover(direccion)\n Poner(color)\n }",
expectations: [],
test: "check_head_position: true\n\nexamples:\n - title: '¡BOOM!'\n initial_board: |\n GBB/1.0\n size 3 3\n head 0 0\n error: wrong_argument_type")
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when the expected boom type is unassigned_variable and the initial board is not defined' do
response = bridge.run_tests!(
content: "function boomBoomKid() {\n return (unaVariableQueNoExiste)\n}",
test: "subject: boomBoomKid\n\nshow_initial_board: false\n\nexamples:\n - error: unassigned_variable",
expectations: [],
extra: ""
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when the return checker has to compare a return value of True' do
response = bridge.run_tests!(
content: "function esLibreCostados(){\nreturn(puedeMover(Este)&&puedeMover(Oeste))\n\n}",
test: "subject: esLibreCostados\n\nexamples:\n - initial_board: |\n GBB/1.0\n size 3 2\n head 0 0\n return: 'False'\n \n - initial_board: |\n GBB/1.0\n size 3 2\n head 1 0\n return: 'True'",
expectations: [
{
binding: "esLibreCostados",
inspection: "HasUsage:puedeMover"
}
],
extra: "",
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when the return checker has to compare a numeric value and it is defined in the test as string' do
response = bridge.run_tests!(
content: "function numero(){\nvalor:=cifra()\nMover(Este)\nvalor:=(valor*100)+(cifra()*10)\nMover(Este)\nvalor:=(valor+cifra())\nreturn(valor)\n}",
test: "subject: numero\n\nexamples:\n - initial_board: |\n GBB/1.0\n size 3 1\n cell 0 0 Rojo 1\n cell 1 0 Rojo 3\n cell 2 0 Rojo 2\n head 0 0\n return: '132'\n\n - initial_board: |\n GBB/1.0\n size 3 1\n cell 0 0 Rojo 6\n cell 1 0 Rojo 7\n cell 2 0 Rojo 8\n head 0 0\n return: '678'",
expectations: [
{
binding: "numero",
inspection: "HasDeclaration"
},
{
binding: "numero",
inspection: "HasUsage:cifra"
}
],
extra: "function cifra() {\n return (nroBolitas(Rojo))\n}"
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when the error checker is waiting for a wrong_arguments_quantity error' do
response = bridge.run_tests!(
content: "program{ \nDibujarLinea3(Verde)\n}",
test: %q{
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
error: wrong_arguments_quantity
},
expectations: [ ],
extra: "procedure DibujarLinea3(color, direccion) {\n Poner(color)\n Mover(direccion)\n Poner(color)\n Mover(direccion)\n Poner(color)\n}"
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a well formed error when the content has no program definition' do
response = bridge.run_tests!(
content: "",
test: %q{
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
error: wrong_arguments_quantity
},
expectations: [ ],
extra: ""
)
expect(response[:status]).to eq :errored
expect(response[:response_type]).to eq :unstructured
expect(response[:result]).to eq "<pre>[0:0]: No program definition was found</pre>"
end
# See https://github.com/mumuki/mulang/issues/144. Caused by not excluding the proper smells
it 'checks an inspection over a function correctly' do
response = bridge.run_tests!(
{
content: "function rojoEsDominante(){\nreturn (nroBolitas(Rojo)\u003enroBolitasTotal()-nroBolitas(Rojo))\n}",
test: "subject: rojoEsDominante\n\nexamples:\n - initial_board: |\n GBB/1.0\n size 2 2\n cell 0 0 Azul 3 Negro 2 Rojo 4 Verde 3\n head 0 0\n return: 'False'\n \n - initial_board: |\n GBB/1.0\n size 2 2\n cell 0 0 Azul 3 Negro 2 Rojo 10 Verde 3\n head 0 0\n return: 'True'",
expectations: [
{
binding: "rojoEsDominante",
inspection: "HasUsage:todasMenos"
}
],
extra: "function nroBolitasTotal() {\n return (nroBolitas(Azul) + nroBolitas(Negro) + nroBolitas(Rojo) + nroBolitas(Verde))\n}\n\nfunction todasMenos(color) {\n return (nroBolitasTotal() - nroBolitas(color))\n}"
}
)
expect(response[:status]).to eq :passed_with_warnings
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when locale is pt, with directions' do
response = bridge.run_tests!({
content: "program {\n Mover(Sul); Mover(Leste) \n}",
test: "
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 2
final_board: |
GBB/1.0
size 3 3
head 1 1",
expectations: [ ],
locale: "pt",
extra: "",
})
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when locale is pt and submission is wrong, with directions' do
response = bridge.run_tests!({
content: "program {\n Mover(Sul); Mover(Leste) \n}",
test: "
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 2
final_board: |
GBB/1.0
size 3 3
head 0 0",
expectations: [ ],
locale: "pt",
extra: "",
})
expect(response[:status]).to eq :failed
expect(response[:response_type]).to eq :structured
end
it 'answers a valid hash when locale is pt, with colors' do
response = bridge.run_tests!(
{
content: "program {\n Colocar(Vermelho) \n}",
test: "
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
final_board: |
GBB/1.0
size 3 3
cell 0 0 Rojo 1
head 0 0",
expectations: [ ],
locale: "pt",
extra: "",
}
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
it 'fails when locale is pt and the content of the submission is wrong' do
response = bridge.run_tests!(
{
content: "program {\n Colocar(Preto) \n}",
test: "
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
final_board: |
GBB/1.0
size 3 3
cell 0 0 Rojo 1
head 0 0",
expectations: [
],
locale: "pt",
extra: "",
}
)
expect(response[:status]).to eq :failed
expect(response[:response_type]).to eq :structured
end
it 'responds a properly structured response when there are unexpected booms and no expected final boards' do
response = bridge.run_tests!(
{
content: "
function hayBolitasLejosAl(direccion, color, distancia) {
MoverN(distancia, direcion)
return (True)
}
",
test: "
subject: hayBolitasLejosAl
examples:
- arguments:
- Norte
- Rojo
- 2
initial_board: |
GBB/1.0
size 3 3
cell 0 2 Rojo 1
head 0 0
return: 'True'",
expectations: [
],
extra: "procedure MoverN(n, direccion) { repeat (n) { Mover(direccion) } }"
}
)
expect(response[:response_type]).to eq :structured
expect(response[:status]).to eq :failed
end
it 'can accept Blockly XML as content' do
response = bridge.run_tests!(
content: '<xml xmlns="http://www.w3.org/1999/xhtml"><variables></variables><block type="Program" id="xB~]3G#lp3SsK`Ys{VS^" deletable="false" x="30" y="30"><mutation timestamp="1523891789396"></mutation><statement name="program"><block type="Asignacion" id="FW1Q]83JP$a0!!$wYxyd"><field name="varName">unColor</field><value name="varValue"><block type="ColorSelector" id="l4c.8v[N.mvxPf$Zx^VW"><field name="ColorDropdown">Negro</field></block></value><next><block type="Poner" id="C1cG`0n#kyzHT5WF88~L"><value name="COLOR"><block type="variables_get" id="jv[rAEP5uKPbN{RN[.I|"><mutation var="unColor"></mutation><field name="VAR">unColor</field></block></value><next><block type="Mover" id="RqKR#pt]B~yQuOg4(u$p"><value name="DIRECCION"><block type="DireccionSelector" id="t?xv9#gqOXx$iKiVH]S;"><field name="DireccionDropdown">Norte</field></block></value></block></next></block></next></block></statement></block></xml>',
extra: '',
expectations: [
{binding: 'program', inspection: 'HasUsage:unColor'},
{binding: 'program', inspection: 'Not:HasUsage:otraCosa'}
],
test: '
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 0 Rojo 0 Verde 0 Negro 1
head 0 1
')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed,
feedback: '',
expectation_results: [
{binding: "program", inspection: "Uses:=unColor", result: :passed},
{binding: "program", inspection: "Not:Uses:=otraCosa", result: :passed}
],
result: ''
end
it 'can accept Blockly XML as extra AND content, using primitive actions' do
response = bridge.run_tests!(
content: '<xml xmlns="http://www.w3.org/1999/xhtml"><variables></variables><block type="Program" id="PuD+$,0HT^k)5IUPdU!?" deletable="false" x="150" y="-160"><mutation timestamp="1523892212925"></mutation><statement name="program"><block type="RepeticionSimple" id="YeG}Q75;8ra*Wp3:EU2q"><value name="count"><block type="dobleDe_" id="3/`k,b:TE+S9Y9qKDX~p"><value name="arg1"><block type="math_number" id="[op/fExN+uq4:U]0NqoH"><field name="NUM">3</field></block></value></block></value><statement name="block"><block type="PonerTres_" id="MGSh1,-~Pi}EJ7{pZ;mX"><value name="arg1"><block type="ColorSelector" id="veM!Uyw2$}S=hJtas!Cs"><field name="ColorDropdown">Azul</field></block></value></block></statement></block></statement></block></xml>',
extra: '<xml xmlns="http://www.w3.org/1999/xhtml"><variables></variables><block type="procedures_defnoreturn" id="h#T:7OUn=gaPXqUplXe]" x="80" y="-207"><mutation><arg name="color"></arg></mutation><field name="NAME">PonerTres_</field><field name="ARG0">color</field><statement name="STACK"><block type="Poner" id="YcB^xSAiU-+sa4[z8wKO"><value name="COLOR"><block type="variables_get" id="rsm([Dp*s5Hi+*bWE}*i"><mutation var="color" parent="h#T:7OUn=gaPXqUplXe]"></mutation></block></value><next><block type="Poner" id="jGqg=|!OChe?VZ0_dsUd"><value name="COLOR"><block type="variables_get" id="RawKnPa#tN{0vEvMFmY_"><mutation var="color" parent="h#T:7OUn=gaPXqUplXe]"></mutation></block></value><next><block type="Poner" id="BnYamcc*iwjGM,-yoa}n"><value name="COLOR"><block type="variables_get" id="z#LO[/(O-spS=WQVeh)*"><mutation var="color" parent="h#T:7OUn=gaPXqUplXe]"></mutation></block></value></block></next></block></next></block></statement></block><block type="procedures_defreturnsimplewithparams" id=")(t47XNXCjl]b^(zV:4;" x="80" y="-8"><mutation statements="false"><arg name="número"></arg></mutation><field name="NAME">dobleDe_</field><field name="ARG0">número</field><value name="RETURN"><block type="OperadorNumerico" id=":}n%C$e}/%y;JdE1]`D("><field name="OPERATOR">*</field><value name="arg1"><block type="variables_get" id="h6)YVE)7%.KldhC)wE$~"><mutation var="número" parent=")(t47XNXCjl]b^(zV:4;"></mutation></block></value><value name="arg2"><block type="math_number" id="EB$._vZ}Qk+wIYcE:V74"><field name="NUM">2</field></block></value></block></value></block></xml>',
expectations: [
{binding: 'program', inspection: 'Uses:dobleDe_'},
{binding: 'program', inspection: 'Uses:PonerTres_'}
],
test: '
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 18 Rojo 0 Verde 0 Negro 0
head 0 0
')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed,
feedback: '',
expectation_results: [
{binding: "program", inspection: "Uses:dobleDe_", result: :passed},
{binding: "program", inspection: "Uses:PonerTres_", result: :passed}
],
result: ''
end
# Blockly XML submissions checked against finer-grained Mulang expectations:
# Uses/Declares at wildcard ('*') scope plus UsesRepeat scoped to a single
# procedure. The extra code defines a `Volar Al_` helper used by submissions.
# NOTE(review): leading indentation appears stripped in this capture; all
# code below is kept byte-identical.
context 'Blockly XML with finer expectations' do
let(:expectations) { [
{binding: '*', inspection: 'Uses:RecolectarPolen'},
{binding: '*', inspection: 'Declares:RecolectarPolen'},
{binding: 'RecolectarPolen', inspection: 'UsesRepeat'}
] }
# Runs the shared expectations against the given Blockly XML content and
# returns only the :expectation_results portion of the server response.
def run_expectations!(content)
extra = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="procedures_defnoreturn" id="ta2r.#tE.Z3=cqo_~(GS" x="42" y="-63"><mutation><arg name="direccion"></arg></mutation><field name="NAME">Volar Al_</field><field name="ARG0">direccion</field><statement name="STACK"><block type="Sacar" id="5*l*j7d[{}-sxylSK?yc"><value name="COLOR"><block type="ColorSelector" id="k]:7DyS5cSE%dt-#;N~8"><field name="ColorDropdown">Negro</field></block></value><next><block type="Mover" id="Ld#c~6FG8LB)y94=iHi("><value name="DIRECCION"><block type="variables_get" id="ea,RDtzQN,XRBgfNU5AU"><mutation var="direccion" parent="ta2r.#tE.Z3=cqo_~(GS"></mutation></block></value><next><block type="Poner" id="+HbxwbY6f-b.ZL/fNm%j"><value name="COLOR"><block type="ColorSelector" id="ZjyHOJAVte5Tlt:L`%oO"><field name="ColorDropdown">Negro</field></block></value></block></next></block></next></block></statement></block></xml>'
bridge.run_tests!(
content: content,
extra: extra,
expectations: expectations,
test: '
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 18 Rojo 0 Verde 0 Negro 0
head 0 0
')[:expectation_results]
end
# Submission both declares and calls RecolectarPolen, with a repeat inside.
it 'works when all pass' do
# Equivalent to program { VolarAl_(Este) ; RecolectarPolen() } procedure RecolectarPolen() { repeat(5) { Sacar(Verde) } }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528058851731"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="procedures_callnoreturnnoparams" id="31;^FQ_kr`XO|9D9`y|1"><mutation name="Recolectar Polen"></mutation></block></next></block></statement></block><block type="procedures_defnoreturnnoparams" id="9)jdRaCNSwCopGPcG^6w" x="40" y="112"><field name="NAME">Recolectar Polen</field><statement name="STACK"><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></statement></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :passed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :passed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :passed}]
end
# Submission neither declares nor calls RecolectarPolen at all.
it 'works when all expectations fail' do
# Equivalent to program { VolarAl_(Este) ; repeat(5) { Sacar(Verde) } }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528054201699"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></next></block></statement></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :failed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :failed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :failed}]
end
# An unrelated empty procedure must not satisfy any of the expectations.
it 'works when some fail with empty blocks' do
# Equivalent to program { VolarAl_(Este) ; repeat(5) { Sacar(Verde) } } procedure HacerAlgo() { }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528058068163"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></next></block></statement></block><block type="procedures_defnoreturnnoparams" id="9)jdRaCNSwCopGPcG^6w" x="40" y="112"><field name="NAME">Hacer algo</field></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :failed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :failed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :failed}]
end
# RecolectarPolenta must not be confused with RecolectarPolen (prefix match).
it 'works when some fail because of similar names' do
# Equivalent to program { VolarAl_(Este) ; RecolectarPolenta() } procedure RecolectarPolenta() { repeat(5) { Sacar(Verde) } }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528058279725"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="procedures_callnoreturnnoparams" id="31;^FQ_kr`XO|9D9`y|1"><mutation name="Recolectar Polenta"></mutation></block></next></block></statement></block><block type="procedures_defnoreturnnoparams" id="9)jdRaCNSwCopGPcG^6w" x="40" y="112"><field name="NAME">Recolectar Polenta</field><statement name="STACK"><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></statement></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :failed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :failed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :failed}]
end
end
end
Adding tests
require 'active_support/all'
require 'mumukit/bridge'
describe 'Server' do
# HTTP bridge to the runner server spawned in before(:all) on port 4568.
let(:bridge) { Mumukit::Bridge::Runner.new('http://localhost:4568') }
# Boot the runner server once for the whole suite, then wait until it is
# actually accepting TCP connections instead of sleeping a fixed 8 seconds
# (a fixed sleep is both slow on fast machines and flaky on slow ones).
before(:all) do
@pid = Process.spawn 'rackup -p 4568', err: '/dev/null'
require 'socket' # stdlib; loaded lazily because only this hook needs it
deadline = Time.now + 15
begin
TCPSocket.new('localhost', 4568).close
rescue SystemCallError # e.g. ECONNREFUSED while the server is still booting
raise 'rackup server did not start within 15 seconds' if Time.now > deadline
sleep 0.2
retry
end
end
# Shut the server down and reap the child so no zombie process is left
# behind (the original only sent TERM and never waited on the pid).
after(:all) do
Process.kill 'TERM', @pid
Process.wait @pid
end
# Shared two-example YAML spec: the submission must move the head from
# (0,0) to (0,1) on a 3x3 board and on a 2x2 board.
# NOTE(review): the YAML's leading indentation looks stripped in this
# capture; the runtime string is kept byte-identical.
let(:test) {
%q{
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
final_board: |
GBB/1.0
size 3 3
head 0 1
- initial_board: |
GBB/1.0
size 2 2
head 0 0
final_board: |
GBB/1.0
size 2 2
head 0 1
}}
# Happy path: one-move program satisfies both examples of the shared test.
it 'answers a valid hash when submission passes' do
response = bridge.run_tests!(test: test, extra: '', content: %q{
program {
Mover(Norte)
}}, expectations: [])
expect(response[:status]).to eq :passed
expect(response[:test_results].size).to eq 2 # one result per YAML example
expect(response[:response_type]).to eq :structured
end
# Boards without the leading "GBB/1.0" header line must still be accepted.
it 'answers a valid hash when submission passes and boards do not have a GBB spec' do
response = bridge.run_tests!(test: %q{
examples:
- initial_board: |
size 3 3
head 0 0
final_board: |
size 3 3
head 0 1}, extra: '', content: %q{
program {
Mover(Norte)
}}, expectations: [])
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Tests pass but one expectation fails, so the overall status is
# :passed_with_warnings. Note the deliberate typo `PonerUnaDecada`
# (lowercase c) in the expectation — that is why its result is :failed.
it 'answers a valid hash when submission passes, with expectations' do
response = bridge.run_tests!(
content: '
procedure PonerUnaDeCada() {
Poner (Rojo)
Poner (Azul)
Poner (Negro)
Poner (Verde)
}',
extra: '',
expectations: [
{binding: 'program', inspection: 'HasUsage:PonerUnaDecada'},
{binding: 'program', inspection: 'Not:HasBinding'},
],
test: '
check_head_position: true
subject: PonerUnaDeCada
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 1 Rojo 1 Verde 1 Negro 1
head 0 0
- initial_board: |
GBB/1.0
size 5 5
head 3 3
final_board: |
GBB/1.0
size 5 5
cell 3 3 Azul 1 Rojo 1 Verde 1 Negro 1
head 3 3')
# The server normalizes legacy inspections (HasUsage -> Uses:=, etc.).
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed_with_warnings,
feedback: '',
expectation_results: [
{binding: "program", inspection: "Uses:=PonerUnaDecada", result: :failed},
{binding: "*", inspection: "Not:Declares:=program", result: :passed},
{binding: "*", inspection: "Declares:=PonerUnaDeCada", result: :passed}
],
result: ''
expect(response[:test_results].size).to eq 2
end
# An endless while is tolerated when the test opts in via
# `expect_endless_while: true`, so the run still counts as :passed.
it 'answers a valid hash when submission is aborted and expected' do
response = bridge.run_tests!(
content: '
procedure HastaElInfinito() {
while (puedeMover(Este)) {
Poner(Rojo)
}
}',
extra: '',
expectations: [
{binding: "program", inspection: "Not:HasBinding"},
],
test: '
subject: HastaElInfinito
expect_endless_while: true
examples:
- initial_board: |
GBB/1.0
size 2 2
head 0 0
final_board: |
GBB/1.0
size 2 2
head 0 0')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed,
feedback: '',
expectation_results: [
{binding: "*", inspection: "Not:Declares:=program", result: :passed},
{binding: "*", inspection: "Declares:=HastaElInfinito", result: :passed}
],
result: ''
end
# Swapped arguments at the call site trigger the expected boom.
# NOTE(review): the description says `wrong_arguments_type` but the YAML
# declares `error: wrong_argument_type` (singular) — confirm which spelling
# the runner actually recognizes.
it 'answers a valid hash when the expected boom type is wrong_arguments_type' do
response = bridge.run_tests!(
content: "program {\nDibujarLinea3(Este, Verde)\nMover(Este)\nDibujarLinea3(Norte, Rojo)\nMover(Norte)\nDibujarLinea3(Oeste, Negro)\nMover(Oeste)\nDibujarLinea3(Sur, Azul)\n}",
extra: "procedure DibujarLinea3(color, direccion) {\n Poner(color)\n Mover(direccion)\n Poner(color)\n Mover(direccion)\n Poner(color)\n }",
expectations: [],
test: "check_head_position: true\n\nexamples:\n - title: '¡BOOM!'\n initial_board: |\n GBB/1.0\n size 3 3\n head 0 0\n error: wrong_argument_type")
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Reading an undefined variable matches the declared unassigned_variable
# error; `show_initial_board: false` means no board is provided at all.
it 'answers a valid hash when the expected boom type is unassigned_variable and the initial board is not defined' do
response = bridge.run_tests!(
content: "function boomBoomKid() {\n return (unaVariableQueNoExiste)\n}",
test: "subject: boomBoomKid\n\nshow_initial_board: false\n\nexamples:\n - error: unassigned_variable",
expectations: [],
extra: ""
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Boolean return values are declared as the strings 'True'/'False' in the
# YAML and must compare correctly against the actual boolean result.
it 'answers a valid hash when the return checker has to compare a return value of True' do
response = bridge.run_tests!(
content: "function esLibreCostados(){\nreturn(puedeMover(Este)&&puedeMover(Oeste))\n\n}",
test: "subject: esLibreCostados\n\nexamples:\n - initial_board: |\n GBB/1.0\n size 3 2\n head 0 0\n return: 'False'\n \n - initial_board: |\n GBB/1.0\n size 3 2\n head 1 0\n return: 'True'",
expectations: [
{
binding: "esLibreCostados",
inspection: "HasUsage:puedeMover"
}
],
extra: "",
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Numeric return values quoted as strings in the YAML must still compare
# equal to the function's integer result.
it 'answers a valid hash when the return checker has to compare a numeric value and it is defined in the test as string' do
response = bridge.run_tests!(
content: "function numero(){\nvalor:=cifra()\nMover(Este)\nvalor:=(valor*100)+(cifra()*10)\nMover(Este)\nvalor:=(valor+cifra())\nreturn(valor)\n}",
test: "subject: numero\n\nexamples:\n - initial_board: |\n GBB/1.0\n size 3 1\n cell 0 0 Rojo 1\n cell 1 0 Rojo 3\n cell 2 0 Rojo 2\n head 0 0\n return: '132'\n\n - initial_board: |\n GBB/1.0\n size 3 1\n cell 0 0 Rojo 6\n cell 1 0 Rojo 7\n cell 2 0 Rojo 8\n head 0 0\n return: '678'",
expectations: [
{
binding: "numero",
inspection: "HasDeclaration"
},
{
binding: "numero",
inspection: "HasUsage:cifra"
}
],
extra: "function cifra() {\n return (nroBolitas(Rojo))\n}"
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Calling a 2-parameter procedure with a single argument raises the
# expected wrong_arguments_quantity boom, so the run passes.
it 'answers a valid hash when the error checker is waiting for a wrong_arguments_quantity error' do
response = bridge.run_tests!(
content: "program{ \nDibujarLinea3(Verde)\n}",
test: %q{
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
error: wrong_arguments_quantity
},
expectations: [ ],
extra: "procedure DibujarLinea3(color, direccion) {\n Poner(color)\n Mover(direccion)\n Poner(color)\n Mover(direccion)\n Poner(color)\n}"
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Empty content cannot be compiled: the server answers an :errored,
# unstructured response carrying the compiler message verbatim.
it 'answers a well formed error when the content has no program definition' do
response = bridge.run_tests!(
content: "",
test: %q{
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
error: wrong_arguments_quantity
},
expectations: [ ],
extra: ""
)
expect(response[:status]).to eq :errored
expect(response[:response_type]).to eq :unstructured
expect(response[:result]).to eq "<pre>[0:0]: No program definition was found</pre>"
end
# The rendered <gs-board> markup only shows the header (head position)
# when the test checks the head's position.
context 'Board rendering' do
let(:response) {
bridge.run_tests!(
content: 'program {}',
extra: '',
test: "
check_head_position: #{check_head_position}
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 1 Rojo 1 Verde 1 Negro 1
head 0 0")
}
# HTML rendering of the first (and only) example's result.
let(:result) {
response[:test_results][0][:result]
}
context "when the test doesn't check the head's position" do
let(:check_head_position) { false }
it "renders the boards with the 'without-header' attribute" do
expect(result).to include "<gs-board without-header>"
end
end
context "when the test does check the head's position" do
let(:check_head_position) { true }
it "renders the boards without the 'without-header' attribute" do
expect(result).not_to include "<gs-board without-header>"
end
end
end
# See https://github.com/mumuki/mulang/issues/144. Caused by not excluding the proper smells
# Regression test: the HasUsage inspection on a function must fail cleanly
# (warnings, not errors) when the helper is never called.
it 'checks an inspection over a function correctly' do
response = bridge.run_tests!(
{
content: "function rojoEsDominante(){\nreturn (nroBolitas(Rojo)\u003enroBolitasTotal()-nroBolitas(Rojo))\n}",
test: "subject: rojoEsDominante\n\nexamples:\n - initial_board: |\n GBB/1.0\n size 2 2\n cell 0 0 Azul 3 Negro 2 Rojo 4 Verde 3\n head 0 0\n return: 'False'\n \n - initial_board: |\n GBB/1.0\n size 2 2\n cell 0 0 Azul 3 Negro 2 Rojo 10 Verde 3\n head 0 0\n return: 'True'",
expectations: [
{
binding: "rojoEsDominante",
inspection: "HasUsage:todasMenos"
}
],
extra: "function nroBolitasTotal() {\n return (nroBolitas(Azul) + nroBolitas(Negro) + nroBolitas(Rojo) + nroBolitas(Verde))\n}\n\nfunction todasMenos(color) {\n return (nroBolitasTotal() - nroBolitas(color))\n}"
}
)
expect(response[:status]).to eq :passed_with_warnings
expect(response[:response_type]).to eq :structured
end
# Portuguese localization: Sul/Leste are accepted as direction names.
it 'answers a valid hash when locale is pt, with directions' do
response = bridge.run_tests!({
content: "program {\n Mover(Sul); Mover(Leste) \n}",
test: "
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 2
final_board: |
GBB/1.0
size 3 3
head 1 1",
expectations: [ ],
locale: "pt",
extra: "",
})
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Same pt-localized program but a final board it cannot reach: :failed.
it 'answers a valid hash when locale is pt and submission is wrong, with directions' do
response = bridge.run_tests!({
content: "program {\n Mover(Sul); Mover(Leste) \n}",
test: "
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 2
final_board: |
GBB/1.0
size 3 3
head 0 0",
expectations: [ ],
locale: "pt",
extra: "",
})
expect(response[:status]).to eq :failed
expect(response[:response_type]).to eq :structured
end
# Portuguese color names map onto the Spanish board palette: Colocar(Vermelho)
# must produce a `Rojo` cell in the final board.
it 'answers a valid hash when locale is pt, with colors' do
response = bridge.run_tests!(
{
content: "program {\n Colocar(Vermelho) \n}",
test: "
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
final_board: |
GBB/1.0
size 3 3
cell 0 0 Rojo 1
head 0 0",
expectations: [ ],
locale: "pt",
extra: "",
}
)
expect(response[:status]).to eq :passed
expect(response[:response_type]).to eq :structured
end
# Placing Preto (black) cannot yield the expected Rojo cell: :failed.
it 'fails when locale is pt and the content of the submission is wrong' do
response = bridge.run_tests!(
{
content: "program {\n Colocar(Preto) \n}",
test: "
examples:
- initial_board: |
GBB/1.0
size 3 3
head 0 0
final_board: |
GBB/1.0
size 3 3
cell 0 0 Rojo 1
head 0 0",
expectations: [
],
locale: "pt",
extra: "",
}
)
expect(response[:status]).to eq :failed
expect(response[:response_type]).to eq :structured
end
# The `direcion` misspelling in the content is intentional: it makes the
# function boom at runtime even though the example expects a return value,
# and the response must still be structured (status :failed, not :errored).
it 'responds a properly structured response when there are unexpected booms and no expected final boards' do
response = bridge.run_tests!(
{
content: "
function hayBolitasLejosAl(direccion, color, distancia) {
MoverN(distancia, direcion)
return (True)
}
",
test: "
subject: hayBolitasLejosAl
examples:
- arguments:
- Norte
- Rojo
- 2
initial_board: |
GBB/1.0
size 3 3
cell 0 2 Rojo 1
head 0 0
return: 'True'",
expectations: [
],
extra: "procedure MoverN(n, direccion) { repeat (n) { Mover(direccion) } }"
}
)
expect(response[:response_type]).to eq :structured
expect(response[:status]).to eq :failed
end
# The server transpiles Blockly XML submissions; expectations over the
# local variable `unColor` are normalized to Uses:= inspections.
it 'can accept Blockly XML as content' do
response = bridge.run_tests!(
content: '<xml xmlns="http://www.w3.org/1999/xhtml"><variables></variables><block type="Program" id="xB~]3G#lp3SsK`Ys{VS^" deletable="false" x="30" y="30"><mutation timestamp="1523891789396"></mutation><statement name="program"><block type="Asignacion" id="FW1Q]83JP$a0!!$wYxyd"><field name="varName">unColor</field><value name="varValue"><block type="ColorSelector" id="l4c.8v[N.mvxPf$Zx^VW"><field name="ColorDropdown">Negro</field></block></value><next><block type="Poner" id="C1cG`0n#kyzHT5WF88~L"><value name="COLOR"><block type="variables_get" id="jv[rAEP5uKPbN{RN[.I|"><mutation var="unColor"></mutation><field name="VAR">unColor</field></block></value><next><block type="Mover" id="RqKR#pt]B~yQuOg4(u$p"><value name="DIRECCION"><block type="DireccionSelector" id="t?xv9#gqOXx$iKiVH]S;"><field name="DireccionDropdown">Norte</field></block></value></block></next></block></next></block></statement></block></xml>',
extra: '',
expectations: [
{binding: 'program', inspection: 'HasUsage:unColor'},
{binding: 'program', inspection: 'Not:HasUsage:otraCosa'}
],
test: '
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 0 Rojo 0 Verde 0 Negro 1
head 0 1
')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed,
feedback: '',
expectation_results: [
{binding: "program", inspection: "Uses:=unColor", result: :passed},
{binding: "program", inspection: "Not:Uses:=otraCosa", result: :passed}
],
result: ''
end
# Both the teacher's extra code (PonerTres_, dobleDe_) and the student's
# submission are Blockly XML; expectations target the extra's procedures.
it 'can accept Blockly XML as extra AND content, using primitive actions' do
response = bridge.run_tests!(
content: '<xml xmlns="http://www.w3.org/1999/xhtml"><variables></variables><block type="Program" id="PuD+$,0HT^k)5IUPdU!?" deletable="false" x="150" y="-160"><mutation timestamp="1523892212925"></mutation><statement name="program"><block type="RepeticionSimple" id="YeG}Q75;8ra*Wp3:EU2q"><value name="count"><block type="dobleDe_" id="3/`k,b:TE+S9Y9qKDX~p"><value name="arg1"><block type="math_number" id="[op/fExN+uq4:U]0NqoH"><field name="NUM">3</field></block></value></block></value><statement name="block"><block type="PonerTres_" id="MGSh1,-~Pi}EJ7{pZ;mX"><value name="arg1"><block type="ColorSelector" id="veM!Uyw2$}S=hJtas!Cs"><field name="ColorDropdown">Azul</field></block></value></block></statement></block></statement></block></xml>',
extra: '<xml xmlns="http://www.w3.org/1999/xhtml"><variables></variables><block type="procedures_defnoreturn" id="h#T:7OUn=gaPXqUplXe]" x="80" y="-207"><mutation><arg name="color"></arg></mutation><field name="NAME">PonerTres_</field><field name="ARG0">color</field><statement name="STACK"><block type="Poner" id="YcB^xSAiU-+sa4[z8wKO"><value name="COLOR"><block type="variables_get" id="rsm([Dp*s5Hi+*bWE}*i"><mutation var="color" parent="h#T:7OUn=gaPXqUplXe]"></mutation></block></value><next><block type="Poner" id="jGqg=|!OChe?VZ0_dsUd"><value name="COLOR"><block type="variables_get" id="RawKnPa#tN{0vEvMFmY_"><mutation var="color" parent="h#T:7OUn=gaPXqUplXe]"></mutation></block></value><next><block type="Poner" id="BnYamcc*iwjGM,-yoa}n"><value name="COLOR"><block type="variables_get" id="z#LO[/(O-spS=WQVeh)*"><mutation var="color" parent="h#T:7OUn=gaPXqUplXe]"></mutation></block></value></block></next></block></next></block></statement></block><block type="procedures_defreturnsimplewithparams" id=")(t47XNXCjl]b^(zV:4;" x="80" y="-8"><mutation statements="false"><arg name="número"></arg></mutation><field name="NAME">dobleDe_</field><field name="ARG0">número</field><value name="RETURN"><block type="OperadorNumerico" id=":}n%C$e}/%y;JdE1]`D("><field name="OPERATOR">*</field><value name="arg1"><block type="variables_get" id="h6)YVE)7%.KldhC)wE$~"><mutation var="número" parent=")(t47XNXCjl]b^(zV:4;"></mutation></block></value><value name="arg2"><block type="math_number" id="EB$._vZ}Qk+wIYcE:V74"><field name="NUM">2</field></block></value></block></value></block></xml>',
expectations: [
{binding: 'program', inspection: 'Uses:dobleDe_'},
{binding: 'program', inspection: 'Uses:PonerTres_'}
],
test: '
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 18 Rojo 0 Verde 0 Negro 0
head 0 0
')
expect(response.except(:test_results)).to eq response_type: :structured,
status: :passed,
feedback: '',
expectation_results: [
{binding: "program", inspection: "Uses:dobleDe_", result: :passed},
{binding: "program", inspection: "Uses:PonerTres_", result: :passed}
],
result: ''
end
# Blockly XML submissions checked against finer-grained Mulang expectations:
# Uses/Declares at wildcard ('*') scope plus UsesRepeat scoped to a single
# procedure. The extra code defines a `Volar Al_` helper used by submissions.
context 'Blockly XML with finer expectations' do
let(:expectations) { [
{binding: '*', inspection: 'Uses:RecolectarPolen'},
{binding: '*', inspection: 'Declares:RecolectarPolen'},
{binding: 'RecolectarPolen', inspection: 'UsesRepeat'}
] }
# Runs the shared expectations against the given Blockly XML content and
# returns only the :expectation_results portion of the server response.
def run_expectations!(content)
extra = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="procedures_defnoreturn" id="ta2r.#tE.Z3=cqo_~(GS" x="42" y="-63"><mutation><arg name="direccion"></arg></mutation><field name="NAME">Volar Al_</field><field name="ARG0">direccion</field><statement name="STACK"><block type="Sacar" id="5*l*j7d[{}-sxylSK?yc"><value name="COLOR"><block type="ColorSelector" id="k]:7DyS5cSE%dt-#;N~8"><field name="ColorDropdown">Negro</field></block></value><next><block type="Mover" id="Ld#c~6FG8LB)y94=iHi("><value name="DIRECCION"><block type="variables_get" id="ea,RDtzQN,XRBgfNU5AU"><mutation var="direccion" parent="ta2r.#tE.Z3=cqo_~(GS"></mutation></block></value><next><block type="Poner" id="+HbxwbY6f-b.ZL/fNm%j"><value name="COLOR"><block type="ColorSelector" id="ZjyHOJAVte5Tlt:L`%oO"><field name="ColorDropdown">Negro</field></block></value></block></next></block></next></block></statement></block></xml>'
bridge.run_tests!(
content: content,
extra: extra,
expectations: expectations,
test: '
check_head_position: true
examples:
- initial_board: |
GBB/1.0
size 4 4
head 0 0
final_board: |
GBB/1.0
size 4 4
cell 0 0 Azul 18 Rojo 0 Verde 0 Negro 0
head 0 0
')[:expectation_results]
end
# Submission both declares and calls RecolectarPolen, with a repeat inside.
it 'works when all pass' do
# Equivalent to program { VolarAl_(Este) ; RecolectarPolen() } procedure RecolectarPolen() { repeat(5) { Sacar(Verde) } }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528058851731"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="procedures_callnoreturnnoparams" id="31;^FQ_kr`XO|9D9`y|1"><mutation name="Recolectar Polen"></mutation></block></next></block></statement></block><block type="procedures_defnoreturnnoparams" id="9)jdRaCNSwCopGPcG^6w" x="40" y="112"><field name="NAME">Recolectar Polen</field><statement name="STACK"><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></statement></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :passed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :passed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :passed}]
end
# Submission neither declares nor calls RecolectarPolen at all.
it 'works when all expectations fail' do
# Equivalent to program { VolarAl_(Este) ; repeat(5) { Sacar(Verde) } }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528054201699"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></next></block></statement></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :failed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :failed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :failed}]
end
# An unrelated empty procedure must not satisfy any of the expectations.
it 'works when some fail with empty blocks' do
# Equivalent to program { VolarAl_(Este) ; repeat(5) { Sacar(Verde) } } procedure HacerAlgo() { }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528058068163"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></next></block></statement></block><block type="procedures_defnoreturnnoparams" id="9)jdRaCNSwCopGPcG^6w" x="40" y="112"><field name="NAME">Hacer algo</field></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :failed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :failed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :failed}]
end
# RecolectarPolenta must not be confused with RecolectarPolen (prefix match).
it 'works when some fail because of similar names' do
# Equivalent to program { VolarAl_(Este) ; RecolectarPolenta() } procedure RecolectarPolenta() { repeat(5) { Sacar(Verde) } }
content = '<xml xmlns="http://www.w3.org/1999/xhtml"><variables><variable type="" id="#tkEaZ|1O/iYzpk$jb*F">direccion</variable></variables><block type="Program" id="Rp8VMHPa/]EZ}FvH/Pi|" deletable="false" x="42" y="-98"><mutation timestamp="1528058279725"></mutation><statement name="program"><block type="VolarAl_" id="`I4gt4t#Rj|OB3QwE2HP"><value name="arg1"><block type="DireccionSelector" id="7A8Ipt/$RClcH5KNEZ)^"><field name="DireccionDropdown">Este</field></block></value><next><block type="procedures_callnoreturnnoparams" id="31;^FQ_kr`XO|9D9`y|1"><mutation name="Recolectar Polenta"></mutation></block></next></block></statement></block><block type="procedures_defnoreturnnoparams" id="9)jdRaCNSwCopGPcG^6w" x="40" y="112"><field name="NAME">Recolectar Polenta</field><statement name="STACK"><block type="RepeticionSimple" id=";q2M8~vOaO_Qo~#Qxz#Z"><value name="count"><block type="math_number" id="6o]|L|{7Xh|#+p2VnYn("><field name="NUM">5</field></block></value><statement name="block"><block type="Sacar" id="8{aYL%2e+he~ztS%MlZ$"><value name="COLOR"><block type="ColorSelector" id="(VW][LR1vg)z*2!r,{kG"><field name="ColorDropdown">Verde</field></block></value></block></statement></block></statement></block></xml>'
expect(run_expectations! content).to eq [{binding: '*', inspection: "Uses:RecolectarPolen", result: :failed},
{binding: '*', inspection: "Declares:RecolectarPolen", result: :failed},
{binding: 'RecolectarPolen', inspection: "UsesRepeat", result: :failed}]
end
end
end
|
# Specs for Kana01Odai, a tiny expression evaluator over `*`, `+`, `&`
# and `|`. As the #convert_rpn example below shows, the evaluator binds
# `|` tightest, then `&`, then `+`, then `*` — the reverse of conventional
# precedence (e.g. '30+15*5' evaluates as (30+15)*5 == 225).
describe Kana01Odai do
  describe "evalex" do
    using RSpec::Parameterized::TableSyntax

    where(:formula, :expected) do
      '4*5+6&7|8'                                              | 44
      '15*5'                                                   | 75
      '15+5'                                                   | 20
      '15&5'                                                   | 5
      '15|5'                                                   | 15
      '30*15*5'                                                | 2250
      '30*15+5'                                                | 600
      '30*15&5'                                                | 150
      '30*15|5'                                                | 450
      '30+15*5'                                                | 225
      '30+15+5'                                                | 50
      '30+15&5'                                                | 35
      '30+15|5'                                                | 45
      '30&15*5'                                                | 70
      '30&15+5'                                                | 19
      '30&15&5'                                                | 4
      '30&15|5'                                                | 14
      '30|15*5'                                                | 155
      '30|15+5'                                                | 36
      '30|15&5'                                                | 5
      '30|15|5'                                                | 31
      '1+2+3+4+5+6+7+8+9+10'                                   | 55
      '1*2*3*4*5*6*7*8*9*10'                                   | 3628800
      '1+2+3+4+5*6+7+8+9+10'                                   | 600
      '1*2*3*4*5+6*7*8*9*10'                                   | 1330560
      '1|2|4|8|16|32|64|128|256|512'                           | 1023
      '2046&2045&2043&2039&2031&2015&1983&1919&1791&1535'      | 1024
      '0+1|7*6'                                                | 42
      '6|4+2&9+4'                                              | 10
      '0&6+1&6|4*2'                                            | 0
      '4|4*7+7+4&9'                                            | 56
      '9&8*2+3*1|2|7'                                          | 280
      '230+83*751&176'                                         | 50080
      '89+62465*94&84'                                         | 5254536
      '668&925+398*562'                                        | 599092
      '15|9+348*302&23&77'                                     | 1452
      '3&3&6|7+3|5*3|2&4*4'                                    | 0
      '7*6|7|7*1&7|7&3&8*3'                                    | 0
      '896+316*209*264&728'                                    | 2026464
      '1844+41*64|7906|66842'                                  | 138965970
      '2&41&6884*69857+68083'                                  | 0
      '2+3*9*3|6|7&0+3+3*6&8'                                  | 0
      '895+400*988|549&237+488'                                | 938875
      '30*48&99036+140&33+75|645'                              | 22050
      '278+2033+53*96*56|3303|3&14'                            | 3177216
      '5380&27|643*2+1888&74+30|16'                            | 0
      '0&3845+6645*4293+78&78*3102|9|3'                        | 90127550385
      '78&44956&67*974|413+13237*5588|54*668'                  | 0
      '500*206+145|167|163|465&668+662+806*681&458'            | 123896000
      '82+14&0*344+34+542916&18*11|844|64*873223|840993'       | 23813260003764
      # Was the Float literal `1.21782747565908E+016`: that value exceeds
      # 2**53, so a Float cannot represent the exact product and the
      # comparison against evalex's Integer result is unreliable.
      # Compare against the exact Integer instead.
      '374958|6727+53965&53*954&29|6*138572+59|547783&43*8998' | 12178274756590800
    end

    with_them do
      it_is_asserted_by { Kana01Odai.evalex(formula) == expected }
    end
  end

  describe "#parse" do
    # Tokenizes into alternating Integer operands and String operators.
    it_is_asserted_by{ Kana01Odai.parse("4*5+6&7|8") == [4, "*", 5, "+", 6, "&", 7, "|", 8] }
  end

  describe "#convert_rpn" do
    # Fully right-nested RPN: operators are applied in the order |, &, +, *.
    it_is_asserted_by{ Kana01Odai.convert_rpn("4*5+6&7|8") == [4, 5, 6, 7, 8, "|", "&", "+", "*"] }
  end

  describe "#calc_rpn" do
    subject{ Kana01Odai.calc_rpn(rpn_array) }

    using RSpec::Parameterized::TableSyntax

    where(:rpn_array, :expected) do
      [10, 20, "+"]          | 30
      [10, 20, 30, "*", "+"] | 610
    end

    with_them do
      it{ should eq expected }
    end
  end
end
Tweak format
describe Kana01Odai do
describe "evalex" do
using RSpec::Parameterized::TableSyntax
where(:formula, :expected) do
'4*5+6&7|8' | 44
'15*5' | 75
'15+5' | 20
'15&5' | 5
'15|5' | 15
'30*15*5' | 2250
'30*15+5' | 600
'30*15&5' | 150
'30*15|5' | 450
'30+15*5' | 225
'30+15+5' | 50
'30+15&5' | 35
'30+15|5' | 45
'30&15*5' | 70
'30&15+5' | 19
'30&15&5' | 4
'30&15|5' | 14
'30|15*5' | 155
'30|15+5' | 36
'30|15&5' | 5
'30|15|5' | 31
'1+2+3+4+5+6+7+8+9+10' | 55
'1*2*3*4*5*6*7*8*9*10' | 3628800
'1+2+3+4+5*6+7+8+9+10' | 600
'1*2*3*4*5+6*7*8*9*10' | 1330560
'1|2|4|8|16|32|64|128|256|512' | 1023
'2046&2045&2043&2039&2031&2015&1983&1919&1791&1535' | 1024
'0+1|7*6' | 42
'6|4+2&9+4' | 10
'0&6+1&6|4*2' | 0
'4|4*7+7+4&9' | 56
'9&8*2+3*1|2|7' | 280
'230+83*751&176' | 50080
'89+62465*94&84' | 5254536
'668&925+398*562' | 599092
'15|9+348*302&23&77' | 1452
'3&3&6|7+3|5*3|2&4*4' | 0
'7*6|7|7*1&7|7&3&8*3' | 0
'896+316*209*264&728' | 2026464
'1844+41*64|7906|66842' | 138965970
'2&41&6884*69857+68083' | 0
'2+3*9*3|6|7&0+3+3*6&8' | 0
'895+400*988|549&237+488' | 938875
'30*48&99036+140&33+75|645' | 22050
'278+2033+53*96*56|3303|3&14' | 3177216
'5380&27|643*2+1888&74+30|16' | 0
'0&3845+6645*4293+78&78*3102|9|3' | 90127550385
'78&44956&67*974|413+13237*5588|54*668' | 0
'500*206+145|167|163|465&668+662+806*681&458' | 123896000
'82+14&0*344+34+542916&18*11|844|64*873223|840993' | 23813260003764
'374958|6727+53965&53*954&29|6*138572+59|547783&43*8998' | 12178274756590800
end
with_them do
it_is_asserted_by { Kana01Odai.evalex(formula) == expected }
end
end
describe "#parse" do
it_is_asserted_by{ Kana01Odai.parse("4*5+6&7|8") == [4, "*", 5, "+", 6, "&", 7, "|", 8] }
end
describe "#convert_rpn" do
it_is_asserted_by{ Kana01Odai.convert_rpn("4*5+6&7|8") == [4, 5, 6, 7, 8, "|", "&", "+", "*"] }
end
describe "#calc_rpn" do
subject{ Kana01Odai.calc_rpn(rpn_array) }
using RSpec::Parameterized::TableSyntax
where(:rpn_array, :expected) do
[10, 20, "+"] | 30
[10, 20, 30, "*", "+"] | 610
end
with_them do
it{ should eq expected }
end
end
end
|
module TFA
  describe CLI do
    subject(:cli) { described_class.new }

    # A fresh random base32 secret per example.
    let(:secret) { ::ROTP::Base32.random_base32 }
    let(:key_name) { "development" }

    describe "#run" do
      context "when adding a key" do
        it "saves a new secret" do
          cli.add(key_name, secret)
          expect(cli.show(key_name)).to eql(secret)
        end
      end

      context "when getting a one time password" do
        it "creates a totp for a certain key" do
          cli.add(key_name, secret)
          expect(cli.totp(key_name)).not_to be_nil
        end
      end
    end
  end
end
Delete duplicate specs.
|
require 'find'
# FactoryLoader is intended to help scale object creation with less pain and less
# refactoring.
#
# In the early stages of a project object creation is simple and
# dependencies are kept to a minimum. As the project grows so does the
# complexity of object creation and dependencies. It doesn't make
# sense to create custom factory classes upfront to deal with complex
# object construction that may not exist yet. But when those custom
# factories are needed it is usually painful and time consuming to update
# the code base to use them. It's also easy for developers to give-in
# due to time constraints and start making bad decisions.
#
# This is where FactoryLoader comes into play. It automatically creates a Factory
# class for your objects and provides a Factory#create method which passes any arguments
# along to your object's constructor.
#
# When you need to have custom factory behavior you can implement the factory
# without having to update other code references (assuming you've used the factory
# in the rest of your application rather than direct class references).
#
# project/
# init.rb
# lib/
# |--things/
# |-- foo.rb
# |-- bar.rb
# |--factories/
# |-- bar_factory.rb
#
# Given the above project directory structure you could have the following code
# in init.rb:
# factory_loader = FactoryLoader.new("lib/factories")
# factory_loader.load("lib/things")
#
# The first call constructs a factory loader telling it which directory is used
# to store developer-written custom factories.
#
# The second call will create an in-memory factory class for each *.rb file
# in the lib/things/ directory. A FooFactory class will be created to
# correspond with the foo.rb file. The generated factory
# will provide a #create method which will pass along all arguments to
# the constructor of the object it wraps. So...
# FooFactory.new.create :a => :b
# is the same as:
# Foo.new :a => :b
#
# A BarFactory will NOT be created. This is because
# we told the FactoryLoader that custom factories are stored in lib/factories/
# and a bar_factory.rb file exists there, so FactoryLoader assumes you want to use
# a custom factory. It also assumes that the class inside of bar_factory.rb is BarFactory.
#
# FactoryLoader dynamically creates the factory classes -- they are not written
# to disk. FactoryLoader also uses file naming conventions to determine
# what to do. For example:
# foo.rb => FooFactory
# crazy_dolphins.rb => CrazyDolphinsFactory
#
# === Factory.new
# The dynamically created factories are CLASSES and create is an INSTANCE method on them. You
# have to construct a factory in order to use it. This is so the factories themselves can be easily used in dependency injection
# frameworks.
#
# === Public Git repository:
# git://github.com/zdennis/factory_loader.git
#
# === Homepage:
# http://www.continuousthinking.com/factory_loader
#
# === Author:
# * Zach Dennis at Mutually Human Software (zach.dennis@gmail.com, zdennis@mutuallyhuman.com)
#
# === Special Thanks
# * Dave Crosby at Atomic Object
# * Ryan Fogle at Atomic Object
class FactoryLoader
  VERSION = "0.1.0"

  # Constructs a FactoryLoader. Each of the given factory_paths is expanded
  # and later searched recursively for developer-written custom factories.
  def initialize(*factory_paths)
    @factory_paths = factory_paths.map { |f| File.expand_path(f) }
  end

  # Creates an in-memory factory class for every *.rb file found recursively
  # under +directory+, unless a conventionally named custom factory file
  # (<name>_factory.rb) already exists in one of the configured factory paths.
  def load(directory)
    Dir[File.join(directory, "**", "*.rb")].each do |file|
      object_filename = File.basename(file, ".rb")
      factory_filepath = "#{object_filename}_factory.rb"
      unless custom_factory_exists?(factory_filepath)
        load_object_factory object_filename.classify
      end
    end
  end

  private

  # True when any configured factory path (searched recursively) already
  # contains a file named +factory_filepath+.
  # (The original used Enumerable#find with a side-effecting local; #any?
  # expresses the same boolean directly.)
  def custom_factory_exists?(factory_filepath)
    @factory_paths.any? { |path| Dir["#{path}/**/#{factory_filepath}"].any? }
  end

  # Defines ::<object_name>Factory whose #create forwards its options to
  # <object_name>.new. No-op when the constant is already defined.
  # Uses Class.new/const_set instead of eval-ing generated source; the
  # wrapped constant is still resolved lazily at #create time, as before.
  def load_object_factory(object_name)
    factory_name = "#{object_name}Factory"
    return if Object.const_defined?(factory_name)

    factory = Class.new do
      define_method(:create) do |options = {}|
        Object.const_get(object_name).new(options)
      end
    end
    Object.const_set(factory_name, factory)
  end
end
Updated the version number to 0.1.1.
require 'find'
# FactoryLoader is intended to help scale object creation with less pain and less
# refactoring.
#
# In the early stages of a project object creation is simple and
# dependencies are kept to a minimum. As the project grows so does the
# complexity of object creation and dependencies. It doesn't make
# sense to create custom factory classes upfront to deal with complex
# object construction that may not exist yet. But when those custom
# factories are needed it is usually painful and time consuming to update
# the code base to use them. It's also easy for developers to give-in
# due to time constraints and start making bad decisions.
#
# This is where FactoryLoader comes into play. It automatically creates a Factory
# class for your objects and provides a Factory#create method which passes any arguments
# along to your object's constructor.
#
# When you need to have custom factory behavior you can implement the factory
# without having to update other code references (assuming you've used the factory
# in the rest of your application rather than direct class references).
#
# project/
# init.rb
# lib/
# |--things/
# |-- foo.rb
# |-- bar.rb
# |--factories/
# |-- bar_factory.rb
#
# Given the above project directory structure you could have the following code
# in init.rb:
# factory_loader = FactoryLoader.new("lib/factories")
# factory_loader.load("lib/things")
#
# The first call constructs a factory loader telling it which directory is used
# to store developer-written custom factories.
#
# The second call will create an in-memory factory class for each *.rb file
# in the lib/things/ directory. A FooFactory class will be created to
# correspond with the foo.rb file. The generated factory
# will provide a #create method which will pass along all arguments to
# the constructor of the object it wraps. So...
# FooFactory.new.create :a => :b
# is the same as:
# Foo.new :a => :b
#
# A BarFactory will NOT be created. This is because
# we told the FactoryLoader that custom factories are stored in lib/factories/
# and a bar_factory.rb file exists there, so FactoryLoader assumes you want to use
# a custom factory. It also assumes that the class inside of bar_factory.rb is BarFactory.
#
# FactoryLoader dynamically creates the factory classes -- they are not written
# to disk. FactoryLoader also uses file naming conventions to determine
# what to do. For example:
# foo.rb => FooFactory
# crazy_dolphins.rb => CrazyDolphinsFactory
#
# === Factory.new
# The dynamically created factories are CLASSES and create is an INSTANCE method on them. You
# have to construct a factory in order to use it. This is so the factories themselves can be easily used in dependency injection
# frameworks.
#
# === Public Git repository:
# git://github.com/zdennis/factory_loader.git
#
# === Homepage:
# http://www.continuousthinking.com/factory_loader
#
# === Author:
# * Zach Dennis at Mutually Human Software (zach.dennis@gmail.com, zdennis@mutuallyhuman.com)
#
# === Special Thanks
# * Dave Crosby at Atomic Object
# * Ryan Fogle at Atomic Object
class FactoryLoader
  VERSION = "0.1.1"

  # Builds a loader; every given path is expanded and searched recursively
  # for hand-written custom factories.
  def initialize(*factory_paths)
    @factory_paths = factory_paths.map { |path| File.expand_path(path) }
  end

  # Walks each *.rb file beneath +directory+ and generates an in-memory
  # factory class for it, unless a custom factory file with the conventional
  # <name>_factory.rb name already lives in one of the configured paths.
  def load(directory)
    Dir["#{directory}/**/*.rb"].each do |rb_file|
      base_name = File.basename(rb_file, ".rb")
      next if custom_factory_exists?("#{base_name}_factory.rb")

      load_object_factory(base_name.classify)
    end
  end

  private

  # Whether any configured factory path already provides +factory_filename+.
  def custom_factory_exists?(factory_filename)
    @factory_paths.any? { |dir| !Dir["#{dir}/**/#{factory_filename}"].empty? }
  end

  # Evaluates a tiny class definition so that ::<object_name>Factory#create
  # forwards its options straight to <object_name>.new. No-op when the
  # factory constant is already defined.
  def load_object_factory(object_name)
    factory_name = "#{object_name}Factory"
    return if Object.const_defined?(factory_name)

    eval <<-CODE
      class ::#{factory_name}
        def create(options={})
          #{object_name}.new options
        end
      end
    CODE
  end
end
|
describe "Medic::Store" do
before do
@subject = Medic::Store.new
end
describe "#authorize" do
it "calls #requestAuthorizationToShareTypes:readTypes:completion with correct args" do
@subject.mock! 'requestAuthorizationToShareTypes:readTypes:completion' do |share, read, comp|
share.should.be.kind_of? NSSet
read.should.be.kind_of? NSSet
comp.should.respond_to? :call
true
end
@subject.authorize(read: :step_count){|success, error|}.should == true
end
end
describe "#authorized?" do
it "calls #authorizationStatusForType with correct args" do
@subject.mock! 'authorizationStatusForType' do |type|
type.should.be.kind_of? HKObjectType
false
end
@subject.authorized?(:step_count).should == false
end
it "has an #is_authorized? alias" do
@subject.method(:is_authorized?).should == @subject.method(:authorized?)
end
it "has an #authorized_for? alias" do
@subject.method(:authorized_for?).should == @subject.method(:authorized?)
end
it "has an #is_authorized_for? alias" do
@subject.method(:is_authorized_for?).should == @subject.method(:authorized?)
end
end
describe "#biological_sex" do
it "calls #biologicalSexWithError with correct args" do
@subject.mock! 'biologicalSexWithError' do |error|
error.should.be.kind_of? Pointer
mock(:biologicalSex, return: HKBiologicalSexFemale)
end
@subject.biological_sex.should == :female
end
end
describe "#blood_type" do
it "calls #bloodTypeWithError with correct args" do
@subject.mock! 'bloodTypeWithError' do |error|
error.should.be.kind_of? Pointer
mock(:bloodType, return: HKBloodTypeONegative)
end
@subject.blood_type.should == :o_negative
end
end
describe "#date_of_birth" do
it "calls #dateOfBirthWithError with correct args" do
@subject.mock! 'dateOfBirthWithError' do |error|
error.should.be.kind_of? Pointer
true
end
@subject.date_of_birth.should == true
end
end
describe "#save" do
it "calls #saveObject:withCompletion with correct args" do
@subject.mock! 'saveObjects:withCompletion' do |object, comp|
object.first.should.be.kind_of? HKObjectType
comp.should.respond_to? :call
end
steps = @subject.object_type(:step_count)
@subject.save(steps){|success, error|}
end
end
describe "#delete" do
it "calls #deleteObject:withCompletion with correct args" do
@subject.mock! 'deleteObject:withCompletion' do |object, comp|
object.should.be.kind_of? HKObjectType
comp.should.respond_to? :call
end
steps = @subject.object_type(:step_count)
@subject.delete(steps){|success, error|}
end
end
describe "#execute" do
it "calls #executeQuery with correct args" do
@subject.mock! 'executeQuery' do |query|
query.should.be.kind_of? HKQuery
end
query = HKSampleQuery.alloc.initWithSampleType(@subject.object_type(:step_count), predicate:nil, limit:HKObjectQueryNoLimit, sortDescriptors:nil, resultsHandler:->(q,r,e){})
@subject.execute(query)
end
it "has an #execute_query alias" do
@subject.method(:execute_query).should == @subject.method(:execute)
end
end
describe "#stop" do
it "calls #stopQuery with correct args" do
@subject.mock! 'stopQuery' do |query|
query.should.be.kind_of? HKQuery
end
query = HKSampleQuery.alloc.initWithSampleType(@subject.object_type(:step_count), predicate:nil, limit:HKObjectQueryNoLimit, sortDescriptors:nil, resultsHandler:->(q,r,e){})
@subject.stop(query)
end
it "has an #stop_query alias" do
@subject.method(:stop_query).should == @subject.method(:stop)
end
end
describe "#enable_background_delivery" do
it "calls #enableBackgroundDeliveryForType with correct args" do
@subject.mock! 'enableBackgroundDeliveryForType:frequency:withCompletion' do |type, freq, comp|
type.should.be.kind_of? HKObjectType
freq.should == HKUpdateFrequencyWeekly
comp.should.respond_to? :call
end
@subject.enable_background_delivery(:step_count, :weekly){|success, error|}
end
it "has an #enable_background_delivery_for alias" do
@subject.method(:enable_background_delivery_for).should == @subject.method(:enable_background_delivery)
end
end
describe "#disable_background_delivery" do
it "calls #disableBackgroundDeliveryForType:withCompletion with correct args" do
@subject.mock! 'disableBackgroundDeliveryForType:withCompletion' do |type, comp|
type.should.be.kind_of? HKObjectType
comp.should.respond_to? :call
end
@subject.disable_background_delivery(:step_count){|success, error|}
end
it "has a #disable_background_delivery_for alias" do
@subject.method(:disable_background_delivery_for).should == @subject.method(:disable_background_delivery)
end
end
describe "#disable_all_background_delivery" do
it "calls #disableAllBackgroundDeliveryWithCompletion with correct args" do
@subject.mock! 'disableAllBackgroundDeliveryWithCompletion' do |comp|
comp.should.respond_to? :call
end
@subject.disable_all_background_delivery{|success, error|}
end
end
end
Fix broken spec
describe "Medic::Store" do
before do
@subject = Medic::Store.new
end
describe "#authorize" do
it "calls #requestAuthorizationToShareTypes:readTypes:completion with correct args" do
@subject.mock! 'requestAuthorizationToShareTypes:readTypes:completion' do |share, read, comp|
share.should.be.kind_of? NSSet
read.should.be.kind_of? NSSet
comp.should.respond_to? :call
true
end
@subject.authorize(read: :step_count){|success, error|}.should == true
end
end
describe "#authorized?" do
it "calls #authorizationStatusForType with correct args" do
@subject.mock! 'authorizationStatusForType' do |type|
type.should.be.kind_of? HKObjectType
false
end
@subject.authorized?(:step_count).should == false
end
it "has an #is_authorized? alias" do
@subject.method(:is_authorized?).should == @subject.method(:authorized?)
end
it "has an #authorized_for? alias" do
@subject.method(:authorized_for?).should == @subject.method(:authorized?)
end
it "has an #is_authorized_for? alias" do
@subject.method(:is_authorized_for?).should == @subject.method(:authorized?)
end
end
describe "#biological_sex" do
it "calls #biologicalSexWithError with correct args" do
@subject.mock! 'biologicalSexWithError' do |error|
error.should.be.kind_of? Pointer
mock(:biologicalSex, return: HKBiologicalSexFemale)
end
@subject.biological_sex.should == :female
end
end
describe "#blood_type" do
it "calls #bloodTypeWithError with correct args" do
@subject.mock! 'bloodTypeWithError' do |error|
error.should.be.kind_of? Pointer
mock(:bloodType, return: HKBloodTypeONegative)
end
@subject.blood_type.should == :o_negative
end
end
describe "#date_of_birth" do
it "calls #dateOfBirthWithError with correct args" do
@subject.mock! 'dateOfBirthWithError' do |error|
error.should.be.kind_of? Pointer
true
end
@subject.date_of_birth.should == true
end
end
describe "#save" do
it "calls #saveObject:withCompletion with correct args" do
@subject.mock! 'saveObjects:withCompletion' do |object, comp|
object.first.should.be.kind_of? HKSample
comp.should.respond_to? :call
end
steps = { quantity_type: :step_count, quantity: 50 }
@subject.save(steps){|success, error|}
end
end
describe "#delete" do
it "calls #deleteObject:withCompletion with correct args" do
@subject.mock! 'deleteObject:withCompletion' do |object, comp|
object.should.be.kind_of? HKObjectType
comp.should.respond_to? :call
end
steps = @subject.object_type(:step_count)
@subject.delete(steps){|success, error|}
end
end
describe "#execute" do
it "calls #executeQuery with correct args" do
@subject.mock! 'executeQuery' do |query|
query.should.be.kind_of? HKQuery
end
query = HKSampleQuery.alloc.initWithSampleType(@subject.object_type(:step_count), predicate:nil, limit:HKObjectQueryNoLimit, sortDescriptors:nil, resultsHandler:->(q,r,e){})
@subject.execute(query)
end
it "has an #execute_query alias" do
@subject.method(:execute_query).should == @subject.method(:execute)
end
end
describe "#stop" do
it "calls #stopQuery with correct args" do
@subject.mock! 'stopQuery' do |query|
query.should.be.kind_of? HKQuery
end
query = HKSampleQuery.alloc.initWithSampleType(@subject.object_type(:step_count), predicate:nil, limit:HKObjectQueryNoLimit, sortDescriptors:nil, resultsHandler:->(q,r,e){})
@subject.stop(query)
end
it "has an #stop_query alias" do
@subject.method(:stop_query).should == @subject.method(:stop)
end
end
describe "#enable_background_delivery" do
it "calls #enableBackgroundDeliveryForType with correct args" do
@subject.mock! 'enableBackgroundDeliveryForType:frequency:withCompletion' do |type, freq, comp|
type.should.be.kind_of? HKObjectType
freq.should == HKUpdateFrequencyWeekly
comp.should.respond_to? :call
end
@subject.enable_background_delivery(:step_count, :weekly){|success, error|}
end
it "has an #enable_background_delivery_for alias" do
@subject.method(:enable_background_delivery_for).should == @subject.method(:enable_background_delivery)
end
end
describe "#disable_background_delivery" do
it "calls #disableBackgroundDeliveryForType:withCompletion with correct args" do
@subject.mock! 'disableBackgroundDeliveryForType:withCompletion' do |type, comp|
type.should.be.kind_of? HKObjectType
comp.should.respond_to? :call
end
@subject.disable_background_delivery(:step_count){|success, error|}
end
it "has a #disable_background_delivery_for alias" do
@subject.method(:disable_background_delivery_for).should == @subject.method(:disable_background_delivery)
end
end
describe "#disable_all_background_delivery" do
it "calls #disableAllBackgroundDeliveryWithCompletion with correct args" do
@subject.mock! 'disableAllBackgroundDeliveryWithCompletion' do |comp|
comp.should.respond_to? :call
end
@subject.disable_all_background_delivery{|success, error|}
end
end
end
|
require 'spec_helper'
describe Club do
it { should belong_to :user }
it { should have_many :courses }
it { should have_many :articles }
it { should have_one :discussion_board }
it { should have_one :sales_page }
it { should have_one :upsell_page }
it { should have_many(:topics).through(:discussion_board) }
it { should have_many :subscriptions }
it { should have_many(:lessons).through(:courses) }
it "can be instantiated" do
Club.new.should be_an_instance_of(Club)
end
describe "initialize" do
let!(:club) { FactoryGirl.create(:user).clubs.first }
it "assigns the default name" do
club.name.should == Settings.clubs[:default_name]
end
it "assigns the default sub_heading" do
club.sub_heading.should == Settings.clubs[:default_sub_heading]
end
it "assigns the default description" do
club.description.should == Settings.clubs[:default_description]
end
it "assigns the default logo" do
club.logo.to_s.should == Settings.clubs[:default_logo]
end
it "assigns the default price_cents" do
club.price_cents.should == Settings.clubs[:default_price_cents]
end
it "assigns the default free_content" do
club.free_content.should == Settings.clubs[:default_free_content]
end
# custom headings
it "assigns the default courses_heading" do
club.courses_heading.should == Settings.clubs[:default_courses_heading]
end
it "assigns the default articles_heading" do
club.articles_heading.should == Settings.clubs[:default_articles_heading]
end
it "assigns the default discussions_heading" do
club.discussions_heading.should == Settings.clubs[:default_discussions_heading]
end
it "assigns the default lessons_heading" do
club.lessons_heading.should == Settings.clubs[:default_lessons_heading]
end
# discussion_board
it "builds a default discussion_board" do
club.discussion_board.should_not be_blank
end
# sales_page
it "builds a default sales_page" do
club.sales_page.should_not be_blank
end
# upsell_page
it "builds a default upsell_page" do
club.upsell_page.should_not be_blank
end
end
describe "valid?" do
# name
it "returns false when no name is specified" do
FactoryGirl.build(:club, :name => "").should_not be_valid
end
it "returns false when name is greater than max characters" do
FactoryGirl.build(:club, :name => Faker::Lorem.characters(Settings.clubs[:name_max_length] + 1)).should_not be_valid
end
# sub_heading
it "returns false when no sub_heading is specified" do
FactoryGirl.build(:club, :sub_heading => "").should_not be_valid
end
it "returns false when sub_heading is greater than max characters" do
FactoryGirl.build(:club, :sub_heading => Faker::Lorem.characters(Settings.clubs[:sub_heading_max_length] + 1)).should_not be_valid
end
# description
it "returns false when no description is specified" do
FactoryGirl.build(:club, :description => "").should_not be_valid
end
# price_cents
it "returns false when no price is specified" do
FactoryGirl.build(:club, :price_cents => "").should_not be_valid
end
it "returns true when having a price of at least $10" do
FactoryGirl.build(:club, :price_cents => "1000").should be_valid
end
it "returns false when having a price of less than $10" do
FactoryGirl.build(:club, :price_cents => "1").should_not be_valid
end
# free_content
describe "for free_content" do
it "returns false when no free_content is specified" do
FactoryGirl.build(:club, :free_content => "").should_not be_valid
end
it "returns true when free_content is a string of a boolean" do
FactoryGirl.build(:club, :free_content => true).should be_valid
end
end
# courses_heading
it "returns false when no courses_heading is specified" do
FactoryGirl.build(:club, :courses_heading => "").should_not be_valid
end
# articles_heading
it "returns false when no articles_heading is specified" do
FactoryGirl.build(:club, :articles_heading => "").should_not be_valid
end
# discussions_heading
it "returns false when no discussions_heading is specified" do
FactoryGirl.build(:club, :discussions_heading => "").should_not be_valid
end
# lessons_heading
it "returns false when no lessons_heading is specified" do
FactoryGirl.build(:club, :lessons_heading => "").should_not be_valid
end
# user association
it "returns false when missing a user_id" do
FactoryGirl.build(:club, :user_id => nil).should_not be_valid
end
end
describe "courses" do
before :each do
@club = FactoryGirl.create :club
FactoryGirl.create :course, :club_id => @club.id
end
it "should be destroyed when the club is destroyed" do
expect { @club.destroy }.to change(Course, :count).by(-1)
end
end
describe "courses ordering" do
let(:club) { FactoryGirl.create :club }
let(:course3) { FactoryGirl.create :course, :club_id => club.id, :position => 3 }
let(:course1) { FactoryGirl.create :course, :club_id => club.id, :position => 1 }
let(:course2) { FactoryGirl.create :course, :club_id => club.id, :position => 2 }
it "should order courses by position" do
club.courses.should == [ course1, course2, course3 ]
end
end
describe "articles" do
before :each do
@club = FactoryGirl.create :club
FactoryGirl.create :article, :club_id => @club.id
end
it "should be destroyed when the club is destroyed" do
expect { @club.destroy }.to change(Article, :count).by(-1)
end
end
describe "discussion_board" do
before :each do
@club = FactoryGirl.create :club
FactoryGirl.create :discussion_board, :club_id => @club.id
end
it "should be destroyed when the club is destroyed" do
expect { @club.destroy }.to change(DiscussionBoard, :count).by(-1)
end
end
describe "sales_page" do
before :each do
@club = FactoryGirl.create :club
FactoryGirl.create :sales_page, :club_id => @club.id
end
it "should be destroyed when the club is destroyed" do
expect { @club.destroy }.to change(SalesPage, :count).by(-1)
end
end
describe "upsell_page" do
before :each do
@club = FactoryGirl.create :club
FactoryGirl.create :upsell_page, :club_id => @club.id
end
it "should be destroyed when the club is destroyed" do
expect { @club.destroy }.to change(UpsellPage, :count).by(-1)
end
end
describe "members" do
let!(:club) { FactoryGirl.create :club }
let!(:subscriber) { FactoryGirl.create :user }
let!(:subscription) { FactoryGirl.create :subscription, :club => club, :user => subscriber }
it "reports the list of members" do
club.members.should include(subscriber)
end
describe "for pro members" do
let!(:active_pro_subscriber) { FactoryGirl.create :user }
let!(:expired_pro_subscriber) { FactoryGirl.create :user }
let!(:active_pro_subscription) { FactoryGirl.create :subscription, :club => club, :user => active_pro_subscriber, :level => 'pro', :pro_status => "ACTIVE" }
let!(:expired_pro_subscription) { FactoryGirl.create :subscription, :club => club, :user => expired_pro_subscriber, :level => 'pro', :pro_status => "INACTIVE" }
it "includes pro members who have an active pro subscription" do
club.members.should include(active_pro_subscriber)
end
it "does not include pro members whose pro subscription has expired" do
club.members.should_not include(expired_pro_subscriber)
end
end
end
end
Add Spec to Verify Custom Heading Length for Club
Add tests to verify that custom heading lengths are checked for Clubs.
require 'spec_helper'
describe Club do
it { should belong_to :user }
it { should have_many :courses }
it { should have_many :articles }
it { should have_one :discussion_board }
it { should have_one :sales_page }
it { should have_one :upsell_page }
it { should have_many(:topics).through(:discussion_board) }
it { should have_many :subscriptions }
it { should have_many(:lessons).through(:courses) }
it "can be instantiated" do
Club.new.should be_an_instance_of(Club)
end
describe "initialize" do
let!(:club) { FactoryGirl.create(:user).clubs.first }
it "assigns the default name" do
club.name.should == Settings.clubs[:default_name]
end
it "assigns the default sub_heading" do
club.sub_heading.should == Settings.clubs[:default_sub_heading]
end
it "assigns the default description" do
club.description.should == Settings.clubs[:default_description]
end
it "assigns the default logo" do
club.logo.to_s.should == Settings.clubs[:default_logo]
end
it "assigns the default price_cents" do
club.price_cents.should == Settings.clubs[:default_price_cents]
end
it "assigns the default free_content" do
club.free_content.should == Settings.clubs[:default_free_content]
end
# custom headings
it "assigns the default courses_heading" do
club.courses_heading.should == Settings.clubs[:default_courses_heading]
end
it "assigns the default articles_heading" do
club.articles_heading.should == Settings.clubs[:default_articles_heading]
end
it "assigns the default discussions_heading" do
club.discussions_heading.should == Settings.clubs[:default_discussions_heading]
end
it "assigns the default lessons_heading" do
club.lessons_heading.should == Settings.clubs[:default_lessons_heading]
end
# discussion_board
it "builds a default discussion_board" do
club.discussion_board.should_not be_blank
end
# sales_page
it "builds a default sales_page" do
club.sales_page.should_not be_blank
end
# upsell_page
it "builds a default upsell_page" do
club.upsell_page.should_not be_blank
end
end
describe "valid?" do
# name
it "returns false when no name is specified" do
FactoryGirl.build(:club, :name => "").should_not be_valid
end
it "returns false when name is greater than max characters" do
FactoryGirl.build(:club, :name => Faker::Lorem.characters(Settings.clubs[:name_max_length] + 1)).should_not be_valid
end
# sub_heading
it "returns false when no sub_heading is specified" do
FactoryGirl.build(:club, :sub_heading => "").should_not be_valid
end
it "returns false when sub_heading is greater than max characters" do
FactoryGirl.build(:club, :sub_heading => Faker::Lorem.characters(Settings.clubs[:sub_heading_max_length] + 1)).should_not be_valid
end
# description
it "returns false when no description is specified" do
FactoryGirl.build(:club, :description => "").should_not be_valid
end
# price_cents
it "returns false when no price is specified" do
FactoryGirl.build(:club, :price_cents => "").should_not be_valid
end
it "returns true when having a price of at least $10" do
FactoryGirl.build(:club, :price_cents => "1000").should be_valid
end
it "returns false when having a price of less than $10" do
FactoryGirl.build(:club, :price_cents => "1").should_not be_valid
end
# free_content
describe "for free_content" do
it "returns false when no free_content is specified" do
FactoryGirl.build(:club, :free_content => "").should_not be_valid
end
it "returns true when free_content is a string of a boolean" do
FactoryGirl.build(:club, :free_content => true).should be_valid
end
end
# courses_heading
it "returns false when no courses_heading is specified" do
FactoryGirl.build(:club, :courses_heading => "").should_not be_valid
end
it "returns false when courses_heading is greater than max characters" do
FactoryGirl.build(:club, :courses_heading => Faker::Lorem.characters(Settings.clubs[:courses_heading_max_length] + 1)).should_not be_valid
end
# articles_heading
it "returns false when no articles_heading is specified" do
FactoryGirl.build(:club, :articles_heading => "").should_not be_valid
end
it "returns false when articles_heading is greater than max characters" do
FactoryGirl.build(:club, :articles_heading => Faker::Lorem.characters(Settings.clubs[:articles_heading_max_length] + 1)).should_not be_valid
end
# discussions_heading
it "returns false when no discussions_heading is specified" do
FactoryGirl.build(:club, :discussions_heading => "").should_not be_valid
end
it "returns false when discussions_heading is greater than max characters" do
FactoryGirl.build(:club, :discussions_heading => Faker::Lorem.characters(Settings.clubs[:discussions_heading_max_length] + 1)).should_not be_valid
end
# lessons_heading
it "returns false when no lessons_heading is specified" do
FactoryGirl.build(:club, :lessons_heading => "").should_not be_valid
end
it "returns false when lessons_heading is greater than max characters" do
FactoryGirl.build(:club, :lessons_heading => Faker::Lorem.characters(Settings.clubs[:lessons_heading_max_length] + 1)).should_not be_valid
end
# user association
it "returns false when missing a user_id" do
FactoryGirl.build(:club, :user_id => nil).should_not be_valid
end
end
describe "courses" do
  # Eagerly create one club with one course (let! mirrors the old before :each).
  let!(:club) { FactoryGirl.create :club }
  let!(:course) { FactoryGirl.create :course, :club_id => club.id }
  it "should be destroyed when the club is destroyed" do
    expect { club.destroy }.to change(Course, :count).by(-1)
  end
end
describe "courses ordering" do
  let(:club) { FactoryGirl.create :club }
  # let! (eager) instead of let (lazy): with lazy `let` none of the courses
  # exist when the example starts, so whether `club.courses` saw them
  # depended on the evaluation order of the comparison expression rather
  # than on records actually persisted up front.
  let!(:course3) { FactoryGirl.create :course, :club_id => club.id, :position => 3 }
  let!(:course1) { FactoryGirl.create :course, :club_id => club.id, :position => 1 }
  let!(:course2) { FactoryGirl.create :course, :club_id => club.id, :position => 2 }
  it "should order courses by position" do
    club.courses.should == [ course1, course2, course3 ]
  end
end
describe "articles" do
  # Eagerly create one club with one article (let! mirrors the old before :each).
  let!(:club) { FactoryGirl.create :club }
  let!(:article) { FactoryGirl.create :article, :club_id => club.id }
  it "should be destroyed when the club is destroyed" do
    expect { club.destroy }.to change(Article, :count).by(-1)
  end
end
describe "discussion_board" do
  # Eagerly create one club with one discussion board.
  let!(:club) { FactoryGirl.create :club }
  let!(:board) { FactoryGirl.create :discussion_board, :club_id => club.id }
  it "should be destroyed when the club is destroyed" do
    expect { club.destroy }.to change(DiscussionBoard, :count).by(-1)
  end
end
describe "sales_page" do
  # Eagerly create one club with one sales page.
  let!(:club) { FactoryGirl.create :club }
  let!(:sales_page) { FactoryGirl.create :sales_page, :club_id => club.id }
  it "should be destroyed when the club is destroyed" do
    expect { club.destroy }.to change(SalesPage, :count).by(-1)
  end
end
describe "upsell_page" do
  # Eagerly create one club with one upsell page.
  let!(:club) { FactoryGirl.create :club }
  let!(:upsell_page) { FactoryGirl.create :upsell_page, :club_id => club.id }
  it "should be destroyed when the club is destroyed" do
    expect { club.destroy }.to change(UpsellPage, :count).by(-1)
  end
end
# Membership listing: a club's members are its subscribers; pro-level
# subscribers count only while their pro subscription is ACTIVE.
describe "members" do
let!(:club) { FactoryGirl.create :club }
let!(:subscriber) { FactoryGirl.create :user }
let!(:subscription) { FactoryGirl.create :subscription, :club => club, :user => subscriber }
it "reports the list of members" do
club.members.should include(subscriber)
end
describe "for pro members" do
let!(:active_pro_subscriber) { FactoryGirl.create :user }
let!(:expired_pro_subscriber) { FactoryGirl.create :user }
# pro_status strings "ACTIVE"/"INACTIVE" drive the membership filter.
let!(:active_pro_subscription) { FactoryGirl.create :subscription, :club => club, :user => active_pro_subscriber, :level => 'pro', :pro_status => "ACTIVE" }
let!(:expired_pro_subscription) { FactoryGirl.create :subscription, :club => club, :user => expired_pro_subscriber, :level => 'pro', :pro_status => "INACTIVE" }
it "includes pro members who have an active pro subscription" do
club.members.should include(active_pro_subscriber)
end
it "does not include pro members whose pro subscription has expired" do
club.members.should_not include(expired_pro_subscriber)
end
end
end
end
|
require 'rails_helper'
describe Game do
  # NOTE(review): `game` is declared (and created) before `user`, with a
  # hard-coded user_id of 1 -- this only lines up if ids restart per example;
  # verify against the database-cleaning strategy.
  let!(:game) { Game.create(score: 10, scrabble_score: 10, level: 1, lines: 3, user_id: 1) }
  let!(:user) { User.create(username: "stephen", email: "stephen@gmail.com", password: "password") }
  context "score" do
    it "should return score" do
      expect(game.score).to eq(10)
    end
  end
  context "scrabble_score" do
    it "should return scrabble score" do
      expect(game.scrabble_score).to eq(10)
    end
  end
  context "level" do
    it "should return level" do
      expect(game.level).to eq(1)
    end
  end
  context "lines" do
    it "should return lines" do
      expect(game.lines).to eq(3)
    end
  end
  context "user" do
    # Typo fix in example description: "user rid" -> "user id".
    it "should return user id" do
      expect(game.user_id).to eq(1)
    end
  end
  context "validate presence of score" do
    it { should validate_presence_of(:score) }
  end
  context "validate presence of scrabble_score" do
    it { should validate_presence_of(:scrabble_score) }
  end
  context "validate presence of level" do
    it { should validate_presence_of(:level) }
  end
end
Validate presence of lines in game spec
- rspec: passing
require 'rails_helper'
describe Game do
  # NOTE(review): `game` is declared (and created) before `user`, with a
  # hard-coded user_id of 1 -- this only lines up if ids restart per example;
  # verify against the database-cleaning strategy.
  let!(:game) { Game.create(score: 10, scrabble_score: 10, level: 1, lines: 3, user_id: 1) }
  let!(:user) { User.create(username: "stephen", email: "stephen@gmail.com", password: "password") }
  context "score" do
    it "should return score" do
      expect(game.score).to eq(10)
    end
  end
  context "scrabble_score" do
    it "should return scrabble score" do
      expect(game.scrabble_score).to eq(10)
    end
  end
  context "level" do
    it "should return level" do
      expect(game.level).to eq(1)
    end
  end
  context "lines" do
    it "should return lines" do
      expect(game.lines).to eq(3)
    end
  end
  context "user" do
    # Typo fix in example description: "user rid" -> "user id".
    it "should return user id" do
      expect(game.user_id).to eq(1)
    end
  end
  context "validate presence of score" do
    it { should validate_presence_of(:score) }
  end
  context "validate presence of scrabble_score" do
    it { should validate_presence_of(:scrabble_score) }
  end
  context "validate presence of level" do
    it { should validate_presence_of(:level) }
  end
  # Renamed from the vague "validate presence" to name the attribute,
  # matching the sibling contexts.
  context "validate presence of lines" do
    it { should validate_presence_of(:lines) }
  end
end
|
# encoding: utf-8
require "spec_helper"
describe Page do
# Class-level scopes and helpers on Page.
describe ".archive_finder" do
subject { Page.archive_finder }
it { is_expected.to be_a(PagesCore::ArchiveFinder) }
specify { expect(subject.timestamp_attribute).to eq(:published_at) }
end
describe ".published" do
let!(:published_page) { create(:page) }
# status: 3 presumably means "hidden" -- confirm against the Page model enum.
let!(:hidden_page) { create(:page, status: 3) }
let!(:autopublish_page) do
create(:page, published_at: (Time.now + 2.hours))
end
subject { Page.published }
it { is_expected.to include(published_page) }
it { is_expected.not_to include(hidden_page) }
it { is_expected.not_to include(autopublish_page) }
end
describe ".localized" do
let!(:norwegian_page) { Page.create(name: "Test", locale: "nb") }
let!(:english_page) { Page.create(name: "Test", locale: "en") }
subject { Page.localized("nb") }
it { is_expected.to include(norwegian_page) }
it { is_expected.not_to include(english_page) }
end
describe ".locales" do
let(:page) do
Page.create(
excerpt: { "en" => "My test page", "nb" => "Testside" },
locale: "en"
)
end
subject { page.locales }
it { is_expected.to match(%w(en nb)) }
end
# Tree navigation: parent, ancestors (nearest first), and root.
describe "with ancestors" do
let(:root) { Page.create }
let(:parent) { Page.create(parent: root) }
let(:page) { Page.create(parent: parent) }
it "belongs to the parent" do
expect(page.parent).to eq(parent)
end
it "is a child of root" do
expect(page.ancestors).to include(root)
end
it "has both as ancestors" do
expect(page.ancestors).to eq([parent, root])
end
it "has a root page" do
expect(page.root).to eq(root)
end
end
describe "setting multiple locales" do
  let(:page) do
    Page.create(
      excerpt: { "en" => "My test page", "nb" => "Testside" },
      locale: "en"
    )
  end
  it "should respond with the locale specific string" do
    expect(page.excerpt?).to eq(true)
    expect(page.excerpt.to_s).to eq("My test page")
    expect(page.localize("nb").excerpt.to_s).to eq("Testside")
  end
  it "should remove the unnecessary locales" do
    expect(page.locales).to match(%w(en nb))
    page.update(excerpt: "")
    # Reload so the assertion reads persisted state rather than the
    # in-memory localizations still cached on the instance.
    page.reload
    expect(page.locales).to match(["nb"])
  end
end
# Uninitialized localized columns return a blank, string-like Localization.
it "should return a blank Localization for uninitialized columns" do
page = Page.new
expect(page.body?).to eq(false)
expect(page.body).to be_a(String)
end
# Single-locale excerpt behavior: presence, string-likeness, update, removal.
describe "with an excerpt" do
let(:page) { Page.create(excerpt: "My test page", locale: "en") }
it "responds to excerpt?" do
expect(page.excerpt?).to eq(true)
page.excerpt = nil
expect(page.excerpt?).to eq(false)
end
it "excerpt should be a localization" do
expect(page.excerpt).to be_kind_of(String)
expect(page.excerpt.to_s).to eq("My test page")
end
it "should be changed when saved" do
page.update(excerpt: "Hi")
page.reload
expect(page.excerpt.to_s).to eq("Hi")
end
it "should remove the localization when nilified" do
page.update(excerpt: nil)
expect(page.valid?).to eq(true)
page.reload
expect(page.excerpt?).to eq(false)
end
end
end
Fix broken Page spec
# encoding: utf-8
require "spec_helper"
# Model spec for Page: scopes, localization, and tree navigation.
describe Page do
describe ".archive_finder" do
subject { Page.archive_finder }
it { is_expected.to be_a(PagesCore::ArchiveFinder) }
specify { expect(subject.timestamp_attribute).to eq(:published_at) }
end
describe ".published" do
let!(:published_page) { create(:page) }
# status: 3 presumably means "hidden" -- confirm against the Page model enum.
let!(:hidden_page) { create(:page, status: 3) }
let!(:autopublish_page) do
create(:page, published_at: (Time.now + 2.hours))
end
subject { Page.published }
it { is_expected.to include(published_page) }
it { is_expected.not_to include(hidden_page) }
it { is_expected.not_to include(autopublish_page) }
end
describe ".localized" do
let!(:norwegian_page) { Page.create(name: "Test", locale: "nb") }
let!(:english_page) { Page.create(name: "Test", locale: "en") }
subject { Page.localized("nb") }
it { is_expected.to include(norwegian_page) }
it { is_expected.not_to include(english_page) }
end
describe ".locales" do
let(:page) do
Page.create(
excerpt: { "en" => "My test page", "nb" => "Testside" },
locale: "en"
)
end
subject { page.locales }
it { is_expected.to match(%w(en nb)) }
end
describe "with ancestors" do
let(:root) { Page.create }
let(:parent) { Page.create(parent: root) }
let(:page) { Page.create(parent: parent) }
it "belongs to the parent" do
expect(page.parent).to eq(parent)
end
it "is a child of root" do
expect(page.ancestors).to include(root)
end
it "has both as ancestors" do
expect(page.ancestors).to eq([parent, root])
end
it "has a root page" do
expect(page.root).to eq(root)
end
end
describe "setting multiple locales" do
let(:page) do
Page.create(
excerpt: { "en" => "My test page", "nb" => "Testside" },
locale: "en"
)
end
it "should respond with the locale specific string" do
expect(page.excerpt?).to eq(true)
expect(page.excerpt.to_s).to eq("My test page")
expect(page.localize("nb").excerpt.to_s).to eq("Testside")
end
it "should remove the unnecessary locales" do
expect(page.locales).to match(%w(en nb))
page.update(excerpt: "")
# reload so the assertion reads persisted state, not cached localizations.
page.reload
expect(page.locales).to match(["nb"])
end
end
# Uninitialized localized columns return a blank, string-like Localization.
it "should return a blank Localization for uninitialized columns" do
page = Page.new
expect(page.body?).to eq(false)
expect(page.body).to be_a(String)
end
describe "with an excerpt" do
let(:page) { Page.create(excerpt: "My test page", locale: "en") }
it "responds to excerpt?" do
expect(page.excerpt?).to eq(true)
page.excerpt = nil
expect(page.excerpt?).to eq(false)
end
it "excerpt should be a localization" do
expect(page.excerpt).to be_kind_of(String)
expect(page.excerpt.to_s).to eq("My test page")
end
it "should be changed when saved" do
page.update(excerpt: "Hi")
page.reload
expect(page.excerpt.to_s).to eq("Hi")
end
it "should remove the localization when nilified" do
page.update(excerpt: nil)
expect(page.valid?).to eq(true)
page.reload
expect(page.excerpt?).to eq(false)
end
end
end
|
# == Schema Information
#
# Table name: plans
#
# active :boolean not null
# created_at :datetime
# id :integer not null, primary key
# name :string(255) not null
# updated_at :datetime
# user_id :integer not null
#
# Model spec for Plan: associations, validations, activation behavior,
# and degree-requirement accounting.
describe Plan do
subject { plan }
let!(:user) { create(:user) }
let(:plan) { build(:plan, user: user) }
let(:users_other_plan) { build(:plan, user: user) }
let(:duplicate_plan) { build(:plan, name: plan.name, user: user) }
it { should belong_to(:user) }
it { should have_many(:planned_courses).dependent(:destroy) }
it { should respond_to(:user) }
it { should respond_to(:name) }
it { should respond_to(:active) }
it { should respond_to(:activate!) }
it { should respond_to(:created_at) }
it { should respond_to(:updated_at) }
it { should respond_to(:planned_courses) }
it { should respond_to(:courses) }
it { should respond_to(:degree_requirement_counts) }
it { should respond_to(:taken_and_planned_courses) }
it { should validate_presence_of(:user) }
it { should validate_presence_of(:name) }
it { should ensure_length_of(:name).is_at_least(1) }
it { should ensure_length_of(:name).is_at_most(35) }
describe "with valid attributes" do
it { should be_valid }
end
describe "with taken name" do
before { duplicate_plan.save }
it { should be_invalid }
end
# Saving a plan should also make it the user's active plan.
describe "creation" do
it "changes user's number of plans by 1" do
saving_plan_changes_users_plans_size(plan, 0, 1)
expect(user.plans.first).to eq(plan)
end
context "when user already has another plan" do
before { users_other_plan.save }
it "changes user's number of plans from 1 to 2" do
saving_plan_changes_users_plans_size(plan, 1, 2)
end
it "changes user's active plan to the new plan" do
expect { plan.save }.to change(user, :active_plan)
.from(users_other_plan).to(plan)
end
it "changes the old plan's active attribute to false" do
expect do
plan.save
users_other_plan.reload
end.to change(users_other_plan, :active).from(true).to(false)
end
end
context "#active = true" do
context "when user has no other plans" do
it "sets a new plan to active" do
expect { plan.save }.to change(plan, :active).to(true)
end
it "makes user#active_plan return it" do
expect { plan.save }.to change(user, :active_plan).from(nil).to(plan)
end
end
context "when user's other plan was active" do
before { users_other_plan.save }
it "becomes the user's new active plan" do
expect { plan.save }.to change(user, :active_plan)
.from(users_other_plan).to(plan)
end
it "sets user's other plans to false" do
expect do
plan.save
users_other_plan.reload
end.to change(users_other_plan, :active).from(true).to(false)
end
it "changes user's total plans count to two" do
saving_plan_changes_users_plans_size(plan, 1, 2)
end
end
end
end
describe "#activate!" do
context "when other plan was active" do
before do
plan.save
users_other_plan.save
plan.reload
end
it "makes the plan active" do
expect do
plan.activate!
users_other_plan.reload
plan.reload
end.to change(plan, :active).from(false).to(true)
end
it "makes the other plan inactive" do
expect do
plan.activate!
users_other_plan.reload
plan.reload
end.to change(users_other_plan, :active).from(true).to(false)
end
end
# NOTE(review): counting `end`s, this describe and the context below are
# nested INSIDE "#activate!" -- they read like intended siblings of it;
# verify the `end` placement.
describe "#degree_requirement_counts" do
before do
2.times { create :planned_course, :required, plan: plan }
3.times do
create :planned_course,
:distribution,
plan: plan
end
5.times { create :planned_course, :free_elective, plan: plan }
end
# Counts are in credits: presumably 3 credits per course (2->6, 3->9, 5->15).
it "returns the correct amount of credits for each type" do
counts = plan.degree_requirement_counts
expect(counts[:required_course]).to eq(6)
expect(counts[:distribution_requirement]).to eq(9)
expect(counts[:free_elective]).to eq(15)
end
end
context "when user has taken courses and plan has planned courses" do
let!(:planned_course) { create :planned_course, plan: plan }
let!(:taken_course) { create :taken_course, user: user }
describe "#taken_and_planned_courses" do
# NOTE(review): "plannned" typo in the description below.
it "returns user's taken courses and plan's plannned courses" do
expect(subject.taken_and_planned_courses).to eq([taken_course,
planned_course])
end
end
end
end
# Helper: asserts the user's plans collection grows from before_size to
# after_size when `plan` is saved. `plans(true)` forces an association
# reload (deprecated in Rails 5+; use user.plans.reload there).
def saving_plan_changes_users_plans_size(plan, before_size, after_size)
expect(user.plans(true).size).to eq(before_size)
plan.save
expect(user.plans(true).size).to eq(after_size)
end
end
fix formatting in plan_spec
# == Schema Information
#
# Table name: plans
#
# active :boolean not null
# created_at :datetime
# id :integer not null, primary key
# name :string(255) not null
# updated_at :datetime
# user_id :integer not null
#
# Model spec for Plan: associations, validations, activation behavior,
# and degree-requirement accounting.
describe Plan do
subject { plan }
let!(:user) { create(:user) }
let(:plan) { build(:plan, user: user) }
let(:users_other_plan) { build(:plan, user: user) }
let(:duplicate_plan) { build(:plan, name: plan.name, user: user) }
it { should belong_to(:user) }
it { should have_many(:planned_courses).dependent(:destroy) }
it { should respond_to(:user) }
it { should respond_to(:name) }
it { should respond_to(:active) }
it { should respond_to(:activate!) }
it { should respond_to(:created_at) }
it { should respond_to(:updated_at) }
it { should respond_to(:planned_courses) }
it { should respond_to(:courses) }
it { should respond_to(:degree_requirement_counts) }
it { should respond_to(:taken_and_planned_courses) }
it { should validate_presence_of(:user) }
it { should validate_presence_of(:name) }
it { should ensure_length_of(:name).is_at_least(1) }
it { should ensure_length_of(:name).is_at_most(35) }
describe "with valid attributes" do
it { should be_valid }
end
describe "with taken name" do
before { duplicate_plan.save }
it { should be_invalid }
end
# Saving a plan should also make it the user's active plan.
describe "creation" do
it "changes user's number of plans by 1" do
saving_plan_changes_users_plans_size(plan, 0, 1)
expect(user.plans.first).to eq(plan)
end
context "when user already has another plan" do
before { users_other_plan.save }
it "changes user's number of plans from 1 to 2" do
saving_plan_changes_users_plans_size(plan, 1, 2)
end
it "changes user's active plan to the new plan" do
expect { plan.save }.to change(user, :active_plan)
.from(users_other_plan).to(plan)
end
it "changes the old plan's active attribute to false" do
expect do
plan.save
users_other_plan.reload
end.to change(users_other_plan, :active).from(true).to(false)
end
end
context "#active = true" do
context "when user has no other plans" do
it "sets a new plan to active" do
expect { plan.save }.to change(plan, :active).to(true)
end
it "makes user#active_plan return it" do
expect { plan.save }.to change(user, :active_plan).from(nil).to(plan)
end
end
context "when user's other plan was active" do
before { users_other_plan.save }
it "becomes the user's new active plan" do
expect { plan.save }.to change(user, :active_plan)
.from(users_other_plan).to(plan)
end
it "sets user's other plans to false" do
expect do
plan.save
users_other_plan.reload
end.to change(users_other_plan, :active).from(true).to(false)
end
it "changes user's total plans count to two" do
saving_plan_changes_users_plans_size(plan, 1, 2)
end
end
end
end
describe "#activate!" do
context "when other plan was active" do
before do
plan.save
users_other_plan.save
plan.reload
end
it "makes the plan active" do
expect do
plan.activate!
users_other_plan.reload
plan.reload
end.to change(plan, :active).from(false).to(true)
end
it "makes the other plan inactive" do
expect do
plan.activate!
users_other_plan.reload
plan.reload
end.to change(users_other_plan, :active).from(true).to(false)
end
end
# NOTE(review): counting `end`s, this describe and the context below are
# nested INSIDE "#activate!" -- they read like intended siblings of it;
# verify the `end` placement.
describe "#degree_requirement_counts" do
before do
2.times { create :planned_course, :required, plan: plan }
3.times do
create :planned_course,
:distribution,
plan: plan
end
5.times { create :planned_course, :free_elective, plan: plan }
end
# Counts are in credits: presumably 3 credits per course (2->6, 3->9, 5->15).
it "returns the correct amount of credits for each type" do
counts = plan.degree_requirement_counts
expect(counts[:required_course]).to eq(6)
expect(counts[:distribution_requirement]).to eq(9)
expect(counts[:free_elective]).to eq(15)
end
end
context "when user has taken courses and plan has planned courses" do
let!(:planned_course) { create :planned_course, plan: plan }
let!(:taken_course) { create :taken_course, user: user }
describe "#taken_and_planned_courses" do
# NOTE(review): "plannned" typo in the description below.
it "returns user's taken courses and plan's plannned courses" do
expect(subject.taken_and_planned_courses).to eq([taken_course,
planned_course])
end
end
end
end
# Helper: asserts the user's plans collection grows from before_size to
# after_size when `plan` is saved. `plans(true)` forces an association
# reload (deprecated in Rails 5+; use user.plans.reload there).
def saving_plan_changes_users_plans_size(plan, before_size, after_size)
expect(user.plans(true).size).to eq(before_size)
plan.save
expect(user.plans(true).size).to eq(after_size)
end
end
|
require 'rails_helper'
# Model spec for Repo: attribute types and the language_repos scope.
describe Repo do
let!(:repo) { create(:repo) }
let!(:user) { create(:user) }
it "should have correct attributes as strings" do
expect(repo.url).to be_kind_of(String)
expect(repo.name).to be_kind_of(String)
expect(repo.full_name).to be_kind_of(String)
expect(repo.html_url).to be_kind_of(String)
expect(repo.description).to be_kind_of(String)
expect(repo.language).to be_kind_of(String)
end
it "should have correct attributes as integers" do
expect(repo.github_repo_id).to be_kind_of(Integer)
expect(repo.stargazers_count).to be_kind_of(Integer)
expect(repo.forks_count).to be_kind_of(Integer)
expect(repo.open_issues_count).to be_kind_of(Integer)
expect(repo.contributors_count).to be_kind_of(Integer)
expect(repo.pull_request_count).to be_kind_of(Integer)
end
# These assume the :repo factory's language is 'Ruby' -- verify the factory.
it "should return repos in the correct language" do
expect(Repo.language_repos(user, 'Ruby').length).to eq(1)
end
it "should not return repos in the wrong language" do
expect(Repo.language_repos(user, 'Javascript').length).to eq(0)
end
end
add tests for update repos
require 'rails_helper'
# Model spec for Repo: attribute types, language_repos scope, and update_all.
describe Repo do
let!(:repo) { create(:repo) }
let!(:user) { create(:user) }
it "should have correct attributes as strings" do
expect(repo.url).to be_kind_of(String)
expect(repo.name).to be_kind_of(String)
expect(repo.full_name).to be_kind_of(String)
expect(repo.html_url).to be_kind_of(String)
expect(repo.description).to be_kind_of(String)
expect(repo.language).to be_kind_of(String)
end
it "should have correct attributes as integers" do
expect(repo.github_repo_id).to be_kind_of(Integer)
expect(repo.stargazers_count).to be_kind_of(Integer)
expect(repo.forks_count).to be_kind_of(Integer)
expect(repo.open_issues_count).to be_kind_of(Integer)
expect(repo.contributors_count).to be_kind_of(Integer)
expect(repo.pull_request_count).to be_kind_of(Integer)
end
# These assume the :repo factory's language is 'Ruby' -- verify the factory.
it "should return repos in the correct language" do
expect(Repo.language_repos(user, 'Ruby').length).to eq(1)
end
it "should not return repos in the wrong language" do
expect(Repo.language_repos(user, 'Javascript').length).to eq(0)
end
# NOTE(review): Repo.update_all shadows ActiveRecord's bulk update_all and
# presumably hits the GitHub API (or a VCR cassette). The `> 1107` magic
# number and the nil-language expectation are brittle -- they depend on the
# recorded fixture; confirm a cassette pins this data.
it "should update repo with the propper information" do
updated = Repo.update_all[0]
expect(updated).to eq(repo)
expect(updated.language).to eq(nil)
expect(updated.forks_count).to be > 1107
end
end
|
add song model spec
require "spec_helper"
describe Song do
  describe "validation" do
    # Clearer description (was the ungrammatical "Song has name each artist"):
    # a song name must be unique within a single artist's songs.
    it "validates uniqueness of name per artist" do
      artist = Artist.create! :name => 'Hikaru Utada'
      Song.create! :name => 'Automatic', :artist_id => artist.id
      duplicate = Song.new :name => 'Automatic', :artist_id => artist.id
      duplicate.should be_invalid
    end
  end
  describe ".singing!" do
    it "count up SingLog" do
      artist = Artist.create! :name => 'Hikaru Utada'
      song = Song.create! :name => 'Automatic', :artist_id => artist.id
      # change matcher expresses the +1 delta directly instead of a manual
      # before/after count comparison.
      expect { song.singing! }.to change(SingLog, :count).by(1)
    end
  end
end
|
require 'json'
module Fastr
module Template
EXTENSIONS = {} unless defined?(EXTENSIONS)
TEMPLATE_CACHE = {} unless defined?(TEMPLATE_CACHE)
# Mixin hook: when Template is included, also extend the host class with
# ClassMethods (currently an empty placeholder).
def self.included(kls)
kls.extend(ClassMethods)
end
module ClassMethods
end
# Finds the engine for a particular path.
#
# ==== Parameters
# path<String>:: The path of the file to find an engine for.
#
# ==== Returns
# Class:: The engine.
def engine_for(path)
  path = File.expand_path(path)
  match = path.match(/\.([^\.]*)$/)
  # Guard against extensionless paths: the original called match[1]
  # unconditionally and raised NoMethodError on nil when the path had no
  # dot. Now such paths (and unknown extensions) simply return nil.
  match && EXTENSIONS[match[1]]
end
# Get all known template extensions
#
# ==== Returns
# Array:: Extension strings.
# Returns every registered template extension string.
def template_extensions
  EXTENSIONS.map { |extension, _engine| extension }
end
# Registers the extensions that will trigger a particular templating
# engine.
#
# ==== Parameters
# engine<Class>:: The class of the engine that is being registered
# extensions<Array[String]>::
# The list of extensions that will be registered with this templating
# language
#
# ==== Raises
# ArgumentError:: engine does not have a compile_template method.
#
# ==== Returns
# nil
#
# ==== Example
# Fastr::Template.register_extensions(Fastr::Template::Erubis, ["erb"])
# Validates that the engine exposes #result, maps each extension string to
# the engine, and mixes the engine's Mixin module into Fastr::Controller.
def self.register_extensions(engine, extensions)
raise ArgumentError, "The class you are registering does not have a result method" unless
engine.respond_to?(:result)
extensions.each{|ext| EXTENSIONS[ext] = engine }
Fastr::Controller.class_eval <<-HERE
include #{engine}::Mixin
HERE
end
# Renders a template file through its registered engine.
# opts: :vars (Hash exposed to the template via @vars), :headers (merged
# over a text/html default), :response_code (default 200).
# Returns a Rack-style triple [status, headers, [body]].
def render_template(tpl_path, opts={})
unless engine = engine_for(tpl_path)
raise ArgumentError, "No template engine registered for #{tpl_path}"
end
@vars = opts[:vars] || {}
@headers = {"Content-Type" => "text/html"}.merge(opts[:headers] || {})
@response_code = opts[:response_code] || 200
[ @response_code, @headers, [engine.result(tpl_path, binding())] ]
end
# Renders plain text as a Rack-style triple [status, headers, [body]].
# opts: :headers (merged over a text/plain default), :response_code (200).
def render_text(text, opts={})
  base_headers = { "Content-Type" => "text/plain" }
  base_headers.update(opts[:headers]) if opts[:headers]
  @headers = base_headers
  @response_code = opts[:response_code] || 200
  [@response_code, @headers, [text]]
end
# Serializes obj to JSON and wraps it in a Rack-style triple.
# opts: :headers (merged over an application/json default), :response_code (200).
def render_json(obj, opts={})
  extra_headers = opts[:headers] || {}
  @headers = { "Content-Type" => "application/json" }.merge(extra_headers)
  @response_code = opts[:response_code] || 200
  [@response_code, @headers, [obj.to_json.to_s]]
end
end
end
added back in a generic render(...) method
require 'json'
module Fastr
module Template
EXTENSIONS = {} unless defined?(EXTENSIONS)
TEMPLATE_CACHE = {} unless defined?(TEMPLATE_CACHE)
# Mixin hook: when Template is included, also extend the host class with
# ClassMethods (currently an empty placeholder).
def self.included(kls)
kls.extend(ClassMethods)
end
module ClassMethods
end
# Finds the engine for a particular path.
#
# ==== Parameters
# path<String>:: The path of the file to find an engine for.
#
# ==== Returns
# Class:: The engine.
def engine_for(path)
  path = File.expand_path(path)
  match = path.match(/\.([^\.]*)$/)
  # Guard against extensionless paths: the original called match[1]
  # unconditionally and raised NoMethodError on nil when the path had no
  # dot. Now such paths (and unknown extensions) simply return nil.
  match && EXTENSIONS[match[1]]
end
# Get all known template extensions
#
# ==== Returns
# Array:: Extension strings.
# Returns every registered template extension string.
def template_extensions
EXTENSIONS.keys
end
# Registers the extensions that will trigger a particular templating
# engine.
#
# ==== Parameters
# engine<Class>:: The class of the engine that is being registered
# extensions<Array[String]>::
# The list of extensions that will be registered with this templating
# language
#
# ==== Raises
# ArgumentError:: engine does not have a compile_template method.
#
# ==== Returns
# nil
#
# ==== Example
# Fastr::Template.register_extensions(Fastr::Template::Erubis, ["erb"])
# Validates that the engine exposes #result, maps each extension string to
# the engine, and mixes the engine's Mixin module into Fastr::Controller.
def self.register_extensions(engine, extensions)
raise ArgumentError, "The class you are registering does not have a result method" unless
engine.respond_to?(:result)
extensions.each{|ext| EXTENSIONS[ext] = engine }
Fastr::Controller.class_eval <<-HERE
include #{engine}::Mixin
HERE
end
# Generic render entry point: dispatches to render_template, render_text,
# or render_json based on kind (symbol or string). Raises ArgumentError for
# any other kind.
def render(kind, tpl, opts={})
  handlers = {
    :template => :render_template,
    :text     => :render_text,
    :json     => :render_json
  }
  handler = handlers[kind.to_sym]
  raise ArgumentError, "Unknown render type: #{kind.inspect}" unless handler
  send(handler, tpl, opts)
end
# Renders a template file through its registered engine.
# opts: :vars (exposed via @vars), :headers, :response_code (default 200).
# Returns a Rack-style triple [status, headers, [body]].
def render_template(tpl_path, opts={})
unless engine = engine_for(tpl_path)
raise ArgumentError, "No template engine registered for #{tpl_path}"
end
@vars = opts[:vars] || {}
@headers = {"Content-Type" => "text/html"}.merge(opts[:headers] || {})
@response_code = opts[:response_code] || 200
[ @response_code, @headers, [engine.result(tpl_path, binding())] ]
end
# Renders plain text with a text/plain default Content-Type.
def render_text(text, opts={})
@headers = {"Content-Type" => "text/plain"}.merge(opts[:headers] || {})
@response_code = opts[:response_code] || 200
[ @response_code, @headers, [text] ]
end
# Serializes obj to JSON with an application/json default Content-Type.
def render_json(obj, opts={})
@headers = {"Content-Type" => "application/json"}.merge(opts[:headers] || {})
@response_code = opts[:response_code] || 200
[ @response_code, @headers, [obj.to_json.to_s] ]
end
end
end |
require 'spec_helper'
describe User do
let(:uid) { '12345' }
let(:name) { 'Mike Skalnik' }
let(:login) { 'skalnik' }
let(:token) { 'abcedf123456' }
let(:auth_hash) {
{ 'uid' => uid, 'info' => { 'name' => name,
'nickname' => login },
'credentials' => { 'token' => token } }
}
# Uniqueness of uid, plus the OmniAuth hash -> User factory methods.
context "validations" do
it "is invalid if it has a non-unique uid" do
# user1 exists only to occupy the uid in the database.
user1 = FactoryGirl.create(:user, :uid => uid)
user2 = FactoryGirl.build(:user, :uid => uid)
user2.should be_invalid
end
end
describe ".find_by_hash" do
it "finds a user by uid" do
User.should_receive(:find_by_uid).with(uid)
User.find_by_hash(auth_hash)
end
end
describe ".create_from_hash" do
it "creates a new user with parameters given" do
User.should_receive(:create).with(:name => name, :uid => uid,
:github_login => login, :github_token => token)
User.create_from_hash(auth_hash)
end
end
describe "#octokit" do
it "creates an octokit client with the users GitHub credentials" do
user = FactoryGirl.build(:user, :github_login => login,
:github_token => token)
Octokit::Client.should_receive(:new).with(:login => login,
:oauth_token => token)
user.octokit
end
end
describe '#atlrug_team_id' do
  it "looks up the Owners team id" do
    user = FactoryGirl.build(:user)
    teams = [stub(:name => 'ATLRUGers'), stub(:name => 'Owners', :id => 1),
             stub(:name => 'Other')]
    octokit = stub(:org_teams => teams)
    user.stub(:octokit => octokit)
    user.atlrug_team_id.should == 1
  end
  it "doesn't raise exception if user has no permission to ATLRUG org" do
    user = FactoryGirl.build(:user)
    # Split the `octokit = stub and octokit.stub(...)` one-liner: it only
    # worked because `and` binds looser than `=`, so it parsed as two
    # statements anyway. Two explicit lines make that intent obvious.
    octokit = stub
    octokit.stub(:org_teams).and_raise(Octokit::Forbidden)
    user.stub(:octokit => octokit)
    expect { user.atlrug_team_id }.to_not raise_error
  end
end
# Organizer check: the user's uid must appear among the Owners team members.
describe '#atlrug_organizer?' do
let(:user) { FactoryGirl.build(:user, :uid => uid) }
it "is true if the user is in the ATLRUG Owners team" do
members = [stub(:id => uid + "1"), stub(:id => uid),
stub(:id => uid + "10")]
octokit = stub(:team_members => members)
user.stub(:octokit => octokit)
user.stub(:atlrug_team_id => 1)
user.atlrug_organizer?.should be_true
end
it "is false if the user is not in the ATLRUG Owners team" do
members = [stub(:id => uid + "1"), stub(:id => uid + "10")]
octokit = stub(:team_members => members)
user.stub(:octokit => octokit)
user.stub(:atlrug_team_id)
user.atlrug_organizer?.should be_false
end
end
end
Changed stub() to double() in spec/models tests
require 'spec_helper'
describe User do
let(:uid) { '12345' }
let(:name) { 'Mike Skalnik' }
let(:login) { 'skalnik' }
let(:token) { 'abcedf123456' }
let(:auth_hash) {
{ 'uid' => uid, 'info' => { 'name' => name,
'nickname' => login },
'credentials' => { 'token' => token } }
}
# Uniqueness of uid, plus the OmniAuth hash -> User factory methods.
context "validations" do
it "is invalid if it has a non-unique uid" do
# user1 exists only to occupy the uid in the database.
user1 = FactoryGirl.create(:user, :uid => uid)
user2 = FactoryGirl.build(:user, :uid => uid)
user2.should be_invalid
end
end
describe ".find_by_hash" do
it "finds a user by uid" do
User.should_receive(:find_by_uid).with(uid)
User.find_by_hash(auth_hash)
end
end
describe ".create_from_hash" do
it "creates a new user with parameters given" do
User.should_receive(:create).with(:name => name, :uid => uid,
:github_login => login, :github_token => token)
User.create_from_hash(auth_hash)
end
end
describe "#octokit" do
it "creates an octokit client with the users GitHub credentials" do
user = FactoryGirl.build(:user, :github_login => login,
:github_token => token)
Octokit::Client.should_receive(:new).with(:login => login,
:oauth_token => token)
user.octokit
end
end
describe '#atlrug_team_id' do
  it "looks up the Owners team id" do
    user = FactoryGirl.build(:user)
    teams = [double(:name => 'ATLRUGers'), double(:name => 'Owners', :id => 1),
             double(:name => 'Other')]
    octokit = double(:org_teams => teams)
    user.stub(:octokit => octokit)
    user.atlrug_team_id.should == 1
  end
  it "doesn't raise exception if user has no permission to ATLRUG org" do
    user = FactoryGirl.build(:user)
    # Split the `octokit = double and octokit.stub(...)` one-liner: it only
    # worked because `and` binds looser than `=`, so it parsed as two
    # statements anyway. Two explicit lines make that intent obvious.
    octokit = double
    octokit.stub(:org_teams).and_raise(Octokit::Forbidden)
    user.stub(:octokit => octokit)
    expect { user.atlrug_team_id }.to_not raise_error
  end
end
# Organizer check: the user's uid must appear among the Owners team members.
describe '#atlrug_organizer?' do
let(:user) { FactoryGirl.build(:user, :uid => uid) }
it "is true if the user is in the ATLRUG Owners team" do
members = [double(:id => uid + "1"), double(:id => uid),
double(:id => uid + "10")]
octokit = double(:team_members => members)
user.stub(:octokit => octokit)
user.stub(:atlrug_team_id => 1)
user.atlrug_organizer?.should be_true
end
it "is false if the user is not in the ATLRUG Owners team" do
members = [double(:id => uid + "1"), double(:id => uid + "10")]
octokit = double(:team_members => members)
user.stub(:octokit => octokit)
user.stub(:atlrug_team_id)
user.atlrug_organizer?.should be_false
end
end
end
|
require 'json'
require 'pathname'
require 'open-uri'
module Fauxhai
class Mocker
# The base URL for the GitHub project (raw)
RAW_BASE = 'https://raw.githubusercontent.com/chefspec/fauxhai/master'.freeze
# A message about where to find a list of platforms
PLATFORM_LIST_MESSAGE = 'A list of available platforms is available at https://github.com/chefspec/fauxhai/blob/master/PLATFORMS.md'.freeze
# @return [Hash] The raw ohai data for the given Mock
attr_reader :data
# Create a new Ohai Mock with fauxhai.
#
# @param [Hash] options
# the options for the mocker
# @option options [String] :platform
# the platform to mock
# @option options [String] :version
# the version of the platform to mock
# @option options [String] :path
# the path to a local JSON file
# @option options [Bool] :github_fetching
# whether to try loading from Github
# Builds the mock: merges defaults (GitHub fetching enabled), resolves the
# ohai data, and yields it to the optional block for in-place overrides.
def initialize(options = {}, &override_attributes)
@options = { github_fetching: true }.merge(options)
@data = fauxhai_data
# The block is invoked via block_given?/yield, so the named
# &override_attributes parameter is effectively documentation.
yield(@data) if block_given?
end
private
# Resolves and memoizes the ohai data hash. Resolution order: an explicit
# :path option, then the bundled platform JSON on disk, then (when
# :github_fetching is enabled) a download from the repo that is cached to
# disk for next time. Raises Fauxhai::Exception::InvalidPlatform otherwise.
def fauxhai_data
@fauxhai_data ||= lambda do
# If a path option was specified, use it
if @options[:path]
filepath = File.expand_path(@options[:path])
unless File.exist?(filepath)
raise Fauxhai::Exception::InvalidPlatform.new("You specified a path to a JSON file on the local system that does not exist: '#{filepath}'")
end
else
filepath = File.join(platform_path, "#{version}.json")
end
if File.exist?(filepath)
parse_and_validate(File.read(filepath))
elsif @options[:github_fetching]
# Try loading from github (in case someone submitted a PR with a new file, but we haven't
# yet updated the gem version). Cache the response locally so it's faster next time.
begin
# open-uri's Kernel#open; the URL is built from the gem's RAW_BASE
# constant plus platform/version accessors.
response = open("#{RAW_BASE}/lib/fauxhai/platforms/#{platform}/#{version}.json")
rescue OpenURI::HTTPError
raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and an HTTP error was encountered when fetching from Github. #{PLATFORM_LIST_MESSAGE}")
end
if response.status.first.to_i == 200
response_body = response.read
path = Pathname.new(filepath)
FileUtils.mkdir_p(path.dirname)
File.open(filepath, 'w') { |f| f.write(response_body) }
return parse_and_validate(response_body)
else
raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and an Github fetching returned http error code #{response.status.first.to_i}! #{PLATFORM_LIST_MESSAGE}")
end
else
raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and Github fetching is disabled! #{PLATFORM_LIST_MESSAGE}")
end
end.call
end
# As major releases of Ohai ship it's difficult and sometimes impossible
# to regenerate all fauxhai data. This allows us to deprecate old releases
# and eventually remove them while giving end users ample warning.
def parse_and_validate(unparsed_data)
parsed_data = JSON.parse(unparsed_data)
if parsed_data['deprecated']
STDERR.puts "WARNING: Fauxhai platform data for #{parsed_data['platform']} #{parsed_data['platform_version']} is deprecated and will be removed in the 6.0 release 3/2018. #{PLATFORM_LIST_MESSAGE}"
end
parsed_data
end
def platform
@options[:platform] ||= begin
STDERR.puts "WARNING: you must specify a 'platform' and 'version' to your ChefSpec Runner and/or Fauxhai constructor, in the future omitting these will become a hard error. #{PLATFORM_LIST_MESSAGE}"
'chefspec'
end
end
def platform_path
File.join(Fauxhai.root, 'lib', 'fauxhai', 'platforms', platform)
end
def version
@options[:version] ||= chefspec_version || raise(Fauxhai::Exception::InvalidVersion.new("Platform version not specified. #{PLATFORM_LIST_MESSAGE}"))
end
def chefspec_version
platform == 'chefspec' ? '0.6.1' : nil
end
end
end
If you don't pass a version, it tries to use the newest available version for the platform.
require 'json'
require 'pathname'
require 'open-uri'
module Fauxhai
  # Mocker resolves fake Ohai attribute data for a platform/version pair.
  # Data is looked up, in order, from: an explicit local JSON file (:path
  # option), the JSON files bundled with the gem, or - when :github_fetching
  # is enabled - the raw files in the fauxhai GitHub repository (cached
  # locally after a successful download). When no version is given, the
  # newest version available on disk for the platform is used.
  class Mocker
    # The base URL for the GitHub project (raw)
    RAW_BASE = 'https://raw.githubusercontent.com/chefspec/fauxhai/master'.freeze

    # A message about where to find a list of platforms
    PLATFORM_LIST_MESSAGE = 'A list of available platforms is available at https://github.com/chefspec/fauxhai/blob/master/PLATFORMS.md'.freeze

    # @return [Hash] The raw ohai data for the given Mock
    attr_reader :data

    # Create a new Ohai Mock with fauxhai.
    #
    # @param [Hash] options
    #   the options for the mocker
    # @option options [String] :platform
    #   the platform to mock
    # @option options [String] :version
    #   the version of the platform to mock
    # @option options [String] :path
    #   the path to a local JSON file
    # @option options [Bool] :github_fetching
    #   whether to try loading from Github (defaults to true)
    #
    # @yield [Hash] the parsed data, so callers can override attributes in place
    def initialize(options = {}, &override_attributes)
      @options = { github_fetching: true }.merge(options)
      @data = fauxhai_data
      yield(@data) if block_given?
    end

    private

    # Locate, read, parse and (when necessary) download the platform data.
    # Memoized, so the lookup only happens once per Mocker instance.
    #
    # @return [Hash] the parsed ohai data
    # @raise [Fauxhai::Exception::InvalidPlatform] if the data cannot be found
    def fauxhai_data
      @fauxhai_data ||= lambda do
        # If a path option was specified, use it
        if @options[:path]
          filepath = File.expand_path(@options[:path])

          unless File.exist?(filepath)
            raise Fauxhai::Exception::InvalidPlatform.new("You specified a path to a JSON file on the local system that does not exist: '#{filepath}'")
          end
        else
          filepath = File.join(platform_path, "#{version}.json")
        end

        if File.exist?(filepath)
          parse_and_validate(File.read(filepath))
        elsif @options[:github_fetching]
          # Try loading from github (in case someone submitted a PR with a new file, but we haven't
          # yet updated the gem version). Cache the response locally so it's faster next time.
          begin
            response = open("#{RAW_BASE}/lib/fauxhai/platforms/#{platform}/#{version}.json")
          rescue OpenURI::HTTPError
            raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and an HTTP error was encountered when fetching from Github. #{PLATFORM_LIST_MESSAGE}")
          end

          if response.status.first.to_i == 200
            response_body = response.read
            path = Pathname.new(filepath)
            FileUtils.mkdir_p(path.dirname)
            File.open(filepath, 'w') { |f| f.write(response_body) }
            return parse_and_validate(response_body)
          else
            raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and Github fetching returned HTTP error code #{response.status.first.to_i}! #{PLATFORM_LIST_MESSAGE}")
          end
        else
          raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and Github fetching is disabled! #{PLATFORM_LIST_MESSAGE}")
        end
      end.call
    end

    # As major releases of Ohai ship it's difficult and sometimes impossible
    # to regenerate all fauxhai data. This allows us to deprecate old releases
    # and eventually remove them while giving end users ample warning.
    #
    # @param [String] unparsed_data raw JSON text
    # @return [Hash] the parsed data
    def parse_and_validate(unparsed_data)
      parsed_data = JSON.parse(unparsed_data)
      if parsed_data['deprecated']
        STDERR.puts "WARNING: Fauxhai platform data for #{parsed_data['platform']} #{parsed_data['platform_version']} is deprecated and will be removed in the 6.0 release 3/2018. #{PLATFORM_LIST_MESSAGE}"
      end
      parsed_data
    end

    # The requested platform, falling back to the synthetic 'chefspec'
    # platform (with a warning) when the caller did not specify one.
    def platform
      @options[:platform] ||= begin
        STDERR.puts "WARNING: you must specify a 'platform' for your ChefSpec Runner and/or Fauxhai constructor, in the future omitting these will become a hard error. #{PLATFORM_LIST_MESSAGE}"
        'chefspec'
      end
    end

    # Directory inside the gem that holds the JSON files for #platform.
    def platform_path
      File.join(Fauxhai.root, 'lib', 'fauxhai', 'platforms', platform)
    end

    # The requested platform version. When the caller did not specify one,
    # fall back to the newest version shipped on disk for the platform.
    def version
      @options[:version] ||= begin
        # Get a list of all versions.
        versions = Dir[File.join(platform_path, '*.json')].map {|path| File.basename(path, '.json') }
        # Check if this is an unknown platform.
        if versions.empty?
          raise Fauxhai::Exception::InvalidPlatform.new("Platform #{platform} not known. #{PLATFORM_LIST_MESSAGE}")
        end
        # Take the highest version available. Treat R like a separator because Windows.
        begin
          versions.max_by {|ver| Gem::Version.create(ver.gsub(/r/i, '.')) }
        rescue ArgumentError
          # Welp, do something stable.
          versions.max
        end
      end
    end
  end
end
|
# coding: utf-8
require 'ostruct'
require_relative '../spec_helper'
require_relative 'user_shared_examples'
require_relative '../../services/dataservices-metrics/lib/isolines_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_snapshot_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_general_usage_metrics'
require 'factories/organizations_contexts'
require_relative '../../app/model_factories/layer_factory'
require_dependency 'cartodb/redis_vizjson_cache'
require 'helpers/rate_limits_helper'
require 'helpers/unique_names_helper'
require 'helpers/account_types_helper'
require 'factories/users_helper'
require 'factories/database_configuration_contexts'
# Shared-example contract check: the Sequel ::User model must satisfy the
# 'user models' shared examples (user_shared_examples) through the three
# adapter helpers defined in the block below.
describe 'refactored behaviour' do
it_behaves_like 'user models' do
# Adapter: twitter import count looked up through the model instance.
def get_twitter_imports_count_by_user_id(user_id)
get_user_by_id(user_id).get_twitter_imports_count
end
# Adapter: fetch a Sequel ::User by primary key.
def get_user_by_id(user_id)
::User.where(id: user_id).first
end
# Adapter: build a persisted user fixture.
def create_user
FactoryGirl.create(:valid_user)
end
end
end
describe User do
include UniqueNamesHelper
include AccountTypesHelper
include RateLimitsHelper
# Remote DB user provisioning is out of scope for these examples; stub it out.
before(:each) do
CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
end
# Expensive one-off fixtures: two real users (with databases) shared by the
# whole file; individual examples must not destroy @user/@user2.
before(:all) do
bypass_named_maps
@user_password = 'admin123'
puts "\n[rspec][user_spec] Creating test user databases..."
@user = create_user :email => 'admin@example.com', :username => 'admin', :password => @user_password
@user2 = create_user :email => 'user@example.com', :username => 'user', :password => 'user123'
puts "[rspec][user_spec] Loading user data..."
reload_user_data(@user) && @user.reload
puts "[rspec][user_spec] Running..."
end
# Per-example isolation: silence Varnish and table-metadata side effects.
before(:each) do
bypass_named_maps
CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
Table.any_instance.stubs(:update_cdb_tablemetadata)
end
after(:all) do
bypass_named_maps
@user.destroy
@user2.destroy
# Account types are only created by some examples; guard against nil.
@account_type.destroy if @account_type
@account_type_org.destroy if @account_type_org
end
# Username validation: rejects spaces, special characters, uppercase letters,
# underscores, dots and leading/trailing hyphens; accepts lowercase
# alphanumerics with inner hyphens.
it "should only allow legal usernames" do
illegal_usernames = %w(si$mon 'sergio estella' j@vi sergio£££ simon_tokumine SIMON Simon jose.rilla -rilla rilla-)
legal_usernames = %w(simon javier-de-la-torre sergio-leiva sergio99)
illegal_usernames.each do |name|
@user.username = name
@user.valid?.should be_false
@user.errors[:username].should be_present
end
legal_usernames.each do |name|
@user.username = name
@user.valid?.should be_true
@user.errors[:username].should be_blank
end
end
# Usernames share a namespace with organization names.
it "should not allow a username in use by an organization" do
org = create_org('testusername', 10.megabytes, 1)
@user.username = org.name
@user.valid?.should be_false
@user.username = 'wadus'
@user.valid?.should be_true
end
# Organization-related validations: seat limits, disk quotas, admin/viewer
# flag exclusivity, email whitelisting and settings inherited from the org.
describe 'organization checks' do
it "should not be valid if his organization doesn't have more seats" do
organization = create_org('testorg', 10.megabytes, 1)
user1 = create_user email: 'user1@testorg.com',
username: 'user1',
password: 'user11',
account_type: 'ORGANIZATION USER'
user1.organization = organization
user1.save
organization.owner_id = user1.id
organization.save
organization.reload
user1.reload
# The single seat is taken by user1 (the owner); a second builder is rejected.
user2 = new_user
user2.organization = organization
user2.valid?.should be_false
user2.errors.keys.should include(:organization)
organization.destroy
user1.destroy
end
it 'should be valid if his organization has enough seats' do
organization = create_org('testorg', 10.megabytes, 1)
user = ::User.new
user.organization = organization
user.valid?
user.errors.keys.should_not include(:organization)
organization.destroy
end
it "should not be valid if his organization doesn't have enough disk space" do
organization = create_org('testorg', 10.megabytes, 1)
# All 10MB already assigned, so any additional quota exceeds the org quota.
organization.stubs(:assigned_quota).returns(10.megabytes)
user = ::User.new
user.organization = organization
user.quota_in_bytes = 1.megabyte
user.valid?.should be_false
user.errors.keys.should include(:quota_in_bytes)
organization.destroy
end
it 'should be valid if his organization has enough disk space' do
organization = create_org('testorg', 10.megabytes, 1)
organization.stubs(:assigned_quota).returns(9.megabytes)
user = ::User.new
user.organization = organization
user.quota_in_bytes = 1.megabyte
user.valid?
user.errors.keys.should_not include(:quota_in_bytes)
organization.destroy
end
# org_admin flag: exclusivity with viewer/owner and the DB rights (role
# creation) that being an org admin grants or revokes.
describe '#org_admin' do
before(:all) do
@organization = create_organization_with_owner
end
after(:all) do
@organization.destroy
end
def create_role(user)
# NOTE: It's hard to test the real Groups API call here, it needs a Rails server up and running
# Instead, we test the main step that this function does internally (creating a role)
user.in_database["CREATE ROLE \"#{user.database_username}_#{unique_name('role')}\""].all
end
it 'cannot be owner and viewer at the same time' do
@organization.owner.viewer = true
@organization.owner.should_not be_valid
@organization.owner.errors.keys.should include(:viewer)
end
it 'cannot be admin and viewer at the same time' do
user = ::User.new
user.organization = @organization
user.viewer = true
user.org_admin = true
user.should_not be_valid
user.errors.keys.should include(:viewer)
end
it 'should not be able to create groups without admin rights' do
user = FactoryGirl.create(:valid_user, organization: @organization)
expect { create_role(user) }.to raise_error
end
it 'should be able to create groups with admin rights' do
user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
expect { create_role(user) }.to_not raise_error
end
it 'should revoke admin rights on demotion' do
user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
expect { create_role(user) }.to_not raise_error
user.org_admin = false
user.save
expect { create_role(user) }.to raise_error
end
end
# Email whitelisting: when the org defines whitelisted_email_domains, a
# member's email domain must be on the list.
describe 'organization email whitelisting' do
before(:each) do
@organization = create_org('testorg', 10.megabytes, 1)
end
after(:each) do
@organization.destroy
end
it 'valid_user is valid' do
user = FactoryGirl.build(:valid_user)
user.valid?.should == true
end
it 'user email is valid if organization has not whitelisted domains' do
user = FactoryGirl.build(:valid_user, organization: @organization)
user.valid?.should == true
end
it 'user email is not valid if organization has whitelisted domains and email is not under that domain' do
@organization.whitelisted_email_domains = [ 'organization.org' ]
user = FactoryGirl.build(:valid_user, organization: @organization)
user.valid?.should eq false
user.errors[:email].should_not be_nil
end
it 'user email is valid if organization has whitelisted domains and email is under that domain' do
user = FactoryGirl.build(:valid_user, organization: @organization)
@organization.whitelisted_email_domains = [ user.email.split('@')[1] ]
user.valid?.should eq true
user.errors[:email].should == []
end
end
describe 'when updating user quota' do
it 'should be valid if his organization has enough disk space' do
organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
organization.assigned_quota.should == 70.megabytes
user = organization.owner
user.quota_in_bytes = 1.megabyte
user.valid?
user.errors.keys.should_not include(:quota_in_bytes)
organization.destroy
end
it "should not be valid if his organization doesn't have enough disk space" do
organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
organization.assigned_quota.should == 70.megabytes
user = organization.owner
user.quota_in_bytes = 71.megabytes
user.valid?.should be_false
user.errors.keys.should include(:quota_in_bytes)
organization.destroy
end
end
# Switching between viewer and builder consumes the corresponding seat type.
describe 'when updating viewer state' do
before(:all) do
@organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
end
after(:all) do
@organization.destroy
end
# Reset seat counts before each example; individual examples shrink them.
before(:each) do
@organization.viewer_seats = 10
@organization.seats = 10
@organization.save
end
it 'should not allow changing to viewer without seats' do
@organization.viewer_seats = 0
@organization.save
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
expect(user).not_to be_valid
expect(user.errors.keys).to include(:organization)
end
it 'should allow changing to viewer with enough seats' do
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
expect(user).to be_valid
expect(user.errors.keys).not_to include(:organization)
end
it 'should not allow changing to builder without seats' do
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
user.save
@organization.seats = 1
@organization.save
user.reload
user.viewer = false
expect(user).not_to be_valid
expect(user.errors.keys).to include(:organization)
end
it 'should allow changing to builder with seats' do
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
user.save
user.reload
user.viewer = false
expect(user).to be_valid
expect(user.errors.keys).not_to include(:organization)
end
end
it 'should set account_type properly' do
organization = create_organization_with_users
organization.users.reject(&:organization_owner?).each do |u|
u.account_type.should == "ORGANIZATION USER"
end
organization.destroy
end
it 'should set default settings properly unless overridden' do
organization = create_organization_with_users
organization.users.reject(&:organization_owner?).each do |u|
u.max_layers.should eq ::User::DEFAULT_MAX_LAYERS
u.private_tables_enabled.should be_true
u.sync_tables_enabled.should be_true
end
user = FactoryGirl.build(:user, organization: organization)
user.max_layers = 3
user.save
user.max_layers.should == 3
organization.destroy
end
# Google Maps keys configured at org level propagate to member users.
describe 'google_maps_key and google_maps_private_key' do
before(:all) do
@organization = create_organization_with_users(google_maps_key: 'gmk', google_maps_private_key: 'gmpk')
@organization.google_maps_key.should_not be_nil
@organization.google_maps_private_key.should_not be_nil
end
after(:all) do
@organization.destroy
end
it 'should be inherited from organization for new users' do
@organization.users.should_not be_empty
@organization.users.reject(&:organization_owner?).each do |u|
u.google_maps_key.should == @organization.google_maps_key
u.google_maps_private_key.should == @organization.google_maps_private_key
end
end
end
it 'should inherit twitter_datasource_enabled from organizations with custom config on creation' do
organization = create_organization_with_users(twitter_datasource_enabled: true)
organization.save
organization.twitter_datasource_enabled.should be_true
organization.users.reject(&:organization_owner?).each do |u|
CartoDB::Datasources::DatasourcesFactory.stubs(:customized_config?).with(Search::Twitter::DATASOURCE_NAME, u).returns(true)
u.twitter_datasource_enabled.should be_true
end
CartoDB::Datasources::DatasourcesFactory.stubs(:customized_config?).returns(true)
user = create_user(organization: organization)
user.save
CartoDB::Datasources::DatasourcesFactory.stubs(:customized_config?).with(Search::Twitter::DATASOURCE_NAME, user).returns(true)
user.twitter_datasource_enabled.should be_true
organization.destroy
end
it "should return proper values for non-persisted settings" do
organization = create_organization_with_users
organization.users.reject(&:organization_owner?).each do |u|
u.private_maps_enabled.should be_true
end
organization.destroy
end
end
# Synchronization with Cartodb Central; only runs when Central API
# credentials are configured (otherwise the example is marked pending).
describe 'central synchronization' do
it 'should create remote user in central if needed' do
pending "Central API credentials not provided" unless ::User.new.sync_data_with_cartodb_central?
organization = create_org('testorg', 500.megabytes, 1)
user = create_user email: 'user1@testorg.com',
username: 'user1',
password: 'user11',
account_type: 'ORGANIZATION USER'
user.organization = organization
user.save
# Mocha expectation: exactly one Central call with the :create attribute set.
Cartodb::Central.any_instance.expects(:create_organization_user).with(organization.name, user.allowed_attributes_to_central(:create)).once
user.create_in_central.should be_true
organization.destroy
end
end
# Feature flags are linked to users through the feature_flags_users join model.
it 'should store feature flags' do
ff = FactoryGirl.create(:feature_flag, id: 10001, name: 'ff10001')
user = create_user email: 'ff@example.com', username: 'ff-user-01', password: '000ff-user-01'
user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
user.save
user.feature_flags_user.map { |ffu| ffu.feature_flag_id }.should include(ff.id)
user.destroy
end
# Destroying a user removes its join rows but keeps the flag record itself.
it 'should delete feature flags assignations to a deleted user' do
ff = FactoryGirl.create(:feature_flag, id: 10002, name: 'ff10002')
user = create_user email: 'ff2@example.com', username: 'ff2-user-01', password: '000ff2-user-01'
user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
user.save
user_id = user.id
user.destroy
SequelRails.connection["select count(*) from feature_flags_users where user_id = '#{user_id}'"].first[:count].should eq 0
SequelRails.connection["select count(*) from feature_flags where id = '#{ff.id}'"].first[:count].should eq 1
end
it "should have a default dashboard_viewed? false" do
user = ::User.new
user.dashboard_viewed?.should be_false
end
it "should reset dashboard_viewed when dashboard gets viewed" do
user = ::User.new
user.view_dashboard
user.dashboard_viewed?.should be_true
end
# Password is only mandatory when crypted_password/salt are not already set
# (e.g. users created from pre-hashed credentials).
it "should validate that password is present if record is new and crypted_password or salt are blank" do
user = ::User.new
user.username = "adminipop"
user.email = "adminipop@example.com"
user.valid?.should be_false
user.errors[:password].should be_present
another_user = new_user(user.values.merge(:password => "admin123"))
user.crypted_password = another_user.crypted_password
user.salt = another_user.salt
user.valid?.should be_true
user.save
# Let's ensure that crypted_password and salt does not change
user_check = ::User[user.id]
user_check.crypted_password.should == another_user.crypted_password
user_check.salt.should == another_user.salt
user.password = nil
user.valid?.should be_true
user.destroy
end
it "should validate password presence and length" do
user = ::User.new
user.username = "adminipop"
user.email = "adminipop@example.com"
user.valid?.should be_false
user.errors[:password].should be_present
user.password = 'short'
user.valid?.should be_false
user.errors[:password].should be_present
# 'manolo' * 11 is 66 characters: long enough to trip the maximum length.
user.password = 'manolo' * 11
user.valid?.should be_false
user.errors[:password].should be_present
end
it "should validate password is different than username" do
user = ::User.new
user.username = "adminipop"
user.email = "adminipop@example.com"
user.password = user.password_confirmation = "adminipop"
user.valid?.should be_false
user.errors[:password].should be_present
end
it "should validate password is not a common one" do
user = ::User.new
user.username = "adminipop"
user.email = "adminipop@example.com"
user.password = user.password_confirmation = '123456'
user.valid?.should be_false
user.errors[:password].should be_present
end
# statement_timeout defaults and how user_timeout/database_timeout map to the
# owner role and the public role respectively.
it "should set default statement timeout values" do
@user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
@user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end
it "should keep in sync user statement_timeout" do
@user.user_timeout = 1000000
@user.database_timeout = 300000
@user.save
@user.in_database["show statement_timeout"].first[:statement_timeout].should == "1000s"
@user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end
it "should keep in sync database statement_timeout" do
@user.user_timeout = 300000
@user.database_timeout = 1000000
@user.save
@user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
@user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "1000s"
end
# vizjson cache invalidation: certain attribute changes must purge Varnish.
it "should invalidate all his vizjsons when his account type changes" do
@account_type = create_account_type_fg('WADUS')
@user.account_type = 'WADUS'
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{@user.database_name}.*:vizjson").times(1).returns(true)
@user.save
end
it "should invalidate all his vizjsons when his disqus_shortname changes" do
@user.disqus_shortname = 'WADUS'
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{@user.database_name}.*:vizjson").times(1).returns(true)
@user.save
end
it "should not invalidate anything when his quota_in_bytes changes" do
@user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
CartoDB::Varnish.any_instance.expects(:purge).times(0)
@user.save
end
it "should rebuild the quota trigger after changing the quota" do
@user.db_service.expects(:rebuild_quota_trigger).once
@user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
@user.save
end
it "should read api calls from external service" do
pending "This is deprecated. This code has been moved"
@user.stubs(:get_old_api_calls).returns({
"per_day" => [0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 17, 4, 0, 0, 0, 0],
"total"=>49,
"updated_at"=>1370362756
})
@user.stubs(:get_es_api_calls_from_redis).returns([
21, 0, 0, 0, 2, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
])
@user.get_api_calls.should == [21, 0, 0, 0, 6, 17, 0, 5, 0, 0, 0, 0, 0, 0, 8, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0]
@user.get_api_calls(
from: (Date.today - 6.days),
to: Date.today
).should == [21, 0, 0, 0, 6, 17, 0]
end
# Stubs the Elasticsearch HTTP endpoint via Typhoeus and verifies that the
# aggregation buckets are parsed into a {timestamp => count} hash.
it "should get final api calls from es" do
yesterday = Date.today - 1
today = Date.today
from_date = DateTime.new(yesterday.year, yesterday.month, yesterday.day, 0, 0, 0).strftime("%Q")
to_date = DateTime.new(today.year, today.month, today.day, 0, 0, 0).strftime("%Q")
api_url = %r{search}
api_response = {
"aggregations" => {
"0" => {
"buckets" => [
{
"key" => from_date.to_i,
"doc_count" => 4
},
{
"key" => to_date.to_i,
"doc_count" => 6
}
]
}
}
}
Typhoeus.stub(api_url,
{ method: :post }
)
.and_return(
Typhoeus::Response.new(code: 200, body: api_response.to_json.to_s)
)
@user.get_api_calls_from_es.should == {from_date.to_i => 4, to_date.to_i => 6}
end
# Avatar resolution: a deterministic CartoDB-hosted avatar is used when
# gravatar is unavailable (404) or disabled; otherwise the gravatar URL wins.
describe "avatar checks" do
let(:user1) do
create_user(email: 'ewdewfref34r43r43d32f45g5@example.com', username: 'u1', password: 'foobar')
end
after(:each) do
user1.destroy
end
it "should load a cartodb avatar url if no gravatar associated" do
# Random#rand is stubbed to 0 so the first configured kind/color is picked.
avatar_kind = Cartodb.config[:avatars]['kinds'][0]
avatar_color = Cartodb.config[:avatars]['colors'][0]
avatar_base_url = Cartodb.config[:avatars]['base_url']
Random.any_instance.stubs(:rand).returns(0)
gravatar_url = %r{gravatar.com}
Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 404))
user1.stubs(:gravatar_enabled?).returns(true)
user1.avatar_url = nil
user1.save
user1.reload_avatar
user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
end
it "should load a cartodb avatar url if gravatar disabled" do
avatar_kind = Cartodb.config[:avatars]['kinds'][0]
avatar_color = Cartodb.config[:avatars]['colors'][0]
avatar_base_url = Cartodb.config[:avatars]['base_url']
Random.any_instance.stubs(:rand).returns(0)
gravatar_url = %r{gravatar.com}
Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
user1.stubs(:gravatar_enabled?).returns(false)
user1.avatar_url = nil
user1.save
user1.reload_avatar
user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
end
it "should load the user gravatar url" do
gravatar_url = %r{gravatar.com}
Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
user1.stubs(:gravatar_enabled?).returns(true)
user1.reload_avatar
user1.avatar_url.should == "//#{user1.gravatar_user_url}"
end
# Config parsing: anything except false/'false' counts as enabled.
describe '#gravatar_enabled?' do
it 'should be enabled by default (every setting but false will enable it)' do
user = ::User.new
Cartodb.with_config(avatars: {}) { user.gravatar_enabled?.should be_true }
Cartodb.with_config(avatars: { 'gravatar_enabled' => true }) { user.gravatar_enabled?.should be_true }
Cartodb.with_config(avatars: { 'gravatar_enabled' => 'true' }) { user.gravatar_enabled?.should be_true }
Cartodb.with_config(avatars: { 'gravatar_enabled' => 'wadus' }) { user.gravatar_enabled?.should be_true }
end
it 'can be disabled' do
user = ::User.new
Cartodb.with_config(avatars: { 'gravatar_enabled' => false }) { user.gravatar_enabled?.should be_false }
Cartodb.with_config(avatars: { 'gravatar_enabled' => 'false' }) { user.gravatar_enabled?.should be_false }
end
end
end
# private_maps_enabled? follows the explicit flag and defaults to disabled.
describe '#private_maps_enabled?' do
it 'should not have private maps enabled by default' do
user_missing_private_maps = create_user :email => 'user_mpm@example.com', :username => 'usermpm', :password => '000usermpm'
user_missing_private_maps.private_maps_enabled?.should eq false
user_missing_private_maps.destroy
end
it 'should have private maps if enabled' do
user_with_private_maps = create_user :email => 'user_wpm@example.com', :username => 'userwpm', :password => '000userwpm', :private_maps_enabled => true
user_with_private_maps.private_maps_enabled?.should eq true
user_with_private_maps.destroy
end
it 'should not have private maps if disabled' do
user_without_private_maps = create_user :email => 'user_opm@example.com', :username => 'useropm', :password => '000useropm', :private_maps_enabled => false
user_without_private_maps.private_maps_enabled?.should eq false
user_without_private_maps.destroy
end
end
# Geocoding call accounting, backed by MockRedis-based usage metrics.
describe '#get_geocoding_calls' do
before do
delete_user_data @user
@user.geocoder_provider = 'heremaps'
@user.stubs(:last_billing_cycle).returns(Date.today)
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::GeocoderUsageMetrics.new(@user.username, nil, @mock_redis)
# Seed success counters: two today (here + internal), two 5 days ago
# (here + cache). The expectations below pin which ones are summed.
@usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now)
@usage_metrics.incr(:geocoder_internal, :success_responses, 1, Time.now)
@usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now - 5.days)
@usage_metrics.incr(:geocoder_cache, :success_responses, 1, Time.now - 5.days)
CartoDB::GeocoderUsageMetrics.stubs(:new).returns(@usage_metrics)
end
it "should return the sum of geocoded rows for the current billing period" do
@user.get_geocoding_calls.should eq 1
end
it "should return the sum of geocoded rows for the specified period" do
@user.get_geocoding_calls(from: Time.now-5.days).should eq 3
@user.get_geocoding_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 2
end
it "should return 0 when no geocodings" do
@user.get_geocoding_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
# Here-isolines usage accounting against MockRedis-backed metrics.
describe '#get_here_isolines_calls' do
before do
delete_user_data @user
@user.isolines_provider = 'heremaps'
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::IsolinesUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::IsolinesUsageMetrics.stubs(:new).returns(@usage_metrics)
# Pin billing-cycle boundaries so the period sums are deterministic.
@user.stubs(:last_billing_cycle).returns(Date.today)
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of here isolines rows for the current billing period" do
@usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
@user.get_here_isolines_calls.should eq 10
end
it "should return the sum of here isolines rows for the specified period" do
@usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 7))
@user.get_here_isolines_calls(from: Time.now-5.days).should eq 110
@user.get_here_isolines_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no here isolines actions" do
@user.get_here_isolines_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
# Data Observatory snapshot usage accounting against MockRedis-backed metrics.
describe '#get_obs_snapshot_calls' do
before do
delete_user_data @user
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::ObservatorySnapshotUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::ObservatorySnapshotUsageMetrics.stubs(:new).returns(@usage_metrics)
# Pin billing-cycle boundaries so the period sums are deterministic.
@user.stubs(:last_billing_cycle).returns(Date.today)
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of data observatory snapshot rows for the current billing period" do
@usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
@user.get_obs_snapshot_calls.should eq 10
end
it "should return the sum of data observatory snapshot rows for the specified period" do
@usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
@usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 7))
@user.get_obs_snapshot_calls(from: Time.now - 5.days).should eq 110
@user.get_obs_snapshot_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no data observatory snapshot actions" do
@user.get_obs_snapshot_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
describe '#get_obs_general_calls' do
before do
delete_user_data @user
# In-memory redis double: metrics never touch a real redis instance.
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::ObservatoryGeneralUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::ObservatoryGeneralUsageMetrics.stubs(:new).returns(@usage_metrics)
@user.stubs(:last_billing_cycle).returns(Date.today)
# (DateTime.current + 1) << 1 == tomorrow shifted one month earlier
# (Date#<< subtracts months).
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of data observatory general rows for the current billing period" do
@usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
# Only today's increment falls inside the stubbed billing cycle.
@user.get_obs_general_calls.should eq 10
end
it "should return the sum of data observatory general rows for the specified period" do
@usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 7))
@user.get_obs_general_calls(from: Time.now - 5.days).should eq 110
@user.get_obs_general_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no data observatory general actions" do
@user.get_obs_general_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
describe "organization user deletion" do
# When an org member is destroyed, their twitter-import counters must be
# re-attributed to the organization owner.
it "should transfer tweet imports to owner" do
u1 = create_user(email: 'u1@exampleb.com', username: 'ub1', password: 'admin123')
org = create_org('cartodbtestb', 1234567890, 5)
u1.organization = org
u1.save
u1.reload
# Reload org through the user and promote u1 to owner.
org = u1.organization
org.owner_id = u1.id
org.save
u1.reload
u2 = create_user(email: 'u2@exampleb.com', username: 'ub2', password: 'admin123', organization: org)
tweet_attributes = {
user: u2,
table_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
data_import_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
service_item_id: '555',
state: ::SearchTweet::STATE_COMPLETE
}
# Two completed searches owned by the member u2 (5 + 10 items).
st1 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 5))
st2 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 10))
u1.reload
u2.reload
u2.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
u1.get_twitter_imports_count.should == 0
# Destroying the member moves the counters to the owner.
u2.destroy
u1.reload
u1.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
org.destroy
end
end
# Owned tables appear through the user's `tables` association.
it "should have many tables" do
@user2.tables.should be_empty
create_table :user_id => @user2.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
@user2.reload
@user2.tables.all.should == [UserTable.first(:user_id => @user2.id)]
end
# Pending example: no body yet.
it "should generate a data report"
it "should update remaining quotas when adding or removing tables" do
initial_quota = @user2.remaining_quota
# Creating a table consumes one unit of table quota...
expect { create_table :user_id => @user2.id, :privacy => UserTable::PRIVACY_PUBLIC }
.to change { @user2.remaining_table_quota }.by(-1)
table = Table.new(user_table: UserTable.filter(:user_id => @user2.id).first)
50.times { |i| table.insert_row!(:name => "row #{i}") }
# ...and inserting rows consumes disk quota.
@user2.remaining_quota.should be < initial_quota
initial_quota = @user2.remaining_quota
# Dropping the table gives both quotas back.
expect { table.destroy }
.to change { @user2.remaining_table_quota }.by(1)
@user2.remaining_quota.should be > initial_quota
end
# Per-user database and db-role names follow a fixed scheme derived from the
# user id, and the connection must work as created.
# Fixed description grammar: was "should has his own database".
it "should have its own database, created when the account is created" do
@user.database_name.should == "cartodb_test_user_#{@user.id}_db"
@user.database_username.should == "test_cartodb_user_#{@user.id}"
@user.in_database.test_connection.should == true
end
# The importer schema must exist in every freshly created user database.
it 'creates an importer schema in the user database' do
@user.in_database[%Q(SELECT * FROM pg_namespace)]
.map { |record| record.fetch(:nspname) }
.should include 'cdb_importer'
end
it 'creates a cdb schema in the user database' do
pending "I believe cdb schema was never used"
@user.in_database[%Q(SELECT * FROM pg_namespace)]
.map { |record| record.fetch(:nspname) }
.should include 'cdb'
end
# The owner can use cdb_importer, but the public (anonymous) role cannot.
it 'allows access to the importer schema by the owner' do
@user.in_database.run(%Q{
CREATE TABLE cdb_importer.bogus ( bogus varchar(40) )
})
query = %Q(SELECT * FROM cdb_importer.bogus)
expect { @user.in_database(as: :public_user)[query].to_a }
.to raise_error(Sequel::DatabaseError)
@user.in_database[query].to_a
end
it 'allows access to the cdb schema by the owner' do
pending "I believe cdb schema was never used"
@user.in_database.run(%Q{
CREATE TABLE cdb.bogus ( bogus varchar(40) )
})
query = %Q(SELECT * FROM cdb.bogus)
expect { @user.in_database(as: :public_user)[query].to_a }
.to raise_error(Sequel::DatabaseError)
@user.in_database[query].to_a
end
# Each user's db credentials must open only their own database; connecting to
# another user's database with them must fail (and vice versa).
# Fixed description typos: was "dabase user that only can read it's own".
it "should create a database user that can only read its own database" do
# Own credentials against own database: OK.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user.database_name, :logger => ::Rails.logger,
'username' => @user.database_username, 'password' => @user.database_password
)
)
connection.test_connection.should == true
connection.disconnect
connection = nil
# @user's credentials against @user2's database: must raise.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user2.database_name, :logger => ::Rails.logger,
'username' => @user.database_username, 'password' => @user.database_password
)
)
begin
connection.test_connection
# Reached only if the cross-database connection unexpectedly succeeds.
true.should_not be_true
rescue
true.should be_true
ensure
connection.disconnect
end
# @user2's credentials against own database: OK.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user2.database_name, :logger => ::Rails.logger,
'username' => @user2.database_username, 'password' => @user2.database_password
)
)
connection.test_connection.should == true
connection.disconnect
# @user2's credentials against @user's database: must raise.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user.database_name, :logger => ::Rails.logger,
'username' => @user2.database_username, 'password' => @user2.database_password
)
)
begin
connection.test_connection
true.should_not be_true
rescue
true.should be_true
ensure
connection.disconnect
end
end
# End-to-end checks of db_service.run_pg_query over the import_csv_1 fixture:
# selects, updates, joins and aggregates.
it "should run valid queries against his database" do
# initial select tests
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
# :time is reported as a float-formatted duration string.
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
# update and reselect
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
# check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
# test a product (cartesian join between two fixture tables)
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
# Malformed SQL is surfaced as a typed CartoDB error, not a raw pg error.
it "should raise errors when running invalid queries against his database" do
lambda {
@user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::ErrorRunningQuery)
end
# Same query scenarios as above, additionally asserting the :results and
# :modified flags that pg mode reports for selects vs. writes.
it "should run valid queries against his database in pg mode" do
reload_user_data(@user) && @user.reload
# initial select tests
# tests results and modified flags
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
query_result[:results].should == true
query_result[:modified].should == false
# update and reselect: an UPDATE sets :modified and produces no result rows
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
query_result[:modified].should == true
query_result[:results].should == false
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
query_result[:modified].should == false
query_result[:results].should == true
# # check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
query_result[:results].should == true
# test a product (cartesian join between two fixture tables)
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
# Error mapping in pg mode: bad SQL, missing table and missing column each
# raise their own typed CartoDB error.
it "should raise errors when running invalid queries against his database in pg mode" do
lambda {
@user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::ErrorRunningQuery)
end
it "should raise errors when invalid table name used in pg mode" do
lambda {
@user.db_service.run_pg_query("select * from this_table_is_not_here where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::TableNotExists)
end
it "should raise errors when invalid column used in pg mode" do
lambda {
@user.db_service.run_pg_query("select not_a_col from import_csv_1 where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::ColumnNotExists)
end
# Every user gets an OAuth client application at signup.
it "should create a client_application for each user" do
@user.client_application.should_not be_nil
end
# Resetting must rotate the application key.
it "should reset its client application" do
old_key = @user.client_application.key
@user.reset_client_application!
@user.reload
@user.client_application.key.should_not == old_key
end
# With multiple statements, the reported rows come from the LAST select.
it "should return the result from the last select query if multiple selects" do
reload_user_data(@user) && @user.reload
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 1; select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
end
it "should allow multiple queries in the format: insert_query; select_query" do
query_result = @user.db_service.run_pg_query("insert into import_csv_1 (name_of_species,family) values ('cristata barrukia','Polynoidae'); select * from import_csv_1 where family='Polynoidae' ORDER BY name_of_species ASC limit 10")
query_result[:total_rows].should == 3
query_result[:rows].map { |i| i[:name_of_species] }.should =~ ["Barrukia cristata", "Eulagisca gigantea", "cristata barrukia"]
end
it "should fail with error if table doesn't exist" do
reload_user_data(@user) && @user.reload
lambda {
@user.db_service.run_pg_query("select * from wadus")
}.should raise_error(CartoDB::TableNotExists)
end
# Redis hash key under which the user's metadata is replicated.
it "should have a method that generates users redis users_metadata key" do
@user.key.should == "rails:users:#{@user.username}"
end
# Saving a user mirrors selected attributes into the $users_metadata redis hash.
it "replicates some user metadata in redis after saving" do
@user.stubs(:database_name).returns('wadus')
@user.save
$users_metadata.HGET(@user.key, 'id').should == @user.id.to_s
$users_metadata.HGET(@user.key, 'database_name').should == 'wadus'
$users_metadata.HGET(@user.key, 'database_password').should == @user.database_password
$users_metadata.HGET(@user.key, 'database_host').should == @user.database_host
$users_metadata.HGET(@user.key, 'map_key').should == @user.api_key
end
# Creation alone (no explicit save call from the spec) also writes the hash.
it "should store its metadata automatically after creation" do
user = FactoryGirl.create :user
$users_metadata.HGET(user.key, 'id').should == user.id.to_s
$users_metadata.HGET(user.key, 'database_name').should == user.database_name
$users_metadata.HGET(user.key, 'database_password').should == user.database_password
$users_metadata.HGET(user.key, 'database_host').should == user.database_host
$users_metadata.HGET(user.key, 'map_key').should == user.api_key
user.destroy
end
# Separate redis key for per-user timeout limits.
it "should have a method that generates users redis limits metadata key" do
@user.timeout_key.should == "limits:timeout:#{@user.username}"
end
# Saving replicates db timeouts to redis AND applies them as the postgres
# statement_timeout for the owner and public roles respectively.
it "replicates db timeout limits in redis after saving and applies them to db" do
@user.user_timeout = 200007
@user.database_timeout = 100007
@user.save
$users_metadata.HGET(@user.timeout_key, 'db').should == '200007'
$users_metadata.HGET(@user.timeout_key, 'db_public').should == '100007'
@user.in_database do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200007ms' })
end
@user.in_database(as: :public_user) do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100007ms' })
end
end
# Render timeouts are replicated to redis only (no db setting involved).
it "replicates render timeout limits in redis after saving" do
@user.user_render_timeout = 200001
@user.database_render_timeout = 100001
@user.save
$users_metadata.HGET(@user.timeout_key, 'render').should == '200001'
$users_metadata.HGET(@user.timeout_key, 'render_public').should == '100001'
end
# Timeouts provided at creation time are persisted, replicated to redis and
# applied to the database roles.
it "should store db timeout limits in redis after creation" do
user = FactoryGirl.create :user, user_timeout: 200002, database_timeout: 100002
user.user_timeout.should == 200002
user.database_timeout.should == 100002
$users_metadata.HGET(user.timeout_key, 'db').should == '200002'
$users_metadata.HGET(user.timeout_key, 'db_public').should == '100002'
user.in_database do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200002ms' })
end
user.in_database(as: :public_user) do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100002ms' })
end
user.destroy
end
it "should store render timeout limits in redis after creation" do
user = FactoryGirl.create :user, user_render_timeout: 200003, database_render_timeout: 100003
user.reload
user.user_render_timeout.should == 200003
user.database_render_timeout.should == 100003
$users_metadata.HGET(user.timeout_key, 'render').should == '200003'
$users_metadata.HGET(user.timeout_key, 'render_public').should == '100003'
user.destroy
end
# Defaults: db timeouts are positive and mirrored in redis and pg_settings
# (pg_settings reports the raw value, without the 'ms' suffix).
it "should have valid non-zero db timeout limits by default" do
user = FactoryGirl.create :user
user.user_timeout.should > 0
user.database_timeout.should > 0
$users_metadata.HGET(user.timeout_key, 'db').should == user.user_timeout.to_s
$users_metadata.HGET(user.timeout_key, 'db_public').should == user.database_timeout.to_s
user.in_database do |db|
result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
result.first.should eq(setting: user.user_timeout.to_s)
end
user.in_database(as: :public_user) do |db|
result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
result.first.should eq(setting: user.database_timeout.to_s)
end
user.destroy
end
# Render timeouts default to 0 (disabled).
it "should have zero render timeout limits by default" do
user = FactoryGirl.create :user
user.user_render_timeout.should eq 0
user.database_render_timeout.should eq 0
$users_metadata.HGET(user.timeout_key, 'render').should eq '0'
$users_metadata.HGET(user.timeout_key, 'render_public').should eq '0'
user.destroy
end
# Re-saving must not rotate the API key.
it "should not regenerate the api_key after saving" do
expect { @user.save }.to_not change { @user.api_key }
end
# Destroying a user must clean up both redis metadata hashes.
it "should remove its metadata from redis after deletion" do
doomed_user = create_user :email => 'doomed@example.com', :username => 'doomed', :password => 'doomed123'
$users_metadata.HGET(doomed_user.key, 'id').should == doomed_user.id.to_s
$users_metadata.HGET(doomed_user.timeout_key, 'db').should_not be_nil
$users_metadata.HGET(doomed_user.timeout_key, 'db_public').should_not be_nil
# Capture the keys before destroy; the model is gone afterwards.
key = doomed_user.key
timeout_key = doomed_user.timeout_key
doomed_user.destroy
$users_metadata.HGET(key, 'id').should be_nil
$users_metadata.HGET(timeout_key, 'db').should be_nil
$users_metadata.HGET(timeout_key, 'db_public').should be_nil
$users_metadata.HGET(timeout_key, 'render').should be_nil
$users_metadata.HGET(timeout_key, 'render_public').should be_nil
end
# Destroying a user drops their postgres database and role.
it "should remove its database and database user after deletion" do
doomed_user = create_user :email => 'doomed1@example.com', :username => 'doomed1', :password => 'doomed123'
create_table :user_id => doomed_user.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
doomed_user.reload
SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
.first[:count].should == 1
SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
.first[:count].should == 1
doomed_user.destroy
SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
.first[:count].should == 0
SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
.first[:count].should == 0
end
# Deletion must purge the user's Varnish cache at least twice.
it "should invalidate its Varnish cache after deletion" do
doomed_user = create_user :email => 'doomed2@example.com', :username => 'doomed2', :password => 'doomed123'
CartoDB::Varnish.any_instance.expects(:purge).with("#{doomed_user.database_name}.*").at_least(2).returns(true)
doomed_user.destroy
end
# Deletion cascades to tables, layers and data imports, purging both the
# database-wide and the per-visualization vizjson Varnish entries.
it "should remove its user tables, layers and data imports after deletion" do
doomed_user = create_user(email: 'doomed2@example.com', username: 'doomed2', password: 'doomed123')
data_import = DataImport.create(user_id: doomed_user.id, data_source: fake_data_path('clubbing.csv')).run_import!
doomed_user.add_layer Layer.create(kind: 'carto')
table_id = data_import.table_id
uuid = UserTable.where(id: table_id).first.table_visualization.id
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{doomed_user.database_name}.*")
.at_least(1)
.returns(true)
CartoDB::Varnish.any_instance.expects(:purge)
.with(".*#{uuid}:vizjson")
.at_least_once
.returns(true)
doomed_user.destroy
DataImport.where(user_id: doomed_user.id).count.should == 0
UserTable.where(user_id: doomed_user.id).count.should == 0
Layer.db["SELECT * from layers_users WHERE user_id = '#{doomed_user.id}'"].count.should == 0
end
# last_billing_cycle: the most recent period boundary on or before "today",
# derived from period_end_date's day-of-month (checked with time travel,
# including month-end clamping: Dec 31 -> Feb 28).
it "should correctly identify last billing cycle" do
user = create_user :email => 'example@example.com', :username => 'example', :password => 'testingbilling'
Delorean.time_travel_to(Date.parse("2013-01-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-15"))
user.last_billing_cycle.should == Date.parse("2012-12-15")
end
Delorean.time_travel_to(Date.parse("2013-01-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
user.last_billing_cycle.should == Date.parse("2012-12-02")
end
Delorean.time_travel_to(Date.parse("2013-03-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-31"))
user.last_billing_cycle.should == Date.parse("2013-02-28")
end
Delorean.time_travel_to(Date.parse("2013-03-15")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
user.last_billing_cycle.should == Date.parse("2013-03-02")
end
user.destroy
Delorean.back_to_the_present
end
# trial_ends_at is nil unless the user upgraded recently AND is on an
# account type that carries a trial (MAGELLAN here; CORONELLI does not).
it "should calculate the trial end date" do
@user.stubs(:upgraded_at).returns(nil)
@user.trial_ends_at.should be_nil
@user.stubs(:upgraded_at).returns(Time.now - 5.days)
@user.stubs(:account_type).returns('CORONELLI')
@user.trial_ends_at.should be_nil
@user.stubs(:account_type).returns('MAGELLAN')
@user.trial_ends_at.should_not be_nil
@user.stubs(:upgraded_at).returns(nil)
@user.trial_ends_at.should be_nil
# Still inside the trial window by one day.
@user.stubs(:upgraded_at).returns(Time.now - (::User::TRIAL_DURATION_DAYS - 1).days)
@user.trial_ends_at.should_not be_nil
end
describe '#hard_geocoding_limit?' do
# hard/soft are complements: the raw column defaults to nil (meaning hard
# for these plans) and flipping hard_geocoding_limit writes the column.
it 'returns true when the plan is AMBASSADOR or FREE unless it has been manually set to false' do
@user[:soft_geocoding_limit].should be_nil
@user.stubs(:account_type).returns('AMBASSADOR')
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
@user.stubs(:account_type).returns('FREE')
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
# Manual override: hard=false persists soft=true in the column.
@user.hard_geocoding_limit = false
@user[:soft_geocoding_limit].should_not be_nil
@user.stubs(:account_type).returns('AMBASSADOR')
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
@user.stubs(:account_type).returns('FREE')
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
end
it 'returns true when for enterprise accounts unless it has been manually set to false' do
['ENTERPRISE', 'ENTERPRISE LUMP-SUM', 'Enterprise Medium Lumpsum AWS'].each do |account_type|
@user.stubs(:account_type).returns(account_type)
# Defaults to hard when the column is unset.
@user.soft_geocoding_limit = nil
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
# Manually softened.
@user.soft_geocoding_limit = true
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
end
end
# Paid plans default to soft limits unless explicitly hardened.
it 'returns false when the plan is CORONELLI or MERCATOR unless it has been manually set to true' do
@user.stubs(:account_type).returns('CORONELLI')
@user.hard_geocoding_limit?.should be_false
@user.stubs(:account_type).returns('MERCATOR')
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit = true
@user.stubs(:account_type).returns('CORONELLI')
@user.hard_geocoding_limit?.should be_true
@user.stubs(:account_type).returns('MERCATOR')
@user.hard_geocoding_limit?.should be_true
end
end
describe '#hard_here_isolines_limit?' do
before(:each) do
# Fresh user per example: these specs mutate the limit column.
@user_account = create_user
end
it 'returns true with every plan unless it has been manually set to false' do
@user_account[:soft_here_isolines_limit].should be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_here_isolines_limit?.should be_false
@user_account.soft_here_isolines_limit.should be_false
@user_account.hard_here_isolines_limit?.should be_true
@user_account.hard_here_isolines_limit.should be_true
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_here_isolines_limit?.should be_false
@user_account.soft_here_isolines_limit.should be_false
@user_account.hard_here_isolines_limit?.should be_true
@user_account.hard_here_isolines_limit.should be_true
# Manual override: hard=false persists soft=true in the column.
@user_account.hard_here_isolines_limit = false
@user_account[:soft_here_isolines_limit].should_not be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_here_isolines_limit?.should be_true
@user_account.soft_here_isolines_limit.should be_true
@user_account.hard_here_isolines_limit?.should be_false
@user_account.hard_here_isolines_limit.should be_false
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_here_isolines_limit?.should be_true
@user_account.soft_here_isolines_limit.should be_true
@user_account.hard_here_isolines_limit?.should be_false
@user_account.hard_here_isolines_limit.should be_false
end
end
describe '#hard_obs_snapshot_limit?' do
before(:each) do
# Fresh user per example: these specs mutate the limit column.
@user_account = create_user
end
it 'returns true with every plan unless it has been manually set to false' do
@user_account[:soft_obs_snapshot_limit].should be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_snapshot_limit?.should be_false
@user_account.soft_obs_snapshot_limit.should be_false
@user_account.hard_obs_snapshot_limit?.should be_true
@user_account.hard_obs_snapshot_limit.should be_true
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_snapshot_limit?.should be_false
@user_account.soft_obs_snapshot_limit.should be_false
@user_account.hard_obs_snapshot_limit?.should be_true
@user_account.hard_obs_snapshot_limit.should be_true
# Manual override: hard=false persists soft=true in the column.
@user_account.hard_obs_snapshot_limit = false
@user_account[:soft_obs_snapshot_limit].should_not be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_snapshot_limit?.should be_true
@user_account.soft_obs_snapshot_limit.should be_true
@user_account.hard_obs_snapshot_limit?.should be_false
@user_account.hard_obs_snapshot_limit.should be_false
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_snapshot_limit?.should be_true
@user_account.soft_obs_snapshot_limit.should be_true
@user_account.hard_obs_snapshot_limit?.should be_false
@user_account.hard_obs_snapshot_limit.should be_false
end
end
describe '#hard_obs_general_limit?' do
before(:each) do
# Fresh user per example: these specs mutate the limit column.
@user_account = create_user
end
it 'returns true with every plan unless it has been manually set to false' do
@user_account[:soft_obs_general_limit].should be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_general_limit?.should be_false
@user_account.soft_obs_general_limit.should be_false
@user_account.hard_obs_general_limit?.should be_true
@user_account.hard_obs_general_limit.should be_true
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_general_limit?.should be_false
@user_account.soft_obs_general_limit.should be_false
@user_account.hard_obs_general_limit?.should be_true
@user_account.hard_obs_general_limit.should be_true
# Manual override: hard=false persists soft=true in the column.
@user_account.hard_obs_general_limit = false
@user_account[:soft_obs_general_limit].should_not be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_general_limit?.should be_true
@user_account.soft_obs_general_limit.should be_true
@user_account.hard_obs_general_limit?.should be_false
@user_account.hard_obs_general_limit.should be_false
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_general_limit?.should be_true
@user_account.soft_obs_general_limit.should be_true
@user_account.hard_obs_general_limit?.should be_false
@user_account.hard_obs_general_limit.should be_false
end
end
describe '#shared_tables' do
# A table shared via a read-only ACL on its visualization must show up in
# the sharing target's table listing; unshared tables must not.
it 'Checks that shared tables include not only owned ones' do
require_relative '../../app/models/visualization/collection'
CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
bypass_named_maps
# No need to really touch the DB for the permissions
Table::any_instance.stubs(:add_read_permission).returns(nil)
# We're leaking tables from some tests, make sure there are no tables
@user.tables.all.each { |t| t.destroy }
@user2.tables.all.each { |t| t.destroy }
# Two tables owned by @user...
table = Table.new
table.user_id = @user.id
table.save.reload
table2 = Table.new
table2.user_id = @user.id
table2.save.reload
# ...and two owned by @user2, of which only table3 will be shared.
table3 = Table.new
table3.user_id = @user2.id
table3.name = 'sharedtable'
table3.save.reload
table4 = Table.new
table4.user_id = @user2.id
table4.name = 'table4'
table4.save.reload
# Only owned tables
user_tables = tables_including_shared(@user)
user_tables.count.should eq 2
# Grant permission
user2_vis = CartoDB::Visualization::Collection.new.fetch(user_id: @user2.id, name: table3.name).first
permission = user2_vis.permission
permission.acl = [
{
type: CartoDB::Permission::TYPE_USER,
entity: {
id: @user.id,
username: @user.username
},
access: CartoDB::Permission::ACCESS_READONLY
}
]
permission.save
# Now owned + shared...
user_tables = tables_including_shared(@user)
user_tables.count.should eq 3
# The shared table must be present, the unshared one must not.
contains_shared_table = false
user_tables.each{ |item|
contains_shared_table ||= item.id == table3.id
}
contains_shared_table.should eq true
contains_shared_table = false
user_tables.each{ |item|
contains_shared_table ||= item.id == table4.id
}
contains_shared_table.should eq false
# Clean up to avoid leaking tables into other examples.
@user.tables.all.each { |t| t.destroy }
@user2.tables.all.each { |t| t.destroy }
end
end
describe '#destroy' do
# Destroying a user drops their postgres role: a later role_exists? check on
# the stale connection fails because the role backing it is gone.
it 'deletes database role' do
u1 = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
role = u1.database_username
db = u1.in_database
db_service = u1.db_service
db_service.role_exists?(db, role).should == true
u1.destroy
expect do
db_service.role_exists?(db, role).should == false
end.to raise_error(/role "#{role}" does not exist/)
db.disconnect
end
# API keys and their redis entries are removed with the user.
it 'deletes api keys' do
user = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
api_key = FactoryGirl.create(:api_key_apis, user_id: user.id)
user.destroy
expect(Carto::ApiKey.exists?(api_key.id)).to be_false
expect($users_metadata.exists(api_key.send(:redis_key))).to be_false
end
describe "on organizations" do
include_context 'organization with users helper'
# Same role-deletion contract, but for an organization member.
it 'deletes database role' do
role = @org_user_1.database_username
db = @org_user_1.in_database
db_service = @org_user_1.db_service
db_service.role_exists?(db, role).should == true
@org_user_1.destroy
expect do
db_service.role_exists?(db, role).should == false
end.to raise_error(/role "#{role}" does not exist/)
db.disconnect
end
# Analysis cache tables registered in cdb_analysis_catalog are cleaned up
# when their owning member is destroyed.
it 'deletes temporary analysis tables' do
db = @org_user_2.in_database
db.run('CREATE TABLE analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e (a int)')
db.run(%{INSERT INTO cdb_analysis_catalog (username, cache_tables, node_id, analysis_def)
VALUES ('#{@org_user_2.username}', '{analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e}', 'a0', '{}')})
@org_user_2.destroy
# Check through the owner's connection: the member's is gone.
db = @org_user_owner.in_database
db["SELECT COUNT(*) FROM cdb_analysis_catalog WHERE username='#{@org_user_2.username}'"].first[:count].should eq 0
end
describe 'User#destroy' do
include TableSharing
# Plain destroy refuses to delete a user who still shares entities.
it 'blocks deletion with shared entities' do
@not_to_be_deleted = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
table = create_random_table(@not_to_be_deleted)
share_table_with_user(table, @org_user_owner)
expect { @not_to_be_deleted.destroy }.to raise_error(/Cannot delete user, has shared entities/)
::User[@not_to_be_deleted.id].should be
end
# For org users the api key's dedicated db role must also be dropped.
it 'deletes api keys and associated roles' do
user = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
api_key = FactoryGirl.create(:api_key_apis, user_id: user.id)
user.destroy
expect(Carto::ApiKey.exists?(api_key.id)).to be_false
expect($users_metadata.exists(api_key.send(:redis_key))).to be_false
expect(
@org_user_owner.in_database["SELECT 1 FROM pg_roles WHERE rolname = '#{api_key.db_role}'"].first
).to be_nil
end
# Destroy cascades to client applications, access/oauth tokens and the
# redis-side api credentials.
it 'deletes client_application and friends' do
user = create_user(email: 'clientapp@example.com', username: 'clientapp', password: @user_password)
user.create_client_application
user.client_application.access_tokens << ::AccessToken.new(
token: "access_token",
secret: "access_secret",
callback_url: "http://callback2",
verifier: "v2",
scope: nil,
client_application_id: user.client_application.id
).save
user.client_application.oauth_tokens << ::OauthToken.new(
token: "oauth_token",
secret: "oauth_secret",
callback_url: "http//callback.com",
verifier: "v1",
scope: nil,
client_application_id: user.client_application.id
).save
base_key = "rails:oauth_access_tokens:#{user.client_application.access_tokens.first.token}"
client_application = ClientApplication.where(user_id: user.id).first
# Two applications: the one from signup plus the one created above.
expect(ClientApplication.where(user_id: user.id).count).to eq 2
expect(client_application.tokens).to_not be_empty
expect(client_application.tokens.length).to eq 2
$api_credentials.keys.should include(base_key)
user.destroy
expect(ClientApplication.where(user_id: user.id).first).to be_nil
expect(AccessToken.where(user_id: user.id).first).to be_nil
expect(OauthToken.where(user_id: user.id).first).to be_nil
$api_credentials.keys.should_not include(base_key)
end
end
end
end
# Unlike #destroy, #destroy_cascade must succeed even when shared entities exist.
describe 'User#destroy_cascade' do
include_context 'organization with users helper'
include TableSharing
it 'allows deletion even with shared entities' do
table = create_random_table(@org_user_1)
# NOTE(review): this shares the table with its own owner (@org_user_1);
# sharing with another user (e.g. @org_user_2) may have been the intent,
# since the example's point is deleting despite shared entities — confirm.
share_table_with_user(table, @org_user_1)
@org_user_1.destroy_cascade
::User[@org_user_1.id].should_not be
end
end
describe '#destroy_restrictions' do
  it 'Checks some scenarios upon user destruction regarding organizations' do
    # Build an organization owned by the first user, with a second member.
    owner = create_user(email: 'u1@example.com', username: 'u1', password: 'admin123')
    member = create_user(email: 'u2@example.com', username: 'u2', password: 'admin123')
    org = create_org('cartodb', 1234567890, 5)

    owner.organization = org
    owner.save
    owner.reload
    expect(owner.organization).not_to be_nil

    org = owner.organization
    org.owner_id = owner.id
    org.save
    owner.reload
    expect(owner.organization.owner.id).to eq owner.id

    member.organization = org
    member.save
    member.reload
    expect(member.organization).not_to be_nil
    member.reload

    # Cannot remove as more users depend on the org
    expect { owner.destroy }.to raise_exception CartoDB::BaseCartoDBError

    org.destroy
  end
end
# Classification of cartodb-postgresql extension versions: releases < 0.3.0
# predate multi-user (MU) support and must be reported as "pre MU".
describe '#cartodb_postgresql_extension_versioning' do
  it 'should report pre multi user for known <0.3.0 versions' do
    before_mu_known_versions = %w(0.1.0 0.1.1 0.2.0 0.2.1)
    before_mu_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, true)
    end
  end

  it 'should report post multi user for >=0.3.0 versions' do
    after_mu_known_versions = %w(0.3.0 0.3.1 0.3.2 0.3.3 0.3.4 0.3.5 0.4.0 0.5.5 0.10.0)
    after_mu_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report post multi user for versions with minor<3 but major>0' do
    minor_version_edge_cases = %w(1.0.0 1.0.1 1.2.0 1.2.1 1.3.0 1.4.4)
    minor_version_edge_cases.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  # Old-style version strings carry two space-separated values
  # ("<version> <git describe output>").
  # FIX: this example and the next one used to share the exact same
  # description, which made spec output/filters ambiguous; renamed to
  # distinguish the pre-MU and post-MU datasets.
  it 'should report correct version with old pre-MU version strings' do
    before_mu_old_known_versions = [
      '0.1.0 0.1.0',
      '0.1.1 0.1.1',
      '0.2.0 0.2.0',
      '0.2.1 0.2.1'
    ]
    before_mu_old_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, true)
    end
  end

  it 'should report correct version with old post-MU version strings' do
    after_mu_old_known_versions = [
      '0.3.0 0.3.0',
      '0.3.1 0.3.1',
      '0.3.2 0.3.2',
      '0.3.3 0.3.3',
      '0.3.4 0.3.4',
      '0.3.5 0.3.5',
      '0.4.0 0.4.0',
      '0.5.5 0.5.5',
      '0.10.0 0.10.0'
    ]
    after_mu_old_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report correct version with `git describe` not being a tag' do
    stub_and_check_version_pre_mu('0.2.1 0.2.0-8-g7840e7c', true)
    after_mu_old_known_versions = [
      '0.3.6 0.3.5-8-g7840e7c',
      '0.4.0 0.3.6-8-g7840e7c'
    ]
    after_mu_old_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  # Stubs the extension version reported by the user's db_service and asserts
  # that the pre-MU classification matches the expectation.
  def stub_and_check_version_pre_mu(version, is_pre_mu)
    @user.db_service.stubs(:cartodb_extension_version).returns(version)
    @user.db_service.cartodb_extension_version_pre_mu?.should eq is_pre_mu
  end
end
# INFO: since user can be also created in Central, and it can fail, we need to request notification explicitly. See #3022 for more info
it "can notify a new user creation" do
# Silence any unrelated enqueues; the expectation below narrows the check
# to the single notification job under test.
::Resque.stubs(:enqueue).returns(nil)
@account_type_org = create_account_type_fg('ORGANIZATION USER')
organization = create_organization_with_owner(quota_in_bytes: 1000.megabytes)
user1 = new_user(username: 'test',
email: "client@example.com",
organization: organization,
organization_id: organization.id,
quota_in_bytes: 20.megabytes,
account_type: 'ORGANIZATION USER')
# Assign the id up-front so the Resque expectation can match it.
user1.id = UUIDTools::UUID.timestamp_create.to_s
::Resque.expects(:enqueue).with(::Resque::UserJobs::Mail::NewOrganizationUser, user1.id).once
user1.save
# INFO: if user must be synched with a remote server it should happen before notifying
user1.notify_new_organization_user
organization.destroy
end
# Walks #change_password through every validation failure mode, then the
# happy path. Scenarios are strictly sequential: each one leaves @user with
# the original password so the next can rely on it.
it "Tests password change" do
new_valid_password = '000123456'
old_crypted_password = @user.crypted_password
# Wrong old password: change rejected, save raises.
@user.change_password('aaabbb', new_valid_password, new_valid_password)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid") # "to_s" of validation msg
# Correct old password but confirmation mismatch.
@user.change_password(@user_password, 'aaabbb', 'bbbaaa')
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password New password doesn't match confirmation")
# Both at once: wrong old password AND mismatched confirmation.
@user.change_password('aaaaaa', 'aaabbb', 'bbbaaa')
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password doesn't match confirmation")
# New password too short (minimum 6 characters).
@user.change_password(@user_password, 'tiny', 'tiny')
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password Must be at least 6 characters long")
# New password too long (maximum 64 characters; 'long' * 20 is 80 chars).
long_password = 'long' * 20
@user.change_password(@user_password, long_password, long_password)
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password Must be at most 64 characters long")
# Blank new password combined with a wrong old password.
@user.change_password('aaaaaa', nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
# Blank new password with the correct old password.
@user.change_password(@user_password, nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password New password can't be blank")
# Everything blank.
@user.change_password(nil, nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
# Valid new password but blank old password.
@user.change_password(nil, new_valid_password, new_valid_password)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid")
# Happy path: the crypted password actually changes.
@user.change_password(@user_password, new_valid_password, new_valid_password)
@user.valid?.should eq true
@user.save
new_crypted_password = @user.crypted_password
(old_crypted_password != new_crypted_password).should eq true
# Changing back restores the original crypted value.
@user.change_password(new_valid_password, @user_password, @user_password)
@user.valid?.should eq true
@user.save
@user.crypted_password.should eq old_crypted_password
# Re-setting the same password leaves last_password_change_date unchanged.
last_password_change_date = @user.last_password_change_date
@user.change_password(@user_password, @user_password, @user_password)
@user.save
@user.last_password_change_date.should eq last_password_change_date
end
# Google-signed-in users without a password may set one without providing
# the (non-existent) current password; doing so starts requiring it again.
describe "when user is signed up with google sign-in and don't have any password yet" do
  before(:each) do
    @user.google_sign_in = true
    @user.last_password_change_date = nil
    @user.save

    expect(@user.needs_password_confirmation?).to eq false

    new_valid_password = '000123456'
    @user.change_password("doesn't matter in this case", new_valid_password, new_valid_password)

    expect(@user.needs_password_confirmation?).to eq true
  end

  it 'should allow updating password w/o a current password' do
    expect(@user.valid?).to eq true
    @user.save
  end

  it 'should have updated last password change date' do
    expect(@user.last_password_change_date).not_to eq nil
    @user.save
  end
end
describe "#purge_redis_vizjson_cache" do
  it "shall iterate on the user's visualizations and purge their redis cache" do
    # Create a few tables with their default vizs.
    # (3.times replaces a (1..3).each whose loop variable was unused.)
    3.times do
      table = Table.new
      table.user_id = @user.id
      table.save
    end

    collection = CartoDB::Visualization::Collection.new.fetch(user_id: @user.id)

    redis_spy = RedisDoubles::RedisSpy.new
    redis_vizjson_cache = CartoDB::Visualization::RedisVizjsonCache.new
    redis_embed_cache = EmbedRedisCache.new
    CartoDB::Visualization::RedisVizjsonCache.any_instance.stubs(:redis).returns(redis_spy)
    EmbedRedisCache.any_instance.stubs(:redis).returns(redis_spy)

    # Expected vizjson keys: http/https variants for each vizjson flavor.
    redis_vizjson_keys = collection.flat_map do |v|
      [
        redis_vizjson_cache.key(v.id, false), redis_vizjson_cache.key(v.id, true),
        redis_vizjson_cache.key(v.id, false, 3), redis_vizjson_cache.key(v.id, true, 3),
        redis_vizjson_cache.key(v.id, false, '3n'), redis_vizjson_cache.key(v.id, true, '3n'),
        redis_vizjson_cache.key(v.id, false, '3a'), redis_vizjson_cache.key(v.id, true, '3a')
      ]
    end
    redis_vizjson_keys.should_not be_empty

    redis_embed_keys = collection.flat_map do |v|
      [redis_embed_cache.key(v.id, false), redis_embed_cache.key(v.id, true)]
    end
    redis_embed_keys.should_not be_empty

    @user.purge_redis_vizjson_cache

    # Every key deleted, nothing extra, and exactly one DEL call per cache.
    redis_spy.deleted.should include(*redis_vizjson_keys)
    redis_spy.deleted.should include(*redis_embed_keys)
    redis_spy.deleted.count.should eq redis_vizjson_keys.count + redis_embed_keys.count
    redis_spy.invokes(:del).count.should eq 2
    redis_spy.invokes(:del).map(&:sort).should include(redis_vizjson_keys.sort)
    redis_spy.invokes(:del).map(&:sort).should include(redis_embed_keys.sort)
  end

  it "shall not fail if the user does not have visualizations" do
    user = create_user
    collection = CartoDB::Visualization::Collection.new.fetch(user_id: user.id)
    # 'http' keys
    redis_keys = collection.map(&:redis_vizjson_key)
    redis_keys.should be_empty
    # 'https' keys
    redis_keys = collection.map { |item| item.redis_vizjson_key(true) }
    redis_keys.should be_empty

    # With no visualizations the redis cache must never be touched.
    CartoDB::Visualization::Member.expects(:redis_cache).never
    user.purge_redis_vizjson_cache

    user.destroy
  end
end
describe "#regressions" do
# Regression: rows in data_imports/geocodings referencing the user (and each
# other through data_import_id) used to break user destruction via FKs.
it "Tests geocodings and data import FK not breaking user destruction" do
user = create_user
user_id = user.id
data_import_id = '11111111-1111-1111-1111-111111111111'
# Insert a data_imports row directly via SQL (bypassing the model) so it
# carries exactly the columns involved in the original failure.
SequelRails.connection.run(%Q{
INSERT INTO data_imports("data_source","data_type","table_name","state","success","logger","updated_at",
"created_at","tables_created_count",
"table_names","append","id","table_id","user_id",
"service_name","service_item_id","stats","type_guessing","quoted_fields_guessing","content_guessing","server","host",
"resque_ppid","upload_host","create_visualization","user_defined_limits")
VALUES('test','url','test','complete','t','11111111-1111-1111-1111-111111111112',
'2015-03-17 00:00:00.94006+00','2015-03-17 00:00:00.810581+00','1',
'test','f','#{data_import_id}','11111111-1111-1111-1111-111111111113',
'#{user_id}','public_url', 'test',
'[{"type":".csv","size":5015}]','t','f','t','test','0.0.0.0','13204','test','f','{"twitter_credits_limit":0}');
})
# And a geocodings row pointing at both the user and the data import above.
SequelRails.connection.run(%Q{
INSERT INTO geocodings("table_name","processed_rows","created_at","updated_at","formatter","state",
"id","user_id",
"cache_hits","kind","geometry_type","processable_rows","real_rows","used_credits",
"data_import_id"
) VALUES('importer_123456','197','2015-03-17 00:00:00.279934+00','2015-03-17 00:00:00.536383+00','field_1','finished',
'11111111-1111-1111-1111-111111111114','#{user_id}','0','admin0','polygon','195','0','0',
'#{data_import_id}');
})
# Destruction must succeed despite those references.
user.destroy
::User.find(id:user_id).should eq nil
end
end
describe '#needs_password_confirmation?' do
  it 'is true for a normal user' do
    user = FactoryGirl.build(:carto_user, google_sign_in: nil)
    expect(user.needs_password_confirmation?).to eq true

    user = FactoryGirl.build(:user, google_sign_in: false)
    expect(user.needs_password_confirmation?).to eq true
  end

  it 'is false for users that signed in with Google' do
    user = FactoryGirl.build(:user, google_sign_in: true)
    expect(user.needs_password_confirmation?).to eq false
  end

  it 'is true for users that signed in with Google but changed the password' do
    user = FactoryGirl.build(:user, google_sign_in: true, last_password_change_date: Time.now)
    expect(user.needs_password_confirmation?).to eq true
  end

  it 'is false for users that were created with http authentication' do
    user = FactoryGirl.build(:valid_user, last_password_change_date: nil)
    Carto::UserCreation.stubs(:http_authentication).returns(stub(find_by_user_id: FactoryGirl.build(:user_creation)))
    expect(user.needs_password_confirmation?).to eq false
  end
end
# End-to-end verification that creating a user wires up the database side:
# roles, schemas, search_path, timeouts and every expected grant/revoke.
describe 'User creation and DB critical calls' do
it 'Properly setups a new user (not belonging to an organization)' do
CartoDB::UserModule::DBService.any_instance.stubs(
cartodb_extension_version_pre_mu?: nil,
monitor_user_notification: nil,
enable_remote_db_user: nil
)
user_timeout_secs = 666
# Build a user with explicit quotas/timeouts so DB-side settings can be
# asserted afterwards.
user = ::User.new
user.username = unique_name('user')
user.email = unique_email
user.password = user.email.split('@').first
user.password_confirmation = user.password
user.admin = false
user.private_tables_enabled = true
user.private_maps_enabled = true
user.enabled = true
user.table_quota = 500
user.quota_in_bytes = 1234567890
user.user_timeout = user_timeout_secs * 1000
user.database_timeout = 123000
user.geocoding_quota = 1000
user.geocoding_block_price = 1500
user.sync_tables_enabled = false
user.organization = nil
user.twitter_datasource_enabled = false
user.avatar_url = user.default_avatar
user.valid?.should == true
user.save
user.nil?.should == false
# To avoid connection pool caching
CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
user.reload
# Just to be sure all following checks will not falsely report ok using wrong schema
user.database_schema.should eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
user.database_schema.should_not eq user.username
test_table_name = "table_perm_test"
# Safety check
user.in_database.fetch(%{
SELECT * FROM pg_extension WHERE extname='postgis';
}).first.nil?.should == false
# Replicate functionality inside ::UserModule::DBService.configure_database
# -------------------------------------------------------------------
user.in_database.fetch(%{
SHOW search_path;
}).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
# @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
# Careful as PG formatter timeout output changes to XXmin if too big
user.in_database.fetch(%{
SHOW statement_timeout;
}).first[:statement_timeout].should eq "#{user_timeout_secs}s"
# No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
# Checks for "grant_read_on_schema_queries(SCHEMA_CARTODB, db_user)"
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'cartodb.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Checks on SCHEMA_PUBLIC
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks on own schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{user.database_schema}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
user.in_database.run(%{
CREATE TABLE #{test_table_name}(x int);
})
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{user.database_schema}.#{test_table_name}', 'SELECT');
}).first[:has_table_privilege].should == true
# _cdb_userquotainbytes is always created on the user schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks on non-org "owned" schemas
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_IMPORTER}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_GEOCODING}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
# Special raster and geo columns
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geometry_columns', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geography_columns', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_overviews', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_columns', 'SELECT');
}).first[:has_table_privilege].should == true
# quota check
user.in_database(as: :superuser).fetch(%{
SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
}).first[:_cdb_userquotainbytes].nil?.should == false
# Varnish invalidation function
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks of publicuser
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_schema}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Additional public user grants/revokes
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
'SELECT');
}).first[:has_table_privilege].should == false
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.destroy
end
# Same flow for an organization member: the key difference asserted below is
# that the user gets its OWN schema (named after the username) instead of
# SCHEMA_PUBLIC, plus per-user import limits inherited/checked after save.
it 'Properly setups a new organization user' do
CartoDB::UserModule::DBService.any_instance.stubs(
cartodb_extension_version_pre_mu?: nil,
monitor_user_notification: nil,
enable_remote_db_user: nil
)
disk_quota = 1234567890
user_timeout_secs = 666
max_import_file_size = 6666666666
max_import_table_row_count = 55555555
max_concurrent_import_count = 44
max_layers = 11
# create an owner
organization = create_org('org-user-creation-db-checks-organization', disk_quota * 10, 10)
user1 = create_user email: 'user1@whatever.com', username: 'creation-db-checks-org-owner', password: 'user11'
user1.organization = organization
user1.max_import_file_size = max_import_file_size
user1.max_import_table_row_count = max_import_table_row_count
user1.max_concurrent_import_count = max_concurrent_import_count
user1.max_layers = 11
user1.save
organization.owner_id = user1.id
organization.save
organization.reload
user1.reload
user = ::User.new
user.username = unique_name('user')
user.email = unique_email
user.password = user.email.split('@').first
user.password_confirmation = user.password
user.admin = false
user.private_tables_enabled = true
user.private_maps_enabled = true
user.enabled = true
user.table_quota = 500
user.quota_in_bytes = disk_quota
user.user_timeout = user_timeout_secs * 1000
user.database_timeout = 123000
user.geocoding_quota = 1000
user.geocoding_block_price = 1500
user.sync_tables_enabled = false
user.organization = organization
user.twitter_datasource_enabled = false
user.avatar_url = user.default_avatar
user.valid?.should == true
user.save
user.nil?.should == false
# To avoid connection pool caching
CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
user.reload
user.max_import_file_size.should eq max_import_file_size
user.max_import_table_row_count.should eq max_import_table_row_count
user.max_concurrent_import_count.should eq max_concurrent_import_count
user.max_layers.should eq max_layers
# Just to be sure all following checks will not falsely report ok using wrong schema
user.database_schema.should_not eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
user.database_schema.should eq user.username
test_table_name = "table_perm_test"
# Safety check
user.in_database.fetch(%{
SELECT * FROM pg_extension WHERE extname='postgis';
}).first.nil?.should == false
# Replicate functionality inside ::UserModule::DBService.configure_database
# -------------------------------------------------------------------
user.in_database.fetch(%{
SHOW search_path;
}).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
# @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
# Careful as PG formatter timeout output changes to XXmin if too big
user.in_database.fetch(%{
SHOW statement_timeout;
}).first[:statement_timeout].should eq "#{user_timeout_secs}s"
# No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'cartodb.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Checks on SCHEMA_PUBLIC
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks on own schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{user.database_schema}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
user.in_database.run(%{
CREATE TABLE #{test_table_name}(x int);
})
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{user.database_schema}.#{test_table_name}', 'SELECT');
}).first[:has_table_privilege].should == true
# _cdb_userquotainbytes is always created on the user schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# quota check
user.in_database(as: :superuser).fetch(%{
SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
}).first[:_cdb_userquotainbytes].nil?.should == false
# Varnish invalidation function
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks of publicuser
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_schema}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Additional public user grants/revokes
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
'SELECT');
}).first[:has_table_privilege].should == false
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database.run(%{
DROP TABLE #{user.database_schema}.#{test_table_name};
})
user.destroy
organization.destroy
end
end
describe "Write locking" do
  it "detects locking properly" do
    expect(@user.db_service.writes_enabled?).to eq true

    @user.db_service.disable_writes
    @user.db_service.terminate_database_connections
    expect(@user.db_service.writes_enabled?).to eq false

    @user.db_service.enable_writes
    @user.db_service.terminate_database_connections
    expect(@user.db_service.writes_enabled?).to eq true
  end

  it "enables and disables writes in user database" do
    @user.db_service.run_pg_query("create table foo_1(a int);")

    # While writes are disabled, DDL against the user DB must fail.
    @user.db_service.disable_writes
    @user.db_service.terminate_database_connections
    expect {
      @user.db_service.run_pg_query("create table foo_2(a int);")
    }.to raise_error(CartoDB::ErrorRunningQuery)

    @user.db_service.enable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.run_pg_query("create table foo_3(a int);")
  end
end
describe '#destroy' do
  # Builds a user with one canonical and one derived visualization plus a
  # user layer; returns everything the deletion checks need.
  def create_full_data
    carto_user = FactoryGirl.create(:carto_user)
    user = ::User[carto_user.id]
    table = create_table(user_id: carto_user.id, name: 'My first table', privacy: UserTable::PRIVACY_PUBLIC)
    canonical_visualization = table.table_visualization

    map = FactoryGirl.create(:carto_map_with_layers, user_id: carto_user.id)
    carto_visualization = FactoryGirl.create(:carto_visualization, user: carto_user, map: map)
    visualization = CartoDB::Visualization::Member.new(id: carto_visualization.id).fetch

    # Force ORM to cache layers (to check if they are deleted later)
    canonical_visualization.map.layers
    visualization.map.layers

    user_layer = Layer.create(kind: 'tiled')
    user.add_layer(user_layer)

    [user, table, [canonical_visualization, visualization], user_layer]
  end

  # Asserts that the user and every dependent record created above is gone.
  def check_deleted_data(user_id, table_id, visualizations, layer_id)
    expect(::User[user_id]).to be_nil
    visualizations.each do |visualization|
      expect(Carto::Visualization.exists?(visualization.id)).to be_false
      visualization.map.layers.each do |layer|
        expect(Carto::Layer.exists?(layer.id)).to be_false
      end
    end
    expect(Carto::UserTable.exists?(table_id)).to be_false
    expect(Carto::Layer.exists?(layer_id)).to be_false
  end

  it 'destroys all related information' do
    user, table, visualizations, layer = create_full_data
    ::User[user.id].destroy
    check_deleted_data(user.id, table.id, visualizations, layer.id)
  end

  it 'destroys all related information, even for viewer users' do
    user, table, visualizations, layer = create_full_data
    user.viewer = true
    user.save
    user.reload

    user.destroy
    check_deleted_data(user.id, table.id, visualizations, layer.id)
  end
end
describe '#visualization_count' do
  include_context 'organization with users helper'
  include TableSharing

  # Each example creates one visualization, checks the unfiltered count and
  # the filtered counts, then destroys it so examples stay independent.
  it 'filters by type if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)

    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(type: Carto::Visualization::TYPE_DERIVED).should eq 1
    [Carto::Visualization::TYPE_CANONICAL, Carto::Visualization::TYPE_REMOTE].each do |type|
      @org_user_1.visualization_count(type: type).should eq 0
    end

    vis.destroy
  end

  it 'filters by privacy if asked' do
    vis = FactoryGirl.create(:carto_visualization,
                             user_id: @org_user_1.id,
                             privacy: Carto::Visualization::PRIVACY_PUBLIC)

    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(privacy: Carto::Visualization::PRIVACY_PUBLIC).should eq 1
    [
      Carto::Visualization::PRIVACY_PRIVATE,
      Carto::Visualization::PRIVACY_LINK,
      Carto::Visualization::PRIVACY_PROTECTED
    ].each do |privacy|
      @org_user_1.visualization_count(privacy: privacy).should eq 0
    end

    vis.destroy
  end

  it 'filters by shared exclusion if asked' do
    # A visualization shared with user 2 counts for them unless shared
    # visualizations are explicitly excluded.
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
    share_visualization_with_user(vis, @org_user_2)

    @org_user_2.visualization_count.should eq 1
    @org_user_2.visualization_count(exclude_shared: true).should eq 0

    vis.destroy
  end

  it 'filters by raster exclusion if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, kind: Carto::Visualization::KIND_RASTER)

    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(exclude_raster: true).should eq 0

    vis.destroy
  end
end
describe 'viewer user' do
  # Viewer (read-only) users must always end up with zeroed quotas and all
  # soft limits disabled, regardless of the values requested.
  def verify_viewer_quota(user)
    user.quota_in_bytes.should eq 0
    user.geocoding_quota.should eq 0
    user.soft_geocoding_limit.should eq false
    user.twitter_datasource_quota.should eq 0
    user.soft_twitter_datasource_limit.should eq false
    user.here_isolines_quota.should eq 0
    user.soft_here_isolines_limit.should eq false
    user.obs_snapshot_quota.should eq 0
    user.soft_obs_snapshot_limit.should eq false
    user.obs_general_quota.should eq 0
    user.soft_obs_general_limit.should eq false
  end

  describe 'creation' do
    it 'assigns 0 as quota and no soft limit no matter what is requested' do
      # Non-zero quotas and soft limits are requested on purpose; they must
      # all be reset because the user is created as a viewer.
      @user = create_user email: 'u_v@whatever.com', username: 'viewer', password: 'user11', viewer: true,
                          geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
                          soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
                          obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
                          soft_obs_general_limit: true
      verify_viewer_quota(@user)
      @user.destroy
    end
  end

  describe 'builder -> viewer' do
    it 'assigns 0 as quota and no soft limit no matter what is requested' do
      @user = create_user email: 'u_v@whatever.com', username: 'builder-to-viewer', password: 'user11', viewer: false,
                          geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
                          soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
                          obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
                          soft_obs_general_limit: true
      # Random check, but we can trust create_user
      @user.quota_in_bytes.should_not eq 0
      # Demoting a builder to viewer must zero everything out.
      @user.viewer = true
      @user.save
      @user.reload
      verify_viewer_quota(@user)
      @user.destroy
    end
  end

  describe 'quotas' do
    it "can't change for viewer users" do
      @user = create_user(viewer: true)
      verify_viewer_quota(@user)

      # Attempted quota change on a viewer is silently kept at 0.
      @user.quota_in_bytes = 666
      @user.save
      @user.reload
      verify_viewer_quota(@user)

      @user.destroy
    end
  end
end
describe 'api keys' do
  before(:all) do
    @auth_api_user = FactoryGirl.create(:valid_user)
  end

  after(:all) do
    @auth_api_user.destroy
  end

  describe 'create api keys on user creation' do
    it "creates master api key on user creation" do
      api_keys = Carto::ApiKey.where(user_id: @auth_api_user.id)
      api_keys.should_not be_empty

      # The master key's token must mirror the user's legacy api_key field.
      master_api_key = Carto::ApiKey.where(user_id: @auth_api_user.id).master.first
      master_api_key.should be
      master_api_key.token.should eq @auth_api_user.api_key
    end
  end

  it 'syncs api key changes with master api key' do
    master_key = Carto::ApiKey.where(user_id: @auth_api_user.id).master.first
    expect(@auth_api_user.api_key).to eq master_key.token
    # Regenerating the user-level key must propagate to the master ApiKey row.
    expect { @auth_api_user.regenerate_api_key }.to(change { @auth_api_user.api_key })
    master_key.reload
    expect(@auth_api_user.api_key).to eq master_key.token
  end

  describe 'are enabled/disabled' do
    before(:all) do
      @regular_key = @auth_api_user.api_keys.create_regular_key!(name: 'regkey', grants: [{ type: 'apis', apis: [] }])
    end

    after(:all) do
      @regular_key.destroy
    end

    # Reset to an active, engine-enabled user before each example so the
    # disable scenarios start from a known state.
    before(:each) do
      @auth_api_user.state = 'active'
      @auth_api_user.engine_enabled = true
      @auth_api_user.save
    end

    # A key is "enabled" when its entry exists in the users metadata Redis.
    def enabled_api_key?(api_key)
      $users_metadata.exists(api_key.send(:redis_key))
    end

    it 'disables all api keys for locked users' do
      @auth_api_user.state = 'locked'
      @auth_api_user.save

      expect(@auth_api_user.api_keys.none? { |k| enabled_api_key?(k) }).to be_true
      # The legacy Redis map_key entry must also stop matching.
      expect(@auth_api_user.api_key).to_not eq($users_metadata.HGET(@auth_api_user.send(:key), 'map_key'))
    end

    it 'disables regular keys for engine disabled' do
      @auth_api_user.engine_enabled = false
      @auth_api_user.save

      # Only regular keys are disabled; master and default public stay on.
      expect(@auth_api_user.api_keys.regular.none? { |k| enabled_api_key?(k) }).to be_true
      expect(@auth_api_user.api_keys.master.all? { |k| enabled_api_key?(k) }).to be_true
      expect(@auth_api_user.api_keys.default_public.all? { |k| enabled_api_key?(k) }).to be_true
      expect(@auth_api_user.api_key).to eq($users_metadata.HGET(@auth_api_user.send(:key), 'map_key'))
    end

    it 'enables all keys for active engine users' do
      expect(@auth_api_user.api_keys.all? { |k| enabled_api_key?(k) }).to be_true
      expect(@auth_api_user.api_key).to eq($users_metadata.HGET(@auth_api_user.send(:key), 'map_key'))
    end
  end

  describe '#regenerate_all_api_keys' do
    before(:all) do
      @regular_key = @auth_api_user.api_keys.create_regular_key!(name: 'regkey', grants: [{ type: 'apis', apis: [] }])
    end

    after(:all) do
      @regular_key.destroy
    end

    it 'regenerates master key at user model' do
      expect { @auth_api_user.regenerate_all_api_keys }.to(change { @auth_api_user.api_key })
    end

    it 'regenerates master key model' do
      expect { @auth_api_user.regenerate_all_api_keys }.to(change { @auth_api_user.api_keys.master.first.token })
    end

    it 'regenerates regular key' do
      expect { @auth_api_user.regenerate_all_api_keys }.to(change { @regular_key.reload.token })
    end
  end
end
describe '#rate limits' do
  # Rate limits are stored both in the DB (Carto::RateLimit) and in Redis
  # ($limits_metadata); these examples check the two stay in sync. The
  # expect_rate_limits_* helpers come from RateLimitsHelper.
  before :all do
    # Rate limiting is gated behind the 'limits_v2' feature flag.
    @limits_feature_flag = FactoryGirl.create(:feature_flag, name: 'limits_v2', restricted: false)
    @account_type = create_account_type_fg('FREE')
    @account_type_pro = create_account_type_fg('PRO')
    @account_type_org = create_account_type_fg('ORGANIZATION USER')
    @rate_limits_custom = FactoryGirl.create(:rate_limits_custom)
    @rate_limits = FactoryGirl.create(:rate_limits)
    @rate_limits_pro = FactoryGirl.create(:rate_limits_pro)
    @user_rt = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits.id)
    @organization = FactoryGirl.create(:organization)

    owner = FactoryGirl.create(:user, account_type: 'PRO')
    uo = CartoDB::UserOrganization.new(@organization.id, owner.id)
    uo.promote_user_to_admin
    @organization.reload

    @user_org = FactoryGirl.build(:user, account_type: 'FREE')
    @user_org.organization = @organization
    @user_org.enabled = true
    @user_org.save

    @map_prefix = "limits:rate:store:#{@user_rt.username}:maps:"
    @sql_prefix = "limits:rate:store:#{@user_rt.username}:sql:"
  end

  after :all do
    @user_rt.destroy unless @user_rt.nil?
    @user_no_ff.destroy unless @user_no_ff.nil?
    @organization.destroy unless @organization.nil?
    @account_type.destroy unless @account_type.nil?
    @account_type_pro.destroy unless @account_type_pro.nil?
    @account_type_org.destroy unless @account_type_org.nil?
    # NOTE(review): rate_limit is read from account-type records that were
    # destroyed just above — confirm the association still resolves here.
    @account_type.rate_limit.destroy unless @account_type.nil?
    @account_type_pro.rate_limit.destroy unless @account_type_pro.nil?
    @account_type_org.rate_limit.destroy unless @account_type_org.nil?
    @rate_limits.destroy unless @rate_limits.nil?
    @rate_limits_custom.destroy unless @rate_limits_custom.nil?
    @rate_limits_custom2.destroy unless @rate_limits_custom2.nil?
    @rate_limits_pro.destroy unless @rate_limits_pro.nil?
  end

  # Re-create the feature flag if a previous example destroyed it.
  before :each do
    unless FeatureFlag.where(name: 'limits_v2').first.present?
      @limits_feature_flag = FactoryGirl.create(:feature_flag, name: 'limits_v2', restricted: false)
    end
  end

  after :each do
    @limits_feature_flag.destroy if @limits_feature_flag.exists?
  end

  it 'does not create rate limits if feature flag is not enabled' do
    @limits_feature_flag.destroy
    @user_no_ff = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits.id)
    map_prefix = "limits:rate:store:#{@user_no_ff.username}:maps:"
    sql_prefix = "limits:rate:store:#{@user_no_ff.username}:sql:"
    # Redis EXISTS returns 0 when the key is absent.
    $limits_metadata.EXISTS("#{map_prefix}anonymous").should eq 0
    $limits_metadata.EXISTS("#{sql_prefix}query").should eq 0
  end

  it 'creates rate limits from user account type' do
    expect_rate_limits_saved_to_redis(@user_rt.username)
  end

  it 'updates rate limits from user custom rate_limit' do
    expect_rate_limits_saved_to_redis(@user_rt.username)

    # Switching the user to a custom rate limit must rewrite Redis.
    @user_rt.rate_limit_id = @rate_limits_custom.id
    @user_rt.save

    expect_rate_limits_custom_saved_to_redis(@user_rt.username)
  end

  it 'creates rate limits for a org user' do
    expect_rate_limits_pro_saved_to_redis(@user_org.username)
  end

  it 'destroy rate limits' do
    user2 = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits_pro.id)

    expect_rate_limits_pro_saved_to_redis(user2.username)

    user2.destroy

    # The user's own rate limit row is removed with the user...
    expect {
      Carto::RateLimit.find(user2.rate_limit_id)
    }.to raise_error(ActiveRecord::RecordNotFound)

    expect_rate_limits_exist(user2.username)
  end

  it 'updates rate limits when user has no rate limits' do
    user = FactoryGirl.create(:valid_user)
    user.update_rate_limits(@rate_limits.api_attributes)

    user.reload
    user.rate_limit.should_not be_nil
    user.rate_limit.api_attributes.should eq @rate_limits.api_attributes

    user.destroy
  end

  it 'does nothing when user has no rate limits' do
    user = FactoryGirl.create(:valid_user)
    user.update_rate_limits(nil)

    user.reload
    user.rate_limit.should be_nil

    user.destroy
  end

  it 'updates rate limits when user has rate limits' do
    @rate_limits_custom2 = FactoryGirl.create(:rate_limits_custom2)
    user = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits_custom2.id)
    user.update_rate_limits(@rate_limits.api_attributes)

    user.reload
    user.rate_limit.should_not be_nil
    # The existing RateLimit row is updated in place, not replaced.
    user.rate_limit_id.should eq @rate_limits_custom2.id
    user.rate_limit.api_attributes.should eq @rate_limits.api_attributes
    @rate_limits.api_attributes.should eq @rate_limits_custom2.reload.api_attributes

    user.destroy
  end

  it 'set rate limits to nil when user has rate limits' do
    @rate_limits_custom2 = FactoryGirl.create(:rate_limits_custom2)
    user = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits_custom2.id)

    user.update_rate_limits(nil)

    user.reload
    user.rate_limit.should be_nil

    # Clearing the custom limit also deletes its DB row.
    expect {
      Carto::RateLimit.find(@rate_limits_custom2.id)
    }.to raise_error(ActiveRecord::RecordNotFound)

    # limits reverted to the ones from the account type
    expect_rate_limits_saved_to_redis(user.username)

    user.destroy
  end
end
describe '#password_expired?' do
  # Password expiration is driven by the 'expiration_in_d' config (days) or
  # by the organization's password_expiration_in_d. Delorean jumps the clock
  # forward to simulate the passage of time.
  before(:all) do
    @organization_password = create_organization_with_owner
  end

  after(:all) do
    @organization_password.destroy
  end

  before(:each) do
    # OAuth users (GitHub/Google) have no CARTO password of their own.
    @github_user = FactoryGirl.build(:valid_user, github_user_id: 932847)
    @google_user = FactoryGirl.build(:valid_user, google_sign_in: true)
    @password_user = FactoryGirl.build(:valid_user)
    @org_user = FactoryGirl.create(:valid_user,
                                   account_type: 'ORGANIZATION USER',
                                   organization: @organization_password)
  end

  it 'never expires without configuration' do
    Cartodb.with_config(passwords: { 'expiration_in_d' => nil }) do
      expect(@github_user.password_expired?).to be_false
      expect(@google_user.password_expired?).to be_false
      expect(@password_user.password_expired?).to be_false
      expect(@org_user.password_expired?).to be_false
    end
  end

  it 'never expires for users without password' do
    Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
      Delorean.jump(10.days)
      expect(@github_user.password_expired?).to be_false
      expect(@google_user.password_expired?).to be_false
      Delorean.back_to_the_present
    end
  end

  it 'expires for users with oauth and changed passwords' do
    # An OAuth user who later set a password is subject to expiration.
    Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
      @github_user.last_password_change_date = Time.now - 10.days
      expect(@github_user.password_expired?).to be_true

      @google_user.last_password_change_date = Time.now - 10.days
      expect(@google_user.password_expired?).to be_true
    end
  end

  it 'expires for password users after a while has passed' do
    @password_user.save
    Cartodb.with_config(passwords: { 'expiration_in_d' => 15 }) do
      expect(@password_user.password_expired?).to be_false
      Delorean.jump(30.days)
      expect(@password_user.password_expired?).to be_true

      # Changing the password resets the expiration countdown.
      @password_user.password = @password_user.password_confirmation = 'waduspass'
      @password_user.save

      expect(@password_user.password_expired?).to be_false
      Delorean.jump(30.days)
      expect(@password_user.password_expired?).to be_true

      Delorean.back_to_the_present
    end
    @password_user.destroy
  end

  it 'expires for org users with password_expiration set' do
    # Organization setting (2 days) overrides the global config (5 days).
    @organization_password.stubs(:password_expiration_in_d).returns(2)
    org_user2 = FactoryGirl.create(:valid_user,
                                   account_type: 'ORGANIZATION USER',
                                   organization: @organization_password)

    Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
      expect(org_user2.password_expired?).to be_false
      Delorean.jump(1.day)
      expect(org_user2.password_expired?).to be_false
      Delorean.jump(5.days)
      expect(org_user2.password_expired?).to be_true

      org_user2.password = org_user2.password_confirmation = 'waduspass'
      org_user2.save

      Delorean.jump(1.day)
      expect(org_user2.password_expired?).to be_false
      Delorean.jump(5.day)
      expect(org_user2.password_expired?).to be_true

      Delorean.back_to_the_present
    end
  end

  it 'never expires for org users with no password_expiration set' do
    # A nil org setting disables expiration even with a global config.
    @organization_password.stubs(:password_expiration_in_d).returns(nil)
    org_user2 = FactoryGirl.create(:valid_user, organization: @organization_password)

    Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
      expect(org_user2.password_expired?).to be_false
      Delorean.jump(10.days)
      expect(org_user2.password_expired?).to be_false

      org_user2.password = org_user2.password_confirmation = 'waduspass'
      org_user2.save

      Delorean.jump(10.days)
      expect(org_user2.password_expired?).to be_false

      Delorean.back_to_the_present
    end
  end
end
protected
# Builds and persists an Organization with a uniquified name, the given
# storage quota (in bytes) and seat count. Returns the saved record.
def create_org(org_name, org_quota, org_seats)
  Organization.new.tap do |org|
    org.name = unique_name(org_name)
    org.quota_in_bytes = org_quota
    org.seats = org_seats
    org.save
  end
end
# Returns the tables backing every canonical visualization that the given
# user either owns or has had shared with them.
def tables_including_shared(user)
  query = Carto::VisualizationQueryBuilder.new
  query = query.with_owned_by_or_shared_with_user_id(user.id)
  query = query.with_type(Carto::Visualization::TYPE_CANONICAL)
  query.build.map { |visualization| visualization.table }
end
end
hound
# coding: utf-8
require 'ostruct'
require_relative '../spec_helper'
require_relative 'user_shared_examples'
require_relative '../../services/dataservices-metrics/lib/isolines_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_snapshot_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_general_usage_metrics'
require 'factories/organizations_contexts'
require_relative '../../app/model_factories/layer_factory'
require_dependency 'cartodb/redis_vizjson_cache'
require 'helpers/rate_limits_helper'
require 'helpers/unique_names_helper'
require 'helpers/account_types_helper'
require 'factories/users_helper'
require 'factories/database_configuration_contexts'
describe 'refactored behaviour' do
  # Runs the shared 'user models' examples (user_shared_examples) against
  # the Sequel ::User model, providing the accessors the examples need.
  it_behaves_like 'user models' do
    def get_twitter_imports_count_by_user_id(user_id)
      get_user_by_id(user_id).get_twitter_imports_count
    end

    def get_user_by_id(user_id)
      ::User.where(id: user_id).first
    end

    def create_user
      FactoryGirl.create(:valid_user)
    end
  end
end
describe User do
include UniqueNamesHelper
include AccountTypesHelper
include RateLimitsHelper
# NOTE(review): this stub is repeated in the second before(:each) below;
# one of the two hooks looks redundant.
before(:each) do
  CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
end

before(:all) do
  bypass_named_maps

  @user_password = 'admin123'

  puts "\n[rspec][user_spec] Creating test user databases..."
  # Shared fixtures for the whole spec; destroyed in after(:all).
  @user = create_user :email => 'admin@example.com', :username => 'admin', :password => @user_password
  @user2 = create_user :email => 'user@example.com', :username => 'user', :password => 'user123'

  puts "[rspec][user_spec] Loading user data..."
  reload_user_data(@user) && @user.reload

  puts "[rspec][user_spec] Running..."
end

before(:each) do
  bypass_named_maps
  # Avoid touching Varnish, remote DB users and CDB metadata in examples.
  CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
  CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
  Table.any_instance.stubs(:update_cdb_tablemetadata)
end

after(:all) do
  bypass_named_maps
  @user.destroy
  @user2.destroy
  # Account types are created lazily by some examples.
  @account_type.destroy if @account_type
  @account_type_org.destroy if @account_type_org
end
it "should only allow legal usernames" do
  # Symbols, spaces, uppercase, underscores, dots and leading/trailing
  # dashes are rejected; lowercase alphanumerics with interior dashes pass.
  rejected = %w(si$mon 'sergio estella' j@vi sergio£££ simon_tokumine SIMON Simon jose.rilla -rilla rilla-)
  accepted = %w(simon javier-de-la-torre sergio-leiva sergio99)

  rejected.each do |candidate|
    @user.username = candidate
    @user.valid?.should be_false
    @user.errors[:username].should be_present
  end

  accepted.each do |candidate|
    @user.username = candidate
    @user.valid?.should be_true
    @user.errors[:username].should be_blank
  end
end

it "should not allow a username in use by an organization" do
  # Usernames and organization names share a namespace.
  organization = create_org('testusername', 10.megabytes, 1)

  @user.username = organization.name
  @user.valid?.should be_false

  @user.username = 'wadus'
  @user.valid?.should be_true
end
describe 'organization checks' do
it "should not be valid if his organization doesn't have more seats" do
  # Organization with a single seat, taken by its owner...
  organization = create_org('testorg', 10.megabytes, 1)
  user1 = create_user email: 'user1@testorg.com',
                      username: 'user1',
                      password: 'user11',
                      account_type: 'ORGANIZATION USER'
  user1.organization = organization
  user1.save
  organization.owner_id = user1.id
  organization.save
  organization.reload
  user1.reload

  # ...so a second member must fail validation on :organization.
  user2 = new_user
  user2.organization = organization
  user2.valid?.should be_false
  user2.errors.keys.should include(:organization)

  organization.destroy
  user1.destroy
end

it 'should be valid if his organization has enough seats' do
  organization = create_org('testorg', 10.megabytes, 1)
  user = ::User.new
  user.organization = organization
  user.valid?
  user.errors.keys.should_not include(:organization)
  organization.destroy
end

it "should not be valid if his organization doesn't have enough disk space" do
  organization = create_org('testorg', 10.megabytes, 1)
  # Pretend the whole org quota is already assigned to other members.
  organization.stubs(:assigned_quota).returns(10.megabytes)
  user = ::User.new
  user.organization = organization
  user.quota_in_bytes = 1.megabyte
  user.valid?.should be_false
  user.errors.keys.should include(:quota_in_bytes)
  organization.destroy
end

it 'should be valid if his organization has enough disk space' do
  organization = create_org('testorg', 10.megabytes, 1)
  # 9 MB assigned out of 10 MB leaves room for this user's 1 MB quota.
  organization.stubs(:assigned_quota).returns(9.megabytes)
  user = ::User.new
  user.organization = organization
  user.quota_in_bytes = 1.megabyte
  user.valid?
  user.errors.keys.should_not include(:quota_in_bytes)
  organization.destroy
end
describe '#org_admin' do
  before(:all) do
    @organization = create_organization_with_owner
  end

  after(:all) do
    @organization.destroy
  end

  def create_role(user)
    # NOTE: It's hard to test the real Groups API call here, it needs a Rails server up and running
    # Instead, we test the main step that this function does internally (creating a role)
    user.in_database["CREATE ROLE \"#{user.database_username}_#{unique_name('role')}\""].all
  end

  it 'cannot be owner and viewer at the same time' do
    @organization.owner.viewer = true
    @organization.owner.should_not be_valid
    @organization.owner.errors.keys.should include(:viewer)
  end

  it 'cannot be admin and viewer at the same time' do
    user = ::User.new
    user.organization = @organization
    user.viewer = true
    user.org_admin = true
    user.should_not be_valid
    user.errors.keys.should include(:viewer)
  end

  it 'should not be able to create groups without admin rights' do
    user = FactoryGirl.create(:valid_user, organization: @organization)
    expect { create_role(user) }.to raise_error
  end

  it 'should be able to create groups with admin rights' do
    user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
    expect { create_role(user) }.to_not raise_error
  end

  it 'should revoke admin rights on demotion' do
    # CREATEROLE works while admin, and must stop working after demotion.
    user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
    expect { create_role(user) }.to_not raise_error

    user.org_admin = false
    user.save

    expect { create_role(user) }.to raise_error
  end
end
describe 'organization email whitelisting' do
  before(:each) do
    @organization = create_org('testorg', 10.megabytes, 1)
  end

  after(:each) do
    @organization.destroy
  end

  it 'valid_user is valid' do
    user = FactoryGirl.build(:valid_user)
    user.valid?.should == true
  end

  it 'user email is valid if organization has not whitelisted domains' do
    user = FactoryGirl.build(:valid_user, organization: @organization)
    user.valid?.should == true
  end

  it 'user email is not valid if organization has whitelisted domains and email is not under that domain' do
    @organization.whitelisted_email_domains = [ 'organization.org' ]
    user = FactoryGirl.build(:valid_user, organization: @organization)
    user.valid?.should eq false
    user.errors[:email].should_not be_nil
  end

  it 'user email is valid if organization has whitelisted domains and email is under that domain' do
    user = FactoryGirl.build(:valid_user, organization: @organization)
    # Whitelist exactly the domain of the generated user's email.
    @organization.whitelisted_email_domains = [ user.email.split('@')[1] ]
    user.valid?.should eq true
    user.errors[:email].should == []
  end
end
describe 'when updating user quota' do
  # The org starts with its full 70 MB already assigned to members; the
  # owner can only grow within what reassignment allows.
  it 'should be valid if his organization has enough disk space' do
    organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
    organization.assigned_quota.should == 70.megabytes
    user = organization.owner
    user.quota_in_bytes = 1.megabyte
    user.valid?
    user.errors.keys.should_not include(:quota_in_bytes)
    organization.destroy
  end

  it "should not be valid if his organization doesn't have enough disk space" do
    organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
    organization.assigned_quota.should == 70.megabytes
    user = organization.owner
    # Requesting more than the whole org quota must fail validation.
    user.quota_in_bytes = 71.megabytes
    user.valid?.should be_false
    user.errors.keys.should include(:quota_in_bytes)
    organization.destroy
  end
end
describe 'when updating viewer state' do
  before(:all) do
    @organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
  end

  after(:all) do
    @organization.destroy
  end

  # Reset seat counts before each example; individual examples shrink them.
  before(:each) do
    @organization.viewer_seats = 10
    @organization.seats = 10
    @organization.save
  end

  it 'should not allow changing to viewer without seats' do
    @organization.viewer_seats = 0
    @organization.save

    # Pick any non-owner member to flip.
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    expect(user).not_to be_valid
    expect(user.errors.keys).to include(:organization)
  end

  it 'should allow changing to viewer with enough seats' do
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    expect(user).to be_valid
    expect(user.errors.keys).not_to include(:organization)
  end

  it 'should not allow changing to builder without seats' do
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    user.save

    # Only one builder seat left (the owner's), so promotion must fail.
    @organization.seats = 1
    @organization.save

    user.reload
    user.viewer = false
    expect(user).not_to be_valid
    expect(user.errors.keys).to include(:organization)
  end

  it 'should allow changing to builder with seats' do
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    user.save

    user.reload
    user.viewer = false
    expect(user).to be_valid
    expect(user.errors.keys).not_to include(:organization)
  end
end
it 'should set account_type properly' do
  # Non-owner org members inherit the ORGANIZATION USER account type.
  organization = create_organization_with_users
  organization.users.reject(&:organization_owner?).each do |u|
    u.account_type.should == "ORGANIZATION USER"
  end
  organization.destroy
end

it 'should set default settings properly unless overriden' do
  organization = create_organization_with_users
  organization.users.reject(&:organization_owner?).each do |u|
    u.max_layers.should eq ::User::DEFAULT_MAX_LAYERS
    u.private_tables_enabled.should be_true
    u.sync_tables_enabled.should be_true
  end
  # An explicit value must survive the defaulting logic.
  user = FactoryGirl.build(:user, organization: organization)
  user.max_layers = 3
  user.save
  user.max_layers.should == 3
  organization.destroy
end
describe 'google_maps_key and google_maps_private_key' do
  before(:all) do
    @organization = create_organization_with_users(google_maps_key: 'gmk', google_maps_private_key: 'gmpk')
    @organization.google_maps_key.should_not be_nil
    @organization.google_maps_private_key.should_not be_nil
  end

  after(:all) do
    @organization.destroy
  end

  it 'should be inherited from organization for new users' do
    # Every non-owner member picks up the org-level Google Maps keys.
    @organization.users.should_not be_empty
    @organization.users.reject(&:organization_owner?).each do |u|
      u.google_maps_key.should == @organization.google_maps_key
      u.google_maps_private_key.should == @organization.google_maps_private_key
    end
  end
end
it 'should inherit twitter_datasource_enabled from organizations with custom config on creation' do
  organization = create_organization_with_users(twitter_datasource_enabled: true)
  organization.save
  organization.twitter_datasource_enabled.should be_true
  organization.users.reject(&:organization_owner?).each do |u|
    # twitter_datasource_enabled also requires a customized datasource
    # config for the user, hence the stub.
    CartoDB::Datasources::DatasourcesFactory.stubs(:customized_config?).with(Search::Twitter::DATASOURCE_NAME, u).returns(true)
    u.twitter_datasource_enabled.should be_true
  end
  CartoDB::Datasources::DatasourcesFactory.stubs(:customized_config?).returns(true)
  user = create_user(organization: organization)
  user.save
  CartoDB::Datasources::DatasourcesFactory.stubs(:customized_config?).with(Search::Twitter::DATASOURCE_NAME, user).returns(true)
  user.twitter_datasource_enabled.should be_true
  organization.destroy
end

it "should return proper values for non-persisted settings" do
  organization = create_organization_with_users
  organization.users.reject(&:organization_owner?).each do |u|
    u.private_maps_enabled.should be_true
  end
  organization.destroy
end
end
describe 'central synchronization' do
  it 'should create remote user in central if needed' do
    # Skipped unless Central API credentials are configured in this env.
    pending "Central API credentials not provided" unless ::User.new.sync_data_with_cartodb_central?
    organization = create_org('testorg', 500.megabytes, 1)
    user = create_user email: 'user1@testorg.com',
                       username: 'user1',
                       password: 'user11',
                       account_type: 'ORGANIZATION USER'
    user.organization = organization
    user.save
    # The Central client must be called exactly once with the org name and
    # the attributes allowed for creation.
    Cartodb::Central.any_instance.expects(:create_organization_user).with(organization.name, user.allowed_attributes_to_central(:create)).once
    user.create_in_central.should be_true
    organization.destroy
  end
end
it 'should store feature flags' do
  ff = FactoryGirl.create(:feature_flag, id: 10001, name: 'ff10001')

  user = create_user email: 'ff@example.com', username: 'ff-user-01', password: '000ff-user-01'
  # Central sends feature flag ids as strings.
  user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
  user.save
  user.feature_flags_user.map { |ffu| ffu.feature_flag_id }.should include(ff.id)
  user.destroy
end

it 'should delete feature flags assignations to a deleted user' do
  ff = FactoryGirl.create(:feature_flag, id: 10002, name: 'ff10002')

  user = create_user email: 'ff2@example.com', username: 'ff2-user-01', password: '000ff2-user-01'
  user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
  user.save
  user_id = user.id
  user.destroy

  # The join rows go away with the user; the flag itself remains.
  SequelRails.connection["select count(*) from feature_flags_users where user_id = '#{user_id}'"].first[:count].should eq 0
  SequelRails.connection["select count(*) from feature_flags where id = '#{ff.id}'"].first[:count].should eq 1
end

it "should have a default dashboard_viewed? false" do
  user = ::User.new
  user.dashboard_viewed?.should be_false
end

it "should reset dashboard_viewed when dashboard gets viewed" do
  user = ::User.new
  user.view_dashboard
  user.dashboard_viewed?.should be_true
end
it "should validate that password is present if record is new and crypted_password or salt are blank" do
  user = ::User.new
  user.username = "adminipop"
  user.email = "adminipop@example.com"

  user.valid?.should be_false
  user.errors[:password].should be_present

  # Supplying crypted_password + salt directly (e.g. migrated accounts)
  # satisfies the presence requirement without a plaintext password.
  another_user = new_user(user.values.merge(:password => "admin123"))
  user.crypted_password = another_user.crypted_password
  user.salt = another_user.salt
  user.valid?.should be_true
  user.save

  # Let's ensure that crypted_password and salt does not change
  user_check = ::User[user.id]
  user_check.crypted_password.should == another_user.crypted_password
  user_check.salt.should == another_user.salt

  user.password = nil
  user.valid?.should be_true

  user.destroy
end

it "should validate password presence and length" do
  user = ::User.new
  user.username = "adminipop"
  user.email = "adminipop@example.com"

  user.valid?.should be_false
  user.errors[:password].should be_present

  # Too short.
  user.password = 'short'
  user.valid?.should be_false
  user.errors[:password].should be_present

  # Too long (66 characters).
  user.password = 'manolo' * 11
  user.valid?.should be_false
  user.errors[:password].should be_present
end

it "should validate password is different than username" do
  user = ::User.new
  user.username = "adminipop"
  user.email = "adminipop@example.com"

  user.password = user.password_confirmation = "adminipop"
  user.valid?.should be_false
  user.errors[:password].should be_present
end

it "should validate password is not a common one" do
  user = ::User.new
  user.username = "adminipop"
  user.email = "adminipop@example.com"

  user.password = user.password_confirmation = '123456'
  user.valid?.should be_false
  user.errors[:password].should be_present
end
it "should set default statement timeout values" do
  @user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
  @user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end

it "should keep in sync user statement_timeout" do
  # user_timeout applies to the owner role; public keeps the default.
  @user.user_timeout = 1000000
  @user.database_timeout = 300000
  @user.save
  @user.in_database["show statement_timeout"].first[:statement_timeout].should == "1000s"
  @user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end

it "should keep in sync database statement_timeout" do
  # database_timeout applies to the public role; owner keeps the default.
  @user.user_timeout = 300000
  @user.database_timeout = 1000000
  @user.save
  @user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
  @user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "1000s"
end

it "should invalidate all his vizjsons when his account type changes" do
  @account_type = create_account_type_fg('WADUS')
  @user.account_type = 'WADUS'
  # Exactly one Varnish purge over the user's vizjson namespace.
  CartoDB::Varnish.any_instance.expects(:purge)
                  .with("#{@user.database_name}.*:vizjson").times(1).returns(true)
  @user.save
end

it "should invalidate all his vizjsons when his disqus_shortname changes" do
  @user.disqus_shortname = 'WADUS'
  CartoDB::Varnish.any_instance.expects(:purge)
                  .with("#{@user.database_name}.*:vizjson").times(1).returns(true)
  @user.save
end

it "should not invalidate anything when his quota_in_bytes changes" do
  @user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
  CartoDB::Varnish.any_instance.expects(:purge).times(0)
  @user.save
end

it "should rebuild the quota trigger after changing the quota" do
  @user.db_service.expects(:rebuild_quota_trigger).once
  @user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
  @user.save
end
# API-call accounting. The first example is explicitly pending (code moved
# elsewhere); it documents how old redis counters and ES counters were merged.
it "should read api calls from external service" do
pending "This is deprecated. This code has been moved"
@user.stubs(:get_old_api_calls).returns({
"per_day" => [0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 17, 4, 0, 0, 0, 0],
"total"=>49,
"updated_at"=>1370362756
})
@user.stubs(:get_es_api_calls_from_redis).returns([
21, 0, 0, 0, 2, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
])
@user.get_api_calls.should == [21, 0, 0, 0, 6, 17, 0, 5, 0, 0, 0, 0, 0, 0, 8, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0]
@user.get_api_calls(
from: (Date.today - 6.days),
to: Date.today
).should == [21, 0, 0, 0, 6, 17, 0]
end
# Stubs the ElasticSearch HTTP endpoint with a canned date-bucketed
# aggregation and checks it is parsed into a {timestamp_ms => count} hash.
it "should get final api calls from es" do
yesterday = Date.today - 1
today = Date.today
# %Q formats as milliseconds since the Unix epoch.
from_date = DateTime.new(yesterday.year, yesterday.month, yesterday.day, 0, 0, 0).strftime("%Q")
to_date = DateTime.new(today.year, today.month, today.day, 0, 0, 0).strftime("%Q")
api_url = %r{search}
api_response = {
"aggregations" => {
"0" => {
"buckets" => [
{
"key" => from_date.to_i,
"doc_count" => 4
},
{
"key" => to_date.to_i,
"doc_count" => 6
}
]
}
}
}
Typhoeus.stub(api_url,
{ method: :post }
)
.and_return(
Typhoeus::Response.new(code: 200, body: api_response.to_json.to_s)
)
@user.get_api_calls_from_es.should == {from_date.to_i => 4, to_date.to_i => 6}
end
# Avatar resolution: fall back to a CartoDB-hosted avatar when no gravatar
# exists or gravatar is disabled; otherwise use the user's gravatar URL.
describe "avatar checks" do
  let(:user1) do
    create_user(email: 'ewdewfref34r43r43d32f45g5@example.com', username: 'u1', password: 'foobar')
  end

  after(:each) do
    user1.destroy
  end

  it "should load a cartodb avatar url if no gravatar associated" do
    avatar_kind = Cartodb.config[:avatars]['kinds'][0]
    avatar_color = Cartodb.config[:avatars]['colors'][0]
    avatar_base_url = Cartodb.config[:avatars]['base_url']
    # Pin rand to 0 so the first configured kind/color is deterministically chosen.
    Random.any_instance.stubs(:rand).returns(0)
    gravatar_url = %r{gravatar.com}
    # 404 from gravatar means no gravatar exists for this email.
    Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 404))
    user1.stubs(:gravatar_enabled?).returns(true)
    user1.avatar_url = nil
    user1.save
    user1.reload_avatar
    user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
  end

  it "should load a cartodb avatar url if gravatar disabled" do
    avatar_kind = Cartodb.config[:avatars]['kinds'][0]
    avatar_color = Cartodb.config[:avatars]['colors'][0]
    avatar_base_url = Cartodb.config[:avatars]['base_url']
    Random.any_instance.stubs(:rand).returns(0)
    gravatar_url = %r{gravatar.com}
    # Gravatar exists (200) but the feature is disabled, so it must be ignored.
    Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
    user1.stubs(:gravatar_enabled?).returns(false)
    user1.avatar_url = nil
    user1.save
    user1.reload_avatar
    user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
  end

  # NOTE(review): fixed typo in the example description ("load a the user").
  it "should load the user gravatar url" do
    gravatar_url = %r{gravatar.com}
    Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
    user1.stubs(:gravatar_enabled?).returns(true)
    user1.reload_avatar
    user1.avatar_url.should == "//#{user1.gravatar_user_url}"
  end

  describe '#gravatar_enabled?' do
    it 'should be enabled by default (every setting but false will enable it)' do
      user = ::User.new
      Cartodb.with_config(avatars: {}) { user.gravatar_enabled?.should be_true }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => true }) { user.gravatar_enabled?.should be_true }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => 'true' }) { user.gravatar_enabled?.should be_true }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => 'wadus' }) { user.gravatar_enabled?.should be_true }
    end

    it 'can be disabled' do
      user = ::User.new
      Cartodb.with_config(avatars: { 'gravatar_enabled' => false }) { user.gravatar_enabled?.should be_false }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => 'false' }) { user.gravatar_enabled?.should be_false }
    end
  end
end
# private_maps_enabled? reflects the creation flag and defaults to false.
describe '#private_maps_enabled?' do
  it 'should not have private maps enabled by default' do
    user = create_user(email: 'user_mpm@example.com', username: 'usermpm', password: '000usermpm')
    user.private_maps_enabled?.should eq false
    user.destroy
  end

  it 'should have private maps if enabled' do
    user = create_user(email: 'user_wpm@example.com', username: 'userwpm', password: '000userwpm',
                       private_maps_enabled: true)
    user.private_maps_enabled?.should eq true
    user.destroy
  end

  it 'should not have private maps if disabled' do
    user = create_user(email: 'user_opm@example.com', username: 'useropm', password: '000useropm',
                       private_maps_enabled: false)
    user.private_maps_enabled?.should eq false
    user.destroy
  end
end
# Geocoding usage accounting, backed by per-day counters in (mock) redis.
# Counters are seeded at "now" and "now - 5 days" so ranged queries can be
# distinguished from the current-billing-period default.
describe '#get_geocoding_calls' do
before do
delete_user_data @user
@user.geocoder_provider = 'heremaps'
@user.stubs(:last_billing_cycle).returns(Date.today)
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::GeocoderUsageMetrics.new(@user.username, nil, @mock_redis)
@usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now)
@usage_metrics.incr(:geocoder_internal, :success_responses, 1, Time.now)
@usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now - 5.days)
@usage_metrics.incr(:geocoder_cache, :success_responses, 1, Time.now - 5.days)
CartoDB::GeocoderUsageMetrics.stubs(:new).returns(@usage_metrics)
end
# Default window is the current billing cycle (today): only the
# :geocoder_here success today counts (internal/cache are excluded).
it "should return the sum of geocoded rows for the current billing period" do
@user.get_geocoding_calls.should eq 1
end
it "should return the sum of geocoded rows for the specified period" do
@user.get_geocoding_calls(from: Time.now-5.days).should eq 3
@user.get_geocoding_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 2
end
it "should return 0 when no geocodings" do
@user.get_geocoding_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
# HERE isolines usage accounting via redis-backed usage metrics.
describe '#get_here_isolines_calls' do
before do
delete_user_data @user
@user.isolines_provider = 'heremaps'
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::IsolinesUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::IsolinesUsageMetrics.stubs(:new).returns(@usage_metrics)
@user.stubs(:last_billing_cycle).returns(Date.today)
# (DateTime.current + 1) << 1 is "tomorrow minus one month" (Date#<<
# subtracts months), keeping the billing period end in the past.
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of here isolines rows for the current billing period" do
@usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
@user.get_here_isolines_calls.should eq 10
end
it "should return the sum of here isolines rows for the specified period" do
@usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 7))
@user.get_here_isolines_calls(from: Time.now-5.days).should eq 110
@user.get_here_isolines_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no here isolines actions" do
@user.get_here_isolines_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
# Data Observatory snapshot usage accounting via redis-backed usage metrics.
describe '#get_obs_snapshot_calls' do
  before do
    delete_user_data @user
    @mock_redis = MockRedis.new
    @usage_metrics = CartoDB::ObservatorySnapshotUsageMetrics.new(@user.username, nil, @mock_redis)
    CartoDB::ObservatorySnapshotUsageMetrics.stubs(:new).returns(@usage_metrics)
    @user.stubs(:last_billing_cycle).returns(Date.today)
    # (DateTime.current + 1) << 1 is "tomorrow minus one month" (Date#<< subtracts months).
    @user.period_end_date = (DateTime.current + 1) << 1
    @user.save.reload
  end

  it "should return the sum of data observatory snapshot rows for the current billing period" do
    @usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
    @usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
    @user.get_obs_snapshot_calls.should eq 10
  end

  it "should return the sum of data observatory snapshot rows for the specified period" do
    @usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
    @usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
    @usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 7))
    @user.get_obs_snapshot_calls(from: Time.now - 5.days).should eq 110
    @user.get_obs_snapshot_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
  end

  # NOTE(review): description fixed — it was copy-pasted from the isolines spec.
  it "should return 0 when no data observatory snapshot actions" do
    @user.get_obs_snapshot_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
  end
end
# Data Observatory "general" usage accounting via redis-backed usage metrics.
describe '#get_obs_general_calls' do
before do
delete_user_data @user
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::ObservatoryGeneralUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::ObservatoryGeneralUsageMetrics.stubs(:new).returns(@usage_metrics)
@user.stubs(:last_billing_cycle).returns(Date.today)
# (DateTime.current + 1) << 1 is "tomorrow minus one month" (Date#<<
# subtracts months).
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of data observatory general rows for the current billing period" do
@usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
@user.get_obs_general_calls.should eq 10
end
it "should return the sum of data observatory general rows for the specified period" do
@usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 7))
@user.get_obs_general_calls(from: Time.now - 5.days).should eq 110
@user.get_obs_general_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no data observatory general actions" do
@user.get_obs_general_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
# When an organization member is deleted, their twitter-import counters
# must be transferred to the organization owner.
describe "organization user deletion" do
it "should transfer tweet imports to owner" do
u1 = create_user(email: 'u1@exampleb.com', username: 'ub1', password: 'admin123')
org = create_org('cartodbtestb', 1234567890, 5)
u1.organization = org
u1.save
u1.reload
org = u1.organization
org.owner_id = u1.id
org.save
u1.reload
u2 = create_user(email: 'u2@exampleb.com', username: 'ub2', password: 'admin123', organization: org)
tweet_attributes = {
user: u2,
table_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
data_import_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
service_item_id: '555',
state: ::SearchTweet::STATE_COMPLETE
}
st1 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 5))
st2 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 10))
u1.reload
u2.reload
# Before deletion the member owns the counts and the owner has none.
u2.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
u1.get_twitter_imports_count.should == 0
u2.destroy
u1.reload
# After deletion the counts belong to the organization owner.
u1.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
org.destroy
end
end
# Table association and quota bookkeeping for @user2.
it "should have many tables" do
@user2.tables.should be_empty
create_table :user_id => @user2.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
@user2.reload
@user2.tables.all.should == [UserTable.first(:user_id => @user2.id)]
end
# Pending example (no body yet).
it "should generate a data report"
# remaining_table_quota moves with table creation/destruction and
# remaining_quota (bytes) shrinks as rows are inserted.
it "should update remaining quotas when adding or removing tables" do
initial_quota = @user2.remaining_quota
expect { create_table :user_id => @user2.id, :privacy => UserTable::PRIVACY_PUBLIC }
.to change { @user2.remaining_table_quota }.by(-1)
table = Table.new(user_table: UserTable.filter(:user_id => @user2.id).first)
50.times { |i| table.insert_row!(:name => "row #{i}") }
@user2.remaining_quota.should be < initial_quota
initial_quota = @user2.remaining_quota
expect { table.destroy }
.to change { @user2.remaining_table_quota }.by(1)
@user2.remaining_quota.should be > initial_quota
end
# Each account gets its own database and database role at creation time.
# NOTE(review): description grammar fixed ("should has his" -> "should have its").
it "should have its own database, created when the account is created" do
  @user.database_name.should == "cartodb_test_user_#{@user.id}_db"
  @user.database_username.should == "test_cartodb_user_#{@user.id}"
  @user.in_database.test_connection.should == true
end
# Schema provisioning in the user database: cdb_importer must exist and be
# owner-only; the legacy cdb schema examples are pending.
it 'creates an importer schema in the user database' do
@user.in_database[%Q(SELECT * FROM pg_namespace)]
.map { |record| record.fetch(:nspname) }
.should include 'cdb_importer'
end
it 'creates a cdb schema in the user database' do
pending "I believe cdb schema was never used"
@user.in_database[%Q(SELECT * FROM pg_namespace)]
.map { |record| record.fetch(:nspname) }
.should include 'cdb'
end
# Owner can read cdb_importer tables; the public role must be denied.
it 'allows access to the importer schema by the owner' do
@user.in_database.run(%Q{
CREATE TABLE cdb_importer.bogus ( bogus varchar(40) )
})
query = %Q(SELECT * FROM cdb_importer.bogus)
expect { @user.in_database(as: :public_user)[query].to_a }
.to raise_error(Sequel::DatabaseError)
@user.in_database[query].to_a
end
it 'allows access to the cdb schema by the owner' do
pending "I believe cdb schema was never used"
@user.in_database.run(%Q{
CREATE TABLE cdb.bogus ( bogus varchar(40) )
})
query = %Q(SELECT * FROM cdb.bogus)
expect { @user.in_database(as: :public_user)[query].to_a }
.to raise_error(Sequel::DatabaseError)
@user.in_database[query].to_a
end
# Database-role isolation: each user's credentials connect only to that
# user's database; cross-database connections must fail.
# NOTE(review): description typos fixed ("dabase", "it's"); duplicated
# connection setup extracted into a local lambda.
it "should create a database user that only can read its own database" do
  connect_as = lambda do |database_name, username, password|
    ::Sequel.connect(
      ::SequelRails.configuration.environment_for(Rails.env).merge(
        'database' => database_name, :logger => ::Rails.logger,
        'username' => username, 'password' => password
      )
    )
  end

  # @user credentials against @user's database: must succeed.
  connection = connect_as.call(@user.database_name, @user.database_username, @user.database_password)
  connection.test_connection.should == true
  connection.disconnect
  connection = nil

  # @user credentials against @user2's database: must fail.
  connection = connect_as.call(@user2.database_name, @user.database_username, @user.database_password)
  begin
    connection.test_connection
    true.should_not be_true
  rescue
    true.should be_true
  ensure
    connection.disconnect
  end

  # @user2 credentials against @user2's database: must succeed.
  connection = connect_as.call(@user2.database_name, @user2.database_username, @user2.database_password)
  connection.test_connection.should == true
  connection.disconnect

  # @user2 credentials against @user's database: must fail.
  connection = connect_as.call(@user.database_name, @user2.database_username, @user2.database_password)
  begin
    connection.test_connection
    true.should_not be_true
  rescue
    true.should be_true
  ensure
    connection.disconnect
  end
end
# db_service.run_pg_query: result hash carries :time, :total_rows and :rows
# (symbol-keyed); invalid SQL raises CartoDB::ErrorRunningQuery.
it "should run valid queries against his database" do
# initial select tests
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
# update and reselect
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
# check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
# test a product
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
it "should raise errors when running invalid queries against his database" do
lambda {
@user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::ErrorRunningQuery)
end
# run_pg_query in pg mode additionally reports :results (row-returning
# statement) and :modified (write statement) flags, and maps PG errors to
# specific CartoDB exception classes.
it "should run valid queries against his database in pg mode" do
reload_user_data(@user) && @user.reload
# initial select tests
# tests results and modified flags
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
query_result[:results].should == true
query_result[:modified].should == false
# update and reselect
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
query_result[:modified].should == true
query_result[:results].should == false
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
query_result[:modified].should == false
query_result[:results].should == true
# # check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
query_result[:results].should == true
# test a product
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
it "should raise errors when running invalid queries against his database in pg mode" do
lambda {
@user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::ErrorRunningQuery)
end
it "should raise errors when invalid table name used in pg mode" do
lambda {
@user.db_service.run_pg_query("select * from this_table_is_not_here where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::TableNotExists)
end
it "should raise errors when invalid column used in pg mode" do
lambda {
@user.db_service.run_pg_query("select not_a_col from import_csv_1 where family='Polynoidae' limit 10")
}.should raise_error(CartoDB::ColumnNotExists)
end
# Every user gets an OAuth client_application at creation, and resetting
# it must rotate the key.
it "should create a client_application for each user" do
  @user.client_application.should_not be_nil
end

it "should reset its client application" do
  previous_key = @user.client_application.key
  @user.reset_client_application!
  @user.reload
  @user.client_application.key.should_not == previous_key
end
# Multi-statement queries: only the last SELECT's result set is returned;
# insert-then-select works; unknown tables raise CartoDB::TableNotExists.
it "should return the result from the last select query if multiple selects" do
reload_user_data(@user) && @user.reload
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 1; select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
end
it "should allow multiple queries in the format: insert_query; select_query" do
query_result = @user.db_service.run_pg_query("insert into import_csv_1 (name_of_species,family) values ('cristata barrukia','Polynoidae'); select * from import_csv_1 where family='Polynoidae' ORDER BY name_of_species ASC limit 10")
query_result[:total_rows].should == 3
query_result[:rows].map { |i| i[:name_of_species] }.should =~ ["Barrukia cristata", "Eulagisca gigantea", "cristata barrukia"]
end
it "should fail with error if table doesn't exist" do
reload_user_data(@user) && @user.reload
lambda {
@user.db_service.run_pg_query("select * from wadus")
}.should raise_error(CartoDB::TableNotExists)
end
# Redis users-metadata hash key format for a user.
it "should have a method that generates users redis users_metadata key" do
@user.key.should == "rails:users:#{@user.username}"
end
# User metadata is mirrored into the $users_metadata redis hash both on
# save and on initial creation.
it "replicates some user metadata in redis after saving" do
@user.stubs(:database_name).returns('wadus')
@user.save
$users_metadata.HGET(@user.key, 'id').should == @user.id.to_s
$users_metadata.HGET(@user.key, 'database_name').should == 'wadus'
$users_metadata.HGET(@user.key, 'database_password').should == @user.database_password
$users_metadata.HGET(@user.key, 'database_host').should == @user.database_host
$users_metadata.HGET(@user.key, 'map_key').should == @user.api_key
end
it "should store its metadata automatically after creation" do
user = FactoryGirl.create :user
$users_metadata.HGET(user.key, 'id').should == user.id.to_s
$users_metadata.HGET(user.key, 'database_name').should == user.database_name
$users_metadata.HGET(user.key, 'database_password').should == user.database_password
$users_metadata.HGET(user.key, 'database_host').should == user.database_host
$users_metadata.HGET(user.key, 'map_key').should == user.api_key
user.destroy
end
# Redis limits-metadata hash key format for a user.
it "should have a method that generates users redis limits metadata key" do
@user.timeout_key.should == "limits:timeout:#{@user.username}"
end
# Saving timeout limits must both mirror them into the redis limits hash
# ('db'/'db_public' for statement timeouts, 'render'/'render_public' for
# render timeouts) and, for db timeouts, apply them to the PG roles.
it "replicates db timeout limits in redis after saving and applies them to db" do
@user.user_timeout = 200007
@user.database_timeout = 100007
@user.save
$users_metadata.HGET(@user.timeout_key, 'db').should == '200007'
$users_metadata.HGET(@user.timeout_key, 'db_public').should == '100007'
@user.in_database do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200007ms' })
end
@user.in_database(as: :public_user) do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100007ms' })
end
end
it "replicates render timeout limits in redis after saving" do
@user.user_render_timeout = 200001
@user.database_render_timeout = 100001
@user.save
$users_metadata.HGET(@user.timeout_key, 'render').should == '200001'
$users_metadata.HGET(@user.timeout_key, 'render_public').should == '100001'
end
# The same redis/db propagation must happen on user creation, not just
# on later saves.
it "should store db timeout limits in redis after creation" do
user = FactoryGirl.create :user, user_timeout: 200002, database_timeout: 100002
user.user_timeout.should == 200002
user.database_timeout.should == 100002
$users_metadata.HGET(user.timeout_key, 'db').should == '200002'
$users_metadata.HGET(user.timeout_key, 'db_public').should == '100002'
user.in_database do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200002ms' })
end
user.in_database(as: :public_user) do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100002ms' })
end
user.destroy
end
it "should store render timeout limits in redis after creation" do
user = FactoryGirl.create :user, user_render_timeout: 200003, database_render_timeout: 100003
user.reload
user.user_render_timeout.should == 200003
user.database_render_timeout.should == 100003
$users_metadata.HGET(user.timeout_key, 'render').should == '200003'
$users_metadata.HGET(user.timeout_key, 'render_public').should == '100003'
user.destroy
end
# Defaults: db statement timeouts are non-zero and applied to the PG
# roles; render timeouts default to 0 (unlimited) in redis.
it "should have valid non-zero db timeout limits by default" do
user = FactoryGirl.create :user
user.user_timeout.should > 0
user.database_timeout.should > 0
$users_metadata.HGET(user.timeout_key, 'db').should == user.user_timeout.to_s
$users_metadata.HGET(user.timeout_key, 'db_public').should == user.database_timeout.to_s
user.in_database do |db|
result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
result.first.should eq(setting: user.user_timeout.to_s)
end
user.in_database(as: :public_user) do |db|
result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
result.first.should eq(setting: user.database_timeout.to_s)
end
user.destroy
end
it "should have zero render timeout limits by default" do
user = FactoryGirl.create :user
user.user_render_timeout.should eq 0
user.database_render_timeout.should eq 0
$users_metadata.HGET(user.timeout_key, 'render').should eq '0'
$users_metadata.HGET(user.timeout_key, 'render_public').should eq '0'
user.destroy
end
# A plain save (no credential changes) must leave the api_key untouched.
it "should not regenerate the api_key after saving" do
  lambda { @user.save }.should_not change { @user.api_key }
end
# Destroying a user must clear both its users_metadata and limits hashes
# from redis. Keys are captured before destroy since the model is gone after.
it "should remove its metadata from redis after deletion" do
doomed_user = create_user :email => 'doomed@example.com', :username => 'doomed', :password => 'doomed123'
$users_metadata.HGET(doomed_user.key, 'id').should == doomed_user.id.to_s
$users_metadata.HGET(doomed_user.timeout_key, 'db').should_not be_nil
$users_metadata.HGET(doomed_user.timeout_key, 'db_public').should_not be_nil
key = doomed_user.key
timeout_key = doomed_user.timeout_key
doomed_user.destroy
$users_metadata.HGET(key, 'id').should be_nil
$users_metadata.HGET(timeout_key, 'db').should be_nil
$users_metadata.HGET(timeout_key, 'db_public').should be_nil
$users_metadata.HGET(timeout_key, 'render').should be_nil
$users_metadata.HGET(timeout_key, 'render_public').should be_nil
end
# Destroying a user must drop its PG database and role (checked against the
# pg_catalog), and purge its Varnish cache.
it "should remove its database and database user after deletion" do
doomed_user = create_user :email => 'doomed1@example.com', :username => 'doomed1', :password => 'doomed123'
create_table :user_id => doomed_user.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
doomed_user.reload
SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
.first[:count].should == 1
SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
.first[:count].should == 1
doomed_user.destroy
SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
.first[:count].should == 0
SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
.first[:count].should == 0
end
it "should invalidate its Varnish cache after deletion" do
doomed_user = create_user :email => 'doomed2@example.com', :username => 'doomed2', :password => 'doomed123'
CartoDB::Varnish.any_instance.expects(:purge).with("#{doomed_user.database_name}.*").at_least(2).returns(true)
doomed_user.destroy
end
# Destroying a user must cascade to its data imports, user tables and
# layer links, and purge both database-wide and per-visualization vizjson
# Varnish entries.
it "should remove its user tables, layers and data imports after deletion" do
doomed_user = create_user(email: 'doomed2@example.com', username: 'doomed2', password: 'doomed123')
data_import = DataImport.create(user_id: doomed_user.id, data_source: fake_data_path('clubbing.csv')).run_import!
doomed_user.add_layer Layer.create(kind: 'carto')
table_id = data_import.table_id
uuid = UserTable.where(id: table_id).first.table_visualization.id
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{doomed_user.database_name}.*")
.at_least(1)
.returns(true)
CartoDB::Varnish.any_instance.expects(:purge)
.with(".*#{uuid}:vizjson")
.at_least_once
.returns(true)
doomed_user.destroy
DataImport.where(user_id: doomed_user.id).count.should == 0
UserTable.where(user_id: doomed_user.id).count.should == 0
Layer.db["SELECT * from layers_users WHERE user_id = '#{doomed_user.id}'"].count.should == 0
end
# last_billing_cycle derives the most recent cycle start from
# period_end_date's day-of-month, relative to a frozen "today" (Delorean).
# Includes the month-end clamp case (day 31 in February -> Feb 28).
it "should correctly identify last billing cycle" do
user = create_user :email => 'example@example.com', :username => 'example', :password => 'testingbilling'
Delorean.time_travel_to(Date.parse("2013-01-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-15"))
user.last_billing_cycle.should == Date.parse("2012-12-15")
end
Delorean.time_travel_to(Date.parse("2013-01-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
user.last_billing_cycle.should == Date.parse("2012-12-02")
end
Delorean.time_travel_to(Date.parse("2013-03-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-31"))
user.last_billing_cycle.should == Date.parse("2013-02-28")
end
Delorean.time_travel_to(Date.parse("2013-03-15")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
user.last_billing_cycle.should == Date.parse("2013-03-02")
end
user.destroy
Delorean.back_to_the_present
end
# trial_ends_at: nil unless the user upgraded recently on a trial-eligible
# plan (MAGELLAN here; CORONELLI has no trial).
it "should calculate the trial end date" do
@user.stubs(:upgraded_at).returns(nil)
@user.trial_ends_at.should be_nil
@user.stubs(:upgraded_at).returns(Time.now - 5.days)
@user.stubs(:account_type).returns('CORONELLI')
@user.trial_ends_at.should be_nil
@user.stubs(:account_type).returns('MAGELLAN')
@user.trial_ends_at.should_not be_nil
@user.stubs(:upgraded_at).returns(nil)
@user.trial_ends_at.should be_nil
# Upgraded just inside the trial window -> still in trial.
@user.stubs(:upgraded_at).returns(Time.now - (::User::TRIAL_DURATION_DAYS - 1).days)
@user.trial_ends_at.should_not be_nil
end
# hard_geocoding_limit? semantics: hard limit is the plan default for
# AMBASSADOR/FREE/enterprise plans, soft for CORONELLI/MERCATOR, and the
# soft_geocoding_limit column (hard = !soft) overrides the plan default.
describe '#hard_geocoding_limit?' do
it 'returns true when the plan is AMBASSADOR or FREE unless it has been manually set to false' do
# Column unset -> plan default applies.
@user[:soft_geocoding_limit].should be_nil
@user.stubs(:account_type).returns('AMBASSADOR')
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
@user.stubs(:account_type).returns('FREE')
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
# Manual override: hard=false persists soft_geocoding_limit.
@user.hard_geocoding_limit = false
@user[:soft_geocoding_limit].should_not be_nil
@user.stubs(:account_type).returns('AMBASSADOR')
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
@user.stubs(:account_type).returns('FREE')
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
end
it 'returns true when for enterprise accounts unless it has been manually set to false' do
['ENTERPRISE', 'ENTERPRISE LUMP-SUM', 'Enterprise Medium Lumpsum AWS'].each do |account_type|
@user.stubs(:account_type).returns(account_type)
@user.soft_geocoding_limit = nil
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
@user.soft_geocoding_limit = true
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
end
end
it 'returns false when the plan is CORONELLI or MERCATOR unless it has been manually set to true' do
@user.stubs(:account_type).returns('CORONELLI')
@user.hard_geocoding_limit?.should be_false
@user.stubs(:account_type).returns('MERCATOR')
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit = true
@user.stubs(:account_type).returns('CORONELLI')
@user.hard_geocoding_limit?.should be_true
@user.stubs(:account_type).returns('MERCATOR')
@user.hard_geocoding_limit?.should be_true
end
end
describe '#hard_here_isolines_limit?' do
  before(:each) do
    @user_account = create_user
  end

  it 'returns true with every plan unless it has been manually set to false' do
    # The flag starts unset in the DB
    @user_account[:soft_here_isolines_limit].should be_nil
    # Hard limit is the default regardless of plan
    %w(AMBASSADOR FREE).each do |plan|
      @user_account.stubs(:account_type).returns(plan)
      @user_account.soft_here_isolines_limit?.should be_false
      @user_account.soft_here_isolines_limit.should be_false
      @user_account.hard_here_isolines_limit?.should be_true
      @user_account.hard_here_isolines_limit.should be_true
    end
    # Manually disabling the hard limit persists the soft flag...
    @user_account.hard_here_isolines_limit = false
    @user_account[:soft_here_isolines_limit].should_not be_nil
    # ...and the soft limit now wins for every plan
    %w(AMBASSADOR FREE).each do |plan|
      @user_account.stubs(:account_type).returns(plan)
      @user_account.soft_here_isolines_limit?.should be_true
      @user_account.soft_here_isolines_limit.should be_true
      @user_account.hard_here_isolines_limit?.should be_false
      @user_account.hard_here_isolines_limit.should be_false
    end
  end
end
describe '#hard_obs_snapshot_limit?' do
  before(:each) do
    @user_account = create_user
  end

  it 'returns true with every plan unless it has been manually set to false' do
    # The flag starts unset in the DB
    @user_account[:soft_obs_snapshot_limit].should be_nil
    # Hard limit is the default regardless of plan
    %w(AMBASSADOR FREE).each do |plan|
      @user_account.stubs(:account_type).returns(plan)
      @user_account.soft_obs_snapshot_limit?.should be_false
      @user_account.soft_obs_snapshot_limit.should be_false
      @user_account.hard_obs_snapshot_limit?.should be_true
      @user_account.hard_obs_snapshot_limit.should be_true
    end
    # Manually disabling the hard limit persists the soft flag...
    @user_account.hard_obs_snapshot_limit = false
    @user_account[:soft_obs_snapshot_limit].should_not be_nil
    # ...and the soft limit now wins for every plan
    %w(AMBASSADOR FREE).each do |plan|
      @user_account.stubs(:account_type).returns(plan)
      @user_account.soft_obs_snapshot_limit?.should be_true
      @user_account.soft_obs_snapshot_limit.should be_true
      @user_account.hard_obs_snapshot_limit?.should be_false
      @user_account.hard_obs_snapshot_limit.should be_false
    end
  end
end
describe '#hard_obs_general_limit?' do
  before(:each) do
    @user_account = create_user
  end

  it 'returns true with every plan unless it has been manually set to false' do
    # The flag starts unset in the DB
    @user_account[:soft_obs_general_limit].should be_nil
    # Hard limit is the default regardless of plan
    %w(AMBASSADOR FREE).each do |plan|
      @user_account.stubs(:account_type).returns(plan)
      @user_account.soft_obs_general_limit?.should be_false
      @user_account.soft_obs_general_limit.should be_false
      @user_account.hard_obs_general_limit?.should be_true
      @user_account.hard_obs_general_limit.should be_true
    end
    # Manually disabling the hard limit persists the soft flag...
    @user_account.hard_obs_general_limit = false
    @user_account[:soft_obs_general_limit].should_not be_nil
    # ...and the soft limit now wins for every plan
    %w(AMBASSADOR FREE).each do |plan|
      @user_account.stubs(:account_type).returns(plan)
      @user_account.soft_obs_general_limit?.should be_true
      @user_account.soft_obs_general_limit.should be_true
      @user_account.hard_obs_general_limit?.should be_false
      @user_account.hard_obs_general_limit.should be_false
    end
  end
end
describe '#shared_tables' do
  it 'Checks that shared tables include not only owned ones' do
    require_relative '../../app/models/visualization/collection'
    CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
    bypass_named_maps
    # No need to really touch the DB for the permissions
    Table::any_instance.stubs(:add_read_permission).returns(nil)
    # We're leaking tables from some tests, make sure there are no tables
    @user.tables.all.each { |t| t.destroy }
    @user2.tables.all.each { |t| t.destroy }
    # Small local factory to keep the fixture setup terse
    build_table = lambda do |owner, name|
      t = Table.new
      t.user_id = owner.id
      t.name = name unless name.nil?
      t.save.reload
      t
    end
    build_table.call(@user, nil)
    build_table.call(@user, nil)
    table3 = build_table.call(@user2, 'sharedtable')
    table4 = build_table.call(@user2, 'table4')
    # Only owned tables
    user_tables = tables_including_shared(@user)
    user_tables.count.should eq 2
    # Grant permission
    user2_vis = CartoDB::Visualization::Collection.new.fetch(user_id: @user2.id, name: table3.name).first
    permission = user2_vis.permission
    permission.acl = [
      {
        type: CartoDB::Permission::TYPE_USER,
        entity: {
          id: @user.id,
          username: @user.username
        },
        access: CartoDB::Permission::ACCESS_READONLY
      }
    ]
    permission.save
    # Now owned + shared: table3 must be visible, table4 must not
    user_tables = tables_including_shared(@user)
    user_tables.count.should eq 3
    visible_ids = user_tables.map { |item| item.id }
    visible_ids.include?(table3.id).should eq true
    visible_ids.include?(table4.id).should eq false
    # Cleanup
    @user.tables.all.each { |t| t.destroy }
    @user2.tables.all.each { |t| t.destroy }
  end
end
describe '#destroy' do
it 'deletes database role' do
u1 = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
role = u1.database_username
db = u1.in_database
db_service = u1.db_service
db_service.role_exists?(db, role).should == true
u1.destroy
# Once the user is destroyed its PG role is dropped, so the query inside
# role_exists? itself raises rather than returning false.
expect do
db_service.role_exists?(db, role).should == false
end.to raise_error(/role "#{role}" does not exist/)
db.disconnect
end
it 'deletes api keys' do
user = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
api_key = FactoryGirl.create(:api_key_apis, user_id: user.id)
user.destroy
# Both the DB record and its Redis counterpart must be gone
expect(Carto::ApiKey.exists?(api_key.id)).to be_false
expect($users_metadata.exists(api_key.send(:redis_key))).to be_false
end
describe "on organizations" do
include_context 'organization with users helper'
it 'deletes database role' do
role = @org_user_1.database_username
db = @org_user_1.in_database
db_service = @org_user_1.db_service
db_service.role_exists?(db, role).should == true
@org_user_1.destroy
# Same as the non-org case: the dropped role makes the check itself raise
expect do
db_service.role_exists?(db, role).should == false
end.to raise_error(/role "#{role}" does not exist/)
db.disconnect
end
it 'deletes temporary analysis tables' do
db = @org_user_2.in_database
db.run('CREATE TABLE analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e (a int)')
db.run(%{INSERT INTO cdb_analysis_catalog (username, cache_tables, node_id, analysis_def)
VALUES ('#{@org_user_2.username}', '{analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e}', 'a0', '{}')})
@org_user_2.destroy
# Catalog entries referencing the destroyed user must have been cleaned up
db = @org_user_owner.in_database
db["SELECT COUNT(*) FROM cdb_analysis_catalog WHERE username='#{@org_user_2.username}'"].first[:count].should eq 0
end
describe 'User#destroy' do
include TableSharing
it 'blocks deletion with shared entities' do
@not_to_be_deleted = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
table = create_random_table(@not_to_be_deleted)
share_table_with_user(table, @org_user_owner)
expect { @not_to_be_deleted.destroy }.to raise_error(/Cannot delete user, has shared entities/)
::User[@not_to_be_deleted.id].should be
end
it 'deletes api keys and associated roles' do
user = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
api_key = FactoryGirl.create(:api_key_apis, user_id: user.id)
user.destroy
expect(Carto::ApiKey.exists?(api_key.id)).to be_false
expect($users_metadata.exists(api_key.send(:redis_key))).to be_false
# The PG role backing the API key must be dropped as well
expect(
@org_user_owner.in_database["SELECT 1 FROM pg_roles WHERE rolname = '#{api_key.db_role}'"].first
).to be_nil
end
it 'deletes client_application and friends' do
user = create_user(email: 'clientapp@example.com', username: 'clientapp', password: @user_password)
user.create_client_application
user.client_application.access_tokens << ::AccessToken.new(
token: "access_token",
secret: "access_secret",
callback_url: "http://callback2",
verifier: "v2",
scope: nil,
client_application_id: user.client_application.id
).save
# NOTE(review): "http//callback.com" below is missing a colon — presumably
# harmless fixture data, but confirm it is intentional.
user.client_application.oauth_tokens << ::OauthToken.new(
token: "oauth_token",
secret: "oauth_secret",
callback_url: "http//callback.com",
verifier: "v1",
scope: nil,
client_application_id: user.client_application.id
).save
base_key = "rails:oauth_access_tokens:#{user.client_application.access_tokens.first.token}"
client_application = ClientApplication.where(user_id: user.id).first
# NOTE(review): count is 2 right after creating a single app plus its tokens —
# presumably token rows share the client_applications table (STI); verify.
expect(ClientApplication.where(user_id: user.id).count).to eq 2
expect(client_application.tokens).to_not be_empty
expect(client_application.tokens.length).to eq 2
$api_credentials.keys.should include(base_key)
user.destroy
# App, tokens and Redis credentials should all be purged on destroy
expect(ClientApplication.where(user_id: user.id).first).to be_nil
expect(AccessToken.where(user_id: user.id).first).to be_nil
expect(OauthToken.where(user_id: user.id).first).to be_nil
$api_credentials.keys.should_not include(base_key)
end
end
end
end
describe 'User#destroy_cascade' do
include_context 'organization with users helper'
include TableSharing
it 'allows deletion even with shared entities' do
table = create_random_table(@org_user_1)
# NOTE(review): the table is shared with its own owner here; presumably a
# share with another org member was intended — confirm the fixture still
# produces a shared entity either way.
share_table_with_user(table, @org_user_1)
@org_user_1.destroy_cascade
::User[@org_user_1.id].should_not be
end
end
describe '#destroy_restrictions' do
it 'Checks some scenarios upon user destruction regarding organizations' do
u1 = create_user(email: 'u1@example.com', username: 'u1', password: 'admin123')
u2 = create_user(email: 'u2@example.com', username: 'u2', password: 'admin123')
org = create_org('cartodb', 1234567890, 5)
# Make u1 a member, then promote it to organization owner
u1.organization = org
u1.save
u1.reload
u1.organization.nil?.should eq false
org = u1.organization
org.owner_id = u1.id
org.save
u1.reload
u1.organization.owner.id.should eq u1.id
# Add a second member so the organization is not empty
u2.organization = org
u2.save
u2.reload
u2.organization.nil?.should eq false
u2.reload
# Cannot remove as more users depend on the org
expect {
u1.destroy
}.to raise_exception CartoDB::BaseCartoDBError
org.destroy
end
end
describe '#cartodb_postgresql_extension_versioning' do
  # Extension versions < 0.3.0 predate multi-user (pre-MU) support.
  it 'should report pre multi user for known <0.3.0 versions' do
    before_mu_known_versions = %w(0.1.0 0.1.1 0.2.0 0.2.1)
    before_mu_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, true)
    end
  end

  it 'should report post multi user for >=0.3.0 versions' do
    after_mu_known_versions = %w(0.3.0 0.3.1 0.3.2 0.3.3 0.3.4 0.3.5 0.4.0 0.5.5 0.10.0)
    after_mu_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report post multi user for versions with minor<3 but major>0' do
    minor_version_edge_cases = %w(1.0.0 1.0.1 1.2.0 1.2.1 1.3.0 1.4.4)
    minor_version_edge_cases.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  # Old-style version strings are "<version> <git describe>" pairs.
  # NOTE: the next two examples previously shared the exact same description,
  # which made failure reports ambiguous; they are now disambiguated.
  it 'should report correct version with old version strings (pre multi user)' do
    before_mu_old_known_versions = [
      '0.1.0 0.1.0',
      '0.1.1 0.1.1',
      '0.2.0 0.2.0',
      '0.2.1 0.2.1'
    ]
    before_mu_old_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, true)
    end
  end

  it 'should report correct version with old version strings (post multi user)' do
    after_mu_old_known_versions = [
      '0.3.0 0.3.0',
      '0.3.1 0.3.1',
      '0.3.2 0.3.2',
      '0.3.3 0.3.3',
      '0.3.4 0.3.4',
      '0.3.5 0.3.5',
      '0.4.0 0.4.0',
      '0.5.5 0.5.5',
      '0.10.0 0.10.0'
    ]
    after_mu_old_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report correct version with `git describe` not being a tag' do
    stub_and_check_version_pre_mu('0.2.1 0.2.0-8-g7840e7c', true)
    after_mu_old_known_versions = [
      '0.3.6 0.3.5-8-g7840e7c',
      '0.4.0 0.3.6-8-g7840e7c'
    ]
    after_mu_old_known_versions.each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  # Stubs the reported extension version string and asserts the pre-MU predicate.
  def stub_and_check_version_pre_mu(version, is_pre_mu)
    @user.db_service.stubs(:cartodb_extension_version).returns(version)
    @user.db_service.cartodb_extension_version_pre_mu?.should eq is_pre_mu
  end
end
# INFO: since the user can also be created in Central, and that can fail, we need to request the notification explicitly. See #3022 for more info
it "can notify a new user creation" do
# Avoid actually enqueuing background jobs during setup
::Resque.stubs(:enqueue).returns(nil)
@account_type_org = create_account_type_fg('ORGANIZATION USER')
organization = create_organization_with_owner(quota_in_bytes: 1000.megabytes)
user1 = new_user(username: 'test',
email: "client@example.com",
organization: organization,
organization_id: organization.id,
quota_in_bytes: 20.megabytes,
account_type: 'ORGANIZATION USER')
user1.id = UUIDTools::UUID.timestamp_create.to_s
# The mail job must be enqueued exactly once, and only by the explicit
# notify call below — an enqueue during save would break the `.once`.
::Resque.expects(:enqueue).with(::Resque::UserJobs::Mail::NewOrganizationUser, user1.id).once
user1.save
# INFO: if user must be synched with a remote server it should happen before notifying
user1.notify_new_organization_user
organization.destroy
end
it "Tests password change" do
new_valid_password = '000123456'
old_crypted_password = @user.crypted_password
# Wrong old password
@user.change_password('aaabbb', new_valid_password, new_valid_password)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid") # "to_s" of validation msg
# New password does not match its confirmation
@user.change_password(@user_password, 'aaabbb', 'bbbaaa')
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password New password doesn't match confirmation")
# Both errors at once: wrong old password AND mismatched confirmation
@user.change_password('aaaaaa', 'aaabbb', 'bbbaaa')
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password doesn't match confirmation")
# New password too short (< 6 characters)
@user.change_password(@user_password, 'tiny', 'tiny')
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password Must be at least 6 characters long")
# New password too long (> 64 characters)
long_password = 'long' * 20
@user.change_password(@user_password, long_password, long_password)
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password Must be at most 64 characters long")
# Blank new password combined with a wrong old password
@user.change_password('aaaaaa', nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
# Blank new password with the correct old password
@user.change_password(@user_password, nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password New password can't be blank")
# Everything blank
@user.change_password(nil, nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
# Missing old password only
@user.change_password(nil, new_valid_password, new_valid_password)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid")
# Valid change: the stored hash must actually change
@user.change_password(@user_password, new_valid_password, new_valid_password)
@user.valid?.should eq true
@user.save
new_crypted_password = @user.crypted_password
(old_crypted_password != new_crypted_password).should eq true
# Changing back restores the original hash
@user.change_password(new_valid_password, @user_password, @user_password)
@user.valid?.should eq true
@user.save
@user.crypted_password.should eq old_crypted_password
# Re-setting the same password keeps last_password_change_date untouched
last_password_change_date = @user.last_password_change_date
@user.change_password(@user_password, @user_password, @user_password)
@user.save
@user.last_password_change_date.should eq last_password_change_date
end
describe "when user is signed up with google sign-in and don't have any password yet" do
before(:each) do
# Simulate a Google-signup user that never set a password
@user.google_sign_in = true
@user.last_password_change_date = nil
@user.save
@user.needs_password_confirmation?.should == false
new_valid_password = '000123456'
# The old password argument is irrelevant while no password was ever set
@user.change_password("doesn't matter in this case", new_valid_password, new_valid_password)
# Once a password exists, confirmation is required again
@user.needs_password_confirmation?.should == true
end
it 'should allow updating password w/o a current password' do
@user.valid?.should eq true
@user.save
end
it 'should have updated last password change date' do
@user.last_password_change_date.should_not eq nil
@user.save
end
end
describe "#purge_redis_vizjson_cache" do
  it "shall iterate on the user's visualizations and purge their redis cache" do
    # Create a few tables with their default vizs
    3.times do
      table = Table.new
      table.user_id = @user.id
      table.save
    end
    collection = CartoDB::Visualization::Collection.new.fetch(user_id: @user.id)
    redis_spy = RedisDoubles::RedisSpy.new
    redis_vizjson_cache = CartoDB::Visualization::RedisVizjsonCache.new
    redis_embed_cache = EmbedRedisCache.new
    CartoDB::Visualization::RedisVizjsonCache.any_instance.stubs(:redis).returns(redis_spy)
    EmbedRedisCache.any_instance.stubs(:redis).returns(redis_spy)
    # Expected keys: http/https variants for every vizjson version of each viz
    redis_vizjson_keys = collection.flat_map do |v|
      [
        redis_vizjson_cache.key(v.id, false), redis_vizjson_cache.key(v.id, true),
        redis_vizjson_cache.key(v.id, false, 3), redis_vizjson_cache.key(v.id, true, 3),
        redis_vizjson_cache.key(v.id, false, '3n'), redis_vizjson_cache.key(v.id, true, '3n'),
        redis_vizjson_cache.key(v.id, false, '3a'), redis_vizjson_cache.key(v.id, true, '3a')
      ]
    end
    redis_vizjson_keys.should_not be_empty
    redis_embed_keys = collection.flat_map do |v|
      [redis_embed_cache.key(v.id, false), redis_embed_cache.key(v.id, true)]
    end
    redis_embed_keys.should_not be_empty
    @user.purge_redis_vizjson_cache
    # All keys deleted, nothing else, in exactly two del calls (one per cache)
    redis_spy.deleted.should include(*redis_vizjson_keys)
    redis_spy.deleted.should include(*redis_embed_keys)
    redis_spy.deleted.count.should eq redis_vizjson_keys.count + redis_embed_keys.count
    redis_spy.invokes(:del).count.should eq 2
    redis_spy.invokes(:del).map(&:sort).should include(redis_vizjson_keys.sort)
    redis_spy.invokes(:del).map(&:sort).should include(redis_embed_keys.sort)
  end

  it "shall not fail if the user does not have visualizations" do
    user = create_user
    collection = CartoDB::Visualization::Collection.new.fetch(user_id: user.id)
    # 'http' keys
    redis_keys = collection.map(&:redis_vizjson_key)
    redis_keys.should be_empty
    # 'https' keys
    redis_keys = collection.map { |item| item.redis_vizjson_key(true) }
    redis_keys.should be_empty
    # With no keys to purge, the cache must never be touched
    CartoDB::Visualization::Member.expects(:redis_cache).never
    user.purge_redis_vizjson_cache
    user.destroy
  end
end
describe "#regressions" do
it "Tests geocodings and data import FK not breaking user destruction" do
user = create_user
user_id = user.id
data_import_id = '11111111-1111-1111-1111-111111111111'
# Insert raw rows so that a geocoding references a data_import and both
# reference the user about to be destroyed (reproduces the FK regression).
SequelRails.connection.run(%Q{
INSERT INTO data_imports("data_source","data_type","table_name","state","success","logger","updated_at",
"created_at","tables_created_count",
"table_names","append","id","table_id","user_id",
"service_name","service_item_id","stats","type_guessing","quoted_fields_guessing","content_guessing","server","host",
"resque_ppid","upload_host","create_visualization","user_defined_limits")
VALUES('test','url','test','complete','t','11111111-1111-1111-1111-111111111112',
'2015-03-17 00:00:00.94006+00','2015-03-17 00:00:00.810581+00','1',
'test','f','#{data_import_id}','11111111-1111-1111-1111-111111111113',
'#{user_id}','public_url', 'test',
'[{"type":".csv","size":5015}]','t','f','t','test','0.0.0.0','13204','test','f','{"twitter_credits_limit":0}');
})
SequelRails.connection.run(%Q{
INSERT INTO geocodings("table_name","processed_rows","created_at","updated_at","formatter","state",
"id","user_id",
"cache_hits","kind","geometry_type","processable_rows","real_rows","used_credits",
"data_import_id"
) VALUES('importer_123456','197','2015-03-17 00:00:00.279934+00','2015-03-17 00:00:00.536383+00','field_1','finished',
'11111111-1111-1111-1111-111111111114','#{user_id}','0','admin0','polygon','195','0','0',
'#{data_import_id}');
})
# Destruction must complete without FK errors and remove the user
user.destroy
::User.find(id:user_id).should eq nil
end
end
describe '#needs_password_confirmation?' do
it 'is true for a normal user' do
user = FactoryGirl.build(:carto_user, :google_sign_in => nil)
user.needs_password_confirmation?.should == true
user = FactoryGirl.build(:user, :google_sign_in => false)
user.needs_password_confirmation?.should == true
end
it 'is false for users that signed in with Google' do
user = FactoryGirl.build(:user, :google_sign_in => true)
user.needs_password_confirmation?.should == false
end
it 'is true for users that signed in with Google but changed the password' do
user = FactoryGirl.build(:user, :google_sign_in => true, :last_password_change_date => Time.now)
user.needs_password_confirmation?.should == true
end
it 'is false for users that were created with http authentication' do
user = FactoryGirl.build(:valid_user, last_password_change_date: nil)
# An http_authentication user_creation record waives password confirmation
Carto::UserCreation.stubs(:http_authentication).returns(stub(find_by_user_id: FactoryGirl.build(:user_creation)))
user.needs_password_confirmation?.should == false
end
end
describe 'User creation and DB critical calls' do
it 'Properly setups a new user (not belonging to an organization)' do
# Stub DB-service side effects that are irrelevant to the privilege checks
CartoDB::UserModule::DBService.any_instance.stubs(
cartodb_extension_version_pre_mu?: nil,
monitor_user_notification: nil,
enable_remote_db_user: nil
)
user_timeout_secs = 666
# Build a user with explicit quotas/timeouts so the DB-side checks below
# can assert deterministic values
user = ::User.new
user.username = unique_name('user')
user.email = unique_email
user.password = user.email.split('@').first
user.password_confirmation = user.password
user.admin = false
user.private_tables_enabled = true
user.private_maps_enabled = true
user.enabled = true
user.table_quota = 500
user.quota_in_bytes = 1234567890
user.user_timeout = user_timeout_secs * 1000
user.database_timeout = 123000
user.geocoding_quota = 1000
user.geocoding_block_price = 1500
user.sync_tables_enabled = false
user.organization = nil
user.twitter_datasource_enabled = false
user.avatar_url = user.default_avatar
user.valid?.should == true
user.save
user.nil?.should == false
# To avoid connection pool caching
CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
user.reload
# Just to be sure all following checks will not falsely report ok using wrong schema
user.database_schema.should eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
user.database_schema.should_not eq user.username
test_table_name = "table_perm_test"
# Safety check
user.in_database.fetch(%{
SELECT * FROM pg_extension WHERE extname='postgis';
}).first.nil?.should == false
# Replicate functionality inside ::UserModule::DBService.configure_database
# -------------------------------------------------------------------
user.in_database.fetch(%{
SHOW search_path;
}).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
# @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
# Careful as PG formatter timeout output changes to XXmin if too big
user.in_database.fetch(%{
SHOW statement_timeout;
}).first[:statement_timeout].should eq "#{user_timeout_secs}s"
# No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
# Checks for "grant_read_on_schema_queries(SCHEMA_CARTODB, db_user)"
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'cartodb.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Checks on SCHEMA_PUBLIC
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks on own schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{user.database_schema}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
user.in_database.run(%{
CREATE TABLE #{test_table_name}(x int);
})
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{user.database_schema}.#{test_table_name}', 'SELECT');
}).first[:has_table_privilege].should == true
# _cdb_userquotainbytes is always created on the user schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks on non-org "owned" schemas
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_IMPORTER}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_GEOCODING}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
# Special raster and geo columns
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geometry_columns', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geography_columns', 'SELECT');
}).first[:has_table_privilege].should == true
# Raster catalogs are checked for the public DB user, not the owner role
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_overviews', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_columns', 'SELECT');
}).first[:has_table_privilege].should == true
# quota check
user.in_database(as: :superuser).fetch(%{
SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
}).first[:_cdb_userquotainbytes].nil?.should == false
# Varnish invalidation function
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks of publicuser
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_schema}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Additional public user grants/revokes
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
'SELECT');
}).first[:has_table_privilege].should == false
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.destroy
end
# End-to-end check of database provisioning for a user created INSIDE an
# organization: the user gets a private schema (named after the username),
# and grants are verified for both the user's DB role and the public role.
it 'Properly setups a new organization user' do
# Stub out extension-version probing and external notifications so the
# test only exercises local DB setup.
CartoDB::UserModule::DBService.any_instance.stubs(
cartodb_extension_version_pre_mu?: nil,
monitor_user_notification: nil,
enable_remote_db_user: nil
)
disk_quota = 1234567890
user_timeout_secs = 666
max_import_file_size = 6666666666
max_import_table_row_count = 55555555
max_concurrent_import_count = 44
max_layers = 11
# create an owner
organization = create_org('org-user-creation-db-checks-organization', disk_quota * 10, 10)
user1 = create_user email: 'user1@whatever.com', username: 'creation-db-checks-org-owner', password: 'user11'
user1.organization = organization
user1.max_import_file_size = max_import_file_size
user1.max_import_table_row_count = max_import_table_row_count
user1.max_concurrent_import_count = max_concurrent_import_count
user1.max_layers = 11
user1.save
organization.owner_id = user1.id
organization.save
organization.reload
user1.reload
# Build the organization member under test, field by field.
user = ::User.new
user.username = unique_name('user')
user.email = unique_email
user.password = user.email.split('@').first
user.password_confirmation = user.password
user.admin = false
user.private_tables_enabled = true
user.private_maps_enabled = true
user.enabled = true
user.table_quota = 500
user.quota_in_bytes = disk_quota
user.user_timeout = user_timeout_secs * 1000
user.database_timeout = 123000
user.geocoding_quota = 1000
user.geocoding_block_price = 1500
user.sync_tables_enabled = false
user.organization = organization
user.twitter_datasource_enabled = false
user.avatar_url = user.default_avatar
user.valid?.should == true
user.save
user.nil?.should == false
# To avoid connection pool caching
CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
user.reload
# Org members inherit import limits from the organization owner (user1).
user.max_import_file_size.should eq max_import_file_size
user.max_import_table_row_count.should eq max_import_table_row_count
user.max_concurrent_import_count.should eq max_concurrent_import_count
user.max_layers.should eq max_layers
# Just to be sure all following checks will not falsely report ok using wrong schema
user.database_schema.should_not eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
user.database_schema.should eq user.username
test_table_name = "table_perm_test"
# Safety check
user.in_database.fetch(%{
SELECT * FROM pg_extension WHERE extname='postgis';
}).first.nil?.should == false
# Replicate functionality inside ::UserModule::DBService.configure_database
# -------------------------------------------------------------------
user.in_database.fetch(%{
SHOW search_path;
}).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
# @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
# Careful as PG formatter timeout output changes to XXmin if too big
user.in_database.fetch(%{
SHOW statement_timeout;
}).first[:statement_timeout].should eq "#{user_timeout_secs}s"
# No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'cartodb.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Checks on SCHEMA_PUBLIC
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks on own schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{user.database_username}',
'#{user.database_schema}', 'CREATE, USAGE');
}).first[:has_schema_privilege].should == true
user.in_database.run(%{
CREATE TABLE #{test_table_name}(x int);
})
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{user.database_username}',
'#{user.database_schema}.#{test_table_name}', 'SELECT');
}).first[:has_table_privilege].should == true
# _cdb_userquotainbytes is always created on the user schema
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege('#{user.database_username}',
'#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
}).first[:has_function_privilege].should == true
# quota check
user.in_database(as: :superuser).fetch(%{
SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
}).first[:_cdb_userquotainbytes].nil?.should == false
# Varnish invalidation function
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{user.database_username}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
}).first[:has_function_privilege].should == true
# Checks of publicuser
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_name}', 'CONNECT');
}).first[:has_database_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{user.database_schema}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
}).first[:has_table_privilege].should == false
# Additional public user grants/revokes
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
'SELECT');
}).first[:has_table_privilege].should == false
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
}).first[:has_schema_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_function_privilege(
'#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
'EXECUTE');
}).first[:has_function_privilege].should == true
user.in_database(as: :superuser).fetch(%{
SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
'#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
}).first[:has_table_privilege].should == true
# Clean up the probe table and test fixtures.
user.in_database.run(%{
DROP TABLE #{user.database_schema}.#{test_table_name};
})
user.destroy
organization.destroy
end
end
# Verifies the write-lock flag on the user database: disabling writes must
# survive connection termination (enforced at the DB level, not merely on
# the current connection), and re-enabling must restore write access.
describe "Write locking" do
it "detects locking properly" do
@user.db_service.writes_enabled?.should eq true
@user.db_service.disable_writes
# Terminate pooled connections so the next check runs on a fresh one.
@user.db_service.terminate_database_connections
@user.db_service.writes_enabled?.should eq false
@user.db_service.enable_writes
@user.db_service.terminate_database_connections
@user.db_service.writes_enabled?.should eq true
end
it "enables and disables writes in user database" do
@user.db_service.run_pg_query("create table foo_1(a int);")
@user.db_service.disable_writes
@user.db_service.terminate_database_connections
# DDL must fail while writes are disabled.
lambda {
@user.db_service.run_pg_query("create table foo_2(a int);")
}.should raise_error(CartoDB::ErrorRunningQuery)
@user.db_service.enable_writes
@user.db_service.terminate_database_connections
@user.db_service.run_pg_query("create table foo_3(a int);")
end
end
# User#destroy must cascade to tables, visualizations, maps and layers,
# including for viewer (read-only) users.
describe '#destroy' do
# Creates a user with one canonical (table-backed) visualization, one
# derived visualization and one user layer. Returns the pieces needed to
# later assert everything was deleted.
def create_full_data
carto_user = FactoryGirl.create(:carto_user)
user = ::User[carto_user.id]
table = create_table(user_id: carto_user.id, name: 'My first table', privacy: UserTable::PRIVACY_PUBLIC)
canonical_visualization = table.table_visualization
map = FactoryGirl.create(:carto_map_with_layers, user_id: carto_user.id)
carto_visualization = FactoryGirl.create(:carto_visualization, user: carto_user, map: map)
visualization = CartoDB::Visualization::Member.new(id: carto_visualization.id).fetch
# Force ORM to cache layers (to check if they are deleted later)
canonical_visualization.map.layers
visualization.map.layers
user_layer = Layer.create(kind: 'tiled')
user.add_layer(user_layer)
[user, table, [canonical_visualization, visualization], user_layer]
end
# Asserts none of the records created by create_full_data remain.
def check_deleted_data(user_id, table_id, visualizations, layer_id)
::User[user_id].should be_nil
visualizations.each do |visualization|
Carto::Visualization.exists?(visualization.id).should be_false
visualization.map.layers.each { |layer| Carto::Layer.exists?(layer.id).should be_false }
end
Carto::UserTable.exists?(table_id).should be_false
Carto::Layer.exists?(layer_id).should be_false
end
it 'destroys all related information' do
user, table, visualizations, layer = create_full_data
::User[user.id].destroy
check_deleted_data(user.id, table.id, visualizations, layer.id)
end
it 'destroys all related information, even for viewer users' do
user, table, visualizations, layer = create_full_data
user.viewer = true
user.save
user.reload
user.destroy
check_deleted_data(user.id, table.id, visualizations, layer.id)
end
end
# #visualization_count filtering options: type, privacy, shared exclusion
# and raster exclusion.
describe '#visualization_count' do
include_context 'organization with users helper'
include TableSharing
it 'filters by type if asked' do
vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
@org_user_1.visualization_count.should eq 1
@org_user_1.visualization_count(type: Carto::Visualization::TYPE_DERIVED).should eq 1
# Other types must not match the derived visualization just created.
[Carto::Visualization::TYPE_CANONICAL, Carto::Visualization::TYPE_REMOTE].each do |type|
@org_user_1.visualization_count(type: type).should eq 0
end
vis.destroy
end
it 'filters by privacy if asked' do
vis = FactoryGirl.create(:carto_visualization,
user_id: @org_user_1.id,
privacy: Carto::Visualization::PRIVACY_PUBLIC)
@org_user_1.visualization_count.should eq 1
@org_user_1.visualization_count(privacy: Carto::Visualization::PRIVACY_PUBLIC).should eq 1
[
Carto::Visualization::PRIVACY_PRIVATE,
Carto::Visualization::PRIVACY_LINK,
Carto::Visualization::PRIVACY_PROTECTED
].each do |privacy|
@org_user_1.visualization_count(privacy: privacy).should eq 0
end
vis.destroy
end
it 'filters by shared exclusion if asked' do
# Visualization is owned by user 1 but shared with user 2; it counts for
# user 2 only when shared items are included.
vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
share_visualization_with_user(vis, @org_user_2)
@org_user_2.visualization_count.should eq 1
@org_user_2.visualization_count(exclude_shared: true).should eq 0
vis.destroy
end
it 'filters by raster exclusion if asked' do
vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, kind: Carto::Visualization::KIND_RASTER)
@org_user_1.visualization_count.should eq 1
@org_user_1.visualization_count(exclude_raster: true).should eq 0
vis.destroy
end
end
# Viewer (read-only) users must always end up with zero quotas and no soft
# limits, regardless of what was requested at creation or assignment time.
describe 'viewer user' do
# Asserts every quota is 0 and every soft limit is disabled for the user.
def verify_viewer_quota(user)
user.quota_in_bytes.should eq 0
user.geocoding_quota.should eq 0
user.soft_geocoding_limit.should eq false
user.twitter_datasource_quota.should eq 0
user.soft_twitter_datasource_limit.should eq false
user.here_isolines_quota.should eq 0
user.soft_here_isolines_limit.should eq false
user.obs_snapshot_quota.should eq 0
user.soft_obs_snapshot_limit.should eq false
user.obs_general_quota.should eq 0
user.soft_obs_general_limit.should eq false
end
describe 'creation' do
it 'assigns 0 as quota and no soft limit no matter what is requested' do
# Non-zero quotas are requested on purpose; they must all be ignored.
@user = create_user email: 'u_v@whatever.com', username: 'viewer', password: 'user11', viewer: true,
geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
soft_obs_general_limit: true
verify_viewer_quota(@user)
@user.destroy
end
end
describe 'builder -> viewer' do
it 'assigns 0 as quota and no soft limit no matter what is requested' do
@user = create_user email: 'u_v@whatever.com', username: 'builder-to-viewer', password: 'user11', viewer: false,
geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
soft_obs_general_limit: true
# Random check, but we can trust create_user
@user.quota_in_bytes.should_not eq 0
# Demoting a builder to viewer must wipe quotas on save.
@user.viewer = true
@user.save
@user.reload
verify_viewer_quota(@user)
@user.destroy
end
end
describe 'quotas' do
it "can't change for viewer users" do
@user = create_user(viewer: true)
verify_viewer_quota(@user)
@user.quota_in_bytes = 666
@user.save
@user.reload
verify_viewer_quota(@user)
@user.destroy
end
end
end
# API-key lifecycle: creation of the master key alongside the user, sync of
# the Sequel user's api_key with the master Carto::ApiKey, enable/disable in
# redis by user state, and bulk regeneration.
describe 'api keys' do
before(:all) do
@auth_api_user = FactoryGirl.create(:valid_user)
end
after(:all) do
@auth_api_user.destroy
end
describe 'create api keys on user creation' do
it "creates master api key on user creation" do
api_keys = Carto::ApiKey.where(user_id: @auth_api_user.id)
api_keys.should_not be_empty
master_api_key = Carto::ApiKey.where(user_id: @auth_api_user.id).master.first
master_api_key.should be
# Master key token mirrors the user's api_key attribute.
master_api_key.token.should eq @auth_api_user.api_key
end
end
it 'syncs api key changes with master api key' do
master_key = Carto::ApiKey.where(user_id: @auth_api_user.id).master.first
expect(@auth_api_user.api_key).to eq master_key.token
expect { @auth_api_user.regenerate_api_key }.to(change { @auth_api_user.api_key })
master_key.reload
expect(@auth_api_user.api_key).to eq master_key.token
end
describe 'are enabled/disabled' do
before(:all) do
@regular_key = @auth_api_user.api_keys.create_regular_key!(name: 'regkey', grants: [{ type: 'apis', apis: [] }])
end
after(:all) do
@regular_key.destroy
end
# Reset the user to a fully-enabled state before each example.
before(:each) do
@auth_api_user.state = 'active'
@auth_api_user.engine_enabled = true
@auth_api_user.save
end
# An api key is "enabled" when its hash exists in the users_metadata redis.
def enabled_api_key?(api_key)
$users_metadata.exists(api_key.send(:redis_key))
end
it 'disables all api keys for locked users' do
@auth_api_user.state = 'locked'
@auth_api_user.save
expect(@auth_api_user.api_keys.none? { |k| enabled_api_key?(k) }).to be_true
expect(@auth_api_user.api_key).to_not eq($users_metadata.HGET(@auth_api_user.send(:key), 'map_key'))
end
it 'disables regular keys for engine disabled' do
@auth_api_user.engine_enabled = false
@auth_api_user.save
# Only regular keys go away; master and default-public keys survive.
expect(@auth_api_user.api_keys.regular.none? { |k| enabled_api_key?(k) }).to be_true
expect(@auth_api_user.api_keys.master.all? { |k| enabled_api_key?(k) }).to be_true
expect(@auth_api_user.api_keys.default_public.all? { |k| enabled_api_key?(k) }).to be_true
expect(@auth_api_user.api_key).to eq($users_metadata.HGET(@auth_api_user.send(:key), 'map_key'))
end
it 'enables all keys for active engine users' do
expect(@auth_api_user.api_keys.all? { |k| enabled_api_key?(k) }).to be_true
expect(@auth_api_user.api_key).to eq($users_metadata.HGET(@auth_api_user.send(:key), 'map_key'))
end
end
describe '#regenerate_all_api_keys' do
before(:all) do
@regular_key = @auth_api_user.api_keys.create_regular_key!(name: 'regkey', grants: [{ type: 'apis', apis: [] }])
end
after(:all) do
@regular_key.destroy
end
it 'regenerates master key at user model' do
expect { @auth_api_user.regenerate_all_api_keys }.to(change { @auth_api_user.api_key })
end
it 'regenerates master key model' do
expect { @auth_api_user.regenerate_all_api_keys }.to(change { @auth_api_user.api_keys.master.first.token })
end
it 'regenerates regular key' do
expect { @auth_api_user.regenerate_all_api_keys }.to(change { @regular_key.reload.token })
end
end
end
# Rate-limit provisioning in redis ($limits_metadata), gated by the
# 'limits_v2' feature flag: creation from account type, overrides via a
# custom rate limit, org users, destruction and updates.
describe '#rate limits' do
before :all do
@limits_feature_flag = FactoryGirl.create(:feature_flag, name: 'limits_v2', restricted: false)
@account_type = create_account_type_fg('FREE')
@account_type_pro = create_account_type_fg('PRO')
@account_type_org = create_account_type_fg('ORGANIZATION USER')
@rate_limits_custom = FactoryGirl.create(:rate_limits_custom)
@rate_limits = FactoryGirl.create(:rate_limits)
@rate_limits_pro = FactoryGirl.create(:rate_limits_pro)
@user_rt = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits.id)
@organization = FactoryGirl.create(:organization)
owner = FactoryGirl.create(:user, account_type: 'PRO')
uo = CartoDB::UserOrganization.new(@organization.id, owner.id)
uo.promote_user_to_admin
@organization.reload
@user_org = FactoryGirl.build(:user, account_type: 'FREE')
@user_org.organization = @organization
@user_org.enabled = true
@user_org.save
# Redis key prefixes where per-user limits are stored.
@map_prefix = "limits:rate:store:#{@user_rt.username}:maps:"
@sql_prefix = "limits:rate:store:#{@user_rt.username}:sql:"
end
after :all do
@user_rt.destroy unless @user_rt.nil?
@user_no_ff.destroy unless @user_no_ff.nil?
@organization.destroy unless @organization.nil?
@account_type.destroy unless @account_type.nil?
@account_type_pro.destroy unless @account_type_pro.nil?
@account_type_org.destroy unless @account_type_org.nil?
@account_type.rate_limit.destroy unless @account_type.nil?
@account_type_pro.rate_limit.destroy unless @account_type_pro.nil?
@account_type_org.rate_limit.destroy unless @account_type_org.nil?
@rate_limits.destroy unless @rate_limits.nil?
@rate_limits_custom.destroy unless @rate_limits_custom.nil?
@rate_limits_custom2.destroy unless @rate_limits_custom2.nil?
@rate_limits_pro.destroy unless @rate_limits_pro.nil?
end
# Re-create the feature flag if a previous example destroyed it.
before :each do
unless FeatureFlag.where(name: 'limits_v2').first.present?
@limits_feature_flag = FactoryGirl.create(:feature_flag, name: 'limits_v2', restricted: false)
end
end
after :each do
@limits_feature_flag.destroy if @limits_feature_flag.exists?
end
it 'does not create rate limits if feature flag is not enabled' do
@limits_feature_flag.destroy
@user_no_ff = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits.id)
map_prefix = "limits:rate:store:#{@user_no_ff.username}:maps:"
sql_prefix = "limits:rate:store:#{@user_no_ff.username}:sql:"
$limits_metadata.EXISTS("#{map_prefix}anonymous").should eq 0
$limits_metadata.EXISTS("#{sql_prefix}query").should eq 0
end
it 'creates rate limits from user account type' do
expect_rate_limits_saved_to_redis(@user_rt.username)
end
it 'updates rate limits from user custom rate_limit' do
expect_rate_limits_saved_to_redis(@user_rt.username)
@user_rt.rate_limit_id = @rate_limits_custom.id
@user_rt.save
expect_rate_limits_custom_saved_to_redis(@user_rt.username)
end
it 'creates rate limits for a org user' do
expect_rate_limits_pro_saved_to_redis(@user_org.username)
end
it 'destroy rate limits' do
user2 = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits_pro.id)
expect_rate_limits_pro_saved_to_redis(user2.username)
user2.destroy
expect {
Carto::RateLimit.find(user2.rate_limit_id)
}.to raise_error(ActiveRecord::RecordNotFound)
# NOTE(review): helper name suggests the redis limit keys are expected to
# still exist after user destruction — confirm against the helper's
# definition (defined elsewhere in the suite).
expect_rate_limits_exist(user2.username)
end
it 'updates rate limits when user has no rate limits' do
user = FactoryGirl.create(:valid_user)
user.update_rate_limits(@rate_limits.api_attributes)
user.reload
user.rate_limit.should_not be_nil
user.rate_limit.api_attributes.should eq @rate_limits.api_attributes
user.destroy
end
it 'does nothing when user has no rate limits' do
user = FactoryGirl.create(:valid_user)
user.update_rate_limits(nil)
user.reload
user.rate_limit.should be_nil
user.destroy
end
it 'updates rate limits when user has rate limits' do
@rate_limits_custom2 = FactoryGirl.create(:rate_limits_custom2)
user = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits_custom2.id)
user.update_rate_limits(@rate_limits.api_attributes)
user.reload
user.rate_limit.should_not be_nil
# The existing rate-limit row is updated in place (same id, new values).
user.rate_limit_id.should eq @rate_limits_custom2.id
user.rate_limit.api_attributes.should eq @rate_limits.api_attributes
@rate_limits.api_attributes.should eq @rate_limits_custom2.reload.api_attributes
user.destroy
end
it 'set rate limits to nil when user has rate limits' do
@rate_limits_custom2 = FactoryGirl.create(:rate_limits_custom2)
user = FactoryGirl.create(:valid_user, rate_limit_id: @rate_limits_custom2.id)
user.update_rate_limits(nil)
user.reload
user.rate_limit.should be_nil
expect {
Carto::RateLimit.find(@rate_limits_custom2.id)
}.to raise_error(ActiveRecord::RecordNotFound)
# limits reverted to the ones from the account type
expect_rate_limits_saved_to_redis(user.username)
user.destroy
end
end
# Password expiration rules: driven by the 'expiration_in_d' app config,
# overridable per organization, and never applied to users who only sign in
# via OAuth (GitHub / Google) and have no password change on record.
describe '#password_expired?' do
before(:all) do
@organization_password = create_organization_with_owner
end
after(:all) do
@organization_password.destroy
end
before(:each) do
@github_user = FactoryGirl.build(:valid_user, github_user_id: 932847)
@google_user = FactoryGirl.build(:valid_user, google_sign_in: true)
@password_user = FactoryGirl.build(:valid_user)
@org_user = FactoryGirl.create(:valid_user,
account_type: 'ORGANIZATION USER',
organization: @organization_password)
end
it 'never expires without configuration' do
Cartodb.with_config(passwords: { 'expiration_in_d' => nil }) do
expect(@github_user.password_expired?).to be_false
expect(@google_user.password_expired?).to be_false
expect(@password_user.password_expired?).to be_false
expect(@org_user.password_expired?).to be_false
end
end
it 'never expires for users without password' do
Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
# Jump past the expiration window; OAuth-only users must be unaffected.
Delorean.jump(10.days)
expect(@github_user.password_expired?).to be_false
expect(@google_user.password_expired?).to be_false
Delorean.back_to_the_present
end
end
it 'expires for users with oauth and changed passwords' do
Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
# Once an OAuth user has a recorded password change, expiration applies.
@github_user.last_password_change_date = Time.now - 10.days
expect(@github_user.password_expired?).to be_true
@google_user.last_password_change_date = Time.now - 10.days
expect(@google_user.password_expired?).to be_true
end
end
it 'expires for password users after a while has passed' do
@password_user.save
Cartodb.with_config(passwords: { 'expiration_in_d' => 15 }) do
expect(@password_user.password_expired?).to be_false
Delorean.jump(30.days)
expect(@password_user.password_expired?).to be_true
# Changing the password resets the expiration clock.
@password_user.password = @password_user.password_confirmation = 'waduspass'
@password_user.save
expect(@password_user.password_expired?).to be_false
Delorean.jump(30.days)
expect(@password_user.password_expired?).to be_true
Delorean.back_to_the_present
end
@password_user.destroy
end
it 'expires for org users with password_expiration set' do
# Organization setting (2 days) takes precedence over the app-wide 5 days.
@organization_password.stubs(:password_expiration_in_d).returns(2)
org_user2 = FactoryGirl.create(:valid_user,
account_type: 'ORGANIZATION USER',
organization: @organization_password)
Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
expect(org_user2.password_expired?).to be_false
Delorean.jump(1.day)
expect(org_user2.password_expired?).to be_false
Delorean.jump(5.days)
expect(org_user2.password_expired?).to be_true
org_user2.password = org_user2.password_confirmation = 'waduspass'
org_user2.save
Delorean.jump(1.day)
expect(org_user2.password_expired?).to be_false
Delorean.jump(5.days)
expect(org_user2.password_expired?).to be_true
Delorean.back_to_the_present
end
end
it 'never expires for org users with no password_expiration set' do
# A nil org setting disables expiration even when the app config sets one.
@organization_password.stubs(:password_expiration_in_d).returns(nil)
org_user2 = FactoryGirl.create(:valid_user, organization: @organization_password)
Cartodb.with_config(passwords: { 'expiration_in_d' => 5 }) do
expect(org_user2.password_expired?).to be_false
Delorean.jump(10.days)
expect(org_user2.password_expired?).to be_false
org_user2.password = org_user2.password_confirmation = 'waduspass'
org_user2.save
Delorean.jump(10.days)
expect(org_user2.password_expired?).to be_false
Delorean.back_to_the_present
end
end
end
protected
# Builds and persists an Organization with a uniquified name, the given
# disk quota (in bytes) and seat count; returns the saved record.
def create_org(org_name, org_quota, org_seats)
  Organization.new.tap do |org|
    org.name = unique_name(org_name)
    org.quota_in_bytes = org_quota
    org.seats = org_seats
    org.save
  end
end
# Returns the tables backing every canonical visualization that the given
# user owns or that has been shared with them.
def tables_including_shared(user)
  query_builder = Carto::VisualizationQueryBuilder.new
  query_builder = query_builder.with_owned_by_or_shared_with_user_id(user.id)
  query_builder = query_builder.with_type(Carto::Visualization::TYPE_CANONICAL)
  query_builder.build.map(&:table)
end
end
|
require 'fcrepo_wrapper/version'
require 'fcrepo_wrapper/configuration'
require 'fcrepo_wrapper/settings'
require 'fcrepo_wrapper/downloader'
require 'fcrepo_wrapper/md5'
require 'fcrepo_wrapper/instance'
module FcrepoWrapper
  # Version of Fedora Commons (fcrepo) downloaded when none is configured.
  def self.default_fcrepo_version
    '4.5.1'
  end

  # Port the wrapped fcrepo instance listens on by default.
  def self.default_fcrepo_port
    '8080'
  end

  # Memoized shared instance. NOTE: `options` are only honored on the first
  # call; subsequent calls return the cached instance unchanged.
  def self.default_instance(options = {})
    @default_instance ||= FcrepoWrapper::Instance.new options
  end

  ##
  # Ensures a fcrepo service is running before executing the block
  def self.wrap(options = {}, &block)
    # Parenthesized block-pass avoids Ruby's "ambiguous first argument"
    # warning for a bare `&block`.
    default_instance(options).wrap(&block)
  end
end
Use fcrepo 4.6.0 by default
require 'fcrepo_wrapper/version'
require 'fcrepo_wrapper/configuration'
require 'fcrepo_wrapper/settings'
require 'fcrepo_wrapper/downloader'
require 'fcrepo_wrapper/md5'
require 'fcrepo_wrapper/instance'
module FcrepoWrapper
  # Default HTTP port for the wrapped repository.
  def self.default_fcrepo_port
    '8080'
  end

  # Fedora Commons version fetched when the caller does not pick one.
  def self.default_fcrepo_version
    '4.6.0'
  end

  # Lazily builds and caches a single Instance. Options are applied on the
  # first call only; later calls reuse the cached instance.
  def self.default_instance(options = {})
    @default_instance ||= Instance.new(options)
  end

  ##
  # Runs the given block with a fcrepo service guaranteed to be up.
  def self.wrap(options = {}, &block)
    default_instance(options).wrap(&block)
  end
end
|
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# name :string(255)
# admin :boolean default(FALSE), not null
# projects_limit :integer default(10)
# skype :string(255) default(""), not null
# linkedin :string(255) default(""), not null
# twitter :string(255) default(""), not null
# authentication_token :string(255)
# theme_id :integer default(1), not null
# bio :string(255)
# failed_attempts :integer default(0)
# locked_at :datetime
# extern_uid :string(255)
# provider :string(255)
# username :string(255)
# can_create_group :boolean default(TRUE), not null
# can_create_team :boolean default(TRUE), not null
# state :string(255)
# color_scheme_id :integer default(1), not null
# notification_level :integer default(1), not null
# password_expires_at :datetime
# created_by_id :integer
# avatar :string(255)
# confirmation_token :string(255)
# confirmed_at :datetime
# confirmation_sent_at :datetime
# unconfirmed_email :string(255)
# hide_no_ssh_key :boolean default(FALSE)
# website_url :string(255) default(""), not null
#
require 'spec_helper'
describe User do
# Association declarations on the User model (shoulda-matchers one-liners).
describe "Associations" do
it { should have_one(:namespace) }
it { should have_many(:snippets).class_name('Snippet').dependent(:destroy) }
it { should have_many(:users_projects).dependent(:destroy) }
it { should have_many(:groups) }
it { should have_many(:keys).dependent(:destroy) }
it { should have_many(:events).class_name('Event').dependent(:destroy) }
it { should have_many(:recent_events).class_name('Event') }
it { should have_many(:issues).dependent(:destroy) }
it { should have_many(:notes).dependent(:destroy) }
it { should have_many(:assigned_issues).dependent(:destroy) }
it { should have_many(:merge_requests).dependent(:destroy) }
it { should have_many(:assigned_merge_requests).dependent(:destroy) }
end
# projects_limit is admin-only mass-assignable (Rails attr_accessible roles).
describe "Mass assignment" do
it { should_not allow_mass_assignment_of(:projects_limit) }
it { should allow_mass_assignment_of(:projects_limit).as(:admin) }
end
# Model validations: presence/numericality of core fields plus email format.
describe 'validations' do
it { should validate_presence_of(:username) }
it { should validate_presence_of(:projects_limit) }
it { should validate_numericality_of(:projects_limit) }
it { should allow_value(0).for(:projects_limit) }
it { should_not allow_value(-1).for(:projects_limit) }
it { should ensure_length_of(:bio).is_within(0..255) }
describe 'email' do
it 'accepts info@example.com' do
user = build(:user, email: 'info@example.com')
expect(user).to be_valid
end
# Plus-addressing must be accepted.
it 'accepts info+test@example.com' do
user = build(:user, email: 'info+test@example.com')
expect(user).to be_valid
end
it 'rejects test@test@example.com' do
user = build(:user, email: 'test@test@example.com')
expect(user).to be_invalid
end
# Guards against scheme-prefixed input being treated as a valid address.
it 'rejects mailto:test@example.com' do
user = build(:user, email: 'mailto:test@example.com')
expect(user).to be_invalid
end
end
end
# Smoke checks that key public methods exist on the model.
describe "Respond to" do
it { should respond_to(:is_admin?) }
it { should respond_to(:name) }
it { should respond_to(:private_token) }
end
# Password auto-generation via the force_random_password flag.
describe '#generate_password' do
it "should execute callback when force_random_password specified" do
user = build(:user, force_random_password: true)
user.should_receive(:generate_password)
user.save
end
it "should not generate password by default" do
user = create(:user, password: 'abcdefghe')
user.password.should == 'abcdefghe'
end
it "should generate password when forcing random password" do
# Stubbed token is 9 chars; the stored password is its first 8 chars,
# overriding the explicitly supplied password.
Devise.stub(:friendly_token).and_return('123456789')
user = create(:user, password: 'abcdefg', force_random_password: true)
user.password.should == '12345678'
end
end
# A private/authentication token is assigned automatically on creation.
describe 'authentication token' do
it "should have authentication token" do
user = create(:user)
user.authentication_token.should_not be_blank
end
end
# Project scopes: authorized (any access), owned, and personal-namespace.
describe 'projects' do
before do
ActiveRecord::Base.observers.enable(:user_observer)
@user = create :user
@project = create :project, namespace: @user.namespace
@project_2 = create :project, group: create(:group) # Grant MASTER access to the user
@project_3 = create :project, group: create(:group) # Grant DEVELOPER access to the user
@project_2.team << [@user, :master]
@project_3.team << [@user, :developer]
end
it { @user.authorized_projects.should include(@project) }
it { @user.authorized_projects.should include(@project_2) }
it { @user.authorized_projects.should include(@project_3) }
# Team membership alone does not confer ownership.
it { @user.owned_projects.should include(@project) }
it { @user.owned_projects.should_not include(@project_2) }
it { @user.owned_projects.should_not include(@project_3) }
it { @user.personal_projects.should include(@project) }
it { @user.personal_projects.should_not include(@project_2) }
it { @user.personal_projects.should_not include(@project_3) }
end
# Group ownership: owning a group adds a second namespace for the user.
describe 'groups' do
before do
ActiveRecord::Base.observers.enable(:user_observer)
@user = create :user
@group = create :group
@group.add_owner(@user)
end
it { @user.several_namespaces?.should be_true }
it { @user.authorized_groups.should == [@group] }
it { @user.owned_groups.should == [@group] }
end
# A user added as OWNER (not the creator) also gains the group namespace.
describe 'group multiple owners' do
before do
ActiveRecord::Base.observers.enable(:user_observer)
@user = create :user
@user2 = create :user
@group = create :group
@group.add_owner(@user)
@group.add_user(@user2, UsersGroup::OWNER)
end
it { @user2.several_namespaces?.should be_true }
end
# A user with only their personal namespace has no "several namespaces".
describe 'namespaced' do
before do
ActiveRecord::Base.observers.enable(:user_observer)
@user = create :user
@project = create :project, namespace: @user.namespace
end
it { @user.several_namespaces?.should be_false }
end
# State-machine transition to the blocked state.
describe 'blocking user' do
let(:user) { create(:user, name: 'John Smith') }
it "should block user" do
user.block
user.blocked?.should be_true
end
end
# User.filter scopes: "admins", "blocked", unknown strings, and nil
# (nil excludes blocked users).
describe 'filter' do
before do
User.delete_all
@user = create :user
@admin = create :user, admin: true
@blocked = create :user, state: :blocked
end
it { User.filter("admins").should == [@admin] }
it { User.filter("blocked").should == [@blocked] }
it { User.filter("wop").should include(@user, @admin, @blocked) }
it { User.filter(nil).should include(@user, @admin) }
end
# Scope of users not belonging to the given project's team.
describe :not_in_project do
before do
User.delete_all
@user = create :user
@project = create :project
end
# The project owner is not a team member here, so both users are included.
it { User.not_in_project(@project).should include(@user, @project.owner) }
end
describe 'user creation' do
  describe 'normal user' do
    let(:user) { create(:user, name: 'John Smith') }

    it { user.is_admin?.should be_false }
    it { user.require_ssh_key?.should be_true }
    it { user.can_create_group?.should be_true }
    it { user.can_create_project?.should be_true }
    # first_name is derived from the full name.
    it { user.first_name.should == 'John' }
  end

  describe 'without defaults' do
    let(:user) { User.new }

    # Despite the description, User.new still carries the database column
    # defaults (projects_limit 10, can_create_group true, basic theme); what
    # it does NOT get are the configured gitlab.yml defaults.
    it "should not apply defaults to user" do
      user.projects_limit.should == 10
      user.can_create_group.should be_true
      user.theme_id.should == Gitlab::Theme::BASIC
    end
  end

  context 'as admin' do
    describe 'with defaults' do
      # build_user with the :admin role applies the configured defaults.
      let(:user) { User.build_user({}, as: :admin) }

      it "should apply defaults to user" do
        user.projects_limit.should == Gitlab.config.gitlab.default_projects_limit
        user.can_create_group.should == Gitlab.config.gitlab.default_can_create_group
        user.theme_id.should == Gitlab.config.gitlab.default_theme
      end
    end

    describe 'with default overrides' do
      let(:user) { User.build_user({projects_limit: 123, can_create_group: true, can_create_team: true, theme_id: Gitlab::Theme::BASIC}, as: :admin) }

      it "should apply defaults to user" do
        # Guard assertions: prove the overrides differ from the configured
        # defaults so the assertions below actually show the override won.
        Gitlab.config.gitlab.default_projects_limit.should_not == 123
        Gitlab.config.gitlab.default_can_create_group.should_not be_true
        Gitlab.config.gitlab.default_theme.should_not == Gitlab::Theme::BASIC
        user.projects_limit.should == 123
        user.can_create_group.should be_true
        user.theme_id.should == Gitlab::Theme::BASIC
      end
    end
  end

  context 'as user' do
    describe 'with defaults' do
      let(:user) { User.build_user }

      it "should apply defaults to user" do
        user.projects_limit.should == Gitlab.config.gitlab.default_projects_limit
        user.can_create_group.should == Gitlab.config.gitlab.default_can_create_group
        user.theme_id.should == Gitlab.config.gitlab.default_theme
      end
    end

    describe 'with default overrides' do
      let(:user) { User.build_user(projects_limit: 123, can_create_group: true, theme_id: Gitlab::Theme::BASIC) }

      # For a regular user these attributes are protected, so the configured
      # defaults win and the attempted overrides are ignored.
      it "should apply defaults to user" do
        user.projects_limit.should == Gitlab.config.gitlab.default_projects_limit
        user.can_create_group.should == Gitlab.config.gitlab.default_can_create_group
        user.theme_id.should == Gitlab.config.gitlab.default_theme
      end
    end
  end
end
describe 'by_username_or_id' do
  let(:user1) { create(:user, username: 'foo') }

  it "should get the correct user" do
    User.by_username_or_id(user1.id).should == user1
    User.by_username_or_id('foo').should == user1
    # Lookups that match nothing return nil rather than raising.
    User.by_username_or_id(-1).should be_nil
    User.by_username_or_id('bar').should be_nil
  end
end

describe 'all_ssh_keys' do
  # NOTE(review): this association expectation duplicates the one in the
  # "Associations" block; consider keeping only one of them.
  it { should have_many(:keys).dependent(:destroy) }

  it "should have all ssh keys" do
    user = create :user
    key = create :key, key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD33bWLBxu48Sev9Fert1yzEO4WGcWglWF7K/AwblIUFselOt/QdOL9DSjpQGxLagO1s9wl53STIO8qGS4Ms0EJZyIXOEFMjFJ5xmjSy+S37By4sG7SsltQEHMxtbtFOaW5LV2wCrX+rUsRNqLMamZjgjcPO0/EgGCXIGMAYW4O7cwGZdXWYIhQ1Vwy+CsVMDdPkPgBXqK7nR/ey8KMs8ho5fMNgB5hBw/AL9fNGhRw3QTD6Q12Nkhl4VZES2EsZqlpNnJttnPdp847DUsT6yuLRlfiQfz5Cn9ysHFdXObMN5VYIiPFwHeYCZp1X2S4fDZooRE8uOLTfxWHPXwrhqSH", user_id: user.id
    user.all_ssh_keys.should include(key.key)
  end
end

describe :avatar_type do
  let(:user) { create(:user) }

  it "should be true if avatar is image" do
    user.update_attribute(:avatar, 'uploads/avatar.png')
    user.avatar_type.should be_true
  end

  # For a non-image upload, avatar_type returns the validation error array.
  it "should be false if avatar is html page" do
    user.update_attribute(:avatar, 'uploads/avatar.html')
    user.avatar_type.should == ["only images allowed"]
  end
end

# full_website_url normalises a bare host to an http:// URL but preserves
# an explicit scheme.
describe '#full_website_url' do
  let(:user) { create(:user) }

  it 'begins with http if website url omits it' do
    user.website_url = 'test.com'
    expect(user.full_website_url).to eq 'http://test.com'
  end

  it 'begins with http if website url begins with http' do
    user.website_url = 'http://test.com'
    expect(user.full_website_url).to eq 'http://test.com'
  end

  it 'begins with https if website url begins with https' do
    user.website_url = 'https://test.com'
    expect(user.full_website_url).to eq 'https://test.com'
  end
end

# short_website_url strips any scheme for display purposes.
describe '#short_website_url' do
  let(:user) { create(:user) }

  it 'does not begin with http if website url omits it' do
    user.website_url = 'test.com'
    expect(user.short_website_url).to eq 'test.com'
  end

  it 'does not begin with http if website url begins with http' do
    user.website_url = 'http://test.com'
    expect(user.short_website_url).to eq 'test.com'
  end

  it 'does not begin with https if website url begins with https' do
    user.website_url = 'https://test.com'
    expect(user.short_website_url).to eq 'test.com'
  end
end
end
Add tests for case-insensitive user search.
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# name :string(255)
# admin :boolean default(FALSE), not null
# projects_limit :integer default(10)
# skype :string(255) default(""), not null
# linkedin :string(255) default(""), not null
# twitter :string(255) default(""), not null
# authentication_token :string(255)
# theme_id :integer default(1), not null
# bio :string(255)
# failed_attempts :integer default(0)
# locked_at :datetime
# extern_uid :string(255)
# provider :string(255)
# username :string(255)
# can_create_group :boolean default(TRUE), not null
# can_create_team :boolean default(TRUE), not null
# state :string(255)
# color_scheme_id :integer default(1), not null
# notification_level :integer default(1), not null
# password_expires_at :datetime
# created_by_id :integer
# avatar :string(255)
# confirmation_token :string(255)
# confirmed_at :datetime
# confirmation_sent_at :datetime
# unconfirmed_email :string(255)
# hide_no_ssh_key :boolean default(FALSE)
# website_url :string(255) default(""), not null
#
require 'spec_helper'
describe User do
# Association declarations, verified with shoulda-style matchers.
describe "Associations" do
  it { should have_one(:namespace) }
  it { should have_many(:snippets).class_name('Snippet').dependent(:destroy) }
  it { should have_many(:users_projects).dependent(:destroy) }
  it { should have_many(:groups) }
  it { should have_many(:keys).dependent(:destroy) }
  it { should have_many(:events).class_name('Event').dependent(:destroy) }
  it { should have_many(:recent_events).class_name('Event') }
  it { should have_many(:issues).dependent(:destroy) }
  it { should have_many(:notes).dependent(:destroy) }
  it { should have_many(:assigned_issues).dependent(:destroy) }
  it { should have_many(:merge_requests).dependent(:destroy) }
  it { should have_many(:assigned_merge_requests).dependent(:destroy) }
end
# Mass-assignment protection: projects_limit is only assignable via the
# :admin role.
describe "Mass assignment" do
  it { should_not allow_mass_assignment_of(:projects_limit) }
  it { should allow_mass_assignment_of(:projects_limit).as(:admin) }
end

describe 'validations' do
  it { should validate_presence_of(:username) }
  it { should validate_presence_of(:projects_limit) }
  it { should validate_numericality_of(:projects_limit) }
  # Zero is a valid limit (no personal projects allowed); negatives are not.
  it { should allow_value(0).for(:projects_limit) }
  it { should_not allow_value(-1).for(:projects_limit) }
  it { should ensure_length_of(:bio).is_within(0..255) }

  describe 'email' do
    it 'accepts info@example.com' do
      user = build(:user, email: 'info@example.com')
      expect(user).to be_valid
    end

    # '+' tagging is a legal local part and must be accepted.
    it 'accepts info+test@example.com' do
      user = build(:user, email: 'info+test@example.com')
      expect(user).to be_valid
    end

    it 'rejects test@test@example.com' do
      user = build(:user, email: 'test@test@example.com')
      expect(user).to be_invalid
    end

    # Guards against scheme-prefixed input slipping through as an email.
    it 'rejects mailto:test@example.com' do
      user = build(:user, email: 'mailto:test@example.com')
      expect(user).to be_invalid
    end
  end
end

describe "Respond to" do
  it { should respond_to(:is_admin?) }
  it { should respond_to(:name) }
  it { should respond_to(:private_token) }
end
describe '#generate_password' do
  it "should execute callback when force_random_password specified" do
    user = build(:user, force_random_password: true)
    user.should_receive(:generate_password)
    user.save
  end

  it "should not generate password by default" do
    user = create(:user, password: 'abcdefghe')
    user.password.should == 'abcdefghe'
  end

  it "should generate password when forcing random password" do
    # NOTE(review): a 9-character token is stubbed but an 8-character password
    # is expected — presumably generate_password truncates the token to 8
    # characters; confirm against the model implementation.
    Devise.stub(:friendly_token).and_return('123456789')
    user = create(:user, password: 'abcdefg', force_random_password: true)
    user.password.should == '12345678'
  end
end

describe 'authentication token' do
  it "should have authentication token" do
    user = create(:user)
    user.authentication_token.should_not be_blank
  end
end

# Project-related scopes: personal (own namespace), owned, and authorized
# (any access level via a project team).
describe 'projects' do
  before do
    ActiveRecord::Base.observers.enable(:user_observer)
    @user = create :user
    @project = create :project, namespace: @user.namespace
    @project_2 = create :project, group: create(:group) # Grant MASTER access to the user
    @project_3 = create :project, group: create(:group) # Grant DEVELOPER access to the user
    @project_2.team << [@user, :master]
    @project_3.team << [@user, :developer]
  end

  it { @user.authorized_projects.should include(@project) }
  it { @user.authorized_projects.should include(@project_2) }
  it { @user.authorized_projects.should include(@project_3) }
  it { @user.owned_projects.should include(@project) }
  it { @user.owned_projects.should_not include(@project_2) }
  it { @user.owned_projects.should_not include(@project_3) }
  it { @user.personal_projects.should include(@project) }
  it { @user.personal_projects.should_not include(@project_2) }
  it { @user.personal_projects.should_not include(@project_3) }
end
# Group-related scopes. The user_observer is enabled explicitly because
# observers are disabled by default in the test environment.
describe 'groups' do
  before do
    ActiveRecord::Base.observers.enable(:user_observer)
    @user = create :user
    @group = create :group
    @group.add_owner(@user)
  end

  # Owning a group in addition to the personal namespace means several namespaces.
  it { @user.several_namespaces?.should be_true }
  it { @user.authorized_groups.should == [@group] }
  it { @user.owned_groups.should == [@group] }
end

describe 'group multiple owners' do
  before do
    ActiveRecord::Base.observers.enable(:user_observer)
    @user = create :user
    @user2 = create :user
    @group = create :group
    @group.add_owner(@user)
    # Second owner is added through the users_groups join with OWNER access.
    @group.add_user(@user2, UsersGroup::OWNER)
  end

  it { @user2.several_namespaces?.should be_true }
end

describe 'namespaced' do
  before do
    ActiveRecord::Base.observers.enable(:user_observer)
    @user = create :user
    @project = create :project, namespace: @user.namespace
  end

  # A project inside the personal namespace alone does not count as several.
  it { @user.several_namespaces?.should be_false }
end

describe 'blocking user' do
  let(:user) { create(:user, name: 'John Smith') }

  it "should block user" do
    user.block
    user.blocked?.should be_true
  end
end

# Specs for the User.filter scope.
describe 'filter' do
  before do
    # Wipe users left over from other examples so the equality assertions hold.
    User.delete_all
    @user = create :user
    @admin = create :user, admin: true
    @blocked = create :user, state: :blocked
  end

  it { User.filter("admins").should == [@admin] }
  it { User.filter("blocked").should == [@blocked] }
  # An unrecognised filter name falls through to all users.
  it { User.filter("wop").should include(@user, @admin, @blocked) }
  it { User.filter(nil).should include(@user, @admin) }
end

describe :not_in_project do
  before do
    User.delete_all
    @user = create :user
    @project = create :project
  end

  # NOTE(review): this asserts the project owner is also returned — confirm
  # that is the intended semantics of not_in_project.
  it { User.not_in_project(@project).should include(@user, @project.owner) }
end
describe 'user creation' do
  describe 'normal user' do
    let(:user) { create(:user, name: 'John Smith') }

    it { user.is_admin?.should be_false }
    it { user.require_ssh_key?.should be_true }
    it { user.can_create_group?.should be_true }
    it { user.can_create_project?.should be_true }
    # first_name is derived from the full name.
    it { user.first_name.should == 'John' }
  end

  describe 'without defaults' do
    let(:user) { User.new }

    # Despite the description, User.new still carries the database column
    # defaults (projects_limit 10, can_create_group true, basic theme); what
    # it does NOT get are the configured gitlab.yml defaults.
    it "should not apply defaults to user" do
      user.projects_limit.should == 10
      user.can_create_group.should be_true
      user.theme_id.should == Gitlab::Theme::BASIC
    end
  end

  context 'as admin' do
    describe 'with defaults' do
      # build_user with the :admin role applies the configured defaults.
      let(:user) { User.build_user({}, as: :admin) }

      it "should apply defaults to user" do
        user.projects_limit.should == Gitlab.config.gitlab.default_projects_limit
        user.can_create_group.should == Gitlab.config.gitlab.default_can_create_group
        user.theme_id.should == Gitlab.config.gitlab.default_theme
      end
    end

    describe 'with default overrides' do
      let(:user) { User.build_user({projects_limit: 123, can_create_group: true, can_create_team: true, theme_id: Gitlab::Theme::BASIC}, as: :admin) }

      it "should apply defaults to user" do
        # Guard assertions: prove the overrides differ from the configured
        # defaults so the assertions below actually show the override won.
        Gitlab.config.gitlab.default_projects_limit.should_not == 123
        Gitlab.config.gitlab.default_can_create_group.should_not be_true
        Gitlab.config.gitlab.default_theme.should_not == Gitlab::Theme::BASIC
        user.projects_limit.should == 123
        user.can_create_group.should be_true
        user.theme_id.should == Gitlab::Theme::BASIC
      end
    end
  end

  context 'as user' do
    describe 'with defaults' do
      let(:user) { User.build_user }

      it "should apply defaults to user" do
        user.projects_limit.should == Gitlab.config.gitlab.default_projects_limit
        user.can_create_group.should == Gitlab.config.gitlab.default_can_create_group
        user.theme_id.should == Gitlab.config.gitlab.default_theme
      end
    end

    describe 'with default overrides' do
      let(:user) { User.build_user(projects_limit: 123, can_create_group: true, theme_id: Gitlab::Theme::BASIC) }

      # For a regular user these attributes are protected, so the configured
      # defaults win and the attempted overrides are ignored.
      it "should apply defaults to user" do
        user.projects_limit.should == Gitlab.config.gitlab.default_projects_limit
        user.can_create_group.should == Gitlab.config.gitlab.default_can_create_group
        user.theme_id.should == Gitlab.config.gitlab.default_theme
      end
    end
  end
end
describe 'search' do
  let(:user1) { create(:user, username: 'James', email: 'james@testing.com') }
  let(:user2) { create(:user, username: 'jameson', email: 'jameson@example.com') }

  # Username matching must ignore case in both directions.
  # NOTE(review): 'james' is a prefix of 'jameson', yet searching for it is
  # expected to return only user1 — this implies search is not a plain
  # substring match; confirm against the User.search implementation.
  it "should be case insensitive" do
    User.search(user1.username.upcase).to_a.should == [user1]
    User.search(user1.username.downcase).to_a.should == [user1]
    User.search(user2.username.upcase).to_a.should == [user2]
    User.search(user2.username.downcase).to_a.should == [user2]
  end
end
describe 'by_username_or_id' do
  let(:user1) { create(:user, username: 'foo') }

  it "should get the correct user" do
    User.by_username_or_id(user1.id).should == user1
    User.by_username_or_id('foo').should == user1
    # Lookups that match nothing return nil rather than raising.
    User.by_username_or_id(-1).should be_nil
    User.by_username_or_id('bar').should be_nil
  end
end

describe 'all_ssh_keys' do
  # NOTE(review): this association expectation duplicates the one in the
  # "Associations" block; consider keeping only one of them.
  it { should have_many(:keys).dependent(:destroy) }

  it "should have all ssh keys" do
    user = create :user
    key = create :key, key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD33bWLBxu48Sev9Fert1yzEO4WGcWglWF7K/AwblIUFselOt/QdOL9DSjpQGxLagO1s9wl53STIO8qGS4Ms0EJZyIXOEFMjFJ5xmjSy+S37By4sG7SsltQEHMxtbtFOaW5LV2wCrX+rUsRNqLMamZjgjcPO0/EgGCXIGMAYW4O7cwGZdXWYIhQ1Vwy+CsVMDdPkPgBXqK7nR/ey8KMs8ho5fMNgB5hBw/AL9fNGhRw3QTD6Q12Nkhl4VZES2EsZqlpNnJttnPdp847DUsT6yuLRlfiQfz5Cn9ysHFdXObMN5VYIiPFwHeYCZp1X2S4fDZooRE8uOLTfxWHPXwrhqSH", user_id: user.id
    user.all_ssh_keys.should include(key.key)
  end
end

describe :avatar_type do
  let(:user) { create(:user) }

  it "should be true if avatar is image" do
    user.update_attribute(:avatar, 'uploads/avatar.png')
    user.avatar_type.should be_true
  end

  # For a non-image upload, avatar_type returns the validation error array.
  it "should be false if avatar is html page" do
    user.update_attribute(:avatar, 'uploads/avatar.html')
    user.avatar_type.should == ["only images allowed"]
  end
end

# full_website_url normalises a bare host to an http:// URL but preserves
# an explicit scheme.
describe '#full_website_url' do
  let(:user) { create(:user) }

  it 'begins with http if website url omits it' do
    user.website_url = 'test.com'
    expect(user.full_website_url).to eq 'http://test.com'
  end

  it 'begins with http if website url begins with http' do
    user.website_url = 'http://test.com'
    expect(user.full_website_url).to eq 'http://test.com'
  end

  it 'begins with https if website url begins with https' do
    user.website_url = 'https://test.com'
    expect(user.full_website_url).to eq 'https://test.com'
  end
end

# short_website_url strips any scheme for display purposes.
describe '#short_website_url' do
  let(:user) { create(:user) }

  it 'does not begin with http if website url omits it' do
    user.website_url = 'test.com'
    expect(user.short_website_url).to eq 'test.com'
  end

  it 'does not begin with http if website url begins with http' do
    user.website_url = 'http://test.com'
    expect(user.short_website_url).to eq 'test.com'
  end

  it 'does not begin with https if website url begins with https' do
    user.website_url = 'https://test.com'
    expect(user.short_website_url).to eq 'test.com'
  end
end
end
|
# Fierce gem namespace.
module Fierce
  # Gem version string (MAJOR.MINOR.PATCH).
  VERSION = "0.2.0"
end
Bump version to 0.2.1.
# Fierce gem namespace.
module Fierce
  # Gem version string (MAJOR.MINOR.PATCH).
  VERSION = "0.2.1"
end
|
require 'rails_helper'

# Unit specs for the User model's validations.
RSpec.describe User, type: :model do
  describe 'model validations' do
    # Sanity check: the factory itself must produce a valid record.
    it 'creates a valid user' do
      user = build(:user)
      expect(user).to be_valid
    end

    describe 'presence validations' do
      it 'requires a uid' do
        user = build(:user, uid: nil)
        expect(user).to be_invalid
        # Assert the error is attached to :uid specifically.
        expect(user.errors).to include :uid
      end

      it 'requires an email' do
        user = build(:user, email: nil)
        expect(user).to be_invalid
        expect(user.errors).to include :email
      end
    end

    describe 'uniqueness validations' do
      it 'requires a unique uid' do
        # Persist one record, then build (not save) a duplicate to trigger
        # the uniqueness validation rather than a DB constraint error.
        create(:user, uid: '1234')
        user = build(:user, uid: '1234')
        expect(user).to be_invalid
        expect(user.errors).to include :uid
      end

      it 'requires a unique email' do
        create(:user, email: 'email@example.com')
        user = build(:user, email: 'email@example.com')
        expect(user).to be_invalid
        expect(user.errors).to include :email
      end
    end
  end
end
Add a test verifying that destroying a user also destroys its associated Activity records.
require 'rails_helper'

# Unit specs for the User model's validations and destroy behaviour.
RSpec.describe User, type: :model do
  describe 'model validations' do
    # Sanity check: the factory itself must produce a valid record.
    it 'creates a valid user' do
      user = build(:user)
      expect(user).to be_valid
    end

    describe 'presence validations' do
      it 'requires a uid' do
        user = build(:user, uid: nil)
        expect(user).to be_invalid
        # Assert the error is attached to :uid specifically.
        expect(user.errors).to include :uid
      end

      it 'requires an email' do
        user = build(:user, email: nil)
        expect(user).to be_invalid
        expect(user.errors).to include :email
      end
    end

    describe 'uniqueness validations' do
      it 'requires a unique uid' do
        # Persist one record, then build (not save) a duplicate to trigger
        # the uniqueness validation rather than a DB constraint error.
        create(:user, uid: '1234')
        user = build(:user, uid: '1234')
        expect(user).to be_invalid
        expect(user.errors).to include :uid
      end

      it 'requires a unique email' do
        create(:user, email: 'email@example.com')
        user = build(:user, email: 'email@example.com')
        expect(user).to be_invalid
        expect(user.errors).to include :email
      end
    end
  end

  describe 'destroy' do
    it 'destroys all associated activities records' do
      user = create(:user)
      num_activities = 2
      num_activities.times { create(:activity, user: user) }
      # Relies on the User -> Activity association being dependent: :destroy.
      expect{ user.destroy }.to change{ Activity.count }.by(-num_activities)
    end
  end
end
|
class Bio::FinishM::Wanderer
include Bio::FinishM::Logging
# Defaults for the wander command. Every key whose value is interpolated
# into an option description below must be present here, otherwise the
# help text shows a blank "[default: ]".
DEFAULT_OPTIONS = {
  :contig_end_length => 200,
  :graph_search_leash_length => 20000,
  :unscaffold_first => false,
  :recoherence_kmer => 1,
  # Previously missing: --proceed-on-short-contigs interpolates this value
  # into its help text, and without the key the default displayed as nil.
  :proceed_on_short_contigs => false,
}.freeze
# Register the wander command's options on the supplied OptionParser and
# merge the command defaults into the options hash.
#
# optparse_object - OptionParser instance to configure
# options         - Hash that the parser callbacks populate
def add_options(optparse_object, options)
  optparse_object.banner = "\nUsage: finishm wander --contigs <contig_file> --fastq-gz <reads..> --output-connections <output.csv> --output-scaffolds <output.fasta>
Takes a set of contigs/scaffolds from a genome and finds connections in the graph between them. A connection here is given as
the length of the shortest path between them, without actually computing all the paths.
This can be used for scaffolding, because if a contig end only connects to one other contig end, then
those contigs might be scaffolded together.
This method can also be used for 'pre-scaffolding', in the following sense. If the shortest path between
two contig ends is 10kb, and a mate pair library with insert size 2kb suggests a linkage
between the two ends, then the mate pair linkage is likely false (as long as there is sufficient
coverage in the reads, and not overwhelming amounts of strain heterogeneity, etc.).
Example:
finishm wander --contigs contigs.fasta --fastq-gz reads.1.fq.gz,reads.2.fq.gz --output-scaffolds scaffolds.fasta
That will create a collapsed de-Bruijn graph from reads.1.fq.gz and reads.2.fq.gz, then try to find connections between
the starts and the ends of the contigs in contigs.fasta through the de-Bruijn graph. The new scaffolds are then
output to scaffolds.fasta
\n\n"
  options.merge!(DEFAULT_OPTIONS)

  optparse_object.separator "\nRequired arguments:\n\n"
  # BUGFIX: the description previously read "fasta file of single contig
  # containing Ns that are to be closed" — the gapfill command's help text,
  # not wander's.
  optparse_object.on("--contigs FILE", "fasta file of contigs or scaffolds to find connections between [required]") do |arg|
    options[:contigs_file] = arg
  end

  optparse_object.separator "\nOutput modes:\n\n"
  # Placeholder changed PATH -> FILE for consistency with --contigs.
  optparse_object.on("--output-scaffolds FILE", "Output scaffolds in FASTA format [required]") do |arg|
    options[:output_scaffolds_file] = arg
  end
  optparse_object.on("--output-connections FILE", "Output connections in tab-separated format [required]") do |arg|
    options[:output_connection_file] = arg
  end

  optparse_object.separator "\nThere must be some definition of reads too:\n\n" #TODO improve this help
  Bio::FinishM::ReadInput.new.add_options(optparse_object, options)

  optparse_object.separator "\nOptional arguments:\n\n"
  optparse_object.on("--overhang NUM", Integer, "Start assembling this far from the ends of the contigs [default: #{options[:contig_end_length] }]") do |arg|
    options[:contig_end_length] = arg.to_i
  end
  optparse_object.on("--recoherence-kmer NUM", Integer, "Use a kmer longer than the original velvet one, to help remove bubbles and circular paths [default: none]") do |arg|
    options[:recoherence_kmer] = arg
  end
  optparse_object.on("--leash-length NUM", Integer, "Don't explore too far in the graph, only this far and not much more [default: #{options[:graph_search_leash_length] }]") do |arg|
    options[:graph_search_leash_length] = arg
  end
  optparse_object.on("--unscaffold-first", "Break the scaffolds in the contigs file apart, and then wander between the resultant contigs. [default: #{options[:unscaffold_first] }]") do
    options[:unscaffold_first] = true
  end
  optparse_object.on("--proceed-on-short-contigs", "By default, when overly short contigs are encountered, finishm croaks. This option stops the croaking [default: #{options[:proceed_on_short_contigs] }]") do
    options[:proceed_on_short_contigs] = true
  end

  Bio::FinishM::GraphGenerator.new.add_options optparse_object, options
end
# Validate parsed command-line options for the wander command.
# Returns nil when everything is in order, otherwise a String describing
# the first problem found (printed by the caller before aborting).
def validate_options(options, argv)
  #TODO: give a better description of the error that has occurred
  #TODO: require reads options
  return "Dangling argument(s) found e.g. #{argv[0] }" unless argv.empty?

  # These options are mandatory for wandering.
  missing = [
    :contigs_file,
  ].find { |sym| options[sym].nil? }
  return "No option found to specify #{missing}." if missing

  # At least one kind of output must be requested.
  if options[:output_scaffolds_file].nil? and
    options[:output_connection_file].nil?
    return "Need to specify either output scaffolds or output connections file"
  end

  # Delegate read-input validation; nil from here means success.
  Bio::FinishM::ReadInput.new.validate_options(options, [])
end
# Entry point for the wander command: probes both ends of every contig,
# builds the de-Bruijn graph, finds connections between contig ends, and
# writes the scaffolds and/or connections files requested in options.
def run(options, argv=[])
  # Read in all the contigs sequences, removing those that are too short
  probe_sequences = []
  contig_sequences = []
  contig_names = []
  overly_short_sequence_count = 0
  process_sequence = lambda do |name, seq|
    if seq.length < 2*options[:contig_end_length]
      log.warn "Not attempting to make connections from this contig, as it is overly short: #{name}"
      overly_short_sequence_count += 1
      nil
    else
      contig_sequences.push seq.to_s
      contig_names.push name
      sequence = seq.seq
      # Two probes per contig: reverse complement of the start, then the end.
      fwd2 = Bio::Sequence::NA.new(sequence[0...options[:contig_end_length]])
      probe_sequences.push fwd2.reverse_complement.to_s
      probe_sequences.push sequence[(sequence.length-options[:contig_end_length])...sequence.length]
      # 'return' the probe indices that have been assigned
      [probe_sequences.length-2, probe_sequences.length-1]
    end
  end

  scaffolds = nil #Array of Bio::FinishM::ScaffoldBreaker::Scaffold objects.
  scaffolded_contig_to_probe_ids = {}
  if options[:unscaffold_first]
    log.info "Unscaffolding scaffolds (before trying to connect them together again)"
    scaffolds = Bio::FinishM::ScaffoldBreaker.new.break_scaffolds options[:contigs_file]
    scaffolds.each do |scaffold|
      scaffold.contigs.each do |contig|
        process_sequence.call contig.name, contig.sequence
      end
    end
  else
    # Else don't split up any of the sequences
    log.info "Reading input sequences.."
    Bio::FlatFile.foreach(options[:contigs_file]) do |seq|
      process_sequence.call seq.definition, seq.seq
    end
  end

  if overly_short_sequence_count > 0
    unless options[:proceed_on_short_contigs]
      raise "Not proceding as some contigs are too short (length < 2 * overhang). You might try: "+
        "(1) omitting the smaller contigs, (2) reducing the --overhang parameter, or "+
        "(3) using --proceed-on-short-contigs to continue optimistically ignoring the #{overly_short_sequence_count} short contigs"
    end
  end

  log.info "Searching from #{probe_sequences.length} different contig ends (#{probe_sequences.length / 2} contigs)"

  # Generate the graph with the probe sequences in it.
  read_input = Bio::FinishM::ReadInput.new
  read_input.parse_options options
  finishm_graph = Bio::FinishM::GraphGenerator.new.generate_graph(probe_sequences, read_input, options)

  log.info "Finding possible connections with recoherence kmer of #{options[:recoherence_kmer] }"
  all_connections = probed_graph_to_connections(finishm_graph, options)
  log.debug "Finished actual wandering, found #{all_connections.length} connections" if log.debug?

  # Determine scaffolding connections
  interpreter = Bio::FinishM::ConnectionInterpreter.new(all_connections, (0...contig_sequences.length))
  connections = interpreter.doubly_single_contig_connections
  log.debug "Found #{connections.length} connections between contigs that can be used for scaffolding" if log.debug?
  scaffolds = interpreter.scaffolds(connections)

  # Gather some stats
  circular_scaffold_names = []
  num_contigs_in_circular_scaffolds = 0
  num_singleton_contigs = 0
  num_scaffolded_contigs = 0
  scaffolds.each_with_index do |scaffold, i|
    if scaffold.circular?
      # BUGFIX: previously pushed the undefined local `name`, which raised
      # NameError whenever a circular scaffold was found. Record the same
      # name the scaffold is given in the FASTA output below.
      circular_scaffold_names.push "scaffold#{i+1}"
      num_contigs_in_circular_scaffolds += scaffold.contigs.length
    elsif scaffold.contigs.length == 1
      num_singleton_contigs += 1
    else
      num_scaffolded_contigs += scaffold.contigs.length
    end
  end
  log.info "Found #{circular_scaffold_names.length} circular scaffolds encompassing #{num_contigs_in_circular_scaffolds} contigs"
  log.info "#{num_scaffolded_contigs} contigs were incorporated into scaffolds"
  log.info "#{num_singleton_contigs} contigs were not incorporated into any scaffolds"

  unless options[:output_scaffolds_file].nil?
    File.open(options[:output_scaffolds_file],'w') do |scaffold_file|
      scaffolds.each_with_index do |scaffold, i|
        # Both branches of the original if/else produced the same name, so
        # the conditional has been collapsed.
        name = "scaffold#{i+1}"
        if scaffold.circular?
          name += ' circular'
        end
        scaffold_file.puts ">#{name}"
        # Output the NA sequence wrapped
        seq = scaffold.sequence(contig_sequences)
        scaffold_file.puts seq.gsub(/(.{80})/,"\\1\n").gsub(/\n$/,'')
      end
    end
  end

  # Write out all connections to the given file if wanted
  unless options[:output_connection_file].nil?
    File.open(options[:output_connection_file], 'w') do |out|
      all_connections.each do |conn|
        out.puts [
          "#{contig_names[conn.probe1.sequence_index]}:#{conn.probe1.side}",
          "#{contig_names[conn.probe2.sequence_index]}:#{conn.probe2.side}",
          conn.distance
        ].join("\t")
      end
    end
  end
  log.info "All done."
end
# Given a probed graph, wander between all probe nodes and return the Array
# of Bio::FinishM::ConnectionInterpreter::Connection objects whose
# calibrated distance is within the leash. Required options:
# * :graph_search_leash_length
# * :recoherence_kmer
def probed_graph_to_connections(finishm_graph, options)
  # Loop over the ends, trying to make connections from each one
  wanderer = Bio::AssemblyGraphAlgorithms::SingleCoherentWanderer.new
  raw_connections = wanderer.wander(finishm_graph, options[:graph_search_leash_length], options[:recoherence_kmer], finishm_graph.velvet_sequences, options)
  log.debug "Initially found #{raw_connections.length} connections with less distance than the leash length" if log.debug?

  # Probes were added in pairs: even indices are contig starts, odd indices
  # are contig ends.
  probe_descriptions = (0...finishm_graph.probe_nodes.length).collect do |i|
    probe = Bio::FinishM::ConnectionInterpreter::Probe.new
    if i.even?
      probe.side = :start
      probe.sequence_index = i / 2
    else
      probe.side = :end
      probe.sequence_index = (i-1) / 2
    end
    probe
  end

  # Gather connections ready for output
  calibrator = Bio::AssemblyGraphAlgorithms::AcyclicConnectionFinder.new
  kept_connections = []
  raw_connections.each do |node_indices, distance|
    calibrated_distance = calibrator.calibrate_distance_accounting_for_probes(
      finishm_graph,
      node_indices[0],
      node_indices[1],
      distance
      )
    connection = Bio::FinishM::ConnectionInterpreter::Connection.new
    connection.probe1 = probe_descriptions[node_indices[0]]
    connection.probe2 = probe_descriptions[node_indices[1]]
    connection.distance = calibrated_distance
    # Calibration can push a connection just past the leash; weed those out.
    if calibrated_distance > options[:graph_search_leash_length]
      log.debug "Disregarding connection #{connection} because it was ultimately outside the allowable leash length" if log.debug?
    else
      kept_connections.push connection
    end
  end
  return kept_connections
end
end
Update the `finishm wander` command-line help text.
class Bio::FinishM::Wanderer
include Bio::FinishM::Logging
# Defaults for the wander command. Every key whose value is interpolated
# into an option description below must be present here, otherwise the
# help text shows a blank "[default: ]".
DEFAULT_OPTIONS = {
  :contig_end_length => 200,
  :graph_search_leash_length => 20000,
  :unscaffold_first => false,
  :recoherence_kmer => 1,
  # Previously missing: --proceed-on-short-contigs interpolates this value
  # into its help text, and without the key the default displayed as nil.
  :proceed_on_short_contigs => false,
}.freeze
# Register the wander command's options on the supplied OptionParser and
# merge the command defaults into the options hash.
#
# optparse_object - OptionParser instance to configure
# options         - Hash that the parser callbacks populate
def add_options(optparse_object, options)
  optparse_object.banner = "\nUsage: finishm wander --contigs <contig_file> --fastq-gz <reads..> --output-connections <output.csv> --output-scaffolds <output.fasta>
Takes a set of contigs/scaffolds from a genome and finds connections in the graph between them. A connection here is given as
the length of the shortest path between them, without actually computing all the paths.
This can be used for scaffolding, because if a contig end only connects to one other contig end, then
those contigs might be scaffolded together.
This method can also be used for 'pre-scaffolding', in the following sense. If the shortest path between
two contig ends is 10kb, and a mate pair library with insert size 2kb suggests a linkage
between the two ends, then the mate pair linkage is likely false (as long as there is sufficient
coverage in the reads, and not overwhelming amounts of strain heterogeneity, etc.).
Example:
finishm wander --contigs contigs.fasta --fastq-gz reads.1.fq.gz,reads.2.fq.gz --output-scaffolds scaffolds.fasta
That will create a collapsed de-Bruijn graph from reads.1.fq.gz and reads.2.fq.gz, then try to find connections between
the starts and the ends of the contigs in contigs.fasta through the de-Bruijn graph. The new scaffolds are then
output to scaffolds.fasta
\n\n"
  options.merge!(DEFAULT_OPTIONS)

  optparse_object.separator "\nRequired arguments:\n\n"
  # BUGFIX: the description previously read "fasta file of single contig
  # containing Ns that are to be closed" — the gapfill command's help text,
  # not wander's.
  optparse_object.on("--contigs FILE", "fasta file of contigs or scaffolds to find connections between [required]") do |arg|
    options[:contigs_file] = arg
  end

  optparse_object.separator "\nOutput modes:\n\n"
  optparse_object.on("--output-scaffolds FILE", "Output scaffolds in FASTA format [required]") do |arg|
    options[:output_scaffolds_file] = arg
  end
  optparse_object.on("--output-connections FILE", "Output connections in tab-separated format [required]") do |arg|
    options[:output_connection_file] = arg
  end

  optparse_object.separator "\nThere must be some definition of reads too:\n\n" #TODO improve this help
  Bio::FinishM::ReadInput.new.add_options(optparse_object, options)

  optparse_object.separator "\nOptional arguments:\n\n"
  optparse_object.on("--overhang NUM", Integer, "Start assembling this far from the ends of the contigs [default: #{options[:contig_end_length] }]") do |arg|
    options[:contig_end_length] = arg.to_i
  end
  optparse_object.on("--recoherence-kmer NUM", Integer, "Use a kmer longer than the original velvet one, to help remove bubbles and circular paths [default: none]") do |arg|
    options[:recoherence_kmer] = arg
  end
  optparse_object.on("--leash-length NUM", Integer, "Don't explore too far in the graph, only this far and not much more [default: #{options[:graph_search_leash_length] }]") do |arg|
    options[:graph_search_leash_length] = arg
  end
  optparse_object.on("--unscaffold-first", "Break the scaffolds in the contigs file apart, and then wander between the resultant contigs. [default: #{options[:unscaffold_first] }]") do
    options[:unscaffold_first] = true
  end
  optparse_object.on("--proceed-on-short-contigs", "By default, when overly short contigs are encountered, finishm croaks. This option stops the croaking [default: #{options[:proceed_on_short_contigs] }]") do
    options[:proceed_on_short_contigs] = true
  end

  Bio::FinishM::GraphGenerator.new.add_options optparse_object, options
end
# Validate the parsed command line options.
#
# Returns a String describing the first problem found, or delegates to
# ReadInput#validate_options (which returns nil on success).
def validate_options(options, argv)
  #TODO: give a better description of the error that has occurred
  #TODO: require reads options

  # No positional arguments are accepted by this subcommand.
  return "Dangling argument(s) found e.g. #{argv[0] }" unless argv.empty?

  # Options that must always be present.
  missing = [:contigs_file].find { |sym| options[sym].nil? }
  return "No option found to specify #{missing}." if missing

  # At least one of the two output modes must be requested.
  if options[:output_scaffolds_file].nil? && options[:output_connection_file].nil?
    return "Need to specify either output scaffolds or output connections file"
  end

  # nil from here means all options were parsed successfully.
  Bio::FinishM::ReadInput.new.validate_options(options, [])
end
# Entry point for the wander operation: reads contig sequences, cuts a probe
# off each end of each sufficiently long contig, builds an assembly graph
# containing those probes, searches for connections between contig ends, and
# writes scaffolds (FASTA) and/or a connection table (TSV).
#
# options - Hash of parsed command line options (see add_options). Keys used:
#   :contigs_file, :contig_end_length, :unscaffold_first,
#   :proceed_on_short_contigs, :output_scaffolds_file, :output_connection_file.
# argv - unused beyond earlier validation; defaults to [].
def run(options, argv=[])
  # Read in all the contigs sequences, removing those that are too short
  probe_sequences = []
  contig_sequences = []
  contig_names = []
  overly_short_sequence_count = 0

  # Convert one input sequence into two probes (one per contig end).
  # Returns the pair of probe indices assigned, or nil when the contig is too
  # short to carry a probe on each end without the probes overlapping.
  process_sequence = lambda do |name, seq|
    if seq.length < 2*options[:contig_end_length]
      log.warn "Not attempting to make connections from this contig, as it is overly short: #{name}"
      overly_short_sequence_count += 1
      nil
    else
      contig_sequences.push seq.to_s
      contig_names.push name
      sequence = seq.seq
      # The probe taken from the start of the contig points "outwards", so
      # reverse complement it; the end probe is used as-is.
      fwd2 = Bio::Sequence::NA.new(sequence[0...options[:contig_end_length]])
      probe_sequences.push fwd2.reverse_complement.to_s
      probe_sequences.push sequence[(sequence.length-options[:contig_end_length])...sequence.length]
      # 'return' the probe indices that have been assigned
      [probe_sequences.length-2, probe_sequences.length-1]
    end
  end

  scaffolds = nil #Array of Bio::FinishM::ScaffoldBreaker::Scaffold objects.
  if options[:unscaffold_first]
    log.info "Unscaffolding scaffolds (before trying to connect them together again)"
    scaffolds = Bio::FinishM::ScaffoldBreaker.new.break_scaffolds options[:contigs_file]
    scaffolds.each do |scaffold|
      scaffold.contigs.each do |contig|
        process_sequence.call contig.name, contig.sequence
      end
    end
  else
    # Else don't split up any of the sequences
    log.info "Reading input sequences.."
    Bio::FlatFile.foreach(options[:contigs_file]) do |seq|
      process_sequence.call seq.definition, seq.seq
    end
  end

  if overly_short_sequence_count > 0
    unless options[:proceed_on_short_contigs]
      raise "Not proceding as some contigs are too short (length < 2 * overhang). You might try: "+
        "(1) omitting the smaller contigs, (2) reducing the --overhang parameter, or "+
        "(3) using --proceed-on-short-contigs to continue optimistically ignoring the #{overly_short_sequence_count} short contigs"
    end
  end
  log.info "Searching from #{probe_sequences.length} different contig ends (#{probe_sequences.length / 2} contigs)"

  # Generate the graph with the probe sequences in it.
  read_input = Bio::FinishM::ReadInput.new
  read_input.parse_options options
  finishm_graph = Bio::FinishM::GraphGenerator.new.generate_graph(probe_sequences, read_input, options)

  log.info "Finding possible connections with recoherence kmer of #{options[:recoherence_kmer] }"
  all_connections = probed_graph_to_connections(finishm_graph, options)
  log.debug "Finished actual wandering, found #{all_connections.length} connections" if log.debug?

  # Determine scaffolding connections
  interpreter = Bio::FinishM::ConnectionInterpreter.new(all_connections, (0...contig_sequences.length))
  connections = interpreter.doubly_single_contig_connections
  log.debug "Found #{connections.length} connections between contigs that can be used for scaffolding" if log.debug?
  scaffolds = interpreter.scaffolds(connections)

  # Gather some stats
  circular_scaffold_names = []
  num_contigs_in_circular_scaffolds = 0
  num_singleton_contigs = 0
  num_scaffolded_contigs = 0
  scaffolds.each_with_index do |scaffold, i|
    if scaffold.circular?
      # BUGFIX: this previously pushed the undefined local `name`, raising
      # NameError the first time a circular scaffold was encountered. Only the
      # length of this array is ever reported, so a generated label suffices.
      circular_scaffold_names.push "scaffold#{i+1}"
      num_contigs_in_circular_scaffolds += scaffold.contigs.length
    elsif scaffold.contigs.length == 1
      num_singleton_contigs += 1
    else
      num_scaffolded_contigs += scaffold.contigs.length
    end
  end
  log.info "Found #{circular_scaffold_names.length} circular scaffolds encompassing #{num_contigs_in_circular_scaffolds} contigs"
  log.info "#{num_scaffolded_contigs} contigs were incorporated into scaffolds"
  log.info "#{num_singleton_contigs} contigs were not incorporated into any scaffolds"

  unless options[:output_scaffolds_file].nil?
    File.open(options[:output_scaffolds_file],'w') do |scaffold_file|
      scaffolds.each_with_index do |scaffold, i|
        # Both branches of the original if/else generated the identical name;
        # collapsed into a single assignment.
        name = "scaffold#{i+1}"
        name += ' circular' if scaffold.circular?
        scaffold_file.puts ">#{name}"
        # Output the NA sequence wrapped at 80 columns, without a trailing blank line.
        seq = scaffold.sequence(contig_sequences)
        scaffold_file.puts seq.gsub(/(.{80})/,"\\1\n").gsub(/\n$/,'')
      end
    end
  end

  # Write out all connections to the given file if wanted
  unless options[:output_connection_file].nil?
    File.open(options[:output_connection_file], 'w') do |out|
      all_connections.each do |conn|
        out.puts [
          "#{contig_names[conn.probe1.sequence_index]}:#{conn.probe1.side}",
          "#{contig_names[conn.probe2.sequence_index]}:#{conn.probe2.side}",
          conn.distance
        ].join("\t")
      end
    end
  end
  log.info "All done."
end
# Given a probed graph, wander between all the nodes, and then return an
# instance of Bio::FinishM::ConnectionInterpreter::Scaffold. Required options:
# * :graph_search_leash_length
# * :recoherence_kmer
def probed_graph_to_connections(finishm_graph, options)
# Loop over the ends, trying to make connections from each one
cartographer = Bio::AssemblyGraphAlgorithms::SingleCoherentWanderer.new
first_connections = cartographer.wander(finishm_graph, options[:graph_search_leash_length], options[:recoherence_kmer], finishm_graph.velvet_sequences, options)
log.debug "Initially found #{first_connections.length} connections with less distance than the leash length" if log.debug?
# Describe each probe: probes were pushed in (start, end) pairs per contig,
# so an even index i is the :start probe of contig i/2 and an odd index is
# the :end probe of contig (i-1)/2.
probe_descriptions = []
(0...finishm_graph.probe_nodes.length).each do |i|
desc = Bio::FinishM::ConnectionInterpreter::Probe.new
if i % 2 == 0
desc.side = :start
desc.sequence_index = i / 2
else
desc.side = :end
desc.sequence_index = (i-1) / 2
end
probe_descriptions.push desc
end
# Gather connections ready for output
distance_calibrator = Bio::AssemblyGraphAlgorithms::AcyclicConnectionFinder.new
all_connections = []
first_connections.each do |node_indices, distance|
# Adjust the raw graph distance to account for the probe sequences themselves.
calibrated_distance = distance_calibrator.calibrate_distance_accounting_for_probes(
finishm_graph,
node_indices[0],
node_indices[1],
distance
)
# It is possible that a connection just larger than the leash length is returned.
# weed these out.
conn = Bio::FinishM::ConnectionInterpreter::Connection.new
conn.probe1 = probe_descriptions[node_indices[0]]
conn.probe2 = probe_descriptions[node_indices[1]]
conn.distance = calibrated_distance
if calibrated_distance > options[:graph_search_leash_length]
log.debug "Disregarding connection #{conn} because it was ultimately outside the allowable leash length" if log.debug?
else
all_connections.push conn
end
end
return all_connections
end
end
|
require "rails_helper"
# Model spec for User: creation, email validations, password rules,
# omniauth integration (for_auth / apply_oauth / associate_account /
# setup_authentication) and role helpers.
# NOTE(review): get_omniauth / get_canvas_omniauth(_no_email) are spec
# helpers defined elsewhere in the suite — not visible here.
describe User, type: :model do
  before do
    @user = FactoryBot.create(:user)
    @attr = {
      name: "Example User",
      email: "user@example.com",
      password: "foobar888",
      password_confirmation: "foobar888",
    }
  end
  it "should create a new instance given a valid attribute" do
    user = User.new(@attr)
    user.skip_confirmation!
    user.save!
  end
  it "should require an email address" do
    no_email_user = User.new(@attr.merge(email: ""))
    expect(no_email_user).to be_invalid
  end
  it "should accept valid email addresses" do
    addresses = %w[user@foo.com THE_USER@foo.bar.org first.last@foo.jp]
    addresses.each do |address|
      valid_email_user = User.new(@attr.merge(email: address))
      expect(valid_email_user).to be_valid
    end
  end
  it "should reject invalid email addresses" do
    addresses = %w[user@foo,com user_at_foo.org example.user@foo.]
    addresses.each do |address|
      invalid_email_user = User.new(@attr.merge(email: address))
      expect(invalid_email_user).to be_invalid
    end
  end
  it "should reject duplicate email addresses" do
    user = FactoryBot.create(:user)
    user_with_duplicate_email = User.new(@attr.merge(email: user.email))
    expect(user_with_duplicate_email).to be_invalid
  end
  it "should reject email addresses identical up to case" do
    email = "a_random_uppercase_email@example.com"
    FactoryBot.create(:user, email: email)
    user_with_duplicate_email = User.new(@attr.merge(email: email.upcase))
    expect(user_with_duplicate_email).to be_invalid
  end
  it "should raise an exception if an attempt is made to write a duplicate email to the database" do
    user = FactoryBot.create(:user)
    user_with_duplicate_email = User.new(@attr.merge(email: user.email))
    expect do
      user_with_duplicate_email.save(validate: false)
    end.to raise_error(ActiveRecord::RecordNotUnique)
  end
  describe "passwords" do
    it "should have a password attribute" do
      expect(@user).to respond_to(:password)
    end
    it "should have a password confirmation attribute" do
      expect(@user).to respond_to(:password_confirmation)
    end
  end
  describe "password validations" do
    it "should require a password" do
      # FIX: build the user directly from @attr like the sibling validation
      # examples do. Going through FactoryBot.build here mixed factory
      # defaults with the overridden blank password, which can mask the
      # condition under test.
      user = User.new(@attr.merge(password: "", password_confirmation: ""))
      expect(user).to be_invalid
    end
    it "should require a matching password confirmation" do
      user = User.new(@attr.merge(password_confirmation: "invalid"))
      expect(user).to be_invalid
    end
    it "should reject short passwords" do
      short = "a" * 5
      hash = @attr.merge(password: short, password_confirmation: short)
      user = User.new(hash)
      expect(user).to be_invalid
    end
  end
  describe "password encryption" do
    it "should have an encrypted password attribute" do
      expect(@user).to respond_to(:encrypted_password)
    end
    it "should set the encrypted password attribute" do
      expect(@user.encrypted_password).to be_present
    end
  end
  describe "display_name" do
    it "should provide the name" do
      user = FactoryBot.create(:user, name: "test guy")
      expect(user.display_name).to eq("test guy")
    end
  end
  describe "omniauth" do
    before do
      @uid = "test"
      @provider = "facebook"
      @existing_email = "test@example.com"
      @new_email = "newtest@example.com"
      @provider_url = "https://www.facebook.com"
    end
    describe "for_auth" do
      before do
        @user = FactoryBot.create(:user, email: @existing_email)
        @user.authentications.create!(uid: @uid, provider: @provider, provider_url: @provider_url)
      end
      describe "user already exists" do
        it "should find the existing user" do
          auth = get_omniauth(
            "uuid" => @uid,
            "provider" => @provider,
            "facebook" => {
              "email" => @existing_email,
              "url" => @provider_url,
            },
          )
          user = User.for_auth(auth)
          expect(user.id).to eq(@user.id)
          expect(user.email).to eq(@existing_email)
        end
      end
      describe "user doesn't exist" do
        it "should not find a user" do
          auth = get_omniauth(
            "uuid" => "non_existing_uid",
            "provider" => @provider,
            "facebook" => { "email" => "other@example.com" },
          )
          user = User.for_auth(auth)
          expect(user).to be_nil
        end
      end
    end
    describe "apply_oauth" do
      it "should add values from the omniauth result to the user" do
        auth = get_omniauth("uuid" => @uid, "provider" => @provider, "facebook" => { "email" => @new_email })
        user = User.new
        user.apply_oauth(auth)
        expect(user.email).to eq(@new_email)
        expect(user.name).to eq("foo bar") # from default omniauth test data
      end
    end
    describe "oauth_name" do
      it "should extract the correct name from the auth object" do
        auth = get_canvas_omniauth
        name = User.oauth_name(auth)
        expect(name).to eq("Test Guy")
      end
    end
    describe "oauth_email" do
      it "should extract the correct email from the auth object" do
        auth = get_canvas_omniauth
        email = User.oauth_email(auth)
        expect(email).to eq("testguy@example.com")
      end
      it "should handle a request that doesn't include an email" do
        auth = get_canvas_omniauth_no_email
        email = User.oauth_email(auth)
        expect(email).to eq("1@atomicjolt.instructure.com")
      end
    end
    describe "params_for_create" do
      it "should get the create parameters for the user" do
        auth = get_canvas_omniauth
        attributes = User.params_for_create(auth)
        expect(attributes[:email]).to eq(auth["extra"]["raw_info"]["primary_email"])
        expect(attributes[:name]).to eq(auth["info"]["name"])
      end
    end
    describe "associate_account" do
      before do
        @uid = "test"
        @provider = "facebook"
        @new_email = "newtest@example.com"
      end
      it "should add an authentication for an existing user account" do
        user = FactoryBot.create(:user, email: "test@example.com")
        auth = get_omniauth(
          "uuid" => @uid,
          "provider" => @provider,
          "facebook" => { "email" => @new_email },
        )
        count = user.authentications.length
        user.associate_account(auth)
        expect(user.authentications.length).to eq(count + 1)
        expect(user.authentications.last.uid).to eq(@uid)
      end
    end
    describe "setup_authentication" do
      before do
        @uid = "anewuser"
        @email = "anewuser@example.com"
      end
      it "should set the provider url" do
        provider_url = "http://www.example.com"
        auth = get_omniauth(
          "uuid" => @uid,
          "provider" => @provider,
          "facebook" => { "email" => @email, "url" => provider_url },
        )
        auth = @user.setup_authentication(auth)
        expect(auth.provider_url).to eq(provider_url)
      end
      it "should set the provider url without the path" do
        provider_url = "http://www.example.com/some/path"
        auth = get_omniauth(
          "uuid" => @uid,
          "provider" => @provider,
          "facebook" => { "email" => @email, "url" => provider_url },
        )
        auth = @user.setup_authentication(auth)
        expect(auth.provider_url).to eq("http://www.example.com")
      end
      it "should handle sub domains" do
        provider_url = "http://foo.example.com/some/path"
        auth = get_omniauth(
          "uuid" => @uid,
          "provider" => @provider,
          "facebook" => { "email" => @email, "url" => provider_url },
        )
        auth = @user.setup_authentication(auth)
        expect(auth.provider_url).to eq("http://foo.example.com")
      end
    end
  end
  describe "roles" do
    describe "add_to_role" do
      it "adds the user to given role" do
        user = FactoryBot.create(:user)
        user.add_to_role("thefoo")
        expect(user.role?("thefoo")).to be true
      end
      it "adds the user to given role with a context" do
        context_id = "asdf"
        user = FactoryBot.create(:user)
        user.add_to_role("thefoo", context_id)
        expect(user.role?("thefoo", context_id)).to be true
      end
    end
    describe "has_role?" do
      it "checks to see if the user is any of the specified roles with context" do
        context_id = "asdf"
        user = FactoryBot.create(:user)
        user.add_to_role("thefoo", context_id)
        user.add_to_role("thewall")
        expect(user.has_role?(context_id, "thefoo", "brick")).to be true
        expect(user.has_role?(nil, "thewall", "brick")).to be true
        expect(user.has_role?(nil, "brick", "foo")).to be false
      end
    end
    describe "any_role?" do
      it "checks to see if the user is any of the specified roles" do
        user = FactoryBot.create(:user)
        user.add_to_role("thefoo")
        user.add_to_role("thewall")
        expect(user.any_role?("thewall", "brick")).to be true
        expect(user.any_role?("brick", "foo")).to be false
      end
    end
  end
end
Fix spec
require "rails_helper"
# Model spec for User: creation, email validations, password rules,
# omniauth integration (for_auth / apply_oauth / associate_account /
# setup_authentication) and role helpers.
# NOTE(review): get_omniauth / get_canvas_omniauth(_no_email) are spec
# helpers defined elsewhere in the suite — not visible here.
describe User, type: :model do
before do
@user = FactoryBot.create(:user)
@attr = {
name: "Example User",
email: "user@example.com",
password: "foobar888",
password_confirmation: "foobar888",
}
end
it "should create a new instance given a valid attribute" do
user = User.new(@attr)
user.skip_confirmation!
user.save!
end
it "should require an email address" do
no_email_user = User.new(@attr.merge(email: ""))
expect(no_email_user).to be_invalid
end
it "should accept valid email addresses" do
addresses = %w[user@foo.com THE_USER@foo.bar.org first.last@foo.jp]
addresses.each do |address|
valid_email_user = User.new(@attr.merge(email: address))
expect(valid_email_user).to be_valid
end
end
it "should reject invalid email addresses" do
addresses = %w[user@foo,com user_at_foo.org example.user@foo.]
addresses.each do |address|
invalid_email_user = User.new(@attr.merge(email: address))
expect(invalid_email_user).to be_invalid
end
end
it "should reject duplicate email addresses" do
user = FactoryBot.create(:user)
user_with_duplicate_email = User.new(@attr.merge(email: user.email))
expect(user_with_duplicate_email).to be_invalid
end
it "should reject email addresses identical up to case" do
email = "a_random_uppercase_email@example.com"
FactoryBot.create(:user, email: email)
user_with_duplicate_email = User.new(@attr.merge(email: email.upcase))
expect(user_with_duplicate_email).to be_invalid
end
# Bypasses validations to prove the DB-level unique index also guards email.
it "should raise an exception if an attempt is made to write a duplicate email to the database" do
user = FactoryBot.create(:user)
user_with_duplicate_email = User.new(@attr.merge(email: user.email))
expect do
user_with_duplicate_email.save(validate: false)
end.to raise_error(ActiveRecord::RecordNotUnique)
end
describe "passwords" do
it "should have a password attribute" do
expect(@user).to respond_to(:password)
end
it "should have a password confirmation attribute" do
expect(@user).to respond_to(:password_confirmation)
end
end
describe "password validations" do
it "should require a password" do
user = User.new(@attr.merge(password: "", password_confirmation: ""))
expect(user).to be_invalid
end
it "should require a matching password confirmation" do
user = User.new(@attr.merge(password_confirmation: "invalid"))
expect(user).to be_invalid
end
it "should reject short passwords" do
short = "a" * 5
hash = @attr.merge(password: short, password_confirmation: short)
user = User.new(hash)
expect(user).to be_invalid
end
end
describe "password encryption" do
it "should have an encrypted password attribute" do
expect(@user).to respond_to(:encrypted_password)
end
it "should set the encrypted password attribute" do
expect(@user.encrypted_password).to be_present
end
end
describe "display_name" do
it "should provide the name" do
user = FactoryBot.create(:user, name: "test guy")
expect(user.display_name).to eq("test guy")
end
end
describe "omniauth" do
before do
@uid = "test"
@provider = "facebook"
@existing_email = "test@example.com"
@new_email = "newtest@example.com"
@provider_url = "https://www.facebook.com"
end
describe "for_auth" do
before do
@user = FactoryBot.create(:user, email: @existing_email)
@user.authentications.create!(uid: @uid, provider: @provider, provider_url: @provider_url)
end
describe "user already exists" do
it "should find the existing user" do
auth = get_omniauth(
"uuid" => @uid,
"provider" => @provider,
"facebook" => {
"email" => @existing_email,
"url" => @provider_url,
},
)
user = User.for_auth(auth)
expect(user.id).to eq(@user.id)
expect(user.email).to eq(@existing_email)
end
end
describe "user doesn't exist" do
it "should not find a user" do
auth = get_omniauth(
"uuid" => "non_existing_uid",
"provider" => @provider,
"facebook" => { "email" => "other@example.com" },
)
user = User.for_auth(auth)
expect(user).to be_nil
end
end
end
describe "apply_oauth" do
it "should add values from the omniauth result to the user" do
auth = get_omniauth("uuid" => @uid, "provider" => @provider, "facebook" => { "email" => @new_email })
user = User.new
user.apply_oauth(auth)
expect(user.email).to eq(@new_email)
expect(user.name).to eq("foo bar") # from default omniauth test data
end
end
describe "oauth_name" do
it "should extract the correct name from the auth object" do
auth = get_canvas_omniauth
name = User.oauth_name(auth)
expect(name).to eq("Test Guy")
end
end
describe "oauth_email" do
it "should extract the correct email from the auth object" do
auth = get_canvas_omniauth
email = User.oauth_email(auth)
expect(email).to eq("testguy@example.com")
end
it "should handle a request that doesn't include an email" do
auth = get_canvas_omniauth_no_email
email = User.oauth_email(auth)
expect(email).to eq("1@atomicjolt.instructure.com")
end
end
describe "params_for_create" do
it "should get the create parameters for the user" do
auth = get_canvas_omniauth
attributes = User.params_for_create(auth)
expect(attributes[:email]).to eq(auth["extra"]["raw_info"]["primary_email"])
expect(attributes[:name]).to eq(auth["info"]["name"])
end
end
describe "associate_account" do
before do
@uid = "test"
@provider = "facebook"
@new_email = "newtest@example.com"
end
it "should add an authentication for an existing user account" do
user = FactoryBot.create(:user, email: "test@example.com")
auth = get_omniauth(
"uuid" => @uid,
"provider" => @provider,
"facebook" => { "email" => @new_email },
)
count = user.authentications.length
user.associate_account(auth)
expect(user.authentications.length).to eq(count + 1)
expect(user.authentications.last.uid).to eq(@uid)
end
end
describe "setup_authentication" do
before do
@uid = "anewuser"
@email = "anewuser@example.com"
end
it "should set the provider url" do
provider_url = "http://www.example.com"
auth = get_omniauth(
"uuid" => @uid,
"provider" => @provider,
"facebook" => { "email" => @email, "url" => provider_url },
)
auth = @user.setup_authentication(auth)
expect(auth.provider_url).to eq(provider_url)
end
it "should set the provider url without the path" do
provider_url = "http://www.example.com/some/path"
auth = get_omniauth(
"uuid" => @uid,
"provider" => @provider,
"facebook" => { "email" => @email, "url" => provider_url },
)
auth = @user.setup_authentication(auth)
expect(auth.provider_url).to eq("http://www.example.com")
end
it "should handle sub domains" do
provider_url = "http://foo.example.com/some/path"
auth = get_omniauth(
"uuid" => @uid,
"provider" => @provider,
"facebook" => { "email" => @email, "url" => provider_url },
)
auth = @user.setup_authentication(auth)
expect(auth.provider_url).to eq("http://foo.example.com")
end
end
end
describe "roles" do
describe "add_to_role" do
it "adds the user to given role" do
user = FactoryBot.create(:user)
user.add_to_role("thefoo")
expect(user.role?("thefoo")).to be true
end
it "adds the user to given role with a context" do
context_id = "asdf"
user = FactoryBot.create(:user)
user.add_to_role("thefoo", context_id)
expect(user.role?("thefoo", context_id)).to be true
end
end
describe "has_role?" do
it "checks to see if the user is any of the specified roles with context" do
context_id = "asdf"
user = FactoryBot.create(:user)
user.add_to_role("thefoo", context_id)
user.add_to_role("thewall")
expect(user.has_role?(context_id, "thefoo", "brick")).to be true
expect(user.has_role?(nil, "thewall", "brick")).to be true
expect(user.has_role?(nil, "brick", "foo")).to be false
end
end
describe "any_role?" do
it "checks to see if the user is any of the specified roles" do
user = FactoryBot.create(:user)
user.add_to_role("thefoo")
user.add_to_role("thewall")
expect(user.any_role?("thewall", "brick")).to be true
expect(user.any_role?("brick", "foo")).to be false
end
end
end
end
|
require "fit-commit/installer"
module FitCommit
  # Thin command-line entry point for the fit-commit executable.
  class Cli
    attr_accessor :args

    def initialize(*args)
      self.args = args
    end

    # Dispatches on the first CLI argument. Only "install" is recognised;
    # any other (or missing) action prints usage to stderr and returns false.
    def execute
      case args.shift
      when "install"
        FitCommit::Installer.new.install
      else
        warn "Usage: fit-commit install"
        false
      end
    end
  end
end
Add version to CLI help output
require "fit-commit/installer"
require "fit-commit/version"
module FitCommit
  # Thin command-line entry point for the fit-commit executable.
  class Cli
    attr_accessor :args

    def initialize(*args)
      self.args = args
    end

    # Dispatches on the first CLI argument. Only "install" is recognised;
    # any other (or missing) action prints the version banner plus usage to
    # stderr and returns false.
    def execute
      case args.shift
      when "install"
        FitCommit::Installer.new.install
      else
        warn "fit-commit v#{FitCommit::VERSION}"
        warn "Usage: fit-commit install"
        false
      end
    end
  end
end
|
require 'spec_helper'
# Model spec for User (legacy RSpec "should" syntax): attribute presence,
# email format/uniqueness, password rules and #authenticate.
describe User do
before { @user = User.new(name: "Example User", email: "user@example.com",
password: "foobar", password_confirmation: "foobar") }
subject { @user }
it { should respond_to(:name) }
it { should respond_to(:email) }
it { should respond_to(:password_digest) }
it { should respond_to(:password) }
it { should respond_to(:password_confirmation) }
it { should respond_to(:authenticate) }
it { should be_valid }
describe "when name is not present" do
before { @user.name = " " }
it { should_not be_valid }
end
describe "when email is not present" do
before { @user.email = " " }
it { should_not be_valid }
end
describe "when name is too long" do
before { @user.name = "a" * 51 }
it { should_not be_valid }
end
describe "when email format is invalid" do
it "should be invalid" do
addresses = %w[user@foo,com user_at_foo.org example.user@foo.foo@bar_baz.com foo@bar+baz.xcom]
addresses.each do |invalid_address|
@user.email = invalid_address
expect(@user).not_to be_valid
end
end
end
describe "when email format is valid" do
it "should be valid" do
addresses = %w[user@foo.COM A_US-ER@f.b.org frst.lst@foo.jp a+b@baz.cn]
addresses.each do |valid_address|
@user.email = valid_address
expect(@user).to be_valid
end
end
end
# Duplicate saved with a different case to prove uniqueness is case-insensitive.
describe "when email address is already taken" do
before do
user_with_same_email = @user.dup
user_with_same_email.email = @user.email.upcase
user_with_same_email.save
end
it { should_not be_valid }
end
describe "when password is not present" do
before do
@user = User.new(name: "Example User", email: "user@example.com",
password: " ", password_confirmation: " ")
end
it { should_not be_valid }
end
describe "when password doesn't match confirmation" do
before { @user.password_confirmation = "mismatch" }
it { should_not be_valid }
end
describe "with a password that's too short" do
before { @user.password = @user.password_confirmation = "a" * 5 }
it { should be_invalid }
end
describe "return value of authenticate method" do
before { @user.save }
let(:found_user) { User.find_by_email(@user.email) }
describe "with valid password" do
it { should eq found_user.authenticate(@user.password) }
end
describe "with invalid password" do
let(:user_for_invalid_password) { found_user.authenticate("invalid") }
it { should_not eq user_for_invalid_password }
specify { expect(user_for_invalid_password).to be_false }
end
end
# NOTE(review): this nested block relies on rspec-its (`its`) and a `create`
# helper (presumably FactoryBot) with fixed name/email defaults — confirm
# those are configured in the suite.
describe User do
subject { create(:user) }
its(:name) { should == 'Bob Jones' }
its(:email) { should == 'bob.jones@example.com'}
end
end
More coverage
require 'spec_helper'
# Model spec for User (legacy RSpec "should" syntax): attribute presence,
# email format/uniqueness, password rules, #authenticate, and omniauth
# lookups for Facebook and Google.
describe User do
before { @user = User.new(name: "Example User", email: "user@example.com",
password: "foobar", password_confirmation: "foobar") }
subject { @user }
it { should respond_to(:name) }
it { should respond_to(:email) }
it { should respond_to(:password_digest) }
it { should respond_to(:password) }
it { should respond_to(:password_confirmation) }
it { should respond_to(:authenticate) }
it { should be_valid }
describe "when name is not present" do
before { @user.name = " " }
it { should_not be_valid }
end
# NOTE(review): @data is never assigned before being stubbed (it is nil),
# which only works with the legacy rspec-mocks monkey-patched `stub` —
# confirm this is intentional and still supported by the suite's RSpec.
describe "Facebook authentication" do
before { @data.stub(:info) { {"email" => "me@mydomain.com", "name" => "Me"} }}
subject {User.find_for_facebook_oauth(@data)}
its(:email) { should == "me@mydomain.com" }
its(:name) { should == "Me" }
end
describe "Google authentication" do
before { @data.stub(:info) { {"email" => "me@mydomain.com", "name" => "Me"} }}
subject {User.find_for_google_oauth2(@data)}
its(:email) { should == "me@mydomain.com" }
its(:name) { should == "Me" }
end
describe "when email is not present" do
before { @user.email = " " }
it { should_not be_valid }
end
describe "when name is too long" do
before { @user.name = "a" * 51 }
it { should_not be_valid }
end
describe "when email format is invalid" do
it "should be invalid" do
addresses = %w[user@foo,com user_at_foo.org example.user@foo.foo@bar_baz.com foo@bar+baz.xcom]
addresses.each do |invalid_address|
@user.email = invalid_address
expect(@user).not_to be_valid
end
end
end
describe "when email format is valid" do
it "should be valid" do
addresses = %w[user@foo.COM A_US-ER@f.b.org frst.lst@foo.jp a+b@baz.cn]
addresses.each do |valid_address|
@user.email = valid_address
expect(@user).to be_valid
end
end
end
# Duplicate saved with a different case to prove uniqueness is case-insensitive.
describe "when email address is already taken" do
before do
user_with_same_email = @user.dup
user_with_same_email.email = @user.email.upcase
user_with_same_email.save
end
it { should_not be_valid }
end
describe "when password is not present" do
before do
@user = User.new(name: "Example User", email: "user@example.com",
password: " ", password_confirmation: " ")
end
it { should_not be_valid }
end
describe "when password doesn't match confirmation" do
before { @user.password_confirmation = "mismatch" }
it { should_not be_valid }
end
describe "with a password that's too short" do
before { @user.password = @user.password_confirmation = "a" * 5 }
it { should be_invalid }
end
describe "return value of authenticate method" do
before { @user.save }
let(:found_user) { User.find_by_email(@user.email) }
describe "with valid password" do
it { should eq found_user.authenticate(@user.password) }
end
describe "with invalid password" do
let(:user_for_invalid_password) { found_user.authenticate("invalid") }
it { should_not eq user_for_invalid_password }
specify { expect(user_for_invalid_password).to be_false }
end
end
# NOTE(review): this nested block relies on rspec-its (`its`) and a `create`
# helper (presumably FactoryBot) with fixed name/email defaults — confirm
# those are configured in the suite.
describe User do
subject { create(:user) }
its(:name) { should == 'Bob Jones' }
its(:email) { should == 'bob.jones@example.com'}
end
end
|
require 'spec_helper'
# Spec for User login/email behaviour plus Batch counting for that user.
describe User do
before do
@user = User.create(:email => "testuser@example.com",
:password => "password",
:password_confirmation => "password")
end
after do
@user.delete
end
it "should have a login and email" do
@user.login.should == "testuser@example.com"
@user.email.should == "testuser@example.com"
end
# NOTE(review): the two Batch examples below query shared (apparently remote)
# state keyed only by creator email, so they depend on no pre-existing
# batches for this user and on example ordering — likely flaky.
it "should have zero batches by default" do
#b = Batch.find(:all, :query => {:creator => @user.email})
Batch.find(:all, :query => {:creator => @user.email}).count.should == 0
end
it "should now have one batch" do
#batch = Batch.find(:all, :query => {:creator => @user.email})
#batch.each do |b|
# puts b
# b.delete
#end
f = Batch.create(:creator => @user.email)
Batch.find(:all, :query => {:creator => @user.email}).count.should == 1
f.delete
end
end
Removing Batch tests that were not testing anything and were failing periodically.
require 'spec_helper'
# Spec for User login/email behaviour; the user is created fresh for each
# example and removed afterwards.
describe User do
before do
@user = User.create(:email => "testuser@example.com",
:password => "password",
:password_confirmation => "password")
end
after do
@user.delete
end
# Login is expected to mirror the email address.
it "should have a login and email" do
@user.login.should == "testuser@example.com"
@user.email.should == "testuser@example.com"
end
end
|
module Fog
class Collection < Array
Array.public_instance_methods(false).each do |method|
class_eval <<-RUBY
def #{method}(*args)
lazy_load
super
end
RUBY
end
%w[collect map reject select].each do |method|
class_eval <<-RUBY
def #{method}(*args)
lazy_load
self.class.new({}).concat(super)
end
RUBY
end
def self._load(marhsalled)
new(Marshal.load(marshalled))
end
def self.attribute(name, other_names = [])
class_eval <<-EOS, __FILE__, __LINE__
attr_accessor :#{name}
EOS
@attributes ||= []
@attributes |= [name]
for other_name in [*other_names]
aliases[other_name] = name
end
end
def self.model(new_model)
@model = new_model
end
def self.aliases
@aliases ||= {}
end
def self.attributes
@attributes ||= []
end
def _dump
Marshal.dump(attributes)
end
def attributes
attributes = {}
for attribute in self.class.attributes
attributes[attribute] = send("#{attribute}")
end
attributes
end
def connection=(new_connection)
@connection = new_connection
end
def connection
@connection
end
def create(attributes = {})
object = new(attributes)
object.save
object
end
def initialize(attributes = {})
merge_attributes(attributes)
@loaded = false
end
def inspect
Thread.current[:formatador] ||= Formatador.new
data = "#{Thread.current[:formatador].indentation}<#{self.class.name}\n"
Thread.current[:formatador].indent do
unless self.class.attributes.empty?
data << "#{Thread.current[:formatador].indentation}"
data << self.class.attributes.map {|attribute| "#{attribute}=#{send(attribute).inspect}"}.join(",\n#{Thread.current[:formatador].indentation}")
data << "\n"
end
data << "#{Thread.current[:formatador].indentation}["
unless self.empty?
data << "\n"
Thread.current[:formatador].indent do
data << self.map {|member| member.inspect}.join(",\n")
data << "\n"
end
data << Thread.current[:formatador].indentation
end
data << "]\n"
end
data << "#{Thread.current[:formatador].indentation}>"
data
end
def model
self.class.instance_variable_get('@model')
end
def merge_attributes(new_attributes = {})
for key, value in new_attributes
if aliased_key = self.class.aliases[key]
send("#{aliased_key}=", value)
else
send("#{key}=", value)
end
end
self
end
def new(attributes = {})
model.new(
attributes.merge!(
:collection => self,
:connection => connection
)
)
end
def reload
self.clear.concat(all)
end
def table(attributes = nil)
Formatador.display_table(self.map {|instance| instance.attributes}, attributes)
end
private
def lazy_load
unless @loaded
self.all
end
end
# Renames keys of `attributes` in place according to `mapping`
# (old key => new key); keys absent from `attributes` are left alone.
def remap_attributes(attributes, mapping)
  mapping.each do |from, to|
    attributes[to] = attributes.delete(from) if attributes.key?(from)
  end
end
end
end
fix select/reject vs lazy_loading weirdness
module Fog
  # Base class for collections of Fog model objects.
  #
  # Inherits from Array and is lazily populated: the first time any Array
  # method touches the collection, #lazy_load calls #all (implemented by
  # subclasses), which calls #load with the fetched records.
  class Collection < Array
    # Wrap every public Array method so the collection is fetched from the
    # backend before it is read or mutated.
    Array.public_instance_methods(false).each do |method|
      class_eval <<-RUBY
        def #{method}(*args)
          lazy_load
          super
        end
      RUBY
    end

    # select/reject must return a collection of the same class (already
    # marked loaded via #load) rather than a plain Array, so that chained
    # calls keep working.
    %w[reject select].each do |method|
      class_eval <<-RUBY
        def #{method}(*args)
          lazy_load
          self.class.new(:connection => self.connection).load(super)
        end
      RUBY
    end

    # Marshal hook: rebuild a collection from the attribute hash written
    # by #_dump.
    # FIX: the parameter was misspelled `marhsalled` while the body read
    # `marshalled`, so Marshal.load always raised NameError.
    def self._load(marshalled)
      new(Marshal.load(marshalled))
    end

    # Declares an attribute: generates accessors and registers the name so
    # #attributes and Marshal round-trips can enumerate it. `other_names`
    # lists aliases resolved to the canonical name by #merge_attributes.
    def self.attribute(name, other_names = [])
      class_eval <<-EOS, __FILE__, __LINE__
        attr_accessor :#{name}
      EOS
      @attributes ||= []
      @attributes |= [name]
      for other_name in [*other_names]
        aliases[other_name] = name
      end
    end

    # Registers the model class wrapped by this collection (see #model).
    def self.model(new_model)
      @model = new_model
    end

    # Map of alias name => canonical attribute name.
    def self.aliases
      @aliases ||= {}
    end

    # Names of all declared attributes for this class.
    def self.attributes
      @attributes ||= []
    end

    # Marshal hook: a collection serialises as its attribute hash.
    # FIX: Marshal invokes _dump with a recursion-depth argument; the old
    # zero-arity signature made Marshal.dump raise ArgumentError. The
    # default preserves direct zero-argument calls.
    def _dump(level = nil)
      Marshal.dump(attributes)
    end

    # Current attribute values as a Hash of name => value.
    def attributes
      attributes = {}
      for attribute in self.class.attributes
        attributes[attribute] = send("#{attribute}")
      end
      attributes
    end

    # Backend connection used when building model instances.
    def connection=(new_connection)
      @connection = new_connection
    end

    def connection
      @connection
    end

    # Builds a model instance from `attributes`, saves it, and returns it.
    def create(attributes = {})
      object = new(attributes)
      object.save
      object
    end

    def initialize(attributes = {})
      merge_attributes(attributes)
      # Not loaded until #load runs (checked by #lazy_load).
      @loaded = false
    end

    # Pretty multi-line representation rendered with the Formatador gem
    # (thread-local instance so nested #inspect calls indent correctly).
    def inspect
      Thread.current[:formatador] ||= Formatador.new
      data = "#{Thread.current[:formatador].indentation}<#{self.class.name}\n"
      Thread.current[:formatador].indent do
        unless self.class.attributes.empty?
          data << "#{Thread.current[:formatador].indentation}"
          data << self.class.attributes.map {|attribute| "#{attribute}=#{send(attribute).inspect}"}.join(",\n#{Thread.current[:formatador].indentation}")
          data << "\n"
        end
        data << "#{Thread.current[:formatador].indentation}["
        unless self.empty?
          data << "\n"
          Thread.current[:formatador].indent do
            data << self.map {|member| member.inspect}.join(",\n")
            data << "\n"
          end
          data << Thread.current[:formatador].indentation
        end
        data << "]\n"
      end
      data << "#{Thread.current[:formatador].indentation}>"
      data
    end

    # Replaces the contents with `array` and marks the collection loaded,
    # so subsequent Array operations skip the lazy fetch. @loaded is set
    # BEFORE clear/concat so the wrapped Array methods do not recurse.
    def load(array)
      @loaded = true
      self.clear.concat(array)
    end

    # The model class registered via Collection.model.
    def model
      self.class.instance_variable_get('@model')
    end

    # Assigns each pair through its writer, translating aliased keys to
    # canonical attribute names first. Returns self for chaining.
    def merge_attributes(new_attributes = {})
      for key, value in new_attributes
        if aliased_key = self.class.aliases[key]
          send("#{aliased_key}=", value)
        else
          send("#{key}=", value)
        end
      end
      self
    end

    # Instantiates (without saving) a model bound to this collection and
    # its connection.
    # FIX: use non-destructive Hash#merge -- merge! leaked the
    # :collection/:connection keys back into the caller's hash.
    def new(attributes = {})
      model.new(
        attributes.merge(
          :collection => self,
          :connection => connection
        )
      )
    end

    # Re-fetches the collection contents from the backend.
    def reload
      self.clear.concat(all)
    end

    # Renders the collection as a table via Formatador; `attributes`
    # optionally restricts the displayed columns.
    def table(attributes = nil)
      Formatador.display_table(self.map {|instance| instance.attributes}, attributes)
    end

    private

    # Fetches the full result set (#all) on first access; no-op once loaded.
    def lazy_load
      unless @loaded
        self.all
      end
    end

    # Renames keys of `attributes` in place per `mapping` (old => new).
    def remap_attributes(attributes, mapping)
      for key, value in mapping
        if attributes.key?(key)
          attributes[value] = attributes.delete(key)
        end
      end
    end
  end
end
|
# Namespace for the Formal gem.
module Formal
# Gem version string (semantic versioning).
VERSION = "0.0.2"
end
Update to 0.0.3
# Namespace for the Formal gem.
module Formal
# Gem version string (semantic versioning).
VERSION = "0.0.3"
end
|
hardlink-osx 0.1.1 (new formula)
require "formula"
# Homebrew formula for hardlink (v0.1.1): a tool for creating hardlinks
# to directories on OS X. The binary installs as `hln` to avoid clashing
# with the Linux `hardlink` utility.
class HardlinkOsx < Formula
homepage "https://github.com/selkhateeb/hardlink"
url "https://github.com/selkhateeb/hardlink/archive/v0.1.1.tar.gz"
# NOTE(review): modern Homebrew expects sha256 checksums; sha1 matches
# the era this formula was written for.
sha1 "ce89e04b7c6b31a06b497449f2d383a7dab513fb"
def install
system "make"
# Create the prefix bin directory before `make install` copies into it.
bin.mkdir
system "make", "install", "PREFIX=#{prefix}"
end
def caveats; <<-EOS.undent
Hardlinks can not be created under the same directory root. If you try to
`hln source directory` to target directory under the same root you will get an error!
Also, remember the binary is named `hln` due to a naming conflict.
EOS
end
# Brew test block: hardlink a directory tree into a sibling directory and
# verify the linked structure resolves.
test do
system "mkdir", "-p", "test1/inner"
system "touch", "test1/inner/file"
system "mkdir", "otherdir"
system "#{bin}/hln", "test1", "otherdir/test2"
system "test", "-d", "otherdir/test2"
assert_equal 0, $?.exitstatus
system "test", "-d", "otherdir/test2/inner"
assert_equal 0, $?.exitstatus
system "test", "-f", "otherdir/test2/inner/file"
assert_equal 0, $?.exitstatus
end
end
|
# coding: utf-8
require 'ostruct'
require_relative '../spec_helper'
require_relative 'user_shared_examples'
require_relative '../../services/dataservices-metrics/lib/isolines_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_snapshot_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_general_usage_metrics'
require 'factories/organizations_contexts'
require_relative '../../app/model_factories/layer_factory'
require_dependency 'cartodb/redis_vizjson_cache'
require 'helpers/unique_names_helper'
require 'factories/users_helper'
require 'factories/database_configuration_contexts'
include UniqueNamesHelper
# Runs the shared 'user models' example group against the Sequel-backed
# ::User model, providing the hooks the shared examples call.
describe 'refactored behaviour' do
it_behaves_like 'user models' do
# Hook: count twitter imports for a user looked up by id.
def get_twitter_imports_count_by_user_id(user_id)
get_user_by_id(user_id).get_twitter_imports_count
end
def get_user_by_id(user_id)
::User.where(id: user_id).first
end
def create_user
FactoryGirl.create(:valid_user)
end
end
end
describe User do
before(:each) do
CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
end
before(:all) do
bypass_named_maps
@user_password = 'admin123'
puts "\n[rspec][user_spec] Creating test user databases..."
@user = create_user :email => 'admin@example.com', :username => 'admin', :password => @user_password
@user2 = create_user :email => 'user@example.com', :username => 'user', :password => 'user123'
puts "[rspec][user_spec] Loading user data..."
reload_user_data(@user) && @user.reload
puts "[rspec][user_spec] Running..."
end
before(:each) do
bypass_named_maps
CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
Table.any_instance.stubs(:update_cdb_tablemetadata)
end
after(:all) do
bypass_named_maps
@user.destroy
@user2.destroy
end
it "should only allow legal usernames" do
illegal_usernames = %w(si$mon 'sergio estella' j@vi sergio£££ simon_tokumine SIMON Simon jose.rilla -rilla rilla-)
legal_usernames = %w(simon javier-de-la-torre sergio-leiva sergio99)
illegal_usernames.each do |name|
@user.username = name
@user.valid?.should be_false
@user.errors[:username].should be_present
end
legal_usernames.each do |name|
@user.username = name
@user.valid?.should be_true
@user.errors[:username].should be_blank
end
end
it "should not allow a username in use by an organization" do
org = create_org('testusername', 10.megabytes, 1)
@user.username = org.name
@user.valid?.should be_false
@user.username = 'wadus'
@user.valid?.should be_true
end
describe 'organization checks' do
it "should not be valid if his organization doesn't have more seats" do
organization = create_org('testorg', 10.megabytes, 1)
user1 = create_user email: 'user1@testorg.com', username: 'user1', password: 'user11'
user1.organization = organization
user1.save
organization.owner_id = user1.id
organization.save
organization.reload
user1.reload
user2 = new_user
user2.organization = organization
user2.valid?.should be_false
user2.errors.keys.should include(:organization)
organization.destroy
user1.destroy
end
it 'should be valid if his organization has enough seats' do
organization = create_org('testorg', 10.megabytes, 1)
user = ::User.new
user.organization = organization
user.valid?
user.errors.keys.should_not include(:organization)
organization.destroy
end
it "should not be valid if his organization doesn't have enough disk space" do
organization = create_org('testorg', 10.megabytes, 1)
organization.stubs(:assigned_quota).returns(10.megabytes)
user = ::User.new
user.organization = organization
user.quota_in_bytes = 1.megabyte
user.valid?.should be_false
user.errors.keys.should include(:quota_in_bytes)
organization.destroy
end
it 'should be valid if his organization has enough disk space' do
organization = create_org('testorg', 10.megabytes, 1)
organization.stubs(:assigned_quota).returns(9.megabytes)
user = ::User.new
user.organization = organization
user.quota_in_bytes = 1.megabyte
user.valid?
user.errors.keys.should_not include(:quota_in_bytes)
organization.destroy
end
describe '#org_admin' do
before(:all) do
@organization = create_organization_with_owner
end
after(:all) do
@organization.destroy
end
def create_role(user)
# NOTE: It's hard to test the real Groups API call here, it needs a Rails server up and running
# Instead, we test the main step that this function does internally (creating a role)
user.in_database["CREATE ROLE \"#{user.database_username}_#{unique_name('role')}\""].all
end
it 'cannot be owner and viewer at the same time' do
@organization.owner.viewer = true
@organization.owner.should_not be_valid
@organization.owner.errors.keys.should include(:viewer)
end
it 'cannot be admin and viewer at the same time' do
user = ::User.new
user.organization = @organization
user.viewer = true
user.org_admin = true
user.should_not be_valid
user.errors.keys.should include(:viewer)
end
it 'should not be able to create groups without admin rights' do
user = FactoryGirl.create(:valid_user, organization: @organization)
expect { create_role(user) }.to raise_error
end
it 'should be able to create groups with admin rights' do
user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
expect { create_role(user) }.to_not raise_error
end
it 'should revoke admin rights on demotion' do
user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
expect { create_role(user) }.to_not raise_error
user.org_admin = false
user.save
expect { create_role(user) }.to raise_error
end
end
describe 'organization email whitelisting' do
before(:each) do
@organization = create_org('testorg', 10.megabytes, 1)
end
after(:each) do
@organization.destroy
end
it 'valid_user is valid' do
user = FactoryGirl.build(:valid_user)
user.valid?.should == true
end
it 'user email is valid if organization has not whitelisted domains' do
user = FactoryGirl.build(:valid_user, organization: @organization)
user.valid?.should == true
end
it 'user email is not valid if organization has whitelisted domains and email is not under that domain' do
@organization.whitelisted_email_domains = [ 'organization.org' ]
user = FactoryGirl.build(:valid_user, organization: @organization)
user.valid?.should eq false
user.errors[:email].should_not be_nil
end
it 'user email is valid if organization has whitelisted domains and email is under that domain' do
user = FactoryGirl.build(:valid_user, organization: @organization)
@organization.whitelisted_email_domains = [ user.email.split('@')[1] ]
user.valid?.should eq true
user.errors[:email].should == []
end
end
describe 'when updating user quota' do
it 'should be valid if his organization has enough disk space' do
organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
organization.assigned_quota.should == 70.megabytes
user = organization.owner
user.quota_in_bytes = 1.megabyte
user.valid?
user.errors.keys.should_not include(:quota_in_bytes)
organization.destroy
end
it "should not be valid if his organization doesn't have enough disk space" do
organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
organization.assigned_quota.should == 70.megabytes
user = organization.owner
user.quota_in_bytes = 71.megabytes
user.valid?.should be_false
user.errors.keys.should include(:quota_in_bytes)
organization.destroy
end
end
describe 'when updating viewer state' do
before(:all) do
@organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
end
after(:all) do
@organization.destroy
end
before(:each) do
@organization.viewer_seats = 10
@organization.seats = 10
@organization.save
end
it 'should not allow changing to viewer without seats' do
@organization.viewer_seats = 0
@organization.save
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
expect(user).not_to be_valid
expect(user.errors.keys).to include(:organization)
end
it 'should allow changing to viewer with enough seats' do
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
expect(user).to be_valid
expect(user.errors.keys).not_to include(:organization)
end
it 'should not allow changing to builder without seats' do
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
user.save
@organization.seats = 1
@organization.save
user.reload
user.viewer = false
expect(user).not_to be_valid
expect(user.errors.keys).to include(:organization)
end
it 'should allow changing to builder with seats' do
user = @organization.users.find { |u| !u.organization_owner? }
user.reload
user.viewer = true
user.save
user.reload
user.viewer = false
expect(user).to be_valid
expect(user.errors.keys).not_to include(:organization)
end
end
it 'should set account_type properly' do
organization = create_organization_with_users
organization.users.reject(&:organization_owner?).each do |u|
u.account_type.should == "ORGANIZATION USER"
end
organization.destroy
end
it 'should set default settings properly unless overriden' do
organization = create_organization_with_users
organization.users.reject(&:organization_owner?).each do |u|
u.max_layers.should eq ::User::DEFAULT_MAX_LAYERS
u.private_tables_enabled.should be_true
u.sync_tables_enabled.should be_true
end
user = FactoryGirl.build(:user, organization: organization)
user.max_layers = 3
user.save
user.max_layers.should == 3
organization.destroy
end
describe 'google_maps_key and google_maps_private_key' do
before(:all) do
@organization = create_organization_with_users(google_maps_key: 'gmk', google_maps_private_key: 'gmpk')
@organization.google_maps_key.should_not be_nil
@organization.google_maps_private_key.should_not be_nil
end
after(:all) do
@organization.destroy
end
it 'should be inherited from organization for new users' do
@organization.users.should_not be_empty
@organization.users.reject(&:organization_owner?).each do |u|
u.google_maps_key.should == @organization.google_maps_key
u.google_maps_private_key.should == @organization.google_maps_private_key
end
end
end
it 'should inherit twitter_datasource_enabled from organization on creation' do
organization = create_organization_with_users(twitter_datasource_enabled: true)
organization.save
organization.twitter_datasource_enabled.should be_true
organization.users.reject(&:organization_owner?).each do |u|
u.twitter_datasource_enabled.should be_true
end
user = create_user(organization: organization)
user.save
user.twitter_datasource_enabled.should be_true
organization.destroy
end
it "should return proper values for non-persisted settings" do
organization = create_organization_with_users
organization.users.reject(&:organization_owner?).each do |u|
u.private_maps_enabled.should be_true
end
organization.destroy
end
end
describe 'central synchronization' do
it 'should create remote user in central if needed' do
pending "Central API credentials not provided" unless ::User.new.sync_data_with_cartodb_central?
organization = create_org('testorg', 500.megabytes, 1)
user = create_user email: 'user1@testorg.com', username: 'user1', password: 'user11'
user.organization = organization
user.save
Cartodb::Central.any_instance.expects(:create_organization_user).with(organization.name, user.allowed_attributes_to_central(:create)).once
user.create_in_central.should be_true
organization.destroy
end
end
it 'should store feature flags' do
ff = FactoryGirl.create(:feature_flag, id: 10001, name: 'ff10001')
user = create_user :email => 'ff@example.com', :username => 'ff-user-01', :password => 'ff-user-01'
user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
user.save
user.feature_flags_user.map { |ffu| ffu.feature_flag_id }.should include(ff.id)
user.destroy
end
it 'should delete feature flags assignations to a deleted user' do
ff = FactoryGirl.create(:feature_flag, id: 10002, name: 'ff10002')
user = create_user :email => 'ff2@example.com', :username => 'ff2-user-01', :password => 'ff2-user-01'
user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
user.save
user_id = user.id
user.destroy
SequelRails.connection["select count(*) from feature_flags_users where user_id = '#{user_id}'"].first[:count].should eq 0
SequelRails.connection["select count(*) from feature_flags where id = '#{ff.id}'"].first[:count].should eq 1
end
it "should have a default dashboard_viewed? false" do
user = ::User.new
user.dashboard_viewed?.should be_false
end
it "should reset dashboard_viewed when dashboard gets viewed" do
user = ::User.new
user.view_dashboard
user.dashboard_viewed?.should be_true
end
it "should validate that password is present if record is new and crypted_password or salt are blank" do
user = ::User.new
user.username = "adminipop"
user.email = "adminipop@example.com"
user.valid?.should be_false
user.errors[:password].should be_present
another_user = new_user(user.values.merge(:password => "admin123"))
user.crypted_password = another_user.crypted_password
user.salt = another_user.salt
user.valid?.should be_true
user.save
# Let's ensure that crypted_password and salt does not change
user_check = ::User[user.id]
user_check.crypted_password.should == another_user.crypted_password
user_check.salt.should == another_user.salt
user.password = nil
user.valid?.should be_true
user.destroy
end
# Password validation: required on new records, with minimum and maximum
# length enforced.
it "should validate password presence and length" do
user = ::User.new
user.username = "adminipop"
user.email = "adminipop@example.com"
# No password at all -> invalid.
user.valid?.should be_false
user.errors[:password].should be_present
# Below the minimum length -> invalid.
user.password = 'short'
user.valid?.should be_false
user.errors[:password].should be_present
# 66 characters, above the maximum length -> invalid.
user.password = 'manolo' * 11
user.valid?.should be_false
user.errors[:password].should be_present
end
it "should set default statement timeout values" do
@user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
@user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end
it "should keep in sync user statement_timeout" do
@user.user_timeout = 1000000
@user.database_timeout = 300000
@user.save
@user.in_database["show statement_timeout"].first[:statement_timeout].should == "1000s"
@user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end
it "should keep in sync database statement_timeout" do
@user.user_timeout = 300000
@user.database_timeout = 1000000
@user.save
@user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
@user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "1000s"
end
it "should invalidate all his vizjsons when his account type changes" do
@user.account_type = 'WADUS'
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{@user.database_name}.*:vizjson").times(1).returns(true)
@user.save
end
it "should invalidate all his vizjsons when his disqus_shortname changes" do
@user.disqus_shortname = 'WADUS'
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{@user.database_name}.*:vizjson").times(1).returns(true)
@user.save
end
it "should not invalidate anything when his quota_in_bytes changes" do
@user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
CartoDB::Varnish.any_instance.expects(:purge).times(0)
@user.save
end
it "should rebuild the quota trigger after changing the quota" do
@user.db_service.expects(:rebuild_quota_trigger).once
@user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
@user.save
end
it "should read api calls from external service" do
pending "This is deprecated. This code has been moved"
@user.stubs(:get_old_api_calls).returns({
"per_day" => [0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 17, 4, 0, 0, 0, 0],
"total"=>49,
"updated_at"=>1370362756
})
@user.stubs(:get_es_api_calls_from_redis).returns([
21, 0, 0, 0, 2, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
])
@user.get_api_calls.should == [21, 0, 0, 0, 6, 17, 0, 5, 0, 0, 0, 0, 0, 0, 8, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0]
@user.get_api_calls(
from: (Date.today - 6.days),
to: Date.today
).should == [21, 0, 0, 0, 6, 17, 0]
end
it "should get final api calls from es" do
yesterday = Date.today - 1
today = Date.today
from_date = DateTime.new(yesterday.year, yesterday.month, yesterday.day, 0, 0, 0).strftime("%Q")
to_date = DateTime.new(today.year, today.month, today.day, 0, 0, 0).strftime("%Q")
api_url = %r{search}
api_response = {
"aggregations" => {
"0" => {
"buckets" => [
{
"key" => from_date.to_i,
"doc_count" => 4
},
{
"key" => to_date.to_i,
"doc_count" => 6
}
]
}
}
}
Typhoeus.stub(api_url,
{ method: :post }
)
.and_return(
Typhoeus::Response.new(code: 200, body: api_response.to_json.to_s)
)
@user.get_api_calls_from_es.should == {from_date.to_i => 4, to_date.to_i => 6}
end
describe "avatar checks" do
let(:user1) do
create_user(email: 'ewdewfref34r43r43d32f45g5@example.com', username: 'u1', password: 'foobar')
end
after(:each) do
user1.destroy
end
it "should load a cartodb avatar url if no gravatar associated" do
avatar_kind = Cartodb.config[:avatars]['kinds'][0]
avatar_color = Cartodb.config[:avatars]['colors'][0]
avatar_base_url = Cartodb.config[:avatars]['base_url']
Random.any_instance.stubs(:rand).returns(0)
gravatar_url = %r{gravatar.com}
Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 404))
user1.stubs(:gravatar_enabled?).returns(true)
user1.avatar_url = nil
user1.save
user1.reload_avatar
user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
end
it "should load a cartodb avatar url if gravatar disabled" do
avatar_kind = Cartodb.config[:avatars]['kinds'][0]
avatar_color = Cartodb.config[:avatars]['colors'][0]
avatar_base_url = Cartodb.config[:avatars]['base_url']
Random.any_instance.stubs(:rand).returns(0)
gravatar_url = %r{gravatar.com}
Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
user1.stubs(:gravatar_enabled?).returns(false)
user1.avatar_url = nil
user1.save
user1.reload_avatar
user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
end
it "should load a the user gravatar url" do
gravatar_url = %r{gravatar.com}
Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
user1.stubs(:gravatar_enabled?).returns(true)
user1.reload_avatar
user1.avatar_url.should == "//#{user1.gravatar_user_url}"
end
describe '#gravatar_enabled?' do
it 'should be enabled by default (every setting but false will enable it)' do
user = ::User.new
Cartodb.with_config(avatars: {}) { user.gravatar_enabled?.should be_true }
Cartodb.with_config(avatars: { 'gravatar_enabled' => true }) { user.gravatar_enabled?.should be_true }
Cartodb.with_config(avatars: { 'gravatar_enabled' => 'true' }) { user.gravatar_enabled?.should be_true }
Cartodb.with_config(avatars: { 'gravatar_enabled' => 'wadus' }) { user.gravatar_enabled?.should be_true }
end
it 'can be disabled' do
user = ::User.new
Cartodb.with_config(avatars: { 'gravatar_enabled' => false }) { user.gravatar_enabled?.should be_false }
Cartodb.with_config(avatars: { 'gravatar_enabled' => 'false' }) { user.gravatar_enabled?.should be_false }
end
end
end
describe '#private_maps_enabled?' do
it 'should not have private maps enabled by default' do
user_missing_private_maps = create_user :email => 'user_mpm@example.com', :username => 'usermpm', :password => 'usermpm'
user_missing_private_maps.private_maps_enabled?.should eq false
user_missing_private_maps.destroy
end
it 'should have private maps if enabled' do
user_with_private_maps = create_user :email => 'user_wpm@example.com', :username => 'userwpm', :password => 'userwpm', :private_maps_enabled => true
user_with_private_maps.private_maps_enabled?.should eq true
user_with_private_maps.destroy
end
it 'should not have private maps if disabled' do
user_without_private_maps = create_user :email => 'user_opm@example.com', :username => 'useropm', :password => 'useropm', :private_maps_enabled => false
user_without_private_maps.private_maps_enabled?.should eq false
user_without_private_maps.destroy
end
end
describe '#get_geocoding_calls' do
before do
delete_user_data @user
@user.stubs(:last_billing_cycle).returns(Date.today)
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::GeocoderUsageMetrics.new(@user.username, nil, @mock_redis)
@usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now)
@usage_metrics.incr(:geocoder_internal, :success_responses, 1, Time.now)
@usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now - 5.days)
@usage_metrics.incr(:geocoder_cache, :success_responses, 1, Time.now - 5.days)
CartoDB::GeocoderUsageMetrics.stubs(:new).returns(@usage_metrics)
end
it "should return the sum of geocoded rows for the current billing period" do
@user.get_geocoding_calls.should eq 1
end
it "should return the sum of geocoded rows for the specified period" do
@user.get_geocoding_calls(from: Time.now-5.days).should eq 3
@user.get_geocoding_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 2
end
it "should return 0 when no geocodings" do
@user.get_geocoding_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
describe '#get_here_isolines_calls' do
before do
delete_user_data @user
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::IsolinesUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::IsolinesUsageMetrics.stubs(:new).returns(@usage_metrics)
@user.stubs(:last_billing_cycle).returns(Date.today)
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of here isolines rows for the current billing period" do
@usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
@user.get_here_isolines_calls.should eq 10
end
it "should return the sum of here isolines rows for the specified period" do
@usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
@usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 7))
@user.get_here_isolines_calls(from: Time.now-5.days).should eq 110
@user.get_here_isolines_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no here isolines actions" do
@user.get_here_isolines_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
describe '#get_obs_snapshot_calls' do
before do
delete_user_data @user
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::ObservatorySnapshotUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::ObservatorySnapshotUsageMetrics.stubs(:new).returns(@usage_metrics)
@user.stubs(:last_billing_cycle).returns(Date.today)
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of data observatory snapshot rows for the current billing period" do
@usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
@user.get_obs_snapshot_calls.should eq 10
end
it "should return the sum of data observatory snapshot rows for the specified period" do
@usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
@usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 7))
@user.get_obs_snapshot_calls(from: Time.now - 5.days).should eq 110
@user.get_obs_snapshot_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no here isolines actions" do
@user.get_obs_snapshot_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
describe '#get_obs_general_calls' do
before do
delete_user_data @user
@mock_redis = MockRedis.new
@usage_metrics = CartoDB::ObservatoryGeneralUsageMetrics.new(@user.username, nil, @mock_redis)
CartoDB::ObservatoryGeneralUsageMetrics.stubs(:new).returns(@usage_metrics)
@user.stubs(:last_billing_cycle).returns(Date.today)
@user.period_end_date = (DateTime.current + 1) << 1
@user.save.reload
end
it "should return the sum of data observatory general rows for the current billing period" do
@usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
@user.get_obs_general_calls.should eq 10
end
it "should return the sum of data observatory general rows for the specified period" do
@usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
@usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 7))
@user.get_obs_general_calls(from: Time.now - 5.days).should eq 110
@user.get_obs_general_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
end
it "should return 0 when no data observatory general actions" do
@user.get_obs_general_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
end
end
describe "organization user deletion" do
it "should transfer tweet imports to owner" do
u1 = create_user(email: 'u1@exampleb.com', username: 'ub1', password: 'admin123')
org = create_org('cartodbtestb', 1234567890, 5)
u1.organization = org
u1.save
u1.reload
org = u1.organization
org.owner_id = u1.id
org.save
u1.reload
u2 = create_user(email: 'u2@exampleb.com', username: 'ub2', password: 'admin123', organization: org)
tweet_attributes = {
user: u2,
table_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
data_import_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
service_item_id: '555',
state: ::SearchTweet::STATE_COMPLETE
}
st1 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 5))
st2 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 10))
u1.reload
u2.reload
u2.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
u1.get_twitter_imports_count.should == 0
u2.destroy
u1.reload
u1.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
org.destroy
end
end
it "should have many tables" do
@user2.tables.should be_empty
create_table :user_id => @user2.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
@user2.reload
@user2.tables.all.should == [UserTable.first(:user_id => @user2.id)]
end
it "should generate a data report"
it "should update remaining quotas when adding or removing tables" do
initial_quota = @user2.remaining_quota
expect { create_table :user_id => @user2.id, :privacy => UserTable::PRIVACY_PUBLIC }
.to change { @user2.remaining_table_quota }.by(-1)
table = Table.new(user_table: UserTable.filter(:user_id => @user2.id).first)
50.times { |i| table.insert_row!(:name => "row #{i}") }
@user2.remaining_quota.should be < initial_quota
initial_quota = @user2.remaining_quota
expect { table.destroy }
.to change { @user2.remaining_table_quota }.by(1)
@user2.remaining_quota.should be > initial_quota
end
it "should has his own database, created when the account is created" do
@user.database_name.should == "cartodb_test_user_#{@user.id}_db"
@user.database_username.should == "test_cartodb_user_#{@user.id}"
@user.in_database.test_connection.should == true
end
# Schema provisioning: every user database gets a 'cdb_importer' schema.
it 'creates an importer schema in the user database' do
@user.in_database[%Q(SELECT * FROM pg_namespace)]
.map { |record| record.fetch(:nspname) }
.should include 'cdb_importer'
end
it 'creates a cdb schema in the user database' do
# Deliberately pending: the 'cdb' schema is believed to be unused.
pending "I believe cdb schema was never used"
@user.in_database[%Q(SELECT * FROM pg_namespace)]
.map { |record| record.fetch(:nspname) }
.should include 'cdb'
end
it 'allows access to the importer schema by the owner' do
@user.in_database.run(%Q{
CREATE TABLE cdb_importer.bogus ( bogus varchar(40) )
})
query = %Q(SELECT * FROM cdb_importer.bogus)
# The anonymous/public role must NOT be able to read importer tables...
expect { @user.in_database(as: :public_user)[query].to_a }
.to raise_error(Sequel::DatabaseError)
# ...while the owner's connection reads them without raising.
@user.in_database[query].to_a
end
it 'allows access to the cdb schema by the owner' do
pending "I believe cdb schema was never used"
@user.in_database.run(%Q{
CREATE TABLE cdb.bogus ( bogus varchar(40) )
})
query = %Q(SELECT * FROM cdb.bogus)
expect { @user.in_database(as: :public_user)[query].to_a }
.to raise_error(Sequel::DatabaseError)
@user.in_database[query].to_a
end
# Credential isolation: each user's role can connect only to its own
# database. Four connections are attempted: own-db/own-creds (ok),
# other-db/own-creds (fails), and the mirror pair for @user2.
it "should create a dabase user that only can read it's own database" do
# @user connecting to @user's database with @user's credentials: succeeds.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user.database_name, :logger => ::Rails.logger,
'username' => @user.database_username, 'password' => @user.database_password
)
)
connection.test_connection.should == true
connection.disconnect
connection = nil
# @user's credentials against @user2's database: must be rejected.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user2.database_name, :logger => ::Rails.logger,
'username' => @user.database_username, 'password' => @user.database_password
)
)
begin
connection.test_connection
# Reaching this line means the connection worked: force a failure.
true.should_not be_true
rescue
# Connection refused as expected.
true.should be_true
ensure
connection.disconnect
end
# @user2 connecting to its own database: succeeds.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user2.database_name, :logger => ::Rails.logger,
'username' => @user2.database_username, 'password' => @user2.database_password
)
)
connection.test_connection.should == true
connection.disconnect
# @user2's credentials against @user's database: must be rejected.
connection = ::Sequel.connect(
::SequelRails.configuration.environment_for(Rails.env).merge(
'database' => @user.database_name, :logger => ::Rails.logger,
'username' => @user2.database_username, 'password' => @user2.database_password
)
)
begin
connection.test_connection
true.should_not be_true
rescue
true.should be_true
ensure
connection.disconnect
end
end
# End-to-end run_pg_query checks against the seeded import_csv_1 fixture:
# selects, an update, a cross join ("product") and an aggregate count.
# NOTE(review): row counts assume the fixture ships exactly 2 'Polynoidae'
# rows — confirm against the import fixture if these expectations drift.
it "should run valid queries against his database" do
# initial select tests
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
# :time is reported as a float-looking string, e.g. "0.023".
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
# update and reselect
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
# check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
# test a product
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
# Broken SQL must surface as CartoDB::ErrorRunningQuery.
it "should raise errors when running invalid queries against his database" do
  expect {
    @user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
  }.to raise_error(CartoDB::ErrorRunningQuery)
end
# Same coverage as the previous example, but additionally asserts the
# :results / :modified flags (true for selects, true for writes, resp.).
it "should run valid queries against his database in pg mode" do
reload_user_data(@user) && @user.reload
# initial select tests
# tests results and modified flags
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
# Pure select: yields results, modifies nothing.
query_result[:results].should == true
query_result[:modified].should == false
# update and reselect
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
# Pure update: modifies data, yields no result rows.
query_result[:modified].should == true
query_result[:results].should == false
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
query_result[:modified].should == false
query_result[:results].should == true
# # check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
query_result[:results].should == true
# test a product
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
# Error mapping in pg mode: syntax errors, unknown tables and unknown
# columns each raise a dedicated CartoDB exception class.
it "should raise errors when running invalid queries against his database in pg mode" do
  expect {
    @user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
  }.to raise_error(CartoDB::ErrorRunningQuery)
end

it "should raise errors when invalid table name used in pg mode" do
  expect {
    @user.db_service.run_pg_query("select * from this_table_is_not_here where family='Polynoidae' limit 10")
  }.to raise_error(CartoDB::TableNotExists)
end

it "should raise errors when invalid column used in pg mode" do
  expect {
    @user.db_service.run_pg_query("select not_a_col from import_csv_1 where family='Polynoidae' limit 10")
  }.to raise_error(CartoDB::ColumnNotExists)
end
# Every user gets an OAuth client application; resetting it rotates the key.
it "should create a client_application for each user" do
  @user.client_application.should_not be_nil
end

it "should reset its client application" do
  previous_key = @user.client_application.key

  @user.reset_client_application!
  @user.reload

  @user.client_application.key.should_not == previous_key
end
# When a request contains several statements, only the last select's
# result set is returned to the caller.
it "should return the result from the last select query if multiple selects" do
reload_user_data(@user) && @user.reload
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 1; select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
# total_rows is 2 (the second select), not 1 (the first).
query_result[:total_rows].should == 2
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
end
# An insert followed by a select works, and the select sees the new row.
it "should allow multiple queries in the format: insert_query; select_query" do
query_result = @user.db_service.run_pg_query("insert into import_csv_1 (name_of_species,family) values ('cristata barrukia','Polynoidae'); select * from import_csv_1 where family='Polynoidae' ORDER BY name_of_species ASC limit 10")
query_result[:total_rows].should == 3
query_result[:rows].map { |i| i[:name_of_species] }.should =~ ["Barrukia cristata", "Eulagisca gigantea", "cristata barrukia"]
end
# Querying a nonexistent table maps the PG error to CartoDB::TableNotExists.
it "should fail with error if table doesn't exist" do
  reload_user_data(@user) && @user.reload
  expect {
    @user.db_service.run_pg_query("select * from wadus")
  }.to raise_error(CartoDB::TableNotExists)
end
# User metadata in redis lives under "rails:users:<username>".
it "should have a method that generates users redis users_metadata key" do
  expected_key = "rails:users:#{@user.username}"
  @user.key.should == expected_key
end
# Saving a user mirrors selected fields into the redis users_metadata hash.
# database_name is stubbed so we can assert the stubbed value round-trips.
it "replicates some user metadata in redis after saving" do
@user.stubs(:database_name).returns('wadus')
@user.save
$users_metadata.HGET(@user.key, 'id').should == @user.id.to_s
$users_metadata.HGET(@user.key, 'database_name').should == 'wadus'
$users_metadata.HGET(@user.key, 'database_password').should == @user.database_password
$users_metadata.HGET(@user.key, 'database_host').should == @user.database_host
# The 'map_key' field stores the user's api_key.
$users_metadata.HGET(@user.key, 'map_key').should == @user.api_key
end
# Creating a user writes its metadata hash to redis without an explicit save.
it "should store its metadata automatically after creation" do
  user = FactoryGirl.create :user
  expected_fields = {
    'id'                => user.id.to_s,
    'database_name'     => user.database_name,
    'database_password' => user.database_password,
    'database_host'     => user.database_host,
    'map_key'           => user.api_key
  }
  expected_fields.each do |field, value|
    $users_metadata.HGET(user.key, field).should == value
  end
  user.destroy
end
# Timeout limits in redis live under "limits:timeout:<username>".
it "should have a method that generates users redis limits metadata key" do
  expected_key = "limits:timeout:#{@user.username}"
  @user.timeout_key.should == expected_key
end
# Statement-timeout limits: saved values are mirrored to redis ('db' for
# the owner role, 'db_public' for the public role) and applied as
# statement_timeout on the corresponding database connections.
it "replicates db timeout limits in redis after saving and applies them to db" do
@user.user_timeout = 200007
@user.database_timeout = 100007
@user.save
$users_metadata.HGET(@user.timeout_key, 'db').should == '200007'
$users_metadata.HGET(@user.timeout_key, 'db_public').should == '100007'
# Postgres reports the applied value with an 'ms' suffix.
@user.in_database do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200007ms' })
end
@user.in_database(as: :public_user) do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100007ms' })
end
end
# Render timeouts are mirrored to redis only (no DB-side setting asserted).
it "replicates render timeout limits in redis after saving" do
@user.user_render_timeout = 200001
@user.database_render_timeout = 100001
@user.save
$users_metadata.HGET(@user.timeout_key, 'render').should == '200001'
$users_metadata.HGET(@user.timeout_key, 'render_public').should == '100001'
end
# Same db-timeout behavior on user creation (not just on save).
it "should store db timeout limits in redis after creation" do
user = FactoryGirl.create :user, user_timeout: 200002, database_timeout: 100002
user.user_timeout.should == 200002
user.database_timeout.should == 100002
$users_metadata.HGET(user.timeout_key, 'db').should == '200002'
$users_metadata.HGET(user.timeout_key, 'db_public').should == '100002'
user.in_database do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200002ms' })
end
user.in_database(as: :public_user) do |db|
db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100002ms' })
end
user.destroy
end
it "should store render timeout limits in redis after creation" do
user = FactoryGirl.create :user, user_render_timeout: 200003, database_render_timeout: 100003
user.reload
user.user_render_timeout.should == 200003
user.database_render_timeout.should == 100003
$users_metadata.HGET(user.timeout_key, 'render').should == '200003'
$users_metadata.HGET(user.timeout_key, 'render_public').should == '100003'
user.destroy
end
# A freshly created user gets nonzero defaults, mirrored in redis and
# applied in the database (pg_settings stores the raw ms value here).
it "should have valid non-zero db timeout limits by default" do
user = FactoryGirl.create :user
user.user_timeout.should > 0
user.database_timeout.should > 0
$users_metadata.HGET(user.timeout_key, 'db').should == user.user_timeout.to_s
$users_metadata.HGET(user.timeout_key, 'db_public').should == user.database_timeout.to_s
user.in_database do |db|
result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
result.first.should eq(setting: user.user_timeout.to_s)
end
user.in_database(as: :public_user) do |db|
result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
result.first.should eq(setting: user.database_timeout.to_s)
end
user.destroy
end
# Render timeouts default to 0 (disabled) both on the model and in redis.
it "should have zero render timeout limits by default" do
  user = FactoryGirl.create :user

  user.user_render_timeout.should eq 0
  user.database_render_timeout.should eq 0

  %w(render render_public).each do |field|
    $users_metadata.HGET(user.timeout_key, field).should eq '0'
  end

  user.destroy
end
# Saving an existing user must not rotate its API key.
it "should not regenerate the api_key after saving" do
  api_key_before = @user.api_key
  @user.save
  @user.api_key.should == api_key_before
end
# Destroying a user removes both its users_metadata and timeout hashes
# from redis (keys are captured before destroy, since the model is gone).
it "should remove its metadata from redis after deletion" do
doomed_user = create_user :email => 'doomed@example.com', :username => 'doomed', :password => 'doomed123'
$users_metadata.HGET(doomed_user.key, 'id').should == doomed_user.id.to_s
$users_metadata.HGET(doomed_user.timeout_key, 'db').should_not be_nil
$users_metadata.HGET(doomed_user.timeout_key, 'db_public').should_not be_nil
key = doomed_user.key
timeout_key = doomed_user.timeout_key
doomed_user.destroy
$users_metadata.HGET(key, 'id').should be_nil
$users_metadata.HGET(timeout_key, 'db').should be_nil
$users_metadata.HGET(timeout_key, 'db_public').should be_nil
$users_metadata.HGET(timeout_key, 'render').should be_nil
$users_metadata.HGET(timeout_key, 'render_public').should be_nil
end
# Destroying a user drops its PG database and its PG role (checked via
# pg_catalog counts before and after).
it "should remove its database and database user after deletion" do
doomed_user = create_user :email => 'doomed1@example.com', :username => 'doomed1', :password => 'doomed123'
create_table :user_id => doomed_user.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
doomed_user.reload
SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
.first[:count].should == 1
SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
.first[:count].should == 1
doomed_user.destroy
SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
.first[:count].should == 0
SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
.first[:count].should == 0
end
# Destroying a user purges its Varnish entries (mocked purge expectation).
it "should invalidate its Varnish cache after deletion" do
doomed_user = create_user :email => 'doomed2@example.com', :username => 'doomed2', :password => 'doomed123'
CartoDB::Varnish.any_instance.expects(:purge).with("#{doomed_user.database_name}.*").returns(true)
doomed_user.destroy
end
# Destroying a user cascades to its data imports, user tables and the
# layers_users join rows; visualization vizjson entries are purged too.
it "should remove its user tables, layers and data imports after deletion" do
doomed_user = create_user(email: 'doomed2@example.com', username: 'doomed2', password: 'doomed123')
data_import = DataImport.create(user_id: doomed_user.id, data_source: fake_data_path('clubbing.csv')).run_import!
doomed_user.add_layer Layer.create(kind: 'carto')
table_id = data_import.table_id
uuid = UserTable.where(id: table_id).first.table_visualization.id
# Expect a purge of everything under the user's database name...
CartoDB::Varnish.any_instance.expects(:purge)
.with("#{doomed_user.database_name}.*")
.returns(true)
# ...and at least one purge of the table visualization's vizjson.
CartoDB::Varnish.any_instance.expects(:purge)
.with(".*#{uuid}:vizjson")
.at_least_once
.returns(true)
doomed_user.destroy
DataImport.where(user_id: doomed_user.id).count.should == 0
UserTable.where(user_id: doomed_user.id).count.should == 0
Layer.db["SELECT * from layers_users WHERE user_id = '#{doomed_user.id}'"].count.should == 0
end
# last_billing_cycle: given a period_end_date anchor, returns the most
# recent monthly anniversary of that day that is not in the future.
# Delorean freezes "today" so each case is deterministic.
it "should correctly identify last billing cycle" do
user = create_user :email => 'example@example.com', :username => 'example', :password => 'testingbilling'
Delorean.time_travel_to(Date.parse("2013-01-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-15"))
user.last_billing_cycle.should == Date.parse("2012-12-15")
end
Delorean.time_travel_to(Date.parse("2013-01-01")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
user.last_billing_cycle.should == Date.parse("2012-12-02")
end
Delorean.time_travel_to(Date.parse("2013-03-01")) do
# Anchor day 31 doesn't exist in February, so it clamps to Feb 28.
user.stubs(:period_end_date).returns(Date.parse("2012-12-31"))
user.last_billing_cycle.should == Date.parse("2013-02-28")
end
Delorean.time_travel_to(Date.parse("2013-03-15")) do
user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
user.last_billing_cycle.should == Date.parse("2013-03-02")
end
user.destroy
Delorean.back_to_the_present
end
# trial_ends_at: nil without an upgrade date or on a CORONELLI plan;
# set for MAGELLAN upgrades that are still within the trial window.
it "should calculate the trial end date" do
@user.stubs(:upgraded_at).returns(nil)
@user.trial_ends_at.should be_nil
@user.stubs(:upgraded_at).returns(Time.now - 5.days)
@user.stubs(:account_type).returns('CORONELLI')
@user.trial_ends_at.should be_nil
@user.stubs(:account_type).returns('MAGELLAN')
@user.trial_ends_at.should_not be_nil
@user.stubs(:upgraded_at).returns(nil)
@user.trial_ends_at.should be_nil
@user.stubs(:upgraded_at).returns(Time.now - (::User::TRIAL_DURATION_DAYS - 1).days)
@user.trial_ends_at.should_not be_nil
end
# hard/soft geocoding limits are two views of one flag: hard == !soft.
# When the stored soft flag is nil, the default is derived from the plan.
describe '#hard_geocoding_limit?' do
it 'returns true when the plan is AMBASSADOR or FREE unless it has been manually set to false' do
# No explicit value stored: plan-based default applies (hard for these plans).
@user[:soft_geocoding_limit].should be_nil
@user.stubs(:account_type).returns('AMBASSADOR')
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
@user.stubs(:account_type).returns('FREE')
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
# Manually disabling the hard limit persists soft=true and overrides the plan.
@user.hard_geocoding_limit = false
@user[:soft_geocoding_limit].should_not be_nil
@user.stubs(:account_type).returns('AMBASSADOR')
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
@user.stubs(:account_type).returns('FREE')
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
end
it 'returns true when for enterprise accounts unless it has been manually set to false' do
['ENTERPRISE', 'ENTERPRISE LUMP-SUM', 'Enterprise Medium Lumpsum AWS'].each do |account_type|
@user.stubs(:account_type).returns(account_type)
# nil soft flag: enterprise plans default to a hard limit.
@user.soft_geocoding_limit = nil
@user.soft_geocoding_limit?.should be_false
@user.soft_geocoding_limit.should be_false
@user.hard_geocoding_limit?.should be_true
@user.hard_geocoding_limit.should be_true
# Explicit soft=true flips hard off regardless of plan.
@user.soft_geocoding_limit = true
@user.soft_geocoding_limit?.should be_true
@user.soft_geocoding_limit.should be_true
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit.should be_false
end
end
it 'returns false when the plan is CORONELLI or MERCATOR unless it has been manually set to true' do
@user.stubs(:account_type).returns('CORONELLI')
@user.hard_geocoding_limit?.should be_false
@user.stubs(:account_type).returns('MERCATOR')
@user.hard_geocoding_limit?.should be_false
@user.hard_geocoding_limit = true
@user.stubs(:account_type).returns('CORONELLI')
@user.hard_geocoding_limit?.should be_true
@user.stubs(:account_type).returns('MERCATOR')
@user.hard_geocoding_limit?.should be_true
end
end
# The following three describes mirror #hard_geocoding_limit? for other
# metered services (HERE isolines, Data Observatory snapshot/general):
# every plan defaults to a hard limit until soft is explicitly enabled.
describe '#hard_here_isolines_limit?' do
before(:each) do
@user_account = create_user
end
it 'returns true with every plan unless it has been manually set to false' do
@user_account[:soft_here_isolines_limit].should be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_here_isolines_limit?.should be_false
@user_account.soft_here_isolines_limit.should be_false
@user_account.hard_here_isolines_limit?.should be_true
@user_account.hard_here_isolines_limit.should be_true
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_here_isolines_limit?.should be_false
@user_account.soft_here_isolines_limit.should be_false
@user_account.hard_here_isolines_limit?.should be_true
@user_account.hard_here_isolines_limit.should be_true
# Manual override: hard=false stores soft=true for any plan.
@user_account.hard_here_isolines_limit = false
@user_account[:soft_here_isolines_limit].should_not be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_here_isolines_limit?.should be_true
@user_account.soft_here_isolines_limit.should be_true
@user_account.hard_here_isolines_limit?.should be_false
@user_account.hard_here_isolines_limit.should be_false
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_here_isolines_limit?.should be_true
@user_account.soft_here_isolines_limit.should be_true
@user_account.hard_here_isolines_limit?.should be_false
@user_account.hard_here_isolines_limit.should be_false
end
end
describe '#hard_obs_snapshot_limit?' do
before(:each) do
@user_account = create_user
end
it 'returns true with every plan unless it has been manually set to false' do
@user_account[:soft_obs_snapshot_limit].should be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_snapshot_limit?.should be_false
@user_account.soft_obs_snapshot_limit.should be_false
@user_account.hard_obs_snapshot_limit?.should be_true
@user_account.hard_obs_snapshot_limit.should be_true
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_snapshot_limit?.should be_false
@user_account.soft_obs_snapshot_limit.should be_false
@user_account.hard_obs_snapshot_limit?.should be_true
@user_account.hard_obs_snapshot_limit.should be_true
@user_account.hard_obs_snapshot_limit = false
@user_account[:soft_obs_snapshot_limit].should_not be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_snapshot_limit?.should be_true
@user_account.soft_obs_snapshot_limit.should be_true
@user_account.hard_obs_snapshot_limit?.should be_false
@user_account.hard_obs_snapshot_limit.should be_false
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_snapshot_limit?.should be_true
@user_account.soft_obs_snapshot_limit.should be_true
@user_account.hard_obs_snapshot_limit?.should be_false
@user_account.hard_obs_snapshot_limit.should be_false
end
end
describe '#hard_obs_general_limit?' do
before(:each) do
@user_account = create_user
end
it 'returns true with every plan unless it has been manually set to false' do
@user_account[:soft_obs_general_limit].should be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_general_limit?.should be_false
@user_account.soft_obs_general_limit.should be_false
@user_account.hard_obs_general_limit?.should be_true
@user_account.hard_obs_general_limit.should be_true
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_general_limit?.should be_false
@user_account.soft_obs_general_limit.should be_false
@user_account.hard_obs_general_limit?.should be_true
@user_account.hard_obs_general_limit.should be_true
@user_account.hard_obs_general_limit = false
@user_account[:soft_obs_general_limit].should_not be_nil
@user_account.stubs(:account_type).returns('AMBASSADOR')
@user_account.soft_obs_general_limit?.should be_true
@user_account.soft_obs_general_limit.should be_true
@user_account.hard_obs_general_limit?.should be_false
@user_account.hard_obs_general_limit.should be_false
@user_account.stubs(:account_type).returns('FREE')
@user_account.soft_obs_general_limit?.should be_true
@user_account.soft_obs_general_limit.should be_true
@user_account.hard_obs_general_limit?.should be_false
@user_account.hard_obs_general_limit.should be_false
end
end
# tables_including_shared must return tables the user owns PLUS tables
# other users shared with them via permissions — but not unshared ones.
describe '#shared_tables' do
it 'Checks that shared tables include not only owned ones' do
require_relative '../../app/models/visualization/collection'
CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
bypass_named_maps
# No need to really touch the DB for the permissions
Table::any_instance.stubs(:add_read_permission).returns(nil)
# We're leaking tables from some tests, make sure there are no tables
@user.tables.all.each { |t| t.destroy }
@user2.tables.all.each { |t| t.destroy }
# @user owns two tables; @user2 owns two, of which only one gets shared.
table = Table.new
table.user_id = @user.id
table.save.reload
table2 = Table.new
table2.user_id = @user.id
table2.save.reload
table3 = Table.new
table3.user_id = @user2.id
table3.name = 'sharedtable'
table3.save.reload
table4 = Table.new
table4.user_id = @user2.id
table4.name = 'table4'
table4.save.reload
# Only owned tables
user_tables = tables_including_shared(@user)
user_tables.count.should eq 2
# Grant permission
user2_vis = CartoDB::Visualization::Collection.new.fetch(user_id: @user2.id, name: table3.name).first
permission = user2_vis.permission
permission.acl = [
{
type: CartoDB::Permission::TYPE_USER,
entity: {
id: @user.id,
username: @user.username
},
access: CartoDB::Permission::ACCESS_READONLY
}
]
permission.save
# Now owned + shared...
user_tables = tables_including_shared(@user)
user_tables.count.should eq 3
# table3 (shared) must be present; table4 (not shared) must not.
contains_shared_table = false
user_tables.each{ |item|
contains_shared_table ||= item.id == table3.id
}
contains_shared_table.should eq true
contains_shared_table = false
user_tables.each{ |item|
contains_shared_table ||= item.id == table4.id
}
contains_shared_table.should eq false
# Clean up so later examples start from an empty table list.
@user.tables.all.each { |t| t.destroy }
@user2.tables.all.each { |t| t.destroy }
end
end
# User#destroy drops the user's PG role; once dropped, even asking
# role_exists? for it raises, which is what the expect blocks assert.
describe '#destroy' do
it 'deletes database role' do
u1 = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
role = u1.database_username
db = u1.in_database
db_service = u1.db_service
db_service.role_exists?(db, role).should == true
u1.destroy
expect do
# The role is gone, so the lookup itself errors out.
db_service.role_exists?(db, role).should == false
end.to raise_error(/role "#{role}" does not exist/)
db.disconnect
end
describe "on organizations" do
include_context 'organization with users helper'
it 'deletes database role' do
role = @org_user_1.database_username
db = @org_user_1.in_database
db_service = @org_user_1.db_service
db_service.role_exists?(db, role).should == true
@org_user_1.destroy
expect do
db_service.role_exists?(db, role).should == false
end.to raise_error(/role "#{role}" does not exist/)
db.disconnect
end
# Destroying an org user also removes its cached analysis tables and
# the matching rows from cdb_analysis_catalog.
it 'deletes temporary analysis tables' do
db = @org_user_2.in_database
db.run('CREATE TABLE analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e (a int)')
db.run(%{INSERT INTO cdb_analysis_catalog (username, cache_tables, node_id, analysis_def)
VALUES ('#{@org_user_2.username}', '{analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e}', 'a0', '{}')})
@org_user_2.destroy
db = @org_user_owner.in_database
db["SELECT COUNT(*) FROM cdb_analysis_catalog WHERE username='#{@org_user_2.username}'"].first[:count].should eq 0
end
describe 'User#destroy' do
include TableSharing
# A plain destroy must refuse when the user still shares entities.
it 'blocks deletion with shared entities' do
@not_to_be_deleted = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
table = create_random_table(@not_to_be_deleted)
share_table_with_user(table, @org_user_owner)
expect { @not_to_be_deleted.destroy }.to raise_error(/Cannot delete user, has shared entities/)
::User[@not_to_be_deleted.id].should be
end
end
end
end
# destroy_cascade, unlike destroy, succeeds even when shared entities exist.
describe 'User#destroy_cascade' do
include_context 'organization with users helper'
include TableSharing
it 'allows deletion even with shared entities' do
table = create_random_table(@org_user_1)
# NOTE(review): the table is shared with its own owner (@org_user_1);
# the blocking test above shares with a different user — confirm this
# isn't meant to be @org_user_2.
share_table_with_user(table, @org_user_1)
@org_user_1.destroy_cascade
::User[@org_user_1.id].should_not be
end
end
# An organization owner cannot be destroyed while other users still
# belong to the organization.
describe '#destroy_restrictions' do
it 'Checks some scenarios upon user destruction regarding organizations' do
u1 = create_user(email: 'u1@example.com', username: 'u1', password: 'admin123')
u2 = create_user(email: 'u2@example.com', username: 'u2', password: 'admin123')
org = create_org('cartodb', 1234567890, 5)
# u1 joins the org and becomes its owner.
u1.organization = org
u1.save
u1.reload
u1.organization.nil?.should eq false
org = u1.organization
org.owner_id = u1.id
org.save
u1.reload
u1.organization.owner.id.should eq u1.id
# u2 joins the same org as a regular member.
u2.organization = org
u2.save
u2.reload
u2.organization.nil?.should eq false
u2.reload
# Cannot remove as more users depend on the org
expect {
u1.destroy
}.to raise_exception CartoDB::BaseCartoDBError
org.destroy
end
end
# Version detection for the cartodb-postgresql extension: versions below
# 0.3.0 predate multi-user ("pre-MU") support. Version strings may also
# come as "<default> <installed>" pairs or `git describe` output.
describe '#cartodb_postgresql_extension_versioning' do
  it 'should report pre multi user for known <0.3.0 versions' do
    %w(0.1.0 0.1.1 0.2.0 0.2.1).each do |version|
      stub_and_check_version_pre_mu(version, true)
    end
  end

  it 'should report post multi user for >=0.3.0 versions' do
    %w(0.3.0 0.3.1 0.3.2 0.3.3 0.3.4 0.3.5 0.4.0 0.5.5 0.10.0).each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report post multi user for versions with minor<3 but major>0' do
    # Guards against naive "minor < 3" checks that ignore the major version.
    %w(1.0.0 1.0.1 1.2.0 1.2.1 1.3.0 1.4.4).each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report correct version with old version strings' do
    ['0.1.0 0.1.0', '0.1.1 0.1.1', '0.2.0 0.2.0', '0.2.1 0.2.1'].each do |version|
      stub_and_check_version_pre_mu(version, true)
    end
  end

  it 'should report correct version with old version strings' do
    [
      '0.3.0 0.3.0',
      '0.3.1 0.3.1',
      '0.3.2 0.3.2',
      '0.3.3 0.3.3',
      '0.3.4 0.3.4',
      '0.3.5 0.3.5',
      '0.4.0 0.4.0',
      '0.5.5 0.5.5',
      '0.10.0 0.10.0'
    ].each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  it 'should report correct version with `git describe` not being a tag' do
    stub_and_check_version_pre_mu('0.2.1 0.2.0-8-g7840e7c', true)
    ['0.3.6 0.3.5-8-g7840e7c', '0.4.0 0.3.6-8-g7840e7c'].each do |version|
      stub_and_check_version_pre_mu(version, false)
    end
  end

  # Stubs the reported extension version and asserts the pre-MU flag.
  def stub_and_check_version_pre_mu(version, is_pre_mu)
    @user.db_service.stubs(:cartodb_extension_version).returns(version)
    @user.db_service.cartodb_extension_version_pre_mu?.should eq is_pre_mu
  end
end
# INFO: since user can be also created in Central, and it can fail, we need to request notification explicitly. See #3022 for more info
it "can notify a new user creation" do
# Stub all enqueues, then expect exactly one NewOrganizationUser mail job.
::Resque.stubs(:enqueue).returns(nil)
organization = create_organization_with_owner(quota_in_bytes: 1000.megabytes)
user1 = new_user(:username => 'test', :email => "client@example.com", :organization => organization, :organization_id => organization.id, :quota_in_bytes => 20.megabytes)
user1.id = UUIDTools::UUID.timestamp_create.to_s
::Resque.expects(:enqueue).with(::Resque::UserJobs::Mail::NewOrganizationUser, user1.id).once
user1.save
# INFO: if user must be synched with a remote server it should happen before notifying
user1.notify_new_organization_user
organization.destroy
end
# change_password validation matrix: wrong old password, mismatched
# confirmation, too-short/too-long new password, and blank inputs each
# produce the expected validation errors; a valid change re-encrypts.
it "Tests password change" do
new_valid_password = '123456'
old_crypted_password = @user.crypted_password
# Wrong old password.
@user.change_password('aaabbb', new_valid_password, new_valid_password)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid") # "to_s" of validation msg
# New password and confirmation don't match.
@user.change_password(@user_password, 'aaabbb', 'bbbaaa')
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password New password doesn't match confirmation")
# Both failures at once: wrong old password AND mismatched confirmation.
@user.change_password('aaaaaa', 'aaabbb', 'bbbaaa')
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password doesn't match confirmation")
# New password below the 6-character minimum.
@user.change_password(@user_password, 'tiny', 'tiny')
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password Must be at least 6 characters long")
# New password above the 64-character maximum.
long_password = 'long' * 20
@user.change_password(@user_password, long_password, long_password)
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password Must be at most 64 characters long")
# Blank new password, plus wrong/blank old password combinations.
@user.change_password('aaaaaa', nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
@user.change_password(@user_password, nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:new_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "new_password New password can't be blank")
@user.change_password(nil, nil, nil)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
@user.change_password(nil, new_valid_password, new_valid_password)
@user.valid?.should eq false
@user.errors.fetch(:old_password).nil?.should eq false
expect {
@user.save(raise_on_failure: true)
}.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid")
# Happy path: valid change rotates the crypted password...
@user.change_password(@user_password, new_valid_password, new_valid_password)
@user.valid?.should eq true
@user.save
new_crypted_password = @user.crypted_password
(old_crypted_password != new_crypted_password).should eq true
# ...and changing back restores the original hash.
@user.change_password(new_valid_password, @user_password, @user_password)
@user.valid?.should eq true
@user.save
@user.crypted_password.should eq old_crypted_password
end
# Google-signed-in users with no password set should be able to set one
# without providing a current password; doing so flips
# needs_password_confirmation? back to true and records the change date.
describe "when user is signed up with google sign-in and don't have any password yet" do
  before(:each) do
    @user.google_sign_in = true
    @user.last_password_change_date = nil
    @user.save
    # Precondition: a Google user with no prior password change is exempt
    @user.needs_password_confirmation?.should == false
    new_valid_password = '123456'
    # Old password argument is ignored in this case
    @user.change_password("doesn't matter in this case", new_valid_password, new_valid_password)
    @user.needs_password_confirmation?.should == true
  end
  it 'should allow updating password w/o a current password' do
    @user.valid?.should eq true
    @user.save
  end
  it 'should have updated last password change date' do
    @user.last_password_change_date.should_not eq nil
    @user.save
  end
end
# Verifies that purging the redis vizjson cache deletes every cached key
# (http/https variants, versioned and unversioned) for all of the user's
# visualizations, and that it is a no-op for users without visualizations.
describe "#purge_redis_vizjson_cache" do
  it "shall iterate on the user's visualizations and purge their redis cache" do
    # Create a few tables with their default vizs.
    # (idiom fix: the loop index was unused, so use `times` instead of
    # iterating a range with an ignored block parameter)
    3.times do
      t = Table.new
      t.user_id = @user.id
      t.save
    end
    collection = CartoDB::Visualization::Collection.new.fetch({user_id: @user.id})
    redis_spy = RedisDoubles::RedisSpy.new
    redis_vizjson_cache = CartoDB::Visualization::RedisVizjsonCache.new
    redis_embed_cache = EmbedRedisCache.new
    # Route all redis traffic through the spy so deletions can be inspected
    CartoDB::Visualization::RedisVizjsonCache.any_instance.stubs(:redis).returns(redis_spy)
    EmbedRedisCache.any_instance.stubs(:redis).returns(redis_spy)
    # All key variants the cache may hold per visualization:
    # (https: false/true) x (no version, 3, '3n', '3a')
    redis_vizjson_keys = collection.map { |v|
      [
        redis_vizjson_cache.key(v.id, false), redis_vizjson_cache.key(v.id, true),
        redis_vizjson_cache.key(v.id, false, 3), redis_vizjson_cache.key(v.id, true, 3),
        redis_vizjson_cache.key(v.id, false, '3n'), redis_vizjson_cache.key(v.id, true, '3n'),
        redis_vizjson_cache.key(v.id, false, '3a'), redis_vizjson_cache.key(v.id, true, '3a'),
      ]
    }.flatten
    redis_vizjson_keys.should_not be_empty
    redis_embed_keys = collection.map { |v|
      [redis_embed_cache.key(v.id, false), redis_embed_cache.key(v.id, true)]
    }.flatten
    redis_embed_keys.should_not be_empty
    @user.purge_redis_vizjson_cache
    # Exactly the expected keys were deleted, in two del invocations
    # (one batch for vizjson keys, one for embed keys)
    redis_spy.deleted.should include(*redis_vizjson_keys)
    redis_spy.deleted.should include(*redis_embed_keys)
    redis_spy.deleted.count.should eq redis_vizjson_keys.count + redis_embed_keys.count
    redis_spy.invokes(:del).count.should eq 2
    redis_spy.invokes(:del).map(&:sort).should include(redis_vizjson_keys.sort)
    redis_spy.invokes(:del).map(&:sort).should include(redis_embed_keys.sort)
  end
  it "shall not fail if the user does not have visualizations" do
    user = create_user
    collection = CartoDB::Visualization::Collection.new.fetch({user_id: user.id})
    # 'http' keys
    redis_keys = collection.map(&:redis_vizjson_key)
    redis_keys.should be_empty
    # 'https' keys
    redis_keys = collection.map { |item| item.redis_vizjson_key(true) }
    redis_keys.should be_empty
    # With nothing cached, the cache must never be touched
    CartoDB::Visualization::Member.expects(:redis_cache).never
    user.purge_redis_vizjson_cache
    user.destroy
  end
end
# Regression: rows in data_imports and geocodings referencing the user
# (including a geocoding -> data_import FK) must not prevent user deletion.
describe "#regressions" do
  it "Tests geocodings and data import FK not breaking user destruction" do
    user = create_user
    user_id = user.id
    data_import_id = '11111111-1111-1111-1111-111111111111'
    # Insert raw rows (bypassing the models) so the FK chain
    # geocodings.data_import_id -> data_imports.id exists for this user
    SequelRails.connection.run(%Q{
      INSERT INTO data_imports("data_source","data_type","table_name","state","success","logger","updated_at",
        "created_at","tables_created_count",
        "table_names","append","id","table_id","user_id",
        "service_name","service_item_id","stats","type_guessing","quoted_fields_guessing","content_guessing","server","host",
        "resque_ppid","upload_host","create_visualization","user_defined_limits")
      VALUES('test','url','test','complete','t','11111111-1111-1111-1111-111111111112',
        '2015-03-17 00:00:00.94006+00','2015-03-17 00:00:00.810581+00','1',
        'test','f','#{data_import_id}','11111111-1111-1111-1111-111111111113',
        '#{user_id}','public_url', 'test',
        '[{"type":".csv","size":5015}]','t','f','t','test','0.0.0.0','13204','test','f','{"twitter_credits_limit":0}');
    })
    SequelRails.connection.run(%Q{
      INSERT INTO geocodings("table_name","processed_rows","created_at","updated_at","formatter","state",
        "id","user_id",
        "cache_hits","kind","geometry_type","processable_rows","real_rows","used_credits",
        "data_import_id"
      ) VALUES('importer_123456','197','2015-03-17 00:00:00.279934+00','2015-03-17 00:00:00.536383+00','field_1','finished',
        '11111111-1111-1111-1111-111111111114','#{user_id}','0','admin0','polygon','195','0','0',
        '#{data_import_id}');
    })
    # Destruction must succeed despite the dependent rows
    user.destroy
    ::User.find(id:user_id).should eq nil
  end
end
# Password confirmation is required for regular users, skipped for Google
# sign-in users (unless they later set a password) and for users created
# via http authentication.
describe '#needs_password_confirmation?' do
  it 'is true for a normal user' do
    # NOTE(review): this example mixes the :carto_user and :user factories
    # while the sibling examples use :user only — presumably intentional to
    # cover both models, but worth confirming.
    user = FactoryGirl.build(:carto_user, :google_sign_in => nil)
    user.needs_password_confirmation?.should == true
    user = FactoryGirl.build(:user, :google_sign_in => false)
    user.needs_password_confirmation?.should == true
  end
  it 'is false for users that signed in with Google' do
    user = FactoryGirl.build(:user, :google_sign_in => true)
    user.needs_password_confirmation?.should == false
  end
  it 'is true for users that signed in with Google but changed the password' do
    # A recorded password change re-enables confirmation
    user = FactoryGirl.build(:user, :google_sign_in => true, :last_password_change_date => Time.now)
    user.needs_password_confirmation?.should == true
  end
  it 'is false for users that were created with http authentication' do
    user = FactoryGirl.build(:valid_user, last_password_change_date: nil)
    Carto::UserCreation.stubs(:http_authentication).returns(stub(find_by_user_id: FactoryGirl.build(:user_creation)))
    user.needs_password_confirmation?.should == false
  end
end
describe 'User creation and DB critical calls' do
# End-to-end check of a standalone (non-organization) user's database setup:
# the user lives in the 'public' schema, and every grant/revoke applied by
# ::UserModule::DBService.configure_database is verified directly against
# PostgreSQL via has_*_privilege().
it 'Properly setups a new user (not belonging to an organization)' do
  CartoDB::UserModule::DBService.any_instance.stubs(
    cartodb_extension_version_pre_mu?: nil,
    monitor_user_notification: nil,
    enable_remote_db_user: nil
  )
  user_timeout_secs = 666
  user = ::User.new
  user.username = unique_name('user')
  user.email = unique_email
  user.password = user.email.split('@').first
  user.password_confirmation = user.password
  user.admin = false
  user.private_tables_enabled = true
  user.private_maps_enabled = true
  user.enabled = true
  user.table_quota = 500
  user.quota_in_bytes = 1234567890
  user.user_timeout = user_timeout_secs * 1000
  user.database_timeout = 123000
  user.geocoding_quota = 1000
  user.geocoding_block_price = 1500
  user.sync_tables_enabled = false
  user.organization = nil
  user.twitter_datasource_enabled = false
  user.avatar_url = user.default_avatar
  user.valid?.should == true
  user.save
  user.nil?.should == false
  # To avoid connection pool caching
  CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
  user.reload
  # Just to be sure all following checks will not falsely report ok using wrong schema
  user.database_schema.should eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
  user.database_schema.should_not eq user.username
  test_table_name = "table_perm_test"
  # Safety check
  user.in_database.fetch(%{
    SELECT * FROM pg_extension WHERE extname='postgis';
  }).first.nil?.should == false
  # Replicate functionality inside ::UserModule::DBService.configure_database
  # -------------------------------------------------------------------
  user.in_database.fetch(%{
    SHOW search_path;
  }).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
  # @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  # Careful as PG formatter timeout output changes to XXmin if too big
  user.in_database.fetch(%{
    SHOW statement_timeout;
  }).first[:statement_timeout].should eq "#{user_timeout_secs}s"
  # No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
  # Checks for "grant_read_on_schema_queries(SCHEMA_CARTODB, db_user)"
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      'cartodb.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Checks on SCHEMA_PUBLIC
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks on own schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{user.database_schema}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database.run(%{
    CREATE TABLE #{test_table_name}(x int);
  })
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{user.database_schema}.#{test_table_name}', 'SELECT');
  }).first[:has_table_privilege].should == true
  # _cdb_userquotainbytes is always created on the user schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks on non-org "owned" schemas
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_IMPORTER}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_GEOCODING}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  # Special raster and geo columns
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geometry_columns', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geography_columns', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_overviews', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_columns', 'SELECT');
  }).first[:has_table_privilege].should == true
  # quota check
  user.in_database(as: :superuser).fetch(%{
    SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
  }).first[:_cdb_userquotainbytes].nil?.should == false
  # Varnish invalidation function
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks of publicuser
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_schema}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Additional public user grants/revokes
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
      'SELECT');
  }).first[:has_table_privilege].should == false
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  # No DROP TABLE here: destroying the user drops its database — presumably
  # including #{test_table_name}; the org-user variant drops it explicitly.
  user.destroy
end
# End-to-end check of an organization user's database setup: the user gets
# its own schema (named after the username), inherits import limits from the
# organization owner, and every grant/revoke applied by
# ::UserModule::DBService.configure_database is verified directly against
# PostgreSQL via has_*_privilege().
it 'Properly setups a new organization user' do
  CartoDB::UserModule::DBService.any_instance.stubs(
    cartodb_extension_version_pre_mu?: nil,
    monitor_user_notification: nil,
    enable_remote_db_user: nil
  )
  disk_quota = 1234567890
  user_timeout_secs = 666
  max_import_file_size = 6666666666
  max_import_table_row_count = 55555555
  max_concurrent_import_count = 44
  max_layers = 11
  # create an owner
  organization = create_org('org-user-creation-db-checks-organization', disk_quota * 10, 10)
  user1 = create_user email: 'user1@whatever.com', username: 'creation-db-checks-org-owner', password: 'user11'
  user1.organization = organization
  user1.max_import_file_size = max_import_file_size
  user1.max_import_table_row_count = max_import_table_row_count
  user1.max_concurrent_import_count = max_concurrent_import_count
  # Fix: use the max_layers variable instead of a hard-coded 11, consistently
  # with the other limits above and the assertion on user.max_layers below.
  user1.max_layers = max_layers
  user1.save
  organization.owner_id = user1.id
  organization.save
  organization.reload
  user1.reload
  user = ::User.new
  user.username = unique_name('user')
  user.email = unique_email
  user.password = user.email.split('@').first
  user.password_confirmation = user.password
  user.admin = false
  user.private_tables_enabled = true
  user.private_maps_enabled = true
  user.enabled = true
  user.table_quota = 500
  user.quota_in_bytes = disk_quota
  user.user_timeout = user_timeout_secs * 1000
  user.database_timeout = 123000
  user.geocoding_quota = 1000
  user.geocoding_block_price = 1500
  user.sync_tables_enabled = false
  user.organization = organization
  user.twitter_datasource_enabled = false
  user.avatar_url = user.default_avatar
  user.valid?.should == true
  user.save
  user.nil?.should == false
  # To avoid connection pool caching
  CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
  user.reload
  # Org users inherit the owner's import limits
  user.max_import_file_size.should eq max_import_file_size
  user.max_import_table_row_count.should eq max_import_table_row_count
  user.max_concurrent_import_count.should eq max_concurrent_import_count
  user.max_layers.should eq max_layers
  # Just to be sure all following checks will not falsely report ok using wrong schema
  user.database_schema.should_not eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
  user.database_schema.should eq user.username
  test_table_name = "table_perm_test"
  # Safety check
  user.in_database.fetch(%{
    SELECT * FROM pg_extension WHERE extname='postgis';
  }).first.nil?.should == false
  # Replicate functionality inside ::UserModule::DBService.configure_database
  # -------------------------------------------------------------------
  user.in_database.fetch(%{
    SHOW search_path;
  }).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
  # @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  # Careful as PG formatter timeout output changes to XXmin if too big
  user.in_database.fetch(%{
    SHOW statement_timeout;
  }).first[:statement_timeout].should eq "#{user_timeout_secs}s"
  # No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      'cartodb.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Checks on SCHEMA_PUBLIC
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks on own schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{user.database_schema}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database.run(%{
    CREATE TABLE #{test_table_name}(x int);
  })
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{user.database_schema}.#{test_table_name}', 'SELECT');
  }).first[:has_table_privilege].should == true
  # _cdb_userquotainbytes is always created on the user schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # quota check
  user.in_database(as: :superuser).fetch(%{
    SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
  }).first[:_cdb_userquotainbytes].nil?.should == false
  # Varnish invalidation function
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks of publicuser
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_schema}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Additional public user grants/revokes
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
      'SELECT');
  }).first[:has_table_privilege].should == false
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  # Clean up the scratch table before tearing the user/org down
  user.in_database.run(%{
    DROP TABLE #{user.database_schema}.#{test_table_name};
  })
  user.destroy
  organization.destroy
end
end
# DBService write-lock toggling: disable_writes must make the user database
# read-only (and be reported by writes_enabled?), enable_writes must restore
# it. Connections are terminated after each toggle so pooled connections
# cannot mask the change.
describe "Write locking" do
  it "detects locking properly" do
    @user.db_service.writes_enabled?.should eq true
    @user.db_service.disable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.writes_enabled?.should eq false
    @user.db_service.enable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.writes_enabled?.should eq true
  end
  it "enables and disables writes in user database" do
    # Writes work, fail while disabled, and work again after re-enabling
    @user.db_service.run_pg_query("create table foo_1(a int);")
    @user.db_service.disable_writes
    @user.db_service.terminate_database_connections
    lambda {
      @user.db_service.run_pg_query("create table foo_2(a int);")
    }.should raise_error(CartoDB::ErrorRunningQuery)
    @user.db_service.enable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.run_pg_query("create table foo_3(a int);")
  end
end
# User destruction must cascade to tables, visualizations (canonical and
# derived), their layers, and user layers — for both builder and viewer users.
describe '#destroy' do
  # Builds a user owning a table (with its canonical visualization), a
  # derived visualization with layers, and a user layer.
  # Returns [user, table, [canonical_visualization, visualization], user_layer].
  def create_full_data
    carto_user = FactoryGirl.create(:carto_user)
    user = ::User[carto_user.id]
    table = create_table(user_id: carto_user.id, name: 'My first table', privacy: UserTable::PRIVACY_PUBLIC)
    canonical_visualization = table.table_visualization
    map = FactoryGirl.create(:carto_map_with_layers, user_id: carto_user.id)
    carto_visualization = FactoryGirl.create(:carto_visualization, user: carto_user, map: map)
    visualization = CartoDB::Visualization::Member.new(id: carto_visualization.id).fetch
    # Force ORM to cache layers (to check if they are deleted later)
    canonical_visualization.map.layers
    visualization.map.layers
    user_layer = Layer.create(kind: 'tiled')
    user.add_layer(user_layer)
    [user, table, [canonical_visualization, visualization], user_layer]
  end
  # Asserts that none of the records created by create_full_data survive
  def check_deleted_data(user_id, table_id, visualizations, layer_id)
    ::User[user_id].should be_nil
    visualizations.each do |visualization|
      Carto::Visualization.exists?(visualization.id).should be_false
      visualization.map.layers.each { |layer| Carto::Layer.exists?(layer.id).should be_false }
    end
    Carto::UserTable.exists?(table_id).should be_false
    Carto::Layer.exists?(layer_id).should be_false
  end
  it 'destroys all related information' do
    user, table, visualizations, layer = create_full_data
    ::User[user.id].destroy
    check_deleted_data(user.id, table.id, visualizations, layer.id)
  end
  it 'destroys all related information, even for viewer users' do
    user, table, visualizations, layer = create_full_data
    user.viewer = true
    user.save
    user.reload
    user.destroy
    check_deleted_data(user.id, table.id, visualizations, layer.id)
  end
end
# visualization_count filtering options: type, privacy, and exclusion of
# shared and raster visualizations.
describe '#visualization_count' do
  include_context 'organization with users helper'
  include TableSharing
  it 'filters by type if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(type: Carto::Visualization::TYPE_DERIVED).should eq 1
    # Other types must not match the derived visualization
    [Carto::Visualization::TYPE_CANONICAL, Carto::Visualization::TYPE_REMOTE].each do |type|
      @org_user_1.visualization_count(type: type).should eq 0
    end
    vis.destroy
  end
  it 'filters by privacy if asked' do
    vis = FactoryGirl.create(:carto_visualization,
                             user_id: @org_user_1.id,
                             privacy: Carto::Visualization::PRIVACY_PUBLIC)
    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(privacy: Carto::Visualization::PRIVACY_PUBLIC).should eq 1
    [
      Carto::Visualization::PRIVACY_PRIVATE,
      Carto::Visualization::PRIVACY_LINK,
      Carto::Visualization::PRIVACY_PROTECTED
    ].each do |privacy|
      @org_user_1.visualization_count(privacy: privacy).should eq 0
    end
    vis.destroy
  end
  it 'filters by shared exclusion if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
    share_visualization_with_user(vis, @org_user_2)
    # Shared-in visualizations count by default but can be excluded
    @org_user_2.visualization_count.should eq 1
    @org_user_2.visualization_count(exclude_shared: true).should eq 0
    vis.destroy
  end
  it 'filters by raster exclusion if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, kind: Carto::Visualization::KIND_RASTER)
    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(exclude_raster: true).should eq 0
    vis.destroy
  end
end
# Viewer users must always have zero quotas and no soft limits, regardless
# of what is requested at creation, when demoting a builder to viewer, or
# when attempting to change quotas afterwards.
describe 'viewer user' do
  after(:each) do
    @user.destroy if @user
  end
  # All quotas forced to 0 and all soft limits forced off for viewers
  def verify_viewer_quota(user)
    user.quota_in_bytes.should eq 0
    user.geocoding_quota.should eq 0
    user.soft_geocoding_limit.should eq false
    user.twitter_datasource_quota.should eq 0
    user.soft_twitter_datasource_limit.should eq false
    user.here_isolines_quota.should eq 0
    user.soft_here_isolines_limit.should eq false
    user.obs_snapshot_quota.should eq 0
    user.soft_obs_snapshot_limit.should eq false
    user.obs_general_quota.should eq 0
    user.soft_obs_general_limit.should eq false
  end
  describe 'creation' do
    it 'assigns 0 as quota and no soft limit no matter what is requested' do
      @user = create_user email: 'u_v@whatever.com', username: 'viewer', password: 'user11', viewer: true,
                          geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
                          soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
                          obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
                          soft_obs_general_limit: true
      verify_viewer_quota(@user)
    end
  end
  describe 'builder -> viewer' do
    it 'assigns 0 as quota and no soft limit no matter what is requested' do
      @user = create_user email: 'u_v@whatever.com', username: 'builder-to-viewer', password: 'user11', viewer: false,
                          geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
                          soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
                          obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
                          soft_obs_general_limit: true
      # Random check, but we can trust create_user
      @user.quota_in_bytes.should_not eq 0
      @user.viewer = true
      @user.save
      @user.reload
      verify_viewer_quota(@user)
    end
  end
  describe 'quotas' do
    it "can't change for viewer users" do
      @user = create_user(viewer: true)
      verify_viewer_quota(@user)
      @user.quota_in_bytes = 666
      @user.save
      @user.reload
      verify_viewer_quota(@user)
    end
  end
end
protected
# Builds and saves an Organization with a uniquified name and the given
# byte quota and seat count; returns the saved organization.
def create_org(org_name, org_quota, org_seats)
  Organization.new.tap do |org|
    org.name = unique_name(org_name)
    org.quota_in_bytes = org_quota
    org.seats = org_seats
    org.save
  end
end
# Returns the tables behind every canonical visualization the user owns or
# has had shared with them.
def tables_including_shared(user)
  query = Carto::VisualizationQueryBuilder
          .new
          .with_owned_by_or_shared_with_user_id(user.id)
          .with_type(Carto::Visualization::TYPE_CANONICAL)
  query.build.map(&:table)
end
end
# Tests for API key deletion
# coding: utf-8
require 'ostruct'
require_relative '../spec_helper'
require_relative 'user_shared_examples'
require_relative '../../services/dataservices-metrics/lib/isolines_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_snapshot_usage_metrics'
require_relative '../../services/dataservices-metrics/lib/observatory_general_usage_metrics'
require 'factories/organizations_contexts'
require_relative '../../app/model_factories/layer_factory'
require_dependency 'cartodb/redis_vizjson_cache'
require 'helpers/unique_names_helper'
require 'factories/users_helper'
require 'factories/database_configuration_contexts'
include UniqueNamesHelper
# Runs the shared 'user models' examples against the Sequel ::User model.
# The three defs below are the contract required by the shared example group,
# so their names must not change.
describe 'refactored behaviour' do
  it_behaves_like 'user models' do
    def get_twitter_imports_count_by_user_id(user_id)
      get_user_by_id(user_id).get_twitter_imports_count
    end
    def get_user_by_id(user_id)
      ::User.where(id: user_id).first
    end
    def create_user
      FactoryGirl.create(:valid_user)
    end
  end
end
describe User do
# Avoid touching a real remote DB user service in every example.
# NOTE(review): this stub is repeated in the other before(:each) hook below;
# presumably redundant — a candidate for consolidation.
before(:each) do
  CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
end
# One-time fixtures: two real users with provisioned databases, reused by
# every example in this describe (torn down in after(:all)).
before(:all) do
  bypass_named_maps
  @user_password = 'admin123'
  puts "\n[rspec][user_spec] Creating test user databases..."
  @user = create_user :email => 'admin@example.com', :username => 'admin', :password => @user_password
  @user2 = create_user :email => 'user@example.com', :username => 'user', :password => 'user123'
  puts "[rspec][user_spec] Loading user data..."
  reload_user_data(@user) && @user.reload
  puts "[rspec][user_spec] Running..."
end
# Per-example stubs: skip named maps, Varnish invalidations, remote DB user
# provisioning and cdb_tablemetadata updates so examples stay fast/isolated.
before(:each) do
  bypass_named_maps
  CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
  CartoDB::UserModule::DBService.any_instance.stubs(:enable_remote_db_user).returns(true)
  Table.any_instance.stubs(:update_cdb_tablemetadata)
end
# Tear down the shared fixture users (and their databases)
after(:all) do
  bypass_named_maps
  @user.destroy
  @user2.destroy
end
# Username validation: rejects special characters, underscores, uppercase,
# dots and leading/trailing dashes; accepts lowercase alphanumerics with
# internal dashes.
it "should only allow legal usernames" do
  rejected = %w(si$mon 'sergio estella' j@vi sergio£££ simon_tokumine SIMON Simon jose.rilla -rilla rilla-)
  accepted = %w(simon javier-de-la-torre sergio-leiva sergio99)

  rejected.each do |candidate|
    @user.username = candidate
    @user.valid?.should be_false
    @user.errors[:username].should be_present
  end

  accepted.each do |candidate|
    @user.username = candidate
    @user.valid?.should be_true
    @user.errors[:username].should be_blank
  end
end
# A username that collides with an existing organization name is rejected;
# an unused name passes.
it "should not allow a username in use by an organization" do
  organization = create_org('testusername', 10.megabytes, 1)

  @user.username = organization.name
  @user.valid?.should be_false

  @user.username = 'wadus'
  @user.valid?.should be_true
end
describe 'organization checks' do
# One-seat organization: once user1 occupies the seat (and becomes owner),
# a second builder user must fail validation on :organization.
it "should not be valid if his organization doesn't have more seats" do
  organization = create_org('testorg', 10.megabytes, 1)
  user1 = create_user email: 'user1@testorg.com', username: 'user1', password: 'user11'
  user1.organization = organization
  user1.save
  organization.owner_id = user1.id
  organization.save
  organization.reload
  user1.reload

  user2 = new_user
  user2.organization = organization
  user2.valid?.should be_false
  user2.errors.keys.should include(:organization)

  organization.destroy
  user1.destroy
end
# With a free seat, organization membership alone raises no :organization error.
it 'should be valid if his organization has enough seats' do
  organization = create_org('testorg', 10.megabytes, 1)
  member = ::User.new
  member.organization = organization
  member.valid?
  member.errors.keys.should_not include(:organization)
  organization.destroy
end
# Disk-quota checks compare the member's quota against the organization's
# remaining (unassigned) quota; assigned_quota is stubbed to control it.
it "should not be valid if his organization doesn't have enough disk space" do
  organization = create_org('testorg', 10.megabytes, 1)
  # All 10 MB already assigned, so any extra member quota overflows.
  organization.stubs(:assigned_quota).returns(10.megabytes)
  user = ::User.new
  user.organization = organization
  user.quota_in_bytes = 1.megabyte
  user.valid?.should be_false
  user.errors.keys.should include(:quota_in_bytes)
  organization.destroy
end

it 'should be valid if his organization has enough disk space' do
  organization = create_org('testorg', 10.megabytes, 1)
  # 9 of 10 MB assigned leaves room for one more megabyte.
  organization.stubs(:assigned_quota).returns(9.megabytes)
  user = ::User.new
  user.organization = organization
  user.quota_in_bytes = 1.megabyte
  user.valid?
  user.errors.keys.should_not include(:quota_in_bytes)
  organization.destroy
end
describe '#org_admin' do
  before(:all) do
    @organization = create_organization_with_owner
  end

  after(:all) do
    @organization.destroy
  end

  # NOTE: It's hard to test the real Groups API call here, it needs a Rails server up and running
  # Instead, we test the main step that this function does internally (creating a role)
  def create_role(user)
    user.in_database["CREATE ROLE \"#{user.database_username}_#{unique_name('role')}\""].all
  end

  it 'cannot be owner and viewer at the same time' do
    @organization.owner.viewer = true
    @organization.owner.should_not be_valid
    @organization.owner.errors.keys.should include(:viewer)
  end

  it 'cannot be admin and viewer at the same time' do
    user = ::User.new
    user.organization = @organization
    user.viewer = true
    user.org_admin = true
    user.should_not be_valid
    user.errors.keys.should include(:viewer)
  end

  # CREATE ROLE requires the database grants of an org admin, so role
  # creation doubles as a proxy for admin rights being present.
  it 'should not be able to create groups without admin rights' do
    user = FactoryGirl.create(:valid_user, organization: @organization)
    expect { create_role(user) }.to raise_error
  end

  it 'should be able to create groups with admin rights' do
    user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
    expect { create_role(user) }.to_not raise_error
  end

  it 'should revoke admin rights on demotion' do
    user = FactoryGirl.create(:valid_user, organization: @organization, org_admin: true)
    expect { create_role(user) }.to_not raise_error
    user.org_admin = false
    user.save
    expect { create_role(user) }.to raise_error
  end
end
# Email validation against the organization's whitelisted_email_domains.
describe 'organization email whitelisting' do
  before(:each) do
    @organization = create_org('testorg', 10.megabytes, 1)
  end

  after(:each) do
    @organization.destroy
  end

  it 'valid_user is valid' do
    user = FactoryGirl.build(:valid_user)
    user.valid?.should == true
  end

  it 'user email is valid if organization has not whitelisted domains' do
    user = FactoryGirl.build(:valid_user, organization: @organization)
    user.valid?.should == true
  end

  it 'user email is not valid if organization has whitelisted domains and email is not under that domain' do
    @organization.whitelisted_email_domains = [ 'organization.org' ]
    user = FactoryGirl.build(:valid_user, organization: @organization)
    user.valid?.should eq false
    # errors[:email] is never nil (missing keys yield an empty collection),
    # so the original `should_not be_nil` could not fail; assert presence.
    user.errors[:email].should be_present
  end

  it 'user email is valid if organization has whitelisted domains and email is under that domain' do
    user = FactoryGirl.build(:valid_user, organization: @organization)
    @organization.whitelisted_email_domains = [ user.email.split('@')[1] ]
    user.valid?.should eq true
    user.errors[:email].should == []
  end
end
describe 'when updating user quota' do
  # create_organization_with_users assigns the full 70 MB across members.
  it 'should be valid if his organization has enough disk space' do
    organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
    organization.assigned_quota.should == 70.megabytes
    user = organization.owner
    user.quota_in_bytes = 1.megabyte
    user.valid?
    user.errors.keys.should_not include(:quota_in_bytes)
    organization.destroy
  end

  it "should not be valid if his organization doesn't have enough disk space" do
    organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
    organization.assigned_quota.should == 70.megabytes
    user = organization.owner
    # 71 MB requested > 70 MB total organization quota.
    user.quota_in_bytes = 71.megabytes
    user.valid?.should be_false
    user.errors.keys.should include(:quota_in_bytes)
    organization.destroy
  end
end
describe 'when updating viewer state' do
  before(:all) do
    @organization = create_organization_with_users(quota_in_bytes: 70.megabytes)
  end

  after(:all) do
    @organization.destroy
  end

  # Reset seat counts before every example so each scenario starts with room.
  before(:each) do
    @organization.viewer_seats = 10
    @organization.seats = 10
    @organization.save
  end

  it 'should not allow changing to viewer without seats' do
    @organization.viewer_seats = 0
    @organization.save

    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    expect(user).not_to be_valid
    expect(user.errors.keys).to include(:organization)
  end

  it 'should allow changing to viewer with enough seats' do
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    expect(user).to be_valid
    expect(user.errors.keys).not_to include(:organization)
  end

  it 'should not allow changing to builder without seats' do
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    user.save

    # Shrink builder seats so demoting back to builder exceeds capacity.
    @organization.seats = 1
    @organization.save

    user.reload
    user.viewer = false
    expect(user).not_to be_valid
    expect(user.errors.keys).to include(:organization)
  end

  it 'should allow changing to builder with seats' do
    user = @organization.users.find { |u| !u.organization_owner? }
    user.reload
    user.viewer = true
    user.save

    user.reload
    user.viewer = false
    expect(user).to be_valid
    expect(user.errors.keys).not_to include(:organization)
  end
end
# Non-owner members get the ORGANIZATION USER account type.
it 'should set account_type properly' do
  organization = create_organization_with_users
  members = organization.users.reject(&:organization_owner?)
  members.each { |member| member.account_type.should == "ORGANIZATION USER" }
  organization.destroy
end
# Organization members inherit defaults (max_layers, private/sync tables)
# unless the attribute is set explicitly on the user.
it 'should set default settings properly unless overriden' do
  organization = create_organization_with_users
  organization.users.reject(&:organization_owner?).each do |u|
    u.max_layers.should eq ::User::DEFAULT_MAX_LAYERS
    u.private_tables_enabled.should be_true
    u.sync_tables_enabled.should be_true
  end
  # Explicit value wins over the default.
  user = FactoryGirl.build(:user, organization: organization)
  user.max_layers = 3
  user.save
  user.max_layers.should == 3
  organization.destroy
end
describe 'google_maps_key and google_maps_private_key' do
  before(:all) do
    @organization = create_organization_with_users(google_maps_key: 'gmk', google_maps_private_key: 'gmpk')
    @organization.google_maps_key.should_not be_nil
    @organization.google_maps_private_key.should_not be_nil
  end

  after(:all) do
    @organization.destroy
  end

  # New members pick up both Google Maps keys from the organization.
  it 'should be inherited from organization for new users' do
    @organization.users.should_not be_empty
    @organization.users.reject(&:organization_owner?).each do |u|
      u.google_maps_key.should == @organization.google_maps_key
      u.google_maps_private_key.should == @organization.google_maps_private_key
    end
  end
end
# twitter_datasource_enabled propagates from the organization to both the
# members built by the factory and users created afterwards.
it 'should inherit twitter_datasource_enabled from organization on creation' do
  organization = create_organization_with_users(twitter_datasource_enabled: true)
  organization.save
  organization.twitter_datasource_enabled.should be_true
  organization.users.reject(&:organization_owner?).each do |u|
    u.twitter_datasource_enabled.should be_true
  end
  user = create_user(organization: organization)
  user.save
  user.twitter_datasource_enabled.should be_true
  organization.destroy
end
# private_maps_enabled is a derived (non-persisted) setting and reads true
# for organization members.
it "should return proper values for non-persisted settings" do
  organization = create_organization_with_users
  non_owners = organization.users.reject(&:organization_owner?)
  non_owners.each { |member| member.private_maps_enabled.should be_true }
  organization.destroy
end
end
describe 'central synchronization' do
  # Only runs when Central API credentials are configured; otherwise pending.
  it 'should create remote user in central if needed' do
    pending "Central API credentials not provided" unless ::User.new.sync_data_with_cartodb_central?
    organization = create_org('testorg', 500.megabytes, 1)
    user = create_user email: 'user1@testorg.com', username: 'user1', password: 'user11'
    user.organization = organization
    user.save
    # Exactly one Central call carrying the :create attribute payload.
    Cartodb::Central.any_instance.expects(:create_organization_user).with(organization.name, user.allowed_attributes_to_central(:create)).once
    user.create_in_central.should be_true
    organization.destroy
  end
end
# Feature flags are linked to users through the feature_flags_users join table.
it 'should store feature flags' do
  ff = FactoryGirl.create(:feature_flag, id: 10001, name: 'ff10001')

  user = create_user :email => 'ff@example.com', :username => 'ff-user-01', :password => 'ff-user-01'
  user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
  user.save
  user.feature_flags_user.map { |ffu| ffu.feature_flag_id }.should include(ff.id)
  user.destroy
end

# Destroying a user removes the join rows but keeps the flag itself.
it 'should delete feature flags assignations to a deleted user' do
  ff = FactoryGirl.create(:feature_flag, id: 10002, name: 'ff10002')

  user = create_user :email => 'ff2@example.com', :username => 'ff2-user-01', :password => 'ff2-user-01'
  user.set_relationships_from_central({ feature_flags: [ ff.id.to_s ]})
  user.save
  user_id = user.id
  user.destroy
  SequelRails.connection["select count(*) from feature_flags_users where user_id = '#{user_id}'"].first[:count].should eq 0
  SequelRails.connection["select count(*) from feature_flags where id = '#{ff.id}'"].first[:count].should eq 1
end
# dashboard_viewed flag lifecycle: off for a fresh user, on after viewing.
it "should have a default dashboard_viewed? false" do
  ::User.new.dashboard_viewed?.should be_false
end

it "should reset dashboard_viewed when dashboard gets viewed" do
  fresh_user = ::User.new
  fresh_user.view_dashboard
  fresh_user.dashboard_viewed?.should be_true
end
# A plaintext password may be omitted only when crypted_password and salt are
# supplied directly (e.g. users synced from Central).
it "should validate that password is present if record is new and crypted_password or salt are blank" do
  user = ::User.new
  user.username = "adminipop"
  user.email = "adminipop@example.com"

  user.valid?.should be_false
  user.errors[:password].should be_present

  # Borrow a valid crypted_password/salt pair from an equivalent user.
  another_user = new_user(user.values.merge(:password => "admin123"))
  user.crypted_password = another_user.crypted_password
  user.salt = another_user.salt
  user.valid?.should be_true
  user.save

  # Let's ensure that crypted_password and salt does not change
  user_check = ::User[user.id]
  user_check.crypted_password.should == another_user.crypted_password
  user_check.salt.should == another_user.salt

  user.password = nil
  user.valid?.should be_true

  user.destroy
end
# Password must be present and within length bounds: missing, too short
# ('short') and too long (66 chars) all fail with a :password error.
it "should validate password presence and length" do
  user = ::User.new
  user.username = "adminipop"
  user.email = "adminipop@example.com"

  [nil, 'short', 'manolo' * 11].each do |candidate|
    user.password = candidate unless candidate.nil?
    user.valid?.should be_false
    user.errors[:password].should be_present
  end
end
# statement_timeout defaults to 5 minutes for both the owner role and the
# public (anonymous) role.
it "should set default statement timeout values" do
  @user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
  @user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end

# user_timeout (ms) applies to the owner's role only.
it "should keep in sync user statement_timeout" do
  @user.user_timeout = 1000000
  @user.database_timeout = 300000
  @user.save
  @user.in_database["show statement_timeout"].first[:statement_timeout].should == "1000s"
  @user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "5min"
end

# database_timeout (ms) applies to the public role only.
it "should keep in sync database statement_timeout" do
  @user.user_timeout = 300000
  @user.database_timeout = 1000000
  @user.save
  @user.in_database["show statement_timeout"].first[:statement_timeout].should == "5min"
  @user.in_database(as: :public_user)["show statement_timeout"].first[:statement_timeout].should == "1000s"
end
# Attributes rendered into vizjson (account type, disqus shortname) must
# purge the Varnish vizjson cache exactly once; quota changes must not.
it "should invalidate all his vizjsons when his account type changes" do
  @user.account_type = 'WADUS'
  CartoDB::Varnish.any_instance.expects(:purge)
    .with("#{@user.database_name}.*:vizjson").times(1).returns(true)
  @user.save
end

it "should invalidate all his vizjsons when his disqus_shortname changes" do
  @user.disqus_shortname = 'WADUS'
  CartoDB::Varnish.any_instance.expects(:purge)
    .with("#{@user.database_name}.*:vizjson").times(1).returns(true)
  @user.save
end

it "should not invalidate anything when his quota_in_bytes changes" do
  @user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
  CartoDB::Varnish.any_instance.expects(:purge).times(0)
  @user.save
end

# Quota changes must rebuild the database quota trigger instead.
it "should rebuild the quota trigger after changing the quota" do
  @user.db_service.expects(:rebuild_quota_trigger).once
  @user.quota_in_bytes = @user.quota_in_bytes + 1.megabytes
  @user.save
end
# Deprecated path: merging of legacy per-day api_calls with the ES-backed
# counters; kept pending for reference.
it "should read api calls from external service" do
  pending "This is deprecated. This code has been moved"
  @user.stubs(:get_old_api_calls).returns({
    "per_day" => [0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 17, 4, 0, 0, 0, 0],
    "total"=>49,
    "updated_at"=>1370362756
  })
  @user.stubs(:get_es_api_calls_from_redis).returns([
    21, 0, 0, 0, 2, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
  ])
  @user.get_api_calls.should == [21, 0, 0, 0, 6, 17, 0, 5, 0, 0, 0, 0, 0, 0, 8, 8, 0, 5, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0]
  @user.get_api_calls(
    from: (Date.today - 6.days),
    to: Date.today
  ).should == [21, 0, 0, 0, 6, 17, 0]
end
# Stubs the Elasticsearch search endpoint and checks the date-bucketed
# aggregation is converted into a {timestamp_ms => count} hash.
it "should get final api calls from es" do
  yesterday = Date.today - 1
  today = Date.today
  # %Q formats epoch milliseconds at midnight, the key format ES returns.
  from_date = DateTime.new(yesterday.year, yesterday.month, yesterday.day, 0, 0, 0).strftime("%Q")
  to_date = DateTime.new(today.year, today.month, today.day, 0, 0, 0).strftime("%Q")
  api_url = %r{search}
  api_response = {
    "aggregations" => {
      "0" => {
        "buckets" => [
          {
            "key" => from_date.to_i,
            "doc_count" => 4
          },
          {
            "key" => to_date.to_i,
            "doc_count" => 6
          }
        ]
      }
    }
  }
  Typhoeus.stub(api_url,
                { method: :post }
  )
  .and_return(
    Typhoeus::Response.new(code: 200, body: api_response.to_json.to_s)
  )
  @user.get_api_calls_from_es.should == {from_date.to_i => 4, to_date.to_i => 6}
end
describe "avatar checks" do
  let(:user1) do
    create_user(email: 'ewdewfref34r43r43d32f45g5@example.com', username: 'u1', password: 'foobar')
  end

  after(:each) do
    user1.destroy
  end

  # Random#rand is stubbed so the fallback avatar choice is deterministic
  # (first kind / first color from the avatars config).
  it "should load a cartodb avatar url if no gravatar associated" do
    avatar_kind = Cartodb.config[:avatars]['kinds'][0]
    avatar_color = Cartodb.config[:avatars]['colors'][0]
    avatar_base_url = Cartodb.config[:avatars]['base_url']
    Random.any_instance.stubs(:rand).returns(0)
    gravatar_url = %r{gravatar.com}
    # 404 from gravatar => no avatar registered for this email.
    Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 404))
    user1.stubs(:gravatar_enabled?).returns(true)
    user1.avatar_url = nil
    user1.save
    user1.reload_avatar
    user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
  end

  it "should load a cartodb avatar url if gravatar disabled" do
    avatar_kind = Cartodb.config[:avatars]['kinds'][0]
    avatar_color = Cartodb.config[:avatars]['colors'][0]
    avatar_base_url = Cartodb.config[:avatars]['base_url']
    Random.any_instance.stubs(:rand).returns(0)
    gravatar_url = %r{gravatar.com}
    # Gravatar exists (200) but the feature is disabled => fallback is used.
    Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
    user1.stubs(:gravatar_enabled?).returns(false)
    user1.avatar_url = nil
    user1.save
    user1.reload_avatar
    user1.avatar_url.should == "#{avatar_base_url}/avatar_#{avatar_kind}_#{avatar_color}.png"
  end

  it "should load a the user gravatar url" do
    gravatar_url = %r{gravatar.com}
    Typhoeus.stub(gravatar_url, { method: :get }).and_return(Typhoeus::Response.new(code: 200))
    user1.stubs(:gravatar_enabled?).returns(true)
    user1.reload_avatar
    user1.avatar_url.should == "//#{user1.gravatar_user_url}"
  end

  describe '#gravatar_enabled?' do
    # Any config value except false/'false' keeps gravatar enabled.
    it 'should be enabled by default (every setting but false will enable it)' do
      user = ::User.new
      Cartodb.with_config(avatars: {}) { user.gravatar_enabled?.should be_true }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => true }) { user.gravatar_enabled?.should be_true }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => 'true' }) { user.gravatar_enabled?.should be_true }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => 'wadus' }) { user.gravatar_enabled?.should be_true }
    end

    it 'can be disabled' do
      user = ::User.new
      Cartodb.with_config(avatars: { 'gravatar_enabled' => false }) { user.gravatar_enabled?.should be_false }
      Cartodb.with_config(avatars: { 'gravatar_enabled' => 'false' }) { user.gravatar_enabled?.should be_false }
    end
  end
end
describe '#private_maps_enabled?' do
  # The flag is opt-in at creation time and defaults to off.
  it 'should not have private maps enabled by default' do
    user = create_user email: 'user_mpm@example.com', username: 'usermpm', password: 'usermpm'
    user.private_maps_enabled?.should eq false
    user.destroy
  end

  it 'should have private maps if enabled' do
    user = create_user email: 'user_wpm@example.com', username: 'userwpm', password: 'userwpm',
                       private_maps_enabled: true
    user.private_maps_enabled?.should eq true
    user.destroy
  end

  it 'should not have private maps if disabled' do
    user = create_user email: 'user_opm@example.com', username: 'useropm', password: 'useropm',
                       private_maps_enabled: false
    user.private_maps_enabled?.should eq false
    user.destroy
  end
end
describe '#get_geocoding_calls' do
  before do
    delete_user_data @user
    @user.stubs(:last_billing_cycle).returns(Date.today)
    @mock_redis = MockRedis.new
    @usage_metrics = CartoDB::GeocoderUsageMetrics.new(@user.username, nil, @mock_redis)
    # Seed counters: two successes today (here + internal), two more five
    # days ago (here + cache).
    @usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now)
    @usage_metrics.incr(:geocoder_internal, :success_responses, 1, Time.now)
    @usage_metrics.incr(:geocoder_here, :success_responses, 1, Time.now - 5.days)
    @usage_metrics.incr(:geocoder_cache, :success_responses, 1, Time.now - 5.days)
    CartoDB::GeocoderUsageMetrics.stubs(:new).returns(@usage_metrics)
  end

  # NOTE(review): only 1 is expected for today although two services were
  # incremented — presumably some services don't count towards billing;
  # confirm against get_geocoding_calls' implementation.
  it "should return the sum of geocoded rows for the current billing period" do
    @user.get_geocoding_calls.should eq 1
  end

  it "should return the sum of geocoded rows for the specified period" do
    @user.get_geocoding_calls(from: Time.now-5.days).should eq 3
    @user.get_geocoding_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 2
  end

  it "should return 0 when no geocodings" do
    @user.get_geocoding_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
  end
end
describe '#get_here_isolines_calls' do
  before do
    delete_user_data @user
    @mock_redis = MockRedis.new
    @usage_metrics = CartoDB::IsolinesUsageMetrics.new(@user.username, nil, @mock_redis)
    CartoDB::IsolinesUsageMetrics.stubs(:new).returns(@usage_metrics)
    @user.stubs(:last_billing_cycle).returns(Date.today)
    # NOTE(review): DateTime#<< shifts BACK one month, so this evaluates to
    # (tomorrow - 1 month) — confirm this is the intended period end.
    @user.period_end_date = (DateTime.current + 1) << 1
    @user.save.reload
  end

  it "should return the sum of here isolines rows for the current billing period" do
    @usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
    @usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
    @user.get_here_isolines_calls.should eq 10
  end

  it "should return the sum of here isolines rows for the specified period" do
    @usage_metrics.incr(:here_isolines, :isolines_generated, 10, DateTime.current)
    @usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 2))
    @usage_metrics.incr(:here_isolines, :isolines_generated, 100, (DateTime.current - 7))
    @user.get_here_isolines_calls(from: Time.now-5.days).should eq 110
    @user.get_here_isolines_calls(from: Time.now-5.days, to: Time.now - 2.days).should eq 100
  end

  it "should return 0 when no here isolines actions" do
    @user.get_here_isolines_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
  end
end
describe '#get_obs_snapshot_calls' do
  before do
    delete_user_data @user
    @mock_redis = MockRedis.new
    @usage_metrics = CartoDB::ObservatorySnapshotUsageMetrics.new(@user.username, nil, @mock_redis)
    CartoDB::ObservatorySnapshotUsageMetrics.stubs(:new).returns(@usage_metrics)
    @user.stubs(:last_billing_cycle).returns(Date.today)
    # NOTE(review): DateTime#<< shifts BACK one month, so this evaluates to
    # (tomorrow - 1 month) — confirm this is the intended period end.
    @user.period_end_date = (DateTime.current + 1) << 1
    @user.save.reload
  end

  it "should return the sum of data observatory snapshot rows for the current billing period" do
    @usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
    @usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
    @user.get_obs_snapshot_calls.should eq 10
  end

  it "should return the sum of data observatory snapshot rows for the specified period" do
    @usage_metrics.incr(:obs_snapshot, :success_responses, 10, DateTime.current)
    @usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 2))
    @usage_metrics.incr(:obs_snapshot, :success_responses, 100, (DateTime.current - 7))
    @user.get_obs_snapshot_calls(from: Time.now - 5.days).should eq 110
    @user.get_obs_snapshot_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
  end

  # Description fixed: it was copy-pasted from the here-isolines group but
  # this example covers obs snapshot.
  it "should return 0 when no obs snapshot actions" do
    @user.get_obs_snapshot_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
  end
end
describe '#get_obs_general_calls' do
  before do
    delete_user_data @user
    @mock_redis = MockRedis.new
    @usage_metrics = CartoDB::ObservatoryGeneralUsageMetrics.new(@user.username, nil, @mock_redis)
    CartoDB::ObservatoryGeneralUsageMetrics.stubs(:new).returns(@usage_metrics)
    @user.stubs(:last_billing_cycle).returns(Date.today)
    # NOTE(review): DateTime#<< shifts BACK one month, so this evaluates to
    # (tomorrow - 1 month) — confirm this is the intended period end.
    @user.period_end_date = (DateTime.current + 1) << 1
    @user.save.reload
  end

  it "should return the sum of data observatory general rows for the current billing period" do
    @usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
    @usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
    @user.get_obs_general_calls.should eq 10
  end

  it "should return the sum of data observatory general rows for the specified period" do
    @usage_metrics.incr(:obs_general, :success_responses, 10, DateTime.current)
    @usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 2))
    @usage_metrics.incr(:obs_general, :success_responses, 100, (DateTime.current - 7))
    @user.get_obs_general_calls(from: Time.now - 5.days).should eq 110
    @user.get_obs_general_calls(from: Time.now - 5.days, to: Time.now - 2.days).should eq 100
  end

  it "should return 0 when no data observatory general actions" do
    @user.get_obs_general_calls(from: Time.now - 15.days, to: Time.now - 10.days).should eq 0
  end
end
describe "organization user deletion" do
  it "should transfer tweet imports to owner" do
    u1 = create_user(email: 'u1@exampleb.com', username: 'ub1', password: 'admin123')
    org = create_org('cartodbtestb', 1234567890, 5)

    u1.organization = org
    u1.save
    u1.reload
    org = u1.organization
    org.owner_id = u1.id
    org.save
    u1.reload

    u2 = create_user(email: 'u2@exampleb.com', username: 'ub2', password: 'admin123', organization: org)

    tweet_attributes = {
      user: u2,
      table_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
      data_import_id: '96a86fb7-0270-4255-a327-15410c2d49d4',
      service_item_id: '555',
      state: ::SearchTweet::STATE_COMPLETE
    }

    st1 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 5))
    st2 = SearchTweet.create(tweet_attributes.merge(retrieved_items: 10))

    u1.reload
    u2.reload
    # Counts start attached to u2 (the importing member)...
    u2.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items
    u1.get_twitter_imports_count.should == 0

    # ...and migrate to the organization owner when u2 is destroyed.
    u2.destroy
    u1.reload
    u1.get_twitter_imports_count.should == st1.retrieved_items + st2.retrieved_items

    org.destroy
  end
end
# The tables association reflects created user tables.
it "should have many tables" do
  @user2.tables.should be_empty
  create_table :user_id => @user2.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
  @user2.reload
  @user2.tables.all.should == [UserTable.first(:user_id => @user2.id)]
end

# Pending example (no body yet) — kept as a reminder.
it "should generate a data report"
# Table creation/destruction must move both the table-count quota and the
# byte quota in the matching direction.
it "should update remaining quotas when adding or removing tables" do
  initial_quota = @user2.remaining_quota

  expect { create_table :user_id => @user2.id, :privacy => UserTable::PRIVACY_PUBLIC }
    .to change { @user2.remaining_table_quota }.by(-1)

  table = Table.new(user_table: UserTable.filter(:user_id => @user2.id).first)
  # Insert rows so the table consumes measurable disk space.
  50.times { |i| table.insert_row!(:name => "row #{i}") }
  @user2.remaining_quota.should be < initial_quota

  initial_quota = @user2.remaining_quota

  expect { table.destroy }
    .to change { @user2.remaining_table_quota }.by(1)
  @user2.remaining_quota.should be > initial_quota
end
# Database and role names are derived from the user id (test-env naming).
it "should has his own database, created when the account is created" do
  @user.database_name.should == "cartodb_test_user_#{@user.id}_db"
  @user.database_username.should == "test_cartodb_user_#{@user.id}"
  @user.in_database.test_connection.should == true
end
# User databases get a 'cdb_importer' schema on creation.
it 'creates an importer schema in the user database' do
  @user.in_database[%Q(SELECT * FROM pg_namespace)]
    .map { |record| record.fetch(:nspname) }
    .should include 'cdb_importer'
end

it 'creates a cdb schema in the user database' do
  pending "I believe cdb schema was never used"
  @user.in_database[%Q(SELECT * FROM pg_namespace)]
    .map { |record| record.fetch(:nspname) }
    .should include 'cdb'
end
# The owner can use cdb_importer; the public (anonymous) role cannot.
it 'allows access to the importer schema by the owner' do
  @user.in_database.run(%Q{
    CREATE TABLE cdb_importer.bogus ( bogus varchar(40) )
  })
  query = %Q(SELECT * FROM cdb_importer.bogus)

  expect { @user.in_database(as: :public_user)[query].to_a }
    .to raise_error(Sequel::DatabaseError)

  @user.in_database[query].to_a
end

it 'allows access to the cdb schema by the owner' do
  pending "I believe cdb schema was never used"
  @user.in_database.run(%Q{
    CREATE TABLE cdb.bogus ( bogus varchar(40) )
  })
  query = %Q(SELECT * FROM cdb.bogus)

  expect { @user.in_database(as: :public_user)[query].to_a }
    .to raise_error(Sequel::DatabaseError)

  @user.in_database[query].to_a
end
# Each per-user database role must be able to connect to its own database
# and must be rejected by the other user's database.
# (Description typo "dabase" fixed; dead `connection = nil` removed and the
# duplicated connect/assert sequences extracted into lambdas.)
it "should create a database user that only can read it's own database" do
  # Builds a Sequel connection to `database` with the given credentials on
  # top of the test environment configuration.
  connect = lambda do |database, username, password|
    ::Sequel.connect(
      ::SequelRails.configuration.environment_for(Rails.env).merge(
        'database' => database, :logger => ::Rails.logger,
        'username' => username, 'password' => password
      )
    )
  end

  # Asserts the credentials cannot open the given (foreign) database.
  assert_no_access = lambda do |database, username, password|
    connection = connect.call(database, username, password)
    begin
      connection.test_connection
      true.should_not be_true # unreachable: cross-user access must fail
    rescue
      true.should be_true
    ensure
      connection.disconnect
    end
  end

  # Each user connects to his own database...
  own_connection = connect.call(@user.database_name, @user.database_username, @user.database_password)
  own_connection.test_connection.should == true
  own_connection.disconnect

  other_connection = connect.call(@user2.database_name, @user2.database_username, @user2.database_password)
  other_connection.test_connection.should == true
  other_connection.disconnect

  # ...but not to the other user's database.
  assert_no_access.call(@user2.database_name, @user.database_username, @user.database_password)
  assert_no_access.call(@user.database_name, @user2.database_username, @user2.database_password)
end
# Exercises run_pg_query against the import_csv_1 fixture loaded by
# reload_user_data in before(:all).
it "should run valid queries against his database" do
  # initial select tests
  query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
  query_result[:time].should_not be_blank
  query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
  query_result[:total_rows].should == 2
  query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
  query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
  query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"

  # update and reselect
  query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
  query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
  query_result[:total_rows].should == 0

  # check counts
  query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
  query_result[:total_rows].should == 2

  # test a product
  query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
  query_result[:total_rows].should == 10
  query_result[:rows].first.keys.should == [:fam, :login]
  query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }

  # test counts
  query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
  query_result[:time].should_not be_blank
  query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
  query_result[:total_rows].should == 1
  query_result[:rows].first.keys.should == [:count]
  query_result[:rows][0].should == {:count => 2}
end

# Syntax errors surface as CartoDB::ErrorRunningQuery.
it "should raise errors when running invalid queries against his database" do
  lambda {
    @user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10")
  }.should raise_error(CartoDB::ErrorRunningQuery)
end
it "should run valid queries against his database in pg mode" do
reload_user_data(@user) && @user.reload
# initial select tests
# tests results and modified flags
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 2
query_result[:rows].first.keys.sort.should == [:cartodb_id, :the_geom, :the_geom_webmercator, :id, :name_of_species, :kingdom, :family, :lat, :lon, :views].sort
query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
query_result[:results].should == true
query_result[:modified].should == false
# update and reselect
query_result = @user.db_service.run_pg_query("update import_csv_1 set family='polynoidae' where family='Polynoidae'")
query_result[:modified].should == true
query_result[:results].should == false
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 10")
query_result[:total_rows].should == 0
query_result[:modified].should == false
query_result[:results].should == true
# # check counts
query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='polynoidae' limit 10")
query_result[:total_rows].should == 2
query_result[:results].should == true
# test a product
query_result = @user.db_service.run_pg_query("select import_csv_1.family as fam, twitters.login as login from import_csv_1, twitters where family='polynoidae' limit 10")
query_result[:total_rows].should == 10
query_result[:rows].first.keys.should == [:fam, :login]
query_result[:rows][0].should == { :fam=>"polynoidae", :login=>"vzlaturistica " }
# test counts
query_result = @user.db_service.run_pg_query("select count(*) from import_csv_1 where family='polynoidae' ")
query_result[:time].should_not be_blank
query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
query_result[:total_rows].should == 1
query_result[:rows].first.keys.should == [:count]
query_result[:rows][0].should == {:count => 2}
end
# Malformed SQL must surface as a CartoDB::ErrorRunningQuery.
it "should raise errors when running invalid queries against his database in pg mode" do
  bad_query = -> { @user.db_service.run_pg_query("selectttt * from import_csv_1 where family='Polynoidae' limit 10") }
  bad_query.should raise_error(CartoDB::ErrorRunningQuery)
end
# Querying a nonexistent table maps to the specific TableNotExists error.
it "should raise errors when invalid table name used in pg mode" do
  missing_table_query = -> { @user.db_service.run_pg_query("select * from this_table_is_not_here where family='Polynoidae' limit 10") }
  missing_table_query.should raise_error(CartoDB::TableNotExists)
end
# Selecting a nonexistent column maps to the specific ColumnNotExists error.
it "should raise errors when invalid column used in pg mode" do
  missing_column_query = -> { @user.db_service.run_pg_query("select not_a_col from import_csv_1 where family='Polynoidae' limit 10") }
  missing_column_query.should raise_error(CartoDB::ColumnNotExists)
end
# Every user is expected to get an OAuth client application on creation.
it "should create a client_application for each user" do
  @user.client_application.nil?.should eq false
end
# Resetting the client application must rotate its OAuth key.
it "should reset its client application" do
  previous_key = @user.client_application.key
  @user.reset_client_application!
  @user.reload
  @user.client_application.key.should_not == previous_key
end
# When several statements are submitted at once, only the last SELECT's
# result set is returned to the caller.
it "should return the result from the last select query if multiple selects" do
  reload_user_data(@user) && @user.reload
  query_result = @user.db_service.run_pg_query("select * from import_csv_1 where family='Polynoidae' limit 1; select * from import_csv_1 where family='Polynoidae' limit 10")
  query_result[:time].should_not be_blank
  # elapsed time is reported as a float-formatted string
  query_result[:time].to_s.match(/^\d+\.\d+$/).should be_true
  query_result[:total_rows].should == 2
  query_result[:rows][0][:name_of_species].should == "Barrukia cristata"
  query_result[:rows][1][:name_of_species].should == "Eulagisca gigantea"
end
# An INSERT followed by a SELECT in a single request must apply the insert
# and return the SELECT's rows (3 rows: the 2 fixtures plus the new one).
it "should allow multiple queries in the format: insert_query; select_query" do
  query_result = @user.db_service.run_pg_query("insert into import_csv_1 (name_of_species,family) values ('cristata barrukia','Polynoidae'); select * from import_csv_1 where family='Polynoidae' ORDER BY name_of_species ASC limit 10")
  query_result[:total_rows].should == 3
  query_result[:rows].map { |i| i[:name_of_species] }.should =~ ["Barrukia cristata", "Eulagisca gigantea", "cristata barrukia"]
end
# Unknown tables still raise TableNotExists even after a fresh reload of
# the user's data.
it "should fail with error if table doesn't exist" do
  reload_user_data(@user) && @user.reload
  -> { @user.db_service.run_pg_query("select * from wadus") }.should raise_error(CartoDB::TableNotExists)
end
# User#key is the redis hash key under which the user's metadata is stored.
it "should have a method that generates users redis users_metadata key" do
  @user.key.should == "rails:users:#{@user.username}"
end
# Saving a user mirrors its connection metadata into the redis users hash.
it "replicates some user metadata in redis after saving" do
  @user.stubs(:database_name).returns('wadus')
  @user.save
  $users_metadata.HGET(@user.key, 'id').should == @user.id.to_s
  # the stubbed database_name is what must end up in redis
  $users_metadata.HGET(@user.key, 'database_name').should == 'wadus'
  $users_metadata.HGET(@user.key, 'database_password').should == @user.database_password
  $users_metadata.HGET(@user.key, 'database_host').should == @user.database_host
  # 'map_key' mirrors the user's api_key
  $users_metadata.HGET(@user.key, 'map_key').should == @user.api_key
end
# Metadata replication must also happen on first creation, not only on save.
it "should store its metadata automatically after creation" do
  user = FactoryGirl.create :user
  $users_metadata.HGET(user.key, 'id').should == user.id.to_s
  $users_metadata.HGET(user.key, 'database_name').should == user.database_name
  $users_metadata.HGET(user.key, 'database_password').should == user.database_password
  $users_metadata.HGET(user.key, 'database_host').should == user.database_host
  $users_metadata.HGET(user.key, 'map_key').should == user.api_key
  user.destroy
end
# User#timeout_key is the redis hash key holding the user's timeout limits.
it "should have a method that generates users redis limits metadata key" do
  @user.timeout_key.should == "limits:timeout:#{@user.username}"
end
# Saved statement timeouts are both replicated to redis and applied as
# statement_timeout on the user's database roles.
it "replicates db timeout limits in redis after saving and applies them to db" do
  @user.user_timeout = 200007
  @user.database_timeout = 100007
  @user.save
  $users_metadata.HGET(@user.timeout_key, 'db').should == '200007'
  $users_metadata.HGET(@user.timeout_key, 'db_public').should == '100007'
  @user.in_database do |db|
    db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200007ms' })
  end
  # the public (anonymous) role gets the database_timeout instead
  @user.in_database(as: :public_user) do |db|
    db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100007ms' })
  end
end
# Render timeouts are replicated to redis on save (no DB-side setting for
# these, unlike the statement timeouts above).
it "replicates render timeout limits in redis after saving" do
  @user.user_render_timeout = 200001
  @user.database_render_timeout = 100001
  @user.save
  $users_metadata.HGET(@user.timeout_key, 'render').should == '200001'
  $users_metadata.HGET(@user.timeout_key, 'render_public').should == '100001'
end
# Timeouts passed at creation must reach both redis and the database roles.
it "should store db timeout limits in redis after creation" do
  user = FactoryGirl.create :user, user_timeout: 200002, database_timeout: 100002
  user.user_timeout.should == 200002
  user.database_timeout.should == 100002
  $users_metadata.HGET(user.timeout_key, 'db').should == '200002'
  $users_metadata.HGET(user.timeout_key, 'db_public').should == '100002'
  user.in_database do |db|
    db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '200002ms' })
  end
  # the public (anonymous) role gets the database_timeout instead
  user.in_database(as: :public_user) do |db|
    db[%{SHOW statement_timeout}].first.should eq({ statement_timeout: '100002ms' })
  end
  user.destroy
end
# Render timeouts passed at creation must persist and reach redis.
it "should store render timeout limits in redis after creation" do
  user = FactoryGirl.create :user, user_render_timeout: 200003, database_render_timeout: 100003
  user.reload
  user.user_render_timeout.should == 200003
  user.database_render_timeout.should == 100003
  $users_metadata.HGET(user.timeout_key, 'render').should == '200003'
  $users_metadata.HGET(user.timeout_key, 'render_public').should == '100003'
  user.destroy
end
# Fresh users must come with positive statement timeouts already applied
# to both the owner role and the public role.
it "should have valid non-zero db timeout limits by default" do
  user = FactoryGirl.create :user
  user.user_timeout.should > 0
  user.database_timeout.should > 0
  $users_metadata.HGET(user.timeout_key, 'db').should == user.user_timeout.to_s
  $users_metadata.HGET(user.timeout_key, 'db_public').should == user.database_timeout.to_s
  # pg_settings reports the raw setting (no 'ms' suffix, unlike SHOW)
  user.in_database do |db|
    result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
    result.first.should eq(setting: user.user_timeout.to_s)
  end
  user.in_database(as: :public_user) do |db|
    result = db[%{SELECT setting FROM pg_settings WHERE name = 'statement_timeout'}]
    result.first.should eq(setting: user.database_timeout.to_s)
  end
  user.destroy
end
# Render timeouts default to 0 (disabled) for new users.
it "should have zero render timeout limits by default" do
  user = FactoryGirl.create :user
  user.user_render_timeout.should eq 0
  user.database_render_timeout.should eq 0
  $users_metadata.HGET(user.timeout_key, 'render').should eq '0'
  $users_metadata.HGET(user.timeout_key, 'render_public').should eq '0'
  user.destroy
end
# Saving an existing user must leave its api_key untouched.
it "should not regenerate the api_key after saving" do
  expect { @user.save }.not_to change { @user.api_key }
end
# Destroying a user must clear both its metadata hash and its timeout
# limits hash from redis.
# Consistency: create_user args use keyword-style hash syntax, matching the
# style used by the other deletion specs in this file.
it "should remove its metadata from redis after deletion" do
  doomed_user = create_user(email: 'doomed@example.com', username: 'doomed', password: 'doomed123')
  $users_metadata.HGET(doomed_user.key, 'id').should == doomed_user.id.to_s
  $users_metadata.HGET(doomed_user.timeout_key, 'db').should_not be_nil
  $users_metadata.HGET(doomed_user.timeout_key, 'db_public').should_not be_nil
  # capture the keys before destroy; the accessors go away with the record
  key = doomed_user.key
  timeout_key = doomed_user.timeout_key
  doomed_user.destroy
  $users_metadata.HGET(key, 'id').should be_nil
  $users_metadata.HGET(timeout_key, 'db').should be_nil
  $users_metadata.HGET(timeout_key, 'db_public').should be_nil
  $users_metadata.HGET(timeout_key, 'render').should be_nil
  $users_metadata.HGET(timeout_key, 'render_public').should be_nil
end
# Dropping a user must also drop its dedicated postgres database and role.
it "should remove its database and database user after deletion" do
  doomed_user = create_user :email => 'doomed1@example.com', :username => 'doomed1', :password => 'doomed123'
  create_table :user_id => doomed_user.id, :name => 'My first table', :privacy => UserTable::PRIVACY_PUBLIC
  doomed_user.reload
  # both database and role exist before destruction...
  SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
    .first[:count].should == 1
  SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
    .first[:count].should == 1
  doomed_user.destroy
  # ...and are gone afterwards
  SequelRails.connection["select count(*) from pg_catalog.pg_database where datname = '#{doomed_user.database_name}'"]
    .first[:count].should == 0
  SequelRails.connection["select count(*) from pg_catalog.pg_user where usename = '#{doomed_user.database_username}'"]
    .first[:count].should == 0
end
# Destroying a user must purge all Varnish entries for its database.
it "should invalidate its Varnish cache after deletion" do
  doomed_user = create_user :email => 'doomed2@example.com', :username => 'doomed2', :password => 'doomed123'
  CartoDB::Varnish.any_instance.expects(:purge).with("#{doomed_user.database_name}.*").returns(true)
  doomed_user.destroy
end
# Destroying a user must cascade over its tables, layers and data imports,
# purging the vizjson Varnish entries of the affected visualizations.
it "should remove its user tables, layers and data imports after deletion" do
  doomed_user = create_user(email: 'doomed2@example.com', username: 'doomed2', password: 'doomed123')
  data_import = DataImport.create(user_id: doomed_user.id, data_source: fake_data_path('clubbing.csv')).run_import!
  doomed_user.add_layer Layer.create(kind: 'carto')
  table_id = data_import.table_id
  # visualization id of the imported table, used in the vizjson cache key
  uuid = UserTable.where(id: table_id).first.table_visualization.id
  CartoDB::Varnish.any_instance.expects(:purge)
    .with("#{doomed_user.database_name}.*")
    .returns(true)
  CartoDB::Varnish.any_instance.expects(:purge)
    .with(".*#{uuid}:vizjson")
    .at_least_once
    .returns(true)
  doomed_user.destroy
  DataImport.where(user_id: doomed_user.id).count.should == 0
  UserTable.where(user_id: doomed_user.id).count.should == 0
  Layer.db["SELECT * from layers_users WHERE user_id = '#{doomed_user.id}'"].count.should == 0
end
# last_billing_cycle is derived from period_end_date relative to "today";
# Delorean freezes "today" at several dates to exercise month boundaries.
it "should correctly identify last billing cycle" do
  user = create_user :email => 'example@example.com', :username => 'example', :password => 'testingbilling'
  Delorean.time_travel_to(Date.parse("2013-01-01")) do
    user.stubs(:period_end_date).returns(Date.parse("2012-12-15"))
    user.last_billing_cycle.should == Date.parse("2012-12-15")
  end
  Delorean.time_travel_to(Date.parse("2013-01-01")) do
    user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
    user.last_billing_cycle.should == Date.parse("2012-12-02")
  end
  # day-of-month beyond February's length clamps to the last day of February
  Delorean.time_travel_to(Date.parse("2013-03-01")) do
    user.stubs(:period_end_date).returns(Date.parse("2012-12-31"))
    user.last_billing_cycle.should == Date.parse("2013-02-28")
  end
  Delorean.time_travel_to(Date.parse("2013-03-15")) do
    user.stubs(:period_end_date).returns(Date.parse("2012-12-02"))
    user.last_billing_cycle.should == Date.parse("2013-03-02")
  end
  user.destroy
  Delorean.back_to_the_present
end
# trial_ends_at depends on both upgraded_at and the account type:
# nil unless upgraded within the trial window on a trial-eligible plan.
it "should calculate the trial end date" do
  @user.stubs(:upgraded_at).returns(nil)
  @user.trial_ends_at.should be_nil
  @user.stubs(:upgraded_at).returns(Time.now - 5.days)
  # CORONELLI has no trial; MAGELLAN does
  @user.stubs(:account_type).returns('CORONELLI')
  @user.trial_ends_at.should be_nil
  @user.stubs(:account_type).returns('MAGELLAN')
  @user.trial_ends_at.should_not be_nil
  @user.stubs(:upgraded_at).returns(nil)
  @user.trial_ends_at.should be_nil
  # still within the trial window by one day
  @user.stubs(:upgraded_at).returns(Time.now - (::User::TRIAL_DURATION_DAYS - 1).days)
  @user.trial_ends_at.should_not be_nil
end
describe '#hard_geocoding_limit?' do
  it 'returns true when the plan is AMBASSADOR or FREE unless it has been manually set to false' do
    # no explicit value stored yet: plan defaults apply
    @user[:soft_geocoding_limit].should be_nil
    @user.stubs(:account_type).returns('AMBASSADOR')
    @user.soft_geocoding_limit?.should be_false
    @user.soft_geocoding_limit.should be_false
    @user.hard_geocoding_limit?.should be_true
    @user.hard_geocoding_limit.should be_true
    @user.stubs(:account_type).returns('FREE')
    @user.soft_geocoding_limit?.should be_false
    @user.soft_geocoding_limit.should be_false
    @user.hard_geocoding_limit?.should be_true
    @user.hard_geocoding_limit.should be_true
    # a manual override wins over the plan default for every account type
    @user.hard_geocoding_limit = false
    @user[:soft_geocoding_limit].should_not be_nil
    @user.stubs(:account_type).returns('AMBASSADOR')
    @user.soft_geocoding_limit?.should be_true
    @user.soft_geocoding_limit.should be_true
    @user.hard_geocoding_limit?.should be_false
    @user.hard_geocoding_limit.should be_false
    @user.stubs(:account_type).returns('FREE')
    @user.soft_geocoding_limit?.should be_true
    @user.soft_geocoding_limit.should be_true
    @user.hard_geocoding_limit?.should be_false
    @user.hard_geocoding_limit.should be_false
  end
  it 'returns true when for enterprise accounts unless it has been manually set to false' do
    ['ENTERPRISE', 'ENTERPRISE LUMP-SUM', 'Enterprise Medium Lumpsum AWS'].each do |account_type|
      @user.stubs(:account_type).returns(account_type)
      # reset to plan default before exercising each account type
      @user.soft_geocoding_limit = nil
      @user.soft_geocoding_limit?.should be_false
      @user.soft_geocoding_limit.should be_false
      @user.hard_geocoding_limit?.should be_true
      @user.hard_geocoding_limit.should be_true
      @user.soft_geocoding_limit = true
      @user.soft_geocoding_limit?.should be_true
      @user.soft_geocoding_limit.should be_true
      @user.hard_geocoding_limit?.should be_false
      @user.hard_geocoding_limit.should be_false
    end
  end
  it 'returns false when the plan is CORONELLI or MERCATOR unless it has been manually set to true' do
    @user.stubs(:account_type).returns('CORONELLI')
    @user.hard_geocoding_limit?.should be_false
    @user.stubs(:account_type).returns('MERCATOR')
    @user.hard_geocoding_limit?.should be_false
    @user.hard_geocoding_limit = true
    @user.stubs(:account_type).returns('CORONELLI')
    @user.hard_geocoding_limit?.should be_true
    @user.stubs(:account_type).returns('MERCATOR')
    @user.hard_geocoding_limit?.should be_true
  end
end
describe '#hard_here_isolines_limit?' do
  before(:each) do
    @user_account = create_user
  end
  # HERE isolines default to a hard limit on every plan; only a manual
  # override flips it to soft.
  it 'returns true with every plan unless it has been manually set to false' do
    @user_account[:soft_here_isolines_limit].should be_nil
    @user_account.stubs(:account_type).returns('AMBASSADOR')
    @user_account.soft_here_isolines_limit?.should be_false
    @user_account.soft_here_isolines_limit.should be_false
    @user_account.hard_here_isolines_limit?.should be_true
    @user_account.hard_here_isolines_limit.should be_true
    @user_account.stubs(:account_type).returns('FREE')
    @user_account.soft_here_isolines_limit?.should be_false
    @user_account.soft_here_isolines_limit.should be_false
    @user_account.hard_here_isolines_limit?.should be_true
    @user_account.hard_here_isolines_limit.should be_true
    # manual override wins over the plan default
    @user_account.hard_here_isolines_limit = false
    @user_account[:soft_here_isolines_limit].should_not be_nil
    @user_account.stubs(:account_type).returns('AMBASSADOR')
    @user_account.soft_here_isolines_limit?.should be_true
    @user_account.soft_here_isolines_limit.should be_true
    @user_account.hard_here_isolines_limit?.should be_false
    @user_account.hard_here_isolines_limit.should be_false
    @user_account.stubs(:account_type).returns('FREE')
    @user_account.soft_here_isolines_limit?.should be_true
    @user_account.soft_here_isolines_limit.should be_true
    @user_account.hard_here_isolines_limit?.should be_false
    @user_account.hard_here_isolines_limit.should be_false
  end
end
describe '#hard_obs_snapshot_limit?' do
  before(:each) do
    @user_account = create_user
  end
  # Data Observatory snapshot defaults to a hard limit on every plan; only a
  # manual override flips it to soft.
  it 'returns true with every plan unless it has been manually set to false' do
    @user_account[:soft_obs_snapshot_limit].should be_nil
    @user_account.stubs(:account_type).returns('AMBASSADOR')
    @user_account.soft_obs_snapshot_limit?.should be_false
    @user_account.soft_obs_snapshot_limit.should be_false
    @user_account.hard_obs_snapshot_limit?.should be_true
    @user_account.hard_obs_snapshot_limit.should be_true
    @user_account.stubs(:account_type).returns('FREE')
    @user_account.soft_obs_snapshot_limit?.should be_false
    @user_account.soft_obs_snapshot_limit.should be_false
    @user_account.hard_obs_snapshot_limit?.should be_true
    @user_account.hard_obs_snapshot_limit.should be_true
    # manual override wins over the plan default
    @user_account.hard_obs_snapshot_limit = false
    @user_account[:soft_obs_snapshot_limit].should_not be_nil
    @user_account.stubs(:account_type).returns('AMBASSADOR')
    @user_account.soft_obs_snapshot_limit?.should be_true
    @user_account.soft_obs_snapshot_limit.should be_true
    @user_account.hard_obs_snapshot_limit?.should be_false
    @user_account.hard_obs_snapshot_limit.should be_false
    @user_account.stubs(:account_type).returns('FREE')
    @user_account.soft_obs_snapshot_limit?.should be_true
    @user_account.soft_obs_snapshot_limit.should be_true
    @user_account.hard_obs_snapshot_limit?.should be_false
    @user_account.hard_obs_snapshot_limit.should be_false
  end
end
describe '#hard_obs_general_limit?' do
  before(:each) do
    @user_account = create_user
  end
  # Data Observatory general defaults to a hard limit on every plan; only a
  # manual override flips it to soft.
  it 'returns true with every plan unless it has been manually set to false' do
    @user_account[:soft_obs_general_limit].should be_nil
    @user_account.stubs(:account_type).returns('AMBASSADOR')
    @user_account.soft_obs_general_limit?.should be_false
    @user_account.soft_obs_general_limit.should be_false
    @user_account.hard_obs_general_limit?.should be_true
    @user_account.hard_obs_general_limit.should be_true
    @user_account.stubs(:account_type).returns('FREE')
    @user_account.soft_obs_general_limit?.should be_false
    @user_account.soft_obs_general_limit.should be_false
    @user_account.hard_obs_general_limit?.should be_true
    @user_account.hard_obs_general_limit.should be_true
    # manual override wins over the plan default
    @user_account.hard_obs_general_limit = false
    @user_account[:soft_obs_general_limit].should_not be_nil
    @user_account.stubs(:account_type).returns('AMBASSADOR')
    @user_account.soft_obs_general_limit?.should be_true
    @user_account.soft_obs_general_limit.should be_true
    @user_account.hard_obs_general_limit?.should be_false
    @user_account.hard_obs_general_limit.should be_false
    @user_account.stubs(:account_type).returns('FREE')
    @user_account.soft_obs_general_limit?.should be_true
    @user_account.soft_obs_general_limit.should be_true
    @user_account.hard_obs_general_limit?.should be_false
    @user_account.hard_obs_general_limit.should be_false
  end
end
describe '#shared_tables' do
  # A table shared with @user via an ACL grant must show up in
  # tables_including_shared(@user) alongside the user's own tables, while
  # an unshared table belonging to another user must not.
  it 'Checks that shared tables include not only owned ones' do
    require_relative '../../app/models/visualization/collection'
    CartoDB::Varnish.any_instance.stubs(:send_command).returns(true)
    bypass_named_maps
    # No need to really touch the DB for the permissions
    Table.any_instance.stubs(:add_read_permission).returns(nil)
    # We're leaking tables from some tests, make sure there are no tables
    @user.tables.all.each { |t| t.destroy }
    @user2.tables.all.each { |t| t.destroy }
    table = Table.new
    table.user_id = @user.id
    table.save.reload
    table2 = Table.new
    table2.user_id = @user.id
    table2.save.reload
    table3 = Table.new
    table3.user_id = @user2.id
    table3.name = 'sharedtable'
    table3.save.reload
    table4 = Table.new
    table4.user_id = @user2.id
    table4.name = 'table4'
    table4.save.reload
    # Only owned tables
    user_tables = tables_including_shared(@user)
    user_tables.count.should eq 2
    # Grant permission
    user2_vis = CartoDB::Visualization::Collection.new.fetch(user_id: @user2.id, name: table3.name).first
    permission = user2_vis.permission
    permission.acl = [
      {
        type: CartoDB::Permission::TYPE_USER,
        entity: {
          id: @user.id,
          username: @user.username
        },
        access: CartoDB::Permission::ACCESS_READONLY
      }
    ]
    permission.save
    # Now owned + shared...
    user_tables = tables_including_shared(@user)
    user_tables.count.should eq 3
    # table3 was shared with @user; table4 was not
    user_tables.any? { |item| item.id == table3.id }.should eq true
    user_tables.any? { |item| item.id == table4.id }.should eq false
    @user.tables.all.each { |t| t.destroy }
    @user2.tables.all.each { |t| t.destroy }
  end
end
describe '#destroy' do
  it 'deletes database role' do
    u1 = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
    role = u1.database_username
    db = u1.in_database
    db_service = u1.db_service
    db_service.role_exists?(db, role).should == true
    u1.destroy
    # after destroy the role is gone, so querying for it on the stale
    # connection raises rather than returning false
    expect do
      db_service.role_exists?(db, role).should == false
    end.to raise_error(/role "#{role}" does not exist/)
    db.disconnect
  end
  it 'deletes api keys' do
    user = create_user(email: 'ddr@example.com', username: 'ddr', password: 'admin123')
    api_key = FactoryGirl.create(:api_key_apis, user_id: user.id)
    user.destroy
    expect(Carto::ApiKey.exists?(api_key.id)).to be_false
    # the api key's redis entry must be cleaned up too
    expect($users_metadata.exists(api_key.send(:redis_key))).to be_false
  end
  describe "on organizations" do
    include_context 'organization with users helper'
    it 'deletes database role' do
      role = @org_user_1.database_username
      db = @org_user_1.in_database
      db_service = @org_user_1.db_service
      db_service.role_exists?(db, role).should == true
      @org_user_1.destroy
      # same stale-connection behavior as for non-org users above
      expect do
        db_service.role_exists?(db, role).should == false
      end.to raise_error(/role "#{role}" does not exist/)
      db.disconnect
    end
    it 'deletes temporary analysis tables' do
      db = @org_user_2.in_database
      db.run('CREATE TABLE analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e (a int)')
      # register the table in the analysis catalog so destroy has to clean it
      db.run(%{INSERT INTO cdb_analysis_catalog (username, cache_tables, node_id, analysis_def)
               VALUES ('#{@org_user_2.username}', '{analysis_cd60938c7b_2ad1345b134ed3cd363c6de651283be9bd65094e}', 'a0', '{}')})
      @org_user_2.destroy
      db = @org_user_owner.in_database
      db["SELECT COUNT(*) FROM cdb_analysis_catalog WHERE username='#{@org_user_2.username}'"].first[:count].should eq 0
    end
    describe 'User#destroy' do
      include TableSharing
      it 'blocks deletion with shared entities' do
        @not_to_be_deleted = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
        table = create_random_table(@not_to_be_deleted)
        share_table_with_user(table, @org_user_owner)
        expect { @not_to_be_deleted.destroy }.to raise_error(/Cannot delete user, has shared entities/)
        # the user must still exist after the blocked destroy
        ::User[@not_to_be_deleted.id].should be
      end
      it 'deletes api keys and associated roles' do
        user = TestUserFactory.new.create_test_user(unique_name('user'), @organization)
        api_key = FactoryGirl.create(:api_key_apis, user_id: user.id)
        user.destroy
        expect(Carto::ApiKey.exists?(api_key.id)).to be_false
        expect($users_metadata.exists(api_key.send(:redis_key))).to be_false
        # the per-api-key database role must be dropped as well
        expect(
          @org_user_owner.in_database["SELECT 1 FROM pg_roles WHERE rolname = '#{api_key.db_role}'"].first
        ).to be_nil
      end
    end
  end
end
describe 'User#destroy_cascade' do
  include_context 'organization with users helper'
  include TableSharing
  # Unlike #destroy, destroy_cascade must proceed even when shared entities
  # exist.
  it 'allows deletion even with shared entities' do
    table = create_random_table(@org_user_1)
    # NOTE(review): the table is shared with its own owner here — possibly
    # @org_user_2 was intended; confirm against the sharing helper's intent
    share_table_with_user(table, @org_user_1)
    @org_user_1.destroy_cascade
    ::User[@org_user_1.id].should_not be
  end
end
describe '#destroy_restrictions' do
  # An organization owner cannot be destroyed while other members still
  # belong to the organization.
  it 'Checks some scenarios upon user destruction regarding organizations' do
    u1 = create_user(email: 'u1@example.com', username: 'u1', password: 'admin123')
    u2 = create_user(email: 'u2@example.com', username: 'u2', password: 'admin123')
    org = create_org('cartodb', 1234567890, 5)
    u1.organization = org
    u1.save
    u1.reload
    u1.organization.nil?.should eq false
    # make u1 the organization owner
    org = u1.organization
    org.owner_id = u1.id
    org.save
    u1.reload
    u1.organization.owner.id.should eq u1.id
    u2.organization = org
    u2.save
    u2.reload
    u2.organization.nil?.should eq false
    u2.reload
    # Cannot remove as more users depend on the org
    expect {
      u1.destroy
    }.to raise_exception CartoDB::BaseCartoDBError
    org.destroy
  end
end
describe '#cartodb_postgresql_extension_versioning' do
  # Versions below 0.3.0 predate multi-user support ("pre mu").
  it 'should report pre multi user for known <0.3.0 versions' do
    before_mu_known_versions = %w(0.1.0 0.1.1 0.2.0 0.2.1)
    before_mu_known_versions.each { |version|
      stub_and_check_version_pre_mu(version, true)
    }
  end
  it 'should report post multi user for >=0.3.0 versions' do
    after_mu_known_versions = %w(0.3.0 0.3.1 0.3.2 0.3.3 0.3.4 0.3.5 0.4.0 0.5.5 0.10.0)
    after_mu_known_versions.each { |version|
      stub_and_check_version_pre_mu(version, false)
    }
  end
  it 'should report post multi user for versions with minor<3 but major>0' do
    minor_version_edge_cases = %w(1.0.0 1.0.1 1.2.0 1.2.1 1.3.0 1.4.4)
    minor_version_edge_cases.each { |version|
      stub_and_check_version_pre_mu(version, false)
    }
  end
  # Old-style version strings contain the version twice ("x.y.z x.y.z").
  # Fix: the two examples below previously shared the exact same description,
  # making failure reports ambiguous; they are now disambiguated.
  it 'should report pre multi user with old version strings' do
    before_mu_old_known_versions = [
      '0.1.0 0.1.0',
      '0.1.1 0.1.1',
      '0.2.0 0.2.0',
      '0.2.1 0.2.1'
    ]
    before_mu_old_known_versions.each { |version|
      stub_and_check_version_pre_mu(version, true)
    }
  end
  it 'should report post multi user with old version strings' do
    after_mu_old_known_versions = [
      '0.3.0 0.3.0',
      '0.3.1 0.3.1',
      '0.3.2 0.3.2',
      '0.3.3 0.3.3',
      '0.3.4 0.3.4',
      '0.3.5 0.3.5',
      '0.4.0 0.4.0',
      '0.5.5 0.5.5',
      '0.10.0 0.10.0'
    ]
    after_mu_old_known_versions.each { |version|
      stub_and_check_version_pre_mu(version, false)
    }
  end
  it 'should report correct version with `git describe` not being a tag' do
    stub_and_check_version_pre_mu('0.2.1 0.2.0-8-g7840e7c', true)
    after_mu_old_known_versions = [
      '0.3.6 0.3.5-8-g7840e7c',
      '0.4.0 0.3.6-8-g7840e7c'
    ]
    after_mu_old_known_versions.each { |version|
      stub_and_check_version_pre_mu(version, false)
    }
  end
  # Stubs the extension version reported by the db service and asserts
  # whether it is classified as pre multi-user.
  def stub_and_check_version_pre_mu(version, is_pre_mu)
    @user.db_service.stubs(:cartodb_extension_version).returns(version)
    @user.db_service.cartodb_extension_version_pre_mu?.should eq is_pre_mu
  end
end
# INFO: since users can also be created in Central, and that can fail, notification must be requested explicitly. See #3022 for more info
# Saving a new organization user does not auto-notify; the mail job is only
# enqueued when notify_new_organization_user is called explicitly.
it "can notify a new user creation" do
  ::Resque.stubs(:enqueue).returns(nil)
  organization = create_organization_with_owner(quota_in_bytes: 1000.megabytes)
  user1 = new_user(:username => 'test', :email => "client@example.com", :organization => organization, :organization_id => organization.id, :quota_in_bytes => 20.megabytes)
  user1.id = UUIDTools::UUID.timestamp_create.to_s
  ::Resque.expects(:enqueue).with(::Resque::UserJobs::Mail::NewOrganizationUser, user1.id).once
  user1.save
  # INFO: if user must be synched with a remote server it should happen before notifying
  user1.notify_new_organization_user
  organization.destroy
end
# Exercises every validation branch of change_password: wrong old password,
# mismatched confirmation, too short, too long, blank, and finally a valid
# change followed by a change back.
it "Tests password change" do
  new_valid_password = '123456'
  old_crypted_password = @user.crypted_password
  # wrong old password
  @user.change_password('aaabbb', new_valid_password, new_valid_password)
  @user.valid?.should eq false
  @user.errors.fetch(:old_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid") # "to_s" of validation msg
  # new password does not match its confirmation
  @user.change_password(@user_password, 'aaabbb', 'bbbaaa')
  @user.valid?.should eq false
  @user.errors.fetch(:new_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "new_password New password doesn't match confirmation")
  # both errors at once: wrong old password and mismatched confirmation
  @user.change_password('aaaaaa', 'aaabbb', 'bbbaaa')
  @user.valid?.should eq false
  @user.errors.fetch(:old_password).nil?.should eq false
  @user.errors.fetch(:new_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password doesn't match confirmation")
  # new password too short (< 6 characters)
  @user.change_password(@user_password, 'tiny', 'tiny')
  @user.valid?.should eq false
  @user.errors.fetch(:new_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "new_password Must be at least 6 characters long")
  # new password too long (> 64 characters)
  long_password = 'long' * 20
  @user.change_password(@user_password, long_password, long_password)
  @user.valid?.should eq false
  @user.errors.fetch(:new_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "new_password Must be at most 64 characters long")
  # blank new password combined with a wrong old password
  @user.change_password('aaaaaa', nil, nil)
  @user.valid?.should eq false
  @user.errors.fetch(:old_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
  # blank new password with a correct old password
  @user.change_password(@user_password, nil, nil)
  @user.valid?.should eq false
  @user.errors.fetch(:new_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "new_password New password can't be blank")
  # everything blank
  @user.change_password(nil, nil, nil)
  @user.valid?.should eq false
  @user.errors.fetch(:old_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid, new_password New password can't be blank")
  # nil old password with a valid new one
  @user.change_password(nil, new_valid_password, new_valid_password)
  @user.valid?.should eq false
  @user.errors.fetch(:old_password).nil?.should eq false
  expect {
    @user.save(raise_on_failure: true)
  }.to raise_exception(Sequel::ValidationFailed, "old_password Old password not valid")
  # valid change: crypted password must actually rotate
  @user.change_password(@user_password, new_valid_password, new_valid_password)
  @user.valid?.should eq true
  @user.save
  new_crypted_password = @user.crypted_password
  (old_crypted_password != new_crypted_password).should eq true
  # change back: hashing the original password reproduces the original hash
  @user.change_password(new_valid_password, @user_password, @user_password)
  @user.valid?.should eq true
  @user.save
  @user.crypted_password.should eq old_crypted_password
end
describe "when user is signed up with google sign-in and don't have any password yet" do
  before(:each) do
    @user.google_sign_in = true
    # a nil last_password_change_date marks a user with no password set yet
    @user.last_password_change_date = nil
    @user.save
    @user.needs_password_confirmation?.should == false
    new_valid_password = '123456'
    # old password is irrelevant when no password exists yet
    @user.change_password("doesn't matter in this case", new_valid_password, new_valid_password)
    @user.needs_password_confirmation?.should == true
  end
  it 'should allow updating password w/o a current password' do
    @user.valid?.should eq true
    @user.save
  end
  it 'should have updated last password change date' do
    @user.last_password_change_date.should_not eq nil
    @user.save
  end
end
describe "#purge_redis_vizjson_cache" do
  # Purging must delete every vizjson cache key variant (http/https, each
  # API version) plus the embed cache keys, in exactly two DEL calls.
  it "shall iterate on the user's visualizations and purge their redis cache" do
    # Create a few tables with their default vizs
    # (3.times instead of (1..3).each: the index was unused)
    3.times do
      t = Table.new
      t.user_id = @user.id
      t.save
    end
    collection = CartoDB::Visualization::Collection.new.fetch({user_id: @user.id})
    redis_spy = RedisDoubles::RedisSpy.new
    redis_vizjson_cache = CartoDB::Visualization::RedisVizjsonCache.new
    redis_embed_cache = EmbedRedisCache.new
    CartoDB::Visualization::RedisVizjsonCache.any_instance.stubs(:redis).returns(redis_spy)
    EmbedRedisCache.any_instance.stubs(:redis).returns(redis_spy)
    # every key variant: http/https crossed with no version, v3, '3n', '3a'
    redis_vizjson_keys = collection.map { |v|
      [
        redis_vizjson_cache.key(v.id, false), redis_vizjson_cache.key(v.id, true),
        redis_vizjson_cache.key(v.id, false, 3), redis_vizjson_cache.key(v.id, true, 3),
        redis_vizjson_cache.key(v.id, false, '3n'), redis_vizjson_cache.key(v.id, true, '3n'),
        redis_vizjson_cache.key(v.id, false, '3a'), redis_vizjson_cache.key(v.id, true, '3a'),
      ]
    }.flatten
    redis_vizjson_keys.should_not be_empty
    redis_embed_keys = collection.map { |v|
      [redis_embed_cache.key(v.id, false), redis_embed_cache.key(v.id, true)]
    }.flatten
    redis_embed_keys.should_not be_empty
    @user.purge_redis_vizjson_cache
    redis_spy.deleted.should include(*redis_vizjson_keys)
    redis_spy.deleted.should include(*redis_embed_keys)
    redis_spy.deleted.count.should eq redis_vizjson_keys.count + redis_embed_keys.count
    # one bulk DEL for vizjson keys, one for embed keys
    redis_spy.invokes(:del).count.should eq 2
    redis_spy.invokes(:del).map(&:sort).should include(redis_vizjson_keys.sort)
    redis_spy.invokes(:del).map(&:sort).should include(redis_embed_keys.sort)
  end
  it "shall not fail if the user does not have visualizations" do
    user = create_user
    collection = CartoDB::Visualization::Collection.new.fetch({user_id: user.id})
    # 'http' keys
    redis_keys = collection.map(&:redis_vizjson_key)
    redis_keys.should be_empty
    # 'https' keys
    redis_keys = collection.map { |item| item.redis_vizjson_key(true) }
    redis_keys.should be_empty
    # with nothing to purge, the cache must never be touched
    CartoDB::Visualization::Member.expects(:redis_cache).never
    user.purge_redis_vizjson_cache
    user.destroy
  end
end
# Regression: destroying a user must not be blocked by data_imports /
# geocodings rows that reference it via FKs.
describe "#regressions" do
  it "Tests geocodings and data import FK not breaking user destruction" do
    user = create_user
    user_id = user.id
    data_import_id = '11111111-1111-1111-1111-111111111111'
    # Insert a raw data_imports row pointing at the user (bypasses the ORM
    # on purpose to reproduce the FK state seen in production).
    SequelRails.connection.run(%Q{
      INSERT INTO data_imports("data_source","data_type","table_name","state","success","logger","updated_at",
        "created_at","tables_created_count",
        "table_names","append","id","table_id","user_id",
        "service_name","service_item_id","stats","type_guessing","quoted_fields_guessing","content_guessing","server","host",
        "resque_ppid","upload_host","create_visualization","user_defined_limits")
      VALUES('test','url','test','complete','t','11111111-1111-1111-1111-111111111112',
        '2015-03-17 00:00:00.94006+00','2015-03-17 00:00:00.810581+00','1',
        'test','f','#{data_import_id}','11111111-1111-1111-1111-111111111113',
        '#{user_id}','public_url', 'test',
        '[{"type":".csv","size":5015}]','t','f','t','test','0.0.0.0','13204','test','f','{"twitter_credits_limit":0}');
    })
    # And a geocodings row referencing both the user and the data import.
    SequelRails.connection.run(%Q{
      INSERT INTO geocodings("table_name","processed_rows","created_at","updated_at","formatter","state",
        "id","user_id",
        "cache_hits","kind","geometry_type","processable_rows","real_rows","used_credits",
        "data_import_id"
      ) VALUES('importer_123456','197','2015-03-17 00:00:00.279934+00','2015-03-17 00:00:00.536383+00','field_1','finished',
        '11111111-1111-1111-1111-111111111114','#{user_id}','0','admin0','polygon','195','0','0',
        '#{data_import_id}');
    })
    user.destroy
    ::User.find(id:user_id).should eq nil
  end
end
# Password confirmation is required unless the account has no password of
# its own (Google sign-in without a later password change, or HTTP-auth
# created accounts).
describe '#needs_password_confirmation?' do
  it 'is true for a normal user' do
    user = FactoryGirl.build(:carto_user, :google_sign_in => nil)
    user.needs_password_confirmation?.should == true
    user = FactoryGirl.build(:user, :google_sign_in => false)
    user.needs_password_confirmation?.should == true
  end
  it 'is false for users that signed in with Google' do
    user = FactoryGirl.build(:user, :google_sign_in => true)
    user.needs_password_confirmation?.should == false
  end
  it 'is true for users that signed in with Google but changed the password' do
    # A non-nil last_password_change_date means a password was set later.
    user = FactoryGirl.build(:user, :google_sign_in => true, :last_password_change_date => Time.now)
    user.needs_password_confirmation?.should == true
  end
  it 'is false for users that were created with http authentication' do
    user = FactoryGirl.build(:valid_user, last_password_change_date: nil)
    Carto::UserCreation.stubs(:http_authentication).returns(stub(find_by_user_id: FactoryGirl.build(:user_creation)))
    user.needs_password_confirmation?.should == false
  end
end
describe 'User creation and DB critical calls' do
# End-to-end check of the DB-side provisioning done for a standalone
# (non-organization) user: schema, grants on cartodb/public/importer/
# geocoding schemas, quota function, statement timeout and publicuser
# permissions. Assertions are order-dependent (tables are created midway),
# so statement order must be preserved.
it 'Properly setups a new user (not belonging to an organization)' do
  CartoDB::UserModule::DBService.any_instance.stubs(
    cartodb_extension_version_pre_mu?: nil,
    monitor_user_notification: nil,
    enable_remote_db_user: nil
  )
  user_timeout_secs = 666
  user = ::User.new
  user.username = unique_name('user')
  user.email = unique_email
  user.password = user.email.split('@').first
  user.password_confirmation = user.password
  user.admin = false
  user.private_tables_enabled = true
  user.private_maps_enabled = true
  user.enabled = true
  user.table_quota = 500
  user.quota_in_bytes = 1234567890
  user.user_timeout = user_timeout_secs * 1000
  user.database_timeout = 123000
  user.geocoding_quota = 1000
  user.geocoding_block_price = 1500
  user.sync_tables_enabled = false
  user.organization = nil
  user.twitter_datasource_enabled = false
  user.avatar_url = user.default_avatar
  user.valid?.should == true
  user.save
  user.nil?.should == false
  # To avoid connection pool caching
  CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
  user.reload
  # Just to be sure all following checks will not falsely report ok using wrong schema
  user.database_schema.should eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
  user.database_schema.should_not eq user.username
  test_table_name = "table_perm_test"
  # Safety check
  user.in_database.fetch(%{
    SELECT * FROM pg_extension WHERE extname='postgis';
  }).first.nil?.should == false
  # Replicate functionality inside ::UserModule::DBService.configure_database
  # -------------------------------------------------------------------
  user.in_database.fetch(%{
    SHOW search_path;
  }).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
  # @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  # Careful as PG formatter timeout output changes to XXmin if too big
  user.in_database.fetch(%{
    SHOW statement_timeout;
  }).first[:statement_timeout].should eq "#{user_timeout_secs}s"
  # No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
  # Checks for "grant_read_on_schema_queries(SCHEMA_CARTODB, db_user)"
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      'cartodb.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Checks on SCHEMA_PUBLIC
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks on own schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{user.database_schema}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database.run(%{
    CREATE TABLE #{test_table_name}(x int);
  })
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{user.database_schema}.#{test_table_name}', 'SELECT');
  }).first[:has_table_privilege].should == true
  # _cdb_userquotainbytes is always created on the user schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks on non-org "owned" schemas
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_IMPORTER}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_GEOCODING}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  # Special raster and geo columns
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geometry_columns', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.geography_columns', 'SELECT');
  }).first[:has_table_privilege].should == true
  # NOTE(review): raster checks below use PUBLIC_DB_USER, not the owner's
  # db user — presumably intentional (raster views are world-readable); confirm.
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_overviews', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.raster_columns', 'SELECT');
  }).first[:has_table_privilege].should == true
  # quota check
  user.in_database(as: :superuser).fetch(%{
    SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
  }).first[:_cdb_userquotainbytes].nil?.should == false
  # Varnish invalidation function
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks of publicuser
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_schema}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Additional public user grants/revokes
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
      'SELECT');
  }).first[:has_table_privilege].should == false
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.destroy
end
# Same provisioning checks as the standalone-user spec above, but for an
# organization member: the user gets a schema named after the username,
# and org-wide limits (file size, row count, concurrency, layers) are
# inherited/verified. Assertions are order-dependent.
it 'Properly setups a new organization user' do
  CartoDB::UserModule::DBService.any_instance.stubs(
    cartodb_extension_version_pre_mu?: nil,
    monitor_user_notification: nil,
    enable_remote_db_user: nil
  )
  disk_quota = 1234567890
  user_timeout_secs = 666
  max_import_file_size = 6666666666
  max_import_table_row_count = 55555555
  max_concurrent_import_count = 44
  max_layers = 11
  # create an owner
  organization = create_org('org-user-creation-db-checks-organization', disk_quota * 10, 10)
  user1 = create_user email: 'user1@whatever.com', username: 'creation-db-checks-org-owner', password: 'user11'
  user1.organization = organization
  user1.max_import_file_size = max_import_file_size
  user1.max_import_table_row_count = max_import_table_row_count
  user1.max_concurrent_import_count = max_concurrent_import_count
  # Fix: use the local instead of a hard-coded 11, so the later
  # `should eq max_layers` assertion checks a single source of truth.
  user1.max_layers = max_layers
  user1.save
  organization.owner_id = user1.id
  organization.save
  organization.reload
  user1.reload
  user = ::User.new
  user.username = unique_name('user')
  user.email = unique_email
  user.password = user.email.split('@').first
  user.password_confirmation = user.password
  user.admin = false
  user.private_tables_enabled = true
  user.private_maps_enabled = true
  user.enabled = true
  user.table_quota = 500
  user.quota_in_bytes = disk_quota
  user.user_timeout = user_timeout_secs * 1000
  user.database_timeout = 123000
  user.geocoding_quota = 1000
  user.geocoding_block_price = 1500
  user.sync_tables_enabled = false
  user.organization = organization
  user.twitter_datasource_enabled = false
  user.avatar_url = user.default_avatar
  user.valid?.should == true
  user.save
  user.nil?.should == false
  # To avoid connection pool caching
  CartoDB::UserModule::DBService.terminate_database_connections(user.database_name, user.database_host)
  user.reload
  # Org members inherit the organization-level import limits.
  user.max_import_file_size.should eq max_import_file_size
  user.max_import_table_row_count.should eq max_import_table_row_count
  user.max_concurrent_import_count.should eq max_concurrent_import_count
  user.max_layers.should eq max_layers
  # Just to be sure all following checks will not falsely report ok using wrong schema
  user.database_schema.should_not eq CartoDB::UserModule::DBService::SCHEMA_PUBLIC
  user.database_schema.should eq user.username
  test_table_name = "table_perm_test"
  # Safety check
  user.in_database.fetch(%{
    SELECT * FROM pg_extension WHERE extname='postgis';
  }).first.nil?.should == false
  # Replicate functionality inside ::UserModule::DBService.configure_database
  # -------------------------------------------------------------------
  user.in_database.fetch(%{
    SHOW search_path;
  }).first[:search_path].should == user.db_service.build_search_path(user.database_schema, false)
  # @see http://www.postgresql.org/docs/current/static/functions-info.html#FUNCTIONS-INFO-ACCESS-TABLE
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{user.database_username}', '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  # Careful as PG formatter timeout output changes to XXmin if too big
  user.in_database.fetch(%{
    SHOW statement_timeout;
  }).first[:statement_timeout].should eq "#{user_timeout_secs}s"
  # No check for `set_user_as_organization_member` as cartodb-postgresql already tests it
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # SCHEMA_CARTODB has no tables to select from, except CDB_CONF on which has no permission
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      'cartodb.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Checks on SCHEMA_PUBLIC
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks on own schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{user.database_username}',
      '#{user.database_schema}', 'CREATE, USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database.run(%{
    CREATE TABLE #{test_table_name}(x int);
  })
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{user.database_username}',
      '#{user.database_schema}.#{test_table_name}', 'SELECT');
  }).first[:has_table_privilege].should == true
  # _cdb_userquotainbytes is always created on the user schema
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege('#{user.database_username}',
      '#{user.database_schema}._cdb_userquotainbytes()', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # quota check
  user.in_database(as: :superuser).fetch(%{
    SELECT #{user.database_schema}._CDB_UserQuotaInBytes();
  }).first[:_cdb_userquotainbytes].nil?.should == false
  # Varnish invalidation function
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{user.database_username}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.cdb_invalidate_varnish(text)', 'EXECUTE');
  }).first[:has_function_privilege].should == true
  # Checks of publicuser
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_database_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_name}', 'CONNECT');
  }).first[:has_database_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{user.database_schema}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_LatLng (NUMERIC, NUMERIC)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.CDB_CONF',
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER');
  }).first[:has_table_privilege].should == false
  # Additional public user grants/revokes
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_CARTODB}.cdb_tablemetadata',
      'SELECT');
  }).first[:has_table_privilege].should == false
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_schema_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}', 'USAGE');
  }).first[:has_schema_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_function_privilege(
      '#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}._postgis_stats(regclass, text, text)',
      'EXECUTE');
  }).first[:has_function_privilege].should == true
  user.in_database(as: :superuser).fetch(%{
    SELECT * FROM has_table_privilege('#{CartoDB::PUBLIC_DB_USER}',
      '#{CartoDB::UserModule::DBService::SCHEMA_PUBLIC}.spatial_ref_sys', 'SELECT');
  }).first[:has_table_privilege].should == true
  user.in_database.run(%{
    DROP TABLE #{user.database_schema}.#{test_table_name};
  })
  user.destroy
  organization.destroy
end
end
# Write locking: disable_writes/enable_writes toggle writability of the
# user DB; connections must be terminated for the change to take effect,
# since existing sessions keep the old setting.
describe "Write locking" do
  it "detects locking properly" do
    @user.db_service.writes_enabled?.should eq true
    @user.db_service.disable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.writes_enabled?.should eq false
    @user.db_service.enable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.writes_enabled?.should eq true
  end
  it "enables and disables writes in user database" do
    @user.db_service.run_pg_query("create table foo_1(a int);")
    @user.db_service.disable_writes
    @user.db_service.terminate_database_connections
    # DDL must fail while writes are disabled.
    lambda {
      @user.db_service.run_pg_query("create table foo_2(a int);")
    }.should raise_error(CartoDB::ErrorRunningQuery)
    @user.db_service.enable_writes
    @user.db_service.terminate_database_connections
    @user.db_service.run_pg_query("create table foo_3(a int);")
  end
end
# User#destroy must cascade to tables, visualizations, maps and layers,
# including for viewer users.
describe '#destroy' do
  # Builds a user with a canonical (table) viz, a derived viz and a
  # user-level layer. Returns [user, table, [canonical, derived], layer].
  def create_full_data
    carto_user = FactoryGirl.create(:carto_user)
    user = ::User[carto_user.id]
    table = create_table(user_id: carto_user.id, name: 'My first table', privacy: UserTable::PRIVACY_PUBLIC)
    canonical_visualization = table.table_visualization
    map = FactoryGirl.create(:carto_map_with_layers, user_id: carto_user.id)
    carto_visualization = FactoryGirl.create(:carto_visualization, user: carto_user, map: map)
    visualization = CartoDB::Visualization::Member.new(id: carto_visualization.id).fetch
    # Force ORM to cache layers (to check if they are deleted later)
    canonical_visualization.map.layers
    visualization.map.layers
    user_layer = Layer.create(kind: 'tiled')
    user.add_layer(user_layer)
    [user, table, [canonical_visualization, visualization], user_layer]
  end
  # Asserts every record created by create_full_data is gone.
  def check_deleted_data(user_id, table_id, visualizations, layer_id)
    ::User[user_id].should be_nil
    visualizations.each do |visualization|
      Carto::Visualization.exists?(visualization.id).should be_false
      visualization.map.layers.each { |layer| Carto::Layer.exists?(layer.id).should be_false }
    end
    Carto::UserTable.exists?(table_id).should be_false
    Carto::Layer.exists?(layer_id).should be_false
  end
  it 'destroys all related information' do
    user, table, visualizations, layer = create_full_data
    ::User[user.id].destroy
    check_deleted_data(user.id, table.id, visualizations, layer.id)
  end
  it 'destroys all related information, even for viewer users' do
    user, table, visualizations, layer = create_full_data
    user.viewer = true
    user.save
    user.reload
    user.destroy
    check_deleted_data(user.id, table.id, visualizations, layer.id)
  end
end
# #visualization_count filters: by type, privacy, shared-with-me exclusion
# and raster exclusion.
describe '#visualization_count' do
  include_context 'organization with users helper'
  include TableSharing
  it 'filters by type if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(type: Carto::Visualization::TYPE_DERIVED).should eq 1
    [Carto::Visualization::TYPE_CANONICAL, Carto::Visualization::TYPE_REMOTE].each do |type|
      @org_user_1.visualization_count(type: type).should eq 0
    end
    vis.destroy
  end
  it 'filters by privacy if asked' do
    vis = FactoryGirl.create(:carto_visualization,
                             user_id: @org_user_1.id,
                             privacy: Carto::Visualization::PRIVACY_PUBLIC)
    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(privacy: Carto::Visualization::PRIVACY_PUBLIC).should eq 1
    [
      Carto::Visualization::PRIVACY_PRIVATE,
      Carto::Visualization::PRIVACY_LINK,
      Carto::Visualization::PRIVACY_PROTECTED
    ].each do |privacy|
      @org_user_1.visualization_count(privacy: privacy).should eq 0
    end
    vis.destroy
  end
  it 'filters by shared exclusion if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, type: Carto::Visualization::TYPE_DERIVED)
    share_visualization_with_user(vis, @org_user_2)
    # Shared viz counts for the recipient unless explicitly excluded.
    @org_user_2.visualization_count.should eq 1
    @org_user_2.visualization_count(exclude_shared: true).should eq 0
    vis.destroy
  end
  it 'filters by raster exclusion if asked' do
    vis = FactoryGirl.create(:carto_visualization, user_id: @org_user_1.id, kind: Carto::Visualization::KIND_RASTER)
    @org_user_1.visualization_count.should eq 1
    @org_user_1.visualization_count(exclude_raster: true).should eq 0
    vis.destroy
  end
end
# Viewer (read-only) users always get zero quotas and no soft limits,
# regardless of what is requested at creation or update time.
describe 'viewer user' do
  after(:each) do
    @user.destroy if @user
  end
  # Asserts all quota/soft-limit fields are forced to 0/false.
  def verify_viewer_quota(user)
    user.quota_in_bytes.should eq 0
    user.geocoding_quota.should eq 0
    user.soft_geocoding_limit.should eq false
    user.twitter_datasource_quota.should eq 0
    user.soft_twitter_datasource_limit.should eq false
    user.here_isolines_quota.should eq 0
    user.soft_here_isolines_limit.should eq false
    user.obs_snapshot_quota.should eq 0
    user.soft_obs_snapshot_limit.should eq false
    user.obs_general_quota.should eq 0
    user.soft_obs_general_limit.should eq false
  end
  describe 'creation' do
    it 'assigns 0 as quota and no soft limit no matter what is requested' do
      @user = create_user email: 'u_v@whatever.com', username: 'viewer', password: 'user11', viewer: true,
                          geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
                          soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
                          obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
                          soft_obs_general_limit: true
      verify_viewer_quota(@user)
    end
  end
  describe 'builder -> viewer' do
    it 'assigns 0 as quota and no soft limit no matter what is requested' do
      @user = create_user email: 'u_v@whatever.com', username: 'builder-to-viewer', password: 'user11', viewer: false,
                          geocoding_quota: 10, soft_geocoding_limit: true, twitter_datasource_quota: 100,
                          soft_twitter_datasource_limit: 10, here_isolines_quota: 10, soft_here_isolines_limit: true,
                          obs_snapshot_quota: 100, soft_obs_snapshot_limit: true, obs_general_quota: 100,
                          soft_obs_general_limit: true
      # Random check, but we can trust create_user
      @user.quota_in_bytes.should_not eq 0
      @user.viewer = true
      @user.save
      @user.reload
      verify_viewer_quota(@user)
    end
  end
  describe 'quotas' do
    it "can't change for viewer users" do
      @user = create_user(viewer: true)
      verify_viewer_quota(@user)
      @user.quota_in_bytes = 666
      @user.save
      @user.reload
      verify_viewer_quota(@user)
    end
  end
end
protected
# Builds and persists an Organization with the given quota (bytes) and
# seat count. The name is uniquified to avoid cross-run collisions.
# Returns the saved Organization.
def create_org(org_name, org_quota, org_seats)
  Organization.new.tap do |org|
    org.name = unique_name(org_name)
    org.quota_in_bytes = org_quota
    org.seats = org_seats
    org.save
  end
end
# Canonical (table) visualizations owned by or shared with +user+,
# mapped to their underlying tables.
def tables_including_shared(user)
  query = Carto::VisualizationQueryBuilder.new
  query = query.with_owned_by_or_shared_with_user_id(user.id)
  query = query.with_type(Carto::Visualization::TYPE_CANONICAL)
  query.build.map(&:table)
end
end
|
require 'formula'

# Homebrew formula for libgpg-error 1.9, GnuPG's common error-reporting
# library.
class LibgpgError < Formula
  url 'ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.9.tar.bz2'
  homepage 'http://www.gnupg.org/'
  sha1 '6836579e42320b057a2372bbcd0325130fe2561e'

  def install
    # Force a serial build; the parallel build is unreliable for this package.
    ENV.j1
    system './configure', '--disable-dependency-tracking', "--prefix=#{prefix}"
    system 'make install'
  end
end
Update libgpg-error to 1.10.
require 'formula'

# Homebrew formula for libgpg-error 1.10, GnuPG's common error-reporting
# library.
class LibgpgError < Formula
  url 'ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.10.tar.bz2'
  homepage 'http://www.gnupg.org/'
  sha1 '95b324359627fbcb762487ab6091afbe59823b29'

  def install
    # Force a serial build; the parallel build is unreliable for this package.
    ENV.j1
    system './configure', '--disable-dependency-tracking', "--prefix=#{prefix}"
    system 'make install'
  end
end
|
# Production environment configuration (earlier snapshot; a later revision
# of this file appears below with the mailer host updated).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # NOTE(review): plain-http asset host in production; confirm https is not required.
  config.action_controller.asset_host = 'http://assets.antikb.site'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  # NOTE(review): :debug in production is very verbose — confirm intended.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]
  # Use a different cache store in production.
  config.cache_store = :redis_store, {
    host: "localhost",
    port: 6379,
    db: 0,
    namespace: "cache"
  }
  config.action_dispatch.rack_cache = {
    metastore: "redis://localhost:6379/1/metastore",
    entitystore: "redis://localhost:6379/1/entitystore"
  }
  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "rails5_#{Rails.env}"
  config.action_mailer.perform_caching = false
  config.action_mailer.raise_delivery_errors = true
  # NOTE(review): 'localhost' as the URL host in production generates broken
  # links in mails — the later snapshot of this file changes it to the real domain.
  config.action_mailer.default_url_options = { :host => 'localhost' }
  config.action_mailer.delivery_method = :smtp
  # NOTE(review): openssl_verify_mode 'none' disables TLS certificate
  # verification; acceptable only because the SMTP server is localhost — confirm.
  config.action_mailer.smtp_settings = {
    address: 'localhost',
    port: 25,
    authentication: :plain,
    enable_starttls_auto: false,
    openssl_verify_mode: 'none'
  }
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
Modified: config/environments/production.rb
app/mailers/application_mailer.rb
app/views/devise/
# Production environment configuration (revised snapshot: mailer URL host
# set to the real domain instead of localhost).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # NOTE(review): plain-http asset host in production; confirm https is not required.
  config.action_controller.asset_host = 'http://assets.antikb.site'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  # NOTE(review): :debug in production is very verbose — confirm intended.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]
  # Use a different cache store in production.
  config.cache_store = :redis_store, {
    host: "localhost",
    port: 6379,
    db: 0,
    namespace: "cache"
  }
  config.action_dispatch.rack_cache = {
    metastore: "redis://localhost:6379/1/metastore",
    entitystore: "redis://localhost:6379/1/entitystore"
  }
  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "rails5_#{Rails.env}"
  config.action_mailer.perform_caching = false
  config.action_mailer.raise_delivery_errors = true
  # Mailer links are generated against the public domain.
  config.action_mailer.default_url_options = { :host => 'antikb.site' }
  config.action_mailer.delivery_method = :smtp
  # NOTE(review): openssl_verify_mode 'none' disables TLS certificate
  # verification; acceptable only because the SMTP server is localhost — confirm.
  config.action_mailer.smtp_settings = {
    address: 'localhost',
    port: 25,
    authentication: :plain,
    enable_starttls_auto: false,
    openssl_verify_mode: 'none'
  }
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
|
Model the way the user currently works.
require 'spec_helper'

# Validation specs for User: a username must be between 4 and 10
# characters long (inclusive) on an otherwise valid record.
describe User do
  let(:valid_user) { User.new(email: 'foo@bar.com', password: 'sekret', username: 'iggy') }
  let(:user) { valid_user }

  describe '#valid?' do
    it 'with valid attributes' do
      expect(valid_user).to be_valid
    end

    it 'when username <= 10' do
      valid_user.username = 'x' * 10
      expect(valid_user).to be_valid
    end

    it 'when username >= 4' do
      valid_user.username = 'x' * 4
      expect(valid_user).to be_valid
    end
  end

  describe '#invalid?' do
    it 'when username > 10' do
      user.username = 'x' * 11
      expect(user).to be_invalid
    end

    it 'when username < 4' do
      user.username = 'x' * 3
      expect(user).to be_invalid
    end
  end
end
require "formula"

# Homebrew formula for libmemcached 1.0.18, the C/C++ memcached client
# library. Applies the inline patch in the DATA section below
# (see https://bugs.launchpad.net/libmemcached/+bug/1245562).
class Libmemcached < Formula
  homepage "http://libmemcached.org"
  url "https://launchpad.net/libmemcached/1.0/1.0.18/+download/libmemcached-1.0.18.tar.gz"
  sha1 "8be06b5b95adbc0a7cb0f232e237b648caf783e1"

  bottle do
    cellar :any
    revision 2
    sha1 "274d4fc366072ec4a530ea8c0a8a3283b361ff5a" => :yosemite
    sha1 "5966d2bcccbcc24fadfbc91b9dca113c7c8c0dda" => :mavericks
    sha1 "614d50333395e4f79db56eae6b10ee12e713d39b" => :mountain_lion
  end

  option "with-sasl", "Build with sasl support"

  # SASL support requires a memcached built with SASL enabled.
  if build.with? "sasl"
    depends_on "memcached" => "enable-sasl"
  else
    depends_on "memcached"
  end

  # https://bugs.launchpad.net/libmemcached/+bug/1245562
  patch :DATA

  def install
    # Pre-Snow-Leopard linkers need undefined symbols resolved at load time.
    ENV.append_to_cflags "-undefined dynamic_lookup" if MacOS.version <= :leopard
    args = ["--prefix=#{prefix}"]
    if build.with? "sasl"
      args << "--with-memcached-sasl=#{Formula["memcached"].bin}/memcached"
    end
    system "./configure", *args
    # Pass "make"/"install" as separate arguments (Homebrew convention:
    # avoids invoking a shell to split the string).
    system "make", "install"
  end
end
__END__
diff --git a/clients/memflush.cc b/clients/memflush.cc
index 8bd0dbf..cdba743 100644
--- a/clients/memflush.cc
+++ b/clients/memflush.cc
@@ -39,7 +39,7 @@ int main(int argc, char *argv[])
{
options_parse(argc, argv);
- if (opt_servers == false)
+ if (opt_servers == NULL)
{
char *temp;
@@ -48,7 +48,7 @@ int main(int argc, char *argv[])
opt_servers= strdup(temp);
}
- if (opt_servers == false)
+ if (opt_servers == NULL)
{
std::cerr << "No Servers provided" << std::endl;
exit(EXIT_FAILURE);
diff --git a/example/byteorder.cc b/example/byteorder.cc
index fdfa021..8c03d35 100644
--- a/example/byteorder.cc
+++ b/example/byteorder.cc
@@ -42,27 +42,59 @@
#include <example/byteorder.h>
/* Byte swap a 64-bit number. */
-#ifndef swap64
-static inline uint64_t swap64(uint64_t in)
-{
-#ifndef WORDS_BIGENDIAN
- /* Little endian, flip the bytes around until someone makes a faster/better
+#if !defined(htonll) && !defined(ntohll)
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+# if defined(__FreeBSD__)
+# include <sys/endian.h>
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# elif defined(__APPLE__)
+# include <libkern/OSByteOrder.h>
+# define htonll(x) OSSwapInt64(x)
+# define ntohll(x) OSSwapInt64(x)
+# elif defined(__OpenBSD__)
+# include <sys/types.h>
+# define htonll(x) swap64(x)
+# define ntohll(x) swap64(x)
+# elif defined(__NetBSD__)
+# include <sys/types.h>
+# include <machine/bswap.h>
+# if defined(__BSWAP_RENAME) && !defined(__bswap_32)
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# endif
+# elif defined(__sun) || defined(sun)
+# include <sys/byteorder.h>
+# define htonll(x) BSWAP_64(x)
+# define ntohll(x) BSWAP_64(x)
+# elif defined(_MSC_VER)
+# include <stdlib.h>
+# define htonll(x) _byteswap_uint64(x)
+# define ntohll(x) _byteswap_uint64(x)
+# else
+# include <byteswap.h>
+# ifndef bswap_64
+ /* Little endian, flip the bytes around until someone makes a faster/better
* way to do this. */
- uint64_t rv= 0;
- for (uint8_t x= 0; x < 8; x++)
- {
- rv= (rv << 8) | (in & 0xff);
- in >>= 8;
- }
- return rv;
+ static inline uint64_t bswap_64(uint64_t in)
+ {
+ uint64_t rv= 0;
+ for (uint8_t x= 0; x < 8; x++)
+ {
+ rv= (rv << 8) | (in & 0xff);
+ in >>= 8;
+ }
+ return rv;
+ }
+# endif
+# define htonll(x) bswap_64(x)
+# define ntohll(x) bswap_64(x)
+# endif
#else
- /* big-endian machines don't need byte swapping */
- return in;
-#endif // WORDS_BIGENDIAN
-}
+# define htonll(x) (x)
+# define ntohll(x) (x)
+#endif
#endif
-
-#ifdef HAVE_HTONLL
uint64_t example_ntohll(uint64_t value)
{
@@ -73,17 +105,3 @@ uint64_t example_htonll(uint64_t value)
{
return htonll(value);
}
-
-#else // HAVE_HTONLL
-
-uint64_t example_ntohll(uint64_t value)
-{
- return swap64(value);
-}
-
-uint64_t example_htonll(uint64_t value)
-{
- return swap64(value);
-}
-
-#endif // HAVE_HTONLL
diff --git a/libmemcached-1.0/memcached.h b/libmemcached-1.0/memcached.h
index bc16e73..dcee395 100644
--- a/libmemcached-1.0/memcached.h
+++ b/libmemcached-1.0/memcached.h
@@ -43,7 +43,11 @@
#endif
#ifdef __cplusplus
+#ifdef _LIBCPP_VERSION
# include <cinttypes>
+#else
+# include <tr1/cinttypes>
+#endif
# include <cstddef>
# include <cstdlib>
#else
diff --git a/libmemcached/byteorder.cc b/libmemcached/byteorder.cc
index 9f11aa8..f167822 100644
--- a/libmemcached/byteorder.cc
+++ b/libmemcached/byteorder.cc
@@ -39,41 +39,66 @@
#include "libmemcached/byteorder.h"
/* Byte swap a 64-bit number. */
-#ifndef swap64
-static inline uint64_t swap64(uint64_t in)
-{
-#ifndef WORDS_BIGENDIAN
- /* Little endian, flip the bytes around until someone makes a faster/better
+#if !defined(htonll) && !defined(ntohll)
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+# if defined(__FreeBSD__)
+# include <sys/endian.h>
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# elif defined(__APPLE__)
+# include <libkern/OSByteOrder.h>
+# define htonll(x) OSSwapInt64(x)
+# define ntohll(x) OSSwapInt64(x)
+# elif defined(__OpenBSD__)
+# include <sys/types.h>
+# define htonll(x) swap64(x)
+# define ntohll(x) swap64(x)
+# elif defined(__NetBSD__)
+# include <sys/types.h>
+# include <machine/bswap.h>
+# if defined(__BSWAP_RENAME) && !defined(__bswap_32)
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# endif
+# elif defined(__sun) || defined(sun)
+# include <sys/byteorder.h>
+# define htonll(x) BSWAP_64(x)
+# define ntohll(x) BSWAP_64(x)
+# elif defined(_MSC_VER)
+# include <stdlib.h>
+# define htonll(x) _byteswap_uint64(x)
+# define ntohll(x) _byteswap_uint64(x)
+# else
+# include <byteswap.h>
+# ifndef bswap_64
+ /* Little endian, flip the bytes around until someone makes a faster/better
* way to do this. */
- uint64_t rv= 0;
- for (uint8_t x= 0; x < 8; ++x)
- {
- rv= (rv << 8) | (in & 0xff);
- in >>= 8;
- }
- return rv;
+ static inline uint64_t bswap_64(uint64_t in)
+ {
+ uint64_t rv= 0;
+ for (uint8_t x= 0; x < 8; x++)
+ {
+ rv= (rv << 8) | (in & 0xff);
+ in >>= 8;
+ }
+ return rv;
+ }
+# endif
+# define htonll(x) bswap_64(x)
+# define ntohll(x) bswap_64(x)
+# endif
#else
- /* big-endian machines don't need byte swapping */
- return in;
-#endif // WORDS_BIGENDIAN
-}
+# define htonll(x) (x)
+# define ntohll(x) (x)
+#endif
#endif
-
uint64_t memcached_ntohll(uint64_t value)
{
-#ifdef HAVE_HTONLL
return ntohll(value);
-#else
- return swap64(value);
-#endif
}
uint64_t memcached_htonll(uint64_t value)
{
-#ifdef HAVE_HTONLL
return htonll(value);
-#else
- return swap64(value);
-#endif
}
libmemcached: adjust patch based on upstream comments
Closes #35113.
Signed-off-by: Jack Nagel <43386ce32af96f5c56f2a88e458cb94cebee3751@gmail.com>
require "formula"

# Homebrew formula for libmemcached, the C/C++ client library for memcached.
# revision 1: rebuilt after the bundled patch was adjusted upstream.
class Libmemcached < Formula
  homepage "http://libmemcached.org"
  url "https://launchpad.net/libmemcached/1.0/1.0.18/+download/libmemcached-1.0.18.tar.gz"
  sha1 "8be06b5b95adbc0a7cb0f232e237b648caf783e1"
  revision 1

  bottle do
    cellar :any
    revision 2
    sha1 "274d4fc366072ec4a530ea8c0a8a3283b361ff5a" => :yosemite
    sha1 "5966d2bcccbcc24fadfbc91b9dca113c7c8c0dda" => :mavericks
    sha1 "614d50333395e4f79db56eae6b10ee12e713d39b" => :mountain_lion
  end

  option "with-sasl", "Build with sasl support"

  # SASL support needs a memcached that was itself built with --enable-sasl.
  if build.with? "sasl"
    depends_on "memcached" => "enable-sasl"
  else
    depends_on "memcached"
  end

  # https://bugs.launchpad.net/libmemcached/+bug/1245562
  patch :DATA

  def install
    # Older linkers need dynamic lookup for symbols left undefined at link time.
    ENV.append_to_cflags "-undefined dynamic_lookup" if MacOS.version <= :leopard
    args = ["--prefix=#{prefix}"]
    if build.with? "sasl"
      args << "--with-memcached-sasl=#{Formula["memcached"].bin}/memcached"
    end
    system "./configure", *args
    # Pass "install" as a separate argument so the command is exec'd directly
    # instead of being interpreted by a shell (Homebrew convention).
    system "make", "install"
  end
end
__END__
diff --git a/clients/memflush.cc b/clients/memflush.cc
index 8bd0dbf..71545ea 100644
--- a/clients/memflush.cc
+++ b/clients/memflush.cc
@@ -39,7 +39,7 @@ int main(int argc, char *argv[])
{
options_parse(argc, argv);
- if (opt_servers == false)
+ if (opt_servers == NULL)
{
char *temp;
@@ -48,7 +48,7 @@ int main(int argc, char *argv[])
opt_servers= strdup(temp);
}
- if (opt_servers == false)
+ if (opt_servers == NULL)
{
std::cerr << "No Servers provided" << std::endl;
exit(EXIT_FAILURE);
diff --git a/example/byteorder.cc b/example/byteorder.cc
index fdfa021..8c03d35 100644
--- a/example/byteorder.cc
+++ b/example/byteorder.cc
@@ -42,27 +42,59 @@
#include <example/byteorder.h>
/* Byte swap a 64-bit number. */
-#ifndef swap64
-static inline uint64_t swap64(uint64_t in)
-{
-#ifndef WORDS_BIGENDIAN
- /* Little endian, flip the bytes around until someone makes a faster/better
+#if !defined(htonll) && !defined(ntohll)
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+# if defined(__FreeBSD__)
+# include <sys/endian.h>
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# elif defined(__APPLE__)
+# include <libkern/OSByteOrder.h>
+# define htonll(x) OSSwapInt64(x)
+# define ntohll(x) OSSwapInt64(x)
+# elif defined(__OpenBSD__)
+# include <sys/types.h>
+# define htonll(x) swap64(x)
+# define ntohll(x) swap64(x)
+# elif defined(__NetBSD__)
+# include <sys/types.h>
+# include <machine/bswap.h>
+# if defined(__BSWAP_RENAME) && !defined(__bswap_32)
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# endif
+# elif defined(__sun) || defined(sun)
+# include <sys/byteorder.h>
+# define htonll(x) BSWAP_64(x)
+# define ntohll(x) BSWAP_64(x)
+# elif defined(_MSC_VER)
+# include <stdlib.h>
+# define htonll(x) _byteswap_uint64(x)
+# define ntohll(x) _byteswap_uint64(x)
+# else
+# include <byteswap.h>
+# ifndef bswap_64
+ /* Little endian, flip the bytes around until someone makes a faster/better
* way to do this. */
- uint64_t rv= 0;
- for (uint8_t x= 0; x < 8; x++)
- {
- rv= (rv << 8) | (in & 0xff);
- in >>= 8;
- }
- return rv;
+ static inline uint64_t bswap_64(uint64_t in)
+ {
+ uint64_t rv= 0;
+ for (uint8_t x= 0; x < 8; x++)
+ {
+ rv= (rv << 8) | (in & 0xff);
+ in >>= 8;
+ }
+ return rv;
+ }
+# endif
+# define htonll(x) bswap_64(x)
+# define ntohll(x) bswap_64(x)
+# endif
#else
- /* big-endian machines don't need byte swapping */
- return in;
-#endif // WORDS_BIGENDIAN
-}
+# define htonll(x) (x)
+# define ntohll(x) (x)
+#endif
#endif
-
-#ifdef HAVE_HTONLL
uint64_t example_ntohll(uint64_t value)
{
@@ -73,17 +105,3 @@ uint64_t example_htonll(uint64_t value)
{
return htonll(value);
}
-
-#else // HAVE_HTONLL
-
-uint64_t example_ntohll(uint64_t value)
-{
- return swap64(value);
-}
-
-uint64_t example_htonll(uint64_t value)
-{
- return swap64(value);
-}
-
-#endif // HAVE_HTONLL
diff --git a/libmemcached-1.0/memcached.h b/libmemcached-1.0/memcached.h
index bc16e73..dcee395 100644
--- a/libmemcached-1.0/memcached.h
+++ b/libmemcached-1.0/memcached.h
@@ -43,7 +43,11 @@
#endif
#ifdef __cplusplus
+#ifdef _LIBCPP_VERSION
# include <cinttypes>
+#else
+# include <tr1/cinttypes>
+#endif
# include <cstddef>
# include <cstdlib>
#else
diff --git a/libmemcached/byteorder.cc b/libmemcached/byteorder.cc
index 9f11aa8..f167822 100644
--- a/libmemcached/byteorder.cc
+++ b/libmemcached/byteorder.cc
@@ -39,41 +39,66 @@
#include "libmemcached/byteorder.h"
/* Byte swap a 64-bit number. */
-#ifndef swap64
-static inline uint64_t swap64(uint64_t in)
-{
-#ifndef WORDS_BIGENDIAN
- /* Little endian, flip the bytes around until someone makes a faster/better
+#if !defined(htonll) && !defined(ntohll)
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+# if defined(__FreeBSD__)
+# include <sys/endian.h>
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# elif defined(__APPLE__)
+# include <libkern/OSByteOrder.h>
+# define htonll(x) OSSwapInt64(x)
+# define ntohll(x) OSSwapInt64(x)
+# elif defined(__OpenBSD__)
+# include <sys/types.h>
+# define htonll(x) swap64(x)
+# define ntohll(x) swap64(x)
+# elif defined(__NetBSD__)
+# include <sys/types.h>
+# include <machine/bswap.h>
+# if defined(__BSWAP_RENAME) && !defined(__bswap_32)
+# define htonll(x) bswap64(x)
+# define ntohll(x) bswap64(x)
+# endif
+# elif defined(__sun) || defined(sun)
+# include <sys/byteorder.h>
+# define htonll(x) BSWAP_64(x)
+# define ntohll(x) BSWAP_64(x)
+# elif defined(_MSC_VER)
+# include <stdlib.h>
+# define htonll(x) _byteswap_uint64(x)
+# define ntohll(x) _byteswap_uint64(x)
+# else
+# include <byteswap.h>
+# ifndef bswap_64
+ /* Little endian, flip the bytes around until someone makes a faster/better
* way to do this. */
- uint64_t rv= 0;
- for (uint8_t x= 0; x < 8; ++x)
- {
- rv= (rv << 8) | (in & 0xff);
- in >>= 8;
- }
- return rv;
+ static inline uint64_t bswap_64(uint64_t in)
+ {
+ uint64_t rv= 0;
+ for (uint8_t x= 0; x < 8; x++)
+ {
+ rv= (rv << 8) | (in & 0xff);
+ in >>= 8;
+ }
+ return rv;
+ }
+# endif
+# define htonll(x) bswap_64(x)
+# define ntohll(x) bswap_64(x)
+# endif
#else
- /* big-endian machines don't need byte swapping */
- return in;
-#endif // WORDS_BIGENDIAN
-}
+# define htonll(x) (x)
+# define ntohll(x) (x)
+#endif
#endif
-
uint64_t memcached_ntohll(uint64_t value)
{
-#ifdef HAVE_HTONLL
return ntohll(value);
-#else
- return swap64(value);
-#endif
}
uint64_t memcached_htonll(uint64_t value)
{
-#ifdef HAVE_HTONLL
return htonll(value);
-#else
- return swap64(value);
-#endif
}
|
Rails.application.configure do
  # Production settings — these take precedence over config/application.rb.

  # Never reload code between requests; eager load the whole app on boot.
  config.cache_classes = true
  config.eager_load = true

  # Hide full error reports from end users and enable controller caching.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Static files are expected to be served by the web server (Apache/NGINX)
  # unless RAILS_SERVE_STATIC_FILES is explicitly set.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Asset pipeline: compress JS and CSS, never compile on the fly,
  # fingerprint asset URLs for far-future expiry.
  config.assets.js_compressor = :uglifier
  config.assets.css_compressor = :csso
  config.assets.compile = false
  config.assets.digest = true

  # Keep the most verbose log level so diagnostics are always available.
  config.log_level = :debug

  # Mailer links are generated against the local host/port; delivery goes
  # through SendGrid SMTP with credentials taken from the environment.
  config.action_mailer.default_url_options = { host: '127.0.0.1:8080' }
  config.action_mailer.smtp_settings = {
    user_name: ENV["SENDGRID_USERNAME"],
    password: ENV["SENDGRID_PASSWORD"],
    domain: 'westcornwallevents.co.uk',
    address: 'smtp.sendgrid.net',
    port: 587,
    authentication: :plain,
    enable_starttls_auto: true
  }

  # Fall back to I18n.default_locale when a translation is missing.
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Default formatter keeps PID and timestamp in log lines.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Bullet reports N+1 queries to its own log and to Rollbar.
  config.after_initialize do
    Bullet.enable = true
    Bullet.bullet_logger = true
    Bullet.rollbar = true
  end
end
Update config
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Generated mailer URLs point at the local host and port.
# NOTE(review): the port is embedded in :host rather than a separate :port
# option — confirm url_for produces the intended links.
config.action_mailer.default_url_options = { :host => '127.0.0.1:8080' }
# Deliver via SendGrid SMTP; credentials come from the environment
# (SENDGRID_USERNAME / SENDGRID_PASSWORD).
config.action_mailer.smtp_settings = {
user_name: ENV["SENDGRID_USERNAME"],
password: ENV["SENDGRID_PASSWORD"],
domain: 'westcornwallevents.co.uk',
address: 'smtp.sendgrid.net',
port: 587,
authentication: :plain,
enable_starttls_auto: true
}
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Bullet detects N+1 queries; findings go to the Bullet log and to Rollbar.
config.after_initialize do
Bullet.enable = true
Bullet.bullet_logger = true
Bullet.rollbar = true
end
end
|
require 'spec_helper'

# Association and value-format validations for User.
describe User do
  # Friendship associations.
  it { should have_many(:user_friendships) }
  it { should have_many(:friends) }

  # Email must look like a real address.
  it { should_not allow_value("blah").for(:email) }

  # Password must satisfy the format/strength rules.
  it { should_not allow_value("1").for(:password) }
  it { should allow_value("1jhdsJaZJd5").for(:password) }
end
Added more tests to the user model spec.
require 'spec_helper'

# User validations: associations, value formats, and e-mail uniqueness.
describe User do
  it { should have_many(:user_friendships) }
  it { should have_many(:friends) }
  it { should_not allow_value("blah").for(:email) }
  it { should_not allow_value("1").for(:password) }
  it { should allow_value("1jhdsJaZJd5").for(:password) }

  it "should not allow to create 2 users with the same e-mail" do
    credentials = { email: 'asd@def.com', password: 'password', password_confirmation: 'password' }
    first_user = User.new(credentials)
    first_user.save.should eq(true)
    # A second user with the identical e-mail must fail to save.
    duplicate = User.new(credentials)
    duplicate.save.should eq(false)
  end
end
|
require 'formula'
# Homebrew formula for liquidprompt 1.6, an adaptive prompt for bash and zsh.
class Liquidprompt < Formula
homepage 'https://github.com/nojhan/liquidprompt'
url 'https://github.com/nojhan/liquidprompt/archive/v_1.6.tar.gz'
sha1 '04f2c5b9d01908ed0f2e71df53aba73a72630a43'
# Install the sample rc file under share/ and the prompt script into bin/.
def install
(share+'liquidprompt').install 'liquidpromptrc-dist'
bin.install 'liquidprompt'
end
# Post-install instructions shown to the user. The heredoc body is runtime
# message text and must stay exactly as written.
def caveats; <<-EOS.undent
Add the following lines to your bash or zsh config (e.g. ~/.bash_profile):
if [ -f $(brew --prefix)/bin/liquidprompt ]; then
. $(brew --prefix)/bin/liquidprompt
fi
If you'd like to reconfigure options, you may do so in ~/.liquidpromptrc.
A sample file you may copy and modify has been installed to
#{HOMEBREW_PREFIX}/share/liquidprompt/liquidpromptrc-dist
Don't modify the PROMPT_COMMAND variable elsewhere in your shell config;
that will break things.
EOS
end
end
liquidprompt 1.7
Closes #24887.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# Homebrew formula for liquidprompt 1.7, an adaptive prompt for bash and zsh.
class Liquidprompt < Formula
homepage 'https://github.com/nojhan/liquidprompt'
url 'https://github.com/nojhan/liquidprompt/archive/v_1.7.tar.gz'
sha1 'db7f24b20f09480b3491c5250f30f7ccd67ee44e'
# Install the sample rc file under share/ and the prompt script into bin/.
def install
(share+'liquidprompt').install 'liquidpromptrc-dist'
bin.install 'liquidprompt'
end
# Post-install instructions shown to the user. The heredoc body is runtime
# message text and must stay exactly as written.
def caveats; <<-EOS.undent
Add the following lines to your bash or zsh config (e.g. ~/.bash_profile):
if [ -f $(brew --prefix)/bin/liquidprompt ]; then
. $(brew --prefix)/bin/liquidprompt
fi
If you'd like to reconfigure options, you may do so in ~/.liquidpromptrc.
A sample file you may copy and modify has been installed to
#{HOMEBREW_PREFIX}/share/liquidprompt/liquidpromptrc-dist
Don't modify the PROMPT_COMMAND variable elsewhere in your shell config;
that will break things.
EOS
end
end
|
ActiveAdmin.register Project do
# See permitted parameters documentation:
# https://github.com/activeadmin/activeadmin/blob/master/docs/2-resource-customization.md#setting-up-strong-parameters
#
# permit_params :list, :of, :attributes, :on, :model
#
# or
#
# permit_params do
#   permitted = [:permitted, :attributes]
#   permitted << :other if resource.something?
#   permitted
# end
# No customizations yet — ActiveAdmin defaults apply to index, show, and forms.
end
Customize projects page
ActiveAdmin.register Project do
# See permitted parameters documentation:
# https://github.com/activeadmin/activeadmin/blob/master/docs/2-resource-customization.md#setting-up-strong-parameters
#
# permit_params :list, :of, :attributes, :on, :model
#
# or
#
# permit_params do
#   permitted = [:permitted, :attributes]
#   permitted << :other if resource.something?
#   permitted
# end
# Custom index page: show the key Project columns in this order, plus the
# default row actions (view/edit/delete).
index do
column :id
column :full_name
column :description
column :created_at
column :updated_at
actions
end
end
|
BasicRuby::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both thread web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# The extra entries cover the tutor sub-app's bundle and images.
config.assets.precompile += %w( vendor.js tutor/all.css tutor.js tutor/images/*.png )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
end
Use CloudFront to serve production assets.
BasicRuby::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both thread web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Serve compiled assets from the CloudFront CDN distribution instead of the
# application server.
config.action_controller.asset_host = "http://d3h2cj9g0xymyj.cloudfront.net"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# The extra entries cover the tutor sub-app's bundle and images.
config.assets.precompile += %w( vendor.js tutor/all.css tutor.js tutor/images/*.png )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
end
|
require 'spec_helper'

# User validations plus grade aggregation behavior for students.
describe User do
  context 'when validating' do
    before(:each) { Fabricate(:user) }

    it { should validate_presence_of :username }
    it { should ensure_length_of(:username).is_at_most(50) }
    it { should validate_presence_of :email }
    it { should validate_uniqueness_of :email }

    %w[user@foo.com THE_USER@foo.bar.org first.last@foo.jp].each do |address|
      it { should allow_value(address).for(:email) }
    end

    %w[user@foo,com user_at_foo.org example.user@foo.].each do |address|
      it { should_not allow_value(address).for(:email) }
    end
  end

  context 'as a student' do
    let(:student) { Fabricate(:student_with_grades) }
    let(:team) { Fabricate(:team, :users => [student]) }

    it "sums up grades correctly" do
      lambda do
        student.grades << Fabricate(:grade, :score => 150)
        student.save
      end.should change(student, :sortable_score).by(150)
    end

    it "combines grades and team grades" do
      grades = [Fabricate(:grade, :gradeable => student), Fabricate(:grade, :gradeable => team)]
      grades.each do |grade|
        # Reload so the earned_grades association cache reflects grades
        # created after `student` was first loaded; without it this test
        # can pass/fail spuriously on stale cached associations.
        student.reload.earned_grades.should include(grade)
      end
    end
  end
end
Fixed user spec to test user grades separately from team grades
require 'spec_helper'
# User validations plus grade aggregation behavior for students.
describe User do
context 'when validating' do
before(:each) { Fabricate(:user) }
it { should validate_presence_of :username }
it { should ensure_length_of(:username).is_at_most(50) }
it { should validate_presence_of :email }
it { should validate_uniqueness_of :email }
%w[user@foo.com THE_USER@foo.bar.org first.last@foo.jp].each do |address|
it { should allow_value(address).for(:email) }
end
%w[user@foo,com user_at_foo.org example.user@foo.].each do |address|
it { should_not allow_value(address).for(:email) }
end
end
context 'as a student' do
let(:student) { Fabricate(:student_with_grades) }
let(:team) { Fabricate(:team, :users => [student]) }
it "sums up grades correctly" do
lambda do
student.grades << Fabricate(:grade, :score => 150)
student.save
end.should change(student, :sortable_score).by(150)
end
# The reload calls below refresh the earned_grades association cache so it
# includes grades created after `student` was first loaded.
it "returns user grades" do
grade = Fabricate(:grade, :gradeable => student)
student.reload.earned_grades.should include(grade)
end
it "return team grades" do
grade = Fabricate(:grade, :gradeable => team)
student.reload.earned_grades.should include(grade)
end
it "combines grades and team grades" do
grades = [Fabricate(:grade, :gradeable => student),Fabricate(:grade, :gradeable => team)]
grades.each do |grade|
student.reload.earned_grades.should include(grade)
end
end
end
end
|
require 'friendly/table'
module Friendly
  # Secondary index table for a document class. Stores the indexed field
  # values plus the document id so that queries on those fields can be
  # answered without scanning the main table.
  class Index < Table
    attr_reader :klass, :fields, :datastore

    def initialize(klass, fields, datastore = Friendly.datastore)
      @klass = klass
      @fields = fields
      @datastore = datastore
    end

    # e.g. "index_users_on_name_and_age".
    def table_name
      ["index", klass.table_name, "on", fields.join("_and_")].join("_")
    end

    # True when this index covers the query's condition fields, either
    # exactly (any order) or as a left-most prefix of the indexed fields.
    def satisfies?(query)
      condition_fields = query.conditions.keys.map { |k| k.to_sym }
      exact_match?(condition_fields) || valid_partial_match?(condition_fields)
    end

    # Load the first matching document, or nil when the index has no row.
    # NOTE: renamed the parameter from `conditions` to `query` — the object
    # handed to datastore.first here is the same query object satisfies?
    # receives, so the old name was misleading.
    def first(query)
      row = datastore.first(self, query)
      row && klass.first(:id => row[:id])
    end

    # Load all matching documents by the ids recorded in the index.
    def all(query)
      klass.all(:id => datastore.all(self, query).map { |row| row[:id] })
    end

    # Insert an index row for a newly created document.
    def create(document)
      datastore.insert(self, record(document))
    end

    # Rewrite the index row for an updated document.
    def update(document)
      datastore.update(self, document.id, record(document))
    end

    # Remove the index row for a destroyed document.
    def destroy(document)
      datastore.delete(self, document.id)
    end

    protected
      # All indexed fields are present in the conditions, in any order.
      def exact_match?(condition_fields)
        condition_fields.map { |f| f.to_s }.sort == fields.map { |f| f.to_s }.sort
      end

      # The condition fields form a left-most prefix of the indexed fields
      # once sorted into index order. Fields unknown to this index get
      # position 0 (see field_index) and therefore fail the pairwise zip.
      def valid_partial_match?(condition_fields)
        sorted = condition_fields.sort { |a,b| field_index(a) <=> field_index(b) }
        sorted.zip(fields).all? { |a,b| a == b }
      end

      def field_index(attr)
        fields.index(attr) || 0
      end

      # Row hash of the indexed field values plus :id for this document.
      def record(document)
        Hash[*(fields + [:id]).map { |f| [f, document.send(f)] }.flatten]
      end
  end
end
bit of naming
require 'friendly/table'
module Friendly
  # Secondary index table for a document class. Stores the indexed field
  # values plus the document id so that queries on those fields can be
  # answered without scanning the main table.
  class Index < Table
    attr_reader :klass, :fields, :datastore

    def initialize(klass, fields, datastore = Friendly.datastore)
      @klass = klass
      @fields = fields
      @datastore = datastore
    end

    # e.g. "index_users_on_name_and_age".
    def table_name
      ["index", klass.table_name, "on", fields.join("_and_")].join("_")
    end

    # True when this index covers the query's condition fields, either
    # exactly (any order) or as a left-most prefix of the indexed fields.
    def satisfies?(query)
      condition_fields = query.conditions.keys.map { |k| k.to_sym }
      exact_match?(condition_fields) || valid_partial_match?(condition_fields)
    end

    # Load the first matching document, or nil when the index has no row.
    def first(query)
      row = datastore.first(self, query)
      row && klass.first(:id => row[:id])
    end

    # Load all matching documents by the ids recorded in the index.
    def all(query)
      klass.all(:id => datastore.all(self, query).map { |row| row[:id] })
    end

    # Insert an index row for a newly created document.
    def create(document)
      datastore.insert(self, record(document))
    end

    # Rewrite the index row for an updated document.
    def update(document)
      datastore.update(self, document.id, record(document))
    end

    # Remove the index row for a destroyed document.
    def destroy(document)
      datastore.delete(self, document.id)
    end

    protected
      # All indexed fields are present in the conditions, in any order.
      def exact_match?(condition_fields)
        condition_fields.map { |f| f.to_s }.sort == fields.map { |f| f.to_s }.sort
      end

      # The condition fields form a left-most prefix of the indexed fields
      # once sorted into index order. Fields unknown to this index get
      # position 0 (see field_index) and therefore fail the pairwise zip.
      def valid_partial_match?(condition_fields)
        sorted = condition_fields.sort { |a,b| field_index(a) <=> field_index(b) }
        sorted.zip(fields).all? { |a,b| a == b }
      end

      def field_index(attr)
        fields.index(attr) || 0
      end

      # Row hash of the indexed field values plus :id for this document.
      # NOTE(review): relies on flatten — assumes field values are scalars,
      # an array-valued field would be flattened into the hash. Confirm.
      def record(document)
        Hash[*(fields + [:id]).map { |f| [f, document.send(f)] }.flatten]
      end
  end
end
|
[Admin] Add ActiveAdmin page for managing Regions — could someone please review this? :(
# ActiveAdmin resource for managing Regions.
ActiveAdmin.register Region do
  # Position within the admin navigation menu.
  menu priority: 9

  # Index table: raw id plus the human-readable name and URL slug.
  index do
    column :id
    column :name
    column :slug
    default_actions
  end

  controller do
    # Restrict the whole resource to users with the admin role.
    with_role :admin
  end
end
|
Plate::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both thread web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or nginx will already do this).
  config.serve_static_files = false

  # Compress JavaScripts and CSS.
  #https://github.com/mishoo/UglifyJS2/issues/328
  config.assets.js_compressor = Uglifier.new(
    output: { ascii_only: true, quote_keys: true }
  )
  config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = true #must be true bc https://github.com/rumblelabs/asset_sync/issues/255

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  #config.cache_store = :redis_store, {
  #host: "",
  #port: 6379,
  #expires_in: 90.minutes ,
  #namespace: 'cache'
  #}

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"
  config.action_controller.perform_caching = true
  config.action_controller.asset_host = ENV['S3_HOST_ALIAS']

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )
  config.assets.precompile << /\.(?:svg|eot|woff|ttf)$/
  config.assets.initialize_on_precompile = false
  config.assets.prefix = "/assets"

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  # FIX: was ::Logger::FileFormatter, which does not exist in Ruby's stdlib
  # and raises NameError at boot; the stdlib default is ::Logger::Formatter.
  config.log_formatter = ::Logger::Formatter.new

  # Store Paperclip attachments in cloud storage via fog, configured from env.
  config.paperclip_defaults = {
    :storage => :fog,
    :fog_credentials => {
      :provider => ENV['FOG_PROVIDER'],
      #:region => ENV['FOG_REGION'] ,
      :aws_access_key_id => ENV['AWS_ACCESS_KEY_ID'],
      :aws_secret_access_key => ENV['AWS_SECRET_ACCESS_KEY'],
    },
    :fog_directory => ENV['FOG_DIRECTORY'],
    :fog_host => ENV['S3_HOST_ALIAS']
  }
end
production environment fix
Plate::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both thread web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or nginx will already do this).
  config.serve_static_files = false

  # Compress JavaScripts and CSS.
  #https://github.com/mishoo/UglifyJS2/issues/328
  config.assets.js_compressor = Uglifier.new(
    output: { ascii_only: true, quote_keys: true }
  )
  config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = true #must be true bc https://github.com/rumblelabs/asset_sync/issues/255

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  #config.cache_store = :redis_store, {
  #host: "",
  #port: 6379,
  #expires_in: 90.minutes ,
  #namespace: 'cache'
  #}

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"
  config.action_controller.perform_caching = true
  config.action_controller.asset_host = ENV['S3_HOST_ALIAS']

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )
  config.assets.precompile << /\.(?:svg|eot|woff|ttf)$/
  config.assets.initialize_on_precompile = false
  config.assets.prefix = "/assets"

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  # (::Logger::Formatter is the Ruby stdlib default formatter class.)
  config.log_formatter = ::Logger::Formatter.new

  # Store Paperclip attachments in cloud storage via fog, configured from env.
  config.paperclip_defaults = {
    :storage => :fog,
    :fog_credentials => {
      :provider => ENV['FOG_PROVIDER'],
      #:region => ENV['FOG_REGION'] ,
      :aws_access_key_id => ENV['AWS_ACCESS_KEY_ID'],
      :aws_secret_access_key => ENV['AWS_SECRET_ACCESS_KEY'],
    },
    :fog_directory => ENV['FOG_DIRECTORY'],
    :fog_host => ENV['S3_HOST_ALIAS']
  }
end
|
sourcekitten 0.3.1 (new formula)
Closes #35926.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
# Homebrew formula for SourceKitten, a command-line tool for interacting
# with SourceKit to extract Swift syntax and documentation information.
class Sourcekitten < Formula
  homepage "https://github.com/jpsim/SourceKitten"
  url "https://github.com/jpsim/SourceKitten.git", :tag => "0.3.1"
  head "https://github.com/jpsim/SourceKitten.git"

  # Build requires Xcode (SourceKit ships with it).
  depends_on :xcode => ["6.1.1", :build]

  def install
    # Upstream Makefile drives the install; point it at the Homebrew prefix.
    system "make", "prefix_install", "PREFIX=#{prefix}", "TEMPORARY_FOLDER=#{buildpath}/SourceKitten.dst"
  end

  test do
    # Smoke test: request syntax info for a trivial line of Swift.
    system "#{bin}/sourcekitten", "syntax", "--text", "import Foundation // Hello World"
  end
end
|
# frozen_string_literal: true
# rubocop:disable Layout/LineLength
# == Schema Information
#
# Table name: users
#
# id :uuid not null, primary key
# active :boolean default(TRUE), not null
# admin :boolean default(FALSE), not null
# api_key :string(255)
# birth_year :integer
# crypted_password :string(255) not null
# current_login_at :datetime
# email :string(255)
# experience :text
# gender :string(255)
# gender_custom :string(255)
# import_num :integer
# last_request_at :datetime
# login :string(255) not null
# login_count :integer default(0), not null
# name :string(255) not null
# nationality :string(255)
# notes :text
# password_salt :string(255) not null
# perishable_token :string(255)
# persistence_token :string(255)
# phone :string(255)
# phone2 :string(255)
# pref_lang :string(255) not null
# sms_auth_code :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# last_mission_id :uuid
#
# Indexes
#
# index_users_on_email (email)
# index_users_on_last_mission_id (last_mission_id)
# index_users_on_login (login) UNIQUE
# index_users_on_name (name)
# index_users_on_sms_auth_code (sms_auth_code) UNIQUE
#
# Foreign Keys
#
# users_last_mission_id_fkey (last_mission_id => missions.id) ON DELETE => nullify ON UPDATE => restrict
#
# rubocop:enable Layout/LineLength
require "rails_helper"
describe User do
  let(:mission) { get_mission }

  describe "validation" do
    describe "password strength" do
      let(:user) do
        build(:user, password: password, password_confirmation: password)
      end

      shared_examples_for "too weak" do
        it do
          expect(user).not_to be_valid
          expect(user.errors[:password]).to include(
            "Password must include at least one number, one lowercase letter, and one capital letter."
          )
        end
      end

      context "with new record" do
        context "with weak password" do
          let(:password) { "passw0rd" }
          it_behaves_like "too weak"
        end

        context "with dictionary password" do
          let(:password) { "contortionist" }
          it_behaves_like "too weak"
        end

        context "with strong password" do
          let(:password) { "2a89fhq;*42ata2;84ty8;Q:4t8qa" }
          it { expect(user).to be_valid }
        end
      end

      context "with persisted record" do
        let(:password) { "2a89fhq;*42ata2;84ty8;Q:4t8qa" }
        let(:saved) do
          create(:user, password: password, password_confirmation: password)
        end
        let(:user) { User.find(saved.id) } # Reload so password is definitely wiped.

        it "updates cleanly when password not set" do
          user.phone = "+1234567890"
          expect(user).to be_valid
        end

        # FIX: blank passwords on a persisted record mean "leave the password
        # unchanged" and must not trigger strength validation. The original
        # spec only covered the empty-string case; nil and nil/empty
        # confirmation variants were untested.
        it "updates cleanly when password nil" do
          user.update(phone: "+1234567890", password: nil)
          expect(user).to be_valid
        end

        it "updates cleanly when password empty string" do
          user.update(phone: "+1234567890", password: "")
          expect(user).to be_valid
        end

        it "updates cleanly when password and confirmation nil" do
          user.update(phone: "+1234567890", password: nil, password_confirmation: nil)
          expect(user).to be_valid
        end

        it "updates cleanly when password and confirmation empty string" do
          user.update(phone: "+1234567890", password: "", password_confirmation: "")
          expect(user).to be_valid
        end

        it "errors when password changed and invalid" do
          user.update(phone: "+1234567890", password: "foo", password_confirmation: "foo")
          expect(user.errors[:password].join).to match(/must include/)
        end
      end
    end

    describe "password confirmation" do
      let(:password) { "2a89fhq;*42ata2;84ty8;Q:4t8qa" }
      let(:user) { build(:user, password: password, password_confirmation: confirmation) }

      context "with matching confirmaiton" do
        let(:confirmation) { password }
        it { expect(user).to be_valid }
      end

      context "without matching confirmation" do
        let(:confirmation) { "x" }
        it do
          expect(user).not_to be_valid
          expect(user.errors[:password_confirmation]).to include("doesn't match Password")
        end
      end
    end
  end

  describe "creation" do
    let(:user) { create(:user, email: "foo@bar.com") }

    it "should have an api_key generated" do
      expect(user.api_key).to_not(be_blank)
    end

    it "should have an SMS auth code generated" do
      expect(user.sms_auth_code).to_not(be_blank)
    end

    # Email is indexed but not unique (see schema annotation above).
    context "when distinct user exists with same email" do
      let(:other_user) { create(:user, email: "foo@bar.com") }

      it "should allow creation" do
        expect(user.email).to eq(other_user.email)
      end
    end
  end

  describe "best_mission" do
    before do
      @user = build(:user)
    end

    context "with no last mission" do
      context "with no assignments" do
        before { allow(@user).to receive(:assignments).and_return([]) }
        specify { expect(@user.best_mission).to be_nil }
      end

      context "with assignments" do
        before do
          allow(@user).to receive(:assignments).and_return([
            build(:assignment, user: @user, updated_at: 2.days.ago),
            @most_recent = build(:assignment, user: @user, updated_at: 1.hour.ago),
            build(:assignment, user: @user, updated_at: 1.day.ago)
          ])
        end

        it "should return the mission from the most recently updated assignment" do
          expect(@user.best_mission).to eq(@most_recent.mission)
        end
      end
    end

    context "with last mission" do
      before do
        @last_mission = build(:mission)
        allow(@user).to receive(:last_mission).and_return(@last_mission)
      end

      # last_mission wins over more recently updated assignments.
      context "and a more recent assignment to another mission" do
        before do
          allow(@user).to receive(:assignments).and_return([
            build(:assignment, user: @user, mission: @last_mission, updated_at: 2.days.ago),
            build(:assignment, user: @user, updated_at: 1.hour.ago)
          ])
        end
        specify { expect(@user.best_mission.name).to eq(@last_mission.name) }
      end

      context "but no longer assigned to last mission" do
        before { allow(@user).to receive(:assignments).and_return([]) }
        specify { expect(@user.best_mission).to be_nil }
      end
    end
  end

  describe "username validation" do
    it "should allow letters numbers and periods" do
      ["foobar", "foo.bar9", "1234", "..1_23"].each do |login|
        user = build(:user, login: login)
        expect(user).to be_valid
      end
    end

    it "should allow unicode word chars" do
      %w[foo_bar.baz 123 foébar テスト].each do |login|
        user = build(:user, login: login)
        expect(user).to be_valid, "Expected login to be allowed: #{login}"
      end
    end

    it "should disallow unicode non-word chars" do
      ["foo bar", "foo\nbar", "foo'bar", "foo✓bar", "foo😂bar", "foo\u00A0bar"].each do |login|
        user = build(:user, login: login)
        expect(user).not_to be_valid
        # TODO: Update error messages.
        expect(user.errors[:login].join)
          .to match(/letters, numbers, periods/), "Expected login to be disallowed: #{login}"
      end
    end

    it "should trim spaces and convert to lowercase" do
      user = build(:user, login: "FOOBAR \n ")
      expect(user).to be_valid
      expect(user.login).to eq("foobar")
    end

    describe "uniqueness" do
      let!(:user) { create(:user, login: "jayita") }

      it "returns an error when the login is not unique" do
        user2 = build(:user, login: "jayita")
        expect(user2).not_to be_valid
        expect(user2.errors.full_messages.join).to match(/Username: Please enter a unique value/)
      end

      it "can create a user with the same login after deleting" do
        user.destroy
        user2 = build(:user, login: "jayita")
        expect(user2).to be_valid
      end
    end
  end

  it "creating a user with minimal info should produce good defaults", :investigate do
    user = User.create!(name: "Alpha Tester", login: "alpha", reset_password_method: "print",
                        assignments: [Assignment.new(mission: mission, role: User::ROLES.first)])
    expect(user.pref_lang).to eq("en")
    expect(user.login).to eq("alpha")
  end

  describe "destruction" do
    let!(:user) { create(:user) }

    context "without associations" do
      it "destroys cleanly" do
        user.destroy
      end
    end

    context "with submitted response" do
      let!(:response) { create(:response, user: user) }

      it "raises DeleteRestrictionError" do
        expect { user.destroy }.to raise_error(ActiveRecord::DeleteRestrictionError) do |e|
          # We test the exact wording of the error because the last word is sometimes used to
          # lookup i18n strings.
          expect(e.to_s).to eq("Cannot delete record because of dependent responses")
        end
      end
    end

    context "with reviewed response" do
      let!(:response) { create(:response, reviewer: user) }

      it "raises DeleteRestrictionError" do
        expect { user.destroy }.to raise_error(ActiveRecord::DeleteRestrictionError) do |e|
          # We test the exact wording of the error because the last word is sometimes used to
          # lookup i18n strings.
          expect(e.to_s).to eq("Cannot delete record because of dependent reviewed_responses")
        end
      end
    end

    context "with checked out response" do
      let!(:response) { create(:response, checked_out_by: user) }

      it "nullifies" do
        user.destroy
        expect(response.reload.checked_out_by).to be_nil
      end
    end

    context "with SMS message" do
      let!(:message) { create(:sms_reply, user: user) }

      it "raises DeleteRestrictionError" do
        expect { user.destroy }.to raise_error(ActiveRecord::DeleteRestrictionError) do |e|
          # We test the exact wording of the error because the last word is sometimes used to
          # lookup i18n strings.
          expect(e.to_s).to eq("Cannot delete record because of dependent sms_messages")
        end
      end
    end
  end

  private

  # Helper for phone-uniqueness specs: validates and asserts the error text.
  def assert_phone_uniqueness_error(user)
    user.valid?
    expect(user.errors.full_messages.join).to match(/phone.+assigned/i)
  end
end
11159: Add additional specs
# frozen_string_literal: true
# rubocop:disable Layout/LineLength
# == Schema Information
#
# Table name: users
#
# id :uuid not null, primary key
# active :boolean default(TRUE), not null
# admin :boolean default(FALSE), not null
# api_key :string(255)
# birth_year :integer
# crypted_password :string(255) not null
# current_login_at :datetime
# email :string(255)
# experience :text
# gender :string(255)
# gender_custom :string(255)
# import_num :integer
# last_request_at :datetime
# login :string(255) not null
# login_count :integer default(0), not null
# name :string(255) not null
# nationality :string(255)
# notes :text
# password_salt :string(255) not null
# perishable_token :string(255)
# persistence_token :string(255)
# phone :string(255)
# phone2 :string(255)
# pref_lang :string(255) not null
# sms_auth_code :string(255)
# created_at :datetime not null
# updated_at :datetime not null
# last_mission_id :uuid
#
# Indexes
#
# index_users_on_email (email)
# index_users_on_last_mission_id (last_mission_id)
# index_users_on_login (login) UNIQUE
# index_users_on_name (name)
# index_users_on_sms_auth_code (sms_auth_code) UNIQUE
#
# Foreign Keys
#
# users_last_mission_id_fkey (last_mission_id => missions.id) ON DELETE => nullify ON UPDATE => restrict
#
# rubocop:enable Layout/LineLength
require "rails_helper"
describe User do
  let(:mission) { get_mission }

  describe "validation" do
    describe "password strength" do
      let(:user) do
        build(:user, password: password, password_confirmation: password)
      end

      shared_examples_for "too weak" do
        it do
          expect(user).not_to be_valid
          expect(user.errors[:password]).to include(
            "Password must include at least one number, one lowercase letter, and one capital letter."
          )
        end
      end

      context "with new record" do
        context "with weak password" do
          let(:password) { "passw0rd" }
          it_behaves_like "too weak"
        end

        context "with dictionary password" do
          let(:password) { "contortionist" }
          it_behaves_like "too weak"
        end

        context "with strong password" do
          let(:password) { "2a89fhq;*42ata2;84ty8;Q:4t8qa" }
          it { expect(user).to be_valid }
        end
      end

      # Blank passwords on a persisted record mean "leave unchanged" and must
      # not trigger strength validation; nil / empty-string / confirmation
      # variants are each covered below.
      context "with persisted record" do
        let(:password) { "2a89fhq;*42ata2;84ty8;Q:4t8qa" }
        let(:saved) do
          create(:user, password: password, password_confirmation: password)
        end
        let(:user) { User.find(saved.id) } # Reload so password is definitely wiped.

        it "updates cleanly when password not set" do
          user.phone = "+1234567890"
          expect(user).to be_valid
        end

        it "updates cleanly when password nil" do
          user.update(phone: "+1234567890", password: nil)
          expect(user).to be_valid
        end

        it "updates cleanly when password empty string" do
          user.update(phone: "+1234567890", password: "")
          expect(user).to be_valid
        end

        it "updates cleanly when password and confirmation nil" do
          user.update(phone: "+1234567890", password: nil, password_confirmation: nil)
          expect(user).to be_valid
        end

        it "updates cleanly when password and confirmation empty string" do
          user.update(phone: "+1234567890", password: "", password_confirmation: "")
          expect(user).to be_valid
        end

        it "errors when password changed and invalid" do
          user.update(phone: "+1234567890", password: "foo", password_confirmation: "foo")
          expect(user.errors[:password].join).to match(/must include/)
        end
      end
    end

    describe "password confirmation" do
      let(:password) { "2a89fhq;*42ata2;84ty8;Q:4t8qa" }
      let(:user) { build(:user, password: password, password_confirmation: confirmation) }

      context "with matching confirmaiton" do
        let(:confirmation) { password }
        it { expect(user).to be_valid }
      end

      context "without matching confirmation" do
        let(:confirmation) { "x" }
        it do
          expect(user).not_to be_valid
          expect(user.errors[:password_confirmation]).to include("doesn't match Password")
        end
      end
    end
  end

  describe "creation" do
    let(:user) { create(:user, email: "foo@bar.com") }

    it "should have an api_key generated" do
      expect(user.api_key).to_not(be_blank)
    end

    it "should have an SMS auth code generated" do
      expect(user.sms_auth_code).to_not(be_blank)
    end

    # Email is indexed but not unique (see schema annotation above).
    context "when distinct user exists with same email" do
      let(:other_user) { create(:user, email: "foo@bar.com") }

      it "should allow creation" do
        expect(user.email).to eq(other_user.email)
      end
    end
  end

  describe "best_mission" do
    before do
      @user = build(:user)
    end

    context "with no last mission" do
      context "with no assignments" do
        before { allow(@user).to receive(:assignments).and_return([]) }
        specify { expect(@user.best_mission).to be_nil }
      end

      context "with assignments" do
        before do
          allow(@user).to receive(:assignments).and_return([
            build(:assignment, user: @user, updated_at: 2.days.ago),
            @most_recent = build(:assignment, user: @user, updated_at: 1.hour.ago),
            build(:assignment, user: @user, updated_at: 1.day.ago)
          ])
        end

        it "should return the mission from the most recently updated assignment" do
          expect(@user.best_mission).to eq(@most_recent.mission)
        end
      end
    end

    context "with last mission" do
      before do
        @last_mission = build(:mission)
        allow(@user).to receive(:last_mission).and_return(@last_mission)
      end

      # last_mission wins over more recently updated assignments.
      context "and a more recent assignment to another mission" do
        before do
          allow(@user).to receive(:assignments).and_return([
            build(:assignment, user: @user, mission: @last_mission, updated_at: 2.days.ago),
            build(:assignment, user: @user, updated_at: 1.hour.ago)
          ])
        end
        specify { expect(@user.best_mission.name).to eq(@last_mission.name) }
      end

      context "but no longer assigned to last mission" do
        before { allow(@user).to receive(:assignments).and_return([]) }
        specify { expect(@user.best_mission).to be_nil }
      end
    end
  end

  describe "username validation" do
    it "should allow letters numbers and periods" do
      ["foobar", "foo.bar9", "1234", "..1_23"].each do |login|
        user = build(:user, login: login)
        expect(user).to be_valid
      end
    end

    it "should allow unicode word chars" do
      %w[foo_bar.baz 123 foébar テスト].each do |login|
        user = build(:user, login: login)
        expect(user).to be_valid, "Expected login to be allowed: #{login}"
      end
    end

    it "should disallow unicode non-word chars" do
      ["foo bar", "foo\nbar", "foo'bar", "foo✓bar", "foo😂bar", "foo\u00A0bar"].each do |login|
        user = build(:user, login: login)
        expect(user).not_to be_valid
        # TODO: Update error messages.
        expect(user.errors[:login].join)
          .to match(/letters, numbers, periods/), "Expected login to be disallowed: #{login}"
      end
    end

    it "should trim spaces and convert to lowercase" do
      user = build(:user, login: "FOOBAR \n ")
      expect(user).to be_valid
      expect(user.login).to eq("foobar")
    end

    describe "uniqueness" do
      let!(:user) { create(:user, login: "jayita") }

      it "returns an error when the login is not unique" do
        user2 = build(:user, login: "jayita")
        expect(user2).not_to be_valid
        expect(user2.errors.full_messages.join).to match(/Username: Please enter a unique value/)
      end

      it "can create a user with the same login after deleting" do
        user.destroy
        user2 = build(:user, login: "jayita")
        expect(user2).to be_valid
      end
    end
  end

  it "creating a user with minimal info should produce good defaults", :investigate do
    user = User.create!(name: "Alpha Tester", login: "alpha", reset_password_method: "print",
                        assignments: [Assignment.new(mission: mission, role: User::ROLES.first)])
    expect(user.pref_lang).to eq("en")
    expect(user.login).to eq("alpha")
  end

  describe "destruction" do
    let!(:user) { create(:user) }

    context "without associations" do
      it "destroys cleanly" do
        user.destroy
      end
    end

    context "with submitted response" do
      let!(:response) { create(:response, user: user) }

      it "raises DeleteRestrictionError" do
        expect { user.destroy }.to raise_error(ActiveRecord::DeleteRestrictionError) do |e|
          # We test the exact wording of the error because the last word is sometimes used to
          # lookup i18n strings.
          expect(e.to_s).to eq("Cannot delete record because of dependent responses")
        end
      end
    end

    context "with reviewed response" do
      let!(:response) { create(:response, reviewer: user) }

      it "raises DeleteRestrictionError" do
        expect { user.destroy }.to raise_error(ActiveRecord::DeleteRestrictionError) do |e|
          # We test the exact wording of the error because the last word is sometimes used to
          # lookup i18n strings.
          expect(e.to_s).to eq("Cannot delete record because of dependent reviewed_responses")
        end
      end
    end

    context "with checked out response" do
      let!(:response) { create(:response, checked_out_by: user) }

      it "nullifies" do
        user.destroy
        expect(response.reload.checked_out_by).to be_nil
      end
    end

    context "with SMS message" do
      let!(:message) { create(:sms_reply, user: user) }

      it "raises DeleteRestrictionError" do
        expect { user.destroy }.to raise_error(ActiveRecord::DeleteRestrictionError) do |e|
          # We test the exact wording of the error because the last word is sometimes used to
          # lookup i18n strings.
          expect(e.to_s).to eq("Cannot delete record because of dependent sms_messages")
        end
      end
    end
  end

  private

  # Helper for phone-uniqueness specs: validates and asserts the error text.
  # NOTE(review): not referenced anywhere in this file — confirm it is still
  # needed before removing.
  def assert_phone_uniqueness_error(user)
    user.valid?
    expect(user.errors.full_messages.join).to match(/phone.+assigned/i)
  end
end
|
# Copyright (C) 2013 by Dmitry Maksyoma <ledestin@gmail.com>
require 'hitimes'
require 'monitor'
require 'thread'
require 'timeout'
#--
# {{{1 Rdoc
#++
# Timeout.timeout() replacement using only 2 threads
# = Example
#
# require 'frugal_timeout'
#
# begin
# FrugalTimeout.timeout(0.1) { sleep }
# rescue Timeout::Error
# puts 'it works!'
# end
#
# # Ensure that calling timeout() will use FrugalTimeout.timeout().
# FrugalTimeout.dropin!
#
# # Rescue frugal-specific exception if needed.
# begin
# timeout(0.1) { sleep }
# rescue FrugalTimeout::Error
# puts 'yay!'
# end
#--
# }}}1
module FrugalTimeout
# {{{1 Error
# Library-specific timeout error; subclasses Timeout::Error so existing
# `rescue Timeout::Error` code keeps working.
class Error < Timeout::Error; end # :nodoc:
# {{{1 MonotonicTime
# Clock helpers built on Hitimes; immune to wall-clock adjustments.
class MonotonicTime # :nodoc:
  NANOS_IN_SECOND = 1_000_000_000

  # Run the block and return how long it took, in fractional seconds.
  def self.measure
    before = now
    yield
    now - before
  end

  # Current monotonic time in fractional seconds (arbitrary epoch).
  def self.now
    Hitimes::Interval.now.start_instant.fdiv(NANOS_IN_SECOND)
  end
end
# {{{1 Request
# A single timeout request: raise +klass+ (or Error) in +thread+ once time
# +at+ is reached, unless the protected block finished first (see #done!).
class Request # :nodoc:
  include Comparable

  # Guards @done so that completing a request and enforcing its timeout are
  # mutually exclusive across all Request instances.
  @@mutex = Mutex.new

  attr_reader :at, :thread

  def initialize thread, at, klass
    @thread, @at, @klass = thread, at, klass
  end

  # Requests order by expiration time (used to find the nearest timeout).
  def <=>(other)
    @at <=> other.at
  end

  # Mark the request as completed; a later enforceTimeout becomes a no-op.
  def done!
    @@mutex.synchronize { @done = true }
  end

  def done?
    @@mutex.synchronize { @done }
  end

  # Raise the timeout exception in the target thread unless already done.
  # FIX (race condition): the check and the raise must happen under one
  # lock. The previous `... unless done?` released the mutex between the
  # done check and Thread#raise, so the protected block could finish (and
  # call done!) in that window and still get an exception raised into
  # whatever unrelated code it was running next.
  def enforceTimeout
    @@mutex.synchronize {
      @thread.raise @klass || Error, 'execution expired' unless @done
    }
  end
end
# {{{1 SleeperNotifier
# Worker that sleeps for the most recently requested delay and pushes
# :expired onto notifyQueue when a full delay elapses without being
# superseded by a newer request.
class SleeperNotifier # :nodoc:
  include MonitorMixin

  def initialize notifyQueue
    super()
    @notifyQueue = notifyQueue
    @latestDelay = nil
    @thread = Thread.new {
      loop {
        # No pending delay: park until notifyAfter wakes us up.
        unless sleepFor = latestDelay
          sleep
        else
          sleptFor = MonotonicTime.measure { sleep(sleepFor) }
        end
        synchronize {
          # Only report expiry if the whole requested delay elapsed; an
          # early wakeup means a new (nearer) delay was installed.
          @notifyQueue.push :expired if sleepFor && sleptFor >= sleepFor
        }
      }
    }
    # Kill the worker thread when this object is garbage collected.
    ObjectSpace.define_finalizer self, proc { @thread.kill }
  end

  # Fetch and clear the most recently requested delay (nil when none).
  def latestDelay
    synchronize {
      tmp = @latestDelay
      @latestDelay = nil
      tmp
    }
  end
  private :latestDelay

  # Request a notification after +sec+ seconds, superseding any earlier
  # still-pending request.
  def notifyAfter sec
    synchronize {
      # Wait until the worker is actually asleep so the wakeup isn't lost.
      sleep 0.01 until @thread.status == 'sleep'
      @latestDelay = sec
      @thread.wakeup
    }
  end
  private :synchronize
end
# {{{1 Main code
# All timeout requests (and :expired notifications from the sleeper) flow
# through this queue into the single processing thread below.
@in = Queue.new
@sleeper = SleeperNotifier.new @in

# {{{2 Timeout request and expiration processing thread
Thread.new {
  nearestTimeout, requests = nil, []
  loop {
    request = @in.shift
    now = MonotonicTime.now
    if request == :expired
      # Enforce all expired timeouts.
      requests.sort!
      requests.each_with_index { |r, i|
        break if r.at > now
        r.enforceTimeout
        requests[i] = nil
      }
      requests.compact!

      # Activate the nearest non-expired timeout.
      nearestTimeout = unless requests.first
        nil
      else
        @sleeper.notifyAfter requests.first.at - now
        requests.first.at
      end
      next
    end

    # New timeout request.
    # Already expired, enforce right away.
    if request.at <= now
      request.enforceTimeout
      next
    end

    # Queue new timeout for later enforcing. Activate if it's nearest to
    # enforce.
    requests << request
    next if nearestTimeout && request.at > nearestTimeout
    @sleeper.notifyAfter request.at - now
    nearestTimeout = request.at
  }
}

# {{{2 Methods
# Ensure that calling timeout() will use FrugalTimeout.timeout()
def self.dropin!
  Object.class_eval \
    'def timeout t, klass=nil, &b
       FrugalTimeout.timeout t, klass, &b
     end'
end

# Same as Timeout.timeout()
def self.timeout sec, klass=nil
  # A nil or non-positive timeout means "no timeout", mirroring Timeout.
  return yield sec if sec == nil || sec <= 0

  @in.push request = Request.new(Thread.current, MonotonicTime.now + sec,
    klass)
  begin
    yield sec
  ensure
    # Mark completed so the watcher won't raise into this thread later —
    # unless we are already unwinding from our own timeout exception.
    request.done! unless $!.is_a? FrugalTimeout::Error
  end
end
# }}}1
end
Fix race condition: Request#enforceTimeout now performs its done-check and Thread#raise under the shared mutex, so a block that completes (done!) concurrently can no longer receive a spurious timeout exception.
# Copyright (C) 2013 by Dmitry Maksyoma <ledestin@gmail.com>
require 'hitimes'
require 'monitor'
require 'thread'
require 'timeout'
#--
# {{{1 Rdoc
#++
# Timeout.timeout() replacement using only 2 threads
# = Example
#
# require 'frugal_timeout'
#
# begin
# FrugalTimeout.timeout(0.1) { sleep }
# rescue Timeout::Error
# puts 'it works!'
# end
#
# # Ensure that calling timeout() will use FrugalTimeout.timeout().
# FrugalTimeout.dropin!
#
# # Rescue frugal-specific exception if needed.
# begin
# timeout(0.1) { sleep }
# rescue FrugalTimeout::Error
# puts 'yay!'
# end
#--
# }}}1
module FrugalTimeout
  # {{{1 Error
  # Raised on expiry; subclasses Timeout::Error so existing rescue clauses
  # written against stdlib Timeout keep working.
  class Error < Timeout::Error; end # :nodoc:

  # {{{1 MonotonicTime
  # Monotonic clock helper backed by hitimes; immune to wall-clock changes.
  class MonotonicTime # :nodoc:
    NANOS_IN_SECOND = 1_000_000_000

    # Return how long (in seconds) the given block took to run.
    def self.measure
      start = now
      yield
      now - start
    end

    # Current monotonic time in fractional seconds.
    def self.now
      Hitimes::Interval.now.start_instant.to_f/NANOS_IN_SECOND
    end
  end

  # {{{1 Request
  # A single timeout request: raise +klass+ (or Error) in +thread+ once time
  # +at+ passes, unless the request was marked done beforehand.
  class Request # :nodoc:
    include Comparable
    @@mutex = Mutex.new
    attr_reader :at, :thread

    def initialize thread, at, klass
      @thread, @at, @klass = thread, at, klass
    end

    # Requests order by expiry time (used for sorting the pending list).
    def <=>(other)
      @at <=> other.at
    end

    # Mark the request completed so enforceTimeout becomes a no-op.
    def done!
      @@mutex.synchronize { @done = true }
    end

    def done?
      @@mutex.synchronize { @done }
    end

    # Raise the timeout exception in the target thread unless already done.
    # Shares @@mutex with done!/done? so enforcement and completion can't
    # interleave.
    def enforceTimeout
      @@mutex.synchronize {
        @thread.raise @klass || Error, 'execution expired' unless @done
      }
    end
  end

  # {{{1 SleeperNotifier
  # Sleeps until the most recently requested delay elapses, then pushes
  # :expired onto the notify queue.
  class SleeperNotifier # :nodoc:
    include MonitorMixin

    def initialize notifyQueue
      super()
      @notifyQueue = notifyQueue
      @latestDelay = nil
      @thread = Thread.new {
        loop {
          # No pending delay: sleep until notifyAfter wakes us up.
          unless sleepFor = latestDelay
            sleep
          else
            sleptFor = MonotonicTime.measure { sleep(sleepFor) }
          end
          synchronize {
            # Only report expiry if we slept the full requested delay
            # (a shorter sleep means we were woken by a newer request).
            @notifyQueue.push :expired if sleepFor && sleptFor >= sleepFor
          }
        }
      }
      # Kill the helper thread when this object is garbage collected.
      ObjectSpace.define_finalizer self, proc { @thread.kill }
    end

    # Fetch and clear the most recently requested delay (single consumer:
    # the sleeper thread).
    def latestDelay
      synchronize {
        tmp = @latestDelay
        @latestDelay = nil
        tmp
      }
    end
    private :latestDelay

    # Ask to be notified after +sec+ seconds, replacing any earlier request.
    def notifyAfter sec
      synchronize {
        # NOTE(review): busy-waits for the sleeper thread to reach 'sleep'
        # state while holding the monitor — TODO confirm this cannot stall
        # if the sleeper is inside its own synchronize block.
        sleep 0.01 until @thread.status == 'sleep'
        @latestDelay = sec
        @thread.wakeup
      }
    end
    private :synchronize
  end

  # {{{1 Main code
  # @in carries Request objects and :expired notifications to the processing
  # thread below; @sleeper produces the :expired notifications.
  @in = Queue.new
  @sleeper = SleeperNotifier.new @in

  # {{{2 Timeout request and expiration processing thread
  # Owns the pending-request list; all updates are serialized through @in.
  Thread.new {
    nearestTimeout, requests = nil, []
    loop {
      request = @in.shift
      now = MonotonicTime.now
      if request == :expired
        # Enforce all expired timeouts.
        requests.sort!
        requests.each_with_index { |r, i|
          break if r.at > now
          r.enforceTimeout
          requests[i] = nil
        }
        requests.compact!
        # Activate the nearest non-expired timeout.
        nearestTimeout = unless requests.first
          nil
        else
          @sleeper.notifyAfter requests.first.at - now
          requests.first.at
        end
        next
      end
      # New timeout request.
      # Already expired, enforce right away.
      if request.at <= now
        request.enforceTimeout
        next
      end
      # Queue new timeout for later enforcing. Activate if it's nearest to
      # enforce.
      requests << request
      next if nearestTimeout && request.at > nearestTimeout
      @sleeper.notifyAfter request.at - now
      nearestTimeout = request.at
    }
  }

  # {{{2 Methods
  # Ensure that calling timeout() will use FrugalTimeout.timeout()
  def self.dropin!
    Object.class_eval \
      'def timeout t, klass=nil, &b
        FrugalTimeout.timeout t, klass, &b
      end'
  end

  # Same as Timeout.timeout(): run the block, raising after +sec+ seconds.
  # nil or non-positive +sec+ disables the timeout.
  def self.timeout sec, klass=nil
    return yield sec if sec == nil || sec <= 0
    @in.push request = Request.new(Thread.current, MonotonicTime.now + sec,
      klass)
    begin
      yield sec
    ensure
      # Don't mark done when our own timeout error is propagating.
      request.done! unless $!.is_a? FrugalTimeout::Error
    end
  end
  # }}}1
end
|
# Production environment settings (Kalibro Gatekeeper variant).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or NGINX will already do this).
  config.serve_static_files = false

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for Apache
  config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Decrease the log volume.
  # config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # NOTE(review): memcached host is hard-coded — consider moving it to an
  # environment variable like the SMTP credentials below.
  config.cache_store = :mem_cache_store, "10.2.2.21", {expires_in: 1.day}

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Root URL for ActionMailer
  config.action_mailer.default_url_options = { :host => 'mezuro.org' }

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Kalibro URL
  KalibroGatekeeperClient.configure_with(Rails.root.join('config', 'kalibro_gatekeeper.yml'))

  # ActionMailer SMTP
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    address: 'smtp.gmail.com',
    port: 587,
    domain: 'mezuro.org',
    user_name: ENV['SMTP_USERNAME'], # Configure these as a environment vars on the production server
    password: ENV['SMTP_PASSWORD'],
    authentication: 'plain',
    enable_starttls_auto: true }

  # Exception Notification
  config.middleware.use ExceptionNotification::Rack,
    :ignore_crawlers => %w{Googlebot bingbot SeznamBot Baiduspider AhrefsBot TurnitinBot Mail.RU_Bot Slurp DotBot},
    :email => {
      :email_prefix => "[Prezento Error] ",
      :sender_address => %{"mezurometrics" <mezurometrics@gmail.com>},
      :exception_recipients => %w{mezuro-core@lists.ime.usp.br}
    }

  # Google Analytics
  GA.tracker = ENV['GA_TRACKER']
  GA.script_source = :doubleclick
end
Change the Kalibro Client configuration for production.
Signed off by: Rafael Reggiani Manzo <rr.manzo@gmail.com>
# Production environment settings (Kalibro Client variant).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or NGINX will already do this).
  config.serve_static_files = false

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for Apache
  config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Decrease the log volume.
  # config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # NOTE(review): memcached host is hard-coded — consider moving it to an
  # environment variable like the SMTP credentials below.
  config.cache_store = :mem_cache_store, "10.2.2.21", {expires_in: 1.day}

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Root URL for ActionMailer
  config.action_mailer.default_url_options = { :host => 'mezuro.org' }

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Kalibro URL
  KalibroClient.configure_with(Rails.root.join('config', 'kalibro.yml'))

  # ActionMailer SMTP
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    address: 'smtp.gmail.com',
    port: 587,
    domain: 'mezuro.org',
    user_name: ENV['SMTP_USERNAME'], # Configure these as a environment vars on the production server
    password: ENV['SMTP_PASSWORD'],
    authentication: 'plain',
    enable_starttls_auto: true }

  # Exception Notification
  config.middleware.use ExceptionNotification::Rack,
    :ignore_crawlers => %w{Googlebot bingbot SeznamBot Baiduspider AhrefsBot TurnitinBot Mail.RU_Bot Slurp DotBot},
    :email => {
      :email_prefix => "[Prezento Error] ",
      :sender_address => %{"mezurometrics" <mezurometrics@gmail.com>},
      :exception_recipients => %w{mezuro-core@lists.ime.usp.br}
    }

  # Google Analytics
  GA.tracker = ENV['GA_TRACKER']
  GA.script_source = :doubleclick
end
|
require 'formula'
# Homebrew formula for Mozilla SpiderMonkey 1.8.5 (standalone JS engine).
class Spidermonkey < Formula
  homepage 'https://developer.mozilla.org/en/SpiderMonkey'
  url 'http://ftp.mozilla.org/pub/mozilla.org/js/js185-1.0.0.tar.gz'
  version '1.8.5'
  sha1 '52a01449c48d7a117b35f213d3e4263578d846d6'
  revision 1
  head 'https://hg.mozilla.org/tracemonkey/archive/tip.tar.gz'

  bottle do
    revision 1
    sha1 "6c6fd6d40d41764a086a6fb134176253deb1a51b" => :yosemite
    sha1 "5d19010b10a5f1827511ca791debf9f2d9076e47" => :mavericks
    sha1 "37d04b64aba47dbf65f197aec94da9acf5f1fd4c" => :mountain_lion
  end

  conflicts_with 'narwhal', :because => 'both install a js binary'

  depends_on 'readline'
  depends_on 'nspr'

  # Builds out-of-tree in brew-build/ against the system NSPR.
  def install
    cd "js/src" do
      # Remove the broken *(for anyone but FF) install_name
      inreplace "config/rules.mk",
        "-install_name @executable_path/$(SHARED_LIBRARY) ",
        "-install_name #{lib}/$(SHARED_LIBRARY) "
    end

    mkdir "brew-build" do
      system "../js/src/configure", "--prefix=#{prefix}",
        "--enable-readline",
        "--enable-threadsafe",
        "--with-system-nspr",
        "--with-nspr-prefix=#{Formula["nspr"].opt_prefix}",
        "--enable-macos-target=#{MacOS.version}"
      # Strip build-time library flags from the installed js-config script.
      inreplace "js-config", /JS_CONFIG_LIBS=.*?$/, "JS_CONFIG_LIBS=''"
      # These need to be in separate steps.
      system "make"
      system "make install"
      # Also install js REPL.
      bin.install "shell/js"
    end
  end

  # Smoke test: run a trivial script through the installed REPL.
  test do
    path = testpath/"test.js"
    path.write "print('hello');"
    assert_equal "hello", shell_output("#{bin}/js #{path}").strip
  end
end
spidermonkey: update 1.8.5_1 bottle.
require 'formula'
# Homebrew formula for Mozilla SpiderMonkey 1.8.5 (standalone JS engine),
# with bottles referenced by sha256 checksums.
class Spidermonkey < Formula
  homepage 'https://developer.mozilla.org/en/SpiderMonkey'
  url 'http://ftp.mozilla.org/pub/mozilla.org/js/js185-1.0.0.tar.gz'
  version '1.8.5'
  sha1 '52a01449c48d7a117b35f213d3e4263578d846d6'
  revision 1
  head 'https://hg.mozilla.org/tracemonkey/archive/tip.tar.gz'

  bottle do
    sha256 "7ab660cad3aac11fbf4befa3fbbf65a7ee64d858539ad81298271389b2957375" => :yosemite
    sha256 "cda0b81bd974640690fe067691efca6bc7d1583117cd5db28cca43ab8e2f884c" => :mavericks
    sha256 "769035a4fa0ed09b71aa9747c2834a51285903e51d9bc478f865c037a8666370" => :mountain_lion
  end

  conflicts_with 'narwhal', :because => 'both install a js binary'

  depends_on 'readline'
  depends_on 'nspr'

  # Builds out-of-tree in brew-build/ against the system NSPR.
  def install
    cd "js/src" do
      # Remove the broken *(for anyone but FF) install_name
      inreplace "config/rules.mk",
        "-install_name @executable_path/$(SHARED_LIBRARY) ",
        "-install_name #{lib}/$(SHARED_LIBRARY) "
    end

    mkdir "brew-build" do
      system "../js/src/configure", "--prefix=#{prefix}",
        "--enable-readline",
        "--enable-threadsafe",
        "--with-system-nspr",
        "--with-nspr-prefix=#{Formula["nspr"].opt_prefix}",
        "--enable-macos-target=#{MacOS.version}"
      # Strip build-time library flags from the installed js-config script.
      inreplace "js-config", /JS_CONFIG_LIBS=.*?$/, "JS_CONFIG_LIBS=''"
      # These need to be in separate steps.
      system "make"
      system "make install"
      # Also install js REPL.
      bin.install "shell/js"
    end
  end

  # Smoke test: run a trivial script through the installed REPL.
  test do
    path = testpath/"test.js"
    path.write "print('hello');"
    assert_equal "hello", shell_output("#{bin}/js #{path}").strip
  end
end
|
require 'rails_helper'
# Model spec for User. The model's attribute was renamed from +name+ to
# +username+, so presence must be validated on +username+ — the old
# `validate_presence_of :name` expectation fails against the current model.
describe User do
  it { should validate_presence_of :username }
  it { should validate_presence_of :email }
  it { should have_secure_password }
  it { should have_many(:votes) }
end
Fix failing tests: the `name` attribute has been renamed to `username`.
require 'rails_helper'
# Model spec for User: validations (username/email presence), secure
# password, and the votes association.
describe User do
  it { should validate_presence_of :username }
  it { should validate_presence_of :email }
  it { should have_secure_password }
  it { should have_many(:votes) }
end
|
# Copyright (C) 2013, 2014 by Dmitry Maksyoma <ledestin@gmail.com>
require 'monitor'
require 'thread'
require 'timeout'
require 'frugal_timeout/support'
#--
# {{{1 Rdoc
#++
# Timeout.timeout() replacement using only 1 thread
# = Example
#
# require 'frugal_timeout'
#
# begin
# FrugalTimeout.timeout(0.1) { sleep }
# rescue Timeout::Error
# puts 'it works!'
# end
#
# # Ensure that calling timeout() will use FrugalTimeout.timeout().
# FrugalTimeout.dropin!
#
# # Rescue frugal-specific exception if needed.
# begin
# timeout(0.1) { sleep }
# rescue FrugalTimeout::Error
# puts 'yay!'
# end
#--
# }}}1
module FrugalTimeout
  # {{{1 Error
  # Raised on expiry; subclasses Timeout::Error so existing rescue clauses
  # written against stdlib Timeout keep working.
  class Error < Timeout::Error #:nodoc:
  end

  # {{{1 Request
  # Timeout request, holding expiry time, what exception to raise and in which
  # thread. It is active by default, but can be defused. If it's defused, then
  # timeout won't be enforced when #enforceTimeout is called.
  class Request #:nodoc:
    include Comparable
    @@mutex = Mutex.new
    attr_reader :at, :klass, :thread

    def initialize thread, at, klass
      @thread, @at, @klass = thread, at, klass
      @defused = false
    end

    # Requests order by expiry time.
    def <=>(other)
      @at <=> other.at
    end

    # Timeout won't be enforced if you defuse a request.
    def defuse!
      @@mutex.synchronize { @defused = true }
    end

    def defused?
      @@mutex.synchronize { @defused }
    end

    # Raise the exception in the target thread unless defused; defuses the
    # request afterwards and returns true when the exception was raised.
    def enforceTimeout
      @@mutex.synchronize {
        return if @defused
        @thread.raise @klass, 'execution expired'
        @defused = true
        true
      }
    end
  end

  # {{{1 RequestQueue
  # Holds pending requests sorted by expiry; indexes them by thread so all of
  # a thread's requests can be defused together.
  class RequestQueue #:nodoc:
    include Hookable
    include MonitorMixin

    def initialize
      super
      def_hook_synced :onEnforce, :onNewNearestRequest
      @requests, @threadIdx = SortedQueue.new, Storage.new
      @requests.onAdd { |r| @threadIdx.set r.thread, r }
      @requests.onRemove { |r| @threadIdx.delete r.thread, r }
    end

    # Enforce everything that has expired, then re-arm for the next request.
    def handleExpiry
      synchronize {
        purgeAndEnforceExpired
        sendNearestActive
      }
    end

    def size
      synchronize { @requests.size }
    end

    # Create and enqueue a request expiring +sec+ seconds from now; fires
    # onNewNearestRequest when it becomes the earliest deadline.
    def queue sec, klass
      request = Request.new(Thread.current, MonotonicTime.now + sec, klass)
      synchronize {
        @requests << request
        @onNewNearestRequest.call(request) if @requests.first == request
      }
      request
    end

    private

    # Defuse every request belonging to +thread+ (the index may hold one
    # request or a collection).
    def defuseForThread! thread
      return unless request = @threadIdx[thread]
      if request.respond_to? :each
        request.each { |r| r.defuse! }
      else
        request.defuse!
      end
    end

    # Drop expired requests from the head of the queue, enforcing each one.
    def purgeAndEnforceExpired
      @onEnforce.call
      now = MonotonicTime.now
      @requests.reject_until_mismatch! { |r|
        if r.at <= now
          r.enforceTimeout && defuseForThread!(r.thread)
          true
        end
      }
    end

    # Skip defused requests and announce the next live deadline, if any.
    def sendNearestActive
      @requests.reject_until_mismatch! { |r| r.defused? }
      @onNewNearestRequest.call @requests.first unless @requests.empty?
    end
  end

  # {{{1 SleeperNotifier
  # Executes callback when a request expires.
  # 1. Set callback to execute with #onExpiry=.
  # 2. Set expiry time with #expireAt.
  # 3. After the expiry time comes, execute the callback.
  #
  # It's possible to set a new expiry time before the time set previously
  # expires. In this case, processing of the old request stops and the new
  # request processing starts.
  class SleeperNotifier #:nodoc:
    include Hookable
    include MonitorMixin

    def initialize
      super()
      def_hook_synced :onExpiry
      @condVar, @expireAt = new_cond, nil
      @thread = Thread.new {
        loop {
          # The inner synchronize evaluates to true only when the full delay
          # elapsed (an early condvar signal means a newer request arrived).
          synchronize { @onExpiry }.call if synchronize {
            # Sleep forever until a request comes in.
            unless @expireAt
              wait
              next
            end
            timeLeft = calcTimeLeft
            disposeOfRequest
            elapsedTime = MonotonicTime.measure { wait timeLeft }
            elapsedTime >= timeLeft
          }
        }
      }
      # Kill the helper thread when this object is garbage collected.
      ObjectSpace.define_finalizer self, proc { @thread.kill }
    end

    # Schedule the callback for monotonic time +time+.
    def expireAt time
      synchronize {
        @expireAt = time
        signalThread
      }
    end

    private

    # Seconds remaining until @expireAt, clamped at zero.
    def calcTimeLeft
      synchronize {
        delay = @expireAt - MonotonicTime.now
        delay < 0 ? 0 : delay
      }
    end

    def disposeOfRequest
      @expireAt = nil
    end

    def signalThread
      @condVar.signal
    end

    def wait sec=nil
      @condVar.wait sec
    end
  end

  # {{{1 Main code
  # Wire the queue and the sleeper together: the queue tells the sleeper when
  # the nearest deadline changes; the sleeper tells the queue when it expires.
  @requestQueue = RequestQueue.new
  sleeper = SleeperNotifier.new
  @requestQueue.onNewNearestRequest { |request|
    sleeper.expireAt request.at
  }
  sleeper.onExpiry { @requestQueue.handleExpiry }

  # {{{2 Methods
  # Ensure that calling timeout() will use FrugalTimeout.timeout()
  def self.dropin!
    Object.class_eval \
      'def timeout t, klass=nil, &b
        FrugalTimeout.timeout t, klass, &b
      end'
  end

  # Test hook: called before expired requests are enforced.
  def self.on_enforce &b #:nodoc:
    @requestQueue.onEnforce &b
  end

  # Test hook: called from the ensure clause of .timeout.
  def self.on_ensure &b #:nodoc:
    @onEnsure = b
  end

  # Same as Timeout.timeout(): run the block, raising after +sec+ seconds.
  # When no +klass+ is given, a unique anonymous exception class is raised
  # internally and translated to FrugalTimeout::Error, so nested timeouts
  # can't swallow each other's exceptions.
  def self.timeout sec, klass=nil
    return yield sec if sec.nil? || sec <= 0
    innerException = klass || Class.new(Timeout::ExitException)
    request = @requestQueue.queue(sec, innerException)
    begin
      yield sec
    rescue innerException => e
      raise if klass
      raise Error, e.message, e.backtrace
    ensure
      @onEnsure.call if @onEnsure
      request.defuse!
    end
  end
  # }}}1
end
* More comments.
# Copyright (C) 2013, 2014 by Dmitry Maksyoma <ledestin@gmail.com>
require 'monitor'
require 'thread'
require 'timeout'
require 'frugal_timeout/support'
#--
# {{{1 Rdoc
#++
# Timeout.timeout() replacement using only 1 thread
# = Example
#
# require 'frugal_timeout'
#
# begin
# FrugalTimeout.timeout(0.1) { sleep }
# rescue Timeout::Error
# puts 'it works!'
# end
#
# # Ensure that calling timeout() will use FrugalTimeout.timeout().
# FrugalTimeout.dropin!
#
# # Rescue frugal-specific exception if needed.
# begin
# timeout(0.1) { sleep }
# rescue FrugalTimeout::Error
# puts 'yay!'
# end
#--
# }}}1
module FrugalTimeout
  # {{{1 Error
  # Raised on expiry; subclasses Timeout::Error so existing rescue clauses
  # written against stdlib Timeout keep working.
  class Error < Timeout::Error #:nodoc:
  end

  # {{{1 Request
  # Timeout request, holding expiry time, what exception to raise and in which
  # thread. It is active by default, but can be defused. If it's defused, then
  # timeout won't be enforced when #enforceTimeout is called.
  class Request #:nodoc:
    include Comparable
    @@mutex = Mutex.new
    attr_reader :at, :klass, :thread

    def initialize thread, at, klass
      @thread, @at, @klass = thread, at, klass
      @defused = false
    end

    # Requests order by expiry time.
    def <=>(other)
      @at <=> other.at
    end

    # Timeout won't be enforced if you defuse a request.
    def defuse!
      @@mutex.synchronize { @defused = true }
    end

    def defused?
      @@mutex.synchronize { @defused }
    end

    # Raise the exception in the target thread unless defused; defuses the
    # request afterwards and returns true when the exception was raised.
    def enforceTimeout
      @@mutex.synchronize {
        return if @defused
        @thread.raise @klass, 'execution expired'
        @defused = true
        true
      }
    end
  end

  # {{{1 RequestQueue
  # Contains requests to be processed. Calls @onNewNearestRequest when another
  # request becomes the first in line. Calls @onEnforce when expired requests
  # are removed and enforced.
  #
  # #queue adds requests.
  # #handleExpiry removes and enforces requests.
  class RequestQueue #:nodoc:
    include Hookable
    include MonitorMixin

    def initialize
      super
      def_hook_synced :onEnforce, :onNewNearestRequest
      @requests, @threadIdx = SortedQueue.new, Storage.new
      @requests.onAdd { |r| @threadIdx.set r.thread, r }
      @requests.onRemove { |r| @threadIdx.delete r.thread, r }
    end

    # Enforce everything that has expired, then re-arm for the next request.
    def handleExpiry
      synchronize {
        purgeAndEnforceExpired
        sendNearestActive
      }
    end

    def size
      synchronize { @requests.size }
    end

    # Create and enqueue a request expiring +sec+ seconds from now; fires
    # onNewNearestRequest when it becomes the earliest deadline.
    def queue sec, klass
      request = Request.new(Thread.current, MonotonicTime.now + sec, klass)
      synchronize {
        @requests << request
        @onNewNearestRequest.call(request) if @requests.first == request
      }
      request
    end

    private

    # Defuses requests belonging to the passed thread.
    def defuseForThread! thread
      return unless request = @threadIdx[thread]
      if request.respond_to? :each
        request.each { |r| r.defuse! }
      else
        request.defuse!
      end
    end

    # Drop expired requests from the head of the queue, enforcing each one.
    def purgeAndEnforceExpired
      @onEnforce.call
      now = MonotonicTime.now
      @requests.reject_until_mismatch! { |r|
        if r.at <= now
          r.enforceTimeout && defuseForThread!(r.thread)
          true
        end
      }
    end

    # Skip defused requests and announce the next live deadline, if any.
    def sendNearestActive
      @requests.reject_until_mismatch! { |r| r.defused? }
      @onNewNearestRequest.call @requests.first unless @requests.empty?
    end
  end

  # {{{1 SleeperNotifier
  # Executes callback when a request expires.
  # 1. Set callback to execute with #onExpiry=.
  # 2. Set expiry time with #expireAt.
  # 3. After the expiry time comes, execute the callback.
  #
  # It's possible to set a new expiry time before the time set previously
  # expires. In this case, processing of the old request stops and the new
  # request processing starts.
  class SleeperNotifier #:nodoc:
    include Hookable
    include MonitorMixin

    def initialize
      super()
      def_hook_synced :onExpiry
      @condVar, @expireAt = new_cond, nil
      @thread = Thread.new {
        loop {
          # The inner synchronize evaluates to true only when the full delay
          # elapsed (an early condvar signal means a newer request arrived).
          synchronize { @onExpiry }.call if synchronize {
            # Sleep forever until a request comes in.
            unless @expireAt
              wait
              next
            end
            timeLeft = calcTimeLeft
            disposeOfRequest
            elapsedTime = MonotonicTime.measure { wait timeLeft }
            elapsedTime >= timeLeft
          }
        }
      }
      # Kill the helper thread when this object is garbage collected.
      ObjectSpace.define_finalizer self, proc { @thread.kill }
    end

    # Schedule the callback for monotonic time +time+.
    def expireAt time
      synchronize {
        @expireAt = time
        signalThread
      }
    end

    private

    # Seconds remaining until @expireAt, clamped at zero.
    def calcTimeLeft
      synchronize {
        delay = @expireAt - MonotonicTime.now
        delay < 0 ? 0 : delay
      }
    end

    def disposeOfRequest
      @expireAt = nil
    end

    def signalThread
      @condVar.signal
    end

    def wait sec=nil
      @condVar.wait sec
    end
  end

  # {{{1 Main code
  # Wire the queue and the sleeper together: the queue tells the sleeper when
  # the nearest deadline changes; the sleeper tells the queue when it expires.
  @requestQueue = RequestQueue.new
  sleeper = SleeperNotifier.new
  @requestQueue.onNewNearestRequest { |request|
    sleeper.expireAt request.at
  }
  sleeper.onExpiry { @requestQueue.handleExpiry }

  # {{{2 Methods
  # Ensure that calling timeout() will use FrugalTimeout.timeout()
  def self.dropin!
    Object.class_eval \
      'def timeout t, klass=nil, &b
        FrugalTimeout.timeout t, klass, &b
      end'
  end

  # Test hook: called before expired requests are enforced.
  def self.on_enforce &b #:nodoc:
    @requestQueue.onEnforce &b
  end

  # Test hook: called from the ensure clause of .timeout.
  def self.on_ensure &b #:nodoc:
    @onEnsure = b
  end

  # Same as Timeout.timeout(): run the block, raising after +sec+ seconds.
  # When no +klass+ is given, a unique anonymous exception class is raised
  # internally and translated to FrugalTimeout::Error, so nested timeouts
  # can't swallow each other's exceptions.
  def self.timeout sec, klass=nil
    return yield sec if sec.nil? || sec <= 0
    innerException = klass || Class.new(Timeout::ExitException)
    request = @requestQueue.queue(sec, innerException)
    begin
      yield sec
    rescue innerException => e
      raise if klass
      raise Error, e.message, e.backtrace
    ensure
      @onEnsure.call if @onEnsure
      request.defuse!
    end
  end
  # }}}1
end
|
# Production environment settings for speakerinnen.org.
SpeakerinnenListe::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both thread web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Serve static assets from Rails itself so precompiled assets (including
  # the fonts) are reachable in production on Rails 4. This was commented
  # out, which made font assets 404 when no front-end server handled them.
  config.serve_static_assets = true

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Outgoing mail via Mandrill SMTP; credentials come from the environment.
  config.action_mailer.default_url_options = { host: 'speakerinnen.org'}
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.default charset: "utf-8"
  config.action_mailer.smtp_settings = {
    port: '587',
    address: 'smtp.mandrillapp.com',
    user_name: ENV['MANDRILL_USERNAME'],
    password: ENV['MANDRILL_APIKEY'],
    domain: 'heroku.com',
    authentication: :plain
  }

  # piwik data collection and analytics
  # NOTE(review): `config.gem` is a Rails 2-era API — verify it is still
  # supported in this app's Rails version or move the gem to the Gemfile.
  config.gem 'rack-piwik', lib: 'rack/piwik'
  config.middleware.use Rack::Piwik, piwik_url: 'speaki.hadar.uberspace.de/piwik', piwik_id: '1', disable_cookies: false

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
end
Set serve_static_assets to true so the fonts load correctly in Rails 4.
# Production environment settings for speakerinnen.org.
SpeakerinnenListe::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both thread web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Serve static assets from Rails itself (needed so fonts load in Rails 4).
  config.serve_static_assets = true

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Outgoing mail via Mandrill SMTP; credentials come from the environment.
  config.action_mailer.default_url_options = { host: 'speakerinnen.org'}
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.default charset: "utf-8"
  config.action_mailer.smtp_settings = {
    port: '587',
    address: 'smtp.mandrillapp.com',
    user_name: ENV['MANDRILL_USERNAME'],
    password: ENV['MANDRILL_APIKEY'],
    domain: 'heroku.com',
    authentication: :plain
  }

  # piwik data collection and analytics
  # NOTE(review): `config.gem` is a Rails 2-era API — verify it is still
  # supported in this app's Rails version or move the gem to the Gemfile.
  config.gem 'rack-piwik', lib: 'rack/piwik'
  config.middleware.use Rack::Piwik, piwik_url: 'speaki.hadar.uberspace.de/piwik', piwik_id: '1', disable_cookies: false

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
end
|
require 'formula'
require 'keg'
require 'bottles'
module Homebrew
def cleanup
# individual cleanup_ methods should also check for the existence of the
# appropriate directories before assuming they exist
return unless HOMEBREW_CELLAR.directory?
if ARGV.named.empty?
cleanup_cellar
cleanup_cache
cleanup_logs
unless ARGV.dry_run?
cleanup_lockfiles
rm_DS_Store
end
else
ARGV.formulae.each { |f| cleanup_formula(f) }
end
end
def cleanup_logs
return unless HOMEBREW_LOGS.directory?
time = Time.now - 2 * 7 * 24 * 60 * 60 # two weeks
HOMEBREW_LOGS.subdirs.each do |dir|
if dir.mtime < time
if ARGV.dry_run?
puts "Would remove: #{dir}"
else
puts "Removing: #{dir}..."
dir.rmtree
end
end
end
end
# Walk every rack (one directory per formula name) in the Cellar and
# remove outdated kegs of each.
def cleanup_cellar
HOMEBREW_CELLAR.subdirs.each do |rack|
begin
# Resolve the rack's directory name back to a Formula object.
cleanup_formula Formulary.factory(rack.basename.to_s)
rescue FormulaUnavailableError
# Don't complain about directories from DIY installs
end
end
end
# Remove every keg of +f+ that is older than the formula's current
# pkg_version, provided the current version is actually installed.
def cleanup_formula f
if f.installed?
# A keg is eligible when its version is strictly older than the
# formula's current packaged version.
eligible_kegs = f.rack.subdirs.map { |d| Keg.new(d) }.select { |k| f.pkg_version > k.version }
eligible_kegs.each do |keg|
if f.can_cleanup?
cleanup_keg(keg)
else
opoo "Skipping (old) keg-only: #{keg}"
end
end
elsif f.rack.subdirs.length > 1
# If the cellar only has one version installed, don't complain
# that we can't tell which one to keep.
opoo "Skipping #{f.name}: most recent version #{f.version} not installed"
end
end
# Uninstall one outdated keg, unless it is currently linked into the
# prefix (removing a linked keg would break the live installation).
# Under --dry-run only reports what would be removed.
def cleanup_keg keg
if keg.linked?
opoo "Skipping (old) #{keg} due to it being linked"
elsif ARGV.dry_run?
puts "Would remove: #{keg}"
else
puts "Removing: #{keg}..."
keg.uninstall
end
end
# Sweep the download cache: delete any cached file whose formula has a
# newer version, whose formula is not installed (when -s is given), or
# whose bottle is outdated.
def cleanup_cache
return unless HOMEBREW_CACHE.directory?
HOMEBREW_CACHE.children.select(&:file?).each do |file|
# Skip files from which no version can be parsed.
next unless (version = file.version)
# Everything before "-<version>" in the filename is the formula name.
next unless (name = file.basename.to_s[/(.*)-(?:#{Regexp.escape(version)})/, 1])
begin
f = Formulary.factory(name)
rescue FormulaUnavailableError
next
end
# NOTE: && binds tighter than ||, so the -s clause pairs only
# ARGV.switch?('s') with !f.installed?.
if f.version > version || ARGV.switch?('s') && !f.installed? || bottle_file_outdated?(f, file)
cleanup_cached_file(file)
end
end
end
# Delete a stale download from the cache; under --dry-run merely report it.
def cleanup_cached_file file
  dry = ARGV.dry_run?
  puts(dry ? "Would remove: #{file}" : "Removing: #{file}...")
  file.unlink unless dry
end
# Delete stale .brewing lockfiles from the formula cache.
#
# A lockfile is removed only when an exclusive non-blocking flock succeeds,
# i.e. no other brew process currently holds it.
def cleanup_lockfiles
  return unless HOMEBREW_CACHE_FORMULA.directory?
  candidates = HOMEBREW_CACHE_FORMULA.children
  lockfiles = candidates.select { |f| f.file? && f.extname == '.brewing' }
  lockfiles.select(&:readable?).each do |file|
    # Open in block form so the descriptor is always closed; the previous
    # `file.open.flock(...)` leaked one open file per lockfile. Closing the
    # file also releases the lock just taken.
    file.open do |f|
      file.unlink if f.flock(File::LOCK_EX | File::LOCK_NB)
    end
  end
end
# Best-effort removal of Finder .DS_Store litter anywhere under the prefix.
# quiet_system suppresses find's output and errors (e.g. unreadable dirs),
# and the multi-argument form avoids shell interpretation of the path.
def rm_DS_Store
quiet_system "find", HOMEBREW_PREFIX.to_s, "-name", ".DS_Store", "-delete"
end
end
class Formula
# Whether old kegs of this formula are safe to delete.
#
# Returns true outright for non-keg-only formulae (or under --force).
# For keg-only formulae with an opt symlink, returns true only when every
# installed dependent records a HEAD SHA in its install receipt -- i.e. it
# was built after opt symlinks existed and therefore links against
# opt_prefix rather than a hard-coded versioned keg path.
#
# NOTE(review): when the formula is keg-only, not forced, and opt_prefix
# is not a directory, no branch matches and the method implicitly returns
# nil (falsy) -- apparently intentional, but worth confirming.
def can_cleanup?
# It used to be the case that keg-only kegs could not be cleaned up, because
# older brews were built against the full path to the keg-only keg. Then we
# introduced the opt symlink, and built against that instead. So provided
# no brew exists that was built against an old-style keg-only keg, we can
# remove it.
if not keg_only? or ARGV.force?
true
elsif opt_prefix.directory?
# SHA records were added to INSTALL_RECEIPTS the same day as opt symlinks
# A nil HEAD in any dependent's tab means it predates SHA recording and
# may link against the versioned keg path, so the keg must be kept.
!Formula.installed.
select{ |ff| ff.deps.map{ |d| d.to_s }.include? name }.
map{ |ff| ff.rack.subdirs rescue [] }.
flatten.
map{ |keg_path| Tab.for_keg(keg_path).HEAD }.
include? nil
end
end
end
cleanup: optimize performance
Only remove .DS_Store files from whitelisted directories.
Closes Homebrew/homebrew#37409.
Signed-off-by: Xu Cheng <9a05244150b861e40c78843b801eed71bca54eac@me.com>
require 'formula'
require 'keg'
require 'bottles'
module Homebrew
def cleanup
# individual cleanup_ methods should also check for the existence of the
# appropriate directories before assuming they exist
return unless HOMEBREW_CELLAR.directory?
if ARGV.named.empty?
cleanup_cellar
cleanup_cache
cleanup_logs
unless ARGV.dry_run?
cleanup_lockfiles
rm_DS_Store
end
else
ARGV.formulae.each { |f| cleanup_formula(f) }
end
end
def cleanup_logs
return unless HOMEBREW_LOGS.directory?
time = Time.now - 2 * 7 * 24 * 60 * 60 # two weeks
HOMEBREW_LOGS.subdirs.each do |dir|
if dir.mtime < time
if ARGV.dry_run?
puts "Would remove: #{dir}"
else
puts "Removing: #{dir}..."
dir.rmtree
end
end
end
end
def cleanup_cellar
HOMEBREW_CELLAR.subdirs.each do |rack|
begin
cleanup_formula Formulary.factory(rack.basename.to_s)
rescue FormulaUnavailableError
# Don't complain about directories from DIY installs
end
end
end
def cleanup_formula f
if f.installed?
eligible_kegs = f.rack.subdirs.map { |d| Keg.new(d) }.select { |k| f.pkg_version > k.version }
eligible_kegs.each do |keg|
if f.can_cleanup?
cleanup_keg(keg)
else
opoo "Skipping (old) keg-only: #{keg}"
end
end
elsif f.rack.subdirs.length > 1
# If the cellar only has one version installed, don't complain
# that we can't tell which one to keep.
opoo "Skipping #{f.name}: most recent version #{f.version} not installed"
end
end
def cleanup_keg keg
if keg.linked?
opoo "Skipping (old) #{keg} due to it being linked"
elsif ARGV.dry_run?
puts "Would remove: #{keg}"
else
puts "Removing: #{keg}..."
keg.uninstall
end
end
def cleanup_cache
return unless HOMEBREW_CACHE.directory?
HOMEBREW_CACHE.children.select(&:file?).each do |file|
next unless (version = file.version)
next unless (name = file.basename.to_s[/(.*)-(?:#{Regexp.escape(version)})/, 1])
begin
f = Formulary.factory(name)
rescue FormulaUnavailableError
next
end
if f.version > version || ARGV.switch?('s') && !f.installed? || bottle_file_outdated?(f, file)
cleanup_cached_file(file)
end
end
end
def cleanup_cached_file file
if ARGV.dry_run?
puts "Would remove: #{file}"
else
puts "Removing: #{file}..."
file.unlink
end
end
# Delete stale .brewing lockfiles from the formula cache.
#
# A lockfile is removed only when an exclusive non-blocking flock succeeds,
# i.e. no other brew process currently holds it.
def cleanup_lockfiles
  return unless HOMEBREW_CACHE_FORMULA.directory?
  candidates = HOMEBREW_CACHE_FORMULA.children
  lockfiles = candidates.select { |f| f.file? && f.extname == '.brewing' }
  lockfiles.select(&:readable?).each do |file|
    # Open in block form so the descriptor is always closed; the previous
    # `file.open.flock(...)` leaked one open file per lockfile. Closing the
    # file also releases the lock just taken.
    file.open do |f|
      file.unlink if f.flock(File::LOCK_EX | File::LOCK_NB)
    end
  end
end
def rm_DS_Store
paths = %w[Cellar Frameworks Library bin etc include lib opt sbin share var].
map { |p| HOMEBREW_PREFIX/p }.select(&:exist?)
args = paths.map(&:to_s) + %w[-name .DS_Store -delete]
quiet_system "find", *args
end
end
class Formula
def can_cleanup?
# It used to be the case that keg-only kegs could not be cleaned up, because
# older brews were built against the full path to the keg-only keg. Then we
# introduced the opt symlink, and built against that instead. So provided
# no brew exists that was built against an old-style keg-only keg, we can
# remove it.
if not keg_only? or ARGV.force?
true
elsif opt_prefix.directory?
# SHA records were added to INSTALL_RECEIPTS the same day as opt symlinks
!Formula.installed.
select{ |ff| ff.deps.map{ |d| d.to_s }.include? name }.
map{ |ff| ff.rack.subdirs rescue [] }.
flatten.
map{ |keg_path| Tab.for_keg(keg_path).HEAD }.
include? nil
end
end
end
|
require File.expand_path('../../../spec_helper', __FILE__)
describe Portal::GradesController do
# Return the shared grade double, optionally stubbing the given
# message => return-value pairs on it first.
#
# Bug fix: RSpec's `receive` matcher takes a single method name, not a
# hash, so `receive(stubs)` raised at runtime whenever stubs were given.
# `receive_messages` is the matcher designed for a hash of stubs.
def mock_grade(stubs={})
# @mock_grade ||= mock_model(Portal::Grade, stubs)
allow(@mock_grade).to receive_messages(stubs) unless stubs.empty?
@mock_grade
end
before(:each) do
generate_default_settings_and_jnlps_with_mocks
generate_portal_resources_with_mocks
login_admin
end
describe "GET index" do
it "assigns all portal_grades as @portal_grades" do
allow(Portal::Grade).to receive(:all).and_return([mock_grade])
get :index
expect(assigns[:portal_grades]).to eq([mock_grade])
end
end
describe "GET show" do
it "assigns the requested grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
get :show, :id => "37"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
end
describe "GET new" do
it "assigns a new grade as @portal_grade" do
allow(Portal::Grade).to receive(:new).and_return(mock_grade)
get :new
expect(assigns[:portal_grade]).to equal(mock_grade)
end
end
describe "GET edit" do
it "assigns the requested grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
get :edit, :id => "37"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
end
describe "POST create" do
describe "with valid params" do
it "assigns a newly created grade as @portal_grade" do
allow(Portal::Grade).to receive(:new).with({'these' => 'params'}).and_return(mock_grade(:save => true))
post :create, :portal_grade => {:these => 'params'}
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "redirects to the created grade" do
allow(Portal::Grade).to receive(:new).and_return(mock_grade(:save => true))
post :create, :portal_grade => {}
expect(response).to redirect_to(portal_grade_url(mock_grade))
end
end
describe "with invalid params" do
it "assigns a newly created but unsaved grade as @portal_grade" do
allow(Portal::Grade).to receive(:new).with({'these' => 'params'}).and_return(mock_grade(:save => false))
post :create, :portal_grade => {:these => 'params'}
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "re-renders the 'new' template" do
allow(Portal::Grade).to receive(:new).and_return(mock_grade(:save => false))
post :create, :portal_grade => {}
expect(response).to render_template('new')
end
end
end
describe "PUT update" do
describe "with valid params" do
it "updates the requested grade" do
expect(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
expect(mock_grade).to receive(:update_attributes).with({'these' => 'params'})
put :update, :id => "37", :portal_grade => {:these => 'params'}
end
it "assigns the requested grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => true))
put :update, :id => "1"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "redirects to the grade" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => true))
put :update, :id => "1"
expect(response).to redirect_to(portal_grade_url(mock_grade))
end
end
describe "with invalid params" do
it "updates the requested grade" do
expect(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
expect(mock_grade).to receive(:update_attributes).with({'these' => 'params'})
put :update, :id => "37", :portal_grade => {:these => 'params'}
end
it "assigns the grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => false))
put :update, :id => "1"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "re-renders the 'edit' template" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => false))
put :update, :id => "1"
expect(response).to render_template('edit')
end
end
end
describe "DELETE destroy" do
it "destroys the requested grade" do
expect(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
expect(mock_grade).to receive(:destroy)
delete :destroy, :id => "37"
end
it "redirects to the portal_grades list" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:destroy => true))
delete :destroy, :id => "1"
expect(response).to redirect_to(portal_grades_url)
end
end
end
Update mock_grade method to accommodate updated rspec syntax
require File.expand_path('../../../spec_helper', __FILE__)
describe Portal::GradesController do
# Return the shared grade double, stubbing each supplied
# message => return-value pair on it first.
def mock_grade(stubs={})
  stubs.each_pair do |message, result|
    allow(@mock_grade).to receive(message).and_return(result)
  end
  @mock_grade
end
before(:each) do
generate_default_settings_and_jnlps_with_mocks
generate_portal_resources_with_mocks
login_admin
end
describe "GET index" do
it "assigns all portal_grades as @portal_grades" do
allow(Portal::Grade).to receive(:all).and_return([mock_grade])
get :index
expect(assigns[:portal_grades]).to eq([mock_grade])
end
end
describe "GET show" do
it "assigns the requested grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
get :show, :id => "37"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
end
describe "GET new" do
it "assigns a new grade as @portal_grade" do
allow(Portal::Grade).to receive(:new).and_return(mock_grade)
get :new
expect(assigns[:portal_grade]).to equal(mock_grade)
end
end
describe "GET edit" do
it "assigns the requested grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
get :edit, :id => "37"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
end
describe "POST create" do
describe "with valid params" do
it "assigns a newly created grade as @portal_grade" do
allow(Portal::Grade).to receive(:new).with({'these' => 'params'}).and_return(mock_grade(:save => true))
post :create, :portal_grade => {:these => 'params'}
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "redirects to the created grade" do
allow(Portal::Grade).to receive(:new).and_return(mock_grade(:save => true))
post :create, :portal_grade => {}
expect(response).to redirect_to(portal_grade_url(mock_grade))
end
end
describe "with invalid params" do
it "assigns a newly created but unsaved grade as @portal_grade" do
allow(Portal::Grade).to receive(:new).with({'these' => 'params'}).and_return(mock_grade(:save => false))
post :create, :portal_grade => {:these => 'params'}
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "re-renders the 'new' template" do
allow(Portal::Grade).to receive(:new).and_return(mock_grade(:save => false))
post :create, :portal_grade => {}
expect(response).to render_template('new')
end
end
end
describe "PUT update" do
describe "with valid params" do
it "updates the requested grade" do
expect(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
expect(mock_grade).to receive(:update_attributes).with({'these' => 'params'})
put :update, :id => "37", :portal_grade => {:these => 'params'}
end
it "assigns the requested grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => true))
put :update, :id => "1"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "redirects to the grade" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => true))
put :update, :id => "1"
expect(response).to redirect_to(portal_grade_url(mock_grade))
end
end
describe "with invalid params" do
it "updates the requested grade" do
expect(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
expect(mock_grade).to receive(:update_attributes).with({'these' => 'params'})
put :update, :id => "37", :portal_grade => {:these => 'params'}
end
it "assigns the grade as @portal_grade" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => false))
put :update, :id => "1"
expect(assigns[:portal_grade]).to equal(mock_grade)
end
it "re-renders the 'edit' template" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:update_attributes => false))
put :update, :id => "1"
expect(response).to render_template('edit')
end
end
end
describe "DELETE destroy" do
it "destroys the requested grade" do
expect(Portal::Grade).to receive(:find).with("37").and_return(mock_grade)
expect(mock_grade).to receive(:destroy)
delete :destroy, :id => "37"
end
it "redirects to the portal_grades list" do
allow(Portal::Grade).to receive(:find).and_return(mock_grade(:destroy => true))
delete :destroy, :id => "1"
expect(response).to redirect_to(portal_grades_url)
end
end
end
|
OpenGovernment::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = true
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
config.assets.precompile += %w(modernizr-2.5.3.min.js)
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = {host: 'oglocal.herokuapp.com'}
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
if ENV['MAILTRAP_HOST'].present?
ActionMailer::Base.delivery_method = :smtp
ActionMailer::Base.smtp_settings = {
:user_name => ENV['MAILTRAP_USER_NAME'],
:password => ENV['MAILTRAP_PASSWORD'],
:address => ENV['MAILTRAP_HOST'],
:port => ENV['MAILTRAP_PORT'],
:authentication => :plain
}
end
end
Drop mailtrap, switch to preview.askthem.io host
opengovernment/ops#5
OpenGovernment::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = true
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
config.assets.precompile += %w(modernizr-2.5.3.min.js)
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = {host: 'preview.askthem.io'}
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
end
|
require 'formula'
require 'keg'
require 'bottles'
require 'cmd/prune'
module Homebrew extend self
def cleanup
if ARGV.named.empty?
if HOMEBREW_CELLAR.directory?
HOMEBREW_CELLAR.children.each do |rack|
begin
cleanup_formula rack.basename.to_s if rack.directory?
rescue FormulaUnavailableError => e
# Don't complain about Cellar folders that are from DIY installs
# instead of core formulae.
end
end
end
clean_cache
# seems like a good time to do some additional cleanup
unless ARGV.dry_run?
Homebrew.prune
rm_DS_Store
end
else
ARGV.formulae.each do |f|
cleanup_formula f
end
end
end
def cleanup_formula f
f = Formula.factory f
if f.installed? and f.rack.directory?
f.rack.children.each do |keg|
if File.directory? keg and f.version > Keg.new(keg).version
if f.can_cleanup?
if ARGV.dry_run?
puts "Would remove: #{keg}"
else
puts "Removing: #{keg}..."
rm_rf keg
end
else
opoo "Skipping (old) keg-only: #{keg}"
end
end
end
elsif f.rack.children.length > 1
# If the cellar only has one version installed, don't complain
# that we can't tell which one to keep.
opoo "Skipping #{f.name}: most recent version #{f.version} not installed"
end
end
def clean_cache
return unless HOMEBREW_CACHE.directory?
HOMEBREW_CACHE.children.each do |pn|
next unless pn.file?
version = pn.version
name = pn.basename.to_s.match(/(.*)-(#{version})/).captures.first rescue nil
if name and version
f = Formula.factory(name) rescue nil
old_bottle = bottle_file_outdated? f, pn
if (f and f.version > version) or (ARGV.switch? "s" and (f and (not f.installed?))) or old_bottle
if ARGV.dry_run?
puts "Would remove: #{pn}"
else
puts "Removing: #{pn}..."
rm pn
end
end
end
end
end
# Best-effort removal of Finder .DS_Store litter anywhere under the prefix.
# Pass each element of the command as a separate argv entry so a prefix
# containing spaces or shell metacharacters can neither break the command
# nor be interpreted by a shell (the old single-string form risked both).
def rm_DS_Store
  system "find", HOMEBREW_PREFIX.to_s, "-name", ".DS_Store", "-delete"
end
end
class Formula
def can_cleanup?
# It used to be the case that keg-only kegs could not be cleaned up, because
# older brews were built against the full path to the keg-only keg. Then we
# introduced the opt symlink, and built against that instead. So provided
# no brew exists that was built against an old-style keg-only keg, we can
# remove it.
if not keg_only? or ARGV.force?
true
elsif opt_prefix.directory?
# SHA records were added to INSTALL_RECEIPTS the same day as opt symlinks
!Formula.installed.
select{ |ff| ff.deps.map{ |d| d.to_s }.include? name }.
map{ |ff| ff.rack.children rescue [] }.
flatten.
map{ |keg_path| Tab.for_keg(keg_path).send("HEAD") }.
include? nil
end
end
end
Don't remove linked formulas when doing a 'cleanup'
Closes #16947.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
require 'keg'
require 'bottles'
require 'cmd/prune'
module Homebrew extend self
def cleanup
if ARGV.named.empty?
if HOMEBREW_CELLAR.directory?
HOMEBREW_CELLAR.children.each do |rack|
begin
cleanup_formula rack.basename.to_s if rack.directory?
rescue FormulaUnavailableError => e
# Don't complain about Cellar folders that are from DIY installs
# instead of core formulae.
end
end
end
clean_cache
# seems like a good time to do some additional cleanup
unless ARGV.dry_run?
Homebrew.prune
rm_DS_Store
end
else
ARGV.formulae.each do |f|
cleanup_formula f
end
end
end
def cleanup_formula f
f = Formula.factory f
if f.installed? and f.rack.directory?
f.rack.children.each do |keg|
if File.directory? keg and f.version > Keg.new(keg).version
if f.can_cleanup?
if !Keg.new(keg).linked?
if ARGV.dry_run?
puts "Would remove: #{keg}"
else
puts "Removing: #{keg}..."
rm_rf keg
end
else
opoo "Skipping (old) #{keg} due to it being linked"
end
else
opoo "Skipping (old) keg-only: #{keg}"
end
end
end
elsif f.rack.children.length > 1
# If the cellar only has one version installed, don't complain
# that we can't tell which one to keep.
opoo "Skipping #{f.name}: most recent version #{f.version} not installed"
end
end
def clean_cache
return unless HOMEBREW_CACHE.directory?
HOMEBREW_CACHE.children.each do |pn|
next unless pn.file?
version = pn.version
name = pn.basename.to_s.match(/(.*)-(#{version})/).captures.first rescue nil
if name and version
f = Formula.factory(name) rescue nil
old_bottle = bottle_file_outdated? f, pn
if (f and f.version > version) or (ARGV.switch? "s" and (f and (not f.installed?))) or old_bottle
if ARGV.dry_run?
puts "Would remove: #{pn}"
else
puts "Removing: #{pn}..."
rm pn
end
end
end
end
end
# Best-effort removal of Finder .DS_Store litter anywhere under the prefix.
# Pass each element of the command as a separate argv entry so a prefix
# containing spaces or shell metacharacters can neither break the command
# nor be interpreted by a shell (the old single-string form risked both).
def rm_DS_Store
  system "find", HOMEBREW_PREFIX.to_s, "-name", ".DS_Store", "-delete"
end
end
class Formula
def can_cleanup?
# It used to be the case that keg-only kegs could not be cleaned up, because
# older brews were built against the full path to the keg-only keg. Then we
# introduced the opt symlink, and built against that instead. So provided
# no brew exists that was built against an old-style keg-only keg, we can
# remove it.
if not keg_only? or ARGV.force?
true
elsif opt_prefix.directory?
# SHA records were added to INSTALL_RECEIPTS the same day as opt symlinks
!Formula.installed.
select{ |ff| ff.deps.map{ |d| d.to_s }.include? name }.
map{ |ff| ff.rack.children rescue [] }.
flatten.
map{ |keg_path| Tab.for_keg(keg_path).send("HEAD") }.
include? nil
end
end
end
|
require 'music-utils'
describe MusicUtils do
include MusicUtils::Scales
context "Intervals" do
context "By number" do
context "Diatonic from DO" do
# Description fixed: ordinal is "2nd", not "2th".
it "the number of simple interval do-re should be a 2nd" do
  MusicUtils.number(DO, RE).should == 2
end
it "the number of do-re compound interval should be a 9th" do
MusicUtils.number(DO, RE, 1).should == 9
end
it "the number of simple interval do-mi should be a 3rd" do
MusicUtils.number(DO, MI).should == 3
end
it "the number of do-mi compound interval should be a 10th" do
MusicUtils.number(DO, MI, 1).should == 10
end
it "the number of simple interval do-fa should be a 4th" do
MusicUtils.number(DO, FA).should == 4
end
it "the number of do-fa compound interval should be a 11th" do
MusicUtils.number(DO, FA, 1).should == 11
end
it "the number of simple interval do-sol should be a 5th" do
MusicUtils.number(DO, SOL).should == 5
end
it "the number of do-sol compound interval should be a 12th" do
MusicUtils.number(DO, SOL, 1).should == 12
end
it "the number of simple interval do-la should be a 6th" do
MusicUtils.number(DO, LA).should == 6
end
it "the number of do-la compound interval should be a 13th" do
MusicUtils.number(DO, LA, 1).should == 13
end
it "the number of simple interval do-si should be a 7th" do
MusicUtils.number(DO, SI).should == 7
end
it "the number of do-si compound interval should be a 14th" do
MusicUtils.number(DO, SI, 1).should == 14
end
it "the number of simple interval do-do should be a 8th" do
MusicUtils.number(DO, DO).should == 8
end
it "the number of do-do compound interval should be a 15th" do
MusicUtils.number(DO, DO, 1).should == 15
end
end
end
context "By semitones" do
context "Diatonic from DO" do
it "simple interval do-re should be 2 semitones" do
MusicUtils.semitones(DO, RE).should == 2
end
it "compound interval do-re should be 14 semitones" do
MusicUtils.semitones(DO, RE, 1).should == 14
end
it "simple interval do-mi should be 4 semitones" do
MusicUtils.semitones(DO, MI).should == 4
end
it "compound interval do-mi should be 16 semitones" do
MusicUtils.semitones(DO, MI, 1).should == 16
end
it "simple interval do-fa should be 5 semitones" do
MusicUtils.semitones(DO, FA).should == 5
end
it "compound interval do-fa should be 17 semitones" do
MusicUtils.semitones(DO, FA, 1).should == 17
end
it "simple interval do-sol should be 7 semitones" do
MusicUtils.semitones(DO, SOL).should == 7
end
# Description fixed to match the assertion: 7 semitones + one octave = 19.
it "compound interval do-sol should be 19 semitones" do
  MusicUtils.semitones(DO, SOL, 1).should == 19
end
it "simple interval do-la should be 9 semitones" do
MusicUtils.semitones(DO, LA).should == 9
end
it "compound interval do-la should be 21 semitones" do
MusicUtils.semitones(DO, LA, 1).should == 21
end
it "simple interval do-si should be 11 semitones" do
MusicUtils.semitones(DO, SI).should == 11
end
# Description fixed to match the assertion: 11 semitones + one octave = 23.
it "compound interval do-si should be 23 semitones" do
  MusicUtils.semitones(DO, SI, 1).should == 23
end
# Description fixed: the call has no octave argument, so this is the
# simple do-do interval (a full octave, 12 semitones), not a compound one.
it "simple interval do-do should be 12 semitones" do
  MusicUtils.semitones(DO, DO).should == 12
end
end
context "Diatonic from others" do
it "simple interval so-la should be 2 semitones" do
MusicUtils.semitones(SOL, LA).should == 2
end
it "simple interval re-do should be 10 semitones" do
MusicUtils.semitones(RE, DO).should == 10
end
it "simple interval fa-re should be 9 semitones" do
MusicUtils.semitones(FA, RE).should == 9
end
it "simple interval si-mi should be 5 semitones" do
MusicUtils.semitones(SI, MI).should == 5
end
it "simple interval mi-si should be 7 semitones" do
MusicUtils.semitones(MI, SI).should == 7
end
it "simple interval fa-do should be 7 semitones" do
MusicUtils.semitones(FA, DO).should == 7
end
end
end
context "By quality" do
context "Diatonic from DO" do
it "quality of interval do-re should be M" do
MusicUtils.quality(DO, RE).should == 'M'
end
it "quality of compound interval do-re should be M" do
MusicUtils.quality(DO, RE, 1).should == 'M'
end
it "quality of interval do-mi should be M" do
MusicUtils.quality(DO, MI).should == 'M'
end
it "quality of compound interval do-mi should be M" do
MusicUtils.quality(DO, MI, 1).should == 'M'
end
it "quality of interval do-fa should be P" do
MusicUtils.quality(DO, FA).should == 'P'
end
it "quality of compound interval do-fa should be P" do
MusicUtils.quality(DO, FA, 1).should == 'P'
end
it "quality of interval do-sol should be P" do
MusicUtils.quality(DO, SOL).should == 'P'
end
it "quality of compound interval do-sol should be P" do
MusicUtils.quality(DO, SOL, 1).should == 'P'
end
it "quality of interval do-la should be m" do
MusicUtils.quality(DO, LA).should == 'm'
end
it "quality of compound interval do-la should be m" do
MusicUtils.quality(DO, LA, 1).should == 'm'
end
it "quality of interval do-si should be M" do
MusicUtils.quality(DO, SI).should == 'M'
end
it "quality of compound interval do-si should be M" do
MusicUtils.quality(DO, SI, 1).should == 'M'
end
end
end
context "By short notation" do
it "the short notation of do#-mi interval should be m3" do
MusicUtils.short(DOS, MI).should == 'm3'
end
it "the short notation of do#-mi compound interval should be m10" do
MusicUtils.short(DOS, MI, 1).should == 'm10'
end
end
context "Calculate 2nd note of an interval from sol" do
# From sol
it "sol m2" do
MusicUtils.second_note(:sol, :m2).should == :laf
end
it "sol M2" do
MusicUtils.second_note(:sol, :M2).should == :la
end
it "sol m3" do
MusicUtils.second_note(:sol, :m3).should == :sif
end
it "sol M3" do
MusicUtils.second_note(:sol, :M3).should == :si
end
it "sol d4" do
MusicUtils.second_note(:sol, :d4).should == :dof
end
it "sol P4" do
MusicUtils.second_note(:sol, :P4).should == :do
end
it "sol A4" do
MusicUtils.second_note(:sol, :A4).should == :dos
end
it "sol AA4" do
MusicUtils.second_note(:sol, :AA4).should == :doss
end
it "sol dd5" do
MusicUtils.second_note(:sol, :dd5).should == :reff
end
it "sol d5" do
MusicUtils.second_note(:sol, :d5).should == :ref
end
it "sol P5" do
MusicUtils.second_note(:sol, :P5).should == :re
end
it "sol A5" do
MusicUtils.second_note(:sol, :A5).should == :res
end
it "sol AA5" do
MusicUtils.second_note(:sol, :AA5).should == :ress
end
it "sol AA5" do
MusicUtils.second_note(:sol, :AA5).should == :ress
end
it "sol dd6" do
MusicUtils.second_note(:sol, :dd6).should == :miff
end
it "sol d6" do
MusicUtils.second_note(:sol, :d6).should == :mif
end
it "sol m6" do
MusicUtils.second_note(:sol, :m6).should == :mi
end
it "sol M6" do
MusicUtils.second_note(:sol, :M6).should == :mis
end
it "sol A6" do
MusicUtils.second_note(:sol, :A6).should == :miss
end
it "sol dd7" do
MusicUtils.second_note(:sol, :dd7).should == :faff
end
it "sol d7" do
MusicUtils.second_note(:sol, :d7).should == :faf
end
it "sol m7" do
MusicUtils.second_note(:sol, :m7).should == :fa
end
it "sol M7" do
MusicUtils.second_note(:sol, :M7).should == :fas
end
it "sol A7" do
MusicUtils.second_note(:sol, :A7).should == :fass
end
end
end
context "Scales" do
  context "Major scale" do
    context "Natural notes" do
      # Descriptions list the 7 notes actually returned (the octave tonic is
      # not repeated by MusicUtils.scale).
      it "the major scale of DO should be [DO, RE, MI, FA, SOL, LA, SI]" do
        MusicUtils.scale(DO, MAJ_SCALE).should == [DO, RE, MI, FA, SOL, LA, SI]
      end
      it "the major scale of RE should be [RE, MI, FAS, SOL, LA, SI, DOS]" do
        MusicUtils.scale(RE, MAJ_SCALE).should == [RE, MI, FAS, SOL, LA, SI, DOS]
      end
      it "the major scale of MI should be [MI, FAS, SOLS, LA, SI, DOS, RES]" do
        MusicUtils.scale(MI, MAJ_SCALE).should == [MI, FAS, SOLS, LA, SI, DOS, RES]
      end
      it "the major scale of FA should be [FA, SOL, LA, SIF, DO, RE, MI]" do
        MusicUtils.scale(FA, MAJ_SCALE).should == [FA, SOL, LA, SIF, DO, RE, MI]
      end
      it "the major scale of SOL should be [SOL, LA, SI, DO, RE, MI, FAS]" do
        MusicUtils.scale(SOL, MAJ_SCALE).should == [SOL, LA, SI, DO, RE, MI, FAS]
      end
      it "the major scale of LA should be [LA, SI, DOS, RE, MI, FAS, SOLS]" do
        MusicUtils.scale(LA, MAJ_SCALE).should == [LA, SI, DOS, RE, MI, FAS, SOLS]
      end
      it "the major scale of SI should be [SI, DOS, RES, MI, FAS, SOLS, LAS]" do
        MusicUtils.scale(SI, MAJ_SCALE).should == [SI, DOS, RES, MI, FAS, SOLS, LAS]
      end
    end
    context "Sharped notes" do
      it "the major scale of DO# should be [DOS, RES, MIS, FAS, SOLS, LAS, SIS]" do
        MusicUtils.scale(DOS, MAJ_SCALE).should == [DOS, RES, MIS, FAS, SOLS, LAS, SIS]
      end
      it "the major scale of RE# should be [RES, MIS, FASS, SOLS, LAS, SIS, DOSS]" do
        MusicUtils.scale(RES, MAJ_SCALE).should == [RES, MIS, FASS, SOLS, LAS, SIS, DOSS]
      end
      it "the major scale of MI# should be [MIS, FASS, SOLSS, LAS, SIS, DOSS, RESS]" do
        MusicUtils.scale(MIS, MAJ_SCALE).should == [MIS, FASS, SOLSS, LAS, SIS, DOSS, RESS]
      end
      it "the major scale of FA# should be [FAS, SOLS, LAS, SI, DOS, RES, MIS]" do
        MusicUtils.scale(FAS, MAJ_SCALE).should == [FAS, SOLS, LAS, SI, DOS, RES, MIS]
      end
      it "the major scale of SOL# should be [SOLS, LAS, SIS, DOS, RES, MIS, FASS]" do
        MusicUtils.scale(SOLS, MAJ_SCALE).should == [SOLS, LAS, SIS, DOS, RES, MIS, FASS]
      end
      it "the major scale of LA# should be [LAS, SIS, DOSS, RES, MIS, FASS, SOLSS]" do
        MusicUtils.scale(LAS, MAJ_SCALE).should == [LAS, SIS, DOSS, RES, MIS, FASS, SOLSS]
      end
      it "the major scale of SI# should be [SIS, DOSS, RESS, MIS, FASS, SOLSS, LASS]" do
        MusicUtils.scale(SIS, MAJ_SCALE).should == [SIS, DOSS, RESS, MIS, FASS, SOLSS, LASS]
      end
    end
    context "Flatted notes" do
      it "the major scale of DOb should be [DOF, REF, MIF, FAF, SOLF, LAF, SIF]" do
        MusicUtils.scale(DOF, MAJ_SCALE).should == [DOF, REF, MIF, FAF, SOLF, LAF, SIF]
      end
      it "the major scale of REb should be [REF, MIF, FA, SOLF, LAF, SIF, DO]" do
        MusicUtils.scale(REF, MAJ_SCALE).should == [REF, MIF, FA, SOLF, LAF, SIF, DO]
      end
      it "the major scale of MIb should be [MIF, FA, SOL, LAF, SIF, DO, RE]" do
        MusicUtils.scale(MIF, MAJ_SCALE).should == [MIF, FA, SOL, LAF, SIF, DO, RE]
      end
      it "the major scale of FAb should be [FAF, SOLF, LAF, SIFF, DOF, REF, MIF]" do
        MusicUtils.scale(FAF, MAJ_SCALE).should == [FAF, SOLF, LAF, SIFF, DOF, REF, MIF]
      end
      it "the major scale of SOLb should be [SOLF, LAF, SIF, DOF, REF, MIF, FA]" do
        MusicUtils.scale(SOLF, MAJ_SCALE).should == [SOLF, LAF, SIF, DOF, REF, MIF, FA]
      end
      it "the major scale of LAb should be [LAF, SIF, DO, REF, MIF, FA, SOL]" do
        MusicUtils.scale(LAF, MAJ_SCALE).should == [LAF, SIF, DO, REF, MIF, FA, SOL]
      end
      it "the major scale of SIb should be [SIF, DO, RE, MIF, FA, SOL, LA]" do
        MusicUtils.scale(SIF, MAJ_SCALE).should == [SIF, DO, RE, MIF, FA, SOL, LA]
      end
    end
  end
  # Sibling of "Major scale" now (it was accidentally nested inside it).
  context "Natural Minor scale" do
    # Natural minor interval pattern: [2, 1, 2, 2, 1, 2]
    context "Natural notes" do
      it "the natural minor scale of DO should be [DO, RE, MIF, FA, SOL, LAF, SIF]" do
        MusicUtils.scale(DO, NATURAL_MIN_SCALE).should == [DO, RE, MIF, FA, SOL, LAF, SIF]
      end
      it "the natural minor scale of RE should be [RE, MI, FA, SOL, LA, SIF, DO]" do
        MusicUtils.scale(RE, NATURAL_MIN_SCALE).should == [RE, MI, FA, SOL, LA, SIF, DO]
      end
      it "the natural minor scale of MI should be [MI, FAS, SOL, LA, SI, DO, RE]" do
        MusicUtils.scale(MI, NATURAL_MIN_SCALE).should == [MI, FAS, SOL, LA, SI, DO, RE]
      end
      it "the natural minor scale of FA should be [FA, SOL, LAF, SIF, DO, REF, MIF]" do
        MusicUtils.scale(FA, NATURAL_MIN_SCALE).should == [FA, SOL, LAF, SIF, DO, REF, MIF]
      end
      it "the natural minor scale of SOL should be [SOL, LA, SIF, DO, RE, MIF, FA]" do
        MusicUtils.scale(SOL, NATURAL_MIN_SCALE).should == [SOL, LA, SIF, DO, RE, MIF, FA]
      end
      it "the natural minor scale of LA should be [LA, SI, DO, RE, MI, FA, SOL]" do
        MusicUtils.scale(LA, NATURAL_MIN_SCALE).should == [LA, SI, DO, RE, MI, FA, SOL]
      end
      it "the natural minor scale of SI should be [SI, DOS, RE, MI, FAS, SOL, LA]" do
        MusicUtils.scale(SI, NATURAL_MIN_SCALE).should == [SI, DOS, RE, MI, FAS, SOL, LA]
      end
    end
    context "Sharped notes" do
      it "the natural minor scale of DO# should be [DOS, RES, MI, FAS, SOLS, LA, SI]" do
        MusicUtils.scale(DOS, NATURAL_MIN_SCALE).should == [DOS, RES, MI, FAS, SOLS, LA, SI]
      end
      it "the natural minor scale of RE# should be [RES, MIS, FAS, SOLS, LAS, SI, DOS]" do
        MusicUtils.scale(RES, NATURAL_MIN_SCALE).should == [RES, MIS, FAS, SOLS, LAS, SI, DOS]
      end
      it "the natural minor scale of MI# should be [MIS, FASS, SOLS, LAS, SIS, DOS, RES]" do
        MusicUtils.scale(MIS, NATURAL_MIN_SCALE).should == [MIS, FASS, SOLS, LAS, SIS, DOS, RES]
      end
      it "the natural minor scale of FA# should be [FAS, SOLS, LA, SI, DOS, RE, MI]" do
        MusicUtils.scale(FAS, NATURAL_MIN_SCALE).should == [FAS, SOLS, LA, SI, DOS, RE, MI]
      end
      it "the natural minor scale of SOL# should be [SOLS, LAS, SI, DOS, RES, MI, FAS]" do
        MusicUtils.scale(SOLS, NATURAL_MIN_SCALE).should == [SOLS, LAS, SI, DOS, RES, MI, FAS]
      end
      it "the natural minor scale of LA# should be [LAS, SIS, DOS, RES, MIS, FAS, SOLS]" do
        MusicUtils.scale(LAS, NATURAL_MIN_SCALE).should == [LAS, SIS, DOS, RES, MIS, FAS, SOLS]
      end
      it "the natural minor scale of SI# should be [SIS, DOSS, RES, MIS, FASS, SOLS, LAS]" do
        MusicUtils.scale(SIS, NATURAL_MIN_SCALE).should == [SIS, DOSS, RES, MIS, FASS, SOLS, LAS]
      end
    end
    context "Flatted notes" do
      it "the natural minor scale of DOb should be [DOF, REF, MIFF, FAF, SOLF, LAFF, SIFF]" do
        MusicUtils.scale(DOF, NATURAL_MIN_SCALE).should == [DOF, REF, MIFF, FAF, SOLF, LAFF, SIFF]
      end
      it "the natural minor scale of REb should be [REF, MIF, FAF, SOLF, LAF, SIFF, DOF]" do
        MusicUtils.scale(REF, NATURAL_MIN_SCALE).should == [REF, MIF, FAF, SOLF, LAF, SIFF, DOF]
      end
      it "the natural minor scale of MIb should be [MIF, FA, SOLF, LAF, SIF, DOF, REF]" do
        MusicUtils.scale(MIF, NATURAL_MIN_SCALE).should == [MIF, FA, SOLF, LAF, SIF, DOF, REF]
      end
      it "the natural minor scale of FAb should be [FAF, SOLF, LAFF, SIFF, DOF, REFF, MIFF]" do
        MusicUtils.scale(FAF, NATURAL_MIN_SCALE).should == [FAF, SOLF, LAFF, SIFF, DOF, REFF, MIFF]
      end
      it "the natural minor scale of SOLb should be [SOLF, LAF, SIFF, DOF, REF, MIFF, FAF]" do
        MusicUtils.scale(SOLF, NATURAL_MIN_SCALE).should == [SOLF, LAF, SIFF, DOF, REF, MIFF, FAF]
      end
      it "the natural minor scale of LAb should be [LAF, SIF, DOF, REF, MIF, FAF, SOLF]" do
        MusicUtils.scale(LAF, NATURAL_MIN_SCALE).should == [LAF, SIF, DOF, REF, MIF, FAF, SOLF]
      end
      it "the natural minor scale of SIb should be [SIF, DO, REF, MIF, FA, SOLF, LAF]" do
        MusicUtils.scale(SIF, NATURAL_MIN_SCALE).should == [SIF, DO, REF, MIF, FA, SOLF, LAF]
      end
    end
  end
end
end
Added examples to test the high_note method.
require 'music-utils'

# Specs for the MusicUtils interval and scale helpers. Example descriptions
# have been corrected to match the values actually asserted.
describe MusicUtils do
  include MusicUtils::Scales

  context "Intervals" do
    context "By number" do
      context "Diatonic from DO" do
        it "the number of simple interval do-re should be a 2nd" do
          MusicUtils.number(DO, RE).should == 2
        end
        it "the number of do-re compound interval should be a 9th" do
          MusicUtils.number(DO, RE, 1).should == 9
        end
        it "the number of simple interval do-mi should be a 3rd" do
          MusicUtils.number(DO, MI).should == 3
        end
        it "the number of do-mi compound interval should be a 10th" do
          MusicUtils.number(DO, MI, 1).should == 10
        end
        it "the number of simple interval do-fa should be a 4th" do
          MusicUtils.number(DO, FA).should == 4
        end
        it "the number of do-fa compound interval should be an 11th" do
          MusicUtils.number(DO, FA, 1).should == 11
        end
        it "the number of simple interval do-sol should be a 5th" do
          MusicUtils.number(DO, SOL).should == 5
        end
        it "the number of do-sol compound interval should be a 12th" do
          MusicUtils.number(DO, SOL, 1).should == 12
        end
        it "the number of simple interval do-la should be a 6th" do
          MusicUtils.number(DO, LA).should == 6
        end
        it "the number of do-la compound interval should be a 13th" do
          MusicUtils.number(DO, LA, 1).should == 13
        end
        it "the number of simple interval do-si should be a 7th" do
          MusicUtils.number(DO, SI).should == 7
        end
        it "the number of do-si compound interval should be a 14th" do
          MusicUtils.number(DO, SI, 1).should == 14
        end
        it "the number of simple interval do-do should be an 8th" do
          MusicUtils.number(DO, DO).should == 8
        end
        it "the number of do-do compound interval should be a 15th" do
          MusicUtils.number(DO, DO, 1).should == 15
        end
      end
    end
    context "By semitones" do
      context "Diatonic from DO" do
        it "simple interval do-re should be 2 semitones" do
          MusicUtils.semitones(DO, RE).should == 2
        end
        it "compound interval do-re should be 14 semitones" do
          MusicUtils.semitones(DO, RE, 1).should == 14
        end
        it "simple interval do-mi should be 4 semitones" do
          MusicUtils.semitones(DO, MI).should == 4
        end
        it "compound interval do-mi should be 16 semitones" do
          MusicUtils.semitones(DO, MI, 1).should == 16
        end
        it "simple interval do-fa should be 5 semitones" do
          MusicUtils.semitones(DO, FA).should == 5
        end
        it "compound interval do-fa should be 17 semitones" do
          MusicUtils.semitones(DO, FA, 1).should == 17
        end
        it "simple interval do-sol should be 7 semitones" do
          MusicUtils.semitones(DO, SOL).should == 7
        end
        it "compound interval do-sol should be 19 semitones" do
          MusicUtils.semitones(DO, SOL, 1).should == 19
        end
        it "simple interval do-la should be 9 semitones" do
          MusicUtils.semitones(DO, LA).should == 9
        end
        it "compound interval do-la should be 21 semitones" do
          MusicUtils.semitones(DO, LA, 1).should == 21
        end
        it "simple interval do-si should be 11 semitones" do
          MusicUtils.semitones(DO, SI).should == 11
        end
        it "compound interval do-si should be 23 semitones" do
          MusicUtils.semitones(DO, SI, 1).should == 23
        end
        it "simple interval do-do should be 12 semitones" do
          MusicUtils.semitones(DO, DO).should == 12
        end
      end
      context "Diatonic from others" do
        it "simple interval sol-la should be 2 semitones" do
          MusicUtils.semitones(SOL, LA).should == 2
        end
        it "simple interval re-do should be 10 semitones" do
          MusicUtils.semitones(RE, DO).should == 10
        end
        it "simple interval fa-re should be 9 semitones" do
          MusicUtils.semitones(FA, RE).should == 9
        end
        it "simple interval si-mi should be 5 semitones" do
          MusicUtils.semitones(SI, MI).should == 5
        end
        it "simple interval mi-si should be 7 semitones" do
          MusicUtils.semitones(MI, SI).should == 7
        end
        it "simple interval fa-do should be 7 semitones" do
          MusicUtils.semitones(FA, DO).should == 7
        end
      end
    end
    context "By quality" do
      context "Diatonic from DO" do
        it "quality of interval do-re should be M" do
          MusicUtils.quality(DO, RE).should == 'M'
        end
        it "quality of compound interval do-re should be M" do
          MusicUtils.quality(DO, RE, 1).should == 'M'
        end
        it "quality of interval do-mi should be M" do
          MusicUtils.quality(DO, MI).should == 'M'
        end
        it "quality of compound interval do-mi should be M" do
          MusicUtils.quality(DO, MI, 1).should == 'M'
        end
        it "quality of interval do-fa should be P" do
          MusicUtils.quality(DO, FA).should == 'P'
        end
        it "quality of compound interval do-fa should be P" do
          MusicUtils.quality(DO, FA, 1).should == 'P'
        end
        it "quality of interval do-sol should be P" do
          MusicUtils.quality(DO, SOL).should == 'P'
        end
        it "quality of compound interval do-sol should be P" do
          MusicUtils.quality(DO, SOL, 1).should == 'P'
        end
        it "quality of interval do-la should be m" do
          MusicUtils.quality(DO, LA).should == 'm'
        end
        it "quality of compound interval do-la should be m" do
          MusicUtils.quality(DO, LA, 1).should == 'm'
        end
        it "quality of interval do-si should be M" do
          MusicUtils.quality(DO, SI).should == 'M'
        end
        it "quality of compound interval do-si should be M" do
          MusicUtils.quality(DO, SI, 1).should == 'M'
        end
      end
    end
    context "By short notation" do
      it "the short notation of do#-mi interval should be m3" do
        MusicUtils.short(DOS, MI).should == 'm3'
      end
      it "the short notation of do#-mi compound interval should be m10" do
        MusicUtils.short(DOS, MI, 1).should == 'm10'
      end
    end
    context "Calculate higher note of intervals from sol" do
      it "the higher note of m2 from sol should be laf" do
        MusicUtils.high_note(:sol, :m2).should == :laf
      end
      it "the higher note of M2 from sol should be la" do
        MusicUtils.high_note(:sol, :M2).should == :la
      end
      it "the higher note of m3 from sol should be sif" do
        MusicUtils.high_note(:sol, :m3).should == :sif
      end
      it "the higher note of M3 from sol should be si" do
        MusicUtils.high_note(:sol, :M3).should == :si
      end
      it "the higher note of d4 from sol should be dof" do
        MusicUtils.high_note(:sol, :d4).should == :dof
      end
      it "the higher note of P4 from sol should be do" do
        MusicUtils.high_note(:sol, :P4).should == :do
      end
      it "the higher note of A4 from sol should be dos" do
        MusicUtils.high_note(:sol, :A4).should == :dos
      end
      it "the higher note of AA4 from sol should be doss" do
        MusicUtils.high_note(:sol, :AA4).should == :doss
      end
      it "the higher note of dd5 from sol should be reff" do
        MusicUtils.high_note(:sol, :dd5).should == :reff
      end
      it "the higher note of d5 from sol should be ref" do
        MusicUtils.high_note(:sol, :d5).should == :ref
      end
      it "the higher note of P5 from sol should be re" do
        MusicUtils.high_note(:sol, :P5).should == :re
      end
      it "the higher note of A5 from sol should be res" do
        MusicUtils.high_note(:sol, :A5).should == :res
      end
      it "the higher note of AA5 from sol should be ress" do
        MusicUtils.high_note(:sol, :AA5).should == :ress
      end
      it "the higher note of dd6 from sol should be miff" do
        MusicUtils.high_note(:sol, :dd6).should == :miff
      end
      it "the higher note of d6 from sol should be mif" do
        MusicUtils.high_note(:sol, :d6).should == :mif
      end
      it "the higher note of m6 from sol should be mi" do
        MusicUtils.high_note(:sol, :m6).should == :mi
      end
      it "the higher note of M6 from sol should be mis" do
        MusicUtils.high_note(:sol, :M6).should == :mis
      end
      it "the higher note of A6 from sol should be miss" do
        MusicUtils.high_note(:sol, :A6).should == :miss
      end
      it "the higher note of dd7 from sol should be faff" do
        MusicUtils.high_note(:sol, :dd7).should == :faff
      end
      it "the higher note of d7 from sol should be faf" do
        MusicUtils.high_note(:sol, :d7).should == :faf
      end
      it "the higher note of m7 from sol should be fa" do
        MusicUtils.high_note(:sol, :m7).should == :fa
      end
      it "the higher note of M7 from sol should be fas" do
        MusicUtils.high_note(:sol, :M7).should == :fas
      end
      it "the higher note of A7 from sol should be fass" do
        MusicUtils.high_note(:sol, :A7).should == :fass
      end
    end
    context "Calculate higher note of intervals from fas" do
      it "the higher note of m2 from fas should be sol" do
        MusicUtils.high_note(:fas, :m2).should == :sol
      end
      it "the higher note of M2 from fas should be sols" do
        MusicUtils.high_note(:fas, :M2).should == :sols
      end
      it "the higher note of m3 from fas should be la" do
        MusicUtils.high_note(:fas, :m3).should == :la
      end
      it "the higher note of M3 from fas should be las" do
        MusicUtils.high_note(:fas, :M3).should == :las
      end
      it "the higher note of d4 from fas should be sif" do
        MusicUtils.high_note(:fas, :d4).should == :sif
      end
      it "the higher note of P4 from fas should be si" do
        MusicUtils.high_note(:fas, :P4).should == :si
      end
      it "the higher note of A4 from fas should be sis" do
        MusicUtils.high_note(:fas, :A4).should == :sis
      end
      it "the higher note of AA4 from fas should be siss" do
        MusicUtils.high_note(:fas, :AA4).should == :siss
      end
      it "the higher note of dd5 from fas should be dof" do
        MusicUtils.high_note(:fas, :dd5).should == :dof
      end
      it "the higher note of d5 from fas should be do" do
        MusicUtils.high_note(:fas, :d5).should == :do
      end
      it "the higher note of P5 from fas should be dos" do
        MusicUtils.high_note(:fas, :P5).should == :dos
      end
      it "the higher note of A5 from fas should be doss" do
        MusicUtils.high_note(:fas, :A5).should == :doss
      end
      it "the higher note of dd6 from fas should be ref" do
        MusicUtils.high_note(:fas, :dd6).should == :ref
      end
      it "the higher note of d6 from fas should be re" do
        MusicUtils.high_note(:fas, :d6).should == :re
      end
      it "the higher note of m6 from fas should be res" do
        MusicUtils.high_note(:fas, :m6).should == :res
      end
      it "the higher note of M6 from fas should be ress" do
        MusicUtils.high_note(:fas, :M6).should == :ress
      end
      it "the higher note of dd7 from fas should be miff" do
        MusicUtils.high_note(:fas, :dd7).should == :miff
      end
      it "the higher note of d7 from fas should be mif" do
        MusicUtils.high_note(:fas, :d7).should == :mif
      end
      it "the higher note of m7 from fas should be mi" do
        MusicUtils.high_note(:fas, :m7).should == :mi
      end
      it "the higher note of M7 from fas should be mis" do
        MusicUtils.high_note(:fas, :M7).should == :mis
      end
      it "the higher note of A7 from fas should be miss" do
        MusicUtils.high_note(:fas, :A7).should == :miss
      end
    end
  end
  context "Scales" do
    context "Major scale" do
      context "Natural notes" do
        # Descriptions list the 7 notes actually returned (the octave tonic
        # is not repeated by MusicUtils.scale).
        it "the major scale of DO should be [DO, RE, MI, FA, SOL, LA, SI]" do
          MusicUtils.scale(DO, MAJ_SCALE).should == [DO, RE, MI, FA, SOL, LA, SI]
        end
        it "the major scale of RE should be [RE, MI, FAS, SOL, LA, SI, DOS]" do
          MusicUtils.scale(RE, MAJ_SCALE).should == [RE, MI, FAS, SOL, LA, SI, DOS]
        end
        it "the major scale of MI should be [MI, FAS, SOLS, LA, SI, DOS, RES]" do
          MusicUtils.scale(MI, MAJ_SCALE).should == [MI, FAS, SOLS, LA, SI, DOS, RES]
        end
        it "the major scale of FA should be [FA, SOL, LA, SIF, DO, RE, MI]" do
          MusicUtils.scale(FA, MAJ_SCALE).should == [FA, SOL, LA, SIF, DO, RE, MI]
        end
        it "the major scale of SOL should be [SOL, LA, SI, DO, RE, MI, FAS]" do
          MusicUtils.scale(SOL, MAJ_SCALE).should == [SOL, LA, SI, DO, RE, MI, FAS]
        end
        it "the major scale of LA should be [LA, SI, DOS, RE, MI, FAS, SOLS]" do
          MusicUtils.scale(LA, MAJ_SCALE).should == [LA, SI, DOS, RE, MI, FAS, SOLS]
        end
        it "the major scale of SI should be [SI, DOS, RES, MI, FAS, SOLS, LAS]" do
          MusicUtils.scale(SI, MAJ_SCALE).should == [SI, DOS, RES, MI, FAS, SOLS, LAS]
        end
      end
      context "Sharped notes" do
        it "the major scale of DO# should be [DOS, RES, MIS, FAS, SOLS, LAS, SIS]" do
          MusicUtils.scale(DOS, MAJ_SCALE).should == [DOS, RES, MIS, FAS, SOLS, LAS, SIS]
        end
        it "the major scale of RE# should be [RES, MIS, FASS, SOLS, LAS, SIS, DOSS]" do
          MusicUtils.scale(RES, MAJ_SCALE).should == [RES, MIS, FASS, SOLS, LAS, SIS, DOSS]
        end
        it "the major scale of MI# should be [MIS, FASS, SOLSS, LAS, SIS, DOSS, RESS]" do
          MusicUtils.scale(MIS, MAJ_SCALE).should == [MIS, FASS, SOLSS, LAS, SIS, DOSS, RESS]
        end
        it "the major scale of FA# should be [FAS, SOLS, LAS, SI, DOS, RES, MIS]" do
          MusicUtils.scale(FAS, MAJ_SCALE).should == [FAS, SOLS, LAS, SI, DOS, RES, MIS]
        end
        it "the major scale of SOL# should be [SOLS, LAS, SIS, DOS, RES, MIS, FASS]" do
          MusicUtils.scale(SOLS, MAJ_SCALE).should == [SOLS, LAS, SIS, DOS, RES, MIS, FASS]
        end
        it "the major scale of LA# should be [LAS, SIS, DOSS, RES, MIS, FASS, SOLSS]" do
          MusicUtils.scale(LAS, MAJ_SCALE).should == [LAS, SIS, DOSS, RES, MIS, FASS, SOLSS]
        end
        it "the major scale of SI# should be [SIS, DOSS, RESS, MIS, FASS, SOLSS, LASS]" do
          MusicUtils.scale(SIS, MAJ_SCALE).should == [SIS, DOSS, RESS, MIS, FASS, SOLSS, LASS]
        end
      end
      context "Flatted notes" do
        it "the major scale of DOb should be [DOF, REF, MIF, FAF, SOLF, LAF, SIF]" do
          MusicUtils.scale(DOF, MAJ_SCALE).should == [DOF, REF, MIF, FAF, SOLF, LAF, SIF]
        end
        it "the major scale of REb should be [REF, MIF, FA, SOLF, LAF, SIF, DO]" do
          MusicUtils.scale(REF, MAJ_SCALE).should == [REF, MIF, FA, SOLF, LAF, SIF, DO]
        end
        it "the major scale of MIb should be [MIF, FA, SOL, LAF, SIF, DO, RE]" do
          MusicUtils.scale(MIF, MAJ_SCALE).should == [MIF, FA, SOL, LAF, SIF, DO, RE]
        end
        it "the major scale of FAb should be [FAF, SOLF, LAF, SIFF, DOF, REF, MIF]" do
          MusicUtils.scale(FAF, MAJ_SCALE).should == [FAF, SOLF, LAF, SIFF, DOF, REF, MIF]
        end
        it "the major scale of SOLb should be [SOLF, LAF, SIF, DOF, REF, MIF, FA]" do
          MusicUtils.scale(SOLF, MAJ_SCALE).should == [SOLF, LAF, SIF, DOF, REF, MIF, FA]
        end
        it "the major scale of LAb should be [LAF, SIF, DO, REF, MIF, FA, SOL]" do
          MusicUtils.scale(LAF, MAJ_SCALE).should == [LAF, SIF, DO, REF, MIF, FA, SOL]
        end
        it "the major scale of SIb should be [SIF, DO, RE, MIF, FA, SOL, LA]" do
          MusicUtils.scale(SIF, MAJ_SCALE).should == [SIF, DO, RE, MIF, FA, SOL, LA]
        end
      end
    end
    # Sibling of "Major scale" now (it was accidentally nested inside it).
    context "Natural Minor scale" do
      # Natural minor interval pattern: [2, 1, 2, 2, 1, 2]
      context "Natural notes" do
        it "the natural minor scale of DO should be [DO, RE, MIF, FA, SOL, LAF, SIF]" do
          MusicUtils.scale(DO, NATURAL_MIN_SCALE).should == [DO, RE, MIF, FA, SOL, LAF, SIF]
        end
        it "the natural minor scale of RE should be [RE, MI, FA, SOL, LA, SIF, DO]" do
          MusicUtils.scale(RE, NATURAL_MIN_SCALE).should == [RE, MI, FA, SOL, LA, SIF, DO]
        end
        it "the natural minor scale of MI should be [MI, FAS, SOL, LA, SI, DO, RE]" do
          MusicUtils.scale(MI, NATURAL_MIN_SCALE).should == [MI, FAS, SOL, LA, SI, DO, RE]
        end
        it "the natural minor scale of FA should be [FA, SOL, LAF, SIF, DO, REF, MIF]" do
          MusicUtils.scale(FA, NATURAL_MIN_SCALE).should == [FA, SOL, LAF, SIF, DO, REF, MIF]
        end
        it "the natural minor scale of SOL should be [SOL, LA, SIF, DO, RE, MIF, FA]" do
          MusicUtils.scale(SOL, NATURAL_MIN_SCALE).should == [SOL, LA, SIF, DO, RE, MIF, FA]
        end
        it "the natural minor scale of LA should be [LA, SI, DO, RE, MI, FA, SOL]" do
          MusicUtils.scale(LA, NATURAL_MIN_SCALE).should == [LA, SI, DO, RE, MI, FA, SOL]
        end
        it "the natural minor scale of SI should be [SI, DOS, RE, MI, FAS, SOL, LA]" do
          MusicUtils.scale(SI, NATURAL_MIN_SCALE).should == [SI, DOS, RE, MI, FAS, SOL, LA]
        end
      end
      context "Sharped notes" do
        it "the natural minor scale of DO# should be [DOS, RES, MI, FAS, SOLS, LA, SI]" do
          MusicUtils.scale(DOS, NATURAL_MIN_SCALE).should == [DOS, RES, MI, FAS, SOLS, LA, SI]
        end
        it "the natural minor scale of RE# should be [RES, MIS, FAS, SOLS, LAS, SI, DOS]" do
          MusicUtils.scale(RES, NATURAL_MIN_SCALE).should == [RES, MIS, FAS, SOLS, LAS, SI, DOS]
        end
        it "the natural minor scale of MI# should be [MIS, FASS, SOLS, LAS, SIS, DOS, RES]" do
          MusicUtils.scale(MIS, NATURAL_MIN_SCALE).should == [MIS, FASS, SOLS, LAS, SIS, DOS, RES]
        end
        it "the natural minor scale of FA# should be [FAS, SOLS, LA, SI, DOS, RE, MI]" do
          MusicUtils.scale(FAS, NATURAL_MIN_SCALE).should == [FAS, SOLS, LA, SI, DOS, RE, MI]
        end
        it "the natural minor scale of SOL# should be [SOLS, LAS, SI, DOS, RES, MI, FAS]" do
          MusicUtils.scale(SOLS, NATURAL_MIN_SCALE).should == [SOLS, LAS, SI, DOS, RES, MI, FAS]
        end
        it "the natural minor scale of LA# should be [LAS, SIS, DOS, RES, MIS, FAS, SOLS]" do
          MusicUtils.scale(LAS, NATURAL_MIN_SCALE).should == [LAS, SIS, DOS, RES, MIS, FAS, SOLS]
        end
        it "the natural minor scale of SI# should be [SIS, DOSS, RES, MIS, FASS, SOLS, LAS]" do
          MusicUtils.scale(SIS, NATURAL_MIN_SCALE).should == [SIS, DOSS, RES, MIS, FASS, SOLS, LAS]
        end
      end
      context "Flatted notes" do
        it "the natural minor scale of DOb should be [DOF, REF, MIFF, FAF, SOLF, LAFF, SIFF]" do
          MusicUtils.scale(DOF, NATURAL_MIN_SCALE).should == [DOF, REF, MIFF, FAF, SOLF, LAFF, SIFF]
        end
        it "the natural minor scale of REb should be [REF, MIF, FAF, SOLF, LAF, SIFF, DOF]" do
          MusicUtils.scale(REF, NATURAL_MIN_SCALE).should == [REF, MIF, FAF, SOLF, LAF, SIFF, DOF]
        end
        it "the natural minor scale of MIb should be [MIF, FA, SOLF, LAF, SIF, DOF, REF]" do
          MusicUtils.scale(MIF, NATURAL_MIN_SCALE).should == [MIF, FA, SOLF, LAF, SIF, DOF, REF]
        end
        it "the natural minor scale of FAb should be [FAF, SOLF, LAFF, SIFF, DOF, REFF, MIFF]" do
          MusicUtils.scale(FAF, NATURAL_MIN_SCALE).should == [FAF, SOLF, LAFF, SIFF, DOF, REFF, MIFF]
        end
        it "the natural minor scale of SOLb should be [SOLF, LAF, SIFF, DOF, REF, MIFF, FAF]" do
          MusicUtils.scale(SOLF, NATURAL_MIN_SCALE).should == [SOLF, LAF, SIFF, DOF, REF, MIFF, FAF]
        end
        it "the natural minor scale of LAb should be [LAF, SIF, DOF, REF, MIF, FAF, SOLF]" do
          MusicUtils.scale(LAF, NATURAL_MIN_SCALE).should == [LAF, SIF, DOF, REF, MIF, FAF, SOLF]
        end
        it "the natural minor scale of SIb should be [SIF, DO, REF, MIF, FA, SOLF, LAF]" do
          MusicUtils.scale(SIF, NATURAL_MIN_SCALE).should == [SIF, DO, REF, MIF, FA, SOLF, LAF]
        end
      end
    end
  end
end
|
# Mixin of shared game-loop helpers: player persistence, stat display,
# dice rolls, interactive combat, and random encounters.
# Assumes the including class sets @player (a Knight or Wizard instance)
# before most of these methods are called -- TODO confirm against callers.
module GameMechanics
# Path to the JSON save file, relative to the process working directory.
# NOTE(review): @@ class variables are shared across the whole inheritance
# tree; a class-instance variable would be safer.
@@save_file = '../lib/save_game.json'
# Print the input prompt marker (no trailing newline).
def prompt
print ">> "
end
# Loop until the user types "yes" or "no" (case-insensitive); the
# normalized answer is stored in @game_select for the caller to inspect.
def yes_no
#restrict input to valid answers, but don't worry about case
begin
puts "Please enter [yes] or [no]:"
prompt; @game_select = STDIN.gets.chomp.downcase
end while not (@game_select == "yes" or @game_select == "no")
end
# Recompute the player's level from accumulated XP, then persist the
# player's stats to @@save_file as JSON (overwriting any previous save).
# NOTE(review): XP above 15000 matches no branch and leaves @player.lvl
# unchanged -- confirm whether 6 is the intended level cap.
def save_data
case
when (0..1000).include?(@player.xp)
@player.lvl = 1
when (1001..2500).include?(@player.xp)
@player.lvl = 2
when (2501..5000).include?(@player.xp)
@player.lvl = 3
when (5001..8000).include?(@player.xp)
@player.lvl = 4
when (8001..11500).include?(@player.xp)
@player.lvl = 5
when (11501..15000).include?(@player.xp)
@player.lvl = 6
end
# Only these fields are persisted; derived stats (hp/mana/dmg) are
# rebuilt from the level in load_data.
save_info = {
role: @player.class,
cur_hp: @player.cur_hp,
cur_mana: @player.cur_mana,
xp: @player.xp,
lvl: @player.lvl,
coin: @player.coin,
name: @player.name
}
File.open(@@save_file, "w") do |f|
f.write(save_info.to_json)
end
end
# Rebuild @player from the JSON save file written by save_data and
# return it. Raises Errno::ENOENT when no save file exists.
def load_data
load_info = JSON.parse(File.read(@@save_file))
# 'role' holds the class name that save_data serialized.
# NOTE(review): any other role leaves @player nil, so the assignments
# below would raise NoMethodError -- confirm only two roles exist.
role = load_info['role']
if role == "Knight"
@player = Knight.new
elsif role == "Wizard"
@player = Wizard.new
end
# Set stats based off information in load_info
@player.lvl = load_info['lvl']
@player.xp = load_info['xp']
@player.coin = load_info['coin']
@player.name = load_info['name']
@player.cur_hp = load_info['cur_hp']
@player.cur_mana = load_info['cur_mana']
# Adjust stats based off player level
@player.hp = @player.hp*@player.lvl
@player.mana = @player.mana*@player.lvl
@player.dmg = @player.dmg*@player.lvl
@player
# I was trying to do the above assignments with iteration, there has to be a way!
# load_info.each do |attribute, value|
# @player.#{attribute} = value unless attribute == "role"
# end
end
# Refill current HP and mana to their maximums, then persist the change.
def restore_player
@player.cur_hp = @player.hp
@player.cur_mana = @player.mana
save_data
end
# Decorative header line for the stat display.
def bar_top
"_"*27 + " STATS " + "_"*27
end
# One-line summary of the player's visible stats.
def stat_bar name, xp, lvl, coin, cur_hp, cur_mana
" Name: #{name} | XP: #{xp} | Lvl: #{lvl} | Coin: #{coin} | HP: #{cur_hp} | Mana: #{cur_mana}"
end
# Decorative separator line matching bar_top's width.
def bar_low
"-"*61
end
# Death handler: print a farewell and terminate the whole program.
def player_croaks
puts # formatting
puts "It happens to the best of us #{@player.name}."
puts "Fortunately for you, the game of Destiny never ends."
puts "The game will exit now and you can restart in town."
puts # formatting
puts "Better luck next time, eh?"
exit
end
# Run an interactive combat loop against bad_guy (a monster CLASS, not an
# instance); the opponent is scaled to the player's level. Returns when
# the fight ends (victory or escape); exits via player_croaks on death.
def combat bad_guy
# create an opponent
@bad_guy = bad_guy.new
# scale power of opponent to level of player
@bad_guy.cur_hp = @bad_guy.hp*@player.lvl
@bad_guy.cur_mana = @bad_guy.mana*@player.lvl
@bad_guy.dmg = @bad_guy.dmg*@player.lvl
puts @bad_guy.name + " says, you kill my father, now you will die!!" unless (@bad_guy.name == "ROUS" or @bad_guy.name == "Skeleton")
move = 0
until move == "2"
# Menu loop: repeat until the player picks a valid action ("1" or "2").
begin
puts # formatting
puts bar_low + "--"
puts " #{@player.name} - HP: #{@player.cur_hp} - Mana: #{@player.cur_mana} | - VS - | #{@bad_guy.name} - HP: #{@bad_guy.cur_hp} - Mana: #{@bad_guy.cur_mana}"
puts bar_low + "--"
puts # formatting
puts "#{@bad_guy.name} vs. #{@player.name}, what will you do?"
puts "[1]. Attack."
puts "[2]. Run."
prompt; move = gets.chomp
end while not (move == "1" or move == "2")
case
when move == "1"
# Attack: damage formula depends on the player's class.
puts # formatting
if @player.class.to_s == "Knight"
puts "#{@player.name} swings the mighty sword at the #{@bad_guy.name}."
puts # formatting
dmg_mod = (@player.str-10)/2 # knights use their str for damage mod
@dmg_dlt = dice(@player.dmg) + dmg_mod
elsif @player.class.to_s == "Wizard"
begin
puts "How many magic darts will you shoot?"
puts "[1]."
puts "[2]."
puts "[3]."
prompt; darts = gets.chomp.to_i
end while not (darts == 1 or darts == 2 or darts == 3)
puts # formatting
puts "#{@player.name} conjures #{darts} magic darts that zip toward the #{@bad_guy.name}."
dmg_mod = (@player.int-10)/2 # wizards use their int for damage mod
@dmg_dlt = dice(@player.dmg) + darts*@player.lvl + dmg_mod# more darts more damage, scales with level
@player.cur_mana = @player.cur_mana - darts*@player.lvl # more darts more mana spent, scales with level
end
# Enemy dodge roll: higher agility and dodge widen the miss window.
miss_chance = dice(100)
agi_boost = (@bad_guy.agi-10)*2 + @bad_guy.dodge
if (1..agi_boost).include?(miss_chance)
puts @bad_guy.name + " jumps out of the way, avoiding being hit by " + @player.name + "!"
puts # formatting
else
# Armor absorbs a quarter of its value; damage never goes below zero.
@dmg_dlt = @dmg_dlt - @bad_guy.armor/4
@dmg_dlt = 0 if @dmg_dlt < 1
puts #formatting
puts "You deal #{@dmg_dlt} damage to the #{@bad_guy.name}." unless @dmg_dlt < 1
puts # formatting
@bad_guy.cur_hp = @bad_guy.cur_hp - @dmg_dlt
end
if @bad_guy.cur_hp <= 0
puts "You have slain the #{@bad_guy.name} and won the day!"
# rewards for winning the battle!
@player.xp = @player.xp + @bad_guy.xp
@player.coin = @player.coin + @bad_guy.coin
save_data
return
else
# Enemy counter-attack, mirroring the player's dodge/armor math above.
puts "#{@bad_guy.name} viciously attacks #{@player.name}!"
puts # formatting
miss_chance = dice(100)
agi_boost = (@player.agi-10)*2 + @player.dodge
if (1..agi_boost).include?(miss_chance)
puts @player.name + " totally leaps out of the way, avoiding being hit by " + @bad_guy.name + "!"
else
dmg_taken = dice(@bad_guy.dmg) - @player.armor/4
dmg_taken = 0 if dmg_taken < 1
@player.cur_hp = @player.cur_hp - dmg_taken
puts "#{@bad_guy.name} hits YOU for #{dmg_taken} damage!" unless dmg_taken < 1
puts "OUCH!" unless dmg_taken < 1
end
puts #formatting
end
if @player.cur_hp <= 0
puts "You were killed by the #{@bad_guy.name}."
puts "Killed dead."
player_croaks
end
when move == "2"
# Flee: 80% chance of a clean escape, otherwise suffer one free enemy
# attack before getting away.
puts # formatting
puts "Sometimes the right thing to do is run."
puts "This is one of those times."
puts # formatting
puts "You shout what is that? and point frantically in the opposite direction."
puts "The #{@bad_guy.name} turns to look and you high tail it away!"
puts # formatting
run_away = dice(10)
case
when (1..8).include?(run_away)
# you got away this time
puts "You escape from the #{@bad_guy.name} while it foolishly looks away."
when (9..10).include?(run_away)
# not so lucky this time
puts "#{@bad_guy.name} says, do you think I was spawned yesterday?"
puts # formatting
puts "#{@bad_guy.name} viciously attacks #{@player.name}!"
puts # formatting
miss_chance = dice(100)
agi_boost = (@player.agi-10)*2 + @player.dodge
if (1..agi_boost).include?(miss_chance)
puts @player.name + " totally leaps out of the way, avoiding being hit by " + @bad_guy.name + "!"
else
dmg_taken = dice(@bad_guy.dmg) - @player.armor/4
dmg_taken = 0 if dmg_taken < 1
@player.cur_hp = @player.cur_hp - dmg_taken
puts "#{@bad_guy.name} hits YOU for #{dmg_taken} damage!" unless dmg_taken < 1
puts "OUCH!" unless dmg_taken < 1
end
puts #formatting
if @player.cur_hp <= 0
puts "You knew when to fold em, but the #{@bad_guy.name} got the better of you anyway."
player_croaks
end
puts "You manage to accidentally stick a boot firmly in the #{@bad_guy.name}'s face"
puts "allowing you to escape!"
puts # formatting
end
save_data
return
end
end
end
# Roll a die with the given number of sides (default 6), returning a value
# in 1..sides. If a block is given, the roll is yielded to it and the
# block's result is returned instead.
def dice(sides=6,&block)
if block_given?
block.call(rand(1..sides))
else
rand(1..sides)
end
end
# Roll a d20 and dispatch one wandering event: flavor text, a coin find
# (level-scaled XP and coin reward), one of four monster fights, or a
# stumble that deals level-scaled damage (and can kill the player).
def random_encounter
chance = dice(20)
case
when (1..2).include?(chance)
puts # formatting
puts "You get the feeling you are being watched..."
puts # formatting
when (3..4).include?(chance)
puts # formatting
puts "You notice a coin stuck in the dirt, pry it"
puts "loose, and place the coin in your wallet."
puts # formatting
puts "Today must be your lucky day, #{@player.name}!"
@player.xp = @player.xp + @player.lvl*100
@player.coin = @player.coin + @player.lvl*2
puts # formatting
when (5..8).include?(chance)
puts #format
puts "A small goblin springs from the shadows and attacks!!"
puts #format
combat(Goblin)
when (9..11).include?(chance)
puts #format
puts "You hear squeeking sounds. BIG squeeking sounds!"
puts #format
combat(GiantRat)
when (12..15).include?(chance)
puts #format
puts "A kobold peers out of a hole in the wall and then snarls."
puts #format
combat(Kobold)
when (16..18).include?(chance)
puts #format
puts "Although you have never heard bones scrape across a floor"
puts "before, you know without a doubt what approaches..."
puts #format
combat(Skeleton)
when (19..20).include?(chance)
puts # formatting
# Pick a random body part for flavor; damage equals the player level.
trip_event = dice(3)
trip_part = "knee" if trip_event == 1
trip_part = "elbow" if trip_event == 2
trip_part = "hands" if trip_event == 3
trip_damage = @player.lvl
puts "You stumble and scrape your #{trip_part}."
puts "You take #{trip_damage} damage."
puts # formatting
@player.cur_hp = @player.cur_hp - trip_damage
if @player.cur_hp <= 0
puts "You have tripped and died."
player_croaks
end
end
end
end
Fixed a bug with the wizard where it would let you cast as many darts as you wanted whether you had the mana or not. Now it checks and adjusts the menu accordingly. It will always let you fire one, no matter what, though.
module GameMechanics
  # Shared game plumbing mixed into the game's classes: prompting, JSON
  # save/load, XP-to-level mapping, the interactive combat loop, dice
  # rolls, and random encounters. Expects the includer to provide
  # @player (a Knight or Wizard); combat additionally sets @bad_guy.
  #
  # NOTE(review): @@save_file is a class variable shared across every
  # includer in the inheritance tree; a frozen constant would be safer,
  # but the name is kept as-is for compatibility.
  @@save_file = '../lib/save_game.json'

  # Print the input prompt marker (no trailing newline).
  def prompt
    print ">> "
  end

  # Loop until the user types "yes" or "no" (case-insensitive); the
  # normalized answer is stored in @game_select for the caller.
  def yes_no
    # restrict input to valid answers, but don't worry about case
    begin
      puts "Please enter [yes] or [no]:"
      prompt; @game_select = STDIN.gets.chomp.downcase
    end while not (@game_select == "yes" or @game_select == "no")
  end

  # Recompute the player's level from accumulated XP, then persist the
  # player's state as JSON to @@save_file. XP above 15000 leaves the
  # level unchanged (level 6 is the cap).
  def save_data
    case @player.xp
    when 0..1000      then @player.lvl = 1
    when 1001..2500   then @player.lvl = 2
    when 2501..5000   then @player.lvl = 3
    when 5001..8000   then @player.lvl = 4
    when 8001..11500  then @player.lvl = 5
    when 11501..15000 then @player.lvl = 6
    end
    save_info = {
      role: @player.class,
      cur_hp: @player.cur_hp,
      cur_mana: @player.cur_mana,
      xp: @player.xp,
      lvl: @player.lvl,
      coin: @player.coin,
      name: @player.name
    }
    File.open(@@save_file, "w") do |f|
      f.write(save_info.to_json)
    end
  end

  # Rebuild @player from the JSON save file: instantiate the saved class,
  # restore persisted stats, then rescale max hp/mana/dmg by level.
  # Returns the restored @player.
  def load_data
    load_info = JSON.parse(File.read(@@save_file))
    role = load_info['role']
    if role == "Knight"
      @player = Knight.new
    elsif role == "Wizard"
      @player = Wizard.new
    end
    # Set stats based off information in load_info
    @player.lvl = load_info['lvl']
    @player.xp = load_info['xp']
    @player.coin = load_info['coin']
    @player.name = load_info['name']
    @player.cur_hp = load_info['cur_hp']
    @player.cur_mana = load_info['cur_mana']
    # Adjust stats based off player level
    @player.hp = @player.hp*@player.lvl
    @player.mana = @player.mana*@player.lvl
    @player.dmg = @player.dmg*@player.lvl
    @player
    # The iteration the original author was looking for would be:
    #   load_info.each do |attribute, value|
    #     @player.public_send("#{attribute}=", value) unless attribute == "role"
    #   end
  end

  # Refill current HP/mana to their maximums and persist.
  def restore_player
    @player.cur_hp = @player.hp
    @player.cur_mana = @player.mana
    save_data
  end

  # Decorative header line for the stats display.
  def bar_top
    "_"*27 + " STATS " + "_"*27
  end

  # One-line summary of the player's current stats.
  def stat_bar name, xp, lvl, coin, cur_hp, cur_mana
    " Name: #{name} | XP: #{xp} | Lvl: #{lvl} | Coin: #{coin} | HP: #{cur_hp} | Mana: #{cur_mana}"
  end

  # Decorative footer line for the stats display.
  def bar_low
    "-"*61
  end

  # Death handler: print a consolation and exit the process. The save
  # file is NOT updated here, so the player restarts from the last save.
  def player_croaks
    puts # formatting
    puts "It happens to the best of us #{@player.name}."
    puts "Fortunately for you, the game of Destiny never ends."
    puts "The game will exit now and you can restart in town."
    puts # formatting
    puts "Better luck next time, eh?"
    exit
  end

  # Run the interactive combat loop against a fresh instance of bad_guy
  # (a monster class such as Goblin). The monster's stats are scaled to
  # the player's level. Returns after victory or a successful escape;
  # player death exits the game via player_croaks.
  def combat bad_guy
    # create an opponent
    @bad_guy = bad_guy.new
    # scale power of opponent to level of player
    @bad_guy.cur_hp = @bad_guy.hp*@player.lvl
    @bad_guy.cur_mana = @bad_guy.mana*@player.lvl
    @bad_guy.dmg = @bad_guy.dmg*@player.lvl
    puts @bad_guy.name + " says, you kill my father, now you will die!!" unless (@bad_guy.name == "Giant Rat" or @bad_guy.name == "Skeleton")
    move = 0
    until move == "2"
      # Re-prompt until the player picks a valid action.
      begin
        puts # formatting
        puts bar_low + "--"
        puts " #{@player.name} - HP: #{@player.cur_hp} - Mana: #{@player.cur_mana} | - VS - | #{@bad_guy.name} - HP: #{@bad_guy.cur_hp} - Mana: #{@bad_guy.cur_mana}"
        puts bar_low + "--"
        puts # formatting
        puts "#{@bad_guy.name} vs. #{@player.name}, what will you do?"
        puts "[1]. Attack."
        puts "[2]. Run."
        prompt; move = gets.chomp
      end while not (move == "1" or move == "2")
      case
      when move == "1"
        puts # formatting
        if @player.class.to_s == "Knight"
          puts "#{@player.name} swings the mighty sword at the #{@bad_guy.name}."
          puts # formatting
          dmg_mod = (@player.str-10)/2 # knights use their str for damage mod
          @dmg_dlt = dice(@player.dmg) + dmg_mod
        elsif @player.class.to_s == "Wizard"
          # Offer only the dart counts the wizard can afford; one dart is
          # always allowed even at zero mana.
          begin
            puts "How many magic darts will you shoot?"
            puts "[1]."
            puts "[2]." if @player.cur_mana - 2*@player.lvl >= 0
            puts "[3]." if @player.cur_mana - 3*@player.lvl >= 0
            prompt; darts = gets.chomp.to_i
            # Force a re-prompt (4 is never valid) when the chosen count
            # costs more mana than the wizard has.
            darts = 4 if darts == 2 and @player.cur_mana - 2*@player.lvl < 0
            darts = 4 if darts == 3 and @player.cur_mana - 3*@player.lvl < 0
          end while not (darts == 1 or darts == 2 or darts == 3)
          puts # formatting
          # BUGFIX: the plural line used to print unconditionally, so
          # casting a single dart printed both messages.
          puts "#{@player.name} conjures #{darts} magic dart that zips toward the #{@bad_guy.name}." if darts == 1
          puts "#{@player.name} conjures #{darts} magic darts that zip toward the #{@bad_guy.name}." if darts > 1
          dmg_mod = (@player.int-10)/2 # wizards use their int for damage mod
          @dmg_dlt = dice(@player.dmg) + darts*@player.lvl + dmg_mod # more darts more damage, scales with level
          @player.cur_mana = @player.cur_mana - darts*@player.lvl # more darts more mana spent, scales with level
          # prevent negative mana, but always allow wizard to shoot at least one dart, no matter what
          @player.cur_mana = 0 if @player.cur_mana < 0
        end
        # Monster dodge check: agility and dodge convert to a percentile window.
        miss_chance = dice(100)
        agi_boost = (@bad_guy.agi-10)*2 + @bad_guy.dodge
        if (1..agi_boost).include?(miss_chance)
          puts @bad_guy.name + " jumps out of the way, avoiding being hit by " + @player.name + "!"
          puts # formatting
        else
          @dmg_dlt = @dmg_dlt - @bad_guy.armor/4
          @dmg_dlt = 0 if @dmg_dlt < 1
          puts # formatting
          puts "You deal #{@dmg_dlt} damage to the #{@bad_guy.name}." unless @dmg_dlt < 1
          puts # formatting
          @bad_guy.cur_hp = @bad_guy.cur_hp - @dmg_dlt
        end
        if @bad_guy.cur_hp <= 0
          puts "You have slain the #{@bad_guy.name} and won the day!"
          # rewards for winning the battle!
          @player.xp = @player.xp + @bad_guy.xp
          @player.coin = @player.coin + @bad_guy.coin
          save_data
          return
        else
          # Monster's counterattack, mirroring the player's dodge check.
          puts "#{@bad_guy.name} viciously attacks #{@player.name}!"
          puts # formatting
          miss_chance = dice(100)
          agi_boost = (@player.agi-10)*2 + @player.dodge
          if (1..agi_boost).include?(miss_chance)
            puts @player.name + " totally leaps out of the way, avoiding being hit by " + @bad_guy.name + "!"
          else
            dmg_taken = dice(@bad_guy.dmg) - @player.armor/4
            dmg_taken = 0 if dmg_taken < 1
            @player.cur_hp = @player.cur_hp - dmg_taken
            puts "#{@bad_guy.name} hits YOU for #{dmg_taken} damage!" unless dmg_taken < 1
            puts "OUCH!" unless dmg_taken < 1
          end
          puts # formatting
        end
        if @player.cur_hp <= 0
          puts "You were killed by the #{@bad_guy.name}."
          puts "Killed dead."
          player_croaks
        end
      when move == "2"
        puts # formatting
        puts "Sometimes the right thing to do is run."
        puts "This is one of those times."
        puts # formatting
        puts "You shout what is that? and point frantically in the opposite direction."
        puts "The #{@bad_guy.name} turns to look and you high tail it away!"
        puts # formatting
        # 80% chance of a clean getaway; otherwise eat one parting attack.
        run_away = dice(10)
        case
        when (1..8).include?(run_away)
          # you got away this time
          puts "You escape from the #{@bad_guy.name} while it foolishly looks away."
        when (9..10).include?(run_away)
          # not so lucky this time
          puts "#{@bad_guy.name} says, do you think I was spawned yesterday?"
          puts # formatting
          puts "#{@bad_guy.name} viciously attacks #{@player.name}!"
          puts # formatting
          miss_chance = dice(100)
          agi_boost = (@player.agi-10)*2 + @player.dodge
          if (1..agi_boost).include?(miss_chance)
            puts @player.name + " totally leaps out of the way, avoiding being hit by " + @bad_guy.name + "!"
          else
            dmg_taken = dice(@bad_guy.dmg) - @player.armor/4
            dmg_taken = 0 if dmg_taken < 1
            @player.cur_hp = @player.cur_hp - dmg_taken
            puts "#{@bad_guy.name} hits YOU for #{dmg_taken} damage!" unless dmg_taken < 1
            puts "OUCH!" unless dmg_taken < 1
          end
          puts # formatting
          if @player.cur_hp <= 0
            puts "You knew when to fold em, but the #{@bad_guy.name} got the better of you anyway."
            player_croaks
          end
          puts "You manage to accidentally stick a boot firmly in the #{@bad_guy.name}'s face"
          puts "allowing you to escape!"
          puts # formatting
        end
        save_data
        return
      end
    end
  end

  # Roll a die with the given number of sides (default 6). When a block
  # is supplied, the roll is yielded and the block's result is returned.
  def dice(sides=6,&block)
    if block_given?
      block.call(rand(1..sides))
    else
      rand(1..sides)
    end
  end

  # One random overworld event, chosen by a d20 roll: atmosphere text,
  # a small XP/coin find, one of four monster fights, or a trip that
  # deals minor damage. Rewards and trip damage scale with @player.lvl.
  def random_encounter
    chance = dice(20)
    case
    when (1..2).include?(chance)
      puts # formatting
      puts "You get the feeling you are being watched..."
      puts # formatting
    when (3..4).include?(chance)
      puts # formatting
      puts "You notice a coin stuck in the dirt, pry it"
      puts "loose, and place the coin in your wallet."
      puts # formatting
      puts "Today must be your lucky day, #{@player.name}!"
      @player.xp = @player.xp + @player.lvl*100
      @player.coin = @player.coin + @player.lvl*2
      puts # formatting
    when (5..8).include?(chance)
      puts # formatting
      puts "A small goblin springs from the shadows and attacks!!"
      puts # formatting
      combat(Goblin)
    when (9..11).include?(chance)
      puts # formatting
      puts "You hear squeeking sounds. BIG squeeking sounds!"
      puts # formatting
      combat(GiantRat)
    when (12..15).include?(chance)
      puts # formatting
      puts "A kobold peers out of a hole in the wall and then snarls."
      puts # formatting
      combat(Kobold)
    when (16..18).include?(chance)
      puts # formatting
      puts "Although you have never heard bones scrape across a floor"
      puts "before, you know without a doubt what approaches..."
      puts # formatting
      combat(Skeleton)
    when (19..20).include?(chance)
      puts # formatting
      # Pick which body part gets scraped; damage equals player level.
      trip_event = dice(3)
      trip_part = "knee" if trip_event == 1
      trip_part = "elbow" if trip_event == 2
      trip_part = "hands" if trip_event == 3
      trip_damage = @player.lvl
      puts "You stumble and scrape your #{trip_part}."
      puts "You take #{trip_damage} damage."
      puts # formatting
      @player.cur_hp = @player.cur_hp - trip_damage
      if @player.cur_hp <= 0
        puts "You have tripped and died."
        player_croaks
      end
    end
  end
end
# Controller specs for SignUpSheetController. Every model lookup is
# stubbed, so these examples exercise controller logic only; many
# examples are still pending (description without a body).
describe SignUpSheetController do
  let(:assignment) { build(:assignment, id: 1, instructor_id: 6, due_dates: [due_date], microtask: true, staggered_deadline: true) }
  let(:instructor) { build(:instructor, id: 6) }
  let(:student) { build(:student, id: 8) }
  let(:participant) { build(:participant, id: 1, user_id: 6, assignment: assignment) }
  let(:topic) { build(:topic, id: 1) }
  let(:signed_up_team) { build(:signed_up_team, team: team, topic: topic) }
  let(:signed_up_team2) { build(:signed_up_team, team_id: 2, is_waitlisted: true) }
  let(:team) { build(:assignment_team, id: 1, assignment: assignment) }
  let(:due_date) { build(:assignment_due_date, deadline_type_id: 1) }
  let(:due_date2) { build(:assignment_due_date, deadline_type_id: 2) }
  let(:bid) { Bid.new(topic_id: 1, priority: 1) }
  let(:team_user) { build(:team_user) }
  # Common stubs: lookups by both String and Integer ids, logged-in
  # instructor, and an empty submission state for the participant's team.
  before(:each) do
    allow(Assignment).to receive(:find).with('1').and_return(assignment)
    allow(Assignment).to receive(:find).with(1).and_return(assignment)
    stub_current_user(instructor, instructor.role.name, instructor.role)
    allow(SignUpTopic).to receive(:find).with('1').and_return(topic)
    allow(Participant).to receive(:find_by).with(id: '1').and_return(participant)
    allow(AssignmentParticipant).to receive(:find).with('1').and_return(participant)
    allow(AssignmentParticipant).to receive(:find).with(1).and_return(participant)
    allow(AssignmentParticipant).to receive(:find_by).with(user_id: student.id, parent_id: 1).and_return(participant)
    allow(Team).to receive(:find).with('1').and_return(team)
    allow(participant).to receive(:team).and_return(team)
    allow(participant.team).to receive(:submitted_files).and_return([])
    allow(participant.team).to receive(:hyperlinks).and_return([])
    allow(TeamsUser).to receive(:find_by).with(team_id: 1).and_return(team_user)
    allow(team_user).to receive(:user).and_return(student)
  end
  describe '#new' do
    it 'builds a new sign up topic and renders sign_up_sheet#new page'
  end
  describe '#create' do
    context 'when topic cannot be found' do
      context 'when new topic can be saved successfully' do
        it 'sets up a new topic and redirects to assignment#edit page'
      end
      context 'when new topic cannot be saved successfully' do
        it 'sets up a new topic and renders sign_up_sheet#new page'
      end
    end
    context 'when topic can be found' do
      it 'updates the existing topic and redirects to sign_up_sheet#add_signup_topics_staggered page'
    end
  end
  describe '#destroy' do
    context 'when topic can be found' do
      it 'redirects to assignment#edit page'
    end
    context 'when topic cannot be found' do
      it 'shows an error flash message and redirects to assignment#edit page'
    end
  end
  describe '#edit' do
    it 'renders sign_up_sheet#edit page'
  end
  describe '#update' do
    context 'when topic cannot be found' do
      it 'shows an error flash message and redirects to assignment#edit page'
    end
    context 'when topic can be found' do
      it 'updates current topic and redirects to assignment#edit page'
    end
  end
  describe '#list' do
    context 'when current assignment is intelligent assignment and has submission duedate (deadline_type_id 1)' do
      it 'renders sign_up_sheet#intelligent_topic_selection page'
    end
    # NOTE(review): description duplicated from the context above — this
    # one presumably covers the negated condition; confirm and rename.
    context 'when current assignment is intelligent assignment and has submission duedate (deadline_type_id 1)' do
      it 'renders sign_up_sheet#list page'
    end
  end
  describe '#sign_up' do
    context 'when SignUpSheet.signup_team method return nil' do
      it 'shows an error flash message and redirects to sign_up_sheet#list page'
    end
  end
  describe '#signup_as_instructor_action' do
    context 'when user cannot be found' do
      it 'shows an flash error message and redirects to assignment#edit page'
    end
    # NOTE(review): copy-paste — given the nested contexts, this should
    # read 'when user can be found'.
    context 'when user cannot be found' do
      context 'when an assignment_participant can be found' do
        context 'when creating team related objects successfully' do
          it 'shows a flash success message and redirects to assignment#edit page'
        end
        context 'when creating team related objects unsuccessfully' do
          it 'shows a flash error message and redirects to assignment#edit page'
        end
      end
      # NOTE(review): copy-paste — should read 'when an
      # assignment_participant cannot be found'.
      context 'when an assignment_participant can be found' do
        it 'shows a flash error message and redirects to assignment#edit page'
      end
    end
  end
  describe '#delete_signup' do
    let(:params) { { id: 1, topic_id: 1 } }
    context 'when either submitted files or hyperlinks of current team are not empty' do
      it 'shows a flash error message and redirects to sign_up_sheet#list page' do
        allow(participant.team).to receive(:submitted_files).and_return(['file'])
        # Renamed from `expect`: a local named `expect` shadows RSpec's
        # expect helper and is needlessly confusing.
        verify = proc {
          delete :delete_signup, params
          expect(flash.now[:error]).to eq("You have already submitted your work, so you are not allowed to drop your topic.")
          expect(response).to redirect_to(action: 'list', id: params[:id])
        }
        verify.call
        allow(participant.team).to receive(:hyperlinks).and_return(['link'])
        verify.call
        allow(participant.team).to receive(:submitted_files).and_return([])
        verify.call
      end
    end
    context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is not nil and its due date has already passed' do
      it 'shows a flash error message and redirects to sign_up_sheet#list page' do
        allow(due_date).to receive(:due_at).and_return(Time.now - 1.day)
        allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return(due_date)
        delete :delete_signup, params
        expect(flash.now[:error]).to eq("You cannot drop your topic after the drop topic deadline!")
        expect(response).to redirect_to(action: 'list', id: params[:id])
      end
    end
    context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is nil' do
      let(:session) { { user: student } }
      it 'shows a flash success message and redirects to sign_up_sheet#list page' do
        allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return nil
        allow(SignedUpTeam).to receive(:find_team_users).with(participant.assignment.id, session[:user].id).and_return([signed_up_team])
        allow(signed_up_team).to receive(:t_id).and_return(1)
        allow(SignedUpTeam).to receive_message_chain(:where, :first).with(topic_id: session[:topic_id], team_id: signed_up_team.t_id).with(no_args).and_return(signed_up_team2)
        delete :delete_signup, params, session
        expect(flash.now[:success]).to eq("You have successfully dropped your topic!")
        expect(response).to redirect_to(action: 'list', id: params[:id])
      end
    end
  end
  describe '#delete_signup_as_instructor' do
    let(:params) { { id: 1, topic_id: 1 } }
    context 'when either submitted files or hyperlinks of current team are not empty' do
      it 'shows a flash error message and redirects to assignment#edit page' do
        allow(participant.team).to receive(:submitted_files).and_return(['file'])
        # Renamed from `expect`: a local named `expect` shadows RSpec's
        # expect helper and is needlessly confusing.
        verify = proc {
          delete :delete_signup_as_instructor, params
          expect(flash.now[:error]).to eq("The student has already submitted their work, so you are not allowed to remove them.")
          expect(response).to redirect_to controller: 'assignments', action: 'edit', id: assignment.id
        }
        verify.call
        allow(participant.team).to receive(:hyperlinks).and_return(['link'])
        verify.call
        allow(participant.team).to receive(:submitted_files).and_return([])
        verify.call
      end
    end
    context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is not nil and its due date has already passed' do
      it 'shows a flash error message and redirects to assignment#edit page' do
        allow(due_date).to receive(:due_at).and_return(Time.now - 1.day)
        allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return(due_date)
        delete :delete_signup_as_instructor, params
        expect(flash.now[:error]).to eq("You cannot drop a student after the drop topic deadline!")
        expect(response).to redirect_to controller: 'assignments', action: 'edit', id: assignment.id
      end
    end
    context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is nil' do
      let(:session) { { user: instructor } }
      it 'shows a flash success message and redirects to assignment#edit page' do
        allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return nil
        allow(SignedUpTeam).to receive(:find_team_users).with(participant.assignment.id, session[:user].id).and_return([signed_up_team])
        allow(signed_up_team).to receive(:t_id).and_return(1)
        allow(SignedUpTeam).to receive_message_chain(:where, :first).with(topic_id: session[:topic_id], team_id: signed_up_team.t_id).with(no_args).and_return(signed_up_team2)
        delete :delete_signup_as_instructor, params, session
        expect(flash.now[:success]).to eq("You have successfully dropped the student from the topic!")
        expect(response).to redirect_to controller: 'assignments', action: 'edit', id: assignment.id
      end
    end
  end
  describe '#set_priority' do
    let(:params) { { participant_id: '1', id: 1, topic: Array.new(1 + rand(5)) { 1 }, assignment_id: 1 } }
    let(:team_id) { participant.team.try(:id) }
    let(:bid) { Array.new(1 + rand(5)) { Bid.new } }
    it 'sets priority of bidding topic and redirects to sign_up_sheet#list page' do
      allow(AssignmentParticipant).to receive(:find_by).with(id: params[:participant_id]).and_return(participant)
      allow(SignUpTopic).to receive_message_chain(:find, :assignment).with(params[:topic].first).with(no_args).and_return(assignment)
      allow(Bid).to receive(:where).with(team_id: team_id).and_return(bid)
      bid.each do |x|
        allow(x).to receive(:topic_id).and_return(1)
      end
      allow(Bid).to receive(:where).with(topic_id: Integer, team_id: team_id).and_return(bid)
      allow(Bid).to receive(:where).with(topic_id: String, team_id: team_id).and_return(bid)
      allow(bid).to receive(:update_all).with(priority: Integer)
      expect(bid).to receive(:update_all).with(priority: Integer)
      get :set_priority, params
      expect(response).to redirect_to action: 'list', assignment_id: params[:assignment_id]
    end
  end
  describe '#save_topic_deadlines' do
    let(:params) { { assignment_id: 1, due_date: Hash.new } }
    let(:topics) { [topic] }
    context 'when topic_due_date cannot be found' do
      it 'creates a new topic_due_date record and redirects to assignment#edit page' do
        allow(TopicDueDate).to receive(:where).with(any_args).and_return nil
        allow(SignUpTopic).to receive(:where).with(any_args).and_return(topics)
        allow(assignment).to receive(:num_review_rounds).and_return(1)
        assignment.due_dates = assignment.due_dates.push(due_date2)
        allow(DeadlineType).to receive_message_chain(:find_by_name, :id).with(String).with(no_args).and_return(1)
        expect(TopicDueDate).to receive(:create).exactly(2).times.with(any_args)
        get :save_topic_deadlines, params
        expect(response).to redirect_to controller: 'assignments', action: 'edit', id: params[:assignment_id]
      end
    end
    context 'when topic_due_date can be found' do
      it 'updates the existing topic_due_date record and redirects to assignment#edit page' do
        allow(TopicDueDate).to receive(:where).with(any_args).and_return([due_date])
        allow(SignUpTopic).to receive(:where).with(any_args).and_return(topics)
        allow(assignment).to receive(:num_review_rounds).and_return(1)
        assignment.due_dates = assignment.due_dates.push(due_date2)
        allow(DeadlineType).to receive_message_chain(:find_by_name, :id).with(String).with(no_args).and_return(1)
        expect(due_date).to receive(:update_attributes).exactly(2).times.with(any_args)
        get :save_topic_deadlines, params
        expect(response).to redirect_to controller: 'assignments', action: 'edit', id: params[:assignment_id]
      end
    end
  end
  describe '#show_team' do
    let(:params) { { id: '1', assignment_id: 1 } }
    it 'renders show_team page' do
      allow(SignedUpTeam).to receive(:where).with(any_args).and_return([signed_up_team])
      get :show_team, params
      expect(response).to render_template(:show_team)
    end
  end
  describe '#switch_original_topic_to_approved_suggested_topic' do
    it 'redirects to sign_up_sheet#list page'
  end
end
Finished #switch_original_topic_to_approved_suggested_topic test.
describe SignUpSheetController do
let(:assignment) { build(:assignment, id: 1, instructor_id: 6, due_dates: [due_date], microtask: true, staggered_deadline: true) }
let(:instructor) { build(:instructor, id: 6) }
let(:student) { build(:student, id: 8) }
let(:participant) { build(:participant, id: 1, user_id: 6, assignment: assignment) }
let(:topic) { build(:topic, id: 1) }
let(:signed_up_team) { build(:signed_up_team, team: team, topic: topic) }
let(:signed_up_team2) { build(:signed_up_team, team_id: 2, is_waitlisted: true) }
let(:team) { build(:assignment_team, id: 1, assignment: assignment) }
let(:due_date) { build(:assignment_due_date, deadline_type_id: 1) }
let(:due_date2) { build(:assignment_due_date, deadline_type_id: 2) }
let(:bid) { Bid.new(topic_id: 1, priority: 1) }
let(:team_user) { build(:team_user) }
before(:each) do
allow(Assignment).to receive(:find).with('1').and_return(assignment)
allow(Assignment).to receive(:find).with(1).and_return(assignment)
stub_current_user(instructor, instructor.role.name, instructor.role)
allow(SignUpTopic).to receive(:find).with('1').and_return(topic)
allow(Participant).to receive(:find_by).with(id: '1').and_return(participant)
allow(AssignmentParticipant).to receive(:find).with('1').and_return(participant)
allow(AssignmentParticipant).to receive(:find).with(1).and_return(participant)
allow(AssignmentParticipant).to receive(:find_by).with(user_id: student.id, parent_id: 1).and_return(participant)
allow(Team).to receive(:find).with('1').and_return(team)
allow(participant).to receive(:team).and_return(team)
allow(participant.team).to receive(:submitted_files).and_return([])
allow(participant.team).to receive(:hyperlinks).and_return([])
allow(TeamsUser).to receive(:find_by).with(team_id: 1).and_return(team_user)
allow(team_user).to receive(:user).and_return(student)
end
describe '#new' do
it 'builds a new sign up topic and renders sign_up_sheet#new page'
end
describe '#create' do
context 'when topic cannot be found' do
context 'when new topic can be saved successfully' do
it 'sets up a new topic and redirects to assignment#edit page'
end
context 'when new topic cannot be saved successfully' do
it 'sets up a new topic and renders sign_up_sheet#new page'
end
end
context 'when topic can be found' do
it 'updates the existing topic and redirects to sign_up_sheet#add_signup_topics_staggered page'
end
end
describe '#destroy' do
context 'when topic can be found' do
it 'redirects to assignment#edit page'
end
context 'when topic cannot be found' do
it 'shows an error flash message and redirects to assignment#edit page'
end
end
describe '#edit' do
it 'renders sign_up_sheet#edit page'
end
describe '#update' do
context 'when topic cannot be found' do
it 'shows an error flash message and redirects to assignment#edit page'
end
context 'when topic can be found' do
it 'updates current topic and redirects to assignment#edit page'
end
end
describe '#list' do
context 'when current assignment is intelligent assignment and has submission duedate (deadline_type_id 1)' do
it 'renders sign_up_sheet#intelligent_topic_selection page'
end
context 'when current assignment is intelligent assignment and has submission duedate (deadline_type_id 1)' do
it 'renders sign_up_sheet#list page'
end
end
describe '#sign_up' do
context 'when SignUpSheet.signup_team method return nil' do
it 'shows an error flash message and redirects to sign_up_sheet#list page'
end
end
describe '#signup_as_instructor_action' do
context 'when user cannot be found' do
it 'shows an flash error message and redirects to assignment#edit page'
end
context 'when user cannot be found' do
context 'when an assignment_participant can be found' do
context 'when creating team related objects successfully' do
it 'shows a flash success message and redirects to assignment#edit page'
end
context 'when creating team related objects unsuccessfully' do
it 'shows a flash error message and redirects to assignment#edit page'
end
end
context 'when an assignment_participant can be found' do
it 'shows a flash error message and redirects to assignment#edit page'
end
end
end
describe '#delete_signup' do
let(:params) { { id: 1, topic_id: 1 } }
context 'when either submitted files or hyperlinks of current team are not empty' do
it 'shows a flash error message and redirects to sign_up_sheet#list page' do
allow(participant.team).to receive(:submitted_files).and_return(['file'])
expect = proc {
delete :delete_signup, params
expect(flash.now[:error]).to eq("You have already submitted your work, so you are not allowed to drop your topic.")
expect(response).to redirect_to(action: 'list', id: params[:id])
}
expect.call
allow(participant.team).to receive(:hyperlinks).and_return(['link'])
expect.call
allow(participant.team).to receive(:submitted_files).and_return([])
expect.call
end
end
context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is not nil and its due date has already passed' do
it 'shows a flash error message and redirects to sign_up_sheet#list page' do
allow(due_date).to receive(:due_at).and_return(Time.now - 1.day)
allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return(due_date)
delete :delete_signup, params
expect(flash.now[:error]).to eq("You cannot drop your topic after the drop topic deadline!")
expect(response).to redirect_to(action: 'list', id: params[:id])
end
end
context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is nil' do
let(:session) { { user: student } }
it 'shows a flash success message and redirects to sign_up_sheet#list page' do
allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return nil
allow(SignedUpTeam).to receive(:find_team_users).with(participant.assignment.id, session[:user].id).and_return([signed_up_team])
allow(signed_up_team).to receive(:t_id).and_return(1)
allow(SignedUpTeam).to receive_message_chain(:where, :first).with(topic_id: session[:topic_id], team_id: signed_up_team.t_id).with(no_args).and_return(signed_up_team2)
delete :delete_signup, params, session
expect(flash.now[:success]).to eq("You have successfully dropped your topic!")
expect(response).to redirect_to(action: 'list', id: params[:id])
end
end
end
describe '#delete_signup_as_instructor' do
let(:params) { { id: 1, topic_id: 1 } }
context 'when either submitted files or hyperlinks of current team are not empty' do
it 'shows a flash error message and redirects to assignment#edit page' do
allow(participant.team).to receive(:submitted_files).and_return(['file'])
expect = proc {
delete :delete_signup_as_instructor, params
expect(flash.now[:error]).to eq("The student has already submitted their work, so you are not allowed to remove them.")
expect(response).to redirect_to controller: 'assignments', action: 'edit', id: assignment.id
}
expect.call
allow(participant.team).to receive(:hyperlinks).and_return(['link'])
expect.call
allow(participant.team).to receive(:submitted_files).and_return([])
expect.call
end
end
context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is not nil and its due date has already passed' do
it 'shows a flash error message and redirects to assignment#edit page' do
allow(due_date).to receive(:due_at).and_return(Time.now - 1.day)
allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return(due_date)
delete :delete_signup_as_instructor, params
expect(flash.now[:error]).to eq("You cannot drop a student after the drop topic deadline!")
expect(response).to redirect_to controller: 'assignments', action: 'edit', id: assignment.id
end
end
context 'when both submitted files and hyperlinks of current team are empty and drop topic deadline is nil' do
let(:session) { { user: instructor } }
it 'shows a flash success message and redirects to assignment#edit page' do
allow(assignment).to receive_message_chain(:due_dates, :find_by_deadline_type_id).with(no_args).with(6).and_return nil
allow(SignedUpTeam).to receive(:find_team_users).with(participant.assignment.id, session[:user].id).and_return([signed_up_team])
allow(signed_up_team).to receive(:t_id).and_return(1)
allow(SignedUpTeam).to receive_message_chain(:where, :first).with(topic_id: session[:topic_id], team_id: signed_up_team.t_id).with(no_args).and_return(signed_up_team2)
delete :delete_signup_as_instructor, params, session
expect(flash.now[:success]).to eq("You have successfully dropped the student from the topic!")
expect(response).to redirect_to controller: 'assignments', action: 'edit', id: assignment.id
end
end
end
describe '#set_priority' do
let(:params) { { participant_id: '1', id: 1, topic: Array.new(1 + rand(5)) { 1 }, assignment_id: 1 } }
let(:team_id) { participant.team.try(:id) }
let(:bid) { Array.new(1 + rand(5)) { Bid.new } }
it 'sets priority of bidding topic and redirects to sign_up_sheet#list page' do
allow(AssignmentParticipant).to receive(:find_by).with(id: params[:participant_id]).and_return(participant)
allow(SignUpTopic).to receive_message_chain(:find, :assignment).with(params[:topic].first).with(no_args).and_return(assignment)
allow(Bid).to receive(:where).with(team_id: team_id).and_return(bid)
bid.each do |x|
allow(x).to receive(:topic_id).and_return(1)
end
allow(Bid).to receive(:where).with(topic_id: Integer, team_id: team_id).and_return(bid)
allow(Bid).to receive(:where).with(topic_id: String, team_id: team_id).and_return(bid)
allow(bid).to receive(:update_all).with(priority: Integer)
expect(bid).to receive(:update_all).with(priority: Integer)
get :set_priority, params
expect(response).to redirect_to action: 'list', assignment_id: params[:assignment_id]
end
end
describe '#save_topic_deadlines' do
# due_date is an empty Hash, so both examples derive deadlines from the
# assignment's own due_dates rather than from submitted form values.
let(:params) { { assignment_id: 1, due_date: Hash.new } }
let(:topics) { [topic] }
context 'when topic_due_date cannot be found' do
it 'creates a new topic_due_date record and redirects to assignment#edit page' do
# No existing TopicDueDate record, so the action is expected to create them.
allow(TopicDueDate).to receive(:where).with(any_args).and_return nil
allow(SignUpTopic).to receive(:where).with(any_args).and_return(topics)
allow(assignment).to receive(:num_review_rounds).and_return(1)
assignment.due_dates = assignment.due_dates.push(due_date2)
# Stubs the chain DeadlineType.find_by_name(<String>).id to return 1.
allow(DeadlineType).to receive_message_chain(:find_by_name, :id).with(String).with(no_args).and_return(1)
# NOTE(review): exactly 2 creates expected — presumably one per due date of
# the single review round; confirm against the controller's loop.
expect(TopicDueDate).to receive(:create).exactly(2).times.with(any_args)
get :save_topic_deadlines, params
expect(response).to redirect_to controller: 'assignments', action: 'edit', id: params[:assignment_id]
end
end
context 'when topic_due_date can be found' do
it 'updates the existing topic_due_date record and redirects to assignment#edit page' do
# An existing record is returned, so updates are expected instead of creates.
allow(TopicDueDate).to receive(:where).with(any_args).and_return([due_date])
allow(SignUpTopic).to receive(:where).with(any_args).and_return(topics)
allow(assignment).to receive(:num_review_rounds).and_return(1)
assignment.due_dates = assignment.due_dates.push(due_date2)
allow(DeadlineType).to receive_message_chain(:find_by_name, :id).with(String).with(no_args).and_return(1)
expect(due_date).to receive(:update_attributes).exactly(2).times.with(any_args)
get :save_topic_deadlines, params
expect(response).to redirect_to controller: 'assignments', action: 'edit', id: params[:assignment_id]
end
end
end
describe '#show_team' do
let(:params) { { id: '1', assignment_id: 1 } }
it 'renders show_team page' do
# Every SignedUpTeam lookup yields the stubbed team; the action should
# render its default template regardless of the query arguments.
allow(SignedUpTeam).to receive(:where).with(any_args).and_return([signed_up_team])
get :show_team, params
expect(response).to render_template(:show_team)
end
end
describe '#switch_original_topic_to_approved_suggested_topic' do
let(:params) { { id: 1, topic_id: 1 } }
# The action reads the current user from the session to resolve the team.
let(:session) { { user: student } }
it 'redirects to sign_up_sheet#list page' do
allow(TeamsUser).to receive(:team_id).with(any_args).and_return(1)
allow(SignedUpTeam).to receive(:topic_id).with(any_args).and_return(1)
# NOTE(review): exists? => false and an empty where result model the case
# where the suggested topic is not yet in the sheet — confirm with controller.
allow(SignUpTopic).to receive(:exists?).with(any_args).and_return(false)
allow(SignedUpTeam).to receive(:where).with(any_args).and_return([])
get :switch_original_topic_to_approved_suggested_topic, params, session
expect(response).to redirect_to action: 'list', id: params[:id]
end
end
end
|
# frozen_string_literal: true
# rubocop:disable Metrics/BlockLength
Rails.application.configure do
  # Settings specified here will take precedence over those in
  # config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  config.cache_store = :memory_store, { size: 64.megabytes }
  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  # config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # We want to serve compressed values
  config.assets.compress = true
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = true
  # Asset digests allow you to set far-future HTTP expiration dates on all
  # assets, # yet still be able to expire them through the digest params.
  config.assets.digest = true
  # `config.assets.precompile` and `config.assets.version` have moved to
  # config/initializers/assets.rb
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Force all access to the app over SSL, use Strict-Transport-Security, and use
  # secure cookies.
  config.force_ssl = true unless ENV['DISABLE_FORCE_SSL']
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]
  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'
  # Configure email server.
  # For discussion about how to do this, see:
  # https://www.railstutorial.org/book/account_activation_password_reset
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.delivery_method = :smtp
  host = (ENV['PUBLIC_HOSTNAME'] || 'public-hostname-not-configured')
  config.action_mailer.default_url_options = { host: host }
  ActionMailer::Base.smtp_settings = {
    address: 'smtp.sendgrid.net',
    port: '587',
    authentication: :plain,
    user_name: ENV['SENDGRID_USERNAME'],
    password: ENV['SENDGRID_PASSWORD'],
    domain: 'heroku.com',
    enable_starttls_auto: true
  }
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
  # Use Fastly as a CDN. See: https://devcenter.heroku.com/articles/fastly
  # config.action_controller.asset_host = ENV['FASTLY_CDN_URL']
  # Use CDN directly for static assets
  # TODO: Do we need to set this to true?
  # The documentation for Fastly suggests setting
  # "config.serve_static_assets = true". However, this has since been
  # renamed to "config.serve_static_files", which we already conditionally set.
  # Cache static content. Cache for a long time; assets are digest-stamped,
  # so a changed asset gets a new URL and bypasses the stale cache entry.
  config.public_file_server.headers =
    {
      'Cache-Control' =>
        'public, s-maxage=31536000, max-age=31536000, immutable'
    }
  # Enable Rack's built-in compression mechanism; this is important for people
  # with slow network connections
  config.middleware.use Rack::Deflater
  # In production and fake_production environments turn on "lograge".
  # This makes the logs easier to read and removes cruft that, while useful
  # in development, can be overwhelming in production.
  config.lograge.enabled = true
  # As a failsafe, trigger an exception if the response just hangs for
  # too long. We only do this in production, because it's not
  # supposed to happen in normal use - this is simply an automatic
  # recovery mechanism if things get stuck. We don't do this in test or
  # development, because it interferes with their purposes.
  # The "use" form is preferred, but it doesn't actually work when placed
  # in this file, so we'll just set the timeout directly.
  # Ignore exceptions - in fake_production this will fail. That's good,
  # because we do *not* want timeouts during a debug session.
  # rubocop:disable Lint/HandleExceptions
  begin
    Rack::Timeout.service_timeout = 30 # seconds
    # The Rack::Timeout state-change reports are really noisy and don't help
    # debug typical problems (if anything they get in the way), so detach the
    # gem's built-in logging observer.
    Rack::Timeout.unregister_state_change_observer(:logger)
  rescue NameError
    # Do nothing if it's unavailable (this happens if we didn't load the gem)
  end
  # rubocop:enable Lint/HandleExceptions
end
# rubocop:enable Metrics/BlockLength
Stop logging Rack::Timeout state changes
The timeout reports from Rack::Timeout are really noisy,
and don't seem to help debug typical problems.
If anything, they get in the way.
This commit disables them.
Signed-off-by: David A. Wheeler <9ae72d22d8b894b865a7a496af4fab6320e6abb2@dwheeler.com>
# frozen_string_literal: true
# rubocop:disable Metrics/BlockLength
Rails.application.configure do
# Settings specified here will take precedence over those in
# config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
config.cache_store = :memory_store, { size: 64.megabytes }
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
# config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# We want to serve compressed values
config.assets.compress = true
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = true
# Asset digests allow you to set far-future HTTP expiration dates on all
# assets, # yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to
# config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use
# secure cookies. DISABLE_FORCE_SSL is an escape hatch for local/debug runs.
config.force_ssl = true unless ENV['DISABLE_FORCE_SSL']
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Configure email server.
# For discussion about how to do this, see:
# https://www.railstutorial.org/book/account_activation_password_reset
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :smtp
# Mailer links need an absolute host; fall back to a clearly-wrong marker
# string so a missing PUBLIC_HOSTNAME is visible in generated URLs.
host = (ENV['PUBLIC_HOSTNAME'] || 'public-hostname-not-configured')
config.action_mailer.default_url_options = { host: host }
# SendGrid credentials come from the SENDGRID_* environment variables.
ActionMailer::Base.smtp_settings = {
address: 'smtp.sendgrid.net',
port: '587',
authentication: :plain,
user_name: ENV['SENDGRID_USERNAME'],
password: ENV['SENDGRID_PASSWORD'],
domain: 'heroku.com',
enable_starttls_auto: true
}
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Use Fastly as a CDN. See: https://devcenter.heroku.com/articles/fastly
# config.action_controller.asset_host = ENV['FASTLY_CDN_URL']
# Use CDN directly for static assets
# TODO: Do we need to set this to true?
# The documentation for Fastly suggests setting
# "config.serve_static_assets = true". However, this has since been
# renamed to "config.serve_static_files", which we already conditionally set.
# Cache static content. Cache for a long time; assets are digest-stamped,
# so a changed asset gets a new URL and bypasses the stale cache entry.
config.public_file_server.headers =
{
'Cache-Control' =>
'public, s-maxage=31536000, max-age=31536000, immutable'
}
# Enable Rack's built-in compression mechanism; this is important for people
# with slow network connections
config.middleware.use Rack::Deflater
# In production and fake_production environments turn on "lograge".
# This makes the logs easier to read and removes cruft that, while useful
# in development, can be overwhelming in production.
config.lograge.enabled = true
# As a failsafe, trigger an exception if the response just hangs for
# too long. We only do this in production, because it's not
# supposed to happen in normal use - this is simply an automatic
# recovery mechanism if things get stuck. We don't do this in test or
# development, because it interferes with their purposes.
# The "use" form is preferred, but it doesn't actually work when placed
# in this file, so we'll just set the timeout directly.
# Ignore exceptions - in fake_production this will fail. That's good,
# because we do *not* want timeouts during a debug session.
# rubocop:disable Lint/HandleExceptions
begin
Rack::Timeout.service_timeout = 30 # seconds
# The timeout reports are really noisy, and don't seem to help debug
# typical problems (if anything they get in the way). Disable them.
Rack::Timeout.unregister_state_change_observer(:logger)
rescue NameError
# Do nothing if it's unavailable (this happens if we didn't load the gem)
end
# rubocop:enable Lint/HandleExceptions
end
# rubocop:enable Metrics/BlockLength
|
module Homebrew
  # Prints the path of the file that implements the given brew command.
  # Search order: internal commands, developer commands (developers only),
  # then external brew-<cmd> executables on PATH. Dies when nothing matches.
  def command
    # Fail early with a clear message: without this, an empty ARGV made cmd
    # nil and the method fell through to the confusing "Unknown command: ".
    abort "This command requires a command argument" if ARGV.empty?
    cmd = ARGV.first
    # Resolve aliases (e.g. "ls" -> "list"); unknown names pass through.
    cmd = HOMEBREW_INTERNAL_COMMAND_ALIASES.fetch(cmd, cmd)
    if (path = HOMEBREW_LIBRARY_PATH/"cmd/#{cmd}.rb").file?
      puts path
    elsif ARGV.homebrew_developer? && (path = HOMEBREW_LIBRARY_PATH/"dev-cmd/#{cmd}.rb").file?
      puts path
    elsif (path = which("brew-#{cmd}") || which("brew-#{cmd}.rb"))
      puts path
    else
      odie "Unknown command: #{cmd}"
    end
  end
end
command: fail early if no command is given
Closes #44450.
Signed-off-by: Baptiste Fontaine <bfee279af59f3e3f71f7ce1fa037ea7b90f93cbf@yahoo.fr>
module Homebrew
  # Prints the path of the file that implements the given brew command.
  # Search order: internal commands, developer commands (developers only),
  # then external brew-<cmd> executables on PATH. Dies when nothing matches.
  def command
    abort "This command requires a command argument" if ARGV.empty?
    # Resolve aliases (e.g. "ls" -> "list"); unknown names pass through.
    name = HOMEBREW_INTERNAL_COMMAND_ALIASES.fetch(ARGV.first, ARGV.first)
    internal_cmd = HOMEBREW_LIBRARY_PATH/"cmd/#{name}.rb"
    dev_cmd = HOMEBREW_LIBRARY_PATH/"dev-cmd/#{name}.rb"
    path =
      if internal_cmd.file?
        internal_cmd
      elsif ARGV.homebrew_developer? && dev_cmd.file?
        dev_cmd
      else
        which("brew-#{name}") || which("brew-#{name}.rb")
      end
    odie "Unknown command: #{name}" unless path
    puts path
  end
end
|
require_relative 'spec_helper'
require 'problem_017'
describe Problem017 do
  before(:each) do
    @prb = Problem017.new
  end

  describe "converting numbers to words" do
    # Table-driven examples replace ~35 hand-written, near-identical blocks
    # (which had also drifted into inconsistent "convert"/"converts" wording).
    {
      1 => 'one', 2 => 'two', 3 => 'three', 4 => 'four', 5 => 'five',
      6 => 'six', 7 => 'seven', 8 => 'eight', 9 => 'nine', 10 => 'ten',
      11 => 'eleven', 12 => 'twelve', 13 => 'thirteen', 14 => 'fourteen',
      15 => 'fifteen', 16 => 'sixteen', 17 => 'seventeen', 18 => 'eighteen',
      19 => 'nineteen', 20 => 'twenty', 21 => 'twenty-one', 30 => 'thirty',
      35 => 'thirty-five', 40 => 'forty', 50 => 'fifty', 60 => 'sixty',
      70 => 'seventy', 80 => 'eighty', 90 => 'ninety',
      100 => 'one hundred', 300 => 'three hundred',
      103 => 'one hundred and three', 1000 => 'one thousand',
      342 => 'three hundred and forty-two', 115 => 'one hundred and fifteen'
    }.each do |number, words|
      it "converts #{number} to '#{words}'" do
        number.to_words.should == words
      end
    end
  end

  describe "computing total letters" do
    # count_letters: spaces and hyphens must not be counted.
    { 'one' => 3, 'twenty-one' => 9, 'one hundred and twenty-one' => 22 }.each do |phrase, letters|
      it "counts the letters in '#{phrase}' as #{letters}" do
        @prb.count_letters(phrase).should == letters
      end
    end
    # sum_to: cumulative letter counts for 1..limit.
    [
      [1, 3, "'one'"],
      [3, 11, "'one'+'two'+'three'"],
      [5, 19, "'one'+'two'+'three'+'four'+'five'"]
    ].each do |limit, expected, breakdown|
      it "computes #{expected} letters for numbers up to #{limit} (#{breakdown})" do
        @prb.sum_to(limit).should == expected
      end
    end
  end

  it "computes the total letters for all numbers up to 1,000" do
    @prb.should_receive(:sum_to).with(1_000).and_return(999)
    @prb.run.should == 999
  end
end
simplified word generation specs
require_relative 'spec_helper'
require 'problem_017'
describe Problem017 do
  before(:each) do
    @prb = Problem017.new
  end

  describe "converting numbers to words" do
    # One example per (number, expected words) pair; same coverage as before,
    # expressed as a hash table instead of an array of pairs.
    {
      1 => 'one', 2 => 'two', 3 => 'three', 4 => 'four', 5 => 'five',
      6 => 'six', 7 => 'seven', 8 => 'eight', 9 => 'nine', 10 => 'ten',
      11 => 'eleven', 12 => 'twelve', 13 => 'thirteen', 14 => 'fourteen',
      15 => 'fifteen', 16 => 'sixteen', 17 => 'seventeen', 18 => 'eighteen',
      19 => 'nineteen', 20 => 'twenty', 21 => 'twenty-one', 30 => 'thirty',
      35 => 'thirty-five', 40 => 'forty', 50 => 'fifty', 60 => 'sixty',
      70 => 'seventy', 80 => 'eighty', 90 => 'ninety',
      100 => 'one hundred', 300 => 'three hundred',
      103 => 'one hundred and three', 1000 => 'one thousand',
      342 => 'three hundred and forty-two', 115 => 'one hundred and fifteen'
    }.each do |number, words|
      it "converts #{number} to '#{words}'" do
        number.to_words.should == words
      end
    end
  end

  describe "computing total letters" do
    it "counts the letters in 'one' as 3" do
      @prb.count_letters('one').should == 3
    end
    it "counts the letters in 'twenty-one' as 9" do
      @prb.count_letters('twenty-one').should == 9
    end
    it "counts the letters in 'one hundred and twenty-one' as 22" do
      @prb.count_letters('one hundred and twenty-one').should == 22
    end
    it "computes 3 letters for numbers up to 1 ('one')" do
      @prb.sum_to(1).should == 3
    end
    it "computes 11 letters for numbers up to 3 ('one'+'two'+'three')" do
      @prb.sum_to(3).should == 11
    end
    it "computes 19 letters for numbers up to 5 ('one'+'two'+'three'+'four'+'five')" do
      @prb.sum_to(5).should == 19
    end
  end

  it "computes the total letters for all numbers up to 1,000" do
    @prb.should_receive(:sum_to).with(1_000).and_return(999)
    @prb.run.should == 999
  end
end
|
require 'logger'
require './logging'
include Logging
require 'optparse'
require "erubis"
#####
#
# Runs the statistics for a given dataset
# IN: dataset_name source_of_tree
# out:
# 1) Sorted and appropriate sam files
# 2) runs compare2truth
# 3) runs compare junctions
#
####
# 2015/8/10 Katharina Hayer
# Shared logger for the whole script; its level is set later by setup_logger.
$logger = Logger.new(STDERR)
# Every aligner the benchmark knows about; "-a all" (the default) runs them all.
$algorithms = [:contextmap2,
:crac, :gsnap, :hisat, :mapsplice2, :olego, :rum,
:star, :subjunc, :tophat2]
# Initialize logger
# Maps the textual log level onto $logger; anything unrecognized
# (including nil) falls back to ERROR.
def setup_logger(loglevel)
  level_map = {
    "debug" => Logger::DEBUG,
    "warn"  => Logger::WARN,
    "info"  => Logger::INFO
  }
  $logger.level = level_map.fetch(loglevel, Logger::ERROR)
end
# Parses command-line options; after parsing, args holds the positional
# arguments [dataset, source_of_tree]. Returns the options hash.
def setup_options(args)
  # FIX: the default level must live under :log_level — every reader and
  # writer below uses options[:log_level], so the old ":loglevel" default
  # was silently ignored and the level fell back to ERROR by accident.
  options = {:out_file => "overview_table.xls", :log_level => "error",
    :algorithm => "all", :transcripts => nil, :junctions_crossed => nil,
    :cig_file => nil, :stats_path => nil, :tool_result_path => nil,
    :aligner_benchmark => nil, :samtools => "samtools", :jobs_path => nil,
    :species => "human", :debug => false
  }
  opt_parser = OptionParser.new do |opts|
    opts.banner = "\nUsage: ruby master.rb [options] dataset source_of_tree"
    opts.separator ""
    opts.separator "e.g. dataset = t3r1"
    opts.separator "e.g. source_of_tree = /project/itmatlab/aligner_benchmark"
    opts.separator ""
    # enumeration of the supported aligners
    opts.on('-a', '--algorithm ENUM', [:all,:contextmap2,
      :crac, :gsnap, :hisat, :mapsplice2, :olego, :rum,
      :star, :subjunc, :tophat2],'Choose from below:','all: DEFAULT',
      'contextmap2','crac','gsnap','hisat', 'mapsplice2',
      'olego','rum','star','subjunc','tophat2') do |v|
      options[:algorithm] = v
    end
    opts.on("-d", "--debug", "Run in debug mode") do |v|
      options[:log_level] = "debug"
      options[:debug] = true
    end
    opts.on("-o", "--out_file [OUT_FILE]",
      :REQUIRED,String,
      "File for the output, Default: overview_table.xls") do |anno_file|
      options[:out_file] = anno_file
    end
    opts.on("-s", "--species [String]",
      :REQUIRED,String,
      "Species, Default: human") do |s| # fixed help-text typo ("Spiecies")
      options[:species] = s
    end
    opts.on("-v", "--verbose", "Run verbosely") do |v|
      options[:log_level] = "info"
    end
    opts.separator ""
  end
  args = ["-h"] if args.length == 0
  opt_parser.parse!(args)
  setup_logger(options[:log_level])
  raise "Please specify the input files" if args.length == 0
  options
end
# Tracks one LSF batch job submitted via bsub and polled via bjobs.
class Job
  # jobnumber:: LSF job id
  # cmd:: the submission command line (kept for logging)
  # status:: LSF state string, e.g. "PEND", "RUN", "DONE", "EXIT"
  # working_dir:: directory the job was submitted from
  def initialize(jobnumber, cmd, status, working_dir)
    @jobnumber = jobnumber
    @cmd = cmd
    @status = status
    @working_dir = working_dir
  end
  attr_accessor :jobnumber, :cmd, :status

  def to_s
    "Jobnumber #{@jobnumber}; Cmd: #{@cmd}; Status: #{@status}; WD: #{@working_dir}"
  end

  # Polls `bjobs -l` and refreshes @status; marks the job "EXIT" when bjobs
  # is unavailable or no longer knows this job id.
  def update_status
    begin
      l = `bjobs -l #{@jobnumber}`
    # Narrowed from `rescue Exception`, which also swallowed SignalException
    # and SystemExit; a missing bjobs raises Errno::ENOENT (a StandardError).
    rescue StandardError => e
      $logger.error(e)
      $logger.error("bjobs not found!\n#{self}")
      @status = "EXIT"
      return
    end
    l.chomp!
    if l == ""
      $logger.error("Jobnumber #{@jobnumber} not found! #{self}")
      @status = "EXIT"
    else
      # Second bjobs line looks like "... Status PEND, ..."; extract the word.
      l = l.split("\n")[1]
      @status = l.split("Status ")[1].split(",")[0].gsub(/\W/,"")
    end
  end
end
# True when both statistics outputs already exist under stats_path, meaning
# the comparison jobs for this run can be skipped.
def check_if_results_exist(stats_path)
  # File.exists? is deprecated and removed in Ruby 3.2; use File.exist?.
  File.exist?("#{stats_path}/comp_res.txt") && File.exist?("#{stats_path}/junctions_stats.txt")
end
# Locates the settings file for `dataset` under source_of_tree and fills
# options[:cig_file], [:transcripts] and [:junctions_crossed] from it.
# Settings lines look like KEY="value"; READS_PATH must appear before the
# keys whose values are relative to it.
def get_truth_files(options, source_of_tree, dataset)
  cmd = "find #{source_of_tree}/jobs/settings/ -name \"*#{options[:species]}*#{dataset}*\""
  $logger.debug(cmd)
  l = `#{cmd}`
  l = l.split("\n")
  raise "Trouble finding #{dataset}: #{l}" if l.length != 1
  l = l[0]
  dir = nil
  # File.foreach closes the handle (File.open without a block leaked it).
  File.foreach(l) do |line|
    # chomp!/delete! return nil when nothing changes, so the original chained
    # line.chomp!.delete!("\"") crashed on a final line without a trailing
    # newline; the non-bang forms are always safe.
    line = line.chomp.delete("\"")
    fields = line.split("=")
    case fields[0]
    when "READS_PATH"
      dir = fields[1]
    when "CIG_FILE"
      options[:cig_file] = "#{dir}/#{fields[1]}"
    when "TRANSCRIPTS"
      options[:transcripts] = "#{dir}/#{fields[1]}"
    when "JUNCTIONS_CROSSED"
      options[:junctions_crossed] = "#{dir}/#{fields[1]}"
    end
  end
  $logger.debug(options)
end
# Polls every submitted job until all have finished, removing jobs as they
# complete. Uses delete_if so removal is safe: the original
# each_with_index + delete_at skipped the element following every deletion.
def monitor_jobs(jobs)
  until jobs.empty?
    jobs.delete_if do |job|
      job.update_status()
      case job.status
      when "DONE"
        $logger.info("SUCCESS #{job}")
        true
      when "EXIT"
        $logger.error("FAILED #{job}")
        true
      else
        false # still pending/running; keep polling
      end
    end
    sleep(5) unless jobs.empty?
  end
end
# Runs `cmd` through the shell and returns the LSF job number parsed from
# bsub's output ("Job <12345> is submitted ..."). In debug mode nothing is
# executed and a dummy id (1234) is returned; returns 1 on failure.
def submit(cmd, options)
  if options[:debug]
    $logger.debug("In submit: #{cmd}")
    return 1234
  end
  begin
    l = `#{cmd}`
  # Narrowed from `rescue Exception` so signals/exit are not swallowed;
  # a missing executable raises Errno::ENOENT (a StandardError).
  rescue StandardError => e
    $logger.error(e)
    $logger.error("bsub not found!#{cmd}")
    return 1
  end
  # The job id is the third non-word-delimited token of bsub's output.
  l.split(/\W/)[2].to_i
end
# Generates and submits one statistics job per finished tophat2 run of
# `dataset`: for each alignment directory containing both unmapped.bam and
# accepted_hits.bam, renders a job script from the tophat2.sh Erubis
# template, writes it under options[:jobs_path], submits it with bsub and
# appends the resulting Job to options[:jobs].
def run_tophat2(options, source_of_tree, dataset)
  cmd = "find #{source_of_tree}/tool_results/tophat2/alignment -name \"*#{options[:species]}*#{dataset}*\""
  $logger.debug(cmd)
  l = `#{cmd}`
  l = l.split("\n")
  raise "Trouble finding #{dataset}: #{l}" if l.length != 1
  l = l[0]
  erubis = Erubis::Eruby.new(File.read("#{options[:aligner_benchmark]}/templates/tophat2.sh"))
  Dir.glob("#{l}/*").each do |p|
    next unless File.directory? p
    # Only finished runs have both BAM files in place.
    # File.exists?/Dir.exists? are deprecated (removed in Ruby 3.2).
    next unless File.exist?("#{p}/unmapped.bam")
    next unless File.exist?("#{p}/accepted_hits.bam")
    $logger.debug(p)
    options[:stats_path] = "#{options[:out_directory]}/tophat2/#{p.split("/")[-1]}"
    begin
      Dir.mkdir(options[:stats_path])
    rescue SystemCallError
      if Dir.exist?(options[:stats_path])
        logger.warn("Directory #{options[:stats_path]} exists!")
      else
        logger.error("Can't create directory #{options[:stats_path]}!")
        raise("Trouble creating directory, see log for details.")
      end
    end
    next if check_if_results_exist(options[:stats_path])
    options[:tool_result_path] = p
    # Build the script path once and reuse it for writing and for bsub.
    shell_file = "#{options[:jobs_path]}/tophat2_statistics_#{options[:species]}_#{dataset}_#{p.split("/")[-1]}.sh"
    File.open(shell_file, "w") { |o| o.puts(erubis.evaluate(options)) }
    Dir.chdir "#{options[:jobs_path]}"
    $logger.debug(Dir.pwd)
    cmd = "bsub < #{shell_file}"
    jobnumber = submit(cmd, options)
    options[:jobs] << Job.new(jobnumber, cmd, "PEND", Dir.pwd)
  end
  $logger.debug(options[:jobs])
end
# Entry point: argv = [dataset, source_of_tree]. Prepares the output
# directory tree, loads the dataset's truth files, dispatches one
# statistics run per selected aligner, then blocks until all jobs finish.
def run(argv)
  options = setup_options(argv)
  dataset = argv[0]
  source_of_tree = argv[1]
  options[:aligner_benchmark] = File.expand_path(File.dirname(__FILE__))
  # Results go to
  options[:out_directory] = "#{source_of_tree}/statistics/#{options[:species]}_#{dataset}"
  begin
    Dir.mkdir(options[:out_directory])
  rescue SystemCallError
    # Dir.exists? is deprecated (removed in Ruby 3.2); use Dir.exist?.
    if Dir.exist?(options[:out_directory])
      logger.warn("Directory #{options[:out_directory]} exists!")
    else
      logger.error("Can't create directory #{options[:out_directory]}!")
      raise("Trouble creating directory, see log for details.")
    end
  end
  #setup_logger(options[:log_level])
  $logger.info("Hallo")
  $logger.debug("DEBUG")
  $logger.debug(options)
  $logger.debug(argv)
  algorithms = options[:algorithm] == "all" ? $algorithms : [options[:algorithm]]
  get_truth_files(options, source_of_tree, dataset)
  $logger.debug("Algorithms = #{algorithms}")
  options[:jobs] = []
  algorithms.each do |alg|
    options[:jobs_path] = "#{source_of_tree}/jobs/#{alg}"
    begin
      Dir.mkdir("#{options[:out_directory]}/#{alg}")
    rescue SystemCallError
      if Dir.exist?("#{options[:out_directory]}/#{alg}")
        logger.warn("Directory #{options[:out_directory]}/#{alg} exists!")
      else
        logger.error("Can't create directory #{options[:out_directory]}/#{alg}!")
        raise("Trouble creating directory, see log for details.")
      end
    end
    case alg
    when :tophat2
      run_tophat2(options, source_of_tree, dataset)
    when :star
      puts "LALAA" # TODO(review): star support appears unimplemented
    end
  end
  monitor_jobs(options[:jobs])
  $logger.info("All done!")
end
# Only run when executed directly, not when loaded via require.
run(ARGV) if __FILE__ == $0
hitting the pause button
require 'logger'
require './logging'
include Logging
require 'optparse'
require "erubis"
#####
#
# Runs the statistics for a given dataset
# IN: dataset_name source_of_tree
# out:
# 1) Sorted and appropriate sam files
# 2) runs compare2truth
# 3) runs compare junctions
#
####
# 2015/8/10 Katharina Hayer
# Shared logger for the whole script; its level is set later by setup_logger.
$logger = Logger.new(STDERR)
# Every aligner the benchmark knows about; "-a all" (the default) runs them all.
$algorithms = [:contextmap2,
:crac, :gsnap, :hisat, :mapsplice2, :olego, :rum,
:star, :subjunc, :tophat2]
# Initialize logger
# Maps the textual log level onto $logger; anything unrecognized
# (including nil) falls back to ERROR.
def setup_logger(loglevel)
  level_map = {
    "debug" => Logger::DEBUG,
    "warn"  => Logger::WARN,
    "info"  => Logger::INFO
  }
  $logger.level = level_map.fetch(loglevel, Logger::ERROR)
end
# Parses command-line options; after parsing, args holds the positional
# arguments [dataset, source_of_tree]. Returns the options hash.
def setup_options(args)
  # FIX: the default level must live under :log_level — every reader and
  # writer below uses options[:log_level], so the old ":loglevel" default
  # was silently ignored and the level fell back to ERROR by accident.
  options = {:out_file => "overview_table.xls", :log_level => "error",
    :algorithm => "all", :transcripts => nil, :junctions_crossed => nil,
    :cig_file => nil, :stats_path => nil, :tool_result_path => nil,
    :aligner_benchmark => nil, :samtools => "samtools", :jobs_path => nil,
    :species => "human", :debug => false
  }
  opt_parser = OptionParser.new do |opts|
    opts.banner = "\nUsage: ruby master.rb [options] dataset source_of_tree"
    opts.separator ""
    opts.separator "e.g. dataset = t3r1"
    opts.separator "e.g. source_of_tree = /project/itmatlab/aligner_benchmark"
    opts.separator ""
    # enumeration of the supported aligners
    opts.on('-a', '--algorithm ENUM', [:all,:contextmap2,
      :crac, :gsnap, :hisat, :mapsplice2, :olego, :rum,
      :star, :subjunc, :tophat2],'Choose from below:','all: DEFAULT',
      'contextmap2','crac','gsnap','hisat', 'mapsplice2',
      'olego','rum','star','subjunc','tophat2') do |v|
      options[:algorithm] = v
    end
    opts.on("-d", "--debug", "Run in debug mode") do |v|
      options[:log_level] = "debug"
      options[:debug] = true
    end
    opts.on("-o", "--out_file [OUT_FILE]",
      :REQUIRED,String,
      "File for the output, Default: overview_table.xls") do |anno_file|
      options[:out_file] = anno_file
    end
    opts.on("-s", "--species [String]",
      :REQUIRED,String,
      "Species, Default: human") do |s| # fixed help-text typo ("Spiecies")
      options[:species] = s
    end
    opts.on("-v", "--verbose", "Run verbosely") do |v|
      options[:log_level] = "info"
    end
    opts.separator ""
  end
  args = ["-h"] if args.length == 0
  opt_parser.parse!(args)
  setup_logger(options[:log_level])
  raise "Please specify the input files" if args.length == 0
  options
end
# Tracks one LSF batch job submitted via bsub and polled via bjobs.
class Job
  # jobnumber:: LSF job id
  # cmd:: the submission command line (kept for logging)
  # status:: LSF state string, e.g. "PEND", "RUN", "DONE", "EXIT"
  # working_dir:: directory the job was submitted from
  def initialize(jobnumber, cmd, status, working_dir)
    @jobnumber = jobnumber
    @cmd = cmd
    @status = status
    @working_dir = working_dir
  end
  attr_accessor :jobnumber, :cmd, :status

  def to_s
    "Jobnumber #{@jobnumber}; Cmd: #{@cmd}; Status: #{@status}; WD: #{@working_dir}"
  end

  # Polls `bjobs -l` and refreshes @status; marks the job "EXIT" when bjobs
  # is unavailable or no longer knows this job id.
  def update_status
    begin
      l = `bjobs -l #{@jobnumber}`
    # Narrowed from `rescue Exception`, which also swallowed SignalException
    # and SystemExit; a missing bjobs raises Errno::ENOENT (a StandardError).
    rescue StandardError => e
      $logger.error(e)
      $logger.error("bjobs not found!\n#{self}")
      @status = "EXIT"
      return
    end
    l.chomp!
    if l == ""
      $logger.error("Jobnumber #{@jobnumber} not found! #{self}")
      @status = "EXIT"
    else
      # Second bjobs line looks like "... Status PEND, ..."; extract the word.
      l = l.split("\n")[1]
      @status = l.split("Status ")[1].split(",")[0].gsub(/\W/,"")
    end
  end
end
# True when both statistics outputs already exist under stats_path, meaning
# the comparison jobs for this run can be skipped.
def check_if_results_exist(stats_path)
  # File.exists? is deprecated and removed in Ruby 3.2; use File.exist?.
  File.exist?("#{stats_path}/comp_res.txt") && File.exist?("#{stats_path}/junctions_stats.txt")
end
# Locates the settings file for `dataset` under source_of_tree and fills
# options[:cig_file], [:transcripts] and [:junctions_crossed] from it.
# Settings lines look like KEY="value"; READS_PATH must appear before the
# keys whose values are relative to it.
def get_truth_files(options, source_of_tree, dataset)
  cmd = "find #{source_of_tree}/jobs/settings/ -name \"*#{options[:species]}*#{dataset}*\""
  $logger.debug(cmd)
  l = `#{cmd}`
  l = l.split("\n")
  raise "Trouble finding #{dataset}: #{l}" if l.length != 1
  l = l[0]
  dir = nil
  # File.foreach closes the handle (File.open without a block leaked it).
  File.foreach(l) do |line|
    # chomp!/delete! return nil when nothing changes, so the original chained
    # line.chomp!.delete!("\"") crashed on a final line without a trailing
    # newline; the non-bang forms are always safe.
    line = line.chomp.delete("\"")
    fields = line.split("=")
    case fields[0]
    when "READS_PATH"
      dir = fields[1]
    when "CIG_FILE"
      options[:cig_file] = "#{dir}/#{fields[1]}"
    when "TRANSCRIPTS"
      options[:transcripts] = "#{dir}/#{fields[1]}"
    when "JUNCTIONS_CROSSED"
      options[:junctions_crossed] = "#{dir}/#{fields[1]}"
    end
  end
  $logger.debug(options)
end
# Polls every submitted job until all have finished, removing jobs as they
# complete. Uses delete_if so removal is safe: the original
# each_with_index + delete_at skipped the element following every deletion.
def monitor_jobs(jobs)
  until jobs.empty?
    jobs.delete_if do |job|
      job.update_status()
      case job.status
      when "DONE"
        $logger.info("SUCCESS #{job}")
        true
      when "EXIT"
        $logger.error("FAILED #{job}")
        true
      else
        false # still pending/running; keep polling
      end
    end
    sleep(5) unless jobs.empty?
  end
end
# Runs `cmd` through the shell and returns the LSF job number parsed from
# bsub's output ("Job <12345> is submitted ..."). In debug mode nothing is
# executed and a dummy id (1234) is returned; returns 1 on failure.
def submit(cmd, options)
  if options[:debug]
    $logger.debug("In submit: #{cmd}")
    return 1234
  end
  begin
    l = `#{cmd}`
  # Narrowed from `rescue Exception` so signals/exit are not swallowed;
  # a missing executable raises Errno::ENOENT (a StandardError).
  rescue StandardError => e
    $logger.error(e)
    $logger.error("bsub not found!#{cmd}")
    return 1
  end
  # The job id is the third non-word-delimited token of bsub's output.
  l.split(/\W/)[2].to_i
end
# Generates and submits one statistics job per finished tophat2 run of
# `dataset`: for each alignment directory containing both unmapped.bam and
# accepted_hits.bam, renders a job script from the tophat2.sh Erubis
# template, writes it under options[:jobs_path], submits it with bsub and
# appends the resulting Job to options[:jobs].
def run_tophat2(options, source_of_tree, dataset)
  cmd = "find #{source_of_tree}/tool_results/tophat2/alignment -name \"*#{options[:species]}*#{dataset}*\""
  $logger.debug(cmd)
  l = `#{cmd}`
  l = l.split("\n")
  raise "Trouble finding #{dataset}: #{l}" if l.length != 1
  l = l[0]
  erubis = Erubis::Eruby.new(File.read("#{options[:aligner_benchmark]}/templates/tophat2.sh"))
  Dir.glob("#{l}/*").each do |p|
    next unless File.directory? p
    # Only finished runs have both BAM files in place.
    # File.exists?/Dir.exists? are deprecated (removed in Ruby 3.2).
    next unless File.exist?("#{p}/unmapped.bam")
    next unless File.exist?("#{p}/accepted_hits.bam")
    $logger.debug(p)
    options[:stats_path] = "#{options[:out_directory]}/tophat2/#{p.split("/")[-1]}"
    begin
      Dir.mkdir(options[:stats_path])
    rescue SystemCallError
      if Dir.exist?(options[:stats_path])
        logger.warn("Directory #{options[:stats_path]} exists!")
      else
        logger.error("Can't create directory #{options[:stats_path]}!")
        raise("Trouble creating directory, see log for details.")
      end
    end
    next if check_if_results_exist(options[:stats_path])
    options[:tool_result_path] = p
    # FIX: sanitize only the generated file NAME. The previous gsub(/\W/,"")
    # over the whole path also stripped the directory slashes and the ".sh"
    # dot, turning the path into a bogus relative filename.
    base = "tophat2_statistics_#{options[:species]}_#{dataset}_#{p.split("/")[-1]}".gsub(/\W/, "")
    shell_file = "#{options[:jobs_path]}/#{base}.sh"
    File.open(shell_file, "w") { |o| o.puts(erubis.evaluate(options)) }
    Dir.chdir "#{options[:jobs_path]}"
    $logger.debug(Dir.pwd)
    cmd = "bsub < #{shell_file}"
    jobnumber = submit(cmd, options)
    options[:jobs] << Job.new(jobnumber, cmd, "PEND", Dir.pwd)
  end
  $logger.debug(options[:jobs])
end
# Entry point: parses options, prepares the per-dataset statistics directory
# tree, dispatches one benchmark run per selected algorithm, and blocks until
# all submitted cluster jobs are finished.
# argv[0] is the dataset name, argv[1] the root of the results tree.
def run(argv)
  options = setup_options(argv)
  dataset = argv[0]
  source_of_tree = argv[1]
  options[:aligner_benchmark] = File.expand_path(File.dirname(__FILE__))
  # Results go to
  options[:out_directory] = "#{source_of_tree}/statistics/#{options[:species]}_#{dataset}"
  begin
    Dir.mkdir(options[:out_directory])
  rescue SystemCallError
    if Dir.exist?(options[:out_directory])
      # Fix: was a bare `logger`, which is undefined here and raised NameError.
      $logger.warn("Directory #{options[:out_directory]} exists!")
    else
      $logger.error("Can't create directory #{options[:out_directory]}!")
      raise("Trouble creating directory, log for details.")
    end
  end
  #setup_logger(options[:log_level])
  $logger.info("Hallo")
  $logger.debug("DEBUG")
  $logger.debug(options)
  $logger.debug(argv)
  # "all" expands to the globally configured algorithm list.
  algorithms = options[:algorithm] == "all" ? $algorithms : [options[:algorithm]]
  get_truth_files(options, source_of_tree, dataset)
  $logger.debug("Algorithms = #{algorithms}")
  options[:jobs] = []
  algorithms.each do |alg|
    options[:jobs_path] = "#{source_of_tree}/jobs/#{alg}"
    begin
      Dir.mkdir("#{options[:out_directory]}/#{alg}")
    rescue SystemCallError
      if Dir.exist?("#{options[:out_directory]}/#{alg}")
        $logger.warn("Directory #{options[:out_directory]}/#{alg} exists!")
      else
        $logger.error("Can't create directory #{options[:out_directory]}/#{alg}!")
        raise("Trouble creating directory, log for details.")
      end
    end
    case alg
    when :tophat2
      run_tophat2(options, source_of_tree, dataset)
    when :star
      puts "LALAA"
    end
  end
  monitor_jobs(options[:jobs])
  #puts options[:cut_off]
  $logger.info("All done!")
end
# Run the pipeline only when this file is executed directly (not when required).
if __FILE__ == $0
  run(ARGV)
end
|
# Production environment settings. The app is served from the /matrix
# subdirectory, so the URL root and the asset prefix are overridden here.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Operate in the subdirectory sparse.tamu.edu/matrix
  config.action_controller.relative_url_root = '/matrix'
  config.assets.prefix = '/matrix/assets'
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "suite_sparse_matrix_collection_website_#{Rails.env}"
  config.action_mailer.perform_caching = false
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
  # Configure Mailer
  config.action_mailer.delivery_method = :mailgun
  config.action_mailer.mailgun_settings = {
    api_key: ENV['MAILGUN_API_KEY'],
    domain: ENV['MAILGUN_DOMAIN']
  }
end
Fix subdirectory configuration for production
# Production environment settings (updated). The app is served from the
# /matrix subdirectory; both the Action Controller setting and the top-level
# config.relative_url_root are set — presumably because different parts of the
# stack (routing vs. asset pipeline) consult different settings.
# NOTE(review): confirm both are still required on the deployed Rails version.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Operate in the subdirectory sparse.tamu.edu/matrix
  config.action_controller.relative_url_root = '/matrix'
  config.relative_url_root = '/matrix'
  config.assets.prefix = '/matrix/assets'
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "suite_sparse_matrix_collection_website_#{Rails.env}"
  config.action_mailer.perform_caching = false
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
  # Configure Mailer
  config.action_mailer.delivery_method = :mailgun
  config.action_mailer.mailgun_settings = {
    api_key: ENV['MAILGUN_API_KEY'],
    domain: ENV['MAILGUN_DOMAIN']
  }
end
|
# typed: false
# frozen_string_literal: true

require "cli/parser"
require "formula"

# `brew cat`: print the source file of a formula or cask.
module Homebrew
  extend T::Sig

  module_function

  # Argument parser/usage definition for `brew cat`.
  sig { returns(CLI::Parser) }
  def cat_args
    Homebrew::CLI::Parser.new do
      description <<~EOS
        Display the source of a <formula> or <cask>.
      EOS
      switch "--formula", "--formulae",
             description: "Treat all named arguments as formulae."
      switch "--cask", "--casks",
             description: "Treat all named arguments as casks."
      conflicts "--formula", "--cask"
      named_args [:formula, :cask], number: 1
    end
  end

  # Prints the named formula/cask source, using `bat` as the pager when
  # HOMEBREW_BAT is configured (installing it on first use), plain `cat` otherwise.
  def cat
    args = cat_args.parse
    cd HOMEBREW_REPOSITORY
    pager = if Homebrew::EnvConfig.bat?
      unless Formula["bat"].any_version_installed?
        # The user might want to capture the output of `brew cat ...`
        # Redirect stdout to stderr
        redirect_stdout($stderr) do
          ohai "Installing `bat` for displaying <formula>/<cask> source..."
          safe_system HOMEBREW_BREW_FILE, "install", "bat"
        end
      end
      ENV["BAT_CONFIG_PATH"] = Homebrew::EnvConfig.bat_config_path
      Formula["bat"].opt_bin/"bat"
    else
      "cat"
    end
    safe_system pager, args.named.to_paths.first
  end
end
dev-cmd/cat: require "formula" only when necessary
# typed: false
# frozen_string_literal: true

require "cli/parser"

# `brew cat`: print the source file of a formula or cask.
module Homebrew
  extend T::Sig

  module_function

  # Argument parser/usage definition for `brew cat`.
  sig { returns(CLI::Parser) }
  def cat_args
    Homebrew::CLI::Parser.new do
      description <<~EOS
        Display the source of a <formula> or <cask>.
      EOS
      switch "--formula", "--formulae",
             description: "Treat all named arguments as formulae."
      switch "--cask", "--casks",
             description: "Treat all named arguments as casks."
      conflicts "--formula", "--cask"
      named_args [:formula, :cask], number: 1
    end
  end

  # Prints the named formula/cask source, using `bat` as the pager when
  # HOMEBREW_BAT is configured (installing it on first use), plain `cat` otherwise.
  def cat
    args = cat_args.parse
    cd HOMEBREW_REPOSITORY
    pager = if Homebrew::EnvConfig.bat?
      # "formula" is loaded lazily here so the common `cat` path stays fast.
      require "formula"
      unless Formula["bat"].any_version_installed?
        # The user might want to capture the output of `brew cat ...`
        # Redirect stdout to stderr
        redirect_stdout($stderr) do
          ohai "Installing `bat` for displaying <formula>/<cask> source..."
          safe_system HOMEBREW_BREW_FILE, "install", "bat"
        end
      end
      ENV["BAT_CONFIG_PATH"] = Homebrew::EnvConfig.bat_config_path
      Formula["bat"].opt_bin/"bat"
    else
      "cat"
    end
    safe_system pager, args.named.to_paths.first
  end
end
|
# frozen_string_literal: true

# Minimal session management for the dummy app: "signing in" creates or finds
# a user by name and stores its id in the session.
class SessionsController < ApplicationController
  def new
  end

  # Finds/creates the user named in params, updates its admin flag, signs it
  # in, then returns to the referring page unless we came from the sign-in page.
  def create
    session[:user_id] = Thredded.user_class.find_or_initialize_by(name: params[:name])
      .tap { |user| user.update!(admin: params[:admin]) }.id
    if request.referer != new_user_session_url
      redirect_back fallback_location: root_path
    else
      redirect_to root_path
    end
  end

  # Signs the current user out by clearing the session.
  def destroy
    session[:user_id] = nil
    redirect_to root_path
  end
end
Dummy app: Redirect back with 307 on sign in
Previously, if an anonymous user clicked on Follow and then signed in,
they would be redirected to a `topic/follow` follow page with GET
and would hit a 404.
The 307 status code preserves the original request method when
redirecting and thus makes this work.
# frozen_string_literal: true

# Minimal session management for the dummy app: "signing in" creates or finds
# a user by name and stores its id in the session.
class SessionsController < ApplicationController
  def new
  end

  # Finds/creates the user named in params, updates its admin flag, signs it
  # in, then returns to the referring page unless we came from the sign-in page.
  def create
    session[:user_id] = Thredded.user_class.find_or_initialize_by(name: params[:name])
      .tap { |user| user.update!(admin: params[:admin]) }.id
    if request.referer != new_user_session_url
      # 307 preserves the original request method (e.g. POST to topic/follow)
      # instead of downgrading the redirect to GET.
      redirect_back fallback_location: root_path, status: 307
    else
      redirect_to root_path
    end
  end

  # Signs the current user out by clearing the session.
  def destroy
    session[:user_id] = nil
    redirect_to root_path
  end
end
|
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Smoke tests against the live PyBossa service — they hit the network.
class PyBossa::API
  # We don't want to test the PyBossa API. We just want to check that the
  # wrapper works.
  describe PyBossa::API do
    # A known short_name, fetched once and reused by the field-filter spec.
    EXAMPLE_SHORT_NAME = PyBossa::API.many('app').first['short_name']
    describe '#many' do
      it 'should return a non-empty array of hashes' do
        response = PyBossa::API.many 'app'
        response.should be_an(Array)
        response.should have_at_least(1).item
        response.each{|x| x.should be_a(Hash)}
      end
      it 'should respect the :limit argument' do
        PyBossa::API.many('app', limit: 1).should have(1).item
      end
      it 'should respect a field argument' do
        PyBossa::API.many('app', short_name: EXAMPLE_SHORT_NAME).find{|result|
          result['short_name'] == EXAMPLE_SHORT_NAME
        }.should_not be_nil
      end
    end
    describe '#one' do
      it 'should return a hash' do
        PyBossa::API.one('app', PyBossa::API.many('app').first['id']).should be_a(Hash)
      end
    end
    describe '#create' do
      pending "Must use API key to test this method"
    end
    describe '#update' do
      pending "Must use API key to test this method"
    end
    describe '#delete' do
      pending "Must use API key to test this method"
    end
  end
end
Update spec to use the new `retrieve` method name
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Smoke tests against the live PyBossa service — they hit the network.
class PyBossa::API
  # We don't want to test the PyBossa API. We just want to check that the
  # wrapper works.
  describe PyBossa::API do
    # A known short_name, fetched once and reused by the field-filter spec.
    EXAMPLE_SHORT_NAME = PyBossa::API.many('app').first['short_name']
    describe '#many' do
      it 'should return a non-empty array of hashes' do
        response = PyBossa::API.many 'app'
        response.should be_an(Array)
        response.should have_at_least(1).item
        response.each{|x| x.should be_a(Hash)}
      end
      it 'should respect the :limit argument' do
        PyBossa::API.many('app', limit: 1).should have(1).item
      end
      it 'should respect a field argument' do
        PyBossa::API.many('app', short_name: EXAMPLE_SHORT_NAME).find{|result|
          result['short_name'] == EXAMPLE_SHORT_NAME
        }.should_not be_nil
      end
    end
    describe '#one' do
      it 'should return a hash' do
        # NOTE(review): the wrapper method was renamed from #one to #retrieve,
        # but the describe label still says '#one'.
        PyBossa::API.retrieve('app', PyBossa::API.many('app').first['id']).should be_a(Hash)
      end
    end
    describe '#create' do
      pending "Must use API key to test this method"
    end
    describe '#update' do
      pending "Must use API key to test this method"
    end
    describe '#delete' do
      pending "Must use API key to test this method"
    end
  end
end
|
#!/usr/bin/env ruby
## grab metrics from AWS cloudwatch
### David Lutz
### 2012-07-15
### gem install fog --no-ri --no-rdoc
# Make the sibling conf/ and lib/ directories loadable.
$:.unshift File.join(File.dirname(__FILE__), *%w[.. conf])
$:.unshift File.join(File.dirname(__FILE__), *%w[.. lib])
require 'config'
require 'Sendit'
require 'rubygems' if RUBY_VERSION < "1.9"
require 'fog'
require 'json'
require 'optparse'
# Defaults: query the window from 180s ago to 120s ago (CloudWatch datapoints lag).
options = {
  :start_offset => 180,
  :end_offset => 120
}
optparse = OptionParser.new do|opts|
  opts.banner = "Usage: AWScloudwatchELB.rb [options] lb_names"
  opts.on( '-s', '--start-offset [OFFSET_SECONDS]', 'Time in seconds to offset from current time as the start of the metrics period. Default 180') do |s|
    options[:start_offset] = s
  end
  opts.on( '-e', '--end-offset [OFFSET_SECONDS]', 'Time in seconds to offset from current time as the start of the metrics period. Default 120') do |e|
    options[:end_offset] = e
  end
  # This displays the help screen, all programs are
  # assumed to have this option.
  opts.on( '-h', '--help', '' ) do
    puts opts
    exit
  end
end
optparse.parse!
if ARGV.length == 0
  puts "Must specifiy at least one load balancer name to pull metrics for"
  exit 1
end
lbs = []
ARGV.each do |lb|
  lbs << lb
end
startTime = Time.now.utc - options[:start_offset].to_i
endTime = Time.now.utc - options[:end_offset].to_i
# ELB metric name => CloudWatch statistic to request for it.
metricNames = {"RequestCount" => "Sum",
  "HealthyHostCount" => "Minimum",
  "UnHealthyHostCount" => "Maximum",
  "HTTPCode_ELB_5XX" => "Sum",
  "HTTPCode_ELB_4XX" => "Sum",
  "HTTPCode_Backend_2XX" => "Sum",
  "HTTPCode_Backend_3XX" => "Sum",
  "HTTPCode_Backend_4XX" => "Sum",
  "HTTPCode_Backend_5XX" => "Sum"
}
unit = 'Count'
cloudwatch = Fog::AWS::CloudWatch.new(:aws_secret_access_key => $awssecretkey, :aws_access_key_id => $awsaccesskey, :region => $awsregion)
lbs.each do |table|
  metricNames.each do |metricName, statistic|
    responses = cloudwatch.get_metric_statistics({
      'Statistics' => statistic,
      'StartTime' => startTime.iso8601,
      'EndTime' => endTime.iso8601,
      'Period' => 60,
      'Unit' => unit,
      'MetricName' => metricName,
      'Namespace' => 'AWS/ELB',
      'Dimensions' => [{
        'Name' => 'LoadBalancerName',
        'Value' => table
      }]
    }).body['GetMetricStatisticsResult']['Datapoints']
    responses.each do |response|
      metricpath = "AWScloudwatch.ELB." + table + "." + metricName
      begin
        metricvalue = response[statistic]
        metrictimestamp = response["Timestamp"].to_i.to_s
      rescue
        # Missing/odd datapoint: report zero at the end of the query window.
        metricvalue = 0
        metrictimestamp = endTime.to_i.to_s
      end
      Sendit metricpath, metricvalue, metrictimestamp
    end
  end
end
#### also get latency (measured in seconds)
# Here the hash is inverted: statistic => metric name (both read Latency).
metricNames = {"Maximum" => "Latency",
  "Average" => "Latency",
}
unit = 'Seconds'
# Fix: include :region, matching the connection used for the count metrics
# above — it was omitted here, so latency was fetched from the default region
# instead of $awsregion.
cloudwatch = Fog::AWS::CloudWatch.new(:aws_secret_access_key => $awssecretkey, :aws_access_key_id => $awsaccesskey, :region => $awsregion)
lbs.each do |table|
  metricNames.each do |statistic, metricName|
    responses = cloudwatch.get_metric_statistics({
      'Statistics' => statistic,
      'StartTime' => startTime.iso8601,
      'EndTime' => endTime.iso8601,
      'Period' => 60,
      'Unit' => unit,
      'MetricName' => metricName,
      'Namespace' => 'AWS/ELB',
      'Dimensions' => [{
        'Name' => 'LoadBalancerName',
        'Value' => table
      }]
    }).body['GetMetricStatisticsResult']['Datapoints']
    metricpath = "AWScloudwatch.ELB." + table + "." + metricName + "_" + statistic
    responses.each do |response|
      begin
        metricvalue = response[statistic]
        metrictimestamp = response["Timestamp"].to_i.to_s
      rescue
        # Missing/odd datapoint: report zero at the end of the query window.
        metricvalue = 0
        metrictimestamp = endTime.to_i.to_s
      end
      Sendit metricpath, metricvalue, metrictimestamp
    end
  end
end
AWS ELB refactor
Now threaded, can pass "--all" rather than list out explicit ELBs,
and use the timer/retry library
#!/usr/bin/env ruby
## grab metrics from AWS cloudwatch
### David Lutz
### 2012-07-15
### gem install fog --no-ri --no-rdoc
# Make the sibling conf/ and lib/ directories loadable.
$:.unshift File.join(File.dirname(__FILE__), *%w[.. conf])
$:.unshift File.join(File.dirname(__FILE__), *%w[.. lib])
require 'config'
require 'Sendit'
require 'rubygems' if RUBY_VERSION < "1.9"
require 'fog'
require 'json'
require 'optparse'
require 'thread'
# Prefer system_timer; fall back to the stdlib Timeout module.
begin
  require 'system_timer'
  # Fix: SomeTimer was only assigned in the rescue branch, so when
  # system_timer loaded successfully the constant stayed undefined and every
  # later SomeTimer.timeout call raised NameError.
  SomeTimer = SystemTimer
rescue LoadError
  require 'timeout'
  SomeTimer = Timeout
end
# Defaults: 180s..120s-ago query window, single worker thread.
options = {
  :start_offset => 180,
  :end_offset => 120,
  :threads => 1
}
optparse = OptionParser.new do|opts|
  opts.banner = "Usage: AWScloudwatchELB.rb [options] [--all|lb_names]"
  opts.on( '-a', '--all', 'Collect metrics for all ELBs') do
    options[:all] = true
  end
  opts.on( '-s', '--start-offset [OFFSET_SECONDS]', 'Time in seconds to offset from current time as the start of the metrics period. Default 180') do |s|
    options[:start_offset] = s
  end
  opts.on( '-e', '--end-offset [OFFSET_SECONDS]', 'Time in seconds to offset from current time as the start of the metrics period. Default 120') do |e|
    options[:end_offset] = e
  end
  opts.on('-t', '--threads [NUMBER_OF_THREADS]', 'Number of threads to use for querying CloudWatch. Default 1') do |t|
    options[:threads] = t
  end
  # This displays the help screen, all programs are
  # assumed to have this option.
  opts.on( '-h', '--help', '' ) do
    puts opts
    exit
  end
end
optparse.parse!
elb = Fog::AWS::ELB.new(:aws_secret_access_key => $awssecretkey, :aws_access_key_id => $awsaccesskey, :region => $awsregion)
# Global so the worker threads in fetch_and_send share one connection.
$cloudwatch = Fog::AWS::CloudWatch.new(:aws_secret_access_key => $awssecretkey, :aws_access_key_id => $awsaccesskey, :region => $awsregion)
# Build the list of load balancer names to query: either every ELB in the
# account (--all) or the names given on the command line.
lbs = []
if options[:all]
  my_lb_list = elb.load_balancers.all
  my_lb_list.each do |my_lb|
    lbs << my_lb.id
  end
elsif ARGV.length == 0
  puts "Must specifiy at least one load balancer name to pull metrics for"
  exit 1
else
  ARGV.each do |lb|
    lbs << lb
  end
end
$startTime = Time.now.utc - options[:start_offset].to_i
$endTime = Time.now.utc - options[:end_offset].to_i
# ELB metric name => list of CloudWatch statistics to request for it.
$metricNames = {"RequestCount" => ["Sum"],
  "HealthyHostCount" => ["Minimum"],
  "UnHealthyHostCount" => ["Maximum"],
  "HTTPCode_ELB_5XX" => ["Sum"],
  "HTTPCode_ELB_4XX" => ["Sum"],
  "HTTPCode_Backend_2XX" => ["Sum"],
  "HTTPCode_Backend_3XX" => ["Sum"],
  "HTTPCode_Backend_4XX" => ["Sum"],
  "HTTPCode_Backend_5XX" => ["Sum"],
  "Latency" => ["Maximum", "Average"]
}
# Fetches every statistic in $metricNames for one load balancer from
# CloudWatch and forwards each datapoint to graphite via Sendit.
# Each query is wrapped in SomeTimer.timeout($cloudwatchtimeout) and retried
# up to $cloudwatchretries times.
def fetch_and_send(lb)
  $metricNames.each do |metricName, statistics|
    # Latency is reported in seconds; all other ELB metrics are counts.
    unit = metricName == "Latency" ? 'Seconds' : 'Count'
    statistics.each do |statistic|
      # Fix: give every query its own retry budget (it was shared across all
      # metrics) and start from an empty Array — the original initialised
      # responses to '' once, so a query that kept failing either crashed on
      # String#each or re-sent the previous metric's stale datapoints.
      retries = $cloudwatchretries
      responses = []
      begin
        SomeTimer.timeout($cloudwatchtimeout) do
          responses = $cloudwatch.get_metric_statistics({
            'Statistics' => statistic,
            'StartTime' => $startTime.iso8601,
            'EndTime' => $endTime.iso8601,
            'Period' => 60,
            'Unit' => unit,
            'MetricName' => metricName,
            'Namespace' => 'AWS/ELB',
            'Dimensions' => [{
              'Name' => 'LoadBalancerName',
              'Value' => lb
            }]
          }).body['GetMetricStatisticsResult']['Datapoints']
        end
      rescue => e
        puts "error fetching metric :: " + metricName + " :: " + lb
        puts "\terror: #{e}"
        retries -= 1
        puts "\tretries left: #{retries}"
        retry if retries > 0
      end
      responses.each do |response|
        metricpath = "AWScloudwatch.ELB." + lb + "." + metricName
        # Latency is queried with several statistics; disambiguate the path.
        metricpath += "_#{statistic}" if metricName == 'Latency'
        begin
          metricvalue = response[statistic]
          metrictimestamp = response["Timestamp"].to_i.to_s
        rescue
          metricvalue = 0
          # Fix: was the bare local `endTime`, which does not exist inside this
          # method and raised NameError; the script stores it in $endTime.
          metrictimestamp = $endTime.to_i.to_s
        end
        Sendit metricpath, metricvalue, metrictimestamp
      end
    end
  end
end
# Simple thread pool: workers pull load balancer names off a shared queue
# until it is drained.
work_q = Queue.new
lbs.each{|lb| work_q.push lb}
workers = (0...options[:threads].to_i).map do
  Thread.new do
    begin
      # pop(true) is non-blocking and raises ThreadError once the queue is
      # empty, which is how each worker knows to exit.
      while lb = work_q.pop(true)
        fetch_and_send(lb)
      end
    rescue ThreadError
    end
  end
end; "ok"
workers.map(&:join); "ok"
|
require "spec_helper"

# End-to-end walkthrough of the checklist question flow, written in
# given/when/then helper style.
RSpec.feature "Questions workflow", type: :feature do
  scenario "business questions" do
    when_i_visit_the_checklist_flow
    and_i_answer_business_questions
    then_i_should_see_the_results_page
    # NOTE(review): the action assertions are commented out — confirm whether
    # they are meant to be re-enabled.
    #and_i_should_see_a_pet_action
    #and_i_should_see_a_tourism_action
  end
  scenario "citizen questions" do
    when_i_visit_the_checklist_flow
    and_i_do_not_answer_business_questions
    and_i_answer_citizen_questions
    then_i_should_see_the_results_page
    #and_i_should_see_a_pet_action
    #and_i_should_not_see_a_tourism_action
  end
  scenario "skip all questions" do
    when_i_visit_the_checklist_flow
    and_i_dont_answer_enough_questions
    then_i_should_see_the_no_results_page
  end
  def when_i_visit_the_checklist_flow
    visit checklist_questions_path
  end
  def and_i_do_not_answer_business_questions
    answer_question("do-you-own-a-business", "No")
  end
  def and_i_dont_answer_enough_questions
    answer_question("do-you-own-a-business")
    answer_question("nationality")
    answer_question("living", "Rest of world")
    answer_question("employment")
    answer_question("travelling")
  end
  def and_i_answer_business_questions
    answer_question("do-you-own-a-business", "Yes")
    answer_question("sector-business-area", "Tourism")
    answer_question("business-activity")
    answer_question("employ-eu-citizens", "No")
    answer_question("personal-data", "No")
    answer_question("intellectual-property", "No")
    answer_question("eu-uk-government-funding", "No")
    answer_question("public-sector-procurement", "No")
    and_i_answer_citizen_questions
  end
  def and_i_answer_citizen_questions
    answer_question("nationality", "UK")
    answer_question("living", "Rest of world")
    answer_question("employment")
    answer_question("travelling", "Yes", "To the EU")
    answer_question("activities", "Bring your pet")
    answer_question("returning", "Yes")
  end
  def then_i_should_see_the_results_page
    expect(page).to have_content I18n.t!("checklists_results.title")
  end
  def then_i_should_see_the_no_results_page
    expect(page).to have_content I18n.t!("checklists_results.title_no_actions")
  end
  def and_i_should_see_a_pet_action
    action_is_shown("S009")
  end
  def and_i_should_not_see_a_tourism_action
    action_not_shown("T063")
  end
  def and_i_should_see_a_tourism_action
    action_is_shown("T063")
  end
  def action_not_shown(key)
    action = Checklists::Action.find_by_id(key)
    expect(page).to_not have_link(action.title, href: action.title_url)
  end
  # Asserts the action's title, lead time, consequence (and guidance link,
  # when present) all appear on the results page.
  def action_is_shown(key)
    action = Checklists::Action.find_by_id(key)
    expect(page).to have_content action.title
    expect(page).to have_content action.lead_time
    expect(page).to have_content action.consequence
    if action.guidance_link_text
      expect(page).to have_link(action.guidance_link_text, href: action.guidance_url)
    end
  end
  # Answers the question identified by +key+ by clicking each given option
  # label and advancing the flow.
  def answer_question(key, *options)
    question = Checklists::Question.find_by_key(key)
    expect(page).to have_content(question.text)
    options.each { |o| find_field(o).click }
    click_on "Next"
  end
end
Reenable action checks for checklist feature
require "spec_helper"

# End-to-end walkthrough of the checklist question flow, written in
# given/when/then helper style; the action-visibility checks are enabled here.
RSpec.feature "Questions workflow", type: :feature do
  scenario "business questions" do
    when_i_visit_the_checklist_flow
    and_i_answer_business_questions
    then_i_should_see_the_results_page
    and_i_should_see_a_pet_action
    and_i_should_see_a_tourism_action
  end
  scenario "citizen questions" do
    when_i_visit_the_checklist_flow
    and_i_do_not_answer_business_questions
    and_i_answer_citizen_questions
    then_i_should_see_the_results_page
    and_i_should_see_a_pet_action
    and_i_should_not_see_a_tourism_action
  end
  scenario "skip all questions" do
    when_i_visit_the_checklist_flow
    and_i_dont_answer_enough_questions
    then_i_should_see_the_no_results_page
  end
  def when_i_visit_the_checklist_flow
    visit checklist_questions_path
  end
  def and_i_do_not_answer_business_questions
    answer_question("do-you-own-a-business", "No")
  end
  def and_i_dont_answer_enough_questions
    answer_question("do-you-own-a-business")
    answer_question("nationality")
    answer_question("living", "Rest of world")
    answer_question("employment")
    answer_question("travelling")
  end
  def and_i_answer_business_questions
    answer_question("do-you-own-a-business", "Yes")
    answer_question("sector-business-area", "Tourism")
    answer_question("business-activity")
    answer_question("employ-eu-citizens", "No")
    answer_question("personal-data", "No")
    answer_question("intellectual-property", "No")
    answer_question("eu-uk-government-funding", "No")
    answer_question("public-sector-procurement", "No")
    and_i_answer_citizen_questions
  end
  def and_i_answer_citizen_questions
    answer_question("nationality", "UK")
    answer_question("living", "UK")
    answer_question("employment")
    answer_question("travelling", "Yes", "To the EU")
    answer_question("activities", "Bring your pet")
  end
  def then_i_should_see_the_results_page
    expect(page).to have_content I18n.t!("checklists_results.title")
  end
  def then_i_should_see_the_no_results_page
    expect(page).to have_content I18n.t!("checklists_results.title_no_actions")
  end
  def and_i_should_see_a_pet_action
    action_is_shown("S009")
  end
  def and_i_should_not_see_a_tourism_action
    action_not_shown("T063")
  end
  def and_i_should_see_a_tourism_action
    action_is_shown("T063")
  end
  def action_not_shown(key)
    action = Checklists::Action.find_by_id(key)
    expect(page).to_not have_link(action.title, href: action.title_url)
  end
  # Asserts the action's title, lead time, consequence (and guidance link,
  # when present) all appear on the results page.
  def action_is_shown(key)
    action = Checklists::Action.find_by_id(key)
    expect(page).to have_content action.title
    expect(page).to have_content action.lead_time
    expect(page).to have_content action.consequence
    if action.guidance_link_text
      expect(page).to have_link(action.guidance_link_text, href: action.guidance_url)
    end
  end
  # Answers the question identified by +key+ by clicking each given option
  # label and advancing the flow.
  def answer_question(key, *options)
    question = Checklists::Question.find_by_key(key)
    expect(page).to have_content(question.text)
    options.each { |o| find_field(o).click }
    click_on "Next"
  end
end
|
require "spec_helper"

# Specs for the Ruby wrapper around React (run client-side via the js: true
# harness: evaluate_ruby / expect_evaluate_ruby execute in the browser).
describe 'React', js: true do
  describe "is_valid_element?" do
    it "should return true if passed a valid element" do
      client_option deprecation_warnings: :off
      expect_evaluate_ruby do
        element = React::Element.new(JS.call(:eval, "React.createElement('div')"))
        React.is_valid_element?(element)
      end.to eq(true)
    end
    it "should return false is passed a non React element" do
      client_option deprecation_warnings: :off
      expect_evaluate_ruby do
        element = React::Element.new(JS.call(:eval, "{}"))
        React.is_valid_element?(element)
      end.to eq(false)
    end
  end
  describe "create_element" do
    it "should create a valid element with only tag" do
      client_option deprecation_warnings: :off
      expect_evaluate_ruby do
        element = React.create_element('div')
        React.is_valid_element?(element)
      end.to eq(true)
    end
    context "with block" do
      it "should create a valid element with text as only child when block yield String" do
        client_option deprecation_warnings: :off
        # Constants (ELEMENT) are used to pass state between browser evaluations.
        evaluate_ruby do
          ELEMENT = React.create_element('div') { "lorem ipsum" }
        end
        expect_evaluate_ruby("React.is_valid_element?(ELEMENT)").to eq(true)
        expect_evaluate_ruby("ELEMENT.props.children").to eq("lorem ipsum")
      end
      it "should create a valid element with children as array when block yield Array of element" do
        client_option deprecation_warnings: :off
        evaluate_ruby do
          ELEMENT = React.create_element('div') do
            [React.create_element('span'), React.create_element('span'), React.create_element('span')]
          end
        end
        expect_evaluate_ruby("React.is_valid_element?(ELEMENT)").to eq(true)
        expect_evaluate_ruby("ELEMENT.props.children.length").to eq(3)
      end
      it "should render element with children as array when block yield Array of element" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element('div') do
            [React.create_element('span'), React.create_element('span'), React.create_element('span')]
          end
          dom_node = React::Test::Utils.render_into_document(element)
          dom_node.JS[:children].JS[:length]
        end.to eq(3)
      end
    end
    describe "custom element" do
      before :each do
        # A minimal component class defined in the browser for each example.
        on_client do
          class Foo < React::Component::Base
            def initialize(native)
              @native = native
            end
            def render
              React.create_element("div") { "lorem" }
            end
            def props
              Hash.new(@native.JS[:props])
            end
          end
        end
      end
      it "should render element with only one children correctly" do
        client_option deprecation_warnings: :off
        evaluate_ruby do
          element = React.create_element(Foo) { React.create_element('span') }
          INSTANCE = React::Test::Utils.render_into_document(element)
          true
        end
        expect_evaluate_ruby("INSTANCE.props[:children].is_a?(Array)").to be_falsy
        expect_evaluate_ruby("INSTANCE.props[:children][:type]").to eq("span")
      end
      it "should render element with more than one children correctly" do
        client_option deprecation_warnings: :off
        evaluate_ruby do
          element = React.create_element(Foo) { [React.create_element('span'), React.create_element('span')] }
          INSTANCE = React::Test::Utils.render_into_document(element)
          true
        end
        expect_evaluate_ruby("INSTANCE.props[:children].is_a?(Array)").to be_truthy
        expect_evaluate_ruby("INSTANCE.props[:children].length").to eq(2)
      end
      it "should create a valid element provided class defined `render`" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element(Foo)
          React.is_valid_element?(element)
        end.to eq(true)
      end
      it "should allow creating with properties" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element(Foo, foo: "bar")
          element.props.foo
        end.to eq("bar")
      end
      it "should raise error if provided class doesn't defined `render`" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          begin
            React.create_element(Array)
          rescue
            'failed'
          end
        end.to eq('failed')
      end
      it "should use the same instance for the same ReactComponent" do
        client_option deprecation_warnings: :off
        mount 'Foo' do
          Foo.class_eval do
            attr_accessor :a
            def initialize(n)
              self.a = 10
            end
            def component_will_mount
              self.a = 20
            end
            def render
              React.create_element("div") { self.a.to_s }
            end
          end
        end
        expect(page.body[-60..-19]).to include("<div>20</div>")
      end
      it "should match the instance cycle to ReactComponent life cycle" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          Foo.class_eval do
            def initialize(native)
              @@count ||= 0
              @@count += 1
            end
            def render
              React.create_element("div")
            end
            def self.count
              @@count
            end
          end
          React::Test::Utils.render_component_into_document(Foo)
          React::Test::Utils.render_component_into_document(Foo)
          Foo.count
        end.to eq(2)
      end
    end
    describe "create element with properties" do
      it "should enforce snake-cased property name" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element("div", class_name: "foo")
          element.props.className
        end.to eq("foo")
      end
      it "should allow custom property" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element("div", foo: "bar")
          element.props.foo
        end.to eq("bar")
      end
      it "should not camel-case custom property" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element("div", foo_bar: "foo")
          element.props.foo_bar
        end.to eq("foo")
      end
    end
    describe "class_name helpers (React.addons.classSet)" do
      it "should not alter behavior when passing a string" do
        client_option deprecation_warnings: :off
        expect_evaluate_ruby do
          element = React.create_element("div", class_name: "foo bar")
          element.props.className
        end.to eq("foo bar")
      end
    end
  end
end
describe "render" do
# NOTE(review): was an async spec originally; unclear how to port the async
# behavior, so it renders synchronously and asserts on the resulting DOM.
it "should render element to DOM" do
  client_option deprecation_warnings: :off
  evaluate_ruby do
    DIV = JS.call(:eval, 'document.createElement("div")')
    React.render(React.create_element('span') { "lorem" }, DIV)
    '' # evaluate_ruby serializes the result; return a JSON-friendly value
  end
  # The rendered span should be the sole child of the container div.
  expect_evaluate_ruby("DIV.JS[:children].JS[0].JS[:tagName]").to eq("SPAN")
  expect_evaluate_ruby("DIV.JS[:textContent]").to eq("lorem")
end
it "should work without providing a block" do
client_option deprecation_warnings: :off
expect_evaluate_ruby do
begin
div = JS.call(:eval, 'document.createElement("div")')
React::Test::Utils.render_into_document(React.create_element('span') { "lorem" }, div)
true
rescue
false
end
end.to be_truthy
expect(page.body[-60..-10]).to include('<div><span>lorem</span></div>')
end
it "returns the actual ruby instance" do
client_option deprecation_warnings: :off
expect_evaluate_ruby do
class Foo
def render
React.create_element("div") { "lorem" }
end
end
div = JS.call(:eval, 'document.createElement("div")')
instance = React.render(React.create_element(Foo), div)
instance.is_a?(Foo)
end.to be_truthy
end
it "returns the actual DOM node" do
client_option deprecation_warnings: :off
expect_evaluate_ruby do
div = JS.call(:eval, 'document.createElement("div")')
node = React.render(React.create_element('span') { "lorem" }, div)
node.JS['nodeType']
end.to eq(1)
end
end
describe "unmount_component_at_node" do
  it "should unmount component at node" do
    client_option deprecation_warnings: :off
    # Originally run_async: unmount was passed in a block to run_async, which
    # passed a block to React.render. Emulating that failed because during
    # render _getOpalInstance is not yet defined; it only exists once the
    # component has mounted. So we unmount after render instead.
    expect_evaluate_ruby do
      div = JS.call(:eval, 'document.createElement("div")')
      React.render(React.create_element('span') { "lorem" }, div )
      React.unmount_component_at_node(div)
    end.to eq(true)
  end
end
end
Fixes deprecation warnings in the React specs by silencing them with `client_option deprecation_warnings: :off`.
require "spec_helper"
describe 'React', js: true do
# React.is_valid_element? must accept wrapped native React elements and
# reject arbitrary JS objects.
describe "is_valid_element?" do
  it "should return true if passed a valid element" do
    expect_evaluate_ruby do
      element = React::Element.new(JS.call(:eval, "React.createElement('div')"))
      React.is_valid_element?(element)
    end.to eq(true)
  end
  # Description typo fixed: "is passed" -> "if passed".
  it "should return false if passed a non React element" do
    expect_evaluate_ruby do
      element = React::Element.new(JS.call(:eval, "{}"))
      React.is_valid_element?(element)
    end.to eq(false)
  end
end
describe "create_element" do
# create_element with only a tag name yields a valid element.
it "should create a valid element with only tag" do
  expect_evaluate_ruby do
    element = React.create_element('div')
    React.is_valid_element?(element)
  end.to eq(true)
end
context "with block" do
  # A String-yielding block becomes the element's only child.
  it "should create a valid element with text as only child when block yield String" do
    evaluate_ruby do
      ELEMENT = React.create_element('div') { "lorem ipsum" }
    end
    expect_evaluate_ruby("React.is_valid_element?(ELEMENT)").to eq(true)
    expect_evaluate_ruby("ELEMENT.props.children").to eq("lorem ipsum")
  end
  # An Array-yielding block becomes the element's children array.
  it "should create a valid element with children as array when block yield Array of element" do
    evaluate_ruby do
      ELEMENT = React.create_element('div') do
        [React.create_element('span'), React.create_element('span'), React.create_element('span')]
      end
    end
    expect_evaluate_ruby("React.is_valid_element?(ELEMENT)").to eq(true)
    expect_evaluate_ruby("ELEMENT.props.children.length").to eq(3)
  end
  # The children from the block must survive an actual render.
  it "should render element with children as array when block yield Array of element" do
    expect_evaluate_ruby do
      element = React.create_element('div') do
        [React.create_element('span'), React.create_element('span'), React.create_element('span')]
      end
      dom_node = React::Test::Utils.render_into_document(element)
      dom_node.JS[:children].JS[:length]
    end.to eq(3)
  end
end
describe "custom element" do
before :each do
  # Define a minimal component class on the client for the examples below.
  on_client do
    class Foo < React::Component::Base
      def initialize(native)
        @native = native
      end
      def render
        React.create_element("div") { "lorem" }
      end
      def props
        # NOTE(review): presumably Opal's Hash.new wraps the native JS props
        # object here (not MRI's default-value semantics) — confirm.
        Hash.new(@native.JS[:props])
      end
    end
  end
end
# A single child is stored directly, not wrapped in an Array.
it "should render element with only one children correctly" do
  evaluate_ruby do
    element = React.create_element(Foo) { React.create_element('span') }
    INSTANCE = React::Test::Utils.render_into_document(element)
    true
  end
  expect_evaluate_ruby("INSTANCE.props[:children].is_a?(Array)").to be_falsy
  expect_evaluate_ruby("INSTANCE.props[:children][:type]").to eq("span")
end
# Multiple children are stored as an Array.
it "should render element with more than one children correctly" do
  evaluate_ruby do
    element = React.create_element(Foo) { [React.create_element('span'), React.create_element('span')] }
    INSTANCE = React::Test::Utils.render_into_document(element)
    true
  end
  expect_evaluate_ruby("INSTANCE.props[:children].is_a?(Array)").to be_truthy
  expect_evaluate_ruby("INSTANCE.props[:children].length").to eq(2)
end
it "should create a valid element provided class defined `render`" do
  expect_evaluate_ruby do
    element = React.create_element(Foo)
    React.is_valid_element?(element)
  end.to eq(true)
end
it "should allow creating with properties" do
  expect_evaluate_ruby do
    element = React.create_element(Foo, foo: "bar")
    element.props.foo
  end.to eq("bar")
end
# Description typo fixed: "doesn't defined" -> "doesn't define".
# Creating an element from a class without a `render` method must raise.
it "should raise error if provided class doesn't define `render`" do
  expect_evaluate_ruby do
    begin
      React.create_element(Array)
    rescue
      'failed'
    end
  end.to eq('failed')
end
# The Ruby instance backing a component persists across its lifecycle:
# state set in component_will_mount is visible in render.
it "should use the same instance for the same ReactComponent" do
  mount 'Foo' do
    Foo.class_eval do
      attr_accessor :a
      def initialize(n)
        self.a = 10
      end
      def component_will_mount
        self.a = 20
      end
      def render
        React.create_element("div") { self.a.to_s }
      end
    end
  end
  expect(page.body[-60..-19]).to include("<div>20</div>")
end
# Exactly one Ruby instance is created per mounted ReactComponent.
it "should match the instance cycle to ReactComponent life cycle" do
  expect_evaluate_ruby do
    Foo.class_eval do
      def initialize(native)
        @@count ||= 0
        @@count += 1
      end
      def render
        React.create_element("div")
      end
      def self.count
        @@count
      end
    end
    React::Test::Utils.render_component_into_document(Foo)
    React::Test::Utils.render_component_into_document(Foo)
    Foo.count
  end.to eq(2)
end
end
describe "create element with properties" do
  # snake_case prop names are converted to the camelCase React expects.
  it "should enforce snake-cased property name" do
    expect_evaluate_ruby do
      element = React.create_element("div", class_name: "foo")
      element.props.className
    end.to eq("foo")
  end
  it "should allow custom property" do
    expect_evaluate_ruby do
      element = React.create_element("div", foo: "bar")
      element.props.foo
    end.to eq("bar")
  end
  # Only recognized React props are camel-cased; custom ones pass through.
  it "should not camel-case custom property" do
    expect_evaluate_ruby do
      element = React.create_element("div", foo_bar: "foo")
      element.props.foo_bar
    end.to eq("foo")
  end
end
describe "class_name helpers (React.addons.classSet)" do
  # A plain string class_name is passed through unchanged.
  it "should not alter behavior when passing a string" do
    expect_evaluate_ruby do
      element = React.create_element("div", class_name: "foo bar")
      element.props.className
    end.to eq("foo bar")
  end
end
end
describe "render" do
# NOTE(review): was an async spec originally; unclear how to port the async
# behavior, so it renders synchronously and asserts on the resulting DOM.
it "should render element to DOM" do
  evaluate_ruby do
    DIV = JS.call(:eval, 'document.createElement("div")')
    React.render(React.create_element('span') { "lorem" }, DIV)
    '' # evaluate_ruby serializes the result; return a JSON-friendly value
  end
  # The rendered span should be the sole child of the container div.
  expect_evaluate_ruby("DIV.JS[:children].JS[0].JS[:tagName]").to eq("SPAN")
  expect_evaluate_ruby("DIV.JS[:textContent]").to eq("lorem")
end
# render_into_document must not require a block argument.
it "should work without providing a block" do
  expect_evaluate_ruby do
    begin
      div = JS.call(:eval, 'document.createElement("div")')
      React::Test::Utils.render_into_document(React.create_element('span') { "lorem" }, div)
      true
    rescue
      false
    end
  end.to be_truthy
  expect(page.body[-60..-10]).to include('<div><span>lorem</span></div>')
end
# React.render on a Ruby component class returns the Ruby instance.
it "returns the actual ruby instance" do
  expect_evaluate_ruby do
    class Foo
      def render
        React.create_element("div") { "lorem" }
      end
    end
    div = JS.call(:eval, 'document.createElement("div")')
    instance = React.render(React.create_element(Foo), div)
    instance.is_a?(Foo)
  end.to be_truthy
end
# React.render on a plain tag returns the DOM node (nodeType 1 == ELEMENT_NODE).
it "returns the actual DOM node" do
  expect_evaluate_ruby do
    div = JS.call(:eval, 'document.createElement("div")')
    node = React.render(React.create_element('span') { "lorem" }, div)
    node.JS['nodeType']
  end.to eq(1)
end
end
describe "unmount_component_at_node" do
  it "should unmount component at node" do
    # Originally run_async: unmount was passed in a block to run_async, which
    # passed a block to React.render. Emulating that failed because during
    # render _getOpalInstance is not yet defined; it only exists once the
    # component has mounted. So we unmount after render instead.
    expect_evaluate_ruby do
      div = JS.call(:eval, 'document.createElement("div")')
      React.render(React.create_element('span') { "lorem" }, div )
      React.unmount_component_at_node(div)
    end.to eq(true)
  end
end
end
|
# encoding: utf-8
require 'date'
require 'erb'
module GHI
module Formatting
class << self
attr_accessor :paginate
end
self.paginate = true # Default.
attr_accessor :paging
autoload :Colors, 'ghi/formatting/colors'
include Colors
CURSOR = {
:up => lambda { |n| "\e[#{n}A" },
:column => lambda { |n| "\e[#{n}G" },
:hide => "\e[?25l",
:show => "\e[?25h"
}
THROBBERS = [
%w(⠋ ⠙ ⠹ ⠸ ⠼ ⠴ ⠦ ⠧ ⠇ ⠏),
%w(⠋ ⠙ ⠚ ⠞ ⠖ ⠦ ⠴ ⠲ ⠳ ⠓),
%w(⠄ ⠆ ⠇ ⠋ ⠙ ⠸ ⠰ ⠠ ⠰ ⠸ ⠙ ⠋ ⠇ ⠆ ),
%w(⠋ ⠙ ⠚ ⠒ ⠂ ⠂ ⠒ ⠲ ⠴ ⠦ ⠖ ⠒ ⠐ ⠐ ⠒ ⠓ ⠋),
%w(⠁ ⠉ ⠙ ⠚ ⠒ ⠂ ⠂ ⠒ ⠲ ⠴ ⠤ ⠄ ⠄ ⠤ ⠴ ⠲ ⠒ ⠂ ⠂ ⠒ ⠚ ⠙ ⠉ ⠁),
%w(⠈ ⠉ ⠋ ⠓ ⠒ ⠐ ⠐ ⠒ ⠖ ⠦ ⠤ ⠠ ⠠ ⠤ ⠦ ⠖ ⠒ ⠐ ⠐ ⠒ ⠓ ⠋ ⠉ ⠈),
%w(⠁ ⠁ ⠉ ⠙ ⠚ ⠒ ⠂ ⠂ ⠒ ⠲ ⠴ ⠤ ⠄ ⠄ ⠤ ⠠ ⠠ ⠤ ⠦ ⠖ ⠒ ⠐ ⠐ ⠒ ⠓ ⠋ ⠉ ⠈ ⠈ ⠉)
]
def puts *strings
strings = strings.flatten.map { |s|
s.gsub(/(^| *)@(\w+)/) {
if $2 == Authorization.username
bright { fg(:yellow) { "#$1@#$2" } }
else
bright { "#$1@#$2" }
end
}
}
super strings
end
# Streams the block's output through a pager (ghi.pager / core.pager /
# $PAGER / less) when paginating, then loops, yielding every +throttle+
# seconds. Exits the process when the pager is closed.
def page header = nil, throttle = 0
  if paginate?
    pager = GHI.config('ghi.pager') || GHI.config('core.pager')
    pager ||= ENV['PAGER']
    pager ||= 'less'
    # Add sensible default flags to a bare `less` invocation.
    pager += ' -EKRX -b1' if pager =~ /^less( -[EKRX]+)?$/
    if pager && !pager.empty? && pager != 'cat'
      # Redirect all subsequent output into the pager process.
      $stdout = IO.popen pager, 'w'
    end
    puts header if header
    self.paging = true
  end
  loop do
    yield
    sleep throttle
  end
rescue Errno::EPIPE
  # The pager was closed by the user; stop quietly.
  exit
ensure
  # Restore stdout, re-show the cursor, and terminate: once paging has
  # started there is nothing left to do after the pager goes away.
  unless $stdout == STDOUT
    $stdout.close_write
    $stdout = STDOUT
    print CURSOR[:show]
    exit
  end
end
def paginate?
($stdout.tty? && $stdout == STDOUT && Formatting.paginate) || paging?
end
def paging?
!!paging
end
# Truncates +string+ so that, with +reserved+ columns already taken on the
# line, it fits the terminal width. No-op unless output is being paginated.
def truncate string, reserved
  return string unless paginate?
  # Guard against a reserved width at or beyond the terminal width, which
  # would produce a negative repetition count and raise a RegexpError.
  available = columns - reserved
  return '...' if available <= 0
  result = string.scan(/.{0,#{available}}(?:\s|\Z)/).first.strip
  result << "..." if result != string
  result
end
def indent string, level = 4, maxwidth = columns
string = string.gsub(/\r/, '')
string.gsub!(/[\t ]+$/, '')
string.gsub!(/\n{3,}/, "\n\n")
width = maxwidth - level - 1
lines = string.scan(
/.{0,#{width}}(?:\s|\Z)|[\S]{#{width},}/ # TODO: Test long lines.
).map { |line| " " * level + line.chomp }
format_markdown lines.join("\n").rstrip, level
end
# Terminal width in characters; falls back to 80 when the width cannot be
# determined (stty missing/failing, or reporting 0 outside a real terminal
# — note 0 is truthy in Ruby, so a bare `|| 80` would not catch it).
def columns
  width = dimensions[1]
  width && width > 0 ? width : 80
end
# [rows, columns] of the controlling terminal, as reported by stty(1).
# Yields an empty array when stty produces no output.
def dimensions
  `stty size`.chomp.split.map(&:to_i)
end
#--
# Specific formatters:
#++
def format_username username
username == Authorization.username ? 'you' : username
end
def format_issues_header
state = assigns[:state] || 'open'
header = "# #{repo || 'Global,'} #{state} issues"
if repo
if milestone = assigns[:milestone]
case milestone
when '*' then header << ' with a milestone'
when 'none' then header << ' without a milestone'
else
header.sub! repo, "#{repo} milestone ##{milestone}"
end
end
if assignee = assigns[:assignee]
header << case assignee
when '*' then ', assigned'
when 'none' then ', unassigned'
else
", assigned to #{format_username assignee}"
end
end
if mentioned = assigns[:mentioned]
header << ", mentioning #{format_username mentioned}"
end
else
header << case assigns[:filter]
when 'created' then ' you created'
when 'mentioned' then ' that mention you'
when 'subscribed' then " you're subscribed to"
when 'all' then ' that you can see'
else
' assigned to you'
end
end
if creator = assigns[:creator]
header << " #{format_username creator} created"
end
if labels = assigns[:labels]
header << ", labeled #{labels.gsub ',', ', '}"
end
if excluded_labels = assigns[:exclude_labels]
header << ", excluding those labeled #{excluded_labels.gsub ',', ', '}"
end
if sort = assigns[:sort]
header << ", by #{sort} #{reverse ? 'ascending' : 'descending'}"
end
format_state assigns[:state], header
end
# TODO: Show milestones.
# Renders a one-line summary per issue. When +include_repo+ is truthy the
# repo name (parsed from the API URL) is shown in its own column.
# Returns 'None.' for an empty list.
def format_issues issues, include_repo
  return 'None.' if issues.empty?
  include_repo and issues.each do |i|
    %r{/repos/[^/]+/([^/]+)} === i['url'] and i['repo'] = $1
  end
  # Column widths for the number and repo fields.
  nmax, rmax = %w(number repo).map { |f|
    issues.sort_by { |i| i[f].to_s.size }.last[f].to_s.size
  }
  issues.map { |i|
    n, title, labels = i['number'], i['title'], i['labels']
    # l = width reserved by decorations sharing the line with the title.
    l = 9 + nmax + rmax + no_color { format_labels labels }.to_s.length
    a = i['assignee'] && i['assignee']['login'] == Authorization.username
    l += 2 if a
    # The GitHub API omits 'pull_request' for plain issues; guard the lookup
    # to avoid a NoMethodError on nil.
    p = i['pull_request'] && i['pull_request']['html_url'] and l += 2
    c = i['comments']
    l += c.to_s.length + 1 unless c == 0
    [
      " ",
      (i['repo'].to_s.rjust(rmax) if i['repo']),
      format_number(n.to_s.rjust(nmax)),
      truncate(title, l),
      format_labels(labels),
      (fg('aaaaaa') { c } unless c == 0),
      (fg('aaaaaa') { '↑' } if p),
      (fg(:yellow) { '@' } if a)
    ].compact.join ' '
  }
end
def format_number n
colorize? ? "#{bright { n }}:" : "#{n} "
end
# TODO: Show milestone, number of comments, pull request attached.
# Renders the full issue view (header, opener, state, comment count,
# assignee, labels, milestone, body). Returns nil for issues lacking
# 'created_at'. The GitHub API omits 'pull_request' for plain issues, so
# the lookup is guarded to avoid a NoMethodError on nil.
def format_issue i, width = columns
  return unless i['created_at']
  ERB.new(<<EOF).result binding
<% p = i['pull_request'] && i['pull_request']['html_url'] %>\
<%= bright { no_color { indent '%s%s: %s' % [p ? '↑' : '#', \
*i.values_at('number', 'title')], 0, width } } %>
@<%= i['user']['login'] %> opened this <%= p ? 'pull request' : 'issue' %> \
<%= format_date DateTime.parse(i['created_at']) %>. \
<%= format_state i['state'], format_tag(i['state']), :bg %> \
<% unless i['comments'] == 0 %>\
<%= fg('aaaaaa'){
template = "%d comment"
template << "s" unless i['comments'] == 1
'(' << template % i['comments'] << ')'
} %>\
<% end %>\
<% if i['assignee'] || !i['labels'].empty? %>
<% if i['assignee'] %>@<%= i['assignee']['login'] %> is assigned. <% end %>\
<% unless i['labels'].empty? %><%= format_labels(i['labels']) %><% end %>\
<% end %>\
<% if i['milestone'] %>
Milestone #<%= i['milestone']['number'] %>: <%= i['milestone']['title'] %>\
<%= " \#{bright{fg(:yellow){'⚠'}}}" if past_due? i['milestone'] %>\
<% end %>
<% if i['body'] && !i['body'].empty? %>
<%= indent i['body'], 4, width %>
<% end %>
EOF
end
def format_comments comments
return 'None.' if comments.empty?
comments.map { |comment| format_comment comment }
end
def format_comment c, width = columns
<<EOF
@#{c['user']['login']} commented \
#{format_date DateTime.parse(c['created_at'])}:
#{indent c['body'], 4, width}
EOF
end
def format_milestones milestones
return 'None.' if milestones.empty?
max = milestones.sort_by { |m|
m['number'].to_s.size
}.last['number'].to_s.size
milestones.map { |m|
line = [" #{m['number'].to_s.rjust max }:"]
space = past_due?(m) ? 6 : 4
line << truncate(m['title'], max + space)
line << '⚠' if past_due? m
percent m, line.join(' ')
}
end
def format_milestone m, width = columns
ERB.new(<<EOF).result binding
<%= bright { no_color { \
indent '#%s: %s' % m.values_at('number', 'title'), 0, width } } %>
@<%= m['creator']['login'] %> created this milestone \
<%= format_date DateTime.parse(m['created_at']) %>. \
<%= format_state m['state'], format_tag(m['state']), :bg %>
<% if m['due_on'] %>\
<% due_on = DateTime.parse m['due_on'] %>\
<% if past_due? m %>\
<%= bright{fg(:yellow){"⚠"}} %> \
<%= bright{fg(:red){"Past due by \#{format_date due_on, false}."}} %>
<% else %>\
Due in <%= format_date due_on, false %>.
<% end %>\
<% end %>\
<%= percent m %>
<% if m['description'] && !m['description'].empty? %>
<%= indent m['description'], 4, width %>
<% end %>
EOF
end
# True when the milestone carries a due date that is not in the future.
def past_due? milestone
  due_on = milestone['due_on']
  return false unless due_on
  DateTime.parse(due_on) <= DateTime.now
end
def percent milestone, string = nil
open, closed = milestone.values_at('open_issues', 'closed_issues')
complete = closed.to_f / (open + closed)
complete = 0 if complete.nan?
i = (columns * complete).round
if string.nil?
string = ' %d%% (%d closed, %d open)' % [complete * 100, closed, open]
end
string = string.ljust columns
[bg('2cc200'){string[0, i]}, string[i, columns - i]].join
end
def format_state state, string = state, layer = :fg
send(layer, state == 'closed' ? 'ff0000' : '2cc200') { string }
end
def format_labels labels
return if labels.empty?
[*labels].map { |l| bg(l['color']) { format_tag l['name'] } }.join ' '
end
def format_tag tag
(colorize? ? ' %s ' : '[%s]') % tag
end
#--
# Helpers:
#++
#--
# TODO: DRY up editor formatters.
#++
def format_editor issue = nil
message = ERB.new(<<EOF).result binding
Please explain the issue. The first line will become the title. Trailing
lines starting with '#' (like these) will be ignored, and empty messages will
not be submitted. Issues are formatted with GitHub Flavored Markdown (GFM):
http://github.github.com/github-flavored-markdown
On <%= repo %>
<%= no_color { format_issue issue, columns - 2 if issue } %>
EOF
message.rstrip!
message.gsub!(/(?!\A)^.*$/) { |line| "# #{line}".rstrip }
message.insert 0, [
issue['title'] || issue[:title], issue['body'] || issue[:body]
].compact.join("\n\n") if issue
message
end
# Builds the editor template for creating/updating a milestone. The first
# line becomes the title; '#'-prefixed lines are stripped on submit.
def format_milestone_editor milestone = nil
  message = ERB.new(<<EOF).result binding
Describe the milestone. The first line will become the title. Trailing lines
starting with '#' (like these) will be ignored, and empty messages will not be
submitted. Milestones are formatted with GitHub Flavored Markdown (GFM):
http://github.github.com/github-flavored-markdown
On <%= repo %>
<%= no_color { format_milestone milestone, columns - 2 } if milestone %>
EOF
  message.rstrip!
  message.gsub!(/(?!\A)^.*$/) { |line| "# #{line}".rstrip }
  # .compact matches format_editor's behavior and avoids a dangling "\n\n"
  # when the milestone has no description.
  message.insert 0, [
    milestone['title'], milestone['description']
  ].compact.join("\n\n") if milestone
  message
end
def format_comment_editor issue, comment = nil
message = ERB.new(<<EOF).result binding
Leave a comment. The first line will become the title. Trailing lines starting
with '#' (like these) will be ignored, and empty messages will not be
submitted. Comments are formatted with GitHub Flavored Markdown (GFM):
http://github.github.com/github-flavored-markdown
On <%= repo %> issue #<%= issue['number'] %>
<%= no_color { format_issue issue } if verbose %>\
<%= no_color { format_comment comment, columns - 2 } if comment %>
EOF
message.rstrip!
message.gsub!(/(?!\A)^.*$/) { |line| "# #{line}".rstrip }
message.insert 0, comment['body'] if comment
message
end
# Applies ANSI styling to GitHub Flavored Markdown in +string+ (mutates and
# returns it). +indent+ is the number of leading spaces each line carries.
def format_markdown string, indent = 4
  c = '268bd2'
  # Headers.
  string.gsub!(/^( {#{indent}}\#{1,6} .+)$/, bright{'\1'})
  string.gsub!(
    /(^ {#{indent}}.+$\n^ {#{indent}}[-=]+$)/, bright{'\1'}
  )
  # Strong.
  string.gsub!(
    /(^|\s)(\*{2}\w(?:[^*]*\w)?\*{2})(\s|$)/m, '\1' + bright{'\2'} + '\3'
  )
  string.gsub!(
    /(^|\s)(_{2}\w(?:[^_]*\w)?_{2})(\s|$)/m, '\1' + bright {'\2'} + '\3'
  )
  # Emphasis.
  string.gsub!(
    /(^|\s)(\*\w(?:[^*]*\w)?\*)(\s|$)/m, '\1' + underline{'\2'} + '\3'
  )
  string.gsub!(
    /(^|\s)(_\w(?:[^_]*\w)?_)(\s|$)/m, '\1' + underline{'\2'} + '\3'
  )
  # Bullets/Blockquotes.
  string.gsub!(/(^ {#{indent}}(?:[*>-]|\d+\.) )/, fg(c){'\1'})
  # URIs.
  string.gsub!(
    %r{\b(<)?(https?://\S+|[^@\s]+@[^@\s]+)(>)?\b},
    fg(c){'\1' + underline{'\2'} + '\3'}
  )
  # Inline code: pad with spaces so the inverse-video background does not
  # run straight into the surrounding characters.
  string.gsub!(/`([^`].+?)`(?=[^`])/, inverse { ' \1 ' })
  # Code blocks
  string.gsub!(/(?<indent>^\ {#{indent}})(```)\s*(?<lang>\w*$)(\n)(?<code>.+?)(\n)(^\ {#{indent}}```$)/m) do |m|
    highlight(Regexp.last_match)
  end
  string
end
# Humanizes the distance between +date+ and now ("5 days ago",
# "3 hours from now"). Pass suffix = false to drop the "ago"/"from now".
def format_date date, suffix = true
  interval = DateTime.now - date
  days = interval.to_i.abs
  string =
    if days.zero?
      # Sub-day interval: break the Rational day-fraction into h/m/s.
      total_seconds, _ = interval.divmod Rational(1, 86400)
      hours, remainder = total_seconds.divmod 3600
      minutes, seconds = remainder.divmod 60
      unit, count =
        if hours > 0 then ['hour', hours]
        elsif minutes > 0 then ['minute', minutes]
        else ['second', seconds]
        end
      "#{count} #{unit}#{'s' unless count == 1}"
    else
      "#{days} day#{'s' unless days == 1}"
    end
  parts = [string]
  parts << (interval < 0 ? 'from now' : 'ago') if suffix
  parts.join ' '
end
# Shows an animated braille spinner at column +position+ while the block
# runs; plain yield when not paginating. +redraw+ is printed once the
# spinner is cleared (defaults to cursor-up-one to repaint the line).
def throb position = 0, redraw = CURSOR[:up][1]
  return yield unless paginate?
  # Pick a random spinner, randomly reversed, starting at a random frame.
  throb = THROBBERS[rand(THROBBERS.length)]
  throb.reverse! if rand > 0.5
  i = rand throb.length
  thread = Thread.new do
    # Recursive lambda: draw a frame, advance, sleep 0.1s, repeat until
    # the thread is killed below.
    dot = lambda do
      print "\r#{CURSOR[:column][position]}#{throb[i]}#{CURSOR[:hide]}"
      i = (i + 1) % throb.length
      sleep 0.1 and dot.call
    end
    dot.call
  end
  yield
ensure
  # Stop the animation and restore the cursor even if the block raised.
  if thread
    thread.kill
    puts "\r#{CURSOR[:column][position]}#{redraw}#{CURSOR[:show]}"
  end
end
end
end
Adds whitespace around inline code spans
... which improves readability quite a bit, as the outermost characters no
longer blend into the highlighted background.
# encoding: utf-8
require 'date'
require 'erb'
module GHI
module Formatting
class << self
attr_accessor :paginate
end
self.paginate = true # Default.
attr_accessor :paging
autoload :Colors, 'ghi/formatting/colors'
include Colors
CURSOR = {
:up => lambda { |n| "\e[#{n}A" },
:column => lambda { |n| "\e[#{n}G" },
:hide => "\e[?25l",
:show => "\e[?25h"
}
THROBBERS = [
%w(⠋ ⠙ ⠹ ⠸ ⠼ ⠴ ⠦ ⠧ ⠇ ⠏),
%w(⠋ ⠙ ⠚ ⠞ ⠖ ⠦ ⠴ ⠲ ⠳ ⠓),
%w(⠄ ⠆ ⠇ ⠋ ⠙ ⠸ ⠰ ⠠ ⠰ ⠸ ⠙ ⠋ ⠇ ⠆ ),
%w(⠋ ⠙ ⠚ ⠒ ⠂ ⠂ ⠒ ⠲ ⠴ ⠦ ⠖ ⠒ ⠐ ⠐ ⠒ ⠓ ⠋),
%w(⠁ ⠉ ⠙ ⠚ ⠒ ⠂ ⠂ ⠒ ⠲ ⠴ ⠤ ⠄ ⠄ ⠤ ⠴ ⠲ ⠒ ⠂ ⠂ ⠒ ⠚ ⠙ ⠉ ⠁),
%w(⠈ ⠉ ⠋ ⠓ ⠒ ⠐ ⠐ ⠒ ⠖ ⠦ ⠤ ⠠ ⠠ ⠤ ⠦ ⠖ ⠒ ⠐ ⠐ ⠒ ⠓ ⠋ ⠉ ⠈),
%w(⠁ ⠁ ⠉ ⠙ ⠚ ⠒ ⠂ ⠂ ⠒ ⠲ ⠴ ⠤ ⠄ ⠄ ⠤ ⠠ ⠠ ⠤ ⠦ ⠖ ⠒ ⠐ ⠐ ⠒ ⠓ ⠋ ⠉ ⠈ ⠈ ⠉)
]
def puts *strings
strings = strings.flatten.map { |s|
s.gsub(/(^| *)@(\w+)/) {
if $2 == Authorization.username
bright { fg(:yellow) { "#$1@#$2" } }
else
bright { "#$1@#$2" }
end
}
}
super strings
end
def page header = nil, throttle = 0
if paginate?
pager = GHI.config('ghi.pager') || GHI.config('core.pager')
pager ||= ENV['PAGER']
pager ||= 'less'
pager += ' -EKRX -b1' if pager =~ /^less( -[EKRX]+)?$/
if pager && !pager.empty? && pager != 'cat'
$stdout = IO.popen pager, 'w'
end
puts header if header
self.paging = true
end
loop do
yield
sleep throttle
end
rescue Errno::EPIPE
exit
ensure
unless $stdout == STDOUT
$stdout.close_write
$stdout = STDOUT
print CURSOR[:show]
exit
end
end
def paginate?
($stdout.tty? && $stdout == STDOUT && Formatting.paginate) || paging?
end
def paging?
!!paging
end
# Truncates +string+ so that, with +reserved+ columns already taken on the
# line, it fits the terminal width. No-op unless output is being paginated.
def truncate string, reserved
  return string unless paginate?
  # Guard against a reserved width at or beyond the terminal width, which
  # would produce a negative repetition count and raise a RegexpError.
  available = columns - reserved
  return '...' if available <= 0
  result = string.scan(/.{0,#{available}}(?:\s|\Z)/).first.strip
  result << "..." if result != string
  result
end
def indent string, level = 4, maxwidth = columns
string = string.gsub(/\r/, '')
string.gsub!(/[\t ]+$/, '')
string.gsub!(/\n{3,}/, "\n\n")
width = maxwidth - level - 1
lines = string.scan(
/.{0,#{width}}(?:\s|\Z)|[\S]{#{width},}/ # TODO: Test long lines.
).map { |line| " " * level + line.chomp }
format_markdown lines.join("\n").rstrip, level
end
# Terminal width in characters; falls back to 80 when the width cannot be
# determined (stty missing/failing, or reporting 0 outside a real terminal
# — note 0 is truthy in Ruby, so a bare `|| 80` would not catch it).
def columns
  width = dimensions[1]
  width && width > 0 ? width : 80
end
def dimensions
`stty size`.chomp.split(' ').map { |n| n.to_i }
end
#--
# Specific formatters:
#++
def format_username username
username == Authorization.username ? 'you' : username
end
def format_issues_header
state = assigns[:state] || 'open'
header = "# #{repo || 'Global,'} #{state} issues"
if repo
if milestone = assigns[:milestone]
case milestone
when '*' then header << ' with a milestone'
when 'none' then header << ' without a milestone'
else
header.sub! repo, "#{repo} milestone ##{milestone}"
end
end
if assignee = assigns[:assignee]
header << case assignee
when '*' then ', assigned'
when 'none' then ', unassigned'
else
", assigned to #{format_username assignee}"
end
end
if mentioned = assigns[:mentioned]
header << ", mentioning #{format_username mentioned}"
end
else
header << case assigns[:filter]
when 'created' then ' you created'
when 'mentioned' then ' that mention you'
when 'subscribed' then " you're subscribed to"
when 'all' then ' that you can see'
else
' assigned to you'
end
end
if creator = assigns[:creator]
header << " #{format_username creator} created"
end
if labels = assigns[:labels]
header << ", labeled #{labels.gsub ',', ', '}"
end
if excluded_labels = assigns[:exclude_labels]
header << ", excluding those labeled #{excluded_labels.gsub ',', ', '}"
end
if sort = assigns[:sort]
header << ", by #{sort} #{reverse ? 'ascending' : 'descending'}"
end
format_state assigns[:state], header
end
# TODO: Show milestones.
# Renders a one-line summary per issue. When +include_repo+ is truthy the
# repo name (parsed from the API URL) is shown in its own column.
# Returns 'None.' for an empty list.
def format_issues issues, include_repo
  return 'None.' if issues.empty?
  include_repo and issues.each do |i|
    %r{/repos/[^/]+/([^/]+)} === i['url'] and i['repo'] = $1
  end
  # Column widths for the number and repo fields.
  nmax, rmax = %w(number repo).map { |f|
    issues.sort_by { |i| i[f].to_s.size }.last[f].to_s.size
  }
  issues.map { |i|
    n, title, labels = i['number'], i['title'], i['labels']
    # l = width reserved by decorations sharing the line with the title.
    l = 9 + nmax + rmax + no_color { format_labels labels }.to_s.length
    a = i['assignee'] && i['assignee']['login'] == Authorization.username
    l += 2 if a
    # The GitHub API omits 'pull_request' for plain issues; guard the lookup
    # to avoid a NoMethodError on nil.
    p = i['pull_request'] && i['pull_request']['html_url'] and l += 2
    c = i['comments']
    l += c.to_s.length + 1 unless c == 0
    [
      " ",
      (i['repo'].to_s.rjust(rmax) if i['repo']),
      format_number(n.to_s.rjust(nmax)),
      truncate(title, l),
      format_labels(labels),
      (fg('aaaaaa') { c } unless c == 0),
      (fg('aaaaaa') { '↑' } if p),
      (fg(:yellow) { '@' } if a)
    ].compact.join ' '
  }
end
def format_number n
colorize? ? "#{bright { n }}:" : "#{n} "
end
# TODO: Show milestone, number of comments, pull request attached.
# Renders the full issue view (header, opener, state, comment count,
# assignee, labels, milestone, body). Returns nil for issues lacking
# 'created_at'. The GitHub API omits 'pull_request' for plain issues, so
# the lookup is guarded to avoid a NoMethodError on nil.
def format_issue i, width = columns
  return unless i['created_at']
  ERB.new(<<EOF).result binding
<% p = i['pull_request'] && i['pull_request']['html_url'] %>\
<%= bright { no_color { indent '%s%s: %s' % [p ? '↑' : '#', \
*i.values_at('number', 'title')], 0, width } } %>
@<%= i['user']['login'] %> opened this <%= p ? 'pull request' : 'issue' %> \
<%= format_date DateTime.parse(i['created_at']) %>. \
<%= format_state i['state'], format_tag(i['state']), :bg %> \
<% unless i['comments'] == 0 %>\
<%= fg('aaaaaa'){
template = "%d comment"
template << "s" unless i['comments'] == 1
'(' << template % i['comments'] << ')'
} %>\
<% end %>\
<% if i['assignee'] || !i['labels'].empty? %>
<% if i['assignee'] %>@<%= i['assignee']['login'] %> is assigned. <% end %>\
<% unless i['labels'].empty? %><%= format_labels(i['labels']) %><% end %>\
<% end %>\
<% if i['milestone'] %>
Milestone #<%= i['milestone']['number'] %>: <%= i['milestone']['title'] %>\
<%= " \#{bright{fg(:yellow){'⚠'}}}" if past_due? i['milestone'] %>\
<% end %>
<% if i['body'] && !i['body'].empty? %>
<%= indent i['body'], 4, width %>
<% end %>
EOF
end
def format_comments comments
return 'None.' if comments.empty?
comments.map { |comment| format_comment comment }
end
def format_comment c, width = columns
<<EOF
@#{c['user']['login']} commented \
#{format_date DateTime.parse(c['created_at'])}:
#{indent c['body'], 4, width}
EOF
end
def format_milestones milestones
return 'None.' if milestones.empty?
max = milestones.sort_by { |m|
m['number'].to_s.size
}.last['number'].to_s.size
milestones.map { |m|
line = [" #{m['number'].to_s.rjust max }:"]
space = past_due?(m) ? 6 : 4
line << truncate(m['title'], max + space)
line << '⚠' if past_due? m
percent m, line.join(' ')
}
end
def format_milestone m, width = columns
ERB.new(<<EOF).result binding
<%= bright { no_color { \
indent '#%s: %s' % m.values_at('number', 'title'), 0, width } } %>
@<%= m['creator']['login'] %> created this milestone \
<%= format_date DateTime.parse(m['created_at']) %>. \
<%= format_state m['state'], format_tag(m['state']), :bg %>
<% if m['due_on'] %>\
<% due_on = DateTime.parse m['due_on'] %>\
<% if past_due? m %>\
<%= bright{fg(:yellow){"⚠"}} %> \
<%= bright{fg(:red){"Past due by \#{format_date due_on, false}."}} %>
<% else %>\
Due in <%= format_date due_on, false %>.
<% end %>\
<% end %>\
<%= percent m %>
<% if m['description'] && !m['description'].empty? %>
<%= indent m['description'], 4, width %>
<% end %>
EOF
end
def past_due? milestone
return false unless milestone['due_on']
DateTime.parse(milestone['due_on']) <= DateTime.now
end
def percent milestone, string = nil
open, closed = milestone.values_at('open_issues', 'closed_issues')
complete = closed.to_f / (open + closed)
complete = 0 if complete.nan?
i = (columns * complete).round
if string.nil?
string = ' %d%% (%d closed, %d open)' % [complete * 100, closed, open]
end
string = string.ljust columns
[bg('2cc200'){string[0, i]}, string[i, columns - i]].join
end
def format_state state, string = state, layer = :fg
send(layer, state == 'closed' ? 'ff0000' : '2cc200') { string }
end
def format_labels labels
return if labels.empty?
[*labels].map { |l| bg(l['color']) { format_tag l['name'] } }.join ' '
end
def format_tag tag
(colorize? ? ' %s ' : '[%s]') % tag
end
#--
# Helpers:
#++
#--
# TODO: DRY up editor formatters.
#++
def format_editor issue = nil
message = ERB.new(<<EOF).result binding
Please explain the issue. The first line will become the title. Trailing
lines starting with '#' (like these) will be ignored, and empty messages will
not be submitted. Issues are formatted with GitHub Flavored Markdown (GFM):
http://github.github.com/github-flavored-markdown
On <%= repo %>
<%= no_color { format_issue issue, columns - 2 if issue } %>
EOF
message.rstrip!
message.gsub!(/(?!\A)^.*$/) { |line| "# #{line}".rstrip }
message.insert 0, [
issue['title'] || issue[:title], issue['body'] || issue[:body]
].compact.join("\n\n") if issue
message
end
# Builds the editor template for creating/updating a milestone. The first
# line becomes the title; '#'-prefixed lines are stripped on submit.
def format_milestone_editor milestone = nil
  message = ERB.new(<<EOF).result binding
Describe the milestone. The first line will become the title. Trailing lines
starting with '#' (like these) will be ignored, and empty messages will not be
submitted. Milestones are formatted with GitHub Flavored Markdown (GFM):
http://github.github.com/github-flavored-markdown
On <%= repo %>
<%= no_color { format_milestone milestone, columns - 2 } if milestone %>
EOF
  message.rstrip!
  message.gsub!(/(?!\A)^.*$/) { |line| "# #{line}".rstrip }
  # .compact matches format_editor's behavior and avoids a dangling "\n\n"
  # when the milestone has no description.
  message.insert 0, [
    milestone['title'], milestone['description']
  ].compact.join("\n\n") if milestone
  message
end
def format_comment_editor issue, comment = nil
message = ERB.new(<<EOF).result binding
Leave a comment. The first line will become the title. Trailing lines starting
with '#' (like these) will be ignored, and empty messages will not be
submitted. Comments are formatted with GitHub Flavored Markdown (GFM):
http://github.github.com/github-flavored-markdown
On <%= repo %> issue #<%= issue['number'] %>
<%= no_color { format_issue issue } if verbose %>\
<%= no_color { format_comment comment, columns - 2 } if comment %>
EOF
message.rstrip!
message.gsub!(/(?!\A)^.*$/) { |line| "# #{line}".rstrip }
message.insert 0, comment['body'] if comment
message
end
def format_markdown string, indent = 4
c = '268bd2'
# Headers.
string.gsub!(/^( {#{indent}}\#{1,6} .+)$/, bright{'\1'})
string.gsub!(
/(^ {#{indent}}.+$\n^ {#{indent}}[-=]+$)/, bright{'\1'}
)
# Strong.
string.gsub!(
/(^|\s)(\*{2}\w(?:[^*]*\w)?\*{2})(\s|$)/m, '\1' + bright{'\2'} + '\3'
)
string.gsub!(
/(^|\s)(_{2}\w(?:[^_]*\w)?_{2})(\s|$)/m, '\1' + bright {'\2'} + '\3'
)
# Emphasis.
string.gsub!(
/(^|\s)(\*\w(?:[^*]*\w)?\*)(\s|$)/m, '\1' + underline{'\2'} + '\3'
)
string.gsub!(
/(^|\s)(_\w(?:[^_]*\w)?_)(\s|$)/m, '\1' + underline{'\2'} + '\3'
)
# Bullets/Blockquotes.
string.gsub!(/(^ {#{indent}}(?:[*>-]|\d+\.) )/, fg(c){'\1'})
# URIs.
string.gsub!(
%r{\b(<)?(https?://\S+|[^@\s]+@[^@\s]+)(>)?\b},
fg(c){'\1' + underline{'\2'} + '\3'}
)
# Inline code
string.gsub!(/`([^`].+?)`(?=[^`])/, inverse { ' \1 ' })
# Code blocks
string.gsub!(/(?<indent>^\ {#{indent}})(```)\s*(?<lang>\w*$)(\n)(?<code>.+?)(\n)(^\ {#{indent}}```$)/m) do |m|
highlight(Regexp.last_match)
end
string
end
# Express the distance between +date+ and now in English, e.g. "3 days ago"
# or "5 minutes from now".
#
# date   - DateTime to compare against the current time.
# suffix - when true append "ago"/"from now"; when false return only the
#          bare quantity, e.g. "3 days".
#
# Returns a String.
def format_date date, suffix = true
  interval = DateTime.now - date
  day_count = interval.to_i.abs
  # Pluralise a unit: unit[2, 'day'] => "2 days", unit[1, 'day'] => "1 day".
  unit = lambda { |n, name| "#{n} #{name}#{'s' unless n == 1}" }
  phrase =
    if day_count.zero?
      # Under a day: break the fractional-day interval into h/m/s.
      secs, _ = interval.divmod Rational(1, 86400)
      hrs, secs = secs.divmod 3600
      mins, secs = secs.divmod 60
      if hrs > 0
        unit[hrs, 'hour']
      elsif mins > 0
        unit[mins, 'minute']
      else
        unit[secs, 'second']
      end
    else
      unit[day_count, 'day']
    end
  tail = nil
  tail = interval < 0 ? 'from now' : 'ago' if suffix
  [phrase, tail].compact.join ' '
end
# Run the given block while animating a spinner at terminal column
# +position+.  When the block finishes (or raises) the spinner cell is
# redrawn with +redraw+ and the cursor is shown again.  Without pagination
# the block is simply yielded with no animation.
def throb position = 0, redraw = CURSOR[:up][1]
  return yield unless paginate?
  # Pick a random throbber frame-set, direction and starting frame.
  throb = THROBBERS[rand(THROBBERS.length)]
  throb.reverse! if rand > 0.5
  i = rand throb.length
  thread = Thread.new do
    # Frame loop via self-recursion: print frame, advance, sleep, repeat.
    # NOTE(review): the recursive lambda grows the stack over long waits —
    # consider rewriting as a loop.
    dot = lambda do
      print "\r#{CURSOR[:column][position]}#{throb[i]}#{CURSOR[:hide]}"
      i = (i + 1) % throb.length
      sleep 0.1 and dot.call
    end
    dot.call
  end
  yield
ensure
  # Always stop the animation thread and un-hide the cursor.
  if thread
    thread.kill
    puts "\r#{CURSOR[:column][position]}#{redraw}#{CURSOR[:show]}"
  end
end
end
end
|
# Production environment configuration for the Alm application.
Alm::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to Rails.root.join("public/assets")
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Log warnings and above in production (the Rails default is :info); see issue #27
  config.log_level = :warn

  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new

  # Use a different cache store in production
  config.cache_store = :dalli_store, { :namespace => "alm" }

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  config.assets.precompile += %w(greenrobo.css)

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # for devise
  # TODO Must set it with correct value!! (should be the real production hostname)
  config.action_mailer.default_url_options = { :host => 'localhost:3000' }
end
Make :warn the standard log level in production. Closes #27
# Production environment configuration for the Alm application.
Alm::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to Rails.root.join("public/assets")
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Log warnings and above in production (the Rails default is :info)
  config.log_level = :warn

  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new

  # Use a different cache store in production
  config.cache_store = :dalli_store, { :namespace => "alm" }

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  config.assets.precompile += %w(greenrobo.css)

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # for devise
  # TODO Must set it with correct value!! (should be the real production hostname)
  config.action_mailer.default_url_options = { :host => 'localhost:3000' }
end
|
require 'spec_helper'

# Specs for Macmillan::Utils::Logger::Formatter, driven through a real
# Logger built by the factory so formatting is exercised end-to-end.
describe Macmillan::Utils::Logger::Formatter do
  let(:msg)    { 'testing' }
  let(:prefix) { nil }
  let(:target) { File.open('/dev/null', 'w+') }

  subject { Macmillan::Utils::Logger::Formatter.new(prefix) }

  let(:logger) do
    log = Macmillan::Utils::Logger::Factory.build_logger(:logger, target: target)
    log.formatter = subject
    log
  end

  describe '#call' do
    it 'is called by the logger object' do
      expect(target).to receive(:write).once
      expect(subject).to receive(:call).once
      logger.info 'this is a test'
    end

    context 'when a prefix is set' do
      let(:prefix) { 'WEEEE' }

      it 'is put in front of the log message' do
        expect(target).to receive(:write).with("#{prefix} [ INFO]: #{msg}\n").once
        logger.info msg
      end
    end

    context 'when the log message is a string' do
      it 'returns the string' do
        expect(target).to receive(:write).with("[ INFO]: #{msg}\n").once
        logger.info msg
      end
    end

    context 'when the log message is an exception' do
      it 'returns full details of the exception' do
        ex = StandardError.new('qwerty')
        expect(ex).to receive(:message).once
        expect(ex).to receive(:class).once
        expect(ex).to receive(:backtrace).once
        logger.info ex
      end
    end

    context 'when the log message is NOT a string or exception' do
      it 'returns object.inspect' do
        # Use a literal array with an honest name; it is not an exception.
        obj = []
        expect(obj).to receive(:inspect).once
        logger.info obj
      end
    end
  end
end
Fix another failing test on master
require 'spec_helper'

# Specs for Macmillan::Utils::Logger::Formatter, driven through a real
# Logger built by the factory so formatting is exercised end-to-end.
describe Macmillan::Utils::Logger::Formatter do
  let(:msg)    { 'testing' }
  let(:prefix) { nil }
  let(:target) { File.open('/dev/null', 'w+') }

  subject { Macmillan::Utils::Logger::Formatter.new(prefix) }

  let(:logger) do
    log = Macmillan::Utils::Logger::Factory.build_logger(:logger, target: target)
    log.formatter = subject
    log
  end

  describe '#call' do
    it 'is called by the logger object' do
      expect(target).to receive(:write).once
      expect(subject).to receive(:call).once
      logger.info 'this is a test'
    end

    context 'when a prefix is set' do
      let(:prefix) { 'WEEEE' }

      it 'is put in front of the log message' do
        expect(target).to receive(:write).with("#{prefix} [ INFO]: #{msg}\n").once
        logger.info msg
      end
    end

    context 'when the log message is a string' do
      it 'returns the string' do
        expect(target).to receive(:write).with("[ INFO]: #{msg}\n").once
        logger.info msg
      end
    end

    context 'when the log message is an exception' do
      it 'returns full details of the exception' do
        ex = StandardError.new('qwerty')
        # Stub with the expect/allow syntax used everywhere else in this
        # file (Object#stub is the deprecated should-syntax).
        allow(ex).to receive(:backtrace).and_return(%w(foo bar baz quux))
        expect(target).to receive(:write).with("[ INFO]: qwerty (StandardError)\nfoo\nbar\nbaz\nquux\n").once
        logger.info ex
      end
    end

    context 'when the log message is NOT a string or exception' do
      it 'returns object.inspect' do
        # Use a literal array with an honest name; it is not an exception.
        obj = []
        expect(obj).to receive(:inspect).once
        logger.info obj
      end
    end
  end
end
|
require File.dirname(__FILE__) + '/../spec_helper.rb'

require 'reek/code_parser'
require 'reek/smells'
require 'reek/report'

include Reek

describe Report, " when empty" do
  before(:each) do
    @rpt = Report.new
  end

  it 'should have zero length' do
    @rpt.length.should == 0
  end

  it 'should claim to be empty' do
    @rpt.should be_empty
  end
end

describe Report, "to_s" do
  before(:each) do
    rpt = Report.new
    chk = CodeParser.new(rpt)
    chk.check_source('def simple(a) a[3] end')
    @report = rpt.to_s.split("\n")
  end

  it 'should place each detailed report on a separate line' do
    @report.length.should == 2
  end

  it 'should mention every smell name' do
    # Match the smell names literally: the previous /[Utility Function]/
    # form was a character class matching any single listed character,
    # which made these assertions nearly vacuous.
    @report[0].should match(/Utility Function/)
    @report[1].should match(/Feature Envy/)
  end
end

describe Report, " as a SortedSet" do
  it 'should only add a smell once' do
    rpt = Report.new
    rpt << UtilityFunction.new(self, 1)
    rpt.length.should == 1
    rpt << UtilityFunction.new(self, 1)
    rpt.length.should == 1
  end
end

describe SortByContext do
  before :each do
    @sorter = SortByContext
    @long_method = LongMethod.new('x', 30)
    @large_class = LargeClass.new('y', 30)
  end

  it 'should return 0 for identical smells' do
    @sorter.compare(@long_method, @long_method).should == 0
  end

  it 'should return non-0 for different smells' do
    @sorter.compare(@long_method, @large_class).should == -1
  end
end

describe SortBySmell do
  before :each do
    @sorter = SortBySmell
    @long_method = LongMethod.new('x', 30)
    @large_class = LargeClass.new('y', 30)
  end

  it 'should return 0 for identical smells' do
    @sorter.compare(@long_method, @long_method).should == 0
  end

  it 'should differentiate identical smells with different contexts' do
    @sorter.compare(LongMethod.new('x', 29), LongMethod.new('y', 29)).should == -1
  end

  it 'should differentiate different smells with identical contexts' do
    @sorter.compare(@long_method, @large_class).should == 1
  end
end
Fixed incorrect require statement
require File.dirname(__FILE__) + '/../spec_helper.rb'

require 'reek/code_parser'
require 'reek/smells/smells'
require 'reek/report'

include Reek

describe Report, " when empty" do
  before(:each) do
    @rpt = Report.new
  end

  it 'should have zero length' do
    @rpt.length.should == 0
  end

  it 'should claim to be empty' do
    @rpt.should be_empty
  end
end

describe Report, "to_s" do
  before(:each) do
    rpt = Report.new
    chk = CodeParser.new(rpt)
    chk.check_source('def simple(a) a[3] end')
    @report = rpt.to_s.split("\n")
  end

  it 'should place each detailed report on a separate line' do
    @report.length.should == 2
  end

  it 'should mention every smell name' do
    # Match the smell names literally: the previous /[Utility Function]/
    # form was a character class matching any single listed character,
    # which made these assertions nearly vacuous.
    @report[0].should match(/Utility Function/)
    @report[1].should match(/Feature Envy/)
  end
end

describe Report, " as a SortedSet" do
  it 'should only add a smell once' do
    rpt = Report.new
    rpt << UtilityFunction.new(self, 1)
    rpt.length.should == 1
    rpt << UtilityFunction.new(self, 1)
    rpt.length.should == 1
  end
end

describe SortByContext do
  before :each do
    @sorter = SortByContext
    @long_method = LongMethod.new('x', 30)
    @large_class = LargeClass.new('y', 30)
  end

  it 'should return 0 for identical smells' do
    @sorter.compare(@long_method, @long_method).should == 0
  end

  it 'should return non-0 for different smells' do
    @sorter.compare(@long_method, @large_class).should == -1
  end
end

describe SortBySmell do
  before :each do
    @sorter = SortBySmell
    @long_method = LongMethod.new('x', 30)
    @large_class = LargeClass.new('y', 30)
  end

  it 'should return 0 for identical smells' do
    @sorter.compare(@long_method, @long_method).should == 0
  end

  it 'should differentiate identical smells with different contexts' do
    @sorter.compare(LongMethod.new('x', 29), LongMethod.new('y', 29)).should == -1
  end

  it 'should differentiate different smells with identical contexts' do
    @sorter.compare(@long_method, @large_class).should == 1
  end
end
|
# Production environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [ :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Configure email delivery using Mailgun
  # NOTE(review): all MAILGUN_* settings come from the environment; nil values
  # here will only surface as delivery failures at runtime — confirm they are
  # set in production.
  config.action_mailer.smtp_settings = { port: ENV['MAILGUN_SMTP_PORT'],
                                         address: ENV['MAILGUN_SMTP_SERVER'],
                                         user_name: ENV['MAILGUN_SMTP_LOGIN'],
                                         password: ENV['MAILGUN_SMTP_PASSWORD'],
                                         domain: 'pensionwise.gov.uk',
                                         authentication: :plain }

  # Configure email delivery method
  config.action_mailer.delivery_method = :smtp

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
Appease Rubocop
# Production environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [:uuid]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Configure email delivery using Mailgun
  # NOTE(review): all MAILGUN_* settings come from the environment; nil values
  # here will only surface as delivery failures at runtime — confirm they are
  # set in production.
  config.action_mailer.smtp_settings = { port: ENV['MAILGUN_SMTP_PORT'],
                                         address: ENV['MAILGUN_SMTP_SERVER'],
                                         user_name: ENV['MAILGUN_SMTP_LOGIN'],
                                         password: ENV['MAILGUN_SMTP_PASSWORD'],
                                         domain: 'pensionwise.gov.uk',
                                         authentication: :plain }

  # Configure email delivery method
  config.action_mailer.delivery_method = :smtp

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
|
require 'spec_helper'

# Integration specs for Sidekiq::Status::ServerMiddleware: each example
# boots a test server (start_server, from spec support), enqueues a job
# under a fixed id, then inspects the status hash the middleware writes
# to redis under "sidekiq:status:<job_id>".
describe Sidekiq::Status::ServerMiddleware do
  let!(:redis) { Sidekiq.redis { |conn| conn } }
  # Fixed id; SecureRandom.hex is stubbed below so the enqueued job gets it.
  let!(:job_id) { SecureRandom.hex(12) }

  describe "without :expiration parameter" do
    it "sets working/complete status" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        # Watch the pub/sub channels for the expected sequence of updates.
        thread = redis_thread 4, "status_updates", "job_messages_#{job_id}"
        expect(ConfirmationJob.perform_async arg1: 'val1').to eq(job_id)
        expect(thread.value).to eq([
          job_id,
          job_id,
          "while in #perform, status = working",
          job_id
        ])
      end
      expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('complete')
      expect(Sidekiq::Status::complete?(job_id)).to be_truthy
    end

    it "sets failed status" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        expect(capture_status_updates(3) {
          expect(FailingJob.perform_async).to eq(job_id)
        }).to eq([job_id]*3)
      end
      expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('failed')
      expect(Sidekiq::Status::failed?(job_id)).to be_truthy
    end

    it "sets failed status when Exception raised" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        expect(capture_status_updates(3) {
          expect(FailingHardJob.perform_async).to eq(job_id)
        }).to eq([job_id]*3)
      end
      expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('failed')
      expect(Sidekiq::Status::failed?(job_id)).to be_truthy
    end

    context "when Sidekiq::Status::Worker is not included in the job" do
      it "should not set a failed status" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(FailingNoStatusJob.perform_async).to eq(job_id)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to be_nil
      end

      it "should not set any status when Exception raised" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(FailingHardNoStatusJob.perform_async).to eq(job_id)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to be_nil
      end
    end

    context "sets interrupted status" do
      it "on system exit signal" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(capture_status_updates(3) {
            expect(ExitedJob.perform_async).to eq(job_id)
          }).to eq([job_id]*3)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('interrupted')
        expect(Sidekiq::Status::interrupted?(job_id)).to be_truthy
      end

      it "on interrupt signal" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(capture_status_updates(3) {
            expect(InterruptedJob.perform_async).to eq(job_id)
          }).to eq([job_id]*3)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('interrupted')
        expect(Sidekiq::Status::interrupted?(job_id)).to be_truthy
      end
    end

    it "sets status hash ttl" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        expect(StubJob.perform_async arg1: 'val1').to eq(job_id)
      end
      expect(1..Sidekiq::Status::DEFAULT_EXPIRY).to cover redis.ttl("sidekiq:status:#{job_id}")
    end
  end

  describe "with :expiration parameter" do
    let(:huge_expiration) { Sidekiq::Status::DEFAULT_EXPIRY * 100 }

    before do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
    end

    it "overwrites default expiry value" do
      start_server(:expiration => huge_expiration) do
        StubJob.perform_async arg1: 'val1'
      end
      expect((Sidekiq::Status::DEFAULT_EXPIRY-1)..huge_expiration).to cover redis.ttl("sidekiq:status:#{job_id}")
    end

    it "can be overwritten by worker expiration method" do
      overwritten_expiration = huge_expiration * 100
      allow_any_instance_of(StubJob).to receive(:expiration).and_return(overwritten_expiration)
      start_server(:expiration => huge_expiration) do
        StubJob.perform_async arg1: 'val1'
      end
      expect((huge_expiration+1)..overwritten_expiration).to cover redis.ttl("sidekiq:status:#{job_id}")
    end
  end
end
consistent spacing
require 'spec_helper'

# Integration specs for Sidekiq::Status::ServerMiddleware: each example
# boots a test server (start_server, from spec support), enqueues a job
# under a fixed id, then inspects the status hash the middleware writes
# to redis under "sidekiq:status:<job_id>".
describe Sidekiq::Status::ServerMiddleware do
  let!(:redis) { Sidekiq.redis { |conn| conn } }
  # Fixed id; SecureRandom.hex is stubbed below so the enqueued job gets it.
  let!(:job_id) { SecureRandom.hex(12) }

  describe "without :expiration parameter" do
    it "sets working/complete status" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        # Watch the pub/sub channels for the expected sequence of updates.
        thread = redis_thread 4, "status_updates", "job_messages_#{job_id}"
        expect(ConfirmationJob.perform_async arg1: 'val1').to eq(job_id)
        expect(thread.value).to eq([
          job_id,
          job_id,
          "while in #perform, status = working",
          job_id
        ])
      end
      expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('complete')
      expect(Sidekiq::Status::complete?(job_id)).to be_truthy
    end

    it "sets failed status" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        expect(capture_status_updates(3) {
          expect(FailingJob.perform_async).to eq(job_id)
        }).to eq([job_id]*3)
      end
      expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('failed')
      expect(Sidekiq::Status::failed?(job_id)).to be_truthy
    end

    it "sets failed status when Exception raised" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        expect(capture_status_updates(3) {
          expect(FailingHardJob.perform_async).to eq(job_id)
        }).to eq([job_id]*3)
      end
      expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('failed')
      expect(Sidekiq::Status::failed?(job_id)).to be_truthy
    end

    context "when Sidekiq::Status::Worker is not included in the job" do
      it "should not set a failed status" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(FailingNoStatusJob.perform_async).to eq(job_id)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to be_nil
      end

      it "should not set any status when Exception raised" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(FailingHardNoStatusJob.perform_async).to eq(job_id)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to be_nil
      end
    end

    context "sets interrupted status" do
      it "on system exit signal" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(capture_status_updates(3) {
            expect(ExitedJob.perform_async).to eq(job_id)
          }).to eq([job_id]*3)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('interrupted')
        expect(Sidekiq::Status::interrupted?(job_id)).to be_truthy
      end

      it "on interrupt signal" do
        allow(SecureRandom).to receive(:hex).once.and_return(job_id)
        start_server do
          expect(capture_status_updates(3) {
            expect(InterruptedJob.perform_async).to eq(job_id)
          }).to eq([job_id]*3)
        end
        expect(redis.hget("sidekiq:status:#{job_id}", :status)).to eq('interrupted')
        expect(Sidekiq::Status::interrupted?(job_id)).to be_truthy
      end
    end

    it "sets status hash ttl" do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
      start_server do
        expect(StubJob.perform_async arg1: 'val1').to eq(job_id)
      end
      expect(1..Sidekiq::Status::DEFAULT_EXPIRY).to cover redis.ttl("sidekiq:status:#{job_id}")
    end
  end

  describe "with :expiration parameter" do
    let(:huge_expiration) { Sidekiq::Status::DEFAULT_EXPIRY * 100 }

    before do
      allow(SecureRandom).to receive(:hex).once.and_return(job_id)
    end

    it "overwrites default expiry value" do
      start_server(:expiration => huge_expiration) do
        StubJob.perform_async arg1: 'val1'
      end
      expect((Sidekiq::Status::DEFAULT_EXPIRY-1)..huge_expiration).to cover redis.ttl("sidekiq:status:#{job_id}")
    end

    it "can be overwritten by worker expiration method" do
      overwritten_expiration = huge_expiration * 100
      allow_any_instance_of(StubJob).to receive(:expiration).and_return(overwritten_expiration)
      start_server(:expiration => huge_expiration) do
        StubJob.perform_async arg1: 'val1'
      end
      expect((huge_expiration+1)..overwritten_expiration).to cover redis.ttl("sidekiq:status:#{job_id}")
    end
  end
end
|
require File.join(File.dirname(__FILE__), *%w[.. lib remote_jobs.rb])
# Test double for a job model that mixes in RemoteJobs.
class MockJob
  include RemoteJobs

  # Mimic an AR-style finder: one fresh instance per call.
  def self.find
    [new]
  end
end
# Variant of MockJob whose finder yields no records at all.
class MockNoJob < MockJob
  def self.find
    []
  end
end
# Fake RemoteXmlReader that always yields a single canned job hash,
# regardless of the node name requested.
class MockRemoteXmlReader
  def child_nodes_to_hash(_node_name)
    [{ reference: '1', title: 'job title' }]
  end
end
# Fake RemoteXmlReader whose remote source contains no job nodes.
class MockEmptyRemoteXmlReader
  def child_nodes_to_hash(_node_name)
    []
  end
end
# Interface-level specs: drive the RemoteJobs mixin through the Mock*
# doubles defined above instead of real models and XML readers.
describe "RemoteJobs Interfaces" do
  describe ".find_jobs_to_sync" do
    subject { MockJob.find_jobs_to_sync { MockJob.find } }

    it "returns an array" do
      subject.should be_instance_of Array
    end

    context "when there are jobs to sync" do
      it "of jobs to be synchronised with the remote source" do
        subject.size.should_not == 0
        subject.each {|job| job.should be_instance_of MockJob}
      end
    end

    context "when there aren't any jobs to sync" do
      subject { MockJob.find_jobs_to_sync { MockNoJob.find } }

      it "returns an empty array" do
        subject.size.should == 0
      end
    end
  end

  describe ".find_remote_jobs" do
    before(:each) do
      # Swap in a reader with canned content for the given url.
      RemoteXmlReader.stub(:new).with('remote_url').
        and_return(MockRemoteXmlReader.new)
    end

    subject { MockJob.find_remote_jobs('remote_url') }

    it "takes a remote url and returns an array" do
      subject.should be_instance_of Array
    end

    context "when there are remote jobs" do
      it "of job attributes to be synchronised with the local database" do
        subject.size.should_not == 0
        subject.each {|job| job.should be_instance_of Hash}
      end
    end

    context "when there aren't any remote jobs" do
      before(:each) do
        RemoteXmlReader.stub(:new).with('remote_url').
          and_return(MockEmptyRemoteXmlReader.new)
      end

      it "returns an empty array" do
        subject.size.should == 0
      end
    end
  end
end
Adds stubs for the detailed behaviour of the module.
require File.join(File.dirname(__FILE__), *%w[.. lib remote_jobs.rb])

# Test double for a job model that mixes in RemoteJobs.
class MockJob
  include RemoteJobs
  def self.find
    [] << self.new
  end
end

# Variant of MockJob whose finder yields no records.
class MockNoJob < MockJob
  def self.find
    []
  end
end

# Fake RemoteXmlReader that always yields one canned job hash.
class MockRemoteXmlReader
  def child_nodes_to_hash(node_name)
    [{reference:'1', title: 'job title'}]
  end
end

# Fake RemoteXmlReader whose remote source contains no job nodes.
class MockEmptyRemoteXmlReader
  def child_nodes_to_hash(node_name)
    []
  end
end

# High-level behavioural contract for the module; these examples are
# intentionally pending (no blocks) and document planned behaviour.
describe RemoteJobs do
  describe "Jobs that exist remotely but not locally" do
    it "create a new job using the remote attributes"
  end

  describe "Jobs that exist remotely and locally" do
    it "update the existing job using the remote attributes"
  end

  describe "Remote attributes cause validation errors" do
    it "leaves the local job unchanged and writes an error to the log"
  end

  describe "Jobs that no longer exist in the remote source" do
    it "mark the job as not to be published"
  end
end

# Interface-level specs driven through the Mock* doubles above.
describe "RemoteJobs Interfaces" do
  describe ".find_jobs_to_sync" do
    subject { MockJob.find_jobs_to_sync { MockJob.find } }

    it "returns an array" do
      subject.should be_instance_of Array
    end

    context "when there are jobs to sync" do
      it "of jobs to be synchronised with the remote source" do
        subject.size.should_not == 0
        subject.each {|job| job.should be_instance_of MockJob}
      end
    end

    context "when there aren't any jobs to sync" do
      subject { MockJob.find_jobs_to_sync { MockNoJob.find } }

      it "returns an empty array" do
        subject.size.should == 0
      end
    end
  end

  describe ".find_remote_jobs" do
    before(:each) do
      RemoteXmlReader.stub(:new).with('remote_url').
        and_return(MockRemoteXmlReader.new)
    end

    subject { MockJob.find_remote_jobs('remote_url') }

    it "takes a remote url and returns an array" do
      subject.should be_instance_of Array
    end

    context "when there are remote jobs" do
      it "of job attributes to be synchronised with the local database" do
        subject.size.should_not == 0
        subject.each {|job| job.should be_instance_of Hash}
      end
    end

    context "when there aren't any remote jobs" do
      before(:each) do
        RemoteXmlReader.stub(:new).with('remote_url').
          and_return(MockEmptyRemoteXmlReader.new)
      end

      it "returns an empty array" do
        subject.size.should == 0
      end
    end
  end
end
|
require 'spec_helper'
# Spec for FindAnAccountPresenter, the view presenter behind the
# account-finder page: localized headings/intros, Google Maps embeds, and
# markup for recommended/selected accounts and geolocated results.
describe FindAnAccountPresenter do
let(:user){create(:user)}
let(:account_type){AccountType.SAFE_ACCOUNT}
let(:presenter){FindAnAccountPresenter.new(account_type, view)}
# Overridden in branched-content contexts (e.g. "chase_states.") to select
# a branch-specific I18n token.
let(:token_branching_element){''}
let(:bank_account){create(:bank_account)}
before do
presenter.user = user
end
# Passes when the presenter returns the I18n content for `token` wrapped in
# `tag`; a token that resolves to empty content must yield nil.
RSpec::Matchers.define :return_localized_content do |token, tag, interpolation_args|
chain :with_options do |options|
@options = options
end
match do |returned|
args = {default: ''}.merge(interpolation_args)
content = I18n.t(token, args)
if content == ''
returned.nil?
else
return_wrapped_content(content, tag).matches?(returned)
end
end
end
# Passes when `returned` is `content` wrapped in `tag`, optionally carrying
# the HTML attributes supplied via .with_options.
RSpec::Matchers.define :return_wrapped_content do |content, tag|
chain :with_options do |options|
@options = options
end
match do |returned|
if @options
have_tag(tag, text: content, with: @options).matches?(returned)
else
have_tag(tag, text: content).matches?(returned)
end
end
end
shared_examples 'a localized content wrapping method' do
let(:token) {"account_finder.account_type.#{account_type.name_id}.#{token_branching_element}#{property}"}
let(:tag) {:h3}
let(:interpolation_args) {{}}
it 'that passes options to the node' do
options = {class: 'someclass', id: 'someid'}
expect(presenter.send(property, options)).to return_localized_content(token, tag, interpolation_args).with_options(options)
end
end
shared_examples 'a content wrapping method' do
let(:options) { {class: 'someclass', id: 'someid'}}
it 'that passes options to the node' do
expect(presenter.send(method, options)).to return_wrapped_content(expected, tag).with_options(options)
end
end
# NOTE(review): URI.encode is obsolete in newer Rubies; kept here so the
# expectation matches whatever encoding the presenter itself uses.
shared_examples 'a google map block' do
it 'with specified query' do
src = URI::encode("https://www.google.com/maps/embed/v1/#{api_method}?key=#{ApiKeys.google_maps}&q=#{query}")
expected = view.render(partial: 'account_finder/account_type/google_map', locals: {src: src})
expect(presenter.send(method)).to eq(expected)
end
end
describe 'localized content strings' do
def expect_to_return_localized_string(presenter, property)
expect(presenter.send(property)).to eq(I18n.t("account_finder.account_type.#{account_type.name_id}.#{property}"))
end
describe '#sub_heading' do
it 'returns localized value' do
expect_to_return_localized_string(presenter, :sub_heading)
end
end
describe '#heading' do
it 'returns localized value' do
expect_to_return_localized_string(presenter, :heading)
end
end
describe '#page_title' do
it 'returns localized value' do
expect_to_return_localized_string(presenter, :page_title)
end
end
end
# Content that branches on the user's state of residence.
describe 'branched content' do
describe 'VETERANS ACCOUNT' do
let(:account_type){AccountType.VETERANS_ACCOUNT}
describe 'chase state' do
before do
user.state = State.find_by(code: 'NY')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"chase_states."}
let(:property) {:intro_heading}
end
it_behaves_like 'a google map block' do
let(:query) {"Chase Branches near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'non chase state' do
before do
user.state = State.find_by(code: 'AL')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"non_chase_states."}
let(:property) {:intro_heading}
end
it 'returns nil for map search' do
expect(presenter.google_map_search).to be_nil
end
end
end
describe 'SENIORS_ACCOUNT' do
let(:account_type){AccountType.SENIORS_ACCOUNT}
describe 'U.S. Bank States' do
before do
user.state = State.find_by(code: 'ID')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"us_bank_states."}
let(:property) {:intro_heading}
end
it_behaves_like 'a google map block' do
let(:query) {"USBank Branches near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'Non U.S. Bank States' do
before do
user.state = State.find_by(code: 'NY')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"non_us_bank_states."}
let(:property) {:intro_heading}
end
it_behaves_like 'a google map block' do
let(:query) {"Credit Unions near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
end
end
describe 'content blocks' do
describe '#intro_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:intro_heading}
end
end
describe '#intro' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:intro}
let(:tag) {:div}
end
end
describe '#we_recommend_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:we_recommend_heading}
# FIX: this heading renders in a <div>, not the shared default :h3;
# without this override the shared example asserted the wrong tag.
let(:tag) {:div}
end
end
describe '#why_chosen_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:why_chosen_heading}
end
end
describe '#why_chosen_description' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:why_chosen_description}
let(:tag) {:div}
let(:interpolation_args) {{zipcode: user.zipcode}}
end
end
describe '#geolocated_results_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:geolocated_results_heading}
let(:interpolation_args) {{zipcode: user.zipcode}}
let(:tag) {:h4}
end
end
describe '#geolocated_results_subheading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:geolocated_results_subheading}
let(:interpolation_args) {{zipcode: user.zipcode, num_results: 0}}
let(:tag) {:div}
end
end
end
describe '#recommended_option_block' do
it 'returns nil if no recommended option' do
expect(presenter.recommended_option_block).to be_nil
end
describe 'with recommended option' do
before do
presenter.recommended_option = bank_account
end
it 'renders a block' do
expected = view.render(partial:'account_finder/account_type/recommended_option', locals:{presenter: presenter})
expect(presenter.recommended_option_block).to eq expected
end
end
end
describe 'recommended_' do
before do
presenter.recommended_option = bank_account
end
describe '#recommended_account_name' do
it_behaves_like 'a content wrapping method' do
let(:method) {:recommended_account_name}
let(:expected) {bank_account.name}
let(:tag) {:h4}
end
end
describe '#recommended_branch_address' do
it_behaves_like 'a content wrapping method' do
let(:method) {:recommended_branch_address}
let(:expected) {bank_account.branch.full_address}
let(:tag) {:div}
end
end
describe '#recommended_branch_name' do
it_behaves_like 'a content wrapping method' do
let(:method) {:recommended_branch_name}
let(:expected) {bank_account.branch.full_name}
let(:tag) {:div}
end
end
describe '#recommended_available_at' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:recommended_available_at}
let(:interpolation_args) {{zipcode: user.zipcode, num_results: 0}}
let(:tag) {:div}
end
end
end
describe '#cta' do
it 'returns a link to help me open page' do
text = I18n.t('account_finder.account_type.help_to_open_cta')
path = account_opening_assistance_path(user, account_type)
expect(presenter.cta_button).to have_tag(:a, text: text, with: {href: path})
end
end
describe 'selected_result' do
before do
presenter.selected_result = bank_account
end
describe '#option_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:option_heading}
let(:interpolation_args) {{branch_name: bank_account.branch.full_name}}
let(:tag) {:h3}
end
end
describe '#option_subheading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:option_subheading}
let(:interpolation_args) {{branch_address: bank_account.branch.full_address}}
let(:tag) {:div}
end
end
describe '#geolocated_choice_map' do
describe 'when no selected choice' do
before do
presenter.selected_result = nil
end
it 'returns nil' do
expect(presenter.geolocated_choice_map).to be_nil
end
end
it_behaves_like 'a google map block' do
let(:query) {"#{bank_account.branch.bank.name}+#{bank_account.branch.full_address}"}
let(:api_method) {:place}
let(:method){:geolocated_choice_map}
end
end
end
describe 'results' do
let(:bank_account_2){create(:bank_account)}
let(:results){[bank_account, bank_account_2]}
before do
presenter.results = results
end
describe '#geolocated_options_block' do
it 'returns rendered block' do
expected = view.render(partial: 'account_finder/account_type/geolocated_options', locals: {presenter: presenter})
expect(presenter.geolocated_options_block).to eq expected
end
describe 'with no results' do
before do
presenter.results = nil
end
it 'returns nothing' do
expect(presenter.geolocated_options_block).to be_nil
end
end
end
describe '#geolocated_option_title' do
it 'returns branch name' do
# FIX: parenthesize the paren-less argument inside expect(...) to avoid
# Ruby's ambiguous-argument parse/warning; behavior is unchanged.
expect(presenter.geolocated_option_title(bank_account_2)).to eq bank_account_2.branch.full_name
end
end
describe '#geolocated_option_street' do
it 'returns branch street' do
expect(presenter.geolocated_option_street(bank_account_2)).to eq bank_account_2.branch.street
end
end
describe '#geolocated_distance_from_user' do
it 'returns span tag' do
distance = user.distance_to(bank_account_2.branch)
content = view.number_to_human(distance, units: :miles)
expected = view.content_tag(:span, content)
expect(presenter.geolocated_distance_from_user(bank_account_2)).to eq expected
end
end
describe '#geolocated_result_link' do
it 'returns wrapped link' do
src = account_finder_path(user, selected_account_id: bank_account_2.id )
content = "thing"
expected = view.link_to(content, src)
result = presenter.geolocated_result_link(bank_account_2) do
content
end
expect(result).to eq expected
end
end
end
describe 'online only methods' do
let(:account_type){AccountType.PREPAY_CARD}
let(:presenter){FindAnAccountPresenter.new(account_type, view)}
describe 'online_options' do
it 'makes an array from localized content' do
expected = I18n.t("account_finder.account_type.#{account_type.name_id}.online_options").to_a.map{|obj| obj[1]}
# NOTE(review): be_false is RSpec 2.x syntax; use be_falsey if upgrading.
expect(expected.empty?).to be_false
expect(presenter.online_options).to eq expected
end
end
describe '#online_option_feature_bullets' do
let(:list_options) {{class: 'listclass'}}
let(:bullet_options) {{class: 'bulletclass'}}
let(:bullets){{0=>"$4.95 per month", 1=>"No overdraft fees", 2=>"Uses the VISA network"}}
it 'returns list as rendered bullets' do
result = presenter.online_option_feature_bullets(bullets, list_options, bullet_options)
expect(result).to have_tag(:ul, with: list_options ) do
with_tag(:li, text: "$4.95 per month", with: bullet_options)
with_tag(:li, text: "No overdraft fees", with: bullet_options)
with_tag(:li, text: "Uses the VISA network", with: bullet_options)
end
end
end
end
describe 'google_map_search' do
describe 'credit union' do
let(:account_type) {AccountType.CREDIT_UNION}
it_behaves_like 'a google map block' do
let(:query) {"Credit Unions near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'regular accounts' do
let(:account_type) {AccountType.REGULAR_ACCOUNT}
it_behaves_like 'a google map block' do
let(:query) {"Free Checking near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'student accounts' do
let(:account_type) {AccountType.STUDENT_ACCOUNT}
it_behaves_like 'a google map block' do
let(:query) {"Free Student Checking near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
end
end
# Fixed broken tests: #we_recommend_heading now expects a :div wrapper.
require 'spec_helper'
# Spec for FindAnAccountPresenter, the view presenter behind the
# account-finder page: localized headings/intros, Google Maps embeds, and
# markup for recommended/selected accounts and geolocated results.
describe FindAnAccountPresenter do
let(:user){create(:user)}
let(:account_type){AccountType.SAFE_ACCOUNT}
let(:presenter){FindAnAccountPresenter.new(account_type, view)}
# Overridden in branched-content contexts (e.g. "chase_states.") to select
# a branch-specific I18n token.
let(:token_branching_element){''}
let(:bank_account){create(:bank_account)}
before do
presenter.user = user
end
# Passes when the presenter returns the I18n content for `token` wrapped in
# `tag`; a token that resolves to empty content must yield nil.
RSpec::Matchers.define :return_localized_content do |token, tag, interpolation_args|
chain :with_options do |options|
@options = options
end
match do |returned|
args = {default: ''}.merge(interpolation_args)
content = I18n.t(token, args)
if content == ''
returned.nil?
else
return_wrapped_content(content, tag).matches?(returned)
end
end
end
# Passes when `returned` is `content` wrapped in `tag`, optionally carrying
# the HTML attributes supplied via .with_options.
RSpec::Matchers.define :return_wrapped_content do |content, tag|
chain :with_options do |options|
@options = options
end
match do |returned|
if @options
have_tag(tag, text: content, with: @options).matches?(returned)
else
have_tag(tag, text: content).matches?(returned)
end
end
end
shared_examples 'a localized content wrapping method' do
let(:token) {"account_finder.account_type.#{account_type.name_id}.#{token_branching_element}#{property}"}
let(:tag) {:h3}
let(:interpolation_args) {{}}
it 'that passes options to the node' do
options = {class: 'someclass', id: 'someid'}
expect(presenter.send(property, options)).to return_localized_content(token, tag, interpolation_args).with_options(options)
end
end
shared_examples 'a content wrapping method' do
let(:options) { {class: 'someclass', id: 'someid'}}
it 'that passes options to the node' do
expect(presenter.send(method, options)).to return_wrapped_content(expected, tag).with_options(options)
end
end
# NOTE(review): URI.encode is obsolete in newer Rubies; kept here so the
# expectation matches whatever encoding the presenter itself uses.
shared_examples 'a google map block' do
it 'with specified query' do
src = URI::encode("https://www.google.com/maps/embed/v1/#{api_method}?key=#{ApiKeys.google_maps}&q=#{query}")
expected = view.render(partial: 'account_finder/account_type/google_map', locals: {src: src})
expect(presenter.send(method)).to eq(expected)
end
end
describe 'localized content strings' do
def expect_to_return_localized_string(presenter, property)
expect(presenter.send(property)).to eq(I18n.t("account_finder.account_type.#{account_type.name_id}.#{property}"))
end
describe '#sub_heading' do
it 'returns localized value' do
expect_to_return_localized_string(presenter, :sub_heading)
end
end
describe '#heading' do
it 'returns localized value' do
expect_to_return_localized_string(presenter, :heading)
end
end
describe '#page_title' do
it 'returns localized value' do
expect_to_return_localized_string(presenter, :page_title)
end
end
end
# Content that branches on the user's state of residence.
describe 'branched content' do
describe 'VETERANS ACCOUNT' do
let(:account_type){AccountType.VETERANS_ACCOUNT}
describe 'chase state' do
before do
user.state = State.find_by(code: 'NY')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"chase_states."}
let(:property) {:intro_heading}
end
it_behaves_like 'a google map block' do
let(:query) {"Chase Branches near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'non chase state' do
before do
user.state = State.find_by(code: 'AL')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"non_chase_states."}
let(:property) {:intro_heading}
end
it 'returns nil for map search' do
expect(presenter.google_map_search).to be_nil
end
end
end
describe 'SENIORS_ACCOUNT' do
let(:account_type){AccountType.SENIORS_ACCOUNT}
describe 'U.S. Bank States' do
before do
user.state = State.find_by(code: 'ID')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"us_bank_states."}
let(:property) {:intro_heading}
end
it_behaves_like 'a google map block' do
let(:query) {"USBank Branches near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'Non U.S. Bank States' do
before do
user.state = State.find_by(code: 'NY')
user.save!(validate: false)
end
it_behaves_like 'a localized content wrapping method' do
let(:token_branching_element) {"non_us_bank_states."}
let(:property) {:intro_heading}
end
it_behaves_like 'a google map block' do
let(:query) {"Credit Unions near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
end
end
describe 'content blocks' do
describe '#intro_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:intro_heading}
end
end
describe '#intro' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:intro}
let(:tag) {:div}
end
end
describe '#we_recommend_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:we_recommend_heading}
let(:tag) {:div}
end
end
describe '#why_chosen_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:why_chosen_heading}
end
end
describe '#why_chosen_description' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:why_chosen_description}
let(:tag) {:div}
let(:interpolation_args) {{zipcode: user.zipcode}}
end
end
describe '#geolocated_results_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:geolocated_results_heading}
let(:interpolation_args) {{zipcode: user.zipcode}}
let(:tag) {:h4}
end
end
describe '#geolocated_results_subheading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:geolocated_results_subheading}
let(:interpolation_args) {{zipcode: user.zipcode, num_results: 0}}
let(:tag) {:div}
end
end
end
describe '#recommended_option_block' do
it 'returns nil if no recommended option' do
expect(presenter.recommended_option_block).to be_nil
end
describe 'with recommended option' do
before do
presenter.recommended_option = bank_account
end
it 'renders a block' do
expected = view.render(partial:'account_finder/account_type/recommended_option', locals:{presenter: presenter})
expect(presenter.recommended_option_block).to eq expected
end
end
end
describe 'recommended_' do
before do
presenter.recommended_option = bank_account
end
describe '#recommended_account_name' do
it_behaves_like 'a content wrapping method' do
let(:method) {:recommended_account_name}
let(:expected) {bank_account.name}
let(:tag) {:h4}
end
end
describe '#recommended_branch_address' do
it_behaves_like 'a content wrapping method' do
let(:method) {:recommended_branch_address}
let(:expected) {bank_account.branch.full_address}
let(:tag) {:div}
end
end
describe '#recommended_branch_name' do
it_behaves_like 'a content wrapping method' do
let(:method) {:recommended_branch_name}
let(:expected) {bank_account.branch.full_name}
let(:tag) {:div}
end
end
describe '#recommended_available_at' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:recommended_available_at}
let(:interpolation_args) {{zipcode: user.zipcode, num_results: 0}}
let(:tag) {:div}
end
end
end
describe '#cta' do
it 'returns a link to help me open page' do
text = I18n.t('account_finder.account_type.help_to_open_cta')
path = account_opening_assistance_path(user, account_type)
expect(presenter.cta_button).to have_tag(:a, text: text, with: {href: path})
end
end
describe 'selected_result' do
before do
presenter.selected_result = bank_account
end
describe '#option_heading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:option_heading}
let(:interpolation_args) {{branch_name: bank_account.branch.full_name}}
let(:tag) {:h3}
end
end
describe '#option_subheading' do
it_behaves_like 'a localized content wrapping method' do
let(:property) {:option_subheading}
let(:interpolation_args) {{branch_address: bank_account.branch.full_address}}
let(:tag) {:div}
end
end
describe '#geolocated_choice_map' do
describe 'when no selected choice' do
before do
presenter.selected_result = nil
end
it 'returns nil' do
expect(presenter.geolocated_choice_map).to be_nil
end
end
it_behaves_like 'a google map block' do
let(:query) {"#{bank_account.branch.bank.name}+#{bank_account.branch.full_address}"}
let(:api_method) {:place}
let(:method){:geolocated_choice_map}
end
end
end
describe 'results' do
let(:bank_account_2){create(:bank_account)}
let(:results){[bank_account, bank_account_2]}
before do
presenter.results = results
end
describe '#geolocated_options_block' do
it 'returns rendered block' do
expected = view.render(partial: 'account_finder/account_type/geolocated_options', locals: {presenter: presenter})
expect(presenter.geolocated_options_block).to eq expected
end
describe 'with no results' do
before do
presenter.results = nil
end
it 'returns nothing' do
expect(presenter.geolocated_options_block).to be_nil
end
end
end
describe '#geolocated_option_title' do
it 'returns branch name' do
# FIX: parenthesize the paren-less argument inside expect(...) to avoid
# Ruby's ambiguous-argument parse/warning; behavior is unchanged.
expect(presenter.geolocated_option_title(bank_account_2)).to eq bank_account_2.branch.full_name
end
end
describe '#geolocated_option_street' do
it 'returns branch street' do
expect(presenter.geolocated_option_street(bank_account_2)).to eq bank_account_2.branch.street
end
end
describe '#geolocated_distance_from_user' do
it 'returns span tag' do
distance = user.distance_to(bank_account_2.branch)
content = view.number_to_human(distance, units: :miles)
expected = view.content_tag(:span, content)
expect(presenter.geolocated_distance_from_user(bank_account_2)).to eq expected
end
end
describe '#geolocated_result_link' do
it 'returns wrapped link' do
src = account_finder_path(user, selected_account_id: bank_account_2.id )
content = "thing"
expected = view.link_to(content, src)
result = presenter.geolocated_result_link(bank_account_2) do
content
end
expect(result).to eq expected
end
end
end
describe 'online only methods' do
let(:account_type){AccountType.PREPAY_CARD}
let(:presenter){FindAnAccountPresenter.new(account_type, view)}
describe 'online_options' do
it 'makes an array from localized content' do
expected = I18n.t("account_finder.account_type.#{account_type.name_id}.online_options").to_a.map{|obj| obj[1]}
# NOTE(review): be_false is RSpec 2.x syntax; use be_falsey if upgrading.
expect(expected.empty?).to be_false
expect(presenter.online_options).to eq expected
end
end
describe '#online_option_feature_bullets' do
let(:list_options) {{class: 'listclass'}}
let(:bullet_options) {{class: 'bulletclass'}}
let(:bullets){{0=>"$4.95 per month", 1=>"No overdraft fees", 2=>"Uses the VISA network"}}
it 'returns list as rendered bullets' do
result = presenter.online_option_feature_bullets(bullets, list_options, bullet_options)
expect(result).to have_tag(:ul, with: list_options ) do
with_tag(:li, text: "$4.95 per month", with: bullet_options)
with_tag(:li, text: "No overdraft fees", with: bullet_options)
with_tag(:li, text: "Uses the VISA network", with: bullet_options)
end
end
end
end
describe 'google_map_search' do
describe 'credit union' do
let(:account_type) {AccountType.CREDIT_UNION}
it_behaves_like 'a google map block' do
let(:query) {"Credit Unions near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'regular accounts' do
let(:account_type) {AccountType.REGULAR_ACCOUNT}
it_behaves_like 'a google map block' do
let(:query) {"Free Checking near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
describe 'student accounts' do
let(:account_type) {AccountType.STUDENT_ACCOUNT}
it_behaves_like 'a google map block' do
let(:query) {"Free Student Checking near #{user.zipcode}"}
let(:api_method) {:search}
let(:method){:google_map_search}
end
end
end
# FIX: removed the stray "|" token that followed this final `end` — it was a
# dataset/paste artifact and a Ruby syntax error.
end
# encoding: utf-8
require 'fileutils'
require 'tmpdir'
require 'spec_helper'
require 'timeout'
describe RuboCop::CLI, :isolated_environment do
include FileHelper
# Shared setup: capture stdout/stderr in StringIO so examples can assert on
# RuboCop's console output without polluting the test run's own output.
subject(:cli) { described_class.new }
before(:each) do
$stdout = StringIO.new
$stderr = StringIO.new
RuboCop::ConfigLoader.debug = false
end
after(:each) do
# Restore the real streams so later examples/reporters print normally.
$stdout = STDOUT
$stderr = STDERR
end
# Absolute-path helper used when asserting emacs-format output lines.
def abs(path)
File.expand_path(path)
end
describe 'option' do
describe '--version' do
# Both the short and long flags should print the version and exit 0; the
# captured stdout therefore contains the version string twice.
it 'exits cleanly' do
expect { cli.run ['-v'] }.to exit_with_code(0)
expect { cli.run ['--version'] }.to exit_with_code(0)
expect($stdout.string).to eq((RuboCop::Version::STRING + "\n") * 2)
end
end
describe '--auto-correct' do
# With --auto-correct every offense must be fixed and reported as
# [Corrected], leaving nothing uncorrected (hence exit code 0).
it 'corrects SymbolProc and SpaceBeforeBlockBraces offenses' do
source = ['foo.map{ |a| a.nil? }']
create_file('example.rb', source)
expect(cli.run(['-D', '--auto-correct'])).to eq(0)
corrected = "foo.map(&:nil?)\n"
expect(IO.read('example.rb')).to eq(corrected)
uncorrected = $stdout.string.split($RS).select do |line|
line.include?('example.rb:') && !line.include?('[Corrected]')
end
expect(uncorrected).to be_empty # Hence exit code 0.
end
# Restricting auto-correct to a single cop must not crash even though other
# (uncorrected) offenses remain in the file.
it 'corrects only IndentationWidth without crashing' do
source = ['foo = if bar',
' something',
'elsif baz',
' other_thing',
'else',
' fail',
'end']
create_file('example.rb', source)
# FIX: %w(...) is already an Array; the extra [...] wrapper passed a nested
# array ([["--only", ...]]) to CLI#run, which expects a flat argv array.
expect(cli.run(%w(--only IndentationWidth --auto-correct))).to eq(0)
corrected = ['foo = if bar',
' something',
'elsif baz',
' other_thing',
'else',
' fail',
'end',
''].join("\n")
expect(IO.read('example.rb')).to eq(corrected)
end
it 'crashes on infinite loop but prints offenses' do
create_file('example.rb', '3.times{ something;other_thing;}')
# This configuration makes --auto-correct impossible to finish since a
# space will be added after each ; but then removed again for the one
# that's inside }.
create_file('.rubocop.yml', ['SpaceInsideBlockBraces:',
' EnforcedStyle: no_space',
' SpaceBeforeBlockParameters: false'])
cmd = %w(--only SpaceAfterSemicolon,SpaceInsideBlockBraces
--auto-correct --format simple)
expect { cli.run(cmd) }.to raise_error(RuboCop::Runner::
InfiniteCorrectionLoop)
expect(IO.read('example.rb'))
.to eq("3.times{something; other_thing;}\n")
expected_output = [
'== example.rb ==',
'C: 1: 9: [Corrected] Space inside { detected.',
'C: 1: 19: [Corrected] Space missing after semicolon.',
'C: 1: 31: [Corrected] Space missing after semicolon.',
'C: 1: 32: [Corrected] Space inside } detected.',
'C: 1: 33: [Corrected] Space inside } detected.',
'',
# We're interrupted during inspection, hence 0 files inspected.
'0 files inspected, 5 offenses detected, 5 offenses corrected',
''
]
expect($stdout.string).to eq(expected_output.join("\n"))
end
it 'corrects complicated cases conservatively' do
# Two cops make corrections here; Style/BracesAroundHashParameters, and
# Style/AlignHash. Because they make minimal corrections relating only
# to their specific areas, and stay away from cleaning up extra
# whitespace in the process, the combined changes don't interfere with
# each other and the result is semantically the same as the starting
# point.
source = ['# encoding: utf-8',
'expect(subject[:address]).to eq({',
" street1: '1 Market',",
" street2: '#200',",
" city: 'Some Town',",
" state: 'CA',",
" postal_code: '99999-1111'",
'})']
create_file('example.rb', source)
expect(cli.run(['-D', '--auto-correct'])).to eq(0)
corrected =
['# encoding: utf-8',
'expect(subject[:address]).to eq(',
" street1: '1 Market',",
" street2: '#200',",
" city: 'Some Town',",
" state: 'CA',",
" postal_code: '99999-1111'",
')']
expect(IO.read('example.rb')).to eq(corrected.join("\n") + "\n")
end
it 'honors Exclude settings in individual cops' do
source = ['# encoding: utf-8',
'puts %x(ls)']
create_file('example.rb', source)
create_file('.rubocop.yml', ['Style/UnneededPercentX:',
' Exclude:',
' - example.rb'])
expect(cli.run(['--auto-correct'])).to eq(0)
expect($stdout.string).to include('no offenses detected')
expect(IO.read('example.rb')).to eq(source.join("\n") + "\n")
end
it 'corrects code with indentation problems' do
create_file('example.rb', ['# encoding: utf-8',
'module Bar',
'class Goo',
' def something',
' first call',
" do_other 'things'",
' if other > 34',
' more_work',
' end',
' end',
'end',
'end',
'',
'module Foo',
'class Bar',
'',
' stuff = [',
' {',
" some: 'hash',",
' },',
' {',
" another: 'hash',",
" with: 'more'",
' },',
' ]',
'end',
'end'
])
expect(cli.run(['--auto-correct'])).to eq(1)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'module Bar',
' class Goo',
' def something',
' first call',
" do_other 'things'",
' if other > 34',
' more_work',
' end',
' end',
' end',
'end',
'',
'module Foo',
' class Bar',
' stuff = [',
' {',
" some: 'hash'",
' },',
' {',
" another: 'hash',",
" with: 'more'",
' }',
' ]',
' end',
'end',
''].join("\n"))
end
it 'can change block comments and indent them' do
create_file('example.rb', ['# encoding: utf-8',
'module Foo',
'class Bar',
'=begin',
'This is a nice long',
'comment',
'which spans a few lines',
'=end',
' def baz',
' do_something',
' end',
'end',
'end'])
expect(cli.run(['--auto-correct'])).to eq(1)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'module Foo',
' class Bar',
' # This is a nice long',
' # comment',
' # which spans a few lines',
' def baz',
' do_something',
' end',
' end',
'end',
''].join("\n"))
end
it 'can correct two problems with blocks' do
# {} should be do..end and space is missing.
create_file('example.rb', ['# encoding: utf-8',
'(1..10).each{ |i|',
' puts i',
'}'])
expect(cli.run(['--auto-correct'])).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'(1..10).each do |i|',
' puts i',
'end',
''].join("\n"))
end
it 'can handle spaces when removing braces' do
create_file('example.rb',
['# encoding: utf-8',
"assert_post_status_code 400, 's', {:type => 'bad'}"])
expect(cli.run(%w(--auto-correct --format emacs))).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
"assert_post_status_code 400, 's', type: 'bad'",
''].join("\n"))
e = abs('example.rb')
expect($stdout.string)
.to eq(["#{e}:2:35: C: [Corrected] Redundant curly braces around " \
'a hash parameter.',
"#{e}:2:35: C: [Corrected] Use the new Ruby 1.9 hash " \
'syntax.',
# TODO: Don't report that a problem is corrected when it
# actually went away due to another correction.
"#{e}:2:35: C: [Corrected] Space inside { missing.",
# TODO: Don't report duplicates (HashSyntax in this case).
"#{e}:2:36: C: [Corrected] Use the new Ruby 1.9 hash " \
'syntax.',
"#{e}:2:50: C: [Corrected] Space inside } missing.",
''].join("\n"))
end
# A case where two cops, EmptyLinesAroundBody and EmptyLines, try to
# remove the same line in autocorrect.
it 'can correct two empty lines at end of class body' do
create_file('example.rb', ['class Test',
' def f',
' end',
'',
'',
'end'])
expect(cli.run(['--auto-correct'])).to eq(1)
expect($stderr.string).to eq('')
expect(IO.read('example.rb')).to eq(['class Test',
' def f',
' end',
'end',
''].join("\n"))
end
# A case where WordArray's correction can be clobbered by
# AccessModifierIndentation's correction.
it 'can correct indentation and another thing' do
create_file('example.rb', ['# encoding: utf-8',
'class Dsl',
'private',
' A = ["git", "path",]',
'end'])
expect(cli.run(%w(--auto-correct --format emacs))).to eq(1)
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'class Dsl',
' private',
'',
' A = %w(git path)',
'end',
''].join("\n"))
e = abs('example.rb')
expect($stdout.string)
.to eq(["#{e}:2:1: C: Missing top-level class documentation " \
'comment.',
"#{e}:3:1: C: [Corrected] Indent access modifiers like " \
'`private`.',
"#{e}:3:1: C: [Corrected] Keep a blank line before and " \
'after `private`.',
"#{e}:3:3: W: Useless `private` access modifier.",
"#{e}:3:3: C: [Corrected] Keep a blank line before and " \
'after `private`.',
"#{e}:4:7: C: [Corrected] Use `%w` or `%W` " \
'for array of words.',
"#{e}:4:8: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:4:15: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:4:21: C: [Corrected] Avoid comma after the last item " \
'of an array.',
"#{e}:5:7: C: [Corrected] Use `%w` or `%W` " \
'for array of words.',
"#{e}:5:8: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:5:15: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:5:21: C: [Corrected] Avoid comma after the last item " \
'of an array.',
''].join("\n"))
end
# A case where the same cop could try to correct an offense twice in one
# place.
it 'can correct empty line inside special form of nested modules' do
create_file('example.rb', ['module A module B',
'',
'end end'])
expect(cli.run(['--auto-correct'])).to eq(1)
expect(IO.read('example.rb')).to eq(['module A module B',
'end end',
''].join("\n"))
uncorrected = $stdout.string.split($RS).select do |line|
line.include?('example.rb:') && !line.include?('[Corrected]')
end
expect(uncorrected).not_to be_empty # Hence exit code 1.
end
it 'can correct single line methods' do
create_file('example.rb', ['# encoding: utf-8',
'def func1; do_something end # comment',
'def func2() do_1; do_2; end'])
expect(cli.run(%w(--auto-correct --format offenses))).to eq(0)
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'# comment',
'def func1',
' do_something',
'end',
'',
'def func2',
' do_1',
' do_2',
'end',
''].join("\n"))
expect($stdout.string).to eq(['',
'10 Style/TrailingWhitespace',
'5 Style/Semicolon',
'3 Style/SingleLineMethods',
'1 Style/DefWithParentheses',
'1 Style/EmptyLineBetweenDefs',
'--',
'20 Total',
'',
''].join("\n"))
end
# In this example, the auto-correction (changing "raise" to "fail")
# creates a new problem (alignment of parameters), which is also
# corrected automatically.
it 'can correct a problems and the problem it creates' do
create_file('example.rb',
['# encoding: utf-8',
'raise NotImplementedError,',
" 'Method should be overridden in child classes'"])
expect(cli.run(['--auto-correct'])).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'fail NotImplementedError,',
" 'Method should be overridden in child classes'",
''].join("\n"))
expect($stdout.string)
.to eq(['Inspecting 1 file',
'C',
'',
'Offenses:',
'',
'example.rb:2:1: C: [Corrected] Use fail instead of ' \
'raise to signal exceptions.',
'raise NotImplementedError,',
'^^^^^',
'example.rb:3:7: C: [Corrected] Align the parameters of a ' \
'method call if they span more than one line.',
" 'Method should be overridden in child classes'",
' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'',
'1 file inspected, 2 offenses detected, 2 offenses ' \
'corrected',
''].join("\n"))
end
# Thanks to repeated auto-correction, we can get rid of the trailing
# spaces, and then the extra empty line.
it 'can correct two problems in the same place' do
  create_file('example.rb',
              ['# encoding: utf-8',
               '# Example class.',
               'class Klass',
               ' ',
               ' def f',
               ' end',
               'end'])
  expect(cli.run(['--auto-correct'])).to eq(0)
  expect(IO.read('example.rb'))
    .to eq(['# encoding: utf-8',
            '# Example class.',
            'class Klass',
            ' def f',
            ' end',
            'end',
            ''].join("\n"))
  expect($stderr.string).to eq('')
  # Two offenses share the same location (4:1); both get corrected.
  expect($stdout.string)
    .to eq(['Inspecting 1 file',
            'C',
            '',
            'Offenses:',
            '',
            'example.rb:4:1: C: [Corrected] Extra empty line detected ' \
            'at class body beginning.',
            'example.rb:4:1: C: [Corrected] Trailing whitespace ' \
            'detected.',
            '',
            '1 file inspected, 2 offenses detected, 2 offenses ' \
            'corrected',
            ''].join("\n"))
end
# Corrects a def without parentheses plus a block that can become a
# Symbol#to_proc argument, with -D adding cop names to the messages.
it 'can correct MethodDefParentheses and other offense' do
  create_file('example.rb',
              ['# encoding: utf-8',
               'def primes limit',
               ' 1.upto(limit).select { |i| i.even? }',
               'end'])
  expect(cli.run(%w(-D --auto-correct))).to eq(0)
  expect($stderr.string).to eq('')
  expect(IO.read('example.rb'))
    .to eq(['# encoding: utf-8',
            'def primes(limit)',
            ' 1.upto(limit).select(&:even?)',
            'end',
            ''].join("\n"))
  expect($stdout.string)
    .to eq(['Inspecting 1 file',
            'C',
            '',
            'Offenses:',
            '',
            'example.rb:2:12: C: [Corrected] ' \
            'Style/MethodDefParentheses: ' \
            'Use def with parentheses when there are parameters.',
            'def primes limit',
            ' ^^^^^',
            'example.rb:3:3: C: [Corrected] Style/SymbolProc: ' \
            'Pass &:even? as an argument to select instead of a block.',
            ' 1.upto(limit).select { |i| i.even? }',
            ' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
            '',
            '1 file inspected, 2 offenses detected, 2 offenses ' \
            'corrected',
            ''].join("\n"))
end
# String arrays become %w literals and missing spaces after commas are
# inserted in the same auto-correct run.
it 'can correct WordArray and SpaceAfterComma offenses' do
  create_file('example.rb',
              ['# encoding: utf-8',
               "f(type: ['offline','offline_payment'],",
               " bar_colors: ['958c12','953579','ff5800','0085cc'])"])
  expect(cli.run(%w(-D --auto-correct --format o))).to eq(0)
  expect($stdout.string)
    .to eq(['',
            '4 Style/SpaceAfterComma',
            '2 Style/WordArray',
            '--',
            '6 Total',
            '',
            ''].join("\n"))
  expect(IO.read('example.rb'))
    .to eq(['# encoding: utf-8',
            'f(type: %w(offline offline_payment),',
            ' bar_colors: %w(958c12 953579 ff5800 0085cc))',
            ''].join("\n"))
end
# Hash-rocket syntax plus a missing space after a comma are both
# corrected; emacs format lists each corrected offense with its cop.
it 'can correct SpaceAfterComma and HashSyntax offenses' do
  create_file('example.rb',
              ['# encoding: utf-8',
               "I18n.t('description',:property_name => property.name)"])
  expect(cli.run(%w(-D --auto-correct --format emacs))).to eq(0)
  expect($stdout.string)
    .to eq(["#{abs('example.rb')}:2:21: C: [Corrected] " \
            'Style/SpaceAfterComma: Space missing after comma.',
            "#{abs('example.rb')}:2:22: C: [Corrected] " \
            'Style/HashSyntax: Use the new Ruby 1.9 hash syntax.',
            ''].join("\n"))
  expect(IO.read('example.rb'))
    .to eq(['# encoding: utf-8',
            "I18n.t('description', property_name: property.name)",
            ''].join("\n"))
end
# The HashSyntax correction (:b=>1 -> b: 1) also resolves the
# SpaceAroundOperators offense on the removed '=>' operator.
it 'can correct HashSyntax and SpaceAroundOperators offenses' do
  create_file('example.rb',
              ['# encoding: utf-8',
               '{ :b=>1 }'])
  expect(cli.run(%w(-D --auto-correct --format emacs))).to eq(0)
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       '{ b: 1 }',
                                       ''].join("\n"))
  expect($stdout.string)
    .to eq(["#{abs('example.rb')}:2:3: C: [Corrected] " \
            'Style/HashSyntax: Use the new Ruby 1.9 hash syntax.',
            "#{abs('example.rb')}:2:5: C: [Corrected] " \
            'Style/SpaceAroundOperators: Surrounding space missing for ' \
            "operator '=>'.",
            ''].join("\n"))
end
# Auto-correct still works when the cop set is restricted via --only;
# only the selected cop's offense is corrected and reported.
it 'can correct HashSyntax when --only is used' do
  create_file('example.rb',
              ['# encoding: utf-8',
               '{ :b=>1 }'])
  expect(cli.run(%w(--auto-correct -f emacs
                    --only Style/HashSyntax))).to eq(0)
  expect($stderr.string).to eq('')
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       '{ b: 1 }',
                                       ''].join("\n"))
  expect($stdout.string)
    .to eq(["#{abs('example.rb')}:2:3: C: [Corrected] Use the new " \
            'Ruby 1.9 hash syntax.',
            ''].join("\n"))
end
# Trailing blank lines and the trailing whitespace inside them are
# removed together, leaving a single final newline.
it 'can correct TrailingBlankLines and TrailingWhitespace offenses' do
  create_file('example.rb',
              ['# encoding: utf-8',
               '',
               ' ',
               '',
               ''])
  expect(cli.run(%w(--auto-correct --format emacs))).to eq(0)
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       ''].join("\n"))
  expect($stdout.string)
    .to eq(["#{abs('example.rb')}:2:1: C: [Corrected] 3 trailing " \
            'blank lines detected.',
            "#{abs('example.rb')}:3:1: C: [Corrected] Trailing " \
            'whitespace detected.',
            ''].join("\n"))
end
# Hash.new() is rewritten to the {} literal; the empty parentheses
# offense is corrected along the way.
it 'can correct MethodCallParentheses and EmptyLiteral offenses' do
  create_file('example.rb',
              ['# encoding: utf-8',
               'Hash.new()'])
  expect(cli.run(%w(--auto-correct --format emacs))).to eq(0)
  expect($stderr.string).to eq('')
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       '{}',
                                       ''].join("\n"))
  expect($stdout.string)
    .to eq(["#{abs('example.rb')}:2:1: C: [Corrected] Use hash " \
            'literal `{}` instead of `Hash.new`.',
            "#{abs('example.rb')}:2:9: C: [Corrected] Do not use " \
            'parentheses for method calls with no arguments.',
            ''].join("\n"))
end
# Hash values are re-aligned on their colons when
# EnforcedColonStyle: separator is configured.
# NOTE(review): the description says IndentHash but the configuration
# tweaks Style/AlignHash — confirm the intended cop name.
it 'can correct IndentHash offenses with separator style' do
  create_file('example.rb',
              ['# encoding: utf-8',
               'CONVERSION_CORRESPONDENCE = {',
               ' match_for_should: :match,',
               ' match_for_should_not: :match_when_negated,',
               ' failure_message_for_should: :failure_message,',
               'failure_message_for_should_not: :failure_message_when',
               '}'])
  create_file('.rubocop.yml',
              ['Style/AlignHash:',
               ' EnforcedColonStyle: separator'])
  expect(cli.run(%w(--auto-correct))).to eq(0)
  expect(IO.read('example.rb'))
    .to eq(['# encoding: utf-8',
            'CONVERSION_CORRESPONDENCE = {',
            ' match_for_should: :match,',
            ' match_for_should_not: :match_when_negated,',
            ' failure_message_for_should: :failure_message,',
            ' failure_message_for_should_not: :failure_message_when',
            '}',
            ''].join("\n"))
end
# Some corrections are deliberately skipped because they could change
# semantics (and/not/do..end with method args); the report must then
# omit the [Corrected] tag and the run must exit non-zero.
it 'does not say [Corrected] if correction was avoided' do
  create_file('example.rb', ['# encoding: utf-8',
                             'a = c and b',
                             'not a && b',
                             'func a do b end'])
  expect(cli.run(%w(-a -f simple))).to eq(1)
  expect($stderr.string).to eq('')
  # Source is untouched.
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       'a = c and b',
                                       'not a && b',
                                       'func a do b end',
                                       ''].join("\n"))
  expect($stdout.string)
    .to eq(['== example.rb ==',
            'C: 2: 7: Use && instead of and.',
            'C: 3: 1: Use ! instead of not.',
            'C: 4: 8: Prefer {...} over do...end for single-line ' \
            'blocks.',
            '',
            '1 file inspected, 3 offenses detected',
            ''].join("\n"))
end
# Regression guard: these two cops used to undo each other's
# corrections, looping forever. The timeout fails the spec instead of
# hanging the suite if that cycle ever comes back.
it 'does not hang SpaceAfterPunctuation and SpaceInsideParens' do
  create_file('example.rb', ['# encoding: utf-8', 'some_method(a, )'])
  Timeout.timeout(10) { expect(cli.run(%w(--auto-correct))).to eq(0) }
  expect($stderr.string).to eq('')
  corrected = ['# encoding: utf-8', 'some_method(a)', ''].join("\n")
  expect(IO.read('example.rb')).to eq(corrected)
end
# Regression guard: same potential correction cycle as the parentheses
# case above, but for array brackets.
it 'does not hang SpaceAfterPunctuation and SpaceInsideBrackets' do
  create_file('example.rb', ['# encoding: utf-8', 'puts [1, ]'])
  Timeout.timeout(10) { expect(cli.run(%w(--auto-correct))).to eq(0) }
  expect($stderr.string).to eq('')
  corrected = ['# encoding: utf-8', 'puts [1]', ''].join("\n")
  expect(IO.read('example.rb')).to eq(corrected)
end
end
describe '--auto-gen-config' do
  # Reset the formatter's accumulated per-cop state so specs don't
  # leak configuration into each other.
  before(:each) do
    RuboCop::Formatter::DisabledConfigFormatter
      .config_to_allow_offenses = {}
  end

  # Re-running --auto-gen-config replaces the previous todo file with
  # one reflecting the current offenses.
  it 'overwrites an existing todo file' do
    create_file('example1.rb', ['# encoding: utf-8',
                                'x= 0 ',
                                '#' * 85,
                                'y ',
                                'puts x'])
    create_file('.rubocop_todo.yml', ['Metrics/LineLength:',
                                      ' Enabled: false'])
    create_file('.rubocop.yml', ['inherit_from: .rubocop_todo.yml'])
    expect(cli.run(['--auto-gen-config'])).to eq(1)
    # Skip the generated header (first 7 lines) and compare the body.
    expect(IO.readlines('.rubocop_todo.yml')[7..-1].map(&:chomp))
      .to eq(['# Offense count: 1',
              '# Configuration parameters: AllowURI, URISchemes.',
              'Metrics/LineLength:',
              ' Max: 85',
              '',
              '# Offense count: 1',
              '# Cop supports --auto-correct.',
              'Style/SpaceAroundOperators:',
              ' Enabled: false',
              '',
              '# Offense count: 2',
              '# Cop supports --auto-correct.',
              'Style/TrailingWhitespace:',
              ' Enabled: false'])
    # Create new CLI instance to avoid using cached configuration.
    new_cli = described_class.new
    expect(new_cli.run(['example1.rb'])).to eq(0)
  end

  # --auto-gen-config always inspects the whole project; mixing it with
  # explicit file arguments is rejected.
  it 'exits with error if file arguments are given' do
    create_file('example1.rb', ['# encoding: utf-8',
                                'x= 0 ',
                                '#' * 85,
                                'y ',
                                'puts x'])
    expect { cli.run(['--auto-gen-config', 'example1.rb']) }
      .to exit_with_code(1)
    expect($stderr.string)
      .to eq(['--auto-gen-config can not be combined with any other ' \
              'arguments.',
              ''].join("\n"))
    expect($stdout.string).to eq('')
  end

  # Full happy path: generates .rubocop_todo.yml with header, usage
  # hint, and one entry per offending cop (sorted, with counts).
  it 'can generate a todo list' do
    create_file('example1.rb', ['# encoding: utf-8',
                                '$x= 0 ',
                                '#' * 90,
                                '#' * 85,
                                'y ',
                                'puts x'])
    create_file('example2.rb', ['# encoding: utf-8',
                                "\tx = 0",
                                'puts x'])
    expect(cli.run(['--auto-gen-config'])).to eq(1)
    expect($stderr.string).to eq('')
    expect($stdout.string)
      .to include(['Created .rubocop_todo.yml.',
                   'Run `rubocop --config .rubocop_todo.yml`, or',
                   'add inherit_from: .rubocop_todo.yml in a ' \
                   '.rubocop.yml file.',
                   ''].join("\n"))
    # The date/version line varies, so it is matched with a regexp
    # below instead of compared literally.
    expected =
      ['# This configuration was generated by `rubocop --auto-gen-config`',
       /# on .* using RuboCop version .*/,
       '# The point is for the user to remove these configuration records',
       '# one by one as the offenses are removed from the code base.',
       '# Note that changes in the inspected code, or installation of new',
       '# versions of RuboCop, may require this file to be generated ' \
       'again.',
       '',
       '# Offense count: 2',
       '# Configuration parameters: AllowURI, URISchemes.',
       'Metrics/LineLength:',
       ' Max: 90',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/CommentIndentation:',
       ' Enabled: false',
       '',
       '# Offense count: 1',
       '# Configuration parameters: AllowedVariables.',
       'Style/GlobalVars:',
       ' Enabled: false',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/IndentationConsistency:',
       ' Enabled: false',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/SpaceAroundOperators:',
       ' Enabled: false',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/Tab:',
       ' Enabled: false',
       '',
       '# Offense count: 2',
       '# Cop supports --auto-correct.',
       'Style/TrailingWhitespace:',
       ' Enabled: false']
    actual = IO.read('.rubocop_todo.yml').split($RS)
    expected.each_with_index do |line, ix|
      if line.is_a?(String)
        expect(actual[ix]).to eq(line)
      else
        expect(actual[ix]).to match(line)
      end
    end
  end

  # Syntax errors cannot be disabled via configuration, so the Syntax
  # cop must never appear in the generated todo file.
  it 'does not generate configuration for the Syntax cop' do
    create_file('example1.rb', ['# encoding: utf-8',
                                'x = < ', # Syntax error
                                'puts x'])
    create_file('example2.rb', ['# encoding: utf-8',
                                "\tx = 0",
                                'puts x'])
    expect(cli.run(['--auto-gen-config'])).to eq(1)
    expect($stderr.string).to eq('')
    expected =
      ['# This configuration was generated by `rubocop --auto-gen-config`',
       /# on .* using RuboCop version .*/,
       '# The point is for the user to remove these configuration records',
       '# one by one as the offenses are removed from the code base.',
       '# Note that changes in the inspected code, or installation of new',
       '# versions of RuboCop, may require this file to be generated ' \
       'again.',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/CommentIndentation:',
       ' Enabled: false',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/IndentationConsistency:',
       ' Enabled: false',
       '',
       '# Offense count: 1',
       '# Cop supports --auto-correct.',
       'Style/Tab:',
       ' Enabled: false']
    actual = IO.read('.rubocop_todo.yml').split($RS)
    # Exact length check ensures no extra (Syntax) entry was emitted.
    expect(actual.length).to eq(expected.length)
    expected.each_with_index do |line, ix|
      if line.is_a?(String)
        expect(actual[ix]).to eq(line)
      else
        expect(actual[ix]).to match(line)
      end
    end
  end

  # End-to-end check that running with the generated todo file as the
  # configuration silences the previously reported offense.
  it 'generates a todo list that removes the reports' do
    RuboCop::Cop::Style::RegexpLiteral.slash_count = 0
    create_file('example.rb', ['# encoding: utf-8',
                               'y.gsub!(%r{abc/xyz}, "#{x}")'])
    expect(cli.run(%w(--format emacs))).to eq(1)
    expect($stdout.string)
      .to eq(["#{abs('example.rb')}:2:9: C: Use %r only for regular " \
              "expressions matching more than 1 '/' character.",
              ''].join("\n"))
    expect(cli.run(['--auto-gen-config'])).to eq(1)
    expected =
      ['# This configuration was generated by `rubocop --auto-gen-config`',
       /# on .* using RuboCop version .*/,
       '# The point is for the user to remove these configuration records',
       '# one by one as the offenses are removed from the code base.',
       '# Note that changes in the inspected code, or installation of new',
       '# versions of RuboCop, may require this file to be generated ' \
       'again.',
       '',
       '# Offense count: 1',
       '# Configuration parameters: MaxSlashes.',
       'Style/RegexpLiteral:',
       ' Enabled: false']
    actual = IO.read('.rubocop_todo.yml').split($RS)
    expected.each_with_index do |line, ix|
      if line.is_a?(String)
        expect(actual[ix]).to eq(line)
      else
        expect(actual[ix]).to match(line)
      end
    end
    # Fresh stdout; inspecting with the todo config must be clean.
    $stdout = StringIO.new
    result = cli.run(%w(--config .rubocop_todo.yml --format emacs))
    expect($stdout.string).to eq('')
    expect(result).to eq(0)
  end
end
describe '--only' do
  context 'when one cop is given' do
    it 'runs just one cop' do
      create_file('example.rb', ['if x== 0 ',
                                 "\ty",
                                 'end'])
      # IfUnlessModifier depends on the configuration of LineLength.
      expect(cli.run(['--format', 'simple',
                      '--only', 'Style/IfUnlessModifier',
                      'example.rb'])).to eq(1)
      expect($stdout.string)
        .to eq(['== example.rb ==',
                'C: 1: 1: Favor modifier if usage when ' \
                'having a single-line body. Another good alternative is ' \
                'the usage of control flow &&/||.',
                '',
                '1 file inspected, 1 offense detected',
                ''].join("\n"))
    end

    # Unknown cop names are a user error, not silently ignored.
    it 'exits with error if an incorrect cop name is passed' do
      create_file('example.rb', ['if x== 0 ',
                                 "\ty",
                                 'end'])
      expect(cli.run(['--only', 'Style/123'])).to eq(1)
      expect($stderr.string).to include('Unrecognized cop name: Style/123.')
    end

    # Cops defined by a required extension are selectable with --only.
    it 'accepts cop names from plugins' do
      create_file('.rubocop.yml', ['require: rubocop_ext',
                                   '',
                                   'Style/SomeCop:',
                                   ' Description: Something',
                                   ' Enabled: true'])
      create_file('rubocop_ext.rb', ['module RuboCop',
                                     ' module Cop',
                                     ' module Style',
                                     ' class SomeCop < Cop',
                                     ' end',
                                     ' end',
                                     ' end',
                                     'end'])
      create_file('redirect.rb', '$stderr = STDOUT')
      rubocop = "#{RuboCop::ConfigLoader::RUBOCOP_HOME}/bin/rubocop"
      # Since we define a new cop class, we have to do this in a separate
      # process. Otherwise, the extra cop will affect other specs.
      output =
        `ruby -I . #{rubocop} --require redirect.rb --only Style/SomeCop`
      expect($CHILD_STATUS.success?).to be_truthy
      # The warning about the unrecognized cop is expected. It's given due
      # to the fact that we haven't supplied any default configuration for
      # rubocop_ext in this example.
      expect(output)
        .to eq(['Warning: unrecognized cop Style/SomeCop found in ' \
                "#{abs('.rubocop.yml')}",
                'Inspecting 2 files',
                '..',
                '',
                '2 files inspected, no offenses detected',
                ''].join("\n"))
    end

    context 'without using namespace' do
      # The bare cop name (no Style/ prefix) is resolved to its
      # namespaced form.
      it 'runs just one cop' do
        create_file('example.rb', ['if x== 0 ',
                                   "\ty",
                                   'end'])
        expect(cli.run(['--format', 'simple',
                        '--display-cop-names',
                        '--only', 'IfUnlessModifier',
                        'example.rb'])).to eq(1)
        expect($stdout.string)
          .to eq(['== example.rb ==',
                  'C: 1: 1: Style/IfUnlessModifier: Favor modifier if ' \
                  'usage when having a single-line body. Another good ' \
                  'alternative is the usage of control flow &&/||.',
                  '',
                  '1 file inspected, 1 offense detected',
                  ''].join("\n"))
      end
    end

    # --only overrides Enabled: false in configuration, but inline
    # rubocop:disable comments still win.
    it 'enables the given cop' do
      create_file('example.rb',
                  ['x = 0 ',
                   # Disabling comments still apply.
                   '# rubocop:disable Style/TrailingWhitespace',
                   'y = 1 '])
      create_file('.rubocop.yml', ['Style/TrailingWhitespace:',
                                   ' Enabled: false'])
      expect(cli.run(['--format', 'simple',
                      '--only', 'Style/TrailingWhitespace',
                      'example.rb'])).to eq(1)
      expect($stderr.string).to eq('')
      expect($stdout.string)
        .to eq(['== example.rb ==',
                'C: 1: 6: Trailing whitespace detected.',
                '',
                '1 file inspected, 1 offense detected',
                ''].join("\n"))
    end
  end

  context 'when several cops are given' do
    # Comma-separated cop list: exactly those cops run, nothing else.
    it 'runs the given cops' do
      create_file('example.rb', ['if x== 100000000000000 ',
                                 "\ty",
                                 'end'])
      expect(cli.run(['--format', 'simple',
                      '--only',
                      'Style/IfUnlessModifier,Style/Tab,' \
                      'Style/SpaceAroundOperators',
                      'example.rb'])).to eq(1)
      expect($stderr.string).to eq('')
      expect($stdout.string)
        .to eq(['== example.rb ==',
                'C: 1: 1: Favor modifier if usage when ' \
                'having a single-line body. Another good alternative is ' \
                'the usage of control flow &&/||.',
                "C: 1: 5: Surrounding space missing for operator '=='.",
                'C: 2: 1: Tab detected.',
                '',
                '1 file inspected, 3 offenses detected',
                ''].join("\n"))
    end

    context 'and --lint' do
      # --lint widens the --only selection to include all enabled lint
      # cops (here UselessAssignment; EndAlignment is disabled).
      it 'runs the given cops plus all enabled lint cops' do
        create_file('example.rb', ['if x== 100000000000000 ',
                                   "\ty = 3",
                                   ' end'])
        create_file('.rubocop.yml', ['Lint/EndAlignment:',
                                     ' Enabled: false'])
        expect(cli.run(['--format', 'simple',
                        '--only', 'Style/Tab,Style/SpaceAroundOperators',
                        '--lint',
                        'example.rb'])).to eq(1)
        expect($stdout.string)
          .to eq(['== example.rb ==',
                  'C: 1: 5: Surrounding space missing for operator ' \
                  "'=='.",
                  'C: 2: 1: Tab detected.',
                  'W: 2: 2: Useless assignment to variable - y.',
                  '',
                  '1 file inspected, 3 offenses detected',
                  ''].join("\n"))
      end
    end
  end
end
describe '--except' do
  context 'when two cops are given' do
    # The listed cops are excluded; every other applicable cop runs.
    it 'runs all cops except the given' do
      create_file('example.rb', ['if x== 0 ',
                                 "\ty",
                                 'end'])
      expect(cli.run(['--format', 'offenses',
                      '--except', 'Style/IfUnlessModifier,Style/Tab',
                      'example.rb'])).to eq(1)
      expect($stdout.string)
        .to eq(['',
                '1 Style/IndentationWidth',
                '1 Style/SpaceAroundOperators',
                '1 Style/TrailingWhitespace',
                '--',
                '3 Total',
                '',
                ''].join("\n"))
    end

    # Unknown cop names are rejected just like with --only.
    it 'exits with error if an incorrect cop name is passed' do
      create_file('example.rb', ['if x== 0 ',
                                 "\ty",
                                 'end'])
      expect(cli.run(['--except', 'Style/123'])).to eq(1)
      expect($stderr.string).to include('Unrecognized cop name: Style/123.')
    end

    context 'when one cop is given without namespace' do
      # Compares a run with and without --except; the diff must be
      # exactly the excluded cop's tally line and the total.
      it 'disables the given cop' do
        create_file('example.rb', ['if x== 0 ',
                                   "\ty",
                                   'end'])
        cli.run(['--format', 'offenses',
                 '--except', 'IfUnlessModifier',
                 'example.rb'])
        with_option = $stdout.string
        $stdout = StringIO.new
        cli.run(['--format', 'offenses',
                 'example.rb'])
        without_option = $stdout.string
        expect(without_option.split($RS) - with_option.split($RS))
          .to eq(['1 Style/IfUnlessModifier', '5 Total'])
      end
    end
  end

  context 'when several cops are given' do
    it 'disables the given cops' do
      create_file('example.rb', ['if x== 100000000000000 ',
                                 "\ty",
                                 'end'])
      expect(cli.run(['--format', 'offenses',
                      '--except',
                      'Style/IfUnlessModifier,Style/Tab,' \
                      'Style/SpaceAroundOperators',
                      'example.rb'])).to eq(1)
      expect($stderr.string).to eq('')
      expect($stdout.string)
        .to eq(['',
                '1 Style/IndentationWidth',
                '1 Style/NumericLiterals',
                '1 Style/TrailingWhitespace',
                '--',
                '3 Total',
                '',
                ''].join("\n"))
    end
  end
end
describe '--lint' do
  # With --lint alone, style cops are skipped entirely; only the lint
  # (W-severity) offense is reported.
  it 'runs only lint cops' do
    create_file('example.rb', ['if 0 ',
                               "\ty",
                               'end'])
    # IfUnlessModifier depends on the configuration of LineLength.
    expect(cli.run(['--format', 'simple', '--lint',
                    'example.rb'])).to eq(1)
    expect($stdout.string)
      .to eq(['== example.rb ==',
              'W: 1: 4: Literal 0 appeared in a condition.',
              '',
              '1 file inspected, 1 offense detected',
              ''].join("\n"))
  end
end
describe '-d/--debug' do
  # --debug prints the resolved configuration chain before inspecting.
  it 'shows config files' do
    create_file('example1.rb', "\tputs 0")
    expect(cli.run(['--debug', 'example1.rb'])).to eq(1)
    # Three levels up from this spec file = the gem root.
    home = File.dirname(File.dirname(File.dirname(__FILE__)))
    expect($stdout.string.lines.grep(/configuration/).map(&:chomp))
      .to eq(["For #{abs('')}:" \
              " configuration from #{home}/config/default.yml",
              "Inheriting configuration from #{home}/config/enabled.yml",
              "Inheriting configuration from #{home}/config/disabled.yml"
             ])
  end

  # --debug also implies cop names in offense messages.
  it 'shows cop names' do
    create_file('example1.rb', "\tputs 0")
    expect(cli.run(['--format',
                    'emacs',
                    '--debug',
                    'example1.rb'])).to eq(1)
    expect($stdout.string.lines.to_a[-1])
      .to eq(["#{abs('example1.rb')}:1:1: C: Style/Tab: Tab detected.",
              ''].join("\n"))
  end
end
describe '-D/--display-cop-names' do
  # The offense message is prefixed with the cop name when the option
  # is given.
  it 'shows cop names' do
    create_file('example1.rb', "\tputs 0")
    # Exercise the option under test. Previously this spec passed
    # '--debug', duplicating the -d/--debug spec above and leaving
    # --display-cop-names itself untested.
    expect(cli.run(['--format',
                    'emacs',
                    '--display-cop-names',
                    'example1.rb'])).to eq(1)
    expect($stdout.string.lines.to_a[-1])
      .to eq(["#{abs('example1.rb')}:1:1: C: Style/Tab: Tab detected.",
              ''].join("\n"))
  end
end
describe '--show-cops' do
  # Shared check: whatever cops were requested, the printed YAML must
  # reflect the merged (global + .rubocop.yml) configuration values.
  shared_examples(:prints_config) do
    it 'prints the current configuration' do
      out = stdout.lines.to_a
      printed_config = YAML.load(out.join)
      cop_names = (cop_list[0] || '').split(',')
      cop_names.each do |cop_name|
        global_conf[cop_name].each do |key, value|
          printed_value = printed_config[cop_name][key]
          expect(printed_value).to eq(value)
        end
      end
    end
  end

  let(:cops) { RuboCop::Cop::Cop.all }

  # The fully-merged configuration as RuboCop itself would load it.
  let(:global_conf) do
    config_path =
      RuboCop::ConfigLoader.configuration_file_for(Dir.pwd.to_s)
    RuboCop::ConfigLoader.configuration_from_file(config_path)
  end

  let(:stdout) { $stdout.string }

  before do
    create_file('.rubocop.yml', ['Metrics/LineLength:',
                                 ' Max: 110'])
    # --show-cops exits 0 after printing; run once for all examples.
    expect { cli.run ['--show-cops'] + cop_list }.to exit_with_code(0)
  end

  context 'with no args' do
    let(:cop_list) { [] }

    # Extracts the first line out of the description
    def short_description_of_cop(cop)
      desc = full_description_of_cop(cop)
      desc ? desc.lines.first.strip : ''
    end

    # Gets the full description of the cop or nil if no description is set.
    def full_description_of_cop(cop)
      cop_config = global_conf.for_cop(cop)
      cop_config['Description']
    end

    it 'prints all available cops and their description' do
      cops.each do |cop|
        expect(stdout).to include cop.cop_name
        # Because of line breaks, we will only find the beginning.
        expect(stdout).to include short_description_of_cop(cop)[0..60]
      end
    end

    it 'prints all types' do
      cops
        .types
        .map(&:to_s)
        .map(&:capitalize)
        .each { |type| expect(stdout).to include(type) }
    end

    it 'prints all cops in their right type listing' do
      lines = stdout.lines
      lines.slice_before(/Type /).each do |slice|
        types = cops.types.map(&:to_s).map(&:capitalize)
        current = types.delete(slice.shift[/Type '(?<c>[^']+)'/, 'c'])
        # all cops in their type listing
        cops.with_type(current).each do |cop|
          expect(slice.any? { |l| l.include? cop.cop_name }).to be_truthy
        end
        # no cop in wrong type listing
        types.each do |type|
          cops.with_type(type).each do |cop|
            expect(slice.any? { |l| l.include? cop.cop_name }).to be_falsey
          end
        end
      end
    end

    include_examples :prints_config
  end

  context 'with one cop given' do
    let(:cop_list) { ['Style/Tab'] }

    it 'prints that cop and nothing else' do
      expect(stdout).to eq(
        ['# Supports --auto-correct',
         'Style/Tab:',
         ' Description: No hard tabs.',
         ' StyleGuide: ' \
         'https://github.com/bbatsov/ruby-style-guide#spaces-indentation',
         ' Enabled: true',
         '',
         ''].join("\n"))
    end

    include_examples :prints_config
  end

  context 'with two cops given' do
    let(:cop_list) { ['Style/Tab,Metrics/LineLength'] }
    include_examples :prints_config
  end

  context 'with one of the cops misspelled' do
    let(:cop_list) { ['Style/Tab,Lint/X123'] }

    # Misspelled names are silently dropped; the valid cop still prints.
    it 'skips the unknown cop' do
      expect(stdout).to eq(
        ['# Supports --auto-correct',
         'Style/Tab:',
         ' Description: No hard tabs.',
         ' StyleGuide: ' \
         'https://github.com/bbatsov/ruby-style-guide#spaces-indentation',
         ' Enabled: true',
         '',
         ''].join("\n"))
    end
  end
end
describe '-f/--format' do
  let(:target_file) { 'example.rb' }

  before do
    # 90-char line => one Metrics/LineLength offense in every example.
    create_file(target_file, ['# encoding: utf-8',
                              '#' * 90])
  end

  describe 'builtin formatters' do
    context 'when simple format is specified' do
      it 'outputs with simple format' do
        cli.run(['--format', 'simple', 'example.rb'])
        expect($stdout.string)
          .to include(["== #{target_file} ==",
                       'C: 2: 81: Line is too long. [90/80]'].join("\n"))
      end
    end

    context 'when clang format is specified' do
      # clang format shows the offending source line plus a caret
      # marker line under each offense.
      it 'outputs with clang format' do
        create_file('example1.rb', ['# encoding: utf-8',
                                    'x= 0 ',
                                    '#' * 85,
                                    'y ',
                                    'puts x'])
        create_file('example2.rb', ['# encoding: utf-8',
                                    "\tx",
                                    'def a',
                                    ' puts',
                                    'end'])
        create_file('example3.rb', ['# encoding: utf-8',
                                    'def badName',
                                    ' if something',
                                    ' test',
                                    ' end',
                                    'end'])
        expect(cli.run(['--format', 'clang', 'example1.rb',
                        'example2.rb', 'example3.rb']))
          .to eq(1)
        expect($stdout.string)
          .to eq(['example1.rb:2:2: C: Surrounding space missing for ' \
                  "operator '='.",
                  'x= 0 ',
                  ' ^',
                  'example1.rb:2:5: C: Trailing whitespace detected.',
                  'x= 0 ',
                  ' ^',
                  'example1.rb:3:81: C: Line is too long. [85/80]',
                  '###################################################' \
                  '##################################',
                  ' ' \
                  ' ^^^^^',
                  'example1.rb:4:2: C: Trailing whitespace detected.',
                  'y ',
                  ' ^',
                  'example2.rb:1:1: C: Incorrect indentation detected' \
                  ' (column 0 instead of 1).',
                  '# encoding: utf-8',
                  '^^^^^^^^^^^^^^^^^',
                  'example2.rb:2:1: C: Tab detected.',
                  "\tx",
                  '^',
                  'example2.rb:3:1: C: Inconsistent indentation ' \
                  'detected.',
                  'def a',
                  '^^^^^',
                  'example2.rb:4:1: C: Use 2 (not 3) spaces for ' \
                  'indentation.',
                  ' puts',
                  '^^^',
                  'example3.rb:2:5: C: Use snake_case for method names.',
                  'def badName',
                  ' ^^^^^^^',
                  'example3.rb:3:3: C: Use a guard clause instead of ' \
                  'wrapping the code inside a conditional expression.',
                  ' if something',
                  ' ^^',
                  'example3.rb:3:3: C: Favor modifier if usage ' \
                  'when having a single-line body. Another good ' \
                  'alternative is the usage of control flow &&/||.',
                  ' if something',
                  ' ^^',
                  'example3.rb:5:5: W: end at 5, 4 is not aligned ' \
                  'with if at 3, 2',
                  ' end',
                  ' ^^^',
                  '',
                  '3 files inspected, 12 offenses detected',
                  ''].join("\n"))
      end
    end

    context 'when emacs format is specified' do
      # emacs format prints absolute-path:line:col per offense.
      it 'outputs with emacs format' do
        create_file('example1.rb', ['# encoding: utf-8',
                                    'x= 0 ',
                                    'y ',
                                    'puts x'])
        create_file('example2.rb', ['# encoding: utf-8',
                                    "\tx = 0",
                                    'puts x'])
        expect(cli.run(['--format', 'emacs', 'example1.rb',
                        'example2.rb'])).to eq(1)
        expected_output =
          ["#{abs('example1.rb')}:2:2: C: Surrounding space missing" \
           " for operator '='.",
           "#{abs('example1.rb')}:2:5: C: Trailing whitespace detected.",
           "#{abs('example1.rb')}:3:2: C: Trailing whitespace detected.",
           "#{abs('example2.rb')}:1:1: C: Incorrect indentation detected" \
           ' (column 0 instead of 1).',
           "#{abs('example2.rb')}:2:1: C: Tab detected.",
           "#{abs('example2.rb')}:3:1: C: Inconsistent indentation " \
           'detected.',
           ''].join("\n")
        expect($stdout.string).to eq(expected_output)
      end
    end

    context 'when unknown format name is specified' do
      it 'aborts with error message' do
        expect { cli.run(['--format', 'unknown', 'example.rb']) }
          .to exit_with_code(1)
        expect($stderr.string)
          .to include('No formatter for "unknown"')
      end
    end

    context 'when ambiguous format name is specified' do
      it 'aborts with error message' do
        # Both 'files' and 'fuubar' start with an 'f'.
        expect { cli.run(['--format', 'f', 'example.rb']) }
          .to exit_with_code(1)
        expect($stderr.string)
          .to include('Cannot determine formatter for "f"')
      end
    end
  end

  describe 'custom formatter' do
    let(:target_file) { abs('example.rb') }

    context 'when a class name is specified' do
      # Defines a formatter inline and checks all four lifecycle hooks
      # are invoked in order with the expected file lists.
      it 'uses the class as a formatter' do
        module MyTool
          class RuboCopFormatter < RuboCop::Formatter::BaseFormatter
            def started(all_files)
              output.puts "started: #{all_files.join(',')}"
            end

            def file_started(file, _options)
              output.puts "file_started: #{file}"
            end

            def file_finished(file, _offenses)
              output.puts "file_finished: #{file}"
            end

            def finished(processed_files)
              output.puts "finished: #{processed_files.join(',')}"
            end
          end
        end

        cli.run(['--format', 'MyTool::RuboCopFormatter', 'example.rb'])
        expect($stdout.string).to eq(["started: #{target_file}",
                                      "file_started: #{target_file}",
                                      "file_finished: #{target_file}",
                                      "finished: #{target_file}",
                                      ''].join("\n"))
      end
    end

    context 'when unknown class name is specified' do
      it 'aborts with error message' do
        args = '--format UnknownFormatter example.rb'
        expect { cli.run(args.split) }.to exit_with_code(1)
        expect($stderr.string).to include('UnknownFormatter')
      end
    end
  end

  # Multiple --format options run multiple formatters in one pass.
  it 'can be used multiple times' do
    cli.run(['--format', 'simple', '--format', 'emacs', 'example.rb'])
    expect($stdout.string)
      .to include(["== #{target_file} ==",
                   'C: 2: 81: Line is too long. [90/80]',
                   "#{abs(target_file)}:2:81: C: Line is too long. " \
                   '[90/80]'].join("\n"))
  end
end
describe '-o/--out option' do
  let(:target_file) { 'example.rb' }

  before do
    # One over-long line guarantees a LineLength offense to report.
    create_file(target_file, ['# encoding: utf-8', '#' * 90])
  end

  # --out sends the active formatter's report to a file.
  it 'redirects output to the specified file' do
    cli.run(['--out', 'output.txt', target_file])
    expect(File.read('output.txt')).to include('Line is too long.')
  end

  # Each --out binds to the formatter given immediately before it, so
  # simple goes to stdout while emacs goes to the file.
  it 'is applied to the previously specified formatter' do
    cli.run(['--format', 'simple',
             '--format', 'emacs', '--out', 'emacs_output.txt',
             target_file])
    simple_report = ["== #{target_file} ==",
                     'C: 2: 81: Line is too long. [90/80]',
                     '',
                     '1 file inspected, 1 offense detected',
                     ''].join("\n")
    expect($stdout.string).to eq(simple_report)
    emacs_report =
      ["#{abs(target_file)}:2:81: C: Line is too long. [90/80]",
       ''].join("\n")
    expect(File.read('emacs_output.txt')).to eq(emacs_report)
  end
end
describe '--fail-level option' do
  let(:target_file) { 'example.rb' }

  before do
    # One over-long line => a single convention-severity offense.
    create_file(target_file, ['# encoding: utf-8',
                              '#' * 90])
  end

  it 'fails when option is less than the severity level' do
    expect(cli.run(['--fail-level', 'convention', target_file])).to eq(1)
  end

  # NOTE: description fixed from 'succeed' to 'succeeds' (grammar).
  it 'succeeds when option is greater than the severity level' do
    expect(cli.run(['--fail-level', 'warning', target_file])).to eq(0)
  end
end
describe '--force-exclusion' do
  let(:target_file) { 'example.rb' }

  before do
    # The file has an offense, but AllCops/Exclude lists it.
    create_file(target_file, ['# encoding: utf-8', '#' * 90])
    exclusion_config = ['AllCops:',
                        ' Exclude:',
                        " - #{target_file}"]
    create_file('.rubocop.yml', exclusion_config)
  end

  # With --force-exclusion the Exclude setting wins even over explicit
  # command-line arguments, so the file is skipped and the run is clean.
  it 'excludes files specified in the configuration Exclude ' \
     'even if they are explicitly passed as arguments' do
    exit_status = cli.run(['--force-exclusion', target_file])
    expect(exit_status).to eq(0)
  end
end
end
context 'when interrupted' do
  # An aborted run (e.g. Ctrl-C) must report failure via exit code 1,
  # even if the inspected file itself is clean.
  it 'returns 1' do
    allow_any_instance_of(RuboCop::Runner)
      .to receive(:aborting?).and_return(true)
    create_file('example.rb', '# encoding: utf-8')
    expect(cli.run(['example.rb'])).to eq(1)
  end
end
describe '#trap_interrupt' do
  let(:runner) { RuboCop::Runner.new({}, RuboCop::ConfigStore.new) }
  let(:interrupt_handlers) { [] }

  before do
    # Capture the INT handler instead of installing a real signal trap,
    # so specs can invoke it deterministically.
    allow(Signal).to receive(:trap).with('INT') do |&block|
      interrupt_handlers << block
    end
  end

  # Simulates delivery of SIGINT by calling the captured handler(s).
  def interrupt
    interrupt_handlers.each(&:call)
  end

  it 'adds a handler for SIGINT' do
    expect(interrupt_handlers).to be_empty
    cli.trap_interrupt(runner)
    expect(interrupt_handlers.size).to eq(1)
  end

  context 'with SIGINT once' do
    # First interrupt asks the runner to finish gracefully.
    it 'aborts processing' do
      cli.trap_interrupt(runner)
      expect(runner).to receive(:abort)
      interrupt
    end

    it 'does not exit immediately' do
      cli.trap_interrupt(runner)
      expect_any_instance_of(Object).not_to receive(:exit)
      expect_any_instance_of(Object).not_to receive(:exit!)
      interrupt
    end
  end

  context 'with SIGINT twice' do
    # Second interrupt forces an immediate hard exit with status 1.
    it 'exits immediately' do
      cli.trap_interrupt(runner)
      expect_any_instance_of(Object).to receive(:exit!).with(1)
      interrupt
      interrupt
    end
  end
end
# Smoke test: a clean file yields exit status 0 and a no-offense summary.
it 'checks a given correct file and returns 0' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0',
'puts x'])
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(0)
expect($stdout.string)
.to eq(['',
'1 file inspected, no offenses detected',
''].join("\n"))
end
# A file with trailing whitespace yields one convention offense and a
# non-zero exit status.
it 'checks a given file with faults and returns 1' do
  create_file('example.rb', ['# encoding: utf-8',
                             'x = 0 ',
                             'puts x'])
  expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(1)
  # Parenthesized the eq() argument for consistency with every sibling spec
  # (the original used `.to eq [...]` without parentheses).
  expect($stdout.string)
    .to eq(['== example.rb ==',
            'C: 2: 6: Trailing whitespace detected.',
            '',
            '1 file inspected, 1 offense detected',
            ''].join("\n"))
end
it 'registers an offense for a syntax error' do
create_file('example.rb', ['# encoding: utf-8',
'class Test',
'en'])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
# The missing `end` is reported at EOF (line 4) with error severity.
expect($stdout.string)
.to eq(["#{abs('example.rb')}:4:1: E: unexpected " \
'token $end',
''].join("\n"))
end
it 'registers an offense for Parser warnings' do
create_file('example.rb', ['# encoding: utf-8',
'puts *test',
'if a then b else c end'])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:6: W: " \
'Ambiguous splat operator. Parenthesize the method arguments ' \
"if it's surely a splat operator, or add a whitespace to the " \
'right of the `*` if it should be a multiplication.',
"#{abs('example.rb')}:3:1: C: " \
'Favor the ternary operator (?:) over if/then/else/end ' \
'constructs.',
''].join("\n"))
end
it 'can process a file with an invalid UTF-8 byte sequence' do
# 0xF9 0x29 is not valid UTF-8; inspection must report, not crash.
create_file('example.rb', ['# encoding: utf-8',
"# #{'f9'.hex.chr}#{'29'.hex.chr}"])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:1:1: F: Invalid byte sequence in utf-8.",
''].join("\n"))
end
context 'when errors are raised while processing files due to bugs' do
let(:errors) do
['An error occurred while Encoding cop was inspecting file.rb.']
end
before do
# Stub the runner's collected errors to simulate an internal failure.
allow_any_instance_of(RuboCop::Runner)
.to receive(:errors).and_return(errors)
end
it 'displays an error message to stderr' do
cli.run([])
expect($stderr.string)
.to include('1 error occurred:').and include(errors.first)
end
end
describe 'rubocop:disable comment' do
it 'can disable all cops in a code section' do
src = ['# encoding: utf-8',
'# rubocop:disable all',
'#' * 90,
'x(123456)',
'y("123")',
'def func',
' # rubocop: enable Metrics/LineLength,Style/StringLiterals',
' ' + '#' * 93,
' x(123456)',
' y("123")',
'end']
create_file('example.rb', src)
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
# all cops were disabled, then 2 were enabled again, so we
# should get 2 offenses reported.
expect($stdout.string)
.to eq(["#{abs('example.rb')}:8:81: C: Line is too long. [95/80]",
"#{abs('example.rb')}:10:5: C: Prefer single-quoted " \
"strings when you don't need string interpolation or " \
'special symbols.',
''].join("\n"))
end
it 'can disable selected cops in a code section' do
# Style/LineLength uses the wrong namespace on purpose; it is still
# resolved to Metrics/LineLength but emits a warning on stderr.
create_file('example.rb',
['# encoding: utf-8',
'# rubocop:disable Style/LineLength,' \
'Style/NumericLiterals,Style/StringLiterals',
'#' * 90,
'x(123456)',
'y("123")',
'def func',
' # rubocop: enable Metrics/LineLength, ' \
'Style/StringLiterals',
' ' + '#' * 93,
' x(123456)',
' y("123")',
'end'])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stderr.string)
.to eq(["#{abs('example.rb')}: Style/LineLength has the wrong " \
'namespace - should be Metrics',
''].join("\n"))
# 3 cops were disabled, then 2 were enabled again, so we
# should get 2 offenses reported.
expect($stdout.string)
.to eq(["#{abs('example.rb')}:8:81: C: Line is too long. [95/80]",
"#{abs('example.rb')}:10:5: C: Prefer single-quoted " \
"strings when you don't need string interpolation or " \
'special symbols.',
''].join("\n"))
end
it 'can disable all cops on a single line' do
create_file('example.rb', ['# encoding: utf-8',
'y("123", 123456) # rubocop:disable all'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(0)
expect($stdout.string).to be_empty
end
it 'can disable selected cops on a single line' do
create_file('example.rb',
['# encoding: utf-8',
'a' * 90 + ' # rubocop:disable Metrics/LineLength',
'#' * 95,
'y("123") # rubocop:disable Metrics/LineLength,' \
'Style/StringLiterals'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
# Only the undisabled long line (line 3) is reported.
expect($stdout.string)
.to eq(
["#{abs('example.rb')}:3:81: C: Line is too long. [95/80]",
''].join("\n"))
end
context 'without using namespace' do
it 'can disable selected cops on a single line' do
create_file('example.rb',
['# encoding: utf-8',
'a' * 90 + ' # rubocop:disable LineLength',
'#' * 95,
'y("123") # rubocop:disable StringLiterals'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(
["#{abs('example.rb')}:3:81: C: Line is too long. [95/80]",
''].join("\n"))
end
end
end
# Files without a .rb extension are still inspected when they start with
# a ruby shebang line.
it 'finds a file with no .rb extension but has a shebang line' do
create_file('example', ['#!/usr/bin/env ruby',
'# encoding: utf-8',
'x = 0',
'puts x'
])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
# An empty source file must be inspected cleanly: exit status 0 and the
# standard no-offense summary.
it 'does not register any offenses for an empty file' do
  create_file('example.rb', '')
  exit_code = cli.run(%w(--format simple))
  expect(exit_code).to eq(0)
  summary = "\n1 file inspected, no offenses detected\n"
  expect($stdout.string).to eq(summary)
end
describe 'style guide only usage' do
context 'via the cli option' do
describe '--only-guide-cops' do
it 'skips cops that have no link to a style guide' do
create_file('example.rb', 'fail')
# StyleGuide: ~ (YAML nil) marks the cop as having no guide URL.
create_file('.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' StyleGuide: ~',
' Max: 2'])
expect(cli.run(['--format', 'simple', '--only-guide-cops',
'example.rb'])).to eq(0)
end
it 'runs cops for rules that link to a style guide' do
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' StyleGuide: "http://an.example/url"',
' Max: 2'])
expect(cli.run(['--format', 'simple', '--only-guide-cops',
'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 3: Line is too long. [4/2]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'overrides configuration of AllCops/StyleGuideCopsOnly' do
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['AllCops:',
' StyleGuideCopsOnly: false',
'Metrics/LineLength:',
' Enabled: true',
' StyleGuide: ~',
' Max: 2'])
expect(cli.run(['--format', 'simple', '--only-guide-cops',
'example.rb'])).to eq(0)
end
end
end
context 'via the config' do
before do
# guide_cops_only is supplied by each nested context's let below.
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['AllCops:',
" StyleGuideCopsOnly: #{guide_cops_only}",
'Metrics/LineLength:',
' Enabled: true',
' StyleGuide: ~',
' Max: 2'])
end
describe 'AllCops/StyleGuideCopsOnly' do
context 'when it is true' do
let(:guide_cops_only) { 'true' }
it 'skips cops that have no link to a style guide' do
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(0)
end
end
context 'when it is false' do
let(:guide_cops_only) { 'false' }
it 'runs cops for rules regardless of any link to the style guide' do
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 3: Line is too long. [4/2]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
end
end
end
end
describe 'rails cops' do
describe 'enabling/disabling' do
it 'by default does not run rails cops' do
create_file('app/models/example1.rb', ['# encoding: utf-8',
'read_attribute(:test)'])
expect(cli.run(['--format', 'simple', 'app/models/example1.rb']))
.to eq(0)
end
it 'with -R given runs rails cops' do
create_file('app/models/example1.rb', ['# encoding: utf-8',
'read_attribute(:test)'])
expect(cli.run(['--format', 'simple', '-R', 'app/models/example1.rb']))
.to eq(1)
expect($stdout.string).to include('Prefer self[:attr]')
end
it 'with configuration option true in one dir runs rails cops there' do
source = ['# encoding: utf-8',
'read_attribute(:test)']
create_file('dir1/app/models/example1.rb', source)
create_file('dir1/.rubocop.yml', ['AllCops:',
' RunRailsCops: true',
'',
'Rails/ReadWriteAttribute:',
' Include:',
' - app/models/**/*.rb'])
create_file('dir2/app/models/example2.rb', source)
create_file('dir2/.rubocop.yml', ['AllCops:',
' RunRailsCops: false',
'',
'Rails/ReadWriteAttribute:',
' Include:',
' - app/models/**/*.rb'])
expect(cli.run(%w(--format simple dir1 dir2))).to eq(1)
# Only dir1, where RunRailsCops is true, reports an offense.
expect($stdout.string)
.to eq(['== dir1/app/models/example1.rb ==',
'C: 2: 1: Prefer self[:attr] over read_attribute' \
'(:attr).',
'',
'2 files inspected, 1 offense detected',
''].join("\n"))
end
it 'with configuration option false but -R given runs rails cops' do
# The command-line flag wins over the configuration file.
create_file('app/models/example1.rb', ['# encoding: utf-8',
'read_attribute(:test)'])
create_file('.rubocop.yml', ['AllCops:',
' RunRailsCops: false'])
expect(cli.run(['--format', 'simple', '-R', 'app/models/example1.rb']))
.to eq(1)
expect($stdout.string).to include('Prefer self[:attr]')
end
end
describe 'including/excluding' do
it 'includes some directories by default' do
source = ['# encoding: utf-8',
'read_attribute(:test)',
"default_scope order: 'position'"]
# Several rails cops include app/models by default.
create_file('dir1/app/models/example1.rb', source)
create_file('dir1/app/models/example2.rb', source)
# No rails cops include app/views by default.
create_file('dir1/app/views/example3.rb', source)
# The .rubocop.yml file inherits from default.yml where the Include
# config parameter is set for the rails cops. The paths are interpreted
# as relative to dir1 because .rubocop.yml is placed there.
create_file('dir1/.rubocop.yml', ['AllCops:',
' RunRailsCops: true',
'',
'Rails/ReadWriteAttribute:',
' Exclude:',
' - "**/example2.rb"',
'',
'Rails/DefaultScope:',
' Exclude:',
' - "**/example2.rb"'])
# No .rubocop.yml file in dir2 means that the paths from default.yml
# are interpreted as relative to the current directory, so they don't
# match.
create_file('dir2/app/models/example4.rb', source)
expect(cli.run(%w(--format simple dir1 dir2))).to eq(1)
expect($stdout.string)
.to eq(['== dir1/app/models/example1.rb ==',
'C: 2: 1: Prefer self[:attr] over read_attribute' \
'(:attr).',
'C: 3: 15: default_scope expects a block as its sole' \
' argument.',
'',
'4 files inspected, 2 offenses detected',
''].join("\n"))
end
end
end
describe 'cops can exclude files based on config' do
it 'ignores excluded files' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0'])
create_file('regexp.rb', ['# encoding: utf-8',
'x = 0'])
create_file('exclude_glob.rb', ['#!/usr/bin/env ruby',
'# encoding: utf-8',
'x = 0'])
create_file('dir/thing.rb', ['# encoding: utf-8',
'x = 0'])
# Per-cop Exclude entries may be plain paths, regexps or globs.
create_file('.rubocop.yml', ['Lint/UselessAssignment:',
' Exclude:',
' - example.rb',
' - !ruby/regexp /regexp.rb\z/',
' - "exclude_*"',
' - "dir/*"'])
expect(cli.run(%w(--format simple))).to eq(0)
# All four files are still inspected; only the one cop skips them.
expect($stdout.string)
.to eq(['', '4 files inspected, no offenses detected',
''].join("\n"))
end
end
describe 'configuration from file' do
it 'allows the default configuration file as the -c argument' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('.rubocop.yml', [])
expect(cli.run(%w(--format simple -c .rubocop.yml))).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected',
''].join("\n"))
end
it 'finds included files' do
create_file('file.rb', 'x=0') # Included by default
create_file('example', 'x=0')
create_file('regexp', 'x=0')
create_file('.dot1/file.rb', 'x=0') # Hidden but explicitly included
create_file('.dot2/file.rb', 'x=0') # Hidden, excluded by default
create_file('.rubocop.yml', ['AllCops:',
' Include:',
' - example',
' - !ruby/regexp /regexp$/',
' - .dot1/**/*'
])
expect(cli.run(%w(--format files))).to eq(1)
expect($stderr.string).to eq('')
# The files formatter lists inspected paths; sorted for stable comparison.
expect($stdout.string.split($RS).sort).to eq([abs('.dot1/file.rb'),
abs('example'),
abs('file.rb'),
abs('regexp')])
end
it 'ignores excluded files' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('regexp.rb', ['# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('exclude_glob.rb', ['#!/usr/bin/env ruby',
'# encoding: utf-8',
'x = 0',
'puts x'
])
# Unlike per-cop Exclude, AllCops Exclude skips the files entirely.
create_file('.rubocop.yml', ['AllCops:',
' Exclude:',
' - example.rb',
' - !ruby/regexp /regexp.rb$/',
' - "exclude_*"'
])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
it 'only reads configuration in explicitly included hidden directories' do
create_file('.hidden/example.rb', ['# encoding: utf-8',
'x=0'])
# This file contains configuration for an unknown cop. This would cause a
# warning to be printed on stderr if the file was read. But it's in a
# hidden directory, so it's not read.
create_file('.hidden/.rubocop.yml', ['SymbolName:',
' Enabled: false'])
create_file('.other/example.rb', ['# encoding: utf-8',
'x=0'])
# The .other directory is explicitly included, so the configuration file
# is read, and modifies the behavior.
create_file('.other/.rubocop.yml', ['Style/SpaceAroundOperators:',
' Enabled: false'])
create_file('.rubocop.yml', ['AllCops:',
' Include:',
' - .other/**/*'])
expect(cli.run(%w(--format simple))).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['== .other/example.rb ==',
'W: 2: 1: Useless assignment to variable - x.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'does not consider Include parameters in subdirectories' do
# *.ruby is only included by dir/.rubocop.yml; the top-level run
# must not pick the file up.
create_file('dir/example.ruby', ['# encoding: utf-8',
'x=0'])
create_file('dir/.rubocop.yml', ['AllCops:',
' Include:',
' - "*.ruby"'])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['',
'0 files inspected, no offenses detected',
''].join("\n"))
end
it 'matches included/excluded files correctly when . argument is given' do
create_file('example.rb', 'x = 0')
create_file('special.dsl', ['# encoding: utf-8',
'setup { "stuff" }'
])
create_file('.rubocop.yml', ['AllCops:',
' Include:',
' - "*.dsl"',
' Exclude:',
' - example.rb'
])
expect(cli.run(%w(--format simple .))).to eq(1)
expect($stdout.string)
.to eq(['== special.dsl ==',
"C: 2: 9: Prefer single-quoted strings when you don't " \
'need string interpolation or special symbols.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
# With rubinius 2.0.0.rc1 + rspec 2.13.1,
# File.stub(:open).and_call_original causes SystemStackError.
it 'does not read files in excluded list', broken: :rbx do
  %w(rb.rb non-rb.ext without-ext).each do |filename|
    # Fixed broken interpolation: the loop variable was not interpolated
    # into the path ("#(unknown)"), so only one garbage-named file was
    # created instead of the three intended fixtures.
    create_file("example/ignored/#{filename}", ['# encoding: utf-8',
                                                '#' * 90
                                               ])
  end
  create_file('example/.rubocop.yml', ['AllCops:',
                                       ' Exclude:',
                                       ' - ignored/**'])
  # Excluded files must not even be opened for reading.
  expect(File).not_to receive(:open).with(%r{/ignored/})
  allow(File).to receive(:open).and_call_original
  expect(cli.run(%w(--format simple example))).to eq(0)
  expect($stdout.string)
    .to eq(['', '0 files inspected, no offenses detected',
            ''].join("\n"))
end
it 'can be configured with option to disable a certain error' do
create_file('example1.rb', 'puts 0 ')
create_file('rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Style/CaseIndentation:',
' Enabled: false'])
expect(cli.run(['--format', 'simple',
'-c', 'rubocop.yml', 'example1.rb'])).to eq(1)
# Only the trailing-whitespace offense remains reportable.
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 1: 7: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
context 'without using namespace' do
# Cop names without their department prefix are still recognized.
it 'can be configured with option to disable a certain error' do
create_file('example1.rb', 'puts 0 ')
create_file('rubocop.yml', ['Encoding:',
' Enabled: false',
'',
'CaseIndentation:',
' Enabled: false'])
expect(cli.run(['--format', 'simple',
'-c', 'rubocop.yml', 'example1.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 1: 7: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
end
it 'can disable parser-derived offenses with warning severity' do
# `-' interpreted as argument prefix
create_file('example.rb', 'puts -1')
create_file('.rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Lint/AmbiguousOperator:',
' Enabled: false'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(0)
end
it 'cannot disable Syntax offenses with fatal/error severity' do
create_file('example.rb', 'class Test')
create_file('.rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Syntax:',
' Enabled: false'
])
# Syntax errors are reported regardless of the Enabled: false setting.
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string).to include('unexpected token $end')
end
it 'can be configured to merge a parameter that is a hash' do
create_file('example1.rb',
['# encoding: utf-8',
'puts %w(a b c)',
'puts %q|hi|'])
# We want to change the preferred delimiters for word arrays. The other
# settings from default.yml are unchanged.
create_file('rubocop.yml',
['Style/PercentLiteralDelimiters:',
' PreferredDelimiters:',
" '%w': '[]'",
" '%W': '[]'"])
cli.run(['--format', 'simple', '-c', 'rubocop.yml', 'example1.rb'])
# %w now wants []; %q keeps the default () preference from default.yml.
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 2: 6: %w-literals should be delimited by [ and ]',
'C: 3: 6: %q-literals should be delimited by ( and )',
'C: 3: 6: Use %q only for strings that contain both single ' \
'quotes and double quotes.',
'',
'1 file inspected, 3 offenses detected',
''].join("\n"))
end
it 'can be configured to override a parameter that is a hash in a ' \
'special case' do
create_file('example1.rb',
['# encoding: utf-8',
'arr.select { |e| e > 0 }.collect { |e| e * 2 }',
'a2.find_all { |e| e > 0 }'])
# We prefer find_all over select. This setting overrides the default
# select over find_all. Other preferred methods appearing in the default
# config (e.g., map over collect) are kept.
create_file('rubocop.yml',
['Style/CollectionMethods:',
' PreferredMethods:',
' select: find_all'])
cli.run(['--format',
'simple',
'-c',
'rubocop.yml',
'--only',
'CollectionMethods',
'example1.rb'])
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 2: 5: Prefer find_all over select.',
'C: 2: 26: Prefer map over collect.',
'',
'1 file inspected, 2 offenses detected',
''].join("\n"))
end
it 'works when a cop that others depend on is disabled' do
create_file('example1.rb', ['if a',
' b',
'end'])
create_file('rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Metrics/LineLength:',
' Enabled: false'
])
# The modifier-if cop still fires; disabling LineLength (whose
# configuration it presumably consults — hence the spec name) must
# not crash it.
result = cli.run(['--format', 'simple',
'-c', 'rubocop.yml', 'example1.rb'])
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 1: 1: Favor modifier if usage when having ' \
'a single-line body. Another good alternative is the ' \
'usage of control flow &&/||.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
expect(result).to eq(1)
end
it 'can be configured with project config to disable a certain error' do
create_file('example_src/example1.rb', 'puts 0 ')
create_file('example_src/.rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Style/CaseIndentation:',
' Enabled: false'
])
expect(cli.run(['--format', 'simple',
'example_src/example1.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example_src/example1.rb ==',
'C: 1: 7: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'can use an alternative max line length from a config file' do
create_file('example_src/example1.rb', ['# encoding: utf-8',
'#' * 90
])
create_file('example_src/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Max: 100'
])
# 90 characters is within the raised limit of 100.
expect(cli.run(['--format', 'simple',
'example_src/example1.rb'])).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
it 'can have different config files in different directories' do
%w(src lib).each do |dir|
create_file("example/#{dir}/example1.rb", ['# encoding: utf-8',
'#' * 90
])
end
# Only example/src raises the limit; example/lib keeps the default 80.
create_file('example/src/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Max: 100'
])
expect(cli.run(%w(--format simple example))).to eq(1)
expect($stdout.string).to eq(
['== example/lib/example1.rb ==',
'C: 2: 81: Line is too long. [90/80]',
'',
'2 files inspected, 1 offense detected',
''].join("\n"))
end
it 'prefers a config file in ancestor directory to another in home' do
create_file('example_src/example1.rb', ['# encoding: utf-8',
'#' * 90
])
create_file('example_src/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Max: 100'
])
# The home-directory config sets a stricter limit but must lose.
create_file("#{Dir.home}/.rubocop.yml", ['Metrics/LineLength:',
' Enabled: true',
' Max: 80'
])
expect(cli.run(['--format', 'simple',
'example_src/example1.rb'])).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
it 'can exclude directories relative to .rubocop.yml' do
%w(src etc/test etc/spec tmp/test tmp/spec).each do |dir|
create_file("example/#{dir}/example1.rb", ['# encoding: utf-8',
'#' * 90])
end
# Hidden subdirectories should also be excluded.
create_file('example/etc/.dot/example1.rb', ['# encoding: utf-8',
'#' * 90])
create_file('example/.rubocop.yml', ['AllCops:',
' Exclude:',
' - src/**',
' - etc/**/*',
' - tmp/spec/**'])
expect(cli.run(%w(--format simple example))).to eq(1)
expect($stderr.string).to eq('')
# Only tmp/test escapes the Exclude patterns, so it alone is reported.
expect($stdout.string).to eq(['== example/tmp/test/example1.rb ==',
'C: 2: 81: Line is too long. [90/80]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'can exclude a typical vendor directory' do
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/.rubocop.yml',
['AllCops:',
' Exclude:',
' - lib/parser/lexer.rb'])
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/lib/ex.rb',
['# encoding: utf-8',
'#' * 90])
create_file('.rubocop.yml',
['AllCops:',
' Exclude:',
' - vendor/**/*'])
cli.run(%w(--format simple))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
it 'excludes the vendor directory by default' do
create_file('vendor/ex.rb',
['# encoding: utf-8',
'#' * 90])
cli.run(%w(--format simple))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
# Being immune to bad configuration files in excluded directories has
# become important due to a bug in rubygems
# (https://github.com/rubygems/rubygems/issues/680) that makes
# installations of, for example, rubocop lack their .rubocop.yml in the
# root directory.
it 'can exclude a vendor directory with an erroneous config file' do
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/.rubocop.yml',
['inherit_from: non_existent.yml'])
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/lib/ex.rb',
['# encoding: utf-8',
'#' * 90])
create_file('.rubocop.yml',
['AllCops:',
' Exclude:',
' - vendor/**/*'])
cli.run(%w(--format simple))
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
# Relative exclude paths in .rubocop.yml files are relative to that file,
# but in configuration files with other names they will be relative to
# whatever file inherits from them.
it 'can exclude a vendor directory indirectly' do
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/.rubocop.yml',
['AllCops:',
' Exclude:',
' - lib/parser/lexer.rb'])
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/lib/ex.rb',
['# encoding: utf-8',
'#' * 90])
create_file('.rubocop.yml',
['inherit_from: config/default.yml'])
create_file('config/default.yml',
['AllCops:',
' Exclude:',
' - vendor/**/*'])
cli.run(%w(--format simple))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
it 'prints a warning for an unrecognized cop name in .rubocop.yml' do
create_file('example/example1.rb', ['# encoding: utf-8',
'#' * 90])
# 'LyneLenth' is a deliberate misspelling of 'LineLength'.
create_file('example/.rubocop.yml', ['Style/LyneLenth:',
' Enabled: true',
' Max: 100'])
expect(cli.run(%w(--format simple example))).to eq(1)
expect($stderr.string)
.to eq(['Warning: unrecognized cop Style/LyneLenth found in ' +
abs('example/.rubocop.yml'),
''].join("\n"))
end
it 'prints a warning for an unrecognized configuration parameter' do
create_file('example/example1.rb', ['# encoding: utf-8',
'#' * 90])
# Min is not a parameter that Metrics/LineLength accepts.
create_file('example/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Min: 10'])
expect(cli.run(%w(--format simple example))).to eq(1)
expect($stderr.string)
.to eq(['Warning: unrecognized parameter Metrics/LineLength:Min ' \
'found in ' + abs('example/.rubocop.yml'),
''].join("\n"))
end
it 'works when a configuration file passed by -c specifies Exclude ' \
'with regexp' do
create_file('example/example1.rb', ['# encoding: utf-8',
'#' * 90])
create_file('rubocop.yml', ['AllCops:',
' Exclude:',
' - !ruby/regexp /example1\.rb$/'])
cli.run(%w(--format simple -c rubocop.yml))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
it 'works when a configuration file passed by -c specifies Exclude ' \
'with strings' do
create_file('example/example1.rb', ['# encoding: utf-8',
'#' * 90])
create_file('rubocop.yml', ['AllCops:',
' Exclude:',
' - example/**'])
cli.run(%w(--format simple -c rubocop.yml))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
it 'works when a configuration file specifies a Severity' do
create_file('example/example1.rb', ['# encoding: utf-8',
'#' * 90])
create_file('rubocop.yml', ['Metrics/LineLength:',
' Severity: error'])
cli.run(%w(--format simple -c rubocop.yml))
# The offense is reported with E (error) instead of the default C.
expect($stdout.string)
.to eq(['== example/example1.rb ==',
'E: 2: 81: Line is too long. [90/80]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
expect($stderr.string).to eq('')
end
it 'fails when a configuration file specifies an invalid Severity' do
create_file('example/example1.rb', ['# encoding: utf-8',
'#' * 90])
create_file('rubocop.yml', ['Metrics/LineLength:',
' Severity: superbad'])
cli.run(%w(--format simple -c rubocop.yml))
expect($stderr.string)
.to eq(["Warning: Invalid severity 'superbad'. " \
'Valid severities are refactor, convention, ' \
'warning, error, fatal.',
''].join("\n"))
end
context 'when a file inherits from the old auto generated file' do
before do
# rubocop-todo.yml is the pre-rename name of .rubocop_todo.yml.
create_file('rubocop-todo.yml', '')
create_file('.rubocop.yml', ['inherit_from: rubocop-todo.yml'])
end
it 'prints no warning when --auto-gen-config is not set' do
expect { cli.run(%w(-c .rubocop.yml)) }.not_to exit_with_code(1)
end
it 'prints a warning when --auto-gen-config is set' do
expect { cli.run(%w(-c .rubocop.yml --auto-gen-config)) }
.to exit_with_code(1)
expect($stderr.string)
.to eq(['Attention: rubocop-todo.yml has been renamed to ' \
'.rubocop_todo.yml',
''].join("\n"))
end
end
context 'when a file inherits from a higher level' do
before do
create_file('.rubocop.yml', ['Metrics/LineLength:',
' Exclude:',
' - dir/example.rb'])
create_file('dir/.rubocop.yml', 'inherit_from: ../.rubocop.yml')
create_file('dir/example.rb', '#' * 90)
end
# The Exclude path is relative to the top-level .rubocop.yml, so the
# inherited configuration must still match dir/example.rb.
it 'inherits relative excludes correctly' do
expect(cli.run([])).to eq(0)
end
end
end
end
Don't wrap .run arguments in two arrays. It accidentally works because
Options#convert_deprecated_options calls flatten! on the array.
# encoding: utf-8
require 'fileutils'
require 'tmpdir'
require 'spec_helper'
require 'timeout'
describe RuboCop::CLI, :isolated_environment do
include FileHelper
subject(:cli) { described_class.new }
before(:each) do
$stdout = StringIO.new
$stderr = StringIO.new
RuboCop::ConfigLoader.debug = false
end
after(:each) do
$stdout = STDOUT
$stderr = STDERR
end
# Returns +path+ expanded to an absolute path, resolved against the
# current working directory; used to build expected-output strings.
def abs(path)
  absolute = File.expand_path(path)
  absolute
end
describe 'option' do
describe '--version' do
  it 'exits cleanly' do
    # Both the short and long forms exit 0 and each print the version
    # string once, so two runs produce it twice.
    expect { cli.run ['-v'] }.to exit_with_code(0)
    expect { cli.run ['--version'] }.to exit_with_code(0)
    expect($stdout.string).to eq((RuboCop::Version::STRING + "\n") * 2)
  end
end
describe '--auto-correct' do
it 'corrects SymbolProc and SpaceBeforeBlockBraces offenses' do
  source = ['foo.map{ |a| a.nil? }']
  create_file('example.rb', source)
  expect(cli.run(['-D', '--auto-correct'])).to eq(0)
  corrected = "foo.map(&:nil?)\n"
  expect(IO.read('example.rb')).to eq(corrected)
  # Exit code 0 requires every offense to have been corrected; verify no
  # reported offense line lacks the [Corrected] marker.
  uncorrected = $stdout.string.split($RS).select do |line|
    line.include?('example.rb:') && !line.include?('[Corrected]')
  end
  expect(uncorrected).to be_empty # Hence exit code 0.
end
it 'corrects only IndentationWidth without crashing' do
source = ['foo = if bar',
' something',
'elsif baz',
' other_thing',
'else',
' fail',
'end']
create_file('example.rb', source)
expect(cli.run(%w(--only IndentationWidth --auto-correct))).to eq(0)
corrected = ['foo = if bar',
' something',
'elsif baz',
' other_thing',
'else',
' fail',
'end',
''].join("\n")
expect(IO.read('example.rb')).to eq(corrected)
end
it 'crashes on infinite loop but prints offenses' do
create_file('example.rb', '3.times{ something;other_thing;}')
# This configuration makes --auto-correct impossible to finish since a
# space will be added after each ; but then removed again for the one
# that's inside }.
create_file('.rubocop.yml', ['SpaceInsideBlockBraces:',
' EnforcedStyle: no_space',
' SpaceBeforeBlockParameters: false'])
cmd = %w(--only SpaceAfterSemicolon,SpaceInsideBlockBraces
--auto-correct --format simple)
expect { cli.run(cmd) }.to raise_error(RuboCop::Runner::
InfiniteCorrectionLoop)
expect(IO.read('example.rb'))
.to eq("3.times{something; other_thing;}\n")
expected_output = [
'== example.rb ==',
'C: 1: 9: [Corrected] Space inside { detected.',
'C: 1: 19: [Corrected] Space missing after semicolon.',
'C: 1: 31: [Corrected] Space missing after semicolon.',
'C: 1: 32: [Corrected] Space inside } detected.',
'C: 1: 33: [Corrected] Space inside } detected.',
'',
# We're interrupted during inspection, hence 0 files inspected.
'0 files inspected, 5 offenses detected, 5 offenses corrected',
''
]
expect($stdout.string).to eq(expected_output.join("\n"))
end
it 'corrects complicated cases conservatively' do
# Two cops make corrections here; Style/BracesAroundHashParameters, and
# Style/AlignHash. Because they make minimal corrections relating only
# to their specific areas, and stay away from cleaning up extra
# whitespace in the process, the combined changes don't interfere with
# each other and the result is semantically the same as the starting
# point.
source = ['# encoding: utf-8',
'expect(subject[:address]).to eq({',
" street1: '1 Market',",
" street2: '#200',",
" city: 'Some Town',",
" state: 'CA',",
" postal_code: '99999-1111'",
'})']
create_file('example.rb', source)
expect(cli.run(['-D', '--auto-correct'])).to eq(0)
corrected =
['# encoding: utf-8',
'expect(subject[:address]).to eq(',
" street1: '1 Market',",
" street2: '#200',",
" city: 'Some Town',",
" state: 'CA',",
" postal_code: '99999-1111'",
')']
expect(IO.read('example.rb')).to eq(corrected.join("\n") + "\n")
end
it 'honors Exclude settings in individual cops' do
source = ['# encoding: utf-8',
'puts %x(ls)']
create_file('example.rb', source)
create_file('.rubocop.yml', ['Style/UnneededPercentX:',
' Exclude:',
' - example.rb'])
expect(cli.run(['--auto-correct'])).to eq(0)
expect($stdout.string).to include('no offenses detected')
expect(IO.read('example.rb')).to eq(source.join("\n") + "\n")
end
it 'corrects code with indentation problems' do
create_file('example.rb', ['# encoding: utf-8',
'module Bar',
'class Goo',
' def something',
' first call',
" do_other 'things'",
' if other > 34',
' more_work',
' end',
' end',
'end',
'end',
'',
'module Foo',
'class Bar',
'',
' stuff = [',
' {',
" some: 'hash',",
' },',
' {',
" another: 'hash',",
" with: 'more'",
' },',
' ]',
'end',
'end'
])
expect(cli.run(['--auto-correct'])).to eq(1)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'module Bar',
' class Goo',
' def something',
' first call',
" do_other 'things'",
' if other > 34',
' more_work',
' end',
' end',
' end',
'end',
'',
'module Foo',
' class Bar',
' stuff = [',
' {',
" some: 'hash'",
' },',
' {',
" another: 'hash',",
" with: 'more'",
' }',
' ]',
' end',
'end',
''].join("\n"))
end
it 'can change block comments and indent them' do
create_file('example.rb', ['# encoding: utf-8',
'module Foo',
'class Bar',
'=begin',
'This is a nice long',
'comment',
'which spans a few lines',
'=end',
' def baz',
' do_something',
' end',
'end',
'end'])
expect(cli.run(['--auto-correct'])).to eq(1)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'module Foo',
' class Bar',
' # This is a nice long',
' # comment',
' # which spans a few lines',
' def baz',
' do_something',
' end',
' end',
'end',
''].join("\n"))
end
it 'can correct two problems with blocks' do
# {} should be do..end and space is missing.
create_file('example.rb', ['# encoding: utf-8',
'(1..10).each{ |i|',
' puts i',
'}'])
expect(cli.run(['--auto-correct'])).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'(1..10).each do |i|',
' puts i',
'end',
''].join("\n"))
end
it 'can handle spaces when removing braces' do
create_file('example.rb',
['# encoding: utf-8',
"assert_post_status_code 400, 's', {:type => 'bad'}"])
expect(cli.run(%w(--auto-correct --format emacs))).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
"assert_post_status_code 400, 's', type: 'bad'",
''].join("\n"))
e = abs('example.rb')
expect($stdout.string)
.to eq(["#{e}:2:35: C: [Corrected] Redundant curly braces around " \
'a hash parameter.',
"#{e}:2:35: C: [Corrected] Use the new Ruby 1.9 hash " \
'syntax.',
# TODO: Don't report that a problem is corrected when it
# actually went away due to another correction.
"#{e}:2:35: C: [Corrected] Space inside { missing.",
# TODO: Don't report duplicates (HashSyntax in this case).
"#{e}:2:36: C: [Corrected] Use the new Ruby 1.9 hash " \
'syntax.',
"#{e}:2:50: C: [Corrected] Space inside } missing.",
''].join("\n"))
end
# A case where two cops, EmptyLinesAroundBody and EmptyLines, try to
# remove the same line in autocorrect.
it 'can correct two empty lines at end of class body' do
create_file('example.rb', ['class Test',
' def f',
' end',
'',
'',
'end'])
expect(cli.run(['--auto-correct'])).to eq(1)
expect($stderr.string).to eq('')
expect(IO.read('example.rb')).to eq(['class Test',
' def f',
' end',
'end',
''].join("\n"))
end
# A case where WordArray's correction can be clobbered by
# AccessModifierIndentation's correction.
it 'can correct indentation and another thing' do
create_file('example.rb', ['# encoding: utf-8',
'class Dsl',
'private',
' A = ["git", "path",]',
'end'])
expect(cli.run(%w(--auto-correct --format emacs))).to eq(1)
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'class Dsl',
' private',
'',
' A = %w(git path)',
'end',
''].join("\n"))
e = abs('example.rb')
expect($stdout.string)
.to eq(["#{e}:2:1: C: Missing top-level class documentation " \
'comment.',
"#{e}:3:1: C: [Corrected] Indent access modifiers like " \
'`private`.',
"#{e}:3:1: C: [Corrected] Keep a blank line before and " \
'after `private`.',
"#{e}:3:3: W: Useless `private` access modifier.",
"#{e}:3:3: C: [Corrected] Keep a blank line before and " \
'after `private`.',
"#{e}:4:7: C: [Corrected] Use `%w` or `%W` " \
'for array of words.',
"#{e}:4:8: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:4:15: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:4:21: C: [Corrected] Avoid comma after the last item " \
'of an array.',
"#{e}:5:7: C: [Corrected] Use `%w` or `%W` " \
'for array of words.',
"#{e}:5:8: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:5:15: C: [Corrected] Prefer single-quoted strings " \
"when you don't need string interpolation or special " \
'symbols.',
"#{e}:5:21: C: [Corrected] Avoid comma after the last item " \
'of an array.',
''].join("\n"))
end
# A case where the same cop could try to correct an offense twice in one
# place.
it 'can correct empty line inside special form of nested modules' do
create_file('example.rb', ['module A module B',
'',
'end end'])
expect(cli.run(['--auto-correct'])).to eq(1)
expect(IO.read('example.rb')).to eq(['module A module B',
'end end',
''].join("\n"))
uncorrected = $stdout.string.split($RS).select do |line|
line.include?('example.rb:') && !line.include?('[Corrected]')
end
expect(uncorrected).not_to be_empty # Hence exit code 1.
end
it 'can correct single line methods' do
create_file('example.rb', ['# encoding: utf-8',
'def func1; do_something end # comment',
'def func2() do_1; do_2; end'])
expect(cli.run(%w(--auto-correct --format offenses))).to eq(0)
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'# comment',
'def func1',
' do_something',
'end',
'',
'def func2',
' do_1',
' do_2',
'end',
''].join("\n"))
expect($stdout.string).to eq(['',
'10 Style/TrailingWhitespace',
'5 Style/Semicolon',
'3 Style/SingleLineMethods',
'1 Style/DefWithParentheses',
'1 Style/EmptyLineBetweenDefs',
'--',
'20 Total',
'',
''].join("\n"))
end
# In this example, the auto-correction (changing "raise" to "fail")
# creates a new problem (alignment of parameters), which is also
# corrected automatically.
# NOTE: description fixed from 'a problems' to 'a problem' (typo only;
# the example body is unchanged).
it 'can correct a problem and the problem it creates' do
  create_file('example.rb',
              ['# encoding: utf-8',
               'raise NotImplementedError,',
               " 'Method should be overridden in child classes'"])
  expect(cli.run(['--auto-correct'])).to eq(0)
  expect(IO.read('example.rb'))
    .to eq(['# encoding: utf-8',
            'fail NotImplementedError,',
            " 'Method should be overridden in child classes'",
            ''].join("\n"))
  expect($stdout.string)
    .to eq(['Inspecting 1 file',
            'C',
            '',
            'Offenses:',
            '',
            'example.rb:2:1: C: [Corrected] Use fail instead of ' \
            'raise to signal exceptions.',
            'raise NotImplementedError,',
            '^^^^^',
            'example.rb:3:7: C: [Corrected] Align the parameters of a ' \
            'method call if they span more than one line.',
            " 'Method should be overridden in child classes'",
            ' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
            '',
            '1 file inspected, 2 offenses detected, 2 offenses ' \
            'corrected',
            ''].join("\n"))
end
# Thanks to repeated auto-correction, we can get rid of the trailing
# spaces, and then the extra empty line.
it 'can correct two problems in the same place' do
create_file('example.rb',
['# encoding: utf-8',
'# Example class.',
'class Klass',
' ',
' def f',
' end',
'end'])
expect(cli.run(['--auto-correct'])).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'# Example class.',
'class Klass',
' def f',
' end',
'end',
''].join("\n"))
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['Inspecting 1 file',
'C',
'',
'Offenses:',
'',
'example.rb:4:1: C: [Corrected] Extra empty line detected ' \
'at class body beginning.',
'example.rb:4:1: C: [Corrected] Trailing whitespace ' \
'detected.',
'',
'1 file inspected, 2 offenses detected, 2 offenses ' \
'corrected',
''].join("\n"))
end
it 'can correct MethodDefParentheses and other offense' do
create_file('example.rb',
['# encoding: utf-8',
'def primes limit',
' 1.upto(limit).select { |i| i.even? }',
'end'])
expect(cli.run(%w(-D --auto-correct))).to eq(0)
expect($stderr.string).to eq('')
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'def primes(limit)',
' 1.upto(limit).select(&:even?)',
'end',
''].join("\n"))
expect($stdout.string)
.to eq(['Inspecting 1 file',
'C',
'',
'Offenses:',
'',
'example.rb:2:12: C: [Corrected] ' \
'Style/MethodDefParentheses: ' \
'Use def with parentheses when there are parameters.',
'def primes limit',
' ^^^^^',
'example.rb:3:3: C: [Corrected] Style/SymbolProc: ' \
'Pass &:even? as an argument to select instead of a block.',
' 1.upto(limit).select { |i| i.even? }',
' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
'',
'1 file inspected, 2 offenses detected, 2 offenses ' \
'corrected',
''].join("\n"))
end
it 'can correct WordArray and SpaceAfterComma offenses' do
create_file('example.rb',
['# encoding: utf-8',
"f(type: ['offline','offline_payment'],",
" bar_colors: ['958c12','953579','ff5800','0085cc'])"])
expect(cli.run(%w(-D --auto-correct --format o))).to eq(0)
expect($stdout.string)
.to eq(['',
'4 Style/SpaceAfterComma',
'2 Style/WordArray',
'--',
'6 Total',
'',
''].join("\n"))
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'f(type: %w(offline offline_payment),',
' bar_colors: %w(958c12 953579 ff5800 0085cc))',
''].join("\n"))
end
it 'can correct SpaceAfterComma and HashSyntax offenses' do
create_file('example.rb',
['# encoding: utf-8',
"I18n.t('description',:property_name => property.name)"])
expect(cli.run(%w(-D --auto-correct --format emacs))).to eq(0)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:21: C: [Corrected] " \
'Style/SpaceAfterComma: Space missing after comma.',
"#{abs('example.rb')}:2:22: C: [Corrected] " \
'Style/HashSyntax: Use the new Ruby 1.9 hash syntax.',
''].join("\n"))
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
"I18n.t('description', property_name: property.name)",
''].join("\n"))
end
it 'can correct HashSyntax and SpaceAroundOperators offenses' do
create_file('example.rb',
['# encoding: utf-8',
'{ :b=>1 }'])
expect(cli.run(%w(-D --auto-correct --format emacs))).to eq(0)
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'{ b: 1 }',
''].join("\n"))
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:3: C: [Corrected] " \
'Style/HashSyntax: Use the new Ruby 1.9 hash syntax.',
"#{abs('example.rb')}:2:5: C: [Corrected] " \
'Style/SpaceAroundOperators: Surrounding space missing for ' \
"operator '=>'.",
''].join("\n"))
end
it 'can correct HashSyntax when --only is used' do
create_file('example.rb',
['# encoding: utf-8',
'{ :b=>1 }'])
expect(cli.run(%w(--auto-correct -f emacs
--only Style/HashSyntax))).to eq(0)
expect($stderr.string).to eq('')
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'{ b: 1 }',
''].join("\n"))
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:3: C: [Corrected] Use the new " \
'Ruby 1.9 hash syntax.',
''].join("\n"))
end
it 'can correct TrailingBlankLines and TrailingWhitespace offenses' do
create_file('example.rb',
['# encoding: utf-8',
'',
' ',
'',
''])
expect(cli.run(%w(--auto-correct --format emacs))).to eq(0)
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
''].join("\n"))
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:1: C: [Corrected] 3 trailing " \
'blank lines detected.',
"#{abs('example.rb')}:3:1: C: [Corrected] Trailing " \
'whitespace detected.',
''].join("\n"))
end
it 'can correct MethodCallParentheses and EmptyLiteral offenses' do
create_file('example.rb',
['# encoding: utf-8',
'Hash.new()'])
expect(cli.run(%w(--auto-correct --format emacs))).to eq(0)
expect($stderr.string).to eq('')
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'{}',
''].join("\n"))
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:1: C: [Corrected] Use hash " \
'literal `{}` instead of `Hash.new`.',
"#{abs('example.rb')}:2:9: C: [Corrected] Do not use " \
'parentheses for method calls with no arguments.',
''].join("\n"))
end
it 'can correct IndentHash offenses with separator style' do
create_file('example.rb',
['# encoding: utf-8',
'CONVERSION_CORRESPONDENCE = {',
' match_for_should: :match,',
' match_for_should_not: :match_when_negated,',
' failure_message_for_should: :failure_message,',
'failure_message_for_should_not: :failure_message_when',
'}'])
create_file('.rubocop.yml',
['Style/AlignHash:',
' EnforcedColonStyle: separator'])
expect(cli.run(%w(--auto-correct))).to eq(0)
expect(IO.read('example.rb'))
.to eq(['# encoding: utf-8',
'CONVERSION_CORRESPONDENCE = {',
' match_for_should: :match,',
' match_for_should_not: :match_when_negated,',
' failure_message_for_should: :failure_message,',
' failure_message_for_should_not: :failure_message_when',
'}',
''].join("\n"))
end
it 'does not say [Corrected] if correction was avoided' do
create_file('example.rb', ['# encoding: utf-8',
'a = c and b',
'not a && b',
'func a do b end'])
expect(cli.run(%w(-a -f simple))).to eq(1)
expect($stderr.string).to eq('')
expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
'a = c and b',
'not a && b',
'func a do b end',
''].join("\n"))
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 2: 7: Use && instead of and.',
'C: 3: 1: Use ! instead of not.',
'C: 4: 8: Prefer {...} over do...end for single-line ' \
'blocks.',
'',
'1 file inspected, 3 offenses detected',
''].join("\n"))
end
it 'does not hang SpaceAfterPunctuation and SpaceInsideParens' do
  create_file('example.rb',
              ['# encoding: utf-8',
               'some_method(a, )'])
  # Guard against an infinite correction loop: one cop adds a space after
  # the comma while the other removes it inside the parentheses.
  Timeout.timeout(10) do
    expect(cli.run(%w(--auto-correct))).to eq(0)
  end
  expect($stderr.string).to eq('')
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       'some_method(a)',
                                       ''].join("\n"))
end
it 'does not hang SpaceAfterPunctuation and SpaceInsideBrackets' do
  create_file('example.rb',
              ['# encoding: utf-8',
               'puts [1, ]'])
  # Guard against an infinite correction loop between the two cops when
  # the disputed space sits inside array brackets.
  Timeout.timeout(10) do
    expect(cli.run(%w(--auto-correct))).to eq(0)
  end
  expect($stderr.string).to eq('')
  expect(IO.read('example.rb')).to eq(['# encoding: utf-8',
                                       'puts [1]',
                                       ''].join("\n"))
end
end
describe '--auto-gen-config' do
before(:each) do
RuboCop::Formatter::DisabledConfigFormatter
.config_to_allow_offenses = {}
end
it 'overwrites an existing todo file' do
create_file('example1.rb', ['# encoding: utf-8',
'x= 0 ',
'#' * 85,
'y ',
'puts x'])
create_file('.rubocop_todo.yml', ['Metrics/LineLength:',
' Enabled: false'])
create_file('.rubocop.yml', ['inherit_from: .rubocop_todo.yml'])
expect(cli.run(['--auto-gen-config'])).to eq(1)
expect(IO.readlines('.rubocop_todo.yml')[7..-1].map(&:chomp))
.to eq(['# Offense count: 1',
'# Configuration parameters: AllowURI, URISchemes.',
'Metrics/LineLength:',
' Max: 85',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/SpaceAroundOperators:',
' Enabled: false',
'',
'# Offense count: 2',
'# Cop supports --auto-correct.',
'Style/TrailingWhitespace:',
' Enabled: false'])
# Create new CLI instance to avoid using cached configuration.
new_cli = described_class.new
expect(new_cli.run(['example1.rb'])).to eq(0)
end
it 'exits with error if file arguments are given' do
create_file('example1.rb', ['# encoding: utf-8',
'x= 0 ',
'#' * 85,
'y ',
'puts x'])
expect { cli.run(['--auto-gen-config', 'example1.rb']) }
.to exit_with_code(1)
expect($stderr.string)
.to eq(['--auto-gen-config can not be combined with any other ' \
'arguments.',
''].join("\n"))
expect($stdout.string).to eq('')
end
it 'can generate a todo list' do
create_file('example1.rb', ['# encoding: utf-8',
'$x= 0 ',
'#' * 90,
'#' * 85,
'y ',
'puts x'])
create_file('example2.rb', ['# encoding: utf-8',
"\tx = 0",
'puts x'])
expect(cli.run(['--auto-gen-config'])).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string)
.to include(['Created .rubocop_todo.yml.',
'Run `rubocop --config .rubocop_todo.yml`, or',
'add inherit_from: .rubocop_todo.yml in a ' \
'.rubocop.yml file.',
''].join("\n"))
expected =
['# This configuration was generated by `rubocop --auto-gen-config`',
/# on .* using RuboCop version .*/,
'# The point is for the user to remove these configuration records',
'# one by one as the offenses are removed from the code base.',
'# Note that changes in the inspected code, or installation of new',
'# versions of RuboCop, may require this file to be generated ' \
'again.',
'',
'# Offense count: 2',
'# Configuration parameters: AllowURI, URISchemes.',
'Metrics/LineLength:',
' Max: 90',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/CommentIndentation:',
' Enabled: false',
'',
'# Offense count: 1',
'# Configuration parameters: AllowedVariables.',
'Style/GlobalVars:',
' Enabled: false',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/IndentationConsistency:',
' Enabled: false',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/SpaceAroundOperators:',
' Enabled: false',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/Tab:',
' Enabled: false',
'',
'# Offense count: 2',
'# Cop supports --auto-correct.',
'Style/TrailingWhitespace:',
' Enabled: false']
actual = IO.read('.rubocop_todo.yml').split($RS)
expected.each_with_index do |line, ix|
if line.is_a?(String)
expect(actual[ix]).to eq(line)
else
expect(actual[ix]).to match(line)
end
end
end
it 'does not generate configuration for the Syntax cop' do
create_file('example1.rb', ['# encoding: utf-8',
'x = < ', # Syntax error
'puts x'])
create_file('example2.rb', ['# encoding: utf-8',
"\tx = 0",
'puts x'])
expect(cli.run(['--auto-gen-config'])).to eq(1)
expect($stderr.string).to eq('')
expected =
['# This configuration was generated by `rubocop --auto-gen-config`',
/# on .* using RuboCop version .*/,
'# The point is for the user to remove these configuration records',
'# one by one as the offenses are removed from the code base.',
'# Note that changes in the inspected code, or installation of new',
'# versions of RuboCop, may require this file to be generated ' \
'again.',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/CommentIndentation:',
' Enabled: false',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/IndentationConsistency:',
' Enabled: false',
'',
'# Offense count: 1',
'# Cop supports --auto-correct.',
'Style/Tab:',
' Enabled: false']
actual = IO.read('.rubocop_todo.yml').split($RS)
expect(actual.length).to eq(expected.length)
expected.each_with_index do |line, ix|
if line.is_a?(String)
expect(actual[ix]).to eq(line)
else
expect(actual[ix]).to match(line)
end
end
end
it 'generates a todo list that removes the reports' do
RuboCop::Cop::Style::RegexpLiteral.slash_count = 0
create_file('example.rb', ['# encoding: utf-8',
'y.gsub!(%r{abc/xyz}, "#{x}")'])
expect(cli.run(%w(--format emacs))).to eq(1)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:9: C: Use %r only for regular " \
"expressions matching more than 1 '/' character.",
''].join("\n"))
expect(cli.run(['--auto-gen-config'])).to eq(1)
expected =
['# This configuration was generated by `rubocop --auto-gen-config`',
/# on .* using RuboCop version .*/,
'# The point is for the user to remove these configuration records',
'# one by one as the offenses are removed from the code base.',
'# Note that changes in the inspected code, or installation of new',
'# versions of RuboCop, may require this file to be generated ' \
'again.',
'',
'# Offense count: 1',
'# Configuration parameters: MaxSlashes.',
'Style/RegexpLiteral:',
' Enabled: false']
actual = IO.read('.rubocop_todo.yml').split($RS)
expected.each_with_index do |line, ix|
if line.is_a?(String)
expect(actual[ix]).to eq(line)
else
expect(actual[ix]).to match(line)
end
end
$stdout = StringIO.new
result = cli.run(%w(--config .rubocop_todo.yml --format emacs))
expect($stdout.string).to eq('')
expect(result).to eq(0)
end
end
describe '--only' do
context 'when one cop is given' do
it 'runs just one cop' do
create_file('example.rb', ['if x== 0 ',
"\ty",
'end'])
# IfUnlessModifier depends on the configuration of LineLength.
expect(cli.run(['--format', 'simple',
'--only', 'Style/IfUnlessModifier',
'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 1: Favor modifier if usage when ' \
'having a single-line body. Another good alternative is ' \
'the usage of control flow &&/||.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'exits with error if an incorrect cop name is passed' do
create_file('example.rb', ['if x== 0 ',
"\ty",
'end'])
expect(cli.run(['--only', 'Style/123'])).to eq(1)
expect($stderr.string).to include('Unrecognized cop name: Style/123.')
end
it 'accepts cop names from plugins' do
create_file('.rubocop.yml', ['require: rubocop_ext',
'',
'Style/SomeCop:',
' Description: Something',
' Enabled: true'])
create_file('rubocop_ext.rb', ['module RuboCop',
' module Cop',
' module Style',
' class SomeCop < Cop',
' end',
' end',
' end',
'end'])
create_file('redirect.rb', '$stderr = STDOUT')
rubocop = "#{RuboCop::ConfigLoader::RUBOCOP_HOME}/bin/rubocop"
# Since we define a new cop class, we have to do this in a separate
# process. Otherwise, the extra cop will affect other specs.
output =
`ruby -I . #{rubocop} --require redirect.rb --only Style/SomeCop`
expect($CHILD_STATUS.success?).to be_truthy
# The warning about the unrecognized cop is expected. It's given due
# to the fact that we haven't supplied any default configuration for
# rubocop_ext in this example.
expect(output)
.to eq(['Warning: unrecognized cop Style/SomeCop found in ' \
"#{abs('.rubocop.yml')}",
'Inspecting 2 files',
'..',
'',
'2 files inspected, no offenses detected',
''].join("\n"))
end
context 'without using namespace' do
it 'runs just one cop' do
create_file('example.rb', ['if x== 0 ',
"\ty",
'end'])
expect(cli.run(['--format', 'simple',
'--display-cop-names',
'--only', 'IfUnlessModifier',
'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 1: Style/IfUnlessModifier: Favor modifier if ' \
'usage when having a single-line body. Another good ' \
'alternative is the usage of control flow &&/||.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
end
it 'enables the given cop' do
create_file('example.rb',
['x = 0 ',
# Disabling comments still apply.
'# rubocop:disable Style/TrailingWhitespace',
'y = 1 '])
create_file('.rubocop.yml', ['Style/TrailingWhitespace:',
' Enabled: false'])
expect(cli.run(['--format', 'simple',
'--only', 'Style/TrailingWhitespace',
'example.rb'])).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 6: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
end
context 'when several cops are given' do
it 'runs the given cops' do
create_file('example.rb', ['if x== 100000000000000 ',
"\ty",
'end'])
expect(cli.run(['--format', 'simple',
'--only',
'Style/IfUnlessModifier,Style/Tab,' \
'Style/SpaceAroundOperators',
'example.rb'])).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 1: Favor modifier if usage when ' \
'having a single-line body. Another good alternative is ' \
'the usage of control flow &&/||.',
"C: 1: 5: Surrounding space missing for operator '=='.",
'C: 2: 1: Tab detected.',
'',
'1 file inspected, 3 offenses detected',
''].join("\n"))
end
context 'and --lint' do
it 'runs the given cops plus all enabled lint cops' do
create_file('example.rb', ['if x== 100000000000000 ',
"\ty = 3",
' end'])
create_file('.rubocop.yml', ['Lint/EndAlignment:',
' Enabled: false'])
expect(cli.run(['--format', 'simple',
'--only', 'Style/Tab,Style/SpaceAroundOperators',
'--lint',
'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 5: Surrounding space missing for operator ' \
"'=='.",
'C: 2: 1: Tab detected.',
'W: 2: 2: Useless assignment to variable - y.',
'',
'1 file inspected, 3 offenses detected',
''].join("\n"))
end
end
end
end
describe '--except' do
context 'when two cops are given' do
it 'runs all cops except the given' do
create_file('example.rb', ['if x== 0 ',
"\ty",
'end'])
expect(cli.run(['--format', 'offenses',
'--except', 'Style/IfUnlessModifier,Style/Tab',
'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['',
'1 Style/IndentationWidth',
'1 Style/SpaceAroundOperators',
'1 Style/TrailingWhitespace',
'--',
'3 Total',
'',
''].join("\n"))
end
it 'exits with error if an incorrect cop name is passed' do
create_file('example.rb', ['if x== 0 ',
"\ty",
'end'])
expect(cli.run(['--except', 'Style/123'])).to eq(1)
expect($stderr.string).to include('Unrecognized cop name: Style/123.')
end
context 'when one cop is given without namespace' do
it 'disables the given cop' do
create_file('example.rb', ['if x== 0 ',
"\ty",
'end'])
cli.run(['--format', 'offenses',
'--except', 'IfUnlessModifier',
'example.rb'])
with_option = $stdout.string
$stdout = StringIO.new
cli.run(['--format', 'offenses',
'example.rb'])
without_option = $stdout.string
expect(without_option.split($RS) - with_option.split($RS))
.to eq(['1 Style/IfUnlessModifier', '5 Total'])
end
end
end
context 'when several cops are given' do
it 'disables the given cops' do
create_file('example.rb', ['if x== 100000000000000 ',
"\ty",
'end'])
expect(cli.run(['--format', 'offenses',
'--except',
'Style/IfUnlessModifier,Style/Tab,' \
'Style/SpaceAroundOperators',
'example.rb'])).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['',
'1 Style/IndentationWidth',
'1 Style/NumericLiterals',
'1 Style/TrailingWhitespace',
'--',
'3 Total',
'',
''].join("\n"))
end
end
end
describe '--lint' do
  it 'runs only lint cops' do
    create_file('example.rb', ['if 0 ',
                               "\ty",
                               'end'])
    # IfUnlessModifier depends on the configuration of LineLength.
    expect(cli.run(['--format', 'simple', '--lint',
                    'example.rb'])).to eq(1)
    # Only the lint offense (literal in condition) is reported; style
    # offenses such as the tab and trailing whitespace are filtered out.
    expect($stdout.string)
      .to eq(['== example.rb ==',
              'W: 1: 4: Literal 0 appeared in a condition.',
              '',
              '1 file inspected, 1 offense detected',
              ''].join("\n"))
  end
end
describe '-d/--debug' do
  it 'shows config files' do
    create_file('example1.rb', "\tputs 0")
    expect(cli.run(['--debug', 'example1.rb'])).to eq(1)
    # RuboCop's repository root (three levels up), where the bundled
    # default/enabled/disabled configs live.
    home = File.dirname(File.dirname(File.dirname(__FILE__)))
    expect($stdout.string.lines.grep(/configuration/).map(&:chomp))
      .to eq(["For #{abs('')}:" \
              " configuration from #{home}/config/default.yml",
              "Inheriting configuration from #{home}/config/enabled.yml",
              "Inheriting configuration from #{home}/config/disabled.yml"
             ])
  end

  it 'shows cop names' do
    create_file('example1.rb', "\tputs 0")
    expect(cli.run(['--format',
                    'emacs',
                    '--debug',
                    'example1.rb'])).to eq(1)
    # Debug mode prefixes each reported offense with the cop's name.
    expect($stdout.string.lines.to_a[-1])
      .to eq(["#{abs('example1.rb')}:1:1: C: Style/Tab: Tab detected.",
              ''].join("\n"))
  end
end
describe '-D/--display-cop-names' do
  it 'shows cop names' do
    create_file('example1.rb', "\tputs 0")
    # Exercise the option under test. The previous version of this example
    # passed --debug (copied from the -d/--debug spec above), so the
    # --display-cop-names flag itself was never actually tested.
    expect(cli.run(['--format',
                    'emacs',
                    '--display-cop-names',
                    'example1.rb'])).to eq(1)
    # The offense line is prefixed with the reporting cop's name.
    expect($stdout.string.lines.to_a[-1])
      .to eq(["#{abs('example1.rb')}:1:1: C: Style/Tab: Tab detected.",
              ''].join("\n"))
  end
end
describe '--show-cops' do
shared_examples(:prints_config) do
it 'prints the current configuration' do
out = stdout.lines.to_a
printed_config = YAML.load(out.join)
cop_names = (cop_list[0] || '').split(',')
cop_names.each do |cop_name|
global_conf[cop_name].each do |key, value|
printed_value = printed_config[cop_name][key]
expect(printed_value).to eq(value)
end
end
end
end
let(:cops) { RuboCop::Cop::Cop.all }
let(:global_conf) do
config_path =
RuboCop::ConfigLoader.configuration_file_for(Dir.pwd.to_s)
RuboCop::ConfigLoader.configuration_from_file(config_path)
end
let(:stdout) { $stdout.string }
before do
create_file('.rubocop.yml', ['Metrics/LineLength:',
' Max: 110'])
expect { cli.run ['--show-cops'] + cop_list }.to exit_with_code(0)
end
context 'with no args' do
let(:cop_list) { [] }
# Returns only the first line of the cop's configured description,
# stripped of surrounding whitespace, or '' when none is configured.
def short_description_of_cop(cop)
  full = full_description_of_cop(cop)
  return '' unless full
  full.lines.first.strip
end
# Gets the full description of the cop or nil if no description is set.
def full_description_of_cop(cop)
  # global_conf is the merged configuration for the working directory
  # (see the `let(:global_conf)` above in this describe block).
  cop_config = global_conf.for_cop(cop)
  cop_config['Description']
end
it 'prints all available cops and their description' do
cops.each do |cop|
expect(stdout).to include cop.cop_name
# Because of line breaks, we will only find the beginning.
expect(stdout).to include short_description_of_cop(cop)[0..60]
end
end
it 'prints all types' do
cops
.types
.map(&:to_s)
.map(&:capitalize)
.each { |type| expect(stdout).to include(type) }
end
it 'prints all cops in their right type listing' do
lines = stdout.lines
lines.slice_before(/Type /).each do |slice|
types = cops.types.map(&:to_s).map(&:capitalize)
current = types.delete(slice.shift[/Type '(?<c>[^']+)'/, 'c'])
# all cops in their type listing
cops.with_type(current).each do |cop|
expect(slice.any? { |l| l.include? cop.cop_name }).to be_truthy
end
# no cop in wrong type listing
types.each do |type|
cops.with_type(type).each do |cop|
expect(slice.any? { |l| l.include? cop.cop_name }).to be_falsey
end
end
end
end
include_examples :prints_config
end
context 'with one cop given' do
let(:cop_list) { ['Style/Tab'] }
it 'prints that cop and nothing else' do
expect(stdout).to eq(
['# Supports --auto-correct',
'Style/Tab:',
' Description: No hard tabs.',
' StyleGuide: ' \
'https://github.com/bbatsov/ruby-style-guide#spaces-indentation',
' Enabled: true',
'',
''].join("\n"))
end
include_examples :prints_config
end
context 'with two cops given' do
let(:cop_list) { ['Style/Tab,Metrics/LineLength'] }
include_examples :prints_config
end
context 'with one of the cops misspelled' do
let(:cop_list) { ['Style/Tab,Lint/X123'] }
it 'skips the unknown cop' do
expect(stdout).to eq(
['# Supports --auto-correct',
'Style/Tab:',
' Description: No hard tabs.',
' StyleGuide: ' \
'https://github.com/bbatsov/ruby-style-guide#spaces-indentation',
' Enabled: true',
'',
''].join("\n"))
end
end
end
describe '-f/--format' do
let(:target_file) { 'example.rb' }
before do
create_file(target_file, ['# encoding: utf-8',
'#' * 90])
end
describe 'builtin formatters' do
context 'when simple format is specified' do
it 'outputs with simple format' do
cli.run(['--format', 'simple', 'example.rb'])
expect($stdout.string)
.to include(["== #{target_file} ==",
'C: 2: 81: Line is too long. [90/80]'].join("\n"))
end
end
context 'when clang format is specified' do
it 'outputs with clang format' do
create_file('example1.rb', ['# encoding: utf-8',
'x= 0 ',
'#' * 85,
'y ',
'puts x'])
create_file('example2.rb', ['# encoding: utf-8',
"\tx",
'def a',
' puts',
'end'])
create_file('example3.rb', ['# encoding: utf-8',
'def badName',
' if something',
' test',
' end',
'end'])
expect(cli.run(['--format', 'clang', 'example1.rb',
'example2.rb', 'example3.rb']))
.to eq(1)
expect($stdout.string)
.to eq(['example1.rb:2:2: C: Surrounding space missing for ' \
"operator '='.",
'x= 0 ',
' ^',
'example1.rb:2:5: C: Trailing whitespace detected.',
'x= 0 ',
' ^',
'example1.rb:3:81: C: Line is too long. [85/80]',
'###################################################' \
'##################################',
' ' \
' ^^^^^',
'example1.rb:4:2: C: Trailing whitespace detected.',
'y ',
' ^',
'example2.rb:1:1: C: Incorrect indentation detected' \
' (column 0 instead of 1).',
'# encoding: utf-8',
'^^^^^^^^^^^^^^^^^',
'example2.rb:2:1: C: Tab detected.',
"\tx",
'^',
'example2.rb:3:1: C: Inconsistent indentation ' \
'detected.',
'def a',
'^^^^^',
'example2.rb:4:1: C: Use 2 (not 3) spaces for ' \
'indentation.',
' puts',
'^^^',
'example3.rb:2:5: C: Use snake_case for method names.',
'def badName',
' ^^^^^^^',
'example3.rb:3:3: C: Use a guard clause instead of ' \
'wrapping the code inside a conditional expression.',
' if something',
' ^^',
'example3.rb:3:3: C: Favor modifier if usage ' \
'when having a single-line body. Another good ' \
'alternative is the usage of control flow &&/||.',
' if something',
' ^^',
'example3.rb:5:5: W: end at 5, 4 is not aligned ' \
'with if at 3, 2',
' end',
' ^^^',
'',
'3 files inspected, 12 offenses detected',
''].join("\n"))
end
end
context 'when emacs format is specified' do
it 'outputs with emacs format' do
create_file('example1.rb', ['# encoding: utf-8',
'x= 0 ',
'y ',
'puts x'])
create_file('example2.rb', ['# encoding: utf-8',
"\tx = 0",
'puts x'])
expect(cli.run(['--format', 'emacs', 'example1.rb',
'example2.rb'])).to eq(1)
expected_output =
["#{abs('example1.rb')}:2:2: C: Surrounding space missing" \
" for operator '='.",
"#{abs('example1.rb')}:2:5: C: Trailing whitespace detected.",
"#{abs('example1.rb')}:3:2: C: Trailing whitespace detected.",
"#{abs('example2.rb')}:1:1: C: Incorrect indentation detected" \
' (column 0 instead of 1).',
"#{abs('example2.rb')}:2:1: C: Tab detected.",
"#{abs('example2.rb')}:3:1: C: Inconsistent indentation " \
'detected.',
''].join("\n")
expect($stdout.string).to eq(expected_output)
end
end
context 'when unknown format name is specified' do
it 'aborts with error message' do
expect { cli.run(['--format', 'unknown', 'example.rb']) }
.to exit_with_code(1)
expect($stderr.string)
.to include('No formatter for "unknown"')
end
end
context 'when ambiguous format name is specified' do
it 'aborts with error message' do
# Both 'files' and 'fuubar' start with an 'f'.
expect { cli.run(['--format', 'f', 'example.rb']) }
.to exit_with_code(1)
expect($stderr.string)
.to include('Cannot determine formatter for "f"')
end
end
end
describe 'custom formatter' do
let(:target_file) { abs('example.rb') }
context 'when a class name is specified' do
it 'uses the class as a formatter' do
module MyTool
class RuboCopFormatter < RuboCop::Formatter::BaseFormatter
def started(all_files)
output.puts "started: #{all_files.join(',')}"
end
def file_started(file, _options)
output.puts "file_started: #{file}"
end
def file_finished(file, _offenses)
output.puts "file_finished: #{file}"
end
def finished(processed_files)
output.puts "finished: #{processed_files.join(',')}"
end
end
end
cli.run(['--format', 'MyTool::RuboCopFormatter', 'example.rb'])
expect($stdout.string).to eq(["started: #{target_file}",
"file_started: #{target_file}",
"file_finished: #{target_file}",
"finished: #{target_file}",
''].join("\n"))
end
end
context 'when unknown class name is specified' do
it 'aborts with error message' do
args = '--format UnknownFormatter example.rb'
expect { cli.run(args.split) }.to exit_with_code(1)
expect($stderr.string).to include('UnknownFormatter')
end
end
end
it 'can be used multiple times' do
cli.run(['--format', 'simple', '--format', 'emacs', 'example.rb'])
expect($stdout.string)
.to include(["== #{target_file} ==",
'C: 2: 81: Line is too long. [90/80]',
"#{abs(target_file)}:2:81: C: Line is too long. " \
'[90/80]'].join("\n"))
end
end
describe '-o/--out option' do
let(:target_file) { 'example.rb' }
before do
# One convention-level Metrics/LineLength offense (90 > 80 chars).
create_file(target_file, ['# encoding: utf-8',
'#' * 90])
end
# -o alone redirects the default formatter's output to the given file.
it 'redirects output to the specified file' do
cli.run(['--out', 'output.txt', target_file])
expect(File.read('output.txt')).to include('Line is too long.')
end
# With multiple -f options, -o binds to the most recently specified
# formatter: simple goes to stdout, emacs to the file.
it 'is applied to the previously specified formatter' do
cli.run(['--format', 'simple',
'--format', 'emacs', '--out', 'emacs_output.txt',
target_file])
expect($stdout.string).to eq(["== #{target_file} ==",
'C: 2: 81: Line is too long. [90/80]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
expect(File.read('emacs_output.txt'))
.to eq(["#{abs(target_file)}:2:81: C: Line is too long. [90/80]",
''].join("\n"))
end
end
describe '--fail-level option' do
  let(:target_file) { 'example.rb' }

  before do
    # A 90-char line produces one convention-level (C) offense, the
    # lowest severity; --fail-level decides whether it affects exit code.
    create_file(target_file, ['# encoding: utf-8',
                              '#' * 90])
  end

  it 'fails when option is less than the severity level' do
    expect(cli.run(['--fail-level', 'convention', target_file])).to eq(1)
  end

  # Description grammar fixed: 'succeed' -> 'succeeds'.
  it 'succeeds when option is greater than the severity level' do
    expect(cli.run(['--fail-level', 'warning', target_file])).to eq(0)
  end
end
describe '--force-exclusion' do
let(:target_file) { 'example.rb' }
before do
create_file(target_file, ['# encoding: utf-8',
'#' * 90])
# The file under test is itself listed in AllCops Exclude.
create_file('.rubocop.yml', ['AllCops:',
' Exclude:',
" - #{target_file}"])
end
# Normally, files named explicitly on the command line are inspected
# even when excluded; --force-exclusion makes the Exclude list win,
# so nothing is inspected and the run exits 0.
it 'excludes files specified in the configuration Exclude ' \
'even if they are explicitly passed as arguments' do
expect(cli.run(['--force-exclusion', target_file])).to eq(0)
end
end
end
context 'when interrupted' do
it 'returns 1' do
# Simulate Ctrl-C: a runner reporting aborting? must make the CLI
# exit non-zero even though the inspected file is clean.
allow_any_instance_of(RuboCop::Runner)
.to receive(:aborting?).and_return(true)
create_file('example.rb', '# encoding: utf-8')
expect(cli.run(['example.rb'])).to eq(1)
end
end
describe '#trap_interrupt' do
let(:runner) { RuboCop::Runner.new({}, RuboCop::ConfigStore.new) }
let(:interrupt_handlers) { [] }
before do
# Capture INT handlers instead of installing real signal traps, so
# the examples can fire them synchronously via #interrupt below.
allow(Signal).to receive(:trap).with('INT') do |&block|
interrupt_handlers << block
end
end
# Invokes every captured INT handler, simulating one SIGINT delivery.
def interrupt
interrupt_handlers.each(&:call)
end
it 'adds a handler for SIGINT' do
expect(interrupt_handlers).to be_empty
cli.trap_interrupt(runner)
expect(interrupt_handlers.size).to eq(1)
end
context 'with SIGINT once' do
# The first interrupt requests a graceful abort of the runner...
it 'aborts processing' do
cli.trap_interrupt(runner)
expect(runner).to receive(:abort)
interrupt
end
# ...and must not terminate the process outright.
it 'does not exit immediately' do
cli.trap_interrupt(runner)
expect_any_instance_of(Object).not_to receive(:exit)
expect_any_instance_of(Object).not_to receive(:exit!)
interrupt
end
end
context 'with SIGINT twice' do
# The second interrupt escalates to an immediate exit!(1).
it 'exits immediately' do
cli.trap_interrupt(runner)
expect_any_instance_of(Object).to receive(:exit!).with(1)
interrupt
interrupt
end
end
end
# Basic smoke tests: exit status (0 clean / 1 offenses) and formatter
# output for simple inputs, plus how parser diagnostics are reported.
it 'checks a given correct file and returns 0' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0',
'puts x'])
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(0)
expect($stdout.string)
.to eq(['',
'1 file inspected, no offenses detected',
''].join("\n"))
end
it 'checks a given file with faults and returns 1' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0 ',
'puts x'])
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq ['== example.rb ==',
'C: 2: 6: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n")
end
# Parser diagnostics surface as offenses: errors (E) for syntax errors,
# warnings (W) for ambiguity diagnostics.
it 'registers an offense for a syntax error' do
create_file('example.rb', ['# encoding: utf-8',
'class Test',
'en'])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:4:1: E: unexpected " \
'token $end',
''].join("\n"))
end
it 'registers an offense for Parser warnings' do
create_file('example.rb', ['# encoding: utf-8',
'puts *test',
'if a then b else c end'])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:2:6: W: " \
'Ambiguous splat operator. Parenthesize the method arguments ' \
"if it's surely a splat operator, or add a whitespace to the " \
'right of the `*` if it should be a multiplication.',
"#{abs('example.rb')}:3:1: C: " \
'Favor the ternary operator (?:) over if/then/else/end ' \
'constructs.',
''].join("\n"))
end
it 'can process a file with an invalid UTF-8 byte sequence' do
# Bytes 0xF9 0x29 are not valid UTF-8; expect a fatal (F) offense
# rather than a crash.
create_file('example.rb', ['# encoding: utf-8',
"# #{'f9'.hex.chr}#{'29'.hex.chr}"])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(["#{abs('example.rb')}:1:1: F: Invalid byte sequence in utf-8.",
''].join("\n"))
end
context 'when errors are raised while processing files due to bugs' do
let(:errors) do
['An error occurred while Encoding cop was inspecting file.rb.']
end
before do
# Stub the runner's accumulated internal errors (cop crashes).
allow_any_instance_of(RuboCop::Runner)
.to receive(:errors).and_return(errors)
end
it 'displays an error message to stderr' do
cli.run([])
expect($stderr.string)
.to include('1 error occurred:').and include(errors.first)
end
end
describe 'rubocop:disable comment' do
it 'can disable all cops in a code section' do
src = ['# encoding: utf-8',
'# rubocop:disable all',
'#' * 90,
'x(123456)',
'y("123")',
'def func',
' # rubocop: enable Metrics/LineLength,Style/StringLiterals',
' ' + '#' * 93,
' x(123456)',
' y("123")',
'end']
create_file('example.rb', src)
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
# all cops were disabled, then 2 were enabled again, so we
# should get 2 offenses reported.
expect($stdout.string)
.to eq(["#{abs('example.rb')}:8:81: C: Line is too long. [95/80]",
"#{abs('example.rb')}:10:5: C: Prefer single-quoted " \
"strings when you don't need string interpolation or " \
'special symbols.',
''].join("\n"))
end
it 'can disable selected cops in a code section' do
create_file('example.rb',
['# encoding: utf-8',
'# rubocop:disable Style/LineLength,' \
'Style/NumericLiterals,Style/StringLiterals',
'#' * 90,
'x(123456)',
'y("123")',
'def func',
' # rubocop: enable Metrics/LineLength, ' \
'Style/StringLiterals',
' ' + '#' * 93,
' x(123456)',
' y("123")',
'end'])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stderr.string)
.to eq(["#{abs('example.rb')}: Style/LineLength has the wrong " \
'namespace - should be Metrics',
''].join("\n"))
# 3 cops were disabled, then 2 were enabled again, so we
# should get 2 offenses reported.
expect($stdout.string)
.to eq(["#{abs('example.rb')}:8:81: C: Line is too long. [95/80]",
"#{abs('example.rb')}:10:5: C: Prefer single-quoted " \
"strings when you don't need string interpolation or " \
'special symbols.',
''].join("\n"))
end
it 'can disable all cops on a single line' do
create_file('example.rb', ['# encoding: utf-8',
'y("123", 123456) # rubocop:disable all'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(0)
expect($stdout.string).to be_empty
end
it 'can disable selected cops on a single line' do
create_file('example.rb',
['# encoding: utf-8',
'a' * 90 + ' # rubocop:disable Metrics/LineLength',
'#' * 95,
'y("123") # rubocop:disable Metrics/LineLength,' \
'Style/StringLiterals'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(
["#{abs('example.rb')}:3:81: C: Line is too long. [95/80]",
''].join("\n"))
end
context 'without using namespace' do
it 'can disable selected cops on a single line' do
create_file('example.rb',
['# encoding: utf-8',
'a' * 90 + ' # rubocop:disable LineLength',
'#' * 95,
'y("123") # rubocop:disable StringLiterals'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(
["#{abs('example.rb')}:3:81: C: Line is too long. [95/80]",
''].join("\n"))
end
end
end
# Ruby files are also recognized by shebang when they lack a .rb
# extension; no path argument means the current directory is scanned.
it 'finds a file with no .rb extension but has a shebang line' do
create_file('example', ['#!/usr/bin/env ruby',
'# encoding: utf-8',
'x = 0',
'puts x'
])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
it 'does not register any offenses for an empty file' do
create_file('example.rb', '')
expect(cli.run(%w(--format simple))).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
describe 'style guide only usage' do
context 'via the cli option' do
describe '--only-guide-cops' do
it 'skips cops that have no link to a style guide' do
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' StyleGuide: ~',
' Max: 2'])
expect(cli.run(['--format', 'simple', '--only-guide-cops',
'example.rb'])).to eq(0)
end
it 'runs cops for rules that link to a style guide' do
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' StyleGuide: "http://an.example/url"',
' Max: 2'])
expect(cli.run(['--format', 'simple', '--only-guide-cops',
'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 3: Line is too long. [4/2]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'overrides configuration of AllCops/StyleGuideCopsOnly' do
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['AllCops:',
' StyleGuideCopsOnly: false',
'Metrics/LineLength:',
' Enabled: true',
' StyleGuide: ~',
' Max: 2'])
expect(cli.run(['--format', 'simple', '--only-guide-cops',
'example.rb'])).to eq(0)
end
end
end
context 'via the config' do
before do
create_file('example.rb', 'fail')
create_file('.rubocop.yml', ['AllCops:',
" StyleGuideCopsOnly: #{guide_cops_only}",
'Metrics/LineLength:',
' Enabled: true',
' StyleGuide: ~',
' Max: 2'])
end
describe 'AllCops/StyleGuideCopsOnly' do
context 'when it is true' do
let(:guide_cops_only) { 'true' }
it 'skips cops that have no link to a style guide' do
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(0)
end
end
context 'when it is false' do
let(:guide_cops_only) { 'false' }
it 'runs cops for rules regardless of any link to the style guide' do
expect(cli.run(['--format', 'simple', 'example.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example.rb ==',
'C: 1: 3: Line is too long. [4/2]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
end
end
end
end
describe 'rails cops' do
describe 'enabling/disabling' do
it 'by default does not run rails cops' do
create_file('app/models/example1.rb', ['# encoding: utf-8',
'read_attribute(:test)'])
expect(cli.run(['--format', 'simple', 'app/models/example1.rb']))
.to eq(0)
end
it 'with -R given runs rails cops' do
create_file('app/models/example1.rb', ['# encoding: utf-8',
'read_attribute(:test)'])
expect(cli.run(['--format', 'simple', '-R', 'app/models/example1.rb']))
.to eq(1)
expect($stdout.string).to include('Prefer self[:attr]')
end
it 'with configuration option true in one dir runs rails cops there' do
source = ['# encoding: utf-8',
'read_attribute(:test)']
create_file('dir1/app/models/example1.rb', source)
create_file('dir1/.rubocop.yml', ['AllCops:',
' RunRailsCops: true',
'',
'Rails/ReadWriteAttribute:',
' Include:',
' - app/models/**/*.rb'])
create_file('dir2/app/models/example2.rb', source)
create_file('dir2/.rubocop.yml', ['AllCops:',
' RunRailsCops: false',
'',
'Rails/ReadWriteAttribute:',
' Include:',
' - app/models/**/*.rb'])
expect(cli.run(%w(--format simple dir1 dir2))).to eq(1)
expect($stdout.string)
.to eq(['== dir1/app/models/example1.rb ==',
'C: 2: 1: Prefer self[:attr] over read_attribute' \
'(:attr).',
'',
'2 files inspected, 1 offense detected',
''].join("\n"))
end
it 'with configuration option false but -R given runs rails cops' do
create_file('app/models/example1.rb', ['# encoding: utf-8',
'read_attribute(:test)'])
create_file('.rubocop.yml', ['AllCops:',
' RunRailsCops: false'])
expect(cli.run(['--format', 'simple', '-R', 'app/models/example1.rb']))
.to eq(1)
expect($stdout.string).to include('Prefer self[:attr]')
end
end
describe 'including/excluding' do
it 'includes some directories by default' do
source = ['# encoding: utf-8',
'read_attribute(:test)',
"default_scope order: 'position'"]
# Several rails cops include app/models by default.
create_file('dir1/app/models/example1.rb', source)
create_file('dir1/app/models/example2.rb', source)
# No rails cops include app/views by default.
create_file('dir1/app/views/example3.rb', source)
# The .rubocop.yml file inherits from default.yml where the Include
# config parameter is set for the rails cops. The paths are interpreted
# as relative to dir1 because .rubocop.yml is placed there.
create_file('dir1/.rubocop.yml', ['AllCops:',
' RunRailsCops: true',
'',
'Rails/ReadWriteAttribute:',
' Exclude:',
' - "**/example2.rb"',
'',
'Rails/DefaultScope:',
' Exclude:',
' - "**/example2.rb"'])
# No .rubocop.yml file in dir2 means that the paths from default.yml
# are interpreted as relative to the current directory, so they don't
# match.
create_file('dir2/app/models/example4.rb', source)
expect(cli.run(%w(--format simple dir1 dir2))).to eq(1)
expect($stdout.string)
.to eq(['== dir1/app/models/example1.rb ==',
'C: 2: 1: Prefer self[:attr] over read_attribute' \
'(:attr).',
'C: 3: 15: default_scope expects a block as its sole' \
' argument.',
'',
'4 files inspected, 2 offenses detected',
''].join("\n"))
end
end
end
describe 'cops can exclude files based on config' do
it 'ignores excluded files' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0'])
create_file('regexp.rb', ['# encoding: utf-8',
'x = 0'])
create_file('exclude_glob.rb', ['#!/usr/bin/env ruby',
'# encoding: utf-8',
'x = 0'])
create_file('dir/thing.rb', ['# encoding: utf-8',
'x = 0'])
# Per-cop Exclude supports plain paths, regexps and globs. All four
# files are still inspected; only this cop's offenses are suppressed,
# so the run comes back clean.
create_file('.rubocop.yml', ['Lint/UselessAssignment:',
' Exclude:',
' - example.rb',
' - !ruby/regexp /regexp.rb\z/',
' - "exclude_*"',
' - "dir/*"'])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stdout.string)
.to eq(['', '4 files inspected, no offenses detected',
''].join("\n"))
end
end
describe 'configuration from file' do
it 'allows the default configuration file as the -c argument' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('.rubocop.yml', [])
expect(cli.run(%w(--format simple -c .rubocop.yml))).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected',
''].join("\n"))
end
it 'finds included files' do
create_file('file.rb', 'x=0') # Included by default
create_file('example', 'x=0')
create_file('regexp', 'x=0')
create_file('.dot1/file.rb', 'x=0') # Hidden but explicitly included
create_file('.dot2/file.rb', 'x=0') # Hidden, excluded by default
create_file('.rubocop.yml', ['AllCops:',
' Include:',
' - example',
' - !ruby/regexp /regexp$/',
' - .dot1/**/*'
])
expect(cli.run(%w(--format files))).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string.split($RS).sort).to eq([abs('.dot1/file.rb'),
abs('example'),
abs('file.rb'),
abs('regexp')])
end
it 'ignores excluded files' do
create_file('example.rb', ['# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('regexp.rb', ['# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('exclude_glob.rb', ['#!/usr/bin/env ruby',
'# encoding: utf-8',
'x = 0',
'puts x'
])
create_file('.rubocop.yml', ['AllCops:',
' Exclude:',
' - example.rb',
' - !ruby/regexp /regexp.rb$/',
' - "exclude_*"'
])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
it 'only reads configuration in explicitly included hidden directories' do
create_file('.hidden/example.rb', ['# encoding: utf-8',
'x=0'])
# This file contains configuration for an unknown cop. This would cause a
# warning to be printed on stderr if the file was read. But it's in a
# hidden directory, so it's not read.
create_file('.hidden/.rubocop.yml', ['SymbolName:',
' Enabled: false'])
create_file('.other/example.rb', ['# encoding: utf-8',
'x=0'])
# The .other directory is explicitly included, so the configuration file
# is read, and modifies the behavior.
create_file('.other/.rubocop.yml', ['Style/SpaceAroundOperators:',
' Enabled: false'])
create_file('.rubocop.yml', ['AllCops:',
' Include:',
' - .other/**/*'])
expect(cli.run(%w(--format simple))).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['== .other/example.rb ==',
'W: 2: 1: Useless assignment to variable - x.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'does not consider Include parameters in subdirectories' do
create_file('dir/example.ruby', ['# encoding: utf-8',
'x=0'])
create_file('dir/.rubocop.yml', ['AllCops:',
' Include:',
' - "*.ruby"'])
expect(cli.run(%w(--format simple))).to eq(0)
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['',
'0 files inspected, no offenses detected',
''].join("\n"))
end
it 'matches included/excluded files correctly when . argument is given' do
create_file('example.rb', 'x = 0')
create_file('special.dsl', ['# encoding: utf-8',
'setup { "stuff" }'
])
create_file('.rubocop.yml', ['AllCops:',
' Include:',
' - "*.dsl"',
' Exclude:',
' - example.rb'
])
expect(cli.run(%w(--format simple .))).to eq(1)
expect($stdout.string)
.to eq(['== special.dsl ==',
"C: 2: 9: Prefer single-quoted strings when you don't " \
'need string interpolation or special symbols.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
# With rubinius 2.0.0.rc1 + rspec 2.13.1,
# File.stub(:open).and_call_original causes SystemStackError.
it 'does not read files in excluded list', broken: :rbx do
  # Interpolate the loop variable: the path had been corrupted to the
  # literal string "#(unknown)", which wrote one bogus file three times
  # over and left `filename` unused.
  %w(rb.rb non-rb.ext without-ext).each do |filename|
    create_file("example/ignored/#{filename}", ['# encoding: utf-8',
                                                '#' * 90
                                               ])
  end
  create_file('example/.rubocop.yml', ['AllCops:',
                                       ' Exclude:',
                                       ' - ignored/**'])
  # Prove exclusion happens before the files are even opened.
  expect(File).not_to receive(:open).with(%r{/ignored/})
  allow(File).to receive(:open).and_call_original
  expect(cli.run(%w(--format simple example))).to eq(0)
  expect($stdout.string)
    .to eq(['', '0 files inspected, no offenses detected',
            ''].join("\n"))
end
it 'can be configured with option to disable a certain error' do
create_file('example1.rb', 'puts 0 ')
create_file('rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Style/CaseIndentation:',
' Enabled: false'])
expect(cli.run(['--format', 'simple',
'-c', 'rubocop.yml', 'example1.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 1: 7: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
context 'without using namespace' do
it 'can be configured with option to disable a certain error' do
create_file('example1.rb', 'puts 0 ')
create_file('rubocop.yml', ['Encoding:',
' Enabled: false',
'',
'CaseIndentation:',
' Enabled: false'])
expect(cli.run(['--format', 'simple',
'-c', 'rubocop.yml', 'example1.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 1: 7: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
end
it 'can disable parser-derived offenses with warning severity' do
# `-' interpreted as argument prefix
create_file('example.rb', 'puts -1')
create_file('.rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Lint/AmbiguousOperator:',
' Enabled: false'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(0)
end
it 'cannot disable Syntax offenses with fatal/error severity' do
create_file('example.rb', 'class Test')
create_file('.rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Syntax:',
' Enabled: false'
])
expect(cli.run(['--format', 'emacs', 'example.rb'])).to eq(1)
expect($stdout.string).to include('unexpected token $end')
end
it 'can be configured to merge a parameter that is a hash' do
create_file('example1.rb',
['# encoding: utf-8',
'puts %w(a b c)',
'puts %q|hi|'])
# We want to change the preferred delimiters for word arrays. The other
# settings from default.yml are unchanged.
create_file('rubocop.yml',
['Style/PercentLiteralDelimiters:',
' PreferredDelimiters:',
" '%w': '[]'",
" '%W': '[]'"])
cli.run(['--format', 'simple', '-c', 'rubocop.yml', 'example1.rb'])
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 2: 6: %w-literals should be delimited by [ and ]',
'C: 3: 6: %q-literals should be delimited by ( and )',
'C: 3: 6: Use %q only for strings that contain both single ' \
'quotes and double quotes.',
'',
'1 file inspected, 3 offenses detected',
''].join("\n"))
end
it 'can be configured to override a parameter that is a hash in a ' \
'special case' do
create_file('example1.rb',
['# encoding: utf-8',
'arr.select { |e| e > 0 }.collect { |e| e * 2 }',
'a2.find_all { |e| e > 0 }'])
# We prefer find_all over select. This setting overrides the default
# select over find_all. Other preferred methods appearing in the default
# config (e.g., map over collect) are kept.
create_file('rubocop.yml',
['Style/CollectionMethods:',
' PreferredMethods:',
' select: find_all'])
cli.run(['--format',
'simple',
'-c',
'rubocop.yml',
'--only',
'CollectionMethods',
'example1.rb'])
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 2: 5: Prefer find_all over select.',
'C: 2: 26: Prefer map over collect.',
'',
'1 file inspected, 2 offenses detected',
''].join("\n"))
end
it 'works when a cop that others depend on is disabled' do
create_file('example1.rb', ['if a',
' b',
'end'])
create_file('rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Metrics/LineLength:',
' Enabled: false'
])
result = cli.run(['--format', 'simple',
'-c', 'rubocop.yml', 'example1.rb'])
expect($stdout.string)
.to eq(['== example1.rb ==',
'C: 1: 1: Favor modifier if usage when having ' \
'a single-line body. Another good alternative is the ' \
'usage of control flow &&/||.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
expect(result).to eq(1)
end
it 'can be configured with project config to disable a certain error' do
create_file('example_src/example1.rb', 'puts 0 ')
create_file('example_src/.rubocop.yml', ['Style/Encoding:',
' Enabled: false',
'',
'Style/CaseIndentation:',
' Enabled: false'
])
expect(cli.run(['--format', 'simple',
'example_src/example1.rb'])).to eq(1)
expect($stdout.string)
.to eq(['== example_src/example1.rb ==',
'C: 1: 7: Trailing whitespace detected.',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'can use an alternative max line length from a config file' do
create_file('example_src/example1.rb', ['# encoding: utf-8',
'#' * 90
])
create_file('example_src/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Max: 100'
])
expect(cli.run(['--format', 'simple',
'example_src/example1.rb'])).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
it 'can have different config files in different directories' do
%w(src lib).each do |dir|
create_file("example/#{dir}/example1.rb", ['# encoding: utf-8',
'#' * 90
])
end
create_file('example/src/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Max: 100'
])
expect(cli.run(%w(--format simple example))).to eq(1)
expect($stdout.string).to eq(
['== example/lib/example1.rb ==',
'C: 2: 81: Line is too long. [90/80]',
'',
'2 files inspected, 1 offense detected',
''].join("\n"))
end
it 'prefers a config file in ancestor directory to another in home' do
create_file('example_src/example1.rb', ['# encoding: utf-8',
'#' * 90
])
create_file('example_src/.rubocop.yml', ['Metrics/LineLength:',
' Enabled: true',
' Max: 100'
])
create_file("#{Dir.home}/.rubocop.yml", ['Metrics/LineLength:',
' Enabled: true',
' Max: 80'
])
expect(cli.run(['--format', 'simple',
'example_src/example1.rb'])).to eq(0)
expect($stdout.string)
.to eq(['', '1 file inspected, no offenses detected', ''].join("\n"))
end
it 'can exclude directories relative to .rubocop.yml' do
%w(src etc/test etc/spec tmp/test tmp/spec).each do |dir|
create_file("example/#{dir}/example1.rb", ['# encoding: utf-8',
'#' * 90])
end
# Hidden subdirectories should also be excluded.
create_file('example/etc/.dot/example1.rb', ['# encoding: utf-8',
'#' * 90])
create_file('example/.rubocop.yml', ['AllCops:',
' Exclude:',
' - src/**',
' - etc/**/*',
' - tmp/spec/**'])
expect(cli.run(%w(--format simple example))).to eq(1)
expect($stderr.string).to eq('')
expect($stdout.string).to eq(['== example/tmp/test/example1.rb ==',
'C: 2: 81: Line is too long. [90/80]',
'',
'1 file inspected, 1 offense detected',
''].join("\n"))
end
it 'can exclude a typical vendor directory' do
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/.rubocop.yml',
['AllCops:',
' Exclude:',
' - lib/parser/lexer.rb'])
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/lib/ex.rb',
['# encoding: utf-8',
'#' * 90])
create_file('.rubocop.yml',
['AllCops:',
' Exclude:',
' - vendor/**/*'])
cli.run(%w(--format simple))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
# vendor/** is in the default AllCops Exclude, so bundled gems are
# skipped with no explicit project configuration at all.
it 'excludes the vendor directory by default' do
create_file('vendor/ex.rb',
['# encoding: utf-8',
'#' * 90])
cli.run(%w(--format simple))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
# Being immune to bad configuration files in excluded directories has
# become important due to a bug in rubygems
# (https://github.com/rubygems/rubygems/issues/680) that makes
# installations of, for example, rubocop lack their .rubocop.yml in the
# root directory.
it 'can exclude a vendor directory with an erroneous config file' do
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/.rubocop.yml',
['inherit_from: non_existent.yml'])
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/lib/ex.rb',
['# encoding: utf-8',
'#' * 90])
create_file('.rubocop.yml',
['AllCops:',
' Exclude:',
' - vendor/**/*'])
cli.run(%w(--format simple))
expect($stderr.string).to eq('')
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
# Relative exclude paths in .rubocop.yml files are relative to that file,
# but in configuration files with other names they will be relative to
# whatever file inherits from them.
it 'can exclude a vendor directory indirectly' do
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/.rubocop.yml',
['AllCops:',
' Exclude:',
' - lib/parser/lexer.rb'])
create_file('vendor/bundle/ruby/1.9.1/gems/parser-2.0.0/lib/ex.rb',
['# encoding: utf-8',
'#' * 90])
create_file('.rubocop.yml',
['inherit_from: config/default.yml'])
create_file('config/default.yml',
['AllCops:',
' Exclude:',
' - vendor/**/*'])
cli.run(%w(--format simple))
expect($stdout.string)
.to eq(['', '0 files inspected, no offenses detected',
''].join("\n"))
end
# Misspelled cop names ("LyneLenth") should warn on stderr, naming the file
# the bogus entry came from.
it 'prints a warning for an unrecognized cop name in .rubocop.yml' do
  create_file('example/example1.rb', ['# encoding: utf-8',
                                      '#' * 90])
  create_file('example/.rubocop.yml', ['Style/LyneLenth:',
                                       ' Enabled: true',
                                       ' Max: 100'])
  expect(cli.run(%w(--format simple example))).to eq(1)
  expect($stderr.string)
    .to eq(['Warning: unrecognized cop Style/LyneLenth found in ' +
            abs('example/.rubocop.yml'),
            ''].join("\n"))
end

# A valid cop with an unknown parameter ("Min") should likewise warn.
it 'prints a warning for an unrecognized configuration parameter' do
  create_file('example/example1.rb', ['# encoding: utf-8',
                                      '#' * 90])
  create_file('example/.rubocop.yml', ['Metrics/LineLength:',
                                       ' Enabled: true',
                                       ' Min: 10'])
  expect(cli.run(%w(--format simple example))).to eq(1)
  expect($stderr.string)
    .to eq(['Warning: unrecognized parameter Metrics/LineLength:Min ' \
            'found in ' + abs('example/.rubocop.yml'),
            ''].join("\n"))
end

# Exclude entries may be YAML-tagged Ruby regexps, not just glob strings.
it 'works when a configuration file passed by -c specifies Exclude ' \
   'with regexp' do
  create_file('example/example1.rb', ['# encoding: utf-8',
                                      '#' * 90])
  create_file('rubocop.yml', ['AllCops:',
                              ' Exclude:',
                              ' - !ruby/regexp /example1\.rb$/'])
  cli.run(%w(--format simple -c rubocop.yml))
  expect($stdout.string)
    .to eq(['', '0 files inspected, no offenses detected',
            ''].join("\n"))
end

it 'works when a configuration file passed by -c specifies Exclude ' \
   'with strings' do
  create_file('example/example1.rb', ['# encoding: utf-8',
                                      '#' * 90])
  create_file('rubocop.yml', ['AllCops:',
                              ' Exclude:',
                              ' - example/**'])
  cli.run(%w(--format simple -c rubocop.yml))
  expect($stdout.string)
    .to eq(['', '0 files inspected, no offenses detected',
            ''].join("\n"))
end

# A configured Severity overrides the cop default; the simple formatter
# prefixes the offense with the severity letter ('E' for error).
it 'works when a configuration file specifies a Severity' do
  create_file('example/example1.rb', ['# encoding: utf-8',
                                      '#' * 90])
  create_file('rubocop.yml', ['Metrics/LineLength:',
                              ' Severity: error'])
  cli.run(%w(--format simple -c rubocop.yml))
  expect($stdout.string)
    .to eq(['== example/example1.rb ==',
            'E: 2: 81: Line is too long. [90/80]',
            '',
            '1 file inspected, 1 offense detected',
            ''].join("\n"))
  expect($stderr.string).to eq('')
end

# An unknown severity name should produce a warning listing the valid ones.
it 'fails when a configuration file specifies an invalid Severity' do
  create_file('example/example1.rb', ['# encoding: utf-8',
                                      '#' * 90])
  create_file('rubocop.yml', ['Metrics/LineLength:',
                              ' Severity: superbad'])
  cli.run(%w(--format simple -c rubocop.yml))
  expect($stderr.string)
    .to eq(["Warning: Invalid severity 'superbad'. " \
            'Valid severities are refactor, convention, ' \
            'warning, error, fatal.',
            ''].join("\n"))
end
# The auto-generated todo file was renamed from rubocop-todo.yml to
# .rubocop_todo.yml; inheriting from the old name should only warn when the
# user is about to regenerate it via --auto-gen-config.
context 'when a file inherits from the old auto generated file' do
  before do
    create_file('rubocop-todo.yml', '')
    create_file('.rubocop.yml', ['inherit_from: rubocop-todo.yml'])
  end

  it 'prints no warning when --auto-gen-config is not set' do
    expect { cli.run(%w(-c .rubocop.yml)) }.not_to exit_with_code(1)
  end

  it 'prints a warning when --auto-gen-config is set' do
    expect { cli.run(%w(-c .rubocop.yml --auto-gen-config)) }
      .to exit_with_code(1)
    expect($stderr.string)
      .to eq(['Attention: rubocop-todo.yml has been renamed to ' \
              '.rubocop_todo.yml',
              ''].join("\n"))
  end
end

# A dir/.rubocop.yml inheriting from ../.rubocop.yml must still resolve the
# parent's relative Exclude ('dir/example.rb') against the parent's location.
context 'when a file inherits from a higher level' do
  before do
    create_file('.rubocop.yml', ['Metrics/LineLength:',
                                 ' Exclude:',
                                 ' - dir/example.rb'])
    create_file('dir/.rubocop.yml', 'inherit_from: ../.rubocop.yml')
    create_file('dir/example.rb', '#' * 90)
  end

  it 'inherits relative excludes correctly' do
    # The 90-char line is excluded for LineLength, so the run is clean.
    expect(cli.run([])).to eq(0)
  end
end
end
end
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # General Settings
  config.app_domain = 'http://hippopath.herokuapp.com/'
  Rails.application.routes.default_url_options[:host] = config.app_domain
  config.action_mailer.default_url_options = { host: config.app_domain }

  # Email: deliver through Gmail SMTP; credentials come from the environment.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.default :charset => "utf-8"
  config.action_mailer.smtp_settings = {
    address: "smtp.gmail.com",
    port: 587,
    domain: ENV["GMAIL_DOMAIN"],
    authentication: "plain",
    enable_starttls_auto: true,
    user_name: ENV["GMAIL_USERNAME"],
    password: ENV["GMAIL_PASSWORD"]
  }

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  # FIX: this was `true`, which renders full stack traces (including source
  # snippets) to every visitor. Production must keep this `false`.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # react.js
  config.react.addons = true

  # AWS / Paperclip: store attachments on S3; credentials from the environment.
  config.paperclip_defaults = {
    :storage => :s3,
    :s3_credentials => {
      :bucket => ENV['S3_BUCKET_NAME'],
      :access_key_id => ENV['AWS_ACCESS_KEY_ID'],
      :secret_access_key => ENV['AWS_SECRET_ACCESS_KEY'],
      :host_name => ENV['AWS_HOST_NAME']
    }
  }
end
Remove the AWS host name from the Paperclip S3 credentials.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # General Settings
  config.app_domain = 'http://hippopath.herokuapp.com/'
  Rails.application.routes.default_url_options[:host] = config.app_domain
  config.action_mailer.default_url_options = { host: config.app_domain }
  # Email: deliver through Gmail SMTP; credentials come from the environment.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.default :charset => "utf-8"
  config.action_mailer.smtp_settings = {
    address: "smtp.gmail.com",
    port: 587,
    domain: ENV["GMAIL_DOMAIN"],
    authentication: "plain",
    enable_starttls_auto: true,
    user_name: ENV["GMAIL_USERNAME"],
    password: ENV["GMAIL_PASSWORD"]
  }
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  # NOTE(review): `true` here shows full stack traces to every visitor in
  # production — confirm this is intentional; the Rails default is `false`.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = true
  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]
  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
  # react.js
  config.react.addons = true
  # AWS / Paperclip: S3 storage; host_name was intentionally removed here.
  config.paperclip_defaults = {
    :storage => :s3,
    :s3_credentials => {
      :bucket => ENV['S3_BUCKET_NAME'],
      :access_key_id => ENV['AWS_ACCESS_KEY_ID'],
      :secret_access_key => ENV['AWS_SECRET_ACCESS_KEY']
    }
  }
end
|
require "spec_helper"
require 'arrthorizer/rails'

describe Arrthorizer::Rails do
  describe "controller integration" do
    let(:controller_class) { Class.new(ApplicationController) do def index; end end}

    describe "each controller class" do
      it "responds to :prepare_context" do
        controller_class.should respond_to :to_prepare_context
      end

      it "responds to :arrthorizer_configuration" do
        controller_class.should respond_to :arrthorizer_configuration
      end
    end

    describe "each controller" do
      let(:controller) { controller_class.new }

      it "has a protected method called :arrthorizer_context" do
        # :arrthorizer_context is protected to prevent exposing it via
        # default or wildcard routes. From Ruby 2.0 onwards, respond_to?
        # returns false for protected methods, so inspect protected_methods
        # directly instead of using `should respond_to`.
        controller.protected_methods.should include :arrthorizer_context
      end

      context "when it has a proper configuration for context building" do
        let(:injected_params) { { some_param: 1 } }
        let(:current_action) { 'some_action' }

        before :each do
          controller.stub(:params).and_return injected_params
          controller.stub(:action_name).and_return(current_action)
        end

        context "and there is no specific configuration for the current action" do
          before :each do
            controller_class.to_prepare_context do |c|
              c.defaults do
                params # this is an example config which can be easily tested
              end
            end
          end

          it "uses the 'default' config to build an Arrthorizer context" do
            controller.send(:arrthorizer_context).should == Arrthorizer::Context(injected_params)
          end
        end

        context "and there is a specific configuration for the current action" do
          let(:action_specific_config) { { some_extra_key: 'some_value' }}

          before :each do
            controller_class.to_prepare_context do |c|
              c.defaults do
                params
              end
              c.for_action(current_action) do
                arrthorizer_defaults.merge(some_extra_key: 'some_value')
              end
            end
          end

          it "uses the more specific configuration for the current action" do
            # Action-specific config merges on top of the defaults.
            context_hash = injected_params.merge(action_specific_config)
            expected_context = Arrthorizer::Context(context_hash)
            controller.send(:arrthorizer_context).should == expected_context
          end
        end
      end
    end
  end
end
Fix a test for Ruby 2.0+
As discussed in http://tenderlovemaking.com/2012/09/07/protected-methods-and-ruby-2-0.html,
respond_to? returns false for protected methods from Ruby 2.0 onwards.
require "spec_helper"
require 'arrthorizer/rails'

# Integration specs for the Rails controller hooks provided by Arrthorizer.
describe Arrthorizer::Rails do
  describe "controller integration" do
    # An anonymous controller so the spec does not depend on app routes.
    let(:controller_class) { Class.new(ApplicationController) do def index; end end}

    describe "each controller class" do
      it "responds to :prepare_context" do
        controller_class.should respond_to :to_prepare_context
      end

      it "responds to :arrthorizer_configuration" do
        controller_class.should respond_to :arrthorizer_configuration
      end
    end

    describe "each controller" do
      let(:controller) { controller_class.new }

      it "has a protected method called :arrthorizer_context" do
        # this method is protected to prevent exposing it
        # via default or wildcard routes
        # (respond_to? returns false for protected methods on Ruby 2.0+,
        # hence the protected_methods check).
        controller.protected_methods.should include :arrthorizer_context
      end

      context "when it has a proper configuration for context building" do
        let(:injected_params) { { some_param: 1 } }
        let(:current_action) { 'some_action' }

        before :each do
          controller.stub(:params).and_return injected_params
          controller.stub(:action_name).and_return(current_action)
        end

        context "and there is no specific configuration for the current action" do
          before :each do
            controller_class.to_prepare_context do |c|
              c.defaults do
                params # this is an example config which can be easily tested
              end
            end
          end

          it "uses the 'default' config to build an Arrthorizer context" do
            controller.send(:arrthorizer_context).should == Arrthorizer::Context(injected_params)
          end
        end

        context "and there is a specific configuration for the current action" do
          let(:action_specific_config) { { some_extra_key: 'some_value' }}

          before :each do
            controller_class.to_prepare_context do |c|
              c.defaults do
                params
              end
              c.for_action(current_action) do
                arrthorizer_defaults.merge(some_extra_key: 'some_value')
              end
            end
          end

          it "uses the more specific configuration for the current action" do
            # Action-specific config merges on top of the defaults.
            context_hash = injected_params.merge(action_specific_config)
            expected_context = Arrthorizer::Context(context_hash)
            controller.send(:arrthorizer_context).should == expected_context
          end
        end
      end
    end
  end
end
|
module Gitnesse
  # Installs/removes the Gitnesse Cucumber hook file and records scenario
  # results back to the wiki page for the feature.
  class Hooks
    @config = Gitnesse::Config.instance

    DIR = File.expand_path("./#{@config.features_dir}/support")
    # Derive the hook path from DIR instead of repeating the expansion.
    PATH = File.join(DIR, "gitnesse.rb")

    # Public: Copies Gitnesse Cucumber hooks to Cucumber's support dir.
    #
    # Returns nothing
    def self.create!
      FileUtils.mkdir_p DIR unless File.directory?(DIR)
      file = File.expand_path("#{File.dirname(__FILE__)}/hooks/gitnesse.rb")
      FileUtils.cp file, PATH
    end

    # Public: Removes existing Gitnesse hooks in Cucumber's support dir
    #
    # Returns nothing
    def self.destroy!
      FileUtils.rm PATH, force: true
    end

    # Public: Used by Gitnesse hook to append results to wiki page for feature
    #
    # scenario - Cucumber scenario passed by post-scenario hook
    #
    # Returns nothing
    def self.append_results(scenario)
      Gitnesse::ConfigLoader.find_and_load
      dir = Gitnesse::DirManager.project_dir

      if scenario.respond_to?(:scenario_outline)
        # Outline examples report file/name on the outline object; the
        # scenario's own name holds the example-row values used as subtitle.
        outline = scenario.scenario_outline
        file = outline.file.gsub(/^#{@config.features_dir}\//, '')
        name = outline.name.to_s
        subtitle = scenario.name.gsub(/(^\|\s+|\s+\|$)/, '').gsub(/\s+\|/, ',')
      else
        file = scenario.file.gsub(/^#{@config.features_dir}\//, '')
        name = scenario.name
        subtitle = nil
      end

      # Feature path maps to a wiki page named with " > " separators.
      page = file.gsub("/", " > ")
      status = scenario.status

      @wiki = Gitnesse::Wiki.new(@config.repository_url, dir, clone: false)
      page = @wiki.pages.find { |f| f.wiki_path.include?(page) }
      return unless page

      page.append_result name, status, subtitle
      @wiki.repo.add(page.wiki_path)
    end
  end
end
Small refactors to hooks.rb
module Gitnesse
  # Installs/removes the Gitnesse Cucumber hook file and records scenario
  # results back to the wiki page for the feature.
  class Hooks
    # Class-instance variable (not @@) holding the singleton config; it is
    # read at class-definition time to compute DIR/PATH below.
    @config = Gitnesse::Config.instance

    DIR = File.expand_path "./#{@config.features_dir}/support"
    PATH = File.join DIR, "gitnesse.rb"

    # Public: Copies Gitnesse Cucumber hooks to Cucumber's support dir.
    #
    # Returns nothing
    def self.create!
      FileUtils.mkdir_p DIR unless File.directory?(DIR)
      file = File.expand_path("#{File.dirname(__FILE__)}/hooks/gitnesse.rb")
      FileUtils.cp file, PATH
    end

    # Public: Removes existing Gitnesse hooks in Cucumber's support dir
    #
    # Returns nothing
    def self.destroy!
      FileUtils.rm PATH, force: true
    end

    # Public: Used by Gitnesse hook to append results to wiki page for feature
    #
    # scenario - Cucumber scenario passed by post-scenario hook
    #
    # Returns nothing
    def self.append_results(scenario)
      Gitnesse::ConfigLoader.find_and_load
      dir = Gitnesse::DirManager.project_dir
      if scenario.respond_to?(:scenario_outline)
        # Outline examples report file/name on the outline object; the
        # scenario's own name holds the example-row values used as subtitle.
        outline = scenario.scenario_outline
        file = outline.file.gsub(/^#{@config.features_dir}\//, '')
        name = "#{outline.name}"
        subtitle = scenario.name.gsub(/(^\|\s+|\s+\|$)/, '').gsub(/\s+\|/, ',')
      else
        file = scenario.file.gsub(/^#{@config.features_dir}\//, '')
        name = scenario.name
        subtitle = nil
      end
      # Feature path maps to a wiki page named with " > " separators.
      page = file.gsub("/", " > ")
      status = scenario.status
      @wiki = Gitnesse::Wiki.new(@config.repository_url, dir, clone: false)
      page = @wiki.pages.find { |f| f.wiki_path.include?(page) }
      return unless page
      page.append_result name, status, subtitle
      @wiki.repo.add(page.wiki_path)
    end
  end
end
|
require 'spec_helper'

# Serverspec suite for the minimum2scp/ruby Docker image.
describe 'minimum2scp/ruby' do
  context 'with env [APT_LINE=keep]' do
    before(:all) do
      start_container({
        'Image' => ENV['DOCKER_IMAGE'] || "minimum2scp/#{File.basename(__dir__)}:latest",
        'Env' => [ 'APT_LINE=keep' ]
      })
    end

    after(:all) do
      stop_container
    end

    #Dir["#{__dir__}/../baseimage/*_spec.rb"].sort.each do |spec|
    #  load spec
    #end

    # Build scratch dir must have been cleaned out of the image.
    describe file('/tmp/build') do
      it { should_not be_directory }
    end

    # Distro ruby toolchain plus headers needed to build native gems.
    %w[
      ruby ruby-dev bundler rake pry
      build-essential autoconf bison ca-certificates libgdbm-dev libncursesw5-dev libncurses5-dev libreadline-dev tcl-dev tk-dev zlib1g-dev libffi-dev libyaml-dev libgmp-dev
      gem2deb
    ].each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end

    # Both OpenSSL 1.0 and 1.1 runtimes, but only the 1.0 -dev headers.
    %w[libssl1.0-dev libssl1.0.2 libssl1.1].each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end

    describe package('libssl-dev') do
      it { should_not be_installed }
    end

    describe file('/usr/bin/ruby') do
      it { should be_symlink }
      it { should be_linked_to('ruby2.3') }
    end

    describe command('ruby2.3 -v') do
      its(:stdout) { should include 'ruby 2.3.3p222 (2016-11-21) [x86_64-linux-gnu]' }
    end

    # rbenv layout under /opt/rbenv with only the expected plugins present.
    describe file('/opt/rbenv') do
      it { should be_directory }
    end

    describe file('/etc/profile.d/rbenv.sh') do
      it { should be_file }
      its(:content) { should match %r!^export RBENV_ROOT=/opt/rbenv$! }
    end

    describe file('/opt/rbenv/bin') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/versions') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/shims') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/plugins/ruby-build') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-default-gems') do
      it { should_not be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-gem-rehash') do
      it { should_not be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-aliases') do
      it { should_not be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-update') do
      it { should be_directory }
    end

    describe package('bundler') do
      # FIX: the pin was stale at 1.12.5-4; bundler 1.13.6-2 was uploaded to
      # Debian sid (https://tracker.debian.org/news/822468).
      it { should be_installed.with_version('1.13.6-2') }
    end
  end
end
ruby: fixed spec
bundler 1.13.6-2 package was uploaded into sid
https://tracker.debian.org/news/822468
require 'spec_helper'

# Serverspec suite for the minimum2scp/ruby Docker image.
describe 'minimum2scp/ruby' do
  context 'with env [APT_LINE=keep]' do
    before(:all) do
      start_container({
        'Image' => ENV['DOCKER_IMAGE'] || "minimum2scp/#{File.basename(__dir__)}:latest",
        'Env' => [ 'APT_LINE=keep' ]
      })
    end

    after(:all) do
      stop_container
    end

    #Dir["#{__dir__}/../baseimage/*_spec.rb"].sort.each do |spec|
    #  load spec
    #end

    # Build scratch dir must have been cleaned out of the image.
    describe file('/tmp/build') do
      it { should_not be_directory }
    end

    # Distro ruby toolchain plus headers needed to build native gems.
    %w[
      ruby ruby-dev bundler rake pry
      build-essential autoconf bison ca-certificates libgdbm-dev libncursesw5-dev libncurses5-dev libreadline-dev tcl-dev tk-dev zlib1g-dev libffi-dev libyaml-dev libgmp-dev
      gem2deb
    ].each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end

    # Both OpenSSL 1.0 and 1.1 runtimes, but only the 1.0 -dev headers.
    %w[libssl1.0-dev libssl1.0.2 libssl1.1].each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end

    describe package('libssl-dev') do
      it { should_not be_installed }
    end

    describe file('/usr/bin/ruby') do
      it { should be_symlink }
      it { should be_linked_to('ruby2.3') }
    end

    describe command('ruby2.3 -v') do
      its(:stdout) { should include 'ruby 2.3.3p222 (2016-11-21) [x86_64-linux-gnu]' }
    end

    # rbenv layout under /opt/rbenv with only the expected plugins present.
    describe file('/opt/rbenv') do
      it { should be_directory }
    end

    describe file('/etc/profile.d/rbenv.sh') do
      it { should be_file }
      its(:content) { should match %r!^export RBENV_ROOT=/opt/rbenv$! }
    end

    describe file('/opt/rbenv/bin') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/versions') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/shims') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/plugins/ruby-build') do
      it { should be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-default-gems') do
      it { should_not be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-gem-rehash') do
      it { should_not be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-aliases') do
      it { should_not be_directory }
    end

    describe file('/opt/rbenv/plugins/rbenv-update') do
      it { should be_directory }
    end

    describe package('bundler') do
      # bundler 1.13.6-2 was uploaded into sid
      # (https://tracker.debian.org/news/822468).
      it { should be_installed.with_version('1.13.6-2') }
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.