CombinedText stringlengths 4 3.42M |
|---|
# Production environment configuration.
# Settings specified here will take precedence over those in config/application.rb.
Rails.application.configure do
  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or nginx will already do this).
  config.serve_static_files = false

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version`
  # have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Raise email delivery errors (rather than silently ignoring them) and actually
  # perform deliveries; the SMTP transport itself is set up in after_initialize below.
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_deliveries = true

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Custom logging via the logstasher gem: the regular app log is suppressed and
  # JSON events are written to log/logstash_<env>.json at INFO level.
  config.logstasher.enabled = true
  config.logstasher.suppress_app_log = true
  config.logstasher.log_level = Logger::INFO
  config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
  config.logstasher.source = 'logstasher'

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Configure ActionMailer from environment variables once the app is initialized.
  config.after_initialize do
    smtp_domain = ENV['SMTP_DOMAIN'] || 'localhost'
    ActionMailer::Base.default_url_options = {
      host: smtp_domain,
      protocol: ENV['SMTP_PROTOCOL'] || 'http'
    }
    # Settings.mail_from / Settings.mail_reply_to come from the app's settings
    # object, defined elsewhere in the project.
    ActionMailer::Base.default from: Settings.mail_from
    ActionMailer::Base.default reply_to: Settings.mail_reply_to
    ActionMailer::Base.smtp_settings = {
      address: ENV['SMTP_HOSTNAME'] || 'localhost',
      # NOTE(review): ENV values are Strings when set; the fallback 587 is an
      # Integer — confirm the mailer accepts both forms.
      port: ENV['SMTP_PORT'] || 587,
      domain: smtp_domain,
      user_name: ENV['SMTP_USERNAME'] || '',
      password: ENV['SMTP_PASSWORD'] || '',
      authentication: :login,
      enable_starttls_auto: true
    }
  end
end
Updated production config:
* serve static assets = true
* set the assets prefix
# Production environment configuration (variant that serves static assets
# from the Rails app itself, under a custom asset prefix).
# Settings specified here will take precedence over those in config/application.rb.
Rails.application.configure do
  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Serve static files from the Rails app itself (no fronting web server is
  # assumed to handle them), under the /myassets prefix.
  config.serve_static_files = true
  config.assets.prefix = '/myassets'

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version`
  # have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Raise email delivery errors (rather than silently ignoring them) and actually
  # perform deliveries; the SMTP transport itself is set up in after_initialize below.
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_deliveries = true

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Custom logging via the logstasher gem: the regular app log is suppressed and
  # JSON events are written to log/logstash_<env>.json at INFO level.
  config.logstasher.enabled = true
  config.logstasher.suppress_app_log = true
  config.logstasher.log_level = Logger::INFO
  config.logstasher.logger_path = "#{Rails.root}/log/logstash_#{Rails.env}.json"
  config.logstasher.source = 'logstasher'

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Configure ActionMailer from environment variables once the app is initialized.
  config.after_initialize do
    smtp_domain = ENV['SMTP_DOMAIN'] || 'localhost'
    ActionMailer::Base.default_url_options = {
      host: smtp_domain,
      protocol: ENV['SMTP_PROTOCOL'] || 'http'
    }
    # Settings.mail_from / Settings.mail_reply_to come from the app's settings
    # object, defined elsewhere in the project.
    ActionMailer::Base.default from: Settings.mail_from
    ActionMailer::Base.default reply_to: Settings.mail_reply_to
    ActionMailer::Base.smtp_settings = {
      address: ENV['SMTP_HOSTNAME'] || 'localhost',
      # NOTE(review): ENV values are Strings when set; the fallback 587 is an
      # Integer — confirm the mailer accepts both forms.
      port: ENV['SMTP_PORT'] || 587,
      domain: smtp_domain,
      user_name: ENV['SMTP_USERNAME'] || '',
      password: ENV['SMTP_PASSWORD'] || '',
      authentication: :login,
      enable_starttls_auto: true
    }
  end
end
|
# encoding: utf-8
require 'rspec/mocks'
require_relative '../../../spec_helper'
require_relative '../../../../app/controllers/carto/api/database_groups_controller'
require_relative '.././../../factories/visualization_creation_helpers'
# cURL samples:
# - Rename group: curl -v -H "Content-Type: application/json" -X PUT -d '{ "display_name": "Demo Group" }' "http://central-org-b-admin.localhost.lan:3000/api/v1/organization/95c2c425-5c8c-4b20-8999-d79cd20c2f2c/groups/c662f7ee-aefb-4f49-93ea-1f671a77bb36?api_key=665646f527c3006b124c15a308bb98f4ed1f52e4"
# - Add users: curl -v -H "Content-Type: application/json" -X POST -d '{ "users" : ["78ee570a-812d-4cce-928c-e5ebeb4708e8", "7e53c96c-1598-43e0-b23e-290daf633547"] }' "http://central-org-b-admin.localhost.lan:3000/api/v1/organization/95c2c425-5c8c-4b20-8999-d79cd20c2f2c/groups/c662f7ee-aefb-4f49-93ea-1f671a77bb36/users?api_key=665646f527c3006b124c15a308bb98f4ed1f52e4"
# - Remove users: curl -v -H "Content-Type: application/json" -X DELETE -d '{ "users" : ["78ee570a-812d-4cce-928c-e5ebeb4708e8", "7e53c96c-1598-43e0-b23e-290daf633547"] }' "http://central-org-b-admin.localhost.lan:3000/api/v1/organization/95c2c425-5c8c-4b20-8999-d79cd20c2f2c/groups/c662f7ee-aefb-4f49-93ea-1f671a77bb36/users?api_key=665646f527c3006b124c15a308bb98f4ed1f52e4"
# Request specs for Carto::Api::GroupsController, exercised as the organization
# owner. The database-extension queries (create/rename/add/remove/destroy) are
# mocked with mocha expectations, so only the HTTP layer and the model
# bookkeeping around those calls are verified.
describe Carto::Api::GroupsController do
  include_context 'organization with users helper'

  describe 'Groups editor management' do
    # Shared fixtures: three groups in the organization plus the exact JSON the
    # controller is expected to render for them and for @org_user_1.
    before(:all) do
      @carto_org_user_1 = Carto::User.find(@org_user_1.id)
      @org_user_1_json = { "id" => @org_user_1.id,
                          "username" => @org_user_1.username,
                          "avatar_url" => @org_user_1.avatar_url,
                          "base_url" => @org_user_1.public_url,
                          "viewer" => false
                        }
      @carto_org_user_2 = Carto::User.find(@org_user_2.id)
      @group_1 = FactoryGirl.create(:random_group, display_name: 'g_1', organization: @carto_organization)
      @group_1_json = { 'id' => @group_1.id, 'organization_id' => @group_1.organization_id, 'name' => @group_1.name, 'display_name' => @group_1.display_name }
      @group_2 = FactoryGirl.create(:random_group, display_name: 'g_2', organization: @carto_organization)
      @group_2_json = { 'id' => @group_2.id, 'organization_id' => @group_2.organization_id, 'name' => @group_2.name, 'display_name' => @group_2.display_name }
      @group_3 = FactoryGirl.create(:random_group, display_name: 'g_3', organization: @carto_organization)
      @group_3_json = { 'id' => @group_3.id, 'organization_id' => @group_3.organization_id, 'name' => @group_3.name, 'display_name' => @group_3.display_name }
      @headers = {'CONTENT_TYPE' => 'application/json', :format => "json" }
    end

    after(:all) do
      @group_1.destroy
      @group_2.destroy
      @group_3.destroy
    end

    before(:each) do
      @carto_organization.reload
    end

    describe '#index' do
      it 'returns 401 without authentication' do
        get_json api_v1_organization_groups_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id), {}, @headers do |response|
          response.status.should == 401
        end
      end

      it 'returns groups with pagination metadata' do
        get_json api_v1_organization_groups_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, api_key: @org_user_owner.api_key), {}, @headers do |response|
          response.status.should == 200
          expected_response = {
            groups: [ @group_1_json, @group_2_json, @group_3_json ],
            total_entries: 3
          }
          response.body.should == expected_response
        end
      end

      it 'returns paginated groups with pagination metadata' do
        # page 2 of size 1, ordered by display_name => only g_2.
        get_json api_v1_organization_groups_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, api_key: @org_user_owner.api_key), { page: 2, per_page: 1, order: 'display_name' }, @headers do |response|
          response.status.should == 200
          expected_response = {
            groups: [ @group_2_json ],
            total_entries: 3
          }
          response.body.should == expected_response
        end
      end

      it 'can search by name' do
        get_json api_v1_organization_groups_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, api_key: @org_user_owner.api_key, q: @group_2.name), { page: 1, per_page: 1, order: 'display_name' }, @headers do |response|
          response.status.should == 200
          expected_response = {
            groups: [ @group_2_json ],
            total_entries: 1
          }
          response.body.should == expected_response
        end
      end

      describe "users groups" do
        before(:each) do
          @group_1.add_user(@org_user_1.username)
        end

        after(:each) do
          @group_1.remove_user(@org_user_1.username)
        end

        it 'returns user groups if user_id is requested' do
          get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key), {}, @headers do |response|
            response.status.should == 200
            expected_response = {
              groups: [ @group_1_json ],
              total_entries: 1
            }
            response.body.should == expected_response
          end
        end

        it 'optionally fetches number of shared tables, maps and users' do
          # Without the fetch_* params the counts/users are omitted...
          get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key), {}, @headers do |response|
            response.status.should == 200
            expected_response = {
              groups: [
                @group_1_json
              ],
              total_entries: 1
            }
            response.body.should == expected_response
          end
          # ...and with them the counts and user list are included.
          get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key, fetch_shared_tables_count: true, fetch_shared_maps_count: true, fetch_users: true), {}, @headers do |response|
            response.status.should == 200
            expected_response = {
              groups: [
                @group_1_json.merge({
                  'shared_tables_count' => 0,
                  'shared_maps_count' => 0,
                  'users' => [ @org_user_1_json ]
                })
              ],
              total_entries: 1
            }
            response.body.should == expected_response
          end
        end

        it 'can fetch number of shared tables, maps and users when a table is shared' do
          bypass_named_maps
          # Share a table owned by user 2 with the group, read-only.
          table_user_2 = create_table_with_options(@org_user_2)
          permission = CartoDB::Permission[Carto::Visualization.find(table_user_2['table_visualization'][:id]).permission.id]
          permission.set_group_permission(@group_1, Carto::Permission::ACCESS_READONLY)
          permission.save
          get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key, fetch_shared_tables_count: true, fetch_shared_maps_count: true, fetch_users: true), {}, @headers do |response|
            response.status.should == 200
            expected_response = {
              groups: [
                @group_1_json.merge({
                  'shared_tables_count' => 1,
                  'shared_maps_count' => 0,
                  'users' => [ @org_user_1_json ]
                })
              ],
              total_entries: 1
            }
            response.body.should == expected_response
          end
        end
      end
    end

    it '#show returns a group' do
      get_json api_v1_organization_groups_show_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: @group_1.id, api_key: @org_user_owner.api_key), { }, @headers do |response|
        response.status.should == 200
        response.body.should == @group_1_json.symbolize_keys
      end
    end

    it '#show support fetch_shared_maps_count, fetch_shared_tables_count and fetch_users' do
      get_json api_v1_organization_groups_show_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: @group_1.id, api_key: @org_user_owner.api_key, fetch_shared_tables_count: true, fetch_shared_maps_count: true, fetch_users: true), { }, @headers do |response|
        response.status.should == 200
        response.body[:shared_tables_count].should_not be_nil
        response.body[:shared_maps_count].should_not be_nil
        response.body[:users].should_not be_nil
      end
    end

    it '#create fails if user is not owner' do
      post_json api_v1_organization_groups_create_url(user_domain: @org_user_1.username, organization_id: @carto_organization.id, api_key: @org_user_1.api_key), { display_name: 'willfail' }, @headers do |response|
        response.status.should == 400
      end
    end

    it '#create triggers group creation' do
      display_name = 'a new group'
      name = 'a new group'
      # Replacement for extension interaction
      fake_database_role = 'fake_database_role'
      fake_group_creation = Carto::Group.new_instance(@carto_organization.database_name, name, fake_database_role)
      fake_group_creation.save
      Carto::Group.expects(:create_group_extension_query).with(anything, name).returns(fake_group_creation)
      post_json api_v1_organization_groups_create_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, api_key: @org_user_owner.api_key), { display_name: display_name }, @headers do |response|
        response.status.should == 200
        response.body[:id].should_not be_nil
        response.body[:organization_id].should == @carto_organization.id
        response.body[:name].should == name
        response.body[:display_name].should == display_name
        # Also check database data because Group changes something after extension interaction
        new_group = Carto::Group.find(response.body[:id])
        new_group.organization_id.should == @carto_organization.id
        new_group.name.should == name
        new_group.display_name.should == display_name
        new_group.database_role.should_not be_nil
      end
    end

    it '#update triggers group renaming' do
      group = @carto_organization.groups.first
      # '%' is expected to be sanitized to '_' in the database-level name.
      new_display_name = 'A Group %Renamed'
      expected_new_name = 'A Group _Renamed'
      Carto::Group.expects(:rename_group_extension_query).with(anything, group.name, expected_new_name)
      put_json api_v1_organization_groups_update_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key), { display_name: new_display_name }, @headers do |response|
        response.status.should == 200
        response.body[:id].should_not be_nil
        response.body[:organization_id].should == @carto_organization.id
        # INFO: since test doesn't actually trigger the extension we only check expectation on renaming call and display name
        response.body[:display_name].should == new_display_name
        response.body[:shared_tables_count].should_not be_nil
        response.body[:shared_maps_count].should_not be_nil
        response.body[:users].should_not be_nil
        # Also check database data because Group changes something after extension interaction
        new_group = Carto::Group.find(response.body[:id])
        new_group.organization_id.should == @carto_organization.id
        new_group.display_name.should == new_display_name
        new_group.database_role.should_not be_nil
      end
    end

    it '#update returns 409 and a meaningful error message if there is a group with the same name within the organization' do
      group = @carto_organization.groups[0]
      group_2 = @carto_organization.groups[1]
      Carto::Group.expects(:rename_group_extension_query).with(anything, anything, anything).never
      put_json api_v1_organization_groups_update_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key), { display_name: group_2.display_name }, @headers do |response|
        response.status.should == 409
        response.body[:errors][0].should match /A group with that name already exists/
      end
    end

    it '#add_users triggers group inclusion' do
      group = @carto_organization.groups.first
      user = @org_user_1
      Carto::Group.expects(:add_users_group_extension_query).with(anything, group.name, [user.username])
      post_json api_v1_organization_groups_add_users_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key), { user_id: user.id }, @headers do |response|
        response.status.should == 200
        # INFO: since test doesn't actually trigger the extension we only check expectation on membership call
      end
    end

    it '#remove_users triggers group exclusion' do
      group = @carto_organization.groups.first
      user = @carto_org_user_1
      group.users << user
      group.save
      group.reload
      # NOTE(review): return value discarded — likely missing an assertion
      # (e.g. `.should be_true`).
      group.users.include?(user)
      Carto::Group.expects(:remove_users_group_extension_query).with(anything, group.name, [user.username])
      delete_json api_v1_organization_groups_remove_users_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key, user_id: user.id), {}, @headers do |response|
        response.status.should == 200
        # INFO: since test doesn't actually trigger the extension we only check expectation on membership call
      end
    end

    it '#add_users allows batches and triggers group inclusion' do
      group = @carto_organization.groups.first
      user_1 = @org_user_1
      user_2 = @org_user_2
      Carto::Group.expects(:add_users_group_extension_query).with(anything, group.name, [user_1.username, user_2.username])
      post_json api_v1_organization_groups_add_users_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key), { users: [ user_1.id, user_2.id ] }, @headers do |response|
        response.status.should == 200
        # INFO: since test doesn't actually trigger the extension we only check expectation on membership call
      end
    end

    it '#remove_users allows batches and triggers group exclusion' do
      group = @carto_organization.groups.first
      user_1 = @carto_org_user_1
      user_2 = @carto_org_user_2
      # NOTE(review): only user_2 is added to the group, yet the removal call
      # below is expected to list both usernames — confirm this is intended.
      group.users << user_2
      group.save
      group.reload
      # NOTE(review): return values discarded — likely missing assertions.
      group.users.include?(user_1)
      group.users.include?(user_2)
      Carto::Group.expects(:remove_users_group_extension_query).with(anything, group.name, [user_1.username, user_2.username])
      delete_json api_v1_organization_groups_remove_users_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key), { users: [ user_1.id, user_2.id ] }, @headers do |response|
        response.status.should == 200
        # INFO: since test doesn't actually trigger the extension we only check expectation on membership call
      end
    end

    it '#drops triggers deletion of existing groups' do
      group = @carto_organization.groups.first
      Carto::Group.expects(:destroy_group_extension_query).with(anything, group.name)
      delete_json api_v1_organization_groups_destroy_url(user_domain: @org_user_owner.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @org_user_owner.api_key), { }, @headers do |response|
        response.status.should == 204
        # Extension is simulated, so we delete the group manually
        group.delete
      end
    end
  end
end
Tests for the groups controller run as an org admin (not the owner)
# encoding: utf-8
require 'rspec/mocks'
require_relative '../../../spec_helper'
require_relative '../../../../app/controllers/carto/api/database_groups_controller'
require_relative '.././../../factories/visualization_creation_helpers'
# cURL samples:
# - Rename group: curl -v -H "Content-Type: application/json" -X PUT -d '{ "display_name": "Demo Group" }' "http://central-org-b-admin.localhost.lan:3000/api/v1/organization/95c2c425-5c8c-4b20-8999-d79cd20c2f2c/groups/c662f7ee-aefb-4f49-93ea-1f671a77bb36?api_key=665646f527c3006b124c15a308bb98f4ed1f52e4"
# - Add users: curl -v -H "Content-Type: application/json" -X POST -d '{ "users" : ["78ee570a-812d-4cce-928c-e5ebeb4708e8", "7e53c96c-1598-43e0-b23e-290daf633547"] }' "http://central-org-b-admin.localhost.lan:3000/api/v1/organization/95c2c425-5c8c-4b20-8999-d79cd20c2f2c/groups/c662f7ee-aefb-4f49-93ea-1f671a77bb36/users?api_key=665646f527c3006b124c15a308bb98f4ed1f52e4"
# - Remove users: curl -v -H "Content-Type: application/json" -X DELETE -d '{ "users" : ["78ee570a-812d-4cce-928c-e5ebeb4708e8", "7e53c96c-1598-43e0-b23e-290daf633547"] }' "http://central-org-b-admin.localhost.lan:3000/api/v1/organization/95c2c425-5c8c-4b20-8999-d79cd20c2f2c/groups/c662f7ee-aefb-4f49-93ea-1f671a77bb36/users?api_key=665646f527c3006b124c15a308bb98f4ed1f52e4"
describe Carto::Api::GroupsController do
include_context 'organization with users helper'
shared_examples_for 'Groups editor management' do
before(:all) do
@org_user_1_json = {
"id" => @org_user_1.id,
"username" => @org_user_1.username,
"avatar_url" => @org_user_1.avatar_url,
"base_url" => @org_user_1.public_url,
"viewer" => false
}
@group_1 = FactoryGirl.create(:random_group, display_name: 'g_1', organization: @carto_organization)
@group_1_json = { 'id' => @group_1.id, 'organization_id' => @group_1.organization_id, 'name' => @group_1.name, 'display_name' => @group_1.display_name }
@group_2 = FactoryGirl.create(:random_group, display_name: 'g_2', organization: @carto_organization)
@group_2_json = { 'id' => @group_2.id, 'organization_id' => @group_2.organization_id, 'name' => @group_2.name, 'display_name' => @group_2.display_name }
@group_3 = FactoryGirl.create(:random_group, display_name: 'g_3', organization: @carto_organization)
@group_3_json = { 'id' => @group_3.id, 'organization_id' => @group_3.organization_id, 'name' => @group_3.name, 'display_name' => @group_3.display_name }
@headers = {'CONTENT_TYPE' => 'application/json', :format => "json" }
end
after(:all) do
@group_1.destroy
@group_2.destroy
@group_3.destroy
end
before(:each) do
@carto_organization.reload
end
describe '#index' do
it 'returns 401 without authentication' do
get_json api_v1_organization_groups_url(user_domain: @admin_user.username, organization_id: @carto_organization.id), {}, @headers do |response|
response.status.should == 401
end
end
it 'returns groups with pagination metadata' do
get_json api_v1_organization_groups_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, api_key: @admin_user.api_key), {}, @headers do |response|
response.status.should == 200
expected_response = {
groups: [ @group_1_json, @group_2_json, @group_3_json ],
total_entries: 3
}
response.body.should == expected_response
end
end
it 'returns paginated groups with pagination metadata' do
get_json api_v1_organization_groups_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, api_key: @admin_user.api_key), { page: 2, per_page: 1, order: 'display_name' }, @headers do |response|
response.status.should == 200
expected_response = {
groups: [ @group_2_json ],
total_entries: 3
}
response.body.should == expected_response
end
end
it 'can search by name' do
get_json api_v1_organization_groups_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, api_key: @admin_user.api_key, q: @group_2.name), { page: 1, per_page: 1, order: 'display_name' }, @headers do |response|
response.status.should == 200
expected_response = {
groups: [ @group_2_json ],
total_entries: 1
}
response.body.should == expected_response
end
end
describe "users groups" do
before(:each) do
@group_1.add_user(@org_user_1.username)
end
after(:each) do
@group_1.remove_user(@org_user_1.username)
end
it 'returns user groups if user_id is requested' do
get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key), {}, @headers do |response|
response.status.should == 200
expected_response = {
groups: [ @group_1_json ],
total_entries: 1
}
response.body.should == expected_response
end
end
it 'optionally fetches number of shared tables, maps and users' do
get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key), {}, @headers do |response|
response.status.should == 200
expected_response = {
groups: [
@group_1_json
],
total_entries: 1
}
response.body.should == expected_response
end
get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key, fetch_shared_tables_count: true, fetch_shared_maps_count: true, fetch_users: true), {}, @headers do |response|
response.status.should == 200
expected_response = {
groups: [
@group_1_json.merge({
'shared_tables_count' => 0,
'shared_maps_count' => 0,
'users' => [ @org_user_1_json ]
})
],
total_entries: 1
}
response.body.should == expected_response
end
end
it 'can fetch number of shared tables, maps and users when a table is shared' do
bypass_named_maps
table_user_2 = create_table_with_options(@org_user_2)
permission = CartoDB::Permission[Carto::Visualization.find(table_user_2['table_visualization'][:id]).permission.id]
permission.set_group_permission(@group_1, Carto::Permission::ACCESS_READONLY)
permission.save
get_json api_v1_user_groups_url(user_domain: @org_user_1.username, user_id: @org_user_1.id, api_key: @org_user_1.api_key, fetch_shared_tables_count: true, fetch_shared_maps_count: true, fetch_users: true), {}, @headers do |response|
response.status.should == 200
expected_response = {
groups: [
@group_1_json.merge({
'shared_tables_count' => 1,
'shared_maps_count' => 0,
'users' => [ @org_user_1_json ]
})
],
total_entries: 1
}
response.body.should == expected_response
end
end
end
end
it '#show returns a group' do
get_json api_v1_organization_groups_show_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: @group_1.id, api_key: @admin_user.api_key), { }, @headers do |response|
response.status.should == 200
response.body.should == @group_1_json.symbolize_keys
end
end
it '#show support fetch_shared_maps_count, fetch_shared_tables_count and fetch_users' do
get_json api_v1_organization_groups_show_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: @group_1.id, api_key: @admin_user.api_key, fetch_shared_tables_count: true, fetch_shared_maps_count: true, fetch_users: true), { }, @headers do |response|
response.status.should == 200
response.body[:shared_tables_count].should_not be_nil
response.body[:shared_maps_count].should_not be_nil
response.body[:users].should_not be_nil
end
end
it '#create fails if user is not owner' do
post_json api_v1_organization_groups_create_url(user_domain: @org_user_1.username, organization_id: @carto_organization.id, api_key: @org_user_1.api_key), { display_name: 'willfail' }, @headers do |response|
response.status.should == 400
end
end
it '#create triggers group creation' do
display_name = 'a new group'
name = 'a new group'
# Replacement for extension interaction
fake_database_role = 'fake_database_role'
fake_group_creation = Carto::Group.new_instance(@carto_organization.database_name, name, fake_database_role)
fake_group_creation.save
Carto::Group.expects(:create_group_extension_query).with(anything, name).returns(fake_group_creation)
post_json api_v1_organization_groups_create_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, api_key: @admin_user.api_key), { display_name: display_name }, @headers do |response|
response.status.should == 200
response.body[:id].should_not be_nil
response.body[:organization_id].should == @carto_organization.id
response.body[:name].should == name
response.body[:display_name].should == display_name
# Also check database data because Group changes something after extension interaction
new_group = Carto::Group.find(response.body[:id])
new_group.organization_id.should == @carto_organization.id
new_group.name.should == name
new_group.display_name.should == display_name
new_group.database_role.should_not be_nil
end
end
it '#update triggers group renaming' do
group = @carto_organization.groups.first
new_display_name = 'A Group %Renamed'
expected_new_name = 'A Group _Renamed'
Carto::Group.expects(:rename_group_extension_query).with(anything, group.name, expected_new_name)
put_json api_v1_organization_groups_update_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key), { display_name: new_display_name }, @headers do |response|
response.status.should == 200
response.body[:id].should_not be_nil
response.body[:organization_id].should == @carto_organization.id
# INFO: since test doesn't actually trigger the extension we only check expectation on renaming call and display name
response.body[:display_name].should == new_display_name
response.body[:shared_tables_count].should_not be_nil
response.body[:shared_maps_count].should_not be_nil
response.body[:users].should_not be_nil
# Also check database data because Group changes something after extension interaction
new_group = Carto::Group.find(response.body[:id])
new_group.organization_id.should == @carto_organization.id
new_group.display_name.should == new_display_name
new_group.database_role.should_not be_nil
end
end
it '#update returns 409 and a meaningful error message if there is a group with the same name within the organization' do
group = @carto_organization.groups[0]
group_2 = @carto_organization.groups[1]
Carto::Group.expects(:rename_group_extension_query).with(anything, anything, anything).never
put_json api_v1_organization_groups_update_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key), { display_name: group_2.display_name }, @headers do |response|
response.status.should == 409
response.body[:errors][0].should match /A group with that name already exists/
end
end
it '#add_users triggers group inclusion' do
group = @carto_organization.groups.first
user = @org_user_1
Carto::Group.expects(:add_users_group_extension_query).with(anything, group.name, [user.username])
post_json api_v1_organization_groups_add_users_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key), { user_id: user.id }, @headers do |response|
response.status.should == 200
# INFO: since test doesn't actually trigger the extension we only check expectation on membership call
end
end
it '#remove_users triggers group exclusion' do
group = @carto_organization.groups.first
user = @carto_org_user_1
group.users << user
group.save
group.reload
group.users.include?(user)
Carto::Group.expects(:remove_users_group_extension_query).with(anything, group.name, [user.username])
delete_json api_v1_organization_groups_remove_users_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key, user_id: user.id), {}, @headers do |response|
response.status.should == 200
# INFO: since test doesn't actually trigger the extension we only check expectation on membership call
end
end
it '#add_users allows batches and triggers group inclusion' do
group = @carto_organization.groups.first
user_1 = @org_user_1
user_2 = @org_user_2
Carto::Group.expects(:add_users_group_extension_query).with(anything, group.name, [user_1.username, user_2.username])
post_json api_v1_organization_groups_add_users_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key), { users: [ user_1.id, user_2.id ] }, @headers do |response|
response.status.should == 200
# INFO: since test doesn't actually trigger the extension we only check expectation on membership call
end
end
it '#remove_users allows batches and triggers group exclusion' do
group = @carto_organization.groups.first
user_1 = @carto_org_user_1
user_2 = @carto_org_user_2
group.users << user_2
group.save
group.reload
group.users.include?(user_1)
group.users.include?(user_2)
Carto::Group.expects(:remove_users_group_extension_query).with(anything, group.name, [user_1.username, user_2.username])
delete_json api_v1_organization_groups_remove_users_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key), { users: [ user_1.id, user_2.id ] }, @headers do |response|
response.status.should == 200
# INFO: since test doesn't actually trigger the extension we only check expectation on membership call
end
end
it '#drops triggers deletion of existing groups' do
group = @carto_organization.groups.first
Carto::Group.expects(:destroy_group_extension_query).with(anything, group.name)
delete_json api_v1_organization_groups_destroy_url(user_domain: @admin_user.username, organization_id: @carto_organization.id, group_id: group.id, api_key: @admin_user.api_key), { }, @headers do |response|
response.status.should == 204
# Extension is simulated, so we delete the group manually
group.delete
end
end
end
# Runs the shared 'Groups editor management' examples authenticated as the
# organization owner.
describe 'with organization owner' do
  it_behaves_like 'Groups editor management' do
    before(:all) do
      @admin_user = @organization.owner
    end
  end
end
# Runs the shared 'Groups editor management' examples authenticated as a
# regular org user promoted to org admin.
describe 'with organization admin' do
  it_behaves_like 'Groups editor management' do
    before(:all) do
      @org_user_2.org_admin = true
      @org_user_2.save
      @admin_user = @org_user_2
    end
  end
end
end
|
require "rails/all"
require 'action_view'
module Glass
  # ActionView-backed renderer for Glass timeline-item templates, rooted at
  # a configurable view directory.
  class Template < ActionView::Base
    # include ::Rails.application.routes.url_helpers
    # include ::ActionView::Helpers::TagHelper
    # attr_accessor :timeline_item, :template, :extension

    # @param opts [Hash] values exposed to the template; each key/value pair
    #   becomes an instance variable (e.g. { timeline_item: item } => @timeline_item).
    def initialize(opts = {})
      opts.each do |key, value|
        # BUG FIX: instance_variable_set requires the "@" prefix; the bare
        # key raised NameError ("`foo' is not allowed as an instance
        # variable name") for every option passed in.
        instance_variable_set(:"@#{key}", value)
      end

      # Prefer the host app's configured template path; fall back to the
      # gem's conventional view directory.
      if glass_template_path.present?
        super(Rails.root.join(glass_template_path))
      else
        super(Rails.root.join("app", "views", "glass-templates"))
      end
    end

    private

    # Template directory configured on the Glass module (may be nil/blank).
    def glass_template_path
      ::Glass.glass_template_path
    end
  end
end
template
require "rails/all"
require 'action_view'
module Glass
  # ActionView-backed renderer for Glass timeline-item templates.
  #
  # Every entry in +opts+ is exposed to the template as an instance variable
  # (@key = value). The view root is either the path configured on the Glass
  # module or the gem's conventional "app/views/glass-templates" directory.
  class Template < ActionView::Base
    # include ::Rails.application.routes.url_helpers
    # include ::ActionView::Helpers::TagHelper
    # attr_accessor :timeline_item, :template, :extension
    def initialize(opts = {})
      opts.each { |key, value| instance_variable_set("@#{key}".to_sym, value) }

      template_root =
        if glass_template_path.present?
          Rails.root.join(glass_template_path)
        else
          Rails.root.join("app", "views", "glass-templates")
        end
      super(template_root)
    end

    private

    # Template directory configured on the Glass module (may be blank).
    def glass_template_path
      ::Glass.glass_template_path
    end
  end
end
# Omnibus software definition: builds Git 1.9.1 from source into the
# package's embedded tree.
name "git"
default_version "1.9.1"

# Libraries Git links against; each is another omnibus software definition
# installed under #{install_dir}/embedded.
dependency "curl"
dependency "zlib"
dependency "openssl"
dependency "pcre"
dependency "libiconv"
dependency "expat"
dependency "perl"

relative_path "git-#{version}"

source :url => "https://github.com/git/git/archive/v#{version}.tar.gz",
       :md5 => "906f984f5c8913176547dc456608be16"

# Build environment steering Git's Makefile toward the embedded toolchain
# and disabling optional subsystems (gettext, python, tcl/tk).
# NOTE(review): LDFLAGS carries a -I include flag and CFLAGS carries a -L
# library flag; the compiler driver tolerates both, but confirm this was
# intentional rather than a copy/paste swap.
env = {
  "LDFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "CFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "LD_RUN_PATH" => "#{install_dir}/embedded/lib",
  "NO_GETTEXT" => "1",
  "NO_PYTHON" => "1",
  "NO_TCLTK" => "1",
  "NO_R_TO_GCC_LINKER" => "1",
  "NEEDS_LIBICONV" => "1",
  "PERL_PATH" => "#{install_dir}/embedded/bin/perl",
  "ZLIB_PATH" => "#{install_dir}/embedded",
  "ICONVDIR" => "#{install_dir}/embedded",
  "OPENSSLDIR" => "#{install_dir}/embedded",
  "EXPATDIR" => "#{install_dir}/embedded",
  "CURLDIR" => "#{install_dir}/embedded",
  "LIBPCREDIR" => "#{install_dir}/embedded"
}

build do
  # Compile and install with the embedded prefix so `git` lands in
  # #{install_dir}/embedded/bin.
  command "make -j #{max_build_jobs} prefix=#{install_dir}/embedded", :env => env
  command "make install prefix=#{install_dir}/embedded", :env => env
end
Cleanup git
#
# Copyright 2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition: builds Git 1.9.1 from source into the
# package's embedded tree.
name "git"
default_version "1.9.1"

# Libraries Git links against; each is another omnibus software definition
# installed under #{install_dir}/embedded.
dependency "curl"
dependency "zlib"
dependency "openssl"
dependency "pcre"
dependency "libiconv"
dependency "expat"
dependency "perl"

relative_path "git-#{version}"

source url: "https://github.com/git/git/archive/v#{version}.tar.gz",
       md5: "906f984f5c8913176547dc456608be16"

build do
  # Makefile knobs: point Git at the embedded libraries and disable the
  # optional gettext/python/tcl-tk subsystems.
  env = {
    "NO_GETTEXT" => "1",
    "NO_PYTHON" => "1",
    "NO_TCLTK" => "1",
    "NO_R_TO_GCC_LINKER" => "1",
    "NEEDS_LIBICONV" => "1",
    "PERL_PATH" => "#{install_dir}/embedded/bin/perl",
    "ZLIB_PATH" => "#{install_dir}/embedded",
    "ICONVDIR" => "#{install_dir}/embedded",
    "OPENSSLDIR" => "#{install_dir}/embedded",
    "EXPATDIR" => "#{install_dir}/embedded",
    "CURLDIR" => "#{install_dir}/embedded",
    "LIBPCREDIR" => "#{install_dir}/embedded",
  }
  # Merge in omnibus's standard CFLAGS/LDFLAGS for the embedded toolchain.
  env = with_standard_compiler_flags(env)

  command "make -j #{max_build_jobs} prefix=#{install_dir}/embedded", env: env
  command "make install prefix=#{install_dir}/embedded", env: env
end
|
require 'shoes/spec_helper'
describe Shoes::Color do
# Shoes::Color.create is a polymorphic constructor: Color instances pass
# through unchanged, hex strings are parsed into an equivalent Color.
describe ".create" do
  let(:color) { Shoes::Color.new(40, 50, 60) }
  it "accepts color" do
    Shoes::Color.create(color).should eq(color)
  end
  it "accepts string" do
    # "28323c" is the hex encoding of rgb(40, 50, 60)
    Shoes::Color.create("28323c").should eq(color)
  end
end
shared_examples_for "black" do
its(:class) { should eq(Shoes::Color) }
its(:red) { should eq(0) }
its(:green) { should eq(0) }
its(:blue) { should eq(0) }
its(:hex) { should eq("#000000") }
it { should be_black }
it { should_not be_white }
end
shared_examples "color with bad arguments" do
it "raises ArgumentError" do
subject.should raise_error(ArgumentError)
end
end
context "with wrong number of arguments" do
subject { lambda { Shoes::Color.new(10, 10) } }
it_behaves_like "color with bad arguments"
end
context "with too many hex chars" do
subject { lambda { Shoes::Color.new("a1b2c3d") } }
it_behaves_like "color with bad arguments"
end
context "with too few hex chars" do
subject { lambda { Shoes::Color.new("a1") } }
it_behaves_like "color with bad arguments"
end
context "with invalid hex chars" do
subject { lambda { Shoes::Color.new("#g01234") } }
it_behaves_like "color with bad arguments"
end
context "hex" do
let(:rgb) { Shoes::Color.new(0, 0, 0, 255) }
context "with '#000000'" do
subject { Shoes::Color.new("#000000") }
it { should eq(rgb) }
end
context "with '000000'" do
subject { Shoes::Color.new("000000") }
it { should eq(rgb) }
end
context "with '000'" do
subject { Shoes::Color.new("000") }
it { should eq(rgb) }
end
context "with '#000'" do
subject { Shoes::Color.new("#000") }
it { should eq(rgb) }
end
context "with '#FFF'" do
let(:rgb) { Shoes::Color.new(255, 255, 255) }
subject { Shoes::Color.new("#FFF") }
it { should eq(rgb) }
end
context "with '#fff'" do
let(:rgb) { Shoes::Color.new(255, 255, 255) }
subject { Shoes::Color.new("#fff") }
it { should eq(rgb) }
end
end
context "rgb" do
context "black" do
context "with optional alpha" do
subject { Shoes::Color.new(0, 0, 0, 0) }
it_behaves_like "black"
its(:alpha) { should eq(0) }
end
context "without optional alpha" do
subject { Shoes::Color.new(0, 0, 0) }
it_behaves_like "black"
its(:alpha) { should eq(255) }
end
context "using floats" do
context "with optional alpha" do
subject { Shoes::Color.new(0.0, 0.0, 0.0, 0.0) }
it_behaves_like "black"
its(:alpha) { should eq(0) }
end
context "without optional alpha" do
subject { Shoes::Color.new(0.0, 0.0, 0.0) }
it_behaves_like "black"
its(:alpha) { should eq(255) }
end
end
end
context "white" do
subject { Shoes::Color.new(255, 255, 255) }
it { should be_white }
it { should_not be_black }
end
context "peru" do
shared_examples_for "peru" do
its(:class) { should eq(Shoes::Color) }
its(:red) { should eq(205) }
its(:green) { should eq(133) }
its(:blue) { should eq(63) }
it { should_not be_black }
it { should_not be_white }
end
context "with optional alpha" do
subject { Shoes::Color.new(205, 133, 63, 100) }
it_behaves_like("peru")
its(:alpha) { should eq(100) }
end
context "without optional alpha" do
subject { Shoes::Color.new(205, 133, 63) }
it_behaves_like("peru")
its(:alpha) { should eq(255) }
end
context "using floats" do
let(:red) { 0.805 }
let(:green) { 0.52 }
let(:blue) { 0.248 }
let(:alpha) { 0.392 }
context "with optional alpha" do
subject { Shoes::Color.new(red, green, blue, alpha) }
it_behaves_like "peru"
its(:alpha) { should eq(100) }
end
context "without optional alpha" do
subject { Shoes::Color.new(red, green, blue) }
it_behaves_like "peru"
its(:alpha) { should eq(255) }
end
end
end
describe "light and dark" do
let(:lightgreen) { Shoes::Color.new(144, 238, 144) }
let(:darkgreen) { Shoes::Color.new(0, 100, 0) }
let(:mediumseagreen) { Shoes::Color.new(60, 179, 113) }
specify "light color is light" do
lightgreen.should be_light
mediumseagreen.should_not be_light
darkgreen.should_not be_light
end
specify "dark color is dark" do
lightgreen.should_not be_dark
mediumseagreen.should_not be_dark
darkgreen.should be_dark
end
end
describe "transparency" do
let(:transparent) { Shoes::Color.new(25, 25, 112, 0) }
let(:semi) { Shoes::Color.new(25, 25, 112, 100) }
let(:opaque) { Shoes::Color.new(25, 25, 25) }
specify "only transparent colors are transparent" do
transparent.should be_transparent
semi.should_not be_transparent
opaque.should_not be_transparent
end
specify "only opaque colors should be opaque" do
transparent.should_not be_opaque
semi.should_not be_opaque
opaque.should be_opaque
end
end
describe "comparable" do
let(:color_1) { Shoes::Color.new(255, 69, 0) } # orangered
it "is equal when values are equal" do
color_2 = Shoes::Color.new(255, 69, 0)
color_1.should eq(color_2)
end
it "is less than when darker" do
color_2 = Shoes::Color.new(255, 70, 0)
color_1.should be < color_2
end
it "is greater than when lighter" do
color_2 = Shoes::Color.new(255, 68, 0)
color_1.should be > color_2
end
context "same rgb values" do
let(:color_2) { Shoes::Color.new(255, 69, 0, 254) }
it "is less than when less opaque" do
color_2.should be < color_1
end
it "is greater than when more opaque" do
color_1.should be > color_2
end
end
end
end
end
describe "Shoes built-in colors" do
specify "there are 140" do
Shoes::COLORS.length.should eq(140)
end
class MockApp
include Shoes::DSL
end
subject { MockApp.new }
its(:papayawhip) { should eq(Shoes::Color.new(255, 239, 213)) }
its(:aquamarine) { should eq(Shoes::Color.new(127, 255, 212)) }
its(:tomato) { should eq(Shoes::Color.new(255, 99, 71)) }
end
describe "Shoes built in gray" do
let(:app) { Shoes::App.new }
it "creates a dsl method for gray" do
app.should respond_to(:gray)
end
specify "gray with no parameters is [128, 128, 128, OPAQUE]" do
app.gray.should eq(Shoes::Color.new(128, 128, 128))
end
specify "single parameter specifies the gray level" do
app.gray(64).should eq(Shoes::Color.new(64, 64, 64))
end
specify "two parameters specifies the gray level and opacity" do
app.gray(13, 57).should eq(Shoes::Color.new(13, 13, 13, 57))
end
specify "float parameters should be normalised" do
app.gray(1.0, 0.5).should eq(Shoes::Color.new( 255, 255, 255, 128 ))
end
end
# Differences between this implementation and Red Shoes
describe "differences from Red Shoes" do
let(:white) { Shoes::Color.new(255, 255, 255) }
let(:transparent_black) { Shoes::Color.new(0, 0, 0, 0) }
context "integers" do
specify "too-large values become 255" do
Shoes::Color.new(256, 256, 256, 256).should eq(white)
end
specify "too-small values become 0" do
Shoes::Color.new(-1, -1, -1, -1).should eq(transparent_black)
end
end
context "floats" do
specify "too-large values become 255" do
Shoes::Color.new(1.1, 1.1, 1.1, 1.1).should eq(white)
end
specify "too-small values become 0" do
Shoes::Color.new(-0.1, -0.1, -0.1, -0.1).should eq(transparent_black)
end
end
# These specifications describe how this implementation differs from Red Shoes.
# These are examples of what Red Shoes _does_ do, and what this implementation
# _does not_ do.
describe "unusual input" do
let(:baseline) { Shoes::Color.new(50, 0, 200) }
describe "too-large values" do
specify "red does not get modulo-256'd into bounds" do
Shoes::Color.new(306, 0, 200).should_not eq(baseline)
Shoes::Color.new(1.197, 0, 200).should_not eq(baseline)
end
specify "green does not get modulo-256'd into bounds" do
Shoes::Color.new(50, 256, 200).should_not eq(baseline)
Shoes::Color.new(50, 2.005, 200).should_not eq(baseline)
end
specify "blue does not get modulo-256'd into bounds" do
Shoes::Color.new(50, 0, 456).should_not eq(baseline)
Shoes::Color.new(50, 0, 2.7913137254902).should_not eq(baseline)
end
end
describe "negative values" do
specify "-1 does not become 255" do
Shoes::Color.new(-1, -1, -1, -1).should_not eq(Shoes::Color.new(255, 255, 255))
end
specify "256 and neighbors" do
Shoes::Color.new(-256, -255, -257).should_not eq(Shoes::Color.new(0, 1, 255))
end
specify "float behaviour" do
Shoes::Color.new(-1.0, -0.5, -0.0).should_not eq(Shoes::Color.new(0, 128, 1))
end
end
describe "edge cases" do
specify "0.0 does not become 1" do
Shoes::Color.new(0.0, 0.0, 0.0).should_not eq(Shoes::Color.new(1, 1, 1))
end
specify "1.0 does not become 0" do
Shoes::Color.new(1.0, 1.0, 1.0).should_not eq(Shoes::Color.new(0, 0, 0))
end
end
end
end
Add spec for gray 0.93 (new default color)
require 'shoes/spec_helper'
describe Shoes::Color do
describe ".create" do
let(:color) { Shoes::Color.new(40, 50, 60) }
it "accepts color" do
Shoes::Color.create(color).should eq(color)
end
it "accepts string" do
Shoes::Color.create("28323c").should eq(color)
end
end
shared_examples_for "black" do
its(:class) { should eq(Shoes::Color) }
its(:red) { should eq(0) }
its(:green) { should eq(0) }
its(:blue) { should eq(0) }
its(:hex) { should eq("#000000") }
it { should be_black }
it { should_not be_white }
end
shared_examples "color with bad arguments" do
it "raises ArgumentError" do
subject.should raise_error(ArgumentError)
end
end
context "with wrong number of arguments" do
subject { lambda { Shoes::Color.new(10, 10) } }
it_behaves_like "color with bad arguments"
end
context "with too many hex chars" do
subject { lambda { Shoes::Color.new("a1b2c3d") } }
it_behaves_like "color with bad arguments"
end
context "with too few hex chars" do
subject { lambda { Shoes::Color.new("a1") } }
it_behaves_like "color with bad arguments"
end
context "with invalid hex chars" do
subject { lambda { Shoes::Color.new("#g01234") } }
it_behaves_like "color with bad arguments"
end
context "hex" do
let(:rgb) { Shoes::Color.new(0, 0, 0, 255) }
context "with '#000000'" do
subject { Shoes::Color.new("#000000") }
it { should eq(rgb) }
end
context "with '000000'" do
subject { Shoes::Color.new("000000") }
it { should eq(rgb) }
end
context "with '000'" do
subject { Shoes::Color.new("000") }
it { should eq(rgb) }
end
context "with '#000'" do
subject { Shoes::Color.new("#000") }
it { should eq(rgb) }
end
context "with '#FFF'" do
let(:rgb) { Shoes::Color.new(255, 255, 255) }
subject { Shoes::Color.new("#FFF") }
it { should eq(rgb) }
end
context "with '#fff'" do
let(:rgb) { Shoes::Color.new(255, 255, 255) }
subject { Shoes::Color.new("#fff") }
it { should eq(rgb) }
end
end
context "rgb" do
context "black" do
context "with optional alpha" do
subject { Shoes::Color.new(0, 0, 0, 0) }
it_behaves_like "black"
its(:alpha) { should eq(0) }
end
context "without optional alpha" do
subject { Shoes::Color.new(0, 0, 0) }
it_behaves_like "black"
its(:alpha) { should eq(255) }
end
context "using floats" do
context "with optional alpha" do
subject { Shoes::Color.new(0.0, 0.0, 0.0, 0.0) }
it_behaves_like "black"
its(:alpha) { should eq(0) }
end
context "without optional alpha" do
subject { Shoes::Color.new(0.0, 0.0, 0.0) }
it_behaves_like "black"
its(:alpha) { should eq(255) }
end
end
end
context "white" do
subject { Shoes::Color.new(255, 255, 255) }
it { should be_white }
it { should_not be_black }
end
context "peru" do
shared_examples_for "peru" do
its(:class) { should eq(Shoes::Color) }
its(:red) { should eq(205) }
its(:green) { should eq(133) }
its(:blue) { should eq(63) }
it { should_not be_black }
it { should_not be_white }
end
context "with optional alpha" do
subject { Shoes::Color.new(205, 133, 63, 100) }
it_behaves_like("peru")
its(:alpha) { should eq(100) }
end
context "without optional alpha" do
subject { Shoes::Color.new(205, 133, 63) }
it_behaves_like("peru")
its(:alpha) { should eq(255) }
end
context "using floats" do
let(:red) { 0.805 }
let(:green) { 0.52 }
let(:blue) { 0.248 }
let(:alpha) { 0.392 }
context "with optional alpha" do
subject { Shoes::Color.new(red, green, blue, alpha) }
it_behaves_like "peru"
its(:alpha) { should eq(100) }
end
context "without optional alpha" do
subject { Shoes::Color.new(red, green, blue) }
it_behaves_like "peru"
its(:alpha) { should eq(255) }
end
end
end
describe "light and dark" do
let(:lightgreen) { Shoes::Color.new(144, 238, 144) }
let(:darkgreen) { Shoes::Color.new(0, 100, 0) }
let(:mediumseagreen) { Shoes::Color.new(60, 179, 113) }
specify "light color is light" do
lightgreen.should be_light
mediumseagreen.should_not be_light
darkgreen.should_not be_light
end
specify "dark color is dark" do
lightgreen.should_not be_dark
mediumseagreen.should_not be_dark
darkgreen.should be_dark
end
end
describe "transparency" do
let(:transparent) { Shoes::Color.new(25, 25, 112, 0) }
let(:semi) { Shoes::Color.new(25, 25, 112, 100) }
let(:opaque) { Shoes::Color.new(25, 25, 25) }
specify "only transparent colors are transparent" do
transparent.should be_transparent
semi.should_not be_transparent
opaque.should_not be_transparent
end
specify "only opaque colors should be opaque" do
transparent.should_not be_opaque
semi.should_not be_opaque
opaque.should be_opaque
end
end
describe "comparable" do
let(:color_1) { Shoes::Color.new(255, 69, 0) } # orangered
it "is equal when values are equal" do
color_2 = Shoes::Color.new(255, 69, 0)
color_1.should eq(color_2)
end
it "is less than when darker" do
color_2 = Shoes::Color.new(255, 70, 0)
color_1.should be < color_2
end
it "is greater than when lighter" do
color_2 = Shoes::Color.new(255, 68, 0)
color_1.should be > color_2
end
context "same rgb values" do
let(:color_2) { Shoes::Color.new(255, 69, 0, 254) }
it "is less than when less opaque" do
color_2.should be < color_1
end
it "is greater than when more opaque" do
color_1.should be > color_2
end
end
end
end
end
describe "Shoes built-in colors" do
specify "there are 140" do
Shoes::COLORS.length.should eq(140)
end
class MockApp
include Shoes::DSL
end
subject { MockApp.new }
its(:papayawhip) { should eq(Shoes::Color.new(255, 239, 213)) }
its(:aquamarine) { should eq(Shoes::Color.new(127, 255, 212)) }
its(:tomato) { should eq(Shoes::Color.new(255, 99, 71)) }
end
# The `gray` DSL method: gray(level = 128, alpha = OPAQUE), where float
# arguments are normalised from 0.0..1.0 into 0..255.
describe "Shoes built in gray" do
  let(:app) { Shoes::App.new }

  it "creates a dsl method for gray" do
    app.should respond_to(:gray)
  end

  specify "gray with no parameters is [128, 128, 128, OPAQUE]" do
    app.gray.should eq(Shoes::Color.new(128, 128, 128))
  end

  specify "single parameter specifies the gray level" do
    app.gray(64).should eq(Shoes::Color.new(64, 64, 64))
  end

  specify "two parameters specifies the gray level and opacity" do
    app.gray(13, 57).should eq(Shoes::Color.new(13, 13, 13, 57))
  end

  specify "float parameters should be normalised" do
    app.gray(1.0, 0.5).should eq(Shoes::Color.new(255, 255, 255, 128))
  end

  # FIX: typo in the example description ("hangles" -> "handles").
  it 'handles 0.93 right as well' do
    result_93 = (0.93 * 255).to_i
    expect(app.gray(0.93)).to eq(Shoes::Color.new(result_93, result_93, result_93))
  end
end
# Differences between this implementation and Red Shoes
describe "differences from Red Shoes" do
let(:white) { Shoes::Color.new(255, 255, 255) }
let(:transparent_black) { Shoes::Color.new(0, 0, 0, 0) }
context "integers" do
specify "too-large values become 255" do
Shoes::Color.new(256, 256, 256, 256).should eq(white)
end
specify "too-small values become 0" do
Shoes::Color.new(-1, -1, -1, -1).should eq(transparent_black)
end
end
context "floats" do
specify "too-large values become 255" do
Shoes::Color.new(1.1, 1.1, 1.1, 1.1).should eq(white)
end
specify "too-small values become 0" do
Shoes::Color.new(-0.1, -0.1, -0.1, -0.1).should eq(transparent_black)
end
end
# These specifications describe how this implementation differs from Red Shoes.
# These are examples of what Red Shoes _does_ do, and what this implementation
# _does not_ do.
describe "unusual input" do
let(:baseline) { Shoes::Color.new(50, 0, 200) }
describe "too-large values" do
specify "red does not get modulo-256'd into bounds" do
Shoes::Color.new(306, 0, 200).should_not eq(baseline)
Shoes::Color.new(1.197, 0, 200).should_not eq(baseline)
end
specify "green does not get modulo-256'd into bounds" do
Shoes::Color.new(50, 256, 200).should_not eq(baseline)
Shoes::Color.new(50, 2.005, 200).should_not eq(baseline)
end
specify "blue does not get modulo-256'd into bounds" do
Shoes::Color.new(50, 0, 456).should_not eq(baseline)
Shoes::Color.new(50, 0, 2.7913137254902).should_not eq(baseline)
end
end
describe "negative values" do
specify "-1 does not become 255" do
Shoes::Color.new(-1, -1, -1, -1).should_not eq(Shoes::Color.new(255, 255, 255))
end
specify "256 and neighbors" do
Shoes::Color.new(-256, -255, -257).should_not eq(Shoes::Color.new(0, 1, 255))
end
specify "float behaviour" do
Shoes::Color.new(-1.0, -0.5, -0.0).should_not eq(Shoes::Color.new(0, 128, 1))
end
end
describe "edge cases" do
specify "0.0 does not become 1" do
Shoes::Color.new(0.0, 0.0, 0.0).should_not eq(Shoes::Color.new(1, 1, 1))
end
specify "1.0 does not become 0" do
Shoes::Color.new(1.0, 1.0, 1.0).should_not eq(Shoes::Color.new(0, 0, 0))
end
end
end
end
|
# Production environment configuration for the Detentionlogs app.
Detentionlogs::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # NOTE(review): the comment below says "don't fallback", yet compile is
  # true, which enables live compilation as a fallback. Confirm whether
  # this should be false in production.
  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = true

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to nil and saved in location specified by config.assets.prefix
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Prepend all log lines with the following tags
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # SECURITY FIX: live AWS access key and secret were committed here in
  # plain text. Those credentials must be revoked; supply replacements via
  # the environment instead of the repository.
  config.paperclip_defaults = {
    :storage => :s3,
    :s3_credentials => {
      :bucket => 'detention_logs',
      :access_key_id => ENV['S3_ACCESS_KEY_ID'],
      :secret_access_key => ENV['S3_SECRET_ACCESS_KEY']
    }
  }

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  # config.active_record.auto_explain_threshold_in_seconds = 0.5
end
revoke and remove credentials
# Production environment configuration for the Detentionlogs app.
Detentionlogs::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # NOTE(review): the comment below says "don't fallback", yet compile is
  # true, which enables live compilation as a fallback. Confirm whether
  # this should be false in production.
  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = true

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to nil and saved in location specified by config.assets.prefix
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Prepend all log lines with the following tags
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # S3 storage for Paperclip attachments; credentials come from the
  # environment (never from the repository).
  # BUG FIX: the two hash entries below were missing trailing commas,
  # which is a SyntaxError and prevented the app from booting.
  config.paperclip_defaults = {
    :storage => :s3,
    :s3_credentials => {
      :bucket => ENV['S3_BUCKET'],
      :access_key_id => ENV['S3_ACCESS_KEY_ID'],
      :secret_access_key => ENV['S3_SECRET_ACCESS_KEY']
    }
  }

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  # config.active_record.auto_explain_threshold_in_seconds = 0.5
end
|
# encoding: utf-8
require_relative '../../../spec_helper'
require_relative '../../../../app/controllers/carto/api/layers_controller'
require 'helpers/unique_names_helper'
describe Carto::Api::LayersController do
include UniqueNamesHelper
describe '#refactored tests' do
include Rack::Test::Methods
include Warden::Test::Helpers
include CacheHelper
include Carto::Factories::Visualizations
include_context 'users helper'
describe '#operations' do
after(:each) do
destroy_full_visualization(@map, @table, @table_visualization, @visualization)
@layer.destroy if @layer
@layer2.destroy if @layer2
end
let(:kind) { 'carto' }
let(:create_layer_url) do
api_v1_users_layers_create_url(user_domain: @user1.username, user_id: @user1.id, api_key: @user1.api_key)
end
# URL helpers for the map-scoped layer endpoints, always authenticated as
# @user1 via api_key.
def create_map_layer_url(map_id)
  api_v1_maps_layers_create_url(user_domain: @user1.username, map_id: map_id, api_key: @user1.api_key)
end

# layer_id may be nil: the batch-update endpoint takes PUT without an id.
def update_map_layer_url(map_id, layer_id = nil)
  api_v1_maps_layers_update_url(
    user_domain: @user1.username,
    map_id: map_id,
    id: layer_id,
    api_key: @user1.api_key
  )
end

def delete_map_layer_url(map_id, layer_id)
  api_v1_maps_layers_destroy_url(
    user_domain: @user1.username,
    map_id: map_id,
    id: layer_id,
    api_key: @user1.api_key
  )
end
let(:layer_json) do
{ kind: kind, options: { table_name: nil, user_name: nil }, order: 1, infowindow: {}, tooltip: {} }
end
it 'creates layers' do
post_json create_layer_url, layer_json do |response|
response.status.should eq 200
layer_response = response.body
layer_response.delete(:id).should_not be_nil
layer_response.should eq layer_json
end
end
it 'creates layers on maps' do
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1)
# Let's make room for another layer of the same kind
destroyed_layer = @map.layers.where(kind: layer_json[:kind]).first
destroyed_layer.destroy if destroyed_layer
post_json create_map_layer_url(@map.id), layer_json.merge(options: { table_name: @table.name }) do |response|
response.status.should eq 200
layer_response = response.body
layer_id = layer_response.delete(:id)
layer_id.should_not be_nil
layer_response.delete(:options).should eq ({ table_name: @table.name })
layer_response.should eq layer_json.except(:options)
@layer = Carto::Layer.find(layer_id)
@layer.maps.map(&:id).first.should eq @map.id
end
end
it 'registers table dependencies when creating a layer for a map' do
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1)
# Let's make room for another layer of the same kind
destroyed_layer = @map.layers.where(kind: layer_json[:kind]).first
destroyed_layer.destroy if destroyed_layer
post_json create_map_layer_url(@map.id), layer_json.merge(options: { table_name: @table.name }) do |response|
response.status.should eq 200
layer_response = response.body
@layer = Carto::Layer.find(layer_response[:id])
@layer.user_tables.should eq [@table]
end
end
it 'does not allow to exceed max_layers' do
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1)
@user1.max_layers = 1
@user1.save
post_json create_map_layer_url(@map.id), layer_json.merge(kind: 'tiled', order: 10) do |response|
response.status.to_s.should match /4../ # 422 in new, 403 in old
end
end
it 'updates one layer' do
map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
@layer = map.layers.first
new_order = 2
new_layer_json = layer_json.merge(
options: { random: '1' },
order: new_order
)
put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
response.status.should eq 200
layer_response = response.body
layer_response[:id].should eq @layer.id
layer_response[:options].should eq new_layer_json[:options]
layer_response[:order].should eq new_order
end
end
it 'register table dependencies when updating layers' do
map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
@layer = map.layers.first
new_order = 2
new_layer_json = layer_json.merge(
options: { random: '1' },
order: new_order
)
Carto::Layer.any_instance.expects(:register_table_dependencies).once
put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
response.status.should eq 200
end
end
it 'updates several layers at once' do
map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
@layer = map.layers.first
@layer2 = map.layers[1]
new_order = 2
new_layer_json = layer_json.merge(
options: { 'random' => '1' },
order: new_order
)
new_layers_json = {
layers: [
new_layer_json.merge(id: @layer.id),
new_layer_json.merge(id: @layer2.id)
]
}
put_json update_map_layer_url(map.id), new_layers_json do |response|
response.status.should eq 200
layer_response = response.body
layer_response[:layers].map { |l| l['id'] }.should eq [@layer.id, @layer2.id]
layer_response[:layers].each do |layer|
layer['options'].reject { |k| k == 'table_name' }.should eq new_layer_json[:options]
layer['order'].should eq new_order
end
end
end
it 'does not update table_name or users_name options' do
map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
@layer = map.data_layers.first
original_options = @layer.options.dup
new_layer_json = layer_json.merge(
options: { table_name: 'other_table_name', user_name: 'other_username' }
)
put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
response.status.should eq 200
layer_response = response.body
layer_response[:options].should eq original_options.slice(:table_name, :user_name).symbolize_keys
end
end
it 'does not remove table_name or users_name options' do
map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
@map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
@layer = map.data_layers.first
original_options = @layer.options.dup
new_layer_json = layer_json.merge(
options: {}
)
put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
response.status.should eq 200
layer_response = response.body
layer_response[:options].should eq original_options.slice(:table_name, :user_name).symbolize_keys
end
end
it 'destroys layers' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.layers.first
  # DELETE answers 204 (no content) and the layer must be gone from the DB.
  delete_json delete_map_layer_url(map.id, @layer.id), {} do |response|
    response.status.should eq 204
    Carto::Layer.exists?(@layer.id).should be_false
  end
end
end
describe 'creating a layer from an analysis node moves the style history' do
  # POSTs a new layer derived from @original_layer (via from_layer_id/from_letter)
  # and returns the persisted Carto::Layer.
  def create_layer(new_source, new_letter, from_letter)
    url = api_v1_maps_layers_create_url(user_domain: @user2.username, map_id: @map.id, api_key: @user2.api_key)
    payload = {
      kind: 'carto',
      options: {
        source: new_source,
        letter: new_letter,
        table_name: @table.name,
        user_name: @user2.username
      },
      infowindow: {},
      tooltip: {},
      from_layer_id: @original_layer.id,
      from_letter: from_letter
    }
    post_json url, payload do |response|
      response.status.should eq 200
      layer_response = response.body
      Carto::Layer.find(layer_response[:id])
    end
  end

  before(:each) do
    @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user2)
    @original_layer = @map.data_layers.first
    @original_layer.options[:source] = 'a2'
    @original_layer.save
    # Start from a known style history: one LayerNodeStyle per analysis node,
    # each tagged with its original node id so moves/copies can be traced later.
    @original_layer.layer_node_styles.each(&:destroy)
    ['a2', 'a1', 'a0'].each do |node_id|
      LayerNodeStyle.create(
        layer_id: @original_layer.id,
        source_id: node_id,
        options: { original_id: node_id },
        infowindow: {},
        tooltip: {}
      )
    end
  end

  after(:each) do
    @layer.destroy if @layer
    destroy_full_visualization(@map, @table, @table_visualization, @visualization)
  end

  # Asserts the layer's node styles match the expected mapping.
  def verify_layer_node_styles(layer, styles_map)
    # Map original_source_id -> new_source_id
    layer.layer_node_styles.reload
    actual_styles_map = layer.layer_node_styles.map { |lns| [lns.options[:original_id], lns.source_id] }.to_h
    actual_styles_map.should eq styles_map
  end

  it 'when dragging an intermediate node' do
    # A new layer B is created (copy A1 -> B1, A0 -> B0) and the old one starts using it as a source (rename A1 -> B1)
    #
    #  _______      _______     ______
    # | A     |    | A     |   | B    |
    # |       |    |       |   |      |
    # | [A2]  |    | [A2]  |   |      |
    # | {A1}  | => | {B1}  |   | {B1} |
    # | [A0]  |    |       |   | [B0] |
    # |_______|    |_______|   |______|
    @new_layer = create_layer('b1', 'b', 'a')
    verify_layer_node_styles(@new_layer, nil => 'b1', 'a0' => 'b0')
    verify_layer_node_styles(@original_layer, 'a2' => 'a2', 'a1' => 'b1')
  end

  describe 'when dragging a header node' do
    # The original layer is renamed to B (rename A2 -> B1, A1 -> B1) and the new layer is named A (copy A1 and A0)
    # The rename and the layer creation are independent requests, so we have to handle
    # both possible orders of requests gracefully.
    #  _______      _______     ______
    # | A     |    | A     |   | B    |
    # |       |    |       |   |      |
    # | {A2}  | => |       |   | {B1} |
    # | [A1]  |    | [A1]  |   | [A1] |
    # | [A0]  |    | [A0]  |   |      |
    # |_______|    |_______|   |______|
    it 'and the original layer has been previously renamed' do
      # Rename goes through the old (Sequel) model, as the rename endpoint would.
      old_model_layer = ::Layer[@original_layer.id]
      old_model_layer.options['letter'] = 'b'
      old_model_layer.options['source'] = 'b1'
      old_model_layer.save
      @new_layer = create_layer('a1', 'a', 'a')
      verify_layer_node_styles(@new_layer, nil => 'a1', 'a0' => 'a0')
      verify_layer_node_styles(@original_layer, nil => 'b1', 'a1' => 'a1')
    end

    it 'and the original layer has not yet been renamed' do
      @new_layer = create_layer('a1', 'a', 'a')
      verify_layer_node_styles(@new_layer, nil => 'a1', 'a0' => 'a0')
      verify_layer_node_styles(@original_layer, 'a1' => 'a1')
    end
  end
end
describe "API 1.0 map layers management" do
before(:all) do
  Capybara.current_driver = :rack_test
  @user = create_user
end

before(:each) do
  bypass_named_maps
  delete_user_data @user
  host! "#{@user.username}.localhost.lan"
  @table = create_table(user_id: @user.id)
  @map = create_map(user_id: @user.id, table_id: @table.id)
  @table.reload
end

after(:all) do
  bypass_named_maps
  @user.destroy
end

# Every request in this describe authenticates with the user's API key.
let(:params) { { api_key: @user.api_key } }

it "Create a new layer associated to a map" do
  opts = { type: "GMapsBase", base_type: "roadmap", style: "null", order: "0", query_history: [] }
  infowindow = { fields: ['column1', 'column2', 'column3'] }
  data = { kind: 'gmapsbase', infowindow: infowindow, options: opts }
  post_json api_v1_maps_layers_create_url(params.merge(map_id: @map.id)), data do |response|
    response.status.should be_success
    @map.layers.size.should == 1
    response.body[:id].should == @map.layers.first.id
    response.body[:options].should == opts
    response.body[:infowindow].should == infowindow
    # order is submitted as the string "0" but echoed back as an integer.
    response.body[:order].should == 0
    response.body[:kind].should == 'gmapsbase'
  end
end

it "Get layer information" do
  layer = Layer.create(
    kind: 'carto',
    order: 1,
    options: { opt1: 'value' },
    infowindow: { fields: ['column1', 'column2'] },
    tooltip: { fields: ['column1', 'column3'] }
  )
  @map.add_layer layer
  get_json api_v1_maps_layers_show_url(params.merge(id: layer.id, map_id: @map.id)) do |response|
    response.status.should be_success
    response.body[:id].should eq layer.id
    response.body[:kind].should eq 'carto'
    response.body[:order].should eq 1
    response.body[:infowindow].should eq fields: ["column1", "column2"]
    response.body[:tooltip].should eq fields: ["column1", "column3"]
  end
end

it "Get all map layers" do
  layer = Layer.create kind: 'carto', order: 3
  layer2 = Layer.create kind: 'tiled', order: 2
  layer3 = Layer.create kind: 'tiled', order: 1
  @map.add_layer layer
  @map.add_layer layer2
  @map.add_layer layer3
  get_json api_v1_maps_layers_index_url(params.merge(map_id: @map.id)) do |response|
    response.status.should be_success
    response.body[:total_entries].should == 3
    response.body[:layers].size.should == 3
    # The index endpoint returns layers sorted by order, not insertion order.
    response.body[:layers][0]['id'].should == layer3.id
    response.body[:layers][1]['id'].should == layer2.id
    response.body[:layers][2]['id'].should == layer.id
  end
end

# see https://cartodb.atlassian.net/browse/CDB-3350
it "Update a layer" do
  layer = Layer.create kind: 'carto', order: 0
  @map.add_layer layer
  data = { options: { opt1: 'value' }, infowindow: { fields: ['column1', 'column2'] }, order: 3, kind: 'carto' }
  put_json api_v1_maps_layers_update_url(params.merge(id: layer.id, map_id: @map.id)), data do |response|
    response.status.should be_success
    response.body[:id].should == layer.id
    response.body[:options].should == { opt1: 'value' }
    response.body[:infowindow].should == { fields: ['column1', 'column2'] }
    response.body[:kind].should == 'carto'
    response.body[:order].should == 3
  end
end

it "Update several layers at once" do
  layer1 = Layer.create kind: 'carto', order: 0
  layer2 = Layer.create kind: 'carto', order: 1
  @map.add_layer layer1
  @map.add_layer layer2
  # Bulk update: no :id in the URL, layers identified inside the payload.
  data = { layers: [
    { id: layer1.id, options: { opt1: 'value' }, infowindow: { fields: ['column1'] }, order: 2, kind: 'carto' },
    { id: layer2.id, options: { opt1: 'value' }, infowindow: { fields: ['column1'] }, order: 3, kind: 'carto' }
  ] }
  put_json api_v1_maps_layers_update_url(params.merge(map_id: @map.id)), data do |response|
    response.status.should be_success
    response_layers = response.body[:layers]
    response_layers.count.should == 2
    response_layers.find { |l| l['id'] == layer1.id }['order'].should == 2
    response_layers.find { |l| l['id'] == layer2.id }['order'].should == 3
    # New order must also be persisted, not just echoed.
    layer1.reload.order.should == 2
    layer2.reload.order.should == 3
  end
end

it "Update a layer does not change table_name neither user_name" do
  layer = Layer.create kind: 'carto', order: 0, options: { table_name: 'table1', user_name: @user.username }
  @map.add_layer layer
  data = { options: { table_name: 't1', user_name: 'u1' }, order: 3, kind: 'carto' }
  put_json api_v1_maps_layers_update_url(params.merge(id: layer.id, map_id: @map.id)), data do |response|
    response.status.should be_success
    layer.options[:table_name].should == 'table1'
    layer.options[:user_name].should == @user.username
    response.body[:options].should == { table_name: 'table1', user_name: @user.username }
  end
end

# see https://cartodb.atlassian.net/browse/CDB-3350
it "Update a layer > tiler error" do
  layer = Layer.create kind: 'carto', order: 0
  @map.add_layer layer
  # Simulate a tiler failure on save; the update must roll back.
  Layer.any_instance.stubs(:after_save).raises(RuntimeError)
  Carto::Layer.any_instance.stubs(:invalidate_maps).raises(RuntimeError)
  data = { options: { opt1: 'value' }, infowindow: { fields: ['column1', 'column2'] }, order: 999, kind: 'carto' }
  put_json api_v1_maps_layers_update_url(params.merge(id: layer.id, map_id: @map.id)), data do |response|
    response.status.should eq 400
    layer.reload.order.should_not eq 999
  end
end
it "Drop a layer" do
  layer = Layer.create kind: 'carto'
  @map.add_layer layer
  delete_json api_v1_maps_layers_destroy_url(params.merge(id: layer.id, map_id: @map.id)) do |response|
    response.status.should eq 204
    # Refreshing a destroyed Sequel model must fail. Pin the error class:
    # a bare raise_error would pass on ANY exception and mask unrelated bugs.
    expect { layer.refresh }.to raise_error(Sequel::Error)
  end
end
end
describe "API 1.0 user layers management" do
before(:all) do
  Capybara.current_driver = :rack_test
  @user = create_user
end

before(:each) do
  bypass_named_maps
  delete_user_data @user
  host! "#{@user.username}.localhost.lan"
  @table = create_table(user_id: @user.id)
end

after(:all) do
  bypass_named_maps
  @user.destroy
end

# Every request in this describe authenticates with the user's API key.
let(:params) { { api_key: @user.api_key } }

it "Create a new layer associated to the current user" do
  opts = { kind: 'carto' }
  post_json api_v1_users_layers_create_url(params.merge(user_id: @user.id)), opts do |response|
    response.status.should be_success
    @user.layers.size.should eq 1
    response.body[:id].should eq @user.layers.first.id
  end
end

# see https://cartodb.atlassian.net/browse/CDB-3350
it "Update a layer" do
  layer = Layer.create kind: 'carto'
  @user.add_layer layer
  opts = { options: { opt1: 'value' }, infowindow: { fields: ['column1', 'column2'] }, kind: 'carto' }
  put_json api_v1_users_layers_update_url(params.merge(id: layer.id, user_id: @user.id)), opts do |response|
    response.status.should be_success
    response.body[:id].should eq layer.id
    response.body[:options].should eq opt1: 'value'
    response.body[:infowindow].should == { fields: ['column1', 'column2'] }
    response.body[:kind].should eq 'carto'
  end
end
it "Drop a layer" do
  layer = Layer.create kind: 'carto'
  @user.add_layer layer
  delete_json api_v1_users_layers_destroy_url(params.merge(id: layer.id, user_id: @user.id)) do |response|
    response.status.should eq 204
    # Refreshing a destroyed Sequel model must fail. Pin the error class:
    # a bare raise_error would pass on ANY exception and mask unrelated bugs.
    expect { layer.refresh }.to raise_error(Sequel::Error)
  end
end
end
end
# Named maps are stubbed for every example to avoid hitting the tiler.
before(:each) do
  bypass_named_maps
end
describe 'attribution changes' do
  include Rack::Test::Methods
  include Warden::Test::Helpers

  before(:all) do
    CartoDB::Visualization::Member.any_instance.stubs(:invalidate_cache).returns(nil)
    @headers = { 'CONTENT_TYPE' => 'application/json' }
    @user = FactoryGirl.create(:valid_user)
  end

  # NOTE(review): @user is created in before(:all) but destroyed in after(:each).
  # This only works because the describe has a single example — confirm before
  # adding more examples here.
  after(:each) do
    @user.destroy
  end

  it 'attribution changes in a visualization propagate to associated layers' do
    table_1_attribution = 'attribution 1'
    table_2_attribution = 'attribution 2'
    modified_table_2_attribution = 'modified attribution 2'
    table1 = create_table(privacy: UserTable::PRIVACY_PUBLIC, name: unique_name('table'), user_id: @user.id)
    table2 = create_table(privacy: UserTable::PRIVACY_PUBLIC, name: unique_name('table'), user_id: @user.id)
    payload = {
      name: 'new visualization',
      tables: [
        table1.name,
        table2.name
      ],
      privacy: 'public'
    }
    login_as(@user, scope: @user.username)
    host! "#{@user.username}.localhost.lan"
    # NOTE(review): @api_key is never assigned in this describe, so api_key is nil
    # here; authentication relies on login_as above — confirm this is intended.
    post api_v1_visualizations_create_url(api_key: @api_key), payload.to_json, @headers do |response|
      response.status.should eq 200
      @visualization_data = JSON.parse(response.body)
    end
    visualization = Carto::Visualization.find(@visualization_data.fetch('id'))
    # Set attributions on both canonical (table) visualizations.
    table1_visualization = CartoDB::Visualization::Member.new(id: table1.table_visualization.id).fetch
    table1_visualization.attributions = table_1_attribution
    table1_visualization.store
    table2_visualization = CartoDB::Visualization::Member.new(id: table2.table_visualization.id).fetch
    table2_visualization.attributions = table_2_attribution
    table2_visualization.store
    get_json api_v1_maps_layers_index_url(map_id: visualization.map.id, api_key: @api_key) do |response|
      response.status.should be_success
      @layers_data = response.body.with_indifferent_access
    end
    # Done this way to preserve the order
    data_layers = @layers_data['layers']
    data_layers.delete_if { |layer| layer['kind'] != 'carto' }
    data_layers.count.should eq 2
    data_layers.map { |l| l['options']['attribution'] }.sort
               .should eq [table_1_attribution, table_2_attribution]
    # Changing one attribution afterwards must propagate too.
    table2_visualization.attributions = modified_table_2_attribution
    table2_visualization.store
    get_json api_v1_maps_layers_index_url(map_id: visualization.map.id, api_key: @api_key) do |response|
      response.status.should be_success
      @layers_data = response.body.with_indifferent_access
    end
    data_layers = @layers_data['layers'].select { |layer| layer['kind'] == 'carto' }
    data_layers.count.should eq 2
    data_layers.map { |l| l['options']['attribution'] }.sort
               .should eq [table_1_attribution, modified_table_2_attribution]
  end
end
describe 'index' do
  include Rack::Test::Methods
  include Warden::Test::Helpers
  include CacheHelper
  include_context 'visualization creation helpers'
  include_context 'users helper'

  it 'fetches layers from shared visualizations' do
    CartoDB::Visualization::Member.any_instance.stubs(:invalidate_cache).returns(nil)
    @headers = { 'CONTENT_TYPE' => 'application/json' }
    # NOTE(review): this nested factory helper appears unused within this example —
    # confirm and consider removing.
    def factory(user, attributes = {})
      {
        name: attributes.fetch(:name, unique_name('viz')),
        tags: attributes.fetch(:tags, ['foo', 'bar']),
        map_id: attributes.fetch(:map_id, ::Map.create(user_id: user.id).id),
        description: attributes.fetch(:description, 'bogus'),
        type: attributes.fetch(:type, 'derived'),
        privacy: attributes.fetch(:privacy, 'public'),
        source_visualization_id: attributes.fetch(:source_visualization_id, nil),
        parent_id: attributes.fetch(:parent_id, nil),
        locked: attributes.fetch(:locked, false),
        prev_id: attributes.fetch(:prev_id, nil),
        next_id: attributes.fetch(:next_id, nil)
      }
    end
    # Three users: user_1 owns the table, user_2 gets read access, user_3 gets none.
    # NOTE(review): none of these users (nor the organization) are destroyed at the
    # end of the example — confirm leftover fixtures are acceptable here.
    user_1 = create_user(
      username: unique_name('user'),
      email: unique_email,
      password: 'clientex',
      private_tables_enabled: false
    )
    user_2 = create_user(
      username: unique_name('user'),
      email: unique_email,
      password: 'clientex',
      private_tables_enabled: false
    )
    user_3 = create_user(
      username: unique_name('user'),
      email: unique_email,
      password: 'clientex',
      private_tables_enabled: false
    )
    organization = Organization.new
    organization.name = unique_name('org')
    organization.quota_in_bytes = 1234567890
    organization.seats = 5
    organization.save
    organization.valid?.should eq true
    # user_1 becomes the organization owner; user_2 and user_3 join it.
    user_org = CartoDB::UserOrganization.new(organization.id, user_1.id)
    user_org.promote_user_to_admin
    organization.reload
    user_1.reload
    user_2.organization_id = organization.id
    user_2.save.reload
    organization.reload
    user_3.organization_id = organization.id
    user_3.save.reload
    organization.reload
    default_url_options[:host] = "#{user_2.subdomain}.localhost.lan"
    # Share user_1's private table with user_2 (read-only) via the permissions API.
    table = create_table(privacy: UserTable::PRIVACY_PRIVATE, name: unique_name('table'), user_id: user_1.id)
    u1_t_1_perm_id = table.table_visualization.permission.id
    put api_v1_permissions_update_url(user_domain: user_1.username, api_key: user_1.api_key, id: u1_t_1_perm_id),
        { acl: [{
          type: CartoDB::Permission::TYPE_USER,
          entity: {
            id: user_2.id
          },
          access: CartoDB::Permission::ACCESS_READONLY
        }] }.to_json, @headers
    layer = Carto::Layer.create(
      kind: 'carto',
      tooltip: {},
      options: {},
      infowindow: {}
    )
    table.map.layers << layer
    # user_2 (with read access) can list the layers of the shared map.
    login_as(user_2, scope: user_2.username)
    get_json api_v1_maps_layers_index_url(user_domain: user_2.username, map_id: table.map.id) do |response|
      response.status.should be_success
      body = JSON.parse(last_response.body)
      body['layers'].count { |l| l['kind'] != 'tiled' }.should == 2
    end
    # user_3 (no access) gets a 404 for the same map.
    login_as(user_3, scope: user_3.username)
    host! "#{user_3.username}.localhost.lan"
    get_json api_v1_maps_layers_index_url(user_domain: user_3.username, map_id: table.map.id) do |response|
      response.status.should == 404
    end
  end
end
describe '#show legacy tests' do
  before(:all) do
    @user = create_user
    host! "#{@user.username}.localhost.lan"
  end

  before(:each) do
    bypass_named_maps
    delete_user_data @user
    @table = create_table user_id: @user.id
  end

  after(:all) do
    bypass_named_maps
    @user.destroy
  end

  # Every request in this describe authenticates with the user's API key.
  let(:params) { { api_key: @user.api_key } }

  it "Get all user layers" do
    layer = Layer.create kind: 'carto'
    layer2 = Layer.create kind: 'tiled'
    @user.add_layer layer
    @user.add_layer layer2
    default_url_options[:host] = "#{@user.subdomain}.localhost.lan"
    get_json api_v1_users_layers_index_url(params.merge(user_id: @user.id)) do |response|
      response.status.should be_success
      response_body = response.body.with_indifferent_access
      response_body['total_entries'].should eq 2
      response_body['layers'].count { |l| l['kind'] != 'tiled' }.should eq 1
      response_body['layers'].map { |l| l['id'] }.sort.should eq [layer.id, layer2.id].sort
    end
  end

  it "Gets layers by map id" do
    layer = Carto::Layer.create(
      kind: 'carto',
      tooltip: {},
      options: {},
      infowindow: {}
    )
    layer2 = Carto::Layer.create(
      kind: 'tiled',
      tooltip: {},
      options: {},
      infowindow: {}
    )
    expected_layers_ids = [layer.id, layer2.id]
    # The table's map already has layers; track them so only the two new
    # layers are asserted below.
    existing_layers_ids = @table.map.layers.map(&:id)
    existing_layers_count = @table.map.layers.count
    @table.map.layers << layer
    @table.map.layers << layer2
    default_url_options[:host] = "#{@user.subdomain}.localhost.lan"
    get_json api_v1_maps_layers_index_url(params.merge(map_id: @table.map.id)) do |response|
      response.status.should be_success
      response_body = response.body.with_indifferent_access
      response_body['total_entries'].should eq 2 + existing_layers_count
      response_body['layers'].count { |l| l['kind'] != 'tiled' }.should eq 2
      new_layers_ids = response_body['layers'].map { |l| l['id'] }
      (new_layers_ids - existing_layers_ids).should == expected_layers_ids
    end
    # Showing a single layer of the map returns its id and kind.
    get_json api_v1_maps_layers_show_url(
      params.merge(
        map_id: @table.map.id,
        id: layer.id
      )) do |response|
      response.status.should be_success
      response_body = response.body.with_indifferent_access
      response_body['id'].should eq layer.id
      response_body['kind'].should eq layer.kind
    end
  end
end
end
# Fix final layer controller specs
# encoding: utf-8
require_relative '../../../spec_helper'
require_relative '../../../../app/controllers/carto/api/layers_controller'
require 'helpers/unique_names_helper'
describe Carto::Api::LayersController do
include UniqueNamesHelper
describe '#refactored tests' do
include Rack::Test::Methods
include Warden::Test::Helpers
include CacheHelper
include Carto::Factories::Visualizations
include_context 'users helper'
describe '#operations' do
after(:each) do
  destroy_full_visualization(@map, @table, @table_visualization, @visualization)
  @layer.destroy if @layer
  @layer2.destroy if @layer2
end

let(:kind) { 'carto' }

# URL helpers for the layer endpoints, always acting as @user1.
let(:create_layer_url) do
  api_v1_users_layers_create_url(user_domain: @user1.username, user_id: @user1.id, api_key: @user1.api_key)
end

def create_map_layer_url(map_id)
  api_v1_maps_layers_create_url(user_domain: @user1.username, map_id: map_id, api_key: @user1.api_key)
end

# layer_id is omitted for the bulk (multi-layer) update endpoint.
def update_map_layer_url(map_id, layer_id = nil)
  api_v1_maps_layers_update_url(
    user_domain: @user1.username,
    map_id: map_id,
    id: layer_id,
    api_key: @user1.api_key
  )
end

def delete_map_layer_url(map_id, layer_id)
  api_v1_maps_layers_destroy_url(
    user_domain: @user1.username,
    map_id: map_id,
    id: layer_id,
    api_key: @user1.api_key
  )
end

# Minimal valid layer payload used as the base for most requests.
let(:layer_json) do
  { kind: kind, options: { table_name: nil, user_name: nil }, order: 1, infowindow: {}, tooltip: {} }
end

it 'creates layers' do
  post_json create_layer_url, layer_json do |response|
    response.status.should eq 200
    layer_response = response.body
    layer_response.delete(:id).should_not be_nil
    # After removing the generated id, the response echoes the payload.
    layer_response.should eq layer_json
  end
end

it 'creates layers on maps' do
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1)
  # Let's make room for another layer of the same kind
  destroyed_layer = @map.layers.where(kind: layer_json[:kind]).first
  destroyed_layer.destroy if destroyed_layer
  post_json create_map_layer_url(@map.id), layer_json.merge(options: { table_name: @table.name }) do |response|
    response.status.should eq 200
    layer_response = response.body
    layer_id = layer_response.delete(:id)
    layer_id.should_not be_nil
    layer_response.delete(:options).should eq ({ table_name: @table.name })
    layer_response.should eq layer_json.except(:options)
    # The new layer must be attached to the map.
    @layer = Carto::Layer.find(layer_id)
    @layer.maps.map(&:id).first.should eq @map.id
  end
end

it 'registers table dependencies when creating a layer for a map' do
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1)
  # Let's make room for another layer of the same kind
  destroyed_layer = @map.layers.where(kind: layer_json[:kind]).first
  destroyed_layer.destroy if destroyed_layer
  post_json create_map_layer_url(@map.id), layer_json.merge(options: { table_name: @table.name }) do |response|
    response.status.should eq 200
    layer_response = response.body
    @layer = Carto::Layer.find(layer_response[:id])
    @layer.user_tables.should eq [@table]
  end
end
it 'does not allow to exceed max_layers' do
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1)
  @user1.max_layers = 1
  @user1.save
  post_json create_map_layer_url(@map.id), layer_json.merge(kind: 'tiled', order: 10) do |response|
    # Anchored pattern: the previous /4../ was unanchored and '.' matches any
    # character, so it accepted non-4xx strings containing "4" plus two chars.
    response.status.to_s.should match(/\A4\d{2}\z/) # 422 in new, 403 in old
  end
end
it 'updates one layer' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.layers.first
  new_order = 2
  new_layer_json = layer_json.merge(
    options: { random: '1' },
    order: new_order
  )
  put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
    response.status.should eq 200
    layer_response = response.body
    layer_response[:id].should eq @layer.id
    layer_response[:options].should eq new_layer_json[:options]
    layer_response[:order].should eq new_order
  end
end

it 'register table dependencies when updating layers' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.layers.first
  new_order = 2
  new_layer_json = layer_json.merge(
    options: { random: '1' },
    order: new_order
  )
  # Updating a layer must re-register its table dependencies exactly once.
  Carto::Layer.any_instance.expects(:register_table_dependencies).once
  put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
    response.status.should eq 200
  end
end

it 'updates several layers at once' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.layers.first
  @layer2 = map.layers[1]
  new_order = 2
  new_layer_json = layer_json.merge(
    options: { random: '1' },
    order: new_order
  )
  # Bulk update: no :id in the URL, layers identified inside the payload.
  new_layers_json = {
    layers: [
      new_layer_json.merge(id: @layer.id),
      new_layer_json.merge(id: @layer2.id)
    ]
  }
  put_json update_map_layer_url(map.id), new_layers_json do |response|
    response.status.should eq 200
    layer_response = response.body
    layer_response[:layers].map { |l| l[:id] }.should eq [@layer.id, @layer2.id]
    layer_response[:layers].each do |layer|
      # table_name is server-managed, so it is excluded from the comparison.
      layer[:options].reject { |k| k == :table_name }.should eq new_layer_json[:options]
      layer[:order].should eq new_order
    end
  end
end
# NOTE(review): descriptions said "users_name"; the option exercised is "user_name".
it 'does not update table_name or user_name options' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.data_layers.first
  original_options = @layer.options.dup
  # Try to overwrite the protected options; the API must ignore them.
  new_layer_json = layer_json.merge(
    options: { table_name: 'other_table_name', user_name: 'other_username' }
  )
  put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
    response.status.should eq 200
    layer_response = response.body
    layer_response[:options].should eq original_options.slice(:table_name, :user_name).symbolize_keys
  end
end

it 'does not remove table_name or user_name options' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.data_layers.first
  original_options = @layer.options.dup
  # Submitting an empty options hash must not drop the protected options.
  new_layer_json = layer_json.merge(
    options: {}
  )
  put_json update_map_layer_url(map.id, @layer.id), new_layer_json do |response|
    response.status.should eq 200
    layer_response = response.body
    layer_response[:options].should eq original_options.slice(:table_name, :user_name).symbolize_keys
  end
end
it 'destroys layers' do
  map = FactoryGirl.create(:carto_map_with_layers, user_id: @user1.id)
  @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user1, map: map)
  @layer = map.layers.first
  # DELETE answers 204 (no content) and the layer must be gone from the DB.
  delete_json delete_map_layer_url(map.id, @layer.id), {} do |response|
    response.status.should eq 204
    Carto::Layer.exists?(@layer.id).should be_false
  end
end
end
describe 'creating a layer from an analysis node moves the style history' do
  # POSTs a new layer derived from @original_layer (via from_layer_id/from_letter)
  # and returns the persisted Carto::Layer.
  def create_layer(new_source, new_letter, from_letter)
    url = api_v1_maps_layers_create_url(user_domain: @user2.username, map_id: @map.id, api_key: @user2.api_key)
    payload = {
      kind: 'carto',
      options: {
        source: new_source,
        letter: new_letter,
        table_name: @table.name,
        user_name: @user2.username
      },
      infowindow: {},
      tooltip: {},
      from_layer_id: @original_layer.id,
      from_letter: from_letter
    }
    post_json url, payload do |response|
      response.status.should eq 200
      layer_response = response.body
      Carto::Layer.find(layer_response[:id])
    end
  end

  before(:each) do
    @map, @table, @table_visualization, @visualization = create_full_visualization(@carto_user2)
    @original_layer = @map.data_layers.first
    @original_layer.options[:source] = 'a2'
    @original_layer.save
    # Start from a known style history: one LayerNodeStyle per analysis node,
    # each tagged with its original node id so moves/copies can be traced later.
    @original_layer.layer_node_styles.each(&:destroy)
    ['a2', 'a1', 'a0'].each do |node_id|
      LayerNodeStyle.create(
        layer_id: @original_layer.id,
        source_id: node_id,
        options: { original_id: node_id },
        infowindow: {},
        tooltip: {}
      )
    end
  end

  after(:each) do
    @layer.destroy if @layer
    destroy_full_visualization(@map, @table, @table_visualization, @visualization)
  end

  # Asserts the layer's node styles match the expected mapping.
  def verify_layer_node_styles(layer, styles_map)
    # Map original_source_id -> new_source_id
    layer.layer_node_styles.reload
    actual_styles_map = layer.layer_node_styles.map { |lns| [lns.options[:original_id], lns.source_id] }.to_h
    actual_styles_map.should eq styles_map
  end

  it 'when dragging an intermediate node' do
    # A new layer B is created (copy A1 -> B1, A0 -> B0) and the old one starts using it as a source (rename A1 -> B1)
    #
    #  _______      _______     ______
    # | A     |    | A     |   | B    |
    # |       |    |       |   |      |
    # | [A2]  |    | [A2]  |   |      |
    # | {A1}  | => | {B1}  |   | {B1} |
    # | [A0]  |    |       |   | [B0] |
    # |_______|    |_______|   |______|
    @new_layer = create_layer('b1', 'b', 'a')
    verify_layer_node_styles(@new_layer, nil => 'b1', 'a0' => 'b0')
    verify_layer_node_styles(@original_layer, 'a2' => 'a2', 'a1' => 'b1')
  end

  describe 'when dragging a header node' do
    # The original layer is renamed to B (rename A2 -> B1, A1 -> B1) and the new layer is named A (copy A1 and A0)
    # The rename and the layer creation are independent requests, so we have to handle
    # both possible orders of requests gracefully.
    #  _______      _______     ______
    # | A     |    | A     |   | B    |
    # |       |    |       |   |      |
    # | {A2}  | => |       |   | {B1} |
    # | [A1]  |    | [A1]  |   | [A1] |
    # | [A0]  |    | [A0]  |   |      |
    # |_______|    |_______|   |______|
    it 'and the original layer has been previously renamed' do
      # Rename goes through the old (Sequel) model, as the rename endpoint would.
      old_model_layer = ::Layer[@original_layer.id]
      old_model_layer.options['letter'] = 'b'
      old_model_layer.options['source'] = 'b1'
      old_model_layer.save
      @new_layer = create_layer('a1', 'a', 'a')
      verify_layer_node_styles(@new_layer, nil => 'a1', 'a0' => 'a0')
      verify_layer_node_styles(@original_layer, nil => 'b1', 'a1' => 'a1')
    end

    it 'and the original layer has not yet been renamed' do
      @new_layer = create_layer('a1', 'a', 'a')
      verify_layer_node_styles(@new_layer, nil => 'a1', 'a0' => 'a0')
      verify_layer_node_styles(@original_layer, 'a1' => 'a1')
    end
  end
end
describe "API 1.0 map layers management" do
before(:all) do
  Capybara.current_driver = :rack_test
  @user = create_user
end

before(:each) do
  bypass_named_maps
  delete_user_data @user
  host! "#{@user.username}.localhost.lan"
  @table = create_table(user_id: @user.id)
  @map = create_map(user_id: @user.id, table_id: @table.id)
  @table.reload
end

after(:all) do
  bypass_named_maps
  @user.destroy
end

# Every request in this describe authenticates with the user's API key.
let(:params) { { api_key: @user.api_key } }

it "Create a new layer associated to a map" do
  opts = { type: "GMapsBase", base_type: "roadmap", style: "null", order: "0", query_history: nil }
  infowindow = { fields: ['column1', 'column2', 'column3'] }
  data = { kind: 'gmapsbase', infowindow: infowindow, options: opts }
  post_json api_v1_maps_layers_create_url(params.merge(map_id: @map.id)), data do |response|
    response.status.should be_success
    @map.layers.size.should == 1
    response.body[:id].should == @map.layers.first.id
    response.body[:options].should == opts
    response.body[:infowindow].should == infowindow
    # order is submitted as the string "0" but echoed back as an integer.
    response.body[:order].should == 0
    response.body[:kind].should == 'gmapsbase'
  end
end

it "Get layer information" do
  layer = Layer.create(
    kind: 'carto',
    order: 1,
    options: { opt1: 'value' },
    infowindow: { fields: ['column1', 'column2'] },
    tooltip: { fields: ['column1', 'column3'] }
  )
  @map.add_layer layer
  get_json api_v1_maps_layers_show_url(params.merge(id: layer.id, map_id: @map.id)) do |response|
    response.status.should be_success
    response.body[:id].should eq layer.id
    response.body[:kind].should eq 'carto'
    response.body[:order].should eq 1
    response.body[:infowindow].should eq fields: ["column1", "column2"]
    response.body[:tooltip].should eq fields: ["column1", "column3"]
  end
end

it "Get all map layers" do
  layer = Layer.create kind: 'carto', order: 3
  layer2 = Layer.create kind: 'tiled', order: 2
  layer3 = Layer.create kind: 'tiled', order: 1
  @map.add_layer layer
  @map.add_layer layer2
  @map.add_layer layer3
  get_json api_v1_maps_layers_index_url(params.merge(map_id: @map.id)) do |response|
    response.status.should be_success
    response.body[:total_entries].should == 3
    response.body[:layers].size.should == 3
    # The index endpoint returns layers sorted by order, not insertion order.
    response.body[:layers][0][:id].should == layer3.id
    response.body[:layers][1][:id].should == layer2.id
    response.body[:layers][2][:id].should == layer.id
  end
end

# see https://cartodb.atlassian.net/browse/CDB-3350
it "Update a layer" do
  layer = Layer.create kind: 'carto', order: 0
  @map.add_layer layer
  data = { options: { opt1: 'value' }, infowindow: { fields: ['column1', 'column2'] }, order: 3, kind: 'carto' }
  put_json api_v1_maps_layers_update_url(params.merge(id: layer.id, map_id: @map.id)), data do |response|
    response.status.should be_success
    response.body[:id].should == layer.id
    response.body[:options].should == { opt1: 'value' }
    response.body[:infowindow].should == { fields: ['column1', 'column2'] }
    response.body[:kind].should == 'carto'
    response.body[:order].should == 3
  end
end

it "Update several layers at once" do
  layer1 = Layer.create kind: 'carto', order: 0
  layer2 = Layer.create kind: 'carto', order: 1
  @map.add_layer layer1
  @map.add_layer layer2
  # Bulk update: no :id in the URL, layers identified inside the payload.
  data = { layers: [
    { id: layer1.id, options: { opt1: 'value' }, infowindow: { fields: ['column1'] }, order: 2, kind: 'carto' },
    { id: layer2.id, options: { opt1: 'value' }, infowindow: { fields: ['column1'] }, order: 3, kind: 'carto' }
  ] }
  put_json api_v1_maps_layers_update_url(params.merge(map_id: @map.id)), data do |response|
    response.status.should be_success
    response_layers = response.body[:layers]
    response_layers.count.should == 2
    response_layers.find { |l| l[:id] == layer1.id }[:order].should == 2
    response_layers.find { |l| l[:id] == layer2.id }[:order].should == 3
    # New order must also be persisted, not just echoed.
    layer1.reload.order.should == 2
    layer2.reload.order.should == 3
  end
end

it "Update a layer does not change table_name neither user_name" do
  layer = Layer.create kind: 'carto', order: 0, options: { table_name: 'table1', user_name: @user.username }
  @map.add_layer layer
  data = { options: { table_name: 't1', user_name: 'u1' }, order: 3, kind: 'carto' }
  put_json api_v1_maps_layers_update_url(params.merge(id: layer.id, map_id: @map.id)), data do |response|
    response.status.should be_success
    layer.options[:table_name].should == 'table1'
    layer.options[:user_name].should == @user.username
    response.body[:options].should == { table_name: 'table1', user_name: @user.username }
  end
end

# see https://cartodb.atlassian.net/browse/CDB-3350
it "Update a layer > tiler error" do
  layer = Layer.create kind: 'carto', order: 0
  @map.add_layer layer
  # Simulate a tiler failure on save; the update must roll back.
  Layer.any_instance.stubs(:after_save).raises(RuntimeError)
  Carto::Layer.any_instance.stubs(:invalidate_maps).raises(RuntimeError)
  data = { options: { opt1: 'value' }, infowindow: { fields: ['column1', 'column2'] }, order: 999, kind: 'carto' }
  put_json api_v1_maps_layers_update_url(params.merge(id: layer.id, map_id: @map.id)), data do |response|
    response.status.should eq 400
    layer.reload.order.should_not eq 999
  end
end
it "Drop a layer" do
  layer = Layer.create kind: 'carto'
  @map.add_layer layer
  delete_json api_v1_maps_layers_destroy_url(params.merge(id: layer.id, map_id: @map.id)) do |response|
    response.status.should eq 204
    # The row is gone: Sequel's #refresh raises when the record no longer exists.
    expect { layer.refresh }.to raise_error
  end
end
end
describe "API 1.0 user layers management" do
  before(:all) do
    # rack_test driver is enough — these are pure JSON API calls, no JS.
    Capybara.current_driver = :rack_test
    @user = create_user
  end

  before(:each) do
    bypass_named_maps
    delete_user_data @user
    host! "#{@user.username}.localhost.lan"
    @table = create_table(user_id: @user.id)
  end

  after(:all) do
    bypass_named_maps
    @user.destroy
  end

  # Every request authenticates with the user's API key.
  let(:params) { { api_key: @user.api_key } }

  it "Create a new layer associated to the current user" do
    opts = { kind: 'carto' }
    post_json api_v1_users_layers_create_url(params.merge(user_id: @user.id)), opts do |response|
      response.status.should be_success
      # The new layer belongs to the user and its id is echoed back.
      @user.layers.size.should eq 1
      response.body[:id].should eq @user.layers.first.id
    end
  end

  # see https://cartodb.atlassian.net/browse/CDB-3350
  it "Update a layer" do
    layer = Layer.create kind: 'carto'
    @user.add_layer layer
    opts = { options: { opt1: 'value' }, infowindow: { fields: ['column1', 'column2'] }, kind: 'carto' }
    put_json api_v1_users_layers_update_url(params.merge(id: layer.id, user_id: @user.id)), opts do |response|
      response.status.should be_success
      response.body[:id].should eq layer.id
      response.body[:options].should eq opt1: 'value'
      response.body[:infowindow].should == { fields: ['column1', 'column2'] }
      response.body[:kind].should eq 'carto'
    end
  end

  it "Drop a layer" do
    layer = Layer.create kind: 'carto'
    @user.add_layer layer
    delete_json api_v1_users_layers_destroy_url(params.merge(id: layer.id, user_id: @user.id)) do |response|
      response.status.should eq 204
      # Deleted record: Sequel's #refresh raises once the row is gone.
      expect { layer.refresh }.to raise_error
    end
  end
end
end
# Named-maps interaction is stubbed out for every example in this group.
before(:each) do
  bypass_named_maps
end
describe 'attribution changes' do
  include Rack::Test::Methods
  include Warden::Test::Helpers

  before(:all) do
    # Skip cache invalidation side effects during visualization writes.
    CartoDB::Visualization::Member.any_instance.stubs(:invalidate_cache).returns(nil)
    @headers = { 'CONTENT_TYPE' => 'application/json' }
    @user = FactoryGirl.create(:valid_user)
  end

  after(:each) do
    @user.destroy
  end

  it 'attribution changes in a visualization propagate to associated layers' do
    table_1_attribution = 'attribution 1'
    table_2_attribution = 'attribution 2'
    modified_table_2_attribution = 'modified attribution 2'

    # Build a derived visualization on top of two public tables.
    table1 = create_table(privacy: UserTable::PRIVACY_PUBLIC, name: unique_name('table'), user_id: @user.id)
    table2 = create_table(privacy: UserTable::PRIVACY_PUBLIC, name: unique_name('table'), user_id: @user.id)
    payload = {
      name: 'new visualization',
      tables: [
        table1.name,
        table2.name
      ],
      privacy: 'public'
    }
    login_as(@user, scope: @user.username)
    host! "#{@user.username}.localhost.lan"
    post api_v1_visualizations_create_url(api_key: @api_key), payload.to_json, @headers do |response|
      response.status.should eq 200
      @visualization_data = JSON.parse(response.body)
    end
    visualization = Carto::Visualization.find(@visualization_data.fetch('id'))

    # Set an attribution on each table's canonical visualization.
    table1_visualization = CartoDB::Visualization::Member.new(id: table1.table_visualization.id).fetch
    table1_visualization.attributions = table_1_attribution
    table1_visualization.store
    table2_visualization = CartoDB::Visualization::Member.new(id: table2.table_visualization.id).fetch
    table2_visualization.attributions = table_2_attribution
    table2_visualization.store

    get_json api_v1_maps_layers_index_url(map_id: visualization.map.id, api_key: @api_key) do |response|
      response.status.should be_success
      @layers_data = response.body.with_indifferent_access
    end
    # Done this way to preserve the order
    data_layers = @layers_data['layers']
    data_layers.delete_if { |layer| layer['kind'] != 'carto' }
    data_layers.count.should eq 2
    # Both data layers carry the attributions set on their source tables.
    data_layers.map { |l| l['options']['attribution'] }.sort
               .should eq [table_1_attribution, table_2_attribution]

    # Changing one table's attribution afterwards must propagate as well.
    table2_visualization.attributions = modified_table_2_attribution
    table2_visualization.store
    get_json api_v1_maps_layers_index_url(map_id: visualization.map.id, api_key: @api_key) do |response|
      response.status.should be_success
      @layers_data = response.body.with_indifferent_access
    end
    data_layers = @layers_data['layers'].select { |layer| layer['kind'] == 'carto' }
    data_layers.count.should eq 2
    data_layers.map { |l| l['options']['attribution'] }.sort
               .should eq [table_1_attribution, modified_table_2_attribution]
  end
end
describe 'index' do
  include Rack::Test::Methods
  include Warden::Test::Helpers
  include CacheHelper
  include_context 'visualization creation helpers'
  include_context 'users helper'

  it 'fetches layers from shared visualizations' do
    CartoDB::Visualization::Member.any_instance.stubs(:invalidate_cache).returns(nil)
    @headers = { 'CONTENT_TYPE' => 'application/json' }

    # Attribute factory for visualization payloads; every field can be
    # overridden through `attributes`, otherwise a sensible default is used.
    def factory(user, attributes = {})
      {
        name: attributes.fetch(:name, unique_name('viz')),
        tags: attributes.fetch(:tags, ['foo', 'bar']),
        map_id: attributes.fetch(:map_id, ::Map.create(user_id: user.id).id),
        description: attributes.fetch(:description, 'bogus'),
        type: attributes.fetch(:type, 'derived'),
        privacy: attributes.fetch(:privacy, 'public'),
        source_visualization_id: attributes.fetch(:source_visualization_id, nil),
        parent_id: attributes.fetch(:parent_id, nil),
        locked: attributes.fetch(:locked, false),
        prev_id: attributes.fetch(:prev_id, nil),
        next_id: attributes.fetch(:next_id, nil)
      }
    end

    user_1 = create_user(
      username: unique_name('user'),
      email: unique_email,
      password: 'clientex',
      private_tables_enabled: false
    )
    user_2 = create_user(
      username: unique_name('user'),
      email: unique_email,
      password: 'clientex',
      private_tables_enabled: false
    )
    user_3 = create_user(
      username: unique_name('user'),
      email: unique_email,
      password: 'clientex',
      private_tables_enabled: false
    )

    # Organization owned by user_1; user_2 and user_3 become members.
    organization = Organization.new
    organization.name = unique_name('org')
    organization.quota_in_bytes = 1234567890
    organization.seats = 5
    organization.save
    organization.valid?.should eq true
    user_org = CartoDB::UserOrganization.new(organization.id, user_1.id)
    user_org.promote_user_to_admin
    organization.reload
    user_1.reload
    user_2.organization_id = organization.id
    user_2.save.reload
    organization.reload
    user_3.organization_id = organization.id
    user_3.save.reload
    organization.reload

    default_url_options[:host] = "#{user_2.subdomain}.localhost.lan"

    # user_1 shares a private table read-only with user_2 only (not user_3).
    table = create_table(privacy: UserTable::PRIVACY_PRIVATE, name: unique_name('table'), user_id: user_1.id)
    u1_t_1_perm_id = table.table_visualization.permission.id
    put api_v1_permissions_update_url(user_domain: user_1.username, api_key: user_1.api_key, id: u1_t_1_perm_id),
        { acl: [{
          type: CartoDB::Permission::TYPE_USER,
          entity: {
            id: user_2.id
          },
          access: CartoDB::Permission::ACCESS_READONLY
        }] }.to_json, @headers

    layer = Carto::Layer.create(
      kind: 'carto',
      tooltip: {},
      options: {},
      infowindow: {}
    )
    table.map.layers << layer

    # The grantee can list the shared map's layers...
    login_as(user_2, scope: user_2.username)
    get_json api_v1_maps_layers_index_url(user_domain: user_2.username, map_id: table.map.id) do |response|
      response.status.should be_success
      body = JSON.parse(last_response.body)
      body['layers'].count { |l| l['kind'] != 'tiled' }.should == 2
    end

    # ...while a fellow org member without a grant gets a 404.
    login_as(user_3, scope: user_3.username)
    host! "#{user_3.username}.localhost.lan"
    get_json api_v1_maps_layers_index_url(user_domain: user_3.username, map_id: table.map.id) do |response|
      response.status.should == 404
    end
  end
end
describe '#show legacy tests' do
  before(:all) do
    @user = create_user
    host! "#{@user.username}.localhost.lan"
  end

  before(:each) do
    bypass_named_maps
    delete_user_data @user
    @table = create_table user_id: @user.id
  end

  after(:all) do
    bypass_named_maps
    @user.destroy
  end

  # All requests authenticate with the user's API key.
  let(:params) { { api_key: @user.api_key } }

  it "Get all user layers" do
    layer = Layer.create kind: 'carto'
    layer2 = Layer.create kind: 'tiled'
    @user.add_layer layer
    @user.add_layer layer2
    default_url_options[:host] = "#{@user.subdomain}.localhost.lan"
    get_json api_v1_users_layers_index_url(params.merge(user_id: @user.id)) do |response|
      response.status.should be_success
      response_body = response.body.with_indifferent_access
      # Both layers are listed; exactly one is a non-tiled (carto) layer.
      response_body['total_entries'].should eq 2
      response_body['layers'].count { |l| l['kind'] != 'tiled' }.should eq 1
      response_body['layers'].map { |l| l['id'] }.sort.should eq [layer.id, layer2.id].sort
    end
  end

  it "Gets layers by map id" do
    layer = Carto::Layer.create(
      kind: 'carto',
      tooltip: {},
      options: {},
      infowindow: {}
    )
    layer2 = Carto::Layer.create(
      kind: 'tiled',
      tooltip: {},
      options: {},
      infowindow: {}
    )
    expected_layers_ids = [layer.id, layer2.id]
    # The table's map may already carry layers; record them to subtract later.
    existing_layers_ids = @table.map.layers.map(&:id)
    existing_layers_count = @table.map.layers.count
    @table.map.layers << layer
    @table.map.layers << layer2
    default_url_options[:host] = "#{@user.subdomain}.localhost.lan"
    get_json api_v1_maps_layers_index_url(params.merge(map_id: @table.map.id)) do |response|
      response.status.should be_success
      response_body = response.body.with_indifferent_access
      response_body['total_entries'].should eq 2 + existing_layers_count
      response_body['layers'].count { |l| l['kind'] != 'tiled' }.should eq 2
      # Only the two layers added above are new on the map.
      new_layers_ids = response_body['layers'].map { |l| l['id'] }
      (new_layers_ids - existing_layers_ids).should == expected_layers_ids
    end
    # Single-layer show endpoint returns the layer's own attributes.
    get_json api_v1_maps_layers_show_url(
      params.merge(
        map_id: @table.map.id,
        id: layer.id
      )) do |response|
      response.status.should be_success
      response_body = response.body.with_indifferent_access
      response_body['id'].should eq layer.id
      response_body['kind'].should eq layer.kind
    end
  end
end
end
|
#
# Copyright 2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition for git, built against the embedded toolchain.
name "git"
default_version "1.9.1"

# Build-time dependencies; git is linked against these embedded copies.
dependency "curl"
dependency "zlib"
dependency "openssl"
dependency "pcre"
dependency "libiconv"
dependency "expat"
dependency "perl"

relative_path "git-#{version}"

source url: "https://github.com/git/git/archive/v#{version}.tar.gz",
       md5: "906f984f5c8913176547dc456608be16"

build do
  # Point every *DIR/*PATH knob at the embedded prefix so the build never
  # picks up system libraries; disable gettext/python/tcl-tk extras.
  env = with_standard_compiler_flags(with_embedded_path).merge(
    "NO_GETTEXT" => "1",
    "NO_PYTHON" => "1",
    "NO_TCLTK" => "1",
    "NO_R_TO_GCC_LINKER" => "1",
    "NEEDS_LIBICONV" => "1",
    "PERL_PATH" => "#{install_dir}/embedded/bin/perl",
    "ZLIB_PATH" => "#{install_dir}/embedded",
    "ICONVDIR" => "#{install_dir}/embedded",
    "OPENSSLDIR" => "#{install_dir}/embedded",
    "EXPATDIR" => "#{install_dir}/embedded",
    "CURLDIR" => "#{install_dir}/embedded",
    "LIBPCREDIR" => "#{install_dir}/embedded",
  )

  # Use the omnibus `make` DSL helper instead of a raw `command "make …"`:
  # it picks the right make binary per platform (e.g. gmake where GNU make
  # is not installed as `make`).
  make "-j #{max_build_jobs} prefix=#{install_dir}/embedded", env: env
  make "install prefix=#{install_dir}/embedded", env: env
end
Use `make` instead of `command` in git
#
# Copyright 2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition for git, built against the embedded toolchain.
name "git"
default_version "1.9.1"

# Build-time dependencies; git is linked against these embedded copies.
dependency "curl"
dependency "zlib"
dependency "openssl"
dependency "pcre"
dependency "libiconv"
dependency "expat"
dependency "perl"

relative_path "git-#{version}"

source url: "https://github.com/git/git/archive/v#{version}.tar.gz",
       md5: "906f984f5c8913176547dc456608be16"

build do
  # Point every *DIR/*PATH knob at the embedded prefix so the build never
  # picks up system libraries; disable gettext/python/tcl-tk extras.
  env = with_standard_compiler_flags(with_embedded_path).merge(
    "NO_GETTEXT" => "1",
    "NO_PYTHON" => "1",
    "NO_TCLTK" => "1",
    "NO_R_TO_GCC_LINKER" => "1",
    "NEEDS_LIBICONV" => "1",
    "PERL_PATH" => "#{install_dir}/embedded/bin/perl",
    "ZLIB_PATH" => "#{install_dir}/embedded",
    "ICONVDIR" => "#{install_dir}/embedded",
    "OPENSSLDIR" => "#{install_dir}/embedded",
    "EXPATDIR" => "#{install_dir}/embedded",
    "CURLDIR" => "#{install_dir}/embedded",
    "LIBPCREDIR" => "#{install_dir}/embedded",
  )

  # `make` DSL helper is platform-aware (picks gmake where needed).
  make "-j #{max_build_jobs} prefix=#{install_dir}/embedded", env: env
  make "install prefix=#{install_dir}/embedded", env: env
end
|
require 'shoes/spec_helper'

describe Shoes::Radio do
  include_context "dsl app"

  subject(:radio) { Shoes::Radio.new(app, parent, group, input_opts, input_block) }
  let(:group) { :a_group }

  it_behaves_like "checkable"
  it_behaves_like "object with state"

  # only one radio in a group can be checked
  describe "#initialize" do
    it "sets accessors" do
      # Use the non-deprecated `expect` syntax instead of monkey-patched `should`.
      expect(radio.parent).to eq(parent)
      expect(radio.group).to eq(group)
      expect(radio.blk).to eq(input_block)
    end
  end

  describe "#group=" do
    it "changes the group" do
      radio.group = "New Group"
      expect(radio.group).to eq("New Group")
    end
  end
end
Update Radio specs to expect syntax
require 'shoes/spec_helper'

describe Shoes::Radio do
  include_context "dsl app"

  let(:group) { :a_group }
  subject(:radio) { Shoes::Radio.new(app, parent, group, input_opts, input_block) }

  it_behaves_like "checkable"
  it_behaves_like "object with state"

  # only one radio in a group can be checked
  describe "#initialize" do
    it "sets accessors" do
      # The constructor arguments must be exposed through readers as-is.
      expect(subject.parent).to eq parent
      expect(subject.group).to eq group
      expect(subject.blk).to eq input_block
    end
  end

  describe "#group=" do
    it "changes the group" do
      subject.group = "New Group"
      expect(subject.group).to eq "New Group"
    end
  end
end
|
# coding: UTF-8
require File.expand_path(File.dirname(__FILE__) + '/acceptance_helper')
feature "Dashboard", %q{
In order to allow users to manage their databases
As a User
I want to be able to visit my databases and manage them
} do
scenario "Login and visit my dashboard" do
  # Fixture setup: 22 tables for `user`, 22 for `the_other` (only the
  # former must appear in the dashboard). Timecop pins creation times so
  # ordering by recency is deterministic.
  user = create_user
  the_other = create_user
  t = Time.now - 6.minutes
  Timecop.travel(t)
  20.times do |i|
    create_table :user_id => user.id, :name => "Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'personal'
  end
  20.times do |i|
    create_table :user_id => the_other.id, :name => "Other Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'vodka'
  end
  Timecop.travel(t + 1.minute)
  create_table :user_id => user.id, :name => 'My check-ins', :privacy => Table::PRIVATE,
               :tags => "4sq, personal, feed aggregator"
  Timecop.travel(t + 2.minutes)
  create_table :user_id => user.id, :name => 'Downloaded movies', :privacy => Table::PRIVATE,
               :tags => "movies, personal"
  Timecop.travel(t + 3.minutes)
  create_table :user_id => the_other.id, :name => 'Favourite restaurants', :privacy => Table::PRIVATE,
               :tags => "restaurants"
  Timecop.travel(t + 4.minutes)
  create_table :user_id => the_other.id, :name => 'Secret vodkas', :privacy => Table::PRIVATE,
               :tags => "vodka, drinking"
  Timecop.travel(t + 6.minutes)
  log_in_as user
  within(:css, "header") do
    page.should have_link("CartoDB")
    page.should have_content(user.email)
  end
  page.should have_css("footer")
  page.should have_css("ul.tables_list li.selected a", :text => "Your tables")
  page.should have_content("22 tables in your account")
  # Tables are listed newest first: downloaded_movies, then my_check_ins.
  within("ul.your_tables li:eq(1)") do
    page.should have_link("downloaded_movies")
    page.should have_content("PRIVATE")
    # page.should have_content("4 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("movies")
      page.should have_content("personal")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("my_check_ins")
    page.should have_content("PRIVATE")
    # page.should have_content("5 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("4sq")
      page.should have_content("personal")
      page.should have_content("feed aggregator")
    end
  end
  within("ul.your_tables li:eq(10).last") do
    page.should have_link("table_8")
    page.should have_content("PRIVATE")
    # page.should have_content("6 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  # Tag sidebar lists only this user's tags.
  page.should have_content("BROWSE BY TAGS")
  page.should have_css("ul li:eq(1) a span", :text => "personal")
  page.should have_css("ul li a span", :text => "4sq")
  page.should have_css("ul li a span", :text => "feed aggregator")
  page.should have_css("ul li a span", :text => "movies")
  # Pagination: page 1 of 3 (10 tables per page, 22 total).
  page.should have_no_selector("div.paginate a.previous")
  page.should have_selector("div.paginate a.next")
  within(:css, "div.paginate ul") do
    page.should have_css("li.selected a", :text => "1")
    page.should have_css("li a", :text => "2")
    page.should have_css("li a", :text => "3")
  end
  click_link_or_button('3')
  within("ul.your_tables li:eq(1)") do
    page.should have_link("table_19")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("table_20")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  # Last page: previous visible, next gone.
  page.should have_selector("div.paginate a.previous")
  page.should have_no_selector("div.paginate a.next")
  within(:css, "div.paginate ul") do
    page.should have_css("li a", :text => "1")
    page.should have_css("li a", :text => "2")
    page.should have_css("li.selected a", :text => "3")
  end
  click_link_or_button('Previous')
  within("ul.your_tables li:eq(1)") do
    page.should have_link("table_9")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("table_10")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  page.should have_selector("div.paginate a.previous")
  page.should have_selector("div.paginate a.next")
  within(:css, "div.paginate ul") do
    page.should have_css("li a", :text => "1")
    page.should have_css("li.selected a", :text => "2")
    page.should have_css("li a", :text => "3")
  end
  # Navigate into a table's page; the global footer disappears there.
  click_link_or_button('1')
  click_link_or_button('downloaded_movies')
  page.should have_css("h2", :text => 'downloaded_movies')
  page.should have_css("p.status", :text => 'PRIVATE')
  within(:css, "span.tags") do
    page.should have_content("movies")
    page.should have_content("personal")
  end
  page.should have_no_selector("footer")
  # Logging out redirects to the login page.
  visit '/dashboard'
  click_link_or_button('close session')
  page.current_path.should == '/login'
end
scenario "Browse by tags" do
  # Fixture setup mirrors the dashboard scenario: 22 tables for `user`,
  # 22 for `the_other`, with Timecop pinning creation times.
  user = create_user
  the_other = create_user
  t = Time.now - 6.minutes
  Timecop.travel(t)
  20.times do |i|
    create_table :user_id => user.id, :name => "Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'personal'
  end
  20.times do |i|
    create_table :user_id => the_other.id, :name => "Other Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'vodka'
  end
  Timecop.travel(t + 1.minute)
  create_table :user_id => user.id, :name => 'My check-ins', :privacy => Table::PRIVATE,
               :tags => "4sq, personal, feed aggregator"
  Timecop.travel(t + 2.minutes)
  create_table :user_id => user.id, :name => 'Downloaded movies', :privacy => Table::PRIVATE,
               :tags => "movies"
  Timecop.travel(t + 3.minutes)
  create_table :user_id => the_other.id, :name => 'Favourite restaurants', :privacy => Table::PRIVATE,
               :tags => "restaurants"
  Timecop.travel(t + 4.minutes)
  create_table :user_id => the_other.id, :name => 'Secret vodkas', :privacy => Table::PRIVATE,
               :tags => "vodka, drinking"
  Timecop.travel(t + 6.minutes)
  log_in_as user
  within(:css, "header") do
    page.should have_link("CartoDB")
    page.should have_content(user.email)
  end
  # Filter by the "4sq" tag: only my_check_ins matches.
  page.find("ul li a span", :text => "4sq").click
  page.should have_content("1 table in your account")
  # FIX: the rendered link text is lowercase "view all tables"; the previous
  # capitalized expectation never matched.
  page.should have_css("ul li:eq(1) a", :text => "view all tables")
  page.should have_css("ul li:eq(2) a span", :text => "personal")
  page.should have_css("ul li a span", :text => "4sq")
  page.should have_css("ul li a span", :text => "feed aggregator")
  page.should have_css("ul li a span", :text => "movies")
  within("ul.your_tables li:eq(1)") do
    page.should have_link("my_check_ins")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("4sq")
    end
  end
  # Filter by the "personal" tag: 21 tables match, paginated.
  page.find("ul li a span", :text => "personal").click
  page.should have_content("21 tables in your account")
  within("ul.your_tables li:eq(1)") do
    page.should have_link("my_check_ins")
    page.should have_content("PRIVATE")
    # page.should have_content("5 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("4sq")
      page.should have_content("personal")
      page.should have_content("feed aggregator")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("table_1")
    page.should have_content("PRIVATE")
  end
  click_link_or_button('2')
  within("ul.your_tables li:eq(1)") do
    page.should have_link("table_10")
    page.should have_content("PRIVATE")
  end
end
# TODO: implement it
# scenario "Remove a table" do
# user = create_user
# create_table :user_id => user.id, :name => 'My check-ins', :privacy => Table::PUBLIC,
# :tags => "4sq, personal, feed aggregator"
# create_table :user_id => user.id, :name => 'Downloaded movies', :privacy => Table::PRIVATE,
# :tags => "movies, personal"
#
# log_in_as user
#
# # debugger
#
# page.driver.browser.execute_script("$('ul.your_tables li:eq(1)').trigger('mouseover')")
# page.find("ul.your_tables li a.delete").click
#
# page.find("div.delete_window a.cancel").click
# # page.find("ul.your_tables li:eq(1) p.status").click
# page.find("ul.your_tables li:eq(1) a.delete").click
# page.find("ul.your_tables li:eq(1) a.confirm_delete").click
# end
scenario "Create a new table with default attributes" do
  user = create_user
  log_in_as user
  # Open the "new table" dialog and submit it untouched; the app assigns
  # the default name shown in the heading.
  page.find('a.new_table').click
  page.find('div.create_window span.bottom input#create_table').click
  page.should have_css("h2 a", :text => 'untitle_table')
end
scenario "Get OAuth credentials" do
  user = create_user
  log_in_as user
  click "Your api keys"
  page.should have_content("Using the key and secret you can access CartoDB from external applications.")
  # The current OAuth key/secret pair is displayed in read-only inputs.
  within("span.form_block") do
    page.should have_content("YOUR KEY")
    page.should have_css("input[@value='#{user.client_application.key}']")
  end
  within("span.form_block.last") do
    page.should have_content("YOUR SECRET")
    page.should have_css("input[@value='#{user.client_application.secret}']")
  end
  # Regenerate credentials; the old key was captured but never checked
  # before — assert the key actually rotates.
  old_key = user.client_application.key
  page.find("span.end_key a.submit").click
  user.reload
  user.client_application.key.should_not == old_key
  # The page now shows the freshly generated pair.
  within("span.form_block") do
    page.should have_content("YOUR KEY")
    page.should have_css("input[@value='#{user.client_application.key}']")
  end
  within("span.form_block.last") do
    page.should have_content("YOUR SECRET")
    page.should have_css("input[@value='#{user.client_application.secret}']")
  end
end
scenario "Manage JSONP API keys" do
  user = create_user
  log_in_as user
  click "Your api keys"
  click "JSONP"
  # Registering a domain creates an APIKey record shown on the page.
  fill_in "YOUR APP DOMAIN", :with => "http://test-app.heroku.com"
  click "Get API key"
  page.should have_field("APP", :content => "http://test-app.heroku.com")
  page.should have_field("API KEY", :content => APIKey.first.api_key)
  # Removing it (with confirmation dialog) deletes the record.
  click "Remove key"
  page.find("div.mamufas a.confirm_delete").click
  APIKey.filter(:user_id => user.id).all.size.should == 0
end
end
Fixed spec: match the lowercase "view all tables" link text in the tag-browsing dashboard scenario
# coding: UTF-8
require File.expand_path(File.dirname(__FILE__) + '/acceptance_helper')
feature "Dashboard", %q{
In order to allow users to manage their databases
As a User
I want to be able to visit my databases and manage them
} do
scenario "Login and visit my dashboard" do
  # Fixture setup: 22 tables for `user`, 22 for `the_other` (only the
  # former must appear in the dashboard). Timecop pins creation times so
  # ordering by recency is deterministic.
  user = create_user
  the_other = create_user
  t = Time.now - 6.minutes
  Timecop.travel(t)
  20.times do |i|
    create_table :user_id => user.id, :name => "Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'personal'
  end
  20.times do |i|
    create_table :user_id => the_other.id, :name => "Other Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'vodka'
  end
  Timecop.travel(t + 1.minute)
  create_table :user_id => user.id, :name => 'My check-ins', :privacy => Table::PRIVATE,
               :tags => "4sq, personal, feed aggregator"
  Timecop.travel(t + 2.minutes)
  create_table :user_id => user.id, :name => 'Downloaded movies', :privacy => Table::PRIVATE,
               :tags => "movies, personal"
  Timecop.travel(t + 3.minutes)
  create_table :user_id => the_other.id, :name => 'Favourite restaurants', :privacy => Table::PRIVATE,
               :tags => "restaurants"
  Timecop.travel(t + 4.minutes)
  create_table :user_id => the_other.id, :name => 'Secret vodkas', :privacy => Table::PRIVATE,
               :tags => "vodka, drinking"
  Timecop.travel(t + 6.minutes)
  log_in_as user
  within(:css, "header") do
    page.should have_link("CartoDB")
    page.should have_content(user.email)
  end
  page.should have_css("footer")
  page.should have_css("ul.tables_list li.selected a", :text => "Your tables")
  page.should have_content("22 tables in your account")
  # Tables are listed newest first: downloaded_movies, then my_check_ins.
  within("ul.your_tables li:eq(1)") do
    page.should have_link("downloaded_movies")
    page.should have_content("PRIVATE")
    # page.should have_content("4 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("movies")
      page.should have_content("personal")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("my_check_ins")
    page.should have_content("PRIVATE")
    # page.should have_content("5 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("4sq")
      page.should have_content("personal")
      page.should have_content("feed aggregator")
    end
  end
  within("ul.your_tables li:eq(10).last") do
    page.should have_link("table_8")
    page.should have_content("PRIVATE")
    # page.should have_content("6 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  # Tag sidebar lists only this user's tags.
  page.should have_content("BROWSE BY TAGS")
  page.should have_css("ul li:eq(1) a span", :text => "personal")
  page.should have_css("ul li a span", :text => "4sq")
  page.should have_css("ul li a span", :text => "feed aggregator")
  page.should have_css("ul li a span", :text => "movies")
  # Pagination: page 1 of 3 (10 tables per page, 22 total).
  page.should have_no_selector("div.paginate a.previous")
  page.should have_selector("div.paginate a.next")
  within(:css, "div.paginate ul") do
    page.should have_css("li.selected a", :text => "1")
    page.should have_css("li a", :text => "2")
    page.should have_css("li a", :text => "3")
  end
  click_link_or_button('3')
  within("ul.your_tables li:eq(1)") do
    page.should have_link("table_19")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("table_20")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  # Last page: previous visible, next gone.
  page.should have_selector("div.paginate a.previous")
  page.should have_no_selector("div.paginate a.next")
  within(:css, "div.paginate ul") do
    page.should have_css("li a", :text => "1")
    page.should have_css("li a", :text => "2")
    page.should have_css("li.selected a", :text => "3")
  end
  click_link_or_button('Previous')
  within("ul.your_tables li:eq(1)") do
    page.should have_link("table_9")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("table_10")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("personal")
    end
  end
  page.should have_selector("div.paginate a.previous")
  page.should have_selector("div.paginate a.next")
  within(:css, "div.paginate ul") do
    page.should have_css("li a", :text => "1")
    page.should have_css("li.selected a", :text => "2")
    page.should have_css("li a", :text => "3")
  end
  # Navigate into a table's page; the global footer disappears there.
  click_link_or_button('1')
  click_link_or_button('downloaded_movies')
  page.should have_css("h2", :text => 'downloaded_movies')
  page.should have_css("p.status", :text => 'PRIVATE')
  within(:css, "span.tags") do
    page.should have_content("movies")
    page.should have_content("personal")
  end
  page.should have_no_selector("footer")
  # Logging out redirects to the login page.
  visit '/dashboard'
  click_link_or_button('close session')
  page.current_path.should == '/login'
end
scenario "Browse by tags" do
  # Fixture setup mirrors the dashboard scenario: 22 tables for `user`,
  # 22 for `the_other`, with Timecop pinning creation times.
  user = create_user
  the_other = create_user
  t = Time.now - 6.minutes
  Timecop.travel(t)
  20.times do |i|
    create_table :user_id => user.id, :name => "Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'personal'
  end
  20.times do |i|
    create_table :user_id => the_other.id, :name => "Other Table ##{20 - i}", :privacy => Table::PRIVATE, :tags => 'vodka'
  end
  Timecop.travel(t + 1.minute)
  create_table :user_id => user.id, :name => 'My check-ins', :privacy => Table::PRIVATE,
               :tags => "4sq, personal, feed aggregator"
  Timecop.travel(t + 2.minutes)
  create_table :user_id => user.id, :name => 'Downloaded movies', :privacy => Table::PRIVATE,
               :tags => "movies"
  Timecop.travel(t + 3.minutes)
  create_table :user_id => the_other.id, :name => 'Favourite restaurants', :privacy => Table::PRIVATE,
               :tags => "restaurants"
  Timecop.travel(t + 4.minutes)
  create_table :user_id => the_other.id, :name => 'Secret vodkas', :privacy => Table::PRIVATE,
               :tags => "vodka, drinking"
  Timecop.travel(t + 6.minutes)
  log_in_as user
  within(:css, "header") do
    page.should have_link("CartoDB")
    page.should have_content(user.email)
  end
  # Filter by the "4sq" tag: only my_check_ins matches.
  page.find("ul li a span", :text => "4sq").click
  page.should have_content("1 table in your account")
  page.should have_css("ul li:eq(1) a", :text => "view all tables")
  page.should have_css("ul li:eq(2) a span", :text => "personal")
  page.should have_css("ul li a span", :text => "4sq")
  page.should have_css("ul li a span", :text => "feed aggregator")
  page.should have_css("ul li a span", :text => "movies")
  within("ul.your_tables li:eq(1)") do
    page.should have_link("my_check_ins")
    page.should have_content("PRIVATE")
    within(:css, "span.tags") do
      page.should have_content("4sq")
    end
  end
  # Filter by the "personal" tag: 21 tables match, paginated.
  page.find("ul li a span", :text => "personal").click
  page.should have_content("21 tables in your account")
  within("ul.your_tables li:eq(1)") do
    page.should have_link("my_check_ins")
    page.should have_content("PRIVATE")
    # page.should have_content("5 minutes ago")
    within(:css, "span.tags") do
      page.should have_content("4sq")
      page.should have_content("personal")
      page.should have_content("feed aggregator")
    end
  end
  within("ul.your_tables li:eq(2)") do
    page.should have_link("table_1")
    page.should have_content("PRIVATE")
  end
  click_link_or_button('2')
  within("ul.your_tables li:eq(1)") do
    page.should have_link("table_10")
    page.should have_content("PRIVATE")
  end
end
# TODO: implement it
# scenario "Remove a table" do
# user = create_user
# create_table :user_id => user.id, :name => 'My check-ins', :privacy => Table::PUBLIC,
# :tags => "4sq, personal, feed aggregator"
# create_table :user_id => user.id, :name => 'Downloaded movies', :privacy => Table::PRIVATE,
# :tags => "movies, personal"
#
# log_in_as user
#
# # debugger
#
# page.driver.browser.execute_script("$('ul.your_tables li:eq(1)').trigger('mouseover')")
# page.find("ul.your_tables li a.delete").click
#
# page.find("div.delete_window a.cancel").click
# # page.find("ul.your_tables li:eq(1) p.status").click
# page.find("ul.your_tables li:eq(1) a.delete").click
# page.find("ul.your_tables li:eq(1) a.confirm_delete").click
# end
scenario "Create a new table with default attributes" do
  user = create_user
  log_in_as user
  # Open the "new table" dialog and submit it untouched; the app assigns
  # the default name shown in the heading.
  page.find('a.new_table').click
  page.find('div.create_window span.bottom input#create_table').click
  page.should have_css("h2 a", :text => 'untitle_table')
end
scenario "Get OAuth credentials" do
  user = create_user
  log_in_as user
  click "Your api keys"
  page.should have_content("Using the key and secret you can access CartoDB from external applications.")
  # The current OAuth key/secret pair is displayed in read-only inputs.
  within("span.form_block") do
    page.should have_content("YOUR KEY")
    page.should have_css("input[@value='#{user.client_application.key}']")
  end
  within("span.form_block.last") do
    page.should have_content("YOUR SECRET")
    page.should have_css("input[@value='#{user.client_application.secret}']")
  end
  # Regenerate credentials; the old key was captured but never checked
  # before — assert the key actually rotates.
  old_key = user.client_application.key
  page.find("span.end_key a.submit").click
  user.reload
  user.client_application.key.should_not == old_key
  # The page now shows the freshly generated pair.
  within("span.form_block") do
    page.should have_content("YOUR KEY")
    page.should have_css("input[@value='#{user.client_application.key}']")
  end
  within("span.form_block.last") do
    page.should have_content("YOUR SECRET")
    page.should have_css("input[@value='#{user.client_application.secret}']")
  end
end
# Covers the JSONP API key lifecycle: request a key for an app domain,
# then remove it and verify it is gone from the database.
scenario "Manage JSONP API keys" do
  user = create_user
  log_in_as user
  click "Your api keys"
  click "JSONP"
  fill_in "YOUR APP DOMAIN", :with => "http://test-app.heroku.com"
  click "Get API key"
  page.should have_field("APP", :content => "http://test-app.heroku.com")
  page.should have_field("API KEY", :content => APIKey.first.api_key)
  click "Remove key"
  # Confirm inside the modal dialog, then check the row is really deleted.
  page.find("div.mamufas a.confirm_delete").click
  APIKey.filter(:user_id => user.id).all.size.should == 0
end
end
|
require File.expand_path('../shared', __FILE__)

# Production environment configuration — Papertrail (remote syslog)
# logging variant.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Ship logs to Papertrail.
  # NOTE(review): this is assigned BEFORE the RAILS_LOG_TO_STDOUT block
  # below, so when that env var is present the STDOUT logger replaces the
  # Papertrail logger — confirm that is intended.
  config.logger = RemoteSyslogLogger.new('logs.papertrailapp.com', 22777, :program => "payload-app")

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "amingilani_#{Rails.env}"
  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
Override vanilla logger
require File.expand_path('../shared', __FILE__)

# Production environment configuration — Papertrail (remote syslog)
# logging variant; the Papertrail logger is assigned last so it wins.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "amingilani_#{Rails.env}"
  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Ship logs to Papertrail. Placed AFTER the RAILS_LOG_TO_STDOUT block
  # above so this logger overrides the STDOUT one even when that env var
  # is set.
  config.logger = RemoteSyslogLogger.new('logs.papertrailapp.com', 22777, :program => "payload-app")

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
|
# Gem version constant, read by the gemspec.
module Godmin
  VERSION = "0.10.1"
end
Prepares release
# Gem version constant, read by the gemspec.
module Godmin
  VERSION = "0.10.2"
end
|
# encoding: utf-8
require 'spec_helper'
require 'rspec/core/formatters/html_formatter'
require 'nokogiri'

module RSpec
  module Core
    module Formatters
      # Golden-file test: runs the HTML formatter over a fixture spec file
      # and compares the output against a checked-in HTML file for the
      # current Ruby version/platform.
      # NOTE(review): the dots in the version regex are unescaped, so they
      # match any character; harmless for the listed versions but worth
      # escaping.
      describe HtmlFormatter, :if => RUBY_VERSION =~ /^(1.8.7|1.9.2|1.9.3|2.0.0)$/ do
        # Platform-specific suffix used to pick the right fixture file.
        let(:suffix) {
          if ::RUBY_PLATFORM == 'java'
            "-jruby"
          elsif defined?(Rubinius)
            "-rbx"
          else
            ""
          end
        }

        # Repository root (four directories above this spec file).
        let(:root) { File.expand_path("#{File.dirname(__FILE__)}/../../../..") }

        let(:expected_file) do
          "#{File.dirname(__FILE__)}/html_formatted-#{::RUBY_VERSION}#{suffix}.html"
        end

        # Runs the fixture specs through the HTML formatter; timing numbers
        # are normalized so the comparison is stable across runs.
        let(:generated_html) do
          options = %w[spec/rspec/core/resources/formatter_specs.rb --format html --order default]
          err, out = StringIO.new, RSpec.configuration.output
          err.set_encoding("utf-8") if err.respond_to?(:set_encoding)
          command_line = RSpec::Core::CommandLine.new(options)
          command_line.instance_variable_get("@configuration").backtrace_formatter.inclusion_patterns = []
          command_line.run(err, out)
          out.string.gsub(/\d+\.\d+(s| seconds)/, "n.nnnn\\1")
        end

        let(:expected_html) do
          unless File.file?(expected_file)
            raise "There is no HTML file with expected content for this platform: #{expected_file}"
          end
          File.read(expected_file)
        end

        before do
          allow(RSpec.configuration).to receive(:load_spec_files) do
            RSpec.configuration.files_to_run.map {|f| load File.expand_path(f) }
          end
        end

        # Uncomment this group temporarily in order to overwrite the expected
        # with actual. Use with care!!!
        describe "file generator", :if => ENV['GENERATE'] do
          it "generates a new comparison file" do
            Dir.chdir(root) do
              File.open(expected_file, 'w') {|io| io.write(generated_html)}
            end
          end
        end

        # Pulls the backtrace lines that mention formatter_specs.rb out of
        # the document; backtraces are compared separately because absolute
        # paths differ between machines.
        def extract_backtrace_from(doc)
          doc.search("div.backtrace").
            collect {|e| e.at("pre").inner_html}.
            collect {|e| e.split("\n")}.flatten.
            select {|e| e =~ /formatter_specs\.rb/}
        end

        describe 'produced HTML' do
          it "produces HTML identical to the one we designed manually" do
            Dir.chdir(root) do
              actual_doc = Nokogiri::HTML(generated_html)
              actual_backtraces = extract_backtrace_from(actual_doc)
              actual_doc.css("div.backtrace").remove
              expected_doc = Nokogiri::HTML(expected_html)
              expected_backtraces = extract_backtrace_from(expected_doc)
              expected_doc.search("div.backtrace").remove
              # Compare documents without backtraces, then compare each
              # backtrace line path/line/suffix separately.
              expect(actual_doc.inner_html).to eq(expected_doc.inner_html)
              expected_backtraces.each_with_index do |expected_line, i|
                expected_path, expected_line_number, expected_suffix = expected_line.split(':')
                actual_path, actual_line_number, actual_suffix = actual_backtraces[i].split(':')
                expect(File.expand_path(actual_path)).to eq(File.expand_path(expected_path))
                expect(actual_line_number).to eq(expected_line_number)
                expect(actual_suffix).to eq(expected_suffix)
              end
            end
          end

          context 'with mathn loaded' do
            include MathnIntegrationSupport

            it "produces HTML identical to the one we designed manually" do
              with_mathn_loaded do
                Dir.chdir(root) do
                  actual_doc = Nokogiri::HTML(generated_html)
                  actual_backtraces = extract_backtrace_from(actual_doc)
                  actual_doc.css("div.backtrace").remove
                  expected_doc = Nokogiri::HTML(expected_html)
                  expected_backtraces = extract_backtrace_from(expected_doc)
                  expected_doc.search("div.backtrace").remove
                  expect(actual_doc.inner_html).to eq(expected_doc.inner_html)
                  expected_backtraces.each_with_index do |expected_line, i|
                    expected_path, expected_line_number, expected_suffix = expected_line.split(':')
                    actual_path, actual_line_number, actual_suffix = actual_backtraces[i].split(':')
                    expect(File.expand_path(actual_path)).to eq(File.expand_path(expected_path))
                    expect(actual_line_number).to eq(expected_line_number)
                    expect(actual_suffix).to eq(expected_suffix)
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end
Add whitespace to make setup vs. assertions distinct
# encoding: utf-8
require 'spec_helper'
require 'rspec/core/formatters/html_formatter'
require 'nokogiri'

module RSpec
  module Core
    module Formatters
      # Golden-file test: runs the HTML formatter over a fixture spec file
      # and compares the output against a checked-in HTML file for the
      # current Ruby version/platform.
      # NOTE(review): the dots in the version regex are unescaped, so they
      # match any character; harmless for the listed versions but worth
      # escaping.
      describe HtmlFormatter, :if => RUBY_VERSION =~ /^(1.8.7|1.9.2|1.9.3|2.0.0)$/ do
        # Platform-specific suffix used to pick the right fixture file.
        let(:suffix) {
          if ::RUBY_PLATFORM == 'java'
            "-jruby"
          elsif defined?(Rubinius)
            "-rbx"
          else
            ""
          end
        }

        # Repository root (four directories above this spec file).
        let(:root) { File.expand_path("#{File.dirname(__FILE__)}/../../../..") }

        let(:expected_file) do
          "#{File.dirname(__FILE__)}/html_formatted-#{::RUBY_VERSION}#{suffix}.html"
        end

        # Runs the fixture specs through the HTML formatter; timing numbers
        # are normalized so the comparison is stable across runs.
        let(:generated_html) do
          options = %w[spec/rspec/core/resources/formatter_specs.rb --format html --order default]
          err, out = StringIO.new, RSpec.configuration.output
          err.set_encoding("utf-8") if err.respond_to?(:set_encoding)
          command_line = RSpec::Core::CommandLine.new(options)
          command_line.instance_variable_get("@configuration").backtrace_formatter.inclusion_patterns = []
          command_line.run(err, out)
          out.string.gsub(/\d+\.\d+(s| seconds)/, "n.nnnn\\1")
        end

        let(:expected_html) do
          unless File.file?(expected_file)
            raise "There is no HTML file with expected content for this platform: #{expected_file}"
          end
          File.read(expected_file)
        end

        before do
          allow(RSpec.configuration).to receive(:load_spec_files) do
            RSpec.configuration.files_to_run.map {|f| load File.expand_path(f) }
          end
        end

        # Uncomment this group temporarily in order to overwrite the expected
        # with actual. Use with care!!!
        describe "file generator", :if => ENV['GENERATE'] do
          it "generates a new comparison file" do
            Dir.chdir(root) do
              File.open(expected_file, 'w') {|io| io.write(generated_html)}
            end
          end
        end

        # Pulls the backtrace lines that mention formatter_specs.rb out of
        # the document; backtraces are compared separately because absolute
        # paths differ between machines.
        def extract_backtrace_from(doc)
          doc.search("div.backtrace").
            collect {|e| e.at("pre").inner_html}.
            collect {|e| e.split("\n")}.flatten.
            select {|e| e =~ /formatter_specs\.rb/}
        end

        describe 'produced HTML' do
          it "produces HTML identical to the one we designed manually" do
            Dir.chdir(root) do
              actual_doc = Nokogiri::HTML(generated_html)
              actual_backtraces = extract_backtrace_from(actual_doc)
              actual_doc.css("div.backtrace").remove
              expected_doc = Nokogiri::HTML(expected_html)
              expected_backtraces = extract_backtrace_from(expected_doc)
              expected_doc.search("div.backtrace").remove
              # Compare documents without backtraces, then compare each
              # backtrace line path/line/suffix separately.
              expect(actual_doc.inner_html).to eq(expected_doc.inner_html)
              expected_backtraces.each_with_index do |expected_line, i|
                expected_path, expected_line_number, expected_suffix = expected_line.split(':')
                actual_path, actual_line_number, actual_suffix = actual_backtraces[i].split(':')
                expect(File.expand_path(actual_path)).to eq(File.expand_path(expected_path))
                expect(actual_line_number).to eq(expected_line_number)
                expect(actual_suffix).to eq(expected_suffix)
              end
            end
          end

          context 'with mathn loaded' do
            include MathnIntegrationSupport

            it "produces HTML identical to the one we designed manually" do
              with_mathn_loaded do
                Dir.chdir(root) do
                  actual_doc = Nokogiri::HTML(generated_html)
                  actual_backtraces = extract_backtrace_from(actual_doc)
                  actual_doc.css("div.backtrace").remove
                  expected_doc = Nokogiri::HTML(expected_html)
                  expected_backtraces = extract_backtrace_from(expected_doc)
                  expected_doc.search("div.backtrace").remove
                  expect(actual_doc.inner_html).to eq(expected_doc.inner_html)
                  expected_backtraces.each_with_index do |expected_line, i|
                    expected_path, expected_line_number, expected_suffix = expected_line.split(':')
                    actual_path, actual_line_number, actual_suffix = actual_backtraces[i].split(':')
                    expect(File.expand_path(actual_path)).to eq(File.expand_path(expected_path))
                    expect(actual_line_number).to eq(expected_line_number)
                    expect(actual_suffix).to eq(expected_suffix)
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end
|
# Omnibus software definition for building git from source.
name "git"
version "1.8.5.3"

dependency "zlib"
dependency "openssl"
dependency "curl"

# Reference the version declared above so a version bump only needs to
# happen in one place (the md5 still has to be updated alongside it).
source :url => "https://git-core.googlecode.com/files/git-#{version}.tar.gz",
       :md5 => "57b966065882f83ef5879620a1e329ca"

# Double quotes are required here so #{version} is interpolated.
relative_path "git-#{version}"

# Point the toolchain at the embedded libs shipped with the package.
env = {
  "LDFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "CFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "LD_RUN_PATH" => "#{install_dir}/embedded/lib",
}

build do
  command ["./configure",
           "--prefix=#{install_dir}/embedded",
           "--with-curl=#{install_dir}/embedded",
           "--with-ssl=#{install_dir}/embedded",
           "--with-zlib=#{install_dir}/embedded"].join(" "), :env => env

  # Ugly hack because ./configure does not pick these up from the env
  block do
    open(File.join(project_dir, "config.mak.autogen"), "a") do |file|
      file.print <<-EOH
# Added by Omnibus git software definition git.rb
NO_PERL=YesPlease
NO_EXPAT=YesPlease
NO_TCLTK=YesPlease
NO_GETTEXT=YesPlease
NO_PYTHON=YesPlease
      EOH
    end
  end

  command "make -j #{max_build_jobs}", :env => env
  command "make install"
end
Mention Git version only once
# Omnibus software definition for building git from source.
name "git"
version "1.8.5.3"

dependency "zlib"
dependency "openssl"
dependency "curl"

source :url => "https://git-core.googlecode.com/files/git-#{version}.tar.gz",
       :md5 => "57b966065882f83ef5879620a1e329ca"

# BUG FIX: this was previously 'git-#{version}' in single quotes, which
# does NOT interpolate — the literal string "git-\#{version}" was used as
# the path. Double quotes are required for interpolation.
relative_path "git-#{version}"

# Point the toolchain at the embedded libs shipped with the package.
env = {
  "LDFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "CFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "LD_RUN_PATH" => "#{install_dir}/embedded/lib",
}

build do
  command ["./configure",
           "--prefix=#{install_dir}/embedded",
           "--with-curl=#{install_dir}/embedded",
           "--with-ssl=#{install_dir}/embedded",
           "--with-zlib=#{install_dir}/embedded"].join(" "), :env => env

  # Ugly hack because ./configure does not pick these up from the env
  block do
    open(File.join(project_dir, "config.mak.autogen"), "a") do |file|
      file.print <<-EOH
# Added by Omnibus git software definition git.rb
NO_PERL=YesPlease
NO_EXPAT=YesPlease
NO_TCLTK=YesPlease
NO_GETTEXT=YesPlease
NO_PYTHON=YesPlease
      EOH
    end
  end

  command "make -j #{max_build_jobs}", :env => env
  command "make install"
end
|
require 'spec_helper'

describe ActiveForce::SObject do
  # Stub the Salesforce client globally for each example; the constant is
  # removed again afterwards so other specs are unaffected.
  before do
    ::Client = double('Client')
  end
  after do
    Object.send :remove_const, 'Client'
  end

  describe ".new" do
    it "create with valid values" do
      @SObject = Whizbang.new
      expect(@SObject).to be_an_instance_of Whizbang
    end
  end

  describe ".build" do
    let(:sobject_hash) { YAML.load(fixture('sobject/single_sobject_hash')) }
    it "build a valid sobject from a JSON" do
      expect(Whizbang.build sobject_hash).to be_an_instance_of Whizbang
    end
  end

  describe ".field" do
    # Each declared field should register a Salesforce label mapping...
    it "add a mappings" do
      expect(Whizbang.mappings).to include(
        checkbox: 'Checkbox_Label',
        text: 'Text_Label',
        date: 'Date_Label',
        datetime: 'DateTime_Label',
        picklist_multiselect: 'Picklist_Multiselect_Label'
      )
    end
    # ...and an attribute of the same name on the model.
    it "set an attribute" do
      %w[checkbox text date datetime picklist_multiselect].each do |name|
        expect(Whizbang.attribute_names).to include(name)
      end
    end
  end

  describe '#create' do
    subject do
      Whizbang.new
    end
    # The stubbed client returns 'id' as the new record's id.
    before do
      Client.should_receive(:create!).and_return('id')
    end
    it 'delegates to the Client with create!' do
      subject.create
    end
    it 'sets the id' do
      subject.create
      expect(subject.id).to eq('id')
    end
  end

  describe "#count" do
    # Shape mirrors Restforce's COUNT() query response.
    let(:count_response){ [Restforce::Mash.new(expr0: 1)] }
    it "responds to count" do
      Whizbang.should respond_to(:count)
    end
    it "sends the query to the client" do
      Client.should_receive(:query).and_return(count_response)
      expect(Whizbang.count).to eq(1)
    end
  end

  describe "#find_by" do
    it "responds to find_by" do
      Whizbang.should respond_to(:find_by)
    end
  end
end
Another spec for find_by.
require 'spec_helper'

describe ActiveForce::SObject do
  # Stub the Salesforce client globally for each example; the constant is
  # removed again afterwards so other specs are unaffected.
  before do
    ::Client = double('Client')
  end
  after do
    Object.send :remove_const, 'Client'
  end

  describe ".new" do
    it "create with valid values" do
      @SObject = Whizbang.new
      expect(@SObject).to be_an_instance_of Whizbang
    end
  end

  describe ".build" do
    let(:sobject_hash) { YAML.load(fixture('sobject/single_sobject_hash')) }
    it "build a valid sobject from a JSON" do
      expect(Whizbang.build sobject_hash).to be_an_instance_of Whizbang
    end
  end

  describe ".field" do
    # Each declared field should register a Salesforce label mapping...
    it "add a mappings" do
      expect(Whizbang.mappings).to include(
        checkbox: 'Checkbox_Label',
        text: 'Text_Label',
        date: 'Date_Label',
        datetime: 'DateTime_Label',
        picklist_multiselect: 'Picklist_Multiselect_Label'
      )
    end
    # ...and an attribute of the same name on the model.
    it "set an attribute" do
      %w[checkbox text date datetime picklist_multiselect].each do |name|
        expect(Whizbang.attribute_names).to include(name)
      end
    end
  end

  describe '#create' do
    subject do
      Whizbang.new
    end
    # The stubbed client returns 'id' as the new record's id.
    before do
      Client.should_receive(:create!).and_return('id')
    end
    it 'delegates to the Client with create!' do
      subject.create
    end
    it 'sets the id' do
      subject.create
      expect(subject.id).to eq('id')
    end
  end

  describe "#count" do
    # Shape mirrors Restforce's COUNT() query response.
    let(:count_response){ [Restforce::Mash.new(expr0: 1)] }
    it "responds to count" do
      Whizbang.should respond_to(:count)
    end
    it "sends the query to the client" do
      Client.should_receive(:query).and_return(count_response)
      expect(Whizbang.count).to eq(1)
    end
  end

  describe "#find_by" do
    it "responds to find_by" do
      Whizbang.should respond_to(:find_by)
    end
    # find_by with conditions should issue a query through the client.
    it "should query the client" do
      Client.should_receive(:query)
      Whizbang.find_by id: 123, text: "foo"
    end
  end
end
|
# Production environment settings.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
Add secret key to production.rb
# Production environment settings.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Session/cookie signing secret comes from the environment.
  # NOTE(review): there is no fallback — confirm the deploy environment
  # always sets SECRET_KEY_BASE, otherwise the app cannot sign cookies.
  config.secret_key_base = ENV["SECRET_KEY_BASE"]
end
|
# Copyright 2010 The Googlyscript Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

class Googly
  # Current gem version, read by the gemspec.
  VERSION = "0.0.7.dev"
  #TODO Thinking about these...
  # COMPILER_VERSION = ??
  # LIBRARY_VERSION = ??
  # TEMPLATES_VERSION = ??
end
version bump for gem release
# Copyright 2010 The Googlyscript Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

class Googly
  # Current gem version, read by the gemspec.
  VERSION = "0.0.8.dev"
  #TODO Thinking about these...
  # COMPILER_VERSION = ??
  # LIBRARY_VERSION = ??
  # TEMPLATES_VERSION = ??
end
|
# Specs for jira::service: verifies the service resource and the init
# script / systemd unit file generated on each supported operating system.
# Fix: removed the stray spaces after `merge(` in the Ubuntu fact overrides
# (Ruby style: no space inside method-call parentheses).
require 'spec_helper.rb'

describe 'jira' do
  describe 'jira::service' do
    context 'supported operating systems' do
      on_supported_os.each do |os, facts|
        context "on #{os}" do
          let(:facts) do
            # Synthesise the structured `os` fact from the legacy flat
            # `osfamily` fact so both lookup styles work in the manifests.
            facts.merge(os: { family: facts['osfamily'] })
          end
          let(:params) do
            { javahome: '/opt/java' }
          end
          let(:pre_condition) do
            'package { "jdk": }'
          end

          if os == 'RedHat'
            context 'default params' do
              it { is_expected.to contain_service('jira') }
              it { is_expected.to compile.with_all_deps }
              it do
                is_expected.to contain_file('/etc/init.d/jira').
                  with_content(%r{Short-Description: Start up JIRA}).
                  with_content(%r{lockfile=/var/lock/subsys/jira})
              end
              it do
                is_expected.not_to contain_file('/usr/lib/systemd/system/jira.service').
                  with_content(%r{Atlassian Systemd Jira Service})
              end
              it { is_expected.not_to contain_exec('refresh_systemd') }
            end
          end

          if os == 'Debian'
            context 'lockfile on Debian' do
              it { is_expected.to compile.with_all_deps }
              it do
                is_expected.to contain_file('/etc/init.d/jira').
                  with_content(%r{/var/lock/jira})
              end
            end
          end

          if os =~ %r{ubuntu}
            context 'default params' do
              it { is_expected.to compile.with_all_deps }
            end
          end

          if os =~ %r{ubuntu-12}
            context 'default params' do
              let(:facts) do
                facts.merge(operatingsystem: 'Ubuntu', operatingsystemmajrelease: '12.04')
              end

              it { is_expected.not_to contain_file('/lib/systemd/system/jira.service') }
            end
          end

          if os =~ %r{ubuntu-14}
            context 'default params' do
              let(:facts) do
                facts.merge(operatingsystem: 'Ubuntu', operatingsystemmajrelease: '14.04')
              end

              it { is_expected.not_to contain_file('/lib/systemd/system/jira.service') }
            end
          end

          if os =~ %r{ubuntu-16}
            context 'default params' do
              let(:facts) do
                facts.merge(operatingsystem: 'Ubuntu', operatingsystemmajrelease: '16.04')
              end

              # Unlike 12.04/14.04 above, 16.04 is expected to get the unit file.
              it { is_expected.to contain_file('/lib/systemd/system/jira.service') }
            end
          end

          context 'overwriting service_manage param' do
            let(:params) do
              super().merge(service_manage: false)
            end

            it { is_expected.not_to contain_service('jira') }
          end

          context 'overwriting service params' do
            let(:params) do
              super().merge(
                service_ensure: 'stopped',
                service_enable: false,
                service_subscribe: 'Package[jdk]'
              )
            end

            it do
              is_expected.to contain_service('jira').with('ensure' => 'stopped',
                                                          'enable' => 'false',
                                                          'notify' => nil,
                                                          'subscribe' => 'Package[jdk]')
            end
          end

          context 'RedHat/CentOS 7 systemd init script' do
            let(:facts) do
              {
                osfamily: 'RedHat',
                operatingsystemmajrelease: '7',
                os: { family: 'RedHat' }
              }
            end

            it do
              is_expected.to contain_file('/usr/lib/systemd/system/jira.service').
                with_content(%r{Atlassian Systemd Jira Service})
            end

            it { is_expected.to contain_exec('refresh_systemd') }
          end
        end
      end
    end
  end
end
Removed whitespace style errors.
# jira::service unit tests: for every supported OS fact set, assert the
# expected service resource, SysV init script and/or systemd unit file.
require 'spec_helper.rb'

describe 'jira' do
  describe 'jira::service' do
    context 'supported operating systems' do
      on_supported_os.each do |os, facts|
        context "on #{os}" do
          let(:facts) do
            # Add the structured `os` fact, derived from the flat `osfamily`.
            facts.merge(os: { family: facts['osfamily'] })
          end
          let(:params) do
            { javahome: '/opt/java' }
          end
          let(:pre_condition) do
            'package { "jdk": }'
          end

          if os == 'RedHat'
            context 'default params' do
              it { is_expected.to contain_service('jira') }
              it { is_expected.to compile.with_all_deps }
              it do
                is_expected.to contain_file('/etc/init.d/jira').
                  with_content(%r{Short-Description: Start up JIRA}).
                  with_content(%r{lockfile=/var/lock/subsys/jira})
              end
              it do
                is_expected.not_to contain_file('/usr/lib/systemd/system/jira.service').
                  with_content(%r{Atlassian Systemd Jira Service})
              end
              it { is_expected.not_to contain_exec('refresh_systemd') }
            end
          end

          if os == 'Debian'
            context 'lockfile on Debian' do
              it { is_expected.to compile.with_all_deps }
              it do
                is_expected.to contain_file('/etc/init.d/jira').
                  with_content(%r{/var/lock/jira})
              end
            end
          end

          if os =~ %r{ubuntu}
            context 'default params' do
              it { is_expected.to compile.with_all_deps }
            end
          end

          if os =~ %r{ubuntu-12}
            context 'default params' do
              let(:facts) do
                facts.merge(operatingsystem: 'Ubuntu', operatingsystemmajrelease: '12.04')
              end

              it { is_expected.not_to contain_file('/lib/systemd/system/jira.service') }
            end
          end

          if os =~ %r{ubuntu-14}
            context 'default params' do
              let(:facts) do
                facts.merge(operatingsystem: 'Ubuntu', operatingsystemmajrelease: '14.04')
              end

              it { is_expected.not_to contain_file('/lib/systemd/system/jira.service') }
            end
          end

          if os =~ %r{ubuntu-16}
            context 'default params' do
              let(:facts) do
                facts.merge(operatingsystem: 'Ubuntu', operatingsystemmajrelease: '16.04')
              end

              it { is_expected.to contain_file('/lib/systemd/system/jira.service') }
            end
          end

          context 'overwriting service_manage param' do
            let(:params) do
              super().merge(service_manage: false)
            end

            it { is_expected.not_to contain_service('jira') }
          end

          context 'overwriting service params' do
            let(:params) do
              super().merge(
                service_ensure: 'stopped',
                service_enable: false,
                service_subscribe: 'Package[jdk]'
              )
            end

            it do
              is_expected.to contain_service('jira').with('ensure' => 'stopped',
                                                          'enable' => 'false',
                                                          'notify' => nil,
                                                          'subscribe' => 'Package[jdk]')
            end
          end

          context 'RedHat/CentOS 7 systemd init script' do
            let(:facts) do
              {
                osfamily: 'RedHat',
                operatingsystemmajrelease: '7',
                os: { family: 'RedHat' }
              }
            end

            it do
              is_expected.to contain_file('/usr/lib/systemd/system/jira.service').
                with_content(%r{Atlassian Systemd Jira Service})
            end

            it { is_expected.to contain_exec('refresh_systemd') }
          end
        end
      end
    end
  end
end
|
# Production environment configuration.
# Fix: `Rails.application,configure` used a comma instead of a dot, which is
# not a method call at all — corrected to `Rails.application.configure`.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/environment.rb
  # The production environment is meant for finished, "live" apps.

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  # config.action_controller.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  config.action_view.cache_template_loading = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and javascripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!
end
Added settings for production.
# Production environment configuration (asset pipeline + Gmail SMTP mailer).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or nginx will already do this).
  config.serve_static_assets = false

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Live-compile any asset missing from the precompiled set.
  # NOTE(review): `true` contradicts the stock comment this file originally
  # carried ("Do not fallback to assets pipeline..."); live compilation in
  # production is slow — confirm this is intentional (e.g. a Heroku-style deploy).
  config.assets.compile = true

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )
  # config.assets.precompile = ['*.js', '*.css', '*.css.erb']

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Outgoing mail is relayed through Gmail's SMTP server; the domain and
  # credentials are read from Rails secrets (config/secrets.yml).
  config.action_mailer.smtp_settings = {
    address: "smtp.gmail.com",
    port: 587,
    domain: Rails.application.secrets.domain_name,
    authentication: "plain",
    enable_starttls_auto: true,
    user_name: Rails.application.secrets.email_provider_username,
    password: Rails.application.secrets.email_provider_password
  }

  # ActionMailer Config
  config.action_mailer.default_url_options = { :host => Rails.application.secrets.domain_name }
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  # Delivery failures are silently ignored (set to true to surface them).
  config.action_mailer.raise_delivery_errors = false

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
|
module Gordon
  # Released gem version string.
  VERSION = "0.0.2"
end
Bump to 0.0.3.
module Gordon
  # Released gem version string.
  VERSION = "0.0.3"
end
|
# TypeFactory#type specs: a Hash argument is treated as a tuple type,
# optionally with an explicit name.
require 'spec_helper'

module Qrb
  describe TypeFactory, "Factory#tuple" do
    let(:factory)  { TypeFactory.new }
    let(:expected) { factory.tuple(r: Integer) }

    context 'when use with {r: Integer}' do
      subject { factory.type(r: Integer) }

      it { should eq(expected) }
    end

    context 'when use with {r: Integer} and a name' do
      subject { factory.type({r: Integer}, "MyTuple") }

      it { should eq(expected) }

      it 'should have the correct name' do
        subject.name.should eq("MyTuple")
      end
    end
  end
end
Make sure Range can be used for tuple attribute types.
# TypeFactory#type specs: a Hash argument is treated as a tuple type,
# optionally with an explicit name; attribute types may be Ranges.
require 'spec_helper'

module Qrb
  describe TypeFactory, "Factory#tuple" do
    let(:factory)  { TypeFactory.new }
    let(:expected) { factory.tuple(r: Integer) }

    context 'when use with {r: Integer}' do
      subject { factory.type(r: Integer) }

      it { should eq(expected) }
    end

    context 'when use with {r: Integer} and a name' do
      subject { factory.type({r: Integer}, "MyTuple") }

      it { should eq(expected) }

      it 'should have the correct name' do
        subject.name.should eq("MyTuple")
      end
    end

    context 'when use with {r: 0..255} and a name' do
      subject { factory.type({r: 0..255}, "MyTuple") }

      it { should be_a(TupleType) }

      it 'should have the correct constraint on r' do
        # In-range value is accepted, out-of-range value raises UpError.
        subject.up(r: 36)
        ->{
          subject.up(r: 543)
        }.should raise_error(UpError)
      end

      it 'should have the correct name' do
        subject.name.should eq("MyTuple")
      end
    end
  end
end
|
# Production environment configuration.
# Fix: the smtp_settings hash was missing a comma after `authentication: 'plain'`,
# which made this file a syntax error.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Outgoing mail: SMTP relay configured entirely from the environment.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    address: ENV['SMTP_MAIL_ADDR'],
    port: ENV['SMTP_MAIL_PORT'],
    domain: ENV['SMTP_MAIL_DOMAIN'],
    user_name: ENV['SMTP_MAIL_USER'],
    password: ENV['SMTP_MAIL_PASS'],
    authentication: 'plain', # the missing comma here broke parsing
    enable_starttls_auto: true
  }
end
Adds missing comma.
# Production environment configuration (SMTP mailer driven by ENV variables).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Outgoing mail: SMTP relay configured entirely from the environment.
  # NOTE(review): ENV values are Strings, so `port` is passed as a string —
  # presumably the mail library coerces it; confirm.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    address: ENV['SMTP_MAIL_ADDR'],
    port: ENV['SMTP_MAIL_PORT'],
    domain: ENV['SMTP_MAIL_DOMAIN'],
    user_name: ENV['SMTP_MAIL_USER'],
    password: ENV['SMTP_MAIL_PASS'],
    authentication: 'plain',
    enable_starttls_auto: true }
end
|
module Gordon
  # Released gem version string.
  VERSION = "0.0.8"
end
Bumped to 0.0.10.
module Gordon
  # Released gem version string.
  VERSION = "0.0.10"
end
|
# ArrayInclusionValidator specs against an anonymous Virtus model.
# Fix: the second example's description said "fails validation if the
# attribute is missing" while its expectation asserted validity; the
# description is corrected to match the asserted behaviour.
require 'rails_helper'

RSpec.describe ArrayInclusionValidator do
  let(:klass) {
    Class.new do
      include Virtus.model
      include ActiveModel::Validations

      attribute :wotsit, Array[String]

      validates :wotsit,
                array_inclusion: { in: ['OK', 'Also fine'] }
    end
  }

  subject { klass.new }

  it 'passes validation if the attribute is in the acceptable list' do
    subject.wotsit = 'OK'
    expect(subject).to be_valid
  end

  it 'passes validation if the attribute is missing' do
    subject.wotsit = nil
    expect(subject).to be_valid
  end

  it 'fails validation if the attribute is not in the acceptable list' do
    subject.wotsit = 'something else'
    expect(subject).not_to be_valid
  end
end
Correct specs for array inclusion validator
# ArrayInclusionValidator specs: every element of the array attribute must
# be drawn from the configured inclusion list.
require 'rails_helper'

RSpec.describe ArrayInclusionValidator do
  let(:klass) {
    Class.new do
      include Virtus.model
      include ActiveModel::Validations

      attribute :wotsit, Array[String]

      validates :wotsit,
                array_inclusion: { in: ['OK', 'Also fine'] }
    end
  }

  subject { klass.new }

  it 'passes validation if all attributes are in the acceptable list' do
    subject.wotsit = %w[ OK ]
    expect(subject).to be_valid
  end

  it 'fails validation if an attribute is not in the acceptable list' do
    subject.wotsit = %w[ OK bad ]
    expect(subject).not_to be_valid
  end
end
|
# Constellation::Config specs: file watching and data_store attribute writers.
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

describe Constellation::Config do
  before(:each) do
    @config = Constellation::Config.new
  end

  describe "#watch" do
    before(:each) do
      @file_name = "LogFile.txt"
      FileHelpers::create_file(@file_name)
    end

    after(:each) do
      FileHelpers::destroy_file(@file_name)
    end

    context "given a file, that does exist" do
      it "should add the file to the list of watched files" do
        @config.watch(@file_name)
        @config.instance_variable_get("@watched_files").should include(@file_name)
      end
    end

    context "given a file, that has added twice to the watched files list" do
      it "should raise an error" do
        lambda {
          @config.watch(@file_name)
          @config.watch(@file_name)
        }.should raise_exception
      end
    end

    context "given a file, that does not exist" do
      it "should raise an error" do
        lambda {
          @config.watch("DummyLogFile.txt")
        }.should raise_exception
      end
    end
  end

  describe "#freeze!" do
    # Pending example: behaviour is declared but not yet specified.
    it "should initialize a new DataStore object"
  end

  describe "#data_store" do
    describe "#adapter=" do
      it "should set the used data_store adapter" do
        @config.data_store.adapter = :cassandra
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:adapter].should eql("cassandra")
      end
    end

    describe "#host=" do
      it "should set the used data_store host" do
        @config.data_store.host = :localhost
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:host].should eql("localhost")
      end
    end

    describe "#namespace=" do
      it "should set the used data_store namespace" do
        @config.data_store.namespace = :constellation
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:namespace].should eql("constellation")
      end
    end

    describe "#user=" do
      it "should set the used data_store user" do
        @config.data_store.username = "admin"
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:username].should eql("admin")
      end
    end

    describe "#password=" do
      it "should set the used data_store password" do
        @config.data_store.password = "secret"
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:password].should eql("secret")
      end
    end
  end
end
Spec freezing the config
# Constellation::Config specs: file watching, config freezing, and the
# data_store attribute writers.
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

describe Constellation::Config do
  before(:each) do
    @config = Constellation::Config.new
  end

  describe "#watch" do
    before(:each) do
      @file_name = "LogFile.txt"
      FileHelpers::create_file(@file_name)
    end

    after(:each) do
      FileHelpers::destroy_file(@file_name)
    end

    context "given a file, that does exist" do
      it "should add the file to the list of watched files" do
        @config.watch(@file_name)
        @config.instance_variable_get("@watched_files").should include(@file_name)
      end
    end

    context "given a file, that has added twice to the watched files list" do
      it "should raise an error" do
        lambda {
          @config.watch(@file_name)
          @config.watch(@file_name)
        }.should raise_exception
      end
    end

    context "given a file, that does not exist" do
      it "should raise an error" do
        lambda {
          @config.watch("DummyLogFile.txt")
        }.should raise_exception
      end
    end
  end

  describe "#freeze!" do
    it "should initialize a new DataStore object" do
      @config.data_store.adapter = :cassandra
      # Stub out every writer the freeze step forwards configuration to.
      @data_store_mock = mock(Constellation::DataStores::Cassandra)
      @data_store_mock.stub!(:host=)
      @data_store_mock.stub!(:username=)
      @data_store_mock.stub!(:password=)
      @data_store_mock.stub!(:adapter=)
      @data_store_mock.stub!(:namespace=)
      Constellation::DataStores::Cassandra.should_receive(:new).and_return(@data_store_mock)
      @config.freeze!
    end
  end

  describe "#data_store" do
    describe "#adapter=" do
      it "should set the used data_store adapter" do
        @config.data_store.adapter = :cassandra
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:adapter].should eql("cassandra")
      end
    end

    describe "#host=" do
      it "should set the used data_store host" do
        @config.data_store.host = :localhost
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:host].should eql("localhost")
      end
    end

    describe "#namespace=" do
      it "should set the used data_store namespace" do
        @config.data_store.namespace = :constellation
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:namespace].should eql("constellation")
      end
    end

    describe "#user=" do
      it "should set the used data_store user" do
        @config.data_store.username = "admin"
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:username].should eql("admin")
      end
    end

    describe "#password=" do
      it "should set the used data_store password" do
        @config.data_store.password = "secret"
        @data_store = @config.instance_variable_get("@data_store")
        @data_store[:password].should eql("secret")
      end
    end
  end
end
# Dynamoid::Persistence specs: table management, save/destroy, attribute
# dumping/undumping, callbacks and dirty tracking.
# Fix: two examples shared the description 'works with a HashWithIndifferentAccess';
# the duplicate passed attributes ("test", "hello") that are not defined on
# Address, so it has been removed (keeping only the valid one).
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

describe "Dynamoid::Persistence" do
  let(:document_class) { Class.new.send :include, Dynamoid::Document }

  before do
    # Pin partition selection so generated ids are deterministic in tests.
    Random.stubs(:rand).with(Dynamoid::Config.partition_size).returns(0)
    @address = Address.new
  end

  context 'without AWS keys' do
    unless ENV['ACCESS_KEY'] && ENV['SECRET_KEY']
      before do
        Dynamoid::Adapter.delete_table(Address.table_name) if Dynamoid::Adapter.list_tables.include?(Address.table_name)
      end

      it 'creates a table' do
        Address.create_table(Address.table_name)
        Dynamoid::Adapter.list_tables.should include 'dynamoid_tests_addresses'
      end

      it 'checks if a table already exists' do
        Address.create_table(Address.table_name)
        Address.table_exists?(Address.table_name).should be_true
        Address.table_exists?('crazytable').should be_false
      end
    end
  end

  it 'assigns itself an id on save' do
    @address.save
    Dynamoid::Adapter.read("dynamoid_tests_addresses", @address.id)[:id].should == @address.id
  end

  it 'assigns itself an id on save only if it does not have one' do
    @address.id = 'test123'
    @address.save
    Dynamoid::Adapter.read("dynamoid_tests_addresses", 'test123').should_not be_empty
  end

  it 'has a table name' do
    Address.table_name.should == 'dynamoid_tests_addresses'
  end

  it 'saves indexes along with itself' do
    @user = User.new(:name => 'Josh')
    @user.expects(:save_indexes).once.returns(true)
    @user.save
  end

  it 'deletes an item completely' do
    @user = User.create(:name => 'Josh')
    @user.destroy
    Dynamoid::Adapter.read("dynamoid_tests_users", @user.id).should be_nil
  end

  it 'keeps string attributes as strings' do
    @user = User.new(:name => 'Josh')
    @user.send(:dump)[:name].should == 'Josh'
  end

  it 'dumps datetime attributes' do
    @user = User.create(:name => 'Josh')
    @user.send(:dump)[:name].should == 'Josh'
  end

  it 'dumps integer attributes' do
    @subscription = Subscription.create(:length => 10)
    @subscription.send(:dump)[:length].should == 10
  end

  it 'dumps set attributes' do
    @subscription = Subscription.create(:length => 10)
    @magazine = @subscription.magazine.create
    @subscription.send(:dump)[:magazine_ids].should == Set[@magazine.id]
  end

  it 'handles nil attributes properly' do
    Address.undump(nil).should be_a(Hash)
  end

  it 'dumps and undump a serialized field' do
    @address.options = (hash = {:x => [1, 2], "foobar" => 3.14})
    Address.undump(@address.send(:dump))[:options].should == hash
  end

  it 'loads a hash into a serialized field' do
    hash = {foo: :bar}
    Address.new(options: hash).options.should == hash
  end

  it 'loads attributes from a hash' do
    @time = DateTime.now
    @hash = {:name => 'Josh', :created_at => @time.to_f}
    User.undump(@hash)[:name].should == 'Josh'
    User.undump(@hash)[:created_at].to_f == @time.to_f
  end

  it 'runs the before_create callback only once' do
    document_class.before_create { doing_before_create }
    document_class.any_instance.expects(:doing_before_create)
    document_class.create
  end

  it 'runs after save callbacks when doing #create' do
    document_class.after_create { doing_after_create }
    document_class.any_instance.expects(:doing_after_create)
    document_class.create
  end

  it 'runs after save callbacks when doing #save' do
    document_class.after_create { doing_after_create }
    document_class.any_instance.expects(:doing_after_create)
    document_class.new.save
  end

  it 'tracks previous changes on save or update' do
    @address.city = 'Chicago'
    @address.save
    @address.city = 'San Francisco'
    @address.save
    @address.city_was.should == 'Chicago'
  end

  it 'works with a HashWithIndifferentAccess' do
    hash = ActiveSupport::HashWithIndifferentAccess.new("city" => "Atlanta")
    lambda {Address.create(hash)}.should_not raise_error
  end
end
Removed a duplicated, invalid test.
# Dynamoid::Persistence specs covering table management, save/destroy,
# attribute dumping/undumping, lifecycle callbacks and dirty tracking.
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

describe "Dynamoid::Persistence" do
  let(:document_class) { Class.new.send :include, Dynamoid::Document }

  before do
    # Pin partition selection so generated ids are deterministic in tests.
    Random.stubs(:rand).with(Dynamoid::Config.partition_size).returns(0)
    @address = Address.new
  end

  context 'without AWS keys' do
    unless ENV['ACCESS_KEY'] && ENV['SECRET_KEY']
      before do
        Dynamoid::Adapter.delete_table(Address.table_name) if Dynamoid::Adapter.list_tables.include?(Address.table_name)
      end

      it 'creates a table' do
        Address.create_table(Address.table_name)
        Dynamoid::Adapter.list_tables.should include 'dynamoid_tests_addresses'
      end

      it 'checks if a table already exists' do
        Address.create_table(Address.table_name)
        Address.table_exists?(Address.table_name).should be_true
        Address.table_exists?('crazytable').should be_false
      end
    end
  end

  it 'assigns itself an id on save' do
    @address.save
    Dynamoid::Adapter.read("dynamoid_tests_addresses", @address.id)[:id].should == @address.id
  end

  it 'assigns itself an id on save only if it does not have one' do
    @address.id = 'test123'
    @address.save
    Dynamoid::Adapter.read("dynamoid_tests_addresses", 'test123').should_not be_empty
  end

  it 'has a table name' do
    Address.table_name.should == 'dynamoid_tests_addresses'
  end

  it 'saves indexes along with itself' do
    @user = User.new(:name => 'Josh')
    @user.expects(:save_indexes).once.returns(true)
    @user.save
  end

  it 'deletes an item completely' do
    @user = User.create(:name => 'Josh')
    @user.destroy
    Dynamoid::Adapter.read("dynamoid_tests_users", @user.id).should be_nil
  end

  it 'keeps string attributes as strings' do
    @user = User.new(:name => 'Josh')
    @user.send(:dump)[:name].should == 'Josh'
  end

  it 'dumps datetime attributes' do
    @user = User.create(:name => 'Josh')
    @user.send(:dump)[:name].should == 'Josh'
  end

  it 'dumps integer attributes' do
    @subscription = Subscription.create(:length => 10)
    @subscription.send(:dump)[:length].should == 10
  end

  it 'dumps set attributes' do
    @subscription = Subscription.create(:length => 10)
    @magazine = @subscription.magazine.create
    @subscription.send(:dump)[:magazine_ids].should == Set[@magazine.id]
  end

  it 'handles nil attributes properly' do
    Address.undump(nil).should be_a(Hash)
  end

  it 'dumps and undump a serialized field' do
    @address.options = (hash = {:x => [1, 2], "foobar" => 3.14})
    Address.undump(@address.send(:dump))[:options].should == hash
  end

  it 'loads a hash into a serialized field' do
    hash = {foo: :bar}
    Address.new(options: hash).options.should == hash
  end

  it 'loads attributes from a hash' do
    @time = DateTime.now
    @hash = {:name => 'Josh', :created_at => @time.to_f}
    User.undump(@hash)[:name].should == 'Josh'
    User.undump(@hash)[:created_at].to_f == @time.to_f
  end

  it 'runs the before_create callback only once' do
    document_class.before_create { doing_before_create }
    document_class.any_instance.expects(:doing_before_create)
    document_class.create
  end

  it 'runs after save callbacks when doing #create' do
    document_class.after_create { doing_after_create }
    document_class.any_instance.expects(:doing_after_create)
    document_class.create
  end

  it 'runs after save callbacks when doing #save' do
    document_class.after_create { doing_after_create }
    document_class.any_instance.expects(:doing_after_create)
    document_class.new.save
  end

  it 'tracks previous changes on save or update' do
    @address.city = 'Chicago'
    @address.save
    @address.city = 'San Francisco'
    @address.save
    @address.city_was.should == 'Chicago'
  end

  it 'works with a HashWithIndifferentAccess' do
    hash = ActiveSupport::HashWithIndifferentAccess.new("city" => "Atlanta")
    lambda {Address.create(hash)}.should_not raise_error
  end
end
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Fall back to the assets pipeline at runtime if a precompiled asset is
  # missed. NOTE(review): the previous comment said "Do not fallback", which
  # contradicted the `true` value set below; comment corrected to match.
  config.assets.compile = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Configure the Elasticsearch client used by Elasticsearch::Model; the host
  # is read from SEARCHBOX_URL (presumably set by a hosted-Elasticsearch
  # add-on — TODO confirm against the deployment environment).
  Elasticsearch::Model.client = Elasticsearch::Client.new host: ENV['SEARCHBOX_URL']
end
Production configuration fix, mark 2.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Fall back to the assets pipeline at runtime if a precompiled asset is
  # missed. NOTE(review): the previous comment said "Do not fallback", which
  # contradicted the `true` value set below; comment corrected to match.
  config.assets.compile = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Configure the Elasticsearch client used by Elasticsearch::Model; the host
  # is read from SEARCHBOX_URL (presumably set by a hosted-Elasticsearch
  # add-on — TODO confirm against the deployment environment).
  Elasticsearch::Model.client = Elasticsearch::Client.new host: ENV['SEARCHBOX_URL']
end
|
RSpec.describe 'metasploit-cache', :content do
def db_content_load
db_content_purge
db_content_load_schema
expect(Metasploit::Cache::Module::Class::Name.count).to eq(0)
end
def db_content_load_schema
begin
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations.fetch('content'))
ActiveRecord::Schema.verbose = false
db_schema_load
ensure
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations.fetch('test'))
end
end
def db_content_purge
ActiveRecord::Tasks::DatabaseTasks.root = Metasploit::Cache::Engine.root
ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations.fetch('content')
end
def db_schema_load
file = File.join('spec', 'dummy', 'db', 'schema.rb')
if File.exist?(file)
load(file)
else
abort %{#{file} doesn't exist yet. Run `rake db:migrate` to create it, then try again. If you do not intend to use a database, you should instead alter #{Rails.root}/config/application.rb to limit the frameworks that will be loaded.}
end
end
#
# Callbacks
#
before(:all) do
db_content_load
metasploit_framework_root = Metasploit::Framework::Engine.root
require 'childprocess'
metasploit_cache_load = ChildProcess.build(
'bundle',
'exec',
'metasploit-cache',
'load',
metasploit_framework_root.join('modules').to_path,
'--database-yaml', 'config/database.yml',
'--environment', 'content',
'--include', metasploit_framework_root.to_path,
metasploit_framework_root.join('app', 'validators').to_path,
'--require', 'metasploit/framework',
'metasploit/framework/executable_path_validator',
'metasploit/framework/file_path_validator',
'--gem', 'metasploit-framewor',
'--logger-severity', 'ERROR',
'--name', 'modules',
'--only-type-directories', 'encoders', 'nops'
)
@metasploit_cache_load_out = Tempfile.new(['metasploit-cache-load', '.log'])
@metasploit_cache_load_out.sync = true
metasploit_cache_load.cwd = Metasploit::Cache::Engine.root.join('spec', 'dummy').to_path
metasploit_cache_load.io.stdout = @metasploit_cache_load_out
require 'benchmark'
Benchmark.bm do |report|
report.report do
metasploit_cache_load.start
metasploit_cache_load.poll_for_exit(5.minutes)
end
end
expect(metasploit_cache_load.exit_code).to eq(0),
->(){
@metasploit_cache_load_out.rewind
@metasploit_cache_load_out.read
}
end
after(:all) {
@metasploit_cache_load_out.rewind
puts @metasploit_cache_load_out.read
# close and delete
@metasploit_cache_load_out.close!
}
# :nocov:
# Can't just use the tag on the context because the below code will still run even if tag is filtered out
unless Bundler.settings.without.include? 'content'
context 'metasploit-framework', :content do
module_path_real_paths = Metasploit::Framework::Engine.paths['modules'].existent_directories
module_path_real_paths.each do |module_path_real_path|
module_path_real_pathname = Pathname.new(module_path_real_path)
module_path_relative_pathname = module_path_real_pathname.relative_path_from(
Metasploit::Framework::Engine.root
)
# use relative pathname so that context name is not dependent on build directory
context module_path_relative_pathname.to_s do
#
# Shared Examples
#
shared_examples_for 'can use full names' do |module_type, skip: nil|
context_options = {}
if skip
context_options = {
skip: skip
}
end
context module_type, context_options do
type_directory = Metasploit::Cache::Module::Ancestor::DIRECTORY_BY_MODULE_TYPE.fetch(module_type)
real_type_pathname = module_path_real_pathname.join(type_directory)
rule = File::Find.new(
ftype: 'file',
pattern: "*#{Metasploit::Cache::Module::Ancestor::EXTENSION}",
path: real_type_pathname.to_path
)
rule.find do |real_path|
real_pathname = Pathname.new(real_path)
reference_path = real_pathname.relative_path_from(real_type_pathname).to_s
reference_name = reference_path.chomp(File.extname(reference_path))
context reference_name do
full_name = "#{module_type}/#{reference_name}"
it "can be `use`d" do
metasploit_framework_root = Metasploit::Framework::Engine.root
metasploit_cache_use = ChildProcess.build(
'bundle',
'exec',
'metasploit-cache',
'use',
full_name,
'--database-yaml', 'config/database.yml',
'--environment', 'content',
'--include', metasploit_framework_root.to_path,
metasploit_framework_root.join('app', 'validators').to_path,
'--require', 'metasploit/framework',
'metasploit/framework/executable_path_validator',
'metasploit/framework/file_path_validator',
'--logger-severity', 'ERROR'
)
metasploit_cache_use_out = Tempfile.new(['metasploit-cache-use', '.log'])
metasploit_cache_use_out.sync = true
metasploit_cache_use.cwd = Metasploit::Cache::Engine.root.join('spec', 'dummy').to_path
metasploit_cache_use.io.stdout = metasploit_cache_use_out
metasploit_cache_use.start
metasploit_cache_use.wait
expect(metasploit_cache_use.exit_code).to eq(0), ->(){
metasploit_cache_use_out.rewind
"metasploit-cache use #{full_name} exited with non-zero status:\n#{metasploit_cache_use_out.read}"
}
end
end
end
end
end
include_examples 'can use full names',
'auxiliary',
skip: "require 'metasploit/framework' takes ~3 seconds, so each test is ~5 seconds, which " \
'means testing all auxiliary Metasploit Modules would take > 1 hour. Skip until ' \
'metasploit-framework loads faster. Add auxiliary to --only-type-directories when ' \
'removing this skip.'
include_examples 'can use full names', 'encoder'
include_examples 'can use full names',
'exploit',
skip: "require 'metasploit/framework' takes ~3 seconds, so each test is ~5 seconds, which " \
'means testing all exploit Metasploit Modules would take > 3 hours. Skip until ' \
'metasploit-framework loads faster. Add exploits to --only-type-directories when ' \
'removing this skip.'
include_examples 'can use full names', 'nop'
include_examples 'can use full names',
'post',
skip: "require 'metasploit/framework' takes ~3 seconds, so each test is ~5 seconds, which" \
'means testing all posts takes > 8 minutes. Skip until metasploit-framework loads ' \
'faster. Add post to --only-type-directories when removing this skip.'
end
end
end
end
end
Run 5 random load tests for each module type
MSP-12460
Most module types have too many modules to test in a reasonable amount
of time. Instead of disabling the long-running module type contexts,
run 5 randomly selected modules of each type.
RSpec.describe 'metasploit-cache', :content do
def db_content_load
db_content_purge
db_content_load_schema
expect(Metasploit::Cache::Module::Class::Name.count).to eq(0)
end
def db_content_load_schema
begin
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations.fetch('content'))
ActiveRecord::Schema.verbose = false
db_schema_load
ensure
ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations.fetch('test'))
end
end
def db_content_purge
ActiveRecord::Tasks::DatabaseTasks.root = Metasploit::Cache::Engine.root
ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations.fetch('content')
end
def db_schema_load
file = File.join('spec', 'dummy', 'db', 'schema.rb')
if File.exist?(file)
load(file)
else
abort %{#{file} doesn't exist yet. Run `rake db:migrate` to create it, then try again. If you do not intend to use a database, you should instead alter #{Rails.root}/config/application.rb to limit the frameworks that will be loaded.}
end
end
#
# Callbacks
#
before(:all) do
db_content_load
metasploit_framework_root = Metasploit::Framework::Engine.root
require 'childprocess'
metasploit_cache_load = ChildProcess.build(
'bundle',
'exec',
'metasploit-cache',
'load',
metasploit_framework_root.join('modules').to_path,
'--database-yaml', 'config/database.yml',
'--environment', 'content',
'--include', metasploit_framework_root.to_path,
metasploit_framework_root.join('app', 'validators').to_path,
'--require', 'metasploit/framework',
'metasploit/framework/executable_path_validator',
'metasploit/framework/file_path_validator',
'--gem', 'metasploit-framewor',
'--logger-severity', 'ERROR',
'--name', 'modules'
)
@metasploit_cache_load_out = Tempfile.new(['metasploit-cache-load', '.log'])
@metasploit_cache_load_out.sync = true
metasploit_cache_load.cwd = Metasploit::Cache::Engine.root.join('spec', 'dummy').to_path
metasploit_cache_load.io.stdout = @metasploit_cache_load_out
require 'benchmark'
Benchmark.bm do |report|
report.report do
metasploit_cache_load.start
metasploit_cache_load.poll_for_exit(5.minutes)
end
end
expect(metasploit_cache_load.exit_code).to eq(0),
->(){
@metasploit_cache_load_out.rewind
@metasploit_cache_load_out.read
}
end
after(:all) {
@metasploit_cache_load_out.rewind
puts @metasploit_cache_load_out.read
# close and delete
@metasploit_cache_load_out.close!
}
# :nocov:
# Can't just use the tag on the context because the below code will still run even if tag is filtered out
unless Bundler.settings.without.include? 'content'
context 'metasploit-framework', :content do
module_path_real_paths = Metasploit::Framework::Engine.paths['modules'].existent_directories
module_path_real_paths.each do |module_path_real_path|
module_path_real_pathname = Pathname.new(module_path_real_path)
module_path_relative_pathname = module_path_real_pathname.relative_path_from(
Metasploit::Framework::Engine.root
)
# use relative pathname so that context name is not dependent on build directory
context module_path_relative_pathname.to_s do
#
# Shared Examples
#
shared_examples_for 'can use full names' do |module_type, max_run_count: nil|
context module_type do
type_directory = Metasploit::Cache::Module::Ancestor::DIRECTORY_BY_MODULE_TYPE.fetch(module_type)
real_type_pathname = module_path_real_pathname.join(type_directory)
rule = File::Find.new(
ftype: 'file',
pattern: "*#{Metasploit::Cache::Module::Ancestor::EXTENSION}",
path: real_type_pathname.to_path
)
run_count = 0
rule.find do |real_path|
real_pathname = Pathname.new(real_path)
reference_path = real_pathname.relative_path_from(real_type_pathname).to_s
reference_name = reference_path.chomp(File.extname(reference_path))
context reference_name do
if max_run_count
before(:each) do
run_count += 1
if run_count > max_run_count
skip "Skipping because #{max_run_count} #{module_type} Metasploit Modules have been tested " \
"already and testing them all takes too long"
end
end
end
full_name = "#{module_type}/#{reference_name}"
it "can be `use`d" do
metasploit_framework_root = Metasploit::Framework::Engine.root
metasploit_cache_use = ChildProcess.build(
'bundle',
'exec',
'metasploit-cache',
'use',
full_name,
'--database-yaml', 'config/database.yml',
'--environment', 'content',
'--include', metasploit_framework_root.to_path,
metasploit_framework_root.join('app', 'validators').to_path,
'--require', 'metasploit/framework',
'metasploit/framework/executable_path_validator',
'metasploit/framework/file_path_validator',
'--logger-severity', 'ERROR'
)
metasploit_cache_use_out = Tempfile.new(['metasploit-cache-use', '.log'])
metasploit_cache_use_out.sync = true
metasploit_cache_use.cwd = Metasploit::Cache::Engine.root.join('spec', 'dummy').to_path
metasploit_cache_use.io.stdout = metasploit_cache_use_out
metasploit_cache_use.start
metasploit_cache_use.wait
expect(metasploit_cache_use.exit_code).to eq(0), ->(){
metasploit_cache_use_out.rewind
"metasploit-cache use #{full_name} exited with non-zero status:\n#{metasploit_cache_use_out.read}"
}
end
end
end
end
end
include_examples 'can use full names',
'auxiliary',
max_run_count: 5
include_examples 'can use full names',
'encoder',
max_run_count: 5
include_examples 'can use full names',
'exploit',
max_run_count: 5
include_examples 'can use full names', 'nop'
include_examples 'can use full names',
'post',
max_run_count: 5
end
end
end
end
end |
# Production environment configuration for the Chat application (Rails 3.x
# style: serve_static_assets / assets.compress).
Chat::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false
  # Don't connect to the database or load the full environment when precompiling assets
  config.assets.initialize_on_precompile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to Rails.root.join("public/assets")
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify
end
Production mailer configuration
# Production environment configuration for the Chat application (Rails 3.x
# style), including the mailer's default URL options.
Chat::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false
  # Don't connect to the database or load the full environment when precompiling assets
  config.assets.initialize_on_precompile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to Rails.root.join("public/assets")
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # FIX(review): :host must be a bare hostname — including the "http://"
  # scheme here makes url_for generate URLs like "http://http://…". Use
  # :protocol if a non-default scheme is ever needed.
  config.action_mailer.default_url_options = { :host => 'chatmonkey.info' }
end
|
describe FactoryBot::Registry do
  # Shared fixtures: a fresh registry and a generic double, rebuilt lazily
  # for each example via `let`, exactly mirroring the per-example locals the
  # examples previously constructed inline.
  let(:registry) { FactoryBot::Registry.new("Great thing") }
  let(:registered_object) { double("registered object") }

  it "is an enumerable" do
    expect(registry).to be_kind_of(Enumerable)
  end

  it "finds a registered object" do
    registry.register(:object_name, registered_object)

    expect(registry.find(:object_name)).to eq registered_object
  end

  it "finds a registered object with square brackets" do
    registry.register(:object_name, registered_object)

    expect(registry[:object_name]).to eq registered_object
  end

  it "raises when an object cannot be found" do
    expect { registry.find(:object_name) }
      .to raise_error(KeyError, "Great thing not registered: \"object_name\"")
  end

  it "adds and returns the object registered" do
    expect(registry.register(:object_name, registered_object)).to eq registered_object
  end

  it "knows that an object is registered by symbol" do
    registry.register(:object_name, registered_object)

    expect(registry).to be_registered(:object_name)
  end

  it "knows that an object is registered by string" do
    registry.register(:object_name, registered_object)

    expect(registry).to be_registered("object_name")
  end

  it "knows when an object is not registered" do
    expect(registry).not_to be_registered("bogus")
  end

  it "iterates registered objects" do
    second_registered_object = double("second registered object")
    registry.register(:first_object, registered_object)
    registry.register(:second_object, second_registered_object)

    expect(registry.to_a).to eq [registered_object, second_registered_object]
  end

  it "does not include duplicate objects with registered under different names" do
    registry.register(:first_object, registered_object)
    registry.register(:second_object, registered_object)

    expect(registry.to_a).to eq [registered_object]
  end

  it "clears registered factories" do
    registry.register(:object_name, registered_object)
    registry.clear

    expect(registry.count).to be_zero
  end
end
Add back did_you_mean spec
This essentially reverts commit 3a4d6f48. We removed that test because
we couldn't get it passing on Ruby 2.3 and 2.4, but we have since
removed support for those versions of Ruby.
# Unit specs for FactoryBot::Registry: a named, Enumerable mapping of
# symbol/string keys to registered objects.
describe FactoryBot::Registry do
  it "is an enumerable" do
    registry = FactoryBot::Registry.new("Great thing")

    expect(registry).to be_kind_of(Enumerable)
  end

  it "finds a registered object" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    registry.register(:object_name, registered_object)

    expect(registry.find(:object_name)).to eq registered_object
  end

  it "finds a registered object with square brackets" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    registry.register(:object_name, registered_object)

    expect(registry[:object_name]).to eq registered_object
  end

  it "raises when an object cannot be found" do
    registry = FactoryBot::Registry.new("Great thing")

    expect { registry.find(:object_name) }
      .to raise_error(KeyError, "Great thing not registered: \"object_name\"")
  end

  # Requires Ruby's did_you_mean suggestions on KeyError (re-added after
  # dropping support for Ruby 2.3/2.4, where this did not pass).
  it "includes a did_you_mean message" do
    registry = FactoryBot::Registry.new(:registry)
    registered_object = double(:registered_object)
    registry.register(:factory_bot, registered_object)

    expect { registry.find(:factory_bit) }
      .to raise_error(KeyError, /Did you mean\? "factory_bot"/)
  end

  it "adds and returns the object registered" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")

    expect(registry.register(:object_name, registered_object)).to eq registered_object
  end

  it "knows that an object is registered by symbol" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    registry.register(:object_name, registered_object)

    expect(registry).to be_registered(:object_name)
  end

  it "knows that an object is registered by string" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    registry.register(:object_name, registered_object)

    expect(registry).to be_registered("object_name")
  end

  it "knows when an object is not registered" do
    registry = FactoryBot::Registry.new("Great thing")

    expect(registry).not_to be_registered("bogus")
  end

  # Iteration preserves registration order.
  it "iterates registered objects" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    second_registered_object = double("second registered object")
    registry.register(:first_object, registered_object)
    registry.register(:second_object, second_registered_object)

    expect(registry.to_a).to eq [registered_object, second_registered_object]
  end

  it "does not include duplicate objects with registered under different names" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    registry.register(:first_object, registered_object)
    registry.register(:second_object, registered_object)

    expect(registry.to_a).to eq [registered_object]
  end

  it "clears registered factories" do
    registry = FactoryBot::Registry.new("Great thing")
    registered_object = double("registered object")
    registry.register(:object_name, registered_object)
    registry.clear

    expect(registry.count).to be_zero
  end
end
|
# Production environment configuration for The Odin Project (Rails 3.x style).
Theodinproject::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to nil and saved in location specified by config.assets.prefix
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Prepend all log lines with the following tags
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  # config.active_record.auto_explain_threshold_in_seconds = 0.5
end
Enable config.assets.compile so missing precompiled assets fall back to live compilation.
# Production environment configuration for The Odin Project (Rails 3.x style).
Theodinproject::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Fall back to the assets pipeline if a precompiled asset is missed
  config.assets.compile = true # changed from false to allow live compilation

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to nil and saved in location specified by config.assets.prefix
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Prepend all log lines with the following tags
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  # config.active_record.auto_explain_threshold_in_seconds = 0.5
end
|
require 'spec_helper'

# Feature coverage for commercial (promotional video) management, both from
# the admin area (conference commercials) and the public area (event
# commercials owned by a submitter).
feature Commercial do
  # It is necessary to use bang version of let to build roles before user
  let!(:conference) { create(:conference) }
  let!(:organizer_role) { Role.find_by(name: 'organizer', resource: conference) }
  let!(:organizer) { create(:user, role_ids: [organizer_role.id]) }
  let!(:participant) { create(:user) }

  context 'in admin area' do
    scenario 'adds, updates, deletes of a conference', feature: true, js: true do
      expected_count = conference.commercials.count + 1

      sign_in organizer
      visit admin_conference_commercials_path(conference.short_title)
      # Workaround to enable the 'Create Commercial' button
      page.execute_script("$('#commercial_submit_action').prop('disabled', false)")

      # Create valid commercial
      fill_in 'commercial_url', with: 'https://www.youtube.com/watch?v=M9bq_alk-sw'
      click_button 'Create Commercial'
      expect(flash).to eq('Commercial was successfully created.')
      expect(conference.commercials.count).to eq(expected_count)
      commercial = conference.commercials.where(url: 'https://www.youtube.com/watch?v=M9bq_alk-sw').first

      # Update the commercial's URL in place
      fill_in "commercial_url_#{commercial.id}", with: 'https://www.youtube.com/watch?v=VNkDJk5_9eU'
      click_button 'Update'
      expect(flash).to eq('Commercial was successfully updated.')
      expect(conference.commercials.count).to eq(expected_count)
      commercial.reload
      expect(commercial.url).to eq 'https://www.youtube.com/watch?v=VNkDJk5_9eU'

      # Delete commercial
      click_link 'Delete'
      expect(flash).to eq('Commercial was successfully destroyed.')
      expect(conference.commercials.count).to eq(expected_count - 1)
    end
  end

  context 'in public area' do
    let!(:event) { create(:event, program: conference.program, title: 'Example Proposal') }

    # Make the participant the event's submitter so the proposal edit pages
    # are accessible, and snapshot the expected commercial count.
    before(:each) do
      event.event_users = [create(:event_user,
                                  user_id: participant.id,
                                  event_id: event.id,
                                  event_role: 'submitter')]
      @expected_count = Commercial.count + 1
      sign_in participant
    end

    after(:each) do
      sign_out
    end

    scenario 'adds a valid commercial of an event', feature: true, js: true do
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      fill_in 'commercial_url', with: 'https://www.youtube.com/watch?v=M9bq_alk-sw'
      # Workaround to enable the 'Create Commercial' button
      page.execute_script("$('#commercial_submit_action').prop('disabled', false)")
      click_button 'Create Commercial'
      expect(flash).to eq('Commercial was successfully created.')
      expect(event.commercials.count).to eq(@expected_count)
    end

    scenario 'updates a commercial of an event', feature: true, js: true do
      commercial = create(:commercial,
                          commercialable_id: event.id,
                          commercialable_type: 'Event')
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      fill_in "commercial_url_#{commercial.id}", with: 'https://www.youtube.com/watch?v=M9bq_alk-sw'
      click_button 'Update'
      expect(flash).to eq('Commercial was successfully updated.')
      expect(event.commercials.count).to eq(@expected_count)
      commercial.reload
      expect(commercial.url).to eq('https://www.youtube.com/watch?v=M9bq_alk-sw')
    end

    scenario 'deletes a commercial of an event', feature: true, js: true do
      create(:commercial,
             commercialable_id: event.id,
             commercialable_type: 'Event')
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      click_link 'Delete'
      # FIX(review): removed a stray `page.driver.network_traffic` debugging
      # call that was left here; its return value was unused and no assertion
      # depended on it.
      expect(flash).to eq('Commercial was successfully destroyed.')
      expect(event.commercials.count).to eq(@expected_count - 1)
    end
  end
end
Add tests to check that commercials #create and #update fail on invalid data
require 'spec_helper'

# Feature spec for managing Commercials (promotional videos), covering both
# the admin area (conference-level, managed by an organizer) and the public
# area (event-level, managed by the event's submitter), including failure
# paths for invalid URLs.
feature Commercial do
  # It is necessary to use bang version of let to build roles before user
  let!(:conference) { create(:conference) }
  let!(:organizer_role) { Role.find_by(name: 'organizer', resource: conference) }
  let!(:organizer) { create(:user, role_ids: [organizer_role.id]) }
  let!(:participant) { create(:user) }

  context 'in admin area' do
    scenario 'adds, updates, deletes of a conference', feature: true, js: true do
      expected_count = conference.commercials.count + 1
      sign_in organizer
      visit admin_conference_commercials_path(conference.short_title)
      # Workaround to enable the 'Create Commercial' button
      page.execute_script("$('#commercial_submit_action').prop('disabled', false)")

      # Create valid commercial
      fill_in 'commercial_url', with: 'https://www.youtube.com/watch?v=M9bq_alk-sw'
      click_button 'Create Commercial'
      expect(flash).to eq('Commercial was successfully created.')
      expect(conference.commercials.count).to eq(expected_count)

      # Update the commercial we just created
      commercial = conference.commercials.where(url: 'https://www.youtube.com/watch?v=M9bq_alk-sw').first
      fill_in "commercial_url_#{commercial.id}", with: 'https://www.youtube.com/watch?v=VNkDJk5_9eU'
      click_button 'Update'
      expect(flash).to eq('Commercial was successfully updated.')
      expect(conference.commercials.count).to eq(expected_count)
      commercial.reload
      expect(commercial.url).to eq 'https://www.youtube.com/watch?v=VNkDJk5_9eU'

      # Delete commercial
      click_link 'Delete'
      expect(flash).to eq('Commercial was successfully destroyed.')
      expect(conference.commercials.count).to eq(expected_count - 1)
    end
  end

  context 'in public area' do
    let!(:event) { create(:event, program: conference.program, title: 'Example Proposal') }

    before(:each) do
      # Make the participant the submitter of the event so they are allowed
      # to manage its commercials.
      event.event_users = [create(:event_user,
                                  user_id: participant.id,
                                  event_id: event.id,
                                  event_role: 'submitter')]
      @expected_count = Commercial.count + 1
      sign_in participant
    end

    after(:each) do
      sign_out
    end

    scenario 'adds a valid commercial of an event', feature: true, js: true do
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      fill_in 'commercial_url', with: 'https://www.youtube.com/watch?v=M9bq_alk-sw'
      # Workaround to enable the 'Create Commercial' button
      page.execute_script("$('#commercial_submit_action').prop('disabled', false)")
      click_button 'Create Commercial'
      expect(flash).to eq('Commercial was successfully created.')
      expect(event.commercials.count).to eq(@expected_count)
    end

    scenario 'does not add an invalid commercial of an event', feature: true, js: true do
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      fill_in 'commercial_url', with: 'invalid_commercial_url'
      # Workaround to enable the 'Create Commercial' button
      page.execute_script("$('#commercial_submit_action').prop('disabled', false)")
      click_button 'Create Commercial'
      expect(flash).to include('An error prohibited this Commercial from being saved:')
      expect(current_path).to eq edit_conference_program_proposal_path(conference.short_title, event.id)
      expect(event.commercials.count).to eq 0
    end

    scenario 'updates a commercial of an event', feature: true, js: true do
      commercial = create(:commercial,
                          commercialable_id: event.id,
                          commercialable_type: 'Event')
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      fill_in "commercial_url_#{commercial.id}", with: 'https://www.youtube.com/watch?v=M9bq_alk-sw'
      click_button 'Update'
      expect(flash).to eq('Commercial was successfully updated.')
      expect(event.commercials.count).to eq(@expected_count)
      commercial.reload
      expect(commercial.url).to eq('https://www.youtube.com/watch?v=M9bq_alk-sw')
    end

    scenario 'does not update a commercial of an event with invalid data', feature: true, js: true do
      commercial = create(:commercial,
                          commercialable_id: event.id,
                          commercialable_type: 'Event')
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      fill_in "commercial_url_#{commercial.id}", with: 'invalid_commercial_url'
      click_button 'Update'
      expect(flash).to include('An error prohibited this Commercial from being saved:')
      expect(current_path).to eq edit_conference_program_proposal_path(conference.short_title, event.id)
      commercial.reload
      # presumably the :commercial factory's default URL — TODO confirm
      # against the factory definition.
      expect(commercial.url).to eq('https://www.youtube.com/watch?v=BTTygyxuGj8')
    end

    scenario 'deletes a commercial of an event', feature: true, js: true do
      create(:commercial,
             commercialable_id: event.id,
             commercialable_type: 'Event')
      visit edit_conference_program_proposal_path(conference.short_title, event.id)
      click_link 'Commercials'
      click_link 'Delete'
      # NOTE(review): removed a stray `page.driver.network_traffic` call here;
      # it was leftover PhantomJS debugging with no assertion attached.
      expect(flash).to eq('Commercial was successfully destroyed.')
      expect(event.commercials.count).to eq(@expected_count - 1)
    end
  end
end
|
# Production environment configuration for the SFVS registration system.
# Deployed on Heroku (sfvsapp.herokuapp.com) with Mailgun SMTP for mail.
SFVSRegistrationSystem::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Defaults to nil and saved in location specified by config.assets.prefix
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Prepend all log lines with the following tags
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  # config.active_record.auto_explain_threshold_in_seconds = 0.5

  # Devise Mailer
  # Outgoing mail (e.g. Devise emails) is delivered through Mailgun's SMTP
  # relay; credentials come from the environment (Mailgun Heroku add-on).
  config.action_mailer.default_url_options = {:host => 'sfvsapp.herokuapp.com'}
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    :port => ENV['MAILGUN_SMTP_PORT'],
    :address => ENV['MAILGUN_SMTP_SERVER'],
    :user_name => ENV['MAILGUN_SMTP_LOGIN'],
    :password => ENV['MAILGUN_SMTP_PASSWORD'],
    :domain => 'sfvsapp.herokuapp.com',
    :authentication => :plain
  }
end
Disabled email delivery errors for now (action_mailer.raise_delivery_errors = false)
# Production environment configuration for the SFVS registration system.
# Deployed on Heroku (sfvsapp.herokuapp.com) with Mailgun SMTP for mail.
SFVSRegistrationSystem::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable Rails's static asset server (Apache or nginx will already do this)
  config.serve_static_assets = false

  # Compress JavaScripts and CSS
  config.assets.compress = true

  # Don't fallback to assets pipeline if a precompiled asset is missed
  config.assets.compile = false

  # Generate digests for assets URLs
  config.assets.digest = true

  # Do not raise when email delivery fails; delivery is best-effort.
  config.action_mailer.raise_delivery_errors = false

  # Defaults to nil and saved in location specified by config.assets.prefix
  # config.assets.manifest = YOUR_PATH

  # Specifies the header that your server uses for sending files
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Prepend all log lines with the following tags
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
  # config.assets.precompile += %w( search.js )

  # Disable delivery errors, bad email addresses will be ignored
  # NOTE(review): template line kept for reference; the active setting above
  # already sets raise_delivery_errors = false.
  # config.action_mailer.raise_delivery_errors = false

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  # config.active_record.auto_explain_threshold_in_seconds = 0.5

  # Devise Mailer
  # Outgoing mail (e.g. Devise emails) is delivered through Mailgun's SMTP
  # relay; credentials come from the environment (Mailgun Heroku add-on).
  config.action_mailer.default_url_options = {:host => 'sfvsapp.herokuapp.com'}
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    :port => ENV['MAILGUN_SMTP_PORT'],
    :address => ENV['MAILGUN_SMTP_SERVER'],
    :user_name => ENV['MAILGUN_SMTP_LOGIN'],
    :password => ENV['MAILGUN_SMTP_PASSWORD'],
    :domain => 'sfvsapp.herokuapp.com',
    :authentication => :plain
  }
end
|
require 'rails_helper'

# Percy visual-regression suite: seeds a set of crops/plantings with photos,
# then snapshots the main pages both signed out and signed in.
describe 'Test with visual testing', type: :feature, js: true do
  let(:member) { FactoryBot.create :member, login_name: 'percy', preferred_avatar_uri: gravatar }
  let(:someone_else) { FactoryBot.create :member, login_name: 'ruby', preferred_avatar_uri: gravatar2 }
  let(:gravatar) { 'http://www.gravatar.com/avatar/d021434aac03a7f7c7c0de60d07dad1c?size=150&default=identicon' }
  let(:gravatar2) { 'http://www.gravatar.com/avatar/353d83d3677b142520987e1936fd093c?size=150&default=identicon' }
  let!(:tomato) { FactoryBot.create :tomato }

  before do
    # Seed one crop + owner + planting + photo per entry so index pages have
    # stable, recognisable content for the snapshots.
    {
      chard: 'https://farm9.staticflickr.com/8516/8519911893_1759c28965_q.jpg',
      apple: 'https://farm5.staticflickr.com/4748/38932178855_6fe9bcdb48_q.jpg',
      pear: 'https://farm1.staticflickr.com/113/250984726_0fc31fea6d_q.jpg',
      popcorn: 'https://farm8.staticflickr.com/7893/33150160528_24a689c6bc_q.jpg',
      eggplant: 'https://farm8.staticflickr.com/7856/47068736892_1af9b8a4ba_q.jpg',
      maize: 'https://farm66.staticflickr.com/65535/46739264475_7cb55b2cbb_q.jpg'
    }.each do |crop_type, photo_url|
      crop = FactoryBot.create crop_type
      owner = FactoryBot.create :member, login_name: crop_type.to_s.reverse, email: "#{crop.name}@example.com"
      planting = FactoryBot.create :planting, crop: crop, owner: owner, garden: owner.gardens.first
      planting.photos << FactoryBot.create(:photo, owner: owner, thumbnail_url: photo_url)
    end
  end

  # Pages snapshotted in both the signed-out and signed-in contexts;
  # `prefix` must be defined by the including context.
  shared_examples 'visit pages' do
    describe 'home' do
      it 'loads homepage' do
        visit root_path
        Percy.snapshot(page, name: "#{prefix}/homepage")
      end
    end

    describe 'crops' do
      it 'loads crops#show' do
        visit crop_path(tomato)
        Percy.snapshot(page, name: "#{prefix}/crops#show")
      end

      it 'loads crops#index' do
        visit crops_path
        Percy.snapshot(page, name: "#{prefix}/crops#index")
      end
    end

    describe 'plantings' do
      it 'loads plantings#index' do
        visit plantings_path
        Percy.snapshot(page, name: "#{prefix}/plantings#index")
      end

      it 'load another member plantings#show' do
        planting = FactoryBot.create :planting, crop: tomato, owner: someone_else, garden: someone_else.gardens.first
        visit planting_path(planting)
        Percy.snapshot(page, name: "#{prefix}/plantings#show")
      end
    end

    describe 'gardens' do
      it 'loads gardens#index' do
        visit gardens_path
        Percy.snapshot(page, name: "#{prefix}/gardens#index")
      end

      it 'load some one else\'s gardens#show' do
        # Fixed fixture typo: the garden name was 'paraside'.
        garden = FactoryBot.create :garden, name: 'paradise', owner: someone_else
        visit garden_path(garden)
        Percy.snapshot(page, name: "#{prefix}/gardens#show")
      end
    end

    describe 'members' do
      it 'loads members#index' do
        visit members_path
        Percy.snapshot(page, name: "#{prefix}/members#index")
      end

      it 'loads another members#show' do
        visit member_path(someone_else)
        Percy.snapshot(page, name: "#{prefix}/members#show")
      end
    end
  end

  context "when signed out" do
    let(:prefix) { 'signed-out' }
    include_examples 'visit pages'

    it 'loads sign in page' do
      visit crops_path # some random page
      click_link 'Sign in'
      Percy.snapshot(page, name: "sign-in")
    end

    it 'loads sign up page' do
      visit crops_path # some random page
      click_link 'Sign up'
      Percy.snapshot(page, name: "sign-up")
    end

    it 'loads forgot password' do
      visit new_member_password_path
      Percy.snapshot(page, name: "forgot-password")
    end

    it 'loads new confirmation' do
      visit new_member_confirmation_path
      # Fixed: this snapshot was also named "forgot-password", clobbering
      # the snapshot taken by the example above.
      Percy.snapshot(page, name: "new-confirmation")
    end
    # NOTE(review): removed a duplicate 'loads sign in page' example that only
    # visited crops_path and took no snapshot and made no assertion.
  end

  context 'when signed in' do
    let(:prefix) { 'signed-in' }
    before { login_as member }
    include_examples 'visit pages'

    it 'load plantings#show' do
      planting = FactoryBot.create :planting, crop: tomato, owner: member, garden: member.gardens.first
      visit planting_path(planting)
      Percy.snapshot(page, name: "#{prefix}/self/plantings#show")
    end

    it 'load members#show' do
      visit member_path(member)
      Percy.snapshot(page, name: "#{prefix}/self/members#show")
    end

    it 'load my gardens#show' do
      garden = FactoryBot.create :garden, name: 'paradise', owner: member
      visit garden_path(garden)
      Percy.snapshot(page, name: "#{prefix}/self/gardens#show")
    end
  end
end
Create fewer members per run
require 'rails_helper'

# Percy visual-regression suite: seeds a set of crops/plantings with photos,
# then snapshots the main pages both signed out and signed in. Crops reuse
# `someone_else` as creator to keep the number of members created per run low.
describe 'Test with visual testing', type: :feature, js: true do
  let(:member) { FactoryBot.create :member, login_name: 'percy', preferred_avatar_uri: gravatar }
  let(:someone_else) { FactoryBot.create :member, login_name: 'ruby', preferred_avatar_uri: gravatar2 }
  let(:gravatar) { 'http://www.gravatar.com/avatar/d021434aac03a7f7c7c0de60d07dad1c?size=150&default=identicon' }
  let(:gravatar2) { 'http://www.gravatar.com/avatar/353d83d3677b142520987e1936fd093c?size=150&default=identicon' }
  let!(:tomato) { FactoryBot.create :tomato, creator: someone_else }

  before do
    # Seed one crop + owner + planting + photo per entry so index pages have
    # stable, recognisable content for the snapshots.
    {
      chard: 'https://farm9.staticflickr.com/8516/8519911893_1759c28965_q.jpg',
      apple: 'https://farm5.staticflickr.com/4748/38932178855_6fe9bcdb48_q.jpg',
      pear: 'https://farm1.staticflickr.com/113/250984726_0fc31fea6d_q.jpg',
      popcorn: 'https://farm8.staticflickr.com/7893/33150160528_24a689c6bc_q.jpg',
      eggplant: 'https://farm8.staticflickr.com/7856/47068736892_1af9b8a4ba_q.jpg',
      maize: 'https://farm66.staticflickr.com/65535/46739264475_7cb55b2cbb_q.jpg'
    }.each do |crop_type, photo_url|
      crop = FactoryBot.create crop_type, creator: someone_else
      owner = FactoryBot.create :member, login_name: crop_type.to_s.reverse, email: "#{crop.name}@example.com"
      planting = FactoryBot.create :planting, crop: crop, owner: owner, garden: owner.gardens.first
      planting.photos << FactoryBot.create(:photo, owner: owner, thumbnail_url: photo_url)
    end
  end

  # Pages snapshotted in both the signed-out and signed-in contexts;
  # `prefix` must be defined by the including context.
  shared_examples 'visit pages' do
    describe 'home' do
      it 'loads homepage' do
        visit root_path
        Percy.snapshot(page, name: "#{prefix}/homepage")
      end
    end

    describe 'crops' do
      it 'loads crops#show' do
        visit crop_path(tomato)
        Percy.snapshot(page, name: "#{prefix}/crops#show")
      end

      it 'loads crops#index' do
        visit crops_path
        Percy.snapshot(page, name: "#{prefix}/crops#index")
      end
    end

    describe 'plantings' do
      it 'loads plantings#index' do
        visit plantings_path
        Percy.snapshot(page, name: "#{prefix}/plantings#index")
      end

      it 'load another member plantings#show' do
        planting = FactoryBot.create :planting, crop: tomato, owner: someone_else, garden: someone_else.gardens.first
        visit planting_path(planting)
        Percy.snapshot(page, name: "#{prefix}/plantings#show")
      end
    end

    describe 'gardens' do
      it 'loads gardens#index' do
        visit gardens_path
        Percy.snapshot(page, name: "#{prefix}/gardens#index")
      end

      it 'load some one else\'s gardens#show' do
        # Fixed fixture typo: the garden name was 'paraside'.
        garden = FactoryBot.create :garden, name: 'paradise', owner: someone_else
        visit garden_path(garden)
        Percy.snapshot(page, name: "#{prefix}/gardens#show")
      end
    end

    describe 'members' do
      it 'loads members#index' do
        visit members_path
        Percy.snapshot(page, name: "#{prefix}/members#index")
      end

      it 'loads another members#show' do
        visit member_path(someone_else)
        Percy.snapshot(page, name: "#{prefix}/members#show")
      end
    end
  end

  context "when signed out" do
    let(:prefix) { 'signed-out' }
    include_examples 'visit pages'

    it 'loads sign in page' do
      visit crops_path # some random page
      click_link 'Sign in'
      Percy.snapshot(page, name: "sign-in")
    end

    it 'loads sign up page' do
      visit crops_path # some random page
      click_link 'Sign up'
      Percy.snapshot(page, name: "sign-up")
    end

    it 'loads forgot password' do
      visit new_member_password_path
      Percy.snapshot(page, name: "forgot-password")
    end

    it 'loads new confirmation' do
      visit new_member_confirmation_path
      # Fixed snapshot-name typo: was "new-confimation".
      Percy.snapshot(page, name: "new-confirmation")
    end
    # NOTE(review): removed a duplicate 'loads sign in page' example that only
    # visited crops_path and took no snapshot and made no assertion.
  end

  context 'when signed in' do
    let(:prefix) { 'signed-in' }
    before { login_as member }
    include_examples 'visit pages'

    it 'load plantings#show' do
      planting = FactoryBot.create :planting, crop: tomato, owner: member, garden: member.gardens.first
      visit planting_path(planting)
      Percy.snapshot(page, name: "#{prefix}/self/plantings#show")
    end

    it 'load members#show' do
      visit member_path(member)
      Percy.snapshot(page, name: "#{prefix}/self/members#show")
    end

    it 'load my gardens#show' do
      garden = FactoryBot.create :garden, name: 'paradise', owner: member
      visit garden_path(garden)
      Percy.snapshot(page, name: "#{prefix}/self/gardens#show")
    end
  end
end
|
# Production environment configuration (Rails 5.x style: public_file_server,
# Action Cable options, RAILS_LOG_TO_STDOUT support).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  # NOTE(review): :debug is very verbose for production; confirm it is intended.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "drpedia_#{Rails.env}"
  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT when requested (e.g. Heroku / containerised deployments).
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
Disabled Active Record in production (commented out the active_record setting).
# Production environment configuration (Rails 5.x style: public_file_server,
# Action Cable options, RAILS_LOG_TO_STDOUT support).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  # NOTE(review): :debug is very verbose for production; confirm it is intended.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "drpedia_#{Rails.env}"
  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT when requested (e.g. Heroku / containerised deployments).
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  # NOTE(review): deliberately commented out — presumably because Active
  # Record is disabled in this app, so setting config.active_record would
  # fail; confirm before re-enabling.
  # config.active_record.dump_schema_after_migration = false
end
|
include Warden::Test::Helpers
Warden.test_mode!

# Feature: Answer questions
#   As a user
#   I want to go to the inbox
#   So I can answer and get new questions
#
# NOTE: the page.driver.render calls write full-page screenshots to tmp/ to
# help debug these JS-driven scenarios on CI.
feature "Inbox", :devise do
  # Warden keeps login state between examples unless it is reset explicitly.
  after :each do
    Warden.test_reset!
  end

  # Scenario: User answers a question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I have a question in my inbox
  #   Then I can answer my question
  #   And see the answer on my user profile
  scenario "user answers a question", js: true do
    me = FactoryGirl.create :user
    question = FactoryGirl.create :question
    Inbox.create question: question, user: me, new: true
    login_as me, scope: :user
    visit root_path
    expect(page).to have_text('1 new question'.upcase)
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    click_link "Inbox"
    expect(page).to have_text(question.content)
    fill_in "ib-answer", with: Faker::Lorem.sentence
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
    click_button "Answer"
    wait_for_ajax
    # The answered question disappears from the inbox...
    expect(page).not_to have_text(question.content)
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_3.png"), full: true
    # ...and shows up on the user's profile.
    visit show_user_profile_path(me.screen_name)
    expect(page).to have_text(question.content)
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_4.png"), full: true
  end

  # Scenario: User generates new question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I click "Get new question"
  #   Then I get a new question
  scenario 'user generates new question', js: true do
    me = FactoryGirl.create :user
    login_as me, scope: :user
    visit inbox_path
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    click_button "Get new question"
    wait_for_ajax
    expect(page).to have_text('Answer')
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
  end

  # Scenario: User with privacy options generates new question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I click "Get new question"
  #   And I don't want to receive questions by anonymous users
  #   Then I get a new question
  scenario 'user with privacy options generates new question', js: true do
    me = FactoryGirl.create :user
    me.privacy_allow_anonymous_questions = false
    me.save
    login_as me, scope: :user
    visit inbox_path
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    click_button "Get new question"
    wait_for_ajax
    expect(page).to have_text('Answer')
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
  end

=begin
  # Scenario: User deletes a question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I have a question in my inbox
  #   And I delete the question
  #   Then don't see it anymore in my inbox
  scenario "user deletes a question", js: true do
    me = FactoryGirl.create :user
    question = FactoryGirl.create :question
    Inbox.create question: question, user: me
    login_as me, scope: :user
    visit inbox_path
    expect(page).to have_text(question.content)
    click_button "Delete"
    expect(page).to have_text('Really delete?')
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    # this apparently doesn't get triggered :(
    page.find('.sweet-alert').click_button 'Delete'
    wait_for_ajax
    login_as me, scope: :user
    visit inbox_path
    expect(page).not_to have_text(question.content)
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
  end

  # Scenario: User deletes all questions
  #   Given I am signed in
  #   When I visit the inbox
  #   And I have a few questions in my inbox
  #   And I click on "Delete all questions"
  #   Then don't see them anymore in my inbox
  scenario "user deletes all questions", js: true do
    me = FactoryGirl.create :user
    5.times do
      question = FactoryGirl.create :question
      Inbox.create question: question, user: me
    end
    login_as me, scope: :user
    visit inbox_path
    expect(page).to have_text('Answer'.upcase)
    click_button "Delete all questions"
    expect(page).to have_text('Really delete 5 questions?')
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    page.find('.sweet-alert').click_button 'Delete'
    wait_for_ajax
    puts me.inbox.all
    login_as me, scope: :user
    visit inbox_path
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
    expect(page).not_to have_text('Answer'.upcase)
  end
=end
end
Hopefully this fixes the last error in Travis; it doesn't error on my end.
include Warden::Test::Helpers
Warden.test_mode!
# Feature: Answer questions
# As a user
# I want to go to the inbox
# So I can answer and get new questions
feature "Inbox", :devise do
  # Reset Warden's test-mode session state so logins don't leak between scenarios.
  after :each do
    Warden.test_reset!
  end

  # Scenario: User answers a question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I have a question in my inbox
  #   Then I can answer my question
  #   And see the answer on my user profile
  scenario "user answers a question", js: true do
    me = FactoryGirl.create :user
    question = FactoryGirl.create :question
    Inbox.create question: question, user: me, new: true
    login_as me, scope: :user
    visit root_path
    expect(page).to have_text('1 new question'.upcase)
    # page.driver.render screenshots under tmp/ are debugging aids for CI runs.
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    click_link "Inbox"
    expect(page).to have_text(question.content)
    fill_in "ib-answer", with: Faker::Lorem.sentence
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
    click_button "Answer"
    wait_for_ajax
    expect(page).not_to have_text(question.content, wait: 2)
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_3.png"), full: true
    visit show_user_profile_path(me.screen_name)
    expect(page).to have_text(question.content)
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_4.png"), full: true
  end

  # Scenario: User generates new question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I click "Get new question"
  #   Then I get a new question
  scenario 'user generates new question', js: true do
    me = FactoryGirl.create :user
    login_as me, scope: :user
    visit inbox_path
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    click_button "Get new question"
    wait_for_ajax
    expect(page).to have_text('Answer')
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
  end

  # Scenario: User with privacy options generates new question
  #   Given I am signed in
  #   When I visit the inbox
  #   And I click "Get new question"
  #   And I don't want to receive questions by anonymous users
  #   Then I get a new question
  scenario 'user with privacy options generates new question', js: true do
    me = FactoryGirl.create :user
    me.privacy_allow_anonymous_questions = false
    me.save
    login_as me, scope: :user
    visit inbox_path
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
    click_button "Get new question"
    wait_for_ajax
    expect(page).to have_text('Answer')
    page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
  end

# The deletion scenarios below are disabled (=begin/=end): per the inline note,
# clicking the sweet-alert confirm button never triggers in the JS driver.
=begin
# Scenario: User deletes a question
# Given I am signed in
# When I visit the inbox
# And I have a question in my inbox
# And I delete the question
# Then don't see it anymore in my inbox
scenario "user deletes a question", js: true do
me = FactoryGirl.create :user
question = FactoryGirl.create :question
Inbox.create question: question, user: me
login_as me, scope: :user
visit inbox_path
expect(page).to have_text(question.content)
click_button "Delete"
expect(page).to have_text('Really delete?')
page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
# this apparently doesn't get triggered :(
page.find('.sweet-alert').click_button 'Delete'
wait_for_ajax
login_as me, scope: :user
visit inbox_path
expect(page).not_to have_text(question.content)
page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
end
# Scenario: User deletes all questions
# Given I am signed in
# When I visit the inbox
# And I have a few questions in my inbox
# And I click on "Delete all questions"
# Then don't see them anymore in my inbox
scenario "user deletes all questions", js: true do
me = FactoryGirl.create :user
5.times do
question = FactoryGirl.create :question
Inbox.create question: question, user: me
end
login_as me, scope: :user
visit inbox_path
expect(page).to have_text('Answer'.upcase)
click_button "Delete all questions"
expect(page).to have_text('Really delete 5 questions?')
page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_1.png"), full: true
page.find('.sweet-alert').click_button 'Delete'
wait_for_ajax
puts me.inbox.all
login_as me, scope: :user
visit inbox_path
page.driver.render Rails.root.join("tmp/#{Time.now.to_i}_2.png"), full: true
expect(page).not_to have_text('Answer'.upcase)
end
=end
end
|
Rails.application.configure do
  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or nginx will already do this).
  # NOTE(review): `serve_static_assets` was renamed `serve_static_files` in
  # Rails 4.2 — confirm this app's Rails version still accepts the old key.
  config.serve_static_assets = false

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and
  # use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # Send tagged logs to the local syslog (facility LOCAL6) at INFO level.
  config.logger = ActiveSupport::TaggedLogging.new(Logger::Syslog.new("frontend", Syslog::LOG_LOCAL6).tap {|log| log.level = Logger::INFO})

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder
  # are already added.
  # config.assets.precompile += %w( search.js )

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
end
Adds static asset serving in production.
@rskang
Rails.application.configure do
  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Serve static assets from /public directly from the app (no fronting
  # Apache/nginx assumed here).
  config.serve_static_assets = true

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and
  # use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # Send tagged logs to the local syslog (facility LOCAL6) at INFO level.
  config.logger = ActiveSupport::TaggedLogging.new(Logger::Syslog.new("frontend", Syslog::LOG_LOCAL6).tap {|log| log.level = Logger::INFO})

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder
  # are already added.
  # config.assets.precompile += %w( search.js )

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
end
|
# frozen_string_literal: true
require "rails_helper"
feature "signups", js: true do
  include_context "work"

  let(:actor) { create(:user) }
  let(:page_path) { work_shifts_path }

  # Run every example on the actor's home community subdomain.
  around { |ex| with_user_home_subdomain(actor) { ex.run } }

  before do
    login_as(actor, scope: :user)
  end

  it_behaves_like "handles no periods"

  context "with period but no shifts" do
    let!(:period) { create(:work_period, phase: "ready") }

    scenario "index" do
      visit(page_path)
      expect(page).to have_content("No jobs found")
    end
  end

  context "with jobs" do
    include_context "with jobs"

    context "in draft phase" do
      before { periods[0].update!(phase: "draft") }

      scenario "index" do
        visit(page_path)
        expect(page).to have_content("This period is in the draft phase so workers can't sign up")
      end

      describe "auto open" do
        before { periods[0].update!(auto_open_time: Time.current + 30.minutes) }

        scenario do
          visit(page_path)
          expect(page).to have_content("This period is in the draft phase so workers can't sign up")
          # Once auto_open_time has passed, the period opens on the next page load.
          Timecop.freeze(Time.current + 31.minutes) do
            visit(page_path)
            expect(page).to have_content(jobs[0].title)
          end
        end
      end
    end

    describe "filters and search", search: Work::Shift do
      include_context "with assignments"

      # Walk through each shift lens option, the search box, and the period lens,
      # checking which jobs remain visible after each change.
      scenario do
        visit(page_path)
        select_lens(:shift, "All Jobs")
        expect_jobs(*jobs[0..3])
        fill_in_lens(:search, "fruct")
        expect_jobs(jobs[1])
        clear_lenses
        expect_jobs(*jobs[0..3])
        select_lens(:shift, "Open Jobs")
        expect_jobs(*jobs[1..3])
        select_lens(:shift, "My Jobs")
        expect_jobs(*jobs[1..2])
        select_lens(:shift, "My Household")
        expect_jobs(*jobs[0..2])
        select_lens(:shift, "Not Preassigned")
        expect_jobs(*jobs[1..3])
        select_lens(:shift, "Pants")
        expect_jobs(jobs[1], jobs[3])
        select_lens(:shift, "All Jobs")
        select_lens(:period, periods[1].name)
        expect_jobs(jobs[4])
      end
    end

    describe "signup, show, unsignup, autorefresh", database_cleaner: :truncate do
      before do
        periods[0].update!(phase: "open")
      end

      # Need to clean with truncation because we are doing stuff with txn isolation which is forbidden
      # inside nested transactions.
      scenario do
        visit(page_path)
        within(".shift-card[data-id='#{jobs[0].shifts[0].id}']") do
          expect(page).not_to have_content(actor.name)
          click_on("Sign Up!")
          expect(page).to have_content(actor.name)
        end
        # Stubbed errors exercise the signup race-condition paths without
        # needing a concurrent second session.
        within(".shift-card[data-id='#{jobs[1].shifts[0].id}']") do
          with_env("STUB_SIGNUP_ERROR" => "Work::SlotsExceededError") do
            click_on("Sign Up!")
            expect(page).to have_content("someone beat you to it")
          end
        end
        within(".shift-card[data-id='#{jobs[1].shifts[1].id}']") do
          with_env("STUB_SIGNUP_ERROR" => "Work::AlreadySignedUpError") do
            click_on("Sign Up!")
            expect(page).to have_content("already signed up for")
          end
        end
        # Unsignup via the shift's show page, then sign up again from the index.
        click_on("Knembler")
        expect(page).to have_content(jobs[0].description)
        accept_confirm { click_on("Remove Signup") }
        within(".shift-card[data-id='#{jobs[0].shifts[0].id}']") do
          expect(page).not_to have_content(actor.name)
          click_on("Sign Up!")
          expect(page).to have_content(actor.name)
        end
        # Test autorefresh by simulating someone else having signed up.
        within(".shift-card[data-id='#{jobs[2].shifts[0].id}']") do
          expect(page).not_to have_content(users[0].name)
          jobs[2].shifts[0].signup_user(users[0])
          expect(page).to have_content(users[0].name)
        end
      end
    end
  end
end
8736: Add spec coverage for new unsignup link
# frozen_string_literal: true
require "rails_helper"
feature "signups", js: true do
  include_context "work"

  let(:actor) { create(:user) }
  let(:page_path) { work_shifts_path }

  # Run every example on the actor's home community subdomain.
  around { |ex| with_user_home_subdomain(actor) { ex.run } }

  before do
    login_as(actor, scope: :user)
  end

  it_behaves_like "handles no periods"

  context "with period but no shifts" do
    let!(:period) { create(:work_period, phase: "ready") }

    scenario "index" do
      visit(page_path)
      expect(page).to have_content("No jobs found")
    end
  end

  context "with jobs" do
    include_context "with jobs"

    context "in draft phase" do
      before { periods[0].update!(phase: "draft") }

      scenario "index" do
        visit(page_path)
        expect(page).to have_content("This period is in the draft phase so workers can't sign up")
      end

      describe "auto open" do
        before { periods[0].update!(auto_open_time: Time.current + 30.minutes) }

        scenario do
          visit(page_path)
          expect(page).to have_content("This period is in the draft phase so workers can't sign up")
          # Once auto_open_time has passed, the period opens on the next page load.
          Timecop.freeze(Time.current + 31.minutes) do
            visit(page_path)
            expect(page).to have_content(jobs[0].title)
          end
        end
      end
    end

    describe "filters and search", search: Work::Shift do
      include_context "with assignments"

      # Walk through each shift lens option, the search box, and the period lens,
      # checking which jobs remain visible after each change.
      scenario do
        visit(page_path)
        select_lens(:shift, "All Jobs")
        expect_jobs(*jobs[0..3])
        fill_in_lens(:search, "fruct")
        expect_jobs(jobs[1])
        clear_lenses
        expect_jobs(*jobs[0..3])
        select_lens(:shift, "Open Jobs")
        expect_jobs(*jobs[1..3])
        select_lens(:shift, "My Jobs")
        expect_jobs(*jobs[1..2])
        select_lens(:shift, "My Household")
        expect_jobs(*jobs[0..2])
        select_lens(:shift, "Not Preassigned")
        expect_jobs(*jobs[1..3])
        select_lens(:shift, "Pants")
        expect_jobs(jobs[1], jobs[3])
        select_lens(:shift, "All Jobs")
        select_lens(:period, periods[1].name)
        expect_jobs(jobs[4])
      end
    end

    describe "signup, show, unsignup, autorefresh", database_cleaner: :truncate do
      before do
        periods[0].update!(phase: "open")
      end

      # Need to clean with truncation because we are doing stuff with txn isolation which is forbidden
      # inside nested transactions.
      scenario do
        visit(page_path)
        within(".shift-card[data-id='#{jobs[0].shifts[0].id}']") do
          expect(page).not_to have_content(actor.name)
          click_on("Sign Up!")
          expect(page).to have_content(actor.name)
        end
        # Stubbed errors exercise the signup race-condition paths without
        # needing a concurrent second session.
        within(".shift-card[data-id='#{jobs[1].shifts[0].id}']") do
          with_env("STUB_SIGNUP_ERROR" => "Work::SlotsExceededError") do
            click_on("Sign Up!")
            expect(page).to have_content("someone beat you to it")
          end
        end
        within(".shift-card[data-id='#{jobs[1].shifts[1].id}']") do
          with_env("STUB_SIGNUP_ERROR" => "Work::AlreadySignedUpError") do
            click_on("Sign Up!")
            expect(page).to have_content("already signed up for")
          end
        end
        # Unsignup via #show page
        click_on("Knembler")
        expect(page).to have_content(jobs[0].description)
        accept_confirm { click_on("Remove Signup") }
        within(".shift-card[data-id='#{jobs[0].shifts[0].id}']") do
          expect(page).not_to have_content(actor.name)
          click_on("Sign Up!")
          expect(page).to have_content(actor.name)
          # Unsignup via 'x' link
          find(".cancel-link a").click
          expect(page).not_to have_content(actor.name)
        end
        # Test autorefresh by simulating someone else having signed up.
        within(".shift-card[data-id='#{jobs[2].shifts[0].id}']") do
          expect(page).not_to have_content(users[0].name)
          jobs[2].shifts[0].signup_user(users[0])
          expect(page).to have_content(users[0].name)
        end
      end
    end
  end
end
|
require 'spec_helper'
RSpec.describe FlashRailsMessages::Helper do
  # Host the helper on a bare ActionView::Base so content_tag is available.
  # NOTE(review): the single-argument ActionView::Base.new form is rejected by
  # Rails >= 6.1 (which expects assigns and controller args) — confirm the
  # Rails version pinned by this gem's appraisals.
  let(:subject) do
    ActionView::Base.new(
      ActionView::LookupContext.new([])
    )
  end

  describe '#render_flash_messages' do
    context 'when flash does not have messages' do
      it 'returns nothing' do
        allow(subject).to receive(:flash).and_return({})
        expect(subject.render_flash_messages).to eql('')
      end
    end

    context 'when flash has messages' do
      context 'when flash type is notice' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ notice: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when flash type is success' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ success: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when flash type is alert' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ alert: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when flash type is error' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ error: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when has more than one message' do
        it 'returns all the correct messages' do
          allow(subject).to receive(:flash).and_return({ alert: 'message1', notice: 'message2' })
          alerts_expected = alert_element('message1', class: 'alert') +
          alert_element('message2', class: 'alert')
          expect(subject.render_flash_messages).to eql(alerts_expected)
        end
      end

      context 'with dismissible option' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ notice: 'message' })
          alert_expected = alert_dismissible_element('message', class: 'alert')
          expect(subject.render_flash_messages(dismissible: true)).to eql(alert_expected)
        end
      end

      context 'with other options' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ notice: 'message' })
          alert_expected = alert_element('message', id: 'alert-id', class: 'alert alert-class')
          expect(subject.render_flash_messages(id: 'alert-id', class: 'alert-class')).to eql(alert_expected)
        end
      end
    end
  end

  # --- Spec helpers: build the expected alert markup via the view itself ---

  # A plain alert: <div {options}>message</div>.
  def alert_element(message, options = {})
    subject.content_tag(:div, message.html_safe, options)
  end

  # A dismissible alert: same as alert_element with a leading close link.
  def alert_dismissible_element(message, options = {})
    subject.content_tag(:div, close_element + message.html_safe, options)
  end

  # The close link rendered inside dismissible alerts.
  def close_element
    subject.content_tag(:a, '×'.html_safe, class: 'close', href: '#')
  end
end
Pass defaults for Rails 6.1 compatibility
require 'spec_helper'
RSpec.describe FlashRailsMessages::Helper do
  # Host the helper on a bare ActionView::Base so content_tag is available.
  # The empty assigns hash and nil controller are required positional args on
  # Rails >= 6.1 and are accepted by earlier versions.
  let(:subject) do
    ActionView::Base.new(
      ActionView::LookupContext.new([]),
      {},
      nil
    )
  end

  describe '#render_flash_messages' do
    context 'when flash does not have messages' do
      it 'returns nothing' do
        allow(subject).to receive(:flash).and_return({})
        expect(subject.render_flash_messages).to eql('')
      end
    end

    context 'when flash has messages' do
      context 'when flash type is notice' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ notice: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when flash type is success' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ success: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when flash type is alert' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ alert: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when flash type is error' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ error: 'message' })
          alert_expected = alert_element('message', class: 'alert')
          expect(subject.render_flash_messages).to eql(alert_expected)
        end
      end

      context 'when has more than one message' do
        it 'returns all the correct messages' do
          allow(subject).to receive(:flash).and_return({ alert: 'message1', notice: 'message2' })
          alerts_expected = alert_element('message1', class: 'alert') +
          alert_element('message2', class: 'alert')
          expect(subject.render_flash_messages).to eql(alerts_expected)
        end
      end

      context 'with dismissible option' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ notice: 'message' })
          alert_expected = alert_dismissible_element('message', class: 'alert')
          expect(subject.render_flash_messages(dismissible: true)).to eql(alert_expected)
        end
      end

      context 'with other options' do
        it 'returns the correct message' do
          allow(subject).to receive(:flash).and_return({ notice: 'message' })
          alert_expected = alert_element('message', id: 'alert-id', class: 'alert alert-class')
          expect(subject.render_flash_messages(id: 'alert-id', class: 'alert-class')).to eql(alert_expected)
        end
      end
    end
  end

  # --- Spec helpers: build the expected alert markup via the view itself ---

  # A plain alert: <div {options}>message</div>.
  def alert_element(message, options = {})
    subject.content_tag(:div, message.html_safe, options)
  end

  # A dismissible alert: same as alert_element with a leading close link.
  def alert_dismissible_element(message, options = {})
    subject.content_tag(:div, close_element + message.html_safe, options)
  end

  # The close link rendered inside dismissible alerts.
  def close_element
    subject.content_tag(:a, '×'.html_safe, class: 'close', href: '#')
  end
end
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  config.action_controller.asset_host = ENV['CDN_HOST'] if ENV['CDN_HOST'].present?

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Log to STDOUT, wrapped for tagged logging.
  ActiveSupport::Logger.new(STDOUT).tap do |logger|
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Compress JavaScripts and CSS.
  # config.assets.js_compressor = Uglifier.new(mangle: false)
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Allow to specify public IP of reverse proxy if it's needed
  config.action_dispatch.trusted_proxies = ENV['TRUSTED_PROXY_IP'].split.map { |item| IPAddr.new(item) } if ENV['TRUSTED_PROXY_IP'].present?

  # Redirect all traffic to HTTPS except health checks and .onion hosts
  # (Tor hidden services cannot use HTTPS certificates).
  config.force_ssl = true
  config.ssl_options = {
    redirect: {
      exclude: -> request { request.path.start_with?('/health') || request.headers["Host"].end_with?('.onion') }
    }
  }

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = ENV.fetch('RAILS_LOG_LEVEL', 'info').to_sym

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  config.cache_store = :redis_cache_store, REDIS_CACHE_PARAMS

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # English when a translation cannot be found).
  config.i18n.fallbacks = [:en]

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Better log formatting
  config.lograge.enabled = true

  config.lograge.custom_payload do |controller|
    if controller.respond_to?(:signed_request?) && controller.signed_request?
      { key: controller.signature_key_id }
    end
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  config.action_mailer.perform_caching = false

  # E-mails
  outgoing_email_address = ENV.fetch('SMTP_FROM_ADDRESS', 'notifications@localhost')
  outgoing_mail_domain = Mail::Address.new(outgoing_email_address).domain
  config.action_mailer.default_options = {
    from: outgoing_email_address,
    reply_to: ENV['SMTP_REPLY_TO'],
    'Message-ID': -> { "<#{Mail.random_tag}@#{outgoing_mail_domain}>" },
  }

  config.action_mailer.smtp_settings = {
    :port                 => ENV['SMTP_PORT'],
    :address              => ENV['SMTP_SERVER'],
    :user_name            => ENV['SMTP_LOGIN'].presence,
    :password             => ENV['SMTP_PASSWORD'].presence,
    :domain               => ENV['SMTP_DOMAIN'] || ENV['LOCAL_DOMAIN'],
    :authentication       => ENV['SMTP_AUTH_METHOD'] == 'none' ? nil : ENV['SMTP_AUTH_METHOD'] || :plain,
    :ca_file              => ENV['SMTP_CA_FILE'].presence,
    :openssl_verify_mode  => ENV['SMTP_OPENSSL_VERIFY_MODE'],
    :enable_starttls_auto => ENV['SMTP_ENABLE_STARTTLS_AUTO'] || true,
    :tls                  => ENV['SMTP_TLS'].presence,
    :ssl                  => ENV['SMTP_SSL'].presence,
  }

  config.action_mailer.delivery_method = ENV.fetch('SMTP_DELIVERY_METHOD', 'smtp').to_sym

  config.action_dispatch.default_headers = {
    'Server'                 => 'Mastodon',
    'X-Frame-Options'        => 'DENY',
    'X-Content-Type-Options' => 'nosniff',
    # '0' explicitly disables the browser's legacy XSS auditor. The auditor is
    # deprecated (removed from Chromium, never implemented in Firefox) and the
    # old '1; mode=block' value can itself be abused for cross-site info leaks,
    # so opting out is the recommended setting.
    'X-XSS-Protection'       => '0',
    'Permissions-Policy'     => 'interest-cohort=()',
  }

  config.x.otp_secret = ENV.fetch('OTP_SECRET')
end
disable legacy XSS filtering (#17289)
Browsers are phasing out X-XSS-Protection, but Safari and IE still support it.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  config.action_controller.asset_host = ENV['CDN_HOST'] if ENV['CDN_HOST'].present?

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Log to STDOUT, wrapped for tagged logging.
  ActiveSupport::Logger.new(STDOUT).tap do |logger|
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Compress JavaScripts and CSS.
  # config.assets.js_compressor = Uglifier.new(mangle: false)
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Allow to specify public IP of reverse proxy if it's needed
  config.action_dispatch.trusted_proxies = ENV['TRUSTED_PROXY_IP'].split.map { |item| IPAddr.new(item) } if ENV['TRUSTED_PROXY_IP'].present?

  # Redirect all traffic to HTTPS except health checks and .onion hosts
  # (Tor hidden services cannot use HTTPS certificates).
  config.force_ssl = true
  config.ssl_options = {
    redirect: {
      exclude: -> request { request.path.start_with?('/health') || request.headers["Host"].end_with?('.onion') }
    }
  }

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = ENV.fetch('RAILS_LOG_LEVEL', 'info').to_sym

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  config.cache_store = :redis_cache_store, REDIS_CACHE_PARAMS

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # English when a translation cannot be found).
  config.i18n.fallbacks = [:en]

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Better log formatting
  config.lograge.enabled = true

  config.lograge.custom_payload do |controller|
    if controller.respond_to?(:signed_request?) && controller.signed_request?
      { key: controller.signature_key_id }
    end
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  config.action_mailer.perform_caching = false

  # E-mails
  outgoing_email_address = ENV.fetch('SMTP_FROM_ADDRESS', 'notifications@localhost')
  outgoing_mail_domain = Mail::Address.new(outgoing_email_address).domain
  config.action_mailer.default_options = {
    from: outgoing_email_address,
    reply_to: ENV['SMTP_REPLY_TO'],
    'Message-ID': -> { "<#{Mail.random_tag}@#{outgoing_mail_domain}>" },
  }

  config.action_mailer.smtp_settings = {
    :port                 => ENV['SMTP_PORT'],
    :address              => ENV['SMTP_SERVER'],
    :user_name            => ENV['SMTP_LOGIN'].presence,
    :password             => ENV['SMTP_PASSWORD'].presence,
    :domain               => ENV['SMTP_DOMAIN'] || ENV['LOCAL_DOMAIN'],
    :authentication       => ENV['SMTP_AUTH_METHOD'] == 'none' ? nil : ENV['SMTP_AUTH_METHOD'] || :plain,
    :ca_file              => ENV['SMTP_CA_FILE'].presence,
    :openssl_verify_mode  => ENV['SMTP_OPENSSL_VERIFY_MODE'],
    :enable_starttls_auto => ENV['SMTP_ENABLE_STARTTLS_AUTO'] || true,
    :tls                  => ENV['SMTP_TLS'].presence,
    :ssl                  => ENV['SMTP_SSL'].presence,
  }

  config.action_mailer.delivery_method = ENV.fetch('SMTP_DELIVERY_METHOD', 'smtp').to_sym

  config.action_dispatch.default_headers = {
    'Server'                 => 'Mastodon',
    'X-Frame-Options'        => 'DENY',
    'X-Content-Type-Options' => 'nosniff',
    # '0' explicitly disables the browser's deprecated XSS auditor.
    'X-XSS-Protection'       => '0',
    'Permissions-Policy'     => 'interest-cohort=()',
  }

  config.x.otp_secret = ENV.fetch('OTP_SECRET')
end
|
Badgiy::Application.configure do
  # Code is not reloaded between requests; full error pages off, caching on.
  config.cache_classes = true
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Memcached-backed Rails cache (dalli gem).
  config.cache_store = :dalli_store
  config.force_ssl = true
  # Serve app and mailer assets from the CDN when one is configured.
  config.action_controller.asset_host = ENV['CDN_ASSET_HOST']
  config.action_mailer.asset_host = ENV['CDN_ASSET_HOST']
  #config.font_assets.origin = ENV['FONT_ASSETS_ORIGIN']
  # Outgoing mail via Mailgun SMTP.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.smtp_settings = {
    authentication: :plain,
    address: ENV['MAILGUN_SMTP_SERVER'],
    port: ENV['MAILGUN_SMTP_PORT'],
    domain: 'coderwall.com',
    user_name: ENV['MAILGUN_SMTP_LOGIN'],
    password: ENV['MAILGUN_SMTP_PASSWORD']
  }
  config.i18n.fallbacks = true
  config.active_support.deprecation = :notify
  # Serve static files from /public with long-lived cache headers; compile,
  # compress and digest every non-partial asset (precompile skips files
  # starting with "_").
  config.serve_static_assets = true
  config.assets.precompile = [/^[^_]/]
  config.assets.compile = true
  config.assets.compress = true
  config.assets.digest = true
  config.static_cache_control = 'public, max-age=31536000'
  config.host = ENV['HOST_DOMAIN']
end
Resque has an emotional crisis when run with the threadsafe option set. This is the proposed workaround that lets us have threadsafe in the app but forces Resque to run without threadsafe.
# Production settings for the Badgiy application.
# Resque breaks under config.threadsafe!, so threadsafe mode is skipped for
# processes started via rake (which is how Resque workers boot).
Badgiy::Application.configure do
# $rails_rake_task is presumably set by a rake hook elsewhere in the app —
# TODO confirm that hook exists, otherwise this guard never fires.
config.threadsafe! unless $rails_rake_task
config.cache_classes = true
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Memcached-backed cache store (dalli client).
config.cache_store = :dalli_store
config.force_ssl = true
# Serve assets from a CDN when CDN_ASSET_HOST is configured.
config.action_controller.asset_host = ENV['CDN_ASSET_HOST']
config.action_mailer.asset_host = ENV['CDN_ASSET_HOST']
#config.font_assets.origin = ENV['FONT_ASSETS_ORIGIN']
# Outgoing mail goes through Mailgun's SMTP relay.
config.action_mailer.delivery_method = :smtp
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
config.action_mailer.smtp_settings = {
authentication: :plain,
address: ENV['MAILGUN_SMTP_SERVER'],
port: ENV['MAILGUN_SMTP_PORT'],
domain: 'coderwall.com',
user_name: ENV['MAILGUN_SMTP_LOGIN'],
password: ENV['MAILGUN_SMTP_PASSWORD']
}
config.i18n.fallbacks = true
config.active_support.deprecation = :notify
# The app serves its own static files, with long-lived caching (1 year).
config.serve_static_assets = true
# Precompile every asset whose filename does not start with an underscore.
config.assets.precompile = [/^[^_]/]
config.assets.compile = true
config.assets.compress = true
config.assets.digest = true
config.static_cache_control = 'public, max-age=31536000'
config.host = ENV['HOST_DOMAIN']
end
|
# frozen_string_literal: true
require "spec_helper"
describe GraphQL::Analysis::AST do
# Collects the GraphQL type definitions seen while walking a query: the type
# of each operation definition plus the unwrapped return type of each field.
class AstTypeCollector < GraphQL::Analysis::AST::Analyzer
  def initialize(query)
    super
    @types = []
  end

  def on_enter_operation_definition(node, parent, visitor)
    @types << visitor.type_definition
  end

  # CONSISTENCY FIX: analyzer hooks receive (node, parent, visitor); the old
  # parameter list `(memo, node, visitor)` mislabelled the first two
  # arguments. Only `visitor` was used, so behavior is unchanged.
  def on_enter_field(node, parent, visitor)
    @types << visitor.field_definition.type.unwrap
  end

  # Array of collected type definitions, in visit order.
  def result
    @types
  end
end
# Tallies how many AST nodes of each class appear in the query document.
class AstNodeCounter < GraphQL::Analysis::AST::Analyzer
  def initialize(query)
    super
    # Missing keys read as 0; `+=` then stores the incremented count.
    @nodes = Hash.new(0)
  end

  def on_enter_abstract_node(node, _parent, _visitor)
    @nodes[node.class] += 1
  end

  # Hash of node class => occurrence count.
  def result
    @nodes
  end
end
# Analyzer that only runs when the query context sets :analyze — used to
# exercise the analyze? predicate.
class AstConditionalAnalyzer < GraphQL::Analysis::AST::Analyzer
def initialize(query)
super
@i_have_been_called = false
end
def analyze?
!!query.context[:analyze]
end
# NOTE(review): this hook lacks the on_enter_/on_leave_ prefix used by the
# other analyzers in this file — confirm the visitor actually dispatches to it.
def on_operation_definition(node, parent, visitor)
@i_have_been_called = true
end
# True iff any hook fired.
def result
@i_have_been_called
end
end
# Analyzer whose result is an AnalysisError; the engine should surface it
# under the response's "errors" key.
class AstErrorAnalyzer < GraphQL::Analysis::AST::Analyzer
def result
GraphQL::AnalysisError.new("An Error!")
end
end
# Records visitor.previous_field_definition (the field definition enclosing
# the field currently being visited).
class AstPreviousField < GraphQL::Analysis::AST::Analyzer
def on_enter_field(node, parent, visitor)
@previous_field = visitor.previous_field_definition
end
def result
@previous_field
end
end
# Records the current and enclosing argument definitions when entering an
# argument node.
class AstArguments < GraphQL::Analysis::AST::Analyzer
def on_enter_argument(node, parent, visitor)
@argument = visitor.argument_definition
@previous_argument = visitor.previous_argument_definition
end
# [current argument definition, enclosing argument definition]
def result
[@argument, @previous_argument]
end
end
describe "using the AST analysis engine" do
# Minimal schema wired to the AST analysis engine, with an analyzer that
# always reports an error.
let(:schema) do
query_type = Class.new(GraphQL::Schema::Object) do
graphql_name 'Query'
field :foobar, Integer, null: false
def foobar
1337
end
end
Class.new(GraphQL::Schema) do
query query_type
use GraphQL::Analysis::AST
query_analyzer AstErrorAnalyzer
use GraphQL::Execution::Interpreter
end
end
let(:query_string) {%|
query {
foobar
}
|}
let(:query) { GraphQL::Query.new(schema, query_string, variables: {}) }
# An analyzer returning an error should block execution and surface the
# error message in the response.
it "runs the AST analyzers correctly" do
res = query.result
refute res.key?("data")
assert_equal ["An Error!"], res["errors"].map { |e| e["message"] }
end
it "skips rewrite" do
# Try running the query:
query.result
# But the validation step doesn't build an irep_node tree
assert_nil query.irep_selection
end
describe "when validate: false" do
let(:query) { GraphQL::Query.new(schema, query_string, validate: false) }
it "Skips rewrite" do
# Try running the query:
query.result
# But the validation step doesn't build an irep_node tree
assert_nil query.irep_selection
end
end
end
describe ".analyze_query" do
let(:analyzers) { [AstTypeCollector, AstNodeCounter] }
let(:reduce_result) { GraphQL::Analysis::AST.analyze_query(query, analyzers) }
let(:variables) { {} }
let(:query) { GraphQL::Query.new(Dummy::Schema.graphql_definition, query_string, variables: variables) }
let(:query_string) {%|
{
cheese(id: 1) {
id
flavor
}
}
|}
describe "without a valid operation" do
# Document containing only comments — no executable operation.
let(:query_string) {%|
# A comment
# is an invalid operation
# Should break
|}
it "bails early when there is no selected operation to be executed" do
# One result per analyzer is still returned.
assert_equal 2, reduce_result.size
end
end
describe "conditional analysis" do
let(:analyzers) { [AstTypeCollector, AstConditionalAnalyzer] }
describe "when analyze? returns false" do
let(:query) { GraphQL::Query.new(Dummy::Schema, query_string, variables: variables, context: { analyze: false }) }
it "does not run the analyzer" do
# Only type_collector ran
assert_equal 1, reduce_result.size
end
end
describe "when analyze? returns true" do
let(:query) { GraphQL::Query.new(Dummy::Schema, query_string, variables: variables, context: { analyze: true }) }
it "it runs the analyzer" do
# Both analyzers ran
assert_equal 2, reduce_result.size
end
end
describe "Visitor#previous_field_definition" do
let(:analyzers) { [AstPreviousField] }
let(:query) { GraphQL::Query.new(Dummy::Schema, "{ __schema { types { name } } }") }
it "it runs the analyzer" do
# The field enclosing `name` is __Schema.types.
prev_field = reduce_result.first
assert_equal "__Schema.types", prev_field.path
end
end
describe "Visitor#argument_definition" do
let(:analyzers) { [AstArguments] }
let(:query) do
GraphQL::Query.new(
Dummy::Schema,
'{ searchDairy(product: [{ source: "SHEEP" }]) { ... on Cheese { id } } }'
)
end
it "it runs the analyzer" do
# `source:` is the current argument; `product:` is its enclosing argument.
argument, prev_argument = reduce_result.first
assert_equal "DairyProductInput.source", argument.path
assert_equal "Query.searchDairy.product", prev_argument.path
end
end
end
# Uses the lets from the enclosing ".analyze_query" describe:
# AstTypeCollector + AstNodeCounter run against the cheese query.
it "calls the defined analyzers" do
collected_types, node_counts = reduce_result
expected_visited_types = [
Dummy::DairyAppQuery,
Dummy::Cheese,
GraphQL::Types::Int,
GraphQL::Types::String
]
assert_equal expected_visited_types, collected_types
expected_node_counts = {
GraphQL::Language::Nodes::OperationDefinition => 1,
GraphQL::Language::Nodes::Field => 3,
GraphQL::Language::Nodes::Argument => 1
}
assert_equal expected_node_counts, node_counts
end
describe "tracing" do
let(:query_string) { "{ t: __typename }"}
# Analysis emits analyze_query / analyze_multiplex trace events.
it "emits traces" do
traces = TestTracing.with_trace do
ctx = { tracers: [TestTracing] }
Dummy::Schema.execute(query_string, context: ctx)
end
# The query_trace is on the list _first_ because it finished first
_lex, _parse, _validate, query_trace, multiplex_trace, *_rest = traces
assert_equal "analyze_multiplex", multiplex_trace[:key]
assert_instance_of GraphQL::Execution::Multiplex, multiplex_trace[:multiplex]
assert_equal "analyze_query", query_trace[:key]
assert_instance_of GraphQL::Query, query_trace[:query]
end
end
# Counts connection fields separately from plain fields.
class AstConnectionCounter < GraphQL::Analysis::AST::Analyzer
def initialize(query)
super
@fields = 0
@connections = 0
end
def on_enter_field(node, parent, visitor)
if visitor.field_definition.connection?
@connections += 1
else
@fields += 1
end
end
# { fields: <plain-field count>, connections: <connection-field count> }
def result
{
fields: @fields,
connections: @connections
}
end
end
describe "when processing fields" do
let(:analyzers) { [AstConnectionCounter] }
let(:reduce_result) { GraphQL::Analysis::AST.analyze_query(query, analyzers) }
let(:query) { GraphQL::Query.new(StarWars::Schema, query_string, variables: variables) }
let(:query_string) {%|
query getBases {
empire {
basesByName(first: 30) { edges { cursor } }
bases(first: 30) { edges { cursor } }
}
}
|}
it "knows which fields are connections" do
# basesByName and bases are connections; the other 5 fields are plain.
connection_counts = reduce_result.first
expected_connection_counts = {
:fields => 5,
:connections => 2
}
assert_equal expected_connection_counts, connection_counts
end
end
end
end
Update test: build the query against the class-based `Dummy::Schema` instead of the legacy `Dummy::Schema.graphql_definition`.
# frozen_string_literal: true
require "spec_helper"
describe GraphQL::Analysis::AST do
# Collects the GraphQL type definitions seen while walking a query: the type
# of each operation definition plus the unwrapped return type of each field.
class AstTypeCollector < GraphQL::Analysis::AST::Analyzer
  def initialize(query)
    super
    @types = []
  end

  def on_enter_operation_definition(node, parent, visitor)
    @types << visitor.type_definition
  end

  # CONSISTENCY FIX: analyzer hooks receive (node, parent, visitor); the old
  # parameter list `(memo, node, visitor)` mislabelled the first two
  # arguments. Only `visitor` was used, so behavior is unchanged.
  def on_enter_field(node, parent, visitor)
    @types << visitor.field_definition.type.unwrap
  end

  # Array of collected type definitions, in visit order.
  def result
    @types
  end
end
# Tallies how many AST nodes of each class appear in the query document.
class AstNodeCounter < GraphQL::Analysis::AST::Analyzer
  def initialize(query)
    super
    # Missing keys read as 0; `+=` then stores the incremented count.
    @nodes = Hash.new(0)
  end

  def on_enter_abstract_node(node, _parent, _visitor)
    @nodes[node.class] += 1
  end

  # Hash of node class => occurrence count.
  def result
    @nodes
  end
end
# Analyzer that only runs when the query context sets :analyze — used to
# exercise the analyze? predicate.
class AstConditionalAnalyzer < GraphQL::Analysis::AST::Analyzer
def initialize(query)
super
@i_have_been_called = false
end
def analyze?
!!query.context[:analyze]
end
# NOTE(review): this hook lacks the on_enter_/on_leave_ prefix used by the
# other analyzers in this file — confirm the visitor actually dispatches to it.
def on_operation_definition(node, parent, visitor)
@i_have_been_called = true
end
# True iff any hook fired.
def result
@i_have_been_called
end
end
# Analyzer whose result is an AnalysisError; the engine should surface it
# under the response's "errors" key.
class AstErrorAnalyzer < GraphQL::Analysis::AST::Analyzer
def result
GraphQL::AnalysisError.new("An Error!")
end
end
# Records visitor.previous_field_definition (the field definition enclosing
# the field currently being visited).
class AstPreviousField < GraphQL::Analysis::AST::Analyzer
def on_enter_field(node, parent, visitor)
@previous_field = visitor.previous_field_definition
end
def result
@previous_field
end
end
# Records the current and enclosing argument definitions when entering an
# argument node.
class AstArguments < GraphQL::Analysis::AST::Analyzer
def on_enter_argument(node, parent, visitor)
@argument = visitor.argument_definition
@previous_argument = visitor.previous_argument_definition
end
# [current argument definition, enclosing argument definition]
def result
[@argument, @previous_argument]
end
end
describe "using the AST analysis engine" do
# Minimal schema wired to the AST analysis engine, with an analyzer that
# always reports an error.
let(:schema) do
query_type = Class.new(GraphQL::Schema::Object) do
graphql_name 'Query'
field :foobar, Integer, null: false
def foobar
1337
end
end
Class.new(GraphQL::Schema) do
query query_type
use GraphQL::Analysis::AST
query_analyzer AstErrorAnalyzer
use GraphQL::Execution::Interpreter
end
end
let(:query_string) {%|
query {
foobar
}
|}
let(:query) { GraphQL::Query.new(schema, query_string, variables: {}) }
# An analyzer returning an error should block execution and surface the
# error message in the response.
it "runs the AST analyzers correctly" do
res = query.result
refute res.key?("data")
assert_equal ["An Error!"], res["errors"].map { |e| e["message"] }
end
it "skips rewrite" do
# Try running the query:
query.result
# But the validation step doesn't build an irep_node tree
assert_nil query.irep_selection
end
describe "when validate: false" do
let(:query) { GraphQL::Query.new(schema, query_string, validate: false) }
it "Skips rewrite" do
# Try running the query:
query.result
# But the validation step doesn't build an irep_node tree
assert_nil query.irep_selection
end
end
end
describe ".analyze_query" do
let(:analyzers) { [AstTypeCollector, AstNodeCounter] }
let(:reduce_result) { GraphQL::Analysis::AST.analyze_query(query, analyzers) }
let(:variables) { {} }
# Builds against the class-based Dummy::Schema, consistent with the other
# queries in this file.
let(:query) { GraphQL::Query.new(Dummy::Schema, query_string, variables: variables) }
let(:query_string) {%|
{
cheese(id: 1) {
id
flavor
}
}
|}
describe "without a valid operation" do
# Document containing only comments — no executable operation.
let(:query_string) {%|
# A comment
# is an invalid operation
# Should break
|}
it "bails early when there is no selected operation to be executed" do
# One result per analyzer is still returned.
assert_equal 2, reduce_result.size
end
end
describe "conditional analysis" do
let(:analyzers) { [AstTypeCollector, AstConditionalAnalyzer] }
describe "when analyze? returns false" do
let(:query) { GraphQL::Query.new(Dummy::Schema, query_string, variables: variables, context: { analyze: false }) }
it "does not run the analyzer" do
# Only type_collector ran
assert_equal 1, reduce_result.size
end
end
describe "when analyze? returns true" do
let(:query) { GraphQL::Query.new(Dummy::Schema, query_string, variables: variables, context: { analyze: true }) }
it "it runs the analyzer" do
# Both analyzers ran
assert_equal 2, reduce_result.size
end
end
describe "Visitor#previous_field_definition" do
let(:analyzers) { [AstPreviousField] }
let(:query) { GraphQL::Query.new(Dummy::Schema, "{ __schema { types { name } } }") }
it "it runs the analyzer" do
# The field enclosing `name` is __Schema.types.
prev_field = reduce_result.first
assert_equal "__Schema.types", prev_field.path
end
end
describe "Visitor#argument_definition" do
let(:analyzers) { [AstArguments] }
let(:query) do
GraphQL::Query.new(
Dummy::Schema,
'{ searchDairy(product: [{ source: "SHEEP" }]) { ... on Cheese { id } } }'
)
end
it "it runs the analyzer" do
# `source:` is the current argument; `product:` is its enclosing argument.
argument, prev_argument = reduce_result.first
assert_equal "DairyProductInput.source", argument.path
assert_equal "Query.searchDairy.product", prev_argument.path
end
end
end
# Uses the lets from the enclosing ".analyze_query" describe:
# AstTypeCollector + AstNodeCounter run against the cheese query.
it "calls the defined analyzers" do
collected_types, node_counts = reduce_result
expected_visited_types = [
Dummy::DairyAppQuery,
Dummy::Cheese,
GraphQL::Types::Int,
GraphQL::Types::String
]
assert_equal expected_visited_types, collected_types
expected_node_counts = {
GraphQL::Language::Nodes::OperationDefinition => 1,
GraphQL::Language::Nodes::Field => 3,
GraphQL::Language::Nodes::Argument => 1
}
assert_equal expected_node_counts, node_counts
end
describe "tracing" do
let(:query_string) { "{ t: __typename }"}
# Analysis emits analyze_query / analyze_multiplex trace events.
it "emits traces" do
traces = TestTracing.with_trace do
ctx = { tracers: [TestTracing] }
Dummy::Schema.execute(query_string, context: ctx)
end
# The query_trace is on the list _first_ because it finished first
_lex, _parse, _validate, query_trace, multiplex_trace, *_rest = traces
assert_equal "analyze_multiplex", multiplex_trace[:key]
assert_instance_of GraphQL::Execution::Multiplex, multiplex_trace[:multiplex]
assert_equal "analyze_query", query_trace[:key]
assert_instance_of GraphQL::Query, query_trace[:query]
end
end
# Counts connection fields separately from plain fields.
class AstConnectionCounter < GraphQL::Analysis::AST::Analyzer
def initialize(query)
super
@fields = 0
@connections = 0
end
def on_enter_field(node, parent, visitor)
if visitor.field_definition.connection?
@connections += 1
else
@fields += 1
end
end
# { fields: <plain-field count>, connections: <connection-field count> }
def result
{
fields: @fields,
connections: @connections
}
end
end
describe "when processing fields" do
let(:analyzers) { [AstConnectionCounter] }
let(:reduce_result) { GraphQL::Analysis::AST.analyze_query(query, analyzers) }
let(:query) { GraphQL::Query.new(StarWars::Schema, query_string, variables: variables) }
let(:query_string) {%|
query getBases {
empire {
basesByName(first: 30) { edges { cursor } }
bases(first: 30) { edges { cursor } }
}
}
|}
it "knows which fields are connections" do
# basesByName and bases are connections; the other 5 fields are plain.
connection_counts = reduce_result.first
expected_connection_counts = {
:fields => 5,
:connections => 2
}
assert_equal expected_connection_counts, connection_counts
end
end
end
end
|
# Production environment settings (standard Rails template with a few
# app-specific overrides).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# NOTE(review): ENV values are strings — any non-empty setting (including
# "false") is truthy here, so FORCE_SSL=false still forces SSL. Confirm
# whether string coercion is intended.
config.force_ssl = ENV.fetch("FORCE_SSL") { false }
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "octobox_#{Rails.env}"
# config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
Actually enable lograge in production
# Production environment settings (same template as above, plus lograge).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# NOTE(review): ENV values are strings — any non-empty setting (including
# "false") is truthy here, so FORCE_SSL=false still forces SSL. Confirm
# whether string coercion is intended.
config.force_ssl = ENV.fetch("FORCE_SSL") { false }
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "octobox_#{Rails.env}"
# config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Condense each request into a single structured log line.
config.lograge.enabled = true
end
|
require 'rails_helper'
# Specs in this file have access to a helper object that includes
# the CasesHelper. For example:
#
# describe CasesHelper do
# describe "string concat" do
# it "concats two strings with spaces" do
# expect(helper.concat_strings("this","that")).to eq("this that")
# end
# end
# end
# Specs for CasesHelper#action_button_for: each state-machine event maps to
# an action-button anchor tag.
RSpec.describe CasesHelper, type: :helper do
let(:manager) { create :manager }
let(:responder) { create :responder }
let(:coworker) { create :responder,
responding_teams: responder.responding_teams }
let(:another_responder) { create :responder }
describe '#action_button_for(event)' do
context 'when event == :assign_responder' do
it 'generates HTML that links to the new assignment page' do
@case = create(:case)
expect(action_button_for(:assign_responder)).to eq(
"<a id=\"action--assign-to-responder\" class=\"button\" href=\"/cases/#{@case.id}/assignments/new\">Assign to a responder</a>")
end
end
context 'when event == :close' do
it 'generates HTML that links to the close case action' do
@case = create(:responded_case)
expect(action_button_for(:close)).to eq(
"<a id=\"action--close-case\" class=\"button\" data-method=\"get\" \
href=\"/cases/#{@case.id}/close\">Close case</a>"
)
end
end
context 'when event == :add_responses' do
it 'generates HTML that links to the upload response page' do
@case = create(:accepted_case)
expect(action_button_for(:add_responses)).to eq(
"<a id=\"action--upload-response\" class=\"button\" href=\"/cases/#{@case.id}/new_response_upload\">Upload response</a>"
)
end
end
# TYPO FIX: description was 'when event = ":respond' (stray quote, single
# '='); aligned with the sibling contexts above.
context 'when event == :respond' do
it 'generates HTML that links to the upload response page' do
@case = create(:case_with_response)
expect(action_button_for(:respond)).to eq(
"<a id=\"action--mark-response-as-sent\" class=\"button\" \
href=\"/cases/#{@case.id}/respond\">Mark response as sent</a>"
)
end
end
end
end
helper spec
require 'rails_helper'
# Specs in this file have access to a helper object that includes
# the CasesHelper. For example:
#
# describe CasesHelper do
# describe "string concat" do
# it "concats two strings with spaces" do
# expect(helper.concat_strings("this","that")).to eq("this that")
# end
# end
# end
# Specs for CasesHelper#action_button_for: each state-machine event maps to
# an action-button anchor tag; :add_responses varies with clearance.
RSpec.describe CasesHelper, type: :helper do
let(:manager) { create :manager }
let(:responder) { create :responder }
let(:coworker) { create :responder,
responding_teams: responder.responding_teams }
let(:another_responder) { create :responder }
describe '#action_button_for(event)' do
context 'when event == :assign_responder' do
it 'generates HTML that links to the new assignment page' do
@case = create(:case)
expect(action_button_for(:assign_responder)).to eq(
"<a id=\"action--assign-to-responder\" class=\"button\" href=\"/cases/#{@case.id}/assignments/new\">Assign to a responder</a>")
end
end
context 'when event == :close' do
it 'generates HTML that links to the close case action' do
@case = create(:responded_case)
expect(action_button_for(:close)).to eq(
"<a id=\"action--close-case\" class=\"button\" data-method=\"get\" \
href=\"/cases/#{@case.id}/close\">Close case</a>"
)
end
end
context 'when event == :add_responses' do
context 'case does not require clearance' do
it 'generates HTML that links to the upload response page' do
@case = create(:accepted_case)
expect(@case).to receive(:requires_clearance?).and_return(false)
expect(action_button_for(:add_responses)).to eq(
"<a id=\"action--upload-response\" class=\"button\" href=\"/cases/#{@case.id}/new_response_upload?action=upload\">Upload response</a>"
)
end
end
context 'case requires clearance' do
it 'generates HTML that links to the upload response page' do
@case = create(:accepted_case)
expect(@case).to receive(:requires_clearance?).and_return(true)
expect(action_button_for(:add_responses)).to eq(
"<a id=\"action--upload-response\" class=\"button\" href=\"/cases/#{@case.id}/new_response_upload?action=upload-flagged\">Upload response</a>"
)
end
end
end
# TYPO FIX: description was 'when event = ":respond' (stray quote, single
# '='); aligned with the sibling contexts above.
context 'when event == :respond' do
it 'generates HTML that links to the upload response page' do
@case = create(:case_with_response)
expect(action_button_for(:respond)).to eq(
"<a id=\"action--mark-response-as-sent\" class=\"button\" \
href=\"/cases/#{@case.id}/respond\">Mark response as sent</a>"
)
end
end
end
end
|
# Production environment configuration for the LinguisticExplorer Rails app
# (pre-Rails-4 style: serve_static_assets, threadsafe! comment).
LinguisticExplorer::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# The production environment is meant for finished, "live" apps.
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Specifies the header that your server uses for sending files
config.action_dispatch.x_sendfile_header = "X-Sendfile"
# For nginx:
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect'
# If you have no front-end server that supports something like X-Sendfile,
# just comment this out and Rails will serve the files
# See everything in the log (default is :info)
# config.log_level = :debug
# Use a different logger for distributed setups
# config.logger = SyslogLogger.new
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Disable Rails's static asset server
# In production, Apache or nginx will already do this
config.serve_static_assets = false
# Enable serving of images, stylesheets, and javascripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# needed for Devise
# NOTE(review): 'linguisticexplorer' is not a fully-qualified hostname, so
# absolute URLs generated in mailer views (e.g. Devise confirmation links)
# may not resolve — confirm the intended production domain.
config.action_mailer.default_url_options = { :host => 'linguisticexplorer'}
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# NOTE(review): this Settings.configure block is empty and appears to be a
# no-op — confirm whether it can be removed or was meant to set values.
Settings.configure do |s|
end
end
Fix ActionMailer settings: use the fully-qualified host and disable automatic STARTTLS negotiation
# Production environment configuration for the LinguisticExplorer Rails app.
LinguisticExplorer::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb
  # The production environment is meant for finished, "live" apps.

  # Code is not reloaded between requests
  config.cache_classes = true

  # Full error reports are disabled and caching is turned on
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Specifies the header that your server uses for sending files
  config.action_dispatch.x_sendfile_header = "X-Sendfile"
  # For nginx:
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect'
  # If you have no front-end server that supports something like X-Sendfile,
  # just comment this out and Rails will serve the files

  # See everything in the log (default is :info)
  # config.log_level = :debug

  # Use a different logger for distributed setups
  # config.logger = SyslogLogger.new

  # Use a different cache store in production
  # config.cache_store = :mem_cache_store

  # Disable Rails's static asset server
  # In production, Apache or nginx will already do this
  config.serve_static_assets = false

  # Enable serving of images, stylesheets, and javascripts from an asset server
  # config.action_controller.asset_host = "http://assets.example.com"

  # Disable delivery errors, bad email addresses will be ignored
  # config.action_mailer.raise_delivery_errors = false

  # Needed for Devise and Postfix: host used when building absolute URLs in
  # mailer views (confirmation links etc.).
  config.action_mailer.default_url_options = { :host => 'linguisticexplorer.terraling.com' }
  # FIX: :enable_starttls_auto was previously nested inside
  # default_url_options, where Action Mailer ignores it for delivery and
  # url_for emits it as a stray query parameter on every generated link.
  # STARTTLS is an SMTP transport option, so it belongs in smtp_settings.
  # Other smtp options stay at their defaults (local Postfix on localhost:25).
  config.action_mailer.smtp_settings = { :enable_starttls_auto => false }

  # Enable threaded mode
  # config.threadsafe!

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found)
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners
  config.active_support.deprecation = :notify

  # NOTE(review): empty Settings.configure block — appears to be a no-op.
  Settings.configure do |s|
  end
end
|
require 'rails_helper'

ENV['REPLY_HOSTNAME'] = 'replyhostname.com'

# Specs for EmailHelper#reply_to_address, which encodes a discussion, a user
# and the user's email API key into the local part of a reply-to address.
describe EmailHelper do
  context 'reply_to_address' do
    # FIX: stub :id rather than :key — the expected address encodes the
    # discussion id into the `d=` component, and stubbing the wrong method on
    # a verifying double raises "received unexpected message".
    let(:discussion) { double(:discussion, id: 'd1') }
    let(:user) { double(:user, id: '1', email_api_key: 'abc123') }

    it "gives correct format" do
      output = helper.reply_to_address(discussion: discussion,
                                       user: user)
      output.should == "d=d1&u=1&k=abc123@replyhostname.com"
    end
  end
end
Fix discussion double: stub `id` instead of `key`
require 'rails_helper'

ENV['REPLY_HOSTNAME'] = 'replyhostname.com'

# Specs for EmailHelper#reply_to_address, which encodes a discussion, a user
# and the user's email API key into the local part of a reply-to address.
describe EmailHelper do
  context 'reply_to_address' do
    let(:discussion) { double(:discussion, id: 'd1') }
    let(:user) { double(:user, id: '1', email_api_key: 'abc123') }

    it "gives correct format" do
      output = helper.reply_to_address(discussion: discussion,
                                       user: user)
      # FIX: use the expect syntax instead of the deprecated `should` —
      # consistent with the other specs in this suite and required once
      # rspec-expectations' should syntax is disabled.
      expect(output).to eq "d=d1&u=1&k=abc123@replyhostname.com"
    end
  end
end
|
# Production environment configuration for the doc-generator Rails app
# (Heroku + S3/Paperclip uploads + Mandrill SMTP).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # AWS config variables for Paperclip attachments stored on S3.
  config.paperclip_defaults = {
    storage: :s3,
    s3_credentials: {
      bucket: ENV['S3_BUCKET'],
      access_key_id: ENV['AWS_ACCESS_KEY_ID'],
      secret_access_key: ENV['AWS_SECRET_ACCESS_KEY']
    },
    path: ":basename.:extension", # NOTE(review): flat path may collide between uploads with the same basename — confirm
    s3_host_name: 's3-us-west-1.amazonaws.com',
    url: ':s3_path_url'
  }

  # Mailer: deliver over SMTP via Mandrill, raising on delivery failures.
  config.action_mailer.default_url_options = { :host => 'doc-generator.herokuapp.com' }
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.default :charset => "utf-8"
  config.action_mailer.smtp_settings = {
    # FIX: the address read "stmp.mandrillapp.com" (typo) — Mandrill's SMTP
    # endpoint is smtp.mandrillapp.com; the typo made every delivery fail on
    # DNS resolution.
    address: "smtp.mandrillapp.com",
    port: 587,
    domain: 'doc-generator.herokuapp.com', # HELO domain — herokuapp.com is the app's public host
    authentication: 'login',
    enable_starttls_auto: true,
    user_name: ENV['MANDRILL_USERNAME'],
    password: ENV['MANDRILL_PASSWORD']
  } # FIX: this closing brace was missing, leaving the hash (and file) unparsable
end
Undo last edit
# Production environment configuration for the doc-generator Rails app
# (Heroku + S3/Paperclip uploads + Mandrill SMTP).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # AWS config variables for Paperclip attachments stored on S3.
  config.paperclip_defaults = {
    storage: :s3,
    s3_credentials: {
      bucket: ENV['S3_BUCKET'],
      access_key_id: ENV['AWS_ACCESS_KEY_ID'],
      secret_access_key: ENV['AWS_SECRET_ACCESS_KEY']
    },
    path: ":basename.:extension", # NOTE(review): flat path may collide between uploads with the same basename — confirm
    s3_host_name: 's3-us-west-1.amazonaws.com',
    url: ':s3_path_url'
  }

  # Mailer: deliver over SMTP via Mandrill, raising on delivery failures.
  config.action_mailer.default_url_options = { :host => 'doc-generator.herokuapp.com' }
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.default :charset => "utf-8"
  config.action_mailer.smtp_settings = {
    # FIX: the address read "stmp.mandrillapp.com" (typo) — Mandrill's SMTP
    # endpoint is smtp.mandrillapp.com; the typo made every delivery fail on
    # DNS resolution.
    address: "smtp.mandrillapp.com",
    port: 587,
    domain: 'doc-generator.herokuapp.com', # HELO domain — herokuapp.com is the app's public host
    authentication: 'login',
    enable_starttls_auto: true,
    user_name: ENV['MANDRILL_USERNAME'],
    password: ENV['MANDRILL_PASSWORD']
  }
end
|
require 'spec_helper'

# Placeholder spec for PagesHelper. RSpec exposes a `helper` object that
# includes the module under test, so real examples would look like:
#
#   describe PagesHelper do
#     describe "string concat" do
#       it "concats two strings with spaces" do
#         helper.concat_strings("this","that").should == "this that"
#       end
#     end
#   end
describe PagesHelper do
  pending "add some examples to (or delete) #{__FILE__}"
end
Won't be needed.
|
# Production environment configuration for the tcc-rails Heroku app.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  config.action_mailer.perform_deliveries = true # Set it to false to disable the email in dev mode
  config.action_mailer.delivery_method = :smtp
  # FIX: :host must be a bare hostname — including the "http://" scheme makes
  # url_for generate malformed links ("http://http://tcc-rails..."); the
  # protocol defaults to http and can be set separately via :protocol.
  config.action_mailer.default_url_options = { :host => 'tcc-rails.herokuapp.com' }
  # FIXME(security): Gmail credentials are committed in plain text — move them
  # to environment variables and rotate this password.
  config.action_mailer.smtp_settings = {
    :address => "smtp.gmail.com",
    :port => 587,
    :authentication => :plain,
    :user_name => "footwear.clickjogos@gmail.com",
    :password => "clickjogos",
    # FIX: Gmail requires STARTTLS on port 587; without this flag the server
    # rejects the connection before authentication.
    :enable_starttls_auto => true
  }
end
deploy mailer
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.action_mailer.default_url_options = { :host => 'tcc-rails.herokuapp.com' }
config.action_mailer.perform_deliveries = true
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
:address => "smtp.gmail.com",
:port => 587,
:user_name => 'footwear.clickjogos@gmail.com',
:password => 'clickjogos',
:authentication => 'plain',
:enable_starttls_auto => true
}
end |
require 'rails_helper'

# Specs for UsersHelper: document-type humanization and the title rendering
# of comments whose commentable may have been hidden.
describe UsersHelper do

  describe '#humanize_document_type' do
    it "should return a humanized document type" do
      expect(humanize_document_type("1")).to eq "DNI"
      expect(humanize_document_type("2")).to eq "Passport"
      expect(humanize_document_type("3")).to eq "Residence card"
    end
  end

  # FIX: the expectation lines below exceeded the 140-character lint limit;
  # each long expected string is now split across two lines with `+`,
  # leaving the asserted value byte-identical.
  describe '#deleted_commentable_text' do

    it "should return the appropriate message for deleted debates" do
      debate = create(:debate)
      comment = create(:comment, commentable: debate)
      debate.hide
      expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
        '</del> <span class="small">(This debate has been deleted)</span>')
    end

    it "should return the appropriate message for deleted proposals" do
      proposal = create(:proposal)
      comment = create(:comment, commentable: proposal)
      proposal.hide
      expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
        '</del> <span class="small">(This proposal has been deleted)</span>')
    end

    it "should return the appropriate message for deleted budget investment" do
      investment = create(:budget_investment)
      comment = create(:comment, commentable: investment)
      investment.hide
      expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
        '</del> <span class="small">(This investment project has been deleted)</span>')
    end
  end

  describe '#comment_commentable_title' do

    it "should return a link to the comment" do
      comment = create(:comment)
      expect(comment_commentable_title(comment)).to eq link_to comment.commentable.title, comment
    end

    it "should return a hint if the commentable has been deleted" do
      comment = create(:comment)
      comment.commentable.hide
      expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
        '</del> <span class="small">(This debate has been deleted)</span>')
    end
  end
end
Fix lines longer than 140 characters
require 'rails_helper'
# Specs for UsersHelper: document-type humanization and the title rendering
# of comments whose commentable may have been hidden.
# NOTE(review): the #deleted_commentable_text / #comment_commentable_title
# examples exercise comment_commentable_title — confirm whether they belong
# in a CommentsHelper spec instead.
describe UsersHelper do
describe '#humanize_document_type' do
it "should return a humanized document type" do
expect(humanize_document_type("1")).to eq "DNI"
expect(humanize_document_type("2")).to eq "Passport"
expect(humanize_document_type("3")).to eq "Residence card"
end
end
describe '#deleted_commentable_text' do
it "should return the appropriate message for deleted debates" do
debate = create(:debate)
comment = create(:comment, commentable: debate)
# Hiding the commentable is what triggers the "(… has been deleted)" hint.
debate.hide
expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
'</del> <span class="small">(This debate has been deleted)</span>')
end
it "should return the appropriate message for deleted proposals" do
proposal = create(:proposal)
comment = create(:comment, commentable: proposal)
proposal.hide
expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
'</del> <span class="small">(This proposal has been deleted)</span>')
end
it "should return the appropriate message for deleted budget investment" do
investment = create(:budget_investment)
comment = create(:comment, commentable: investment)
investment.hide
expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
'</del> <span class="small">(This investment project has been deleted)</span>')
end
end
describe '#comment_commentable_title' do
# Visible commentables render as a plain link to the comment.
it "should return a link to the comment" do
comment = create(:comment)
expect(comment_commentable_title(comment)).to eq link_to comment.commentable.title, comment
end
it "should return a hint if the commentable has been deleted" do
comment = create(:comment)
comment.commentable.hide
expect(comment_commentable_title(comment)).to eq('<del>' + comment.commentable.title +
'</del> <span class="small">(This debate has been deleted)</span>')
end
end
end
|
module Hakiri
  # Gem version string. Frozen so callers cannot mutate the constant in place
  # (mutable string constants are a common Ruby lint warning).
  VERSION = '0.6.0'.freeze
end
Version bump
module Hakiri
# Gem version string, bumped from 0.6.0 to 0.6.1.
VERSION = '0.6.1'
end |
require 'spec_helper'

# End-to-end tests of the command-line binary: each example invokes the bin
# via make_bin with one CLI flag and matches the summary line of its output.
describe 'Command test' do
  it 'works with as-links' do
    output = make_bin('--as-links www.github.com,foofoofoo.biz')
    expect(output).to match('1 failure')
  end

  it 'works with alt-ignore' do
    ignorableLinks = "#{FIXTURES_DIR}/images/ignorableAltViaOptions.html"
    output = make_bin('--alt-ignore /wikimedia/,gpl.png', ignorableLinks)
    expect(output).to match('successfully')
  end

  it 'works with check-external-hash' do
    brokenHashOnTheWeb = "#{FIXTURES_DIR}/links/brokenHashOnTheWeb.html"
    output = make_bin('--check-external-hash', brokenHashOnTheWeb)
    expect(output).to match('1 failure')
  end

  it 'works with directory-index-file' do
    link_pointing_to_directory = "#{FIXTURES_DIR}/links/link_pointing_to_directory.html"
    output = make_bin('--directory-index-file index.php', link_pointing_to_directory)
    expect(output).to match('successfully')
  end

  it 'works with disable-external' do
    external = "#{FIXTURES_DIR}/links/brokenLinkExternal.html"
    output = make_bin('--disable-external', external)
    expect(output).to match('successfully')
  end

  it 'works with ext' do
    external = "#{FIXTURES_DIR}/links/file.foo"
    output = make_bin('--ext .foo', external)
    expect(output).to match('1 failure')
  end

  it 'works with file-ignore' do
    external = "#{FIXTURES_DIR}/links/brokenHashInternal.html"
    output = make_bin("--file-ignore #{external}", external)
    expect(output).to match('successfully')
  end

  it 'works with href-ignore' do
    ignorableLinks = "#{FIXTURES_DIR}/links/ignorableLinksViaOptions.html"
    output = make_bin('--href-ignore /^http:\/\//,/sdadsad/,../whaadadt.html', ignorableLinks)
    expect(output).to match('successfully')
  end

  it 'works with href-swap' do
    translatedLink = "#{FIXTURES_DIR}/links/linkTranslatedViaHrefSwap.html"
    # FIX: the regex argument was unquoted, so the shell mangled its special
    # characters before the binary saw them; wrap it in double quotes so it
    # reaches the --href-swap option intact.
    output = make_bin('--href-swap "\A/articles/([\w-]+):\1.html"', translatedLink)
    expect(output).to match('successfully')
  end

  it 'works with only-4xx' do
    brokenHashOnTheWeb = "#{FIXTURES_DIR}/links/brokenHashOnTheWeb.html"
    output = make_bin('--only-4xx', brokenHashOnTheWeb)
    expect(output).to match('successfully')
  end

  it 'works with validate-favicon' do
    broken = "#{FIXTURES_DIR}/favicon/favicon_broken.html"
    output = make_bin('--validate-favicon', broken)
    expect(output).to match('1 failure')
  end

  it 'works with validate-html' do
    broken = "#{FIXTURES_DIR}/html/invalid_tag.html"
    output = make_bin('--validate-html', broken)
    expect(output).to match('1 failure')
  end
end
Wrap this cmd in quotes
require 'spec_helper'

# End-to-end tests of the command-line binary: each example runs the bin via
# make_bin with a single CLI flag and matches the summary of its output.
# Local variables use snake_case per the Ruby style guide; fixture file names
# keep their original casing.
describe 'Command test' do
  it 'works with as-links' do
    output = make_bin('--as-links www.github.com,foofoofoo.biz')
    expect(output).to match('1 failure')
  end

  it 'works with alt-ignore' do
    ignorable_alts = "#{FIXTURES_DIR}/images/ignorableAltViaOptions.html"
    output = make_bin('--alt-ignore /wikimedia/,gpl.png', ignorable_alts)
    expect(output).to match('successfully')
  end

  it 'works with check-external-hash' do
    broken_hash_fixture = "#{FIXTURES_DIR}/links/brokenHashOnTheWeb.html"
    output = make_bin('--check-external-hash', broken_hash_fixture)
    expect(output).to match('1 failure')
  end

  it 'works with directory-index-file' do
    directory_link = "#{FIXTURES_DIR}/links/link_pointing_to_directory.html"
    output = make_bin('--directory-index-file index.php', directory_link)
    expect(output).to match('successfully')
  end

  it 'works with disable-external' do
    external_link = "#{FIXTURES_DIR}/links/brokenLinkExternal.html"
    output = make_bin('--disable-external', external_link)
    expect(output).to match('successfully')
  end

  it 'works with ext' do
    foo_file = "#{FIXTURES_DIR}/links/file.foo"
    output = make_bin('--ext .foo', foo_file)
    expect(output).to match('1 failure')
  end

  it 'works with file-ignore' do
    ignored_file = "#{FIXTURES_DIR}/links/brokenHashInternal.html"
    output = make_bin("--file-ignore #{ignored_file}", ignored_file)
    expect(output).to match('successfully')
  end

  it 'works with href-ignore' do
    ignorable_hrefs = "#{FIXTURES_DIR}/links/ignorableLinksViaOptions.html"
    output = make_bin('--href-ignore /^http:\/\//,/sdadsad/,../whaadadt.html', ignorable_hrefs)
    expect(output).to match('successfully')
  end

  it 'works with href-swap' do
    swapped_href = "#{FIXTURES_DIR}/links/linkTranslatedViaHrefSwap.html"
    # The regex is quoted so the shell passes it through to the option intact.
    output = make_bin('--href-swap "\A/articles/([\w-]+):\1.html"', swapped_href)
    expect(output).to match('successfully')
  end

  it 'works with only-4xx' do
    broken_hash_fixture = "#{FIXTURES_DIR}/links/brokenHashOnTheWeb.html"
    output = make_bin('--only-4xx', broken_hash_fixture)
    expect(output).to match('successfully')
  end

  it 'works with validate-favicon' do
    broken_favicon = "#{FIXTURES_DIR}/favicon/favicon_broken.html"
    output = make_bin('--validate-favicon', broken_favicon)
    expect(output).to match('1 failure')
  end

  it 'works with validate-html' do
    invalid_html = "#{FIXTURES_DIR}/html/invalid_tag.html"
    output = make_bin('--validate-html', invalid_html)
    expect(output).to match('1 failure')
  end
end
|
# rubocop:disable Metrics/BlockLength
# Production environment configuration (Heroku + SendGrid SMTP).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  config.assets.css_compressor = :sass

  # Fall back to assets pipeline if a precompiled asset is missed.
  config.assets.compile = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "year_in_pictures_rails_helper_production"

  # Mailer: absolute URLs use the app's public host; deliver via SendGrid SMTP.
  config.action_mailer.perform_caching = false
  config.action_mailer.default_url_options = { host: ENV['APPLICATION_URL'] }
  # FIX: smtp_settings were assigned directly on ActionMailer::Base instead of
  # through the config object — inconsistent with every other setting in this
  # block, and bypasses Rails' normal config load ordering. Use
  # config.action_mailer.smtp_settings so it is applied like the rest.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    address: 'smtp.sendgrid.net',
    port: 587,
    user_name: ENV['SENDGRID_USERNAME'],
    password: ENV['SENDGRID_PASSWORD'],
    domain: 'heroku.com',
    authentication: :plain,
    enable_starttls_auto: true
  }

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = [I18n.default_locale]

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # On Heroku, log to STDOUT so the platform captures output.
  if ENV['RAILS_LOG_TO_STDOUT'].present?
    logger = ActiveSupport::Logger.new($stdout)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session

  # Enable generation of id-based attributes such as label for
  config.action_view.form_with_generates_ids = true
end
# rubocop:enable Metrics/BlockLength
Update config for SendGrid in production
# rubocop:disable Metrics/BlockLength
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true
  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  config.assets.css_compressor = :sass
  # Fall back to assets pipeline if a precompiled asset is missed.
  # NOTE(review): live compilation has a per-request cost in production;
  # confirm this is intentional rather than a workaround for a broken precompile.
  config.assets.compile = true
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local
  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true
  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug
  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "year_in_pictures_rails_helper_production"
  config.action_mailer.perform_caching = false
  # Mailer links are generated against the deployed host, taken from the
  # APPLICATION_URL environment variable (must be set in production).
  config.action_mailer.default_url_options = { host: ENV['APPLICATION_URL'] }
  # Deliver mail via SendGrid SMTP. Requires SENDGRID_USERNAME and
  # SENDGRID_PASSWORD in the environment (on Heroku, provided by the add-on).
  # NOTE(review): SendGrid now recommends user_name 'apikey' with an API key as
  # the password — confirm which credential scheme this account uses.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.perform_deliveries = true
  config.action_mailer.smtp_settings = {
    address: 'smtp.sendgrid.net',
    port: 587,
    user_name: ENV['SENDGRID_USERNAME'],
    password: ENV['SENDGRID_PASSWORD'],
    domain: 'heroku.com',
    authentication: :plain,
    enable_starttls_auto: true
  }
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = [I18n.default_locale]
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
  # When RAILS_LOG_TO_STDOUT is set (e.g. on Heroku/containers), log to stdout
  # instead of log/production.log, keeping the formatter configured above.
  if ENV['RAILS_LOG_TO_STDOUT'].present?
    logger           = ActiveSupport::Logger.new($stdout)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
  # Enable generation of id-based attributes such as label for
  config.action_view.form_with_generates_ids = true
end
# rubocop:enable Metrics/BlockLength
|
# frozen_string_literal: true

require 'spec_helper'

# Specs for ImageScraper::Client. Network-backed examples are replayed
# through VCR (the :vcr metadata); examples that assign `doc` directly
# exercise the parser without any HTTP traffic.
describe ImageScraper::Client, :vcr do
  let(:repo_url) { 'https://raw.github.com/charlotte-ruby/image_scraper' }

  describe 'foo' do
    it 'something' do
      url = 'http://www.amazon.com/Planet-Two-Disc-Digital-Combo-Blu-ray/dp/B004LWZW4W/ref=sr_1_1?s=movies-tv&ie=UTF8&qid=1324771542&sr=1-1'
      client = described_class.new(url)
      expect(client.page_images).not_to be_empty
    end
  end

  describe '#initialize' do
    it 'works with invalid URLs' do
      allow_any_instance_of(described_class).to receive(:fetch).and_return(nil)
      scraper = described_class.new('bogusurl4444.com')
      expect(scraper.doc).to be_nil
    end

    it 'has empty data if URL is invalid' do
      allow_any_instance_of(described_class).to receive(:fetch).and_return(nil)
      scraper = described_class.new('bogusurl4444.com')
      expect(scraper.image_urls).to be_empty
      expect(scraper.stylesheets).to be_empty
      expect(scraper.stylesheet_images).to be_empty
      expect(scraper.page_images).to be_empty
    end
  end

  describe '#image_urls' do
    it 'scrapes absolute paths' do
      images = [
        'http://en.wikipedia.org/static/images/poweredby_mediawiki_88x31.png',
        'http://en.wikipedia.org/static/images/wikimedia-button.png',
        'http://en.wikipedia.org/wiki/Special:CentralAutoLogin/start?type=1x1',
        'http://upload.wikimedia.org/wikipedia/commons/thumb/b/b6/SIPI_Jelly_Beans_4.1.07.tiff/lossy-page1-220px-SIPI_Jelly_Beans_4.1.07.tiff.jpg',
        'http://upload.wikimedia.org/wikipedia/en/thumb/5/5c/Symbol_template_class.svg/16px-Symbol_template_class.svg.png'
      ]
      url = 'http://en.wikipedia.org/wiki/Standard_test_image'
      client = described_class.new(url, include_css_images: false)
      expect(client.image_urls).to eq(images)
    end

    it 'scrapes with whitespace stripped' do
      file = 'spec/support/extra_whitespace.html'
      client = described_class.new('')
      client.doc = File.open(file) { |f| Nokogiri::HTML(f) }
      images = [
        'http://g-ecx.images-amazon.com/images/G/01/SIMON/IsaacsonWalter._V164348457_.jpg',
        'http://g-ecx.images-amazon.com/images/G/01/SIMON/IsaacsonWalter.jpg'
      ]
      expect(client.image_urls).to eq(images)
    end

    it 'scrapes relative paths' do
      scraper = described_class.new('http://en.wikipedia.org/wiki/Standard_test_image',
                                    convert_to_absolute_url: false,
                                    include_css_images: false)
      images = [
        'http://en.wikipedia.org/static/images/poweredby_mediawiki_88x31.png',
        'http://en.wikipedia.org/static/images/wikimedia-button.png',
        'http://en.wikipedia.org/wiki/Special:CentralAutoLogin/start?type=1x1',
        'http://upload.wikimedia.org/wikipedia/commons/thumb/b/b6/SIPI_Jelly_Beans_4.1.07.tiff/lossy-page1-220px-SIPI_Jelly_Beans_4.1.07.tiff.jpg',
        'http://upload.wikimedia.org/wikipedia/en/thumb/5/5c/Symbol_template_class.svg/16px-Symbol_template_class.svg.png'
      ]
      expect(scraper.image_urls).to eq(images)
    end

    it 'handles url with unescaped spaces' do
      url = 'https://raw.github.com/syoder/image_scraper/stylesheet_fix/test/resources/space in url.html'
      scraper = described_class.new(url, include_css_images: false)
      expected_url = 'https://raw.github.com/syoder/image_scraper/stylesheet_fix/test/resources/image1.png'
      expect(scraper.image_urls.length).to eq(1)
      expect(scraper.image_urls.first).to eq(expected_url)
    end
  end

  describe '#stylesheets' do
    it 'lists relative path stylesheets' do
      file = 'spec/support/stylesheet_test.html'
      client = described_class.new('http://test.com')
      client.doc = File.open(file) { |f| Nokogiri::HTML(f) }
      stylesheets = [
        'http://test.com/css/master.css',
        'http://test.com/css/master2.css'
      ]
      expect(client.stylesheets).to eq(stylesheets)
    end

    it 'handles stylesheet with an unescaped url' do
      scraper = described_class.new('')
      scraper.url = 'http://test.com'
      scraper.doc = Nokogiri::HTML("<link rel='stylesheet' href='http://test.com/unescaped path.css'>")
      expect(scraper.stylesheets).to include('http://test.com/unescapedpath.css')
    end
  end

  describe '#page_images' do
    it 'handles unescaped urls' do
      scraper = described_class.new('http://test.com')
      scraper.doc = Nokogiri::HTML("<img src='http://test.com/unescaped path'>")
      expect(scraper.page_images.length).to eq(1)
      expect(scraper.page_images).to include('http://test.com/unescaped%20path')
    end

    # NOTE(review): description typo fixed ('handldes' -> 'handles'). If VCR
    # derives cassette names from example descriptions, rename the cassette too.
    it 'handles image urls that include square brackets' do
      scraper = described_class.new('http://google.com')
      scraper.doc = Nokogiri::HTML("<img src='image[1].jpg' >")
      expect(scraper.page_images).to be_empty
    end
  end

  describe '#stylesheet_images' do
    it 'scrapes stylesheet images' do
      url = "#{repo_url}/master/spec/support/stylesheet_unescaped_image.html"
      stylesheet_path = "#{repo_url}/master/someimage.png"
      # /charlotte-ruby/image_scraper/master/spec/support/unescaped_image.css
      scraper = described_class.new(url, include_css_images: true)
      expect(scraper.stylesheet_images).to include(stylesheet_path)
    end

    it 'handles 404s' do
      scraper = described_class.new('')
      scraper.url = 'http://google.com'
      scraper.doc = Nokogiri::HTML("<link rel='stylesheet' href='http://google.com/does_not_exist.css'>")
      expect(scraper.stylesheet_images).to be_empty
    end

    it 'handles stylesheet image with a relative url' do
      url = "#{repo_url}/master/spec/support/relative_image_url.html"
      image_url = "#{repo_url}/master/spec/images/some_image.png"
      scraper = described_class.new(url, include_css_images: true)
      expect(scraper.stylesheet_images).to include(image_url)
    end
  end
end
rubocop
# frozen_string_literal: true

require 'spec_helper'

# Specs for ImageScraper::Client. Network-backed examples are replayed
# through VCR (the :vcr metadata); examples that assign `doc` directly
# exercise the parser without any HTTP traffic.
describe ImageScraper::Client, :vcr do
  let(:repo_url) { 'https://raw.github.com/charlotte-ruby/image_scraper' }

  describe 'foo' do
    it 'something' do
      url = 'http://www.amazon.com/Planet-Two-Disc-Digital-Combo-Blu-ray/dp/B004LWZW4W/ref=sr_1_1?s=movies-tv&ie=UTF8&qid=1324771542&sr=1-1'
      client = described_class.new(url)
      expect(client.page_images).not_to be_empty
    end
  end

  describe '#initialize' do
    it 'works with invalid URLs' do
      allow_any_instance_of(described_class).to receive(:fetch).and_return(nil)
      scraper = described_class.new('bogusurl4444.com')
      expect(scraper.doc).to be_nil
    end

    it 'has empty data if URL is invalid' do
      allow_any_instance_of(described_class).to receive(:fetch).and_return(nil)
      scraper = described_class.new('bogusurl4444.com')
      expect(scraper.image_urls).to be_empty
      expect(scraper.stylesheets).to be_empty
      expect(scraper.stylesheet_images).to be_empty
      expect(scraper.page_images).to be_empty
    end
  end

  describe '#image_urls' do
    it 'scrapes absolute paths' do
      images = [
        'http://en.wikipedia.org/static/images/poweredby_mediawiki_88x31.png',
        'http://en.wikipedia.org/static/images/wikimedia-button.png',
        'http://en.wikipedia.org/wiki/Special:CentralAutoLogin/start?type=1x1',
        'http://upload.wikimedia.org/wikipedia/commons/thumb/b/b6/SIPI_Jelly_Beans_4.1.07.tiff/lossy-page1-220px-SIPI_Jelly_Beans_4.1.07.tiff.jpg',
        'http://upload.wikimedia.org/wikipedia/en/thumb/5/5c/Symbol_template_class.svg/16px-Symbol_template_class.svg.png'
      ]
      url = 'http://en.wikipedia.org/wiki/Standard_test_image'
      client = described_class.new(url, include_css_images: false)
      expect(client.image_urls).to eq(images)
    end

    it 'scrapes with whitespace stripped' do
      file = 'spec/support/extra_whitespace.html'
      client = described_class.new('')
      client.doc = File.open(file) { |f| Nokogiri::HTML(f) }
      images = [
        'http://g-ecx.images-amazon.com/images/G/01/SIMON/IsaacsonWalter._V164348457_.jpg',
        'http://g-ecx.images-amazon.com/images/G/01/SIMON/IsaacsonWalter.jpg'
      ]
      expect(client.image_urls).to eq(images)
    end

    it 'scrapes relative paths' do
      scraper = described_class.new('http://en.wikipedia.org/wiki/Standard_test_image',
                                    convert_to_absolute_url: false,
                                    include_css_images: false)
      images = [
        'http://en.wikipedia.org/static/images/poweredby_mediawiki_88x31.png',
        'http://en.wikipedia.org/static/images/wikimedia-button.png',
        'http://en.wikipedia.org/wiki/Special:CentralAutoLogin/start?type=1x1',
        'http://upload.wikimedia.org/wikipedia/commons/thumb/b/b6/SIPI_Jelly_Beans_4.1.07.tiff/lossy-page1-220px-SIPI_Jelly_Beans_4.1.07.tiff.jpg',
        'http://upload.wikimedia.org/wikipedia/en/thumb/5/5c/Symbol_template_class.svg/16px-Symbol_template_class.svg.png'
      ]
      expect(scraper.image_urls).to eq(images)
    end

    it 'handles url with unescaped spaces' do
      url = 'https://raw.github.com/syoder/image_scraper/stylesheet_fix/test/resources/space in url.html'
      scraper = described_class.new(url, include_css_images: false)
      expected_url = 'https://raw.github.com/syoder/image_scraper/stylesheet_fix/test/resources/image1.png'
      expect(scraper.image_urls.length).to eq(1)
      expect(scraper.image_urls.first).to eq(expected_url)
    end
  end

  describe '#stylesheets' do
    it 'lists relative path stylesheets' do
      file = 'spec/support/stylesheet_test.html'
      client = described_class.new('http://test.com')
      client.doc = File.open(file) { |f| Nokogiri::HTML(f) }
      stylesheets = [
        'http://test.com/css/master.css',
        'http://test.com/css/master2.css'
      ]
      expect(client.stylesheets).to eq(stylesheets)
    end

    it 'handles stylesheet with an unescaped url' do
      scraper = described_class.new('')
      scraper.url = 'http://test.com'
      scraper.doc = Nokogiri::HTML("<link rel='stylesheet' href='http://test.com/unescaped path.css'>")
      expect(scraper.stylesheets).to include('http://test.com/unescapedpath.css')
    end
  end

  describe '#page_images' do
    it 'handles unescaped urls' do
      scraper = described_class.new('http://test.com')
      scraper.doc = Nokogiri::HTML("<img src='http://test.com/unescaped path'>")
      expect(scraper.page_images.length).to eq(1)
      expect(scraper.page_images).to include('http://test.com/unescaped%20path')
    end

    # NOTE(review): description typo fixed ('handldes' -> 'handles'). If VCR
    # derives cassette names from example descriptions, rename the cassette too.
    it 'handles image urls that include square brackets' do
      scraper = described_class.new('http://google.com')
      scraper.doc = Nokogiri::HTML("<img src='image[1].jpg' >")
      expect(scraper.page_images).to be_empty
    end
  end

  describe '#stylesheet_images' do
    it 'scrapes stylesheet images' do
      url = "#{repo_url}/master/spec/support/stylesheet_unescaped_image.html"
      stylesheet_path = "#{repo_url}/master/someimage.png"
      # /charlotte-ruby/image_scraper/master/spec/support/unescaped_image.css
      scraper = described_class.new(url, include_css_images: true)
      expect(scraper.stylesheet_images).to include(stylesheet_path)
    end

    it 'handles 404s' do
      scraper = described_class.new('')
      scraper.url = 'http://google.com'
      scraper.doc = Nokogiri::HTML("<link rel='stylesheet' href='http://google.com/does_not_exist.css'>")
      expect(scraper.stylesheet_images).to be_empty
    end

    it 'handles stylesheet image with a relative url' do
      url = "#{repo_url}/master/spec/support/relative_image_url.html"
      image_url = "#{repo_url}/master/spec/images/some_image.png"
      scraper = described_class.new(url, include_css_images: true)
      expect(scraper.stylesheet_images).to include(image_url)
    end
  end
end
|
require "forwardable"
require "hamster/immutable"
require "hamster/enumerable"
require "hamster/hash"
module Hamster
# Convenience constructor: build a `Vector` from the given items.
# With no arguments, the shared `EmptyVector` singleton is returned.
def self.vector(*items)
  return EmptyVector if items.empty?
  Vector.new(items.freeze)
end
# A `Vector` is an ordered, integer-indexed collection of objects. Like `Array`,
# `Vector` indexing starts at 0. Also like `Array`, negative indexes count back
# from the end of the `Vector`.
#
# `Vector`'s interface is modeled after that of `Array`, minus all the methods
# which do destructive updates. Some methods which modify `Array`s destructively
# (like {#insert} or {#delete_at}) are included, but they return new `Vectors`
# and leave the existing one unchanged.
#
# = Creating New Vectors
#
# Hamster.vector('a', 'b', 'c')
# Hamster::Vector.new([:first, :second, :third])
# Hamster::Vector[1, 2, 3, 4, 5]
#
# = Retrieving Items from Vectors
#
# require 'hamster/vector'
# vector = Hamster.vector(1, 2, 3, 4, 5)
# vector[0] # => 1
# vector[-1] # => 5
# vector[0,3] # => Hamster::Vector[1, 2, 3]
# vector[1..-1] # => Hamster::Vector[2, 3, 4, 5]
# vector.first # => 1
# vector.last # => 5
#
# = Creating Modified Vectors
#
# vector.add(6) # => Hamster::Vector[1, 2, 3, 4, 5, 6]
# vector.insert(1, :a, :b) # => Hamster::Vector[1, :a, :b, 2, 3, 4, 5]
# vector.delete_at(2) # => Hamster::Vector[1, 2, 4, 5]
# vector + [6, 7] # => Hamster::Vector[1, 2, 3, 4, 5, 6, 7]
#
# Other `Array`-like methods like {#select}, {#map}, {#shuffle}, {#uniq}, {#reverse},
# {#rotate}, {#flatten}, {#sort}, {#sort_by}, {#take}, {#drop}, {#take_while},
# {#drop_while}, {#fill}, {#product}, and {#transpose} are also supported.
#
class Vector
extend Forwardable
include Immutable
include Enumerable
# @private
BLOCK_SIZE = 32
# @private
INDEX_MASK = BLOCK_SIZE - 1
# @private
BITS_PER_LEVEL = 5
# Return the number of items in this `Vector`
# @return [Integer]
attr_reader :size
def_delegator :self, :size, :length
class << self
  # Create a new `Vector` populated with the given items.
  # @return [Vector]
  def [](*items)
    new(items.freeze)
  end

  # Return an empty `Vector`. If used on a subclass, returns an empty instance
  # of that class.
  #
  # @return [Vector]
  def empty
    # Memoized per class: each subclass gets its own @empty instance.
    @empty ||= self.new
  end

  # "Raw" allocation of a new `Vector`. Used internally to create a new
  # instance quickly after building a modified trie.
  #
  # @return [Vector]
  # @private
  def alloc(root, size, levels)
    # Bypasses #initialize: the caller guarantees that root/size/levels are
    # already mutually consistent, so no trie rebuilding is needed.
    obj = allocate
    obj.instance_variable_set(:@root, root)
    obj.instance_variable_set(:@size, size)
    obj.instance_variable_set(:@levels, levels)
    obj
  end
end
# Build a vector from any object convertible with #to_a.
#
# The backing structure is a trie of BLOCK_SIZE-wide nodes: @root is the top
# node, @levels the trie depth (0 means @root is a single frozen leaf array).
# Uses the declared BLOCK_SIZE constant instead of repeating the magic
# number 32, so the node width is defined in exactly one place.
def initialize(items = [].freeze)
  items = items.to_a
  if items.size <= BLOCK_SIZE
    # Small enough for a single leaf node; freeze a defensive copy.
    items = items.dup.freeze unless items.frozen?
    @root, @size, @levels = items, items.size, 0
  else
    # Build the trie bottom-up: repeatedly slice into BLOCK_SIZE-wide
    # groups until the top level fits in one node.
    root, size, levels = items, items.size, 0
    while root.size > BLOCK_SIZE
      root = root.each_slice(BLOCK_SIZE).to_a
      levels += 1
    end
    @root, @size, @levels = root.freeze, size, levels
  end
end
# Return `true` if this `Vector` contains no items.
#
# @return [Boolean]
# Predicate: true when the vector holds no items.
def empty?
  @size.zero?
end
def_delegator :self, :empty?, :null?
# Return the first item in the `Vector`. If the vector is empty, return `nil`.
#
# @return [Object]
def first
  # Delegates to #get with index 0; #get returns nil when the vector is empty.
  get(0)
end
def_delegator :self, :first, :head
# Return the last item in the `Vector`. If the vector is empty, return `nil`.
#
# @return [Object]
def last
  # Index -1 counts back from the end (see #get); nil when empty.
  get(-1)
end
# Return a new `Vector` with `item` added after the last occupied position.
#
# @param item [Object] The object to insert at the end of the vector
# @return [Vector]
def add(item)
  # Writing one slot past the current end (index == @size) appends
  # a new element via the shared trie-update path.
  update_root(@size, item)
end
def_delegator :self, :add, :<<
def_delegator :self, :add, :conj
def_delegator :self, :add, :conjoin
def_delegator :self, :add, :push
# Return a new `Vector` with the item at `index` replaced by `item`. If the
# `item` argument is missing, but an optional code block is provided, it will
# be passed the existing item and what the block returns will replace it.
#
# @param index [Integer] The index to update
# @param item [Object] The object to insert into that position
# @return [Vector]
def set(index, item = yield(get(index)))
  # The default for `item` is only evaluated when the argument is omitted:
  # the caller's block receives the current value and returns the replacement.
  raise IndexError if @size == 0
  # Negative indices count back from the end.
  index += @size if index < 0
  # index == @size is permitted here: update_root appends (mirrors #add).
  raise IndexError if index > @size || index < 0
  update_root(index, item)
end
# Return a new `Vector` with a deeply nested value modified to the result
# of the given code block. When traversing the nested `Vector`s and
# `Hash`es, non-existing keys are created with empty `Hash`es as values.
#
# The code block receives the existing value of the deeply nested key (or
# `nil` if it doesn't exist). This is useful for "transforming" the value
# associated with a certain key.
#
# Note that the original `Vector` and sub-`Vector`s and sub-`Hash`s are
# left unmodified; new data structure copies are created along the path
# wherever needed.
#
# @example
# v = Hamster::Vector[123, 456, 789, Hamster::Hash["a" => Hamster::Vector[5, 6, 7]]]
# v.update_in(3, "a", 1) { |value| value + 9 }
# # => Hamster::Vector[123, 456, 789, Hamster::Hash["a" => Hamster::Vector[5, 15, 7]]]
#
# @param key_path [Object(s)] List of keys which form the path to the key to be modified
# @yield [value] The previously stored value
# @yieldreturn [Object] The new value to store
# @return [Vector]
# Transform a value nested under `key_path` with the given block, returning
# a new vector; intermediate missing keys become empty hashes (see docs above).
def update_in(*key_path, &block)
  raise ArgumentError, "must have at least one key in path" if key_path.empty?
  key, *rest = key_path
  new_value =
    if rest.empty?
      # Innermost key: hand the current value to the caller's block.
      yield(get(key))
    else
      # Recurse one level down, defaulting a missing child to EmptyHash.
      fetch(key, EmptyHash).update_in(*rest, &block)
    end
  set(key, new_value)
end
# Retrieve the item at `index`. If there is none (either the provided index
# is too high or too low), return `nil`.
#
# @param index [Integer] The index to retrieve
# @return [Object]
def get(index)
  return nil if @size == 0
  # Negative indices count back from the end.
  index += @size if index < 0
  return nil if index >= @size || index < 0
  # Descend the trie to the leaf containing `index`, then select the slot
  # within that BLOCK_SIZE-wide leaf using the low-bit mask.
  leaf_node_for(@root, @levels * BITS_PER_LEVEL, index)[index & INDEX_MASK]
end
def_delegator :self, :get, :at
# Retrieve the value at `index`, or use the provided default value or block,
# or otherwise raise an `IndexError`.
#
# @overload fetch(index)
# Retrieve the value at the given index, or raise an `IndexError` if it is
# not found.
# @param index [Integer] The index to look up
# @overload fetch(index) { |index| ... }
# Retrieve the value at the given index, or call the optional
# code block (with the non-existent index) and get its return value.
# @yield [index] The index which does not exist
# @yieldreturn [Object] Object to return instead
# @param index [Integer] The index to look up
# @overload fetch(index, default)
# Retrieve the value at the given index, or else return the provided
# `default` value.
# @param index [Integer] The index to look up
# @param default [Object] Object to return if the key is not found
#
# @return [Object]
# Strict lookup: in-bounds index returns the element; otherwise the block,
# the explicit default, or an IndexError — in that order of preference.
def fetch(index, default = (missing_default = true))
  # Valid indices run from -@size (first element, counted backwards)
  # up to @size - 1.
  return get(index) if (-@size...@size).cover?(index)
  return yield(index) if block_given?
  return default unless missing_default
  raise IndexError, "index #{index} outside of vector bounds"
end
# Element reference. Return the item at a specific index, or a specified,
# contiguous range of items (as a new `Vector`).
#
# @overload vector[index]
# Return the item at `index`.
# @param index [Integer] The index to retrieve.
# @overload vector[start, length]
# Return a subvector starting at index `start` and continuing for `length` elements.
# @param start [Integer] The index to start retrieving items from.
# @param length [Integer] The number of items to retrieve.
# @overload vector[range]
# Return a subvector specified by the given `range` of indices.
# @param range [Range] The range of indices to retrieve.
#
# @return [Object]
# Element reference: vector[i], vector[start, length], or vector[range].
# See the overload documentation above for the exact contract.
def [](arg, length = (missing_length = true))
  unless missing_length
    # Two-argument form: start index plus element count.
    start = arg < 0 ? arg + @size : arg
    return subsequence(start, length)
  end
  # Single-argument form: plain index lookup unless given a Range.
  return get(arg) unless arg.is_a?(Range)
  from = arg.begin
  from += @size if from < 0
  to = arg.end
  to += @size if to < 0
  to += 1 unless arg.exclude_end?
  span = to - from
  span = 0 if span < 0
  subsequence(from, span)
end
def_delegator :self, :[], :slice
# Return a new `Vector` with the given values inserted before the element at `index`.
#
# @param index [Integer] The index where the new items should go
# @param items [Array] The items to add
# @return [Vector]
def insert(index, *items)
  raise IndexError if index < -@size
  # Negative indices count back from the end.
  index += @size if index < 0
  if index < @size
    # Flatten everything from `index` onwards into a plain array and
    # put the new items in front of it.
    suffix = flatten_suffix(@root, @levels * BITS_PER_LEVEL, index, [])
    suffix.unshift(*items)
  elsif index == @size
    suffix = items
  else
    # Inserting past the end pads the gap with nils, then appends.
    suffix = Array.new(index - @size, nil).concat(items)
    index = @size
  end
  # Rebuild the trie: original prefix [0, index) plus the new suffix.
  replace_suffix(index, suffix)
end
# Return a new `Vector` with the element at `index` removed. If the given `index`
# does not exist, return `self`.
#
# @param index [Integer] The index to remove
# @return [Vector]
# Remove the element at `index` (negative counts from the end); out-of-range
# indices leave the vector untouched and return self.
def delete_at(index)
  return self if index >= @size || index < -@size
  index += @size if index < 0
  # Collect everything from `index` onwards, drop its first element,
  # and rebuild the trie with the shortened suffix.
  rest = flatten_suffix(@root, @levels * BITS_PER_LEVEL, index, [])
  rest.shift
  replace_suffix(index, rest)
end
# Return a new `Vector` with the last element removed. If empty, just return `self`.
# @return [Vector]
def pop
  return self if @size == 0
  # Rebuild with an empty suffix starting at the last index.
  replace_suffix(@size-1, [])
end
# Return a new `Vector` with `obj` inserted before the first element, moving
# the other elements upwards.
# @param obj [Object] The value to prepend
# @return [Vector]
def unshift(obj)
  # Insert before index 0; every existing element shifts up by one.
  insert(0, obj)
end
# Return a new `Vector` with the first element removed. If empty, just return `self`.
# @return [Vector]
def shift
  # Remove index 0; #delete_at returns self unchanged when empty.
  delete_at(0)
end
# Call the given block once for each item in the vector, passing each
# item from first to last successively to the block.
#
# @return [self]
def each(&block)
  # Standard #each contract: return an Enumerator when no block is given.
  return to_enum unless block_given?
  traverse_depth_first(@root, @levels, &block)
  self
end
# Call the given block once for each item in the vector, passing each
# item starting from the last, and counting back to the first, successively to
# the block.
#
# @return [self]
def reverse_each(&block)
  # Without a block, return an Enumerator over the reversed traversal.
  return enum_for(:reverse_each) unless block_given?
  reverse_traverse_depth_first(@root, @levels, &block)
  self
end
# Return a new `Vector` containing all elements for which the given block returns
# true.
#
# @return [Vector]
# Keep only the elements for which the block is truthy, in a new `Vector`.
# Returns an Enumerator when called without a block.
def filter
  return enum_for(:filter) unless block_given?
  result = self.class.empty
  each { |item| result = result.add(item) if yield(item) }
  result
end
# Return a new `Vector` with all items which are equal to `obj` removed.
# `#==` is used for checking equality.
#
# @param obj [Object] The object to remove (every occurrence)
# @return [Vector]
def delete(obj)
  # Keep every item that is not equal (via #==) to obj.
  filter { |item| item != obj }
end
# Invoke the given block once for each item in the vector, and return a new
# `Vector` containing the values returned by the block.
#
# @return [Vector]
# Map each element through the block into a new `Vector`.
# Returns an Enumerator when no block is given; an empty vector maps to
# itself (nothing to transform, and the class is immutable).
# Idiom fix: `unless block_given?` replaces the non-idiomatic `if not`.
def map
  return enum_for(:map) unless block_given?
  return self if empty?
  # Bare `super` forwards the block to Enumerable#map; wrap the Array result.
  self.class.new(super)
end
def_delegator :self, :map, :collect
# Return a new `Vector` with the same elements as this one, but randomly permuted.
#
# @return [Vector]
# Return a new `Vector` with the same elements in random order.
def shuffle
  array = to_a
  # When #to_a hands back the frozen internal storage, shuffle a copy;
  # otherwise the fresh array can be permuted in place.
  array = array.dup if array.frozen?
  self.class.new(array.shuffle!.freeze)
end
# Return a new `Vector` with no duplicate elements, as determined by `#hash` and
# `#eql?`. For each group of equivalent elements, only the first will be retained.
#
# @return [Vector]
# Return a new `Vector` with duplicate elements removed (first occurrence
# kept), as determined by #hash and #eql?.
#
# Bug fix: `Array#uniq!` returns nil when no element was removed, so the
# original one-liner called `nil.freeze` through `self.class.new(nil)` for
# an unfrozen, duplicate-free array. Handle the nil return explicitly.
def uniq
  array = to_a
  if array.frozen?
    self.class.new(array.uniq.freeze)
  elsif array.uniq!  # nil when there were no duplicates to remove
    self.class.new(array.freeze)
  else
    # No duplicates: this immutable vector can be returned as-is.
    self
  end
end
# Return a new `Vector` with the same elements as this one, but in reverse order.
#
# @return [Vector]
# Return a new `Vector` with the elements in the opposite order.
def reverse
  array = to_a
  # A frozen array (shared internal storage) must not be mutated;
  # reverse! is safe on the unfrozen copy and always returns the array.
  reversed = array.frozen? ? array.reverse : array.reverse!
  self.class.new(reversed.freeze)
end
# Return a new `Vector` with the same elements, but rotated so that the one at
# index `count` is the first element of the new vector. If `count` is positive,
# the elements will be shifted left, and those shifted past the lowest position
# will be moved to the end. If `count` is negative, the elements will be shifted
# right, and those shifted past the last position will be moved to the beginning.
#
# @param count [Integer] The number of positions to shift items by
# @return [Vector]
# Return a new `Vector` rotated so the element at index `count` comes first
# (negative counts rotate the other way). See the documentation above.
#
# Bug fix: guard the empty vector first — `count % @size` raised
# ZeroDivisionError when @size was 0. An empty vector rotates to itself.
def rotate(count = 1)
  return self if @size == 0 || (count % @size) == 0
  # rotate! mutates in place and always returns the array, so it is only
  # used on the unfrozen copy handed back by #to_a.
  self.class.new(((array = to_a).frozen? ? array.rotate(count) : array.rotate!(count)).freeze)
end
# Return a new `Vector` with all nested vectors and arrays recursively "flattened
# out", that is, their elements inserted into the new `Vector` in the place where
# the nested array/vector originally was. If an optional `level` argument is
# provided, the flattening will only be done recursively that number of times.
# A `level` of 0 means not to flatten at all, 1 means to only flatten nested
# arrays/vectors which are directly contained within this `Vector`.
#
# @param level [Integer] The depth to which flattening should be applied
# @return [Vector]
# Return a new `Vector` with nested arrays/vectors recursively flattened,
# optionally limited to `level` levels (nil flattens completely).
#
# Bug fix: `Array#flatten!` returns nil when nothing needed flattening, so
# the original one-liner called `self.class.new(nil.freeze)` for an unfrozen
# array with no nested elements. Handle the nil return explicitly.
def flatten(level = nil)
  return self if level == 0
  array = to_a
  if array.frozen?
    self.class.new(array.flatten(level).freeze)
  elsif array.flatten!(level)  # nil when there was nothing to flatten
    self.class.new(array.freeze)
  else
    # Already flat: this immutable vector can be returned as-is.
    self
  end
end
# Return a new `Vector` built by concatenating this one with `other`. `other`
# can be any object which is convertible to an `Array` using `#to_a`.
#
# @param other [Enumerable] The collection to concatenate onto this vector
# @return [Vector]
# Concatenation: append all of `other`'s elements (anything responding
# to #to_a) after this vector's elements, in a new `Vector`.
def +(other)
  suffix = other.to_a
  # replace_suffix may mutate its argument, so never hand it a frozen array.
  suffix = suffix.dup if suffix.frozen?
  replace_suffix(@size, suffix)
end
def_delegator :self, :+, :concat
# `others` should be arrays and/or vectors. The corresponding elements from this
# `Vector` and each of `others` (that is, the elements with the same indices)
# will be gathered into arrays.
#
# If an optional block is provided, each such array will be passed successively
# to the block. Otherwise, a new `Vector` of all those arrays will be returned.
#
# @param others [Array] The arrays/vectors to zip together with this one
# @return [Vector, nil]
# Zip this vector with `others` (see documentation above). With a block,
# behaves exactly like Enumerable#zip; without one, wraps the result in a
# new `Vector`.
def zip(*others)
  # Bare `super` forwards both the arguments and the block.
  return super if block_given?
  self.class.new(super)
end
# Return a new `Vector` with the same items, but sorted. The sort order will
# be determined by comparing items using `#<=>`, or if an optional code block
# is provided, by using it as a comparator. The block should accept 2 parameters,
# and should return 0, 1, or -1 if the first parameter is equal to, greater than,
# or less than the second parameter (respectively).
#
# Delegates the actual sorting to `super` (an Array is produced), then wraps
# the result in a vector of the same class.
#
# @return [Vector]
def sort
  self.class.new(super)
end
# Return a new `Vector` with the same items, but sorted. The sort order will be
# determined by mapping the items through the given block to obtain sort keys,
# and then sorting the keys according to their natural sort order.
#
# @return [Vector]
def sort_by
  self.class.new(super)
end
# Drop the first `n` elements and return the rest in a new `Vector`.
#
# Optimized to walk the trie directly (same strategy as #shift /
# #delete_at) instead of enumerating every element through `super`.
#
# @param n [Integer] The number of elements to remove
# @return [Vector]
# @raise [ArgumentError] if `n` is negative
def drop(n)
  return self if n == 0
  return self.class.empty if n >= @size
  raise ArgumentError, "attempt to drop negative size" if n < 0
  # Collect the trailing elements straight out of the tree.
  self.class.new(flatten_suffix(@root, @levels * BITS_PER_LEVEL, n, []))
end
# Return only the first `n` elements in a new `Vector`. Asking for the whole
# vector (or more) simply returns `self`.
#
# @param n [Integer] The number of elements to retain
# @return [Vector]
def take(n)
  return self.class.new(super) if n < @size
  self
end
# Drop elements up to, but not including, the first element for which the
# block returns `nil` or `false`. Gather the remaining elements into a new
# `Vector`. If no block is given, an `Enumerator` is returned instead.
#
# @return [Vector, Enumerator]
def drop_while
  return enum_for(:drop_while) unless block_given?
  self.class.new(super)
end
# Gather elements up to, but not including, the first element for which the
# block returns `nil` or `false`, and return them in a new `Vector`. If no block
# is given, an `Enumerator` is returned instead.
#
# @return [Vector, Enumerator]
def take_while
  return enum_for(:take_while) unless block_given?
  self.class.new(super)
end
# Repetition. Return a new `Vector` built by concatenating `times` copies
# of this one together.
#
# @param times [Integer] The number of times to repeat the elements in this vector
# @return [Vector]
def *(times)
  return self.class.empty if times == 0
  return self if times == 1
  # Array#* also accepts a string separator (join); pass through whatever
  # non-Array result it produces.
  product = to_a * times
  product.is_a?(Array) ? self.class.new(product) : product
end
# Replace a range of indexes with the given object.
#
# @overload fill(obj)
#   Return a new `Vector` of the same size, with every index set to `obj`.
# @overload fill(obj, start)
#   Return a new `Vector` with all indexes from `start` to the end of the
#   vector set to `obj`.
# @overload fill(obj, start, length)
#   Return a new `Vector` with `length` indexes, beginning from `start`,
#   set to `obj`.
#
# @return [Vector]
# @raise [IndexError] if a negative `index` reaches before the first element
def fill(obj, index = 0, length = nil)
  raise IndexError if index < -@size
  index += @size if index < 0
  length ||= @size - index # to the end of the array, if no length given
  if index < @size
    # Overwrites existing elements: pull the whole tail out of the tree,
    # fill the requested stretch, and splice the tail back on.
    suffix = flatten_suffix(@root, @levels * BITS_PER_LEVEL, index, [])
    suffix.fill(obj, 0, length)
  elsif index == @size
    # Appending exactly at the end.
    suffix = Array.new(length, obj)
  else
    # Past the end: pad the gap with nils, like Array#fill does.
    suffix = Array.new(index - @size, nil).concat(Array.new(length, obj))
    index = @size
  end
  replace_suffix(index, suffix)
end
# When invoked with a block, yields all combinations of length `n` of items
# from the `Vector`, and then returns `self`. There is no guarantee about
# which order the combinations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @param n [Integer] The length of each combination to yield
# @return [self, Enumerator]
def combination(n)
  return enum_for(:combination, n) if not block_given?
  # Impossible lengths yield nothing at all.
  return self if n < 0 || @size < n
  if n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  elsif n == @size
    yield self.to_a
  else
    # General case: recursive lambda. `result` holds the items chosen so
    # far, `index` is the next candidate position, `remaining` is how many
    # more items are needed to complete a combination.
    combos = lambda do |result,index,remaining|
      # While more items are left than strictly needed, we may either
      # include get(index) in the combination or skip it.
      while @size - index > remaining
        if remaining == 1
          yield result.dup << get(index)
        else
          combos[result.dup << get(index), index+1, remaining-1]
        end
        index += 1
      end
      # Exactly `remaining` items left: every one of them must be taken.
      index.upto(@size-1) { |i| result << get(i) }
      yield result
    end
    combos[[], 0, n]
  end
  self
end
# When invoked with a block, yields all repeated combinations of length `n` of
# items from the `Vector`, and then returns `self`. A "repeated combination" is
# one in which any item from the `Vector` can appear consecutively any number of
# times.
#
# There is no guarantee about which order the combinations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @param n [Integer] The length of each combination to yield
# @return [self, Enumerator]
def repeated_combination(n)
  return enum_for(:repeated_combination, n) if not block_given?
  if n < 0
    # yield nothing
  elsif n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  elsif @size == 0
    # yield nothing
  else
    # Recursive lambda: `result` holds the items chosen so far, `index` is
    # the smallest candidate position (indices may repeat but never
    # decrease), `remaining` is how many more items are needed.
    combos = lambda do |result,index,remaining|
      while index < @size-1
        if remaining == 1
          yield result.dup << get(index)
        else
          # Note `index` is passed unchanged: the same item may be reused.
          combos[result.dup << get(index), index, remaining-1]
        end
        index += 1
      end
      # Only the final item remains as a candidate: fill with repeats of it.
      item = get(index)
      remaining.times { result << item }
      yield result
    end
    combos[[], 0, n]
  end
  self
end
# Yields all permutations of length `n` of items from the `Vector`, and then
# returns `self`. If no length `n` is specified, permutations of all elements
# will be yielded.
#
# There is no guarantee about which order the permutations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @param n [Integer] The length of each permutation to yield
# @return [self, Enumerator]
def permutation(n = @size)
  return enum_for(:permutation, n) if not block_given?
  if n < 0 || @size < n
    # yield nothing
  elsif n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  else
    # Classic backtracking: `used[i]` marks source indices already consumed;
    # `result` is built in place and duplicated only when yielded.
    used, result = [], []
    perms = lambda do |index|
      0.upto(@size-1) do |i|
        if !used[i]
          result[index] = get(i)
          if index < n-1
            used[i] = true
            perms[index+1]
            used[i] = false # backtrack
          else
            yield result.dup
          end
        end
      end
    end
    perms[0]
  end
  self
end
# When invoked with a block, yields all repeated permutations of length `n` of
# items from the `Vector`, and then returns `self`. A "repeated permutation" is
# one where any item from the `Vector` can appear any number of times, and in
# any position (not just consecutively)
#
# If no length `n` is specified, permutations of all elements will be yielded.
# There is no guarantee about which order the permutations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @param n [Integer] The length of each permutation to yield
# @return [self, Enumerator]
def repeated_permutation(n = @size)
  return enum_for(:repeated_permutation, n) if not block_given?
  if n < 0
    # yield nothing
  elsif n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  else
    # Like #permutation, but without the `used` bookkeeping: every source
    # index is a candidate at every position.
    result = []
    perms = lambda do |index|
      0.upto(@size-1) do |i|
        result[index] = get(i)
        if index < n-1
          perms[index+1]
        else
          yield result.dup
        end
      end
    end
    perms[0]
  end
  self
end
# With one or more vector or array arguments, return the cartesian product of
# this vector's elements and those of each argument; with no arguments, return the
# result of multiplying all this vector's items together.
#
# @overload product(*vectors)
#   Return a `Vector` of all combinations of elements from this `Vector` and each
#   of the given vectors or arrays. The length of the returned `Vector` is the product
#   of `self.size` and the size of each argument vector or array.
# @overload product
#   Return the result of multiplying all the items in this `Vector` together.
#
# @return [Vector]
def product(*vectors)
  # if no vectors passed, return "product" as in result of multiplying all items
  return super if vectors.empty?
  vectors.unshift(self)
  if vectors.any?(&:empty?)
    # NOTE(review): without a block this returns a plain Array, not a
    # Vector (as does the non-block loop below) — confirm this is intended.
    return block_given? ? self : []
  end
  # `counters` works like an odometer: counters[i] indexes into vectors[i].
  counters = Array.new(vectors.size, 0)
  # Advance the odometer one step; returns true once it has wrapped all the
  # way around (i.e. every combination has been produced).
  bump_counters = lambda do
    i = vectors.size-1
    counters[i] += 1
    while counters[i] == vectors[i].size
      counters[i] = 0
      i -= 1
      return true if i == -1 # we are done
      counters[i] += 1
    end
    false # not done yet
  end
  # Materialize the combination currently pointed at by `counters`.
  build_array = lambda do
    array = []
    counters.each_with_index { |index,i| array << vectors[i][index] }
    array
  end
  if block_given?
    while true
      yield build_array[]
      return self if bump_counters[]
    end
  else
    result = []
    while true
      result << build_array[]
      return result if bump_counters[]
    end
  end
end
# Assume all elements are vectors or arrays and transpose the rows and columns.
# In other words, take the first element of each nested vector/array and gather
# them together into a new `Vector`. Do likewise for the second, third, and so on
# down to the end of each nested vector/array. Gather all the resulting `Vectors`
# into a new `Vector` and return it.
#
# This operation is closely related to {#zip}. The result is almost the same as
# calling {#zip} on the first nested vector/array with the others supplied as
# arguments.
#
# @return [Vector]
# @raise [IndexError] if the nested collections have differing sizes
def transpose
  return self.class.empty if empty?
  # One accumulator per column; the first row determines the column count.
  result = Array.new(first.size) { [] }
  0.upto(@size-1) do |i|
    source = get(i)
    if source.size != result.size
      raise IndexError, "element size differs (#{source.size} should be #{result.size})"
    end
    0.upto(result.size-1) do |j|
      result[j].push(source[j])
    end
  end
  # Wrap each column (and the outer collection) in vectors of this class.
  result.map! { |a| self.class.new(a) }
  self.class.new(result)
end
# By using binary search, finds a value from this `Vector` which meets the
# condition defined by the provided block. Behavior is just like `Array#bsearch`.
# See `Array#bsearch` for details.
#
# The block's return type selects the mode: a `Numeric` return value runs
# "find-any" mode (0 = found, sign picks the half to continue in), while
# true/false/nil runs "find-minimum" mode.
#
# @return [Object]
# @raise [TypeError] if the block returns something other than numeric,
#   true, false, or nil
def bsearch
  low, high, result = 0, @size, nil
  while low < high
    mid = (low + ((high - low) >> 1))
    val = get(mid)
    v = yield val
    if v.is_a? Numeric
      # Find-any mode.
      if v == 0
        return val
      elsif v > 0
        high = mid
      else
        low = mid + 1
      end
    elsif v == true
      # Find-minimum mode: remember the candidate and keep searching left.
      result = val
      high = mid
    elsif !v
      low = mid + 1
    else
      raise TypeError, "wrong argument type #{v.class} (must be numeric, true, false, or nil)"
    end
  end
  result
end
# Return an empty `Vector` instance, of the same class as this one. Useful if you
# have multiple subclasses of `Vector` and want to treat them polymorphically.
#
# @return [Vector]
def clear
  self.class.empty
end
# Return one item from this `Vector`, chosen at random. Returns `nil` when
# the vector is empty.
#
# @return [Object]
def sample
  get(rand(@size))
end
# Return a new `Vector` containing the elements at the given `indices`, in
# the order the indices were given. Nonexistent indices contribute `nil`s.
#
# @param indices [Array] The indices to retrieve and gather into a new `Vector`
# @return [Vector]
def values_at(*indices)
  items = indices.map { |i| get(i) }
  self.class.new(items.freeze)
end
# Return the index of the last element which is equal to the provided object,
# or for which the provided block returns true.
#
# @overload rindex(obj)
#   Return the index of the last element in this `Vector` which is `#==` to `obj`.
# @overload rindex { |item| ... }
#   Return the index of the last element in this `Vector` for which the block
#   returns true. (Iteration starts from the last element, counts back, and
#   stops as soon as a matching element is found.)
#
# @return [Index]
def rindex(obj = (missing_arg = true))
  # `i` tracks the index of the element currently yielded by reverse_each;
  # it is decremented inside the block after each miss.
  i = @size - 1
  if missing_arg
    # No object given: match with the block, or return an Enumerator.
    if block_given?
      reverse_each { |item| return i if yield item; i -= 1 }
      nil
    else
      enum_for(:rindex)
    end
  else
    reverse_each { |item| return i if item == obj; i -= 1 }
    nil
  end
end
# Assumes all elements are nested, indexable collections, and searches through them,
# comparing `obj` with the first element of each nested collection. Return the
# first nested collection which matches, or `nil` if none is found.
#
# @param obj [Object] The object to search for
# @return [Object]
def assoc(obj)
  each do |entry|
    return entry if obj == entry[0]
  end
  nil
end
# Assumes all elements are nested, indexable collections, and searches through them,
# comparing `obj` with the second element of each nested collection. Return the
# first nested collection which matches, or `nil` if none is found.
#
# @param obj [Object] The object to search for
# @return [Object]
def rassoc(obj)
  each do |entry|
    return entry if obj == entry[1]
  end
  nil
end
# Return an `Array` with the same elements, in the same order. The returned
# `Array` may or may not be frozen.
#
# Callers (e.g. #flatten, #uniq, #reverse) rely on this distinction: a
# single-leaf tree returns the frozen internal node, while deeper trees
# produce a fresh, mutable array.
#
# @return [Array]
def to_a
  if @levels == 0
    # The tree is a single leaf node; return it directly (it is frozen).
    @root
  else
    # Multi-level tree: flatten all leaves into a new (unfrozen) array.
    flatten_node(@root, @levels * BITS_PER_LEVEL, [])
  end
end
# Return true if `other` has the same type and contents as this `Vector`.
#
# @param other [Object] The collection to compare with
# @return [Boolean]
def eql?(other)
  return true if equal?(other)
  instance_of?(other.class) &&
    @size == other.size &&
    @root.eql?(other.instance_variable_get(:@root))
end
# See `Object#hash`.
# @return [Integer]
def hash
  reduce(0) { |acc, item| (acc << 5) - acc + item.hash }
end
# Serialize as a plain array of elements.
# @return [::Array]
# @private
def marshal_dump
  to_a
end
# Rebuild the trie from the dumped array.
# @private
def marshal_load(array)
  initialize(array.freeze)
end
private
# Yield every leaf element under `node` in index order. `level` counts the
# remaining tree levels; at level 0, `node` is a leaf array of elements.
def traverse_depth_first(node, level, &block)
  return node.each(&block) if level == 0
  node.each { |child| traverse_depth_first(child, level - 1, &block) }
end
# Same as #traverse_depth_first, but yields the elements in reverse order.
def reverse_traverse_depth_first(node, level, &block)
  return node.reverse_each(&block) if level == 0
  node.reverse_each { |child| reverse_traverse_depth_first(child, level - 1, &block) }
end
# Walk down the trie from `node` and return the leaf array holding the
# element at `index`. `bitshift` is BITS_PER_LEVEL times the current depth;
# each step selects the child slot encoded in that bit-group of `index`.
def leaf_node_for(node, bitshift, index)
  while bitshift > 0
    node = node[(index >> bitshift) & INDEX_MASK]
    bitshift -= BITS_PER_LEVEL
  end
  node
end
# Return a new vector with `item` stored at `index` (backs #add and #set).
# If `index` lies beyond the capacity of the current tree, the root is
# wrapped in additional single-child levels until it fits.
def update_root(index, item)
  root, levels = @root, @levels
  while index >= (1 << (BITS_PER_LEVEL * (levels + 1)))
    root = [root].freeze
    levels += 1
  end
  new_root = update_leaf_node(root, levels * BITS_PER_LEVEL, index, item)
  if new_root.equal?(root)
    # Nothing structurally changed (stored item was identical): reuse self.
    self
  else
    self.class.alloc(new_root, @size > index ? @size : index + 1, levels)
  end
end
# Path-copying update: rebuild only the nodes on the path from `node` down
# to the leaf slot for `index`; all untouched nodes stay shared with the
# original tree. Returns `node` itself when the stored item is already
# `equal?` to `item`.
def update_leaf_node(node, bitshift, index, item)
  slot_index = (index >> bitshift) & INDEX_MASK
  if bitshift > 0
    # Not at the leaf yet: recurse into (or create) the child node.
    old_child = node[slot_index] || []
    item = update_leaf_node(old_child, bitshift - BITS_PER_LEVEL, index, item)
  end
  existing_item = node[slot_index]
  if existing_item.equal?(item)
    node
  else
    node.dup.tap { |n| n[slot_index] = item }.freeze
  end
end
# Collect the elements with indices `from..to` (inclusive) under `node`
# into a new array. `bitshift` encodes the current tree depth, as elsewhere.
def flatten_range(node, bitshift, from, to)
  from_slot = (from >> bitshift) & INDEX_MASK
  to_slot = (to >> bitshift) & INDEX_MASK
  if bitshift == 0 # are we at the bottom?
    node.slice(from_slot, to_slot-from_slot+1)
  elsif from_slot == to_slot
    # The whole range lives under a single child.
    flatten_range(node[from_slot], bitshift - BITS_PER_LEVEL, from, to)
  else
    # the following bitmask can be used to pick out the part of the from/to indices
    # which will be used to direct path BELOW this node
    mask = ((1 << bitshift) - 1)
    result = []
    if from & mask == 0
      # `from` is aligned on a child boundary: take the first child whole.
      flatten_node(node[from_slot], bitshift - BITS_PER_LEVEL, result)
    else
      result.concat(flatten_range(node[from_slot], bitshift - BITS_PER_LEVEL, from, from | mask))
    end
    # Children strictly between the two endpoints are taken whole.
    (from_slot+1).upto(to_slot-1) do |slot_index|
      flatten_node(node[slot_index], bitshift - BITS_PER_LEVEL, result)
    end
    if to & mask == mask
      # `to` is the last index of its child: take that child whole too.
      flatten_node(node[to_slot], bitshift - BITS_PER_LEVEL, result)
    else
      result.concat(flatten_range(node[to_slot], bitshift - BITS_PER_LEVEL, to & ~mask, to))
    end
    result
  end
end
# Append every element under `node` to `result` (and return `result`).
# The `bitshift == BITS_PER_LEVEL` branch is an unrolled shortcut for a
# node whose children are all leaves.
def flatten_node(node, bitshift, result)
  if bitshift == 0
    result.concat(node)
  elsif bitshift == BITS_PER_LEVEL
    node.each { |a| result.concat(a) }
  else
    bitshift -= BITS_PER_LEVEL
    node.each { |a| flatten_node(a, bitshift, result) }
  end
  result
end
# Core of #[] with start/length arguments: return a new vector holding
# `length` elements starting at index `from`, or `nil` when the arguments
# are out of range.
def subsequence(from, length)
  return nil if from > @size || from < 0 || length < 0
  length = @size - from if @size < from + length # clamp to the vector's end
  return self.class.empty if length == 0
  self.class.new(flatten_range(@root, @levels * BITS_PER_LEVEL, from, from + length - 1))
end
# Append all elements from index `from` through the end of the tree under
# `node` onto `result` (and return `result`).
def flatten_suffix(node, bitshift, from, result)
  from_slot = (from >> bitshift) & INDEX_MASK
  if bitshift == 0
    if from_slot == 0
      result.concat(node)
    else
      result.concat(node.slice(from_slot, 32)) # entire suffix of node. excess length is ignored by #slice
    end
  else
    mask = ((1 << bitshift) - 1)
    if from & mask == 0
      # `from` is aligned on a child boundary: take whole children from here.
      from_slot.upto(node.size-1) do |i|
        flatten_node(node[i], bitshift - BITS_PER_LEVEL, result)
      end
    elsif child = node[from_slot]
      # Partial first child, then whole children for the remainder.
      flatten_suffix(child, bitshift - BITS_PER_LEVEL, from, result)
      (from_slot+1).upto(node.size-1) do |i|
        flatten_node(node[i], bitshift - BITS_PER_LEVEL, result)
      end
    end
    result
  end
end
# Return a new vector which keeps elements 0...from and then continues with
# the elements of the array `suffix`. This is the workhorse behind #insert,
# #delete_at, #fill, #+, and friends. NOTE: `suffix` is consumed
# destructively (via Array#shift), so callers must pass a mutable array.
def replace_suffix(from, suffix)
  # new suffix can go directly after existing elements
  raise IndexError if from > @size
  root, levels = @root, @levels
  if (from >> (BITS_PER_LEVEL * (@levels + 1))) != 0
    # index where new suffix goes doesn't fall within current tree
    # we will need to deepen tree
    root = [root].freeze
    levels += 1
  end
  new_size = from + suffix.size
  root = replace_node_suffix(root, levels * BITS_PER_LEVEL, from, suffix)
  if !suffix.empty?
    # Chunk whatever remains of the suffix to the tree's current depth,
    # graft it on, then keep deepening while the root overflows 32 slots.
    levels.times { suffix = suffix.each_slice(32).to_a }
    root.concat(suffix)
    while root.size > 32
      root = root.each_slice(32).to_a
      levels += 1
    end
  else
    # The tree may now be taller than needed; collapse single-child roots.
    while root.size == 1 && levels > 0
      root = root[0]
      levels -= 1
    end
  end
  self.class.alloc(root.freeze, new_size, levels)
end
# Recursive helper for #replace_suffix: rebuild `node` so that everything
# from index `from` onward is replaced by elements shifted off of `suffix`
# (which is mutated). Returns an UNfrozen node; the caller freezes the root.
def replace_node_suffix(node, bitshift, from, suffix)
  from_slot = (from >> bitshift) & INDEX_MASK
  if bitshift == 0
    # Leaf level: keep the leaf's prefix, refill the rest from `suffix`.
    if from_slot == 0
      suffix.shift(32)
    else
      node.take(from_slot).concat(suffix.shift(32 - from_slot))
    end
  else
    mask = ((1 << bitshift) - 1)
    if from & mask == 0
      # `from` falls exactly on a child boundary.
      if from_slot == 0
        new_node = suffix.shift(32 * (1 << bitshift))
        while bitshift != 0
          new_node = new_node.each_slice(32).to_a
          bitshift -= BITS_PER_LEVEL
        end
        new_node
      else
        result = node.take(from_slot)
        remainder = suffix.shift((32 - from_slot) * (1 << bitshift))
        while bitshift != 0
          remainder = remainder.each_slice(32).to_a
          bitshift -= BITS_PER_LEVEL
        end
        result.concat(remainder)
      end
    elsif child = node[from_slot]
      # The split happens inside one child: rebuild that child, then fill
      # the remaining sibling slots from `suffix`.
      result = node.take(from_slot)
      result.push(replace_node_suffix(child, bitshift - BITS_PER_LEVEL, from, suffix))
      remainder = suffix.shift((31 - from_slot) * (1 << bitshift))
      while bitshift != 0
        remainder = remainder.each_slice(32).to_a
        bitshift -= BITS_PER_LEVEL
      end
      result.concat(remainder)
    else
      raise "Shouldn't happen"
    end
  end
end
end
# The canonical empty `Vector`. Returned by `Hamster.vector` and `Vector[]` when
# invoked with no arguments; also returned by `Vector.empty`. Prefer using this
# one rather than creating many empty vectors using `Vector.new`.
#
# Safe to share: `Vector`s are immutable, so a single instance suffices.
EmptyVector = Hamster::Vector.empty
end
Optimize Vector#drop for speed, using an implementation strategy similar to Vector#shift.
Thanks to Dov Murik for noticing that #shift was faster than #drop.
require "forwardable"
require "hamster/immutable"
require "hamster/enumerable"
require "hamster/hash"
module Hamster
# Convenience constructor: build a `Vector` from the given items. With no
# arguments, the shared `EmptyVector` instance is returned instead of
# allocating a new object.
def self.vector(*items)
  if items.empty?
    EmptyVector
  else
    Vector.new(items.freeze)
  end
end
# A `Vector` is an ordered, integer-indexed collection of objects. Like `Array`,
# `Vector` indexing starts at 0. Also like `Array`, negative indexes count back
# from the end of the `Vector`.
#
# `Vector`'s interface is modeled after that of `Array`, minus all the methods
# which do destructive updates. Some methods which modify `Array`s destructively
# (like {#insert} or {#delete_at}) are included, but they return new `Vectors`
# and leave the existing one unchanged.
#
# = Creating New Vectors
#
# Hamster.vector('a', 'b', 'c')
# Hamster::Vector.new([:first, :second, :third])
# Hamster::Vector[1, 2, 3, 4, 5]
#
# = Retrieving Items from Vectors
#
# require 'hamster/vector'
# vector = Hamster.vector(1, 2, 3, 4, 5)
# vector[0] # => 1
# vector[-1] # => 5
# vector[0,3] # => Hamster::Vector[1, 2, 3]
# vector[1..-1] # => Hamster::Vector[2, 3, 4, 5]
# vector.first # => 1
# vector.last # => 5
#
# = Creating Modified Vectors
#
# vector.add(6) # => Hamster::Vector[1, 2, 3, 4, 5, 6]
# vector.insert(1, :a, :b) # => Hamster::Vector[1, :a, :b, 2, 3, 4, 5]
# vector.delete_at(2) # => Hamster::Vector[1, 2, 4, 5]
# vector + [6, 7] # => Hamster::Vector[1, 2, 3, 4, 5, 6, 7]
#
# Other `Array`-like methods like {#select}, {#map}, {#shuffle}, {#uniq}, {#reverse},
# {#rotate}, {#flatten}, {#sort}, {#sort_by}, {#take}, {#drop}, {#take_while},
# {#drop_while}, {#fill}, {#product}, and {#transpose} are also supported.
#
class Vector
extend Forwardable
include Immutable
include Enumerable
# @private
BLOCK_SIZE = 32
# @private
INDEX_MASK = BLOCK_SIZE - 1
# @private
BITS_PER_LEVEL = 5
# Return the number of items in this `Vector`
# @return [Integer]
attr_reader :size
def_delegator :self, :size, :length
class << self
  # Create a new `Vector` populated with the given items.
  # @return [Vector]
  def [](*items)
    new(items.freeze)
  end
  # Return an empty `Vector`. If used on a subclass, returns an empty instance
  # of that class. Memoized per class, so repeated calls yield one object.
  #
  # @return [Vector]
  def empty
    @empty ||= self.new
  end
  # "Raw" allocation of a new `Vector`. Used internally to create a new
  # instance quickly after building a modified trie. Bypasses #initialize;
  # callers must supply a valid frozen trie `root`, the element count
  # `size`, and the tree depth `levels`.
  #
  # @return [Vector]
  # @private
  def alloc(root, size, levels)
    obj = allocate
    obj.instance_variable_set(:@root, root)
    obj.instance_variable_set(:@size, size)
    obj.instance_variable_set(:@levels, levels)
    obj
  end
end
# Build a vector from any collection convertible via `#to_a`. Up to 32
# items are stored as a single flat frozen array (`@levels == 0`); larger
# collections are repeatedly chunked into 32-element slices, adding one
# tree level per pass, until the root itself fits in 32 slots.
def initialize(items=[].freeze)
  items = items.to_a
  if items.size <= 32
    # Small enough for a single leaf node; defensively copy if mutable.
    items = items.dup.freeze if !items.frozen?
    @root, @size, @levels = items, items.size, 0
  else
    root, size, levels = items, items.size, 0
    while root.size > 32
      root = root.each_slice(32).to_a
      levels += 1
    end
    @root, @size, @levels = root.freeze, size, levels
  end
end
# Return `true` if this `Vector` contains no items.
#
# @return [Boolean]
def empty?
  @size.zero?
end
def_delegator :self, :empty?, :null?
# Return the item at index 0, or `nil` when the vector is empty.
#
# @return [Object]
def first
  get(0)
end
def_delegator :self, :first, :head
# Return the item at index -1, or `nil` when the vector is empty.
#
# @return [Object]
def last
  get(-1)
end
# Append: return a new `Vector` with `item` placed just past the last
# occupied position.
#
# @param item [Object] The object to insert at the end of the vector
# @return [Vector]
def add(item)
  # Storing at index @size grows the vector by one element.
  update_root(@size, item)
end
def_delegator :self, :add, :<<
def_delegator :self, :add, :conj
def_delegator :self, :add, :conjoin
def_delegator :self, :add, :push
# Return a new `Vector` with the item at `index` replaced by `item`. If the
# `item` argument is missing, but an optional code block is provided, it will
# be passed the existing item and what the block returns will replace it.
#
# Note the default-argument trick: when `item` is omitted,
# `yield(get(index))` is evaluated at call time, BEFORE the bounds checks
# below run (and with the raw, possibly negative, index).
#
# @param index [Integer] The index to update
# @param item [Object] The object to insert into that position
# @return [Vector]
# @raise [IndexError] if the vector is empty or `index` is out of bounds
def set(index, item = yield(get(index)))
  raise IndexError if @size == 0
  index += @size if index < 0
  # index == @size is permitted (appends); anything beyond is an error.
  raise IndexError if index > @size || index < 0
  update_root(index, item)
end
# Return a new `Vector` with a deeply nested value modified to the result
# of the given code block. When travesing the nested `Vector`s and
# `Hash`s, non-existing keys are created with value of empty `Hash`s.
#
# The code block receives the existing value of the deeply nested key (or
# `nil` if it doesn't exist). This is useful for "transforming" the value
# associated with a certain key.
#
# Note that the original `Vector` and sub-`Vector`s and sub-`Hash`s are
# left unmodified; new data structure copies are created along the path
# wherever needed.
#
# @example
#   v = Hamster::Vector[123, 456, 789, Hamster::Hash["a" => Hamster::Vector[5, 6, 7]]]
#   v.update_in(3, "a", 1) { |value| value + 9 }
#   # => Hamster::Vector[123, 456, 789, Hamster::Hash["a" => Hamster::Vector[5, 15, 7]]]
#
# @param key_path [Object(s)] List of keys which form the path to the key to be modified
# @yield [value] The previously stored value
# @yieldreturn [Object] The new value to store
# @return [Hash]
def update_in(*key_path, &block)
  if key_path.empty?
    raise ArgumentError, "must have at least one key in path"
  end
  key = key_path[0]
  if key_path.size == 1
    # Last step of the path: transform the value stored right here.
    new_value = block.call(get(key))
  else
    # Recurse; missing intermediate keys default to an empty Hamster Hash.
    value = fetch(key, EmptyHash)
    new_value = value.update_in(*key_path[1..-1], &block)
  end
  set(key, new_value)
end
# Retrieve the item at `index`. If there is none (either the provided index
# is too high or too low), return `nil`.
#
# @param index [Integer] The index to retrieve
# @return [Object]
def get(index)
  return nil if @size == 0
  index += @size if index < 0 # negative indices count from the end
  return nil if index >= @size || index < 0
  # Walk the trie to the right leaf, then pick the slot within it.
  leaf_node_for(@root, @levels * BITS_PER_LEVEL, index)[index & INDEX_MASK]
end
def_delegator :self, :get, :at
# Retrieve the value at `index`, or use the provided default value or block,
# or otherwise raise an `IndexError`.
#
# @overload fetch(index)
#   Retrieve the value at the given index, or raise an `IndexError` if it is
#   not found.
#   @param index [Integer] The index to look up
# @overload fetch(index) { |index| ... }
#   Retrieve the value at the given index, or call the optional
#   code block (with the non-existent index) and get its return value.
#   @yield [index] The index which does not exist
#   @yieldreturn [Object] Object to return instead
#   @param index [Integer] The index to look up
# @overload fetch(index, default)
#   Retrieve the value at the given index, or else return the provided
#   `default` value.
#   @param index [Integer] The index to look up
#   @param default [Object] Object to return if the key is not found
#
# @return [Object]
def fetch(index, default = (missing_default = true))
  return get(index) if index >= -@size && index < @size
  return yield(index) if block_given?
  return default unless missing_default
  raise IndexError, "index #{index} outside of vector bounds"
end
# Element reference. Return the item at a specific index, or a specified,
# contiguous range of items (as a new `Vector`).
#
# @overload vector[index]
#   Return the item at `index`.
#   @param index [Integer] The index to retrieve.
# @overload vector[start, length]
#   Return a subvector starting at index `start` and continuing for `length` elements.
#   @param start [Integer] The index to start retrieving items from.
#   @param length [Integer] The number of items to retrieve.
# @overload vector[range]
#   Return a subvector specified by the given `range` of indices.
#   @param range [Range] The range of indices to retrieve.
#
# @return [Object]
def [](arg, length = (missing_length = true))
  if missing_length
    if arg.is_a?(Range)
      # Normalize the range into a non-negative start and length.
      from, to = arg.begin, arg.end
      from += @size if from < 0
      to += @size if to < 0
      to += 1 if !arg.exclude_end? # make `to` exclusive
      length = to - from
      length = 0 if length < 0
      subsequence(from, length)
    else
      get(arg)
    end
  else
    arg += @size if arg < 0
    subsequence(arg, length)
  end
end
def_delegator :self, :[], :slice
# Return a new `Vector` with the given values inserted before the element at `index`.
#
# @param index [Integer] The index where the new items should go
# @param items [Array] The items to add
# @return [Vector]
# @raise [IndexError] if a negative `index` reaches before the first element
def insert(index, *items)
  raise IndexError if index < -@size
  index += @size if index < 0
  if index < @size
    # Inserting in the middle: pull the tail out of the tree, prepend the
    # new items, and splice the result back on at `index`.
    suffix = flatten_suffix(@root, @levels * BITS_PER_LEVEL, index, [])
    suffix.unshift(*items)
  elsif index == @size
    # Appending exactly at the end.
    suffix = items
  else
    # Past the end: pad the gap with nils, like Array#insert does.
    suffix = Array.new(index - @size, nil).concat(items)
    index = @size
  end
  replace_suffix(index, suffix)
end
# Return a new `Vector` with the element at `index` removed. If the given `index`
# does not exist, return `self`.
#
# @param index [Integer] The index to remove
# @return [Vector]
def delete_at(index)
  return self if index >= @size || index < -@size
  index += @size if index < 0
  # Extract the tail starting at `index`, drop its first element, and
  # splice the remainder back in its place.
  suffix = flatten_suffix(@root, @levels * BITS_PER_LEVEL, index, [])
  replace_suffix(index, suffix.tap { |a| a.shift })
end
# Return a new `Vector` with the last element removed. If empty, just return `self`.
# @return [Vector]
def pop
  @size == 0 ? self : replace_suffix(@size - 1, [])
end
# Return a new `Vector` with `obj` inserted before the first element, moving
# the other elements upwards.
# @param obj [Object] The value to prepend
# @return [Vector]
def unshift(obj)
  insert(0, obj)
end
# Return a new `Vector` with the first element removed. If empty, just return `self`.
# @return [Vector]
def shift
  delete_at(0)
end
# Call the given block once for each item in the vector, passing each
# item from first to last successively to the block. Without a block,
# returns an `Enumerator`.
#
# @return [self]
def each(&block)
  return to_enum if block.nil?
  traverse_depth_first(@root, @levels, &block)
  self
end
# Call the given block once for each item in the vector, passing each
# item starting from the last, and counting back to the first, successively
# to the block. Without a block, returns an `Enumerator`.
#
# @return [self]
def reverse_each(&block)
  return enum_for(:reverse_each) if block.nil?
  reverse_traverse_depth_first(@root, @levels, &block)
  self
end
# Return a new `Vector` containing all elements for which the given block returns
# true.
#
# @return [Vector]
def filter
  return enum_for(:filter) unless block_given?
  result = self.class.empty
  each { |item| result = result.add(item) if yield(item) }
  result
end
# Return a new `Vector` with all items which are equal to `obj` removed.
# `#==` is used for checking equality.
#
# @param obj [Object] The object to remove (every occurrence)
# @return [Vector]
def delete(obj)
  filter { |entry| entry != obj }
end
# Invoke the given block once for each item in the vector, and return a new
# `Vector` containing the values returned by the block.
#
# @return [Vector]
def map
  return enum_for(:map) unless block_given?
  empty? ? self : self.class.new(super)
end
def_delegator :self, :map, :collect
# Return a new `Vector` with the same elements as this one, but randomly permuted.
#
# @return [Vector]
def shuffle
  array = to_a
  array = array.dup if array.frozen?
  # Array#shuffle! always returns the (now permuted) receiver.
  self.class.new(array.shuffle!.freeze)
end
# Return a new `Vector` with no duplicate elements, as determined by `#hash` and
# `#eql?`. For each group of equivalent elements, only the first will be retained.
#
# @return [Vector]
def uniq
  array = to_a
  if array.frozen?
    self.class.new(array.uniq.freeze)
  else
    # Array#uniq! returns nil when there were no duplicates, so fall back
    # to the (unchanged) array itself rather than building Vector.new(nil).
    self.class.new((array.uniq! || array).freeze)
  end
end
# Return a new `Vector` with the same elements as this one, but in reverse order.
#
# @return [Vector]
def reverse
  array = to_a
  array = array.dup if array.frozen?
  # Array#reverse! always returns the (now reversed) receiver.
  self.class.new(array.reverse!.freeze)
end
# Return a new `Vector` with the same elements, but rotated so that the one at
# index `count` is the first element of the new vector. If `count` is positive,
# the elements will be shifted left, and those shifted past the lowest position
# will be moved to the end. If `count` is negative, the elements will be shifted
# right, and those shifted past the last position will be moved to the beginning.
#
# @param count [Integer] The number of positions to shift items by
# @return [Vector]
def rotate(count = 1)
  # Guard @size == 0 first: `count % 0` would raise ZeroDivisionError.
  return self if @size == 0 || (count % @size) == 0
  self.class.new(((array = to_a).frozen? ? array.rotate(count) : array.rotate!(count)).freeze)
end
# Return a new `Vector` with all nested vectors and arrays recursively "flattened
# out", that is, their elements inserted into the new `Vector` in the place where
# the nested array/vector originally was. If an optional `level` argument is
# provided, the flattening will only be done recursively that number of times.
# A `level` of 0 means not to flatten at all, 1 means to only flatten nested
# arrays/vectors which are directly contained within this `Vector`.
#
# @param level [Integer] The depth to which flattening should be applied
# @return [Vector]
def flatten(level = nil)
  return self if level == 0
  array = to_a
  if array.frozen?
    self.class.new(array.flatten(level).freeze)
  else
    # Array#flatten! returns nil when no flattening occurred, so fall back
    # to the (unchanged) array itself rather than building Vector.new(nil).
    self.class.new((array.flatten!(level) || array).freeze)
  end
end
# Concatenation: return a new `Vector` containing this vector's elements
# followed by those of `other`. `other` may be anything convertible to an
# `Array` via `#to_a`.
#
# @param other [Enumerable] The collection to concatenate onto this vector
# @return [Vector]
def +(other)
  suffix = other.to_a
  # replace_suffix consumes its argument destructively, so never hand it
  # a frozen array.
  suffix = suffix.dup if suffix.frozen?
  replace_suffix(@size, suffix)
end
def_delegator :self, :+, :concat
# Gather corresponding elements (those with equal indices) from this `Vector`
# and each collection in `others` into arrays. With a block, each such array
# is yielded in turn; without one, a new `Vector` of the arrays is returned.
#
# @param others [Array] The arrays/vectors to zip together with this one
# @return [Vector, nil]
def zip(*others)
  return super if block_given?
  self.class.new(super)
end
# Return a new `Vector` with the same items, but sorted. The sort order will
# be determined by comparing items using `#<=>`, or if an optional code block
# is provided, by using it as a comparator. The block should accept 2 parameters,
# and should return 0, 1, or -1 if the first parameter is equal to, greater than,
# or less than the second parameter (respectively).
#
# Delegates the actual sorting to `super` (an Array is produced), then wraps
# the result in a vector of the same class.
#
# @return [Vector]
def sort
  self.class.new(super)
end
# Return a new `Vector` with the same items, but sorted. The sort order will be
# determined by mapping the items through the given block to obtain sort keys,
# and then sorting the keys according to their natural sort order.
#
# @return [Vector]
def sort_by
  self.class.new(super)
end
# Drop the first `n` elements and return the rest in a new `Vector`.
#
# Walks the trie directly via #flatten_suffix rather than enumerating every
# element (same strategy as #shift / #delete_at).
#
# @param n [Integer] The number of elements to remove
# @return [Vector]
# @raise [ArgumentError] if `n` is negative
def drop(n)
  return self if n == 0
  return self.class.empty if n >= @size
  raise ArgumentError, "attempt to drop negative size" if n < 0
  # Collect the trailing elements straight out of the tree.
  self.class.new(flatten_suffix(@root, @levels * BITS_PER_LEVEL, n, []))
end
# Return only the first `n` elements in a new `Vector`.
# @param n [Integer] The number of elements to retain
# @return [Vector]
# Return only the first `n` elements in a new `Vector`. Taking at least the
# whole vector simply returns `self` (persistent structure, no copy needed).
# @param n [Integer] The number of elements to retain
# @return [Vector]
def take(n)
  n >= @size ? self : self.class.new(super)
end
# Drop elements up to, but not including, the first element for which the
# block returns `nil` or `false`. Gather the remaining elements into a new
# `Vector`. If no block is given, an `Enumerator` is returned instead.
#
# @return [Vector, Enumerator]
# Drop elements up to, but not including, the first element for which the
# block returns `nil` or `false`; gather the rest into a new `Vector`.
# Without a block, an `Enumerator` is returned instead.
#
# @return [Vector, Enumerator]
def drop_while
  return enum_for(:drop_while) unless block_given?
  self.class.new(super)
end
# Gather elements up to, but not including, the first element for which the
# block returns `nil` or `false`, and return them in a new `Vector`. If no block
# is given, an `Enumerator` is returned instead.
#
# @return [Vector, Enumerator]
# Gather elements up to, but not including, the first element for which the
# block returns `nil` or `false`, into a new `Vector`. Without a block, an
# `Enumerator` is returned instead.
#
# @return [Vector, Enumerator]
def take_while
  return enum_for(:take_while) unless block_given?
  self.class.new(super)
end
# Repetition. Return a new `Vector` built by concatenating `times` copies
# of this one together.
#
# @param times [Integer] The number of times to repeat the elements in this vector
# @return [Vector]
# Repetition: return a new `Vector` made of `times` copies of this one,
# concatenated end to end.
#
# @param times [Integer] The number of times to repeat the elements
# @return [Vector]
def *(times)
  if times == 0
    self.class.empty
  elsif times == 1
    self
  else
    repeated = to_a * times
    # Array#* with a String argument joins instead of repeating; pass such
    # non-Array results straight through, mirroring Array's behavior.
    repeated.is_a?(Array) ? self.class.new(repeated) : repeated
  end
end
# Replace a range of indexes with the given object.
#
# @overload fill(obj)
# Return a new `Vector` of the same size, with every index set to `obj`.
# @overload fill(obj, start)
# Return a new `Vector` with all indexes from `start` to the end of the
# vector set to `obj`.
# @overload fill(obj, start, length)
# Return a new `Vector` with `length` indexes, beginning from `start`,
# set to `obj`.
#
# @return [Vector]
def fill(obj, index = 0, length = nil)
  raise IndexError if index < -@size
  index += @size if index < 0 # a negative start counts back from the end
  length ||= @size - index # to the end of the array, if no length given
  if index < @size
    # Overwriting existing elements: flatten the tail of the trie into a
    # plain Array, fill the requested span, then splice the tail back on.
    suffix = flatten_suffix(@root, @levels * BITS_PER_LEVEL, index, [])
    suffix.fill(obj, 0, length)
  elsif index == @size
    # Appending exactly at the end: the suffix is just the fill values.
    suffix = Array.new(length, obj)
  else
    # Start lies past the end: pad the gap with nils, like Array#fill.
    suffix = Array.new(index - @size, nil).concat(Array.new(length, obj))
    index = @size
  end
  replace_suffix(index, suffix)
end
# When invoked with a block, yields all combinations of length `n` of items
# from the `Vector`, and then returns `self`. There is no guarantee about
# which order the combinations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @return [self, Enumerator]
def combination(n)
  return enum_for(:combination, n) if not block_given?
  return self if n < 0 || @size < n # no combinations of that length exist
  if n == 0
    yield [] # there is exactly one empty combination
  elsif n == 1
    each { |item| yield [item] }
  elsif n == @size
    yield self.to_a # only one combination uses every element
  else
    # General case: depth-first recursion. `result` holds the items chosen
    # so far, `index` is the next candidate position, `remaining` is how
    # many more items are still needed.
    combos = lambda do |result,index,remaining|
      # While there are more candidates left than slots to fill, we may
      # either take get(index) or skip it (skip = the next loop iteration).
      while @size - index > remaining
        if remaining == 1
          yield result.dup << get(index)
        else
          combos[result.dup << get(index), index+1, remaining-1]
        end
        index += 1
      end
      # Exactly `remaining` candidates left: all of them must be taken.
      index.upto(@size-1) { |i| result << get(i) }
      yield result
    end
    combos[[], 0, n]
  end
  self
end
# When invoked with a block, yields all repeated combinations of length `n` of
# items from the `Vector`, and then returns `self`. A "repeated combination" is
# one in which any item from the `Vector` can appear consecutively any number of
# times.
#
# There is no guarantee about which order the combinations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @return [self, Enumerator]
def repeated_combination(n)
  return enum_for(:repeated_combination, n) if not block_given?
  if n < 0
    # yield nothing
  elsif n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  elsif @size == 0
    # yield nothing
  else
    # Depth-first recursion. Because repetition is allowed, the recursive
    # call passes `index` unchanged (not index+1): the same element may be
    # chosen again on the next level.
    combos = lambda do |result,index,remaining|
      while index < @size-1
        if remaining == 1
          yield result.dup << get(index)
        else
          combos[result.dup << get(index), index, remaining-1]
        end
        index += 1
      end
      # Only the last element is still eligible: pad the combination out
      # with `remaining` copies of it.
      item = get(index)
      remaining.times { result << item }
      yield result
    end
    combos[[], 0, n]
  end
  self
end
# Yields all permutations of length `n` of items from the `Vector`, and then
# returns `self`. If no length `n` is specified, permutations of all elements
# will be yielded.
#
# There is no guarantee about which order the permutations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @return [self, Enumerator]
def permutation(n = @size)
  return enum_for(:permutation, n) if not block_given?
  if n < 0 || @size < n
    # yield nothing
  elsif n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  else
    # Classic backtracking: `used[i]` marks source indices already placed
    # into `result`; `index` is the output position currently being filled.
    used, result = [], []
    perms = lambda do |index|
      0.upto(@size-1) do |i|
        if !used[i]
          result[index] = get(i)
          if index < n-1
            used[i] = true
            perms[index+1]
            used[i] = false # backtrack: free the element for other branches
          else
            yield result.dup
          end
        end
      end
    end
    perms[0]
  end
  self
end
# When invoked with a block, yields all repeated permutations of length `n` of
# items from the `Vector`, and then returns `self`. A "repeated permutation" is
# one where any item from the `Vector` can appear any number of times, and in
# any position (not just consecutively)
#
# If no length `n` is specified, permutations of all elements will be yielded.
# There is no guarantee about which order the permutations will be yielded in.
#
# If no block is given, an `Enumerator` is returned instead.
#
# @return [self, Enumerator]
def repeated_permutation(n = @size)
  return enum_for(:repeated_permutation, n) if not block_given?
  if n < 0
    # yield nothing
  elsif n == 0
    yield []
  elsif n == 1
    each { |item| yield [item] }
  else
    # Like #permutation but without the `used` bookkeeping: every element
    # is eligible at every output position, so no backtracking state is
    # needed beyond the shared `result` buffer.
    result = []
    perms = lambda do |index|
      0.upto(@size-1) do |i|
        result[index] = get(i)
        if index < n-1
          perms[index+1]
        else
          yield result.dup
        end
      end
    end
    perms[0]
  end
  self
end
# With one or more vector or array arguments, return the cartesian product of
# this vector's elements and those of each argument; with no arguments, return the
# result of multiplying all this vector's items together.
#
# @overload product(*vectors)
# Return a `Vector` of all combinations of elements from this `Vector` and each
# of the given vectors or arrays. The length of the returned `Vector` is the product
# of `self.size` and the size of each argument vector or array.
# @overload product
# Return the result of multiplying all the items in this `Vector` together.
#
# @return [Vector]
def product(*vectors)
  # if no vectors passed, return "product" as in result of multiplying all items
  return super if vectors.empty?
  vectors.unshift(self)
  # Any empty factor makes the whole cartesian product empty.
  if vectors.any?(&:empty?)
    return block_given? ? self : []
  end
  # Odometer-style iteration: counters[i] is the current index into
  # vectors[i]. bump_counters advances the last counter and carries
  # leftward, returning true once every combination has been produced.
  counters = Array.new(vectors.size, 0)
  bump_counters = lambda do
    i = vectors.size-1
    counters[i] += 1
    while counters[i] == vectors[i].size
      counters[i] = 0
      i -= 1
      return true if i == -1 # we are done
      counters[i] += 1
    end
    false # not done yet
  end
  # Materialize the tuple addressed by the current counter values.
  # (The block parameter named `index` is the counter value, `i` its slot.)
  build_array = lambda do
    array = []
    counters.each_with_index { |index,i| array << vectors[i][index] }
    array
  end
  if block_given?
    while true
      yield build_array[]
      return self if bump_counters[]
    end
  else
    # NOTE(review): the no-block form returns a plain Array here, although
    # the docs advertise @return [Vector] — confirm callers expect an Array.
    result = []
    while true
      result << build_array[]
      return result if bump_counters[]
    end
  end
end
# Assume all elements are vectors or arrays and transpose the rows and columns.
# In other words, take the first element of each nested vector/array and gather
# them together into a new `Vector`. Do likewise for the second, third, and so on
# down to the end of each nested vector/array. Gather all the resulting `Vectors`
# into a new `Vector` and return it.
#
# This operation is closely related to {#zip}. The result is almost the same as
# calling {#zip} on the first nested vector/array with the others supplied as
# arguments.
#
# @return [Vector]
def transpose
  return self.class.empty if empty?
  # One output row per element of the first nested collection.
  result = Array.new(first.size) { [] }
  0.upto(@size-1) do |i|
    source = get(i)
    # Mirror Array#transpose: every row must have the same length.
    if source.size != result.size
      raise IndexError, "element size differs (#{source.size} should be #{result.size})"
    end
    # Distribute this row's elements across the output columns.
    0.upto(result.size-1) do |j|
      result[j].push(source[j])
    end
  end
  result.map! { |a| self.class.new(a) }
  self.class.new(result)
end
# By using binary search, finds a value from this `Vector` which meets the
# condition defined by the provided block. Behavior is just like `Array#bsearch`.
# See `Array#bsearch` for details.
#
# @return [Object]
def bsearch
  low, high, result = 0, @size, nil
  while low < high
    mid = (low + ((high - low) >> 1)) # overflow-safe midpoint
    val = get(mid)
    v = yield val
    if v.is_a? Numeric
      # "find-any" mode (same contract as Array#bsearch): 0 means found,
      # positive means the target is to the left, negative to the right.
      if v == 0
        return val
      elsif v > 0
        high = mid
      else
        low = mid + 1
      end
    elsif v == true
      # "find-minimum" mode: remember this candidate and keep searching
      # left for an earlier element that also satisfies the predicate.
      result = val
      high = mid
    elsif !v
      # false/nil: predicate not satisfied; search to the right.
      low = mid + 1
    else
      raise TypeError, "wrong argument type #{v.class} (must be numeric, true, false, or nil)"
    end
  end
  result
end
# Return an empty `Vector` instance, of the same class as this one. Useful if you
# have multiple subclasses of `Vector` and want to treat them polymorphically.
#
# @return [Vector]
def clear
  # A "cleared" persistent vector is simply the canonical empty instance of
  # this class; there is no per-instance state to reset.
  self.class.empty
end
# Return a randomly chosen item from this `Vector`. If the vector is empty, return `nil`.
#
# @return [Object]
def sample
  # NOTE(review): when the vector is empty, rand(0) returns a Float in
  # [0, 1); the documented nil return then relies on #get treating that
  # out-of-range index as missing — confirm against #get's bounds check.
  get(rand(@size))
end
# Return a new `Vector` with only the elements at the given `indices`, in the
# order specified by `indices`. If any of the `indices` do not exist, `nil`s will
# appear in their places.
#
# @param indices [Array] The indices to retrieve and gather into a new `Vector`
# @return [Vector]
# Return a new `Vector` with only the elements at the given `indices`, in
# that order; indices that do not exist contribute `nil`.
#
# @param indices [Array] The indices to retrieve and gather into a new `Vector`
# @return [Vector]
def values_at(*indices)
  gathered = indices.map { |i| get(i) }
  self.class.new(gathered.freeze)
end
# Return the index of the last element which is equal to the provided object,
# or for which the provided block returns true.
#
# @overload rindex(obj)
# Return the index of the last element in this `Vector` which is `#==` to `obj`.
# @overload rindex { |item| ... }
# Return the index of the last element in this `Vector` for which the block
# returns true. (Iteration starts from the last element, counts back, and
# stops as soon as a matching element is found.)
#
# @return [Index]
# Return the index of the last element `#==` to `obj`, or (block form) the
# last element for which the block is truthy. Iteration runs from the tail
# toward the head and stops at the first match. With neither an argument
# nor a block, an `Enumerator` is returned.
#
# @return [Index]
def rindex(obj = (missing_arg = true))
  i = @size - 1
  if missing_arg
    return enum_for(:rindex) unless block_given?
    reverse_each do |item|
      return i if yield(item)
      i -= 1
    end
  else
    reverse_each do |item|
      return i if item == obj
      i -= 1
    end
  end
  nil
end
# Assumes all elements are nested, indexable collections, and searches through them,
# comparing `obj` with the first element of each nested collection. Return the
# first nested collection which matches, or `nil` if none is found.
#
# @param obj [Object] The object to search for
# @return [Object]
# Treat elements as nested indexable collections and return the first one
# whose FIRST element `#==` `obj`, or `nil` if none matches.
#
# @param obj [Object] The object to search for
# @return [Object]
def assoc(obj)
  each do |entry|
    return entry if obj == entry[0]
  end
  nil
end
# Assumes all elements are nested, indexable collections, and searches through them,
# comparing `obj` with the second element of each nested collection. Return the
# first nested collection which matches, or `nil` if none is found.
#
# @param obj [Object] The object to search for
# @return [Object]
# Treat elements as nested indexable collections and return the first one
# whose SECOND element `#==` `obj`, or `nil` if none matches.
#
# @param obj [Object] The object to search for
# @return [Object]
def rassoc(obj)
  each do |entry|
    return entry if obj == entry[1]
  end
  nil
end
# Return an `Array` with the same elements, in the same order. The returned
# `Array` may or may not be frozen.
#
# @return [Array]
# Return an `Array` with the same elements in the same order. With a
# single-level trie the root IS the element array (and may be frozen);
# otherwise the trie is flattened into a fresh Array.
#
# @return [Array]
def to_a
  return @root if @levels == 0
  flatten_node(@root, @levels * BITS_PER_LEVEL, [])
end
# Return true if `other` has the same type and contents as this `Vector`.
#
# @param other [Object] The collection to compare with
# @return [Boolean]
# Return true if `other` has the same type and contents as this `Vector`.
#
# @param other [Object] The collection to compare with
# @return [Boolean]
def eql?(other)
  return true if equal?(other)
  # Cheap checks first: exact class and size, then compare the tries.
  same_shape = instance_of?(other.class) && @size == other.size
  return false unless same_shape
  @root.eql?(other.instance_variable_get(:@root))
end
# See `Object#hash`.
# @return [Integer]
# Order-sensitive hash over the elements (acc*31 + item.hash per element,
# expressed via shift-and-subtract). See `Object#hash`.
# @return [Integer]
def hash
  reduce(0) do |acc, item|
    ((acc << 5) - acc) + item.hash
  end
end
# @return [::Array]
# @private
def marshal_dump
  # Serialize as a plain Array of elements; the trie is rebuilt on load.
  to_a
end
# @private
def marshal_load(array)
  # Rebuild the trie from the dumped Array. Freezing lets #initialize adopt
  # the array without a defensive copy.
  initialize(array.freeze)
end
private
# Yield every element under `node` in index order. `level` counts how many
# trie levels remain above the leaves; at 0 the node holds elements directly.
def traverse_depth_first(node, level, &block)
  if level == 0
    node.each(&block)
  else
    node.each { |child| traverse_depth_first(child, level - 1, &block) }
  end
end
# Yield every element under `node` in REVERSE index order; mirror image of
# #traverse_depth_first.
def reverse_traverse_depth_first(node, level, &block)
  if level == 0
    node.reverse_each(&block)
  else
    node.reverse_each { |child| reverse_traverse_depth_first(child, level - 1, &block) }
  end
end
# Walk down from `node` to the leaf array that holds `index`, using
# BITS_PER_LEVEL-wide slices of the index as slot numbers at each level.
def leaf_node_for(node, bitshift, index)
  until bitshift <= 0
    node = node[(index >> bitshift) & INDEX_MASK]
    bitshift -= BITS_PER_LEVEL
  end
  node
end
def update_root(index, item)
  root, levels = @root, @levels
  # Deepen the tree until `index` falls within its addressable range.
  while index >= (1 << (BITS_PER_LEVEL * (levels + 1)))
    root = [root].freeze
    levels += 1
  end
  new_root = update_leaf_node(root, levels * BITS_PER_LEVEL, index, item)
  if new_root.equal?(root)
    # Item was identical to what is already stored: structural sharing means
    # nothing changed, so this very instance can be reused.
    self
  else
    self.class.alloc(new_root, @size > index ? @size : index + 1, levels)
  end
end
def update_leaf_node(node, bitshift, index, item)
  slot_index = (index >> bitshift) & INDEX_MASK
  if bitshift > 0
    # Not at a leaf yet: recursively rebuild the child for this slot
    # (creating it if the path does not exist yet).
    old_child = node[slot_index] || []
    item = update_leaf_node(old_child, bitshift - BITS_PER_LEVEL, index, item)
  end
  existing_item = node[slot_index]
  if existing_item.equal?(item)
    # Nothing changed at this slot; share the existing (frozen) node.
    node
  else
    # Path-copy: duplicate only this node and point it at the new child/item.
    node.dup.tap { |n| n[slot_index] = item }.freeze
  end
end
# Collect the elements with indices `from..to` (both inclusive) out of the
# subtree rooted at `node` into a flat Array.
def flatten_range(node, bitshift, from, to)
  from_slot = (from >> bitshift) & INDEX_MASK
  to_slot = (to >> bitshift) & INDEX_MASK
  if bitshift == 0 # are we at the bottom?
    node.slice(from_slot, to_slot-from_slot+1)
  elsif from_slot == to_slot
    # Whole range lives under a single child; recurse into it.
    flatten_range(node[from_slot], bitshift - BITS_PER_LEVEL, from, to)
  else
    # the following bitmask can be used to pick out the part of the from/to indices
    # which will be used to direct path BELOW this node
    mask = ((1 << bitshift) - 1)
    result = []
    # Left edge: either the whole first child, or a partial range of it.
    if from & mask == 0
      flatten_node(node[from_slot], bitshift - BITS_PER_LEVEL, result)
    else
      result.concat(flatten_range(node[from_slot], bitshift - BITS_PER_LEVEL, from, from | mask))
    end
    # Middle children are covered entirely.
    (from_slot+1).upto(to_slot-1) do |slot_index|
      flatten_node(node[slot_index], bitshift - BITS_PER_LEVEL, result)
    end
    # Right edge: either the whole last child, or a partial range of it.
    if to & mask == mask
      flatten_node(node[to_slot], bitshift - BITS_PER_LEVEL, result)
    else
      result.concat(flatten_range(node[to_slot], bitshift - BITS_PER_LEVEL, to & ~mask, to))
    end
    result
  end
end
# Append every element in the subtree rooted at `node` to `result` and
# return it. `bitshift` 0 means `node` already holds elements; one level
# above the leaves is special-cased to skip a recursion per leaf.
def flatten_node(node, bitshift, result)
  case bitshift
  when 0
    result.concat(node)
  when BITS_PER_LEVEL
    node.each { |leaf| result.concat(leaf) }
  else
    child_shift = bitshift - BITS_PER_LEVEL
    node.each { |child| flatten_node(child, child_shift, result) }
  end
  result
end
# Return a new Vector of up to `length` elements starting at index `from`,
# or nil for an invalid start/length (matching Array#slice semantics).
def subsequence(from, length)
  return nil if from < 0 || length < 0 || from > @size
  length = @size - from if from + length > @size
  return self.class.empty if length == 0
  self.class.new(flatten_range(@root, @levels * BITS_PER_LEVEL, from, from + length - 1))
end
# Append every element from index `from` to the end of the subtree rooted at
# `node` onto `result`, and return `result`.
def flatten_suffix(node, bitshift, from, result)
  from_slot = (from >> bitshift) & INDEX_MASK
  if bitshift == 0
    if from_slot == 0
      result.concat(node)
    else
      result.concat(node.slice(from_slot, 32)) # entire suffix of node. excess length is ignored by #slice
    end
  else
    mask = ((1 << bitshift) - 1)
    if from & mask == 0
      # `from` is aligned on a child boundary: take whole children.
      from_slot.upto(node.size-1) do |i|
        flatten_node(node[i], bitshift - BITS_PER_LEVEL, result)
      end
    elsif child = node[from_slot]
      # Partial first child, then whole children for the rest.
      flatten_suffix(child, bitshift - BITS_PER_LEVEL, from, result)
      (from_slot+1).upto(node.size-1) do |i|
        flatten_node(node[i], bitshift - BITS_PER_LEVEL, result)
      end
    end
    result
  end
end
# Return a new Vector keeping this vector's first `from` elements and
# replacing everything after them with the elements of `suffix` (an Array
# that is consumed destructively by replace_node_suffix).
def replace_suffix(from, suffix)
  # new suffix can go directly after existing elements
  raise IndexError if from > @size
  root, levels = @root, @levels
  if (from >> (BITS_PER_LEVEL * (@levels + 1))) != 0
    # index where new suffix goes doesn't fall within current tree
    # we will need to deepen tree
    root = [root].freeze
    levels += 1
  end
  new_size = from + suffix.size
  root = replace_node_suffix(root, levels * BITS_PER_LEVEL, from, suffix)
  if !suffix.empty?
    # Chunk the leftover suffix into 32-wide nodes once per existing level,
    # hang it off the root, then deepen the tree while the root overflows.
    levels.times { suffix = suffix.each_slice(32).to_a }
    root.concat(suffix)
    while root.size > 32
      root = root.each_slice(32).to_a
      levels += 1
    end
  else
    # Suffix fully consumed: flatten away single-child roots.
    while root.size == 1 && levels > 0
      root = root[0]
      levels -= 1
    end
  end
  self.class.alloc(root.freeze, new_size, levels)
end
# Rebuild the subtree rooted at `node` so that elements before index `from`
# are kept and the remainder is drawn from `suffix`. NOTE: consumes `suffix`
# destructively via Array#shift.
def replace_node_suffix(node, bitshift, from, suffix)
  from_slot = (from >> bitshift) & INDEX_MASK
  if bitshift == 0
    # Leaf level: keep the prefix of this leaf and fill the rest (up to the
    # 32-slot capacity) from the suffix.
    if from_slot == 0
      suffix.shift(32)
    else
      node.take(from_slot).concat(suffix.shift(32 - from_slot))
    end
  else
    mask = ((1 << bitshift) - 1)
    if from & mask == 0
      # `from` is aligned on a child boundary.
      if from_slot == 0
        # Entire node is replaced by suffix elements, re-chunked to depth.
        new_node = suffix.shift(32 * (1 << bitshift))
        while bitshift != 0
          new_node = new_node.each_slice(32).to_a
          bitshift -= BITS_PER_LEVEL
        end
        new_node
      else
        # Keep the first from_slot children, rebuild the rest from suffix.
        result = node.take(from_slot)
        remainder = suffix.shift((32 - from_slot) * (1 << bitshift))
        while bitshift != 0
          remainder = remainder.each_slice(32).to_a
          bitshift -= BITS_PER_LEVEL
        end
        result.concat(remainder)
      end
    elsif child = node[from_slot]
      # Split point falls inside a child: recurse into it, then rebuild the
      # remaining 31 - from_slot sibling slots from the suffix.
      result = node.take(from_slot)
      result.push(replace_node_suffix(child, bitshift - BITS_PER_LEVEL, from, suffix))
      remainder = suffix.shift((31 - from_slot) * (1 << bitshift))
      while bitshift != 0
        remainder = remainder.each_slice(32).to_a
        bitshift -= BITS_PER_LEVEL
      end
      result.concat(remainder)
    else
      # `from` beyond the last existing child should be impossible given the
      # bounds checks in replace_suffix.
      raise "Shouldn't happen"
    end
  end
end
end
# The canonical empty `Vector`. Returned by `Hamster.vector` and `Vector[]` when
# invoked with no arguments; also returned by `Vector.empty`. Prefer using this
# one rather than creating many empty vectors using `Vector.new`.
#
EmptyVector = Hamster::Vector.empty
end
|
# Rails production environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true
  # Disable Rails's static asset server (Apache or nginx will already do this).
  # NOTE(review): `serve_static_assets` was renamed `serve_static_files` in
  # Rails 4.2 (the latter appears elsewhere in this repo) — confirm the Rails
  # version this snapshot targets before changing it.
  config.serve_static_assets = false
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # Generate digests for assets URLs.
  config.assets.digest = true
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Set to :debug to see everything in the log.
  config.log_level = :info
  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]
  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
log to STDOUT
# Rails production environment configuration (later snapshot: logs to STDOUT).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true
  # Disable Rails's static asset server (Apache or nginx will already do this).
  # NOTE(review): `serve_static_assets` was renamed `serve_static_files` in
  # Rails 4.2 — confirm the Rails version this snapshot targets.
  config.serve_static_assets = false
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # Generate digests for assets URLs.
  config.assets.digest = true
  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Set to :debug to see everything in the log.
  config.log_level = :info
  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]
  # Use a different logger for distributed setups.
  # Log to STDOUT (e.g. for Heroku/containerized deployments).
  config.logger = Logger.new(STDOUT)
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
|
require 'spec_helper'
# Request specs exercising Handlebarer's template rendering through real
# controller actions (users index and show).
describe Handlebarer::Renderer do
  context 'users/index' do
    before do
      # Reset fixture data so the index only contains the two users below.
      User.all.each do |user|
        user.destroy
      end
      @joe = User.create(:name => 'Joe', :email => 'joe@gmail.com')
      @mike = User.create(:name => 'Mike', :email => 'mike@gmail.com')
    end
    before :each do
      get '/users'
    end
    it 'render users index page' do
      response.should render_template(:index)
    end
    it 'renders the template HTML' do
      response.body.should include '<h1 id="topHeading">Hello All Users</h1>'
    end
    it 'renders users' do
      response.body.should include @mike.name
      response.body.should include @joe.name
    end
  end
  context 'users/show' do
    before :each do
      # Stub the lookup so the action renders a known user without DB access.
      @user = stub_model(User, :name => 'Sam', :email => 'sam@gmail.com')
      User.should_receive(:find).and_return(@user)
    end
    it 'renders instance variables' do
      get user_path(@user)
      response.body.should include 'My name is %s' % @user.name
    end
  end
end
update renderer spec to check for rendered html as well as model instance vars
require 'spec_helper'
# Request specs exercising Handlebarer's template rendering; this revision
# asserts the rendered HTML markup, not just the raw names.
describe Handlebarer::Renderer do
  context 'users/index' do
    before do
      # Reset fixture data so the index only contains the two users below.
      User.all.each do |user|
        user.destroy
      end
      @joe = User.create(:name => 'Joe', :email => 'joe@gmail.com')
      @mike = User.create(:name => 'Mike', :email => 'mike@gmail.com')
    end
    before :each do
      get '/users'
    end
    it 'render users index page' do
      response.should render_template(:index)
    end
    it 'renders the template HTML' do
      response.body.should include '<h1 id="topHeading">Hello All Users</h1>'
    end
    it 'renders users' do
      # Check the surrounding markup too, proving the template interpolated
      # each model's name into its own paragraph.
      response.body.should include "<p>#{@mike.name}</p>"
      response.body.should include "<p>#{@joe.name}</p>"
    end
  end
  context 'users/show' do
    before :each do
      # Stub the lookup so the action renders a known user without DB access.
      @user = stub_model(User, :name => 'Sam', :email => 'sam@gmail.com')
      User.should_receive(:find).and_return(@user)
    end
    it 'renders instance variables' do
      get user_path(@user)
      response.body.should include 'My name is %s' % @user.name
    end
  end
end
###
# Blog settings
###
# Time.zone = "UTC"
# Bilingual site: English is mounted at the root, other locales prefixed.
activate :i18n, :mount_at_root => :en
activate :middleman_simple_thumbnailer
# English blog instance (mounted at the site root).
activate :blog do |blog|
  # This will add a prefix to all links, template references and source paths
  blog.name = "en"
  blog.permalink = "{year}/{month}/{day}/{title}.html"
  # Matcher for blog source files
  blog.sources = "blog/en/{year}-{month}-{day}-{title}.html"
  blog.taglink = "tag/{tag}.html"
  # blog.layout = "layout"
  blog.summary_separator = /(READMORE)/
  # blog.summary_length = 250
  blog.year_link = "{year}.html"
  blog.month_link = "{year}/{month}.html"
  blog.day_link = "{year}/{month}/{day}.html"
  # blog.default_extension = ".markdown"
  blog.tag_template = "tag_en.html"
  blog.calendar_template = "calendar_en.html"
  blog.layout = "post"
  # Enable pagination
  blog.paginate = true
  blog.per_page = 6
  blog.page_link = "page/{num}"
end
# French blog instance (everything prefixed with /fr).
activate :blog do |blog|
  # This will add a prefix to all links, template references and source paths
  blog.name = "fr"
  blog.permalink = "fr/{year}/{month}/{day}/{title}.html"
  # Matcher for blog source files
  blog.sources = "blog/fr/{year}-{month}-{day}-{title}.html"
  blog.taglink = "fr/tag/{tag}.html"
  # blog.layout = "layout"
  blog.summary_separator = /(READMORE)/
  # blog.summary_length = 250
  blog.year_link = "fr/{year}.html"
  blog.month_link = "fr/{year}/{month}.html"
  blog.day_link = "fr/{year}/{month}/{day}.html"
  # blog.default_extension = ".markdown"
  blog.tag_template = "tag_fr.html"
  blog.calendar_template = "calendar_fr.html"
  blog.layout = "post"
  # Enable pagination
  blog.paginate = true
  blog.per_page = 6
  blog.page_link = "page/{num}"
end
# Theme settings consumed by the Casper templates.
set :casper, {
  blog: {
    url: 'http://the-french-cook.com/',
    name: 'The French cook',
    description: 'Make the world a better place to eat',
    date_format: '%d %B %Y',
    navigation: true,
    logo: 'cover.jpg' # Optional
  },
  author: {
    name: 'The Cooker',
    bio: "Developer by day, fighter and cook by night",
    location: 'Berlin',
    website: nil, # Optional
    gravatar_email: 'fabien@the-french-cook.com' # Optional
  },
  navigation: {
    "Dev and Fight" => "http://fabbook.fr",
    "Fabphoto" => "http://fabphoto.fr",
    "Fabfight" => "http://fabfight.com"
  }
}
# Feeds and sitemap are raw XML; render them without a layout.
page '/feed.en.xml', layout: false
page '/feed.fr.xml', layout: false
page '/sitemap.xml', layout: false
# Deployment credentials are kept out of the repo in parameter.yml.
config = YAML.load_file("parameter.yml")
###
# Helpers
###
activate :deploy do |deploy|
  deploy.method = :ftp
  deploy.host = config['deploy']['host']
  deploy.user = config['deploy']['user']
  deploy.password = config['deploy']['password']
  deploy.path = config['deploy']['path']
  deploy.build_before = true # default: false
end
###
# Compass
###
# Change Compass configuration
# compass_config do |config|
#   config.output_style = :compact
# end
###
# Page options, layouts, aliases and proxies
###
# Per-page layout changes:
#
# With no layout
# page "/path/to/file.html", layout: false
#
# With alternative layout
# page "/path/to/file.html", layout: :otherlayout
#
# A path which all have the same layout
# with_layout :admin do
#   page "/admin/*"
# end
# Proxy pages (http://middlemanapp.com/basics/dynamic-pages/)
# proxy "/this-page-has-no-template.html", "/template-file.html", locals: {
#  which_fake_page: "Rendering a fake page with a local variable" }
###
# Helpers
###
# Automatic image dimensions on image_tag helper
# activate :automatic_image_sizes
# Reload the browser automatically whenever files change
activate :livereload
# Pretty URLs - http://middlemanapp.com/basics/pretty-urls/
activate :directory_indexes
# Middleman-Syntax - https://github.com/middleman/middleman-syntax
set :haml, { ugly: true }
set :markdown_engine, :redcarpet
set :markdown, fenced_code_blocks: true, smartypants: true
activate :syntax, line_numbers: false
set :css_dir, 'stylesheets'
set :js_dir, 'javascripts'
set :images_dir, 'images'
set :partials_dir, 'partials'
# Templates used only as building blocks must not be built as pages.
ignore '/partials/*'
ignore '/tag_en.html.haml'
ignore '/tag_fr.html.haml'
ignore '/calendar_en.html.haml'
ignore '/calendar_fr.html.haml'
# Generate a locale-appropriate author page for each language.
ready do
  langs.each do |locale|
    if locale == I18n.default_locale
      proxy "/author/#{blog_author.name.parameterize}.html", "/author.#{locale}.html", ignore: true do
        ::I18n.locale = locale
      end
    else
      proxy "/#{locale}/author/#{blog_author.name.parameterize}.html", "/author.#{locale}.html", ignore: true do
        ::I18n.locale = locale
      end
    end
  end
end
# Build-specific configuration
configure :build do
  # For example, change the Compass output style for deployment
  activate :minify_css
  # Minify Javascript on build
  activate :minify_javascript
  activate :minify_html
  # Enable cache buster
  # activate :asset_hash
  # Use relative URLs
  # activate :relative_assets
  # Or use a different image path
  # set :http_prefix, "/Content/images/"
end
update links
###
# Blog settings
###
# Time.zone = "UTC"
# Bilingual site: English is mounted at the root, other locales prefixed.
activate :i18n, :mount_at_root => :en
activate :middleman_simple_thumbnailer
# English blog instance (mounted at the site root).
activate :blog do |blog|
  # This will add a prefix to all links, template references and source paths
  blog.name = "en"
  blog.permalink = "{year}/{month}/{day}/{title}.html"
  # Matcher for blog source files
  blog.sources = "blog/en/{year}-{month}-{day}-{title}.html"
  blog.taglink = "tag/{tag}.html"
  # blog.layout = "layout"
  blog.summary_separator = /(READMORE)/
  # blog.summary_length = 250
  blog.year_link = "{year}.html"
  blog.month_link = "{year}/{month}.html"
  blog.day_link = "{year}/{month}/{day}.html"
  # blog.default_extension = ".markdown"
  blog.tag_template = "tag_en.html"
  blog.calendar_template = "calendar_en.html"
  blog.layout = "post"
  # Enable pagination
  blog.paginate = true
  blog.per_page = 6
  blog.page_link = "page/{num}"
end
# French blog instance (everything prefixed with /fr).
activate :blog do |blog|
  # This will add a prefix to all links, template references and source paths
  blog.name = "fr"
  blog.permalink = "fr/{year}/{month}/{day}/{title}.html"
  # Matcher for blog source files
  blog.sources = "blog/fr/{year}-{month}-{day}-{title}.html"
  blog.taglink = "fr/tag/{tag}.html"
  # blog.layout = "layout"
  blog.summary_separator = /(READMORE)/
  # blog.summary_length = 250
  blog.year_link = "fr/{year}.html"
  blog.month_link = "fr/{year}/{month}.html"
  blog.day_link = "fr/{year}/{month}/{day}.html"
  # blog.default_extension = ".markdown"
  blog.tag_template = "tag_fr.html"
  blog.calendar_template = "calendar_fr.html"
  blog.layout = "post"
  # Enable pagination
  blog.paginate = true
  blog.per_page = 6
  blog.page_link = "page/{num}"
end
# Theme settings consumed by the Casper templates.
set :casper, {
  blog: {
    url: 'http://the-french-cook.com/',
    name: 'The French cook',
    description: 'Make the world a better place to eat',
    date_format: '%d %B %Y',
    navigation: true,
    logo: 'cover.jpg' # Optional
  },
  author: {
    name: 'The Cooker',
    bio: "Developer by day, fighter and cook by night",
    location: 'Berlin',
    website: nil, # Optional
    gravatar_email: 'fabien@the-french-cook.com' # Optional
  },
  navigation: {
    "Fabbook" => "http://fabbook.fr",
    "Fabphoto" => "http://fabphoto.fr",
    "Fabfight" => "http://fabfight.com"
  }
}
# Feeds and sitemap are raw XML; render them without a layout.
page '/feed.en.xml', layout: false
page '/feed.fr.xml', layout: false
page '/sitemap.xml', layout: false
# Deployment credentials are kept out of the repo in parameter.yml.
config = YAML.load_file("parameter.yml")
###
# Helpers
###
activate :deploy do |deploy|
  deploy.method = :ftp
  deploy.host = config['deploy']['host']
  deploy.user = config['deploy']['user']
  deploy.password = config['deploy']['password']
  deploy.path = config['deploy']['path']
  deploy.build_before = true # default: false
end
###
# Compass
###
# Change Compass configuration
# compass_config do |config|
#   config.output_style = :compact
# end
###
# Page options, layouts, aliases and proxies
###
# Per-page layout changes:
#
# With no layout
# page "/path/to/file.html", layout: false
#
# With alternative layout
# page "/path/to/file.html", layout: :otherlayout
#
# A path which all have the same layout
# with_layout :admin do
#   page "/admin/*"
# end
# Proxy pages (http://middlemanapp.com/basics/dynamic-pages/)
# proxy "/this-page-has-no-template.html", "/template-file.html", locals: {
#  which_fake_page: "Rendering a fake page with a local variable" }
###
# Helpers
###
# Automatic image dimensions on image_tag helper
# activate :automatic_image_sizes
# Reload the browser automatically whenever files change
activate :livereload
# Pretty URLs - http://middlemanapp.com/basics/pretty-urls/
activate :directory_indexes
# Middleman-Syntax - https://github.com/middleman/middleman-syntax
set :haml, { ugly: true }
set :markdown_engine, :redcarpet
set :markdown, fenced_code_blocks: true, smartypants: true
activate :syntax, line_numbers: false
set :css_dir, 'stylesheets'
set :js_dir, 'javascripts'
set :images_dir, 'images'
set :partials_dir, 'partials'
# Templates used only as building blocks must not be built as pages.
ignore '/partials/*'
ignore '/tag_en.html.haml'
ignore '/tag_fr.html.haml'
ignore '/calendar_en.html.haml'
ignore '/calendar_fr.html.haml'
# Generate a locale-appropriate author page for each language.
ready do
  langs.each do |locale|
    if locale == I18n.default_locale
      proxy "/author/#{blog_author.name.parameterize}.html", "/author.#{locale}.html", ignore: true do
        ::I18n.locale = locale
      end
    else
      proxy "/#{locale}/author/#{blog_author.name.parameterize}.html", "/author.#{locale}.html", ignore: true do
        ::I18n.locale = locale
      end
    end
  end
end
# Build-specific configuration
configure :build do
  # For example, change the Compass output style for deployment
  activate :minify_css
  # Minify Javascript on build
  activate :minify_javascript
  activate :minify_html
  # Enable cache buster
  # activate :asset_hash
  # Use relative URLs
  # activate :relative_assets
  # Or use a different image path
  # set :http_prefix, "/Content/images/"
end
|
# frozen_string_literal: true
require_relative 'processors'
module BBLib
  # Wraps a hash path (or set of paths) and maps an action onto every element
  # matching those paths, optionally filtered by a condition and applied
  # recursively to descendants.
  class HashPathProc
    include BBLib::Effortless

    attr_ary_of String, :paths, default: [''], serialize: true, uniq: true
    attr_of [String, Symbol], :action, default: nil, allow_nil: true, serialize: true, pre_proc: proc { |arg| HashPathProc.map_action(arg.to_sym) }
    attr_ary :args, default: [], serialize: true
    attr_hash :options, default: {}, serialize: true
    attr_of [String, Proc], :condition, default: nil, allow_nil: true, serialize: true
    attr_bool :recursive, default: false, serialize: true
    attr_bool :class_based, default: true, serialize: true

    # Applies the configured action to every matching element of +hash+ (a Hash
    # or Array — see the monkey patches below). Returns +hash+ mutated in
    # place, or the processed tree value if +hash+ cannot be replaced.
    def process(hash)
      return hash unless @action && hash
      tree = hash.to_tree_hash
      paths.each do |path|
        children = recursive ? tree.find(path).flat_map(&:descendants) : tree.find(path)
        children.each do |child|
          next unless check_condition(child.value)
          HashPathProcs.send(find_action(action), child, *full_args, class_based: class_based)
        end
      end
      begin
        hash.replace(tree.value)
      rescue StandardError
        tree.value
      end
    end

    # Evaluates the condition (if any) against +value+; any raised error is
    # treated as a non-match.
    # SECURITY NOTE: a String condition is eval'd — only supply trusted
    # condition strings, never user input.
    def check_condition(value)
      return true unless condition
      if condition.is_a?(String)
        eval(condition)
      else
        condition.call(value)
      end
    rescue StandardError
      false
    end

    protected

    USED_KEYWORDS = [:action, :args, :paths, :recursive, :condition].freeze

    # Falls back to :custom when HashPathProcs has no handler for the action.
    def find_action(action)
      (HashPathProcs.respond_to?(action) ? action : :custom)
    end

    # Builds the positional argument list forwarded to the dispatched proc.
    def full_args
      (HASH_PATH_PROC_TYPES.include?(action) ? [] : [action]) +
        args +
        (options.nil? || options.empty? ? [] : [options]) # FIX: nil check must come before .empty?
    end

    # Normalizes an action alias to its canonical HASH_PATH_PROC_TYPES key.
    def self.map_action(action)
      clean = HASH_PATH_PROC_TYPES.find { |k, v| action == k || v.include?(action) }
      clean ? clean.first : action
    end

    # Effortless init hook: extracts action/paths/options from loose args.
    def simple_init(*args)
      options = BBLib.named_args(*args)
      # FIX: was non-mutating #merge whose result was discarded, so a nested
      # :options hash was silently dropped.
      options.merge!(options.delete(:options)) if options[:options]
      USED_KEYWORDS.each { |k| options.delete(k) }
      self.options = options
      if args.first.is_a?(Symbol) && @action.nil?
        self.action = args.shift
        self.paths = args.shift if args.first.is_a?(String)
      elsif action && args.first.is_a?(String)
        self.paths = args.first
      end
      self.args += args.find_all { |arg| !arg.is_a?(Hash) } unless args.empty?
    end
  end
end
# Monkey patches
class Hash
  # Builds a BBLib::HashPathProc from +args+ and applies it to this hash.
  def hash_path_proc(*args)
    processor = BBLib::HashPathProc.new(*args)
    processor.process(self)
  end

  alias hpath_proc hash_path_proc
end
# Monkey patches
class Array
  # Builds a BBLib::HashPathProc from +args+ and applies it to this array.
  def hash_path_proc(*args)
    processor = BBLib::HashPathProc.new(*args)
    processor.process(self)
  end

  alias hpath_proc hash_path_proc
end
Changed action to a symbol.
# frozen_string_literal: true
require_relative 'processors'
module BBLib
  # Wraps a hash path (or set of paths) and maps an action onto every element
  # matching those paths, optionally filtered by a condition and applied
  # recursively to descendants.
  class HashPathProc
    include BBLib::Effortless

    attr_ary_of String, :paths, default: [''], serialize: true, uniq: true
    attr_of [String, Symbol], :action, default: nil, allow_nil: true, serialize: true, pre_proc: proc { |arg| HashPathProc.map_action(arg.to_s.to_sym) }
    attr_ary :args, default: [], serialize: true
    attr_hash :options, default: {}, serialize: true
    attr_of [String, Proc], :condition, default: nil, allow_nil: true, serialize: true
    attr_bool :recursive, default: false, serialize: true
    attr_bool :class_based, default: true, serialize: true

    # Applies the configured action to every matching element of +hash+ (a Hash
    # or Array — see the monkey patches below). Returns +hash+ mutated in
    # place, or the processed tree value if +hash+ cannot be replaced.
    def process(hash)
      return hash unless @action && hash
      tree = hash.to_tree_hash
      paths.each do |path|
        children = recursive ? tree.find(path).flat_map(&:descendants) : tree.find(path)
        children.each do |child|
          next unless check_condition(child.value)
          HashPathProcs.send(find_action(action), child, *full_args, class_based: class_based)
        end
      end
      begin
        hash.replace(tree.value)
      rescue StandardError
        tree.value
      end
    end

    # Evaluates the condition (if any) against +value+; any raised error is
    # treated as a non-match.
    # SECURITY NOTE: a String condition is eval'd — only supply trusted
    # condition strings, never user input.
    def check_condition(value)
      return true unless condition
      if condition.is_a?(String)
        eval(condition)
      else
        condition.call(value)
      end
    rescue StandardError
      false
    end

    protected

    USED_KEYWORDS = [:action, :args, :paths, :recursive, :condition].freeze

    # Falls back to :custom when HashPathProcs has no handler for the action.
    def find_action(action)
      (HashPathProcs.respond_to?(action) ? action : :custom)
    end

    # Builds the positional argument list forwarded to the dispatched proc.
    def full_args
      (HASH_PATH_PROC_TYPES.include?(action) ? [] : [action]) +
        args +
        (options.nil? || options.empty? ? [] : [options]) # FIX: nil check must come before .empty?
    end

    # Normalizes an action alias to its canonical HASH_PATH_PROC_TYPES key.
    def self.map_action(action)
      clean = HASH_PATH_PROC_TYPES.find { |k, v| action == k || v.include?(action) }
      clean ? clean.first : action
    end

    # Effortless init hook: extracts action/paths/options from loose args.
    def simple_init(*args)
      options = BBLib.named_args(*args)
      # FIX: was non-mutating #merge whose result was discarded, so a nested
      # :options hash was silently dropped.
      options.merge!(options.delete(:options)) if options[:options]
      USED_KEYWORDS.each { |k| options.delete(k) }
      self.options = options
      if args.first.is_a?(Symbol) && @action.nil?
        self.action = args.shift
        self.paths = args.shift if args.first.is_a?(String)
      elsif action && args.first.is_a?(String)
        self.paths = args.first
      end
      self.args += args.find_all { |arg| !arg.is_a?(Hash) } unless args.empty?
    end
  end
end
# Monkey patches
class Hash
  # Builds a BBLib::HashPathProc from +args+ and applies it to this hash.
  def hash_path_proc(*args)
    processor = BBLib::HashPathProc.new(*args)
    processor.process(self)
  end

  alias hpath_proc hash_path_proc
end
# Monkey patches
class Array
  # Builds a BBLib::HashPathProc from +args+ and applies it to this array.
  def hash_path_proc(*args)
    processor = BBLib::HashPathProc.new(*args)
    processor.process(self)
  end

  alias hpath_proc hash_path_proc
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{acts_as_price}
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeroen van Ingen"]
s.date = %q{2012-06-03}
s.description = %q{A specified database column acts as a price and creates on the fly methods like 'price' and 'price_in_cents'. For more information visit: http://github.com/jeroeningen/acts_as_price}
s.email = %q{jeroeningen@gmail.com}
s.extra_rdoc_files = [
"README.rdoc"
]
s.files = [
"Gemfile",
"Gemfile.lock",
"MIT-LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"acts_as_price.gemspec",
"init.rb",
"install.rb",
"lib/acts_as_price.rb",
"lib/acts_as_price_helper.rb",
"rdoc/classes/ActiveRecord.html",
"rdoc/classes/ActiveRecord/Acts.html",
"rdoc/classes/ActiveRecord/Acts/Price.html",
"rdoc/classes/ActiveRecord/Acts/Price/ClassMethods.html",
"rdoc/classes/ActsAsPriceHelper.html",
"rdoc/created.rid",
"rdoc/files/README_rdoc.html",
"rdoc/files/lib/acts_as_price_helper_rb.html",
"rdoc/files/lib/acts_as_price_rb.html",
"rdoc/fr_class_index.html",
"rdoc/fr_file_index.html",
"rdoc/fr_method_index.html",
"rdoc/index.html",
"rdoc/rdoc-style.css",
"spec/advanced_tests/car_spec.rb",
"spec/advanced_tests/fueltype_spec.rb",
"spec/models/car.rb",
"spec/models/fueltype.rb",
"spec/simple_tests/car_and_fueltype_spec.rb",
"spec/spec_helper.rb",
"uninstall.rb"
]
s.homepage = %q{http://github.com/jeroeningen/acts_as_price}
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{A specified database column acts as a price}
s.test_files = [
"spec/advanced_tests/car_spec.rb",
"spec/advanced_tests/fueltype_spec.rb",
"spec/models/car.rb",
"spec/models/fueltype.rb",
"spec/simple_tests/car_and_fueltype_spec.rb",
"spec/spec_helper.rb"
]
if s.respond_to? :specification_version then
# NOTE(review): current_version is assigned but never used — jeweler
# boilerplate; fix in the Rakefile task, not here.
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<rails>, ["= 3.0.7"])
s.add_runtime_dependency(%q<rake>, ["= 0.8.7"])
s.add_development_dependency(%q<jeweler>, ["~> 1.5.2"])
# NOTE(review): rails is declared twice in every branch of this file —
# regenerate the gemspec after deduplicating the dependency list.
s.add_runtime_dependency(%q<rails>, ["= 3.0.7"])
s.add_development_dependency(%q<rspec-rails>, ["= 2.5.0"])
s.add_development_dependency(%q<shoulda-matchers>, ["= 1.0.0.beta2"])
s.add_development_dependency(%q<autotest>, ["= 4.4.6"])
s.add_development_dependency(%q<rcov>, ["= 0.9.9"])
s.add_development_dependency(%q<metrical>, ["= 0.0.5"])
s.add_development_dependency(%q<activerecord-nulldb-adapter>, ["= 0.2.1"])
else
s.add_dependency(%q<rails>, ["= 3.0.7"])
s.add_dependency(%q<rake>, ["= 0.8.7"])
s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_dependency(%q<rails>, ["= 3.0.7"])
s.add_dependency(%q<rspec-rails>, ["= 2.5.0"])
s.add_dependency(%q<shoulda-matchers>, ["= 1.0.0.beta2"])
s.add_dependency(%q<autotest>, ["= 4.4.6"])
s.add_dependency(%q<rcov>, ["= 0.9.9"])
s.add_dependency(%q<metrical>, ["= 0.0.5"])
s.add_dependency(%q<activerecord-nulldb-adapter>, ["= 0.2.1"])
end
else
s.add_dependency(%q<rails>, ["= 3.0.7"])
s.add_dependency(%q<rake>, ["= 0.8.7"])
s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
s.add_dependency(%q<rails>, ["= 3.0.7"])
s.add_dependency(%q<rspec-rails>, ["= 2.5.0"])
s.add_dependency(%q<shoulda-matchers>, ["= 1.0.0.beta2"])
s.add_dependency(%q<autotest>, ["= 4.4.6"])
s.add_dependency(%q<rcov>, ["= 0.9.9"])
s.add_dependency(%q<metrical>, ["= 0.0.5"])
s.add_dependency(%q<activerecord-nulldb-adapter>, ["= 0.2.1"])
end
end
Regenerate gemspec for version 0.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "acts_as_price"
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jeroen van Ingen"]
s.date = "2012-12-10"
s.description = "A specified database column acts as a price and creates on the fly methods like 'price' and 'price_in_cents'. For more information visit: http://github.com/jeroeningen/acts_as_price"
s.email = "jeroeningen@gmail.com"
s.extra_rdoc_files = [
"README.rdoc"
]
s.files = [
"Gemfile",
"MIT-LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"acts_as_price.gemspec",
"init.rb",
"install.rb",
"lib/acts_as_price.rb",
"lib/acts_as_price_helper.rb",
"rdoc/ActiveRecord.html",
"rdoc/ActiveRecord/Acts.html",
"rdoc/ActiveRecord/Acts/Price.html",
"rdoc/ActiveRecord/Acts/Price/ClassMethods.html",
"rdoc/ActsAsPriceHelper.html",
"rdoc/README_rdoc.html",
"rdoc/classes/ActiveRecord.html",
"rdoc/classes/ActiveRecord/Acts.html",
"rdoc/classes/ActiveRecord/Acts/Price.html",
"rdoc/classes/ActiveRecord/Acts/Price/ClassMethods.html",
"rdoc/classes/ActsAsPriceHelper.html",
"rdoc/created.rid",
"rdoc/files/README_rdoc.html",
"rdoc/files/lib/acts_as_price_helper_rb.html",
"rdoc/files/lib/acts_as_price_rb.html",
"rdoc/fr_class_index.html",
"rdoc/fr_file_index.html",
"rdoc/fr_method_index.html",
"rdoc/images/brick.png",
"rdoc/images/brick_link.png",
"rdoc/images/bug.png",
"rdoc/images/bullet_black.png",
"rdoc/images/bullet_toggle_minus.png",
"rdoc/images/bullet_toggle_plus.png",
"rdoc/images/date.png",
"rdoc/images/find.png",
"rdoc/images/loadingAnimation.gif",
"rdoc/images/macFFBgHack.png",
"rdoc/images/package.png",
"rdoc/images/page_green.png",
"rdoc/images/page_white_text.png",
"rdoc/images/page_white_width.png",
"rdoc/images/plugin.png",
"rdoc/images/ruby.png",
"rdoc/images/tag_green.png",
"rdoc/images/wrench.png",
"rdoc/images/wrench_orange.png",
"rdoc/images/zoom.png",
"rdoc/index.html",
"rdoc/js/darkfish.js",
"rdoc/js/jquery.js",
"rdoc/js/quicksearch.js",
"rdoc/js/thickbox-compressed.js",
"rdoc/lib/acts_as_price_helper_rb.html",
"rdoc/lib/acts_as_price_rb.html",
"rdoc/rdoc-style.css",
"rdoc/rdoc.css",
"spec/advanced_tests/car_spec.rb",
"spec/advanced_tests/fueltype_spec.rb",
"spec/models/car.rb",
"spec/models/fueltype.rb",
"spec/simple_tests/car_and_fueltype_spec.rb",
"spec/spec_helper.rb",
"uninstall.rb"
]
s.homepage = "http://github.com/jeroeningen/acts_as_price"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.11"
s.summary = "A specified database column acts as a price"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
# NOTE(review): rails and rake are each declared twice in every branch,
# and the "> 3.0.7" constraints are unbounded (any future major version
# satisfies them) — regenerate after fixing the Rakefile dependency list.
s.add_runtime_dependency(%q<rails>, ["> 3.0.7"])
s.add_runtime_dependency(%q<rake>, ["> 0.8.7"])
s.add_development_dependency(%q<jeweler>, ["> 1.5.2"])
s.add_runtime_dependency(%q<rails>, ["> 3.0.7"])
s.add_runtime_dependency(%q<rake>, ["> 0.8.7"])
else
s.add_dependency(%q<rails>, ["> 3.0.7"])
s.add_dependency(%q<rake>, ["> 0.8.7"])
s.add_dependency(%q<jeweler>, ["> 1.5.2"])
s.add_dependency(%q<rails>, ["> 3.0.7"])
s.add_dependency(%q<rake>, ["> 0.8.7"])
end
else
s.add_dependency(%q<rails>, ["> 3.0.7"])
s.add_dependency(%q<rake>, ["> 0.8.7"])
s.add_dependency(%q<jeweler>, ["> 1.5.2"])
s.add_dependency(%q<rails>, ["> 3.0.7"])
s.add_dependency(%q<rake>, ["> 0.8.7"])
end
end
|
# Production environment configuration for WebsiteOne.
WebsiteOne::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both thread web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
config.action_dispatch.rack_cache = true
config.serve_static_assets = true
config.static_cache_control = "public, max-age=31536000"
# Compress JavaScripts and CSS.
config.assets.compress = true
config.assets.js_compressor = :uglifier
config.assets.css_compressor = :yui
# Do not fallback to assets pipeline if a precompiled asset is missed.
# NOTE(review): this is overridden to false at the bottom of this file —
# the later assignment wins, so live compilation is actually disabled.
config.assets.compile = true
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css.scss, and all non-JS/CSS in app/assets folder are already added.
config.assets.precompile += %w( mercury_init.js 404.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
# SECURITY NOTE(review): real-looking credentials are committed below, even
# though commented out — rotate them and load secrets from ENV instead.
#config.action_mailer.smtp_settings = {
# :address => 'smtp.gmail.com',
# :port => 587,
# :domain => '',
# :user_name => 'wso.av.test@gmail.com', #This is a temporary solution
# :password => 'Wso12345', #This is a temporary solution
# :authentication => 'plain',
# :enable_starttls_auto => true }
#config.action_mailer.raise_delivery_errors = true
#config.action_mailer.delivery_method = :smtp
config.action_mailer.default_url_options = { :host => 'agileventures.org' }
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# may be needed for integrating bootstrap with Heroku deployment
#config.cache_classes = true
#config.serve_static_assets = true
# NOTE(review): overrides the earlier assets.compile = true above.
config.assets.compile = false
#config.assets.digest = true
end
Add Projects and Events to precompile list
# Production environment configuration for WebsiteOne.
WebsiteOne::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both thread web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
config.action_dispatch.rack_cache = true
config.serve_static_assets = true
config.static_cache_control = "public, max-age=31536000"
# Compress JavaScripts and CSS.
config.assets.compress = true
config.assets.js_compressor = :uglifier
config.assets.css_compressor = :yui
# Do not fallback to assets pipeline if a precompiled asset is missed.
# NOTE(review): this is overridden to false at the bottom of this file —
# the later assignment wins, so live compilation is actually disabled.
config.assets.compile = true
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css.scss, and all non-JS/CSS in app/assets folder are already added.
config.assets.precompile += %w( mercury_init.js 404.js projects.js events.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
# SECURITY NOTE(review): real-looking credentials are committed below, even
# though commented out — rotate them and load secrets from ENV instead.
#config.action_mailer.smtp_settings = {
# :address => 'smtp.gmail.com',
# :port => 587,
# :domain => '',
# :user_name => 'wso.av.test@gmail.com', #This is a temporary solution
# :password => 'Wso12345', #This is a temporary solution
# :authentication => 'plain',
# :enable_starttls_auto => true }
#config.action_mailer.raise_delivery_errors = true
#config.action_mailer.delivery_method = :smtp
config.action_mailer.default_url_options = { :host => 'agileventures.org' }
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# may be needed for integrating bootstrap with Heroku deployment
#config.cache_classes = true
#config.serve_static_assets = true
# NOTE(review): overrides the earlier assets.compile = true above.
config.assets.compile = false
#config.assets.digest = true
end
|
# Tweet model backed by a table whose primary key column is "id".
class Tweet < ActiveRecord::Base
  # FIX: set_primary_key is deprecated (removed in Rails 4); use the
  # primary_key= writer instead.
  self.primary_key = :id
end
Fixing deprecated set_primary_key call.
# Tweet model backed by a table whose primary key column is "id".
class Tweet < ActiveRecord::Base
# Modern replacement for the deprecated set_primary_key macro.
self.primary_key = :id
end
|
# Production environment configuration (uses public_file_server and Action
# Cable options, so this appears to target Rails 5 — confirm against Gemfile).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "haute_tension_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
# Log to STDOUT (e.g. for Heroku / containerized deployments) when requested.
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
Production now shows errors.
# Production environment configuration (uses public_file_server and Action
# Cable options, so this appears to target Rails 5 — confirm against Gemfile).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
# config.consider_all_requests_local = false
# switch back to false when done
# SECURITY NOTE(review): leaving this true in production exposes full stack
# traces and request details to every visitor — revert as the TODO says.
config.consider_all_requests_local = true
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "haute_tension_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
# Log to STDOUT (e.g. for Heroku / containerized deployments) when requested.
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
|
Not 1.9 specs
describe "not()" do
  # not(arg).method and method(not(arg)) raise SyntaxErrors on 1.8. Here we
  # use #inspect to test that the syntax works on 1.9
  pending "can be used as a function" do
    lambda do
      not(true).inspect
    end.should_not raise_error(SyntaxError)
  end

  pending "returns false if the argument is true" do
    not(true).inspect.should == "false"
  end

  pending "returns true if the argument is false" do
    not(false).inspect.should == "true"
  end

  pending "returns true if the argument is nil" do
    # FIX: was copy-pasted as not(false); this example must exercise nil.
    not(nil).inspect.should == "true"
  end
end
|
# Production environment configuration (serve_static_files / assets.digest
# suggest a Rails 4.2-era app — confirm against Gemfile).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# NOTE(review): this enables X-Sendfile unconditionally while the guidance
# above leaves it commented — presumably the deployment uses Apache with
# mod_xsendfile; file downloads will break under NGINX. Confirm the server.
config.action_dispatch.x_sendfile_header = "X-Sendfile"
end
Edits config to precompile assets.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Serve static files from the `/public` folder. This deployment serves its
  # own static files and precompiled assets. (This key was previously assigned
  # twice — first from ENV['RAILS_SERVE_STATIC_FILES'], then overridden to
  # true at the bottom of the block; consolidated to the effective value.)
  config.serve_static_files = true

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Fall back to the assets pipeline at runtime when a precompiled asset is
  # missed. (Previously assigned false here and re-assigned true at the bottom
  # of the block; only the final value took effect, so it is set once.)
  config.assets.compile = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  config.action_dispatch.x_sendfile_header = "X-Sendfile"
end
|
require "spec_helper"
require "hamster/hash"

# Specs for Hamster::Hash key lookup. #get and #[] are aliases, so the
# whole suite runs once per method name.
describe Hamster::Hash do
  [:get, :[]].each do |method|
    describe "##{method}" do
      context "with a default block" do
        # The block would blow up the spec if lookup ever invoked it here.
        let(:hash) { Hamster.hash("A" => "aye") { |key| fail }}

        context "when the key exists" do
          it "returns the value associated with the key" do
            # BUG FIX: was `hash.get("A")`, which never exercised #[] on the
            # second pass of the loop; dispatch through `method` instead.
            hash.send(method, "A").should == "aye"
          end
        end

        context "when the key does not exist" do
          let(:hash) do
            Hamster.hash("A" => "aye") do |key|
              expect(key).to eq("B")
              "bee"
            end
          end

          it "returns the value from the default block" do
            # BUG FIX: same hard-coded `hash.get("B")` replaced with
            # `send(method, ...)`.
            hash.send(method, "B").should == "bee"
          end
        end
      end

      context "with no default block" do
        let(:hash) { Hamster.hash("A" => "aye", "B" => "bee", "C" => "see", nil => "NIL") }

        [
          %w[A aye],
          %w[B bee],
          %w[C see],
          [nil, "NIL"]
        ].each do |key, value|
          it "returns the value (#{value.inspect}) for an existing key (#{key.inspect})" do
            hash.send(method, key).should == value
          end
        end

        it "returns nil for a non-existing key" do
          hash.send(method, "D").should be_nil
        end
      end
    end
  end
end
Added various new test cases for Hash#get
require "spec_helper"
require "hamster/hash"

# Specs for Hamster::Hash key lookup; #get and #[] share behaviour, so the
# whole suite runs once per method name.
describe Hamster::Hash do
  [:get, :[]].each do |method|
    describe "##{method}" do
      context "with a default block" do
        # The block would blow up the spec if lookup ever invoked it here.
        let(:hash) { Hamster.hash("A" => "aye") { |key| fail }}

        context "when the key exists" do
          it "returns the value associated with the key" do
            hash.send(method, "A").should == "aye"
          end

          it "does not call the default block even if the key is 'nil'" do
            Hamster.hash(nil => 'something') { fail }.send(method, nil)
          end
        end

        context "when the key does not exist" do
          let(:hash) do
            Hamster.hash("A" => "aye") do |key|
              expect(key).to eq("B")
              "bee"
            end
          end

          it "returns the value from the default block" do
            hash.send(method, "B").should == "bee"
          end
        end
      end

      context "with no default block" do
        let(:hash) { Hamster.hash("A" => "aye", "B" => "bee", "C" => "see", nil => "NIL") }

        [
          %w[A aye],
          %w[B bee],
          %w[C see],
          [nil, "NIL"]
        ].each do |key, value|
          it "returns the value (#{value.inspect}) for an existing key (#{key.inspect})" do
            hash.send(method, key).should == value
          end
        end

        it "returns nil for a non-existing key" do
          hash.send(method, "D").should be_nil
        end
      end

      it "uses #hash to look up keys" do
        x = double('0')
        x.should_receive(:hash).and_return(0)
        Hamster.hash(foo: :bar).send(method, x).should be_nil
      end

      # Collisions (same #hash code) must be resolved by calling #eql? on the
      # stored key, not on the probe key.
      it "uses #eql? to compare keys with the same hash code" do
        x = double('x', hash: 42)
        x.should_not_receive(:eql?)
        y = double('y', hash: 42)
        y.should_receive(:eql?).and_return(true)
        Hamster.hash(y => 1)[x].should == 1
      end

      it "does not use #eql? to compare keys with different hash codes" do
        x = double('x', hash: 0)
        x.should_not_receive(:eql?)
        y = double('y', hash: 1)
        y.should_not_receive(:eql?)
        Hamster.hash(y => 1)[x].should be_nil
      end
    end
  end
end
|
# Rack entry point: mount the root app plus the two sub-apps by URL prefix.
require 'app'
require 'first_app'
require 'second_app'

# '/' is the catch-all; the other prefixes take precedence for their paths.
mappings = {
  '/'           => App,
  '/first_app'  => FirstApp::App,
  '/second_app' => SecondApp::App
}

run Rack::URLMap.new(mappings)
Force redeploy with useless commit
# Rack entry point for the combined deployment.
require 'app'
require 'first_app'
require 'second_app'

# Dispatch requests to the matching app by path prefix; '/' is the catch-all.
run Rack::URLMap.new(
  '/' => App,
  '/first_app' => FirstApp::App,
  '/second_app' => SecondApp::App
)
|
#! /usr/bin/env ruby
#
# handler-logstash
#
# DESCRIPTION:
# Designed to take sensu events, transform them into logstash JSON events
# and ship them to a redis server for logstash to index. This also
# generates a tag with either 'sensu-ALERT' or 'sensu-RECOVERY' so that
# searching inside of logstash can be a little easier.
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: diplomat
#
# USAGE:
#
# NOTES:
# Heavily inspired by (er, copied from) the GELF Handler written by
# Joe Miller.
#
# LICENSE:
# Zach Dunn @SillySophist http://github.com/zadunn
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-handler'
require 'redis'
require 'json'
require 'socket'
require 'time'
#
# Logstash Handler
#
class LogstashHandler < Sensu::Handler
  # "client/check" identifier for this event.
  def event_name
    @event['client']['name'] + '/' + @event['check']['name']
  end

  # Human-readable action tag: RESOLVE for recoveries, ALERT for everything else.
  def action_to_string
    @event['action'].eql?('resolve') ? 'RESOLVE' : 'ALERT'
  end

  # Build a logstash-style JSON event from the Sensu event and ship it to the
  # configured output: a redis list (for logstash's redis input) or a UDP socket.
  def handle # rubocop:disable all
    time = Time.now.utc.iso8601
    logstash_msg = {
      :@timestamp => time,
      :@version => 1,
      :source => ::Socket.gethostname,
      :tags => ["sensu-#{action_to_string}"],
      :message => @event['check']['output'],
      :host => @event['client']['name'],
      :timestamp => @event['check']['issued'],
      :address => @event['client']['address'],
      :check_name => @event['check']['name'],
      :command => @event['check']['command'],
      :status => @event['check']['status'],
      :flapping => @event['check']['flapping'],
      :occurrences => @event['occurrences'],
      :action => @event['action']
    }
    logstash_msg[:type] = settings['logstash']['type'] if settings['logstash'].key?('type')
    case settings['logstash']['output']
    when 'redis'
      redis = Redis.new(host: settings['logstash']['server'], port: settings['logstash']['port'])
      redis.lpush(settings['logstash']['list'], logstash_msg.to_json)
    when 'udp'
      socket = UDPSocket.new
      # BUG FIX: was `JSON.parse(logstash_msg)`, which raises TypeError when
      # given a Hash; serialize the hash to a JSON string before sending, the
      # same way the redis branch does.
      socket.send(logstash_msg.to_json, 0, settings['logstash']['server'], settings['logstash']['port'])
      socket.close
    end
  end
end
add event status readability
#! /usr/bin/env ruby
#
# handler-logstash
#
# DESCRIPTION:
# Designed to take sensu events, transform them into logstash JSON events
# and ship them to a redis server for logstash to index. This also
# generates a tag with either 'sensu-ALERT' or 'sensu-RECOVERY' so that
# searching inside of logstash can be a little easier.
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: diplomat
#
# USAGE:
#
# NOTES:
# Heavily inspried (er, copied from) the GELF Handler writeen by
# Joe Miller.
#
# LICENSE:
# Zach Dunn @SillySophist http://github.com/zadunn
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-handler'
require 'redis'
require 'json'
require 'socket'
require 'time'
#
# Logstash Handler
#
class LogstashHandler < Sensu::Handler
  # "client/check" identifier for this event.
  def event_name
    @event['client']['name'] + '/' + @event['check']['name']
  end

  # Human-readable action tag: RESOLVE for recoveries, ALERT for everything else.
  def action_to_string
    @event['action'].eql?('resolve') ? 'RESOLVE' : 'ALERT'
  end

  # Map the numeric Sensu check status to its conventional severity name.
  def event_status
    case @event['check']['status']
    when 0
      'OK'
    when 1
      'WARNING'
    when 2
      'CRITICAL'
    else
      'unknown'
    end
  end

  # Build a logstash-style JSON event from the Sensu event and ship it to the
  # configured output: a redis list (for logstash's redis input) or a UDP socket.
  def handle # rubocop:disable all
    time = Time.now.utc.iso8601
    logstash_msg = {
      :@timestamp => time,
      :@version => 1,
      :source => ::Socket.gethostname,
      :tags => ["sensu-#{action_to_string}"],
      :message => @event['check']['output'],
      :host => @event['client']['name'],
      :timestamp => @event['check']['issued'],
      :address => @event['client']['address'],
      :check_name => @event['check']['name'],
      :command => @event['check']['command'],
      :status => event_status,
      :flapping => @event['check']['flapping'],
      :occurrences => @event['occurrences'],
      :action => @event['action']
    }
    logstash_msg[:type] = settings['logstash']['type'] if settings['logstash'].key?('type')
    case settings['logstash']['output']
    when 'redis'
      redis = Redis.new(host: settings['logstash']['server'], port: settings['logstash']['port'])
      redis.lpush(settings['logstash']['list'], logstash_msg.to_json)
    when 'udp'
      socket = UDPSocket.new
      # BUG FIX: was `JSON.parse(logstash_msg)`, which raises TypeError when
      # given a Hash; serialize the hash to a JSON string before sending, the
      # same way the redis branch does.
      socket.send(logstash_msg.to_json, 0, settings['logstash']['server'], settings['logstash']['port'])
      socket.close
    end
  end
end
|
require 'spec_helper'
module Metagit
  describe PrivateRepo do
    let(:repo_path) { "./tmp/test_repo_private" }
    let(:my_email) { "dan@dot.com" }

    before :each do
      # we may mock this if speed/complexity get in the way
      @repo_raw = Metagit::Support::RuggedRepo.new(repo_path, my_email)
      @repo = PrivateRepo.new repo_path
    end

    after :each do
      # clean up after ourselves
      require 'fileutils'
      FileUtils.rm_rf repo_path
    end

    it 'the repo is readable' do
      expect(@repo.readable?).to eq true
    end

    describe "#changes_since?" do
      it "should be true when there are changes" do
        expect(@repo.changes_since? @repo_raw.commits.first).to eq true
      end

      # BUG FIX: description said "should be true" while the assertion
      # (correctly) expects false for the most recent commit.
      it "should be false when there are no changes" do
        expect(@repo.changes_since? @repo_raw.commits.last).to eq false
      end
    end

    describe "#stats_for_commit" do
      it "should have the email of the author" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:author_email]).to eq "dan@dot.com"
      end

      it "should have the commit timestamp" do
        # don't run this near midnight :) TODO: more accuracy
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:time].day).to eq Time.now.day
      end

      it "should have the number of files changed" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:no_files_changed]).to eq 1
      end

      it "should have the number of insertions" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:no_insertions]).to eq 0
      end

      it "should have the number of deletions" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:no_deletions]).to eq 1
      end
    end
  end
end
Added tests for some edge cases on stats
require 'spec_helper'
require 'fileutils'
module Metagit
  describe PrivateRepo do
    let(:repo_path) { "./tmp/test_repo_private" }
    let(:my_email) { "dan@dot.com" }

    before :each do
      # we may mock this if speed/complexity get in the way
      @repo_raw = Metagit::Support::RuggedRepo.new(repo_path, my_email)
      @repo = PrivateRepo.new repo_path
    end

    after :each do
      # clean up after ourselves
      FileUtils.rm_rf repo_path
    end

    it 'the repo is readable' do
      expect(@repo.readable?).to eq true
    end

    describe "#changes_since?" do
      it "should be true when there are changes" do
        expect(@repo.changes_since? @repo_raw.commits.first).to eq true
      end

      # BUG FIX: description said "should be true" while the assertion
      # (correctly) expects false for the most recent commit.
      it "should be false when there are no changes" do
        expect(@repo.changes_since? @repo_raw.commits.last).to eq false
      end
    end

    describe "#stats_for_commit" do
      it "should have the email of the author" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:author_email]).to eq "dan@dot.com"
      end

      it "should have the commit timestamp" do
        # don't run this near midnight :) TODO: more accuracy
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:time].day).to eq Time.now.day
      end

      it "should have the number of files changed" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:no_files_changed]).to eq 1
      end

      it "should have the number of insertions" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:no_insertions]).to eq 0
      end

      it "should have the number of deletions" do
        expect(@repo.stats_for_commit(@repo_raw.commits.first)[:no_deletions]).to eq 1
      end

      it "should also work for other commits" do
        expect(@repo.stats_for_commit(@repo_raw.commits[1])[:author_email]).not_to eq "dan@dot.com"
      end

      it "should also work for other commits files changed" do
        expect(@repo.stats_for_commit(@repo_raw.commits[1])[:no_files_changed]).to eq 1
      end
    end
  end
end
|
require 'sinatra'
require 'sass'
require 'builder'
require 'dalli'
require 'rack-cache'
require 'kgio'
require 'active_support/core_ext/time/calculations'

#
# Defined in ENV on Heroku. To try locally, start memcached and uncomment:
# ENV["MEMCACHE_SERVERS"] = "localhost"
# NOTE(review): the guard reads MEMCACHE_SERVERS but the client connects via
# the MEMCACHIER_* variables — presumably both are set on Heroku; confirm.
if memcache_servers = ENV["MEMCACHE_SERVERS"]
  client = Dalli::Client.new(ENV["MEMCACHIER_SERVERS"],
                             :username => ENV["MEMCACHIER_USERNAME"],
                             :password => ENV["MEMCACHIER_PASSWORD"],
                             :failover => true,
                             :socket_timeout => 1.5,
                             :socket_failure_delay => 0.2,
                             :value_max_bytes => 10485760)
  # HTTP caching middleware backed by the memcached client above.
  use Rack::Cache,
      verbose: true,
      metastore: client,
      entitystore: client
  # Flush the cache
  client.flush
end

# Publicly cache static files and (via the before filter) all responses for 30 minutes.
set :static_cache_control, [:public, max_age: 1800]

before do
  cache_control :public, max_age: 1800 # 30 mins
end
require 'httparty'

# Headers sent on every ChurchSuite API call; the auth token comes from ENV.
CHURCHAPP_HEADERS = {"Content-type" => "application/json", "X-Account" => "winvin", "X-Application" => "Group Slideshow", "X-Auth" => ENV['CHURCHAPP_AUTH']}

require 'google_drive'

helpers do
  # Halt with 401 and a Basic-Auth challenge unless valid credentials were sent.
  def protect!
    unless authorized?
      response['WWW-Authenticate'] = %(Basic realm="Restricted Area")
      throw(:halt, [401, "Not authorized\n"])
    end
  end

  # True when the request carries the Basic-Auth credentials configured in ENV.
  def authorized?
    @auth ||= Rack::Auth::Basic::Request.new(request.env)
    username = ENV['GROUPS_SIGNUP_USERNAME']
    password = ENV['GROUPS_SIGNUP_PASSWORD']
    @auth.provided? && @auth.basic? && @auth.credentials && @auth.credentials == [username, password]
  end

  # Fetch one page of calendar events from ChurchSuite as Event wrappers;
  # returns [] when the response has no "events" key.
  def fetch_events(page)
    response = HTTParty.get("https://api.churchsuite.co.uk/v1/calendar/events?page=#{page}", headers: CHURCHAPP_HEADERS)
    json = JSON.parse(response.body)
    if json["events"]
      json["events"].map { |e| Event.new(e) }
    else
      []
    end
  end
end
get '/' do
events = (fetch_events(1) + fetch_events(2) + fetch_events(3)).uniq(&:start_time)
@featured_events = events.select(&:featured?)
@healing_events = events.select { |e| e.category == 'Healing' }
@term = GroupTerm.new(Date.today)
@talks = get_talks
@hellobar = hellobar
haml :index
end
get '/groups-list/?' do
response = HTTParty.get('https://api.churchsuite.co.uk/v1/smallgroups/groups?view=active', headers: CHURCHAPP_HEADERS)
@groups = JSON.parse(response.body)["groups"].map { |g| Group.new(g) }
haml :groups_list, layout: nil
end
get '/groups-slideshow/?' do
response = HTTParty.get('https://api.churchsuite.co.uk/v1/smallgroups/groups?view=active', headers: CHURCHAPP_HEADERS)
puts response.body
@groups = JSON.parse(response.body)["groups"].map { |g| Group.new(g) }
haml :groups, layout: nil
end
get '/groups-signup/?' do
redirect 'https://winchester-vineyard.herokuapp.com/groups-signup' unless request.secure?
protect!
response = HTTParty.get('https://api.churchsuite.co.uk/v1/addressbook/contacts?per_page=400', headers: CHURCHAPP_HEADERS)
@contacts = JSON.parse(response.body)["contacts"]
response = HTTParty.get('https://api.churchsuite.co.uk/v1/smallgroups/groups?view=active', headers: CHURCHAPP_HEADERS)
@groups = JSON.parse(response.body)["groups"].map { |g| Group.new(g) }
haml :groups_signup, layout: nil
end
post '/groups-signup/:group_id/:contact_id' do |group_id, contact_id|
halt 426 unless request.secure?
body = {
"action" => "add",
"members" => {
"contacts" => [ contact_id.to_i ],
}
}.to_json
url = 'https://api.churchsuite.co.uk/v1/smallgroups/group/'+group_id+'/members'
puts body, url
response = HTTParty.post(url, headers: CHURCHAPP_HEADERS, body: body)
puts response.body
response.code
end
helpers do
def secs_until(n)
Time.parse(n['datetime']) - Time.now
end
end
SECONDS_IN_A_DAY = 86400
SECONDS_IN_A_WEEK = 86400 * 7
get '/feed.xml' do
require 'firebase'
firebase = Firebase::Client.new('https://winvin.firebaseio.com/')
all = firebase.get('news').body.values
soon = all.select do |n|
secs_until(n) >= 0 && secs_until(n) < SECONDS_IN_A_DAY
end
soon.each do |n|
n['id'] += '-soon'
n['pubDate'] = Time.parse(n['datetime']) - SECONDS_IN_A_DAY
end
upcoming = all.select do |n|
secs_until(n) >= SECONDS_IN_A_DAY && secs_until(n) < SECONDS_IN_A_WEEK
end
upcoming.each do |n|
n['id'] += '-upcoming'
n['pubDate'] = Time.parse(n['datetime']) - SECONDS_IN_A_WEEK
end
@news = soon + upcoming
builder :news
end
# Represents the life-group "term" (Spring/Summer/Autumn) that a given date
# falls in, plus the display strings associated with that term.
class GroupTerm
  DATA = {
    spring: {
      name: "Spring",
      signup_month: "January",
      start_month: "February",
      end_month: "April"
    },
    summer: {
      name: "Summer",
      signup_month: "May",
      start_month: "June",
      end_month: "August"
    },
    autumn: {
      name: "Autumn",
      signup_month: "September",
      start_month: "October",
      end_month: "December"
    }
  }

  # Months 1-4 map to spring, 5-8 to summer, 9-12 to autumn.
  def initialize(date)
    @date = date
    @term =
      if (1..4).cover?(date.month)
        :spring
      elsif (5..8).cover?(date.month)
        :summer
      else
        :autumn
      end
  end

  # The term four months on from this one (uses ActiveSupport Date#advance).
  def next
    GroupTerm.new(@date.advance(months: 4))
  end

  # Sign-ups open during the first month of each term's four-month cycle.
  def signup_month?
    [1, 5, 9].include?(@date.month)
  end

  def name;         DATA[@term][:name];         end
  def signup_month; DATA[@term][:signup_month]; end
  def start_month;  DATA[@term][:start_month];  end
  def end_month;    DATA[@term][:end_month];    end
end
# A sermon recording, built from one row of the talks spreadsheet.
class Talk
  attr :full_name, :who, :date, :download_url, :slides_url, :slug, :series_name, :title

  def initialize(hash)
    @series_name  = hash['Series']
    # Prefix the title with "[Series] " when the row names a series.
    series_prefix = hash['Series'].blank? ? "" : "[" + hash['Series'] + "] "
    @full_name    = series_prefix + hash['Title']
    @who          = hash['Speaker(s)']
    @date         = Time.parse(hash['Date'])
    @download_url = hash['Talk URL']
    @slides_url   = hash['Slides URL']
    @slug         = hash['Slug']
    @title        = hash['Title']
  end

  # The slug doubles as the talk's identifier (used in feeds and URLs).
  def id
    @slug
  end

  def part_of_a_series?
    @series_name.present?
  end

  def has_slides?
    @slides_url.present?
  end

  # e.g. "Sun 01 Mar 2020: [Series] Title (Speaker)"
  def long_title
    "#{@date.strftime("%a %d %b %Y")}: #{@full_name} (#{@who})"
  end

  def description
    "Given by #{@who} on #{@date.strftime("%a %d %b %y")}."
  end

  # Every talk that reaches this class is considered published.
  def published?
    true
  end
end
# Wraps the raw ChurchSuite small-group hash with presentation helpers.
Group = Struct.new(:hash) do
  # Shown on the public embed?
  def visible?
    hash["embed_visible"] == "1"
  end

  def id
    hash["id"]
  end

  # A sign-up group with no spaces left.
  def full?
    signup? && spaces <= 0
  end

  # Does this group take sign-ups at all (i.e. has a capacity set)?
  def signup?
    !!hash["signup_capacity"]
  end

  def image?
    !hash["images"].empty?
  end

  def location?
    !hash["location"].empty?
  end

  # MatchData covering the description up to its first sentence terminator.
  def first_sentence
    hash["description"].match(/^(.*?)[.!]\s?/)
  end

  def address
    hash["location"]["address"]
  end

  def image
    hash["images"]["original_500"]
  end

  # Remaining capacity, or nil when the group takes no sign-ups.
  def spaces
    return unless signup?
    hash["signup_capacity"].to_i - hash["no_members"].to_i
  end

  # Weekday name ("Sundays".."Saturdays"), or "TBC" when unscheduled.
  def day
    index = hash["day"]
    return "TBC" if index.nil?
    %w(Sundays Mondays Tuesdays Wednesdays Thursdays Fridays Saturdays)[index.to_i]
  end

  def time
    hash["time"]
  end

  # Group name with any "(... 201x)" year suffix stripped.
  def name
    hash["name"].sub(/\(.*201.\)/, '')
  end
end
# Wraps the raw ChurchSuite calendar-event hash with presentation helpers.
Event = Struct.new(:hash) do
  # First three sentences (or everything) of the description, <div> tags removed.
  def description
    plain = hash["description"].gsub(/<div[^>]*>|<\/div>/, '')
    plain.match(/^(((.*?)[.?!]){3}|(.*))/)
  end

  def featured?
    hash["signup_options"]["public"]["featured"] == "1"
  end

  def category
    hash["category"]["name"]
  end

  def name
    hash["name"]
  end

  def can_signup?
    hash["signup_options"]["tickets"]["enabled"] == "1"
  end

  def ticket_url
    hash["signup_options"]["tickets"]["url"]
  end

  def image?
    !hash["images"].empty?
  end

  def image_url
    hash["images"]["original_500"]
  end

  def start_time
    Time.parse(hash["datetime_start"])
  end

  # Midnight at the start of the event (ActiveSupport Time#midnight).
  def start_date
    start_time.midnight
  end

  def end_time
    Time.parse(hash["datetime_end"])
  end

  def end_date
    end_time.midnight
  end

  def end_date_string
    end_date.strftime("%d %b")
  end

  def start_date_string
    start_date.strftime("%d %b")
  end

  def start_time_string
    start_time.strftime("%H:%M")
  end

  def end_time_string
    end_time.strftime("%H:%M")
  end

  # "02 Jan - 04 Jan" for multi-day events, "02 Jan 10:00 - 12:00" otherwise.
  def full_date_string
    multi_day = start_date != end_date
    if multi_day
      "#{start_date_string} - #{end_date_string}"
    else
      "#{start_date_string} #{start_time_string} - #{end_time_string}"
    end
  end

  def location?
    hash["location"].size > 0
  end

  # Google Maps link for the address; nil when the event has no address.
  def location_url
    "http://maps.google.co.uk/?q=" + hash["location"]["address"] rescue nil
  end

  def location_title
    hash["location"]["name"]
  end
end
get '/talks/:slug' do |slug|
  # BUG FIX: `select` returns an Array (never nil), so indexing the result
  # with a String key (`talk_row["Date"]`) raised TypeError whenever a talk
  # matched. `find` returns the single matching row hash, or nil.
  talk_row = talks.find { |t| t["Slug"] == slug }
  halt 404 if talk_row.nil? || talk_row["Date"].empty?
  @talk = Talk.new(talk_row)
  @og_url = 'http://winvin.org.uk/talks/' + slug
  @og_title = "Winchester Vineyard Talk: #{@talk.full_name}"
  @og_description = @talk.description
  haml :talk
end
helpers do
def sheet
session = GoogleDrive::Session.from_service_account_key(ENV["GOOGLE_API_SECRET"] ? StringIO.new(ENV["GOOGLE_API_SECRET"]) : "secret.json")
session.spreadsheet_by_key("1B9G8efynCzeWsHBoHAJeJRpO00AdrzaAZQaS50QCwXI")
end
def hellobar
worksheet = sheet.worksheet_by_sheet_id(0)
[worksheet[1, 2], worksheet[2, 2]]
end
def talks
sheet.worksheet_by_sheet_id("1813659711").list
end
def get_talks
talks.
select {|t| t["Date"].present? }.
map { |t| Talk.new(t) }.
sort_by(&:date).
reverse
end
end
get '/audio_plain' do
@talks = get_talks
haml :audio, :layout => false
end
get '/audio.xml' do
@talks = get_talks
builder :audio
end
get '/students/?' do
haml :students
end
get '/welcome/?' do
@talks = get_talks.select(&:published?).select {|t| t.series_name == "What on earth is the Vineyard" }
haml :welcome
end
get '/buildingforthefuture/?' do
@talks = get_talks.select(&:published?).select {|t| t.series_name == "Building for the future" }
haml :building
end
get '/mydata/?' do
haml :mydata
end
get('/bethelsozo/?') do
haml :sozo
end
get '/lifegroups/?' do
@term = GroupTerm.new(Date.today)
haml :lifegroups
end
get '/adventconspiracy/?' do
haml :adventconspiracy
end
get '/donate/?' do
haml :donate
end
get '/yobl/?' do
@talks = get_talks.select(&:published?).select {|t| t.series_name == "Year of Biblical Literacy" }
haml :yobl
end
get '/givehope/?' do
events = (1..4).reduce([]) { |memo, x| memo + fetch_events(x) }
@christmas_events = events.select { |e| e.category == 'Christmas' }
haml :givehope
end
get '/css/styles.css' do
scss :styles, :style => :expanded
end
get('/node/168/?') { redirect '/#wv-news' }
get('/node/2/?') { redirect '/#wv-sundays' }
get('/node/319/?') { redirect '/#wv-team' }
get('/node/74/?') { redirect '/#wv-growing' }
get '/node/?*' do
redirect '/'
end
not_found do
status 404
haml :not_found
end
get '/audio/?*' do
redirect '/#wv-talks'
end
get('/survey/?') { redirect 'https://docs.google.com/forms/d/e/1FAIpQLScpGATm9QhMj1Qsm46-ISbAecbbQx2s3XsXbpz-1Ki3sAS8qw/viewform?' + request.query_string }
get('/mystory/?') { redirect 'https://goo.gl/forms/TlkzGBBkzctP1azp2' }
get('/groupsslideshow/?') { redirect '/groups-slideshow/' }
get('/feedback/?') { redirect 'https://docs.google.com/forms/d/10iS6tahkIYb_rFu1uNUB9ytjsy_xS138PJcs915qASo/viewform?usp=send_form' }
get('/data-protection-policy/?') { redirect 'https://s3-eu-west-1.amazonaws.com/winchester-vineyard-website-assets/uploads/data-protection-policy.pdf' }
get('/makingithappen/?') { redirect 'https://docs.google.com/forms/d/12LKbZo-FXRk5JAPESu_Zfog7FAtCXtdMAfdHCbQ8OXs/viewform?c=0&w=1' }
get('/connect/?') { redirect '/welcome' }
get('/landing-banner-code/?') { redirect '/students' }
get('/find-us/?') { redirect '/#wv-find-us' }
get('/globalpartners/?') { redirect 'https://winvin.churchsuite.co.uk/donate/fund/0sfturgn' }
get('/focus-on-kids/?') { redirect 'https://winchester-vineyard-website-assets.s3.amazonaws.com/assets/Focus%20on%20Kids%20and%20Youth%20Vision.pdf' }
get('/events/?') { redirect '/#wv-news' }
get('/missions/?') { redirect 'https://drive.google.com/file/d/1L0hBqZDUXfOuVkA8maERoBZGE2qKL8Ji/view' }
# Redirect Events
get('/lggl/?') { redirect 'https://winvin.churchsuite.co.uk/events/qwhzfito' }
get('/destinycoaching/?') { redirect 'https://winvin.churchsuite.co.uk/events/y0eeyqoc' }
get('/spree/?') { redirect 'https://winvin.churchsuite.co.uk/events/eif6aysu' }
get('/alpha/?') { redirect 'https://winvin.churchsuite.co.uk/events/ng61rjiz' }
get('/welcomemeal/?') { redirect 'https://winvin.churchsuite.co.uk/events/cnklrdgw' }
get('/dti/?') { redirect 'https://winvin.churchsuite.co.uk/events/pektpqqj' }
get('/christmasmarket/?') { redirect 'https://winvin.churchsuite.co.uk/events/myhhbjll' }
get('/dadsgroup/?') { redirect 'https://winvin.churchsuite.co.uk/events/hzuhqfgi' }
get('/lam/?') { redirect 'https://winvin.churchsuite.co.uk/events/iz0wb1tx' }
get('/dy/?') { redirect 'https://drive.google.com/file/d/1_e0SApiwHuMfYJm62DR-5Ug1F7K2oRTw/view?usp=sharing' }
get('/worshipday/?') { redirect 'https://winvin.churchsuite.co.uk/events/hz07ghag' }
get('/parenting/?') { redirect 'https://winvin.churchsuite.co.uk/groups/uaebyqql' }
get('/fatherheart/?') { redirect 'https://winvin.churchsuite.co.uk/events/xagnrvgl' }
get('/storehouse/?') { redirect 'https://winvin.churchsuite.co.uk/events/luqnfryq' }
get('/eveningofworship/?') { redirect 'https://winvin.churchsuite.co.uk/events/ghf9dxou' }
get('/weekendaway/?') { redirect 'https://winvin.churchsuite.co.uk/events/7hyocf4b' }
get('/prayer/?') { redirect 'https://winvin.churchsuite.co.uk/events/lroufhaz' }
get('/supernaturalkingdom/?') { redirect 'https://winvin.churchsuite.co.uk/events/koenwqhh' }
get('/adamandeve/?') { redirect 'https://winvin.churchsuite.co.uk/events/xkkds56t' }
get('/gather/?') { redirect 'https://winvin.churchsuite.co.uk/events/jbzn0nqm' }
run Sinatra::Application
created ginny redirect
require 'sinatra'
require 'sass'
require 'builder'
require 'dalli'
require 'rack-cache'
require 'kgio'
require 'active_support/core_ext/time/calculations'
#
# Defined in ENV on Heroku. To try locally, start memcached and uncomment:
# ENV["MEMCACHE_SERVERS"] = "localhost"
if memcache_servers = ENV["MEMCACHE_SERVERS"]
client = Dalli::Client.new(ENV["MEMCACHIER_SERVERS"],
:username => ENV["MEMCACHIER_USERNAME"],
:password => ENV["MEMCACHIER_PASSWORD"],
:failover => true,
:socket_timeout => 1.5,
:socket_failure_delay => 0.2,
:value_max_bytes => 10485760)
use Rack::Cache,
verbose: true,
metastore: client,
entitystore: client
# Flush the cache
client.flush
end
set :static_cache_control, [:public, max_age: 1800]
before do
cache_control :public, max_age: 1800 # 30 mins
end
require 'httparty'
CHURCHAPP_HEADERS = {"Content-type" => "application/json", "X-Account" => "winvin", "X-Application" => "Group Slideshow", "X-Auth" => ENV['CHURCHAPP_AUTH']}
require 'google_drive'
helpers do
def protect!
unless authorized?
response['WWW-Authenticate'] = %(Basic realm="Restricted Area")
throw(:halt, [401, "Not authorized\n"])
end
end
def authorized?
@auth ||= Rack::Auth::Basic::Request.new(request.env)
username = ENV['GROUPS_SIGNUP_USERNAME']
password = ENV['GROUPS_SIGNUP_PASSWORD']
@auth.provided? && @auth.basic? && @auth.credentials && @auth.credentials == [username, password]
end
def fetch_events(page)
response = HTTParty.get("https://api.churchsuite.co.uk/v1/calendar/events?page=#{page}", headers: CHURCHAPP_HEADERS)
json = JSON.parse(response.body)
if json["events"]
json["events"].map { |e| Event.new(e) }
else
[]
end
end
end
get '/' do
events = (fetch_events(1) + fetch_events(2) + fetch_events(3)).uniq(&:start_time)
@featured_events = events.select(&:featured?)
@healing_events = events.select { |e| e.category == 'Healing' }
@term = GroupTerm.new(Date.today)
@talks = get_talks
@hellobar = hellobar
haml :index
end
get '/groups-list/?' do
response = HTTParty.get('https://api.churchsuite.co.uk/v1/smallgroups/groups?view=active', headers: CHURCHAPP_HEADERS)
@groups = JSON.parse(response.body)["groups"].map { |g| Group.new(g) }
haml :groups_list, layout: nil
end
get '/groups-slideshow/?' do
response = HTTParty.get('https://api.churchsuite.co.uk/v1/smallgroups/groups?view=active', headers: CHURCHAPP_HEADERS)
puts response.body
@groups = JSON.parse(response.body)["groups"].map { |g| Group.new(g) }
haml :groups, layout: nil
end
get '/groups-signup/?' do
redirect 'https://winchester-vineyard.herokuapp.com/groups-signup' unless request.secure?
protect!
response = HTTParty.get('https://api.churchsuite.co.uk/v1/addressbook/contacts?per_page=400', headers: CHURCHAPP_HEADERS)
@contacts = JSON.parse(response.body)["contacts"]
response = HTTParty.get('https://api.churchsuite.co.uk/v1/smallgroups/groups?view=active', headers: CHURCHAPP_HEADERS)
@groups = JSON.parse(response.body)["groups"].map { |g| Group.new(g) }
haml :groups_signup, layout: nil
end
post '/groups-signup/:group_id/:contact_id' do |group_id, contact_id|
halt 426 unless request.secure?
body = {
"action" => "add",
"members" => {
"contacts" => [ contact_id.to_i ],
}
}.to_json
url = 'https://api.churchsuite.co.uk/v1/smallgroups/group/'+group_id+'/members'
puts body, url
response = HTTParty.post(url, headers: CHURCHAPP_HEADERS, body: body)
puts response.body
response.code
end
helpers do
def secs_until(n)
Time.parse(n['datetime']) - Time.now
end
end
SECONDS_IN_A_DAY = 86400
SECONDS_IN_A_WEEK = 86400 * 7
get '/feed.xml' do
require 'firebase'
firebase = Firebase::Client.new('https://winvin.firebaseio.com/')
all = firebase.get('news').body.values
soon = all.select do |n|
secs_until(n) >= 0 && secs_until(n) < SECONDS_IN_A_DAY
end
soon.each do |n|
n['id'] += '-soon'
n['pubDate'] = Time.parse(n['datetime']) - SECONDS_IN_A_DAY
end
upcoming = all.select do |n|
secs_until(n) >= SECONDS_IN_A_DAY && secs_until(n) < SECONDS_IN_A_WEEK
end
upcoming.each do |n|
n['id'] += '-upcoming'
n['pubDate'] = Time.parse(n['datetime']) - SECONDS_IN_A_WEEK
end
@news = soon + upcoming
builder :news
end
# Represents the life-group "term" (Spring/Summer/Autumn) that a given date
# falls in, plus the display strings associated with that term.
class GroupTerm
  DATA = {
    spring: {
      name: "Spring",
      signup_month: "January",
      start_month: "February",
      end_month: "April"
    },
    summer: {
      name: "Summer",
      signup_month: "May",
      start_month: "June",
      end_month: "August"
    },
    autumn: {
      name: "Autumn",
      signup_month: "September",
      start_month: "October",
      end_month: "December"
    }
  }

  # Months 1-4 map to spring, 5-8 to summer, 9-12 to autumn.
  def initialize(date)
    @date = date
    @term =
      if (1..4).cover?(date.month)
        :spring
      elsif (5..8).cover?(date.month)
        :summer
      else
        :autumn
      end
  end

  # The term four months on from this one (uses ActiveSupport Date#advance).
  def next
    GroupTerm.new(@date.advance(months: 4))
  end

  # Sign-ups open during the first month of each term's four-month cycle.
  def signup_month?
    [1, 5, 9].include?(@date.month)
  end

  def name;         DATA[@term][:name];         end
  def signup_month; DATA[@term][:signup_month]; end
  def start_month;  DATA[@term][:start_month];  end
  def end_month;    DATA[@term][:end_month];    end
end
# A sermon recording, built from one row of the talks spreadsheet.
class Talk
  attr :full_name, :who, :date, :download_url, :slides_url, :slug, :series_name, :title

  def initialize(hash)
    @series_name  = hash['Series']
    # Prefix the title with "[Series] " when the row names a series.
    series_prefix = hash['Series'].blank? ? "" : "[" + hash['Series'] + "] "
    @full_name    = series_prefix + hash['Title']
    @who          = hash['Speaker(s)']
    @date         = Time.parse(hash['Date'])
    @download_url = hash['Talk URL']
    @slides_url   = hash['Slides URL']
    @slug         = hash['Slug']
    @title        = hash['Title']
  end

  # The slug doubles as the talk's identifier (used in feeds and URLs).
  def id
    @slug
  end

  def part_of_a_series?
    @series_name.present?
  end

  def has_slides?
    @slides_url.present?
  end

  # e.g. "Sun 01 Mar 2020: [Series] Title (Speaker)"
  def long_title
    "#{@date.strftime("%a %d %b %Y")}: #{@full_name} (#{@who})"
  end

  def description
    "Given by #{@who} on #{@date.strftime("%a %d %b %y")}."
  end

  # Every talk that reaches this class is considered published.
  def published?
    true
  end
end
# Wraps the raw ChurchSuite small-group hash with presentation helpers.
Group = Struct.new(:hash) do
  # Shown on the public embed?
  def visible?
    hash["embed_visible"] == "1"
  end

  def id
    hash["id"]
  end

  # A sign-up group with no spaces left.
  def full?
    signup? && spaces <= 0
  end

  # Does this group take sign-ups at all (i.e. has a capacity set)?
  def signup?
    !!hash["signup_capacity"]
  end

  def image?
    !hash["images"].empty?
  end

  def location?
    !hash["location"].empty?
  end

  # MatchData covering the description up to its first sentence terminator.
  def first_sentence
    hash["description"].match(/^(.*?)[.!]\s?/)
  end

  def address
    hash["location"]["address"]
  end

  def image
    hash["images"]["original_500"]
  end

  # Remaining capacity, or nil when the group takes no sign-ups.
  def spaces
    return unless signup?
    hash["signup_capacity"].to_i - hash["no_members"].to_i
  end

  # Weekday name ("Sundays".."Saturdays"), or "TBC" when unscheduled.
  def day
    index = hash["day"]
    return "TBC" if index.nil?
    %w(Sundays Mondays Tuesdays Wednesdays Thursdays Fridays Saturdays)[index.to_i]
  end

  def time
    hash["time"]
  end

  # Group name with any "(... 201x)" year suffix stripped.
  def name
    hash["name"].sub(/\(.*201.\)/, '')
  end
end
# Wraps the raw ChurchSuite calendar-event hash with presentation helpers.
Event = Struct.new(:hash) do
  # First three sentences (or everything) of the description, <div> tags removed.
  def description
    plain = hash["description"].gsub(/<div[^>]*>|<\/div>/, '')
    plain.match(/^(((.*?)[.?!]){3}|(.*))/)
  end

  def featured?
    hash["signup_options"]["public"]["featured"] == "1"
  end

  def category
    hash["category"]["name"]
  end

  def name
    hash["name"]
  end

  def can_signup?
    hash["signup_options"]["tickets"]["enabled"] == "1"
  end

  def ticket_url
    hash["signup_options"]["tickets"]["url"]
  end

  def image?
    !hash["images"].empty?
  end

  def image_url
    hash["images"]["original_500"]
  end

  def start_time
    Time.parse(hash["datetime_start"])
  end

  # Midnight at the start of the event (ActiveSupport Time#midnight).
  def start_date
    start_time.midnight
  end

  def end_time
    Time.parse(hash["datetime_end"])
  end

  def end_date
    end_time.midnight
  end

  def end_date_string
    end_date.strftime("%d %b")
  end

  def start_date_string
    start_date.strftime("%d %b")
  end

  def start_time_string
    start_time.strftime("%H:%M")
  end

  def end_time_string
    end_time.strftime("%H:%M")
  end

  # "02 Jan - 04 Jan" for multi-day events, "02 Jan 10:00 - 12:00" otherwise.
  def full_date_string
    multi_day = start_date != end_date
    if multi_day
      "#{start_date_string} - #{end_date_string}"
    else
      "#{start_date_string} #{start_time_string} - #{end_time_string}"
    end
  end

  def location?
    hash["location"].size > 0
  end

  # Google Maps link for the address; nil when the event has no address.
  def location_url
    "http://maps.google.co.uk/?q=" + hash["location"]["address"] rescue nil
  end

  def location_title
    hash["location"]["name"]
  end
end
# Single talk page with Open Graph metadata for social sharing.
get '/talks/:slug' do |slug|
  # BUG FIX: the old code used Array#select, which returns an Array, and then
  # indexed it with a String key (`talk_row["Date"]`) — a TypeError, so the
  # intended 404 never fired. `find` yields the single matching row or nil.
  talk_row = talks.find { |t| t["Slug"] == slug }
  halt 404 if talk_row.nil? || talk_row["Date"].empty?
  @talk = Talk.new(talk_row)
  @og_url = 'http://winvin.org.uk/talks/' + slug
  @og_title = "Winchester Vineyard Talk: #{@talk.full_name}"
  @og_description = @talk.description
  haml :talk
end
# Sinatra helpers for reading site content out of a Google Sheet.
helpers do
  # Build a Drive session from the service-account JSON in GOOGLE_API_SECRET
  # (falling back to a local secret.json file) and open the content spreadsheet.
  def sheet
    credentials = ENV["GOOGLE_API_SECRET"]
    key_source = credentials ? StringIO.new(credentials) : "secret.json"
    drive = GoogleDrive::Session.from_service_account_key(key_source)
    drive.spreadsheet_by_key("1B9G8efynCzeWsHBoHAJeJRpO00AdrzaAZQaS50QCwXI")
  end

  # The two hello-bar cells from the first worksheet: [row1-col2, row2-col2].
  def hellobar
    ws = sheet.worksheet_by_sheet_id(0)
    [ws[1, 2], ws[2, 2]]
  end

  # Raw talk rows (hashes keyed by the header row) from the talks worksheet.
  def talks
    sheet.worksheet_by_sheet_id("1813659711").list
  end

  # Talks that have a date, wrapped as Talk objects, newest first.
  def get_talks
    dated = talks.select { |row| row["Date"].present? }
    dated.map { |row| Talk.new(row) }.sort_by(&:date).reverse
  end
end
# --- Audio / talk feeds -----------------------------------------------------

# Plain HTML listing of all dated talks (rendered without a layout).
get '/audio_plain' do
  @talks = get_talks
  haml :audio, :layout => false
end

# Podcast XML feed built with Builder from the same talk list.
get '/audio.xml' do
  @talks = get_talks
  builder :audio
end

# --- Content pages (trailing slash optional via "/?") ------------------------

get '/students/?' do
  haml :students
end

# Welcome page, showing the "What on earth is the Vineyard" talk series.
get '/welcome/?' do
  @talks = get_talks.select(&:published?).select {|t| t.series_name == "What on earth is the Vineyard" }
  haml :welcome
end

# Building-for-the-future page with its own talk series.
get '/buildingforthefuture/?' do
  @talks = get_talks.select(&:published?).select {|t| t.series_name == "Building for the future" }
  haml :building
end

get '/mydata/?' do
  haml :mydata
end

get('/bethelsozo/?') do
  haml :sozo
end

# Life groups page; @term drives the signup/term copy (see GroupTerm).
get '/lifegroups/?' do
  @term = GroupTerm.new(Date.today)
  haml :lifegroups
end

get '/adventconspiracy/?' do
  haml :adventconspiracy
end

get '/donate/?' do
  haml :donate
end

# Year of Biblical Literacy page with its talk series.
get '/yobl/?' do
  @talks = get_talks.select(&:published?).select {|t| t.series_name == "Year of Biblical Literacy" }
  haml :yobl
end

# Christmas events pulled from four pages of the events feed.
# NOTE(review): fetch_events is defined elsewhere in this file — confirm its
# pagination contract matches the hard-coded 1..4 range.
get '/givehope/?' do
  events = (1..4).reduce([]) { |memo, x| memo + fetch_events(x) }
  @christmas_events = events.select { |e| e.category == 'Christmas' }
  haml :givehope
end

# Stylesheet compiled from SCSS on request.
get '/css/styles.css' do
  scss :styles, :style => :expanded
end
# --- Legacy Drupal node redirects (specific nodes first; order matters) ------
get('/node/168/?') { redirect '/#wv-news' }
get('/node/2/?') { redirect '/#wv-sundays' }
get('/node/319/?') { redirect '/#wv-team' }
get('/node/74/?') { redirect '/#wv-growing' }

# Catch-all for any other legacy node URL — must stay registered AFTER the
# specific /node/ routes above (Sinatra matches in declaration order).
get '/node/?*' do
  redirect '/'
end

# Custom 404 page.
not_found do
  status 404
  haml :not_found
end

# Old audio URLs now live on the talks section of the home page.
get '/audio/?*' do
  redirect '/#wv-talks'
end

# --- Short links to external forms, documents and ChurchSuite pages ----------
# The /survey link forwards the original query string to the Google Form.
get('/survey/?') { redirect 'https://docs.google.com/forms/d/e/1FAIpQLScpGATm9QhMj1Qsm46-ISbAecbbQx2s3XsXbpz-1Ki3sAS8qw/viewform?' + request.query_string }
get('/mystory/?') { redirect 'https://goo.gl/forms/TlkzGBBkzctP1azp2' }
get('/groupsslideshow/?') { redirect '/groups-slideshow/' }
get('/feedback/?') { redirect 'https://docs.google.com/forms/d/10iS6tahkIYb_rFu1uNUB9ytjsy_xS138PJcs915qASo/viewform?usp=send_form' }
get('/data-protection-policy/?') { redirect 'https://s3-eu-west-1.amazonaws.com/winchester-vineyard-website-assets/uploads/data-protection-policy.pdf' }
get('/makingithappen/?') { redirect 'https://docs.google.com/forms/d/12LKbZo-FXRk5JAPESu_Zfog7FAtCXtdMAfdHCbQ8OXs/viewform?c=0&w=1' }
get('/connect/?') { redirect '/welcome' }
get('/landing-banner-code/?') { redirect '/students' }
get('/find-us/?') { redirect '/#wv-find-us' }
get('/globalpartners/?') { redirect 'https://winvin.churchsuite.co.uk/donate/fund/0sfturgn' }
get('/focus-on-kids/?') { redirect 'https://winchester-vineyard-website-assets.s3.amazonaws.com/assets/Focus%20on%20Kids%20and%20Youth%20Vision.pdf' }
get('/events/?') { redirect '/#wv-news' }
get('/missions/?') { redirect 'https://drive.google.com/file/d/1L0hBqZDUXfOuVkA8maERoBZGE2qKL8Ji/view' }

# Redirect Events
get('/lggl/?') { redirect 'https://winvin.churchsuite.co.uk/events/qwhzfito' }
get('/destinycoaching/?') { redirect 'https://winvin.churchsuite.co.uk/events/y0eeyqoc' }
get('/spree/?') { redirect 'https://winvin.churchsuite.co.uk/events/eif6aysu' }
get('/alpha/?') { redirect 'https://winvin.churchsuite.co.uk/events/ng61rjiz' }
get('/welcomemeal/?') { redirect 'https://winvin.churchsuite.co.uk/events/cnklrdgw' }
get('/dti/?') { redirect 'https://winvin.churchsuite.co.uk/events/pektpqqj' }
get('/christmasmarket/?') { redirect 'https://winvin.churchsuite.co.uk/events/myhhbjll' }
get('/dadsgroup/?') { redirect 'https://winvin.churchsuite.co.uk/events/hzuhqfgi' }
get('/lam/?') { redirect 'https://winvin.churchsuite.co.uk/events/iz0wb1tx' }
get('/dy/?') { redirect 'https://drive.google.com/file/d/1_e0SApiwHuMfYJm62DR-5Ug1F7K2oRTw/view?usp=sharing' }
get('/worshipday/?') { redirect 'https://winvin.churchsuite.co.uk/events/hz07ghag' }
get('/parenting/?') { redirect 'https://winvin.churchsuite.co.uk/groups/uaebyqql' }
get('/fatherheart/?') { redirect 'https://winvin.churchsuite.co.uk/events/xagnrvgl' }
get('/storehouse/?') { redirect 'https://winvin.churchsuite.co.uk/events/luqnfryq' }
get('/ginny/?') { redirect 'https://winvin.churchsuite.co.uk/events/nlgadj1k' }
get('/weekendaway/?') { redirect 'https://winvin.churchsuite.co.uk/events/7hyocf4b' }
get('/prayer/?') { redirect 'https://winvin.churchsuite.co.uk/events/lroufhaz' }
get('/supernaturalkingdom/?') { redirect 'https://winvin.churchsuite.co.uk/events/koenwqhh' }
get('/adamandeve/?') { redirect 'https://winvin.churchsuite.co.uk/events/xkkds56t' }
get('/gather/?') { redirect 'https://winvin.churchsuite.co.uk/events/jbzn0nqm' }

# Hand the Sinatra app to Rack.
# NOTE(review): `run` normally lives in config.ru rather than the app file —
# confirm how this file is loaded before relying on this line.
run Sinatra::Application
|
# frozen_string_literal: true
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "evemonk_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Twelve-factor style logging for container/Heroku deployments.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session

  # Dalli
  # NOTE(review): the :dalli_store symbol is the legacy store shipped by the
  # dalli gem and was removed in dalli 3.x; Rails' built-in :mem_cache_store
  # also uses dalli underneath — confirm gem versions before upgrading.
  config.cache_store = :dalli_store, ENV["MEMCACHED_URL"]

  # Sidekiq
  config.active_job.queue_adapter = :sidekiq

  # # Fluentd
  # config.logger = ActFluentLoggerRails::Logger.new
  # config.lograge.enabled = true
  # config.lograge.formatter = Lograge::Formatters::Json.new
end
Cleanup
# frozen_string_literal: true
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "evemonk_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Twelve-factor style logging for container/Heroku deployments.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session

  # Dalli
  # NOTE(review): :dalli_store was removed in dalli 3.x; Rails' built-in
  # :mem_cache_store also uses dalli — confirm gem versions before upgrading.
  config.cache_store = :dalli_store, ENV["MEMCACHED_URL"]

  # Sidekiq
  config.active_job.queue_adapter = :sidekiq
end
|
require 'rails_helper'
describe Budget::Ballot do
  # NOTE(review): these examples exercise #total_amount_spent although the
  # describe label says #amount_spent — consider renaming one or the other.
  describe "#amount_spent" do
    it "returns the total amount spent in investments" do
      inv1 = create(:budget_investment, :feasible, price: 10000)
      inv2 = create(:budget_investment, :feasible, price: 20000)
      ballot = create(:budget_ballot)

      ballot.investments << inv1
      expect(ballot.total_amount_spent).to eq 10000

      ballot.investments << inv2
      expect(ballot.total_amount_spent).to eq 30000
    end
  end

  describe "#amount_spent by heading" do
    it "returns the amount spent on all investments assigned to a specific heading" do
      heading = create(:budget_heading)
      inv1 = create(:budget_investment, :feasible, price: 10000, heading: heading)
      inv2 = create(:budget_investment, :feasible, price: 20000, heading: create(:budget_heading))
      inv3 = create(:budget_investment, :feasible, price: 25000)
      inv4 = create(:budget_investment, :feasible, price: 40000, heading: heading)
      ballot = create(:budget_ballot)

      ballot.investments << inv1
      ballot.investments << inv2
      ballot.investments << inv3

      # inv1 is the only balloted investment in `heading`; inv3 has no heading.
      expect(ballot.amount_spent(heading.id)).to eq 10000
      expect(ballot.amount_spent(nil)).to eq 25000

      ballot.investments << inv4
      expect(ballot.amount_spent(heading.id)).to eq 50000
    end
  end
end
Adds a ballot spec for amount_available
require 'rails_helper'
describe Budget::Ballot do
  describe "#amount_spent" do
    it "returns the total amount spent in investments" do
      inv1 = create(:budget_investment, :feasible, price: 10000)
      inv2 = create(:budget_investment, :feasible, price: 20000)
      ballot = create(:budget_ballot)

      ballot.investments << inv1
      expect(ballot.total_amount_spent).to eq 10000

      ballot.investments << inv2
      expect(ballot.total_amount_spent).to eq 30000
    end

    it "returns the amount spent on all investments assigned to a specific heading" do
      heading = create(:budget_heading)
      inv1 = create(:budget_investment, :feasible, price: 10000, heading: heading)
      inv2 = create(:budget_investment, :feasible, price: 20000, heading: create(:budget_heading))
      inv3 = create(:budget_investment, :feasible, price: 25000)
      inv4 = create(:budget_investment, :feasible, price: 40000, heading: heading)
      ballot = create(:budget_ballot)

      ballot.investments << inv1
      ballot.investments << inv2
      ballot.investments << inv3

      expect(ballot.amount_spent(heading.id)).to eq 10000
      expect(ballot.amount_spent(nil)).to eq 25000

      ballot.investments << inv4
      expect(ballot.amount_spent(heading.id)).to eq 50000
    end
  end

  describe "#amount_available" do
    it "returns how much is left after taking some investments" do
      budget = create(:budget, price: 200000)
      heading = create(:budget_heading, budget: budget)
      inv1 = create(:budget_investment, :feasible, price: 10000, heading: heading)
      inv2 = create(:budget_investment, :feasible, price: 20000, heading: create(:budget_heading))
      inv3 = create(:budget_investment, :feasible, price: 25000)
      inv4 = create(:budget_investment, :feasible, price: 40000, heading: heading)
      # NOTE(review): the next two lines overwrite the inv1/inv2 created just
      # above — the heading-attached pair is never added to the ballot. This
      # looks like copy-paste residue from the #amount_spent example; confirm
      # which set of investments this example actually intends to ballot.
      inv1 = create(:budget_investment, :feasible, price: 10000)
      inv2 = create(:budget_investment, :feasible, price: 20000)
      ballot = create(:budget_ballot, budget: budget)

      ballot.investments << inv1
      ballot.investments << inv2

      # NOTE(review): 1000000 far exceeds the budget price of 200000 —
      # presumably the heading carries its own (factory-default) allowance;
      # verify against the Budget::Heading factory.
      expect(ballot.amount_available(heading)).to eq 1000000
      expect(ballot.amount_available(nil)).to eq 170000

      ballot.investments << inv3
      ballot.investments << inv4

      expect(ballot.amount_available(heading)).to eq 960000
      expect(ballot.amount_available(nil)).to eq 145000
    end
  end
end
|
Added config.ru for thin
# Rackup entry point (used by thin): load the Sinatra app and hand it to Rack.
# Idiom: require_relative resolves against this file already, so the "./"
# prefix and ".rb" extension are redundant.
require_relative "app"
run Sinatra::Application
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Mailer URLs point at the canonical host over HTTPS. FIX: this was
  # previously assigned twice near the bottom of the block, AFTER the
  # asset-host fallback below had already read it — so that fallback always
  # saw nil. Assign it once, before anything reads it.
  config.action_mailer.default_url_options = { host: 'www.larplibrary.org', protocol: 'https' }

  # Enable serving of images, stylesheets, and JavaScripts from an asset server,
  # falling back to the canonical mailer host when ASSETS_HOST is unset.
  asset_hostname = ENV['ASSETS_HOST'] || config.action_mailer.default_url_options[:host]
  config.action_controller.asset_host = "//#{asset_hostname}"

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  # config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "larp_library_production"

  config.action_mailer.perform_caching = false

  # Deliver mail through Amazon SES (aws_sdk delivery method).
  config.action_mailer.delivery_method = :aws_sdk

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Twelve-factor style logging for container/Heroku deployments.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
Set CORS and caching headers on static files
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # CORS + long-lived caching headers for anything served from /public.
  # NOTE(review): max-age (15552000s = 180 days) and Expires (1 year) disagree;
  # Cache-Control wins for HTTP/1.1 clients — confirm the intended lifetime.
  config.public_file_server.headers = {
    'Access-Control-Allow-Origin' => '*',
    'Cache-Control' => 'public, max-age=15552000',
    'Expires' => 1.year.from_now.to_formatted_s(:rfc822)
  }

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Mailer URLs point at the canonical host over HTTPS. FIX: this was
  # previously assigned twice near the bottom of the block, AFTER the
  # asset-host fallback below had already read it — so that fallback always
  # saw nil. Assign it once, before anything reads it.
  config.action_mailer.default_url_options = { host: 'www.larplibrary.org', protocol: 'https' }

  # Enable serving of images, stylesheets, and JavaScripts from an asset server,
  # falling back to the canonical mailer host when ASSETS_HOST is unset.
  asset_hostname = ENV['ASSETS_HOST'] || config.action_mailer.default_url_options[:host]
  config.action_controller.asset_host = "//#{asset_hostname}"

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  # config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "larp_library_production"

  config.action_mailer.perform_caching = false

  # Deliver mail through Amazon SES (aws_sdk delivery method).
  config.action_mailer.delivery_method = :aws_sdk

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Twelve-factor style logging for container/Heroku deployments.
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
|
require 'spec_helper.rb'
describe SearchResultSet do
  # Raw attributes as they arrive from the search backend ('q' is the query).
  let(:response_attributes) do
    {
      'q' => 'bla',
      'remote_images' => [],
      'local_images' => [],
      'templates' => []
    }
  end

  describe '.create_from_response' do
    subject { SearchResultSet }

    let(:fake_json_response) { response_attributes.to_json }

    it 'instantiates itself with the parsed JSON attributes' do
      result = subject.create_from_response(fake_json_response)
      expect(result).to be_a SearchResultSet
      expect(result.query).to eql 'bla'
      expect(result.remote_images).to eql []
      expect(result.local_images).to eql []
    end

    it 'does not blow up if remote images is not defined' do
      # NOTE(review): the hash uses String keys, so except(:remote_images)
      # removes nothing — presumably 'remote_images' was intended; confirm.
      without_remote_images = response_attributes.except(:remote_images).to_json
      # FIX: RSpec 3 disallows `to_not raise_error(SpecificError)` (it can
      # pass for the wrong reason when a different error is raised); use the
      # bare matcher instead.
      expect {
        subject.create_from_response(without_remote_images)
      }.to_not raise_error
    end
  end

  describe '#as_json' do
    subject { SearchResultSet.new(response_attributes) }

    it 'provides the attributes to be converted to JSON' do
      expected = {
        'query' => 'bla',
        'remote_images' => [],
        'local_images' => [],
        'templates' => []
      }
      expect(subject.as_json).to eq expected
    end
  end
end
Fix deprecation warning.
require 'spec_helper.rb'
describe SearchResultSet do
let(:response_attributes) do
{
'q' => 'bla',
'remote_images' => [],
'local_images' => [],
'templates' => []
}
end
describe '.create_from_response' do
subject { SearchResultSet }
let(:fake_json_response) { response_attributes.to_json }
it 'instatiates itself with the parsed json attribes' do
result = subject.create_from_response(fake_json_response)
expect(result).to be_a SearchResultSet
expect(result.query).to eql 'bla'
expect(result.remote_images).to eql []
expect(result.local_images).to eql []
end
it 'does not blow up if remote images is not defined' do
without_remote_images = response_attributes.except(:remote_images).to_json
expect {
subject.create_from_response(without_remote_images)
}.to_not raise_error
end
end
describe '#as_json' do
subject { SearchResultSet.new(response_attributes) }
it 'provides the attributes to be converted to JSON' do
expected = {
'query' => 'bla',
'remote_images' => [],
'local_images' => [],
'templates' => []
}
expect(subject.as_json).to eq expected
end
end
end
|
Added config.ru
# Rackup entry point for the hpshelper Sinatra app.
require 'rubygems' # NOTE(review): redundant on Ruby >= 1.9; kept for legacy deploys.
require 'sinatra'
# Force production mode and stop Sinatra starting its own server — the Rack
# host (thin/passenger/etc.) runs the app instead.
# NOTE(review): Sinatra's canonical setting is :environment, not :env —
# confirm nothing reads settings.env before renaming.
set :env, :production
disable :run
require 'hpshelper'
run Sinatra::Application
|
Rails.application.configure do
  # Production overrides; these take precedence over config/application.rb.

  # Code is never reloaded between requests in production.
  config.cache_classes = true

  # Eager load the application on boot so threaded and copy-on-write servers
  # perform well. Rake tasks skip eager loading automatically.
  config.eager_load = true

  # Hide detailed error pages from end users and turn caching on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Read encrypted secrets from `config/secrets.yml.enc`, decrypted with the
  # key in ENV["RAILS_MASTER_KEY"] or `config/secrets.yml.key`.
  config.read_encrypted_secrets = true

  # Static files are served by Apache/NGINX; only serve them from Rails when
  # the RAILS_SERVE_STATIC_FILES environment variable is present.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Asset pipeline: compress JavaScript and never fall back to live
  # compilation for missing precompiled assets.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` live in
  # config/initializers/assets.rb.

  # Optional asset host and accelerated file serving:
  # config.action_controller.asset_host = 'http://assets.example.com'
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Action Cable (runs in-process by default):
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # SSL enforcement is currently disabled:
  # config.force_ssl = true

  # Log at the lowest level so diagnostic information is available when
  # problems arise, and tag every line with the request id.
  config.log_level = :debug
  config.log_tags = [:request_id]

  # Alternative cache store for production:
  # config.cache_store = :mem_cache_store

  # Real queuing backend for Active Job (and per-environment queues):
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "beatsbreaksandcrates_#{Rails.env}"

  config.action_mailer.perform_caching = false
  # Set this to true and configure the mail server for immediate delivery to
  # raise delivery errors:
  # config.action_mailer.raise_delivery_errors = false

  # Fall back to I18n.default_locale when a translation cannot be found.
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # The default formatter keeps PID and timestamp in log lines.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups:
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT (e.g. in containerized deployments) when requested.
  if ENV['RAILS_LOG_TO_STDOUT'].present?
    stdout_logger = ActiveSupport::Logger.new(STDOUT)
    stdout_logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(stdout_logger)
  end

  # Do not dump the schema after running migrations in production.
  config.active_record.dump_schema_after_migration = false
end
Compile assets.
# Production environment configuration. Identical to the standard template
# except that live asset compilation is enabled (see NOTE below).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Fall back to live (on-the-fly) asset compilation when a precompiled asset
# is missing. NOTE(review): this is slow and memory-hungry in production;
# prefer `rake assets:precompile` at deploy time — confirm this fallback is a
# deliberate workaround rather than an oversight.
config.assets.compile = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "beatsbreaksandcrates_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
# Log to STDOUT (e.g. for containerized deployments) when requested via ENV.
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
|
# Copyright (c) 2010, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
require 'spec_helper'
# Integration-style specs for User's query methods (#visible_posts,
# #people_in_aspects, contact lookups, #posts_from). The alice/bob/eve users
# and the connect_users helper come from the fixtures loaded by spec_helper.
describe User do
before do
@alices_aspect = alice.aspects.where(:name => "generic").first
@eves_aspect = eve.aspects.where(:name => "generic").first
@bobs_aspect = bob.aspects.where(:name => "generic").first
end
describe "#visible_posts" do
it "contains your public posts" do
public_post = alice.post(:status_message, :text => "hi", :to => @alices_aspect.id, :public => true)
alice.visible_posts.should include(public_post)
end
it "contains your non-public posts" do
private_post = alice.post(:status_message, :text => "hi", :to => @alices_aspect.id, :public => false)
alice.visible_posts.should include(private_post)
end
it "contains public posts from people you're following" do
dogs = bob.aspects.create(:name => "dogs")
bobs_public_post = bob.post(:status_message, :text => "hello", :public => true, :to => dogs.id)
alice.visible_posts.should include(bobs_public_post)
end
it "contains non-public posts from people who are following you" do
bobs_post = bob.post(:status_message, :text => "hello", :to => @bobs_aspect.id)
alice.visible_posts.should include(bobs_post)
end
it "does not contain non-public posts from aspects you're not in" do
dogs = bob.aspects.create(:name => "dogs")
invisible_post = bob.post(:status_message, :text => "foobar", :to => dogs.id)
alice.visible_posts.should_not include(invisible_post)
end
it "does not contain duplicate posts" do
bobs_other_aspect = bob.aspects.create(:name => "cat people")
bob.add_contact_to_aspect(bob.contact_for(alice.person), bobs_other_aspect)
bob.aspects_with_person(alice.person).should =~ [@bobs_aspect, bobs_other_aspect]
# Posting to two aspects that both contain alice must yield one visible post.
bobs_post = bob.post(:status_message, :text => "hai to all my people", :to => [@bobs_aspect.id, bobs_other_aspect.id])
alice.visible_posts.length.should == 1
alice.visible_posts.should include(bobs_post)
end
context 'with many posts' do
before do
bob.move_contact(eve.person, @bobs_aspect, bob.aspects.create(:name => 'new aspect'))
# Backdate each post by a growing interval so ordering/pagination are
# deterministic (newest posts first).
time_interval = 1000
(1..25).each do |n|
[alice, bob, eve].each do |u|
aspect_to_post = u.aspects.where(:name => "generic").first
post = u.post :status_message, :text => "#{u.username} - #{n}", :to => aspect_to_post.id
post.created_at = post.created_at - time_interval
post.updated_at = post.updated_at - time_interval
post.save
time_interval += 1000
end
end
end
it 'works' do #This is in one spec to save time
bob.visible_posts.length.should == 15 #it returns 15 by default
bob.visible_posts.should == bob.visible_posts(:by_members_of => bob.aspects.map { |a| a.id }) # it is the same when joining through aspects
bob.visible_posts.sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts.map { |p| p.id }.reverse #it is sorted updated_at desc by default
# :limit option
opts = {:limit => 40}
bob.visible_posts(opts).length.should == 40 #it takes a limit
bob.visible_posts(opts).should == bob.visible_posts(opts.merge(:by_members_of => bob.aspects.map { |a| a.id }))
bob.visible_posts(opts).sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts(opts).map { |p| p.id }.reverse
# pagination via a datetime :max_time
last_time_of_last_page = bob.visible_posts.last.updated_at
opts = {:max_time => last_time_of_last_page}
bob.visible_posts(opts).length.should == 15
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(opts.merge(:by_members_of => bob.aspects.map { |a| a.id })).map { |p| p.id }
bob.visible_posts(opts).sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts(opts).map { |p| p.id }.reverse
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(:limit => 40)[15...30].map { |p| p.id } #pagination should return the right posts
# pagination via an integer (epoch) :max_time
opts = {:max_time => last_time_of_last_page.to_i}
bob.visible_posts(opts).length.should == 15
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(opts.merge(:by_members_of => bob.aspects.map { |a| a.id })).map { |p| p.id }
bob.visible_posts(opts).sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts(opts).map { |p| p.id }.reverse
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(:limit => 40)[15...30].map { |p| p.id } #pagination should return the right posts
end
end
context 'with two posts' do
before do
connect_users(eve, @eves_aspect, alice, @alices_aspect)
aspect3 = alice.aspects.create(:name => "Snoozers")
@status_message1 = eve.post :status_message, :text => "hi", :to => @eves_aspect.id
@status_message2 = eve.post :status_message, :text => "hey", :public => true, :to => @eves_aspect.id
@status_message3 = alice.post :status_message, :text => "hey", :public => true, :to => @alices_aspect.id
@status_message4 = eve.post :status_message, :text => "blah", :public => true, :to => @eves_aspect.id
@status_message5 = alice.post :status_message, :text => "secrets", :to => aspect3.id
@pending_status_message = eve.post :status_message, :text => "hey", :public => true, :to => @eves_aspect.id, :pending => true
end
it "queries by person id" do
query = eve.visible_posts.where(:author_id => eve.person.id)
query.include?(@status_message1).should == true
query.include?(@status_message2).should == true
query.include?(@status_message3).should == false
query.include?(@status_message4).should == true
query.include?(@status_message5).should == false
end
it "selects public posts" do
query = eve.visible_posts.where(:public => true)
query.include?(@status_message1).should == false
query.include?(@status_message2).should == true
query.include?(@status_message3).should == true
query.include?(@status_message4).should == true
query.include?(@status_message5).should == false
end
it "selects non public posts" do
query = eve.visible_posts.where(:public => false)
query.include?(@status_message1).should == true
query.include?(@status_message2).should == false
query.include?(@status_message3).should == false
query.include?(@status_message4).should == false
query.include?(@status_message5).should == false
end
it "selects by message contents" do
query = eve.visible_posts.where(:text=> "hi")
query.should == [@status_message1]
end
it "does not return pending posts" do
@pending_status_message.pending.should be_true
eve.visible_posts.should_not include @pending_status_message
end
it 'is not emptied by a load of pending photos' do
15.times {
eve.build_post(:photo, :pending => true, :user_file=> File.open(photo_fixture_name), :to => eve.aspect_ids, :updated_at => Time.now + 1.day).save!
}
query = eve.visible_posts
query.map { |p| p.id }.should =~ [@status_message1, @status_message2, @status_message3, @status_message4].map { |p| p.id }
end
it 'is not emptied by a load of photos' do
15.times {
eve.build_post(:photo, :pending => false, :user_file=> File.open(photo_fixture_name), :to => eve.aspect_ids, :updated_at => Time.now + 1.day).save!
}
query = eve.visible_posts(:type => 'StatusMessage')
query.map { |p| p.id }.should =~ [@status_message1, @status_message2, @status_message3, @status_message4].map { |p| p.id }
end
it '#find_visible_post_by_id' do
eve.find_visible_post_by_id(@status_message1.id).should == @status_message1
eve.find_visible_post_by_id(@status_message5.id).should == nil
end
end
end
context 'with two users' do
describe '#people_in_aspects' do
it 'returns people objects for a users contact in each aspect' do
alice.people_in_aspects([@alices_aspect]).should == [bob.person]
end
it 'returns local/remote people objects for a users contact in each aspect' do
local_user1 = Factory(:user)
local_user2 = Factory(:user)
remote_user = Factory(:user)
asp1 = local_user1.aspects.create(:name => "lol")
asp2 = local_user2.aspects.create(:name => "brb")
asp3 = remote_user.aspects.create(:name => "ttyl")
connect_users(alice, @alices_aspect, local_user1, asp1)
connect_users(alice, @alices_aspect, local_user2, asp2)
connect_users(alice, @alices_aspect, remote_user, asp3)
# Clearing owner_id makes remote_user's person look remote to this pod.
local_person = remote_user.person
local_person.owner_id = nil
local_person.save
local_person.reload
alice.people_in_aspects([@alices_aspect]).count.should == 4
alice.people_in_aspects([@alices_aspect], :type => 'remote').count.should == 1
alice.people_in_aspects([@alices_aspect], :type => 'local').count.should == 3
end
it 'does not return people not connected to user on same pod' do
3.times { Factory(:user) }
alice.people_in_aspects([@alices_aspect]).count.should == 1
end
it "only returns non-pending contacts" do
alice.people_in_aspects([@alices_aspect]).should == [bob.person]
end
it "returns an empty array when passed an aspect the user doesn't own" do
alice.people_in_aspects([@eves_aspect]).should == []
end
end
end
context 'contact querying' do
let(:person_one) { Factory.create :person }
let(:person_two) { Factory.create :person }
let(:person_three) { Factory.create :person }
let(:aspect) { alice.aspects.create(:name => 'heroes') }
describe '#contact_for_person_id' do
it 'returns a contact' do
contact = Contact.create(:user => alice, :person => person_one, :aspects => [aspect])
alice.contacts << contact
alice.contact_for_person_id(person_one.id).should be_true
end
it 'returns the correct contact' do
contact = Contact.create(:user => alice, :person => person_one, :aspects => [aspect])
alice.contacts << contact
contact2 = Contact.create(:user => alice, :person => person_two, :aspects => [aspect])
alice.contacts << contact2
contact3 = Contact.create(:user => alice, :person => person_three, :aspects => [aspect])
alice.contacts << contact3
alice.contact_for_person_id(person_two.id).person.should == person_two
end
it 'returns nil for a non-contact' do
alice.contact_for_person_id(person_one.id).should be_nil
end
it 'returns nil when someone else has contact with the target' do
contact = Contact.create(:user => alice, :person => person_one, :aspects => [aspect])
alice.contacts << contact
eve.contact_for_person_id(person_one.id).should be_nil
end
end
describe '#contact_for' do
it 'takes a person_id and returns a contact' do
alice.should_receive(:contact_for_person_id).with(person_one.id)
alice.contact_for(person_one)
end
it 'returns nil if the input is nil' do
alice.contact_for(nil).should be_nil
end
end
describe '#aspects_with_person' do
before do
@connected_person = bob.person
end
it 'should return the aspects with given contact' do
alice.aspects_with_person(@connected_person).should == [@alices_aspect]
end
it 'returns multiple aspects if the person is there' do
aspect2 = alice.aspects.create(:name => 'second')
contact = alice.contact_for(@connected_person)
alice.add_contact_to_aspect(contact, aspect2)
alice.aspects_with_person(@connected_person).to_set.should == alice.aspects.to_set
end
end
end
describe '#posts_from' do
before do
@user3 = Factory(:user)
@aspect3 = @user3.aspects.create(:name => "bros")
@public_message = @user3.post(:status_message, :text => "hey there", :to => 'all', :public => true)
@private_message = @user3.post(:status_message, :text => "hey there", :to => @aspect3.id)
end
it 'displays public posts for a non-contact' do
alice.posts_from(@user3.person).should include @public_message
end
it 'does not display private posts for a non-contact' do
alice.posts_from(@user3.person).should_not include @private_message
end
it 'displays private and public posts for a non-contact after connecting' do
connect_users(alice, @alices_aspect, @user3, @aspect3)
new_message = @user3.post(:status_message, :text=> "hey there", :to => @aspect3.id)
alice.reload
alice.posts_from(@user3.person).should include @public_message
alice.posts_from(@user3.person).should include new_message
end
it 'displays recent posts first' do
msg3 = @user3.post(:status_message, :text => "hey there", :to => 'all', :public => true)
msg4 = @user3.post(:status_message, :text => "hey there", :to => 'all', :public => true)
msg3.created_at = Time.now+10
msg3.save!
msg4.created_at = Time.now+14
msg4.save!
alice.posts_from(@user3.person).map { |p| p.id }.should == [msg4, msg3, @public_message].map { |p| p.id }
end
end
end
More querying_spec refactoring. #visible_posts now has sufficient test coverage to aggressively refactor.
# Copyright (c) 2010, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
require 'spec_helper'
# Refactored query-method specs for User. Compared to the earlier version the
# pending-post/photo and :type checks are standalone examples, and
# #find_visible_post_by_id has its own describe block.
describe User do
before do
@alices_aspect = alice.aspects.where(:name => "generic").first
@eves_aspect = eve.aspects.where(:name => "generic").first
@bobs_aspect = bob.aspects.where(:name => "generic").first
end
describe "#visible_posts" do
it "contains your public posts" do
public_post = alice.post(:status_message, :text => "hi", :to => @alices_aspect.id, :public => true)
alice.visible_posts.should include(public_post)
end
it "contains your non-public posts" do
private_post = alice.post(:status_message, :text => "hi", :to => @alices_aspect.id, :public => false)
alice.visible_posts.should include(private_post)
end
it "contains public posts from people you're following" do
dogs = bob.aspects.create(:name => "dogs")
bobs_public_post = bob.post(:status_message, :text => "hello", :public => true, :to => dogs.id)
alice.visible_posts.should include(bobs_public_post)
end
it "contains non-public posts from people who are following you" do
bobs_post = bob.post(:status_message, :text => "hello", :to => @bobs_aspect.id)
alice.visible_posts.should include(bobs_post)
end
it "does not contain non-public posts from aspects you're not in" do
dogs = bob.aspects.create(:name => "dogs")
invisible_post = bob.post(:status_message, :text => "foobar", :to => dogs.id)
alice.visible_posts.should_not include(invisible_post)
end
it "does not contain pending posts" do
pending_post = bob.post(:status_message, :text => "hey", :public => true, :to => @bobs_aspect.id, :pending => true)
pending_post.should be_pending
alice.visible_posts.should_not include pending_post
end
it "does not contain pending photos" do
pending_photo = bob.post(:photo, :pending => true, :user_file=> File.open(photo_fixture_name), :to => @bobs_aspect)
alice.visible_posts.should_not include pending_photo
end
it "respects the :type option" do
photo = bob.post(:photo, :pending => false, :user_file=> File.open(photo_fixture_name), :to => @bobs_aspect)
alice.visible_posts.should include(photo)
alice.visible_posts(:type => 'StatusMessage').should_not include(photo)
end
it "does not contain duplicate posts" do
bobs_other_aspect = bob.aspects.create(:name => "cat people")
bob.add_contact_to_aspect(bob.contact_for(alice.person), bobs_other_aspect)
bob.aspects_with_person(alice.person).should =~ [@bobs_aspect, bobs_other_aspect]
# Posting to two aspects that both contain alice must yield one visible post.
bobs_post = bob.post(:status_message, :text => "hai to all my people", :to => [@bobs_aspect.id, bobs_other_aspect.id])
alice.visible_posts.length.should == 1
alice.visible_posts.should include(bobs_post)
end
context 'with many posts' do
before do
bob.move_contact(eve.person, @bobs_aspect, bob.aspects.create(:name => 'new aspect'))
# Backdate each post by a growing interval so ordering/pagination are
# deterministic (newest posts first).
time_interval = 1000
(1..25).each do |n|
[alice, bob, eve].each do |u|
aspect_to_post = u.aspects.where(:name => "generic").first
post = u.post :status_message, :text => "#{u.username} - #{n}", :to => aspect_to_post.id
post.created_at = post.created_at - time_interval
post.updated_at = post.updated_at - time_interval
post.save
time_interval += 1000
end
end
end
it 'works' do # The set up takes a looong time, so to save time we do several tests in one
bob.visible_posts.length.should == 15 #it returns 15 by default
bob.visible_posts.should == bob.visible_posts(:by_members_of => bob.aspects.map { |a| a.id }) # it is the same when joining through aspects
bob.visible_posts.sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts.map { |p| p.id }.reverse #it is sorted updated_at desc by default
# It should respect the limit option
opts = {:limit => 40}
bob.visible_posts(opts).length.should == 40
bob.visible_posts(opts).should == bob.visible_posts(opts.merge(:by_members_of => bob.aspects.map { |a| a.id }))
bob.visible_posts(opts).sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts(opts).map { |p| p.id }.reverse
# It should paginate using a datetime timestamp
last_time_of_last_page = bob.visible_posts.last.updated_at
opts = {:max_time => last_time_of_last_page}
bob.visible_posts(opts).length.should == 15
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(opts.merge(:by_members_of => bob.aspects.map { |a| a.id })).map { |p| p.id }
bob.visible_posts(opts).sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts(opts).map { |p| p.id }.reverse
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(:limit => 40)[15...30].map { |p| p.id } #pagination should return the right posts
# It should paginate using an integer timestamp
opts = {:max_time => last_time_of_last_page.to_i}
bob.visible_posts(opts).length.should == 15
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(opts.merge(:by_members_of => bob.aspects.map { |a| a.id })).map { |p| p.id }
bob.visible_posts(opts).sort_by { |p| p.updated_at }.map { |p| p.id }.should == bob.visible_posts(opts).map { |p| p.id }.reverse
bob.visible_posts(opts).map { |p| p.id }.should == bob.visible_posts(:limit => 40)[15...30].map { |p| p.id } #pagination should return the right posts
end
end
end
describe '#find_visible_post_by_id' do
it "returns a post if you can see it" do
bobs_post = bob.post(:status_message, :text => "hi", :to => @bobs_aspect.id, :public => false)
alice.find_visible_post_by_id(bobs_post.id).should == bobs_post
end
it "returns nil if you can't see that post" do
dogs = bob.aspects.create(:name => "dogs")
invisible_post = bob.post(:status_message, :text => "foobar", :to => dogs.id)
alice.find_visible_post_by_id(invisible_post.id).should be_nil
end
end
context 'with two users' do
describe '#people_in_aspects' do
it 'returns people objects for a users contact in each aspect' do
alice.people_in_aspects([@alices_aspect]).should == [bob.person]
end
it 'returns local/remote people objects for a users contact in each aspect' do
local_user1 = Factory(:user)
local_user2 = Factory(:user)
remote_user = Factory(:user)
asp1 = local_user1.aspects.create(:name => "lol")
asp2 = local_user2.aspects.create(:name => "brb")
asp3 = remote_user.aspects.create(:name => "ttyl")
connect_users(alice, @alices_aspect, local_user1, asp1)
connect_users(alice, @alices_aspect, local_user2, asp2)
connect_users(alice, @alices_aspect, remote_user, asp3)
# Clearing owner_id makes remote_user's person look remote to this pod.
local_person = remote_user.person
local_person.owner_id = nil
local_person.save
local_person.reload
alice.people_in_aspects([@alices_aspect]).count.should == 4
alice.people_in_aspects([@alices_aspect], :type => 'remote').count.should == 1
alice.people_in_aspects([@alices_aspect], :type => 'local').count.should == 3
end
it 'does not return people not connected to user on same pod' do
3.times { Factory(:user) }
alice.people_in_aspects([@alices_aspect]).count.should == 1
end
it "only returns non-pending contacts" do
alice.people_in_aspects([@alices_aspect]).should == [bob.person]
end
it "returns an empty array when passed an aspect the user doesn't own" do
alice.people_in_aspects([@eves_aspect]).should == []
end
end
end
context 'contact querying' do
let(:person_one) { Factory.create :person }
let(:person_two) { Factory.create :person }
let(:person_three) { Factory.create :person }
let(:aspect) { alice.aspects.create(:name => 'heroes') }
describe '#contact_for_person_id' do
it 'returns a contact' do
contact = Contact.create(:user => alice, :person => person_one, :aspects => [aspect])
alice.contacts << contact
alice.contact_for_person_id(person_one.id).should be_true
end
it 'returns the correct contact' do
contact = Contact.create(:user => alice, :person => person_one, :aspects => [aspect])
alice.contacts << contact
contact2 = Contact.create(:user => alice, :person => person_two, :aspects => [aspect])
alice.contacts << contact2
contact3 = Contact.create(:user => alice, :person => person_three, :aspects => [aspect])
alice.contacts << contact3
alice.contact_for_person_id(person_two.id).person.should == person_two
end
it 'returns nil for a non-contact' do
alice.contact_for_person_id(person_one.id).should be_nil
end
it 'returns nil when someone else has contact with the target' do
contact = Contact.create(:user => alice, :person => person_one, :aspects => [aspect])
alice.contacts << contact
eve.contact_for_person_id(person_one.id).should be_nil
end
end
describe '#contact_for' do
it 'takes a person_id and returns a contact' do
alice.should_receive(:contact_for_person_id).with(person_one.id)
alice.contact_for(person_one)
end
it 'returns nil if the input is nil' do
alice.contact_for(nil).should be_nil
end
end
describe '#aspects_with_person' do
before do
@connected_person = bob.person
end
it 'should return the aspects with given contact' do
alice.aspects_with_person(@connected_person).should == [@alices_aspect]
end
it 'returns multiple aspects if the person is there' do
aspect2 = alice.aspects.create(:name => 'second')
contact = alice.contact_for(@connected_person)
alice.add_contact_to_aspect(contact, aspect2)
alice.aspects_with_person(@connected_person).to_set.should == alice.aspects.to_set
end
end
end
describe '#posts_from' do
before do
@user3 = Factory(:user)
@aspect3 = @user3.aspects.create(:name => "bros")
@public_message = @user3.post(:status_message, :text => "hey there", :to => 'all', :public => true)
@private_message = @user3.post(:status_message, :text => "hey there", :to => @aspect3.id)
end
it 'displays public posts for a non-contact' do
alice.posts_from(@user3.person).should include @public_message
end
it 'does not display private posts for a non-contact' do
alice.posts_from(@user3.person).should_not include @private_message
end
it 'displays private and public posts for a non-contact after connecting' do
connect_users(alice, @alices_aspect, @user3, @aspect3)
new_message = @user3.post(:status_message, :text=> "hey there", :to => @aspect3.id)
alice.reload
alice.posts_from(@user3.person).should include @public_message
alice.posts_from(@user3.person).should include new_message
end
it 'displays recent posts first' do
msg3 = @user3.post(:status_message, :text => "hey there", :to => 'all', :public => true)
msg4 = @user3.post(:status_message, :text => "hey there", :to => 'all', :public => true)
msg3.created_at = Time.now+10
msg3.save!
msg4.created_at = Time.now+14
msg4.save!
alice.posts_from(@user3.person).map { |p| p.id }.should == [msg4, msg3, @public_message].map { |p| p.id }
end
end
end
|
# Rack entry point: load the app code from ./lib, surface exceptions as
# rendered pages, and point the app at the blog checkout next to this dir.
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), 'lib'))

require 'rubygems'
require 'pagoda/app'

use Rack::ShowExceptions

Shwedagon::App.set(:blog, '../blog')
run Shwedagon::App.new
Change config.ru to reflect example blog (to be used along with shotgun)
# Rack entry point wired to the example Jekyll blog (handy with shotgun).
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), 'lib'))

require 'rubygems'
require 'pagoda/app'

use Rack::ShowExceptions

Shwedagon::App.set(:blog, '../example-jekyll/')
run Shwedagon::App.new
|
# Production environment configuration for the Railskating app.
Railskating::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
end
Change production config for Heroku static assets
For some reason, serving static assets on Heroku broke randomly.
According to http://stackoverflow.com/a/17082720/111910 these changes
might fix that.
# Production environment configuration for the Railskating app, adjusted so the
# app itself serves (and can live-compile) static assets — per the commit note,
# intended for Heroku where no fronting web server handles /public.
Railskating::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Serve static assets from the app itself (no Apache/nginx on Heroku).
config.serve_static_assets = true
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
config.assets.css_compressor = :sass
# Fall back to live asset-pipeline compilation if a precompiled asset is missed.
config.assets.compile = true
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
end
|
require 'spec_helper'
# Integration-style spec for NationBuilder::Client; HTTP traffic is replayed
# from VCR cassettes, so the tests run offline against recorded responses.
describe NationBuilder::Client do
let(:client) do
# NOTE(review): this token looks like a real API credential committed to the
# repo — it should be revoked and replaced with a dummy value filtered by VCR.
NationBuilder::Client.new('dh',
'6ed3ab9395daf0e3e39098761e85e8e703aa84921c5c1e32637f6984944cf1f2')
end
describe '#endpoints' do
it 'should contain all defined endpoints' do
# Sorted so the expectation is independent of registration order.
client.endpoints.sort.should eq([
:basic_pages,
:blog_posts,
:blogs,
:calendars,
:campaign_data,
:contact_types,
:contacts,
:donations,
:events,
:exports,
:imports,
:lists,
:memberships,
:page_attachments,
:people,
:people_tags,
:sites,
:survey_responses,
:surveys,
:webhooks
])
end
end
describe '#base_url' do
it 'should contain the nation slug' do
client.base_url.should eq('https://dh.nationbuilder.com')
end
end
describe '#call' do
it 'should handle a parametered GET' do
VCR.use_cassette('parametered_get') do
response = client.call(:basic_pages, :index, site_slug: 'dh')
response['results'].each do |result|
result['site_slug'].should eq('dh')
end
end
end
it 'should handle a parametered POST' do
params = {
person: {
email: "bob@example.com",
last_name: "Smith",
first_name: "Bob"
}
}
response = VCR.use_cassette('parametered_post') do
client.call(:people, :create, params)
end
response['person']['first_name'].should eq('Bob')
end
it 'should handle a DELETE' do
# The id must match the person recorded in the 'delete' cassette.
params = {
id: 24,
}
response = VCR.use_cassette('delete') do
client.call(:people, :destroy, params)
end
# A successful destroy returns an empty body.
response.should eq({})
end
end
end
Update spec for pagination
require 'spec_helper'
# Integration-style spec for NationBuilder::Client; HTTP traffic is replayed
# from VCR cassettes, so the tests run offline against recorded responses.
describe NationBuilder::Client do
let(:client) do
# NOTE(review): this token looks like a real API credential committed to the
# repo — it should be revoked and replaced with a dummy value filtered by VCR.
NationBuilder::Client.new('organizeralexandreschmitt',
'07c3200badad9727d29fd2bcde566a211b05e05b92680ca0439504084452db9a')
end
describe '#endpoints' do
it 'should contain all defined endpoints' do
# Sorted so the expectation is independent of registration order.
client.endpoints.sort.should eq([
:basic_pages,
:blog_posts,
:blogs,
:calendars,
:campaign_data,
:contact_types,
:contacts,
:donations,
:events,
:exports,
:imports,
:lists,
:memberships,
:page_attachments,
:people,
:people_tags,
:sites,
:survey_responses,
:surveys,
:webhooks
])
end
end
describe '#base_url' do
it 'should contain the nation slug' do
client.base_url.should eq('https://organizeralexandreschmitt.nationbuilder.com')
end
end
describe '#call' do
it 'should handle a parametered GET' do
VCR.use_cassette('parametered_get') do
response = client.call(:basic_pages, :index, site_slug: 'organizeralexandreschmitt')
response['results'].each do |result|
result['site_slug'].should eq('organizeralexandreschmitt')
end
end
end
it 'should handle a parametered POST' do
params = {
person: {
email: "bob@example.com",
last_name: "Smith",
first_name: "Bob"
}
}
response = VCR.use_cassette('parametered_post') do
client.call(:people, :create, params)
end
response['person']['first_name'].should eq('Bob')
end
it 'should handle a DELETE' do
# The id must match the person recorded in the 'delete' cassette.
params = {
id: 275446,
}
response = VCR.use_cassette('delete') do
client.call(:people, :destroy, params)
end
# A successful destroy returns an empty body.
response.should eq({})
end
end
describe '#pagination' do
before do
# Prime the client's pagination state from a recorded index call.
VCR.use_cassette('parametered_get') do
client.call(:basic_pages, :index, site_slug: 'organizeralexandreschmitt')
end
end
it 'should check for next and prev page link' do
client.next?.should eq(true)
client.prev?.should eq(false)
end
it 'should return next page' do
response = VCR.use_cassette('next') { client.next }
response.should include('results')
end
it 'should return nil if no prev page' do
client.prev.should be_nil
end
end
end
|
#encoding: UTF-8
#
# Rack config.ru for a Twitter bot: listens on the authenticated account's
# user stream and replies to mentions containing an amount plus a "#XXX"
# currency hashtag with the bitcoin conversion, using rates from
# bitcoinaverage.com cached in Redis. The Rack app itself only reports a
# running conversion count.
require "rubygems"
require "tweetstream"
require "em-http-request"
require "httparty"
require "simple_oauth"
require "json"
require "uri"
require "redis"
require "time"
# require the file with the API keys
require "./oauth-keys"
# config oauth
# Twitter access tokens are "<account id>-<random>"; recover the numeric id.
ACCOUNT_ID = OAUTH[:token].split("-").first.to_i
TweetStream.configure do |config|
config.consumer_key = OAUTH[:consumer_key]
config.consumer_secret = OAUTH[:consumer_secret]
config.oauth_token = OAUTH[:token]
config.oauth_token_secret = OAUTH[:token_secret]
config.auth_method = :oauth
end
# redis setup
# NOTE(review): REDIS is presumably defined in ./oauth-keys — confirm.
$redis = Redis.new(REDIS)
# NOTE(review): this resets the conversion counter on every boot; use SETNX
# if the count should survive restarts.
$redis.set("count", 0)
#def fetch_cotations
# response = HTTParty.get(BIT_AVERAGE_URL)
# $COTATIONS[:data] = JSON.parse response.body
# $COTATIONS[:timestamp] = Time.now
#end
# constants
BIT_AVERAGE_URL = "https://api.bitcoinaverage.com/ticker/global/all" #bitcoinaverage.com API endpoint
# variables
twurl = URI.parse("https://api.twitter.com/1.1/statuses/update.json")
bit_regex = /\d+(\.|,)?(\d+)?/ # any money amount | accepts both . or , as separator
currency_regex = /#[A-Z]{3}/ # "#" followed by 3 capital letters
# user stream connection
@client = TweetStream::Client.new
puts "[STARTING] rack..."
# Minimal status endpoint: every HTTP request reports the conversion count.
run lambda { |env| [200, {'Content-Type'=>'text/plain'}, StringIO.new("#{$redis.get("count")} conversions so far")] }
# The stream runs on its own thread so the Rack app above stays responsive.
Thread.new do
puts "[STARTING] bot..."
@client.userstream() do |status|
puts "[NEW TWEET] #{status.text}"
retweet = status.retweet? # true when the status is itself a retweet (skip those)
reply_to_me = status.in_reply_to_user_id == ACCOUNT_ID # checks if tweet mentions the bot
contains_currency = status.text =~ currency_regex # checks if tweet has a hashtag with a currency code
contains_amount = status.text =~ bit_regex # checks if tweets contains a number in it
puts retweet
puts reply_to_me
puts contains_currency
puts contains_amount
# check if stream is not a retweet and is valid
if !retweet && reply_to_me && contains_amount && contains_currency
puts "[PROCESSING] #{status.text}"
bit_amount = status.text[bit_regex].gsub(',', '.').to_f # grabs the amount on the tweet
currency = status.text[currency_regex][1..-1] # takes the "#" out
p bit_amount
p currency
# Blocking quote ("cotation") refresh — runs off the reactor via EM.defer.
operation = proc {
# NOTE(review): defining methods inside the proc re-defines these
# top-level methods on every matching tweet; hoist them out.
def cotations_updated?
# Cached rates are considered fresh for 10 seconds.
# NOTE(review): Redis#exists returns an Integer in redis-rb >= 4.2
# (use exists? there) — this `== true` check is gem-version dependent.
if $redis.exists("timestamp") == true
(Time.now - Time.parse($redis.get("timestamp"))) < 10
else
return false
end
end
if cotations_updated?
puts "COTATIONS ARE UPDATED. No need no fetch them"
else
puts "Will fetch new cotations"
response = HTTParty.get(BIT_AVERAGE_URL)
$redis.set("data", response.body)
$redis.set("timestamp", Time.now)
# puts "Cotations fetched: #{$redis.get("data")}"
puts "Cotations fetched!"
end
# Convert `amount` BTC into `currency`; -1 signals an unknown currency.
def final_amount(amount, currency)
puts "Will compute final_amount"
cotations = JSON.parse($redis.get("data"))
if cotations[currency]
cotations[currency]["last"] * amount
else
-1
end
end
result = final_amount(bit_amount, currency)
result = result.round(2)
puts "Should return #{result}"
# The proc's value is handed to `callback` as this_amount.
result
}
# Runs on the reactor thread once `operation` finishes; posts the reply.
callback = proc { |this_amount|
cotations = JSON.parse($redis.get("data"))
if cotations[currency]
reply = "#{bit_amount} bitcoins in #{currency} is #{this_amount}"
else
reply = "Currency #{currency} not found :("
end
#create the reply tweet
puts reply
tweet = {
"status" => "@#{status.user.screen_name} " + reply,
"in_reply_to_status_id" => status.id.to_s
}
puts tweet
# Hand-rolled OAuth signing for the statuses/update POST.
authorization = SimpleOAuth::Header.new(:post, twurl.to_s, tweet, OAUTH)
http = EventMachine::HttpRequest.new(twurl.to_s).post({
:head => {"Authorization" => authorization},
:body => tweet
})
http.errback {
puts "[ERROR] errback"
}
http.callback {
# NOTE(review): the counter is bumped even on non-200 responses.
$redis.set("count", $redis.get("count").to_i + 1)
puts "[count] = #{$redis.get("count")}"
if http.response_header.status.to_i == 200
puts "[HTTP_OK] #{http.response_header.status}"
else
puts "[HTTP_ERROR] #{http.response_header.status}"
end
}
}
EventMachine.defer(operation, callback)
end
end
end
fix: joining the threads at the end
#encoding: UTF-8
#
# Rack config.ru for a Twitter bot: listens on the authenticated account's
# user stream and replies to mentions containing an amount plus a "#XXX"
# currency hashtag with the bitcoin conversion, using rates from
# bitcoinaverage.com cached in Redis. The Rack app itself only reports a
# running conversion count.
require "rubygems"
require "tweetstream"
require "em-http-request"
require "httparty"
require "simple_oauth"
require "json"
require "uri"
require "redis"
require "time"
# require the file with the API keys
require "./oauth-keys"
# config oauth
# Twitter access tokens are "<account id>-<random>"; recover the numeric id.
ACCOUNT_ID = OAUTH[:token].split("-").first.to_i
TweetStream.configure do |config|
config.consumer_key = OAUTH[:consumer_key]
config.consumer_secret = OAUTH[:consumer_secret]
config.oauth_token = OAUTH[:token]
config.oauth_token_secret = OAUTH[:token_secret]
config.auth_method = :oauth
end
# redis setup
# NOTE(review): REDIS is presumably defined in ./oauth-keys — confirm.
$redis = Redis.new(REDIS)
# NOTE(review): this resets the conversion counter on every boot; use SETNX
# if the count should survive restarts.
$redis.set("count", 0)
#def fetch_cotations
# response = HTTParty.get(BIT_AVERAGE_URL)
# $COTATIONS[:data] = JSON.parse response.body
# $COTATIONS[:timestamp] = Time.now
#end
# constants
BIT_AVERAGE_URL = "https://api.bitcoinaverage.com/ticker/global/all" #bitcoinaverage.com API endpoint
# variables
twurl = URI.parse("https://api.twitter.com/1.1/statuses/update.json")
bit_regex = /\d+(\.|,)?(\d+)?/ # any money amount | accepts both . or , as separator
currency_regex = /#[A-Z]{3}/ # "#" followed by 3 capital letters
# user stream connection
@client = TweetStream::Client.new
puts "[STARTING] rack..."
# Minimal status endpoint: every HTTP request reports the conversion count.
run lambda { |env| [200, {'Content-Type'=>'text/plain'}, StringIO.new("#{$redis.get("count")} conversions so far")] }
# The stream runs on its own thread; see the .join note at the bottom.
Thread.new do
puts "[STARTING] bot..."
@client.userstream() do |status|
puts "[NEW TWEET] #{status.text}"
retweet = status.retweet? # true when the status is itself a retweet (skip those)
reply_to_me = status.in_reply_to_user_id == ACCOUNT_ID # checks if tweet mentions the bot
contains_currency = status.text =~ currency_regex # checks if tweet has a hashtag with a currency code
contains_amount = status.text =~ bit_regex # checks if tweets contains a number in it
puts retweet
puts reply_to_me
puts contains_currency
puts contains_amount
# check if stream is not a retweet and is valid
if !retweet && reply_to_me && contains_amount && contains_currency
puts "[PROCESSING] #{status.text}"
bit_amount = status.text[bit_regex].gsub(',', '.').to_f # grabs the amount on the tweet
currency = status.text[currency_regex][1..-1] # takes the "#" out
p bit_amount
p currency
# Blocking quote ("cotation") refresh — runs off the reactor via EM.defer.
operation = proc {
# NOTE(review): defining methods inside the proc re-defines these
# top-level methods on every matching tweet; hoist them out.
def cotations_updated?
# Cached rates are considered fresh for 10 seconds.
# NOTE(review): Redis#exists returns an Integer in redis-rb >= 4.2
# (use exists? there) — this `== true` check is gem-version dependent.
if $redis.exists("timestamp") == true
(Time.now - Time.parse($redis.get("timestamp"))) < 10
else
return false
end
end
if cotations_updated?
puts "COTATIONS ARE UPDATED. No need no fetch them"
else
puts "Will fetch new cotations"
response = HTTParty.get(BIT_AVERAGE_URL)
$redis.set("data", response.body)
$redis.set("timestamp", Time.now)
# puts "Cotations fetched: #{$redis.get("data")}"
puts "Cotations fetched!"
end
# Convert `amount` BTC into `currency`; -1 signals an unknown currency.
def final_amount(amount, currency)
puts "Will compute final_amount"
cotations = JSON.parse($redis.get("data"))
if cotations[currency]
cotations[currency]["last"] * amount
else
-1
end
end
result = final_amount(bit_amount, currency)
result = result.round(2)
puts "Should return #{result}"
# The proc's value is handed to `callback` as this_amount.
result
}
# Runs on the reactor thread once `operation` finishes; posts the reply.
callback = proc { |this_amount|
cotations = JSON.parse($redis.get("data"))
if cotations[currency]
reply = "#{bit_amount} bitcoins in #{currency} is #{this_amount}"
else
reply = "Currency #{currency} not found :("
end
#create the reply tweet
puts reply
tweet = {
"status" => "@#{status.user.screen_name} " + reply,
"in_reply_to_status_id" => status.id.to_s
}
puts tweet
# Hand-rolled OAuth signing for the statuses/update POST.
authorization = SimpleOAuth::Header.new(:post, twurl.to_s, tweet, OAUTH)
http = EventMachine::HttpRequest.new(twurl.to_s).post({
:head => {"Authorization" => authorization},
:body => tweet
})
http.errback {
puts "[ERROR] errback"
}
http.callback {
# NOTE(review): the counter is bumped even on non-200 responses.
$redis.set("count", $redis.get("count").to_i + 1)
puts "[count] = #{$redis.get("count")}"
if http.response_header.status.to_i == 200
puts "[HTTP_OK] #{http.response_header.status}"
else
puts "[HTTP_ERROR] #{http.response_header.status}"
end
}
}
EventMachine.defer(operation, callback)
end
end
# NOTE(review): joining here blocks evaluation of config.ru until the stream
# thread exits, which may prevent the Rack server from finishing boot —
# confirm the server still serves the counter endpoint with this in place.
end.join
|
# Production environment configuration (Rails 4.2-era defaults).
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Fall back to live asset-pipeline compilation if a precompiled asset is missed.
config.assets.compile = true
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
configuring production environment
# Production environment configuration (Rails 4.2-era defaults), with static
# file serving forced on.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Fall back to live asset-pipeline compilation if a precompiled asset is missed.
config.assets.compile = true
# NOTE(review): serve_static_assets is the deprecated alias of
# serve_static_files set above — this later assignment forces static serving
# on regardless of the ENV flag; keep only one of the two settings.
config.serve_static_assets = true
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
|
require 'object_table'
require 'object_table/grouped'
describe ObjectTable::Grouped do
let(:col1) { ((1..100).to_a + (-100..-1).to_a).shuffle }
let(:col2) { NArray.float(10, 200).random }
let(:table){ ObjectTable.new(col1: col1, col2: col2 ) }
let(:grouped){ ObjectTable::Grouped.new(table){ {pos: col1 > 0} } }
let(:positive) { (table.col1 > 0).where }
let(:negative) { (table.col1 < 0).where }
let(:pos_group) { table.where{|t| positive} }
let(:neg_group) { table.where{|t| negative} }
describe '._generate_name' do
let(:prefix){ 'key_' }
subject{ ObjectTable::Grouped._generate_name(prefix, existing_keys) }
context 'with no matching keys' do
let(:existing_keys){ ['a', 'b', 'c'] }
it 'should suffix the key with 0' do
expect(subject).to eql "key_0"
end
end
context 'with matching keys' do
let(:existing_keys){ ['key_1', 'key_67', 'key_8', 'abcd'] }
it 'should suffix the key with the next available number' do
expect(subject).to eql "key_68"
end
end
end
describe '#initialize' do
context 'when the block takes an argument' do
it 'should not evaluate in the context of the table' do
rspec_context = self
grouped = ObjectTable::Grouped.new(table) do |tbl|
receiver = eval('self', binding)
expect(receiver).to_not be table
expect(receiver).to be rspec_context
{}
end
grouped._groups # call _groups to make it call the block
end
it 'should pass the table into the block' do
grouped = ObjectTable::Grouped.new(table) do |tbl|
expect(tbl).to be table
{}
end
grouped._groups # call _groups to make it call the block
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the table' do
_ = self
grouped = ObjectTable::Grouped.new(table) do
receiver = eval('self', binding)
_.expect(receiver).to _.be _.table
{}
end
grouped._groups # call _groups to make it call the block
end
end
end
context 'with changes to the parent' do
subject{ grouped }
it 'should mirror changes to the parent' do
expect(subject._groups[1]).to eql ({[1] => positive.to_a, [0] => negative.to_a})
table[:col1] = NArray.int(200).fill(2)
table[:col1][0] = -100
expect(subject._groups[1]).to eql ({[1] => (1...200).to_a, [0] => [0]})
end
end
describe '#_groups' do
subject{ grouped._groups }
it 'should return the names' do
expect(subject[0]).to eql [:pos]
end
it 'should return the group key => row mapping' do
groups = subject[1]
expect(groups[[0]]).to eql negative.to_a
expect(groups[[1]]).to eql positive.to_a
end
context 'when grouping by columns' do
let(:table){ ObjectTable.new(key1: [0]*4 + [1]*4, key2: [0, 0, 1, 1]*2, data: 1..8 ) }
let(:grouped){ ObjectTable::Grouped.new(table, :key1, :key2) }
it 'should use the columns as group names' do
expect(subject[0]).to eql [:key1, :key2]
end
it 'should use the columns as groups' do
groups = subject[1]
expect(groups[[0, 0]]).to eql (table.key1.eq(0) & table.key2.eq(0)).where.to_a
expect(groups[[0, 1]]).to eql (table.key1.eq(0) & table.key2.eq(1)).where.to_a
expect(groups[[1, 0]]).to eql (table.key1.eq(1) & table.key2.eq(0)).where.to_a
expect(groups[[1, 1]]).to eql (table.key1.eq(1) & table.key2.eq(1)).where.to_a
end
end
end
describe '#each' do
context 'when the block takes an argument' do
it 'should not evaluate in the context of the group' do
rspec_context = self
grouped.each do |group|
receiver = eval('self', binding)
expect(receiver).to_not be_a ObjectTable::Group
expect(receiver).to be rspec_context
end
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the group' do
_ = self
grouped.each do
receiver = eval('self', binding)
_.expect(receiver).to _.be_a ObjectTable::Group
end
end
end
it 'should yield the groups' do
groups = [pos_group, neg_group]
grouped.each do |group|
expect(groups).to include group
groups -= [group]
end
end
it 'should give access to the keys' do
keys = []
grouped.each{ keys << Hash[@K.each_pair.to_a] }
expect(keys).to match_array [{pos: 0}, {pos: 1}]
end
it 'should give access to the correct key' do
keys = []
correct_keys = []
grouped.each do
keys << [@K.pos]
correct_keys << (col1 > 0).to_a.uniq
end
expect(keys).to match_array(correct_keys)
end
context 'with no block' do
it 'should return an enumerator' do
expect(grouped.each).to be_a Enumerator
end
it 'should enumerate the groups' do
groups = [pos_group, neg_group]
grouped.each.each do |group|
expect(groups).to include group
groups -= [group]
end
end
end
end
describe '#apply' do
subject{ grouped.apply{|group| group.col2.sum} }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should concatenate the results of the block' do
value = [neg_group.col2.sum, pos_group.col2.sum]
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(pos: [0, 1], v_0: value)
end
describe 'value column auto naming' do
it 'should auto name the value column' do
grouped = ObjectTable::Grouped.new(table){{parity: 1}}
result = grouped.apply{|group| group.col1.sum}
expect(result).to have_column :v_0
expect(result.v_0.to_a).to eql [table.col1.sum]
end
it 'should auto name the value column' do
grouped = ObjectTable::Grouped.new(table){{v_0: 1}}
result = grouped.apply{|group| group.col1.sum}
expect(result).to have_column :v_1
expect(result.v_1.to_a).to eql [table.col1.sum]
end
end
context 'with results that are grids' do
subject{ grouped.apply{ @R[sum: col1.sum, mean: col2.mean] } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(
pos: [0, 1],
sum: [neg_group.col1.sum, pos_group.col1.sum],
mean: [neg_group.col2.mean, pos_group.col2.mean],
)
end
end
context 'with results that are tables' do
subject{ grouped.apply{ ObjectTable.new(sum: col1.sum, mean: col2.mean) } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(
pos: [0, 1],
sum: [neg_group.col1.sum, pos_group.col1.sum],
mean: [neg_group.col2.mean, pos_group.col2.mean],
)
end
end
context 'with results that are arrays' do
subject{ grouped.apply{ [col1[0], col1[-1]] } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.v_0).to eq neg_group.col1[[0, -1]]
expect(subject.where{pos.eq 1}.v_0).to eq pos_group.col1[[0, -1]]
end
end
context 'with results that are narrays' do
subject{ grouped.apply{ col2 < 0.5 } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.v_0).to eq (neg_group.col2 < 0.5)
expect(subject.where{pos.eq 1}.v_0).to eq (pos_group.col2 < 0.5)
end
end
context 'when the block takes an argument' do
it 'should not evaluate in the context of the group' do
rspec_context = self
grouped.apply do |group|
receiver = eval('self', binding)
expect(receiver).to_not be_a ObjectTable::Group
expect(receiver).to be rspec_context
nil
end
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the group' do
_ = self
grouped.apply do
receiver = eval('self', binding)
_.expect(receiver).to _.be_a ObjectTable::Group
nil
end
end
end
context 'with a matrix key' do
let(:ngroups) { 10 }
let(:table) do
ObjectTable.new(
key1: 10.times.map{[rand, 'abc']} * ngroups,
key2: 10.times.map{[rand, 'def', 'ghi']} * ngroups,
value: (ngroups*10).times.map{rand},
)
end
let(:grouped) { ObjectTable::Grouped.new(table, :key1, :key2) }
subject{ grouped.apply{|group| group.value.sum} }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :key1
expect(subject.colnames).to include :key2
end
it 'should preserve the dimensions of the keys' do
expect(subject.key1.shape[0...-1]).to eql table.key1.shape[0...-1]
expect(subject.key2.shape[0...-1]).to eql table.key2.shape[0...-1]
end
context 'with vector values' do
subject{ grouped.apply{|group| group.value[0...10]} }
it 'should work' do
expect{subject}.to_not raise_error
end
end
end
context 'on an empty table' do
let(:table) { ObjectTable.new(col1: [], col2: []) }
it 'should return a table with no rows and only key columns' do
expect(subject.nrows).to eql 0
expect(subject.columns.keys).to eql [:pos]
end
end
end
describe '#reduce' do
let(:col2) { (NArray.float(10, 200).random * 100).to_i }
subject{ grouped.reduce{|row| row.R[:col2] += row.col2.sum } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should concatenate the results of the block' do
value = [neg_group.col2.sum, pos_group.col2.sum]
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(pos: [0, 1], col2: value)
end
context 'with results that are narrays' do
subject{ grouped.reduce{|row| row.R[:col2] += row.col2 } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.col2).to eq neg_group.col2.sum(1).reshape(10, 1)
expect(subject.where{pos.eq 1}.col2).to eq pos_group.col2.sum(1).reshape(10, 1)
end
end
context 'with results that are arrays' do
subject{ grouped.reduce(col2: []){ @R[:col2] += col2.to_a } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.col2).to eq neg_group.col2.reshape(1000, 1)
expect(subject.where{pos.eq 1}.col2).to eq pos_group.col2.reshape(1000, 1)
end
end
context 'when the block takes an argument' do
it 'should not evaluate in the context of the group' do
rspec_context = self
grouped.reduce do |group|
receiver = eval('self', binding)
expect(receiver).to_not be_a ObjectTable::Group
expect(receiver).to be rspec_context
nil
end
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the row' do
_ = self
grouped.reduce do
receiver = eval('self', binding)
_.expect(receiver).to _.be_a Struct
nil
end
end
end
describe 'defaults' do
it 'should default to 0' do
grouped.reduce do |row|
expect(row.R[:key]).to eql 0
break
end
end
it 'should enforce defaults' do
grouped.reduce(key: 1) do |row|
expect(row.R[:key]).to eql 1
break
end
end
end
context 'with a matrix key' do
let(:group_count) { 10 }
let(:group_size) { 15 }
let(:table) do
ObjectTable.new(
key1: group_count.times.map{[rand, 'abc']} * group_size,
key2: group_count.times.map{[rand, 'def', 'ghi']} * group_size,
value: (group_size * group_count).times.map{rand},
)
end
let(:grouped) { ObjectTable::Grouped.new(table, :key1, :key2) }
subject{ grouped.reduce{@R[:value] += value} }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :key1
expect(subject.colnames).to include :key2
end
it 'should preserve the dimensions of the keys' do
expect(subject.key1.shape[0...-1]).to eql table.key1.shape[0...-1]
expect(subject.key2.shape[0...-1]).to eql table.key2.shape[0...-1]
end
context 'with vector values' do
subject{ grouped.reduce(value: []){ @R[:value] += [value] } }
it 'should work' do
expect{subject}.to_not raise_error
expect(subject.nrows).to eql group_count
expect(subject.value.shape).to eq [group_size, group_count]
end
end
end
context 'on an empty table' do
let(:table) { ObjectTable.new(col1: [], col2: []) }
it 'should return a table with no rows and only key columns' do
expect(subject.nrows).to eql 0
expect(subject.columns.keys).to eql [:pos]
end
end
end
end
Test that Grouped#reduce raises an error when defaults are badly specified
require 'object_table'
require 'object_table/grouped'
describe ObjectTable::Grouped do
let(:col1) { ((1..100).to_a + (-100..-1).to_a).shuffle }
let(:col2) { NArray.float(10, 200).random }
let(:table){ ObjectTable.new(col1: col1, col2: col2 ) }
let(:grouped){ ObjectTable::Grouped.new(table){ {pos: col1 > 0} } }
let(:positive) { (table.col1 > 0).where }
let(:negative) { (table.col1 < 0).where }
let(:pos_group) { table.where{|t| positive} }
let(:neg_group) { table.where{|t| negative} }
# ._generate_name: derives a fresh column name from a prefix by appending
# an unused integer suffix — 0 when no existing key matches the prefix,
# otherwise one past the highest numeric suffix already present.
describe '._generate_name' do
let(:prefix){ 'key_' }
subject{ ObjectTable::Grouped._generate_name(prefix, existing_keys) }
context 'with no matching keys' do
let(:existing_keys){ ['a', 'b', 'c'] }
it 'should suffix the key with 0' do
expect(subject).to eql "key_0"
end
end
context 'with matching keys' do
# Non-matching entries ('abcd') must be ignored; max suffix here is 67.
let(:existing_keys){ ['key_1', 'key_67', 'key_8', 'abcd'] }
it 'should suffix the key with the next available number' do
expect(subject).to eql "key_68"
end
end
end
# #initialize: the grouping block is stored and only evaluated when _groups
# is called. Block arity selects the binding: a one-argument block runs in
# the caller's context and receives the table; a zero-argument block is
# instance-eval'd against the table itself.
describe '#initialize' do
context 'when the block takes an argument' do
it 'should not evaluate in the context of the table' do
rspec_context = self
grouped = ObjectTable::Grouped.new(table) do |tbl|
receiver = eval('self', binding)
expect(receiver).to_not be table
expect(receiver).to be rspec_context
{}
end
grouped._groups # call _groups to make it call the block
end
it 'should pass the table into the block' do
grouped = ObjectTable::Grouped.new(table) do |tbl|
expect(tbl).to be table
{}
end
grouped._groups # call _groups to make it call the block
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the table' do
# `_` keeps the RSpec context reachable from inside the re-bound block.
_ = self
grouped = ObjectTable::Grouped.new(table) do
receiver = eval('self', binding)
_.expect(receiver).to _.be _.table
{}
end
grouped._groups # call _groups to make it call the block
end
end
end
# Groups are recomputed from the live parent table, so mutating the parent's
# columns changes the key => row-indices mapping returned by _groups.
context 'with changes to the parent' do
subject{ grouped }
it 'should mirror changes to the parent' do
expect(subject._groups[1]).to eql ({[1] => positive.to_a, [0] => negative.to_a})
# After this rewrite only row 0 is negative; all other rows fall in group [1].
table[:col1] = NArray.int(200).fill(2)
table[:col1][0] = -100
expect(subject._groups[1]).to eql ({[1] => (1...200).to_a, [0] => [0]})
end
end
# #_groups returns a pair: [key names, {key tuple => array of row indices}].
describe '#_groups' do
subject{ grouped._groups }
it 'should return the names' do
expect(subject[0]).to eql [:pos]
end
it 'should return the group key => row mapping' do
groups = subject[1]
expect(groups[[0]]).to eql negative.to_a
expect(groups[[1]]).to eql positive.to_a
end
context 'when grouping by columns' do
# Grouping by column names (no block): 2x2 key combinations over 8 rows.
let(:table){ ObjectTable.new(key1: [0]*4 + [1]*4, key2: [0, 0, 1, 1]*2, data: 1..8 ) }
let(:grouped){ ObjectTable::Grouped.new(table, :key1, :key2) }
it 'should use the columns as group names' do
expect(subject[0]).to eql [:key1, :key2]
end
it 'should use the columns as groups' do
groups = subject[1]
expect(groups[[0, 0]]).to eql (table.key1.eq(0) & table.key2.eq(0)).where.to_a
expect(groups[[0, 1]]).to eql (table.key1.eq(0) & table.key2.eq(1)).where.to_a
expect(groups[[1, 0]]).to eql (table.key1.eq(1) & table.key2.eq(0)).where.to_a
expect(groups[[1, 1]]).to eql (table.key1.eq(1) & table.key2.eq(1)).where.to_a
end
end
end
# #each yields one sub-table (ObjectTable::Group) per key combination.
# Same arity convention as #apply/#reduce: zero-arity blocks run inside the
# group; the group keys are exposed to the block via @K. With no block,
# #each returns an Enumerator.
describe '#each' do
context 'when the block takes an argument' do
it 'should not evaluate in the context of the group' do
rspec_context = self
grouped.each do |group|
receiver = eval('self', binding)
expect(receiver).to_not be_a ObjectTable::Group
expect(receiver).to be rspec_context
end
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the group' do
_ = self
grouped.each do
receiver = eval('self', binding)
_.expect(receiver).to _.be_a ObjectTable::Group
end
end
end
it 'should yield the groups' do
# Remove each yielded group from the expected set; order is unspecified.
groups = [pos_group, neg_group]
grouped.each do |group|
expect(groups).to include group
groups -= [group]
end
end
it 'should give access to the keys' do
keys = []
grouped.each{ keys << Hash[@K.each_pair.to_a] }
expect(keys).to match_array [{pos: 0}, {pos: 1}]
end
it 'should give access to the correct key' do
keys = []
correct_keys = []
grouped.each do
keys << [@K.pos]
# Inside the group, col1 is the group's slice, so (col1 > 0) is uniform.
correct_keys << (col1 > 0).to_a.uniq
end
expect(keys).to match_array(correct_keys)
end
context 'with no block' do
it 'should return an enumerator' do
expect(grouped.each).to be_a Enumerator
end
it 'should enumerate the groups' do
groups = [pos_group, neg_group]
grouped.each.each do |group|
expect(groups).to include group
groups -= [group]
end
end
end
end
# Grouped#apply: evaluates the block once per group and stacks the results
# (scalars, arrays, narrays, grids via @R[...], or whole ObjectTables) into
# one table carrying the group-key columns. Unnamed result columns are
# auto-named v_0, v_1, ... via _generate_name.
describe '#apply' do
subject{ grouped.apply{|group| group.col2.sum} }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should concatenate the results of the block' do
value = [neg_group.col2.sum, pos_group.col2.sum]
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(pos: [0, 1], v_0: value)
end
describe 'value column auto naming' do
# NOTE(review): both examples below share the same description; consider
# distinguishing them (e.g. "... when v_0 is free" / "... when v_0 is taken").
it 'should auto name the value column' do
grouped = ObjectTable::Grouped.new(table){{parity: 1}}
result = grouped.apply{|group| group.col1.sum}
expect(result).to have_column :v_0
expect(result.v_0.to_a).to eql [table.col1.sum]
end
it 'should auto name the value column' do
# v_0 is taken by a key column, so the value column becomes v_1.
grouped = ObjectTable::Grouped.new(table){{v_0: 1}}
result = grouped.apply{|group| group.col1.sum}
expect(result).to have_column :v_1
expect(result.v_1.to_a).to eql [table.col1.sum]
end
end
context 'with results that are grids' do
subject{ grouped.apply{ @R[sum: col1.sum, mean: col2.mean] } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(
pos: [0, 1],
sum: [neg_group.col1.sum, pos_group.col1.sum],
mean: [neg_group.col2.mean, pos_group.col2.mean],
)
end
end
context 'with results that are tables' do
subject{ grouped.apply{ ObjectTable.new(sum: col1.sum, mean: col2.mean) } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(
pos: [0, 1],
sum: [neg_group.col1.sum, pos_group.col1.sum],
mean: [neg_group.col2.mean, pos_group.col2.mean],
)
end
end
context 'with results that are arrays' do
subject{ grouped.apply{ [col1[0], col1[-1]] } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.v_0).to eq neg_group.col1[[0, -1]]
expect(subject.where{pos.eq 1}.v_0).to eq pos_group.col1[[0, -1]]
end
end
context 'with results that are narrays' do
subject{ grouped.apply{ col2 < 0.5 } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.v_0).to eq (neg_group.col2 < 0.5)
expect(subject.where{pos.eq 1}.v_0).to eq (pos_group.col2 < 0.5)
end
end
context 'when the block takes an argument' do
it 'should not evaluate in the context of the group' do
rspec_context = self
grouped.apply do |group|
receiver = eval('self', binding)
expect(receiver).to_not be_a ObjectTable::Group
expect(receiver).to be rspec_context
nil
end
end
end
context 'when the block takes no arguments' do
it 'should call the block in the context of the group' do
_ = self
grouped.apply do
receiver = eval('self', binding)
_.expect(receiver).to _.be_a ObjectTable::Group
nil
end
end
end
context 'with a matrix key' do
# Keys are themselves multi-dimensional (each cell is [rand, 'abc'] etc.);
# only the last axis (rows) may change in the result.
let(:ngroups) { 10 }
let(:table) do
ObjectTable.new(
key1: 10.times.map{[rand, 'abc']} * ngroups,
key2: 10.times.map{[rand, 'def', 'ghi']} * ngroups,
value: (ngroups*10).times.map{rand},
)
end
let(:grouped) { ObjectTable::Grouped.new(table, :key1, :key2) }
subject{ grouped.apply{|group| group.value.sum} }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :key1
expect(subject.colnames).to include :key2
end
it 'should preserve the dimensions of the keys' do
expect(subject.key1.shape[0...-1]).to eql table.key1.shape[0...-1]
expect(subject.key2.shape[0...-1]).to eql table.key2.shape[0...-1]
end
context 'with vector values' do
subject{ grouped.apply{|group| group.value[0...10]} }
it 'should work' do
expect{subject}.to_not raise_error
end
end
end
context 'on an empty table' do
let(:table) { ObjectTable.new(col1: [], col2: []) }
it 'should return a table with no rows and only key columns' do
expect(subject.nrows).to eql 0
expect(subject.columns.keys).to eql [:pos]
end
end
end
# Grouped#reduce: per-group accumulation into a row-local store `row.R` / `@R`.
# Accumulator slots default to 0 unless overridden via the options hash
# (e.g. reduce(col2: [])). Block arity controls the evaluation context:
# with an argument the block runs in the caller's binding; with no argument
# it is instance-eval'd against the row (a Struct-like object).
describe '#reduce' do
let(:col2) { (NArray.float(10, 200).random * 100).to_i }
subject{ grouped.reduce{|row| row.R[:col2] += row.col2.sum } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should concatenate the results of the block' do
value = [neg_group.col2.sum, pos_group.col2.sum]
expect(subject.sort_by(subject.pos)).to eql ObjectTable.new(pos: [0, 1], col2: value)
end
context 'with results that are narrays' do
subject{ grouped.reduce{|row| row.R[:col2] += row.col2 } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
# Elementwise accumulation over rows reduces the second axis, hence sum(1).
expect(subject.where{pos.eq 0}.col2).to eq neg_group.col2.sum(1).reshape(10, 1)
expect(subject.where{pos.eq 1}.col2).to eq pos_group.col2.sum(1).reshape(10, 1)
end
end
context 'with results that are arrays' do
# Explicit default of [] so += concatenates arrays instead of adding to 0.
subject{ grouped.reduce(col2: []){ @R[:col2] += col2.to_a } }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :pos
end
it 'should stack the grids' do
expect(subject.where{pos.eq 0}.col2).to eq neg_group.col2.reshape(1000, 1)
expect(subject.where{pos.eq 1}.col2).to eq pos_group.col2.reshape(1000, 1)
end
end
context 'when the block takes an argument' do
it 'should not evaluate in the context of the group' do
rspec_context = self
grouped.reduce do |group|
receiver = eval('self', binding)
expect(receiver).to_not be_a ObjectTable::Group
expect(receiver).to be rspec_context
nil
end
end
end
context 'when the block takes no arguments' do
# `_` keeps a handle on the RSpec context, since `self` inside the
# zero-arity block is the row object, not the example.
it 'should call the block in the context of the row' do
_ = self
grouped.reduce do
receiver = eval('self', binding)
_.expect(receiver).to _.be_a Struct
nil
end
end
end
describe 'defaults' do
it 'should default to 0' do
grouped.reduce do |row|
expect(row.R[:key]).to eql 0
break
end
end
it 'should enforce defaults' do
grouped.reduce(key: 1) do |row|
expect(row.R[:key]).to eql 1
break
end
end
it 'should fail if defaults are badly specified' do
# NOTE(review): bare raise_error matches any StandardError; pinning the
# expected error class would make this stricter — confirm which is raised.
expect{ grouped.reduce(123){} }.to raise_error
end
end
context 'with a matrix key' do
let(:group_count) { 10 }
let(:group_size) { 15 }
let(:table) do
ObjectTable.new(
key1: group_count.times.map{[rand, 'abc']} * group_size,
key2: group_count.times.map{[rand, 'def', 'ghi']} * group_size,
value: (group_size * group_count).times.map{rand},
)
end
let(:grouped) { ObjectTable::Grouped.new(table, :key1, :key2) }
subject{ grouped.reduce{@R[:value] += value} }
it 'should return a table with the group keys' do
expect(subject).to be_a ObjectTable
expect(subject.colnames).to include :key1
expect(subject.colnames).to include :key2
end
it 'should preserve the dimensions of the keys' do
expect(subject.key1.shape[0...-1]).to eql table.key1.shape[0...-1]
expect(subject.key2.shape[0...-1]).to eql table.key2.shape[0...-1]
end
context 'with vector values' do
subject{ grouped.reduce(value: []){ @R[:value] += [value] } }
it 'should work' do
expect{subject}.to_not raise_error
expect(subject.nrows).to eql group_count
expect(subject.value.shape).to eq [group_size, group_count]
end
end
end
context 'on an empty table' do
let(:table) { ObjectTable.new(col1: [], col2: []) }
it 'should return a table with no rows and only key columns' do
expect(subject.nrows).to eql 0
expect(subject.columns.keys).to eql [:pos]
end
end
end
end
|
# Rack entry point: load the Sinatra app from hello.rb and serve it.
require './hello'
run Sinatra::Application
Fix config.ru to require './main' instead of './hello'
# Rack entry point: load the Sinatra app from main.rb and serve it.
require './main'
run Sinatra::Application
# Production environment settings for Loomio. Loaded once at boot; values
# here override config/application.rb. Mail (SMTP) and avatar storage (S3
# via fog/Paperclip) are configured entirely from environment variables.
# NOTE(review): confirm SMTP_* / AWS_* / FOG_HOST are set in deployment.
Loomio::Application.configure do
config.action_dispatch.tld_length = (ENV['TLD_LENGTH'] || 1).to_i
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
config.static_cache_control = 'public, max-age=31536000'
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
#config.assets.css_compressor = :sass
config.assets.js_compressor = :uglifier
# Generate digests for assets URLs
config.assets.digest = true
config.eager_load = true
config.action_dispatch.x_sendfile_header = nil
# Defaults to Rails.root.join("public/assets")
# config.assets.manifest = YOUR_PATH
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Use a different logger for distributed setups
# config.logger = SyslogLogger.new
# Use a different cache store in production
# config.cache_store = :mem_cache_store
config.cache_store = :dalli_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
config.action_mailer.perform_deliveries = true
# Send emails using SMTP service
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
:address => ENV['SMTP_SERVER'],
:port => ENV['SMTP_PORT'],
:authentication => :plain,
:user_name => ENV['SMTP_USERNAME'],
:password => ENV['SMTP_PASSWORD'],
:domain => ENV['SMTP_DOMAIN']
}
config.action_mailer.raise_delivery_errors = true
# Store avatars on Amazon S3
config.paperclip_defaults = {
:storage => :fog,
:fog_credentials => {
:provider => 'AWS',
:aws_access_key_id => ENV['AWS_ACCESS_KEY_ID'],
:aws_secret_access_key => ENV['AWS_SECRET_ACCESS_KEY']
},
:fog_directory => ENV['AWS_UPLOADS_BUCKET'],
:fog_public => true,
:fog_host => ENV['FOG_HOST']
}
end
Enable threaded mode (config.threadsafe!) in the production config
# Production environment settings for Loomio, with threaded mode enabled
# (config.threadsafe! below). Loaded once at boot; values here override
# config/application.rb. Mail (SMTP) and avatar storage (S3 via fog/
# Paperclip) are configured entirely from environment variables.
# NOTE(review): confirm SMTP_* / AWS_* / FOG_HOST are set in deployment.
Loomio::Application.configure do
config.action_dispatch.tld_length = (ENV['TLD_LENGTH'] || 1).to_i
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
config.static_cache_control = 'public, max-age=31536000'
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
#config.assets.css_compressor = :sass
config.assets.js_compressor = :uglifier
# Generate digests for assets URLs
config.assets.digest = true
config.eager_load = true
config.action_dispatch.x_sendfile_header = nil
# Defaults to Rails.root.join("public/assets")
# config.assets.manifest = YOUR_PATH
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Use a different logger for distributed setups
# config.logger = SyslogLogger.new
# Use a different cache store in production
# config.cache_store = :mem_cache_store
config.cache_store = :dalli_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
config.threadsafe!
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
config.action_mailer.perform_deliveries = true
# Send emails using SMTP service
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
:address => ENV['SMTP_SERVER'],
:port => ENV['SMTP_PORT'],
:authentication => :plain,
:user_name => ENV['SMTP_USERNAME'],
:password => ENV['SMTP_PASSWORD'],
:domain => ENV['SMTP_DOMAIN']
}
config.action_mailer.raise_delivery_errors = true
# Store avatars on Amazon S3
config.paperclip_defaults = {
:storage => :fog,
:fog_credentials => {
:provider => 'AWS',
:aws_access_key_id => ENV['AWS_ACCESS_KEY_ID'],
:aws_secret_access_key => ENV['AWS_SECRET_ACCESS_KEY']
},
:fog_directory => ENV['AWS_UPLOADS_BUCKET'],
:fog_public => true,
:fog_host => ENV['FOG_HOST']
}
end
|
# This file is used by Rack-based servers to start the application.
# Boots the full Rails environment, then hands the app to the Rack server.
require ::File.expand_path('../config/environment', __FILE__)
run Gramgoods::Application
Add gzip response compression (Rack::Deflater) to config.ru
# This file is used by Rack-based servers to start the application.
# Boots the full Rails environment, then hands the app to the Rack server.
require ::File.expand_path('../config/environment', __FILE__)
# Gzip responses for clients that accept it (Accept-Encoding: gzip).
use Rack::Deflater
run Gramgoods::Application
|
# Production environment settings for RailsApp. Loaded once at boot;
# values here override config/application.rb.
RailsApp::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
# config.active_record.auto_explain_threshold_in_seconds = 0.5
end
Card origin_cartridge_177 - Make rails-example log to stdout by default
# Production environment settings for RailsApp. Loaded once at boot;
# values here override config/application.rb. Logs go to STDOUT so the
# hosting platform can capture them.
RailsApp::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
config.logger = Logger.new(STDOUT)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
# config.active_record.auto_explain_threshold_in_seconds = 0.5
end
|
require "spec_helper"
# Specs for the NATS-backed protobuf RPC client: timeout configuration via
# PB_NATS_CLIENT_* environment variables, subscription-key caching, the
# two-response (ACK + payload) request protocol, and retry behaviour of
# #send_request. Uses a FakeNatsClient test double in place of a broker.
describe ::Protobuf::Nats::Client do
# Stub service constant; only its identity matters for the options hash.
# NOTE(review): defining it here leaks the constant to the global namespace.
class ExampleServiceClass; end
let(:service) { ExampleServiceClass }
let(:method) { :created }
let(:options) {
{
:service => service,
:method => method
}
}
subject { described_class.new(options) }
describe "#ack_timeout" do
it "can be set via the PB_NATS_CLIENT_ACK_TIMEOUT environment variable" do
# Set, assert, then delete so the override cannot leak into other examples.
::ENV["PB_NATS_CLIENT_ACK_TIMEOUT"] = "1000"
expect(subject.ack_timeout).to eq(1_000)
::ENV.delete("PB_NATS_CLIENT_ACK_TIMEOUT")
end
it "has a default value" do
expect(subject.ack_timeout).to eq(5)
end
end
describe "#reconnect_delay" do
it "can be set via the PB_NATS_CLIENT_RECONNECT_DELAY environment variable" do
::ENV["PB_NATS_CLIENT_RECONNECT_DELAY"] = "1000"
expect(subject.reconnect_delay).to eq(1_000)
::ENV.delete("PB_NATS_CLIENT_RECONNECT_DELAY")
end
it "defaults to the ack_timeout" do
expect(subject.reconnect_delay).to eq(subject.ack_timeout)
end
end
describe "#response_timeout" do
it "can be set via the PB_NATS_CLIENT_RESPONSE_TIMEOUT environment variable" do
::ENV["PB_NATS_CLIENT_RESPONSE_TIMEOUT"] = "1000"
expect(subject.response_timeout).to eq(1_000)
::ENV.delete("PB_NATS_CLIENT_RESPONSE_TIMEOUT")
end
it "has a default value" do
expect(subject.response_timeout).to eq(60)
end
end
describe "#cached_subscription_key" do
it "caches the instance of a subscription key" do
# Reset the class-level cache so this example observes a fresh memoization.
::Protobuf::Nats::Client.instance_variable_set(:@subscription_key_cache, nil)
id = subject.cached_subscription_key.__id__
expect(subject.cached_subscription_key.__id__).to eq(id)
end
end
describe "#nats_request_with_two_responses" do
let(:client) { ::FakeNatsClient.new(:inbox => inbox) }
let(:inbox) { "INBOX_123" }
let(:msg_subject) { "rpc.yolo.brolo" }
let(:ack) { ::Protobuf::Nats::Messages::ACK }
let(:response) { "final count down" }
before do
allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
end
it "processes a request and return the final response" do
# Happy path: ACK arrives first, then the payload.
client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05),
::FakeNatsClient::Message.new(inbox, response, 0.1)])
server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
expect(server_response).to eq(response)
end
it "raises an error when the ack is not signaled" do
client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05)])
options = {:ack_timeout => 0.1, :timeout => 0.2}
expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
end
it "can send messages out of order and still complete" do
# Payload before ACK must still succeed.
client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05),
::FakeNatsClient::Message.new(inbox, ack, 0.1)])
server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
expect(server_response).to eq(response)
end
it "raises an error when the ack is signaled but pb response is not" do
client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05)])
options = {:timeout => 0.1}
expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
end
end
describe "#send_request" do
it "retries 3 times when and raises a NATS timeout" do
expect(subject).to receive(:setup_connection).exactly(3).times
expect(subject).to receive(:nats_request_with_two_responses).and_raise(::NATS::IO::Timeout).exactly(3).times
expect(subject).to receive(:complete).exactly(1).times
expect { subject.send_request }.to raise_error(::NATS::IO::Timeout)
end
it "waits the reconnect_delay duration when the nats connection is reconnecting" do
error = ::Protobuf::Nats::Errors::IOException.new
client = ::FakeNatsClient.new
allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
allow(client).to receive(:publish).and_raise(error)
allow(subject).to receive(:setup_connection)
# Short delay keeps the three retry sleeps fast in the test run.
expect(subject).to receive(:reconnect_delay).and_return(0.01).exactly(3).times
expect { subject.send_request }.to raise_error(error)
end
end
end
Fix test to not explicitly call complete
# Specs for ::Protobuf::Nats::Client: ENV-driven timeout configuration,
# subscription-key caching, and the two-phase NATS request flow in which the
# server first ACKs and then sends the protobuf response.
require "spec_helper"

describe ::Protobuf::Nats::Client do
  # Stand-in service; only its identity is used in the client options.
  class ExampleServiceClass; end

  let(:service) { ExampleServiceClass }
  let(:method) { :created }
  let(:options) {
    {
      :service => service,
      :method => method
    }
  }

  subject { described_class.new(options) }

  describe "#ack_timeout" do
    it "can be set via the PB_NATS_CLIENT_ACK_TIMEOUT environment variable" do
      # NOTE(review): if the expectation fails, the ENV key is never deleted
      # and leaks into later examples — consider begin/ensure cleanup.
      ::ENV["PB_NATS_CLIENT_ACK_TIMEOUT"] = "1000"
      expect(subject.ack_timeout).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_ACK_TIMEOUT")
    end

    it "has a default value" do
      expect(subject.ack_timeout).to eq(5)
    end
  end

  describe "#reconnect_delay" do
    it "can be set via the PB_NATS_CLIENT_RECONNECT_DELAY environment variable" do
      # NOTE(review): same ENV-leak-on-failure caveat as above.
      ::ENV["PB_NATS_CLIENT_RECONNECT_DELAY"] = "1000"
      expect(subject.reconnect_delay).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_RECONNECT_DELAY")
    end

    it "defaults to the ack_timeout" do
      expect(subject.reconnect_delay).to eq(subject.ack_timeout)
    end
  end

  describe "#response_timeout" do
    it "can be set via the PB_NATS_CLIENT_RESPONSE_TIMEOUT environment variable" do
      ::ENV["PB_NATS_CLIENT_RESPONSE_TIMEOUT"] = "1000"
      expect(subject.response_timeout).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_RESPONSE_TIMEOUT")
    end

    it "has a default value" do
      expect(subject.response_timeout).to eq(60)
    end
  end

  describe "#cached_subscription_key" do
    it "caches the instance of a subscription key" do
      # Reset the class-level cache so this example is order-independent.
      ::Protobuf::Nats::Client.instance_variable_set(:@subscription_key_cache, nil)
      id = subject.cached_subscription_key.__id__
      expect(subject.cached_subscription_key.__id__).to eq(id)
    end
  end

  describe "#nats_request_with_two_responses" do
    let(:client) { ::FakeNatsClient.new(:inbox => inbox) }
    let(:inbox) { "INBOX_123" }
    let(:msg_subject) { "rpc.yolo.brolo" }
    let(:ack) { ::Protobuf::Nats::Messages::ACK }
    let(:response) { "final count down" }

    before do
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
    end

    # Happy path: ACK arrives at 50ms, response at 100ms.
    it "processes a request and return the final response" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05),
                                ::FakeNatsClient::Message.new(inbox, response, 0.1)])
      server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
      expect(server_response).to eq(response)
    end

    it "raises an error when the ack is not signaled" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05)])
      options = {:ack_timeout => 0.1, :timeout => 0.2}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
    end

    # NATS does not guarantee delivery order; response-before-ACK must work.
    it "can send messages out of order and still complete" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05),
                                ::FakeNatsClient::Message.new(inbox, ack, 0.1)])
      server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
      expect(server_response).to eq(response)
    end

    it "raises an error when the ack is signaled but pb response is not" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05)])
      options = {:timeout => 0.1}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
    end
  end

  describe "#send_request" do
    it "retries 3 times when and raises a NATS timeout" do
      expect(subject).to receive(:setup_connection).exactly(3).times
      expect(subject).to receive(:nats_request_with_two_responses).and_raise(::NATS::IO::Timeout).exactly(3).times
      expect { subject.send_request }.to raise_error(::NATS::IO::Timeout)
    end

    it "waits the reconnect_delay duration when the nats connection is reconnecting" do
      error = ::Protobuf::Nats::Errors::IOException.new
      client = ::FakeNatsClient.new
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
      allow(client).to receive(:publish).and_raise(error)
      allow(subject).to receive(:setup_connection)
      # Three publish failures -> three reconnect waits before re-raising.
      expect(subject).to receive(:reconnect_delay).and_return(0.01).exactly(3).times
      expect { subject.send_request }.to raise_error(error)
    end
  end
end
|
Openfoodnetwork::Application.configure do
  # Production overrides for config/application.rb.

  # Cache application classes between requests; changes need a restart.
  config.cache_classes = true

  # Users see generic error pages; controller-level caching is enabled.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # The front-end web server (Apache/nginx) serves static assets.
  config.serve_static_assets = false

  # Minify JavaScript and CSS through the asset pipeline.
  config.assets.compress = true

  # A missing precompiled asset is an error; never compile on demand.
  config.assets.compile = false

  # Fingerprint asset URLs for long-lived caching.
  config.assets.digest = true

  # Inactive options, kept for reference:
  # config.assets.manifest = YOUR_PATH
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
  # config.force_ssl = true
  # config.log_level = :debug
  # config.logger = SyslogLogger.new
  # config.cache_store = :mem_cache_store
  # config.action_controller.asset_host = "http://assets.example.com"
  # config.assets.precompile += %w( search.js )
  # config.action_mailer.raise_delivery_errors = false
  # config.threadsafe!

  # Missing translations fall back to I18n.default_locale.
  config.i18n.fallbacks = true

  # Route deprecation warnings to registered notification listeners.
  config.active_support.deprecation = :notify

  # Site-wide SSL via middleware (disabled):
  # config.middleware.insert_before ActionDispatch::Static, "Rack::SSL"
end
Turn off name mangling to make DI more natural in angular - https://shellycloud.com/blog/2013/10/how-to-integrate-angularjs-with-rails-4
Openfoodnetwork::Application.configure do
  # Production overrides for config/application.rb.

  # Cache application classes between requests; changes need a restart.
  config.cache_classes = true

  # Users see generic error pages; controller-level caching is enabled.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # The front-end web server (Apache/nginx) serves static assets.
  config.serve_static_assets = false

  # Minify JavaScript and CSS through the asset pipeline.
  config.assets.compress = true

  # A missing precompiled asset is an error; never compile on demand.
  config.assets.compile = false

  # Fingerprint asset URLs for long-lived caching.
  config.assets.digest = true

  # Inactive options, kept for reference:
  # config.assets.manifest = YOUR_PATH
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
  # config.force_ssl = true
  # config.log_level = :debug
  # config.logger = SyslogLogger.new
  # config.cache_store = :mem_cache_store
  # config.action_controller.asset_host = "http://assets.example.com"
  # config.assets.precompile += %w( search.js )

  # Uglify without name mangling so AngularJS dependency injection, which
  # relies on function argument names, keeps working after minification.
  config.assets.js_compressor = Uglifier.new(mangle: false)

  # More inactive options:
  # config.action_mailer.raise_delivery_errors = false
  # config.threadsafe!

  # Missing translations fall back to I18n.default_locale.
  config.i18n.fallbacks = true

  # Route deprecation warnings to registered notification listeners.
  config.active_support.deprecation = :notify

  # Site-wide SSL via middleware (disabled):
  # config.middleware.insert_before ActionDispatch::Static, "Rack::SSL"
end
|
Scoutz::Application.configure do
  # Production overrides for config/application.rb.

  # Code is cached between requests; restart to pick up changes.
  config.cache_classes = true

  # Generic error pages for users; controller caching enabled.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Static assets are the web server's responsibility.
  config.serve_static_assets = false

  # Minify JavaScript and CSS.
  config.assets.compress = true

  # Never compile assets on demand in production.
  config.assets.compile = false

  # Fingerprint asset URLs so they can be cached aggressively.
  config.assets.digest = true

  # Inactive options, kept for reference:
  # config.assets.manifest = YOUR_PATH
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # SSL is not enforced at the application layer.
  config.force_ssl = false

  # More inactive options:
  # config.log_level = :debug
  # config.log_tags = [ :subdomain, :uuid ]
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # config.cache_store = :mem_cache_store
  # config.action_controller.asset_host = "http://assets.example.com"
  # config.assets.precompile += %w( search.js )
  # config.action_mailer.raise_delivery_errors = false
  # config.threadsafe!

  # Missing translations fall back to I18n.default_locale.
  config.i18n.fallbacks = true

  # Route deprecation warnings to registered notification listeners.
  config.active_support.deprecation = :notify

  # Slow-query plan logging (SQLite, MySQL, PostgreSQL) — disabled:
  # config.active_record.auto_explain_threshold_in_seconds = 0.5
end
Enable the Apache X-Sendfile header for serving files
Scoutz::Application.configure do
  # Production overrides for config/application.rb.

  # Code is cached between requests; restart to pick up changes.
  config.cache_classes = true

  # Generic error pages for users; controller caching enabled.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Static assets are the web server's responsibility.
  config.serve_static_assets = false

  # Minify JavaScript and CSS.
  config.assets.compress = true

  # Never compile assets on demand in production.
  config.assets.compile = false

  # Fingerprint asset URLs so they can be cached aggressively.
  config.assets.digest = true

  # config.assets.manifest = YOUR_PATH

  # Hand file downloads off to Apache via the X-Sendfile header.
  config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # SSL is not enforced at the application layer.
  config.force_ssl = false

  # Inactive options, kept for reference:
  # config.log_level = :debug
  # config.log_tags = [ :subdomain, :uuid ]
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # config.cache_store = :mem_cache_store
  # config.action_controller.asset_host = "http://assets.example.com"
  # config.assets.precompile += %w( search.js )
  # config.action_mailer.raise_delivery_errors = false
  # config.threadsafe!

  # Missing translations fall back to I18n.default_locale.
  config.i18n.fallbacks = true

  # Route deprecation warnings to registered notification listeners.
  config.active_support.deprecation = :notify

  # Slow-query plan logging (SQLite, MySQL, PostgreSQL) — disabled:
  # config.active_record.auto_explain_threshold_in_seconds = 0.5
end
|
# Protect the whole application with HTTP Basic authentication. The
# middleware is only installed when both credential variables are present,
# so unconfigured environments are unaffected.
if ENV.key?("APP_USERNAME") && ENV.key?("APP_PASSWORD")
  Vowessly::Application.config.middleware.insert_after 'Rack::Lock', 'Rack::Auth::Basic' do |username, password|
    # Grant access only on an exact match of both credentials.
    [username, password] == [ENV["APP_USERNAME"], ENV["APP_PASSWORD"]]
  end
end
Add a supervisor username and password
# Protect the application with HTTP Basic authentication. Two credential
# pairs are accepted: the primary APP_USERNAME/APP_PASSWORD pair and, when
# configured, a supervisor pair (APP_SUPERVISOR_USERNAME/APP_SUPERVISOR_PASSWORD).
if ENV.key?("APP_USERNAME") && ENV.key?("APP_PASSWORD")
  Vowessly::Application.config.middleware.insert_after 'Rack::Lock', 'Rack::Auth::Basic' do |username, password|
    primary = username == ENV["APP_USERNAME"] && password == ENV["APP_PASSWORD"]
    # Only consult the supervisor pair when both variables are actually set,
    # mirroring the guard on the primary pair above, so an unset pair can
    # never be matched.
    supervisor = ENV.key?("APP_SUPERVISOR_USERNAME") && ENV.key?("APP_SUPERVISOR_PASSWORD") &&
                 username == ENV["APP_SUPERVISOR_USERNAME"] && password == ENV["APP_SUPERVISOR_PASSWORD"]
    # NOTE(review): `==` on secrets is not constant-time; consider
    # Rack::Utils.secure_compare if timing attacks are a concern here.
    primary || supervisor
  end
end
|
# Add the raw HTTP request line to LogStasher's custom JSON fields, but only
# when the LogStasher gem is loaded and enabled.
if Object.const_defined?('LogStasher') && LogStasher.enabled
  LogStasher.add_custom_fields do |fields|
    # Produces e.g. "GET /path HTTP/1.1". `request` is not defined in this
    # file — presumably the block is evaluated in controller context where
    # the request object is available; verify against LogStasher's docs.
    fields[:request] = "#{request.request_method} #{request.path} #{request.headers['SERVER_PROTOCOL']}"
  end
end
Clarify @fields added
# Enrich LogStasher's JSON output with the request line and the Varnish
# transaction id — only when the gem is present and enabled.
if Object.const_defined?('LogStasher') && LogStasher.enabled
  LogStasher.add_custom_fields do |custom_fields|
    # Mirror Nginx's request log format, e.g. "GET /path/here HTTP/1.1".
    custom_fields[:request] = "#{request.request_method} #{request.fullpath} #{request.headers['SERVER_PROTOCOL']}"
    # Forward the upstream X-Varnish header so log lines can be correlated
    # with cache transactions.
    custom_fields[:varnish_id] = request.headers['X-Varnish']
  end
end
|
# frozen_string_literal: true
#
# Copyright (C) 2013 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require_dependency "setting"

# Load the marginalia config; the integration is a no-op unless at least one
# comment component is configured.
config = ConfigFile.load("marginalia") || {}

if config[:components].present?
  require "marginalia"
  Marginalia::Railtie.insert

  # Reopen Marginalia to supply Canvas-specific comment components.
  module Marginalia
    module Comment
      class << self
        # Extra context assigned externally (e.g. by the rake wrapper below).
        attr_accessor :migration, :rake_task

        # Current request id, so queries can be tied back to a web request.
        def context_id
          RequestContext::Generator.request_id
        end

        # Tag of the currently running Delayed::Job, if any.
        def job_tag
          Delayed::Worker.current_job&.tag
        end
      end
    end
  end

  Marginalia::Comment.components = config[:components].map(&:to_sym)

  # Record the running rake task name for the duration of its execution,
  # restoring the previous value afterwards (tasks can invoke other tasks).
  module Marginalia::RakeTask
    def execute(args = nil)
      previous, Marginalia::Comment.rake_task = Marginalia::Comment.rake_task, name
      super
    ensure
      Marginalia::Comment.rake_task = previous
    end
  end
  Rake::Task.prepend(Marginalia::RakeTask)
end
prepend marginalia comments to prevent truncation
Change-Id: I2b0e4610031bc843ae585e9f16297a8685203a61
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/287850
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Bobby Buten <5d364bfede2bfc09ec31d2e6ed3a4d111d37200d@instructure.com>
QA-Review: Aaron Ogata <11ccf682c06d1508642f943a3962a1c3d00b8e44@instructure.com>
Product-Review: Aaron Ogata <11ccf682c06d1508642f943a3962a1c3d00b8e44@instructure.com>
# frozen_string_literal: true
#
# Copyright (C) 2013 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require_dependency "setting"

# Load the marginalia config; the integration is a no-op unless at least one
# comment component is configured.
config = ConfigFile.load("marginalia") || {}

if config[:components].present?
  require "marginalia"
  Marginalia::Railtie.insert

  # Reopen Marginalia to supply Canvas-specific comment components.
  module Marginalia
    module Comment
      class << self
        # Extra context assigned externally (e.g. by the rake wrapper below).
        attr_accessor :migration, :rake_task

        # Current request id, so queries can be tied back to a web request.
        def context_id
          RequestContext::Generator.request_id
        end

        # Tag of the currently running Delayed::Job, if any.
        def job_tag
          Delayed::Worker.current_job&.tag
        end
      end
    end
  end

  Marginalia::Comment.components = config[:components].map(&:to_sym)
  # Prepend the marginalia comment instead of appending it, so that when a
  # long query is truncated in logs it is the SQL tail that is cut off, not
  # the comment.
  Marginalia::Comment.prepend_comment = true

  # Record the running rake task name for the duration of its execution,
  # restoring the previous value afterwards (tasks can invoke other tasks).
  module Marginalia::RakeTask
    def execute(args = nil)
      previous, Marginalia::Comment.rake_task = Marginalia::Comment.rake_task, name
      super
    ensure
      Marginalia::Comment.rake_task = previous
    end
  end
  Rake::Task.prepend(Marginalia::RakeTask)
end
|
require 'spec_helper'
require 'bosh/dev/stemcell_rake_methods'

module Bosh::Dev
  # Specs for the rake helper that assembles stemcell build options and
  # drives the stemcell_builder shell scripts.
  describe StemcellRakeMethods do
    let(:env) { {} }
    let(:shell) { instance_double('Bosh::Dev::Shell') }
    let(:stemcell_rake_methods) { StemcellRakeMethods.new(env, shell) }

    describe '#default_options' do
      let(:default_disk_size) { 2048 }

      context 'it is not given an infrastructure' do
        it 'dies' do
          STDERR.should_receive(:puts).with('Please specify target infrastructure (vsphere, aws, openstack)')
          stemcell_rake_methods.should_receive(:exit).with(1).and_raise(SystemExit)

          expect {
            stemcell_rake_methods.default_options({})
          }.to raise_error(SystemExit)
        end
      end

      context 'it is given an unknown infrastructure' do
        it 'dies' do
          expect {
            stemcell_rake_methods.default_options(infrastructure: 'fake')
          }.to raise_error(RuntimeError, /Unknown infrastructure: fake/)
        end
      end

      shared_examples_for 'setting default stemcells environment values' do
        let(:env) do
          {
            'OVFTOOL' => 'fake_ovf_tool_path',
            'STEMCELL_HYPERVISOR' => 'fake_stemcell_hypervisor',
            'STEMCELL_NAME' => 'fake_stemcell_name',
            'UBUNTU_ISO' => 'fake_ubuntu_iso',
            'UBUNTU_MIRROR' => 'fake_ubuntu_mirror',
            'TW_LOCAL_PASSPHRASE' => 'fake_tripwire_local_passphrase',
            'TW_SITE_PASSPHRASE' => 'fake_tripwire_site_passphrase',
            'RUBY_BIN' => 'fake_ruby_bin',
          }
        end

        it 'sets default values for options based in hash' do
          result = stemcell_rake_methods.default_options(infrastructure: infrastructure)

          expect(result['system_parameters_infrastructure']).to eq(infrastructure)
          expect(result['stemcell_name']).to eq('fake_stemcell_name')
          expect(result['stemcell_infrastructure']).to eq(infrastructure)
          expect(result['stemcell_hypervisor']).to eq('fake_stemcell_hypervisor')
          expect(result['bosh_protocol_version']).to eq('1')
          expect(result['UBUNTU_ISO']).to eq('fake_ubuntu_iso')
          expect(result['UBUNTU_MIRROR']).to eq('fake_ubuntu_mirror')
          expect(result['TW_LOCAL_PASSPHRASE']).to eq('fake_tripwire_local_passphrase')
          expect(result['TW_SITE_PASSPHRASE']).to eq('fake_tripwire_site_passphrase')
          expect(result['ruby_bin']).to eq('fake_ruby_bin')
          expect(result['bosh_release_src_dir']).to match(%r{/release/src/bosh})
          expect(result['bosh_agent_src_dir']).to match(/bosh_agent/)
          expect(result['image_create_disk_size']).to eq(default_disk_size)
        end

        # FIX: typo 'arugment' -> 'argument'; the previously empty example
        # passed silently — mark it pending until an assertion is written.
        context 'when stemcell_tgz is passed in as an argument' do
          it 'includes stemcell_tgz' do
            pending('assertion not yet implemented')
          end
        end

        context 'when STEMCELL_NAME is not set' do
          let(:env) do
            {
              'OVFTOOL' => 'fake_ovf_tool_path',
              'STEMCELL_HYPERVISOR' => 'fake_stemcell_hypervisor',
              'UBUNTU_ISO' => 'fake_ubuntu_iso',
              'UBUNTU_MIRROR' => 'fake_ubuntu_mirror',
              'TW_LOCAL_PASSPHRASE' => 'fake_tripwire_local_passphrase',
              'TW_SITE_PASSPHRASE' => 'fake_tripwire_site_passphrase',
              'RUBY_BIN' => 'fake_ruby_bin',
            }
          end

          it "defaults to 'bosh-stemcell'" do
            result = stemcell_rake_methods.default_options(infrastructure: infrastructure)

            # FIX: removed stray space in `eq ('bosh-stemcell')`.
            expect(result['stemcell_name']).to eq('bosh-stemcell')
          end
        end

        context 'when RUBY_BIN is not set' do
          let(:env) do
            {
              'OVFTOOL' => 'fake_ovf_tool_path',
              'STEMCELL_HYPERVISOR' => 'fake_stemcell_hypervisor',
              'STEMCELL_NAME' => 'fake_stemcell_name',
              'UBUNTU_ISO' => 'fake_ubuntu_iso',
              'UBUNTU_MIRROR' => 'fake_ubuntu_mirror',
              'TW_LOCAL_PASSPHRASE' => 'fake_tripwire_local_passphrase',
              'TW_SITE_PASSPHRASE' => 'fake_tripwire_site_passphrase',
            }
          end

          before do
            RbConfig::CONFIG.stub(:[]).with('bindir').and_return('/a/path/to/')
            RbConfig::CONFIG.stub(:[]).with('ruby_install_name').and_return('ruby')
          end

          it 'uses the RbConfig values' do
            result = stemcell_rake_methods.default_options(infrastructure: infrastructure)

            expect(result['ruby_bin']).to eq('/a/path/to/ruby')
          end
        end

        it 'sets the disk_size to 2048MB unless the user requests otherwise' do
          result = stemcell_rake_methods.default_options(infrastructure: infrastructure)

          expect(result['image_create_disk_size']).to eq(default_disk_size)
        end

        it 'allows user to override default disk_size' do
          result = stemcell_rake_methods.default_options(infrastructure: infrastructure, disk_size: 1234)

          expect(result['image_create_disk_size']).to eq(1234)
        end
      end

      context 'it is given an infrastructure' do
        # FIX: typo 'infrastruture' -> 'infrastructure' in the aws and
        # vsphere context descriptions below.
        context 'when infrastructure is aws' do
          let(:infrastructure) { 'aws' }

          it_behaves_like 'setting default stemcells environment values'

          context 'when STEMCELL_HYPERVISOR is not set' do
            it 'uses "xen"' do
              result = stemcell_rake_methods.default_options(infrastructure: infrastructure)
              expect(result['stemcell_hypervisor']).to eq('xen')
            end
          end
        end

        context 'when infrastructure is vsphere' do
          let(:infrastructure) { 'vsphere' }

          it_behaves_like 'setting default stemcells environment values'

          context 'when STEMCELL_HYPERVISOR is not set' do
            let(:env) { { 'OVFTOOL' => 'fake_ovf_tool_path' } }

            it 'uses "esxi"' do
              result = stemcell_rake_methods.default_options(infrastructure: infrastructure)
              expect(result['stemcell_hypervisor']).to eq('esxi')
            end
          end

          context 'if you have OVFTOOL set in the environment' do
            let(:env) { { 'OVFTOOL' => 'fake_ovf_tool_path' } }

            it 'sets image_vsphere_ovf_ovftool_path' do
              result = stemcell_rake_methods.default_options(infrastructure: 'vsphere')
              expect(result['image_vsphere_ovf_ovftool_path']).to eq('fake_ovf_tool_path')
            end
          end
        end

        context 'when infrastructure is openstack' do
          let(:infrastructure) { 'openstack' }
          let(:default_disk_size) { 10240 }

          it_behaves_like 'setting default stemcells environment values'

          context 'when STEMCELL_HYPERVISOR is not set' do
            it 'uses "kvm"' do
              result = stemcell_rake_methods.default_options(infrastructure: infrastructure)
              expect(result['stemcell_hypervisor']).to eq('kvm')
            end
          end

          it 'increases default disk_size from 2048 to 10240 because of the lack of ephemeral disk' do
            result = stemcell_rake_methods.default_options(infrastructure: 'openstack')

            expect(result['image_create_disk_size']).to eq(10240)
          end

          it 'still allows user to force a specific disk_size' do
            result = stemcell_rake_methods.default_options(infrastructure: 'openstack', disk_size: 1234)

            expect(result['image_create_disk_size']).to eq(1234)
          end
        end
      end
    end

    describe '#bosh_micro_options' do
      let(:manifest) { 'fake_manifest' }
      let(:tarball) { 'fake_tarball' }
      let(:bosh_micro_options) { stemcell_rake_methods.bosh_micro_options(manifest, tarball) }

      it 'returns a valid hash' do
        expect(bosh_micro_options[:bosh_micro_enabled]).to eq('yes')
        expect(bosh_micro_options[:bosh_micro_package_compiler_path]).to match(/\bpackage_compiler\b/)
        expect(bosh_micro_options[:bosh_micro_manifest_yml_path]).to eq('fake_manifest')
        expect(bosh_micro_options[:bosh_micro_release_tgz_path]).to eq('fake_tarball')
      end
    end

    describe '#build' do
      include FakeFS::SpecHelpers

      let(:pid) { 99999 }
      let(:root_dir) { "/var/tmp/bosh/bosh_agent-#{Bosh::Agent::VERSION}-#{pid}" }
      let(:build_dir) { File.join(root_dir, 'build') }
      let(:work_dir) { File.join(root_dir, 'work') }
      let(:etc_dir) { File.join(build_dir, 'etc') }
      let(:settings_file) { File.join(etc_dir, 'settings.bash') }
      let(:spec_file) { File.join(build_dir, 'spec', "#{spec}.spec") }
      let(:build_script) { File.join(build_dir, 'bin', 'build_from_spec.sh') }

      let(:spec) { 'dave' }
      let(:options) { { 'hello' => 'world' } }

      before do
        shell.stub(:run)
        stemcell_rake_methods.stub(:puts)
        Process.stub(pid: pid)

        # Copying the builder sources is stubbed; simulate the side effect the
        # code under test relies on (etc/settings.bash existing afterwards).
        FileUtils.stub(:cp_r).with([], build_dir, preserve: true) do
          FileUtils.mkdir_p etc_dir
          FileUtils.touch settings_file
        end
      end

      it 'creates a base directory for stemcell creation' do
        expect {
          stemcell_rake_methods.build(spec, options)
        }.to change { Dir.exists?(root_dir) }.from(false).to(true)
      end

      it 'creates a build directory for stemcell creation' do
        expect {
          stemcell_rake_methods.build(spec, options)
        }.to change { Dir.exists?(build_dir) }.from(false).to(true)
      end

      it 'copies the stemcell_builder code into the build directory' do
        FileUtils.should_receive(:cp_r).with([], build_dir, preserve: true) do
          FileUtils.mkdir_p etc_dir
          FileUtils.touch File.join(etc_dir, 'settings.bash')
        end

        stemcell_rake_methods.build(spec, options)
      end

      it 'creates a work directory for stemcell creation chroot' do
        expect {
          stemcell_rake_methods.build(spec, options)
        }.to change { Dir.exists?(work_dir) }.from(false).to(true)
      end

      context 'when the user sets their own WORK_PATH' do
        let(:env) { { 'WORK_PATH' => '/aight' } }

        it 'creates a work directory for stemcell creation chroot' do
          expect {
            stemcell_rake_methods.build(spec, options)
          }.to change { Dir.exists?('/aight') }.from(false).to(true)
        end
      end

      it 'writes a settings file into the build directory' do
        stemcell_rake_methods.build(spec, options)

        expect(File.read(settings_file)).to match(/hello=world/)
      end

      context 'when the user does not set proxy environment variables' do
        it 'runs the stemcell builder with no environment variables set' do
          shell.should_receive(:run).with("sudo env #{build_script} #{work_dir} #{spec_file} #{settings_file}")

          stemcell_rake_methods.build(spec, options)
        end
      end

      # FIX: typo 'uses sets' -> 'user sets' in the two contexts below.
      context 'when the user sets proxy environment variables' do
        let(:env) { { 'HTTP_PROXY' => 'nice_proxy', 'no_proxy' => 'naughty_proxy' } }

        it 'maintains current user proxy env vars through the shell sudo call' do
          shell.should_receive(:run).with("sudo env HTTP_PROXY='nice_proxy' no_proxy='naughty_proxy' #{build_script} #{work_dir} #{spec_file} #{settings_file}")

          stemcell_rake_methods.build(spec, options)
        end
      end

      context 'when the user sets a BUILD_PATH environment variable' do
        let(:root_dir) { 'TEST_ROOT_DIR' }
        let(:env) { { 'BUILD_PATH' => root_dir } }

        it 'passes through BUILD_PATH environment variables correctly' do
          shell.should_receive(:run).with("sudo env #{build_script} #{work_dir} #{spec_file} #{settings_file}")

          stemcell_rake_methods.build(spec, options)
        end
      end
    end
  end
end
Tidying tests to use args and removing duplicate tests
require 'spec_helper'
require 'bosh/dev/stemcell_rake_methods'
module Bosh::Dev
describe StemcellRakeMethods do
let(:env) { {} }
let(:shell) { instance_double('Bosh::Dev::Shell') }
let(:stemcell_rake_methods) { StemcellRakeMethods.new(env, shell) }
let(:infrastructure) { 'aws' }
let(:args) do
{
infrastructure: infrastructure
}
end
describe '#default_options' do
let(:default_disk_size) { 2048 }
context 'it is not given an infrastructure' do
let(:args) { {} }
it 'dies' do
STDERR.should_receive(:puts).with('Please specify target infrastructure (vsphere, aws, openstack)')
stemcell_rake_methods.should_receive(:exit).with(1).and_raise(SystemExit)
expect {
stemcell_rake_methods.default_options(args)
}.to raise_error(SystemExit)
end
end
context 'it is given an unknown infrastructure' do
let(:infrastructure) { 'fake' }
it 'dies' do
expect {
stemcell_rake_methods.default_options(args)
}.to raise_error(RuntimeError, /Unknown infrastructure: fake/)
end
end
shared_examples_for 'setting default stemcells environment values' do
let(:env) do
{
'OVFTOOL' => 'fake_ovf_tool_path',
'STEMCELL_HYPERVISOR' => 'fake_stemcell_hypervisor',
'STEMCELL_NAME' => 'fake_stemcell_name',
'UBUNTU_ISO' => 'fake_ubuntu_iso',
'UBUNTU_MIRROR' => 'fake_ubuntu_mirror',
'TW_LOCAL_PASSPHRASE' => 'fake_tripwire_local_passphrase',
'TW_SITE_PASSPHRASE' => 'fake_tripwire_site_passphrase',
'RUBY_BIN' => 'fake_ruby_bin',
}
end
it 'sets default values for options based in hash' do
result = stemcell_rake_methods.default_options(args)
expect(result['system_parameters_infrastructure']).to eq(infrastructure)
expect(result['stemcell_name']).to eq('fake_stemcell_name')
expect(result['stemcell_infrastructure']).to eq(infrastructure)
expect(result['stemcell_hypervisor']).to eq('fake_stemcell_hypervisor')
expect(result['bosh_protocol_version']).to eq('1')
expect(result['UBUNTU_ISO']).to eq('fake_ubuntu_iso')
expect(result['UBUNTU_MIRROR']).to eq('fake_ubuntu_mirror')
expect(result['TW_LOCAL_PASSPHRASE']).to eq('fake_tripwire_local_passphrase')
expect(result['TW_SITE_PASSPHRASE']).to eq('fake_tripwire_site_passphrase')
expect(result['ruby_bin']).to eq('fake_ruby_bin')
expect(result['bosh_release_src_dir']).to match(%r{/release/src/bosh})
expect(result['bosh_agent_src_dir']).to match(/bosh_agent/)
expect(result['image_create_disk_size']).to eq(default_disk_size)
end
context 'when STEMCELL_NAME is not set' do
let(:env) do
{
'OVFTOOL' => 'fake_ovf_tool_path',
'STEMCELL_HYPERVISOR' => 'fake_stemcell_hypervisor',
'UBUNTU_ISO' => 'fake_ubuntu_iso',
'UBUNTU_MIRROR' => 'fake_ubuntu_mirror',
'TW_LOCAL_PASSPHRASE' => 'fake_tripwire_local_passphrase',
'TW_SITE_PASSPHRASE' => 'fake_tripwire_site_passphrase',
'RUBY_BIN' => 'fake_ruby_bin',
}
end
it "defaults to 'bosh-stemcell'" do
result = stemcell_rake_methods.default_options(args)
expect(result['stemcell_name']).to eq ('bosh-stemcell')
end
end
context 'when RUBY_BIN is not set' do
let(:env) do
{
'OVFTOOL' => 'fake_ovf_tool_path',
'STEMCELL_HYPERVISOR' => 'fake_stemcell_hypervisor',
'STEMCELL_NAME' => 'fake_stemcell_name',
'UBUNTU_ISO' => 'fake_ubuntu_iso',
'UBUNTU_MIRROR' => 'fake_ubuntu_mirror',
'TW_LOCAL_PASSPHRASE' => 'fake_tripwire_local_passphrase',
'TW_SITE_PASSPHRASE' => 'fake_tripwire_site_passphrase',
}
end
before do
RbConfig::CONFIG.stub(:[]).with('bindir').and_return('/a/path/to/')
RbConfig::CONFIG.stub(:[]).with('ruby_install_name').and_return('ruby')
end
it 'uses the RbConfig values' do
result = stemcell_rake_methods.default_options(args)
expect(result['ruby_bin']).to eq('/a/path/to/ruby')
end
end
context 'when disk_size is not passed' do
it 'defaults to default disk size for infrastructure' do
result = stemcell_rake_methods.default_options(args)
expect(result['image_create_disk_size']).to eq(default_disk_size)
end
end
context 'when disk_size is passed' do
let(:args) do
{
infrastructure: infrastructure,
disk_size: 1234
}
end
it 'allows user to override default disk_size' do
result = stemcell_rake_methods.default_options(args)
expect(result['image_create_disk_size']).to eq(1234)
end
end
end
context 'it is given an infrastructure' do
context 'when infrastruture is aws' do
let(:infrastructure) { 'aws' }
it_behaves_like 'setting default stemcells environment values'
context 'when STEMCELL_HYPERVISOR is not set' do
it 'uses "xen"' do
result = stemcell_rake_methods.default_options(args)
expect(result['stemcell_hypervisor']).to eq('xen')
end
end
end
context 'when infrastruture is vsphere' do
let(:infrastructure) { 'vsphere' }
it_behaves_like 'setting default stemcells environment values'
context 'when STEMCELL_HYPERVISOR is not set' do
let(:env) { { 'OVFTOOL' => 'fake_ovf_tool_path' } }
it 'uses "esxi"' do
result = stemcell_rake_methods.default_options(args)
expect(result['stemcell_hypervisor']).to eq('esxi')
end
end
context 'if you have OVFTOOL set in the environment' do
let(:env) { { 'OVFTOOL' => 'fake_ovf_tool_path' } }
it 'sets image_vsphere_ovf_ovftool_path' do
result = stemcell_rake_methods.default_options(args)
expect(result['image_vsphere_ovf_ovftool_path']).to eq('fake_ovf_tool_path')
end
end
end
context 'when infrastructure is openstack' do
let(:infrastructure) { 'openstack' }
let(:default_disk_size) { 10240 }
it_behaves_like 'setting default stemcells environment values'
context 'when STEMCELL_HYPERVISOR is not set' do
it 'uses "kvm"' do
result = stemcell_rake_methods.default_options(args)
expect(result['stemcell_hypervisor']).to eq('kvm')
end
end
end
end
end
describe '#bosh_micro_options' do
let(:manifest) { 'fake_manifest' }
let(:tarball) { 'fake_tarball' }
let(:bosh_micro_options) { stemcell_rake_methods.bosh_micro_options(manifest, tarball) }
it 'returns a valid hash' do
expect(bosh_micro_options[:bosh_micro_enabled]).to eq('yes')
expect(bosh_micro_options[:bosh_micro_package_compiler_path]).to match(/\bpackage_compiler\b/)
expect(bosh_micro_options[:bosh_micro_manifest_yml_path]).to eq('fake_manifest')
expect(bosh_micro_options[:bosh_micro_release_tgz_path]).to eq('fake_tarball')
end
end
describe '#build' do
include FakeFS::SpecHelpers
let(:pid) { 99999 }
let(:root_dir) { "/var/tmp/bosh/bosh_agent-#{Bosh::Agent::VERSION}-#{pid}" }
let(:build_dir) { File.join(root_dir, 'build') }
let(:work_dir) { File.join(root_dir, 'work') }
let(:etc_dir) { File.join(build_dir, 'etc') }
let(:settings_file) { File.join(etc_dir, 'settings.bash') }
let(:spec_file) { File.join(build_dir, 'spec', "#{spec}.spec") }
let(:build_script) { File.join(build_dir, 'bin', 'build_from_spec.sh') }
let(:spec) { 'dave' }
let(:options) { { 'hello' => 'world' } }
before do
shell.stub(:run)
stemcell_rake_methods.stub(:puts)
Process.stub(pid: pid)
FileUtils.stub(:cp_r).with([], build_dir, preserve: true) do
FileUtils.mkdir_p etc_dir
FileUtils.touch settings_file
end
end
it 'creates a base directory for stemcell creation' do
expect {
stemcell_rake_methods.build(spec, options)
}.to change { Dir.exists?(root_dir) }.from(false).to(true)
end
it 'creates a build directory for stemcell creation' do
expect {
stemcell_rake_methods.build(spec, options)
}.to change { Dir.exists?(build_dir) }.from(false).to(true)
end
it 'copies the stemcell_builder code into the build directory' do
FileUtils.should_receive(:cp_r).with([], build_dir, preserve: true) do
FileUtils.mkdir_p etc_dir
FileUtils.touch File.join(etc_dir, 'settings.bash')
end
stemcell_rake_methods.build(spec, options)
end
it 'creates a work directory for stemcell creation chroot' do
expect {
stemcell_rake_methods.build(spec, options)
}.to change { Dir.exists?(work_dir) }.from(false).to(true)
end
context 'when the user sets their own WORK_PATH' do
let(:env) { { 'WORK_PATH' => '/aight' } }
it 'creates a work directory for stemcell creation chroot' do
expect {
stemcell_rake_methods.build(spec, options)
}.to change { Dir.exists?('/aight') }.from(false).to(true)
end
end
it 'writes a settings file into the build directory' do
stemcell_rake_methods.build(spec, options)
expect(File.read(settings_file)).to match(/hello=world/)
end
context 'when the user does not set proxy environment variables' do
it 'runs the stemcell builder with no environment variables set' do
shell.should_receive(:run).with("sudo env #{build_script} #{work_dir} #{spec_file} #{settings_file}")
stemcell_rake_methods.build(spec, options)
end
end
context 'when the uses sets proxy environment variables' do
let(:env) { { 'HTTP_PROXY' => 'nice_proxy', 'no_proxy' => 'naughty_proxy' } }
it 'maintains current user proxy env vars through the shell sudo call' do
shell.should_receive(:run).with("sudo env HTTP_PROXY='nice_proxy' no_proxy='naughty_proxy' #{build_script} #{work_dir} #{spec_file} #{settings_file}")
stemcell_rake_methods.build(spec, options)
end
end
context 'when the uses sets a BUILD_PATH environment variable' do
let(:root_dir) { 'TEST_ROOT_DIR' }
let(:env) { { 'BUILD_PATH' => root_dir } }
it 'passes through BUILD_PATH environment variables correctly' do
shell.should_receive(:run).with("sudo env #{build_script} #{work_dir} #{spec_file} #{settings_file}")
stemcell_rake_methods.build(spec, options)
end
end
end
end
end |
# SMTP delivery settings (Gmail, STARTTLS on port 587).
# Credentials are read from the environment so they are never committed to
# source control; the previous revision hardcoded them here.
ActionMailer::Base.smtp_settings = {
  :address              => "smtp.gmail.com",
  :port                 => 587,
  :user_name            => ENV.fetch("SMTP_USER_NAME", "noreply@tmux.me"),
  :password             => ENV["SMTP_PASSWORD"],
  :authentication       => "plain",
  :enable_starttls_auto => true
}
Added mailer setup.
I did this so that I could actually use the mailer for password resets and
eventually notifications.
# SMTP delivery settings (Gmail, STARTTLS on port 587).
# SECURITY: the previous revision committed a live username/password to source
# control; that credential must be rotated. Both values now come from the
# environment and are never stored in the repository.
ActionMailer::Base.smtp_settings = {
  :address              => "smtp.gmail.com",
  :port                 => 587,
  :user_name            => ENV["SMTP_USER_NAME"],
  :password             => ENV["SMTP_PASSWORD"],
  :authentication       => "plain",
  :enable_starttls_auto => true
}
|
# No slugs are reserved by default; the application may override this list.
Sevenpages.reserved_slugs = []
Reserve a few admin-related slugs
Sevenpages.reserved_slugs = ['sevenpages', 'sp-admin', 'admin'] |
#!/usr/bin/env ruby
# Harvests Travis CI build metadata and per-job logs for one GitHub repository.
# Logs land in build_logs/<owner>@<repo>/, build metadata in
# repo-data-travis.json, and failures are appended to an `errors` file.
require 'travis'
require 'net/http'
require 'fileutils' # previously relied on being pulled in transitively
require 'json'      # needed for Array#to_json below

# Downloads every job log for +repo+ ("owner/name") and writes per-build
# metadata to disk. Returns the array of build-metadata hashes, or [] when a
# fatal error occurs.
def get_travis(repo)
  parent_dir = File.join('build_logs', repo.gsub(/\//, '@'))
  save_file = File.join(parent_dir, 'repo-data-travis.json')
  error_file = File.join(parent_dir, 'errors')
  FileUtils.mkdir_p(parent_dir)

  builds = []
  repository = Travis::Repository.find(repo)
  puts "Harvesting Travis build logs for #{repo}"

  repository.each_build do |build|
    build.jobs.each do |job|
      name = File.join(parent_dir, build.number + '_' + build.commit.sha + '_' + job.id.to_s + '.log')
      next if File.exist?(name) # already harvested on a previous run

      begin
        begin
          # Prefer the S3 archive: cheaper than the API and rate-limit free.
          log_url = "http://s3.amazonaws.com/archive.travis-ci.org/jobs/#{job.id}/log.txt"
          log = Net::HTTP.get_response(URI.parse(log_url)).body
        rescue StandardError
          begin
            # Fall back to the Travis API ...
            log = job.log.body
          rescue StandardError
            # ... and retry the archive once more if the API also fails.
            log_url = "http://s3.amazonaws.com/archive.travis-ci.org/jobs/#{job.id}/log.txt"
            log = Net::HTTP.get_response(URI.parse(log_url)).body
          end
        end
        File.open(name, 'w') { |f| f.puts log }
        log = '' # drop the reference so the (possibly huge) body can be GC'd
      rescue StandardError
        error_message = "Could not get log #{name}"
        puts error_message
        File.open(error_file, 'a') { |f| f.puts error_message }
        next
      end
    end

    builds << {
      :build_id => build.id,
      :commit => build.commit.sha,
      :pull_req => build.pull_request_number,
      :branch => build.commit.branch,
      :status => build.state,
      :duration => build.duration,
      :started_at => build.started_at,
      :jobs => build.jobs.map { |x| x.id }
    }
  end

  # Only hashes are ever pushed, so the original select/flatten reduced to a
  # plain nil-compaction.
  builds = builds.compact
  File.open(save_file, 'w') { |f| f.puts builds.to_json }
  builds
rescue StandardError => e
  # Was `rescue Exception`, which also swallowed Ctrl-C and SystemExit.
  error_message = "Error getting Travis builds for #{repo}: #{e.message}"
  puts error_message
  File.open(error_file, 'a') { |f| f.puts error_message }
  []
end

if ARGV[0].nil? || ARGV[1].nil?
  puts 'Missing argument(s)!'
  puts ''
  puts 'usage: travis_harvester.rb owner repo'
  exit(1)
end

owner = ARGV[0]
repo = ARGV[1]

get_travis("#{owner}/#{repo}")
Adds build phases state machine.
#!/usr/bin/env ruby
# Harvests Travis CI build metadata and per-job logs for one GitHub repository.
# Logs land in build_logs/<owner>@<repo>/, build metadata in
# repo-data-travis.json, and failures are appended to an `errors` file.
require 'travis'
require 'net/http'
require 'fileutils' # previously relied on being pulled in transitively
require 'json'      # needed for Array#to_json below

# Downloads every job log for +repo+ ("owner/name") and writes per-build
# metadata to disk. Returns the array of build-metadata hashes, or [] when a
# fatal error occurs.
def get_travis(repo)
  parent_dir = File.join('build_logs', repo.gsub(/\//, '@'))
  save_file = File.join(parent_dir, 'repo-data-travis.json')
  error_file = File.join(parent_dir, 'errors')
  FileUtils.mkdir_p(parent_dir)

  builds = []
  repository = Travis::Repository.find(repo)
  puts "Harvesting Travis build logs for #{repo}"

  repository.each_build do |build|
    build.jobs.each do |job|
      name = File.join(parent_dir, build.number + '_' + build.commit.sha + '_' + job.id.to_s + '.log')
      next if File.exist?(name) # already harvested on a previous run

      begin
        begin
          # Prefer the S3 archive: cheaper than the API and rate-limit free.
          log_url = "http://s3.amazonaws.com/archive.travis-ci.org/jobs/#{job.id}/log.txt"
          log = Net::HTTP.get_response(URI.parse(log_url)).body
        rescue StandardError
          begin
            # Fall back to the Travis API ...
            log = job.log.body
          rescue StandardError
            # ... and retry the archive once more if the API also fails.
            log_url = "http://s3.amazonaws.com/archive.travis-ci.org/jobs/#{job.id}/log.txt"
            log = Net::HTTP.get_response(URI.parse(log_url)).body
          end
        end
        File.open(name, 'w') { |f| f.puts log }
        log = '' # drop the reference so the (possibly huge) body can be GC'd
      rescue StandardError
        error_message = "Could not get log #{name}"
        puts error_message
        File.open(error_file, 'a') { |f| f.puts error_message }
        next
      end
    end

    builds << {
      :build_id => build.id,
      :commit => build.commit.sha,
      :pull_req => build.pull_request_number,
      :branch => build.commit.branch,
      :status => build.state,
      :duration => build.duration,
      :started_at => build.started_at,
      :jobs => build.jobs.map { |x| x.id }
    }
  end

  # TODO(MMB): consider opening the builds file in append mode.
  # Only hashes are ever pushed, so the original select/flatten reduced to a
  # plain nil-compaction.
  builds = builds.compact
  File.open(save_file, 'w') { |f| f.puts builds.to_json }
  builds
rescue StandardError => e
  # Was `rescue Exception`, which also swallowed Ctrl-C and SystemExit.
  error_message = "Error getting Travis builds for #{repo}: #{e.message}"
  puts error_message
  File.open(error_file, 'a') { |f| f.puts error_message }
  []
end

if ARGV[0].nil? || ARGV[1].nil?
  puts 'Missing argument(s)!'
  puts ''
  puts 'usage: travis_harvester.rb owner repo'
  exit(1)
end

owner = ARGV[0]
repo = ARGV[1]

get_travis("#{owner}/#{repo}")
|
#
# REST API Logging Tests
#
describe "Logging" do
  describe "Successful Requests logging" do
    # Swallow unrelated :info lines; each example sets its own expectation.
    before { allow($api_log).to receive(:info) }

    it "logs hashed details about the request" do
      api_basic_authorize collection_action_identifier(:users, :read, :get)

      expect($api_log).to receive(:info).with(a_string_matching(/Request:/)
        .and(matching(%r{:path=>"/api/users"}))
        .and(matching(/:collection=>"users"/))
        .and(matching(/:c_id=>nil/))
        .and(matching(/:subcollection=>nil/))
        .and(matching(/:s_id=>nil/)))

      run_get users_url
    end

    it "logs all hash entries about the request" do
      api_basic_authorize

      # NOTE(review): matches the full inspect output byte-for-byte, so any
      # change to hash ordering or the version string breaks this example.
      expect($api_log).to receive(:info).with(
        a_string_matching(
          'Request: {:method=>:get, :action=>"read", :fullpath=>"/api", :url=>"http://www.example.com/api", ' \
          ':base=>"http://www.example.com", :path=>"/api", :prefix=>"/api", :version=>"3.0.0-pre", ' \
          ':api_prefix=>"http://www.example.com/api", :collection=>nil, :c_suffix=>"", :c_id=>nil, ' \
          ':subcollection=>nil, :s_id=>nil}'
        )
      )

      run_get entrypoint_url
    end

    it "filters password attributes in nested parameters" do
      api_basic_authorize collection_action_identifier(:services, :create)

      expect($api_log).to receive(:info).with(
        a_string_matching(
          'Parameters: {"action"=>"update", "controller"=>"api/services", "format"=>"json", ' \
          '"body"=>{"action"=>"create", "resource"=>{"name"=>"new_service_1", ' \
          '"options"=>{"password"=>"\[FILTERED\]"}}}}'
        )
      )

      run_post(services_url, gen_request(:create, "name" => "new_service_1", "options" => { "password" => "SECRET" }))
    end

    it "logs additional system authentication with miq_token" do
      # Frozen clock keeps the token timestamp deterministic for the matcher.
      Timecop.freeze("2017-01-01 00:00:00 UTC") do
        server_guid = MiqServer.first.guid
        userid = api_config(:user)
        timestamp = Time.now.utc

        miq_token = MiqPassword.encrypt({:server_guid => server_guid, :userid => userid, :timestamp => timestamp}.to_yaml)

        expect($api_log).to receive(:info).with(
          a_string_matching(
            "System Auth: {:x_miq_token=>\"#{Regexp.escape(miq_token)}\", :server_guid=>\"#{server_guid}\", " \
            ":userid=>\"api_user_id\", :timestamp=>2017-01-01 00:00:00 UTC}"
          )
        )
        expect($api_log).to receive(:info).with(
          a_string_matching(
            'Authentication: {:type=>"system", :token=>nil, :x_miq_group=>nil, :user=>"api_user_id"}'
          )
        )

        run_get entrypoint_url, :headers => {Api::HttpHeaders::MIQ_TOKEN => miq_token}
      end
    end
  end
end
Don't match on values since we only originally cared about keys
The performance characteristics of chaining matchers here appear to
be exponential (see https://github.com/rspec/rspec-mocks/issues/1158),
so a single regex is used here instead.
#
# REST API Logging Tests
#
describe "Logging" do
  describe "Successful Requests logging" do
    # Swallow unrelated :info lines; each example sets its own expectation.
    before { allow($api_log).to receive(:info) }

    it "logs hashed details about the request" do
      api_basic_authorize collection_action_identifier(:users, :read, :get)

      expect($api_log).to receive(:info).with(a_string_matching(/Request:/)
        .and(matching(%r{:path=>"/api/users"}))
        .and(matching(/:collection=>"users"/))
        .and(matching(/:c_id=>nil/))
        .and(matching(/:subcollection=>nil/))
        .and(matching(/:s_id=>nil/)))

      run_get users_url
    end

    it "logs all hash entries about the request" do
      api_basic_authorize

      # Only asserts that every expected key appears, in order; values are
      # deliberately not matched (chained matchers were pathologically slow).
      expect($api_log).to receive(:info).with(
        a_string_matching(
          ":method.*:action.*:fullpath.*url.*:base.*:path.*:prefix.*:version.*:api_prefix.*:collection.*:c_suffix.*" \
          ":c_id.*:subcollection.*:s_id"
        )
      )

      run_get entrypoint_url
    end

    it "filters password attributes in nested parameters" do
      api_basic_authorize collection_action_identifier(:services, :create)

      expect($api_log).to receive(:info).with(
        a_string_matching(
          'Parameters: {"action"=>"update", "controller"=>"api/services", "format"=>"json", ' \
          '"body"=>{"action"=>"create", "resource"=>{"name"=>"new_service_1", ' \
          '"options"=>{"password"=>"\[FILTERED\]"}}}}'
        )
      )

      run_post(services_url, gen_request(:create, "name" => "new_service_1", "options" => { "password" => "SECRET" }))
    end

    it "logs additional system authentication with miq_token" do
      # Frozen clock keeps the token timestamp deterministic for the matcher.
      Timecop.freeze("2017-01-01 00:00:00 UTC") do
        server_guid = MiqServer.first.guid
        userid = api_config(:user)
        timestamp = Time.now.utc

        miq_token = MiqPassword.encrypt({:server_guid => server_guid, :userid => userid, :timestamp => timestamp}.to_yaml)

        expect($api_log).to receive(:info).with(
          a_string_matching(
            "System Auth: {:x_miq_token=>\"#{Regexp.escape(miq_token)}\", :server_guid=>\"#{server_guid}\", " \
            ":userid=>\"api_user_id\", :timestamp=>2017-01-01 00:00:00 UTC}"
          )
        )
        expect($api_log).to receive(:info).with(
          a_string_matching(
            'Authentication: {:type=>"system", :token=>nil, :x_miq_group=>nil, :user=>"api_user_id"}'
          )
        )

        run_get entrypoint_url, :headers => {Api::HttpHeaders::MIQ_TOKEN => miq_token}
      end
    end
  end
end
|
require "spec_helper"
require "rspec/core/rake_task"

# Specs for RSpec::Core::RakeTask's generated command line (legacy
# `should`-syntax RSpec). `spec_command` and `files_to_run` are private, hence
# the `__send__` calls throughout.
module RSpec::Core
  describe RakeTask do
    let(:task) { RakeTask.new }

    # Default: pretend no Gemfile exists so bundler is not involved.
    before do
      File.stub(:exist?) { false }
    end

    def with_bundler
      task.skip_bundler = false
      File.stub(:exist?) { true }
      yield
    end

    def with_rcov
      task.rcov = true
      yield
    end

    def spec_command
      task.__send__(:spec_command)
    end

    context "default" do
      it "renders rspec" do
        spec_command.should =~ /^-S rspec/
      end
    end

    context "with bundler" do
      context "with Gemfile" do
        it "renders bundle exec rspec" do
          File.stub(:exist?) { true }
          task.skip_bundler = false
          spec_command.should match(/bundle exec/)
        end
      end

      context "with non-standard Gemfile" do
        it "renders bundle exec rspec" do
          File.stub(:exist?) {|f| f =~ /AltGemfile/}
          task.gemfile = 'AltGemfile'
          task.skip_bundler = false
          spec_command.should match(/bundle exec/)
        end
      end

      context "without Gemfile" do
        # NOTE(review): the description says "renders" but the expectation is
        # should_not — likely a copy/paste slip in the example name.
        it "renders bundle exec rspec" do
          File.stub(:exist?) { false }
          task.skip_bundler = false
          spec_command.should_not match(/bundle exec/)
        end
      end
    end

    context "with rcov" do
      it "renders rcov" do
        with_rcov do
          spec_command.should =~ /^-S rcov/
        end
      end
    end

    context "with bundler and rcov" do
      it "renders bundle exec rcov" do
        with_bundler do
          with_rcov do
            spec_command.should =~ /^-S bundle exec rcov/
          end
        end
      end
    end

    context "with ruby options" do
      it "renders them before -S" do
        task.ruby_opts = "-w"
        spec_command.should =~ /^-w -S rspec/
      end
    end

    context "with rcov_opts" do
      context "with rcov=false (default)" do
        it "does not add the rcov options to the command" do
          task.rcov_opts = '--exclude "mocks"'
          spec_command.should_not =~ /--exclude "mocks"/
        end
      end

      context "with rcov=true" do
        it "renders them after rcov" do
          task.rcov = true
          task.rcov_opts = '--exclude "mocks"'
          spec_command.should =~ /rcov.*--exclude "mocks"/
        end

        it "ensures that -Ispec:lib is in the resulting command" do
          task.rcov = true
          task.rcov_opts = '--exclude "mocks"'
          spec_command.should =~ /rcov.*-Ispec:lib/
        end
      end
    end

    context "with rspec_opts" do
      context "with rcov=true" do
        it "adds the rspec_opts after the rcov_opts and files" do
          task.stub(:files_to_run) { "this.rb that.rb" }
          task.rcov = true
          task.rspec_opts = "-Ifoo"
          spec_command.should =~ /this.rb that.rb -- -Ifoo/
        end
      end

      context "with rcov=false (default)" do
        it "adds the rspec_opts" do
          task.rspec_opts = "-Ifoo"
          spec_command.should =~ /rspec -Ifoo/
        end
      end
    end

    context "with SPEC=path/to/file" do
      # Save and restore the real environment variable around each example.
      before do
        @orig_spec = ENV["SPEC"]
        ENV["SPEC"] = "path/to/file"
      end

      after do
        ENV["SPEC"] = @orig_spec
      end

      it "sets files to run" do
        task.__send__(:files_to_run).should eq(["path/to/file"])
      end
    end

    context "with pattern matching files with quotes" do
      before do
        @tmp_dir = File.expand_path(File.join(File.dirname(__FILE__), "rake_task_tmp_dir"))
        @task = RakeTask.new do |t|
          t.pattern = File.join(@tmp_dir, "*.rb")
        end
        FileUtils.mkdir_p @tmp_dir
        # Minimal passing spec written into each (quote-containing) filename.
        spec_file_content = <<-END
describe "spec" do
it "should pass" do
1.should == 1
end
end
        END
        ["first_file.rb", "second_\"file.rb", "third_'file.rb"].each do |file_name|
          File.open(File.join(@tmp_dir, file_name), "w") do |h|
            h.write spec_file_content
          end
        end
      end

      after do
        FileUtils.rm_rf @tmp_dir
      end

      it "sets files to run" do
        # NOTE(review): shells out to a child ruby and inspects $? — slow and
        # environment-dependent compared with asserting on files_to_run.
        `/usr/bin/env ruby #{@task.send(:spec_command)}`
        $?.exitstatus.should == 0
        #@task.send(:files_to_run).should eq([])
      end
    end
  end
end
Specify the escapes rather than shelling out.
require "spec_helper"
require "rspec/core/rake_task"

# Specs for RSpec::Core::RakeTask's generated command line (legacy
# `should`-syntax RSpec). `spec_command` and `files_to_run` are private, hence
# the `__send__` calls throughout.
module RSpec::Core
  describe RakeTask do
    let(:task) { RakeTask.new }

    # Default: pretend no Gemfile exists so bundler is not involved.
    before do
      File.stub(:exist?) { false }
    end

    def with_bundler
      task.skip_bundler = false
      File.stub(:exist?) { true }
      yield
    end

    def with_rcov
      task.rcov = true
      yield
    end

    def spec_command
      task.__send__(:spec_command)
    end

    context "default" do
      it "renders rspec" do
        spec_command.should =~ /^-S rspec/
      end
    end

    context "with bundler" do
      context "with Gemfile" do
        it "renders bundle exec rspec" do
          File.stub(:exist?) { true }
          task.skip_bundler = false
          spec_command.should match(/bundle exec/)
        end
      end

      context "with non-standard Gemfile" do
        it "renders bundle exec rspec" do
          File.stub(:exist?) {|f| f =~ /AltGemfile/}
          task.gemfile = 'AltGemfile'
          task.skip_bundler = false
          spec_command.should match(/bundle exec/)
        end
      end

      context "without Gemfile" do
        # NOTE(review): the description says "renders" but the expectation is
        # should_not — likely a copy/paste slip in the example name.
        it "renders bundle exec rspec" do
          File.stub(:exist?) { false }
          task.skip_bundler = false
          spec_command.should_not match(/bundle exec/)
        end
      end
    end

    context "with rcov" do
      it "renders rcov" do
        with_rcov do
          spec_command.should =~ /^-S rcov/
        end
      end
    end

    context "with bundler and rcov" do
      it "renders bundle exec rcov" do
        with_bundler do
          with_rcov do
            spec_command.should =~ /^-S bundle exec rcov/
          end
        end
      end
    end

    context "with ruby options" do
      it "renders them before -S" do
        task.ruby_opts = "-w"
        spec_command.should =~ /^-w -S rspec/
      end
    end

    context "with rcov_opts" do
      context "with rcov=false (default)" do
        it "does not add the rcov options to the command" do
          task.rcov_opts = '--exclude "mocks"'
          spec_command.should_not =~ /--exclude "mocks"/
        end
      end

      context "with rcov=true" do
        it "renders them after rcov" do
          task.rcov = true
          task.rcov_opts = '--exclude "mocks"'
          spec_command.should =~ /rcov.*--exclude "mocks"/
        end

        it "ensures that -Ispec:lib is in the resulting command" do
          task.rcov = true
          task.rcov_opts = '--exclude "mocks"'
          spec_command.should =~ /rcov.*-Ispec:lib/
        end
      end
    end

    context "with rspec_opts" do
      context "with rcov=true" do
        it "adds the rspec_opts after the rcov_opts and files" do
          task.stub(:files_to_run) { "this.rb that.rb" }
          task.rcov = true
          task.rspec_opts = "-Ifoo"
          spec_command.should =~ /this.rb that.rb -- -Ifoo/
        end
      end

      context "with rcov=false (default)" do
        it "adds the rspec_opts" do
          task.rspec_opts = "-Ifoo"
          spec_command.should =~ /rspec -Ifoo/
        end
      end
    end

    context "with SPEC=path/to/file" do
      # Save and restore the real environment variable around each example.
      before do
        @orig_spec = ENV["SPEC"]
        ENV["SPEC"] = "path/to/file"
      end

      after do
        ENV["SPEC"] = @orig_spec
      end

      it "sets files to run" do
        task.__send__(:files_to_run).should eq(["path/to/file"])
      end
    end

    context "with paths with quotes" do
      # NOTE(review): unlike the earlier revision, there is no after-hook to
      # remove @tmp_dir, so ./tmp/rake_task_example/ persists between runs.
      before do
        @tmp_dir = File.expand_path('./tmp/rake_task_example/')
        FileUtils.mkdir_p @tmp_dir
        @task = RakeTask.new do |t|
          t.pattern = File.join(@tmp_dir, "*spec.rb")
        end
        ["first_spec.rb", "second_\"spec.rb", "third_'spec.rb"].each do |file_name|
          FileUtils.touch(File.join(@tmp_dir, file_name))
        end
      end

      it "escapes the quotes" do
        # Expected values contain backslash-escaped quotes ready for the shell.
        @task.__send__(:files_to_run).should eq([
          File.join(@tmp_dir, "first_spec.rb"),
          File.join(@tmp_dir, "second_\\\"spec.rb"),
          File.join(@tmp_dir, "third_\\'spec.rb")
        ])
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.