CombinedText stringlengths 4 3.42M |
|---|
# Shared examples for the grpowned? predicate (File.grpowned?,
# FileTest.grpowned?, File::Stat#grpowned?). The host spec supplies
# @object (the receiver) and @method (the predicate name).
describe :file_grpowned, :shared => true do
  before :each do
    @file = tmp('i_exist')
    touch(@file) { |f| f.puts "file_content" }
    # Align the file's group with the process gid so grpowned? should
    # report true. chown can legitimately fail for unprivileged users,
    # so only OS-level errors are swallowed (not every StandardError).
    begin
      File.chown(nil, Process.gid, @file)
    rescue SystemCallError
    end
  end

  after :each do
    rm_r @file
  end

  platform_is_not :windows do
    it "returns true if the file exists" do
      @object.send(@method, @file).should be_true
    end

    ruby_version_is "1.9" do
      it "accepts an object that has a #to_path method" do
        @object.send(@method, mock_to_path(@file)).should be_true
      end
    end
  end

  platform_is :windows do
    # Windows has no POSIX group ownership, so grpowned? is always false.
    it "returns false if the file exists" do
      @object.send(@method, @file).should be_false
    end
  end
end
Specs for taking different groups into account.
These specs cover the changes made so that a user's non-primary (supplementary)
groups are taken into account when calling Rubinius::Stat#grpowned?.
# Shared examples for the grpowned? predicate (File.grpowned?,
# FileTest.grpowned?, File::Stat#grpowned?), extended to cover
# non-primary (supplementary) group ownership. The host spec supplies
# @object (the receiver) and @method (the predicate name).
describe :file_grpowned, :shared => true do
  before :each do
    @file = tmp('i_exist')
    touch(@file) { |f| f.puts "file_content" }
    # Align the file's group with the process gid so grpowned? should
    # report true. chown can legitimately fail for unprivileged users,
    # so only OS-level errors are swallowed (not every StandardError).
    begin
      File.chown(nil, Process.gid, @file)
    rescue SystemCallError
    end
  end

  after :each do
    rm_r @file
  end

  platform_is_not :windows do
    it "returns true if the file exists" do
      @object.send(@method, @file).should be_true
    end

    ruby_version_is "1.9" do
      it "accepts an object that has a #to_path method" do
        @object.send(@method, mock_to_path(@file)).should be_true
      end
    end

    it 'takes non primary groups into account' do
      # NOTE(review): if the process belongs only to its effective group,
      # `group` is nil and chown(nil, nil, ...) is a no-op, so this example
      # passes without exercising supplementary groups — confirm the spec
      # runner's user has more than one group.
      group = (Process.groups - [Process.egid]).first
      File.chown(nil, group, @file)
      # be_true for consistency with the other examples in this file.
      @object.send(@method, @file).should be_true
    end
  end

  platform_is :windows do
    # Windows has no POSIX group ownership, so grpowned? is always false.
    it "returns false if the file exists" do
      @object.send(@method, @file).should be_false
    end
  end
end
|
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'spec_helper'
require 'ostruct'
module Seahorse
  module Client
    # Specs for Seahorse::Client::Base covering client configuration,
    # request construction, API assignment, subclass definition, and the
    # class-level plugin registry (add/remove/set/clear/apply).
    describe Base do
      let(:api) {{ 'endpoint' => 'http://endpoint:123' }}
      let(:client_class) { Base.define(api: api) }
      let(:client) { client_class.new }
      let(:required_plugins) { Base::REQUIRED_PLUGINS }

      describe '#config' do
        it 'returns a Configuration object' do
          expect(client.config).to be_kind_of(Configuration)
        end

        it 'contains the client api' do
          expect(client.config.api).to be(client_class.api)
        end

        it 'defaults endpoint to the api endpoint' do
          expect(client.config.endpoint).to eq(api['endpoint'])
        end

        it 'defaults ssl_default to true' do
          expect(client.config.ssl_default).to equal(true)
        end

        it 'passes constructor args to the config' do
          client = client_class.new(foo: 'bar')
          # The option is registered after construction; the constructor
          # hash is still consulted when the option is read.
          client.config.add_option(:foo)
          expect(client.config.foo).to eq('bar')
        end
      end

      describe '#build_request' do
        let(:request) { client_class.new.build_request('operation') }

        it 'returns a Request object' do
          expect(request).to be_kind_of(Request)
        end

        it 'builds a handler list from client plugins' do
          # Reset to a known plugin set before the lazy `request` let
          # constructs the client.
          client_class.clear_plugins
          client_class.add_plugin(Plugins::Api)
          client_class.add_plugin(Plugins::NetHttp)
          client_class.add_plugin(Plugins::Endpoint)
          handlers = request.handlers.to_a
          expect(handlers).to include(NetHttp::Handler)
          expect(handlers).to include(Plugins::Endpoint::EndpointHandler)
        end

        it 'defaults the send handler to a NetHttp::Handler' do
          handlers = request.handlers.to_a
          expect(handlers).to include(NetHttp::Handler)
        end

        it 'populates the request context with the operation name' do
          request = client.build_request('operation_name')
          expect(request.context.operation_name).to eq('operation_name')
        end

        it 'stringifies the operation name' do
          request = client.build_request(:operation)
          expect(request.context.operation_name).to eq('operation')
        end

        it 'populates the request context params' do
          params = double('params')
          request = client.build_request('operation', params)
          expect(request.context.params).to be(params)
        end

        it 'defaults request context params to an empty hash' do
          request = client.build_request('operation')
          expect(request.context.params).to eq({})
        end

        it 'populates the context with the client configuration' do
          request = client.build_request('operation')
          expect(request.context.config).to be(client.config)
        end
      end

      describe '.api' do
        it 'can be set' do
          api = Model::Api.from_hash({})
          client_class = Class.new(Base)
          client_class.set_api(api)
          expect(client_class.api).to be(api)
        end

        it 'can be set as a hash, returning a Model::Api' do
          client_class = Class.new(Base)
          api = client_class.set_api({})
          expect(api).to be_kind_of(Model::Api)
          expect(api.to_hash).to eq(Model::Api.from_hash({}).to_hash)
        end
      end

      describe '.define' do
        it 'creates a new client class' do
          client_class = Client.define
          expect(client_class.ancestors).to include(Client::Base)
        end

        it 'sets the api on the client class' do
          api = Model::Api.from_hash({})
          client_class = Client.define(api: api)
          expect(client_class.api).to be(api)
        end

        it 'extends from subclasses of client' do
          klass1 = Client.define
          klass2 = klass1.define
          expect(klass2.ancestors).to include(klass1)
          expect(klass2.ancestors).to include(Client::Base)
        end
      end

      describe '.new' do
        it 'constructs the class specified by .client_class' do
          new_client_class = Class.new { def initialize(cfg); end }
          client_class.stub(:client_class) { new_client_class }
          client = client_class.new(foo: 'bar')
          expect(client).to be_kind_of(new_client_class)
        end
      end

      describe 'plugin methods' do
        let(:plugin_a) { Class.new }
        let(:plugin_b) { Class.new }

        describe '.add_plugin' do
          it 'adds plugins to the client' do
            client_class.add_plugin(plugin_a)
            expect(client_class.plugins).to include(plugin_a)
          end

          it 'does not add plugins to the client parent class' do
            subclass = Class.new(client_class)
            subclass.add_plugin(plugin_a)
            expect(client_class.plugins).to_not include(plugin_a)
            expect(subclass.plugins).to include(plugin_a)
          end
        end

        describe '.remove_plugin' do
          it 'removes a plugin from the client' do
            client_class.add_plugin(plugin_a)
            client_class.add_plugin(plugin_b)
            client_class.remove_plugin(plugin_a)
            expect(client_class.plugins).not_to include(plugin_a)
            expect(client_class.plugins).to include(plugin_b)
          end

          it 'does not remove plugins from the client parent class' do
            client_class.add_plugin(plugin_a)
            subclass = client_class.define
            subclass.remove_plugin(plugin_a)
            expect(client_class.plugins).to include(plugin_a)
            expect(subclass.plugins).not_to include(plugin_a)
          end
        end

        describe '.set_plugins' do
          it 'replaces existing plugins' do
            client_class.add_plugin(plugin_a)
            client_class.set_plugins([plugin_b])
            # REQUIRED_PLUGINS are always retained.
            expect(client_class.plugins).to eq(required_plugins + [plugin_b])
          end
        end

        describe '.clear_plugins' do
          it 'removes all plugins' do
            client_class.add_plugin(plugin_a)
            client_class.clear_plugins
            expect(client_class.plugins).to eq(required_plugins + [])
          end
        end

        describe '.plugins' do
          it 'returns a list of plugins applied to the client' do
            expect(client_class.plugins).to be_kind_of(Array)
          end

          it 'returns a frozen list of plugins' do
            expect(client_class.plugins.frozen?).to eq(true)
          end

          it 'has a default list of plugins' do
            client_class = Class.new(Base)
            expect(client_class.plugins.to_a).to eq([
              Plugins::Api,
              Plugins::Endpoint,
              Plugins::NetHttp,
              Plugins::OperationMethods
            ])
          end

          it 'replaces default plugins with the list specified in the API' do
            stub_const('Seahorse::Client::PluginA', plugin_a)
            api = { 'plugins' => ['Seahorse::Client::PluginA'] }
            client_class = Base.define(api: api)
            expect(client_class.plugins.count).to eq(5)
            expect(client_class.plugins).to include(plugin_a)
          end
        end

        describe '.client_class' do
          it 'consults plugin list to see what client class to construct' do
            subclient_class = Class.new(client_class)
            plugin = double('plugin')
            plugin.stub(:construct_client) do |klass, config|
              subclient_class
            end
            client_class.add_plugin(plugin)
            expect(client_class.client_class).to be(subclient_class)
          end

          it 'does not change the client class if plugin#construct_client does not return Class' do
            plugin = Class.new(Plugin)
            plugin.stub(:construct_client) {|c,o| nil }
            client_class.add_plugin(plugin)
            expect(client_class.client_class).to be(client_class)
          end

          it 'takes the last returned Class object from plugins' do
            stub_const("ClassA", Class.new)
            stub_const("ClassB", Class.new)
            stub_const("ClassC", Class.new)
            plugin1 = double('plugin1')
            plugin1.stub(:construct_client) {|c,o| ClassA }
            plugin2 = double('plugin2')
            plugin2.stub(:construct_client) {|c,o| ClassB }
            plugin3 = double('plugin3')
            plugin3.stub(:construct_client) {|c,o| ClassC }
            client_class.add_plugin(plugin1)
            client_class.add_plugin(plugin2)
            client_class.add_plugin(plugin3)
            # Plugins are consulted in registration order; the last
            # non-nil Class wins.
            expect(client_class.client_class).to eq ClassC
          end
        end

        describe 'applying plugins' do
          it 'instructs plugins to #initialize_client' do
            initialized_client = nil
            plugin = double('plugin')
            plugin.stub(:initialize_client) {|c| initialized_client = c }
            client_class.add_plugin(plugin)
            client = client_class.new
            expect(initialized_client).to be(client)
          end

          it 'instructs plugins to #add_options' do
            plugin = double('plugin')
            plugin.stub(:add_options) { |config| config.add_option(:foo) }
            client_class.add_plugin(plugin)
            expect(client_class.new.config).to respond_to(:foo)
          end

          it 'instructs plugins to #add_handlers' do
            plugin = double('plugin')
            # doubles answer is_a? truthily by default, which would make the
            # client treat this double as a plugin *class*; force false.
            expect(plugin).to receive(:is_a?).
              at_least(:once).with(kind_of(Class)) { false }
            expect(plugin).to receive(:add_handlers).with(
              kind_of(HandlerList), kind_of(Configuration))
            client_class.add_plugin(plugin)
            client_class.new
          end

          it 'does not call methods that plugin does not respond to' do
            plugin = Object.new
            client_class.add_plugin(plugin)
            client_class.new
          end
        end
      end
    end
  end
end
More cleanup/simplification of the client base specs.
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'spec_helper'
require 'ostruct'
module Seahorse
  module Client
    # Specs for Seahorse::Client::Base covering client configuration,
    # request construction, API assignment, subclass definition, and the
    # class-level plugin registry. In this revision plugins nominate the
    # concrete client class via the #client_class_for hook.
    describe Base do
      let(:api) {{ 'endpoint' => 'http://endpoint:123' }}
      let(:client_class) { Base.define(api: api) }
      let(:client) { client_class.new }
      let(:required_plugins) { Base::REQUIRED_PLUGINS }
      let(:plugin_a) { Class.new }
      let(:plugin_b) { Class.new }

      describe '#config' do
        it 'returns a Configuration object' do
          expect(client.config).to be_kind_of(Configuration)
        end

        it 'contains the client api' do
          expect(client.config.api).to be(client_class.api)
        end

        it 'defaults endpoint to the api endpoint' do
          expect(client.config.endpoint).to eq(api['endpoint'])
        end

        it 'defaults ssl_default to true' do
          expect(client.config.ssl_default).to equal(true)
        end

        it 'passes constructor args to the config' do
          client = client_class.new(foo: 'bar')
          # The option is registered after construction; the constructor
          # hash is still consulted when the option is read.
          client.config.add_option(:foo)
          expect(client.config.foo).to eq('bar')
        end
      end

      describe '#build_request' do
        let(:request) { client_class.new.build_request('operation') }

        it 'returns a Request object' do
          expect(request).to be_kind_of(Request)
        end

        it 'builds a handler list from client plugins' do
          # Reset to a known plugin set before the lazy `request` let
          # constructs the client.
          client_class.clear_plugins
          client_class.add_plugin(Plugins::Api)
          client_class.add_plugin(Plugins::NetHttp)
          client_class.add_plugin(Plugins::Endpoint)
          handlers = request.handlers.to_a
          expect(handlers).to include(NetHttp::Handler)
          expect(handlers).to include(Plugins::Endpoint::EndpointHandler)
        end

        it 'defaults the send handler to a NetHttp::Handler' do
          handlers = request.handlers.to_a
          expect(handlers).to include(NetHttp::Handler)
        end

        it 'populates the request context with the operation name' do
          request = client.build_request('operation_name')
          expect(request.context.operation_name).to eq('operation_name')
        end

        it 'stringifies the operation name' do
          request = client.build_request(:operation)
          expect(request.context.operation_name).to eq('operation')
        end

        it 'populates the request context params' do
          params = double('params')
          request = client.build_request('operation', params)
          expect(request.context.params).to be(params)
        end

        it 'defaults request context params to an empty hash' do
          request = client.build_request('operation')
          expect(request.context.params).to eq({})
        end

        it 'populates the context with the client configuration' do
          request = client.build_request('operation')
          expect(request.context.config).to be(client.config)
        end
      end

      describe '.api' do
        it 'can be set' do
          api = Model::Api.from_hash({})
          client_class = Class.new(Base)
          client_class.set_api(api)
          expect(client_class.api).to be(api)
        end

        it 'can be set as a hash, returning a Model::Api' do
          client_class = Class.new(Base)
          api = client_class.set_api({})
          expect(api).to be_kind_of(Model::Api)
          expect(api.to_hash).to eq(Model::Api.from_hash({}).to_hash)
        end
      end

      describe '.define' do
        it 'creates a new client class' do
          client_class = Client.define
          expect(client_class.ancestors).to include(Client::Base)
        end

        it 'sets the api on the client class' do
          api = Model::Api.from_hash({})
          client_class = Client.define(api: api)
          expect(client_class.api).to be(api)
        end

        it 'extends from subclasses of client' do
          klass1 = Client.define
          klass2 = klass1.define
          expect(klass2.ancestors).to include(klass1)
          expect(klass2.ancestors).to include(Client::Base)
        end
      end

      describe '.add_plugin' do
        it 'adds plugins to the client' do
          client_class.add_plugin(plugin_a)
          expect(client_class.plugins).to include(plugin_a)
        end

        it 'does not add plugins to the client parent class' do
          subclass = Class.new(client_class)
          subclass.add_plugin(plugin_a)
          expect(client_class.plugins).to_not include(plugin_a)
          expect(subclass.plugins).to include(plugin_a)
        end
      end

      describe '.remove_plugin' do
        it 'removes a plugin from the client' do
          client_class.add_plugin(plugin_a)
          client_class.add_plugin(plugin_b)
          client_class.remove_plugin(plugin_a)
          expect(client_class.plugins).not_to include(plugin_a)
          expect(client_class.plugins).to include(plugin_b)
        end

        it 'does not remove plugins from the client parent class' do
          client_class.add_plugin(plugin_a)
          subclass = client_class.define
          subclass.remove_plugin(plugin_a)
          expect(client_class.plugins).to include(plugin_a)
          expect(subclass.plugins).not_to include(plugin_a)
        end
      end

      describe '.set_plugins' do
        it 'replaces existing plugins' do
          client_class.add_plugin(plugin_a)
          client_class.set_plugins([plugin_b])
          # REQUIRED_PLUGINS are always retained.
          expect(client_class.plugins).to eq(required_plugins + [plugin_b])
        end
      end

      describe '.clear_plugins' do
        it 'removes all plugins' do
          client_class.add_plugin(plugin_a)
          client_class.clear_plugins
          expect(client_class.plugins).to eq(required_plugins + [])
        end
      end

      describe '.plugins' do
        it 'returns a list of plugins applied to the client' do
          expect(client_class.plugins).to be_kind_of(Array)
        end

        it 'returns a frozen list of plugins' do
          expect(client_class.plugins.frozen?).to eq(true)
        end

        it 'has a default list of plugins' do
          client_class = Class.new(Base)
          expect(client_class.plugins.to_a).to eq([
            Plugins::Api,
            Plugins::Endpoint,
            Plugins::NetHttp,
            Plugins::OperationMethods
          ])
        end

        it 'replaces default plugins with the list specified in the API' do
          stub_const('Seahorse::Client::PluginA', plugin_a)
          api = { 'plugins' => ['Seahorse::Client::PluginA'] }
          client_class = Base.define(api: api)
          expect(client_class.plugins.count).to eq(5)
          expect(client_class.plugins).to include(plugin_a)
        end
      end

      describe '.client_class' do
        it 'consults plugin list to see what client class to construct' do
          subclient_class = Class.new(client_class)
          plugin = double('plugin')
          plugin.stub(:client_class_for) do |klass, config|
            subclient_class
          end
          client_class.add_plugin(plugin)
          expect(client_class.client_class).to be(subclient_class)
        end

        it 'does not change the client class if plugin#client_class_for does not return Class' do
          # Stub the renamed #client_class_for hook (previously
          # #construct_client) so this example actually exercises the
          # nil-return path instead of passing vacuously.
          plugin = Class.new(Plugin)
          plugin.stub(:client_class_for) {|c,o| nil }
          client_class.add_plugin(plugin)
          expect(client_class.client_class).to be(client_class)
        end

        it 'takes the last returned Class object from plugins' do
          stub_const("ClassA", Class.new)
          stub_const("ClassB", Class.new)
          stub_const("ClassC", Class.new)
          plugin1 = double('plugin1')
          plugin1.stub(:client_class_for) {|c,o| ClassA }
          plugin2 = double('plugin2')
          plugin2.stub(:client_class_for) {|c,o| ClassB }
          plugin3 = double('plugin3')
          plugin3.stub(:client_class_for) {|c,o| ClassC }
          client_class.add_plugin(plugin1)
          client_class.add_plugin(plugin2)
          client_class.add_plugin(plugin3)
          # Plugins are consulted in registration order; the last
          # non-nil Class wins.
          expect(client_class.client_class).to eq ClassC
        end
      end

      describe '.new' do
        it 'constructs the class specified by .client_class' do
          new_client_class = Class.new { def initialize(cfg); end }
          client_class.stub(:client_class) { new_client_class }
          client = client_class.new(foo: 'bar')
          expect(client).to be_kind_of(new_client_class)
        end

        it 'instructs plugins to #initialize_client' do
          initialized_client = nil
          plugin = double('plugin')
          plugin.stub(:initialize_client) {|c| initialized_client = c }
          client_class.add_plugin(plugin)
          client = client_class.new
          expect(initialized_client).to be(client)
        end

        it 'instructs plugins to #add_options' do
          plugin = double('plugin')
          plugin.stub(:add_options) { |config| config.add_option(:foo) }
          client_class.add_plugin(plugin)
          expect(client_class.new.config).to respond_to(:foo)
        end

        it 'instructs plugins to #add_handlers' do
          plugin = double('plugin')
          # doubles answer is_a? truthily by default, which would make the
          # client treat this double as a plugin *class*; force false.
          expect(plugin).to receive(:is_a?).
            at_least(:once).with(kind_of(Class)) { false }
          expect(plugin).to receive(:add_handlers).with(
            kind_of(HandlerList), kind_of(Configuration))
          client_class.add_plugin(plugin)
          client_class.new
        end

        it 'does not call methods that plugin does not respond to' do
          plugin = Object.new
          client_class.add_plugin(plugin)
          client_class.new
        end
      end
    end
  end
end
|
require File.expand_path(File.dirname(__FILE__) + '/common')
# Selenium integration specs for the assignments pages, split into a
# teacher view (create/edit flows) and a student view (read-only flows).
# NOTE(review): these examples drive a live browser; the click/wait
# ordering is deliberate and should not be reordered.
describe "assignments" do
  it_should_behave_like "in-process server selenium tests"

  context "teacher view" do
    before (:each) do
      course_with_teacher_logged_in
    end

    it "should display assignment on calendar and link to assignment" do
      assignment_name = 'first assignment'
      current_date = Time.now.utc
      due_date = current_date + 2.days
      @assignment = @course.assignments.create(:name => assignment_name, :due_at => due_date)
      get "/calendar"
      #click on assignment in calendar
      # The due date may fall in next month's grid; page forward first.
      if due_date.month > current_date.month
        driver.find_element(:css, '#content .next_month_link').click
        wait_for_ajax_requests
      end
      # Calendar day cells are id'd as day_YYYY_MM_DD.
      day_id = 'day_' + due_date.year.to_s() + '_' + due_date.strftime('%m') + '_' + due_date.strftime('%d')
      day_div = driver.find_element(:id, day_id)
      sleep 1 # this is one of those cases where if we click too early, no subsequent clicks will work
      day_div.find_element(:link, assignment_name).click
      wait_for_animations
      # The dialog content sits inside a wrapper, hence the :xpath '..' hop.
      details_dialog = driver.find_element(:id, 'event_details').find_element(:xpath, '..')
      details_dialog.should include_text(assignment_name)
      details_dialog.find_element(:css, '.edit_event_link').click
      details_dialog = driver.find_element(:id, 'edit_event').find_element(:xpath, '..')
      details_dialog.find_element(:name, 'assignment[title]').should be_displayed
      details_dialog.find_element(:css, '#edit_assignment_form .more_options_link').click
      #make sure user is taken to assignment details
      driver.find_element(:css, 'h2.title').should include_text(assignment_name)
    end

    it "should create an assignment" do
      assignment_name = 'first assignment'
      # Two groups so we can verify the assignment lands in the selected one.
      @course.assignment_groups.create!(:name => "first group")
      @course.assignment_groups.create!(:name => "second group")
      get "/courses/#{@course.id}/assignments"
      #create assignment
      click_option('#right-side select.assignment_groups_select', 'second group')
      driver.find_element(:css, '.add_assignment_link').click
      driver.find_element(:id, 'assignment_title').send_keys(assignment_name)
      driver.find_element(:css, '.ui-datepicker-trigger').click
      datepicker = datepicker_next
      datepicker.find_element(:css, '.ui-datepicker-ok').click
      driver.find_element(:id, 'assignment_points_possible').send_keys('5')
      driver.
          find_element(:id, 'add_assignment_form').submit
      #make sure assignment was added to correct assignment group
      wait_for_animations
      first_group = driver.find_element(:css, '#groups .assignment_group:nth-child(2)')
      first_group.should include_text('second group')
      first_group.should include_text(assignment_name)
      #click on assignment link
      driver.find_element(:link, assignment_name).click
      driver.find_element(:css, 'h2.title').should include_text(assignment_name)
    end

    it "should create an assignment with more options" do
      # enable_cache so the group's cached timestamp invalidation is exercised.
      enable_cache do
        expected_text = "Assignment 1"
        get "/courses/#{@course.id}/assignments"
        group = @course.assignment_groups.first
        # Backdate updated_at so a cache-busting touch is detectable below.
        AssignmentGroup.update_all({:updated_at => 1.hour.ago}, {:id => group.id})
        first_stamp = group.reload.updated_at.to_i
        driver.find_element(:css, '.add_assignment_link').click
        expect_new_page_load { driver.find_element(:css, '.more_options_link').click }
        expect_new_page_load { driver.find_element(:css, '#edit_assignment_form').submit }
        @course.assignments.count.should == 1
        driver.find_element(:css, '.no_assignments_message').should_not be_displayed
        driver.find_element(:css, '#groups').should include_text(expected_text)
        group.reload
        # Creating the assignment must touch the group (cache invalidation).
        group.updated_at.to_i.should_not == first_stamp
      end
    end

    it "should allow creating a quiz assignment from 'more options'" do
      skip_if_ie("Out of memory")
      get "/courses/#{@course.id}/assignments"
      driver.find_element(:css, ".assignment_group .add_assignment_link").click
      form = driver.find_element(:css, "#add_assignment_form")
      form.find_element(:css, ".assignment_submission_types option[value='online_quiz']").click
      expect_new_page_load { form.find_element(:css, ".more_options_link").click }
      # Choosing online_quiz pre-selects 'none' submissions on the full form;
      # toggle back and forth to end up with a quiz-type assignment.
      driver.find_element(:css, ".submission_type_option option[value='none']").should be_selected
      driver.find_element(:css, ".assignment_type option[value='assignment']").click
      driver.find_element(:css, ".submission_type_option option[value='online']").click
      driver.find_element(:css, ".assignment_type option[value='quiz']").click
      expect_new_page_load { driver.find_element(:id, 'edit_assignment_form').submit }
    end

    it "should edit an assignment" do
      skip_if_ie('Out of memory')
      assignment_name = 'first test assignment'
      due_date = Time.now.utc + 2.days
      group = @course.assignment_groups.create!(:name => "default")
      second_group = @course.assignment_groups.create!(:name => "second default")
      @course.assignments.create!(
          :name => assignment_name,
          :due_at => due_date,
          :assignment_group => group
      )
      get "/courses/#{@course.id}/assignments"
      expect_new_page_load { driver.find_element(:link, assignment_name).click }
      driver.find_element(:css, '.edit_full_assignment_link').click
      driver.find_element(:css, '.more_options_link').click
      driver.find_element(:id, 'assignment_assignment_group_id').should be_displayed
      click_option('#assignment_assignment_group_id', second_group.name)
      click_option('#assignment_grading_type', 'Letter Grade')
      #check grading levels dialog
      wait_for_animations
      keep_trying_until { driver.find_element(:css, 'a.edit_letter_grades_link').should be_displayed }
      driver.find_element(:css, 'a.edit_letter_grades_link').click
      wait_for_animations
      driver.find_element(:id, 'edit_letter_grades_form').should be_displayed
      close_visible_dialog
      #check peer reviews option
      driver.find_element(:css, '#edit_assignment_form #assignment_peer_reviews').click
      driver.find_element(:css, '#edit_assignment_form #auto_peer_reviews').click
      driver.find_element(:css, '#edit_assignment_form #assignment_peer_review_count').send_keys('2')
      driver.find_element(:css, '#edit_assignment_form #assignment_peer_reviews_assign_at + img').click
      datepicker = datepicker_next
      datepicker.find_element(:css, '.ui-datepicker-ok').click
      driver.find_element(:id, 'assignment_title').send_keys(' edit')
      #save changes
      driver.find_element(:id, 'edit_assignment_form').submit
      wait_for_ajaximations
      driver.find_element(:css, 'h2.title').should include_text(assignment_name + ' edit')
    end

    it "should show a \"more errors\" errorBox if any invalid fields are hidden" do
      assignment_name = 'first test assignment'
      @group = @course.assignment_groups.create!(:name => "default")
      @assignment = @course.assignments.create(
          :name => assignment_name,
          :assignment_group => @group,
          :points_possible => 2,
          :due_at => Time.now,
          :lock_at => 1.month.ago # this will trigger the client-side validation error
      )
      get "/courses/#{@course.id}/assignments/#{@assignment.id}"
      driver.find_element(:css, "a.edit_full_assignment_link").click
      driver.find_element(:id, 'edit_assignment_form').submit
      wait_for_animations
      errorBoxes = driver.execute_script("return $('.errorBox').filter('[id!=error_box_template]').toArray();")
      # One box for the hidden invalid field, one aggregate "more errors" box.
      errorBoxes.size.should eql 2
      errorBoxes.first.should_not be_displayed # .text just gives us an empty string since it's hidden
      errorBoxes.last.text.should eql "There were errors on one or more advanced options"
      errorBoxes.last.should be_displayed
      driver.find_element(:css, 'a.more_options_link').click
      wait_for_animations
      errorBoxes = driver.execute_script("return $('.errorBox').filter('[id!=error_box_template]').toArray();")
      errorBoxes.size.should eql 1 # the more_options_link one has now been removed from the DOM
      errorBoxes.first.text.should eql "The assignment shouldn't be locked again until after the due date"
      errorBoxes.first.should be_displayed
    end
  end

  context "student view" do
    before (:each) do
      course_with_student_logged_in
    end

    it "should highlight mini-calendar dates where stuff is due" do
      due_date = Time.now.utc + 2.days
      @assignment = @course.assignments.create(:name => 'assignment', :due_at => due_date)
      get "/courses/#{@course.id}/assignments/syllabus"
      driver.find_element(:css, ".mini_calendar_day.date_#{due_date.strftime("%m_%d_%Y")}").
          attribute('class').should match /has_event/
    end

    it "should not show submission data when muted" do
      @assignment = @course.assignments.create!(:title => "hardest assignment ever", :submission_types => "online_url,online_upload")
      @submission = @assignment.submit_homework(@student)
      @submission.submission_type = "online_url"
      @submission.save!
      # Comments made before muting stay visible; those after muting are hidden.
      @submission.add_comment :author => @teacher, :comment => "comment before muting"
      @assignment.mute!
      @assignment.update_submission(@student, :hidden => true, :comment => "comment after muting")
      outcome_with_rubric
      @rubric.associate_with @assignment, @course, :purpose => "grading"
      get "/courses/#{@course.id}/assignments/#{@assignment.id}"
      driver.find_element(:css, ".details").text.should =~ /comment before muting/
      driver.find_element(:css, ".details").text.should_not =~ /comment after muting/
    end
  end
end
BUG #6783 - adding student test to assignments selenium spec
Change-Id: Ic9f0f512a0d08bf3a901bca17b7c674632de1601
Reviewed-on: https://gerrit.instructure.com/8616
Reviewed-by: Bryan Madsen <0a3ba1af216659500dd8f41e29f3979f42452870@instructure.com>
Tested-by: Hudson <dfb64870173313a9a7b56f814c8e3b33e268497a@instructure.com>
require File.expand_path(File.dirname(__FILE__) + '/common')
describe "assignments" do
it_should_behave_like "in-process server selenium tests"
context "teacher view" do
before (:each) do
course_with_teacher_logged_in
end
it "should display assignment on calendar and link to assignment" do
assignment_name = 'first assignment'
current_date = Time.now.utc
due_date = current_date + 2.days
@assignment = @course.assignments.create(:name => assignment_name, :due_at => due_date)
get "/calendar"
#click on assignment in calendar
if due_date.month > current_date.month
driver.find_element(:css, '#content .next_month_link').click
wait_for_ajax_requests
end
day_id = 'day_' + due_date.year.to_s() + '_' + due_date.strftime('%m') + '_' + due_date.strftime('%d')
day_div = driver.find_element(:id, day_id)
sleep 1 # this is one of those cases where if we click too early, no subsequent clicks will work
day_div.find_element(:link, assignment_name).click
wait_for_animations
details_dialog = driver.find_element(:id, 'event_details').find_element(:xpath, '..')
details_dialog.should include_text(assignment_name)
details_dialog.find_element(:css, '.edit_event_link').click
details_dialog = driver.find_element(:id, 'edit_event').find_element(:xpath, '..')
details_dialog.find_element(:name, 'assignment[title]').should be_displayed
details_dialog.find_element(:css, '#edit_assignment_form .more_options_link').click
#make sure user is taken to assignment details
driver.find_element(:css, 'h2.title').should include_text(assignment_name)
end
it "should create an assignment" do
assignment_name = 'first assignment'
@course.assignment_groups.create!(:name => "first group")
@course.assignment_groups.create!(:name => "second group")
get "/courses/#{@course.id}/assignments"
#create assignment
click_option('#right-side select.assignment_groups_select', 'second group')
driver.find_element(:css, '.add_assignment_link').click
driver.find_element(:id, 'assignment_title').send_keys(assignment_name)
driver.find_element(:css, '.ui-datepicker-trigger').click
datepicker = datepicker_next
datepicker.find_element(:css, '.ui-datepicker-ok').click
driver.find_element(:id, 'assignment_points_possible').send_keys('5')
driver.
find_element(:id, 'add_assignment_form').submit
#make sure assignment was added to correct assignment group
wait_for_animations
first_group = driver.find_element(:css, '#groups .assignment_group:nth-child(2)')
first_group.should include_text('second group')
first_group.should include_text(assignment_name)
#click on assignment link
driver.find_element(:link, assignment_name).click
driver.find_element(:css, 'h2.title').should include_text(assignment_name)
end
it "should create an assignment with more options" do
enable_cache do
expected_text = "Assignment 1"
get "/courses/#{@course.id}/assignments"
group = @course.assignment_groups.first
AssignmentGroup.update_all({:updated_at => 1.hour.ago}, {:id => group.id})
first_stamp = group.reload.updated_at.to_i
driver.find_element(:css, '.add_assignment_link').click
expect_new_page_load { driver.find_element(:css, '.more_options_link').click }
expect_new_page_load { driver.find_element(:css, '#edit_assignment_form').submit }
@course.assignments.count.should == 1
driver.find_element(:css, '.no_assignments_message').should_not be_displayed
driver.find_element(:css, '#groups').should include_text(expected_text)
group.reload
group.updated_at.to_i.should_not == first_stamp
end
end
it "should allow creating a quiz assignment from 'more options'" do
skip_if_ie("Out of memory")
get "/courses/#{@course.id}/assignments"
driver.find_element(:css, ".assignment_group .add_assignment_link").click
form = driver.find_element(:css, "#add_assignment_form")
form.find_element(:css, ".assignment_submission_types option[value='online_quiz']").click
expect_new_page_load { form.find_element(:css, ".more_options_link").click }
driver.find_element(:css, ".submission_type_option option[value='none']").should be_selected
driver.find_element(:css, ".assignment_type option[value='assignment']").click
driver.find_element(:css, ".submission_type_option option[value='online']").click
driver.find_element(:css, ".assignment_type option[value='quiz']").click
expect_new_page_load { driver.find_element(:id, 'edit_assignment_form').submit }
end
# Edits an existing assignment through the full edit form: moves it to a
# second assignment group, switches grading to "Letter Grade" (verifying the
# grading-levels dialog opens), enables automatic peer reviews with an
# assign-at date, renames the assignment, and checks the saved title.
it "should edit an assignment" do
  skip_if_ie('Out of memory')
  assignment_name = 'first test assignment'
  due_date = Time.now.utc + 2.days
  group = @course.assignment_groups.create!(:name => "default")
  second_group = @course.assignment_groups.create!(:name => "second default")
  @course.assignments.create!(
    :name => assignment_name,
    :due_at => due_date,
    :assignment_group => group
  )
  get "/courses/#{@course.id}/assignments"
  expect_new_page_load { driver.find_element(:link, assignment_name).click }
  driver.find_element(:css, '.edit_full_assignment_link').click
  driver.find_element(:css, '.more_options_link').click
  driver.find_element(:id, 'assignment_assignment_group_id').should be_displayed
  click_option('#assignment_assignment_group_id', second_group.name)
  click_option('#assignment_grading_type', 'Letter Grade')
  # check that the grading-levels dialog opens once Letter Grade is selected
  wait_for_animations
  keep_trying_until { driver.find_element(:css, 'a.edit_letter_grades_link').should be_displayed }
  driver.find_element(:css, 'a.edit_letter_grades_link').click
  wait_for_animations
  driver.find_element(:id, 'edit_letter_grades_form').should be_displayed
  close_visible_dialog
  # enable automatic peer reviews (count of 2) and pick an assign-at date
  driver.find_element(:css, '#edit_assignment_form #assignment_peer_reviews').click
  driver.find_element(:css, '#edit_assignment_form #auto_peer_reviews').click
  driver.find_element(:css, '#edit_assignment_form #assignment_peer_review_count').send_keys('2')
  driver.find_element(:css, '#edit_assignment_form #assignment_peer_reviews_assign_at + img').click
  datepicker = datepicker_next
  datepicker.find_element(:css, '.ui-datepicker-ok').click
  driver.find_element(:id, 'assignment_title').send_keys(' edit')
  # save changes and verify the renamed title is displayed
  driver.find_element(:id, 'edit_assignment_form').submit
  wait_for_ajaximations
  driver.find_element(:css, 'h2.title').should include_text(assignment_name + ' edit')
end
# With an invalid value in a hidden "advanced options" field (lock_at before
# the due date), submitting the edit form should render two errorBoxes: a
# hidden one on the invalid field plus a visible "more errors" box. Expanding
# the advanced options removes the latter, leaving only the field's own
# (now visible) errorBox.
it "should show a \"more errors\" errorBox if any invalid fields are hidden" do
  assignment_name = 'first test assignment'
  @group = @course.assignment_groups.create!(:name => "default")
  @assignment = @course.assignments.create(
    :name => assignment_name,
    :assignment_group => @group,
    :points_possible => 2,
    :due_at => Time.now,
    :lock_at => 1.month.ago # this will trigger the client-side validation error
  )
  get "/courses/#{@course.id}/assignments/#{@assignment.id}"
  driver.find_element(:css, "a.edit_full_assignment_link").click
  driver.find_element(:id, 'edit_assignment_form').submit
  wait_for_animations
  # collect all errorBoxes except the template node
  errorBoxes = driver.execute_script("return $('.errorBox').filter('[id!=error_box_template]').toArray();")
  errorBoxes.size.should eql 2
  errorBoxes.first.should_not be_displayed # .text just gives us an empty string since it's hidden
  errorBoxes.last.text.should eql "There were errors on one or more advanced options"
  errorBoxes.last.should be_displayed
  driver.find_element(:css, 'a.more_options_link').click
  wait_for_animations
  errorBoxes = driver.execute_script("return $('.errorBox').filter('[id!=error_box_template]').toArray();")
  errorBoxes.size.should eql 1 # the more_options_link one has now been removed from the DOM
  errorBoxes.first.text.should eql "The assignment shouldn't be locked again until after the due date"
  errorBoxes.first.should be_displayed
end
end
# Specs that run with a student (rather than teacher) session.
context "student view" do
  before (:each) do
    course_with_student_logged_in
  end

  # The syllabus mini-calendar marks days that have due items with a
  # has_event class on the matching date cell.
  it "should highlight mini-calendar dates where stuff is due" do
    due_date = Time.now.utc + 2.days
    @assignment = @course.assignments.create(:name => 'assignment', :due_at => due_date)
    get "/courses/#{@course.id}/assignments/syllabus"
    driver.find_element(:css, ".mini_calendar_day.date_#{due_date.strftime("%m_%d_%Y")}").
        attribute('class').should match /has_event/
  end

  # When an assignment is muted, comments added after muting (hidden
  # submission comments) must not be shown to the student, while comments
  # from before muting remain visible.
  it "should not show submission data when muted" do
    @assignment = @course.assignments.create!(:title => "hardest assignment ever", :submission_types => "online_url,online_upload")
    @submission = @assignment.submit_homework(@student)
    @submission.submission_type = "online_url"
    @submission.save!
    @submission.add_comment :author => @teacher, :comment => "comment before muting"
    @assignment.mute!
    @assignment.update_submission(@student, :hidden => true, :comment => "comment after muting")
    outcome_with_rubric
    @rubric.associate_with @assignment, @course, :purpose => "grading"
    get "/courses/#{@course.id}/assignments/#{@assignment.id}"
    driver.find_element(:css, ".details").text.should =~ /comment before muting/
    driver.find_element(:css, ".details").text.should_not =~ /comment after muting/
  end

  # Whole example wrapped in pending until BUG 6783 is fixed.
  it "should submit an assignment and validate confirmation information" do
    pending "BUG 6783 - Coming Up assignments update error" do
      due_date = Time.now.utc + 2.days
      @assignment = @course.assignments.create(:name => 'assignment', :due_at => due_date, :submission_types => 'online_url')
      @submission = @assignment.submit_homework(@student)
      @submission.submission_type = "online_url"
      @submission.save!
      get "/courses/#{@course.id}/assignments/#{@assignment.id}"
      driver.find_element(:css, '.details .header').should include_text('Turned In!')
      get "/courses/#{@course.id}"
      # force the tooltip visible so its text can be inspected
      driver.execute_script("$('.tooltip_text').css('visibility', 'visible')")
      tooltip_text_elements = driver.find_elements(:css, '.tooltip_text > span')
      driver.find_element(:css, '.tooltip_text').should be_displayed
      tooltip_text_elements[1].text.should == 'submitted'
    end
  end
end
end
|
require 'spec_helper'
require 'sisimai/mail/maildir'
# Specs for Sisimai::Mail::Maildir: constructor error handling and the
# per-message reader interface (#dir/#path/#file/#size/#handle/#inodes/
# #count/#read) against the bundled sample Maildir/.
describe Sisimai::Mail::Maildir do
  samplemaildir = './set-of-emails/maildir/bsd'
  # total number of sample emails expected under the Maildir/ above
  allofthefiles = 445
  let(:mailobj) { Sisimai::Mail::Maildir.new(samples) }
  let(:mockobj) { Sisimai::Mail::Maildir.new(invalid) }

  describe 'class method' do
    describe '.new' do
      context 'Maildir/ exists' do
        let(:samples) { samplemaildir }
        subject { mailobj }
        it 'returns Sisimai::Mail::Maildir object' do
          is_expected.to be_a Sisimai::Mail::Maildir
          expect(mailobj.read).to be_a String
        end
      end

      context 'directory does not exist' do
        let(:invalid) { '/etc/neko/nyan' }
        it 'raises Errno::ENOENT' do
          expect { mockobj }.to raise_error(Errno::ENOENT)
        end
      end

      context 'argument is not a directory' do
        let(:invalid) { '/etc/hosts' }
        it 'raises Errno::ENOTDIR' do
          expect { mockobj }.to raise_error(Errno::ENOTDIR)
        end
      end

      context 'wrong number of arguments' do
        it 'raises ArgumentError' do
          expect { Sisimai::Mail::Maildir.new }.to raise_error(ArgumentError)
          expect { Sisimai::Mail::Maildir.new(nil, nil) }.to raise_error(ArgumentError)
        end
      end
    end
  end

  describe 'instance method' do
    let(:samples) { samplemaildir }
    before do
      # read one message so per-read accessors (#file, #inodes, #count) are set
      mailobj.read
    end

    describe '#dir' do
      subject { mailobj.dir }
      it 'returns directory name' do
        is_expected.to be_a String
        is_expected.to be == samples
      end
    end

    describe '#path' do
      subject { mailobj.path }
      it 'matches directory name' do
        is_expected.to be_a String
        is_expected.to match(%r|#{samples}/.+|)
      end
    end

    describe '#file' do
      subject { mailobj.file }
      it 'returns filename' do
        is_expected.to be_a String
        is_expected.to match(/.+[.].+/)
      end
    end

    describe '#size' do
      subject { mailobj.size }
      # FIX: "direcotry" typo in the example description
      it 'returns the number of files in the directory' do
        is_expected.to be_a Integer
        is_expected.to be > 255
      end
    end

    describe '#handle' do
      let(:handle) { mailobj.handle }
      subject { handle }
      it 'is IO::Dir object' do
        is_expected.to be_a Dir
      end
    end

    describe '#inodes' do
      let(:inodes) { mailobj.inodes }
      subject { mailobj.inodes }
      it 'contains inode table' do
        is_expected.to be_a Hash
        # exactly one file has been read in the before block
        expect(inodes.size).to be == 1
      end
    end

    describe '#count' do
      let(:count) { mailobj.count }
      subject { mailobj.count }
      it 'returns the number of read files' do
        is_expected.to be_a Integer
        is_expected.to be == 1
      end
    end

    describe '#read' do
      # examples are generated at load time, one batch per sample email
      maildir = Sisimai::Mail::Maildir.new(samplemaildir)
      emindex = 0

      # FIX: the loop assigned its return value to an unused local `r`
      while maildir.read
        emindex += 1
        it 'is ' + maildir.file do
          expect(maildir.file).to match(/\A[a-z0-9-]+[-]\d\d[.]eml\z/)
          expect(maildir.file.size).to be > 0
        end
        it "has read #{maildir.count} files" do
          expect(maildir.count).to be > 0
          expect(maildir.count).to be == emindex
        end
        it 'has 1 or more inode entries' do
          expect(maildir.inodes.keys.size).to be_a Integer
          # tolerance of 3 for the inode table size — presumably duplicate
          # inodes are skipped; TODO confirm against Maildir#read
          expect(maildir.inodes.keys.size).to be >= emindex - 3
        end
      end

      example "the number of read files is #{maildir.count}" do
        expect(maildir.count).to be > 0
        expect(maildir.count).to be == emindex
        expect(maildir.count).to be == allofthefiles
      end
    end
  end
end
Increment the number of public sample emails: b12ecd5, 6be105a
require 'spec_helper'
require 'sisimai/mail/maildir'
# Specs for Sisimai::Mail::Maildir: constructor error handling and the
# per-message reader interface (#dir/#path/#file/#size/#handle/#inodes/
# #count/#read) against the bundled sample Maildir/.
describe Sisimai::Mail::Maildir do
  samplemaildir = './set-of-emails/maildir/bsd'
  # total number of sample emails expected under the Maildir/ above
  allofthefiles = 447
  let(:mailobj) { Sisimai::Mail::Maildir.new(samples) }
  let(:mockobj) { Sisimai::Mail::Maildir.new(invalid) }

  describe 'class method' do
    describe '.new' do
      context 'Maildir/ exists' do
        let(:samples) { samplemaildir }
        subject { mailobj }
        it 'returns Sisimai::Mail::Maildir object' do
          is_expected.to be_a Sisimai::Mail::Maildir
          expect(mailobj.read).to be_a String
        end
      end

      context 'directory does not exist' do
        let(:invalid) { '/etc/neko/nyan' }
        it 'raises Errno::ENOENT' do
          expect { mockobj }.to raise_error(Errno::ENOENT)
        end
      end

      context 'argument is not a directory' do
        let(:invalid) { '/etc/hosts' }
        it 'raises Errno::ENOTDIR' do
          expect { mockobj }.to raise_error(Errno::ENOTDIR)
        end
      end

      context 'wrong number of arguments' do
        it 'raises ArgumentError' do
          expect { Sisimai::Mail::Maildir.new }.to raise_error(ArgumentError)
          expect { Sisimai::Mail::Maildir.new(nil, nil) }.to raise_error(ArgumentError)
        end
      end
    end
  end

  describe 'instance method' do
    let(:samples) { samplemaildir }
    before do
      # read one message so per-read accessors (#file, #inodes, #count) are set
      mailobj.read
    end

    describe '#dir' do
      subject { mailobj.dir }
      it 'returns directory name' do
        is_expected.to be_a String
        is_expected.to be == samples
      end
    end

    describe '#path' do
      subject { mailobj.path }
      it 'matches directory name' do
        is_expected.to be_a String
        is_expected.to match(%r|#{samples}/.+|)
      end
    end

    describe '#file' do
      subject { mailobj.file }
      it 'returns filename' do
        is_expected.to be_a String
        is_expected.to match(/.+[.].+/)
      end
    end

    describe '#size' do
      subject { mailobj.size }
      # FIX: "direcotry" typo in the example description
      it 'returns the number of files in the directory' do
        is_expected.to be_a Integer
        is_expected.to be > 255
      end
    end

    describe '#handle' do
      let(:handle) { mailobj.handle }
      subject { handle }
      it 'is IO::Dir object' do
        is_expected.to be_a Dir
      end
    end

    describe '#inodes' do
      let(:inodes) { mailobj.inodes }
      subject { mailobj.inodes }
      it 'contains inode table' do
        is_expected.to be_a Hash
        # exactly one file has been read in the before block
        expect(inodes.size).to be == 1
      end
    end

    describe '#count' do
      let(:count) { mailobj.count }
      subject { mailobj.count }
      it 'returns the number of read files' do
        is_expected.to be_a Integer
        is_expected.to be == 1
      end
    end

    describe '#read' do
      # examples are generated at load time, one batch per sample email
      maildir = Sisimai::Mail::Maildir.new(samplemaildir)
      emindex = 0

      # FIX: the loop assigned its return value to an unused local `r`
      while maildir.read
        emindex += 1
        it 'is ' + maildir.file do
          expect(maildir.file).to match(/\A[a-z0-9-]+[-]\d\d[.]eml\z/)
          expect(maildir.file.size).to be > 0
        end
        it "has read #{maildir.count} files" do
          expect(maildir.count).to be > 0
          expect(maildir.count).to be == emindex
        end
        it 'has 1 or more inode entries' do
          expect(maildir.inodes.keys.size).to be_a Integer
          # tolerance of 3 for the inode table size — presumably duplicate
          # inodes are skipped; TODO confirm against Maildir#read
          expect(maildir.inodes.keys.size).to be >= emindex - 3
        end
      end

      example "the number of read files is #{maildir.count}" do
        expect(maildir.count).to be > 0
        expect(maildir.count).to be == emindex
        expect(maildir.count).to be == allofthefiles
      end
    end
  end
end
|
require 'spec_helper'
require 'sisimai/data'
require 'sisimai/mail'
require 'sisimai/message'
# Debugging switch: when non-empty (e.g. 'sendmail-01'), only the single
# sample email whose "<mta>-<nn>" name matches is exercised by the loop below.
DebugOnlyTo = ''
# Expected parse results per MTA module. For each module name, maps a
# two-digit sample number 'NN' (file ./eg/maildir-as-a-sample/new/<mta>-NN.eml)
# to patterns the parsed delivery status (D.S.N) and bounce reason must match.
MTAChildren = {
  'Courier' => {
    '01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '02' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/filtered/ },
    '03' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/blocked/ },
    '04' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/hostunknown/ },
  },
  'Exchange' => {
    '01' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
    '02' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
    '03' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
    '04' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/filtered/ },
    '05' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
    '06' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
  },
  'Exim' => {
    '01' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/blocked/ },
    '02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/userunknown/ },
    '03' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/securityerror/ },
    '04' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/blocked/ },
    '05' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '06' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
    '07' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/mailboxfull/ },
    '08' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
    '09' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/hostunknown/ },
    '10' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/suspend/ },
    '11' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/onhold/ },
    '12' => { 'status' => %r/\A[45][.]0[.]\d+\z/, 'reason' => %r/(?:hostunknown|expired|undefined)/ },
    '13' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/(?:onhold|undefined|mailererror)/ },
    '14' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
    '15' => { 'status' => %r/\A5[.]4[.]3\z/, 'reason' => %r/systemerror/ },
    '16' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/systemerror/ },
    '17' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/mailboxfull/ },
    '18' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/hostunknown/ },
    '19' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/networkerror/ },
    '20' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/(?:expired|systemerror)/ },
    '21' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/expired/ },
    # NOTE(review): '22' is absent here while '23'..'28' exist — the loop below
    # guards with `next unless MTAChildren[x][n]`, so the gap is tolerated.
    '23' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
    '24' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/filtered/ },
    '25' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
    '26' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/mailererror/ },
    '27' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/blocked/ },
    '28' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/mailererror/ },
  },
  'OpenSMTPD' => {
    '01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '02' => { 'status' => %r/\A5[.][12][.][12]\z/, 'reason' => %r/(?:userunknown|mailboxfull)/ },
    '03' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/hostunknown/ },
    '04' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/networkerror/ },
    '05' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/expired/ },
  },
  'Postfix' => {
    '01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/mailererror/ },
    '02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/(?:filtered|userunknown)/ },
    '03' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/filtered/ },
    '04' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '05' => { 'status' => %r/\A4[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '06' => { 'status' => %r/\A5[.]4[.]4\z/, 'reason' => %r/hostunknown/ },
    '07' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/filtered/ },
    '08' => { 'status' => %r/\A4[.]4[.]1\z/, 'reason' => %r/expired/ },
    '09' => { 'status' => %r/\A4[.]3[.]2\z/, 'reason' => %r/toomanyconn/ },
    '10' => { 'status' => %r/\A5[.]1[.]8\z/, 'reason' => %r/rejected/ },
    '11' => { 'status' => %r/\A5[.]1[.]8\z/, 'reason' => %r/rejected/ },
    '12' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '13' => { 'status' => %r/\A5[.]2[.][12]\z/, 'reason' => %r/(?:userunknown|mailboxfull)/ },
    '14' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '15' => { 'status' => %r/\A4[.]4[.]1\z/, 'reason' => %r/expired/ },
    '16' => { 'status' => %r/\A5[.]1[.]6\z/, 'reason' => %r/hasmoved/ },
    '17' => { 'status' => %r/\A5[.]4[.]4\z/, 'reason' => %r/networkerror/ },
    '18' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/norelaying/ },
    '19' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/blocked/ },
    '20' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/onhold/ },
    '21' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/networkerror/ },
  },
  'Qmail' => {
    '01' => { 'status' => %r/\A5[.]5[.]0\z/, 'reason' => %r/userunknown/ },
    '02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/(?:userunknown|filtered)/ },
    '03' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/rejected/ },
    '04' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/blocked/ },
    '05' => { 'status' => %r/\A4[.]4[.]3\z/, 'reason' => %r/systemerror/ },
    '06' => { 'status' => %r/\A4[.]2[.]2\z/, 'reason' => %r/mailboxfull/ },
    '07' => { 'status' => %r/\A4[.]4[.]1\z/, 'reason' => %r/networkerror/ },
    '08' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/mailboxfull/ },
    '09' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/undefined/ },
  },
  'Sendmail' => {
    '01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/(?:userunknown|filtered)/ },
    '03' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '04' => { 'status' => %r/\A5[.]1[.]8\z/, 'reason' => %r/rejected/ },
    '05' => { 'status' => %r/\A5[.]2[.]3\z/, 'reason' => %r/exceedlimit/ },
    '06' => { 'status' => %r/\A5[.]6[.]9\z/, 'reason' => %r/contenterror/ },
    '07' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/norelaying/ },
    '08' => { 'status' => %r/\A4[.]7[.]1\z/, 'reason' => %r/blocked/ },
    '09' => { 'status' => %r/\A5[.]7[.]9\z/, 'reason' => %r/securityerror/ },
    '10' => { 'status' => %r/\A4[.]7[.]1\z/, 'reason' => %r/blocked/ },
    '11' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/expired/ },
    '12' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/expired/ },
    '13' => { 'status' => %r/\A5[.]3[.]0\z/, 'reason' => %r/systemerror/ },
    '14' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '15' => { 'status' => %r/\A5[.]1[.]2\z/, 'reason' => %r/hostunknown/ },
    '16' => { 'status' => %r/\A5[.]5[.]0\z/, 'reason' => %r/blocked/ },
    '17' => { 'status' => %r/\A5[.]1[.]6\z/, 'reason' => %r/hasmoved/ },
    '18' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/mailererror/ },
    '19' => { 'status' => %r/\A5[.]2[.]0\z/, 'reason' => %r/filtered/ },
    '20' => { 'status' => %r/\A5[.]4[.]6\z/, 'reason' => %r/networkerror/ },
    '21' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/blocked/ },
    '22' => { 'status' => %r/\A5[.]1[.]6\z/, 'reason' => %r/hasmoved/ },
    '23' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/spamdetected/ },
    '24' => { 'status' => %r/\A5[.]1[.]2\z/, 'reason' => %r/hostunknown/ },
    '25' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '26' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '27' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/filtered/ },
    '28' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '29' => { 'status' => %r/\A4[.]5[.]0\z/, 'reason' => %r/expired/ },
    '30' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/expired/ },
    '31' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/securityerror/ },
    '32' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
    '33' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/blocked/ },
    '34' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/securityerror/ },
    '35' => { 'status' => %r/\A5[.]7[.]13\z/, 'reason' => %r/suspend/ },
  },
}
# Dynamically generate specs for every MTA module listed in MTAChildren:
# each sample email under ./eg/maildir-as-a-sample/new/ is parsed into a
# Sisimai::Message and Sisimai::Data objects, and the results are checked
# against the expected status/reason patterns from the table above.
MTAChildren.each_key do |x|
  cn = Module.const_get('Sisimai::MTA::' + x)

  describe cn do
    describe '.description' do
      it('returns String') { expect(cn.description).to be_a String }
      it('has the size') { expect(cn.description.size).to be > 0 }
    end
    describe '.pattern' do
      it('returns Hash') { expect(cn.pattern).to be_a Hash }
      it('have some keys') { expect(cn.pattern.keys.size).to be > 0 }
    end
    describe '.scan' do
      it('returns nil') { expect(cn.scan(nil, nil)).to be nil }
    end

    (1..MTAChildren[x].keys.size).each do |i|
      # debugging switch: when DebugOnlyTo is set, only that one sample runs
      if DebugOnlyTo.size > 0
        next unless DebugOnlyTo == sprintf('%s-%02d', x.downcase, i)
      end

      emailfn = sprintf('./eg/maildir-as-a-sample/new/%s-%02d.eml', x.downcase, i)
      mailbox = Sisimai::Mail.new(emailfn)
      mailtxt = nil
      n = sprintf('%02d', i)
      next unless mailbox.path
      next unless MTAChildren[x][n]

      example sprintf('[%s] %s/mail = %s', n, cn, emailfn) do
        expect(File.exist?(emailfn)).to be true
      end

      while r = mailbox.read do
        mailtxt = r
        it('returns String') { expect(mailtxt).to be_a String }

        p = Sisimai::Message.new({ 'data' => r })
        it('returns Sisimai::Message object') { expect(p).to be_a Sisimai::Message }
        example('Array in ds accessor') { expect(p.ds).to be_a Array }
        example('Hash in header accessor') { expect(p.header).to be_a Hash }
        example('Hash in rfc822 accessor') { expect(p.rfc822).to be_a Hash }
        example('#from returns String') { expect(p.from).to be_a String }
        example sprintf('[%s] %s#from = %s', n, cn, p.from) do
          expect(p.from.size).to be > 0
        end

        # per-delivery-status checks on each element of Message#ds
        p.ds.each do |e|
          ['recipient', 'agent'].each do |ee|
            example sprintf('[%s] %s[%s] = %s', n, x, ee, e[ee]) do
              expect(e[ee].size).to be > 0
            end
          end

          # these keys must always be present, even when their value is empty
          %w[
            date spec reason status command action alias rhost lhost diagnosis
            feedbacktype softbounce
          ].each do |ee|
            example sprintf('[%s] %s[%s] = %s', n, x, ee, e[ee]) do
              expect(e.key?(ee)).to be true
            end
          end

          # some modules report an agent name that differs from the hash key
          if x == 'mFILTER'
            example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
              expect(e['agent']).to be == 'm-FILTER'
            end
          elsif x == 'X4'
            example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
              expect(e['agent']).to match(/(?:qmail|X4)/)
            end
          elsif x == 'Qmail'
            example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
              expect(e['agent']).to be == 'qmail'
            end
          else
            example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
              expect(e['agent']).to be == x
            end
          end

          example sprintf('[%s] %s[recipient] = %s', n, x, e['recipient']) do
            # FIX: the original class /[0-9A-Za-z@-_.]+/ contained the
            # unintended range '@-_' (0x40..0x5F), which also matched
            # '[', '\', ']' and '^'; the dash is now last so it is literal.
            expect(e['recipient']).to match(/[0-9A-Za-z@_.-]+/)
            expect(e['recipient']).not_to match(/[ ]/)
          end
          example sprintf('[%s] %s[command] = %s', n, x, e['command']) do
            expect(e['command']).not_to match(/[ ]/)
          end

          if e['status'] && e['status'].size > 0
            example sprintf('[%s] %s[status] = %s', n, x, e['status']) do
              expect(e['status']).to match(/\A(?:[45][.]\d[.]\d+)\z/)
            end
          end
          if e['action'].size > 0
            example sprintf('[%s] %s[action] = %s', n, x, e['action']) do
              expect(e['action']).to match(/\A(?:fail.+|delayed|expired)\z/)
            end
          end

          ['rhost', 'lhost'].each do |ee|
            next unless e[ee]
            next unless e[ee].size > 0
            # NOTE(review): 'qmail' here is lower-case while the hash key is
            # 'Qmail' — confirm whether this skip is meant to match that module
            next if x =~ /\A(?:qmail|Exim|Exchange|X4)\z/
            example sprintf('[%s] %s[%s] = %s', n, x, ee, e[ee]) do
              expect(e[ee]).to match(/\A(?:localhost|.+[.].+)\z/)
            end
          end
        end

        o = Sisimai::Data.make({ 'data' => p })
        it 'returns Array' do
          expect(o).to be_a Array
          expect(o.size).to be > 0
        end

        # per-bounce checks on each Sisimai::Data object
        o.each do |e|
          it('is Sisimai::Data object') { expect(e).to be_a Sisimai::Data }
          example '#timestamp returns Sisimai::Time' do
            expect(e.timestamp).to be_a Sisimai::Time
          end
          example '#addresser returns Sisimai::Address' do
            expect(e.addresser).to be_a Sisimai::Address
          end
          example '#recipient returns Sisimai::Address' do
            expect(e.recipient).to be_a Sisimai::Address
          end

          %w[replycode subject smtpcommand diagnosticcode diagnostictype].each do |ee|
            example sprintf('[%s] %s#%s = %s', n, x, ee, e.send(ee)) do
              expect(e.send(ee)).to be_a String
            end
          end
          example sprintf('[%s] %s#deliverystatus = %s', n, x, e.deliverystatus) do
            expect(e.deliverystatus).to be_a String
            expect(e.deliverystatus).not_to be_empty
          end
          %w[token smtpagent timezoneoffset].each do |ee|
            example sprintf('[%s] %s#%s = %s', n, x, ee, e.send(ee)) do
              expect(e.send(ee)).to be_a String
            end
          end

          example sprintf('[%s] %s#senderdomain = %s', n, x, e.senderdomain) do
            expect(e.addresser.host).to be == e.senderdomain
          end
          example sprintf('[%s] %s#destination = %s', n, x, e.destination) do
            expect(e.recipient.host).to be == e.destination
          end

          example sprintf('[%s] %s#softbounce = %s', n, x, e.softbounce) do
            # softbounce is derived from the D.S.N class:
            # 1 = temporary (4xx), 0 = permanent (5xx), -1 = otherwise
            if e.deliverystatus[0, 1].to_i == 4
              expect(e.softbounce).to be == 1
            elsif e.deliverystatus[0, 1].to_i == 5
              expect(e.softbounce).to be == 0
            else
              expect(e.softbounce).to be == -1
            end
          end

          example sprintf('[%s] %s#replycode = %s', n, x, e.replycode) do
            expect(e.replycode).to match(/\A(?:[45]\d\d|)\z/)
          end
          example sprintf('[%s] %s#timezoneoffset = %s', n, x, e.timezoneoffset) do
            expect(e.timezoneoffset).to match(/\A[-+]\d{4}\z/)
          end
          example sprintf('[%s] %s#deliverystatus = %s', n, x, e.deliverystatus) do
            expect(e.deliverystatus).to match(MTAChildren[x][n]['status'])
          end
          example sprintf('[%s] %s#reason = %s', n, x, e.reason) do
            expect(e.reason).to match(MTAChildren[x][n]['reason'])
          end
          example sprintf('[%s] %s#token = %s', n, x, e.token) do
            expect(e.token).to match(/\A[0-9a-f]{40}\z/)
          end
          example sprintf('[%s] %s#feedbacktype = %s', n, x, e.feedbacktype) do
            expect(e.feedbacktype).to be_empty
          end

          # none of these accessors may contain a space character
          %w[deliverystatus diagnostictype smtpcommand lhost rhost alias listid
             action messageid].each do |ee|
            example sprintf('[%s] %s#%s = %s', n, x, ee, e.send(ee)) do
              expect(e.send(ee)).not_to match(/[ ]/)
            end
          end
          %w[addresser recipient].each do |ee|
            %w[user host verp alias].each do |eee|
              example sprintf('[%s] %s#%s#%s = %s', n, x, ee, eee, e.send(ee).send(eee)) do
                expect(e.send(ee).send(eee)).not_to match(/[ ]/)
              end
            end
          end
        end
      end
    end
  end
end
Add value table for Sisimai::MTA::Domino, issue #7
require 'spec_helper'
require 'sisimai/data'
require 'sisimai/mail'
require 'sisimai/message'
DebugOnlyTo = ''
MTAChildren = {
'Courier' => {
'01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'02' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/filtered/ },
'03' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/blocked/ },
'04' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/hostunknown/ },
},
'Domino' => {
'01' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
'02' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
},
'Exchange' => {
'01' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
'02' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
'03' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
'04' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/filtered/ },
'05' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
'06' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
},
'Exim' => {
'01' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/blocked/ },
'02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/userunknown/ },
'03' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/securityerror/ },
'04' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/blocked/ },
'05' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'06' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
'07' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/mailboxfull/ },
'08' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
'09' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/hostunknown/ },
'10' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/suspend/ },
'11' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/onhold/ },
'12' => { 'status' => %r/\A[45][.]0[.]\d+\z/, 'reason' => %r/(?:hostunknown|expired|undefined)/ },
'13' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/(?:onhold|undefined|mailererror)/ },
'14' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
'15' => { 'status' => %r/\A5[.]4[.]3\z/, 'reason' => %r/systemerror/ },
'16' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/systemerror/ },
'17' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/mailboxfull/ },
'18' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/hostunknown/ },
'19' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/networkerror/ },
'20' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/(?:expired|systemerror)/ },
'21' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/expired/ },
'23' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/userunknown/ },
'24' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/filtered/ },
'25' => { 'status' => %r/\A4[.]0[.]\d+\z/, 'reason' => %r/expired/ },
'26' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/mailererror/ },
'27' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/blocked/ },
'28' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/mailererror/ },
},
'OpenSMTPD' => {
'01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'02' => { 'status' => %r/\A5[.][12][.][12]\z/, 'reason' => %r/(?:userunknown|mailboxfull)/ },
'03' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/hostunknown/ },
'04' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/networkerror/ },
'05' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/expired/ },
},
'Postfix' => {
'01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/mailererror/ },
'02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/(?:filtered|userunknown)/ },
'03' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/filtered/ },
'04' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'05' => { 'status' => %r/\A4[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'06' => { 'status' => %r/\A5[.]4[.]4\z/, 'reason' => %r/hostunknown/ },
'07' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/filtered/ },
'08' => { 'status' => %r/\A4[.]4[.]1\z/, 'reason' => %r/expired/ },
'09' => { 'status' => %r/\A4[.]3[.]2\z/, 'reason' => %r/toomanyconn/ },
'10' => { 'status' => %r/\A5[.]1[.]8\z/, 'reason' => %r/rejected/ },
'11' => { 'status' => %r/\A5[.]1[.]8\z/, 'reason' => %r/rejected/ },
'12' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'13' => { 'status' => %r/\A5[.]2[.][12]\z/, 'reason' => %r/(?:userunknown|mailboxfull)/ },
'14' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'15' => { 'status' => %r/\A4[.]4[.]1\z/, 'reason' => %r/expired/ },
'16' => { 'status' => %r/\A5[.]1[.]6\z/, 'reason' => %r/hasmoved/ },
'17' => { 'status' => %r/\A5[.]4[.]4\z/, 'reason' => %r/networkerror/ },
'18' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/norelaying/ },
'19' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/blocked/ },
'20' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/onhold/ },
'21' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/networkerror/ },
},
'Qmail' => {
'01' => { 'status' => %r/\A5[.]5[.]0\z/, 'reason' => %r/userunknown/ },
'02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/(?:userunknown|filtered)/ },
'03' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/rejected/ },
'04' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/blocked/ },
'05' => { 'status' => %r/\A4[.]4[.]3\z/, 'reason' => %r/systemerror/ },
'06' => { 'status' => %r/\A4[.]2[.]2\z/, 'reason' => %r/mailboxfull/ },
'07' => { 'status' => %r/\A4[.]4[.]1\z/, 'reason' => %r/networkerror/ },
'08' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/mailboxfull/ },
'09' => { 'status' => %r/\A5[.]0[.]\d+\z/, 'reason' => %r/undefined/ },
},
'Sendmail' => {
'01' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'02' => { 'status' => %r/\A5[.][12][.]1\z/, 'reason' => %r/(?:userunknown|filtered)/ },
'03' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'04' => { 'status' => %r/\A5[.]1[.]8\z/, 'reason' => %r/rejected/ },
'05' => { 'status' => %r/\A5[.]2[.]3\z/, 'reason' => %r/exceedlimit/ },
'06' => { 'status' => %r/\A5[.]6[.]9\z/, 'reason' => %r/contenterror/ },
'07' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/norelaying/ },
'08' => { 'status' => %r/\A4[.]7[.]1\z/, 'reason' => %r/blocked/ },
'09' => { 'status' => %r/\A5[.]7[.]9\z/, 'reason' => %r/securityerror/ },
'10' => { 'status' => %r/\A4[.]7[.]1\z/, 'reason' => %r/blocked/ },
'11' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/expired/ },
'12' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/expired/ },
'13' => { 'status' => %r/\A5[.]3[.]0\z/, 'reason' => %r/systemerror/ },
'14' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'15' => { 'status' => %r/\A5[.]1[.]2\z/, 'reason' => %r/hostunknown/ },
'16' => { 'status' => %r/\A5[.]5[.]0\z/, 'reason' => %r/blocked/ },
'17' => { 'status' => %r/\A5[.]1[.]6\z/, 'reason' => %r/hasmoved/ },
'18' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/mailererror/ },
'19' => { 'status' => %r/\A5[.]2[.]0\z/, 'reason' => %r/filtered/ },
'20' => { 'status' => %r/\A5[.]4[.]6\z/, 'reason' => %r/networkerror/ },
'21' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/blocked/ },
'22' => { 'status' => %r/\A5[.]1[.]6\z/, 'reason' => %r/hasmoved/ },
'23' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/spamdetected/ },
'24' => { 'status' => %r/\A5[.]1[.]2\z/, 'reason' => %r/hostunknown/ },
'25' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'26' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'27' => { 'status' => %r/\A5[.]0[.]0\z/, 'reason' => %r/filtered/ },
'28' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'29' => { 'status' => %r/\A4[.]5[.]0\z/, 'reason' => %r/expired/ },
'30' => { 'status' => %r/\A4[.]4[.]7\z/, 'reason' => %r/expired/ },
'31' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/securityerror/ },
'32' => { 'status' => %r/\A5[.]1[.]1\z/, 'reason' => %r/userunknown/ },
'33' => { 'status' => %r/\A5[.]7[.]1\z/, 'reason' => %r/blocked/ },
'34' => { 'status' => %r/\A5[.]7[.]0\z/, 'reason' => %r/securityerror/ },
'35' => { 'status' => %r/\A5[.]7[.]13\z/, 'reason' => %r/suspend/ },
},
}
MTAChildren.each_key do |x|
cn = Module.const_get('Sisimai::MTA::' + x)
describe cn do
describe '.description' do
it('returns String') { expect(cn.description).to be_a String }
it('has the size') { expect(cn.description.size).to be > 0 }
end
describe '.pattern' do
it('returns Hash') { expect(cn.pattern).to be_a Hash }
it('have some keys') { expect(cn.pattern.keys.size).to be > 0 }
end
describe '.scan' do
it('returns nil') { expect(cn.scan(nil,nil)).to be nil }
end
(1 .. MTAChildren[x].keys.size).each do |i|
if DebugOnlyTo.size > 0
next unless DebugOnlyTo == sprintf( "%s-%02d", x.downcase, i)
end
emailfn = sprintf('./eg/maildir-as-a-sample/new/%s-%02d.eml', x.downcase, i)
mailbox = Sisimai::Mail.new(emailfn)
mailtxt = nil
n = sprintf('%02d', i)
next unless mailbox.path
next unless MTAChildren[x][n]
example sprintf('[%s] %s/mail = %s', n, cn, emailfn) do
expect(File.exist?(emailfn)).to be true
end
while r = mailbox.read do
mailtxt = r
it('returns String') { expect(mailtxt).to be_a String }
p = Sisimai::Message.new( { 'data' => r } )
it('returns Sisimai::Message object') { expect(p).to be_a Sisimai::Message }
example('Array in ds accessor') { expect(p.ds).to be_a Array }
example('Hash in header accessor') { expect(p.header).to be_a Hash }
example('Hash in rfc822 accessor') { expect(p.rfc822).to be_a Hash }
example('#from returns String') { expect(p.from).to be_a String }
example sprintf('[%s] %s#from = %s', n, cn, p.from) do
expect(p.from.size).to be > 0
end
p.ds.each do |e|
['recipient', 'agent'].each do |ee|
example sprintf('[%s] %s[%s] = %s', n, x, ee, e[ee]) do
expect(e[ee].size).to be > 0
end
end
%w[
date spec reason status command action alias rhost lhost diagnosis
feedbacktype softbounce
].each do |ee|
example sprintf('[%s] %s[%s] = %s', n, x, ee, e[ee]) do
expect(e.key?(ee)).to be true
end
end
if x == 'mFILTER'
example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
expect(e['agent']).to be == 'm-FILTER'
end
elsif x == 'X4'
example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
expect(e['agent']).to match(/(?:qmail|X4)/)
end
elsif x == 'Qmail'
example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
expect(e['agent']).to be == 'qmail'
end
else
example sprintf('[%s] %s[agent] = %s', n, x, e['agent']) do
expect(e['agent']).to be == x
end
end
# A recipient must look like an address token and contain no spaces.
# Bug fix: the original class /[0-9A-Za-z@-_.]/ made '@-_' a codepoint
# range (0x40-0x5F), silently also matching '[', '\', ']', '^' and
# emitting Ruby's "character class has '-' without escape" warning.
# Listing '-' last makes it a literal hyphen, which is what was intended.
example sprintf('[%s] %s[recipient] = %s', n, x, e['recipient']) do
expect(e['recipient']).to match(/[0-9A-Za-z@_.-]+/)
expect(e['recipient']).not_to match(/[ ]/)
end
example sprintf('[%s] %s[command] = %s', n, x, e['command']) do
expect(e['command']).not_to match(/[ ]/)
end
if e['status'] && e['status'].size > 0
example sprintf('[%s] %s[status] = %s', n, x, e['status']) do
expect(e['status']).to match(/\A(?:[45][.]\d[.]\d+)\z/)
end
end
if e['action'].size > 0
example sprintf('[%s] %s[action] = %s', n, x, e['action']) do
expect(e['action']).to match(/\A(?:fail.+|delayed|expired)\z/)
end
end
['rhost', 'lhost'].each do |ee|
next unless e[ee]
next unless e[ee].size > 0
next if x =~ /\A(?:qmail|Exim|Exchange|X4)\z/
example sprintf('[%s] %s[%s] = %s', n, x, ee, e[ee]) do
expect(e[ee]).to match(/\A(?:localhost|.+[.].+)\z/)
end
end
end
o = Sisimai::Data.make( { 'data' => p } )
it 'returns Array' do
expect(o).to be_a Array
expect(o.size).to be > 0
end
o.each do |e|
it('is Sisimai::Data object') { expect(e).to be_a Sisimai::Data }
example '#timestamp returns Sisimai::Time' do
expect(e.timestamp).to be_a Sisimai::Time
end
example '#addresser returns Sisimai::Address' do
expect(e.addresser).to be_a Sisimai::Address
end
example '#recipient returns Sisimai::Address' do
expect(e.recipient).to be_a Sisimai::Address
end
%w[replycode subject smtpcommand diagnosticcode diagnostictype].each do |ee|
example sprintf('[%s] %s#%s = %s', n, x, ee, e.send(ee)) do
expect(e.send(ee)).to be_a String
end
end
example sprintf('[%s] %s#deliverystatus = %s', n, x, e.deliverystatus) do
expect(e.deliverystatus).to be_a String
expect(e.deliverystatus).not_to be_empty
end
%w[token smtpagent timezoneoffset].each do |ee|
example sprintf('[%s] %s#%s = %s', n, x, ee, e.send(ee)) do
expect(e.send(ee)).to be_a String
end
end
example sprintf('[%s] %s#senderdomain = %s', n, x, e.senderdomain) do
expect(e.addresser.host).to be == e.senderdomain
end
example sprintf('[%s] %s#destination = %s', n, x, e.destination) do
expect(e.recipient.host).to be == e.destination
end
example sprintf('[%s] %s#softbounce = %s', n, x, e.softbounce) do
if e.deliverystatus[0,1].to_i == 4
expect(e.softbounce).to be == 1
elsif e.deliverystatus[0,1].to_i == 5
expect(e.softbounce).to be == 0
else
expect(e.softbounce).to be == -1
end
end
example sprintf('[%s] %s#replycode = %s', n, x, e.replycode) do
expect(e.replycode).to match(/\A(?:[45]\d\d|)\z/)
end
example sprintf('[%s] %s#timezoneoffset = %s', n, x, e.timezoneoffset) do
expect(e.timezoneoffset).to match(/\A[-+]\d{4}\z/)
end
example sprintf('[%s] %s#deliverystatus = %s', n, x, e.deliverystatus) do
expect(e.deliverystatus).to match(MTAChildren[x][n]['status'])
end
example sprintf('[%s] %s#reason = %s', n, x, e.reason) do
expect(e.reason).to match(MTAChildren[x][n]['reason'])
end
example sprintf('[%s] %s#token = %s', n, x, e.token) do
expect(e.token).to match(/\A[0-9a-f]{40}\z/)
end
example sprintf('[%s] %s#feedbacktype = %s', n, x, e.feedbacktype) do
expect(e.feedbacktype).to be_empty
end
# These Sisimai::Data accessors may be empty strings but must never
# contain a space character. (Also removes the stray space in "]. each"
# for consistency with every other ".each" call in this file.)
%w[deliverystatus diagnostictype smtpcommand lhost rhost alias listid
action messageid].each do |ee|
example sprintf('[%s] %s#%s = %s', n, x, ee, e.send(ee)) do
expect(e.send(ee)).not_to match(/[ ]/)
end
end
%w[addresser recipient].each do |ee|
%w[user host verp alias].each do |eee|
example sprintf('[%s] %s#%s#%s = %s', n, x, ee, eee, e.send(ee).send(eee)) do
expect(e.send(ee).send(eee)).not_to match(/[ ]/)
end
end
end
end
end
end
end
end
|
# encoding: utf-8
# Spec helper that temporarily replaces Adhearsion::Initializer methods that
# have process/filesystem side effects (daemonizing, requiring files, creating
# log paths, running plugins) with no-ops, then restores the originals.
module InitializerStubs
# Victim class => methods to neutralize. An element may also be an
# [method_name, canned_return_value] pair (see #stub_unwanted_behavior).
UNWANTED_BEHAVIOR = {
Adhearsion::Initializer => [:initialize_log_paths, :update_rails_env_var, :daemonize!, :require, :init_plugins, :run_plugins, :load_lib_folder]
} unless defined? UNWANTED_BEHAVIOR
# Stubs the unwanted behavior, yields (if a block is given), and always
# restores the real implementations afterwards — even if the block raises.
def stub_behavior_for_initializer_with_no_path_changing_behavior
stub_unwanted_behavior
yield if block_given?
ensure
unstub_directory_changing_behavior
end
# Convenience wrapper: boots a stubbed initializer and yields it to the block.
def with_new_initializer_with_no_path_changing_behavior(&block)
stub_behavior_for_initializer_with_no_path_changing_behavior do
block.call Adhearsion::Initializer.start
end
end
# For each victim class, saves every undesired method under
# "pre_stubbed_<name>" via alias_method, then redefines it. A plain symbol
# becomes a no-op lambda; an Array is treated as [name, return_value] and the
# replacement returns that canned value.
def stub_unwanted_behavior(unwanted_behavior = UNWANTED_BEHAVIOR)
unwanted_behavior.each do |stub_victim_class, undesired_methods|
undesired_methods.each do |undesired_method_name_or_key_value_pair|
undesired_method_name, method_implementation = case undesired_method_name_or_key_value_pair
when Array
[undesired_method_name_or_key_value_pair.first, lambda { |*args| undesired_method_name_or_key_value_pair.last } ]
else
[undesired_method_name_or_key_value_pair, lambda{ |*args| }]
end
stub_victim_class.send(:alias_method, "pre_stubbed_#{undesired_method_name}", undesired_method_name)
stub_victim_class.send(:define_method, undesired_method_name, &method_implementation)
end
end
end
# Reverses #stub_unwanted_behavior: removes the stub and re-aliases the saved
# "pre_stubbed_" implementation back to the original name.
# NOTE(review): the "pre_stubbed_<name>" alias itself is left defined on the
# class after restore — harmless for specs, but confirm it is intentional.
def unstub_directory_changing_behavior
UNWANTED_BEHAVIOR.each do |stub_victim_class, undesired_methods|
undesired_methods.each do |undesired_method_name|
undesired_method_name = undesired_method_name.first if undesired_method_name.kind_of? Array
stub_victim_class.send(:remove_method, undesired_method_name)
stub_victim_class.send(:alias_method, undesired_method_name, "pre_stubbed_#{undesired_method_name}")
end
end
end
end
[BUGFIX] Fix some failing specs
# encoding: utf-8
# Spec helper that temporarily replaces Adhearsion::Initializer methods that
# have process/filesystem side effects (logging, daemonizing, requiring files,
# creating log paths, running plugins) with no-ops, then restores them.
module InitializerStubs
# Victim class => methods to neutralize. An element may also be an
# [method_name, canned_return_value] pair (see #stub_unwanted_behavior).
UNWANTED_BEHAVIOR = {
Adhearsion::Initializer => [:debugging_log, :initialize_log_paths, :update_rails_env_var, :daemonize!, :require, :init_plugins, :run_plugins, :load_lib_folder]
} unless defined? UNWANTED_BEHAVIOR
# Stubs the unwanted behavior, yields (if a block is given), and always
# restores the real implementations afterwards — even if the block raises.
def stub_behavior_for_initializer_with_no_path_changing_behavior
stub_unwanted_behavior
yield if block_given?
ensure
unstub_directory_changing_behavior
end
# Convenience wrapper: boots a stubbed initializer and yields it to the block.
def with_new_initializer_with_no_path_changing_behavior(&block)
stub_behavior_for_initializer_with_no_path_changing_behavior do
block.call Adhearsion::Initializer.start
end
end
# For each victim class, saves every undesired method under
# "pre_stubbed_<name>" via alias_method, then redefines it. A plain symbol
# becomes a no-op lambda; an Array is treated as [name, return_value] and the
# replacement returns that canned value.
def stub_unwanted_behavior(unwanted_behavior = UNWANTED_BEHAVIOR)
unwanted_behavior.each do |stub_victim_class, undesired_methods|
undesired_methods.each do |undesired_method_name_or_key_value_pair|
undesired_method_name, method_implementation = case undesired_method_name_or_key_value_pair
when Array
[undesired_method_name_or_key_value_pair.first, lambda { |*args| undesired_method_name_or_key_value_pair.last } ]
else
[undesired_method_name_or_key_value_pair, lambda{ |*args| }]
end
stub_victim_class.send(:alias_method, "pre_stubbed_#{undesired_method_name}", undesired_method_name)
stub_victim_class.send(:define_method, undesired_method_name, &method_implementation)
end
end
end
# Reverses #stub_unwanted_behavior: removes the stub and re-aliases the saved
# "pre_stubbed_" implementation back to the original name.
# NOTE(review): the "pre_stubbed_<name>" alias itself is left defined on the
# class after restore — harmless for specs, but confirm it is intentional.
def unstub_directory_changing_behavior
UNWANTED_BEHAVIOR.each do |stub_victim_class, undesired_methods|
undesired_methods.each do |undesired_method_name|
undesired_method_name = undesired_method_name.first if undesired_method_name.kind_of? Array
stub_victim_class.send(:remove_method, undesired_method_name)
stub_victim_class.send(:alias_method, undesired_method_name, "pre_stubbed_#{undesired_method_name}")
end
end
end
end
|
describe Job do
let(:params) { {} }
let(:repo) { FactoryGirl.create(:repository) }
let(:build) { FactoryGirl.create(:build, repository: repo, state: [:received, :queued].include?(state) ? :created : state) }
let(:job) { FactoryGirl.create(:job, repository: repo, build: build, state: state) }
let(:now) { Time.now }
before { Travis::Event.stubs(:dispatch) }
def receive
job.send(:"#{event}!", params)
end
shared_examples 'sets the job to :received' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :received' do
receive
expect(job.reload.state).to eql(:received)
end
it 'sets :received_at' do
receive
expect(job.reload.received_at).to eql(now)
end
it 'dispatches a job:received event' do
Travis::Event.expects(:dispatch).with('job:received', id: job.id)
receive
end
end
shared_examples 'sets the job to :started' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :started' do
receive
expect(job.reload.state).to eql(:started)
end
it 'sets :started_at' do
receive
expect(job.reload.started_at).to eql(now)
end
it 'dispatches a job:started event' do
Travis::Event.expects(:dispatch).with('job:started', id: job.id)
receive
end
describe 'propagates to the build' do
it 'sets :state to :started' do
receive
expect(job.build.reload.state).to eql(:started)
end
it 'sets :finished_at' do
receive
expect(job.build.reload.started_at).to eql(now)
end
end
describe 'it denormalizes to the repository' do
%w(id number state duration started_at finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
shared_examples 'sets the job to :passed' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :passed' do
receive
expect(job.reload.state).to eql(:passed)
end
it 'sets :finished_at' do
receive
expect(job.reload.finished_at).to eql(now)
end
it 'dispatches a job:finished event' do
Travis::Event.expects(:dispatch).with('job:finished', id: job.id)
receive
end
describe 'propagates to the build' do
describe 'with all other jobs being finished' do
it 'sets :state to :passed' do
receive
expect(job.build.reload.state).to eql(:passed)
end
it 'sets :finished_at' do
receive
expect(job.build.reload.finished_at).to eql(now)
end
end
describe 'with other jobs being pending' do
before do
FactoryGirl.create(:job, build: build, state: :started)
end
it 'does not set :state to :passed' do
receive
expect(job.build.reload.state).to_not eql(:passed)
end
it 'does not set :finished_at' do
receive
expect(job.build.reload.finished_at).to be_nil
end
end
end
describe 'it denormalizes to the repository' do
%w(state duration finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
shared_examples 'cancels the job' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :canceled' do
receive
expect(job.reload.state).to eql(:canceled)
end
it 'sets :canceled_at' do
receive
expect(job.reload.canceled_at).to eql(now)
end
it 'sets :finished_at' do
receive
expect(job.reload.finished_at).to eql(now)
end
it 'dispatches a job:canceled event' do
Travis::Event.expects(:dispatch).with('job:canceled', id: job.id)
receive
end
describe 'with all other jobs being finished' do
it 'sets the build to :canceled' do
receive
expect(job.build.reload.state).to eql(:canceled)
end
end
describe 'with other jobs being pending' do
before do
FactoryGirl.create(:job, build: build, state: :started)
end
it 'does not set the build to :canceled' do
receive
expect(job.build.reload.state).to_not eql(:canceled)
end
end
describe 'it denormalizes to the repository' do
%w(state duration finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
shared_examples 'restarts the job' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :created' do
receive
expect(job.reload.state).to eql(:created)
end
it 'resets :queued_at' do
receive
expect(job.reload.queued_at).to be_nil
end
it 'resets :received_at' do
receive
expect(job.reload.received_at).to be_nil
end
it 'resets :started_at' do
receive
expect(job.reload.started_at).to be_nil
end
it 'resets :finished_at' do
receive
expect(job.reload.finished_at).to be_nil
end
it 'resets :canceled_at' do
receive
expect(job.reload.canceled_at).to be_nil
end
it 'dispatches a job:restarted event' do
Travis::Event.expects(:dispatch).with('job:restarted', id: job.id)
receive
end
describe 'propagates to the build' do
it 'sets :state to :created' do
receive
expect(job.build.reload.state).to eql(:created)
end
it 'resets :duration' do
receive
expect(job.build.reload.duration).to be_nil
end
it 'resets :started_at' do
receive
expect(job.build.reload.started_at).to be_nil
end
it 'resets :finished_at' do
receive
expect(job.build.reload.finished_at).to be_nil
end
it 'clears log' do
receive
expect(job.log.reload.content).to be_nil
expect(job.log.reload.archive_verified).to be_nil
expect(job.log.reload.removed_by).to be_nil
expect(job.log.reload.removed_at).to be_nil
end
it 'does not restart other jobs on the matrix' do
other = FactoryGirl.create(:job, build: job.build, state: :passed)
receive
expect(other.reload.state).to eql(:passed)
end
end
describe 'it denormalizes to the repository' do
%w(state duration started_at finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
# Shared examples asserting that an event received in an incompatible state is
# ignored: neither the job nor its build changes state or timestamps.
shared_examples 'does not apply' do
it 'does not change the job :state' do
receive
expect(job.reload.state).to eql(state)
end
it 'does not change the job `[state]ed_at` time' do
# NOTE(review): "#{state}_ed".sub(/eed$/, 'ed') produces names like
# "started_ed" that are never real attributes, so respond_to? is false and
# this expectation is silently skipped for every state. The intended
# attribute was presumably the matching timestamp column (e.g.
# "started_at") — confirm and fix the derivation.
attr = "#{state}_ed".sub(/eed$/, 'ed')
expect { receive }.to_not change { job.reload.send(attr) } if job.respond_to?(attr)
end
it 'does not change the build :state' do
receive
# NOTE(review): this asserts job.reload.state, not the build's state,
# despite the example name. A naive switch to build.reload.state would
# fail for :received/:queued (the build is created as :created) — the
# assertion needs rethinking, not just renaming.
expect(job.reload.state).to eql(state)
end
it 'does not change the build `[state]ed_at` time' do
# NOTE(review): same vacuous attribute derivation as above.
attr = "#{state}_ed".sub(/eed$/, 'ed')
expect { receive }.to_not change { build.reload.send(attr) } if build.respond_to?(attr)
end
end
describe 'a :receive event' do
let(:event) { :receive }
let(:params) { { state: 'received', received_at: now.to_s } }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'sets the job to :received'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'sets the job to :received'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'does not apply'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'does not apply'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
describe 'a :start event' do
let(:event) { :start }
let(:params) { { state: 'started', started_at: now.to_s } }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'sets the job to :started'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'sets the job to :started'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'sets the job to :started'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'does not apply'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
describe 'a :finish event' do
let(:event) { :finish }
let(:params) { { state: 'passed', finished_at: now.to_s } }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'sets the job to :passed'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'sets the job to :passed'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'sets the job to :passed'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'sets the job to :passed'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
describe 'a :restart event' do
let(:event) { :restart }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'does not apply'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'restarts the job'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'restarts the job'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'restarts the job'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'restarts the job'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'restarts the job'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'restarts the job'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'restarts the job'
end
end
describe 'a :cancel event' do
let(:event) { :cancel }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'cancels the job'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'cancels the job'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'cancels the job'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'cancels the job'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
end
Fix expectation
describe Job do
let(:params) { {} }
let(:repo) { FactoryGirl.create(:repository) }
let(:build) { FactoryGirl.create(:build, repository: repo, state: [:received, :queued].include?(state) ? :created : state) }
let(:job) { FactoryGirl.create(:job, repository: repo, build: build, state: state) }
let(:now) { Time.now }
before { Travis::Event.stubs(:dispatch) }
def receive
job.send(:"#{event}!", params)
end
shared_examples 'sets the job to :received' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :received' do
receive
expect(job.reload.state).to eql(:received)
end
it 'sets :received_at' do
receive
expect(job.reload.received_at).to eql(now)
end
it 'dispatches a job:received event' do
Travis::Event.expects(:dispatch).with('job:received', id: job.id)
receive
end
end
shared_examples 'sets the job to :started' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :started' do
receive
expect(job.reload.state).to eql(:started)
end
it 'sets :started_at' do
receive
expect(job.reload.started_at).to eql(now)
end
it 'dispatches a job:started event' do
Travis::Event.expects(:dispatch).with('job:started', id: job.id)
receive
end
describe 'propagates to the build' do
it 'sets :state to :started' do
receive
expect(job.build.reload.state).to eql(:started)
end
it 'sets :finished_at' do
receive
expect(job.build.reload.started_at).to eql(now)
end
end
describe 'it denormalizes to the repository' do
%w(id number state duration started_at finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
shared_examples 'sets the job to :passed' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :passed' do
receive
expect(job.reload.state).to eql(:passed)
end
it 'sets :finished_at' do
receive
expect(job.reload.finished_at).to eql(now)
end
it 'dispatches a job:finished event' do
Travis::Event.expects(:dispatch).with('job:finished', id: job.id)
receive
end
describe 'propagates to the build' do
describe 'with all other jobs being finished' do
it 'sets :state to :passed' do
receive
expect(job.build.reload.state).to eql(:passed)
end
it 'sets :finished_at' do
receive
expect(job.build.reload.finished_at).to eql(now)
end
end
describe 'with other jobs being pending' do
before do
FactoryGirl.create(:job, build: build, state: :started)
end
it 'does not set :state to :passed' do
receive
expect(job.build.reload.state).to_not eql(:passed)
end
it 'does not set :finished_at' do
receive
expect(job.build.reload.finished_at).to be_nil
end
end
end
describe 'it denormalizes to the repository' do
%w(state duration finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
shared_examples 'cancels the job' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :canceled' do
receive
expect(job.reload.state).to eql(:canceled)
end
it 'sets :canceled_at' do
receive
expect(job.reload.canceled_at).to eql(now)
end
it 'sets :finished_at' do
receive
expect(job.reload.finished_at).to eql(now)
end
it 'dispatches a job:canceled event' do
Travis::Event.expects(:dispatch).with('job:canceled', id: job.id)
receive
end
describe 'with all other jobs being finished' do
it 'sets the build to :canceled' do
receive
expect(job.build.reload.state).to eql(:canceled)
end
end
describe 'with other jobs being pending' do
before do
FactoryGirl.create(:job, build: build, state: :started)
end
it 'does not set the build to :canceled' do
receive
expect(job.build.reload.state).to_not eql(:canceled)
end
end
describe 'it denormalizes to the repository' do
%w(state duration finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
shared_examples 'restarts the job' do
it 'returns true' do
expect(receive).to be_truthy
end
it 'sets :state to :created' do
receive
expect(job.reload.state).to eql(:created)
end
it 'resets :queued_at' do
receive
expect(job.reload.queued_at).to be_nil
end
it 'resets :received_at' do
receive
expect(job.reload.received_at).to be_nil
end
it 'resets :started_at' do
receive
expect(job.reload.started_at).to be_nil
end
it 'resets :finished_at' do
receive
expect(job.reload.finished_at).to be_nil
end
it 'resets :canceled_at' do
receive
expect(job.reload.canceled_at).to be_nil
end
it 'dispatches a job:restarted event' do
Travis::Event.expects(:dispatch).with('job:restarted', id: job.id)
receive
end
describe 'propagates to the build' do
it 'sets :state to :created' do
receive
expect(job.build.reload.state).to eql(:created)
end
it 'resets :duration' do
receive
expect(job.build.reload.duration).to be_nil
end
it 'resets :started_at' do
receive
expect(job.build.reload.started_at).to be_nil
end
it 'resets :finished_at' do
receive
expect(job.build.reload.finished_at).to be_nil
end
it 'clears log' do
receive
expect(job.log.reload.content).to be_empty
expect(job.log.reload.archive_verified).to be_nil
expect(job.log.reload.removed_by).to be_nil
expect(job.log.reload.removed_at).to be_nil
end
it 'does not restart other jobs on the matrix' do
other = FactoryGirl.create(:job, build: job.build, state: :passed)
receive
expect(other.reload.state).to eql(:passed)
end
end
describe 'it denormalizes to the repository' do
%w(state duration started_at finished_at).each do |attr|
it "sets last_build_#{attr}" do
receive
expect(repo.reload.send(:"last_build_#{attr}").to_s).to eql(build.reload.send(attr).to_s)
end
end
end
end
# Shared examples asserting that an event received in an incompatible state is
# ignored: neither the job nor its build changes state or timestamps.
shared_examples 'does not apply' do
it 'does not change the job :state' do
receive
expect(job.reload.state).to eql(state)
end
it 'does not change the job `[state]ed_at` time' do
# NOTE(review): "#{state}_ed".sub(/eed$/, 'ed') produces names like
# "started_ed" that are never real attributes, so respond_to? is false and
# this expectation is silently skipped for every state. The intended
# attribute was presumably the matching timestamp column (e.g.
# "started_at") — confirm and fix the derivation.
attr = "#{state}_ed".sub(/eed$/, 'ed')
expect { receive }.to_not change { job.reload.send(attr) } if job.respond_to?(attr)
end
it 'does not change the build :state' do
receive
# NOTE(review): this asserts job.reload.state, not the build's state,
# despite the example name. A naive switch to build.reload.state would
# fail for :received/:queued (the build is created as :created) — the
# assertion needs rethinking, not just renaming.
expect(job.reload.state).to eql(state)
end
it 'does not change the build `[state]ed_at` time' do
# NOTE(review): same vacuous attribute derivation as above.
attr = "#{state}_ed".sub(/eed$/, 'ed')
expect { receive }.to_not change { build.reload.send(attr) } if build.respond_to?(attr)
end
end
describe 'a :receive event' do
let(:event) { :receive }
let(:params) { { state: 'received', received_at: now.to_s } }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'sets the job to :received'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'sets the job to :received'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'does not apply'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'does not apply'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
describe 'a :start event' do
let(:event) { :start }
let(:params) { { state: 'started', started_at: now.to_s } }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'sets the job to :started'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'sets the job to :started'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'sets the job to :started'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'does not apply'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
describe 'a :finish event' do
let(:event) { :finish }
let(:params) { { state: 'passed', finished_at: now.to_s } }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'sets the job to :passed'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'sets the job to :passed'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'sets the job to :passed'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'sets the job to :passed'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
describe 'a :restart event' do
let(:event) { :restart }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'does not apply'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'restarts the job'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'restarts the job'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'restarts the job'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'restarts the job'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'restarts the job'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'restarts the job'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'restarts the job'
end
end
describe 'a :cancel event' do
let(:event) { :cancel }
describe 'received by a :created job' do
let(:state) { :created }
include_examples 'cancels the job'
end
describe 'received by a :queued job' do
let(:state) { :queued }
include_examples 'cancels the job'
end
describe 'received by a :received job' do
let(:state) { :received }
include_examples 'cancels the job'
end
describe 'received by a :started job' do
let(:state) { :started }
include_examples 'cancels the job'
end
describe 'received by a :passed job' do
let(:state) { :passed }
include_examples 'does not apply'
end
describe 'received by a :failed job' do
let(:state) { :failed }
include_examples 'does not apply'
end
describe 'received by an :errored job' do
let(:state) { :errored }
include_examples 'does not apply'
end
describe 'received by a :canceled job' do
let(:state) { :canceled }
include_examples 'does not apply'
end
end
end
|
# coding: utf-8
# Gemspec for utorrent-webapi-ruby, a client library for the uTorrent Web API.
lib = File.expand_path('../lib', __FILE__)
# Put lib/ on the load path so the version constant below can be required.
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'u_torrent/version'
Gem::Specification.new do |spec|
spec.name          = 'utorrent-webapi-ruby'
spec.version       = UTorrent::VERSION
spec.authors       = ['Peter Wu']
spec.email         = ['petergenius@gmail.com']
spec.description   = %q{UTorrent webapi ruby library}
spec.summary       = %q{Communicate to utorrent via webapi in ruby}
spec.homepage      = 'https://github.com/PeterWuMC/utorrent-webapi-ruby'
spec.license       = 'MIT'
# Package whatever git tracks; executables come from bin/, tests are excluded
# from the require path but shipped as test_files.
spec.files         = `git ls-files`.split($/)
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
# NOTE(review): a four-segment pessimistic pin (>= 1.6.7.0, < 1.6.8) is very
# narrow — confirm whether a broader '~> 1.6' constraint was intended.
spec.add_dependency 'nokogiri', '~> 1.6.7.0'
end
ruby version ~> 2.0
# coding: utf-8
# Gemspec for utorrent-webapi-ruby, a client library for the uTorrent Web API.
lib = File.expand_path('../lib', __FILE__)
# Put lib/ on the load path so the version constant below can be required.
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'u_torrent/version'
Gem::Specification.new do |spec|
spec.name          = 'utorrent-webapi-ruby'
spec.version       = UTorrent::VERSION
spec.authors       = ['Peter Wu']
spec.email         = ['petergenius@gmail.com']
spec.description   = %q{UTorrent webapi ruby library}
spec.summary       = %q{Communicate to utorrent via webapi in ruby}
spec.homepage      = 'https://github.com/PeterWuMC/utorrent-webapi-ruby'
spec.license       = 'MIT'
# Package whatever git tracks; executables come from bin/, tests are excluded
# from the require path but shipped as test_files.
spec.files         = `git ls-files`.split($/)
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
# Requires Ruby 2.x (>= 2.0, < 3) — '~>' on a two-segment version.
spec.required_ruby_version = '~> 2.0'
# NOTE(review): a four-segment pessimistic pin (>= 1.6.7.0, < 1.6.8) is very
# narrow — confirm whether a broader '~> 1.6' constraint was intended.
spec.add_dependency 'nokogiri', '~> 1.6.7.0'
end
|
# Capistrano/capifony deployment configuration for the Framing33 Symfony2 app.
set :application, "Framing33"
# NOTE(review): missing space after `set` — confirm this actually parses as
# set(:domain, ...) under the Ruby version used; write `set :domain, ...`.
set:domain, "vps116008.ovh.net"
set :user, "root"
# SECURITY: plaintext root password committed to the repository. Rotate this
# credential and switch to SSH key authentication (the config already sets
# forward_agent) or an untracked secrets file.
set :password, "08TFUYk6"
set :deploy_to, "/var/www/framing33"
set :app_path, "app"
set :web_path, "web"
set :scm, :git
# Deploys by copying from a local repository path (Windows/WAMP checkout).
set :repository, "file:///C:/wamp/www/Framing33"
set :deploy_via, :copy
set :model_manager, "doctrine"
ssh_options[:forward_agent] = true
ssh_options[:port] = "22"
ssh_options[:config] = false
role :web, domain # Your HTTP server, Apache/etc
role :app, domain, :primary => true # This may be the same as your `Web` server
set :use_sudo, true
set :use_composer, true
set :keep_releases, 3
set :shared_files, [app_path+"/config/parameters.yml"] # Les fichiers à conserver entre chaque déploiement
set :shared_children, [app_path + "/logs", "vendor"] # Idem, mais pour les dossiers
set :writable_dirs, ["app/cache", "app/logs", "app/sessions"]
set :update_vendors, true # Il est conseillé de laisser a false et de ne pas faire de ‘composer update’ directement sur la prod
set :dump_assetic_assets, true # dumper les assets
# Maximum log verbosity for deploy runs.
logger.level = Logger::MAX_LEVEL
#Update database Doctrine
before "symfony:cache:warmup", "symfony:doctrine:schema:update"
# Delete deploy.rb
|
# Capistrano multistage deployment recipe for CVCBrowser (Symfony2 / capifony).
set :stage_dir, 'app/config/deploy' # needed for Symfony2 only
require 'capistrano/ext/multistage'
set :stages, %w(production testing development)

set :application, "CVCBrowser"
set :user, "www-data"
# fixed: `default_run_options[:pty] = true` was assigned twice; the duplicate
# assignment has been removed (no behavioral change).
default_run_options[:pty] = true
set :webserver_user, "www-data"
set :permission_method, :acl
set :group, "jzhao"
set :use_set_permissions, true

# Source repository, fetched via a cached remote checkout.
set :repository, "git@map2u.familyds.com:/volume1/homes/git/repositories/#{application}.git"
set :scm, :git
set :deploy_via, :remote_cache
set :branch, "master"

# Shared files/directories preserved across releases. `app_path`/`web_path`
# come from capifony's defaults ("app"/"web").
set :shared_files, ["app/config/parameters.yml"]
set :shared_children, [app_path + "/../Data", app_path + "/cache", app_path + "/logs", web_path + "/uploads"]
set :writable_dirs, [app_path + "/cache", app_path + "/logs", web_path + "/uploads"]

set :model_manager, "doctrine"
set :group_writable, true
ssh_options[:forward_agent] = true

set :use_composer, true
set :update_vendors, false
#set :copy_vendors, true
set :use_sudo, false
set :keep_releases, 5

logger.level = Logger::MAX_LEVEL

#after "deploy:setup", :setup_ownership
after "deploy:finalize_update", :setup_ownership
before "deploy:update_code", :setup_ownership

# Placeholder ownership task: the chown/chmod commands are intentionally
# disabled in favour of the ACL handling in the deploy:update_code hook.
task :setup_ownership do
  # run "#{sudo} chown -R #{user}:#{group} #{deploy_to} && chmod -R g+s #{deploy_to}"
  # run "#{sudo} chmod -R 777 #{deploy_to} #{current_path}/app/cache #{current_path}/app/logs"
end

# Grant the web server and the deploy user write access to the cache via ACLs.
after "deploy:update_code" do
  capifony_pretty_print "--> Ensuring cache directory permissions"
  run "setfacl -R -m u:www-data:rwX -m u:`whoami`:rwX #{latest_release}/#{cache_path}"
  run "setfacl -dR -m u:www-data:rwX -m u:`whoami`:rwX #{latest_release}/#{cache_path}"
  capifony_puts_ok
end

# Override the stock update_code task so a failed deploy triggers the custom
# rollback below instead of Capistrano's default rollback behaviour.
namespace :deploy do
  task :update_code, :except => { :no_release => true } do
    on_rollback { my_namespace.rollback }
    strategy.deploy!
    finalize_update
  end
end

namespace :my_namespace do
  task :rollback, :except => { :no_release => true } do
    #run 'rm-rf #{release_path}; true'
    #default capistrano action on rollback
    #my custom actions
    # run "cd "+shared_path+"/../current"
    # run "php composer.phar dump-autoload"
    # Point release_path at the `current` symlink so the autoload dump runs there.
    set :release_path, shared_path + "/../current"
    symfony.composer.dump_autoload
  end
end
# 2015-03-01
# Capistrano multistage deployment recipe for CVCBrowser (Symfony2 / capifony),
# revised variant: vendors ARE updated here, via explicit composer options.
set :stage_dir, 'app/config/deploy' # needed for Symfony2 only
require 'capistrano/ext/multistage'
set :stages, %w(production testing development)

set :application, "CVCBrowser"
set :user, "www-data"
# fixed: `default_run_options[:pty] = true` was assigned twice; the duplicate
# assignment has been removed (no behavioral change).
default_run_options[:pty] = true
set :webserver_user, "www-data"
set :permission_method, :acl
set :group, "jzhao"
set :use_set_permissions, true

# Source repository, fetched via a cached remote checkout.
set :repository, "git@map2u.familyds.com:/volume1/homes/git/repositories/#{application}.git"
set :scm, :git
set :deploy_via, :remote_cache
set :branch, "master"

# Shared files/directories preserved across releases. `app_path`/`web_path`
# come from capifony's defaults ("app"/"web").
set :shared_files, ["app/config/parameters.yml"]
set :shared_children, [app_path + "/../Data", app_path + "/cache", app_path + "/logs", web_path + "/uploads"]
set :writable_dirs, [app_path + "/cache", app_path + "/logs", web_path + "/uploads"]

set :model_manager, "doctrine"
set :group_writable, true
ssh_options[:forward_agent] = true

set :use_composer, true
set :update_vendors, true
#set :copy_vendors, true
set :composer_options, "--ansi --no-interaction install --no-dev"
set :use_sudo, false
set :keep_releases, 5

logger.level = Logger::MAX_LEVEL

#after "deploy:setup", :setup_ownership
after "deploy:finalize_update", :setup_ownership
before "deploy:update_code", :setup_ownership

# Placeholder ownership task: the chown/chmod commands are intentionally
# disabled in favour of the ACL handling in the deploy:update_code hook.
task :setup_ownership do
  # run "#{sudo} chown -R #{user}:#{group} #{deploy_to} && chmod -R g+s #{deploy_to}"
  # run "#{sudo} chmod -R 777 #{deploy_to} #{current_path}/app/cache #{current_path}/app/logs"
end

# Grant the web server and the deploy user write access to the cache via ACLs.
after "deploy:update_code" do
  capifony_pretty_print "--> Ensuring cache directory permissions"
  run "setfacl -R -m u:www-data:rwX -m u:`whoami`:rwX #{latest_release}/#{cache_path}"
  run "setfacl -dR -m u:www-data:rwX -m u:`whoami`:rwX #{latest_release}/#{cache_path}"
  capifony_puts_ok
end

# Override the stock update_code task so a failed deploy triggers the custom
# rollback below instead of Capistrano's default rollback behaviour.
namespace :deploy do
  task :update_code, :except => { :no_release => true } do
    on_rollback { my_namespace.rollback }
    strategy.deploy!
    finalize_update
  end
end

namespace :my_namespace do
  task :rollback, :except => { :no_release => true } do
    #run 'rm-rf #{release_path}; true'
    #default capistrano action on rollback
    #my custom actions
    # run "cd "+shared_path+"/../current"
    # run "php composer.phar dump-autoload"
    # Point release_path at the `current` symlink so the autoload dump runs there.
    set :release_path, shared_path + "/../current"
    symfony.composer.dump_autoload
  end
end
#
# Author:: Ian Meyer (<ianmmeyer@gmail.com>)
# Copyright:: Copyright 2010-2016, Ian Meyer
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
Chef::Knife::Bootstrap.load_deps
describe Chef::Knife::Bootstrap do
let(:bootstrap_template) { nil }
let(:stderr) { StringIO.new }
let(:bootstrap_cli_options) { [ ] }
# Platform flags consumed by the TrainConnector double below; individual
# contexts override these as needed.
# fixed: `let(:linux_test)` was defined twice (first `true`, then `false`).
# In RSpec the later definition shadows the earlier one, so `false` was the
# value in effect — the shadowed `true` definition has been removed, which
# preserves behavior and silences the redefinition.
let(:windows_test) { false }
let(:linux_test) { false }
let(:unix_test) { false }
let(:ssh_test) { false }
# Minimal stand-in for the Train connection the bootstrap command talks to.
let(:connection) do
  double("TrainConnector",
         windows?: windows_test,
         linux?: linux_test,
         unix?: unix_test)
end
let(:knife) do
Chef::Log.logger = Logger.new(StringIO.new)
Chef::Config[:knife][:bootstrap_template] = bootstrap_template unless bootstrap_template.nil?
k = Chef::Knife::Bootstrap.new(bootstrap_cli_options)
allow(k.ui).to receive(:stderr).and_return(stderr)
allow(k).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(false)
allow(k).to receive(:connection).and_return connection
k.merge_configs
k
end
context "#check_license" do
let(:acceptor) { instance_double(LicenseAcceptance::Acceptor) }
before do
expect(LicenseAcceptance::Acceptor).to receive(:new).and_return(acceptor)
end
describe "when a license is not required" do
it "does not set the chef_license" do
expect(acceptor).to receive(:license_required?).and_return(false)
knife.check_license
expect(Chef::Config[:chef_license]).to eq(nil)
end
end
describe "when a license is required" do
it "sets the chef_license" do
expect(acceptor).to receive(:license_required?).and_return(true)
expect(acceptor).to receive(:id_from_mixlib).and_return("id")
expect(acceptor).to receive(:check_and_persist)
expect(acceptor).to receive(:acceptance_value).and_return("accept-no-persist")
knife.check_license
expect(Chef::Config[:chef_license]).to eq("accept-no-persist")
end
end
end
context "#bootstrap_template" do
it "should default to chef-full" do
expect(knife.bootstrap_template).to be_a_kind_of(String)
expect(File.basename(knife.bootstrap_template)).to eq("chef-full")
end
end
context "#render_template - when using the chef-full default template" do
let(:rendered_template) do
knife.merge_configs
knife.render_template
end
it "should render client.rb" do
expect(rendered_template).to match("cat > /etc/chef/client.rb <<'EOP'")
expect(rendered_template).to match("chef_server_url \"https://localhost:443\"")
expect(rendered_template).to match("validation_client_name \"chef-validator\"")
expect(rendered_template).to match("log_location STDOUT")
end
it "should render first-boot.json" do
expect(rendered_template).to match("cat > /etc/chef/first-boot.json <<'EOP'")
expect(rendered_template).to match('{"run_list":\[\]}')
end
context "and encrypted_data_bag_secret was provided" do
it "should render encrypted_data_bag_secret file" do
expect(knife).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(true)
expect(knife).to receive(:read_secret).and_return("secrets")
expect(rendered_template).to match("cat > /etc/chef/encrypted_data_bag_secret <<'EOP'")
expect(rendered_template).to match('{"run_list":\[\]}')
expect(rendered_template).to match(/secrets/)
end
end
end
context "with --bootstrap-vault-item" do
let(:bootstrap_cli_options) { [ "--bootstrap-vault-item", "vault1:item1", "--bootstrap-vault-item", "vault1:item2", "--bootstrap-vault-item", "vault2:item1" ] }
it "sets the knife config cli option correctly" do
expect(knife.config[:bootstrap_vault_item]).to eq({ "vault1" => %w{item1 item2}, "vault2" => ["item1"] })
end
end
context "with --bootstrap-preinstall-command" do
command = "while sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1; do\n echo 'waiting for dpkg lock';\n sleep 1;\n done;"
let(:bootstrap_cli_options) { [ "--bootstrap-preinstall-command", command ] }
let(:rendered_template) do
knife.merge_configs
knife.render_template
end
it "configures the preinstall command in the bootstrap template correctly" do
expect(rendered_template).to match(/command/)
end
end
context "with --bootstrap-proxy" do
let(:bootstrap_cli_options) { [ "--bootstrap-proxy", "1.1.1.1" ] }
let(:rendered_template) do
knife.merge_configs
knife.render_template
end
it "configures the https_proxy environment variable in the bootstrap template correctly" do
expect(rendered_template).to match(/https_proxy="1.1.1.1" export https_proxy/)
end
end
context "with --bootstrap-no-proxy" do
let(:bootstrap_cli_options) { [ "--bootstrap-no-proxy", "localserver" ] }
let(:rendered_template) do
knife.merge_configs
knife.render_template
end
it "configures the https_proxy environment variable in the bootstrap template correctly" do
expect(rendered_template).to match(/no_proxy="localserver" export no_proxy/)
end
end
context "with :bootstrap_template and :template_file cli options" do
let(:bootstrap_cli_options) { [ "--bootstrap-template", "my-template", "other-template" ] }
it "should select bootstrap template" do
expect(File.basename(knife.bootstrap_template)).to eq("my-template")
end
end
context "when finding templates" do
context "when :bootstrap_template config is set to a file" do
context "that doesn't exist" do
let(:bootstrap_template) { "/opt/blah/not/exists/template.erb" }
it "raises an error" do
expect { knife.find_template }.to raise_error(Errno::ENOENT)
end
end
context "that exists" do
let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test.erb")) }
it "loads the given file as the template" do
expect(Chef::Log).to receive(:trace)
expect(knife.find_template).to eq(File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test.erb")))
end
end
end
context "when :bootstrap_template config is set to a template name" do
let(:bootstrap_template) { "example" }
let(:builtin_template_path) { File.expand_path(File.join(File.dirname(__FILE__), "../../../lib/chef/knife/bootstrap/templates", "example.erb")) }
let(:chef_config_dir_template_path) { "/knife/chef/config/bootstrap/example.erb" }
let(:env_home_template_path) { "/env/home/.chef/bootstrap/example.erb" }
let(:gem_files_template_path) { "/Users/schisamo/.rvm/gems/ruby-1.9.2-p180@chef-0.10/gems/knife-windows-0.5.4/lib/chef/knife/bootstrap/fake-bootstrap-template.erb" }
def configure_chef_config_dir
allow(Chef::Knife).to receive(:chef_config_dir).and_return("/knife/chef/config")
end
def configure_env_home
allow(Chef::Util::PathHelper).to receive(:home).with(".chef", "bootstrap", "example.erb").and_yield(env_home_template_path)
end
def configure_gem_files
allow(Gem).to receive(:find_files).and_return([ gem_files_template_path ])
end
before(:each) do
expect(File).to receive(:exists?).with(bootstrap_template).and_return(false)
end
context "when file is available everywhere" do
before do
configure_chef_config_dir
configure_env_home
configure_gem_files
expect(File).to receive(:exists?).with(builtin_template_path).and_return(true)
end
it "should load the template from built-in templates" do
expect(knife.find_template).to eq(builtin_template_path)
end
end
context "when file is available in chef_config_dir" do
  # fixed: the `it` block was accidentally nested INSIDE the `before` block
  # (a missing `end` after the second stub), so the example body was being
  # defined during setup and never ran as a real example. The `before` block
  # is now closed properly and the example sits at context level. The legacy
  # `.should` matcher was also converted to the `expect` syntax used by the
  # rest of this file.
  before do
    configure_chef_config_dir
    configure_env_home
    configure_gem_files
    expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
    expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(true)
  end

  it "should load the template from chef_config_dir" do
    expect(knife.find_template).to eq(chef_config_dir_template_path)
  end
end
context "when file is available in home directory" do
before do
configure_chef_config_dir
configure_env_home
configure_gem_files
expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(false)
expect(File).to receive(:exists?).with(env_home_template_path).and_return(true)
end
it "should load the template from chef_config_dir" do
expect(knife.find_template).to eq(env_home_template_path)
end
end
context "when file is available in Gem files" do
before do
configure_chef_config_dir
configure_env_home
configure_gem_files
expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(false)
expect(File).to receive(:exists?).with(env_home_template_path).and_return(false)
expect(File).to receive(:exists?).with(gem_files_template_path).and_return(true)
end
it "should load the template from Gem files" do
expect(knife.find_template).to eq(gem_files_template_path)
end
end
context "when file is available in Gem files and home dir doesn't exist" do
before do
configure_chef_config_dir
configure_gem_files
allow(Chef::Util::PathHelper).to receive(:home).with(".chef", "bootstrap", "example.erb").and_return(nil)
expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(false)
expect(File).to receive(:exists?).with(gem_files_template_path).and_return(true)
end
it "should load the template from Gem files" do
expect(knife.find_template).to eq(gem_files_template_path)
end
end
end
end
["-t", "--bootstrap-template"].each do |t|
context "when #{t} option is given in the command line" do
it "sets the knife :bootstrap_template config" do
knife.parse_options([t, "blahblah"])
knife.merge_configs
expect(knife.bootstrap_template).to eq("blahblah")
end
end
end
context "with run_list template" do
let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test.erb")) }
it "should return an empty run_list" do
expect(knife.render_template).to eq('{"run_list":[]}')
end
it "should have role[base] in the run_list" do
knife.parse_options(["-r", "role[base]"])
knife.merge_configs
expect(knife.render_template).to eq('{"run_list":["role[base]"]}')
end
it "should have role[base] and recipe[cupcakes] in the run_list" do
knife.parse_options(["-r", "role[base],recipe[cupcakes]"])
knife.merge_configs
expect(knife.render_template).to eq('{"run_list":["role[base]","recipe[cupcakes]"]}')
end
context "with bootstrap_attribute options" do
# Temp file holding JSON attributes for the --json-attribute-file tests.
let(:jsonfile) do
  file = Tempfile.new(["node", ".json"]) # fixed: removed space before the argument list
  File.open(file.path, "w") { |f| f.puts '{"foo":{"bar":"baz"}}' }
  file
end
it "should have foo => {bar => baz} in the first_boot from cli" do
knife.parse_options(["-j", '{"foo":{"bar":"baz"}}'])
knife.merge_configs
expected_hash = FFI_Yajl::Parser.new.parse('{"foo":{"bar":"baz"},"run_list":[]}')
actual_hash = FFI_Yajl::Parser.new.parse(knife.render_template)
expect(actual_hash).to eq(expected_hash)
end
it "should have foo => {bar => baz} in the first_boot from file" do
knife.parse_options(["--json-attribute-file", jsonfile.path])
knife.merge_configs
expected_hash = FFI_Yajl::Parser.new.parse('{"foo":{"bar":"baz"},"run_list":[]}')
actual_hash = FFI_Yajl::Parser.new.parse(knife.render_template)
expect(actual_hash).to eq(expected_hash)
jsonfile.close
end
it "raises a Chef::Exceptions::BootstrapCommandInputError with the proper error message" do
knife.parse_options(["-j", '{"foo":{"bar":"baz"}}'])
knife.parse_options(["--json-attribute-file", jsonfile.path])
knife.merge_configs
allow(knife).to receive(:validate_name_args!)
expect(knife).to receive(:check_license)
expect { knife.run }.to raise_error(Chef::Exceptions::BootstrapCommandInputError)
jsonfile.close
end
end
end
context "with hints template" do
let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test-hints.erb")) }
it "should create a hint file when told to" do
knife.parse_options(["--hint", "openstack"])
knife.merge_configs
expect(knife.render_template).to match(%r{/etc/chef/ohai/hints/openstack.json})
end
it "should populate a hint file with JSON when given a file to read" do
allow(::File).to receive(:read).and_return('{ "foo" : "bar" }')
knife.parse_options(["--hint", "openstack=hints/openstack.json"])
knife.merge_configs
expect(knife.render_template).to match(/\{\"foo\":\"bar\"\}/)
end
end
describe "specifying no_proxy with various entries" do
subject(:knife) do
k = described_class.new
Chef::Config[:knife][:bootstrap_template] = template_file
allow(k).to receive(:connection).and_return connection
k.parse_options(options)
k.merge_configs
k
end
let(:options) { ["--bootstrap-no-proxy", setting] }
let(:template_file) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "no_proxy.erb")) }
let(:rendered_template) do
knife.render_template
end
context "via --bootstrap-no-proxy" do
let(:setting) { "api.opscode.com" }
it "renders the client.rb with a single FQDN no_proxy entry" do
expect(rendered_template).to match(/.*no_proxy\s*"api.opscode.com".*/)
end
end
context "via --bootstrap-no-proxy multiple" do
let(:setting) { "api.opscode.com,172.16.10.*" }
it "renders the client.rb with comma-separated FQDN and wildcard IP address no_proxy entries" do
expect(rendered_template).to match(/.*no_proxy\s*"api.opscode.com,172.16.10.\*".*/)
end
end
context "via --ssl-verify-mode none" do
let(:options) { ["--node-ssl-verify-mode", "none"] }
it "renders the client.rb with ssl_verify_mode set to :verify_none" do
expect(rendered_template).to match(/ssl_verify_mode :verify_none/)
end
end
context "via --node-ssl-verify-mode peer" do
let(:options) { ["--node-ssl-verify-mode", "peer"] }
it "renders the client.rb with ssl_verify_mode set to :verify_peer" do
expect(rendered_template).to match(/ssl_verify_mode :verify_peer/)
end
end
context "via --node-ssl-verify-mode all" do
let(:options) { ["--node-ssl-verify-mode", "all"] }
it "raises error" do
expect { rendered_template }.to raise_error(RuntimeError)
end
end
context "via --node-verify-api-cert" do
let(:options) { ["--node-verify-api-cert"] }
it "renders the client.rb with verify_api_cert set to true" do
expect(rendered_template).to match(/verify_api_cert true/)
end
end
context "via --no-node-verify-api-cert" do
let(:options) { ["--no-node-verify-api-cert"] }
it "renders the client.rb with verify_api_cert set to false" do
expect(rendered_template).to match(/verify_api_cert false/)
end
end
end
describe "specifying the encrypted data bag secret key" do
let(:secret) { "supersekret" }
let(:options) { [] }
let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "secret.erb")) }
let(:rendered_template) do
knife.parse_options(options)
knife.merge_configs
knife.render_template
end
it "creates a secret file" do
expect(knife).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(true)
expect(knife).to receive(:read_secret).and_return(secret)
expect(rendered_template).to match(/#{secret}/)
end
it "renders the client.rb with an encrypted_data_bag_secret entry" do
expect(knife).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(true)
expect(knife).to receive(:read_secret).and_return(secret)
expect(rendered_template).to match(%r{encrypted_data_bag_secret\s*"/etc/chef/encrypted_data_bag_secret"})
end
end
describe "when transferring trusted certificates" do
let(:trusted_certs_dir) { Chef::Util::PathHelper.cleanpath(File.join(File.dirname(__FILE__), "../../data/trusted_certs")) }
let(:rendered_template) do
knife.merge_configs
knife.render_template
end
before do
Chef::Config[:trusted_certs_dir] = trusted_certs_dir
allow(IO).to receive(:read).and_call_original
allow(IO).to receive(:read).with(File.expand_path(Chef::Config[:validation_key])).and_return("")
end
def certificates
Dir[File.join(trusted_certs_dir, "*.{crt,pem}")]
end
it "creates /etc/chef/trusted_certs" do
expect(rendered_template).to match(%r{mkdir -p /etc/chef/trusted_certs})
end
it "copies the certificates in the directory" do
certificates.each do |cert|
expect(IO).to receive(:read).with(File.expand_path(cert))
end
certificates.each do |cert|
expect(rendered_template).to match(%r{cat > /etc/chef/trusted_certs/#{File.basename(cert)} <<'EOP'})
end
end
# fixed typo in the context description: "trusted_cets_dir" -> "trusted_certs_dir"
# (description string only; no assertion behavior changes).
context "when :trusted_certs_dir is empty" do
  let(:trusted_certs_dir) { Chef::Util::PathHelper.cleanpath(File.join(File.dirname(__FILE__), "../../data/trusted_certs_empty")) }

  it "doesn't create /etc/chef/trusted_certs if :trusted_certs_dir is empty" do
    expect(rendered_template).not_to match(%r{mkdir -p /etc/chef/trusted_certs})
  end
end
end
context "when doing fips things" do
let(:template_file) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "no_proxy.erb")) }
let(:trusted_certs_dir) { Chef::Util::PathHelper.cleanpath(File.join(File.dirname(__FILE__), "../../data/trusted_certs")) }
before do
Chef::Config[:knife][:bootstrap_template] = template_file
knife.merge_configs
end
let(:rendered_template) do
knife.render_template
end
context "when knife is in fips mode" do
before do
Chef::Config[:fips] = true
end
it "renders 'fips true'" do
expect(rendered_template).to match("fips")
end
end
context "when knife is not in fips mode" do
before do
# This is required because the chef-fips pipeline does
# has a default value of true for fips
Chef::Config[:fips] = false
end
it "does not render anything about fips" do
expect(rendered_template).not_to match("fips")
end
end
end
describe "when transferring client.d" do
let(:rendered_template) do
knife.merge_configs
knife.render_template
end
before do
Chef::Config[:client_d_dir] = client_d_dir
end
context "when client_d_dir is nil" do
let(:client_d_dir) { nil }
it "does not create /etc/chef/client.d" do
expect(rendered_template).not_to match(%r{mkdir -p /etc/chef/client\.d})
end
end
context "when client_d_dir is set" do
let(:client_d_dir) do
Chef::Util::PathHelper.cleanpath(
File.join(File.dirname(__FILE__), "../../data/client.d_00")
)
end
it "creates /etc/chef/client.d" do
expect(rendered_template).to match("mkdir -p /etc/chef/client\.d")
end
context "a flat directory structure" do
it "escapes single-quotes" do
expect(rendered_template).to match("cat > /etc/chef/client.d/02-strings.rb <<'EOP'")
expect(rendered_template).to match("something '\\\\''/foo/bar'\\\\''")
end
it "creates a file 00-foo.rb" do
expect(rendered_template).to match("cat > /etc/chef/client.d/00-foo.rb <<'EOP'")
expect(rendered_template).to match("d6f9b976-289c-4149-baf7-81e6ffecf228")
end
it "creates a file bar" do
expect(rendered_template).to match("cat > /etc/chef/client.d/bar <<'EOP'")
expect(rendered_template).to match("1 / 0")
end
end
context "a nested directory structure" do
let(:client_d_dir) do
Chef::Util::PathHelper.cleanpath(
File.join(File.dirname(__FILE__), "../../data/client.d_01")
)
end
it "creates a file foo/bar.rb" do
expect(rendered_template).to match("cat > /etc/chef/client.d/foo/bar.rb <<'EOP'")
expect(rendered_template).to match("1 / 0")
end
end
end
end
describe "#connection_protocol" do
let(:host_descriptor) { "example.com" }
let(:config) { {} }
let(:knife_connection_protocol) { nil }
before do
allow(knife).to receive(:config).and_return config
allow(knife).to receive(:host_descriptor).and_return host_descriptor
if knife_connection_protocol
Chef::Config[:knife][:connection_protocol] = knife_connection_protocol
knife.merge_configs
end
end
context "when protocol is part of the host argument" do
let(:host_descriptor) { "winrm://myhost" }
it "returns the value provided by the host argument" do
expect(knife.connection_protocol).to eq "winrm"
end
end
context "when protocol is provided via the CLI flag" do
let(:config) { { connection_protocol: "winrm" } }
it "returns that value" do
expect(knife.connection_protocol).to eq "winrm"
end
end
context "when protocol is provided via the host argument and the CLI flag" do
let(:host_descriptor) { "ssh://example.com" }
let(:config) { { connection_protocol: "winrm" } }
it "returns the value provided by the host argument" do
expect(knife.connection_protocol).to eq "ssh"
end
end
context "when no explicit protocol is provided" do
let(:config) { {} }
let(:host_descriptor) { "example.com" }
let(:knife_connection_protocol) { "winrm" }
it "falls back to knife config" do
expect(knife.connection_protocol).to eq "winrm"
end
context "and there is no knife bootstrap_protocol" do
let(:knife_connection_protocol) { nil }
it "falls back to 'ssh'" do
expect(knife.connection_protocol).to eq "ssh"
end
end
end
end
describe "#validate_protocol!" do
let(:host_descriptor) { "example.com" }
let(:config) { {} }
let(:connection_protocol) { "ssh" }
before do
allow(knife).to receive(:config).and_return config
allow(knife).to receive(:connection_protocol).and_return connection_protocol
allow(knife).to receive(:host_descriptor).and_return host_descriptor
end
context "when protocol is provided both in the URL and via --protocol" do
context "and they do not match" do
let(:connection_protocol) { "ssh" }
let(:config) { { connection_protocol: "winrm" } }
it "outputs an error and exits" do
expect(knife.ui).to receive(:error)
expect { knife.validate_protocol! }.to raise_error SystemExit
end
end
context "and they do match" do
let(:connection_protocol) { "winrm" }
let(:config) { { connection_protocol: "winrm" } }
it "returns true" do
expect(knife.validate_protocol!).to eq true
end
end
end
context "and the protocol is supported" do
Chef::Knife::Bootstrap::SUPPORTED_CONNECTION_PROTOCOLS.each do |proto|
let(:connection_protocol) { proto }
it "returns true for #{proto}" do
expect(knife.validate_protocol!).to eq true
end
end
end
context "and the protocol is not supported" do
let(:connection_protocol) { "invalid" }
it "outputs an error and exits" do
expect(knife.ui).to receive(:error).with(/Unsupported protocol '#{connection_protocol}'/)
expect { knife.validate_protocol! }.to raise_error SystemExit
end
end
end
describe "#validate_policy_options!" do
context "when only policy_name is given" do
let(:bootstrap_cli_options) { %w{ --policy-name my-app-server } }
it "returns an error stating that policy_name and policy_group must be given together" do
expect { knife.validate_policy_options! }.to raise_error(SystemExit)
expect(stderr.string).to include("ERROR: --policy-name and --policy-group must be specified together")
end
end
context "when only policy_group is given" do
let(:bootstrap_cli_options) { %w{ --policy-group staging } }
it "returns an error stating that policy_name and policy_group must be given together" do
expect { knife.validate_policy_options! }.to raise_error(SystemExit)
expect(stderr.string).to include("ERROR: --policy-name and --policy-group must be specified together")
end
end
context "when both policy_name and policy_group are given, but run list is also given" do
let(:bootstrap_cli_options) { %w{ --policy-name my-app --policy-group staging --run-list cookbook } }
it "returns an error stating that policyfile and run_list are exclusive" do
expect { knife.validate_policy_options! }.to raise_error(SystemExit)
expect(stderr.string).to include("ERROR: Policyfile options and --run-list are exclusive")
end
end
context "when policy_name and policy_group are given with no conflicting options" do
let(:bootstrap_cli_options) { %w{ --policy-name my-app --policy-group staging } }
it "passes options validation" do
expect { knife.validate_policy_options! }.to_not raise_error
end
it "passes them into the bootstrap context" do
expect(knife.bootstrap_context.first_boot).to have_key(:policy_name)
expect(knife.bootstrap_context.first_boot).to have_key(:policy_group)
end
it "ensures that run_list is not set in the bootstrap context" do
expect(knife.bootstrap_context.first_boot).to_not have_key(:run_list)
end
end
# https://github.com/chef/chef/issues/4131
# Arguably a bug in the plugin: it shouldn't be setting this to nil, but it
# worked before, so make it work now.
context "when a plugin sets the run list option to nil" do
before do
knife.config[:run_list] = nil
end
it "passes options validation" do
expect { knife.validate_policy_options! }.to_not raise_error
end
end
end
# TODO - this is the only cli option we validate the _option_ itself -
# so we'll know if someone accidentally deletes or renames use_sudo_password
# Is this worht keeping? If so, then it seems we should expand it
# to cover all options.
context "validating use_sudo_password option" do
it "use_sudo_password contains description and long params for help" do
expect(knife.options).to(have_key(:use_sudo_password)) \
&& expect(knife.options[:use_sudo_password][:description].to_s).not_to(eq(""))\
&& expect(knife.options[:use_sudo_password][:long].to_s).not_to(eq(""))
end
end
context "#connection_opts" do
let(:connection_protocol) { "ssh" }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "behavioral test: " do
let(:expected_connection_opts) do
{ base_opts: true,
ssh_identity_opts: true,
ssh_opts: true,
gateway_opts: true,
host_verify_opts: true,
sudo_opts: true,
winrm_opts: true }
end
it "queries and merges only expected configurations" do
expect(knife).to receive(:base_opts).and_return({ base_opts: true })
expect(knife).to receive(:host_verify_opts).and_return({ host_verify_opts: true })
expect(knife).to receive(:gateway_opts).and_return({ gateway_opts: true })
expect(knife).to receive(:sudo_opts).and_return({ sudo_opts: true })
expect(knife).to receive(:winrm_opts).and_return({ winrm_opts: true })
expect(knife).to receive(:ssh_opts).and_return({ ssh_opts: true })
expect(knife).to receive(:ssh_identity_opts).and_return({ ssh_identity_opts: true })
expect(knife.connection_opts).to match expected_connection_opts
end
end
context "functional test: " do
# Functional coverage of #connection_opts for the winrm protocol.
# Verifies precedence: CLI flags (knife.config) win over
# Chef::Config[:knife] entries, which win over built-in defaults.
context "when protocol is winrm" do
let(:connection_protocol) { "winrm" }
# context "and neither CLI nor Chef::Config config entries have been provided"
# end
context "and all supported values are provided as Chef::Config entries" do
before do
# Set everything to easily identifiable and obviously fake values
# to verify that Chef::Config is being sourced instead of knife.config
knife.config = {}
Chef::Config[:knife][:max_wait] = 9999
Chef::Config[:knife][:winrm_user] = "winbob"
Chef::Config[:knife][:winrm_port] = 9999
Chef::Config[:knife][:ca_trust_file] = "trust.me"
Chef::Config[:knife][:kerberos_realm] = "realm"
Chef::Config[:knife][:kerberos_service] = "service"
Chef::Config[:knife][:winrm_auth_method] = "kerberos" # default is negotiate
Chef::Config[:knife][:winrm_basic_auth_only] = true
Chef::Config[:knife][:winrm_no_verify_cert] = true
Chef::Config[:knife][:session_timeout] = 9999
Chef::Config[:knife][:winrm_ssl] = true
Chef::Config[:knife][:winrm_ssl_peer_fingerprint] = "ABCDEF"
end
context "and no CLI options have been given" do
# Every configurable key should reflect the fake Chef::Config values above.
let(:expected_result) do
{
logger: Chef::Log, # not configurable
ca_trust_path: "trust.me",
max_wait_until_ready: 9999,
operation_timeout: 9999,
ssl_peer_fingerprint: "ABCDEF",
winrm_transport: "kerberos",
winrm_basic_auth_only: true,
user: "winbob",
port: 9999,
self_signed: true,
ssl: true,
kerberos_realm: "realm",
kerberos_service: "service",
}
end
it "generates a config hash using the Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
context "and some CLI options have been given" do
# CLI-sourced values below are expected to shadow the Chef::Config
# values; everything else still comes from Chef::Config.
let(:expected_result) do
{
logger: Chef::Log, # not configurable
ca_trust_path: "no trust",
max_wait_until_ready: 9999,
operation_timeout: 9999,
ssl_peer_fingerprint: "ABCDEF",
winrm_transport: "kerberos",
winrm_basic_auth_only: true,
user: "microsoftbob",
port: 12,
self_signed: true,
ssl: true,
kerberos_realm: "realm",
kerberos_service: "service",
password: "lobster",
}
end
before do
knife.config[:ca_trust_file] = "no trust"
knife.config[:connection_user] = "microsoftbob"
knife.config[:connection_port] = 12
knife.config[:winrm_port] = "13" # indirectly verify we're not looking for the wrong CLI flag
knife.config[:connection_password] = "lobster"
end
it "generates a config hash using the CLI options when available and falling back to Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
context "and all CLI options have been given" do
before do
# We'll force kerberos via knife.config because it
# causes additional options to populate - make sure
# Chef::Config is different so we can be sure that we didn't
# pull in the Chef::Config value
Chef::Config[:knife][:winrm_auth_method] = "negotiate"
knife.config[:connection_password] = "blue"
knife.config[:max_wait] = 1000
knife.config[:connection_user] = "clippy"
knife.config[:connection_port] = 1000
knife.config[:winrm_port] = 1001 # We should not see this value get used
knife.config[:ca_trust_file] = "trust.the.internet"
knife.config[:kerberos_realm] = "otherrealm"
knife.config[:kerberos_service] = "otherservice"
knife.config[:winrm_auth_method] = "kerberos" # default is negotiate
knife.config[:winrm_basic_auth_only] = false
knife.config[:winrm_no_verify_cert] = false
knife.config[:session_timeout] = 1000
knife.config[:winrm_ssl] = false
knife.config[:winrm_ssl_peer_fingerprint] = "FEDCBA"
end
let(:expected_result) do
{
logger: Chef::Log, # not configurable
ca_trust_path: "trust.the.internet",
max_wait_until_ready: 1000,
operation_timeout: 1000,
ssl_peer_fingerprint: "FEDCBA",
winrm_transport: "kerberos",
winrm_basic_auth_only: false,
user: "clippy",
port: 1000,
self_signed: false,
ssl: false,
kerberos_realm: "otherrealm",
kerberos_service: "otherservice",
password: "blue",
}
end
it "generates a config hash using the CLI options and pulling nothing from Chef::Config" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
end # with underlying Chef::Config values
# With no configuration at all, only the hard-coded winrm defaults
# should be present in the connection options.
context "and no values are provided from Chef::Config or CLI" do
before do
# We will use knife's actual config since these tests
# have assumptions based on CLI default values
end
let(:expected_result) do
{
logger: Chef::Log,
operation_timeout: 60,
self_signed: false,
ssl: false,
ssl_peer_fingerprint: nil,
winrm_basic_auth_only: false,
winrm_transport: "negotiate",
}
end
it "populates appropriate defaults" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
end # winrm
# Functional coverage of #connection_opts for the ssh protocol.
# Verifies precedence: CLI flags (knife.config) win over
# Chef::Config[:knife] entries, which win over built-in defaults.
context "when protocol is ssh" do
let(:connection_protocol) { "ssh" }
# context "and neither CLI nor Chef::Config config entries have been provided"
# end
context "and all supported values are provided as Chef::Config entries" do
before do
# Set everything to easily identifiable and obviously fake values
# to verify that Chef::Config is being sourced instead of knife.config
knife.config = {}
Chef::Config[:knife][:max_wait] = 9999
Chef::Config[:knife][:session_timeout] = 9999
Chef::Config[:knife][:ssh_user] = "sshbob"
Chef::Config[:knife][:ssh_port] = 9999
Chef::Config[:knife][:host_key_verify] = false
Chef::Config[:knife][:ssh_gateway_identity] = "/gateway.pem"
Chef::Config[:knife][:ssh_gateway] = "admin@mygateway.local:1234"
Chef::Config[:knife][:ssh_identity_file] = "/identity.pem"
Chef::Config[:knife][:use_sudo_password] = false # We have no password.
end
context "and no CLI options have been given" do
let(:expected_result) do
{
logger: Chef::Log, # not configurable
max_wait_until_ready: 9999.0,
connection_timeout: 9999,
user: "sshbob",
bastion_host: "mygateway.local",
bastion_port: 1234,
bastion_user: "admin",
forward_agent: false,
keys_only: true,
key_files: ["/identity.pem", "/gateway.pem"],
sudo: false,
verify_host_key: "always",
port: 9999,
non_interactive: true,
}
end
it "generates a correct config hash using the Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
# These keys are only honored via their CLI flags; setting them in
# Chef::Config must not leak into the connection options.
context "and unsupported Chef::Config options are given in Chef::Config, not in CLI" do
before do
Chef::Config[:knife][:password] = "blah"
Chef::Config[:knife][:ssh_password] = "blah"
Chef::Config[:knife][:preserve_home] = true
Chef::Config[:knife][:use_sudo] = true
Chef::Config[:knife][:ssh_forward_agent] = "blah"
end
it "does not include the corresponding option in the connection options" do
knife.merge_configs
expect(knife.connection_opts.key?(:password)).to eq false
expect(knife.connection_opts.key?(:ssh_forward_agent)).to eq false
expect(knife.connection_opts.key?(:use_sudo)).to eq false
expect(knife.connection_opts.key?(:preserve_home)).to eq false
end
end
context "and some CLI options have been given" do
before do
knife.config = {}
knife.config[:connection_user] = "sshalice"
knife.config[:connection_port] = 12
knife.config[:ssh_port] = "13" # canary to indirectly verify we're not looking for the wrong CLI flag
knife.config[:connection_password] = "feta cheese"
knife.config[:max_wait] = 150
knife.config[:session_timeout] = 120
knife.config[:use_sudo] = true
# Was misspelled :use_sudo_pasword, which silently did nothing;
# with the correct key set, sudo_password must appear in the opts.
knife.config[:use_sudo_password] = true
knife.config[:ssh_forward_agent] = true
end
let(:expected_result) do
{
logger: Chef::Log, # not configurable
max_wait_until_ready: 150.0, # cli
connection_timeout: 120, # cli
user: "sshalice", # cli
password: "feta cheese", # cli
bastion_host: "mygateway.local", # Config
bastion_port: 1234, # Config
bastion_user: "admin", # Config
forward_agent: true, # cli
keys_only: false, # implied false from config password present
key_files: ["/identity.pem", "/gateway.pem"], # Config
sudo: true, # cli
sudo_password: "feta cheese", # cli: use_sudo_password + connection_password
verify_host_key: "always", # Config
port: 12, # cli
non_interactive: true,
}
end
it "generates a config hash using the CLI options when available and falling back to Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
context "and all CLI options have been given" do
before do
knife.config = {}
knife.config[:max_wait] = 150
knife.config[:session_timeout] = 120
knife.config[:connection_user] = "sshroot"
knife.config[:connection_port] = 1000
knife.config[:connection_password] = "blah"
knife.config[:forward_agent] = true
knife.config[:use_sudo] = true
knife.config[:use_sudo_password] = true
knife.config[:preserve_home] = true
knife.config[:ssh_forward_agent] = true
knife.config[:ssh_verify_host_key] = true
knife.config[:ssh_gateway_identity] = "/gateway-identity.pem"
knife.config[:ssh_gateway] = "me@example.com:10"
knife.config[:ssh_identity_file] = "/my-identity.pem"
# We'll set these as canaries - if one of these values shows up
# in a failed test, then the behavior of not pulling from these keys
# out of knife.config is broken:
knife.config[:ssh_user] = "do not use"
knife.config[:ssh_port] = 1001
end
let(:expected_result) do
{
logger: Chef::Log, # not configurable
max_wait_until_ready: 150,
connection_timeout: 120,
user: "sshroot",
password: "blah",
port: 1000,
bastion_host: "example.com",
bastion_port: 10,
bastion_user: "me",
forward_agent: true,
keys_only: false,
key_files: ["/my-identity.pem", "/gateway-identity.pem"],
sudo: true,
sudo_options: "-H",
sudo_password: "blah",
verify_host_key: true,
non_interactive: true,
}
end
it "generates a config hash using the CLI options and pulling nothing from Chef::Config" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
end
# With no configuration at all, only the hard-coded ssh defaults
# should be present in the connection options.
context "and no values are provided from Chef::Config or CLI" do
before do
# We will use knife's actual config since these tests
# have assumptions based on CLI default values
end
let(:expected_result) do
{
forward_agent: false,
key_files: [],
logger: Chef::Log,
keys_only: false,
sudo: false,
verify_host_key: "always",
non_interactive: true,
connection_timeout: 60,
}
end
it "populates appropriate defaults" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
end # ssh
end # functional tests
end # connection_opts
# Unit coverage for #base_opts: the protocol-agnostic core of the
# connection options (port, user, logger, and password when present).
context "#base_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for all protocols" do
context "when password is provided" do
before do
knife.config[:connection_port] = 250
knife.config[:connection_user] = "test"
knife.config[:connection_password] = "opscode"
end
it "generates the correct options" do
expect(knife.base_opts).to eq(
port: 250,
user: "test",
logger: Chef::Log,
password: "opscode"
)
end
end
context "when password is not provided" do
before do
knife.config[:connection_port] = 250
knife.config[:connection_user] = "test"
end
it "generates the correct options" do
# No :password key at all when no password was configured.
expect(knife.base_opts).to eq(
port: 250,
user: "test",
logger: Chef::Log
)
end
end
end
end
# Unit coverage for #host_verify_opts: protocol-specific certificate /
# host-key verification settings and their defaults.
context "#host_verify_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns the expected configuration" do
knife.config[:winrm_no_verify_cert] = true
expect(knife.host_verify_opts).to eq(self_signed: true)
end
it "provides a correct default when no option given" do
# Certificates are verified unless explicitly disabled.
expect(knife.host_verify_opts).to eq(self_signed: false)
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
it "returns the expected configuration" do
knife.config[:ssh_verify_host_key] = false
expect(knife.host_verify_opts).to eq(verify_host_key: false)
end
it "provides a correct default when no option given" do
# Host keys are always verified by default.
expect(knife.host_verify_opts).to eq(verify_host_key: "always")
end
end
end
# TODO - test keys_only, password, config source behavior
# Unit coverage for #ssh_identity_opts: key_files/keys_only resolution
# from identity file, gateway identity file, and password configuration.
context "#ssh_identity_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns an empty hash" do
expect(knife.ssh_identity_opts).to eq({})
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
context "when an identity file is specified" do
before do
knife.config[:ssh_identity_file] = "/identity.pem"
end
it "generates the expected configuration" do
expect(knife.ssh_identity_opts).to eq({
key_files: [ "/identity.pem" ],
keys_only: true,
})
end
context "and a password is also specified" do
before do
knife.config[:connection_password] = "blah"
end
it "generates the expected configuration (key, keys_only false)" do
expect(knife.ssh_identity_opts).to eq({
key_files: [ "/identity.pem" ],
keys_only: false,
})
end
end
context "and a gateway is not specified" do
context "but a gateway identity file is specified" do
before do
# Previously this context claimed an identity file was specified
# but never set one, so the example passed vacuously.
knife.config[:ssh_gateway_identity] = "/gateway.pem"
end
it "does not include the gateway identity file in keys" do
expect(knife.ssh_identity_opts).to eq({
key_files: ["/identity.pem"],
keys_only: true,
})
end
end
end
context "and a gateway is specified" do
before do
knife.config[:ssh_gateway] = "example.com"
end
context "and a gateway identity file is not specified" do
it "config includes only identity file and not gateway identity" do
expect(knife.ssh_identity_opts).to eq({
key_files: [ "/identity.pem" ],
keys_only: true,
})
end
end
context "and a gateway identity file is also specified" do
before do
knife.config[:ssh_gateway_identity] = "/gateway.pem"
end
it "generates the expected configuration (both keys, keys_only true)" do
expect(knife.ssh_identity_opts).to eq({
key_files: [ "/identity.pem", "/gateway.pem" ],
keys_only: true,
})
end
end
end
end
context "when no identity file is specified" do
it "generates the expected configuration (no keys, keys_only false)" do
expect(knife.ssh_identity_opts).to eq( {
key_files: [ ],
keys_only: false,
})
end
context "and a gateway with gateway identity file is specified" do
before do
knife.config[:ssh_gateway] = "host"
knife.config[:ssh_gateway_identity] = "/gateway.pem"
end
it "generates the expected configuration (gateway key, keys_only false)" do
expect(knife.ssh_identity_opts).to eq({
key_files: [ "/gateway.pem" ],
keys_only: false,
})
end
end
end
end
end
# Unit coverage for #gateway_opts: parsing of the [user@]host[:port]
# ssh gateway descriptor into bastion_* connection options.
context "#gateway_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns an empty hash" do
expect(knife.gateway_opts).to eq({})
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
context "and ssh_gateway with hostname, user and port provided" do
before do
knife.config[:ssh_gateway] = "testuser@gateway:9021"
end
it "returns a proper bastion host config subset" do
expect(knife.gateway_opts).to eq({
bastion_user: "testuser",
bastion_host: "gateway",
bastion_port: 9021,
})
end
end
context "and ssh_gateway with only hostname is given" do
before do
knife.config[:ssh_gateway] = "gateway"
end
it "returns a proper bastion host config subset" do
expect(knife.gateway_opts).to eq({
bastion_user: nil,
bastion_host: "gateway",
bastion_port: nil,
})
end
end
context "and ssh_gateway with hostname and user is given" do
before do
knife.config[:ssh_gateway] = "testuser@gateway"
end
it "returns a proper bastion host config subset" do
expect(knife.gateway_opts).to eq({
bastion_user: "testuser",
bastion_host: "gateway",
bastion_port: nil,
})
end
end
context "and ssh_gateway with hostname and port is given" do
before do
knife.config[:ssh_gateway] = "gateway:11234"
end
it "returns a proper bastion host config subset" do
expect(knife.gateway_opts).to eq({
bastion_user: nil,
bastion_host: "gateway",
bastion_port: 11234,
})
end
end
context "and ssh_gateway is not provided" do
it "returns an empty hash" do
expect(knife.gateway_opts).to eq({})
end
end
end
end
# Unit coverage for #sudo_opts: sudo/sudo_password/sudo_options
# resolution from use_sudo, use_sudo_password, and preserve_home.
context "#sudo_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns an empty hash" do
expect(knife.sudo_opts).to eq({})
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
context "when use_sudo is set" do
before do
knife.config[:use_sudo] = true
end
it "returns a config that enables sudo" do
expect(knife.sudo_opts).to eq( { sudo: true } )
end
context "when use_sudo_password is also set" do
before do
knife.config[:use_sudo_password] = true
knife.config[:connection_password] = "opscode"
end
it "includes :connection_password value in a sudo-enabled configuration" do
expect(knife.sudo_opts).to eq({
sudo: true,
sudo_password: "opscode",
})
end
end
context "when preserve_home is set" do
before do
knife.config[:preserve_home] = true
end
# -H tells sudo to set HOME to the target user's home directory.
it "enables sudo with sudo_option to preserve home" do
expect(knife.sudo_opts).to eq({
sudo_options: "-H",
sudo: true,
})
end
end
end
# use_sudo is the master switch: without it, the password and
# preserve_home options have no effect.
context "when use_sudo is not set" do
before do
knife.config[:use_sudo_password] = true
knife.config[:preserve_home] = true
end
it "returns configuration for sudo off, ignoring other related options" do
expect(knife.sudo_opts).to eq( { sudo: false } )
end
end
end
end
# Unit coverage for #ssh_opts: ssh-only connection settings
# (agent forwarding, timeout, interactivity).
context "#ssh_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
let(:default_opts) do
{
non_interactive: true,
forward_agent: false,
connection_timeout: 60,
}
end
context "by default" do
it "returns a configuration hash with appropriate defaults" do
expect(knife.ssh_opts).to eq default_opts
end
end
context "when ssh_forward_agent has a value" do
before do
knife.config[:ssh_forward_agent] = true
end
it "returns a default configuration hash with forward_agent set to true" do
expect(knife.ssh_opts).to eq(default_opts.merge(forward_agent: true))
end
end
context "when session_timeout has a value" do
before do
knife.config[:session_timeout] = 120
end
it "returns a default configuration hash with updated timeout value." do
expect(knife.ssh_opts).to eq(default_opts.merge(connection_timeout: 120))
end
end
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns an empty hash because ssh is not winrm" do
expect(knife.ssh_opts).to eq({})
end
end
end
# Unit coverage for #winrm_opts: winrm-only connection settings
# (transport, ssl, fingerprint, kerberos, basic auth, CA trust).
context "#winrm_opts" do
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
# Baseline defaults; each nested context overlays one option on top.
let(:expected) do
{
winrm_transport: "negotiate",
winrm_basic_auth_only: false,
ssl: false,
ssl_peer_fingerprint: nil,
operation_timeout: 60,
}
end
it "generates a correct configuration hash with expected defaults" do
expect(knife.winrm_opts).to eq expected
end
context "with ssl_peer_fingerprint" do
let(:ssl_peer_fingerprint_expected) do
expected.merge({ ssl_peer_fingerprint: "ABCD" })
end
before do
knife.config[:winrm_ssl_peer_fingerprint] = "ABCD"
end
it "generates a correct options hash with ssl_peer_fingerprint from the config provided" do
expect(knife.winrm_opts).to eq ssl_peer_fingerprint_expected
end
end
context "with winrm_ssl" do
let(:ssl_expected) do
expected.merge({ ssl: true })
end
before do
knife.config[:winrm_ssl] = true
end
it "generates a correct options hash with ssl from the config provided" do
expect(knife.winrm_opts).to eq ssl_expected
end
end
context "with winrm_auth_method" do
let(:winrm_auth_method_expected) do
expected.merge({ winrm_transport: "freeaccess" })
end
before do
knife.config[:winrm_auth_method] = "freeaccess"
end
it "generates a correct options hash with winrm_transport from the config provided" do
expect(knife.winrm_opts).to eq winrm_auth_method_expected
end
end
context "with ca_trust_file" do
let(:ca_trust_expected) do
expected.merge({ ca_trust_path: "/trust.me" })
end
before do
knife.config[:ca_trust_file] = "/trust.me"
end
it "generates a correct options hash with ca_trust_file from the config provided" do
expect(knife.winrm_opts).to eq ca_trust_expected
end
end
# kerberos auth pulls in realm and service in addition to the transport.
context "with kerberos auth" do
let(:kerberos_expected) do
expected.merge({
kerberos_service: "testsvc",
kerberos_realm: "TESTREALM",
winrm_transport: "kerberos",
})
end
before do
knife.config[:winrm_auth_method] = "kerberos"
knife.config[:kerberos_service] = "testsvc"
knife.config[:kerberos_realm] = "TESTREALM"
end
it "generates a correct options hash containing kerberos auth configuration from the config provided" do
expect(knife.winrm_opts).to eq kerberos_expected
end
end
context "with winrm_basic_auth_only" do
before do
knife.config[:winrm_basic_auth_only] = true
end
let(:basic_auth_expected) do
expected.merge( { winrm_basic_auth_only: true } )
end
it "generates a correct options hash containing winrm_basic_auth_only from the config provided" do
expect(knife.winrm_opts).to eq basic_auth_expected
end
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
it "returns an empty hash because ssh is not winrm" do
expect(knife.winrm_opts).to eq({})
end
end
end
# End-to-end ordering check for #run: each phase of the bootstrap
# pipeline must be invoked exactly once and in this sequence.
describe "#run" do
it "performs the steps we expect to run a bootstrap" do
expect(knife).to receive(:check_license)
expect(knife).to receive(:validate_name_args!).ordered
expect(knife).to receive(:validate_protocol!).ordered
expect(knife).to receive(:validate_first_boot_attributes!).ordered
expect(knife).to receive(:validate_winrm_transport_opts!).ordered
expect(knife).to receive(:validate_policy_options!).ordered
expect(knife).to receive(:winrm_warn_no_ssl_verification).ordered
expect(knife).to receive(:warn_on_short_session_timeout).ordered
expect(knife).to receive(:connect!).ordered
expect(knife).to receive(:register_client).ordered
expect(knife).to receive(:render_template).and_return "content"
expect(knife).to receive(:upload_bootstrap).with("content").and_return "/remote/path.sh"
expect(knife).to receive(:perform_bootstrap).with("/remote/path.sh")
expect(connection).to receive(:del_file!) # Make sure cleanup happens
knife.run
# Post-run verify expected state changes (not many directly in #run)
expect($stdout.sync).to eq true
end
end
# Unit coverage for #register_client: local client/vault creation vs.
# legacy validation-key registration, plus error paths.
describe "#register_client" do
let(:vault_handler_mock) { double("ChefVaultHandler") }
let(:client_builder_mock) { double("ClientBuilder") }
let(:node_name) { nil }
before do
allow(knife).to receive(:chef_vault_handler).and_return vault_handler_mock
allow(knife).to receive(:client_builder).and_return client_builder_mock
knife.config[:chef_node_name] = node_name
end
shared_examples_for "creating the client locally" do
context "when a valid node name is present" do
let(:node_name) { "test" }
before do
allow(client_builder_mock).to receive(:client).and_return "client"
allow(client_builder_mock).to receive(:client_path).and_return "/key.pem"
end
it "runs client_builder and vault_handler" do
expect(client_builder_mock).to receive(:run)
expect(vault_handler_mock).to receive(:run).with("client")
knife.register_client
end
it "sets the path to the client key in the bootstrap context" do
allow(client_builder_mock).to receive(:run)
allow(vault_handler_mock).to receive(:run).with("client")
knife.register_client
expect(knife.bootstrap_context.client_pem).to eq "/key.pem"
end
end
context "when no valid node name is present" do
let(:node_name) { nil }
it "shows an error and exits" do
expect(knife.ui).to receive(:error)
expect { knife.register_client }.to raise_error(SystemExit)
end
end
end
context "when chef_vault_handler says we're using vault" do
before do
allow(vault_handler_mock).to receive(:doing_chef_vault?).and_return true
end
it_behaves_like "creating the client locally"
end
context "when a non-existent validation key is specified in chef config" do
before do
Chef::Config[:validation_key] = "/blah"
allow(vault_handler_mock).to receive(:doing_chef_vault?).and_return false
allow(File).to receive(:exist?).with(%r{/blah}).and_return false
end
it_behaves_like "creating the client locally"
end
context "when a valid validation key is given and we're doing old-style client creation" do
before do
Chef::Config[:validation_key] = "/blah"
allow(File).to receive(:exist?).with(%r{/blah}).and_return true
allow(vault_handler_mock).to receive(:doing_chef_vault?).and_return false
end
it "shows a warning message" do
expect(knife.ui).to receive(:warn).twice
knife.register_client
end
end
end
# Unit coverage for #perform_bootstrap: running the uploaded script
# remotely, streaming its output, and failing on non-zero exit.
describe "#perform_bootstrap" do
let(:exit_status) { 0 }
let(:result_mock) { double("result", exit_status: exit_status, stderr: "A message") }
before do
allow(connection).to receive(:hostname).and_return "testhost"
end
it "runs the remote script and logs the output" do
expect(knife.ui).to receive(:info).with(/Bootstrapping.*/)
expect(knife).to receive(:bootstrap_command)
.with("/path.sh")
.and_return("sh /path.sh")
expect(connection)
.to receive(:run_command)
.with("sh /path.sh")
.and_yield("output here")
.and_return result_mock
expect(knife.ui).to receive(:msg).with(/testhost/)
knife.perform_bootstrap("/path.sh")
end
context "when the remote command fails" do
let(:exit_status) { 1 }
it "shows an error and exits" do
expect(knife.ui).to receive(:info).with(/Bootstrapping.*/)
expect(knife).to receive(:bootstrap_command)
.with("/path.sh")
.and_return("sh /path.sh")
expect(connection).to receive(:run_command).with("sh /path.sh").and_return result_mock
expect { knife.perform_bootstrap("/path.sh") }.to raise_error(SystemExit)
end
end
end
# Unit coverage for #connect!: normal connection, unknown-host-key
# confirmation/retry, and retry-with-password on auth failures.
describe "#connect!" do
before do
# These are not required at run-time because train will handle its own
# protocol loading. In this case, we're simulating train failures and have to load
# them ourselves.
require "net/ssh"
require "train/transports/ssh"
end
context "in the normal case" do
it "connects using the connection_opts and notifies the operator of progress" do
expect(knife.ui).to receive(:info).with(/Connecting to.*/)
expect(knife).to receive(:connection_opts).and_return( { opts: "here" })
expect(knife).to receive(:do_connect).with( { opts: "here" } )
knife.connect!
end
end
context "when a general non-auth-failure occurs" do
let(:expected_error) { RuntimeError.new }
before do
allow(knife).to receive(:do_connect).and_raise(expected_error)
end
it "re-raises the exception" do
expect { knife.connect! }.to raise_error(expected_error)
end
end
context "when ssh fingerprint is invalid" do
let(:expected_error) { Train::Error.new("fingerprint AA:BB is unknown for \"blah,127.0.0.1\"") }
before do
allow(knife).to receive(:do_connect).and_raise(expected_error)
end
it "warns, prompts to accept, then connects with verify_host_key of accept_new" do
# First do_connect raises; after the operator confirms, the second
# do_connect must be retried with verify_host_key: :accept_new.
expect(knife).to receive(:do_connect).and_raise(expected_error)
expect(knife.ui).to receive(:confirm)
.with(/.*host 'blah \(127.0.0.1\)'.*AA:BB.*Are you sure you want to continue.*/m)
.and_return(true)
expect(knife).to receive(:do_connect) do |opts|
expect(opts[:verify_host_key]).to eq :accept_new
end
knife.connect!
end
end
context "when an auth failure occurs" do
let(:expected_error) do
e = Train::Error.new
actual = Net::SSH::AuthenticationFailed.new
# Simulate train's nested error - they wrap
# ssh/network errors in a TrainError.
allow(e).to receive(:cause).and_return(actual)
e
end
let(:expected_error_password_prompt) do
e = Train::ClientError.new
reason = :no_ssh_password_or_key_available
allow(e).to receive(:reason).and_return(reason)
e
end
let(:expected_error_password_prompt_winrm) do
e = RuntimeError.new
message = "password is a required option"
allow(e).to receive(:message).and_return(message)
e
end
context "and password auth was used" do
before do
allow(connection).to receive(:password_auth?).and_return true
end
it "re-raises the error so as not to resubmit the same failing password" do
expect(knife).to receive(:do_connect).and_raise(expected_error)
expect { knife.connect! }.to raise_error(expected_error)
end
end
context "and password auth was not used" do
before do
allow(connection).to receive(:password_auth?).and_return false
allow(connection).to receive(:user).and_return "testuser"
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "when using ssh" do
let(:connection_protocol) { "ssh" }
it "warns, prompts for password, then reconnects with a password-enabled configuration using the new password" do
question_mock = double("question")
expect(knife).to receive(:do_connect).and_raise(expected_error_password_prompt)
expect(knife.ui).to receive(:warn).with(/Failed to auth.*/)
expect(knife.ui).to receive(:ask).and_return("newpassword")
# Ensure that we set echo off to prevent showing password on the screen
expect(knife).to receive(:do_connect) do |opts|
expect(opts[:password]).to eq "newpassword"
end
knife.connect!
end
end
context "when using winrm" do
let(:connection_protocol) { "winrm" }
it "warns, prompts for password, then reconnects with a password-enabled configuration using the new password for" do
expect(knife).to receive(:do_connect).and_raise(expected_error_password_prompt_winrm)
expect(knife.ui).to receive(:warn).with(/Failed to auth.*/)
expect(knife.ui).to receive(:ask).and_return("newpassword")
# Ensure that we set echo off to prevent showing password on the screen
expect(knife).to receive(:do_connect) do |opts|
expect(opts[:password]).to eq "newpassword"
end
knife.connect!
end
end
end
end
end
# Running with no host argument must abort with a usage error.
it "verifies that a server to bootstrap was given as a command line arg" do
knife.name_args = nil
expect(knife).to receive(:check_license)
expect { knife.run }.to raise_error(SystemExit)
expect(stderr.string).to match(/ERROR:.+FQDN or ip/)
end
# #bootstrap_context must return a platform-appropriate context object
# for the bootstrap template to render against.
describe "#bootstrap_context" do
context "under Windows" do
let(:windows_test) { true }
it "creates a WindowsBootstrapContext" do
require "chef/knife/core/windows_bootstrap_context"
# be_an_instance_of checks the exact class, same as `.class).to eq`.
expect(knife.bootstrap_context).to be_an_instance_of(Chef::Knife::Core::WindowsBootstrapContext)
end
end
context "under linux" do
let(:linux_test) { true }
it "creates a BootstrapContext" do
require "chef/knife/core/bootstrap_context"
expect(knife.bootstrap_context).to be_an_instance_of(Chef::Knife::Core::BootstrapContext)
end
end
end
# Unit coverage for #config_value: lookup order is merged CLI config,
# then the alternative Chef::Config[:knife] key, then the default.
describe "#config_value" do
before do
knife.config[:test_key_a] = "a from cli"
knife.config[:test_key_b] = "b from cli"
Chef::Config[:knife][:test_key_a] = "a from Chef::Config"
Chef::Config[:knife][:test_key_c] = "c from Chef::Config"
Chef::Config[:knife][:alt_test_key_c] = "alt c from Chef::Config"
knife.merge_configs
Chef::Config[:treat_deprecation_warnings_as_errors] = false
end
it "returns the CLI value when the CLI key is set" do
expect(knife.config_value(:test_key_a, :alt_test_key_c)).to eq "a from cli"
end
it "returns the Chef::Config value from the alternative key when the CLI key is not set" do
expect(knife.config_value(:test_key_d, :alt_test_key_c)).to eq "alt c from Chef::Config"
end
it "returns the default value when the key is not provided by CLI or Chef::Config" do
expect(knife.config_value(:missing_key, :missing_key, "found")).to eq "found"
end
end
# Unit coverage for #upload_bootstrap: uploads rendered script content
# into the connection's temp dir with a platform-appropriate extension.
describe "#upload_bootstrap" do
before do
allow(connection).to receive(:temp_dir).and_return(temp_dir)
allow(connection).to receive(:normalize_path) { |a| a }
end
let(:content) { "bootstrap script content" }
context "under Windows" do
let(:windows_test) { true }
let(:temp_dir) { "C:/Temp/bootstrap" }
it "creates a bat file in the temp dir provided by connection, using given content" do
expect(connection).to receive(:upload_file_content!).with(content, "C:/Temp/bootstrap/bootstrap.bat")
expect(knife.upload_bootstrap(content)).to eq "C:/Temp/bootstrap/bootstrap.bat"
end
end
context "under Linux" do
let(:linux_test) { true }
it "creates a sh file in the temp dir provided by connection, using given content" do
expect(connection).to receive(:upload_file_content!).with(content, "/tmp/bootstrap/bootstrap.sh")
expect(knife.upload_bootstrap(content)).to eq "/tmp/bootstrap/bootstrap.sh"
end
end
end
# #bootstrap_command wraps the uploaded script path in the correct
# shell invocation for the target platform.
describe "#bootstrap_command" do
context "under Windows" do
let(:windows_test) { true }
it "prefixes the command to run under cmd.exe" do
command = knife.bootstrap_command("autoexec.bat")
expect(command).to eq("cmd.exe /C autoexec.bat")
end
end
context "under Linux" do
let(:linux_test) { true }
it "prefixes the command to run under sh" do
command = knife.bootstrap_command("bootstrap")
expect(command).to eq("sh bootstrap")
end
end
end
# #default_bootstrap_template selects the built-in template name
# appropriate for the target platform.
describe "#default_bootstrap_template" do
context "under Windows" do
let(:windows_test) { true }
it "is windows-chef-client-msi" do
template = knife.default_bootstrap_template
expect(template).to eq("windows-chef-client-msi")
end
end
context "under Linux" do
let(:linux_test) { true }
it "is chef-full" do
template = knife.default_bootstrap_template
expect(template).to eq("chef-full")
end
end
end
# Unit coverage for #do_connect: creates a TrainConnector and connects,
# retrying once with a pty request when sudo demands a TTY.
describe "#do_connect" do
let(:host_descriptor) { "example.com" }
let(:connection) { double("TrainConnector") }
let(:connector_mock) { double("TargetResolver", targets: [ connection ]) }
before do
allow(knife).to receive(:host_descriptor).and_return host_descriptor
end
it "creates a TrainConnector and connects it" do
expect(Chef::Knife::Bootstrap::TrainConnector).to receive(:new).and_return connection
expect(connection).to receive(:connect!)
knife.do_connect({})
end
context "when sshd configured with requiretty" do
let(:pty_err_msg) { "Sudo requires a TTY. Please see the README on how to configure sudo to allow for non-interactive usage." }
let(:expected_error) { Train::UserError.new(pty_err_msg, :sudo_no_tty) }
before do
allow(connection).to receive(:connect!).and_raise(expected_error)
end
# Both attempts fail here (connect! always raises), so after the
# warn-and-retry the original error is re-raised to the caller.
it "retry with pty true request option" do
expect(Chef::Knife::Bootstrap::TrainConnector).to receive(:new).and_return(connection).exactly(2).times
expect(knife.ui).to receive(:warn).with("#{pty_err_msg} - trying with pty request")
expect { knife.do_connect({}) }.to raise_error(expected_error)
end
end
end
describe "validate_winrm_transport_opts!" do
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
# The winrm transport validation is a no-op for ssh connections.
context "when using ssh" do
let(:connection_protocol) { "ssh" }
it "returns true" do
expect(knife.validate_winrm_transport_opts!).to eq true
end
end
context "when using winrm" do
let(:connection_protocol) { "winrm" }
context "with plaintext auth" do
before do
knife.config[:winrm_auth_method] = "plaintext"
end
context "with ssl" do
before do
knife.config[:winrm_ssl] = true
end
it "will not error because we won't send anything in plaintext regardless" do
expect(knife.validate_winrm_transport_opts!).to eq true
end
end
context "without ssl" do
before do
knife.config[:winrm_ssl] = false
end
context "and no validation key exists" do
before do
Chef::Config[:validation_key] = "validation_key.pem"
allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return false
end
it "will error because we will generate and send a client key over the wire in plaintext" do
expect { knife.validate_winrm_transport_opts! }.to raise_error(SystemExit)
end
end
context "and a validation key exists" do
before do
Chef::Config[:validation_key] = "validation_key.pem"
allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return true
end
# TODO - don't we still send validation key?
it "will not error because we don not send client key over the wire" do
expect(knife.validate_winrm_transport_opts!).to eq true
end
end
end
end
context "with other auth" do
before do
knife.config[:winrm_auth_method] = "kerberos"
end
context "and no validation key exists" do
before do
Chef::Config[:validation_key] = "validation_key.pem"
allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return false
end
it "will not error because we're not using plaintext auth" do
expect(knife.validate_winrm_transport_opts!).to eq true
end
end
context "and a validation key exists" do
before do
Chef::Config[:validation_key] = "validation_key.pem"
allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return true
end
it "will not error because a client key won't be sent over the wire in plaintext when a validation key is present" do
expect(knife.validate_winrm_transport_opts!).to eq true
end
end
end
end
end
# A warning is issued only when certificate verification is disabled AND
# there is no alternative trust anchor (ca_trust_file or peer fingerprint).
describe "#winrm_warn_no_ssl_verification" do
  before do
    allow(knife).to receive(:connection_protocol).and_return connection_protocol
  end
  context "when using ssh" do
    let(:connection_protocol) { "ssh" }
    it "does not issue a warning" do
      expect(knife.ui).to_not receive(:warn)
      knife.winrm_warn_no_ssl_verification
    end
  end
  context "when using winrm" do
    let(:connection_protocol) { "winrm" }
    context "winrm_no_verify_cert is set" do
      before do
        knife.config[:winrm_no_verify_cert] = true
      end
      context "and ca_trust_file is present" do
        before do
          knife.config[:ca_trust_file] = "file"
        end
        it "does not issue a warning" do
          expect(knife.ui).to_not receive(:warn)
          knife.winrm_warn_no_ssl_verification
        end
      end
      context "and winrm_ssl_peer_fingerprint is present" do
        before do
          knife.config[:winrm_ssl_peer_fingerprint] = "ABCD"
        end
        it "does not issue a warning" do
          expect(knife.ui).to_not receive(:warn)
          knife.winrm_warn_no_ssl_verification
        end
      end
      context "and neither ca_trust_file nor winrm_ssl_peer_fingerprint is present" do
        it "issues a warning" do
          expect(knife.ui).to receive(:warn)
          knife.winrm_warn_no_ssl_verification
        end
      end
    end
  end
end
# The warning threshold is 15 seconds inclusive; nil (unset) never warns.
describe "#warn_on_short_session_timeout" do
  let(:session_timeout) { 60 }
  before do
    allow(knife).to receive(:session_timeout).and_return(session_timeout)
  end
  context "timeout is not set at all" do
    let(:session_timeout) { nil }
    it "does not issue a warning" do
      expect(knife.ui).to_not receive(:warn)
      knife.warn_on_short_session_timeout
    end
  end
  context "timeout is more than 15" do
    let(:session_timeout) { 16 }
    it "does not issue a warning" do
      expect(knife.ui).to_not receive(:warn)
      knife.warn_on_short_session_timeout
    end
  end
  context "timeout is 15 or less" do
    let(:session_timeout) { 15 }
    it "issues a warning" do
      expect(knife.ui).to receive(:warn)
      knife.warn_on_short_session_timeout
    end
  end
end
end
Remove unused code
Signed-off-by: Vivek Singh <ffdfd29e24c206a3c24e1247563eb4c218c7ffa5@msystechnologies.com>
#
# Author:: Ian Meyer (<ianmmeyer@gmail.com>)
# Copyright:: Copyright 2010-2016, Ian Meyer
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
Chef::Knife::Bootstrap.load_deps
describe Chef::Knife::Bootstrap do
# Shared fixtures for the whole spec. Platform flags default to false and are
# overridden per-context; `connection` is a Train connector double that
# reports the platform via those flags.
# Fix: `let(:linux_test)` was declared twice (true, then false). RSpec lets the
# later definition shadow the earlier one, so the `true` declaration was dead
# code — removed; effective value remains false.
let(:bootstrap_template) { nil }
let(:stderr) { StringIO.new }
let(:bootstrap_cli_options) { [ ] }
let(:windows_test) { false }
let(:linux_test) { false }
let(:unix_test) { false }
let(:ssh_test) { false }
let(:connection) do
  double("TrainConnector",
    windows?: windows_test,
    linux?: linux_test,
    unix?: unix_test)
end
# Builds the Bootstrap knife under test: silences logging, applies the
# optional bootstrap_template knife config, stubs stderr/secret/connection,
# and merges CLI + Chef::Config before returning it.
let(:knife) do
  Chef::Log.logger = Logger.new(StringIO.new)
  Chef::Config[:knife][:bootstrap_template] = bootstrap_template unless bootstrap_template.nil?
  k = Chef::Knife::Bootstrap.new(bootstrap_cli_options)
  allow(k.ui).to receive(:stderr).and_return(stderr)
  allow(k).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(false)
  allow(k).to receive(:connection).and_return connection
  k.merge_configs
  k
end
# check_license delegates to LicenseAcceptance::Acceptor and records the
# acceptance value in Chef::Config[:chef_license] only when a license is
# actually required.
context "#check_license" do
  let(:acceptor) { instance_double(LicenseAcceptance::Acceptor) }
  before do
    expect(LicenseAcceptance::Acceptor).to receive(:new).and_return(acceptor)
  end
  describe "when a license is not required" do
    it "does not set the chef_license" do
      expect(acceptor).to receive(:license_required?).and_return(false)
      knife.check_license
      expect(Chef::Config[:chef_license]).to eq(nil)
    end
  end
  describe "when a license is required" do
    it "sets the chef_license" do
      expect(acceptor).to receive(:license_required?).and_return(true)
      expect(acceptor).to receive(:id_from_mixlib).and_return("id")
      expect(acceptor).to receive(:check_and_persist)
      expect(acceptor).to receive(:acceptance_value).and_return("accept-no-persist")
      knife.check_license
      expect(Chef::Config[:chef_license]).to eq("accept-no-persist")
    end
  end
end
# Template selection defaults to the bundled "chef-full" template.
context "#bootstrap_template" do
  it "should default to chef-full" do
    expect(knife.bootstrap_template).to be_a_kind_of(String)
    expect(File.basename(knife.bootstrap_template)).to eq("chef-full")
  end
end
# Rendering the default template must emit client.rb, first-boot.json and
# (when a secret is provided) the encrypted_data_bag_secret heredoc.
context "#render_template - when using the chef-full default template" do
  let(:rendered_template) do
    knife.merge_configs
    knife.render_template
  end
  it "should render client.rb" do
    expect(rendered_template).to match("cat > /etc/chef/client.rb <<'EOP'")
    expect(rendered_template).to match("chef_server_url \"https://localhost:443\"")
    expect(rendered_template).to match("validation_client_name \"chef-validator\"")
    expect(rendered_template).to match("log_location STDOUT")
  end
  it "should render first-boot.json" do
    expect(rendered_template).to match("cat > /etc/chef/first-boot.json <<'EOP'")
    expect(rendered_template).to match('{"run_list":\[\]}')
  end
  context "and encrypted_data_bag_secret was provided" do
    it "should render encrypted_data_bag_secret file" do
      expect(knife).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(true)
      expect(knife).to receive(:read_secret).and_return("secrets")
      expect(rendered_template).to match("cat > /etc/chef/encrypted_data_bag_secret <<'EOP'")
      expect(rendered_template).to match('{"run_list":\[\]}')
      expect(rendered_template).to match(/secrets/)
    end
  end
end
# Repeated --bootstrap-vault-item flags accumulate into a vault => [items] hash.
context "with --bootstrap-vault-item" do
  let(:bootstrap_cli_options) { [ "--bootstrap-vault-item", "vault1:item1", "--bootstrap-vault-item", "vault1:item2", "--bootstrap-vault-item", "vault2:item1" ] }
  it "sets the knife config cli option correctly" do
    expect(knife.config[:bootstrap_vault_item]).to eq({ "vault1" => %w{item1 item2}, "vault2" => ["item1"] })
  end
end
context "with --bootstrap-preinstall-command" do
  command = "while sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1; do\n echo 'waiting for dpkg lock';\n sleep 1;\n done;"
  let(:bootstrap_cli_options) { [ "--bootstrap-preinstall-command", command ] }
  let(:rendered_template) do
    knife.merge_configs
    knife.render_template
  end
  # NOTE(review): /command/ is a literal regex for the word "command" — it does
  # NOT interpolate the `command` variable above, so this assertion is much
  # weaker than it looks. Confirm intent before tightening to
  # /#{Regexp.escape(command)}/.
  it "configures the preinstall command in the bootstrap template correctly" do
    expect(rendered_template).to match(/command/)
  end
end
# --bootstrap-proxy / --bootstrap-no-proxy are exported as env vars in the
# rendered script.
context "with --bootstrap-proxy" do
  let(:bootstrap_cli_options) { [ "--bootstrap-proxy", "1.1.1.1" ] }
  let(:rendered_template) do
    knife.merge_configs
    knife.render_template
  end
  it "configures the https_proxy environment variable in the bootstrap template correctly" do
    expect(rendered_template).to match(/https_proxy="1.1.1.1" export https_proxy/)
  end
end
context "with --bootstrap-no-proxy" do
  let(:bootstrap_cli_options) { [ "--bootstrap-no-proxy", "localserver" ] }
  let(:rendered_template) do
    knife.merge_configs
    knife.render_template
  end
  it "configures the https_proxy environment variable in the bootstrap template correctly" do
    expect(rendered_template).to match(/no_proxy="localserver" export no_proxy/)
  end
end
# When both a --bootstrap-template flag and a bare template arg are given,
# the flag wins.
context "with :bootstrap_template and :template_file cli options" do
  let(:bootstrap_cli_options) { [ "--bootstrap-template", "my-template", "other-template" ] }
  it "should select bootstrap template" do
    expect(File.basename(knife.bootstrap_template)).to eq("my-template")
  end
end
context "when finding templates" do
# A :bootstrap_template that looks like a path is used directly; a missing
# path is a hard error.
context "when :bootstrap_template config is set to a file" do
  context "that doesn't exist" do
    let(:bootstrap_template) { "/opt/blah/not/exists/template.erb" }
    it "raises an error" do
      expect { knife.find_template }.to raise_error(Errno::ENOENT)
    end
  end
  context "that exists" do
    let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test.erb")) }
    it "loads the given file as the template" do
      expect(Chef::Log).to receive(:trace)
      expect(knife.find_template).to eq(File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test.erb")))
    end
  end
end
context "when :bootstrap_template config is set to a template name" do
# Fixtures for template-name resolution. Search order under test:
# built-in templates, chef_config_dir, $HOME/.chef, then gem files.
let(:bootstrap_template) { "example" }
let(:builtin_template_path) { File.expand_path(File.join(File.dirname(__FILE__), "../../../lib/chef/knife/bootstrap/templates", "example.erb")) }
let(:chef_config_dir_template_path) { "/knife/chef/config/bootstrap/example.erb" }
let(:env_home_template_path) { "/env/home/.chef/bootstrap/example.erb" }
let(:gem_files_template_path) { "/Users/schisamo/.rvm/gems/ruby-1.9.2-p180@chef-0.10/gems/knife-windows-0.5.4/lib/chef/knife/bootstrap/fake-bootstrap-template.erb" }
def configure_chef_config_dir
  allow(Chef::Knife).to receive(:chef_config_dir).and_return("/knife/chef/config")
end
def configure_env_home
  allow(Chef::Util::PathHelper).to receive(:home).with(".chef", "bootstrap", "example.erb").and_yield(env_home_template_path)
end
def configure_gem_files
  allow(Gem).to receive(:find_files).and_return([ gem_files_template_path ])
end
before(:each) do
  # NOTE(review): stubs File.exists? (deprecated alias) — presumably mirroring
  # what find_template calls internally; verify before switching to exist?.
  expect(File).to receive(:exists?).with(bootstrap_template).and_return(false)
end
# Built-in templates take precedence over all other locations.
context "when file is available everywhere" do
  before do
    configure_chef_config_dir
    configure_env_home
    configure_gem_files
    expect(File).to receive(:exists?).with(builtin_template_path).and_return(true)
  end
  it "should load the template from built-in templates" do
    expect(knife.find_template).to eq(builtin_template_path)
  end
end
# chef_config_dir is searched after the built-in templates.
# Fix: the `it` example was accidentally nested INSIDE the `before` block, so
# it was never registered (the example never ran); it also used the deprecated
# `.should` syntax. Restructured to match the sibling contexts below.
context "when file is available in chef_config_dir" do
  before do
    configure_chef_config_dir
    configure_env_home
    configure_gem_files
    expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
    expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(true)
  end
  it "should load the template from chef_config_dir" do
    expect(knife.find_template).to eq(chef_config_dir_template_path)
  end
end
# $HOME/.chef/bootstrap is searched after built-ins and chef_config_dir.
context "when file is available in home directory" do
  before do
    configure_chef_config_dir
    configure_env_home
    configure_gem_files
    expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
    expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(false)
    expect(File).to receive(:exists?).with(env_home_template_path).and_return(true)
  end
  it "should load the template from chef_config_dir" do
    expect(knife.find_template).to eq(env_home_template_path)
  end
end
# Gem files are the last search location; they are also reached when the
# home-directory lookup yields nothing (PathHelper.home returns nil).
context "when file is available in Gem files" do
  before do
    configure_chef_config_dir
    configure_env_home
    configure_gem_files
    expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
    expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(false)
    expect(File).to receive(:exists?).with(env_home_template_path).and_return(false)
    expect(File).to receive(:exists?).with(gem_files_template_path).and_return(true)
  end
  it "should load the template from Gem files" do
    expect(knife.find_template).to eq(gem_files_template_path)
  end
end
context "when file is available in Gem files and home dir doesn't exist" do
  before do
    configure_chef_config_dir
    configure_gem_files
    allow(Chef::Util::PathHelper).to receive(:home).with(".chef", "bootstrap", "example.erb").and_return(nil)
    expect(File).to receive(:exists?).with(builtin_template_path).and_return(false)
    expect(File).to receive(:exists?).with(chef_config_dir_template_path).and_return(false)
    expect(File).to receive(:exists?).with(gem_files_template_path).and_return(true)
  end
  it "should load the template from Gem files" do
    expect(knife.find_template).to eq(gem_files_template_path)
  end
end
end
end
# Both the short and long flag spellings set :bootstrap_template.
["-t", "--bootstrap-template"].each do |t|
  context "when #{t} option is given in the command line" do
    it "sets the knife :bootstrap_template config" do
      knife.parse_options([t, "blahblah"])
      knife.merge_configs
      expect(knife.bootstrap_template).to eq("blahblah")
    end
  end
end
context "with run_list template" do
# The test template renders first-boot JSON; -r items appear in run_list order.
let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test.erb")) }
it "should return an empty run_list" do
  expect(knife.render_template).to eq('{"run_list":[]}')
end
it "should have role[base] in the run_list" do
  knife.parse_options(["-r", "role[base]"])
  knife.merge_configs
  expect(knife.render_template).to eq('{"run_list":["role[base]"]}')
end
it "should have role[base] and recipe[cupcakes] in the run_list" do
  knife.parse_options(["-r", "role[base],recipe[cupcakes]"])
  knife.merge_configs
  expect(knife.render_template).to eq('{"run_list":["role[base]","recipe[cupcakes]"]}')
end
# First-boot attributes can come from -j (inline JSON) or
# --json-attribute-file; supplying both is an input error.
# Fix: removed the space in `Tempfile.new (...)` — space before argument
# parentheses triggers a Ruby parser warning; behavior is unchanged.
context "with bootstrap_attribute options" do
  let(:jsonfile) do
    file = Tempfile.new(["node", ".json"])
    File.open(file.path, "w") { |f| f.puts '{"foo":{"bar":"baz"}}' }
    file
  end
  it "should have foo => {bar => baz} in the first_boot from cli" do
    knife.parse_options(["-j", '{"foo":{"bar":"baz"}}'])
    knife.merge_configs
    expected_hash = FFI_Yajl::Parser.new.parse('{"foo":{"bar":"baz"},"run_list":[]}')
    actual_hash = FFI_Yajl::Parser.new.parse(knife.render_template)
    expect(actual_hash).to eq(expected_hash)
  end
  it "should have foo => {bar => baz} in the first_boot from file" do
    knife.parse_options(["--json-attribute-file", jsonfile.path])
    knife.merge_configs
    expected_hash = FFI_Yajl::Parser.new.parse('{"foo":{"bar":"baz"},"run_list":[]}')
    actual_hash = FFI_Yajl::Parser.new.parse(knife.render_template)
    expect(actual_hash).to eq(expected_hash)
  end
  it "raises a Chef::Exceptions::BootstrapCommandInputError with the proper error message" do
    knife.parse_options(["-j", '{"foo":{"bar":"baz"}}'])
    knife.parse_options(["--json-attribute-file", jsonfile.path])
    knife.merge_configs
    allow(knife).to receive(:validate_name_args!)
    expect(knife).to receive(:check_license)
    expect { knife.run }.to raise_error(Chef::Exceptions::BootstrapCommandInputError)
    jsonfile.close
  end
end
end
# --hint writes ohai hint files; a bare name yields an empty hint, a
# name=path form embeds the file's JSON contents.
context "with hints template" do
  let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "test-hints.erb")) }
  it "should create a hint file when told to" do
    knife.parse_options(["--hint", "openstack"])
    knife.merge_configs
    expect(knife.render_template).to match(%r{/etc/chef/ohai/hints/openstack.json})
  end
  it "should populate a hint file with JSON when given a file to read" do
    allow(::File).to receive(:read).and_return('{ "foo" : "bar" }')
    knife.parse_options(["--hint", "openstack=hints/openstack.json"])
    knife.merge_configs
    expect(knife.render_template).to match(/\{\"foo\":\"bar\"\}/)
  end
end
# Uses a dedicated no_proxy.erb template (and its own knife subject) to check
# how no_proxy and SSL verification CLI flags land in the rendered client.rb.
describe "specifying no_proxy with various entries" do
  subject(:knife) do
    k = described_class.new
    Chef::Config[:knife][:bootstrap_template] = template_file
    allow(k).to receive(:connection).and_return connection
    k.parse_options(options)
    k.merge_configs
    k
  end
  let(:options) { ["--bootstrap-no-proxy", setting] }
  let(:template_file) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "no_proxy.erb")) }
  let(:rendered_template) do
    knife.render_template
  end
  context "via --bootstrap-no-proxy" do
    let(:setting) { "api.opscode.com" }
    it "renders the client.rb with a single FQDN no_proxy entry" do
      expect(rendered_template).to match(/.*no_proxy\s*"api.opscode.com".*/)
    end
  end
  context "via --bootstrap-no-proxy multiple" do
    let(:setting) { "api.opscode.com,172.16.10.*" }
    it "renders the client.rb with comma-separated FQDN and wildcard IP address no_proxy entries" do
      expect(rendered_template).to match(/.*no_proxy\s*"api.opscode.com,172.16.10.\*".*/)
    end
  end
  context "via --ssl-verify-mode none" do
    let(:options) { ["--node-ssl-verify-mode", "none"] }
    it "renders the client.rb with ssl_verify_mode set to :verify_none" do
      expect(rendered_template).to match(/ssl_verify_mode :verify_none/)
    end
  end
  context "via --node-ssl-verify-mode peer" do
    let(:options) { ["--node-ssl-verify-mode", "peer"] }
    it "renders the client.rb with ssl_verify_mode set to :verify_peer" do
      expect(rendered_template).to match(/ssl_verify_mode :verify_peer/)
    end
  end
  context "via --node-ssl-verify-mode all" do
    # "all" is not a valid verify mode and must be rejected at render time.
    let(:options) { ["--node-ssl-verify-mode", "all"] }
    it "raises error" do
      expect { rendered_template }.to raise_error(RuntimeError)
    end
  end
  context "via --node-verify-api-cert" do
    let(:options) { ["--node-verify-api-cert"] }
    it "renders the client.rb with verify_api_cert set to true" do
      expect(rendered_template).to match(/verify_api_cert true/)
    end
  end
  context "via --no-node-verify-api-cert" do
    let(:options) { ["--no-node-verify-api-cert"] }
    it "renders the client.rb with verify_api_cert set to false" do
      expect(rendered_template).to match(/verify_api_cert false/)
    end
  end
end
# When a secret is provided, the rendered script both writes the secret file
# and points client.rb at /etc/chef/encrypted_data_bag_secret.
describe "specifying the encrypted data bag secret key" do
  let(:secret) { "supersekret" }
  let(:options) { [] }
  let(:bootstrap_template) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "secret.erb")) }
  let(:rendered_template) do
    knife.parse_options(options)
    knife.merge_configs
    knife.render_template
  end
  it "creates a secret file" do
    expect(knife).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(true)
    expect(knife).to receive(:read_secret).and_return(secret)
    expect(rendered_template).to match(/#{secret}/)
  end
  it "renders the client.rb with an encrypted_data_bag_secret entry" do
    expect(knife).to receive(:encryption_secret_provided_ignore_encrypt_flag?).and_return(true)
    expect(knife).to receive(:read_secret).and_return(secret)
    expect(rendered_template).to match(%r{encrypted_data_bag_secret\s*"/etc/chef/encrypted_data_bag_secret"})
  end
end
# Every *.crt / *.pem under trusted_certs_dir is inlined into the bootstrap
# script; an empty directory suppresses the trusted_certs setup entirely.
# Fix: corrected the typo "trusted_cets_dir" -> "trusted_certs_dir" in the
# context description.
describe "when transferring trusted certificates" do
  let(:trusted_certs_dir) { Chef::Util::PathHelper.cleanpath(File.join(File.dirname(__FILE__), "../../data/trusted_certs")) }
  let(:rendered_template) do
    knife.merge_configs
    knife.render_template
  end
  before do
    Chef::Config[:trusted_certs_dir] = trusted_certs_dir
    # The validation key read is stubbed so rendering needs no real key file.
    allow(IO).to receive(:read).and_call_original
    allow(IO).to receive(:read).with(File.expand_path(Chef::Config[:validation_key])).and_return("")
  end
  def certificates
    Dir[File.join(trusted_certs_dir, "*.{crt,pem}")]
  end
  it "creates /etc/chef/trusted_certs" do
    expect(rendered_template).to match(%r{mkdir -p /etc/chef/trusted_certs})
  end
  it "copies the certificates in the directory" do
    certificates.each do |cert|
      expect(IO).to receive(:read).with(File.expand_path(cert))
    end
    certificates.each do |cert|
      expect(rendered_template).to match(%r{cat > /etc/chef/trusted_certs/#{File.basename(cert)} <<'EOP'})
    end
  end
  context "when :trusted_certs_dir is empty" do
    let(:trusted_certs_dir) { Chef::Util::PathHelper.cleanpath(File.join(File.dirname(__FILE__), "../../data/trusted_certs_empty")) }
    it "doesn't create /etc/chef/trusted_certs if :trusted_certs_dir is empty" do
      expect(rendered_template).not_to match(%r{mkdir -p /etc/chef/trusted_certs})
    end
  end
end
# Chef::Config[:fips] toggles whether the template emits a fips directive.
context "when doing fips things" do
  let(:template_file) { File.expand_path(File.join(CHEF_SPEC_DATA, "bootstrap", "no_proxy.erb")) }
  let(:trusted_certs_dir) { Chef::Util::PathHelper.cleanpath(File.join(File.dirname(__FILE__), "../../data/trusted_certs")) }
  before do
    Chef::Config[:knife][:bootstrap_template] = template_file
    knife.merge_configs
  end
  let(:rendered_template) do
    knife.render_template
  end
  context "when knife is in fips mode" do
    before do
      Chef::Config[:fips] = true
    end
    it "renders 'fips true'" do
      expect(rendered_template).to match("fips")
    end
  end
  context "when knife is not in fips mode" do
    before do
      # Explicitly set to false because the chef-fips pipeline defaults
      # fips to true.
      Chef::Config[:fips] = false
    end
    it "does not render anything about fips" do
      expect(rendered_template).not_to match("fips")
    end
  end
end
# client_d_dir contents (including nested paths) are inlined into the
# bootstrap script under /etc/chef/client.d; nil suppresses the whole section.
# Fix: the "creates /etc/chef/client.d" assertion used a double-quoted string
# ("...client\.d") where \. collapses to a plain ".", which then matches ANY
# character once the string is coerced to a regex. Replaced with an explicit
# %r{} literal so the dot is genuinely escaped, matching the negative
# assertion above it.
describe "when transferring client.d" do
  let(:rendered_template) do
    knife.merge_configs
    knife.render_template
  end
  before do
    Chef::Config[:client_d_dir] = client_d_dir
  end
  context "when client_d_dir is nil" do
    let(:client_d_dir) { nil }
    it "does not create /etc/chef/client.d" do
      expect(rendered_template).not_to match(%r{mkdir -p /etc/chef/client\.d})
    end
  end
  context "when client_d_dir is set" do
    let(:client_d_dir) do
      Chef::Util::PathHelper.cleanpath(
        File.join(File.dirname(__FILE__), "../../data/client.d_00")
      )
    end
    it "creates /etc/chef/client.d" do
      expect(rendered_template).to match(%r{mkdir -p /etc/chef/client\.d})
    end
    context "a flat directory structure" do
      it "escapes single-quotes" do
        expect(rendered_template).to match("cat > /etc/chef/client.d/02-strings.rb <<'EOP'")
        expect(rendered_template).to match("something '\\\\''/foo/bar'\\\\''")
      end
      it "creates a file 00-foo.rb" do
        expect(rendered_template).to match("cat > /etc/chef/client.d/00-foo.rb <<'EOP'")
        expect(rendered_template).to match("d6f9b976-289c-4149-baf7-81e6ffecf228")
      end
      it "creates a file bar" do
        expect(rendered_template).to match("cat > /etc/chef/client.d/bar <<'EOP'")
        expect(rendered_template).to match("1 / 0")
      end
    end
    context "a nested directory structure" do
      let(:client_d_dir) do
        Chef::Util::PathHelper.cleanpath(
          File.join(File.dirname(__FILE__), "../../data/client.d_01")
        )
      end
      it "creates a file foo/bar.rb" do
        expect(rendered_template).to match("cat > /etc/chef/client.d/foo/bar.rb <<'EOP'")
        expect(rendered_template).to match("1 / 0")
      end
    end
  end
end
# Protocol resolution precedence: host URI scheme > --protocol CLI flag >
# knife config > default "ssh".
describe "#connection_protocol" do
  let(:host_descriptor) { "example.com" }
  let(:config) { {} }
  let(:knife_connection_protocol) { nil }
  before do
    allow(knife).to receive(:config).and_return config
    allow(knife).to receive(:host_descriptor).and_return host_descriptor
    if knife_connection_protocol
      Chef::Config[:knife][:connection_protocol] = knife_connection_protocol
      knife.merge_configs
    end
  end
  context "when protocol is part of the host argument" do
    let(:host_descriptor) { "winrm://myhost" }
    it "returns the value provided by the host argument" do
      expect(knife.connection_protocol).to eq "winrm"
    end
  end
  context "when protocol is provided via the CLI flag" do
    let(:config) { { connection_protocol: "winrm" } }
    it "returns that value" do
      expect(knife.connection_protocol).to eq "winrm"
    end
  end
  context "when protocol is provided via the host argument and the CLI flag" do
    let(:host_descriptor) { "ssh://example.com" }
    let(:config) { { connection_protocol: "winrm" } }
    it "returns the value provided by the host argument" do
      expect(knife.connection_protocol).to eq "ssh"
    end
  end
  context "when no explicit protocol is provided" do
    let(:config) { {} }
    let(:host_descriptor) { "example.com" }
    let(:knife_connection_protocol) { "winrm" }
    it "falls back to knife config" do
      expect(knife.connection_protocol).to eq "winrm"
    end
    context "and there is no knife bootstrap_protocol" do
      let(:knife_connection_protocol) { nil }
      it "falls back to 'ssh'" do
        expect(knife.connection_protocol).to eq "ssh"
      end
    end
  end
end
# validate_protocol! exits on a URL/flag mismatch or an unsupported protocol,
# otherwise returns true.
# Fix: the "protocol is supported" loop re-declared `let(:connection_protocol)`
# repeatedly inside ONE example group; in RSpec the last `let` definition wins
# for every example in the group, so all generated examples were testing only
# the final protocol in SUPPORTED_CONNECTION_PROTOCOLS. Each iteration now
# gets its own nested context so each protocol is actually exercised.
describe "#validate_protocol!" do
  let(:host_descriptor) { "example.com" }
  let(:config) { {} }
  let(:connection_protocol) { "ssh" }
  before do
    allow(knife).to receive(:config).and_return config
    allow(knife).to receive(:connection_protocol).and_return connection_protocol
    allow(knife).to receive(:host_descriptor).and_return host_descriptor
  end
  context "when protocol is provided both in the URL and via --protocol" do
    context "and they do not match" do
      let(:connection_protocol) { "ssh" }
      let(:config) { { connection_protocol: "winrm" } }
      it "outputs an error and exits" do
        expect(knife.ui).to receive(:error)
        expect { knife.validate_protocol! }.to raise_error SystemExit
      end
    end
    context "and they do match" do
      let(:connection_protocol) { "winrm" }
      let(:config) { { connection_protocol: "winrm" } }
      it "returns true" do
        expect(knife.validate_protocol!).to eq true
      end
    end
  end
  context "and the protocol is supported" do
    Chef::Knife::Bootstrap::SUPPORTED_CONNECTION_PROTOCOLS.each do |proto|
      context "(#{proto})" do
        let(:connection_protocol) { proto }
        it "returns true for #{proto}" do
          expect(knife.validate_protocol!).to eq true
        end
      end
    end
  end
  context "and the protocol is not supported" do
    let(:connection_protocol) { "invalid" }
    it "outputs an error and exits" do
      expect(knife.ui).to receive(:error).with(/Unsupported protocol '#{connection_protocol}'/)
      expect { knife.validate_protocol! }.to raise_error SystemExit
    end
  end
end
# Policyfile options must be given as a pair (--policy-name + --policy-group)
# and are mutually exclusive with --run-list; a nil run_list set by a plugin
# is tolerated for backward compatibility.
describe "#validate_policy_options!" do
  context "when only policy_name is given" do
    let(:bootstrap_cli_options) { %w{ --policy-name my-app-server } }
    it "returns an error stating that policy_name and policy_group must be given together" do
      expect { knife.validate_policy_options! }.to raise_error(SystemExit)
      expect(stderr.string).to include("ERROR: --policy-name and --policy-group must be specified together")
    end
  end
  context "when only policy_group is given" do
    let(:bootstrap_cli_options) { %w{ --policy-group staging } }
    it "returns an error stating that policy_name and policy_group must be given together" do
      expect { knife.validate_policy_options! }.to raise_error(SystemExit)
      expect(stderr.string).to include("ERROR: --policy-name and --policy-group must be specified together")
    end
  end
  context "when both policy_name and policy_group are given, but run list is also given" do
    let(:bootstrap_cli_options) { %w{ --policy-name my-app --policy-group staging --run-list cookbook } }
    it "returns an error stating that policyfile and run_list are exclusive" do
      expect { knife.validate_policy_options! }.to raise_error(SystemExit)
      expect(stderr.string).to include("ERROR: Policyfile options and --run-list are exclusive")
    end
  end
  context "when policy_name and policy_group are given with no conflicting options" do
    let(:bootstrap_cli_options) { %w{ --policy-name my-app --policy-group staging } }
    it "passes options validation" do
      expect { knife.validate_policy_options! }.to_not raise_error
    end
    it "passes them into the bootstrap context" do
      expect(knife.bootstrap_context.first_boot).to have_key(:policy_name)
      expect(knife.bootstrap_context.first_boot).to have_key(:policy_group)
    end
    it "ensures that run_list is not set in the bootstrap context" do
      expect(knife.bootstrap_context.first_boot).to_not have_key(:run_list)
    end
  end
  # https://github.com/chef/chef/issues/4131
  # Arguably a bug in the plugin: it shouldn't be setting this to nil, but it
  # worked before, so make it work now.
  context "when a plugin sets the run list option to nil" do
    before do
      knife.config[:run_list] = nil
    end
    it "passes options validation" do
      expect { knife.validate_policy_options! }.to_not raise_error
    end
  end
end
# TODO - this is the only cli option we validate the _option_ itself -
# so we'll know if someone accidentally deletes or renames use_sudo_password
# Is this worth keeping? If so, then it seems we should expand it
# to cover all options.
# Guards against accidental removal/rename of the use_sudo_password option
# definition (presence, description, and long flag).
context "validating use_sudo_password option" do
  it "use_sudo_password contains description and long params for help" do
    expect(knife.options).to(have_key(:use_sudo_password)) \
      && expect(knife.options[:use_sudo_password][:description].to_s).not_to(eq(""))\
      && expect(knife.options[:use_sudo_password][:long].to_s).not_to(eq(""))
  end
end
context "#connection_opts" do
let(:connection_protocol) { "ssh" }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "behavioral test: " do
let(:expected_connection_opts) do
{ base_opts: true,
ssh_identity_opts: true,
ssh_opts: true,
gateway_opts: true,
host_verify_opts: true,
sudo_opts: true,
winrm_opts: true }
end
it "queries and merges only expected configurations" do
expect(knife).to receive(:base_opts).and_return({ base_opts: true })
expect(knife).to receive(:host_verify_opts).and_return({ host_verify_opts: true })
expect(knife).to receive(:gateway_opts).and_return({ gateway_opts: true })
expect(knife).to receive(:sudo_opts).and_return({ sudo_opts: true })
expect(knife).to receive(:winrm_opts).and_return({ winrm_opts: true })
expect(knife).to receive(:ssh_opts).and_return({ ssh_opts: true })
expect(knife).to receive(:ssh_identity_opts).and_return({ ssh_identity_opts: true })
expect(knife.connection_opts).to match expected_connection_opts
end
end
context "functional test: " do
context "when protocol is winrm" do
let(:connection_protocol) { "winrm" }
# context "and neither CLI nor Chef::Config config entries have been provided"
# end
context "and all supported values are provided as Chef::Config entries" do
before do
# Set everything to easily identifiable and obviously fake values
# to verify that Chef::Config is being sourced instead of knife.config
knife.config = {}
Chef::Config[:knife][:max_wait] = 9999
Chef::Config[:knife][:winrm_user] = "winbob"
Chef::Config[:knife][:winrm_port] = 9999
Chef::Config[:knife][:ca_trust_file] = "trust.me"
Chef::Config[:knife][:kerberos_realm] = "realm"
Chef::Config[:knife][:kerberos_service] = "service"
Chef::Config[:knife][:winrm_auth_method] = "kerberos" # default is negotiate
Chef::Config[:knife][:winrm_basic_auth_only] = true
Chef::Config[:knife][:winrm_no_verify_cert] = true
Chef::Config[:knife][:session_timeout] = 9999
Chef::Config[:knife][:winrm_ssl] = true
Chef::Config[:knife][:winrm_ssl_peer_fingerprint] = "ABCDEF"
end
context "and no CLI options have been given" do
let(:expected_result) do
{
logger: Chef::Log, # not configurable
ca_trust_path: "trust.me",
max_wait_until_ready: 9999,
operation_timeout: 9999,
ssl_peer_fingerprint: "ABCDEF",
winrm_transport: "kerberos",
winrm_basic_auth_only: true,
user: "winbob",
port: 9999,
self_signed: true,
ssl: true,
kerberos_realm: "realm",
kerberos_service: "service",
}
end
it "generates a config hash using the Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
context "and some CLI options have been given" do
# The CLI-provided entries (ca_trust_file, connection_user/port/password)
# must override their Chef::Config counterparts; all other values still fall
# back to the Chef::Config entries from the enclosing before block.
let(:expected_result) do
{
logger: Chef::Log, # not configurable
ca_trust_path: "no trust",
max_wait_until_ready: 9999,
operation_timeout: 9999,
ssl_peer_fingerprint: "ABCDEF",
winrm_transport: "kerberos",
winrm_basic_auth_only: true,
user: "microsoftbob",
port: 12,
self_signed: true,
ssl: true,
kerberos_realm: "realm",
kerberos_service: "service",
password: "lobster",
}
end
before do
knife.config[:ca_trust_file] = "no trust"
knife.config[:connection_user] = "microsoftbob"
knife.config[:connection_port] = 12
knife.config[:winrm_port] = "13" # indirectly verify we're not looking for the wrong CLI flag
knife.config[:connection_password] = "lobster"
end
it "generates a config hash using the CLI options when available and falling back to Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
context "and all CLI options have been given" do
before do
# We'll force kerberos via knife.config because it
# causes additional options to populate - make sure
# Chef::Config is different so we can be sure that we didn't
# pull in the Chef::Config value
Chef::Config[:knife][:winrm_auth_method] = "negotiate"
knife.config[:connection_password] = "blue"
knife.config[:max_wait] = 1000
knife.config[:connection_user] = "clippy"
knife.config[:connection_port] = 1000
knife.config[:winrm_port] = 1001 # We should not see this value get used
knife.config[:ca_trust_file] = "trust.the.internet"
knife.config[:kerberos_realm] = "otherrealm"
knife.config[:kerberos_service] = "otherservice"
knife.config[:winrm_auth_method] = "kerberos" # default is negotiate
knife.config[:winrm_basic_auth_only] = false
knife.config[:winrm_no_verify_cert] = false
knife.config[:session_timeout] = 1000
knife.config[:winrm_ssl] = false
knife.config[:winrm_ssl_peer_fingerprint] = "FEDCBA"
end
# Every expected value traces back to a knife.config entry above; none of
# the 9999/"winbob"-style Chef::Config fakes may appear here.
let(:expected_result) do
{
logger: Chef::Log, # not configurable
ca_trust_path: "trust.the.internet",
max_wait_until_ready: 1000,
operation_timeout: 1000,
ssl_peer_fingerprint: "FEDCBA",
winrm_transport: "kerberos",
winrm_basic_auth_only: false,
user: "clippy",
port: 1000,
self_signed: false,
ssl: false,
kerberos_realm: "otherrealm",
kerberos_service: "otherservice",
password: "blue",
}
end
it "generates a config hash using the CLI options and pulling nothing from Chef::Config" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
end # with underlying Chef::Config values
context "and no values are provided from Chef::Config or CLI" do
before do
# We will use knife's actual config since these tests
# have assumptions based on CLI default values
end
# The winrm connection option defaults expected with no configuration at all.
let(:expected_result) do
{
logger: Chef::Log,
operation_timeout: 60,
self_signed: false,
ssl: false,
ssl_peer_fingerprint: nil,
winrm_basic_auth_only: false,
winrm_transport: "negotiate",
}
end
it "populates appropriate defaults" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
end # winrm
context "when protocol is ssh" do
let(:connection_protocol) { "ssh" }
# context "and neither CLI nor Chef::Config config entries have been provided"
# end
context "and all supported values are provided as Chef::Config entries" do
before do
# Set everything to easily identifiable and obviously fake values
# to verify that Chef::Config is being sourced instead of knife.config
# Note: the ssh_gateway string below is later expected to be parsed into
# bastion_host/bastion_port/bastion_user, and both .pem paths into key_files.
knife.config = {}
Chef::Config[:knife][:max_wait] = 9999
Chef::Config[:knife][:session_timeout] = 9999
Chef::Config[:knife][:ssh_user] = "sshbob"
Chef::Config[:knife][:ssh_port] = 9999
Chef::Config[:knife][:host_key_verify] = false
Chef::Config[:knife][:ssh_gateway_identity] = "/gateway.pem"
Chef::Config[:knife][:ssh_gateway] = "admin@mygateway.local:1234"
Chef::Config[:knife][:ssh_identity_file] = "/identity.pem"
Chef::Config[:knife][:use_sudo_password] = false # We have no password.
end
context "and no CLI options have been given" do
# All values derive from the Chef::Config[:knife] entries above; the
# "admin@mygateway.local:1234" gateway string is split into the three
# bastion_* options, and both identity files land in key_files.
let(:expected_result) do
{
logger: Chef::Log, # not configurable
max_wait_until_ready: 9999.0,
connection_timeout: 9999,
user: "sshbob",
bastion_host: "mygateway.local",
bastion_port: 1234,
bastion_user: "admin",
forward_agent: false,
keys_only: true,
key_files: ["/identity.pem", "/gateway.pem"],
sudo: false,
verify_host_key: "always",
port: 9999,
non_interactive: true,
}
end
it "generates a correct config hash using the Chef::Config values" do
knife.merge_configs
expect(knife.connection_opts).to match expected_result
end
end
context "and unsupported Chef::Config options are given in Chef::Config, not in CLI" do
# Setting these keys only in Chef::Config must not surface them (or any
# corresponding key) in the generated connection options.
before do
Chef::Config[:knife][:password] = "blah"
Chef::Config[:knife][:ssh_password] = "blah"
Chef::Config[:knife][:preserve_home] = true
Chef::Config[:knife][:use_sudo] = true
Chef::Config[:knife][:ssh_forward_agent] = "blah"
end
it "does not include the corresponding option in the connection options" do
knife.merge_configs
expect(knife.connection_opts.key?(:password)).to eq false
expect(knife.connection_opts.key?(:ssh_forward_agent)).to eq false
expect(knife.connection_opts.key?(:use_sudo)).to eq false
expect(knife.connection_opts.key?(:preserve_home)).to eq false
end
end
context "and some CLI options have been given" do
  before do
    knife.config = {}
    knife.config[:connection_user] = "sshalice"
    knife.config[:connection_port] = 12
    knife.config[:ssh_port] = "13" # canary to indirectly verify we're not looking for the wrong CLI flag
    knife.config[:connection_password] = "feta cheese"
    knife.config[:max_wait] = 150
    knife.config[:session_timeout] = 120
    knife.config[:use_sudo] = true
    # Fixed typo: this was previously :use_sudo_pasword, which silently set
    # an unused key and left use_sudo_password untested here.
    knife.config[:use_sudo_password] = true
    knife.config[:ssh_forward_agent] = true
  end
  let(:expected_result) do
    {
      logger: Chef::Log, # not configurable
      max_wait_until_ready: 150.0, # cli
      connection_timeout: 120, # cli
      user: "sshalice", # cli
      password: "feta cheese", # cli
      bastion_host: "mygateway.local", # Config
      bastion_port: 1234, # Config
      bastion_user: "admin", # Config
      forward_agent: true, # cli
      keys_only: false, # implied false from config password present
      key_files: ["/identity.pem", "/gateway.pem"], # Config
      sudo: true, # cli
      sudo_password: "feta cheese", # cli - use_sudo_password pulls in connection_password
      verify_host_key: "always", # Config
      port: 12, # cli
      non_interactive: true,
    }
  end
  it "generates a config hash using the CLI options when available and falling back to Chef::Config values" do
    knife.merge_configs
    expect(knife.connection_opts).to match expected_result
  end
end
context "and all CLI options have been given" do
  before do
    knife.config = {}
    knife.config[:max_wait] = 150
    knife.config[:session_timeout] = 120
    knife.config[:connection_user] = "sshroot"
    knife.config[:connection_port] = 1000
    knife.config[:connection_password] = "blah"
    knife.config[:forward_agent] = true
    knife.config[:use_sudo] = true
    knife.config[:use_sudo_password] = true
    knife.config[:preserve_home] = true
    # (Removed a misspelled :use_sudo_pasword entry that was redundant with
    # the correctly-spelled key set above and had no effect.)
    knife.config[:ssh_forward_agent] = true
    knife.config[:ssh_verify_host_key] = true
    knife.config[:ssh_gateway_identity] = "/gateway-identity.pem"
    knife.config[:ssh_gateway] = "me@example.com:10"
    knife.config[:ssh_identity_file] = "/my-identity.pem"
    # We'll set these as canaries - if one of these values shows up
    # in a failed test, then the behavior of not pulling from these keys
    # out of knife.config is broken:
    knife.config[:ssh_user] = "do not use"
    knife.config[:ssh_port] = 1001
  end
  let(:expected_result) do
    {
      logger: Chef::Log, # not configurable
      max_wait_until_ready: 150,
      connection_timeout: 120,
      user: "sshroot",
      password: "blah",
      port: 1000,
      bastion_host: "example.com",
      bastion_port: 10,
      bastion_user: "me",
      forward_agent: true,
      keys_only: false,
      key_files: ["/my-identity.pem", "/gateway-identity.pem"],
      sudo: true,
      sudo_options: "-H",
      sudo_password: "blah",
      verify_host_key: true,
      non_interactive: true,
    }
  end
  it "generates a config hash using the CLI options and pulling nothing from Chef::Config" do
    knife.merge_configs
    expect(knife.connection_opts).to match expected_result
  end
end
end
context "and no values are provided from Chef::Config or CLI" do
  # No before block needed: these tests rely on knife's actual CLI defaults.
  # (Removed a dead `config = {}` local-variable assignment that had no
  # effect; the winrm sibling context uses an empty before block for the
  # same purpose.)
  let(:expected_result) do
    {
      forward_agent: false,
      key_files: [],
      logger: Chef::Log,
      keys_only: false,
      sudo: false,
      verify_host_key: "always",
      non_interactive: true,
      connection_timeout: 60,
    }
  end
  it "populates appropriate defaults" do
    knife.merge_configs
    expect(knife.connection_opts).to match expected_result
  end
end
end # ssh
end # functional tests
end # connection_opts
context "#base_opts" do
# base_opts assembles the protocol-agnostic subset of connection options:
# port, user, logger, and - only when configured - password.
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for all protocols" do
context "when password is provided" do
before do
knife.config[:connection_port] = 250
knife.config[:connection_user] = "test"
knife.config[:connection_password] = "opscode"
end
let(:expected_opts) do
{
port: 250,
user: "test",
logger: Chef::Log,
password: "opscode",
}
end
it "generates the correct options" do
expect(knife.base_opts).to eq expected_opts
end
end
context "when password is not provided" do
before do
knife.config[:connection_port] = 250
knife.config[:connection_user] = "test"
end
let(:expected_opts) do
{
port: 250,
user: "test",
logger: Chef::Log,
}
end
it "generates the correct options" do
# `eq` is exact: no :password key at all may be present, not even nil.
expect(knife.base_opts).to eq expected_opts
end
end
end
end
context "#host_verify_opts" do
# Maps the per-protocol "skip host verification" knife flags onto the
# option keys used downstream (:self_signed for winrm, :verify_host_key
# for ssh), with verification enabled by default in both cases.
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns the expected configuration" do
knife.config[:winrm_no_verify_cert] = true
expect(knife.host_verify_opts).to eq( { self_signed: true } )
end
it "provides a correct default when no option given" do
expect(knife.host_verify_opts).to eq( { self_signed: false } )
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
it "returns the expected configuration" do
knife.config[:ssh_verify_host_key] = false
expect(knife.host_verify_opts).to eq( { verify_host_key: false } )
end
it "provides a correct default when no option given" do
expect(knife.host_verify_opts).to eq( { verify_host_key: "always" } )
end
end
end
# TODO - test keys_only, password, config source behavior
# ssh_identity_opts builds the :key_files / :keys_only pair. keys_only is true
# only when an identity file is given and no password is configured; a gateway
# identity file is included only when a gateway is actually configured.
context "#ssh_identity_opts" do
  let(:connection_protocol) { nil }
  before do
    allow(knife).to receive(:connection_protocol).and_return connection_protocol
  end
  context "for winrm" do
    let(:connection_protocol) { "winrm" }
    it "returns an empty hash" do
      expect(knife.ssh_identity_opts).to eq({})
    end
  end
  context "for ssh" do
    let(:connection_protocol) { "ssh" }
    context "when an identity file is specified" do
      before do
        knife.config[:ssh_identity_file] = "/identity.pem"
      end
      it "generates the expected configuration" do
        expect(knife.ssh_identity_opts).to eq({
          key_files: [ "/identity.pem" ],
          keys_only: true,
        })
      end
      context "and a password is also specified" do
        before do
          knife.config[:connection_password] = "blah"
        end
        it "generates the expected configuration (key, keys_only false)" do
          expect(knife.ssh_identity_opts).to eq({
            key_files: [ "/identity.pem" ],
            keys_only: false,
          })
        end
      end
      context "and a gateway is not specified" do
        context "but a gateway identity file is specified" do
          before do
            # Fixed: this context previously set no gateway identity at all,
            # so the example passed vacuously without exercising the
            # described "do not include it" behavior.
            knife.config[:ssh_gateway_identity] = "/gateway.pem"
          end
          it "does not include the gateway identity file in keys" do
            expect(knife.ssh_identity_opts).to eq({
              key_files: ["/identity.pem"],
              keys_only: true,
            })
          end
        end
      end
      context "and a gateway is specified" do
        before do
          knife.config[:ssh_gateway] = "example.com"
        end
        context "and a gateway identity file is not specified" do
          it "config includes only identity file and not gateway identity" do
            expect(knife.ssh_identity_opts).to eq({
              key_files: [ "/identity.pem" ],
              keys_only: true,
            })
          end
        end
        context "and a gateway identity file is also specified" do
          before do
            knife.config[:ssh_gateway_identity] = "/gateway.pem"
          end
          it "generates the expected configuration (both keys, keys_only true)" do
            expect(knife.ssh_identity_opts).to eq({
              key_files: [ "/identity.pem", "/gateway.pem" ],
              keys_only: true,
            })
          end
        end
      end
    end
    context "when no identity file is specified" do
      it "generates the expected configuration (no keys, keys_only false)" do
        expect(knife.ssh_identity_opts).to eq( {
          key_files: [ ],
          keys_only: false,
        })
      end
      context "and a gateway with gateway identity file is specified" do
        before do
          knife.config[:ssh_gateway] = "host"
          knife.config[:ssh_gateway_identity] = "/gateway.pem"
        end
        it "generates the expected configuration (gateway key, keys_only false)" do
          expect(knife.ssh_identity_opts).to eq({
            key_files: [ "/gateway.pem" ],
            keys_only: false,
          })
        end
      end
    end
  end
end
# gateway_opts parses the ssh_gateway string ("user@host:port", with user and
# port optional) into the :bastion_user/:bastion_host/:bastion_port trio.
context "#gateway_opts" do
  let(:connection_protocol) { nil }
  before do
    allow(knife).to receive(:connection_protocol).and_return connection_protocol
  end
  context "for winrm" do
    let(:connection_protocol) { "winrm" }
    it "returns an empty hash" do
      expect(knife.gateway_opts).to eq({})
    end
  end
  context "for ssh" do
    let(:connection_protocol) { "ssh" }
    context "and ssh_gateway with hostname, user and port provided" do
      before do
        knife.config[:ssh_gateway] = "testuser@gateway:9021"
      end
      it "returns a proper bastion host config subset" do
        expect(knife.gateway_opts).to eq({
          bastion_user: "testuser",
          bastion_host: "gateway",
          bastion_port: 9021,
        })
      end
    end
    context "and ssh_gateway with only hostname is given" do
      before do
        knife.config[:ssh_gateway] = "gateway"
      end
      it "returns a proper bastion host config subset" do
        expect(knife.gateway_opts).to eq({
          bastion_user: nil,
          bastion_host: "gateway",
          bastion_port: nil,
        })
      end
    end
    # Fixed duplicated word in the next two descriptions ("is is given").
    context "and ssh_gateway with hostname and user is given" do
      before do
        knife.config[:ssh_gateway] = "testuser@gateway"
      end
      it "returns a proper bastion host config subset" do
        expect(knife.gateway_opts).to eq({
          bastion_user: "testuser",
          bastion_host: "gateway",
          bastion_port: nil,
        })
      end
    end
    context "and ssh_gateway with hostname and port is given" do
      before do
        knife.config[:ssh_gateway] = "gateway:11234"
      end
      it "returns a proper bastion host config subset" do
        expect(knife.gateway_opts).to eq({
          bastion_user: nil,
          bastion_host: "gateway",
          bastion_port: 11234,
        })
      end
    end
    context "and ssh_gateway is not provided" do
      it "returns an empty hash" do
        expect(knife.gateway_opts).to eq({})
      end
    end
  end
end
context "#sudo_opts" do
# sudo_opts is ssh-only. :use_sudo gates everything: without it the result
# is { sudo: false } regardless of the other sudo-related flags; with it,
# use_sudo_password pulls in connection_password as :sudo_password and
# preserve_home adds sudo_options "-H".
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
it "returns an empty hash" do
expect(knife.sudo_opts).to eq({})
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
context "when use_sudo is set" do
before do
knife.config[:use_sudo] = true
end
it "returns a config that enables sudo" do
expect(knife.sudo_opts).to eq( { sudo: true } )
end
context "when use_sudo_password is also set" do
before do
knife.config[:use_sudo_password] = true
knife.config[:connection_password] = "opscode"
end
it "includes :connection_password value in a sudo-enabled configuration" do
expect(knife.sudo_opts).to eq({
sudo: true,
sudo_password: "opscode",
})
end
end
context "when preserve_home is set" do
before do
knife.config[:preserve_home] = true
end
it "enables sudo with sudo_option to preserve home" do
expect(knife.sudo_opts).to eq({
sudo_options: "-H",
sudo: true,
})
end
end
end
context "when use_sudo is not set" do
before do
knife.config[:use_sudo_password] = true
knife.config[:preserve_home] = true
end
it "returns configuration for sudo off, ignoring other related options" do
expect(knife.sudo_opts).to eq( { sudo: false } )
end
end
end
end
# ssh_opts supplies the ssh-only defaults (non_interactive, forward_agent,
# connection_timeout) and honors the corresponding CLI overrides.
context "#ssh_opts" do
  let(:connection_protocol) { nil }
  before do
    allow(knife).to receive(:connection_protocol).and_return connection_protocol
  end
  context "for ssh" do
    let(:connection_protocol) { "ssh" }
    let(:default_opts) do
      {
        non_interactive: true,
        forward_agent: false,
        connection_timeout: 60,
      }
    end
    context "by default" do
      it "returns a configuration hash with appropriate defaults" do
        expect(knife.ssh_opts).to eq default_opts
      end
    end
    context "when ssh_forward_agent has a value" do
      before do
        knife.config[:ssh_forward_agent] = true
      end
      it "returns a default configuration hash with forward_agent set to true" do
        expect(knife.ssh_opts).to eq(default_opts.merge(forward_agent: true))
      end
    end
    context "when session_timeout has a value" do
      before do
        knife.config[:session_timeout] = 120
      end
      it "returns a default configuration hash with updated timeout value." do
        expect(knife.ssh_opts).to eq(default_opts.merge(connection_timeout: 120))
      end
    end
  end
  context "for winrm" do
    let(:connection_protocol) { "winrm" }
    # Fixed description typo ("empty has"), matching the phrasing used by the
    # equivalent example in #winrm_opts.
    it "returns an empty hash because ssh is not winrm" do
      expect(knife.ssh_opts).to eq({})
    end
  end
end
context "#winrm_opts" do
# winrm_opts is winrm-only. Each sub-context flips exactly one knife config
# flag and expects the default hash with only the corresponding option
# changed; kerberos auth additionally populates realm/service options.
let(:connection_protocol) { nil }
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "for winrm" do
let(:connection_protocol) { "winrm" }
let(:expected) do
{
winrm_transport: "negotiate",
winrm_basic_auth_only: false,
ssl: false,
ssl_peer_fingerprint: nil,
operation_timeout: 60,
}
end
it "generates a correct configuration hash with expected defaults" do
expect(knife.winrm_opts).to eq expected
end
context "with ssl_peer_fingerprint" do
let(:ssl_peer_fingerprint_expected) do
expected.merge({ ssl_peer_fingerprint: "ABCD" })
end
before do
knife.config[:winrm_ssl_peer_fingerprint] = "ABCD"
end
it "generates a correct options hash with ssl_peer_fingerprint from the config provided" do
expect(knife.winrm_opts).to eq ssl_peer_fingerprint_expected
end
end
context "with winrm_ssl" do
let(:ssl_expected) do
expected.merge({ ssl: true })
end
before do
knife.config[:winrm_ssl] = true
end
it "generates a correct options hash with ssl from the config provided" do
expect(knife.winrm_opts).to eq ssl_expected
end
end
context "with winrm_auth_method" do
let(:winrm_auth_method_expected) do
expected.merge({ winrm_transport: "freeaccess" })
end
before do
knife.config[:winrm_auth_method] = "freeaccess"
end
it "generates a correct options hash with winrm_transport from the config provided" do
expect(knife.winrm_opts).to eq winrm_auth_method_expected
end
end
context "with ca_trust_file" do
let(:ca_trust_expected) do
expected.merge({ ca_trust_path: "/trust.me" })
end
before do
knife.config[:ca_trust_file] = "/trust.me"
end
it "generates a correct options hash with ca_trust_file from the config provided" do
expect(knife.winrm_opts).to eq ca_trust_expected
end
end
context "with kerberos auth" do
let(:kerberos_expected) do
expected.merge({
kerberos_service: "testsvc",
kerberos_realm: "TESTREALM",
winrm_transport: "kerberos",
})
end
before do
knife.config[:winrm_auth_method] = "kerberos"
knife.config[:kerberos_service] = "testsvc"
knife.config[:kerberos_realm] = "TESTREALM"
end
it "generates a correct options hash containing kerberos auth configuration from the config provided" do
expect(knife.winrm_opts).to eq kerberos_expected
end
end
context "with winrm_basic_auth_only" do
before do
knife.config[:winrm_basic_auth_only] = true
end
let(:basic_auth_expected) do
expected.merge( { winrm_basic_auth_only: true } )
end
it "generates a correct options hash containing winrm_basic_auth_only from the config provided" do
expect(knife.winrm_opts).to eq basic_auth_expected
end
end
end
context "for ssh" do
let(:connection_protocol) { "ssh" }
it "returns an empty hash because ssh is not winrm" do
expect(knife.winrm_opts).to eq({})
end
end
end
describe "#run" do
# End-to-end ordering check for the bootstrap workflow with every step
# stubbed out. The .ordered chain pins the validate -> connect -> register
# -> render -> upload -> execute -> cleanup sequence.
it "performs the steps we expect to run a bootstrap" do
expect(knife).to receive(:check_license)
expect(knife).to receive(:validate_name_args!).ordered
expect(knife).to receive(:validate_protocol!).ordered
expect(knife).to receive(:validate_first_boot_attributes!).ordered
expect(knife).to receive(:validate_winrm_transport_opts!).ordered
expect(knife).to receive(:validate_policy_options!).ordered
expect(knife).to receive(:winrm_warn_no_ssl_verification).ordered
expect(knife).to receive(:warn_on_short_session_timeout).ordered
expect(knife).to receive(:connect!).ordered
expect(knife).to receive(:register_client).ordered
expect(knife).to receive(:render_template).and_return "content"
expect(knife).to receive(:upload_bootstrap).with("content").and_return "/remote/path.sh"
expect(knife).to receive(:perform_bootstrap).with("/remote/path.sh")
expect(connection).to receive(:del_file!) # Make sure cleanup happens
knife.run
# Post-run verify expected state changes (not many directly in #run)
expect($stdout.sync).to eq true
end
end
# register_client either builds the client locally (when chef-vault is in use
# or no validation key is available) or falls back to the old validator-based
# path, which only warns. The shared examples cover the local-creation path.
describe "#register_client" do
  let(:vault_handler_mock) { double("ChefVaultHandler") }
  let(:client_builder_mock) { double("ClientBuilder") }
  let(:node_name) { nil }
  before do
    allow(knife).to receive(:chef_vault_handler).and_return vault_handler_mock
    allow(knife).to receive(:client_builder).and_return client_builder_mock
    knife.config[:chef_node_name] = node_name
  end
  shared_examples_for "creating the client locally" do
    context "when a valid node name is present" do
      let(:node_name) { "test" }
      before do
        allow(client_builder_mock).to receive(:client).and_return "client"
        allow(client_builder_mock).to receive(:client_path).and_return "/key.pem"
      end
      it "runs client_builder and vault_handler" do
        expect(client_builder_mock).to receive(:run)
        expect(vault_handler_mock).to receive(:run).with("client")
        knife.register_client
      end
      it "sets the path to the client key in the bootstrap context" do
        allow(client_builder_mock).to receive(:run)
        allow(vault_handler_mock).to receive(:run).with("client")
        knife.register_client
        expect(knife.bootstrap_context.client_pem).to eq "/key.pem"
      end
    end
    context "when no valid node name is present" do
      let(:node_name) { nil }
      it "shows an error and exits" do
        expect(knife.ui).to receive(:error)
        expect { knife.register_client }.to raise_error(SystemExit)
      end
    end
  end
  context "when chef_vault_handler says we're using vault" do
    let(:vault_handler_mock) { double("ChefVaultHandler") }
    before do
      allow(vault_handler_mock).to receive(:doing_chef_vault?).and_return true
    end
    it_behaves_like "creating the client locally"
  end
  # Fixed description typo: "an non-existant" -> "a non-existent".
  context "when a non-existent validation key is specified in chef config" do
    before do
      Chef::Config[:validation_key] = "/blah"
      allow(vault_handler_mock).to receive(:doing_chef_vault?).and_return false
      allow(File).to receive(:exist?).with(%r{/blah}).and_return false
    end
    it_behaves_like "creating the client locally"
  end
  context "when a valid validation key is given and we're doing old-style client creation" do
    before do
      Chef::Config[:validation_key] = "/blah"
      allow(File).to receive(:exist?).with(%r{/blah}).and_return true
      allow(vault_handler_mock).to receive(:doing_chef_vault?).and_return false
    end
    it "shows a warning message" do
      expect(knife.ui).to receive(:warn).twice
      knife.register_client
    end
  end
end
describe "#perform_bootstrap" do
# perform_bootstrap wraps the uploaded script path in bootstrap_command,
# runs it over the connection (streaming output), and exits via SystemExit
# when the remote command reports a non-zero exit status.
let(:exit_status) { 0 }
let(:result_mock) { double("result", exit_status: exit_status, stderr: "A message") }
before do
allow(connection).to receive(:hostname).and_return "testhost"
end
it "runs the remote script and logs the output" do
expect(knife.ui).to receive(:info).with(/Bootstrapping.*/)
expect(knife).to receive(:bootstrap_command)
.with("/path.sh")
.and_return("sh /path.sh")
expect(connection)
.to receive(:run_command)
.with("sh /path.sh")
.and_yield("output here")
.and_return result_mock
expect(knife.ui).to receive(:msg).with(/testhost/)
knife.perform_bootstrap("/path.sh")
end
context "when the remote command fails" do
let(:exit_status) { 1 }
it "shows an error and exits" do
expect(knife.ui).to receive(:info).with(/Bootstrapping.*/)
expect(knife).to receive(:bootstrap_command)
.with("/path.sh")
.and_return("sh /path.sh")
expect(connection).to receive(:run_command).with("sh /path.sh").and_return result_mock
expect { knife.perform_bootstrap("/path.sh") }.to raise_error(SystemExit)
end
end
end
describe "#connect!" do
before do
# These are not required at run-time because train will handle its own
# protocol loading. In this case, we're simulating train failures and have to load
# them ourselves.
# (The Train::Error / Net::SSH classes referenced by the examples below
# must be defined for the doubles to raise them.)
require "net/ssh"
require "train/transports/ssh"
end
context "in the normal case" do
it "connects using the connection_opts and notifies the operator of progress" do
expect(knife.ui).to receive(:info).with(/Connecting to.*/)
# connect! must pass connection_opts through to do_connect unmodified.
expect(knife).to receive(:connection_opts).and_return( { opts: "here" })
expect(knife).to receive(:do_connect).with( { opts: "here" } )
knife.connect!
end
end
context "when a general non-auth-failure occurs" do
# Errors that are neither fingerprint nor auth failures get no special
# handling and must propagate untouched.
let(:expected_error) { RuntimeError.new }
before do
allow(knife).to receive(:do_connect).and_raise(expected_error)
end
it "re-raises the exception" do
expect { knife.connect! }.to raise_error(expected_error)
end
end
context "when ssh fingerprint is invalid" do
# An unknown-fingerprint Train::Error should prompt the operator for
# confirmation and then retry with verify_host_key: :accept_new.
let(:expected_error) { Train::Error.new("fingerprint AA:BB is unknown for \"blah,127.0.0.1\"") }
before do
allow(knife).to receive(:do_connect).and_raise(expected_error)
end
it "warns, prompts to accept, then connects with verify_host_key of accept_new" do
expect(knife).to receive(:do_connect).and_raise(expected_error)
expect(knife.ui).to receive(:confirm)
.with(/.*host 'blah \(127.0.0.1\)'.*AA:BB.*Are you sure you want to continue.*/m)
.and_return(true)
expect(knife).to receive(:do_connect) do |opts|
expect(opts[:verify_host_key]).to eq :accept_new
end
knife.connect!
end
end
# Auth failures: with password auth the error re-raises (retrying the same
# password is pointless); without it, connect! prompts for a password and
# retries using the freshly-entered value.
context "when an auth failure occurs" do
  let(:expected_error) do
    e = Train::Error.new
    actual = Net::SSH::AuthenticationFailed.new
    # Simulate train's nested error - they wrap
    # ssh/network errors in a TrainError.
    allow(e).to receive(:cause).and_return(actual)
    e
  end
  let(:expected_error_password_prompt) do
    e = Train::ClientError.new
    reason = :no_ssh_password_or_key_available
    allow(e).to receive(:reason).and_return(reason)
    e
  end
  let(:expected_error_password_prompt_winrm) do
    e = RuntimeError.new
    message = "password is a required option"
    allow(e).to receive(:message).and_return(message)
    e
  end
  context "and password auth was used" do
    before do
      allow(connection).to receive(:password_auth?).and_return true
    end
    it "re-raises the error so as not to resubmit the same failing password" do
      expect(knife).to receive(:do_connect).and_raise(expected_error)
      expect { knife.connect! }.to raise_error(expected_error)
    end
  end
  context "and password auth was not used" do
    before do
      allow(connection).to receive(:password_auth?).and_return false
      allow(connection).to receive(:user).and_return "testuser"
      allow(knife).to receive(:connection_protocol).and_return connection_protocol
    end
    context "when using ssh" do
      let(:connection_protocol) { "ssh" }
      it "warns, prompts for password, then reconnects with a password-enabled configuration using the new password" do
        expect(knife).to receive(:do_connect).and_raise(expected_error_password_prompt)
        expect(knife.ui).to receive(:warn).with(/Failed to auth.*/)
        expect(knife.ui).to receive(:ask).and_return("newpassword")
        # Verify the retry carries the freshly-prompted password.
        expect(knife).to receive(:do_connect) do |opts|
          expect(opts[:password]).to eq "newpassword"
        end
        knife.connect!
      end
    end
    context "when using winrm" do
      let(:connection_protocol) { "winrm" }
      # Fixed dangling "for" at the end of this description.
      it "warns, prompts for password, then reconnects with a password-enabled configuration using the new password" do
        expect(knife).to receive(:do_connect).and_raise(expected_error_password_prompt_winrm)
        expect(knife.ui).to receive(:warn).with(/Failed to auth.*/)
        expect(knife.ui).to receive(:ask).and_return("newpassword")
        # Verify the retry carries the freshly-prompted password.
        expect(knife).to receive(:do_connect) do |opts|
          expect(opts[:password]).to eq "newpassword"
        end
        knife.connect!
      end
    end
  end
end
end
it "verifies that a server to bootstrap was given as a command line arg" do
# With no target host, run must abort with a usage error before doing any
# connection or registration work.
knife.name_args = nil
expect(knife).to receive(:check_license)
expect { knife.run }.to raise_error(SystemExit)
expect(stderr.string).to match(/ERROR:.+FQDN or ip/)
end
describe "#bootstrap_context" do
  # The context class is selected by target platform: Windows hosts get a
  # WindowsBootstrapContext, everything else a plain BootstrapContext.
  context "under Windows" do
    let(:windows_test) { true }
    it "creates a WindowsBootstrapContext" do
      require "chef/knife/core/windows_bootstrap_context"
      expect(knife.bootstrap_context).to be_an_instance_of(Chef::Knife::Core::WindowsBootstrapContext)
    end
  end
  context "under linux" do
    let(:linux_test) { true }
    it "creates a BootstrapContext" do
      require "chef/knife/core/bootstrap_context"
      expect(knife.bootstrap_context).to be_an_instance_of(Chef::Knife::Core::BootstrapContext)
    end
  end
end
describe "#config_value" do
# Lookup precedence exercised below: merged CLI key first, then the
# alternative Chef::Config[:knife] key, then the supplied default.
before do
knife.config[:test_key_a] = "a from cli"
knife.config[:test_key_b] = "b from cli"
Chef::Config[:knife][:test_key_a] = "a from Chef::Config"
Chef::Config[:knife][:test_key_c] = "c from Chef::Config"
Chef::Config[:knife][:alt_test_key_c] = "alt c from Chef::Config"
knife.merge_configs
# NOTE(review): presumably #config_value triggers a deprecation warning,
# which this disables so the specs don't raise - confirm against the
# implementation.
Chef::Config[:treat_deprecation_warnings_as_errors] = false
end
it "returns the Chef::Config value from the cli when the CLI key is set" do
expect(knife.config_value(:test_key_a, :alt_test_key_c)).to eq "a from cli"
end
it "returns the Chef::Config value from the alternative key when the CLI key is not set" do
expect(knife.config_value(:test_key_d, :alt_test_key_c)).to eq "alt c from Chef::Config"
end
it "returns the default value when the key is not provided by CLI or Chef::Config" do
expect(knife.config_value(:missing_key, :missing_key, "found")).to eq "found"
end
end
# upload_bootstrap writes the rendered script into the connection's temp dir,
# with a platform-appropriate extension, and returns the remote path.
describe "#upload_bootstrap" do
  before do
    allow(connection).to receive(:temp_dir).and_return(temp_dir)
    allow(connection).to receive(:normalize_path) { |a| a }
  end
  let(:content) { "bootstrap script content" }
  context "under Windows" do
    let(:windows_test) { true }
    let(:temp_dir) { "C:/Temp/bootstrap" }
    it "creates a bat file in the temp dir provided by connection, using given content" do
      expect(connection).to receive(:upload_file_content!).with(content, "C:/Temp/bootstrap/bootstrap.bat")
      expect(knife.upload_bootstrap(content)).to eq "C:/Temp/bootstrap/bootstrap.bat"
    end
  end
  context "under Linux" do
    let(:linux_test) { true }
    let(:temp_dir) { "/tmp/bootstrap" }
    # Fixed stray quote in the description ("a 'sh file").
    it "creates an sh file in the temp dir provided by connection, using given content" do
      expect(connection).to receive(:upload_file_content!).with(content, "/tmp/bootstrap/bootstrap.sh")
      expect(knife.upload_bootstrap(content)).to eq "/tmp/bootstrap/bootstrap.sh"
    end
  end
end
describe "#bootstrap_command" do
  # The uploaded script path gets wrapped in a platform-specific launcher.
  context "under Windows" do
    let(:windows_test) { true }
    it "prefixes the command to run under cmd.exe" do
      expect(knife.bootstrap_command("autoexec.bat")).to eq("cmd.exe /C autoexec.bat")
    end
  end
  context "under Linux" do
    let(:linux_test) { true }
    it "prefixes the command to run under sh" do
      expect(knife.bootstrap_command("bootstrap")).to eq("sh bootstrap")
    end
  end
end
describe "#default_bootstrap_template" do
  # Template selection is driven purely by target platform.
  context "under Windows" do
    let(:windows_test) { true }
    it "is windows-chef-client-msi" do
      expect(knife.default_bootstrap_template).to eq("windows-chef-client-msi")
    end
  end
  context "under Linux" do
    let(:linux_test) { true }
    it "is chef-full" do
      expect(knife.default_bootstrap_template).to eq("chef-full")
    end
  end
end
describe "#do_connect" do
# do_connect builds a TrainConnector from the host descriptor and the given
# options, connects it, and retries once with a pty request when the remote
# sudo setup demands a TTY.
let(:host_descriptor) { "example.com" }
let(:connection) { double("TrainConnector") }
# NOTE(review): connector_mock appears unused within this describe block -
# candidate for removal; confirm nothing outside this chunk references it.
let(:connector_mock) { double("TargetResolver", targets: [ connection ]) }
before do
allow(knife).to receive(:host_descriptor).and_return host_descriptor
end
it "creates a TrainConnector and connects it" do
expect(Chef::Knife::Bootstrap::TrainConnector).to receive(:new).and_return connection
expect(connection).to receive(:connect!)
knife.do_connect({})
end
context "when sshd configured with requiretty" do
let(:pty_err_msg) { "Sudo requires a TTY. Please see the README on how to configure sudo to allow for non-interactive usage." }
let(:expected_error) { Train::UserError.new(pty_err_msg, :sudo_no_tty) }
before do
allow(connection).to receive(:connect!).and_raise(expected_error)
end
it "retry with pty true request option" do
# Both the initial attempt and the pty retry fail here, so a connector is
# constructed twice and the error is ultimately re-raised.
expect(Chef::Knife::Bootstrap::TrainConnector).to receive(:new).and_return(connection).exactly(2).times
expect(knife.ui).to receive(:warn).with("#{pty_err_msg} - trying with pty request")
expect { knife.do_connect({}) }.to raise_error(expected_error)
end
end
end
# validate_winrm_transport_opts! only rejects the one combination that would
# leak a freshly-generated client key over the wire in cleartext:
# winrm + plaintext auth + no ssl + no local validation key.
describe "validate_winrm_transport_opts!" do
  before do
    allow(knife).to receive(:connection_protocol).and_return connection_protocol
  end
  context "when using ssh" do
    let(:connection_protocol) { "ssh" }
    it "returns true" do
      expect(knife.validate_winrm_transport_opts!).to eq true
    end
  end
  context "when using winrm" do
    let(:connection_protocol) { "winrm" }
    context "with plaintext auth" do
      before do
        knife.config[:winrm_auth_method] = "plaintext"
      end
      context "with ssl" do
        before do
          knife.config[:winrm_ssl] = true
        end
        it "will not error because we won't send anything in plaintext regardless" do
          expect(knife.validate_winrm_transport_opts!).to eq true
        end
      end
      context "without ssl" do
        before do
          knife.config[:winrm_ssl] = false
        end
        context "and no validation key exists" do
          before do
            Chef::Config[:validation_key] = "validation_key.pem"
            allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return false
          end
          it "will error because we will generate and send a client key over the wire in plaintext" do
            expect { knife.validate_winrm_transport_opts! }.to raise_error(SystemExit)
          end
        end
        context "and a validation key exists" do
          before do
            Chef::Config[:validation_key] = "validation_key.pem"
            allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return true
          end
          # TODO - don't we still send validation key?
          # (Fixed description typo: "we don not send".)
          it "will not error because we do not send a client key over the wire" do
            expect(knife.validate_winrm_transport_opts!).to eq true
          end
        end
      end
    end
    context "with other auth" do
      before do
        knife.config[:winrm_auth_method] = "kerberos"
      end
      context "and no validation key exists" do
        before do
          Chef::Config[:validation_key] = "validation_key.pem"
          allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return false
        end
        it "will not error because we're not using plaintext auth" do
          expect(knife.validate_winrm_transport_opts!).to eq true
        end
      end
      context "and a validation key exists" do
        before do
          Chef::Config[:validation_key] = "validation_key.pem"
          allow(File).to receive(:exist?).with(/.*validation_key.pem/).and_return true
        end
        it "will not error because a client key won't be sent over the wire in plaintext when a validation key is present" do
          expect(knife.validate_winrm_transport_opts!).to eq true
        end
      end
    end
  end
end
# A warning is only issued for winrm when cert verification is disabled and
# no alternative trust anchor (ca_trust_file / ssl fingerprint) is supplied.
describe "#winrm_warn_no_ssl_verification" do
before do
allow(knife).to receive(:connection_protocol).and_return connection_protocol
end
context "when using ssh" do
let(:connection_protocol) { "ssh" }
it "does not issue a warning" do
expect(knife.ui).to_not receive(:warn)
knife.winrm_warn_no_ssl_verification
end
end
context "when using winrm" do
let(:connection_protocol) { "winrm" }
context "winrm_no_verify_cert is set" do
before do
knife.config[:winrm_no_verify_cert] = true
end
context "and ca_trust_file is present" do
before do
knife.config[:ca_trust_file] = "file"
end
it "does not issue a warning" do
expect(knife.ui).to_not receive(:warn)
knife.winrm_warn_no_ssl_verification
end
end
context "and winrm_ssl_peer_fingerprint is present" do
before do
knife.config[:winrm_ssl_peer_fingerprint] = "ABCD"
end
it "does not issue a warning" do
expect(knife.ui).to_not receive(:warn)
knife.winrm_warn_no_ssl_verification
end
end
context "and neither ca_trust_file nor winrm_ssl_peer_fingerprint is present" do
it "issues a warning" do
expect(knife.ui).to receive(:warn)
knife.winrm_warn_no_ssl_verification
end
end
end
end
end
# A warning is emitted only for configured timeouts of 15 seconds or less;
# nil (unset) and anything above 15 stay silent.
describe "#warn_on_short_session_timeout" do
let(:session_timeout) { 60 }
before do
allow(knife).to receive(:session_timeout).and_return(session_timeout)
end
context "timeout is not set at all" do
let(:session_timeout) { nil }
it "does not issue a warning" do
expect(knife.ui).to_not receive(:warn)
knife.warn_on_short_session_timeout
end
end
context "timeout is more than 15" do
let(:session_timeout) { 16 }
it "does not issue a warning" do
expect(knife.ui).to_not receive(:warn)
knife.warn_on_short_session_timeout
end
end
context "timeout is 15 or less" do
let(:session_timeout) { 15 }
it "issues a warning" do
expect(knife.ui).to receive(:warn)
knife.warn_on_short_session_timeout
end
end
end
end
|
# Capistrano deployment configuration for the ceofesa Symfony application.
set :stages, [ "preprod", "prod" ]
set :default_stage, "preprod"
set :stage_dir, "app/config"
require 'capistrano/ext/multistage'

set :application, "ceofesa"
set :domain, "www.ceintranet.org"

ssh_options[:port] = "2224"
# Fix: forward_agent is a boolean Net::SSH flag; it previously held the port
# string "2224" (copy-paste error). Any truthy value behaved as true, so the
# effective behavior is unchanged.
ssh_options[:forward_agent] = true

set :keep_releases, 5

set :scm, :git
set :scm_verbose, true
set :repository, "git@github.com:ChantierEcole/ceofesa.git"
set :deploy_via, :remote_cache
set :deploy_to, "/var/www/ceofesa/preprod"

set :use_sudo, false
set :interactive_mode, false
set :user, "ofesa"

set :shared_files, [ app_path + "/config/parameters.yml", web_path + "/.htaccess" ]
set :shared_children, [ log_path, web_path + "/uploads" ]
# Fix: a second `set :writable_dirs, ["app/cache", "app/logs"]` earlier in the
# file was dead code — it was silently overwritten by this assignment.
set :writable_dirs, [ cache_path ]
set :webserver_user, "www-data"
set :permission_method, :acl
set :use_set_permissions, true

set :use_composer, true
set :dump_assetic_assets, true
set :normalize_asset_timestamps, false

role :web, domain
role :app, domain, :primary => true
role :db, domain, :primary => true

before "symfony:assetic:dump", "symfony:assets:update_version"
after "deploy", "deploy:cleanup"
after "deploy", "symfony:clear_apc"
after "deploy:rollback:cleanup", "symfony:clear_apc"

# Be more verbose by uncommenting the following line
logger.level = Logger::MAX_LEVEL
Add migration to deploy
# Capistrano deployment configuration for the ceofesa Symfony application
# (includes automatic Doctrine migrations after deploy).
set :stages, [ "preprod", "prod" ]
set :default_stage, "preprod"
set :stage_dir, "app/config"
require 'capistrano/ext/multistage'

set :application, "ceofesa"
set :domain, "www.ceintranet.org"

ssh_options[:port] = "2224"
# Fix: forward_agent is a boolean Net::SSH flag; it previously held the port
# string "2224" (copy-paste error). Any truthy value behaved as true, so the
# effective behavior is unchanged.
ssh_options[:forward_agent] = true

set :keep_releases, 5

set :scm, :git
set :scm_verbose, true
set :repository, "git@github.com:ChantierEcole/ceofesa.git"
set :deploy_via, :remote_cache
set :deploy_to, "/var/www/ceofesa/preprod"

set :use_sudo, false
set :interactive_mode, false
set :user, "ofesa"

set :shared_files, [ app_path + "/config/parameters.yml", web_path + "/.htaccess" ]
set :shared_children, [ log_path, web_path + "/uploads" ]
# Fix: a second `set :writable_dirs, ["app/cache", "app/logs"]` earlier in the
# file was dead code — it was silently overwritten by this assignment.
set :writable_dirs, [ cache_path ]
set :webserver_user, "www-data"
set :permission_method, :acl
set :use_set_permissions, true

set :use_composer, true
set :dump_assetic_assets, true
set :normalize_asset_timestamps, false

role :web, domain
role :app, domain, :primary => true
role :db, domain, :primary => true

before "symfony:assetic:dump", "symfony:assets:update_version"
after "deploy", "deploy:cleanup"
after "deploy", "symfony:clear_apc"
after "deploy", "symfony:doctrine:migrations:migrate"
after "deploy:rollback:cleanup", "symfony:clear_apc"

# Be more verbose by uncommenting the following line
logger.level = Logger::MAX_LEVEL
#
# Cookbook Name:: universe_ubuntu
# Spec:: default
#
# Copyright (c) 2016 The Authors, All Rights Reserved.
require 'spec_helper'
# ChefSpec coverage for the universe_ubuntu default recipe: apt setup,
# system packages, Anaconda install, conda env, TensorFlow (GPU wheel),
# Docker, and the gym/universe repositories.
describe 'universe_ubuntu::default' do
context 'When all attributes are default, on an Ubuntu' do
# Guard commands report "not installed yet" so every resource converges.
before do
stub_command('[ -x /home/vagrant/anaconda3/bin/conda ]').and_return(false)
stub_command('[ -e /home/vagrant/anaconda3/envs/universe ]').and_return(false)
stub_command('[ -x /home/vagrant/anaconda3/envs/universe/bin/tensorboard ]').and_return(false)
end
let(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '14.04') do |node|
node.override['universe']['user']['name'] = 'vagrant'
node.override['universe']['user']['home'] = '/home/vagrant'
node.override['universe']['conda_env'][:CONDA_PREFIX] = '/home/vagrant/anaconda3/envs/universe'
node.override['universe']['conda_env'][:PATH] = "/home/vagrant/anaconda3/envs/universe/bin:#{ENV['PATH']}"
node.override['universe']['gpu'] = true
node.automatic['os_version'] = 'specific_kernel_version'
end.converge(described_recipe)
end
it 'converges successfully' do
expect { chef_run }.to_not raise_error
end
it 'Include apt recipe' do
expect(chef_run).to include_recipe('apt::default')
end
it 'add new golang repository' do
expect(chef_run).to add_apt_repository('newer golang apt repo')
end
it 'add docker repository' do
expect(chef_run).to add_apt_repository('docker')
end
pkgs = %w(golang
libjpeg-turbo8-dev
make
tmux
htop
chromium-browser
git
cmake
zlib1g-dev
libjpeg-dev
xvfb
libav-tools
xorg-dev
python-opengl
libboost-all-dev
libsdl2-dev
swig)
pkgs.each do |name|
it "install #{name} package" do
expect(chef_run).to install_package name
end
end
it 'creates remote_file anaconda if missing' do
user = 'vagrant'
expect(chef_run).to create_remote_file_if_missing(
"#{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh")
.with(
owner: user,
group: user,
mode: '0755',
checksum: '73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78'
)
end
it 'installs anaconda' do
expect(chef_run).to run_execute("bash #{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh -b")
.with(user: 'vagrant')
end
it 'creates conda env file' do
expect(chef_run).to create_template('/home/vagrant/environment.yml')
.with(owner: 'vagrant',
group: 'vagrant')
end
it 'creates conda environment' do
expect(chef_run).to run_execute('/home/vagrant/anaconda3/bin/conda env create -f environment.yml')
.with(user: 'vagrant', cwd: '/home/vagrant')
end
# gpu=true above selects the GPU TensorFlow wheel.
it 'Installs Tensorflow' do
conda_prefix = '/home/vagrant/anaconda3/envs/universe'
expect(chef_run).to run_execute("#{conda_prefix}/bin/pip install --ignore-installed --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow-0.11.0-cp35-cp35m-linux_x86_64.whl")
.with(
user: 'vagrant',
environment: {
'CONDA_DEFAULT_ENV' => 'universe',
'CONDA_PREFIX' => conda_prefix,
'PATH' => "#{conda_prefix}/bin:#{ENV['PATH']}"
})
end
docker_pkgs = ['linux-image-extra-specific_kernel_version',
'linux-image-extra-virtual',
'docker-engine']
docker_pkgs.each do |name|
it "Installs #{name} package" do
expect(chef_run).to install_package(name)
end
end
it 'Add current user to docker group' do
expect(chef_run).to modify_group('docker')
end
it 'Clone gym repo' do
expect(chef_run).to sync_git('/home/vagrant/gym')
end
it 'Clone universe repo' do
expect(chef_run).to sync_git('/home/vagrant/universe')
end
it 'Clone starter agent repo' do
expect(chef_run).to sync_git('/home/vagrant/universe-starter-agent')
end
it 'Install Gym modules' do
conda_prefix = '/home/vagrant/anaconda3/envs/universe'
expect(chef_run).to run_execute("#{conda_prefix}/bin/pip install -e '.[all]'")
.with(
user: 'vagrant',
cwd: '/home/vagrant/gym',
environment: {
'CONDA_DEFAULT_ENV' => 'universe',
'CONDA_PREFIX' => conda_prefix,
'PATH' => "#{conda_prefix}/bin:#{ENV['PATH']}"
})
end
it 'Install Universe modules' do
conda_prefix = '/home/vagrant/anaconda3/envs/universe'
expect(chef_run).to run_execute("#{conda_prefix}/bin/pip install -e .")
.with(
user: 'vagrant',
cwd: '/home/vagrant/universe',
environment: {
'CONDA_DEFAULT_ENV' => 'universe',
'CONDA_PREFIX' => conda_prefix,
'PATH' => "#{conda_prefix}/bin:#{ENV['PATH']}"
})
end
end
end
Unit tests for the notifications triggered after adding the user to the docker group
#
# Cookbook Name:: universe_ubuntu
# Spec:: default
#
# Copyright (c) 2016 The Authors, All Rights Reserved.
require 'spec_helper'
# ChefSpec coverage for the universe_ubuntu default recipe, including the
# service notifications fired by the docker group modification.
describe 'universe_ubuntu::default' do
context 'When all attributes are default, on an Ubuntu' do
# Guard commands report "not installed yet" so every resource converges.
before do
stub_command('[ -x /home/vagrant/anaconda3/bin/conda ]').and_return(false)
stub_command('[ -e /home/vagrant/anaconda3/envs/universe ]').and_return(false)
stub_command('[ -x /home/vagrant/anaconda3/envs/universe/bin/tensorboard ]').and_return(false)
end
let(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '14.04') do |node|
node.override['universe']['user']['name'] = 'vagrant'
node.override['universe']['user']['home'] = '/home/vagrant'
node.override['universe']['conda_env'][:CONDA_PREFIX] = '/home/vagrant/anaconda3/envs/universe'
node.override['universe']['conda_env'][:PATH] = "/home/vagrant/anaconda3/envs/universe/bin:#{ENV['PATH']}"
node.override['universe']['gpu'] = true
node.automatic['os_version'] = 'specific_kernel_version'
end.converge(described_recipe)
end
# The group resource under test, used for the notification expectations.
let(:add_user) { chef_run.group('docker') }
it 'converges successfully' do
expect { chef_run }.to_not raise_error
end
it 'Include apt recipe' do
expect(chef_run).to include_recipe('apt::default')
end
it 'add new golang repository' do
expect(chef_run).to add_apt_repository('newer golang apt repo')
end
it 'add docker repository' do
expect(chef_run).to add_apt_repository('docker')
end
pkgs = %w(golang
libjpeg-turbo8-dev
make
tmux
htop
chromium-browser
git
cmake
zlib1g-dev
libjpeg-dev
xvfb
libav-tools
xorg-dev
python-opengl
libboost-all-dev
libsdl2-dev
swig)
pkgs.each do |name|
it "install #{name} package" do
expect(chef_run).to install_package name
end
end
it 'creates remote_file anaconda if missing' do
user = 'vagrant'
expect(chef_run).to create_remote_file_if_missing(
"#{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh")
.with(
owner: user,
group: user,
mode: '0755',
checksum: '73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78'
)
end
it 'installs anaconda' do
expect(chef_run).to run_execute("bash #{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh -b")
.with(user: 'vagrant')
end
it 'creates conda env file' do
expect(chef_run).to create_template('/home/vagrant/environment.yml')
.with(owner: 'vagrant',
group: 'vagrant')
end
it 'creates conda environment' do
expect(chef_run).to run_execute('/home/vagrant/anaconda3/bin/conda env create -f environment.yml')
.with(user: 'vagrant', cwd: '/home/vagrant')
end
# gpu=true above selects the GPU TensorFlow wheel.
it 'Installs Tensorflow' do
conda_prefix = '/home/vagrant/anaconda3/envs/universe'
expect(chef_run).to run_execute("#{conda_prefix}/bin/pip install --ignore-installed --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow-0.11.0-cp35-cp35m-linux_x86_64.whl")
.with(
user: 'vagrant',
environment: {
'CONDA_DEFAULT_ENV' => 'universe',
'CONDA_PREFIX' => conda_prefix,
'PATH' => "#{conda_prefix}/bin:#{ENV['PATH']}"
})
end
docker_pkgs = ['linux-image-extra-specific_kernel_version',
'linux-image-extra-virtual',
'docker-engine']
docker_pkgs.each do |name|
it "Installs #{name} package" do
expect(chef_run).to install_package(name)
end
end
it 'Add current user to docker group' do
expect(chef_run).to modify_group('docker')
end
# Group membership changes require service restarts to take effect.
it 'Notifies service docker restart' do
expect(add_user).to notify('service[docker]').to(:restart).immediately
end
it 'Notifies service lightdm' do
expect(add_user).to notify('service[lightdm]').to(:restart).immediately
end
it 'Clone gym repo' do
expect(chef_run).to sync_git('/home/vagrant/gym')
end
it 'Clone universe repo' do
expect(chef_run).to sync_git('/home/vagrant/universe')
end
it 'Clone starter agent repo' do
expect(chef_run).to sync_git('/home/vagrant/universe-starter-agent')
end
it 'Install Gym modules' do
conda_prefix = '/home/vagrant/anaconda3/envs/universe'
expect(chef_run).to run_execute("#{conda_prefix}/bin/pip install -e '.[all]'")
.with(
user: 'vagrant',
cwd: '/home/vagrant/gym',
environment: {
'CONDA_DEFAULT_ENV' => 'universe',
'CONDA_PREFIX' => conda_prefix,
'PATH' => "#{conda_prefix}/bin:#{ENV['PATH']}"
})
end
it 'Install Universe modules' do
conda_prefix = '/home/vagrant/anaconda3/envs/universe'
expect(chef_run).to run_execute("#{conda_prefix}/bin/pip install -e .")
.with(
user: 'vagrant',
cwd: '/home/vagrant/universe',
environment: {
'CONDA_DEFAULT_ENV' => 'universe',
'CONDA_PREFIX' => conda_prefix,
'PATH' => "#{conda_prefix}/bin:#{ENV['PATH']}"
})
end
end
end
|
require 'rails_helper'
# View spec for the HQ users index: action links, table rendering, and
# will_paginate behavior across empty / single / multi-page collections.
describe "hq/users/index.html.erb", type: :view do
context 'class action-items' do
it 'should have a New User link' do
assign(:users, User.none.paginate(:page => 1))
render
# Fix: the path must be passed as the :href option; Capybara's have_link
# does not accept a bare path as a second positional argument.
expect(rendered).to have_link('New User', href: new_hq_user_path)
end
end
describe 'should display users correctly with pagination' do
context 'no users' do
before(:each) do
@users = User.none.paginate(:page => 1)
render
end
it 'hides the table' do
expect(rendered).to match /No Users Found/
expect(rendered).to have_selector('table#users-index', count: 0)
end
it 'hides the pagination bar' do
expect(rendered).to_not have_selector('.pagination', text: /Previous(.*)1(.*)Next/)
end
end
context '1 user' do
before(:each) do
@users = [ FactoryGirl.build_stubbed(:user,
user_name: 'Frederick Bloggs',
email: 'fred.bloggs@example.com') ].paginate
render
end
it 'displays a single user' do
expect(rendered).to have_selector('table tbody tr', count: 1)
expect(rendered).to have_link('Frederick Bloggs', href: hq_user_path(@users.first))
expect(rendered).to match 'fred.bloggs@example.com'
end
it 'hides the pagination bar' do
expect(rendered).not_to have_selector('.pagination', text: /Previous(.*)1(.*)Next/)
end
end
context 'many users on multiple pages' do
before :each do
# Page 2 of 3 users at 2 per page => only the third user is visible.
@users = [ FactoryGirl.build_stubbed(:user),
FactoryGirl.build_stubbed(:user, user_name: 'Joseph Soap',
email: 'joe.soap@example.com'),
FactoryGirl.build_stubbed(:user, user_name: 'Tom Jones',
email: 'tom.jones@example.com') ].paginate(:page => 2, :per_page => 2)
end
it 'displays multiple users' do
render
expect(rendered).to have_selector('table tbody tr', count: 1)
expect(rendered).to have_link('Tom Jones', href: hq_user_path(@users.first))
expect(rendered).to match 'tom.jones@example.com'
expect(rendered).to_not have_link('Joseph Soap')
expect(rendered).to_not match 'joe.soap@example.com'
end
it 'shows the pagination bar' do
render
expect(rendered).to have_selector('.pagination', text: /Previous(.*)1(.*)Next/)
end
it 'paginates' do
expect(view).to receive(:will_paginate)
render
end
end
end
end
Index spec only tests view-related logic
require 'rails_helper'
# View spec for the HQ users index, restricted to view-level logic only:
# action links, pagination hook, and table rendering.
describe "hq/users/index.html.erb", type: :view do
context 'User action-items' do
it 'should have a New User link' do
assign(:users, User.none.paginate(:page => 1))
render
# Fix: the path must be passed as the :href option; Capybara's have_link
# does not accept a bare path as a second positional argument.
expect(rendered).to have_link('New User', href: new_hq_user_path)
end
it 'should have a pagination bar' do
assign(:users, build_stubbed_list(:user, 1))
expect(view).to receive(:will_paginate)
render
end
end
describe 'should display users correctly' do
context 'for no users' do
before(:each) do
@users = User.none.paginate(:page => 1)
render
end
it 'hides the table' do
expect(rendered).to match /No Users Found/
expect(rendered).to have_selector('table#users-index', count: 0)
end
end
context 'for multiple users' do
before :each do
@users = build_stubbed_list(:user, 2).paginate(:page => 1)
end
it 'shows the table' do
render
expect(rendered).to_not match /No Users Found/
expect(rendered).to have_selector('table tbody tr', count: 2)
end
it 'shows user names as link' do
render
expect(rendered).to have_link(@users.first.user_name, href: hq_user_path(@users.first))
end
end
end
end
|
require File.expand_path("../lib/validates_email/version", __FILE__)

# Gem metadata for spectator-validates_email (Rails 3 email validator).
Gem::Specification.new do |s|
  s.name        = "spectator-validates_email"
  s.version     = ValidatesEmail::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["Yury Velikanau"]
  # Fix: Gem::Specification#date expects a Date or a date String, not an
  # Array (it was previously assigned ["2010-10-24"]).
  s.date        = "2010-10-24"
  s.email       = ["yury.velikanau@gmail.com"]
  s.homepage    = "http://github.com/spectator/validates_email"
  s.summary     = "Rails 3 plugin to validate email addresses"
  s.description = "Rails 3 plugin to validate email addresses against RFC 2822 and RFC 3696"

  s.required_rubygems_version = ">= 1.3.6"

  s.add_dependency "actionpack", "~> 3.0.0"
  s.add_dependency "activemodel", "~> 3.0.0"

  s.add_development_dependency "bundler", "~> 1.0.0"
  s.add_development_dependency "rspec", "~> 2.0.0"
  s.add_development_dependency "sqlite3-ruby", "~> 1.3.1"
  s.add_development_dependency "rake", "~> 0.8.7"

  s.files = Dir["{lib}/**/*.rb", "MIT-LICENSE", "*.rdoc"]
  s.require_path = 'lib'
end
Changed rails dependency to allow working with Rails 3.1
require File.expand_path("../lib/validates_email/version", __FILE__)

# Gem metadata for spectator-validates_email (Rails 3+ email validator;
# dependencies relaxed to ">= 3.0.0" to allow Rails 3.1).
Gem::Specification.new do |s|
  s.name        = "spectator-validates_email"
  s.version     = ValidatesEmail::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["Yury Velikanau"]
  # Fix: Gem::Specification#date expects a Date or a date String, not an
  # Array (it was previously assigned ["2010-10-24"]).
  s.date        = "2010-10-24"
  s.email       = ["yury.velikanau@gmail.com"]
  s.homepage    = "http://github.com/spectator/validates_email"
  s.summary     = "Rails 3 plugin to validate email addresses"
  s.description = "Rails 3 plugin to validate email addresses against RFC 2822 and RFC 3696"

  s.required_rubygems_version = ">= 1.3.6"

  s.add_dependency "actionpack", ">= 3.0.0"
  s.add_dependency "activemodel", ">= 3.0.0"

  s.add_development_dependency "bundler", "~> 1.0.0"
  s.add_development_dependency "rspec", "~> 2.0.0"
  s.add_development_dependency "sqlite3-ruby", "~> 1.3.1"
  s.add_development_dependency "rake", "~> 0.8.7"

  s.files = Dir["{lib}/**/*.rb", "MIT-LICENSE", "*.rdoc"]
  s.require_path = 'lib'
end
|
require 'rollbar/rails'

# Rollbar error-reporting configuration: enabled only in production/staging
# when an access token is configured.
Rollbar.configure do |config|
  config.access_token = Cartodb.config[:rollbar_api_key]
  config.enabled = (Rails.env.production? || Rails.env.staging?) && config.access_token.present?
  config.net_retries = 1 # This is actually 6 requests (18s), as Rollbar retries two times (failsafes) and CartoDB once

  # Avoid a loop between our logger (who sends errors through rollbar)
  # and rollbar itself when it cannot send an error to rollbar service.
  # Fix: the option is `logger` — the previous `defaul_logger` raised
  # NoMethodError (Rollbar::ConfiguredOptions has no such setter).
  config.logger = Logger.new(STDERR)

  # Add exception class names to the exception_level_filters hash to
  # change the level that exception is reported at. Note that if an exception
  # has already been reported and logged the level will need to be changed
  # via the rollbar interface.
  # Valid levels: 'critical', 'error', 'warning', 'info', 'debug', 'ignore'
  # 'ignore' will cause the exception to not be reported at all.
  info_errors = ['error creating usertable']

  # TODO: This approach is currently not working (I suspect the usage of rescue_from StandardError)
  config.exception_level_filters.merge!(
    'ActionController::RoutingError' => 'ignore',
    'Sequel::DatabaseConnectionError' => 'warning',
    'ActiveRecord::RecordInvalid' => lambda do |error|
      info_errors.any? { |message| error.to_s.downcase.include?(message) } ? 'info' : 'error'
    end
  )

  config.before_process << proc do |options|
    raise Rollbar::Ignore if options.is_a?(Hash) && (
      options[:message]&.include?('ActionController::RoutingError') ||
      options[:message] == 'coverage failed to store'
    )
  end
end
# TODO: remove this wrapper for legacy logger
module CartoDB
extend ::LoggerHelper
# Log an exception at error level (delegates to LoggerHelper#log_error).
def self.notify_exception(e, extra = {})
log_error(exception: e, **extra)
end
# Log an error message at error level.
def self.notify_error(message, additional_data = {})
log_error(message: message, **additional_data)
end
# Add `:request` and `:user` to additional_data if you want request content
def self.report_exception(e, message = nil, additional_data = {})
log_error(exception: e, message: message, **additional_data)
end
# Log a message at debug level.
def self.notify_debug(message, additional_data = {})
log_debug(message: message, **additional_data)
end
# Log an exception at warning level.
def self.notify_warning_exception(exception)
log_warning(exception: exception)
end
end
Use logger instead of default_logger
default_logger cannot be assigned:
undefined method `defaul_logger=' for #<Rollbar::ConfiguredOptions:0x000000000379f5b0> (NoMethodError)
require 'rollbar/rails'
# Rollbar error-reporting configuration: enabled only in production/staging
# when an access token is configured.
Rollbar.configure do |config|
config.access_token = Cartodb.config[:rollbar_api_key]
config.enabled = (Rails.env.production? || Rails.env.staging?) && config.access_token.present?
config.net_retries = 1 # This is actually 6 requests (18s), as Rollbar retries two times (failsafes) and CartoDB once
# Avoid a loop between our logger (who sends errors through rollbar)
# and rollbar itself when it cannot send an error to rollbar service
config.logger = Logger.new(STDERR)
# Add exception class names to the exception_level_filters hash to
# change the level that exception is reported at. Note that if an exception
# has already been reported and logged the level will need to be changed
# via the rollbar interface.
# Valid levels: 'critical', 'error', 'warning', 'info', 'debug', 'ignore'
# 'ignore' will cause the exception to not be reported at all.
info_errors = ['error creating usertable']
# TODO: This approach is currently not working (I suspect the usage of rescue_from StandardError)
config.exception_level_filters.merge!(
'ActionController::RoutingError' => 'ignore',
'Sequel::DatabaseConnectionError' => 'warning',
'ActiveRecord::RecordInvalid' => lambda do
|error| info_errors.any? { |message| error.to_s.downcase.include?(message) } ? 'info' : 'error'
end
)
# Drop noise before it is sent: routing errors and coverage-store failures.
config.before_process << proc do |options|
raise Rollbar::Ignore if options.is_a?(Hash) && (
options[:message]&.include?('ActionController::RoutingError') ||
options[:message] == 'coverage failed to store'
)
end
end
# TODO: remove this wrapper for legacy logger
module CartoDB
extend ::LoggerHelper
# Log an exception at error level (delegates to LoggerHelper#log_error).
def self.notify_exception(e, extra = {})
log_error(exception: e, **extra)
end
# Log an error message at error level.
def self.notify_error(message, additional_data = {})
log_error(message: message, **additional_data)
end
# Add `:request` and `:user` to additional_data if you want request content
def self.report_exception(e, message = nil, additional_data = {})
log_error(exception: e, message: message, **additional_data)
end
# Log a message at debug level.
def self.notify_debug(message, additional_data = {})
log_debug(message: message, **additional_data)
end
# Log an exception at warning level.
def self.notify_warning_exception(exception)
log_warning(exception: exception)
end
end
|
# Maps a human-readable fixture state/title to its Primo (NYU Aleph) record ID.
class PrimoId
  PRIMO_REFERRER_ID_BASE = 'info:sid/primo.exlibrisgroup.com:primo-'.freeze

  # Known fixture record IDs, keyed by state or title.
  # Frozen so shared constant data cannot be mutated at runtime.
  IDS = {
    'journal' => 'nyu_aleph002736245',
    'Vogue' => 'nyu_aleph002893728',
    'The New Yorker' => 'nyu_aleph002904404',
    'book' => 'nyu_aleph001102376',
    'checked out' => 'nyu_aleph003562911',
    'requested' => 'nyu_aleph000864162',
    'offsite' => 'nyu_aleph002928667',
    'available' => 'nyu_aleph001102376',
    'processing' => 'nyu_aleph003933870',
    'on_order' => 'NEEDED',
    'ill' => 'nyu_aleph000762323'
  }.freeze

  attr_reader :id, :state

  # state [String] key into IDS; an unknown state leaves +id+ nil.
  def initialize(state)
    @state = state
    @id = IDS[state]
  end
end
Add Primo ID for the title "Not by Reason Alone"
# Maps a human-readable fixture state/title to its Primo (NYU Aleph) record ID.
class PrimoId
  PRIMO_REFERRER_ID_BASE = 'info:sid/primo.exlibrisgroup.com:primo-'.freeze

  # Known fixture record IDs, keyed by state or title.
  # Frozen so shared constant data cannot be mutated at runtime.
  IDS = {
    'journal' => 'nyu_aleph002736245',
    'Vogue' => 'nyu_aleph002893728',
    'The New Yorker' => 'nyu_aleph002904404',
    'Not by Reason Alone' => 'nyu_aleph002104209',
    'book' => 'nyu_aleph001102376',
    'checked out' => 'nyu_aleph003562911',
    'requested' => 'nyu_aleph000864162',
    'offsite' => 'nyu_aleph002928667',
    'available' => 'nyu_aleph001102376',
    'processing' => 'nyu_aleph003933870',
    'on_order' => 'NEEDED',
    'ill' => 'nyu_aleph000762323'
  }.freeze

  attr_reader :id, :state

  # state [String] key into IDS; an unknown state leaves +id+ nil.
  def initialize(state)
    @state = state
    @id = IDS[state]
  end
end
|
require 'rspec/mocks'
# Helpers for preparing the GitLab test environment: cleans tmp/tests,
# installs gitlab-shell, and seeds the factory and forked test repositories.
module TestEnv
  extend self

  # When developing the seed repository, comment out the branch you will modify.
  BRANCH_SHA = {
    'not-merged-branch' => 'b83d6e3',
    'branch-merged' => '498214d',
    'empty-branch' => '7efb185',
    'ends-with.json' => '98b0d8b',
    'flatten-dir' => 'e56497b',
    'feature' => '0b4bc9a',
    'feature_conflict' => 'bb5206f',
    'fix' => '48f0be4',
    'improve/awesome' => '5937ac0',
    'markdown' => '0ed8c6c',
    'lfs' => 'be93687',
    'master' => 'b83d6e3',
    'merge-test' => '5937ac0',
    "'test'" => 'e56497b',
    'orphaned-branch' => '45127a9',
    'binary-encoding' => '7b1cf43',
    'gitattributes' => '5a62481',
    'expand-collapse-diffs' => '4842455',
    'expand-collapse-files' => '025db92',
    'expand-collapse-lines' => '238e82d',
    'video' => '8879059',
    'crlf-diff' => '5938907',
    'conflict-start' => '75284c7',
    'conflict-resolvable' => '1450cd6',
    'conflict-binary-file' => '259a6fb',
    'conflict-contains-conflict-markers' => '5e0964c',
    'conflict-missing-side' => 'eb227b3',
    'conflict-non-utf8' => 'd0a293c',
    'conflict-too-large' => '39fa04f',
  }

  # gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
  # need to keep all the branches in sync.
  # We currently only need a subset of the branches
  FORKED_BRANCH_SHA = {
    'add-submodule-version-bump' => '3f547c0',
    'master' => '5937ac0',
    'remove-submodule' => '2a33e0c',
    'conflict-resolvable-fork' => '404fa3f'
  }

  # Test environment
  #
  # See gitlab.yml.example test section for paths
  #
  def init(opts = {})
    # Disable mailer for spinach tests
    disable_mailer if opts[:mailer] == false

    clean_test_path

    FileUtils.mkdir_p(repos_path)
    FileUtils.mkdir_p(backup_path)

    # Setup GitLab shell for test instance
    setup_gitlab_shell

    # Create repository for FactoryGirl.create(:project)
    setup_factory_repo

    # Create repository for FactoryGirl.create(:forked_project_with_submodules)
    setup_forked_repo
  end

  def disable_mailer
    allow_any_instance_of(NotificationService).to receive(:mailer).
      and_return(double.as_null_object)
  end

  def enable_mailer
    allow_any_instance_of(NotificationService).to receive(:mailer).
      and_call_original
  end

  def disable_pre_receive
    allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
  end

  # Clean /tmp/tests
  #
  # Keeps gitlab-shell and gitlab-test
  def clean_test_path
    tmp_test_path = Rails.root.join('tmp', 'tests', '**')

    Dir[tmp_test_path].each do |entry|
      unless File.basename(entry) =~ /\Agitlab-(shell|test|test-fork)\z/
        FileUtils.rm_rf(entry)
      end
    end
  end

  # Fix: the install result was previously discarded (backtick call with no
  # status check), so a failed clone/install of gitlab-shell surfaced only as
  # confusing unrelated failures later in the suite. Fail fast instead.
  def setup_gitlab_shell
    return if File.directory?(Gitlab.config.gitlab_shell.path)

    unless system('rake', 'gitlab:shell:install')
      raise 'Failed to install gitlab-shell (rake gitlab:shell:install)'
    end
  end

  def setup_factory_repo
    setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name,
               BRANCH_SHA)
  end

  # This repo has a submodule commit that is not present in the main test
  # repository.
  def setup_forked_repo
    setup_repo(forked_repo_path, forked_repo_path_bare, forked_repo_name,
               FORKED_BRANCH_SHA)
  end

  def setup_repo(repo_path, repo_path_bare, repo_name, branch_sha)
    clone_url = "https://gitlab.com/gitlab-org/#{repo_name}.git"

    unless File.directory?(repo_path)
      system(*%W(#{Gitlab.config.git.bin_path} clone -q #{clone_url} #{repo_path}))
    end

    set_repo_refs(repo_path, branch_sha)

    # We must copy bare repositories because we will push to them.
    system(git_env, *%W(#{Gitlab.config.git.bin_path} clone -q --bare #{repo_path} #{repo_path_bare}))
  end

  def copy_repo(project)
    base_repo_path = File.expand_path(factory_repo_path_bare)
    target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.namespace.path}/#{project.path}.git")
    FileUtils.mkdir_p(target_repo_path)
    FileUtils.cp_r("#{base_repo_path}/.", target_repo_path)
    FileUtils.chmod_R 0755, target_repo_path
    set_repo_refs(target_repo_path, BRANCH_SHA)
  end

  def repos_path
    Gitlab.config.repositories.storages.default
  end

  def backup_path
    Gitlab.config.backup.path
  end

  def copy_forked_repo_with_submodules(project)
    base_repo_path = File.expand_path(forked_repo_path_bare)
    target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.namespace.path}/#{project.path}.git")
    FileUtils.mkdir_p(target_repo_path)
    FileUtils.cp_r("#{base_repo_path}/.", target_repo_path)
    FileUtils.chmod_R 0755, target_repo_path
    set_repo_refs(target_repo_path, FORKED_BRANCH_SHA)
  end

  # When no cached assets exist, manually hit the root path to create them
  #
  # Otherwise they'd be created by the first test, often timing out and
  # causing a transient test failure
  def warm_asset_cache
    return if warm_asset_cache?
    return unless defined?(Capybara)

    Capybara.current_session.driver.visit '/'
  end

  def warm_asset_cache?
    cache = Rails.root.join(*%w(tmp cache assets test))
    Dir.exist?(cache) && Dir.entries(cache).length > 2
  end

  private

  def factory_repo_path
    @factory_repo_path ||= Rails.root.join('tmp', 'tests', factory_repo_name)
  end

  def factory_repo_path_bare
    "#{factory_repo_path}_bare"
  end

  def factory_repo_name
    'gitlab-test'
  end

  def forked_repo_path
    @forked_repo_path ||= Rails.root.join('tmp', 'tests', forked_repo_name)
  end

  def forked_repo_path_bare
    "#{forked_repo_path}_bare"
  end

  def forked_repo_name
    'gitlab-test-fork'
  end

  # Prevent developer git configurations from being persisted to test
  # repositories
  def git_env
    { 'GIT_TEMPLATE_DIR' => '' }
  end

  def set_repo_refs(repo_path, branch_sha)
    Dir.chdir(repo_path) do
      branch_sha.each do |branch, sha|
        # Try to reset without fetching to avoid using the network.
        reset = %W(#{Gitlab.config.git.bin_path} update-ref refs/heads/#{branch} #{sha})
        unless system(*reset)
          if system(*%W(#{Gitlab.config.git.bin_path} fetch origin))
            unless system(*reset)
              raise 'The fetched test seed '\
                    'does not contain the required revision.'
            end
          else
            raise 'Could not fetch test seed repository.'
          end
        end
      end
    end
  end
end
Fix Test Env (proper error handling when gitlab-shell is not cloned)
require 'rspec/mocks'
# Helpers for preparing the test environment: seeds working and bare copies
# of the gitlab-test repositories, installs gitlab-shell, and manages the
# tmp/tests scratch directory. Mixed in as module functions via `extend self`.
module TestEnv
extend self
# When developing the seed repository, comment out the branch you will modify.
# Branch name => pinned abbreviated SHA expected in the gitlab-test seed repo.
BRANCH_SHA = {
'not-merged-branch' => 'b83d6e3',
'branch-merged' => '498214d',
'empty-branch' => '7efb185',
'ends-with.json' => '98b0d8b',
'flatten-dir' => 'e56497b',
'feature' => '0b4bc9a',
'feature_conflict' => 'bb5206f',
'fix' => '48f0be4',
'improve/awesome' => '5937ac0',
'markdown' => '0ed8c6c',
'lfs' => 'be93687',
'master' => 'b83d6e3',
'merge-test' => '5937ac0',
"'test'" => 'e56497b',
'orphaned-branch' => '45127a9',
'binary-encoding' => '7b1cf43',
'gitattributes' => '5a62481',
'expand-collapse-diffs' => '4842455',
'expand-collapse-files' => '025db92',
'expand-collapse-lines' => '238e82d',
'video' => '8879059',
'crlf-diff' => '5938907',
'conflict-start' => '75284c7',
'conflict-resolvable' => '1450cd6',
'conflict-binary-file' => '259a6fb',
'conflict-contains-conflict-markers' => '5e0964c',
'conflict-missing-side' => 'eb227b3',
'conflict-non-utf8' => 'd0a293c',
'conflict-too-large' => '39fa04f',
}
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
# need to keep all the branches in sync.
# We currently only need a subset of the branches
FORKED_BRANCH_SHA = {
'add-submodule-version-bump' => '3f547c0',
'master' => '5937ac0',
'remove-submodule' => '2a33e0c',
'conflict-resolvable-fork' => '404fa3f'
}
# Test environment
#
# See gitlab.yml.example test section for paths
#
# Entry point. Wipes tmp/tests (keeping cached clones), then prepares
# gitlab-shell and the factory/forked seed repositories.
# Pass mailer: false to stub out the notification mailer (spinach).
def init(opts = {})
# Disable mailer for spinach tests
disable_mailer if opts[:mailer] == false
clean_test_path
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(backup_path)
# Setup GitLab shell for test instance
setup_gitlab_shell
# Create repository for FactoryGirl.create(:project)
setup_factory_repo
# Create repository for FactoryGirl.create(:forked_project_with_submodules)
setup_forked_repo
end
# Stubs NotificationService#mailer with a null object so no mail is sent.
def disable_mailer
allow_any_instance_of(NotificationService).to receive(:mailer).
and_return(double.as_null_object)
end
# Restores the real NotificationService#mailer behavior.
def enable_mailer
allow_any_instance_of(NotificationService).to receive(:mailer).
and_call_original
end
# Makes git pre-receive hooks report success unconditionally in tests.
def disable_pre_receive
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
end
# Clean /tmp/tests
#
# Keeps gitlab-shell and gitlab-test
def clean_test_path
tmp_test_path = Rails.root.join('tmp', 'tests', '**')
Dir[tmp_test_path].each do |entry|
unless File.basename(entry) =~ /\Agitlab-(shell|test|test-fork)\z/
FileUtils.rm_rf(entry)
end
end
end
# Installs gitlab-shell via the rake task unless the install directory
# already exists; raises when the install fails.
def setup_gitlab_shell
unless File.directory?(Gitlab.config.gitlab_shell.path)
unless system('rake', 'gitlab:shell:install')
raise 'Can`t clone gitlab-shell'
end
end
end
# Seeds the main gitlab-test repository (used by FactoryGirl :project).
def setup_factory_repo
setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name,
BRANCH_SHA)
end
# This repo has a submodule commit that is not present in the main test
# repository.
def setup_forked_repo
setup_repo(forked_repo_path, forked_repo_path_bare, forked_repo_name,
FORKED_BRANCH_SHA)
end
# Clones the seed repository from gitlab.com when missing, pins its branch
# refs to the expected SHAs, then makes a bare copy that tests may push to.
def setup_repo(repo_path, repo_path_bare, repo_name, branch_sha)
clone_url = "https://gitlab.com/gitlab-org/#{repo_name}.git"
unless File.directory?(repo_path)
system(*%W(#{Gitlab.config.git.bin_path} clone -q #{clone_url} #{repo_path}))
end
set_repo_refs(repo_path, branch_sha)
# We must copy bare repositories because we will push to them.
system(git_env, *%W(#{Gitlab.config.git.bin_path} clone -q --bare #{repo_path} #{repo_path_bare}))
end
# Copies the bare factory seed into +project+'s repository storage path and
# re-pins the branch refs.
def copy_repo(project)
base_repo_path = File.expand_path(factory_repo_path_bare)
target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.namespace.path}/#{project.path}.git")
FileUtils.mkdir_p(target_repo_path)
FileUtils.cp_r("#{base_repo_path}/.", target_repo_path)
FileUtils.chmod_R 0755, target_repo_path
set_repo_refs(target_repo_path, BRANCH_SHA)
end
def repos_path
Gitlab.config.repositories.storages.default
end
def backup_path
Gitlab.config.backup.path
end
# Like copy_repo, but uses the forked seed (contains submodule commits).
def copy_forked_repo_with_submodules(project)
base_repo_path = File.expand_path(forked_repo_path_bare)
target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.namespace.path}/#{project.path}.git")
FileUtils.mkdir_p(target_repo_path)
FileUtils.cp_r("#{base_repo_path}/.", target_repo_path)
FileUtils.chmod_R 0755, target_repo_path
set_repo_refs(target_repo_path, FORKED_BRANCH_SHA)
end
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
# causing a transient test failure
def warm_asset_cache
return if warm_asset_cache?
return unless defined?(Capybara)
Capybara.current_session.driver.visit '/'
end
# A cache directory holding more than the "." and ".." entries counts as warm.
def warm_asset_cache?
cache = Rails.root.join(*%w(tmp cache assets test))
Dir.exist?(cache) && Dir.entries(cache).length > 2
end
private
def factory_repo_path
@factory_repo_path ||= Rails.root.join('tmp', 'tests', factory_repo_name)
end
def factory_repo_path_bare
"#{factory_repo_path}_bare"
end
def factory_repo_name
'gitlab-test'
end
def forked_repo_path
@forked_repo_path ||= Rails.root.join('tmp', 'tests', forked_repo_name)
end
def forked_repo_path_bare
"#{forked_repo_path}_bare"
end
def forked_repo_name
'gitlab-test-fork'
end
# Prevent developer git configurations from being persisted to test
# repositories
def git_env
{ 'GIT_TEMPLATE_DIR' => '' }
end
# Forces each branch in +branch_sha+ to its pinned SHA. Tries a local
# `git update-ref` first to avoid the network; fetches from origin and
# retries only when a SHA is missing, raising if it still cannot be found.
def set_repo_refs(repo_path, branch_sha)
Dir.chdir(repo_path) do
branch_sha.each do |branch, sha|
# Try to reset without fetching to avoid using the network.
reset = %W(#{Gitlab.config.git.bin_path} update-ref refs/heads/#{branch} #{sha})
unless system(*reset)
if system(*%W(#{Gitlab.config.git.bin_path} fetch origin))
unless system(*reset)
raise 'The fetched test seed '\
'does not contain the required revision.'
end
else
raise 'Could not fetch test seed repository.'
end
end
end
end
end
end
|
require 'securerandom'
# Drives an interactive zsh session inside a detached tmux server so tests
# can type input and inspect the rendered terminal content.
class TerminalSession
  ZSH_BIN = ENV['TEST_ZSH_BIN'] || 'zsh'

  # Starts a detached tmux session of the given size running zsh with no rc
  # files (-f), a fixed PS1 prompt and the given TERM.
  def initialize(width: 80, height: 24, prompt: '', term: 'xterm-256color')
    tmux_command("new-session -d -x #{width} -y #{height} 'PS1=#{prompt} TERM=#{term} #{ZSH_BIN} -f'")
  end

  # Types +command+ into the pane and presses enter. Returns self.
  def run_command(command)
    send_string(command)
    send_keys('enter')
    self
  end

  # Sends +str+ literally (-l) to pane 0. Returns self.
  #
  # Bug fix: the previous escaping, str.gsub("'", "\\'"), passed the
  # two-character replacement \' to gsub, which gsub interprets as the
  # post-match back-reference — quotes were replaced by the rest of the
  # string instead of being escaped. The block form below is exempt from
  # back-reference interpretation and emits the shell-safe '\'' sequence.
  def send_string(str)
    escaped = str.gsub("'") { "'\\''" }
    tmux_command("send-keys -t 0 -l '#{escaped}'")
    self
  end

  # Sends named tmux key events (e.g. 'enter', 'C-l') to pane 0. Returns self.
  def send_keys(*keys)
    tmux_command("send-keys -t 0 #{keys.join(' ')}")
    self
  end

  # Captured pane text; pass esc_seqs: true to keep escape sequences (-e).
  def content(esc_seqs: false)
    cmd = 'capture-pane -p -t 0'
    cmd += ' -e' if esc_seqs
    tmux_command(cmd).strip
  end

  # Clears the screen (Ctrl-L) and polls until the pane is empty. Returns self.
  def clear
    send_keys('C-l')
    sleep(0.1) until content == ''
    self
  end

  # Tears down the tmux session.
  def destroy
    tmux_command('kill-session')
  end

  # [x, y] position of the cursor. The \#{...} escapes keep the format
  # placeholders for tmux to expand rather than Ruby.
  def cursor
    tmux_command("display-message -t 0 -p '\#{cursor_x},\#{cursor_y}'").
      strip.
      split(',').
      map(&:to_i)
  end

  private

  # Random socket name so concurrent test runs get isolated tmux servers.
  def socket_name
    @socket_name ||= SecureRandom.hex(6)
  end

  # Runs a tmux subcommand against this session's private socket and returns
  # its stdout; raises when tmux exits non-zero.
  def tmux_command(cmd)
    out = `tmux -u -L #{socket_name} #{cmd}`
    raise('tmux error') unless $?.success?
    out
  end
end
Clean up TerminalSession constructor a bit
require 'securerandom'
# Drives an interactive zsh session inside a detached tmux server so tests
# can type input and inspect the rendered terminal content.
class TerminalSession
  ZSH_BIN = ENV['TEST_ZSH_BIN'] || 'zsh'

  # Starts a detached tmux session running zsh with no rc files (-f).
  #
  # opts - :width (80), :height (24), :prompt (''), :term
  #        ('xterm-256color'), :zsh_bin (ZSH_BIN).
  #
  # Bug fix: :zsh_bin was merged into the defaults but then ignored — the
  # command string interpolated the ZSH_BIN constant directly, so the option
  # could never take effect. It is now honored.
  def initialize(opts = {})
    opts = {
      width: 80,
      height: 24,
      prompt: '',
      term: 'xterm-256color',
      zsh_bin: ZSH_BIN
    }.merge(opts)
    cmd = "PS1=#{opts[:prompt]} TERM=#{opts[:term]} #{opts[:zsh_bin]} -f"
    tmux_command("new-session -d -x #{opts[:width]} -y #{opts[:height]} '#{cmd}'")
  end

  # Types +command+ into the pane and presses enter. Returns self.
  def run_command(command)
    send_string(command)
    send_keys('enter')
    self
  end

  # Sends +str+ literally (-l) to pane 0. Returns self.
  #
  # Bug fix: the previous escaping, str.gsub("'", "\\'"), passed the
  # two-character replacement \' to gsub, which gsub interprets as the
  # post-match back-reference — quotes were replaced by the rest of the
  # string instead of being escaped. The block form below is exempt from
  # back-reference interpretation and emits the shell-safe '\'' sequence.
  def send_string(str)
    escaped = str.gsub("'") { "'\\''" }
    tmux_command("send-keys -t 0 -l '#{escaped}'")
    self
  end

  # Sends named tmux key events (e.g. 'enter', 'C-l') to pane 0. Returns self.
  def send_keys(*keys)
    tmux_command("send-keys -t 0 #{keys.join(' ')}")
    self
  end

  # Captured pane text; pass esc_seqs: true to keep escape sequences (-e).
  def content(esc_seqs: false)
    cmd = 'capture-pane -p -t 0'
    cmd += ' -e' if esc_seqs
    tmux_command(cmd).strip
  end

  # Clears the screen (Ctrl-L) and polls until the pane is empty. Returns self.
  def clear
    send_keys('C-l')
    sleep(0.1) until content == ''
    self
  end

  # Tears down the tmux session.
  def destroy
    tmux_command('kill-session')
  end

  # [x, y] position of the cursor. The \#{...} escapes keep the format
  # placeholders for tmux to expand rather than Ruby.
  def cursor
    tmux_command("display-message -t 0 -p '\#{cursor_x},\#{cursor_y}'").
      strip.
      split(',').
      map(&:to_i)
  end

  private

  # Random socket name so concurrent test runs get isolated tmux servers.
  def socket_name
    @socket_name ||= SecureRandom.hex(6)
  end

  # Runs a tmux subcommand against this session's private socket and returns
  # its stdout; raises when tmux exits non-zero.
  def tmux_command(cmd)
    out = `tmux -u -L #{socket_name} #{cmd}`
    raise('tmux error') unless $?.success?
    out
  end
end
|
Added blank credentials file.
#
# Fill in your gateway details below to test remotely (when not using VCR
# prerecorded responses). This should only be necessary when rerecording
# existing actions or adding new remote tests
#
# This file is ignored by git so you shouldn't be able to accidentally commit the
# API details below.
#
GATEWAY_LOGIN = ''
GATEWAY_PASSWORD = ''
GATEWAY_MERCHANT_NUMBER = ''
|
require "spec_helper"
if Mongoid::VERSION =~ /\A3\./
describe Mongoid::Contextual::Atomic do
describe "#add_to_set" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:new_order) do
Band.create(members: [ "Peter" ])
end
let!(:smiths) do
Band.create
end
context "when the criteria has no sorting" do
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.add_to_set(:members, "Dave")
end
it "does not add duplicates" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "adds unique values" do
new_order.reload.members.should eq([ "Peter", "Dave" ])
end
it "adds to non initialized fields" do
smiths.reload.members.should eq([ "Dave" ])
end
end
context "when the criteria has sorting" do
let(:criteria) do
Band.asc(:name)
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.add_to_set(:members, "Dave")
end
it "does not add duplicates" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "adds unique values" do
new_order.reload.members.should eq([ "Peter", "Dave" ])
end
it "adds to non initialized fields" do
smiths.reload.members.should eq([ "Dave" ])
end
end
end
# Atomic bitwise updates applied to every document matched by the context.
# Expected values: 60 & 13 == 12, 60 | 13 == 61, (60 & 13) | 10 == 14;
# a document with no likes field behaves as 0.
# NOTE(review): the whole describe is guarded by the trailing
# `end if mongodb_version > "2.5"` modifier on the closing line — easy to
# miss when reading top-down.
describe "#bit" do
let!(:depeche_mode) do
Band.create(likes: 60)
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when performing a bitwise and" do
before do
context.bit(:likes, { and: 13 })
end
it "performs the bitwise operation on initialized fields" do
depeche_mode.reload.likes.should eq(12)
end
it "does not error on non initialized fields" do
smiths.reload.likes.should eq(0)
end
end
context "when performing a bitwise or" do
before do
context.bit(:likes, { or: 13 })
end
it "performs the bitwise operation on initialized fields" do
depeche_mode.reload.likes.should eq(61)
end
it "does not error on non initialized fields" do
smiths.reload.likes.should eq(13)
end
end
context "when chaining bitwise operations" do
before do
context.bit(:likes, { and: 13, or: 10 })
end
it "performs the bitwise operation on initialized fields" do
depeche_mode.reload.likes.should eq(14)
end
it "does not error on non initialized fields" do
smiths.reload.likes.should eq(10)
end
end
end if mongodb_version > "2.5"
describe "#inc" do
let!(:depeche_mode) do
Band.create(likes: 60)
end
let!(:smiths) do
Band.create
end
let!(:beatles) do
Band.create(years: 2)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.inc(:likes, 10)
end
context "when the field exists" do
it "incs the value" do
depeche_mode.reload.likes.should eq(70)
end
end
context "when the field does not exist" do
it "does not error on the inc" do
smiths.likes.should be_nil
end
end
context "when using the alias" do
before do
context.inc(:years, 1)
end
it "incs the value and read from alias" do
beatles.reload.years.should eq(3)
end
it "incs the value and read from field" do
beatles.reload.y.should eq(3)
end
end
end
describe "#pop" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Martin" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when popping from the front" do
before do
context.pop(:members, -1)
end
it "pops the first element off the array" do
depeche_mode.reload.members.should eq([ "Martin" ])
end
it "does not error on uninitialized fields" do
smiths.reload.members.should be_nil
end
end
context "when popping from the rear" do
before do
context.pop(:members, 1)
end
it "pops the last element off the array" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "does not error on uninitialized fields" do
smiths.reload.members.should be_nil
end
end
end
describe "#pull" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Alan" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.pull(:members, "Alan")
end
it "pulls when the value is found" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "does not error on non existant fields" do
smiths.reload.members.should be_nil
end
end
describe "#pull_all" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Alan", "Fletch" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.pull_all(:members, [ "Alan", "Dave" ])
end
it "pulls when the values are found" do
depeche_mode.reload.members.should eq([ "Fletch" ])
end
it "does not error on non existant fields" do
smiths.reload.members.should be_nil
end
end
describe "#push" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.push(:members, "Alan")
end
it "pushes the value to existing arrays" do
depeche_mode.reload.members.should eq([ "Dave", "Alan" ])
end
it "pushes to non existant fields" do
smiths.reload.members.should eq([ "Alan" ])
end
end
describe "#push_all" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.push_all(:members, [ "Alan", "Fletch" ])
end
it "pushes the values to existing arrays" do
depeche_mode.reload.members.should eq([ "Dave", "Alan", "Fletch" ])
end
it "pushes to non existant fields" do
smiths.reload.members.should eq([ "Alan", "Fletch" ])
end
end
describe "#rename" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.rename(:members, :artists)
end
it "renames existing fields" do
depeche_mode.reload.artists.should eq([ "Dave" ])
end
it "does not rename non existant fields" do
smiths.reload.should_not respond_to(:artists)
end
end
describe "#set" do
let!(:depeche_mode) do
Band.create(name: "Depeche Mode")
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.set(:name, "Recoil")
end
it "sets existing fields" do
depeche_mode.reload.name.should eq("Recoil")
end
it "sets non existant fields" do
smiths.reload.name.should eq("Recoil")
end
end
describe "#unset" do
context "when unsetting a single field" do
let!(:depeche_mode) do
Band.create(name: "Depeche Mode", years: 10)
end
let!(:new_order) do
Band.create(name: "New Order", years: 10)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when the field is not aliased" do
before do
context.unset(:name)
end
it "unsets the first existing field" do
depeche_mode.reload.name.should be_nil
end
it "unsets the last existing field" do
new_order.reload.name.should be_nil
end
end
context "when the field is aliased" do
before do
context.unset(:years)
end
it "unsets the first existing field" do
depeche_mode.reload.years.should be_nil
end
it "unsets the last existing field" do
new_order.reload.years.should be_nil
end
end
end
context "when unsetting multiple fields" do
let!(:new_order) do
Band.create(name: "New Order", genres: [ "electro", "dub" ], years: 10)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when the field is not aliased" do
before do
context.unset(:name, :genres)
end
it "unsets name field" do
new_order.reload.name.should be_nil
end
it "unsets genres field" do
new_order.reload.genres.should be_nil
end
end
context "when the field is aliased" do
before do
context.unset(:name, :years)
end
it "unsets the unaliased field" do
new_order.reload.name.should be_nil
end
it "unsets the aliased field" do
new_order.reload.years.should be_nil
end
end
end
end
end
# MONGOID 4
describe Mongoid::Contextual::Atomic do
describe "#add_to_set" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:new_order) do
Band.create(members: [ "Peter" ])
end
let!(:smiths) do
Band.create
end
context "when the criteria has no sorting" do
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.add_to_set(members: "Dave")
end
it "does not add duplicates" do
expect(depeche_mode.reload.members).to eq([ "Dave" ])
end
it "adds unique values" do
expect(new_order.reload.members).to eq([ "Peter", "Dave" ])
end
it "adds to non initialized fields" do
expect(smiths.reload.members).to eq([ "Dave" ])
end
end
context "when the criteria has sorting" do
let(:criteria) do
Band.asc(:name)
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.add_to_set(members: "Dave", genres: "Electro")
end
it "does not add duplicates" do
expect(depeche_mode.reload.members).to eq([ "Dave" ])
end
it "adds multiple operations" do
expect(depeche_mode.reload.genres).to eq([ "Electro" ])
end
it "adds unique values" do
expect(new_order.reload.members).to eq([ "Peter", "Dave" ])
end
it "adds to non initialized fields" do
expect(smiths.reload.members).to eq([ "Dave" ])
end
end
end
describe "#bit" do
let!(:depeche_mode) do
Band.create(likes: 60)
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when performing a bitwise and" do
before do
context.bit(likes: { and: 13 })
end
it "performs the bitwise operation on initialized fields" do
expect(depeche_mode.reload.likes).to eq(12)
end
it "does not error on non initialized fields" do
expect(smiths.reload.likes).to eq(0)
end
end
context "when performing a bitwise or" do
before do
context.bit(likes: { or: 13 })
end
it "performs the bitwise operation on initialized fields" do
expect(depeche_mode.reload.likes).to eq(61)
end
it "does not error on non initialized fields" do
expect(smiths.reload.likes).to eq(13)
end
end
context "when chaining bitwise operations" do
before do
context.bit(likes: { and: 13, or: 10 })
end
it "performs the bitwise operation on initialized fields" do
expect(depeche_mode.reload.likes).to eq(14)
end
it "does not error on non initialized fields" do
expect(smiths.reload.likes).to eq(10)
end
end
end if mongodb_version > "2.5"
describe "#inc" do
let!(:depeche_mode) do
Band.create(likes: 60)
end
let!(:smiths) do
Band.create
end
let!(:beatles) do
Band.create(years: 2)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.inc(likes: 10)
end
context "when the field exists" do
it "incs the value" do
expect(depeche_mode.reload.likes).to eq(70)
end
end
context "when the field does not exist" do
it "does not error on the inc" do
expect(smiths.likes).to be_nil
end
end
context "when using the alias" do
before do
context.inc(years: 1)
end
it "incs the value and read from alias" do
expect(beatles.reload.years).to eq(3)
end
it "incs the value and read from field" do
expect(beatles.reload.y).to eq(3)
end
end
end
describe "#pop" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Martin" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when popping from the front" do
before do
context.pop(members: -1)
end
it "pops the first element off the array" do
expect(depeche_mode.reload.members).to eq([ "Martin" ])
end
it "does not error on uninitialized fields" do
expect(smiths.reload.members).to be_nil
end
end
context "when popping from the rear" do
before do
context.pop(members: 1)
end
it "pops the last element off the array" do
expect(depeche_mode.reload.members).to eq([ "Dave" ])
end
it "does not error on uninitialized fields" do
expect(smiths.reload.members).to be_nil
end
end
end
describe "#pull" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Alan" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.pull(members: "Alan")
end
it "pulls when the value is found" do
expect(depeche_mode.reload.members).to eq([ "Dave" ])
end
it "does not error on non existant fields" do
expect(smiths.reload.members).to be_nil
end
end
describe "#pull_all" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Alan", "Fletch" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.pull_all(members: [ "Alan", "Dave" ])
end
it "pulls when the values are found" do
expect(depeche_mode.reload.members).to eq([ "Fletch" ])
end
it "does not error on non existant fields" do
expect(smiths.reload.members).to be_nil
end
end
describe "#push" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.push(members: "Alan")
end
it "pushes the value to existing arrays" do
expect(depeche_mode.reload.members).to eq([ "Dave", "Alan" ])
end
it "pushes to non existant fields" do
expect(smiths.reload.members).to eq([ "Alan" ])
end
end
describe "#push_all" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.push_all(members: [ "Alan", "Fletch" ])
end
it "pushes the values to existing arrays" do
expect(depeche_mode.reload.members).to eq([ "Dave", "Alan", "Fletch" ])
end
it "pushes to non existant fields" do
expect(smiths.reload.members).to eq([ "Alan", "Fletch" ])
end
end
describe "#rename" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.rename(members: :artists)
end
it "renames existing fields" do
expect(depeche_mode.reload.artists).to eq([ "Dave" ])
end
it "does not rename non existant fields" do
expect(smiths.reload).to_not respond_to(:artists)
end
end
describe "#set" do
let!(:depeche_mode) do
Band.create(name: "Depeche Mode")
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.set(name: "Recoil")
end
it "sets existing fields" do
expect(depeche_mode.reload.name).to eq("Recoil")
end
it "sets non existant fields" do
expect(smiths.reload.name).to eq("Recoil")
end
end
describe "#unset" do
context "when unsetting a single field" do
let!(:depeche_mode) do
Band.create(name: "Depeche Mode", years: 10)
end
let!(:new_order) do
Band.create(name: "New Order", years: 10)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when the field is not aliased" do
before do
context.unset(:name)
end
it "unsets the first existing field" do
expect(depeche_mode.reload.name).to be_nil
end
it "unsets the last existing field" do
expect(new_order.reload.name).to be_nil
end
end
context "when the field is aliased" do
before do
context.unset(:years)
end
it "unsets the first existing field" do
expect(depeche_mode.reload.years).to be_nil
end
it "unsets the last existing field" do
expect(new_order.reload.years).to be_nil
end
end
end
context "when unsetting multiple fields" do
let!(:new_order) do
Band.create(name: "New Order", genres: [ "electro", "dub" ], years: 10)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when the field is not aliased" do
before do
context.unset(:name, :genres)
end
it "unsets name field" do
expect(new_order.reload.name).to be_nil
end
it "unsets genres field" do
expect(new_order.reload.genres).to be_nil
end
end
context "when the field is aliased" do
before do
context.unset(:name, :years)
end
it "unsets the unaliased field" do
expect(new_order.reload.name).to be_nil
end
it "unsets the aliased field" do
expect(new_order.reload.years).to be_nil
end
end
end
end
end
end
Fix strange syntax
require "spec_helper"
if Mongoid::VERSION =~ /\A3\./
describe Mongoid::Contextual::Atomic do
describe "#add_to_set" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:new_order) do
Band.create(members: [ "Peter" ])
end
let!(:smiths) do
Band.create
end
context "when the criteria has no sorting" do
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.add_to_set(:members, "Dave")
end
it "does not add duplicates" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "adds unique values" do
new_order.reload.members.should eq([ "Peter", "Dave" ])
end
it "adds to non initialized fields" do
smiths.reload.members.should eq([ "Dave" ])
end
end
context "when the criteria has sorting" do
let(:criteria) do
Band.asc(:name)
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.add_to_set(:members, "Dave")
end
it "does not add duplicates" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "adds unique values" do
new_order.reload.members.should eq([ "Peter", "Dave" ])
end
it "adds to non initialized fields" do
smiths.reload.members.should eq([ "Dave" ])
end
end
end
describe "#bit" do
let!(:depeche_mode) do
Band.create(likes: 60)
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when performing a bitwise and" do
before do
context.bit(:likes, { and: 13 })
end
it "performs the bitwise operation on initialized fields" do
depeche_mode.reload.likes.should eq(12)
end
it "does not error on non initialized fields" do
smiths.reload.likes.should eq(0)
end
end
context "when performing a bitwise or" do
before do
context.bit(:likes, { or: 13 })
end
it "performs the bitwise operation on initialized fields" do
depeche_mode.reload.likes.should eq(61)
end
it "does not error on non initialized fields" do
smiths.reload.likes.should eq(13)
end
end
context "when chaining bitwise operations" do
before do
context.bit(:likes, { and: 13, or: 10 })
end
it "performs the bitwise operation on initialized fields" do
depeche_mode.reload.likes.should eq(14)
end
it "does not error on non initialized fields" do
smiths.reload.likes.should eq(10)
end
end
end
describe "#inc" do
let!(:depeche_mode) do
Band.create(likes: 60)
end
let!(:smiths) do
Band.create
end
let!(:beatles) do
Band.create(years: 2)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.inc(:likes, 10)
end
context "when the field exists" do
it "incs the value" do
depeche_mode.reload.likes.should eq(70)
end
end
context "when the field does not exist" do
it "does not error on the inc" do
smiths.likes.should be_nil
end
end
context "when using the alias" do
before do
context.inc(:years, 1)
end
it "incs the value and read from alias" do
beatles.reload.years.should eq(3)
end
it "incs the value and read from field" do
beatles.reload.y.should eq(3)
end
end
end
describe "#pop" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Martin" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when popping from the front" do
before do
context.pop(:members, -1)
end
it "pops the first element off the array" do
depeche_mode.reload.members.should eq([ "Martin" ])
end
it "does not error on uninitialized fields" do
smiths.reload.members.should be_nil
end
end
context "when popping from the rear" do
before do
context.pop(:members, 1)
end
it "pops the last element off the array" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "does not error on uninitialized fields" do
smiths.reload.members.should be_nil
end
end
end
describe "#pull" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Alan" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.pull(:members, "Alan")
end
it "pulls when the value is found" do
depeche_mode.reload.members.should eq([ "Dave" ])
end
it "does not error on non existant fields" do
smiths.reload.members.should be_nil
end
end
describe "#pull_all" do
let!(:depeche_mode) do
Band.create(members: [ "Dave", "Alan", "Fletch" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.pull_all(:members, [ "Alan", "Dave" ])
end
it "pulls when the values are found" do
depeche_mode.reload.members.should eq([ "Fletch" ])
end
it "does not error on non existant fields" do
smiths.reload.members.should be_nil
end
end
describe "#push" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.push(:members, "Alan")
end
it "pushes the value to existing arrays" do
depeche_mode.reload.members.should eq([ "Dave", "Alan" ])
end
it "pushes to non existant fields" do
smiths.reload.members.should eq([ "Alan" ])
end
end
describe "#push_all" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.push_all(:members, [ "Alan", "Fletch" ])
end
it "pushes the values to existing arrays" do
depeche_mode.reload.members.should eq([ "Dave", "Alan", "Fletch" ])
end
it "pushes to non existant fields" do
smiths.reload.members.should eq([ "Alan", "Fletch" ])
end
end
describe "#rename" do
let!(:depeche_mode) do
Band.create(members: [ "Dave" ])
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.rename(:members, :artists)
end
it "renames existing fields" do
depeche_mode.reload.artists.should eq([ "Dave" ])
end
it "does not rename non existant fields" do
smiths.reload.should_not respond_to(:artists)
end
end
describe "#set" do
let!(:depeche_mode) do
Band.create(name: "Depeche Mode")
end
let!(:smiths) do
Band.create
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
before do
context.set(:name, "Recoil")
end
it "sets existing fields" do
depeche_mode.reload.name.should eq("Recoil")
end
it "sets non existant fields" do
smiths.reload.name.should eq("Recoil")
end
end
describe "#unset" do
context "when unsetting a single field" do
let!(:depeche_mode) do
Band.create(name: "Depeche Mode", years: 10)
end
let!(:new_order) do
Band.create(name: "New Order", years: 10)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when the field is not aliased" do
before do
context.unset(:name)
end
it "unsets the first existing field" do
depeche_mode.reload.name.should be_nil
end
it "unsets the last existing field" do
new_order.reload.name.should be_nil
end
end
context "when the field is aliased" do
before do
context.unset(:years)
end
it "unsets the first existing field" do
depeche_mode.reload.years.should be_nil
end
it "unsets the last existing field" do
new_order.reload.years.should be_nil
end
end
end
context "when unsetting multiple fields" do
let!(:new_order) do
Band.create(name: "New Order", genres: [ "electro", "dub" ], years: 10)
end
let(:criteria) do
Band.all
end
let(:context) do
Mongoid::Contextual::Mongo.new(criteria)
end
context "when the field is not aliased" do
before do
context.unset(:name, :genres)
end
it "unsets name field" do
new_order.reload.name.should be_nil
end
it "unsets genres field" do
new_order.reload.genres.should be_nil
end
end
context "when the field is aliased" do
before do
context.unset(:name, :years)
end
it "unsets the unaliased field" do
new_order.reload.name.should be_nil
end
it "unsets the aliased field" do
new_order.reload.years.should be_nil
end
end
end
end
end
# MONGOID 4
# Specs for the atomic modifier operations exposed through Mongoid's Mongo
# context (Mongoid::Contextual::Mongo): add_to_set, bit, inc, pop, pull,
# pull_all, push, push_all, rename, set and unset. Each describe seeds a few
# Band documents, applies one atomic operation across the whole criteria, and
# asserts the persisted result via reload.
describe Mongoid::Contextual::Atomic do
  describe "#add_to_set" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave" ])
    end
    let!(:new_order) do
      Band.create(members: [ "Peter" ])
    end
    let!(:smiths) do
      # No members at all: exercises the "non initialized field" path.
      Band.create
    end
    context "when the criteria has no sorting" do
      let(:criteria) do
        Band.all
      end
      let(:context) do
        Mongoid::Contextual::Mongo.new(criteria)
      end
      before do
        context.add_to_set(members: "Dave")
      end
      it "does not add duplicates" do
        expect(depeche_mode.reload.members).to eq([ "Dave" ])
      end
      it "adds unique values" do
        expect(new_order.reload.members).to eq([ "Peter", "Dave" ])
      end
      it "adds to non initialized fields" do
        expect(smiths.reload.members).to eq([ "Dave" ])
      end
    end
    context "when the criteria has sorting" do
      let(:criteria) do
        # Sorted criteria: the sort must be stripped/ignored for the update.
        Band.asc(:name)
      end
      let(:context) do
        Mongoid::Contextual::Mongo.new(criteria)
      end
      before do
        # Two fields in one call: verifies multiple $addToSet operations.
        context.add_to_set(members: "Dave", genres: "Electro")
      end
      it "does not add duplicates" do
        expect(depeche_mode.reload.members).to eq([ "Dave" ])
      end
      it "adds multiple operations" do
        expect(depeche_mode.reload.genres).to eq([ "Electro" ])
      end
      it "adds unique values" do
        expect(new_order.reload.members).to eq([ "Peter", "Dave" ])
      end
      it "adds to non initialized fields" do
        expect(smiths.reload.members).to eq([ "Dave" ])
      end
    end
  end
  describe "#bit" do
    let!(:depeche_mode) do
      Band.create(likes: 60)
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    context "when performing a bitwise and" do
      before do
        context.bit(likes: { and: 13 })
      end
      it "performs the bitwise operation on initialized fields" do
        # 60 & 13 == 12
        expect(depeche_mode.reload.likes).to eq(12)
      end
      it "does not error on non initialized fields" do
        # Missing field is treated as 0 by the server.
        expect(smiths.reload.likes).to eq(0)
      end
    end
    context "when performing a bitwise or" do
      before do
        context.bit(likes: { or: 13 })
      end
      it "performs the bitwise operation on initialized fields" do
        # 60 | 13 == 61
        expect(depeche_mode.reload.likes).to eq(61)
      end
      it "does not error on non initialized fields" do
        expect(smiths.reload.likes).to eq(13)
      end
    end
    context "when chaining bitwise operations" do
      before do
        # Applied in hash order: (60 & 13) | 10 == 14.
        context.bit(likes: { and: 13, or: 10 })
      end
      it "performs the bitwise operation on initialized fields" do
        expect(depeche_mode.reload.likes).to eq(14)
      end
      it "does not error on non initialized fields" do
        expect(smiths.reload.likes).to eq(10)
      end
    end
  end if mongodb_version > "2.5" # $bit behavior under test needs a newer server
  describe "#inc" do
    let!(:depeche_mode) do
      Band.create(likes: 60)
    end
    let!(:smiths) do
      Band.create
    end
    let!(:beatles) do
      Band.create(years: 2)
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      context.inc(likes: 10)
    end
    context "when the field exists" do
      it "incs the value" do
        expect(depeche_mode.reload.likes).to eq(70)
      end
    end
    context "when the field does not exist" do
      it "does not error on the inc" do
        # NOTE(review): no reload here, so this reads the in-memory document,
        # not the post-inc persisted value — presumably intentional; confirm.
        expect(smiths.likes).to be_nil
      end
    end
    context "when using the alias" do
      before do
        # :years is an aliased field whose stored name is :y (see below).
        context.inc(years: 1)
      end
      it "incs the value and read from alias" do
        expect(beatles.reload.years).to eq(3)
      end
      it "incs the value and read from field" do
        expect(beatles.reload.y).to eq(3)
      end
    end
  end
  describe "#pop" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave", "Martin" ])
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    context "when popping from the front" do
      before do
        # -1 pops the first element ($pop semantics).
        context.pop(members: -1)
      end
      it "pops the first element off the array" do
        expect(depeche_mode.reload.members).to eq([ "Martin" ])
      end
      it "does not error on uninitialized fields" do
        expect(smiths.reload.members).to be_nil
      end
    end
    context "when popping from the rear" do
      before do
        # 1 pops the last element.
        context.pop(members: 1)
      end
      it "pops the last element off the array" do
        expect(depeche_mode.reload.members).to eq([ "Dave" ])
      end
      it "does not error on uninitialized fields" do
        expect(smiths.reload.members).to be_nil
      end
    end
  end
  describe "#pull" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave", "Alan" ])
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      context.pull(members: "Alan")
    end
    it "pulls when the value is found" do
      expect(depeche_mode.reload.members).to eq([ "Dave" ])
    end
    it "does not error on non existant fields" do
      expect(smiths.reload.members).to be_nil
    end
  end
  describe "#pull_all" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave", "Alan", "Fletch" ])
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      context.pull_all(members: [ "Alan", "Dave" ])
    end
    it "pulls when the values are found" do
      expect(depeche_mode.reload.members).to eq([ "Fletch" ])
    end
    it "does not error on non existant fields" do
      expect(smiths.reload.members).to be_nil
    end
  end
  describe "#push" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave" ])
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      context.push(members: "Alan")
    end
    it "pushes the value to existing arrays" do
      expect(depeche_mode.reload.members).to eq([ "Dave", "Alan" ])
    end
    it "pushes to non existant fields" do
      # Unlike pop/pull, push creates the array on missing fields.
      expect(smiths.reload.members).to eq([ "Alan" ])
    end
  end
  describe "#push_all" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave" ])
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      context.push_all(members: [ "Alan", "Fletch" ])
    end
    it "pushes the values to existing arrays" do
      expect(depeche_mode.reload.members).to eq([ "Dave", "Alan", "Fletch" ])
    end
    it "pushes to non existant fields" do
      expect(smiths.reload.members).to eq([ "Alan", "Fletch" ])
    end
  end
  describe "#rename" do
    let!(:depeche_mode) do
      Band.create(members: [ "Dave" ])
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      # Renames the stored field members -> artists across the criteria.
      context.rename(members: :artists)
    end
    it "renames existing fields" do
      expect(depeche_mode.reload.artists).to eq([ "Dave" ])
    end
    it "does not rename non existant fields" do
      expect(smiths.reload).to_not respond_to(:artists)
    end
  end
  describe "#set" do
    let!(:depeche_mode) do
      Band.create(name: "Depeche Mode")
    end
    let!(:smiths) do
      Band.create
    end
    let(:criteria) do
      Band.all
    end
    let(:context) do
      Mongoid::Contextual::Mongo.new(criteria)
    end
    before do
      context.set(name: "Recoil")
    end
    it "sets existing fields" do
      expect(depeche_mode.reload.name).to eq("Recoil")
    end
    it "sets non existant fields" do
      expect(smiths.reload.name).to eq("Recoil")
    end
  end
  describe "#unset" do
    context "when unsetting a single field" do
      let!(:depeche_mode) do
        Band.create(name: "Depeche Mode", years: 10)
      end
      let!(:new_order) do
        Band.create(name: "New Order", years: 10)
      end
      let(:criteria) do
        Band.all
      end
      let(:context) do
        Mongoid::Contextual::Mongo.new(criteria)
      end
      context "when the field is not aliased" do
        before do
          context.unset(:name)
        end
        it "unsets the first existing field" do
          expect(depeche_mode.reload.name).to be_nil
        end
        it "unsets the last existing field" do
          expect(new_order.reload.name).to be_nil
        end
      end
      context "when the field is aliased" do
        before do
          # :years must be resolved to its stored name before the $unset.
          context.unset(:years)
        end
        it "unsets the first existing field" do
          expect(depeche_mode.reload.years).to be_nil
        end
        it "unsets the last existing field" do
          expect(new_order.reload.years).to be_nil
        end
      end
    end
    context "when unsetting multiple fields" do
      let!(:new_order) do
        Band.create(name: "New Order", genres: [ "electro", "dub" ], years: 10)
      end
      let(:criteria) do
        Band.all
      end
      let(:context) do
        Mongoid::Contextual::Mongo.new(criteria)
      end
      context "when the field is not aliased" do
        before do
          context.unset(:name, :genres)
        end
        it "unsets name field" do
          expect(new_order.reload.name).to be_nil
        end
        it "unsets genres field" do
          expect(new_order.reload.genres).to be_nil
        end
      end
      context "when the field is aliased" do
        before do
          # Mixed aliased and unaliased fields in a single unset call.
          context.unset(:name, :years)
        end
        it "unsets the unaliased field" do
          expect(new_order.reload.name).to be_nil
        end
        it "unsets the aliased field" do
          expect(new_order.reload.years).to be_nil
        end
      end
    end
  end
end
end
|
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Author:: Tim Hinderliter (<tim@opscode.com>)
# Author:: Christopher Walters (<cw@opscode.com>)
# Copyright:: Copyright 2008-2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
require 'chef/run_context'
require 'chef/rest'
require 'rbconfig'
# Unit specs for Chef::Client: node-name selection, authentication protocol
# selection, output-formatter configuration, the full run lifecycle (with all
# server interactions stubbed out), run-failure cleanup, run notifications,
# node expansion, the Windows admin check, and cookbook-path validation.
describe Chef::Client do
  let(:hostname) { "hostname" }
  let(:machinename) { "machinename.example.org" }
  let(:fqdn) { "hostname.example.org" }
  # Fake ohai attribute data; individual names are nil-ed out per context to
  # exercise the node-name fallback chain (fqdn -> machinename -> hostname).
  let(:ohai_data) do
    { :fqdn => fqdn,
      :hostname => hostname,
      :machinename => machinename,
      :platform => 'example-platform',
      :platform_version => 'example-platform-1.0',
      :data => {}
    }
  end
  let(:ohai_system) do
    ohai_system = double( "Ohai::System",
                          :all_plugins => true,
                          :data => ohai_data)
    # The client indexes ohai with []; route those reads to ohai_data.
    ohai_system.stub(:[]) do |key|
      ohai_data[key]
    end
    ohai_system
  end
  let(:node) do
    Chef::Node.new.tap do |n|
      n.name(fqdn)
      n.chef_environment("_default")
    end
  end
  let(:json_attribs) { nil }
  let(:client_opts) { {} }
  let(:client) do
    Chef::Client.new(json_attribs, client_opts).tap do |c|
      c.node = node
    end
  end
  before do
    # Silence log output during the specs.
    Chef::Log.logger = Logger.new(StringIO.new)
    # Node/Ohai data
    #Chef::Config[:node_name] = fqdn
    Ohai::System.stub(:new).and_return(ohai_system)
  end
  describe "authentication protocol selection" do
    after do
      Chef::Config[:authentication_protocol_version] = "1.0"
    end
    context "when the node name is <= 90 bytes" do
      it "does not force the authentication protocol to 1.1" do
        Chef::Config[:node_name] = ("f" * 90)
        # ugly that this happens as a side effect of a getter :(
        client.node_name
        Chef::Config[:authentication_protocol_version].should == "1.0"
      end
    end
    context "when the node name is > 90 bytes" do
      it "sets the authentication protocol to version 1.1" do
        Chef::Config[:node_name] = ("f" * 91)
        # ugly that this happens as a side effect of a getter :(
        client.node_name
        Chef::Config[:authentication_protocol_version].should == "1.1"
      end
    end
  end
  describe "configuring output formatters" do
    context "when no formatter has been configured" do
      context "and STDOUT is a TTY" do
        before do
          STDOUT.stub(:tty?).and_return(true)
        end
        it "configures the :doc formatter" do
          client.formatters_for_run.should == [[:doc]]
        end
        context "and force_logger is set" do
          before do
            Chef::Config[:force_logger] = true
          end
          it "configures the :null formatter" do
            Chef::Config[:force_logger].should be_true
            client.formatters_for_run.should == [[:null]]
          end
        end
      end
      context "and STDOUT is not a TTY" do
        before do
          STDOUT.stub(:tty?).and_return(false)
        end
        it "configures the :null formatter" do
          client.formatters_for_run.should == [[:null]]
        end
        context "and force_formatter is set" do
          before do
            Chef::Config[:force_formatter] = true
          end
          it "it configures the :doc formatter" do
            client.formatters_for_run.should == [[:doc]]
          end
        end
      end
    end
    context "when a formatter is configured" do
      context "with no output path" do
        before do
          Chef::Config.add_formatter(:min)
        end
        it "does not configure a default formatter" do
          client.formatters_for_run.should == [[:min, nil]]
        end
        it "configures the formatter for STDOUT/STDERR" do
          configured_formatters = client.configure_formatters
          min_formatter = configured_formatters[0]
          min_formatter.output.out.should == STDOUT
          min_formatter.output.err.should == STDERR
        end
      end
      context "with an output path" do
        before do
          @tmpout = Tempfile.open("rspec-for-client-formatter-selection-#{Process.pid}")
          Chef::Config.add_formatter(:min, @tmpout.path)
        end
        after do
          @tmpout.close unless @tmpout.closed?
          @tmpout.unlink
        end
        it "configures the formatter for the file path" do
          configured_formatters = client.configure_formatters
          min_formatter = configured_formatters[0]
          # Both out and err streams point at the configured file.
          min_formatter.output.out.path.should == @tmpout.path
          min_formatter.output.err.path.should == @tmpout.path
        end
      end
    end
  end
  describe "a full client run" do
    # The stub_for_* methods below mock out every external interaction of a
    # chef run (registration, node load, cookbook sync, converge, node save,
    # run lock). Including contexts override individual stub methods to vary
    # one phase of the run.
    shared_examples_for "a successful client run" do
      let(:http_node_load) { double("Chef::REST (node)") }
      let(:http_cookbook_sync) { double("Chef::REST (cookbook sync)") }
      let(:http_node_save) { double("Chef::REST (node save)") }
      let(:runner) { double("Chef::Runner") }
      let(:api_client_exists?) { false }
      let(:stdout) { StringIO.new }
      let(:stderr) { StringIO.new }
      let(:enable_fork) { false }
      def stub_for_register
        # --Client.register
        #   Make sure Client#register thinks the client key doesn't
        #   exist, so it tries to register and create one.
        File.should_receive(:exists?).with(Chef::Config[:client_key]).exactly(1).times.and_return(api_client_exists?)
        unless api_client_exists?
          # Client.register will register with the validation client name.
          Chef::ApiClient::Registration.any_instance.should_receive(:run)
        end
      end
      def stub_for_node_load
        # Client.register will then turn around create another
        # Chef::REST object, this time with the client key it got from the
        # previous step.
        Chef::REST.should_receive(:new).
          with(Chef::Config[:chef_server_url], fqdn, Chef::Config[:client_key]).
          exactly(1).
          and_return(http_node_load)
        # --Client#build_node
        #   looks up the node, which we will return, then later saves it.
        Chef::Node.should_receive(:find_or_create).with(fqdn).and_return(node)
        # --ResourceReporter#node_load_completed
        #   gets a run id from the server for storing resource history
        #   (has its own tests, so stubbing it here.)
        Chef::ResourceReporter.any_instance.should_receive(:node_load_completed)
      end
      def stub_for_sync_cookbooks
        # --Client#setup_run_context
        # ---Client#sync_cookbooks -- downloads the list of cookbooks to sync
        #
        Chef::CookbookSynchronizer.any_instance.should_receive(:sync_cookbooks)
        Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_cookbook_sync)
        http_cookbook_sync.should_receive(:post).
          with("environments/_default/cookbook_versions", {:run_list => []}).
          and_return({})
      end
      def stub_for_converge
        # --Client#converge
        Chef::Runner.should_receive(:new).and_return(runner)
        runner.should_receive(:converge).and_return(true)
        # --ResourceReporter#run_completed
        #   updates the server with the resource history
        #   (has its own tests, so stubbing it here.)
        Chef::ResourceReporter.any_instance.should_receive(:run_completed)
      end
      def stub_for_node_save
        # --Client#save_updated_node
        Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_node_save)
        http_node_save.should_receive(:put_rest).with("nodes/#{fqdn}", node).and_return(true)
      end
      def stub_for_run
        Chef::RunLock.any_instance.should_receive(:acquire)
        Chef::RunLock.any_instance.should_receive(:save_pid)
        Chef::RunLock.any_instance.should_receive(:release)
        # Post conditions: check that node has been filled in correctly
        client.should_receive(:run_started)
        client.should_receive(:run_completed_successfully)
      end
      before do
        Chef::Config[:client_fork] = enable_fork
        # Redirect the client's output constants at in-memory streams.
        stub_const("Chef::Client::STDOUT_FD", stdout)
        stub_const("Chef::Client::STDERR_FD", stderr)
        stub_for_register
        stub_for_node_load
        stub_for_sync_cookbooks
        stub_for_converge
        stub_for_node_save
        stub_for_run
      end
      it "runs ohai, sets up authentication, loads node state, synchronizes policy, and converges" do
        # This is what we're testing.
        client.run
        # fork is stubbed, so we can see the outcome of the run
        node.automatic_attrs[:platform].should == "example-platform"
        node.automatic_attrs[:platform_version].should == "example-platform-1.0"
      end
    end
    describe "when running chef-client without fork" do
      include_examples "a successful client run"
    end
    describe "when running chef-client with forking enabled", :unix_only do
      include_examples "a successful client run" do
        let(:process_status) do
          double("Process::Status")
        end
        let(:enable_fork) { true }
        before do
          # Stub the fork/wait machinery so the "child" runs in-process.
          Process.should_receive(:waitpid2).and_return([1, process_status])
          process_status.should_receive(:success?).and_return(true)
          client.should_receive(:exit).and_return(nil)
          client.should_receive(:fork).and_yield
        end
      end
    end
    describe "when the client key already exists" do
      let(:api_client_exists?) { true }
      include_examples "a successful client run"
    end
    describe "when an override run list is given" do
      let(:client_opts) { {:override_runlist => "recipe[override_recipe]"} }
      it "should permit spaces in overriding run list" do
        Chef::Client.new(nil, :override_runlist => 'role[a], role[b]')
      end
      describe "when running the client" do
        include_examples "a successful client run" do
          before do
            # Client will try to compile and run override_recipe
            Chef::RunContext::CookbookCompiler.any_instance.should_receive(:compile)
          end
          def stub_for_sync_cookbooks
            # --Client#setup_run_context
            # ---Client#sync_cookbooks -- downloads the list of cookbooks to sync
            #
            Chef::CookbookSynchronizer.any_instance.should_receive(:sync_cookbooks)
            Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_cookbook_sync)
            http_cookbook_sync.should_receive(:post).
              with("environments/_default/cookbook_versions", {:run_list => ["override_recipe"]}).
              and_return({})
          end
          def stub_for_node_save
            # Expect NO node save
            node.should_not_receive(:save)
          end
        end
      end
    end
    describe "when a permanent run list is passed as an option" do
      include_examples "a successful client run" do
        let(:new_runlist) { "recipe[new_run_list_recipe]" }
        let(:client_opts) { {:runlist => new_runlist} }
        def stub_for_sync_cookbooks
          # --Client#setup_run_context
          # ---Client#sync_cookbooks -- downloads the list of cookbooks to sync
          #
          Chef::CookbookSynchronizer.any_instance.should_receive(:sync_cookbooks)
          Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_cookbook_sync)
          http_cookbook_sync.should_receive(:post).
            with("environments/_default/cookbook_versions", {:run_list => ["new_run_list_recipe"]}).
            and_return({})
        end
        before do
          # Client will try to compile and run the new_run_list_recipe, but we
          # do not create a fixture for this.
          Chef::RunContext::CookbookCompiler.any_instance.should_receive(:compile)
        end
        it "sets the new run list on the node" do
          client.run
          node.run_list.should == Chef::RunList.new(new_runlist)
        end
      end
    end
  end
  describe "when handling run failures" do
    it "should remove the run_lock on failure of #load_node" do
      @run_lock = double("Chef::RunLock", :acquire => true)
      Chef::RunLock.stub(:new).and_return(@run_lock)
      @events = double("Chef::EventDispatch::Dispatcher").as_null_object
      Chef::EventDispatch::Dispatcher.stub(:new).and_return(@events)
      # @events is created on Chef::Client.new, so we need to recreate it after mocking
      client = Chef::Client.new
      client.stub(:load_node).and_raise(Exception)
      @run_lock.should_receive(:release)
      if(Chef::Config[:client_fork] && !windows?)
        client.should_receive(:fork) do |&block|
          block.call
        end
      end
      lambda { client.run }.should raise_error(Exception)
    end
  end
  describe "when notifying other objects of the status of the chef run" do
    before do
      Chef::Client.clear_notifications
      Chef::Node.stub(:find_or_create).and_return(node)
      node.stub(:save)
      client.load_node
      client.build_node
    end
    it "notifies observers that the run has started" do
      notified = false
      Chef::Client.when_run_starts do |run_status|
        run_status.node.should == node
        notified = true
      end
      client.run_started
      notified.should be_true
    end
    it "notifies observers that the run has completed successfully" do
      notified = false
      Chef::Client.when_run_completes_successfully do |run_status|
        run_status.node.should == node
        notified = true
      end
      client.run_completed_successfully
      notified.should be_true
    end
    it "notifies observers that the run failed" do
      notified = false
      Chef::Client.when_run_fails do |run_status|
        run_status.node.should == node
        notified = true
      end
      client.run_failed
      notified.should be_true
    end
  end
  describe "build_node" do
    it "should expand the roles and recipes for the node" do
      node.run_list << "role[role_containing_cookbook1]"
      role_containing_cookbook1 = Chef::Role.new
      role_containing_cookbook1.name("role_containing_cookbook1")
      role_containing_cookbook1.run_list << "cookbook1"
      # build_node will call Node#expand! with server, which will
      # eventually hit the server to expand the included role.
      mock_chef_rest = double("Chef::REST")
      mock_chef_rest.should_receive(:get_rest).with("roles/role_containing_cookbook1").and_return(role_containing_cookbook1)
      Chef::REST.should_receive(:new).and_return(mock_chef_rest)
      # check pre-conditions.
      node[:roles].should be_nil
      node[:recipes].should be_nil
      client.policy_builder.stub(:node).and_return(node)
      # chefspec and possibly others use the return value of this method
      client.build_node.should == node
      # check post-conditions.
      node[:roles].should_not be_nil
      node[:roles].length.should == 1
      node[:roles].should include("role_containing_cookbook1")
      node[:recipes].should_not be_nil
      node[:recipes].length.should == 1
      node[:recipes].should include("cookbook1")
    end
  end
  describe "windows_admin_check" do
    context "platform is not windows" do
      before do
        Chef::Platform.stub(:windows?).and_return(false)
      end
      it "shouldn't be called" do
        client.should_not_receive(:has_admin_privileges?)
        client.do_windows_admin_check
      end
    end
    context "platform is windows" do
      before do
        Chef::Platform.stub(:windows?).and_return(true)
      end
      it "should be called" do
        client.should_receive(:has_admin_privileges?)
        client.do_windows_admin_check
      end
      context "admin privileges exist" do
        before do
          client.should_receive(:has_admin_privileges?).and_return(true)
        end
        it "should not log a warning message" do
          Chef::Log.should_not_receive(:warn)
          client.do_windows_admin_check
        end
        context "fatal admin check is configured" do
          it "should not raise an exception" do
            client.do_windows_admin_check #should not raise
          end
        end
      end
      context "admin privileges doesn't exist" do
        before do
          client.should_receive(:has_admin_privileges?).and_return(false)
        end
        it "should log a warning message" do
          Chef::Log.should_receive(:warn)
          client.do_windows_admin_check
        end
        context "fatal admin check is configured" do
          it "should raise an exception" do
            # NOTE(review): the description says "should raise an exception",
            # but nothing here sets Chef::Config[:fatal_windows_admin_check]
            # and no raise is asserted, so this example passes vacuously.
            # Likely incomplete — confirm the intended fatal-check behavior
            # and add the config setup plus a raise_error expectation.
            client.do_windows_admin_check
          end
        end
      end
    end
  end
  describe "assert_cookbook_path_not_empty" do
    before do
      Chef::Config[:solo] = true
      Chef::Config[:cookbook_path] = ["/path/to/invalid/cookbook_path"]
    end
    context "when any directory of cookbook_path contains no cookbook" do
      it "raises CookbookNotFound error" do
        expect do
          client.send(:assert_cookbook_path_not_empty, nil)
        end.to raise_error(Chef::Exceptions::CookbookNotFound, 'None of the cookbook paths set in Chef::Config[:cookbook_path], ["/path/to/invalid/cookbook_path"], contain any cookbooks')
      end
    end
  end
  describe "setting node name" do
    context "when machinename, hostname and fqdn are all set" do
      it "favors the fqdn" do
        expect(client.node_name).to eql(fqdn)
      end
    end
    context "when fqdn is missing" do
      # ohai 7 should always have machinename == return of hostname
      let(:fqdn) { nil }
      it "favors the machinename" do
        expect(client.node_name).to eql(machinename)
      end
    end
    context "when fqdn and machinename are missing" do
      # ohai 6 will not have machinename, return the short hostname
      let(:fqdn) { nil }
      let(:machinename) { nil }
      it "falls back to hostname" do
        expect(client.node_name).to eql(hostname)
      end
    end
    context "when they're all missing" do
      let(:machinename) { nil }
      let(:hostname) { nil }
      let(:fqdn) { nil }
      it "throws an exception" do
        expect { client.node_name }.to raise_error(Chef::Exceptions::CannotDetermineNodeName)
      end
    end
  end
end
Added a unit test for setting the chef environment to a single character.
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Author:: Tim Hinderliter (<tim@opscode.com>)
# Author:: Christopher Walters (<cw@opscode.com>)
# Copyright:: Copyright 2008-2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
require 'chef/run_context'
require 'chef/rest'
require 'rbconfig'
describe Chef::Client do
let(:hostname) { "hostname" }
let(:machinename) { "machinename.example.org" }
let(:fqdn) { "hostname.example.org" }
let(:ohai_data) do
{ :fqdn => fqdn,
:hostname => hostname,
:machinename => machinename,
:platform => 'example-platform',
:platform_version => 'example-platform-1.0',
:data => {}
}
end
let(:ohai_system) do
ohai_system = double( "Ohai::System",
:all_plugins => true,
:data => ohai_data)
ohai_system.stub(:[]) do |key|
ohai_data[key]
end
ohai_system
end
let(:node) do
Chef::Node.new.tap do |n|
n.name(fqdn)
n.chef_environment("_default")
end
end
let(:json_attribs) { nil }
let(:client_opts) { {} }
let(:client) do
Chef::Client.new(json_attribs, client_opts).tap do |c|
c.node = node
end
end
before do
Chef::Log.logger = Logger.new(StringIO.new)
# Node/Ohai data
#Chef::Config[:node_name] = fqdn
Ohai::System.stub(:new).and_return(ohai_system)
end
describe "authentication protocol selection" do
after do
Chef::Config[:authentication_protocol_version] = "1.0"
end
context "when the node name is <= 90 bytes" do
it "does not force the authentication protocol to 1.1" do
Chef::Config[:node_name] = ("f" * 90)
# ugly that this happens as a side effect of a getter :(
client.node_name
Chef::Config[:authentication_protocol_version].should == "1.0"
end
end
context "when the node name is > 90 bytes" do
it "sets the authentication protocol to version 1.1" do
Chef::Config[:node_name] = ("f" * 91)
# ugly that this happens as a side effect of a getter :(
client.node_name
Chef::Config[:authentication_protocol_version].should == "1.1"
end
end
end
describe "configuring output formatters" do
context "when no formatter has been configured" do
context "and STDOUT is a TTY" do
before do
STDOUT.stub(:tty?).and_return(true)
end
it "configures the :doc formatter" do
client.formatters_for_run.should == [[:doc]]
end
context "and force_logger is set" do
before do
Chef::Config[:force_logger] = true
end
it "configures the :null formatter" do
Chef::Config[:force_logger].should be_true
client.formatters_for_run.should == [[:null]]
end
end
end
context "and STDOUT is not a TTY" do
before do
STDOUT.stub(:tty?).and_return(false)
end
it "configures the :null formatter" do
client.formatters_for_run.should == [[:null]]
end
context "and force_formatter is set" do
before do
Chef::Config[:force_formatter] = true
end
it "it configures the :doc formatter" do
client.formatters_for_run.should == [[:doc]]
end
end
end
end
context "when a formatter is configured" do
context "with no output path" do
before do
Chef::Config.add_formatter(:min)
end
it "does not configure a default formatter" do
client.formatters_for_run.should == [[:min, nil]]
end
it "configures the formatter for STDOUT/STDERR" do
configured_formatters = client.configure_formatters
min_formatter = configured_formatters[0]
min_formatter.output.out.should == STDOUT
min_formatter.output.err.should == STDERR
end
end
context "with an output path" do
before do
@tmpout = Tempfile.open("rspec-for-client-formatter-selection-#{Process.pid}")
Chef::Config.add_formatter(:min, @tmpout.path)
end
after do
@tmpout.close unless @tmpout.closed?
@tmpout.unlink
end
it "configures the formatter for the file path" do
configured_formatters = client.configure_formatters
min_formatter = configured_formatters[0]
min_formatter.output.out.path.should == @tmpout.path
min_formatter.output.err.path.should == @tmpout.path
end
end
end
end
describe "a full client run" do
shared_examples_for "a successful client run" do
let(:http_node_load) { double("Chef::REST (node)") }
let(:http_cookbook_sync) { double("Chef::REST (cookbook sync)") }
let(:http_node_save) { double("Chef::REST (node save)") }
let(:runner) { double("Chef::Runner") }
let(:api_client_exists?) { false }
let(:stdout) { StringIO.new }
let(:stderr) { StringIO.new }
let(:enable_fork) { false }
def stub_for_register
# --Client.register
# Make sure Client#register thinks the client key doesn't
# exist, so it tries to register and create one.
File.should_receive(:exists?).with(Chef::Config[:client_key]).exactly(1).times.and_return(api_client_exists?)
unless api_client_exists?
# Client.register will register with the validation client name.
Chef::ApiClient::Registration.any_instance.should_receive(:run)
end
end
def stub_for_node_load
# Client.register will then turn around create another
# Chef::REST object, this time with the client key it got from the
# previous step.
Chef::REST.should_receive(:new).
with(Chef::Config[:chef_server_url], fqdn, Chef::Config[:client_key]).
exactly(1).
and_return(http_node_load)
# --Client#build_node
# looks up the node, which we will return, then later saves it.
Chef::Node.should_receive(:find_or_create).with(fqdn).and_return(node)
# --ResourceReporter#node_load_completed
# gets a run id from the server for storing resource history
# (has its own tests, so stubbing it here.)
Chef::ResourceReporter.any_instance.should_receive(:node_load_completed)
end
def stub_for_sync_cookbooks
# --Client#setup_run_context
# ---Client#sync_cookbooks -- downloads the list of cookbooks to sync
#
Chef::CookbookSynchronizer.any_instance.should_receive(:sync_cookbooks)
Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_cookbook_sync)
http_cookbook_sync.should_receive(:post).
with("environments/_default/cookbook_versions", {:run_list => []}).
and_return({})
end
def stub_for_converge
# --Client#converge
Chef::Runner.should_receive(:new).and_return(runner)
runner.should_receive(:converge).and_return(true)
# --ResourceReporter#run_completed
# updates the server with the resource history
# (has its own tests, so stubbing it here.)
Chef::ResourceReporter.any_instance.should_receive(:run_completed)
end
def stub_for_node_save
# --Client#save_updated_node
Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_node_save)
http_node_save.should_receive(:put_rest).with("nodes/#{fqdn}", node).and_return(true)
end
def stub_for_run
Chef::RunLock.any_instance.should_receive(:acquire)
Chef::RunLock.any_instance.should_receive(:save_pid)
Chef::RunLock.any_instance.should_receive(:release)
# Post conditions: check that node has been filled in correctly
client.should_receive(:run_started)
client.should_receive(:run_completed_successfully)
end
before do
Chef::Config[:client_fork] = enable_fork
stub_const("Chef::Client::STDOUT_FD", stdout)
stub_const("Chef::Client::STDERR_FD", stderr)
stub_for_register
stub_for_node_load
stub_for_sync_cookbooks
stub_for_converge
stub_for_node_save
stub_for_run
end
it "runs ohai, sets up authentication, loads node state, synchronizes policy, and converges" do
# This is what we're testing.
client.run
# fork is stubbed, so we can see the outcome of the run
node.automatic_attrs[:platform].should == "example-platform"
node.automatic_attrs[:platform_version].should == "example-platform-1.0"
end
end
describe "when running chef-client without fork" do
include_examples "a successful client run"
end
describe "when running chef-client with forking enabled", :unix_only do
include_examples "a successful client run" do
let(:process_status) do
double("Process::Status")
end
let(:enable_fork) { true }
before do
Process.should_receive(:waitpid2).and_return([1, process_status])
process_status.should_receive(:success?).and_return(true)
client.should_receive(:exit).and_return(nil)
client.should_receive(:fork).and_yield
end
end
end
describe "when the client key already exists" do
let(:api_client_exists?) { true }
include_examples "a successful client run"
end
# Run-list override behavior: an override run list must not be persisted to
# the node, while a permanent run list passed as an option is saved.
describe "when an override run list is given" do
  let(:client_opts) { {:override_runlist => "recipe[override_recipe]"} }

  it "should permit spaces in overriding run list" do
    Chef::Client.new(nil, :override_runlist => 'role[a], role[b]')
  end

  describe "when running the client" do
    include_examples "a successful client run" do
      before do
        # Client will try to compile and run override_recipe
        Chef::RunContext::CookbookCompiler.any_instance.should_receive(:compile)
      end

      def stub_for_sync_cookbooks
        # --Client#setup_run_context
        # ---Client#sync_cookbooks -- downloads the list of cookbooks to sync
        #
        Chef::CookbookSynchronizer.any_instance.should_receive(:sync_cookbooks)
        Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_cookbook_sync)
        http_cookbook_sync.should_receive(:post).
          with("environments/_default/cookbook_versions", {:run_list => ["override_recipe"]}).
          and_return({})
      end

      def stub_for_node_save
        # Expect NO node save
        node.should_not_receive(:save)
      end
    end
  end
end

describe "when a permanent run list is passed as an option" do
  include_examples "a successful client run" do
    let(:new_runlist) { "recipe[new_run_list_recipe]" }
    let(:client_opts) { {:runlist => new_runlist} }

    def stub_for_sync_cookbooks
      # --Client#setup_run_context
      # ---Client#sync_cookbooks -- downloads the list of cookbooks to sync
      #
      Chef::CookbookSynchronizer.any_instance.should_receive(:sync_cookbooks)
      Chef::REST.should_receive(:new).with(Chef::Config[:chef_server_url]).and_return(http_cookbook_sync)
      http_cookbook_sync.should_receive(:post).
        with("environments/_default/cookbook_versions", {:run_list => ["new_run_list_recipe"]}).
        and_return({})
    end

    before do
      # Client will try to compile and run the new_run_list_recipe, but we
      # do not create a fixture for this.
      Chef::RunContext::CookbookCompiler.any_instance.should_receive(:compile)
    end

    it "sets the new run list on the node" do
      client.run
      node.run_list.should == Chef::RunList.new(new_runlist)
    end
  end
end
end
# Failure handling: the run lock must be released even when the run aborts
# very early (during #load_node).
describe "when handling run failures" do
  it "should remove the run_lock on failure of #load_node" do
    @run_lock = double("Chef::RunLock", :acquire => true)
    Chef::RunLock.stub(:new).and_return(@run_lock)
    @events = double("Chef::EventDispatch::Dispatcher").as_null_object
    Chef::EventDispatch::Dispatcher.stub(:new).and_return(@events)

    # @events is created on Chef::Client.new, so we need to recreate it after mocking
    client = Chef::Client.new
    client.stub(:load_node).and_raise(Exception)
    @run_lock.should_receive(:release)
    # When client forking is enabled, run the "child" inline so the failure
    # propagates to this process.
    if(Chef::Config[:client_fork] && !windows?)
      client.should_receive(:fork) do |&block|
        block.call
      end
    end
    lambda { client.run }.should raise_error(Exception)
  end
end

# Observer callbacks registered on Chef::Client must fire with the run status
# at the corresponding lifecycle points.
describe "when notifying other objects of the status of the chef run" do
  before do
    Chef::Client.clear_notifications
    Chef::Node.stub(:find_or_create).and_return(node)
    node.stub(:save)
    client.load_node
    client.build_node
  end

  it "notifies observers that the run has started" do
    notified = false
    Chef::Client.when_run_starts do |run_status|
      run_status.node.should == node
      notified = true
    end

    client.run_started
    notified.should be_true
  end

  it "notifies observers that the run has completed successfully" do
    notified = false
    Chef::Client.when_run_completes_successfully do |run_status|
      run_status.node.should == node
      notified = true
    end

    client.run_completed_successfully
    notified.should be_true
  end

  it "notifies observers that the run failed" do
    notified = false
    Chef::Client.when_run_fails do |run_status|
      run_status.node.should == node
      notified = true
    end

    client.run_failed
    notified.should be_true
  end
end
# Specs for Client#build_node: run list expansion via the server and
# environment selection from Chef::Config.
describe "build_node" do
  it "should expand the roles and recipes for the node" do
    node.run_list << "role[role_containing_cookbook1]"
    role_containing_cookbook1 = Chef::Role.new
    role_containing_cookbook1.name("role_containing_cookbook1")
    role_containing_cookbook1.run_list << "cookbook1"

    # build_node will call Node#expand! with server, which will
    # eventually hit the server to expand the included role.
    mock_chef_rest = double("Chef::REST")
    mock_chef_rest.should_receive(:get_rest).with("roles/role_containing_cookbook1").and_return(role_containing_cookbook1)
    Chef::REST.should_receive(:new).and_return(mock_chef_rest)

    # check pre-conditions.
    node[:roles].should be_nil
    node[:recipes].should be_nil

    client.policy_builder.stub(:node).and_return(node)

    # chefspec and possibly others use the return value of this method
    client.build_node.should == node

    # check post-conditions.
    node[:roles].should_not be_nil
    node[:roles].length.should == 1
    node[:roles].should include("role_containing_cookbook1")
    node[:recipes].should_not be_nil
    node[:recipes].length.should == 1
    node[:recipes].should include("cookbook1")
  end

  it "should set the environment from the specified configuration value" do
    node.chef_environment.should == "_default"
    Chef::Config[:environment] = "A"

    test_env = Chef::Environment.new
    test_env.name("A")

    mock_chef_rest = double("Chef::REST")
    mock_chef_rest.should_receive(:get_rest).with("environments/A").and_return(test_env)
    Chef::REST.should_receive(:new).and_return(mock_chef_rest)
    client.policy_builder.stub(:node).and_return(node)
    client.build_node.should == node

    node.chef_environment.should == "A"
  end
end
# Specs for Client#do_windows_admin_check: on non-Windows platforms the
# privilege check is skipped entirely; on Windows a missing admin privilege
# logs a warning.
describe "windows_admin_check" do
  context "platform is not windows" do
    before do
      Chef::Platform.stub(:windows?).and_return(false)
    end

    it "shouldn't be called" do
      client.should_not_receive(:has_admin_privileges?)
      client.do_windows_admin_check
    end
  end

  context "platform is windows" do
    before do
      Chef::Platform.stub(:windows?).and_return(true)
    end

    it "should be called" do
      client.should_receive(:has_admin_privileges?)
      client.do_windows_admin_check
    end

    context "admin privileges exist" do
      before do
        client.should_receive(:has_admin_privileges?).and_return(true)
      end

      it "should not log a warning message" do
        Chef::Log.should_not_receive(:warn)
        client.do_windows_admin_check
      end

      context "fatal admin check is configured" do
        it "should not raise an exception" do
          client.do_windows_admin_check #should not raise
        end
      end
    end

    context "admin privileges doesn't exist" do
      before do
        client.should_receive(:has_admin_privileges?).and_return(false)
      end

      it "should log a warning message" do
        Chef::Log.should_receive(:warn)
        client.do_windows_admin_check
      end

      context "fatal admin check is configured" do
        # NOTE(review): despite the description, this example never asserts
        # that an exception is raised, and no before block here sets the
        # fatal admin-check configuration -- confirm the intended behavior.
        it "should raise an exception" do
          client.do_windows_admin_check # should not raise
        end
      end
    end
  end
end
# In solo mode an empty cookbook path is a hard error.
describe "assert_cookbook_path_not_empty" do
  before do
    Chef::Config[:solo] = true
    Chef::Config[:cookbook_path] = ["/path/to/invalid/cookbook_path"]
  end

  context "when any directory of cookbook_path contains no cookbook" do
    it "raises CookbookNotFound error" do
      expect do
        client.send(:assert_cookbook_path_not_empty, nil)
      end.to raise_error(Chef::Exceptions::CookbookNotFound, 'None of the cookbook paths set in Chef::Config[:cookbook_path], ["/path/to/invalid/cookbook_path"], contain any cookbooks')
    end
  end
end

# Node-name resolution precedence: fqdn, then machinename, then hostname;
# with none available, node name resolution fails.
describe "setting node name" do
  context "when machinename, hostname and fqdn are all set" do
    it "favors the fqdn" do
      expect(client.node_name).to eql(fqdn)
    end
  end

  context "when fqdn is missing" do
    # ohai 7 should always have machinename == return of hostname
    let(:fqdn) { nil }
    it "favors the machinename" do
      expect(client.node_name).to eql(machinename)
    end
  end

  context "when fqdn and machinename are missing" do
    # ohai 6 will not have machinename, return the short hostname
    let(:fqdn) { nil }
    let(:machinename) { nil }
    it "falls back to hostname" do
      expect(client.node_name).to eql(hostname)
    end
  end

  context "when they're all missing" do
    let(:machinename) { nil }
    let(:hostname) { nil }
    let(:fqdn) { nil }
    it "throws an exception" do
      expect { client.node_name }.to raise_error(Chef::Exceptions::CannotDetermineNodeName)
    end
  end
end
end
|
require 'spec/spec_helper'

# Specs for Riddle::Client: defaults, request-queue behavior, and the binary
# messages built for searchd (compared against fixtures via query_contents).
describe Riddle::Client do
  it "should have the same keys for both commands and versions" do
    Riddle::Client::Commands.keys.should == Riddle::Client::Versions.keys
  end

  it "should default to localhost as the server" do
    Riddle::Client.new.server.should == "localhost"
  end

  it "should default to port 3312" do
    Riddle::Client.new.port.should == 3312
  end

  it "should translate anchor arguments correctly" do
    client = Riddle::Client.new
    client.set_anchor "latitude", 10.0, "longitude", 95.0
    client.anchor.should == {
      :latitude_attribute => "latitude",
      :latitude => 10.0,
      :longitude_attribute => "longitude",
      :longitude => 95.0
    }
  end

  it "should add queries to the queue" do
    client = Riddle::Client.new
    client.queue.should be_empty
    client.append_query "spec"
    client.queue.should_not be_empty
  end

  it "should build a basic search message correctly" do
    client = Riddle::Client.new
    client.append_query "test "
    client.queue.first.should == query_contents(:simple)
  end

  it "should build a message with a specified index correctly" do
    client = Riddle::Client.new
    client.append_query "test ", "edition"
    client.queue.first.should == query_contents(:index)
  end

  it "should build a message using match mode :any correctly" do
    client = Riddle::Client.new
    client.match_mode = :any
    client.append_query "test this "
    client.queue.first.should == query_contents(:any)
  end

  it "should build a message using sort by correctly" do
    client = Riddle::Client.new
    client.sort_by = 'id'
    client.sort_mode = :extended
    client.append_query "testing "
    client.queue.first.should == query_contents(:sort)
  end

  it "should build a message using match mode :boolean correctly" do
    client = Riddle::Client.new
    client.match_mode = :boolean
    client.append_query "test "
    client.queue.first.should == query_contents(:boolean)
  end

  it "should build a message using match mode :phrase correctly" do
    client = Riddle::Client.new
    client.match_mode = :phrase
    client.append_query "testing this "
    client.queue.first.should == query_contents(:phrase)
  end

  it "should build a message with a filter correctly" do
    client = Riddle::Client.new
    client.filters << Riddle::Client::Filter.new("id", [10, 100, 1000])
    client.append_query "test "
    client.queue.first.should == query_contents(:filter)
  end

  it "should build a message with group values correctly" do
    client = Riddle::Client.new
    client.group_by = "id"
    client.group_function = :attr
    client.group_clause = "id"
    client.append_query "test "
    client.queue.first.should == query_contents(:group)
  end

  it "should build a message with group distinct value correctly" do
    client = Riddle::Client.new
    client.group_distinct = "id"
    client.append_query "test "
    client.queue.first.should == query_contents(:distinct)
  end

  it "should build a message with weights correctly" do
    client = Riddle::Client.new
    client.weights = [100, 1]
    client.append_query "test "
    client.queue.first.should == query_contents(:weights)
  end

  it "should build a message with an anchor correctly" do
    client = Riddle::Client.new
    client.set_anchor "latitude", 10.0, "longitude", 95.0
    client.append_query "test "
    client.queue.first.should == query_contents(:anchor)
  end

  it "should build a message with index weights correctly" do
    client = Riddle::Client.new
    client.index_weights = {"people" => 101}
    client.append_query "test "
    client.queue.first.should == query_contents(:index_weights)
  end

  it "should build a message with field weights correctly" do
    client = Riddle::Client.new
    client.field_weights = {"city" => 101}
    client.append_query "test "
    client.queue.first.should == query_contents(:field_weights)
  end

  it "should build a message with a comment correctly" do
    client = Riddle::Client.new
    client.append_query "test ", "*", "commenting"
    client.queue.first.should == query_contents(:comment)
  end

  it "should keep multiple messages in the queue" do
    client = Riddle::Client.new
    client.weights = [100, 1]
    client.append_query "test "
    client.append_query "test "
    client.queue.length.should == 2
    client.queue.each { |item| item.should == query_contents(:weights) }
  end

  it "should keep multiple messages in the queue with different params" do
    client = Riddle::Client.new
    client.weights = [100, 1]
    client.append_query "test "
    client.weights = []
    client.append_query "test ", "edition"
    client.queue.first.should == query_contents(:weights)
    client.queue.last.should == query_contents(:index)
  end

  it "should build a basic update message correctly" do
    client = Riddle::Client.new
    client.send(
      :update_message,
      "people",
      ["birthday"],
      {1 => [191163600]}
    ).should == query_contents(:update_simple)
  end

  it "should build a keywords request without hits correctly" do
    client = Riddle::Client.new
    client.send(
      :keywords_message,
      "pat",
      "people",
      false
    ).should == query_contents(:keywords_without_hits)
  end

  it "should build a keywords request with hits correctly" do
    client = Riddle::Client.new
    client.send(
      :keywords_message,
      "pat",
      "people",
      true
    ).should == query_contents(:keywords_with_hits)
  end

  it "should timeout after a specified time" do
    client = Riddle::Client.new
    client.port = 3313
    client.timeout = 3

    # A listening server that never accepts, so the connect attempt times out.
    server = TCPServer.new "localhost", 3313

    lambda {
      client.send(:connect) { |socket| }
    }.should raise_error(Riddle::ConnectionError)

    server.close
  end
end
Switching ports for timeout test so it doesn't conflict with searchd for specs
require 'spec/spec_helper'

# Specs for Riddle::Client: defaults, request-queue behavior, and the binary
# messages built for searchd (compared against fixtures via query_contents).
# The timeout spec uses port 3314 so it cannot conflict with the searchd
# instance the other specs run against.
describe Riddle::Client do
  it "should have the same keys for both commands and versions" do
    Riddle::Client::Commands.keys.should == Riddle::Client::Versions.keys
  end

  it "should default to localhost as the server" do
    Riddle::Client.new.server.should == "localhost"
  end

  it "should default to port 3312" do
    Riddle::Client.new.port.should == 3312
  end

  it "should translate anchor arguments correctly" do
    client = Riddle::Client.new
    client.set_anchor "latitude", 10.0, "longitude", 95.0
    client.anchor.should == {
      :latitude_attribute => "latitude",
      :latitude => 10.0,
      :longitude_attribute => "longitude",
      :longitude => 95.0
    }
  end

  it "should add queries to the queue" do
    client = Riddle::Client.new
    client.queue.should be_empty
    client.append_query "spec"
    client.queue.should_not be_empty
  end

  it "should build a basic search message correctly" do
    client = Riddle::Client.new
    client.append_query "test "
    client.queue.first.should == query_contents(:simple)
  end

  it "should build a message with a specified index correctly" do
    client = Riddle::Client.new
    client.append_query "test ", "edition"
    client.queue.first.should == query_contents(:index)
  end

  it "should build a message using match mode :any correctly" do
    client = Riddle::Client.new
    client.match_mode = :any
    client.append_query "test this "
    client.queue.first.should == query_contents(:any)
  end

  it "should build a message using sort by correctly" do
    client = Riddle::Client.new
    client.sort_by = 'id'
    client.sort_mode = :extended
    client.append_query "testing "
    client.queue.first.should == query_contents(:sort)
  end

  it "should build a message using match mode :boolean correctly" do
    client = Riddle::Client.new
    client.match_mode = :boolean
    client.append_query "test "
    client.queue.first.should == query_contents(:boolean)
  end

  it "should build a message using match mode :phrase correctly" do
    client = Riddle::Client.new
    client.match_mode = :phrase
    client.append_query "testing this "
    client.queue.first.should == query_contents(:phrase)
  end

  it "should build a message with a filter correctly" do
    client = Riddle::Client.new
    client.filters << Riddle::Client::Filter.new("id", [10, 100, 1000])
    client.append_query "test "
    client.queue.first.should == query_contents(:filter)
  end

  it "should build a message with group values correctly" do
    client = Riddle::Client.new
    client.group_by = "id"
    client.group_function = :attr
    client.group_clause = "id"
    client.append_query "test "
    client.queue.first.should == query_contents(:group)
  end

  it "should build a message with group distinct value correctly" do
    client = Riddle::Client.new
    client.group_distinct = "id"
    client.append_query "test "
    client.queue.first.should == query_contents(:distinct)
  end

  it "should build a message with weights correctly" do
    client = Riddle::Client.new
    client.weights = [100, 1]
    client.append_query "test "
    client.queue.first.should == query_contents(:weights)
  end

  it "should build a message with an anchor correctly" do
    client = Riddle::Client.new
    client.set_anchor "latitude", 10.0, "longitude", 95.0
    client.append_query "test "
    client.queue.first.should == query_contents(:anchor)
  end

  it "should build a message with index weights correctly" do
    client = Riddle::Client.new
    client.index_weights = {"people" => 101}
    client.append_query "test "
    client.queue.first.should == query_contents(:index_weights)
  end

  it "should build a message with field weights correctly" do
    client = Riddle::Client.new
    client.field_weights = {"city" => 101}
    client.append_query "test "
    client.queue.first.should == query_contents(:field_weights)
  end

  it "should build a message with a comment correctly" do
    client = Riddle::Client.new
    client.append_query "test ", "*", "commenting"
    client.queue.first.should == query_contents(:comment)
  end

  it "should keep multiple messages in the queue" do
    client = Riddle::Client.new
    client.weights = [100, 1]
    client.append_query "test "
    client.append_query "test "
    client.queue.length.should == 2
    client.queue.each { |item| item.should == query_contents(:weights) }
  end

  it "should keep multiple messages in the queue with different params" do
    client = Riddle::Client.new
    client.weights = [100, 1]
    client.append_query "test "
    client.weights = []
    client.append_query "test ", "edition"
    client.queue.first.should == query_contents(:weights)
    client.queue.last.should == query_contents(:index)
  end

  it "should build a basic update message correctly" do
    client = Riddle::Client.new
    client.send(
      :update_message,
      "people",
      ["birthday"],
      {1 => [191163600]}
    ).should == query_contents(:update_simple)
  end

  it "should build a keywords request without hits correctly" do
    client = Riddle::Client.new
    client.send(
      :keywords_message,
      "pat",
      "people",
      false
    ).should == query_contents(:keywords_without_hits)
  end

  it "should build a keywords request with hits correctly" do
    client = Riddle::Client.new
    client.send(
      :keywords_message,
      "pat",
      "people",
      true
    ).should == query_contents(:keywords_with_hits)
  end

  it "should timeout after a specified time" do
    client = Riddle::Client.new
    client.port = 3314
    client.timeout = 1

    # A listening server that never accepts, so the connect attempt times out.
    server = TCPServer.new "localhost", 3314

    lambda {
      client.send(:connect) { |socket| }
    }.should raise_error(Riddle::ConnectionError)

    server.close
  end
end
require 'spec_helper'

module Omnibus
  # Verifies Omnibus::Config exposes each setting and that the defaults
  # (computed for a stubbed linux platform) are as expected.
  describe Config do
    it 'extends Mixlib::Config' do
      expect(described_class).to be_a(Mixlib::Config)
    end

    before do
      described_class.reset
      Ohai.stub(:platform).and_return('linux')
    end

    # Asserts that Config responds to +id+ and that it defaults to +default+.
    shared_examples 'a configurable' do |id, default|
      it "responds to .#{id}" do
        expect(described_class).to have_method_defined(id)
      end

      it ".#{id} defaults to #{default.inspect}" do
        expect(described_class.send(id)).to eq(default)
      end
    end

    include_examples 'a configurable', :cache_dir, '/var/cache/omnibus/cache'
    include_examples 'a configurable', :install_path_cache_dir, '/var/cache/omnibus/cache/install_path'
    include_examples 'a configurable', :source_dir, '/var/cache/omnibus/src'
    include_examples 'a configurable', :build_dir, '/var/cache/omnibus/build'
    include_examples 'a configurable', :package_dir, '/var/cache/omnibus/pkg'
    include_examples 'a configurable', :package_tmp, '/var/cache/omnibus/pkg-tmp'
    include_examples 'a configurable', :project_root, Dir.pwd
    include_examples 'a configurable', :install_dir, '/opt/chef'
    include_examples 'a configurable', :build_dmg, true
    include_examples 'a configurable', :dmg_window_bounds, '100, 100, 750, 600'
    include_examples 'a configurable', :dmg_pkg_position, '535, 50'
    include_examples 'a configurable', :use_s3_caching, false
    include_examples 'a configurable', :s3_bucket, nil
    include_examples 'a configurable', :s3_access_key, nil
    include_examples 'a configurable', :release_s3_bucket, nil
    include_examples 'a configurable', :release_s3_access_key, nil
    include_examples 'a configurable', :release_s3_secret_key, nil
    include_examples 'a configurable', :override_file, nil
    include_examples 'a configurable', :software_gem, 'omnibus-software'
    include_examples 'a configurable', :solaris_compiler, nil
    include_examples 'a configurable', :append_timestamp, true
    include_examples 'a configurable', :build_retries, 3
  end
end
Add specs for Windows config
require 'spec_helper'

module Omnibus
  # Verifies Omnibus::Config exposes each setting and that the defaults are
  # correct for both a stubbed linux platform and a stubbed Windows platform.
  describe Config do
    it 'extends Mixlib::Config' do
      expect(described_class).to be_a(Mixlib::Config)
    end

    before do
      described_class.reset
      Ohai.stub(:platform).and_return('linux')
    end

    # Asserts that Config responds to +id+ and that it defaults to +default+.
    shared_examples 'a configurable' do |id, default|
      it "responds to .#{id}" do
        expect(described_class).to have_method_defined(id)
      end

      it ".#{id} defaults to #{default.inspect}" do
        expect(described_class.send(id)).to eq(default)
      end
    end

    include_examples 'a configurable', :cache_dir, '/var/cache/omnibus/cache'
    include_examples 'a configurable', :install_path_cache_dir, '/var/cache/omnibus/cache/install_path'
    include_examples 'a configurable', :source_dir, '/var/cache/omnibus/src'
    include_examples 'a configurable', :build_dir, '/var/cache/omnibus/build'
    include_examples 'a configurable', :package_dir, '/var/cache/omnibus/pkg'
    include_examples 'a configurable', :package_tmp, '/var/cache/omnibus/pkg-tmp'
    include_examples 'a configurable', :project_root, Dir.pwd
    include_examples 'a configurable', :install_dir, '/opt/chef'
    include_examples 'a configurable', :build_dmg, true
    include_examples 'a configurable', :dmg_window_bounds, '100, 100, 750, 600'
    include_examples 'a configurable', :dmg_pkg_position, '535, 50'
    include_examples 'a configurable', :use_s3_caching, false
    include_examples 'a configurable', :s3_bucket, nil
    include_examples 'a configurable', :s3_access_key, nil
    include_examples 'a configurable', :release_s3_bucket, nil
    include_examples 'a configurable', :release_s3_access_key, nil
    include_examples 'a configurable', :release_s3_secret_key, nil
    include_examples 'a configurable', :override_file, nil
    include_examples 'a configurable', :software_gem, 'omnibus-software'
    include_examples 'a configurable', :solaris_compiler, nil
    include_examples 'a configurable', :append_timestamp, true
    include_examples 'a configurable', :build_retries, 3

    context 'on Windows' do
      before do
        Ohai.stub(:platform).and_return('windows')
        # Config keys off File::ALT_SEPARATOR to build Windows-style paths.
        stub_const('File::ALT_SEPARATOR', '\\')
      end

      include_examples 'a configurable', :cache_dir, 'C:\\omnibus-ruby\\cache'
      include_examples 'a configurable', :install_path_cache_dir, 'C:\\omnibus-ruby\\cache\\install_path'
      include_examples 'a configurable', :source_dir, 'C:\\omnibus-ruby\\src'
      include_examples 'a configurable', :build_dir, 'C:\\omnibus-ruby\\build'
      include_examples 'a configurable', :package_dir, 'C:\\omnibus-ruby\\pkg'
      include_examples 'a configurable', :package_tmp, 'C:\\omnibus-ruby\\pkg-tmp'
    end
  end
end
|
# encoding: utf-8
require 'spec_helper'

# Specs for Metar::Parser: construction via .for_cccc plus parsing of each
# METAR attribute (datetime, observer, wind, visibility, weather, sky,
# temperatures, pressure, remarks) in both strict and loose compliance modes.
describe Metar::Parser do
  after :each do
    Metar::Parser.compliance = :loose
  end

  context '.for_cccc' do
    it 'returns a loaded parser' do
      station = stub( 'station' )
      raw = stub( 'raw', :metar => "XXXX 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000",
                         :time => '2010/02/06 16:10' )
      Metar::Station.stub!( :new => station )
      Metar::Raw::Noaa.stub!( :new => raw )

      parser = Metar::Parser.for_cccc( 'XXXX' )

      parser. should be_a( Metar::Parser )
      parser.station_code. should == 'XXXX'
    end
  end

  context 'attributes' do
    before :each do
      # Freeze "now" so year/month completion of partial datetimes is stable.
      @call_time = Time.parse('2011-05-06 16:35')
      Time.stub!(:now).and_return(@call_time)
    end

    it '.location missing' do
      expect do
        setup_parser("FUBAR 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
      end. to raise_error( Metar::ParseError, /Expecting location/ )
    end

    context 'datetime' do
      it 'is parsed' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.time. should == Time.gm(2011, 05, 06, 16, 10)
      end

      it 'throws an error if missing' do
        expect do
          setup_parser("PAIL 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
        end. to raise_error( Metar::ParseError, /Expecting datetime/ )
      end

      context 'in strict mode' do
        before :each do
          Metar::Parser.compliance = :strict
        end

        it 'less than 6 numerals fails' do
          expect do
            parser = setup_parser('MMCE 21645Z 12010KT 8SM SKC 29/26 A2992 RMK')
          end. to raise_error(Metar::ParseError, /Expecting datetime/)
        end
      end

      context 'in loose mode' do
        it '5 numerals parses' do
          parser = setup_parser('MMCE 21645Z 12010KT 8SM SKC 29/26 A2992 RMK')

          parser.time. should == Time.gm(2011, 05, 02, 16, 45)
        end

        it "with 4 numerals parses, takes today's day" do
          parser = setup_parser('HKML 1600Z 19010KT 9999 FEW022 25/22 Q1015')

          parser.time. should == Time.gm(2011, 05, 06, 16, 00)
        end
      end
    end

    context '.observer' do
      it 'real' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.observer. should == :real
      end

      it 'auto' do
        parser = setup_parser("CYXS 151034Z AUTO 09003KT 1/8SM FZFG VV001 M03/M03 A3019 RMK SLP263 ICG")

        parser.observer. should == :auto
      end

      it 'corrected' do
        parser = setup_parser("PAIL 061610Z COR 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.observer. should == :corrected
      end

      it 'corrected (Canadian)' do
        parser = setup_parser('CYZU 310100Z CCA 26004KT 15SM FEW009 BKN040TCU BKN100 OVC210 15/12 A2996 RETS RMK SF1TCU4AC2CI1 SLP149')

        parser.observer. should == :corrected
      end
    end

    it 'wind' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

      parser.wind.direction.value.should be_within( 0.0001 ).of( 240 )
      parser.wind.speed.to_knots. should be_within( 0.0001 ).of( 6 )
    end

    it 'variable_wind' do
      parser = setup_parser("LIRQ 061520Z 01007KT 350V050 9999 SCT035 BKN080 08/02 Q1005")

      parser.variable_wind.direction1.value.
                                  should be_within( 0.0001 ).of( 350 )
      parser.variable_wind.direction2.value.
                                  should be_within( 0.0001 ).of( 50 )
    end

    context '.visibility' do
      it 'CAVOK' do
        parser = setup_parser("PAIL 061610Z 24006KT CAVOK M17/M20 A2910 RMK AO2 P0000")

        parser.visibility.distance.value.
                                  should be_within( 0.01 ).of( 10000.00 )
        parser.visibility.comparator.
                                  should == :more_than
        parser.present_weather.size.
                                  should == 1
        parser.present_weather[ 0 ].phenomenon.
                                  should == 'No significant weather'
        parser.sky_conditions.size.
                                  should == 1
        parser.sky_conditions[ 0 ].type.
                                  should == nil
      end

      it 'visibility_miles_and_fractions' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.visibility.distance.to_miles.
                                  should be_within( 0.01 ).of( 1.75 )
      end

      it 'in meters' do
        parser = setup_parser('VABB 282210Z 22005KT 4000 HZ SCT018 FEW025TCU BKN100 28/25 Q1003 NOSIG')

        parser.visibility.distance.value.
                                  should be_within(0.01).of(4000)
      end

      it '//// with automatic observer' do
        parser = setup_parser("CYXS 151034Z AUTO 09003KT //// FZFG VV001 M03/M03 A3019 RMK SLP263 ICG")

        parser.visibility. should be_nil
      end
    end

    it 'runway_visible_range' do
      parser = setup_parser("ESSB 151020Z 26003KT 2000 R12/1000N R30/1500N VV002 M07/M07 Q1013 1271//55")

      parser.runway_visible_range.length.
                                  should == 2
      parser.runway_visible_range[0].designator.
                                  should == '12'
      parser.runway_visible_range[0].visibility1.distance.value.
                                  should == 1000
      parser.runway_visible_range[0].tendency.
                                  should == :no_change
    end

    it 'runway_visible_range_defaults_to_empty_array' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

      parser.runway_visible_range.length.
                                  should == 0
    end

    it 'runway_visible_range_variable' do
      parser = setup_parser("KPDX 151108Z 11006KT 1/4SM R10R/1600VP6000FT FG OVC002 05/05 A3022 RMK AO2")

      parser.runway_visible_range[0].visibility1.distance.to_feet.
                                  should == 1600.0
      parser.runway_visible_range[0].visibility2.distance.to_feet.
                                  should == 6000.0
    end

    context '.present_weather' do
      it 'normal' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.present_weather.size.
                                  should == 1
        parser.present_weather[0].modifier.
                                  should == 'light'
        parser.present_weather[0].phenomenon.
                                  should == 'snow'
      end

      it 'auto + //' do
        parser = setup_parser("PAIL 061610Z AUTO 24006KT 1 3/4SM // BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.present_weather.size.
                                  should == 1
        parser.present_weather[0].phenomenon.
                                  should == 'not observed'
      end
    end

    it 'present_weather_defaults_to_empty_array' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

      parser.present_weather.length.
                                  should == 0
    end

    context '.sky_conditions' do
      it 'normal' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.sky_conditions.size.
                                  should == 2
        parser.sky_conditions[0].quantity.
                                  should == 'broken'
        parser.sky_conditions[0].height.value.
                                  should == 487.68
        parser.sky_conditions[1].quantity.
                                  should == 'overcast'
        parser.sky_conditions[1].height.value.
                                  should == 914.40
      end

      it 'auto + ///' do
        parser = setup_parser("PAIL 061610Z AUTO 24006KT 1 3/4SM /// M17/M20 A2910 RMK AO2 P0000")

        parser.sky_conditions.size.
                                  should == 0
      end
    end

    it 'sky_conditions_defaults_to_empty_array' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN M17/M20 A2910 RMK AO2 P0000")

      parser.sky_conditions.length.
                                  should == 0
    end

    it 'vertical_visibility' do
      parser = setup_parser("CYXS 151034Z AUTO 09003KT 1/8SM FZFG VV001 M03/M03 A3019 RMK SLP263 ICG")

      parser.vertical_visibility.value.
                                  should == 30.48
    end

    it 'temperature' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

      parser.temperature.value. should == -17
    end

    it 'dew_point' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

      parser.dew_point.value. should == -20
    end

    it 'sea_level_pressure' do
      parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

      parser.sea_level_pressure.to_inches_of_mercury.
                                  should == 29.10
    end

    it 'recent weather' do
      parser = setup_parser("CYQH 310110Z 00000KT 20SM SCT035CB BKN050 RETS RMK CB4SC1")

      parser.recent_weather. should be_a Array
      parser.recent_weather.size. should == 1
      parser.recent_weather[0].phenomenon.
                                  should == 'thunderstorm'
    end

    context 'remarks' do
      it 'are collected' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")

        parser.remarks. should be_a Array
        parser.remarks.size. should == 2
      end

      it 'remarks defaults to empty array' do
        parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910")

        parser.remarks. should be_a Array
        parser.remarks.length. should == 0
      end

      it 'parses known remarks' do
        parser = setup_parser('CYZT 052200Z 31010KT 20SM SKC 17/12 A3005 RMK SLP174 20046')

        parser.remarks[0]. should be_a(Metar::SeaLevelPressure)
        parser.remarks[1]. should be_temperature_extreme(:minimum, 4.6)
      end

      context 'in strict mode' do
        before :each do
          Metar::Parser.compliance = :strict
        end

        it 'unparsed data causes an error' do
          expect do
            setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 FOO RMK AO2 P0000")
          end. to raise_error(Metar::ParseError, /Unparsable text found/)
        end
      end

      context 'in loose mode' do
        it 'unparsed data is collected' do
          parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 FOO RMK AO2 P0000")

          parser.unparsed. should == ['FOO']
          parser.remarks.size. should == 2
        end
      end
    end

    # Builds a parser directly from a raw METAR string.
    def setup_parser(metar)
      raw = Metar::Raw::Data.new(metar)
      Metar::Parser.new(raw)
    end
  end
end
:each is the default for rspec 'before'
# encoding: utf-8
require 'spec_helper'
describe Metar::Parser do
after do
Metar::Parser.compliance = :loose
end
context '.for_cccc' do
it 'returns a loaded parser' do
station = stub( 'station' )
raw = stub( 'raw', :metar => "XXXX 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000",
:time => '2010/02/06 16:10' )
Metar::Station.stub!( :new => station )
Metar::Raw::Noaa.stub!( :new => raw )
parser = Metar::Parser.for_cccc( 'XXXX' )
parser. should be_a( Metar::Parser )
parser.station_code. should == 'XXXX'
end
end
context 'attributes' do
before do
@call_time = Time.parse('2011-05-06 16:35')
Time.stub!(:now).and_return(@call_time)
end
it '.location missing' do
expect do
setup_parser("FUBAR 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
end. to raise_error( Metar::ParseError, /Expecting location/ )
end
context 'datetime' do
it 'is parsed' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.time. should == Time.gm(2011, 05, 06, 16, 10)
end
it 'throws an error is missing' do
expect do
setup_parser("PAIL 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
end. to raise_error( Metar::ParseError, /Expecting datetime/ )
end
context 'in strict mode' do
before do
Metar::Parser.compliance = :strict
end
it 'less than 6 numerals fails' do
expect do
parser = setup_parser('MMCE 21645Z 12010KT 8SM SKC 29/26 A2992 RMK')
end. to raise_error(Metar::ParseError, /Expecting datetime/)
end
end
context 'in loose mode' do
it '5 numerals parses' do
parser = setup_parser('MMCE 21645Z 12010KT 8SM SKC 29/26 A2992 RMK')
parser.time. should == Time.gm(2011, 05, 02, 16, 45)
end
it "with 4 numerals parses, takes today's day" do
parser = setup_parser('HKML 1600Z 19010KT 9999 FEW022 25/22 Q1015')
parser.time. should == Time.gm(2011, 05, 06, 16, 00)
end
end
end
context '.observer' do
it 'real' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.observer. should == :real
end
it 'auto' do
parser = setup_parser("CYXS 151034Z AUTO 09003KT 1/8SM FZFG VV001 M03/M03 A3019 RMK SLP263 ICG")
parser.observer. should == :auto
end
it 'corrected' do
parser = setup_parser("PAIL 061610Z COR 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.observer. should == :corrected
end
it 'corrected (Canadian)' do
parser = setup_parser('CYZU 310100Z CCA 26004KT 15SM FEW009 BKN040TCU BKN100 OVC210 15/12 A2996 RETS RMK SF1TCU4AC2CI1 SLP149')
parser.observer. should == :corrected
end
end
it 'wind' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.wind.direction.value.should be_within( 0.0001 ).of( 240 )
parser.wind.speed.to_knots. should be_within( 0.0001 ).of( 6 )
end
it 'variable_wind' do
parser = setup_parser("LIRQ 061520Z 01007KT 350V050 9999 SCT035 BKN080 08/02 Q1005")
parser.variable_wind.direction1.value.
should be_within( 0.0001 ).of( 350 )
parser.variable_wind.direction2.value.
should be_within( 0.0001 ).of( 50 )
end
context '.visibility' do
it 'CAVOK' do
parser = setup_parser("PAIL 061610Z 24006KT CAVOK M17/M20 A2910 RMK AO2 P0000")
parser.visibility.distance.value.
should be_within( 0.01 ).of( 10000.00 )
parser.visibility.comparator.
should == :more_than
parser.present_weather.size.
should == 1
parser.present_weather[ 0 ].phenomenon.
should == 'No significant weather'
parser.sky_conditions.size.
should == 1
parser.sky_conditions[ 0 ].type.
should == nil
end
it 'visibility_miles_and_fractions' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.visibility.distance.to_miles.
should be_within( 0.01 ).of( 1.75 )
end
it 'in meters' do
parser = setup_parser('VABB 282210Z 22005KT 4000 HZ SCT018 FEW025TCU BKN100 28/25 Q1003 NOSIG')
parser.visibility.distance.value.
should be_within(0.01).of(4000)
end
it '//// with automatic observer' do
parser = setup_parser("CYXS 151034Z AUTO 09003KT //// FZFG VV001 M03/M03 A3019 RMK SLP263 ICG")
parser.visibility. should be_nil
end
end
it 'runway_visible_range' do
parser = setup_parser("ESSB 151020Z 26003KT 2000 R12/1000N R30/1500N VV002 M07/M07 Q1013 1271//55")
parser.runway_visible_range.length.
should == 2
parser.runway_visible_range[0].designator.
should == '12'
parser.runway_visible_range[0].visibility1.distance.value.
should == 1000
parser.runway_visible_range[0].tendency.
should == :no_change
end
it 'runway_visible_range_defaults_to_empty_array' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.runway_visible_range.length.
should == 0
end
it 'runway_visible_range_variable' do
parser = setup_parser("KPDX 151108Z 11006KT 1/4SM R10R/1600VP6000FT FG OVC002 05/05 A3022 RMK AO2")
parser.runway_visible_range[0].visibility1.distance.to_feet.
should == 1600.0
parser.runway_visible_range[0].visibility2.distance.to_feet.
should == 6000.0
end
context '.present_weather' do
it 'normal' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.present_weather.size.
should == 1
parser.present_weather[0].modifier.
should == 'light'
parser.present_weather[0].phenomenon.
should == 'snow'
end
it 'auto + //' do
parser = setup_parser("PAIL 061610Z AUTO 24006KT 1 3/4SM // BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.present_weather.size.
should == 1
parser.present_weather[0].phenomenon.
should == 'not observed'
end
end
it 'present_weather_defaults_to_empty_array' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.present_weather.length.
should == 0
end
context '.sky_conditions' do
it 'normal' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.sky_conditions.size.
should == 2
parser.sky_conditions[0].quantity.
should == 'broken'
parser.sky_conditions[0].height.value.
should == 487.68
parser.sky_conditions[1].quantity.
should == 'overcast'
parser.sky_conditions[1].height.value.
should == 914.40
end
it 'auto + ///' do
parser = setup_parser("PAIL 061610Z AUTO 24006KT 1 3/4SM /// M17/M20 A2910 RMK AO2 P0000")
parser.sky_conditions.size.
should == 0
end
end
it 'sky_conditions_defaults_to_empty_array' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN M17/M20 A2910 RMK AO2 P0000")
parser.sky_conditions.length.
should == 0
end
it 'vertical_visibility' do
parser = setup_parser("CYXS 151034Z AUTO 09003KT 1/8SM FZFG VV001 M03/M03 A3019 RMK SLP263 ICG")
parser.vertical_visibility.value.
should == 30.48
end
it 'temperature' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.temperature.value. should == -17
end
it 'dew_point' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.dew_point.value. should == -20
end
it 'sea_level_pressure' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.sea_level_pressure.to_inches_of_mercury.
should == 29.10
end
it 'recent weather' do
parser = setup_parser("CYQH 310110Z 00000KT 20SM SCT035CB BKN050 RETS RMK CB4SC1")
parser.recent_weather. should be_a Array
parser.recent_weather.size. should == 1
parser.recent_weather[0].phenomenon.
should == 'thunderstorm'
end
context 'remarks' do
it 'are collected' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 RMK AO2 P0000")
parser.remarks. should be_a Array
parser.remarks.size. should == 2
end
it 'remarks defaults to empty array' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910")
parser.remarks. should be_a Array
parser.remarks.length. should == 0
end
it 'parses known remarks' do
parser = setup_parser('CYZT 052200Z 31010KT 20SM SKC 17/12 A3005 RMK SLP174 20046')
parser.remarks[0]. should be_a(Metar::SeaLevelPressure)
parser.remarks[1]. should be_temperature_extreme(:minimum, 4.6)
end
context 'in strict mode' do
before do
Metar::Parser.compliance = :strict
end
it 'unparsed data causes an error' do
expect do
setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 FOO RMK AO2 P0000")
end. to raise_error(Metar::ParseError, /Unparsable text found/)
end
end
context 'in loose mode' do
it 'unparsed data is collected' do
parser = setup_parser("PAIL 061610Z 24006KT 1 3/4SM -SN BKN016 OVC030 M17/M20 A2910 FOO RMK AO2 P0000")
parser.unparsed. should == ['FOO']
parser.remarks.size. should == 2
end
end
end
def setup_parser(metar)
raw = Metar::Raw::Data.new(metar)
Metar::Parser.new(raw)
end
end
end
|
require "vendor/heroku/okjson"
module Heroku
module Helpers
extend self
def home_directory
running_on_windows? ? ENV['USERPROFILE'].gsub("\\","/") : ENV['HOME']
end
def running_on_windows?
RUBY_PLATFORM =~ /mswin32|mingw32/
end
def running_on_a_mac?
RUBY_PLATFORM =~ /-darwin\d/
end
def display(msg="", new_line=true)
if new_line
puts(msg)
else
print(msg)
$stdout.flush
end
end
def redisplay(line, line_break = false)
display("\r\e[0K#{line}", line_break)
end
def deprecate(message)
display "WARNING: #{message}"
end
def confirm_billing
display
display "This action will cause your account to be billed at the end of the month"
display "For more information, see http://devcenter.heroku.com/articles/billing"
if confirm
heroku.confirm_billing
true
end
end
def confirm(message="Are you sure you wish to continue? (y/n)?")
display("#{message} ", false)
['y', 'yes'].include?(ask.downcase)
end
def confirm_command(app_to_confirm = app, message=nil)
raise(Heroku::Command::CommandFailed, "No app specified.\nRun this command from app folder or set it adding --app <app name>") unless app_to_confirm
if confirmed_app = options[:confirm]
unless confirmed_app == app_to_confirm
raise(Heroku::Command::CommandFailed, "Confirmed app #{confirmed_app} did not match the selected app #{app_to_confirm}.")
end
return true
else
display
message ||= "WARNING: Destructive Action\nThis command will affect the app: #{app_to_confirm}"
message << "\nTo proceed, type \"#{app_to_confirm}\" or re-run this command with --confirm #{app_to_confirm}"
output_with_bang(message)
display
display "> ", false
if ask.downcase != app_to_confirm
error("Confirmation did not match #{app_to_confirm}. Aborted.")
else
true
end
end
end
def format_date(date)
date = Time.parse(date) if date.is_a?(String)
date.strftime("%Y-%m-%d %H:%M %Z")
end
def ask
$stdin.gets.to_s.strip
end
def shell(cmd)
FileUtils.cd(Dir.pwd) {|d| return `#{cmd}`}
end
def run_command(command, args=[])
Heroku::Command.run(command, args)
end
def retry_on_exception(*exceptions)
retry_count = 0
begin
yield
rescue *exceptions => ex
raise ex if retry_count >= 3
sleep 3
retry_count += 1
retry
end
end
def has_git?
%x{ git --version }
$?.success?
end
def git(args)
return "" unless has_git?
flattened_args = [args].flatten.compact.join(" ")
%x{ git #{flattened_args} 2>&1 }.strip
end
def time_ago(elapsed)
if elapsed <= 60
"#{elapsed.floor}s ago"
elsif elapsed <= (60 * 60)
"#{(elapsed / 60).floor}m ago"
elsif elapsed <= (60 * 60 * 25)
"#{(elapsed / 60 / 60).floor}h ago"
else
(Time.now - elapsed).strftime("%Y/%m/%d %H:%M:%S")
end
end
def truncate(text, length)
if text.size > length
text[0, length - 2] + '..'
else
text
end
end
@@kb = 1024
@@mb = 1024 * @@kb
@@gb = 1024 * @@mb
def format_bytes(amount)
amount = amount.to_i
return '(empty)' if amount == 0
return amount if amount < @@kb
return "#{(amount / @@kb).round}k" if amount < @@mb
return "#{(amount / @@mb).round}M" if amount < @@gb
return "#{(amount / @@gb).round}G"
end
def quantify(string, num)
"%d %s" % [ num, num.to_i == 1 ? string : "#{string}s" ]
end
def create_git_remote(remote, url)
return if git('remote').split("\n").include?(remote)
return unless File.exists?(".git")
git "remote add #{remote} #{url}"
display "Git remote #{remote} added"
end
def longest(items)
items.map { |i| i.to_s.length }.sort.last
end
def display_table(objects, columns, headers)
lengths = []
columns.each_with_index do |column, index|
header = headers[index]
lengths << longest([header].concat(objects.map { |o| o[column].to_s }))
end
lines = lengths.map {|length| "-" * length}
lengths[-1] = 0 # remove padding from last column
display_row headers, lengths
display_row lines, lengths
objects.each do |row|
display_row columns.map { |column| row[column] }, lengths
end
end
def display_row(row, lengths)
row_data = []
row.zip(lengths).each do |column, length|
format = column.is_a?(Fixnum) ? "%#{length}s" : "%-#{length}s"
row_data << format % column
end
display(row_data.join(" "))
end
def json_encode(object)
Heroku::OkJson.encode(object)
rescue Heroku::OkJson::Error
nil
end
def json_decode(json)
Heroku::OkJson.decode(json)
rescue Heroku::OkJson::Error
nil
end
def set_buffer(enable)
with_tty do
if enable
`stty icanon echo`
else
`stty -icanon -echo`
end
end
end
def with_tty(&block)
return unless $stdin.isatty
begin
yield
rescue
# fails on windows
end
end
def get_terminal_environment
{ "TERM" => ENV["TERM"], "COLUMNS" => `tput cols`.strip, "LINES" => `tput lines`.strip }
rescue
{ "TERM" => ENV["TERM"] }
end
def fail(message)
raise Heroku::Command::CommandFailed, message
end
## DISPLAY HELPERS
def action(message, options={})
display("#{message}... ", false)
Heroku::Helpers.error_with_failure = true
ret = yield
Heroku::Helpers.error_with_failure = false
display((options[:success] || "done"), false)
display(", #{@status}", false) if @status
display
ret
end
def status(message)
@status = message
end
def format_with_bang(message)
return '' if message.to_s.strip == ""
" ! " + message.split("\n").join("\n ! ")
end
def output_with_bang(message="", new_line=true)
return if message.to_s.strip == ""
display(format_with_bang(message), new_line)
end
def error(message)
if Heroku::Helpers.error_with_failure
display("failed")
Heroku::Helpers.error_with_failure = false
end
$stderr.puts(format_with_bang(message))
exit(1)
end
def self.error_with_failure
@@error_with_failure ||= false
end
def self.error_with_failure=(new_error_with_failure)
@@error_with_failure = new_error_with_failure
end
def self.included_into
@@included_into ||= []
end
def self.extended_into
@@extended_into ||= []
end
def self.included(base)
included_into << base
end
def self.extended(base)
extended_into << base
end
def display_header(message="", new_line=true)
return if message.to_s.strip == ""
display("=== " + message.to_s.split("\n").join("\n=== "), new_line)
end
def display_object(object)
case object
when Array
# list of objects
object.each do |item|
display_object(item)
end
when Hash
# if all values are arrays, it is a list with headers
# otherwise it is a single header with pairs of data
if object.values.all? {|value| value.is_a?(Array)}
object.keys.sort_by {|key| key.to_s}.each do |key|
display_header(key)
display_object(object[key])
hputs
end
end
else
hputs(object.to_s)
end
end
def hputs(string='')
Kernel.puts(string)
end
def hprint(string='')
Kernel.print(string)
$stdout.flush
end
def spinner(ticks)
%w(/ - \\ |)[ticks % 4]
end
# produces a printf formatter line for an array of items
# if an individual line item is an array, it will create columns
# that are lined-up
#
# line_formatter(["foo", "barbaz"]) # => "%-6s"
# line_formatter(["foo", "barbaz"], ["bar", "qux"]) # => "%-3s %-6s"
#
def line_formatter(array)
if array.any? {|item| item.is_a?(Array)}
cols = []
array.each do |item|
if item.is_a?(Array)
item.each_with_index { |val,idx| cols[idx] = [cols[idx]||0, val.length].max }
end
end
cols.map { |col| "%-#{col}s" }.join(" ")
else
"%s"
end
end
def styled_array(array, options={})
fmt = line_formatter(array)
array = array.sort unless options[:sort] == false
array.each do |element|
display((fmt % element).rstrip)
end
display
end
def styled_header(header)
display("=== #{header}")
end
def styled_hash(hash)
max_key_length = hash.keys.map {|key| key.to_s.length}.max + 2
hash.keys.sort {|x,y| x.to_s <=> y.to_s}.each do |key|
case value = hash[key]
when Array
if value.empty?
next
else
elements = value.sort {|x,y| x.to_s <=> y.to_s}
display("#{key}: ".ljust(max_key_length), false)
display(elements[0])
elements[1..-1].each do |element|
display("#{' ' * max_key_length}#{element}")
end
if elements.length > 1
display
end
end
when nil
next
else
display("#{key}: ".ljust(max_key_length), false)
display(value)
end
end
end
def string_distance(first, last)
distances = [] # 0x0s
0.upto(first.length) do |index|
distances << [index] + [0] * last.length
end
distances[0] = 0.upto(last.length).to_a
1.upto(last.length) do |last_index|
1.upto(first.length) do |first_index|
first_char = first[first_index - 1, 1]
last_char = last[last_index - 1, 1]
if first_char == last_char
distances[first_index][last_index] = distances[first_index - 1][last_index - 1] # noop
else
distances[first_index][last_index] = [
distances[first_index - 1][last_index], # deletion
distances[first_index][last_index - 1], # insertion
distances[first_index - 1][last_index - 1] # substitution
].min + 1 # cost
if first_index > 1 && last_index > 1
first_previous_char = first[first_index - 2, 1]
last_previous_char = last[last_index - 2, 1]
if first_char == last_previous_char && first_previous_char == last_char
distances[first_index][last_index] = [
distances[first_index][last_index],
distances[first_index - 2][last_index - 2] + 1 # transposition
].min
end
end
end
end
end
distances[first.length][last.length]
end
def suggestion(actual, possibilities)
distances = Hash.new {|hash,key| hash[key] = []}
possibilities.each do |suggestion|
distances[string_distance(actual, suggestion)] << suggestion
end
minimum_distance = distances.keys.min
if minimum_distance < 4
suggestions = distances[minimum_distance].sort
if suggestions.length == 1
"Perhaps you meant `#{suggestions.first}`."
else
"Perhaps you meant #{suggestions[0...-1].map {|suggestion| "`#{suggestion}`"}.join(', ')} or `#{suggestions.last}`."
end
else
nil
end
end
end
end
fix line formatting to not bomb out on nil values
require "vendor/heroku/okjson"
module Heroku
module Helpers
extend self
def home_directory
running_on_windows? ? ENV['USERPROFILE'].gsub("\\","/") : ENV['HOME']
end
def running_on_windows?
RUBY_PLATFORM =~ /mswin32|mingw32/
end
def running_on_a_mac?
RUBY_PLATFORM =~ /-darwin\d/
end
def display(msg="", new_line=true)
if new_line
puts(msg)
else
print(msg)
$stdout.flush
end
end
def redisplay(line, line_break = false)
display("\r\e[0K#{line}", line_break)
end
def deprecate(message)
display "WARNING: #{message}"
end
def confirm_billing
display
display "This action will cause your account to be billed at the end of the month"
display "For more information, see http://devcenter.heroku.com/articles/billing"
if confirm
heroku.confirm_billing
true
end
end
def confirm(message="Are you sure you wish to continue? (y/n)?")
display("#{message} ", false)
['y', 'yes'].include?(ask.downcase)
end
def confirm_command(app_to_confirm = app, message=nil)
raise(Heroku::Command::CommandFailed, "No app specified.\nRun this command from app folder or set it adding --app <app name>") unless app_to_confirm
if confirmed_app = options[:confirm]
unless confirmed_app == app_to_confirm
raise(Heroku::Command::CommandFailed, "Confirmed app #{confirmed_app} did not match the selected app #{app_to_confirm}.")
end
return true
else
display
message ||= "WARNING: Destructive Action\nThis command will affect the app: #{app_to_confirm}"
message << "\nTo proceed, type \"#{app_to_confirm}\" or re-run this command with --confirm #{app_to_confirm}"
output_with_bang(message)
display
display "> ", false
if ask.downcase != app_to_confirm
error("Confirmation did not match #{app_to_confirm}. Aborted.")
else
true
end
end
end
def format_date(date)
date = Time.parse(date) if date.is_a?(String)
date.strftime("%Y-%m-%d %H:%M %Z")
end
def ask
$stdin.gets.to_s.strip
end
def shell(cmd)
FileUtils.cd(Dir.pwd) {|d| return `#{cmd}`}
end
def run_command(command, args=[])
Heroku::Command.run(command, args)
end
def retry_on_exception(*exceptions)
retry_count = 0
begin
yield
rescue *exceptions => ex
raise ex if retry_count >= 3
sleep 3
retry_count += 1
retry
end
end
def has_git?
%x{ git --version }
$?.success?
end
def git(args)
return "" unless has_git?
flattened_args = [args].flatten.compact.join(" ")
%x{ git #{flattened_args} 2>&1 }.strip
end
def time_ago(elapsed)
if elapsed <= 60
"#{elapsed.floor}s ago"
elsif elapsed <= (60 * 60)
"#{(elapsed / 60).floor}m ago"
elsif elapsed <= (60 * 60 * 25)
"#{(elapsed / 60 / 60).floor}h ago"
else
(Time.now - elapsed).strftime("%Y/%m/%d %H:%M:%S")
end
end
def truncate(text, length)
if text.size > length
text[0, length - 2] + '..'
else
text
end
end
@@kb = 1024
@@mb = 1024 * @@kb
@@gb = 1024 * @@mb
def format_bytes(amount)
amount = amount.to_i
return '(empty)' if amount == 0
return amount if amount < @@kb
return "#{(amount / @@kb).round}k" if amount < @@mb
return "#{(amount / @@mb).round}M" if amount < @@gb
return "#{(amount / @@gb).round}G"
end
def quantify(string, num)
"%d %s" % [ num, num.to_i == 1 ? string : "#{string}s" ]
end
def create_git_remote(remote, url)
return if git('remote').split("\n").include?(remote)
return unless File.exists?(".git")
git "remote add #{remote} #{url}"
display "Git remote #{remote} added"
end
def longest(items)
items.map { |i| i.to_s.length }.sort.last
end
def display_table(objects, columns, headers)
lengths = []
columns.each_with_index do |column, index|
header = headers[index]
lengths << longest([header].concat(objects.map { |o| o[column].to_s }))
end
lines = lengths.map {|length| "-" * length}
lengths[-1] = 0 # remove padding from last column
display_row headers, lengths
display_row lines, lengths
objects.each do |row|
display_row columns.map { |column| row[column] }, lengths
end
end
def display_row(row, lengths)
row_data = []
row.zip(lengths).each do |column, length|
format = column.is_a?(Fixnum) ? "%#{length}s" : "%-#{length}s"
row_data << format % column
end
display(row_data.join(" "))
end
def json_encode(object)
Heroku::OkJson.encode(object)
rescue Heroku::OkJson::Error
nil
end
def json_decode(json)
Heroku::OkJson.decode(json)
rescue Heroku::OkJson::Error
nil
end
def set_buffer(enable)
with_tty do
if enable
`stty icanon echo`
else
`stty -icanon -echo`
end
end
end
def with_tty(&block)
return unless $stdin.isatty
begin
yield
rescue
# fails on windows
end
end
def get_terminal_environment
{ "TERM" => ENV["TERM"], "COLUMNS" => `tput cols`.strip, "LINES" => `tput lines`.strip }
rescue
{ "TERM" => ENV["TERM"] }
end
def fail(message)
raise Heroku::Command::CommandFailed, message
end
## DISPLAY HELPERS
def action(message, options={})
display("#{message}... ", false)
Heroku::Helpers.error_with_failure = true
ret = yield
Heroku::Helpers.error_with_failure = false
display((options[:success] || "done"), false)
display(", #{@status}", false) if @status
display
ret
end
def status(message)
@status = message
end
def format_with_bang(message)
return '' if message.to_s.strip == ""
" ! " + message.split("\n").join("\n ! ")
end
def output_with_bang(message="", new_line=true)
return if message.to_s.strip == ""
display(format_with_bang(message), new_line)
end
def error(message)
if Heroku::Helpers.error_with_failure
display("failed")
Heroku::Helpers.error_with_failure = false
end
$stderr.puts(format_with_bang(message))
exit(1)
end
def self.error_with_failure
@@error_with_failure ||= false
end
def self.error_with_failure=(new_error_with_failure)
@@error_with_failure = new_error_with_failure
end
def self.included_into
@@included_into ||= []
end
def self.extended_into
@@extended_into ||= []
end
def self.included(base)
included_into << base
end
def self.extended(base)
extended_into << base
end
def display_header(message="", new_line=true)
return if message.to_s.strip == ""
display("=== " + message.to_s.split("\n").join("\n=== "), new_line)
end
def display_object(object)
case object
when Array
# list of objects
object.each do |item|
display_object(item)
end
when Hash
# if all values are arrays, it is a list with headers
# otherwise it is a single header with pairs of data
if object.values.all? {|value| value.is_a?(Array)}
object.keys.sort_by {|key| key.to_s}.each do |key|
display_header(key)
display_object(object[key])
hputs
end
end
else
hputs(object.to_s)
end
end
def hputs(string='')
Kernel.puts(string)
end
def hprint(string='')
Kernel.print(string)
$stdout.flush
end
def spinner(ticks)
%w(/ - \\ |)[ticks % 4]
end
# produces a printf formatter line for an array of items
# if an individual line item is an array, it will create columns
# that are lined-up
#
# line_formatter(["foo", "barbaz"]) # => "%-6s"
# line_formatter(["foo", "barbaz"], ["bar", "qux"]) # => "%-3s %-6s"
#
def line_formatter(array)
if array.any? {|item| item.is_a?(Array)}
cols = []
array.each do |item|
if item.is_a?(Array)
item.each_with_index { |val,idx| cols[idx] = [cols[idx]||0, (val || '').length].max }
end
end
cols.map { |col| "%-#{col}s" }.join(" ")
else
"%s"
end
end
def styled_array(array, options={})
fmt = line_formatter(array)
array = array.sort unless options[:sort] == false
array.each do |element|
display((fmt % element).rstrip)
end
display
end
def styled_header(header)
display("=== #{header}")
end
def styled_hash(hash)
max_key_length = hash.keys.map {|key| key.to_s.length}.max + 2
hash.keys.sort {|x,y| x.to_s <=> y.to_s}.each do |key|
case value = hash[key]
when Array
if value.empty?
next
else
elements = value.sort {|x,y| x.to_s <=> y.to_s}
display("#{key}: ".ljust(max_key_length), false)
display(elements[0])
elements[1..-1].each do |element|
display("#{' ' * max_key_length}#{element}")
end
if elements.length > 1
display
end
end
when nil
next
else
display("#{key}: ".ljust(max_key_length), false)
display(value)
end
end
end
def string_distance(first, last)
distances = [] # 0x0s
0.upto(first.length) do |index|
distances << [index] + [0] * last.length
end
distances[0] = 0.upto(last.length).to_a
1.upto(last.length) do |last_index|
1.upto(first.length) do |first_index|
first_char = first[first_index - 1, 1]
last_char = last[last_index - 1, 1]
if first_char == last_char
distances[first_index][last_index] = distances[first_index - 1][last_index - 1] # noop
else
distances[first_index][last_index] = [
distances[first_index - 1][last_index], # deletion
distances[first_index][last_index - 1], # insertion
distances[first_index - 1][last_index - 1] # substitution
].min + 1 # cost
if first_index > 1 && last_index > 1
first_previous_char = first[first_index - 2, 1]
last_previous_char = last[last_index - 2, 1]
if first_char == last_previous_char && first_previous_char == last_char
distances[first_index][last_index] = [
distances[first_index][last_index],
distances[first_index - 2][last_index - 2] + 1 # transposition
].min
end
end
end
end
end
distances[first.length][last.length]
end
def suggestion(actual, possibilities)
distances = Hash.new {|hash,key| hash[key] = []}
possibilities.each do |suggestion|
distances[string_distance(actual, suggestion)] << suggestion
end
minimum_distance = distances.keys.min
if minimum_distance < 4
suggestions = distances[minimum_distance].sort
if suggestions.length == 1
"Perhaps you meant `#{suggestions.first}`."
else
"Perhaps you meant #{suggestions[0...-1].map {|suggestion| "`#{suggestion}`"}.join(', ')} or `#{suggestions.last}`."
end
else
nil
end
end
end
end
|
module Rollbar
extend Heroku::Helpers
def self.error(e)
payload = json_encode(build_payload(e))
response = Excon.post('https://api.rollbar.com/api/1/item/', :body => payload)
response = json_decode(response.body)
raise response if response["err"] != 0
response["result"]["uuid"]
rescue => e
$stderr.puts "Error submitting error."
error_log(e.message, e.backtrace.join("\n"))
nil
end
private
def self.build_payload(e)
if e.is_a? Exception
build_trace_payload(e)
else
build_message_payload(e.to_s)
end
end
def self.build_trace_payload(e)
payload = base_payload
payload[:data][:body] = {:trace => trace_from_exception(e)}
payload
end
def self.build_message_payload(message)
payload = base_payload
payload[:data][:body] = {:message => {:body => message}}
payload
end
def self.base_payload
{
:access_token => '488f0c3af3d6450cb5b5827c8099dbff',
:data => {
:platform => 'client',
:environment => 'production',
:code_version => Heroku::VERSION,
:client => { :platform => RUBY_PLATFORM },
:request => { :command => ARGV[0] }
}
}
end
def self.trace_from_exception(e)
{
:frames => frames_from_exception(e),
:exception => {
:class => e.class.to_s,
:message => e.message
}
}
end
def self.frames_from_exception(e)
e.backtrace.map do |line|
filename, lineno, method = line.scan(/(.+):(\d+):in `(.*)'/)[0]
{ :filename => filename, :lineno => lineno.to_i, :method => method }
end
end
end
added ruby version to rollbar
module Rollbar
extend Heroku::Helpers
def self.error(e)
payload = json_encode(build_payload(e))
response = Excon.post('https://api.rollbar.com/api/1/item/', :body => payload)
response = json_decode(response.body)
raise response if response["err"] != 0
response["result"]["uuid"]
rescue => e
$stderr.puts "Error submitting error."
error_log(e.message, e.backtrace.join("\n"))
nil
end
private
def self.build_payload(e)
if e.is_a? Exception
build_trace_payload(e)
else
build_message_payload(e.to_s)
end
end
def self.build_trace_payload(e)
payload = base_payload
payload[:data][:body] = {:trace => trace_from_exception(e)}
payload
end
def self.build_message_payload(message)
payload = base_payload
payload[:data][:body] = {:message => {:body => message}}
payload
end
def self.base_payload
{
:access_token => '488f0c3af3d6450cb5b5827c8099dbff',
:data => {
:platform => 'client',
:environment => 'production',
:code_version => Heroku::VERSION,
:client => { :platform => RUBY_PLATFORM, :ruby => RUBY_VERSION },
:request => { :command => ARGV[0] }
}
}
end
def self.trace_from_exception(e)
{
:frames => frames_from_exception(e),
:exception => {
:class => e.class.to_s,
:message => e.message
}
}
end
def self.frames_from_exception(e)
e.backtrace.map do |line|
filename, lineno, method = line.scan(/(.+):(\d+):in `(.*)'/)[0]
{ :filename => filename, :lineno => lineno.to_i, :method => method }
end
end
end
|
require 'heroku/helpers'
module Heroku
module Updater
def self.installed_client_path
File.expand_path("../../..", __FILE__)
end
def self.updated_client_path
File.join(Heroku::Helpers.home_directory, ".heroku", "client")
end
def self.latest_local_version
installed_version = client_version_from_path(installed_client_path)
updated_version = client_version_from_path(updated_client_path)
if compare_versions(updated_version, installed_version) > 0
updated_version
else
installed_version
end
end
def self.client_version_from_path(path)
version_file = File.join(path, "lib/heroku/version.rb")
if File.exists?(version_file)
File.read(version_file).match(/VERSION = "([^"]+)"/)[1]
else
'0.0.0'
end
end
def self.disable(message=nil)
@disable = message if message
@disable
end
def self.check_disabled!
error disable if disable
end
def self.update(url, autoupdate=false)
require "excon"
require "fileutils"
require "tmpdir"
require "zip/zip"
user_agent = "heroku-toolbelt/#{latest_local_version} (#{RUBY_PLATFORM}) ruby/#{RUBY_VERSION}"
if autoupdate
user_agent += ' autoupdate'
end
Dir.mktmpdir do |download_dir|
# follow redirect, if one exists
headers = Excon.head(
url,
:headers => {
'User-Agent' => user_agent
}
).headers
if headers['Location']
url = headers['Location']
end
File.open("#{download_dir}/heroku.zip", "wb") do |file|
file.print Excon.get(url).body
end
Zip::ZipFile.open("#{download_dir}/heroku.zip") do |zip|
zip.each do |entry|
target = File.join(download_dir, entry.to_s)
FileUtils.mkdir_p File.dirname(target)
zip.extract(entry, target) { true }
end
end
FileUtils.rm "#{download_dir}/heroku.zip"
old_version = latest_local_version
new_version = client_version_from_path(download_dir)
if compare_versions(new_version, old_version) < 0 && !autoupdate
Heroku::Helpers.error("Installed version (#{old_version}) is newer than the latest available update (#{new_version})")
end
FileUtils.rm_rf updated_client_path
FileUtils.mkdir_p File.dirname(updated_client_path)
FileUtils.cp_r download_dir, updated_client_path
new_version
end
end
def self.compare_versions(first_version, second_version)
first_version.split('.').map {|part| Integer(part) rescue part} <=> second_version.split('.').map {|part| Integer(part) rescue part}
end
# If a self-updated client is newer than the bundled install, prepend
# its lib directory and every vendored gem's lib to $LOAD_PATH, then
# reload this file so the newer updater code takes over. Also kicks
# off an opt-in background update check.
def self.inject_libpath
background_update!
old_version = client_version_from_path(installed_client_path)
new_version = client_version_from_path(updated_client_path)
if compare_versions(new_version, old_version) > 0
$:.unshift File.join(updated_client_path, "lib")
vendored_gems = Dir[File.join(updated_client_path, "vendor", "gems", "*")]
vendored_gems.each do |vendored_gem|
$:.unshift File.join(vendored_gem, "lib")
end
load('heroku/updater.rb') # reload updated updater
end
end
# Opt-in background update: when ~/.heroku/autoupdate exists, fork a
# child process that asks rubygems.org for the latest released version
# and installs it via update() if newer. Every error in the child is
# deliberately swallowed so a failed check never disturbs the
# foreground command.
def self.background_update!
if File.exists?(File.join(Heroku::Helpers.home_directory, ".heroku", "autoupdate"))
pid = fork do
begin
require "excon"
latest_version = Heroku::Helpers.json_decode(Excon.get('http://rubygems.org/api/v1/gems/heroku.json').body)['version']
if compare_versions(latest_version, latest_local_version) > 0
update("https://toolbelt.herokuapp.com/download/zip", true)
end
rescue Exception => ex
# trap all errors
ensure
@background_updating = false
end
end
# Detach so the child never lingers as a zombie; we do not wait on it.
Process.detach pid
end
end
end
end
Fix error output for the disabled updater: call Heroku::Helpers.error explicitly, since the bare `error` helper is not available at module scope.
require 'heroku/helpers'
module Heroku
# Self-update machinery for the Heroku CLI: locates the installed and
# self-updated client copies, compares versions, and downloads new
# releases, optionally in a forked background process.
module Updater
# Path of the client as originally installed (three dirs above here).
def self.installed_client_path
File.expand_path("../../..", __FILE__)
end
# Path where self-updates are unpacked (~/.heroku/client).
def self.updated_client_path
File.join(Heroku::Helpers.home_directory, ".heroku", "client")
end
# The newest client version available locally (installed vs updated).
def self.latest_local_version
installed_version = client_version_from_path(installed_client_path)
updated_version = client_version_from_path(updated_client_path)
if compare_versions(updated_version, installed_version) > 0
updated_version
else
installed_version
end
end
# Read the VERSION constant from lib/heroku/version.rb under +path+;
# '0.0.0' when the file does not exist.
def self.client_version_from_path(path)
version_file = File.join(path, "lib/heroku/version.rb")
if File.exists?(version_file)
File.read(version_file).match(/VERSION = "([^"]+)"/)[1]
else
'0.0.0'
end
end
# Setter/getter hybrid: pass a truthy message to disable updating;
# call with no argument to read the stored reason (nil when enabled).
def self.disable(message=nil)
@disable = message if message
@disable
end
# Abort with the disable reason when updating has been turned off.
def self.check_disabled!
if disable
Heroku::Helpers.error(disable)
end
end
# Download the zip at +url+ and install it as the self-updated client.
# Returns the new version string. +autoupdate+ tags the User-Agent and
# skips the downgrade guard.
def self.update(url, autoupdate=false)
# Lazy requires: only needed while actually updating.
require "excon"
require "fileutils"
require "tmpdir"
require "zip/zip"
user_agent = "heroku-toolbelt/#{latest_local_version} (#{RUBY_PLATFORM}) ruby/#{RUBY_VERSION}"
if autoupdate
user_agent += ' autoupdate'
end
Dir.mktmpdir do |download_dir|
# follow redirect, if one exists
headers = Excon.head(
url,
:headers => {
'User-Agent' => user_agent
}
).headers
if headers['Location']
url = headers['Location']
end
File.open("#{download_dir}/heroku.zip", "wb") do |file|
file.print Excon.get(url).body
end
Zip::ZipFile.open("#{download_dir}/heroku.zip") do |zip|
zip.each do |entry|
target = File.join(download_dir, entry.to_s)
FileUtils.mkdir_p File.dirname(target)
# The { true } block tells rubyzip to overwrite existing files.
zip.extract(entry, target) { true }
end
end
FileUtils.rm "#{download_dir}/heroku.zip"
old_version = latest_local_version
new_version = client_version_from_path(download_dir)
# Never downgrade on an explicit update request.
if compare_versions(new_version, old_version) < 0 && !autoupdate
Heroku::Helpers.error("Installed version (#{old_version}) is newer than the latest available update (#{new_version})")
end
FileUtils.rm_rf updated_client_path
FileUtils.mkdir_p File.dirname(updated_client_path)
FileUtils.cp_r download_dir, updated_client_path
new_version
end
end
# Segment-wise version comparison; numeric segments compare as
# integers, unparsable ones as strings.
def self.compare_versions(first_version, second_version)
first_version.split('.').map {|part| Integer(part) rescue part} <=> second_version.split('.').map {|part| Integer(part) rescue part}
end
# Prefer a newer self-updated client by prepending its lib dirs to the
# load path, then reload this file so the new updater code wins.
def self.inject_libpath
background_update!
old_version = client_version_from_path(installed_client_path)
new_version = client_version_from_path(updated_client_path)
if compare_versions(new_version, old_version) > 0
$:.unshift File.join(updated_client_path, "lib")
vendored_gems = Dir[File.join(updated_client_path, "vendor", "gems", "*")]
vendored_gems.each do |vendored_gem|
$:.unshift File.join(vendored_gem, "lib")
end
load('heroku/updater.rb') # reload updated updater
end
end
# Opt-in (~/.heroku/autoupdate) background check: fork a child that
# asks rubygems.org for the latest release and installs it if newer.
# Errors in the child are deliberately swallowed.
def self.background_update!
if File.exists?(File.join(Heroku::Helpers.home_directory, ".heroku", "autoupdate"))
pid = fork do
begin
require "excon"
latest_version = Heroku::Helpers.json_decode(Excon.get('http://rubygems.org/api/v1/gems/heroku.json').body)['version']
if compare_versions(latest_version, latest_local_version) > 0
update("https://toolbelt.herokuapp.com/download/zip", true)
end
rescue Exception => ex
# trap all errors
ensure
@background_updating = false
end
end
# Detach so the child never lingers as a zombie.
Process.detach pid
end
end
end
end
|
require_relative 'three_dimensional'
module Hivegame
# A single cell on the board; holds at most one bug.
class Hex
  attr_accessor :bug

  def initialize
    @bug = nil
  end

  # A hex is occupied exactly when a bug has been placed on it.
  def occupied?
    !@bug.nil?
  end
end
# A Hive board is a three-dimensional matrix of hexagonal cells,
# which are identified by a 3-tuple of coordinates.
class Board
include ThreeDimensional
# A board can enumerate *occupied* hexes
include Enumerable
# Coordinate of the board's starting hex.
ORIGIN = [0,0,0]
# To conserve memory, the internal representation of the
# `@board` is a hash mapping coordinates to hexes. An
# array would be mostly empty, wasting memory. We also
# maintain an undirected graph, '@hive', because graphs excel
# at answering certain questions.
def initialize
@board = {ORIGIN => Hex.new}
@hive = Hive.new
end
# Place +bug+ at +point+. Returns true on success, false when the
# point is unsupported, already occupied, or the placement would
# disconnect the hive.
def add(point, bug)
validate_point(point)
return false unless supported_point?(point)
return false if hex(point).occupied?
return false unless add_to_hive_if_connected(point, bug)
@board[point].bug = bug
return true
end
# Tentatively add +bug+ to the hive graph, wiring edges to all
# occupied neighbors; rolls the vertex back and returns false if the
# hive would become disconnected. An empty board accepts the first
# bug unconditionally.
def add_to_hive_if_connected point, bug
@hive.add_vertex(bug)
unless empty?
occupied_neighbor_hexes(point).each do |n|
@hive.add_edge(n.bug, bug)
end
unless @hive.connected?
@hive.remove_vertex(bug)
return false
end
end
return true
end
# `to_ascii` returns a textual representation of the board
def to_ascii
lines = []
cols = col_count
min_row.upto(max_row) do |row|
line = "%03d:" % row # left-padded row number
(cols - row).times {line << ' '}
min_col.upto(max_col) do |col|
line << (hex([row,col,0]).bug || '.').to_s
line << ' '
end
lines << line
end
lines.join "\n"
end
# `each` enumerates occupied hexes
def each
occupied_hexes.each { |hex| yield hex }
end
# True when no hex holds a bug; `count` comes from Enumerable and
# walks the occupied hexes.
def empty?
count == 0
end
# `hex` returns the hex at the specified `point`,
# creating a hex if none exists.
def hex point
validate_point(point)
@board[point] = Hex.new if @board[point].nil?
@board[point]
end
# The eight candidate neighbor coordinates of +point+: six in-plane
# hex neighbors plus the cells directly above and below.
def neighbors point
validate_point(point)
row, col, height = point[0], point[1], point[2]
offsets = [[-1,-1,0], [-1,0,0], [0,-1,0], [0,1,0], [1,0,0], \
[1,1,0], [0,0,1], [0,0,-1]]
offsets.map do |r,c,h|
[row+r, col+c, height+h]
end
end
# point => hex entries for every occupied cell.
def occupied_hexes
return @board.select { |point, hex| hex.occupied? }
end
# Hexes adjacent to +point+ that currently hold a bug.
def occupied_neighbor_hexes point
neighbors(point).map{|n| hex(n)}.select{|n| n.occupied?}
end
private
# NOTE(review): this is max - min, i.e. one less than the number of
# distinct columns; to_ascii only uses it for indentation -- confirm
# before reusing it as a true count.
def col_count
max_col - min_col
end
def col_numbers
@board.map{|k,v| k[1]}
end
# `supported_point?` returns true if `point` is resting on
# the table or if the hex below `point` is occupied.
def supported_point? point
r,c,h = point[0], point[1], point[2]
h == 0 || hex([r,c,h-1]).occupied?
end
def min_row
row_numbers.min
end
def max_row
row_numbers.max
end
def min_col
col_numbers.min
end
def max_col
col_numbers.max
end
def row_numbers
@board.map{|k,v| k[0]}
end
end
end
Semantic naming: use descriptive block parameter names (point, bug) instead of k, v in Board's coordinate helpers.
require_relative 'three_dimensional'
module Hivegame
# One board cell; stores the bug occupying it, if any.
class Hex
  attr_accessor :bug

  def initialize
    @bug = nil
  end

  # True once a bug has been assigned to this cell.
  def occupied?
    !@bug.nil?
  end
end
# A Hive board is a three-dimensional matrix of hexagonal cells,
# which are identified by a 3-tuple of coordinates.
class Board
include ThreeDimensional
# A board can enumerate *occupied* hexes
include Enumerable
# Coordinate of the board's starting hex.
ORIGIN = [0,0,0]
# To conserve memory, the internal representation of the
# `@board` is a hash mapping coordinates to hexes. An
# array would be mostly empty, wasting memory. We also
# maintain an undirected graph, '@hive', because graphs excel
# at answering certain questions.
def initialize
@board = {ORIGIN => Hex.new}
@hive = Hive.new
end
# Place +bug+ at +point+. Returns true on success, false when the
# point is unsupported, already occupied, or the placement would
# disconnect the hive.
def add(point, bug)
validate_point(point)
return false unless supported_point?(point)
return false if hex(point).occupied?
return false unless add_to_hive_if_connected(point, bug)
@board[point].bug = bug
return true
end
# Tentatively add +bug+ to the hive graph, wiring edges to all
# occupied neighbors; rolls the vertex back and returns false if the
# hive would become disconnected. An empty board accepts the first
# bug unconditionally.
def add_to_hive_if_connected point, bug
@hive.add_vertex(bug)
unless empty?
occupied_neighbor_hexes(point).each do |n|
@hive.add_edge(n.bug, bug)
end
unless @hive.connected?
@hive.remove_vertex(bug)
return false
end
end
return true
end
# `to_ascii` returns a textual representation of the board
def to_ascii
lines = []
cols = col_count
min_row.upto(max_row) do |row|
line = "%03d:" % row # left-padded row number
(cols - row).times {line << ' '}
min_col.upto(max_col) do |col|
line << (hex([row,col,0]).bug || '.').to_s
line << ' '
end
lines << line
end
lines.join "\n"
end
# `each` enumerates occupied hexes
def each
occupied_hexes.each { |hex| yield hex }
end
# True when no hex holds a bug; `count` comes from Enumerable and
# walks the occupied hexes.
def empty?
count == 0
end
# `hex` returns the hex at the specified `point`,
# creating a hex if none exists.
def hex point
validate_point(point)
@board[point] = Hex.new if @board[point].nil?
@board[point]
end
# The eight candidate neighbor coordinates of +point+: six in-plane
# hex neighbors plus the cells directly above and below.
def neighbors point
validate_point(point)
row, col, height = point[0], point[1], point[2]
offsets = [[-1,-1,0], [-1,0,0], [0,-1,0], [0,1,0], [1,0,0], \
[1,1,0], [0,0,1], [0,0,-1]]
offsets.map do |r,c,h|
[row+r, col+c, height+h]
end
end
# point => hex entries for every occupied cell.
def occupied_hexes
return @board.select { |point, hex| hex.occupied? }
end
# Hexes adjacent to +point+ that currently hold a bug.
def occupied_neighbor_hexes point
neighbors(point).map{|n| hex(n)}.select{|n| n.occupied?}
end
private
# NOTE(review): this is max - min, i.e. one less than the number of
# distinct columns; to_ascii only uses it for indentation -- confirm
# before reusing it as a true count.
def col_count
max_col - min_col
end
# NOTE(review): the second block parameter is a Hex, not a bug,
# despite the name.
def col_numbers
@board.map{|point, bug| point[1]}
end
# `supported_point?` returns true if `point` is resting on
# the table or if the hex below `point` is occupied.
def supported_point? point
r,c,h = point[0], point[1], point[2]
h == 0 || hex([r,c,h-1]).occupied?
end
def min_row
row_numbers.min
end
def max_row
row_numbers.max
end
def min_col
col_numbers.min
end
def max_col
col_numbers.max
end
def row_numbers
@board.map{|point, bug| point[0]}
end
end
end
|
require 'uri'
require File.expand_path(File.join(File.dirname(__FILE__), "..", "support", "paths"))
# Step-definition helper mixed into the Cucumber World.
module WithinHelpers
  # Run the block inside Capybara's within(locator) scope when a
  # locator is given; otherwise run it against the whole page.
  def with_scope(locator)
    if locator
      within(locator) { yield }
    else
      yield
    end
  end
end
# Make with_scope available inside every step definition.
World(WithinHelpers)
# Click an anchor containing the given text, optionally within a scope.
When /^I click text "([^"]*)"(?: within "([^\"]*)")?$/ do |text_value, selector|
with_scope(selector) do
page.find('//a', :text => text_value).click
end
end
# The labeled form field must carry the disabled attribute.
Then /^the "([^\"]*)" field should be disabled$/ do |label|
field_labeled(label)[:disabled].should be_true
end
# Seed Device records from the scenario's table.
Given /^devices exist$/ do |devices|
devices.hashes.each do |device_hash|
device = Device.new(:imei => device_hash[:imei], :blacklisted => device_hash[:blacklisted], :user_name => device_hash[:user_name])
device.save!
end
end
# Every listed attribute must be present as a form field.
Then /^I should find the form with following attributes:$/ do |table|
table.raw.flatten.each do |attribute|
page.should have_field(attribute)
end
end
# Toggle the user's disabled checkbox, then confirm the dialog.
When /^I uncheck the disabled checkbox for user "([^"]*)"$/ do |username|
page.find("//tr[@id='user-row-#{username}']/td/input[@type='checkbox']").click
click_button("Yes")
end
# Assert presence/absence of an element with the given DOM id. The
# +element+ capture is only part of the step's wording; the id alone
# drives the lookup. Debug `puts` output removed -- it cluttered every
# test run without adding information.
Then /^I should (not )?see "([^\"]*)" with id "([^\"]*)"$/ do |do_not_want, element, id|
  should = do_not_want ? :should_not : :should
  page.send(should, have_css("##{id}"))
end
# Tick the blacklist checkbox for the device with this imei.
And /^I check the device with an imei of "([^\"]*)"$/ do |imei_number|
find(:css, ".blacklisted-checkbox-#{imei_number}").set(true)
end
# Row lookup by user id must succeed...
Then /^user "([^\"]*)" should exist on the page$/ do |full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]") }.should_not raise_error(Capybara::ElementNotFound)
end
# ...or must raise ElementNotFound.
Then /^user "([^\"]*)" should not exist on the page$/ do |full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]") }.should raise_error(Capybara::ElementNotFound)
end
# Scope content assertions to the grandparent element of the record's
# name div.
Then /^I should not see "([^\"]*)" for record "([^\"]*)"$/ do |text, full_name|
page.find(:xpath, "//div[text()=\"#{full_name}\"]/parent::*/parent::*").should_not have_content(text);
end
Then /^I should see "([^\"]*)" for record "([^\"]*)"$/ do |text, full_name|
page.find(:xpath, "//div[text()=\"#{full_name}\"]/parent::*/parent::*").should have_content(text);
end
# A link with this exact text must exist inside the user's row.
Then /^I should see "([^\"]*)" for "([^\"]*)"$/ do |link, full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]/td/a[text()=\"#{link}\"]") }.should_not raise_error(Capybara::ElementNotFound)
end
Then /^I should not see "([^\"]*)" for "([^\"]*)"$/ do |link, full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]/td/a[text()=\"#{link}\"]") }.should raise_error(Capybara::ElementNotFound)
end
# The select element must offer exactly these options.
Then /^the field "([^"]*)" should have the following options:$/ do |locator, table|
page.should have_select(locator, :options => table.raw.flatten)
end
# An anchor pointing at the named page must exist.
Then /^(?:|I )should see a link to the (.+)$/ do |page_name|
page.find(:xpath, "//a[@href=\"#{path_to(page_name)}\"] ")
end
# Authorization check: the page must respond 403.
Then /^I should not be able to see (.+)$/ do |page_name|
visit path_to(page_name)
page.status_code.should == 403
end
# Navigate there and confirm arrival via the shared path steps.
Then /^I should be able to see (.+)$/ do |page_name|
step "I go to #{page_name}"
step "I should be on #{page_name}"
end
# The status checkbox in the user's row reflects disabled/enabled.
And /^the user "([^\"]*)" should be marked as (disabled|enabled)$/ do |username, status|
disbled_checkbox = find(:css, "#user-row-#{username} td.user-status input")
if status == "disabled"
disbled_checkbox.should be_checked
else
disbled_checkbox.should_not be_checked
end
end
# NOTE(review): the filename capture is unused; only the source path
# prefix is checked.
Then /^I should see an audio element that can play the audio file named "([^"]*)"$/ do |filename|
page.body.should have_selector("//audio/source", :src=>current_path + "/audio/")
end
Then /^I should not see an audio tag$/ do
page.body.should_not have_selector("//audio")
end
# Tabs are plain links here.
When /^I visit the "([^"]*)" tab$/ do |name_of_tab|
click_link name_of_tab
end
# Parse the rendered radio group and compare its labels to the table.
Then /^the "([^"]*)" radio_button should have the following options:$/ do |radio_button, table|
radio = Nokogiri::HTML(page.body).css("p##{radio_button.downcase.gsub(" ", "")}")
radio.should_not be_nil
table.raw.each { |row| radio.css("label").map(&:text).should include row.first }
end
# Check both the option list and the current selection.
Then /^the "([^"]*)" dropdown should have the following options:$/ do |dropdown_label, table|
options = table.hashes
page.has_select?(dropdown_label, :options => options.collect{|element| element['label']},
:selected => options.collect{|element| element['label'] if element['selected?'] == 'yes'}.compact!)
end
# Each label => named-path row must have a matching anchor.
Then /^I should find the following links:$/ do |table|
table.rows_hash.each do |label, named_path|
href = path_to(named_path)
page.should have_xpath "//a[@href='#{href}' and text()='#{label}']" end
end
# Compare each rendered child-attribute checkbox's checked state
# against the expectation table.
Then /^the "([^"]*)" checkboxes should have the following options:$/ do |checkbox_name, table|
checkbox_label = page.find "//label[contains(., '#{checkbox_name}')]"
checkbox_id = checkbox_label["for"].split("_").last
checkbox_elements = Nokogiri::HTML(page.body).css("input[type='checkbox'][name='child[#{checkbox_id}][]']")
# Map each checkbox value to whether it is currently checked.
checkboxes = checkbox_elements.inject({}) do | result, element |
result[element['value']] = !!element[:checked]
result
end
# NOTE(review): debug output -- consider removing.
puts "checkboxes #{checkboxes}"
table.hashes.each do |expected_checkbox|
expected_value = expected_checkbox['value']
# NOTE(review): debug output -- consider removing.
puts "expected_value #{expected_value}"
should_be_checked = (expected_checkbox['checked?'] == 'yes')
checkboxes.should have_key expected_value
checkboxes[expected_value].should == should_be_checked
end
end
# Check the child attribute checkbox matching the label and value.
When /^I check "([^"]*)" for "([^"]*)"$/ do |value, checkbox_name|
label = page.find '//label', :text => checkbox_name
checkbox_id = label["for"].split("_").last
page.check("child_#{checkbox_id}_#{value.dehumanize}")
end
puts statements removed where not required
require 'uri'
require File.expand_path(File.join(File.dirname(__FILE__), "..", "support", "paths"))
# Scoping helper shared by the step definitions below.
module WithinHelpers
  # Yield inside within(locator) when a locator is supplied; with no
  # locator the block runs against the whole page.
  def with_scope(locator)
    return yield unless locator
    within(locator) { yield }
  end
end
# Make with_scope available inside every step definition.
World(WithinHelpers)
# Click an anchor containing the given text, optionally within a scope.
When /^I click text "([^"]*)"(?: within "([^\"]*)")?$/ do |text_value, selector|
with_scope(selector) do
page.find('//a', :text => text_value).click
end
end
# The labeled form field must carry the disabled attribute.
Then /^the "([^\"]*)" field should be disabled$/ do |label|
field_labeled(label)[:disabled].should be_true
end
# Seed Device records from the scenario's table.
Given /^devices exist$/ do |devices|
devices.hashes.each do |device_hash|
device = Device.new(:imei => device_hash[:imei], :blacklisted => device_hash[:blacklisted], :user_name => device_hash[:user_name])
device.save!
end
end
# Every listed attribute must be present as a form field.
Then /^I should find the form with following attributes:$/ do |table|
table.raw.flatten.each do |attribute|
page.should have_field(attribute)
end
end
# Toggle the user's disabled checkbox, then confirm the dialog.
When /^I uncheck the disabled checkbox for user "([^"]*)"$/ do |username|
page.find("//tr[@id='user-row-#{username}']/td/input[@type='checkbox']").click
click_button("Yes")
end
# Assert presence/absence of an element by DOM id (the element-name
# capture is only part of the step wording).
Then /^I should (not )?see "([^\"]*)" with id "([^\"]*)"$/ do |do_not_want, element, id|
should = do_not_want ? :should_not : :should
page.send(should, have_css("##{id}"))
end
# Tick the blacklist checkbox for the device with this imei.
And /^I check the device with an imei of "([^\"]*)"$/ do |imei_number|
find(:css, ".blacklisted-checkbox-#{imei_number}").set(true)
end
# Row lookup by user id must succeed...
Then /^user "([^\"]*)" should exist on the page$/ do |full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]") }.should_not raise_error(Capybara::ElementNotFound)
end
# ...or must raise ElementNotFound.
Then /^user "([^\"]*)" should not exist on the page$/ do |full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]") }.should raise_error(Capybara::ElementNotFound)
end
# Scope content assertions to the grandparent element of the record's
# name div.
Then /^I should not see "([^\"]*)" for record "([^\"]*)"$/ do |text, full_name|
page.find(:xpath, "//div[text()=\"#{full_name}\"]/parent::*/parent::*").should_not have_content(text);
end
Then /^I should see "([^\"]*)" for record "([^\"]*)"$/ do |text, full_name|
page.find(:xpath, "//div[text()=\"#{full_name}\"]/parent::*/parent::*").should have_content(text);
end
# A link with this exact text must exist inside the user's row.
Then /^I should see "([^\"]*)" for "([^\"]*)"$/ do |link, full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]/td/a[text()=\"#{link}\"]") }.should_not raise_error(Capybara::ElementNotFound)
end
Then /^I should not see "([^\"]*)" for "([^\"]*)"$/ do |link, full_name|
lambda { page.find(:xpath, "//tr[@id=\"user-row-#{full_name}\"]/td/a[text()=\"#{link}\"]") }.should raise_error(Capybara::ElementNotFound)
end
# The select element must offer exactly these options.
Then /^the field "([^"]*)" should have the following options:$/ do |locator, table|
page.should have_select(locator, :options => table.raw.flatten)
end
# An anchor pointing at the named page must exist.
Then /^(?:|I )should see a link to the (.+)$/ do |page_name|
page.find(:xpath, "//a[@href=\"#{path_to(page_name)}\"] ")
end
# Authorization check: the page must respond 403.
Then /^I should not be able to see (.+)$/ do |page_name|
visit path_to(page_name)
page.status_code.should == 403
end
# Navigate there and confirm arrival via the shared path steps.
Then /^I should be able to see (.+)$/ do |page_name|
step "I go to #{page_name}"
step "I should be on #{page_name}"
end
# The status checkbox in the user's row reflects disabled/enabled.
And /^the user "([^\"]*)" should be marked as (disabled|enabled)$/ do |username, status|
disbled_checkbox = find(:css, "#user-row-#{username} td.user-status input")
if status == "disabled"
disbled_checkbox.should be_checked
else
disbled_checkbox.should_not be_checked
end
end
# NOTE(review): the filename capture is unused; only the source path
# prefix is checked.
Then /^I should see an audio element that can play the audio file named "([^"]*)"$/ do |filename|
page.body.should have_selector("//audio/source", :src=>current_path + "/audio/")
end
Then /^I should not see an audio tag$/ do
page.body.should_not have_selector("//audio")
end
# Tabs are plain links here.
When /^I visit the "([^"]*)" tab$/ do |name_of_tab|
click_link name_of_tab
end
# Parse the rendered radio group and compare its labels to the table.
Then /^the "([^"]*)" radio_button should have the following options:$/ do |radio_button, table|
radio = Nokogiri::HTML(page.body).css("p##{radio_button.downcase.gsub(" ", "")}")
radio.should_not be_nil
table.raw.each { |row| radio.css("label").map(&:text).should include row.first }
end
# Check both the option list and the current selection.
Then /^the "([^"]*)" dropdown should have the following options:$/ do |dropdown_label, table|
options = table.hashes
page.has_select?(dropdown_label, :options => options.collect{|element| element['label']},
:selected => options.collect{|element| element['label'] if element['selected?'] == 'yes'}.compact!)
end
# Each label => named-path row must have a matching anchor.
Then /^I should find the following links:$/ do |table|
table.rows_hash.each do |label, named_path|
href = path_to(named_path)
page.should have_xpath "//a[@href='#{href}' and text()='#{label}']" end
end
# Compare each rendered child-attribute checkbox's checked state
# against the expectation table. Leftover debug `puts` removed, in
# line with this file's earlier cleanup of unneeded puts statements.
Then /^the "([^"]*)" checkboxes should have the following options:$/ do |checkbox_name, table|
  checkbox_label = page.find "//label[contains(., '#{checkbox_name}')]"
  checkbox_id = checkbox_label["for"].split("_").last
  checkbox_elements = Nokogiri::HTML(page.body).css("input[type='checkbox'][name='child[#{checkbox_id}][]']")
  # Map each checkbox value to whether it is currently checked.
  checkboxes = checkbox_elements.inject({}) do |result, element|
    result[element['value']] = !!element[:checked]
    result
  end
  table.hashes.each do |expected_checkbox|
    expected_value = expected_checkbox['value']
    should_be_checked = (expected_checkbox['checked?'] == 'yes')
    checkboxes.should have_key expected_value
    checkboxes[expected_value].should == should_be_checked
  end
end
# Check the child-attribute checkbox identified by the label's "for"
# suffix and the (dehumanized) value.
When /^I check "([^"]*)" for "([^"]*)"$/ do |value, checkbox_name|
  matching_label = page.find '//label', :text => checkbox_name
  field_id = matching_label["for"].split("_").last
  page.check("child_#{field_id}_#{value.dehumanize}")
end
|
module Hivegame
  # Placeholder for a single board cell.
  class Hex
  end

  class Board
    # Build a rows x cols grid, one fresh Hex per cell.
    def initialize(rows=9, cols=9)
      @rows, @cols = rows, cols
      @board = Array.new(@rows) do |row|
        Array.new(@cols) do |col|
          Hex.new
        end
      end
    end

    # Hex-grid distance between two [row, col] axial coordinates,
    # consistent with the six neighbor offsets used in #neighbors.
    # Previously #draw called `distance` without it being defined
    # anywhere, so #draw always raised NoMethodError.
    def distance(a, b)
      dr = a[0] - b[0]
      dc = a[1] - b[1]
      [dr.abs, dc.abs, (dr - dc).abs].max
    end

    # Print the board to the console, showing each cell's hex distance
    # from [4,4], with rows staggered by indentation.
    def draw
      @rows.times do |row|
        line = ''
        line << "#{row}:"
        (@cols - row).times { line << ' ' }
        @cols.times do |col|
          line << (distance([4,4], [row,col]) || 'X').to_s
          line << ' '
        end
        puts line
      end
    end

    # NOTE(review): hard-coded; no bug placement exists yet.
    def empty?
      true
    end

    # The six axial neighbor coordinates of [row, col].
    def neighbors(row, col)
      [[-1,-1],[-1,0],[0,-1],[0,1],[1,0],[1,1]].map do |r, c|
        [row+r, col+c]
      end
    end

    # Row accessor: board[row] is the Array of Hexes in that row.
    def [](row)
      @board[row]
    end
  end
end
fix up draw method to correctly pad row numbers.
module Hivegame
  # A single board cell (no state yet).
  class Hex
  end

  class Board
    # Allocate a rows x cols grid with one fresh Hex per cell.
    def initialize(rows=20, cols=20)
      @rows, @cols = rows, cols
      @board = Array.new(@rows) { Array.new(@cols) { Hex.new } }
    end

    # Print an ASCII view: each row gets a zero-padded three-digit
    # label, an indent that staggers the hex rows, then one '. ' per
    # column.
    def draw
      @rows.times do |row|
        padding = ' ' * [@cols - row, 0].max
        cells = '. ' * @cols
        puts "%03d:" % row + padding + cells
      end
    end

    # No bug placement exists yet, so the board is always empty.
    def empty?
      true
    end

    # The six axial neighbor coordinates of [row, col].
    def neighbors(row, col)
      offsets = [[-1,-1],[-1,0],[0,-1],[0,1],[1,0],[1,1]]
      offsets.map { |dr, dc| [row + dr, col + dc] }
    end

    # Row accessor: board[row] is the Array of Hexes in that row.
    def [](row)
      @board[row]
    end
  end
end
|
#--
# Copyright (c) 2010-2011 Engine Yard, Inc.
# Copyright (c) 2007-2009 Sun Microsystems, Inc.
# This source code is available under the MIT license.
# See the file LICENSE.txt for details.
#++
require File.expand_path('../../spec_helper', __FILE__)
describe Warbler::Jar do
use_fresh_rake_application
use_fresh_environment
# Names of files inside the jar whose paths match +regex+.
def file_list(regex)
  jar.files.keys.grep(regex)
end
# Stash a block that apply_extra_config will later run against the
# Warbler::Config under test.
def use_config(&config_tweaks)
  @extra_config = config_tweaks
end
# Run the block stashed by use_config against +config+, if one was
# registered; otherwise do nothing.
def apply_extra_config(config)
  @extra_config && @extra_config.call(config)
end
let(:config) { Warbler::Config.new {|c| apply_extra_config(c) } }
let(:jar) { Warbler::Jar.new }
context "in a jar project" do
run_in_directory "spec/sample_jar"
cleanup_temp_files
it "detects a Jar trait" do
config.traits.should include(Warbler::Traits::Jar)
end
it "collects java libraries" do
jar.apply(config)
file_list(%r{^META-INF/lib/jruby-.*\.jar$}).should_not be_empty
end
it "adds a JarMain class" do
jar.apply(config)
file_list(%r{^JarMain\.class$}).should_not be_empty
end
it "adds an init.rb" do
jar.apply(config)
file_list(%r{^META-INF/init.rb$}).should_not be_empty
end
it "requires 'rubygems' in init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.split("\n").grep(/require 'rubygems'/).should_not be_empty
end
it "adds ENV['GEM_HOME'] to init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /ENV\['GEM_HOME'\]/
end
it "adds a main.rb" do
jar.apply(config)
file_list(%r{^META-INF/main.rb$}).should_not be_empty
end
it "accepts a custom manifest file" do
touch 'manifest'
use_config do |config|
config.manifest_file = 'manifest'
end
jar.apply(config)
jar.files['META-INF/MANIFEST.MF'].should == "manifest"
end
it "accepts a MANIFEST.MF file if it exists in the project root" do
touch 'MANIFEST.MF'
jar.apply(config)
jar.files['META-INF/MANIFEST.MF'].should == "MANIFEST.MF"
end
it "does not add a manifest if one already exists" do
jar.files['META-INF/MANIFEST.MF'] = 'manifest'
jar.add_manifest(config)
jar.files['META-INF/MANIFEST.MF'].should == "manifest"
end
context "with a .gemspec" do
it "detects a Gemspec trait" do
config.traits.should include(Warbler::Traits::Gemspec)
end
it "detects gem dependencies" do
jar.apply(config)
file_list(%r{^gems/rubyzip.*/lib/zip/zip.rb}).should_not be_empty
file_list(%r{^specifications/rubyzip.*\.gemspec}).should_not be_empty
end
it "sets load paths in init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.split("\n").grep(/LOAD_PATH\.unshift.*sample_jar\/lib/).should_not be_empty
end
it "loads the default executable in main.rb" do
jar.apply(config)
contents = jar.contents('META-INF/main.rb')
contents.split("\n").grep(/load.*sample_jar\/bin\/sample_jar/).should_not be_empty
end
it "includes compiled .rb and .class files" do
config.compiled_ruby_files = %w(lib/sample_jar.rb)
jar.compile(config)
jar.apply(config)
file_list(%r{^sample_jar/lib/sample_jar\.class$}).should_not be_empty
jar.contents('sample_jar/lib/sample_jar.rb').should =~ /require __FILE__\.sub/
end
end
context "with a gemspec without a default executable" do
before :each do
Dir['*.gemspec'].each do |f|
cp f, "#{f}.tmp"
lines = IO.readlines(f)
File.open(f, 'w') do |io|
lines.each do |line|
next if line =~ /executable/
io << line
end
end
end
end
after :each do
Dir['*.gemspec.tmp'].each {|f| mv f, "#{f.sub /\.tmp$/, ''}"}
end
it "loads the first bin/executable in main.rb" do
silence { jar.apply(config) }
contents = jar.contents('META-INF/main.rb')
contents.split("\n").grep(/load.*sample_jar\/bin\/sample_jar/).should_not be_empty
end
end
context "without a .gemspec" do
before :each do
Dir['*.gemspec'].each {|f| mv f, "#{f}.tmp"}
end
after :each do
Dir['*.gemspec.tmp'].each {|f| mv f, "#{f.sub /\.tmp$/, ''}"}
end
it "detects a NoGemspec trait" do
config.traits.should include(Warbler::Traits::NoGemspec)
end
it "collects gem files from configuration" do
use_config do |config|
config.gems << "rake"
end
jar.apply(config)
file_list(%r{^gems/rake.*/lib/rake.rb}).should_not be_empty
file_list(%r{^specifications/rake.*\.gemspec}).should_not be_empty
end
it "collects all project files in the directory" do
touch "extra.foobar"
jar.apply(config)
file_list(%r{^sample_jar/bin$}).should_not be_empty
file_list(%r{^sample_jar/test$}).should_not be_empty
file_list(%r{^sample_jar/lib/sample_jar.rb$}).should_not be_empty
file_list(%r{^sample_jar/extra\.foobar$}).should_not be_empty
end
it "sets load paths in init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.split("\n").grep(/LOAD_PATH\.unshift.*sample_jar\/lib/).should_not be_empty
end
it "loads the first bin/executable in main.rb" do
jar.apply(config)
contents = jar.contents('META-INF/main.rb')
contents.split("\n").grep(/load.*sample_jar\/bin\/sample_jar/).should_not be_empty
end
end
end
context "in a war project" do
run_in_directory "spec/sample_war"
cleanup_temp_files
before(:each) do
mkdir_p "log"
touch "log/test.log"
end
it "detects a War trait" do
config.traits.should include(Warbler::Traits::War)
end
it "collects files in public" do
jar.apply(config)
file_list(%r{^index\.html}).should_not be_empty
end
it "collects gem files" do
use_config do |config|
config.gems << "rake"
end
jar.apply(config)
file_list(%r{WEB-INF/gems/gems/rake.*/lib/rake.rb}).should_not be_empty
file_list(%r{WEB-INF/gems/specifications/rake.*\.gemspec}).should_not be_empty
end
it "adds ENV['GEM_HOME'] to init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /ENV\['GEM_HOME'\]/
contents.should =~ /WEB-INF\/gems/
end
it "does not include log files by default" do
jar.apply(config)
file_list(%r{WEB-INF/log}).should_not be_empty
file_list(%r{WEB-INF/log/.*\.log}).should be_empty
end
# Apply the config, assert that a web.xml was generated, and return
# the parsed document's root elements for XPath-style assertions.
def expand_webxml
jar.apply(config)
jar.files.should include("WEB-INF/web.xml")
require 'rexml/document'
REXML::Document.new(jar.files["WEB-INF/web.xml"]).root.elements
end
it "creates a web.xml file" do
use_config do |config|
config.webxml.jruby.max.runtimes = 5
end
elements = expand_webxml
elements.to_a(
"context-param/param-name[text()='jruby.max.runtimes']"
).should_not be_empty
elements.to_a(
"context-param/param-name[text()='jruby.max.runtimes']/../param-value"
).first.text.should == "5"
end
it "includes custom context parameters in web.xml" do
use_config do |config|
config.webxml.some.custom.config = "myconfig"
end
elements = expand_webxml
elements.to_a(
"context-param/param-name[text()='some.custom.config']"
).should_not be_empty
elements.to_a(
"context-param/param-name[text()='some.custom.config']/../param-value"
).first.text.should == "myconfig"
end
it "allows one jndi resource to be included" do
use_config do |config|
config.webxml.jndi = 'jndi/rails'
end
elements = expand_webxml
elements.to_a(
"resource-ref/res-ref-name[text()='jndi/rails']"
).should_not be_empty
end
it "allows multiple jndi resources to be included" do
use_config do |config|
config.webxml.jndi = ['jndi/rails1', 'jndi/rails2']
end
elements = expand_webxml
elements.to_a(
"resource-ref/res-ref-name[text()='jndi/rails1']"
).should_not be_empty
elements.to_a(
"resource-ref/res-ref-name[text()='jndi/rails2']"
).should_not be_empty
end
it "does not include any ignored context parameters" do
use_config do |config|
config.webxml.foo = "bar"
config.webxml.ignored << "foo"
end
elements = expand_webxml
elements.to_a(
"context-param/param-name[text()='foo']"
).should be_empty
elements.to_a(
"context-param/param-name[text()='ignored']"
).should be_empty
elements.to_a(
"context-param/param-name[text()='jndi']"
).should be_empty
end
it "uses a config/web.xml if it exists" do
mkdir_p "config"
touch "config/web.xml"
jar.apply(config)
jar.files["WEB-INF/web.xml"].should == "config/web.xml"
end
it "uses a config/web.xml.erb if it exists" do
mkdir_p "config"
File.open("config/web.xml.erb", "w") {|f| f << "Hi <%= webxml.public.root %>" }
jar.apply(config)
jar.files["WEB-INF/web.xml"].should_not be_nil
jar.files["WEB-INF/web.xml"].read.should == "Hi /"
end
it "collects java libraries" do
jar.apply(config)
file_list(%r{WEB-INF/lib/jruby-.*\.jar$}).should_not be_empty
end
it "collects application files" do
jar.apply(config)
file_list(%r{WEB-INF/app$}).should_not be_empty
file_list(%r{WEB-INF/config$}).should_not be_empty
file_list(%r{WEB-INF/lib$}).should_not be_empty
end
it "accepts an autodeploy directory where the war should be created" do
require 'tmpdir'
use_config do |config|
config.jar_name = 'warbler'
config.autodeploy_dir = Dir::tmpdir
end
touch "file.txt"
jar.files["file.txt"] = "file.txt"
silence { jar.create(config) }
File.exist?(File.join("#{Dir::tmpdir}","warbler.war")).should == true
end
it "allows the jar extension to be customized" do
use_config do |config|
config.jar_name = 'warbler'
config.jar_extension = 'foobar'
end
touch "file.txt"
jar.files["file.txt"] = "file.txt"
silence { jar.create(config) }
File.exist?("warbler.foobar").should == true
end
it "can exclude files from the .war" do
use_config do |config|
config.excludes += FileList['lib/tasks/utils.rake']
end
jar.apply(config)
file_list(%r{lib/tasks/utils.rake}).should be_empty
end
it "can exclude public files from the .war" do
use_config do |config|
config.excludes += FileList['public/robots.txt']
end
jar.apply(config)
file_list(%r{robots.txt}).should be_empty
end
it "reads configuration from #{Warbler::Config::FILE}" do
mkdir_p "config"
File.open(Warbler::Config::FILE, "w") do |dest|
contents =
File.open("#{Warbler::WARBLER_HOME}/warble.rb") do |src|
src.read
end
dest << contents.sub(/# config\.jar_name/, 'config.jar_name'
).sub(/# config.gems << "tzinfo"/, 'config.gems = []')
end
t = Warbler::Task.new "warble"
t.config.jar_name.should == "mywar"
end
it "fails if a gem is requested that is not installed" do
use_config do |config|
config.gems = ["nonexistent-gem"]
end
lambda {
Warbler::Task.new "warble", config
jar.apply(config)
}.should raise_error
end
it "allows specification of dependency by Gem::Dependency" do
spec = mock "gem spec"
spec.stub!(:name).and_return "hpricot"
spec.stub!(:full_name).and_return "hpricot-0.6.157"
spec.stub!(:full_gem_path).and_return "hpricot-0.6.157"
spec.stub!(:loaded_from).and_return "hpricot.gemspec"
spec.stub!(:files).and_return ["Rakefile"]
spec.stub!(:dependencies).and_return []
Gem.source_index.should_receive(:search).and_return do |gem|
gem.name.should == "hpricot"
[spec]
end
use_config do |config|
config.gems = [Gem::Dependency.new("hpricot", "> 0.6")]
end
silence { jar.apply(config) }
end
it "copies loose java classes to WEB-INF/classes" do
use_config do |config|
config.java_classes = FileList["Rakefile"]
end
jar.apply(config)
file_list(%r{WEB-INF/classes/Rakefile$}).should_not be_empty
end
it "does not try to autodetect frameworks when Warbler.framework_detection is false" do
begin
Warbler.framework_detection = false
task :environment
config.webxml.booter.should_not == :rails
t = Rake::Task['environment']
class << t; public :instance_variable_get; end
t.instance_variable_get("@already_invoked").should == false
ensure
Warbler.framework_detection = true
end
end
context "with the executable feature" do
it "adds a WarMain class" do
use_config do |config|
config.features << "executable"
end
jar.apply(config)
file_list(%r{^WarMain\.class$}).should_not be_empty
end
end
context "in a Rails application" do
before :each do
@rails = nil
task :environment do
@rails = mock_rails_module
end
end
# Builds a stand-in Rails module exposing Rails::VERSION::STRING == "2.1.0"
# so Warbler's framework detection runs without loading real Rails.
# NOTE(review): Object.const_set leaks the Rails constant across examples
# (there is no matching remove_const) -- presumably the suite tolerates
# this; confirm if specs become order-dependent.
def mock_rails_module
rails = Module.new
Object.const_set("Rails", rails)
version = Module.new
rails.const_set("VERSION", version)
version.const_set("STRING", "2.1.0")
rails
end
it "detects a Rails trait" do
config.traits.should include(Warbler::Traits::Rails)
end
it "auto-detects a Rails application" do
config.webxml.booter.should == :rails
config.gems["rails"].should == "2.1.0"
end
it "provides Rails gems by default, unless vendor/rails is present" do
config.gems.should have_key("rails")
mkdir_p "vendor/rails"
config = Warbler::Config.new
config.gems.should be_empty
rm_rf "vendor/rails"
@rails.stub!(:vendor_rails?).and_return true
config = Warbler::Config.new
config.gems.should be_empty
end
it "automatically adds Rails.configuration.gems to the list of gems" do
task :environment do
config = mock "config"
@rails.stub!(:configuration).and_return(config)
gem = mock "gem"
gem.stub!(:name).and_return "hpricot"
gem.stub!(:requirement).and_return Gem::Requirement.new("=0.6")
config.stub!(:gems).and_return [gem]
end
config.webxml.booter.should == :rails
config.gems.keys.should include(Gem::Dependency.new("hpricot", Gem::Requirement.new("=0.6")))
end
context "with threadsafe! enabled" do
before :each do
cp "config/environments/production.rb", "config/environments/production.rb.orig"
File.open("config/environments/production.rb", "a") {|f| f.puts "", "config.threadsafe!" }
end
after :each do
mv "config/environments/production.rb.orig", "config/environments/production.rb"
end
it "sets the jruby min and max runtimes to 1" do
  # Reset RAILS_ENV so a value inherited from the caller's shell cannot
  # change which config/environments/*.rb file is consulted (the same
  # fix the quickfixed copy of this spec applies).
  ENV["RAILS_ENV"] = nil
  config.webxml.booter.should == :rails
  config.webxml.jruby.min.runtimes.should == 1
  config.webxml.jruby.max.runtimes.should == 1
end
it "doesn't override already configured runtime numbers" do
use_config do |config|
config.webxml.jruby.min.runtimes = 2
config.webxml.jruby.max.runtimes = 2
end
config.webxml.jruby.min.runtimes.should == 2
config.webxml.jruby.max.runtimes.should == 2
end
end
it "adds RAILS_ENV to init.rb" do
  # Clear any RAILS_ENV inherited from the environment so the generated
  # init.rb falls back to the expected 'production' default.
  ENV["RAILS_ENV"] = nil
  use_config do |config|
    config.webxml.booter = :rails
  end
  jar.add_init_file(config)
  contents = jar.contents('META-INF/init.rb')
  contents.should =~ /ENV\['RAILS_ENV'\]/
  contents.should =~ /'production'/
end
end
context "in a Merb application" do
before :each do
touch "config/init.rb"
@merb = nil
task :merb_env do
@merb = mock_merb_module
end
end
after :each do
rm_f "config/init.rb"
end
# Builds a minimal Merb stand-in (Merb::VERSION plus a
# Merb::BootLoader::Dependencies class) so Merb detection works without
# the real framework installed.
def mock_merb_module
merb = Module.new
silence { Object.const_set("Merb", merb) }
boot_loader = Module.new
merb.const_set("BootLoader", boot_loader)
merb.const_set("VERSION", "1.0")
# @dependencies is a class-instance variable (not @@), so each anonymous
# class created here carries its own dependency list.
dependencies = Class.new do
@dependencies = []
def self.dependencies
@dependencies
end
def self.dependencies=(deps)
@dependencies = deps
end
end
boot_loader.const_set("Dependencies", dependencies)
dependencies
end
it "detects a Merb trait" do
config.traits.should include(Warbler::Traits::Merb)
end
it "auto-detects a Merb application" do
config.webxml.booter.should == :merb
config.gems.keys.should_not include("rails")
end
it "automatically adds Merb::BootLoader::Dependencies.dependencies to the list of gems" do
task :merb_env do
@merb.dependencies = [Gem::Dependency.new("merb-core", ">= 1.0.6.1")]
end
config.gems.keys.should include(Gem::Dependency.new("merb-core", ">= 1.0.6.1"))
end
it "skips Merb development dependencies" do
task :merb_env do
@merb.dependencies = [Gem::Dependency.new("rake", "= #{RAKEVERSION}", :development)]
end
jar.apply(config)
file_list(/rake-#{RAKEVERSION}/).should be_empty
end
it "warns about using Merb < 1.0" do
task :merb_env do
silence { Object.const_set("Merb", Module.new) }
end
silence { config.webxml.booter.should == :merb }
end
end
context "in a Rack application" do
before :each do
Dir.chdir('tmp')
rackup = "run Proc.new {|env| [200, {}, ['Hello World']]}"
File.open("config.ru", "w") {|f| f << rackup }
end
it "detects a Rack trait" do
config.traits.should include(Warbler::Traits::Rack)
end
it "auto-detects a Rack application with a config.ru file" do
jar.apply(config)
jar.files['WEB-INF/config.ru'].should == 'config.ru'
end
it "adds RACK_ENV to init.rb" do
  # Reset RACK_ENV so an inherited value cannot mask the 'production'
  # default asserted below.
  ENV["RACK_ENV"] = nil
  jar.add_init_file(config)
  contents = jar.contents('META-INF/init.rb')
  contents.should =~ /ENV\['RACK_ENV'\]/
  contents.should =~ /'production'/
end
end
it "skips directories that don't exist in config.dirs and print a warning" do
use_config do |config|
config.dirs = %w(lib notexist)
end
silence { jar.apply(config) }
file_list(%r{WEB-INF/lib}).should_not be_empty
file_list(%r{WEB-INF/notexist}).should be_empty
end
it "excludes Warbler's old tmp/war directory by default" do
mkdir_p "tmp/war"
touch "tmp/war/index.html"
use_config do |config|
config.dirs += ["tmp"]
end
jar.apply(config)
file_list(%r{WEB-INF/tmp/war}).should be_empty
file_list(%r{WEB-INF/tmp/war/index\.html}).should be_empty
end
it "writes gems to location specified by gem_path" do
use_config do |config|
config.gem_path = "/WEB-INF/jewels"
config.gems << 'rake'
end
elements = expand_webxml
file_list(%r{WEB-INF/jewels}).should_not be_empty
elements.to_a(
"context-param/param-name[text()='gem.path']"
).should_not be_empty
elements.to_a(
"context-param/param-name[text()='gem.path']/../param-value"
).first.text.should == "/WEB-INF/jewels"
end
it "allows adding additional WEB-INF files via config.webinf_files" do
File.open("myserver-web.xml", "w") do |f|
f << "<web-app></web-app>"
end
use_config do |config|
config.webinf_files = FileList['myserver-web.xml']
end
jar.apply(config)
file_list(%r{WEB-INF/myserver-web.xml}).should_not be_empty
end
it "allows expanding of additional WEB-INF files via config.webinf_files" do
  # Reset RAILS_ENV: the ERB template renders webxml.rails.env, which must
  # default to 'production' for the final assertion to hold.
  ENV["RAILS_ENV"] = nil
  File.open("myserver-web.xml.erb", "w") do |f|
    f << "<web-app><%= webxml.rails.env %></web-app>"
  end
  use_config do |config|
    config.webinf_files = FileList['myserver-web.xml.erb']
  end
  jar.apply(config)
  file_list(%r{WEB-INF/myserver-web.xml}).should_not be_empty
  jar.contents('WEB-INF/myserver-web.xml').should =~ /web-app.*production/
end
it "excludes test files in gems according to config.gem_excludes" do
use_config do |config|
config.gem_excludes += [/^(test|spec)\//]
end
jar.apply(config)
file_list(%r{WEB-INF/gems/gems/rake([^/]+)/test/test_rake.rb}).should be_empty
end
it "creates a META-INF/init.rb file with startup config" do
jar.apply(config)
file_list(%r{META-INF/init.rb}).should_not be_empty
end
it "allows adjusting the init file location in the war" do
use_config do |config|
config.init_filename = 'WEB-INF/init.rb'
end
jar.add_init_file(config)
file_list(%r{WEB-INF/init.rb}).should_not be_empty
end
it "allows adding custom files' contents to init.rb" do
use_config do |config|
config.init_contents << "Rakefile"
end
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /require 'rake'/
end
it "does not have escaped HTML in WARBLER_CONFIG" do
use_config do |config|
config.webxml.dummy = '<dummy/>'
end
jar.apply(config)
jar.contents('META-INF/init.rb').should =~ /<dummy\/>/
end
end
end
Isolate environment
#--
# Copyright (c) 2010-2011 Engine Yard, Inc.
# Copyright (c) 2007-2009 Sun Microsystems, Inc.
# This source code is available under the MIT license.
# See the file LICENSE.txt for details.
#++
require File.expand_path('../../spec_helper', __FILE__)
describe Warbler::Jar do
use_fresh_rake_application
use_fresh_environment
def file_list(regex)
jar.files.keys.select {|f| f =~ regex }
end
def use_config(&block)
@extra_config = block
end
def apply_extra_config(config)
@extra_config.call(config) if @extra_config
end
let(:config) { Warbler::Config.new {|c| apply_extra_config(c) } }
let(:jar) { Warbler::Jar.new }
context "in a jar project" do
run_in_directory "spec/sample_jar"
cleanup_temp_files
it "detects a Jar trait" do
config.traits.should include(Warbler::Traits::Jar)
end
it "collects java libraries" do
jar.apply(config)
file_list(%r{^META-INF/lib/jruby-.*\.jar$}).should_not be_empty
end
it "adds a JarMain class" do
jar.apply(config)
file_list(%r{^JarMain\.class$}).should_not be_empty
end
it "adds an init.rb" do
jar.apply(config)
file_list(%r{^META-INF/init.rb$}).should_not be_empty
end
it "requires 'rubygems' in init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.split("\n").grep(/require 'rubygems'/).should_not be_empty
end
it "adds ENV['GEM_HOME'] to init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /ENV\['GEM_HOME'\]/
end
it "adds a main.rb" do
jar.apply(config)
file_list(%r{^META-INF/main.rb$}).should_not be_empty
end
it "accepts a custom manifest file" do
touch 'manifest'
use_config do |config|
config.manifest_file = 'manifest'
end
jar.apply(config)
jar.files['META-INF/MANIFEST.MF'].should == "manifest"
end
it "accepts a MANIFEST.MF file if it exists in the project root" do
touch 'MANIFEST.MF'
jar.apply(config)
jar.files['META-INF/MANIFEST.MF'].should == "MANIFEST.MF"
end
it "does not add a manifest if one already exists" do
jar.files['META-INF/MANIFEST.MF'] = 'manifest'
jar.add_manifest(config)
jar.files['META-INF/MANIFEST.MF'].should == "manifest"
end
context "with a .gemspec" do
it "detects a Gemspec trait" do
config.traits.should include(Warbler::Traits::Gemspec)
end
it "detects gem dependencies" do
jar.apply(config)
file_list(%r{^gems/rubyzip.*/lib/zip/zip.rb}).should_not be_empty
file_list(%r{^specifications/rubyzip.*\.gemspec}).should_not be_empty
end
it "sets load paths in init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.split("\n").grep(/LOAD_PATH\.unshift.*sample_jar\/lib/).should_not be_empty
end
it "loads the default executable in main.rb" do
jar.apply(config)
contents = jar.contents('META-INF/main.rb')
contents.split("\n").grep(/load.*sample_jar\/bin\/sample_jar/).should_not be_empty
end
it "includes compiled .rb and .class files" do
config.compiled_ruby_files = %w(lib/sample_jar.rb)
jar.compile(config)
jar.apply(config)
file_list(%r{^sample_jar/lib/sample_jar\.class$}).should_not be_empty
jar.contents('sample_jar/lib/sample_jar.rb').should =~ /require __FILE__\.sub/
end
end
context "with a gemspec without a default executable" do
before :each do
Dir['*.gemspec'].each do |f|
cp f, "#{f}.tmp"
lines = IO.readlines(f)
File.open(f, 'w') do |io|
lines.each do |line|
next if line =~ /executable/
io << line
end
end
end
end
after :each do
Dir['*.gemspec.tmp'].each {|f| mv f, "#{f.sub /\.tmp$/, ''}"}
end
it "loads the first bin/executable in main.rb" do
silence { jar.apply(config) }
contents = jar.contents('META-INF/main.rb')
contents.split("\n").grep(/load.*sample_jar\/bin\/sample_jar/).should_not be_empty
end
end
context "without a .gemspec" do
before :each do
Dir['*.gemspec'].each {|f| mv f, "#{f}.tmp"}
end
after :each do
Dir['*.gemspec.tmp'].each {|f| mv f, "#{f.sub /\.tmp$/, ''}"}
end
it "detects a NoGemspec trait" do
config.traits.should include(Warbler::Traits::NoGemspec)
end
it "collects gem files from configuration" do
use_config do |config|
config.gems << "rake"
end
jar.apply(config)
file_list(%r{^gems/rake.*/lib/rake.rb}).should_not be_empty
file_list(%r{^specifications/rake.*\.gemspec}).should_not be_empty
end
it "collects all project files in the directory" do
touch "extra.foobar"
jar.apply(config)
file_list(%r{^sample_jar/bin$}).should_not be_empty
file_list(%r{^sample_jar/test$}).should_not be_empty
file_list(%r{^sample_jar/lib/sample_jar.rb$}).should_not be_empty
file_list(%r{^sample_jar/extra\.foobar$}).should_not be_empty
end
it "sets load paths in init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.split("\n").grep(/LOAD_PATH\.unshift.*sample_jar\/lib/).should_not be_empty
end
it "loads the first bin/executable in main.rb" do
jar.apply(config)
contents = jar.contents('META-INF/main.rb')
contents.split("\n").grep(/load.*sample_jar\/bin\/sample_jar/).should_not be_empty
end
end
end
context "in a war project" do
run_in_directory "spec/sample_war"
cleanup_temp_files
before(:each) do
mkdir_p "log"
touch "log/test.log"
end
it "detects a War trait" do
config.traits.should include(Warbler::Traits::War)
end
it "collects files in public" do
jar.apply(config)
file_list(%r{^index\.html}).should_not be_empty
end
it "collects gem files" do
use_config do |config|
config.gems << "rake"
end
jar.apply(config)
file_list(%r{WEB-INF/gems/gems/rake.*/lib/rake.rb}).should_not be_empty
file_list(%r{WEB-INF/gems/specifications/rake.*\.gemspec}).should_not be_empty
end
it "adds ENV['GEM_HOME'] to init.rb" do
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /ENV\['GEM_HOME'\]/
contents.should =~ /WEB-INF\/gems/
end
it "does not include log files by default" do
jar.apply(config)
file_list(%r{WEB-INF/log}).should_not be_empty
file_list(%r{WEB-INF/log/.*\.log}).should be_empty
end
def expand_webxml
jar.apply(config)
jar.files.should include("WEB-INF/web.xml")
require 'rexml/document'
REXML::Document.new(jar.files["WEB-INF/web.xml"]).root.elements
end
it "creates a web.xml file" do
use_config do |config|
config.webxml.jruby.max.runtimes = 5
end
elements = expand_webxml
elements.to_a(
"context-param/param-name[text()='jruby.max.runtimes']"
).should_not be_empty
elements.to_a(
"context-param/param-name[text()='jruby.max.runtimes']/../param-value"
).first.text.should == "5"
end
it "includes custom context parameters in web.xml" do
use_config do |config|
config.webxml.some.custom.config = "myconfig"
end
elements = expand_webxml
elements.to_a(
"context-param/param-name[text()='some.custom.config']"
).should_not be_empty
elements.to_a(
"context-param/param-name[text()='some.custom.config']/../param-value"
).first.text.should == "myconfig"
end
it "allows one jndi resource to be included" do
use_config do |config|
config.webxml.jndi = 'jndi/rails'
end
elements = expand_webxml
elements.to_a(
"resource-ref/res-ref-name[text()='jndi/rails']"
).should_not be_empty
end
it "allows multiple jndi resources to be included" do
use_config do |config|
config.webxml.jndi = ['jndi/rails1', 'jndi/rails2']
end
elements = expand_webxml
elements.to_a(
"resource-ref/res-ref-name[text()='jndi/rails1']"
).should_not be_empty
elements.to_a(
"resource-ref/res-ref-name[text()='jndi/rails2']"
).should_not be_empty
end
it "does not include any ignored context parameters" do
use_config do |config|
config.webxml.foo = "bar"
config.webxml.ignored << "foo"
end
elements = expand_webxml
elements.to_a(
"context-param/param-name[text()='foo']"
).should be_empty
elements.to_a(
"context-param/param-name[text()='ignored']"
).should be_empty
elements.to_a(
"context-param/param-name[text()='jndi']"
).should be_empty
end
it "uses a config/web.xml if it exists" do
mkdir_p "config"
touch "config/web.xml"
jar.apply(config)
jar.files["WEB-INF/web.xml"].should == "config/web.xml"
end
it "uses a config/web.xml.erb if it exists" do
mkdir_p "config"
File.open("config/web.xml.erb", "w") {|f| f << "Hi <%= webxml.public.root %>" }
jar.apply(config)
jar.files["WEB-INF/web.xml"].should_not be_nil
jar.files["WEB-INF/web.xml"].read.should == "Hi /"
end
it "collects java libraries" do
jar.apply(config)
file_list(%r{WEB-INF/lib/jruby-.*\.jar$}).should_not be_empty
end
it "collects application files" do
jar.apply(config)
file_list(%r{WEB-INF/app$}).should_not be_empty
file_list(%r{WEB-INF/config$}).should_not be_empty
file_list(%r{WEB-INF/lib$}).should_not be_empty
end
it "accepts an autodeploy directory where the war should be created" do
require 'tmpdir'
use_config do |config|
config.jar_name = 'warbler'
config.autodeploy_dir = Dir::tmpdir
end
touch "file.txt"
jar.files["file.txt"] = "file.txt"
silence { jar.create(config) }
File.exist?(File.join("#{Dir::tmpdir}","warbler.war")).should == true
end
it "allows the jar extension to be customized" do
use_config do |config|
config.jar_name = 'warbler'
config.jar_extension = 'foobar'
end
touch "file.txt"
jar.files["file.txt"] = "file.txt"
silence { jar.create(config) }
File.exist?("warbler.foobar").should == true
end
it "can exclude files from the .war" do
use_config do |config|
config.excludes += FileList['lib/tasks/utils.rake']
end
jar.apply(config)
file_list(%r{lib/tasks/utils.rake}).should be_empty
end
it "can exclude public files from the .war" do
use_config do |config|
config.excludes += FileList['public/robots.txt']
end
jar.apply(config)
file_list(%r{robots.txt}).should be_empty
end
it "reads configuration from #{Warbler::Config::FILE}" do
mkdir_p "config"
File.open(Warbler::Config::FILE, "w") do |dest|
contents =
File.open("#{Warbler::WARBLER_HOME}/warble.rb") do |src|
src.read
end
dest << contents.sub(/# config\.jar_name/, 'config.jar_name'
).sub(/# config.gems << "tzinfo"/, 'config.gems = []')
end
t = Warbler::Task.new "warble"
t.config.jar_name.should == "mywar"
end
it "fails if a gem is requested that is not installed" do
use_config do |config|
config.gems = ["nonexistent-gem"]
end
lambda {
Warbler::Task.new "warble", config
jar.apply(config)
}.should raise_error
end
it "allows specification of dependency by Gem::Dependency" do
spec = mock "gem spec"
spec.stub!(:name).and_return "hpricot"
spec.stub!(:full_name).and_return "hpricot-0.6.157"
spec.stub!(:full_gem_path).and_return "hpricot-0.6.157"
spec.stub!(:loaded_from).and_return "hpricot.gemspec"
spec.stub!(:files).and_return ["Rakefile"]
spec.stub!(:dependencies).and_return []
Gem.source_index.should_receive(:search).and_return do |gem|
gem.name.should == "hpricot"
[spec]
end
use_config do |config|
config.gems = [Gem::Dependency.new("hpricot", "> 0.6")]
end
silence { jar.apply(config) }
end
it "copies loose java classes to WEB-INF/classes" do
use_config do |config|
config.java_classes = FileList["Rakefile"]
end
jar.apply(config)
file_list(%r{WEB-INF/classes/Rakefile$}).should_not be_empty
end
it "does not try to autodetect frameworks when Warbler.framework_detection is false" do
begin
Warbler.framework_detection = false
task :environment
config.webxml.booter.should_not == :rails
t = Rake::Task['environment']
class << t; public :instance_variable_get; end
t.instance_variable_get("@already_invoked").should == false
ensure
Warbler.framework_detection = true
end
end
context "with the executable feature" do
it "adds a WarMain class" do
use_config do |config|
config.features << "executable"
end
jar.apply(config)
file_list(%r{^WarMain\.class$}).should_not be_empty
end
end
context "in a Rails application" do
before :each do
@rails = nil
task :environment do
@rails = mock_rails_module
end
end
def mock_rails_module
rails = Module.new
Object.const_set("Rails", rails)
version = Module.new
rails.const_set("VERSION", version)
version.const_set("STRING", "2.1.0")
rails
end
it "detects a Rails trait" do
config.traits.should include(Warbler::Traits::Rails)
end
it "auto-detects a Rails application" do
config.webxml.booter.should == :rails
config.gems["rails"].should == "2.1.0"
end
it "provides Rails gems by default, unless vendor/rails is present" do
config.gems.should have_key("rails")
mkdir_p "vendor/rails"
config = Warbler::Config.new
config.gems.should be_empty
rm_rf "vendor/rails"
@rails.stub!(:vendor_rails?).and_return true
config = Warbler::Config.new
config.gems.should be_empty
end
it "automatically adds Rails.configuration.gems to the list of gems" do
task :environment do
config = mock "config"
@rails.stub!(:configuration).and_return(config)
gem = mock "gem"
gem.stub!(:name).and_return "hpricot"
gem.stub!(:requirement).and_return Gem::Requirement.new("=0.6")
config.stub!(:gems).and_return [gem]
end
config.webxml.booter.should == :rails
config.gems.keys.should include(Gem::Dependency.new("hpricot", Gem::Requirement.new("=0.6")))
end
context "with threadsafe! enabled" do
before :each do
cp "config/environments/production.rb", "config/environments/production.rb.orig"
File.open("config/environments/production.rb", "a") {|f| f.puts "", "config.threadsafe!" }
end
after :each do
mv "config/environments/production.rb.orig", "config/environments/production.rb"
end
it "sets the jruby min and max runtimes to 1" do
ENV["RAILS_ENV"] = nil
config.webxml.booter.should == :rails
config.webxml.jruby.min.runtimes.should == 1
config.webxml.jruby.max.runtimes.should == 1
end
it "doesn't override already configured runtime numbers" do
use_config do |config|
config.webxml.jruby.min.runtimes = 2
config.webxml.jruby.max.runtimes = 2
end
config.webxml.jruby.min.runtimes.should == 2
config.webxml.jruby.max.runtimes.should == 2
end
end
it "adds RAILS_ENV to init.rb" do
ENV["RAILS_ENV"] = nil
use_config do |config|
config.webxml.booter = :rails
end
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /ENV\['RAILS_ENV'\]/
contents.should =~ /'production'/
end
end
context "in a Merb application" do
before :each do
touch "config/init.rb"
@merb = nil
task :merb_env do
@merb = mock_merb_module
end
end
after :each do
rm_f "config/init.rb"
end
def mock_merb_module
merb = Module.new
silence { Object.const_set("Merb", merb) }
boot_loader = Module.new
merb.const_set("BootLoader", boot_loader)
merb.const_set("VERSION", "1.0")
dependencies = Class.new do
@dependencies = []
def self.dependencies
@dependencies
end
def self.dependencies=(deps)
@dependencies = deps
end
end
boot_loader.const_set("Dependencies", dependencies)
dependencies
end
it "detects a Merb trait" do
config.traits.should include(Warbler::Traits::Merb)
end
it "auto-detects a Merb application" do
config.webxml.booter.should == :merb
config.gems.keys.should_not include("rails")
end
it "automatically adds Merb::BootLoader::Dependencies.dependencies to the list of gems" do
task :merb_env do
@merb.dependencies = [Gem::Dependency.new("merb-core", ">= 1.0.6.1")]
end
config.gems.keys.should include(Gem::Dependency.new("merb-core", ">= 1.0.6.1"))
end
it "skips Merb development dependencies" do
task :merb_env do
@merb.dependencies = [Gem::Dependency.new("rake", "= #{RAKEVERSION}", :development)]
end
jar.apply(config)
file_list(/rake-#{RAKEVERSION}/).should be_empty
end
it "warns about using Merb < 1.0" do
task :merb_env do
silence { Object.const_set("Merb", Module.new) }
end
silence { config.webxml.booter.should == :merb }
end
end
context "in a Rack application" do
before :each do
Dir.chdir('tmp')
rackup = "run Proc.new {|env| [200, {}, ['Hello World']]}"
File.open("config.ru", "w") {|f| f << rackup }
end
it "detects a Rack trait" do
config.traits.should include(Warbler::Traits::Rack)
end
it "auto-detects a Rack application with a config.ru file" do
jar.apply(config)
jar.files['WEB-INF/config.ru'].should == 'config.ru'
end
it "adds RACK_ENV to init.rb" do
ENV["RACK_ENV"] = nil
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /ENV\['RACK_ENV'\]/
contents.should =~ /'production'/
end
end
it "skips directories that don't exist in config.dirs and print a warning" do
use_config do |config|
config.dirs = %w(lib notexist)
end
silence { jar.apply(config) }
file_list(%r{WEB-INF/lib}).should_not be_empty
file_list(%r{WEB-INF/notexist}).should be_empty
end
it "excludes Warbler's old tmp/war directory by default" do
mkdir_p "tmp/war"
touch "tmp/war/index.html"
use_config do |config|
config.dirs += ["tmp"]
end
jar.apply(config)
file_list(%r{WEB-INF/tmp/war}).should be_empty
file_list(%r{WEB-INF/tmp/war/index\.html}).should be_empty
end
it "writes gems to location specified by gem_path" do
use_config do |config|
config.gem_path = "/WEB-INF/jewels"
config.gems << 'rake'
end
elements = expand_webxml
file_list(%r{WEB-INF/jewels}).should_not be_empty
elements.to_a(
"context-param/param-name[text()='gem.path']"
).should_not be_empty
elements.to_a(
"context-param/param-name[text()='gem.path']/../param-value"
).first.text.should == "/WEB-INF/jewels"
end
it "allows adding additional WEB-INF files via config.webinf_files" do
File.open("myserver-web.xml", "w") do |f|
f << "<web-app></web-app>"
end
use_config do |config|
config.webinf_files = FileList['myserver-web.xml']
end
jar.apply(config)
file_list(%r{WEB-INF/myserver-web.xml}).should_not be_empty
end
it "allows expanding of additional WEB-INF files via config.webinf_files" do
ENV["RAILS_ENV"] = nil
File.open("myserver-web.xml.erb", "w") do |f|
f << "<web-app><%= webxml.rails.env %></web-app>"
end
use_config do |config|
config.webinf_files = FileList['myserver-web.xml.erb']
end
jar.apply(config)
file_list(%r{WEB-INF/myserver-web.xml}).should_not be_empty
jar.contents('WEB-INF/myserver-web.xml').should =~ /web-app.*production/
end
it "excludes test files in gems according to config.gem_excludes" do
use_config do |config|
config.gem_excludes += [/^(test|spec)\//]
end
jar.apply(config)
file_list(%r{WEB-INF/gems/gems/rake([^/]+)/test/test_rake.rb}).should be_empty
end
it "creates a META-INF/init.rb file with startup config" do
jar.apply(config)
file_list(%r{META-INF/init.rb}).should_not be_empty
end
it "allows adjusting the init file location in the war" do
use_config do |config|
config.init_filename = 'WEB-INF/init.rb'
end
jar.add_init_file(config)
file_list(%r{WEB-INF/init.rb}).should_not be_empty
end
it "allows adding custom files' contents to init.rb" do
use_config do |config|
config.init_contents << "Rakefile"
end
jar.add_init_file(config)
contents = jar.contents('META-INF/init.rb')
contents.should =~ /require 'rake'/
end
it "does not have escaped HTML in WARBLER_CONFIG" do
use_config do |config|
config.webxml.dummy = '<dummy/>'
end
jar.apply(config)
jar.contents('META-INF/init.rb').should =~ /<dummy\/>/
end
end
end
|
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Requirements
require './lib/calculator'
require './lib/config_renderer'
# Get the cluster.yml values and execute the right method
# Entry point: builds the list of target hostsfiles from the cluster.yml
# settings and dispatches to the renderer named by conf[:hostsfile_renderer]
# ("ascending", "random" or "modulo"); unknown values print an error.
def generate_hostsfile(address, conf)
  targets = generate_hostsfile_list(conf)
  renderer = conf[:hostsfile_renderer]
  if renderer == "ascending"
    generate_hostsfile_ascending(address, targets)
  elsif renderer == "random"
    generate_hostsfile_random(address, targets)
  elsif renderer == "modulo"
    generate_hostsfile_modulo(address, targets, conf[:hostsfile_modul])
  else
    STDERR.puts("ERROR: Cannot read value in cluster.yml: hostsfile_renderer")
  end
end
# Checks the value in cluster.yml to determine how many hosts files should be generated
# Returns the list of hostsfile paths to generate, driven by
# conf[:hostsfile_files]: either one shared "./hostsfile", or one file per
# host (27 nodes, 2 nfs, 2 login, 1 master) under ./hostsfiles.
# An unrecognised setting prints an error and yields an empty list.
def generate_hostsfile_list(conf)
  files = []
  case conf[:hostsfile_files]
  when "single_file"
    files << "./hostsfile"
  when "file_per_host"
    initializeHostsfileDir()
    files.concat((1..27).map { |n| "./hostsfiles/hostsfile_node#{n}" })
    (1..2).each do |n|
      files << "./hostsfiles/hostsfile_nfs#{n}"
      files << "./hostsfiles/hostsfile_login#{n}"
    end
    files << "./hostsfiles/hostsfile_master1"
  else
    STDERR.puts("ERROR: Cannot read value in cluster.yml: hostsfile_files")
  end
  files
end
# Creates the individual hostfiles for the specified filename
# Writes every address in ascending order into each target file; six
# consecutive addresses belong to one node (entries 0..5 -> node1,
# 6..11 -> node2, ...), then appends the shared infrastructure entries.
def generate_hostsfile_ascending(address, hostsfiles)
  hostsfiles.each do |filename|
    File.open(filename, "w") do |hostsfile|
      address.each_with_index do |ip, idx|
        hostsfile.puts("%s node%d" % [ip, (idx / 6) + 1])
      end
      add_nfs_login_master(hostsfile)
      add_localhost(hostsfile)
    end
  end
end
# Write random order to specified file
# Writes each node's six addresses with a randomly chosen one first and
# the remaining five in their original order, then appends the shared
# infrastructure entries.
def generate_hostsfile_random(address, hostsfiles)
  hostsfiles.each do |filename|
    File.open(filename, "w") do |hostsfile|
      (1..27).each do |nodenr|
        base = (nodenr - 1) * 6
        pick = rand(6)
        hostsfile.puts("%s node%d" % [address[base + pick], nodenr])
        (0..5).each do |offset|
          next if offset == pick
          hostsfile.puts("%s node%d" % [address[base + offset], nodenr])
        end
      end
      add_nfs_login_master(hostsfile)
      add_localhost(hostsfile)
    end
  end
end
# If you don't like the random approach, use this method instead: a second
# parameter takes the node number, and a modulo operation on it determines
# the right IP address for the hosts file. To create good routes, call this
# method again on each node generation.
# Deterministic variant of the random renderer: `modul % 6` selects which
# of a node's six addresses is written first; the remaining five follow in
# order, then the shared infrastructure entries are appended.
#
# BUG FIX: the original computed `correction = node % 6`, but `node` is not
# defined anywhere -- every call raised NameError. The parameter carrying
# the node number is `modul`. Also replaced the misused each_with_index
# calls (only one block parameter was bound) with plain each, and hoisted
# the loop-invariant correction out of the loops.
def generate_hostsfile_modulo(address, hostsfiles, modul)
  correction = modul % 6
  hostsfiles.each do |filename|
    File.open(filename, "w") do |hostsfile|
      (1..27).each do |nodenr|
        base = (nodenr - 1) * 6
        # Preferred address first, then the other five in order.
        hostsfile.puts("%s node%d" % [ address[base + correction], nodenr ])
        (0..5).each do |index|
          if index != correction
            hostsfile.puts("%s node%d" % [ address[base + index], nodenr ])
          end
        end
      end
      add_nfs_login_master(hostsfile)
      add_localhost(hostsfile)
    end
  end
end
# Hostsfile
# Appends the standard IPv4 and IPv6 loopback entries to an open hosts file.
def add_localhost(hostsfile)
  ['127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4',
   '::1 localhost localhost.localdomain localhost6 localhost6.localdomain6'].each do |entry|
    hostsfile.puts(entry)
  end
end
# Make login and nfs nodes available
# Appends the shared infrastructure entries (nfs1/2, login1/2, the master
# node alias and master1) read from cluster.yml to an open hosts file.
# NOTE(review): relies on YAML and read_yml_file being loaded by the
# requiring context (./lib/config_renderer presumably) -- this file
# requires neither directly.
def add_nfs_login_master(file)
conf = read_yml_file(YAML.load_file("cluster.yml"))
(0..1).each do |index|
file.puts("%s nfs%d" % [ conf[:ip_nfs][index], (index + 1) ])
file.puts("%s login%d" % [ conf[:ip_login][index], (index + 1) ])
end
# The master also gets a node alias; rewriting ".0.1" -> ".0.2" presumably
# maps the management address onto the compute network -- confirm against
# the cluster.yml address layout.
file.puts("%s node%d" % [ conf[:ip_master][0].gsub(".0.1",".0.2"), conf[:nodes_master][0] ])
file.puts("%s master1" % [ conf[:ip_master][0] ])
end
# Check if hostsdirectory exists and clear it for the hostfiles
# Ensures ./hostsfiles exists and is empty before per-host files are written.
#
# BUG FIX: the original branches were inverted -- it tried to create the
# directory when it already existed (mkdir raises Errno::EEXIST) and tried
# to clear files in a directory that did not exist. Also uses Dir.mkdir
# explicitly; the bare `mkdir` relied on FileUtils being mixed into the
# calling context.
def initializeHostsfileDir()
  if File.directory?("./hostsfiles")
    # Remove stale hostfiles left over from a previous generation run.
    Dir["./hostsfiles/*"].each do |path|
      File.delete(path)
    end
  else
    Dir.mkdir("./hostsfiles")
  end
end
Quickfix: hostsgenerator.rb
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Requirements
require './lib/calculator'
require './lib/config_renderer'
# Get the cluster.yml values and execute the right method
def generate_hostsfile(address, conf)
hostsfiles = generate_hostsfile_list(conf)
case conf[:hostsfile_renderer]
when "ascending"
generate_hostsfile_ascending(address, hostsfiles)
when "random"
generate_hostsfile_random(address, hostsfiles)
when "modulo"
generate_hostsfile_modulo(address, hostsfiles, conf[:hostsfile_modul])
else
STDERR.puts("ERROR: Cannot read value in cluster.yml: hostsfile_renderer")
end
end
# Checks the value in cluster.yml how much files should be generated
def generate_hostsfile_list(conf)
hostfiles = Array[]
case conf[:hostsfile_files]
when "single_file"
hostfiles.push("./hostsfile")
when "file_per_host"
initializeHostsfileDir()
(1..27).each do |nodenr|
hostfiles.push("./hostsfiles/hostsfile_node" + nodenr.to_s)
end
(1..2).each do |nodenr|
hostfiles.push("./hostsfiles/hostsfile_nfs" + nodenr.to_s)
hostfiles.push("./hostsfiles/hostsfile_login" + nodenr.to_s)
end
hostfiles.push("./hostsfiles/hostsfile_master1")
else
STDERR.puts("ERROR: Cannot read value in cluster.yml: hostsfile_files")
end
hostfiles
end
# Creates the individual hostfiles for the specified filename
def generate_hostsfile_ascending(address, hostsfiles)
hostsfiles.each do |filename|
File.open(filename, "w") do |hostsfile|
nodenr = 1
i = 0
while i < address.length do
hostsfile.puts("%s node%d" % [ address[i], nodenr ])
i = i + 1
if i % 6 == 0
nodenr = nodenr + 1
end
end
add_nfs_login_master(hostsfile)
add_localhost(hostsfile)
end
end
end
# Write random order to specified file
def generate_hostsfile_random(address, hostsfiles)
hostsfiles.each do |filename|
File.open(filename, "w") do |hostsfile|
(1..27).each do |nodenr|
rn = rand(6)
hostsfile.puts("%s node%d" % [ address[((nodenr - 1) * 6 ) + rn], nodenr ])
(0..5).each do |index|
if index != rn
hostsfile.puts("%s node%d" % [ address[((nodenr - 1) * 6 ) + index], nodenr ])
end
end
end
add_nfs_login_master(hostsfile)
add_localhost(hostsfile)
end
end
end
# If you don't like the random approach, use this method instead: a second
# parameter takes the node number, and a modulo operation on it determines
# the right IP address for the hosts file. To create good routes, call this
# method again on each node generation.
def generate_hostsfile_modulo(address, hostsfiles, modul)
hostsfiles.each do |filename|
File.open(filename, "w") do |hostsfile|
(1..27).each_with_index do |nodenr|
correction = node % 6
hostsfile.puts("%s node%d" % [ address[((nodenr - 1) * 6 ) + correction], nodenr ])
(0..5).each_with_index do |index|
if index != correction
hostsfile.puts("%s node%d" % [ address[((nodenr - 1) * 6 ) + index], nodenr ])
end
end
end
add_nfs_login_master(hostsfile)
add_localhost(hostsfile)
end
end
end
# Appends the standard IPv4 and IPv6 loopback entries to the given
# hostsfile handle.
def add_localhost(hostsfile)
  [
    '127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4',
    '::1 localhost localhost.localdomain localhost6 localhost6.localdomain6'
  ].each { |entry| hostsfile.puts(entry) }
end
# Appends the NFS, login and master host entries to the given file handle.
# Re-reads cluster.yml on every invocation; `read_yml_file` is defined
# elsewhere in this file.
def add_nfs_login_master(file)
  conf = read_yml_file(YAML.load_file("cluster.yml"))
  # Two NFS and two login hosts, numbered from 1.
  (0..1).each do |index|
    file.puts("%s nfs%d" % [ conf[:ip_nfs][index], (index + 1) ])
    file.puts("%s login%d" % [ conf[:ip_login][index], (index + 1) ])
  end
  # NOTE(review): derives the master's node-alias address by rewriting
  # ".0.1" to ".0.2" in the master IP — fragile string substitution that
  # silently does nothing if the IP lacks that substring. TODO confirm
  # the addressing scheme this relies on.
  file.puts("%s node%d" % [ conf[:ip_master][0].gsub(".0.1",".0.2"), conf[:nodes_master][0] ])
  file.puts("%s master1" % [ conf[:ip_master][0] ])
end
# Ensures ./hostsfiles exists and is empty: creates the directory when it
# is missing, otherwise deletes any files left over from a previous run.
def initializeHostsfileDir()
  if File.directory?("./hostsfiles")
    # Fix: the original branches were swapped — it attempted mkdir on an
    # already-existing directory and only globbed for stale files when the
    # directory did not exist (so nothing was ever actually cleared).
    Dir["./hostsfiles/*"].each { |path| File.delete(path) }
  else
    # Stdlib Dir.mkdir replaces the bare `mkdir`, whose provider
    # (FileUtils? Rake?) is not visible in this file.
    Dir.mkdir("./hostsfiles")
  end
end
|
require 'spec_helper'
require 'lumberg/whm'
# Specs for the WHM account wrapper: constructor requirements and the
# createacct API call (HTTP interactions replayed via VCR cassettes).
module Lumberg
  describe Whm::Account do
    before(:each) do
      # @whm_host and @whm_hash are provided by spec_helper.
      @login = { host: @whm_host, hash: @whm_hash }
      @server = Whm::Server.new(@login.dup)
      @account = Whm::Account.new(server: @server.dup)
    end
    describe "initialize" do
      it "should create a new instance of server" do
        expect { Whm::Account.new }.to raise_error(WhmArgumentError, /Missing required param/)
      end
      it "should allow a server instance to be passed in" do
        account = Whm::Account.new(server: @server)
        account.server.should be_a(Whm::Server)
      end
      it "should allow a server hash to be passed in" do
        account = Whm::Account.new(server: @login)
        account.server.should be_a(Whm::Server)
      end
    end
    describe "createacct" do
      it "should require a username" do
        expect { @account.createacct}.to raise_error(WhmArgumentError, /Missing required param.* username/)
      end
      it "should require a domain" do
        expect { @account.createacct(username: 'user')}.to raise_error(WhmArgumentError, /Missing required param.* domain/)
      end
      it "should require a password" do
        expect { @account.createacct(username: 'user', domain: 'example.com')}.to raise_error(WhmArgumentError, /Missing required param.* password/)
      end
      # NOTE(review): use_vcr_cassette registers hooks for the whole
      # example group even though it appears mid-block; declaring it at
      # the top of the describe would read more clearly.
      use_vcr_cassette "whm/account/createacct"
      it "should allow account creation" do
        message = @account.createacct(username: 'valid', password: 'hummingbird123', domain: 'valid-thing.com')
        message[:success].should be(true)
        message[:message].should match(/Account Creation Ok/i)
      end
      it "should return an error on duplicate account" do
        @account.createacct(username: 'invalid', password: 'hummingbird123', domain: 'invalid-thing.com')
        message = @account.createacct(username: 'invalid', password: 'hummingbird123', domain: 'invalid-thing.com')
        message[:success].should be(false)
        message[:message].should match(/username already exists/i)
      end
    end
  end
end
More specs for create account
require 'spec_helper'
require 'lumberg/whm'
# Revised WHM account specs: the VCR cassette is declared at the top of
# the createacct group, and successful creation also asserts the params
# the wrapper echoes back.
module Lumberg
  describe Whm::Account do
    before(:each) do
      # @whm_host and @whm_hash are provided by spec_helper.
      @login = { host: @whm_host, hash: @whm_hash }
      @server = Whm::Server.new(@login.dup)
      @account = Whm::Account.new(server: @server.dup)
    end
    describe "initialize" do
      it "should create a new instance of server" do
        expect { Whm::Account.new }.to raise_error(WhmArgumentError, /Missing required param/)
      end
      it "should allow a server instance to be passed in" do
        account = Whm::Account.new(server: @server)
        account.server.should be_a(Whm::Server)
      end
      it "should allow a server hash to be passed in" do
        account = Whm::Account.new(server: @login)
        account.server.should be_a(Whm::Server)
      end
    end
    describe "createacct" do
      # Replays recorded WHM HTTP interactions for every example below.
      use_vcr_cassette "whm/account/createacct"
      it "should require a username" do
        expect { @account.createacct}.to raise_error(WhmArgumentError, /Missing required param.* username/)
      end
      it "should require a domain" do
        expect { @account.createacct(username: 'user')}.to raise_error(WhmArgumentError, /Missing required param.* domain/)
      end
      it "should require a password" do
        expect { @account.createacct(username: 'user', domain: 'example.com')}.to raise_error(WhmArgumentError, /Missing required param.* password/)
      end
      it "should allow account creation with proper params" do
        message = @account.createacct(username: 'valid', password: 'hummingbird123', domain: 'valid-thing.com')
        message[:success].should be(true)
        message[:message].should match(/Account Creation Ok/i)
        # The wrapper echoes back the options it sent to the WHM API.
        message[:params].should_not be_empty
        message[:params].should have_key(:options)
        message[:params][:options].should include(:nameserver4, :nameserver, :nameserverentry2, :nameserverentry3,
                                                  :nameserverentry4, :nameserverentry, :ip, :nameservera2,
                                                  :nameservera3, :package, :nameservera4, :nameserver2,
                                                  :nameservera, :nameserver3)
      end
      it "should return an error on duplicate account" do
        @account.createacct(username: 'invalid', password: 'hummingbird123', domain: 'invalid-thing.com')
        message = @account.createacct(username: 'invalid', password: 'hummingbird123', domain: 'invalid-thing.com')
        message[:success].should be(false)
        message[:message].should match(/username already exists/i)
      end
    end
  end
end
|
module Hotdog
  # Gem version string, frozen so the shared constant cannot be mutated
  # in place (standard gem practice; value unchanged).
  VERSION = "0.23.0".freeze
end
Bump version; 0.24.0 [ci skip]
module Hotdog
  # Gem version string, frozen so the shared constant cannot be mutated
  # in place (standard gem practice; value unchanged).
  VERSION = "0.24.0".freeze
end
|
module Houcho
  # Gem version string, frozen so the shared constant cannot be mutated
  # in place (standard gem practice; value unchanged).
  VERSION = '0.0.9'.freeze
end
bump up version
module Houcho
  # Gem version string, frozen so the shared constant cannot be mutated
  # in place (standard gem practice; value unchanged).
  VERSION = '0.0.10'.freeze
end
|
Fixes a bug that occurred when a word ended with something like an abbreviation
|
Handle incomplete routing tree
|
require "identity_cache/version"
require 'cityhash'
require 'ar_transaction_changes'
require File.dirname(__FILE__) + '/memoized_cache_proxy'
require File.dirname(__FILE__) + '/belongs_to_caching'
module IdentityCache
CACHED_NIL = :idc_cached_nil
class << self
attr_accessor :logger, :readonly
attr_reader :cache
def cache_backend=(memcache)
cache.memcache = memcache
end
def cache
@cache ||= MemoizedCacheProxy.new
end
def logger
@logger || Rails.logger
end
def should_cache?
!readonly && ActiveRecord::Base.connection.open_transactions == 0
end
# Reads +key+ from the cache. On a miss, computes the value with the given
# block (forced onto the master DB connection), writes it back, and returns
# it. nil results are stored as the CACHED_NIL sentinel so a cached nil is
# distinguishable from a cache miss; the sentinel is unmapped on return.
def fetch(key, &block)
  result = cache.read(key) if should_cache?
  if result.nil?
    if block_given?
      # Read-through misses must hit the master to avoid replica lag.
      ActiveRecord::Base.connection.with_master do
        result = yield
      end
      result = map_cached_nil_for(result)
      if should_cache?
        cache.write(key, result)
      end
    end
    logger.debug "[IdentityCache] cache miss for #{key}"
  else
    logger.debug "[IdentityCache] cache hit for #{key}"
  end
  unmap_cached_nil_for(result)
end
def map_cached_nil_for(value)
value.nil? ? IdentityCache::CACHED_NIL : value
end
def unmap_cached_nil_for(value)
value == IdentityCache::CACHED_NIL ? nil : value
end
# Multi-key cache read. Returns a hash of key => value. Keys missing from
# the cache are resolved in a single batch by yielding them to the block
# (on the master connection) and written back one at a time. nil values
# round-trip through the CACHED_NIL sentinel, mirroring #fetch.
def fetch_multi(*keys, &block)
  return {} if keys.size == 0
  result = {}
  result = cache.read_multi(*keys) if should_cache?
  # Blank cached values are treated as misses and recomputed.
  missed_keys = keys - result.select {|key, value| value.present? }.keys
  if missed_keys.size > 0
    if block_given?
      replacement_results = nil
      ActiveRecord::Base.connection.with_master do
        replacement_results = yield missed_keys
      end
      # The block must return replacements in the same order as missed_keys.
      missed_keys.zip(replacement_results) do |(key, replacement_result)|
        if should_cache?
          replacement_result = map_cached_nil_for(replacement_result )
          cache.write(key, replacement_result)
          logger.debug "[IdentityCache] cache miss for #{key} (multi)"
        end
        result[key] = replacement_result
      end
    end
  else
    result.keys.each do |key|
      logger.debug "[IdentityCache] cache hit for #{key} (multi)"
    end
  end
  result.keys.each do |key|
    result[key] = unmap_cached_nil_for(result[key])
  end
  result
end
def included(base)
raise AlreadyIncludedError if base.respond_to? :cache_indexes
unless ActiveRecord::Base.connection.respond_to?(:with_master)
ActiveRecord::Base.connection.class.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
def with_master
yield
end
CODE
end
base.send(:include, ArTransactionChanges) unless base.include?(ArTransactionChanges)
base.send(:include, IdentityCache::BelongsToCaching)
base.after_commit :expire_cache
base.after_touch :expire_cache
base.class_attribute :cache_indexes
base.class_attribute :cache_attributes
base.class_attribute :cached_has_manys
base.class_attribute :cached_has_ones
base.send(:extend, ClassMethods)
base.private_class_method :require_if_necessary, :build_normalized_has_many_cache, :build_denormalized_association_cache, :add_parent_expiry_hook,
:identity_cache_multiple_value_dynamic_fetcher, :identity_cache_single_value_dynamic_fetcher
base.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
private :expire_cache, :was_new_record?, :fetch_denormalized_cached_association, :populate_denormalized_cached_association
CODE
end
def memcache_hash(key)
CityHash.hash64(key)
end
end
module ClassMethods
def cache_index(*fields)
options = fields.extract_options!
self.cache_indexes ||= []
self.cache_indexes.push fields
field_list = fields.join("_and_")
arg_list = (0...fields.size).collect { |i| "arg#{i}" }.join(',')
where_list = fields.each_with_index.collect { |f, i| "#{f} = \#{quote_value(arg#{i})}" }.join(" AND ")
if options[:unique]
self.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
def fetch_by_#{field_list}(#{arg_list})
sql = "SELECT id FROM #{table_name} WHERE #{where_list} LIMIT 1"
identity_cache_single_value_dynamic_fetcher(#{fields.inspect}, [#{arg_list}], sql)
end
# exception throwing variant
def fetch_by_#{field_list}!(#{arg_list})
fetch_by_#{field_list}(#{arg_list}) or raise ActiveRecord::RecordNotFound
end
CODE
else
self.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
def fetch_by_#{field_list}(#{arg_list})
sql = "SELECT id FROM #{table_name} WHERE #{where_list}"
identity_cache_multiple_value_dynamic_fetcher(#{fields.inspect}, [#{arg_list}], sql)
end
CODE
end
end
def identity_cache_single_value_dynamic_fetcher(fields, values, sql_on_miss)
cache_key = rails_cache_index_key_for_fields_and_values(fields, values)
id = IdentityCache.fetch(cache_key) { connection.select_value(sql_on_miss) }
unless id.nil?
record = fetch_by_id(id.to_i)
IdentityCache.cache.delete(cache_key) unless record
end
record
end
def identity_cache_multiple_value_dynamic_fetcher(fields, values, sql_on_miss)
cache_key = rails_cache_index_key_for_fields_and_values(fields, values)
ids = IdentityCache.fetch(cache_key) { connection.select_values(sql_on_miss) }
ids.empty? ? [] : fetch_multi(*ids)
end
def cache_has_many(association, options = {})
options[:embed] ||= false
options[:inverse_name] ||= self.name.underscore.to_sym
raise InverseAssociationError unless self.reflect_on_association(association)
self.cached_has_manys ||= {}
self.cached_has_manys[association] = options
if options[:embed]
build_denormalized_association_cache(association, options)
else
build_normalized_has_many_cache(association, options)
end
end
def cache_has_one(association, options = {})
options[:embed] ||= true
options[:inverse_name] ||= self.name.underscore.to_sym
raise InverseAssociationError unless self.reflect_on_association(association)
self.cached_has_ones ||= {}
self.cached_has_ones[association] = options
build_denormalized_association_cache(association, options)
end
def build_denormalized_association_cache(association, options)
options[:cached_accessor_name] ||= "fetch_#{association}"
options[:cache_variable_name] ||= "cached_#{association}"
options[:population_method_name] ||= "populate_#{association}_cache"
unless instance_methods.include?(options[:cached_accessor_name].to_sym)
self.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
def #{options[:cached_accessor_name]}
fetch_denormalized_cached_association('#{options[:cache_variable_name]}', :#{association})
end
def #{options[:population_method_name]}
populate_denormalized_cached_association('#{options[:cache_variable_name]}', :#{association})
end
CODE
association_class = reflect_on_association(association).klass
add_parent_expiry_hook(association_class, options.merge(:only_on_foreign_key_change => false))
end
end
def build_normalized_has_many_cache(association, options)
singular_association = association.to_s.singularize
association_class = reflect_on_association(association).klass
options[:cached_accessor_name] ||= "fetch_#{association}"
options[:ids_name] ||= "#{singular_association}_ids"
options[:ids_cache_name] ||= "cached_#{options[:ids_name]}"
options[:population_method_name] ||= "populate_#{association}_cache"
self.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
attr_reader :#{options[:ids_cache_name]}
def #{options[:population_method_name]}
@#{options[:ids_cache_name]} = #{options[:ids_name]}
end
def #{options[:cached_accessor_name]}
if IdentityCache.should_cache? || #{association}.loaded?
populate_#{association}_cache unless @#{options[:ids_cache_name]}
@cached_#{association} ||= #{association_class}.fetch_multi(*@#{options[:ids_cache_name]})
else
#{association}
end
end
CODE
add_parent_expiry_hook(association_class, options.merge(:only_on_foreign_key_change => true))
end
def cache_attribute(attribute, options = {})
options[:by] ||= :id
fields = Array(options[:by])
self.cache_attributes ||= []
self.cache_attributes.push [attribute, fields]
field_list = fields.join("_and_")
arg_list = (0...fields.size).collect { |i| "arg#{i}" }.join(',')
where_list = fields.each_with_index.collect { |f, i| "#{f} = \#{quote_value(arg#{i})}" }.join(" AND ")
self.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
def fetch_#{attribute}_by_#{field_list}(#{arg_list})
sql = "SELECT #{attribute} FROM #{table_name} WHERE #{where_list} LIMIT 1"
attribute_dynamic_fetcher(#{attribute.inspect}, #{fields.inspect}, [#{arg_list}], sql)
end
CODE
end
def attribute_dynamic_fetcher(attribute, fields, values, sql_on_miss)
cache_key = rails_cache_key_for_attribute_and_fields_and_values(attribute, fields, values)
IdentityCache.fetch(cache_key) { connection.select_value(sql_on_miss) }
end
def exists_with_identity_cache?(id)
!!fetch_by_id(id)
end
def fetch_by_id(id)
if IdentityCache.should_cache?
require_if_necessary do
object = IdentityCache.fetch(rails_cache_key(id)){ resolve_cache_miss(id) }
object.clear_association_cache if object.respond_to?(:clear_association_cache)
IdentityCache.logger.error "[IDC id mismatch] fetch_by_id_requested=#{id} fetch_by_id_got=#{object.id} for #{object.inspect[(0..100)]} " if object && object.id != id.to_i
object
end
else
self.find_by_id(id)
end
end
def fetch(id)
fetch_by_id(id) or raise(ActiveRecord::RecordNotFound, "Couldn't find #{self.class.name} with ID=#{id}")
end
# Batch-fetches records by id through the identity cache, preserving the
# order of +ids+ and dropping ids that resolve to no record. Falls back to
# a plain DB batch load when caching is disabled or inside a transaction.
def fetch_multi(*ids)
  if IdentityCache.should_cache?
    require_if_necessary do
      cache_keys = ids.map {|id| rails_cache_key(id) }
      key_to_id_map = Hash[ cache_keys.zip(ids) ]
      objects_by_key = IdentityCache.fetch_multi(*key_to_id_map.keys) do |unresolved_keys|
        # Use a fresh local instead of clobbering the outer `ids` argument.
        db_ids = unresolved_keys.map {|key| key_to_id_map[key] }
        records = find_batch(db_ids)
        # find_batch leaves nil placeholders for missing ids; only real
        # records can have their association caches populated (previously
        # hidden behind ActiveSupport's nil-swallowing `try`).
        records.compact.each(&:populate_association_caches)
        records
      end
      objects_in_order = cache_keys.map {|key| objects_by_key[key] }
      objects_in_order.each do |object|
        object.clear_association_cache if object.respond_to?(:clear_association_cache)
      end
      objects_in_order.compact
    end
  else
    find_batch(ids)
  end
end
# Defensively unmarshals cache hits: mem_cache_store hands back the raw
# marshaled String when it cannot unmarshal (e.g. the model class is not
# loaded yet), so decode here and, on an "undefined class" ArgumentError,
# force-load the named constant and retry once it resolves.
def require_if_necessary
  value = yield
  case value
  when String
    value = Marshal.load(value)
  when Array
    value.map! { |entry| entry.is_a?(String) ? Marshal.load(entry) : entry }
  end
  value
rescue ArgumentError => e
  if e.message =~ /undefined [\w\/]+ (\w+)/
    resolved = Kernel.const_get($1) rescue nil
    retry if resolved
  end
  raise
end
module ParentModelExpiration
def expire_parent_cache_on_changes(parent_name, foreign_key, parent_class, options = {})
new_parent = send(parent_name)
if new_parent && new_parent.respond_to?(:expire_primary_index, true)
if should_expire_identity_cache_parent?(foreign_key, options[:only_on_foreign_key_change])
new_parent.expire_primary_index
new_parent.expire_parent_cache if new_parent.respond_to?(:expire_parent_cache)
end
end
if transaction_changed_attributes[foreign_key].present?
begin
old_parent = parent_class.find(transaction_changed_attributes[foreign_key])
old_parent.expire_primary_index if old_parent.respond_to?(:expire_primary_index)
old_parent.expire_parent_cache if old_parent.respond_to?(:expire_parent_cache)
rescue ActiveRecord::RecordNotFound => e
# suppress errors finding the old parent if its been destroyed since it will have expired itself in that case
end
end
true
end
def should_expire_identity_cache_parent?(foreign_key, only_on_foreign_key_change)
if only_on_foreign_key_change
destroyed? || was_new_record? || transaction_changed_attributes[foreign_key].present?
else
true
end
end
end
def add_parent_expiry_hook(child_class, options = {})
child_association = child_class.reflect_on_association(options[:inverse_name])
raise InverseAssociationError unless child_association
foreign_key = child_association.association_foreign_key
parent_class ||= self.name
new_parent = options[:inverse_name]
child_class.send(:include, ArTransactionChanges) unless child_class.include?(ArTransactionChanges)
child_class.send(:include, ParentModelExpiration) unless child_class.include?(ParentModelExpiration)
child_class.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
after_commit :expire_parent_cache
after_touch :expire_parent_cache
def expire_parent_cache
expire_parent_cache_on_changes(:#{options[:inverse_name]}, '#{foreign_key}', #{parent_class}, #{options.inspect})
end
CODE
end
def resolve_cache_miss(id)
self.find_by_id(id, :include => cache_fetch_includes).tap do |object|
object.try(:populate_association_caches)
end
end
def all_cached_associations
(cached_has_manys || {}).merge(cached_has_ones || {})
end
def cache_fetch_includes
all_cached_associations.select{|k, v| v[:embed]}.map do |child_association, options|
child_class = reflect_on_association(child_association).try(:klass)
child_includes = child_class.respond_to?(:cache_fetch_includes) ? child_class.cache_fetch_includes : []
if child_includes.empty?
child_association
else
{ child_association => child_class.cache_fetch_includes }
end
end
end
# Type-casts +ids+ to the id column's type and loads the matching records
# (with cached-association includes) in the same order as +ids+; ids with
# no matching record are represented by nil placeholders in the result.
def find_batch(ids)
  @id_column ||= columns.detect {|c| c.name == "id"}
  ids = ids.map{ |id| @id_column.type_cast(id) }
  records = where('id IN (?)', ids).includes(cache_fetch_includes).all
  records_by_id = records.index_by(&:id)
  records = ids.map{ |id| records_by_id[id] }
  mismatching_ids = records.compact.map(&:id) - ids
  # Fix: the log line referenced a misspelled local (`mismatchig_ids`),
  # raising NameError exactly when a mismatch was actually detected.
  IdentityCache.logger.error "[IDC id mismatch] fetch_batch_requested=#{ids.inspect} fetch_batch_got=#{mismatching_ids.inspect} mismatching ids " unless mismatching_ids.empty?
  records
end
def rails_cache_key(id)
rails_cache_key_prefix + id.to_s
end
def rails_cache_key_prefix
@rails_cache_key_prefix ||= begin
column_list = columns.sort_by(&:name).map {|c| "#{c.name}:#{c.type}"} * ","
"IDC:blob:#{base_class.name}:#{IdentityCache.memcache_hash(column_list)}:"
end
end
def rails_cache_index_key_for_fields_and_values(fields, values)
"IDC:index:#{base_class.name}:#{rails_cache_string_for_fields_and_values(fields, values)}"
end
def rails_cache_key_for_attribute_and_fields_and_values(attribute, fields, values)
"IDC:attribute:#{base_class.name}:#{attribute}:#{rails_cache_string_for_fields_and_values(fields, values)}"
end
def rails_cache_string_for_fields_and_values(fields, values)
"#{fields.join('/')}:#{IdentityCache.memcache_hash(values.join('/'))}"
end
end
def populate_association_caches
self.class.all_cached_associations.each do |cached_association, options|
send(options[:population_method_name])
reflection = options[:embed] && self.class.reflect_on_association(cached_association)
if reflection && reflection.klass.respond_to?(:cached_has_manys)
child_objects = Array.wrap(send(options[:cached_accessor_name]))
child_objects.each(&:populate_association_caches)
end
end
end
def fetch_denormalized_cached_association(ivar_name, association_name)
ivar_full_name = :"@#{ivar_name}"
if IdentityCache.should_cache?
populate_denormalized_cached_association(ivar_name, association_name)
IdentityCache.unmap_cached_nil_for(instance_variable_get(ivar_full_name))
else
send(association_name.to_sym)
end
end
def populate_denormalized_cached_association(ivar_name, association_name)
ivar_full_name = :"@#{ivar_name}"
value = instance_variable_get(ivar_full_name)
return value unless value.nil?
reflection = association(association_name)
reflection.load_target unless reflection.loaded?
loaded_association = send(association_name)
instance_variable_set(ivar_full_name, IdentityCache.map_cached_nil_for(loaded_association))
end
def primary_cache_index_key
self.class.rails_cache_key(id)
end
def secondary_cache_index_key_for_current_values(fields)
self.class.rails_cache_index_key_for_fields_and_values(fields, fields.collect {|field| self.send(field)})
end
def secondary_cache_index_key_for_previous_values(fields)
self.class.rails_cache_index_key_for_fields_and_values(fields, old_values_for_fields(fields))
end
def attribute_cache_key_for_attribute_and_previous_values(attribute, fields)
self.class.rails_cache_key_for_attribute_and_fields_and_values(attribute, fields, old_values_for_fields(fields))
end
def old_values_for_fields(fields)
fields.map do |field|
field_string = field.to_s
if destroyed? && transaction_changed_attributes.has_key?(field_string)
transaction_changed_attributes[field_string]
elsif persisted? && transaction_changed_attributes.has_key?(field_string)
transaction_changed_attributes[field_string]
else
self.send(field)
end
end
end
def expire_primary_index
extra_keys = if respond_to? :updated_at
old_updated_at = old_values_for_fields([:updated_at]).first
"expiring_last_updated_at=#{old_updated_at}"
else
""
end
IdentityCache.logger.debug "[IdentityCache] expiring=#{self.class.name} expiring_id=#{id} #{extra_keys}"
IdentityCache.cache.delete(primary_cache_index_key)
end
def expire_secondary_indexes
cache_indexes.try(:each) do |fields|
if self.destroyed?
IdentityCache.cache.delete(secondary_cache_index_key_for_previous_values(fields))
else
new_cache_index_key = secondary_cache_index_key_for_current_values(fields)
IdentityCache.cache.delete(new_cache_index_key)
if !was_new_record?
old_cache_index_key = secondary_cache_index_key_for_previous_values(fields)
IdentityCache.cache.delete(old_cache_index_key) unless old_cache_index_key == new_cache_index_key
end
end
end
end
def expire_attribute_indexes
cache_attributes.try(:each) do |(attribute, fields)|
IdentityCache.cache.delete(attribute_cache_key_for_attribute_and_previous_values(attribute, fields)) unless was_new_record?
end
end
def expire_cache
expire_primary_index
expire_secondary_indexes
expire_attribute_indexes
true
end
# True when this record was inserted during the current transaction:
# it is not destroyed, and the tracked 'id' attribute change (from
# ArTransactionChanges) started out as nil.
def was_new_record?
  !destroyed? && transaction_changed_attributes.has_key?('id') && transaction_changed_attributes['id'].nil?
end
class AlreadyIncludedError < StandardError; end
class InverseAssociationError < StandardError
def initialize
super "Inverse name for association could not be determined. Please use the :inverse_name option to specify the inverse association name for this cache."
end
end
end
Ensure populate_association_caches is only invoked on non-nil records
require "identity_cache/version"
require 'cityhash'
require 'ar_transaction_changes'
require File.dirname(__FILE__) + '/memoized_cache_proxy'
require File.dirname(__FILE__) + '/belongs_to_caching'
module IdentityCache
CACHED_NIL = :idc_cached_nil
class << self
attr_accessor :logger, :readonly
attr_reader :cache
def cache_backend=(memcache)
cache.memcache = memcache
end
def cache
@cache ||= MemoizedCacheProxy.new
end
def logger
@logger || Rails.logger
end
def should_cache?
!readonly && ActiveRecord::Base.connection.open_transactions == 0
end
def fetch(key, &block)
result = cache.read(key) if should_cache?
if result.nil?
if block_given?
ActiveRecord::Base.connection.with_master do
result = yield
end
result = map_cached_nil_for(result)
if should_cache?
cache.write(key, result)
end
end
logger.debug "[IdentityCache] cache miss for #{key}"
else
logger.debug "[IdentityCache] cache hit for #{key}"
end
unmap_cached_nil_for(result)
end
def map_cached_nil_for(value)
value.nil? ? IdentityCache::CACHED_NIL : value
end
def unmap_cached_nil_for(value)
value == IdentityCache::CACHED_NIL ? nil : value
end
def fetch_multi(*keys, &block)
return {} if keys.size == 0
result = {}
result = cache.read_multi(*keys) if should_cache?
missed_keys = keys - result.select {|key, value| value.present? }.keys
if missed_keys.size > 0
if block_given?
replacement_results = nil
ActiveRecord::Base.connection.with_master do
replacement_results = yield missed_keys
end
missed_keys.zip(replacement_results) do |(key, replacement_result)|
if should_cache?
replacement_result = map_cached_nil_for(replacement_result )
cache.write(key, replacement_result)
logger.debug "[IdentityCache] cache miss for #{key} (multi)"
end
result[key] = replacement_result
end
end
else
result.keys.each do |key|
logger.debug "[IdentityCache] cache hit for #{key} (multi)"
end
end
result.keys.each do |key|
result[key] = unmap_cached_nil_for(result[key])
end
result
end
def included(base)
raise AlreadyIncludedError if base.respond_to? :cache_indexes
unless ActiveRecord::Base.connection.respond_to?(:with_master)
ActiveRecord::Base.connection.class.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
def with_master
yield
end
CODE
end
base.send(:include, ArTransactionChanges) unless base.include?(ArTransactionChanges)
base.send(:include, IdentityCache::BelongsToCaching)
base.after_commit :expire_cache
base.after_touch :expire_cache
base.class_attribute :cache_indexes
base.class_attribute :cache_attributes
base.class_attribute :cached_has_manys
base.class_attribute :cached_has_ones
base.send(:extend, ClassMethods)
base.private_class_method :require_if_necessary, :build_normalized_has_many_cache, :build_denormalized_association_cache, :add_parent_expiry_hook,
:identity_cache_multiple_value_dynamic_fetcher, :identity_cache_single_value_dynamic_fetcher
base.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
private :expire_cache, :was_new_record?, :fetch_denormalized_cached_association, :populate_denormalized_cached_association
CODE
end
def memcache_hash(key)
CityHash.hash64(key)
end
end
module ClassMethods
def cache_index(*fields)
options = fields.extract_options!
self.cache_indexes ||= []
self.cache_indexes.push fields
field_list = fields.join("_and_")
arg_list = (0...fields.size).collect { |i| "arg#{i}" }.join(',')
where_list = fields.each_with_index.collect { |f, i| "#{f} = \#{quote_value(arg#{i})}" }.join(" AND ")
if options[:unique]
self.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
def fetch_by_#{field_list}(#{arg_list})
sql = "SELECT id FROM #{table_name} WHERE #{where_list} LIMIT 1"
identity_cache_single_value_dynamic_fetcher(#{fields.inspect}, [#{arg_list}], sql)
end
# exception throwing variant
def fetch_by_#{field_list}!(#{arg_list})
fetch_by_#{field_list}(#{arg_list}) or raise ActiveRecord::RecordNotFound
end
CODE
else
self.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
def fetch_by_#{field_list}(#{arg_list})
sql = "SELECT id FROM #{table_name} WHERE #{where_list}"
identity_cache_multiple_value_dynamic_fetcher(#{fields.inspect}, [#{arg_list}], sql)
end
CODE
end
end
def identity_cache_single_value_dynamic_fetcher(fields, values, sql_on_miss)
cache_key = rails_cache_index_key_for_fields_and_values(fields, values)
id = IdentityCache.fetch(cache_key) { connection.select_value(sql_on_miss) }
unless id.nil?
record = fetch_by_id(id.to_i)
IdentityCache.cache.delete(cache_key) unless record
end
record
end
def identity_cache_multiple_value_dynamic_fetcher(fields, values, sql_on_miss)
cache_key = rails_cache_index_key_for_fields_and_values(fields, values)
ids = IdentityCache.fetch(cache_key) { connection.select_values(sql_on_miss) }
ids.empty? ? [] : fetch_multi(*ids)
end
def cache_has_many(association, options = {})
options[:embed] ||= false
options[:inverse_name] ||= self.name.underscore.to_sym
raise InverseAssociationError unless self.reflect_on_association(association)
self.cached_has_manys ||= {}
self.cached_has_manys[association] = options
if options[:embed]
build_denormalized_association_cache(association, options)
else
build_normalized_has_many_cache(association, options)
end
end
def cache_has_one(association, options = {})
options[:embed] ||= true
options[:inverse_name] ||= self.name.underscore.to_sym
raise InverseAssociationError unless self.reflect_on_association(association)
self.cached_has_ones ||= {}
self.cached_has_ones[association] = options
build_denormalized_association_cache(association, options)
end
def build_denormalized_association_cache(association, options)
options[:cached_accessor_name] ||= "fetch_#{association}"
options[:cache_variable_name] ||= "cached_#{association}"
options[:population_method_name] ||= "populate_#{association}_cache"
unless instance_methods.include?(options[:cached_accessor_name].to_sym)
self.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
def #{options[:cached_accessor_name]}
fetch_denormalized_cached_association('#{options[:cache_variable_name]}', :#{association})
end
def #{options[:population_method_name]}
populate_denormalized_cached_association('#{options[:cache_variable_name]}', :#{association})
end
CODE
association_class = reflect_on_association(association).klass
add_parent_expiry_hook(association_class, options.merge(:only_on_foreign_key_change => false))
end
end
def build_normalized_has_many_cache(association, options)
singular_association = association.to_s.singularize
association_class = reflect_on_association(association).klass
options[:cached_accessor_name] ||= "fetch_#{association}"
options[:ids_name] ||= "#{singular_association}_ids"
options[:ids_cache_name] ||= "cached_#{options[:ids_name]}"
options[:population_method_name] ||= "populate_#{association}_cache"
self.class_eval(ruby = <<-CODE, __FILE__, __LINE__)
attr_reader :#{options[:ids_cache_name]}
def #{options[:population_method_name]}
@#{options[:ids_cache_name]} = #{options[:ids_name]}
end
def #{options[:cached_accessor_name]}
if IdentityCache.should_cache? || #{association}.loaded?
populate_#{association}_cache unless @#{options[:ids_cache_name]}
@cached_#{association} ||= #{association_class}.fetch_multi(*@#{options[:ids_cache_name]})
else
#{association}
end
end
CODE
add_parent_expiry_hook(association_class, options.merge(:only_on_foreign_key_change => true))
end
def cache_attribute(attribute, options = {})
options[:by] ||= :id
fields = Array(options[:by])
self.cache_attributes ||= []
self.cache_attributes.push [attribute, fields]
field_list = fields.join("_and_")
arg_list = (0...fields.size).collect { |i| "arg#{i}" }.join(',')
where_list = fields.each_with_index.collect { |f, i| "#{f} = \#{quote_value(arg#{i})}" }.join(" AND ")
self.instance_eval(ruby = <<-CODE, __FILE__, __LINE__)
def fetch_#{attribute}_by_#{field_list}(#{arg_list})
sql = "SELECT #{attribute} FROM #{table_name} WHERE #{where_list} LIMIT 1"
attribute_dynamic_fetcher(#{attribute.inspect}, #{fields.inspect}, [#{arg_list}], sql)
end
CODE
end
def attribute_dynamic_fetcher(attribute, fields, values, sql_on_miss)
cache_key = rails_cache_key_for_attribute_and_fields_and_values(attribute, fields, values)
IdentityCache.fetch(cache_key) { connection.select_value(sql_on_miss) }
end
def exists_with_identity_cache?(id)
!!fetch_by_id(id)
end
def fetch_by_id(id)
if IdentityCache.should_cache?
require_if_necessary do
object = IdentityCache.fetch(rails_cache_key(id)){ resolve_cache_miss(id) }
object.clear_association_cache if object.respond_to?(:clear_association_cache)
IdentityCache.logger.error "[IDC id mismatch] fetch_by_id_requested=#{id} fetch_by_id_got=#{object.id} for #{object.inspect[(0..100)]} " if object && object.id != id.to_i
object
end
else
self.find_by_id(id)
end
end
def fetch(id)
fetch_by_id(id) or raise(ActiveRecord::RecordNotFound, "Couldn't find #{self.class.name} with ID=#{id}")
end
# Batch cache-aware fetch. Returns records in the order of the requested ids,
# with missing records dropped (compact).
def fetch_multi(*ids)
if IdentityCache.should_cache?
require_if_necessary do
cache_keys = ids.map {|id| rails_cache_key(id) }
# Keys come back from the cache, so remember which id each key stands for.
key_to_id_map = Hash[ cache_keys.zip(ids) ]
objects_by_key = IdentityCache.fetch_multi(*key_to_id_map.keys) do |unresolved_keys|
ids = unresolved_keys.map {|key| key_to_id_map[key] }
records = find_batch(ids)
# Warm embedded association caches before the records are serialized.
records.compact.each(&:populate_association_caches)
records
end
# Re-establish the caller's requested ordering.
objects_in_order = cache_keys.map {|key| objects_by_key[key] }
objects_in_order.each do |object|
object.clear_association_cache if object.respond_to?(:clear_association_cache)
end
objects_in_order.compact
end
else
find_batch(ids)
end
end
# Runs the block and unmarshals any raw String payloads it yields.
# If unmarshalling hits a constant that is not yet loaded, const_get forces
# Rails autoloading and the whole block is retried once per resolved constant.
def require_if_necessary
# mem_cache_store returns raw value if unmarshal fails
rval = yield
case rval
when String
rval = Marshal.load(rval)
when Array
rval.map!{ |v| v.kind_of?(String) ? Marshal.load(v) : v }
end
rval
rescue ArgumentError => e
# Marshal raises ArgumentError "undefined class/module Foo" for unknown
# constants; $1 captures the constant name from that message.
if e.message =~ /undefined [\w\/]+ (\w+)/
ok = Kernel.const_get($1) rescue nil
retry if ok
end
raise
end
# Mixed into child models by add_parent_expiry_hook: expires the caches of
# both the record's current parent and (on re-parenting) its previous parent.
module ParentModelExpiration
def expire_parent_cache_on_changes(parent_name, foreign_key, parent_class, options = {})
new_parent = send(parent_name)
# respond_to?(..., true) because expire_primary_index may be non-public.
if new_parent && new_parent.respond_to?(:expire_primary_index, true)
if should_expire_identity_cache_parent?(foreign_key, options[:only_on_foreign_key_change])
new_parent.expire_primary_index
# Bubble expiry upward through grandparents when they are hooked too.
new_parent.expire_parent_cache if new_parent.respond_to?(:expire_parent_cache)
end
end
# A changed foreign key means the record moved: expire the old parent too.
if transaction_changed_attributes[foreign_key].present?
begin
old_parent = parent_class.find(transaction_changed_attributes[foreign_key])
old_parent.expire_primary_index if old_parent.respond_to?(:expire_primary_index)
old_parent.expire_parent_cache if old_parent.respond_to?(:expire_parent_cache)
rescue ActiveRecord::RecordNotFound => e
# suppress errors finding the old parent if its been destroyed since it will have expired itself in that case
end
end
true
end
def should_expire_identity_cache_parent?(foreign_key, only_on_foreign_key_change)
if only_on_foreign_key_change
destroyed? || was_new_record? || transaction_changed_attributes[foreign_key].present?
else
true
end
end
end
# Installs after_commit/after_touch hooks on child_class so that saving,
# touching or destroying a child expires its parent's cache entries.
# Raises InverseAssociationError when options[:inverse_name] does not name a
# real association on the child.
# Fix: removed two dead locals — `new_parent = options[:inverse_name]` (never
# read) and the unused `ruby =` capture of the heredoc.
def add_parent_expiry_hook(child_class, options = {})
  child_association = child_class.reflect_on_association(options[:inverse_name])
  raise InverseAssociationError unless child_association
  foreign_key = child_association.association_foreign_key
  parent_class ||= self.name
  child_class.send(:include, ArTransactionChanges) unless child_class.include?(ArTransactionChanges)
  child_class.send(:include, ParentModelExpiration) unless child_class.include?(ParentModelExpiration)
  child_class.class_eval(<<-CODE, __FILE__, __LINE__)
    after_commit :expire_parent_cache
    after_touch :expire_parent_cache

    def expire_parent_cache
      expire_parent_cache_on_changes(:#{options[:inverse_name]}, '#{foreign_key}', #{parent_class}, #{options.inspect})
    end
  CODE
end
# Loads a record (with embedded associations) from the DB and warms its
# association caches before it is stored in the cache.
def resolve_cache_miss(id)
  record = self.find_by_id(id, :include => cache_fetch_includes)
  record.try(:populate_association_caches)
  record
end
# Every cached association config (has_many and has_one), keyed by name.
def all_cached_associations
  has_manys = cached_has_manys || {}
  has_ones = cached_has_ones || {}
  has_manys.merge(has_ones)
end
# Builds the :include argument for cache-miss DB loads: embedded associations
# only, nested recursively ({assoc => child_includes}) when the child class
# itself embeds associations.
def cache_fetch_includes
all_cached_associations.select{|k, v| v[:embed]}.map do |child_association, options|
child_class = reflect_on_association(child_association).try(:klass)
child_includes = child_class.respond_to?(:cache_fetch_includes) ? child_class.cache_fetch_includes : []
if child_includes.empty?
child_association
else
{ child_association => child_class.cache_fetch_includes }
end
end
end
# Loads the records for ids from the DB, preserving the requested order and
# leaving a nil in place of each missing id.
# Fix: the error-log line referenced `mismatchig_ids` (typo), so any actual
# id mismatch raised NameError instead of being logged.
def find_batch(ids)
  @id_column ||= columns.detect {|c| c.name == "id"}
  # Normalize ids to the column's type so index_by/map lookups line up.
  ids = ids.map{ |id| @id_column.type_cast(id) }
  records = where('id IN (?)', ids).includes(cache_fetch_includes).all
  records_by_id = records.index_by(&:id)
  records = ids.map{ |id| records_by_id[id] }
  mismatching_ids = records.compact.map(&:id) - ids
  IdentityCache.logger.error "[IDC id mismatch] fetch_batch_requested=#{ids.inspect} fetch_batch_got=#{mismatching_ids.inspect} mismatching ids " unless mismatching_ids.empty?
  records
end
# Primary blob-cache key for the given record id.
def rails_cache_key(id)
  "#{rails_cache_key_prefix}#{id}"
end
# Schema-versioned key prefix: hashing the sorted column list means every
# schema change automatically invalidates all existing blob keys.
def rails_cache_key_prefix
  @rails_cache_key_prefix ||= begin
    schema_signature = columns.sort_by(&:name).map { |column| "#{column.name}:#{column.type}" }.join(",")
    "IDC:blob:#{base_class.name}:#{IdentityCache.memcache_hash(schema_signature)}:"
  end
end
# Key for a secondary (fields -> id) index entry.
def rails_cache_index_key_for_fields_and_values(fields, values)
  suffix = rails_cache_string_for_fields_and_values(fields, values)
  "IDC:index:#{base_class.name}:#{suffix}"
end
# Key for a cached single-attribute entry (see cache_attribute).
def rails_cache_key_for_attribute_and_fields_and_values(attribute, fields, values)
  suffix = rails_cache_string_for_fields_and_values(fields, values)
  "IDC:attribute:#{base_class.name}:#{attribute}:#{suffix}"
end
# Shared key suffix: field names joined with '/', plus a hash of the values
# (hashed so arbitrary values stay memcache-key safe).
def rails_cache_string_for_fields_and_values(fields, values)
  hashed_values = IdentityCache.memcache_hash(values.join('/'))
  "#{fields.join('/')}:#{hashed_values}"
end
end
# Warms every registered association cache on this instance, recursing into
# embedded children so a whole object graph gets serialized together.
def populate_association_caches
self.class.all_cached_associations.each do |cached_association, options|
send(options[:population_method_name])
# Only recurse for embedded associations whose class also uses IdentityCache.
reflection = options[:embed] && self.class.reflect_on_association(cached_association)
if reflection && reflection.klass.respond_to?(:cached_has_manys)
# Array.wrap handles both has_one (single object) and has_many (array).
child_objects = Array.wrap(send(options[:cached_accessor_name]))
child_objects.each(&:populate_association_caches)
end
end
end
# Reads an embedded association through its cache ivar, translating the
# cached-nil sentinel back to nil. Falls back to the plain association when
# caching is disabled.
def fetch_denormalized_cached_association(ivar_name, association_name)
ivar_full_name = :"@#{ivar_name}"
if IdentityCache.should_cache?
populate_denormalized_cached_association(ivar_name, association_name)
# unmap converts the stored "cached nil" marker back into a real nil.
IdentityCache.unmap_cached_nil_for(instance_variable_get(ivar_full_name))
else
send(association_name.to_sym)
end
end
# Fills the cache ivar for an embedded association if it is not already set.
# nil (in the ivar) means "not populated yet"; a loaded nil association is
# stored via the cached-nil sentinel so it is distinguishable.
def populate_denormalized_cached_association(ivar_name, association_name)
ivar_full_name = :"@#{ivar_name}"
value = instance_variable_get(ivar_full_name)
return value unless value.nil?
reflection = association(association_name)
reflection.load_target unless reflection.loaded?
loaded_association = send(association_name)
instance_variable_set(ivar_full_name, IdentityCache.map_cached_nil_for(loaded_association))
end
# Blob-cache key for this record instance.
def primary_cache_index_key
  klass = self.class
  klass.rails_cache_key(id)
end
# Secondary-index key built from the record's current field values.
def secondary_cache_index_key_for_current_values(fields)
  current_values = fields.map { |field| send(field) }
  self.class.rails_cache_index_key_for_fields_and_values(fields, current_values)
end
# Secondary-index key built from the pre-transaction field values, used to
# expire the entry a record occupied before it changed.
def secondary_cache_index_key_for_previous_values(fields)
  previous_values = old_values_for_fields(fields)
  self.class.rails_cache_index_key_for_fields_and_values(fields, previous_values)
end
# Attribute-cache key built from the pre-transaction field values.
def attribute_cache_key_for_attribute_and_previous_values(attribute, fields)
  previous_values = old_values_for_fields(fields)
  self.class.rails_cache_key_for_attribute_and_fields_and_values(attribute, fields, previous_values)
end
# Returns the value each field had before the current transaction: the
# recorded pre-change value when one exists, otherwise the live attribute.
# Fix: the destroyed? and persisted? branches were byte-for-byte duplicates
# (both returned transaction_changed_attributes[field_string]); they are
# merged into one condition with identical behavior — only a brand-new,
# unsaved record falls through to the live attribute.
def old_values_for_fields(fields)
  fields.map do |field|
    field_string = field.to_s
    if (destroyed? || persisted?) && transaction_changed_attributes.has_key?(field_string)
      transaction_changed_attributes[field_string]
    else
      self.send(field)
    end
  end
end
# Deletes this record's blob-cache entry, logging the pre-change updated_at
# (when the model has one) to help trace expiry storms.
def expire_primary_index
extra_keys = if respond_to? :updated_at
old_updated_at = old_values_for_fields([:updated_at]).first
"expiring_last_updated_at=#{old_updated_at}"
else
""
end
IdentityCache.logger.debug "[IdentityCache] expiring=#{self.class.name} expiring_id=#{id} #{extra_keys}"
IdentityCache.cache.delete(primary_cache_index_key)
end
# Deletes secondary-index entries for this record. On destroy only the old
# key exists; otherwise both the new key and (for updates that changed the
# indexed fields) the old key are expired.
def expire_secondary_indexes
cache_indexes.try(:each) do |fields|
if self.destroyed?
IdentityCache.cache.delete(secondary_cache_index_key_for_previous_values(fields))
else
new_cache_index_key = secondary_cache_index_key_for_current_values(fields)
IdentityCache.cache.delete(new_cache_index_key)
# A freshly created record has no previous key to expire.
if !was_new_record?
old_cache_index_key = secondary_cache_index_key_for_previous_values(fields)
IdentityCache.cache.delete(old_cache_index_key) unless old_cache_index_key == new_cache_index_key
end
end
end
end
# Deletes every cached single-attribute entry keyed by this record's
# previous field values; new records have nothing to expire.
def expire_attribute_indexes
  cache_attributes.try(:each) do |(attribute, fields)|
    next if was_new_record?
    stale_key = attribute_cache_key_for_attribute_and_previous_values(attribute, fields)
    IdentityCache.cache.delete(stale_key)
  end
end
# Expires every cache entry for this record (blob, secondary indexes,
# attribute caches). Always returns true so it is safe as an AR callback.
def expire_cache
  expire_primary_index
  expire_secondary_indexes
  expire_attribute_indexes

  true
end
# True when, within the current transaction, the record was created:
# its 'id' attribute changed from nil and it was not destroyed.
def was_new_record?
  return false if destroyed?
  changes = transaction_changed_attributes
  changes.has_key?('id') && changes['id'].nil?
end
# NOTE(review): raise sites are outside this chunk — presumably raised when a
# cache declaration is added twice; confirm against the rest of the module.
class AlreadyIncludedError < StandardError; end
# Raised by add_parent_expiry_hook when reflect_on_association cannot find
# the child's inverse association.
class InverseAssociationError < StandardError
def initialize
super "Inverse name for association could not be determined. Please use the :inverse_name option to specify the inverse association name for this cache."
end
end
end
|
require 'logger'
require 'pathname'
require 'yaml'
# Wraps a built-image tarball (.tar.bz2): extracts it next to itself, reads
# the bundled job-board-register.yml into env, and indexes the extracted
# top-level files.
class ImageMetadata
# env is an external accessor object (supports []=, to_hash, load_envdir) —
# assumed from usage below; confirm against the caller.
def initialize(tarball: '', env: nil, url: '', logger: nil)
@tarball = tarball
@env = env
@url = url
@logger = logger
@env_hash = nil
@files = {}
end
def parent_dir
@parent_dir ||= File.dirname(File.expand_path(tarball))
end
def valid?
!tarball.nil? && File.exist?(tarball)
end
# Extracts and loads everything; populates env_hash and the files index.
def load!
extract_tarball
load_job_board_register_yml
load_image_metadata
@env_hash = env.to_hash
@files = {}
return unless dir.exist?
# Keep only existing, non-directory children of the extracted dir.
dir.children.reject { |c| !c.exist? || c.directory? }.each do |p|
@files[p.basename.to_s] = p
end
end
# NOTE(review): the :tarball_files reader is shadowed by the method defined
# below — the attr_reader entry is effectively dead.
attr_reader :env_hash, :files, :tarball, :tarball_files, :url, :env
alias to_s tarball
def job_board_register_hash
YAML.load_file(job_board_register_yml)
end
# Lists files inside the tarball (via `tar -tf`), stripped of the top-level
# directory prefix and with directory entries removed.
def tarball_files
@tarball_files ||= begin
`tar -tf #{tarball}`.split("\n")
.map(&:strip)
.reject { |p| p.end_with?('/') }
.map do |p|
p.sub(%r{#{File.basename(tarball, '.tar.bz2')}/}, '')
end
end
end
private def relbase
@relbase ||= File.dirname(tarball)
end
# Copies selected register tags into env as OS/DIST/TAGS.
private def load_job_board_register_yml
loaded = job_board_register_hash
env['OS'] = loaded['tags']['os']
env['DIST'] = loaded['tags']['dist']
env['TAGS'] = loaded['tags_string']
end
private def job_board_register_yml
@job_board_register_yml ||= File.join(dir, 'job-board-register.yml')
end
private def extract_tarball
system(*extract_command, out: '/dev/null')
end
private def extract_command
%W(tar -C #{relbase} -xjf #{File.expand_path(tarball)})
end
# Loads key/value pairs from the extracted env/ directory, if present.
private def load_image_metadata
if envdir_isdir?
env.load_envdir(envdir) do |key, _|
logger.debug "loading #{key}"
end
else
logger.warn "#{envdir} does not exist"
end
end
# Extraction target directory (tarball path minus the .tar.bz2 suffix).
# Relies on Pathname being required at the top of the file.
private def dir
@dir ||= Pathname.new(
File.join(relbase, File.basename(tarball, '.tar.bz2'))
)
end
private def tarball_exists?
File.exist?(tarball)
end
private def envdir_isdir?
File.directory?(envdir)
end
private def envdir
@envdir ||= File.join(dir, 'env')
end
private def logger
@logger ||= Logger.new($stdout)
end
end
Ensure the Pathname library is required.
require 'logger'
require 'pathname'
require 'yaml'
# Wraps a built-image tarball (.tar.bz2): extracts it next to itself, reads
# the bundled job-board-register.yml into env, and indexes the extracted
# top-level files.
class ImageMetadata
# env is an external accessor object (supports []=, to_hash, load_envdir) —
# assumed from usage below; confirm against the caller.
def initialize(tarball: '', env: nil, url: '', logger: nil)
@tarball = tarball
@env = env
@url = url
@logger = logger
@env_hash = nil
@files = {}
end
def parent_dir
@parent_dir ||= File.dirname(File.expand_path(tarball))
end
def valid?
!tarball.nil? && File.exist?(tarball)
end
# Extracts and loads everything; populates env_hash and the files index.
def load!
extract_tarball
load_job_board_register_yml
load_image_metadata
@env_hash = env.to_hash
@files = {}
return unless dir.exist?
# Keep only existing, non-directory children of the extracted dir.
dir.children.reject { |c| !c.exist? || c.directory? }.each do |p|
@files[p.basename.to_s] = p
end
end
# NOTE(review): the :tarball_files reader is shadowed by the method defined
# below — the attr_reader entry is effectively dead.
attr_reader :env_hash, :files, :tarball, :tarball_files, :url, :env
alias to_s tarball
def job_board_register_hash
YAML.load_file(job_board_register_yml)
end
# Lists files inside the tarball (via `tar -tf`), stripped of the top-level
# directory prefix and with directory entries removed.
def tarball_files
@tarball_files ||= begin
`tar -tf #{tarball}`.split("\n")
.map(&:strip)
.reject { |p| p.end_with?('/') }
.map do |p|
p.sub(%r{#{File.basename(tarball, '.tar.bz2')}/}, '')
end
end
end
private def relbase
@relbase ||= File.dirname(tarball)
end
# Copies selected register tags into env as OS/DIST/TAGS.
private def load_job_board_register_yml
loaded = job_board_register_hash
env['OS'] = loaded['tags']['os']
env['DIST'] = loaded['tags']['dist']
env['TAGS'] = loaded['tags_string']
end
private def job_board_register_yml
@job_board_register_yml ||= File.join(dir, 'job-board-register.yml')
end
private def extract_tarball
system(*extract_command, out: '/dev/null')
end
private def extract_command
%W(tar -C #{relbase} -xjf #{File.expand_path(tarball)})
end
# Loads key/value pairs from the extracted env/ directory, if present.
private def load_image_metadata
if envdir_isdir?
env.load_envdir(envdir) do |key, _|
logger.debug "loading #{key}"
end
else
logger.warn "#{envdir} does not exist"
end
end
# Extraction target directory (tarball path minus the .tar.bz2 suffix).
private def dir
@dir ||= Pathname.new(
File.join(relbase, File.basename(tarball, '.tar.bz2'))
)
end
private def tarball_exists?
File.exist?(tarball)
end
private def envdir_isdir?
File.directory?(envdir)
end
private def envdir
@envdir ||= File.join(dir, 'env')
end
private def logger
@logger ||= Logger.new($stdout)
end
end
|
Add basic Slack class.
|
More refactoring
|
#TODO: rename this class to ChartDataPacket
#TODO: find a fitting place for this file, possibly inside a module
class Chart
@@chart_index = 0
@@header = false
attr :chart_id
attr :width
attr :data
# Builds a chart packet: bumps the shared chart counter and adapts the raw
# series data into a chart-config template for the given type.
def initialize(type, data, option = nil, width = nil)
  @@chart_index += 1
  @width = width unless width.nil?
  @data = self.class.dataAdapter(type, data, option)
  @chart_id = @@chart_index
end
# DOM container id for this chart, e.g. "chart_container_3".
def get_id_str
  "chart_container_#{@chart_id}"
end
# True until set_header records that the shared chart header was emitted.
def self.include_header?
  @@header ? false : true
end
# Marks the shared chart header as already emitted (it only ever flips to
# true, so ||= is equivalent to plain assignment here).
def self.set_header
  @@header ||= true
end
# Applies the defaults used when no optional configuration is supplied:
# blank title and no subtitle/axis sections.
# NOTE(review): name keeps its non-snake_case form because callers use it.
def self.set_Template_Optional_Params(template)
  template[:title][:text] = ""
  [:subtitle, :yAxis, :xAxis].each { |section| template.delete(section) }
  template
end
# Merges the caller's series data (and optional configuration) into the
# template for the given chart type and returns the finished config hash.
# Fixes: the no-config branch called self.class.set_Template_optional_Params —
# wrong receiver (inside a class method `self` is already Chart, so
# `self.class` is Class) AND wrong casing versus the defined
# set_Template_Optional_Params — which raised NoMethodError. The
# self.class.validate_optional_conf call had the same receiver bug. Both are
# now plain same-class calls.
def self.dataAdapter(type, data, optionalConf)
  template = data_template[type]
  if type == :pie
    data[:type] = 'pie'
    template[:series] = [data]
  else
    template[:series] = data
  end
  if optionalConf.nil?
    template = set_Template_Optional_Params(template)
  else
    template[:title][:text] = optionalConf[:title].nil? ? "" : optionalConf[:title]
    template = validate_optional_conf(optionalConf, template)
  end
  template
end
# Applies the supplied optional settings to the template, pruning the
# sections that were not provided (whole :subtitle/:yAxis keys; only the
# :title / :categories sub-keys of :xAxis).
def self.validate_optional_conf(optionalConf, template)
  if (subtitle = optionalConf[:subtitle]).nil?
    template.delete(:subtitle)
  else
    template[:subtitle] = { :text => subtitle }
  end
  if (y_axis = optionalConf[:y_axis]).nil?
    template.delete(:yAxis)
  else
    template[:yAxis][:title][:text] = y_axis
  end
  if (x_axis = optionalConf[:x_axis]).nil?
    template[:xAxis].delete(:title)
  else
    template[:xAxis][:title][:text] = x_axis
  end
  if (categories = optionalConf[:x_axis_categories]).nil?
    template[:xAxis].delete(:categories)
  else
    template[:xAxis][:categories] = categories
  end
  template
end
def self.data_template()
{
:pie => {
:chart => {
:plotBackgroundColor => nil,
:plotBorderWidth => nil,
:plotShadow => false
},
:title => {
:text => 'Title'
},
:subtitle => {
:text => 'Title'
},
:xAxis => {},
:yAxis => {},
:tooltip => {
:pointFormat => '{series.name}: <b>{point.percentage}%</b>',
:percentageDecimals => 1
},
:plotOptions => {
:pie => {
:allowPointSelect => true,
:cursor => 'pointer',
:dataLabels => {
:enabled => true,
:color => '#000000',
:connectorColor => '#000000',
:format => "<b>{point.name}</b>: {percentage:.2f} %"
}
}
},
:series => [
{
:type => 'pie',
:name => 'XXX pie',
:data => [
['part 1',45.0],
['part 2',26.8],
['part 3',8.5],
['part 4',6.2],
['part 5',0.7]
]
}
]
},
:bar => {
:chart => {
:type => 'column'
},
:title => {:text => "Review score for XXXX"},
:subtitle => {:text => "subtitle here"},
:xAxis => {
:title =>{},
:categories => [ 'Problem1', 'Problem2', 'Problem3', 'Problem4', 'Problem5', 'Problem6', 'Problem7', 'Problem8', 'Problem9', 'Problem10', 'Problem11','Problem12']
},
:yAxis => {
:min => 0,
:title => {
:text => 'score'
}
},
:tooltip => {
:headerFormat => '<span style="font-size:10px">{point.key}</span><table>',
:pointFormat => '<tr><td style="color:{series.color};padding:0">{series.name}: </td>' +'<td style="padding:0"><b>{point.y:.1f}</b></td></tr>',
:footerFormat => '</table>',
:shared => true,
:useHTML => true
},
:plotOptions => {
:column => {
:pointPadding => 0.2,
:borderWidth => 0
}
},
:series => [
{
:name => 'review 1',
:data => [9.9, 7.5, 6.4, 9.2, 4.0, 6.0, 5.6, 8.5, 6.4, 4.1, 5.6, 4.4]
}, {
:name => 'review 2',
:data => [3.6, 8.8, 8.5, 3.4, 6.0, 4.5, 5.0, 4.3, 9.2, 8.5, 6.6, 9.3]
}, {
:name => 'review 3',
:data => [8.9, 8.8, 9.3, 4.4, 7.0, 8.3, 9.0, 9.6, 5.4, 6.2, 9.3, 5.2]
}
]
},
:line => {
:title => {:text => "Review score for XXXX"},
:subtitle => {:text => "subtitle here"},
:xAxis => {
:title =>{},
:categories => [ 'Problem1', 'Problem2', 'Problem3', 'Problem4', 'Problem5', 'Problem6', 'Problem7', 'Problem8', 'Problem9', 'Problem10', 'Problem11','Problem12']
},
:yAxis => {
:min => 0,
:title => {
:text => 'score'
}
},
:tooltip => {
:headerFormat => '<span style="font-size:10px">{point.key}</span><table>',
:pointFormat => '<tr><td style="color:{series.color};padding:0">{series.name}: </td>' +'<td style="padding:0"><b>{point.y:.1f} mm</b></td></tr>',
:footerFormat => '</table>',
:shared => true,
:useHTML => true
},
:plotOptions => {
:column => {
:pointPadding => 0.2,
:borderWidth => 0
}
},
:series => [
{
:name => 'review 1',
:data => [9.9, 7.5, 6.4, 9.2, 4.0, 6.0, 5.6, 8.5, 6.4, 4.1, 5.6, 4.4]
}, {
:name => 'review 2',
:data => [3.6, 8.8, 8.5, 3.4, 6.0, 4.5, 5.0, 4.3, 9.2, 8.5, 6.6, 9.3]
}, {
:name => 'review 3',
:data => [8.9, 8.8, 9.3, 4.4, 7.0, 8.3, 9.0, 9.6, 5.4, 6.2, 9.3, 5.2]
}
]
},
:scatter => {
:chart => {
:type => 'scatter',
:zoomType => 'xy'
},
:title => {
:text => 'Height Versus Weight of 507 Individuals by Gender'
},
:subtitle => {
:text => 'Source: Heinz 2003'
},
:xAxis => {
:title => {
:enabled => true,
:text => 'Height (cm)'
},
:startOnTick => true,
:endOnTick => true,
:showLastLabel => true
},
:yAxis => {
:title => {
:text => 'Weight (kg)'
}
},
:legend => {
:layout => 'vertical',
:align => 'left',
:verticalAlign => 'top',
:x => 100,
:y => 70,
:floating => true,
:backgroundColor => '#FFFFFF',
:borderWidth => 1
},
:plotOptions => {
:scatter => {
:marker => {
:radius => 5,
:states => {
:hover => {
:enabled => true,
:lineColor => 'rgb(100,100,100)'
}
}
},
:states => {
:hover => {
:marker => {
:enabled => false
}
}
},
:tooltip => {
:headerFormat => '<b>{series.name}</b><br>',
:pointFormat => '{point.x} cm, {point.y} kg'
}
}
},
:series => [
{
:name => 'Female',
:color => 'rgba(223, 83, 83, .5)',
:data => [[161.2, 51.6], [167.5, 59.0], [159.5, 49.2], [157.0, 63.0], [155.8, 53.6],
[170.0, 59.0], [159.1, 47.6], [166.0, 69.8], [176.2, 66.8], [160.2, 75.2],
[172.5, 55.2], [170.9, 54.2], [172.9, 62.5], [153.4, 42.0], [160.0, 50.0],
[147.2, 49.8], [168.2, 49.2], [175.0, 73.2], [157.0, 47.8], [167.6, 68.8],
[159.5, 50.6], [175.0, 82.5], [166.8, 57.2], [176.5, 87.8], [170.2, 72.8],
[174.0, 54.5], [173.0, 59.8], [179.9, 67.3], [170.5, 67.8], [160.0, 47.0],
[154.4, 46.2], [162.0, 55.0], [176.5, 83.0], [160.0, 54.4], [152.0, 45.8],
[162.1, 53.6], [170.0, 73.2], [160.2, 52.1], [161.3, 67.9], [166.4, 56.6],
[168.9, 62.3], [163.8, 58.5], [167.6, 54.5], [160.0, 50.2], [161.3, 60.3],
[167.6, 58.3], [165.1, 56.2], [160.0, 50.2], [170.0, 72.9], [157.5, 59.8],
[167.6, 61.0], [160.7, 69.1], [163.2, 55.9], [152.4, 46.5], [157.5, 54.3],
[168.3, 54.8], [180.3, 60.7], [165.5, 60.0], [165.0, 62.0], [164.5, 60.3],
[156.0, 52.7], [160.0, 74.3], [163.0, 62.0], [165.7, 73.1], [161.0, 80.0],
[162.0, 54.7], [166.0, 53.2], [174.0, 75.7], [172.7, 61.1], [167.6, 55.7],
[151.1, 48.7], [164.5, 52.3], [163.5, 50.0], [152.0, 59.3], [169.0, 62.5],
[164.0, 55.7], [161.2, 54.8], [155.0, 45.9], [170.0, 70.6], [176.2, 67.2],
[170.0, 69.4], [162.5, 58.2], [170.3, 64.8], [164.1, 71.6], [169.5, 52.8],
[163.2, 59.8], [154.5, 49.0], [159.8, 50.0], [173.2, 69.2], [170.0, 55.9],
[161.4, 63.4], [169.0, 58.2], [166.2, 58.6], [159.4, 45.7], [162.5, 52.2],
[159.0, 48.6], [162.8, 57.8], [159.0, 55.6], [179.8, 66.8], [162.9, 59.4],
[161.0, 53.6], [151.1, 73.2], [168.2, 53.4], [168.9, 69.0], [173.2, 58.4],
[171.8, 56.2], [178.0, 70.6], [164.3, 59.8], [163.0, 72.0], [168.5, 65.2],
[166.8, 56.6], [172.7, 105.2], [163.5, 51.8], [169.4, 63.4], [167.8, 59.0],
[159.5, 47.6], [167.6, 63.0], [161.2, 55.2], [160.0, 45.0], [163.2, 54.0],
[162.2, 50.2], [161.3, 60.2], [149.5, 44.8], [157.5, 58.8], [163.2, 56.4],
[172.7, 62.0], [155.0, 49.2], [156.5, 67.2], [164.0, 53.8], [160.9, 54.4],
[162.8, 58.0], [167.0, 59.8], [160.0, 54.8], [160.0, 43.2], [168.9, 60.5],
[158.2, 46.4], [156.0, 64.4], [160.0, 48.8], [167.1, 62.2], [158.0, 55.5],
[167.6, 57.8], [156.0, 54.6], [162.1, 59.2], [173.4, 52.7], [159.8, 53.2],
[170.5, 64.5], [159.2, 51.8], [157.5, 56.0], [161.3, 63.6], [162.6, 63.2],
[160.0, 59.5], [168.9, 56.8], [165.1, 64.1], [162.6, 50.0], [165.1, 72.3],
[166.4, 55.0], [160.0, 55.9], [152.4, 60.4], [170.2, 69.1], [162.6, 84.5],
[170.2, 55.9], [158.8, 55.5], [172.7, 69.5], [167.6, 76.4], [162.6, 61.4],
[167.6, 65.9], [156.2, 58.6], [175.2, 66.8], [172.1, 56.6], [162.6, 58.6],
[160.0, 55.9], [165.1, 59.1], [182.9, 81.8], [166.4, 70.7], [165.1, 56.8],
[177.8, 60.0], [165.1, 58.2], [175.3, 72.7], [154.9, 54.1], [158.8, 49.1],
[172.7, 75.9], [168.9, 55.0], [161.3, 57.3], [167.6, 55.0], [165.1, 65.5],
[175.3, 65.5], [157.5, 48.6], [163.8, 58.6], [167.6, 63.6], [165.1, 55.2],
[165.1, 62.7], [168.9, 56.6], [162.6, 53.9], [164.5, 63.2], [176.5, 73.6],
[168.9, 62.0], [175.3, 63.6], [159.4, 53.2], [160.0, 53.4], [170.2, 55.0],
[162.6, 70.5], [167.6, 54.5], [162.6, 54.5], [160.7, 55.9], [160.0, 59.0],
[157.5, 63.6], [162.6, 54.5], [152.4, 47.3], [170.2, 67.7], [165.1, 80.9],
[172.7, 70.5], [165.1, 60.9], [170.2, 63.6], [170.2, 54.5], [170.2, 59.1],
[161.3, 70.5], [167.6, 52.7], [167.6, 62.7], [165.1, 86.3], [162.6, 66.4],
[152.4, 67.3], [168.9, 63.0], [170.2, 73.6], [175.2, 62.3], [175.2, 57.7],
[160.0, 55.4], [165.1, 104.1], [174.0, 55.5], [170.2, 77.3], [160.0, 80.5],
[167.6, 64.5], [167.6, 72.3], [167.6, 61.4], [154.9, 58.2], [162.6, 81.8],
[175.3, 63.6], [171.4, 53.4], [157.5, 54.5], [165.1, 53.6], [160.0, 60.0],
[174.0, 73.6], [162.6, 61.4], [174.0, 55.5], [162.6, 63.6], [161.3, 60.9],
[156.2, 60.0], [149.9, 46.8], [169.5, 57.3], [160.0, 64.1], [175.3, 63.6],
[169.5, 67.3], [160.0, 75.5], [172.7, 68.2], [162.6, 61.4], [157.5, 76.8],
[176.5, 71.8], [164.4, 55.5], [160.7, 48.6], [174.0, 66.4], [163.8, 67.3]]
}, {
:name => 'Male',
:color => 'rgba(119, 152, 191, .5)',
:data => [[174.0, 65.6], [175.3, 71.8], [193.5, 80.7], [186.5, 72.6], [187.2, 78.8],
[181.5, 74.8], [184.0, 86.4], [184.5, 78.4], [175.0, 62.0], [184.0, 81.6],
[180.0, 76.6], [177.8, 83.6], [192.0, 90.0], [176.0, 74.6], [174.0, 71.0],
[184.0, 79.6], [192.7, 93.8], [171.5, 70.0], [173.0, 72.4], [176.0, 85.9],
[176.0, 78.8], [180.5, 77.8], [172.7, 66.2], [176.0, 86.4], [173.5, 81.8],
[178.0, 89.6], [180.3, 82.8], [180.3, 76.4], [164.5, 63.2], [173.0, 60.9],
[183.5, 74.8], [175.5, 70.0], [188.0, 72.4], [189.2, 84.1], [172.8, 69.1],
[170.0, 59.5], [182.0, 67.2], [170.0, 61.3], [177.8, 68.6], [184.2, 80.1],
[186.7, 87.8], [171.4, 84.7], [172.7, 73.4], [175.3, 72.1], [180.3, 82.6],
[182.9, 88.7], [188.0, 84.1], [177.2, 94.1], [172.1, 74.9], [167.0, 59.1],
[169.5, 75.6], [174.0, 86.2], [172.7, 75.3], [182.2, 87.1], [164.1, 55.2],
[163.0, 57.0], [171.5, 61.4], [184.2, 76.8], [174.0, 86.8], [174.0, 72.2],
[177.0, 71.6], [186.0, 84.8], [167.0, 68.2], [171.8, 66.1], [182.0, 72.0],
[167.0, 64.6], [177.8, 74.8], [164.5, 70.0], [192.0, 101.6], [175.5, 63.2],
[171.2, 79.1], [181.6, 78.9], [167.4, 67.7], [181.1, 66.0], [177.0, 68.2],
[174.5, 63.9], [177.5, 72.0], [170.5, 56.8], [182.4, 74.5], [197.1, 90.9],
[180.1, 93.0], [175.5, 80.9], [180.6, 72.7], [184.4, 68.0], [175.5, 70.9],
[180.6, 72.5], [177.0, 72.5], [177.1, 83.4], [181.6, 75.5], [176.5, 73.0],
[175.0, 70.2], [174.0, 73.4], [165.1, 70.5], [177.0, 68.9], [192.0, 102.3],
[176.5, 68.4], [169.4, 65.9], [182.1, 75.7], [179.8, 84.5], [175.3, 87.7],
[184.9, 86.4], [177.3, 73.2], [167.4, 53.9], [178.1, 72.0], [168.9, 55.5],
[157.2, 58.4], [180.3, 83.2], [170.2, 72.7], [177.8, 64.1], [172.7, 72.3],
[165.1, 65.0], [186.7, 86.4], [165.1, 65.0], [174.0, 88.6], [175.3, 84.1],
[185.4, 66.8], [177.8, 75.5], [180.3, 93.2], [180.3, 82.7], [177.8, 58.0],
[177.8, 79.5], [177.8, 78.6], [177.8, 71.8], [177.8, 116.4], [163.8, 72.2],
[188.0, 83.6], [198.1, 85.5], [175.3, 90.9], [166.4, 85.9], [190.5, 89.1],
[166.4, 75.0], [177.8, 77.7], [179.7, 86.4], [172.7, 90.9], [190.5, 73.6],
[185.4, 76.4], [168.9, 69.1], [167.6, 84.5], [175.3, 64.5], [170.2, 69.1],
[190.5, 108.6], [177.8, 86.4], [190.5, 80.9], [177.8, 87.7], [184.2, 94.5],
[176.5, 80.2], [177.8, 72.0], [180.3, 71.4], [171.4, 72.7], [172.7, 84.1],
[172.7, 76.8], [177.8, 63.6], [177.8, 80.9], [182.9, 80.9], [170.2, 85.5],
[167.6, 68.6], [175.3, 67.7], [165.1, 66.4], [185.4, 102.3], [181.6, 70.5],
[172.7, 95.9], [190.5, 84.1], [179.1, 87.3], [175.3, 71.8], [170.2, 65.9],
[193.0, 95.9], [171.4, 91.4], [177.8, 81.8], [177.8, 96.8], [167.6, 69.1],
[167.6, 82.7], [180.3, 75.5], [182.9, 79.5], [176.5, 73.6], [186.7, 91.8],
[188.0, 84.1], [188.0, 85.9], [177.8, 81.8], [174.0, 82.5], [177.8, 80.5],
[171.4, 70.0], [185.4, 81.8], [185.4, 84.1], [188.0, 90.5], [188.0, 91.4],
[182.9, 89.1], [176.5, 85.0], [175.3, 69.1], [175.3, 73.6], [188.0, 80.5],
[188.0, 82.7], [175.3, 86.4], [170.5, 67.7], [179.1, 92.7], [177.8, 93.6],
[175.3, 70.9], [182.9, 75.0], [170.8, 93.2], [188.0, 93.2], [180.3, 77.7],
[177.8, 61.4], [185.4, 94.1], [168.9, 75.0], [185.4, 83.6], [180.3, 85.5],
[174.0, 73.9], [167.6, 66.8], [182.9, 87.3], [160.0, 72.3], [180.3, 88.6],
[167.6, 75.5], [186.7, 101.4], [175.3, 91.1], [175.3, 67.3], [175.9, 77.7],
[175.3, 81.8], [179.1, 75.5], [181.6, 84.5], [177.8, 76.6], [182.9, 85.0],
[177.8, 102.5], [184.2, 77.3], [179.1, 71.8], [176.5, 87.9], [188.0, 94.3],
[174.0, 70.9], [167.6, 64.5], [170.2, 77.3], [167.6, 72.3], [188.0, 87.3],
[174.0, 80.0], [176.5, 82.3], [180.3, 73.6], [167.6, 74.1], [188.0, 85.9],
[180.3, 73.2], [167.6, 76.3], [183.0, 65.9], [183.0, 90.9], [179.1, 89.1],
[170.2, 62.3], [177.8, 82.7], [179.1, 79.1], [190.5, 98.2], [177.8, 84.1],
[180.3, 83.2], [180.3, 83.2]]
}]
}
}
end
# Canned dropdown/chart fixture used for UI prototyping.
# NOTE(review): the third assignment entry reads "assignment" with no number
# — looks like a typo for "assignment 3"; preserved as-is, confirm intent.
def self.test_data
  {
    :course_list => [["course 1", 1], ["course 2", 2], ["course 3", 3]],
    :assignment_list => [["assignment 1", 1], ["assignment 2", 2], ["assignment", 3]],
    :team_list => [["team 1", 1], ["team 2", 2], ["team 3", 3]],
    :chart_obj => Chart.data_template[:bar]
  }
end
end
Change function names to be more consistent with naming conventions.
#TODO: rename to ChartDataPacket
#TODO: find a fitting place for this file maybe in module
class Chart
@@chart_index = 0
@@header = false
attr :chart_id
attr :width
attr :data
# Builds a chart packet: bumps the shared chart counter and adapts the raw
# series data into a chart-config template for the given type.
def initialize(type, data, option = nil, width = nil)
  @@chart_index += 1
  @width = width unless width.nil?
  @data = self.class.dataAdapter(type, data, option)
  @chart_id = @@chart_index
end
# DOM container id for this chart, e.g. "chart_container_3".
def get_id_str
  "chart_container_#{@chart_id}"
end
# True until set_header records that the shared chart header was emitted.
def self.include_header?
  @@header ? false : true
end
# Marks the shared chart header as already emitted (it only ever flips to
# true, so ||= is equivalent to plain assignment here).
def self.set_header
  @@header ||= true
end
# Merges the caller's series data (and optional configuration) into the
# template for the given chart type and returns the finished config hash.
# Fix: the no-config branch still called
# self.class.set_template_optional_params — inside a class method `self` is
# already Chart, so `self.class` is Class and the call raised NoMethodError.
# It is now a plain same-class call, matching the validate_optional_conf call
# below.
def self.dataAdapter(type, data, optionalConf)
  template = data_template[type]
  if type == :pie
    data[:type] = 'pie'
    template[:series] = [data]
  else
    template[:series] = data
  end
  if optionalConf.nil?
    template = set_template_optional_params(template)
  else
    template[:title][:text] = optionalConf[:title].nil? ? "" : optionalConf[:title]
    template = validate_optional_conf(optionalConf, template)
  end
  template
end
# Applies the defaults used when no optional configuration is supplied:
# blank title and no subtitle/axis sections.
def self.set_template_optional_params(template)
  template[:title][:text] = ""
  [:subtitle, :yAxis, :xAxis].each { |section| template.delete(section) }
  template
end
# Applies the supplied optional settings to the template, pruning the
# sections that were not provided (whole :subtitle/:yAxis keys; only the
# :title / :categories sub-keys of :xAxis).
def self.validate_optional_conf(optionalConf, template)
  if (subtitle = optionalConf[:subtitle]).nil?
    template.delete(:subtitle)
  else
    template[:subtitle] = { :text => subtitle }
  end
  if (y_axis = optionalConf[:y_axis]).nil?
    template.delete(:yAxis)
  else
    template[:yAxis][:title][:text] = y_axis
  end
  if (x_axis = optionalConf[:x_axis]).nil?
    template[:xAxis].delete(:title)
  else
    template[:xAxis][:title][:text] = x_axis
  end
  if (categories = optionalConf[:x_axis_categories]).nil?
    template[:xAxis].delete(:categories)
  else
    template[:xAxis][:categories] = categories
  end
  template
end
def self.data_template()
{
:pie => {
:chart => {
:plotBackgroundColor => nil,
:plotBorderWidth => nil,
:plotShadow => false
},
:title => {
:text => 'Title'
},
:subtitle => {
:text => 'Title'
},
:xAxis => {},
:yAxis => {},
:tooltip => {
:pointFormat => '{series.name}: <b>{point.percentage}%</b>',
:percentageDecimals => 1
},
:plotOptions => {
:pie => {
:allowPointSelect => true,
:cursor => 'pointer',
:dataLabels => {
:enabled => true,
:color => '#000000',
:connectorColor => '#000000',
:format => "<b>{point.name}</b>: {percentage:.2f} %"
}
}
},
:series => [
{
:type => 'pie',
:name => 'XXX pie',
:data => [
['part 1',45.0],
['part 2',26.8],
['part 3',8.5],
['part 4',6.2],
['part 5',0.7]
]
}
]
},
:bar => {
:chart => {
:type => 'column'
},
:title => {:text => "Review score for XXXX"},
:subtitle => {:text => "subtitle here"},
:xAxis => {
:title =>{},
:categories => [ 'Problem1', 'Problem2', 'Problem3', 'Problem4', 'Problem5', 'Problem6', 'Problem7', 'Problem8', 'Problem9', 'Problem10', 'Problem11','Problem12']
},
:yAxis => {
:min => 0,
:title => {
:text => 'score'
}
},
:tooltip => {
:headerFormat => '<span style="font-size:10px">{point.key}</span><table>',
:pointFormat => '<tr><td style="color:{series.color};padding:0">{series.name}: </td>' +'<td style="padding:0"><b>{point.y:.1f}</b></td></tr>',
:footerFormat => '</table>',
:shared => true,
:useHTML => true
},
:plotOptions => {
:column => {
:pointPadding => 0.2,
:borderWidth => 0
}
},
:series => [
{
:name => 'review 1',
:data => [9.9, 7.5, 6.4, 9.2, 4.0, 6.0, 5.6, 8.5, 6.4, 4.1, 5.6, 4.4]
}, {
:name => 'review 2',
:data => [3.6, 8.8, 8.5, 3.4, 6.0, 4.5, 5.0, 4.3, 9.2, 8.5, 6.6, 9.3]
}, {
:name => 'review 3',
:data => [8.9, 8.8, 9.3, 4.4, 7.0, 8.3, 9.0, 9.6, 5.4, 6.2, 9.3, 5.2]
}
]
},
:line => {
:title => {:text => "Review score for XXXX"},
:subtitle => {:text => "subtitle here"},
:xAxis => {
:title =>{},
:categories => [ 'Problem1', 'Problem2', 'Problem3', 'Problem4', 'Problem5', 'Problem6', 'Problem7', 'Problem8', 'Problem9', 'Problem10', 'Problem11','Problem12']
},
:yAxis => {
:min => 0,
:title => {
:text => 'score'
}
},
:tooltip => {
:headerFormat => '<span style="font-size:10px">{point.key}</span><table>',
:pointFormat => '<tr><td style="color:{series.color};padding:0">{series.name}: </td>' +'<td style="padding:0"><b>{point.y:.1f} mm</b></td></tr>',
:footerFormat => '</table>',
:shared => true,
:useHTML => true
},
:plotOptions => {
:column => {
:pointPadding => 0.2,
:borderWidth => 0
}
},
:series => [
{
:name => 'review 1',
:data => [9.9, 7.5, 6.4, 9.2, 4.0, 6.0, 5.6, 8.5, 6.4, 4.1, 5.6, 4.4]
}, {
:name => 'review 2',
:data => [3.6, 8.8, 8.5, 3.4, 6.0, 4.5, 5.0, 4.3, 9.2, 8.5, 6.6, 9.3]
}, {
:name => 'review 3',
:data => [8.9, 8.8, 9.3, 4.4, 7.0, 8.3, 9.0, 9.6, 5.4, 6.2, 9.3, 5.2]
}
]
},
:scatter => {
:chart => {
:type => 'scatter',
:zoomType => 'xy'
},
:title => {
:text => 'Height Versus Weight of 507 Individuals by Gender'
},
:subtitle => {
:text => 'Source: Heinz 2003'
},
:xAxis => {
:title => {
:enabled => true,
:text => 'Height (cm)'
},
:startOnTick => true,
:endOnTick => true,
:showLastLabel => true
},
:yAxis => {
:title => {
:text => 'Weight (kg)'
}
},
:legend => {
:layout => 'vertical',
:align => 'left',
:verticalAlign => 'top',
:x => 100,
:y => 70,
:floating => true,
:backgroundColor => '#FFFFFF',
:borderWidth => 1
},
:plotOptions => {
:scatter => {
:marker => {
:radius => 5,
:states => {
:hover => {
:enabled => true,
:lineColor => 'rgb(100,100,100)'
}
}
},
:states => {
:hover => {
:marker => {
:enabled => false
}
}
},
:tooltip => {
:headerFormat => '<b>{series.name}</b><br>',
:pointFormat => '{point.x} cm, {point.y} kg'
}
}
},
:series => [
{
:name => 'Female',
:color => 'rgba(223, 83, 83, .5)',
:data => [[161.2, 51.6], [167.5, 59.0], [159.5, 49.2], [157.0, 63.0], [155.8, 53.6],
[170.0, 59.0], [159.1, 47.6], [166.0, 69.8], [176.2, 66.8], [160.2, 75.2],
[172.5, 55.2], [170.9, 54.2], [172.9, 62.5], [153.4, 42.0], [160.0, 50.0],
[147.2, 49.8], [168.2, 49.2], [175.0, 73.2], [157.0, 47.8], [167.6, 68.8],
[159.5, 50.6], [175.0, 82.5], [166.8, 57.2], [176.5, 87.8], [170.2, 72.8],
[174.0, 54.5], [173.0, 59.8], [179.9, 67.3], [170.5, 67.8], [160.0, 47.0],
[154.4, 46.2], [162.0, 55.0], [176.5, 83.0], [160.0, 54.4], [152.0, 45.8],
[162.1, 53.6], [170.0, 73.2], [160.2, 52.1], [161.3, 67.9], [166.4, 56.6],
[168.9, 62.3], [163.8, 58.5], [167.6, 54.5], [160.0, 50.2], [161.3, 60.3],
[167.6, 58.3], [165.1, 56.2], [160.0, 50.2], [170.0, 72.9], [157.5, 59.8],
[167.6, 61.0], [160.7, 69.1], [163.2, 55.9], [152.4, 46.5], [157.5, 54.3],
[168.3, 54.8], [180.3, 60.7], [165.5, 60.0], [165.0, 62.0], [164.5, 60.3],
[156.0, 52.7], [160.0, 74.3], [163.0, 62.0], [165.7, 73.1], [161.0, 80.0],
[162.0, 54.7], [166.0, 53.2], [174.0, 75.7], [172.7, 61.1], [167.6, 55.7],
[151.1, 48.7], [164.5, 52.3], [163.5, 50.0], [152.0, 59.3], [169.0, 62.5],
[164.0, 55.7], [161.2, 54.8], [155.0, 45.9], [170.0, 70.6], [176.2, 67.2],
[170.0, 69.4], [162.5, 58.2], [170.3, 64.8], [164.1, 71.6], [169.5, 52.8],
[163.2, 59.8], [154.5, 49.0], [159.8, 50.0], [173.2, 69.2], [170.0, 55.9],
[161.4, 63.4], [169.0, 58.2], [166.2, 58.6], [159.4, 45.7], [162.5, 52.2],
[159.0, 48.6], [162.8, 57.8], [159.0, 55.6], [179.8, 66.8], [162.9, 59.4],
[161.0, 53.6], [151.1, 73.2], [168.2, 53.4], [168.9, 69.0], [173.2, 58.4],
[171.8, 56.2], [178.0, 70.6], [164.3, 59.8], [163.0, 72.0], [168.5, 65.2],
[166.8, 56.6], [172.7, 105.2], [163.5, 51.8], [169.4, 63.4], [167.8, 59.0],
[159.5, 47.6], [167.6, 63.0], [161.2, 55.2], [160.0, 45.0], [163.2, 54.0],
[162.2, 50.2], [161.3, 60.2], [149.5, 44.8], [157.5, 58.8], [163.2, 56.4],
[172.7, 62.0], [155.0, 49.2], [156.5, 67.2], [164.0, 53.8], [160.9, 54.4],
[162.8, 58.0], [167.0, 59.8], [160.0, 54.8], [160.0, 43.2], [168.9, 60.5],
[158.2, 46.4], [156.0, 64.4], [160.0, 48.8], [167.1, 62.2], [158.0, 55.5],
[167.6, 57.8], [156.0, 54.6], [162.1, 59.2], [173.4, 52.7], [159.8, 53.2],
[170.5, 64.5], [159.2, 51.8], [157.5, 56.0], [161.3, 63.6], [162.6, 63.2],
[160.0, 59.5], [168.9, 56.8], [165.1, 64.1], [162.6, 50.0], [165.1, 72.3],
[166.4, 55.0], [160.0, 55.9], [152.4, 60.4], [170.2, 69.1], [162.6, 84.5],
[170.2, 55.9], [158.8, 55.5], [172.7, 69.5], [167.6, 76.4], [162.6, 61.4],
[167.6, 65.9], [156.2, 58.6], [175.2, 66.8], [172.1, 56.6], [162.6, 58.6],
[160.0, 55.9], [165.1, 59.1], [182.9, 81.8], [166.4, 70.7], [165.1, 56.8],
[177.8, 60.0], [165.1, 58.2], [175.3, 72.7], [154.9, 54.1], [158.8, 49.1],
[172.7, 75.9], [168.9, 55.0], [161.3, 57.3], [167.6, 55.0], [165.1, 65.5],
[175.3, 65.5], [157.5, 48.6], [163.8, 58.6], [167.6, 63.6], [165.1, 55.2],
[165.1, 62.7], [168.9, 56.6], [162.6, 53.9], [164.5, 63.2], [176.5, 73.6],
[168.9, 62.0], [175.3, 63.6], [159.4, 53.2], [160.0, 53.4], [170.2, 55.0],
[162.6, 70.5], [167.6, 54.5], [162.6, 54.5], [160.7, 55.9], [160.0, 59.0],
[157.5, 63.6], [162.6, 54.5], [152.4, 47.3], [170.2, 67.7], [165.1, 80.9],
[172.7, 70.5], [165.1, 60.9], [170.2, 63.6], [170.2, 54.5], [170.2, 59.1],
[161.3, 70.5], [167.6, 52.7], [167.6, 62.7], [165.1, 86.3], [162.6, 66.4],
[152.4, 67.3], [168.9, 63.0], [170.2, 73.6], [175.2, 62.3], [175.2, 57.7],
[160.0, 55.4], [165.1, 104.1], [174.0, 55.5], [170.2, 77.3], [160.0, 80.5],
[167.6, 64.5], [167.6, 72.3], [167.6, 61.4], [154.9, 58.2], [162.6, 81.8],
[175.3, 63.6], [171.4, 53.4], [157.5, 54.5], [165.1, 53.6], [160.0, 60.0],
[174.0, 73.6], [162.6, 61.4], [174.0, 55.5], [162.6, 63.6], [161.3, 60.9],
[156.2, 60.0], [149.9, 46.8], [169.5, 57.3], [160.0, 64.1], [175.3, 63.6],
[169.5, 67.3], [160.0, 75.5], [172.7, 68.2], [162.6, 61.4], [157.5, 76.8],
[176.5, 71.8], [164.4, 55.5], [160.7, 48.6], [174.0, 66.4], [163.8, 67.3]]
}, {
:name => 'Male',
:color => 'rgba(119, 152, 191, .5)',
:data => [[174.0, 65.6], [175.3, 71.8], [193.5, 80.7], [186.5, 72.6], [187.2, 78.8],
[181.5, 74.8], [184.0, 86.4], [184.5, 78.4], [175.0, 62.0], [184.0, 81.6],
[180.0, 76.6], [177.8, 83.6], [192.0, 90.0], [176.0, 74.6], [174.0, 71.0],
[184.0, 79.6], [192.7, 93.8], [171.5, 70.0], [173.0, 72.4], [176.0, 85.9],
[176.0, 78.8], [180.5, 77.8], [172.7, 66.2], [176.0, 86.4], [173.5, 81.8],
[178.0, 89.6], [180.3, 82.8], [180.3, 76.4], [164.5, 63.2], [173.0, 60.9],
[183.5, 74.8], [175.5, 70.0], [188.0, 72.4], [189.2, 84.1], [172.8, 69.1],
[170.0, 59.5], [182.0, 67.2], [170.0, 61.3], [177.8, 68.6], [184.2, 80.1],
[186.7, 87.8], [171.4, 84.7], [172.7, 73.4], [175.3, 72.1], [180.3, 82.6],
[182.9, 88.7], [188.0, 84.1], [177.2, 94.1], [172.1, 74.9], [167.0, 59.1],
[169.5, 75.6], [174.0, 86.2], [172.7, 75.3], [182.2, 87.1], [164.1, 55.2],
[163.0, 57.0], [171.5, 61.4], [184.2, 76.8], [174.0, 86.8], [174.0, 72.2],
[177.0, 71.6], [186.0, 84.8], [167.0, 68.2], [171.8, 66.1], [182.0, 72.0],
[167.0, 64.6], [177.8, 74.8], [164.5, 70.0], [192.0, 101.6], [175.5, 63.2],
[171.2, 79.1], [181.6, 78.9], [167.4, 67.7], [181.1, 66.0], [177.0, 68.2],
[174.5, 63.9], [177.5, 72.0], [170.5, 56.8], [182.4, 74.5], [197.1, 90.9],
[180.1, 93.0], [175.5, 80.9], [180.6, 72.7], [184.4, 68.0], [175.5, 70.9],
[180.6, 72.5], [177.0, 72.5], [177.1, 83.4], [181.6, 75.5], [176.5, 73.0],
[175.0, 70.2], [174.0, 73.4], [165.1, 70.5], [177.0, 68.9], [192.0, 102.3],
[176.5, 68.4], [169.4, 65.9], [182.1, 75.7], [179.8, 84.5], [175.3, 87.7],
[184.9, 86.4], [177.3, 73.2], [167.4, 53.9], [178.1, 72.0], [168.9, 55.5],
[157.2, 58.4], [180.3, 83.2], [170.2, 72.7], [177.8, 64.1], [172.7, 72.3],
[165.1, 65.0], [186.7, 86.4], [165.1, 65.0], [174.0, 88.6], [175.3, 84.1],
[185.4, 66.8], [177.8, 75.5], [180.3, 93.2], [180.3, 82.7], [177.8, 58.0],
[177.8, 79.5], [177.8, 78.6], [177.8, 71.8], [177.8, 116.4], [163.8, 72.2],
[188.0, 83.6], [198.1, 85.5], [175.3, 90.9], [166.4, 85.9], [190.5, 89.1],
[166.4, 75.0], [177.8, 77.7], [179.7, 86.4], [172.7, 90.9], [190.5, 73.6],
[185.4, 76.4], [168.9, 69.1], [167.6, 84.5], [175.3, 64.5], [170.2, 69.1],
[190.5, 108.6], [177.8, 86.4], [190.5, 80.9], [177.8, 87.7], [184.2, 94.5],
[176.5, 80.2], [177.8, 72.0], [180.3, 71.4], [171.4, 72.7], [172.7, 84.1],
[172.7, 76.8], [177.8, 63.6], [177.8, 80.9], [182.9, 80.9], [170.2, 85.5],
[167.6, 68.6], [175.3, 67.7], [165.1, 66.4], [185.4, 102.3], [181.6, 70.5],
[172.7, 95.9], [190.5, 84.1], [179.1, 87.3], [175.3, 71.8], [170.2, 65.9],
[193.0, 95.9], [171.4, 91.4], [177.8, 81.8], [177.8, 96.8], [167.6, 69.1],
[167.6, 82.7], [180.3, 75.5], [182.9, 79.5], [176.5, 73.6], [186.7, 91.8],
[188.0, 84.1], [188.0, 85.9], [177.8, 81.8], [174.0, 82.5], [177.8, 80.5],
[171.4, 70.0], [185.4, 81.8], [185.4, 84.1], [188.0, 90.5], [188.0, 91.4],
[182.9, 89.1], [176.5, 85.0], [175.3, 69.1], [175.3, 73.6], [188.0, 80.5],
[188.0, 82.7], [175.3, 86.4], [170.5, 67.7], [179.1, 92.7], [177.8, 93.6],
[175.3, 70.9], [182.9, 75.0], [170.8, 93.2], [188.0, 93.2], [180.3, 77.7],
[177.8, 61.4], [185.4, 94.1], [168.9, 75.0], [185.4, 83.6], [180.3, 85.5],
[174.0, 73.9], [167.6, 66.8], [182.9, 87.3], [160.0, 72.3], [180.3, 88.6],
[167.6, 75.5], [186.7, 101.4], [175.3, 91.1], [175.3, 67.3], [175.9, 77.7],
[175.3, 81.8], [179.1, 75.5], [181.6, 84.5], [177.8, 76.6], [182.9, 85.0],
[177.8, 102.5], [184.2, 77.3], [179.1, 71.8], [176.5, 87.9], [188.0, 94.3],
[174.0, 70.9], [167.6, 64.5], [170.2, 77.3], [167.6, 72.3], [188.0, 87.3],
[174.0, 80.0], [176.5, 82.3], [180.3, 73.6], [167.6, 74.1], [188.0, 85.9],
[180.3, 73.2], [167.6, 76.3], [183.0, 65.9], [183.0, 90.9], [179.1, 89.1],
[170.2, 62.3], [177.8, 82.7], [179.1, 79.1], [190.5, 98.2], [177.8, 84.1],
[180.3, 83.2], [180.3, 83.2]]
}]
}
}
end
# Canned fixture data for exercising the chart views without real
# course/assignment/team records. Returns a Hash holding dropdown option
# lists ([label, id] pairs) plus a ready-made bar-chart configuration.
# NOTE(review): the third assignment label reads "assignment" -- possibly
# meant "assignment 3"; confirm before changing the fixture.
def self.test_data
  {
    :course_list     => [["course 1", 1], ["course 2", 2], ["course 3", 3]],
    :assignment_list => [["assignment 1", 1], ["assignment 2", 2], ["assignment", 3]],
    :team_list       => [["team 1", 1], ["team 2", 2], ["team 3", 3]],
    :chart_obj       => Chart.data_template[:bar]
  }
end
end
|
# Ruote workflow example: fetch the latest flickr.com pictures and have
# four reviewers each pick one concurrently, then render the picks into
# an out.pdf file.
$:.unshift('lib')
require 'rubygems'
require 'ruote/engine' # sudo gem install ruote
require 'atom/feed'    # sudo gem install atom-tools
require 'prawn'        # sudo gem install prawn
#
# a transient (in-memory) engine is enough for a one-shot example,
# no need to make it persistent
engine = Ruote::Engine.new(:definition_in_launchitem_allowed => true)
#
# a process that fetches the latest pictures from flickr.com and submits
# them concurrently to four users for review
pdef = Ruote.process_definition :name => 'picture_acquisition' do
  sequence do
    get_pictures
    concurrence :merge_type => 'mix' do
      # pass the picture list to the four users concurrently;
      # :merge_type => 'mix' makes each reviewer's choice appear in the
      # final workitem at the end of the concurrence
      user_alice
      user_bob
      user_charly
      user_doug
    end
    generate_result_pdf
  end
end
#
# fetching the flickr.com pictures via Atom; stores
# [title, author, href] triples under workitem.fields['pictures']
engine.register_participant :get_pictures do |workitem|
  feed = Atom::Feed.new(
    "http://api.flickr.com/services/feeds/photos_public.gne"+
    "?tags=#{workitem.fields['tags'].join(',')}&format=atom")
  feed.update!
  workitem.fields['pictures'] = feed.entries.map do |entry|
    [ entry.title, entry.authors.first.name, entry.links.last.href ]
  end
end
#
# the users (well, here, just randomly picking a picture); each records
# its pick under its own participant name ('user_alice', 'user_bob', ...)
engine.register_participant 'user_.*' do |workitem|
  pictures = workitem.fields['pictures']
  workitem.fields[workitem.participant_name] = pictures[rand(pictures.length)]
end
#
# the final participant, generates an "out.pdf" file in the current dir
engine.register_participant :generate_result_pdf do |workitem|
  # BUGFIX: participant fields use underscores ('user_alice'), but the old
  # /^user-.*$/ regex looked for a dash, so no picks were ever collected
  # and the generated PDF came out empty.
  entries = workitem.fields.inject([]) do |a, (k, v)|
    a << [ k, v.last ] if k =~ /^user_/
    a
  end
  # download each reviewer's pick (entry is [participant, href, filename])
  entries.each_with_index do |entry, i|
    entry << "pic#{i}.jpg"
    `curl #{entry[1]} > #{entry[2]}`
    puts "..got #{entry[0]} / #{entry[2]}"
  end
  Prawn::Document.generate('out.pdf') do
    font 'Helvetica'
    entries.each do |entry|
      text entry[0]
      image entry[2], :width => 200
    end
  end
  puts ".generated out.pdf"
  `rm pic*.jpg`
end
#
# launching the process, requesting pictures tagged 'cat' and 'fish'
li = Ruote::Launchitem.new(pdef)
li.fields['tags'] = [ 'cat', 'fish' ]
fei = engine.launch(li)
#
# workflow engines are asynchronous beasts, have to wait for them
# (here we wait for a particular process)
outcome = engine.wait_for(fei)
p outcome
fixed flickr example (Thanks Gonzalo)
# Ruote workflow example: fetch the latest flickr.com pictures, let four
# reviewers pick one each (concurrently), then render the picks to a PDF.
$:.unshift('lib')
require 'rubygems'
require 'ruote/engine' # sudo gem install ruote
require 'atom/feed' # sudo gem install atom-tools
require 'prawn' # sudo gem install prawn
#
# starting a transient engine (no need to make it persistent)
engine = Ruote::Engine.new(:definition_in_launchitem_allowed => true)
#
# a process that fetches the latest pictures from flickr.com and submits
# them concurrently to four users for review
pdef = Ruote.process_definition :name => 'picture_acquisition' do
  sequence do
    get_pictures
    concurrence :merge_type => 'mix' do
      # pass the picture list to the four users concurrently;
      # :merge_type => 'mix' makes each reviewer's choice appear in the
      # final workitem at the end of the concurrence
      user_alice
      user_bob
      user_charly
      user_doug
    end
    generate_result_pdf
  end
end
#
# fetching the flickr.com pictures via Atom; stores
# [title, author, href] triples under workitem.fields['pictures']
engine.register_participant :get_pictures do |workitem|
  feed = Atom::Feed.new(
    "http://api.flickr.com/services/feeds/photos_public.gne"+
    "?tags=#{workitem.fields['tags'].join(',')}&format=atom")
  feed.update!
  workitem.fields['pictures'] = feed.entries.inject([]) do |a, entry|
    a << [
      entry.title,
      entry.authors.first.name,
      entry.links.last.href
    ]
  end
end
#
# the users (well, here, just randomly picking a picture); each records
# its pick under its own participant name ('user_alice', ...)
engine.register_participant 'user_.*' do |workitem|
  workitem.fields[workitem.participant_name] =
    workitem.fields['pictures'][(rand * workitem.fields['pictures'].length).to_i]
end
#
# the final participant, generates an "out.pdf" file in the current dir
engine.register_participant :generate_result_pdf do |workitem|
  # collect the [participant_name, href] pairs recorded by the reviewers
  # NOTE(review): the \_ escape in the regex is redundant ('_' is a
  # literal in regexps); /^user_.+$/ is equivalent.
  entries = workitem.fields.inject([]) do |a, (k, v)|
    a << [ k, v.last ] if k.match(/^user\_.+$/)
    a
  end
  # download each pick (entry becomes [participant, href, filename])
  entries.each_with_index do |entry, i|
    entry << "pic#{i}.jpg"
    `curl #{entry[1]} > #{entry[2]}`
    puts "..got #{entry[0]} / #{entry[2]}"
  end
  Prawn::Document.generate('out.pdf') do
    font 'Helvetica'
    entries.each do |entry|
      text entry[0]
      image entry[2], :width => 200
    end
  end
  puts ".generated out.pdf"
  `rm pic*.jpg`
end
#
# launching the process, requesting pictures tagged 'cat' and 'fish'
li = Ruote::Launchitem.new(pdef)
li.fields['tags'] = [ 'cat', 'fish' ]
fei = engine.launch(li)
#
# workflow engines are asynchronous beasts, have to wait for them
# (here we wait for a particular process)
outcome = engine.wait_for(fei)
#p outcome
|
#METODO make it so you can just say task.add_contact(contact/contact_id) (for opportunity and organisation as well)
#METODO warn them about how you have to save a task before you can link it up
module Insightly
  # Remote proxy for the Insightly "Tasks" API resource. Field accessors
  # are generated by the api_field macro; link handling comes from
  # TaskLinkHelper.
  class Task < ReadWrite
    include Insightly::TaskLinkHelper

    self.url_base = "Tasks"

    api_field "TASK_ID", "TITLE", "CATEGORY_ID", "DUE_DATE",
              "COMPLETED_DATE_UTC", "PUBLICLY_VISIBLE", "COMPLETED",
              "PROJECT_ID", "DETAILS", "STATUS", "PRIORITY",
              "PERCENT_COMPLETE", "START_DATE", "ASSIGNED_BY_USER_ID",
              "PARENT_TASK_ID", "RECURRENCE", "RESPONSIBLE_USER_ID",
              "OWNER_USER_ID", "DATE_CREATED_UTC", "DATE_UPDATED_UTC",
              "TASKLINKS"

    # One predicate per Insightly STATUS value, e.g. completed? is true
    # when status == "COMPLETED".
    {
      "not_started?" => "NOT STARTED",
      "in_progress?" => "IN PROGRESS",
      "waiting?"     => "WAITING",
      "completed?"   => "COMPLETED",
      "deferred?"    => "DEFERRED"
    }.each do |predicate, label|
      define_method(predicate) { status == label }
    end

    # Fetch all comments attached to this task.
    #
    # Returns an Array of Insightly::Comment.
    def comments
      get_collection("#{url_base}/#{task_id}/comments").map do |data|
        Insightly::Comment.build(data)
      end
    end

    # Post a new comment with the given body text to this task.
    #
    # Returns the created Insightly::Comment (rebuilt from the API reply).
    def comment_on(body)
      new_comment = Insightly::Comment.new.build({"BODY" => body})
      reply = post_collection("#{url_base}/#{task_id}/comments", new_comment.remote_data.to_json)
      new_comment.build(reply)
    end

    # Point the given task link at this task (used by TaskLinkHelper).
    def fix_for_link(link)
      link.task_id = remote_id
      link
    end
  end
end
Removed todos
module Insightly
  # Remote proxy for the Insightly "Tasks" API resource. Field accessors
  # are generated by the api_field macro; link handling comes from
  # TaskLinkHelper.
  class Task < ReadWrite
    include Insightly::TaskLinkHelper
    self.url_base = "Tasks"
    api_field "TASK_ID",
      "TITLE",
      "CATEGORY_ID",
      "DUE_DATE",
      "COMPLETED_DATE_UTC",
      "PUBLICLY_VISIBLE",
      "COMPLETED",
      "PROJECT_ID",
      "DETAILS",
      "STATUS",
      "PRIORITY",
      "PERCENT_COMPLETE",
      "START_DATE",
      "ASSIGNED_BY_USER_ID",
      "PARENT_TASK_ID",
      "RECURRENCE",
      "RESPONSIBLE_USER_ID",
      "OWNER_USER_ID",
      "DATE_CREATED_UTC",
      "DATE_UPDATED_UTC",
      "TASKLINKS"
    # Fetch all comments attached to this task.
    #
    # Returns an Array of Insightly::Comment.
    def comments
      list = []
      get_collection("#{url_base}/#{task_id}/comments").each do |d|
        list << Insightly::Comment.build(d)
      end
      list
    end
    # Post a new comment with the given body text to this task.
    #
    # Returns the created Insightly::Comment (rebuilt from the API reply).
    def comment_on(body)
      comment = Insightly::Comment.new.build({"BODY" => body})
      result = post_collection("#{url_base}/#{task_id}/comments", comment.remote_data.to_json)
      comment.build(result)
    end
    # STATUS predicates -- each mirrors one Insightly status constant.
    def not_started?
      status == "NOT STARTED"
    end
    def in_progress?
      status == "IN PROGRESS"
    end
    def waiting?
      status == "WAITING"
    end
    def completed?
      status == "COMPLETED"
    end
    def deferred?
      status == "DEFERRED"
    end
    # Point the given task link at this task (used by TaskLinkHelper).
    def fix_for_link(link)
      link.task_id = self.remote_id
      link
    end
  end
end
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for knjappserver 0.0.22. Generated by jeweler -- do not hand-edit;
# change Jeweler::Tasks in the Rakefile and re-run 'rake gemspec'.
Gem::Specification.new do |s|
  s.name = %q{knjappserver}
  s.version = "0.0.22"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Kasper Johansen"]
  s.date = %q{2012-05-03}
  # NOTE(review): description is a sentence fragment (jeweler extracted it
  # from the Rakefile) -- fix it there, not here.
  s.description = %q{Which supports a lot of undocumented stuff.}
  s.email = %q{k@spernj.org}
  s.executables = ["check_running.rb", "knjappserver_start.rb"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Full file manifest (maintained by jeweler from the git index).
  s.files = [
    ".document",
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/check_running.rb",
    "bin/knjappserver_start.rb",
    "knjappserver.gemspec",
    "lib/conf/README",
    "lib/conf/apache2_cgi_rhtml_conf.conf",
    "lib/conf/conf_example.rb",
    "lib/conf/conf_vars_example.rb",
    "lib/files/database_schema.rb",
    "lib/files/run/README",
    "lib/include/class_customio.rb",
    "lib/include/class_erbhandler.rb",
    "lib/include/class_httpserver.rb",
    "lib/include/class_httpsession.rb",
    "lib/include/class_httpsession_contentgroup.rb",
    "lib/include/class_httpsession_http_request.rb",
    "lib/include/class_httpsession_http_response.rb",
    "lib/include/class_httpsession_page_environment.rb",
    "lib/include/class_httpsession_post_multipart.rb",
    "lib/include/class_knjappserver.rb",
    "lib/include/class_knjappserver_cleaner.rb",
    "lib/include/class_knjappserver_cmdline.rb",
    "lib/include/class_knjappserver_errors.rb",
    "lib/include/class_knjappserver_leakproxy_client.rb",
    "lib/include/class_knjappserver_leakproxy_server.rb",
    "lib/include/class_knjappserver_logging.rb",
    "lib/include/class_knjappserver_mailing.rb",
    "lib/include/class_knjappserver_sessions.rb",
    "lib/include/class_knjappserver_threadding.rb",
    "lib/include/class_knjappserver_threadding_timeout.rb",
    "lib/include/class_knjappserver_translations.rb",
    "lib/include/class_knjappserver_web.rb",
    "lib/include/class_log.rb",
    "lib/include/class_log_access.rb",
    "lib/include/class_log_data.rb",
    "lib/include/class_log_data_link.rb",
    "lib/include/class_log_data_value.rb",
    "lib/include/class_log_link.rb",
    "lib/include/class_session.rb",
    "lib/include/gettext_funcs.rb",
    "lib/include/magic_methods.rb",
    "lib/knjappserver.rb",
    "lib/pages/benchmark.rhtml",
    "lib/pages/benchmark_print.rhtml",
    "lib/pages/benchmark_simple.rhtml",
    "lib/pages/benchmark_threadded_content.rhtml",
    "lib/pages/debug_database_connections.rhtml",
    "lib/pages/debug_http_sessions.rhtml",
    "lib/pages/error_notfound.rhtml",
    "lib/pages/logs_latest.rhtml",
    "lib/pages/logs_show.rhtml",
    "lib/pages/spec.rhtml",
    "lib/pages/spec_post.rhtml",
    "lib/pages/spec_test_multiple_clients.rhtml",
    "lib/pages/spec_thread_joins.rhtml",
    "lib/pages/spec_threadded_content.rhtml",
    "lib/pages/tests.rhtml",
    "lib/scripts/benchmark.rb",
    "lib/scripts/knjappserver_cgi.rb",
    "lib/scripts/knjappserver_fcgi.rb",
    "lib/scripts/leakproxy.rb",
    "spec/knjappserver_spec.rb",
    "spec/leakproxy_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://github.com/kaspernj/knjappserver}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.6.2}
  s.summary = %q{A multi-threadded app-web-server.}
  # Dependency declarations, with fallbacks for ancient RubyGems versions
  # (jeweler boilerplate).
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<knjrbfw>, [">= 0"])
      s.add_runtime_dependency(%q<erubis>, [">= 0"])
      s.add_runtime_dependency(%q<mail>, [">= 0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
      s.add_development_dependency(%q<json>, [">= 0"])
    else
      s.add_dependency(%q<knjrbfw>, [">= 0"])
      s.add_dependency(%q<erubis>, [">= 0"])
      s.add_dependency(%q<mail>, [">= 0"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
      s.add_dependency(%q<json>, [">= 0"])
    end
  else
    s.add_dependency(%q<knjrbfw>, [">= 0"])
    s.add_dependency(%q<erubis>, [">= 0"])
    s.add_dependency(%q<mail>, [">= 0"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
    s.add_dependency(%q<json>, [">= 0"])
  end
end
Regenerate gemspec for version 0.0.23
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for knjappserver 0.0.23. Generated by jeweler -- do not hand-edit;
# change Jeweler::Tasks in the Rakefile and re-run 'rake gemspec'.
# Changes vs 0.0.22: added runtime dep 'tsafe', relaxed bundler to >= 1.0.0,
# dropped the 'json' development dependency.
Gem::Specification.new do |s|
  s.name = %q{knjappserver}
  s.version = "0.0.23"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Kasper Johansen"]
  s.date = %q{2012-05-22}
  # NOTE(review): description is a sentence fragment (jeweler extracted it
  # from the Rakefile) -- fix it there, not here.
  s.description = %q{Which supports a lot of undocumented stuff.}
  s.email = %q{k@spernj.org}
  s.executables = ["check_running.rb", "knjappserver_start.rb"]
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Full file manifest (maintained by jeweler from the git index).
  s.files = [
    ".document",
    ".rspec",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bin/check_running.rb",
    "bin/knjappserver_start.rb",
    "knjappserver.gemspec",
    "lib/conf/README",
    "lib/conf/apache2_cgi_rhtml_conf.conf",
    "lib/conf/conf_example.rb",
    "lib/conf/conf_vars_example.rb",
    "lib/files/database_schema.rb",
    "lib/files/run/README",
    "lib/include/class_customio.rb",
    "lib/include/class_erbhandler.rb",
    "lib/include/class_httpserver.rb",
    "lib/include/class_httpsession.rb",
    "lib/include/class_httpsession_contentgroup.rb",
    "lib/include/class_httpsession_http_request.rb",
    "lib/include/class_httpsession_http_response.rb",
    "lib/include/class_httpsession_page_environment.rb",
    "lib/include/class_httpsession_post_multipart.rb",
    "lib/include/class_knjappserver.rb",
    "lib/include/class_knjappserver_cleaner.rb",
    "lib/include/class_knjappserver_cmdline.rb",
    "lib/include/class_knjappserver_errors.rb",
    "lib/include/class_knjappserver_leakproxy_client.rb",
    "lib/include/class_knjappserver_leakproxy_server.rb",
    "lib/include/class_knjappserver_logging.rb",
    "lib/include/class_knjappserver_mailing.rb",
    "lib/include/class_knjappserver_sessions.rb",
    "lib/include/class_knjappserver_threadding.rb",
    "lib/include/class_knjappserver_threadding_timeout.rb",
    "lib/include/class_knjappserver_translations.rb",
    "lib/include/class_knjappserver_web.rb",
    "lib/include/class_log.rb",
    "lib/include/class_log_access.rb",
    "lib/include/class_log_data.rb",
    "lib/include/class_log_data_link.rb",
    "lib/include/class_log_data_value.rb",
    "lib/include/class_log_link.rb",
    "lib/include/class_session.rb",
    "lib/include/gettext_funcs.rb",
    "lib/include/magic_methods.rb",
    "lib/knjappserver.rb",
    "lib/pages/benchmark.rhtml",
    "lib/pages/benchmark_print.rhtml",
    "lib/pages/benchmark_simple.rhtml",
    "lib/pages/benchmark_threadded_content.rhtml",
    "lib/pages/debug_database_connections.rhtml",
    "lib/pages/debug_http_sessions.rhtml",
    "lib/pages/error_notfound.rhtml",
    "lib/pages/logs_latest.rhtml",
    "lib/pages/logs_show.rhtml",
    "lib/pages/spec.rhtml",
    "lib/pages/spec_post.rhtml",
    "lib/pages/spec_test_multiple_clients.rhtml",
    "lib/pages/spec_thread_joins.rhtml",
    "lib/pages/spec_threadded_content.rhtml",
    "lib/pages/tests.rhtml",
    "lib/scripts/benchmark.rb",
    "lib/scripts/knjappserver_cgi.rb",
    "lib/scripts/knjappserver_fcgi.rb",
    "lib/scripts/leakproxy.rb",
    "spec/knjappserver_spec.rb",
    "spec/leakproxy_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://github.com/kaspernj/knjappserver}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.6.2}
  s.summary = %q{A multi-threadded app-web-server.}
  # Dependency declarations, with fallbacks for ancient RubyGems versions
  # (jeweler boilerplate).
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<knjrbfw>, [">= 0"])
      s.add_runtime_dependency(%q<erubis>, [">= 0"])
      s.add_runtime_dependency(%q<mail>, [">= 0"])
      s.add_runtime_dependency(%q<tsafe>, [">= 0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<bundler>, [">= 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<sqlite3>, [">= 0"])
    else
      s.add_dependency(%q<knjrbfw>, [">= 0"])
      s.add_dependency(%q<erubis>, [">= 0"])
      s.add_dependency(%q<mail>, [">= 0"])
      s.add_dependency(%q<tsafe>, [">= 0"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<bundler>, [">= 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<sqlite3>, [">= 0"])
    end
  else
    s.add_dependency(%q<knjrbfw>, [">= 0"])
    s.add_dependency(%q<erubis>, [">= 0"])
    s.add_dependency(%q<mail>, [">= 0"])
    s.add_dependency(%q<tsafe>, [">= 0"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<bundler>, [">= 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<sqlite3>, [">= 0"])
  end
end
|
# Namespace module for the issues gem.
module Issues
  # Released gem version string.
  VERSION = "0.0.1"
end
version bump
# Namespace module for the issues gem.
module Issues
  # Released gem version string.
  VERSION = '0.0.2'
end
|
require 'monk/glue/reloader'

module James
  # Code reloader: tracks mtimes of the application's files and re-loads
  # the ones that changed since the previous pass. Routing files
  # (app/routes/) are special-cased: when any of them changes, the whole
  # routing table is reset and every routing file is re-loaded.
  class Reloader < Monk::Glue::Reloader
    # Instance-level entry point; delegates to the class-level reloader.
    def reload!
      self.class.reload!
    end

    # Instance-level accessor for the watched file list.
    def files
      self.class.files
    end

    class << self
      # Scan all watched files and re-load the ones whose mtime advanced.
      def reload!
        updated_files = []
        @timestamps ||= {}
        routing_changed = false
        files.each do |file|
          begin
            now = File.mtime(file).to_i
            old = @timestamps[file].to_i
            routing = file =~ %r{(^|/)app/routes/}o
            # routing files always enter this branch so they can be
            # re-loaded together when the routing table is reset
            if routing || now > old
              updated_files << file
              routing_changed = true if routing && now > old
              # BUGFIX: only announce files that actually changed -- the
              # old `if old > 0` guard fired for every routing file on
              # every pass, even when nothing was updated.
              James.say_status(:updated, file) if now > old && old > 0
            end
            @timestamps[file] = now
          rescue StandardError
            # File vanished or mtime failed: schedule a re-load attempt.
            # (Was `rescue Exception`, which would also swallow signals
            # and SystemExit.)
            updated_files << file
          end
        end
        # a routing change invalidates the whole routing table
        Main.reset! if routing_changed
        updated_files.each do |file|
          # routing files are only re-loaded when the table was reset
          next if !routing_changed && file =~ %r{(^|/)app/routes/}o
          load file
        end
      end

      # All watched files: for every configured lib dir, the sibling
      # <dir>.rb entry file (if present) plus every .rb file underneath.
      def files
        watched = []
        James.libs.each do |dir, _reload|
          entry = dir.sub(/(\.rb)?$/, '') + '.rb'
          watched << entry if File.exist?(entry)
          watched.concat(Dir.glob("#{dir}/**/*.rb").sort) if File.directory?(dir)
        end
        watched
      end
    end
  end
end
Fixed the reloader to report only files that were actually updated
require 'monk/glue/reloader'
module James
  # Code reloader: tracks mtimes of the application's files and re-loads
  # the ones that changed since the previous pass. Routing files
  # (app/routes/) are special-cased: when any of them changes, the whole
  # routing table is reset and every routing file is re-loaded.
  class Reloader < Monk::Glue::Reloader
    # Instance-level entry point; delegates to the class-level reloader.
    def reload!
      self.class.reload!
    end
    # Instance-level accessor for the watched file list.
    def files
      self.class.files
    end
    class << self
      # Scan all watched files and re-load the ones whose mtime advanced.
      def reload!
        updated_files = []
        @timestamps ||= {}
        routing_changed = false
        files.each do |file|
          begin
            now = File.mtime(file).to_i
            old = @timestamps[file].to_i
            routing = file =~ %r{(^|/)app/routes/}o
            # routing files always enter this branch so they can be
            # re-loaded together when the routing table is reset
            if routing or now > old
              updated_files << file
              routing_changed = true if routing and now > old
              # only report genuine updates (old == 0 means first sighting)
              James.say_status(:updated, file) if now > old and old > 0
            end
            @timestamps[file] = now
          rescue Exception
            # File vanished or mtime failed: schedule a re-load attempt.
            # NOTE(review): rescuing Exception also swallows signals and
            # SystemExit -- StandardError would be safer here.
            updated_files << file
          end
        end
        # a routing change invalidates the whole routing table
        if routing_changed
          Main.reset!
        end
        updated_files.each do |file|
          # routing files are only re-loaded when the table was reset
          next if !routing_changed and file =~ %r{(^|/)app/routes/}o
          load file
        end
      end
      # All watched files: each lib dir's sibling <dir>.rb entry file plus
      # every .rb file beneath the dir.
      # NOTE(review): the `reload` block parameter is unused.
      def files
        array = []
        James.libs.each do |dir, reload|
          file = dir.sub(/(\.rb)?$/, '') + '.rb'
          array << file if File.exist?(file)
          array.concat(Dir.glob("#{dir}/**/*.rb").sort) if File.directory?(dir)
        end
        return array
      end
    end
  end
end
|
# frozen_string_literal: true
require_all "jekyll/filters"
module Jekyll
module Filters
include URLFilters
include GroupingFilters
include DateFilters
# Convert a Markdown string into HTML output.
#
# input - The Markdown String to convert.
#
# Returns the HTML formatted String.
def markdownify(input)
  # The converter instance is owned by the site stored in the Liquid
  # render context registers.
  @context.registers[:site].find_converter_instance(
    Jekyll::Converters::Markdown
  ).convert(input.to_s)
end
# Convert quotes into smart quotes.
#
# input - The String to convert.
#
# Returns the smart-quotified String.
def smartify(input)
  @context.registers[:site].find_converter_instance(
    Jekyll::Converters::SmartyPants
  ).convert(input.to_s)
end
# Convert a Sass string into CSS output.
#
# input - The Sass String to convert.
#
# Returns the CSS formatted String.
def sassify(input)
  # NOTE(review): no input.to_s here (unlike markdownify/smartify), so a
  # nil input raises inside the converter -- confirm that is intended.
  @context.registers[:site].find_converter_instance(
    Jekyll::Converters::Sass
  ).convert(input)
end
# Convert a Scss string into CSS output.
#
# input - The Scss String to convert.
#
# Returns the CSS formatted String.
def scssify(input)
  @context.registers[:site].find_converter_instance(
    Jekyll::Converters::Scss
  ).convert(input)
end
# Slugify a filename or title.
#
# input - The filename or title to slugify.
# mode - how string is slugified (see Utils.slugify for the valid modes)
#
# Returns the given filename or title as a lowercase URL String.
# See Utils.slugify for more detail.
def slugify(input, mode = nil)
  Utils.slugify(input, :mode => mode)
end
# XML-escape a string: special characters become their HTML entity
# replacements (e.g. '<' => '&lt;', '"' => '&quot;').
#
# input - The String to escape (anything else is #to_s'd first).
#
# Examples
#
#   xml_escape('foo "bar" <baz>')
#   # => "foo &quot;bar&quot; &lt;baz&gt;"
#
# Returns the escaped String.
def xml_escape(input)
  # :xml => :attr produces a double-quoted attribute value; strip the
  # surrounding quotes it adds.
  encoded = input.to_s.encode(:xml => :attr)
  encoded.gsub(%r!\A"|"\Z!, "")
end
# CGI escape a string for use in a URL. Replaces any special characters
# with appropriate %XX replacements.
#
# input - The String to escape.
#
# Examples
#
#   cgi_escape('foo,bar;baz?')
#   # => "foo%2Cbar%3Bbaz%3F"
#
# Returns the escaped String.
def cgi_escape(input)
  # NOTE(review): unlike markdownify/smartify there is no input.to_s here,
  # so a non-String input raises -- confirm callers always pass Strings.
  CGI.escape(input)
end
# URI escape a string.
#
# input - The String to escape.
#
# Examples
#
#   uri_escape('foo, bar \\baz?')
#   # => "foo,%20bar%20%5Cbaz?"
#
# Returns the escaped String.
def uri_escape(input)
  # Uses Addressable's default unreserved-character normalization.
  Addressable::URI.normalize_component(input)
end
# Collapse every run of whitespace in the input to a single space and
# trim leading/trailing whitespace.
#
# input - The String on which to operate (#to_s'd first).
#
# Returns the formatted String.
def normalize_whitespace(input)
  # default split discards leading/trailing whitespace and splits on
  # whitespace runs, so split + join is equivalent to gsub(/\s+/) + strip
  input.to_s.split.join(" ")
end
# Count the number of whitespace-separated words in the input string.
#
# input - The String on which to operate.
#
# Returns the Integer word count.
def number_of_words(input)
  input.split.size
end
# Join an array of things into an English sentence: items separated by
# commas, with the connector word before the last one.
#
# array     - The Array of Strings to join.
# connector - Word used to connect the last 2 items in the array.
#
# Examples
#
#   array_to_sentence_string(["apples", "oranges", "grapes"])
#   # => "apples, oranges, and grapes"
#
# Returns the formatted String.
def array_to_sentence_string(array, connector = "and")
  # guard-clause form of the original case statement; identical outputs
  return "" if array.length == 0
  return array[0].to_s if array.length == 1
  return "#{array[0]} #{connector} #{array[1]}" if array.length == 2
  "#{array[0...-1].join(", ")}, #{connector} #{array[-1]}"
end
# Convert the input into json string
#
# input - The Array or Hash to be converted
#
# Returns the converted json string
def jsonify(input)
  # as_liquid first normalizes nested values via #to_liquid so drops and
  # documents serialize as their Liquid representation
  as_liquid(input).to_json
end
# Filter an array of objects
#
# input - the object array.
# property - the property within each object to filter by.
# value - the desired value.
# Cannot be an instance of Array nor Hash since calling #to_s on them returns
# their `#inspect` string object.
#
# Returns the filtered array of objects
def where(input, property, value)
  return input if !property || value.is_a?(Array) || value.is_a?(Hash)
  return input unless input.respond_to?(:select)
  input = input.values if input.is_a?(Hash)
  input_id = input.hash
  # implement a hash based on method parameters to cache the end-result
  # for given parameters.
  # NOTE(review): keyed on Object#hash, so the cache can serve stale
  # results if the same input object is mutated between calls.
  @where_filter_cache ||= {}
  @where_filter_cache[input_id] ||= {}
  @where_filter_cache[input_id][property] ||= {}
  # stash or retrieve results to return
  @where_filter_cache[input_id][property][value] ||= begin
    input.select do |object|
      compare_property_vs_target(item_property(object, property), value)
    end.to_a
  end
end
# Filters an array of objects against an expression
#
# input - the object array
# variable - the variable to assign each item to in the expression
# expression - a Liquid comparison expression passed in as a string
#
# Returns the filtered array of objects
def where_exp(input, variable, expression)
  return input unless input.respond_to?(:select)
  input = input.values if input.is_a?(Hash) # FIXME
  # parse once, then evaluate the condition per item with `variable`
  # bound in a pushed Liquid context scope
  condition = parse_condition(expression)
  @context.stack do
    input.select do |object|
      @context[variable] = object
      condition.evaluate(@context)
    end
  end || []
end
# Coerce the input into an Integer. Booleans map to 1/0; everything else
# goes through #to_i.
#
# input - the object to coerce
#
# Returns the integer value
def to_integer(input)
  case input
  when true  then 1
  when false then 0
  else input.to_i
  end
end
# Sort an array of objects
#
# input - the object array
# property - property within each object to filter by
# nils ('first' | 'last') - nils appear before or after non-nil values
#
# Returns the filtered array of objects
def sort(input, property = nil, nils = "first")
  raise ArgumentError, "Cannot sort a null object." if input.nil?
  if property.nil?
    input.sort
  else
    # `order` is the sign applied when exactly one side lacks the
    # property: -1 floats nil-property items to the front, +1 to the back
    if nils == "first"
      order = - 1
    elsif nils == "last"
      order = + 1
    else
      raise ArgumentError, "Invalid nils order: " \
        "'#{nils}' is not a valid nils order. It must be 'first' or 'last'."
    end
    sort_input(input, property, order)
  end
end
# Return a copy of the array with the last `num` elements removed.
# Non-Array inputs are returned untouched.
def pop(array, num = 1)
  return array unless array.is_a?(Array)
  count = Liquid::Utils.to_integer(num)
  array.dup.tap { |copy| copy.pop(count) }
end
# Return a copy of the array with `input` appended.
# Non-Array inputs are returned untouched.
def push(array, input)
  return array unless array.is_a?(Array)
  array.dup.tap { |copy| copy.push(input) }
end
# Return a copy of the array with the first `num` elements removed.
# Non-Array inputs are returned untouched.
def shift(array, num = 1)
  return array unless array.is_a?(Array)
  count = Liquid::Utils.to_integer(num)
  array.dup.tap { |copy| copy.shift(count) }
end
# Return a copy of the array with `input` prepended.
# Non-Array inputs are returned untouched.
def unshift(array, input)
  return array unless array.is_a?(Array)
  array.dup.tap { |copy| copy.unshift(input) }
end
# Pick one or more random elements from the input collection.
#
# input - the collection to sample from (returned untouched when it does
#         not support #sample)
# num   - how many elements to pick (defaults to 1)
#
# Returns a single element when num == 1, otherwise an Array of elements.
def sample(input, num = 1)
  return input unless input.respond_to?(:sample)
  # fall back to 1 when num cannot be coerced to an integer
  num = Liquid::Utils.to_integer(num) rescue 1
  num == 1 ? input.sample : input.sample(num)
end
# Convert an object into its String representation for debugging
# NOTE(review): intentionally shadows Kernel#inspect inside this filters
# module; output is XML-escaped so it is safe to embed in HTML.
#
# input - The Object to be converted
#
# Returns a String representation of the object.
def inspect(input)
  xml_escape(input.inspect)
end
private
# Sort the input Enumerable by the given property.
# If the property doesn't exist, return the sort order respective of
# which item doesn't have the property.
# We also utilize the Schwartzian transform to make this more efficient.
#
# input    - the Enumerable to sort
# property - the property name used as the sort key
# order    - -1 (nils first) or +1 (nils last)
def sort_input(input, property, order)
  # Pair each item with its (possibly nil) sort key so the key is
  # computed only once per item (Schwartzian transform).
  input.map { |item| [item_property(item, property), item] }
    .sort! do |a_info, b_info|
      a_property = a_info.first
      b_property = b_info.first
      if !a_property.nil? && b_property.nil?
        # Only `b` lacks the key: its position is decided by `order`.
        - order
      elsif a_property.nil? && !b_property.nil?
        + order
      else
        # `<=>` returns nil for incomparable values (e.g. Integer vs
        # String); fall back to comparing string representations.
        a_property <=> b_property || a_property.to_s <=> b_property.to_s
      end
    end
    .map!(&:last)
end
# `where` filter helper: does `property` match `target`?
#
# property - the resolved item property (String, Array, nil, ...)
# target   - the desired value (nil, a MethodLiteral, or any stringable)
#
# Matching rules:
#   * nil target      — property must be nil
#   * `empty`/`blank` — property (or its joined Array form) equals the
#                       literal's string form
#   * anything else   — string comparison against each property element
#
# Fast-paths the common String property case to avoid the Array()/join
# allocations of the original (mirrors the later "reduce allocations"
# revision of this same helper present elsewhere in this file).
#
# Returns true or false.
def compare_property_vs_target(property, target)
  case target
  when NilClass
    return true if property.nil?
  when Liquid::Expression::MethodLiteral # `empty` or `blank`
    target = target.to_s
    return true if property == target || Array(property).join == target
  else
    target = target.to_s
    if property.is_a? String
      return true if property == target
    else
      Array(property).each do |prop|
        return true if prop.to_s == target
      end
    end
  end
  false
end
# Resolve `property` on `item`, preferring Liquid drops, then a #data
# hash, then plain hash-style access. Results are memoized per
# (property, item) pair.
#
# NOTE(review): @item_property_cache is never evicted, so it grows for
# the lifetime of the filter instance — confirm filter instances are
# short-lived (per-render) before relying on this.
def item_property(item, property)
  @item_property_cache ||= {}
  @item_property_cache[property] ||= {}
  @item_property_cache[property][item] ||= begin
    if item.respond_to?(:to_liquid)
      # Support dotted paths ("author.name") by walking nested values.
      property.to_s.split(".").reduce(item.to_liquid) do |subvalue, attribute|
        parse_sort_input(subvalue[attribute])
      end
    elsif item.respond_to?(:data)
      parse_sort_input(item.data[property.to_s])
    else
      parse_sort_input(item[property.to_s])
    end
  end
end
# Return numeric-looking strings as Floats so they sort numerically;
# everything else is passed through unchanged.
def parse_sort_input(property)
  numeric = %r!\A\s*-?(?:\d+\.?\d*|\.\d+)\s*\Z!
  property =~ numeric ? property.to_f : property
end
# Recursively convert `item` into plain Liquid-friendly values:
# Hashes and Arrays are rebuilt element by element, and anything that
# answers #to_liquid is converted (guarding against the simple types
# whose #to_liquid returns self, which would otherwise recurse forever).
def as_liquid(item)
  if item.is_a?(Hash)
    item.map { |key, value| as_liquid([key, value]) }.to_h
  elsif item.is_a?(Array)
    item.map { |element| as_liquid(element) }
  elsif item.respond_to?(:to_liquid)
    liquidated = item.to_liquid
    # Simple types return `self` from #to_liquid; stop recursing there.
    liquidated == item ? item : as_liquid(liquidated)
  else
    item
  end
end
# ----------- The following set of code was *adapted* from Liquid::If
# ----------- ref: https://git.io/vp6K6
# Parse a string to a Liquid Condition
#
# exp - the expression String (e.g. "item.title == 'foo'")
#
# Returns a Liquid::Condition. consume(:end_of_string) makes the parser
# raise if any tokens remain after the expression, rejecting trailing
# garbage.
def parse_condition(exp)
  parser = Liquid::Parser.new(exp)
  condition = parse_binary_comparison(parser)
  parser.consume(:end_of_string)
  condition
end
# Generate a Liquid::Condition object from a Liquid::Parser object additionally processing
# the parsed expression based on whether the expression consists of binary operations with
# Liquid operators `and` or `or`
#
# - parser: an instance of Liquid::Parser
#
# Returns an instance of Liquid::Condition
# NOTE(review): only a single `and`/`or` is consumed here, so an
# expression chaining three or more comparisons would leave tokens for
# parse_condition's end-of-string check — confirm chained operators are
# out of scope.
def parse_binary_comparison(parser)
  parse_comparison(parser).tap do |condition|
    binary_operator = parser.id?("and") || parser.id?("or")
    condition.send(binary_operator, parse_comparison(parser)) if binary_operator
  end
end
# Generates a Liquid::Condition object from a Liquid::Parser object based on whether the parsed
# expression involves a "comparison" operator (e.g. <, ==, >, !=, etc)
#
# - parser: an instance of Liquid::Parser
#
# Returns an instance of Liquid::Condition
def parse_comparison(parser)
  left_operand = Liquid::Expression.parse(parser.expression)
  # consume? returns the operator token when present, nil otherwise.
  operator = parser.consume?(:comparison)
  # No comparison-operator detected. Initialize a Liquid::Condition using only left operand
  return Liquid::Condition.new(left_operand) unless operator
  # Parse what remained after extracting the left operand and the `:comparison` operator
  # and initialize a Liquid::Condition object using the operands and the comparison-operator
  Liquid::Condition.new(left_operand, operator, Liquid::Expression.parse(parser.expression))
end
end
end
Liquid::Template.register_filter(
Jekyll::Filters
)
Reduce allocations from where-filter (#7653)
Merge pull request 7653
# frozen_string_literal: true
require_all "jekyll/filters"
module Jekyll
module Filters
include URLFilters
include GroupingFilters
include DateFilters
# Convert a Markdown string into HTML output.
#
# input - The Markdown String to convert.
#
# Returns the HTML formatted String.
def markdownify(input)
@context.registers[:site].find_converter_instance(
Jekyll::Converters::Markdown
).convert(input.to_s)
end
# Convert quotes into smart quotes.
#
# input - The String to convert.
#
# Returns the smart-quotified String.
def smartify(input)
@context.registers[:site].find_converter_instance(
Jekyll::Converters::SmartyPants
).convert(input.to_s)
end
# Convert a Sass string into CSS output.
#
# input - The Sass String to convert.
#
# Returns the CSS formatted String.
def sassify(input)
@context.registers[:site].find_converter_instance(
Jekyll::Converters::Sass
).convert(input)
end
# Convert a Scss string into CSS output.
#
# input - The Scss String to convert.
#
# Returns the CSS formatted String.
def scssify(input)
@context.registers[:site].find_converter_instance(
Jekyll::Converters::Scss
).convert(input)
end
# Slugify a filename or title.
#
# input - The filename or title to slugify.
# mode - how string is slugified
#
# Returns the given filename or title as a lowercase URL String.
# See Utils.slugify for more detail.
def slugify(input, mode = nil)
Utils.slugify(input, :mode => mode)
end
# XML-escape a string, replacing special characters with their HTML
# entity equivalents.
#
# input - The String to escape.
#
# Examples
#
#   xml_escape('foo "bar" <baz>')
#   # => "foo &quot;bar&quot; &lt;baz&gt;"
#
# Returns the escaped String.
def xml_escape(input)
  # :xml => :attr produces a double-quoted, fully escaped attribute
  # value; peel off the surrounding quotes it adds.
  encoded = input.to_s.encode(:xml => :attr)
  encoded.sub(%r!\A"!, "").sub(%r!"\Z!, "")
end
# CGI escape a string for use in a URL. Replaces any special characters
# with appropriate %XX replacements.
#
# input - The String to escape.
#
# Examples
#
# cgi_escape('foo,bar;baz?')
# # => "foo%2Cbar%3Bbaz%3F"
#
# Returns the escaped String.
def cgi_escape(input)
CGI.escape(input)
end
# URI escape a string.
#
# input - The String to escape.
#
# Examples
#
# uri_escape('foo, bar \\baz?')
# # => "foo,%20bar%20%5Cbaz?"
#
# Returns the escaped String.
def uri_escape(input)
Addressable::URI.normalize_component(input)
end
# Collapse every whitespace run in the input to a single space and trim
# the ends.
#
# input - The String on which to operate.
#
# Returns the formatted String.
def normalize_whitespace(input)
  collapsed = input.to_s.gsub(/\s+/, " ")
  collapsed.strip
end
# Count the whitespace-separated words in the input string.
#
# input - The String on which to operate.
#
# Returns the Integer word count.
def number_of_words(input)
  input.split.size
end
# Join an array into an English-style sentence list, separating items
# with commas and using `connector` before the final item.
#
# array     - The Array of Strings to join.
# connector - Word used to connect the last 2 items (default: "and").
#
# Examples
#
#   array_to_sentence_string(["apples", "oranges", "grapes"])
#   # => "apples, oranges, and grapes"
#
# Returns the formatted String.
def array_to_sentence_string(array, connector = "and")
  size = array.length
  if size == 0
    ""
  elsif size == 1
    array[0].to_s
  elsif size == 2
    "#{array[0]} #{connector} #{array[1]}"
  else
    # Oxford comma before the connector for three or more items.
    "#{array[0...-1].join(", ")}, #{connector} #{array[-1]}"
  end
end
# Convert the input into json string
#
# input - The Array or Hash to be converted
#
# Returns the converted json string
def jsonify(input)
as_liquid(input).to_json
end
# Filter an array of objects
#
# input - the object array.
# property - the property within each object to filter by.
# value - the desired value.
# Cannot be an instance of Array nor Hash since calling #to_s on them returns
# their `#inspect` string object.
#
# Returns the filtered array of objects
def where(input, property, value)
  return input if !property || value.is_a?(Array) || value.is_a?(Hash)
  return input unless input.respond_to?(:select)
  input = input.values if input.is_a?(Hash)
  # NOTE(review): the cache below is keyed on Object#hash of the input,
  # which is not collision-proof — two distinct inputs with equal hashes
  # would share cached results. Confirm this tradeoff is acceptable.
  input_id = input.hash
  # implement a hash based on method parameters to cache the end-result
  # for given parameters.
  @where_filter_cache ||= {}
  @where_filter_cache[input_id] ||= {}
  @where_filter_cache[input_id][property] ||= {}
  # stash or retrieve results to return
  @where_filter_cache[input_id][property][value] ||= begin
    input.select do |object|
      compare_property_vs_target(item_property(object, property), value)
    end.to_a
  end
end
# Filters an array of objects against an expression
#
# input - the object array
# variable - the variable to assign each item to in the expression
# expression - a Liquid comparison expression passed in as a string
#
# Returns the filtered array of objects
def where_exp(input, variable, expression)
return input unless input.respond_to?(:select)
input = input.values if input.is_a?(Hash) # FIXME
condition = parse_condition(expression)
@context.stack do
input.select do |object|
@context[variable] = object
condition.evaluate(@context)
end
end || []
end
# Convert the input into integer
#
# input - the object string
#
# Returns the integer value
def to_integer(input)
return 1 if input == true
return 0 if input == false
input.to_i
end
# Sort an array of objects
#
# input - the object array
# property - property within each object to filter by
# nils ('first' | 'last') - nils appear before or after non-nil values
#
# Returns the filtered array of objects
def sort(input, property = nil, nils = "first")
raise ArgumentError, "Cannot sort a null object." if input.nil?
if property.nil?
input.sort
else
if nils == "first"
order = - 1
elsif nils == "last"
order = + 1
else
raise ArgumentError, "Invalid nils order: " \
"'#{nils}' is not a valid nils order. It must be 'first' or 'last'."
end
sort_input(input, property, order)
end
end
def pop(array, num = 1)
return array unless array.is_a?(Array)
num = Liquid::Utils.to_integer(num)
new_ary = array.dup
new_ary.pop(num)
new_ary
end
def push(array, input)
return array unless array.is_a?(Array)
new_ary = array.dup
new_ary.push(input)
new_ary
end
def shift(array, num = 1)
return array unless array.is_a?(Array)
num = Liquid::Utils.to_integer(num)
new_ary = array.dup
new_ary.shift(num)
new_ary
end
def unshift(array, input)
return array unless array.is_a?(Array)
new_ary = array.dup
new_ary.unshift(input)
new_ary
end
def sample(input, num = 1)
return input unless input.respond_to?(:sample)
num = Liquid::Utils.to_integer(num) rescue 1
if num == 1
input.sample
else
input.sample(num)
end
end
# Convert an object into its String representation for debugging
#
# input - The Object to be converted
#
# Returns a String representation of the object.
def inspect(input)
xml_escape(input.inspect)
end
private
# Sort the input Enumerable by the given property.
# If the property doesn't exist, return the sort order respective of
# which item doesn't have the property.
# We also utilize the Schwartzian transform to make this more efficient.
def sort_input(input, property, order)
input.map { |item| [item_property(item, property), item] }
.sort! do |a_info, b_info|
a_property = a_info.first
b_property = b_info.first
if !a_property.nil? && b_property.nil?
- order
elsif a_property.nil? && !b_property.nil?
+ order
else
a_property <=> b_property || a_property.to_s <=> b_property.to_s
end
end
.map!(&:last)
end
# `where` filter helper: does `property` match `target`?
#
# property - the resolved item property (String, Array, nil, ...)
# target   - the desired value (nil, a MethodLiteral, or any stringable)
#
# Matching rules:
#   * nil target      — property must be nil
#   * `empty`/`blank` — property (or its joined Array form) equals the
#                       literal's string form
#   * anything else   — string comparison; the plain-String property
#                       case is fast-pathed to avoid Array()/join
#                       allocations
#
# Returns true or false.
#
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
def compare_property_vs_target(property, target)
  case target
  when NilClass
    return true if property.nil?
  when Liquid::Expression::MethodLiteral # `empty` or `blank`
    target = target.to_s
    return true if property == target || Array(property).join == target
  else
    target = target.to_s
    if property.is_a? String
      return true if property == target
    else
      Array(property).each do |prop|
        return true if prop.to_s == target
      end
    end
  end
  false
end
# rubocop:enable Metrics/CyclomaticComplexity
# rubocop:enable Metrics/PerceivedComplexity
def item_property(item, property)
@item_property_cache ||= {}
@item_property_cache[property] ||= {}
@item_property_cache[property][item] ||= begin
if item.respond_to?(:to_liquid)
property.to_s.split(".").reduce(item.to_liquid) do |subvalue, attribute|
parse_sort_input(subvalue[attribute])
end
elsif item.respond_to?(:data)
parse_sort_input(item.data[property.to_s])
else
parse_sort_input(item[property.to_s])
end
end
end
# return numeric values as numbers for proper sorting
def parse_sort_input(property)
number_like = %r!\A\s*-?(?:\d+\.?\d*|\.\d+)\s*\Z!
return property.to_f if property =~ number_like
property
end
def as_liquid(item)
case item
when Hash
pairs = item.map { |k, v| as_liquid([k, v]) }
Hash[pairs]
when Array
item.map { |i| as_liquid(i) }
else
if item.respond_to?(:to_liquid)
liquidated = item.to_liquid
# prevent infinite recursion for simple types (which return `self`)
if liquidated == item
item
else
as_liquid(liquidated)
end
else
item
end
end
end
# ----------- The following set of code was *adapted* from Liquid::If
# ----------- ref: https://git.io/vp6K6
# Parse a string to a Liquid Condition
def parse_condition(exp)
parser = Liquid::Parser.new(exp)
condition = parse_binary_comparison(parser)
parser.consume(:end_of_string)
condition
end
# Generate a Liquid::Condition object from a Liquid::Parser object additionally processing
# the parsed expression based on whether the expression consists of binary operations with
# Liquid operators `and` or `or`
#
# - parser: an instance of Liquid::Parser
#
# Returns an instance of Liquid::Condition
def parse_binary_comparison(parser)
parse_comparison(parser).tap do |condition|
binary_operator = parser.id?("and") || parser.id?("or")
condition.send(binary_operator, parse_comparison(parser)) if binary_operator
end
end
# Generates a Liquid::Condition object from a Liquid::Parser object based on whether the parsed
# expression involves a "comparison" operator (e.g. <, ==, >, !=, etc)
#
# - parser: an instance of Liquid::Parser
#
# Returns an instance of Liquid::Condition
def parse_comparison(parser)
left_operand = Liquid::Expression.parse(parser.expression)
operator = parser.consume?(:comparison)
# No comparison-operator detected. Initialize a Liquid::Condition using only left operand
return Liquid::Condition.new(left_operand) unless operator
# Parse what remained after extracting the left operand and the `:comparison` operator
# and initialize a Liquid::Condition object using the operands and the comparison-operator
Liquid::Condition.new(left_operand, operator, Liquid::Expression.parse(parser.expression))
end
end
end
Liquid::Template.register_filter(
Jekyll::Filters
)
|
module Jekyll
  # Gem version string (pre-release candidate 2 of 3.1.0).
  VERSION = '3.1.0.pre.rc2'
end
Release :gem: 3.1.0.pre.rc3
module Jekyll
  # Gem version string (pre-release candidate 3 of 3.1.0).
  VERSION = '3.1.0.pre.rc3'
end
|
Pod::Spec.new do |s|
s.name = "LlamaKit"
s.version = "0.5.0"
s.summary = "Collection of must-have functional Swift tools."
s.description = "Collection of must-have functional tools. Trying to be as lightweight as possible, hopefully providing a simple foundation that more advanced systems can build on. LlamaKit is very Cocoa-focused. It is designed to work with common Cocoa paradigms, use names that are understandable to Cocoa devs, integrate with Cocoa tools like GCD, and in general strive for a low-to-modest learning curve for devs familiar with ObjC and Swift rather than Haskell and ML."
s.homepage = "https://github.com/LlamaKit/LlamaKit"
s.license = "MIT"
s.author = { "Rob Napier" => "robnapier@gmail.com" }
s.social_media_url = "http://twitter.com/cocoaphony"
s.ios.deployment_target = "8.0"
s.osx.deployment_target = "10.10"
s.source = { :git => "https://github.com/LlamaKit/LlamaKit.git", :tag => "v#{s.version}" }
s.source_files = "LlamaKit/*.swift"
end
0.6.0 Podspec.
Pod::Spec.new do |s|
s.name = "LlamaKit"
s.version = "0.6.0"
s.summary = "Collection of must-have functional Swift tools."
s.description = "Collection of must-have functional tools. Trying to be as lightweight as possible, hopefully providing a simple foundation that more advanced systems can build on. LlamaKit is very Cocoa-focused. It is designed to work with common Cocoa paradigms, use names that are understandable to Cocoa devs, integrate with Cocoa tools like GCD, and in general strive for a low-to-modest learning curve for devs familiar with ObjC and Swift rather than Haskell and ML."
s.homepage = "https://github.com/LlamaKit/LlamaKit"
s.license = "MIT"
s.author = { "Rob Napier" => "robnapier@gmail.com" }
s.social_media_url = "https://twitter.com/cocoaphony"
s.ios.deployment_target = "8.3"
s.osx.deployment_target = "10.10"
s.source = { :git => "https://github.com/LlamaKit/LlamaKit.git", :tag => "v#{s.version}" }
s.source_files = "LlamaKit/*.swift"
s.requires_arc = true
end
|
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'krikri/spec/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = 'krikri-spec'
s.version = Krikri::Spec::VERSION
s.authors = ['Audrey Altman',
'Mark Breedlove',
'Tom Johnson',
'Mark Matienzo',
'Scott Williams']
s.email = ['tech@dp.la']
s.homepage = 'http://github.com/dpla/krikri-spec'
s.summary = "Shared tests for Krikri's metadata aggregation, " \
'enhancement, and quality control functionality.'
s.description = 'Sharable test suite components for apps based on ' \
'the Krikri engine.'
s.license = 'MIT'
s.files = Dir['lib/**/*', 'README.md']
s.test_files = Dir['spec/**/*']
s.add_dependency 'krikri'
s.add_development_dependency 'yard'
s.add_development_dependency 'rspec', '~> 3.2'
end
Pin `rspec` to `~>3.3`
Bumps the RSpec version to 3.3 to match KriKri. This is related to:
http://rspec.info/blog/2015/06/rspec-3-3-has-been-released/ and
https://github.com/dpla/KriKri/pull/243
Makes RSpec a runtime dependency to force this version requirement on
apps using this gem. Because those apps should make this gem a
development dependency, the requirement won't affect their runtime
dependencies.
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'krikri/spec/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = 'krikri-spec'
s.version = Krikri::Spec::VERSION
s.authors = ['Audrey Altman',
'Mark Breedlove',
'Tom Johnson',
'Mark Matienzo',
'Scott Williams']
s.email = ['tech@dp.la']
s.homepage = 'http://github.com/dpla/krikri-spec'
s.summary = "Shared tests for Krikri's metadata aggregation, " \
'enhancement, and quality control functionality.'
s.description = 'Sharable test suite components for apps based on ' \
'the Krikri engine.'
s.license = 'MIT'
s.files = Dir['lib/**/*', 'README.md']
s.test_files = Dir['spec/**/*']
s.add_dependency 'krikri'
s.add_dependency 'rspec', '~> 3.3'
s.add_development_dependency 'yard'
end
|
module JsonApi::Image
extend JsonApi::Json
TYPE_KEY = 'image'
DEFAULT_PAGE = 25
MAX_PAGE = 50
def self.build_json(image, args={})
json = {}
json['id'] = image.global_id
json['url'] = image.best_url
settings = image.settings
protected_source = !!image.protected?
allowed_sources = args[:allowed_sources]
allowed_sources ||= args[:permissions] && args[:permissions].enabled_protected_sources
allowed_sources ||= []
if settings && protected_source && !allowed_sources.include?(settings['protected_source'])
settings = settings['fallback'] || {}
json['url'] = Uploader.fronted_url(settings['url'])
json['fallback'] = true
protected_source = false
end
['pending', 'content_type', 'width', 'height', 'source_url'].each do |key|
json[key] = settings[key]
end
json['protected'] = protected_source
json['protected_source'] = settings['protected_source'] if json['protected']
json['license'] = OBF::Utils.parse_license(settings['license'])
if (args[:data] || !image.url) && image.data
json['url'] = image.data
end
if args[:permissions]
json['permissions'] = image.permissions_for(args[:permissions])
end
json
end
def self.meta(image)
json = {}
if image.pending_upload?
params = image.remote_upload_params
json = {'remote_upload' => params}
end
json
end
end
fallback source for protected images
# JSON serialization helpers for Image records.
module JsonApi::Image
  extend JsonApi::Json
  TYPE_KEY = 'image'
  DEFAULT_PAGE = 25
  MAX_PAGE = 50

  # Build the JSON payload for an image.
  #
  # args may include:
  #   :allowed_sources - protected sources the caller may view
  #   :permissions     - permission context (also used to derive the
  #                      enabled protected sources)
  #   :data            - when truthy, prefer inline data over the URL
  #
  # Returns a Hash ready for rendering.
  def self.build_json(image, args={})
    json = {}
    json['id'] = image.global_id
    json['url'] = image.best_url
    settings = image.settings
    # Tag legacy lessonpix-licensed images that predate the
    # protected_source flag.
    # NOTE(review): this mutates image.settings in place — confirm the
    # record is not persisted afterwards with this derived value.
    settings['protected_source'] ||= 'lessonpix' if settings['license'] && settings['license']['source_url'] && settings['license']['source_url'].match(/lessonpix/)
    protected_source = !!image.protected?
    allowed_sources = args[:allowed_sources]
    allowed_sources ||= args[:permissions] && args[:permissions].enabled_protected_sources
    allowed_sources ||= []
    # Swap in the fallback rendition when the caller may not view the
    # protected source; the image is then reported as unprotected.
    if settings && protected_source && !allowed_sources.include?(settings['protected_source'])
      settings = settings['fallback'] || {}
      json['url'] = Uploader.fronted_url(settings['url'])
      json['fallback'] = true
      protected_source = false
    end
    ['pending', 'content_type', 'width', 'height', 'source_url'].each do |key|
      json[key] = settings[key]
    end
    json['protected'] = protected_source
    json['protected_source'] = settings['protected_source'] if json['protected']
    json['license'] = OBF::Utils.parse_license(settings['license'])
    # Inline the raw data when explicitly requested or no URL exists.
    if (args[:data] || !image.url) && image.data
      json['url'] = image.data
    end
    if args[:permissions]
      json['permissions'] = image.permissions_for(args[:permissions])
    end
    json
  end

  # Metadata sidecar: remote upload params while the image is pending.
  def self.meta(image)
    json = {}
    if image.pending_upload?
      params = image.remote_upload_params
      json = {'remote_upload' => params}
    end
    json
  end
end
module Jujube
VERSION = "0.8.0"
end
Bump version to 0.9.0
module Jujube
VERSION = "0.9.0"
end
|
require 'delegate'
# Delegator with some handy shortcuts
class Event < SimpleDelegator
# Time it starts at day, or '...'
def start_time_f day
#puts "start #{start_time} : #{start_time.class} #{start_time.to_date} #{day}"
if dtstart.class == Date
# whole day
""
elsif start_time.to_date == day.to_date
start_time.strftime('%H:%M')
else
"..."
end
end
# Time it finishes at day, or '...'
def finish_time_f day
if dtend.class == Date
# whole day
""
elsif finish_time.to_date == day.to_date
finish_time.strftime('%H:%M')
else
return "..."
end
end
# Render the event's time range for `day`, e.g. "10:00 - 11:30".
#
# day - the Date (or date-like) being rendered
#
# Returns "" for whole-day events, a single "..." when the event spans
# across `day` entirely (neither starts nor ends on it), otherwise
# "start - finish" with "..." standing in for off-day endpoints.
# Fix: previously a spanning event rendered as "... - ..."; collapse
# that case to one ellipsis (matches the later revision of this class).
def time_f day
  start = start_time_f day
  finish = finish_time_f day
  if start == finish && start == ""
    # whole day
    ""
  elsif start == finish && start == "..."
    # spans across this day entirely; one ellipsis is enough
    "..."
  else
    "#{start_time_f day} - #{finish_time_f day}"
  end
end
# Date and time from and to
def from_to_f
return "#{dtstart.to_datetime.strftime("%d.%m. %H:%M")} - #{dtend.to_datetime.strftime("%d.%m. %H:%M")}"
end
# Create DateTime from yyyymmdd + h + m .
def self.start_time_from_params params
start_day = Date.parse(params['start_day'])
if !params[:start_time]
return start_day
end
hour, minute = params[:start_time].match(/(\d\d):(\d\d)/)[1,2]
start_time = DateTime.new(start_day.year,
start_day.month, start_day.day, hour.to_i, minute.to_i)
end
def self.start_date_from params
Date.parse(params['start_day'])
end
def update params
begin
hour, minute = params[:start_time].match(/(\d\d):(\d\d)/)[1,2]
start_day = Date.parse(params['start_day'])
start_time = DateTime.new(start_day.year,
start_day.month, start_day.day, hour.to_i, minute.to_i)
self.dtstart = start_time
minutes = EventParamHelper.duration params['duration']
self.dtend = start_time + minutes
rescue
STDERR.puts "event#update params: problems with (up)date."
end
self.summary = params['summary'] if params['summary']
self.description = params['description'] if params['description']
self.location = params['location'] if params['location']
end
# Create a new event from params as given by new_event form of kalindar.
# this should eventually go somewhere else, but its better here than in app already.
def self.create_from_params params
event = RiCal::Component::Event.new($cal.calendars.first)
event.uid = SecureRandom.uuid
if params['summary']
event.summary = params['summary']
end
if params['description']
event.description = params['description']
end
if params['location']
event.location = params['location']
end
# Access should be made failsafe.
start_time = start_time_from_params(params)
event.dtstart = start_time
minutes = EventParamHelper.duration params['duration']
event.dtend = start_time + Rational(minutes, 1440)
Event.new event
end
private
end
module EventParamHelper
# minutes for abbreviations
@@duration_param = {
'15m' => 15,
'30m' => 30,
'60m' => 60,
'90m' => 90,
'120m' => 120,
'1d' => 24 * 60,
'2d' => 24 * 2 * 60,
'5d' => 24 * 5 * 60,
'1w' => 24 * 7 * 60
}
def self.duration duration_p
# throw
@@duration_param[duration_p]
end
def self.check_params params
errors = {}
if not(params[:start_time] =~ /\d\d:\d\d/)
errors[:start_time] = ''
end
if not(duration params[:duration])
errors[:duration] = ''
end
errors
end
end
better time format for events time_f
require 'delegate'
# Delegator with some handy shortcuts
class Event < SimpleDelegator
# Time it starts at day, or '...'
def start_time_f day
#puts "start #{start_time} : #{start_time.class} #{start_time.to_date} #{day}"
if dtstart.class == Date
# whole day
""
elsif start_time.to_date == day.to_date
start_time.strftime('%H:%M')
else
"..."
end
end
# Time it finishes at day, or '...'
def finish_time_f day
if dtend.class == Date
# whole day
""
elsif finish_time.to_date == day.to_date
finish_time.strftime('%H:%M')
else
return "..."
end
end
# Time it finishes and or starts at day, or '...'
def time_f day
start = start_time_f day
finish = finish_time_f day
if start == finish && start == ""
# whole day
""
elsif start == finish && start == "..."
"..."
else
"#{start_time_f day} - #{finish_time_f day}"
end
end
# Date and time from and to
def from_to_f
return "#{dtstart.to_datetime.strftime("%d.%m. %H:%M")} - #{dtend.to_datetime.strftime("%d.%m. %H:%M")}"
end
# Create DateTime from yyyymmdd + h + m .
def self.start_time_from_params params
start_day = Date.parse(params['start_day'])
if !params[:start_time]
return start_day
end
hour, minute = params[:start_time].match(/(\d\d):(\d\d)/)[1,2]
start_time = DateTime.new(start_day.year,
start_day.month, start_day.day, hour.to_i, minute.to_i)
end
def self.start_date_from params
Date.parse(params['start_day'])
end
def update params
begin
hour, minute = params[:start_time].match(/(\d\d):(\d\d)/)[1,2]
start_day = Date.parse(params['start_day'])
start_time = DateTime.new(start_day.year,
start_day.month, start_day.day, hour.to_i, minute.to_i)
self.dtstart = start_time
minutes = EventParamHelper.duration params['duration']
self.dtend = start_time + minutes
rescue
STDERR.puts "event#update params: problems with (up)date."
end
self.summary = params['summary'] if params['summary']
self.description = params['description'] if params['description']
self.location = params['location'] if params['location']
end
# Create a new event from params as given by new_event form of kalindar.
# this should eventually go somewhere else, but its better here than in app already.
def self.create_from_params params
event = RiCal::Component::Event.new($cal.calendars.first)
event.uid = SecureRandom.uuid
if params['summary']
event.summary = params['summary']
end
if params['description']
event.description = params['description']
end
if params['location']
event.location = params['location']
end
# Access should be made failsafe.
start_time = start_time_from_params(params)
event.dtstart = start_time
minutes = EventParamHelper.duration params['duration']
event.dtend = start_time + Rational(minutes, 1440)
Event.new event
end
private
end
module EventParamHelper
# minutes for abbreviations
@@duration_param = {
'15m' => 15,
'30m' => 30,
'60m' => 60,
'90m' => 90,
'120m' => 120,
'1d' => 24 * 60,
'2d' => 24 * 2 * 60,
'5d' => 24 * 5 * 60,
'1w' => 24 * 7 * 60
}
def self.duration duration_p
# throw
@@duration_param[duration_p]
end
def self.check_params params
errors = {}
if not(params[:start_time] =~ /\d\d:\d\d/)
errors[:start_time] = ''
end
if not(duration params[:duration])
errors[:duration] = ''
end
errors
end
end
|
require 'securerandom'

# UUID helpers: convert between the canonical 36-character textual form
# and the packed 16-byte binary form, and generate fresh random UUIDs.
module Kamerling module UUID
  # Format a 16-byte binary string as a canonical UUID string.
  def self.[] bin
    bin.unpack('H8H4H4H4H12').join '-'
  end

  # Pack a canonical UUID string into its 16-byte binary form.
  def self.bin uuid
    [uuid.tr('-', '')].pack 'H*'
  end

  # Generate a random (v4) UUID string.
  # Fix: require 'securerandom' at file scope so this works outside the
  # spec suite (previously relied on the caller having loaded it).
  def self.new
    SecureRandom.uuid
  end
end end
UUID: actually work outside of specs
require 'securerandom'

# Helpers for moving between textual and 16-byte binary UUID forms.
module Kamerling
  module UUID
    # Binary (16 raw bytes) -> canonical "8-4-4-4-12" hex string.
    def self.[] bin
      bin.unpack('H8H4H4H4H12').join '-'
    end

    # Canonical string -> binary: drop the dashes, pack the hex.
    def self.bin uuid
      [uuid.tr('-', '')].pack 'H*'
    end

    # A brand-new random UUID string.
    def self.new
      SecureRandom.uuid
    end
  end
end
|
require 'karmap'
require 'karmap/service_config'
module Karma
class Service
include Karma::ServiceConfig
#################################################
# process configuration
#################################################
base_min_running 1
base_max_running 1
base_port 5000
base_auto_restart true
#################################################
# thread configuration
#################################################
base_num_threads 1
base_log_level :info
LOGGER_SHIFT_AGE = 2
LOGGER_SHIFT_SIZE = 52428800
attr_accessor :notifier, :engine, :process_config, :thread_config, :sleep_time
@@running_instance = nil
def initialize
@engine = Karma.engine_class.new
@notifier = Karma.notifier_class.new
@thread_config = {
num_threads: self.class.num_threads,
log_level: self.class.log_level
}
@process_config = {
min_running: self.class.min_running,
max_running: self.class.max_running,
memory_max: self.class.memory_max,
cpu_quota: self.class.cpu_quota,
auto_start: self.class.auto_start,
auto_restart: self.class.auto_restart
}
@running = false
@thread_pool = Karma::Thread::ThreadPool.new(Proc.new { perform }, { log_prefix: self.log_prefix })
@thread_config_reader = Karma::Thread::SimpleTcpConfigReader.new(@thread_config)
@sleep_time = 1
end
def log_prefix
"log/#{self.name}-#{self.process_config[:port]}"
end
def name
self.class.name.demodulize.downcase
end
def full_name
"#{Karma.project_name}-#{name}"
end
def command
"rails runner -e production \"#{self.class.name}.run\"" # override if needed
end
def log_location
nil # override this
end
def perform
# abstract, override this
raise NotImplementedError
end
def update_process_config(config)
process_config.merge!(config)
end
def update_thread_config(config)
thread_config.merge!(config)
end
#################################################
#################################################
# abstract callbacks
#################################################
def before_start
# abstract callback, override if needed
end
def after_start
# abstract callback, override if needed
end
def before_stop
# abstract callback, override if needed
end
def after_stop
# abstract callback, override if needed
end
#################################################
# Build and run the process-wide singleton service instance; a no-op
# when one is already running.
# NOTE(review): @@running_instance is a class variable shared across
# the whole inheritance tree, and the nil-check is not thread-safe —
# confirm only one service subclass runs per process.
def self.run
  if @@running_instance.nil?
    @@running_instance = self.new()
    @@running_instance.run
  end
end
def run
Signal.trap('INT') do
puts 'int trapped'
thread_config[:running] = false
end
Signal.trap('TERM') do
puts 'term trapped'
thread_config[:running] = false
end
before_start
@thread_config_reader.start
thread_config[:running] = true
after_start
# notify queue after start
message = engine.get_process_status_message($$)
notifier.notify(message)
while thread_config[:running] do
# notify queue each loop
message = engine.get_process_status_message($$)
notifier.notify(message)
thread_config.merge!(@thread_config_reader.config)
@thread_pool.manage(thread_config)
sleep(sleep_time)
end
before_stop
@thread_pool.stop_all
after_stop
# notify queue after stop
message = engine.get_process_status_message($$)
notifier.notify(message)
end
def register
message = Karma::Messages::ProcessRegisterMessage.new(
host: ::Socket.gethostname,
project: Karma.karma_project_id,
service: full_name
)
notifier.notify(message)
end
end
end
remove sleep_time attr_accessor
require 'karmap'
require 'karmap/service_config'
# Base class for long-running Karma services: owns a worker thread pool,
# a TCP-driven runtime configuration reader, and status notifications
# towards the Karma queue.
module Karma
  class Service
    include Karma::ServiceConfig
    #################################################
    # process configuration (ServiceConfig DSL defaults)
    #################################################
    base_min_running 1
    base_max_running 1
    base_port 5000
    base_auto_restart true
    #################################################
    # thread configuration (ServiceConfig DSL defaults)
    #################################################
    base_num_threads 1
    base_log_level :info

    # Log rotation: keep 2 old files, rotate at 50 MiB.
    LOGGER_SHIFT_AGE = 2
    LOGGER_SHIFT_SIZE = 52428800

    attr_accessor :notifier, :engine, :process_config, :thread_config

    # NOTE(review): class variable shared across the Service inheritance
    # tree — only one service instance can run per process.
    @@running_instance = nil

    # Wire up engine/notifier and snapshot the class-level configuration
    # into per-instance hashes that can be mutated at runtime.
    def initialize
      @engine = Karma.engine_class.new
      @notifier = Karma.notifier_class.new
      @thread_config = {
        num_threads: self.class.num_threads,
        log_level: self.class.log_level
      }
      @process_config = {
        min_running: self.class.min_running,
        max_running: self.class.max_running,
        memory_max: self.class.memory_max,
        cpu_quota: self.class.cpu_quota,
        auto_start: self.class.auto_start,
        auto_restart: self.class.auto_restart
      }
      @running = false
      @thread_pool = Karma::Thread::ThreadPool.new(Proc.new { perform }, { log_prefix: self.log_prefix })
      @thread_config_reader = Karma::Thread::SimpleTcpConfigReader.new(@thread_config)
      @sleep_time = 1
    end

    # NOTE(review): process_config gains no :port key in #initialize —
    # confirm where it is populated before relying on this prefix.
    def log_prefix
      "log/#{self.name}-#{self.process_config[:port]}"
    end

    # Demodulized, lowercased class name, e.g. MyApp::Worker -> "worker".
    def name
      self.class.name.demodulize.downcase
    end

    def full_name
      "#{Karma.project_name}-#{name}"
    end

    # Shell command the engine uses to spawn this service.
    def command
      "rails runner -e production \"#{self.class.name}.run\"" # override if needed
    end

    def log_location
      nil # override this
    end

    # Unit of work run by each pooled thread; subclasses must implement.
    def perform
      # abstract, override this
      raise NotImplementedError
    end

    def update_process_config(config)
      process_config.merge!(config)
    end

    def update_thread_config(config)
      thread_config.merge!(config)
    end

    #################################################
    #################################################
    # abstract callbacks
    #################################################
    def before_start
      # abstract callback, override if needed
    end

    def after_start
      # abstract callback, override if needed
    end

    def before_stop
      # abstract callback, override if needed
    end

    def after_stop
      # abstract callback, override if needed
    end
    #################################################

    # Class-level entry point: build and run a singleton instance.
    def self.run
      if @@running_instance.nil?
        @@running_instance = self.new()
        @@running_instance.run
      end
    end

    # Main loop: trap INT/TERM to request shutdown, start the config
    # reader, then repeatedly notify status, merge updated runtime
    # config, let the pool adjust, sleeping @sleep_time per iteration.
    def run
      Signal.trap('INT') do
        puts 'int trapped'
        thread_config[:running] = false
      end
      Signal.trap('TERM') do
        puts 'term trapped'
        thread_config[:running] = false
      end
      before_start
      @thread_config_reader.start
      thread_config[:running] = true
      after_start
      # notify queue after start
      message = engine.get_process_status_message($$)
      notifier.notify(message)
      while thread_config[:running] do
        # notify queue each loop
        message = engine.get_process_status_message($$)
        notifier.notify(message)
        thread_config.merge!(@thread_config_reader.config)
        @thread_pool.manage(thread_config)
        sleep(@sleep_time)
      end
      before_stop
      @thread_pool.stop_all
      after_stop
      # notify queue after stop
      message = engine.get_process_status_message($$)
      notifier.notify(message)
    end

    # Announce this service to the Karma queue.
    def register
      message = Karma::Messages::ProcessRegisterMessage.new(
        host: ::Socket.gethostname,
        project: Karma.karma_project_id,
        service: full_name
      )
      notifier.notify(message)
    end
  end
end
|
require 'bundler'
require_relative 'gemfile_filter'
require_relative 'gemspec_filter'
require_relative 'dependency'
module KeepUp
  # A Gemfile with its current set of locked dependencies.
  class Bundle
    def initialize(definition_builder:)
      @definition_builder = definition_builder
    end

    # Direct gemspec and Gemfile dependencies plus the transitive
    # dependencies of every locked spec.
    def dependencies
      gemspec_dependencies + gemfile_dependencies + transitive_dependencies
    end

    # Rewrite the Gemfile/gemspec for +dependency+ and relock.
    # Returns the newly locked spec, or a falsy value on failure.
    def apply_updated_dependency(dependency)
      report_intent dependency
      update_gemfile_contents(dependency)
      update_gemspec_contents(dependency)
      result = update_lockfile(dependency)
      report_result dependency, result
      result
    end

    # True when Gemfile.lock matches the current definition.
    def check?
      bundler_definition.to_lock == File.read('Gemfile.lock')
    end

    private

    attr_reader :definition_builder

    def report_intent(dependency)
      print "Updating #{dependency.name}"
    end

    def report_result(dependency, result)
      if result
        puts " to #{result.version}"
      else
        puts " to #{dependency.version}"
        puts 'Update failed'
      end
    end

    def gemfile_dependencies
      # Bundler 1.15 changed LockfileParser#dependencies from an Array to
      # a Hash keyed by gem name. Compare real version objects: the
      # original compared Strings against '0.15.', which is both the
      # wrong release and lexicographically unsound ('1.2' > '1.15').
      raw = if Gem::Version.new(Bundler::VERSION) >= Gem::Version.new('1.15')
              bundler_lockfile.dependencies.values
            else
              bundler_lockfile.dependencies
            end
      build_dependencies raw
    end

    def gemspec_dependencies
      gemspec_source = bundler_lockfile.sources.
        find { |it| it.is_a? Bundler::Source::Gemspec }
      return [] unless gemspec_source
      build_dependencies gemspec_source.gemspec.dependencies
    end

    def transitive_dependencies
      build_dependencies bundler_lockfile.specs.flat_map(&:dependencies).uniq
    end

    def build_dependencies(deps)
      deps.map { |dep| build_dependency dep }.compact
    end

    # Wrap a Bundler dependency; returns nil when it has no locked spec.
    def build_dependency(dep)
      spec = locked_spec dep
      return unless spec
      Dependency.new(name: dep.name,
                     requirement_list: dep.requirement.as_list,
                     locked_version: spec.version)
    end

    def locked_spec(dep)
      bundler_lockfile.specs.find { |it| it.name == dep.name }
    end

    def bundler_lockfile
      @bundler_lockfile ||= bundler_definition.locked_gems
    end

    def bundler_definition
      @bundler_definition ||= definition_builder.build(false)
    end

    def update_gemfile_contents(update)
      current_dependency = gemfile_dependencies.find { |it| it.name == update.name }
      return unless current_dependency
      return if current_dependency.matches_spec?(update)
      update = current_dependency.generalize_specification(update)
      contents = File.read 'Gemfile'
      updated_contents = GemfileFilter.apply(contents, update)
      File.write 'Gemfile', updated_contents
    end

    def update_gemspec_contents(update)
      current_dependency = gemspec_dependencies.find { |it| it.name == update.name }
      return unless current_dependency
      return if current_dependency.matches_spec?(update)
      update = current_dependency.generalize_specification(update)
      contents = File.read gemspec_name
      updated_contents = GemspecFilter.apply(contents, update)
      File.write gemspec_name, updated_contents
    end

    def gemspec_name
      @gemspec_name ||= begin
        gemspecs = Dir.glob('*.gemspec')
        case gemspecs.count
        when 1
          gemspecs.first
        else
          # Replaces the original opaque `raise '???'`.
          raise "Expected exactly one gemspec, found #{gemspecs.count}"
        end
      end
    end

    # Update lockfile and return resulting spec, or false in case of failure
    def update_lockfile(update)
      Bundler.clear_gemspec_cache
      definition = definition_builder.build(gems: [update.name])
      definition.lock('Gemfile.lock')
      current = locked_spec(update)
      result = definition.specs.find { |it| it.name == update.name }
      result if result.version > current.version
    rescue Bundler::VersionConflict
      false
    end
  end
end
Fix the Bundler version check: Hash-shaped lockfile dependencies were introduced in Bundler 1.15, not 0.15.
require 'bundler'
require_relative 'gemfile_filter'
require_relative 'gemspec_filter'
require_relative 'dependency'
module KeepUp
  # A Gemfile with its current set of locked dependencies.
  class Bundle
    def initialize(definition_builder:)
      @definition_builder = definition_builder
    end

    # Direct gemspec and Gemfile dependencies plus the transitive
    # dependencies of every locked spec.
    def dependencies
      gemspec_dependencies + gemfile_dependencies + transitive_dependencies
    end

    # Rewrite the Gemfile/gemspec for +dependency+ and relock.
    # Returns the newly locked spec, or a falsy value on failure.
    def apply_updated_dependency(dependency)
      report_intent dependency
      update_gemfile_contents(dependency)
      update_gemspec_contents(dependency)
      result = update_lockfile(dependency)
      report_result dependency, result
      result
    end

    # True when Gemfile.lock matches the current definition.
    def check?
      bundler_definition.to_lock == File.read('Gemfile.lock')
    end

    private

    attr_reader :definition_builder

    def report_intent(dependency)
      print "Updating #{dependency.name}"
    end

    def report_result(dependency, result)
      if result
        puts " to #{result.version}"
      else
        puts " to #{dependency.version}"
        puts 'Update failed'
      end
    end

    def gemfile_dependencies
      # Bundler 1.15 changed LockfileParser#dependencies from an Array to
      # a Hash keyed by gem name. Compare real version objects: the
      # original String comparison `Bundler::VERSION >= '1.15.'`
      # misclassifies Bundler 1.2–1.9 ('1.2' sorts above '1.15').
      raw = if Gem::Version.new(Bundler::VERSION) >= Gem::Version.new('1.15')
              bundler_lockfile.dependencies.values
            else
              bundler_lockfile.dependencies
            end
      build_dependencies raw
    end

    def gemspec_dependencies
      gemspec_source = bundler_lockfile.sources.
        find { |it| it.is_a? Bundler::Source::Gemspec }
      return [] unless gemspec_source
      build_dependencies gemspec_source.gemspec.dependencies
    end

    def transitive_dependencies
      build_dependencies bundler_lockfile.specs.flat_map(&:dependencies).uniq
    end

    def build_dependencies(deps)
      deps.map { |dep| build_dependency dep }.compact
    end

    # Wrap a Bundler dependency; returns nil when it has no locked spec.
    def build_dependency(dep)
      spec = locked_spec dep
      return unless spec
      Dependency.new(name: dep.name,
                     requirement_list: dep.requirement.as_list,
                     locked_version: spec.version)
    end

    def locked_spec(dep)
      bundler_lockfile.specs.find { |it| it.name == dep.name }
    end

    def bundler_lockfile
      @bundler_lockfile ||= bundler_definition.locked_gems
    end

    def bundler_definition
      @bundler_definition ||= definition_builder.build(false)
    end

    def update_gemfile_contents(update)
      current_dependency = gemfile_dependencies.find { |it| it.name == update.name }
      return unless current_dependency
      return if current_dependency.matches_spec?(update)
      update = current_dependency.generalize_specification(update)
      contents = File.read 'Gemfile'
      updated_contents = GemfileFilter.apply(contents, update)
      File.write 'Gemfile', updated_contents
    end

    def update_gemspec_contents(update)
      current_dependency = gemspec_dependencies.find { |it| it.name == update.name }
      return unless current_dependency
      return if current_dependency.matches_spec?(update)
      update = current_dependency.generalize_specification(update)
      contents = File.read gemspec_name
      updated_contents = GemspecFilter.apply(contents, update)
      File.write gemspec_name, updated_contents
    end

    def gemspec_name
      @gemspec_name ||= begin
        gemspecs = Dir.glob('*.gemspec')
        case gemspecs.count
        when 1
          gemspecs.first
        else
          # Replaces the original opaque `raise '???'`.
          raise "Expected exactly one gemspec, found #{gemspecs.count}"
        end
      end
    end

    # Update lockfile and return resulting spec, or false in case of failure
    def update_lockfile(update)
      Bundler.clear_gemspec_cache
      definition = definition_builder.build(gems: [update.name])
      definition.lock('Gemfile.lock')
      current = locked_spec(update)
      result = definition.specs.find { |it| it.name == update.name }
      result if result.version > current.version
    rescue Bundler::VersionConflict
      false
    end
  end
end
|
module Konjac
  # Gem version. Frozen so the shared constant string cannot be mutated.
  VERSION = "0.1.3".freeze
end
Increment version to 0.1.4.
module Konjac
  # Gem version. Frozen so the shared constant string cannot be mutated.
  VERSION = "0.1.4".freeze
end
|
# CocoaPods specification for the Loggable Swift module.
Pod::Spec.new do |s|
  s.name = "Loggable"
  s.version = "0.0.1"
  s.summary = "Protocol-oriented module for printing logs to the console, automatically tracking source location and log type"
  s.description = "Print information to the console with the included lightweight and easy-to-use API or create one yourself which adopts the Loggable protocol"
  s.homepage = "https://github.com/mtfourre/Loggable"
  s.license = { :type => "Unlicense", :file => "LICENSE" }
  s.author = { "Michael Fourre" => "mtfourre@gmail.com" }
  s.platform = :ios
  s.ios.deployment_target = '9.0'
  # NOTE(review): no :tag/:commit is pinned, so installs track the repo's
  # default branch — confirm this is intentional.
  s.source = { :git => 'https://github.com/mtfourre/Loggable.git' }
  s.source_files = "*.swift"
end
Increment version in podspec
# CocoaPods specification for the Loggable Swift module.
Pod::Spec.new do |s|
  s.name = "Loggable"
  s.version = "0.1.0"
  s.summary = "Protocol-oriented module for printing logs to the console, automatically tracking source location and log type"
  s.description = "Print information to the console with the included lightweight and easy-to-use API or create one yourself which adopts the Loggable protocol"
  s.homepage = "https://github.com/mtfourre/Loggable"
  s.license = { :type => "Unlicense", :file => "LICENSE" }
  s.author = { "Michael Fourre" => "mtfourre@gmail.com" }
  s.platform = :ios
  s.ios.deployment_target = '9.0'
  # NOTE(review): no :tag/:commit is pinned, so installs track the repo's
  # default branch — confirm this is intentional.
  s.source = { :git => 'https://github.com/mtfourre/Loggable.git' }
  s.source_files = "*.swift"
end
|
#!/usr/bin/env ruby
#
# Ad-hoc script for sending the test event to service classes
#
# Usage:
#
# bundle exec ruby service_test.rb
#
# Environment variables used:
#
# REPO_NAME Defaults to "App"
# SLACK_WEBHOOK_URL Slack is not tested unless set
# FLOWDOCK_API_TOKEN Flowdock is not tested unless set
#
# Example:
#
# SLACK_WEBHOOK_URL="http://..." bundle exec ruby service_test.rb
#
###
require 'cc/services'
CC::Service.load_services
# Decorator for an invocation object: executes it, prints the raw
# response for inspection, and passes the result through unchanged.
class WithResponseLogging
  def initialize(invocation)
    @invocation = invocation
  end

  # Run the wrapped invocation, echo its response via Kernel#p, and
  # return the response.
  def call
    response = @invocation.call
    p response
    response
  end
end
# Send the shared test event through +klass+ configured with +config+,
# wrapping the invocation so the raw response is printed.
def test_service(klass, config)
  repo_name = ENV["REPO_NAME"] || "App"
  service = klass.new(config, name: :test, repo_name: repo_name)
  CC::Service::Invocation.new(service) do |i|
    i.wrap(WithResponseLogging)
  end
end
# Each service runs only when its credentials are supplied via the
# environment; with no variables set, nothing is exercised.
if webhook_url = ENV["SLACK_WEBHOOK_URL"]
  puts "Testing Slack..."
  test_service(CC::Service::Slack, webhook_url: webhook_url)
end
if api_token = ENV["FLOWDOCK_API_TOKEN"]
  puts "Testing Flowdock..."
  test_service(CC::Service::Flowdock, api_token: api_token)
end
# JIRA requires all four settings before it is tested.
if (jira_username = ENV["JIRA_USERNAME"]) &&
   (jira_password = ENV["JIRA_PASSWORD"]) &&
   (jira_domain = ENV["JIRA_DOMAIN"]) &&
   (jira_project = ENV["JIRA_PROJECT"])
  puts "Testing JIRA"
  test_service(CC::Service::Jira, { username: jira_username,
                                    password: jira_password,
                                    domain: jira_domain,
                                    project_id: jira_project })
end
DRY up output in service test
#!/usr/bin/env ruby
#
# Ad-hoc script for sending the test event to service classes
#
# Usage:
#
# bundle exec ruby service_test.rb
#
# Environment variables used:
#
# REPO_NAME Defaults to "App"
# SLACK_WEBHOOK_URL Slack is not tested unless set
# FLOWDOCK_API_TOKEN Flowdock is not tested unless set
#
# Example:
#
# SLACK_WEBHOOK_URL="http://..." bundle exec ruby service_test.rb
#
###
require 'cc/services'
CC::Service.load_services
# Decorator for an invocation object: executes it, prints the raw
# response for inspection, and passes the result through unchanged.
class WithResponseLogging
  def initialize(invocation)
    @invocation = invocation
  end

  # Run the wrapped invocation, echo its response via Kernel#p, and
  # return the response.
  def call
    response = @invocation.call
    p response
    response
  end
end
# Send the shared test event through +klass+ configured with +config+.
# Announces the service slug and config up front (DRY replacement for
# the per-service "Testing ..." lines), then logs the raw response.
def test_service(klass, config)
  repo_name = ENV["REPO_NAME"] || "App"
  service = klass.new(config, name: :test, repo_name: repo_name)
  puts "Service: #{klass.slug}"
  puts "Config: #{config.inspect}"
  CC::Service::Invocation.new(service) do |i|
    i.wrap(WithResponseLogging)
  end
end
# Each service runs only when its credentials are supplied via the
# environment; with no variables set, nothing is exercised.
if webhook_url = ENV["SLACK_WEBHOOK_URL"]
  test_service(CC::Service::Slack, webhook_url: webhook_url)
end
if api_token = ENV["FLOWDOCK_API_TOKEN"]
  test_service(CC::Service::Flowdock, api_token: api_token)
end
# JIRA requires all four settings before it is tested.
if (jira_username = ENV["JIRA_USERNAME"]) &&
   (jira_password = ENV["JIRA_PASSWORD"]) &&
   (jira_domain = ENV["JIRA_DOMAIN"]) &&
   (jira_project = ENV["JIRA_PROJECT"])
  test_service(CC::Service::Jira, { username: jira_username,
                                    password: jira_password,
                                    domain: jira_domain,
                                    project_id: jira_project })
end
|
# Posts a promotional tweet for a lesson on behalf of a teacher.
class LessonTweeter
  include UsersHelper

  # Twitter's classic status length limit, enforced before posting.
  MAX_TWEET_LENGTH = 140

  # Case-insensitive placeholder replaced with the lesson URL.
  URL_PLACEHOLDER = /{{URL}}/i

  def initialize(lesson, teacher)
    @lesson = lesson
    @teacher = teacher
  end

  # Compose and post the tweet. Uses the lesson's custom message when
  # present, otherwise a default; the {{URL}} placeholder is substituted
  # with the lesson URL, or the URL is appended when absent.
  # Returns true on success, false when the status is too long or the
  # Twitter API call raises.
  def tweet
    status = lesson.tweet_message.present? ? lesson.tweet_message : default_content
    # match? avoids allocating a MatchData just for the boolean test.
    if status.match?(URL_PLACEHOLDER)
      status.gsub!(URL_PLACEHOLDER, url)
    else
      status += " - #{url}"
    end
    return false if status.length > MAX_TWEET_LENGTH
    begin
      # Credentials come from the environment.
      client = Twitter::REST::Client.new do |config|
        config.consumer_key = ENV['TWITTER_CONSUMER_KEY']
        config.consumer_secret = ENV['TWITTER_CONSUMER_SECRET']
        config.access_token = ENV['TWITTER_OAUTH_TOKEN']
        config.access_token_secret = ENV['TWITTER_OAUTH_SECRET']
      end
      client.update(status)
      true
    rescue StandardError
      # Deliberate best-effort: a failed tweet reports false rather than
      # propagating the API error.
      false
    end
  end

  private

  attr_reader :lesson, :teacher

  # Fallback tweet body; keeps the {{URL}} placeholder for substitution.
  def default_content
    <<-TWEET.strip_heredoc.sub(/\n$/, "")
      Shame on #{first_name(teacher) } for this boring message. The next class is "#{lesson.title}"! {{URL}}
    TWEET
  end

  def url
    Rails.application.routes.url_helpers.lesson_url(lesson)
  end
end
Fix Hound Violation
# Posts a promotional tweet for a lesson on behalf of a teacher.
class LessonTweeter
  include UsersHelper

  def initialize(lesson, teacher)
    @lesson = lesson
    @teacher = teacher
  end

  # Compose and post the tweet. Uses the lesson's custom message when
  # present, otherwise a default; the {{URL}} placeholder (any case) is
  # replaced with the lesson URL, or the URL is appended when absent.
  # Returns true on success, false when the status exceeds 140
  # characters or when the Twitter API call raises.
  def tweet
    status = lesson.tweet_message.present? ? lesson.tweet_message : default_content
    if status.match(/{{URL}}/i)
      status.gsub!(/{{URL}}/i, url)
    else
      status += " - #{url}"
    end
    return false if status.length > 140
    begin
      # Credentials come from the environment.
      client = Twitter::REST::Client.new do |config|
        config.consumer_key = ENV['TWITTER_CONSUMER_KEY']
        config.consumer_secret = ENV['TWITTER_CONSUMER_SECRET']
        config.access_token = ENV['TWITTER_OAUTH_TOKEN']
        config.access_token_secret = ENV['TWITTER_OAUTH_SECRET']
      end
      client.update(status)
      true
    rescue StandardError
      # NOTE(review): API failures are swallowed and reported only as
      # false — consider logging the exception.
      false
    end
  end

  private

  attr_reader :lesson, :teacher

  # Fallback tweet body; keeps the {{URL}} placeholder for substitution.
  def default_content
    <<-TWEET.strip_heredoc.sub(/\n$/, "")
      Shame on #{first_name(teacher) } for this boring message. The next class is "#{lesson.title}"! {{URL}}
    TWEET
  end

  def url
    Rails.application.routes.url_helpers.lesson_url(lesson)
  end
end
|
# Thin facade over Letsads::SmsSender.
class Letsads::Client
  # Deliver +message+ to +phone_numbers+ through the cached sender.
  def send_sms(phone_numbers, message)
    sender = sms_sender
    sender.send_sms(phone_numbers, message)
  end

  private

  # Lazily build and memoize the shared sender instance.
  def sms_sender
    @sms_sender = Letsads::SmsSender.new unless defined?(@sms_sender) && @sms_sender
    @sms_sender
  end
end
Add balance checking
# Facade over the LetsAds SMS API: message sending plus balance lookup.
class Letsads::Client
  # Deliver +message+ to +phone_numbers+ through the cached sender.
  def send_sms(phone_numbers, message)
    sms_sender.send_sms(phone_numbers, message)
  end

  # POST a balance request to the external LetsAds endpoint and return
  # the raw RestClient response (XML payload).
  def balance
    xml_builder = Letsads::XMLBuilder.new
    xml = xml_builder.balance_xml
    end_point = Letsads::EXTERNAL_API_ENDPOINT
    RestClient.post(end_point, xml, content_type: :xml)
  end

  private

  # Lazily built shared sender instance.
  def sms_sender
    @sms_sender ||= Letsads::SmsSender.new
  end
end
|
module Librarian
  # Read-only value object pairing a source with its dependency list.
  class Spec
    attr_reader :source, :dependencies

    def initialize(source, dependencies)
      @source = source
      @dependencies = dependencies
    end
  end
end
Use attr_accessor rather than attr_reader and instance variables.
module Librarian
  # Value object pairing a source with its dependency list. Writers are
  # declared but kept private, so instances are read-only to callers.
  class Spec
    attr_accessor :source
    attr_accessor :dependencies
    private :source=, :dependencies=

    def initialize(source, dependencies)
      self.dependencies = dependencies
      self.source = source
    end
  end
end
|
require 'aws-sdk'
# Shared S3 client; credentials come from the standard AWS env/config.
$s3 = Aws::S3::Client.new
# Upload +filename+ to the ocr.nyc bucket under its _site-relative key,
# sending an MD5 integrity header alongside the body.
def put_object(filename, content_type)
  # Read the file once and reuse the bytes for both the MD5 header and
  # the request body: the original opened the file twice and leaked the
  # second handle.
  body = File.binread(filename)
  md5 = Digest::MD5.base64digest(body)
  puts "#{md5} #(unknown)"
  $s3.put_object({
    acl: 'private',
    body: body,
    bucket: "ocr.nyc",
    # Proper Cache-Control syntax is "max-age=60" (the original
    # 'max-age: 60' is not a valid directive).
    cache_control: 'max-age=60',
    content_md5: md5,
    # content_length removed: the original hard-coded 1, which does not
    # match the body; the SDK derives the length from the body itself.
    content_type: content_type,
    key: filename.gsub(/^_site\//, ''),
    storage_class: 'REDUCED_REDUNDANCY',
  })
end
# Upload every generated asset type with its correct Content-Type.
Dir.glob('_site/**/*.ico') do |filename|
  put_object filename, 'image/x-icon'
end
Dir.glob('_site/**/*.png') do |filename|
  put_object filename, 'image/png'
end
Dir.glob('_site/**/*.jpg') do |filename|
  put_object filename, 'image/jpeg'
end
Dir.glob('_site/**/*.css') do |filename|
  put_object filename, 'text/css'
end
Dir.glob('_site/**/*.js') do |filename|
  put_object filename, 'application/javascript'
end
Dir.glob('_site/**/*.html') do |filename|
  put_object filename, 'text/html'
end
Dir.glob('_site/**/*.xml') do |filename|
  put_object filename, 'text/xml'
end
Deploy script: track uploaded object keys and delete stale objects from the bucket.
require 'aws-sdk'
# Shared S3 client; credentials come from the standard AWS env/config.
$s3 = Aws::S3::Client.new
# Keys uploaded during this run; used afterwards to prune stale objects.
$object_keys = {}
# Upload +filename+ to the ocr.nyc bucket and record its key so stale
# objects can be pruned after the run.
def put_object(filename, content_type)
  # Map the local path to its S3 key: strip the _site/ prefix and
  # collapse trailing index.html so pages are served at directory URLs.
  object_key = filename.gsub(/^_site\//, '').gsub(/\/index.html$/, '/')
  $object_keys[object_key] = true
  # Read the file once and reuse the bytes for both the MD5 header and
  # the request body: the original opened the file twice and leaked the
  # second handle.
  body = File.binread(filename)
  md5 = Digest::MD5.base64digest(body)
  puts "#{md5} #(unknown)"
  $s3.put_object({
    acl: 'private',
    body: body,
    bucket: "ocr.nyc",
    # Proper Cache-Control syntax is "max-age=300" (the original
    # 'max-age: 300' is not a valid directive).
    cache_control: 'max-age=300',
    content_md5: md5,
    content_type: content_type,
    key: object_key,
    storage_class: 'REDUCED_REDUNDANCY',
  })
end
# Upload every generated asset type with its correct Content-Type.
Dir.glob('_site/**/*.ico') do |filename|
  put_object filename, 'image/x-icon'
end
Dir.glob('_site/**/*.png') do |filename|
  put_object filename, 'image/png'
end
Dir.glob('_site/**/*.jpg') do |filename|
  put_object filename, 'image/jpeg'
end
Dir.glob('_site/**/*.css') do |filename|
  put_object filename, 'text/css'
end
Dir.glob('_site/**/*.js') do |filename|
  put_object filename, 'application/javascript'
end
Dir.glob('_site/**/*.html') do |filename|
  put_object filename, 'text/html'
end
Dir.glob('_site/**/*.xml') do |filename|
  put_object filename, 'text/xml'
end
# Prune: delete any bucket object that was not uploaded in this run.
# NOTE(review): list_objects_v2 returns at most 1000 keys per page —
# confirm pagination is unnecessary for this bucket.
$s3.list_objects_v2(bucket: 'ocr.nyc').contents.each do |object|
  unless $object_keys[object.key]
    puts "deleting #{object.key}"
    $s3.delete_object({
      bucket: 'ocr.nyc',
      key: object.key
    })
  end
end
|
module Licode
  # Gem version. Frozen so the shared constant string cannot be mutated.
  VERSION = "0.1.0".freeze
end
Bump version to 0.1.1 to get the release working correctly.
module Licode
  # Gem version. Frozen so the shared constant string cannot be mutated.
  VERSION = "0.1.1".freeze
end
|
#!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2015, Terradue S.r.l. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# -------------------------------------------------------------------------- #
require 'time'
require 'timeout'
require 'drivers/bursting_driver'
include REXML
class LibcloudDriver < BurstingDriver
DRIVER_CONF = "#{ETC_LOCATION}/libcloud_driver.conf"
DRIVER_DEFAULT = "#{ETC_LOCATION}/libcloud_driver.default"
# Commands constants
PUBLIC_CMD = {
:run => {
:cmd => :'create-node',
:args => {
"HARDWAREID" => {
:opt => '--flavorId'
},
"IMAGEID" => {
:opt => '--image'
},
"NETWORKS" => {
:opt => '--networks'
},
"POOL" => {
:opt => '--floatingippool'
},
"VOLUMESIZE" => {
:opt => '--size'
},
"VOLUMETYPE" => {
:opt => '--type'
},
"VOLUMEDEVICE" => {
:opt => '--device'
},
},
},
:get => {
:cmd => 'find-node',
:args => {
"ID" => {
:opt => '--id'
},
},
},
:shutdown => {
:cmd => 'destroy-node',
:args => {
"ID" => {
:opt => '--id'
},
},
}
}
# CLI specific attributes that will be retrieved in a polling action
POLL_ATTRS = [
:public_ips,
:private_ips
]
# Build a driver for +host+: load CLI settings from the public cloud
# configuration and precompute the libcloud CLI credential/endpoint
# flags shared by every command.
def initialize(host)
  super(host)
  @cli_cmd = @public_cloud_conf['libcloud_cmd']
  @context_path = @public_cloud_conf['context_path']
  @instance_types = @public_cloud_conf['instance_types']
  @hostname = host
  hosts = @public_cloud_conf['hosts']
  # Fall back to the "default" host entry when +host+ is not configured.
  @host = hosts[host] || hosts["default"]
  @context_path.concat("/#{@host['site']}/")
  # Flags appended to every CLI invocation; output is requested as JSON.
  @common_args = ""
  @common_args.concat("--provider \'#{@host['provider']}\'")
  @common_args.concat(" --user \'#{@host['user']}\'")
  @common_args.concat(" --key \'#{@host['key']}\'")
  @common_args.concat(" --ex_force_auth_version \'#{@host['ex_force_auth_version']}\'")
  @common_args.concat(" --ex_force_auth_url \'#{@host['ex_force_auth_url']}\'")
  @common_args.concat(" --ex_force_base_url \'#{@host['ex_force_base_url']}\'")
  @common_args.concat(" --ex_domain_name \'#{@host['ex_domain_name']}\'")
  @common_args.concat(" --ex_token_scope \'#{@host['ex_token_scope']}\'")
  @common_args.concat(" --ex_tenant_name \'#{@host['ex_tenant_name']}\'")
  @common_args.concat(" --json")
end
# Create a node named "one-<vm_id>", wait (up to 5 minutes) for it to
# obtain a private IP, optionally create and attach a volume, and
# register a context for each private IP.
# Returns the provider node id; exits the process on failure.
def create_instance(vm_id, opts, context_xml)
  command = self.class::PUBLIC_CMD[:run][:cmd]
  args = @common_args.clone
  # opts is keyed by CLI flag; each pair becomes "--flag 'value'".
  opts.each {|k,v|
    args.concat(" ")
    args.concat("#{k} \'#{v}\'")
  }
  begin
    rc, info = do_command("#{@cli_cmd} #{command} #{args} --name \'one-#{vm_id}\' 2>/dev/null")
    nodeId = JSON.parse(info)['data'][0]['id']
    log("#{LOG_LOCATION}/#{vm_id}.log","info","nodeid is #{nodeId.to_s}")
    privateAddresses = JSON.parse(info)['data'][0]['private_ips']
    # Poll find-node until a private IP shows up; timeout is 5 minutes.
    timeout_in_seconds = 5*60
    Timeout.timeout(timeout_in_seconds) do
      while privateAddresses.nil? || privateAddresses.empty? do
        rc, info = do_command("#{@cli_cmd} find-node #{args} --id \'#{nodeId}\' 2>/dev/null")
        privateAddresses = JSON.parse(info)['data'][0]['private_ips']
      end
    end
    # Volume handling: only when type, size and device are all present.
    volumeTypeOption = '--type'
    volumeSizeOption = '--size'
    volumeDeviceOption = '--device'
    volumeTypeValue = opts[volumeTypeOption]
    volumeSizeValue = opts[volumeSizeOption]
    volumeDeviceValue = opts[volumeDeviceOption]
    if (volumeTypeValue) && (volumeSizeValue) && (volumeDeviceValue)
      # creating a new volume
      rc, volumeinfo = do_command("#{@cli_cmd} create-volume #{@common_args} #{volumeTypeOption} \'#{volumeTypeValue}\' #{volumeSizeOption} #{volumeSizeValue} --name \'one-#{vm_id}\' --json 2>/dev/null")
      raise "Error creating the volume" if !rc
      log("#{LOG_LOCATION}/#{vm_id}.log","info","volume creation result code: #{rc}")
      # retrieving the volume id
      volumeId = JSON.parse(volumeinfo)['data'][0]['id'].to_s
      log("#{LOG_LOCATION}/#{vm_id}.log","info","volume id is #{volumeId}")
      log("#{LOG_LOCATION}/#{vm_id}.log","info","attaching volume #{volumeId} to vm \'#{nodeId}\'")
      # attaching volume to the vm
      rc, volumeinfo = do_command("#{@cli_cmd} attach-volume #{@common_args} --volumeId \'#{volumeId}\' --id \'#{nodeId}\' #{volumeDeviceOption} \'#{volumeDeviceValue}\' 2>/dev/null")
    end
    # NOTE(review): when the volume branch is skipped, rc still holds the
    # [ok, output] array from do_command, so this check never fires —
    # confirm whether it should destructure rc first.
    raise "Error creating the instance" if !rc
  rescue => e
    STDERR.puts e.message
    # destroying the vm
    self.destroy_instance(nodeId)
    exit(-1)
  end
  privateAddresses.each { |ip|
    context_id = ip.gsub(".", "-")
    create_context(context_xml, context_id)
  }
  log("#{LOG_LOCATION}/#{vm_id}.log","info","returning nodeid #{nodeId}")
  return nodeId
end
# Fetch the JSON description of node +deploy_id+ via find-node.
# Exits the process when the node does not exist.
def get_instance(deploy_id)
  log("#{LOG_LOCATION}/libcloud_dev","info","get_instance\n #{deploy_id.to_s}")
  command = self.class::PUBLIC_CMD[:get][:cmd]
  args = @common_args.clone
  args.concat(" --id #{deploy_id}")
  begin
    rc,info = do_command("#{@cli_cmd} #{command} #{args} 2>/dev/null")
    # A 'message' key in the payload marks a CLI-level error.
    raise "Instance #{deploy_id} does not exist" if JSON.parse(info)['message']
  rescue => e
    STDERR.puts e.message
    exit(-1)
  end
  return info
end
# Destroy node +deploy_id+, remove the contexts of its private IPs and
# delete any volumes that were attached to it.
# Returns the pre-destroy instance JSON with state forced to 'deleted';
# exits the process on failure.
def destroy_instance(deploy_id)
  command = self.class::PUBLIC_CMD[:shutdown][:cmd]
  info = get_instance(deploy_id)
  args = @common_args.clone
  args.concat(" --id #{deploy_id}")
  begin
    # do_command returns [ok, output]; destructure so the failure check
    # actually fires (the original tested the whole array, always truthy).
    rc, _out = do_command("#{@cli_cmd} #{command} #{args} 2>/dev/null")
    hash = JSON.parse(info)
    hash['data'][0]['state']='deleted'
    info = hash.to_json
    # The original interpolated the undefined local `id` here.
    raise "Instance #{deploy_id} does not exist" if !rc
    privateAddresses = JSON.parse(info)['data'][0]['private_ips']
    privateAddresses.each { |ip|
      context_id = ip.gsub(".", "-")
      remove_context(context_id)
    }
    volumesAttached = JSON.parse(info)['data'][0]['extra']['volumes_attached']
    if volumesAttached
      # Wait (up to 5 minutes) until the node is really gone before
      # destroying its volumes. Use a separate local for command output
      # so the returned instance JSON is not clobbered (bug in the
      # original, which reused `info`).
      timeout_in_seconds = 5*60
      Timeout.timeout(timeout_in_seconds) do
        loop do
          rc, out = do_command("#{@cli_cmd} find-node #{args} --id \'#{deploy_id}\' 2>/dev/null")
          break if JSON.parse(out)['message']
        end
      end
      volumesAttached.each do |volume|
        rc, out = do_command("#{@cli_cmd} destroy-volume #{@common_args} -v \'#{volume['id'].to_s}\' ")
        raise "An error occured while destroying volume #{volume['id'].to_s} message: #{JSON.parse(out)['message']}" if !rc
      end
    end
  rescue => e
    STDERR.puts e.message
    exit(-1)
  end
  return info
end
# Print host capacity and per-VM poll data to stdout in the key=value
# format expected by OpenNebula's monitoring.
def monitor_all_vms(host_id)
  totalmemory = 0
  totalcpu = 0
  # Aggregate capacity from the configured instance types.
  @host['capacity'].each { |name, size|
    cpu, mem = instance_type_capacity(name)
    totalmemory += mem * size.to_i
    totalcpu += cpu * size.to_i
  }
  host_info = "HYPERVISOR=libcloud\n"
  host_info << "PUBLIC_CLOUD=YES\n"
  host_info << "PRIORITY=-1\n"
  host_info << "TOTALMEMORY=#{totalmemory.round}\n"
  host_info << "TOTALCPU=#{totalcpu}\n"
  host_info << "CPUSPEED=1000\n"
  host_info << "HOSTNAME=\"#{@hostname}\"\n"
  vms_info = "VM_POLL=YES\n"
  client = ::OpenNebula::Client.new()
  xml = client.call("host.info",host_id.to_i)
  xml_host = REXML::Document.new(xml) if xml
  # NOTE(review): usedcpu/usedmemory are assigned but never used below.
  usedcpu = 100
  usedmemory = 0
  # In the case of the libcloud driver is not possible to assign a name
  # or a TAG to the VM. In this way a VM started from the OpenNebula cannot
  # be discriminated from one started from another client.
  # The solution here is to perform a polling call for each VM.
  # The OpenNebula's XML-RPC Api is used to get all the instances associated
  # with the 'host_id' specified.
  XPath.each(xml_host, "/HOST/VMS/ID") { |e1|
    vm_id = e1.text
    xml = client.call("vm.info", vm_id.to_i)
    xml_vm = REXML::Document.new(xml) if xml
    deploy_id = ""
    poll_data = ""
    XPath.each(xml_vm, "/VM/DEPLOY_ID") { |e2| deploy_id = e2.text }
    # Poll only VMs that actually carry a deployment id.
    if !deploy_id.nil?
      if !deploy_id.empty?
        instance = get_instance(deploy_id)
        poll_data = parse_poll(instance)
        vms_info << "VM=[\n"
        vms_info << " ID=#{vm_id || -1},\n"
        vms_info << " DEPLOY_ID=#{deploy_id},\n"
        vms_info << " POLL=\"#{poll_data}\" ]\n"
      end
    end
  }
  puts host_info
  puts vms_info
end
# Build the OpenNebula poll string for one instance.
#
# instance_info - JSON string as returned by get_instance.
#
# Returns a String of KEY=VALUE pairs: zeroed usage counters, the VM
# state, and the LIBCLOUD_* address attributes listed in POLL_ATTRS.
def parse_poll(instance_info)
  info = "#{POLL_ATTRIBUTE[:usedmemory]}=0 " \
         "#{POLL_ATTRIBUTE[:usedcpu]}=0 " \
         "#{POLL_ATTRIBUTE[:nettx]}=0 " \
         "#{POLL_ATTRIBUTE[:netrx]}=0 "
  instance = JSON.parse(instance_info)
  state = ""
  if !instance
    state = VM_STATE[:deleted]
  else
    state = case instance['data'][0]['state'].upcase
    when "RUNNING", "STARTING"
      VM_STATE[:active]
    when "TERMINATED", "STOPPED", "REBOOTING"
      VM_STATE[:paused]
    else
      VM_STATE[:deleted]
    end
  end
  info << "#{POLL_ATTRIBUTE[:state]}=#{state} "
  # Append address attributes only when a payload is present: the
  # original dereferenced a nil/false `instance` in this loop.
  if instance
    POLL_ATTRS.each do |key|
      value = instance['data'][0]["#{key}"]
      next if value.nil? || value.empty?
      # Normalize to one comma-separated string. Hash#inspect already
      # yields a String, which must not receive #join (the original
      # called value_str.join on it and raised NoMethodError).
      value_str = value.kind_of?(Hash) ? value.inspect : value.join(",")
      # TODO: In the case of _PUBLICADDRESSES or _PRIVATEADDRESSES keys,
      # handle the case in which multiple addresses are passed.
      # Use comma-separated list (e.g., interface to E-CEO portal)
      info << "LIBCLOUD_#{key.to_s.upcase.sub('PUBLIC_IPS', "PUBLICADDRESSES").sub('PRIVATE_IPS',"PRIVATEADDRESSES")}=#{value_str} "
    end
  end
  return info
end
private
# Run +cmd+ through OpenNebula's LocalCommand helper.
# Returns [true, stdout] on success, or [false, exit_code] on failure
# (the error is echoed to STDERR).
def do_command(cmd)
  rc = LocalCommand.run(cmd)
  if rc.code == 0
    return [true, rc.stdout]
  else
    STDERR.puts("Error executing: #{cmd} err: #{rc.stderr} out: #{rc.stdout}")
    return [false, rc.code]
  end
end
end
[libcloud] Removed all redirections of stderr to /dev/null
#!/usr/bin/env ruby
# -------------------------------------------------------------------------- #
# Copyright 2015, Terradue S.r.l. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# -------------------------------------------------------------------------- #
require 'time'
require 'timeout'
require 'drivers/bursting_driver'
include REXML
class LibcloudDriver < BurstingDriver
DRIVER_CONF = "#{ETC_LOCATION}/libcloud_driver.conf"
DRIVER_DEFAULT = "#{ETC_LOCATION}/libcloud_driver.default"
# Commands constants
PUBLIC_CMD = {
:run => {
:cmd => :'create-node',
:args => {
"HARDWAREID" => {
:opt => '--flavorId'
},
"IMAGEID" => {
:opt => '--image'
},
"NETWORKS" => {
:opt => '--networks'
},
"POOL" => {
:opt => '--floatingippool'
},
"VOLUMESIZE" => {
:opt => '--size'
},
"VOLUMETYPE" => {
:opt => '--type'
},
"VOLUMEDEVICE" => {
:opt => '--device'
},
},
},
:get => {
:cmd => 'find-node',
:args => {
"ID" => {
:opt => '--id'
},
},
},
:shutdown => {
:cmd => 'destroy-node',
:args => {
"ID" => {
:opt => '--id'
},
},
}
}
# CLI specific attributes that will be retrieved in a polling action
POLL_ATTRS = [
:public_ips,
:private_ips
]
def initialize(host)
super(host)
@cli_cmd = @public_cloud_conf['libcloud_cmd']
@context_path = @public_cloud_conf['context_path']
@instance_types = @public_cloud_conf['instance_types']
@hostname = host
hosts = @public_cloud_conf['hosts']
@host = hosts[host] || hosts["default"]
@context_path.concat("/#{@host['site']}/")
@common_args = ""
@common_args.concat("--provider \'#{@host['provider']}\'")
@common_args.concat(" --user \'#{@host['user']}\'")
@common_args.concat(" --key \'#{@host['key']}\'")
@common_args.concat(" --ex_force_auth_version \'#{@host['ex_force_auth_version']}\'")
@common_args.concat(" --ex_force_auth_url \'#{@host['ex_force_auth_url']}\'")
@common_args.concat(" --ex_force_base_url \'#{@host['ex_force_base_url']}\'")
@common_args.concat(" --ex_domain_name \'#{@host['ex_domain_name']}\'")
@common_args.concat(" --ex_token_scope \'#{@host['ex_token_scope']}\'")
@common_args.concat(" --ex_tenant_name \'#{@host['ex_tenant_name']}\'")
@common_args.concat(" --json")
end
def create_instance(vm_id, opts, context_xml)
command = self.class::PUBLIC_CMD[:run][:cmd]
args = @common_args.clone
opts.each {|k,v|
args.concat(" ")
args.concat("#{k} \'#{v}\'")
}
begin
rc, info = do_command("#{@cli_cmd} #{command} #{args} --name \'one-#{vm_id}\'")
nodeId = JSON.parse(info)['data'][0]['id']
log("#{LOG_LOCATION}/#{vm_id}.log","info","nodeid is #{nodeId.to_s}")
privateAddresses = JSON.parse(info)['data'][0]['private_ips']
# while the node is not running
# timeout is set to 5 minutes
timeout_in_seconds = 5*60
Timeout.timeout(timeout_in_seconds) do
while privateAddresses.nil? || privateAddresses.empty? do
rc, info = do_command("#{@cli_cmd} find-node #{args} --id \'#{nodeId}\'")
privateAddresses = JSON.parse(info)['data'][0]['private_ips']
end
end
# checking if there is a volume to create and attach
# if yes, attach volume to the vm
volumeTypeOption = '--type'
volumeSizeOption = '--size'
volumeDeviceOption = '--device'
volumeTypeValue = opts[volumeTypeOption]
volumeSizeValue = opts[volumeSizeOption]
volumeDeviceValue = opts[volumeDeviceOption]
if (volumeTypeValue) && (volumeSizeValue) && (volumeDeviceValue)
# creating a new volume
rc, volumeinfo = do_command("#{@cli_cmd} create-volume #{@common_args} #{volumeTypeOption} \'#{volumeTypeValue}\' #{volumeSizeOption} #{volumeSizeValue} --name \'one-#{vm_id}\' --json")
raise "Error creating the volume" if !rc
log("#{LOG_LOCATION}/#{vm_id}.log","info","volume creation result code: #{rc}")
# retrieving the volume id
volumeId = JSON.parse(volumeinfo)['data'][0]['id'].to_s
log("#{LOG_LOCATION}/#{vm_id}.log","info","volume id is #{volumeId}")
log("#{LOG_LOCATION}/#{vm_id}.log","info","attaching volume #{volumeId} to vm \'#{nodeId}\'")
# attaching volume to the vm
rc, volumeinfo = do_command("#{@cli_cmd} attach-volume #{@common_args} --volumeId \'#{volumeId}\' --id \'#{nodeId}\' #{volumeDeviceOption} \'#{volumeDeviceValue}\'")
end
raise "Error creating the instance" if !rc
rescue => e
STDERR.puts e.message
# detroying the vm
self.destroy_instance(nodeId)
exit(-1)
end
privateAddresses.each { |ip|
context_id = ip.gsub(".", "-")
create_context(context_xml, context_id)
}
log("#{LOG_LOCATION}/#{vm_id}.log","info","returning nodeid #{nodeId}")
return nodeId
end
# Fetch the JSON description of a deployed instance by its provider id.
# On any CLI/parse error the message is written to STDERR and the whole
# process exits with -1 (this driver's convention for fatal errors).
def get_instance(deploy_id)
  log("#{LOG_LOCATION}/libcloud_dev", "info", "get_instance\n #{deploy_id.to_s}")
  get_cmd = self.class::PUBLIC_CMD[:get][:cmd]
  cli_args = @common_args.clone
  cli_args.concat(" --id #{deploy_id}")
  begin
    _rc, info = do_command("#{@cli_cmd} #{get_cmd} #{cli_args}")
    raise "Instance #{deploy_id} does not exist" if JSON.parse(info)['message']
  rescue => e
    STDERR.puts e.message
    exit(-1)
  end
  info
end
# Shut down an instance, remove its contextualization entries and destroy
# any volumes that were attached to it.
#
# deploy_id - the provider-side node id (String).
#
# Returns the instance description (JSON String) with its state forced to
# 'deleted'. Exits the process with -1 on any failure.
def destroy_instance(deploy_id)
  command = self.class::PUBLIC_CMD[:shutdown][:cmd]
  info = get_instance(deploy_id)
  args = @common_args.clone
  args.concat(" --id #{deploy_id}")
  begin
    # do_command returns [success, output]; the previous code bound the
    # whole array to rc, so the failure check below could never fire.
    rc, _output = do_command("#{@cli_cmd} #{command} #{args}")
    # Previous message interpolated the undefined local `id` (NameError).
    raise "Instance #{deploy_id} does not exist" if !rc
    # Report the node as deleted in the description we will return.
    hash = JSON.parse(info)
    hash['data'][0]['state'] = 'deleted'
    info = hash.to_json
    # Drop the contextualization created for each private address.
    privateAddresses = JSON.parse(info)['data'][0]['private_ips']
    privateAddresses.each { |ip|
      context_id = ip.gsub(".", "-")
      remove_context(context_id)
    }
    volumesAttached = JSON.parse(info)['data'][0]['extra']['volumes_attached']
    if volumesAttached
      # Wait (up to 5 minutes) until the provider no longer knows the node;
      # volumes cannot be destroyed while still attached to a live node.
      # Dedicated locals keep `info` (the value we return) from being
      # clobbered by intermediate CLI output, as it was before.
      timeout_in_seconds = 5*60
      Timeout.timeout(timeout_in_seconds) do
        loop do
          _find_rc, find_info = do_command("#{@cli_cmd} find-node #{args} --id \'#{deploy_id}\'")
          break if JSON.parse(find_info)['message']
        end
      end
      for volume in volumesAttached do
        vol_rc, vol_info = do_command("#{@cli_cmd} destroy-volume #{@common_args} -v \'#{volume['id'].to_s}\' ")
        raise "An error occured while destroying volume #{volume['id'].to_s} message: #{JSON.parse(vol_info)['message']}" if !vol_rc
      end
    end
  rescue => e
    STDERR.puts e.message
    exit(-1)
  end
  return info
end
# Report host capacity and per-VM poll data to OpenNebula on stdout.
#
# host_id - the OpenNebula host id whose VMs should be polled.
def monitor_all_vms(host_id)
  totalmemory = 0
  totalcpu = 0
  # Aggregate configured capacity: @host['capacity'] maps an instance
  # type name to how many such instances this host provides.
  @host['capacity'].each { |name, size|
    cpu, mem = instance_type_capacity(name)
    totalmemory += mem * size.to_i
    totalcpu += cpu * size.to_i
  }
  # Host information template consumed by the monitoring driver.
  host_info = "HYPERVISOR=libcloud\n"
  host_info << "PUBLIC_CLOUD=YES\n"
  host_info << "PRIORITY=-1\n"
  host_info << "TOTALMEMORY=#{totalmemory.round}\n"
  host_info << "TOTALCPU=#{totalcpu}\n"
  host_info << "CPUSPEED=1000\n"
  host_info << "HOSTNAME=\"#{@hostname}\"\n"
  vms_info = "VM_POLL=YES\n"
  client = ::OpenNebula::Client.new()
  xml = client.call("host.info",host_id.to_i)
  xml_host = REXML::Document.new(xml) if xml
  # NOTE(review): usedcpu/usedmemory are assigned but never used below.
  usedcpu = 100
  usedmemory = 0
  # With the libcloud driver it is not possible to assign a name or TAG
  # to a VM, so a VM started by OpenNebula cannot be discriminated from
  # one started by another client. The solution here is to perform a
  # polling call for each VM: OpenNebula's XML-RPC API is used to get all
  # the instances associated with the given 'host_id'.
  XPath.each(xml_host, "/HOST/VMS/ID") { |e1|
    vm_id = e1.text
    xml = client.call("vm.info", vm_id.to_i)
    xml_vm = REXML::Document.new(xml) if xml
    deploy_id = ""
    poll_data = ""
    XPath.each(xml_vm, "/VM/DEPLOY_ID") { |e2| deploy_id = e2.text }
    # Only poll VMs that have been deployed (have a provider-side id).
    if !deploy_id.nil?
      if !deploy_id.empty?
        instance = get_instance(deploy_id)
        poll_data = parse_poll(instance)
        vms_info << "VM=[\n"
        vms_info << " ID=#{vm_id || -1},\n"
        vms_info << " DEPLOY_ID=#{deploy_id},\n"
        vms_info << " POLL=\"#{poll_data}\" ]\n"
      end
    end
  }
  puts host_info
  puts vms_info
end
# Translate an instance description (JSON String, as returned by
# get_instance) into an OpenNebula poll string.
def parse_poll(instance_info)
  info = "#{POLL_ATTRIBUTE[:usedmemory]}=0 " \
         "#{POLL_ATTRIBUTE[:usedcpu]}=0 " \
         "#{POLL_ATTRIBUTE[:nettx]}=0 " \
         "#{POLL_ATTRIBUTE[:netrx]}=0 "
  instance = JSON.parse(instance_info)
  state = ""
  if !instance
    state = VM_STATE[:deleted]
  else
    # Map the provider state names onto OpenNebula VM states.
    state = case instance['data'][0]['state'].upcase
            when "RUNNING", "STARTING"
              VM_STATE[:active]
            when "TERMINATED", "STOPPED", "REBOOTING"
              VM_STATE[:paused]
            else
              VM_STATE[:deleted]
            end
  end
  info << "#{POLL_ATTRIBUTE[:state]}=#{state} "
  POLL_ATTRS.map { |key|
    value = instance['data'][0]["#{key}"]
    if !value.nil? && !value.empty?
      # Normalize the attribute to a flat string: arrays (e.g. the ip
      # lists) become comma-separated, hashes are inspected. The previous
      # code called #join on the value unconditionally, which raised
      # NoMethodError for Hash and String values.
      if value.kind_of?(Array)
        value_str = value.join(",")
      elsif value.kind_of?(Hash)
        value_str = value.inspect
      else
        value_str = value
      end
      info << "LIBCLOUD_#{key.to_s.upcase.sub('PUBLIC_IPS', "PUBLICADDRESSES").sub('PRIVATE_IPS',"PRIVATEADDRESSES")}=#{value_str} "
    end
  }
  return info
end
private
# Run a shell command locally.
# Returns [true, stdout] on success; otherwise logs the failure to
# STDERR and returns [false, exit_code].
def do_command(cmd)
  result = LocalCommand.run(cmd)
  return [true, result.stdout] if result.code == 0

  STDERR.puts("Error executing: #{cmd} err: #{result.stderr} out: #{result.stdout}")
  [false, result.code]
end
end
|
#!/usr/bin/env ruby
require 'eldritch'
# Merge two ascending-sorted arrays into one sorted array.
# Elements from `a` win ties, matching stable merge behavior.
def merge_sort(a, b)
  result = []
  until a.empty? || b.empty?
    if b.first < a.first
      pivot = a.first
      result += b.take_while { |x| x <= pivot }
      b = b.drop_while { |x| x <= pivot }
    else
      pivot = b.first
      result += a.take_while { |x| x <= pivot }
      a = a.drop_while { |x| x <= pivot }
    end
  end
  result + (a.empty? ? b : a)
end
# Parallel merge sort: the first half is sorted in a separate task via
# eldritch's `async` while the current thread sorts the second half;
# `first.value` then blocks until the task finishes before merging.
# NOTE(review): `async`/`#value` come from the eldritch gem — their task
# semantics cannot be verified from this file.
def parallel_sort(array)
  return array if array.size <= 1
  mid = (array.length / 2).floor
  first = async { parallel_sort(array.slice(0, mid)) }
  second = parallel_sort(array.slice(mid, array.length - mid))
  merge_sort(second, first.value)
end
# Sequential merge sort, used as the single-threaded baseline for the
# benchmark below. Splits at the midpoint, recurses, then merges.
def not_parallel_sort(array)
  return array if array.size <= 1
  half = array.length / 2
  left = not_parallel_sort(array.take(half))
  right = not_parallel_sort(array.drop(half))
  merge_sort(right, left)
end
# Benchmark: sort 100k random integers with the parallel and the
# sequential implementation and print both wall-clock timings.
nums = 100000.times.map { rand(1..100000) }
start = Time.now
parallel_sort(nums)
mid = Time.now
not_parallel_sort(nums)
stop = Time.now
puts 'Time parallel'
puts mid - start
puts 'Time sequential'
puts stop - mid
Renamed merge_sort to merge, since technically speaking it is not a sort —
it only merges two already-sorted arrays.
#!/usr/bin/env ruby
require 'eldritch'
# Merge two ascending-sorted arrays into one sorted array.
# Elements from `a` win ties, matching stable merge behavior.
def merge(a, b)
  result = []
  until a.empty? || b.empty?
    if b.first < a.first
      pivot = a.first
      result += b.take_while { |x| x <= pivot }
      b = b.drop_while { |x| x <= pivot }
    else
      pivot = b.first
      result += a.take_while { |x| x <= pivot }
      a = a.drop_while { |x| x <= pivot }
    end
  end
  result + (a.empty? ? b : a)
end
# Parallel merge sort: the first half is sorted in a separate task via
# eldritch's `async` while the current thread sorts the second half;
# `first.value` then blocks until the task finishes before merging.
# NOTE(review): `async`/`#value` come from the eldritch gem — their task
# semantics cannot be verified from this file.
def parallel_sort(array)
  return array if array.size <= 1
  mid = (array.length / 2).floor
  first = async { parallel_sort(array.slice(0, mid)) }
  second = parallel_sort(array.slice(mid, array.length - mid))
  merge(second, first.value)
end
# Sequential merge sort, used as the single-threaded baseline for the
# benchmark below. Splits at the midpoint, recurses, then merges.
def not_parallel_sort(array)
  return array if array.size <= 1
  half = array.length / 2
  left = not_parallel_sort(array.take(half))
  right = not_parallel_sort(array.drop(half))
  merge(right, left)
end
# Benchmark: sort 100k random integers with the parallel and the
# sequential implementation and print both wall-clock timings.
nums = 100000.times.map { rand(1..100000) }
start = Time.now
parallel_sort(nums)
mid = Time.now
not_parallel_sort(nums)
stop = Time.now
puts 'Time parallel'
puts mid - start
puts 'Time sequential'
puts stop - mid |
#
# Be sure to run `pod spec lint ProSpecs.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for the private ProSpecs test pod.
Pod::Spec.new do |s|
s.name = "ProSpecs"
s.version = "0.0.1"
s.summary = "ProSpecs"
s.description = <<-DESC
私有库测试用,看看能不能成功
DESC
s.homepage = "https://github.com/shaoxionghua/ProSpecs.git"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
s.author = { "邵雄华" => "shaoxh@gold-finance.com.cn" }
s.source = { :git => "https://github.com/shaoxionghua/ProSpecs.git", :tag => "#{s.version}" }
s.platform = :ios, "8.0" # Supported platform and minimum version (original note mentioned moving to 9.0 for Swift)
s.requires_arc = true # Whether the pod uses ARC
s.source_files = "ProSpecs", "ProSpecs/*.{h,m}"
s.frameworks = 'UIKit', 'QuartzCore', 'Foundation','SystemConfiguration' # Required system frameworks, comma separated
s.module_name = 'ProSpecs' # Module name
# s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
[Fix] ProSpecs (0.0.1)
#
# Be sure to run `pod spec lint ProSpecs.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for the private ProSpecs test pod.
Pod::Spec.new do |s|
s.name = "ProSpecs"
s.version = "0.0.1"
s.summary = "ProSpecs"
s.description = <<-DESC
私有库测试用,看看能不能成功
DESC
s.homepage = "https://github.com/shaoxionghua/ProSpecs.git"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
s.author = { "邵雄华" => "shaoxh@gold-finance.com.cn" }
s.source = { :git => "https://github.com/shaoxionghua/ProSpecs.git", :tag => "#{s.version}" }
s.platform = :ios, "8.0" # Supported platform and minimum version (original note mentioned moving to 9.0 for Swift)
s.requires_arc = true # Whether the pod uses ARC
s.source_files = "ProSpecs", "ProSpecs/*.{h,m}"
s.frameworks = 'UIKit', 'QuartzCore', 'Foundation','SystemConfiguration' # Required system frameworks, comma separated
s.module_name = 'ProSpecs' # Module name
# spec.pod_target_xcconfig = { 'OTHER_LDFLAGS' => '-ObjC' }
# s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
|
# CocoaPods spec for REFrostedViewController, version 2.1.1.
Pod::Spec.new do |s|
s.name = 'REFrostedViewController'
s.version = '2.1.1'
s.authors = { 'Roman Efimov' => 'romefimov@gmail.com' }
s.homepage = 'https://github.com/romaonthego/REFrostedViewController'
s.summary = 'iOS 7 style blurred view controller that appears on top of your view controller.'
s.source = { :git => 'https://github.com/romaonthego/REFrostedViewController.git',
:tag => '2.1.1' }
s.license = { :type => "MIT", :file => "LICENSE" }
s.platform = :ios, '6.0'
s.requires_arc = true
s.source_files = 'REFrostedViewController'
s.public_header_files = 'REFrostedViewController/*.h'
s.ios.deployment_target = '6.0'
s.ios.frameworks = 'QuartzCore', 'Accelerate'
end
Version bump (2.1.2)
# CocoaPods spec for REFrostedViewController, version 2.1.2.
Pod::Spec.new do |s|
s.name = 'REFrostedViewController'
s.version = '2.1.2'
s.authors = { 'Roman Efimov' => 'romefimov@gmail.com' }
s.homepage = 'https://github.com/romaonthego/REFrostedViewController'
s.summary = 'iOS 7 style blurred view controller that appears on top of your view controller.'
s.source = { :git => 'https://github.com/romaonthego/REFrostedViewController.git',
:tag => '2.1.2' }
s.license = { :type => "MIT", :file => "LICENSE" }
s.platform = :ios, '6.0'
s.requires_arc = true
s.source_files = 'REFrostedViewController'
s.public_header_files = 'REFrostedViewController/*.h'
s.ios.deployment_target = '6.0'
s.ios.frameworks = 'QuartzCore', 'Accelerate'
end
|
require 'rubygems'
require 'sinatra'
# Sinatra app serving gem index files and .gem archives, either from
# local disk or from S3 (VaultObject) depending on the Rails environment.
class Hostess < Sinatra::Default
set :app_file, __FILE__
# Serve the file at +path+. In production the content comes from S3:
# +redirect+ chooses an HTTP redirect to the bucket URL instead of
# streaming the stored value. Outside production the local file is sent.
def serve(path, redirect = false)
headers "Cache-Control" => "public, max-age=60"
if Rails.env.production?
if redirect
redirect File.join("http://s3.amazonaws.com", VaultObject.current_bucket, request.path_info)
else
VaultObject.value(request.path_info)
end
else
send_file(path)
end
end
# Full specs index (Marshal dump, gzipped).
get "/specs.#{Gem.marshal_version}.gz" do
content_type('application/x-gzip')
serve(current_path)
end
# Latest-version-only specs index.
get "/latest_specs.#{Gem.marshal_version}.gz" do
content_type('application/x-gzip')
serve(current_path)
end
# Individual gemspec (Marshal dump, zlib-deflated).
get "/quick/Marshal.#{Gem.marshal_version}/*.gemspec.rz" do
content_type('application/x-deflate')
serve(current_path)
end
# Gem download; bumps the download counter when the gem is known.
get "/gems/*.gem" do
original_name = File.basename(current_path, ".gem").split('-')
name = original_name[0..-2].join('-')
# NOTE(review): +version+ is computed but never used below.
version = original_name[-1]
rubygem = Rubygem.find_by_name(name)
if rubygem
rubygem.increment!(:downloads)
content_type('application/octet-stream')
serve(current_path, true)
else
halt 404
end
end
# Map the request path to the on-disk server path (memoized per request).
def current_path
@current_path ||= Gemcutter.server_path(request.env["PATH_INFO"])
end
end
Forgot to check the env in the hostess
require 'rubygems'
require 'sinatra'
# Sinatra app serving gem index files and .gem archives, either from
# local disk (development/test) or from S3 (VaultObject) otherwise.
class Hostess < Sinatra::Default
set :app_file, __FILE__
# Serve the file at +path+. In development/test the local file is sent;
# in any other environment the content comes from S3, with +redirect+
# choosing an HTTP redirect to the bucket URL over streaming the value.
def serve(path, redirect = false)
headers "Cache-Control" => "public, max-age=60"
if Rails.env.development? || Rails.env.test?
send_file(path)
else
if redirect
redirect File.join("http://s3.amazonaws.com", VaultObject.current_bucket, request.path_info)
else
VaultObject.value(request.path_info)
end
end
end
# Full specs index (Marshal dump, gzipped).
get "/specs.#{Gem.marshal_version}.gz" do
content_type('application/x-gzip')
serve(current_path)
end
# Latest-version-only specs index.
get "/latest_specs.#{Gem.marshal_version}.gz" do
content_type('application/x-gzip')
serve(current_path)
end
# Individual gemspec (Marshal dump, zlib-deflated).
get "/quick/Marshal.#{Gem.marshal_version}/*.gemspec.rz" do
content_type('application/x-deflate')
serve(current_path)
end
# Gem download; bumps the download counter when the gem is known.
get "/gems/*.gem" do
original_name = File.basename(current_path, ".gem").split('-')
name = original_name[0..-2].join('-')
# NOTE(review): +version+ is computed but never used below.
version = original_name[-1]
rubygem = Rubygem.find_by_name(name)
if rubygem
rubygem.increment!(:downloads)
content_type('application/octet-stream')
serve(current_path, true)
else
halt 404
end
end
# Map the request path to the on-disk server path (memoized per request).
def current_path
@current_path ||= Gemcutter.server_path(request.env["PATH_INFO"])
end
end
|
Add UWDC module
require 'active_support/core_ext/hash'
require 'httpclient'
require 'httpclient/include_client'
require 'json'
require 'nokogiri'
require 'timeout'
# Wraps the METS record of a UW Digital Collections object and exposes
# its metadata sections as XML/Hash/JSON.
module UWDC
  class Mets
    extend HTTPClient::IncludeClient
    include_http_client

    # id - the 1711.dl object identifier (String).
    def initialize(id)
      @id = id
    end

    # Retrieve the METS document and strip XML namespaces so subclasses
    # can use plain XPath expressions.
    #
    # Returns a Nokogiri::XML::Document. Raises on network/parse errors.
    # (The previous version rescued the error and implicitly returned the
    # exception object itself, so callers crashed later when invoking
    # #xpath on it; it also used the deprecated top-level TimeoutError.)
    def get
      response = http_client.get("http://depot.library.wisc.edu:9090/fedora/objects/1711.dl:#{@id}/methods/1711.dl%3ASDefineFirstClassObject/viewMets")
      response_xml = Nokogiri::XML.parse(response.body)
      response_xml.remove_namespaces!
    rescue Timeout::Error, HTTPClient::ConfigurationError, HTTPClient::BadResponseError, Nokogiri::SyntaxError => e
      STDERR.puts "UWDC::Mets#get failed for #{@id}: #{e.message}"
      raise
    end

    # JSON serialization of the subclass-selected fragment (see #xml).
    def to_json
      Hash.from_xml(xml.to_xml).to_json
    end

    # Hash representation of the subclass-selected fragment.
    def to_ruby
      Hash.from_xml(xml.to_xml)
    end

    # Raw XML string of the subclass-selected fragment.
    def to_xml
      xml.to_xml
    end
  end

  # MODS descriptive metadata section of the record.
  class Mods < Mets
    def xml
      get.xpath("//dmdSec[contains(@ID,'#{@id}')]//mods[1]")
    end
  end

  # Administrative origin metadata section of the record.
  class Origin < Mets
    def xml
      get.xpath("//amdSec[contains(@ID,'#{@id}')]//origin[1]")
    end
  end

  # RELS-EXT RDF relationships section of the record.
  class RelsExt < Mets
    def xml
      get.xpath("//amdSec[contains(@ID,'#{@id}')]//RDF[1]")
    end
  end
end
#!/usr/bin/env ruby
# chkconfig: - 85 15
# description: Passenger Standalone
require 'yaml'
require 'pathname'
# The user the applications run as.
USER = "admin"
# Resolved home directory of USER; RVM is expected to live under it.
USER_HOME_PATH = File.expand_path("~#{USER}")
RVM_PATH = USER_HOME_PATH + "/.rvm/"
# The place where all your applications reside. This script assumes that
# you have the following structure in your APPLICATIONS_PATH:
# APPLICATIONS_PATH
# |- APPLICATION_NAME
# |- STAGE
# |- server.yml (REQUIRED)
# |- current
APPLICATIONS_PATH = USER_HOME_PATH + "/applications/"
# Example :Server.yml
#
# passenger:
# port: 10001 # The port number passenger standalone will be ran on
# rvm:
# rvm_ruby_string: "rubyversion@gemsetname" # The ruby version and gemset RVM will use
# callbacks: # All callbacks are optional and are references to scripts relative to APPLICATIONS_PATH/APPLICATION_NAME/STAGE
# start:
# before: # Ran before passenger has started
# after: # Ran after passenger has started
# stop:
# before: # Ran before passenger has been stopped
# after: # Ran after passenger has been stopped
# ======================================================
# Shouldn't be necessary to change stuff below this line
# ======================================================
# Main start routine.
#
# Parses ARGV as: <command> [application] [stage]
# - command must be one of start/stop/list/info
# - with no application, the command runs for every application/stage
# - with an application but no stage, it runs for every stage of that app
#
# Returns true; raises on an unknown command/application/stage.
def run!
  command = ARGV.first
  available_commands = %w{start stop list info}
  raise "Use one of #{available_commands.join("|")} as first argument" unless available_commands.include?(command)
  # Build { application_name => [stage, ...] } from the directory layout
  # APPLICATIONS_PATH/<application>/<stage>.
  applications = Dir.glob("#{APPLICATIONS_PATH}*/*").inject({}){|mem, dir| parts = dir.split("/"); mem[parts[-2]] ||= []; mem[parts[-2]] << parts[-1]; mem}
  # Print the table header for "list" (head is reused for the footer).
  if command == "list"
    head = "| " + "Application".ljust(60) + " | " + "Stage".ljust(20) + " | " + "Port".ljust(6) + " | " + "RVM".ljust(80) + " |"
    puts "-" * head.size
    puts head
    puts "-" * head.size
  end
  if application = ARGV[1]
    unless applications.has_key?(application)
      raise "Can't find application #{application}"
    end
    if stage = ARGV[2]
      unless applications[application].include?(stage)
        raise "Stage #{stage} not found for application #{application}"
      end
      Application.new(application,stage).run!(command)
    else
      # No stage given: apply the command to every stage of the app.
      applications[application].each do |stage|
        Application.new(application,stage).run!(command)
      end
    end
  else
    # No application given: apply the command to everything, sorted.
    applications.sort.each do |application, stages|
      stages.each do |stage|
        Application.new(application,stage).run!(command)
      end
    end
  end
  # Close the "list" table.
  if command == "list"
    puts "-" * head.size
  end
  true
end
# Wrapper class around one application/stage deployment: knows how to
# start/stop Passenger Standalone for it and to run the optional
# before/after callbacks declared in its server.yml.
class Application
  attr_reader :config, :path, :name, :stage

  # application_name/stage select APPLICATIONS_PATH/<app>/<stage>, which
  # must contain a server.yml (see the example at the top of this file).
  def initialize(application_name,stage)
    @path = Pathname.new("#{APPLICATIONS_PATH}#{application_name}/#{stage}")
    @name = application_name
    @stage = stage
    # Sanity check!
    raise "No server.yml found in '#{@path}'" unless File.exist?(@path +"server.yml")
    # Load config
    @config = YAML.load(File.read((@path +"server.yml").to_s))
  end

  # Dispatch a top-level command to the corresponding bang method.
  def run!(command)
    case command
    when "start" then self.start!
    when "stop" then self.stop!
    when "info" then self.info!
    when "list" then self.list!
    end
  end

  # Print a detailed, human-readable description of this deployment.
  def info!
    fw = 6
    say "#{self.name} - #{self.stage}"
    say " " + "Path".ljust(fw) + ": " + self.path.to_s
    say " " + "RVM".ljust(fw) + ": " + self.config["rvm"]["rvm_ruby_string"]
    # The port is an Integer in YAML; without to_s, String#+ raises a
    # TypeError here.
    say " " + "Port".ljust(fw) + ": " + self.config["passenger"]["port"].to_s
  end

  # Print one row of the "list" table.
  def list!
    say "| " + field(self.name, 60) + " | " + field(self.stage, 20) + " | " + field(self.config["passenger"]["port"],6) + " | " + field(self.config["rvm"]["rvm_ruby_string"], 80) + " |"
  end

  # Left-justify and truncate txt to exactly len characters.
  def field(txt, len)
    txt.to_s.ljust(len)[0,len]
  end

  # Start Passenger Standalone (daemonized) for this deployment.
  def start!
    say "Start #{USER} #{self.name} #{self.stage}"
    # Check for passenger gem.
    unless rvm_execute(self.config, "gem list passenger") =~ /passenger/
      say "Installing Passenger..."
      rvm_execute(self.config, "gem install passenger")
    end
    # Run the before start callback
    run_callback(:start, :before)
    # Make sure we have the required dirs
    execute "mkdir -p #{self.path + "shared/pid"}"
    execute "mkdir -p #{self.path + "shared/log"}"
    # Start the server
    options = []
    options << "--user #{USER}"
    options << "--port #{self.config['passenger']['port']}"
    options << "--environment production"
    options << "--daemonize"
    options << "--pid-file #{self.path + "shared/pid/passenger.pid"}"
    options << "--log-file /dev/null"
    puts rvm_execute(self.config, "passenger start #{self.path + "current"} #{options.join(" ")}")
    # Run the after start callback
    run_callback(:start, :after)
  end

  # Stop the Passenger Standalone server for this deployment.
  def stop!
    say "Stop #{USER} #{self.name} #{self.stage}"
    # Run the before :stop callback
    run_callback(:stop, :before)
    # Use the same PID file that start! created (shared/pid/...); the
    # previous path pointed at the stage root, so stop never found it.
    puts rvm_execute(self.config, "passenger stop --pid-file #{self.path + "shared/pid/passenger.pid"}")
    # Run the after :stop callback
    run_callback(:stop, :after)
  end

  # Simple output wrapper
  def say(msg)
    puts msg
  end

  protected

  # Run the script configured under callbacks.<key>.<time> in server.yml
  # (e.g. callbacks.start.before), if any. Raises when the configured
  # script file does not exist.
  def run_callback(key, time)
    return unless self.config.has_key?("callbacks")
    callbacks = self.config["callbacks"]
    if callback = (callbacks[key.to_s] && callbacks[key.to_s][time.to_s])
      # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      if File.exist?(self.path + callback)
        say "Running #{time} #{key} callback '#{callback}'"
        say rvm_execute(self.config, "bash #{self.path + callback}")
      else
        raise "Defined callback #{time} #{key} '#{callback}' does not exist!"
      end
    end
  end
end
# Helper method to run a shell command inside the application's RVM
# environment (ruby version + gemset taken from server.yml).
def rvm_execute(server_config, command)
  execute("rvm_path=#{RVM_PATH} #{RVM_PATH}bin/rvm-shell '#{server_config['rvm']['rvm_ruby_string']}' -c '#{command}'")
end
# Execute a shell command, capturing stdout+stderr. Runs directly when
# we already are USER, otherwise re-runs it through sudo as USER.
def execute(command)
  if `whoami`.strip == USER
    `#{command} 2>&1`
  else
    `sudo -u #{USER} -H #{command} 2>&1`
  end
end
# Entry point: run the requested command; print usage on any failure.
begin
  if run!
    puts "Done!"
  else
    puts "Usage: passenger-standalone {start|stop|info} [application] [stage]"
  end
rescue StandardError => e
  puts "ERROR: #{e.message}"
  puts "Usage: passenger-standalone {start|stop|info} [application] [stage]"
end
Use correct PID file for stopping too
#!/usr/bin/env ruby
# chkconfig: - 85 15
# description: Passenger Standalone
require 'yaml'
require 'pathname'
# The user the applications run as.
USER = "admin"
# Resolved home directory of USER; RVM is expected to live under it.
USER_HOME_PATH = File.expand_path("~#{USER}")
RVM_PATH = USER_HOME_PATH + "/.rvm/"
# The place where all your applications reside. This script assumes that
# you have the following structure in your APPLICATIONS_PATH:
# APPLICATIONS_PATH
# |- APPLICATION_NAME
# |- STAGE
# |- server.yml (REQUIRED)
# |- current
APPLICATIONS_PATH = USER_HOME_PATH + "/applications/"
# Example :Server.yml
#
# passenger:
# port: 10001 # The port number passenger standalone will be ran on
# rvm:
# rvm_ruby_string: "rubyversion@gemsetname" # The ruby version and gemset RVM will use
# callbacks: # All callbacks are optional and are references to scripts relative to APPLICATIONS_PATH/APPLICATION_NAME/STAGE
# start:
# before: # Ran before passenger has started
# after: # Ran after passenger has started
# stop:
# before: # Ran before passenger has been stopped
# after: # Ran after passenger has been stopped
# ======================================================
# Shouldn't be necessary to change stuff below this line
# ======================================================
# Main start routine.
#
# Parses ARGV as: <command> [application] [stage]
# - command must be one of start/stop/list/info
# - with no application, the command runs for every application/stage
# - with an application but no stage, it runs for every stage of that app
#
# Returns true; raises on an unknown command/application/stage.
def run!
  command = ARGV.first
  available_commands = %w{start stop list info}
  raise "Use one of #{available_commands.join("|")} as first argument" unless available_commands.include?(command)
  # Build { application_name => [stage, ...] } from the directory layout
  # APPLICATIONS_PATH/<application>/<stage>.
  applications = Dir.glob("#{APPLICATIONS_PATH}*/*").inject({}){|mem, dir| parts = dir.split("/"); mem[parts[-2]] ||= []; mem[parts[-2]] << parts[-1]; mem}
  # Print the table header for "list" (head is reused for the footer).
  if command == "list"
    head = "| " + "Application".ljust(60) + " | " + "Stage".ljust(20) + " | " + "Port".ljust(6) + " | " + "RVM".ljust(80) + " |"
    puts "-" * head.size
    puts head
    puts "-" * head.size
  end
  if application = ARGV[1]
    unless applications.has_key?(application)
      raise "Can't find application #{application}"
    end
    if stage = ARGV[2]
      unless applications[application].include?(stage)
        raise "Stage #{stage} not found for application #{application}"
      end
      Application.new(application,stage).run!(command)
    else
      # No stage given: apply the command to every stage of the app.
      applications[application].each do |stage|
        Application.new(application,stage).run!(command)
      end
    end
  else
    # No application given: apply the command to everything, sorted.
    applications.sort.each do |application, stages|
      stages.each do |stage|
        Application.new(application,stage).run!(command)
      end
    end
  end
  # Close the "list" table.
  if command == "list"
    puts "-" * head.size
  end
  true
end
# Wrapper class around one application/stage deployment: knows how to
# start/stop Passenger Standalone for it and to run the optional
# before/after callbacks declared in its server.yml.
class Application
  attr_reader :config, :path, :name, :stage

  # application_name/stage select APPLICATIONS_PATH/<app>/<stage>, which
  # must contain a server.yml (see the example at the top of this file).
  def initialize(application_name,stage)
    @path = Pathname.new("#{APPLICATIONS_PATH}#{application_name}/#{stage}")
    @name = application_name
    @stage = stage
    # Sanity check!
    raise "No server.yml found in '#{@path}'" unless File.exist?(@path +"server.yml")
    # Load config
    @config = YAML.load(File.read((@path +"server.yml").to_s))
  end

  # Dispatch a top-level command to the corresponding bang method.
  def run!(command)
    case command
    when "start" then self.start!
    when "stop" then self.stop!
    when "info" then self.info!
    when "list" then self.list!
    end
  end

  # Print a detailed, human-readable description of this deployment.
  def info!
    fw = 6
    say "#{self.name} - #{self.stage}"
    say " " + "Path".ljust(fw) + ": " + self.path.to_s
    say " " + "RVM".ljust(fw) + ": " + self.config["rvm"]["rvm_ruby_string"]
    # The port is an Integer in YAML; without to_s, String#+ raises a
    # TypeError here.
    say " " + "Port".ljust(fw) + ": " + self.config["passenger"]["port"].to_s
  end

  # Print one row of the "list" table.
  def list!
    say "| " + field(self.name, 60) + " | " + field(self.stage, 20) + " | " + field(self.config["passenger"]["port"],6) + " | " + field(self.config["rvm"]["rvm_ruby_string"], 80) + " |"
  end

  # Left-justify and truncate txt to exactly len characters.
  def field(txt, len)
    txt.to_s.ljust(len)[0,len]
  end

  # Start Passenger Standalone (daemonized) for this deployment.
  def start!
    say "Start #{USER} #{self.name} #{self.stage}"
    # Check for passenger gem.
    unless rvm_execute(self.config, "gem list passenger") =~ /passenger/
      say "Installing Passenger..."
      rvm_execute(self.config, "gem install passenger")
    end
    # Run the before start callback
    run_callback(:start, :before)
    # Make sure we have the required dirs
    execute "mkdir -p #{self.path + "shared/pid"}"
    execute "mkdir -p #{self.path + "shared/log"}"
    # Start the server
    options = []
    options << "--user #{USER}"
    options << "--port #{self.config['passenger']['port']}"
    options << "--environment production"
    options << "--daemonize"
    options << "--pid-file #{self.path + "shared/pid/passenger.pid"}"
    options << "--log-file /dev/null"
    puts rvm_execute(self.config, "passenger start #{self.path + "current"} #{options.join(" ")}")
    # Run the after start callback
    run_callback(:start, :after)
  end

  # Stop the Passenger Standalone server for this deployment, using the
  # same PID file that start! created.
  def stop!
    say "Stop #{USER} #{self.name} #{self.stage}"
    # Run the before :stop callback
    run_callback(:stop, :before)
    puts rvm_execute(self.config, "passenger stop --pid-file #{self.path + "shared/pid/passenger.pid"}")
    # Run the after :stop callback
    run_callback(:stop, :after)
  end

  # Simple output wrapper
  def say(msg)
    puts msg
  end

  protected

  # Run the script configured under callbacks.<key>.<time> in server.yml
  # (e.g. callbacks.start.before), if any. Raises when the configured
  # script file does not exist.
  def run_callback(key, time)
    return unless self.config.has_key?("callbacks")
    callbacks = self.config["callbacks"]
    if callback = (callbacks[key.to_s] && callbacks[key.to_s][time.to_s])
      # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      if File.exist?(self.path + callback)
        say "Running #{time} #{key} callback '#{callback}'"
        say rvm_execute(self.config, "bash #{self.path + callback}")
      else
        raise "Defined callback #{time} #{key} '#{callback}' does not exist!"
      end
    end
  end
end
# Helper method to run a shell command inside the application's RVM
# environment (ruby version + gemset taken from server.yml).
def rvm_execute(server_config, command)
  execute("rvm_path=#{RVM_PATH} #{RVM_PATH}bin/rvm-shell '#{server_config['rvm']['rvm_ruby_string']}' -c '#{command}'")
end
# Execute a shell command, capturing stdout+stderr. Runs directly when
# we already are USER, otherwise re-runs it through sudo as USER.
def execute(command)
  if `whoami`.strip == USER
    `#{command} 2>&1`
  else
    `sudo -u #{USER} -H #{command} 2>&1`
  end
end
# Entry point: run the requested command; print usage on any failure.
begin
  if run!
    puts "Done!"
  else
    puts "Usage: passenger-standalone {start|stop|info} [application] [stage]"
  end
rescue StandardError => e
  puts "ERROR: #{e.message}"
  puts "Usage: passenger-standalone {start|stop|info} [application] [stage]"
end |
=begin
CIRCL Directory
Copyright (C) 2011 Complex IT sàrl
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
=end
# == Schema Information
#
# Table name: affairs
#
# *id*:: <tt>integer, not null, primary key</tt>
# *owner_id*:: <tt>integer, not null</tt>
# *buyer_id*:: <tt>integer, not null</tt>
# *receiver_id*:: <tt>integer, not null</tt>
# *title*:: <tt>string(255), default(""), not null</tt>
# *description*:: <tt>text, default("")</tt>
# *value_in_cents*:: <tt>integer, default(0), not null</tt>
# *value_currency*:: <tt>string(255), default("CHF"), not null</tt>
# *created_at*:: <tt>datetime</tt>
# *updated_at*:: <tt>datetime</tt>
#--
# == Schema Information End
#++
class Affair < ActiveRecord::Base
################
### INCLUDES ###
################
# Monetize deprecation warning
require 'monetize/core_extensions'
# include ChangesTracker
include StatusExtention
include ElasticSearch::Mapping
include ElasticSearch::Indexing
extend MoneyComposer
# TODO: Move this to jsbuilder
class InvoiceHelper
include ActionView::Helpers::DateHelper
end
def helper
@h || InvoiceHelper.new
end
#################
### CALLBACKS ###
#################
after_save :update_elasticsearch
before_save :update_value, if: 'value_in_cents.blank?'
before_save :compute_value_without_taxes, if: 'custom_value_with_taxes'
before_save :update_statuses
before_validation :ensure_buyer_and_receiver_person_exists
before_destroy :do_not_destroy_if_has_invoices
before_destroy { subscriptions.clear }
#################
### RELATIONS ###
#################
# Relations
belongs_to :owner,
class_name: 'Person',
foreign_key: 'owner_id'
belongs_to :buyer,
class_name: 'Person',
foreign_key: 'buyer_id'
belongs_to :receiver,
class_name: 'Person',
foreign_key: 'receiver_id'
belongs_to :seller,
class_name: 'Person',
foreign_key: 'seller_id'
belongs_to :condition,
class_name: 'AffairsCondition'
has_one :parent,
class_name: 'Affair',
primary_key: 'parent_id',
foreign_key: 'id'
has_many :children,
class_name: 'Affair',
foreign_key: 'parent_id'
has_many :invoices,
dependent: :destroy
has_many :receipts,
-> { uniq },
through: :invoices
has_many :extras,
-> { order(:position) },
dependent: :destroy
#after_add: :update_on_prestation_alteration,
#after_remove: :update_on_prestation_alteration
has_many :tasks,
-> { order('start_date ASC') },
dependent: :destroy
#after_add: :update_on_prestation_alteration,
#after_remove: :update_on_prestation_alteration
has_many :product_items,
-> { order(:position) },
class_name: 'AffairsProductsProgram',
dependent: :destroy
#after_add: :update_on_prestation_alteration,
#after_remove: :update_on_prestation_alteration
has_many :products,
through: :product_items
has_many :programs,
through: :product_items
has_many :affairs_stakeholders
has_many :stakeholders,
through: :affairs_stakeholders,
source: :person
has_many :affairs_subscriptions # for permissions
# monitored_habtm :subscriptions,
has_and_belongs_to_many :subscriptions,
after_add: :update_on_prestation_alteration,
after_remove: :update_on_prestation_alteration
# Money
money :value
scope :open_affairs, -> {
mask = Affair.statuses_value_for(:to_be_billed)
where("(affairs.status::bit(16) & ?::bit(16))::int = ?", mask, mask)
}
scope :estimates, -> { where estimate: true }
scope :effectives, -> { where estimate: false}
# Used to calculate value from value with taxes
attr_accessor :custom_value_with_taxes
attr_accessor :template
###################
### VALIDATIONS ###
###################
validates_presence_of :title, :owner_id, :buyer_id, :receiver_id, :value_in_cents, :value_currency
# Validate fields of type 'string' length
validates_length_of :title, maximum: 255
# Validate fields of type 'text' length
validates_length_of :description, maximum: 65536
validate :vat_calculation_availability, if: 'custom_value_with_taxes'
validate :parent_id_is_not_self, if: 'parent_id'
########################
#### CLASS METHODS #####
########################
# Returns the list of available statuses as an array.
# Watch the sources to read available statuses.
def self.available_statuses
[
# under bit weight 256 (bits 0-7),
# invoices are not (fully) paid
:open, # 0
:underpaid, # 1
:partially_paid, # 2
:to_be_billed, # 3
nil, # 4
nil, # 5
nil, # 6
:cancelled, # 7 user defined
# starting from 256 (bit 8-15),
# invoices are paid
:paid, # 8
:overpaid, # 9
nil, # 10
nil, # 11
nil, # 12
nil, # 13
nil, # 14
:offered # 15 user defined
]
end
########################
### INSTANCE METHODS ###
########################
# attributes overridden - JSON API
def as_json(options = nil)
h = super(options)
h[:created_at] = created_at.to_date # Override datetime
h[:parent_title] = parent.try(:title)
h[:owner_name] = owner.try(:name)
h[:owner_address] = owner.try(:address_for_bvr)
h[:buyer_name] = buyer.try(:name)
h[:buyer_address] = buyer.try(:address_for_bvr)
h[:seller_name] = seller.try(:name)
h[:receiver_name] = receiver.try(:name)
h[:receiver_address] = receiver.try(:address_for_bvr)
h[:invoices_count] = invoices.count
h[:invoices_value] = invoices_value.to_f
h[:invoices_value_currency] = invoices_value.currency.try(:iso_code)
h[:invoices_value_with_taxes] = invoices_value_with_taxes.to_f
h[:invoices_value_with_taxes_currency] = invoices_value_with_taxes.currency.try(:iso_code)
h[:receipts_count] = receipts.count
h[:receipts_value] = receipts_value.to_f
h[:receipts_value_currency] = receipts_value.currency.try(:iso_code)
h[:subscriptions_count] = subscriptions.count
h[:subscriptions_value] = subscriptions_value.to_f
h[:subscriptions_value_currency] = subscriptions_value.currency.try(:iso_code)
h[:tasks_count] = tasks.count
h[:tasks_value] = tasks_value.to_f
h[:tasks_value_currency] = tasks_value.currency.try(:iso_code)
h[:tasks_duration_translation] = helper.distance_of_time(tasks_duration.minutes)
h[:products_count] = product_items.count
h[:products_value] = product_items_value.to_f
h[:products_value_currency] = product_items_value.currency.try(:iso_code)
h[:extras_count] = extras.count
h[:extras_value] = extras_value.to_f
h[:extras_value_currency] = extras_value.currency.try(:iso_code)
h[:value] = value.try(:to_f)
h[:value_currency] = value.currency.try(:iso_code)
h[:value_with_taxes] = value_with_taxes.try(:to_f)
h[:value_with_taxes_currency] = value_with_taxes.currency.try(:iso_code)
h[:computed_value] = compute_value.try(:to_f)
h[:computed_value_currency] = compute_value.currency.try(:iso_code)
h[:computed_value_with_taxes] = compute_value_with_taxes.try(:to_f)
h[:computed_value_with_taxes_currency] = compute_value_with_taxes.currency.try(:iso_code)
h[:arts_count] = product_items.each_with_object([]){|i,a| a << i if i.variant.art > 0}.size
h[:arts_value] = arts_value.try(:to_f)
h[:arts_value_currency] = arts_value.currency.try(:iso_code)
h[:vat_count] = extras.each_with_object([]){|i,a| a << i if i.vat_percentage != ApplicationSetting.value("service_vat_rate").to_f}.size + 1
h[:vat_value] = vat_value.try(:to_f)
h[:vat_value_currency] = vat_value.currency.try(:iso_code)
h[:statuses] = translated_statuses
h[:affairs_stakeholders] = affairs_stakeholders.as_json
h
end
def translated_statuses
get_statuses.map{|s| I18n.t("affair.views.statuses." + s.to_s)}.join(", ")
end
def invoices_value
invoices.map{|i| i.value.to_money(value_currency)}.sum.to_money
end
def invoices_value_with_taxes
invoices.map{|i| i.value_with_taxes.to_money(value_currency)}.sum.to_money
end
def receipts_value
receipts.map{ |r| r.value.to_money(value_currency)}.sum.to_money
end
def subscriptions_value
# Sum only leaves of a subscription tree (the last child)
leaves = find_children(subscriptions)
leaves.map{|l| l.value_for(owner).to_money(value_currency)}.sum.to_money
end
def tasks_value
tasks.map{ |t| t.value.to_money(value_currency)}.sum.to_money
end
def tasks_real_value
tasks.map{ |t| t.compute_value.to_money(value_currency)}.sum.to_money
end
def tasks_duration
tasks.map{ |t| t.duration}.sum
end
def tasks_bid_value
tasks_real_value - tasks_value
end
def product_items_value
product_items.map{|p| p.bid_price.to_money(value_currency)}.sum.to_money
end
def product_items_real_value
product_items.map{|p| p.value.to_money(value_currency)}.sum.to_money
end
def product_items_bid_value
product_items_real_value - product_items_value
end
def extras_value
extras.map{|e| e.total_value.to_money(value_currency)}.sum.to_money
end
def balance_value
receipts_value - invoices_value
end
def overpaid_value
(balance_value > 0) ? balance_value : 0.to_money
end
def arts_value
product_items.map{|i| i.variant.art.to_money(value_currency) * i.quantity}.sum.to_money
end
# Total VAT owed on this affair, in the affair's currency.
#
# forced_value - base amount to tax; defaults to the stored affair value.
#                compute_value_with_taxes passes a freshly computed value.
#
# Returns 0 (as money) when VAT is globally disabled in ApplicationSetting.
# Extras whose own vat_percentage differs from the global service rate keep
# their precomputed #vat amount; the remaining value is taxed at the global
# "service_vat_rate".
def vat_value(forced_value = self.value)
  return 0.to_money if ApplicationSetting.value('use_vat') != "true"
  # Variable VAT, extract extras with different vat rate
  extra_with_different_vat_rate = extras.each_with_object([]) do |i,a|
    a << i if i.vat_percentage != ApplicationSetting.value("service_vat_rate").to_f
  end
  extras_diff_value = extra_with_different_vat_rate.map(&:value).sum.to_money(value_currency)
  extras_diff_vat = extra_with_different_vat_rate.map(&:vat).sum.to_money(value_currency)
  # Tax only the part of the value not covered by the divergent extras.
  service_value = forced_value - extras_diff_value
  sum = service_value * (ApplicationSetting.value("service_vat_rate").to_f / 100.0)
  sum += extras_diff_vat
  sum
end
# It will set this affair's value to the computed value of all provisions and
# returns its value.
def compute_value
val = subscriptions_value
val += tasks_value
val += product_items_value
val += arts_value
val += extras_value
val.to_money(value_currency)
end
def compute_value_with_taxes
val = compute_value
val + vat_value(val)
end
def value_with_taxes
value + vat_value
end
# Workflows and statuses
# Returns true if at least one invoice is open in this affair.
# (The previous comment said "all invoices", but the original fold used `|`,
# i.e. "any"; update_statuses relies on the "any" semantics.)
def open?
  invoices.any? { |invoice| invoice.has_status?(:open) }
end
# Returns true if invoices are partially paid in this affair.
def partially_paid?
if open?
return invoices.inject(false) { |sum, i| sum | i.has_status?(:paid) }
end
false
end
# Return true if every single invoice has been paid.
# If the sum of receipts is greater than the sum of invoices, it
# doesn't mean that every single invoice has been paid.
def paid?
return false if invoices.count == 0
invoices.inject(true) { |sum, i| sum & i.has_status?(:paid) }
end
# Returns true if at least one invoice is overpaid in this affair.
def overpaid?
return false if invoices.count == 0
invoices.inject(false) { |sum, i| sum | i.has_status?(:overpaid) }
end
# Returns true if at least one invoice is underpaid in this affair.
def underpaid?
return false if invoices.count == 0
invoices.inject(false) { |sum, i| sum | i.has_status?(:underpaid) }
end
# Returns true if all invoices are set to cancelled.
def cancelled?
return false if invoices.count == 0
invoices.inject(true) { |sum, i| sum & i.has_status?(:cancelled) }
end
# Returns true if all invoices are set to offered.
def offered?
return false if invoices.count == 0
invoices.inject(true) { |sum, i| sum & i.has_status?(:offered) }
end
def to_be_billed?
invoices_value < value
end
def product_items_for_category(cat)
if cat
product_items
.joins(:product)
.where("products.category = ?", cat)
else
product_items
.joins(:product)
.where("products.category is null")
end
end
def product_items_categories
Product
.joins(:product_items)
.where("affairs_products_programs.affair_id = ?", id)
.select("DISTINCT category")
.order("category")
.map(&:category)
end
def product_items_category_value_for(cat)
product_items_for_category(cat).map(&:value).sum.to_money(value_currency)
end
private
# Buffer method used to update value and statuses information after habtm relationship
# alteration.
# NOTE(review): update_attribute skips validations and issues two separate
# saves; update_statuses returns an Array of Symbols while :status looks like
# a bitmask column (see reset_statuses) — confirm the intended stored value.
def update_on_prestation_alteration(record = nil)
  self.update_attribute(:value, compute_value)
  self.update_attribute(:status, update_statuses)
end
def update_value
self.value = compute_value
end
def compute_value_without_taxes
self.value = reverse_vat_value(value)
end
# Takes a value taxes included
def reverse_vat_value(val)
val / ( 1 + (ApplicationSetting.value("service_vat_rate").to_f / 100) )
end
def vat_calculation_availability
extras.each do |i|
if i.vat_percentage != ApplicationSetting.value("service_vat_rate").to_f
errors.add(:base,
I18n.t('affair.errors.unable_to_compute_value_without_taxes_if_extras_have_different_vat_values'))
return false
end
end
end
# Update this affair's statuses by comparing affair's value, its invoices and receipts
# and return its statuses.
# Returns the Array of Symbols that was applied; see self.available_statuses
# for the full catalogue and the bit weight of each status.
def update_statuses
  statuses = []
  if cancelled?
    statuses << :cancelled
  elsif unbillable
    # `unbillable` is presumably a boolean attribute on affairs — TODO confirm.
    statuses << :paid # NOTE is that the best reflection of its status ?
  else
    statuses << :open if open?
    statuses << :underpaid if underpaid?
    statuses << :partially_paid if partially_paid?
    statuses << :to_be_billed if to_be_billed?
  end
  # TODO How an invoice could be paid and open in the same time ?
  if offered?
    statuses << :offered
  else
    statuses << :paid if paid?
    statuses << :overpaid if overpaid?
  end
  self.reset_statuses(statuses)
  statuses
end
# Recursively collects the leaf nodes of a subscription tree (records whose
# #children list is empty). Returns a flat, de-duplicated array of leaves.
def find_children(nodes)
  leaves = nodes.flat_map do |node|
    node.children.empty? ? [node] : find_children(node.children)
  end
  leaves.uniq
end
def ensure_buyer_and_receiver_person_exists
self.receiver = self.owner unless self.receiver
self.buyer = self.owner unless self.buyer
end
# after_save callback: refreshes the ElasticSearch index of every person
# related to this affair (owner, buyer, receiver), including the former
# buyer/receiver when the current save detached them.
# Always returns true so the callback chain is never halted.
def update_elasticsearch
  # It may have some custom search attributes which
  # depend on this affair through its relations,
  # so update relations' indices no matter what changed.
  unless self.changes.empty?
    # update current relations' indices
    owner.update_index
    if buyer != owner
      buyer.update_index
      receiver.update_index if buyer != receiver
    end
    # NOTE(review): when buyer == owner but receiver differs, receiver is
    # never reindexed by the branch above — confirm this is intentional.
    # and former relations' indices
    if self.changes.keys.index('buyer_id')
      # 0:original, 1:new value == self.buyer_id
      buyer_id = self.changes['buyer_id'][0]
      if Person.exists?(buyer_id) # in case former person doesn't exist
        p = Person.find(buyer_id)
        p.update_index
      end
    end
    if self.changes.keys.index('receiver_id')
      # 0:original, 1:new value == self.receiver_id
      receiver_id = self.changes['receiver_id'][0]
      if Person.exists?(receiver_id)
        p = Person.find(receiver_id)
        p.update_index
      end
    end
  end
  true
end
def do_not_destroy_if_has_invoices
unless invoices.empty?
errors.add(:base,
I18n.t('affair.errors.cant_delete_affair_who_has_invoices'))
false
end
end
def parent_id_is_not_self
if id == parent_id
errors.add(:base,
I18n.t('affair.errors.parent_id_cannot_be_self'))
false
end
end
end
Re-enable status update on affairs
=begin
CIRCL Directory
Copyright (C) 2011 Complex IT sàrl
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
=end
# == Schema Information
#
# Table name: affairs
#
# *id*:: <tt>integer, not null, primary key</tt>
# *owner_id*:: <tt>integer, not null</tt>
# *buyer_id*:: <tt>integer, not null</tt>
# *receiver_id*:: <tt>integer, not null</tt>
# *title*:: <tt>string(255), default(""), not null</tt>
# *description*:: <tt>text, default("")</tt>
# *value_in_cents*:: <tt>integer, default(0), not null</tt>
# *value_currency*:: <tt>string(255), default("CHF"), not null</tt>
# *created_at*:: <tt>datetime</tt>
# *updated_at*:: <tt>datetime</tt>
#--
# == Schema Information End
#++
class Affair < ActiveRecord::Base
################
### INCLUDES ###
################
# Monetize deprecation warning
require 'monetize/core_extensions'
# include ChangesTracker
include StatusExtention
include ElasticSearch::Mapping
include ElasticSearch::Indexing
extend MoneyComposer
# TODO: Move this to jsbuilder
class InvoiceHelper
include ActionView::Helpers::DateHelper
end
def helper
@h || InvoiceHelper.new
end
#################
### CALLBACKS ###
#################
after_save :update_elasticsearch
before_save :update_value, if: 'value_in_cents.blank?'
before_save :compute_value_without_taxes, if: 'custom_value_with_taxes'
before_save :update_statuses
before_validation :ensure_buyer_and_receiver_person_exists
before_destroy :do_not_destroy_if_has_invoices
before_destroy { subscriptions.clear }
#################
### RELATIONS ###
#################
# Relations
belongs_to :owner,
class_name: 'Person',
foreign_key: 'owner_id'
belongs_to :buyer,
class_name: 'Person',
foreign_key: 'buyer_id'
belongs_to :receiver,
class_name: 'Person',
foreign_key: 'receiver_id'
belongs_to :seller,
class_name: 'Person',
foreign_key: 'seller_id'
belongs_to :condition,
class_name: 'AffairsCondition'
has_one :parent,
class_name: 'Affair',
primary_key: 'parent_id',
foreign_key: 'id'
has_many :children,
class_name: 'Affair',
foreign_key: 'parent_id'
has_many :invoices,
dependent: :destroy
has_many :receipts,
-> { uniq },
through: :invoices
has_many :extras,
-> { order(:position) },
dependent: :destroy,
after_add: :update_on_prestation_alteration,
after_remove: :update_on_prestation_alteration
has_many :tasks,
-> { order('start_date ASC') },
dependent: :destroy,
after_add: :update_on_prestation_alteration,
after_remove: :update_on_prestation_alteration
has_many :product_items,
-> { order(:position) },
class_name: 'AffairsProductsProgram',
dependent: :destroy,
after_add: :update_on_prestation_alteration,
after_remove: :update_on_prestation_alteration
has_many :products,
through: :product_items
has_many :programs,
through: :product_items
has_many :affairs_stakeholders
has_many :stakeholders,
through: :affairs_stakeholders,
source: :person
has_many :affairs_subscriptions # for permissions
# monitored_habtm :subscriptions,
has_and_belongs_to_many :subscriptions,
after_add: :update_on_prestation_alteration,
after_remove: :update_on_prestation_alteration
# Money
money :value
scope :open_affairs, -> {
mask = Affair.statuses_value_for(:to_be_billed)
where("(affairs.status::bit(16) & ?::bit(16))::int = ?", mask, mask)
}
scope :estimates, -> { where estimate: true }
scope :effectives, -> { where estimate: false}
# Used to calculate value from value with taxes
attr_accessor :custom_value_with_taxes
attr_accessor :template
###################
### VALIDATIONS ###
###################
validates_presence_of :title, :owner_id, :buyer_id, :receiver_id, :value_in_cents, :value_currency
# Validate fields of type 'string' length
validates_length_of :title, maximum: 255
# Validate fields of type 'text' length
validates_length_of :description, maximum: 65536
validate :vat_calculation_availability, if: 'custom_value_with_taxes'
validate :parent_id_is_not_self, if: 'parent_id'
########################
#### CLASS METHODS #####
########################
# Returns the list of available statuses as an array.
# Watch the sources to read available statuses.
def self.available_statuses
[
# under bit weight 256 (bits 0-7),
# invoices are not (fully) paid
:open, # 0
:underpaid, # 1
:partially_paid, # 2
:to_be_billed, # 3
nil, # 4
nil, # 5
nil, # 6
:cancelled, # 7 user defined
# starting from 256 (bit 8-15),
# invoices are paid
:paid, # 8
:overpaid, # 9
nil, # 10
nil, # 11
nil, # 12
nil, # 13
nil, # 14
:offered # 15 user defined
]
end
########################
### INSTANCE METHODS ###
########################
# attributes overridden - JSON API
def as_json(options = nil)
h = super(options)
h[:created_at] = created_at.to_date # Override datetime
h[:parent_title] = parent.try(:title)
h[:owner_name] = owner.try(:name)
h[:owner_address] = owner.try(:address_for_bvr)
h[:buyer_name] = buyer.try(:name)
h[:buyer_address] = buyer.try(:address_for_bvr)
h[:seller_name] = seller.try(:name)
h[:receiver_name] = receiver.try(:name)
h[:receiver_address] = receiver.try(:address_for_bvr)
h[:invoices_count] = invoices.count
h[:invoices_value] = invoices_value.to_f
h[:invoices_value_currency] = invoices_value.currency.try(:iso_code)
h[:invoices_value_with_taxes] = invoices_value_with_taxes.to_f
h[:invoices_value_with_taxes_currency] = invoices_value_with_taxes.currency.try(:iso_code)
h[:receipts_count] = receipts.count
h[:receipts_value] = receipts_value.to_f
h[:receipts_value_currency] = receipts_value.currency.try(:iso_code)
h[:subscriptions_count] = subscriptions.count
h[:subscriptions_value] = subscriptions_value.to_f
h[:subscriptions_value_currency] = subscriptions_value.currency.try(:iso_code)
h[:tasks_count] = tasks.count
h[:tasks_value] = tasks_value.to_f
h[:tasks_value_currency] = tasks_value.currency.try(:iso_code)
h[:tasks_duration_translation] = helper.distance_of_time(tasks_duration.minutes)
h[:products_count] = product_items.count
h[:products_value] = product_items_value.to_f
h[:products_value_currency] = product_items_value.currency.try(:iso_code)
h[:extras_count] = extras.count
h[:extras_value] = extras_value.to_f
h[:extras_value_currency] = extras_value.currency.try(:iso_code)
h[:value] = value.try(:to_f)
h[:value_currency] = value.currency.try(:iso_code)
h[:value_with_taxes] = value_with_taxes.try(:to_f)
h[:value_with_taxes_currency] = value_with_taxes.currency.try(:iso_code)
h[:computed_value] = compute_value.try(:to_f)
h[:computed_value_currency] = compute_value.currency.try(:iso_code)
h[:computed_value_with_taxes] = compute_value_with_taxes.try(:to_f)
h[:computed_value_with_taxes_currency] = compute_value_with_taxes.currency.try(:iso_code)
h[:arts_count] = product_items.each_with_object([]){|i,a| a << i if i.variant.art > 0}.size
h[:arts_value] = arts_value.try(:to_f)
h[:arts_value_currency] = arts_value.currency.try(:iso_code)
h[:vat_count] = extras.each_with_object([]){|i,a| a << i if i.vat_percentage != ApplicationSetting.value("service_vat_rate").to_f}.size + 1
h[:vat_value] = vat_value.try(:to_f)
h[:vat_value_currency] = vat_value.currency.try(:iso_code)
h[:statuses] = translated_statuses
h[:affairs_stakeholders] = affairs_stakeholders.as_json
h
end
def translated_statuses
get_statuses.map{|s| I18n.t("affair.views.statuses." + s.to_s)}.join(", ")
end
def invoices_value
invoices.map{|i| i.value.to_money(value_currency)}.sum.to_money
end
def invoices_value_with_taxes
invoices.map{|i| i.value_with_taxes.to_money(value_currency)}.sum.to_money
end
def receipts_value
receipts.map{ |r| r.value.to_money(value_currency)}.sum.to_money
end
def subscriptions_value
# Sum only leaves of a subscription tree (the last child)
leaves = find_children(subscriptions)
leaves.map{|l| l.value_for(owner).to_money(value_currency)}.sum.to_money
end
def tasks_value
tasks.map{ |t| t.value.to_money(value_currency)}.sum.to_money
end
def tasks_real_value
tasks.map{ |t| t.compute_value.to_money(value_currency)}.sum.to_money
end
def tasks_duration
tasks.map{ |t| t.duration}.sum
end
def tasks_bid_value
tasks_real_value - tasks_value
end
def product_items_value
product_items.map{|p| p.bid_price.to_money(value_currency)}.sum.to_money
end
def product_items_real_value
product_items.map{|p| p.value.to_money(value_currency)}.sum.to_money
end
def product_items_bid_value
product_items_real_value - product_items_value
end
def extras_value
extras.map{|e| e.total_value.to_money(value_currency)}.sum.to_money
end
def balance_value
receipts_value - invoices_value
end
def overpaid_value
(balance_value > 0) ? balance_value : 0.to_money
end
def arts_value
product_items.map{|i| i.variant.art.to_money(value_currency) * i.quantity}.sum.to_money
end
def vat_value(forced_value = self.value)
return 0.to_money if ApplicationSetting.value('use_vat') != "true"
# Variable VAT, extract extras with different vat rate
extra_with_different_vat_rate = extras.each_with_object([]) do |i,a|
a << i if i.vat_percentage != ApplicationSetting.value("service_vat_rate").to_f
end
extras_diff_value = extra_with_different_vat_rate.map(&:value).sum.to_money(value_currency)
extras_diff_vat = extra_with_different_vat_rate.map(&:vat).sum.to_money(value_currency)
service_value = forced_value - extras_diff_value
sum = service_value * (ApplicationSetting.value("service_vat_rate").to_f / 100.0)
sum += extras_diff_vat
sum
end
# It will set this affair's value to the computed value of all provisions and
# returns its value.
def compute_value
val = subscriptions_value
val += tasks_value
val += product_items_value
val += arts_value
val += extras_value
val.to_money(value_currency)
end
def compute_value_with_taxes
val = compute_value
val + vat_value(val)
end
def value_with_taxes
value + vat_value
end
# Workflows and statuses
# Returns true if at least one invoice is open in this affair (the fold uses `|`, i.e. "any").
def open?
invoices.inject(false) { |sum, i| sum | i.has_status?(:open) }
end
# Returns true if invoices are partially paid in this affair.
def partially_paid?
if open?
return invoices.inject(false) { |sum, i| sum | i.has_status?(:paid) }
end
false
end
# Return true if every single invoice has been paid.
# If the sum of receipts is greater than the sum of invoices, it
# doesn't mean that every single invoice has been paid.
def paid?
return false if invoices.count == 0
invoices.inject(true) { |sum, i| sum & i.has_status?(:paid) }
end
# Returns true if at least one invoice is overpaid in this affair.
def overpaid?
return false if invoices.count == 0
invoices.inject(false) { |sum, i| sum | i.has_status?(:overpaid) }
end
# Returns true if at least one invoice is underpaid in this affair.
def underpaid?
return false if invoices.count == 0
invoices.inject(false) { |sum, i| sum | i.has_status?(:underpaid) }
end
# Returns true if all invoices are set to cancelled.
def cancelled?
return false if invoices.count == 0
invoices.inject(true) { |sum, i| sum & i.has_status?(:cancelled) }
end
# Returns true if all invoices are set to offered.
def offered?
return false if invoices.count == 0
invoices.inject(true) { |sum, i| sum & i.has_status?(:offered) }
end
def to_be_billed?
invoices_value < value
end
def product_items_for_category(cat)
if cat
product_items
.joins(:product)
.where("products.category = ?", cat)
else
product_items
.joins(:product)
.where("products.category is null")
end
end
def product_items_categories
Product
.joins(:product_items)
.where("affairs_products_programs.affair_id = ?", id)
.select("DISTINCT category")
.order("category")
.map(&:category)
end
def product_items_category_value_for(cat)
product_items_for_category(cat).map(&:value).sum.to_money(value_currency)
end
private
# Buffer method used to update value and statuses information after habtm relationship
# alteration.
def update_on_prestation_alteration(record = nil)
self.update_attribute(:value, compute_value)
self.update_attribute(:status, update_statuses)
end
def update_value
self.value = compute_value
end
def compute_value_without_taxes
self.value = reverse_vat_value(value)
end
# Takes a value taxes included
def reverse_vat_value(val)
val / ( 1 + (ApplicationSetting.value("service_vat_rate").to_f / 100) )
end
def vat_calculation_availability
extras.each do |i|
if i.vat_percentage != ApplicationSetting.value("service_vat_rate").to_f
errors.add(:base,
I18n.t('affair.errors.unable_to_compute_value_without_taxes_if_extras_have_different_vat_values'))
return false
end
end
end
# Update this affair's statuses by comparing affair's value, its invoices and receipts
# and return its statuses.
def update_statuses
statuses = []
if cancelled?
statuses << :cancelled
elsif unbillable
statuses << :paid # NOTE is that the best reflection of its status ?
else
statuses << :open if open?
statuses << :underpaid if underpaid?
statuses << :partially_paid if partially_paid?
statuses << :to_be_billed if to_be_billed?
end
# TODO How an invoice could be paid and open in the same time ?
if offered?
statuses << :offered
else
statuses << :paid if paid?
statuses << :overpaid if overpaid?
end
self.reset_statuses(statuses)
statuses
end
def find_children(subscriptions)
subs = []
subscriptions.each do |s|
if s.children.empty?
subs << s
else
subs << find_children(s.children)
end
end
subs.flatten.uniq
end
def ensure_buyer_and_receiver_person_exists
self.receiver = self.owner unless self.receiver
self.buyer = self.owner unless self.buyer
end
def update_elasticsearch
# It may have some custom search attributes which
# depends on this affair through it's relations.
# so update relations' indices no mater what changes
unless self.changes.empty?
# update current relations' indices
owner.update_index
if buyer != owner
buyer.update_index
receiver.update_index if buyer != receiver
end
# and former relations' indices
if self.changes.keys.index('buyer_id')
# 0:original, 1:new value == self.buyer_id
buyer_id = self.changes['buyer_id'][0]
if Person.exists?(buyer_id) # in case former person doesn't exists
p = Person.find(buyer_id)
p.update_index
end
end
if self.changes.keys.index('receiver_id')
# 0:original, 1:new value == self.receiver_id
receiver_id = self.changes['receiver_id'][0]
if Person.exists?(receiver_id)
p = Person.find(receiver_id)
p.update_index
end
end
end
true
end
def do_not_destroy_if_has_invoices
unless invoices.empty?
errors.add(:base,
I18n.t('affair.errors.cant_delete_affair_who_has_invoices'))
false
end
end
def parent_id_is_not_self
if id == parent_id
errors.add(:base,
I18n.t('affair.errors.parent_id_cannot_be_self'))
false
end
end
end
|
# frozen_string_literal: true
# This model refers to law enforcement agencies involved in the cases
class Agency < ActiveRecord::Base
  # This is the jurisdiction of the agency (string-backed Rails enum;
  # the value is stored verbatim in the DB column).
  enum jurisdiction: {
    unknown: 'unknown',
    local: 'local',
    state: 'state',
    federal: 'federal',
    university: 'university',
    commercial: 'commercial'
  }
  # Legacy enumerated type — presumably superseded by the enum above;
  # TODO confirm it is no longer referenced before removing.
  class JurisdictionType
    include EnumeratedType
    declare :none
    declare :state
    declare :local
    declare :federal
    declare :university
    declare :private
  end
  # Attributes automatically stripped of surrounding whitespace.
  # (zipcode and description were previously fused on a single line,
  # obscuring the list; behavior of %w[] is unchanged.)
  STRIPPED_ATTRIBUTES = %w[
    name
    city
    street_address
    zipcode
    description
    telephone
    email
    website
    lead_officer
  ].freeze
  auto_strip_attributes(*STRIPPED_ATTRIBUTES)
  has_paper_trail
  has_many :case_agencies
  has_many :cases, through: :case_agencies
  belongs_to :state
  # NOTE: the former `before_save { self.name = name.lstrip }` was removed —
  # auto_strip_attributes already strips :name.
  validates :name, presence: { message: 'Please enter a name.' }
  validates :name, uniqueness: {
    message: 'An agency with this name already exists and can be found. If you'\
    ' want to create a new agency, it must have a unique name.'
  }
  validates :state_id, presence: {
    message: 'You must specify the state in which the agency is located.'
  }
  extend FriendlyId
  friendly_id :slug_candidates, use: :slugged
  # Geocoding
  geocoded_by :full_address
  before_save :geocode, if: proc { |agcy|
    agcy.street_address_changed? ||
      agcy.city_changed? ||
      agcy.state_id_changed? ||
      agcy.zipcode_changed?
  } # auto-fetch coordinates
  # Full postal address fed to the geocoder.
  def full_address
    "#{street_address} #{city} #{state.ansi_code} #{zipcode}".strip
  end
  # Nearby cases within 50 (geocoder units), closest first; nil-safe via #try.
  def nearby_cases
    try(:nearbys, 50).try(:order, 'distance')
  end
  # Try building a slug based on the following fields in
  # increasing order of specificity.
  def slug_candidates
    [
      :name,
      %i[name city],
      %i[name street_address city]
    ]
  end
  # Name of the associated state ("" when state_id is missing/invalid).
  def retrieve_state
    State.where(id: state_id).pluck(:name).join
  end
end
Removes unused code and fixes stripped attributes in agency model.
# frozen_string_literal: true
# This model refers to law enforcement agencies involved in the cases
class Agency < ActiveRecord::Base
  # This is the jurisdiction of the agency
  # (string-backed Rails enum; the value is stored verbatim in the DB column).
  enum jurisdiction: {
    unknown: 'unknown',
    local: 'local',
    state: 'state',
    federal: 'federal',
    university: 'university',
    commercial: 'commercial'
  }
  # Legacy enumerated type — presumably superseded by the enum above;
  # TODO confirm it is no longer referenced before removing.
  class JurisdictionType
    include EnumeratedType
    declare :none
    declare :state
    declare :local
    declare :federal
    declare :university
    declare :private
  end
  # Attributes automatically stripped of surrounding whitespace.
  STRIPPED_ATTRIBUTES = %w[
    name
    city
    street_address
    zipcode
    description
    telephone
    email
    website
    lead_officer
  ].freeze
  auto_strip_attributes(*STRIPPED_ATTRIBUTES)
  has_paper_trail
  has_many :case_agencies
  has_many :cases, through: :case_agencies
  belongs_to :state
  validates :name, presence: { message: 'Please enter a name.' }
  validates :name, uniqueness: {
    message: 'An agency with this name already exists and can be found. If you'\
    ' want to create a new agency, it must have a unique name.'
  }
  validates :state_id, presence: {
    message: 'You must specify the state in which the agency is located.'
  }
  extend FriendlyId
  friendly_id :slug_candidates, use: :slugged
  # Geocoding
  geocoded_by :full_address
  before_save :geocode, if: proc { |agcy|
    agcy.street_address_changed? ||
      agcy.city_changed? ||
      agcy.state_id_changed? ||
      agcy.zipcode_changed?
  } # auto-fetch coordinates
  # Full postal address fed to the geocoder.
  def full_address
    "#{street_address} #{city} #{state.ansi_code} #{zipcode}".strip
  end
  # Nearby cases within 50 (geocoder units), closest first; nil-safe via #try.
  def nearby_cases
    try(:nearbys, 50).try(:order, 'distance')
  end
  # Try building a slug based on the following fields in
  # increasing order of specificity.
  def slug_candidates
    [
      :name,
      %i[name city],
      %i[name street_address city]
    ]
  end
  # Name of the associated state ("" when state_id is missing/invalid).
  def retrieve_state
    State.where(id: state_id).pluck(:name).join
  end
end
|
# frozen_string_literal: true
# This model refers to law enforcement agencies involved in the cases
class Agency < ActiveRecord::Base
  # This is the jurisdiction of the agency
  # TODO: Determine a better way to do these enums
  class Jurisdiction
    include EnumeratedType
    declare :none
    declare :state
    declare :local
    declare :federal
    declare :university
    declare :private
  end
  has_many :case_agencies
  has_many :cases, through: :case_agencies
  belongs_to :state
  # Drop leading whitespace from the name before every save.
  before_save do
    self.name = name.lstrip
  end
  validates :name, presence: { message: 'Please enter a name.' }
  validates :name, uniqueness: {
    message: 'An agency with this name already exists and can be found. If you'\
    ' want to create a new agency, it must have a unique name.'
  }
  validates :state_id, presence: {
    # Fixed copy-pasted message: this model is an agency, not an incident.
    message: 'You must specify the state in which the agency is located.'
  }
  extend FriendlyId
  friendly_id :slug_candidates, use: :slugged
  # Geocoding
  geocoded_by :full_address
  before_save :geocode, if: proc { |agcy|
    agcy.street_address_changed? ||
      agcy.city_changed? ||
      agcy.state_id_changed? ||
      agcy.zipcode_changed?
  } # auto-fetch coordinates
  # Full postal address fed to the geocoder.
  def full_address
    "#{street_address} #{city} #{state.ansi_code} #{zipcode}".strip
  end
  # Nearby cases within 50 (geocoder units), closest first; nil-safe via #try.
  def nearby_cases
    try(:nearbys, 50).try(:order, 'distance')
  end
  # Try building a slug based on the following fields in
  # increasing order of specificity.
  def slug_candidates
    [
      :name,
      %i[name city],
      %i[name street_address city]
    ]
  end
  # Name of the associated state ("" when state_id is missing/invalid).
  def retrieve_state
    State.where(id: state_id).pluck(:name).join
  end
end
Change name of the class in agency model to match new name
# frozen_string_literal: true
# This model refers to law enforcement agencies involved in the cases
class Agency < ActiveRecord::Base
  # This is the jurisdiction of the agency
  # TODO: Determine a better way to do these enums
  # NOTE(review): Jurisdiction_type violates Ruby's CamelCase constant
  # convention (should be JurisdictionType) — rename once callers are known.
  class Jurisdiction_type
    include EnumeratedType
    declare :none
    declare :state
    declare :local
    declare :federal
    declare :university
    declare :private
  end
  has_many :case_agencies
  has_many :cases, through: :case_agencies
  belongs_to :state
  # Drop leading whitespace from the name before every save.
  before_save do
    self.name = name.lstrip
  end
  validates :name, presence: { message: 'Please enter a name.' }
  validates :name, uniqueness: {
    message: 'An agency with this name already exists and can be found. If you'\
    ' want to create a new agency, it must have a unique name.'
  }
  validates :state_id, presence: {
    # Fixed copy-pasted message: this model is an agency, not an incident.
    message: 'You must specify the state in which the agency is located.'
  }
  extend FriendlyId
  friendly_id :slug_candidates, use: :slugged
  # Geocoding
  geocoded_by :full_address
  before_save :geocode, if: proc { |agcy|
    agcy.street_address_changed? ||
      agcy.city_changed? ||
      agcy.state_id_changed? ||
      agcy.zipcode_changed?
  } # auto-fetch coordinates
  # Full postal address fed to the geocoder.
  def full_address
    "#{street_address} #{city} #{state.ansi_code} #{zipcode}".strip
  end
  # Nearby cases within 50 (geocoder units), closest first; nil-safe via #try.
  def nearby_cases
    try(:nearbys, 50).try(:order, 'distance')
  end
  # Try building a slug based on the following fields in
  # increasing order of specificity.
  def slug_candidates
    [
      :name,
      %i[name city],
      %i[name street_address city]
    ]
  end
  # Name of the associated state ("" when state_id is missing/invalid).
  def retrieve_state
    State.where(id: state_id).pluck(:name).join
  end
end
|
# An answer given by one member about another within a review.
class Answer < ActiveRecord::Base
  # class_name must be a String: passing the Member constant directly forces
  # the class to load at boot and breaks autoloading/reloading in development.
  belongs_to :from_member, class_name: 'Member'
  belongs_to :for_member, class_name: 'Member'
  belongs_to :question
  belongs_to :review
end
forcing a default order as a bugfix for a postgres ordering problem
# An answer given by one member about another, belonging to a question
# within a review.
class Answer < ActiveRecord::Base
  # Fix: pass class_name as a string. Handing the constant itself forces
  # Member to load eagerly and is deprecated/unsupported in modern Rails.
  belongs_to :from_member, class_name: 'Member'
  belongs_to :for_member, class_name: 'Member'
  belongs_to :question
  belongs_to :review
  # Force a default order (bugfix for a Postgres ordering problem).
  # Fix: use the block form — the eager `default_scope order(...)` form
  # was removed in Rails 4; the block is evaluated lazily per query.
  default_scope { order("created_at ASC") }
end
|
# An Answer is a single piece of data in response to a single question or sub-question.
#
# A note about rank/inst_num attributes
#
# rank:
# - The rank of the answer within a given set of answers for a multilevel select question.
# - Starts at 1 (top level) and increases
# - Should be 1 for non-multilevel questions
#
# inst_num:
# - The number of the set of answers in which this answer belongs
# - Starts at 1 (first instance) and increases
# - e.g. if a response has three instances of a given group, values will be 1, 2, 3, and
# there will be N answers in instance 1, N in instance 2, etc., where N is the number of Q's in the group
# - Should be 1 for answers to top level questions and questions in non-repeating groups
# - Questions with answers with inst_nums higher than 1 shouldn't be allowed to be moved.
#
class Answer < ResponseNode
  include ActionView::Helpers::NumberHelper
  include PgSearch

  # Attribute names replicated from a location value or a chosen option.
  LOCATION_ATTRIBS = %i(latitude longitude altitude accuracy)

  acts_as_paranoid

  # Convert value to tsvector for use in full text search.
  trigger.before(:insert, :update) do
    "new.tsv := TO_TSVECTOR('simple', COALESCE(
      new.value,
      (SELECT STRING_AGG(opt_name_translation.value, ' ')
        FROM options, jsonb_each_text(options.name_translations) opt_name_translation
        WHERE options.id = new.option_id
          OR options.id IN (SELECT option_id FROM choices WHERE answer_id = new.id)),
      ''
    ));"
  end

  # Guard flag so replicate_location_values runs only once per save cycle.
  attr_accessor :location_values_replicated

  belongs_to :questioning, inverse_of: :answers
  belongs_to :option, inverse_of: :answers
  belongs_to :response, inverse_of: :answers, touch: true
  has_many :choices, -> { order(:created_at) }, dependent: :destroy, inverse_of: :answer, autosave: true
  has_many :options, through: :choices
  has_one :media_object, dependent: :destroy, inverse_of: :answer, autosave: true, class_name: "Media::Object"

  before_validation :replicate_location_values
  before_save :replicate_location_values # Doing this twice on purpose, see below.
  before_save :chop_decimals
  before_save :format_location_value
  before_save :round_ints
  before_save :blanks_to_nulls
  before_save :remove_unchecked_choices
  after_save :reset_location_flag

  validate :validate_required, if: -> { should_validate?(:required) }
  validate :validate_location, if: -> { should_validate?(:location) }
  validate :validate_date, :validate_datetime

  accepts_nested_attributes_for(:choices)

  delegate :question, :qtype, :required?, :hidden?, :multimedia?,
    :option_set, :options, :first_level_option_nodes, :condition, to: :questioning
  delegate :name, :hint, to: :question, prefix: true
  delegate :name, to: :level, prefix: true, allow_nil: true
  delegate :mission, to: :response
  delegate :parent_group_name, to: :questioning

  scope :public_access, -> { joins(questioning: :question).
    where("questions.access_level = 'inherit'").order("form_items.rank") }
  scope :created_after, ->(date) { includes(:response).where("responses.created_at >= ?", date) }
  scope :created_before, ->(date) { includes(:response).where("responses.created_at <= ?", date) }
  scope :newest_first, -> { includes(:response).order("responses.created_at DESC") }

  pg_search_scope :search_by_value,
    against: :value,
    using: {
      tsearch: {
        tsvector_column: "tsv",
        prefix: true,
        negation: true
      }
    }

  # gets all location answers for the given mission
  # returns only the response ID and the answer value
  def self.location_answers_for_mission(mission, user = nil, _options = {})
    response_conditions = { mission_id: mission.try(:id) }
    # if the user is not a staffer or higher privilege, only show their own responses
    response_conditions[:user_id] = user.id if user.present? && !user.role?(:staffer, mission)
    # return an AR relation
    joins(:response)
      .joins(%{LEFT JOIN "choices" ON "choices"."answer_id" = "answers"."id"})
      .where(responses: response_conditions)
      .where(%{
        ("answers"."latitude" IS NOT NULL AND "answers"."longitude" IS NOT NULL)
        OR ("choices"."latitude" IS NOT NULL AND "choices"."longitude" IS NOT NULL)
      })
      .select(:response_id,
        %{COALESCE("answers"."latitude", "choices"."latitude") AS "latitude",
          COALESCE("answers"."longitude", "choices"."longitude") AS "longitude"})
      .order(%{"answers"."response_id" DESC})
      .paginate(page: 1, per_page: 1000)
  end

  # Tests if there exists at least one answer referencing the option and questionings with the given IDs.
  def self.any_for_option_and_questionings?(option_id, questioning_ids)
    find_by_sql(["
      SELECT COUNT(*) AS count
      FROM answers a
      LEFT OUTER JOIN choices c ON c.deleted_at IS NULL AND c.answer_id = a.id
      WHERE a.deleted_at IS NULL
        AND a.type = 'Answer'
        AND (a.option_id = ? OR c.option_id = ?)
        AND a.questioning_id IN (?)",
      option_id, option_id, questioning_ids]).first.count > 0
  end

  # This is a temporary method for fetching option_node based on the related OptionSet and Option.
  # Eventually Options will be removed and OptionNodes will be stored on Answers directly.
  def option_node
    OptionNode.where(option_id: option_id, option_set_id: option_set.id).first
  end

  def option_node_id
    option_node.try(:id)
  end

  # This is a temporary method for assigning option based on an OptionNode ID.
  # Eventually Options will be removed and OptionNodes will be stored on Answers directly.
  def option_node_id=(id)
    self.option_id = id.present? ? OptionNode.id_to_option_id(id) : nil
  end

  # If this is an answer to a multilevel select_one question, returns the OptionLevel, else returns nil.
  def level
    option_set.try(:multilevel?) ? option_set.levels[(rank || 1) - 1] : nil
  end

  # Checked choices indexed by their Option, memoized per instance.
  def choices_by_option
    @choice_hash ||= choices.select(&:checked?).index_by(&:option)
  end

  def all_choices
    # for each option, if we have a matching choice, just return it (checked? defaults to true)
    # otherwise create one and set checked? to false
    options.map { |o| choices_by_option[o] || choices.new(option: o, checked: false) }
  end

  # if this answer is for a location question and the value is not blank, returns a two element array representing the
  # lat long. else returns nil
  def location
    value.split(" ") if location_type_with_value?
  end

  # returns the value for this answer casted to the appropriate data type
  def casted_value
    case qtype.name
    when "date" then date_value
    when "time" then time_value
    when "datetime" then datetime_value
    when "integer", "counter" then value.try(:to_i)
    when "decimal" then value.try(:to_f)
    when "select_one" then option_name
    when "select_multiple" then choices.empty? ? nil : choices.map(&:option_name).sort.join(";")
    else value.blank? ? nil : value
    end
  end

  # True for long free-text values (1000 chars or more).
  def lengthy?
    value.present? && value.size >= 1000
  end

  # relevant defaults to true until set otherwise
  def relevant?
    @relevant.nil? ? true : @relevant
  end
  alias_method :relevant, :relevant?

  # A flag indicating whether the answer is relevant and should thus be validated.
  # convert string 'true'/'false' to boolean
  def relevant=(r)
    @relevant = r.is_a?(String) ? r == "true" : r
  end

  # Checks if answer must be non-empty to be valid.
  # Non-first-rank answers are currently not required even if their questioning is required (i.e. partial answers allowed).
  def required_and_relevant?
    required? && !hidden? && relevant? && first_rank? && qtype.name != "select_multiple"
  end

  # Whether this Answer is the first in its set (i.e. rank is nil or 1)
  def first_rank?
    rank.nil? || rank == 1
  end

  # check various fields for blankness
  def empty?
    value.blank? && time_value.blank? && date_value.blank? &&
      datetime_value.blank? && option_id.nil? && media_object.nil?
  end
  alias_method :blank?, :empty?

  # checks if answer is required and relevant but also empty
  def required_but_empty?
    required_and_relevant? && empty?
  end

  def location_type_with_value?
    qtype.name == "location" && value.present?
  end

  def has_coordinates?
    latitude.present? && longitude.present?
  end

  def from_group?
    questioning && questioning.parent && questioning.parent.type == "QingGroup"
  end

  def option_name
    option.canonical_name if option
  end

  def option_names
    choices.map(&:option).map(&:canonical_name).join(", ") if choices
  end

  # Used with nested attribs
  def media_object_id
    media_object.try(:id)
  end

  # Used with nested attribs
  # Attempts to find unassociated media object with given ID and associate with this answer.
  # Fails silently if not found.
  def media_object_id=(id)
    if id.nil?
      self.media_object = nil
    elsif media_object_id != id
      self.media_object = Media::Object.find_by(id: id, answer_id: nil)
    end
  end

  def has_media_object?
    !media_object_id.nil?
  end

  def group_level
    questioning.ancestry_depth - 1
  end

  private

  # Decides whether the given validation category applies to this save.
  def should_validate?(field)
    return false if response && !response.validate_answers?
    return false if marked_for_destruction?
    case field
    when :numericality
      qtype.numeric? && value.present?
    when :required
      # don't validate requiredness if response says no
      !(response && response.incomplete?)
    when :location
      qtype.name == "location"
    else
      true
    end
  end

  def replicate_location_values
    # This method is run before_validation and before_save in case validations are skipped.
    # We use this flag to not duplicate effort.
    return if location_values_replicated
    self.location_values_replicated = true
    choices.each(&:replicate_location_values)
    if location_type_with_value?
      tokens = self.value.split(" ")
      LOCATION_ATTRIBS.each_with_index do |a, i|
        # Fix: BigDecimal.new was deprecated in Ruby 2.6 and removed in 2.7;
        # use the Kernel#BigDecimal conversion method instead.
        self[a] = tokens[i] ? BigDecimal(tokens[i]) : nil
      end
    elsif option.present? && option.has_coordinates?
      self.latitude = option.latitude
      self.longitude = option.longitude
    elsif choice = choices.detect(&:has_coordinates?)
      self.latitude = choice.latitude
      self.longitude = choice.longitude
    end
    true
  end

  # We sometimes save decimals without validating, so we need to be careful
  # not to overflow the DB.
  def chop_decimals
    LOCATION_ATTRIBS.each do |a|
      next if self[a].nil?
      column = self.class.column_for_attribute(a)
      if self[a].abs > 10 ** (column.precision - column.scale)
        self[a] = 0
      end
    end
    self.accuracy = 0 if accuracy.present? && accuracy < 0
    true
  end

  # Renders value as "lat lng [alt [acc]]" with fixed precision.
  def format_location_value
    if has_coordinates?
      self.value = sprintf("%.6f %.6f", latitude, longitude)
      if altitude.present?
        self.value << sprintf(" %.3f", altitude)
        if accuracy.present?
          self.value << sprintf(" %.3f", accuracy)
        end
      end
    end
    true
  end

  # Truncates fractional parts for integer-like question types.
  def round_ints
    self.value = value.to_i if %w(integer counter).include?(qtype.name) && value.present?
    true
  end

  def blanks_to_nulls
    self.value = nil if value.blank?
    true
  end

  def remove_unchecked_choices
    choices.destroy(*choices.reject(&:checked?))
    true
  end

  def validate_required
    errors.add(:value, :required) if required_but_empty?
  end

  def validate_location
    # Doesn't make sense to validate lat/lng if copied from options because the user
    # can't do anything about that.
    if location_type_with_value?
      if latitude.nil? || latitude < -90 || latitude > 90
        errors.add(:value, :invalid_latitude)
      end
      if longitude.nil? || longitude < -180 || longitude > 180
        errors.add(:value, :invalid_longitude)
      end
      if altitude.present? && (altitude >= 1e6 || altitude <= -1e6)
        errors.add(:value, :invalid_altitude)
      end
      if accuracy.present?
        if accuracy < 0
          errors.add(:value, :accuracy_negative)
        elsif accuracy >= 1e6
          errors.add(:value, :invalid_accuracy)
        end
      end
    end
  end

  def reset_location_flag
    self.location_values_replicated = false
    true
  end

  # NOTE(review): Time.zone.parse can raise ArgumentError on garbage input
  # rather than returning nil — confirm upstream input is pre-sanitized.
  def validate_date
    raw_date = read_attribute_before_type_cast("date_value")
    return if raw_date.blank? || Time.zone.parse(raw_date.to_s).present?
    errors.add(:date_value, :invalid_date)
  end

  def validate_datetime
    raw_datetime = read_attribute_before_type_cast("datetime_value")
    return if raw_datetime.blank? || Time.zone.parse(raw_datetime.to_s).present?
    errors.add(:datetime_value, :invalid_datetime)
  end
end
Restore answer validations
# An Answer is a single piece of data in response to a single question or sub-question.
#
# A note about rank/inst_num attributes
#
# rank:
# - The rank of the answer within a given set of answers for a multilevel select question.
# - Starts at 1 (top level) and increases
# - Should be 1 for non-multilevel questions
#
# inst_num:
# - The number of the set of answers in which this answer belongs
# - Starts at 1 (first instance) and increases
# - e.g. if a response has three instances of a given group, values will be 1, 2, 3, and
# there will be N answers in instance 1, N in instance 2, etc., where N is the number of Q's in the group
# - Should be 1 for answers to top level questions and questions in non-repeating groups
# - Questions with answers with inst_nums higher than 1 shouldn't be allowed to be moved.
#
class Answer < ResponseNode
  include ActionView::Helpers::NumberHelper
  include PgSearch

  # Attribute names replicated from a location value or a chosen option.
  LOCATION_ATTRIBS = %i(latitude longitude altitude accuracy)

  acts_as_paranoid

  # Convert value to tsvector for use in full text search.
  trigger.before(:insert, :update) do
    "new.tsv := TO_TSVECTOR('simple', COALESCE(
      new.value,
      (SELECT STRING_AGG(opt_name_translation.value, ' ')
        FROM options, jsonb_each_text(options.name_translations) opt_name_translation
        WHERE options.id = new.option_id
          OR options.id IN (SELECT option_id FROM choices WHERE answer_id = new.id)),
      ''
    ));"
  end

  # Guard flag so replicate_location_values runs only once per save cycle.
  attr_accessor :location_values_replicated

  belongs_to :questioning, inverse_of: :answers
  belongs_to :option, inverse_of: :answers
  belongs_to :response, inverse_of: :answers, touch: true
  has_many :choices, -> { order(:created_at) }, dependent: :destroy, inverse_of: :answer, autosave: true
  has_many :options, through: :choices
  has_one :media_object, dependent: :destroy, inverse_of: :answer, autosave: true, class_name: "Media::Object"

  before_validation :replicate_location_values
  before_save :replicate_location_values # Doing this twice on purpose, see below.
  before_save :chop_decimals
  before_save :format_location_value
  before_save :round_ints
  before_save :blanks_to_nulls
  before_save :remove_unchecked_choices
  after_save :reset_location_flag

  validates :value, numericality: true, if: -> { should_validate?(:numericality) }
  validate :validate_min_max, if: -> { should_validate?(:min_max) }
  validate :validate_required, if: -> { should_validate?(:required) }
  validate :validate_location, if: -> { should_validate?(:location) }
  validate :validate_date, :validate_datetime

  accepts_nested_attributes_for(:choices)

  delegate :question, :qtype, :required?, :hidden?, :multimedia?,
    :option_set, :options, :first_level_option_nodes, :condition, to: :questioning
  delegate :name, :hint, to: :question, prefix: true
  delegate :name, to: :level, prefix: true, allow_nil: true
  delegate :mission, to: :response
  delegate :parent_group_name, to: :questioning

  scope :public_access, -> { joins(questioning: :question).
    where("questions.access_level = 'inherit'").order("form_items.rank") }
  scope :created_after, ->(date) { includes(:response).where("responses.created_at >= ?", date) }
  scope :created_before, ->(date) { includes(:response).where("responses.created_at <= ?", date) }
  scope :newest_first, -> { includes(:response).order("responses.created_at DESC") }

  pg_search_scope :search_by_value,
    against: :value,
    using: {
      tsearch: {
        tsvector_column: "tsv",
        prefix: true,
        negation: true
      }
    }

  # gets all location answers for the given mission
  # returns only the response ID and the answer value
  def self.location_answers_for_mission(mission, user = nil, _options = {})
    response_conditions = { mission_id: mission.try(:id) }
    # if the user is not a staffer or higher privilege, only show their own responses
    response_conditions[:user_id] = user.id if user.present? && !user.role?(:staffer, mission)
    # return an AR relation
    joins(:response)
      .joins(%{LEFT JOIN "choices" ON "choices"."answer_id" = "answers"."id"})
      .where(responses: response_conditions)
      .where(%{
        ("answers"."latitude" IS NOT NULL AND "answers"."longitude" IS NOT NULL)
        OR ("choices"."latitude" IS NOT NULL AND "choices"."longitude" IS NOT NULL)
      })
      .select(:response_id,
        %{COALESCE("answers"."latitude", "choices"."latitude") AS "latitude",
          COALESCE("answers"."longitude", "choices"."longitude") AS "longitude"})
      .order(%{"answers"."response_id" DESC})
      .paginate(page: 1, per_page: 1000)
  end

  # Tests if there exists at least one answer referencing the option and questionings with the given IDs.
  def self.any_for_option_and_questionings?(option_id, questioning_ids)
    find_by_sql(["
      SELECT COUNT(*) AS count
      FROM answers a
      LEFT OUTER JOIN choices c ON c.deleted_at IS NULL AND c.answer_id = a.id
      WHERE a.deleted_at IS NULL
        AND a.type = 'Answer'
        AND (a.option_id = ? OR c.option_id = ?)
        AND a.questioning_id IN (?)",
      option_id, option_id, questioning_ids]).first.count > 0
  end

  # This is a temporary method for fetching option_node based on the related OptionSet and Option.
  # Eventually Options will be removed and OptionNodes will be stored on Answers directly.
  def option_node
    OptionNode.where(option_id: option_id, option_set_id: option_set.id).first
  end

  def option_node_id
    option_node.try(:id)
  end

  # This is a temporary method for assigning option based on an OptionNode ID.
  # Eventually Options will be removed and OptionNodes will be stored on Answers directly.
  def option_node_id=(id)
    self.option_id = id.present? ? OptionNode.id_to_option_id(id) : nil
  end

  # If this is an answer to a multilevel select_one question, returns the OptionLevel, else returns nil.
  def level
    option_set.try(:multilevel?) ? option_set.levels[(rank || 1) - 1] : nil
  end

  # Checked choices indexed by their Option, memoized per instance.
  def choices_by_option
    @choice_hash ||= choices.select(&:checked?).index_by(&:option)
  end

  def all_choices
    # for each option, if we have a matching choice, just return it (checked? defaults to true)
    # otherwise create one and set checked? to false
    options.map { |o| choices_by_option[o] || choices.new(option: o, checked: false) }
  end

  # if this answer is for a location question and the value is not blank, returns a two element array representing the
  # lat long. else returns nil
  def location
    value.split(" ") if location_type_with_value?
  end

  # returns the value for this answer casted to the appropriate data type
  def casted_value
    case qtype.name
    when "date" then date_value
    when "time" then time_value
    when "datetime" then datetime_value
    when "integer", "counter" then value.try(:to_i)
    when "decimal" then value.try(:to_f)
    when "select_one" then option_name
    when "select_multiple" then choices.empty? ? nil : choices.map(&:option_name).sort.join(";")
    else value.blank? ? nil : value
    end
  end

  # True for long free-text values (1000 chars or more).
  def lengthy?
    value.present? && value.size >= 1000
  end

  # relevant defaults to true until set otherwise
  def relevant?
    @relevant.nil? ? true : @relevant
  end
  alias_method :relevant, :relevant?

  # A flag indicating whether the answer is relevant and should thus be validated.
  # convert string 'true'/'false' to boolean
  def relevant=(r)
    @relevant = r.is_a?(String) ? r == "true" : r
  end

  # Checks if answer must be non-empty to be valid.
  # Non-first-rank answers are currently not required even if their questioning is required (i.e. partial answers allowed).
  def required_and_relevant?
    required? && !hidden? && relevant? && first_rank? && qtype.name != "select_multiple"
  end

  # Whether this Answer is the first in its set (i.e. rank is nil or 1)
  def first_rank?
    rank.nil? || rank == 1
  end

  # check various fields for blankness
  def empty?
    value.blank? && time_value.blank? && date_value.blank? &&
      datetime_value.blank? && option_id.nil? && media_object.nil?
  end
  alias_method :blank?, :empty?

  # checks if answer is required and relevant but also empty
  def required_but_empty?
    required_and_relevant? && empty?
  end

  def location_type_with_value?
    qtype.name == "location" && value.present?
  end

  def has_coordinates?
    latitude.present? && longitude.present?
  end

  def from_group?
    questioning && questioning.parent && questioning.parent.type == "QingGroup"
  end

  def option_name
    option.canonical_name if option
  end

  def option_names
    choices.map(&:option).map(&:canonical_name).join(", ") if choices
  end

  # Used with nested attribs
  def media_object_id
    media_object.try(:id)
  end

  # Used with nested attribs
  # Attempts to find unassociated media object with given ID and associate with this answer.
  # Fails silently if not found.
  def media_object_id=(id)
    if id.nil?
      self.media_object = nil
    elsif media_object_id != id
      self.media_object = Media::Object.find_by(id: id, answer_id: nil)
    end
  end

  def has_media_object?
    !media_object_id.nil?
  end

  def group_level
    questioning.ancestry_depth - 1
  end

  private

  # Decides whether the given validation category applies to this save.
  def should_validate?(field)
    return false if response && !response.validate_answers?
    return false if marked_for_destruction?
    case field
    when :numericality
      qtype.numeric? && value.present?
    when :required
      # don't validate requiredness if response says no
      !(response && response.incomplete?)
    when :min_max
      value.present?
    when :location
      qtype.name == "location"
    else
      true
    end
  end

  def replicate_location_values
    # This method is run before_validation and before_save in case validations are skipped.
    # We use this flag to not duplicate effort.
    return if location_values_replicated
    self.location_values_replicated = true
    choices.each(&:replicate_location_values)
    if location_type_with_value?
      tokens = self.value.split(" ")
      LOCATION_ATTRIBS.each_with_index do |a, i|
        # Fix: BigDecimal.new was deprecated in Ruby 2.6 and removed in 2.7;
        # use the Kernel#BigDecimal conversion method instead.
        self[a] = tokens[i] ? BigDecimal(tokens[i]) : nil
      end
    elsif option.present? && option.has_coordinates?
      self.latitude = option.latitude
      self.longitude = option.longitude
    elsif choice = choices.detect(&:has_coordinates?)
      self.latitude = choice.latitude
      self.longitude = choice.longitude
    end
    true
  end

  # We sometimes save decimals without validating, so we need to be careful
  # not to overflow the DB.
  def chop_decimals
    LOCATION_ATTRIBS.each do |a|
      next if self[a].nil?
      column = self.class.column_for_attribute(a)
      if self[a].abs > 10 ** (column.precision - column.scale)
        self[a] = 0
      end
    end
    self.accuracy = 0 if accuracy.present? && accuracy < 0
    true
  end

  # Renders value as "lat lng [alt [acc]]" with fixed precision.
  def format_location_value
    if has_coordinates?
      self.value = sprintf("%.6f %.6f", latitude, longitude)
      if altitude.present?
        self.value << sprintf(" %.3f", altitude)
        if accuracy.present?
          self.value << sprintf(" %.3f", accuracy)
        end
      end
    end
    true
  end

  # Truncates fractional parts for integer-like question types.
  def round_ints
    self.value = value.to_i if %w(integer counter).include?(qtype.name) && value.present?
    true
  end

  def blanks_to_nulls
    self.value = nil if value.blank?
    true
  end

  def remove_unchecked_choices
    choices.destroy(*choices.reject(&:checked?))
    true
  end

  def validate_required
    errors.add(:value, :required) if required_but_empty?
  end

  # Enforces the question's configured minimum/maximum (strict or inclusive).
  def validate_min_max
    val_f = value.to_f
    if question.maximum && (val_f > question.maximum || question.maxstrictly && val_f == question.maximum) ||
        question.minimum && (val_f < question.minimum || question.minstrictly && val_f == question.minimum)
      errors.add(:value, question.min_max_error_msg)
    end
  end

  def validate_location
    # Doesn't make sense to validate lat/lng if copied from options because the user
    # can't do anything about that.
    if location_type_with_value?
      if latitude.nil? || latitude < -90 || latitude > 90
        errors.add(:value, :invalid_latitude)
      end
      if longitude.nil? || longitude < -180 || longitude > 180
        errors.add(:value, :invalid_longitude)
      end
      if altitude.present? && (altitude >= 1e6 || altitude <= -1e6)
        errors.add(:value, :invalid_altitude)
      end
      if accuracy.present?
        if accuracy < 0
          errors.add(:value, :accuracy_negative)
        elsif accuracy >= 1e6
          errors.add(:value, :invalid_accuracy)
        end
      end
    end
  end

  def reset_location_flag
    self.location_values_replicated = false
    true
  end

  # NOTE(review): Time.zone.parse can raise ArgumentError on garbage input
  # rather than returning nil — confirm upstream input is pre-sanitized.
  def validate_date
    raw_date = read_attribute_before_type_cast("date_value")
    return if raw_date.blank? || Time.zone.parse(raw_date.to_s).present?
    errors.add(:date_value, :invalid_date)
  end

  def validate_datetime
    raw_datetime = read_attribute_before_type_cast("datetime_value")
    return if raw_datetime.blank? || Time.zone.parse(raw_datetime.to_s).present?
    errors.add(:datetime_value, :invalid_datetime)
  end
end
|
# -*- encoding: utf-8 -*-
class Answer < ActiveRecord::Base
default_scope :order => 'id ASC'
scope :public_answers, where(:shared => true)
scope :private_answers, where(:shared => false)
belongs_to :user, :counter_cache => true, :validate => true
belongs_to :question, :counter_cache => true, :validate => true
has_many :answer_has_items, :dependent => :destroy
has_many :items, :through => :answer_has_items
after_save :save_questions
before_save :add_items
validates_associated :user, :question
validates_presence_of :user, :question, :body
validate :check_url_list
def self.per_page
10
end
def save_questions
self.question.save
end
def add_items
item_list = item_identifier_list.to_s.strip.split.map{|i| Item.where(:item_identifier => i).first}.compact.uniq
url_list = add_urls
self.items = item_list + url_list
end
def add_urls
list = url_list.to_s.strip.split.map{|u| Manifestation.where(:access_address => Addressable::URI.parse(u).normalize.to_s).first}.compact.map{|m| m.web_item}.compact.uniq
end
def check_url_list
url_list.to_s.strip.split.each do |url|
errors.add(:url_list) unless Addresable::URI.parse(url).host
end
end
end
# == Schema Information
#
# Table name: answers
#
# id :integer not null, primary key
# user_id :integer not null
# question_id :integer not null
# body :text
# created_at :datetime
# updated_at :datetime
# deleted_at :datetime
# shared :boolean default(TRUE), not null
# state :string(255)
# item_identifier_list :text
# url_list :text
#
Fix misspelled Addressable::URI constant in Answer#check_url_list
# -*- encoding: utf-8 -*-
# A library-patron answer holding free text plus attached items resolved
# from item identifiers and URLs.
class Answer < ActiveRecord::Base
  default_scope :order => 'id ASC'
  scope :public_answers, where(:shared => true)
  scope :private_answers, where(:shared => false)

  belongs_to :user, :counter_cache => true, :validate => true
  belongs_to :question, :counter_cache => true, :validate => true
  has_many :answer_has_items, :dependent => :destroy
  has_many :items, :through => :answer_has_items

  after_save :save_questions
  before_save :add_items

  validates_associated :user, :question
  validates_presence_of :user, :question, :body
  validate :check_url_list

  # WillPaginate page size.
  def self.per_page
    10
  end

  # Re-save the owning question (refreshes its caches/timestamps).
  def save_questions
    self.question.save
  end

  # Resolve item identifiers and URLs into Item records and attach them.
  def add_items
    item_list = item_identifier_list.to_s.strip.split.map{|i| Item.where(:item_identifier => i).first}.compact.uniq
    url_list = add_urls
    self.items = item_list + url_list
  end

  # Map each URL in url_list to the web item of its manifestation,
  # dropping URLs that match nothing.
  # (Removed the unused `list =` local; the expression is the return value.)
  def add_urls
    url_list.to_s.strip.split.map{|u| Manifestation.where(:access_address => Addressable::URI.parse(u).normalize.to_s).first}.compact.map{|m| m.web_item}.compact.uniq
  end

  # Validate that every entry in url_list parses to a URI with a host.
  def check_url_list
    url_list.to_s.strip.split.each do |url|
      errors.add(:url_list) unless Addressable::URI.parse(url).host
    end
  end
end
# == Schema Information
#
# Table name: answers
#
# id :integer not null, primary key
# user_id :integer not null
# question_id :integer not null
# body :text
# created_at :datetime
# updated_at :datetime
# deleted_at :datetime
# shared :boolean default(TRUE), not null
# state :string(255)
# item_identifier_list :text
# url_list :text
#
|
# == Schema Information
#
# Table name: bounties
#
# id :integer not null, primary key
# value :integer not null
# issue_id :integer not null
# issuer_id :integer not null
# claimant_id :integer
# claimed_value :integer
# claimed_at :datetime
# created_at :datetime
# updated_at :datetime
#
# A bounty of points placed by a coder (issuer) on an issue, optionally
# claimed later by another coder (claimant).
class Bounty < ActiveRecord::Base
  belongs_to :issue
  has_one :repository, through: :issue
  belongs_to :issuer, class_name: 'Coder', foreign_key: 'issuer_id'
  belongs_to :claimant, class_name: 'Coder', foreign_key: 'claimant_id'
  validates :issue, presence: true
  validates :issuer, presence: true
  validates :value, presence: true, numericality: {
    only_integer: true,
    greater_than_or_equal_to: 0
  }
  after_save :expire_caches
  # Bounties print as their point value.
  delegate :to_s, to: :value

  # Create or adjust coder's open (unclaimed) bounty on issue so that its
  # value matches new_abs_value (absolute points, converted to bounty
  # points). Raises Bounty::Error when the coder lacks points or the save
  # fails. Also debits the coder's residual and announces via Slack.
  def self.update_or_create(issue, coder, new_abs_value)
    new_value = BountyPoints.bounty_points_from_abs new_abs_value
    # Find the bounty for this issue if it already exists
    bounty = Bounty.find_or_create_by issue: issue,
      issuer: coder,
      claimed_at: nil do |b|
      b.value = 0
    end
    # Check whether the user has got enough points to spend
    delta = new_value - bounty.value
    if delta > coder.bounty_residual
      raise Error.new("You don\'t have enough bounty points to put a"\
        " bounty of this amount.")
    end
    # Increase value
    bounty.value += delta
    # Try to save the bounty, update the remaining bounty points, and return
    # some possibly updated records
    # NOTE(review): the error message string contains the typo "occured";
    # left untouched here since it is runtime-visible text.
    unless bounty.save
      raise Error.new("There occured an error while trying to save your"\
        " bounty (#{bounty.errors.full_messages})")
    end
    coder.bounty_residual -= delta
    coder.save!
    SlackWebhook.publish_bounty(bounty)
  end

  # Pay out or refund this bounty. If the issue has an assignee other than
  # the issuer, the assignee is rewarded; otherwise the points go back to
  # the issuer and the bounty row is destroyed. No-op if already claimed.
  def claim(time: Time.zone.now)
    return if claimed_at # This bounty has already been claimed
    if issue.assignee && issue.assignee != issuer
      # Reward assignee
      issue.assignee.reward_bounty self, time: time
      issue.assignee.save!
    else
      # refund bounty points
      issuer.bounty_residual += value
      issuer.save!
      # This bounty is of no use; destroy it.
      destroy
    end
  end

  # Absolute point value: the frozen claimed_value once claimed, otherwise
  # the live conversion of value.
  def absolute_value
    claimed_value || BountyPoints.bounty_points_to_abs(value)
  end

  # Freeze the bounty at claim time: record claimant, timestamp, and the
  # absolute value, then zero the live value.
  def pinpoint_value(coder: nil, time: Time.current)
    self.claimant = coder
    self.claimed_at = time
    self.claimed_value = absolute_value
    self.value = 0
    save!
  end

  # Domain error for bounty operations (insufficient points, failed save).
  class Error < StandardError
  end

  private

  # Invalidate cached bounty-point totals after any save.
  def expire_caches
    BountyPoints.expire_assigned_bounty_points
  end
end
Restructure Bounty.update_or_create
# == Schema Information
#
# Table name: bounties
#
# id :integer not null, primary key
# value :integer not null
# issue_id :integer not null
# issuer_id :integer not null
# claimant_id :integer
# claimed_value :integer
# claimed_at :datetime
# created_at :datetime
# updated_at :datetime
#
# A bounty of points placed by a coder (issuer) on an issue, optionally
# claimed later by another coder (claimant).
class Bounty < ActiveRecord::Base
  belongs_to :issue
  has_one :repository, through: :issue
  belongs_to :issuer, class_name: 'Coder', foreign_key: 'issuer_id'
  belongs_to :claimant, class_name: 'Coder', foreign_key: 'claimant_id'
  validates :issue, presence: true
  validates :issuer, presence: true
  validates :value, presence: true, numericality: {
    only_integer: true,
    greater_than_or_equal_to: 0
  }
  after_save :expire_caches
  # Bounties print as their point value.
  delegate :to_s, to: :value

  # Find the coder's open bounty on issue (creating one at value 0 if
  # absent) and adjust it to new_abs_value.
  def self.update_or_create(issue, coder, new_abs_value)
    # Find the bounty for this issue if it already exists, or make a new one
    bounty = Bounty.find_or_create_by issue: issue,
      issuer: coder,
      claimed_at: nil do |b|
      b.value = 0
    end
    bounty.update!(coder, new_abs_value)
  end

  # Adjust this bounty so its value matches new_abs_value (absolute points),
  # debiting/crediting the coder's residual. Raises Bounty::Error on
  # insufficient points or a failed save.
  # NOTE(review): this shadows ActiveRecord::Base#update! with an
  # incompatible signature — consider renaming (e.g. adjust_value!).
  def update!(coder, new_abs_value)
    new_value = BountyPoints.bounty_points_from_abs new_abs_value
    # Check whether the user has got enough points to spend
    delta = new_value - value
    if delta > coder.bounty_residual
      raise Error.new("You don\'t have enough bounty points to put a"\
        " bounty of this amount.")
    end
    # Fix: `value += delta` created a fresh local variable (evaluating as
    # nil + delta => NoMethodError) instead of writing the attribute; it
    # must go through the attribute writer via self.
    self.value += delta
    # Try to save the bounty, update the remaining bounty points, and return
    # some possibly updated records
    unless save
      # Fix: `bounty` was undefined inside this instance method (NameError);
      # use this record's own errors. Also corrects "occured" -> "occurred".
      raise Error.new("There occurred an error while trying to save your"\
        " bounty (#{errors.full_messages})")
    end
    coder.bounty_residual -= delta
    coder.save!
    # Fix: `bounty` was undefined here as well; publish this record.
    SlackWebhook.publish_bounty(self)
  end

  # Pay out or refund this bounty. If the issue has an assignee other than
  # the issuer, the assignee is rewarded; otherwise the points go back to
  # the issuer and the bounty row is destroyed. No-op if already claimed.
  def claim(time: Time.zone.now)
    return if claimed_at # This bounty has already been claimed
    if issue.assignee && issue.assignee != issuer
      # Reward assignee
      issue.assignee.reward_bounty self, time: time
      issue.assignee.save!
    else
      # refund bounty points
      issuer.bounty_residual += value
      issuer.save!
      # This bounty is of no use; destroy it.
      destroy
    end
  end

  # Absolute point value: the frozen claimed_value once claimed, otherwise
  # the live conversion of value.
  def absolute_value
    claimed_value || BountyPoints.bounty_points_to_abs(value)
  end

  # Freeze the bounty at claim time: record claimant, timestamp, and the
  # absolute value, then zero the live value.
  def pinpoint_value(coder: nil, time: Time.current)
    self.claimant = coder
    self.claimed_at = time
    self.claimed_value = absolute_value
    self.value = 0
    save!
  end

  # Domain error for bounty operations (insufficient points, failed save).
  class Error < StandardError
  end

  private

  # Invalidate cached bounty-point totals after any save.
  def expire_caches
    BountyPoints.expire_assigned_bounty_points
  end
end
|
class Client < ActiveRecord::Base
has_many :tickets
belongs_to :status
has_many :receipts, through: :tickets
# Add back validation for address and city and telephone
# Validates the business name
validates :business_name, presence: true
# Validates the city, this should allow blanks in the business name
validates :city, allow_blank: true, format: {
with: /\A[-a-zA-Z ?()'\/&-\.]+\Z/,
message: 'must only have letters (no digits).'
}
# Validates the zipcode
validates :zipcode, allow_blank: true, length:{
minimum: 4, maximum: 5,
message: 'is the wrong length. Needs to be between four to five digits long.'
}, numericality: { greater_than: 0 }
# Validates the email
# validates :email, allow_blank: true, uniqueness: true, format: {
# with: /\A([0-9a-zA-Z]+[-._+&])*[0-9a-zA-Z]+@([-0-9a-zA-Z]+[.])+[a-zA-Z]{2,6}\Z/,
# message: 'must be a valid email address.'
# }
# Validates the contact first and last name
validates :contact_fname, :contact_lname, allow_blank: true, format: {
with: /\A[-a-zA-Z ?()'\/&-\.]+\Z/,
message: 'must only have letters (no digits).'
}
# Validates the telephone - This validation breaks the seed file
# validates :telephone, allow_blank: true, format: {
# with: /\A(17\s*-\s*\d{4}\s*-\s*[1-4]|(\d{3}\s*-\s*){1,2}\d{4}(\s*[Ee][Xx][Tt]\.?\s*\d{1,7})?)\Z/,
# message: 'must be a valid telephone number.'
# }
# Returns all pending clients, needs to be refactored to remove magic number
def self.pending
where(status_id: Status.where(status_type: 'Pending'))
end
def self.edited_pending
where(status_id: 5)
end
def self.unapproved
where(status_id: Status.where(status_type: 'Unapproved'))
end
def self.house
where(status_id: Status.where(status_type: ['In House', 'Approved']))
end
def self.approve_clients(array_of_pending_clients)
for i in 0..array_of_pending_clients.count-1
pending_client = Client.find(array_of_pending_clients[i].to_i)
pending_client.status_id = Status.where('status_type = ?', 'Approved').first.id
pending_client.save
ticket = Ticket.where('client_id = ?', pending_client.id).first
Receipt.create(:ticket_id => ticket.id, :user_id => ticket.user_id)
end
end
def self.unapprove_clients(array_of_pending_clients)
for i in 0..array_of_pending_clients.count-1
pending_client = Client.find(array_of_pending_clients[i].to_i)
pending_client.status_id = Status.where('status_type = ?', 'Unapproved').first.id
pending_client.save
if Ticket.where('client_id = ?', pending_client.id).first
Ticket.where('client_id = ?', pending_client.id).first.destroy
end
end
end
def self.tickets_for_selected_project(pid)
ticket_info = Ticket.where(project_id: pid)
Struct.new('Client_ticket', :business_name, :contact_fname, :telephone, :student_lname, :zipcode, :city, :id,
:state, :contact_lname, :contact_title, :client_id, :address, :student_fname, :student_id, :comment)
client_ticket = []
ticket_info.each_with_index do |t, i|
client_ticket[i] = Struct::Client_ticket.new
client_ticket[i].id = t.id
client_ticket[i].business_name = t.client.business_name
client_ticket[i].contact_fname = t.client.contact_fname
client_ticket[i].telephone = t.client.telephone
client_ticket[i].zipcode = t.client.zipcode
client_ticket[i].state = t.client.state
client_ticket[i].contact_lname = t.client.contact_lname
client_ticket[i].contact_title = t.client.contact_title
client_ticket[i].city = t.client.city
client_ticket[i].comment = t.client.comment
client_ticket[i].client_id = t.client_id
if t.user_id == nil || t.user_id == 0 # If the ticket does not have a holder
client_ticket[i].student_fname = nil
client_ticket[i].student_lname = nil
client_ticket[i].student_id = nil
else
client_ticket[i].student_fname = t.user.first_name
client_ticket[i].student_lname = t.user.last_name
client_ticket[i].student_id = t.user.school_id
end
end
client_ticket
end
def Client.make_pending_edited_client(edited_client, client, client_params)
if edited_client.attributes != Client.find(client).attributes
pending_edited_client = Client.new
# pending_edited_client.save
# render text: pending_edited_client.id
edited_client = Client.find(client).dup
pending_edited_client.assign_attributes(client_params)
pending_edited_client.status_id = 5
pending_edited_client.parent_id = Client.find(client).id
pending_edited_client.business_name
pending_edited_client.telephone
pending_edited_client.save(:validate => false)
end
end
def Client.approve_edited_clients(status, array_of_edited_pending_clients)
for i in 0..array_of_edited_pending_clients.count-1
pending_edited_client = Client.find(array_of_edited_pending_clients[i].to_i)
current_client = Client.find(Client.find(array_of_edited_pending_clients[i].to_i).parent_id)
if status == 2 || status == 3
# Anything you don't want copied into the orginal list here
current_client.update(pending_edited_client.attributes.except('id', 'status_id', 'created_at', 'parent_id'))
current_client.save(:validate => false)
pending_edited_client.delete
else
pending_edited_client.delete
end
end
end
def full_name
"#{contact_fname} #{contact_lname}"
end
end
Client addresses now appear on the assign page
class Client < ActiveRecord::Base
has_many :tickets
belongs_to :status
has_many :receipts, through: :tickets
# Add back validation for address and city and telephone
# Validates the business name
validates :business_name, presence: true
# Validates the city, this should allow blanks in the business name
validates :city, allow_blank: true, format: {
with: /\A[-a-zA-Z ?()'\/&-\.]+\Z/,
message: 'must only have letters (no digits).'
}
# Validates the zipcode
validates :zipcode, allow_blank: true, length:{
minimum: 4, maximum: 5,
message: 'is the wrong length. Needs to be between four to five digits long.'
}, numericality: { greater_than: 0 }
# Validates the email
# validates :email, allow_blank: true, uniqueness: true, format: {
# with: /\A([0-9a-zA-Z]+[-._+&])*[0-9a-zA-Z]+@([-0-9a-zA-Z]+[.])+[a-zA-Z]{2,6}\Z/,
# message: 'must be a valid email address.'
# }
# Validates the contact first and last name
validates :contact_fname, :contact_lname, allow_blank: true, format: {
with: /\A[-a-zA-Z ?()'\/&-\.]+\Z/,
message: 'must only have letters (no digits).'
}
# Validates the telephone - This validation breaks the seed file
# validates :telephone, allow_blank: true, format: {
# with: /\A(17\s*-\s*\d{4}\s*-\s*[1-4]|(\d{3}\s*-\s*){1,2}\d{4}(\s*[Ee][Xx][Tt]\.?\s*\d{1,7})?)\Z/,
# message: 'must be a valid telephone number.'
# }
# Returns all pending clients, needs to be refactored to remove magic number
def self.pending
where(status_id: Status.where(status_type: 'Pending'))
end
def self.edited_pending
where(status_id: 5)
end
def self.unapproved
where(status_id: Status.where(status_type: 'Unapproved'))
end
def self.house
where(status_id: Status.where(status_type: ['In House', 'Approved']))
end
def self.approve_clients(array_of_pending_clients)
for i in 0..array_of_pending_clients.count-1
pending_client = Client.find(array_of_pending_clients[i].to_i)
pending_client.status_id = Status.where('status_type = ?', 'Approved').first.id
pending_client.save
ticket = Ticket.where('client_id = ?', pending_client.id).first
Receipt.create(:ticket_id => ticket.id, :user_id => ticket.user_id)
end
end
def self.unapprove_clients(array_of_pending_clients)
for i in 0..array_of_pending_clients.count-1
pending_client = Client.find(array_of_pending_clients[i].to_i)
pending_client.status_id = Status.where('status_type = ?', 'Unapproved').first.id
pending_client.save
if Ticket.where('client_id = ?', pending_client.id).first
Ticket.where('client_id = ?', pending_client.id).first.destroy
end
end
end
def self.tickets_for_selected_project(pid)
ticket_info = Ticket.where(project_id: pid)
Struct.new('Client_ticket', :business_name, :contact_fname, :telephone, :student_lname, :zipcode, :city, :id,
:state, :contact_lname, :contact_title, :client_id, :address, :student_fname, :student_id, :comment)
client_ticket = []
ticket_info.each_with_index do |t, i|
client_ticket[i] = Struct::Client_ticket.new
client_ticket[i].id = t.id
client_ticket[i].business_name = t.client.business_name
client_ticket[i].contact_fname = t.client.contact_fname
client_ticket[i].telephone = t.client.telephone
client_ticket[i].zipcode = t.client.zipcode
client_ticket[i].state = t.client.state
client_ticket[i].contact_lname = t.client.contact_lname
client_ticket[i].contact_title = t.client.contact_title
client_ticket[i].address = t.client.address
client_ticket[i].city = t.client.city
client_ticket[i].comment = t.client.comment
client_ticket[i].client_id = t.client_id
if t.user_id == nil || t.user_id == 0 # If the ticket does not have a holder
client_ticket[i].student_fname = nil
client_ticket[i].student_lname = nil
client_ticket[i].student_id = nil
else
client_ticket[i].student_fname = t.user.first_name
client_ticket[i].student_lname = t.user.last_name
client_ticket[i].student_id = t.user.school_id
end
end
client_ticket
end
def Client.make_pending_edited_client(edited_client, client, client_params)
if edited_client.attributes != Client.find(client).attributes
pending_edited_client = Client.new
# pending_edited_client.save
# render text: pending_edited_client.id
edited_client = Client.find(client).dup
pending_edited_client.assign_attributes(client_params)
pending_edited_client.status_id = 5
pending_edited_client.parent_id = Client.find(client).id
pending_edited_client.business_name
pending_edited_client.telephone
pending_edited_client.save(:validate => false)
end
end
def Client.approve_edited_clients(status, array_of_edited_pending_clients)
for i in 0..array_of_edited_pending_clients.count-1
pending_edited_client = Client.find(array_of_edited_pending_clients[i].to_i)
current_client = Client.find(Client.find(array_of_edited_pending_clients[i].to_i).parent_id)
if status == 2 || status == 3
# Anything you don't want copied into the orginal list here
current_client.update(pending_edited_client.attributes.except('id', 'status_id', 'created_at', 'parent_id'))
current_client.save(:validate => false)
pending_edited_client.delete
else
pending_edited_client.delete
end
end
end
def full_name
"#{contact_fname} #{contact_lname}"
end
end
|
# Course class
class Course < ApplicationRecord
include ApplicationHelper
has_many :lectures, dependent: :destroy
# tags are notions that treated in the course
# e.g.: vector space, linear map are tags for the course 'Linear Algebra 1'
has_many :course_tag_joins, dependent: :destroy
has_many :tags, through: :course_tag_joins
has_many :media, as: :teachable
# users in this context are users who have subscribed to this course
has_many :course_user_joins, dependent: :destroy
has_many :users, through: :course_user_joins
# preceding courses are courses that this course is based upon
has_many :course_self_joins, dependent: :destroy
has_many :preceding_courses, through: :course_self_joins
# editors are users who have the right to modify its content
has_many :editable_user_joins, as: :editable, dependent: :destroy
has_many :editors, through: :editable_user_joins, as: :editable,
source: :user
validates :title, presence: { message: 'Titel muss vorhanden sein.' },
uniqueness: { message: 'Titel ist bereits vergeben.' }
validates :short_title,
presence: { message: 'Kurztitel muss vorhanden sein.' },
uniqueness: { message: 'Kurztitel ist bereits vergeben.' }
# some information about media and lectures are cached
# to find out whether the cache is out of date, always touch'em after saving
after_save :touch_media
after_save :touch_lectures_and_lessons
# The next methods coexist for lectures and lessons as well.
# Therefore, they can be called on any *teachable*
def course
self
end
def lecture
end
def lesson
end
def media_scope
self
end
def selector_value
'Course-' + id.to_s
end
def to_label
title
end
def compact_title
short_title
end
def title_for_viewers
Rails.cache.fetch("#{cache_key}/title_for_viewers") do
short_title
end
end
def long_title
title
end
def card_header
title
end
def published?
true
end
def card_header_path(user)
return unless user.courses.include?(self)
course_path
end
# only irrelevant courses can be deleted
def irrelevant?
lectures.empty? && media.empty? && id.present?
end
def published_lectures
lectures.published
end
def restricted?
false
end
# The next methods return if there are any media in the Kaviar, Sesam etc.
# projects that are associated to this course *with inheritance*
# These methods make use of caching.
def kaviar?
project?('kaviar')
end
def sesam?
project?('sesam')
end
def keks?
project?('keks')
end
def erdbeere?
project?('erdbeere')
end
def kiwi?
project?('kiwi')
end
def nuesse?
project?('nuesse')
end
def script?
project?('script')
end
def reste?
project?('reste')
end
# The next methods return if there are any media in the Kaviar, Sesam etc.
# projects that are associated to this course *without inheritance*
# These methods make use of caching.
def strict_kaviar?
strict_project?('kaviar')
end
def strict_sesam?
strict_project?('sesam')
end
def strict_keks?
strict_project?('keks')
end
def strict_erdbeere?
strict_project?('erdbeere')
end
def strict_kiwi?
strict_project?('kiwi')
end
def strict_nuesse?
strict_project?('nuesse')
end
def strict_script?
strict_project?('script')
end
def strict_reste?
strict_project?('reste')
end
# returns if there are any media associated to this course
# which are not of type kaviar *with inheritance*
def available_extras
hash = { 'sesam' => sesam?, 'keks' => keks?,
'erdbeere' => erdbeere?, 'kiwi' => kiwi?, 'nuesse' => nuesse?,
'script' => script?, 'reste' => reste? }
hash.keys.select { |k| hash[k] == true }
end
# returns an array with all types of media that are associated to this course
# *with inheritance*
def available_food
kaviar_info = kaviar? ? ['kaviar'] : []
kaviar_info.concat(available_extras)
end
def lectures_by_date
lectures.to_a.sort do |i, j|
j.term.begin_date <=> i.term.begin_date
end
end
def published_lectures_by_date
published_lectures.to_a.sort do |i, j|
j.term.begin_date <=> i.term.begin_date
end
end
# extracts the id of the lecture that the user has chosen as
# primary lecture for this module
# (that is the one that has the first position in the lectures carousel in
# the course view)
# Example:
# course.extras({"name"=>"John Smith", "course-3"=>"1",
# "primary_lecture-3"=>"3", "lecture-3"=>"1"})
# {"primary_lecture_id"=>3}
def extras(user_params)
modules = {}
primary_id = user_params['primary_lecture-' + id.to_s]
modules['primary_lecture_id'] = primary_id == '0' ? nil : primary_id.to_i
modules
end
# returns all items related to all lectures associated to this course
def items
lectures.collect(&:items).flatten
end
# returns the lecture which gets to sit on top in the lecture carousel in the
# lecture view
def front_lecture(user, active_lecture_id)
# make sure the front lecture is subscribed by the user
if subscribed_lectures(user).map(&:id).include?(active_lecture_id)
return Lecture.find(active_lecture_id)
end
primary_lecture(user)
end
def primary_lecture(user)
user_join = CourseUserJoin.where(course: self, user: user)
return if user_join.empty?
Lecture.find_by_id(user_join.first.primary_lecture_id)
end
def subscribed_lectures(user)
course.lectures & user.lectures
end
def to_be_authorized_lectures(user)
published_lectures.select(&:restricted?) -
subscribed_lectures(user)
end
def subscribed_lectures_by_date(user)
subscribed_lectures(user).to_a.sort do |i, j|
j.term.begin_date <=> i.term.begin_date
end
end
def subscribed_by?(user)
user.courses.include?(self)
end
def edited_by?(user)
return true if editors.include?(user)
false
end
# a course is addable by the user if the user is an editor or teacher of
# this course or a lecture of this course
def addable_by?(user)
in?(user.edited_or_given_courses_with_inheritance)
end
# a course is removable by the user if the user is an editor of this course
def removable_by?(user)
in?(user.edited_courses)
end
# returns the ARel of all media that are associated to the course
# by inheritance (i.e. directly and media which are associated to lectures or
# lessons associated to this course)
def media_with_inheritance
Rails.cache.fetch("#{cache_key}/media_with_inheritance") do
Medium.where(id: Medium.proper.includes(:teachable)
.select { |m| m.teachable.course == self }
.map(&:id))
end
end
def media_items_with_inheritance
media_with_inheritance.collect do |m|
m.items_with_references.collect { |i| [i[:title_within_course], i[:id]] }
end
.reduce(:concat)
end
def sections
lectures.collect(&:sections).flatten
end
# returns an array of teachables determined by the search params
# search_params is a hash with keys :all_teachables, :teachable_ids
# teachable ids is an array made up of strings composed of 'lecture-'
# or 'course-' followed by the id
# search is done with inheritance
def self.search_teachables(search_params)
unless search_params[:all_teachables] == '0'
return Course.all + Lecture.all + Lesson.all
end
courses = Course.where(id: Course.search_course_ids(search_params))
inherited_lectures = Lecture.where(course: courses)
selected_lectures = Lecture.where(id: Course
.search_lecture_ids(search_params))
lectures = (inherited_lectures + selected_lectures).uniq
lessons = lectures.collect(&:lessons).flatten
courses + lectures + lessons
end
def self.search_lecture_ids(search_params)
teachable_ids = search_params[:teachable_ids] || []
teachable_ids.select { |t| t.start_with?('lecture') }
.map { |t| t.remove('lecture-') }
end
def self.search_course_ids(search_params)
teachable_ids = search_params[:teachable_ids] || []
teachable_ids.select { |t| t.start_with?('course') }
.map { |t| t.remove('course-') }
end
# returns the array of courses that can be edited by the given user,
# together with a string made up of 'Course-' and their id
# Is used in options_for_select in form helpers.
def self.editable_selection(user)
if user.admin?
return Course.order(:title)
.map { |c| [c.title_for_viewers, 'Course-' + c.id.to_s] }
end
Course.includes(:editors, :editable_user_joins)
.order(:title).select { |c| c.edited_by?(user) }
.map { |c| [c.title_for_viewers, 'Course-' + c.id.to_s] }
end
# returns the array of all tags (sorted by title) together with
# their ids
def self.select_by_title
Course.all.to_a.natural_sort_by(&:title).map { |t| [t.title, t.id] }
end
def mc_questions_count
Rails.cache.fetch("#{cache_key}/mc_questions_count") do
Question.where(teachable: [self] + [lectures.published],
independent: true)
.select { |q| q.answers.count > 1 }
.count
end
end
def create_random_quiz!
question_ids = Question.where(teachable: [self] + [lectures.published],
independent: true)
.select { |q| q.answers.count > 1 }
.sample(5).map(&:id)
quiz_graph = QuizGraph.build_from_questions(question_ids)
quiz = Quiz.new(description: "Zufallsquiz #{course.title} #{Time.now}",
level: 1,
quiz_graph: quiz_graph,
sort: 'RandomQuiz')
quiz.save
return quiz.errors unless quiz.valid?
quiz
end
private
# looks in the cache if there are any media associated *with inheritance*
# to this course and a given project (kaviar, semsam etc.)
def project?(project)
Rails.cache.fetch("#{cache_key}/#{project}") do
Medium.where(sort: sort[project]).includes(:teachable)
.any? { |m| m.teachable.present? && m.teachable.course == self }
end
end
# looks in the cache if there are any media associated *without_inheritance*
# to this course and a given project (kaviar, semsam etc.)
def strict_project?(project)
Rails.cache.fetch("#{cache_key}/strict_#{project}") do
Medium.where(sort: sort[project], teachable: self).any?
end
end
def sort
{ 'kaviar' => ['Kaviar'], 'sesam' => ['Sesam'], 'kiwi' => ['Kiwi'],
'keks' => ['KeksQuiz'], 'nuesse' => ['Nuesse'],
'erdbeere' => ['Erdbeere'], 'script' => ['Script'], 'reste' => ['Reste'] }
end
def course_path
Rails.application.routes.url_helpers.course_path(self)
end
def touch_media
media_with_inheritance.update_all(updated_at: Time.now)
end
def touch_lectures_and_lessons
lectures.update_all(updated_at: Time.now)
Lesson.where(lecture: lectures).update_all(updated_at: Time.now)
end
end
ensure that primary_lecture_id in CourseUserJoin is nil or a nonzero integer
# Course class
class Course < ApplicationRecord
include ApplicationHelper
has_many :lectures, dependent: :destroy
# tags are notions that treated in the course
# e.g.: vector space, linear map are tags for the course 'Linear Algebra 1'
has_many :course_tag_joins, dependent: :destroy
has_many :tags, through: :course_tag_joins
has_many :media, as: :teachable
# users in this context are users who have subscribed to this course
has_many :course_user_joins, dependent: :destroy
has_many :users, through: :course_user_joins
# preceding courses are courses that this course is based upon
has_many :course_self_joins, dependent: :destroy
has_many :preceding_courses, through: :course_self_joins
# editors are users who have the right to modify its content
has_many :editable_user_joins, as: :editable, dependent: :destroy
has_many :editors, through: :editable_user_joins, as: :editable,
source: :user
validates :title, presence: { message: 'Titel muss vorhanden sein.' },
uniqueness: { message: 'Titel ist bereits vergeben.' }
validates :short_title,
presence: { message: 'Kurztitel muss vorhanden sein.' },
uniqueness: { message: 'Kurztitel ist bereits vergeben.' }
# some information about media and lectures are cached
# to find out whether the cache is out of date, always touch'em after saving
after_save :touch_media
after_save :touch_lectures_and_lessons
# The next methods coexist for lectures and lessons as well.
# Therefore, they can be called on any *teachable*
def course
self
end
def lecture
end
def lesson
end
def media_scope
self
end
def selector_value
'Course-' + id.to_s
end
def to_label
title
end
def compact_title
short_title
end
def title_for_viewers
Rails.cache.fetch("#{cache_key}/title_for_viewers") do
short_title
end
end
def long_title
title
end
def card_header
title
end
def published?
true
end
def card_header_path(user)
return unless user.courses.include?(self)
course_path
end
# only irrelevant courses can be deleted
def irrelevant?
lectures.empty? && media.empty? && id.present?
end
def published_lectures
lectures.published
end
def restricted?
false
end
# The next methods return if there are any media in the Kaviar, Sesam etc.
# projects that are associated to this course *with inheritance*
# These methods make use of caching.
def kaviar?
project?('kaviar')
end
def sesam?
project?('sesam')
end
def keks?
project?('keks')
end
def erdbeere?
project?('erdbeere')
end
def kiwi?
project?('kiwi')
end
def nuesse?
project?('nuesse')
end
def script?
project?('script')
end
def reste?
project?('reste')
end
# The next methods return if there are any media in the Kaviar, Sesam etc.
# projects that are associated to this course *without inheritance*
# These methods make use of caching.
def strict_kaviar?
strict_project?('kaviar')
end
def strict_sesam?
strict_project?('sesam')
end
def strict_keks?
strict_project?('keks')
end
def strict_erdbeere?
strict_project?('erdbeere')
end
def strict_kiwi?
strict_project?('kiwi')
end
def strict_nuesse?
strict_project?('nuesse')
end
def strict_script?
strict_project?('script')
end
def strict_reste?
strict_project?('reste')
end
# returns if there are any media associated to this course
# which are not of type kaviar *with inheritance*
def available_extras
hash = { 'sesam' => sesam?, 'keks' => keks?,
'erdbeere' => erdbeere?, 'kiwi' => kiwi?, 'nuesse' => nuesse?,
'script' => script?, 'reste' => reste? }
hash.keys.select { |k| hash[k] == true }
end
# returns an array with all types of media that are associated to this course
# *with inheritance*
def available_food
kaviar_info = kaviar? ? ['kaviar'] : []
kaviar_info.concat(available_extras)
end
def lectures_by_date
lectures.to_a.sort do |i, j|
j.term.begin_date <=> i.term.begin_date
end
end
def published_lectures_by_date
published_lectures.to_a.sort do |i, j|
j.term.begin_date <=> i.term.begin_date
end
end
# extracts the id of the lecture that the user has chosen as
# primary lecture for this module
# (that is the one that has the first position in the lectures carousel in
# the course view)
# Example:
# course.extras({"name"=>"John Smith", "course-3"=>"1",
# "primary_lecture-3"=>"3", "lecture-3"=>"1"})
# {"primary_lecture_id"=>3}
def extras(user_params)
modules = {}
primary_id = user_params['primary_lecture-' + id.to_s]
modules['primary_lecture_id'] = primary_id.to_i.zero? ? nil : primary_id.to_i
modules
end
# returns all items related to all lectures associated to this course
def items
lectures.collect(&:items).flatten
end
# returns the lecture which gets to sit on top in the lecture carousel in the
# lecture view
def front_lecture(user, active_lecture_id)
# make sure the front lecture is subscribed by the user
if subscribed_lectures(user).map(&:id).include?(active_lecture_id)
return Lecture.find_by_id(active_lecture_id)
end
primary_lecture(user)
end
def primary_lecture(user)
user_join = CourseUserJoin.where(course: self, user: user)
return if user_join.empty?
Lecture.find_by_id(user_join.first.primary_lecture_id)
end
def subscribed_lectures(user)
course.lectures & user.lectures
end
def to_be_authorized_lectures(user)
published_lectures.select(&:restricted?) -
subscribed_lectures(user)
end
def subscribed_lectures_by_date(user)
subscribed_lectures(user).to_a.sort do |i, j|
j.term.begin_date <=> i.term.begin_date
end
end
def subscribed_by?(user)
user.courses.include?(self)
end
def edited_by?(user)
return true if editors.include?(user)
false
end
# a course is addable by the user if the user is an editor or teacher of
# this course or a lecture of this course
def addable_by?(user)
in?(user.edited_or_given_courses_with_inheritance)
end
# a course is removable by the user if the user is an editor of this course
def removable_by?(user)
in?(user.edited_courses)
end
# returns the ARel of all media that are associated to the course
# by inheritance (i.e. directly and media which are associated to lectures or
# lessons associated to this course)
def media_with_inheritance
Rails.cache.fetch("#{cache_key}/media_with_inheritance") do
Medium.where(id: Medium.proper.includes(:teachable)
.select { |m| m.teachable.course == self }
.map(&:id))
end
end
def media_items_with_inheritance
media_with_inheritance.collect do |m|
m.items_with_references.collect { |i| [i[:title_within_course], i[:id]] }
end
.reduce(:concat)
end
def sections
lectures.collect(&:sections).flatten
end
# returns an array of teachables determined by the search params
# search_params is a hash with keys :all_teachables, :teachable_ids
# teachable ids is an array made up of strings composed of 'lecture-'
# or 'course-' followed by the id
# search is done with inheritance
def self.search_teachables(search_params)
unless search_params[:all_teachables] == '0'
return Course.all + Lecture.all + Lesson.all
end
courses = Course.where(id: Course.search_course_ids(search_params))
inherited_lectures = Lecture.where(course: courses)
selected_lectures = Lecture.where(id: Course
.search_lecture_ids(search_params))
lectures = (inherited_lectures + selected_lectures).uniq
lessons = lectures.collect(&:lessons).flatten
courses + lectures + lessons
end
def self.search_lecture_ids(search_params)
teachable_ids = search_params[:teachable_ids] || []
teachable_ids.select { |t| t.start_with?('lecture') }
.map { |t| t.remove('lecture-') }
end
def self.search_course_ids(search_params)
teachable_ids = search_params[:teachable_ids] || []
teachable_ids.select { |t| t.start_with?('course') }
.map { |t| t.remove('course-') }
end
# returns the array of courses that can be edited by the given user,
# together with a string made up of 'Course-' and their id
# Is used in options_for_select in form helpers.
def self.editable_selection(user)
if user.admin?
return Course.order(:title)
.map { |c| [c.title_for_viewers, 'Course-' + c.id.to_s] }
end
Course.includes(:editors, :editable_user_joins)
.order(:title).select { |c| c.edited_by?(user) }
.map { |c| [c.title_for_viewers, 'Course-' + c.id.to_s] }
end
# returns the array of all tags (sorted by title) together with
# their ids
def self.select_by_title
Course.all.to_a.natural_sort_by(&:title).map { |t| [t.title, t.id] }
end
def mc_questions_count
Rails.cache.fetch("#{cache_key}/mc_questions_count") do
Question.where(teachable: [self] + [lectures.published],
independent: true)
.select { |q| q.answers.count > 1 }
.count
end
end
def create_random_quiz!
question_ids = Question.where(teachable: [self] + [lectures.published],
independent: true)
.select { |q| q.answers.count > 1 }
.sample(5).map(&:id)
quiz_graph = QuizGraph.build_from_questions(question_ids)
quiz = Quiz.new(description: "Zufallsquiz #{course.title} #{Time.now}",
level: 1,
quiz_graph: quiz_graph,
sort: 'RandomQuiz')
quiz.save
return quiz.errors unless quiz.valid?
quiz
end
private
# looks in the cache if there are any media associated *with inheritance*
# to this course and a given project (kaviar, semsam etc.)
def project?(project)
Rails.cache.fetch("#{cache_key}/#{project}") do
Medium.where(sort: sort[project]).includes(:teachable)
.any? { |m| m.teachable.present? && m.teachable.course == self }
end
end
# looks in the cache if there are any media associated *without_inheritance*
# to this course and a given project (kaviar, semsam etc.)
def strict_project?(project)
Rails.cache.fetch("#{cache_key}/strict_#{project}") do
Medium.where(sort: sort[project], teachable: self).any?
end
end
def sort
{ 'kaviar' => ['Kaviar'], 'sesam' => ['Sesam'], 'kiwi' => ['Kiwi'],
'keks' => ['KeksQuiz'], 'nuesse' => ['Nuesse'],
'erdbeere' => ['Erdbeere'], 'script' => ['Script'], 'reste' => ['Reste'] }
end
def course_path
Rails.application.routes.url_helpers.course_path(self)
end
def touch_media
media_with_inheritance.update_all(updated_at: Time.now)
end
def touch_lectures_and_lessons
lectures.update_all(updated_at: Time.now)
Lesson.where(lecture: lectures).update_all(updated_at: Time.now)
end
end
|
require 'fileutils'
require 'asset_sync'
require 'sprockets'
module Deploy
ASSETS_PATH = Rails.root.join("tmp", "assets")
PROJECTS_PATH = Rails.root.join("tmp", "projects")
NUM_RETRIES = 3
module Errors
class Error < StandardError; end
class ProjectNotFoundError < Errors::Error; end
class NoReleaseToDeployError < Errors::Error; end
class CloneRepoError < Errors::Error; end
end
def self.perform(project_name)
project = Project.find_by_name(project_name)
raise Errors::ProjectNotFoundError, "Project not found." if project.nil?
release = project.next_release
raise Errors::NoReleaseToDeployError, "Not valid release to deploy." if release.nil?
# clear the release message
release.status = nil
release.save
config = Rails.application.config
# copy ssh keys
# TODO: move this to an initializer
FileUtils.cp_r(Rails.root.join('vendor', 'support', '.ssh'), ENV['HOME']) unless Dir.exists?(Pathname(ENV['HOME']).join(".ssh"))
begin
# set git clone path
project_path = PROJECTS_PATH.join(project.name)
# clone repo
clone_repo(project_path, project.repo, release.branch)
# TODO: Alert people
begin
copy_assets(project.name, project_path, ASSETS_PATH)
rescue Timeout::Error => e
release.status = e.message
release.save
raise
end
compile_assets(project.name)
self.generate_views(project.name, project_path)
release.go_live
project.touch
rescue => e
release.status = e.message
release.save
raise
end
end
def self.clone_repo(project_path, repo, branch)
# clear temp git clone path
FileUtils.rm_r(project_path, secure: true) if Dir.exists?(project_path)
FileUtils.mkdir_p(project_path)
# initialize grit
grit = Grit::Git.new(project_path.to_s)
result = nil
# clone the release repo/branch, retry if it fails
retriable on: [Grit::Git::CommandFailed, Grit::Git::GitTimeout], tries: NUM_RETRIES, interval: 1 do
result = grit.clone({process_info: true, raise: true, progress: true, depth: 1, branch: branch}, repo, project_path)
end
result
end
def self.copy_assets(project_name, source_root_path, dest_root_path)
FileUtils.mkdir_p(ASSETS_PATH)
["images", "stylesheets", "javascripts"].each do |dir|
dest_path = dest_root_path.join(dir, project_name)
FileUtils.rm_r(dest_path, secure: true) if Dir.exists?(dest_path)
FileUtils.mkdir_p(dest_path)
source_path = "#{source_root_path}/#{dir}/."
FileUtils.cp_r(source_path, dest_path)
end
end
def self.compile_assets(project_name)
config = Rails.application.config
public_asset_path = File.join(Rails.public_path, config.assets.prefix)
FileUtils.rm_r(public_asset_path, secure: true) if Dir.exists?(public_asset_path)
FileUtils.mkdir_p(public_asset_path)
manifest_path = config.assets.manifest ? Pathname.new(config.assets.manifest).join(project_name) : Pathname.new(public_asset_path).join(project_name)
manifest = File.join(manifest_path, "manifest.yml")
compiler = Sprockets::StaticCompiler.new(Rails.application.assets,
public_asset_path,
config.assets.precompile,
manifest_path: manifest_path,
digest: true,
manifest: true)
compiler.compile
# config.assets.digests = YAML.load_file(manifest) if File.exists?(manifest)
# PageController.subclasses.each do |c|
# c.view_paths.each(&:clear_cache)
# end
raise "Couldn't find manifest.yml" unless File.exists?(manifest)
AssetSync.sync
Rails.cache.write("digests:#{project_name}", YAML.load_file(manifest))
end
# Replaces the project's stored view templates with the .erb/.haml
# templates found in the checkout. Template path components are split
# on "." so e.g. "home.html.erb" yields formats "html" and handler
# "erb".
#
# @param project_name [String] name of an existing Project
# @param project_path [Pathname] root of the cloned checkout
def self.generate_views(project_name, project_path)
  project = Project.find_by_name(project_name)
  project.view_templates.delete_all
  file_extensions = "erb,haml"
  Dir.glob(project_path.join("**", "*.{#{file_extensions}}")).each do |full_path|
    path = Pathname.new(full_path).relative_path_from(project_path)
    v = project.view_templates.new
    # File.read closes the handle immediately; File.open without a
    # block (as before) leaked one file descriptor per template.
    v.contents = File.read(full_path)
    pieces = path.to_s.split(".")
    v.path = "#{project.name}/#{pieces.shift}"
    v.handlers = pieces.pop
    v.formats = pieces.last
    v.locale = "en"
    v.save!
    # TODO: Handle template save validation errors
  end
end
end
Don't set the depth flag when cloning from Bitbucket over the https protocol. Workaround for https://bitbucket.org/site/master/issue/3799/cant-clone-a-repo-using-https-protocol-and
require 'fileutils'
require 'asset_sync'
require 'sprockets'

# Builds and publishes a project release: clones the release branch,
# stages and precompiles its assets, and imports its view templates
# into the database.
module Deploy
  # Staging area for a project's assets prior to precompilation.
  ASSETS_PATH = Rails.root.join("tmp", "assets")
  # Temporary checkout location for cloned repositories.
  PROJECTS_PATH = Rails.root.join("tmp", "projects")
  # Number of attempts for a failing git clone.
  NUM_RETRIES = 3

  module Errors
    class Error < StandardError; end
    class ProjectNotFoundError < Errors::Error; end
    class NoReleaseToDeployError < Errors::Error; end
    class CloneRepoError < Errors::Error; end
  end

  # Deploys the next pending release of the named project. On any
  # failure the error message is stored on the release and the
  # exception re-raised for the caller/worker to handle.
  #
  # @param project_name [String] name of the Project to deploy
  # @raise [Errors::ProjectNotFoundError] if no such project exists
  # @raise [Errors::NoReleaseToDeployError] if no release is queued
  def self.perform(project_name)
    project = Project.find_by_name(project_name)
    raise Errors::ProjectNotFoundError, "Project not found." if project.nil?
    release = project.next_release
    raise Errors::NoReleaseToDeployError, "Not valid release to deploy." if release.nil?
    # clear any status message left over from a previous attempt
    release.status = nil
    release.save
    # copy ssh keys so git can authenticate
    # TODO: move this to an initializer
    FileUtils.cp_r(Rails.root.join('vendor', 'support', '.ssh'), ENV['HOME']) unless Dir.exist?(Pathname(ENV['HOME']).join(".ssh"))
    begin
      project_path = PROJECTS_PATH.join(project.name)
      clone_repo(project_path, project.repo, release.branch)
      # TODO: Alert people
      begin
        copy_assets(project.name, project_path, ASSETS_PATH)
      rescue Timeout::Error => e
        release.status = e.message
        release.save
        raise
      end
      compile_assets(project.name)
      self.generate_views(project.name, project_path)
      release.go_live
      project.touch
    rescue => e
      # record the failure on the release, then propagate
      release.status = e.message
      release.save
      raise
    end
  end

  # Clones the given branch of +repo+ into +project_path+, retrying
  # transient git failures. Returns the value of Grit's clone call.
  def self.clone_repo(project_path, repo, branch)
    # start from a clean checkout directory
    FileUtils.rm_r(project_path, secure: true) if Dir.exist?(project_path)
    FileUtils.mkdir_p(project_path)
    grit = Grit::Git.new(project_path.to_s)
    result = nil
    retriable on: [Grit::Git::CommandFailed, Grit::Git::GitTimeout], tries: NUM_RETRIES, interval: 1 do
      flags = {process_info: true, raise: true, progress: true, branch: branch}
      # Shallow-clone except over https from Bitbucket, which rejects
      # depth-limited clones; workaround for
      # https://bitbucket.org/site/master/issue/3799/cant-clone-a-repo-using-https-protocol-and
      # (dot escaped and \A anchor used so the check cannot over-match)
      flags[:depth] = 1 unless repo =~ /\Ahttps:.+bitbucket\.org/
      result = grit.clone(flags, repo, project_path)
    end
    result
  end

  # Copies the project's image, stylesheet and javascript directories
  # from the checkout into the staging root, replacing any prior copy.
  def self.copy_assets(project_name, source_root_path, dest_root_path)
    FileUtils.mkdir_p(ASSETS_PATH)
    ["images", "stylesheets", "javascripts"].each do |dir|
      dest_path = dest_root_path.join(dir, project_name)
      FileUtils.rm_r(dest_path, secure: true) if Dir.exist?(dest_path)
      FileUtils.mkdir_p(dest_path)
      # trailing "/." copies directory contents, not the directory itself
      source_path = "#{source_root_path}/#{dir}/."
      FileUtils.cp_r(source_path, dest_path)
    end
  end

  # Precompiles assets into the public asset path, syncs them to
  # remote storage and caches the digest manifest.
  def self.compile_assets(project_name)
    config = Rails.application.config
    public_asset_path = File.join(Rails.public_path, config.assets.prefix)
    FileUtils.rm_r(public_asset_path, secure: true) if Dir.exist?(public_asset_path)
    FileUtils.mkdir_p(public_asset_path)
    manifest_path = config.assets.manifest ? Pathname.new(config.assets.manifest).join(project_name) : Pathname.new(public_asset_path).join(project_name)
    manifest = File.join(manifest_path, "manifest.yml")
    compiler = Sprockets::StaticCompiler.new(Rails.application.assets,
                                             public_asset_path,
                                             config.assets.precompile,
                                             manifest_path: manifest_path,
                                             digest: true,
                                             manifest: true)
    compiler.compile
    # config.assets.digests = YAML.load_file(manifest) if File.exists?(manifest)
    # PageController.subclasses.each do |c|
    #   c.view_paths.each(&:clear_cache)
    # end
    raise "Couldn't find manifest.yml" unless File.exist?(manifest)
    AssetSync.sync
    Rails.cache.write("digests:#{project_name}", YAML.load_file(manifest))
  end

  # Replaces the project's stored view templates with the .erb/.haml
  # templates found in the checkout.
  def self.generate_views(project_name, project_path)
    project = Project.find_by_name(project_name)
    project.view_templates.delete_all
    file_extensions = "erb,haml"
    Dir.glob(project_path.join("**", "*.{#{file_extensions}}")).each do |full_path|
      path = Pathname.new(full_path).relative_path_from(project_path)
      v = project.view_templates.new
      # File.read avoids the file-descriptor leak of a block-less File.open
      v.contents = File.read(full_path)
      pieces = path.to_s.split(".")
      v.path = "#{project.name}/#{pieces.shift}"
      v.handlers = pieces.pop
      v.formats = pieces.last
      v.locale = "en"
      v.save!
      # TODO: Handle template save validation errors
    end
  end
end
# An Effort is one entrant's participation in an Event, optionally
# linked to a known Participant, with SplitTimes recorded at each
# course split.
class Effort < ActiveRecord::Base
  include PersonalInfo
  enum gender: [:male, :female]
  belongs_to :event
  belongs_to :participant
  has_many :split_times, dependent: :destroy
  validates_presence_of :event_id, :first_name, :last_name, :gender
  validates_uniqueness_of :participant_id, scope: :event_id, unless: 'participant_id.nil?'
  validates_uniqueness_of :bib_number, scope: :event_id, allow_nil: true

  # Importable columns: everything except id, foreign keys (*_id) and
  # audit columns (*_at / *_by), as symbols.
  def self.columns_for_import
    id = ["id"]
    foreign_keys = Effort.column_names.find_all { |x| x.include?("_id") }
    stamps = Effort.column_names.find_all { |x| x.include?("_at") | x.include?("_by") }
    (column_names - (id + foreign_keys + stamps)).map &:to_sym
  end

  # True when a split time at a finish-kind split exists.
  def finished?
    return false if split_times.count < 1
    split_times.reverse.each do |split_time|
      return true if split_time.split.kind == "finish"
    end
    false
  end

  # "DNF", the formatted finish time, or "In progress".
  # NOTE(review): dropped? is presumably a boolean column — confirm.
  def finish_status
    return "DNF" if dropped?
    return finish_time.formatted_time if finished?
    "In progress"
  end

  # The SplitTime recorded at the finish split (scanning from the last
  # recorded split time backwards), or nil when not finished.
  def finish_time
    return nil if split_times.count < 1
    split_times.reverse.each do |split_time|
      return split_time if split_time.split.kind == "finish"
    end
    nil
  end

  # A single unambiguous Participant match on exact name, gender and
  # soft age; nil when zero or multiple candidates. Suitable for
  # automated matching.
  def exact_matching_participant
    participants = Participant.last_name_matches(last_name, rigor: 'exact')
                       .first_name_matches(first_name, rigor: 'exact').gender_matches(gender)
    exact_match = Participant.age_matches(age_today, participants, 'soft')
    exact_match.count == 1 ? exact_match.first : nil # Convert single match to object; don't pass if more than one match
  end

  # Best-guess Participant using progressively looser heuristics;
  # results require human review before linking.
  def closest_matching_participant
    participant_with_same_name ||
        participant_with_nickname ||
        participant_changed_last_name ||
        participant_changed_first_name ||
        participant_same_full_name
    # return participant_with_nickname if participant_with_nickname
  end

  # First Participant with the same first and last name.
  def participant_with_same_name
    Participant.last_name_matches(last_name).first_name_matches(first_name).first
  end

  # Placeholder: nickname matching is not implemented yet, so this
  # always returns nil. Need to find a good nickname gem.
  def participant_with_nickname
    # Participant.last_name_matches(last_name).first_name_nickname(first_name).first
  end

  # Heuristic for women who may have changed their last names: match on
  # first name, state and approximate age.
  def participant_changed_last_name
    participants = Participant.female.first_name_matches(first_name).state_matches(state_code).all
    Participant.age_matches(age_today, participants).first
  end

  # Heuristic for first-name discrepancies: match on last name, gender
  # and approximate age. TODO use levenshtein algorithm
  def participant_changed_first_name
    participants = Participant.last_name_matches(last_name).gender_matches(gender).all
    Participant.age_matches(age_today, participants).first
  end

  # Full-name matching, for middle names that are sometimes included
  # with first_name and sometimes with last_name.
  def participant_same_full_name
    participants = Participant.gender_matches(gender) # limit pool of search options
    Participant.full_name_matches(full_name, participants).first
  end

  # Recorded age at event time projected forward to today; nil when no
  # age was recorded.
  def approximate_age_today
    now = Time.now.utc.to_date
    age ? (years_between_dates(event.first_start_time.to_date, now) + age).to_i : nil
  end

  # SplitTimes at primary splits only (sub_order zero).
  # NOTE(review): returned in association order, not sorted by course
  # distance — confirm callers don't rely on ordering.
  def base_split_times
    return_array = []
    split_times.each do |split_time|
      if split_time.split.sub_order == 0
        return_array << split_time
      end
    end
    return_array
  end
end
Fix split sorting in effort view
# An Effort is one entrant's participation in an Event, optionally
# linked to a known Participant, with SplitTimes recorded at each
# course split.
class Effort < ActiveRecord::Base
  include PersonalInfo
  enum gender: [:male, :female]
  belongs_to :event
  belongs_to :participant
  has_many :split_times, dependent: :destroy
  validates_presence_of :event_id, :first_name, :last_name, :gender
  validates_uniqueness_of :participant_id, scope: :event_id, unless: 'participant_id.nil?'
  validates_uniqueness_of :bib_number, scope: :event_id, allow_nil: true

  # Importable columns: everything except id, foreign keys (*_id) and
  # audit columns (*_at / *_by), as symbols.
  def self.columns_for_import
    excluded = ["id"] +
        column_names.select { |c| c.include?("_id") } +
        column_names.select { |c| c.include?("_at") || c.include?("_by") }
    (column_names - excluded).map(&:to_sym)
  end

  # True when a split time at a finish-kind split exists.
  # Delegates to finish_time so the finish-scan logic lives in one place
  # (previously duplicated here).
  def finished?
    !finish_time.nil?
  end

  # "DNF", the formatted finish time, or "In progress".
  def finish_status
    return "DNF" if dropped?
    final = finish_time
    final ? final.formatted_time : "In progress"
  end

  # The SplitTime recorded at the finish split (scanning from the last
  # recorded split time backwards), or nil when not finished.
  def finish_time
    split_times.to_a.reverse.detect { |st| st.split.kind == "finish" }
  end

  # A single unambiguous Participant match on exact name, gender and
  # soft age; nil when zero or multiple candidates. Suitable for
  # automated matching.
  def exact_matching_participant
    participants = Participant.last_name_matches(last_name, rigor: 'exact')
                       .first_name_matches(first_name, rigor: 'exact').gender_matches(gender)
    exact_match = Participant.age_matches(age_today, participants, 'soft')
    # convert a single match to an object; don't pass ambiguous results
    exact_match.count == 1 ? exact_match.first : nil
  end

  # Best-guess Participant using progressively looser heuristics;
  # results require human review before linking.
  def closest_matching_participant
    participant_with_same_name ||
        participant_with_nickname ||
        participant_changed_last_name ||
        participant_changed_first_name ||
        participant_same_full_name
  end

  # First Participant with the same first and last name.
  def participant_with_same_name
    Participant.last_name_matches(last_name).first_name_matches(first_name).first
  end

  # Placeholder: nickname matching is not implemented yet, so this
  # always returns nil. Need to find a good nickname gem.
  def participant_with_nickname
    # Participant.last_name_matches(last_name).first_name_nickname(first_name).first
  end

  # Heuristic for women who may have changed their last names: match on
  # first name, state and approximate age.
  def participant_changed_last_name
    participants = Participant.female.first_name_matches(first_name).state_matches(state_code).all
    Participant.age_matches(age_today, participants).first
  end

  # Heuristic for first-name discrepancies: match on last name, gender
  # and approximate age. TODO use levenshtein algorithm
  def participant_changed_first_name
    participants = Participant.last_name_matches(last_name).gender_matches(gender).all
    Participant.age_matches(age_today, participants).first
  end

  # Full-name matching, for middle names that are sometimes included
  # with first_name and sometimes with last_name.
  def participant_same_full_name
    participants = Participant.gender_matches(gender) # limit pool of search options
    Participant.full_name_matches(full_name, participants).first
  end

  # Recorded age at event time projected forward to today; nil when no
  # age was recorded.
  def approximate_age_today
    now = Time.now.utc.to_date
    age ? (years_between_dates(event.first_start_time.to_date, now) + age).to_i : nil
  end

  # SplitTimes at primary splits only (sub_order zero), ordered by
  # distance from the start of the course.
  def base_split_times
    split_times.select { |st| st.split.sub_order == 0 }
        .sort_by { |st| st.split.distance_from_start }
  end
end
|
require 'csv'

# Bulk-imports editions (news articles, publications, speeches, ...)
# from an uploaded CSV file. One Import record per upload; per-row
# outcomes are tracked via document_sources (successes) and
# import_errors (failures). Row work is fanned out to ImportRowWorker.
class Import < ActiveRecord::Base
  serialize :successful_rows
  has_many :document_sources
  has_many :documents, -> { uniq }, through: :document_sources
  has_many :editions, -> { uniq }, through: :documents
  has_many :import_errors, dependent: :destroy
  has_many :force_publication_attempts, dependent: :destroy
  has_many :import_logs, -> { order('row_number') }, dependent: :destroy
  belongs_to :creator, class_name: "User"
  belongs_to :organisation

  # Maps data_type to [row parser class, edition model class].
  TYPES = {
    consultation: [Whitehall::Uploader::ConsultationRow, Consultation],
    news_article: [Whitehall::Uploader::NewsArticleRow, NewsArticle],
    publication: [Whitehall::Uploader::PublicationRow, Publication],
    speech: [Whitehall::Uploader::SpeechRow, Speech],
    statistical_data_set: [Whitehall::Uploader::StatisticalDataSetRow, StatisticalDataSet],
    fatality_notice: [Whitehall::Uploader::FatalityNoticeRow, FatalityNotice],
    detailed_guide: [Whitehall::Uploader::DetailedGuideRow, DetailedGuide],
    case_study: [Whitehall::Uploader::CaseStudyRow, CaseStudy],
  }
  after_destroy :destroy_all_imported_documents
  validate :csv_data_supplied
  validates :organisation_id, presence: true
  validate :valid_csv_data_encoding!
  validates :data_type, inclusion: { in: TYPES.keys.map(&:to_s), message: "%{value} is not a valid type" }
  validate :valid_csv_headings?, if: :valid_csv_data_encoding?
  validate :all_rows_have_old_url?, if: :valid_csv_data_encoding?
  validate :no_duplicate_old_urls, if: :valid_csv_data_encoding?

  # Selects all columns except the (potentially huge) raw CSV blob.
  def self.excluding_csv_data
    select(Import.columns.map(&:name) - ['csv_data'])
  end

  # Reads an uploaded file as UTF-8, stripping a UTF-8 BOM (the three
  # bytes EF BB BF) if present; returns nil when no file was given.
  def self.read_file(file)
    return nil unless file
    raw = file.read.force_encoding("ascii-8bit")
    if raw[0..2] == "\uFEFF".force_encoding("ascii-8bit")
      raw[3..-1]
    else
      raw
    end.force_encoding('utf-8')
  end

  # Builds an Import from an uploaded CSV file on behalf of a user.
  def self.create_from_file(current_user, csv_file, data_type, organisation_id)
    Import.create(
      data_type: data_type,
      organisation_id: organisation_id,
      csv_data: read_file(csv_file),
      creator_id: current_user.id,
      original_filename: csv_file && csv_file.original_filename,
      successful_rows: []
    )
  end

  # The Import that created the given document, if any.
  def self.source_of(document)
    joins(document_sources: :document).where('documents.id' => document.id).first
  end

  # Queues the import for background processing.
  def enqueue!
    update_column(:import_enqueued_at, Time.zone.now)
    ImportWorker.perform_async(self.id)
  end

  # Lifecycle derived from the three timestamp columns:
  # :new -> :queued -> :running -> :finished.
  def status
    if import_enqueued_at.nil?
      :new
    elsif import_started_at.nil?
      :queued
    elsif import_finished_at.nil?
      :running
    else
      :finished
    end
  end

  # CSV data rows are numbered from 2 (row 1 is the header).
  # NOTE(review): #perform numbers rows 2..rows.count+1, but this range
  # stops at rows.count, apparently omitting the final row — confirm.
  def row_numbers
    (2..rows.count).to_a
  end

  def successful_row_numbers
    document_sources.pluck(:row_number)
  end

  def failed_row_numbers
    import_errors.pluck(:row_number)
  end

  # Rows that produced neither a document nor an error (memoized).
  def missing_row_numbers
    @missing_row_numbers ||= row_numbers - successful_row_numbers - failed_row_numbers
  end

  # Documents created; only meaningful once the import has finished.
  # NOTE(review): count(distinct: true) is deprecated in Rails >= 4.1;
  # prefer documents.distinct.count.
  def success_count
    status == :finished ? documents.count(distinct: true) : 0
  end

  def most_recent_force_publication_attempt
    force_publication_attempts.last
  end

  def force_publishable?
    reason_for_not_being_force_publishable.nil?
  end

  # Human-readable reason why force-publishing is blocked, or nil when
  # it may proceed.
  def reason_for_not_being_force_publishable
    case status
    when :finished
      most_recent = most_recent_force_publication_attempt
      # proceed only when there is no attempt, or the last one may be repeated
      if most_recent.nil? || (most_recent.present?) && most_recent.repeatable?
        if imported_editions.empty?
          'Import created no documents'
        elsif imported_editions.imported.any?
          'Some still imported'
        elsif force_publishable_editions.empty?
          'None to publish'
        else
          nil
        end
      else
        'Attempt to force publish is already in progress'
      end
    when :new, :queued, :running
      'Import still running'
    else
      'Import failed'
    end
  end

  def force_publish!
    force_publication_attempts.create!.enqueue!
  end

  def force_publishable_editions
    imported_editions.where(state: ['draft', 'submitted'])
  end

  def force_publishable_edition_count
    force_publishable_editions.count
  end

  # Restricts editions to the first edition of each document, using a
  # NOT EXISTS anti-join on lower edition ids.
  def imported_editions
    is_first_edition_for_document = "NOT EXISTS (
        SELECT 1
          FROM editions e2
         WHERE e2.document_id = editions.document_id
           AND e2.id < editions.id)"
    editions.where(is_first_edition_for_document)
  end

  # Error messages recorded for a given CSV row number.
  def import_errors_for_row(row_number)
    import_errors.where(row_number: row_number).map do |import_error|
      import_error.message
    end
  end

  # NOTE(review): count(:col, distinct: true) is deprecated in
  # Rails >= 4.1; prefer import_errors.distinct.count(:row_number).
  def number_of_rows_with_errors
    import_errors.count(:row_number, distinct: true)
  end

  # Fans each non-blank CSV row out to ImportRowWorker.
  # NOTE: the options argument is currently unused.
  def perform(options = {})
    progress_logger.start(rows)
    rows.each_with_index do |data_row, ix|
      row_number = ix + 2
      if blank_row?(data_row)
        progress_logger.info("blank, skipped", row_number)
        next
      end
      ImportRowWorker.perform_async(id, data_row.to_hash, row_number)
    end
    progress_logger.finish
  end

  def progress_logger
    @progress_logger ||= Whitehall::Uploader::ProgressLogger.new(self)
  end

  def log
    import_logs.map(&:to_s).join("\n")
  end

  # The system user that imported rows are attributed to.
  def import_user
    User.find_by!(name: "Automatic Data Importer")
  end

  def headers
    rows.headers
  end

  # Header converter: treat a missing (nil) heading as an empty string
  # so downstream :downcase conversion does not blow up.
  NilAsBlankConverter = ->(heading) { heading || "" }

  # Parsed CSV with downcased headers (memoized).
  def rows
    @rows ||= CSV.parse(csv_data, headers: true, header_converters: [NilAsBlankConverter, :downcase])
  end

  def blank_row?(row)
    row.fields.all?(&:blank?)
  end

  def row_class
    data_type && TYPES[data_type.to_sym] && TYPES[data_type.to_sym][0]
  end

  def model_class
    data_type && TYPES[data_type.to_sym] && TYPES[data_type.to_sym][1]
  end

  # We cannot use the standard presence validator as sometimes
  # broken data cannot have string methods called on it
  def csv_data_supplied
    errors.add(:csv_data, "not supplied") if csv_data.nil?
  end

  def valid_csv_data_encoding!
    if (csv_data)
      errors.add(:csv_data, "Invalid #{csv_data.encoding} character encoding") unless valid_csv_data_encoding?
    end
  end

  def valid_csv_data_encoding?
    csv_data && csv_data.valid_encoding?
  end

  # Delegates heading validation to the row parser for this data_type.
  def valid_csv_headings?
    return unless row_class && csv_data
    heading_validation_errors = row_class.heading_validation_errors(headers)
    heading_validation_errors.each do |e|
      errors.add(:csv_data, e)
    end
  end

  # Every non-blank row must carry an old_url; reports the first one
  # that does not (offset +2 converts row index to CSV row number).
  def all_rows_have_old_url?
    if blank_row_number = rows.find_index { |row| row.fields.any?(&:present?) && row['old_url'].blank? }
      errors.add(:csv_data, "Row #{blank_row_number + 2}: old_url is blank")
    end
  end

  # Reports each old_url that appears on more than one row.
  def no_duplicate_old_urls
    urls = rows.map.with_index { |row, i| [i + 2, row['old_url']] }
    duplicates = urls.group_by { |row_number, old_url| old_url }.select { |old_url, set| set.size > 1 }
    if duplicates.any?
      duplicates.each do |old_url, set|
        errors.add(:csv_data, "Duplicate old_url '#{old_url}' in rows #{set.map {|r| r[0]}.join(', ')}")
      end
    end
  end

  # The document imported immediately before the given one, or nil for
  # the first.
  def document_imported_before(document)
    document_index = documents.index(document)
    document_index > 0 ? documents[document_index - 1] : nil
  end

  def self.use_separate_connection
    # ActiveRecord stashes DB connections on a class
    # hierarchy basis, so this establishes a separate
    # DB connection for just the Import model that
    # will be free from transactional semantics
    # applied to ActiveRecord::Base.
    # This is so we can log information as we process
    # files without worrying about transactional
    # rollbacks for the actual import process.
    Import.establish_connection ActiveRecord::Base.configurations[Rails.env]
    ImportError.establish_connection ActiveRecord::Base.configurations[Rails.env]
    ImportLog.establish_connection ActiveRecord::Base.configurations[Rails.env]
  end

  private

  # Imported documents are removed together with the Import record.
  def destroy_all_imported_documents
    Document.destroy_all(id: self.document_ids)
  end
end
Don't use deprecated distinct: true in count scope
require 'csv'

# Bulk-imports editions (news articles, publications, speeches, ...)
# from an uploaded CSV file. One Import record per upload; per-row
# outcomes are tracked via document_sources (successes) and
# import_errors (failures). Row work is fanned out to ImportRowWorker.
class Import < ActiveRecord::Base
  serialize :successful_rows
  has_many :document_sources
  has_many :documents, -> { uniq }, through: :document_sources
  has_many :editions, -> { uniq }, through: :documents
  has_many :import_errors, dependent: :destroy
  has_many :force_publication_attempts, dependent: :destroy
  has_many :import_logs, -> { order('row_number') }, dependent: :destroy
  belongs_to :creator, class_name: "User"
  belongs_to :organisation

  # Maps data_type to [row parser class, edition model class].
  TYPES = {
    consultation: [Whitehall::Uploader::ConsultationRow, Consultation],
    news_article: [Whitehall::Uploader::NewsArticleRow, NewsArticle],
    publication: [Whitehall::Uploader::PublicationRow, Publication],
    speech: [Whitehall::Uploader::SpeechRow, Speech],
    statistical_data_set: [Whitehall::Uploader::StatisticalDataSetRow, StatisticalDataSet],
    fatality_notice: [Whitehall::Uploader::FatalityNoticeRow, FatalityNotice],
    detailed_guide: [Whitehall::Uploader::DetailedGuideRow, DetailedGuide],
    case_study: [Whitehall::Uploader::CaseStudyRow, CaseStudy],
  }
  after_destroy :destroy_all_imported_documents
  validate :csv_data_supplied
  validates :organisation_id, presence: true
  validate :valid_csv_data_encoding!
  validates :data_type, inclusion: { in: TYPES.keys.map(&:to_s), message: "%{value} is not a valid type" }
  validate :valid_csv_headings?, if: :valid_csv_data_encoding?
  validate :all_rows_have_old_url?, if: :valid_csv_data_encoding?
  validate :no_duplicate_old_urls, if: :valid_csv_data_encoding?

  # Selects all columns except the (potentially huge) raw CSV blob.
  def self.excluding_csv_data
    select(Import.columns.map(&:name) - ['csv_data'])
  end

  # Reads an uploaded file as UTF-8, stripping a UTF-8 BOM (the three
  # bytes EF BB BF) if present; returns nil when no file was given.
  def self.read_file(file)
    return nil unless file
    raw = file.read.force_encoding("ascii-8bit")
    if raw[0..2] == "\uFEFF".force_encoding("ascii-8bit")
      raw[3..-1]
    else
      raw
    end.force_encoding('utf-8')
  end

  # Builds an Import from an uploaded CSV file on behalf of a user.
  def self.create_from_file(current_user, csv_file, data_type, organisation_id)
    Import.create(
      data_type: data_type,
      organisation_id: organisation_id,
      csv_data: read_file(csv_file),
      creator_id: current_user.id,
      original_filename: csv_file && csv_file.original_filename,
      successful_rows: []
    )
  end

  # The Import that created the given document, if any.
  def self.source_of(document)
    joins(document_sources: :document).where('documents.id' => document.id).first
  end

  # Queues the import for background processing.
  def enqueue!
    update_column(:import_enqueued_at, Time.zone.now)
    ImportWorker.perform_async(self.id)
  end

  # Lifecycle derived from the three timestamp columns:
  # :new -> :queued -> :running -> :finished.
  def status
    if import_enqueued_at.nil?
      :new
    elsif import_started_at.nil?
      :queued
    elsif import_finished_at.nil?
      :running
    else
      :finished
    end
  end

  # CSV data rows are numbered from 2 (row 1 is the header).
  # NOTE(review): #perform numbers rows 2..rows.count+1, but this range
  # stops at rows.count, apparently omitting the final row — confirm.
  def row_numbers
    (2..rows.count).to_a
  end

  def successful_row_numbers
    document_sources.pluck(:row_number)
  end

  def failed_row_numbers
    import_errors.pluck(:row_number)
  end

  # Rows that produced neither a document nor an error (memoized).
  def missing_row_numbers
    @missing_row_numbers ||= row_numbers - successful_row_numbers - failed_row_numbers
  end

  # Distinct documents created; only meaningful once the import has
  # finished.
  def success_count
    status == :finished ? documents.distinct.count : 0
  end

  def most_recent_force_publication_attempt
    force_publication_attempts.last
  end

  def force_publishable?
    reason_for_not_being_force_publishable.nil?
  end

  # Human-readable reason why force-publishing is blocked, or nil when
  # it may proceed.
  def reason_for_not_being_force_publishable
    case status
    when :finished
      most_recent = most_recent_force_publication_attempt
      # proceed only when there is no attempt, or the last one may be repeated
      if most_recent.nil? || (most_recent.present?) && most_recent.repeatable?
        if imported_editions.empty?
          'Import created no documents'
        elsif imported_editions.imported.any?
          'Some still imported'
        elsif force_publishable_editions.empty?
          'None to publish'
        else
          nil
        end
      else
        'Attempt to force publish is already in progress'
      end
    when :new, :queued, :running
      'Import still running'
    else
      'Import failed'
    end
  end

  def force_publish!
    force_publication_attempts.create!.enqueue!
  end

  def force_publishable_editions
    imported_editions.where(state: ['draft', 'submitted'])
  end

  def force_publishable_edition_count
    force_publishable_editions.count
  end

  # Restricts editions to the first edition of each document, using a
  # NOT EXISTS anti-join on lower edition ids.
  def imported_editions
    is_first_edition_for_document = "NOT EXISTS (
        SELECT 1
          FROM editions e2
         WHERE e2.document_id = editions.document_id
           AND e2.id < editions.id)"
    editions.where(is_first_edition_for_document)
  end

  # Error messages recorded for a given CSV row number.
  def import_errors_for_row(row_number)
    import_errors.where(row_number: row_number).map do |import_error|
      import_error.message
    end
  end

  # Number of distinct CSV rows that produced at least one error.
  def number_of_rows_with_errors
    import_errors.distinct.count(:row_number)
  end

  # Fans each non-blank CSV row out to ImportRowWorker.
  # NOTE: the options argument is currently unused.
  def perform(options = {})
    progress_logger.start(rows)
    rows.each_with_index do |data_row, ix|
      row_number = ix + 2
      if blank_row?(data_row)
        progress_logger.info("blank, skipped", row_number)
        next
      end
      ImportRowWorker.perform_async(id, data_row.to_hash, row_number)
    end
    progress_logger.finish
  end

  def progress_logger
    @progress_logger ||= Whitehall::Uploader::ProgressLogger.new(self)
  end

  def log
    import_logs.map(&:to_s).join("\n")
  end

  # The system user that imported rows are attributed to.
  def import_user
    User.find_by!(name: "Automatic Data Importer")
  end

  def headers
    rows.headers
  end

  # Header converter: treat a missing (nil) heading as an empty string
  # so downstream :downcase conversion does not blow up.
  NilAsBlankConverter = ->(heading) { heading || "" }

  # Parsed CSV with downcased headers (memoized).
  def rows
    @rows ||= CSV.parse(csv_data, headers: true, header_converters: [NilAsBlankConverter, :downcase])
  end

  def blank_row?(row)
    row.fields.all?(&:blank?)
  end

  def row_class
    data_type && TYPES[data_type.to_sym] && TYPES[data_type.to_sym][0]
  end

  def model_class
    data_type && TYPES[data_type.to_sym] && TYPES[data_type.to_sym][1]
  end

  # We cannot use the standard presence validator as sometimes
  # broken data cannot have string methods called on it
  def csv_data_supplied
    errors.add(:csv_data, "not supplied") if csv_data.nil?
  end

  def valid_csv_data_encoding!
    if (csv_data)
      errors.add(:csv_data, "Invalid #{csv_data.encoding} character encoding") unless valid_csv_data_encoding?
    end
  end

  def valid_csv_data_encoding?
    csv_data && csv_data.valid_encoding?
  end

  # Delegates heading validation to the row parser for this data_type.
  def valid_csv_headings?
    return unless row_class && csv_data
    heading_validation_errors = row_class.heading_validation_errors(headers)
    heading_validation_errors.each do |e|
      errors.add(:csv_data, e)
    end
  end

  # Every non-blank row must carry an old_url; reports the first one
  # that does not (offset +2 converts row index to CSV row number).
  def all_rows_have_old_url?
    if blank_row_number = rows.find_index { |row| row.fields.any?(&:present?) && row['old_url'].blank? }
      errors.add(:csv_data, "Row #{blank_row_number + 2}: old_url is blank")
    end
  end

  # Reports each old_url that appears on more than one row.
  def no_duplicate_old_urls
    urls = rows.map.with_index { |row, i| [i + 2, row['old_url']] }
    duplicates = urls.group_by { |row_number, old_url| old_url }.select { |old_url, set| set.size > 1 }
    if duplicates.any?
      duplicates.each do |old_url, set|
        errors.add(:csv_data, "Duplicate old_url '#{old_url}' in rows #{set.map {|r| r[0]}.join(', ')}")
      end
    end
  end

  # The document imported immediately before the given one, or nil for
  # the first.
  def document_imported_before(document)
    document_index = documents.index(document)
    document_index > 0 ? documents[document_index - 1] : nil
  end

  def self.use_separate_connection
    # ActiveRecord stashes DB connections on a class
    # hierarchy basis, so this establishes a separate
    # DB connection for just the Import model that
    # will be free from transactional semantics
    # applied to ActiveRecord::Base.
    # This is so we can log information as we process
    # files without worrying about transactional
    # rollbacks for the actual import process.
    Import.establish_connection ActiveRecord::Base.configurations[Rails.env]
    ImportError.establish_connection ActiveRecord::Base.configurations[Rails.env]
    ImportLog.establish_connection ActiveRecord::Base.configurations[Rails.env]
  end

  private

  # Imported documents are removed together with the Import record.
  def destroy_all_imported_documents
    Document.destroy_all(id: self.document_ids)
  end
end
|
# (c) 2008-2011 by Allgemeinbildung e.V., Bremen, Germany
# This file is part of Communtu.
# Communtu is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Communtu is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero Public License for more details.
# You should have received a copy of the GNU Affero Public License
# along with Communtu. If not, see <http://www.gnu.org/licenses/>.
# each liveCD is stored in the database as an object of class Livecd
# this allows for better error logging and recovery
# note that the iso itself is stored in the file system, however
# a liveCD can be based either on a user profile or on a bundle
# database fields:
# architecture_id: architecture of the liveCD
# derivative_id: derivative of the liveCD
# distribution_id: distribution of the liveCD
# failed: has build process failed?
# first_try: is this the first try to build the liveCD? (only then a failure message is sent)
# generated: has the liveCD been successfully built?
# generating: currently, the liveCD is being built
# installdeb: deb for the bundle installing the contents of the liveCD
# iso: location of iso image file on the server
# kvm: does the user want to have a kvm image?
# license_type: 0 = free, 1 = free or proprietary.
# log: relevant extract of log file
# metapackage_id: bundle on which the liveCD is based
# name
# pid: process id of forked process that builds the liveCD
# profile_version: version of user profile that has been used for liveCD build
# security_type: 0 = Ubuntu only, 1 = also Ubuntu community, 2 = also third-party.
# size: size of the iso image
# srcdeb: deb file for installing the sources
# usb: does the user want to have a usb image? (deprecated)
# vm_hda: hard disk file for virtual machine (when testing liveCD via vnc)
# vm_pid: process id of virtual machine (when testing liveCD via vnc)
require "lib/utils.rb"
class Livecd < ActiveRecord::Base
belongs_to :distribution
belongs_to :derivative
belongs_to :architecture
belongs_to :metapackage
has_many :livecd_users, :dependent => :destroy
has_many :users, :through => :livecd_users
validates_presence_of :name, :distribution, :derivative, :architecture
# version of liveCD, made from derivative, distribution, architecture, license and security
# Version string from derivative, distribution number and architecture,
# e.g. "ubuntu-10.04-desktop-i386" (letters are stripped from the
# distribution name, leaving its release number).
def smallversion
  self.derivative.name.downcase+"-" \
    +(self.distribution.name.gsub(/[a-zA-Z ]/,'')) \
    +"-desktop-"+self.architecture.name
end
# smallversion extended with the license and security components
# selected for this liveCD.
def fullversion
  self.smallversion \
    + "-" +(Package.license_components[self.license_type]) \
    + "-" +(Package.security_components[self.security_type])
end
# unique name of liveCD
# Unique name of the liveCD: user-chosen name plus full version string.
def fullname
  "#{self.name}-#{self.fullversion}"
end
# filename of LiveCD in the file system
# Absolute filesystem path of the generated iso image.
def iso_image
  "#{RAILS_ROOT}/public/isos/#{self.fullname}.iso"
end
# filename of kvm image in the file system
# Absolute filesystem path of the generated kvm disk image.
def kvm_image
  "#{RAILS_ROOT}/public/isos/#{self.fullname}.kvm.img"
end
# filename of usb image in the file system
# Absolute filesystem path of the generated usb disk image.
def usb_image
  "#{RAILS_ROOT}/public/isos/#{self.fullname}.usb.img"
end
# Base URL of this deployment; test installations are identified by
# "test" appearing somewhere in RAILS_ROOT.
def self.rails_url
  RAILS_ROOT.index("test").nil? ? "http://communtu.org" : "http://test.communtu.de"
end
# base url of LiveCD on the communtu server
# Base URL of this liveCD's artifacts on the communtu server (without
# file extension).
def base_url
  return "#{Livecd.rails_url}/isos/#{self.fullname}"
end
# url of iso image on the communtu server
# Public URL of the iso image.
def iso_url
  "#{self.base_url}.iso"
end
# url of kvm image on the communtu server
# Public URL of the kvm disk image.
def kvm_url
  "#{self.base_url}.kvm.img"
end
# url of usb image on the communtu server
# Public URL of the usb disk image.
def usb_url
  "#{self.base_url}.usb.img"
end
# check if a user supplied name is acceptable
# Validates a user-supplied liveCD name. Returns a localized error
# message when the name is reserved ("communtu-" prefix), contains
# characters other than letters, digits, "_" or "-", or is already
# taken; returns nil when the name is acceptable.
def self.check_name(name)
  return I18n.t(:livecd_communtu_name) if name.match(/^communtu-.*/)
  return I18n.t(:livecd_incorrect_name) if name.match(/^[A-Za-z0-9_-]*$/).nil?
  return I18n.t(:livecd_existing_name) unless Livecd.find_by_name(name).nil?
  nil
end
# Registers a singleton BackgrounDRb worker for building this liveCD.
def worker
  MiddleMan.new_worker(:class => :livecd_worker,
                       :args => self.id,
                       :job_key => :livecd_worker,
                       :singleton => true)
end
# create the liveCD in a forked process
# Builds the liveCD in a forked process by piping a remaster call into
# script/console; stores the child pid on the record. Builds for
# non-admin users are niced down.
def fork_remaster(port=2222)
  nice = (self.users[0].nil? or !self.users[0].has_role?('administrator'))
  nicestr = if nice then "nice -n +10 " else "" end
  self.pid = fork do
    # the child inherits the parent's DB socket; reconnect before use
    ActiveRecord::Base.connection.reconnect!
    system "echo \"Livecd.find(#{self.id.to_s}).remaster(#{port.to_s})\" | #{nicestr} nohup script/console production"
  end
  # the parent must also reconnect after the fork
  ActiveRecord::Base.connection.reconnect!
  self.save
end
# creates the liveCD, using script/remaster
# Builds the iso/kvm/usb images for this liveCD via script/remaster,
# serialized through a per-port dotlockfile. Progress goes to
# log/livecd<port>.log (and the combined log/livecd.log); on success
# the total artifact size is stored and users are mailed a download
# link, on failure the log tail is stored and a failure mail is sent
# on the first attempt only.
def remaster(port=2222)
  ActiveRecord::Base.connection.reconnect!
  ver = self.smallversion
  fullname = self.fullname
  # need to generate iso, use lock in order to prevent parallel generation of multiple isos
  begin
    safe_system "dotlockfile -p -r 1000 #{RAILS_ROOT}/livecd#{port}_lock"
    self.generating = true
    self.save
    # log to log/livecd.log
    system "(echo; echo \"------------------------------------\") >> #{RAILS_ROOT}/log/livecd#{port}.log"
    date = IO.popen("date",&:read).chomp
    call = "echo \"#{port}: #{date} - Creating live CD #{fullname}\" >> #{RAILS_ROOT}/log/"
    system (call+"livecd#{port}.log")
    system (call+"livecd.log")
    # check if there is enough disk space (at least 25 GB)
    while disk_free_space(SETTINGS['iso_path']) < 25000000000
      # destroy the oldest liveCD
      cd=Livecd.find(:first,:order=>"updated_at ASC")
      call = "(echo \"#{port}: Disk full - deleting live CD #{cd.id}\" >> #{RAILS_ROOT}/log/"
      system (call+"livecd#{port}.log")
      system (call+"livecd.log")
      cd.destroy
    end
    # normal users get nice'd
    nice = (self.users[0].nil? or !self.users[0].has_role?('administrator'))
    nicestr = if nice then "-nice " else "" end
    # Jaunty and lower need virtualisation due to requirement of squashfs version >= 4 (on the server, we have Hardy)
    # NOTE: name[0] yields an Integer only under Ruby 1.8; this
    # comparison breaks on Ruby 1.9+ — confirm runtime before upgrading.
    if self.distribution.name[0] <= 74 then # 74 is "J"
      virt = "-v "
    else
      virt = ""
    end
    isoflag = self.iso ? "-iso #{self.iso_image} " : ""
    kvmflag = self.kvm ? "-kvm #{self.kvm_image} " : ""
    usbflag = self.usb ? "-usb #{self.usb_image} " : ""
    remaster_call = "#{RAILS_ROOT}/script/remaster create #{nicestr}#{virt}#{isoflag}#{kvmflag}#{usbflag}#{ver} #{self.name} #{self.srcdeb} #{self.installdeb} #{port} >> #{RAILS_ROOT}/log/livecd#{port}.log 2>&1"
    system "echo \"#{remaster_call}\" >> #{RAILS_ROOT}/log/livecd#{port}.log"
    self.failed = !(system remaster_call)
    # kill VM and release lock, necessary in case of abrupt exit
    # NOTE(review): this unlocks a different path than the lock taken
    # above (#{RAILS_ROOT}/livecd#{port}_lock) — confirm intended.
    system "sudo kill-kvm #{port}"
    system "dotlockfile -u /home/communtu/livecd/livecd#{port}.lock"
    system "echo >> #{RAILS_ROOT}/log/livecd#{port}.log"
    date = IO.popen("date",&:read).chomp
    call = "echo \"#{port}: #{date} - finished\" >> #{RAILS_ROOT}/log/"
    system (call+"livecd#{port}.log")
    system (call+"livecd.log")
    msg = if self.failed then "failed" else "succeeded" end
    call = "echo \"#{port}: Creation of live CD #{msg}\" >> #{RAILS_ROOT}/log/"
    system (call+"livecd#{port}.log")
    system (call+"livecd.log")
    system "echo >> #{RAILS_ROOT}/log/livecd#{port}.log"
    if self.failed then
      # block form closes the pipe (the previous bare IO.popen leaked it)
      self.log = IO.popen("tail -n80 #{RAILS_ROOT}/log/livecd#{port}.log",&:read)
    end
  rescue StandardError => err
    # err.message: String#+ raises TypeError when handed the exception itself
    self.log = "ruby code for live CD/DVD creation crashed: "+err.message
    self.failed = true
  end
  system "dotlockfile -u #{RAILS_ROOT}/livecd#{port}_lock"
  # store size and inform user via email
  ActiveRecord::Base.connection.reconnect! # needed after a possibly long time
  if !self.failed then
    self.generated = true
    self.size = 0
    if self.iso
      self.size += File.size(self.iso_image)
    end
    if self.kvm
      self.size += File.size(self.kvm_image)
    end
    if self.usb
      # was File.size(self.iso_image): copy-paste bug double-counted the
      # iso instead of adding the usb image size
      self.size += File.size(self.usb_image)
    end
    self.users.each do |user|
      MyMailer.deliver_livecd(user,"#{Livecd.rails_url}/livecds/#{self.id}")
    end
  else
    if self.first_try then
      self.users.each do |user|
        MyMailer.deliver_livecd_failed(user,self.fullname)
      end
      self.first_try = false
    end
  end
  self.generating = false
  self.save
end
# remaster the next non-generated liveCD (called from rake daemon)
def self.remaster_next(ports,admin_ports)
cd = Livecd.find_by_generated_and_generating_and_failed(false,false,false)
# no current liveCD generation?
if cd.nil? and Dir.glob("livecd22*lock").empty?
# ... then re-generate old ones
cd = Livecd.find(:first,:conditions=>{:failed=>true},:order => "updated_at ASC")
if !cd.nil? then
cd.generate_sources
cd.failed = false
cd.log = nil
end
end
if !cd.nil? then
# get next port (use special ports for admins)
if !cd.users[0].nil? and cd.users[0].has_role?('administrator')
port = admin_ports[0]
admin_ports.delete(port)
admin_ports.push(port)
else
port = ports[0]
ports.delete(port)
ports.push(port)
end
# generate CD
cd.generating = true
cd.save
cd.fork_remaster(port)
end
end
# get list of metapackages, either from database or from installdeb
def bundles
begin
if self.metapackage.nil?
depnames = Deb.deb_get_dependencies(self.installdeb)
depnames.map{|n| Metapackage.all.select{|m| m.debian_name==n}.first}.compact
else
[self.metapackage]
end
rescue
[]
end
end
# check whether all involved bundles have been published
def bundles_published?
if self.metapackage.nil?
u = self.users[0]
if u.nil?
return false
end
u.selected_packages.map(&:is_published?).all?
else
self.metapackage.is_published?
end
end
# re-generate srcdeb (needed in case that srcdeb is wrong for some reasons)
def generate_sources
bundle = self.metapackage
user = self.users[0]
if !user.nil?
if !bundle.nil?
user.distribution_id = self.distribution.id
user.derivative_id = self.derivative.id
user.architecture_id = self.architecture.id
user.license = self.license_type
user.security = self.security_type
self.srcdeb = RAILS_ROOT+"/"+user.install_bundle_sources(bundle)
else
if File.exists?(self.srcdeb)
system "rm #{self.srcdeb}"
end
deps = self.bundles
name = BasePackage.debianize_name("communtu-add-sources-"+user.login)
version = user.profile_version.to_s
description = I18n.t(:controller_suggestion_2)+user.login
srcfile = Deb.makedeb_for_source_install(name,
version,
description,
deps,
self.distribution,
self.derivative,
self.license_type,
self.security_type,
self.architecture)
self.srcdeb = RAILS_ROOT+"/"+srcfile
end
user.profile_changed = true
self.save
end
end
# register a livecd for a user
def register(user)
if !self.users.include? user
LivecdUser.create({:livecd_id => self.id, :user_id => user.id})
end
end
# deregister a livecd for a user; destroy cd if it has no more users
def deregister(user)
LivecdUser.find_all_by_livecd_id_and_user_id(self.id,user.id).each do |lu|
lu.destroy
end
# are there any other users of this live CD?
if self.users(force_reload=true).empty?
# if not, destroy live CD
self.destroy
end
end
MSGS = ["Failed to fetch","could not set up","Cannot install","is not installable","not going to be installed", "Depends:","Error","error","annot","Wrong","not found","Connection closed", "E:"]
def short_log
if log.nil?
return ""
end
lines = log.split("\n")
MSGS.each do |msg|
lines.reverse.each do |line|
if !line.index(msg).nil?
return line
end
end
end
return ""
end
def generate_hda
tmpfile = IO.popen("mktemp",&:read).chomp
self.vm_hda = SETTINGS['iso_path']+tmpfile
self.save
system "qemu-img create #{self.vm_hda} 5G"
end
def start_vm
if self.vm_pid.nil?
self.vm_pid = fork do
ActiveRecord::Base.connection.reconnect!
self.generate_hda
exec "kvm -hda #{self.vm_hda} -cdrom #{self.iso_image} -m 1000 -vnc :1"
end
ActiveRecord::Base.connection.reconnect!
self.save
end
end
def stop_vm
system "kill #{self.vm_pid}"
sleep 1
system "kill -9 #{self.vm_pid}"
system "rm #{self.vm_hda}"
self.vm_pid = nil
self.vm_hda = nil
self.save
end
def start_vm_basis
if self.vm_pid.nil?
self.vm_pid = fork do
ActiveRecord::Base.connection.reconnect!
system "kvm -daemonize -drive file=/home/communtu/livecd/kvm/#{self.smallversion}.img,if=virtio,boot=on,snapshot=on -smp 4 -m 800 -nographic -redir tcp:2221::22"
cmd = "scp -P 2221 -o StrictHostKeyChecking=no -o ConnectTimeout=500 #{self.srcdeb} root@localhost:/root/#{self.smallversion}/edit/root/"
system "echo #{cmd} >> log/vm.log"
system "#{cmd} >> log/vm.log"
if !self.installdeb.index(".deb").nil? # install deb is a deb file? then copy it, too
cmd = "scp -P 2221 -o StrictHostKeyChecking=no -o ConnectTimeout=500 #{self.installdeb} root@localhost:/root/#{self.smallversion}/edit/root/"
system "echo #{cmd} >> log/vm.log"
system "#{cmd} >> log/vm.log"
end
end
ActiveRecord::Base.connection.reconnect!
self.save
end
end
protected
# cleanup of processes and iso files
def delete_images
if self.iso
File.delete self.iso_image
end
if self.kvm
File.delete self.kvm_image
end
if self.usb
File.delete self.usb_image
end
end
def before_destroy
begin
# if process for creating the livecd is waiting but has not started yet, kill it
if !self.generated and !self.generating and !self.pid.nil?
Process.kill("TERM", self.pid)
# use time for deletion of iso as waiting time
self.delete_images
Process.kill("KILL", self.pid)
else
# only delete the iso
self.delete_images
end
rescue
end
end
end
working on #1444
# (c) 2008-2011 by Allgemeinbildung e.V., Bremen, Germany
# This file is part of Communtu.
# Communtu is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Communtu is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero Public License for more details.
# You should have received a copy of the GNU Affero Public License
# along with Communtu. If not, see <http://www.gnu.org/licenses/>.
# each liveCD is stored in the database as an object of class Livecd
# this allows for better error logging and recovery
# note that the iso itself is stored in the file system, however
# a liveCD can be based either on a user profile or on a bundle
# database fields:
# architecture_id: architecture of the liveCD
# derivative_id: derivative of the liveCD
# distribution_id: distribution of the liveCD
# failed: has build process failed?
# first_try: is this the first try to build the liveCD? (only then a failure message is sent)
# generated: has the liveCD been successfully built?
# generating: currently, the liveCD is being built
# installdeb: deb for the bundle installing the contents of the liveCD
# iso: location of iso image file on the server
# kvm: does the user want to have a kvm image?
# license_type: 0 = free, 1 = free or proprietary.
# log: relevant extract of log file
# metapackage_id: bundle on which the liveCD is based
# name
# pid: process id of forked process that builds the liveCD
# profile_version: version of user profile that has been used for liveCD build
# security_type: 0 = Ubuntu only, 1 = also Ubuntu community, 2 = also third-party.
# size: size of the iso image
# srcdeb: deb file for installing the sources
# usb: does the user want to have a usb image? (deprecated)
# vm_hda: hard disk file for virtual machine (when testing liveCD via vnc)
# vm_pid: process id of virtual machine (when testing liveCD via vnc)
require "lib/utils.rb"
class Livecd < ActiveRecord::Base
belongs_to :distribution
belongs_to :derivative
belongs_to :architecture
belongs_to :metapackage
has_many :livecd_users, :dependent => :destroy
has_many :users, :through => :livecd_users
validates_presence_of :name, :distribution, :derivative, :architecture
# version of liveCD, made from derivative, distribution, architecture, license and security
def smallversion
self.derivative.name.downcase+"-" \
+(self.distribution.name.gsub(/[a-zA-Z ]/,'')) \
+"-desktop-"+self.architecture.name
end
def fullversion
self.smallversion \
+ "-" +(Package.license_components[self.license_type]) \
+ "-" +(Package.security_components[self.security_type])
end
# unique name of liveCD
def fullname
"#{self.name}-#{self.fullversion}"
end
# filename of LiveCD in the file system
def iso_image
"#{RAILS_ROOT}/public/isos/#{self.fullname}.iso"
end
# filename of kvm image in the file system
def kvm_image
"#{RAILS_ROOT}/public/isos/#{self.fullname}.kvm.img"
end
# filename of kvm image in the file system
def usb_image
"#{RAILS_ROOT}/public/isos/#{self.fullname}.usb.img"
end
def self.rails_url
if RAILS_ROOT.index("test").nil?
then "http://communtu.org"
else "http://test.communtu.de"
end
end
# base url of LiveCD on the communtu server
def base_url
return "#{Livecd.rails_url}/isos/#{self.fullname}"
end
# url of iso image on the communtu server
def iso_url
"#{self.base_url}.iso"
end
# url of kvm image on the communtu server
def kvm_url
"#{self.base_url}.kvm.img"
end
# url of usb image on the communtu server
def usb_url
"#{self.base_url}.usb.img"
end
# check if a user supplied name is acceptable
def self.check_name(name)
if name.match(/^communtu-.*/)
return I18n.t(:livecd_communtu_name)
end
if name.match(/^[A-Za-z0-9_-]*$/).nil?
return I18n.t(:livecd_incorrect_name)
end
if !Livecd.find_by_name(name).nil?
return I18n.t(:livecd_existing_name)
end
return nil
end
def worker
MiddleMan.new_worker(:class => :livecd_worker,
:args => self.id,
:job_key => :livecd_worker,
:singleton => true)
end
# create the liveCD in a forked process
def fork_remaster(port=2222)
nice = (self.users[0].nil? or !self.users[0].has_role?('administrator'))
nicestr = if nice then "nice -n +10 " else "" end
self.pid = fork do
ActiveRecord::Base.connection.reconnect!
system "echo \"Livecd.find(#{self.id.to_s}).remaster(#{port.to_s})\" | #{nicestr} nohup script/console production"
end
ActiveRecord::Base.connection.reconnect!
self.save
end
# created liveCD, using script/remaster
def remaster(port=2222)
ActiveRecord::Base.connection.reconnect!
ver = self.smallversion
fullname = self.fullname
# need to generate iso, use lock in order to prevent parallel generation of multiple isos
begin
safe_system "dotlockfile -p -r 1000 #{RAILS_ROOT}/livecd#{port}_lock"
self.generating = true
self.save
# log to log/livecd.log
system "(echo; echo \"------------------------------------\") >> #{RAILS_ROOT}/log/livecd#{port}.log"
date = IO.popen("date",&:read).chomp
call = "echo \"#{port}: #{date} - Creating live CD ##{self.id} #{fullname}\" >> #{RAILS_ROOT}/log/"
system (call+"livecd#{port}.log")
system (call+"livecd.log")
# check if there is enough disk space (at least 25 GB)
while disk_free_space(SETTINGS['iso_path']) < 25000000000
# destroy the oldest liveCD
cd=Livecd.find(:first,:order=>"updated_at ASC")
call = "(echo \"#{port}: Disk full - deleting live CD #{cd.id}\" >> #{RAILS_ROOT}/log/"
system (call+"livecd#{port}.log")
system (call+"livecd.log")
cd.destroy
end
# normal users get nice'd
nice = (self.users[0].nil? or !self.users[0].has_role?('administrator'))
nicestr = if nice then "-nice " else "" end
# Jaunty and lower need virtualisation due to requirement of squashfs version >= 4 (on the server, we have Hardy)
if self.distribution.name[0] <= 74 then # 74 is "J"
virt = "-v "
else
virt = ""
end
isoflag = self.iso ? "-iso #{self.iso_image} " : ""
kvmflag = self.kvm ? "-kvm #{self.kvm_image} " : ""
usbflag = self.usb ? "-usb #{self.usb_image} " : ""
remaster_call = "#{RAILS_ROOT}/script/remaster create #{nicestr}#{virt}#{isoflag}#{kvmflag}#{usbflag}#{ver} #{self.name} #{self.srcdeb} #{self.installdeb} #{port} >> #{RAILS_ROOT}/log/livecd#{port}.log 2>&1"
system "echo \"#{remaster_call}\" >> #{RAILS_ROOT}/log/livecd#{port}.log"
self.failed = !(system remaster_call)
# kill VM and release lock, necessary in case of abrupt exit
system "sudo kill-kvm #{port}"
system "dotlockfile -u /home/communtu/livecd/livecd#{port}.lock"
system "echo >> #{RAILS_ROOT}/log/livecd#{port}.log"
date = IO.popen("date",&:read).chomp
call = "echo \"#{port}: #{date} - finished\" >> #{RAILS_ROOT}/log/"
system (call+"livecd#{port}.log")
system (call+"livecd.log")
msg = if self.failed then "failed" else "succeeded" end
call = "echo \"#{port}: Creation of live CD #{msg}\" >> #{RAILS_ROOT}/log/"
system (call+"livecd#{port}.log")
system (call+"livecd.log")
system "echo >> #{RAILS_ROOT}/log/livecd#{port}.log"
if self.failed then
self.log = IO.popen("tail -n80 #{RAILS_ROOT}/log/livecd#{port}.log").read
end
rescue StandardError => err
self.log = "ruby code for live CD/DVD creation crashed: "+err
self.failed = true
end
system "dotlockfile -u #{RAILS_ROOT}/livecd#{port}_lock"
# store size and inform user via email
ActiveRecord::Base.connection.reconnect! # needed after a possibly long time
if !self.failed then
self.generated = true
self.size = 0
if self.iso
self.size += File.size(self.iso_image)
end
if self.kvm
self.size += File.size(self.kvm_image)
end
if self.usb
self.size += File.size(self.iso_image)
end
self.users.each do |user|
MyMailer.deliver_livecd(user,"#{Livecd.rails_url}/livecds/#{self.id}")
end
else
if self.first_try then
self.users.each do |user|
MyMailer.deliver_livecd_failed(user,self.fullname)
end
self.first_try = false
end
end
self.generating = false
self.save
end
# remaster the next non-generated liveCD (called from rake daemon)
def self.remaster_next(ports,admin_ports)
cd = Livecd.find_by_generated_and_generating_and_failed(false,false,false)
# no current liveCD generation?
if cd.nil? and Dir.glob("livecd22*lock").empty?
# ... then re-generate old ones
cd = Livecd.find(:first,:conditions=>{:failed=>true},:order => "updated_at ASC")
if !cd.nil? then
cd.generate_sources
cd.failed = false
cd.log = nil
end
end
if !cd.nil? then
# get next port (use special ports for admins)
if !cd.users[0].nil? and cd.users[0].has_role?('administrator')
port = admin_ports[0]
admin_ports.delete(port)
admin_ports.push(port)
else
port = ports[0]
ports.delete(port)
ports.push(port)
end
# generate CD
cd.generating = true
cd.save
cd.fork_remaster(port)
end
end
# get list of metapackages, either from database or from installdeb
def bundles
begin
if self.metapackage.nil?
depnames = Deb.deb_get_dependencies(self.installdeb)
depnames.map{|n| Metapackage.all.select{|m| m.debian_name==n}.first}.compact
else
[self.metapackage]
end
rescue
[]
end
end
# check whether all involved bundles have been published
def bundles_published?
if self.metapackage.nil?
u = self.users[0]
if u.nil?
return false
end
u.selected_packages.map(&:is_published?).all?
else
self.metapackage.is_published?
end
end
# re-generate srcdeb (needed in case that srcdeb is wrong for some reasons)
def generate_sources
bundle = self.metapackage
user = self.users[0]
if !user.nil?
if !bundle.nil?
user.distribution_id = self.distribution.id
user.derivative_id = self.derivative.id
user.architecture_id = self.architecture.id
user.license = self.license_type
user.security = self.security_type
self.srcdeb = RAILS_ROOT+"/"+user.install_bundle_sources(bundle)
else
if File.exists?(self.srcdeb)
system "rm #{self.srcdeb}"
end
deps = self.bundles
name = BasePackage.debianize_name("communtu-add-sources-"+user.login)
version = user.profile_version.to_s
description = I18n.t(:controller_suggestion_2)+user.login
srcfile = Deb.makedeb_for_source_install(name,
version,
description,
deps,
self.distribution,
self.derivative,
self.license_type,
self.security_type,
self.architecture)
self.srcdeb = RAILS_ROOT+"/"+srcfile
end
user.profile_changed = true
self.save
end
end
# register a livecd for a user
def register(user)
if !self.users.include? user
LivecdUser.create({:livecd_id => self.id, :user_id => user.id})
end
end
# deregister a livecd for a user; destroy cd if it has no more users
def deregister(user)
LivecdUser.find_all_by_livecd_id_and_user_id(self.id,user.id).each do |lu|
lu.destroy
end
# are there any other users of this live CD?
if self.users(force_reload=true).empty?
# if not, destroy live CD
self.destroy
end
end
MSGS = ["Failed to fetch","could not set up","Cannot install","is not installable","not going to be installed", "Depends:","Error","error","annot","Wrong","not found","Connection closed", "E:"]
def short_log
if log.nil?
return ""
end
lines = log.split("\n")
MSGS.each do |msg|
lines.reverse.each do |line|
if !line.index(msg).nil?
return line
end
end
end
return ""
end
def generate_hda
tmpfile = IO.popen("mktemp",&:read).chomp
self.vm_hda = SETTINGS['iso_path']+tmpfile
self.save
system "qemu-img create #{self.vm_hda} 5G"
end
def start_vm
if self.vm_pid.nil?
self.vm_pid = fork do
ActiveRecord::Base.connection.reconnect!
self.generate_hda
exec "kvm -hda #{self.vm_hda} -cdrom #{self.iso_image} -m 1000 -vnc :1"
end
ActiveRecord::Base.connection.reconnect!
self.save
end
end
def stop_vm
system "kill #{self.vm_pid}"
sleep 1
system "kill -9 #{self.vm_pid}"
system "rm #{self.vm_hda}"
self.vm_pid = nil
self.vm_hda = nil
self.save
end
def start_vm_basis
if self.vm_pid.nil?
self.vm_pid = fork do
ActiveRecord::Base.connection.reconnect!
system "kvm -daemonize -drive file=/home/communtu/livecd/kvm/#{self.smallversion}.img,if=virtio,boot=on,snapshot=on -smp 4 -m 800 -nographic -redir tcp:2221::22"
cmd = "scp -P 2221 -o StrictHostKeyChecking=no -o ConnectTimeout=500 #{self.srcdeb} root@localhost:/root/#{self.smallversion}/edit/root/"
system "echo #{cmd} >> log/vm.log"
system "#{cmd} >> log/vm.log"
if !self.installdeb.index(".deb").nil? # install deb is a deb file? then copy it, too
cmd = "scp -P 2221 -o StrictHostKeyChecking=no -o ConnectTimeout=500 #{self.installdeb} root@localhost:/root/#{self.smallversion}/edit/root/"
system "echo #{cmd} >> log/vm.log"
system "#{cmd} >> log/vm.log"
end
end
ActiveRecord::Base.connection.reconnect!
self.save
end
end
protected
# cleanup of processes and iso files
def delete_images
if self.iso
File.delete self.iso_image
end
if self.kvm
File.delete self.kvm_image
end
if self.usb
File.delete self.usb_image
end
end
def before_destroy
begin
# if process for creating the livecd is waiting but has not started yet, kill it
if !self.generated and !self.generating and !self.pid.nil?
Process.kill("TERM", self.pid)
# use time for deletion of iso as waiting time
self.delete_images
Process.kill("KILL", self.pid)
else
# only delete the iso
self.delete_images
end
rescue
end
end
end
|
class Market < ActiveRecord::Base
attr_accessible :from_exchange, :from_currency,
:to_exchange, :to_currency, :fee_percentage, :delay_ms
belongs_to :from_exchange, :class_name => :Exchange
belongs_to :to_exchange, :class_name => :Exchange
belongs_to :exchange
has_many :tickers
has_many :trades
has_many :depth_runs
scope :internal, where("to_exchange_id = from_exchange_id")
scope :trading, lambda { |from_currency, to_currency|
where(["from_currency = ? and to_currency = ?",
from_currency, to_currency]) }
def self.transfers(from_exchange, to_exchange, currency)
where('from_exchange_id = ?', from_exchange.id).
where('to_exchange_id = ?', to_exchange.id).
where('from_currency = ? and to_currency = ?', currency, currency).
order('fee_percentage asc')
end
def name
if from_exchange != exchange
from_exchange_name = "#{from_exchange.name}-"
end
if to_exchange != exchange
to_exchange_name = "#{to_exchange.name}-"
end
"#{exchange.name} #{from_exchange_name}#{from_currency}/#{to_exchange_name}#{to_currency}"
end
def fee
fee_percentage/100
end
def api
"Markets::#{exchange.name.classify}".constantize.new(self)
end
def pair
Market.where(["to_exchange_id = ? and from_exchange_id = ?",
to_exchange_id, from_exchange_id]).
where(["from_currency = ? and to_currency = ?",
to_currency, from_currency]).first
end
def last_ticker
tickers.last
end
def depth_filter(data, currency)
depth_run = depth_runs.create
offers = api.offers(data, currency)
offers.map!{|o| o.merge({market_id:id})}
ActiveRecord::Base.transaction do
depth_run.offers.create(offers)
end
if currency == from_currency
best_offer = depth_run.offers.order('price desc').last
elsif currency == to_currency
best_offer = depth_run.offers.order('price asc').last
elsif raise "depth_filter failed, bad currency #{currency} for market #{self}"
end
puts "#{currency} #{self.from_currency}/#{self.to_currency} Best offer: #{best_offer.price}"
depth_run.update_attribute :best_offer_id, best_offer.id
depth_run
end
def ticker
tickers.last
end
def offers
last_run = depth_runs.last
last_run ? last_run.offers : []
end
end
fix errant elsif
class Market < ActiveRecord::Base
attr_accessible :from_exchange, :from_currency,
:to_exchange, :to_currency, :fee_percentage, :delay_ms
belongs_to :from_exchange, :class_name => :Exchange
belongs_to :to_exchange, :class_name => :Exchange
belongs_to :exchange
has_many :tickers
has_many :trades
has_many :depth_runs
scope :internal, where("to_exchange_id = from_exchange_id")
scope :trading, lambda { |from_currency, to_currency|
where(["from_currency = ? and to_currency = ?",
from_currency, to_currency]) }
def self.transfers(from_exchange, to_exchange, currency)
where('from_exchange_id = ?', from_exchange.id).
where('to_exchange_id = ?', to_exchange.id).
where('from_currency = ? and to_currency = ?', currency, currency).
order('fee_percentage asc')
end
def name
if from_exchange != exchange
from_exchange_name = "#{from_exchange.name}-"
end
if to_exchange != exchange
to_exchange_name = "#{to_exchange.name}-"
end
"#{exchange.name} #{from_exchange_name}#{from_currency}/#{to_exchange_name}#{to_currency}"
end
def fee
fee_percentage/100
end
def api
"Markets::#{exchange.name.classify}".constantize.new(self)
end
def pair
Market.where(["to_exchange_id = ? and from_exchange_id = ?",
to_exchange_id, from_exchange_id]).
where(["from_currency = ? and to_currency = ?",
to_currency, from_currency]).first
end
def last_ticker
tickers.last
end
def depth_filter(data, currency)
depth_run = depth_runs.create
offers = api.offers(data, currency)
offers.map!{|o| o.merge({market_id:id})}
ActiveRecord::Base.transaction do
depth_run.offers.create(offers)
end
if currency == from_currency
best_offer = depth_run.offers.order('price desc').last
elsif currency == to_currency
best_offer = depth_run.offers.order('price asc').last
else
raise "depth_filter failed, bad currency #{currency} for market #{self}"
end
puts "#{currency} #{self.from_currency}/#{self.to_currency} Best offer: #{best_offer.price}"
depth_run.update_attribute :best_offer_id, best_offer.id
depth_run
end
def ticker
tickers.last
end
def offers
last_run = depth_runs.last
last_run ? last_run.offers : []
end
end
|
require 'fileutils'
module MCollective
module Agent
class Puppetupdate < RPC::Agent
attr_accessor :dir, :repo_url
def initialize
@debug = true
@dir = config('directory') || '/etc/puppet'
@repo_url = config('repository') || 'http://git/git/puppet'
super
end
def git_repo
config('clone_at') || "#{@dir}/puppet.git"
end
def load_puppet
require 'puppet'
rescue LoadError => e
reply.fail! "Cannot load Puppet"
end
action "update" do
load_puppet
begin
update_all_branches
update_master_checkout
reply[:output] = "Done"
rescue Exception => e
reply.fail! "Exception: #{e}"
end
end
action "update_default" do
validate :revision, String
validate :revision, :shellsafe
load_puppet
begin
revision = request[:revision]
update_bare_repo
update_branch("default",revision)
reply[:output] = "Done"
rescue Exception => e
reply.fail! "Exception: #{e}"
end
end
def branches
%x[cd #{git_repo} && git branch -a].
lines.reject{|l| l =~ /\//}.map(&:strip)
end
def all_env_branches
%x[ls -1 #{@dir}/environments].lines.map(&:strip)
end
def update_master_checkout
Dir.chdir(@dir) do
debug "chdir #{@dir} for update_master_checkout"
exec "git --git-dir=#{git_repo} --work-tree=#{@dir} reset --hard master"
end
end
def update_all_branches(revisions={})
update_bare_repo
branches.each do |branch|
debug "WORKING FOR BRANCH #{branch}"
debug "#{revisions[branch]}"
update_branch(branch, revisions[branch])
end
write_puppet_conf(branches)
cleanup_old_branches(branches)
end
def cleanup_old_branches(branches)
local_branches = ["default"]
branches.each { |branch| local_branches << local_branch_name(branch) }
all_env_branches.each do |branch|
next if local_branches.include?(branch)
debug "Cleanup old branch named #{branch}"
exec "rm -rf #{@dir}/environments/#{branch}"
end
end
def write_puppet_conf(branches)
branches << "default"
FileUtils.cp "#{@dir}/puppet.conf.base", "#{@dir}/puppet.conf"
branches.each do |branch|
open("#{@dir}/puppet.conf", "a") do |f|
f.puts "\n[#{local_branch_name(branch)}]\n"
f.puts "modulepath=$confdir/environments/#{local_branch_name(branch)}/modules\n"
f.puts "manifest=$confdir/environments/#{local_branch_name(branch)}/manifests/site.pp\n"
end
end
end
def update_branch(remote_branch_name, revision=nil)
revision ||= "#{remote_branch_name(remote_branch_name)}"
local_branch_name = local_branch_name(remote_branch_name)
branch_dir = "#{@dir}/environments/#{local_branch_name}/"
Dir.mkdir("#{@dir}/environments") unless File.exist?("#{@dir}/environments")
Dir.mkdir(branch_dir) unless File.exist?(branch_dir)
Dir.chdir(branch_dir) do
debug "git --git-dir=#{git_repo} --work-tree=#{branch_dir} reset --hard #{revision}\n"
exec "git --git-dir=#{git_repo} --work-tree=#{branch_dir} reset --hard #{revision}"
end
end
def remote_branch_name(remote_branch_name)
/\* (.+)/.match(remote_branch_name) ? $1 : remote_branch_name
end
def local_branch_name(remote_branch_name)
if /(\/|\* )(.+)/.match(remote_branch_name)
remote_branch_name = $2
end
remote_branch_name == 'master' ? "masterbranch" : remote_branch_name
end
def update_bare_repo
envDir="#{git_repo}"
if File.exists?(envDir)
Dir.chdir(git_repo) do
debug "chdir #{git_repo}"
exec("git fetch origin")
exec("git remote prune origin")
end
else
exec "git clone --mirror #{@repo_url} #{git_repo}"
end
debug "done update_bare_repo"
end
def debug(line)
logger.info(line) if @debug == true
end
def exec(cmd)
debug "Running cmd #{cmd}"
output=`#{cmd} 2>&1`
raise "#{cmd} failed with: #{output}" unless $?.success?
end
private
def config(key)
Config.instance.pluginconf["puppetupdate.#{key}"]
end
end
end
end
move action definitions to the top
require 'fileutils'
module MCollective
module Agent
class Puppetupdate < RPC::Agent
action "update" do
load_puppet
begin
update_all_branches
update_master_checkout
reply[:output] = "Done"
rescue Exception => e
reply.fail! "Exception: #{e}"
end
end
action "update_default" do
validate :revision, String
validate :revision, :shellsafe
load_puppet
begin
revision = request[:revision]
update_bare_repo
update_branch("default",revision)
reply[:output] = "Done"
rescue Exception => e
reply.fail! "Exception: #{e}"
end
end
attr_accessor :dir, :repo_url
def initialize
@debug = true
@dir = config('directory') || '/etc/puppet'
@repo_url = config('repository') || 'http://git/git/puppet'
super
end
def git_repo
config('clone_at') || "#{@dir}/puppet.git"
end
def load_puppet
require 'puppet'
rescue LoadError => e
reply.fail! "Cannot load Puppet"
end
def branches
%x[cd #{git_repo} && git branch -a].
lines.reject{|l| l =~ /\//}.map(&:strip)
end
def all_env_branches
%x[ls -1 #{@dir}/environments].lines.map(&:strip)
end
def update_master_checkout
Dir.chdir(@dir) do
debug "chdir #{@dir} for update_master_checkout"
exec "git --git-dir=#{git_repo} --work-tree=#{@dir} reset --hard master"
end
end
def update_all_branches(revisions={})
update_bare_repo
branches.each do |branch|
debug "WORKING FOR BRANCH #{branch}"
debug "#{revisions[branch]}"
update_branch(branch, revisions[branch])
end
write_puppet_conf(branches)
cleanup_old_branches(branches)
end
def cleanup_old_branches(branches)
local_branches = ["default"]
branches.each { |branch| local_branches << local_branch_name(branch) }
all_env_branches.each do |branch|
next if local_branches.include?(branch)
debug "Cleanup old branch named #{branch}"
exec "rm -rf #{@dir}/environments/#{branch}"
end
end
def write_puppet_conf(branches)
branches << "default"
FileUtils.cp "#{@dir}/puppet.conf.base", "#{@dir}/puppet.conf"
branches.each do |branch|
open("#{@dir}/puppet.conf", "a") do |f|
f.puts "\n[#{local_branch_name(branch)}]\n"
f.puts "modulepath=$confdir/environments/#{local_branch_name(branch)}/modules\n"
f.puts "manifest=$confdir/environments/#{local_branch_name(branch)}/manifests/site.pp\n"
end
end
end
def update_branch(remote_branch_name, revision=nil)
revision ||= "#{remote_branch_name(remote_branch_name)}"
local_branch_name = local_branch_name(remote_branch_name)
branch_dir = "#{@dir}/environments/#{local_branch_name}/"
Dir.mkdir("#{@dir}/environments") unless File.exist?("#{@dir}/environments")
Dir.mkdir(branch_dir) unless File.exist?(branch_dir)
Dir.chdir(branch_dir) do
debug "git --git-dir=#{git_repo} --work-tree=#{branch_dir} reset --hard #{revision}\n"
exec "git --git-dir=#{git_repo} --work-tree=#{branch_dir} reset --hard #{revision}"
end
end
def remote_branch_name(remote_branch_name)
/\* (.+)/.match(remote_branch_name) ? $1 : remote_branch_name
end
def local_branch_name(remote_branch_name)
if /(\/|\* )(.+)/.match(remote_branch_name)
remote_branch_name = $2
end
remote_branch_name == 'master' ? "masterbranch" : remote_branch_name
end
def update_bare_repo
envDir="#{git_repo}"
if File.exists?(envDir)
Dir.chdir(git_repo) do
debug "chdir #{git_repo}"
exec("git fetch origin")
exec("git remote prune origin")
end
else
exec "git clone --mirror #{@repo_url} #{git_repo}"
end
debug "done update_bare_repo"
end
def debug(line)
logger.info(line) if @debug == true
end
def exec(cmd)
debug "Running cmd #{cmd}"
output=`#{cmd} 2>&1`
raise "#{cmd} failed with: #{output}" unless $?.success?
end
private
def config(key)
Config.instance.pluginconf["puppetupdate.#{key}"]
end
end
end
end
|
class Market < ActiveRecord::Base
attr_accessible :from_exchange, :from_currency,
:to_exchange, :to_currency, :fee_percentage, :delay_ms
belongs_to :from_exchange, :class_name => :Exchange
belongs_to :to_exchange, :class_name => :Exchange
belongs_to :exchange
has_many :tickers
has_many :trades
has_many :depth_runs
scope :internal, where("to_exchange_id = from_exchange_id")
scope :trading, lambda { |from_currency, to_currency|
where(["from_currency = ? and to_currency = ?",
from_currency, to_currency]) }
def self.transfers(from_exchange, to_exchange, currency)
where('from_exchange_id = ?', from_exchange.id).
where('to_exchange_id = ?', to_exchange.id).
where('from_currency = ? and to_currency = ?', currency, currency).
order('fee_percentage asc')
end
def name
if from_exchange != exchange
from_exchange_name = "#{from_exchange.name}-"
end
if to_exchange != exchange
to_exchange_name = "#{to_exchange.name}-"
end
"#{exchange.name} #{from_exchange_name}#{from_currency}/#{to_exchange_name}#{to_currency}"
end
def fee
fee_percentage/100
end
def api
"Markets::#{exchange.name.classify}".constantize.new(self)
end
def pair
Market.where(["to_exchange_id = ? and from_exchange_id = ?",
to_exchange_id, from_exchange_id]).
where(["from_currency = ? and to_currency = ?",
to_currency, from_currency]).first
end
def last_ticker
tickers.last
end
def depth_filter(data, currency)
depth_run = depth_runs.create
offers = api.offers(data, currency)
offers.map!{|o| o.merge({market_id:id})}
ActiveRecord::Base.transaction do
depth_run.offers.create(offers)
end
if currency == from_currency
best_offer = depth_run.offers.order('price desc').last
elsif currency == to_currency
best_offer = depth_run.offers.order('price asc').last
elsif raise "depth_filter failed, bad currency #{currency} for market #{self}"
end
puts "#{currency} #{self.from_currency}/#{self.to_currency} Best offer: #{best_offer.price}"
depth_run.update_attribute :best_offer_id, best_offer.id
depth_run
end
def ticker
tickers.last
end
def offers
last_run = depth_runs.last
last_run ? last_run.offers : []
end
end
use the slug, luke
# A tradable currency pair hosted on an exchange. from_exchange /
# to_exchange differ from the hosting exchange only for transfer markets
# that move a currency between exchanges.
class Market < ActiveRecord::Base
  attr_accessible :from_exchange, :from_currency,
    :to_exchange, :to_currency, :fee_percentage, :delay_ms
  belongs_to :from_exchange, :class_name => :Exchange
  belongs_to :to_exchange, :class_name => :Exchange
  belongs_to :exchange
  has_many :tickers
  has_many :trades
  has_many :depth_runs
  # Markets whose endpoints are the same exchange (plain trading pairs).
  scope :internal, where("to_exchange_id = from_exchange_id")
  # Markets trading a specific currency pair.
  scope :trading, lambda { |from_currency, to_currency|
    where(["from_currency = ? and to_currency = ?",
           from_currency, to_currency]) }
  # Transfer markets moving `currency` from one exchange to another,
  # cheapest fee first.
  def self.transfers(from_exchange, to_exchange, currency)
    where('from_exchange_id = ?', from_exchange.id).
    where('to_exchange_id = ?', to_exchange.id).
    where('from_currency = ? and to_currency = ?', currency, currency).
    order('fee_percentage asc')
  end
  # Human-readable market label, e.g. "MtGox BTC/USD"; foreign exchanges
  # are prefixed to their currency.
  def name
    if from_exchange != exchange
      from_exchange_name = "#{from_exchange.name}-"
    end
    if to_exchange != exchange
      to_exchange_name = "#{to_exchange.name}-"
    end
    "#{exchange.name} #{from_exchange_name}#{from_currency}/#{to_exchange_name}#{to_currency}"
  end
  # Fee as a fraction (fee_percentage is stored as a percent).
  # NOTE(review): integer division if fee_percentage is an Integer column
  # — confirm the column type.
  def fee
    fee_percentage/100
  end
  # Instantiates the exchange-specific API adapter (resolved from the
  # exchange slug, e.g. "mtgox" -> Markets::Mtgox).
  def api
    "Markets::#{exchange.slug.classify}".constantize.new(self)
  end
  # The mirror market (direction reversed), or nil when none exists.
  def pair
    Market.where(["to_exchange_id = ? and from_exchange_id = ?",
                  to_exchange_id, from_exchange_id]).
           where(["from_currency = ? and to_currency = ?",
                  to_currency, from_currency]).first
  end
  # Most recently recorded ticker.
  def last_ticker
    tickers.last
  end
  # Records a depth (order book) snapshot for one side of this market and
  # returns the created DepthRun with its best offer remembered.
  # `currency` must be from_currency or to_currency.
  def depth_filter(data, currency)
    depth_run = depth_runs.create
    offers = api.offers(data, currency)
    offers.map! { |o| o.merge({market_id: id}) }
    ActiveRecord::Base.transaction do
      depth_run.offers.create(offers)
    end
    if currency == from_currency
      best_offer = depth_run.offers.order('price desc').last
    elsif currency == to_currency
      best_offer = depth_run.offers.order('price asc').last
    else
      # `else` (was `elsif raise ...`): unknown currency is a hard error.
      raise "depth_filter failed, bad currency #{currency} for market #{self}"
    end
    puts "#{currency} #{self.from_currency}/#{self.to_currency} Best offer: #{best_offer.price}"
    depth_run.update_attribute :best_offer_id, best_offer.id
    depth_run
  end
  # Most recent ticker; duplicates #last_ticker.
  def ticker
    tickers.last
  end
  # Offers from the latest depth run, or [] when none exist yet.
  def offers
    last_run = depth_runs.last
    last_run ? last_run.offers : []
  end
end
|
# A media asset (image, audio or video) attached to an artwork within an
# exhibition. Files are stored via Paperclip; records are soft-deleted
# via acts_as_paranoid.
class Medium < ActiveRecord::Base
  belongs_to :exhibition
  belongs_to :exhibition_including_deleted, :class_name => 'Exhibition', :foreign_key => 'exhibition_id', :with_deleted => true
  belongs_to :artwork
  belongs_to :artwork_including_deleted, :class_name => 'Artwork', :foreign_key => 'artwork_id', :with_deleted => true
  # API
  # Whitelist of attributes exposed through #as_json.
  JSON_ATTRS = ['uuid', 'created_at', 'updated_at', 'deleted_at', 'title', 'kind', 'width', 'height', 'position'].freeze
  # Soft delete
  acts_as_paranoid
  # File attachment
  # Videos get a frame-grab processor; everything else uses the standard
  # Paperclip thumbnailer.
  has_attached_file :file, :styles => {
    :thumb => '200x200#',
    :small => '300x300',
    :medium => '600x600',
    :large => '1200x1200'
  }, :processors => lambda { |m| (m.kind == 'video') ? [ :video_thumbnail ] : [ :thumbnail ] }
  before_post_process :verify_content_type
  before_save :extract_dimensions
  # UUID
  before_create :set_uuid
  # Validations
  validates :exhibition_id, :presence => true
  validates :artwork_id, :presence => true
  validates :title, :presence => true
  validates :file, :attachment_presence => true
  validates_attachment_content_type :file, :content_type => [
    'image/jpeg', 'image/png',
    'audio/mpeg', 'audio/mp4', 'audio/mp3',
    'video/mpeg', 'video/mp4', 'video/quicktime'
  ], :message => 'is incorrect. Please upload either an image, audio or a video file'
  # Scopes
  scope :image, -> { where(:kind => 'image') }
  scope :audio, -> { where(:kind => 'audio') }
  scope :video, -> { where(:kind => 'video') }
  # Whitelisted attributes plus owner UUIDs (fetched through the
  # with_deleted associations so they survive soft deletion) and the
  # Paperclip style URLs. `options` is accepted but ignored.
  def as_json(options=nil)
    attributes.slice(*JSON_ATTRS).merge({
      :exhibition_uuid => exhibition_including_deleted.uuid,
      :artwork_uuid => artwork_including_deleted.uuid,
      :urlThumb => file.url(:thumb),
      :urlSmall => file.url(:small),
      :urlMedium => file.url(:medium),
      :urlLarge => file.url(:large),
      :urlFull => file.url(:original)
    })
  end
  private
  # Assigns a time-based UUID before creation.
  def set_uuid
    self.uuid = UUIDTools::UUID.timestamp_create().to_s
  end
  # Derives `kind` from the uploaded content type.
  # Returning false for audio halts Paperclip post-processing (no
  # thumbnails for audio); images and video are processed.
  def verify_content_type
    self.kind = 'image' if %w(image/jpeg image/png).include?(file_content_type)
    self.kind = 'audio' if %w(audio/mpeg audio/mp4 audio/mp3).include?(file_content_type)
    self.kind = 'video' if %w(video/mpeg video/mp4 video/quicktime).include?(file_content_type)
    # Only post process images and video (for thumb generation) (not audio)
    self.kind != 'audio'
  end
  # Stores pixel dimensions for images (from the original upload) and
  # videos (from the generated :large thumbnail); skipped for audio.
  def extract_dimensions
    return if self.kind == 'audio'
    if self.kind == 'image'
      tempfile = file.queued_for_write[:original]
    elsif self.kind == 'video'
      tempfile = file.queued_for_write[:large] # large thumbnail for video
    end
    unless tempfile.nil?
      geometry = Paperclip::Geometry.from_file(tempfile)
      self.width = geometry.width.to_i
      self.height = geometry.height.to_i
    end
  end
end
Added new fields to API
# A media asset (image, audio or video) attached to an artwork within an
# exhibition. Files are stored via Paperclip; records are soft-deleted
# via acts_as_paranoid.
class Medium < ActiveRecord::Base
  belongs_to :exhibition
  belongs_to :exhibition_including_deleted, :class_name => 'Exhibition', :foreign_key => 'exhibition_id', :with_deleted => true
  belongs_to :artwork
  belongs_to :artwork_including_deleted, :class_name => 'Artwork', :foreign_key => 'artwork_id', :with_deleted => true
  # API
  # Whitelist of attributes exposed through #as_json ('alt' and
  # 'description' included).
  JSON_ATTRS = ['uuid', 'created_at', 'updated_at', 'deleted_at', 'title', 'kind', 'width', 'height', 'position', 'alt', 'description'].freeze
  # Soft delete
  acts_as_paranoid
  # File attachment
  # Videos get a frame-grab processor; everything else uses the standard
  # Paperclip thumbnailer.
  has_attached_file :file, :styles => {
    :thumb => '200x200#',
    :small => '300x300',
    :medium => '600x600',
    :large => '1200x1200'
  }, :processors => lambda { |m| (m.kind == 'video') ? [ :video_thumbnail ] : [ :thumbnail ] }
  before_post_process :verify_content_type
  before_save :extract_dimensions
  # UUID
  before_create :set_uuid
  # Validations
  validates :exhibition_id, :presence => true
  validates :artwork_id, :presence => true
  validates :title, :presence => true
  validates :file, :attachment_presence => true
  validates_attachment_content_type :file, :content_type => [
    'image/jpeg', 'image/png',
    'audio/mpeg', 'audio/mp4', 'audio/mp3',
    'video/mpeg', 'video/mp4', 'video/quicktime'
  ], :message => 'is incorrect. Please upload either an image, audio or a video file'
  # Scopes
  scope :image, -> { where(:kind => 'image') }
  scope :audio, -> { where(:kind => 'audio') }
  scope :video, -> { where(:kind => 'video') }
  # Whitelisted attributes plus owner UUIDs (fetched through the
  # with_deleted associations so they survive soft deletion) and the
  # Paperclip style URLs. `options` is accepted but ignored.
  def as_json(options=nil)
    attributes.slice(*JSON_ATTRS).merge({
      :exhibition_uuid => exhibition_including_deleted.uuid,
      :artwork_uuid => artwork_including_deleted.uuid,
      :urlThumb => file.url(:thumb),
      :urlSmall => file.url(:small),
      :urlMedium => file.url(:medium),
      :urlLarge => file.url(:large),
      :urlFull => file.url(:original)
    })
  end
  private
  # Assigns a time-based UUID before creation.
  def set_uuid
    self.uuid = UUIDTools::UUID.timestamp_create().to_s
  end
  # Derives `kind` from the uploaded content type.
  # Returning false for audio halts Paperclip post-processing (no
  # thumbnails for audio); images and video are processed.
  def verify_content_type
    self.kind = 'image' if %w(image/jpeg image/png).include?(file_content_type)
    self.kind = 'audio' if %w(audio/mpeg audio/mp4 audio/mp3).include?(file_content_type)
    self.kind = 'video' if %w(video/mpeg video/mp4 video/quicktime).include?(file_content_type)
    # Only post process images and video (for thumb generation) (not audio)
    self.kind != 'audio'
  end
  # Stores pixel dimensions for images (from the original upload) and
  # videos (from the generated :large thumbnail); skipped for audio.
  def extract_dimensions
    return if self.kind == 'audio'
    if self.kind == 'image'
      tempfile = file.queued_for_write[:original]
    elsif self.kind == 'video'
      tempfile = file.queued_for_write[:large] # large thumbnail for video
    end
    unless tempfile.nil?
      geometry = Paperclip::Geometry.from_file(tempfile)
      self.width = geometry.width.to_i
      self.height = geometry.height.to_i
    end
  end
end
|
# A person enrolled in the exchange, embedded in Person (Mongoid).
# Holds identity, demographic and eligibility attributes plus foreign
# keys into external systems.
class Member
  require 'date'
  include Mongoid::Document
  include Mongoid::Timestamps
  include MergingModel
  GENDER_TYPES = %W(male female unknown)
  CITIZEN_STATUS_TYPES = %W[
    us_citizen
    naturalized_citizen
    alien_lawfully_present
    lawful_permanent_resident
    indian_tribe_member
    undocumented_immigrant
    not_lawfully_present_in_us
  ]
  # gdb_member_id is the primary key. if hbx_member_id isn't provided, gdb_member_id is used
  auto_increment :_id, seed: 9999
  field :hbx_member_id, type: String    # Enterprise-level unique ID for this person
  field :e_person_id, type: String      # Elibility system transaction-level foreign key
  field :e_concern_role_id, type: String  # Eligibility system 'unified person' foreign key
  field :aceds_id, type: Integer        # Medicaid system foreign key
  field :e_pdc_id, type: String
  field :import_source, type: String    # e.g. :b2b_gateway, :eligibility_system
  field :imported_at, type: DateTime
  # Carrier ids are N <-> N with members,
  # we'll store them at the policy level to avoid any issues
  # field :carrier_id, type: String
  field :dob, type: DateTime
  field :death_date, type: DateTime
  field :ssn, type: String
  field :gender, type: String
  field :ethnicity, type: String, default: ""
  field :race, type: String, default: ""
  field :birth_location, type: String, default: ""
  field :marital_status, type: String, default: ""
  field :hbx_role, type: String, default: ""
  field :citizen_status, type: String, default: 'us_citizen'
  field :is_state_resident, type: Boolean, default: true
  field :is_incarcerated, type: Boolean, default: false
  field :is_applicant, type: Boolean, default: true
  field :hlh, as: :tobacco_use_code, type: String, default: "unknown"
  field :lui, as: :language_code, type: String
  validates_presence_of :gender, message: "Choose a gender"
  validates_inclusion_of :gender, in: GENDER_TYPES, message: "Invalid gender"
  # validates_numericality_of :ssn
  validates_length_of :ssn, allow_blank: true, allow_nil: true, minimum: 9, maximum: 9,
                      message: "SSN must be 9 digits"
  validates :citizen_status,
            inclusion: { in: CITIZEN_STATUS_TYPES, message: "%{value} is not a valid citizen status" },
            allow_blank: true
  index({"person_relationships.subject_person" => 1})
  index({"person_relationships.object_person" => 1})
  # index({ hbx_member_id: 1 }, { unique: false, name: "member_exchange_id_index" })
  # index({ a_id: 1 }, { unique: false, name: "authority_member_exchange_id_index" })
  # index({ ssn: -1 }, { unique: false, sparse: true, name: "member_ssn_index" })
  embedded_in :person
  before_create :generate_hbx_member_id
  # Strip non-numeric chars from ssn
  # SSN validation rules, see: http://www.ssa.gov/employer/randomizationfaqs.html#a0=12
  def ssn=(val)
    return if val.blank?
    write_attribute(:ssn, val.to_s.gsub(/[^0-9]/i, ''))
  end
  # Normalizes gender to lowercase; blank values are ignored.
  def gender=(val)
    return if val.blank?
    write_attribute(:gender, val.downcase)
  end
  # def dob=(val)
  #   bday = DateTime.strptime(val, "%m-%d-%Y").to_date
  #   write_attribute(:dob, bday)
  # end
  # Policies that include this member as an enrollee.
  def policies
    Policy.elem_match(enrollees: { m_id: hbx_member_id })
  end
  # Distinct carriers across this member's policies.
  def carriers
    policies.map { |p| p.carrier }.uniq
  end
  # This member's enrollee record from each of their policies.
  def enrollees
    policies.map { |p| p.enrollees.find_by(m_id: hbx_member_id) }
  end
  # Policies on which this member, now over 26, is still listed as a
  # child dependent; [] while the member is 26 or younger.
  def policies_with_over_age_children
    return [] if dob > (Date.today - 26.years)
    policies.find_all { |p| p.enrollees.find_by(m_id: hbx_member_id).rel_code == "child" }
  end
  # True when this member record is the person's authoritative one.
  def authority?
    self.hbx_member_id == person.authority_member_id
  end
  # Merges selected attributes from another member record without
  # overwriting existing values with blanks (see MergingModel).
  def merge_member(m_member)
    merge_without_blanking(
      m_member,
      :e_concern_role_id,
      :dob,
      :gender,
      :ssn,
      :hlh,
      :lui,
      :import_source,
      :imported_at
    )
  end
  # Looks up a member by enterprise member id.
  def self.find_for_member_id(member_id)
    Queries::MemberByHbxIdQuery.new(member_id).execute
  end
  # Eligibility to receive a quote; currently only incarceration blocks.
  def can_be_quoted?
    # (citizen_status != "undocumented_immigrant") && \
    # (citizen_status != "not_lawfully_present_in_us") && \
    !is_incarcerated
  end
  protected
  # Defaults the enterprise id to the auto-increment primary key
  # (was `self.hbx_member_id = self.hbx_member_id || ...`).
  def generate_hbx_member_id
    self.hbx_member_id ||= self._id.to_s
  end
  # dob formatted as YYYYMMDD, or "" when unset.
  def dob_string
    self.dob.blank? ? "" : self.dob.strftime("%Y%m%d")
  end
  # Lowercases and strips a value, passing blanks through untouched.
  def safe_downcase(val)
    val.blank? ? val : val.downcase.strip
  end
end
Blocking missing gender.
# A person enrolled in the exchange, embedded in Person (Mongoid).
# Holds identity, demographic and eligibility attributes plus foreign
# keys into external systems. Gender must be male/female ('unknown' is
# no longer accepted).
class Member
  require 'date'
  include Mongoid::Document
  include Mongoid::Timestamps
  include MergingModel
  GENDER_TYPES = %W(male female)
  CITIZEN_STATUS_TYPES = %W[
    us_citizen
    naturalized_citizen
    alien_lawfully_present
    lawful_permanent_resident
    indian_tribe_member
    undocumented_immigrant
    not_lawfully_present_in_us
  ]
  # gdb_member_id is the primary key. if hbx_member_id isn't provided, gdb_member_id is used
  auto_increment :_id, seed: 9999
  field :hbx_member_id, type: String    # Enterprise-level unique ID for this person
  field :e_person_id, type: String      # Elibility system transaction-level foreign key
  field :e_concern_role_id, type: String  # Eligibility system 'unified person' foreign key
  field :aceds_id, type: Integer        # Medicaid system foreign key
  field :e_pdc_id, type: String
  field :import_source, type: String    # e.g. :b2b_gateway, :eligibility_system
  field :imported_at, type: DateTime
  # Carrier ids are N <-> N with members,
  # we'll store them at the policy level to avoid any issues
  # field :carrier_id, type: String
  field :dob, type: DateTime
  field :death_date, type: DateTime
  field :ssn, type: String
  field :gender, type: String
  field :ethnicity, type: String, default: ""
  field :race, type: String, default: ""
  field :birth_location, type: String, default: ""
  field :marital_status, type: String, default: ""
  field :hbx_role, type: String, default: ""
  field :citizen_status, type: String, default: 'us_citizen'
  field :is_state_resident, type: Boolean, default: true
  field :is_incarcerated, type: Boolean, default: false
  field :is_applicant, type: Boolean, default: true
  field :hlh, as: :tobacco_use_code, type: String, default: "unknown"
  field :lui, as: :language_code, type: String
  validates_presence_of :gender, message: "Choose a gender"
  validates_inclusion_of :gender, in: GENDER_TYPES, message: "Invalid gender"
  # validates_numericality_of :ssn
  validates_length_of :ssn, allow_blank: true, allow_nil: true, minimum: 9, maximum: 9,
                      message: "SSN must be 9 digits"
  validates :citizen_status,
            inclusion: { in: CITIZEN_STATUS_TYPES, message: "%{value} is not a valid citizen status" },
            allow_blank: true
  index({"person_relationships.subject_person" => 1})
  index({"person_relationships.object_person" => 1})
  # index({ hbx_member_id: 1 }, { unique: false, name: "member_exchange_id_index" })
  # index({ a_id: 1 }, { unique: false, name: "authority_member_exchange_id_index" })
  # index({ ssn: -1 }, { unique: false, sparse: true, name: "member_ssn_index" })
  embedded_in :person
  before_create :generate_hbx_member_id
  # Strip non-numeric chars from ssn
  # SSN validation rules, see: http://www.ssa.gov/employer/randomizationfaqs.html#a0=12
  def ssn=(val)
    return if val.blank?
    write_attribute(:ssn, val.to_s.gsub(/[^0-9]/i, ''))
  end
  # Normalizes gender to lowercase; blank values are ignored.
  def gender=(val)
    return if val.blank?
    write_attribute(:gender, val.downcase)
  end
  # def dob=(val)
  #   bday = DateTime.strptime(val, "%m-%d-%Y").to_date
  #   write_attribute(:dob, bday)
  # end
  # Policies that include this member as an enrollee.
  def policies
    Policy.elem_match(enrollees: { m_id: hbx_member_id })
  end
  # Distinct carriers across this member's policies.
  def carriers
    policies.map { |p| p.carrier }.uniq
  end
  # This member's enrollee record from each of their policies.
  def enrollees
    policies.map { |p| p.enrollees.find_by(m_id: hbx_member_id) }
  end
  # Policies on which this member, now over 26, is still listed as a
  # child dependent; [] while the member is 26 or younger.
  def policies_with_over_age_children
    return [] if dob > (Date.today - 26.years)
    policies.find_all { |p| p.enrollees.find_by(m_id: hbx_member_id).rel_code == "child" }
  end
  # True when this member record is the person's authoritative one.
  def authority?
    self.hbx_member_id == person.authority_member_id
  end
  # Merges selected attributes from another member record without
  # overwriting existing values with blanks (see MergingModel).
  def merge_member(m_member)
    merge_without_blanking(
      m_member,
      :e_concern_role_id,
      :dob,
      :gender,
      :ssn,
      :hlh,
      :lui,
      :import_source,
      :imported_at
    )
  end
  # Looks up a member by enterprise member id.
  def self.find_for_member_id(member_id)
    Queries::MemberByHbxIdQuery.new(member_id).execute
  end
  # Eligibility to receive a quote; currently only incarceration blocks.
  def can_be_quoted?
    # (citizen_status != "undocumented_immigrant") && \
    # (citizen_status != "not_lawfully_present_in_us") && \
    !is_incarcerated
  end
  protected
  # Defaults the enterprise id to the auto-increment primary key
  # (was `self.hbx_member_id = self.hbx_member_id || ...`).
  def generate_hbx_member_id
    self.hbx_member_id ||= self._id.to_s
  end
  # dob formatted as YYYYMMDD, or "" when unset.
  def dob_string
    self.dob.blank? ? "" : self.dob.strftime("%Y%m%d")
  end
  # Lowercases and strips a value, passing blanks through untouched.
  def safe_downcase(val)
    val.blank? ? val : val.downcase.strip
  end
end
|
# An accounting period for a company (nr 1-13 within a year). Status
# advances monotonically through New -> Open -> Done -> Closed.
class Period < ActiveRecord::Base
  belongs_to :company
  has_many :bills, :autosave => true
  has_many :journals, :autosave => true
  # only useful for random statistics
  has_many :journal_operations, :through => :journals
  STATUSES = {0 => 'New', 1 => 'Open', 2 => 'Done', 3 => 'Closed'}.freeze
  STATUSE_NAMES = {'New' => 0, 'Open' => 1, 'Done' => 2, 'Closed' => 3}.freeze
  # Human-readable name for the numeric status.
  def status_name
    STATUSES[self.status]
  end
  # True while the period accepts bookings.
  def open?
    # Named constant instead of the magic number 1.
    self.status == Period::STATUSE_NAMES['Open']
  end
  # Builds (without saving) the following period; period 13 rolls over
  # into period 1 of the next year.
  def create_next
    year = self.year
    nr = self.nr + 1
    if nr > 13
      nr = 1
      year = year + 1
    end
    Period.new :company => self.company, :year => year, :nr => nr, :status => Period::STATUSE_NAMES['New']
  end
  # Bills in this period that are still open.
  def open_bills
    self.bills.find_all { |bill| bill.open? }
  end
  # Re-parents all open bills onto new_period.
  def move_open_bills(new_period)
    self.open_bills.each do |bill|
      new_period.bills.push(bill)
      self.bills.delete(bill)
    end
  end
  # A period at or past 'Open' cannot be elevated while open bills remain.
  def status_elevation_requires_closing_bills?
    self.status >= Period::STATUSE_NAMES['Open'] && self.open_bills.count > 0
  end
  # Advances the status by one step; raises (RuntimeError, not a bare
  # Exception, so `rescue StandardError` catches it) when open bills block
  # the transition. Does not save.
  def elevate_status
    if self.status_elevation_requires_closing_bills?
      raise "Can not close period with open bills"
    end
    self.status += 1
  end
end
Write access for orders/invoices, just leveraging the existing
authorization code here.
# An accounting period for a company (nr 1-13 within a year). Status
# advances monotonically through New -> Open -> Done -> Closed; openness
# for editing is delegated to declarative_authorization.
class Period < ActiveRecord::Base
  belongs_to :company
  has_many :bills, :autosave => true
  has_many :journals, :autosave => true
  # only useful for random statistics
  has_many :journal_operations, :through => :journals
  STATUSES = {0 => 'New', 1 => 'Open', 2 => 'Done', 3 => 'Closed'}.freeze
  STATUSE_NAMES = {'New' => 0, 'Open' => 1, 'Done' => 2, 'Closed' => 3}.freeze
  # Human-readable name for the numeric status.
  def status_name
    STATUSES[self.status]
  end
  # True when the current user may update this period (authorization
  # check, not a status check).
  def open?
    permitted_to? :update, self
  end
  # Builds (without saving) the following period; period 13 rolls over
  # into period 1 of the next year.
  def create_next
    year = self.year
    nr = self.nr + 1
    if nr > 13
      nr = 1
      year = year + 1
    end
    Period.new :company => self.company, :year => year, :nr => nr, :status => Period::STATUSE_NAMES['New']
  end
  # Bills in this period that are still open.
  def open_bills
    self.bills.find_all { |bill| bill.open? }
  end
  # Re-parents all open bills onto new_period.
  def move_open_bills(new_period)
    self.open_bills.each do |bill|
      new_period.bills.push(bill)
      self.bills.delete(bill)
    end
  end
  # A period at or past 'Open' cannot be elevated while open bills remain.
  def status_elevation_requires_closing_bills?
    self.status >= Period::STATUSE_NAMES['Open'] && self.open_bills.count > 0
  end
  # Advances the status by one step; raises (RuntimeError, not a bare
  # Exception, so `rescue StandardError` catches it) when open bills block
  # the transition. Does not save.
  def elevate_status
    if self.status_elevation_requires_closing_bills?
      raise "Can not close period with open bills"
    end
    self.status += 1
  end
end
|
# A user's public persona in the sharing network: owns items, makes and
# receives item requests, maintains a trust network, and accumulates a
# gifting reputation and activity/event feed.
class Person < ActiveRecord::Base
  belongs_to :user
  has_many :items, :as => :owner
  has_many :item_requests, :as => :requester
  has_many :item_gifts, :as => :gifter, :class_name => "ItemRequest"
  has_many :people_network_requests
  has_many :received_people_network_requests, :class_name => "PeopleNetworkRequest", :foreign_key => "trusted_person_id"
  has_many :people_networks
  has_many :received_people_networks, :class_name => "PeopleNetwork", :foreign_key => "trusted_person_id"
  has_many :activity_logs, :as => :primary
  has_many :activity_logs_as_secondary, :as => :secondary, :class_name => "ActivityLog"
  has_many :event_logs, :as => :primary
  has_many :event_logs_as_secondary, :as => :secondary, :class_name => "EventLog"
  has_many :event_entities, :as => :entity
  has_many :related_event_logs, :through => :event_entities, :source => :event_log
  has_many :feedbacks
  has_one :reputation_rating
  validates_presence_of :user_id, :name
  after_create :create_entity_for_person
  # True when this person is the given user's persona.
  def belongs_to?(some_user)
    user == some_user
  end
  # Truthy (the network record) when this person trusts other_person.
  def trusts?(other_person)
    self.people_networks.involves_as_trusted_person(other_person).first
  end
  # after_create hook: registers a matching Entity row.
  def create_entity_for_person
    Entity.create!(:entity_type_id => EntityType::PERSON_ENTITY, :specific_entity_id => self.id)
  end
  # Number of people this person trusts.
  def trusted_network_size
    self.people_networks.count
  end
  # Name search (parameterized LIKE); '' for an empty query.
  # NOTE(review): search term is downcased but the column is not — rely on
  # the DB collation being case-insensitive, or confirm intent.
  def self.search(search)
    search.empty? ? '' : Person.where("name LIKE ?", "%#{search.downcase}%")
  end
  # Comma-separated id list of self plus trusted friends, for search scoping.
  def searchable_core_of_friends
    ids = self.people_networks.map { |n| n.trusted_person_id }
    ids.push( self.id)
    ids = ids.map! { |k| "#{k}" }.join(",")
  end
  # The people this person trusts.
  def trusted_friends
    self.people_networks.map { |n| n.trusted_person }
  end
  # Friends of this person who also trust other_person.
  def mutural_friends(other_person)
    mutural_friends = []
    self.people_networks.each do |n|
      mutural_friends.push(n.trusted_person) if n.trusted_person.trusts?(other_person)
    end
    mutural_friends
  end
  # Sum of the network sizes of everyone this person trusts.
  def extended_network_size
    people_ids = self.people_networks.map{|i| i["trusted_person_id"]}
    friends = Person.find(:all, :conditions => ["id IN (?)", people_ids])
    size = 0
    friends.each { |person| size += person.people_networks.count }
    size
  end
  # Count variant of #mutural_friends.
  def mutural_friends_count(other_person)
    mutural_friends = 0
    self.people_networks.each do |pn|
      mutural_friends+=1 if pn.trusted_person.trusts?(other_person)
    end
    mutural_friends
  end
  # Number of people who trust this person.
  def trusts_me_count
    self.people_networks.involves(self).count
  end
  # All item requests involving this person (either side).
  def all_item_requests
    ItemRequest.involves(self)
  end
  # Active item requests involving this person, newest first.
  def active_item_requests
    ItemRequest.involves(self).active.order("created_at DESC")
  end
  # Unanswered requests involving this person, optionally narrowed to one requester.
  def unanswered_requests(requester = nil)
    if requester
      ItemRequest.unanswered.involves(self).involves(requester)
    else
      ItemRequest.unanswered.involves(self)
    end
  end
  # Avatar is delegated to the owning user.
  def avatar(avatar_size = nil)
    self.user.avatar(avatar_size)
  end
  # First whitespace-separated token of the name.
  def first_name
    name.split.first
  end
  # Caches (as EventDisplay rows) the 25 newest events involving this
  # person or anyone in their trusted network. Returns the raw id rows.
  def news_feed
    # Updated SQL to get all events relating to anyone in a user's trusted
    # network or to themselves
    self_id = self.id
    ee = Arel::Table.new(EventEntity.table_name.to_sym)
    pn = Arel::Table.new(PeopleNetwork.table_name.to_sym)
    pn_network = pn.project(pn[:trusted_person_id], Arel.sql("4 as trusted_relationship_value")).where(pn[:person_id].eq(self_id))
    query = ee.project(Arel.sql("#{ee.name}.event_log_id as event_log_id"), Arel.sql("SUM(trusted_relationship_value) as total_relationship_value"))
    query = query.join(Arel.sql("LEFT JOIN (#{pn_network.to_sql}) AS network ON #{ee.name}.entity_id = network.trusted_person_id AND #{ee.name}.entity_type = 'Person'"))
    query = query.group(ee[:event_log_id], ee[:created_at]).order("#{ee.name}.created_at DESC").take(25)
    query = query.where(Arel.sql("trusted_person_id IS NOT NULL or (#{ee.name}.entity_type = 'Person' and #{ee.name}.entity_id = #{self_id})"))
    event_log_ids = EventEntity.find_by_sql(query.to_sql)
    # CASHE PREVIOUSLY SHOWN NEWS FEED IF NOT ALREADY CASHED
    event_log_ids = event_log_ids.reverse
    event_log_ids.each do |e|
      conditions = { :type_id => EventDisplay::DASHBOARD_FEED,
                     :person_id => self_id,
                     :event_log_id => e.event_log_id }
      EventDisplay.find(:first, :conditions => conditions) || EventDisplay.create(conditions)
    end
  end
  #SHOW NEWS FEED THAT ARE STORED IN CASHE, BUT NOT SHOWN AT SAME TIME AS CURRENT NEWS FEED
  def news_feed_cashe(event_log_ids)
    news_event_logs = event_log_ids.map{|e| e.event_log_id}
    event_displays = EventDisplay.find(:all, :conditions => ["type_id=? and person_id=? and event_log_id not in (?)",
        EventDisplay::DASHBOARD_FEED, self.id, news_event_logs], :order => 'event_log_id DESC').take(25)
    news_cashe_event_logs = event_displays.map{|e| e.event_log_id}
    EventLog.find(:all, :conditions => ["id IN (?)", news_cashe_event_logs], :order => 'created_at DESC')
  end
  # Number of gift actions this person has performed.
  def gift_act_actions
    ActivityLog.gift_actions(self).size
  end
  # Number of distinct people this person has completed requests for.
  def people_helped
    ActivityLog.find(:all, :select => 'DISTINCT secondary_id',
      :conditions => ["primary_id =? and primary_type=? and secondary_type=? and event_type_id IN (?)",
        self.id, "Person", "Person", EventType.completed_request_ids]).size
  end
  # Computes, persists and returns the gifting reputation score.
  def gift_act_rating
    rating = (self.people_helped - 1 + self.gift_act_actions.to_f/20)
    update_gift_act_rating(rating)
    rating
  end
  # Stores the rating, clamped at zero.
  def update_gift_act_rating(rating)
    gift_act = PersonGiftActRating.find_or_create_by_person_id(:person_id => self.id)
    # Clamp instead of the original side-effecting ternary
    # (`rating < 0 ? rating = 0 : rating`).
    rating = 0 if rating < 0
    gift_act.gift_act_rating = rating
    gift_act.save!
  end
  ###########
  # Trust related methods
  ###########
  def request_trusted_relationship(person_requesting)
    self.received_people_network_requests.create(:person => person_requesting)
  end
  def requested_trusted_relationship?(person_requesting)
    self.received_people_network_requests.where(:person_id => person_requesting).count > 0
  end
  def requested_trusted_relationship(person_requesting)
    self.received_people_network_requests.where(:person_id => person_requesting).first
  end
  ###########
  # Latest activity methods for personal page
  ###########
  def public_events
    EventLog.involves(self).completed_requests.order("#{EventLog.table_name}.created_at DESC").take(7)
  end
  # Activities visible to current_user: shared activities for others,
  # public activities for one's own page.
  def public_activities(current_user)
    if !current_user.person.same_as_person?(self)
      activites = ActivityLog.activities_involving(self, current_user.person).order("created_at desc").limit(7)
    else
      activites = ActivityLog.public_activities(current_user.person).order("created_at desc").limit(7)
    end
  end
  def same_as_person?(person)
    self.id == person.id
  end
end
downcase removed
# A user's public persona in the sharing network: owns items, makes and
# receives item requests, maintains a trust network, and accumulates a
# gifting reputation and activity/event feed.
class Person < ActiveRecord::Base
  belongs_to :user
  has_many :items, :as => :owner
  has_many :item_requests, :as => :requester
  has_many :item_gifts, :as => :gifter, :class_name => "ItemRequest"
  has_many :people_network_requests
  has_many :received_people_network_requests, :class_name => "PeopleNetworkRequest", :foreign_key => "trusted_person_id"
  has_many :people_networks
  has_many :received_people_networks, :class_name => "PeopleNetwork", :foreign_key => "trusted_person_id"
  has_many :activity_logs, :as => :primary
  has_many :activity_logs_as_secondary, :as => :secondary, :class_name => "ActivityLog"
  has_many :event_logs, :as => :primary
  has_many :event_logs_as_secondary, :as => :secondary, :class_name => "EventLog"
  has_many :event_entities, :as => :entity
  has_many :related_event_logs, :through => :event_entities, :source => :event_log
  has_many :feedbacks
  has_one :reputation_rating
  validates_presence_of :user_id, :name
  after_create :create_entity_for_person
  # True when this person is the given user's persona.
  def belongs_to?(some_user)
    user == some_user
  end
  # Truthy (the network record) when this person trusts other_person.
  def trusts?(other_person)
    self.people_networks.involves_as_trusted_person(other_person).first
  end
  # after_create hook: registers a matching Entity row.
  def create_entity_for_person
    Entity.create!(:entity_type_id => EntityType::PERSON_ENTITY, :specific_entity_id => self.id)
  end
  # Number of people this person trusts.
  def trusted_network_size
    self.people_networks.count
  end
  # Name search (parameterized LIKE); '' for an empty query.
  def self.search(search)
    search.empty? ? '' : Person.where("name LIKE ?", "%#{search}%")
  end
  # Comma-separated id list of self plus trusted friends, for search scoping.
  def searchable_core_of_friends
    ids = self.people_networks.map { |n| n.trusted_person_id }
    ids.push( self.id)
    ids = ids.map! { |k| "#{k}" }.join(",")
  end
  # The people this person trusts.
  def trusted_friends
    self.people_networks.map { |n| n.trusted_person }
  end
  # Friends of this person who also trust other_person.
  def mutural_friends(other_person)
    mutural_friends = []
    self.people_networks.each do |n|
      mutural_friends.push(n.trusted_person) if n.trusted_person.trusts?(other_person)
    end
    mutural_friends
  end
  # Sum of the network sizes of everyone this person trusts.
  def extended_network_size
    people_ids = self.people_networks.map{|i| i["trusted_person_id"]}
    friends = Person.find(:all, :conditions => ["id IN (?)", people_ids])
    size = 0
    friends.each { |person| size += person.people_networks.count }
    size
  end
  # Count variant of #mutural_friends.
  def mutural_friends_count(other_person)
    mutural_friends = 0
    self.people_networks.each do |pn|
      mutural_friends+=1 if pn.trusted_person.trusts?(other_person)
    end
    mutural_friends
  end
  # Number of people who trust this person.
  def trusts_me_count
    self.people_networks.involves(self).count
  end
  # All item requests involving this person (either side).
  def all_item_requests
    ItemRequest.involves(self)
  end
  # Active item requests involving this person, newest first.
  def active_item_requests
    ItemRequest.involves(self).active.order("created_at DESC")
  end
  # Unanswered requests involving this person, optionally narrowed to one requester.
  def unanswered_requests(requester = nil)
    if requester
      ItemRequest.unanswered.involves(self).involves(requester)
    else
      ItemRequest.unanswered.involves(self)
    end
  end
  # Avatar is delegated to the owning user.
  def avatar(avatar_size = nil)
    self.user.avatar(avatar_size)
  end
  # First whitespace-separated token of the name.
  def first_name
    name.split.first
  end
  # Caches (as EventDisplay rows) the 25 newest events involving this
  # person or anyone in their trusted network. Returns the raw id rows.
  def news_feed
    # Updated SQL to get all events relating to anyone in a user's trusted
    # network or to themselves
    self_id = self.id
    ee = Arel::Table.new(EventEntity.table_name.to_sym)
    pn = Arel::Table.new(PeopleNetwork.table_name.to_sym)
    pn_network = pn.project(pn[:trusted_person_id], Arel.sql("4 as trusted_relationship_value")).where(pn[:person_id].eq(self_id))
    query = ee.project(Arel.sql("#{ee.name}.event_log_id as event_log_id"), Arel.sql("SUM(trusted_relationship_value) as total_relationship_value"))
    query = query.join(Arel.sql("LEFT JOIN (#{pn_network.to_sql}) AS network ON #{ee.name}.entity_id = network.trusted_person_id AND #{ee.name}.entity_type = 'Person'"))
    query = query.group(ee[:event_log_id], ee[:created_at]).order("#{ee.name}.created_at DESC").take(25)
    query = query.where(Arel.sql("trusted_person_id IS NOT NULL or (#{ee.name}.entity_type = 'Person' and #{ee.name}.entity_id = #{self_id})"))
    event_log_ids = EventEntity.find_by_sql(query.to_sql)
    # CASHE PREVIOUSLY SHOWN NEWS FEED IF NOT ALREADY CASHED
    event_log_ids = event_log_ids.reverse
    event_log_ids.each do |e|
      conditions = { :type_id => EventDisplay::DASHBOARD_FEED,
                     :person_id => self_id,
                     :event_log_id => e.event_log_id }
      EventDisplay.find(:first, :conditions => conditions) || EventDisplay.create(conditions)
    end
  end
  #SHOW NEWS FEED THAT ARE STORED IN CASHE, BUT NOT SHOWN AT SAME TIME AS CURRENT NEWS FEED
  def news_feed_cashe(event_log_ids)
    news_event_logs = event_log_ids.map{|e| e.event_log_id}
    event_displays = EventDisplay.find(:all, :conditions => ["type_id=? and person_id=? and event_log_id not in (?)",
        EventDisplay::DASHBOARD_FEED, self.id, news_event_logs], :order => 'event_log_id DESC').take(25)
    news_cashe_event_logs = event_displays.map{|e| e.event_log_id}
    EventLog.find(:all, :conditions => ["id IN (?)", news_cashe_event_logs], :order => 'created_at DESC')
  end
  # Number of gift actions this person has performed.
  def gift_act_actions
    ActivityLog.gift_actions(self).size
  end
  # Number of distinct people this person has completed requests for.
  def people_helped
    ActivityLog.find(:all, :select => 'DISTINCT secondary_id',
      :conditions => ["primary_id =? and primary_type=? and secondary_type=? and event_type_id IN (?)",
        self.id, "Person", "Person", EventType.completed_request_ids]).size
  end
  # Computes, persists and returns the gifting reputation score.
  def gift_act_rating
    rating = (self.people_helped - 1 + self.gift_act_actions.to_f/20)
    update_gift_act_rating(rating)
    rating
  end
  # Stores the rating, clamped at zero.
  def update_gift_act_rating(rating)
    gift_act = PersonGiftActRating.find_or_create_by_person_id(:person_id => self.id)
    # Clamp instead of the original side-effecting ternary
    # (`rating < 0 ? rating = 0 : rating`).
    rating = 0 if rating < 0
    gift_act.gift_act_rating = rating
    gift_act.save!
  end
  ###########
  # Trust related methods
  ###########
  def request_trusted_relationship(person_requesting)
    self.received_people_network_requests.create(:person => person_requesting)
  end
  def requested_trusted_relationship?(person_requesting)
    self.received_people_network_requests.where(:person_id => person_requesting).count > 0
  end
  def requested_trusted_relationship(person_requesting)
    self.received_people_network_requests.where(:person_id => person_requesting).first
  end
  ###########
  # Latest activity methods for personal page
  ###########
  def public_events
    EventLog.involves(self).completed_requests.order("#{EventLog.table_name}.created_at DESC").take(7)
  end
  # Activities visible to current_user: shared activities for others,
  # public activities for one's own page.
  def public_activities(current_user)
    if !current_user.person.same_as_person?(self)
      activites = ActivityLog.activities_involving(self, current_user.person).order("created_at desc").limit(7)
    else
      activites = ActivityLog.public_activities(current_user.person).order("created_at desc").limit(7)
    end
  end
  def same_as_person?(person)
    self.id == person.id
  end
end
|
# frozen_string_literal: true
# == Schema Information
#
# Table name: people
#
# id :integer not null, primary key
# first_name :string(255)
# last_name :string(255)
# email_address :string(255)
# address_1 :string(255)
# address_2 :string(255)
# city :string(255)
# state :string(255)
# postal_code :string(255)
# geography_id :integer
# primary_device_id :integer
# primary_device_description :string(255)
# secondary_device_id :integer
# secondary_device_description :string(255)
# primary_connection_id :integer
# primary_connection_description :string(255)
# phone_number :string(255)
# participation_type :string(255)
# created_at :datetime
# updated_at :datetime
# signup_ip :string(255)
# signup_at :datetime
# voted :string(255)
# called_311 :string(255)
# secondary_connection_id :integer
# secondary_connection_description :string(255)
# verified :string(255)
# preferred_contact_method :string(255)
# token :string(255)
# active :boolean default(TRUE)
# deactivated_at :datetime
# deactivated_method :string(255)
# neighborhood :string(255)
# referred_by :string(255)
# low_income :boolean
# rapidpro_uuid :string(255)
# landline :string(255)
# created_by :integer
# screening_status :string(255) default("new")
# phone_confirmed :boolean default(FALSE)
# email_confirmed :boolean default(FALSE)
# confirmation_sent :boolean default(FALSE)
# welcome_sent :boolean default(FALSE)
# participation_level :string(255) default("new")
# locale :string(255) default("en")
# cached_tag_list :text(65535)
#
# FIXME: Refactor and re-enable cop
# rubocop:disable ClassLength
class Person < ApplicationRecord
  has_paper_trail
  acts_as_taggable

  # Values stored in the string `verified` column.
  VERIFIED_TYPES = [
    VERIFIED_TYPE = 'Verified',
    NOT_VERIFIED_TYPE = 'No'
  ].freeze

  # * Active DIG member = “Participated in 3+ sessions” = invited to join FB group;
  # * [Need another name for level 2] = “Participated in at least one season--
  #   (could code as 6 months active) OR at least 2 different projects/teams
  #   (could code based on being tagged in a session by at least 2 different teams)
  # * DIG Ambassador = “active for at least one year, 2+ projects/teams
  # if there’s any way to automate that info to flow into dashboard/pool —
  # and notify me when new person gets added-- that would be amazing
  PARTICIPATION_LEVELS = [
    PARTICIPATION_LEVEL_NEW = "new",
    PARTICIPATION_LEVEL_INACTIVE = "inactive",
    PARTICIPATION_LEVEL_PARTICIPANT = "participant",
    PARTICIPATION_LEVEL_ACTIVE = "active",
    PARTICIPATION_LEVEL_AMBASSADOR = "ambassador"
  ].freeze

  # NOTE(review): bare `page 50` looks like a typo (Kaminari's `paginates_per 50`?) — confirm.
  page 50

  # include Searchable
  include ExternalDataMappings
  include Neighborhoods

  # Phone normalization (phony_rails); landline normalized alongside the mobile number.
  phony_normalize :phone_number, default_country_code: 'US'
  phony_normalized_method :phone_number, default_country_code: 'US'
  phony_normalize :landline, default_country_code: 'US'

  has_many :comments, as: :commentable, dependent: :destroy
  has_many :rewards
  accepts_nested_attributes_for :rewards, reject_if: :all_blank
  attr_accessor :rewards_attributes
  has_many :invitations
  has_many :research_sessions, through: :invitations
  # TODO: remove people from carts on deactivation
  has_many :carts_people
  has_many :carts, through: :carts_people, foreign_key: :person_id
  has_secure_token :token

  # External-service syncing only runs in production, and only when the
  # corresponding credentials are configured.
  if ENV['RAILS_ENV'] == 'production'
    after_commit :send_to_mailchimp, on: %i[update create] if ENV['MAILCHIMP_API_KEY']
    after_commit :update_rapidpro, on: %i[update create] if ENV['RAPIDPRO_TOKEN']
    before_destroy :delete_from_rapidpro if ENV['RAPIDPRO_TOKEN']
  end
  after_create :update_neighborhood
  after_commit :send_new_person_notifications, on: :create

  validates :first_name, presence: true
  validates :last_name, presence: true
  validates :postal_code, presence: true
  validates :postal_code, zipcode: { country_code: :us }
  # A person must be reachable by at least one of phone or email.
  validates :phone_number, presence: true, length: { in: 9..15 },
                           unless: proc { |person| person.email_address.present? }
  validates :email_address, presence: true,
                            unless: proc { |person| person.phone_number.present? }
  validates :email_address,
            format: { with: Devise.email_regexp,
                      if: proc { |person| person.email_address.present? } }
  validates :phone_number, allow_blank: true, uniqueness: true
  validates :landline, allow_blank: true, uniqueness: true
  validates :email_address, email: true, allow_blank: true, uniqueness: true

  # scope :no_signup_card, -> { where('id NOT IN (SELECT DISTINCT(person_id) FROM rewards where rewards.reason = 1)') }
  # scope :signup_card_needed, -> { joins(:rewards).where('rewards.reason !=1') }
  scope :verified, -> { where('verified like ?', '%Verified%') }
  scope :not_verified, -> { where.not('verified like ?', '%Verified%') }
  scope :active, -> { where(active: true) }
  scope :deactivated, -> { where(active: false) }
  scope :order_by_reward_sum, -> { joins(:rewards).includes(:research_sessions).where('rewards.created_at >= ?', Time.current.beginning_of_year).select('people.id, people.first_name,people.last_name, people.active,sum(rewards.amount_cents) as total_rewards').group('people.id').order('total_rewards desc') }
  # no longer using this. now managing active elsewhere
  # default_scope { where(active:

  # Case-insensitive "first last" full-name matcher for Ransack searches.
  ransacker :full_name, formatter: proc { |v| v.mb_chars.downcase.to_s } do |parent|
    Arel::Nodes::NamedFunction.new('lower',
                                   [Arel::Nodes::NamedFunction.new('concat_ws',
                                                                   [Arel::Nodes.build_quoted(' '), parent.table[:first_name], parent.table[:last_name]])])
  end
  scope :ransack_tagged_with, ->(*tags) { tagged_with(tags) }

  # Scopes Ransack may invoke from search params.
  # NOTE(review): :no_signup_card is commented out above — confirm it should stay whitelisted.
  def self.ransackable_scopes(_auth_object = nil)
    %i[no_signup_card ransack_tagged_with]
  end

  # Maps a human locale name ("English") to its ISO code ("en"); nil if unknown.
  def self.locale_name_to_locale(locale_name)
    obj = { 'english' => 'en', 'spanish' => 'es', 'chinese' => 'zh' }
    obj[locale_name.to_s.downcase]
  end

  ransack_alias :comments, :comments_content
  ransack_alias :nav_bar_search, :full_name_or_email_address_or_phone_number_or_comments_content

  # def self.send_all_reminders
  #   # this is where reservation_reminders
  #   # called by whenever in /config/schedule.rb
  #   Person.active.all.find_each(&:send_invitation_reminder)
  # end

  # Recomputes every active person's participation level and, when anything
  # changed, emails each approved admin a digest of the changes.
  def self.update_all_participation_levels
    # NOTE(review): @results is a class-level ivar; a local would do unless it is read elsewhere.
    @results = []
    Person.active.all.find_each do |person|
      @results << person.update_participation_level
    end
    @results.compact!
    if @results.present?
      User.approved.admin.all.find_each do |u|
        AdminMailer.participation_level_change(results: @results, to: u.email_address).deliver_later
      end
    end
  end

  # True when the person has old rewards but none within the past year.
  def inactive_criteria
    at_least_one_reward_older_than_a_year = rewards.where('created_at < ?', 1.year.ago).size >= 1
    no_rewards_in_the_past_year = rewards.where('created_at >= ?', 1.year.ago).size.zero?
    at_least_one_reward_older_than_a_year && no_rewards_in_the_past_year
  end

  # True when rewarded for at least one research session within the past year.
  def participant_criteria
    # gotten a gift card in the past year.
    rewards.where('created_at > ?', 1.year.ago).map { |g| g&.research_session&.id }.compact.uniq.size >= 1
  end

  # True when rewarded for at least one research session within the past six months.
  def active_criteria
    at_least_one_reward_in_past_six_months = rewards.where('created_at > ?', 6.months.ago).map { |g| g&.research_session&.id }.compact.uniq.size >= 1
    at_least_one_reward_in_past_six_months
  end

  # Ambassadors are either specially tagged or meet the two-team / three-session bar.
  def ambassador_criteria
    if tag_list.include?('brl special ambassador')
      true
    else
      sessions_with_two_or_more_teams_in_the_past_year = rewards.where('created_at > ?', 1.year.ago).map(&:team).uniq.size >= 2
      at_least_three_sessions_ever = rewards.map { |g| g&.research_session&.id }.compact.uniq.size >= 3
      sessions_with_two_or_more_teams_in_the_past_year && at_least_three_sessions_ever
    end
  end

  # Highest participation level whose criteria are currently met;
  # later checks intentionally override earlier ones.
  def calc_participation_level
    pl = PARTICIPATION_LEVEL_NEW # needs outreach
    pl = PARTICIPATION_LEVEL_INACTIVE if inactive_criteria
    pl = PARTICIPATION_LEVEL_PARTICIPANT if participant_criteria
    pl = PARTICIPATION_LEVEL_ACTIVE if active_criteria
    pl = PARTICIPATION_LEVEL_AMBASSADOR if ambassador_criteria
    pl
  end

  # Recomputes and persists the participation level, keeps the matching tag and
  # the participation-level carts in sync, and returns a change summary hash
  # ({ pid:, old:, new: }) or nil when nothing changed.
  def update_participation_level
    return if tag_list.include? 'not dig'
    new_level = calc_participation_level
    if participation_level != new_level
      old_level = participation_level
      self.participation_level = new_level
      tag_list.remove(old_level)
      tag_list.add(new_level)
      save
      Cart.where(name: Person::PARTICIPATION_LEVELS).find_each do |cart|
        if cart.name == new_level
          begin
            cart.people << self
          rescue StandardError
            # Best-effort: ignore failures such as the person already being in the cart.
            # (The original evaluated the bare constant ActiveRecord::RecordInvalid here — a no-op.)
          end
        else
          cart.remove_person(id) # no-op if person not in cart
        end
      end # end cart update
      { pid: id, old: old_level, new: new_level }
    end
  end

  def verified?
    verified&.start_with?('Verified')
  end

  # Sum of this calendar year's rewards, as Money (amount_cents are USD cents).
  def rewards_total
    end_of_last_year = Time.zone.today.beginning_of_year - 1.day
    total = rewards.where('created_at > ?', end_of_last_year).sum(:amount_cents)
    Money.new(total, 'USD')
  end

  def rewards_count
    rewards.size
  end

  def tag_values
    tags.collect(&:name)
  end

  def tag_count
    tag_list.size
  end

  def screened?
    tag_list.include?('screened')
  end

  # Queues a Mailchimp sync; deactivated people are unsubscribed.
  def send_to_mailchimp
    status = active? ? 'subscribed' : 'unsubscribed'
    MailchimpUpdateJob.perform_async(id, status)
  end

  def delete_from_rapidpro
    RapidproDeleteJob.perform_async(id)
  end

  # Active, non-"not dig" people are pushed to RapidPro; everyone else is removed.
  def update_rapidpro
    if active && !tag_list.include?('not dig')
      RapidproUpdateJob.perform_async(id)
    else
      # The original `elsif !active || tag_list.include?('not dig')` is the exact
      # negation of the `if` condition, i.e. always true here.
      delete_from_rapidpro
    end
  end

  def primary_device_type_name
    Patterns::Application.config.device_mappings.rassoc(primary_device_id)[0].to_s if primary_device_id.present?
  end

  def secondary_device_type_name
    Patterns::Application.config.device_mappings.rassoc(secondary_device_id)[0].to_s if secondary_device_id.present?
  end

  def primary_connection_type_name
    Patterns::Application.config.connection_mappings.rassoc(primary_connection_id)[0].to_s if primary_connection_id.present?
  end

  def secondary_connection_type_name
    Patterns::Application.config.connection_mappings.rassoc(secondary_connection_id)[0].to_s if secondary_connection_id.present?
  end

  # Coordinate lookup for the person's ZIP code; nil when unknown.
  def lat_long
    ::ZIP_LAT_LONG[postal_code.to_s]
  end

  def full_name
    [first_name, last_name].join(' ')
  end

  def address_fields_to_sentence
    [address_1, address_2, city, state, postal_code].reject(&:blank?).join(', ')
  end

  # def send_invitation_reminder
  #   # called by whenever in /config/schedule.rb
  #   invs = invitations.remindable.upcoming(2)
  #   case preferred_contact_method.upcase
  #   when 'SMS'
  #     ::InvitationReminderSms.new(to: person, invitations: invs).send
  #   when 'EMAIL'
  #     ::PersonMailer.remind(
  #       invitations: invs,
  #       email_address: email_address
  #     ).deliver_later
  #   end
  #   invs.each do |inv|
  #     if inv.aasm_state == 'invited'
  #       inv.aasm_state = 'reminded'
  #       inv.save
  #     end
  #   end
  # end

  # Export row: every column value plus a pipe-joined "tags" column, with
  # phone numbers nationally formatted and blank phone/email rendered as ''.
  def to_a
    fields = Person.column_names
    fields.push('tags')
    fields.map do |f|
      field_value = send(f.to_sym)
      if f == 'phone_number'
        if field_value.present?
          field_value.phony_formatted(format: :national, spaces: '-')
        else
          ''
        end
      elsif f == 'email_address'
        field_value.presence || ''
      elsif f == 'tags'
        tag_values.present? ? tag_values.join('|') : ''
      else
        field_value
      end
    end
  end

  def deactivate!(type = nil)
    self.active = false
    self.deactivated_at = Time.current
    self.deactivated_method = type if type
    save! # sends background mailchimp update
    delete_from_rapidpro # remove from rapidpro
  end

  def reactivate!
    self.active = true
    save!
    update_rapidpro
  end

  # Gravatar-style MD5 of the downcased email; nil when no email is present.
  def md5_email
    Digest::MD5.hexdigest(email_address.downcase) if email_address.present?
  end

  # Backfills signup_at and derives the neighborhood from the ZIP code.
  def update_neighborhood
    n = zip_to_neighborhood(postal_code)
    self.signup_at = created_at if signup_at.nil?
    if n.present?
      self.neighborhood = n
      save
    end
  end

  def send_new_person_notifications
    User.where(new_person_notification: true).find_each do |user|
      email = user.email_address
      ::UserMailer.new_person_notify(email_address: email, person: self).deliver_later
    end
  end
end
# rubocop:enable ClassLength
Remove the unused Person#verified? method.
# frozen_string_literal: true
# == Schema Information
#
# Table name: people
#
# id :integer not null, primary key
# first_name :string(255)
# last_name :string(255)
# email_address :string(255)
# address_1 :string(255)
# address_2 :string(255)
# city :string(255)
# state :string(255)
# postal_code :string(255)
# geography_id :integer
# primary_device_id :integer
# primary_device_description :string(255)
# secondary_device_id :integer
# secondary_device_description :string(255)
# primary_connection_id :integer
# primary_connection_description :string(255)
# phone_number :string(255)
# participation_type :string(255)
# created_at :datetime
# updated_at :datetime
# signup_ip :string(255)
# signup_at :datetime
# voted :string(255)
# called_311 :string(255)
# secondary_connection_id :integer
# secondary_connection_description :string(255)
# verified :string(255)
# preferred_contact_method :string(255)
# token :string(255)
# active :boolean default(TRUE)
# deactivated_at :datetime
# deactivated_method :string(255)
# neighborhood :string(255)
# referred_by :string(255)
# low_income :boolean
# rapidpro_uuid :string(255)
# landline :string(255)
# created_by :integer
# screening_status :string(255) default("new")
# phone_confirmed :boolean default(FALSE)
# email_confirmed :boolean default(FALSE)
# confirmation_sent :boolean default(FALSE)
# welcome_sent :boolean default(FALSE)
# participation_level :string(255) default("new")
# locale :string(255) default("en")
# cached_tag_list :text(65535)
#
# FIXME: Refactor and re-enable cop
# rubocop:disable ClassLength
class Person < ApplicationRecord
  has_paper_trail
  acts_as_taggable

  # Values stored in the string `verified` column.
  VERIFIED_TYPES = [
    VERIFIED_TYPE = 'Verified',
    NOT_VERIFIED_TYPE = 'No'
  ].freeze

  # * Active DIG member = “Participated in 3+ sessions” = invited to join FB group;
  # * [Need another name for level 2] = “Participated in at least one season--
  #   (could code as 6 months active) OR at least 2 different projects/teams
  #   (could code based on being tagged in a session by at least 2 different teams)
  # * DIG Ambassador = “active for at least one year, 2+ projects/teams
  # if there’s any way to automate that info to flow into dashboard/pool —
  # and notify me when new person gets added-- that would be amazing
  PARTICIPATION_LEVELS = [
    PARTICIPATION_LEVEL_NEW = "new",
    PARTICIPATION_LEVEL_INACTIVE = "inactive",
    PARTICIPATION_LEVEL_PARTICIPANT = "participant",
    PARTICIPATION_LEVEL_ACTIVE = "active",
    PARTICIPATION_LEVEL_AMBASSADOR = "ambassador"
  ].freeze

  # NOTE(review): bare `page 50` looks like a typo (Kaminari's `paginates_per 50`?) — confirm.
  page 50

  # include Searchable
  include ExternalDataMappings
  include Neighborhoods

  # Phone normalization (phony_rails); landline normalized alongside the mobile number.
  phony_normalize :phone_number, default_country_code: 'US'
  phony_normalized_method :phone_number, default_country_code: 'US'
  phony_normalize :landline, default_country_code: 'US'

  has_many :comments, as: :commentable, dependent: :destroy
  has_many :rewards
  accepts_nested_attributes_for :rewards, reject_if: :all_blank
  attr_accessor :rewards_attributes
  has_many :invitations
  has_many :research_sessions, through: :invitations
  # TODO: remove people from carts on deactivation
  has_many :carts_people
  has_many :carts, through: :carts_people, foreign_key: :person_id
  has_secure_token :token

  # External-service syncing only runs in production, and only when the
  # corresponding credentials are configured.
  if ENV['RAILS_ENV'] == 'production'
    after_commit :send_to_mailchimp, on: %i[update create] if ENV['MAILCHIMP_API_KEY']
    after_commit :update_rapidpro, on: %i[update create] if ENV['RAPIDPRO_TOKEN']
    before_destroy :delete_from_rapidpro if ENV['RAPIDPRO_TOKEN']
  end
  after_create :update_neighborhood
  after_commit :send_new_person_notifications, on: :create

  validates :first_name, presence: true
  validates :last_name, presence: true
  validates :postal_code, presence: true
  validates :postal_code, zipcode: { country_code: :us }
  # A person must be reachable by at least one of phone or email.
  validates :phone_number, presence: true, length: { in: 9..15 },
                           unless: proc { |person| person.email_address.present? }
  validates :email_address, presence: true,
                            unless: proc { |person| person.phone_number.present? }
  validates :email_address,
            format: { with: Devise.email_regexp,
                      if: proc { |person| person.email_address.present? } }
  validates :phone_number, allow_blank: true, uniqueness: true
  validates :landline, allow_blank: true, uniqueness: true
  validates :email_address, email: true, allow_blank: true, uniqueness: true

  # scope :no_signup_card, -> { where('id NOT IN (SELECT DISTINCT(person_id) FROM rewards where rewards.reason = 1)') }
  # scope :signup_card_needed, -> { joins(:rewards).where('rewards.reason !=1') }
  scope :verified, -> { where('verified like ?', '%Verified%') }
  scope :not_verified, -> { where.not('verified like ?', '%Verified%') }
  scope :active, -> { where(active: true) }
  scope :deactivated, -> { where(active: false) }
  scope :order_by_reward_sum, -> { joins(:rewards).includes(:research_sessions).where('rewards.created_at >= ?', Time.current.beginning_of_year).select('people.id, people.first_name,people.last_name, people.active,sum(rewards.amount_cents) as total_rewards').group('people.id').order('total_rewards desc') }
  # no longer using this. now managing active elsewhere
  # default_scope { where(active:

  # Case-insensitive "first last" full-name matcher for Ransack searches.
  ransacker :full_name, formatter: proc { |v| v.mb_chars.downcase.to_s } do |parent|
    Arel::Nodes::NamedFunction.new('lower',
                                   [Arel::Nodes::NamedFunction.new('concat_ws',
                                                                   [Arel::Nodes.build_quoted(' '), parent.table[:first_name], parent.table[:last_name]])])
  end
  scope :ransack_tagged_with, ->(*tags) { tagged_with(tags) }

  # Scopes Ransack may invoke from search params.
  # NOTE(review): :no_signup_card is commented out above — confirm it should stay whitelisted.
  def self.ransackable_scopes(_auth_object = nil)
    %i[no_signup_card ransack_tagged_with]
  end

  # Maps a human locale name ("English") to its ISO code ("en"); nil if unknown.
  def self.locale_name_to_locale(locale_name)
    obj = { 'english' => 'en', 'spanish' => 'es', 'chinese' => 'zh' }
    obj[locale_name.to_s.downcase]
  end

  ransack_alias :comments, :comments_content
  ransack_alias :nav_bar_search, :full_name_or_email_address_or_phone_number_or_comments_content

  # def self.send_all_reminders
  #   # this is where reservation_reminders
  #   # called by whenever in /config/schedule.rb
  #   Person.active.all.find_each(&:send_invitation_reminder)
  # end

  # Recomputes every active person's participation level and, when anything
  # changed, emails each approved admin a digest of the changes.
  def self.update_all_participation_levels
    # NOTE(review): @results is a class-level ivar; a local would do unless it is read elsewhere.
    @results = []
    Person.active.all.find_each do |person|
      @results << person.update_participation_level
    end
    @results.compact!
    if @results.present?
      User.approved.admin.all.find_each do |u|
        AdminMailer.participation_level_change(results: @results, to: u.email_address).deliver_later
      end
    end
  end

  # True when the person has old rewards but none within the past year.
  def inactive_criteria
    at_least_one_reward_older_than_a_year = rewards.where('created_at < ?', 1.year.ago).size >= 1
    no_rewards_in_the_past_year = rewards.where('created_at >= ?', 1.year.ago).size.zero?
    at_least_one_reward_older_than_a_year && no_rewards_in_the_past_year
  end

  # True when rewarded for at least one research session within the past year.
  def participant_criteria
    # gotten a gift card in the past year.
    rewards.where('created_at > ?', 1.year.ago).map { |g| g&.research_session&.id }.compact.uniq.size >= 1
  end

  # True when rewarded for at least one research session within the past six months.
  def active_criteria
    at_least_one_reward_in_past_six_months = rewards.where('created_at > ?', 6.months.ago).map { |g| g&.research_session&.id }.compact.uniq.size >= 1
    at_least_one_reward_in_past_six_months
  end

  # Ambassadors are either specially tagged or meet the two-team / three-session bar.
  def ambassador_criteria
    if tag_list.include?('brl special ambassador')
      true
    else
      sessions_with_two_or_more_teams_in_the_past_year = rewards.where('created_at > ?', 1.year.ago).map(&:team).uniq.size >= 2
      at_least_three_sessions_ever = rewards.map { |g| g&.research_session&.id }.compact.uniq.size >= 3
      sessions_with_two_or_more_teams_in_the_past_year && at_least_three_sessions_ever
    end
  end

  # Highest participation level whose criteria are currently met;
  # later checks intentionally override earlier ones.
  def calc_participation_level
    pl = PARTICIPATION_LEVEL_NEW # needs outreach
    pl = PARTICIPATION_LEVEL_INACTIVE if inactive_criteria
    pl = PARTICIPATION_LEVEL_PARTICIPANT if participant_criteria
    pl = PARTICIPATION_LEVEL_ACTIVE if active_criteria
    pl = PARTICIPATION_LEVEL_AMBASSADOR if ambassador_criteria
    pl
  end

  # Recomputes and persists the participation level, keeps the matching tag and
  # the participation-level carts in sync, and returns a change summary hash
  # ({ pid:, old:, new: }) or nil when nothing changed.
  def update_participation_level
    return if tag_list.include? 'not dig'
    new_level = calc_participation_level
    if participation_level != new_level
      old_level = participation_level
      self.participation_level = new_level
      tag_list.remove(old_level)
      tag_list.add(new_level)
      save
      Cart.where(name: Person::PARTICIPATION_LEVELS).find_each do |cart|
        if cart.name == new_level
          begin
            cart.people << self
          rescue StandardError
            # Best-effort: ignore failures such as the person already being in the cart.
            # (The original evaluated the bare constant ActiveRecord::RecordInvalid here — a no-op.)
          end
        else
          cart.remove_person(id) # no-op if person not in cart
        end
      end # end cart update
      { pid: id, old: old_level, new: new_level }
    end
  end

  # Sum of this calendar year's rewards, as Money (amount_cents are USD cents).
  def rewards_total
    end_of_last_year = Time.zone.today.beginning_of_year - 1.day
    total = rewards.where('created_at > ?', end_of_last_year).sum(:amount_cents)
    Money.new(total, 'USD')
  end

  def rewards_count
    rewards.size
  end

  def tag_values
    tags.collect(&:name)
  end

  def tag_count
    tag_list.size
  end

  def screened?
    tag_list.include?('screened')
  end

  # Queues a Mailchimp sync; deactivated people are unsubscribed.
  def send_to_mailchimp
    status = active? ? 'subscribed' : 'unsubscribed'
    MailchimpUpdateJob.perform_async(id, status)
  end

  def delete_from_rapidpro
    RapidproDeleteJob.perform_async(id)
  end

  # Active, non-"not dig" people are pushed to RapidPro; everyone else is removed.
  def update_rapidpro
    if active && !tag_list.include?('not dig')
      RapidproUpdateJob.perform_async(id)
    else
      # The original `elsif !active || tag_list.include?('not dig')` is the exact
      # negation of the `if` condition, i.e. always true here.
      delete_from_rapidpro
    end
  end

  def primary_device_type_name
    Patterns::Application.config.device_mappings.rassoc(primary_device_id)[0].to_s if primary_device_id.present?
  end

  def secondary_device_type_name
    Patterns::Application.config.device_mappings.rassoc(secondary_device_id)[0].to_s if secondary_device_id.present?
  end

  def primary_connection_type_name
    Patterns::Application.config.connection_mappings.rassoc(primary_connection_id)[0].to_s if primary_connection_id.present?
  end

  def secondary_connection_type_name
    Patterns::Application.config.connection_mappings.rassoc(secondary_connection_id)[0].to_s if secondary_connection_id.present?
  end

  # Coordinate lookup for the person's ZIP code; nil when unknown.
  def lat_long
    ::ZIP_LAT_LONG[postal_code.to_s]
  end

  def full_name
    [first_name, last_name].join(' ')
  end

  def address_fields_to_sentence
    [address_1, address_2, city, state, postal_code].reject(&:blank?).join(', ')
  end

  # def send_invitation_reminder
  #   # called by whenever in /config/schedule.rb
  #   invs = invitations.remindable.upcoming(2)
  #   case preferred_contact_method.upcase
  #   when 'SMS'
  #     ::InvitationReminderSms.new(to: person, invitations: invs).send
  #   when 'EMAIL'
  #     ::PersonMailer.remind(
  #       invitations: invs,
  #       email_address: email_address
  #     ).deliver_later
  #   end
  #   invs.each do |inv|
  #     if inv.aasm_state == 'invited'
  #       inv.aasm_state = 'reminded'
  #       inv.save
  #     end
  #   end
  # end

  # Export row: every column value plus a pipe-joined "tags" column, with
  # phone numbers nationally formatted and blank phone/email rendered as ''.
  def to_a
    fields = Person.column_names
    fields.push('tags')
    fields.map do |f|
      field_value = send(f.to_sym)
      if f == 'phone_number'
        if field_value.present?
          field_value.phony_formatted(format: :national, spaces: '-')
        else
          ''
        end
      elsif f == 'email_address'
        field_value.presence || ''
      elsif f == 'tags'
        tag_values.present? ? tag_values.join('|') : ''
      else
        field_value
      end
    end
  end

  def deactivate!(type = nil)
    self.active = false
    self.deactivated_at = Time.current
    self.deactivated_method = type if type
    save! # sends background mailchimp update
    delete_from_rapidpro # remove from rapidpro
  end

  def reactivate!
    self.active = true
    save!
    update_rapidpro
  end

  # Gravatar-style MD5 of the downcased email; nil when no email is present.
  def md5_email
    Digest::MD5.hexdigest(email_address.downcase) if email_address.present?
  end

  # Backfills signup_at and derives the neighborhood from the ZIP code.
  def update_neighborhood
    n = zip_to_neighborhood(postal_code)
    self.signup_at = created_at if signup_at.nil?
    if n.present?
      self.neighborhood = n
      save
    end
  end

  def send_new_person_notifications
    User.where(new_person_notification: true).find_each do |user|
      email = user.email_address
      ::UserMailer.new_person_notify(email_address: email, person: self).deliver_later
    end
  end
end
# rubocop:enable ClassLength
|
# A politician as a person, potentially spanning several Member records.
class Person
  attr_reader :id

  def initialize(params)
    @id = params[:id]
  end

  # TODO When Person becomes a table in the db make this an association
  def members
    Member.where(person: id)
  end

  # TODO When Person becomes a table in the db make this an association
  def policy_member_distances
    PolicyMemberDistance.where(person: id)
  end

  # TODO When Person becomes a table in the db make this an association
  def offices
    Office.where(person: id)
  end

  # The member record under which this person actually voted in the given
  # division; falls back to the most recent member when none voted.
  def member_who_voted_on_division(division)
    latest_member = members.order(entered_house: :desc).first
    # What we have now in @member is a member related to the person that voted in division but @member wasn't necessarily
    # current when @division took place. So, let's fix this
    # We're doing this the same way as the php which doesn't seem necessarily the best way
    # TODO Figure what is the best way
    new_member = members.find do |member|
      # Consistency fix: use the tell-less vote (matches the later revision of
      # this class) so the vote value is comparable against plain "absent".
      member.vote_on_division_without_tell(division) != "absent"
    end
    new_member || latest_member
  end

  # Find the member that relates to a given policy
  # Let's just step through the votes of the policy and find the first matching member
  def member_for_policy(policy)
    policy.divisions.each do |division|
      member = members.current_on(division.date).first
      return member if member
    end
    # If we can't find a member just return the first one
    members.order(entered_house: :desc).first
  end

  # Agreement fraction with the policy; 0 when no distance record exists.
  def agreement_fraction_with_policy(policy)
    pmd = policy_member_distances.find_by(policy: policy)
    pmd ? pmd.agreement_fraction : 0
  end

  # Number of votes counted against the policy; 0 when no distance record exists.
  def number_of_votes_on_policy(policy)
    pmd = policy_member_distances.find_by(policy: policy)
    pmd ? pmd.number_of_votes : 0
  end

  # Offices whose to_date is the "open-ended" sentinel (9999-12-31) are current.
  def current_offices
    # Checking for the to_date after the sql query to get the same result as php
    offices.order(from_date: :desc).select { |o| o.to_date == Date.new(9999, 12, 31) }
  end

  def offices_on_date(date)
    offices.where("? >= from_date AND ? <= to_date", date, date)
  end

  # TODO This is wrong as parliamentary secretaries will be considered to be on the
  # front bench which as far as I understand is not the case
  def on_front_bench?(date)
    !offices_on_date(date).empty?
  end
end
Use vote_on_division_without_tell instead of vote_on_division_with_tell
# A politician as a person, potentially spanning several Member records.
class Person
  attr_reader :id

  def initialize(params)
    @id = params[:id]
  end

  # TODO When Person becomes a table in the db make this an association
  def members
    Member.where(person: id)
  end

  # TODO When Person becomes a table in the db make this an association
  def policy_member_distances
    PolicyMemberDistance.where(person: id)
  end

  # TODO When Person becomes a table in the db make this an association
  def offices
    Office.where(person: id)
  end

  # The member record under which this person actually voted in the given
  # division; falls back to the most recent member when none voted.
  # (Mirrors the php behaviour — TODO figure out the best way.)
  def member_who_voted_on_division(division)
    fallback = members.order(entered_house: :desc).first
    voter = members.detect do |member|
      member.vote_on_division_without_tell(division) != "absent"
    end
    voter || fallback
  end

  # Walk the policy's divisions and return the first member current at one of
  # their dates; fall back to the most recent member when nothing matches.
  def member_for_policy(policy)
    policy.divisions.each do |division|
      match = members.current_on(division.date).first
      return match if match
    end
    members.order(entered_house: :desc).first
  end

  # Agreement fraction with the policy; 0 when no distance record exists.
  def agreement_fraction_with_policy(policy)
    record = policy_member_distances.find_by(policy: policy)
    record ? record.agreement_fraction : 0
  end

  # Number of votes counted against the policy; 0 when no distance record exists.
  def number_of_votes_on_policy(policy)
    record = policy_member_distances.find_by(policy: policy)
    record ? record.number_of_votes : 0
  end

  # Offices whose to_date is the "open-ended" sentinel are current.
  # The sentinel check happens after the sql query, matching the php output.
  def current_offices
    open_ended = Date.new(9999, 12, 31)
    offices.order(from_date: :desc).select { |office| office.to_date == open_ended }
  end

  def offices_on_date(date)
    offices.where("? >= from_date AND ? <= to_date", date, date)
  end

  # TODO This is wrong as parliamentary secretaries will be considered to be on the
  # front bench which as far as I understand is not the case
  def on_front_bench?(date)
    held = offices_on_date(date)
    !held.empty?
  end
end
|
# A public profile, optionally linked to a User account.
class Person < ActiveRecord::Base
  belongs_to :user
  has_and_belongs_to_many :projects
  has_many :group_memberships
  has_many :groups, :through => :group_memberships
  has_many :employments
  has_many :companies, :through => :employments

  # Only the first name is required: from_user/from_twitter split a display
  # name on whitespace, so a single-word name leaves last_name nil and would
  # otherwise fail validation on save.
  validates_presence_of :first_name

  # Builds an unsaved Person prefilled from a Twitter-backed User record.
  def self.from_user(user)
    first_name, last_name = user.name.split(/\s+/, 2)
    return self.new(
      :twitter => user.login,
      :first_name => first_name,
      :last_name => last_name,
      :bio => user.description,
      :url => user.url,
      :avatar_url => user.profile_image_url
    )
  end

  # Builds an unsaved Person by fetching a profile from the Twitter API.
  def self.from_twitter(screen_name, twitter_token)
    twitterer = twitter_token.get("/users/show?screen_name=#{screen_name}")
    first_name, last_name = twitterer['name'].split(/\s+/, 2)
    return self.new(
      :twitter => screen_name,
      :first_name => first_name,
      :last_name => last_name,
      :bio => twitterer['description'],
      :url => twitterer['url'],
      :avatar_url => twitterer['profile_image_url']
    )
  end

  # Full display name; note a nil last_name leaves a trailing space.
  def name
    [first_name, last_name].join(' ')
  end
end
# == Schema Information
#
# Table name: people
#
# id :integer not null, primary key
# first_name :string(255)
# last_name :string(255)
# email :string(255)
# twitter :string(255)
# url :string(255)
# bio :text
# created_at :datetime
# updated_at :datetime
# user_id :integer
#
Don't require last name
# A public profile, optionally linked to a User account.
class Person < ActiveRecord::Base
  belongs_to :user
  has_and_belongs_to_many :projects
  has_many :group_memberships
  has_many :groups, :through => :group_memberships
  has_many :employments
  has_many :companies, :through => :employments

  validates_presence_of :first_name

  # Build an unsaved Person prefilled from a Twitter-backed User record.
  def self.from_user(user)
    given, family = user.name.split(/\s+/, 2)
    new(
      :twitter => user.login,
      :first_name => given,
      :last_name => family,
      :bio => user.description,
      :url => user.url,
      :avatar_url => user.profile_image_url
    )
  end

  # Build an unsaved Person by fetching a profile from the Twitter API.
  def self.from_twitter(screen_name, twitter_token)
    profile = twitter_token.get("/users/show?screen_name=#{screen_name}")
    given, family = profile['name'].split(/\s+/, 2)
    new(
      :twitter => screen_name,
      :first_name => given,
      :last_name => family,
      :bio => profile['description'],
      :url => profile['url'],
      :avatar_url => profile['profile_image_url']
    )
  end

  # Full display name; note a nil last_name leaves a trailing space.
  def name
    [first_name, last_name].join(' ')
  end
end
# == Schema Information
#
# Table name: people
#
# id :integer not null, primary key
# first_name :string(255)
# last_name :string(255)
# email :string(255)
# twitter :string(255)
# url :string(255)
# bio :text
# created_at :datetime
# updated_at :datetime
# user_id :integer
#
|
# frozen_string_literal: true
# == Schema Information
#
# Table name: people
#
# id :integer not null, primary key
# first_name :string(255)
# last_name :string(255)
# email_address :string(255)
# address_1 :string(255)
# address_2 :string(255)
# city :string(255)
# state :string(255)
# postal_code :string(255)
# geography_id :integer
# primary_device_id :integer
# primary_device_description :string(255)
# secondary_device_id :integer
# secondary_device_description :string(255)
# primary_connection_id :integer
# primary_connection_description :string(255)
# phone_number :string(255)
# participation_type :string(255)
# created_at :datetime
# updated_at :datetime
# signup_ip :string(255)
# signup_at :datetime
# voted :string(255)
# called_311 :string(255)
# secondary_connection_id :integer
# secondary_connection_description :string(255)
# verified :string(255)
# preferred_contact_method :string(255)
# token :string(255)
# active :boolean default(TRUE)
# deactivated_at :datetime
# deactivated_method :string(255)
# neighborhood :string(255)
# referred_by :string(255)
# low_income :boolean
# rapidpro_uuid :string(255)
#
# FIXME: Refactor and re-enable cop
# rubocop:disable ClassLength
class Person < ApplicationRecord
has_paper_trail
acts_as_taggable
page 50
# include Searchable
include ExternalDataMappings
include Neighborhoods
phony_normalize :phone_number, default_country_code: 'US'
phony_normalized_method :phone_number, default_country_code: 'US'
has_many :comments, as: :commentable, dependent: :destroy
has_many :submissions, dependent: :destroy
has_many :gift_cards
accepts_nested_attributes_for :gift_cards, reject_if: :all_blank
attr_accessor :gift_cards_attributes
has_many :reservations, dependent: :destroy
has_many :events, through: :reservations
has_many :invitations
has_many :research_sessions, through: :invitations
# TODO: remove people from carts on deactivation
has_many :carts_people
has_many :carts, through: :carts_people, foreign_key: :person_id
has_secure_token :token
if ENV['RAILS_ENV'] == 'production'
after_commit :sendToMailChimp, on: %i[update create]
after_commit :updateRapidPro, on: %i[update create]
end
after_create :update_neighborhood
after_commit :send_new_person_notifications, on: :create
validates :first_name, presence: true
validates :last_name, presence: true
validates :postal_code, presence: true
validates :postal_code, zipcode: { country_code: :us }
# phony validations and normalization
phony_normalize :phone_number, default_country_code: 'US'
phony_normalize :landline, default_country_code: 'US'
# validates :phone_number, presence: true, length: { in: 9..15 },
# unless: proc { |person| person.email_address.present? }
validates :phone_number, allow_blank: true, uniqueness: true
validates :landline, allow_blank: true, uniqueness: true
# validates :email_address, presence: true,gc
# unless: proc { |person| person.phone_number.present? }
validates :email_address, email: true, allow_blank: true, uniqueness: true
scope :no_signup_card, -> { where('id NOT IN (SELECT DISTINCT(person_id) FROM gift_cards where gift_cards.reason = 1)') }
scope :signup_card_needed, -> { joins(:gift_cards).where('gift_cards.reason !=1') }
scope :verified, -> { where('verified like ?', '%Verified%') }
scope :not_verified, -> { where.not('verified like ?', '%Verified%') }
scope :active, -> { where(active: true) }
scope :deactivated, -> { where(active: false) }
scope :order_by_giftcard_sum, -> { joins(:gift_cards).includes(:research_sessions).where('gift_cards.created_at >= ?', Time.current.beginning_of_year).select('people.id, people.first_name,people.last_name, people.active,sum(gift_cards.amount_cents) as total_gc').group('people.id').order('total_gc desc') }
# no longer using this. now managing active elsewhere
# default_scope { where(active:
ransacker :full_name, formatter: proc { |v| v.mb_chars.downcase.to_s } do |parent|
Arel::Nodes::NamedFunction.new('lower',
[Arel::Nodes::NamedFunction.new('concat_ws',
[Arel::Nodes.build_quoted(' '), parent.table[:first_name], parent.table[:last_name]])])
end
scope :ransack_tagged_with, ->(*tags) { tagged_with(tags) }
# Whitelist of scopes Ransack is allowed to invoke from search parameters.
def self.ransackable_scopes(_auth_object = nil)
  %i[no_signup_card ransack_tagged_with]
end
# Maps a human-readable language name ('English', 'spanish', ...) to its
# locale code ('en', 'es', 'zh'); returns nil for unknown names.
def self.locale_name_to_locale(locale_name)
  # FIX: the literal { 'english': 'en', ... } creates *symbol* keys, so the
  # original String lookup (obj[locale_name.downcase]) always returned nil.
  # Symbolize the lookup key so the mapping actually resolves.
  locales = { english: 'en', spanish: 'es', chinese: 'zh' }
  locales[locale_name.downcase.to_sym]
end
ransack_alias :comments, :comments_content
ransack_alias :nav_bar_search, :full_name_or_email_address_or_phone_number_or_comments_content
# Cron entry point (scheduled by whenever in /config/schedule.rb):
# sends invitation reminders to every active person, in batches.
def self.send_all_reminders
  Person.active.find_each(&:send_invitation_reminder)
end
# Recomputes participation_level for every active person, in batches.
def self.update_all_participation_levels
  Person.active.find_each(&:update_participation_level)
end
# Ordered list of recognized participation levels. Also used as Cart names
# by update_participation_level, so renaming a level requires renaming carts.
def self.participation_levels
  # * 'active'     = participated in 3+ sessions (invited to join FB group)
  # * 'regular'    = participated in at least one season (~6 months active)
  #                  OR tagged in sessions by at least 2 different teams
  # * 'ambassador' = active for at least one year, 2+ projects/teams
  # Admins are notified automatically when a person's level changes.
  %w[new active regular ambassador]
end
# Recomputes participation_level from gift card / session history (checks
# escalate: active -> regular -> ambassador, last satisfied wins), keeps the
# tag list and the level-named Carts in sync, emails admins on a change,
# then persists with save.
def update_participation_level
  self.participation_level = 'active' if gift_cards.map { |g| g&.research_session&.id }.compact.uniq.size >= 3
  self.participation_level = 'regular' if gift_cards.where('created_at > ?', 6.months.ago).map { |g| g&.research_session&.id }.compact.uniq.size >= 1
  self.participation_level = 'regular' if gift_cards.where('created_at > ?', 6.months.ago).map(&:team).uniq.size >= 2
  # Ambassador: older than a year, cards from 2+ teams within the last year,
  # and either 3+ distinct research sessions ever or 6+ cards in the last year.
  # FIX: the original compared the Relation itself (`where(...) >= 6`), which
  # raises NoMethodError at runtime; compare its size instead (matches the
  # later `yaseen_hack` refactor of this same rule).
  if created_at <= 1.year.ago &&
     gift_cards.where('created_at > ?', 1.year.ago).map(&:team).uniq.size >= 2 &&
     (gift_cards.map { |g| g&.research_session&.id }.compact.uniq.size >= 3 ||
      gift_cards.where('created_at > ?', 1.year.ago).size >= 6)
    self.participation_level = 'ambassador'
  end
  if participation_level_changed?
    # Swap the old level tag for the new one.
    tag_list.remove(participation_level_was)
    tag_list.add(participation_level)
    if participation_level_was != participation_level
      # Let every approved admin know about the promotion/demotion.
      User.approved.admin.all.find_each do |u|
        AdminMailer.participation_level_change(person: self, to: u.email, old_level: participation_level_was).deliver_later
      end
    end
    # Membership in the level-named carts mirrors the current level.
    Cart.where(name: Person.participation_levels).find_each do |cart|
      if cart.name == participation_level
        begin
          cart.people << self
        rescue StandardError
          next # already in the cart (or insert failed) — best effort
        end
      else
        cart.remove_person_id(id)
      end
    end
  end
  save
end
# Every distinct, non-blank value seen in the verified column.
def self.verified_types
  Person.pluck(:verified).uniq.reject(&:blank?)
end
# True when a signup gift card (reason: 1) has already been issued.
def signup_gc_sent
  !gift_cards.where(reason: 1).empty?
end
# Whether the verified marker starts with "Verified" (nil when unset,
# matching the original safe-navigation behavior).
def verified?
  verified && verified.start_with?('Verified')
end
# Total gift card value issued since January 1st of the current year,
# returned as a Money object in USD.
def gift_card_total
  cutoff = Time.zone.today.beginning_of_year - 1.day
  cents = gift_cards.where('created_at > ?', cutoff).sum(:amount_cents)
  Money.new(cents, 'USD')
end
# Lifetime number of gift cards (size avoids a query when already loaded).
def gift_card_count
  gift_cards.size
end
WUFOO_FIELD_MAPPING = {
'Field1' => :first_name,
'Field2' => :last_name,
'Field10' => :email_address,
'Field276' => :voted,
'Field277' => :called_311,
'Field39' => :primary_device_id, # type of primary
'Field21' => :primary_device_description, # desc of primary
'Field40' => :secondary_device_id,
'Field24' => :secondary_device_description, # desc of secondary
'Field41' => :primary_connection_id, # connection type
# 'Field41' => :primary_connection_description, # description of connection
'Field42' => :secondary_connection_id, # connection type
# 'Field42' => :secondary_connection_description, # description of connection
'Field268' => :address_1, # address_1
'Field269' => :city, # city
# 'Field47' => :state, # state
'Field271' => :postal_code, # postal_code
'Field9' => :phone_number, # phone_number
'IP' => :signup_ip, # client IP, ignored for the moment
}.freeze
# Names of all tags on this person.
def tag_values
  tags.map(&:name)
end
# Number of tags currently applied.
def tag_count
  tag_list.size
end
# Whether the person carries the 'screened' tag.
def screened?
  tag_list.include?('screened')
end
# Flattened values of every form submission attached to this person.
def submission_values
  submissions.map(&:submission_values)
end
# FIXME: Refactor and re-enable cop
# rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Rails/TimeZone
#
# Builds (without saving) a Person from the Wufoo SMS-signup webhook payload.
# Field codes belong to the SMS variant of the form and differ from
# WUFOO_FIELD_MAPPING, which serves the web form.
def self.initialize_from_wufoo_sms(params)
  new_person = Person.new
  new_person.first_name = params['Field275']
  new_person.last_name = params['Field276']
  new_person.address_1 = params['Field268']
  new_person.postal_code = params['Field271']
  new_person.email_address = params['Field279']
  # NOTE(review): lowercase 'field281' is inconsistent with every other key —
  # confirm against the live Wufoo payload before "fixing" the case.
  new_person.phone_number = params['field281']
  # Letter answers map to canonical device ids; unknown answers pass through.
  new_person.primary_device_id = case params['Field39'].upcase
                                 when 'A'
                                   Person.map_device_to_id('Desktop computer')
                                 when 'B'
                                   Person.map_device_to_id('Laptop')
                                 when 'C'
                                   Person.map_device_to_id('Tablet')
                                 when 'D'
                                   Person.map_device_to_id('Smart phone')
                                 else
                                   params['Field39']
                                 end
  new_person.primary_device_description = params['Field21']
  # NOTE(review): this branch calls Person.primary_connection_id as a lookup,
  # whereas the device branch uses map_device_to_id — verify intended.
  new_person.primary_connection_id = case params['Field41'].upcase
                                     when 'A'
                                       Person.primary_connection_id('Broadband at home')
                                     when 'B'
                                       Person.primary_connection_id('Phone plan with data')
                                     when 'C'
                                       Person.primary_connection_id('Public wi-fi')
                                     when 'D'
                                       Person.primary_connection_id('Public computer center')
                                     else
                                       params['Field41']
                                     end
  # FIX: String#casecmp returns -1/0/1, all of which are truthy, so the
  # original `if params['Field278'].casecmp('TEXT')` always chose 'SMS'.
  # Test for case-insensitive equality (zero) instead.
  new_person.preferred_contact_method = if params['Field278'].casecmp('TEXT').zero?
                                          'SMS'
                                        else
                                          'EMAIL'
                                        end
  new_person.verified = 'Verified by Text Message Signup'
  new_person.signup_at = Time.now
  new_person
end
# rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Rails/TimeZone
# Queues a MailChimp sync; subscription status mirrors the active flag.
# camelCase name kept — it is referenced by the after_commit callbacks.
def sendToMailChimp
  MailchimpUpdateJob.perform_async(id, active? ? 'subscribed' : 'unsubscribed')
end
# Queues removal from RapidPro, but only for deactivated people.
# camelCase name kept — it is referenced by deactivate!.
def deleteFromRapidPro
  RapidproDeleteJob.perform_async(id) unless active
end
# Queues a RapidPro sync, but only for active people.
# camelCase name kept — it is referenced by the after_commit callbacks.
def updateRapidPro
  RapidproUpdateJob.perform_async(id) if active
end
# FIXME: Refactor and re-enable cop
# rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/PerceivedComplexity
#
# Builds (without saving) a Person from the Wufoo *web* signup form payload.
# Directly-mapped fields are copied via WUFOO_FIELD_MAPPING; device and
# connection answers are then rewritten to integer ids in place, so the
# statement order below matters (descriptions are copied before the rewrite).
def self.initialize_from_wufoo(params)
  new_person = Person.new
  params.each_pair do |k, v|
    new_person[WUFOO_FIELD_MAPPING[k]] = v if WUFOO_FIELD_MAPPING[k].present?
  end
  # Special handling of participation type. New form uses 2 fields where old form used 1. Need to combine into one. Manually set to "Either one" if both field53 & field54 are populated.
  new_person.participation_type = if params['Field53'] != '' && params['Field54'] != ''
                                    'Either one'
                                  elsif params['Field53'] != ''
                                    params['Field53']
                                  else
                                    params['Field54']
                                  end
  new_person.preferred_contact_method = if params['Field273'] == 'Email'
                                          'EMAIL'
                                        else
                                          'SMS'
                                        end
  # Copy connection descriptions to description fields *before* the ids are
  # rewritten to integers below.
  new_person.primary_connection_description = new_person.primary_connection_id
  new_person.secondary_connection_description = new_person.secondary_connection_id
  # Rewrite the device and connection identifiers to integers; rassoc finds
  # the Wufoo field code for each attribute in WUFOO_FIELD_MAPPING.
  new_person.primary_device_id = Person.map_device_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:primary_device_id).first])
  new_person.secondary_device_id = Person.map_device_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:secondary_device_id).first])
  new_person.primary_connection_id = Person.map_connection_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:primary_connection_id).first])
  new_person.secondary_connection_id = Person.map_connection_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:secondary_connection_id).first])
  # FIXME: this is a hack, since we need to initialize people
  # with a city/state, but don't ask for it in the Wufoo form
  # new_person.city = "Chicago" With update we ask for city
  new_person.state = 'Illinois'
  new_person.signup_at = params['DateCreated']
  new_person
end
# rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/PerceivedComplexity
# Human-readable name of the primary device type, or nil when unset.
def primary_device_type_name
  return if primary_device_id.blank?
  Logan::Application.config.device_mappings.rassoc(primary_device_id)[0].to_s
end
# Human-readable name of the secondary device type, or nil when unset.
def secondary_device_type_name
  return if secondary_device_id.blank?
  Logan::Application.config.device_mappings.rassoc(secondary_device_id)[0].to_s
end
# Human-readable name of the primary connection type, or nil when unset.
def primary_connection_type_name
  return if primary_connection_id.blank?
  Logan::Application.config.connection_mappings.rassoc(primary_connection_id)[0].to_s
end
# Human-readable name of the secondary connection type, or nil when unset.
def secondary_connection_type_name
  return if secondary_connection_id.blank?
  Logan::Application.config.connection_mappings.rassoc(secondary_connection_id)[0].to_s
end
# "First Last" display name (a nil part contributes an empty string,
# exactly as the original Array#join did).
def full_name
  "#{first_name} #{last_name}"
end
# Comma-joined postal address, skipping blank components.
def address_fields_to_sentence
  [address_1, address_2, city, state, postal_code].select(&:present?).join(', ')
end
# Sends a reminder for upcoming remindable invitations (next 2 days) via the
# person's preferred channel, then advances 'invited' invitations to
# 'reminded'. Called by whenever (/config/schedule.rb) via send_all_reminders.
def send_invitation_reminder
  invs = invitations.remindable.upcoming(2)
  case preferred_contact_method.upcase
  when 'SMS'
    # FIX: `person` was an undefined local variable inside this instance
    # method (NameError at runtime); the reminder recipient is self.
    ::InvitationReminderSms.new(to: self, invitations: invs).send
  when 'EMAIL'
    ::PersonMailer.remind(
      invitations: invs,
      email_address: email_address
    ).deliver_later
  end
  invs.each do |inv|
    if inv.aasm_state == 'invited'
      inv.aasm_state = 'reminded'
      inv.save
    end
  end
end
# Serializes the record into an array of column values plus a trailing
# pipe-joined tag list, for CSV-style export. Phone numbers are formatted
# nationally; blank phone/email become ''.
def to_a
  fields = Person.column_names
  fields.push('tags')
  fields.map do |f|
    field_value = send(f.to_sym)
    if f == 'phone_number'
      if field_value.present?
        field_value.phony_formatted(format: :national, spaces: '-')
      else
        ''
      end
    elsif f == 'email_address'
      field_value.presence || ''
    elsif f == 'tags'
      tag_values.present? ? tag_values.join('|') : ''
    else
      field_value
    end
  end
end
# Soft-deactivates the person: flags inactive, stamps when and (optionally)
# how, persists — which fires the MailChimp sync callback — and queues
# removal from RapidPro. Raises on validation failure (save!).
def deactivate!(type = nil)
  self.active = false
  self.deactivated_at = Time.current
  self.deactivated_method = type if type
  save! # sends background mailchimp update
  deleteFromRapidPro # remove from rapidpro
end
# Re-enables the person and pushes the change to RapidPro.
# Raises on validation failure (save!).
def reactivate!
  self.active = true
  save!
  updateRapidPro
end
# MD5 digest of the downcased email (MailChimp-style subscriber hash);
# nil when no email address is present.
def md5_email
  return if email_address.blank?
  Digest::MD5.hexdigest(email_address.downcase)
end
# after_create hook: backfills signup_at from created_at and, when the
# postal code maps to a known neighborhood, stores it and saves.
# NOTE(review): the signup_at backfill is only persisted when a neighborhood
# match triggers the save — confirm that asymmetry is intended.
def update_neighborhood
  n = zip_to_neighborhood(postal_code)
  self.signup_at = created_at if signup_at.nil?
  if n.present?
    self.neighborhood = n
    save
  end
end
# after_commit(:create) hook: emails everyone who opted into
# new-person notifications.
def send_new_person_notifications
  User.where(new_person_notification: true).find_each do |subscriber|
    ::UserMailer.new_person_notify(email_address: subscriber.email_address, person: self).deliver_later
  end
end
# FIXME: Refactor and re-enable cop
# rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
#
# Compare to other records in the database to find possible duplicates.
# Compare to other records in the database to find possible duplicates.
# Returns a hash keyed by the candidate's id; each entry carries the record
# ('person'), how many fields matched ('match_count'), the human-readable
# list of matched fields ('matches_on') and a per-field boolean flag
# (e.g. 'email_address_match'), preserving the original structure.
def possible_duplicates
  @duplicates = {}
  # Each matcher was a near-identical 15-line copy; folded into one helper.
  collect_duplicates(Person.where(last_name: last_name), 'Last Name', 'last_name_match') if last_name.present?
  collect_duplicates(Person.where(email_address: email_address), 'Email Address', 'email_address_match') if email_address.present?
  collect_duplicates(Person.where(phone_number: phone_number), 'Phone Number', 'phone_number_match') if phone_number.present?
  collect_duplicates(Person.where(address_1: address_1), 'Address_1', 'address_1_match') if address_1.present?
  @duplicates
end

# Merges every record in +scope+ (excluding self) into @duplicates,
# incrementing its match count, appending +label+ and setting +flag+.
private def collect_duplicates(scope, label, flag)
  scope.where.not(id: id).each do |duplicate|
    entry = @duplicates[duplicate.id] ||= { 'person' => duplicate, 'match_count' => 0, 'matches_on' => [] }
    entry['match_count'] += 1
    entry['matches_on'].push(label)
    entry[flag] = true
  end
end
# rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
private
end
# rubocop:enable ClassLength
Slight refactor: extract the participation-level rules into named predicate methods (active_criteria, regular_criteria, ambassador_criteria, yaseen_hack).
# frozen_string_literal: true
# == Schema Information
#
# Table name: people
#
# id :integer not null, primary key
# first_name :string(255)
# last_name :string(255)
# email_address :string(255)
# address_1 :string(255)
# address_2 :string(255)
# city :string(255)
# state :string(255)
# postal_code :string(255)
# geography_id :integer
# primary_device_id :integer
# primary_device_description :string(255)
# secondary_device_id :integer
# secondary_device_description :string(255)
# primary_connection_id :integer
# primary_connection_description :string(255)
# phone_number :string(255)
# participation_type :string(255)
# created_at :datetime
# updated_at :datetime
# signup_ip :string(255)
# signup_at :datetime
# voted :string(255)
# called_311 :string(255)
# secondary_connection_id :integer
# secondary_connection_description :string(255)
# verified :string(255)
# preferred_contact_method :string(255)
# token :string(255)
# active :boolean default(TRUE)
# deactivated_at :datetime
# deactivated_method :string(255)
# neighborhood :string(255)
# referred_by :string(255)
# low_income :boolean
# rapidpro_uuid :string(255)
#
# FIXME: Refactor and re-enable cop
# rubocop:disable ClassLength
class Person < ApplicationRecord
has_paper_trail
acts_as_taggable
# NOTE(review): stray "page 50" pagination artifact removed — it was not valid model code and would raise NoMethodError at class load.
# include Searchable
include ExternalDataMappings
include Neighborhoods
phony_normalize :phone_number, default_country_code: 'US'
phony_normalized_method :phone_number, default_country_code: 'US'
has_many :comments, as: :commentable, dependent: :destroy
has_many :submissions, dependent: :destroy
has_many :gift_cards
accepts_nested_attributes_for :gift_cards, reject_if: :all_blank
attr_accessor :gift_cards_attributes
has_many :reservations, dependent: :destroy
has_many :events, through: :reservations
has_many :invitations
has_many :research_sessions, through: :invitations
# TODO: remove people from carts on deactivation
has_many :carts_people
has_many :carts, through: :carts_people, foreign_key: :person_id
has_secure_token :token
if ENV['RAILS_ENV'] == 'production'
after_commit :sendToMailChimp, on: %i[update create]
after_commit :updateRapidPro, on: %i[update create]
end
after_create :update_neighborhood
after_commit :send_new_person_notifications, on: :create
validates :first_name, presence: true
validates :last_name, presence: true
validates :postal_code, presence: true
validates :postal_code, zipcode: { country_code: :us }
# phony validations and normalization
phony_normalize :phone_number, default_country_code: 'US'
phony_normalize :landline, default_country_code: 'US'
# validates :phone_number, presence: true, length: { in: 9..15 },
# unless: proc { |person| person.email_address.present? }
validates :phone_number, allow_blank: true, uniqueness: true
validates :landline, allow_blank: true, uniqueness: true
# validates :email_address, presence: true,gc
# unless: proc { |person| person.phone_number.present? }
validates :email_address, email: true, allow_blank: true, uniqueness: true
scope :no_signup_card, -> { where('id NOT IN (SELECT DISTINCT(person_id) FROM gift_cards where gift_cards.reason = 1)') }
scope :signup_card_needed, -> { joins(:gift_cards).where('gift_cards.reason !=1') }
scope :verified, -> { where('verified like ?', '%Verified%') }
scope :not_verified, -> { where.not('verified like ?', '%Verified%') }
scope :active, -> { where(active: true) }
scope :deactivated, -> { where(active: false) }
scope :order_by_giftcard_sum, -> { joins(:gift_cards).includes(:research_sessions).where('gift_cards.created_at >= ?', Time.current.beginning_of_year).select('people.id, people.first_name,people.last_name, people.active,sum(gift_cards.amount_cents) as total_gc').group('people.id').order('total_gc desc') }
# no longer using this. now managing active elsewhere
# default_scope { where(active:
ransacker :full_name, formatter: proc { |v| v.mb_chars.downcase.to_s } do |parent|
Arel::Nodes::NamedFunction.new('lower',
[Arel::Nodes::NamedFunction.new('concat_ws',
[Arel::Nodes.build_quoted(' '), parent.table[:first_name], parent.table[:last_name]])])
end
scope :ransack_tagged_with, ->(*tags) { tagged_with(tags) }
def self.ransackable_scopes(_auth_object = nil)
%i[no_signup_card ransack_tagged_with]
end
# Maps a human-readable language name ('English', 'spanish', ...) to its
# locale code ('en', 'es', 'zh'); returns nil for unknown names.
def self.locale_name_to_locale(locale_name)
  # FIX: the literal { 'english': 'en', ... } creates *symbol* keys, so the
  # original String lookup (obj[locale_name.downcase]) always returned nil.
  # Symbolize the lookup key so the mapping actually resolves.
  locales = { english: 'en', spanish: 'es', chinese: 'zh' }
  locales[locale_name.downcase.to_sym]
end
ransack_alias :comments, :comments_content
ransack_alias :nav_bar_search, :full_name_or_email_address_or_phone_number_or_comments_content
def self.send_all_reminders
# this is where reservation_reminders
# called by whenever in /config/schedule.rb
Person.active.all.find_each(&:send_invitation_reminder)
end
def self.update_all_participation_levels
Person.active.all.find_each(&:update_participation_level)
end
def self.participation_levels
# * Active DIG member =“Participated in 3+ sessions” = invited to join FB group;
# * [Need another name for level 2] = “Participated in at least one season--
# (could code as 6 months active) OR at least 2 different projects/teams
# (could code based on being tagged in a session by at least 2 different teams)
# * DIG Ambassador = “active for at least one year, 2+ projects/teams
# if there’s any way to automate that info to flow into dashboard/pool —
# and notify me when new person gets added-- that would be amazing
%w[new active regular ambassador]
end
# Legacy activity bar: 3+ distinct research sessions ever, OR 6+ gift cards
# within the last year (sessions are too new a concept to rely on alone).
def yaseen_hack
  # research sessions didn't really exist long enough for this to make sense. hence the hack. For Yaseen and other
  gift_cards.map { |g| g&.research_session&.id }.compact.uniq.size >=3 || gift_cards.where('created_at > ?', 1.year.ago).size >= 6
end
# 'regular': at least one session-linked gift card in the last 6 months, or
# cards from 2+ different teams in the last 6 months.
def regular_criteria
  gift_cards.where('created_at > ?', 6.months.ago).map { |g| g&.research_session&.id }.compact.uniq.size >= 1 || gift_cards.where('created_at > ?', 6.months.ago).map(&:team).uniq.size >= 2
end
# 'active': gift cards spanning 3+ distinct research sessions overall.
def active_criteria
  gift_cards.map { |g| g&.research_session&.id }.compact.uniq.size >= 3
end
# 'ambassador': on the books for over a year, cards from 2+ teams within the
# last year, plus the yaseen_hack activity bar.
def ambassador_criteria
  # older than a year and 2 or more sessions with two teams and either 3 research sessions or 6 cards in the last year
  created_at <= 1.year.ago && gift_cards.where('created_at > ?', 1.year.ago).map(&:team).uniq.size >= 2 && yaseen_hack
end
# Recomputes participation_level (checks escalate: active -> regular ->
# ambassador, so the last satisfied criterion wins), syncs the tag list and
# the level-named Carts, emails admins on a change, then persists with save.
def update_participation_level
  self.participation_level = 'active' if active_criteria
  self.participation_level = 'regular' if regular_criteria
  self.participation_level = 'ambassador' if ambassador_criteria
  if participation_level_changed?
    # Swap the old level tag for the new one.
    tag_list.remove(participation_level_was)
    tag_list.add(self.participation_level)
    if participation_level_was != self.participation_level
      # Let every approved admin know about the promotion/demotion.
      User.approved.admin.all.find_each do |u|
        AdminMailer.participation_level_change(person: self, to: u.email, old_level: participation_level_was).deliver_later
      end
    end
    # Membership in the level-named carts mirrors the current level.
    Cart.where(name: Person.participation_levels).find_each do |cart|
      if cart.name == participation_level
        begin
          cart.people << self
        rescue StandardError
          next # already in the cart (or insert failed) — best effort
        end
      else
        cart.remove_person_id(id)
      end
    end
  end
  save
end
def self.verified_types
Person.pluck(:verified).uniq.select(&:present?)
end
def signup_gc_sent
signup_cards = gift_cards.where(reason: 1)
return true unless signup_cards.empty?
false
end
def verified?
verified&.start_with?('Verified')
end
def gift_card_total
end_of_last_year = Time.zone.today.beginning_of_year - 1.day
total = gift_cards.where('created_at > ?', end_of_last_year).sum(:amount_cents)
Money.new(total, 'USD')
end
def gift_card_count
gift_cards.size
end
WUFOO_FIELD_MAPPING = {
'Field1' => :first_name,
'Field2' => :last_name,
'Field10' => :email_address,
'Field276' => :voted,
'Field277' => :called_311,
'Field39' => :primary_device_id, # type of primary
'Field21' => :primary_device_description, # desc of primary
'Field40' => :secondary_device_id,
'Field24' => :secondary_device_description, # desc of secondary
'Field41' => :primary_connection_id, # connection type
# 'Field41' => :primary_connection_description, # description of connection
'Field42' => :secondary_connection_id, # connection type
# 'Field42' => :secondary_connection_description, # description of connection
'Field268' => :address_1, # address_1
'Field269' => :city, # city
# 'Field47' => :state, # state
'Field271' => :postal_code, # postal_code
'Field9' => :phone_number, # phone_number
'IP' => :signup_ip, # client IP, ignored for the moment
}.freeze
def tag_values
tags.collect(&:name)
end
def tag_count
tag_list.size
end
def screened?
tag_list.include?('screened')
end
def submission_values
submissions.collect(&:submission_values)
end
# FIXME: Refactor and re-enable cop
# rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Rails/TimeZone
#
# Builds (without saving) a Person from the Wufoo SMS-signup webhook payload.
# Field codes belong to the SMS variant of the form and differ from
# WUFOO_FIELD_MAPPING, which serves the web form.
def self.initialize_from_wufoo_sms(params)
  new_person = Person.new
  new_person.first_name = params['Field275']
  new_person.last_name = params['Field276']
  new_person.address_1 = params['Field268']
  new_person.postal_code = params['Field271']
  new_person.email_address = params['Field279']
  # NOTE(review): lowercase 'field281' is inconsistent with every other key —
  # confirm against the live Wufoo payload before "fixing" the case.
  new_person.phone_number = params['field281']
  # Letter answers map to canonical device ids; unknown answers pass through.
  new_person.primary_device_id = case params['Field39'].upcase
                                 when 'A'
                                   Person.map_device_to_id('Desktop computer')
                                 when 'B'
                                   Person.map_device_to_id('Laptop')
                                 when 'C'
                                   Person.map_device_to_id('Tablet')
                                 when 'D'
                                   Person.map_device_to_id('Smart phone')
                                 else
                                   params['Field39']
                                 end
  new_person.primary_device_description = params['Field21']
  # NOTE(review): this branch calls Person.primary_connection_id as a lookup,
  # whereas the device branch uses map_device_to_id — verify intended.
  new_person.primary_connection_id = case params['Field41'].upcase
                                     when 'A'
                                       Person.primary_connection_id('Broadband at home')
                                     when 'B'
                                       Person.primary_connection_id('Phone plan with data')
                                     when 'C'
                                       Person.primary_connection_id('Public wi-fi')
                                     when 'D'
                                       Person.primary_connection_id('Public computer center')
                                     else
                                       params['Field41']
                                     end
  # FIX: String#casecmp returns -1/0/1, all of which are truthy, so the
  # original `if params['Field278'].casecmp('TEXT')` always chose 'SMS'.
  # Test for case-insensitive equality (zero) instead.
  new_person.preferred_contact_method = if params['Field278'].casecmp('TEXT').zero?
                                          'SMS'
                                        else
                                          'EMAIL'
                                        end
  new_person.verified = 'Verified by Text Message Signup'
  new_person.signup_at = Time.now
  new_person
end
# rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Rails/TimeZone
def sendToMailChimp
status = active? ? 'subscribed' : 'unsubscribed'
MailchimpUpdateJob.perform_async(id, status)
end
def deleteFromRapidPro
RapidproDeleteJob.perform_async(id) unless active
end
def updateRapidPro
RapidproUpdateJob.perform_async(id) if active
end
# FIXME: Refactor and re-enable cop
# rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/PerceivedComplexity
#
def self.initialize_from_wufoo(params)
new_person = Person.new
params.each_pair do |k, v|
new_person[WUFOO_FIELD_MAPPING[k]] = v if WUFOO_FIELD_MAPPING[k].present?
end
# Special handling of participation type. New form uses 2 fields where old form used 1. Need to combine into one. Manually set to "Either one" if both field53 & field54 are populated.
new_person.participation_type = if params['Field53'] != '' && params['Field54'] != ''
'Either one'
elsif params['Field53'] != ''
params['Field53']
else
params['Field54']
end
new_person.preferred_contact_method = if params['Field273'] == 'Email'
'EMAIL'
else
'SMS'
end
# Copy connection descriptions to description fields
new_person.primary_connection_description = new_person.primary_connection_id
new_person.secondary_connection_description = new_person.secondary_connection_id
# rewrite the device and connection identifiers to integers
new_person.primary_device_id = Person.map_device_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:primary_device_id).first])
new_person.secondary_device_id = Person.map_device_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:secondary_device_id).first])
new_person.primary_connection_id = Person.map_connection_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:primary_connection_id).first])
new_person.secondary_connection_id = Person.map_connection_to_id(params[WUFOO_FIELD_MAPPING.rassoc(:secondary_connection_id).first])
# FIXME: this is a hack, since we need to initialize people
# with a city/state, but don't ask for it in the Wufoo form
# new_person.city = "Chicago" With update we ask for city
new_person.state = 'Illinois'
new_person.signup_at = params['DateCreated']
new_person
end
# rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/PerceivedComplexity
def primary_device_type_name
if primary_device_id.present?
Logan::Application.config.device_mappings.rassoc(primary_device_id)[0].to_s
end
end
def secondary_device_type_name
if secondary_device_id.present?
Logan::Application.config.device_mappings.rassoc(secondary_device_id)[0].to_s
end
end
def primary_connection_type_name
if primary_connection_id.present?
Logan::Application.config.connection_mappings.rassoc(primary_connection_id)[0].to_s
end
end
def secondary_connection_type_name
if secondary_connection_id.present?
Logan::Application.config.connection_mappings.rassoc(secondary_connection_id)[0].to_s
end
end
def full_name
[first_name, last_name].join(' ')
end
def address_fields_to_sentence
[address_1, address_2, city, state, postal_code].reject(&:blank?).join(', ')
end
# Sends a reminder for upcoming remindable invitations (next 2 days) via the
# person's preferred channel, then advances 'invited' invitations to
# 'reminded'. Called by whenever (/config/schedule.rb) via send_all_reminders.
def send_invitation_reminder
  invs = invitations.remindable.upcoming(2)
  case preferred_contact_method.upcase
  when 'SMS'
    # FIX: `person` was an undefined local variable inside this instance
    # method (NameError at runtime); the reminder recipient is self.
    ::InvitationReminderSms.new(to: self, invitations: invs).send
  when 'EMAIL'
    ::PersonMailer.remind(
      invitations: invs,
      email_address: email_address
    ).deliver_later
  end
  invs.each do |inv|
    if inv.aasm_state == 'invited'
      inv.aasm_state = 'reminded'
      inv.save
    end
  end
end
def to_a
fields = Person.column_names
fields.push('tags')
fields.map do |f|
field_value = send(f.to_sym)
if f == 'phone_number'
if field_value.present?
field_value.phony_formatted(format: :national, spaces: '-')
else
''
end
elsif f == 'email_address'
field_value.presence || ''
elsif f == 'tags'
tag_values.present? ? tag_values.join('|') : ''
else
field_value
end
end
end
def deactivate!(type = nil)
self.active = false
self.deactivated_at = Time.current
self.deactivated_method = type if type
save! # sends background mailchimp update
deleteFromRapidPro # remove from rapidpro
end
def reactivate!
self.active = true
save!
updateRapidPro
end
def md5_email
Digest::MD5.hexdigest(email_address.downcase) if email_address.present?
end
def update_neighborhood
n = zip_to_neighborhood(postal_code)
self.signup_at = created_at if signup_at.nil?
if n.present?
self.neighborhood = n
save
end
end
def send_new_person_notifications
User.where(new_person_notification: true).find_each do |user|
email = user.email_address
::UserMailer.new_person_notify(email_address: email, person: self).deliver_later
end
end
# FIXME: Refactor and re-enable cop
# rubocop:disable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
#
# Compare to other records in the database to find possible duplicates.
def possible_duplicates
@duplicates = {}
if last_name.present?
last_name_duplicates = Person.where(last_name: last_name).where.not(id: id)
last_name_duplicates.each do |duplicate|
duplicate_hash = {}
duplicate_hash['person'] = duplicate
duplicate_hash['match_count'] = 1
duplicate_hash['last_name_match'] = true
duplicate_hash['matches_on'] = ['Last Name']
@duplicates[duplicate.id] = duplicate_hash
end
end
if email_address.present?
email_address_duplicates = Person.where(email_address: email_address).where.not(id: id)
email_address_duplicates.each do |duplicate|
if @duplicates.key? duplicate.id
@duplicates[duplicate.id]['match_count'] += 1
@duplicates[duplicate.id]['matches_on'].push('Email Address')
else
@duplicates[duplicate.id] = {}
@duplicates[duplicate.id]['person'] = duplicate
@duplicates[duplicate.id]['match_count'] = 1
@duplicates[duplicate.id]['matches_on'] = ['Email Address']
end
@duplicates[duplicate.id]['email_address_match'] = true
end
end
if phone_number.present?
phone_number_duplicates = Person.where(phone_number: phone_number).where.not(id: id)
phone_number_duplicates.each do |duplicate|
if @duplicates.key? duplicate.id
@duplicates[duplicate.id]['match_count'] += 1
@duplicates[duplicate.id]['matches_on'].push('Phone Number')
else
@duplicates[duplicate.id] = {}
@duplicates[duplicate.id]['person'] = duplicate
@duplicates[duplicate.id]['match_count'] = 1
@duplicates[duplicate.id]['matches_on'] = ['Phone Number']
end
@duplicates[duplicate.id]['phone_number_match'] = true
end
end
if address_1.present?
address_1_duplicates = Person.where(address_1: address_1).where.not(id: id)
address_1_duplicates.each do |duplicate|
if @duplicates.key? duplicate.id
@duplicates[duplicate.id]['match_count'] += 1
@duplicates[duplicate.id]['matches_on'].push('Address_1')
else
@duplicates[duplicate.id] = {}
@duplicates[duplicate.id]['person'] = duplicate
@duplicates[duplicate.id]['match_count'] = 1
@duplicates[duplicate.id]['matches_on'] = ['Address_1']
end
@duplicates[duplicate.id]['address_1_match'] = true
end
end
@duplicates
end
# rubocop:enable Metrics/MethodLength, Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
private
end
# rubocop:enable ClassLength
|
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/alchemy/spree/version', __FILE__)
# Gem metadata for the Alchemy CMS <-> Spree connector.
Gem::Specification.new do |gem|
gem.authors = ["Thomas von Deyen"]
gem.email = ["tvd@magiclabs.de"]
# NOTE(review): "A Alchemy" reads like a typo for "An Alchemy" — left as-is
# because this string is published gem metadata.
gem.description = %q{A Alchemy CMS and Spree connector}
gem.summary = %q{The World's Most Flexible E-Commerce Platform meets The World's Most Flexible Content Management System!}
gem.homepage = "https://github.com/magiclabs/alchemy_spree"
gem.license = 'BSD New'
# Package every git-tracked file; executables come from bin/.
gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
gem.files = `git ls-files`.split("\n")
gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
gem.name = "alchemy_spree"
gem.require_paths = ["lib"]
gem.version = Alchemy::Spree::VERSION
# Pessimistic pins: Alchemy 3.1.x-compatible, Spree 3.0.x only.
gem.add_dependency('alchemy_cms', ['~> 3.1'])
gem.add_dependency('spree', ['~> 3.0.0'])
end
Allow any Spree 3.x release (relax the dependency from ~> 3.0.0 to >= 3.0, < 4.0).
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/alchemy/spree/version', __FILE__)
# Gem metadata for the Alchemy CMS <-> Spree connector.
Gem::Specification.new do |gem|
gem.authors = ["Thomas von Deyen"]
gem.email = ["tvd@magiclabs.de"]
# NOTE(review): "A Alchemy" reads like a typo for "An Alchemy" — left as-is
# because this string is published gem metadata.
gem.description = %q{A Alchemy CMS and Spree connector}
gem.summary = %q{The World's Most Flexible E-Commerce Platform meets The World's Most Flexible Content Management System!}
gem.homepage = "https://github.com/magiclabs/alchemy_spree"
gem.license = 'BSD New'
# Package every git-tracked file; executables come from bin/.
gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
gem.files = `git ls-files`.split("\n")
gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
gem.name = "alchemy_spree"
gem.require_paths = ["lib"]
gem.version = Alchemy::Spree::VERSION
gem.add_dependency('alchemy_cms', ['~> 3.1'])
# Any Spree 3.x release is accepted (>= 3.0, < 4.0).
gem.add_dependency('spree', ['>= 3.0', '< 4.0'])
end
|
require 'json'
class Person < ActiveRecord::Base
PERSON_HASH_CACHE_EXPIRE_TIME = 5
attr_accessor :guid, :password, :password2, :username, :email, :form_username, :form_given_name, :form_family_name, :form_password, :form_password2, :form_email
attr_protected :is_admin
has_many :feedbacks
has_many :listings
has_many :items, :foreign_key => "owner_id"
# Can't be used because conditions parameter can't be passed from controller
# has_many :available_items,
# :class_name => "Item",
# :foreign_key => "owner_id",
# :conditions => "status <> 'disabled'",
# :order => "title"
has_many :disabled_items,
:class_name => "Item",
:foreign_key => "owner_id",
:conditions => "status = 'disabled'",
:order => "title"
# Can't be used because conditions parameter can't be passed from controller
# has_many :available_favors,
# :class_name => "Favor",
# :foreign_key => "owner_id",
# :conditions => "status <> 'disabled'",
# :order => "title"
has_many :disabled_favors,
:class_name => "Favor",
:foreign_key => "owner_id",
:conditions => "status = 'disabled'",
:order => "title"
has_many :favors
has_many :person_interesting_listings
has_many :interesting_listings,
:through => :person_interesting_listings,
:source => :listing
has_many :person_conversations
has_many :conversations,
:through => :person_conversations,
:source => :conversation
has_and_belongs_to_many :kassi_events
has_many :received_comments,
:class_name => "PersonComment",
:foreign_key => "target_person_id"
has_one :settings
class PersonConnection < ActiveResource::Base
# This is an inner class to handle remote connection to COS database where the actual information
# of person model is stored. This is subclass of ActiveResource so it includes some automatic
# functionality to access REST interface.
#
# In practise we use here connection.post/get/put/delete and the URL and Parameters as described
# in COS documentation at #{COS_URL}
self.site = COS_URL
self.format = :json
self.timeout = COS_TIMEOUT
self.element_name = "people"
self.collection_name = "people"
def self.create_person(params, cookie)
creating_headers = {"Cookie" => cookie}
response = connection.post("#{prefix}#{element_name}", params.to_json ,creating_headers)
end
def self.get_person(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@self", {"Cookie" => cookie }))
end
def self.search(query, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}?search=" + query, {"Cookie" => cookie} ))
end
def self.get_friends(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@friends", {"Cookie" => cookie }))
end
def self.get_pending_friend_requests(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@pending_friend_requests", {"Cookie" => cookie }))
end
def self.put_attributes(params, id, cookie)
connection.put("#{prefix}#{element_name}/#{id}/@self",{:person => params}.to_json, {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
end
def self.update_avatar(image, id, cookie)
connection.put("#{prefix}#{element_name}/#{id}/@avatar", {:file => image}, {"Cookie" => cookie} )
end
def self.add_as_friend(friend_id, id, cookie)
connection.post("#{prefix}#{element_name}/#{id}/@friends", {:friend_id => friend_id}.to_json, {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
#Rails.cache.delete("person_hash.#{friend_id}_asked_with_cookie.#{cookie}")
end
def self.remove_from_friends(friend_id, id, cookie)
connection.delete("#{prefix}#{element_name}/#{id}/@friends/#{friend_id}", {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
#Rails.cache.delete("person_hash.#{friend_id}_asked_with_cookie.#{cookie}")
end
def self.remove_pending_friend_request(friend_id, id, cookie)
connection.delete("#{prefix}#{element_name}/#{id}/@pending_friend_requests/#{friend_id}", {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
#Rails.cache.delete("person_hash.#{friend_id}_asked_with_cookie.#{cookie}")
end
def self.get_groups(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@groups", {"Cookie" => cookie }))
end
def self.join_group(id, group_id, cookie)
creating_headers = {"Cookie" => cookie}
response = connection.post("#{prefix}#{element_name}/#{id}/@groups", {:group_id => group_id}.to_json, creating_headers)
end
# Repairs UTF-8 characters (e.g. umlauts) that ActiveResource's JSON
# decoding leaves double-escaped: re-serializes the hash, turns literal
# "\\u" sequences back into "\u", and parses the result again.
# The parameter must be a hash decoded from JSON by ActiveResource.
def self.fix_alphabets(json_hash)
  serialized = json_hash.to_json
  repaired = serialized.gsub(/\\\\u/, '\\u')
  JSON.parse(repaired)
end
end
def self.create(params, cookie)
# create to Common Services
person_hash = {:person => params.slice(:username, :password, :email) }
response = PersonConnection.create_person(person_hash, cookie)
#pick id from the response (same id in kassi and COS DBs)
params[:id] = response.body[/"id":"([^"]+)"/, 1]
#create locally with less attributes
super(params.except(:username, :email, :given_name, :family_name))
end
# Creates a local (Kassi DB) Person row for an id that already exists in COS.
# Returns the new Person on success, or nil on failure (logging the error).
def self.add_to_kassi_db(id)
  person = Person.new({:id => id})
  if person.save
    person
  else
    # Bug fix: the logger call used to sit AFTER `return nil`, making it
    # unreachable — failures were silently dropped. Log first, then return.
    logger.error { "Error storing person to Kassi DB with ID: #{id}" }
    nil
  end
end
def initialize(params={})
self.guid = params[:id] #store GUID to temporary attribute
super(params)
end
def after_initialize
#self.id may already be correct in this point so use ||=
self.id ||= self.guid
end
def self.search(query)
cookie = Session.kassiCookie
begin
person_hash = PersonConnection.search(query, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return person_hash
end
def username(cookie=nil)
if new_record?
return form_username ? form_username : ""
end
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["username"]
end
def name_or_username(cookie=nil)
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
if person_hash["name"] && person_hash["name"]["unstructured"] && person_hash["name"]["unstructured"] =~ /\S/
return person_hash["name"]["unstructured"]
else
return person_hash["username"]
end
end
def name(cookie=nil)
return name_or_username(cookie)
end
def given_name(cookie=nil)
if new_record?
return form_given_name ? form_given_name : ""
end
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["name"].nil?
return person_hash["name"]["given_name"]
end
def set_given_name(name, cookie)
update_attributes({:name => {:given_name => name, } }, cookie)
end
def family_name(cookie=nil)
if new_record?
return form_family_name ? form_family_name : ""
end
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["name"].nil?
return person_hash["name"]["family_name"]
end
def set_family_name(name, cookie)
update_attributes({:name => {:family_name => name } }, cookie)
end
def street_address(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["street_address"]
end
def set_street_address(street_address, cookie)
update_attributes({:address => {:street_address => street_address } }, cookie)
end
def postal_code(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["postal_code"]
end
def set_postal_code(postal_code, cookie)
update_attributes({:address => {:postal_code => postal_code } }, cookie)
end
def locality(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["locality"]
end
def set_locality(locality, cookie)
update_attributes({:address => {:locality => locality } }, cookie)
end
def unstructured_address(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["unstructured"]
end
def phone_number(cookie=nil)
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["phone_number"]
end
def set_phone_number(number, cookie)
update_attributes({:phone_number => number}, cookie)
end
def email(cookie=nil)
if new_record?
return form_email ? form_email : ""
end
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["email"]
end
def set_email(email, cookie)
update_attributes({:email => email}, cookie)
end
def password(cookie = nil)
if new_record?
return form_password ? form_password : ""
end
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["password"]
end
def set_password(password, cookie)
update_attributes({:password => password}, cookie)
end
def password2
if new_record?
return form_password2 ? form_password2 : ""
end
end
# Returns contacts of this person as an array of Person objects
def contacts
Person.find_by_sql(contact_query("id, created_at"))
end
# Returns a raw SQL query string selecting the given attributes for this
# person's contacts: people who share at least one kassi_event with them.
# SECURITY NOTE(review): both `select` and `id` are interpolated straight
# into the SQL string. `id` originates from the external COS service, so
# this should be parameterized (sanitize_sql) — flagged here rather than
# changed, because callers consume the raw string directly.
def contact_query(select)
"SELECT #{select}
FROM
people, kassi_events_people
WHERE
id = person_id AND
person_id <> '#{id}' AND
kassi_event_id IN (
SELECT kassi_event_id FROM kassi_events_people WHERE person_id = '#{id}'
)"
end
# Returns friends of this person as an array of Person objects
def friends(cookie)
Person.find_kassi_users_by_ids(get_friend_ids(cookie))
end
# Returns ids of OtaSizzle friends of this person
def get_friend_ids(cookie)
Person.get_person_ids(get_friends(cookie))
end
# Returns those people who are also kassi users (i.e. whose ids exist in
# the local `people` table).
# Hardened: the ids come from the external COS service and used to be
# joined directly into the SQL string (SQL injection risk). Now passed as
# a bind parameter; Rails expands an Array bound to `?` into a quoted list.
def self.find_kassi_users_by_ids(ids)
  # Original returned no rows for an empty list; also guard against nil,
  # which previously crashed on `ids.join`.
  return [] if ids.nil? || ids.empty?
  Person.find_by_sql(["SELECT * FROM people WHERE id IN (?)", ids])
end
def add_as_friend(friend_id, cookie)
PersonConnection.add_as_friend(friend_id, self.id, cookie)
end
def remove_from_friends(friend_id, cookie)
PersonConnection.remove_from_friends(friend_id, self.id, cookie)
end
# Removes a pending friend request (not an established friendship) in COS.
# Bug fix: this used to call PersonConnection.remove_from_friends, which
# targets the @friends collection instead of @pending_friend_requests.
def remove_pending_friend_request(friend_id, cookie)
  PersonConnection.remove_pending_friend_request(friend_id, self.id, cookie)
end
# Retrieves friends of this person from COS
def get_friends(cookie)
begin
friend_hash = PersonConnection.get_friends(self.id, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return friend_hash
end
def get_friend_requests(cookie)
begin
request_hash = PersonConnection.get_pending_friend_requests(self.id, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return request_hash
end
# Returns all the groups that this user is a member in
# as an array of Group objects
def groups(cookie)
Group.find(get_group_ids(cookie))
end
# Returns ids of OtaSizzle groups of this person
def get_group_ids(cookie)
Group.get_group_ids(get_groups(cookie))
end
# Returns a hash from COS containing groups of this person
def get_groups(cookie)
begin
group_hash = PersonConnection.get_groups(self.id, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return group_hash
end
def update_attributes(params, cookie)
#Handle name part parameters also if they are in hash root level
Person.remove_root_level_fields(params, "name", ["given_name", "family_name"])
Person.remove_root_level_fields(params, "address", ["street_address", "postal_code", "locality"])
PersonConnection.put_attributes(params, self.id, cookie)
end
# Moves the given root-level +fields+ of +params+ under the +field_type+
# sub-hash (creating the sub-hash when absent), mutating +params+ in place.
# A field is only moved when it exists at root level and is not already
# present under the sub-hash; the root-level copy is then deleted.
def self.remove_root_level_fields(params, field_type, fields)
  fields.each do |key|
    value = params[key]
    next unless value
    nested = params[field_type]
    next unless nested.nil? || nested[key].nil?
    nested = params[field_type] = Hash.new if nested.nil?
    nested[key] = value
    params.delete(key)
  end
end
def update_avatar(image, cookie)
PersonConnection.update_avatar(image, self.id, cookie)
end
# Fetches this person's full profile hash from the COS backend.
# Falls back to the session-wide Kassi cookie when none is given; on an
# UnauthorizedAccess the cookie is renewed once and the request retried.
# Returns nil when the person does not exist in COS.
# (The commented-out Rails.cache lines are left from an earlier caching
# attempt.)
def get_person_hash(cookie=nil)
cookie = Session.kassiCookie if cookie.nil?
begin
#person_hash = Rails.cache.fetch("person_hash.#{id}_asked_with_cookie.#{cookie}", :expires_in => PERSON_HASH_CACHE_EXPIRE_TIME) {PersonConnection.get_person(self.id, cookie)}
person_hash = PersonConnection.get_person(self.id, cookie)
rescue ActiveResource::UnauthorizedAccess => e
# Cookie expired: renew it and retry the fetch once.
cookie = Session.updateKassiCookie
person_hash = PersonConnection.get_person(self.id, cookie)
#Rails.cache.write("person_hash.#{id}_asked_with_cookie.#{cookie}", person_hash, :expires_in => PERSON_HASH_CACHE_EXPIRE_TIME)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return person_hash
end
def friend_status(cookie = nil)
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["connection"]
end
def available_items(conditions)
Item.find :all,
:conditions => ["owner_id = '#{id}' AND status <> 'disabled'" + conditions],
:order => "title"
end
def save_item(item)
existing_item = disabled_items.find_by_title(item.title)
if existing_item
existing_item.description = item.description
if existing_item.save
existing_item.enable
return true
else
item.errors.add(:description, "is too long")
end
else
return true if item.save
end
return false
end
def available_favors(conditions)
Favor.find :all,
:conditions => ["owner_id = '#{id}' AND status <> 'disabled'" + conditions],
:order => "title"
end
def save_favor(favor)
existing_favor = disabled_favors.find_by_title(favor.title)
if existing_favor
existing_favor.description = favor.description
if existing_favor.save
existing_favor.enable
return true
else
favor.errors.add(:description, "is too long")
end
else
return true if favor.save
end
return false
end
def join_group(group_id, cookie)
PersonConnection.join_group(self.id, group_id, cookie)
end
# Extracts the "id" of every person entry in a COS listing hash and
# returns the ids as an array.
def self.get_person_ids(person_hash)
  entries = person_hash["entry"]
  entries.map { |entry| entry["id"] }
end
end
Cache päälle, toivottavasti fiksummin kuin aikaisemmin.
require 'json'
class Person < ActiveRecord::Base
PERSON_HASH_CACHE_EXPIRE_TIME = 5
attr_accessor :guid, :password, :password2, :username, :email, :form_username, :form_given_name, :form_family_name, :form_password, :form_password2, :form_email
attr_protected :is_admin
has_many :feedbacks
has_many :listings
has_many :items, :foreign_key => "owner_id"
# Can't be used because conditions parameter can't be passed from controller
# has_many :available_items,
# :class_name => "Item",
# :foreign_key => "owner_id",
# :conditions => "status <> 'disabled'",
# :order => "title"
has_many :disabled_items,
:class_name => "Item",
:foreign_key => "owner_id",
:conditions => "status = 'disabled'",
:order => "title"
# Can't be used because conditions parameter can't be passed from controller
# has_many :available_favors,
# :class_name => "Favor",
# :foreign_key => "owner_id",
# :conditions => "status <> 'disabled'",
# :order => "title"
has_many :disabled_favors,
:class_name => "Favor",
:foreign_key => "owner_id",
:conditions => "status = 'disabled'",
:order => "title"
has_many :favors
has_many :person_interesting_listings
has_many :interesting_listings,
:through => :person_interesting_listings,
:source => :listing
has_many :person_conversations
has_many :conversations,
:through => :person_conversations,
:source => :conversation
has_and_belongs_to_many :kassi_events
has_many :received_comments,
:class_name => "PersonComment",
:foreign_key => "target_person_id"
has_one :settings
class PersonConnection < ActiveResource::Base
# This is an inner class to handle remote connection to COS database where the actual information
# of person model is stored. This is subclass of ActiveResource so it includes some automatic
# functionality to access REST interface.
#
# In practise we use here connection.post/get/put/delete and the URL and Parameters as described
# in COS documentation at #{COS_URL}
self.site = COS_URL
self.format = :json
self.timeout = COS_TIMEOUT
self.element_name = "people"
self.collection_name = "people"
def self.create_person(params, cookie)
creating_headers = {"Cookie" => cookie}
response = connection.post("#{prefix}#{element_name}", params.to_json ,creating_headers)
end
def self.get_person(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@self", {"Cookie" => cookie }))
end
def self.search(query, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}?search=" + query, {"Cookie" => cookie} ))
end
def self.get_friends(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@friends", {"Cookie" => cookie }))
end
def self.get_pending_friend_requests(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@pending_friend_requests", {"Cookie" => cookie }))
end
def self.put_attributes(params, id, cookie)
connection.put("#{prefix}#{element_name}/#{id}/@self",{:person => params}.to_json, {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
parent.cache_delete(id,cookie)
end
def self.update_avatar(image, id, cookie)
connection.put("#{prefix}#{element_name}/#{id}/@avatar", {:file => image}, {"Cookie" => cookie} )
end
def self.add_as_friend(friend_id, id, cookie)
connection.post("#{prefix}#{element_name}/#{id}/@friends", {:friend_id => friend_id}.to_json, {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
parent.cache_delete(id,cookie)
#Rails.cache.delete("person_hash.#{friend_id}_asked_with_cookie.#{cookie}")
parent.cache_delete(friend_id,cookie)
end
def self.remove_from_friends(friend_id, id, cookie)
connection.delete("#{prefix}#{element_name}/#{id}/@friends/#{friend_id}", {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
parent.cache_delete(id,cookie)
#Rails.cache.delete("person_hash.#{friend_id}_asked_with_cookie.#{cookie}")
parent.cache_delete(friend_id,cookie)
end
def self.remove_pending_friend_request(friend_id, id, cookie)
connection.delete("#{prefix}#{element_name}/#{id}/@pending_friend_requests/#{friend_id}", {"Cookie" => cookie} )
#Rails.cache.delete("person_hash.#{id}_asked_with_cookie.#{cookie}")
parent.cache_delete(id,cookie)
#Rails.cache.delete("person_hash.#{friend_id}_asked_with_cookie.#{cookie}")
parent.cache_delete(friend_id,cookie)
end
def self.get_groups(id, cookie)
return fix_alphabets(connection.get("#{prefix}#{element_name}/#{id}/@groups", {"Cookie" => cookie }))
end
def self.join_group(id, group_id, cookie)
creating_headers = {"Cookie" => cookie}
response = connection.post("#{prefix}#{element_name}/#{id}/@groups", {:group_id => group_id}.to_json, creating_headers)
end
#fixes utf8 letters
def self.fix_alphabets(json_hash)
#the parameter must be a hash that is decoded from JSON by activeResource messing up umlaut letters
JSON.parse(json_hash.to_json.gsub(/\\\\u/,'\\u'))
end
end
def self.create(params, cookie)
# create to Common Services
person_hash = {:person => params.slice(:username, :password, :email) }
response = PersonConnection.create_person(person_hash, cookie)
#pick id from the response (same id in kassi and COS DBs)
params[:id] = response.body[/"id":"([^"]+)"/, 1]
#create locally with less attributes
super(params.except(:username, :email, :given_name, :family_name))
end
# Creates a local (Kassi DB) Person row for an id that already exists in COS.
# Returns the new Person on success, or nil on failure (logging the error).
def self.add_to_kassi_db(id)
  person = Person.new({:id => id})
  if person.save
    person
  else
    # Bug fix: the logger call used to sit AFTER `return nil`, making it
    # unreachable — failures were silently dropped. Log first, then return.
    logger.error { "Error storing person to Kassi DB with ID: #{id}" }
    nil
  end
end
def initialize(params={})
self.guid = params[:id] #store GUID to temporary attribute
super(params)
end
def after_initialize
#self.id may already be correct in this point so use ||=
self.id ||= self.guid
end
def self.search(query)
cookie = Session.kassiCookie
begin
person_hash = PersonConnection.search(query, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return person_hash
end
def username(cookie=nil)
if new_record?
return form_username ? form_username : ""
end
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["username"]
end
def name_or_username(cookie=nil)
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
if person_hash["name"] && person_hash["name"]["unstructured"] && person_hash["name"]["unstructured"] =~ /\S/
return person_hash["name"]["unstructured"]
else
return person_hash["username"]
end
end
def name(cookie=nil)
return name_or_username(cookie)
end
def given_name(cookie=nil)
if new_record?
return form_given_name ? form_given_name : ""
end
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["name"].nil?
return person_hash["name"]["given_name"]
end
def set_given_name(name, cookie)
update_attributes({:name => {:given_name => name, } }, cookie)
end
def family_name(cookie=nil)
if new_record?
return form_family_name ? form_family_name : ""
end
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["name"].nil?
return person_hash["name"]["family_name"]
end
def set_family_name(name, cookie)
update_attributes({:name => {:family_name => name } }, cookie)
end
def street_address(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["street_address"]
end
def set_street_address(street_address, cookie)
update_attributes({:address => {:street_address => street_address } }, cookie)
end
def postal_code(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["postal_code"]
end
def set_postal_code(postal_code, cookie)
update_attributes({:address => {:postal_code => postal_code } }, cookie)
end
def locality(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["locality"]
end
def set_locality(locality, cookie)
update_attributes({:address => {:locality => locality } }, cookie)
end
def unstructured_address(cookie=nil)
person_hash = get_person_hash(cookie)
return "Not found!" if person_hash.nil?
return "" if person_hash["address"].nil?
return person_hash["address"]["unstructured"]
end
def phone_number(cookie=nil)
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["phone_number"]
end
def set_phone_number(number, cookie)
update_attributes({:phone_number => number}, cookie)
end
def email(cookie=nil)
if new_record?
return form_email ? form_email : ""
end
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["email"]
end
def set_email(email, cookie)
update_attributes({:email => email}, cookie)
end
def password(cookie = nil)
if new_record?
return form_password ? form_password : ""
end
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["password"]
end
def set_password(password, cookie)
update_attributes({:password => password}, cookie)
end
def password2
if new_record?
return form_password2 ? form_password2 : ""
end
end
# Returns contacts of this person as an array of Person objects
def contacts
Person.find_by_sql(contact_query("id, created_at"))
end
# Returns a raw SQL query string selecting the given attributes for this
# person's contacts: people who share at least one kassi_event with them.
# SECURITY NOTE(review): both `select` and `id` are interpolated straight
# into the SQL string. `id` originates from the external COS service, so
# this should be parameterized (sanitize_sql) — flagged here rather than
# changed, because callers consume the raw string directly.
def contact_query(select)
"SELECT #{select}
FROM
people, kassi_events_people
WHERE
id = person_id AND
person_id <> '#{id}' AND
kassi_event_id IN (
SELECT kassi_event_id FROM kassi_events_people WHERE person_id = '#{id}'
)"
end
# Returns friends of this person as an array of Person objects
def friends(cookie)
Person.find_kassi_users_by_ids(get_friend_ids(cookie))
end
# Returns ids of OtaSizzle friends of this person
def get_friend_ids(cookie)
Person.get_person_ids(get_friends(cookie))
end
# Returns those people who are also kassi users (i.e. whose ids exist in
# the local `people` table).
# Hardened: the ids come from the external COS service and used to be
# joined directly into the SQL string (SQL injection risk). Now passed as
# a bind parameter; Rails expands an Array bound to `?` into a quoted list.
def self.find_kassi_users_by_ids(ids)
  # Original returned no rows for an empty list; also guard against nil,
  # which previously crashed on `ids.join`.
  return [] if ids.nil? || ids.empty?
  Person.find_by_sql(["SELECT * FROM people WHERE id IN (?)", ids])
end
def add_as_friend(friend_id, cookie)
PersonConnection.add_as_friend(friend_id, self.id, cookie)
end
def remove_from_friends(friend_id, cookie)
PersonConnection.remove_from_friends(friend_id, self.id, cookie)
end
# Removes a pending friend request (not an established friendship) in COS.
# Bug fix: this used to call PersonConnection.remove_from_friends, which
# targets the @friends collection instead of @pending_friend_requests.
def remove_pending_friend_request(friend_id, cookie)
  PersonConnection.remove_pending_friend_request(friend_id, self.id, cookie)
end
# Retrieves friends of this person from COS
def get_friends(cookie)
begin
friend_hash = PersonConnection.get_friends(self.id, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return friend_hash
end
def get_friend_requests(cookie)
begin
request_hash = PersonConnection.get_pending_friend_requests(self.id, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return request_hash
end
# Returns all the groups that this user is a member in
# as an array of Group objects
def groups(cookie)
Group.find(get_group_ids(cookie))
end
# Returns ids of OtaSizzle groups of this person
def get_group_ids(cookie)
Group.get_group_ids(get_groups(cookie))
end
# Returns a hash from COS containing groups of this person
def get_groups(cookie)
begin
group_hash = PersonConnection.get_groups(self.id, cookie)
rescue ActiveResource::ResourceNotFound => e
#Could not find person with that id in COS Database!
return nil
end
return group_hash
end
def update_attributes(params, cookie)
#Handle name part parameters also if they are in hash root level
Person.remove_root_level_fields(params, "name", ["given_name", "family_name"])
Person.remove_root_level_fields(params, "address", ["street_address", "postal_code", "locality"])
PersonConnection.put_attributes(params, self.id, cookie)
end
# Moves the given root-level +fields+ of +params+ under the +field_type+
# sub-hash (creating the sub-hash when absent), mutating +params+ in place.
# A field is only moved when it exists at root level and is not already
# present under the sub-hash; the root-level copy is then deleted.
def self.remove_root_level_fields(params, field_type, fields)
  fields.each do |key|
    value = params[key]
    next unless value
    nested = params[field_type]
    next unless nested.nil? || nested[key].nil?
    nested = params[field_type] = Hash.new if nested.nil?
    nested[key] = value
    params.delete(key)
  end
end
def update_avatar(image, cookie)
PersonConnection.update_avatar(image, self.id, cookie)
end
# Fetches this person's profile hash from COS, going through the Rails
# cache (Person.cache_fetch). Falls back to the session-wide Kassi cookie
# when none is given. Returns nil when the person does not exist in COS.
def get_person_hash(cookie=nil)
  cookie = Session.kassiCookie if cookie.nil?
  begin
    person_hash = Person.cache_fetch(id, cookie)
  rescue ActiveResource::UnauthorizedAccess => e
    # Cookie expired: renew it, refetch directly, and refresh the cache
    # entry under the new cookie's key.
    cookie = Session.updateKassiCookie
    person_hash = PersonConnection.get_person(self.id, cookie)
    # Bug fix: cache_write is a class method; the previous bare
    # `cache_write(...)` call raised NoMethodError from this instance
    # context, so the renewed fetch was never cached.
    Person.cache_write(person_hash, id, cookie)
  rescue ActiveResource::ResourceNotFound => e
    # Person does not exist in the COS database.
    return nil
  end
  person_hash
end
def friend_status(cookie = nil)
person_hash = get_person_hash(cookie)
return "Person not found!" if person_hash.nil?
return person_hash["connection"]
end
def available_items(conditions)
Item.find :all,
:conditions => ["owner_id = '#{id}' AND status <> 'disabled'" + conditions],
:order => "title"
end
def save_item(item)
existing_item = disabled_items.find_by_title(item.title)
if existing_item
existing_item.description = item.description
if existing_item.save
existing_item.enable
return true
else
item.errors.add(:description, "is too long")
end
else
return true if item.save
end
return false
end
def available_favors(conditions)
Favor.find :all,
:conditions => ["owner_id = '#{id}' AND status <> 'disabled'" + conditions],
:order => "title"
end
def save_favor(favor)
existing_favor = disabled_favors.find_by_title(favor.title)
if existing_favor
existing_favor.description = favor.description
if existing_favor.save
existing_favor.enable
return true
else
favor.errors.add(:description, "is too long")
end
else
return true if favor.save
end
return false
end
def join_group(group_id, cookie)
PersonConnection.join_group(self.id, group_id, cookie)
end
# Extracts the "id" of every person entry in a COS listing hash and
# returns the ids as an array.
def self.get_person_ids(person_hash)
  entries = person_hash["entry"]
  entries.map { |entry| entry["id"] }
end
private
# Constructs the key used for caching a person hash. The cache stores one
# entry per (person, asker): the COS response differs depending on who is
# asking, and the asker is identified by their cookie.
# Fix: the key previously embedded `cookie.hash`. Object#hash is seeded
# per Ruby process, so the same cookie produced different keys in
# different server processes and after restarts — the shared cache could
# never hit across workers, and stale entries could not be deleted. A
# SHA1 digest of the cookie is stable everywhere (and still avoids
# storing the raw cookie in the key).
def self.cache_key(id, cookie)
  require "digest" # idempotent; keeps this file free of a top-level edit
  "person_hash.#{id}_asked_by.#{Digest::SHA1.hexdigest(cookie.to_s)}"
end
# Methods to simplify the cache access.
# NOTE(review): although these sit below `private`, the `private` keyword
# does not affect `def self.` methods — they remain publicly callable,
# which PersonConnection relies on via `parent.cache_delete`.
# Fetches the cached person hash for (id, cookie); on a miss, fetches it
# from COS and stores it for PERSON_HASH_CACHE_EXPIRE_TIME.
def self.cache_fetch(id,cookie)
Rails.cache.fetch(cache_key(id,cookie), :expires_in => PERSON_HASH_CACHE_EXPIRE_TIME) {PersonConnection.get_person(id, cookie)}
end
# Stores a freshly fetched person hash under the (id, cookie) cache key.
def self.cache_write(person_hash,id,cookie)
Rails.cache.write(cache_key(id,cookie), person_hash, :expires_in => PERSON_HASH_CACHE_EXPIRE_TIME)
end
# Invalidates the cached person hash for the given (id, cookie) pair.
def self.cache_delete(id,cookie)
Rails.cache.delete(cache_key(id,cookie))
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: algoliasearch 1.2.0 ruby lib
Gem::Specification.new do |s|
s.name = "algoliasearch"
s.version = "1.2.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Algolia"]
s.date = "2014-01-11"
s.description = "A simple Ruby client for the algolia.com REST API"
s.email = "contact@algolia.com"
s.extra_rdoc_files = [
"ChangeLog",
"LICENSE.txt",
"README.md"
]
s.files = [
".rspec",
".travis.yml",
"ChangeLog",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"algoliasearch.gemspec",
"contacts.json",
"lib/algolia/client.rb",
"lib/algolia/error.rb",
"lib/algolia/index.rb",
"lib/algolia/protocol.rb",
"lib/algolia/version.rb",
"lib/algolia/webmock.rb",
"lib/algoliasearch.rb",
"resources/ca-bundle.crt",
"spec/client_spec.rb",
"spec/mock_spec.rb",
"spec/spec_helper.rb",
"spec/stub_spec.rb"
]
s.homepage = "http://github.com/algolia/algoliasearch-client-ruby"
s.licenses = ["MIT"]
s.rubygems_version = "2.2.1"
s.summary = "A simple Ruby client for the algolia.com REST API"
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<httpclient>, ["~> 2.3"])
s.add_runtime_dependency(%q<json>, [">= 1.5.1"])
s.add_development_dependency "travis"
s.add_development_dependency "rake"
s.add_development_dependency "rdoc"
else
s.add_dependency(%q<httpclient>, ["~> 2.3"])
s.add_dependency(%q<json>, [">= 1.5.1"])
end
else
s.add_dependency(%q<httpclient>, ["~> 2.3"])
s.add_dependency(%q<json>, [">= 1.5.1"])
end
end
Bump to 1.2.3
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: algoliasearch 1.2.0 ruby lib
Gem::Specification.new do |s|
s.name = "algoliasearch"
s.version = "1.2.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Algolia"]
s.date = "2014-01-11"
s.description = "A simple Ruby client for the algolia.com REST API"
s.email = "contact@algolia.com"
s.extra_rdoc_files = [
"ChangeLog",
"LICENSE.txt",
"README.md"
]
s.files = [
".rspec",
".travis.yml",
"ChangeLog",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"algoliasearch.gemspec",
"contacts.json",
"lib/algolia/client.rb",
"lib/algolia/error.rb",
"lib/algolia/index.rb",
"lib/algolia/protocol.rb",
"lib/algolia/version.rb",
"lib/algolia/webmock.rb",
"lib/algoliasearch.rb",
"resources/ca-bundle.crt",
"spec/client_spec.rb",
"spec/mock_spec.rb",
"spec/spec_helper.rb",
"spec/stub_spec.rb"
]
s.homepage = "http://github.com/algolia/algoliasearch-client-ruby"
s.licenses = ["MIT"]
s.rubygems_version = "2.2.1"
s.summary = "A simple Ruby client for the algolia.com REST API"
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<httpclient>, ["~> 2.3"])
s.add_runtime_dependency(%q<json>, [">= 1.5.1"])
s.add_development_dependency "travis"
s.add_development_dependency "rake"
s.add_development_dependency "rdoc"
else
s.add_dependency(%q<httpclient>, ["~> 2.3"])
s.add_dependency(%q<json>, [">= 1.5.1"])
end
else
s.add_dependency(%q<httpclient>, ["~> 2.3"])
s.add_dependency(%q<json>, [">= 1.5.1"])
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.