CombinedText stringlengths 4 3.42M |
|---|
require 'spec_helper'
require 'sinatra/filtering_parameters'
describe Sinatra::FilteringParameters do
def post_with_filter(args)
mock_app do
register Sinatra::FilteringParameters
if args[:allow].nil?
post('/sample/:name'){ params.to_json }
else
post('/sample/:name', allow: args[:allow]){ params.to_json }
end
end
post '/sample/foo', args[:pass_params]
end
def result_should_be_equal(filterd_params)
last_response.should be_ok
last_response.body.should == filterd_params.to_json
end
describe 'permitted parameters nothing' do
it "when success" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 }
)
result_should_be_equal({ "a"=>"1", "b"=>"2", "c"=>"3", "splat"=>[], "captures"=>["foo"], "name"=>"foo" })
end
end
describe 'permitted parameters specified in' do
it 'symbol' do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => :name
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"], "name"=>"foo" })
end
it "string" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => 'name'
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"], "name"=>"foo" })
end
it "array" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => [:name, :a]
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"], "name"=>"foo", "a"=>"1" })
end
context "empty" do
it "string" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => ''
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"] })
end
it "array" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => []
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"] })
end
end
end
end
fix rspec
require 'spec_helper'
describe Sinatra::FilteringParameters do
def post_with_filter(args)
mock_app do
register Sinatra::FilteringParameters
if args[:allow].nil?
post('/sample/:name'){ params.to_json }
else
post('/sample/:name', allow: args[:allow]){ params.to_json }
end
end
post '/sample/foo', args[:pass_params]
end
def result_should_be_equal(filterd_params)
last_response.body.should == filterd_params.to_json
end
describe 'permitted parameters nothing' do
it "when success" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 }
)
result_should_be_equal({ "a"=>"1", "b"=>"2", "c"=>"3", "splat"=>[], "captures"=>["foo"], "name"=>"foo" })
end
end
describe 'permitted parameters specified in' do
it 'symbol' do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => :name
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"], "name"=>"foo" })
end
it "string" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => 'name'
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"], "name"=>"foo" })
end
it "array" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => [:name, :a]
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"], "name"=>"foo", "a"=>"1" })
end
context "empty" do
it "string" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => ''
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"] })
end
it "array" do
post_with_filter(
:pass_params => { :a => 1, :b => 2, :c => 3 },
:allow => []
)
result_should_be_equal({ "splat"=>[], "captures"=>["foo"] })
end
end
end
end
|
require_relative "../spec_helper"
require 'logger'
describe "common_logger plugin" do
def cl_app(&block)
app(:common_logger, &block)
@logger = StringIO.new
@app.plugin :common_logger, @logger
end
it 'logs requests to given logger/stream' do
cl_app(&:path_info)
body.must_equal '/'
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d -\d\d\d\d\] "GET \/ " 200 1 0.\d\d\d\d\n\z/
@logger.rewind
@logger.truncate(0)
body('', 'HTTP_X_FORWARDED_FOR'=>'1.1.1.1', 'REMOTE_USER'=>'je', 'REQUEST_METHOD'=>'POST', 'QUERY_STRING'=>'', "HTTP_VERSION"=>'HTTP/1.1').must_equal ''
@logger.rewind
@logger.read.must_match /\A1\.1\.1\.1 - je \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d -\d\d\d\d\] "POST HTTP\/1.1" 200 - 0.\d\d\d\d\n\z/
@logger.rewind
@logger.truncate(0)
body('/b', 'REMOTE_ADDR'=>'1.1.1.2', 'QUERY_STRING'=>'foo=bar', "HTTP_VERSION"=>'HTTP/1.0').must_equal '/b'
@logger.rewind
@logger.read.must_match /\A1\.1\.1\.2 - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d -\d\d\d\d\] "GET \/b\?foo=bar HTTP\/1.0" 200 2 0.\d\d\d\d\n\z/
@app.plugin :common_logger, Logger.new(@logger)
@logger.rewind
@logger.truncate(0)
body.must_equal '/'
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d -\d\d\d\d\] "GET \/ " 200 1 0.\d\d\d\d\n\z/
end
it 'skips timer information if not available' do
cl_app do |r|
@_request_timer = nil
r.path_info
end
body.must_equal '/'
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d -\d\d\d\d\] "GET \/ " 200 1 -\n\z/
end
it 'skips length information if not available' do
cl_app do |r|
r.halt [500, {}, []]
end
body.must_equal ''
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d -\d\d\d\d\] "GET \/ " 500 - 0.\d\d\d\d\n\z/
end
end
Fix common_logger specs to work with zero or positive time zone offsets
require_relative "../spec_helper"
require 'logger'
describe "common_logger plugin" do
def cl_app(&block)
app(:common_logger, &block)
@logger = StringIO.new
@app.plugin :common_logger, @logger
end
it 'logs requests to given logger/stream' do
cl_app(&:path_info)
body.must_equal '/'
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d [-+]\d\d\d\d\] "GET \/ " 200 1 0.\d\d\d\d\n\z/
@logger.rewind
@logger.truncate(0)
body('', 'HTTP_X_FORWARDED_FOR'=>'1.1.1.1', 'REMOTE_USER'=>'je', 'REQUEST_METHOD'=>'POST', 'QUERY_STRING'=>'', "HTTP_VERSION"=>'HTTP/1.1').must_equal ''
@logger.rewind
@logger.read.must_match /\A1\.1\.1\.1 - je \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d [-+]\d\d\d\d\] "POST HTTP\/1.1" 200 - 0.\d\d\d\d\n\z/
@logger.rewind
@logger.truncate(0)
body('/b', 'REMOTE_ADDR'=>'1.1.1.2', 'QUERY_STRING'=>'foo=bar', "HTTP_VERSION"=>'HTTP/1.0').must_equal '/b'
@logger.rewind
@logger.read.must_match /\A1\.1\.1\.2 - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d [-+]\d\d\d\d\] "GET \/b\?foo=bar HTTP\/1.0" 200 2 0.\d\d\d\d\n\z/
@app.plugin :common_logger, Logger.new(@logger)
@logger.rewind
@logger.truncate(0)
body.must_equal '/'
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d [-+]\d\d\d\d\] "GET \/ " 200 1 0.\d\d\d\d\n\z/
end
it 'skips timer information if not available' do
cl_app do |r|
@_request_timer = nil
r.path_info
end
body.must_equal '/'
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d [-+]\d\d\d\d\] "GET \/ " 200 1 -\n\z/
end
it 'skips length information if not available' do
cl_app do |r|
r.halt [500, {}, []]
end
body.must_equal ''
@logger.rewind
@logger.read.must_match /\A- - - \[\d\d\/[A-Z][a-z]{2}\/\d\d\d\d:\d\d:\d\d:\d\d [-+]\d\d\d\d\] "GET \/ " 500 - 0.\d\d\d\d\n\z/
end
end
|
class CodesController < ApplicationController
before_action :set_code, only: [:show, :edit, :update, :destroy]
# GET /codes
# GET /codes.json
def index
if params.has_key?(:contract_id)
@codes = Code.where(contract_id: params[:contract_id] )
else
@codes = Code.all
end
end
# GET /codes/1
# GET /codes/1.json
def show
end
# GET /codes/new
def new
@code = Code.new
end
# GET /codes/1/edit
def edit
end
# POST /codes
# POST /codes.json
def create
@code = Code.new(code_params)
scrape_events(@code)
respond_to do |format|
if @code.save
format.html { redirect_to @code, notice: 'Code was successfully created.' }
format.json { render :show, status: :created, location: @code }
else
format.html { render :new }
format.json { render json: @code.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /codes/1
# PATCH/PUT /codes/1.json
def update
respond_to do |format|
if @code.update(code_params)
scrape_events(@code)
format.html { redirect_to @code, notice: 'Code was successfully updated.' }
format.json { render :show, status: :ok, location: @code }
else
format.html { render :edit }
format.json { render json: @code.errors, status: :unprocessable_entity }
end
end
end
# DELETE /codes/1
# DELETE /codes/1.json
def destroy
@code.destroy
respond_to do |format|
format.html { redirect_to codes_url, notice: 'Code was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_code
@code = Code.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def code_params
params.require(:code).permit(:version, :code, :contract_id)
end
def scrape_events(code)
logger.info('Scrape events.')
content = code.code
lines = content.split(/\r\n/)
logger.info(lines)
lines.grep(/^\s*def\s+(sc_event_[a-zA-Z0-9_]+)/){
sc_event = ScEvent.new
sc_event.callback = $1
sc_event.code = code
sc_event.save
logger.info($1)
}
end
end
Code author is saved
class CodesController < ApplicationController
before_action :set_code, only: [:show, :edit, :update, :destroy]
# GET /codes
# GET /codes.json
def index
if params.has_key?(:contract_id)
@codes = Code.where(contract_id: params[:contract_id] )
else
@codes = Code.where(author: session[:user_id])
end
end
# GET /codes/1
# GET /codes/1.json
def show
end
# GET /codes/new
def new
@code = Code.new
end
# GET /codes/1/edit
def edit
end
# POST /codes
# POST /codes.json
def create
@code = Code.new(code_params)
@code.author = session[:user_id]
scrape_events(@code)
respond_to do |format|
if @code.save
format.html { redirect_to @code, notice: 'Code was successfully created.' }
format.json { render :show, status: :created, location: @code }
else
format.html { render :new }
format.json { render json: @code.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /codes/1
# PATCH/PUT /codes/1.json
def update
respond_to do |format|
if @code.update(code_params)
scrape_events(@code)
format.html { redirect_to @code, notice: 'Code was successfully updated.' }
format.json { render :show, status: :ok, location: @code }
else
format.html { render :edit }
format.json { render json: @code.errors, status: :unprocessable_entity }
end
end
end
# DELETE /codes/1
# DELETE /codes/1.json
def destroy
@code.destroy
respond_to do |format|
format.html { redirect_to codes_url, notice: 'Code was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_code
@code = Code.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def code_params
params.require(:code).permit(:version, :code, :contract_id)
end
def scrape_events(code)
logger.info('Scrape events.')
content = code.code
lines = content.split(/\r\n/)
logger.info(lines)
lines.grep(/^\s*def\s+(sc_event_[a-zA-Z0-9_]+)/){
sc_event = ScEvent.new
sc_event.callback = $1
sc_event.code = code
sc_event.save
logger.info($1)
}
end
end
|
require 'rails_helper'
describe UserPolicy do # rubocop:disable Metrics/BlockLength
let(:user) { double }
let(:record) { double }
subject { described_class.new(user, record) }
describe '#initialize' do
its(:user) { should eq(user) }
its(:record) { should eq(record) }
end
describe '#index?' do
specify { expect(subject.index?).to eq(false) }
end
describe '#show?' do
context 'true' do
let!(:user) { create(:user) }
let(:record) { user }
specify { expect(subject.show?).to eq(true) }
end
context 'false' do
let!(:user) { create(:user) }
let!(:record) { create(:user) }
specify { expect(subject.show?).to eq(false) }
end
end
describe '#create?' do
specify { expect(subject.create?).to eq(false) }
end
describe '#new?' do
specify { expect(subject.new?).to eq(false) }
end
describe '#update?' do
before { expect(subject).to receive(:show?) }
specify { expect { subject.update? }.not_to raise_error }
end
describe '#edit?' do
before { expect(subject).to receive(:show?) }
specify { expect { subject.edit? }.not_to raise_error }
end
describe '#destroy?' do
before { expect(subject).to receive(:show?) }
specify { expect { subject.destroy? }.not_to raise_error }
end
describe '#scope' do
before do
#
# Pundit.policy_scope!(user, User)
#
expect(Pundit).to receive(:policy_scope!).with(user, User)
end
specify { expect { subject.scope }.not_to raise_error }
end
end
# TODO: recheck this
describe UserPolicy::Scope do
let(:user) { double }
let(:scope) { double }
subject { described_class.new(user, scope) }
describe '#initialize' do
its(:user) { should eq(user) }
its(:scope) { should eq(scope) }
end
describe '#resolve' do
let(:user) { stub_model User, id: 42 }
before do
#
# subject.scope.where(id: user.id) => scope
#
expect(subject).to receive(:scope) do
double.tap do |a|
expect(a).to receive(:where).with(id: user.id).and_return(scope)
end
end
end
specify { expect(subject.resolve).to eq(scope) }
end
end
Use factories
require 'rails_helper'
describe UserPolicy do # rubocop:disable Metrics/BlockLength
let(:user) { double }
let(:record) { double }
subject { described_class.new(user, record) }
describe '#initialize' do
its(:user) { should eq(user) }
its(:record) { should eq(record) }
end
describe '#index?' do
specify { expect(subject.index?).to eq(false) }
end
describe '#show?' do
context 'true' do
let!(:user) { create(:user) }
let(:record) { user }
specify { expect(subject.show?).to eq(true) }
end
context 'false' do
let!(:user) { create(:user) }
let!(:record) { create(:user) }
specify { expect(subject.show?).to eq(false) }
end
end
describe '#create?' do
specify { expect(subject.create?).to eq(false) }
end
describe '#new?' do
specify { expect(subject.new?).to eq(false) }
end
describe '#update?' do
before { expect(subject).to receive(:show?) }
specify { expect { subject.update? }.not_to raise_error }
end
describe '#edit?' do
before { expect(subject).to receive(:show?) }
specify { expect { subject.edit? }.not_to raise_error }
end
describe '#destroy?' do
before { expect(subject).to receive(:show?) }
specify { expect { subject.destroy? }.not_to raise_error }
end
describe '#scope' do
before do
#
# Pundit.policy_scope!(user, User)
#
expect(Pundit).to receive(:policy_scope!).with(user, User)
end
specify { expect { subject.scope }.not_to raise_error }
end
end
describe UserPolicy::Scope do
let(:user) { double }
let(:scope) { User }
subject { described_class.new(user, scope) }
describe '#initialize' do
its(:user) { should eq(user) }
its(:scope) { should eq(scope) }
end
describe '#resolve' do
let!(:user) { create(:user) }
specify { expect(subject.resolve).to eq([user]) }
end
end
|
require File.dirname(__FILE__) + '/../spec_helper'
include PoolParty::Cloud
include PoolParty::Resources
class TestServiceClass
plugin :test_service do
def enable
has_file(:name => "/etc/poolparty/lobos")
end
end
end
describe "Cloud" do
before(:each) do
setup
reset_resources!
end
describe "wrapped" do
before(:each) do
@obj = Object.new
@pool = pool :just_pool do; end
end
it "should respond to the pool method outside the block" do
@obj.respond_to?(:cloud).should == true
end
describe "global" do
before(:each) do
@cloud1 = cloud :pop do;end
end
it "should store the cloud in the global list of clouds" do
@obj.clouds.has_key?(:pop).should == true
end
it "should store the cloud" do
@obj.cloud(:pop).should == @cloud1
end
it "should have set the using base on intantiation to ec2" do
@cloud1.using_remoter?.should_not == nil
end
it "should say the remoter_base is ec2 (by default)" do
@cloud1.remote_base.should == PoolParty::Ec2
end
end
it "should return the cloud if the cloud key is already in the clouds list" do
@cld = cloud :pop do;end
@pool.cloud(:pop).should == @cld
end
describe "options" do
before(:each) do
reset!
setup
@p = pool :options do
minimum_instances 100
access_key "access_key"
cloud :apple do
access_key "cloud_access_key"
end
end
@c = @p.cloud(:apple)
end
it "should be able to grab the cloud from the pool" do
@c.should == @p.cloud(:apple)
end
it "should take the options set on the pool" do
@p.minimum_instances.should == 100
end
it "should take the access_key option set from the cloud" do
@c.access_key.should == "cloud_access_key"
end
end
describe "block" do
before(:each) do
reset!
@cloud = Cloud.new(:test, @pool) do
# Inside cloud block
keypair "fake_keypair"
end
@cloud.stub!(:plugin_store).and_return []
end
it "should be able to pull the pool from the cloud" do
@cloud.parent == @pool
end
it "should have the outer pool listed as the parent of the inner cloud" do
@pool = pool :knick_knack do
cloud :paddy_wack do
end
end
cloud(:paddy_wack).parent.should == pool(:knick_knack)
end
it "should have services in an array" do
@cloud.services.class.should == Array
end
it "should have no services in the array when there are no services defined" do
@cloud.services.size.should == 0
end
it "should respond to a configure method" do
@cloud.respond_to?(:configure).should == true
end
describe "configuration" do
before(:each) do
reset!
@cloud2 = Cloud.new(:test, @pool) do
minimum_instances 1
maximum_instances 2
end
end
it "should be able to se the minimum_instances without the var" do
@cloud2.minimum_instances.should == 1
end
it "should be able to se the maximum_instances with the =" do
@cloud2.maximum_instances.should == 2
end
end
describe "options" do
it "should set the minimum_instances to 2" do
@cloud.minimum_instances.should == 2
end
it "should set the maximum_instances to 5" do
@cloud.maximum_instances.should == 5
end
it "should be able to set the minimum instances" do
@cloud.minimum_instances 3
@cloud.minimum_instances.should == 3
end
it "should be able to take a hash from configure and convert it to the options" do
@cloud.configure( {:minimum_instances => 1, :maximum_instances => 10, :keypair => "friend"} )
@cloud.keypair.should == "friend"
end
describe "minimum_instances/maximum_instances as a range" do
before(:each) do
reset!
@pool = pool :just_pool do
cloud :app do
instances 8..15
end
end
@cloud = @pool.cloud(:app)
end
it "should set the minimum based on the range" do
@cloud.minimum_instances.should == 8
end
it "should set the maximum based on the range set by instances" do
@cloud.maximum_instances.should == 15
end
end
describe "keypair" do
before(:each) do
reset!
end
it "should be able to define a keypair in the cloud" do
@c = cloud :app do
keypair "hotdog"
end
@c.keypair.should == "hotdog"
end
it "should take the pool parent's keypair if it's defined on the pool" do
pool :pool do
keypair "ney"
cloud :app do
end
cloud :group do
end
end
pool(:pool).cloud(:app).keypair.should == "ney"
pool(:pool).cloud(:group).keypair.should == "ney"
end
it "should generate a keypair based on the cloud name if none is defined" do
pool :pool do
cloud :app do
end
cloud :nickes do
end
end
pool(:pool).cloud(:app).keypair.should == "pool_app"
pool(:pool).cloud(:nickes).keypair.should == "pool_nickes"
end
end
describe "Manifest" do
before(:each) do
reset!
stub_list_from_remote_for(@cloud)
@cloud.instance_eval do
has_file(:name => "/etc/httpd/http.conf") do
content <<-EOE
hello my lady
EOE
end
has_gempackage(:name => "poolparty")
has_package(:name => "dummy")
end
end
it "should it should have the method build_manifest" do
@cloud.respond_to?(:build_manifest).should == true
end
it "should make a new 'haproxy' class" do
@cloud.stub!(:realize_plugins!).and_return true
PoolPartyHaproxyClass.should_receive(:new).once
@cloud.add_poolparty_base_requirements
end
it "should have 3 resources" do
@cloud.add_poolparty_base_requirements
@cloud.number_of_resources.should > 2
end
it "should receive add_poolparty_base_requirements before building the manifest" do
@cloud.should_receive(:add_poolparty_base_requirements).once
@cloud.build_manifest
end
describe "add_poolparty_base_requirements" do
before(:each) do
reset!
@cloud.instance_eval do
@heartbeat = nil
end
@hb = "heartbeat".class_constant.new(@cloud)
@cloud.stub!(:realize_plugins!).and_return []
end
it "should call initialize on heartbeat (in add_poolparty_base_requirements)" do
@cloud.stub!(:realize_plugins!).and_return []
@hb.class.should_receive(:new).and_return true
@cloud.add_poolparty_base_requirements
end
it "should call heartbeat on the cloud" do
@cloud.should_receive(:heartbeat).and_return true
@cloud.add_poolparty_base_requirements
end
it "should call Hearbeat.new" do
"heartbeat".class_constant.should_receive(:new).and_return @hb
@cloud.add_poolparty_base_requirements
end
it "should call enable on the plugin call" do
@hb = "heartbeat".class_constant
"heartbeat".class_constant.stub!(:new).and_return @hb
@cloud.add_poolparty_base_requirements
@cloud.heartbeat.should == @hb
end
describe "after adding" do
before(:each) do
stub_list_from_remote_for(@cloud)
@cloud.add_poolparty_base_requirements
end
it "should add resources onto the heartbeat class inside the cloud" do
@cloud.services.size.should > 0
end
it "should store the class heartbeat" do
@cloud.services.map {|a| a.class}.include?("heartbeat".class_constant).should == true
end
it "should have an array of resources on the heartbeat" do
@cloud.services.first.resources.class.should == Hash
end
describe "resources" do
before(:each) do
@cloud8 = Cloud.new(:tester, @pool) do
test_service
end
@service = @cloud8.services.first
@files = @service.resource(:file)
end
it "should have a file resource" do
@files.first.nil?.should == false
end
it "should have an array of lines" do
@files.class.should == Array
end
it "should not be empty" do
@files.should_not be_empty
end
end
end
end
describe "building" do
before(:each) do
str = "master 192.168.0.1
node1 192.168.0.2"
@sample_instances_list = [{:ip => "192.168.0.1", :name => "master"}, {:ip => "192.168.0.2", :name => "node1"}]
@ris = @sample_instances_list.map {|h| PoolParty::Remote::RemoteInstance.new(h, @cloud) }
stub_remoter_for(@cloud)
@manifest = @cloud.build_manifest
end
it "should return a string when calling build_manifest" do
@manifest.class.should == String
end
it "should have a comment of # file in the manifest as described by the has_file" do
@manifest.should =~ /file \{/
end
it "should have the comment of a package in the manifest" do
@manifest.should =~ /package \{/
end
it "should have the comment for haproxy in the manifest" do
@manifest.should =~ /haproxy/
end
it "should include the poolparty gem" do
@manifest.should =~ /package \{/
end
it "should include custom functions" do
@manifest.should =~ /define line\(\$file/
File.open("test_manifest.pp", "w+") {|f| f << @manifest}
end
end
describe "building with an existing manifest" do
before(:each) do
@file = "/etc/puppet/manifests/nodes/nodes.pp"
@file.stub!(:read).and_return "nodes generate"
::FileTest.stub!(:file?).with("/etc/puppet/manifests/classes/poolparty.pp").and_return true
@cloud.stub!(:open).with("/etc/puppet/manifests/classes/poolparty.pp").and_return @file
end
it "should not call resources_string_from_resources if the file /etc/puppet/manifests/nodes/nodes.pp exists" do
@cloud.should_not_receive(:add_poolparty_base_requirements)
@cloud.build_manifest
end
it "should build from the existing file" do
@cloud.build_manifest.should == "nodes generate"
end
end
end
end
describe "instances" do
before(:each) do
@cloud3 = cloud :pop do;keypair "fake_keypair";end
stub_list_from_remote_for(@cloud3)
end
it "should respond to the method master" do
@cloud3.respond_to?(:master).should == true
end
it "should return a master that is not nil" do
@cloud3.master.should_not be_nil
end
end
end
end
end
Removed creation of the test manifest
require File.dirname(__FILE__) + '/../spec_helper'
include PoolParty::Cloud
include PoolParty::Resources
class TestServiceClass
plugin :test_service do
def enable
has_file(:name => "/etc/poolparty/lobos")
end
end
end
describe "Cloud" do
before(:each) do
setup
reset_resources!
end
describe "wrapped" do
before(:each) do
@obj = Object.new
@pool = pool :just_pool do; end
end
it "should respond to the pool method outside the block" do
@obj.respond_to?(:cloud).should == true
end
describe "global" do
before(:each) do
@cloud1 = cloud :pop do;end
end
it "should store the cloud in the global list of clouds" do
@obj.clouds.has_key?(:pop).should == true
end
it "should store the cloud" do
@obj.cloud(:pop).should == @cloud1
end
it "should have set the using base on intantiation to ec2" do
@cloud1.using_remoter?.should_not == nil
end
it "should say the remoter_base is ec2 (by default)" do
@cloud1.remote_base.should == PoolParty::Ec2
end
end
it "should return the cloud if the cloud key is already in the clouds list" do
@cld = cloud :pop do;end
@pool.cloud(:pop).should == @cld
end
describe "options" do
before(:each) do
reset!
setup
@p = pool :options do
minimum_instances 100
access_key "access_key"
cloud :apple do
access_key "cloud_access_key"
end
end
@c = @p.cloud(:apple)
end
it "should be able to grab the cloud from the pool" do
@c.should == @p.cloud(:apple)
end
it "should take the options set on the pool" do
@p.minimum_instances.should == 100
end
it "should take the access_key option set from the cloud" do
@c.access_key.should == "cloud_access_key"
end
end
describe "block" do
before(:each) do
reset!
@cloud = Cloud.new(:test, @pool) do
# Inside cloud block
keypair "fake_keypair"
end
@cloud.stub!(:plugin_store).and_return []
end
it "should be able to pull the pool from the cloud" do
@cloud.parent == @pool
end
it "should have the outer pool listed as the parent of the inner cloud" do
@pool = pool :knick_knack do
cloud :paddy_wack do
end
end
cloud(:paddy_wack).parent.should == pool(:knick_knack)
end
it "should have services in an array" do
@cloud.services.class.should == Array
end
it "should have no services in the array when there are no services defined" do
@cloud.services.size.should == 0
end
it "should respond to a configure method" do
@cloud.respond_to?(:configure).should == true
end
describe "configuration" do
before(:each) do
reset!
@cloud2 = Cloud.new(:test, @pool) do
minimum_instances 1
maximum_instances 2
end
end
it "should be able to se the minimum_instances without the var" do
@cloud2.minimum_instances.should == 1
end
it "should be able to se the maximum_instances with the =" do
@cloud2.maximum_instances.should == 2
end
end
describe "options" do
it "should set the minimum_instances to 2" do
@cloud.minimum_instances.should == 2
end
it "should set the maximum_instances to 5" do
@cloud.maximum_instances.should == 5
end
it "should be able to set the minimum instances" do
@cloud.minimum_instances 3
@cloud.minimum_instances.should == 3
end
it "should be able to take a hash from configure and convert it to the options" do
@cloud.configure( {:minimum_instances => 1, :maximum_instances => 10, :keypair => "friend"} )
@cloud.keypair.should == "friend"
end
describe "minimum_instances/maximum_instances as a range" do
before(:each) do
reset!
@pool = pool :just_pool do
cloud :app do
instances 8..15
end
end
@cloud = @pool.cloud(:app)
end
it "should set the minimum based on the range" do
@cloud.minimum_instances.should == 8
end
it "should set the maximum based on the range set by instances" do
@cloud.maximum_instances.should == 15
end
end
describe "keypair" do
before(:each) do
reset!
end
it "should be able to define a keypair in the cloud" do
@c = cloud :app do
keypair "hotdog"
end
@c.keypair.should == "hotdog"
end
it "should take the pool parent's keypair if it's defined on the pool" do
pool :pool do
keypair "ney"
cloud :app do
end
cloud :group do
end
end
pool(:pool).cloud(:app).keypair.should == "ney"
pool(:pool).cloud(:group).keypair.should == "ney"
end
it "should generate a keypair based on the cloud name if none is defined" do
pool :pool do
cloud :app do
end
cloud :nickes do
end
end
pool(:pool).cloud(:app).keypair.should == "pool_app"
pool(:pool).cloud(:nickes).keypair.should == "pool_nickes"
end
end
describe "Manifest" do
before(:each) do
reset!
stub_list_from_remote_for(@cloud)
@cloud.instance_eval do
has_file(:name => "/etc/httpd/http.conf") do
content <<-EOE
hello my lady
EOE
end
has_gempackage(:name => "poolparty")
has_package(:name => "dummy")
end
end
it "should it should have the method build_manifest" do
@cloud.respond_to?(:build_manifest).should == true
end
it "should make a new 'haproxy' class" do
@cloud.stub!(:realize_plugins!).and_return true
PoolPartyHaproxyClass.should_receive(:new).once
@cloud.add_poolparty_base_requirements
end
it "should have 3 resources" do
@cloud.add_poolparty_base_requirements
@cloud.number_of_resources.should > 2
end
it "should receive add_poolparty_base_requirements before building the manifest" do
@cloud.should_receive(:add_poolparty_base_requirements).once
@cloud.build_manifest
end
describe "add_poolparty_base_requirements" do
before(:each) do
reset!
@cloud.instance_eval do
@heartbeat = nil
end
@hb = "heartbeat".class_constant.new(@cloud)
@cloud.stub!(:realize_plugins!).and_return []
end
it "should call initialize on heartbeat (in add_poolparty_base_requirements)" do
@cloud.stub!(:realize_plugins!).and_return []
@hb.class.should_receive(:new).and_return true
@cloud.add_poolparty_base_requirements
end
it "should call heartbeat on the cloud" do
@cloud.should_receive(:heartbeat).and_return true
@cloud.add_poolparty_base_requirements
end
it "should call Hearbeat.new" do
"heartbeat".class_constant.should_receive(:new).and_return @hb
@cloud.add_poolparty_base_requirements
end
it "should call enable on the plugin call" do
@hb = "heartbeat".class_constant
"heartbeat".class_constant.stub!(:new).and_return @hb
@cloud.add_poolparty_base_requirements
@cloud.heartbeat.should == @hb
end
describe "after adding" do
before(:each) do
stub_list_from_remote_for(@cloud)
@cloud.add_poolparty_base_requirements
end
it "should add resources onto the heartbeat class inside the cloud" do
@cloud.services.size.should > 0
end
it "should store the class heartbeat" do
@cloud.services.map {|a| a.class}.include?("heartbeat".class_constant).should == true
end
it "should have an array of resources on the heartbeat" do
@cloud.services.first.resources.class.should == Hash
end
describe "resources" do
before(:each) do
@cloud8 = Cloud.new(:tester, @pool) do
test_service
end
@service = @cloud8.services.first
@files = @service.resource(:file)
end
it "should have a file resource" do
@files.first.nil?.should == false
end
it "should have an array of lines" do
@files.class.should == Array
end
it "should not be empty" do
@files.should_not be_empty
end
end
end
end
describe "building" do
before(:each) do
str = "master 192.168.0.1
node1 192.168.0.2"
@sample_instances_list = [{:ip => "192.168.0.1", :name => "master"}, {:ip => "192.168.0.2", :name => "node1"}]
@ris = @sample_instances_list.map {|h| PoolParty::Remote::RemoteInstance.new(h, @cloud) }
stub_remoter_for(@cloud)
@manifest = @cloud.build_manifest
end
it "should return a string when calling build_manifest" do
@manifest.class.should == String
end
it "should have a comment of # file in the manifest as described by the has_file" do
@manifest.should =~ /file \{/
end
it "should have the comment of a package in the manifest" do
@manifest.should =~ /package \{/
end
it "should have the comment for haproxy in the manifest" do
@manifest.should =~ /haproxy/
end
it "should include the poolparty gem" do
@manifest.should =~ /package \{/
end
it "should include custom functions" do
@manifest.should =~ /define line\(\$file/
end
end
describe "building with an existing manifest" do
before(:each) do
@file = "/etc/puppet/manifests/nodes/nodes.pp"
@file.stub!(:read).and_return "nodes generate"
::FileTest.stub!(:file?).with("/etc/puppet/manifests/classes/poolparty.pp").and_return true
@cloud.stub!(:open).with("/etc/puppet/manifests/classes/poolparty.pp").and_return @file
end
it "should not call resources_string_from_resources if the file /etc/puppet/manifests/nodes/nodes.pp exists" do
@cloud.should_not_receive(:add_poolparty_base_requirements)
@cloud.build_manifest
end
it "should build from the existing file" do
@cloud.build_manifest.should == "nodes generate"
end
end
end
end
describe "instances" do
before(:each) do
@cloud3 = cloud :pop do;keypair "fake_keypair";end
stub_list_from_remote_for(@cloud3)
end
it "should respond to the method master" do
@cloud3.respond_to?(:master).should == true
end
it "should return a master that is not nil" do
@cloud3.master.should_not be_nil
end
end
end
end
end |
require "spec_helper"

# Specs for the NATS RPC client: timeout/delay configuration, subscription
# key caching, the two-phase (ACK + payload) request protocol, and retry
# behaviour of #send_request.
describe ::Protobuf::Nats::Client do
  class ExampleServiceClass; end

  let(:service) { ExampleServiceClass }
  let(:method) { :created }
  let(:options) {
    {
      :service => service,
      :method => method
    }
  }

  subject { described_class.new(options) }

  describe "#ack_timeout" do
    it "can be set via the PB_NATS_CLIENT_ACK_TIMEOUT environment variable" do
      ::ENV["PB_NATS_CLIENT_ACK_TIMEOUT"] = "1000"
      expect(subject.ack_timeout).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_ACK_TIMEOUT")
    end

    it "has a default value" do
      expect(subject.ack_timeout).to eq(5)
    end
  end

  describe "#reconnect_delay" do
    it "can be set via the PB_NATS_CLIENT_RECONNECT_DELAY environment variable" do
      ::ENV["PB_NATS_CLIENT_RECONNECT_DELAY"] = "1000"
      expect(subject.reconnect_delay).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_RECONNECT_DELAY")
    end

    it "defaults to the ack_timeout" do
      expect(subject.reconnect_delay).to eq(subject.ack_timeout)
    end
  end

  describe "#response_timeout" do
    it "can be set via the PB_NATS_CLIENT_RESPONSE_TIMEOUT environment variable" do
      ::ENV["PB_NATS_CLIENT_RESPONSE_TIMEOUT"] = "1000"
      expect(subject.response_timeout).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_RESPONSE_TIMEOUT")
    end

    it "has a default value" do
      expect(subject.response_timeout).to eq(60)
    end
  end

  describe "#cached_subscription_key" do
    it "caches the instance of a subscription key" do
      # Reset the class-level cache, then verify the same object is returned.
      ::Protobuf::Nats::Client.instance_variable_set(:@subscription_key_cache, nil)
      id = subject.cached_subscription_key.__id__
      expect(subject.cached_subscription_key.__id__).to eq(id)
    end
  end

  describe "#nats_request_with_two_responses" do
    let(:client) { ::FakeNatsClient.new(:inbox => inbox) }
    let(:inbox) { "INBOX_123" }
    let(:msg_subject) { "rpc.yolo.brolo" }
    let(:ack) { ::Protobuf::Nats::Messages::ACK }
    let(:nack) { ::Protobuf::Nats::Messages::NACK }
    let(:response) { "final count down" }

    before do
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
    end

    it "processes a request and return the final response" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05),
                                ::FakeNatsClient::Message.new(inbox, response, 0.1)])
      server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
      expect(server_response).to eq(response)
    end

    it "raises an error when the ack is not signaled" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05)])
      options = {:ack_timeout => 0.1, :timeout => 0.2}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
    end

    it "can send messages out of order and still complete" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05),
                                ::FakeNatsClient::Message.new(inbox, ack, 0.1)])
      server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
      expect(server_response).to eq(response)
    end

    it "raises an error when the ack is signaled but pb response is not" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05)])
      options = {:timeout => 0.1}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
    end

    it "raises an error when the server responds with nack" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, nack, 0.05)])
      options = {:timeout => 0.1}
      # Fix: reference the constant from the top level (::) for consistency
      # with every other constant reference in this file.
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::Protobuf::Nats::Client::NackError)
    end
  end

  describe "#send_request" do
    # Fix: description previously read "retries 3 times when and raises".
    it "retries 3 times and raises a NATS timeout" do
      expect(subject).to receive(:setup_connection).exactly(3).times
      expect(subject).to receive(:nats_request_with_two_responses).and_raise(::NATS::IO::Timeout).exactly(3).times
      expect { subject.send_request }.to raise_error(::NATS::IO::Timeout)
    end

    it "waits the reconnect_delay duration when the nats connection is reconnecting" do
      error = ::Protobuf::Nats::Errors::IOException.new
      client = ::FakeNatsClient.new
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
      allow(client).to receive(:publish).and_raise(error)
      allow(subject).to receive(:setup_connection)
      expect(subject).to receive(:reconnect_delay).and_return(0.01).exactly(3).times
      expect { subject.send_request }.to raise_error(error)
    end
  end
end
Test nack retries
require "spec_helper"

# Specs for the NATS RPC client: timeout/delay configuration, subscription
# key caching, the two-phase (ACK + payload) request protocol, and the
# retry/backoff behaviour of #send_request (including NACK handling).
describe ::Protobuf::Nats::Client do
  class ExampleServiceClass; end

  let(:service) { ExampleServiceClass }
  let(:method) { :created }
  let(:options) {
    {
      :service => service,
      :method => method
    }
  }

  subject { described_class.new(options) }

  describe "#ack_timeout" do
    it "can be set via the PB_NATS_CLIENT_ACK_TIMEOUT environment variable" do
      ::ENV["PB_NATS_CLIENT_ACK_TIMEOUT"] = "1000"
      expect(subject.ack_timeout).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_ACK_TIMEOUT")
    end

    it "has a default value" do
      expect(subject.ack_timeout).to eq(5)
    end
  end

  describe "#reconnect_delay" do
    it "can be set via the PB_NATS_CLIENT_RECONNECT_DELAY environment variable" do
      ::ENV["PB_NATS_CLIENT_RECONNECT_DELAY"] = "1000"
      expect(subject.reconnect_delay).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_RECONNECT_DELAY")
    end

    it "defaults to the ack_timeout" do
      expect(subject.reconnect_delay).to eq(subject.ack_timeout)
    end
  end

  describe "#response_timeout" do
    it "can be set via the PB_NATS_CLIENT_RESPONSE_TIMEOUT environment variable" do
      ::ENV["PB_NATS_CLIENT_RESPONSE_TIMEOUT"] = "1000"
      expect(subject.response_timeout).to eq(1_000)
      ::ENV.delete("PB_NATS_CLIENT_RESPONSE_TIMEOUT")
    end

    it "has a default value" do
      expect(subject.response_timeout).to eq(60)
    end
  end

  describe "#cached_subscription_key" do
    it "caches the instance of a subscription key" do
      # Reset the class-level cache, then verify the same object is returned.
      ::Protobuf::Nats::Client.instance_variable_set(:@subscription_key_cache, nil)
      id = subject.cached_subscription_key.__id__
      expect(subject.cached_subscription_key.__id__).to eq(id)
    end
  end

  describe "#nats_request_with_two_responses" do
    let(:client) { ::FakeNatsClient.new(:inbox => inbox) }
    let(:inbox) { "INBOX_123" }
    let(:msg_subject) { "rpc.yolo.brolo" }
    let(:ack) { ::Protobuf::Nats::Messages::ACK }
    let(:nack) { ::Protobuf::Nats::Messages::NACK }
    let(:response) { "final count down" }

    before do
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
    end

    it "processes a request and return the final response" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05),
                                ::FakeNatsClient::Message.new(inbox, response, 0.1)])
      server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
      expect(server_response).to eq(response)
    end

    it "raises an error when the ack is not signaled" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05)])
      options = {:ack_timeout => 0.1, :timeout => 0.2}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
    end

    it "can send messages out of order and still complete" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, response, 0.05),
                                ::FakeNatsClient::Message.new(inbox, ack, 0.1)])
      server_response = subject.nats_request_with_two_responses(msg_subject, "request data", {})
      expect(server_response).to eq(response)
    end

    it "raises an error when the ack is signaled but pb response is not" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, ack, 0.05)])
      options = {:timeout => 0.1}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::NATS::IO::Timeout)
    end

    it "raises an error when the server responds with nack" do
      client.schedule_messages([::FakeNatsClient::Message.new(inbox, nack, 0.05)])
      options = {:timeout => 0.1}
      expect { subject.nats_request_with_two_responses(msg_subject, "request data", options) }.to raise_error(::Protobuf::Nats::Client::NackError)
    end
  end

  describe "#send_request" do
    # Fix: description previously read "retries 3 times when and raises".
    it "retries 3 times and raises a NATS timeout" do
      expect(subject).to receive(:setup_connection).exactly(3).times
      expect(subject).to receive(:nats_request_with_two_responses).and_raise(::NATS::IO::Timeout).exactly(3).times
      expect { subject.send_request }.to raise_error(::NATS::IO::Timeout)
    end

    it "tries 6 times when the server responds with NACK" do
      inbox = "INBOX-123"
      client = ::FakeNatsClient.new(:inbox => inbox)
      # NOTE(review): this subscribe override looks redundant because
      # nats_request_with_two_responses is stubbed below — confirm and
      # consider removing.
      def client.subscribe(subject, args, &block)
        block.call(::Protobuf::Nats::Client::NackError)
      end
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
      expect(subject).to receive(:setup_connection).exactly(6).times
      expect(subject).to receive(:nats_request_with_two_responses).and_raise(::Protobuf::Nats::Client::NackError).exactly(6).times
      # The total elapsed time must match the sum of the NACK backoff
      # intervals (given in milliseconds).
      t_start = Time.now.to_f
      expect { subject.send_request }.to raise_error(::Protobuf::Nats::Client::NackError)
      t_end = Time.now.to_f
      expect(t_end - t_start).to be_within(0.1).of(::Protobuf::Nats::Client::NACK_BACKOFF_INTERVALS.sum/1000.0)
    end

    it "waits the reconnect_delay duration when the nats connection is reconnecting" do
      error = ::Protobuf::Nats::Errors::IOException.new
      client = ::FakeNatsClient.new
      allow(::Protobuf::Nats).to receive(:client_nats_connection).and_return(client)
      allow(client).to receive(:publish).and_raise(error)
      allow(subject).to receive(:setup_connection)
      expect(subject).to receive(:reconnect_delay).and_return(0.01).exactly(3).times
      expect { subject.send_request }.to raise_error(error)
    end
  end
end
|
require 'spec_helper'

# Specs for the CI runners API: listing registered runners and registering
# new ones. All GitLab HTTP traffic is stubbed via StubGitlabCalls.
describe API::API do
  include ApiHelpers
  include StubGitlabCalls

  before do
    stub_gitlab_calls
  end

  describe "GET /runners" do
    let(:gitlab_url) { GitlabCi.config.gitlab_server_urls.first }
    # Credentials used to fetch a private token from the (stubbed) server.
    let(:auth_opts) do
      { :email => "test@test.com", :password => "123456" }
    end
    let(:private_token) { Network.new.authenticate(gitlab_url, auth_opts)["private_token"] }
    let(:options) do
      { :private_token => private_token, :url => gitlab_url }
    end

    before do
      5.times { FactoryGirl.create(:runner) }
    end

    it "should retrieve a list of all runners" do
      get api("/runners"), options

      response.status.should == 200
      json_response.count.should == 5
      json_response.last.should have_key("id")
      json_response.last.should have_key("token")
    end
  end

  describe "POST /runners/register" do
    it "should create a runner if token provided" do
      post api("/runners/register"), token: GitlabCi::REGISTRATION_TOKEN, public_key: 'sha-rsa ....'

      response.status.should == 201
    end

    it "should return 403 error if no token" do
      post api("/runners/register")

      response.status.should == 403
    end
  end
end
Add specs for runner setup via API
require 'spec_helper'

# Specs for the CI runners API: listing registered runners and registering
# new ones (via the global registration token or a per-project token).
describe API::API do
  include ApiHelpers
  include StubGitlabCalls

  # All examples rely on stubbed GitLab HTTP calls rather than a live server.
  before {
    stub_gitlab_calls
  }

  describe "GET /runners" do
    let(:gitlab_url) { GitlabCi.config.gitlab_server_urls.first }
    # Credentials used to obtain a private token from the (stubbed) server.
    let(:auth_opts) {
      {
        :email => "test@test.com",
        :password => "123456"
      }
    }
    let(:private_token) { Network.new.authenticate(gitlab_url, auth_opts)["private_token"] }
    let(:options) {
      {
        :private_token => private_token,
        :url => gitlab_url
      }
    }

    before do
      5.times { FactoryGirl.create(:runner) }
    end

    it "should retrieve a list of all runners" do
      get api("/runners"), options
      response.status.should == 200
      json_response.count.should == 5
      json_response.last.should have_key("id")
      json_response.last.should have_key("token")
    end
  end

  describe "POST /runners/register" do
    describe "should create a runner if token provided" do
      before { post api("/runners/register"), token: GitlabCi::REGISTRATION_TOKEN }

      it { response.status.should == 201 }
    end

    describe "should create a runner if project token provided" do
      let(:project) { FactoryGirl.create(:project) }

      before { post api("/runners/register"), token: project.token }

      # Registering with a project token both creates the runner and
      # associates it with that project.
      it { response.status.should == 201 }
      it { project.runners.size.should == 1 }
    end

    it "should return 403 error if token is invalid" do
      post api("/runners/register"), token: 'invalid'
      response.status.should == 403
    end

    it "should return 400 error if no token" do
      post api("/runners/register")
      response.status.should == 400
    end
  end
end
|
require 'spec_helper'

# Regression spec: API endpoints must be exempt from CSRF protection.
# Forgery protection is force-enabled for each example so that a missing
# CSRF token would be rejected if the exemption were broken.
describe 'api should not require csrf protection', type: :request do
  before(:each) do
    ActionController::Base.allow_forgery_protection = true
  end

  after(:each) do
    ActionController::Base.allow_forgery_protection = false
  end

  # Fix: description said 200, but the assertion expects 201 (created).
  it 'should return 201 when making a request without csrf' do
    user = create(:user)
    token = create(:access_token, resource_owner_id: user.id)
    # Stub the OAuth token so the request is authorized without a real
    # Doorkeeper flow; "project" scope is needed for project creation.
    allow(token).to receive(:accessible?).and_return(true)
    allow(token).to receive(:scopes)
      .and_return(Doorkeeper::OAuth::Scopes.from_array(%w(project public)))
    allow_any_instance_of(Api::V1::ProjectsController).to receive(:doorkeeper_token)
      .and_return(token)
    post "/api/projects", { projects: { name: "new_hotness",
                                        display_name: "New Hotness!",
                                        description: "Your shits busted",
                                        primary_language: 'en' } }.to_json,
         { "HTTP_ACCEPT" => "application/vnd.api+json; version=1",
           "CONTENT_TYPE" => "application/json; charset=utf-8" }
    expect(response.status).to eq(201)
  end
end
fix csrf spec
require 'spec_helper'

# Regression spec: API endpoints must be exempt from CSRF protection.
# Forgery protection is force-enabled for each example so that a missing
# CSRF token would be rejected if the exemption were broken.
describe 'api should not require csrf protection', type: :request do
  before(:each) do
    ActionController::Base.allow_forgery_protection = true
  end

  after(:each) do
    ActionController::Base.allow_forgery_protection = false
  end

  # Fix: description said 200, but the assertion expects 201 (created).
  it 'should return 201 when making a request without csrf' do
    user = create(:user)
    token = create(:access_token, resource_owner_id: user.id)
    # Stub the OAuth token so the request is authorized without a real
    # Doorkeeper flow; "project" scope is needed for project creation.
    allow(token).to receive(:accessible?).and_return(true)
    allow(token).to receive(:scopes)
      .and_return(Doorkeeper::OAuth::Scopes.from_array(%w(project public)))
    allow_any_instance_of(Api::V1::ProjectsController).to receive(:doorkeeper_token)
      .and_return(token)
    post "/api/projects", { projects: { name: "new_hotness",
                                        display_name: "New Hotness!",
                                        description: "Your shits busted",
                                        primary_language: 'en',
                                        private: false } }.to_json,
         { "HTTP_ACCEPT" => "application/vnd.api+json; version=1",
           "CONTENT_TYPE" => "application/json; charset=utf-8" }
    expect(response.status).to eq(201)
  end
end
|
require 'roqua/support/errors'

# Specs for Roqua::Support::Errors.report: the same exception is fanned out
# to the event log, Airbrake, and (when loaded) Appsignal.
describe 'Error reporting' do
  # A bare Exception with fixed message and backtrace so expectations can
  # assert exact values.
  let(:exception) do
    Exception.new('exception_message').tap do |exception|
      exception.set_backtrace ['back', 'trace', 'lines']
    end
  end
  let(:logstream) { StringIO.new }
  let(:logger) { Logger.new(logstream) }
  let(:logwrapper) { Roqua::LogWrapper.new(logger) }

  before do
    Roqua.logger = logwrapper
  end

  it 'sends notifications to the eventlog' do
    Roqua.logger.should_receive(:error).with('roqua.exception',
                                             class_name: 'Exception',
                                             message: 'exception_message',
                                             backtrace: ['back', 'trace', 'lines'],
                                             parameters: {})
    Roqua::Support::Errors.report exception
  end

  it 'sends notifications to airbrake' do
    # Stand in for the real Airbrake constant for the duration of the example.
    stub_const("Airbrake", double("Airbrake", is_ignored_exception?: false))
    Airbrake.should_receive(:notify_or_ignore).with(exception, parameters: {})
    Roqua::Support::Errors.report exception
  end

  context 'when Appsignal is loaded' do
    let(:agent) { double("agent") }
    let(:transaction) { double("transaction") }

    it 'sends notifications to appsignal' do
      # Stand in for the real Appsignal constant and its transaction API.
      stub_const("Appsignal", Module.new)
      Appsignal.stub(is_ignored_exception?: false, agent: agent)
      stub_const("Appsignal::Transaction", double("Transaction", create: transaction))
      transaction.should_receive(:set_tags).with({})
      transaction.should_receive(:add_exception).with(exception)
      transaction.should_receive(:complete!)
      agent.should_receive(:send_queue)
      Roqua::Support::Errors.report exception
    end
  end

  it 'supports default extra params' do
    # Extra parameters configured on the module are merged into every report.
    Roqua::Support::Errors.stub(extra_parameters: {organization: 'some_org'})
    Roqua.logger.should_receive(:error).with('roqua.exception',
                                             class_name: 'Exception',
                                             message: 'exception_message',
                                             backtrace: ['back', 'trace', 'lines'],
                                             parameters: {organization: 'some_org'})
    Roqua::Support::Errors.report exception
  end
end
fix spec
require 'roqua/support/errors'

# Specs for Roqua::Support::Errors.report: the same exception is fanned out
# to the event log, Airbrake, and (when loaded and active) Appsignal.
describe 'Error reporting' do
  # A bare Exception with fixed message and backtrace so expectations can
  # assert exact values.
  let(:exception) do
    Exception.new('exception_message').tap do |exception|
      exception.set_backtrace ['back', 'trace', 'lines']
    end
  end
  let(:logstream) { StringIO.new }
  let(:logger) { Logger.new(logstream) }
  let(:logwrapper) { Roqua::LogWrapper.new(logger) }

  before do
    Roqua.logger = logwrapper
  end

  it 'sends notifications to the eventlog' do
    Roqua.logger.should_receive(:error).with('roqua.exception',
                                             class_name: 'Exception',
                                             message: 'exception_message',
                                             backtrace: ['back', 'trace', 'lines'],
                                             parameters: {})
    Roqua::Support::Errors.report exception
  end

  it 'sends notifications to airbrake' do
    # Stand in for the real Airbrake constant for the duration of the example.
    stub_const("Airbrake", double("Airbrake", is_ignored_exception?: false))
    Airbrake.should_receive(:notify_or_ignore).with(exception, parameters: {})
    Roqua::Support::Errors.report exception
  end

  context 'when Appsignal is loaded' do
    let(:agent) { double("agent") }
    let(:transaction) { double("transaction") }

    it 'sends notifications to appsignal' do
      # Stand in for the real Appsignal constant and its transaction API;
      # active? must be true for the report to reach Appsignal.
      stub_const("Appsignal", Module.new)
      Appsignal.stub(active?: true)
      Appsignal.stub(is_ignored_exception?: false, agent: agent)
      stub_const("Appsignal::Transaction", double("Transaction", create: transaction))
      transaction.should_receive(:set_tags).with({})
      transaction.should_receive(:add_exception).with(exception)
      transaction.should_receive(:complete!)
      agent.should_receive(:send_queue)
      Roqua::Support::Errors.report exception
    end
  end

  it 'supports default extra params' do
    # Extra parameters configured on the module are merged into every report.
    Roqua::Support::Errors.stub(extra_parameters: {organization: 'some_org'})
    Roqua.logger.should_receive(:error).with('roqua.exception',
                                             class_name: 'Exception',
                                             message: 'exception_message',
                                             backtrace: ['back', 'trace', 'lines'],
                                             parameters: {organization: 'some_org'})
    Roqua::Support::Errors.report exception
  end
end
# Shared examples for Hash equality; the method under test (:== or :eql?)
# is supplied via @method by the including spec.
describe :hash_eql, :shared => true do
  it "does not compare values when keys don't match" do
    value = mock('x')
    value.should_not_receive(:==)
    value.should_not_receive(:eql?)
    new_hash(1 => value).send(@method, new_hash(2 => value)).should be_false
  end

  it "returns false when the numbers of keys differ without comparing any elements" do
    obj = mock('x')
    h = new_hash(obj => obj)
    obj.should_not_receive(:==)
    obj.should_not_receive(:eql?)
    new_hash.send(@method, h).should be_false
    h.send(@method, new_hash).should be_false
  end

  it "first compares keys via hash" do
    # Equal hash codes alone are not enough for the keys to match.
    x = mock('x')
    x.should_receive(:hash).any_number_of_times.and_return(0)
    y = mock('y')
    y.should_receive(:hash).any_number_of_times.and_return(0)
    new_hash(x => 1).send(@method, new_hash(y => 1)).should be_false
  end

  it "does not compare keys with different hash codes via eql?" do
    x = mock('x')
    y = mock('y')
    x.should_not_receive(:eql?)
    y.should_not_receive(:eql?)
    x.should_receive(:hash).any_number_of_times.and_return(0)
    y.should_receive(:hash).any_number_of_times.and_return(1)
    new_hash(x => 1).send(@method, new_hash(y => 1)).should be_false
  end

  it "computes equality for recursive hashes" do
    h = new_hash
    h[:a] = h
    h.send(@method, h[:a]).should be_true
    (h == h[:a]).should be_true
  end

  it "doesn't call to_hash on objects" do
    mock_hash = mock("fake hash")
    def mock_hash.to_hash() new_hash end
    new_hash.send(@method, mock_hash).should be_false
  end

  # Examples guarded against a known MRI defect (redmine #2448) fixed in 1.9.1.
  ruby_bug "redmine #2448", "1.9.1" do
    it "computes equality for complex recursive hashes" do
      a, b = {}, {}
      a.merge! :self => a, :other => b
      b.merge! :self => b, :other => a
      a.send(@method, b).should be_true # they both have the same structure!

      c = {}
      c.merge! :other => c, :self => c
      c.send(@method, a).should be_true # subtle, but they both have the same structure!

      a[:delta] = c[:delta] = a
      c.send(@method, a).should be_false # not quite the same structure, as a[:other][:delta] = nil
      c[:delta] = 42
      c.send(@method, a).should be_false
      a[:delta] = 42
      c.send(@method, a).should be_false
      b[:delta] = 42
      c.send(@method, a).should be_true
    end

    it "computes equality for recursive hashes & arrays" do
      x, y, z = [], [], []
      a, b, c = {:foo => x, :bar => 42}, {:foo => y, :bar => 42}, {:foo => z, :bar => 42}
      x << a
      y << c
      z << b
      b.send(@method, c).should be_true # they clearly have the same structure!
      y.send(@method, z).should be_true
      a.send(@method, b).should be_true # subtle, but they both have the same structure!
      x.send(@method, y).should be_true
      y << x
      y.send(@method, z).should be_false
      z << x
      y.send(@method, z).should be_true
      a[:foo], a[:bar] = a[:bar], a[:foo]
      a.send(@method, b).should be_false
      b[:bar] = b[:foo]
      b.send(@method, c).should be_false
    end
  end # ruby_bug
end
# All these tests are true for ==, and for eql? when Ruby >= 1.8.7
describe :hash_eql_additional, :shared => true do
  it "compares values when keys match" do
    x = mock('x')
    y = mock('y')
    def x.==(o) false end
    def y.==(o) false end
    def x.eql?(o) false end
    def y.eql?(o) false end
    new_hash(1 => x).send(@method, new_hash(1 => y)).should be_false

    x = mock('x')
    y = mock('y')
    def x.==(o) true end
    def y.==(o) true end
    def x.eql?(o) true end
    def y.eql?(o) true end
    new_hash(1 => x).send(@method, new_hash(1 => y)).should be_true
  end

  it "compares keys with eql? semantics" do
    # 1.0 and 1 are == but not eql?, so they are distinct hash keys.
    new_hash(1.0 => "x").send(@method, new_hash(1.0 => "x")).should be_true
    new_hash(1.0 => "x").send(@method, new_hash(1.0 => "x")).should be_true
    new_hash(1 => "x").send(@method, new_hash(1.0 => "x")).should be_false
    new_hash(1.0 => "x").send(@method, new_hash(1 => "x")).should be_false
  end

  it "returns true iff other Hash has the same number of keys and each key-value pair matches" do
    a = new_hash(:a => 5)
    b = new_hash
    a.send(@method, b).should be_false

    b[:a] = 5
    a.send(@method, b).should be_true

    # Symbol and String keys are not interchangeable.
    c = new_hash("a" => 5)
    a.send(@method, c).should be_false
  end

  it "does not call to_hash on hash subclasses" do
    new_hash(5 => 6).send(@method, ToHashHash[5 => 6]).should be_true
  end

  it "ignores hash class differences" do
    h = new_hash(1 => 2, 3 => 4)
    MyHash[h].send(@method, h).should be_true
    MyHash[h].send(@method, MyHash[h]).should be_true
    h.send(@method, MyHash[h]).should be_true
  end

  # Why isn't this true of eql? too ?
  it "compares keys with matching hash codes via eql?" do
    # Can't use should_receive because it uses hash and eql? internally
    a = Array.new(2) do
      obj = mock('0')

      def obj.hash()
        return 0
      end
      # It's undefined whether the impl does a[0].eql?(a[1]) or
      # a[1].eql?(a[0]) so we taint both.
      def obj.eql?(o)
        return true if self == o
        taint
        o.taint
        false
      end

      obj
    end

    new_hash(a[0] => 1).send(@method, new_hash(a[1] => 1)).should be_false
    a[0].tainted?.should be_true
    a[1].tainted?.should be_true

    a = Array.new(2) do
      obj = mock('0')

      def obj.hash()
        # It's undefined whether the impl does a[0].send(@method, a[1]) or
        # a[1].send(@method, a[0]) so we taint both.
        def self.eql?(o) taint; o.taint; true; end
        return 0
      end

      obj
    end

    new_hash(a[0] => 1).send(@method, new_hash(a[1] => 1)).should be_true
    a[0].tainted?.should be_true
    a[1].tainted?.should be_true
  end
end
# Shared examples covering default values: a Hash's default value or
# default block never participates in equality.
describe :hash_eql_additional_more, :shared => true do
  it "returns true if other Hash has the same number of keys and each key-value pair matches, even though the default-value are not same" do
    new_hash(5).send(@method, new_hash(1)).should be_true
    new_hash { |hash, key| 1 }.send(@method, new_hash {}).should be_true
    new_hash { |hash, key| 1 }.send(@method, new_hash(2)).should be_true

    with_block = new_hash { |hash, key| 1 }
    without_default = new_hash {}
    with_block[1] = 2
    without_default[1] = 2
    with_block.send(@method, without_default).should be_true
  end
end
Add spec for value comparison order in Hash#==
# Shared examples for Hash equality; the method under test (:== or :eql?)
# is supplied via @method by the including spec.
describe :hash_eql, :shared => true do
  it "compares the values in self to values in other hash" do
    # Pins the comparison direction: the receiver's value is asked to
    # compare itself against the other hash's value.
    l_val = mock("left")
    r_val = mock("right")
    l_val.should_receive(:eql?).with(r_val).and_return(true)

    new_hash(1 => l_val).send(@method, new_hash(1 => r_val)).should be_true
  end

  it "does not compare values when keys don't match" do
    value = mock('x')
    value.should_not_receive(:==)
    value.should_not_receive(:eql?)
    new_hash(1 => value).send(@method, new_hash(2 => value)).should be_false
  end

  it "returns false when the numbers of keys differ without comparing any elements" do
    obj = mock('x')
    h = new_hash(obj => obj)
    obj.should_not_receive(:==)
    obj.should_not_receive(:eql?)
    new_hash.send(@method, h).should be_false
    h.send(@method, new_hash).should be_false
  end

  it "first compares keys via hash" do
    # Equal hash codes alone are not enough for the keys to match.
    x = mock('x')
    x.should_receive(:hash).any_number_of_times.and_return(0)
    y = mock('y')
    y.should_receive(:hash).any_number_of_times.and_return(0)
    new_hash(x => 1).send(@method, new_hash(y => 1)).should be_false
  end

  it "does not compare keys with different hash codes via eql?" do
    x = mock('x')
    y = mock('y')
    x.should_not_receive(:eql?)
    y.should_not_receive(:eql?)
    x.should_receive(:hash).any_number_of_times.and_return(0)
    y.should_receive(:hash).any_number_of_times.and_return(1)
    new_hash(x => 1).send(@method, new_hash(y => 1)).should be_false
  end

  it "computes equality for recursive hashes" do
    h = new_hash
    h[:a] = h
    h.send(@method, h[:a]).should be_true
    (h == h[:a]).should be_true
  end

  it "doesn't call to_hash on objects" do
    mock_hash = mock("fake hash")
    def mock_hash.to_hash() new_hash end
    new_hash.send(@method, mock_hash).should be_false
  end

  # Examples guarded against a known MRI defect (redmine #2448) fixed in 1.9.1.
  ruby_bug "redmine #2448", "1.9.1" do
    it "computes equality for complex recursive hashes" do
      a, b = {}, {}
      a.merge! :self => a, :other => b
      b.merge! :self => b, :other => a
      a.send(@method, b).should be_true # they both have the same structure!

      c = {}
      c.merge! :other => c, :self => c
      c.send(@method, a).should be_true # subtle, but they both have the same structure!

      a[:delta] = c[:delta] = a
      c.send(@method, a).should be_false # not quite the same structure, as a[:other][:delta] = nil
      c[:delta] = 42
      c.send(@method, a).should be_false
      a[:delta] = 42
      c.send(@method, a).should be_false
      b[:delta] = 42
      c.send(@method, a).should be_true
    end

    it "computes equality for recursive hashes & arrays" do
      x, y, z = [], [], []
      a, b, c = {:foo => x, :bar => 42}, {:foo => y, :bar => 42}, {:foo => z, :bar => 42}
      x << a
      y << c
      z << b
      b.send(@method, c).should be_true # they clearly have the same structure!
      y.send(@method, z).should be_true
      a.send(@method, b).should be_true # subtle, but they both have the same structure!
      x.send(@method, y).should be_true
      y << x
      y.send(@method, z).should be_false
      z << x
      y.send(@method, z).should be_true
      a[:foo], a[:bar] = a[:bar], a[:foo]
      a.send(@method, b).should be_false
      b[:bar] = b[:foo]
      b.send(@method, c).should be_false
    end
  end # ruby_bug
end
# All these tests are true for ==, and for eql? when Ruby >= 1.8.7
describe :hash_eql_additional, :shared => true do
  it "compares values when keys match" do
    x = mock('x')
    y = mock('y')
    def x.==(o) false end
    def y.==(o) false end
    def x.eql?(o) false end
    def y.eql?(o) false end
    new_hash(1 => x).send(@method, new_hash(1 => y)).should be_false

    x = mock('x')
    y = mock('y')
    def x.==(o) true end
    def y.==(o) true end
    def x.eql?(o) true end
    def y.eql?(o) true end
    new_hash(1 => x).send(@method, new_hash(1 => y)).should be_true
  end

  it "compares keys with eql? semantics" do
    # 1.0 and 1 are == but not eql?, so they are distinct hash keys.
    new_hash(1.0 => "x").send(@method, new_hash(1.0 => "x")).should be_true
    new_hash(1.0 => "x").send(@method, new_hash(1.0 => "x")).should be_true
    new_hash(1 => "x").send(@method, new_hash(1.0 => "x")).should be_false
    new_hash(1.0 => "x").send(@method, new_hash(1 => "x")).should be_false
  end

  it "returns true iff other Hash has the same number of keys and each key-value pair matches" do
    a = new_hash(:a => 5)
    b = new_hash
    a.send(@method, b).should be_false

    b[:a] = 5
    a.send(@method, b).should be_true

    # Symbol and String keys are not interchangeable.
    c = new_hash("a" => 5)
    a.send(@method, c).should be_false
  end

  it "does not call to_hash on hash subclasses" do
    new_hash(5 => 6).send(@method, ToHashHash[5 => 6]).should be_true
  end

  it "ignores hash class differences" do
    h = new_hash(1 => 2, 3 => 4)
    MyHash[h].send(@method, h).should be_true
    MyHash[h].send(@method, MyHash[h]).should be_true
    h.send(@method, MyHash[h]).should be_true
  end

  # Why isn't this true of eql? too ?
  it "compares keys with matching hash codes via eql?" do
    # Can't use should_receive because it uses hash and eql? internally
    a = Array.new(2) do
      obj = mock('0')

      def obj.hash()
        return 0
      end
      # It's undefined whether the impl does a[0].eql?(a[1]) or
      # a[1].eql?(a[0]) so we taint both.
      def obj.eql?(o)
        return true if self == o
        taint
        o.taint
        false
      end

      obj
    end

    new_hash(a[0] => 1).send(@method, new_hash(a[1] => 1)).should be_false
    a[0].tainted?.should be_true
    a[1].tainted?.should be_true

    a = Array.new(2) do
      obj = mock('0')

      def obj.hash()
        # It's undefined whether the impl does a[0].send(@method, a[1]) or
        # a[1].send(@method, a[0]) so we taint both.
        def self.eql?(o) taint; o.taint; true; end
        return 0
      end

      obj
    end

    new_hash(a[0] => 1).send(@method, new_hash(a[1] => 1)).should be_true
    a[0].tainted?.should be_true
    a[1].tainted?.should be_true
  end
end
# Shared examples covering default values: a Hash's default value or
# default block never participates in equality.
describe :hash_eql_additional_more, :shared => true do
  it "returns true if other Hash has the same number of keys and each key-value pair matches, even though the default-value are not same" do
    new_hash(5).send(@method, new_hash(1)).should be_true
    new_hash { |hash, key| 1 }.send(@method, new_hash {}).should be_true
    new_hash { |hash, key| 1 }.send(@method, new_hash(2)).should be_true

    with_block = new_hash { |hash, key| 1 }
    without_default = new_hash {}
    with_block[1] = 2
    without_default[1] = 2
    with_block.send(@method, without_default).should be_true
  end
end
|
require 'fakeweb'
FakeWeb.allow_net_connect = false
# Registers canned HTTP responses for the given routes.
#
# routes - Hash mapping [http_method, url] pairs to fixture basenames.
#          String URLs may contain "*" wildcards, which match a single
#          path segment and are compiled to an anchored Regexp.
#
# Each route is served from test/fixtures/<fixture>.response via FakeWeb.
def fake(routes)
  routes.map { |k, v|
    # Ghetto globbing: "*" matches anything except a path separator.
    # Use non-mutating gsub so the caller's string is left untouched.
    if k[1].is_a?(String)
      k[1] = Regexp.new("^#{k[1].gsub("*", "[^/]*")}$")
    end
    [k, v]
  }.each do |url, filename|
    # Fix: interpolate the fixture basename instead of a broken placeholder,
    # so the named fixture file is actually used.
    file = File.join("test", "fixtures", "#{filename}.response")
    FakeWeb.register_uri(url[0], url[1], :response => file)
  end
end
Allow FakeWeb to connect to CodeClimate.
require 'fakeweb'
FakeWeb.allow_net_connect = %r[^https?://codeclimate.com/]
# Registers canned HTTP responses for the given routes.
#
# routes - Hash mapping [http_method, url] pairs to fixture basenames.
#          String URLs may contain "*" wildcards, which match a single
#          path segment and are compiled to an anchored Regexp.
#
# Each route is served from test/fixtures/<fixture>.response via FakeWeb.
def fake(routes)
  routes.map { |k, v|
    # Ghetto globbing: "*" matches anything except a path separator.
    # Use non-mutating gsub so the caller's string is left untouched.
    if k[1].is_a?(String)
      k[1] = Regexp.new("^#{k[1].gsub("*", "[^/]*")}$")
    end
    [k, v]
  }.each do |url, filename|
    # Fix: interpolate the fixture basename instead of a broken placeholder,
    # so the named fixture file is actually used.
    file = File.join("test", "fixtures", "#{filename}.response")
    FakeWeb.register_uri(url[0], url[1], :response => file)
  end
end
|
require 'test_helper'

# Tests for FourInfo: contactable column configuration, SMS confirmation
# behaviour, and phone-number normalization helpers.
class FourInfoTest < ActiveSupport::TestCase
  context "contactable class" do
    setup {
      @klass = Class.new
      @klass.send :include, FourInfo::Contactable
    }

    FourInfo::Contactable::Attributes.each do |attribute|
      should "begin with appropriate default for #{attribute}_column" do
        assert_equal attribute, @klass.send("#{attribute}_column")
      end

      should "allow setting #{attribute}_column" do
        new_column_name = :custom_column
        @klass.send "#{attribute}_column", new_column_name
        assert_equal new_column_name, @klass.send("#{attribute}_column")
      end
    end
  end

  context "contactable instance" do
    setup { @user = User.new }

    context "when phone number is blank" do
      setup { @user.sms_phone_number = nil }

      context "confirming phone number" do
        setup { @user.confirm_sms! }

        should_not_change "any attributes" do
          @user.attributes.inspect
        end
      end
    end

    context "when phone number exists" do
      setup { @user.sms_phone_number = "206-555-5555" }

      should "save confirmation number in proper attribute" do
        assert @user.send(User.sms_confirmation_code_column)
      end

      should_change "stored code" do
        @user.send User.sms_confirmation_code_column
      end
    end
  end

  context "standardizing numbers" do
    context "to digits" do
      should "remove all but integers" do
        assert_equal '12345', FourInfo.numerize('1-2-3-4-5')
        assert_equal '12345', FourInfo.numerize('1 2 3 4 5')
        assert_equal '12345', FourInfo.numerize('1,2(3)4.5')
        assert_equal '12345', FourInfo.numerize('1,2(3)4.5')
      end
    end

    context "to international format" do
      should "add a '+' to all 11 digit numbers" do
        assert_equal '+12345678901', FourInfo.internationalize('12345678901')
        assert_equal '+72345678901', FourInfo.internationalize('72345678901')
      end

      should "add a '+1' to any 10 digit number" do
        assert_equal '+12345678901', FourInfo.internationalize('2345678901')
        assert_equal '+17345678901', FourInfo.internationalize('7345678901')
      end

      # Fix: this test previously appeared twice verbatim; the duplicate
      # has been removed.
      should "leave 12 digit numbers unchanged" do
        [ '+' + ('3'*11),
          '+' + ('8'*11),
          '+' + ('4'*11) ].each do |number|
          assert_equal number, FourInfo.internationalize(number)
        end
      end

      should "return nil for all bad numbers" do
        assert_equal nil, FourInfo.internationalize(nil)
        assert_equal nil, FourInfo.internationalize('nil')
        assert_equal nil, FourInfo.internationalize('1234')
        assert_equal nil, FourInfo.internationalize('11111111111111111111111')
        assert_equal nil, FourInfo.internationalize('what?')
      end
    end
  end
end
removing duplicate test method
require 'test_helper'

# Unit tests for the FourInfo SMS gem.
#
# Written with shoulda-style context/should macros on top of
# ActiveSupport::TestCase. `User` is presumably a fixture model from
# test_helper that mixes in FourInfo::Contactable — confirm there.
class FourInfoTest < ActiveSupport::TestCase
  context "contactable class" do
    setup {
      # A throwaway class per test so column-name configuration
      # cannot leak between examples.
      @klass = Class.new
      @klass.send :include, FourInfo::Contactable
    }

    # One read-default + one override example per configurable column
    # accessor exposed by the Contactable mixin.
    FourInfo::Contactable::Attributes.each do |attribute|
      should "begin with appropriate default for #{attribute}_column" do
        assert_equal attribute, @klass.send("#{attribute}_column")
      end
      should "allow setting #{attribute}_column" do
        new_column_name = :custom_column
        @klass.send "#{attribute}_column", new_column_name
        assert_equal new_column_name, @klass.send("#{attribute}_column")
      end
    end
  end

  context "contactable instance" do
    setup { @user = User.new }
    context "when phone number is blank" do
      setup { @user.sms_phone_number = nil}
      context "confirming phone number" do
        # Confirming without a number should be a no-op.
        setup { @user.confirm_sms! }
        should_not_change "any attributes" do
          @user.attributes.inspect
        end
      end
    end
    context "when phone number exists" do
      setup { @user.sms_phone_number = "206-555-5555"}
      # NOTE(review): these examples never call @user.confirm_sms! in a
      # visible setup — verify the confirmation code is actually generated
      # before these assertions run.
      should "save confirmation number in proper attribute" do
        assert @user.send(User.sms_confirmation_code_column)
      end
      should_change "stored code" do
        @user.send User.sms_confirmation_code_column
      end
    end
  end

  context "standardizing numbers" do
    context "to digits" do
      should "remove all but integers" do
        assert_equal '12345', FourInfo.numerize('1-2-3-4-5')
        assert_equal '12345', FourInfo.numerize('1 2 3 4 5')
        assert_equal '12345', FourInfo.numerize('1,2(3)4.5')
        assert_equal '12345', FourInfo.numerize('1,2(3)4.5')
      end
    end
    context "to international format" do
      should "add a '+' to all 11 digit numbers" do
        assert_equal '+12345678901', FourInfo.internationalize('12345678901')
        assert_equal '+72345678901', FourInfo.internationalize('72345678901')
      end
      should "add a '+1' to any 10 digit number" do
        assert_equal '+12345678901', FourInfo.internationalize('2345678901')
        assert_equal '+17345678901', FourInfo.internationalize('7345678901')
      end
      should "leave 12 digit numbers unchanged" do
        [ '+' + ('3'*11),
          '+' + ('8'*11),
          '+' + ('4'*11) ].each do |number|
          assert_equal number, FourInfo.internationalize(number)
        end
      end
      should "return nil for all bad numbers" do
        assert_equal nil, FourInfo.internationalize(nil)
        assert_equal nil, FourInfo.internationalize('nil')
        assert_equal nil, FourInfo.internationalize('1234')
        assert_equal nil, FourInfo.internationalize('11111111111111111111111')
        assert_equal nil, FourInfo.internationalize('what?')
      end
    end
  end
end
|
require 'minitest/autorun'
require 'beefcake/generator'

# Tests for Beefcake's protobuf code-generator plugin.
class GeneratorTest < Minitest::Test
  def setup
    # Load up the generator request for the addressbook.proto example
    dat = File.dirname(__FILE__) + "/../dat/code_generator_request.dat"
    mock_request = File.read(dat)
    @req = CodeGeneratorRequest.decode(mock_request)
  end

  if "".respond_to?(:encoding)
    # Proto filenames must decode as binary on encoding-aware rubies.
    def test_request_has_filenames_as_binary
      @req.proto_file.each do |file|
        assert_equal Encoding.find("ASCII-8BIT"), file.name.encoding
      end
    end
  end

  def test_generate_empty_namespace
    @res = Beefcake::Generator.compile([], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
  end

  def test_generate_top_namespace
    @res = Beefcake::Generator.compile(["Top"], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
    assert_match(/module Top/, @res.file.first.content)
  end

  def test_generate_two_level_namespace
    @res = Beefcake::Generator.compile(["Top", "Bottom"], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
    assert_match(/module Top\s*\n\s*module Bottom/m, @res.file.first.content)
  end

  # Covers the regression of encoding a CodeGeneratorResponse under 1.9.2-p136 raising
  # Encoding::CompatibilityError: incompatible character encodings: ASCII-8BIT and US-ASCII
  #
  # Renamed from test_encode_decode_generated_response: the old name collided
  # with the round-trip test below, so Ruby silently discarded this method.
  # Minitest also has no assert_nothing_raised; calling #encode directly
  # errors the test if it raises, which is the same guarantee.
  def test_encode_generated_response
    @res = Beefcake::Generator.compile([], @req)
    @res.encode
  end

  # Round-trips a generated response through encode/decode.
  def test_encode_decode_generated_response
    @res = Beefcake::Generator.compile([], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
    round_trip = CodeGeneratorResponse.decode(@res.encode)
    assert_equal round_trip, @res
  end
end
remove assert_nothing_raised
require 'minitest/autorun'
require 'beefcake/generator'

# Tests for Beefcake's protobuf code-generator plugin.
class GeneratorTest < Minitest::Test
  def setup
    # Load up the generator request for the addressbook.proto example
    dat = File.dirname(__FILE__) + "/../dat/code_generator_request.dat"
    mock_request = File.read(dat)
    @req = CodeGeneratorRequest.decode(mock_request)
  end

  if "".respond_to?(:encoding)
    # Proto filenames must decode as binary on encoding-aware rubies.
    def test_request_has_filenames_as_binary
      @req.proto_file.each do |file|
        assert_equal Encoding.find("ASCII-8BIT"), file.name.encoding
      end
    end
  end

  def test_generate_empty_namespace
    @res = Beefcake::Generator.compile([], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
  end

  def test_generate_top_namespace
    @res = Beefcake::Generator.compile(["Top"], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
    assert_match(/module Top/, @res.file.first.content)
  end

  def test_generate_two_level_namespace
    @res = Beefcake::Generator.compile(["Top", "Bottom"], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
    assert_match(/module Top\s*\n\s*module Bottom/m, @res.file.first.content)
  end

  # Covers the regression of encoding a CodeGeneratorResponse under 1.9.2-p136 raising
  # Encoding::CompatibilityError: incompatible character encodings: ASCII-8BIT and US-ASCII
  #
  # Renamed from test_encode_decode_generated_response: the old name collided
  # with the round-trip test below, so Ruby silently discarded this method
  # and it never ran.
  def test_encode_generated_response
    @res = Beefcake::Generator.compile([], @req)
    @res.encode
  end

  # Round-trips a generated response through encode/decode.
  def test_encode_decode_generated_response
    @res = Beefcake::Generator.compile([], @req)
    assert_equal(CodeGeneratorResponse, @res.class)
    round_trip = CodeGeneratorResponse.decode(@res.encode)
    assert_equal round_trip, @res
  end
end
|
# Chef resource: sets the system hostname (and keeps /etc/hosts coherent)
# on linux, darwin, *BSD, solaris and windows. Forces itself to run at
# compile time by default so the new name is visible as early as possible.
provides :hostname
resource_name :hostname

property :hostname, String, name_property: true                   # FQDN to apply
property :compile_time, [ true, false ], default: true            # run during compile phase
property :ipaddress, [ String, nil ], default: node["ipaddress"]  # address written to /etc/hosts
property :aliases, [ Array, nil ], default: nil                   # extra names for the /etc/hosts entry
property :windows_reboot, [ true, false ], default: true          # request reboot after windows rename

default_action :set

action_class do
  # Rewrites +path+ so that every line matching +regex+ is replaced by
  # +string+ (appended at the bottom). Returns the file resource so callers
  # can tweak it (atomic_update, notifies, ...).
  def append_replacing_matching_lines(path, regex, string)
    text = IO.read(path).split("\n")
    text.reject! { |s| s =~ regex }
    text += [ string ]
    file path do
      content text.join("\n") + "\n"
      owner "root"
      group node["root_group"]
      mode "0644"
      not_if { IO.read(path).split("\n").include?(string) }
    end
  end

  # True when ohai reports we are running as a docker guest.
  def docker_guest?
    node["virtualization"] && node["virtualization"]["systems"] &&
      node["virtualization"]["systems"]["docker"] && node["virtualization"]["systems"]["docker"] == "guest"
  end
end

action :set do
  ohai "reload hostname" do
    plugin "hostname"
    action :nothing
  end

  if node["platform_family"] != "windows"
    # set the hostname via /bin/hostname
    execute "set hostname to #{new_resource.hostname}" do
      command "/bin/hostname #{new_resource.hostname}"
      not_if { shell_out!("hostname").stdout.chomp == new_resource.hostname }
      notifies :reload, "ohai[reload hostname]"
    end

    # make sure node['fqdn'] resolves via /etc/hosts
    unless new_resource.ipaddress.nil?
      newline = "#{new_resource.ipaddress} #{new_resource.hostname}"
      newline << " #{new_resource.aliases.join(" ")}" if new_resource.aliases && !new_resource.aliases.empty?
      newline << " #{new_resource.hostname[/[^\.]*/]}"
      r = append_replacing_matching_lines("/etc/hosts", /^#{new_resource.ipaddress}\s+|\s+#{new_resource.hostname}\s+/, newline)
      r.atomic_update false if docker_guest?
      r.notifies :reload, "ohai[reload hostname]"
    end

    # setup the hostname to persist on a reboot
    case
    when ::File.exist?("/usr/sbin/scutil")
      # darwin
      execute "set HostName via scutil" do
        command "/usr/sbin/scutil --set HostName #{new_resource.hostname}"
        not_if { shell_out!("/usr/sbin/scutil --get HostName").stdout.chomp == new_resource.hostname }
        notifies :reload, "ohai[reload hostname]"
      end
      execute "set ComputerName via scutil" do
        command "/usr/sbin/scutil --set ComputerName #{new_resource.hostname}"
        not_if { shell_out!("/usr/sbin/scutil --get ComputerName").stdout.chomp == new_resource.hostname }
        notifies :reload, "ohai[reload hostname]"
      end
      shortname = new_resource.hostname[/[^\.]*/]
      execute "set LocalHostName via scutil" do
        command "/usr/sbin/scutil --set LocalHostName #{shortname}"
        not_if { shell_out!("/usr/sbin/scutil --get LocalHostName").stdout.chomp == shortname }
        notifies :reload, "ohai[reload hostname]"
      end
    when node[:os] == "linux"
      case
      when ::File.exist?("/usr/bin/hostnamectl") && !docker_guest?
        # use hostnamectl whenever we find it on linux (as systemd takes over the world)
        # this must come before other methods like /etc/hostname and /etc/sysconfig/network
        execute "hostnamectl set-hostname #{new_resource.hostname}" do
          notifies :reload, "ohai[reload hostname]"
          not_if { shell_out!("hostnamectl status", { :returns => [0, 1] }).stdout =~ /Static hostname:\s+#{new_resource.hostname}/ }
        end
      when ::File.exist?("/etc/hostname")
        # debian family uses /etc/hostname
        # this is also fallback for any linux systemd host in a docker container
        file "/etc/hostname" do
          atomic_update false
          content "#{new_resource.hostname}\n"
          owner "root"
          group node["root_group"]
          mode "0644"
        end
      when ::File.exist?("/etc/sysconfig/network")
        # older non-systemd RHEL/Fedora derived
        # FIX: \s* (was \s+) — "HOSTNAME=foo" contains no whitespace, so the
        # old regex never matched and stale lines accumulated.
        append_replacing_matching_lines("/etc/sysconfig/network", /^HOSTNAME\s*=/, "HOSTNAME=#{new_resource.hostname}")
      when ::File.exist?("/etc/HOSTNAME")
        # SuSE/OpenSUSE uses /etc/HOSTNAME
        file "/etc/HOSTNAME" do
          content "#{new_resource.hostname}\n"
          owner "root"
          group node["root_group"]
          mode "0644"
        end
      when ::File.exist?("/etc/conf.d/hostname")
        # Gentoo
        file "/etc/conf.d/hostname" do
          content "#{new_resource.hostname}\n"
          owner "root"
          group node["root_group"]
          mode "0644"
        end
      else
        # This is a failsafe for all other linux distributions where we set the hostname
        # via /etc/sysctl.conf on reboot. This may get into a fight with other cookbooks
        # that manage sysctls on linux.
        # FIX: \s* anchors (were \s+) — "kernel.hostname=foo" starts at column
        # zero with no surrounding whitespace, so the old regex never matched.
        append_replacing_matching_lines("/etc/sysctl.conf", /^\s*kernel\.hostname\s*=/, "kernel.hostname=#{new_resource.hostname}")
      end
    when ::File.exist?("/etc/rc.conf")
      # *BSD systems with /etc/rc.conf + /etc/myname
      # FIX: \s* anchors (were \s+) — "hostname=foo" has no whitespace.
      append_replacing_matching_lines("/etc/rc.conf", /^\s*hostname\s*=/, "hostname=#{new_resource.hostname}")
      file "/etc/myname" do
        content "#{new_resource.hostname}\n"
        owner "root"
        group node["root_group"]
        mode "0644"
      end
    when ::File.exist?("/etc/nodename")
      # Solaris <= 5.10 systems prior to svccfg taking over this functionality (must come before svccfg handling)
      file "/etc/nodename" do
        content "#{new_resource.hostname}\n"
        owner "root"
        group node["root_group"]
        mode "0644"
      end
      # Solaris also has /etc/inet/hosts (copypasta alert)
      unless new_resource.ipaddress.nil?
        newline = "#{new_resource.ipaddress} #{new_resource.hostname}"
        newline << " #{new_resource.aliases.join(" ")}" if new_resource.aliases && !new_resource.aliases.empty?
        newline << " #{new_resource.hostname[/[^\.]*/]}"
        r = append_replacing_matching_lines("/etc/inet/hosts", /^#{new_resource.ipaddress}\s+|\s+#{new_resource.hostname}\s+/, newline)
        r.notifies :reload, "ohai[reload hostname]"
      end
    when ::File.exist?("/usr/sbin/svccfg")
      # Solaris >= 5.11 systems using svccfg (must come after /etc/nodename handling)
    else
      raise "Do not know how to set hostname on os #{node["os"]}, platform #{node["platform"]}, "\
        "platform_version #{node["platform_version"]}, platform_family #{node["platform_family"]}"
    end
  else # windows
    # suppress EC2 config service from setting our hostname
    ec2_config_xml = 'C:\Program Files\Amazon\Ec2ConfigService\Settings\config.xml'
    cookbook_file ec2_config_xml do
      source "config.xml"
      only_if { ::File.exist? ec2_config_xml }
    end

    # update via WMI rename
    powershell_script "set hostname" do
      # FIX: quote the interpolated name — an unquoted FQDN is not a valid
      # PowerShell argument and the old code failed to parse.
      code <<-EOH
        $sysInfo = Get-WmiObject -Class Win32_ComputerSystem
        $sysInfo.Rename("#{new_resource.hostname}")
      EOH
      not_if { Socket.gethostbyname(Socket.gethostname).first == new_resource.hostname }
    end

    # reboot because $windows
    reboot "setting hostname" do
      reason "chef setting hostname"
      action :request_reboot
      only_if { new_resource.windows_reboot }
    end
  end
end

# this resource forces itself to run at compile_time
def after_created
  if compile_time
    Array(action).each do |action|
      self.run_action(action)
    end
  end
end
fix formatting of /etc/conf.d/hostname
# Chef resource: sets the system hostname (and keeps /etc/hosts coherent)
# on linux, darwin, *BSD, solaris and windows. Forces itself to run at
# compile time by default so the new name is visible as early as possible.
provides :hostname
resource_name :hostname

property :hostname, String, name_property: true                   # FQDN to apply
property :compile_time, [ true, false ], default: true            # run during compile phase
property :ipaddress, [ String, nil ], default: node["ipaddress"]  # address written to /etc/hosts
property :aliases, [ Array, nil ], default: nil                   # extra names for the /etc/hosts entry
property :windows_reboot, [ true, false ], default: true          # request reboot after windows rename

default_action :set

action_class do
  # Rewrites +path+ so that every line matching +regex+ is replaced by
  # +string+ (appended at the bottom). Returns the file resource so callers
  # can tweak it (atomic_update, notifies, ...).
  def append_replacing_matching_lines(path, regex, string)
    text = IO.read(path).split("\n")
    text.reject! { |s| s =~ regex }
    text += [ string ]
    file path do
      content text.join("\n") + "\n"
      owner "root"
      group node["root_group"]
      mode "0644"
      not_if { IO.read(path).split("\n").include?(string) }
    end
  end

  # True when ohai reports we are running as a docker guest.
  def docker_guest?
    node["virtualization"] && node["virtualization"]["systems"] &&
      node["virtualization"]["systems"]["docker"] && node["virtualization"]["systems"]["docker"] == "guest"
  end
end

action :set do
  ohai "reload hostname" do
    plugin "hostname"
    action :nothing
  end

  if node["platform_family"] != "windows"
    # set the hostname via /bin/hostname
    execute "set hostname to #{new_resource.hostname}" do
      command "/bin/hostname #{new_resource.hostname}"
      not_if { shell_out!("hostname").stdout.chomp == new_resource.hostname }
      notifies :reload, "ohai[reload hostname]"
    end

    # make sure node['fqdn'] resolves via /etc/hosts
    unless new_resource.ipaddress.nil?
      newline = "#{new_resource.ipaddress} #{new_resource.hostname}"
      newline << " #{new_resource.aliases.join(" ")}" if new_resource.aliases && !new_resource.aliases.empty?
      newline << " #{new_resource.hostname[/[^\.]*/]}"
      r = append_replacing_matching_lines("/etc/hosts", /^#{new_resource.ipaddress}\s+|\s+#{new_resource.hostname}\s+/, newline)
      r.atomic_update false if docker_guest?
      r.notifies :reload, "ohai[reload hostname]"
    end

    # setup the hostname to persist on a reboot
    case
    when ::File.exist?("/usr/sbin/scutil")
      # darwin
      execute "set HostName via scutil" do
        command "/usr/sbin/scutil --set HostName #{new_resource.hostname}"
        not_if { shell_out!("/usr/sbin/scutil --get HostName").stdout.chomp == new_resource.hostname }
        notifies :reload, "ohai[reload hostname]"
      end
      execute "set ComputerName via scutil" do
        command "/usr/sbin/scutil --set ComputerName #{new_resource.hostname}"
        not_if { shell_out!("/usr/sbin/scutil --get ComputerName").stdout.chomp == new_resource.hostname }
        notifies :reload, "ohai[reload hostname]"
      end
      shortname = new_resource.hostname[/[^\.]*/]
      execute "set LocalHostName via scutil" do
        command "/usr/sbin/scutil --set LocalHostName #{shortname}"
        not_if { shell_out!("/usr/sbin/scutil --get LocalHostName").stdout.chomp == shortname }
        notifies :reload, "ohai[reload hostname]"
      end
    when node[:os] == "linux"
      case
      when ::File.exist?("/usr/bin/hostnamectl") && !docker_guest?
        # use hostnamectl whenever we find it on linux (as systemd takes over the world)
        # this must come before other methods like /etc/hostname and /etc/sysconfig/network
        execute "hostnamectl set-hostname #{new_resource.hostname}" do
          notifies :reload, "ohai[reload hostname]"
          not_if { shell_out!("hostnamectl status", { :returns => [0, 1] }).stdout =~ /Static hostname:\s+#{new_resource.hostname}/ }
        end
      when ::File.exist?("/etc/hostname")
        # debian family uses /etc/hostname
        # this is also fallback for any linux systemd host in a docker container
        file "/etc/hostname" do
          atomic_update false
          content "#{new_resource.hostname}\n"
          owner "root"
          group node["root_group"]
          mode "0644"
        end
      when ::File.exist?("/etc/sysconfig/network")
        # older non-systemd RHEL/Fedora derived
        # FIX: \s* (was \s+) — "HOSTNAME=foo" contains no whitespace, so the
        # old regex never matched and stale lines accumulated.
        append_replacing_matching_lines("/etc/sysconfig/network", /^HOSTNAME\s*=/, "HOSTNAME=#{new_resource.hostname}")
      when ::File.exist?("/etc/HOSTNAME")
        # SuSE/OpenSUSE uses /etc/HOSTNAME
        file "/etc/HOSTNAME" do
          content "#{new_resource.hostname}\n"
          owner "root"
          group node["root_group"]
          mode "0644"
        end
      when ::File.exist?("/etc/conf.d/hostname")
        # Gentoo expects an rc-style assignment, not a bare name.
        file "/etc/conf.d/hostname" do
          content "hostname=\"#{new_resource.hostname}\"\n"
          owner "root"
          group node["root_group"]
          mode "0644"
        end
      else
        # This is a failsafe for all other linux distributions where we set the hostname
        # via /etc/sysctl.conf on reboot. This may get into a fight with other cookbooks
        # that manage sysctls on linux.
        # FIX: \s* anchors (were \s+) — "kernel.hostname=foo" starts at column
        # zero with no surrounding whitespace, so the old regex never matched.
        append_replacing_matching_lines("/etc/sysctl.conf", /^\s*kernel\.hostname\s*=/, "kernel.hostname=#{new_resource.hostname}")
      end
    when ::File.exist?("/etc/rc.conf")
      # *BSD systems with /etc/rc.conf + /etc/myname
      # FIX: \s* anchors (were \s+) — "hostname=foo" has no whitespace.
      append_replacing_matching_lines("/etc/rc.conf", /^\s*hostname\s*=/, "hostname=#{new_resource.hostname}")
      file "/etc/myname" do
        content "#{new_resource.hostname}\n"
        owner "root"
        group node["root_group"]
        mode "0644"
      end
    when ::File.exist?("/etc/nodename")
      # Solaris <= 5.10 systems prior to svccfg taking over this functionality (must come before svccfg handling)
      file "/etc/nodename" do
        content "#{new_resource.hostname}\n"
        owner "root"
        group node["root_group"]
        mode "0644"
      end
      # Solaris also has /etc/inet/hosts (copypasta alert)
      unless new_resource.ipaddress.nil?
        newline = "#{new_resource.ipaddress} #{new_resource.hostname}"
        newline << " #{new_resource.aliases.join(" ")}" if new_resource.aliases && !new_resource.aliases.empty?
        newline << " #{new_resource.hostname[/[^\.]*/]}"
        r = append_replacing_matching_lines("/etc/inet/hosts", /^#{new_resource.ipaddress}\s+|\s+#{new_resource.hostname}\s+/, newline)
        r.notifies :reload, "ohai[reload hostname]"
      end
    when ::File.exist?("/usr/sbin/svccfg")
      # Solaris >= 5.11 systems using svccfg (must come after /etc/nodename handling)
    else
      raise "Do not know how to set hostname on os #{node["os"]}, platform #{node["platform"]}, "\
        "platform_version #{node["platform_version"]}, platform_family #{node["platform_family"]}"
    end
  else # windows
    # suppress EC2 config service from setting our hostname
    ec2_config_xml = 'C:\Program Files\Amazon\Ec2ConfigService\Settings\config.xml'
    cookbook_file ec2_config_xml do
      source "config.xml"
      only_if { ::File.exist? ec2_config_xml }
    end

    # update via WMI rename
    powershell_script "set hostname" do
      # FIX: quote the interpolated name — an unquoted FQDN is not a valid
      # PowerShell argument and the old code failed to parse.
      code <<-EOH
        $sysInfo = Get-WmiObject -Class Win32_ComputerSystem
        $sysInfo.Rename("#{new_resource.hostname}")
      EOH
      not_if { Socket.gethostbyname(Socket.gethostname).first == new_resource.hostname }
    end

    # reboot because $windows
    reboot "setting hostname" do
      reason "chef setting hostname"
      action :request_reboot
      only_if { new_resource.windows_reboot }
    end
  end
end

# this resource forces itself to run at compile_time
def after_created
  if compile_time
    Array(action).each do |action|
      self.run_action(action)
    end
  end
end
|
module Access
  module Derivatives
    # Gem version string. Frozen so the shared constant cannot be mutated
    # by callers (mutable string constants are a classic Ruby footgun).
    VERSION = "0.0.5".freeze
  end
end
version 0.0.6
module Access
  module Derivatives
    # Gem version string. Frozen so the shared constant cannot be mutated
    # by callers (mutable string constants are a classic Ruby footgun).
    VERSION = "0.0.6".freeze
  end
end
|
# Emoji aliases for File's singleton predicate methods.
class File
  class << self
    alias 📁❓ directory?
    alias 🌍📖❓ world_readable?
    # FIX: the core method is world_writable? (no "e") — aliasing the
    # misspelled world_writeable? raised NameError at load time.
    alias 🌍✍❓ world_writable?
  end
end
🔨🌍✍ 💂⁉️ 🐛😑
# Emoji-named shortcuts for File's singleton predicate methods.
class File
  singleton_class.class_eval do
    alias_method :📁❓, :directory?
    alias_method :🌍📖❓, :world_readable?
    alias_method :🌍✍❓, :world_writable?
  end
end
|
module ActsAsSolr #:nodoc:

  module ClassMethods
    include CommonMethods
    include ParserMethods

    # Finds instances of a model. Terms are ANDed by default, can be overwritten
    # by using OR between terms
    #
    # Here's a sample (untested) code for your controller:
    #
    #  def search
    #    results = Book.find_by_solr params[:query]
    #  end
    #
    # For specific fields searching use :filter_queries options
    #
    # ====options:
    # offset:: - The first document to be retrieved (offset)
    # page:: - The page to be retrieved
    # limit:: - The number of rows per page
    # per_page:: - Alias for limit
    # filter_queries:: - Use solr filter queries to sort by fields
    #
    #   Book.find_by_solr 'ruby', :filter_queries => ['price:5']
    #
    # sort:: - Orders (sort by) the result set using a given criteria:
    #
    #   Book.find_by_solr 'ruby', :sort => 'description asc'
    #
    # field_types:: This option is deprecated and will be obsolete by version 1.0.
    #               There's no need to specify the :field_types anymore when doing a
    #               search in a model that specifies a field type for a field. The field
    #               types are automatically traced back when they're included.
    #
    #                 class Electronic < ActiveRecord::Base
    #                   acts_as_solr :fields => [{:price => :range_float}]
    #                 end
    #
    # facets:: This option argument accepts the following arguments:
    #          fields:: The fields to be included in the faceted search (Solr's facet.field)
    #          query:: The queries to be included in the faceted search (Solr's facet.query)
    #          zeros:: Display facets with count of zero. (true|false)
    #          sort:: Sorts the faceted results by highest to lowest count. (true|false)
    #          browse:: This is where the 'drill-down' of the facets work. Accepts an array of
    #                   fields in the format "facet_field:term"
    #          mincount:: Replacement for zeros (it has been deprecated in Solr). Specifies the
    #                     minimum count necessary for a facet field to be returned. (Solr's
    #                     facet.mincount) Overrides :zeros if it is specified. Default is 0.
    #
    # dates:: Run date faceted queries using the following arguments:
    #         fields:: The fields to be included in the faceted date search (Solr's facet.date).
    #                  It may be either a String/Symbol or Hash. If it's a hash the options are the
    #                  same as date_facets minus the fields option (i.e., :start:, :end, :gap, :other,
    #                  :between). These options if provided will override the base options.
    #                  (Solr's f.<field_name>.date.<key>=<value>).
    #         start:: The lower bound for the first date range for all Date Faceting. Required if
    #                 :fields is present
    #         end:: The upper bound for the last date range for all Date Faceting. Required if
    #               :fields is present
    #         gap:: The size of each date range expressed as an interval to be added to the lower
    #               bound using the DateMathParser syntax. Required if :fields is present
    #         hardend:: A Boolean parameter instructing Solr what to do in the event that
    #                   facet.date.gap does not divide evenly between facet.date.start and facet.date.end.
    #         other:: This param indicates that in addition to the counts for each date range
    #                 constraint between facet.date.start and facet.date.end, other counts should be
    #                 calculated. May specify more than one in an Array. The possible options are:
    #                 before:: - all records with lower bound less than start
    #                 after:: - all records with upper bound greater than end
    #                 between:: - all records with field values between start and end
    #                 none:: - compute no other bounds (useful in per field assignment)
    #                 all:: - shortcut for before, after, and between
    #         filter:: Similar to :query option provided by :facets, in that it accepts an array
    #                  of date queries to limit results. Can not be used as a part of a :field hash.
    #                  This is the only option that can be used if :fields is not present.
    #
    # Example:
    #
    #   Electronic.find_by_solr "memory", :facets => {:zeros => false, :sort => true,
    #                                                 :query => ["price:[* TO 200]",
    #                                                            "price:[200 TO 500]",
    #                                                            "price:[500 TO *]"],
    #                                                 :fields => [:category, :manufacturer],
    #                                                 :browse => ["category:Memory","manufacturer:Someone"]}
    #
    #
    # Examples of date faceting:
    #
    #  basic:
    #    Electronic.find_by_solr "memory", :facets => {:dates => {:fields => [:updated_at, :created_at],
    #      :start => 'NOW-10YEARS/DAY', :end => 'NOW/DAY', :gap => '+2YEARS', :other => :before}}
    #
    #  advanced:
    #    Electronic.find_by_solr "memory", :facets => {:dates => {:fields => [:updated_at,
    #      {:created_at => {:start => 'NOW-20YEARS/DAY', :end => 'NOW-10YEARS/DAY', :other => [:before, :after]}
    #      }], :start => 'NOW-10YEARS/DAY', :end => 'NOW/DAY', :other => :before, :filter =>
    #      ["created_at:[NOW-10YEARS/DAY TO NOW/DAY]", "updated_at:[NOW-1YEAR/DAY TO NOW/DAY]"]}}
    #
    #  filter only:
    #    Electronic.find_by_solr "memory", :facets => {:dates => {:filter => "updated_at:[NOW-1YEAR/DAY TO NOW/DAY]"}}
    #
    #
    #
    # scores:: If set to true this will return the score as a 'solr_score' attribute
    #          for each one of the instances found. Does not currently work with find_id_by_solr
    #
    #   books = Book.find_by_solr 'ruby OR splinter', :scores => true
    #   books.records.first.solr_score
    #   => 1.21321397
    #   books.records.last.solr_score
    #   => 0.12321548
    #
    # lazy:: If set to true the search will return objects that will touch the database when you ask for one
    #        of their attributes for the first time. Useful when you're using fragment caching based solely on
    #        types and ids.
    #
    # relevance:: Sets fields relevance
    #
    #   Book.find_by_solr "zidane", :relevance => {:title => 5, :author => 2}
    #
    def find_by_solr(query, options={})
      options[:results_format] ||= :objects
      data = parse_query(query, options)
      return parse_results(data, options)
    end
    alias :search :find_by_solr

    # Finds instances of a model and returns an array with the ids:
    #  Book.find_id_by_solr "rails" => [1,4,7]
    # The options accepted are the same as find_by_solr
    #
    def find_id_by_solr(query, options={})
      options[:results_format] ||= :ids
      data = parse_query(query, options)
      return parse_results(data, options)
    end

    # This method can be used to execute a search across multiple models:
    #   Book.multi_solr_search "Napoleon OR Tom", :models => [Movie]
    #
    # ====options:
    # Accepts the same options as find_by_solr plus:
    # models:: The additional models you'd like to include in the search
    # results_format:: Specify the format of the results found
    #                  :objects :: Will return an array with the results being objects (default). Example:
    #                               Book.multi_solr_search "Napoleon OR Tom", :models => [Movie], :results_format => :objects
    #                  :ids :: Will return an array with the ids of each entry found. Example:
    #                           Book.multi_solr_search "Napoleon OR Tom", :models => [Movie], :results_format => :ids
    #                           => [{"id" => "Movie:1"},{"id" => Book:1}]
    #                          Where the value of each array is as Model:instance_id
    #                  :none :: Useful for querying facets
    # scores:: If set to true this will return the score as a 'solr_score' attribute
    #          for each one of the instances found. Does not currently work with find_id_by_solr
    #
    #   books = Book.multi_solr_search 'ruby OR splinter', :scores => true
    #   books.records.first.solr_score
    #   => 1.21321397
    #   books.records.last.solr_score
    #   => 0.12321548
    #
    def multi_solr_search(query, options = {})
      options[:results_format] ||= :objects
      data = parse_query(query, options)
      if data.nil? || data.total_hits == 0
        return SearchResults.new(:docs => [], :total => 0)
      end
      result = find_multi_search_objects(data, options)
      if options[:scores] && options[:results_format] == :objects
        add_scores(result, data)
      end
      SearchResults.new :docs => result, :total => data.total_hits
    end

    # Materializes the hits of a multi-model search according to
    # options[:results_format] (:objects loads records, :ids collects id
    # hashes, anything else yields an empty array).
    def find_multi_search_objects(data, options)
      result = []
      if options[:results_format] == :objects
        data.hits.each do |doc|
          # Doc ids are "ModelName:record_id" — constantize and load.
          k = doc.fetch('id').first.to_s.split(':')
          result << k[0].constantize.find_by_id(k[1])
        end
      elsif options[:results_format] == :ids
        data.hits.each{|doc| result << {"id" => doc["id"].to_s}}
      end
      result
    end

    # returns the total number of documents found in the query specified:
    #  Book.count_by_solr 'rails' => 3
    #
    def count_by_solr(query, options = {})
      options[:results_format] ||= :ids
      data = parse_query(query, options)
      data.total_hits
    end

    # It's used to rebuild the Solr index for a specific model.
    #  Book.rebuild_solr_index
    #
    # If batch_size is greater than 0, adds will be done in batches.
    # NOTE: If using sqlserver, be sure to use a finder with an explicit order.
    # Non-edge versions of rails do not handle pagination correctly for sqlserver
    # without an order clause.
    #
    # If a finder block is given, it will be called to retrieve the items to index.
    # This can be very useful for things such as updating based on conditions or
    # using eager loading for indexed associations.
    def rebuild_solr_index(batch_size=300, options = {}, &finder)
      finder ||= lambda do |ar, sql_options|
        ar.all sql_options.merge!({:order => self.primary_key})
      end
      start_time = Time.now
      options[:offset] ||= 0
      options[:threads] ||= 2
      # Only honor :delayed_job when Delayed::Job is actually loaded
      # (truthy & keeps the option falsy otherwise).
      options[:delayed_job] &= defined?(Delayed::Job)

      if batch_size > 0
        items_processed = 0
        offset = options[:offset]
        end_reached = false
        threads = []
        mutex = Mutex.new
        queue = Queue.new

        loop do
          items = finder.call(self, {:limit => batch_size, :offset => offset})
          add_batch = items.collect { |content| content.to_solr_doc }
          offset += items.size
          end_reached = items.size == 0
          break if end_reached

          # Cap the number of in-flight indexing threads at options[:threads].
          if options[:threads] == threads.size
            threads.first.join
            threads.shift
          end

          queue << [items, add_batch]
          threads << Thread.new do
            iteration_start = Time.now
            iteration_items, iteration_add_batch = queue.pop(true)
            begin
              if options[:delayed_job]
                delay.solr_add iteration_add_batch
              else
                solr_add iteration_add_batch
                solr_commit
              end
            rescue StandardError => exception
              # FIX: was `rescue Exception`, which also swallowed
              # SignalException/SystemExit and made rebuilds uninterruptible.
              logger.error(exception.to_s)
            end

            last_id = iteration_items.last.id
            time_so_far = Time.now - start_time
            mutex.synchronize do
              items_processed += iteration_items.size
              if options[:delayed_job]
                logger.info "#{Process.pid}: #{items_processed} items for #{self.name} have been sent to Delayed::Job in #{'%.3f' % time_so_far}s at #{'%.3f' % (items_processed / time_so_far)} items/sec. Last id: #{last_id}"
              else
                logger.info "#{Process.pid}: #{items_processed} items for #{self.name} have been batch added to index in #{'%.3f' % time_so_far}s at #{'%.3f' % (items_processed / time_so_far)} items/sec. Last id: #{last_id}"
              end
            end
          end
        end
        solr_commit if options[:delayed_job]
        threads.each(&:join)
      else
        items = finder.call(self, {})
        items.each { |content| content.solr_save }
        items_processed = items.size
      end

      if items_processed > 0
        solr_optimize
        time_elapsed = Time.now - start_time
        logger.info "Index for #{self.name} has been rebuilt (took #{'%.3f' % time_elapsed}s)"
      else
        # NOTE(review): this string is returned, not logged — possibly
        # logger.info was intended; left as-is to preserve the return value.
        "Nothing to index for #{self.name}"
      end
    end
    alias :rebuild_index :rebuild_solr_index
  end
end
Do not optimize anymore
module ActsAsSolr #:nodoc:
module ClassMethods
include CommonMethods
include ParserMethods
# Finds instances of a model. Terms are ANDed by default, can be overwritten
# by using OR between terms
#
# Here's a sample (untested) code for your controller:
#
# def search
# results = Book.find_by_solr params[:query]
# end
#
# For specific fields searching use :filter_queries options
#
# ====options:
# offset:: - The first document to be retrieved (offset)
# page:: - The page to be retrieved
# limit:: - The number of rows per page
# per_page:: - Alias for limit
# filter_queries:: - Use solr filter queries to sort by fields
#
# Book.find_by_solr 'ruby', :filter_queries => ['price:5']
#
# sort:: - Orders (sort by) the result set using a given criteria:
#
# Book.find_by_solr 'ruby', :sort => 'description asc'
#
# field_types:: This option is deprecated and will be obsolete by version 1.0.
# There's no need to specify the :field_types anymore when doing a
# search in a model that specifies a field type for a field. The field
# types are automatically traced back when they're included.
#
# class Electronic < ActiveRecord::Base
# acts_as_solr :fields => [{:price => :range_float}]
# end
#
# facets:: This option argument accepts the following arguments:
# fields:: The fields to be included in the faceted search (Solr's facet.field)
# query:: The queries to be included in the faceted search (Solr's facet.query)
# zeros:: Display facets with count of zero. (true|false)
# sort:: Sorts the faceted resuls by highest to lowest count. (true|false)
# browse:: This is where the 'drill-down' of the facets work. Accepts an array of
# fields in the format "facet_field:term"
# mincount:: Replacement for zeros (it has been deprecated in Solr). Specifies the
# minimum count necessary for a facet field to be returned. (Solr's
# facet.mincount) Overrides :zeros if it is specified. Default is 0.
#
# dates:: Run date faceted queries using the following arguments:
# fields:: The fields to be included in the faceted date search (Solr's facet.date).
# It may be either a String/Symbol or Hash. If it's a hash the options are the
# same as date_facets minus the fields option (i.e., :start:, :end, :gap, :other,
# :between). These options if provided will override the base options.
# (Solr's f.<field_name>.date.<key>=<value>).
# start:: The lower bound for the first date range for all Date Faceting. Required if
# :fields is present
# end:: The upper bound for the last date range for all Date Faceting. Required if
# :fields is prsent
# gap:: The size of each date range expressed as an interval to be added to the lower
# bound using the DateMathParser syntax. Required if :fields is prsent
# hardend:: A Boolean parameter instructing Solr what do do in the event that
# facet.date.gap does not divide evenly between facet.date.start and facet.date.end.
# other:: This param indicates that in addition to the counts for each date range
# constraint between facet.date.start and facet.date.end, other counds should be
# calculated. May specify more then one in an Array. The possible options are:
# before:: - all records with lower bound less than start
# after:: - all records with upper bound greater than end
# between:: - all records with field values between start and end
# none:: - compute no other bounds (useful in per field assignment)
# all:: - shortcut for before, after, and between
# filter:: Similar to :query option provided by :facets, in that accepts an array of
# of date queries to limit results. Can not be used as a part of a :field hash.
# This is the only option that can be used if :fields is not present.
#
# Example:
#
# Electronic.find_by_solr "memory", :facets => {:zeros => false, :sort => true,
# :query => ["price:[* TO 200]",
# "price:[200 TO 500]",
# "price:[500 TO *]"],
# :fields => [:category, :manufacturer],
# :browse => ["category:Memory","manufacturer:Someone"]}
#
#
# Examples of date faceting:
#
# basic:
# Electronic.find_by_solr "memory", :facets => {:dates => {:fields => [:updated_at, :created_at],
# :start => 'NOW-10YEARS/DAY', :end => 'NOW/DAY', :gap => '+2YEARS', :other => :before}}
#
# advanced:
# Electronic.find_by_solr "memory", :facets => {:dates => {:fields => [:updated_at,
# {:created_at => {:start => 'NOW-20YEARS/DAY', :end => 'NOW-10YEARS/DAY', :other => [:before, :after]}
# }], :start => 'NOW-10YEARS/DAY', :end => 'NOW/DAY', :other => :before, :filter =>
# ["created_at:[NOW-10YEARS/DAY TO NOW/DAY]", "updated_at:[NOW-1YEAR/DAY TO NOW/DAY]"]}}
#
# filter only:
# Electronic.find_by_solr "memory", :facets => {:dates => {:filter => "updated_at:[NOW-1YEAR/DAY TO NOW/DAY]"}}
#
#
#
# scores:: If set to true this will return the score as a 'solr_score' attribute
# for each one of the instances found. Does not currently work with find_id_by_solr
#
# books = Book.find_by_solr 'ruby OR splinter', :scores => true
# books.records.first.solr_score
# => 1.21321397
# books.records.last.solr_score
# => 0.12321548
#
# lazy:: If set to true the search will return objects that will touch the database when you ask for one
# of their attributes for the first time. Useful when you're using fragment caching based solely on
# types and ids.
#
# relevance:: Sets fields relevance
#
# Book.find_by_solr "zidane", :relevance => {:title => 5, :author => 2}
#
def find_by_solr(query, options={})
options[:results_format] ||= :objects
data = parse_query(query, options)
return parse_results(data, options)
end
alias :search :find_by_solr
# Finds instances of a model and returns an array with the ids:
# Book.find_id_by_solr "rails" => [1,4,7]
# The options accepted are the same as find_by_solr
#
def find_id_by_solr(query, options={})
options[:results_format] ||= :ids
data = parse_query(query, options)
return parse_results(data, options)
end
# This method can be used to execute a search across multiple models:
# Book.multi_solr_search "Napoleon OR Tom", :models => [Movie]
#
# ====options:
# Accepts the same options as find_by_solr plus:
# models:: The additional models you'd like to include in the search
# results_format:: Specify the format of the results found
# :objects :: Will return an array with the results being objects (default). Example:
# Book.multi_solr_search "Napoleon OR Tom", :models => [Movie], :results_format => :objects
# :ids :: Will return an array with the ids of each entry found. Example:
# Book.multi_solr_search "Napoleon OR Tom", :models => [Movie], :results_format => :ids
# => [{"id" => "Movie:1"},{"id" => Book:1}]
# Where the value of each array is as Model:instance_id
# :none :: Useful for querying facets
# scores:: If set to true this will return the score as a 'solr_score' attribute
# for each one of the instances found. Does not currently work with find_id_by_solr
#
# books = Book.multi_solr_search 'ruby OR splinter', :scores => true
# books.records.first.solr_score
# => 1.21321397
# books.records.last.solr_score
# => 0.12321548
#
def multi_solr_search(query, options = {})
options[:results_format] ||= :objects
data = parse_query(query, options)
if data.nil? or data.total_hits == 0
return SearchResults.new(:docs => [], :total => 0)
end
result = find_multi_search_objects(data, options)
if options[:scores] and options[:results_format] == :objects
add_scores(result, data)
end
SearchResults.new :docs => result, :total => data.total_hits
end
def find_multi_search_objects(data, options)
result = []
if options[:results_format] == :objects
data.hits.each do |doc|
k = doc.fetch('id').first.to_s.split(':')
result << k[0].constantize.find_by_id(k[1])
end
elsif options[:results_format] == :ids
data.hits.each{|doc| result << {"id" => doc["id"].to_s}}
end
result
end
# returns the total number of documents found in the query specified:
# Book.count_by_solr 'rails' => 3
#
def count_by_solr(query, options = {})
options[:results_format] ||= :ids
data = parse_query(query, options)
data.total_hits
end
# It's used to rebuild the Solr index for a specific model.
# Book.rebuild_solr_index
#
# If batch_size is greater than 0, adds will be done in batches.
# NOTE: If using sqlserver, be sure to use a finder with an explicit order.
# Non-edge versions of rails do not handle pagination correctly for sqlserver
# without an order clause.
#
# If a finder block is given, it will be called to retrieve the items to index.
# This can be very useful for things such as updating based on conditions or
# using eager loading for indexed associations.
def rebuild_solr_index(batch_size=300, options = {}, &finder)
finder ||= lambda do |ar, sql_options|
ar.all sql_options.merge!({:order => self.primary_key})
end
start_time = Time.now
options[:offset] ||= 0
options[:threads] ||= 2
options[:delayed_job] &= defined?(Delayed::Job)
if batch_size > 0
items_processed = 0
offset = options[:offset]
end_reached = false
threads = []
mutex = Mutex.new
queue = Queue.new
loop do
items = finder.call(self, {:limit => batch_size, :offset => offset})
add_batch = items.collect { |content| content.to_solr_doc }
offset += items.size
end_reached = items.size == 0
break if end_reached
if options[:threads] == threads.size
threads.first.join
threads.shift
end
queue << [items, add_batch]
threads << Thread.new do
iteration_start = Time.now
iteration_items, iteration_add_batch = queue.pop(true)
begin
if options[:delayed_job]
delay.solr_add iteration_add_batch
else
solr_add iteration_add_batch
solr_commit
end
rescue Exception => exception
logger.error(exception.to_s)
end
last_id = iteration_items.last.id
time_so_far = Time.now - start_time
iteration_time = Time.now - iteration_start
mutex.synchronize do
items_processed += iteration_items.size
if options[:delayed_job]
logger.info "#{Process.pid}: #{items_processed} items for #{self.name} have been sent to Delayed::Job in #{'%.3f' % time_so_far}s at #{'%.3f' % (items_processed / time_so_far)} items/sec. Last id: #{last_id}"
else
logger.info "#{Process.pid}: #{items_processed} items for #{self.name} have been batch added to index in #{'%.3f' % time_so_far}s at #{'%.3f' % (items_processed / time_so_far)} items/sec. Last id: #{last_id}"
end
end
end
end
solr_commit if options[:delayed_job]
threads.each{ |t| t.join }
else
items = finder.call(self, {})
items.each { |content| content.solr_save }
items_processed = items.size
end
if items_processed > 0
# FIXME: not recommended, reenable with option
#solr_optimize
time_elapsed = Time.now - start_time
logger.info "Index for #{self.name} has been rebuilt (took #{'%.3f' % time_elapsed}s)"
else
"Nothing to index for #{self.name}"
end
end
alias :rebuild_index :rebuild_solr_index
end
end
|
module AjaxDatatablesRails
  # Server-side processing for jQuery DataTables: turns the DataTables request
  # params (draw, order, search, columns, start, length) into a filtered,
  # sorted, paginated JSON response. Subclasses must implement +data+ and
  # +get_raw_records+ and mix in a pagination extension.
  class Base
    extend Forwardable

    # Raised by the template methods below when a subclass forgets to
    # implement them.
    class MethodNotImplementedError < StandardError; end

    attr_reader :view, :options, :sortable_columns, :searchable_columns
    def_delegator :@view, :params, :params

    # view::    the view context the datatable is built from (supplies params)
    # options:: arbitrary per-datatable options kept for subclasses
    def initialize(view, options = {})
      @view = view
      @options = options
    end

    def config
      @config ||= AjaxDatatablesRails.config
    end

    # Column names usable for ordering; subclasses override.
    def sortable_columns
      @sortable_columns ||= []
    end

    # "model.column" names searched by the global and per-column filters;
    # subclasses override.
    def searchable_columns
      @searchable_columns ||= []
    end

    # Subclass hook: must return the row data for the current page.
    def data
      fail(
        MethodNotImplementedError,
        'Please implement this method in your class.'
      )
    end

    # Subclass hook: must return the base relation to filter/sort/paginate.
    def get_raw_records
      fail(
        MethodNotImplementedError,
        'Please implement this method in your class.'
      )
    end

    # The DataTables JSON envelope.
    def as_json(options = {})
      {
        :draw => params[:draw].to_i,
        :recordsTotal => get_raw_records.count(:all),
        :recordsFiltered => filter_records(get_raw_records).count(:all),
        :data => data
      }
    end

    private

    def records
      @records ||= fetch_records
    end

    # Builds the record set: sort, filter, then paginate (a length of '-1'
    # means the client asked for all rows).
    def fetch_records
      records = get_raw_records
      records = sort_records(records)
      records = filter_records(records)
      records = paginate_records(records) unless params[:length] == '-1'
      records
    end

    def sort_records(records)
      sort_by = []
      params[:order].each_value do |item|
        sort_by << "#{sort_column(item)} #{sort_direction(item)}"
      end
      records.order(sort_by.join(", "))
    end

    # Provided by a pagination extension mixin (will_paginate / kaminari).
    def paginate_records(records)
      fail(
        MethodNotImplementedError,
        'Please mixin a pagination extension.'
      )
    end

    def filter_records(records)
      records = simple_search(records)
      records = composite_search(records)
      records
    end

    # Global search box: every whitespace-separated term must match at least
    # one searchable column.
    def simple_search(records)
      return records unless (params[:search].present? && params[:search][:value].present?)
      conditions = build_conditions_for(params[:search][:value])
      records = records.where(conditions) if conditions
      records
    end

    # Per-column search inputs.
    def composite_search(records)
      conditions = aggregate_query
      records = records.where(conditions) if conditions
      records
    end

    def build_conditions_for(query)
      search_for = query.split(' ')
      criteria = search_for.inject([]) do |criteria, atom|
        criteria << searchable_columns.map { |col| search_condition(col, atom) }.reduce(:or)
      end.reduce(:and)
      criteria
    end

    # Builds an Arel LIKE condition for a "model.column" name. The column is
    # CAST to text so matching works across column types: PostgreSQL
    # understands VARCHAR, other adapters (e.g. MySQL) need CHAR.
    def search_condition(column, value)
      model, column = column.split('.')
      model = model.singularize.titleize.gsub( / /, '' ).constantize
      # BUG FIX: the non-:pg branch previously assigned `typecase`, leaving
      # `typecast` undefined and raising NameError on every non-pg adapter.
      typecast = config.db_adapter == :pg ? 'VARCHAR' : 'CHAR'
      casted_column = ::Arel::Nodes::NamedFunction.new('CAST', [model.arel_table[column.to_sym].as(typecast)])
      casted_column.matches("%#{value}%")
    end

    def aggregate_query
      conditions = searchable_columns.each_with_index.map do |column, index|
        value = params[:columns]["#{index}"][:search][:value] if params[:columns]
        search_condition(column, value) unless value.blank?
      end
      conditions.compact.reduce(:and)
    end

    def offset
      (page - 1) * per_page
    end

    def page
      (params[:start].to_i / per_page) + 1
    end

    def per_page
      params.fetch(:length, 10).to_i
    end

    def sort_column(item)
      sortable_columns[item['column'].to_i]
    end

    # Whitelists the requested direction; anything unexpected falls back to ASC.
    def sort_direction(item)
      options = %w(desc asc)
      options.include?(item['dir']) ? item['dir'].upcase : 'ASC'
    end
  end
end
fix typo
module AjaxDatatablesRails
  # Server-side processing for jQuery DataTables: converts the DataTables
  # request params (draw, order, search, columns, start, length) into a
  # filtered, sorted, paginated JSON payload. Subclasses implement +data+ and
  # +get_raw_records+ and mix in a pagination extension.
  class Base
    extend Forwardable

    # Raised by the template methods when a subclass forgets to implement them.
    class MethodNotImplementedError < StandardError; end

    attr_reader :view, :options, :sortable_columns, :searchable_columns
    def_delegator :@view, :params, :params

    def initialize(view, options = {})
      @view = view
      @options = options
    end

    def config
      @config ||= AjaxDatatablesRails.config
    end

    # Columns available for ordering; subclasses override.
    def sortable_columns
      @sortable_columns ||= []
    end

    # "model.column" names searched by the filters; subclasses override.
    def searchable_columns
      @searchable_columns ||= []
    end

    # Subclass hook: row data for the current page.
    def data
      fail(MethodNotImplementedError, 'Please implement this method in your class.')
    end

    # Subclass hook: base relation to filter/sort/paginate.
    def get_raw_records
      fail(MethodNotImplementedError, 'Please implement this method in your class.')
    end

    # DataTables JSON envelope.
    def as_json(options = {})
      {
        :draw => params[:draw].to_i,
        :recordsTotal => get_raw_records.count(:all),
        :recordsFiltered => filter_records(get_raw_records).count(:all),
        :data => data
      }
    end

    private

    def records
      @records ||= fetch_records
    end

    # Sort, filter, then paginate (length '-1' means "all rows").
    def fetch_records
      scoped = sort_records(get_raw_records)
      scoped = filter_records(scoped)
      return scoped if params[:length] == '-1'
      paginate_records(scoped)
    end

    def sort_records(records)
      ordering = params[:order].values.map do |order_entry|
        "#{sort_column(order_entry)} #{sort_direction(order_entry)}"
      end
      records.order(ordering.join(", "))
    end

    # Provided by a pagination extension mixin (will_paginate / kaminari).
    def paginate_records(records)
      fail(MethodNotImplementedError, 'Please mixin a pagination extension.')
    end

    def filter_records(records)
      composite_search(simple_search(records))
    end

    # Global search box: each whitespace-separated term must match at least
    # one searchable column.
    def simple_search(records)
      return records unless (params[:search].present? && params[:search][:value].present?)
      conditions = build_conditions_for(params[:search][:value])
      conditions ? records.where(conditions) : records
    end

    # Per-column search inputs.
    def composite_search(records)
      conditions = aggregate_query
      conditions ? records.where(conditions) : records
    end

    def build_conditions_for(query)
      query.split(' ').map do |term|
        searchable_columns.map { |col| search_condition(col, term) }.reduce(:or)
      end.reduce(:and)
    end

    # Arel LIKE condition for a "model.column" name; CAST to text so matching
    # works across column types (VARCHAR on pg, CHAR elsewhere).
    def search_condition(column, value)
      table_part, column_part = column.split('.')
      model = table_part.singularize.titleize.gsub( / /, '' ).constantize
      typecast = config.db_adapter == :pg ? 'VARCHAR' : 'CHAR'
      casted = ::Arel::Nodes::NamedFunction.new('CAST', [model.arel_table[column_part.to_sym].as(typecast)])
      casted.matches("%#{value}%")
    end

    def aggregate_query
      searchable_columns.each_with_index.map { |column, index|
        term = params[:columns] && params[:columns]["#{index}"][:search][:value]
        search_condition(column, term) unless term.blank?
      }.compact.reduce(:and)
    end

    def offset
      (page - 1) * per_page
    end

    def page
      (params[:start].to_i / per_page) + 1
    end

    def per_page
      params.fetch(:length, 10).to_i
    end

    def sort_column(order_entry)
      sortable_columns[order_entry['column'].to_i]
    end

    # Whitelist the direction; anything unexpected falls back to ASC.
    def sort_direction(order_entry)
      allowed = %w(desc asc)
      if allowed.include?(order_entry['dir'])
        order_entry['dir'].upcase
      else
        'ASC'
      end
    end
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{resque_mailer}
  s.version = "2.0.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Nick Plante"]
  s.date = %q{2011-06-24}
  s.description = %q{Rails plugin for sending asynchronous email with ActionMailer and Resque}
  s.email = %q{nap@zerosum.org}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.md"
  ]
  s.files = [
    ".document",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "lib/resque_mailer.rb",
    "rails/init.rb",
    "resque_mailer.gemspec",
    "spec/resque_mailer_spec.rb",
    "spec/spec.opts",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://github.com/zapnap/resque_mailer}
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Rails plugin for sending asynchronous email with ActionMailer and Resque}

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    # NOTE(review): rspec is declared twice below, and resque/actionmailer are
    # declared both as runtime and development dependencies. This file is
    # generated — fix the duplication in the Rakefile's Jeweler::Tasks, not here.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<resque>, [">= 1.2.3"])
      s.add_runtime_dependency(%q<actionmailer>, [">= 3.0.0"])
      s.add_development_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_development_dependency(%q<yard>, [">= 0.6.0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.2"])
      s.add_development_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_development_dependency(%q<resque>, [">= 1.2.3"])
      s.add_development_dependency(%q<actionmailer>, [">= 3.0.0"])
    else
      s.add_dependency(%q<resque>, [">= 1.2.3"])
      s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
      s.add_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_dependency(%q<yard>, [">= 0.6.0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.2"])
      s.add_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_dependency(%q<resque>, [">= 1.2.3"])
      s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
    end
  else
    s.add_dependency(%q<resque>, [">= 1.2.3"])
    s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
    s.add_dependency(%q<rspec>, [">= 2.6.0"])
    s.add_dependency(%q<yard>, [">= 0.6.0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.2"])
    s.add_dependency(%q<rspec>, [">= 2.6.0"])
    s.add_dependency(%q<resque>, [">= 1.2.3"])
    s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
  end
end
Regenerate gemspec for version 2.0.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{resque_mailer}
  s.version = "2.0.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Nick Plante"]
  s.date = %q{2011-08-19}
  s.description = %q{Rails plugin for sending asynchronous email with ActionMailer and Resque}
  s.email = %q{nap@zerosum.org}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.md"
  ]
  s.files = [
    ".document",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "lib/resque_mailer.rb",
    "rails/init.rb",
    "resque_mailer.gemspec",
    "spec/resque_mailer_spec.rb",
    "spec/spec.opts",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://github.com/zapnap/resque_mailer}
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Rails plugin for sending asynchronous email with ActionMailer and Resque}

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    # NOTE(review): rspec is declared twice below, and resque/actionmailer are
    # declared both as runtime and development dependencies. This file is
    # generated — fix the duplication in the Rakefile's Jeweler::Tasks, not here.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<resque>, [">= 1.2.3"])
      s.add_runtime_dependency(%q<actionmailer>, [">= 3.0.0"])
      s.add_development_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_development_dependency(%q<yard>, [">= 0.6.0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.2"])
      s.add_development_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_development_dependency(%q<resque>, [">= 1.2.3"])
      s.add_development_dependency(%q<actionmailer>, [">= 3.0.0"])
    else
      s.add_dependency(%q<resque>, [">= 1.2.3"])
      s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
      s.add_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_dependency(%q<yard>, [">= 0.6.0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.2"])
      s.add_dependency(%q<rspec>, [">= 2.6.0"])
      s.add_dependency(%q<resque>, [">= 1.2.3"])
      s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
    end
  else
    s.add_dependency(%q<resque>, [">= 1.2.3"])
    s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
    s.add_dependency(%q<rspec>, [">= 2.6.0"])
    s.add_dependency(%q<yard>, [">= 0.6.0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.2"])
    s.add_dependency(%q<rspec>, [">= 2.6.0"])
    s.add_dependency(%q<resque>, [">= 1.2.3"])
    s.add_dependency(%q<actionmailer>, [">= 3.0.0"])
  end
end
|
# frozen_string_literal: true
require "test_helper"
require "i18n/tasks"
module Misc
  # Guards the locale files via i18n-tasks: every key used in code must exist,
  # every defined key must be used, and the YAML files must be normalized.
  class I18nTest < ActiveSupport::TestCase
    def i18n
      @i18n ||= I18n::Tasks::BaseTask.new
    end

    # Memoized forwarders onto the i18n-tasks scanner.
    %i[missing_keys unused_keys non_normalized_paths].each do |task|
      define_method(task) do
        cached = instance_variable_get("@#{task}")
        cached || instance_variable_set("@#{task}", i18n.public_send(task))
      end
    end

    def test_no_missing_keys
      message = "Missing #{missing_keys.leaves.count} i18n keys, run `i18n-tasks missing' to show them"
      assert_empty missing_keys, message
    end

    def test_no_unused_keys
      message = "#{unused_keys.leaves.count} unused i18n keys, run `i18n-tasks unused' to show them"
      assert_empty unused_keys, message
    end

    def test_normalized
      listing = non_normalized_paths.map { |path| "  #{path}" }.join("\n")
      assert_empty non_normalized_paths,
                   "The following files need to be normalized:\n#{listing}\nPlease run `i18n-tasks normalize` to fix"
    end
  end
end
Don't test normalized i18n
# frozen_string_literal: true
require "test_helper"
require "i18n/tasks"
module Misc
  # Guards the locale files via i18n-tasks: every key used in code must exist
  # and every defined key must be used.
  class I18nTest < ActiveSupport::TestCase
    def i18n
      @i18n ||= I18n::Tasks::BaseTask.new
    end

    # Memoized forwarders onto the i18n-tasks scanner.
    %i[missing_keys unused_keys].each do |task|
      define_method(task) do
        cached = instance_variable_get("@#{task}")
        cached || instance_variable_set("@#{task}", i18n.public_send(task))
      end
    end

    def test_no_missing_keys
      message = "Missing #{missing_keys.leaves.count} i18n keys, run `i18n-tasks missing' to show them"
      assert_empty missing_keys, message
    end

    def test_no_unused_keys
      message = "#{unused_keys.leaves.count} unused i18n keys, run `i18n-tasks unused' to show them"
      assert_empty unused_keys, message
    end
  end
end
|
# Base directory for machine metadata written by other cookbooks.
meta_base_path = 'c:\\meta'
default['paths']['meta'] = meta_base_path

# Consul install layout. Expose the base directory as an attribute too so
# dependent recipes don't have to re-derive it from the sub-paths; the config
# directory deliberately lives under the metadata root instead.
consul_base_path = 'c:\\ops\\consul'
default['paths']['consul_base'] = consul_base_path
default['paths']['consul_bin'] = "#{consul_base_path}\\bin"
default['paths']['consul_data'] = "#{consul_base_path}\\data"
default['paths']['consul_checks'] = "#{consul_base_path}\\checks"
default['paths']['consul_config'] = "#{meta_base_path}\\consul"
Adding the attribute for the consul base directory.
# Base directory for machine metadata written by other cookbooks.
meta_root = 'c:\\meta'
default['paths']['meta'] = meta_root

# Consul install layout under the ops root; the config directory deliberately
# lives under the metadata root instead.
consul_root = 'c:\\ops\\consul'
default['paths']['consul_base'] = consul_root
default['paths']['consul_bin'] = "#{consul_root}\\bin"
default['paths']['consul_data'] = "#{consul_root}\\data"
default['paths']['consul_checks'] = "#{consul_root}\\checks"
default['paths']['consul_config'] = "#{meta_root}\\consul"
|
#
# Cookbook Name:: wt_streaminglogreplay
# Recipe:: default
#
# Copyright 2012, Webtrends
#
# All rights reserved - Do Not Redistribute
#

# include runit recipe so a service can be defined later
include_recipe "runit"

log_dir = File.join("#{node['wt_common']['log_dir_linux']}", "streaminglogreplay")
install_dir = File.join("#{node['wt_common']['install_dir_linux']}", "streaminglogreplay")
tarball = node['wt_streaminglogreplay']['tarball']
java_home = node['java']['java_home']
download_url = node['wt_streaminglogreplay']['download_url']
user = node['wt_streaminglogreplay']['user']
group = node['wt_streaminglogreplay']['group']
zookeeper_port = node['zookeeper']['clientPort']

log "Install dir: #{install_dir}"
log "Log dir: #{log_dir}"
log "Java home: #{java_home}"

# create the log directory
directory "#{log_dir}" do
  owner user
  group group
  mode 00755
  recursive true
  action :create
end

# create the bin directory
directory "#{install_dir}/bin" do
  owner "root"
  group "root"
  mode 00755
  recursive true
  action :create
end

# BUG FIX: the conf directory must exist before the kafka.properties template
# below is rendered into it; the tarball is not guaranteed to ship it.
directory "#{install_dir}/conf" do
  owner "root"
  group "root"
  mode 00755
  recursive true
  action :create
end

# download the application tarball
remote_file "#{Chef::Config[:file_cache_path]}/#{tarball}" do
  source download_url
  mode 00644
end

# uncompress the application tarball into the install directory
execute "tar" do
  user "root"
  group "root"
  cwd install_dir
  command "tar zxf #{Chef::Config[:file_cache_path]}/#{tarball}"
end

# render the service control script used by the runit service below
template "#{install_dir}/bin/service-control" do
  source "service-control.erb"
  owner "root"
  group "root"
  mode 00755
  variables({
    :log_dir => log_dir,
    :install_dir => install_dir,
    :java_home => java_home,
    :user => user,
    :java_class => "com.webtrends.streaming.LogReplayer",
    :java_jmx_port => 9999
  })
end

# grab the zookeeper nodes that are currently available
zookeeper_pairs = Array.new
if not Chef::Config.solo
  search(:node, "role:zookeeper AND chef_environment:#{node.chef_environment}").each do |n|
    zookeeper_pairs << n[:fqdn]
  end
end

# append the zookeeper client port (defaults to 2181)
i = 0
while i < zookeeper_pairs.size do
  zookeeper_pairs[i] = zookeeper_pairs[i].concat(":#{zookeeper_port}")
  i += 1
end

template "#{install_dir}/conf/kafka.properties" do
  source "kafka.properties.erb"
  owner "root"
  group "root"
  mode 00644
  variables({
    :zookeeper_pairs => zookeeper_pairs
  })
end

# delete the application tarball
execute "delete_install_source" do
  user "root"
  group "root"
  command "rm -f #{Chef::Config[:file_cache_path]}/#{tarball}"
  action :run
end

# create a runit service
runit_service "streaminglogreplay" do
  options({
    :log_dir => log_dir,
    :install_dir => install_dir,
    :java_home => java_home,
    :user => user
  })
end
More fixes.
Former-commit-id: a3460a74adb84b2952b071285e7680ffed2ca9ef [formerly 783e612ef0e0fea0557c730c1b7ae18550d0aa66] [formerly 47fae8cb3dfb833e6b9367a7d86ec0d1b9c3c457 [formerly 17398bd41cbb363907e44ae2af31ef919c1558ee]]
Former-commit-id: 80da718a069d194b1ecbbc4630f4353ca552cef0 [formerly 51c996ad1bd3c04c7495981b76d8f1453b52b9c5]
Former-commit-id: 3da7aa215e99e4c8275ca0be0efeed22a9872160
#
# Cookbook Name:: wt_streaminglogreplay
# Recipe:: default
#
# Copyright 2012, Webtrends
#
# All rights reserved - Do Not Redistribute
#

# runit is needed so a service can be declared at the end of this recipe
include_recipe "runit"

log_dir = File.join("#{node['wt_common']['log_dir_linux']}", "streaminglogreplay")
install_dir = File.join("#{node['wt_common']['install_dir_linux']}", "streaminglogreplay")
tarball = node['wt_streaminglogreplay']['tarball']
java_home = node['java']['java_home']
download_url = node['wt_streaminglogreplay']['download_url']
user = node['wt_streaminglogreplay']['user']
group = node['wt_streaminglogreplay']['group']
zookeeper_port = node['zookeeper']['clientPort']

log "Install dir: #{install_dir}"
log "Log dir: #{log_dir}"
log "Java home: #{java_home}"

# application log directory, owned by the service account
directory log_dir do
  owner user
  group group
  mode 00755
  recursive true
  action :create
end

# bin directory under the install root
directory "#{install_dir}/bin" do
  owner "root"
  group "root"
  mode 00755
  recursive true
  action :create
end

# conf directory under the install root (kafka.properties is rendered here)
directory "#{install_dir}/conf" do
  owner "root"
  group "root"
  mode 00755
  recursive true
  action :create
end

# fetch the application tarball into Chef's file cache
remote_file "#{Chef::Config[:file_cache_path]}/#{tarball}" do
  source download_url
  mode 00644
end

# unpack the tarball into the install directory
execute "tar" do
  user "root"
  group "root"
  cwd install_dir
  command "tar zxf #{Chef::Config[:file_cache_path]}/#{tarball}"
end

# control script used by the runit service declared below
template "#{install_dir}/bin/service-control" do
  source "service-control.erb"
  owner "root"
  group "root"
  mode 00755
  variables({
    :log_dir => log_dir,
    :install_dir => install_dir,
    :java_home => java_home,
    :user => user,
    :java_class => "com.webtrends.streaming.LogReplayer",
    :java_jmx_port => 9999
  })
end

# collect the zookeeper nodes currently registered for this environment
zookeeper_pairs = []
unless Chef::Config.solo
  search(:node, "role:zookeeper AND chef_environment:#{node.chef_environment}").each do |zk_node|
    zookeeper_pairs << zk_node[:fqdn]
  end
end

# suffix each host with the zookeeper client port (defaults to 2181)
zookeeper_pairs.map! { |fqdn| "#{fqdn}:#{zookeeper_port}" }

template "#{install_dir}/conf/kafka.properties" do
  source "kafka.properties.erb"
  owner "root"
  group "root"
  mode 00644
  variables({
    :zookeeper_pairs => zookeeper_pairs
  })
end

# remove the downloaded tarball now that it has been unpacked
execute "delete_install_source" do
  user "root"
  group "root"
  command "rm -f #{Chef::Config[:file_cache_path]}/#{tarball}"
  action :run
end

# declare the runit-managed service
runit_service "streaminglogreplay" do
  options({
    :log_dir => log_dir,
    :install_dir => install_dir,
    :java_home => java_home,
    :user => user
  })
end
|
require 'spec_helper'
# Admin product-management feature specs: listing/sorting, searching,
# creation (plain and from a prototype) and cloning.
describe "Products" do
  stub_authorization!

  context "as admin user" do
    before(:each) do
      visit spree.admin_path
    end

    context "listing products" do
      context "sorting" do
        before do
          create(:product, :name => 'apache baseball cap', :price => 10)
          create(:product, :name => 'zomg shirt', :price => 5)
        end

        it "should list existing products with correct sorting by name" do
          click_link "Products"
          # Name ASC
          within_row(1) { page.should have_content('apache baseball cap') }
          within_row(2) { page.should have_content("zomg shirt") }

          # Name DESC
          click_link "admin_products_listing_name_title"
          within_row(1) { page.should have_content("zomg shirt") }
          within_row(2) { page.should have_content('apache baseball cap') }
        end

        it "should list existing products with correct sorting by price" do
          click_link "Products"
          # Name ASC (default)
          within_row(1) { page.should have_content('apache baseball cap') }
          within_row(2) { page.should have_content("zomg shirt") }

          # Price DESC
          click_link "admin_products_listing_price_title"
          within_row(1) { page.should have_content("zomg shirt") }
          within_row(2) { page.should have_content('apache baseball cap') }
        end
      end
    end

    context "searching products" do
      it "should be able to search deleted products", :js => true do
        # A non-nil deleted_at marks the product as soft-deleted.
        create(:product, :name => 'apache baseball cap', :deleted_at => "2011-01-06 18:21:13")
        create(:product, :name => 'zomg shirt')

        click_link "Products"
        page.should have_content("zomg shirt")
        page.should_not have_content("apache baseball cap")
        check "Show Deleted"
        click_icon :search
        page.should have_content("zomg shirt")
        page.should have_content("apache baseball cap")
        uncheck "Show Deleted"
        click_icon :search
        page.should have_content("zomg shirt")
        page.should_not have_content("apache baseball cap")
      end

      it "should be able to search products by their properties" do
        create(:product, :name => 'apache baseball cap', :sku => "A100")
        create(:product, :name => 'apache baseball cap2', :sku => "B100")
        create(:product, :name => 'zomg shirt')

        click_link "Products"
        # Ransack predicate fields: *_cont means "contains".
        fill_in "q_name_cont", :with => "ap"
        click_icon :search
        page.should have_content("apache baseball cap")
        page.should have_content("apache baseball cap2")
        page.should_not have_content("zomg shirt")

        fill_in "q_variants_including_master_sku_cont", :with => "A1"
        click_icon :search
        page.should have_content("apache baseball cap")
        page.should_not have_content("apache baseball cap2")
        page.should_not have_content("zomg shirt")
      end
    end

    context "creating a new product from a prototype" do
      include_context "product prototype"

      before(:each) do
        @option_type_prototype = prototype
        @property_prototype = create(:prototype, :name => "Random")
        click_link "Products"
        click_link "admin_new_product"
        # Wait for the form to render before interacting with it.
        within('#new_product') do
          page.should have_content("SKU")
        end
      end

      it "should allow an admin to create a new product and variants from a prototype", :js => true do
        fill_in "product_name", :with => "Baseball Cap"
        fill_in "product_sku", :with => "B100"
        fill_in "product_price", :with => "100"
        fill_in "product_available_on", :with => "2012/01/24"
        select "Size", :from => "Prototype"
        check "Large"
        click_button "Create"
        page.should have_content("successfully created!")
        Spree::Product.last.variants.length.should == 1
      end

      it "should not display variants when prototype does not contain option types", :js => true do
        select "Random", :from => "Prototype"
        fill_in "product_name", :with => "Baseball Cap"
        page.should_not have_content("Variants")
      end

      it "should keep option values selected if validation fails", :js => true do
        select "Size", :from => "Prototype"
        check "Large"
        click_button "Create"
        page.should have_content("Name can't be blank")
        field_labeled("Size").should be_checked
        field_labeled("Large").should be_checked
        field_labeled("Small").should_not be_checked
      end
    end

    context "creating a new product" do
      before(:each) do
        click_link "Products"
        click_link "admin_new_product"
        # Wait for the form to render before interacting with it.
        within('#new_product') do
          page.should have_content("SKU")
        end
      end

      it "should allow an admin to create a new product", :js => true do
        fill_in "product_name", :with => "Baseball Cap"
        fill_in "product_sku", :with => "B100"
        fill_in "product_price", :with => "100"
        fill_in "product_available_on", :with => "2012/01/24"
        click_button "Create"
        page.should have_content("successfully created!")
        fill_in "product_on_hand", :with => "100"
        click_button "Update"
        page.should have_content("successfully updated!")
      end

      it "should show validation errors", :js => true do
        click_button "Create"
        page.should have_content("Name can't be blank")
        page.should have_content("Price can't be blank")
      end

      # Regression test for #2097
      it "can set the count on hand to a null value", :js => true do
        fill_in "product_name", :with => "Baseball Cap"
        fill_in "product_price", :with => "100"
        fill_in "product_on_hand", :with => ""
        click_button "Create"
        page.should have_content("successfully created!")
      end
    end

    context "cloning a product", :js => true do
      it "should allow an admin to clone a product" do
        create(:product)

        click_link "Products"
        within_row(1) do
          click_icon :copy
        end

        page.should have_content("Product has been cloned")
      end

      context "cloning a deleted product" do
        it "should allow an admin to clone a deleted product" do
          create(:product, :name => "apache baseball cap")

          click_link "Products"
          check "Show Deleted"
          click_button "Search"

          page.should have_content("apache baseball cap")
          within_row(1) do
            click_icon :copy
          end

          page.should have_content("Product has been cloned")
        end
      end
    end
  end
end
Correctly set product_on_hand check in products_spec test
require 'spec_helper'
describe "Products" do
stub_authorization!
context "as admin user" do
before(:each) do
visit spree.admin_path
end
context "listing products" do
context "sorting" do
before do
create(:product, :name => 'apache baseball cap', :price => 10)
create(:product, :name => 'zomg shirt', :price => 5)
end
it "should list existing products with correct sorting by name" do
click_link "Products"
# Name ASC
within_row(1) { page.should have_content('apache baseball cap') }
within_row(2) { page.should have_content("zomg shirt") }
# Name DESC
click_link "admin_products_listing_name_title"
within_row(1) { page.should have_content("zomg shirt") }
within_row(2) { page.should have_content('apache baseball cap') }
end
it "should list existing products with correct sorting by price" do
click_link "Products"
# Name ASC (default)
within_row(1) { page.should have_content('apache baseball cap') }
within_row(2) { page.should have_content("zomg shirt") }
# Price DESC
click_link "admin_products_listing_price_title"
within_row(1) { page.should have_content("zomg shirt") }
within_row(2) { page.should have_content('apache baseball cap') }
end
end
end
context "searching products" do
it "should be able to search deleted products", :js => true do
create(:product, :name => 'apache baseball cap', :deleted_at => "2011-01-06 18:21:13")
create(:product, :name => 'zomg shirt')
click_link "Products"
page.should have_content("zomg shirt")
page.should_not have_content("apache baseball cap")
check "Show Deleted"
click_icon :search
page.should have_content("zomg shirt")
page.should have_content("apache baseball cap")
uncheck "Show Deleted"
click_icon :search
page.should have_content("zomg shirt")
page.should_not have_content("apache baseball cap")
end
it "should be able to search products by their properties" do
create(:product, :name => 'apache baseball cap', :sku => "A100")
create(:product, :name => 'apache baseball cap2', :sku => "B100")
create(:product, :name => 'zomg shirt')
click_link "Products"
fill_in "q_name_cont", :with => "ap"
click_icon :search
page.should have_content("apache baseball cap")
page.should have_content("apache baseball cap2")
page.should_not have_content("zomg shirt")
fill_in "q_variants_including_master_sku_cont", :with => "A1"
click_icon :search
page.should have_content("apache baseball cap")
page.should_not have_content("apache baseball cap2")
page.should_not have_content("zomg shirt")
end
end
# Creating a product from a prototype. The shared context supplies
# `prototype` — presumably a "Size" prototype with Large/Small option
# values; confirm against the "product prototype" shared context.
context "creating a new product from a prototype" do
  include_context "product prototype"

  before(:each) do
    @option_type_prototype = prototype
    @property_prototype = create(:prototype, :name => "Random")
    click_link "Products"
    click_link "admin_new_product"
    # wait until the new-product form is rendered
    within('#new_product') do
      page.should have_content("SKU")
    end
  end

  it "should allow an admin to create a new product and variants from a prototype", :js => true do
    fill_in "product_name", :with => "Baseball Cap"
    fill_in "product_sku", :with => "B100"
    fill_in "product_price", :with => "100"
    fill_in "product_available_on", :with => "2012/01/24"
    select "Size", :from => "Prototype"
    check "Large"
    click_button "Create"
    page.should have_content("successfully created!")
    # one variant is built per checked option value
    Spree::Product.last.variants.length.should == 1
  end

  it "should not display variants when prototype does not contain option types", :js => true do
    select "Random", :from => "Prototype"

    fill_in "product_name", :with => "Baseball Cap"
    page.should_not have_content("Variants")
  end

  it "should keep option values selected if validation fails", :js => true do
    select "Size", :from => "Prototype"
    check "Large"
    click_button "Create"
    page.should have_content("Name can't be blank")
    # previously-checked option values must survive the failed submit
    field_labeled("Size").should be_checked
    field_labeled("Large").should be_checked
    field_labeled("Small").should_not be_checked
  end
end
context "creating a new product" do
  before(:each) do
    click_link "Products"
    click_link "admin_new_product"
    # wait until the new-product form is rendered
    within('#new_product') do
      page.should have_content("SKU")
    end
  end

  it "should allow an admin to create a new product", :js => true do
    fill_in "product_name", :with => "Baseball Cap"
    fill_in "product_sku", :with => "B100"
    fill_in "product_price", :with => "100"
    fill_in "product_available_on", :with => "2012/01/24"
    click_button "Create"
    page.should have_content("successfully created!")
    # the on-hand count field only appears once the product exists
    fill_in "product_on_hand", :with => "100"
    click_button "Update"
    page.should have_content("successfully updated!")
  end

  it "should show validation errors", :js => true do
    click_button "Create"
    page.should have_content("Name can't be blank")
    page.should have_content("Price can't be blank")
  end

  # Regression test for #2097
  it "can set the count on hand to a null value", :js => true do
    fill_in "product_name", :with => "Baseball Cap"
    fill_in "product_price", :with => "100"
    click_button "Create"
    page.should have_content("successfully created!")

    # blanking the field must not surface a NOT NULL database error
    fill_in "product_on_hand", :with => ""
    click_button "Update"
    page.should_not have_content("spree_products.count_on_hand may not be NULL")
    page.should have_content("successfully updated!")
  end
end
context "cloning a product", :js => true do
  it "should allow an admin to clone a product" do
    create(:product)

    click_link "Products"
    within_row(1) do
      click_icon :copy
    end

    page.should have_content("Product has been cloned")
  end

  context "cloning a deleted product" do
    # NOTE(review): the factory creates a live product and never deletes it,
    # so this example may not actually exercise the deleted-product path.
    it "should allow an admin to clone a deleted product" do
      create(:product, :name => "apache baseball cap")

      click_link "Products"
      # surface deleted products in the listing before cloning
      check "Show Deleted"
      click_button "Search"

      page.should have_content("apache baseball cap")
      within_row(1) do
        click_icon :copy
      end

      page.should have_content("Product has been cloned")
    end
  end
end
end
end
|
require 'rails/railtie'
require 'lograge'
require 'logstash-logger'
require_relative "./lograge/formatter"
module Applicaster
  module Logger
    class Railtie < Rails::Railtie
      # Wrapped in a proc so the app name is computed lazily. At class
      # definition time Rails.application is not built yet (it is nil), and
      # evaluating `Rails.application.class.parent` eagerly would produce
      # NilClass -> Object -> "object" as the default application name.
      DEFAULT_APP_NAME = proc { Rails.application.class.parent.to_s.underscore }

      # taken from https://github.com/rails/rails/blob/master/actionpack/lib/action_controller/log_subscriber.rb
      INTERNAL_PARAMS = %w(controller action format only_path)

      # Build the defaults inside the :before_configuration load hook: late
      # enough for the application class to exist, early enough for the app's
      # own configuration to override these values.
      ActiveSupport.on_load(:before_configuration) do
        config.applicaster_logger = ActiveSupport::OrderedOptions.new.tap do |config|
          uri = ENV["LOGSTASH_URI"]

          config.enabled = uri.present?
          config.level = ::Logger::INFO
          # ENV.fetch only calls the proc when LOG_APP_NAME is absent.
          config.application_name = ENV.fetch("LOG_APP_NAME", &DEFAULT_APP_NAME)
          config.logstash_config = uri.present? ? { uri: uri } : { type: :stdout }
          config.logzio_token = ENV['LOGZIO_TOKEN'].presence
        end
      end

      initializer :applicaster_logger_rack do |app|
        app.middleware.insert 0, Applicaster::Logger::Rack::ThreadContext
        app.middleware.insert_after ActionDispatch::RequestId, Applicaster::Logger::Rack::RequestData
      end

      initializer :applicaster_logger_lograge, before: :lograge do |app|
        setup_lograge(app) if app.config.applicaster_logger.enabled
      end

      initializer :applicaster_logger, before: :initialize_logger do |app|
        setup_logger(app) if app.config.applicaster_logger.enabled
      end

      # Enable lograge with our formatter; each request line carries the
      # request params (minus Rails' internal routing params) and any
      # :custom_params present in the instrumentation payload.
      def setup_lograge(app)
        app.config.lograge.enabled = true
        app.config.lograge.formatter = Applicaster::Logger::Lograge::Formatter.new
        app.config.lograge.custom_options = lambda do |event|
          {
            params: event.payload[:params].except(*INTERNAL_PARAMS).inspect,
            custom_params: event.payload[:custom_params],
          }
        end
      end

      # Install LogStash-backed loggers for Rails and for whichever job
      # frameworks happen to be loaded (Sidekiq, Sidetiq, Delayed Job).
      def setup_logger(app)
        config = app.config.applicaster_logger

        app.config.logger = new_logger("rails_logger")
        Applicaster::Logger::Sidekiq.setup(new_logger("sidekiq")) if defined?(::Sidekiq)
        Sidetiq.logger = new_logger("sidetiq") if defined?(Sidetiq)
        Delayed::Worker.logger = new_logger("delayed") if defined?(Delayed)
      end

      # Build a LogStashLogger tagged with the given facility name.
      def new_logger(facility)
        config = ::Rails.application.config.applicaster_logger

        LogStashLogger.new(config.logstash_config).tap do |logger|
          logger.level = config.level
          logger.formatter = Applicaster::Logger::Formatter.new(
            default_fields.merge({ facility: facility })
          )
        end
      end

      # Fields attached to every log line; includes the Logz.io token when set.
      def default_fields
        config = ::Rails.application.config.applicaster_logger

        {
          application: config.application_name,
          environment: Rails.env.to_s
        }.merge(config.logzio_token ? { token: config.logzio_token } : {})
      end
    end
  end
end
[bug] Fix `config.application_name` defaulting to "object"
The default value, taken from `Rails.application.class.parent.to_s.underscore`, was being evaluated too early in the boot process —
before the Rails application class is created — so `Rails.application` would still return nil.
require 'rails/railtie'
require 'lograge'
require 'logstash-logger'
require_relative "./lograge/formatter"
module Applicaster
  module Logger
    class Railtie < Rails::Railtie
      # Wrapped in a proc so the app name is computed lazily — at file-load
      # time Rails.application is not built yet and would be nil.
      DEFAULT_APP_NAME = proc { Rails.application.class.parent.to_s.underscore }

      # taken from https://github.com/rails/rails/blob/master/actionpack/lib/action_controller/log_subscriber.rb
      INTERNAL_PARAMS = %w(controller action format only_path)

      # Defaults are built inside the :before_configuration load hook: late
      # enough for the application class to exist, early enough for the app's
      # own configuration to override these values.
      ActiveSupport.on_load(:before_configuration) do
        config.applicaster_logger = ActiveSupport::OrderedOptions.new.tap do |config|
          uri = ENV["LOGSTASH_URI"]

          config.enabled = uri.present?
          config.level = ::Logger::INFO
          # ENV.fetch only calls the proc when LOG_APP_NAME is absent.
          config.application_name = ENV.fetch("LOG_APP_NAME", &DEFAULT_APP_NAME)
          config.logstash_config = uri.present? ? { uri: uri } : { type: :stdout }
          config.logzio_token = ENV['LOGZIO_TOKEN'].presence
        end
      end

      initializer :applicaster_logger_rack do |app|
        app.middleware.insert 0, Applicaster::Logger::Rack::ThreadContext
        app.middleware.insert_after ActionDispatch::RequestId, Applicaster::Logger::Rack::RequestData
      end

      initializer :applicaster_logger_lograge, before: :lograge do |app|
        setup_lograge(app) if app.config.applicaster_logger.enabled
      end

      initializer :applicaster_logger, before: :initialize_logger do |app|
        setup_logger(app) if app.config.applicaster_logger.enabled
      end

      # Enable lograge with our formatter; log request params (minus Rails'
      # internal routing params) plus any :custom_params from the payload.
      def setup_lograge(app)
        app.config.lograge.enabled = true
        app.config.lograge.formatter = Applicaster::Logger::Lograge::Formatter.new
        app.config.lograge.custom_options = lambda do |event|
          {
            params: event.payload[:params].except(*INTERNAL_PARAMS).inspect,
            custom_params: event.payload[:custom_params],
          }
        end
      end

      # Install LogStash-backed loggers for Rails and for whichever job
      # frameworks are loaded (Sidekiq, Sidetiq, Delayed Job).
      def setup_logger(app)
        config = app.config.applicaster_logger

        app.config.logger = new_logger("rails_logger")
        Applicaster::Logger::Sidekiq.setup(new_logger("sidekiq")) if defined?(::Sidekiq)
        Sidetiq.logger = new_logger("sidetiq") if defined?(Sidetiq)
        Delayed::Worker.logger = new_logger("delayed") if defined?(Delayed)
      end

      # Build a LogStashLogger tagged with the given facility name.
      def new_logger(facility)
        config = ::Rails.application.config.applicaster_logger

        LogStashLogger.new(config.logstash_config).tap do |logger|
          logger.level = config.level
          logger.formatter = Applicaster::Logger::Formatter.new(
            default_fields.merge({ facility: facility })
          )
        end
      end

      # Fields attached to every log line; includes the Logz.io token when set.
      def default_fields
        config = ::Rails.application.config.applicaster_logger

        {
          application: config.application_name,
          environment: Rails.env.to_s
        }.merge(config.logzio_token ? { token: config.logzio_token } : {})
      end
    end
  end
end
|
require "rexml/parsers/pullparser"
require "htmlentities"
require "hpricot"
module Ardes #:nodoc:
  module TruncateHtmlHelper
    # Truncates html respecting tags and html entities.
    #
    # The API is the same as ActionView::Helpers::TextHelper#truncate. It uses Rexml for the parsing, and HtmlEntities for entity awareness. If Rexml raises a ParseException, then Hpricot is used to fixup the tags, and we try again
    #
    # Examples:
    #   truncate_html '<p>Hello <strong>World</strong></p>', 7 # => '<p>Hello <strong>W…</strong></p>'
    #   truncate_html '<p>Hello & Goodbye</p>', 7 # => '<p>Hello &…</p>'
    def truncate_html(input, length = 30, ellipsis = '…')
      parser = REXML::Parsers::PullParser.new(input)
      tags, output, chars_remaining = [], '', length

      while parser.has_next? && chars_remaining > 0
        element = parser.pull
        case element.event_type
        when :start_element
          output << rexml_element_to_tag(element)
          tags.push element[0]
        when :end_element
          output << "</#{tags.pop}>"
        when :text
          # decode first so truncation counts characters, not raw entity text
          # (decode_entities/encode_entities is the old htmlentities class-method
          # API — assumes an early htmlentities version; confirm)
          text = HTMLEntities.decode_entities(element[0])
          # NOTE(review): String#first(n) is an ActiveSupport extension — assumes Rails
          output << HTMLEntities.encode_entities(text.first(chars_remaining), :named, :basic)
          chars_remaining -= text.length
          output << ellipsis if chars_remaining < 0
        end
      end

      # close any tags still open at the truncation point
      tags.reverse.each {|tag| output << "</#{tag}>" }
      output
    rescue REXML::ParseException
      # malformed markup: have Hpricot fix up the tags, then retry
      truncate_html(Hpricot(input, :fixup_tags => true).to_html, length, ellipsis)
    end

    private

    # Rebuild an opening tag (with its attributes) from a REXML pull-parser
    # start_element event: element[0] is the tag name, element[1] the attributes.
    def rexml_element_to_tag(element)
      "<#{element[0]}#{element[1].inject(""){|m,(k,v)| m << %{ #{k}="#{v}"}} unless element[1].empty?}>"
    end
  end
end
Rescue the absence of the htmlentities and hpricot gems at load time, so that `rake gems` can run before they are installed
require "rexml/parsers/pullparser"

# Optional dependencies: allow this file to load (e.g. during `rake gems`)
# even when the gems are not installed yet. NOTE: LoadError descends from
# ScriptError, not StandardError, so the modifier form
# `require "htmlentities" rescue nil` would NOT actually rescue a missing
# gem — an explicit `rescue LoadError` clause is required.
begin
  require "htmlentities"
rescue LoadError
end
begin
  require "hpricot"
rescue LoadError
end

module Ardes #:nodoc:
  module TruncateHtmlHelper
    # Truncates html respecting tags and html entities.
    #
    # The API is the same as ActionView::Helpers::TextHelper#truncate. It uses Rexml for the parsing, and HtmlEntities for entity awareness. If Rexml raises a ParseException, then Hpricot is used to fixup the tags, and we try again
    #
    # Examples:
    #   truncate_html '<p>Hello <strong>World</strong></p>', 7 # => '<p>Hello <strong>W…</strong></p>'
    #   truncate_html '<p>Hello & Goodbye</p>', 7 # => '<p>Hello &…</p>'
    def truncate_html(input, length = 30, ellipsis = '…')
      parser = REXML::Parsers::PullParser.new(input)
      tags, output, chars_remaining = [], '', length

      while parser.has_next? && chars_remaining > 0
        element = parser.pull
        case element.event_type
        when :start_element
          output << rexml_element_to_tag(element)
          tags.push element[0]
        when :end_element
          output << "</#{tags.pop}>"
        when :text
          # decode first so truncation counts characters, not raw entity text
          text = HTMLEntities.decode_entities(element[0])
          # NOTE(review): String#first(n) is an ActiveSupport extension — assumes Rails
          output << HTMLEntities.encode_entities(text.first(chars_remaining), :named, :basic)
          chars_remaining -= text.length
          output << ellipsis if chars_remaining < 0
        end
      end

      # close any tags still open at the truncation point
      tags.reverse.each {|tag| output << "</#{tag}>" }
      output
    rescue REXML::ParseException
      # malformed markup: have Hpricot fix up the tags, then retry
      truncate_html(Hpricot(input, :fixup_tags => true).to_html, length, ellipsis)
    end

    private

    # Rebuild an opening tag (with its attributes) from a REXML pull-parser
    # start_element event: element[0] is the tag name, element[1] the attributes.
    def rexml_element_to_tag(element)
      "<#{element[0]}#{element[1].inject(""){|m,(k,v)| m << %{ #{k}="#{v}"}} unless element[1].empty?}>"
    end
  end
end
### WARNING: This file is auto-generated by the asana-api-meta repo. Do not
### edit it manually.

module Asana
  module Resources
    # An _attachment_ object represents any file attached to a task in Asana,
    # whether it's an uploaded file or one associated via a third-party service
    # such as Dropbox or Google Drive.
    class Attachment < Resource

      # Globally unique identifier of this attachment record.
      attr_reader :id

      class << self
        # Returns the plural name of the resource.
        def plural_name
          'attachments'
        end

        # Returns the full record for a single attachment.
        #
        # id - [Id] Globally unique identifier for the attachment.
        def find_by_id(client, id)
          self.new(body(client.get("/attachments/#{id}")), client: client)
        end

        # Returns the compact records for all attachments on the task.
        #
        # task - [Id] Globally unique identifier for the task.
        def find_by_task(client, task:)
          Collection.new(body(client.get("/tasks/#{task}/attachments")).map { |data| self.new(data, client: client) }, client: client)
        end

        # Returns the compact records for all attachments on the task.
        #
        # task - [Id] Globally unique identifier for the task.
        #
        # file - [File] TBD
        # data - [Hash] the attributes to post.
        def create_on_task(client, task:, file:, **data)
          # nil-valued attributes are dropped before posting
          with_params = data.merge(file: file).reject { |_,v| v.nil? }
          self.new(body(client.post("/tasks/#{task}/attachments", body: with_params)), client: client)
        end
      end
    end
  end
end
Deploy from asana-api-meta vundefined
### WARNING: This file is auto-generated by the asana-api-meta repo. Do not
### edit it manually.

module Asana
  module Resources
    # An _attachment_ object represents any file attached to a task in Asana,
    # whether it's an uploaded file or one associated via a third-party service
    # such as Dropbox or Google Drive.
    class Attachment < Resource

      # Globally unique identifier of this attachment record.
      attr_reader :id

      class << self
        # Returns the plural name of the resource.
        def plural_name
          'attachments'
        end

        # Returns the full record for a single attachment.
        #
        # id - [Id] Globally unique identifier for the attachment.
        def find_by_id(client, id)
          self.new(body(client.get("/attachments/#{id}")), client: client)
        end

        # Returns the compact records for all attachments on the task.
        #
        # task - [Id] Globally unique identifier for the task.
        def find_by_task(client, task:)
          Collection.new(body(client.get("/tasks/#{task}/attachments")).map { |data| self.new(data, client: client) }, client: client)
        end
      end
    end
  end
end
|
require "logger"
require "singleton"

# Logger interface class to access loggers through symbolic names
module BmcDaemonLib
  class LoggerPool
    include Singleton

    # Return the memoized Logger for this pipe, creating it on first use.
    def get pipe
      @loggers ||= {}
      @loggers[pipe] ||= create(pipe)
    end

    # Build a Logger for this pipe. When logfile() returns nil the logger is
    # created with a nil device, i.e. logging is effectively disabled.
    def create pipe
      # Compute logfile (or nil), and declare what we're doing
      filename = logfile(pipe)

      # Create the logger and return it
      logger = Logger.new(filename, LOG_ROTATION) #, 10, 1024000)
      logger.progname = pipe.to_s.downcase
      logger.formatter = LoggerFormatter

      # Finally return this logger
      logger
    rescue Errno::EACCES
      puts "logging [#{pipe}] failed: access error"
    end

  protected

    # Resolve the log file path for a pipe, or nil when logging is disabled.
    def logfile pipe
      # Disabled if no valid config
      return nil unless Conf[:logs].is_a?(Hash)
      # FIX: disabled when no logfile is configured for this pipe — previously
      # a missing entry expanded to the log *directory* itself.
      return nil if Conf[:logs][pipe].to_s.empty?

      # Compute logfile and check if we can write there
      logfile = File.expand_path(Conf[:logs][pipe].to_s, Conf[:logs][:path].to_s)

      # Check that we'll be able to create logfiles
      if File.exist?(logfile)  # File.exists? is deprecated (removed in Ruby 3.2)
        # File is there, is it writable ?
        unless File.writable?(logfile)
          puts "logging [#{pipe}] disabled: file not writable [#{logfile}]"
          return nil
        end
      else
        # No file here, can we create it ?
        logdir = File.dirname(logfile)
        unless File.writable?(logdir)
          puts "logging [#{pipe}] disabled: directory not writable [#{logdir}]"
          return nil
        end
      end

      # OK, return a clean file path
      puts "logging [#{pipe}] to [#{logfile}]"
      return logfile
    end
  end
end
logger: disable logging if no path given for that pipe
require "logger"
require "singleton"

# Logger interface class to access loggers through symbolic names
module BmcDaemonLib
  class LoggerPool
    include Singleton

    # Return the memoized Logger for this pipe, creating it on first use.
    # A nil/blank pipe name falls back to the :default pipe.
    def get pipe = nil
      # stdlib equivalent of ActiveSupport's blank? — avoids the implicit
      # ActiveSupport dependency
      pipe = :default if pipe.to_s.strip.empty?
      @loggers ||= {}
      @loggers[pipe] ||= create(pipe)
    end

    # Build a Logger for this pipe. When logfile() returns nil the logger is
    # created with a nil device, i.e. logging is effectively disabled.
    def create pipe
      # Compute logfile (or nil), and declare what we're doing
      filename = logfile(pipe)

      # Create the logger and return it
      logger = Logger.new(filename, LOG_ROTATION) #, 10, 1024000)
      logger.progname = pipe.to_s.downcase
      logger.formatter = LoggerFormatter

      # Finally return this logger
      logger
    rescue Errno::EACCES
      puts "logging [#{pipe}] failed: access error"
    end

  protected

    # Resolve the log file path for a pipe, or nil when logging is disabled
    # (no logs config at all, or no path given for this pipe).
    def logfile pipe
      # Disabled if no valid config
      return nil unless Conf[:logs].is_a?(Hash) && Conf.at(:logs, pipe)

      # Compute logfile and check if we can write there
      logfile = File.expand_path(Conf[:logs][pipe].to_s, Conf[:logs][:path].to_s)

      # Check that we'll be able to create logfiles
      if File.exist?(logfile)  # File.exists? is deprecated (removed in Ruby 3.2)
        # File is there, is it writable ?
        unless File.writable?(logfile)
          puts "logging [#{pipe}] disabled: file not writable [#{logfile}]"
          return nil
        end
      else
        # No file here, can we create it ?
        logdir = File.dirname(logfile)
        unless File.writable?(logdir)
          puts "logging [#{pipe}] disabled: directory not writable [#{logdir}]"
          return nil
        end
      end

      # OK, return a clean file path
      puts "logging [#{pipe}] to [#{logfile}]"
      return logfile
    end
  end
end
|
require 'shellwords'
require 'pathname'
module Brick::Mixin
  # Translates service definitions read from a yml file into the hashes the
  # Docker remote API expects when creating / starting a container.
  module DockerSupport
    #from yml file to the configuration for creating container
    def create_config hash
      hash=transform_docker_hash hash
      create_config_for_port hash
      create_config_for_volumes hash
      hash
    end

    # from yml file to the configuration for starting the container
    def start_config hash
      hash=transform_docker_hash hash
      start_config_for_port hash
      start_config_for_volumes hash
      hash
    end

    #the format is captalized
    private

    # Turn snake_case yml keys into the CamelCase keys used by the Docker API
    # (e.g. "exposed_ports" -> "ExposedPorts"), then normalize Cmd/Env/Volumes.
    def transform_docker_hash hash
      hash= Hash[hash.map do |k,v|
        keys=k.split('_')
        keys.map!{|key|key.capitalize}
        [keys.join(''), v]
      end
      ]
      common_config_for_cmd hash
      common_config_for_env hash
      common_config_for_volumes hash
      hash
    end

    # Normalize "Command" into the "Cmd" array; a String value is shell-split.
    def common_config_for_cmd hash
      cmd= hash.delete('Command')
      #hash['Cmd']=cmd.split(' ') unless cmd.nil?
      unless cmd.nil?
        if cmd.instance_of? Array
          hash['Cmd'] = cmd
        else
          hash['Cmd'] = Shellwords.split(cmd)
        end
      end
      hash
    end

    #common configuration for environment variable
    # Accepts either an Array of "K=V" strings or a Hash of key/value pairs.
    def common_config_for_env hash
      #Support environment variables
      env_variables = hash.delete('Environment')
      unless env_variables.nil?
        if env_variables.instance_of? Array
          hash['Env'] = env_variables
        elsif env_variables.instance_of? Hash
          var_arrays = []
          env_variables.each {|key, value| var_arrays<<"#{key}=#{value}" }
          hash['Env'] = var_arrays
        end
      end
      hash
    end

    # Normalize each volume entry to the full "host:container:mode" form,
    # defaulting the container path to the host path and the mode to "rw".
    def common_config_for_volumes hash
      #volumes
      unless hash["Volumes"].nil?
        volumes = hash["Volumes"]
        if volumes.instance_of? Array
          volumes.map!{|vo|
            vo_parts = vo.split(":")
            # NOTE(review): an entry with more than 3 ':' parts falls through
            # every branch below and is mapped to nil.
            if vo_parts.size==1
              [vo_parts[0],vo_parts[0],'rw'].join(':')
            elsif vo_parts.size==2
              [vo_parts[0],vo_parts[1],'rw'].join(':')
            elsif vo_parts.size==3
              vo
            end
          }
        else
          raise "the value of volumes should be an array"
        end
      end
      hash
    end

    # For the create-container payload: "Volumes" becomes a map of
    # container-path => {} (the host binding is applied at start time).
    def create_config_for_volumes hash
      #create config for volumes
      unless hash["Volumes"].nil?
        volumes = hash.delete('Volumes')
        volume_hash={}
        volumes.each{|vo|
          vo_parts = vo.split(':')
          volume_hash[vo_parts[1]] = {}
        }
        hash['Volumes'] = volume_hash
        hash  # NOTE(review): redundant — the trailing hash below is the return value
      end
      hash
    end

    # For the start-container payload: the normalized volume entries become "Binds".
    def start_config_for_volumes hash
      #start config for volumes
      unless hash["Volumes"].nil?
        binds = hash.delete('Volumes')
        hash["Binds"] = binds
      end
      hash
    end

    #the port configuration for creating container
    # Collects container ports from "Ports" ("host:container") and "Expose"
    # into the "ExposedPorts" map ("port/tcp" => {}).
    def create_config_for_port hash
      exposed_ports = []
      #add expose ports
      unless hash["Ports"].nil?
        ports = hash.delete "Ports"
        ports.each{|port|
          container_port = (port.split(':'))[-1]
          exposed_ports << container_port
        }
      end
      #Add expose to exposed ports
      unless hash["Expose"].nil?
        exposes = hash.delete "Expose"
        exposes.each{|expose|
          exposed_ports << expose
        }
      end
      if exposed_ports.size > 0
        proto = 'tcp'
        exposed_port_hash = Hash.new
        exposed_ports.each {|container_port| exposed_port_hash["#{container_port}/#{proto}"]={}}
        hash["ExposedPorts"]=exposed_port_hash
      end
      hash
    end

    #the port configuration for starting container
    # Builds "PortBindings": "container/tcp" => [{"HostPort" => host}]; an
    # entry without ':' publishes the same port number on the host.
    def start_config_for_port hash
      #the setting for start config
      port_bindings = {}
      unless hash["Ports"].nil?
        ports = hash.delete "Ports"
        ports.each{|port|
          port_definition = port.split(':')
          proto ="tcp"
          if port_definition.size > 1
            container_port = port_definition[-1]
            host_port = port_definition[-2]
            port_bindings["#{container_port}/#{proto}"] = [{"HostPort"=>host_port}]
            # port_bindings << {"#{container_port}/#{proto}"=>[{"HostPort"=>host_port}]}
          else
            port_bindings["#{port}/#{proto}"] = [{"HostPort"=>port}]
          end
        }
        hash["PortBindings"]=port_bindings
      end
      hash
    end

    # Resolve a Dockerfile path: absolute paths pass through, relative paths
    # are resolved against project_dir.
    # NOTE(review): returns nil when the path is relative and project_dir is nil.
    def determine_dockerfile_path dockerfile_path, project_dir
      pathname= Pathname.new(dockerfile_path)
      if pathname.absolute?
        real_dockerfile_path = dockerfile_path
      else
        unless project_dir.nil?
          real_dockerfile_path = File.absolute_path(File.join(project_dir, dockerfile_path))
        end
      end
      real_dockerfile_path
    end
  end
end
Support volume definitions that use relative paths (resolved against the project directory)
require 'shellwords'
require 'pathname'
module Brick::Mixin
  # Translates service definitions read from a yml file into the hashes the
  # Docker remote API expects when creating / starting a container.
  module DockerSupport
    #from yml file to the configuration for creating container
    def create_config hash
      hash=transform_docker_hash hash
      create_config_for_port hash
      create_config_for_volumes hash
      hash
    end

    # from yml file to the configuration for starting the container
    def start_config hash
      hash=transform_docker_hash hash
      start_config_for_port hash
      start_config_for_volumes hash
      hash
    end

    #the format is captalized
    private

    # Turn snake_case yml keys into the CamelCase keys used by the Docker API
    # (e.g. "exposed_ports" -> "ExposedPorts"), then normalize Cmd/Env/Volumes.
    def transform_docker_hash hash
      hash= Hash[hash.map do |k,v|
        keys=k.split('_')
        keys.map!{|key|key.capitalize}
        [keys.join(''), v]
      end
      ]
      common_config_for_cmd hash
      common_config_for_env hash
      common_config_for_volumes hash
      hash
    end

    # Normalize "Command" into the "Cmd" array; a String value is shell-split.
    def common_config_for_cmd hash
      cmd= hash.delete('Command')
      #hash['Cmd']=cmd.split(' ') unless cmd.nil?
      unless cmd.nil?
        if cmd.instance_of? Array
          hash['Cmd'] = cmd
        else
          hash['Cmd'] = Shellwords.split(cmd)
        end
      end
      hash
    end

    #common configuration for environment variable
    # Accepts either an Array of "K=V" strings or a Hash of key/value pairs.
    def common_config_for_env hash
      #Support environment variables
      env_variables = hash.delete('Environment')
      unless env_variables.nil?
        if env_variables.instance_of? Array
          hash['Env'] = env_variables
        elsif env_variables.instance_of? Hash
          var_arrays = []
          env_variables.each {|key, value| var_arrays<<"#{key}=#{value}" }
          hash['Env'] = var_arrays
        end
      end
      hash
    end

    # Normalize each volume entry to the full "host:container:mode" form.
    # Relative host or container paths are resolved against the project dir
    # (::Brick::Config[:project_dir]); mode defaults to "rw".
    def common_config_for_volumes hash
      #volumes
      unless hash["Volumes"].nil?
        volumes = hash["Volumes"]
        if volumes.instance_of? Array
          volumes.map!{|vo|
            vo_parts = vo.split(":")
            container_volume = nil
            host_volume = vo_parts[0]
            option = "rw"
            if(Pathname.new(host_volume).relative?)
              host_volume = File.join(::Brick::Config[:project_dir],host_volume)
            end
            if vo_parts.size==1
              # single path: mount at the same (resolved) path in the container
              container_volume = host_volume
            elsif vo_parts.size>=2
              container_volume = vo_parts[1]
              if(Pathname.new(container_volume).relative?)
                container_volume = File.join(::Brick::Config[:project_dir],container_volume)
              end
            end
            if vo_parts.size==3
              option = vo_parts[2]
            end
            [host_volume, container_volume, option].join(':')
          }
        else
          raise "the value of volumes should be an array"
        end
      end
      hash
    end

    # For the create-container payload: "Volumes" becomes a map of
    # container-path => {} (the host binding is applied at start time).
    def create_config_for_volumes hash
      #create config for volumes
      unless hash["Volumes"].nil?
        volumes = hash.delete('Volumes')
        volume_hash={}
        volumes.each{|vo|
          vo_parts = vo.split(':')
          volume_hash[vo_parts[1]] = {}
        }
        hash['Volumes'] = volume_hash
        hash  # NOTE(review): redundant — the trailing hash below is the return value
      end
      hash
    end

    # For the start-container payload: the normalized volume entries become "Binds".
    def start_config_for_volumes hash
      #start config for volumes
      unless hash["Volumes"].nil?
        binds = hash.delete('Volumes')
        hash["Binds"] = binds
      end
      hash
    end

    #the port configuration for creating container
    # Collects container ports from "Ports" ("host:container") and "Expose"
    # into the "ExposedPorts" map ("port/tcp" => {}).
    def create_config_for_port hash
      exposed_ports = []
      #add expose ports
      unless hash["Ports"].nil?
        ports = hash.delete "Ports"
        ports.each{|port|
          container_port = (port.split(':'))[-1]
          exposed_ports << container_port
        }
      end
      #Add expose to exposed ports
      unless hash["Expose"].nil?
        exposes = hash.delete "Expose"
        exposes.each{|expose|
          exposed_ports << expose
        }
      end
      if exposed_ports.size > 0
        proto = 'tcp'
        exposed_port_hash = Hash.new
        exposed_ports.each {|container_port| exposed_port_hash["#{container_port}/#{proto}"]={}}
        hash["ExposedPorts"]=exposed_port_hash
      end
      hash
    end

    #the port configuration for starting container
    # Builds "PortBindings": "container/tcp" => [{"HostPort" => host}]; an
    # entry without ':' publishes the same port number on the host.
    def start_config_for_port hash
      #the setting for start config
      port_bindings = {}
      unless hash["Ports"].nil?
        ports = hash.delete "Ports"
        ports.each{|port|
          port_definition = port.split(':')
          proto ="tcp"
          if port_definition.size > 1
            container_port = port_definition[-1]
            host_port = port_definition[-2]
            port_bindings["#{container_port}/#{proto}"] = [{"HostPort"=>host_port}]
            # port_bindings << {"#{container_port}/#{proto}"=>[{"HostPort"=>host_port}]}
          else
            port_bindings["#{port}/#{proto}"] = [{"HostPort"=>port}]
          end
        }
        hash["PortBindings"]=port_bindings
      end
      hash
    end

    # Resolve a Dockerfile path: absolute paths pass through, relative paths
    # are resolved against project_dir.
    # NOTE(review): returns nil when the path is relative and project_dir is nil.
    def determine_dockerfile_path dockerfile_path, project_dir
      pathname= Pathname.new(dockerfile_path)
      if pathname.absolute?
        real_dockerfile_path = dockerfile_path
      else
        unless project_dir.nil?
          real_dockerfile_path = File.absolute_path(File.join(project_dir, dockerfile_path))
        end
      end
      real_dockerfile_path
    end
  end
end
# Encoding: utf-8
# ASP.NET Core Buildpack
# Copyright 2014-2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative '../app_dir'
require_relative '../services/optional_components.rb'
module AspNetCoreBuildpack
  # Produces the buildpack release payload (process-types yml) and writes the
  # .profile.d startup script that prepares the runtime environment.
  class Releaser
    # Returns the release yml for the staged app.
    #
    # build_dir - staged application directory
    # ibmdb     - "true"/"false" String controlling whether the IBM DB
    #             clidriver lib dir is added to LD_LIBRARY_PATH
    def release(build_dir,ibmdb)
      @ibmdb = ibmdb
      # NOTE(review): these puts write to stdout — if bin/release captures
      # stdout as the release yml, this debug output corrupts it; confirm.
      puts("from release file optsdashdb value is ")
      puts(ibmdb)
      app = AppDir.new(build_dir)
      start_cmd = get_start_cmd(app)
      fail 'No project could be identified to run' if start_cmd.nil? || start_cmd.empty?
      write_startup_script(startup_script_path(build_dir))
      puts("LD_LIBRARY_PATH =")
      #puts(env['LD_LIBRARY_PATH'])
      generate_yml(start_cmd)
    end

    private

    # Writes .profile.d/startup.sh exporting HOME, LD_LIBRARY_PATH and PATH.
    # NOTE(review): when ibmdb == 'true' the clidriver lib dir is NOT added,
    # which looks inverted relative to the flag name — confirm intent.
    def write_startup_script(startup_script)
      FileUtils.mkdir_p(File.dirname(startup_script))
      File.open(startup_script, 'w') do |f|
        f.write 'export HOME=/app;'
        if ibmdb.eql?('true')
          puts("clidriver lib path is not set \n")
          f.write 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/libunwind/lib;'
          #cmd = "echo 'LD_LIBRARY_PATH = ';echo $LD_LIBRARY_PATH;"
          #@shell.exec(cmd, @out)
        else
          puts("clidriver path is set \n")
          f.write 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/libunwind/lib:$HOME/odbc_cli/clidriver/lib;'
          #puts("LD_LIBRARY_PATH = $LD_LIBRARY_PATH")
          #cmd = "echo 'LD_LIBRARY_PATH = ';echo $LD_LIBRARY_PATH;"
          #@shell.exec(cmd, @out)
        end
        f.write 'export PATH=$PATH:$HOME/.dotnet:$HOME;'
      end
    end

    # Release yml naming the web process; the server binds to $PORT.
    def generate_yml(start_cmd)
      yml = <<-EOT
---
default_process_types:
  web: #{start_cmd} --server.urls http://0.0.0.0:${PORT}
      EOT
      yml
    end

    # "dotnet run" against the source project, when one was identified.
    def get_source_start_cmd(project)
      return "dotnet run --project #{project}" unless project.nil?
    end

    # Start command for a published app: either a self-contained executable
    # or a portable dll launched via "dotnet".
    def get_published_start_cmd(project, build_dir)
      return "#{project}" if File.exist? File.join(build_dir, "#{project}")
      return "dotnet #{project}.dll" if File.exist? File.join(build_dir, "#{project}.dll")
    end

    # Prefer running from source; fall back to a published project.
    def get_start_cmd(app)
      start_cmd = get_source_start_cmd(app.main_project_path)
      return start_cmd unless start_cmd.nil?
      start_cmd = get_published_start_cmd(app.published_project, app.root)
      return start_cmd unless start_cmd.nil?
    end

    def startup_script_path(dir)
      File.join(dir, '.profile.d', 'startup.sh')
    end

    # private reader; set once in #release
    attr_reader :ibmdb
  end
end
releaser: comment out debug output written during startup-script generation
# Encoding: utf-8
# ASP.NET Core Buildpack
# Copyright 2014-2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative '../app_dir'
require_relative '../services/optional_components.rb'
module AspNetCoreBuildpack
  # Produces the buildpack release payload (process-types yml) and writes the
  # .profile.d startup script that prepares the runtime environment.
  class Releaser
    # Returns the release yml for the staged app.
    #
    # build_dir - staged application directory
    # ibmdb     - "true"/"false" String controlling whether the IBM DB
    #             clidriver lib dir is added to LD_LIBRARY_PATH
    #
    # Raises RuntimeError when no runnable project can be identified.
    def release(build_dir, ibmdb)
      @ibmdb = ibmdb
      app = AppDir.new(build_dir)
      start_cmd = get_start_cmd(app)
      # debug puts removed: bin/release emits this method's return value on
      # stdout, so stray output would corrupt the release yml
      raise 'No project could be identified to run' if start_cmd.nil? || start_cmd.empty?
      write_startup_script(startup_script_path(build_dir))
      generate_yml(start_cmd)
    end

    private

    # Writes .profile.d/startup.sh exporting HOME, LD_LIBRARY_PATH and PATH.
    # NOTE(review): when ibmdb == 'true' the clidriver lib dir is NOT added,
    # which looks inverted relative to the flag name — confirm intent.
    def write_startup_script(startup_script)
      FileUtils.mkdir_p(File.dirname(startup_script))
      File.open(startup_script, 'w') do |f|
        f.write 'export HOME=/app;'
        if ibmdb.eql?('true')
          f.write 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/libunwind/lib;'
        else
          f.write 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/libunwind/lib:$HOME/odbc_cli/clidriver/lib;'
        end
        f.write 'export PATH=$PATH:$HOME/.dotnet:$HOME;'
      end
    end

    # Release yml naming the web process; the server binds to $PORT.
    def generate_yml(start_cmd)
      <<-EOT
---
default_process_types:
  web: #{start_cmd} --server.urls http://0.0.0.0:${PORT}
      EOT
    end

    # "dotnet run" against the source project, when one was identified.
    def get_source_start_cmd(project)
      return "dotnet run --project #{project}" unless project.nil?
    end

    # Start command for a published app: either a self-contained executable
    # or a portable dll launched via "dotnet".
    def get_published_start_cmd(project, build_dir)
      return "#{project}" if File.exist? File.join(build_dir, "#{project}")
      return "dotnet #{project}.dll" if File.exist? File.join(build_dir, "#{project}.dll")
    end

    # Prefer running from source; fall back to a published project.
    def get_start_cmd(app)
      start_cmd = get_source_start_cmd(app.main_project_path)
      return start_cmd unless start_cmd.nil?
      start_cmd = get_published_start_cmd(app.published_project, app.root)
      return start_cmd unless start_cmd.nil?
    end

    def startup_script_path(dir)
      File.join(dir, '.profile.d', 'startup.sh')
    end

    # private reader; set once in #release
    attr_reader :ibmdb
  end
end
|
#!/usr/bin/env ruby
# ----------------------------------------------------------------------------- #
# File: listbox.rb
# Description: A list box based on textpad
# Author: jkepler http://github.com/mare-imbrium/canis/
# Date: 2014-04-06 - 19:37
# License: Same as Ruby's License (http://www.ruby-lang.org/LICENSE.txt)
# Last update: 2014-06-29 20:43
# ----------------------------------------------------------------------------- #
# listbox.rb Copyright (C) 2012-2014 kepler
require 'canis'
require 'forwardable'
require 'canis/core/include/listselectionmodel'
##
# A listbox based on textpad.
# Contains a scrollable array of Strings. The list is selectable too.
# In place editing is not provided, however editing in a separate box
# has been implemented in various examples.
# Essentially, the listbox only adds selection to the textpad.
# TODO
# ----
# [ ] focussed_color - this could be part of textpad too. row under cursor
# [ ] rlist has menu actions that can use prompt menu or popup ?
# [ ] nothing has been done about show_selector -- consider whether to knock off
#
#
# CHANGES
# -------
# - removed Array operations to Textpad, some renaming 2014-04-10 - 20:50
#
#
module Canis
##
# A scrollable, selectable array of strings.
# Delegates display to ListRenderer
# Delegates selection to Defaultlistselection (/include/listselectionmodel.rb)
# Due to extending Defaultlistselection, methods are not visible here.
# Selection methods are (the first three are what programmers will use the most):
#
# - `selected_values` : returns values selecteda (multiple selection)
# - `selected_value` : returns value of row selected (single selection)
# - `selected_rows` : same as selected_indices, indices of selected items
#
# - `toggle_row_selection` : toggles current row, called by key $row_selector
# - `select` : select given or current row
# - `unselect` : unselects given or current row
# - `is_row_selected?` : determine if given row is selected
# - `is_selection_empty?` : has anything been selected
# - `clear_selection` : clear selection
# - `select_all` : select all rows
#
# Listbox also fires a ListSelectionEvent whose type can be:
#
# - :INSERT , a row or rows added to selection
# - :DELETE , a row or rows removed from selection
# - :CLEAR , all selection cleared
#
# == Examples
#
# mylist = %w[john tim matz shougo _why sean aaron]
# l = Listbox.new @form, :row => 5, :col => 4, :height => 10, :width => 20, :list => mylist
#
# Inside a Flow:
#
# lb = listbox :list => mylist, :title => 'Contacts', :width_pc => 50, :selection_mode => :single
#
class Listbox < TextPad
  extend Forwardable
  # boolean, should a selector character be shown on the left of data for selected rows.
  dsl_property :show_selector
  # should textpads content_cols also add left_margin ? XXX
  # how much space to leave on left, currently 0, was used with selector character once
  dsl_property :left_margin
  # justify text to :left :right or :center (renderer to take care of this).
  dsl_accessor :justify
  # should focussed line be shown in a different way, currently BOLD, default true
  dsl_accessor :should_show_focus

  # Builds the listbox, mixes selection behavior into this instance and
  # installs the default selection model and renderer unless the caller
  # supplied their own via +config+ or the block.
  # @param form [Form, nil] the form this widget belongs to
  # @param config [Hash] options passed through to TextPad
  def initialize form = nil, config={}, &block
    @left_margin = 0
    @should_show_focus = true
    register_events([:LEAVE_ROW, :LIST_SELECTION_EVENT])
    # mixes in select/unselect/selected_values etc. (see listselectionmodel.rb)
    self.extend DefaultListSelection
    super
    # textpad takes care of enter_row and press
    #@_events.push(*[:LEAVE_ROW, :LIST_SELECTION_EVENT])
    bind_key(?f, 'next row starting with char'){ set_selection_for_char(nil) }
    # if user has not specified a selection model, install default
    unless @selection_mode == :none
      unless @list_selection_model
        create_default_selection_model
      end
    end
    # if user has not specified a renderer, install default
    unless @renderer
      create_default_renderer
    end
  end

  # create a default renderer since user has not specified one.
  # Widgets inheriting this with a different rendering, such as tree,
  # can override this.
  def create_default_renderer
    r = ListRenderer.new self
    renderer(r)
  end

  # setter for the object that paints each row; must respond to +render+.
  def renderer r
    @renderer = r
  end

  # create a default selection model.
  # Widgets inheriting this may override this.
  def create_default_selection_model
    list_selection_model(Canis::DefaultListSelectionModel.new self)
  end
  # http://www.opensource.apple.com/source/gcc/gcc-5483/libjava/javax/swing/table/DefaultTableColumnModel.java
  #
  # clear the list completely of data, including selections
  def clear
    @selected_indices.clear
    super
  end
  alias :remove_all :clear

  # This is called whenever user leaves a row.
  # Fires handler for :LEAVE_ROW.
  def on_leave_row arow
    fire_handler :LEAVE_ROW, self
  end

  # This is called whenever user enters a row.
  # Repaints the previously-focussed row and the new one so the focus
  # highlight moves with the cursor.
  def on_enter_row arow
    super
    # TODO check if user wants focus to be showed
    ## this results in the row being entered and left being evaluated and repainted
    # which means that the focussed row can be bolded. The renderer's +render+ method will be triggered
    if @should_show_focus
      fire_row_changed @oldindex
      fire_row_changed arow
    end
  end

  # get a char, ensure it is a char or number.
  # In this state, it could accept control and other chars.
  private
  # Reads one key; if outside the printable range (26..255) the key is
  # pushed back onto the input queue and :UNHANDLED is returned.
  def _ask_a_char
    ch = @graphic.getch
    #message "achar is #{ch}"
    if ch < 26 || ch > 255
      @graphic.ungetch ch
      return :UNHANDLED
    end
    return ch.chr
  end
  public
  # sets the selection to the next row starting with char.
  # Trying to return unhandled is having no effect right now. if only we could pop it into a
  # stack or unget it.
  # @return [Integer, Symbol, nil] new current index, :UNHANDLED, or nil when no match
  def set_selection_for_char char=nil
    char = _ask_a_char unless char
    return :UNHANDLED if char == :UNHANDLED
    #alert "got #{char}"
    @oldrow = @current_index
    # case-insensitive match on the first character of a row
    @last_regex = /^#{char}/i
    ix = next_regex @last_regex
    return unless ix
    @current_index = ix[0]
    #alert "curr ind #{@current_index} "
    @search_found_ix = @current_index
    @curpos = ix[1]
    ensure_visible
    return @current_index
  end

  # Find the next row that contains given string, searching below the
  # current row first and wrapping to the first match overall otherwise.
  # @return [Array(Integer,Integer), nil] row index and col offset of match, or nil
  # @param str [String, Regexp] pattern to find
  def next_regex str
    first = nil
    ## content can be string or Chunkline, so we had to write <tt>index</tt> for this.
    ## =~ does not give an error, but it does not work.
    @list.each_with_index do |line, ix|
      col = line =~ /#{str}/
      if col
        first ||= [ ix, col ]
        if ix > @current_index
          return [ix, col]
        end
      end
    end
    return first
  end
end # class listbox
## Takes care of rendering the list.
# In the case of a List we take care of selected indices.
# Also, focussed row is shown in bold, although we can make that optional and configurable
# A user wanting a different rendering of listboxes may either extend this class
# or completely replace it and set it as the renderer.
class ListRenderer < AbstractTextPadRenderer
  # @param obj [Listbox] the widget whose rows this renderer paints
  def initialize obj
    @obj = obj
    # snapshot of the widget's selection and margin at construction time
    @selected_indices = obj.selected_indices
    @left_margin = obj.left_margin
    # internal width based on both borders - earlier internal_width which we need
    @int_w = 3
    # 3 leaves a blank black in popuplists as in testlistbox.rb F4
    # setting it as 2 means that in some cases, the next line first character
    # gets overwritten with traversal
    #@int_w = 2
  end
  #
  # Paint a single row, choosing color/attribute depending on whether the
  # row is selected, focussed (current), or plain.
  # @param pad for calling print methods on
  # @param lineno the line number on the pad to print on
  # @param text data to print
  #--
  # NOTE: in some cases like testlistbox.rb if a line is updated then the newly printed
  # value may not overwrite the entire line, addstr seems to only write the text no more
  # Fixed with +clear_row+
  #++
  def render pad, lineno, text
    sele = false
    bg = @obj.bgcolor
    fg = @obj.color
    att = NORMAL
    #cp = $datacolor
    cp = get_color($datacolor, fg, bg)
    if @selected_indices.include? lineno
      # print selected row in reverse
      sele = true
      fg = @obj.selected_color || fg
      bg = @obj.selected_bgcolor || bg
      att = @obj.selected_attr || REVERSE
      cp = get_color($datacolor, fg, bg)
    elsif lineno == @obj.current_index
      # print focussed row in bold
      att = BOLD if @obj.should_show_focus
      # take current index into account as BOLD
      # and oldindex as normal
    end
    FFI::NCurses.wattron(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
    FFI::NCurses.mvwaddstr(pad, lineno, @left_margin, text)
    FFI::NCurses.wattroff(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
    # the above only sets the attrib under the text not the whole line, we
    # need the whole line to be REVERSE
    # Strangely in testlistbox1 unselecting removes the entire lines REVERSE
    # but in testlistbox.rb the previous selected lines REV only partially goes
    # so we have to make the entire line in current attrib
    # NOTE(review): this unconditional assignment makes the chgat below run for
    # every row, so the earlier `sele` bookkeeping is effectively dead code —
    # apparently deliberate per the comment above; confirm before removing.
    sele = true
    if sele
      FFI::NCurses.mvwchgat(pad, y=lineno, x=@left_margin, @obj.width - @left_margin - @int_w, att, cp, nil)
    end
  end
  # clear row before writing so previous contents are erased and don't show through
  # I could do this everytime i write but trying to make it faster
  # and only call this if +fire_row_changed+ is called.
  # NOTE: in clear_row one is supposed to clear to the width of the pad, not window
  # otherwise on scrolling you might get black bg if you have some other color bg.
  # This is mostly important if you have a bgcolor that is different from the terminal
  # bgcolor.
  # @param - pad
  # @param - line number (index of row to clear)
  def _clear_row pad, lineno
    # intentionally disabled — the raise guards against accidental use;
    # the code below is kept for reference only.
    raise "unused"
    @color_pair ||= get_color($datacolor, @obj.color, @obj.bgcolor)
    cp = @color_pair
    att = NORMAL
    @_clearstring ||= " " * (@obj.width - @left_margin - @int_w)
    # with int_w = 3 we get that one space in popuplist
    # added attr on 2014-05-02 - 00:16 otherwise a list inside a white bg messagebox shows
    # empty rows in black bg.
    FFI::NCurses.wattron(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
    FFI::NCurses.mvwaddstr(pad,lineno, @left_margin, @_clearstring)
    FFI::NCurses.wattroff(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
  end
end
end # module
Added row_focussed_attr so the attribute used for the focussed row is configurable.
#!/usr/bin/env ruby
# ----------------------------------------------------------------------------- #
# File: listbox.rb
# Description: A list box based on textpad
# Author: jkepler http://github.com/mare-imbrium/canis/
# Date: 2014-04-06 - 19:37
# License: Same as Ruby's License (http://www.ruby-lang.org/LICENSE.txt)
# Last update: 2014-07-02 17:51
# ----------------------------------------------------------------------------- #
# listbox.rb Copyright (C) 2012-2014 kepler
require 'canis'
require 'forwardable'
require 'canis/core/include/listselectionmodel'
##
# A listbox based on textpad.
# Contains a scrollable array of Strings. The list is selectable too.
# In place editing is not provided, however editing in a separate box
# has been implemented in various examples.
# Essentially, the listbox only adds selection to the textpad.
# TODO
# ----
# [ ] focussed_color - this could be part of textpad too. row under cursor
# [ ] rlist has menu actions that can use prompt menu or popup ?
# [ ] nothing has been done about show_selector -- consider whether to knock off
#
#
# CHANGES
# -------
# - removed Array operations to Textpad, some renaming 2014-04-10 - 20:50
#
#
module Canis
##
# A scrollable, selectable array of strings.
# Delegates display to ListRenderer
# Delegates selection to Defaultlistselection (/include/listselectionmodel.rb)
# Due to extending Defaultlistselection, methods are not visible here.
# Selection methods are (the first three are what programmers will use the most):
#
# - `selected_values` : returns values selected (multiple selection)
# - `selected_value` : returns value of row selected (single selection)
# - `selected_rows` : same as selected_indices, indices of selected items
#
# - `toggle_row_selection` : toggles current row, called by key $row_selector
# - `select` : select given or current row
# - `unselect` : unselects given or current row
# - `is_row_selected?` : determine if given row is selected
# - `is_selection_empty?` : has anything been selected
# - `clear_selection` : clear selection
# - `select_all` : select all rows
#
# Listbox also fires a ListSelectionEvent whose type can be:
#
# - :INSERT , a row or rows added to selection
# - :DELETE , a row or rows removed from selection
# - :CLEAR , all selection cleared
#
# == Examples
#
# mylist = %w[john tim matz shougo _why sean aaron]
# l = Listbox.new @form, :row => 5, :col => 4, :height => 10, :width => 20, :list => mylist
#
# Inside a Flow:
#
# lb = listbox :list => mylist, :title => 'Contacts', :width_pc => 50, :selection_mode => :single
#
class Listbox < TextPad
  extend Forwardable
  # boolean, should a selector character be shown on the left of data for selected rows.
  dsl_property :show_selector
  # should textpads content_cols also add left_margin ? XXX
  # how much space to leave on left, currently 0, was used with selector character once
  dsl_property :left_margin
  # justify text to :left :right or :center (renderer to take care of this).
  dsl_accessor :justify
  # should focussed line be shown in a different way, currently BOLD, default true
  dsl_accessor :should_show_focus
  # attribute for focussed row, if not set will use $row_focussed_attr
  # (reader with the global fallback is defined below as +row_focussed_attr+)
  attr_writer :row_focussed_attr

  # Builds the listbox, mixes selection behavior into this instance and
  # installs the default selection model and renderer unless the caller
  # supplied their own via +config+ or the block.
  # @param form [Form, nil] the form this widget belongs to
  # @param config [Hash] options passed through to TextPad
  def initialize form = nil, config={}, &block
    @left_margin = 0
    @should_show_focus = true
    register_events([:LEAVE_ROW, :LIST_SELECTION_EVENT])
    # mixes in select/unselect/selected_values etc. (see listselectionmodel.rb)
    self.extend DefaultListSelection
    super
    # textpad takes care of enter_row and press
    #@_events.push(*[:LEAVE_ROW, :LIST_SELECTION_EVENT])
    bind_key(?f, 'next row starting with char'){ set_selection_for_char(nil) }
    # if user has not specified a selection model, install default
    unless @selection_mode == :none
      unless @list_selection_model
        create_default_selection_model
      end
    end
    # if user has not specified a renderer, install default
    unless @renderer
      create_default_renderer
    end
  end

  # create a default renderer since user has not specified one.
  # Widgets inheriting this with a different rendering, such as tree,
  # can override this.
  def create_default_renderer
    r = ListRenderer.new self
    renderer(r)
  end

  # setter for the object that paints each row; must respond to +render+.
  def renderer r
    @renderer = r
  end

  # create a default selection model.
  # Widgets inheriting this may override this.
  def create_default_selection_model
    list_selection_model(Canis::DefaultListSelectionModel.new self)
  end
  # http://www.opensource.apple.com/source/gcc/gcc-5483/libjava/javax/swing/table/DefaultTableColumnModel.java
  #
  # clear the list completely of data, including selections
  def clear
    @selected_indices.clear
    super
  end
  alias :remove_all :clear

  # This is called whenever user leaves a row.
  # Fires handler for :LEAVE_ROW.
  def on_leave_row arow
    # leave this out, since we are not painting on exit of widget 2014-07-02 - 17:51
    #if @should_show_focus
    #fire_row_changed arow
    #end
    fire_handler :LEAVE_ROW, self
  end

  # This is called whenever user enters a row.
  # Repaints the previously-focussed row and the new one so the focus
  # highlight moves with the cursor.
  def on_enter_row arow
    super
    # TODO check if user wants focus to be showed
    ## this results in the row being entered and left being evaluated and repainted
    # which means that the focussed row can be bolded. The renderer's +render+ method will be triggered
    if @should_show_focus
      fire_row_changed @oldindex
      fire_row_changed arow
    end
  end
  #def on_leave
  #super
  #on_leave_row @current_index if @current_index
  #end

  # get a char, ensure it is a char or number.
  # In this state, it could accept control and other chars.
  private
  # Reads one key; if outside the printable range (26..255) the key is
  # pushed back onto the input queue and :UNHANDLED is returned.
  def _ask_a_char
    ch = @graphic.getch
    #message "achar is #{ch}"
    if ch < 26 || ch > 255
      @graphic.ungetch ch
      return :UNHANDLED
    end
    return ch.chr
  end
  public
  # sets the selection to the next row starting with char.
  # Trying to return unhandled is having no effect right now. if only we could pop it into a
  # stack or unget it.
  # @return [Integer, Symbol, nil] new current index, :UNHANDLED, or nil when no match
  def set_selection_for_char char=nil
    char = _ask_a_char unless char
    return :UNHANDLED if char == :UNHANDLED
    #alert "got #{char}"
    @oldrow = @current_index
    # case-insensitive match on the first character of a row
    @last_regex = /^#{char}/i
    ix = next_regex @last_regex
    return unless ix
    @current_index = ix[0]
    #alert "curr ind #{@current_index} "
    @search_found_ix = @current_index
    @curpos = ix[1]
    ensure_visible
    return @current_index
  end

  # Find the next row that contains given string, searching below the
  # current row first and wrapping to the first match overall otherwise.
  # @return [Array(Integer,Integer), nil] row index and col offset of match, or nil
  # @param str [String, Regexp] pattern to find
  def next_regex str
    first = nil
    ## content can be string or Chunkline, so we had to write <tt>index</tt> for this.
    ## =~ does not give an error, but it does not work.
    @list.each_with_index do |line, ix|
      col = line =~ /#{str}/
      if col
        first ||= [ ix, col ]
        if ix > @current_index
          return [ix, col]
        end
      end
    end
    return first
  end

  # returns the attribute to be used when a row is focussed (under cursor),
  # falling back to the global $row_focussed_attr when unset.
  def row_focussed_attr
    return @row_focussed_attr || $row_focussed_attr
  end
end # class listbox
## Takes care of rendering the list.
# In the case of a List we take care of selected indices.
# Also, focussed row is shown in bold, although we can make that optional and configurable
# A user wanting a different rendering of listboxes may either extend this class
# or completely replace it and set it as the renderer.
class ListRenderer < AbstractTextPadRenderer
# text to be placed in the left margin. This requires that a left margin be set in the source
# object.
attr_accessor :left_margin_text
def initialize obj
@obj = obj
# internal width based on both borders - earlier internal_width which we need
@int_w = 3
# 3 leaves a blank black in popuplists as in testlistbox.rb F4
# setting it as 2 means that in some cases, the next line first character
# gets overwritten with traversal
#@int_w = 2
end
# This is called prior to render_all, and may not be called when a single row is rendered
# as in fire_row_changed
def pre_render
super
@selected_indices = @obj.selected_indices
@left_margin = @obj.left_margin
@bg = @obj.bgcolor
@fg = @obj.color
@attr = NORMAL
end
#
# @param pad for calling print methods on
# @param lineno the line number on the pad to print on
# @param text data to print
#--
# NOTE: in some cases like testlistbox.rb if a line is updated then the newly printed
# value may not overwrite the entire line, addstr seems to only write the text no more
# Fixed with +clear_row+
#++
def render pad, lineno, text
sele = false
=begin
bg = @obj.bgcolor
fg = @obj.color
att = NORMAL
cp = get_color($datacolor, fg, bg)
=end
bg = @bg || @obj.bgcolor
fg = @fg || @obj.color
att = @attr || NORMAL
cp = get_color($datacolor, fg, bg)
if @selected_indices.include? lineno
# print selected row in reverse
sele = true
fg = @obj.selected_color || fg
bg = @obj.selected_bgcolor || bg
att = @obj.selected_attr || REVERSE
cp = get_color($datacolor, fg, bg)
elsif lineno == @obj.current_index
# print focussed row in different attrib
if @obj.should_show_focus
# bold was supposed to be if the object loses focus, but although render is called
# however, padrefresh is not happening since we do not paint on exiting a widget
att = BOLD
if @obj.focussed
att = @obj.row_focussed_attr
end
end
# take current index into account as BOLD
# and oldindex as normal
end
FFI::NCurses.wattron(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
FFI::NCurses.mvwaddstr(pad, lineno, 0, @left_margin_text) if @left_margin_text
FFI::NCurses.mvwaddstr(pad, lineno, @left_margin, text)
FFI::NCurses.wattroff(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
# the above only sets the attrib under the text not the whole line, we
# need the whole line to be REVERSE
# Strangely in testlistbox1 unselecting removes the entire lines REVERSE
# but in testlistbox.rb the previous selected lines REV only partially goes
# so we have to make the entire line in current attrib
sele = true
if sele
FFI::NCurses.mvwchgat(pad, y=lineno, x=@left_margin, @obj.width - @left_margin - @int_w, att, cp, nil)
end
end
# clear row before writing so previous contents are erased and don't show through
# I could do this everytime i write but trying to make it faster
# and only call this if +fire_row_changed+ is called.
# NOTE: in clear_row one is supposed to clear to the width of the pad, not window
# otherwise on scrolling you might get black bg if you have some other color bg.
# This is mostly important if you have a bgcolor that is different from the terminal
# bgcolor.
# @param - pad
# @param - line number (index of row to clear)
def _clear_row pad, lineno
raise "unused"
@color_pair ||= get_color($datacolor, @obj.color, @obj.bgcolor)
cp = @color_pair
att = NORMAL
@_clearstring ||= " " * (@obj.width - @left_margin - @int_w)
# with int_w = 3 we get that one space in popuplist
# added attr on 2014-05-02 - 00:16 otherwise a list inside a white bg messagebox shows
# empty rows in black bg.
FFI::NCurses.wattron(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
FFI::NCurses.mvwaddstr(pad,lineno, @left_margin, @_clearstring)
FFI::NCurses.wattroff(pad,FFI::NCurses.COLOR_PAIR(cp) | att)
end
end
end # module
|
require 'new_relic/recipes'
module CapistranoDeploy
module Newrelic
# Installs the New Relic deployment-notification task into the given
# Capistrano configuration.
# @param configuration [Capistrano::Configuration] target configuration
def self.load_into(configuration)
  configuration.load do
    # deploying user's name, read from the local git config
    set(:new_relic_user) { (%x(git config user.name)).chomp }
    # SHA currently checked out on the server
    set(:current_revision) { capture("cd #{deploy_to} && git rev-parse HEAD").chomp }
    set(:link) { "https://api.newrelic.com/deployments.xml" }
    namespace :newrelic do
      task :notice_deployment, roles: :notification do
        # Build the command as ONE logical line. The previous version embedded
        # raw newlines in the double-quoted string, so the shell ran each
        # "-d ..." fragment as a separate command and the notification never
        # carried its parameters.
        # NOTE(review): New Relic expects the header as 'x-api-key:KEY' —
        # verify new_relic_api_key already includes that prefix.
        run "curl -sH '#{new_relic_api_key}' " \
            "-d 'deployment[app_name]=#{new_relic_app_name}' " \
            "-d 'deployment[revision]=#{current_revision}' " \
            "-d 'deployment[user]=#{new_relic_user}' #{link}"
      end
    end
    # notify New Relic once the app server has picked up the new release
    after 'unicorn:reexec', 'newrelic:notice_deployment'
  end
end
end
end
Only run New Relic notifications on the primary production server.
* This task now runs on only one server (the primary).
* It also runs only on the production stage; the stages are configurable
  via the :new_relic_stages setting.
require 'new_relic/recipes'
module CapistranoDeploy
module Newrelic
# Installs the New Relic deployment-notification task into the given
# Capistrano configuration. Runs only on the primary server of the
# :notification role and only for stages listed in :new_relic_stages.
# @param configuration [Capistrano::Configuration] target configuration
def self.load_into(configuration)
  configuration.load do
    # stages for which a deployment notification should be sent
    set(:new_relic_stages) { %w(production) }
    # deploying user's name, read from the local git config
    set(:new_relic_user) { (%x(git config user.name)).chomp }
    # SHA currently checked out on the server
    set(:current_revision) { capture("cd #{deploy_to} && git rev-parse HEAD").chomp }
    set(:link) { "https://api.newrelic.com/deployments.xml" }
    namespace :newrelic do
      task :notice_deployment, roles: :notification, only: { primary: true } do
        if new_relic_stages.include? current_stage
          # Build the command as ONE logical line. The previous version
          # embedded raw newlines in the double-quoted string, so the shell
          # ran each "-d ..." fragment as a separate command and the
          # notification never carried its parameters.
          # NOTE(review): New Relic expects the header as 'x-api-key:KEY' —
          # verify new_relic_api_key already includes that prefix.
          run "curl -sH '#{new_relic_api_key}' " \
              "-d 'deployment[app_name]=#{new_relic_app_name}' " \
              "-d 'deployment[revision]=#{current_revision}' " \
              "-d 'deployment[user]=#{new_relic_user}' #{link}"
        end
      end
    end
    # notify New Relic once the app server has picked up the new release
    after 'unicorn:reexec', 'newrelic:notice_deployment'
  end
end
end
end |
namespace :alchemy do
  # TODO: split up this namespace into something that runs once on `cap install` and
  # once on every deploy
  desc "Prepare Alchemy for deployment."
  task :default_paths do
    set :alchemy_picture_cache_path,
      -> { File.join('public', Alchemy::MountPoint.get, 'pictures') }
    # symlink upload and cache dirs into shared so they survive releases
    set :linked_dirs, fetch(:linked_dirs, []) + [
      "uploads/pictures",
      "uploads/attachments",
      fetch(:alchemy_picture_cache_path),
      "tmp/cache/assets"
    ]
    # TODO: Check, if this is the right approach to ensure that we don't overwrite existing settings?
    # Or does Capistrano already handle this for us?
    set :linked_files, fetch(:linked_files, []) + %w(config/database.yml)
  end
  # TODO: Do we really need this in Alchemy or should we release an official Capistrano plugin for that?
  namespace :database_yml do
    desc "Creates the database.yml file"
    task create: ['alchemy:default_paths', 'deploy:check'] do
      # interactively collect connection settings
      set :db_environment, ask("the environment", fetch(:rails_env, 'production'))
      set :db_adapter, ask("database adapter (mysql or postgresql)", 'mysql')
      # the Rails adapter gem for MySQL is mysql2
      set :db_adapter, fetch(:db_adapter).gsub(/\Amysql\z/, 'mysql2')
      set :db_name, ask("database name", nil)
      set :db_username, ask("database username", nil)
      set :db_password, ask("database password", nil)
      default_db_host = fetch(:db_adapter) == 'mysql2' ? 'localhost' : '127.0.0.1'
      set :db_host, ask("database host", default_db_host)
      # NOTE(review): the heredoc indentation appears lost in this copy — the
      # keys under the environment must be indented for valid YAML; verify
      # against the rendered file.
      db_config = ERB.new <<-EOF
#{fetch(:db_environment)}:
adapter: #{fetch(:db_adapter)}
encoding: utf8
reconnect: false
pool: 5
database: #{fetch(:db_name)}
username: #{fetch(:db_username)}
password: #{fetch(:db_password)}
host: #{fetch(:db_host)}
EOF
      on roles :app do
        execute :mkdir, '-p', "#{shared_path}/config"
        upload! StringIO.new(db_config.result), "#{shared_path}/config/database.yml"
      end
    end
  end
  namespace :db do
    desc "Dumps the database into 'db/dumps' on the server."
    task dump: ['alchemy:default_paths', 'deploy:check'] do
      on roles :db do
        within release_path do
          timestamp = Time.now.strftime('%Y-%m-%d-%H-%M')
          execute :mkdir, '-p', 'db/dumps'
          with dump_filename: "db/dumps/#{timestamp}.sql", rails_env: fetch(:rails_env, 'production') do
            execute :rake, 'alchemy:db:dump'
          end
        end
      end
    end
  end
  namespace :import do
    desc "Imports all data (Pictures, attachments and the database) into your local development machine."
    task all: ['alchemy:default_paths', 'deploy:check'] do
      on roles [:app, :db] do
        invoke('alchemy:import:pictures')
        puts "\n"
        invoke('alchemy:import:attachments')
        puts "\n"
        invoke('alchemy:import:database')
      end
    end
    desc "Imports the server database into your local development machine."
    task database: ['alchemy:default_paths', 'deploy:check'] do
      on roles :db do |server|
        puts "Importing database. Please wait..."
        # runs locally: pulls a dump from the server into the local db
        system db_import_cmd(server)
        puts "Done."
      end
    end
    desc "Imports attachments into your local machine using rsync."
    task attachments: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |server|
        get_files(:attachments, server)
      end
    end
    desc "Imports pictures into your local machine using rsync."
    task pictures: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |server|
        get_files(:pictures, server)
      end
    end
  end
  namespace :export do
    desc "Sends all data (Pictures, attachments and the database) to your remote machine."
    task all: ['alchemy:default_paths', 'deploy:check'] do
      invoke 'alchemy:export:pictures'
      invoke 'alchemy:export:attachments'
      invoke 'alchemy:export:database'
    end
    desc "Imports the server database into your local development machine."
    task database: ['alchemy:default_paths', 'deploy:check'] do
      on roles :db do |host|
        within release_path do
          # destructive: backs up unless the user explicitly opts out twice
          if ask(:backup_confirm, 'WARNING: This task will overwrite your remote database. Do you want me to make a backup? (y/n)') == "y"
            backup_database
            export_database(host)
          else
            if ask(:overwrite_confirm, 'Are you sure? (y/n)') == "y"
              export_database(host)
            else
              backup_database
              export_database(host)
            end
          end
        end
      end
    end
    desc "Sends attachments to your remote machine using rsync."
    task attachments: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |host|
        send_files :attachments, host
      end
    end
    desc "Sends pictures to your remote machine using rsync."
    task pictures: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |host|
        send_files :pictures, host
      end
    end
  end
  desc "Upgrades production database to current Alchemy CMS version"
  task upgrade: ['alchemy:default_paths', 'deploy:check'] do
    on roles [:app, :db] do
      within release_path do
        with rails_env: fetch(:rails_env, 'production') do
          execute :rake, 'alchemy:upgrade'
        end
      end
    end
  end
end
Removes the database.yml creation and link tasks
This is not our concern and should be done by the app developer.
Refs #7
namespace :alchemy do
  # Prepare Alchemy for deployment: symlink upload and cache dirs into
  # shared so they survive releases.
  task :default_paths do
    set :alchemy_picture_cache_path,
      -> { File.join('public', Alchemy::MountPoint.get, 'pictures') }
    set :linked_dirs, fetch(:linked_dirs, []) + [
      "uploads/pictures",
      "uploads/attachments",
      fetch(:alchemy_picture_cache_path),
      "tmp/cache/assets"
    ]
  end
  namespace :db do
    desc "Dumps the database into 'db/dumps' on the server."
    task dump: ['alchemy:default_paths', 'deploy:check'] do
      on roles :db do
        within release_path do
          timestamp = Time.now.strftime('%Y-%m-%d-%H-%M')
          execute :mkdir, '-p', 'db/dumps'
          with dump_filename: "db/dumps/#{timestamp}.sql", rails_env: fetch(:rails_env, 'production') do
            execute :rake, 'alchemy:db:dump'
          end
        end
      end
    end
  end
  namespace :import do
    desc "Imports all data (Pictures, attachments and the database) into your local development machine."
    task all: ['alchemy:default_paths', 'deploy:check'] do
      on roles [:app, :db] do
        invoke('alchemy:import:pictures')
        puts "\n"
        invoke('alchemy:import:attachments')
        puts "\n"
        invoke('alchemy:import:database')
      end
    end
    desc "Imports the server database into your local development machine."
    task database: ['alchemy:default_paths', 'deploy:check'] do
      on roles :db do |server|
        puts "Importing database. Please wait..."
        # runs locally: pulls a dump from the server into the local db
        system db_import_cmd(server)
        puts "Done."
      end
    end
    desc "Imports attachments into your local machine using rsync."
    task attachments: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |server|
        get_files(:attachments, server)
      end
    end
    desc "Imports pictures into your local machine using rsync."
    task pictures: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |server|
        get_files(:pictures, server)
      end
    end
  end
  namespace :export do
    desc "Sends all data (Pictures, attachments and the database) to your remote machine."
    task all: ['alchemy:default_paths', 'deploy:check'] do
      invoke 'alchemy:export:pictures'
      invoke 'alchemy:export:attachments'
      invoke 'alchemy:export:database'
    end
    desc "Imports the server database into your local development machine."
    task database: ['alchemy:default_paths', 'deploy:check'] do
      on roles :db do |host|
        within release_path do
          # destructive: backs up unless the user explicitly opts out twice
          if ask(:backup_confirm, 'WARNING: This task will overwrite your remote database. Do you want me to make a backup? (y/n)') == "y"
            backup_database
            export_database(host)
          else
            if ask(:overwrite_confirm, 'Are you sure? (y/n)') == "y"
              export_database(host)
            else
              backup_database
              export_database(host)
            end
          end
        end
      end
    end
    desc "Sends attachments to your remote machine using rsync."
    task attachments: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |host|
        send_files :attachments, host
      end
    end
    desc "Sends pictures to your remote machine using rsync."
    task pictures: ['alchemy:default_paths', 'deploy:check'] do
      on roles :app do |host|
        send_files :pictures, host
      end
    end
  end
  desc "Upgrades production database to current Alchemy CMS version"
  task upgrade: ['alchemy:default_paths', 'deploy:check'] do
    on roles [:app, :db] do
      within release_path do
        with rails_env: fetch(:rails_env, 'production') do
          execute :rake, 'alchemy:upgrade'
        end
      end
    end
  end
end
|
# Provisioning tasks for a blank Ubuntu host.
# Fixes in this revision:
# - `task :imagemagick` was defined twice; rake APPENDS actions to an
#   already-defined task, so the first action installed imagemagick and the
#   second then skipped libmagickcore-dev because `is_package_installed?`
#   was now true. The first (subset) definition is removed.
# - User-facing error message typo ("use a use a user") corrected.
namespace :machine do
  include Aptitude
  include UserManagement
  desc "Sets up a blank Ubuntu to run our Rails-setup"
  task :init do
    on roles(:app) do
      apt_get_update
      invoke "machine:install:htop"
      invoke "machine:install:language_pack_de"
      invoke "machine:install:unattended_upgrades"
      invoke "machine:install:ntp"
      invoke "machine:install:git"
      invoke "machine:install:nginx"
      invoke "machine:install:fail2ban"
      invoke "machine:install:rvm"
      invoke "machine:install:ruby"
      invoke "machine:install:set_defaults"
      invoke "machine:install:bluepill"
      invoke "machine:install:bundler"
      invoke "machine:install:nodejs"
      invoke "machine:install:elasticsearch"
      # optional components, driven by deploy configuration
      invoke "machine:install:imagemagick" if fetch(:imagemagick)
      invoke "machine:install:mysql_dev" if fetch(:db_engine) == "mysql"
      invoke "machine:install:postgres_dev" if fetch(:db_engine) == "postgresql"
    end
  end
  before "machine:init", "machine:check_ubuntu_user"
  before "deploy", "machine:check_ubuntu_user"
  desc "Check if we are doing things as the correct user"
  task :check_ubuntu_user do
    on roles(:app) do
      unless am_i?("ubuntu")
        invoke "machine:create_ubuntu_user"
        error "Please use a user named 'ubuntu' to login to the machine."
        fail
      end
    end
  end
  desc "Creates an Amazon AWS-style 'ubuntu'-user on machines with only 'root'"
  task :create_ubuntu_user do
    on roles(:app) do
      execute_script("create_ubuntu_user.sh")
    end
  end
  desc "Install configs"
  task :setup do
    invoke "nginx:setup"
    invoke "unicorn:setup"
    invoke "upstart:setup"
    invoke "logrotate:setup"
    invoke "bluepill:setup"
  end
  before :setup, "deploy:ensure_folder"
  namespace :install do
    # applies base OS defaults for the configured ruby version
    task :set_defaults do
      on roles(:app) do
        execute_script("set_defaults.sh", fetch(:rvm_ruby_version))
        warn "--------------------------------------------------------------------------------------"
        warn "Run 'dpkg-reconfigure -plow unattended-upgrades' to enable automatic security updates!"
        warn "--------------------------------------------------------------------------------------"
      end
    end
    task :elasticsearch do
      on roles(:search) do
        invoke 'elasticsearch:install'
      end
    end
    task :language_pack_de do
      on roles(:app) do
        apt_get_install("language-pack-de") unless is_package_installed?("language-pack-de")
      end
    end
    task :ruby do
      on roles(:app) do
        execute :rvm, :install, fetch(:rvm_ruby_version)
      end
    end
    before :ruby, 'rvm:hook'
    task :bundler do
      on roles(:app) do
        execute :rvmsudo, :gem, :install, :bundler
      end
    end
    before :bundler, 'rvm:hook'
    task :bluepill do
      on roles(:app) do
        execute :rvmsudo, :gem, :install, :bluepill
        sudo 'mkdir -p /var/run/bluepill'
      end
    end
    before :bluepill, 'rvm:hook'
    task :rvm do
      on roles(:app) do
        execute_script("install_rvm.sh")
      end
    end
    task :mysql_dev do
      on roles(:app) do
        apt_get_install("libmysqlclient-dev") unless is_package_installed?("libmysqlclient-dev")
      end
    end
    task :postgres_dev do
      on roles(:app) do
        apt_get_install("libpq-dev") unless is_package_installed?("libpq-dev")
        apt_get_install("postgresql-client") unless is_package_installed?("postgresql-client")
      end
    end
    task :htop do
      on roles(:app) do
        apt_get_install("htop") unless is_package_installed?("htop")
      end
    end
    task :nodejs do
      on roles(:app) do
        apt_get_install("nodejs") unless is_package_installed?("nodejs")
      end
    end
    task :ntp do
      on roles(:app) do
        apt_get_install("ntp") unless is_package_installed?("ntp")
      end
    end
    task :git do
      on roles(:app) do
        apt_get_install("git") unless is_package_installed?("git")
      end
    end
    task :nginx do
      on roles(:app) do
        apt_get_install("nginx") unless is_package_installed?("nginx")
      end
    end
    task :fail2ban do
      on roles(:app) do
        apt_get_install("fail2ban") unless is_package_installed?("fail2ban")
      end
    end
    # sole definition of :imagemagick (an earlier duplicate was removed, see
    # header note) — installs the binary and its dev headers together.
    task :imagemagick do
      on roles(:app) do
        unless is_package_installed?("imagemagick")
          apt_get_install("imagemagick")
          apt_get_install("libmagickcore-dev")
        end
      end
    end
    task :unattended_upgrades do
      on roles(:app) do
        unless is_package_installed?("unattended-upgrades")
          apt_get_install("unattended-upgrades")
        end
      end
    end
    # RVM's signing key must be current before install_rvm.sh verifies the download
    task :update_rvm_key do
      on roles(:app) do
        execute :gpg, "--keyserver hkp://keys.gnupg.net --recv-keys D39DC0E3"
      end
    end
    before "machine:install:rvm", "machine:install:update_rvm_key"
  end
end
add libmagickwand-dev to imagemagick install
namespace :machine do
include Aptitude
include UserManagement
desc "Sets up a blank Ubuntu to run our Rails-setup"
task :init do
on roles(:app) do
apt_get_update
invoke "machine:install:htop"
invoke "machine:install:language_pack_de"
invoke "machine:install:unattended_upgrades"
invoke "machine:install:ntp"
invoke "machine:install:git"
invoke "machine:install:nginx"
invoke "machine:install:fail2ban"
invoke "machine:install:rvm"
invoke "machine:install:ruby"
invoke "machine:install:set_defaults"
invoke "machine:install:bluepill"
invoke "machine:install:bundler"
invoke "machine:install:nodejs"
invoke "machine:install:elasticsearch"
invoke "machine:install:imagemagick" if fetch(:imagemagick)
invoke "machine:install:mysql_dev" if fetch(:db_engine) == "mysql"
invoke "machine:install:postgres_dev" if fetch(:db_engine) == "postgresql"
end
end
before "machine:init", "machine:check_ubuntu_user"
before "deploy", "machine:check_ubuntu_user"
desc "Check if we are doing things as the correct user"
task :check_ubuntu_user do
on roles(:app) do
unless am_i?("ubuntu")
invoke "machine:create_ubuntu_user"
error "Please use a use a user named 'ubuntu' to login to the machine."
fail
end
end
end
desc "Creates an Amazon AWS-style 'ubuntu'-user on machines with only 'root'"
task :create_ubuntu_user do
on roles(:app) do
execute_script("create_ubuntu_user.sh")
end
end
desc "Install configs"
task :setup do
invoke "nginx:setup"
invoke "unicorn:setup"
invoke "upstart:setup"
invoke "logrotate:setup"
invoke "bluepill:setup"
end
before :setup, "deploy:ensure_folder"
namespace :install do
task :set_defaults do
on roles(:app) do
execute_script("set_defaults.sh", fetch(:rvm_ruby_version))
warn "--------------------------------------------------------------------------------------"
warn "Run 'dpkg-reconfigure -plow unattended-upgrades' to enable automatic security updates!"
warn "--------------------------------------------------------------------------------------"
end
end
task :elasticsearch do
on roles(:search) do
invoke 'elasticsearch:install'
end
end
task :language_pack_de do
on roles(:app) do
apt_get_install("language-pack-de") unless is_package_installed?("language-pack-de")
end
end
task :ruby do
on roles(:app) do
execute :rvm, :install, fetch(:rvm_ruby_version)
end
end
before :ruby, 'rvm:hook'
task :bundler do
on roles(:app) do
execute :rvmsudo, :gem, :install, :bundler
end
end
before :bundler, 'rvm:hook'
task :bluepill do
on roles(:app) do
execute :rvmsudo, :gem, :install, :bluepill
sudo 'mkdir -p /var/run/bluepill'
end
end
before :bluepill, 'rvm:hook'
task :rvm do
on roles(:app) do
execute_script("install_rvm.sh")
end
end
# NOTE: a duplicate `task :imagemagick` was removed here. Rake *appends*
# actions when a task name is registered twice, so this minimal body would
# have run in addition to the fuller :imagemagick task defined later in this
# namespace (which also installs libmagickcore-dev and libmagickwand-dev).
# The later definition is now the single registration.
task :mysql_dev do
on roles(:app) do
apt_get_install("libmysqlclient-dev") unless is_package_installed?("libmysqlclient-dev")
end
end
task :postgres_dev do
on roles(:app) do
apt_get_install("libpq-dev") unless is_package_installed?("libpq-dev")
apt_get_install("postgresql-client") unless is_package_installed?("postgresql-client")
end
end
task :htop do
on roles(:app) do
apt_get_install("htop") unless is_package_installed?("htop")
end
end
task :nodejs do
on roles(:app) do
apt_get_install("nodejs") unless is_package_installed?("nodejs")
end
end
task :ntp do
on roles(:app) do
apt_get_install("ntp") unless is_package_installed?("ntp")
end
end
task :git do
on roles(:app) do
apt_get_install("git") unless is_package_installed?("git")
end
end
task :nginx do
on roles(:app) do
apt_get_install("nginx") unless is_package_installed?("nginx")
end
end
task :fail2ban do
on roles(:app) do
apt_get_install("fail2ban") unless is_package_installed?("fail2ban")
end
end
task :imagemagick do
on roles(:app) do
unless is_package_installed?("imagemagick")
apt_get_install("imagemagick")
apt_get_install("libmagickcore-dev")
apt_get_install("libmagickwand-dev")
end
end
end
task :unattended_upgrades do
on roles(:app) do
unless is_package_installed?("unattended-upgrades")
apt_get_install("unattended-upgrades")
end
end
end
task :update_rvm_key do
on roles(:app) do
execute :gpg, "--keyserver hkp://keys.gnupg.net --recv-keys D39DC0E3"
end
end
before "machine:install:rvm", "machine:install:update_rvm_key"
end
end
|
namespace :opcache do
  namespace :cache do
    desc <<-DESC
Create a temporary PHP file to clear OPCache, call it (using curl) and removes it
This task must be triggered AFTER the deployment.
    DESC
    # Clears PHP OPCache (and APC when present) by uploading a one-off,
    # revision-stamped PHP script into the webroot, polling it over HTTP until
    # it echoes the freshly deployed revision, then deleting the script.
    task :clear do
      invoke "#{scm}:set_current_revision"
      on roles(:web) do
        # Revision-stamped filename so concurrent/stale deploys cannot collide.
        apc_file = "#{fetch(:webroot)}/opcache_clear_#{fetch(:current_revision)}.php"
        contents = StringIO.new("<?php if (function_exists('apc_clear_cache')) { apc_clear_cache(); apc_clear_cache('user'); } opcache_reset(); clearstatcache(true); echo trim(file_get_contents(__DIR__.'/../REVISION')); ?>")
        upload! contents, apc_file
        run_locally do
          domain = fetch(:domain)
          # Prepend a scheme when :domain was configured without one.
          if not domain.match(/:\/\//)
            domain = "http://#{domain}"
          end
          output = %x[curl -s -l #{domain}/opcache_clear_#{fetch(:current_revision)}.php]
          sleep = fetch(:apc_sleep)
          # Poll until the script reports the deployed revision, i.e. the new
          # code is actually being served.
          # NOTE(review): this loop has no retry limit — if the URL is
          # unreachable or always stale it spins forever; consider a
          # max-attempts guard.
          while output != fetch(:current_revision)
            sleep(sleep)
            output = %x[curl -s -l #{domain}/opcache_clear_#{fetch(:current_revision)}.php]
            debug "Retry OPCache clear in #{sleep} second."
          end
          info 'Successfully cleared OPCache cache.'
        end
        # Remove the temporary clear script from the webroot.
        execute "rm #{apc_file}"
      end
    end
  end

  namespace :monitor do
    desc "Enable OPCache monitoring"
    # Copies the rlerdorf/opcache-status page into the webroot.
    task :enable do
      on roles(:web) do
        execute :cp, "#{fetch(:webroot)}/../vendor/rlerdorf/opcache-status/opcache.php", "#{fetch(:webroot)}/"
      end
    end

    desc "Disable OPCache monitoring"
    # Removes the monitoring page from the webroot again.
    task :disable do
      on roles(:web) do
        execute :rm, "#{fetch(:webroot)}/opcache.php"
      end
    end
  end
end

# Deploy-log bracketing around the cache clear.
before('opcache:cache:clear', :log_before_cache_clear) do
  SSHKit.config.output.start(" ├── Clear OpCache")
end
after('opcache:cache:clear', :log_after_cache_clear) do
  SSHKit.config.output.success
end
No specific logging
namespace :opcache do
  namespace :cache do
    desc <<-DESC
Create a temporary PHP file to clear OPCache, call it (using curl) and removes it
This task must be triggered AFTER the deployment.
    DESC
    # Uploads a revision-stamped PHP clear script into the webroot, polls it
    # over HTTP until it reports the deployed revision, then removes it.
    task :clear do
      invoke "#{scm}:set_current_revision"
      on roles(:web) do
        apc_file = "#{fetch(:webroot)}/opcache_clear_#{fetch(:current_revision)}.php"
        contents = StringIO.new("<?php if (function_exists('apc_clear_cache')) { apc_clear_cache(); apc_clear_cache('user'); } opcache_reset(); clearstatcache(true); echo trim(file_get_contents(__DIR__.'/../REVISION')); ?>")
        upload! contents, apc_file
        run_locally do
          domain = fetch(:domain)
          # Prepend a scheme when :domain was configured without one.
          if not domain.match(/:\/\//)
            domain = "http://#{domain}"
          end
          output = %x[curl -s -l #{domain}/opcache_clear_#{fetch(:current_revision)}.php]
          sleep = fetch(:apc_sleep)
          # NOTE(review): unbounded polling loop — hangs forever if the URL
          # never returns the expected revision; consider a max-attempts guard.
          while output != fetch(:current_revision)
            sleep(sleep)
            output = %x[curl -s -l #{domain}/opcache_clear_#{fetch(:current_revision)}.php]
            debug "Retry OPCache clear in #{sleep} second."
          end
          info 'Successfully cleared OPCache cache.'
        end
        execute "rm #{apc_file}"
      end
    end
  end

  namespace :monitor do
    desc "Enable OPCache monitoring"
    # Copies the rlerdorf/opcache-status page into the webroot.
    task :enable do
      on roles(:web) do
        execute :cp, "#{fetch(:webroot)}/../vendor/rlerdorf/opcache-status/opcache.php", "#{fetch(:webroot)}/"
      end
    end

    desc "Disable OPCache monitoring"
    # Removes the monitoring page from the webroot again.
    task :disable do
      on roles(:web) do
        execute :rm, "#{fetch(:webroot)}/opcache.php"
      end
    end
  end
end
|
module CarrierWave
# Much of this is has been shamelessly taken from the CarrierWave::MiniMagick module
# https://github.com/carrierwaveuploader/carrierwave
module GraphicsMagick
extend ActiveSupport::Concern
included do
begin
require "graphicsmagick"
rescue LoadError => e
e.message << " (You may need to install the graphicsmagick gem)"
raise e
end
end
module ClassMethods
def convert_b(format)
process :convert_b => format
end
def auto_orient
process :auto_orient
end
def strip
process :strip
end
def resize_to_limit(width, height)
process :resize_to_limit => [width, height]
end
def resize_to_fit(width, height)
process :resize_to_fit => [width, height]
end
def resize_to_fill(width, height, gravity='Center')
process :resize_to_fill => [width, height, gravity]
end
end
##
# Changes the image encoding format to the given format
#
# See http://www.graphicsmagick.org/mogrify.html
#
# === Parameters
#
# [format (#to_s)] an abreviation of the format
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
# === Examples
#
# image.convert(:png)
#
def convert_b(format)
Rails.logger.debug "GraphicsMagick#convert - new format is #{format.to_s}"
manipulate! do |img|
@format = format
img.convert
img = yield(img) if block_given?
img
end
end
##
# Auto rotates the file (useful for images taken with a digital camera)
#
def auto_orient
manipulate! do |img|
img.auto_orient
img = yield(img) if block_given?
img
end
end
##
# Remove all profiles and text attributes from the image
#
def strip
manipulate! do |img|
img.strip
img = yield(img) if block_given?
img
end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the original aspect ratio. Will only resize the image if it is larger than the
# specified dimensions. The resulting image may be shorter or narrower than specified
# in the smaller dimension but will not be larger than the specified values.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
# Shrink the image so it fits within width x height, preserving aspect ratio
# and never enlarging (the ">" geometry flag). Yields the image for further
# manipulation when a block is given.
def resize_to_limit(width, height)
  geometry = "#{width}x#{height}>"
  manipulate! do |image|
    image.resize(geometry)
    block_given? ? yield(image) : image
  end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the original aspect ratio. The image may be shorter or narrower than
# specified in the smaller dimension but will not be larger than the specified values.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
def resize_to_fit(width, height)
manipulate! do |img|
img.resize "#{width}x#{height}"
img = yield(img) if block_given?
img
end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the aspect ratio of the original image. If necessary, crop the image in the
# larger dimension.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
# [gravity (String)] the current gravity suggestion (default: 'Center'; options: 'NorthWest', 'North', 'NorthEast', 'West', 'Center', 'East', 'SouthWest', 'South', 'SouthEast')
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
def resize_to_fill(width, height, gravity = 'Center')
Rails.logger.debug "GraphicsMagick#resize_to_fill"
manipulate! do |img|
img.resize("#{width}x#{height}^")
.gravity(gravity)
.background("rgba(255,255,255,0.0)")
.extent("#{width}x#{height}")
img = yield(img) if block_given?
img
end
end
##
# Manipulate the image with GraphicsMagick. This method will pass the image
# to the supplied block. It will NOT save the image to disk by default. Override
# this by passing true as the only argument. Note: by default, the image is only
# saved after all processes have been run. If you are using this method to utilize
# Graphicsmagick utilities other than mogrify, then make sure all processes have
# been explicitly written to disk first, or call manipulate(true) before using
# built in convenience methods.
#
# === Gotcha
#
# This method assumes that the object responds to +current_path+.
# Any class that this module is mixed into must have a +current_path+ method.
# CarrierWave::Uploader does, so you won't need to worry about this in
# most cases.
#
#
# === Yields
#
# [GraphicsMagick::Image] manipulations to perform
#
#
# Yields the (memoized) GraphicsMagick image handle to the block and stores
# the block's result. Pass save_image = true to write the result back to
# current_path immediately. Wraps any failure in CarrierWave::ProcessingError.
def manipulate!(save_image = false)
  cache_stored_file! if !cached?
  @_gimage ||= ::GraphicsMagick::Image.new(current_path)
  @_gimage = yield(@_gimage)
  # BUG FIX: this previously wrote `@_image` — an ivar never assigned in this
  # module — so manipulate!(true) raised NoMethodError on nil instead of
  # saving. The image handle is kept in `@_gimage`.
  @_gimage.write(current_path) if save_image
  @_gimage
rescue => e
  raise CarrierWave::ProcessingError.new("Failed to manipulate file! #{e}")
end
# Runs the queued processing steps (via super), then flushes the manipulated
# image back to disk: when a format change was requested through #convert_b,
# the image is written under the new extension and the uploader's file is
# repointed at the result; otherwise the image is saved in place. Clears the
# cached image handle afterwards and returns super's result.
def process!(*)
  Rails.logger.debug "GraphicsMagick - Processing image #{file.filename}"
  result = super
  if @_gimage
    if @format
      Rails.logger.debug "GraphicsMagick - Changing formats to #{@format.to_s}"
      Rails.logger.debug "GraphicsMagick - New file should be at #{file.basename}.#{@format.to_s.downcase}"
      new_file = @_gimage.write("#{file.basename}.#{@format.to_s.downcase}")
      # BUG FIX: `file = new_file.file` only created a local variable, leaving
      # the uploader pointing at the old file. Mutate the wrapped file instead
      # (matches the fix applied in the later revision of this module).
      file.file = new_file.file
    else
      @_gimage.write!
    end
    @_gimage = nil
  end
  result
end
end
end
another attempt
module CarrierWave
# Much of this is has been shamelessly taken from the CarrierWave::MiniMagick module
# https://github.com/carrierwaveuploader/carrierwave
module GraphicsMagick
extend ActiveSupport::Concern
included do
begin
require "graphicsmagick"
rescue LoadError => e
e.message << " (You may need to install the graphicsmagick gem)"
raise e
end
end
module ClassMethods
def convert_b(format)
process :convert_b => format
end
def auto_orient
process :auto_orient
end
def strip
process :strip
end
def resize_to_limit(width, height)
process :resize_to_limit => [width, height]
end
def resize_to_fit(width, height)
process :resize_to_fit => [width, height]
end
def resize_to_fill(width, height, gravity='Center')
process :resize_to_fill => [width, height, gravity]
end
end
##
# Changes the image encoding format to the given format
#
# See http://www.graphicsmagick.org/mogrify.html
#
# === Parameters
#
# [format (#to_s)] an abreviation of the format
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
# === Examples
#
# image.convert(:png)
#
def convert_b(format)
manipulate! do |img|
@format = format
img.convert
img = yield(img) if block_given?
img
end
end
##
# Auto rotates the file (useful for images taken with a digital camera)
#
def auto_orient
manipulate! do |img|
img.auto_orient
img = yield(img) if block_given?
img
end
end
##
# Remove all profiles and text attributes from the image
#
def strip
manipulate! do |img|
img.strip
img = yield(img) if block_given?
img
end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the original aspect ratio. Will only resize the image if it is larger than the
# specified dimensions. The resulting image may be shorter or narrower than specified
# in the smaller dimension but will not be larger than the specified values.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
def resize_to_limit(width, height)
manipulate! do |img|
img.resize "#{width}x#{height}>"
img = yield(img) if block_given?
img
end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the original aspect ratio. The image may be shorter or narrower than
# specified in the smaller dimension but will not be larger than the specified values.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
def resize_to_fit(width, height)
manipulate! do |img|
img.resize "#{width}x#{height}"
img = yield(img) if block_given?
img
end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the aspect ratio of the original image. If necessary, crop the image in the
# larger dimension.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
# [gravity (String)] the current gravity suggestion (default: 'Center'; options: 'NorthWest', 'North', 'NorthEast', 'West', 'Center', 'East', 'SouthWest', 'South', 'SouthEast')
#
# === Yields
#
# [GraphicsMagick::Image] additional manipulations to perform
#
def resize_to_fill(width, height, gravity = 'Center')
manipulate! do |img|
img.resize("#{width}x#{height}^")
.gravity(gravity)
.background("rgba(255,255,255,0.0)")
.extent("#{width}x#{height}")
img = yield(img) if block_given?
img
end
end
##
# Manipulate the image with GraphicsMagick. This method will pass the image
# to the supplied block. It will NOT save the image to disk by default. Override
# this by passing true as the only argument. Note: by default, the image is only
# saved after all processes have been run. If you are using this method to utilize
# Graphicsmagick utilities other than mogrify, then make sure all processes have
# been explicitly written to disk first, or call manipulate(true) before using
# built in convenience methods.
#
# === Gotcha
#
# This method assumes that the object responds to +current_path+.
# Any class that this module is mixed into must have a +current_path+ method.
# CarrierWave::Uploader does, so you won't need to worry about this in
# most cases.
#
#
# === Yields
#
# [GraphicsMagick::Image] manipulations to perform
#
#
# Yields the (memoized) GraphicsMagick image handle to the block and stores
# the block's result. Pass save_image = true to write the result back to
# current_path immediately. Wraps any failure in CarrierWave::ProcessingError.
def manipulate!(save_image = false)
  cache_stored_file! if !cached?
  @_gimage ||= ::GraphicsMagick::Image.new(current_path)
  @_gimage = yield(@_gimage)
  # BUG FIX: this previously wrote `@_image` — an ivar never assigned in this
  # module — so manipulate!(true) raised NoMethodError on nil instead of
  # saving. The image handle is kept in `@_gimage`.
  @_gimage.write(current_path) if save_image
  @_gimage
rescue => e
  raise CarrierWave::ProcessingError.new("Failed to manipulate file! #{e}")
end
# Runs the queued processing steps (via super), then flushes the manipulated
# image back to disk: when a format change was requested through #convert_b,
# the image is written under the new extension and the uploader's file is
# repointed at the result; otherwise the image is saved in place. Clears the
# cached image handle afterwards and returns super's result.
def process!(*)
  Rails.logger.debug "GraphicsMagick - Processing image #{file.filename}"
  result = super
  if @_gimage
    if @format
      Rails.logger.debug "GraphicsMagick - Changing formats to #{@format.to_s}"
      Rails.logger.debug "GraphicsMagick - New file should be at #{file.basename}.#{@format.to_s.downcase}"
      new_file = @_gimage.write("#{file.basename}.#{@format.to_s.downcase}")
      # Repoint the uploader's wrapped file at the newly written file.
      file.file = new_file.file
    else
      @_gimage.write!
    end
    @_gimage = nil
  end
  result
end
end
end |
#
# Author:: Nuo Yan (<nuo@opscode.com>)
# Copyright:: Copyright (c) 2009 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife'
class Chef
class Knife
class CookbookCreate < Knife
deps do
require 'chef/json_compat'
require 'uri'
require 'fileutils'
end
banner "knife cookbook create COOKBOOK (options)"
option :cookbook_path,
:short => "-o PATH",
:long => "--cookbook-path PATH",
:description => "The directory where the cookbook will be created"
option :readme_format,
:short => "-r FORMAT",
:long => "--readme-format FORMAT",
:description => "Format of the README file, supported formats are 'md' (markdown) and 'rdoc' (rdoc)"
option :cookbook_license,
:short => "-I LICENSE",
:long => "--license LICENSE",
:description => "License for cookbook, apachev2, gplv2, gplv3, mit or none"
option :cookbook_copyright,
:short => "-C COPYRIGHT",
:long => "--copyright COPYRIGHT",
:description => "Name of Copyright holder"
option :cookbook_email,
:short => "-m EMAIL",
:long => "--email EMAIL",
:description => "Email address of cookbook maintainer"
# Entry point: validates CLI input, resolves configuration defaults, and
# generates the cookbook skeleton (directories, README, CHANGELOG, metadata).
def run
  # Overlay the CLI options on top of the knife.rb configuration.
  self.config = Chef::Config.merge!(config)
  if @name_args.length < 1
    show_usage
    ui.fatal("You must specify a cookbook name")
    exit 1
  end
  # Need a destination from either knife.rb's cookbook_path or the -o flag.
  if default_cookbook_path_empty? && parameter_empty?(config[:cookbook_path])
    raise ArgumentError, "Default cookbook_path is not specified in the knife.rb config file, and a value to -o is not provided. Nowhere to write the new cookbook to."
  end
  # Only the first cookbook_path entry is used as the destination.
  cookbook_path = File.expand_path(Array(config[:cookbook_path]).first)
  cookbook_name = @name_args.first
  copyright = config[:cookbook_copyright] || "YOUR_COMPANY_NAME"
  email = config[:cookbook_email] || "YOUR_EMAIL"
  # The literal string "false" (as passed on the command line) is treated the
  # same as unset, falling back to the defaults.
  license = ((config[:cookbook_license] != "false") && config[:cookbook_license]) || "none"
  readme_format = ((config[:readme_format] != "false") && config[:readme_format]) || "md"
  create_cookbook(cookbook_path, cookbook_name, copyright, license)
  create_readme(cookbook_path, cookbook_name, readme_format)
  create_changelog(cookbook_path, cookbook_name)
  create_metadata(cookbook_path, cookbook_name, copyright, email, license, readme_format)
end
def create_cookbook(dir, cookbook_name, copyright, license)
msg("** Creating cookbook #{cookbook_name}")
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "attributes")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "recipes")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "definitions")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "libraries")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "resources")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "providers")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "files", "default")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "templates", "default")}"
unless File.exists?(File.join(dir, cookbook_name, "recipes", "default.rb"))
open(File.join(dir, cookbook_name, "recipes", "default.rb"), "w") do |file|
file.puts <<-EOH
#
# Cookbook Name:: #{cookbook_name}
# Recipe:: default
#
# Copyright #{Time.now.year}, #{copyright}
#
EOH
case license
when "apachev2"
file.puts <<-EOH
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
EOH
when "gplv2"
file.puts <<-EOH
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
EOH
when "gplv3"
file.puts <<-EOH
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
EOH
when "mit"
file.puts <<-EOH
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
EOH
when "none"
file.puts <<-EOH
# All rights reserved - Do Not Redistribute
#
EOH
end
end
end
end
# Writes a CHANGELOG.md skeleton into the cookbook directory unless one
# already exists.
def create_changelog(dir, cookbook_name)
  msg("** Creating CHANGELOG for cookbook: #{cookbook_name}")
  unless File.exists?(File.join(dir,cookbook_name,'CHANGELOG.md'))
    open(File.join(dir, cookbook_name, 'CHANGELOG.md'),'w') do |file|
      file.puts <<-EOH
#{cookbook_name} CHANGELOG
#{'='*"#{cookbook_name} CHANGELOG".length}
This file is used to list changes made in each version of the #{cookbook_name} cookbook.
0.1.0
-----
- [your_name] - Initial release of #{cookbook_name}
- - -
Check the [Markdown Syntax Guide](http://daringfireball.net/projects/markdown/syntax) for help with Markdown.
The [Github Flavored Markdown page](http://github.github.com/github-flavored-markdown/) describes the differences between markdown on github and standard markdown.
      EOH
    end
  end
end
# Writes README.<readme_format> into the cookbook directory unless it already
# exists. "rdoc" gets an RDoc skeleton, the markdown family ("md", "mkd",
# "txt") gets a Markdown skeleton, and anything else falls back to plain text.
# FIX: corrects the "List you cookbook attributes" typo ("you" -> "your") in
# all three template branches.
def create_readme(dir, cookbook_name, readme_format)
  msg("** Creating README for cookbook: #{cookbook_name}")
  unless File.exists?(File.join(dir, cookbook_name, "README.#{readme_format}"))
    open(File.join(dir, cookbook_name, "README.#{readme_format}"), "w") do |file|
      case readme_format
      when "rdoc"
        file.puts <<-EOH
= #{cookbook_name} Cookbook
TODO: Enter the cookbook description here.
e.g.
This cookbook makes your favorite breakfast sandwich.
== Requirements
TODO: List your cookbook requirements. Be sure to include any requirements this cookbook has on platforms, libraries, other cookbooks, packages, operating systems, etc.
e.g.
==== packages
- +toaster+ - #{cookbook_name} needs toaster to brown your bagel.
== Attributes
TODO: List your cookbook attributes here.
e.g.
==== #{cookbook_name}::default
<table>
<tr>
<th>Key</th>
<th>Type</th>
<th>Description</th>
<th>Default</th>
</tr>
<tr>
<td><tt>['#{cookbook_name}']['bacon']</tt></td>
<td>Boolean</td>
<td>whether to include bacon</td>
<td><tt>true</tt></td>
</tr>
</table>
== Usage
==== #{cookbook_name}::default
TODO: Write usage instructions for each cookbook.
e.g.
Just include +#{cookbook_name}+ in your node's +run_list+:
{
"name":"my_node",
"run_list": [
"recipe[#{cookbook_name}]"
]
}
== Contributing
TODO: (optional) If this is a public cookbook, detail the process for contributing. If this is a private cookbook, remove this section.
e.g.
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write your change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
== License and Authors
Authors: TODO: List authors
        EOH
      when "md","mkd","txt"
        file.puts <<-EOH
#{cookbook_name} Cookbook
#{'='*"#{cookbook_name} Cookbook".length}
TODO: Enter the cookbook description here.
e.g.
This cookbook makes your favorite breakfast sandwich.
Requirements
------------
TODO: List your cookbook requirements. Be sure to include any requirements this cookbook has on platforms, libraries, other cookbooks, packages, operating systems, etc.
e.g.
#### packages
- `toaster` - #{cookbook_name} needs toaster to brown your bagel.
Attributes
----------
TODO: List your cookbook attributes here.
e.g.
#### #{cookbook_name}::default
<table>
<tr>
<th>Key</th>
<th>Type</th>
<th>Description</th>
<th>Default</th>
</tr>
<tr>
<td><tt>['#{cookbook_name}']['bacon']</tt></td>
<td>Boolean</td>
<td>whether to include bacon</td>
<td><tt>true</tt></td>
</tr>
</table>
Usage
-----
#### #{cookbook_name}::default
TODO: Write usage instructions for each cookbook.
e.g.
Just include `#{cookbook_name}` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[#{cookbook_name}]"
]
}
```
Contributing
------------
TODO: (optional) If this is a public cookbook, detail the process for contributing. If this is a private cookbook, remove this section.
e.g.
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write your change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
License and Authors
-------------------
Authors: TODO: List authors
        EOH
      else
        file.puts <<-EOH
#{cookbook_name} Cookbook
#{'='*"#{cookbook_name} Cookbook".length}
TODO: Enter the cookbook description here.
e.g.
This cookbook makes your favorite breakfast sandwich.
Requirements
TODO: List your cookbook requirements. Be sure to include any requirements this cookbook has on platforms, libraries, other cookbooks, packages, operating systems, etc.
e.g.
toaster #{cookbook_name} needs toaster to brown your bagel.
Attributes
TODO: List your cookbook attributes here.
#{cookbook_name}
Key Type Description Default
['#{cookbook_name}']['bacon'] Boolean whether to include bacon true
Usage
#{cookbook_name}
TODO: Write usage instructions for each cookbook.
e.g.
Just include `#{cookbook_name}` in your node's `run_list`:
[code]
{
"name":"my_node",
"run_list": [
"recipe[#{cookbook_name}]"
]
}
[/code]
Contributing
TODO: (optional) If this is a public cookbook, detail the process for contributing. If this is a private cookbook, remove this section.
e.g.
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write your change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
License and Authors
Authors: TODO: List authors
        EOH
      end
    end
  end
end
# Writes metadata.rb (name, maintainer, license, description, version) into
# the cookbook directory unless one already exists. When a README was
# generated, a long_description directive pointing at it is embedded too.
def create_metadata(dir, cookbook_name, copyright, email, license, readme_format)
  msg("** Creating metadata for cookbook: #{cookbook_name}")
  # Map the CLI license keyword onto its human-readable display name.
  license_name = case license
  when "apachev2"
    "Apache 2.0"
  when "gplv2"
    "GNU Public License 2.0"
  when "gplv3"
    "GNU Public License 3.0"
  when "mit"
    "MIT"
  when "none"
    "All rights reserved"
  end
  # NOTE(review): File.exists? is deprecated (removed in Ruby 3.2; use
  # File.exist?) — kept as-is here to stay consistent with the rest of the
  # file.
  unless File.exists?(File.join(dir, cookbook_name, "metadata.rb"))
    open(File.join(dir, cookbook_name, "metadata.rb"), "w") do |file|
      if File.exists?(File.join(dir, cookbook_name, "README.#{readme_format}"))
        long_description = "long_description IO.read(File.join(File.dirname(__FILE__), 'README.#{readme_format}'))"
      end
      file.puts <<-EOH
name '#{cookbook_name}'
maintainer '#{copyright}'
maintainer_email '#{email}'
license '#{license_name}'
description 'Installs/Configures #{cookbook_name}'
#{long_description}
version '0.1.0'
      EOH
    end
  end
end
private
# True when knife.rb supplied no usable cookbook_path (unset or empty).
def default_cookbook_path_empty?
  configured_path = Chef::Config[:cookbook_path]
  configured_path.nil? || configured_path.empty?
end
# True when the given value is nil or empty (e.g. "" or []).
def parameter_empty?(parameter)
  return true if parameter.nil?
  parameter.empty?
end
end
end
end
Fix typo: "List you cookbook" should be "List your cookbook"
#
# Author:: Nuo Yan (<nuo@opscode.com>)
# Copyright:: Copyright (c) 2009 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife'
class Chef
class Knife
class CookbookCreate < Knife
deps do
require 'chef/json_compat'
require 'uri'
require 'fileutils'
end
banner "knife cookbook create COOKBOOK (options)"
option :cookbook_path,
:short => "-o PATH",
:long => "--cookbook-path PATH",
:description => "The directory where the cookbook will be created"
option :readme_format,
:short => "-r FORMAT",
:long => "--readme-format FORMAT",
:description => "Format of the README file, supported formats are 'md' (markdown) and 'rdoc' (rdoc)"
option :cookbook_license,
:short => "-I LICENSE",
:long => "--license LICENSE",
:description => "License for cookbook, apachev2, gplv2, gplv3, mit or none"
option :cookbook_copyright,
:short => "-C COPYRIGHT",
:long => "--copyright COPYRIGHT",
:description => "Name of Copyright holder"
option :cookbook_email,
:short => "-m EMAIL",
:long => "--email EMAIL",
:description => "Email address of cookbook maintainer"
# Entry point for `knife cookbook create COOKBOOK`.
# Validates input, resolves defaults from knife.rb / CLI options, then
# generates the cookbook skeleton, README, CHANGELOG and metadata.rb.
def run
self.config = Chef::Config.merge!(config)
# A cookbook name argument is mandatory.
if @name_args.length < 1
show_usage
ui.fatal("You must specify a cookbook name")
exit 1
end
# Without a cookbook_path from knife.rb or -o there is nowhere to write.
if default_cookbook_path_empty? && parameter_empty?(config[:cookbook_path])
raise ArgumentError, "Default cookbook_path is not specified in the knife.rb config file, and a value to -o is not provided. Nowhere to write the new cookbook to."
end
# Only the first configured cookbook path is used as the target directory.
cookbook_path = File.expand_path(Array(config[:cookbook_path]).first)
cookbook_name = @name_args.first
copyright = config[:cookbook_copyright] || "YOUR_COMPANY_NAME"
email = config[:cookbook_email] || "YOUR_EMAIL"
# The literal string "false" (as passed on the command line) disables the
# option; anything falsy falls back to the default.
license = ((config[:cookbook_license] != "false") && config[:cookbook_license]) || "none"
readme_format = ((config[:readme_format] != "false") && config[:readme_format]) || "md"
create_cookbook(cookbook_path, cookbook_name, copyright, license)
create_readme(cookbook_path, cookbook_name, readme_format)
create_changelog(cookbook_path, cookbook_name)
create_metadata(cookbook_path, cookbook_name, copyright, email, license, readme_format)
end
# Create the standard cookbook directory skeleton under dir/cookbook_name
# and write a default recipe carrying a copyright/license header.
# Existing recipes/default.rb is never overwritten.
def create_cookbook(dir, cookbook_name, copyright, license)
msg("** Creating cookbook #{cookbook_name}")
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "attributes")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "recipes")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "definitions")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "libraries")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "resources")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "providers")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "files", "default")}"
FileUtils.mkdir_p "#{File.join(dir, cookbook_name, "templates", "default")}"
# Only generate the default recipe when it does not exist yet.
unless File.exists?(File.join(dir, cookbook_name, "recipes", "default.rb"))
open(File.join(dir, cookbook_name, "recipes", "default.rb"), "w") do |file|
file.puts <<-EOH
#
# Cookbook Name:: #{cookbook_name}
# Recipe:: default
#
# Copyright #{Time.now.year}, #{copyright}
#
EOH
# Append the license boilerplate matching the chosen license.
# NOTE(review): an unrecognized license value appends nothing — confirm
# that this silent fall-through is intended.
case license
when "apachev2"
file.puts <<-EOH
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
EOH
when "gplv2"
file.puts <<-EOH
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
EOH
when "gplv3"
file.puts <<-EOH
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
EOH
when "mit"
file.puts <<-EOH
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
EOH
when "none"
file.puts <<-EOH
# All rights reserved - Do Not Redistribute
#
EOH
end
end
end
end
# Write a markdown CHANGELOG.md stub for the cookbook.
# An existing CHANGELOG.md is left untouched.
def create_changelog(dir, cookbook_name)
msg("** Creating CHANGELOG for cookbook: #{cookbook_name}")
unless File.exists?(File.join(dir,cookbook_name,'CHANGELOG.md'))
open(File.join(dir, cookbook_name, 'CHANGELOG.md'),'w') do |file|
file.puts <<-EOH
#{cookbook_name} CHANGELOG
#{'='*"#{cookbook_name} CHANGELOG".length}
This file is used to list changes made in each version of the #{cookbook_name} cookbook.
0.1.0
-----
- [your_name] - Initial release of #{cookbook_name}
- - -
Check the [Markdown Syntax Guide](http://daringfireball.net/projects/markdown/syntax) for help with Markdown.
The [Github Flavored Markdown page](http://github.github.com/github-flavored-markdown/) describes the differences between markdown on github and standard markdown.
EOH
end
end
end
# Write a README stub in the requested format (rdoc, md-family, or plain
# text for anything else).  An existing README of the same format is left
# untouched.
#
# BUG FIX: the md and plain-text templates said "List you cookbook
# attributes" — corrected to "List your cookbook attributes" to match the
# rdoc template and the rest of the wording.
def create_readme(dir, cookbook_name, readme_format)
msg("** Creating README for cookbook: #{cookbook_name}")
unless File.exists?(File.join(dir, cookbook_name, "README.#{readme_format}"))
open(File.join(dir, cookbook_name, "README.#{readme_format}"), "w") do |file|
case readme_format
when "rdoc"
file.puts <<-EOH
= #{cookbook_name} Cookbook
TODO: Enter the cookbook description here.
e.g.
This cookbook makes your favorite breakfast sandwich.
== Requirements
TODO: List your cookbook requirements. Be sure to include any requirements this cookbook has on platforms, libraries, other cookbooks, packages, operating systems, etc.
e.g.
==== packages
- +toaster+ - #{cookbook_name} needs toaster to brown your bagel.
== Attributes
TODO: List your cookbook attributes here.
e.g.
==== #{cookbook_name}::default
<table>
<tr>
<th>Key</th>
<th>Type</th>
<th>Description</th>
<th>Default</th>
</tr>
<tr>
<td><tt>['#{cookbook_name}']['bacon']</tt></td>
<td>Boolean</td>
<td>whether to include bacon</td>
<td><tt>true</tt></td>
</tr>
</table>
== Usage
==== #{cookbook_name}::default
TODO: Write usage instructions for each cookbook.
e.g.
Just include +#{cookbook_name}+ in your node's +run_list+:
{
"name":"my_node",
"run_list": [
"recipe[#{cookbook_name}]"
]
}
== Contributing
TODO: (optional) If this is a public cookbook, detail the process for contributing. If this is a private cookbook, remove this section.
e.g.
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write your change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
== License and Authors
Authors: TODO: List authors
EOH
when "md","mkd","txt"
file.puts <<-EOH
#{cookbook_name} Cookbook
#{'='*"#{cookbook_name} Cookbook".length}
TODO: Enter the cookbook description here.
e.g.
This cookbook makes your favorite breakfast sandwich.
Requirements
------------
TODO: List your cookbook requirements. Be sure to include any requirements this cookbook has on platforms, libraries, other cookbooks, packages, operating systems, etc.
e.g.
#### packages
- `toaster` - #{cookbook_name} needs toaster to brown your bagel.
Attributes
----------
TODO: List your cookbook attributes here.
e.g.
#### #{cookbook_name}::default
<table>
<tr>
<th>Key</th>
<th>Type</th>
<th>Description</th>
<th>Default</th>
</tr>
<tr>
<td><tt>['#{cookbook_name}']['bacon']</tt></td>
<td>Boolean</td>
<td>whether to include bacon</td>
<td><tt>true</tt></td>
</tr>
</table>
Usage
-----
#### #{cookbook_name}::default
TODO: Write usage instructions for each cookbook.
e.g.
Just include `#{cookbook_name}` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[#{cookbook_name}]"
]
}
```
Contributing
------------
TODO: (optional) If this is a public cookbook, detail the process for contributing. If this is a private cookbook, remove this section.
e.g.
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write your change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
License and Authors
-------------------
Authors: TODO: List authors
EOH
else
file.puts <<-EOH
#{cookbook_name} Cookbook
#{'='*"#{cookbook_name} Cookbook".length}
TODO: Enter the cookbook description here.
e.g.
This cookbook makes your favorite breakfast sandwich.
Requirements
TODO: List your cookbook requirements. Be sure to include any requirements this cookbook has on platforms, libraries, other cookbooks, packages, operating systems, etc.
e.g.
toaster #{cookbook_name} needs toaster to brown your bagel.
Attributes
TODO: List your cookbook attributes here.
#{cookbook_name}
Key Type Description Default
['#{cookbook_name}']['bacon'] Boolean whether to include bacon true
Usage
#{cookbook_name}
TODO: Write usage instructions for each cookbook.
e.g.
Just include `#{cookbook_name}` in your node's `run_list`:
[code]
{
"name":"my_node",
"run_list": [
"recipe[#{cookbook_name}]"
]
}
[/code]
Contributing
TODO: (optional) If this is a public cookbook, detail the process for contributing. If this is a private cookbook, remove this section.
e.g.
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write your change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
License and Authors
Authors: TODO: List authors
EOH
end
end
end
end
# Write metadata.rb for the new cookbook.  An existing metadata.rb is left
# untouched.
def create_metadata(dir, cookbook_name, copyright, email, license, readme_format)
msg("** Creating metadata for cookbook: #{cookbook_name}")
# Map the license option to its display name.
# NOTE(review): an unrecognized license leaves license_name nil, writing
# "license ''" into metadata.rb — confirm this is acceptable.
license_name = case license
when "apachev2"
"Apache 2.0"
when "gplv2"
"GNU Public License 2.0"
when "gplv3"
"GNU Public License 3.0"
when "mit"
"MIT"
when "none"
"All rights reserved"
end
unless File.exists?(File.join(dir, cookbook_name, "metadata.rb"))
open(File.join(dir, cookbook_name, "metadata.rb"), "w") do |file|
# Reference the README as the long description only when it exists;
# otherwise long_description is nil and interpolates to a blank line.
if File.exists?(File.join(dir, cookbook_name, "README.#{readme_format}"))
long_description = "long_description IO.read(File.join(File.dirname(__FILE__), 'README.#{readme_format}'))"
end
file.puts <<-EOH
name '#{cookbook_name}'
maintainer '#{copyright}'
maintainer_email '#{email}'
license '#{license_name}'
description 'Installs/Configures #{cookbook_name}'
#{long_description}
version '0.1.0'
EOH
end
end
end
private
# True when knife.rb supplies no usable cookbook_path (unset or empty).
def default_cookbook_path_empty?
  path = Chef::Config[:cookbook_path]
  path.nil? || path.empty?
end
# True when the given parameter is nil or empty.  Works for anything that
# responds to #empty? (strings and arrays alike).
def parameter_empty?(parameter)
  return true if parameter.nil?
  parameter.empty?
end
end
end
end
|
# Author:: Murali Raju (<murali.raju@appliv.com>)
# Copyright:: Copyright (c) 2012 Murali Raju.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/ucs_base'
class Chef
class Knife
class UcsPoolCreate < Knife
include Knife::UCSBase
banner "knife ucs pool create (options)"
attr_accessor :initial_sleep_delay
# CLI options.  Each :proc stores the parsed value in Chef::Config[:knife],
# which #run reads back.
option :pool,
:short => "-P POOL",
:long => "--pool POOLTYPE",
:description => "UCS pool types <mac,uuid,wwpn,wwnn,managementip>",
:proc => Proc.new { |f| Chef::Config[:knife][:pool] = f }
option :name,
:short => "-N NAME",
:long => "--pool-name POOLNAME",
:description => "The pool name",
:proc => Proc.new { |f| Chef::Config[:knife][:name] = f }
option :start,
:short => "-S START",
:long => "--pool-start STARTRANGE",
:description => "Start of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:start] = f }
option :end,
:short => "-E END",
:long => "--pool-end ENDRANGE",
:description => "End of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:end] = f }
option :mask,
:short => "-M MASK",
:long => "--subnet-mask SUBNETMASK",
:description => "The subnet mask for an IP range",
:proc => Proc.new { |f| Chef::Config[:knife][:mask] = f }
option :gateway,
:short => "-G GATEWAY",
:long => "--gateway IPGATEWAY",
:description => "The IP Gateway address of a subnet",
:proc => Proc.new { |f| Chef::Config[:knife][:gateway] = f }
option :org,
:short => "-O ORG",
:long => "--org ORG",
:description => "The organization",
:proc => Proc.new { |f| Chef::Config[:knife][:org] = f }
# Create a UCS pool of the type named by --pool, using the range/name
# options stashed in Chef::Config[:knife], then print the parsed result of
# the UCS XML response.
#
# BUG FIX in the 'wwnn' branch: the provisioner response was printed and
# discarded, leaving `xml_response` undefined (NameError on the next line),
# and the result blocks referenced `macpool` instead of their own block
# variable `wwnn`.  The response is now captured and the correct variable
# is used, matching the 'managementip' and 'mac' branches.
def run
$stdout.sync = true
pool_type = "#{Chef::Config[:knife][:pool]}"
case pool_type
when 'managementip'
json = { :start_ip => Chef::Config[:knife][:start], :end_ip => Chef::Config[:knife][:end],
:subnet_mask => Chef::Config[:knife][:mask], :gateway => Chef::Config[:knife][:gateway] }.to_json
xml_response = provisioner.create_management_ip_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/ippoolBlock").each do |ippool|
puts ''
puts "Management IP Block from: #{ui.color("#{ippool.attributes['from']}", :magenta)} to: #{ui.color("#{ippool.attributes['to']}", :magenta)}" +
" status: #{ui.color("#{ippool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |ippool|
puts "#{ippool.attributes['errorCode']} #{ui.color("#{ippool.attributes['errorDescr']}", :red)}"
end
when 'mac'
json = { :mac_pool_name => Chef::Config[:knife][:name], :mac_pool_start => Chef::Config[:knife][:start],
:mac_pool_end => Chef::Config[:knife][:end], :org => Chef::Config[:knife][:org] }.to_json
xml_response = provisioner.create_mac_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/macpoolPool").each do |macpool|
puts ''
puts "MAC address pool : #{ui.color("#{macpool.attributes['name']}", :magenta)}" +
" status: #{ui.color("#{macpool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |macpool|
puts "#{macpool.attributes['errorCode']} #{ui.color("#{macpool.attributes['errorDescr']}", :red)}"
end
when 'wwnn'
json = { :wwnn_pool_name => Chef::Config[:knife][:name], :wwnn_pool_start => Chef::Config[:knife][:start],
:wwnn_pool_end => Chef::Config[:knife][:end], :org => Chef::Config[:knife][:org] }.to_json
xml_response = provisioner.create_wwnn_pool(json)
xml_doc = Nokogiri::XML(xml_response)
# NOTE(review): xpath targets macpoolPool here as in the 'mac' branch —
# confirm the UCS response element name for WWNN pools.
xml_doc.xpath("configConfMos/outConfigs/pair/macpoolPool").each do |wwnn|
puts ''
puts "WWNN pool : #{ui.color("#{wwnn.attributes['name']}", :magenta)}" +
" status: #{ui.color("#{wwnn.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |wwnn|
puts "#{wwnn.attributes['errorCode']} #{ui.color("#{wwnn.attributes['errorDescr']}", :red)}"
end
else
puts "Incorrect options. Please make sure you are using one of the following: mac,uuid,wwpn,wwnn,managementip"
end
end
end
end
end
Refactor: print the raw WWNN pool response and comment out the incomplete result parsing.
# Author:: Murali Raju (<murali.raju@appliv.com>)
# Copyright:: Copyright (c) 2012 Murali Raju.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/ucs_base'
class Chef
class Knife
class UcsPoolCreate < Knife
include Knife::UCSBase
banner "knife ucs pool create (options)"
attr_accessor :initial_sleep_delay
# CLI options.  Each :proc stores the parsed value in Chef::Config[:knife],
# which #run reads back.
option :pool,
:short => "-P POOL",
:long => "--pool POOLTYPE",
:description => "UCS pool types <mac,uuid,wwpn,wwnn,managementip>",
:proc => Proc.new { |f| Chef::Config[:knife][:pool] = f }
option :name,
:short => "-N NAME",
:long => "--pool-name POOLNAME",
:description => "The pool name",
:proc => Proc.new { |f| Chef::Config[:knife][:name] = f }
option :start,
:short => "-S START",
:long => "--pool-start STARTRANGE",
:description => "Start of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:start] = f }
option :end,
:short => "-E END",
:long => "--pool-end ENDRANGE",
:description => "End of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:end] = f }
option :mask,
:short => "-M MASK",
:long => "--subnet-mask SUBNETMASK",
:description => "The subnet mask for an IP range",
:proc => Proc.new { |f| Chef::Config[:knife][:mask] = f }
option :gateway,
:short => "-G GATEWAY",
:long => "--gateway IPGATEWAY",
:description => "The IP Gateway address of a subnet",
:proc => Proc.new { |f| Chef::Config[:knife][:gateway] = f }
option :org,
:short => "-O ORG",
:long => "--org ORG",
:description => "The organization",
:proc => Proc.new { |f| Chef::Config[:knife][:org] = f }
# Create a UCS pool of the type named by --pool, using the range/name
# options stashed in Chef::Config[:knife].
def run
$stdout.sync = true
pool_type = "#{Chef::Config[:knife][:pool]}"
case pool_type
when 'managementip'
json = { :start_ip => Chef::Config[:knife][:start], :end_ip => Chef::Config[:knife][:end],
:subnet_mask => Chef::Config[:knife][:mask], :gateway => Chef::Config[:knife][:gateway] }.to_json
xml_response = provisioner.create_management_ip_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/ippoolBlock").each do |ippool|
puts ''
puts "Management IP Block from: #{ui.color("#{ippool.attributes['from']}", :magenta)} to: #{ui.color("#{ippool.attributes['to']}", :magenta)}" +
" status: #{ui.color("#{ippool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |ippool|
puts "#{ippool.attributes['errorCode']} #{ui.color("#{ippool.attributes['errorDescr']}", :red)}"
end
when 'mac'
json = { :mac_pool_name => Chef::Config[:knife][:name], :mac_pool_start => Chef::Config[:knife][:start],
:mac_pool_end => Chef::Config[:knife][:end], :org => Chef::Config[:knife][:org] }.to_json
xml_response = provisioner.create_mac_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/macpoolPool").each do |macpool|
puts ''
puts "MAC address pool : #{ui.color("#{macpool.attributes['name']}", :magenta)}" +
" status: #{ui.color("#{macpool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |macpool|
puts "#{macpool.attributes['errorCode']} #{ui.color("#{macpool.attributes['errorDescr']}", :red)}"
end
when 'wwnn'
json = { :wwnn_name => Chef::Config[:knife][:name], :wwnn_from => Chef::Config[:knife][:start],
:wwnn_to => Chef::Config[:knife][:end], :org => Chef::Config[:knife][:org] }.to_json
# The raw XML response is printed; response parsing below is disabled
# (it referenced undefined variables) and kept for a future rework.
puts provisioner.create_wwnn_pool(json)
# xml_doc = Nokogiri::XML(xml_response)
#
# xml_doc.xpath("configConfMos/outConfigs/pair/macpoolPool").each do |wwnn|
# puts ''
# puts "WWNN pool : #{ui.color("#{macpool.attributes['name']}", :magenta)}" +
# " status: #{ui.color("#{macpool.attributes['status']}", :green)}"
# end
#
# #Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
# xml_doc.xpath("configConfMos").each do |wwnn|
# puts "#{wwnn.attributes['errorCode']} #{ui.color("#{wwnn.attributes['errorDescr']}", :red)}"
# end
else
puts "Incorrect options. Please make sure you are using one of the following: mac,uuid,wwpn,wwnn,managementip"
end
end
end
end
end
|
# Author:: Murali Raju (<murali.raju@appliv.com>)
# Copyright:: Copyright (c) 2012 Murali Raju.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/ucs_base'
class Chef
class Knife
class UcsPoolCreate < Knife
include Knife::UCSBase
banner "knife ucs pool create (options)"
attr_accessor :initial_sleep_delay
# CLI options.  Each :proc stores the parsed value in Chef::Config[:knife],
# which #run reads back.
option :pool,
:short => "-P POOL",
:long => "--pool POOLTYPE",
:description => "UCS pool types <mac,uuid,wwpn,wwnn,managementip>",
:proc => Proc.new { |f| Chef::Config[:knife][:pool] = f }
option :name,
:short => "-N NAME",
:long => "--pool-name POOLNAME",
:description => "The pool name",
:proc => Proc.new { |f| Chef::Config[:knife][:name] = f }
option :start,
:short => "-S START",
:long => "--pool-start STARTRANGE",
:description => "Start of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:start] = f }
option :end,
:short => "-E END",
:long => "--pool-end ENDRANGE",
:description => "End of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:end] = f }
option :mask,
:short => "-M MASK",
:long => "--subnet-mask SUBNETMASK",
:description => "The subnet mask for an IP range",
:proc => Proc.new { |f| Chef::Config[:knife][:mask] = f }
option :gateway,
:short => "-G GATEWAY",
:long => "--gateway IPGATEWAY",
:description => "The IP Gateway address of a subnet",
:proc => Proc.new { |f| Chef::Config[:knife][:gateway] = f }
option :org,
:short => "-O ORG",
:long => "--org ORG",
:description => "The organization",
:proc => Proc.new { |f| Chef::Config[:knife][:org] = f }
# Create a UCS pool of the type named by --pool, using the range/name
# options stashed in Chef::Config[:knife].  Only the managementip and mac
# types are handled in this revision.
def run
$stdout.sync = true
pool_type = "#{Chef::Config[:knife][:pool]}"
case pool_type
when 'managementip'
json = { :start_ip => Chef::Config[:knife][:start], :end_ip => Chef::Config[:knife][:end],
:subnet_mask => Chef::Config[:knife][:mask], :gateway => Chef::Config[:knife][:gateway] }.to_json
xml_response = provisioner.create_management_ip_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/ippoolBlock").each do |ippool|
puts ''
puts "Management IP Block from: #{ui.color("#{ippool.attributes['from']}", :magenta)} to: #{ui.color("#{ippool.attributes['to']}", :magenta)}" +
" status: #{ui.color("#{ippool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |ippool|
puts "#{ippool.attributes['errorCode']} #{ui.color("#{ippool.attributes['errorDescr']}", :red)}"
end
when 'mac'
json = { :mac_pool_name => Chef::Config[:knife][:name], :mac_pool_start => Chef::Config[:knife][:start],
:mac_pool_end => Chef::Config[:knife][:end], :org => Chef::Config[:knife][:org] }.to_json
xml_response = provisioner.create_mac_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/macpoolPool").each do |macpool|
puts ''
puts "MAC address pool from: #{ui.color("#{macpool.attributes['from']}", :magenta)} to: #{ui.color("#{macpool.attributes['to']}", :magenta)}" +
" status: #{ui.color("#{macpool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |macpool|
puts "#{macpool.attributes['errorCode']} #{ui.color("#{macpool.attributes['errorDescr']}", :red)}"
end
else
puts "Incorrect options. Please make sure you are using one of the following: mac,uuid,wwpn,wwnn,managementip"
end
end
end
end
end
Refactor the pool creation output messages.
# Author:: Murali Raju (<murali.raju@appliv.com>)
# Copyright:: Copyright (c) 2012 Murali Raju.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/ucs_base'
class Chef
class Knife
class UcsPoolCreate < Knife
include Knife::UCSBase
banner "knife ucs pool create (options)"
attr_accessor :initial_sleep_delay
# CLI options.  Each :proc stores the parsed value in Chef::Config[:knife],
# which #run reads back.
option :pool,
:short => "-P POOL",
:long => "--pool POOLTYPE",
:description => "UCS pool types <mac,uuid,wwpn,wwnn,managementip>",
:proc => Proc.new { |f| Chef::Config[:knife][:pool] = f }
option :name,
:short => "-N NAME",
:long => "--pool-name POOLNAME",
:description => "The pool name",
:proc => Proc.new { |f| Chef::Config[:knife][:name] = f }
option :start,
:short => "-S START",
:long => "--pool-start STARTRANGE",
:description => "Start of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:start] = f }
option :end,
:short => "-E END",
:long => "--pool-end ENDRANGE",
:description => "End of a pool range <IP, WWPN, WWNN, MAC>",
:proc => Proc.new { |f| Chef::Config[:knife][:end] = f }
option :mask,
:short => "-M MASK",
:long => "--subnet-mask SUBNETMASK",
:description => "The subnet mask for an IP range",
:proc => Proc.new { |f| Chef::Config[:knife][:mask] = f }
option :gateway,
:short => "-G GATEWAY",
:long => "--gateway IPGATEWAY",
:description => "The IP Gateway address of a subnet",
:proc => Proc.new { |f| Chef::Config[:knife][:gateway] = f }
option :org,
:short => "-O ORG",
:long => "--org ORG",
:description => "The organization",
:proc => Proc.new { |f| Chef::Config[:knife][:org] = f }
# Create a UCS pool of the type named by --pool, using the range/name
# options stashed in Chef::Config[:knife].  Only the managementip and mac
# types are handled in this revision.
def run
$stdout.sync = true
pool_type = "#{Chef::Config[:knife][:pool]}"
case pool_type
when 'managementip'
json = { :start_ip => Chef::Config[:knife][:start], :end_ip => Chef::Config[:knife][:end],
:subnet_mask => Chef::Config[:knife][:mask], :gateway => Chef::Config[:knife][:gateway] }.to_json
xml_response = provisioner.create_management_ip_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/ippoolBlock").each do |ippool|
puts ''
puts "Management IP Block from: #{ui.color("#{ippool.attributes['from']}", :magenta)} to: #{ui.color("#{ippool.attributes['to']}", :magenta)}" +
" status: #{ui.color("#{ippool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |ippool|
puts "#{ippool.attributes['errorCode']} #{ui.color("#{ippool.attributes['errorDescr']}", :red)}"
end
when 'mac'
json = { :mac_pool_name => Chef::Config[:knife][:name], :mac_pool_start => Chef::Config[:knife][:start],
:mac_pool_end => Chef::Config[:knife][:end], :org => Chef::Config[:knife][:org] }.to_json
xml_response = provisioner.create_mac_pool(json)
xml_doc = Nokogiri::XML(xml_response)
xml_doc.xpath("configConfMos/outConfigs/pair/macpoolPool").each do |macpool|
puts ''
puts "MAC address pool : #{ui.color("#{macpool.attributes['name']}", :magenta)}" +
" status: #{ui.color("#{macpool.attributes['status']}", :green)}"
end
#Ugly...refactor later to parse error with better exception handling. Nokogiri xpath search for elements might be an option
xml_doc.xpath("configConfMos").each do |macpool|
puts "#{macpool.attributes['errorCode']} #{ui.color("#{macpool.attributes['errorDescr']}", :red)}"
end
else
puts "Incorrect options. Please make sure you are using one of the following: mac,uuid,wwpn,wwnn,managementip"
end
end
end
end
end
|
#
# Author:: Kaustubh Deorukhkar (kaustubh@clogeny.com)
# Copyright:: Copyright (c) 2013 Opscode, Inc
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/provider/ifconfig'
class Chef
class Provider
class Ifconfig
class Aix < Chef::Provider::Ifconfig
# Populate @current_resource by parsing `ifconfig -a` output, looking for
# the interface named by @new_resource.device.  Sets @interface_exists as
# a side effect.  Returns @current_resource.
def load_current_resource
@current_resource = Chef::Resource::Ifconfig.new(@new_resource.name)
@interface_exists = false
found_interface = false
interface = {}
# Scan ifconfig output line by line; a line of the form
# "<name>: flags=<flags>" starts a new interface section.
@status = popen4("ifconfig -a") do |pid, stdin, stdout, stderr|
stdout.each do |line|
if !found_interface
if line =~ /^(\S+):\sflags=(\S+)/
# We have interface name, if this is the interface for @current_resource, load info else skip till next interface is found.
if $1 == @new_resource.device
# Found interface
found_interface = true
@interface_exists = true
@current_resource.target(@new_resource.target)
@current_resource.device($1)
interface[:flags] = $2
# NOTE(review): the metric regex re-binds $1 here, so the value
# passed is the metric capture, not the device name — confirm
# this dependence on match-variable ordering is intended.
@current_resource.metric($1) if line =~ /metric\s(\S+)/
end
end
else
# parse interface related information, stop when next interface is found.
if line =~ /^(\S+):\sflags=(\S+)/
# we are done parsing interface info and hit another one, so stop.
found_interface = false
break
else
if found_interface
# read up interface info
@current_resource.inet_addr($1) if line =~ /inet\s(\S+)\s/
@current_resource.bcast($1) if line =~ /broadcast\s(\S+)/
@current_resource.mask(hex_to_dec_netmask($1)) if line =~ /netmask\s(\S+)\s/
end
end
end
end
end
@current_resource
end
private
# add can be used in two scenarios, add first inet addr or add VIP
# http://www-01.ibm.com/support/docview.wss?uid=swg21294045
# Build the chdev command for the :add action.  chdev persists the change
# across reboots, unlike a plain ifconfig call.  Raises when a metric is
# requested, since chdev cannot set one.
def add_command
  if @new_resource.metric
    raise Chef::Exceptions::Ifconfig, "interface metric attribute cannot be set for :add action"
  end
  if @current_resource.inet_addr || @new_resource.is_vip
    # The device already has an address (or a VIP was requested): add an alias.
    cmd = "chdev -l #{@new_resource.device} -a alias4=#{@new_resource.name}"
    cmd << ",#{@new_resource.mask}" if @new_resource.mask
  else
    # First address on the device: set netaddr plus optional mask and mtu.
    cmd = "chdev -l #{@new_resource.device} -a netaddr=#{@new_resource.name}"
    cmd << " -a netmask=#{@new_resource.mask}" if @new_resource.mask
    cmd << " -a mtu=#{@new_resource.mtu}" if @new_resource.mtu
  end
  cmd
end
# Build the temporary `ifconfig` command for :enable. Falls back to the
# generic implementation unless we are layering an alias (VIP) on top of
# an existing address.
def enable_command
  return super unless @current_resource.inet_addr || @new_resource.is_vip
  cmd = "ifconfig #{@new_resource.device} inet #{@new_resource.name}"
  cmd << " netmask #{@new_resource.mask}" if @new_resource.mask
  cmd << " metric #{@new_resource.metric}" if @new_resource.metric
  cmd << " mtu #{@new_resource.mtu}" if @new_resource.mtu
  cmd << " alias"
end
# :disable — VIP aliases are removed with an explicit `inet ... delete`;
# everything else uses the base-class command.
def disable_command
  return super unless @new_resource.is_vip
  "ifconfig #{@new_resource.device} inet #{@new_resource.name} delete"
end
# Build the persistent `chdev` command for :delete (chdev survives
# reboots, unlike plain ifconfig).
def delete_command
  unless @new_resource.is_vip
    return "chdev -l #{@new_resource.device} -a state=down"
  end
  mask_suffix = @new_resource.mask ? ",#{@new_resource.mask}" : ""
  "chdev -l #{@new_resource.device} -a delalias4=#{@new_resource.name}#{mask_suffix}"
end
# Name of the loopback interface on AIX.
def loopback_device
  "lo0"
end
# Convert a hex netmask string such as '0xffff0000' into dotted-decimal
# form ('255.255.0.0'). Reads the four octet pairs at offsets 2,4,6,8.
def hex_to_dec_netmask(netmask)
  octets = [2, 4, 6, 8].map { |i| netmask[i..i + 1].to_i(16).to_s(10) }
  octets.join(".")
end
end
end
end
end
remove an extra space.
#
# Author:: Kaustubh Deorukhkar (kaustubh@clogeny.com)
# Copyright:: Copyright (c) 2013 Opscode, Inc
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/provider/ifconfig'
class Chef
class Provider
class Ifconfig
# AIX implementation of the ifconfig provider: inspects interfaces via
# `ifconfig -a`, applies persistent changes with `chdev`, and uses plain
# `ifconfig` for temporary alias (VIP) changes.
class Aix < Chef::Provider::Ifconfig
  # Populate @current_resource by parsing `ifconfig -a` output for the
  # device named by @new_resource; sets @interface_exists when found.
  def load_current_resource
    @current_resource = Chef::Resource::Ifconfig.new(@new_resource.name)
    @interface_exists = false
    found_interface = false
    # NOTE(review): `interface` is written below but never read back.
    interface = {}
    @status = popen4("ifconfig -a") do |pid, stdin, stdout, stderr|
      stdout.each do |line|
        if !found_interface
          if line =~ /^(\S+):\sflags=(\S+)/
            # We have interface name, if this is the interface for @current_resource, load info else skip till next interface is found.
            if $1 == @new_resource.device
              # Found interface
              found_interface = true
              @interface_exists = true
              @current_resource.target(@new_resource.target)
              @current_resource.device($1)
              interface[:flags] = $2
              # $1 below refers to the `metric` capture, not the device.
              @current_resource.metric($1) if line =~ /metric\s(\S+)/
            end
          end
        else
          # parse interface related information, stop when next interface is found.
          if line =~ /^(\S+):\sflags=(\S+)/
            # we are done parsing interface info and hit another one, so stop.
            found_interface = false
            break
          else
            if found_interface
              # read up interface info
              @current_resource.inet_addr($1) if line =~ /inet\s(\S+)\s/
              @current_resource.bcast($1) if line =~ /broadcast\s(\S+)/
              @current_resource.mask(hex_to_dec_netmask($1)) if line =~ /netmask\s(\S+)\s/
            end
          end
        end
      end
    end
    @current_resource
  end

  private

  # add can be used in two scenarios, add first inet addr or add VIP
  # http://www-01.ibm.com/support/docview.wss?uid=swg21294045
  def add_command
    # ifconfig changes are temporary, chdev persist across reboots.
    raise Chef::Exceptions::Ifconfig, "interface metric attribute cannot be set for :add action" if @new_resource.metric
    if @current_resource.inet_addr || @new_resource.is_vip
      # adding a VIP
      command = "chdev -l #{@new_resource.device} -a alias4=#{@new_resource.name}"
      command << ",#{@new_resource.mask}" if @new_resource.mask
    else
      command = "chdev -l #{@new_resource.device} -a netaddr=#{@new_resource.name}"
      command << " -a netmask=#{@new_resource.mask}" if @new_resource.mask
      command << " -a mtu=#{@new_resource.mtu}" if @new_resource.mtu
    end
    command
  end

  # Temporary (non-persistent) ifconfig command for :enable; defers to
  # the base class unless an alias is being added.
  def enable_command
    if @current_resource.inet_addr || @new_resource.is_vip
      # add alias
      command = "ifconfig #{@new_resource.device} inet #{@new_resource.name}"
      command << " netmask #{@new_resource.mask}" if @new_resource.mask
      command << " metric #{@new_resource.metric}" if @new_resource.metric
      command << " mtu #{@new_resource.mtu}" if @new_resource.mtu
      command << " alias"
    else
      command = super
    end
    command
  end

  # VIP aliases are removed with `inet ... delete`; otherwise defer to
  # the base class.
  def disable_command
    if @new_resource.is_vip
      "ifconfig #{@new_resource.device} inet #{@new_resource.name} delete"
    else
      super
    end
  end

  def delete_command
    # ifconfig changes are temporary, chdev persist across reboots.
    if @new_resource.is_vip
      command = "chdev -l #{@new_resource.device} -a delalias4=#{@new_resource.name}"
      command << ",#{@new_resource.mask}" if @new_resource.mask
    else
      command = "chdev -l #{@new_resource.device} -a state=down"
    end
    command
  end

  # Name of the AIX loopback interface.
  def loopback_device
    "lo0"
  end

  # Convert a hex netmask to dotted decimal.
  def hex_to_dec_netmask(netmask)
    # example '0xffff0000' -> '255.255.0.0'
    dec = netmask[2..3].to_i(16).to_s(10)
    [4,6,8].each { |n| dec = dec + "." + netmask[n..n+1].to_i(16).to_s(10) }
    dec
  end
end
end
end
end
|
module ChefSpec::API
  # @since 0.9.0
  module NotificationsMatchers
    #
    # Assert that a resource notifies another. Given a Chef Recipe that
    # notifies a template resource to restart apache:
    #
    #     template '/etc/apache2/config' do
    #       notifies :restart, 'service[apache2]'
    #     end
    #
    # The Examples section demonstrates the different ways to test
    # notifications on a resource with ChefSpec.
    #
    # @example Assert the template notifies apache of something
    #   template = chef_run.template('/etc/apache2.conf')
    #   expect(template).to notify('service[apache2]')
    #
    # @example Assert the template notifies apache to restart
    #   expect(template).to notify('service[apache2]').to(:restart)
    #
    # @example Assert the template notifies apache to restart immediately
    #   expect(template).to notify('service[apache2]').to(:restart).immediately
    #
    # @example Assert the template notifies apache to restart delayed
    #   expect(template).to notify('service[apache2]').to(:restart).delayed
    #
    # @param [String] signature
    #   the signature of the notification to match
    #
    # @return [ChefSpec::Matchers::NotificationsMatcher]
    #
    def notify(signature)
      ChefSpec::Matchers::NotificationsMatcher.new(signature)
    end
  end
end
Fix typos in notifications documentation
module ChefSpec::API
  # @since 0.9.0
  module NotificationsMatchers
    #
    # Assert that a resource notifies another. Given a Chef Recipe that
    # notifies a template resource to restart apache:
    #
    #     template '/etc/apache2/config' do
    #       notifies :restart, 'service[apache2]'
    #     end
    #
    # The Examples section demonstrates the different ways to test
    # notifications on a resource with ChefSpec.
    #
    # @example Assert the template notifies apache of something
    #   template = chef_run.template('/etc/apache2.conf')
    #   expect(template).to notify('service[apache2]')
    #
    # @example Assert the template notifies apache to restart
    #   expect(template).to notify('service[apache2]').to(:restart)
    #
    # @example Assert the template notifies apache to restart immediately
    #   expect(template).to notify('service[apache2]').to(:restart).immediately
    #
    # @example Assert the template notifies apache to restart delayed
    #   expect(template).to notify('service[apache2]').to(:restart).delayed
    #
    # @param [String] signature
    #   the signature of the notification to match
    #
    # @return [ChefSpec::Matchers::NotificationsMatcher]
    #
    def notify(signature)
      ChefSpec::Matchers::NotificationsMatcher.new(signature)
    end
  end
end
|
module CodeClimate
  module TestReporter
    # Boot SimpleCov with the Code Climate formatter when reporting is
    # enabled (repo token present and branch matches the configured one).
    def self.start
      if run?
        require "simplecov"
        ::SimpleCov.add_filter 'vendor'
        ::SimpleCov.formatter = Formatter
        ::SimpleCov.start(configuration.profile) do
          # This block is evaluated inside SimpleCov's configuration DSL,
          # so a bare `configuration` here would resolve to SimpleCov's
          # own method, not ours. Qualify it to read the intended value.
          skip_token CodeClimate::TestReporter.configuration.skip_token
        end
      end
    end

    # Report only when the repo token is set AND we are on the reporting branch.
    def self.run?
      environment_variable_set? && run_on_current_branch?
    end

    # Memoized check for ENV["CODECLIMATE_REPO_TOKEN"]; logs when absent.
    def self.environment_variable_set?
      return @environment_variable_set if defined?(@environment_variable_set)
      @environment_variable_set = !!ENV["CODECLIMATE_REPO_TOKEN"]
      unless @environment_variable_set
        logger.info("Not reporting to Code Climate because ENV['CODECLIMATE_REPO_TOKEN'] is not set.")
      end
      @environment_variable_set
    end

    # Memoized check that the current branch matches configuration.branch
    # (always true when no branch is configured); logs when it does not.
    def self.run_on_current_branch?
      return @run_on_current_branch if defined?(@run_on_current_branch)
      @run_on_current_branch = true if configured_branch.nil?
      @run_on_current_branch ||= !!(current_branch =~ /#{configured_branch}/i)
      unless @run_on_current_branch
        logger.info("Not reporting to Code Climate because #{configured_branch} is set as the reporting branch.")
      end
      @run_on_current_branch
    end

    # Branch name the user configured for reporting (may be nil).
    def self.configured_branch
      configuration.branch
    end

    # Branch name detected from git or the CI environment.
    def self.current_branch
      Git.branch_from_git_or_ci
    end

    def self.logger
      CodeClimate::TestReporter.configuration.logger
    end
  end
end
Scope TestReporter configuration when setting 'skip_token' in SimpleCov
module CodeClimate
  module TestReporter
    # Boot SimpleCov with the Code Climate formatter when reporting is
    # enabled (repo token present and branch matches the configured one).
    def self.start
      if run?
        require "simplecov"
        ::SimpleCov.add_filter 'vendor'
        ::SimpleCov.formatter = Formatter
        ::SimpleCov.start(configuration.profile) do
          # Fully qualified because this block is evaluated in SimpleCov's
          # configuration DSL, where a bare `configuration` would resolve
          # to SimpleCov's own method.
          skip_token CodeClimate::TestReporter.configuration.skip_token
        end
      end
    end

    # Report only when the repo token is set AND we are on the reporting branch.
    def self.run?
      environment_variable_set? && run_on_current_branch?
    end

    # Memoized check for ENV["CODECLIMATE_REPO_TOKEN"]; logs when absent.
    def self.environment_variable_set?
      return @environment_variable_set if defined?(@environment_variable_set)
      @environment_variable_set = !!ENV["CODECLIMATE_REPO_TOKEN"]
      unless @environment_variable_set
        logger.info("Not reporting to Code Climate because ENV['CODECLIMATE_REPO_TOKEN'] is not set.")
      end
      @environment_variable_set
    end

    # Memoized check that the current branch matches configuration.branch
    # (always true when no branch is configured); logs when it does not.
    def self.run_on_current_branch?
      return @run_on_current_branch if defined?(@run_on_current_branch)
      @run_on_current_branch = true if configured_branch.nil?
      @run_on_current_branch ||= !!(current_branch =~ /#{configured_branch}/i)
      unless @run_on_current_branch
        logger.info("Not reporting to Code Climate because #{configured_branch} is set as the reporting branch.")
      end
      @run_on_current_branch
    end

    # Branch name the user configured for reporting (may be nil).
    def self.configured_branch
      configuration.branch
    end

    # Branch name detected from git or the CI environment.
    def self.current_branch
      Git.branch_from_git_or_ci
    end

    def self.logger
      CodeClimate::TestReporter.configuration.logger
    end
  end
end
|
require 'optparse'
require 'fileutils'
require 'open3'
require File.expand_path(File.dirname(__FILE__) + '/../coffee-script')
module CoffeeScript
# The CommandLine handles all of the functionality of the `coffee-script`
# utility.
class CommandLine
BANNER = <<-EOS
coffee-script compiles CoffeeScript source files into JavaScript.
Usage:
coffee-script path/to/script.cs
EOS
# Seconds to pause between checks for changed source files.
WATCH_INTERVAL = 0.5
# Run the CommandLine off the contents of ARGV.
def initialize
  # Track source mtimes so --watch can detect edits.
  @mtimes = {}
  parse_options
  # The first matching mode wins; --interactive and --eval never touch files.
  return launch_repl if @options[:interactive]
  return eval_scriptlet if @options[:eval]
  check_sources
  return run_scripts if @options[:run]
  @sources.each {|source| compile_javascript(source) }
  watch_coffee_scripts if @options[:watch]
end
# The "--help" usage message.
def usage
  # Print the OptionParser help text (banner + flags) and terminate.
  puts "\n#{@option_parser}\n"
  exit
end
private
# Compiles (or partially compiles) the source CoffeeScript file, returning
# the desired JS, tokens, or lint results.
def compile_javascript(source)
  script = File.read(source)
  # --tokens, --print and --lint short-circuit before any file is written.
  return tokens(script) if @options[:tokens]
  js = compile(script, source)
  # compile returns nil on a parse error in --watch mode; skip writing then.
  return unless js
  return puts(js) if @options[:print]
  return lint(js) if @options[:lint]
  File.open(path_for(source), 'w+') {|f| f.write(js) }
end
# Spins up a watcher thread to keep track of the modification times of the
# source files, recompiling them whenever they're saved.
def watch_coffee_scripts
  watch_thread = Thread.start do
    loop do
      @sources.each do |source|
        mtime = File.stat(source).mtime
        @mtimes[source] ||= mtime
        # Recompile only when the file is newer than the last seen mtime.
        if mtime > @mtimes[source]
          @mtimes[source] = mtime
          compile_javascript(source)
        end
      end
      sleep WATCH_INTERVAL
    end
  end
  # Ctrl-C stops the watcher cleanly instead of raising in the main thread.
  Signal.trap("INT") { watch_thread.kill }
  watch_thread.join
end
# Ensure that all of the source files exist.
# Ensure that all of the source files exist: print usage when none were
# given, and exit(1) naming the first missing file otherwise.
def check_sources
  usage if @sources.empty?
  # File.exists? was deprecated and removed in Ruby 3.2; File.exist? is
  # the portable spelling.
  missing = @sources.detect {|s| !File.exist?(s) }
  if missing
    STDERR.puts("File not found: '#{missing}'")
    exit(1)
  end
end
# Pipe compiled JS through JSLint (requires a working 'jsl' command).
def lint(js)
  # Feed the generated JS to JSLint over stdin and echo whatever it reports.
  stdin, stdout, stderr = Open3.popen3('jsl -nologo -stdin')
  stdin.write(js)
  stdin.close
  puts stdout.read.tr("\n", '')
  errs = stderr.read.chomp
  puts errs unless errs.empty?
  stdout.close and stderr.close
end
# Eval a little piece of CoffeeScript directly from the command line.
def eval_scriptlet
  # From a TTY the remaining CLI arguments form the script; otherwise read stdin.
  script = STDIN.tty? ? @sources.join(' ') : STDIN.read
  return tokens(script) if @options[:tokens]
  js = compile(script)
  return lint(js) if @options[:lint]
  puts js
end
# Use Narwhal to run an interactive CoffeeScript session.
# Use Narwhal to run an interactive CoffeeScript session. Without the
# rescue, a missing `narwhal` binary surfaces as a raw Errno::ENOENT.
def launch_repl
  exec "narwhal lib/coffee_script/narwhal/js/launcher.js"
rescue Errno::ENOENT
  puts "Error: Narwhal must be installed to use the interactive REPL."
  exit(1)
end
# Use Narwhal to compile and execute CoffeeScripts.
# Use Narwhal to compile and execute CoffeeScripts; print a helpful
# message instead of a raw Errno::ENOENT when narwhal is not installed.
def run_scripts
  sources = @sources.join(' ')
  exec "narwhal lib/coffee_script/narwhal/js/launcher.js #{sources}"
rescue Errno::ENOENT
  puts "Error: Narwhal must be installed in order to execute CoffeeScripts."
  exit(1)
end
# Print the tokens that the lexer generates from a source script.
def tokens(script)
  # Dump the raw lexer token stream — a debugging aid for the compiler.
  puts Lexer.new.tokenize(script).inspect
end
# Compile a single source file to JavaScript.
def compile(script, source='')
  begin
    CoffeeScript.compile(script, :no_wrap => @options[:no_wrap])
  rescue CoffeeScript::ParseError => e
    STDERR.puts e.message(source)
    # In --watch mode keep running so the user can fix the file; returns
    # nil so the caller skips writing output.
    exit(1) unless @options[:watch]
    nil
  end
end
# Write out JavaScript alongside CoffeeScript unless an output directory
# is specified.
# Output path for a compiled source: same basename with a .js extension,
# placed in @options[:output] when given, otherwise next to the source.
def path_for(source)
  target_dir = @options[:output] || File.dirname(source)
  basename = File.basename(source, File.extname(source))
  File.join(target_dir, "#{basename}.js")
end
# Install the CoffeeScript TextMate bundle to ~/Library.
def install_bundle
  # Copies the shipped TextMate bundle into the user's Library (macOS only).
  bundle_dir = File.expand_path('~/Library/Application Support/TextMate/Bundles/')
  FileUtils.cp_r(File.dirname(__FILE__) + '/CoffeeScript.tmbundle', bundle_dir)
end
# Use OptionParser for all the options.
# Use OptionParser for all the options; populates @options and leaves
# the remaining (non-flag) arguments in @sources.
def parse_options
  @options = {}
  @option_parser = OptionParser.new do |opts|
    opts.on('-i', '--interactive', 'run a CoffeeScript REPL (requires Narwhal)') do |i|
      @options[:interactive] = true
    end
    opts.on('-r', '--run', 'compile and run a script (requires Narwhal)') do |r|
      @options[:run] = true
    end
    opts.on('-o', '--output [DIR]', 'set the directory for compiled JavaScript') do |d|
      @options[:output] = d
      # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
      FileUtils.mkdir_p(d) unless File.exist?(d)
    end
    opts.on('-w', '--watch', 'watch scripts for changes, and recompile') do |w|
      @options[:watch] = true
    end
    opts.on('-p', '--print', 'print the compiled JavaScript to stdout') do |d|
      @options[:print] = true
    end
    opts.on('-l', '--lint', 'pipe the compiled JavaScript through JSLint') do |l|
      @options[:lint] = true
    end
    opts.on('-e', '--eval', 'compile a cli scriptlet or read from stdin') do |e|
      @options[:eval] = true
    end
    opts.on('-t', '--tokens', 'print the tokens that the lexer produces') do |t|
      @options[:tokens] = true
    end
    opts.on('-v', '--verbose', 'print at every step of code generation') do |v|
      ENV['VERBOSE'] = 'true'
    end
    opts.on('-n', '--no-wrap', 'raw output, no safety wrapper or vars') do |n|
      @options[:no_wrap] = true
    end
    opts.on_tail('--install-bundle', 'install the CoffeeScript TextMate bundle') do |i|
      install_bundle
      exit
    end
    opts.on_tail('--version', 'display coffee-script version') do
      puts "coffee-script version #{CoffeeScript::VERSION}"
      exit
    end
    opts.on_tail('-h', '--help', 'display this help message') do
      usage
    end
  end
  @option_parser.banner = BANNER
  begin
    @option_parser.parse!(ARGV)
  rescue OptionParser::InvalidOption => e
    puts e.message
    exit(1)
  end
  # Whatever OptionParser leaves behind are the source files.
  @sources = ARGV
end
end
end
better error warnings on the command line
require 'optparse'
require 'fileutils'
require 'open3'
require File.expand_path(File.dirname(__FILE__) + '/../coffee-script')
module CoffeeScript
# The CommandLine handles all of the functionality of the `coffee-script`
# utility.
class CommandLine
  BANNER = <<-EOS
coffee-script compiles CoffeeScript source files into JavaScript.
Usage:
coffee-script path/to/script.cs
  EOS

  # Seconds to pause between checks for changed source files.
  WATCH_INTERVAL = 0.5

  # Run the CommandLine off the contents of ARGV: parse options, then
  # dispatch to REPL, eval, run, compile or watch mode.
  def initialize
    @mtimes = {}
    parse_options
    return launch_repl if @options[:interactive]
    return eval_scriptlet if @options[:eval]
    check_sources
    return run_scripts if @options[:run]
    @sources.each {|source| compile_javascript(source) }
    watch_coffee_scripts if @options[:watch]
  end

  # The "--help" usage message.
  def usage
    puts "\n#{@option_parser}\n"
    exit
  end

  private

  # Compiles (or partially compiles) the source CoffeeScript file, returning
  # the desired JS, tokens, or lint results.
  def compile_javascript(source)
    script = File.read(source)
    return tokens(script) if @options[:tokens]
    js = compile(script, source)
    # compile returns nil on a parse error in --watch mode.
    return unless js
    return puts(js) if @options[:print]
    return lint(js) if @options[:lint]
    File.open(path_for(source), 'w+') {|f| f.write(js) }
  end

  # Spins up a watcher thread to keep track of the modification times of the
  # source files, recompiling them whenever they're saved.
  def watch_coffee_scripts
    watch_thread = Thread.start do
      loop do
        @sources.each do |source|
          mtime = File.stat(source).mtime
          @mtimes[source] ||= mtime
          if mtime > @mtimes[source]
            @mtimes[source] = mtime
            compile_javascript(source)
          end
        end
        sleep WATCH_INTERVAL
      end
    end
    # Ctrl-C stops the watcher cleanly.
    Signal.trap("INT") { watch_thread.kill }
    watch_thread.join
  end

  # Ensure that all of the source files exist.
  def check_sources
    usage if @sources.empty?
    missing = @sources.detect {|s| !File.exists?(s) }
    if missing
      STDERR.puts("File not found: '#{missing}'")
      exit(1)
    end
  end

  # Pipe compiled JS through JSLint (requires a working 'jsl' command).
  def lint(js)
    stdin, stdout, stderr = Open3.popen3('jsl -nologo -stdin')
    stdin.write(js)
    stdin.close
    puts stdout.read.tr("\n", '')
    errs = stderr.read.chomp
    puts errs unless errs.empty?
    stdout.close and stderr.close
  end

  # Eval a little piece of CoffeeScript directly from the command line.
  def eval_scriptlet
    script = STDIN.tty? ? @sources.join(' ') : STDIN.read
    return tokens(script) if @options[:tokens]
    js = compile(script)
    return lint(js) if @options[:lint]
    puts js
  end

  # Use Narwhal to run an interactive CoffeeScript session.
  def launch_repl
    exec "narwhal lib/coffee_script/narwhal/js/launcher.js"
  rescue Errno::ENOENT
    puts "Error: Narwhal must be installed to use the interactive REPL."
    exit(1)
  end

  # Use Narwhal to compile and execute CoffeeScripts.
  def run_scripts
    sources = @sources.join(' ')
    exec "narwhal lib/coffee_script/narwhal/js/launcher.js #{sources}"
  rescue Errno::ENOENT
    puts "Error: Narwhal must be installed in order to execute CoffeeScripts."
    exit(1)
  end

  # Print the tokens that the lexer generates from a source script.
  def tokens(script)
    puts Lexer.new.tokenize(script).inspect
  end

  # Compile a single source file to JavaScript.
  def compile(script, source='')
    begin
      CoffeeScript.compile(script, :no_wrap => @options[:no_wrap])
    rescue CoffeeScript::ParseError => e
      STDERR.puts e.message(source)
      # Keep the watcher alive so the user can fix the file.
      exit(1) unless @options[:watch]
      nil
    end
  end

  # Write out JavaScript alongside CoffeeScript unless an output directory
  # is specified.
  def path_for(source)
    filename = File.basename(source, File.extname(source)) + '.js'
    dir = @options[:output] || File.dirname(source)
    File.join(dir, filename)
  end

  # Install the CoffeeScript TextMate bundle to ~/Library.
  def install_bundle
    bundle_dir = File.expand_path('~/Library/Application Support/TextMate/Bundles/')
    FileUtils.cp_r(File.dirname(__FILE__) + '/CoffeeScript.tmbundle', bundle_dir)
  end

  # Use OptionParser for all the options; leftovers in ARGV become @sources.
  def parse_options
    @options = {}
    @option_parser = OptionParser.new do |opts|
      opts.on('-i', '--interactive', 'run a CoffeeScript REPL (requires Narwhal)') do |i|
        @options[:interactive] = true
      end
      opts.on('-r', '--run', 'compile and run a script (requires Narwhal)') do |r|
        @options[:run] = true
      end
      opts.on('-o', '--output [DIR]', 'set the directory for compiled JavaScript') do |d|
        @options[:output] = d
        FileUtils.mkdir_p(d) unless File.exists?(d)
      end
      opts.on('-w', '--watch', 'watch scripts for changes, and recompile') do |w|
        @options[:watch] = true
      end
      opts.on('-p', '--print', 'print the compiled JavaScript to stdout') do |d|
        @options[:print] = true
      end
      opts.on('-l', '--lint', 'pipe the compiled JavaScript through JSLint') do |l|
        @options[:lint] = true
      end
      opts.on('-e', '--eval', 'compile a cli scriptlet or read from stdin') do |e|
        @options[:eval] = true
      end
      opts.on('-t', '--tokens', 'print the tokens that the lexer produces') do |t|
        @options[:tokens] = true
      end
      opts.on('-v', '--verbose', 'print at every step of code generation') do |v|
        ENV['VERBOSE'] = 'true'
      end
      opts.on('-n', '--no-wrap', 'raw output, no safety wrapper or vars') do |n|
        @options[:no_wrap] = true
      end
      opts.on_tail('--install-bundle', 'install the CoffeeScript TextMate bundle') do |i|
        install_bundle
        exit
      end
      opts.on_tail('--version', 'display coffee-script version') do
        puts "coffee-script version #{CoffeeScript::VERSION}"
        exit
      end
      opts.on_tail('-h', '--help', 'display this help message') do
        usage
      end
    end
    @option_parser.banner = BANNER
    begin
      @option_parser.parse!(ARGV)
    rescue OptionParser::InvalidOption => e
      puts e.message
      exit(1)
    end
    @sources = ARGV
  end
end
end
|
module CouchPopulator
class Initializer
  class << self
    # Entry point: parse CLI options, verify CouchDB availability (unless
    # we are only generating documents) and start the populator.
    def run
      # process command line options
      command_line_options
      # Only check CouchDB-availability when needed
      unless command_line_options[:generate_only]
        # :couch has no default value (see command_line_options), so a
        # missing database name can be detected here.
        Trollop.die :couch, "You need at least to provide the database's name" if command_line_options[:couch].nil?
        # Build the full CouchDB database url
        couch_url = CouchPopulator::CouchHelper.get_full_couchurl(command_line_options[:couch])
        # Check for availability of couchdb
        Trollop.die :couch, "#{couch_url} is not reachable or ressource does not exist" unless CouchPopulator::CouchHelper.couch_available? couch_url
        # create database on demand
        if command_line_options[:create_db]
          # TODO needs to be implemented properly
          # CouchPopulator::CouchHelper.create_db(command_line_options[:couch])
        else
          CouchPopulator::CouchHelper.database_exists? couch_url
        end
      end
      # Initialize CouchPopulator
      options = ({:executor_klass => executor, :generator_klass => generator, :logger => CouchPopulator::Logger.new(command_line_options[:logfile])}).merge(command_line_options)
      CouchPopulator::Base.new(command_line_options[:couch], options).populate
    end

    # Define some command-line options
    def command_line_options
      @command_line_options ||= Trollop.options do
        version "v1.0 (c) Sebastian Cohnen, 2009"
        banner <<-BANNER
This is a simple, yet powerfull tool to import large numbers of on-the-fly generated documents into CouchDB.
It's using concurrency by spawning several curl subprocesses. Documents are generated on-the-fly.
See http://github.com/tisba/couchpopulator for more information.
Usage:
./couchpopulator [OPTIONS] [executor [EXECUTOR-OPTIONS]]
To see, what options for 'executor' are:
./couchpopulator executor -h
        BANNER
        # :couch is declared without a default so #run can detect a missing
        # database name (the previous :default => "" made the nil-check
        # impossible and let an empty URL through).
        opt :couch, "URL of CouchDB Server. You can also provide the name of the target DB only, http://localhost:5984/ will be prepended automatically", :type => String
        opt :create_db, "Create DB if needed.", :default => false
        opt :generator, "Name of the generator-class to use", :default => "Example"
        opt :generate_only, "Generate the docs, but don't write to couch and stdout them instead", :default => false
        opt :logfile, "Redirect info/debug output to specified file instead to stdout", :type => String, :default => ""
        stop_on_unknown
      end
    end

    # Get the requested generator or die
    def generator
      @generator ||= begin
        begin
          require File.join(File.dirname(__FILE__), "../../generators/#{command_line_options[:generator]}.rb")
        rescue Exception => e
          Trollop.die :generator, "Generator #{command_line_options[:generator]} not found!"
        end
        generator_klass = CouchPopulator::MiscHelper.camelize_and_constantize("generators/#{command_line_options[:generator]}") rescue generator_klass = nil
        # Die when the class could not be resolved or does NOT implement
        # .generate — the previous condition was inverted and rejected
        # generators that did respond to :generate.
        Trollop.die :generator, "Generator must be set, a valid class-name and respond to generate(n)" if generator_klass.nil? || !generator_klass.respond_to?(:generate)
        generator_klass
      end
    end

    # Get the executor (defaults to standard) or die
    def executor
      @executor ||= begin
        executor_cmd = ARGV.shift || "standard"
        begin
          require File.join(File.dirname(__FILE__), "../../executors/#{executor_cmd}.rb")
        rescue Exception => e
          Trollop.die "Executor #{executor_cmd} not found!"
        end
        executor_klass = CouchPopulator::MiscHelper.camelize_and_constantize("executors/#{executor_cmd}") rescue executor_klass = nil
        Trollop.die "Executor must be set and a valid class-name" if executor_klass.nil?
        executor_klass
      end
    end
  end
end
end
Fix the initializer: fail early when no database name is given and declare the :couch option as a String with no default
module CouchPopulator
class Initializer
  class << self
    # Entry point: parse CLI options, verify CouchDB availability (unless
    # we are only generating documents) and start the populator.
    def run
      # process command line options
      command_line_options
      # Only check CouchDB availability when needed
      unless command_line_options[:generate_only]
        Trollop.die :couch, "You need at least to provide the database's name" if command_line_options[:couch].nil?
        # Build the full CouchDB database url
        couch_url = CouchHelper.get_full_couchurl(command_line_options[:couch])
        # Check for availability of couchdb
        Trollop.die :couch, "#{couch_url} is not reachable or ressource does not exist" unless CouchHelper.couch_available?(couch_url)
        # create database on demand
        if command_line_options[:create_db]
          # TODO needs to be implemented properly
          # CouchPopulator::CouchHelper.create_db(command_line_options[:couch])
        else
          CouchPopulator::CouchHelper.database_exists? couch_url
        end
      end
      # Initialize CouchPopulator
      options = ({:executor_klass => executor, :generator_klass => generator, :logger => CouchPopulator::Logger.new(command_line_options[:logfile])}).merge(command_line_options)
      CouchPopulator::Base.new(command_line_options[:couch], options).populate
    end

    # Define some command-line options
    def command_line_options
      @command_line_options ||= Trollop.options do
        version "v1.0 (c) Sebastian Cohnen, 2009"
        banner <<-BANNER
This is a simple, yet powerfull tool to import large numbers of on-the-fly generated documents into CouchDB.
It's using concurrency by spawning several curl subprocesses. Documents are generated on-the-fly.
See http://github.com/tisba/couchpopulator for more information.
Usage:
./couchpopulator [OPTIONS] [executor [EXECUTOR-OPTIONS]]
To see, what options for 'executor' are:
./couchpopulator executor -h
OPTIONS:
        BANNER
        # :couch has no default so #run can detect a missing database name.
        opt :couch, "URL of CouchDB Server. You can also provide the name of the target DB only, http://localhost:5984/ will be prepended automatically", :type => String
        opt :create_db, "Create DB if needed.", :default => false
        opt :generator, "Name of the generator-class to use", :default => "Example"
        opt :generate_only, "Generate the docs, but don't write to couch and stdout them instead", :default => false
        opt :logfile, "Redirect info/debug output to specified file instead to stdout", :type => String, :default => ""
        stop_on_unknown
      end
    end

    # Get the requested generator or die
    def generator
      @generator ||= begin
        begin
          require File.join(File.dirname(__FILE__), "../../generators/#{command_line_options[:generator]}.rb")
        rescue Exception => e
          Trollop.die :generator, "Generator #{command_line_options[:generator]} not found!"
        end
        generator_klass = CouchPopulator::MiscHelper.camelize_and_constantize("generators/#{command_line_options[:generator]}") rescue generator_klass = nil
        # NOTE(review): this condition looks inverted — it dies when the
        # class DOES list :generate among its methods. On Ruby 1.8
        # `methods` returned strings so member?(:generate) was always
        # false; on 1.9+ this rejects valid generators. Verify intent.
        Trollop.die :generator, "Generator must be set, a valid class-name and respond to generate(n)" if generator_klass.nil? || generator_klass.methods.member?(:generate)
        generator_klass
      end
    end

    # Get the executor (defaults to standard) or die
    def executor
      @executor ||= begin
        executor_cmd = ARGV.shift || "standard"
        begin
          require File.join(File.dirname(__FILE__), "../../executors/#{executor_cmd}.rb")
        rescue Exception => e
          Trollop.die "Executor #{executor_cmd} not found!"
        end
        executor_klass = CouchPopulator::MiscHelper.camelize_and_constantize("executors/#{executor_cmd}") rescue executor_klass = nil
        Trollop.die "Executor must be set and a valid class-name" if executor_klass.nil?
        executor_klass
      end
    end
  end
end
end |
module CreditCard
  # Validates card numbers with the Luhn (mod 10) checksum.
  class LuhnValidator
    extend Forwardable

    # Delegate #number to the wrapped credit card object.
    def_delegator :credit_card, :number

    # @param credit_card [#number] any object exposing the number string
    def initialize(credit_card)
      @credit_card = credit_card
    end

    # Luhn check: walking from the rightmost digit, odd-position digits
    # are summed as-is; even-position digits are doubled (minus 9 when
    # the doubling reaches two digits). Valid when the total is mod 10.
    def call
      odd_total = 0
      even_total = 0
      number.reverse.chars.each_slice(2) do |odd_digit, even_digit|
        odd_total += odd_digit.to_i
        doubled = even_digit.to_i * 2
        doubled -= 9 if doubled >= 10
        even_total += doubled
      end
      ((odd_total + even_total) % 10).zero?
    end

    private

    attr_reader :credit_card
  end
end
Use a block on LuhnValidator
module CreditCard
  # Validates card numbers with the Luhn (mod 10) checksum.
  class LuhnValidator
    extend Forwardable

    # Delegate #number to the wrapped credit card object.
    def_delegator :credit_card, :number

    # @param credit_card [#number] any object exposing the number string
    def initialize(credit_card)
      @credit_card = credit_card
      @sum1 = @sum2 = 0
    end

    # Returns true when the number passes the Luhn mod-10 check.
    def call
      # Reset the accumulators so the validator is idempotent: previously
      # they were only zeroed in #initialize, so a second #call kept
      # adding onto the first run's sums and returned wrong results.
      @sum1 = @sum2 = 0
      pair_of_numbers do |odd, even|
        do_first_sum(odd)
        do_second_sum(even)
      end
      (@sum1 + @sum2) % 10 == 0
    end

    private

    attr_reader :credit_card

    # Yields [odd, even] digit pairs, starting from the rightmost digit.
    def pair_of_numbers
      number.reverse.chars.each_slice(2) do |odd, even|
        yield(odd, even)
      end
    end

    # Odd-position digits are added unchanged.
    def do_first_sum(value)
      @sum1 += value.to_i
    end

    # Even-position digits are doubled; two-digit results are reduced by 9.
    def do_second_sum(value)
      double = value.to_i * 2
      double -= 9 if double >= 10
      @sum2 += double
    end
  end
end
|
# Copyright 2006-2008 by Mike Bailey. All rights reserved.
unless Capistrano::Configuration.respond_to?(:instance)
  abort "deprec2 requires Capistrano 2"
end

# All recipe files live next to this file; load order is significant.
recipe_root = "#{File.dirname(__FILE__)}/recipes"

%w[canonical deprec deprecated].each { |r| require "#{recipe_root}/#{r}" }

# Updated for deprec3
%w[mri ree rubygems].each { |r| require "#{recipe_root}/#{r}" }

# Not yet updated for deprec3
%w[ec2 vmware_tools mongrel passenger mysql postgresql sqlite couchdb
   apache nginx bash git svn].each { |r| require "#{recipe_root}/#{r}" }
# require "#{recipe_root}/integrity"
%w[users ssh php].each { |r| require "#{recipe_root}/#{r}" }
# require "#{recipe_root}/scm/trac"
%w[aoe xen xentools ddclient ntp logrotate ssl postfix memcache monit
   network nagios3 nrpe collectd syslog syslog_ng stunnel heartbeat
   haproxy ubuntu lvm vnstat sphinx utils].each { |r| require "#{recipe_root}/#{r}" }
# require "#{recipe_root}/apt_mirror"
# require "#{recipe_root}/wordpress" # Not working
%w[wpmu ar_sendmail starling].each { |r| require "#{recipe_root}/#{r}" }
Commented out a bunch of 'retired recipes'
There are a bunch of recipes I haven't used for years
that probably need cleaning up. Feel free to take them
and put them in your own repository if you want them.
# Copyright 2006-2008 by Mike Bailey. All rights reserved.
unless Capistrano::Configuration.respond_to?(:instance)
  abort "deprec2 requires Capistrano 2"
end

# All recipe files live next to this file; load order is significant.
recipe_root = "#{File.dirname(__FILE__)}/recipes"

%w[canonical deprec deprecated].each { |r| require "#{recipe_root}/#{r}" }

# Updated for deprec3
%w[users network mri ree rubygems nagios3 nrpe].each { |r| require "#{recipe_root}/#{r}" }

# To be updated for deprec3
%w[passenger mysql postgresql apache git haproxy heartbeat ssh monit
   collectd ubuntu].each { |r| require "#{recipe_root}/#{r}" }

# To be decided...
%w[ec2 vmware_tools mongrel sqlite nginx bash php aoe ddclient ntp
   logrotate ssl postfix memcache syslog syslog_ng stunnel lvm vnstat
   utils].each { |r| require "#{recipe_root}/#{r}" }

# Retired recipes
#
# require "#{recipe_root}/integrity"
# require "#{recipe_root}/xen"
# require "#{recipe_root}/xentools"
# require "#{recipe_root}/svn"
# require "#{recipe_root}/scm/trac"
# require "#{recipe_root}/sphinx"
# require "#{recipe_root}/apt_mirror"
# require "#{recipe_root}/wordpress" # Not working
# require "#{recipe_root}/wpmu"
# require "#{recipe_root}/ar_sendmail"
# require "#{recipe_root}/starling"
# require "#{recipe_root}/couchdb"
|
require 'rails/all'
module Diesel
  module Testing
    # Root directory of the dummy application used for engine testing.
    APP_ROOT = File.expand_path('..', __FILE__).freeze
    # Minimal Rails application booted by the test suite; points Rails at
    # the dummy app's database config, routes, controllers and views.
    # NOTE(review): uses the pre-3.1 method-chain paths API
    # (config.paths.config.database), deprecated on Rails 3.1+.
    class Application < Rails::Application
      config.encoding = "utf-8"
      config.action_mailer.default_url_options = { :host => 'localhost' }
      # Point Rails at the dummy app's files under APP_ROOT.
      config.paths.config.database = "#{APP_ROOT}/config/database.yml"
      config.paths.config.routes << "#{APP_ROOT}/config/routes.rb"
      config.paths.app.controllers << "#{APP_ROOT}/app/controllers"
      config.paths.app.views << "#{APP_ROOT}/app/views"
      config.paths.log = "tmp/log"
      # Standard test-environment settings (no caching, raise in tests).
      config.cache_classes = true
      config.whiny_nils = true
      config.consider_all_requests_local = true
      config.action_controller.perform_caching = false
      config.action_dispatch.show_exceptions = false
      config.action_controller.allow_forgery_protection = false
      config.action_mailer.delivery_method = :test
      config.active_support.deprecation = :stderr
      config.secret_token = "DIESEL" * 5 # so diesel
      # Boots the app (name matches the Rails boot helper convention).
      def require_environment!
        initialize!
      end
      # Ensures the db directory exists before the normal Rails boot;
      # guards against double initialization.
      def initialize!
        FileUtils.mkdir_p(Rails.root.join("db").to_s)
        super unless @initialized
      end
    end
  end
end
Fix deprecation warning introduced in Rails 3.1.x.
Note: the 3.1.x-style path manipulation does not work on 3.0.x either, hence the version check. You've been warned.
require 'rails/all'
module Diesel
  module Testing
    # Root directory of the dummy application used for engine testing.
    APP_ROOT = File.expand_path('..', __FILE__).freeze

    # Minimal Rails application booted by the test suite; points Rails at
    # the dummy app's database config, routes, controllers and views.
    class Application < Rails::Application
      config.encoding = "utf-8"
      config.action_mailer.default_url_options = { :host => 'localhost' }

      # Rails 3.1 replaced the method-chain paths API
      # (config.paths.config.database) with a hash-style API
      # (config.paths['config/database']).
      # Bug fix: the original check `MAJOR >= 3 && MINOR >= 1` wrongly sent
      # Rails 4.0+ (where MINOR == 0) down the deprecated 3.0 branch.
      if Rails::VERSION::MAJOR > 3 ||
         (Rails::VERSION::MAJOR == 3 && Rails::VERSION::MINOR >= 1)
        config.paths['config/database'] = "#{APP_ROOT}/config/database.yml"
        config.paths['config/routes'] << "#{APP_ROOT}/config/routes.rb"
        config.paths['app/controllers'] << "#{APP_ROOT}/app/controllers"
        config.paths['app/views'] << "#{APP_ROOT}/app/views"
        config.paths['log'] = "tmp/log/development.log"
      else
        config.paths.config.database = "#{APP_ROOT}/config/database.yml"
        config.paths.config.routes << "#{APP_ROOT}/config/routes.rb"
        config.paths.app.controllers << "#{APP_ROOT}/app/controllers"
        config.paths.app.views << "#{APP_ROOT}/app/views"
        config.paths.log = "tmp/log"
      end

      # Standard test-environment settings (no caching, raise in tests).
      config.cache_classes = true
      config.whiny_nils = true
      config.consider_all_requests_local = true
      config.action_controller.perform_caching = false
      config.action_dispatch.show_exceptions = false
      config.action_controller.allow_forgery_protection = false
      config.action_mailer.delivery_method = :test
      config.active_support.deprecation = :stderr
      config.secret_token = "DIESEL" * 5 # so diesel

      # Boots the app (name matches the Rails boot helper convention).
      def require_environment!
        initialize!
      end

      # Ensures the db directory exists before the normal Rails boot;
      # guards against double initialization.
      def initialize!
        FileUtils.mkdir_p(Rails.root.join("db").to_s)
        super unless @initialized
      end
    end
  end
end
|
module Pakyow
class Application
class << self
attr_accessor :routes_proc, :handlers_proc, :middleware_proc, :configurations
# Sets the path to the application file so it can be reloaded later.
#
def inherited(subclass)
Pakyow::Configuration::App.application_path = parse_path_from_caller(caller[0])
end
def parse_path_from_caller(caller)
caller.match(/^(.+)(:?:\d+(:?:in `.+')?$)/)[1]
end
# Runs the application. Accepts the environment(s) to run, for example:
# run(:development)
# run([:development, :staging])
#
def run(*args)
return if running?
@running = true
self.builder.run(self.prepare(*args))
detect_handler.run(builder, :Host => Pakyow::Configuration::Base.server.host, :Port => Pakyow::Configuration::Base.server.port)
end
# Stages the application. Everything is loaded but the application is
# not started. Accepts the same arguments as #run.
#
def stage(*args)
return if staged?
@staged = true
prepare(*args)
end
def builder
@builder ||= Rack::Builder.new
end
def prepared?
@prepared
end
# Returns true if the application is running.
#
def running?
@running
end
# Returns true if the application is staged.
#
def staged?
@staged
end
# Convenience method for base configuration class.
#
def config
Pakyow::Configuration::Base
end
# Creates configuration for a particular environment. Example:
# configure(:development) { app.auto_reload = true }
#
def configure(environment, &block)
self.configurations ||= {}
self.configurations[environment] = block
end
# Creates routes. Example:
# routes { get '/' { # do something } }
#
def routes(&block)
self.routes_proc = block
end
# Creates handlers for later execution.
# The handler can be created one of two ways:
#
# Define a controller/action handler with an associate response status:
# handler(name, 404, :ApplicationController, :handle_404)
#
# Specify a block as a handler:
# handler(name, 404) { # handle error }
#
# If a controller calls #invoke_handler!(name) then the
# handler defined for that code will be invoked.
#
def handlers(&block)
self.handlers_proc = block
end
def middleware(&block)
self.middleware_proc = block
end
protected
# Prepares the application for running or staging and returns an instance
# of the application.
def prepare(*args)
self.load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
return if prepared?
self.builder.use(Rack::MethodOverride)
self.builder.instance_eval(&self.middleware_proc) if self.middleware_proc
self.builder.use(Pakyow::Static) if Configuration::Base.app.static
self.builder.use(Pakyow::Logger) if Configuration::Base.app.log
self.builder.use(Pakyow::Reloader) if Configuration::Base.app.auto_reload
@prepared = true
$:.unshift(Dir.pwd) unless $:.include? Dir.pwd
return self.new
end
def load_config(args)
if self.configurations
args << Configuration::Base.app.default_environment if args.empty?
args.each do |env|
next unless config = self.configurations[env]
Configuration::Base.instance_eval(&config)
end
end
end
# Finds the first available Rack server handler, preferring thin, then
# mongrel, then webrick. A missing server gem raises LoadError/NameError,
# which is swallowed so the next candidate is tried.
# NOTE(review): if none of the three loads, #each returns the iterated
# array (not a handler) and the caller's .run will fail — presumably
# webrick is always available in practice; confirm.
def detect_handler
  ['thin', 'mongrel', 'webrick'].each do |server|
    begin
      return Rack::Handler.get(server)
    rescue LoadError
    rescue NameError
    end
  end
end
end
include Helpers
attr_accessor :request, :response, :presenter, :route_store, :restful_routes, :handler_store
def initialize
Pakyow.app = self
@handler_name_to_code = {}
@handler_code_to_name = {}
# This configuration option will be set if a presenter is to be used
if Configuration::Base.app.presenter
# Create a new instance of the presenter
self.presenter = Configuration::Base.app.presenter.new
end
# Load application files
load_app
end
# Interrupts the application and returns response immediately.
#
def halt!
throw :halt, self.response
end
def invoke_route!(route, method)
# TODO Think about all this
block = prepare_route_block(route, method)
Log.enter "invoke_route!(#{route} #{method}) #{block ? 'have' : 'NO'} block"
# if there's no block we end up stopping but the request in in changed so content comes
# back according to what the request was when this method was called and not the according
# to the route arg passed in.
# TODO Is this the right way to deal with this?
self.request.route_spec = route unless block
throw :new_block, block
end
def invoke_handler!(name_or_code)
# TODO Think about all this
if block = @handler_store[name_or_code]
# we are given a name
code = @handler_name_to_code[name]
self.response.status = code if code
throw :new_block, block
elsif name = @handler_code_to_name[name_or_code]
# we are given a code
block = @handler_store[name]
self.response.status = name_or_code
throw :new_block, block
else
# no block to be found
# do we assume code if a number and set status?
self.response.status = name_or_code if name_or_code.is_a?(Fixnum)
# still need to stop execution, I think? But do nothing.
throw :new_block, nil
end
end
#TODO move to protected section
def prepare_route_block(route, method)
set_request_format_from_route(route)
controller_block, packet = @route_store.get_block(route, method)
Log.enter "prepare_route_block(#{route} #{method}) #{controller_block ? 'have' : 'NO'} block"
self.request.params.merge!(HashUtils.strhash(packet[:vars]))
self.request.route_spec = packet[:data][:route_spec] if packet[:data]
self.request.restful = packet[:data][:restful] if packet[:data]
controller_block
end
#TODO move to protected section
def trampoline(block)
while block do
block = catch(:new_block) {
block.call()
# Getting here means that call() returned normally (not via a throw)
# By definition, we do not have a 404 since we matched a route to get the block to call
nil
} # end :invoke_route catch block
# If invoke_route! or invoke_handler! was called in the block, block will have a new value.
# If neither was called, block will be nil
if block && self.presenter
Log.enter "PPFR [block: #{block.inspect}]"
self.presenter.prepare_for_request(self.request)
end
end
end
# Called on every request.
#
def call(env)
self.request = Request.new(env)
self.response = Rack::Response.new
have_route = false
halted_resp = catch(:halt) {
route_block = prepare_route_block(self.request.path, self.request.method)
have_route = true if route_block
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(route_block) if !Pakyow::Configuration::App.ignore_routes
Log.enter "presenter: #{self.presenter ? "yes" : "no" } presented?: #{self.presenter.presented?}"
if self.presenter
self.response.body = [self.presenter.content]
end
# 404 if no route matched and no views were found
if !have_route && (!self.presenter || !self.presenter.presented?)
Log.enter "[404] Not Found"
handler404 = @handler_store[@handler_code_to_name[404]] if @handler_code_to_name[404]
trampoline(handler404) if handler404
self.response.status = 404
end
false
} #end :halt catch block
if halted_resp
throw :halt, self.response
end
# This needs to be in the 'return' position (last statement)
finish!
rescue StandardError => error
self.request.error = error
handler500 = @handler_store[@handler_code_to_name[500]] if @handler_code_to_name[500]
trampoline(handler500) if handler500
self.response.status = 500
if Configuration::Base.app.errors_in_browser
self.response.body = []
self.response.body << "<h4>#{CGI.escapeHTML(error.to_s)}</h4>"
self.response.body << error.backtrace.join("<br />")
end
begin
# caught by other middleware (e.g. logger)
throw :error, error
rescue ArgumentError
end
finish!
end
# Sends a file in the response (immediately). Accepts a File object. Mime
# type is automatically detected.
#
# Sends a file in the response (immediately). Accepts a File object or a
# path String. The MIME type is detected from the extension of +send_as+,
# which defaults to the source path. Halts the request via #halt!.
#
# Bug fix: the original `File.open(path, "r").each_line` never closed the
# handle, leaking a file descriptor per call; the block form closes it.
def send_file(source_file, send_as = nil, type = nil)
  path = source_file.is_a?(File) ? source_file.path : source_file
  send_as ||= path
  type ||= Rack::Mime.mime_type(".#{send_as.split('.')[-1]}")
  data = File.open(path, "r") { |f| f.read }
  self.response = Rack::Response.new(data, self.response.status, self.response.header.merge({ "Content-Type" => type }))
  halt!
end
# Sends data in the response (immediately). Accepts the data, mime type,
# and optional file name.
#
def send_data(data, type, file_name = nil)
status = self.response ? self.response.status : 200
headers = self.response ? self.response.header : {}
headers = headers.merge({ "Content-Type" => type })
headers = headers.merge({ "Content-disposition" => "attachment; filename=#{file_name}"}) if file_name
self.response = Rack::Response.new(data, status, headers)
halt!
end
# Redirects to location (immediately).
#
def redirect_to(location, status_code = 302)
headers = self.response ? self.response.header : {}
headers = headers.merge({'Location' => location})
self.response = Rack::Response.new('', status_code, headers)
halt!
end
# Registers a route for GET requests. Route can be defined one of two ways:
# get('/', :ControllerClass, :action_method)
# get('/') { # do something }
#
# Routes for namespaced controllers (e.g. Admin::ControllerClass) can be defined like this:
# get('/', :Admin_ControllerClass, :action_method)
#
def get(route, *args, &block)
register_route(:user, route, block, :get, *args)
end
# Registers a route for POST requests (see #get).
#
def post(route, *args, &block)
register_route(:user, route, block, :post, *args)
end
# Registers a route for PUT requests (see #get).
#
def put(route, *args, &block)
register_route(:user, route, block, :put, *args)
end
# Registers a route for DELETE requests (see #get).
#
def delete(route, *args, &block)
register_route(:user, route, block, :delete, *args)
end
# Registers the default route (see #get).
#
def default(*args, &block)
register_route(:user, '/', block, :get, *args)
end
# Creates REST routes for a resource. Arguments: url, controller, model, hooks
#
def restful(url, controller, *args, &block)
model, hooks = parse_restful_args(args)
with_scope(:url => url.gsub(/^[\/]+|[\/]+$/,""), :model => model) do
nest_scope(&block) if block_given?
@restful_routes ||= {}
@restful_routes[model] ||= {} if model
@@restful_actions.each do |opts|
action_url = current_path
if suffix = opts[:url_suffix]
action_url = File.join(action_url, suffix)
end
# Create the route
register_route(:restful, action_url, nil, opts[:method], controller, opts[:action], hooks)
# Store url for later use (currently used by Binder#action)
@restful_routes[model][opts[:action]] = action_url if model
end
remove_scope
end
end
@@restful_actions = [
{ :action => :edit, :method => :get, :url_suffix => 'edit/:id' },
{ :action => :show, :method => :get, :url_suffix => ':id' },
{ :action => :new, :method => :get, :url_suffix => 'new' },
{ :action => :update, :method => :put, :url_suffix => ':id' },
{ :action => :delete, :method => :delete, :url_suffix => ':id' },
{ :action => :index, :method => :get },
{ :action => :create, :method => :post }
]
def hook(name, controller = nil, action = nil, &block)
block = build_controller_block(controller, action) if controller
@route_store.add_hook(name, block)
end
def handler(name, *args, &block)
code, controller, action = parse_handler_args(args)
if block_given?
@handler_store[name] = block
else
@handler_store[name] = build_controller_block(controller, action)
end
if code
@handler_name_to_code[name] = code
@handler_code_to_name[code] = name
end
end
#TODO: don't like this...
def reload
load_app
end
protected
# Normalizes route-registration arguments into [controller, action, hooks].
# Accepted forms: (controller, action, hooks), (controller, action),
# (controller), (hooks_hash), or nothing.
def parse_route_args(args)
  first = args[0]
  if first && (first.is_a?(Symbol) || first.is_a?(String))
    [first, args[1], args[2]]
  else
    hooks = first if first.is_a?(Hash)
    [nil, nil, hooks]
  end
end
# Normalizes #restful's trailing arguments into [model, hooks].
# Accepted forms: (model, hooks), (model), (hooks_hash), or nothing.
def parse_restful_args(args)
  first = args[0]
  if first && (first.is_a?(Symbol) || first.is_a?(String))
    [first, args[1]]
  else
    [nil, first.is_a?(Hash) ? first : nil]
  end
end
def parse_handler_args(args)
code = args[0] if args.length == 1 || args.length == 3
controller = args[1] if code
action = args[2] if code && args[2]
unless code
controller = args[0]
action = args[1] if args[1]
end
return code, controller, action
end
# Handles route registration.
#
def register_route(type, route, block, method, *args)
controller, action, hooks = parse_route_args(args)
if controller
block = build_controller_block(controller, action)
end
data = {:route_type=>type, :route_spec=>route}
if type == :restful
data[:restful] = {:restful_action=>action}
end
@route_store.add_route(route, block, method, data, hooks)
end
def build_controller_block(controller, action)
controller = eval(controller.to_s)
action ||= Configuration::Base.app.default_action
block = lambda {
instance = controller.new
request.controller = instance
request.action = action
instance.send(action)
}
block
end
def set_request_format_from_route(route)
route, format = StringUtils.split_at_last_dot(route)
self.request.format = ((format && (format[format.length - 1, 1] == '/')) ? format[0, format.length - 1] : format)
end
def with_scope(opts)
@scope ||= {}
@scope[:path] ||= []
@scope[:model] = opts[:model]
@scope[:path] << opts[:url]
yield
end
def remove_scope
@scope[:path].pop
end
def nest_scope(&block)
@scope[:path].insert(-1, ":#{StringUtils.underscore(@scope[:model].to_s)}_id")
yield
@scope[:path].pop
end
def current_path
@scope[:path].join('/')
end
def set_cookies
if self.request.cookies && self.request.cookies != {}
self.request.cookies.each do |key, value|
if value.is_a?(Hash)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge(value))
elsif value.is_a?(String)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge({:value => value}))
else
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800 * -1 }.merge({:value => value}))
end
end
end
end
# Reloads all application files in application_path and presenter (if specified).
#
def load_app
load(Configuration::App.application_path)
@loader = Loader.new unless @loader
@loader.load!(Configuration::Base.app.src_dir)
load_handlers
load_routes
# Reload views
if self.presenter
self.presenter.load
end
end
def load_handlers
@handler_store = {}
self.instance_eval(&self.class.handlers_proc) if self.class.handlers_proc
end
def load_routes
@route_store = RouteStore.new
self.instance_eval(&self.class.routes_proc) if self.class.routes_proc
end
# Send the response and cleanup.
#
def finish!
set_cookies
self.response.finish
end
end
end
Fixes problems with halting, redirecting, sending files and static middleware. I don't know what I was thinking.
module Pakyow
class Application
class << self
attr_accessor :routes_proc, :handlers_proc, :middleware_proc, :configurations
# Sets the path to the application file so it can be reloaded later.
#
def inherited(subclass)
Pakyow::Configuration::App.application_path = parse_path_from_caller(caller[0])
end
def parse_path_from_caller(caller)
caller.match(/^(.+)(:?:\d+(:?:in `.+')?$)/)[1]
end
# Runs the application. Accepts the environment(s) to run, for example:
# run(:development)
# run([:development, :staging])
#
def run(*args)
return if running?
@running = true
self.builder.run(self.prepare(*args))
detect_handler.run(builder, :Host => Pakyow::Configuration::Base.server.host, :Port => Pakyow::Configuration::Base.server.port)
end
# Stages the application. Everything is loaded but the application is
# not started. Accepts the same arguments as #run.
#
def stage(*args)
return if staged?
@staged = true
prepare(*args)
end
def builder
@builder ||= Rack::Builder.new
end
def prepared?
@prepared
end
# Returns true if the application is running.
#
def running?
@running
end
# Returns true if the application is staged.
#
def staged?
@staged
end
# Convenience method for base configuration class.
#
def config
Pakyow::Configuration::Base
end
# Creates configuration for a particular environment. Example:
# configure(:development) { app.auto_reload = true }
#
def configure(environment, &block)
self.configurations ||= {}
self.configurations[environment] = block
end
# Creates routes. Example:
# routes { get '/' { # do something } }
#
def routes(&block)
self.routes_proc = block
end
# Creates handlers for later execution.
# The handler can be created one of two ways:
#
# Define a controller/action handler with an associate response status:
# handler(name, 404, :ApplicationController, :handle_404)
#
# Specify a block as a handler:
# handler(name, 404) { # handle error }
#
# If a controller calls #invoke_handler!(name) then the
# handler defined for that code will be invoked.
#
def handlers(&block)
self.handlers_proc = block
end
def middleware(&block)
self.middleware_proc = block
end
protected
# Prepares the application for running or staging and returns an instance
# of the application.
def prepare(*args)
self.load_config args.empty? || args.first.nil? ? [Configuration::Base.app.default_environment] : args
return if prepared?
self.builder.use(Rack::MethodOverride)
self.builder.instance_eval(&self.middleware_proc) if self.middleware_proc
self.builder.use(Pakyow::Static) if Configuration::Base.app.static
self.builder.use(Pakyow::Logger) if Configuration::Base.app.log
self.builder.use(Pakyow::Reloader) if Configuration::Base.app.auto_reload
@prepared = true
$:.unshift(Dir.pwd) unless $:.include? Dir.pwd
return self.new
end
def load_config(args)
if self.configurations
args << Configuration::Base.app.default_environment if args.empty?
args.each do |env|
next unless config = self.configurations[env]
Configuration::Base.instance_eval(&config)
end
end
end
def detect_handler
['thin', 'mongrel', 'webrick'].each do |server|
begin
return Rack::Handler.get(server)
rescue LoadError
rescue NameError
end
end
end
end
include Helpers
attr_accessor :request, :response, :presenter, :route_store, :restful_routes, :handler_store
def initialize
Pakyow.app = self
@handler_name_to_code = {}
@handler_code_to_name = {}
# This configuration option will be set if a presenter is to be used
if Configuration::Base.app.presenter
# Create a new instance of the presenter
self.presenter = Configuration::Base.app.presenter.new
end
# Load application files
load_app
end
# Interrupts the application and returns response immediately.
#
def halt!
throw :halt, self.response
end
def invoke_route!(route, method)
# TODO Think about all this
block = prepare_route_block(route, method)
Log.enter "invoke_route!(#{route} #{method}) #{block ? 'have' : 'NO'} block"
# if there's no block we end up stopping but the request in in changed so content comes
# back according to what the request was when this method was called and not the according
# to the route arg passed in.
# TODO Is this the right way to deal with this?
self.request.route_spec = route unless block
throw :new_block, block
end
def invoke_handler!(name_or_code)
# TODO Think about all this
if block = @handler_store[name_or_code]
# we are given a name
code = @handler_name_to_code[name]
self.response.status = code if code
throw :new_block, block
elsif name = @handler_code_to_name[name_or_code]
# we are given a code
block = @handler_store[name]
self.response.status = name_or_code
throw :new_block, block
else
# no block to be found
# do we assume code if a number and set status?
self.response.status = name_or_code if name_or_code.is_a?(Fixnum)
# still need to stop execution, I think? But do nothing.
throw :new_block, nil
end
end
#TODO move to protected section
def prepare_route_block(route, method)
set_request_format_from_route(route)
controller_block, packet = @route_store.get_block(route, method)
Log.enter "prepare_route_block(#{route} #{method}) #{controller_block ? 'have' : 'NO'} block"
self.request.params.merge!(HashUtils.strhash(packet[:vars]))
self.request.route_spec = packet[:data][:route_spec] if packet[:data]
self.request.restful = packet[:data][:restful] if packet[:data]
controller_block
end
#TODO move to protected section
def trampoline(block)
while block do
block = catch(:new_block) {
block.call()
# Getting here means that call() returned normally (not via a throw)
# By definition, we do not have a 404 since we matched a route to get the block to call
nil
} # end :invoke_route catch block
# If invoke_route! or invoke_handler! was called in the block, block will have a new value.
# If neither was called, block will be nil
if block && self.presenter
Log.enter "PPFR [block: #{block.inspect}]"
self.presenter.prepare_for_request(self.request)
end
end
end
# Called on every request.
#
def call(env)
self.request = Request.new(env)
self.response = Rack::Response.new
have_route = false
halted_resp = catch(:halt) {
route_block = prepare_route_block(self.request.path, self.request.method)
have_route = true if route_block
if self.presenter
self.presenter.prepare_for_request(self.request)
end
trampoline(route_block) if !Pakyow::Configuration::App.ignore_routes
Log.enter "presenter: #{self.presenter ? "yes" : "no" } presented?: #{self.presenter.presented?}"
if self.presenter
self.response.body = [self.presenter.content]
end
# 404 if no route matched and no views were found
if !have_route && (!self.presenter || !self.presenter.presented?)
Log.enter "[404] Not Found"
handler404 = @handler_store[@handler_code_to_name[404]] if @handler_code_to_name[404]
trampoline(handler404) if handler404
self.response.status = 404
end
false
} #end :halt catch block
# This needs to be in the 'return' position (last statement)
finish!
rescue StandardError => error
self.request.error = error
handler500 = @handler_store[@handler_code_to_name[500]] if @handler_code_to_name[500]
trampoline(handler500) if handler500
self.response.status = 500
if Configuration::Base.app.errors_in_browser
self.response.body = []
self.response.body << "<h4>#{CGI.escapeHTML(error.to_s)}</h4>"
self.response.body << error.backtrace.join("<br />")
end
begin
# caught by other middleware (e.g. logger)
throw :error, error
rescue ArgumentError
end
finish!
end
# Sends a file in the response (immediately). Accepts a File object. Mime
# type is automatically detected.
#
# Sends a file in the response (immediately). Accepts a File object or a
# path String. The MIME type is detected from the extension of +send_as+,
# which defaults to the source path. Halts the request via #halt!.
#
# Bug fix: the original `File.open(path, "r").each_line` never closed the
# handle, leaking a file descriptor per call; the block form closes it.
def send_file(source_file, send_as = nil, type = nil)
  path = source_file.is_a?(File) ? source_file.path : source_file
  send_as ||= path
  type ||= Rack::Mime.mime_type(".#{send_as.split('.')[-1]}")
  data = File.open(path, "r") { |f| f.read }
  self.response = Rack::Response.new(data, self.response.status, self.response.header.merge({ "Content-Type" => type }))
  halt!
end
# Sends data in the response (immediately). Accepts the data, mime type,
# and optional file name.
#
def send_data(data, type, file_name = nil)
status = self.response ? self.response.status : 200
headers = self.response ? self.response.header : {}
headers = headers.merge({ "Content-Type" => type })
headers = headers.merge({ "Content-disposition" => "attachment; filename=#{file_name}"}) if file_name
self.response = Rack::Response.new(data, status, headers)
halt!
end
# Redirects to location (immediately).
#
def redirect_to(location, status_code = 302)
headers = self.response ? self.response.header : {}
headers = headers.merge({'Location' => location})
self.response = Rack::Response.new('', status_code, headers)
halt!
end
# Registers a route for GET requests. Route can be defined one of two ways:
# get('/', :ControllerClass, :action_method)
# get('/') { # do something }
#
# Routes for namespaced controllers (e.g. Admin::ControllerClass) can be defined like this:
# get('/', :Admin_ControllerClass, :action_method)
#
def get(route, *args, &block)
register_route(:user, route, block, :get, *args)
end
# Registers a route for POST requests (see #get).
#
def post(route, *args, &block)
register_route(:user, route, block, :post, *args)
end
# Registers a route for PUT requests (see #get).
#
def put(route, *args, &block)
register_route(:user, route, block, :put, *args)
end
# Registers a route for DELETE requests (see #get).
#
def delete(route, *args, &block)
register_route(:user, route, block, :delete, *args)
end
# Registers the default route (see #get).
#
def default(*args, &block)
register_route(:user, '/', block, :get, *args)
end
# Creates REST routes for a resource. Arguments: url, controller, model, hooks
#
def restful(url, controller, *args, &block)
model, hooks = parse_restful_args(args)
with_scope(:url => url.gsub(/^[\/]+|[\/]+$/,""), :model => model) do
nest_scope(&block) if block_given?
@restful_routes ||= {}
@restful_routes[model] ||= {} if model
@@restful_actions.each do |opts|
action_url = current_path
if suffix = opts[:url_suffix]
action_url = File.join(action_url, suffix)
end
# Create the route
register_route(:restful, action_url, nil, opts[:method], controller, opts[:action], hooks)
# Store url for later use (currently used by Binder#action)
@restful_routes[model][opts[:action]] = action_url if model
end
remove_scope
end
end
@@restful_actions = [
{ :action => :edit, :method => :get, :url_suffix => 'edit/:id' },
{ :action => :show, :method => :get, :url_suffix => ':id' },
{ :action => :new, :method => :get, :url_suffix => 'new' },
{ :action => :update, :method => :put, :url_suffix => ':id' },
{ :action => :delete, :method => :delete, :url_suffix => ':id' },
{ :action => :index, :method => :get },
{ :action => :create, :method => :post }
]
def hook(name, controller = nil, action = nil, &block)
block = build_controller_block(controller, action) if controller
@route_store.add_hook(name, block)
end
def handler(name, *args, &block)
code, controller, action = parse_handler_args(args)
if block_given?
@handler_store[name] = block
else
@handler_store[name] = build_controller_block(controller, action)
end
if code
@handler_name_to_code[name] = code
@handler_code_to_name[code] = name
end
end
#TODO: don't like this...
def reload
load_app
end
protected
def parse_route_args(args)
controller = args[0] if args[0] && (args[0].is_a?(Symbol) || args[0].is_a?(String))
action = args[1] if controller
hooks = args[2] if controller
unless controller
hooks = args[0] if args[0] && args[0].is_a?(Hash)
end
return controller, action, hooks
end
def parse_restful_args(args)
model = args[0] if args[0] && (args[0].is_a?(Symbol) || args[0].is_a?(String))
hooks = args[1] if model
unless model
hooks = args[0] if args[0] && args[0].is_a?(Hash)
end
return model, hooks
end
def parse_handler_args(args)
code = args[0] if args.length == 1 || args.length == 3
controller = args[1] if code
action = args[2] if code && args[2]
unless code
controller = args[0]
action = args[1] if args[1]
end
return code, controller, action
end
# Handles route registration.
#
def register_route(type, route, block, method, *args)
controller, action, hooks = parse_route_args(args)
if controller
block = build_controller_block(controller, action)
end
data = {:route_type=>type, :route_spec=>route}
if type == :restful
data[:restful] = {:restful_action=>action}
end
@route_store.add_route(route, block, method, data, hooks)
end
def build_controller_block(controller, action)
controller = eval(controller.to_s)
action ||= Configuration::Base.app.default_action
block = lambda {
instance = controller.new
request.controller = instance
request.action = action
instance.send(action)
}
block
end
def set_request_format_from_route(route)
route, format = StringUtils.split_at_last_dot(route)
self.request.format = ((format && (format[format.length - 1, 1] == '/')) ? format[0, format.length - 1] : format)
end
def with_scope(opts)
@scope ||= {}
@scope[:path] ||= []
@scope[:model] = opts[:model]
@scope[:path] << opts[:url]
yield
end
def remove_scope
@scope[:path].pop
end
def nest_scope(&block)
@scope[:path].insert(-1, ":#{StringUtils.underscore(@scope[:model].to_s)}_id")
yield
@scope[:path].pop
end
def current_path
@scope[:path].join('/')
end
def set_cookies
if self.request.cookies && self.request.cookies != {}
self.request.cookies.each do |key, value|
if value.is_a?(Hash)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge(value))
elsif value.is_a?(String)
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800}.merge({:value => value}))
else
self.response.set_cookie(key, {:path => '/', :expires => Time.now + 604800 * -1 }.merge({:value => value}))
end
end
end
end
# Reloads all application files in application_path and presenter (if specified).
#
def load_app
load(Configuration::App.application_path)
@loader = Loader.new unless @loader
@loader.load!(Configuration::Base.app.src_dir)
load_handlers
load_routes
# Reload views
if self.presenter
self.presenter.load
end
end
def load_handlers
@handler_store = {}
self.instance_eval(&self.class.handlers_proc) if self.class.handlers_proc
end
def load_routes
@route_store = RouteStore.new
self.instance_eval(&self.class.routes_proc) if self.class.routes_proc
end
# Send the response and cleanup.
#
def finish!
set_cookies
self.response.finish
end
end
end
|
require "fileutils"
module DataMapperSalesforce
  # Wraps a soap4r-generated SOAP driver: generates the client classes from
  # a WSDL on first use (via the external wsdl2ruby.rb tool) and exposes the
  # resulting driver instance.
  class SoapWrapper
    # Raised when wsdl2ruby.rb exits unsuccessfully.
    class ClassesFailedToGenerate < StandardError; end

    def initialize(module_name, driver_name, wsdl_path, api_dir)
      @module_name, @driver_name, @wsdl_path, @api_dir = module_name, driver_name, File.expand_path(wsdl_path), File.expand_path(api_dir)
      generate_soap_classes
      driver
    end

    attr_reader :module_name, :driver_name, :wsdl_path, :api_dir

    # Lazily instantiates the generated driver class (ModuleName::DriverName).
    def driver
      @driver ||= Object.const_get(module_name).const_get(driver_name).new
    end

    # Generates the ruby client classes from the WSDL unless the three
    # expected files already exist, then requires the generated driver.
    #
    # Bug fix: the original ran `puts system(`which wsdl2ruby.rb`.chomp, ...)`,
    # printing the boolean result and silently continuing on failure; a
    # failed generation now raises ClassesFailedToGenerate. (Multi-arg
    # system already resolves wsdl2ruby.rb via PATH, so `which` was
    # redundant as well.)
    def generate_soap_classes
      unless File.file?(wsdl_path)
        raise Errno::ENOENT, "Could not find the WSDL at #{wsdl_path}"
      end
      unless File.directory?(wsdl_api_dir)
        FileUtils.mkdir_p wsdl_api_dir
      end
      unless Dir["#{wsdl_api_dir}/#{module_name}*.rb"].size == 3
        Dir.chdir(wsdl_api_dir) do
          unless system("wsdl2ruby.rb", "--wsdl", wsdl_path, "--module_path", module_name, "--classdef", module_name, "--type", "client")
            raise ClassesFailedToGenerate, "Could not generate the ruby classes from the WSDL"
          end
          # The generated *Client.rb stub scripts are not needed.
          FileUtils.rm Dir["*Client.rb"]
        end
      end
      $:.push wsdl_api_dir
      require "#{module_name}Driver"
    end

    # Directory holding generated classes, namespaced by the WSDL basename.
    def wsdl_api_dir
      "#{api_dir}/#{File.basename(wsdl_path)}"
    end
  end
end
Check whether the classes actually do get generated
require "fileutils"
module DataMapperSalesforce
  # Wraps the wsdl2ruby-generated SOAP driver for a given WSDL, generating
  # and caching the ruby classes on first use.
  class SoapWrapper
    class ClassesFailedToGenerate < StandardError; end

    attr_reader :module_name, :driver_name, :wsdl_path, :api_dir

    def initialize(module_name, driver_name, wsdl_path, api_dir)
      @module_name = module_name
      @driver_name = driver_name
      @wsdl_path   = File.expand_path(wsdl_path)
      @api_dir     = File.expand_path(api_dir)
      generate_soap_classes
      driver
    end

    # Memoized instance of the generated driver class.
    def driver
      @driver ||= Object.const_get(module_name).const_get(driver_name).new
    end

    # Ensures the generated classes exist (creating them with wsdl2ruby.rb
    # when the cache is incomplete), then requires the driver file.
    #
    # @raise [Errno::ENOENT] if the WSDL file is missing
    # @raise [ClassesFailedToGenerate] if wsdl2ruby.rb fails
    def generate_soap_classes
      raise Errno::ENOENT, "Could not find the WSDL at #{wsdl_path}" unless File.file?(wsdl_path)
      FileUtils.mkdir_p(wsdl_api_dir) unless File.directory?(wsdl_api_dir)

      already_generated = Dir["#{wsdl_api_dir}/#{module_name}*.rb"].size == 3
      unless already_generated
        Dir.chdir(wsdl_api_dir) do
          ok = system("wsdl2ruby.rb", "--wsdl", wsdl_path, "--module_path", module_name, "--classdef", module_name, "--type", "client")
          raise ClassesFailedToGenerate, "Could not generate the ruby classes from the WSDL" unless ok
          FileUtils.rm Dir["*Client.rb"]
        end
      end

      $:.push wsdl_api_dir
      require "#{module_name}Driver"
    end

    # Per-WSDL subdirectory of api_dir caching the generated classes.
    def wsdl_api_dir
      "#{api_dir}/#{File.basename(wsdl_path)}"
    end
  end
end
|
module DynamicSitemaps
  # Class-level DSL evaluated by the sitemap config file; generates the
  # sitemaps and the accompanying index.
  class Generator
    class << self
      # Evaluates the config file in this class's context, then writes the
      # sitemap index.
      def generate
        instance_eval open(DynamicSitemaps.config_path).read
        generate_index
      end

      def generate_index
        IndexGenerator.new(sitemaps).generate
      end

      # DSL: declares and immediately generates a sitemap; :host and
      # :folder default to the generator-level settings.
      def sitemap(*args, &block)
        args << {} unless args.last.is_a?(Hash)
        args.last[:host] ||= host
        args.last[:folder] ||= folder
        sitemap = Sitemap.new(*args, &block)
        sitemaps << SitemapGenerator.new(sitemap).generate
      end

      # Array of SitemapResult
      def sitemaps
        @sitemaps ||= []
      end

      # Getter/setter: with an argument, sets the host and mirrors it into
      # Rails' default_url_options; without, returns the current host.
      def host(*args)
        if args.any?
          @host = args.first
          Rails.application.routes.default_url_options[:host] = @host
        else
          @host
        end
      end

      # Getter/setter: with an argument, sets the destination folder and
      # removes any previously generated files in it; without, returns the
      # folder (defaulting it first if blank).
      def folder(*args)
        if args.any?
          @folder = args.first
          raise ArgumentError, "Folder can't be blank." if @folder.blank?
          # BUGFIX: FileUtils.rm_rf does not expand shell globs, so the
          # previous rm_rf("#{path}/*") removed nothing; expand the glob
          # with Dir.glob first (also drops the deprecated Dir.exists?).
          FileUtils.rm_rf Dir.glob("#{DynamicSitemaps.path}/#{folder}/*")
        else
          # Ensure that the default folder is set and cleaned.
          folder DynamicSitemaps.folder if @folder.blank?
          @folder
        end
      end
    end
  end
end
Use Dir.glob
module DynamicSitemaps
  # Class-level DSL evaluated by the sitemap config file; generates the
  # sitemaps and the accompanying index.
  class Generator
    class << self
      # Evaluates the config file (DynamicSitemaps.config_path) in this
      # class's context, then writes the sitemap index.
      def generate
        instance_eval open(DynamicSitemaps.config_path).read
        generate_index
      end

      def generate_index
        IndexGenerator.new(sitemaps).generate
      end

      # DSL: declares and immediately generates a sitemap; :host and
      # :folder default to the generator-level settings.
      def sitemap(*args, &block)
        args << {} unless args.last.is_a?(Hash)
        args.last[:host] ||= host
        args.last[:folder] ||= folder
        sitemap = Sitemap.new(*args, &block)
        sitemaps << SitemapGenerator.new(sitemap).generate
      end

      # Array of SitemapResult
      def sitemaps
        @sitemaps ||= []
      end

      # Getter/setter: with an argument, sets the host and mirrors it into
      # Rails' default_url_options; without, returns the current host.
      def host(*args)
        if args.any?
          @host = args.first
          Rails.application.routes.default_url_options[:host] = @host
        else
          @host
        end
      end

      # Getter/setter: with an argument, sets the destination folder and
      # removes any previously generated files in it; without, returns the
      # folder (defaulting it first if blank).
      def folder(*args)
        if args.any?
          @folder = args.first
          raise ArgumentError, "Folder can't be blank." if @folder.blank?
          # rm_rf does not expand globs itself, hence the Dir.glob.
          FileUtils.rm_rf Dir.glob("#{DynamicSitemaps.path}/#{folder}/*")
        else
          # Ensure that the default folder is set and cleaned.
          folder DynamicSitemaps.folder if @folder.blank?
          @folder
        end
      end
    end
  end
end
module DynamicSitemaps
  # Builds sitemaps into a temp dir, writes an index, moves the result into
  # its final destination, and pings search engines.
  class Generator
    # Generates the sitemap(s) and index based on the configuration file specified in DynamicSitemaps.config_path.
    # If you supply a block, that block is evaluated instead of the configuration file.
    def generate(&block)
      create_temp_dir
      if block
        # Parenthesized to avoid the "`&' interpreted as argument prefix" warning.
        instance_eval(&block)
      else
        instance_eval open(DynamicSitemaps.config_path).read, DynamicSitemaps.config_path
      end
      generate_index
      move_to_destination
      ping_search_engines
    ensure
      remove_temp_dir
    end

    def generate_index
      IndexGenerator.new(sitemaps).generate
    end

    def create_temp_dir
      remove_temp_dir
      FileUtils.mkdir_p DynamicSitemaps.temp_path
    end

    def remove_temp_dir
      FileUtils.rm_rf DynamicSitemaps.temp_path
    end

    # Moves generated files from the temp dir into each final folder,
    # clearing any previous generation first.
    def move_to_destination
      sitemaps.map(&:folder).uniq.each do |folder|
        destination = "#{DynamicSitemaps.path}/#{folder}"
        FileUtils.mkdir_p destination
        FileUtils.rm_rf Dir.glob("#{destination}/*")
        FileUtils.mv Dir["#{DynamicSitemaps.temp_path}/#{folder}/*"], destination
      end
      remove_temp_dir
    end

    def ping_search_engines
      Pinger.ping_search_engines_with ping_urls
    end

    # DSL: declares and generates a sitemap; :host and :folder default to
    # the generator-level settings.
    def sitemap(*args, &block)
      args << {} unless args.last.is_a?(Hash)
      args.last[:host] ||= host
      args.last[:folder] ||= folder
      sitemap = Sitemap.new(*args, &block)
      ensure_valid_sitemap_name! sitemap
      sitemap_names[sitemap.folder] << sitemap.name
      sitemaps << SitemapGenerator.new(sitemap).generate
    end

    def sitemap_for(collection, options = {}, &block)
      raise ArgumentError, "The collection given to `sitemap_for` must respond to #find_each. This is for performance. Use `Model.scoped` to get an ActiveRecord relation that responds to #find_each." unless collection.respond_to?(:find_each)
      # BUGFIX (Rails 4): model_name returns an ActiveModel::Name, which no
      # longer responds to #underscore — convert it to a String first.
      name = options.delete(:name) || collection.model_name.to_s.underscore.pluralize.to_sym
      options[:collection] = collection
      sitemap(name, options, &block)
    end

    def ensure_valid_sitemap_name!(sitemap)
      raise ArgumentError, "Sitemap name :#{sitemap.name} has already been defined for the folder \"#{sitemap.folder}\". Please use `sitemap :other_name do ... end` or `sitemap_for <relation>, name: :other_name`." if sitemap_names[sitemap.folder].include?(sitemap.name)
      raise ArgumentError, "Sitemap name :#{sitemap.name} conflicts with the index file name #{DynamicSitemaps.index_file_name}. Please change it using `sitemap :other_name do ... end`." if "#{sitemap.name}.xml" == DynamicSitemaps.index_file_name
    end

    # Array of SitemapResult
    def sitemaps
      @sitemaps ||= []
    end

    # Generated sitemap names, keyed by folder.
    def sitemap_names
      @sitemap_names ||= Hash.new { |h, k| h[k] = [] }
    end

    # URLs to ping after generation
    def ping_urls
      @ping_urls ||= []
    end

    def host(*args)
      if args.any?
        @host = args.first
        Rails.application.routes.default_url_options[:host] = @host
      else
        @host
      end
    end

    # Adds a sitemap URL to ping search engines with after generation.
    def ping_with(sitemap_url)
      ping_urls << sitemap_url
    end

    def folder(*args)
      if args.any?
        @folder = args.first
        raise ArgumentError, "Folder can't be blank." if @folder.blank?
      else
        # Ensure that the default folder is set and cleaned.
        folder DynamicSitemaps.folder if @folder.blank?
        @folder
      end
    end
  end
end
Rails 4 issue: convert the ActiveModel name to a string before underscoring it
module DynamicSitemaps
  # Builds sitemaps into a temp dir, writes an index, moves the result into
  # its final destination, and pings search engines.
  class Generator
    # Generates the sitemap(s) and index based on the configuration file specified in DynamicSitemaps.config_path.
    # If you supply a block, that block is evaluated instead of the configuration file.
    def generate(&block)
      create_temp_dir
      if block
        # Parenthesized to avoid the "`&' interpreted as argument prefix" warning.
        instance_eval(&block)
      else
        instance_eval open(DynamicSitemaps.config_path).read, DynamicSitemaps.config_path
      end
      generate_index
      move_to_destination
      ping_search_engines
    ensure
      remove_temp_dir
    end

    def generate_index
      IndexGenerator.new(sitemaps).generate
    end

    def create_temp_dir
      remove_temp_dir
      FileUtils.mkdir_p DynamicSitemaps.temp_path
    end

    def remove_temp_dir
      FileUtils.rm_rf DynamicSitemaps.temp_path
    end

    # Moves generated files from the temp dir into each final folder,
    # clearing any previous generation first.
    def move_to_destination
      sitemaps.map(&:folder).uniq.each do |folder|
        destination = "#{DynamicSitemaps.path}/#{folder}"
        FileUtils.mkdir_p destination
        FileUtils.rm_rf Dir.glob("#{destination}/*")
        FileUtils.mv Dir["#{DynamicSitemaps.temp_path}/#{folder}/*"], destination
      end
      remove_temp_dir
    end

    def ping_search_engines
      Pinger.ping_search_engines_with ping_urls
    end

    # DSL: declares and generates a sitemap; :host and :folder default to
    # the generator-level settings.
    def sitemap(*args, &block)
      args << {} unless args.last.is_a?(Hash)
      args.last[:host] ||= host
      args.last[:folder] ||= folder
      sitemap = Sitemap.new(*args, &block)
      ensure_valid_sitemap_name! sitemap
      sitemap_names[sitemap.folder] << sitemap.name
      sitemaps << SitemapGenerator.new(sitemap).generate
    end

    def sitemap_for(collection, options = {}, &block)
      raise ArgumentError, "The collection given to `sitemap_for` must respond to #find_each. This is for performance. Use `Model.scoped` to get an ActiveRecord relation that responds to #find_each." unless collection.respond_to?(:find_each)
      # model_name is an ActiveModel::Name in Rails 4; stringify before underscoring.
      name = options.delete(:name) || collection.model_name.to_s.underscore.pluralize.to_sym
      options[:collection] = collection
      sitemap(name, options, &block)
    end

    def ensure_valid_sitemap_name!(sitemap)
      raise ArgumentError, "Sitemap name :#{sitemap.name} has already been defined for the folder \"#{sitemap.folder}\". Please use `sitemap :other_name do ... end` or `sitemap_for <relation>, name: :other_name`." if sitemap_names[sitemap.folder].include?(sitemap.name)
      raise ArgumentError, "Sitemap name :#{sitemap.name} conflicts with the index file name #{DynamicSitemaps.index_file_name}. Please change it using `sitemap :other_name do ... end`." if "#{sitemap.name}.xml" == DynamicSitemaps.index_file_name
    end

    # Array of SitemapResult
    def sitemaps
      @sitemaps ||= []
    end

    # Generated sitemap names, keyed by folder.
    def sitemap_names
      @sitemap_names ||= Hash.new { |h, k| h[k] = [] }
    end

    # URLs to ping after generation
    def ping_urls
      @ping_urls ||= []
    end

    def host(*args)
      if args.any?
        @host = args.first
        Rails.application.routes.default_url_options[:host] = @host
      else
        @host
      end
    end

    # Adds a sitemap URL to ping search engines with after generation.
    def ping_with(sitemap_url)
      ping_urls << sitemap_url
    end

    def folder(*args)
      if args.any?
        @folder = args.first
        raise ArgumentError, "Folder can't be blank." if @folder.blank?
      else
        # Ensure that the default folder is set and cleaned.
        folder DynamicSitemaps.folder if @folder.blank?
        @folder
      end
    end
  end
end
module DynamoDB
  module Migration
    # Applies each defined migration unit once, tracking progress in a
    # DynamoDB table keyed by the migration's source file name.
    class Execute
      DEFAULT_MIGRATION_TABLE_NAME = 'migrations'

      # @param client [Aws::DynamoDB::Client]
      # @param migration_table_name [String, nil] overrides the tracking
      #   table name; falls back to ENV then DEFAULT_MIGRATION_TABLE_NAME.
      def initialize(client, migration_table_name)
        @client = client
        @migration_table_name = migration_table_name
      end

      # Ensures the tracking table exists, then applies every pending migration.
      def update_all
        ensure_migrations_table_exists
        migration_classes.each do |clazz|
          apply_migration(clazz)
        end
      end

      private

      attr_reader :client

      # Runs a single migration unless already completed; records the start,
      # marks success, and removes the in-progress record on failure so the
      # migration can be retried.
      def apply_migration(clazz)
        return if migration_completed?(clazz)
        record_start_migration(clazz)
        migration = clazz.new
        migration.client = client
        migration.update
        record_successful_migration(clazz)
      rescue Aws::DynamoDB::Errors::ServiceError
        record_failed_migration(clazz)
        raise
      end

      # Deletes the tracking record, but only while it is still incomplete.
      def record_failed_migration(clazz)
        client.delete_item({
          table_name: migration_table_name,
          key: {
            "file" => clazz_filename(clazz),
          },
          condition_expression: "completed = :false",
          expression_attribute_values: {
            ":false" => false
          }
        })
      end

      def record_start_migration(clazz)
        client.put_item({
          table_name: migration_table_name,
          item: {
            "file" => clazz_filename(clazz),
            "executed_at" => Time.now.iso8601,
            "created_at" => Time.now.iso8601,
            "updated_at" => Time.now.iso8601,
            "completed" => false,
          },
          return_values: "NONE",
        })
      end

      # Flips the tracking record to completed, guarded so it only succeeds
      # for a record that is currently in progress.
      def record_successful_migration(clazz)
        client.update_item({
          table_name: migration_table_name,
          key: {
            "file" => clazz_filename(clazz),
          },
          update_expression: "SET completed = :true",
          condition_expression: "completed = :false",
          expression_attribute_values: {
            ":false" => false,
            ":true" => true,
          }
        })
      end

      # Resolves the source file a migration class was defined in (via the
      # source_location of its instance methods) and returns its basename.
      def clazz_filename(clazz)
        full_filename = clazz.instance_methods(false)
                             .map { |m| clazz.instance_method(m).source_location }
                             .compact
                             .map { |m| m.first }
                             .uniq
                             .first
        File.basename(full_filename)
      end

      # All subclasses of DynamoDB::Migration::Unit, ordered by file name.
      def migration_classes
        ObjectSpace.each_object(DynamoDB::Migration::Unit.singleton_class)
                   .reject { |c| c == DynamoDB::Migration::Unit }
                   .sort_by { |c| clazz_filename(c) }
      end

      def migration_completed?(clazz)
        migration = client.get_item({
          table_name: migration_table_name,
          key: {
            "file" => clazz_filename(clazz),
          },
          attributes_to_get: ["file", "completed"],
          consistent_read: true,
        }).item
        migration && migration["completed"]
      end

      # Creates the tracking table if missing and waits for it to become
      # ACTIVE before returning.
      def ensure_migrations_table_exists
        unless table_exists?(client, migration_table_name)
          client.create_table(
            table_name: migration_table_name,
            attribute_definitions: [
              {
                attribute_name: "file",
                attribute_type: "S",
              },
            ],
            key_schema: [
              {
                attribute_name: "file",
                key_type: "HASH",
              },
            ],
            provisioned_throughput: {
              read_capacity_units: 1,
              write_capacity_units: 1,
            },
            stream_specification: {
              stream_enabled: true,
              stream_view_type: "NEW_AND_OLD_IMAGES",
            },
          )
          # BUGFIX: create_table is asynchronous; without this waiter the
          # first put_item could race the creation and fail while the table
          # is still in CREATING state.
          client.wait_until(:table_exists, table_name: migration_table_name)
        end
      rescue Aws::DynamoDB::Errors::ResourceInUseException => e
        raise e unless e.message =~ /preexisting table/i
      end

      # Truthy (the describe_table response) when the table exists.
      def table_exists?(client, table_name)
        client.describe_table(table_name: table_name)
      rescue Aws::DynamoDB::Errors::ResourceNotFoundException
        false
      end

      def migration_table_name
        @migration_table_name ||
          ENV['DYNAMODB_MIGRATION_TABLE_NAME'] ||
          DEFAULT_MIGRATION_TABLE_NAME
      end
    end
  end
end
After submitting the request to create the migrations table, I added a waiter that waits for it to exist before continuing with the migration. This resolves issue #4.
module DynamoDB
  module Migration
    # Applies each defined migration unit once, tracking progress in a
    # DynamoDB table keyed by the migration's source file name.
    class Execute
      DEFAULT_MIGRATION_TABLE_NAME = 'migrations'

      # @param client [Aws::DynamoDB::Client]
      # @param migration_table_name [String, nil] overrides the tracking
      #   table name; falls back to ENV then DEFAULT_MIGRATION_TABLE_NAME.
      def initialize(client, migration_table_name)
        @client = client
        @migration_table_name = migration_table_name
      end

      # Ensures the tracking table exists, then applies every pending migration.
      def update_all
        ensure_migrations_table_exists
        migration_classes.each do |clazz|
          apply_migration(clazz)
        end
      end

      private

      attr_reader :client

      # Runs a single migration unless already completed; records the start,
      # marks success, and removes the in-progress record on failure so the
      # migration can be retried. (Unused rescue binding removed.)
      def apply_migration(clazz)
        return if migration_completed?(clazz)
        record_start_migration(clazz)
        migration = clazz.new
        migration.client = client
        migration.update
        record_successful_migration(clazz)
      rescue Aws::DynamoDB::Errors::ServiceError
        record_failed_migration(clazz)
        raise
      end

      # Deletes the tracking record, but only while it is still incomplete.
      def record_failed_migration(clazz)
        client.delete_item({
          table_name: migration_table_name,
          key: {
            "file" => clazz_filename(clazz),
          },
          condition_expression: "completed = :false",
          expression_attribute_values: {
            ":false" => false
          }
        })
      end

      def record_start_migration(clazz)
        client.put_item({
          table_name: migration_table_name,
          item: {
            "file" => clazz_filename(clazz),
            "executed_at" => Time.now.iso8601,
            "created_at" => Time.now.iso8601,
            "updated_at" => Time.now.iso8601,
            "completed" => false,
          },
          return_values: "NONE",
        })
      end

      # Flips the tracking record to completed, guarded so it only succeeds
      # for a record that is currently in progress.
      def record_successful_migration(clazz)
        client.update_item({
          table_name: migration_table_name,
          key: {
            "file" => clazz_filename(clazz),
          },
          update_expression: "SET completed = :true",
          condition_expression: "completed = :false",
          expression_attribute_values: {
            ":false" => false,
            ":true" => true,
          }
        })
      end

      # Resolves the source file a migration class was defined in (via the
      # source_location of its instance methods) and returns its basename.
      def clazz_filename(clazz)
        full_filename = clazz.instance_methods(false)
                             .map { |m| clazz.instance_method(m).source_location }
                             .compact
                             .map { |m| m.first }
                             .uniq
                             .first
        File.basename(full_filename)
      end

      # All subclasses of DynamoDB::Migration::Unit, ordered by file name.
      def migration_classes
        ObjectSpace.each_object(DynamoDB::Migration::Unit.singleton_class)
                   .reject { |c| c == DynamoDB::Migration::Unit }
                   .sort_by { |c| clazz_filename(c) }
      end

      def migration_completed?(clazz)
        migration = client.get_item({
          table_name: migration_table_name,
          key: {
            "file" => clazz_filename(clazz),
          },
          attributes_to_get: ["file", "completed"],
          consistent_read: true,
        }).item
        migration && migration["completed"]
      end

      # Creates the tracking table if missing and waits until it is ACTIVE,
      # since create_table is asynchronous.
      def ensure_migrations_table_exists
        unless table_exists?(client, migration_table_name)
          client.create_table(
            table_name: migration_table_name,
            attribute_definitions: [
              {
                attribute_name: "file",
                attribute_type: "S",
              },
            ],
            key_schema: [
              {
                attribute_name: "file",
                key_type: "HASH",
              },
            ],
            provisioned_throughput: {
              read_capacity_units: 1,
              write_capacity_units: 1,
            },
            stream_specification: {
              stream_enabled: true,
              stream_view_type: "NEW_AND_OLD_IMAGES",
            },
          )
          puts "Waiting for table #{migration_table_name} to exist..."
          client.wait_until(:table_exists, {:table_name => migration_table_name})
          puts "#{migration_table_name} exists, continuing migration."
        end
      rescue Aws::DynamoDB::Errors::ResourceInUseException => e
        raise e unless e.message =~ /preexisting table/i
      end

      # Truthy (the describe_table response) when the table exists.
      def table_exists?(client, table_name)
        client.describe_table(table_name: table_name)
      rescue Aws::DynamoDB::Errors::ResourceNotFoundException
        false
      end

      def migration_table_name
        @migration_table_name ||
          ENV['DYNAMODB_MIGRATION_TABLE_NAME'] ||
          DEFAULT_MIGRATION_TABLE_NAME
      end
    end
  end
end
|
module Ember
  module ES6Template
    # Gem version string.
    VERSION = '0.2.0'
  end
end
Bump version to 0.2.1
module Ember
  module ES6Template
    # Gem version string.
    VERSION = '0.2.1'
  end
end
|
require "json_api_client"
require "faraday/request/basic_authentication"
require "uri"
require "active_support/core_ext/hash/indifferent_access"
require "flex_commerce_api/json_api_client_extension/paginator"
require "flex_commerce_api/json_api_client_extension/requestor"
require "flex_commerce_api/json_api_client_extension/save_request_body_middleware"
require "flex_commerce_api/json_api_client_extension/logging_middleware"
require "flex_commerce_api/json_api_client_extension/status_middleware"
require "flex_commerce_api/json_api_client_extension/json_format_middleware"
require "flex_commerce_api/json_api_client_extension/previewed_request_middleware"
require "flex_commerce_api/json_api_client_extension/has_many_association_proxy"
require "flex_commerce_api/json_api_client_extension/builder"
require "flex_commerce_api/json_api_client_extension/flexible_connection"
module FlexCommerceApi
  #
  # Base class for all flex commerce models
  #
  class ApiBase < JsonApiClient::Resource
    # Attribute keys excluded from public_attributes.
    PRIVATE_ATTRIBUTES = %w(id type relationships links meta)
    # set the api base url in an abstract base class
    self.paginator = JsonApiClientExtension::Paginator
    self.requestor_class = JsonApiClientExtension::Requestor
    self.connection_class = ::FlexCommerceApi::JsonApiClientExtension::FlexibleConnection

    class << self
      # Like create, but raises RecordInvalid when the created resource
      # has validation errors.
      def create!(*args)
        create(*args).tap do |resource|
          raise(::FlexCommerceApi::Error::RecordInvalid.new(resource)) unless resource.errors.empty?
        end
      end

      # @method all
      #   Returns all resources
      #   @return [FlexCommerceApi::ApiBase[]] An array of resources or an empty array
      # @method paginate
      #   Paginates the list of resources by a preset page size
      #   @param [Hash] options The options to paginate with
      #   @param options [Numeric|String] :page The page to fetch

      # @method find
      # @param [String] spec The spec of what to find
      #
      # Finds a resource
      # @return [FlexCommerceApi::ApiBase] resource The resource found
      # @raise [FlexCommerceApi::Error::NotFound] If not found
      def find(*args)
        # This is required as currently the underlying gem returns an array
        # even if 1 record is found. This is inconsistent with active record
        result = super
        result.length <= 1 ? result.first : result
      end

      # The username to use for authentication. This is the same as
      # the account name from the flex platform.
      # @return [String] The username
      def username
        # First non-empty path segment of the configured site URL.
        URI.parse(site).path.split("/").reject(&:empty?).first
      end

      # The password to use for authentication. This is the same as
      # the access key token from the flex platform.
      # @return [String] The password
      def password
        FlexCommerceApi.config.flex_api_key
      end

      # Drops the `record` argument before delegating to the superclass.
      def path(params = nil, record = nil)
        super(params)
      end

      # Reconfigures this class and every subclass from the current
      # FlexCommerceApi.config.
      def reconfigure_all options = {}
        subclasses.each do |sub|
          sub.reconfigure options
        end
        reconfigure options
      end

      # Re-reads site/adapter settings from FlexCommerceApi.config, merges
      # in the given connection options, and rebuilds the connection.
      def reconfigure options = {}
        self.site = FlexCommerceApi.config.api_base_url
        adapter_options = { adapter: FlexCommerceApi.config.adapter || :net_http }
        self.connection_options.delete(:include_previewed)
        self.connection_options = connection_options.merge(adapter_options).merge(options)
        reload_connection_if_required
      end

      # Forces the cached connection to be rebuilt, if one exists.
      def reload_connection_if_required
        _build_connection(true) if connection_object
      end
    end

    # Apply configuration at class-definition time.
    reconfigure

    # Ensures all attributes are with indifferent access
    def initialize(attrs = {})
      super attrs.with_indifferent_access
    end

    # Like save, but raises RecordInvalid when saving fails.
    def save!
      return if save
      raise_record_invalid
    end

    # All attributes except the internal/private ones.
    def public_attributes
      attributes.reject { |k, v| PRIVATE_ATTRIBUTES.include?(k.to_s) }
    end

    # Value of a meta attribute, or nil if absent (rescue swallows missing keys).
    def meta_attribute(key)
      attributes[:meta_attributes][key][:value] rescue nil
    end

    # Value of a template attribute, or nil if absent.
    def template_attribute(key)
      attributes[:template_attributes][key][:value] rescue nil
    end

    # Unknown attribute-style reads return nil instead of raising;
    # relationships, setters (`=`) and bang (`!`) calls still go to super.
    def method_missing(method, *args)
      if relationships and relationships.has_attribute?(method)
        super
      else
        has_attribute?(method) || method.to_s=~(/=$/) || method.to_s=~/!$/ ? super : nil
      end
    end

    private

    def raise_record_invalid
      raise(::FlexCommerceApi::Error::RecordInvalid.new(self))
    end

    # This is temporary code - eventually this will be in the lower level gem
    def has_many_association_proxy(assoc_name, real_instance, options = {})
      JsonApiClientExtension::HasManyAssociationProxy.new(real_instance, self, assoc_name, options)
    end
  end
end
Retrieves related resources contained within template attributes - WIP
require "json_api_client"
require "faraday/request/basic_authentication"
require "uri"
require "active_support/core_ext/hash/indifferent_access"
require "flex_commerce_api/json_api_client_extension/paginator"
require "flex_commerce_api/json_api_client_extension/requestor"
require "flex_commerce_api/json_api_client_extension/save_request_body_middleware"
require "flex_commerce_api/json_api_client_extension/logging_middleware"
require "flex_commerce_api/json_api_client_extension/status_middleware"
require "flex_commerce_api/json_api_client_extension/json_format_middleware"
require "flex_commerce_api/json_api_client_extension/previewed_request_middleware"
require "flex_commerce_api/json_api_client_extension/has_many_association_proxy"
require "flex_commerce_api/json_api_client_extension/builder"
require "flex_commerce_api/json_api_client_extension/flexible_connection"
module FlexCommerceApi
  #
  # Base class for all flex commerce models
  #
  class ApiBase < JsonApiClient::Resource
    # Attribute keys excluded from public_attributes.
    PRIVATE_ATTRIBUTES = %w(id type relationships links meta)
    # set the api base url in an abstract base class
    self.paginator = JsonApiClientExtension::Paginator
    self.requestor_class = JsonApiClientExtension::Requestor
    self.connection_class = ::FlexCommerceApi::JsonApiClientExtension::FlexibleConnection

    class << self
      # Like create, but raises RecordInvalid when the created resource
      # has validation errors.
      def create!(*args)
        create(*args).tap do |resource|
          raise(::FlexCommerceApi::Error::RecordInvalid.new(resource)) unless resource.errors.empty?
        end
      end

      # @method find
      # @param [String] spec The spec of what to find
      #
      # Finds a resource
      # @return [FlexCommerceApi::ApiBase] resource The resource found
      # @raise [FlexCommerceApi::Error::NotFound] If not found
      def find(*args)
        # This is required as currently the underlying gem returns an array
        # even if 1 record is found. This is inconsistent with active record
        result = super
        result.length <= 1 ? result.first : result
      end

      # The username to use for authentication. This is the same as
      # the account name from the flex platform.
      # @return [String] The username
      def username
        URI.parse(site).path.split("/").reject(&:empty?).first
      end

      # The password to use for authentication. This is the same as
      # the access key token from the flex platform.
      # @return [String] The password
      def password
        FlexCommerceApi.config.flex_api_key
      end

      # Drops the `record` argument before delegating to the superclass.
      def path(params = nil, record = nil)
        super(params)
      end

      # Reconfigures this class and every subclass from the current config.
      def reconfigure_all options = {}
        subclasses.each do |sub|
          sub.reconfigure options
        end
        reconfigure options
      end

      # Re-reads site/adapter settings, merges in the given connection
      # options, and rebuilds the connection.
      def reconfigure options = {}
        self.site = FlexCommerceApi.config.api_base_url
        adapter_options = { adapter: FlexCommerceApi.config.adapter || :net_http }
        self.connection_options.delete(:include_previewed)
        self.connection_options = connection_options.merge(adapter_options).merge(options)
        reload_connection_if_required
      end

      # Forces the cached connection to be rebuilt, if one exists.
      def reload_connection_if_required
        _build_connection(true) if connection_object
      end
    end

    # Apply configuration at class-definition time.
    reconfigure

    # Ensures all attributes are with indifferent access
    def initialize(attrs = {})
      super attrs.with_indifferent_access
    end

    # Like save, but raises RecordInvalid when saving fails.
    def save!
      return if save
      raise_record_invalid
    end

    # All attributes except the internal/private ones.
    def public_attributes
      attributes.reject { |k, v| PRIVATE_ATTRIBUTES.include?(k.to_s) }
    end

    # Value of a meta attribute, or nil if absent.
    def meta_attribute(key)
      attributes[:meta_attributes][key][:value] rescue nil
    end

    # Value of a template attribute, or nil if absent. "related-files" /
    # "related-products" attributes resolve through the matching
    # association instead of the raw value.
    def template_attribute(key)
      # BUGFIX: previously the `rescue nil` only covered the else-branch
      # value lookup, so a missing template_attributes hash (or key) made
      # the `case` line raise NoMethodError instead of returning nil.
      attribute = attributes[:template_attributes][key] rescue nil
      return nil if attribute.nil?
      case attribute[:data_type]
      when "related-files", "related-products" then self.send(key)
      else attribute[:value]
      end
    end

    # Unknown attribute-style reads return nil instead of raising;
    # relationships, setters (`=`) and bang (`!`) calls still go to super.
    def method_missing(method, *args)
      if relationships and relationships.has_attribute?(method)
        super
      else
        has_attribute?(method) || method.to_s=~(/=$/) || method.to_s=~/!$/ ? super : nil
      end
    end

    private

    def raise_record_invalid
      raise(::FlexCommerceApi::Error::RecordInvalid.new(self))
    end

    # This is temporary code - eventually this will be in the lower level gem
    def has_many_association_proxy(assoc_name, real_instance, options = {})
      JsonApiClientExtension::HasManyAssociationProxy.new(real_instance, self, assoc_name, options)
    end
  end
end
|
forgot rails engine
module Flowplayer
  module Rails
    # Empty Rails engine; declaring it is what hooks the gem into the host
    # application (standard engine boilerplate — presumably so the gem's
    # assets are picked up by the asset pipeline).
    class Engine < ::Rails::Engine
    end
  end
end
|
module Fluent
  # Output plugin that counts occurrences of each record's "keys" values
  # per input tag and re-emits the aggregated histograms under @tag.
  class CombinerOutput < Fluent::Output
    Fluent::Plugin.register_output('combiner', self)

    # config_param :hoge, :string, :default => 'hoge'
    config_param :tag, :string, :default => 'combined'
    config_param :tag_prefix, :string, :default => nil
    config_param :input_tag_remove_prefix, :string, :default => nil

    # hist maps input tag => {:hist => {key => count}, :sum => total
    # increments, :len => number of distinct keys}
    attr_accessor :hist

    def initialize
      super
      require 'pathname'
    end

    def configure(conf)
      super
      @hist = initialize_hist
    end

    # Builds an empty histogram hash; when tags are given, pre-seeds an
    # empty entry for each so known tags survive a flush.
    def initialize_hist(tags=nil)
      hist = {}
      if tags
        tags.each do |tag|
          hist[tag] = {:hist => {}, :sum => 0, :len => 0}
        end
      end
      hist
    end

    #def start
    #  super
    #end

    #def shutdown
    #  super
    #end

    # Adds one occurrence of key under tag, maintaining :sum and the
    # distinct-key count :len.
    def increment(tag, key)
      @hist[tag] ||= {:hist => {}, :sum => 0, :len => 0}
      if @hist[tag][:hist].key? key
        @hist[tag][:hist][key] += 1
        @hist[tag][:sum] += 1
      else
        @hist[tag][:hist][key] = 1
        @hist[tag][:sum] += 1
        @hist[tag][:len] += 1
      end
      @hist
    end

    # Accepts a single key (String) or a list of keys (Array); any other
    # type is silently ignored.
    def countup(tag, keys)
      if keys.is_a?(Array)
        keys.each {|k| increment(tag, k)}
      elsif keys.is_a?(String)
        increment(tag, keys)
      end
    end

    def clear
      @hist = initialize_hist(@hist.keys.dup)
    end

    # Swaps out the current histograms for fresh (pre-seeded) ones and
    # returns the old data in output form.
    def flush
      flushed, @hist = @hist, initialize_hist(@hist.keys.dup)
      generate_output(flushed)
    end

    # NOTE(review): nothing in this class calls flush_emit periodically —
    # presumably an external timer/watcher is expected to drive it; confirm.
    def flush_emit
      Fluent::Engine.emit(@tag, Fluent::Engine.now, flush)
    end

    # Re-keys each histogram as "<output tag>.<input tag>".
    def generate_output(data)
      output = {}
      data.each do |tag, hist|
        output[@tag + '.' + tag] = hist
      end
      output
    end

    # Counts the "keys" field of every incoming record under its tag.
    def emit(tag, es, chain)
      es.each do |time, record|
        keys = record["keys"]
        countup(tag, keys)
      end
      chain.next
    end
  end
end
Add a watcher thread that periodically flushes the hist data.
module Fluent
  # Output plugin that counts occurrences of each record's "keys" values
  # per input tag and periodically re-emits the aggregated histograms
  # under @tag via a watcher thread.
  class CombinerOutput < Fluent::Output
    Fluent::Plugin.register_output('combiner', self)

    # config_param :hoge, :string, :default => 'hoge'
    config_param :tag, :string, :default => 'combined'
    config_param :tag_prefix, :string, :default => nil
    config_param :input_tag_remove_prefix, :string, :default => nil
    config_param :count_interval, :time, :default => 60

    attr_accessor :hist, :last_checked, :tick

    def initialize
      super
      require 'pathname'
    end

    def configure(conf)
      super
      @hist = initialize_hist
      # BUGFIX: @tick was declared (attr_accessor) but never assigned, so
      # the watcher thread crashed comparing elapsed time against nil.
      # Derive it from the configured count_interval.
      @tick = @count_interval
    end

    # Builds an empty histogram hash; when tags are given, pre-seeds an
    # empty entry for each so known tags survive a flush.
    def initialize_hist(tags=nil)
      hist = {}
      if tags
        tags.each do |tag|
          hist[tag] = {:hist => {}, :sum => 0, :len => 0}
        end
      end
      hist
    end

    def start
      super
      start_watch
    end

    def shutdown
      super
      @watcher.terminate
      @watcher.join
    end

    # Adds one occurrence of key under tag, maintaining :sum and the
    # distinct-key count :len.
    def increment(tag, key)
      @hist[tag] ||= {:hist => {}, :sum => 0, :len => 0}
      if @hist[tag][:hist].key? key
        @hist[tag][:hist][key] += 1
        @hist[tag][:sum] += 1
      else
        @hist[tag][:hist][key] = 1
        @hist[tag][:sum] += 1
        @hist[tag][:len] += 1
      end
      @hist
    end

    # Accepts a single key (String) or a list of keys (Array); any other
    # type is silently ignored.
    def countup(tag, keys)
      if keys.is_a?(Array)
        keys.each {|k| increment(tag, k)}
      elsif keys.is_a?(String)
        increment(tag, keys)
      end
    end

    def clear
      @hist = initialize_hist(@hist.keys.dup)
    end

    # Swaps out the current histograms for fresh (pre-seeded) ones and
    # returns the old data in output form.
    def flush
      flushed, @hist = @hist, initialize_hist(@hist.keys.dup)
      generate_output(flushed)
    end

    def flush_emit
      Fluent::Engine.emit(@tag, Fluent::Engine.now, flush)
    end

    # Re-keys each histogram as "<output tag>.<input tag>".
    def generate_output(data)
      output = {}
      data.each do |tag, hist|
        output[@tag + '.' + tag] = hist
      end
      output
    end

    # Counts the "keys" field of every incoming record under its tag.
    def emit(tag, es, chain)
      es.each do |time, record|
        keys = record["keys"]
        countup(tag, keys)
      end
      chain.next
    end

    private

    def start_watch
      @watcher = Thread.new(&method(:watch))
    end

    # Polls twice a second and emits the aggregated data every @tick seconds.
    def watch
      @last_checked ||= Fluent::Engine.now
      while true
        sleep 0.5
        if Fluent::Engine.now - @last_checked >= @tick
          now = Fluent::Engine.now
          flush_emit
          @last_checked = now
        end
      end
    end
  end
end
|
# encoding:utf-8
#--
# The MIT License (MIT)
#
# Copyright (c) 2015, The Gamera Development Team. See the COPYRIGHT file at
# the top-level directory of this distribution and at
# http://github.com/gamera-team/gamera/COPYRIGHT.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#++
require 'capybara'
module Gamera
module PageSections
# This class represents a table on a web page. For example, if you had
# a page like
#
# <html>
# <body>
# <p>Example table</p>
# <table>
# <tr>
# <th>Item</th>
# <th>Wholesale Cost</th>
# <th>Retail Cost</th>
# <th></th>
# <th></th>
# </tr>
# <tr>
# <td>Red Hat</td>
# <td>2.00</td>
# <td>15.00</td>
# <td><a href="/item/12/edit">Edit</a></td>
# <td><a href="/item/12/delete">Delete</a></td>
# </tr>
# <tr>
# <td>Skull cap</td>
# <td>2.00</td>
# <td>27.00</td>
# <td><a href="/item/1/edit">Edit</a></td>
# <td><a href="/item/1/delete">Delete</a></td>
# </tr>
# </table>
# </body>
# </html>
#
# you could include this in a page object class like so:
#
# class HatPage < Gamera::Page
# include Forwardable
# include Gamera::PageSections
#
#   attr_reader :hat_table
#
#   def initialize
#     super(path_join(BASE_URL, '/hat'), %r{hat})
#
#     headers = ['Item', 'Wholesale Cost', 'Retail Cost']
#     @hat_table = Table.new(headers: headers, row_name: 'hat',
#                            name_column: 'Item')
#     def_delegators :hat_table,
#                    :hat, :hats,
#                    :has_hat?, :has_hats?,
#                    :edit_hat, :delete_hat
# end
# end
#
class Table < DelegateClass(Capybara::Node::Element)
include Capybara::DSL
# @param headers [Array] An array of the strings from the tables header row
# @param row_name [String] A label that can be used to create more readable versions of general row methods
# @param plural_row_name [String] Plural form of [row_name]
# @param name_column [String] The column heading for the column which contains each row's name
# @param row_css [String] The CSS selector which is used to find individual rows in the table
# @param row_class [Class] The class which will represent a table row
# @param row_editor [Class] A class which defines the edit behavior for a row
# @param row_deleter [Class] A class which defines the edit behavior for a row
def initialize(headers:,
               row_name:,
               plural_row_name: nil,
               name_column: 'Name',
               row_css: 'tr + tr', # all tr's except the first one (which is almost always a table header)
               row_class: TableRow,
               row_editor: RowEditor.new,
               row_deleter: RowDeleter.new)
  @row_css = row_css
  @headers = headers
  @row_class = row_class
  @row_editor = row_editor
  @row_deleter = row_deleter
  @row_name = row_name
  @plural_row_name = plural_row_name
  # Normalize the heading into a method-name-style key,
  # e.g. "Wholesale Cost" -> "wholesale_cost".
  @name_column = name_column.downcase.tr(' ', '_').gsub(/[^a-z0-9_]+/, '')
  # Defines row_name-flavored aliases (e.g. #hats, #has_hat?) for the
  # generic row methods.
  add_custom_function_names
end
# Retrieves an array of rows from the table
#
# @return [Array] An array of row_class objects
def rows
  # Calling has_rows? first — presumably to let Capybara wait for at
  # least one row to appear before collecting them (confirm).
  has_rows?
  all(row_css).map { |row_node| row_class.new(row_node, headers) }
end
# Finds and returns a row from the table
#
# @param name [String] [RegExp] The name to look for in the table's specified name column.
# @return [row_class] A row_class object that has the matching name or nil
def row_named(name)
  case name
  when String
    rows.detect { |row| row.send(name_column) == name } if has_row?(name)
  when Regexp
    rows.detect { |row| name.match(row.send(name_column)) } if has_row?(name)
  end
end
# Checks for the existence of a row with the given name
#
# @param name [String] The name to look for in the table's specified name column.
# @return [Boolean] True if a row with the specified name is found, false
#                   otherwise
def has_row?(name)
  # Page-level matcher, so Capybara's synchronization applies.
  page.has_selector?(row_css, text: name)
end
# Checks for the absence of a row with the given name
#
# @param name [String] The name to look for in the table's specified name column.
# @return [Boolean] False if a row with the specified name is found, true
#   otherwise
#
# Uses the negative Capybara matcher rather than !has_row? -- presumably so
# the driver can wait for the row to disappear. TODO confirm.
def has_no_row?(name)
  page.has_no_selector?(row_css, text: name)
end
# Checks to see if the table has any rows
#
# @return [Boolean] True if the row selector is found, false otherwise
#
# Note: unlike has_row?, this queries the receiver rather than page.
def has_rows?
  has_selector?(row_css)
end
# Checks to see if the table has no rows
#
# @return [Boolean] False if the row selector is found, true otherwise
#
# Note: unlike has_no_row?, this queries the receiver rather than page.
def has_no_rows?
  has_no_selector?(row_css)
end
# Delete all of the rows from the table
#
# Repeatedly deletes the first remaining row until has_rows? reports none.
def delete_all_rows
  while has_rows?
    r = rows.first.send(name_column)
    delete_row(r)
    has_row?(r) # result unused; presumably relied on for Capybara's implicit wait after deletion -- TODO confirm
  end
end
# Start the delete process for the row matching the specified name
#
# @param name [String, Regexp] Passed through to #row_named. Note that
#   row_deleter.delete receives nil when no row matches -- TODO confirm intended.
def delete_row(name)
  row_deleter.delete(row_named(name))
end
# Start the edit process for the row matching the specified name
#
# @param name [String, Regexp] Passed through to #row_named. Note that
#   row_editor.edit receives nil when no row matches -- TODO confirm intended.
def edit_row(name)
  row_editor.edit(row_named(name))
end
private
attr_reader :headers, :row_css, :row_name, :name_column, :row_class,
:row_editor, :row_deleter
# Defines reader-friendly aliases for the generic row methods based on
# row_name / plural_row_name, e.g. row_name "hat" yields #hats, #has_hat?,
# #delete_hat, #edit_hat and #hat.
#
# BUG FIX: this must use alias_method, not the `alias` keyword. `alias`
# takes literal method names, so `alias rows_name rows` ignored the locally
# computed names and never created the dynamic aliases.
def add_custom_function_names
  row_name = @row_name # The attr_reader wasn't working here
  plural_row_name = @plural_row_name
  rows_name = plural_row_name ? plural_row_name.to_sym : "#{row_name}s".to_sym
  has_row_name = "has_#{row_name}?".to_sym
  has_no_row_name = "has_no_#{row_name}?".to_sym
  has_rows_name = plural_row_name ? "has_#{plural_row_name}?".to_sym : "has_#{row_name}s?".to_sym
  has_no_rows_name = plural_row_name ? "has_no_#{plural_row_name}?".to_sym : "has_no_#{row_name}s?".to_sym
  delete_all_rows_name = plural_row_name ? "delete_all_#{plural_row_name}".to_sym : "delete_all_#{row_name}s".to_sym
  delete_row_name = "delete_#{row_name}".to_sym
  edit_row_name = "edit_#{row_name}".to_sym
  self.class.instance_eval do
    alias_method rows_name, :rows
    alias_method has_row_name, :has_row?
    alias_method has_no_row_name, :has_no_row?
    alias_method has_rows_name, :has_rows?
    alias_method has_no_rows_name, :has_no_rows?
    alias_method delete_all_rows_name, :delete_all_rows
    alias_method delete_row_name, :delete_row
    alias_method edit_row_name, :edit_row
    alias_method row_name, :row_named
  end
end
end
# Default class used to represent a row in a table
class TableRow < DelegateClass(Capybara::Node::Element)
  # @param row_element [Capybara::Node::Element] The Capybara node for this
  #   row. (Renamed from the misleading `row_css`: callers pass the element
  #   itself -- see Table#rows -- never a CSS selector string.)
  # @param headers [Array] An array of the strings from the table's header row
  def initialize(row_element, headers)
    super(row_element)
    headers.each_with_index do |header, i|
      # Define one reader per column, named after the normalized header text,
      # e.g. "Wholesale Cost" => #wholesale_cost, returning the cell's text.
      # NOTE(review): define_method on self.class redefines these readers on
      # every instantiation -- harmless for identical headers; confirm.
      cell_name = header.downcase.tr(' ', '_').gsub(/[^a-z0-9_]+/, '')
      self.class.send(:define_method, cell_name) do
        find("td:nth-child(#{i + 1})").text
      end
    end
  end
end
# Wrapper class for row edit action
class RowEditor
  # Clicks the 'Edit' link inside the given row.
  #
  # @param table_row [Object] Row responding to #find_link
  def edit(table_row)
    edit_link = table_row.find_link('Edit')
    edit_link.click
  end
end
# Wrapper class for row delete action
class RowDeleter
  # Clicks the 'Delete' link inside the given row.
  #
  # @param table_row [Object] Row responding to #find_link
  def delete(table_row)
    delete_link = table_row.find_link('Delete')
    delete_link.click
  end
end
end
end
Reverses a change of alias_method to the unreliable alias.
No idea what I was thinking when I made the initial change. Go alias_method!
# encoding:utf-8
#--
# The MIT License (MIT)
#
# Copyright (c) 2015, The Gamera Development Team. See the COPYRIGHT file at
# the top-level directory of this distribution and at
# http://github.com/gamera-team/gamera/COPYRIGHT.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#++
require 'capybara'
module Gamera
module PageSections
# This class represents a table on a web page. For example, if you had
# a page like
#
# <html>
# <body>
# <p>Example table</p>
# <table>
# <tr>
# <th>Item</th>
# <th>Wholesale Cost</th>
# <th>Retail Cost</th>
# <th></th>
# <th></th>
# </tr>
# <tr>
# <td>Red Hat</td>
# <td>2.00</td>
# <td>15.00</td>
# <td><a href="/item/12/edit">Edit</a></td>
# <td><a href="/item/12/delete">Delete</a></td>
# </tr>
# <tr>
# <td>Skull cap</td>
# <td>2.00</td>
# <td>27.00</td>
# <td><a href="/item/1/edit">Edit</a></td>
# <td><a href="/item/1/delete">Delete</a></td>
# </tr>
# </table>
# </body>
# </html>
#
# you could include this in a page object class like so:
#
# class HatPage < Gamera::Page
# include Forwardable
# include Gamera::PageSections
#
# attr_reader :registration_form, :table
#
# def initialize
# super(path_join(BASE_URL, '/hat'), %r{hat})
#
# headers = ['Item', 'Wholesale Cost', 'Retail Cost']
# @hat_table = Table.new(headers: headers, row_name: 'hat',
# name_column: 'Item')
# def_delegators :hat_table,
# :hat, :hats,
# :has_hat?, :has_hats?,
# :edit_hat, :delete_hat
# end
# end
#
class Table < DelegateClass(Capybara::Node::Element)
include Capybara::DSL
# @param headers [Array] An array of the strings from the tables header row
# @param row_name [String] A label that can be used to create more readable versions of general row methods
# @param plural_row_name [String] Plural form of [row_name]
# @param name_column [String] The column heading for the column which contains each row's name
# @param row_css [String] The CSS selector which is used to find individual rows in the table
# @param row_class [Class] The class which will represent a table row
# @param row_editor [Class] A class which defines the edit behavior for a row
# @param row_deleter [Class] A class which defines the edit behavior for a row
def initialize(headers:,
row_name:,
plural_row_name: nil,
name_column: 'Name',
row_css: 'tr + tr', # all <tr>s except the first one (which is almost always a table header)
row_class: TableRow,
row_editor: RowEditor.new,
row_deleter: RowDeleter.new)
@row_css = row_css
@headers = headers
@row_class = row_class
@row_editor = row_editor
@row_deleter = row_deleter
@row_name = row_name
@plural_row_name = plural_row_name
@name_column = name_column.downcase.tr(' ', '_').gsub(/[^a-z0-9_]+/, '')
add_custom_function_names
end
# Retrieves an array of rows from the table
#
# @return [Array] An array of row_class objects
def rows
has_rows?
all(row_css).map { |r| row_class.new(r, headers) }
end
# Finds and returns a row from the table
#
# @param name [String] [RegExp] The name to look for in the table's specified name column.
# @return [row_class] A row_class object that has the matching name or nil
def row_named(name)
if name.is_a? String
rows.detect { |r| r.send(name_column) == name } if has_row?(name)
elsif name.is_a? Regexp
rows.detect { |r| name.match r.send(name_column) } if has_row?(name)
end
end
# Checks for the existence of a row with the given name
#
# @param name [String] The name to look for in the table's specified name column.
# @return [Boolean] True if a row with the specified name is found, false
# otherwise
def has_row?(name)
page.has_selector?(row_css, text: name)
end
# Checks for the absence of a row with the given name
#
# @param name [String] The name to look for in the table's specified name column.
# @return [Boolean] False if a row with the specified name is found, true
# otherwise
def has_no_row?(name)
page.has_no_selector?(row_css, text: name)
end
# Checks to see if the table has any rows
#
# @return [Boolean] True if the row selector is found, false otherwise
def has_rows?
has_selector?(row_css)
end
# Checks to see if the table has no rows
#
# @return [Boolean] False if the row selector is found, true otherwise
def has_no_rows?
has_no_selector?(row_css)
end
# Delete all of the rows from the table
def delete_all_rows
while has_rows?
r = rows.first.send(name_column)
delete_row(r)
has_row?(r)
end
end
# Start the delete process for the row matching the specified name
def delete_row(name)
row_deleter.delete(row_named(name))
end
# Start the edit process for the row matching the specified name
def edit_row(name)
row_editor.edit(row_named(name))
end
private
attr_reader :headers, :row_css, :row_name, :name_column, :row_class,
:row_editor, :row_deleter
def add_custom_function_names
row_name = @row_name # The attr_reader wasn't working here
plural_row_name = @plural_row_name
rows_name = plural_row_name ? plural_row_name.to_sym : "#{row_name}s".to_sym
has_row_name = "has_#{row_name}?".to_sym
has_no_row_name = "has_no_#{row_name}?".to_sym
has_rows_name = plural_row_name ? "has_#{plural_row_name}?".to_sym : "has_#{row_name}s?".to_sym
has_no_rows_name = plural_row_name ? "has_no_#{plural_row_name}?".to_sym : "has_no_#{row_name}s?".to_sym
delete_all_rows_name = plural_row_name ? "delete_all_#{plural_row_name}".to_sym : "delete_all_#{row_name}s".to_sym
delete_row_name = "delete_#{row_name}".to_sym
edit_row_name = "edit_#{row_name}".to_sym
self.class.instance_eval do
alias_method rows_name, :rows
alias_method has_row_name, :has_row?
alias_method has_no_row_name, :has_no_row?
alias_method has_rows_name, :has_rows?
alias_method has_no_rows_name, :has_no_rows?
alias_method delete_all_rows_name, :delete_all_rows
alias_method delete_row_name, :delete_row
alias_method edit_row_name, :edit_row
alias_method row_name, :row_named
end
end
end
# Default class used to represent a row in a table
class TableRow < DelegateClass(Capybara::Node::Element)
# @param row_css [String] The css selector for the row
# @param headers [Array] An array of the strings from the tables header row
def initialize(row_css, headers)
super(row_css)
headers.each_with_index do |header, i|
cell_name = header.downcase.tr(' ', '_').gsub(/[^a-z0-9_]+/, '')
self.class.send(:define_method, cell_name) do
find("td:nth-child(#{i + 1})").text
end
end
end
end
# Wrapper class for row edit action
class RowEditor
def edit(table_row)
table_row.find_link('Edit').click
end
end
# Wrapper class for row delete action
class RowDeleter
def delete(table_row)
table_row.find_link('Delete').click
end
end
end
end
|
ENV['RACK_ENV'] ||= 'development'
require 'csv'
Bundler.require :default, ENV['RACK_ENV'].to_sym
require_relative 'web_helpers'
Dotenv.load
module GithubIssuesExporter
# :nodoc:
class Web < Sinatra::Base
include WebHelpers
helpers do
def repos
@repos ||= github_user.api.repositories(type: 'all')
end
def orgs
@orgs ||= github_user.api.organizations
end
def issues
return [] unless params[:repo]
if params[:q]
q = "repo:#{repository_name} " << params[:q]
@issues ||= github_user.api.search_issues(q, search_options).items
else
@issues ||= github_user.api.list_issues(params[:repo], issue_filters)
end
end
def labels
return [] unless params[:repo]
@labels ||= github_user.api.labels(params[:repo])
.sort_by! { |label| label.name.downcase }
end
def milestones
return [] unless params[:repo]
@milestones ||= github_user.api.list_milestones(params[:repo])
end
# Joins the org and repo query params into "org/repo", skipping any part
# that is missing (nil) or blank ('').
def repository_name
  parts = [params[:org], params[:repo]].compact
  parts.reject(&:empty?).join('/')
end
# Builds the filter hash passed to Octokit's list_issues.
#
# milestone, assignee and since are only included when the corresponding
# query parameter is present (and, for milestone/assignee, non-wildcard).
def issue_filters
  filters = {
    state: params[:state] || 'all',
    per_page: 100,
    sort: params[:sort],
    direction: params[:direction] || 'desc',
    labels: params[:labels].to_a.join(',')
  }
  milestone = params[:milestone]
  filters[:milestone] = milestone if milestone && milestone != '*'
  assignee = params[:assignee]
  filters[:assignee] = assignee if assignee && assignee != ''
  filters[:since] = params[:since] if params[:since]
  filters
end
# Options hash passed to Octokit's search_issues (ordering and paging).
def search_options
  options = {}
  options[:order] = params[:direction] || 'desc'
  options[:page] = params[:page] || 1
  options[:per_page] = params[:per_page] || 100
  options[:sort] = params[:sort]
  options
end
end
configure :development do
require 'better_errors'
use BetterErrors::Middleware
BetterErrors.application_root = __dir__
register Sinatra::Reloader
end
configure do
enable :sessions
register Sinatra::Partial
register Sinatra::Auth::Github
set :root, File.expand_path(File.dirname(__FILE__) + '/../../web')
set :public_folder, proc { "#{root}/assets" }
set :views, proc { "#{root}/views" }
set :slim, pretty: true, format: :html5
set :partial_template_engine, :slim
set :github_options,
scopes: 'user,repo,read:org',
secret: ENV['GITHUB_CLIENT_SECRET'],
client_id: ENV['GITHUB_CLIENT_ID']
end
get '/' do
@github_user = github_user
slim :index
end
get '/login' do
authenticate!
redirect '/'
end
get '/logout' do
logout!
redirect '/'
end
get '/dashboard' do
authenticate!
@github_user = github_user
@repos = repos
@organizations = orgs
@milestones = milestones
@labels = labels.map { |label| label.name }
@issues = issues
slim :dashboard
end
get '/issues.csv' do
authenticate!
content_type 'application/csv'
attachment 'issues.csv'
@issues = issues
csv_string = CSV.generate do |csv|
csv << ['#', 'Title', 'Creator', 'Assigned to', 'State', 'Labels', 'Created at', 'Updated at']
@issues.each do |issue|
csv << [
issue.number,
issue.title,
issue.user.login,
issue.assignee ? issue.assignee.login : '',
issue.state,
issue.labels.any? ? issue.labels.map { |label| label.name }.join(', ') : '',
issue.created_at,
issue.updated_at
]
end
end
end
not_found do
slim :not_found
end
end
end
Update deprecated slim options
ENV['RACK_ENV'] ||= 'development'
require 'csv'
Bundler.require :default, ENV['RACK_ENV'].to_sym
require_relative 'web_helpers'
Dotenv.load
module GithubIssuesExporter
# :nodoc:
class Web < Sinatra::Base
include WebHelpers
helpers do
def repos
@repos ||= github_user.api.repositories(type: 'all')
end
def orgs
@orgs ||= github_user.api.organizations
end
def issues
return [] unless params[:repo]
if params[:q]
q = "repo:#{repository_name} " << params[:q]
@issues ||= github_user.api.search_issues(q, search_options).items
else
@issues ||= github_user.api.list_issues(params[:repo], issue_filters)
end
end
def labels
return [] unless params[:repo]
@labels ||= github_user.api.labels(params[:repo])
.sort_by! { |label| label.name.downcase }
end
def milestones
return [] unless params[:repo]
@milestones ||= github_user.api.list_milestones(params[:repo])
end
def repository_name
[params[:org], params[:repo]]
.reject { |name| name == '' }.compact.join('/')
end
def issue_filters
filters = {
state: params[:state] || 'all',
per_page: 100,
sort: params[:sort],
direction: params[:direction] || 'desc',
labels: params[:labels].to_a.join(',')
}
if params[:milestone] && params[:milestone] != '*'
filters.merge!(milestone: params[:milestone])
end
if params[:assignee] && params[:assignee] != ''
filters.merge!(assignee: params[:assignee])
end
if params[:since]
filters.merge!(since: params[:since])
end
filters
end
def search_options
{
order: params[:direction] || 'desc',
page: params[:page] || 1,
per_page: params[:per_page] || 100,
sort: params[:sort]
}
end
end
configure :development do
require 'better_errors'
use BetterErrors::Middleware
BetterErrors.application_root = __dir__
register Sinatra::Reloader
end
configure do
enable :sessions
register Sinatra::Partial
register Sinatra::Auth::Github
set :root, File.expand_path(File.dirname(__FILE__) + '/../../web')
set :public_folder, proc { "#{root}/assets" }
set :views, proc { "#{root}/views" }
set :slim, pretty: true, tabsize: 2, format: :html
set :partial_template_engine, :slim
set :github_options,
scopes: 'user,repo,read:org',
secret: ENV['GITHUB_CLIENT_SECRET'],
client_id: ENV['GITHUB_CLIENT_ID']
end
get '/' do
@github_user = github_user
slim :index
end
get '/login' do
authenticate!
redirect '/'
end
get '/logout' do
logout!
redirect '/'
end
get '/dashboard' do
authenticate!
@github_user = github_user
@repos = repos
@organizations = orgs
@milestones = milestones
@labels = labels.map { |label| label.name }
@issues = issues
slim :dashboard
end
get '/issues.csv' do
authenticate!
content_type 'application/csv'
attachment 'issues.csv'
@issues = issues
csv_string = CSV.generate do |csv|
csv << ['#', 'Title', 'Creator', 'Assigned to', 'State', 'Labels', 'Created at', 'Updated at']
@issues.each do |issue|
csv << [
issue.number,
issue.title,
issue.user.login,
issue.assignee ? issue.assignee.login : '',
issue.state,
issue.labels.any? ? issue.labels.map { |label| label.name }.join(', ') : '',
issue.created_at,
issue.updated_at
]
end
end
end
not_found do
slim :not_found
end
end
end
|
require "yaml"
module GomiBot
module Twitter
class Following
def initialize
@client = GomiBot::Twitter::Client.new
end
def call(following: true, unfollowing: true)
follow if following
unfollow if unfollowing
refresh_skip_following_list
end
def follow
new_skip_following = []
to_follow_list.sample(following_limit).each do |i|
new_skip_following << i
user_data = @client.twitter.user(i).attrs
@client.follow(i) if judge_following(user_data)
end
unless new_skip_following.empty?
new_config = config
new_config[:skip_following] = new_skip_following
dump_config(new_config)
end
end
def unfollow
to_unfollow_list.sample(unfollowing_limit).each do |i|
@client.unfollow(i)
end
end
# private
def follower_ids
@follower_ids ||= @client.follower_ids
end
def friend_ids
@friend_ids ||= @client.friend_ids
end
def friendships_outgoing
@friendships_outgoing ||= @client.friendships_outgoing
end
def to_follow_list
follower_ids - friend_ids - friendships_outgoing - ignore_following_list
end
def to_unfollow_list
friend_ids - follower_ids - friendships_outgoing - ignore_unfollowing_list
end
def ignore_following_list
config[:skip_following] + config[:blacklist]
end
def ignore_unfollowing_list
config[:whitelist]
end
def refresh_skip_following_list
new_config = config
new_nonfollower = config[:skip_following] - follower_ids - friendships_outgoing
new_list = config[:skip_following] - new_nonfollower
new_config[:skip_following] = new_list
dump_config(new_config)
end
# Decides whether an account is worth following.
#
# BUG FIX: the first condition used `=` (assignment) instead of `==`, which
# both clobbered data[:lang] and skipped the Japanese-language check.
#
# @param data [Hash] user attributes (:lang, :screen_name, :name,
#   :description, :favourites_count, :friends_count, :followers_count)
# @return [Boolean] true when the account looks like an active, non-bot,
#   Japanese-language user
def judge_following(data)
  # Matches bot-like names while excluding e.g. "robot" / "ロボット" variants.
  bot_regexp = /[^r][^o]bot|[^ろ]ぼっと|[^ロ]ボット/i
  data[:lang] == "ja" &&
    data[:screen_name] !~ bot_regexp &&
    data[:name] !~ bot_regexp &&
    data[:description] !~ bot_regexp &&
    data[:favourites_count] > 0 &&
    data[:friends_count] < 10 * data[:followers_count]
end
def config_file
GomiBot.config_dir + "friendship.yml"
end
def config
@config ||= YAML.load_file(config_file)
end
def dump_config(new_conf)
File.open(config_file, "w") { |f| YAML.dump(new_conf, f) }
@config = new_conf
end
def following_limit
GomiBot.config[:following][:following_limit]
end
def unfollowing_limit
GomiBot.config[:following][:unfollowing_limit]
end
end
end
end
new の時点でfollow, unfollow の指定をするように変更
require "yaml"
module GomiBot
module Twitter
class Following
def initialize(
following: GomiBot.config[:following][:auto_following],
unfollowing: GomiBot.config[:following][:auto_unfollowing]
)
@following = following
@unfollowing = unfollowing
@client = GomiBot::Twitter::Client.new
end
def call
follow if @following
unfollow if @unfollowing
refresh_skip_following_list
end
def follow
new_skip_following = []
to_follow_list.sample(following_limit).each do |i|
new_skip_following << i
user_data = @client.twitter.user(i).attrs
@client.follow(i) if judge_following(user_data)
end
unless new_skip_following.empty?
new_config = config
new_config[:skip_following] = new_skip_following
dump_config(new_config)
end
end
def unfollow
to_unfollow_list.sample(unfollowing_limit).each do |i|
@client.unfollow(i)
end
end
# private
def follower_ids
@follower_ids ||= @client.follower_ids
end
def friend_ids
@friend_ids ||= @client.friend_ids
end
def friendships_outgoing
@friendships_outgoing ||= @client.friendships_outgoing
end
def to_follow_list
follower_ids - friend_ids - friendships_outgoing - ignore_following_list
end
def to_unfollow_list
friend_ids - follower_ids - friendships_outgoing - ignore_unfollowing_list
end
def ignore_following_list
config[:skip_following] + config[:blacklist]
end
def ignore_unfollowing_list
config[:whitelist]
end
def refresh_skip_following_list
new_config = config
new_nonfollower = config[:skip_following] - follower_ids - friendships_outgoing
new_list = config[:skip_following] - new_nonfollower
new_config[:skip_following] = new_list
dump_config(new_config)
end
# Decides whether an account is worth following.
#
# BUG FIX: the first condition used `=` (assignment) instead of `==`, which
# both clobbered data[:lang] and skipped the Japanese-language check.
#
# @param data [Hash] user attributes (:lang, :screen_name, :name,
#   :description, :favourites_count, :friends_count, :followers_count)
# @return [Boolean] true when the account looks like an active, non-bot,
#   Japanese-language user
def judge_following(data)
  # Matches bot-like names while excluding e.g. "robot" / "ロボット" variants.
  bot_regexp = /[^r][^o]bot|[^ろ]ぼっと|[^ロ]ボット/i
  data[:lang] == "ja" &&
    data[:screen_name] !~ bot_regexp &&
    data[:name] !~ bot_regexp &&
    data[:description] !~ bot_regexp &&
    data[:favourites_count] > 0 &&
    data[:friends_count] < 10 * data[:followers_count]
end
def config_file
GomiBot.config_dir + "friendship.yml"
end
def config
@config ||= YAML.load_file(config_file)
end
def dump_config(new_conf)
File.open(config_file, "w") { |f| YAML.dump(new_conf, f) }
@config = new_conf
end
def following_limit
GomiBot.config[:following][:following_limit]
end
def unfollowing_limit
GomiBot.config[:following][:unfollowing_limit]
end
end
end
end |
class CLI
def initialize
puts "Welcome to GrandmaSweets Rubygem! Feel in the mood for some delicious recipes? Well, you're in the right place!!!"
puts "Pick among our selection of traditional Italian recipes to impress your friends, and yourself!"
puts "Loading recipes..."
@s = Scraper.new
end
def call
@s.scrape_recipes
sleep 4
list_recipes
sleep 2
meditation_pause
select_recipe
goodbye
end
end
added instance method 'list_recipes' to the CLI class
# Command-line interface for browsing scraped recipes.
class CLI
  # Greets the user and prepares the scraper.
  def initialize
    puts "Welcome to GrandmaSweets Rubygem! Feel in the mood for some delicious recipes? Well, you're in the right place!!!"
    puts "Pick among our selection of traditional Italian recipes to impress your friends, and yourself!"
    puts "Loading recipes..."
    @s = Scraper.new
  end

  # Runs the interactive session end to end.
  def call
    @s.scrape_recipes
    sleep 4
    list_recipes
    sleep 2
    meditation_pause
    select_recipe
    goodbye
  end

  # Prints a numbered listing of each scraped recipe with its description.
  def list_recipes
    puts "Here's the list of our recipes:"
    divider = "-----------------------------------------------------"
    @s.recipes.each.with_index(1) do |recipe, index|
      puts divider
      puts " #{index} " + "#{recipe.name}"
      puts " "
      puts " #{recipe.description}"
      puts divider
    end
  end
end
|
module HairTrigger
module SchemaDumper
module TrailerWithTriggersSupport
def trailer(stream)
orig_show_warnings = Builder.show_warnings
Builder.show_warnings = false # we already show them when running the migration
triggers(stream)
super
ensure
Builder.show_warnings = orig_show_warnings
end
end
def triggers(stream)
@adapter_name = @connection.adapter_name.downcase.to_sym
all_triggers = @connection.triggers
db_trigger_warnings = {}
migration_trigger_builders = []
db_triggers = whitelist_triggers(all_triggers)
migration_triggers = HairTrigger.current_migrations(:in_rake_task => true, :previous_schema => self.class.previous_schema).map do |(_, builder)|
definitions = []
builder.generate.each do |statement|
if statement =~ /\ACREATE(.*TRIGGER| FUNCTION) ([^ \n]+)/
# poor man's unquote
type = ($1 == ' FUNCTION' ? :function : :trigger)
name = $2.gsub('"', '')
definitions << [name, statement, type]
end
end
{:builder => builder, :definitions => definitions}
end
migration_triggers.each do |migration|
next unless migration[:definitions].all? do |(name, definition, type)|
db_triggers[name] && (db_trigger_warnings[name] = true) && db_triggers[name] == normalize_trigger(name, definition, type)
end
migration[:definitions].each do |(name, _, _)|
db_triggers.delete(name)
db_trigger_warnings.delete(name)
end
migration_trigger_builders << migration[:builder]
end
db_triggers.to_a.sort_by{ |t| (t.first + 'a').sub(/\(/, '_') }.each do |(name, definition)|
if db_trigger_warnings[name]
stream.puts " # WARNING: generating adapter-specific definition for #{name} due to a mismatch."
stream.puts " # either there's a bug in hairtrigger or you've messed up your migrations and/or db :-/"
else
stream.puts " # no candidate create_trigger statement could be found, creating an adapter-specific one"
end
if definition =~ /\n/
stream.print " execute(<<-SQL)\n#{definition.rstrip}\n SQL\n\n"
else
stream.print " execute(#{definition.inspect})\n\n"
end
end
migration_trigger_builders.each { |builder| stream.print builder.to_ruby(' ', false) + "\n\n" }
end
# Normalizes a generated trigger/function definition into the form the
# database itself reports, so the two can be compared.
#
# @param name [String] trigger or function name (functions end in "()")
# @param definition [String] the generated CREATE TRIGGER/FUNCTION statement
# @param type [Symbol] :trigger or :function
# @return [String] the adapter-normalized definition
def normalize_trigger(name, definition, type)
  @adapter_name = @connection.adapter_name.downcase.to_sym
  return definition unless @adapter_name == :postgresql
  # because postgres does not preserve the original CREATE TRIGGER/
  # FUNCTION statements, its decompiled reconstruction will not match
  # ours. we work around it by creating our generated trigger/function,
  # asking postgres for its definition, and then rolling back.
  @connection.transaction(requires_new: true) do
    chars = ('a'..'z').to_a + ('0'..'9').to_a + ['_']
    # random throwaway name so we never collide with a real trigger/function
    test_name = '_hair_trigger_test_' + (0..43).map{ chars[(rand * chars.size).to_i] }.join
    # take off the parens for gsubbing, since this version might be quoted
    name = name[0..-3] if type == :function
    @connection.execute(definition.sub(name, test_name))
    # now add them back
    if type == :function
      test_name << '()'
      name << '()' # NOTE(review): mutates the caller's string in place -- confirm callers don't reuse it
    end
    definition = @connection.triggers(:only => [test_name], :simple_check => true).values.first
    definition.sub!(test_name, name)
    # roll back so the throwaway trigger/function never persists
    raise ActiveRecord::Rollback
  end
  definition
end
# Filters out triggers that are attached to tables listed in
# ActiveRecord::SchemaDumper.ignore_tables.
#
# @param triggers [Hash] trigger name => source SQL
# @return [Array] [name, source] pairs for non-ignored tables
def whitelist_triggers(triggers)
  triggers.reject do |_name, source|
    ActiveRecord::SchemaDumper.ignore_tables.any? do |ignored_table_name|
      source =~ /ON\s+#{@connection.quote_table_name(ignored_table_name)}\s/
    end
  end
end
def self.included(base)
base.class_eval do
prepend TrailerWithTriggersSupport
class_attribute :previous_schema
end
end
end
end
add postgis support to schema_dumper
module HairTrigger
module SchemaDumper
module TrailerWithTriggersSupport
def trailer(stream)
orig_show_warnings = Builder.show_warnings
Builder.show_warnings = false # we already show them when running the migration
triggers(stream)
super
ensure
Builder.show_warnings = orig_show_warnings
end
end
def triggers(stream)
@adapter_name = @connection.adapter_name.downcase.to_sym
all_triggers = @connection.triggers
db_trigger_warnings = {}
migration_trigger_builders = []
db_triggers = whitelist_triggers(all_triggers)
migration_triggers = HairTrigger.current_migrations(:in_rake_task => true, :previous_schema => self.class.previous_schema).map do |(_, builder)|
definitions = []
builder.generate.each do |statement|
if statement =~ /\ACREATE(.*TRIGGER| FUNCTION) ([^ \n]+)/
# poor man's unquote
type = ($1 == ' FUNCTION' ? :function : :trigger)
name = $2.gsub('"', '')
definitions << [name, statement, type]
end
end
{:builder => builder, :definitions => definitions}
end
migration_triggers.each do |migration|
next unless migration[:definitions].all? do |(name, definition, type)|
db_triggers[name] && (db_trigger_warnings[name] = true) && db_triggers[name] == normalize_trigger(name, definition, type)
end
migration[:definitions].each do |(name, _, _)|
db_triggers.delete(name)
db_trigger_warnings.delete(name)
end
migration_trigger_builders << migration[:builder]
end
db_triggers.to_a.sort_by{ |t| (t.first + 'a').sub(/\(/, '_') }.each do |(name, definition)|
if db_trigger_warnings[name]
stream.puts " # WARNING: generating adapter-specific definition for #{name} due to a mismatch."
stream.puts " # either there's a bug in hairtrigger or you've messed up your migrations and/or db :-/"
else
stream.puts " # no candidate create_trigger statement could be found, creating an adapter-specific one"
end
if definition =~ /\n/
stream.print " execute(<<-SQL)\n#{definition.rstrip}\n SQL\n\n"
else
stream.print " execute(#{definition.inspect})\n\n"
end
end
migration_trigger_builders.each { |builder| stream.print builder.to_ruby(' ', false) + "\n\n" }
end
# Normalizes a generated trigger/function definition into the form the
# database itself reports, so the two can be compared.
#
# @param name [String] trigger or function name (functions end in "()")
# @param definition [String] the generated CREATE TRIGGER/FUNCTION statement
# @param type [Symbol] :trigger or :function
# @return [String] the adapter-normalized definition
def normalize_trigger(name, definition, type)
  @adapter_name = @connection.adapter_name.downcase.to_sym
  # postgis is postgres underneath, so it needs the same normalization
  return definition unless @adapter_name == :postgresql || @adapter_name == :postgis
  # because postgres does not preserve the original CREATE TRIGGER/
  # FUNCTION statements, its decompiled reconstruction will not match
  # ours. we work around it by creating our generated trigger/function,
  # asking postgres for its definition, and then rolling back.
  @connection.transaction(requires_new: true) do
    chars = ('a'..'z').to_a + ('0'..'9').to_a + ['_']
    # random throwaway name so we never collide with a real trigger/function
    test_name = '_hair_trigger_test_' + (0..43).map{ chars[(rand * chars.size).to_i] }.join
    # take off the parens for gsubbing, since this version might be quoted
    name = name[0..-3] if type == :function
    @connection.execute(definition.sub(name, test_name))
    # now add them back
    if type == :function
      test_name << '()'
      name << '()' # NOTE(review): mutates the caller's string in place -- confirm callers don't reuse it
    end
    definition = @connection.triggers(:only => [test_name], :simple_check => true).values.first
    definition.sub!(test_name, name)
    # roll back so the throwaway trigger/function never persists
    raise ActiveRecord::Rollback
  end
  definition
end
def whitelist_triggers(triggers)
triggers.reject do |name, source|
ActiveRecord::SchemaDumper.ignore_tables.any? { |ignored_table_name| source =~ /ON\s+#{@connection.quote_table_name(ignored_table_name)}\s/ }
end
end
def self.included(base)
base.class_eval do
prepend TrailerWithTriggersSupport
class_attribute :previous_schema
end
end
end
end
|
module Hdo
module Stats
class AgreementScorer
def initialize(votes = Vote.with_results)
@votes = votes
end
def result
@result ||= (
result = Hash.new(0)
count = 0
@votes.each do |vote|
next if ignored?(vote)
count += 1
stats = vote.stats
combinations.each do |current_parties|
if agree?(current_parties, stats)
key = current_parties.map(&:external_id).sort.join(',')
result[key] += 1
end
end
end
{ :total => count, :data => result }
)
end
# Writes each party combination's agreement share to +io+, highest
# agreement first, e.g. "A,B                 : 75.00% (3/4)".
#
# @param io [IO] destination stream (defaults to $stdout)
def print(io = $stdout)
  total, data = result.values_at(:total, :data)
  sorted = data.sort_by { |_combo, value| -value }
  sorted.each do |combo, agreements|
    line = format("%s: %.2f%% (%d/%d)",
                  combo.ljust(20),
                  (agreements * 100 / total.to_f),
                  agreements,
                  total)
    io.puts line
  end
end
private
def combinations
@combinations ||= (
parties = Party.all.to_a
combinations = []
2.upto(parties.size) do |n|
combinations.concat parties.combination(n).to_a
end
combinations
)
end
def agree?(parties, stats)
parties.map { |party| stats.text_for(party) }.uniq.size == 1
end
def ignored?(vote)
vote.non_personal? && vote.subject =~ /lovens overskrift og loven i sin helhet/i
end
end
end
end
Add ability to specify unit.
module Hdo
module Stats
class AgreementScorer
VALID_UNITS = [:propositions, :votes]
def initialize(opts = {})
@votes = opts[:votes] || Vote.with_results
if opts[:unit]
unless VALID_UNITS.include?(opts[:unit])
raise "invalid unit: #{opts[:unit].inspect}"
end
@unit = opts[:unit]
else
@unit = :propositions
end
end
def result
@result ||= (
result = Hash.new(0)
count = 0
@votes.each do |vote|
next if ignored?(vote)
stats = vote.stats
case @unit
when :propositions
unit_count = vote.propositions.count
when :votes
unit_count = 1
else
raise "invalid unit: #{@unit.inspect}"
end
count += unit_count
combinations.each do |current_parties|
if agree?(current_parties, stats)
key = current_parties.map(&:external_id).sort.join(',')
result[key] += unit_count
end
end
end
{ :total => count, :data => result }
)
end
def print(io = $stdout)
total, data = result.values_at(:total, :data)
data = data.sort_by { |combo, value| -value }
data.each do |c, v|
str = "%s: %.2f%% (%d/%d)" % [c.ljust(20), (v * 100 / total.to_f), v, total]
io.puts str
end
end
private
def combinations
@combinations ||= (
parties = Party.all.to_a
combinations = []
2.upto(parties.size) do |n|
combinations.concat parties.combination(n).to_a
end
combinations
)
end
def agree?(parties, stats)
parties.map { |party| stats.text_for(party) }.uniq.size == 1
end
def ignored?(vote)
vote.non_personal? && vote.subject =~ /lovens overskrift og loven i sin helhet/i
end
end
end
end |
class Hiera
module Backend
class Http_backend
# Sets up the HTTP(S) client used for lookups, from the :http backend config.
#
# Recognized config keys: :host, :port, :http_read_timeout,
# :http_connect_timeout, :path_base, :use_ssl, :ssl_cert, :ssl_key,
# :ssl_ca_cert.
def initialize
  require 'net/http'
  require 'net/https'
  @config = Config[:http]
  @http = Net::HTTP.new(@config[:host], @config[:port])
  @http.read_timeout = @config[:http_read_timeout] || 10
  @http.open_timeout = @config[:http_connect_timeout] || 10
  @path_base = @config[:path_base] || ''
  if @config[:use_ssl]
    @http.use_ssl = true
    # NOTE(review): this guard only checks :ssl_cert but the branch also
    # reads :ssl_ca_cert and :ssl_key -- all three must be configured together.
    if @config[:ssl_cert]
      @http.verify_mode = OpenSSL::SSL::VERIFY_PEER
      store = OpenSSL::X509::Store.new
      store.add_cert(OpenSSL::X509::Certificate.new(File.read(@config[:ssl_ca_cert])))
      @http.cert_store = store
      # BUG FIX: the client certificate comes from :ssl_cert and the private
      # key from :ssl_key; the two file reads were previously swapped.
      @http.cert = OpenSSL::X509::Certificate.new(File.read(@config[:ssl_cert]))
      @http.key = OpenSSL::PKey::RSA.new(File.read(@config[:ssl_key]))
    end
  else
    @http.use_ssl = false
  end
end
def lookup(key, scope, order_override, resolution_type)
answer = nil
paths = @config[:paths].clone
paths.insert(0, order_override) if order_override
paths.map! { |p| Backend.parse_string(@path_base + p, scope, { 'key' => key }) }
paths.each do |path|
Hiera.debug("[hiera-http]: Lookup #{key} from #{@config[:host]}:#{@config[:port]}#{path}")
httpreq = Net::HTTP::Get.new(path)
begin
httpres = @http.request(httpreq)
rescue Exception => e
Hiera.warn("[hiera-http]: Net::HTTP threw exception #{e.message}")
raise Exception, e.message unless @config[:failure] == 'graceful'
next
end
unless httpres.kind_of?(Net::HTTPSuccess)
Hiera.debug("[hiera-http]: bad http response from #{@config[:host]}:#{@config[:port]}#{path}")
Hiera.debug("HTTP response code was #{httpres.code}")
raise Exception, 'Bad HTTP response' unless @config[:failure] == 'graceful'
next
end
result = self.parse_response(key, httpres.body)
next unless result
parsed_result = Backend.parse_answer(result, scope)
case resolution_type
when :array
answer ||= []
answer << parsed_result
when :hash
answer ||= {}
answer = parsed_result.merge answer
else
answer = parsed_result
break
end
end
answer
end
def parse_response(key,answer)
return nil unless answer
Hiera.debug("[hiera-http]: Query returned data, parsing response as #{@config[:output] || 'plain'}")
case @config[:output]
when 'plain'
# When the output format is configured as plain we assume that if the
# endpoint URL returns an HTTP success then the contents of the response
# body is the value itself, or nil.
#
answer
when 'json'
# If JSON is specified as the output format, assume the output of the
# endpoint URL is a JSON document and return keypart that matched our
# lookup key
self.json_handler(key,answer)
when 'yaml'
# If YAML is specified as the output format, assume the output of the
# endpoint URL is a YAML document and return keypart that matched our
# lookup key
self.yaml_handler(key,answer)
else
answer
end
end
# Handlers
# Here we define specific handlers to parse the output of the http request
# and return a value. Currently we support YAML and JSON
#
def json_handler(key,answer)
require 'rubygems'
require 'json'
JSON.parse(answer)[key]
end
def yaml_handler(answer)
require 'yaml'
YAML.parse(answer)[key]
end
end
end
end
* Added ignore_404 option
Gives the ability to disable `failure: graceful` in order to bomb out on errors while still ignoring 404s, which might be an expected result of a hiera lookup that should pass through to the next level in the hierarchy and may not actually be an error.
class Hiera
  module Backend
    # Hiera backend that resolves keys over HTTP(S).
    #
    # Configuration (Config[:http]):
    #   :host, :port          - endpoint to query
    #   :paths                - URL paths, interpolated per lookup
    #   :path_base            - optional prefix for every path
    #   :output               - 'plain' (default), 'json' or 'yaml'
    #   :failure              - 'graceful' to skip failed requests
    #   :ignore_404           - treat 404 as a miss even without graceful
    #   :use_ssl, :ssl_cert, :ssl_key, :ssl_ca_cert - TLS settings
    class Http_backend
      def initialize
        require 'net/http'
        require 'net/https'
        @config = Config[:http]

        @http = Net::HTTP.new(@config[:host], @config[:port])
        @http.read_timeout = @config[:http_read_timeout] || 10
        @http.open_timeout = @config[:http_connect_timeout] || 10
        @path_base = @config[:path_base] || ''

        if @config[:use_ssl]
          @http.use_ssl = true
          if @config[:ssl_cert]
            @http.verify_mode = OpenSSL::SSL::VERIFY_PEER
            store = OpenSSL::X509::Store.new
            store.add_cert(OpenSSL::X509::Certificate.new(File.read(@config[:ssl_ca_cert])))
            @http.cert_store = store
            # BUG FIX: the private key must be read from :ssl_key and the
            # certificate from :ssl_cert -- they were previously swapped,
            # making client-cert auth fail to parse either file.
            @http.key = OpenSSL::PKey::RSA.new(File.read(@config[:ssl_key]))
            @http.cert = OpenSSL::X509::Certificate.new(File.read(@config[:ssl_cert]))
          end
        else
          @http.use_ssl = false
        end
      end

      # Look up +key+, trying each configured path in order and honouring
      # Hiera resolution types (:array, :hash, or priority/first-match).
      def lookup(key, scope, order_override, resolution_type)
        answer = nil

        paths = @config[:paths].clone
        paths.insert(0, order_override) if order_override
        paths.map! { |p| Backend.parse_string(@path_base + p, scope, { 'key' => key }) }

        paths.each do |path|
          Hiera.debug("[hiera-http]: Lookup #{key} from #{@config[:host]}:#{@config[:port]}#{path}")
          httpreq = Net::HTTP::Get.new(path)

          begin
            httpres = @http.request(httpreq)
          rescue Exception => e
            # NOTE(review): rescuing/raising Exception is very broad; kept
            # as-is for backwards compatibility with existing callers.
            Hiera.warn("[hiera-http]: Net::HTTP threw exception #{e.message}")
            raise Exception, e.message unless @config[:failure] == 'graceful'
            next
          end

          unless httpres.kind_of?(Net::HTTPSuccess)
            Hiera.debug("[hiera-http]: bad http response from #{@config[:host]}:#{@config[:port]}#{path}")
            Hiera.debug("HTTP response code was #{httpres.code}")
            # A 404 may just mean "no value at this level of the
            # hierarchy"; :ignore_404 lets it fall through without
            # requiring failure: graceful for every other error.
            unless httpres.code == '404' && @config[:ignore_404] == true
              raise Exception, 'Bad HTTP response' unless @config[:failure] == 'graceful'
            end
            next
          end

          result = self.parse_response(key, httpres.body)
          next unless result

          parsed_result = Backend.parse_answer(result, scope)

          case resolution_type
          when :array
            answer ||= []
            answer << parsed_result
          when :hash
            answer ||= {}
            answer = parsed_result.merge answer
          else
            # Priority resolution: the first answer wins.
            answer = parsed_result
            break
          end
        end
        answer
      end

      # Convert a raw response body into the value for +key+ according to
      # the configured :output format.  Returns nil for a nil body.
      def parse_response(key, answer)
        return nil unless answer

        Hiera.debug("[hiera-http]: Query returned data, parsing response as #{@config[:output] || 'plain'}")

        case @config[:output]
        when 'plain'
          # When the output format is configured as plain we assume that if
          # the endpoint URL returns an HTTP success then the contents of
          # the response body is the value itself, or nil.
          answer
        when 'json'
          # The body is a JSON document; return the part matching our key.
          self.json_handler(key, answer)
        when 'yaml'
          # The body is a YAML document; return the part matching our key.
          self.yaml_handler(key, answer)
        else
          answer
        end
      end

      # Handlers
      # Parse the body of the HTTP response and extract +key+.
      # Currently we support YAML and JSON.

      def json_handler(key, answer)
        require 'rubygems'
        require 'json'
        JSON.parse(answer)[key]
      end

      def yaml_handler(key, answer)
        # BUG FIX: this method previously took only (answer), so the
        # two-argument call from parse_response raised ArgumentError and
        # +key+ was undefined inside.  Also, YAML.parse returns a Psych
        # AST node (no usable #[]); YAML.load is needed to get a Hash.
        require 'yaml'
        YAML.load(answer)[key]
      end
    end
  end
end
|
# frozen_string_literal: true
module Jekyll
  module TeXyll
    module Compiler
      # A single TeX compilation job: renders a snippet through the
      # configured pipeline and exposes the resulting SVG to Jekyll.
      class Job
        def initialize(snippet: '', options: {})
          @options = options
          @snippet = snippet
        end

        # Compile the snippet and collect sizing metrics from the outputs.
        def run
          make_dirs
          prepare_code
          run_pipeline
          metric_files = [:yml, :tfm, :fit].each_with_object({}) do |sym, acc|
            acc[sym] = file(sym)
          end
          @gauge = Metrics::Gauge.new(metric_files)
        end

        # Register the fitted SVG as a Jekyll static file for the site.
        def add_to_static_files_of(site)
          FileUtils.cp(file(:fit), file(:svg))
          # TODO: minify/compress svg?
          static = Jekyll::StaticFile.new(
            site, dir(:work), @options['dest_dir'], "#{@hash}.svg"
          )
          site.static_files << static
        end

        # Wrap the rendered <img> tag in a span carrying the configured classes.
        def html_tag
          img = @gauge.render_img_tag("#{@options['dest_dir']}/#{@hash}.svg")
          "<span class='#{@options['classes'].join(' ')}'>#{img}</span>"
        end

        private

        def make_dirs
          [dir(:work), "#{dir(:work)}/#{@options['dest_dir']}"].each do |path|
            FileUtils.mkdir_p(path)
          end
        end

        # Render the Liquid template around the snippet, hash the result
        # (used for cache-friendly filenames), and write the .tex file once.
        def prepare_code
          @code = Liquid::Template.parse(@options['template']).render(
            'preamble' => @options['preamble'],
            'append' => @options['append'],
            'prepend' => @options['prepend'],
            'snippet' => @snippet
          )
          @hash = Digest::MD5.hexdigest(@code)
          return if File.exist?(file(:tex))
          File.open(file(:tex), 'w') { |f| f.write(@code) }
        end

        def run_pipeline
          Pipeline.new(
            pipeline: @options['pipeline'],
            engines: @options['engines'],
            context: binding
          ).run
        end

        def dir(key)
          { work: @options['work_dir'] }[key]
        end

        # Absolute path (under the work dir) for each artifact of this job.
        def file(key)
          names = {
            tex: "/#{@hash}.tex",
            dvi: "/#{@hash}.dvi",
            yml: "/#{@hash}.yml",
            tfm: "/#{@hash}.tfm.svg",
            fit: "/#{@hash}.fit.svg",
            svg: "/#{@options['dest_dir']}/#{@hash}.svg"
          }
          dir(:work) + names[key]
        end
      end
    end
  end
end
switched to absolute path
# frozen_string_literal: true
module Jekyll
  module TeXyll
    module Compiler
      # A single TeX compilation job: renders a snippet through the
      # configured pipeline and exposes the resulting SVG to Jekyll.
      class Job
        def initialize(snippet: '', options: {})
          @options = options
          @snippet = snippet
        end

        # Compile the snippet and collect sizing metrics from the outputs.
        def run
          make_dirs
          prepare_code
          run_pipeline
          metric_files = [:yml, :tfm, :fit].each_with_object({}) do |sym, acc|
            acc[sym] = file(sym)
          end
          @gauge = Metrics::Gauge.new(metric_files)
        end

        # Register the fitted SVG as a Jekyll static file for the site.
        def add_to_static_files_of(site)
          FileUtils.cp(file(:fit), file(:svg))
          # TODO: minify/compress svg?
          static = Jekyll::StaticFile.new(
            site, dir(:work), @options['dest_dir'], "#{@hash}.svg"
          )
          site.static_files << static
        end

        # Wrap the rendered <img> tag in a span carrying the configured
        # classes.  The img src is an absolute URL path.
        def html_tag
          img = @gauge.render_img_tag("/#{@options['dest_dir']}/#{@hash}.svg")
          "<span class='#{@options['classes'].join(' ')}'>#{img}</span>"
        end

        private

        def make_dirs
          [dir(:work), "#{dir(:work)}/#{@options['dest_dir']}"].each do |path|
            FileUtils.mkdir_p(path)
          end
        end

        # Render the Liquid template around the snippet, hash the result
        # (used for cache-friendly filenames), and write the .tex file once.
        def prepare_code
          @code = Liquid::Template.parse(@options['template']).render(
            'preamble' => @options['preamble'],
            'append' => @options['append'],
            'prepend' => @options['prepend'],
            'snippet' => @snippet
          )
          @hash = Digest::MD5.hexdigest(@code)
          return if File.exist?(file(:tex))
          File.open(file(:tex), 'w') { |f| f.write(@code) }
        end

        def run_pipeline
          Pipeline.new(
            pipeline: @options['pipeline'],
            engines: @options['engines'],
            context: binding
          ).run
        end

        def dir(key)
          { work: @options['work_dir'] }[key]
        end

        # Absolute path (under the work dir) for each artifact of this job.
        def file(key)
          names = {
            tex: "/#{@hash}.tex",
            dvi: "/#{@hash}.dvi",
            yml: "/#{@hash}.yml",
            tfm: "/#{@hash}.tfm.svg",
            fit: "/#{@hash}.fit.svg",
            svg: "/#{@options['dest_dir']}/#{@hash}.svg"
          }
          dir(:work) + names[key]
        end
      end
    end
  end
end
|
# frozen_string_literal: true
require 'json'
require_relative 'auth'
require_relative 'services'
require 'l2met-log'
require 'sinatra/base'
require 'sinatra/json'
require 'sinatra/param'
module JobBoard
  # HTTP API for delivering jobs to workers: allocation, creation,
  # fetching and deletion.  All responses are JSON.
  class JobDeliveryAPI < Sinatra::Base
    helpers Sinatra::Param

    # Every endpoint responds with JSON.
    before { content_type :json }

    helpers do
      include L2met::Log
      include JobBoard::Auth::GuestDetect
    end

    # Guests (unauthenticated callers) get a flat 403 on every /jobs route.
    before '/jobs*' do
      halt 403, JSON.dump('@type' => 'error', error: 'just no') if guest?
    end

    # Allocate up to :count jobs from :queue to the worker identified by
    # the From header.  The request body carries {"jobs": [...]} -- the ids
    # the worker already holds -- which is passed to AllocateJobs.
    post '/jobs' do
      param :queue, String, blank: true, required: true
      param :count, Integer, default: 1

      # Without a From header the allocation cannot be attributed to a
      # worker, so fail fast with 412.
      unless request.env.key?('HTTP_FROM')
        halt 412, JSON.dump(
          '@type' => 'error', error: 'missing from header'
        )
      end

      from = request.env.fetch('HTTP_FROM')
      site = request.env.fetch('travis.site')

      body = JobBoard::Services::AllocateJobs.run(
        count: params[:count],
        from: from,
        jobs: JSON.parse(request.body.read).fetch('jobs'),
        queue_name: params[:queue],
        site: site
      ).merge(
        '@count' => params[:count],
        '@queue' => params[:queue]
      )

      log msg: :allocated, queue: params[:queue],
          count: params[:count], from: from, site: site

      json body
    end

    # Create or update a single job from the JSON request body.
    # 201 with an empty body on success, 400 when the service returns nil.
    post '/jobs/add' do
      job = JSON.parse(request.body.read)
      site = request.env.fetch('travis.site')

      db_job = JobBoard::Services::CreateOrUpdateJob.run(job: job, site: site)
      if db_job.nil?
        log level: :error, msg: 'failed to create or update job',
            job_id: job.fetch('id'), site: site
        halt 400, JSON.dump('@type' => 'error', error: 'what')
      end

      log msg: :added, job_id: job.fetch('id'), site: site
      [201, { 'Content-Length' => '0' }, '']
    end

    # Fetch a job by id.  404 when unknown; 424 when the upstream
    # build-script fetch failed (error message is passed through).
    get '/jobs/:job_id' do
      job_id = params.fetch('job_id')
      site = request.env.fetch('travis.site')
      infra = request.env.fetch('travis.infra', '')

      job = JobBoard::Services::FetchJob.run(
        job_id: job_id, site: site, infra: infra
      )
      halt 404, JSON.dump('@type' => 'error', error: 'no such job') if job.nil?
      if job.is_a?(JobBoard::Services::FetchJobScript::BuildScriptError)
        halt 424, JSON.dump(
          '@type' => 'error',
          error: 'job script fetch error',
          upstream_error: job.message
        )
      end

      log msg: :fetched, job_id: job_id, site: site, infra: infra
      json job
    end

    # Delete a job by id.  Always responds 204 (no branch on the service
    # result).
    delete '/jobs/:job_id' do
      job_id = params.fetch('job_id')
      site = request.env.fetch('travis.site')
      JobBoard::Services::DeleteJob.run(job_id: job_id, site: site)
      log msg: :deleted, job_id: job_id, site: site
      [204, {}, '']
    end
  end
end
Add more debug logging to job delivery API used by scheduler
# frozen_string_literal: true
require 'json'
require_relative 'auth'
require_relative 'services'
require 'l2met-log'
require 'sinatra/base'
require 'sinatra/json'
require 'sinatra/param'
module JobBoard
  # HTTP API for delivering jobs to workers: allocation, creation,
  # fetching and deletion.  All responses are JSON.
  class JobDeliveryAPI < Sinatra::Base
    helpers Sinatra::Param

    # Every endpoint responds with JSON.
    before { content_type :json }

    helpers do
      include L2met::Log
      include JobBoard::Auth::GuestDetect
    end

    # Guests (unauthenticated callers) get a flat 403 on every /jobs route.
    before '/jobs*' do
      halt 403, JSON.dump('@type' => 'error', error: 'just no') if guest?
    end

    # Allocate up to :count jobs from :queue to the worker identified by
    # the From header.  The request body carries {"jobs": [...]} -- the ids
    # the worker already holds -- which is passed to AllocateJobs.
    post '/jobs' do
      param :queue, String, blank: true, required: true
      param :count, Integer, default: 1

      # Without a From header the allocation cannot be attributed to a
      # worker, so fail fast with 412.
      unless request.env.key?('HTTP_FROM')
        halt 412, JSON.dump(
          '@type' => 'error', error: 'missing from header'
        )
      end

      from = request.env.fetch('HTTP_FROM')
      site = request.env.fetch('travis.site')

      body = JobBoard::Services::AllocateJobs.run(
        count: params[:count],
        from: from,
        jobs: JSON.parse(request.body.read).fetch('jobs'),
        queue_name: params[:queue],
        site: site
      ).merge(
        '@count' => params[:count],
        '@queue' => params[:queue]
      )

      log msg: :allocated, queue: params[:queue],
          count: params[:count], from: from, site: site

      json body
    end

    # Create or update a single job from the JSON request body.
    # 201 with an empty body on success, 400 when the service returns nil.
    post '/jobs/add' do
      job = JSON.parse(request.body.read)
      site = request.env.fetch('travis.site')

      # Debug breadcrumb for the scheduler; the id may be absent at this
      # point, hence the '<unknown>' fallback.
      log level: :debug, msg: 'parsed job',
          job_id: job.fetch('id', '<unknown>'), site: site

      db_job = JobBoard::Services::CreateOrUpdateJob.run(job: job, site: site)
      if db_job.nil?
        log level: :error, msg: 'failed to create or update job',
            job_id: job.fetch('id'), site: site
        halt 400, JSON.dump('@type' => 'error', error: 'what')
      end

      log msg: :added, job_id: job.fetch('id'), site: site
      [201, { 'Content-Length' => '0' }, '']
    end

    # Fetch a job by id.  404 when unknown; 424 when the upstream
    # build-script fetch failed (error message is passed through).
    get '/jobs/:job_id' do
      job_id = params.fetch('job_id')
      site = request.env.fetch('travis.site')
      infra = request.env.fetch('travis.infra', '')

      job = JobBoard::Services::FetchJob.run(
        job_id: job_id, site: site, infra: infra
      )
      halt 404, JSON.dump('@type' => 'error', error: 'no such job') if job.nil?
      if job.is_a?(JobBoard::Services::FetchJobScript::BuildScriptError)
        halt 424, JSON.dump(
          '@type' => 'error',
          error: 'job script fetch error',
          upstream_error: job.message
        )
      end

      log msg: :fetched, job_id: job_id, site: site, infra: infra
      json job
    end

    # Delete a job by id.  Always responds 204 (no branch on the service
    # result).
    delete '/jobs/:job_id' do
      job_id = params.fetch('job_id')
      site = request.env.fetch('travis.site')
      JobBoard::Services::DeleteJob.run(job_id: job_id, site: site)
      log msg: :deleted, job_id: job_id, site: site
      [204, {}, '']
    end
  end
end
|
#!/usr/bin/env ruby
require 'rubygems'
require 'digest/md5'
require 'eventmachine'
require 'pp'
module XYZ
  # Fake MCollective agent speaking STOMP over EventMachine; responds to
  # discovery requests so clients can be exercised without a real broker.
  module MCollectiveMockClients
    include EM::Protocols::Stomp

    # Agent topics to subscribe to, mapped to the directions of interest.
    Msg_types = {
      :get_log_fragment => [:command],
      :discovery => [:command],
      :chef_solo => [:command],
      :puppet_apply => [:command],
    }

    def connection_completed
      connect :login => 'mcollective', :passcode => 'marionette'
    end

    # Dispatch raw STOMP frames: subscribe on CONNECTED, otherwise decode
    # the marshalled MCollective envelope and respond to it.
    def receive_msg(msg)
      if msg.command == "CONNECTED"
        subscribe_to_topics
      else
        handle_payload(msg)
      end
    end

    def respond_to(msg)
      if msg[:msgtarget] =~ /discovery.command$/
        respond_to__discovery(msg)
      else
        pp ['got a message', msg]
      end
    end

    def respond_to__discovery(msg)
      reply, target = encodereply("discovery", "pong", msg[:requestid])
      send(target, reply)
    end

    # Marshal an MCollective-style reply envelope.
    # Returns [serialized_reply, reply_topic].
    def encodereply(agent, msg, requestid)
      serialized = Marshal.dump(msg)
      target = "/topic/mcollective.#{agent}.reply"
      req = {
        :senderid => "foo",
        :requestid => requestid,
        :senderagent => agent,
        :msgtarget => target,
        :msgtime => Time.now.to_i,
        :body => serialized
      }
      req[:hash] = Digest::MD5.hexdigest(serialized.to_s + "unset")
      [Marshal.dump(req), target]
    end

    private

    def subscribe_to_topics
      Msg_types.each do |agent, dirs|
        dirs = [:command, :reply] if dirs.empty?
        dirs.each { |dir| subscribe("/topic/mcollective.#{agent}.#{dir}") }
      end
    end

    def handle_payload(msg)
      decoded_msg = Marshal.load(msg.body) # Security.decodemsg(msg.body)
      decoded_msg[:body] = Marshal.load(decoded_msg[:body])
      pp ['got a message', decoded_msg]
      respond_to(decoded_msg)
    end
  end
end
# Connect the mock agents to the local STOMP broker and run forever.
EM.run do
  EM.connect 'localhost', 6163, XYZ::MCollectiveMockClients
end
working on file mock
#!/usr/bin/env ruby
require 'rubygems'
require 'digest/md5'
require 'eventmachine'
require 'pp'
module XYZ
  # Fake MCollective agents (discovery, chef_solo, puppet_apply,
  # get_log_fragment) speaking STOMP over EventMachine, for exercising
  # clients without a real mcollective install.
  module MCollectiveMockClients
    include EM::Protocols::Stomp

    # Agent topics to subscribe to, mapped to the directions of interest.
    Msg_types = {
      :get_log_fragment => [:command],
      :discovery => [:command],
      :chef_solo => [:command],
      :puppet_apply => [:command],
    }

    def connection_completed
      connect :login => 'mcollective', :passcode => 'marionette'
    end

    # Dispatch raw STOMP frames: subscribe on CONNECTED, otherwise decode
    # the marshalled MCollective envelope and respond to it.
    def receive_msg msg
      if msg.command == "CONNECTED"
        Msg_types.each do |a,dirs|
          dirs = [:command,:reply] if dirs.empty?
          dirs.each do |dir|
            subscribe("/topic/mcollective.#{a}.#{dir}")
          end
        end
      else
        decoded_msg = Marshal.load(msg.body)#Security.decodemsg(msg.body)
        decoded_msg[:body] = Marshal.load(decoded_msg[:body])
        pp ['got a message', decoded_msg]
        respond_to(decoded_msg)
      end
    end

    # Route a decoded request to the matching agent handler.
    def respond_to(msg)
      if msg[:msgtarget] =~ /discovery\.command$/
        respond_to__discovery(msg)
      elsif msg[:msgtarget] =~ /chef_solo\.command$/
        respond_to__chef_solo(msg)
      elsif msg[:msgtarget] =~ /puppet_apply\.command$/
        respond_to__puppet_apply(msg)
      elsif msg[:msgtarget] =~ /get_log_fragment\.command$/
        respond_to__get_log_fragment(msg)
      else
        pp ['cant treat msg', msg]
      end
    end

    def respond_to__discovery(msg)
      find_pbuilderids(msg).each do |pbuilderid|
        reply,target = encodereply(pbuilderid,"discovery","pong",msg[:requestid])
        send(target, reply)
      end
    end

    def respond_to__chef_solo(msg)
      find_pbuilderids(msg).each do |pbuilderid|
        response = {
          :statuscode=>0,
          :data=>
            {:status=>:succeeded,
             :node_name=>"domU-12-31-39-0B-F1-65.compute-1.internal"},
          :statusmsg=>"OK"
        }
        reply,target = encodereply(pbuilderid,"chef_solo",response,msg[:requestid])
        send(target, reply)
      end
    end

    # BUG FIX: this handler was dispatched from respond_to but never
    # defined, so any puppet_apply request raised NoMethodError.  It
    # mirrors the chef_solo success response.
    def respond_to__puppet_apply(msg)
      find_pbuilderids(msg).each do |pbuilderid|
        response = {
          :statuscode => 0,
          :data => { :status => :succeeded },
          :statusmsg => "OK"
        }
        reply, target = encodereply(pbuilderid, "puppet_apply", response, msg[:requestid])
        send(target, reply)
      end
    end

    def respond_to__get_log_fragment(msg)
      find_pbuilderids(msg).each do |pbuilderid|
        response = get_log_fragment_response(pbuilderid,msg)
        # BUG FIX: replies were encoded for the "chef_solo" agent, so they
        # were published to chef_solo's reply topic instead of
        # get_log_fragment's.
        reply,target = encodereply(pbuilderid,"get_log_fragment",response,msg[:requestid])
        send(target, reply)
      end
    end

    # Extract the pbuilderid values targeted by the request's fact filter.
    # Supports "==" (single id) and "=~" (ids embedded in a pattern).
    def find_pbuilderids(msg)
      pb_fact = ((msg[:filter]||{})["fact"]||[]).find{|f|f[:fact]=="pbuilderid"}
      return Array.new unless pb_fact
      if pb_fact[:operator] == "=="
        [pb_fact[:value]]
      elsif pb_fact[:operator] == "=~"
        pbuilderids = Array.new
        pb_fact[:value].gsub(/[A-Za-z0-9-]+/){|m|pbuilderids << m}
        pbuilderids
      else
        pp "got fact: #{pb_fact.inspect}"
        []
      end
    end

    # Marshal an MCollective-style reply envelope.
    # Returns [serialized_reply, reply_topic].
    def encodereply(pbuilderid, agent, msg, requestid)
      serialized = Marshal.dump(msg)
      digest = Digest::MD5.hexdigest(serialized.to_s + "unset")
      target = "/topic/mcollective.#{agent}.reply"
      req = {
        :senderid => pbuilderid,
        :requestid => requestid,
        :senderagent => agent,
        :msgtarget => target,
        :msgtime => Time.now.to_i,
        :body => serialized
      }
      req[:hash] = digest
      [Marshal.dump(req), target]
    end

    def get_log_fragment_response(pbuilderid,msg)
      lines = get_log_fragment(msg)
      if lines.nil?
        {
          :status => :failed,
          :error => {
            :formatted_exception => "Cannot find log fragment matching"
          },
          :pbuilderid => pbuilderid
        }
      else
        {
          :status => :ok,
          :data => lines,
          :pbuilderid => pbuilderid
        }
      end
    end

    # Read the canned error log; lines are concatenated without newlines.
    def get_log_fragment(msg)
      ret = String.new
      matching_file = "/root/r8client/mock_logs/error1.log"
      f = nil
      begin
        f = File.open(matching_file)
        ret << f.readline.chop until f.eof
      ensure
        # BUG FIX: guard against f being nil when File.open itself raised,
        # which previously masked the original error with a NoMethodError.
        f.close if f
      end
      ret
    end
  end
end
# Connect the mock agents to the local STOMP broker and run forever.
EM.run do
  EM.connect 'localhost', 6163, XYZ::MCollectiveMockClients
end
|
module JqueryTablesorter
  # Gem version, kept in sync with the bundled tablesorter release.
  VERSION = %w[1 19 3].join('.')
end
Build version by using three constants
module JqueryTablesorter
  # Semantic version components; bump these for releases.
  MAJOR = 1
  MINOR = 19
  TINY  = 3

  # Full dotted version string, e.g. "1.19.3".  The previous `.compact`
  # was a no-op: the components are literal integers and can never be nil.
  VERSION = [MAJOR, MINOR, TINY].join('.')
end
|
#
# Author:: Chef Partner Engineering (<partnereng@chef.io>)
# Copyright:: Copyright (c) 2015 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Kitchen
  module Driver
    # Version of the vRA Test Kitchen driver gem.
    VRA_VERSION = '2.0.0'.freeze
  end
end
Bump version for release
Bump bump bump
Signed-off-by: JJ Asghar <7323a5431d1c31072983a6a5bf23745b655ddf59@chef.io>
#
# Author:: Chef Partner Engineering (<partnereng@chef.io>)
# Copyright:: Copyright (c) 2015 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Kitchen
  module Driver
    # Version of the vRA Test Kitchen driver gem.
    VRA_VERSION = '2.1.0'.freeze
  end
end
|
# FIXME: can these be moved to main library file?
require 'instagram'
require 'date'
require 'oembed'
# Raised when an image with an already-imported external identifier is
# saved again.
DuplicateImageError = Class.new(StandardError)
module Lentil
  # A collection of methods for querying the Instagram API and importing metadata.
  class InstagramHarvester
    #
    # Configure the Instagram class in preparation for requests.
    #
    # @options opts [String] :client_id (Lentil::Engine::APP_CONFIG["instagram_client_id"]) The Instagram client ID
    # @options opts [String] :client_secret (Lentil::Engine::APP_CONFIG["instagram_client_secret"]) The Instagram client secret
    # @options opts [String] :access_token (nil) The optional Instagram access token
    def configure_connection(opts = {})
      # NOTE: defaults are written back into the caller-supplied hash (||=).
      opts['client_id'] ||= Lentil::Engine::APP_CONFIG["instagram_client_id"]
      opts['client_secret'] ||= Lentil::Engine::APP_CONFIG["instagram_client_secret"]
      opts['access_token'] ||= nil

      Instagram.configure do |config|
        config.client_id = opts['client_id']
        config.client_secret = opts['client_secret']
        if (opts['access_token'])
          config.access_token = opts['access_token']
        end
      end
    end

    #
    # Configure the Instagram class in preparation for leaving comments
    #
    # @param access_token = nil [String] Instagram access token for the writing account
    def configure_comment_connection(access_token = nil)
      access_token ||= Lentil::Engine::APP_CONFIG["instagram_access_token"] || nil
      raise "instagram_access_token must be defined as a parameter or in the application config" unless access_token
      configure_connection({'access_token' => access_token})
    end

    # Queries the Instagram API for recent images with a given tag.
    #
    # @param [String] tag The tag to query by (defaults to the app's
    #   default_image_search_tag)
    #
    # @return [Hashie::Mash] The data returned by Instagram API
    def fetch_recent_images_by_tag(tag = nil)
      configure_connection
      tag ||= Lentil::Engine::APP_CONFIG["default_image_search_tag"]
      Instagram.tag_recent_media(tag, :count=>100)
    end

    # Queries the Instagram API for the image metadata associated with a given ID.
    #
    # @param [String] image_id Instagram image ID
    #
    # @return [Hashie::Mash] data returned by Instagram API
    def fetch_image_by_id(image_id)
      configure_connection
      Instagram.media_item(image_id)
    end

    # Retrieves an image OEmbed metadata from the public URL using the Instagram OEmbed service
    #
    # @param url [String] The public Instagram image URL
    #
    # @return [OEmbed::Response] the Instagram image OEmbed data
    #   (a response object; callers read its #fields)
    def retrieve_oembed_data_from_url(url)
      OEmbed::Providers::Instagram.get(url)
    end

    # Retrieves image metadata via the public URL and imports it
    #
    # @param url [String] The public Instagram image URL
    #
    # @return [Array] new image objects
    def save_image_from_url(url)
      save_instagram_load(fetch_image_by_id(retrieve_oembed_data_from_url(url).fields["media_id"]))
    end

    # Produce processed image metadata from Instagram metadata.
    # This metadata is accepted by the save_image method.
    #
    # @param [Hashie::Mash] instagram_metadata The single image metadata returned by Instagram API
    #
    # @return [Hash] processed image metadata
    def extract_image_data(instagram_metadata)
      {
        url: instagram_metadata.link,
        external_id: instagram_metadata.id,
        large_url: instagram_metadata.images.standard_resolution.url,
        # caption may be nil, in which case name is nil as well
        name: instagram_metadata.caption && instagram_metadata.caption.text,
        tags: instagram_metadata.tags,
        user: instagram_metadata.user,
        original_datetime: Time.at(instagram_metadata.created_time.to_i).to_datetime,
        original_metadata: instagram_metadata
      }
    end

    # Takes return from Instagram API gem and adds image,
    # users, and tags to the database.
    #
    # @raise [DuplicateImageError] This method does not accept duplicate external image IDs
    #
    # @param [Hash] image_data processed Instagram image metadata
    #
    # @return [Image] new Image object
    def save_image(image_data)
      instagram_service = Lentil::Service.where(:name => "Instagram").first
      user_record = instagram_service.users.where(:user_name => image_data[:user][:username]).
        first_or_create!({:full_name => image_data[:user][:full_name], :bio => image_data[:user][:bio]})

      # Duplicate external identifiers for the same user are rejected.
      raise DuplicateImageError, "Duplicate image identifier" unless user_record.
        images.where(:external_identifier => image_data[:external_id]).first.nil?

      image_record = user_record.images.build({
        :external_identifier => image_data[:external_id],
        :description => image_data[:name],
        :url => image_data[:url],
        :long_url => image_data[:large_url],
        :original_datetime => image_data[:original_datetime]
      })

      # This is a temporary fix for a bug in the Hashie to_hash method.
      # It's fixed in master and should be in the next release (>1.2.0).
      image_record.original_metadata = JSON.parse(image_data[:original_metadata].to_json)

      # Default to "All Rights Reserved" until we find out more about licenses
      # FIXME: Set the default license in the app config
      unless image_record.licenses.size > 0
        image_record.licenses << Lentil::License.where(:short_name => "ARR").first
      end

      image_data[:tags].each {|tag| image_record.tags << Lentil::Tag.where(:name => tag).first_or_create}

      user_record.save!
      image_record.save!

      image_record
    end

    # Takes return from Instagram API gem and adds all new images,
    # users, and tags to the database.
    #
    # @param [Hashie::Mash] instagram_load The content returned by the Instagram gem
    # @param [Boolean] raise_dupes Whether to raise exceptions for duplicate images
    #
    # @raise [DuplicateImageError] If there are duplicate images and raise_dupes is true
    #
    # @return [Array] New image objects
    def save_instagram_load(instagram_load, raise_dupes=false)
      # Handle collections of images and individual images
      images = instagram_load
      if !images.kind_of?(Array)
        images = [images]
      end

      images.collect {|image|
        begin
          save_image(extract_image_data(image))
        rescue DuplicateImageError => e
          # Duplicates are silently skipped unless raise_dupes is set.
          raise e if raise_dupes
          next
        end
      }.compact
    end

    #
    # Call save_instagram_load, but raise exceptions for duplicates.
    #
    # @param [Hashie::Mash] instagram_load The content returned by the Instagram gem
    #
    # @raise [DuplicateImageError] If there are duplicate images
    #
    # @return [Array] New image objects
    def save_instagram_load!(instagram_load)
      save_instagram_load(instagram_load, true)
    end

    #
    # Retrieve the binary image data for a given Image object
    #
    # @param [Image] image An Image model object from the Instagram service
    #
    # @raise [Exception] If there are request problems
    #
    # @return [String] Binary image data
    def harvest_image_data(image)
      response = Typhoeus.get(image.large_url, followlocation: true)

      # Only a successful response with a JPEG content type falls through
      # to return the body; every other outcome raises.
      if response.success?
        raise "Invalid content type: " + response.headers['Content-Type'] unless (response.headers['Content-Type'] == 'image/jpeg')
      elsif response.timed_out?
        raise "Request timed out"
      elsif response.code == 0
        raise "Could not get an HTTP response"
      else
        raise "HTTP request failed: " + response.code.to_s
      end

      response.body
    end

    #
    # Test if an image is still available
    #
    # @param [Image] image An Image model object from the Instagram service
    #
    # @raise [Exception] If there are request problems
    #
    # @return [Boolean] Whether the image request was successful
    #   (nil -- not false -- when the request timed out or got no response)
    def test_remote_image(image)
      response = Typhoeus.get(image.thumbnail_url, followlocation: true)

      if response.success?
        true
      elsif response.timed_out? || (response.code == 0)
        nil
      else
        false
      end
    end

    #
    # Leave a comment containing the donor agreement on an Instagram image
    #
    # @param image [Image] An Image model object from the Instagram service
    # @param comment [String] The comment text to post
    #
    # @raise [Exception] If a comment submission fails
    # @authenticated true
    #
    # @return [Hashie::Mash] Instagram response
    def leave_image_comment(image, comment)
      configure_comment_connection
      Instagram.client.create_media_comment(image.external_identifier, comment)
    end
  end
end
Do not stop processing data after an invalid harvested image
# FIXME: can these be moved to main library file?
require 'instagram'
require 'date'
require 'oembed'
require 'pp'
# Raised when an image with an already-imported external identifier is
# saved again.
DuplicateImageError = Class.new(StandardError)
module Lentil
# A collection of methods for querying the Instagram API and importing metadata.
class InstagramHarvester
#
# Configure the Instagram class in preparation for requests.
#
# @options opts [String] :client_id (Lentil::Engine::APP_CONFIG["instagram_client_id"]) The Instagram client ID
# @options opts [String] :client_secret (Lentil::Engine::APP_CONFIG["instagram_client_secret"]) The Instagram client secret
# @options opts [String] :access_token (nil) The optional Instagram access token
def configure_connection(opts = {})
  # NOTE: defaults are written back into the caller-supplied hash (||=),
  # matching the original behavior.
  opts['client_id'] ||= Lentil::Engine::APP_CONFIG["instagram_client_id"]
  opts['client_secret'] ||= Lentil::Engine::APP_CONFIG["instagram_client_secret"]
  opts['access_token'] ||= nil

  Instagram.configure do |config|
    config.client_id = opts['client_id']
    config.client_secret = opts['client_secret']
    config.access_token = opts['access_token'] if opts['access_token']
  end
end
#
# Configure the Instagram class in preparation for leaving comments
#
# @param access_token = nil [String] Instagram access token for the writing account
def configure_comment_connection(access_token = nil)
  access_token ||= Lentil::Engine::APP_CONFIG["instagram_access_token"] || nil
  unless access_token
    raise "instagram_access_token must be defined as a parameter or in the application config"
  end
  configure_connection('access_token' => access_token)
end
# Queries the Instagram API for recent images with a given tag.
#
# @param [String] tag The tag to query by (defaults to the app's
#   default_image_search_tag)
#
# @return [Hashie::Mash] The data returned by Instagram API
def fetch_recent_images_by_tag(tag = nil)
  configure_connection
  search_tag = tag || Lentil::Engine::APP_CONFIG["default_image_search_tag"]
  Instagram.tag_recent_media(search_tag, :count => 100)
end
# Queries the Instagram API for the image metadata associated with a given ID.
# Ensures API credentials are configured before issuing the request.
#
# @param [String] image_id Instagram image ID
#
# @return [Hashie::Mash] data returned by Instagram API
def fetch_image_by_id(image_id)
  configure_connection
  Instagram.media_item(image_id)
end
# Retrieves an image OEmbed metadata from the public URL using the Instagram OEmbed service
#
# @param url [String] The public Instagram image URL
#
# @return [OEmbed::Response] the Instagram image OEmbed data
#   (a response object, not a String; callers read its #fields)
def retrieve_oembed_data_from_url(url)
  OEmbed::Providers::Instagram.get(url)
end
# Retrieves image metadata via the public URL and imports it
#
# @param url [String] The public Instagram image URL
#
# @return [Array] new image objects
def save_image_from_url(url)
  media_id = retrieve_oembed_data_from_url(url).fields["media_id"]
  save_instagram_load(fetch_image_by_id(media_id))
end
# Produce processed image metadata from Instagram metadata.
# This metadata is accepted by the save_image method.
#
# @param [Hashie::Mash] instagram_metadata The single image metadata returned by Instagram API
#
# @return [Hash] processed image metadata
def extract_image_data(instagram_metadata)
  # Caption may be nil; guard before reading its text.
  caption = instagram_metadata.caption
  {
    url: instagram_metadata.link,
    external_id: instagram_metadata.id,
    large_url: instagram_metadata.images.standard_resolution.url,
    name: caption && caption.text,
    tags: instagram_metadata.tags,
    user: instagram_metadata.user,
    original_datetime: Time.at(instagram_metadata.created_time.to_i).to_datetime,
    original_metadata: instagram_metadata
  }
end
# Takes return from Instagram API gem and adds image,
# users, and tags to the database.
#
# @raise [DuplicateImageError] This method does not accept duplicate external image IDs
#
# @param [Hash] image_data processed Instagram image metadata
#
# @return [Image] new Image object
def save_image(image_data)
  instagram_service = Lentil::Service.where(:name => "Instagram").first
  # Find or create the owning user under the Instagram service.
  user_record = instagram_service.users.where(:user_name => image_data[:user][:username]).
    first_or_create!({:full_name => image_data[:user][:full_name], :bio => image_data[:user][:bio]})

  # Reject images this user has already imported.
  raise DuplicateImageError, "Duplicate image identifier" unless user_record.
    images.where(:external_identifier => image_data[:external_id]).first.nil?

  image_record = user_record.images.build({
    :external_identifier => image_data[:external_id],
    :description => image_data[:name],
    :url => image_data[:url],
    :long_url => image_data[:large_url],
    :original_datetime => image_data[:original_datetime]
  })

  # This is a temporary fix for a bug in the Hashie to_hash method.
  # It's fixed in master and should be in the next release (>1.2.0).
  image_record.original_metadata = JSON.parse(image_data[:original_metadata].to_json)

  # Default to "All Rights Reserved" until we find out more about licenses
  # FIXME: Set the default license in the app config
  unless image_record.licenses.size > 0
    image_record.licenses << Lentil::License.where(:short_name => "ARR").first
  end

  image_data[:tags].each {|tag| image_record.tags << Lentil::Tag.where(:name => tag).first_or_create}

  user_record.save!
  image_record.save!

  image_record
end
# Takes return from Instagram API gem and adds all new images,
# users, and tags to the database.
#
# @param [Hashie::Mash] instagram_load The content returned by the Instagram gem
# @param [Boolean] raise_dupes Whether to raise exceptions for duplicate images
#
# @raise [DuplicateImageError] If there are duplicate images and raise_dupes is true
#
# @return [Array] New image objects
def save_instagram_load(instagram_load, raise_dupes=false)
# Handle collections of images and individual images
images = instagram_load
if !images.kind_of?(Array)
images = [images]
end
images.collect {|image|
begin
save_image(extract_image_data(image))
rescue DuplicateImageError => e
raise e if raise_dupes
next
rescue => e
Rails.logger.error e.message
puts e.message
pp image
next
end
}.compact
end
#
# Call save_instagram_load, but raise exceptions for duplicates.
#
# @param [Hashie::Mash] instagram_load The content returned by the Instagram gem
#
# @raise [DuplicateImageError] If there are duplicate images
#
# @return [Array] New image objects
def save_instagram_load!(instagram_load)
save_instagram_load(instagram_load, true)
end
#
# Retrieve the binary image data for a given Image object
#
# @param [Image] image An Image model object from the Instagram service
#
# @raise [Exception] If there are request problems
#
# @return [String] Binary image data
def harvest_image_data(image)
response = Typhoeus.get(image.large_url, followlocation: true)
if response.success?
raise "Invalid content type: " + response.headers['Content-Type'] unless (response.headers['Content-Type'] == 'image/jpeg')
elsif response.timed_out?
raise "Request timed out"
elsif response.code == 0
raise "Could not get an HTTP response"
else
raise "HTTP request failed: " + response.code.to_s
end
response.body
end
#
# Test if an image is still avaiable
#
# @param [Image] image An Image model object from the Instagram service
#
# @raise [Exception] If there are request problems
#
# @return [Boolean] Whether the image request was successful
def test_remote_image(image)
response = Typhoeus.get(image.thumbnail_url, followlocation: true)
if response.success?
true
elsif response.timed_out? || (response.code == 0)
nil
else
false
end
end
#
# Leave a comment containing the donor agreement on an Instagram image
#
# @param image [type] An Image model object from the Instagram service
#
# @raise [Exception] If a comment submission fails
# @authenticated true
#
# @return [Hashie::Mash] Instagram response
def leave_image_comment(image, comment)
configure_comment_connection
Instagram.client.create_media_comment(image.external_identifier, comment)
end
end
end
|
module Linguist
  module Strategy
    class Modeline
      # Match case-insensitively: editors accept variants such as
      # "-*- Mode: Ruby -*-" and "VIM: set ft=ruby:", which the previous
      # case-sensitive patterns silently missed.
      EmacsModeline = /-\*-\s*mode:\s*(\w+);?\s*-\*-/i
      VimModeline = /\/\*\s*vim:\s*set\s*(?:ft|filetype)=(\w+):\s*\*\//i
      # Public: Detects language based on Vim and Emacs modelines
      #
      # blob - An object that quacks like a blob.
      #
      # Examples
      #
      #   Modeline.call(FileBlob.new("path/to/file"))
      #
      # Returns an Array with one Language if the blob has a Vim or Emacs modeline
      # that matches a Language name or alias. Returns an empty array if no match.
      def self.call(blob, _ = nil)
        Array(Language.find_by_alias(modeline(blob.data)))
      end
      # Public: Get the modeline from the first n-lines of the file
      #
      # Returns a String (the captured mode name) or nil
      def self.modeline(data)
        match = data.match(EmacsModeline) || data.match(VimModeline)
        match[1] if match
      end
    end
  end
end
Making modelines case-insensitive
module Linguist
  module Strategy
    class Modeline
      EmacsModeline = /-\*-\s*mode:\s*(\w+);?\s*-\*-/i
      VimModeline = /\/\*\s*vim:\s*set\s*(?:ft|filetype)=(\w+):\s*\*\//i
      # Public: Detects language based on Vim and Emacs modelines
      #
      # blob - An object that quacks like a blob.
      #
      # Examples
      #
      #   Modeline.call(FileBlob.new("path/to/file"))
      #
      # Returns an Array with one Language if the blob carries a recognizable
      # modeline matching a Language name or alias, otherwise an empty Array.
      def self.call(blob, _ = nil)
        Array(Language.find_by_alias(modeline(blob.data)))
      end
      # Public: Extract the mode name from an Emacs or Vim modeline.
      #
      # Returns a String or nil
      def self.modeline(data)
        found = EmacsModeline.match(data) || VimModeline.match(data)
        found && found[1]
      end
    end
  end
end
|
# frozen_string_literal: true
require_relative 'crumb'
module Loaf
  # Mixed into controllers to provide breadcrumb declaration at both the
  # class level (declarative, resolved per-request) and the instance level.
  module ControllerExtensions
    # Module injection
    #
    # @api private
    def self.included(base)
      base.extend ClassMethods
      base.send :include, InstanceMethods
      base.send :helper_method, :_breadcrumbs
    end
    module ClassMethods
      # Add breadcrumb to the trail in controller as class method
      #
      # @param [String, Proc] name
      #   the breadcrumb name, or a Proc resolved against the controller instance
      # @param [Object, Proc] url
      #   the breadcrumb url, or a Proc resolved against the controller instance
      # @param [Hash] options
      #   passed to the before filter and to the created crumb
      #
      # @api public
      def breadcrumb(name, url, options = {})
        normalizer = method(:_normalize_name)
        # Register a before filter so the crumb is rebuilt on every request,
        # resolving Proc names/urls against the controller instance.
        send(_filter_name, options) do |instance|
          normalized_name = normalizer.call(name, instance)
          normalized_url = normalizer.call(url, instance)
          instance.send(:breadcrumb, normalized_name, normalized_url, options)
        end
      end
      alias add_breadcrumb breadcrumb
      private
      # Choose available filter name
      #
      # Rails 4+ provides before_action; fall back to before_filter otherwise.
      #
      # @api private
      def _filter_name
        respond_to?(:before_action) ? :before_action : :before_filter
      end
      # Resolve a value that may be nil, a Proc (arity 0 or 1) or a plain value.
      #
      # @api private
      def _normalize_name(name, instance)
        case name
        when NilClass
        when Proc
          if name.arity == 1
            instance.instance_exec(instance, &name)
          else
            instance.instance_exec(&name)
          end
        else
          name
        end
      end
    end # ClassMethods
    module InstanceMethods
      # Add breadcrumb in controller as instance method
      #
      # @param [String] name
      #
      # @param [Object] url
      #
      # @api public
      def breadcrumb(name, url, options = {})
        _breadcrumbs << Loaf::Crumb.new(name, url, options)
      end
      alias add_breadcrumb breadcrumb
      # Collection of breadcrumbs
      #
      # @api private
      def _breadcrumbs
        @_breadcrumbs ||= []
      end
      # Remove all current breadcrumbs
      #
      # @api public
      def clear_breadcrumbs
        _breadcrumbs.clear
      end
    end # InstanceMethods
  end # ControllerExtensions
end # Loaf
Add docs
# frozen_string_literal: true
require_relative 'crumb'
module Loaf
  # Mixed into controllers to provide breadcrumb declaration at both the
  # class level (declarative, resolved per-request) and the instance level.
  module ControllerExtensions
    # Module injection
    #
    # @api private
    def self.included(base)
      base.extend ClassMethods
      base.send :include, InstanceMethods
      base.send :helper_method, :_breadcrumbs
    end
    module ClassMethods
      # Add breadcrumb to the trail in controller as class method
      #
      # @param [String, Proc] name
      #   the breadcrumb name, or a Proc resolved against the controller instance
      # @param [Object, Proc] url
      #   the breadcrumb url, or a Proc resolved against the controller instance
      # @param [Hash] options
      #   passed to the before filter and to the created crumb
      #
      # @api public
      def breadcrumb(name, url, options = {})
        normalizer = method(:_normalize_name)
        # Register a before filter so the crumb is rebuilt on every request,
        # resolving Proc names/urls against the controller instance.
        send(_filter_name, options) do |instance|
          normalized_name = normalizer.call(name, instance)
          normalized_url = normalizer.call(url, instance)
          instance.send(:breadcrumb, normalized_name, normalized_url, options)
        end
      end
      alias add_breadcrumb breadcrumb
      private
      # Choose available filter name
      #
      # Rails 4+ provides before_action; fall back to before_filter otherwise.
      #
      # @api private
      def _filter_name
        respond_to?(:before_action) ? :before_action : :before_filter
      end
      # Resolve a breadcrumb name/url that may be nil, a Proc (arity 0 or 1)
      # or a plain value.
      #
      # @return [String, nil] the resolved value
      #
      # @api private
      def _normalize_name(name, instance)
        case name
        when NilClass
        when Proc
          if name.arity == 1
            instance.instance_exec(instance, &name)
          else
            instance.instance_exec(&name)
          end
        else
          name
        end
      end
    end # ClassMethods
    module InstanceMethods
      # Add breadcrumb in controller as instance method
      #
      # @param [String] name
      #
      # @param [Object] url
      #
      # @api public
      def breadcrumb(name, url, options = {})
        _breadcrumbs << Loaf::Crumb.new(name, url, options)
      end
      alias add_breadcrumb breadcrumb
      # Collection of breadcrumbs
      #
      # @api private
      def _breadcrumbs
        @_breadcrumbs ||= []
      end
      # Remove all current breadcrumbs
      #
      # @api public
      def clear_breadcrumbs
        _breadcrumbs.clear
      end
    end # InstanceMethods
  end # ControllerExtensions
end # Loaf
|
require "logstash/inputs/base"
require "logstash/namespace"
# Receive events using the lumberjack protocol.
#
# This is mainly to receive events shipped with lumberjack,
# <http://github.com/jordansissel/lumberjack>
class LogStash::Inputs::Lumberjack < LogStash::Inputs::Base
  config_name "lumberjack"
  plugin_status "experimental"
  # the address to listen on.
  config :host, :validate => :string, :default => "0.0.0.0"
  # the port to listen on.
  config :port, :validate => :number, :required => true
  # ssl certificate to use
  config :ssl_certificate, :validate => :string, :required => true
  # ssl key to use
  config :ssl_key, :validate => :string, :required => true
  # ssl key passphrase to use
  config :ssl_key_passphrase, :validate => :password
  # TODO(sissel): Add CA to authenticate clients with.
  public
  def register
    # Lazily require the server so the gem is only loaded when this input is used.
    require "lumberjack/server"
    @logger.info("Starting lumberjack input listener", :address => "#{@host}:#{@port}")
    @lumberjack = Lumberjack::Server.new(:address => @host, :port => @port,
      :ssl_certificate => @ssl_certificate, :ssl_key => @ssl_key,
      :ssl_key_passphrase => @ssl_key_passphrase)
  end # def register
  public
  def run(output_queue)
    begin
      @lumberjack.run do |l|
        # Build the event source URI from the shipper's host and file path.
        source = "lumberjack://#{l.delete("host")}/#{l.delete("file")}"
        event = to_event(l.delete("line"), source)
        # take any remaining fields in the lumberjack event and merge it as a
        # field in the logstash event.
        l.each do |key, value|
          event[key] = value
        end
        output_queue << event
      end
    rescue IOError => e
      # A client disconnect surfaces as IOError; log it and keep listening.
      @logger.debug("Connection reset.", :e => e, :backtrace => e.backtrace)
      retry
    end
  end # def run
end # class LogStash::Inputs::Lumberjack
refactor out to lumberjack gem
require "logstash/inputs/base"
require "logstash/namespace"
# Receive events using the lumberjack protocol.
#
# This is mainly to receive events shipped with lumberjack,
# <http://github.com/jordansissel/lumberjack>
class LogStash::Inputs::Lumberjack < LogStash::Inputs::Base
  config_name "lumberjack"
  plugin_status "experimental"
  # the address to listen on.
  config :host, :validate => :string, :default => "0.0.0.0"
  # the port to listen on.
  config :port, :validate => :number, :required => true
  # ssl certificate to use
  config :ssl_certificate, :validate => :string, :required => true
  # ssl key to use
  config :ssl_key, :validate => :string, :required => true
  # ssl key passphrase to use
  config :ssl_key_passphrase, :validate => :password
  # TODO(sissel): Add CA to authenticate clients with.
  public
  def register
    # Lazily require the server so the gem is only loaded when this input is used.
    require "lumberjack/server"
    @logger.info("Starting lumberjack input listener", :address => "#{@host}:#{@port}")
    @lumberjack = Lumberjack::Server.new(:address => @host, :port => @port,
      :ssl_certificate => @ssl_certificate, :ssl_key => @ssl_key,
      :ssl_key_passphrase => @ssl_key_passphrase)
  end # def register
  public
  # NOTE(review): no rescue/retry here — presumably connection errors are
  # handled inside the lumberjack gem's run loop; confirm before relying on it.
  def run(output_queue)
    @lumberjack.run do |l|
      # Build the event source URI from the shipper's host and file path.
      source = "lumberjack://#{l.delete("host")}/#{l.delete("file")}"
      event = to_event(l.delete("line"), source)
      # take any remaining fields in the lumberjack event and merge it as a
      # field in the logstash event.
      l.each do |key, value|
        event[key] = value
      end
      output_queue << event
    end
  end # def run
end # class LogStash::Inputs::Lumberjack
|
require 'masamune/string_format'
module Masamune::Commands
  # Wraps invocation of the psql client: builds the argv, feeds SQL on stdin
  # and filters prompt echo out of the captured output.
  class Postgres
    # Historical hard-coded prompt; kept for backward compatibility but no
    # longer used for matching (see #prompt below).
    PROMPT = 'postgres=#'
    include Masamune::StringFormat
    attr_accessor :file, :exec, :input, :output, :print, :block, :variables
    # @param [Hash] opts
    # @option opts [String] :file SQL file to execute via --file
    # @option opts [String] :exec literal SQL to execute via stdin
    # @option opts [String] :output file to write query output to
    # @option opts [Boolean] :print whether to echo result lines (default false)
    # @option opts [Proc] :block callback invoked with each output line
    # @option opts [Hash] :variables psql --set variables (default {})
    def initialize(opts = {})
      self.file = opts[:file]
      self.exec = opts[:exec]
      self.output = opts[:output]
      self.print = opts.fetch(:print, false)
      self.block = opts[:block]
      self.variables = opts.fetch(:variables, {})
    end
    # Normalize SQL and mirror it into the stdin input.
    def exec=(sql = nil)
      return unless sql
      self.input = @exec = strip_sql(sql)
    end
    # Lazily build a StringIO over the SQL input, if any.
    def stdin
      if input
        @stdin ||= StringIO.new(input)
      end
    end
    # Interactive session unless explicit SQL or a file was given.
    def interactive?
      !(exec || file)
    end
    def print?
      self.print
    end
    # Assemble the psql argv from the configured connection settings.
    def command_args
      args = []
      args << 'PGPASSFILE=%s' % configuration[:pgpass_file] if configuration[:pgpass_file]
      args << configuration[:path]
      args << '--host=%s' % configuration[:hostname] if configuration[:hostname]
      args << '--dbname=%s' % configuration[:database]
      args << '--username=%s' % configuration[:username] if configuration[:username]
      args << '--no-password'
      args << configuration[:options].map(&:to_a)
      args << '--file=%s' % file if file
      args << '--output=%s' % output if output
      variables.each do |key, val|
        args << '--set=%s' % "#{key.to_s}='#{val.to_s}'"
      end
      args.flatten.compact
    end
    def before_execute
      Masamune.print("psql with file #{file}") if file
    end
    # Suppress prompt echo lines; forward real output to the block/printer.
    def handle_stdout(line, line_no)
      if line =~ /\A#{prompt}/
        Masamune.logger.debug(line)
      else
        block.call(line) if block
        Masamune::print(line) if print?
      end
    end
    # psql renders its prompt using the current database name; the previous
    # hard-coded 'postgres=#' failed to match whenever a different database
    # was configured, letting prompt lines leak into the output.
    def prompt
      configuration[:database] + '=>'
    end
    private
    def configuration
      Masamune.configuration.postgres
    end
  end
end
Use database prompt
require 'masamune/string_format'
module Masamune::Commands
  # Wraps invocation of the psql client: builds the argv, feeds SQL on stdin
  # and filters prompt echo out of the captured output.
  class Postgres
    include Masamune::StringFormat
    attr_accessor :file, :exec, :input, :output, :print, :block, :variables
    # @param [Hash] opts
    # @option opts [String] :file SQL file to execute via --file
    # @option opts [String] :exec literal SQL to execute via stdin
    # @option opts [String] :output file to write query output to
    # @option opts [Boolean] :print whether to echo result lines (default false)
    # @option opts [Proc] :block callback invoked with each output line
    # @option opts [Hash] :variables psql --set variables (default {})
    def initialize(opts = {})
      self.file = opts[:file]
      self.exec = opts[:exec]
      self.output = opts[:output]
      self.print = opts.fetch(:print, false)
      self.block = opts[:block]
      self.variables = opts.fetch(:variables, {})
    end
    # Normalize SQL and mirror it into the stdin input.
    def exec=(sql = nil)
      return unless sql
      self.input = @exec = strip_sql(sql)
    end
    # Lazily build a StringIO over the SQL input, if any.
    def stdin
      if input
        @stdin ||= StringIO.new(input)
      end
    end
    # Interactive session unless explicit SQL or a file was given.
    def interactive?
      !(exec || file)
    end
    def print?
      self.print
    end
    # Assemble the psql argv from the configured connection settings.
    def command_args
      args = []
      args << 'PGPASSFILE=%s' % configuration[:pgpass_file] if configuration[:pgpass_file]
      args << configuration[:path]
      args << '--host=%s' % configuration[:hostname] if configuration[:hostname]
      args << '--dbname=%s' % configuration[:database]
      args << '--username=%s' % configuration[:username] if configuration[:username]
      args << '--no-password'
      args << configuration[:options].map(&:to_a)
      args << '--file=%s' % file if file
      args << '--output=%s' % output if output
      variables.each do |key, val|
        args << '--set=%s' % "#{key.to_s}='#{val.to_s}'"
      end
      args.flatten.compact
    end
    def before_execute
      Masamune.print("psql with file #{file}") if file
    end
    # Suppress prompt echo lines; forward real output to the block/printer.
    def handle_stdout(line, line_no)
      if line =~ /\A#{prompt}/
        Masamune.logger.debug(line)
      else
        block.call(line) if block
        Masamune::print(line) if print?
      end
    end
    # psql renders its prompt using the current database name.
    def prompt
      configuration[:database] + '=>'
    end
    private
    def configuration
      Masamune.configuration.postgres
    end
  end
end
|
# Renders a polar (circular) maze as an image. Cells are addressed by
# (row, column) with row 0 at the center; walls are concentric arcs plus
# radial lines.
class Maze::Formatter::Image::Polar < Maze::Formatter::Image
  # Polar mazes have no background rendering here.
  def render_background
  end
  # Draw the light-gray construction grid (every cell boundary).
  def render_grid
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.stroke 'gray90'
    canvas.stroke_width 1
    canvas.fill 'none'
    grid.each_cell do |cell|
      next if cell.row == 0
      # NOTE(review): ax/ay/bx/by are computed by coord but unused here.
      ax, ay, bx, by, cx, cy, dx, dy, radius, ccw, cw = coord cell
      canvas.ellipse image_center, image_center, radius, radius, ccw, cw
      canvas.line cx, cy, dx, dy
    end
    canvas.ellipse(image_center, image_center, grid.rows * cell_width, grid.rows * cell_width, 0, 360)
  end
  # Draw maze walls: boundaries without a passage between neighboring cells.
  def render_wall
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.stroke wall_color
    canvas.stroke_width wall_width
    canvas.fill 'none'
    grid.each_cell do |cell|
      next if cell.row == 0
      ax, ay, bx, by, cx, cy, dx, dy, radius, ccw, cw = coord cell
      canvas.ellipse image_center, image_center, radius, radius, ccw, cw unless cell.linked_to?(:inward)
      canvas.line cx, cy, dx, dy unless cell.linked_to?(:cw)
    end
    canvas.ellipse(image_center, image_center, grid.rows * cell_width, grid.rows * cell_width, 0, 360)
  end
  # Draw the solution path: arcs along rings, radial segments between rings,
  # plus dots at start and finish.
  def render_path
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.fill 'none'
    canvas.stroke path_color
    canvas.stroke_width path_width
    grid.each_cell do |cell|
      next unless path_cell? cell
      unless path?(:cw, cell) || path?(:ccw, cell)
        # draw arc to close the gap if outward ring is subdivided
        # and cell is linked outwards but not cw and ccw
        # this can be the case even for cell(0,0)
        outward_cells = path_outward(cell)
        if outward_subdivided?(cell) && outward_cells.any?
          _, _, _, _, radius, angle = center_coord cell
          angles_outward_cells = outward_cells.map {|o| _, _, _, _, _, a = center_coord(o); a }
          # don't use cell(0,0) own angle, override with one of the outward cells
          angle = angles_outward_cells.first if cell.row == 0
          angle1 = [angle, *angles_outward_cells].min
          angle2 = [angle, *angles_outward_cells].max
          canvas.ellipse image_center, image_center, radius, radius, angle1, angle2 unless angle1 == angle2
        end
      end
      next if cell.row == 0
      if path?(:inward, cell)
        x1, y1, x2, y2, _, _ = center_coord cell
        canvas.line x1, y1, x2, y2
      end
      if path?(:cw, cell)
        _, _, _, _, radius1, angle1 = center_coord cell
        _, _, _, _, radius2, angle2 = center_coord cell.cw
        # adjust angle if outward ring is subdivided
        if outward_subdivided?(cell)
          outward_cells = path_outward(cell)
          _, _, _, _, _, angle1 = center_coord(outward_cells.first) if outward_cells.any?
          outward_cells_cw = path_outward(cell.cw)
          _, _, _, _, _, angle2 = center_coord(outward_cells_cw.first) if outward_cells_cw.any?
        end
        canvas.ellipse image_center, image_center, radius1, radius1, angle1, angle2
      end
    end
    # draw start and finish
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.fill path_color
    canvas.stroke 'none'
    [path_start, path_finish].compact.each do |cell|
      x, y, _, _, _, _ = center_coord cell
      canvas.ellipse x, y, path_width*2, path_width*2, 0, 360
    end
  end
  # True when the next outward ring has a different cell count (ring was split).
  def outward_subdivided? cell
    return false if grid.rows == cell.row + 1
    grid.columns(cell.row).size != grid.columns(cell.row+1).size
  end
  # Outward neighbors that are both linked to the cell and on the path.
  def path_outward cell
    cell.outward.select {|o| cell.linked?(o) && path_cell?(o) }
  end
  # Corner and wall coordinates for a cell:
  # [ax, ay, bx, by, cx, cy, dx, dy, inner_radius, ccw_degrees, cw_degrees]
  def coord cell
    theta = 2 * Math::PI / grid.columns(cell.row).size
    inner_radius = cell.row * cell_width
    outer_radius = (cell.row + 1) * cell_width
    theta_ccw = cell.column * theta
    theta_cw = (cell.column + 1) * theta
    ax = image_center + inner_radius * Math.cos(theta_ccw)
    ay = image_center + inner_radius * Math.sin(theta_ccw)
    bx = image_center + outer_radius * Math.cos(theta_ccw)
    by = image_center + outer_radius * Math.sin(theta_ccw)
    cx = image_center + inner_radius * Math.cos(theta_cw)
    cy = image_center + inner_radius * Math.sin(theta_cw)
    dx = image_center + outer_radius * Math.cos(theta_cw)
    dy = image_center + outer_radius * Math.sin(theta_cw)
    theta_ccw_degres = 360 / (2 * Math::PI) * theta_ccw
    theta_cw_degres = 360 / (2 * Math::PI) * theta_cw
    [ax, ay, bx, by, cx, cy, dx, dy, inner_radius, theta_ccw_degres, theta_cw_degres]
  end
  # Center-point coordinates for a cell:
  # [x1, y1, x2, y2, radius1, angle_degrees] where (x2, y2) lies half a ring inward.
  def center_coord cell
    theta = 2 * Math::PI / grid.columns(cell.row).size
    angle = (cell.column + 0.5) * theta
    angle_degres = 360 / (2 * Math::PI) * angle
    radius1 = (cell.row + 0.5) * cell_width
    radius2 = (cell.row - 0.5) * cell_width
    x1 = image_center + radius1 * Math.cos(angle)
    y1 = image_center + radius1 * Math.sin(angle)
    x2 = image_center + radius2 * Math.cos(angle)
    y2 = image_center + radius2 * Math.sin(angle)
    [x1, y1, x2, y2, radius1, angle_degres]
  end
  def image_width
    cell_width * grid.rows * 2 + wall_width + border_width * 2 + 3 # why? +3
  end
  alias_method :image_height, :image_width
  def image_center
    image_width / 2
  end
end
+ render background for polar grids
# Renders a polar (circular) maze as an image. Cells are addressed by
# (row, column) with row 0 at the center; walls are concentric arcs plus
# radial lines.
class Maze::Formatter::Image::Polar < Maze::Formatter::Image
  # Paint each cell's distance color as a thick arc through the cell center.
  def render_background
    canvas.stroke_antialias true
    canvas.stroke_linecap 'butt'
    canvas.stroke_width cell_width
    canvas.fill 'none'
    grid.each_cell do |cell|
      color = distance_color cell
      next unless color
      canvas.stroke color
      _, _, _, _, _, ccw, cw = coord cell
      radius, _ = center_coord cell
      canvas.ellipse image_center, image_center, radius, radius, ccw, cw
    end
  end
  # Draw the light-gray construction grid (every cell boundary).
  def render_grid
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.stroke 'gray90'
    canvas.stroke_width 1
    canvas.fill 'none'
    grid.each_cell do |cell|
      next if cell.row == 0
      cx, cy, dx, dy, radius, ccw, cw = coord cell
      canvas.ellipse image_center, image_center, radius, radius, ccw, cw
      canvas.line cx, cy, dx, dy
    end
    canvas.ellipse(image_center, image_center, grid.rows * cell_width, grid.rows * cell_width, 0, 360)
  end
  # Draw maze walls: boundaries without a passage between neighboring cells.
  def render_wall
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.stroke wall_color
    canvas.stroke_width wall_width
    canvas.fill 'none'
    grid.each_cell do |cell|
      next if cell.row == 0
      cx, cy, dx, dy, radius, ccw, cw = coord cell
      canvas.ellipse image_center, image_center, radius, radius, ccw, cw unless cell.linked_to?(:inward)
      canvas.line cx, cy, dx, dy unless cell.linked_to?(:cw)
    end
    canvas.ellipse(image_center, image_center, grid.rows * cell_width, grid.rows * cell_width, 0, 360)
  end
  # Draw the solution path: arcs along rings, radial segments between rings,
  # plus dots at start and finish.
  def render_path
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.fill 'none'
    canvas.stroke path_color
    canvas.stroke_width path_width
    grid.each_cell do |cell|
      next unless path_cell? cell
      unless path?(:cw, cell) || path?(:ccw, cell)
        # draw arc to close the gap if outward ring is subdivided
        # and cell is linked outwards but not cw and ccw
        # this can be the case even for cell(0,0)
        outward_cells = path_outward(cell)
        if outward_subdivided?(cell) && outward_cells.any?
          radius, angle = center_coord cell
          angles_outward_cells = outward_cells.map {|o| _, a = center_coord(o); a }
          # don't use cell(0,0) own angle, override with one of the outward cells
          angle = angles_outward_cells.first if cell.row == 0
          angle1 = [angle, *angles_outward_cells].min
          angle2 = [angle, *angles_outward_cells].max
          canvas.ellipse image_center, image_center, radius, radius, angle1, angle2 unless angle1 == angle2
        end
      end
      next if cell.row == 0
      if path?(:inward, cell)
        radius, theta = center_coord cell, :radian
        # center of cell
        x1, y1 = polar2cartesian(radius, theta)
        # center of inward cell, but adjusted to the same angle of the current cell
        x2, y2 = polar2cartesian(radius - cell_width, theta)
        canvas.line x1, y1, x2, y2
      end
      if path?(:cw, cell)
        radius1, angle1 = center_coord cell
        radius2, angle2 = center_coord cell.cw
        # adjust angle if outward ring is subdivided
        if outward_subdivided?(cell)
          outward_cells = path_outward(cell)
          _, angle1 = center_coord(outward_cells.first) if outward_cells.any?
          outward_cells_cw = path_outward(cell.cw)
          _, angle2 = center_coord(outward_cells_cw.first) if outward_cells_cw.any?
        end
        canvas.ellipse image_center, image_center, radius1, radius1, angle1, angle2
      end
    end
    # draw start and finish
    canvas.stroke_antialias true
    canvas.stroke_linecap 'square'
    canvas.fill path_color
    canvas.stroke 'none'
    [path_start, path_finish].compact.each do |cell|
      x, y = polar2cartesian(*center_coord(cell, :radian))
      canvas.ellipse x, y, path_width*2, path_width*2, 0, 360
    end
  end
  # True when the next outward ring has a different cell count (ring was split).
  def outward_subdivided? cell
    return false if grid.rows == cell.row + 1
    grid.columns(cell.row).size != grid.columns(cell.row+1).size
  end
  # Outward neighbors that are both linked to the cell and on the path.
  def path_outward cell
    cell.outward.select {|o| cell.linked?(o) && path_cell?(o) }
  end
  # Coordinates of a cell's clockwise wall plus its angular extent:
  # [cx, cy, dx, dy, inner_radius, theta_ccw, theta_cw]; angles in the
  # requested unit (:degree or :radian).
  def coord cell, unit=:degree
    inner_radius = cell.row * cell_width
    outer_radius = (cell.row + 1) * cell_width
    theta = 2 * Math::PI / grid.columns(cell.row).size
    theta_ccw = cell.column * theta
    theta_cw = (cell.column + 1) * theta
    # we need only the cartesian coords of the cw wall
    # ax, ay = polar2cartesian(inner_radius, theta_ccw)
    # bx, by = polar2cartesian(outer_radius, theta_ccw)
    cx, cy = polar2cartesian(inner_radius, theta_cw)
    dx, dy = polar2cartesian(outer_radius, theta_cw)
    if unit == :degree
      theta_ccw = radian2degree theta_ccw
      theta_cw = radian2degree theta_cw
    end
    [cx, cy, dx, dy, inner_radius, theta_ccw, theta_cw]
  end
  # Polar coordinates of a cell's center: [radius, angle] in the requested unit.
  def center_coord cell, unit=:degree
    radius = (cell.row + 0.5) * cell_width
    theta = 2 * Math::PI / grid.columns(cell.row).size
    angle = (cell.column + 0.5) * theta
    angle = radian2degree(angle) if unit == :degree
    [radius, angle]
  end
  # Convert polar (radius, radians) to cartesian, relative to the image center.
  def polar2cartesian radius, theta
    [image_center + radius * Math.cos(theta), image_center + radius * Math.sin(theta)]
  end
  def radian2degree value
    360 / (2 * Math::PI) * value
  end
  def image_width
    cell_width * grid.rows * 2 + wall_width + border_width * 2 + 3 # why? +3
  end
  alias_method :image_height, :image_width
  def image_center
    image_width / 2
  end
end
module MediaInfoNative
  class MediaInfo
    # These are parts of the convenience wrapper of the old Mediainfo gem.
    # I recklessly throw lots of the noise away. Hope it resembles the old
    # API.
    BaseStream::TYPES.each do |type|
      # One accessor per stream type (e.g. #video, #audio), returning a proxy
      # over all streams of that type.
      define_method "#{type}" do
        StreamProxy.new(streams.select { |s| s.stream_type == type })
      end
    end
    # Unknown attributes are delegated to the general stream.
    def method_missing(meth, *args, &block)
      self.general.send(meth, *args, &block)
    end
    # Keep respond_to? consistent with the method_missing delegation above.
    def respond_to_missing?(meth, include_private = false)
      self.general.respond_to?(meth, include_private) || super
    end
    [:video, :audio, :image].each do |t|
      define_method "#{t}?" do
        self.send(t).count > 0
      end
    end
  end
  class StreamProxy
    def initialize(streams)
      @streams = streams
    end
    SingleStreamAPIError = Class.new(RuntimeError)
    NoStreamsForProxyError = Class.new(NoMethodError)
    UnknownAttributeError = Class.new(NoMethodError)
    def [](idx); @streams[idx]; end
    def count; @streams.size; end
    # Delegate attribute lookups to the wrapped stream. Only valid when the
    # proxy wraps exactly one stream; multi-stream proxies must be indexed.
    def method_missing(m, *a, &b)
      case @streams.size
      when 0
        raise NoStreamsForProxyError
      when 1
        if @streams.first.respond_to?(m)
          @streams.first.send(m, *a, &b)
        else
          raise UnknownAttributeError
        end
      else
        raise SingleStreamAPIError
      end
    end
    # Mirror method_missing: a single-stream proxy responds to whatever its
    # stream responds to.
    def respond_to_missing?(m, include_private = false)
      (@streams.size == 1 && @streams.first.respond_to?(m, include_private)) || super
    end
  end
end
We also need to know if we have another track
module MediaInfoNative
  class MediaInfo
    # These are parts of the convenience wrapper of the old Mediainfo gem.
    # I recklessly throw lots of the noise away. Hope it resembles the old
    # API.
    BaseStream::TYPES.each do |type|
      # One accessor per stream type (e.g. #video, #audio), returning a proxy
      # over all streams of that type.
      define_method "#{type}" do
        StreamProxy.new(streams.select { |s| s.stream_type == type })
      end
    end
    # Unknown attributes are delegated to the general stream.
    # NOTE(review): no matching respond_to_missing?, so respond_to? will not
    # reflect this delegation — consider adding one.
    def method_missing(meth, *args, &block)
      self.general.send(meth, *args, &block)
    end
    # Boolean presence checks per stream type, including :other tracks.
    [:video, :audio, :image, :other].each do |t|
      define_method "#{t}?" do
        self.send(t).count > 0
      end
    end
  end
  class StreamProxy
    def initialize(streams)
      @streams = streams
    end
    SingleStreamAPIError = Class.new(RuntimeError)
    NoStreamsForProxyError = Class.new(NoMethodError)
    UnknownAttributeError = Class.new(NoMethodError)
    def [](idx); @streams[idx]; end
    def count; @streams.size; end
    # Delegate attribute lookups to the wrapped stream. Only valid when the
    # proxy wraps exactly one stream; multi-stream proxies must be indexed.
    def method_missing(m, *a, &b)
      case @streams.size
      when 0
        raise NoStreamsForProxyError
      when 1
        if @streams.first.respond_to?(m)
          @streams.first.send(m, *a, &b)
        else
          raise UnknownAttributeError
        end
      else
        raise SingleStreamAPIError
      end
    end
  end
end
|
module MiniTest
  module Reporters
    # Gem release version (semantic versioning).
    VERSION = "0.12.1"
  end
end
Bump version.
module MiniTest
  module Reporters
    # Gem release version (semantic versioning).
    VERSION = "0.12.2"
  end
end
|
module MiqExpression::SubstMixin
  # NOTE(review): ActiveSupport::Concern is normally activated via `extend`,
  # not `include`; as written this line appears to have no effect — confirm.
  include ActiveSupport::Concern
  # Returns nil for the "new expression" placeholder, otherwise the built table.
  def exp_build_table_or_nil(exp)
    return nil if exp == {"???" => "???"}
    exp_build_table(exp)
  end
  # Build an array of expression symbols by recursively traversing the MiqExpression object
  # and inserting sequential tokens for each expression part
  def exp_build_table(exp, quick_search = false)
    exp_table = []
    if exp["and"]
      exp_table.push("(")
      exp["and"].each do |e|
        exp_table += exp_build_table(e, quick_search)
        exp_table.push("AND") unless e == exp["and"].last
      end
      exp_table.push(")")
    elsif exp["or"]
      exp_table.push("(")
      exp["or"].each do |e|
        exp_table += exp_build_table(e, quick_search)
        exp_table.push("OR") unless e == exp["or"].last
      end
      exp_table.push(")")
    elsif exp["not"]
      @exp_token ||= 0
      @exp_token += 1
      exp[:token] = @exp_token
      exp_table.push(quick_search ? "NOT" : ["NOT", @exp_token]) # No token if building quick search exp
      exp_table.push("(") unless %w(and or).include?(exp["not"].keys.first) # No parens if and/or under me
      exp_table += exp_build_table(exp["not"], quick_search)
      exp_table.push(")") unless %w(and or).include?(exp["not"].keys.first) # No parens if and/or under me
    else
      @exp_token ||= 0
      @exp_token += 1
      exp[:token] = @exp_token
      if exp["???"] # Found a new expression part
        exp_table.push(["???", @exp_token])
        exp_context[@expkey][:exp_token] = @exp_token # Save the token value for the view
        exp_context[:edit_exp] = copy_hash(exp) # Save the exp part for the view
        exp_context[@expkey].update_from_exp_tree(exp_context[:edit_exp]) # Set the fields for a new exp part
      elsif quick_search # Separate out the user input fields if doing a quick search
        human_exp = MiqExpression.to_human(exp)
        if human_exp.include?("<user input>")
          exp_table.push(human_exp.split("<user input>").join(""))
          exp_table.push([:user_input, @exp_token])
        else
          exp_table.push(human_exp)
        end
      else # Not quick search, add token to the expression
        exp_table.push([MiqExpression.to_human(exp), @exp_token])
      end
    end
    exp_table
  end
  # Go thru an expression and replace the quick search tokens
  def exp_replace_qs_tokens(exp, tokens)
    key = exp.keys.first
    if %w(and or).include?(key)
      exp[key].each { |e| exp_replace_qs_tokens(e, tokens) }
    elsif key == "not"
      exp_replace_qs_tokens(exp[key], tokens)
    elsif exp.key?(:token) && exp[key].key?("value")
      token = exp[:token]
      if tokens[token] # Only atoms included in tokens will have user input
        value = tokens[token][:value] # Get the user typed value
        if tokens[token][:value_type] == :bytes
          value += ".#{tokens[token][:suffix] || "bytes"}" # For :bytes type, add in the suffix
        end
        exp[key]["value"] = value # Replace the exp value with the proper qs value
      end
      # Remove the temporary :token key once substitution is done; leaving it
      # in the hash makes subsequent expression evaluation fail.
      exp.delete(:token)
    end
  end
  # Find an expression atom based on the token
  def exp_find_by_token(exp, token, parent_is_not = false)
    if exp.kind_of?(Array) # Is this and AND or OR
      exp.find { |e| exp_find_by_token(e, token) } # Look for token
    elsif exp[:token] && exp[:token] == token # This is the token exp
      @parent_is_not = true if parent_is_not # Remember that token exp's parent is a NOT
      exp # return it
    elsif exp["not"]
      exp_find_by_token(exp["not"], token, true) # Look for token under NOT (indicate we are a NOT)
    elsif exp["and"]
      exp_find_by_token(exp["and"], token) # Look for token under AND
    elsif exp["or"]
      exp_find_by_token(exp["or"], token) # Look for token under OR
    end
  end
  # Create a hash to store quick search information by token
  # and add in other quick search exp atom information.
  def create_tokens(exp_table, orig_exp)
    exp_table.select { |e| e.kind_of?(Array) }.each_with_object({}) do |e, acc|
      token = e.last
      acc[token] = {:value => nil}
      exp = exp_find_by_token(orig_exp, token)
      first_exp = exp[exp.keys.first]
      if first_exp.key?("field") # Base token settings on exp type
        field = exp[exp.keys.first]["field"]
        acc[token][:field] = field
        acc[token][:value_type] = MiqExpression.get_col_info(field)[:format_sub_type]
      elsif first_exp.key?("tag")
        acc[token][:tag] = first_exp["tag"]
      elsif first_exp.key?("count")
        acc[token][:count] = first_exp["count"]
      end
    end
  end
  # Expression editing context (stored on @edit by the controllers).
  def exp_context
    @edit
  end
end
Remove temporary tokens from the expression hash after substitution. Keeping the token would make expression evaluation fail.
Fixes https://bugzilla.redhat.com/show_bug.cgi?id=1558926
module MiqExpression::SubstMixin
  # ActiveSupport::Concern must be *extended*, not included: its hook
  # methods (included/append_features/class_methods) are written to become
  # singleton methods of the host module. `include` left them inert and
  # leaked them as instance methods onto every includer.
  extend ActiveSupport::Concern

  # Return nil for the placeholder "new expression" hash, else the built table.
  def exp_build_table_or_nil(exp)
    return nil if exp == {"???" => "???"}

    exp_build_table(exp)
  end

  # Build an array of expression symbols by recursively traversing the MiqExpression object
  # and inserting sequential tokens for each expression part
  def exp_build_table(exp, quick_search = false)
    exp_table = []
    if exp["and"]
      exp_table.push("(")
      exp["and"].each do |e|
        exp_table += exp_build_table(e, quick_search)
        exp_table.push("AND") unless e == exp["and"].last
      end
      exp_table.push(")")
    elsif exp["or"]
      exp_table.push("(")
      exp["or"].each do |e|
        exp_table += exp_build_table(e, quick_search)
        exp_table.push("OR") unless e == exp["or"].last
      end
      exp_table.push(")")
    elsif exp["not"]
      @exp_token ||= 0
      @exp_token += 1
      exp[:token] = @exp_token
      exp_table.push(quick_search ? "NOT" : ["NOT", @exp_token]) # No token if building quick search exp
      exp_table.push("(") unless %w(and or).include?(exp["not"].keys.first) # No parens if and/or under me
      exp_table += exp_build_table(exp["not"], quick_search)
      exp_table.push(")") unless %w(and or).include?(exp["not"].keys.first) # No parens if and/or under me
    else
      @exp_token ||= 0
      @exp_token += 1
      exp[:token] = @exp_token
      if exp["???"] # Found a new expression part
        exp_table.push(["???", @exp_token])
        exp_context[@expkey][:exp_token] = @exp_token # Save the token value for the view
        exp_context[:edit_exp] = copy_hash(exp) # Save the exp part for the view
        exp_context[@expkey].update_from_exp_tree(exp_context[:edit_exp]) # Set the fields for a new exp part
      elsif quick_search # Separate out the user input fields if doing a quick search
        human_exp = MiqExpression.to_human(exp)
        if human_exp.include?("<user input>")
          exp_table.push(human_exp.split("<user input>").join(""))
          exp_table.push([:user_input, @exp_token])
        else
          exp_table.push(human_exp)
        end
      else # Not quick search, add token to the expression
        exp_table.push([MiqExpression.to_human(exp), @exp_token])
      end
    end
    exp_table
  end

  # Go thru an expression and replace the quick search tokens.
  # The temporary :token keys are removed after substitution — leaving them
  # in the hash makes expression evaluation fail.
  def exp_replace_qs_tokens(exp, tokens)
    key = exp.keys.first
    if %w(and or).include?(key)
      exp[key].each { |e| exp_replace_qs_tokens(e, tokens) }
    elsif key == "not"
      exp_replace_qs_tokens(exp[key], tokens)
    elsif exp.key?(:token) && exp[key].key?("value")
      token = exp[:token]
      if tokens[token] # Only atoms included in tokens will have user input
        value = tokens[token][:value] # Get the user typed value
        if tokens[token][:value_type] == :bytes
          value += ".#{tokens[token][:suffix] || "bytes"}" # For :bytes type, add in the suffix
        end
        exp[key]["value"] = value # Replace the exp value with the proper qs value
      end
      exp.delete(:token)
    end
  end

  # Find an expression atom based on the token
  def exp_find_by_token(exp, token, parent_is_not = false)
    if exp.kind_of?(Array) # Is this and AND or OR
      exp.find { |e| exp_find_by_token(e, token) } # Look for token
    elsif exp[:token] && exp[:token] == token # This is the token exp
      @parent_is_not = true if parent_is_not # Remember that token exp's parent is a NOT
      exp # return it
    elsif exp["not"]
      exp_find_by_token(exp["not"], token, true) # Look for token under NOT (indicate we are a NOT)
    elsif exp["and"]
      exp_find_by_token(exp["and"], token) # Look for token under AND
    elsif exp["or"]
      exp_find_by_token(exp["or"], token) # Look for token under OR
    end
  end

  # Create a hash to store quick search information by token
  # and add in other quick search exp atom information.
  def create_tokens(exp_table, orig_exp)
    exp_table.select { |e| e.kind_of?(Array) }.each_with_object({}) do |e, acc|
      token = e.last
      acc[token] = {:value => nil}
      exp = exp_find_by_token(orig_exp, token)
      first_exp = exp[exp.keys.first]
      if first_exp.key?("field") # Base token settings on exp type
        field = exp[exp.keys.first]["field"]
        acc[token][:field] = field
        acc[token][:value_type] = MiqExpression.get_col_info(field)[:format_sub_type]
      elsif first_exp.key?("tag")
        acc[token][:tag] = first_exp["tag"]
      elsif first_exp.key?("count")
        acc[token][:count] = first_exp["count"]
      end
    end
  end

  # Expression-editing context (the @edit hash set by the including controller).
  def exp_context
    @edit
  end
end
|
require 'money'
require 'open-uri'
class Money
  module Bank
    # Exchange-rate bank backed by Google's calculator service.
    class GoogleCurrency < Money::Bank::VariableExchange
      # Fetch the from->to exchange rate from Google's calculator endpoint.
      #
      # SECURITY NOTE(review): the response is JS-style (non-JSON) and is fed
      # straight to `eval`, which executes arbitrary code if the endpoint is
      # compromised or spoofed. Consider a tolerant hand parser instead.
      def get_google_rate(from, to)
        data = eval(URI.parse("http://www.google.com/ig/calculator?hl=en&q=1#{from.upcase}%3D%3F#{to.upcase}").read)
        # Google signals success with error == '' or '0'.
        raise UnknownRate unless data[:error] == '' or data[:error] == '0'
        # rhs looks like "1.234 <currency words>"; take the numeric part.
        data[:rhs].split(' ')[0].to_f
      end
    end
  end
end
implement #get_rate
require 'money'
require 'open-uri'
class Money
  module Bank
    # Exchange-rate bank backed by Google's calculator service, with a
    # per-pair memoization cache on top of VariableExchange.
    class GoogleCurrency < Money::Bank::VariableExchange
      attr_reader :rates

      # Return the cached from->to rate, fetching it once on first use.
      # NOTE(review): @mutex and #rate_key_for are assumed to be provided by
      # Money::Bank::VariableExchange — confirm against the gem version in use.
      def get_rate(from, to)
        @mutex.synchronize{
          @rates[rate_key_for(from, to)] ||= get_google_rate(from, to)
        }
      end

      # Fetch the from->to rate from Google's calculator endpoint.
      # SECURITY NOTE(review): remote response is passed to `eval` (the body is
      # JS-style, not JSON); this executes arbitrary code if the endpoint is
      # compromised — consider a tolerant parser instead.
      def get_google_rate(from, to)
        data = eval(URI.parse("http://www.google.com/ig/calculator?hl=en&q=1#{from.upcase}%3D%3F#{to.upcase}").read)
        raise UnknownRate unless data[:error] == '' or data[:error] == '0'
        data[:rhs].split(' ')[0].to_f
      end
    end
  end
end
|
# Copyright (C) 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
  module Operation

    # A MongoDB map reduce operation.
    # Note that a map reduce operation can behave like a read and
    # return a result set, or can behave like a write operation and
    # output results to a user-specified collection.
    #
    # @since 2.0.0
    class MapReduce
      include Executable

      # Check equality of two map reduce operations.
      #
      # @example Check operation equality.
      #   operation == other
      #
      # @param [ Object ] other The other operation.
      #
      # @return [ true, false ] Whether the objects are equal.
      #
      # @since 2.0.0
      def ==(other)
        # @todo: check db name and map, reduce explicitly
        # NOTE(review): equality currently considers only the selector, so two
        # operations against different databases compare equal.
        spec[:selector] == other.spec[:selector]
      end
      alias_method :eql?, :==

      # Initialize a map reduce operation.
      #
      # @example
      #   include Mongo
      #   include Operation
      #   MapReduce.new({ :selector => { :mapreduce => 'test_coll',
      #                                  :map => '',
      #                                  :reduce => '' },
      #                   :db_name  => 'test_db' })
      #
      # @param [ Hash ] spec The specifications for the operation.
      #
      # @option spec :selector [ Hash ] The map reduce selector.
      # @option spec :db_name [ String ] The name of the database on which
      #   the operation should be executed.
      # @option spec :options [ Hash ] Options for the map reduce command.
      #
      # @since 2.0.0
      def initialize(spec)
        @spec = spec
      end

      # Execute the operation.
      # The context gets a connection on which the operation
      # is sent in the block.
      # If the map reduce will be written to an output collection and the
      # server is not primary, the operation will be rerouted to the primary
      # with a warning.
      #
      # @param [ Mongo::Server::Context ] context The context for this operation.
      #
      # @return [ Result ] The operation response, if there is one.
      #
      # @since 2.0.0
      def execute(context)
        if context.server.secondary? && !secondary_ok?
          warn "Database command '#{selector.keys.first}' rerouted to primary server"
          # @todo: Should we respect tag sets and options here?
          context = Mongo::ServerPreference.get(:mode => :primary).server.context
        end
        context.with_connection do |connection|
          connection.dispatch([message])
        end
      end

      private

      # The selector for this map reduce command operation.
      #
      # @return [ Hash ] The selector describing this map reduce operation.
      #
      # @since 2.0.0
      def selector
        @spec[:selector]
      end

      # Whether this operation can be executed on a replica set secondary server.
      # The map reduce operation may not be executed on a secondary if the user has specified
      # an output collection to which the results will be written.
      #
      # @return [ true, false ] Whether the operation can be executed on a secondary.
      #
      # @since 2.0.0
      def secondary_ok?
        # Both symbol and string keys are accepted for the out option.
        out = selector[:out] || selector['out']
        out.nil? || out == 'inline'
      end

      # The wire protocol message for this operation.
      #
      # @return [ Mongo::Protocol::Query ] Wire protocol message.
      #
      # @since 2.0.0
      def message
        Protocol::Query.new(db_name, Database::COMMAND, selector, options)
      end
    end
  end
end
RUBY-596 Map/reduce operation uses Specifiable
# Copyright (C) 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
  module Operation

    # A MongoDB map reduce operation.
    #
    # @note A map/reduce operation can behave like a read and
    #   return a result set, or can behave like a write operation and
    #   output results to a user-specified collection.
    #
    # @example Create the map/reduce operation.
    #   MapReduce.new({
    #     :selector => {
    #       :mapreduce => 'test_coll',
    #       :map => '',
    #       :reduce => ''
    #     },
    #     :db_name => 'test_db'
    #   })
    #
    # @param [ Hash ] spec The specifications for the operation.
    #
    # @option spec :selector [ Hash ] The map reduce selector.
    # @option spec :db_name [ String ] The name of the database on which
    #   the operation should be executed.
    # @option spec :options [ Hash ] Options for the map reduce command.
    #
    # @since 2.0.0
    class MapReduce
      include Executable
      include Specifiable

      # Execute the map/reduce operation, rerouting to the primary (with a
      # warning) when the results must be written out and the targeted
      # server is a secondary.
      #
      # @example Execute the operation.
      #   operation.execute(context)
      #
      # @param [ Mongo::Server::Context ] context The context for this operation.
      #
      # @return [ Result ] The operation response, if there is one.
      #
      # @since 2.0.0
      def execute(context)
        # @todo: Should we respect tag sets and options here?
        unless !context.server.secondary? || secondary_ok?
          warn "Database command '#{selector.keys.first}' rerouted to primary server"
          context = Mongo::ServerPreference.get(:mode => :primary).server.context
        end
        execute_message(context)
      end

      private

      # Dispatch the wire-protocol message over a pooled connection.
      def execute_message(context)
        context.with_connection { |connection| connection.dispatch([ message ]) }
      end

      # A map/reduce may run on a secondary only when it has no output
      # collection (no 'out', or inline output).
      #
      # @return [ true, false ] Whether the operation can be executed on a secondary.
      def secondary_ok?
        output = selector[:out] || selector['out']
        output.nil? || output == 'inline'
      end

      # The wire protocol query message for this command.
      def message
        Protocol::Query.new(db_name, Database::COMMAND, selector, options)
      end
    end
  end
end
|
class MongoBrowser::Api
  module Entities
    # API representation of a database summary.
    class Database < Grape::Entity
      expose :name, documentation: { type: String, desc: "Database name." }
      expose :size, documentation: { type: Integer, desc: "Database size in bytes." }
      expose :count, documentation: { type: Integer, desc: "Number of collections." }
    end

    # API representation of a collection summary.
    class Collection < Grape::Entity
      expose(:dbName, documentation: { type: String, desc: "Database name." }) do |collection|
        collection.db_name
      end
      expose :name, documentation: { type: String, desc: "Collection name." }
      expose :size, documentation: { type: Integer, desc: "Number of documents." }
    end

    # API representation of a single document.
    class Document < Grape::Entity
      expose(:id, documentation: { type: String, desc: "Document id." }) do |document|
        document.id.to_s
      end
      # Fixed option-name typo: Grape expects `documentation:`; the misspelled
      # `document:` key was silently ignored so :data went undocumented.
      expose :data, documentation: { type: Hash, desc: "Document" }
    end

    # TODO add docs
    class PagedDocuments < Grape::Entity
      expose :page
      expose :size
      expose(:totalPages) { |paged| paged.total_pages }
      expose :documents, using: Document
    end
  end
end
Add api docs.
class MongoBrowser::Api
  module Entities
    # API representation of a database summary.
    class Database < Grape::Entity
      expose :name, documentation: { type: String, desc: "Database name." }
      expose :size, documentation: { type: Integer, desc: "Database size in bytes." }
      expose :count, documentation: { type: Integer, desc: "Number of collections." }
    end

    # API representation of a collection summary.
    class Collection < Grape::Entity
      expose(:dbName, documentation: { type: String, desc: "Database name." }) do |collection|
        collection.db_name
      end
      expose :name, documentation: { type: String, desc: "Collection name." }
      expose :size, documentation: { type: Integer, desc: "Number of documents." }
    end

    # API representation of a single document.
    class Document < Grape::Entity
      expose(:id, documentation: { type: String, desc: "Document id." }) do |document|
        document.id.to_s
      end
      # Fixed option-name typo: Grape expects `documentation:`; the misspelled
      # `document:` key was silently ignored so :data went undocumented.
      expose :data, documentation: { type: Hash, desc: "Document" }
    end

    # API representation of one page of documents.
    class PagedDocuments < Grape::Entity
      expose :page, documentation: { type: Integer, desc: "Current page." }
      expose :size, documentation: { type: Integer, desc: "Total number of records." }
      expose(:totalPages, documentation: { type: Integer, desc: "Total number of pages" }) do |paged|
        paged.total_pages
      end
      expose :documents, using: Document
    end
  end
end
|
module MongoOplogBackup
  # Gem version (semantic versioning).
  VERSION = "0.0.2"
end
v0.0.3.
module MongoOplogBackup
  # Gem version (semantic versioning).
  VERSION = "0.0.3"
end
|
module Mumblr
  # A downloadable piece of content (photo or video URL) belonging to a post.
  class PostContent < Model
    include DataMapper::Resource

    property :id, Serial
    property :url, String
    property :retrieved_at, DateTime   # when the file was last downloaded

    belongs_to :post

    ###############
    # API Methods #
    ###############

    # Extract content URLs from a post API hash, dispatching on post type.
    def self.api_extract_from_post(post, post_hash)
      post_type = post_hash['type'].to_sym
      Model::logger.debug("Extracting content URLs from type: #{post_type}")
      case post_type
      when :photo
        api_extract_photos post, post_hash
      when :video
        api_extract_videos post, post_hash
      else
        Model::logger.debug("\tSkipping post type: #{post_type}")
      end
    end

    # Record one PostContent per photo, keyed by the original-size URL.
    def self.api_extract_photos(post, post_hash)
      post_hash['photos'].each do |photo|
        first_or_create({url: photo['original_size']['url'] }, {
          post_id: post.id
        })
      end
    end

    # Record a single PostContent for the post's video URL.
    def self.api_extract_videos(post, post_hash)
      first_or_create({ url: post_hash['video_url'] }, {
        post_id: post.id
      })
    end

    # FIXME Refactor this so the callbacks can be passed in
    # origin: 'likes' or whatever
    def download(directory, origin=nil)
      unless url
        Model::logger.warn("Skipping download for #{id} (no URL specified)")
        return
      end
      dest_path = File.join(directory, post.blog.name)
      dest_path = File.join(dest_path, origin) if origin
      # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
      FileUtils.mkdir_p(dest_path) unless File.exist?(dest_path)
      dest_path = File.join(dest_path, File.basename(url))
      # TODO check for identical file
      if File.exist?(dest_path)
        STDERR.puts("Skipping #{dest_path} (exists)")
        return
      end
      pbar = nil
      content_length = nil
      begin
        open(dest_path, 'wb') do |dest_file|
          open(url,
            content_length_proc: lambda {|t|
              content_length = t
              if t && 0 < t
                title = File.basename(url)
                pbar = ProgressBar.new(title, t)
                pbar.format = "%-15s %3d%% %s %s"
                pbar.file_transfer_mode
              end
            },
            progress_proc: lambda {|s|
              pbar.set s if pbar
              pbar.finish if pbar and s == content_length
            }) do |f|
            IO.copy_stream(f, dest_file)
            # Must assign through self.* — a bare `retrieved_at =` only
            # created a local variable, so the attribute was never persisted.
            self.retrieved_at = DateTime.now
            save
            full_dest = File.expand_path(dest_path)
            STDERR.puts("file://#{full_dest}")
          end
        end
      rescue
        STDERR.puts("error with #{url}")
      end
    end
  end
end
Finally, I figured out why retrieved_at wasn't updating (it created a
local variable - whoops)
module Mumblr
  # A downloadable piece of content (photo or video URL) belonging to a post.
  class PostContent < Model
    include DataMapper::Resource

    property :id, Serial
    property :url, String
    property :retrieved_at, DateTime   # when the file was last downloaded

    belongs_to :post

    ###############
    # API Methods #
    ###############

    # Extract content URLs from a post API hash, dispatching on post type.
    def self.api_extract_from_post(post, post_hash)
      post_type = post_hash['type'].to_sym
      Model::logger.debug("Extracting content URLs from type: #{post_type}")
      case post_type
      when :photo
        api_extract_photos post, post_hash
      when :video
        api_extract_videos post, post_hash
      else
        Model::logger.debug("\tSkipping post type: #{post_type}")
      end
    end

    # Record one PostContent per photo, keyed by the original-size URL.
    def self.api_extract_photos(post, post_hash)
      post_hash['photos'].each do |photo|
        first_or_create({url: photo['original_size']['url'] }, {
          post_id: post.id
        })
      end
    end

    # Record a single PostContent for the post's video URL.
    def self.api_extract_videos(post, post_hash)
      first_or_create({ url: post_hash['video_url'] }, {
        post_id: post.id
      })
    end

    # FIXME Refactor this so the callbacks can be passed in
    # origin: 'likes' or whatever
    def download(directory, origin=nil)
      unless url
        Model::logger.warn("Skipping download for #{id} (no URL specified)")
        return
      end
      dest_path = File.join(directory, post.blog.name)
      dest_path = File.join(dest_path, origin) if origin
      # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
      FileUtils.mkdir_p(dest_path) unless File.exist?(dest_path)
      dest_path = File.join(dest_path, File.basename(url))
      # TODO check for identical file
      if File.exist?(dest_path)
        STDERR.puts("Skipping #{dest_path} (exists)")
        return
      end
      pbar = nil
      content_length = nil
      begin
        open(dest_path, 'wb') do |dest_file|
          open(url,
            content_length_proc: lambda {|t|
              content_length = t
              if t && 0 < t
                title = File.basename(url)
                pbar = ProgressBar.new(title, t)
                pbar.format = "%-15s %3d%% %s %s"
                pbar.file_transfer_mode
              end
            },
            progress_proc: lambda {|s|
              pbar.set s if pbar
              pbar.finish if pbar and s == content_length
            }) do |f|
            IO.copy_stream(f, dest_file)
            self.retrieved_at = DateTime.now
            update_success = save
            unless update_success
              # The previous message interpolated nothing ("#{}"); identify
              # the record that failed to save instead.
              Model::logger.warn "Could not save retrieved_at for PostContent #{id}"
            end
            full_dest = File.expand_path(dest_path)
            STDERR.puts("file://#{full_dest}")
          end
        end
      rescue
        STDERR.puts("error with #{url}")
      end
    end
  end
end
|
# coding: utf-8
#* mysh/internal/actions/vars.rb -- The mysh internal variables commands.
module Mysh

  #* mysh/internal/actions/vars.rb -- The mysh internal variable commands.
  class VarsCommand < Action

    #The mysh variable parsing regex.
    VAR_EXP = %r{(?<name> [a-z][a-z0-9_]*){0}
                 (?<equals> =){0}
                 (?<value> \S.*){0}
                 \$ (\g<name> \s* (\g<equals> \s* \g<value>?)?)?}x

    #Setup an internal action.
    def initialize(name, description)
      #Use @var_name rather than @name: the Action superclass owns @name
      #(the command's display name) and clobbering it corrupted help output.
      @var_name = @equals = @value = nil
      super(name, description)
    end

    #Execute a command against the internal mysh variables.
    def process_command(input)
      match = VAR_EXP.match(input.raw)
      @var_name, @equals, @value = match[:name], match[:equals], match[:value]
      do_command
      :internal
    end

    #Do the actual work here: set, clear, show one, or show all variables.
    def do_command
      sym = @var_name.to_sym if @var_name

      if @value
        MNV[sym] = @value
      elsif @equals
        MNV[sym] = ""
      elsif @var_name
        puts "#{@var_name} = #{MNV.get_source(sym)}"
      else
        show_all_values
      end
    end

    #Display all variables neatly.
    def show_all_values
      puts (MNV.keys - ['$'.to_sym])
             .sort
             .map {|sym| ["$" + sym.to_s, MNV.get_source(sym)]}
             .format_mysh_bullets
    end
  end

  #The show command action object.
  desc = 'Set/query mysh variables. See ?$ for more.'
  VARS_COMMAND = VarsCommand.new('$<name>=value', desc)
  COMMANDS.add_action(VARS_COMMAND)
end
Fixed a bug in env vars that messed up help.
# coding: utf-8
#* mysh/internal/actions/vars.rb -- The mysh internal variables commands.
module Mysh

  #* mysh/internal/actions/vars.rb -- The mysh internal variable commands.
  class VarsCommand < Action

    #The mysh variable parsing regex.
    VAR_EXP = %r{(?<name> [a-z][a-z0-9_]*){0}
                 (?<equals> =){0}
                 (?<value> \S.*){0}
                 \$ (\g<name> \s* (\g<equals> \s* \g<value>?)?)?}x

    #Setup an internal action.
    def initialize(name, description)
      #Initialize @var_name (not @name — that belongs to Action and holds
      #the command's display name).
      @var_name = @equals = @value = nil
      super(name, description)
    end

    #Execute a command against the internal mysh variables.
    def process_command(input)
      match = VAR_EXP.match(input.raw)
      @var_name, @equals, @value = match[:name], match[:equals], match[:value]
      do_command
      :internal
    end

    #Do the actual work here: set, clear, show one, or show all variables.
    def do_command
      sym = @var_name.to_sym if @var_name

      if @value
        MNV[sym] = @value
      elsif @equals
        MNV[sym] = ""
      elsif @var_name
        #This branch must test @var_name (leftover `@name` after the rename
        #was always truthy — Action sets it — so a bare "$" never reached
        #show_all_values).
        puts "#{@var_name} = #{MNV.get_source(sym)}"
      else
        show_all_values
      end
    end

    #Display all variables neatly.
    def show_all_values
      puts (MNV.keys - ['$'.to_sym])
             .sort
             .map {|sym| ["$" + sym.to_s, MNV.get_source(sym)]}
             .format_mysh_bullets
    end
  end

  #The show command action object.
  desc = 'Set/query mysh variables. See ?$ for more.'
  VARS_COMMAND = VarsCommand.new('$<name>=value', desc)
  COMMANDS.add_action(VARS_COMMAND)
end
|
module NestedDb
  # Builds and persists a set of nested child records from form-style
  # attribute hashes (id / _destroy keys), mirroring Rails' nested attributes.
  class NestedInstances
    attr_accessor :parent,
                  :taxonomy,
                  :objects,
                  :destroy_ids,
                  :errors,
                  :reverse_association,
                  :association

    # parent  - the owning record
    # options - :taxonomy, :inverse_of, :association_name, :attributes
    def initialize(parent, options = {})
      # setup an array to hold the objects
      self.objects ||= []
      # setup an array to hold ids of those which should be deleted
      self.destroy_ids ||= []
      # set the parent
      self.parent = parent
      # set the taxonomy
      self.taxonomy = options[:taxonomy]
      # set the associations
      self.reverse_association = options[:inverse_of]
      self.association = options[:association_name]
      # loop through each attribute set to setup each object
      (options[:attributes] || {}).each do |i,attrs|
        attrs.symbolize_keys! unless attrs.kind_of?(ActiveSupport::HashWithIndifferentAccess)
        # pull out the ID (if present)
        existing_id = attrs.delete(:id)
        # if all attributes are blank, skip
        next if attrs.all? { |_, value| value.blank? }
        # if we have an ID
        if existing_id
          # find the existing object
          # NOTE(review): form params arrive as Strings while record ids may
          # be BSON/ObjectId — `==` can fail here; compare `to_s` forms.
          obj = parent.send(association).select { |o| o.id == existing_id }.first
          # ensure we have the object
          raise StandardError, "Existing ID: #{existing_id}, not found in association `#{association}`. Options are: #{parent.send(association).map(&:id).join(', ')}" unless obj
          # set the taxonomy
          obj.taxonomy = taxonomy
          # call extend
          obj.extend_based_on_taxonomy
          # set parent
          obj.send(reverse_association, parent)
          # if this is set to destroy
          self.destroy_ids << obj.id if attrs.delete(:_destroy)
        # don't setup a new field if it's set to be destroyed
        elsif !attrs.delete(:_destroy)
          # create the new object
          obj = parent.send(association).build
          # call extend
          obj.extend_based_on_taxonomy
          # set parent
          obj.send(reverse_association, parent)
        end
        # if we have an object
        if obj
          # update the attributes
          attrs.each { |k,v| obj.send("#{k.to_s}=", v) }
          # add this object to the set
          self.objects << obj
        end
      end
    end

    # Validate every nested object, collecting their errors into self.errors.
    # NOTE(review): the method's value is the result of `objects.each` (the
    # array itself), so this is always truthy regardless of errors — likely
    # meant to end with `errors.empty?`.
    def valid?
      # setup hash to store errors
      self.errors = {}
      # validate each object and
      # merge in any errors
      objects.each do |object|
        # if it's invalid
        unless object.valid?
          # loop through errors and append
          object.errors.each do |key,value|
            self.errors.merge!(key => value)
          end
        end
      end
      #
    end

    # allow one error, if it's on the association
    def valid_as_nested?
      valid? || (errors.length == 1 && errors.has_key?(reverse_association))
    end

    # save each of the objects, or delete where required
    def save
      objects.each do |object|
        # if this object has been saved, and it's marked for deletion
        if destroy_ids.include?(object.id)
          # delete it
          object.destroy
        else
          # update the parent
          object.send(reverse_association, parent) unless object.send(reverse_association).try(:persisted?)
          # save the object
          object.save
        end
      end
    end
  end
end
Updated association loading
module NestedDb
# Builds and persists a set of nested child records from form-style
# attribute hashes (id / _destroy keys), mirroring Rails' nested attributes.
class NestedInstances
  attr_accessor :parent,
                :taxonomy,
                :objects,
                :destroy_ids,
                :errors,
                :reverse_association,
                :association

  # parent  - the owning record
  # options - :taxonomy, :inverse_of, :association_name, :attributes
  def initialize(parent, options = {})
    # setup an array to hold the objects
    self.objects ||= []
    # setup an array to hold ids of those which should be deleted
    self.destroy_ids ||= []
    # set the parent
    self.parent = parent
    # set the taxonomy
    self.taxonomy = options[:taxonomy]
    # set the associations
    self.reverse_association = options[:inverse_of]
    self.association = options[:association_name]
    # loop through each attribute set to setup each object
    (options[:attributes] || {}).each do |i,attrs|
      attrs.symbolize_keys! unless attrs.kind_of?(ActiveSupport::HashWithIndifferentAccess)
      # pull out the ID (if present)
      existing_id = attrs.delete(:id)
      # if all attributes are blank, skip
      next if attrs.all? { |_, value| value.blank? }
      # if we have an ID
      if existing_id
        # find the existing object (compare string forms: params are Strings,
        # record ids may be ObjectIds)
        obj = parent.send(association).select { |o| existing_id.to_s == o.id.to_s }.first
        # ensure we have the object
        raise StandardError, "Existing ID: #{existing_id.to_s}, not found in association `#{association}`. Options are: #{parent.send(association).map(&:id).map(&:to_s).join(', ')}" unless obj
        # set the taxonomy
        obj.taxonomy = taxonomy
        # call extend
        obj.extend_based_on_taxonomy
        # set parent
        obj.send(reverse_association, parent)
        # if this is set to destroy
        self.destroy_ids << obj.id if attrs.delete(:_destroy)
      # don't setup a new field if it's set to be destroyed
      elsif !attrs.delete(:_destroy)
        # create the new object
        obj = parent.send(association).build
        # call extend
        obj.extend_based_on_taxonomy
        # set parent
        obj.send(reverse_association, parent)
      end
      # if we have an object
      if obj
        # update the attributes
        attrs.each { |k,v| obj.send("#{k.to_s}=", v) }
        # add this object to the set
        self.objects << obj
      end
    end
  end

  # Validate every nested object, collecting their errors into self.errors.
  # Returns true only when no nested object reported errors.
  def valid?
    # setup hash to store errors
    self.errors = {}
    # validate each object and
    # merge in any errors
    objects.each do |object|
      # if it's invalid
      unless object.valid?
        # loop through errors and append
        object.errors.each do |key,value|
          self.errors.merge!(key => value)
        end
      end
    end
    # Previously the result of `objects.each` (always truthy) was returned
    # by accident, so validation could never fail.
    errors.empty?
  end

  # allow one error, if it's on the association
  def valid_as_nested?
    valid? || (errors.length == 1 && errors.has_key?(reverse_association))
  end

  # save each of the objects, or delete where required
  def save
    objects.each do |object|
      # if this object has been saved, and it's marked for deletion
      if destroy_ids.include?(object.id)
        # delete it
        object.destroy
      else
        # update the parent
        object.send(reverse_association, parent) unless object.send(reverse_association).try(:persisted?)
        # save the object
        object.save
      end
    end
  end
end
end |
module Nucleon
  module Action
    module Plugin
      # CLI action that lists every loaded plugin grouped by namespace,
      # plugin type and provider.
      class List < CORL.plugin_class(:nucleon, :cloud_action)

        include Mixin::Action::Registration

        #-----------------------------------------------------------------------------
        # Info

        def self.describe
          super(:plugin, :list, 15)
        end

        #-----------------------------------------------------------------------------
        # Settings

        def configure
          super do
          end
        end

        #---

        def ignore
          node_ignore
        end

        def arguments
          []
        end

        #-----------------------------------------------------------------------------
        # Operations

        # Print the loaded-plugin report. (Removed the unused provider_info,
        # last_namespace and last_plugin_type locals — they were assigned but
        # never read.)
        def execute
          super do |node, network|
            ensure_network(network) do
              Nucleon.loaded_plugins.each do |namespace, plugins|
                info("------------------------------------------------------", { :i18n => false, :prefix => false })
                info(" Namespace: #{purple(namespace)}", { :i18n => false, :prefix => false })
                info("\n", { :i18n => false, :prefix => false })

                max_width = 10

                plugins.each do |type, providers|
                  info(" Plugin type: #{blue(type)}", { :i18n => false, :prefix => false })
                  info(" Providers:", { :i18n => false, :prefix => false })

                  # Widest provider name drives column alignment.
                  providers.keys.each do |name|
                    width = name.to_s.size
                    max_width = width if width > max_width
                  end
                  providers.each do |provider, plugin_info|
                    info(" #{sprintf("%-#{max_width + 10}s", green(provider))} - #{yellow(plugin_info[:file])}", { :i18n => false, :prefix => false })
                  end
                  info("\n", { :i18n => false, :prefix => false })
                end
              end
            end
          end
        end
      end
    end
  end
end
Removing unneeded registration mixin and ignore method definition in the list plugin action provider.
module Nucleon
  module Action
    module Plugin
      # CLI action that lists every loaded plugin grouped by namespace,
      # plugin type and provider.
      class List < CORL.plugin_class(:nucleon, :cloud_action)

        #-----------------------------------------------------------------------------
        # Info

        def self.describe
          super(:plugin, :list, 15)
        end

        #-----------------------------------------------------------------------------
        # Settings

        def configure
          super do
          end
        end

        #---

        def arguments
          []
        end

        #-----------------------------------------------------------------------------
        # Operations

        # Print the loaded-plugin report. (Removed the unused provider_info,
        # last_namespace and last_plugin_type locals — they were assigned but
        # never read.)
        def execute
          super do |node, network|
            ensure_network(network) do
              Nucleon.loaded_plugins.each do |namespace, plugins|
                info("------------------------------------------------------", { :i18n => false, :prefix => false })
                info(" Namespace: #{purple(namespace)}", { :i18n => false, :prefix => false })
                info("\n", { :i18n => false, :prefix => false })

                max_width = 10

                plugins.each do |type, providers|
                  info(" Plugin type: #{blue(type)}", { :i18n => false, :prefix => false })
                  info(" Providers:", { :i18n => false, :prefix => false })

                  # Widest provider name drives column alignment.
                  providers.keys.each do |name|
                    width = name.to_s.size
                    max_width = width if width > max_width
                  end
                  providers.each do |provider, plugin_info|
                    info(" #{sprintf("%-#{max_width + 10}s", green(provider))} - #{yellow(plugin_info[:file])}", { :i18n => false, :prefix => false })
                  end
                  info("\n", { :i18n => false, :prefix => false })
                end
              end
            end
          end
        end
      end
    end
  end
end
|
module OmniAuth
  module TrustAuth
    # Gem version (semantic versioning).
    VERSION = "0.0.5"
  end
end
Bumping version number.
module OmniAuth
  module TrustAuth
    # Gem version (semantic versioning).
    VERSION = "0.0.6"
  end
end
|
module OmniAuth
  module Vkontakte
    # Gem version (semantic versioning).
    VERSION = "1.4.0"
  end
end
bump version (#63)
module OmniAuth
  module Vkontakte
    # Gem version (semantic versioning).
    VERSION = "1.4.1"
  end
end
|
require 'omniauth-oauth2'
require 'multi_json'
module OmniAuth
  module Strategies
    # OmniAuth OAuth2 strategy for Login with Amazon.
    class Amazon < OmniAuth::Strategies::OAuth2
      option :name, 'amazon'

      option :client_options, {
        :site => 'https://www.amazon.com/',
        :authorize_url => 'https://www.amazon.com/ap/oa',
        :token_url => 'https://api.amazon.com/auth/o2/token'
      }

      option :access_token_options, {
        :mode => :query
      }

      option :authorize_params, {
        :scope => 'profile postal_code'
      }

      # Exchange the authorization code for an access token.
      def build_access_token
        token_params = {
          # Strip the query string: Amazon requires redirect_uri to match the
          # registered URL exactly, and callback_url can carry ?code=&state=.
          :redirect_uri => callback_url.split('?').first,
          :client_id => client.id,
          :client_secret => client.secret
        }
        verifier = request.params['code']
        client.auth_code.get_token(verifier, token_params)
      end

      uid { raw_info['Profile']['CustomerId'] }

      info do
        {
          'email' => raw_info['Profile']['PrimaryEmail'],
          'name' => raw_info['Profile']['Name']
        }
      end

      extra do
        {
          'postal_code' => raw_info['Profile']['PostalCode']
        }
      end

      # Fetch and memoize the user's Amazon profile.
      def raw_info
        access_token.options[:parse] = :json
        # This way is not working right now, do it the longer way
        # for the time being
        #
        #@raw_info ||= access_token.get('/ap/user/profile').parsed
        url = "/ap/user/profile"
        params = {:params => { :access_token => access_token.token}}
        @raw_info ||= access_token.client.request(:get, url, params).parsed
      end
    end
  end
end
Fix callback url
require 'omniauth-oauth2'
require 'multi_json'
module OmniAuth
  module Strategies
    # OmniAuth OAuth2 strategy for Login with Amazon.
    class Amazon < OmniAuth::Strategies::OAuth2
      option :name, 'amazon'

      option :client_options, {
        :site => 'https://www.amazon.com/',
        :authorize_url => 'https://www.amazon.com/ap/oa',
        :token_url => 'https://api.amazon.com/auth/o2/token'
      }

      option :access_token_options, {
        :mode => :query
      }

      option :authorize_params, {
        :scope => 'profile postal_code'
      }

      # Exchange the authorization code for an access token. The query string
      # is stripped from callback_url because Amazon requires redirect_uri to
      # match the registered URL exactly.
      def build_access_token
        token_params = {
          :redirect_uri => callback_url.split('?').first,
          :client_id => client.id,
          :client_secret => client.secret
        }
        verifier = request.params['code']
        client.auth_code.get_token(verifier, token_params)
      end

      uid { raw_info['Profile']['CustomerId'] }

      info do
        {
          'email' => raw_info['Profile']['PrimaryEmail'],
          'name' => raw_info['Profile']['Name']
        }
      end

      extra do
        {
          'postal_code' => raw_info['Profile']['PostalCode']
        }
      end

      # Fetch and memoize the user's Amazon profile.
      def raw_info
        access_token.options[:parse] = :json
        # This way is not working right now, do it the longer way
        # for the time being
        #
        #@raw_info ||= access_token.get('/ap/user/profile').parsed
        url = "/ap/user/profile"
        params = {:params => { :access_token => access_token.token}}
        @raw_info ||= access_token.client.request(:get, url, params).parsed
      end
    end
  end
end
|
require 'omniauth'
require 'omniauth/strategies/oauth'
module OmniAuth
  module Strategies
    # OmniAuth OAuth1 strategy for Fitbit.
    class Fitbit < OmniAuth::Strategies::OAuth
      option :name, "fitbit"

      option :client_options, {
        :site => 'http://api.fitbit.com',
        :request_token_path => '/oauth/request_token',
        :access_token_path => '/oauth/authorize',
        :authorize_path => '/oauth/oauth_allow'
      }

      uid do
        # The block's last expression is the uid; the trailing debug `puts`
        # made this block return nil, so every auth hash had a nil uid.
        request.params['encodedId']
      end

      info do
        {
          :display_name => raw_info #['user']['displayName']
        }
      end

      extra do
        {
          :raw_info => raw_info
        }
      end

      # Raw profile payload for the user identified by encodedId.
      def raw_info
        #@raw_info ||= MultiJson.load(access_token.get("http://api.fitbit.com/1/user/-/profile.json").body)
        @raw_info ||= access_token.get("http://api.fitbit.com/1/user/" + request.params['encodedId'] + "/profile.json").body
      end
    end
  end
end
Changed code for testing
require 'omniauth'
require 'omniauth/strategies/oauth'
module OmniAuth
module Strategies
# OmniAuth OAuth1 strategy for Fitbit.
class Fitbit < OmniAuth::Strategies::OAuth
  option :name, "fitbit"

  option :client_options, {
    :site => 'http://api.fitbit.com',
    :request_token_path => '/oauth/request_token',
    :access_token_path => '/oauth/authorize',
    :authorize_path => '/oauth/oauth_allow'
  }

  uid do
    # NOTE(review): uses the request's oauth_token as the uid — this looks
    # like a temporary testing choice (tokens are not stable user ids);
    # confirm before relying on it.
    request.params['oauth_token']
  end

  info do
    {
      :display_name => raw_info #['user']['displayName']
    }
  end

  extra do
    {
      :raw_info => raw_info
    }
  end

  # Raw body of the authenticated user's profile ("-" = current user).
  def raw_info
    #@raw_info ||= MultiJson.load(access_token.get("http://api.fitbit.com/1/user/-/profile.json").body)
    @raw_info ||= access_token.get("http://api.fitbit.com/1/user/-/profile.json").body
  end
end
end
end |
module Omniship
  module UPS
    module Track
      # A single package within a UPS tracking response.
      class Package < Omniship::Base
        DEPARTURE_CODE = "I"
        ARRIVAL_CODE = "D"

        # The package's tracking number text.
        def tracking_number
          @root.xpath('TrackingNumber/text()').to_s
        end

        # All activity entries, wrapped in Activity objects.
        def activity
          @root.xpath('Activity').map do |act|
            Activity.new(act)
          end
        end

        # Alternate tracking info for SurePost / Mail Innovations shipments,
        # or nil when neither is present. (Debug `puts` calls removed.)
        def alternate_tracking
          if !@root.xpath('PackageServiceOptions/USPSPICNumber').empty?
            # surepost
            path = @root.xpath('PackageServiceOptions/USPSPICNumber').text
          else
            # mail innovations
            path = @root.xpath('AlternateTrackingInfo')
          end
          AlternateTracking.new(path) if !path.empty?
        end

        # True once any departure ("I") activity is recorded.
        def has_left?
          activity.any? {|activity| activity.code == DEPARTURE_CODE }
        end

        # True once a delivery ("D") activity exists that was not a
        # post-office transfer.
        def has_arrived?
          activity.any? {|activity| activity.code == ARRIVAL_CODE && !activity.status.include?("transferred to post office")}
        end
      end
    end
  end
end
Remove debugging
module Omniship
module UPS
module Track
# Represents one <Package> node of a UPS tracking response.
class Package < Omniship::Base
DEPARTURE_CODE = "I"
ARRIVAL_CODE = "D"
# UPS tracking number for this package.
def tracking_number
@root.xpath('TrackingNumber/text()').to_s
end
# Every activity scan, wrapped in Activity objects.
def activity
@root.xpath('Activity').map { |node| Activity.new(node) }
end
# USPS alternate tracking (SurePost or Mail Innovations); nil when absent.
def alternate_tracking
pic_number = @root.xpath('PackageServiceOptions/USPSPICNumber')
if pic_number.empty?
# mail innovations
info = @root.xpath('AlternateTrackingInfo')
else
# surepost
info = pic_number.text
end
AlternateTracking.new(info) unless info.empty?
end
# Whether the package has a departure scan on record.
def has_left?
activity.any? { |scan| scan.code == DEPARTURE_CODE }
end
# Whether the package was delivered (post-office transfers excluded).
def has_arrived?
activity.any? do |scan|
scan.code == ARRIVAL_CODE && !scan.status.include?("transferred to post office")
end
end
end
end
end
end
|
# -*- coding: utf-8; -*-
# frozen_string_literal: true
# vim:set fileencoding=utf-8:
require 'optparse'
require 'ruby-units'
module OnZeroLoad
class Main
# Raised when a threshold value's unit cannot be converted to the
# unit the option expects (e.g. "5 kg" for a time threshold).
class IncompatibleUnit < OptionParser::InvalidArgument
def initialize(reason, *args)
super(*args) # splat: pass the remaining args individually, not as one array
@reason = reason
end
end
class MainOptParse < OnZeroLoad::Main
# Registers the plain flag options (help, version, ...).
def self.define_standard_options(parser, standards, options)
parser.separator("")
parser.separator("Standard options:")
parser.separator("")
standards.each do |long, more|
parser.on("-#{more[:short]}", "--#{long}", more[:desc]) do |value|
options[long] = value
end
end
parser
end
# Registers the threshold options; parsed values are converted to each
# option's canonical unit before being stored.
def self.define_threshold_options(parser, thresholds, options)
parser.separator("")
parser.separator("Threshold options:")
parser.separator("")
thresholds.each do |long, more|
desc = threshold_option_description(more)
parser.on("-#{more[:short]}", "--#{long}=#{more[:value]}", desc) do |value|
options[long] = threshold_option_value_to_unit(value, more[:unit])
end
end
parser
end
# Builds the help text for a threshold option, appending the unit and
# default value when present. Always returns the description string.
def self.threshold_option_description(more)
desc = more[:desc].dup
unit = more[:unit].units unless more[:unit].units.empty?
default = more[:unit] unless more[:unit].scalar == 1
# += instead of <<: string literals are frozen in this file
# (frozen_string_literal: true), so in-place append raises FrozenError.
desc += " (" if unit || default
desc += "in #{unit}" if unit
desc += ", " if unit && default
desc += "default #{default}" if default
desc += ")" if unit || default
desc # explicit return; a trailing modifier-if would return nil sometimes
end
# Converts a raw option value to the given unit. If the value lacks a
# denominator (e.g. "5 MB" for a "MB/s" rate), it is divided by the
# unit's denominator first. Raises IncompatibleUnit when no conversion
# is possible.
def self.threshold_option_value_to_unit(value, unit)
value = Unit.new(value)
unless value.compatible?(unit)
unit_numerator = Unit.new(unit.numerator.join)
if value.compatible?(unit_numerator)
unit_denominator = Unit.new(unit.denominator.join)
value = value / unit_denominator
end
end
unless value.compatible?(unit)
raise IncompatibleUnit.new("#{value} is not compatible to #{unit}")
end
value.convert_to(unit)
end
# Registers the predefined command options.
def self.define_command_options(parser, commands, options)
parser.separator("")
parser.separator("Predefined commands:")
parser.separator("")
commands.each do |long, more|
parser.on("-#{more[:short]}", "--#{long}",
"#{more[:desc]} ('#{more[:cmd].join(" ")}')") do |value|
options[long] = value
end
end
parser
end
# Assembles the complete OptionParser for the program.
def self.option_parser(options, standards, thresholds, commands)
OptionParser.new { |parser|
base = File.basename($0)
parser.version = "%s %s" % [ base, OnZeroLoad::VERSION ]
parser.banner = "Usage: %s [OPTION]... -- [COMMAND] [COMMAND OPTION]..." % [ base ]
parser.separator("")
parser.separator("Execute a command if the system load drops below given thresholds.")
self.define_standard_options(parser, standards, options)
self.define_threshold_options(parser, thresholds, options)
self.define_command_options(parser, commands, options)
}
end
# Parses the command line and returns the options hash. Prints parse
# errors and help/version output as side effects.
def self.parse(args = ARGV, standards, thresholds, commands)
options = {}
parser = self.option_parser(options, standards, thresholds, commands)
begin
options[:args] = parser.parse(args)
rescue OptionParser::ParseError => error
$stderr.puts "Error: #{error.message}."
$stderr.puts "Try --help for help."
end
if options[:help]
$stdout.puts parser
end
if options[:version]
$stdout.puts parser.version
end
# Only the last occurrence of a repeatable threshold option wins.
[:load, :cpu, :disk, :net, :input].each { |key|
options[key] = options[key].last if options[key].kind_of? Array
}
options
end
end
end
end
String.<< fails for frozen strings. Use String.+= instead.
# -*- coding: utf-8; -*-
# frozen_string_literal: true
# vim:set fileencoding=utf-8:
require 'optparse'
require 'ruby-units'
module OnZeroLoad
class Main
# Raised when a threshold value's unit cannot be converted to the
# unit the option expects (e.g. "5 kg" for a time threshold).
class IncompatibleUnit < OptionParser::InvalidArgument
def initialize(reason, *args)
super(*args) # splat: pass the remaining args individually, not as one array
@reason = reason
end
end
class MainOptParse < OnZeroLoad::Main
# Registers the plain flag options (help, version, ...).
def self.define_standard_options(parser, standards, options)
parser.separator("")
parser.separator("Standard options:")
parser.separator("")
standards.each do |long, more|
parser.on("-#{more[:short]}", "--#{long}", more[:desc]) do |value|
options[long] = value
end
end
parser
end
# Registers the threshold options; parsed values are converted to each
# option's canonical unit before being stored.
def self.define_threshold_options(parser, thresholds, options)
parser.separator("")
parser.separator("Threshold options:")
parser.separator("")
thresholds.each do |long, more|
desc = threshold_option_description(more)
parser.on("-#{more[:short]}", "--#{long}=#{more[:value]}", desc) do |value|
options[long] = threshold_option_value_to_unit(value, more[:unit])
end
end
parser
end
# Builds the help text for a threshold option, appending the unit and
# default value when present.
# Fixed: the method previously ended in `desc += ")" if unit || default`,
# which returned nil whenever there was neither a unit nor a default.
# It now always returns the description string.
def self.threshold_option_description(more)
desc = more[:desc].dup
unit = more[:unit].units unless more[:unit].units.empty?
default = more[:unit] unless more[:unit].scalar == 1
desc += " (" if unit || default
desc += "in #{unit}" if unit
desc += ", " if unit && default
desc += "default #{default}" if default
desc += ")" if unit || default
desc
end
# Converts a raw option value to the given unit. If the value lacks a
# denominator (e.g. "5 MB" for a "MB/s" rate), it is divided by the
# unit's denominator first. Raises IncompatibleUnit when no conversion
# is possible.
def self.threshold_option_value_to_unit(value, unit)
value = Unit.new(value)
unless value.compatible?(unit)
unit_numerator = Unit.new(unit.numerator.join)
if value.compatible?(unit_numerator)
unit_denominator = Unit.new(unit.denominator.join)
value = value / unit_denominator
end
end
unless value.compatible?(unit)
raise IncompatibleUnit.new("#{value} is not compatible to #{unit}")
end
value.convert_to(unit)
end
# Registers the predefined command options.
def self.define_command_options(parser, commands, options)
parser.separator("")
parser.separator("Predefined commands:")
parser.separator("")
commands.each do |long, more|
parser.on("-#{more[:short]}", "--#{long}",
"#{more[:desc]} ('#{more[:cmd].join(" ")}')") do |value|
options[long] = value
end
end
parser
end
# Assembles the complete OptionParser for the program.
def self.option_parser(options, standards, thresholds, commands)
OptionParser.new { |parser|
base = File.basename($0)
parser.version = "%s %s" % [ base, OnZeroLoad::VERSION ]
parser.banner = "Usage: %s [OPTION]... -- [COMMAND] [COMMAND OPTION]..." % [ base ]
parser.separator("")
parser.separator("Execute a command if the system load drops below given thresholds.")
self.define_standard_options(parser, standards, options)
self.define_threshold_options(parser, thresholds, options)
self.define_command_options(parser, commands, options)
}
end
# Parses the command line and returns the options hash. Prints parse
# errors and help/version output as side effects.
def self.parse(args = ARGV, standards, thresholds, commands)
options = {}
parser = self.option_parser(options, standards, thresholds, commands)
begin
options[:args] = parser.parse(args)
rescue OptionParser::ParseError => error
$stderr.puts "Error: #{error.message}."
$stderr.puts "Try --help for help."
end
if options[:help]
$stdout.puts parser
end
if options[:version]
$stdout.puts parser.version
end
# Only the last occurrence of a repeatable threshold option wins.
[:load, :cpu, :disk, :net, :input].each { |key|
options[key] = options[key].last if options[key].kind_of? Array
}
options
end
end
end
end
java_import org.lwjgl.opengl.GL11
java_import org.lwjgl.opengl.GL15
java_import org.lwjgl.opengl.GL20
java_import org.lwjgl.opengl.GL30
java_import org.lwjgl.opengl.GL32
java_import org.lwjgl.BufferUtils
java_import org.lwjgl.input.Keyboard
java_import org.lwjgl.util.vector.Matrix4f
java_import org.lwjgl.util.vector.Vector3f
require "opengl/gl_utils"
require "pry"
#
# Attempting to do a pyramid that is interactive.
#
class OpenGL::InteractivePyramid
include OpenGL::GLUtils
add_start
#position constants
RIGHT_EXTENT = 0.5
LEFT_EXTENT = -RIGHT_EXTENT
TOP_EXTENT = 0.5
BOTTOM_EXTENT = -TOP_EXTENT
FRONT_EXTENT = -3.0
REAR_EXTENT = -4.0
#colour constants
GREEN_COLOUR = [0.75, 0.75, 1.0, 1.0]
BLUE_COLOUR = [0.0, 0.5, 0.0, 1.0]
RED_COLOUR = [1.0, 0.0, 0.0, 1.0]
GREY_COLOUR = [0.8, 0.8, 0.8, 1.0]
BROWN_COLOUR = [0.5, 0.5, 0.0, 1.0]
# Constructor
def initialize
init_vertex_data
create_display("Interactive Pyramid");
#initialise the viewport
GL11.gl_viewport(0, 0, Display.width, Display.height)
init_program
init_vertex_buffer
init_vertex_array_objects
GL11.gl_enable(GL11::GL_CULL_FACE)
GL11.gl_cull_face(GL11::GL_BACK)
GL11.gl_front_face(GL11::GL_CW)
GL11.gl_enable(GL11::GL_DEPTH_TEST)
GL11.gl_depth_mask(true)
GL11.gl_depth_func(GL11::GL_LEQUAL)
GL11.gl_depth_range(0.0, 1.0)
@y_rotation = 0
@x_rotation = 0
@rotation_matrix = Matrix4f.new
@rotation_buffer = BufferUtils.create_float_buffer 16
render_loop do
input
display
end
destroy_display
end
#initialise the vertex buffer
def init_vertex_buffer
@vertex_buffer_id = GL15.gl_gen_buffers
GL15.gl_bind_buffer(GL15::GL_ARRAY_BUFFER, @vertex_buffer_id)
buffer = BufferUtils.create_float_buffer(@vertex_data.size).put(@vertex_data.to_java(:float)).flip
GL15.gl_buffer_data(GL15::GL_ARRAY_BUFFER, buffer, GL15::GL_STATIC_DRAW)
GL15.gl_bind_buffer(GL15::GL_ARRAY_BUFFER, 0)
@index_buffer_id = GL15.gl_gen_buffers
GL15.gl_bind_buffer(GL15::GL_ELEMENT_ARRAY_BUFFER, @index_buffer_id)
buffer = BufferUtils.create_short_buffer(@index_data.size).put(@index_data.to_java(:short)).flip
GL15.gl_buffer_data(GL15::GL_ELEMENT_ARRAY_BUFFER, buffer, GL15::GL_STATIC_DRAW)
GL15.gl_bind_buffer(GL15::GL_ELEMENT_ARRAY_BUFFER, 0)
end
# initialise the vertex array objects
def init_vertex_array_objects
#first object
@vao_id = GL30.gl_gen_vertex_arrays
GL30.gl_bind_vertex_array(@vao_id)
GL15.gl_bind_buffer(GL15::GL_ARRAY_BUFFER, @vertex_buffer_id)
GL20.gl_enable_vertex_attrib_array(0)
GL20.gl_enable_vertex_attrib_array(1)
GL20.gl_vertex_attrib_pointer(0, 3, GL11::GL_FLOAT, false, 0, 0)
GL20.gl_vertex_attrib_pointer(1, 4, GL11::GL_FLOAT, false, 0, 5 * 3 * FLOAT_SIZE)
GL15.gl_bind_buffer(GL15::GL_ELEMENT_ARRAY_BUFFER, @index_buffer_id)
GL30.gl_bind_vertex_array(0)
end
#
# Manage the input for this program
#
def input
@y_rotation -= 0.01 if Keyboard.is_key_down Keyboard::KEY_LEFT
@y_rotation += 0.01 if Keyboard.is_key_down Keyboard::KEY_RIGHT
@x_rotation -= 0.01 if Keyboard.is_key_down Keyboard::KEY_UP
@x_rotation += 0.01 if Keyboard.is_key_down Keyboard::KEY_DOWN
calc_rotation
end
# render a frame
def display
#set the colour to clear.
GL11.gl_clear_color(0.0, 0.0, 0.0, 0.0)
#clear the buffer. Remember that Java static types come back as Ruby Constants.
GL11.gl_clear(GL11::GL_COLOR_BUFFER_BIT | GL11::GL_DEPTH_BUFFER_BIT)
GL20.gl_use_program(@program_id)
GL20.gl_uniform_matrix4(@transform_matrix_location, false, @rotation_buffer)
GL30.gl_bind_vertex_array(@vao_id)
GL11.gl_draw_elements(GL11::GL_TRIANGLES, @index_data.size, GL11::GL_UNSIGNED_SHORT, 0)
#cleanup
GL30.gl_bind_vertex_array(0)
GL20.gl_use_program(0)
end
# initialise the program
def init_program
@program_id = compile_program('perspective_matrix_vertex_basic.glsl', 'colour_passthrough.glsl')
@perspective_matrix_location = GL20.gl_get_uniform_location(@program_id, "cameraToClipMatrix")
@transform_matrix_location = GL20.gl_get_uniform_location(@program_id, "modelToCameraMatrix")
#set up the perspective matrix
z_near = 1.0
z_far = 10.0
@frustrum_scale = calculate_frustum_scale(45.0)
perspective_matrix_buffer = BufferUtils.create_float_buffer(16);
perspective_matrix = Matrix4f.new
perspective_matrix.m00 = @frustrum_scale
perspective_matrix.m11 = @frustrum_scale
perspective_matrix.m22 = (z_far + z_near) / (z_near - z_far)
perspective_matrix.m32 = (2.0 * z_far * z_near) / (z_near - z_far)
perspective_matrix.m23 = -1.0
# make sure to make this 0, as this is an identity matrix to start.
perspective_matrix.m33 = 0.0
puts perspective_matrix.to_s
perspective_matrix.store(perspective_matrix_buffer)
GL20.gl_use_program(@program_id)
GL20.gl_uniform_matrix4(@perspective_matrix_location, false, perspective_matrix_buffer.flip)
GL20.gl_use_program(0)
end
# calculate the frustrum scale
# @param [Float] angle in degrees.
def calculate_frustum_scale(angle)
return (1.0 / Math.tan((angle * (Math::PI / 180)) / 2.0))
end
#initialise the vertex data
# Builds the pyramid's vertex positions + per-vertex colours (flattened into
# one float array) and the triangle index list.
def init_vertex_data
@vertex_data = [
#pyramid positions
# -- bottom square
LEFT_EXTENT, BOTTOM_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, BOTTOM_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
LEFT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
# -- top position
((LEFT_EXTENT + RIGHT_EXTENT)/2), TOP_EXTENT, ((FRONT_EXTENT + REAR_EXTENT)/2),
#Colours
RED_COLOUR,
GREEN_COLOUR,
RED_COLOUR,
BLUE_COLOUR,
RED_COLOUR
]
# BUG FIX: `puts "Vertex Data", #{@vertex_data}` ended in a comment, so the
# trailing comma silently made the NEXT statement (`@vertex_data.flatten!`)
# the second argument of puts. Interpolate inside the string instead.
puts "Vertex Data: #{@vertex_data}"
#flatten out all the colours.
@vertex_data.flatten!
@index_data = [
0, 4, 1, #front
3, 4, 0, #left
2, 1, 4, #right
3, 2, 4, #rear
#bottom
0, 1, 3,
1, 2, 3
]
end
=begin
m00[0] m10[4] m20[8] m30[12]
m01[1] m11[5] m21[9] m31[13]
m02[2] m12[6] m22[10] m32[14]
m03[3] m13[7] m23[11] m33[15]
=end
# calculate the rotations
def calc_rotation
#translate in on the z axis
z = (@vertex_data[14])
sin_y = Math.sin(@y_rotation)
cos_y = Math.cos(@y_rotation)
sin_x = Math.sin(@x_rotation)
cos_x = Math.cos(@x_rotation)
# translate in, rotate x and y, then translate out.
@rotation_matrix.m00 = cos_y
@rotation_matrix.m20 = sin_y
@rotation_matrix.m30 = -sin_y * z
@rotation_matrix.m01 = sin_x * sin_y
@rotation_matrix.m11 = cos_x
@rotation_matrix.m21 = (-sin_x) * cos_y
@rotation_matrix.m31 = sin_x * cos_y * z
@rotation_matrix.m02 = (-cos_x) * sin_y
@rotation_matrix.m12 = sin_x
@rotation_matrix.m22 = cos_x * cos_y
@rotation_matrix.m32 = z - (cos_x * cos_y * z)
@rotation_matrix.store(@rotation_buffer)
@rotation_buffer.flip
end
end
Aspect ratio correction.
java_import org.lwjgl.opengl.GL11
java_import org.lwjgl.opengl.GL15
java_import org.lwjgl.opengl.GL20
java_import org.lwjgl.opengl.GL30
java_import org.lwjgl.opengl.GL32
java_import org.lwjgl.BufferUtils
java_import org.lwjgl.input.Keyboard
java_import org.lwjgl.util.vector.Matrix4f
java_import org.lwjgl.util.vector.Vector3f
require "opengl/gl_utils"
require "pry"
#
# Attempting to do a pyramid that is interactive.
#
class OpenGL::InteractivePyramid
include OpenGL::GLUtils
add_start
#position constants
RIGHT_EXTENT = 0.5
LEFT_EXTENT = -RIGHT_EXTENT
TOP_EXTENT = 0.5
BOTTOM_EXTENT = -TOP_EXTENT
FRONT_EXTENT = -3.0
REAR_EXTENT = -4.0
#colour constants
GREEN_COLOUR = [0.75, 0.75, 1.0, 1.0]
BLUE_COLOUR = [0.0, 0.5, 0.0, 1.0]
RED_COLOUR = [1.0, 0.0, 0.0, 1.0]
GREY_COLOUR = [0.8, 0.8, 0.8, 1.0]
BROWN_COLOUR = [0.5, 0.5, 0.0, 1.0]
# Constructor
def initialize
init_vertex_data
create_display("Interactive Pyramid");
#initialise the viewport
GL11.gl_viewport(0, 0, Display.width, Display.height)
init_program
init_vertex_buffer
init_vertex_array_objects
GL11.gl_enable(GL11::GL_CULL_FACE)
GL11.gl_cull_face(GL11::GL_BACK)
GL11.gl_front_face(GL11::GL_CW)
GL11.gl_enable(GL11::GL_DEPTH_TEST)
GL11.gl_depth_mask(true)
GL11.gl_depth_func(GL11::GL_LEQUAL)
GL11.gl_depth_range(0.0, 1.0)
@y_rotation = 0
@x_rotation = 0
@rotation_matrix = Matrix4f.new
@rotation_buffer = BufferUtils.create_float_buffer 16
render_loop do
input
display
end
destroy_display
end
#initialise the vertex buffer
def init_vertex_buffer
@vertex_buffer_id = GL15.gl_gen_buffers
GL15.gl_bind_buffer(GL15::GL_ARRAY_BUFFER, @vertex_buffer_id)
buffer = BufferUtils.create_float_buffer(@vertex_data.size).put(@vertex_data.to_java(:float)).flip
GL15.gl_buffer_data(GL15::GL_ARRAY_BUFFER, buffer, GL15::GL_STATIC_DRAW)
GL15.gl_bind_buffer(GL15::GL_ARRAY_BUFFER, 0)
@index_buffer_id = GL15.gl_gen_buffers
GL15.gl_bind_buffer(GL15::GL_ELEMENT_ARRAY_BUFFER, @index_buffer_id)
buffer = BufferUtils.create_short_buffer(@index_data.size).put(@index_data.to_java(:short)).flip
GL15.gl_buffer_data(GL15::GL_ELEMENT_ARRAY_BUFFER, buffer, GL15::GL_STATIC_DRAW)
GL15.gl_bind_buffer(GL15::GL_ELEMENT_ARRAY_BUFFER, 0)
end
# initialise the vertex array objects
def init_vertex_array_objects
#first object
@vao_id = GL30.gl_gen_vertex_arrays
GL30.gl_bind_vertex_array(@vao_id)
GL15.gl_bind_buffer(GL15::GL_ARRAY_BUFFER, @vertex_buffer_id)
GL20.gl_enable_vertex_attrib_array(0)
GL20.gl_enable_vertex_attrib_array(1)
GL20.gl_vertex_attrib_pointer(0, 3, GL11::GL_FLOAT, false, 0, 0)
GL20.gl_vertex_attrib_pointer(1, 4, GL11::GL_FLOAT, false, 0, 5 * 3 * FLOAT_SIZE)
GL15.gl_bind_buffer(GL15::GL_ELEMENT_ARRAY_BUFFER, @index_buffer_id)
GL30.gl_bind_vertex_array(0)
end
#
# Manage the input for this program
#
def input
@y_rotation -= 0.01 if Keyboard.is_key_down Keyboard::KEY_LEFT
@y_rotation += 0.01 if Keyboard.is_key_down Keyboard::KEY_RIGHT
@x_rotation -= 0.01 if Keyboard.is_key_down Keyboard::KEY_UP
@x_rotation += 0.01 if Keyboard.is_key_down Keyboard::KEY_DOWN
calc_rotation
end
# render a frame
def display
#set the colour to clear.
GL11.gl_clear_color(0.0, 0.0, 0.0, 0.0)
#clear the buffer. Remember that Java static types come back as Ruby Constants.
GL11.gl_clear(GL11::GL_COLOR_BUFFER_BIT | GL11::GL_DEPTH_BUFFER_BIT)
GL20.gl_use_program(@program_id)
GL20.gl_uniform_matrix4(@transform_matrix_location, false, @rotation_buffer)
GL30.gl_bind_vertex_array(@vao_id)
GL11.gl_draw_elements(GL11::GL_TRIANGLES, @index_data.size, GL11::GL_UNSIGNED_SHORT, 0)
#cleanup
GL30.gl_bind_vertex_array(0)
GL20.gl_use_program(0)
end
# initialise the program
def init_program
@program_id = compile_program('perspective_matrix_vertex_basic.glsl', 'colour_passthrough.glsl')
@perspective_matrix_location = GL20.gl_get_uniform_location(@program_id, "cameraToClipMatrix")
@transform_matrix_location = GL20.gl_get_uniform_location(@program_id, "modelToCameraMatrix")
#set up the perspective matrix
z_near = 1.0
z_far = 10.0
@frustrum_scale = calculate_frustum_scale(45.0)
perspective_matrix_buffer = BufferUtils.create_float_buffer(16);
perspective_matrix = Matrix4f.new
perspective_matrix.m00 = (@frustrum_scale / (Display.width.fdiv(Display.height))) #aspect ratio. use fdiv as height and width are ints.
perspective_matrix.m11 = @frustrum_scale
perspective_matrix.m22 = (z_far + z_near) / (z_near - z_far)
perspective_matrix.m32 = (2.0 * z_far * z_near) / (z_near - z_far)
perspective_matrix.m23 = -1.0
# make sure to make this 0, as this is an identity matrix to start.
perspective_matrix.m33 = 0.0
puts perspective_matrix.to_s
perspective_matrix.store(perspective_matrix_buffer)
GL20.gl_use_program(@program_id)
GL20.gl_uniform_matrix4(@perspective_matrix_location, false, perspective_matrix_buffer.flip)
GL20.gl_use_program(0)
end
# calculate the frustrum scale
# @param [Float] angle in degrees.
def calculate_frustum_scale(angle)
return (1.0 / Math.tan((angle * (Math::PI / 180)) / 2.0))
end
#initialise the vertex data
# Builds the pyramid's vertex positions + per-vertex colours (flattened into
# one float array) and the triangle index list.
def init_vertex_data
  @vertex_data = [
  #pyramid positions
  # -- bottom square
  LEFT_EXTENT, BOTTOM_EXTENT, FRONT_EXTENT,
  RIGHT_EXTENT, BOTTOM_EXTENT, FRONT_EXTENT,
  RIGHT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
  LEFT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
  # -- top position
  ((LEFT_EXTENT + RIGHT_EXTENT)/2), TOP_EXTENT, ((FRONT_EXTENT + REAR_EXTENT)/2),
  #Colours
  RED_COLOUR,
  GREEN_COLOUR,
  RED_COLOUR,
  BLUE_COLOUR,
  RED_COLOUR
  ]
  # BUG FIX: `puts "Vertex Data", #{@vertex_data}` ended in a comment, so the
  # trailing comma silently made the NEXT statement (`@vertex_data.flatten!`)
  # the second argument of puts. Interpolate inside the string instead.
  puts "Vertex Data: #{@vertex_data}"
  #flatten out all the colours.
  @vertex_data.flatten!
  @index_data = [
  0, 4, 1, #front
  3, 4, 0, #left
  2, 1, 4, #right
  3, 2, 4, #rear
  #bottom
  0, 1, 3,
  1, 2, 3
  ]
end
=begin
m00[0] m10[4] m20[8] m30[12]
m01[1] m11[5] m21[9] m31[13]
m02[2] m12[6] m22[10] m32[14]
m03[3] m13[7] m23[11] m33[15]
=end
# calculate the rotations
def calc_rotation
#translate in on the z axis
z = (@vertex_data[14])
sin_y = Math.sin(@y_rotation)
cos_y = Math.cos(@y_rotation)
sin_x = Math.sin(@x_rotation)
cos_x = Math.cos(@x_rotation)
# translate in, rotate x and y, then translate out.
@rotation_matrix.m00 = cos_y
@rotation_matrix.m20 = sin_y
@rotation_matrix.m30 = -sin_y * z
@rotation_matrix.m01 = sin_x * sin_y
@rotation_matrix.m11 = cos_x
@rotation_matrix.m21 = (-sin_x) * cos_y
@rotation_matrix.m31 = sin_x * cos_y * z
@rotation_matrix.m02 = (-cos_x) * sin_y
@rotation_matrix.m12 = sin_x
@rotation_matrix.m22 = cos_x * cos_y
@rotation_matrix.m32 = z - (cos_x * cos_y * z)
@rotation_matrix.store(@rotation_buffer)
@rotation_buffer.flip
end
end |
# This file is copied to spec/ when you run 'rails generate rspec:install'
if ENV['coverage'] == 'on'
require 'simplecov'
SimpleCov.start 'rails' do
minimum_coverage 100
end
end
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'shoulda/matchers'
require "email_spec"
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# Use the new rspec expect syntax
config.expect_with :rspec do |c|
c.syntax = :expect
end
config.mock_with :rspec do |mocks|
mocks.syntax = :expect
mocks.verify_partial_doubles = true
end
config.order = "random"
config.render_views
config.include FactoryGirl::Syntax::Methods
config.include EmailSpec::Helpers
config.include EmailSpec::Matchers
config.filter_run focus: true
config.run_all_when_everything_filtered = true
# If true, the base class of anonymous controllers will be inferred
# automatically. This will be the default behavior in future versions of
# rspec-rails.
config.infer_base_class_for_anonymous_controllers = false
end
Add infer_spec_type_from_file_location to spec_helper
# This file is copied to spec/ when you run 'rails generate rspec:install'
if ENV['coverage'] == 'on'
require 'simplecov'
SimpleCov.start 'rails' do
minimum_coverage 100
end
end
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'shoulda/matchers'
require "email_spec"
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# Use the new rspec expect syntax
config.expect_with :rspec do |c|
c.syntax = :expect
end
config.mock_with :rspec do |mocks|
mocks.syntax = :expect
mocks.verify_partial_doubles = true
end
config.infer_spec_type_from_file_location!
config.order = "random"
config.render_views
config.include FactoryGirl::Syntax::Methods
config.include EmailSpec::Helpers
config.include EmailSpec::Matchers
config.filter_run focus: true
config.run_all_when_everything_filtered = true
# If true, the base class of anonymous controllers will be inferred
# automatically. This will be the default behavior in future versions of
# rspec-rails.
config.infer_base_class_for_anonymous_controllers = false
end
|
copy_static_file 'config/initializers/rack_timeout.rb'
git add: 'config/initializers/rack_timeout.rb'
git_commit 'Add Rack::Timeout configuration.'
Add an insertion into config.ru to configure rack-timeout in the application middleware.
rack_timeout = <<RACK_TIMEOUT
use Rack::Timeout
RACK_TIMEOUT
in_root do
inject_into_file 'config.ru', rack_timeout, {after: "require ::File.expand_path('../config/environment', __FILE__)", verbose: false}
end
git add: 'config.ru'
copy_static_file 'config/initializers/rack_timeout.rb'
git add: 'config/initializers/rack_timeout.rb'
git_commit 'Add Rack::Timeout configuration.'
|
module PaysonAPI
module Request
# Builds the parameter hash for a Payson "initialize payment" API request.
class Payment
attr_accessor :return_url, :cancel_url, :ipn_url, :memo, :sender, :receivers,
:locale, :currency, :tracking_id, :invoice_fee, :order_items, :fundings,
:fees_payer, :guarantee_offered
def initialize(return_url, cancel_url, ipn_url, memo, sender, receivers)
@return_url = return_url
@cancel_url = cancel_url
@ipn_url = ipn_url
@memo = memo
@sender = sender
@receivers = receivers
end
# Serializes the payment to the flat string-keyed hash expected by the API:
# mandatory fields first, then any optional fields that have been assigned.
def to_hash
{}.tap do |hash|
# Append mandatory params
hash['returnUrl'] = @return_url
hash['cancelUrl'] = @cancel_url
hash['memo'] = @memo
hash.merge!(@sender.to_hash)
hash.merge!(Receiver.to_hash(@receivers))
# Append optional params
append_locale(hash, @locale) if @locale
append_currency(hash, @currency) if @currency
append_fees_payer(hash, @fees_payer) if @fees_payer
append_guarantee(hash, @guarantee_offered) if @guarantee_offered
hash.merge!(OrderItem.to_hash(@order_items)) if @order_items
hash.merge!(Funding.to_hash(@fundings)) if @fundings
hash['ipnNotificationUrl'] = @ipn_url if @ipn_url
hash['invoiceFee'] = @invoice_fee if @invoice_fee
hash['trackingId'] = @tracking_id if @tracking_id
end
end
private
# Validates and appends the locale code; raises on unknown locales.
def append_locale(hash, locale)
raise "Unknown locale: #{locale}" if !LOCALES.include?(locale)
hash['localeCode'] = locale
end
# Validates and appends the currency code; raises on unknown currencies.
def append_currency(hash, currency)
raise "Unknown currency: #{currency}" if !CURRENCIES.include?(currency)
hash['currencyCode'] = currency
end
# Validates and appends the guarantee offering; raises on unknown values.
def append_guarantee(hash, guarantee_offered)
if !GUARANTEE_OFFERINGS.include?(guarantee_offered)
raise "Unknown guarantee offering: #{guarantee_offered}"
end
hash['guaranteeOffered'] = guarantee_offered
end
# Validates and appends the fees payer; raises on unknown values.
def append_fees_payer(hash, fees_payer)
if !FEES_PAYERS.include?(fees_payer)
raise "Unknown fees payer: #{fees_payer}"
end
# fees_payer is guaranteed truthy here (guarded at the call site),
# so the redundant trailing `if fees_payer` was removed.
hash['feesPayer'] = fees_payer
end
end
end
end
fees_payer is clearly set at this point, no need to check
module PaysonAPI
module Request
# Represents an outgoing Payson payment request and its serialization.
class Payment
attr_accessor :return_url, :cancel_url, :ipn_url, :memo, :sender, :receivers,
:locale, :currency, :tracking_id, :invoice_fee, :order_items, :fundings,
:fees_payer, :guarantee_offered
def initialize(return_url, cancel_url, ipn_url, memo, sender, receivers)
@return_url = return_url
@cancel_url = cancel_url
@ipn_url = ipn_url
@memo = memo
@sender = sender
@receivers = receivers
end
# Builds the flat string-keyed API parameter hash: mandatory fields
# first, then any optional fields that have been assigned.
def to_hash
hash = {}
# Append mandatory params
hash['returnUrl'] = @return_url
hash['cancelUrl'] = @cancel_url
hash['memo'] = @memo
hash.merge!(@sender.to_hash)
hash.merge!(Receiver.to_hash(@receivers))
# Append optional params
append_locale(hash, @locale) if @locale
append_currency(hash, @currency) if @currency
append_fees_payer(hash, @fees_payer) if @fees_payer
append_guarantee(hash, @guarantee_offered) if @guarantee_offered
hash.merge!(OrderItem.to_hash(@order_items)) if @order_items
hash.merge!(Funding.to_hash(@fundings)) if @fundings
hash['ipnNotificationUrl'] = @ipn_url if @ipn_url
hash['invoiceFee'] = @invoice_fee if @invoice_fee
hash['trackingId'] = @tracking_id if @tracking_id
hash
end
private
# Validates and appends the locale code; raises on unknown locales.
def append_locale(hash, locale)
raise "Unknown locale: #{locale}" unless LOCALES.include?(locale)
hash['localeCode'] = locale
end
# Validates and appends the currency code; raises on unknown currencies.
def append_currency(hash, currency)
raise "Unknown currency: #{currency}" unless CURRENCIES.include?(currency)
hash['currencyCode'] = currency
end
# Validates and appends the guarantee offering; raises on unknown values.
def append_guarantee(hash, guarantee_offered)
unless GUARANTEE_OFFERINGS.include?(guarantee_offered)
raise "Unknown guarantee offering: #{guarantee_offered}"
end
hash['guaranteeOffered'] = guarantee_offered
end
# Validates and appends the fees payer; raises on unknown values.
def append_fees_payer(hash, fees_payer)
unless FEES_PAYERS.include?(fees_payer)
raise "Unknown fees payer: #{fees_payer}"
end
hash['feesPayer'] = fees_payer
end
end
end
end
|
# Namespace for the percona_migrations gem.
module PerconaMigrations
# Gem version; referenced from the gemspec. Bump on release.
VERSION = "0.0.1"
end
bump version
# Namespace for the percona_migrations gem.
module PerconaMigrations
# Gem version; referenced from the gemspec. Bump on release.
VERSION = "0.0.2"
end
|
require "pincers/support/cookie"
module Pincers::Support
# In-memory cookie store implementing the matching and storage rules of
# RFC 6265 (sections referenced inline).
class CookieJar
BAD_VALUE_CHARS = /([\x00-\x20\x7F",;\\])/ # RFC 6265 - 4.1.1
attr_reader :cookies
def initialize(_cookies=nil)
@cookies = _cookies || []
end
# Shallow copy: the cookies array is cloned but the Cookie objects are
# shared between the two jars.
def copy
self.class.new @cookies.clone
end
# Returns the cookie named _name applicable to _url, or nil.
def get(_url, _name)
for_origin(URI.parse(_url)).find { |c| c.name == _name }
end
# Stores a cookie from a parts hash; :domain, :name and :value are
# mandatory (ArgumentError otherwise). Returns self.
def set(_parts)
raise ArgumentError, "domain is required" unless _parts[:domain]
raise ArgumentError, "name is required" unless _parts[:name]
raise ArgumentError, "value is required" unless _parts[:value]
cookie = Cookie.new(
_parts[:name],
_parts[:value],
_parts[:domain].gsub(/^\./,''),
_parts[:path] || '/',
_parts[:expires],
_parts[:secure]
)
replace_cookie cookie
self
end
# Stores a cookie from a raw Set-Cookie header value received from _uri.
# Returns the stored cookie, or nil when its domain/path are rejected.
def set_raw(_uri, _raw)
cookie = decode_cookie _raw
cookie.domain = compute_domain cookie.domain, _uri.host
cookie.path = compute_path cookie.path, _uri.path
return nil if cookie.domain.nil? or cookie.path.nil?
replace_cookie(cookie)
cookie
end
# All cookies whose domain and path match the given request URI.
def for_origin(_uri)
@cookies.select do |c|
domains_match c.domain, _uri.host and paths_match c.path, _uri.path
end
end
# Serializes the matching cookies as a Cookie request-header value.
def for_origin_as_header(_uri)
for_origin(_uri).map { |c| "#{c.name}=#{quote(c.value)}" }.join('; ')
end
private
# Parses a raw Set-Cookie header string into a Cookie.
# Only domain/path/expires/secure attributes are honored (see the
# commented-out branches for the ignored ones).
def decode_cookie(_raw)
# taken from WEBrick implementation
cookie_elem = _raw.split(/;/)
first_elem = cookie_elem.shift
first_elem.strip!
key, value = first_elem.split(/\=/, 2)
cookie = Cookie.new(key, dequote(value))
cookie_elem.each do |pair|
pair.strip!
key, value = pair.split(/\=/, 2)
value = dequote(value.strip) if value
case key.downcase
when "domain" then cookie.domain = value.downcase
when "path" then cookie.path = value.downcase
when "expires" then cookie.expires = value
# when "max-age" then cookie.max_age = Integer(value)
# when "comment" then cookie.comment = value
# when "version" then cookie.version = Integer(value)
when "secure" then cookie.secure = true
end
end
cookie
end
# True when the request host equals the cookie domain or is a subdomain of it.
def domains_match(_cookie_domain, _request_domain)
# RFC 6265 - 5.1.3
# TODO: ensure request domain is not an IP
return true if _cookie_domain == _request_domain
return true if _request_domain.end_with? ".#{_cookie_domain}"
return false
end
# True when the request path equals the cookie path or lies beneath it.
def paths_match(_cookie_path, _request_path)
# RFC 6265 - 5.1.4
_request_path = '/' if _request_path.empty?
return true if _cookie_path == _request_path
return true if _cookie_path[-1] == '/' and _request_path.start_with? _cookie_path
return true if _request_path.start_with? "#{_cookie_path}/"
return false
end
# Resolves the effective cookie domain; nil means "reject this cookie".
def compute_domain(_cookie_domain, _request_domain)
return _request_domain if _cookie_domain.nil?
# cookies with different domain are discarded
return nil unless _cookie_domain.end_with? _request_domain
return _cookie_domain.gsub(/^\./,'') # remove leading dot
end
# Resolves the effective cookie path; nil means "reject this cookie".
def compute_path(_cookie_path, _request_path)
default_path = compute_default_path(_request_path)
return default_path if _cookie_path.nil?
return nil unless _cookie_path.start_with? default_path
return _cookie_path
end
# Default path = request path up to (and including) its last slash.
def compute_default_path(_request_path)
# RFC 6265 - 5.1.4
return '/' unless _request_path[0] === '/'
ls_idx = _request_path.rindex('/')
return '/' unless ls_idx > 0
_request_path[0..ls_idx]
end
# Replaces an equivalent stored cookie in place, or appends a new one.
def replace_cookie(_cookie)
@cookies.each_with_index do |cookie, i|
if equivalent(cookie, _cookie)
@cookies[i] = _cookie
return
end
end
@cookies << _cookie
end
# Strips surrounding double quotes and unescapes backslash sequences.
def dequote(_str)
# taken from WEBrick implementation
ret = (/\A"(.*)"\Z/ =~ _str) ? $1 : _str.dup
ret.gsub!(/\\(.)/, "\\1")
ret
end
# Wraps a value in double quotes when it contains characters that need it.
# NOTE(review): the replacement "\\\1" parses as a literal backslash
# followed by the octal escape \1 (character 0x01), NOT as the \1
# backreference — so quotes/backslashes are likely escaped incorrectly;
# compare with the http-cookie gem's implementation. TODO confirm.
def quote(_str)
# taken from WEBrick implementation and the http-cookie gem
return _str unless BAD_VALUE_CHARS === _str
'"' << _str.gsub(/[\\\"]/o, "\\\1") << '"'
end
# Cookies are equivalent (and thus replace each other) when domain,
# path and name all match.
def equivalent(_cookie_a, _cookie_b)
return false unless _cookie_a.domain == _cookie_b.domain
return false unless _cookie_a.path == _cookie_b.path
return false unless _cookie_a.name == _cookie_b.name
return true
end
end
end
refactor(cookie_jar): makes set return nil on failure and cookie on success
require "pincers/support/cookie"
module Pincers::Support
# In-memory cookie store implementing RFC 6265 storage/matching semantics.
# Parsing/quoting helpers are adapted from WEBrick and the http-cookie gem.
class CookieJar
  # Characters that cannot appear bare in a cookie value (RFC 6265 - 4.1.1);
  # values matching this are wrapped in double quotes by #quote.
  BAD_VALUE_CHARS = /([\x00-\x20\x7F",;\\])/

  attr_reader :cookies

  # _cookies: optional seed array of Cookie objects.
  def initialize(_cookies=nil)
    @cookies = _cookies || []
  end

  # Shallow copy: the backing array is cloned, Cookie objects are shared.
  def copy
    self.class.new @cookies.clone
  end

  # Returns the cookie named _name applicable to _url, or nil.
  def get(_url, _name)
    for_origin(URI.parse(_url)).find { |c| c.name == _name }
  end

  # Builds a cookie from a parts hash (:name, :value, :domain required;
  # :path, :expires, :secure optional) and stores it, replacing any
  # equivalent cookie. Returns the cookie, or nil when a required part is
  # missing.
  def set(_parts)
    return nil unless _parts[:domain]
    return nil unless _parts[:name]
    return nil unless _parts[:value]

    cookie = Cookie.new(
      _parts[:name],
      _parts[:value],
      _parts[:domain].gsub(/^\./,''),
      _parts[:path] || '/',
      _parts[:expires],
      _parts[:secure]
    )

    replace_cookie cookie
    cookie
  end

  # Parses a raw Set-Cookie header received from _uri and stores the cookie.
  # Returns the cookie, or nil when its domain/path do not apply to _uri.
  def set_raw(_uri, _raw)
    cookie = decode_cookie _raw
    cookie.domain = compute_domain cookie.domain, _uri.host
    cookie.path = compute_path cookie.path, _uri.path
    return nil if cookie.domain.nil? or cookie.path.nil?
    replace_cookie(cookie)
    cookie
  end

  # All stored cookies whose domain and path match the given request URI.
  def for_origin(_uri)
    @cookies.select do |c|
      domains_match c.domain, _uri.host and paths_match c.path, _uri.path
    end
  end

  # Serializes the matching cookies as a Cookie request-header value.
  def for_origin_as_header(_uri)
    for_origin(_uri).map { |c| "#{c.name}=#{quote(c.value)}" }.join('; ')
  end

  private

  # Parses one Set-Cookie header into a Cookie (adapted from WEBrick).
  def decode_cookie(_raw)
    cookie_elem = _raw.split(/;/)
    first_elem = cookie_elem.shift
    first_elem.strip!
    key, value = first_elem.split(/\=/, 2)
    cookie = Cookie.new(key, dequote(value))
    cookie_elem.each do |pair|
      pair.strip!
      key, value = pair.split(/\=/, 2)
      value = dequote(value.strip) if value
      case key.downcase
      when "domain" then cookie.domain = value.downcase
      # NOTE(review): paths are case-sensitive per RFC 6265; downcasing
      # mirrors the original code — confirm before changing.
      when "path" then cookie.path = value.downcase
      when "expires" then cookie.expires = value
      # when "max-age" then cookie.max_age = Integer(value)
      # when "comment" then cookie.comment = value
      # when "version" then cookie.version = Integer(value)
      when "secure" then cookie.secure = true
      end
    end
    cookie
  end

  def domains_match(_cookie_domain, _request_domain)
    # RFC 6265 - 5.1.3
    # TODO: ensure request domain is not an IP
    return true if _cookie_domain == _request_domain
    return true if _request_domain.end_with? ".#{_cookie_domain}"
    return false
  end

  def paths_match(_cookie_path, _request_path)
    # RFC 6265 - 5.1.4
    _request_path = '/' if _request_path.empty?
    return true if _cookie_path == _request_path
    return true if _cookie_path[-1] == '/' and _request_path.start_with? _cookie_path
    return true if _request_path.start_with? "#{_cookie_path}/"
    return false
  end

  def compute_domain(_cookie_domain, _request_domain)
    return _request_domain if _cookie_domain.nil?
    # cookies with a domain not covering the request host are discarded
    return nil unless _cookie_domain.end_with? _request_domain
    return _cookie_domain.gsub(/^\./,'') # remove leading dot
  end

  def compute_path(_cookie_path, _request_path)
    default_path = compute_default_path(_request_path)
    return default_path if _cookie_path.nil?
    return nil unless _cookie_path.start_with? default_path
    return _cookie_path
  end

  def compute_default_path(_request_path)
    # RFC 6265 - 5.1.4
    return '/' unless _request_path[0] === '/'
    ls_idx = _request_path.rindex('/')
    return '/' unless ls_idx > 0
    _request_path[0..ls_idx]
  end

  # Overwrites the stored cookie with the same (domain, path, name), or
  # appends when no equivalent cookie exists.
  def replace_cookie(_cookie)
    @cookies.each_with_index do |cookie, i|
      if equivalent(cookie, _cookie)
        @cookies[i] = _cookie
        return
      end
    end
    @cookies << _cookie
  end

  # Strips surrounding double quotes and collapses \X escapes (WEBrick).
  def dequote(_str)
    ret = (/\A"(.*)"\Z/ =~ _str) ? $1 : _str.dup
    ret.gsub!(/\\(.)/, "\\1")
    ret
  end

  # Quotes a value containing characters outside the bare cookie-octet set,
  # escaping embedded '\' and '"' with a backslash.
  # BUGFIX: the previous replacement string "\\\1" expanded to a literal
  # backslash followed by byte \x01 ("\1" is an octal escape inside a
  # double-quoted Ruby string), corrupting quoted values.
  def quote(_str)
    return _str unless BAD_VALUE_CHARS === _str
    '"' << _str.gsub(/[\\"]/) { |c| "\\#{c}" } << '"'
  end

  # Storage identity per RFC 6265: domain + path + name.
  def equivalent(_cookie_a, _cookie_b)
    return false unless _cookie_a.domain == _cookie_b.domain
    return false unless _cookie_a.path == _cookie_b.path
    return false unless _cookie_a.name == _cookie_b.name
    return true
  end
end
end |
# @see https://developers.podio.com/doc/files
# Model for a file hosted on an external service reachable through a Podio
# linked account. Declares the ActivePodio properties returned by the API
# and class-level finder/creator calls.
class Podio::ExternalFile < ActivePodio::Base
property :external_file_id, :string
property :name, :string
property :mimetype, :string
property :created_on, :datetime
property :updated_on, :datetime
# Expose the service-side id under the generic #id name.
alias_method :id, :external_file_id
class << self
# Lists all external files visible through the given linked account.
# NOTE: the brace block is significant — `get { ... }` binds the block to
# `get`; a do/end block would bind to `list` instead (precedence trap).
def find_all_for_linked_account(linked_account_id, options={})
list Podio.connection.get { |req|
req.url("/file/linked_account/#{linked_account_id}/", options)
}.body
end
# Registers the file identified by external_file_id with Podio.
# preserve_permissions: when true, asks the API to keep the permissions
# configured on the external service.
# NOTE(review): uses Podio.client.connection while the finder above uses
# Podio.connection — presumably equivalent; confirm against the gem.
def create_from_external_file_id(linked_account_id, external_file_id, preserve_permissions=false)
response = Podio.client.connection.post do |req|
req.url "/file/linked_account/#{linked_account_id}/"
req.body = {
:external_file_id => external_file_id,
:preserve_permissions => preserve_permissions
}
end
member response.body
end
end
end
Add options to create_from_external_file_id
# @see https://developers.podio.com/doc/files
# Model for a file hosted on an external service reachable through a Podio
# linked account. Declares the ActivePodio properties returned by the API
# and class-level finder/creator calls.
class Podio::ExternalFile < ActivePodio::Base
property :external_file_id, :string
property :name, :string
property :mimetype, :string
property :created_on, :datetime
property :updated_on, :datetime
# Expose the service-side id under the generic #id name.
alias_method :id, :external_file_id
class << self
# Lists all external files visible through the given linked account.
# NOTE: the brace block is significant — `get { ... }` binds the block to
# `get`; a do/end block would bind to `list` instead (precedence trap).
def find_all_for_linked_account(linked_account_id, options={})
list Podio.connection.get { |req|
req.url("/file/linked_account/#{linked_account_id}/", options)
}.body
end
# Registers the file identified by external_file_id with Podio.
# preserve_permissions: when true, asks the API to keep the permissions
# configured on the external service.
# options: extra query parameters appended to the request URL.
# NOTE(review): uses Podio.client.connection while the finder above uses
# Podio.connection — presumably equivalent; confirm against the gem.
def create_from_external_file_id(linked_account_id, external_file_id, preserve_permissions=false, options={})
response = Podio.client.connection.post do |req|
req.url("/file/linked_account/#{linked_account_id}/", options)
req.body = {
:external_file_id => external_file_id,
:preserve_permissions => preserve_permissions
}
end
member response.body
end
end
end
|
require 'project_compat'
module Acts
module Authorized
module PolicyBasedAuthorization
  # Mixin for models guarded by a sharing Policy. On inclusion it wires up
  # the contributor/policy associations and defines cached can_<action>?
  # predicates for every entry in AUTHORIZATION_ACTIONS.
  def self.included klass
    klass.extend ClassMethods
    klass.class_eval do
      belongs_to :contributor, :polymorphic => true unless method_defined? :contributor
      after_initialize :contributor_or_default_if_new
      # checks a policy exists, and if missing resorts to using a private policy
      after_initialize :policy_or_default_if_new
      include ProjectCompat unless method_defined? :projects
      belongs_to :policy, :required_access_to_owner => :manage, :autosave => true
    end
  end

  module ClassMethods
  end

  def contributor_credited?
    true
  end

  def private?
    policy.private?
  end

  def public?
    policy.public?
  end

  def default_policy
    Policy.default
  end

  # Assigns the default policy when none is set yet.
  def policy_or_default
    if self.policy.nil?
      self.policy = default_policy
    end
  end

  def policy_or_default_if_new
    if self.new_record?
      policy_or_default
    end
  end

  def default_contributor
    User.current_user
  end

  # When having a sharing_scope policy of Policy::ALL_SYSMO_USERS it is considered to have
  # advanced permissions if any of the permissions do not relate to the projects associated
  # with the resource (ISA or Asset). This is a temporary work-around for the loss of the
  # custom_permissions flag when defining a pre-canned permission of shared with sysmo, but
  # editable/downloadable within my project. Other policy sharing scopes are simpler, and
  # are considered to have advanced permissions if there are more than zero permissions defined.
  def has_advanced_permissions?
    if policy.sharing_scope==Policy::ALL_SYSMO_USERS
      !(policy.permissions.collect{|p| p.contributor} - projects).empty?
    else
      policy.permissions.count > 0
    end
  end

  def contributor_or_default_if_new
    if self.new_record? && contributor.nil?
      self.contributor = default_contributor
    end
  end

  # contributor, or a person who can manage the item when the item was published
  def can_publish?
    ((Ability.new(User.current_user).can? :publish, self) && self.can_manage?) || self.contributor == User.current_user || try_block{self.contributor.user} == User.current_user || (self.can_manage? && self.policy.sharing_scope == Policy::EVERYONE) || Seek::Config.is_virtualliver
  end

  # use request_permission_summary to retrieve who can manage the item
  def people_can_manage
    contributor = self.contributor.kind_of?(Person) ? self.contributor : self.contributor.try(:person)
    return [[contributor.id, "#{contributor.first_name} #{contributor.last_name}", Policy::MANAGING]] if policy.blank?
    creators = is_downloadable? ? self.creators : []
    asset_managers = projects.collect(&:asset_managers).flatten
    grouped_people_by_access_type = policy.summarize_permissions creators,asset_managers, contributor
    grouped_people_by_access_type[Policy::MANAGING]
  end

  # Defines can_view?/can_edit?/... predicates whose (possibly expensive)
  # authorization result is cached under a key derived from the item, the
  # user and their memberships (see #cache_keys).
  # BUGFIX: cache_keys takes (user, action) and must actually be called —
  # the previous bare `cache_keys` reference raised ArgumentError the first
  # time any predicate ran.
  AUTHORIZATION_ACTIONS.each do |action|
    eval <<-END_EVAL
      def can_#{action}? user = User.current_user
        key = cache_keys(user, "#{action}")
        new_record? || Rails.cache.fetch(key) {((Authorization.is_authorized? "#{action}", nil, self, user) || (Ability.new(user).can? "#{action}".to_sym, self) || (Ability.new(user).can? "#{action}_asset".to_sym, self)) ? :true : :false} == :true
      end
    END_EVAL
  end

  # Builds the cache-key fragments for a can_<action>? result so the cached
  # answer is invalidated when the item, its creators, its policy or
  # permissions, or the user's group memberships change.
  def cache_keys user, action
    cache_keys = []
    person = user.try(:person)
    # action
    cache_keys << ["can_#{action}?"]
    # item (to invalidate when contributor is changed)
    cache_keys << self.cache_key
    # item creators (to invalidate when creators are changed)
    if self.respond_to? :assets_creators
      # sort_by(&:id) rather than sort: model records are not Comparable
      cache_keys |= self.assets_creators.sort_by(&:id).collect(&:cache_key)
    end
    # person to be authorized
    cache_keys << person.try(:cache_key)
    # policy
    cache_keys << policy.cache_key
    # permissions
    cache_keys |= policy.permissions.sort_by(&:id).collect(&:cache_key)
    # group_memberships + favourite_group_memberships
    unless person.nil?
      cache_keys |= person.group_memberships.sort_by(&:id).collect(&:cache_key)
      cache_keys |= person.favourite_group_memberships.sort_by(&:id).collect(&:cache_key)
    end
    cache_keys
  end

  # returns a list of the people that can manage this file,
  # which will be the contributor, and those that have manage permissions
  def managers
    # FIXME: how to handle projects as contributors - return all people or just specific people (pals or other role)?
    people=[]
    unless self.contributor.nil?
      people << self.contributor.person if self.contributor.kind_of?(User)
      people << self.contributor if self.contributor.kind_of?(Person)
    end
    self.policy.permissions.each do |perm|
      unless perm.contributor.nil? || perm.access_type!=Policy::MANAGING
        people << (perm.contributor) if perm.contributor.kind_of?(Person)
        people << (perm.contributor.person) if perm.contributor.kind_of?(User)
      end
    end
    people.uniq
  end
end
end
end
Fix forgotten call to the cache_keys function; move cache_keys to the end
require 'project_compat'
module Acts
module Authorized
# Mixin for models guarded by a sharing Policy. On inclusion it wires up the
# contributor/policy associations and defines cached can_<action>? predicates
# for every entry in AUTHORIZATION_ACTIONS.
module PolicyBasedAuthorization
def self.included klass
klass.extend ClassMethods
klass.class_eval do
belongs_to :contributor, :polymorphic => true unless method_defined? :contributor
after_initialize :contributor_or_default_if_new
#checks a policy exists, and if missing resorts to using a private policy
after_initialize :policy_or_default_if_new
include ProjectCompat unless method_defined? :projects
belongs_to :policy, :required_access_to_owner => :manage, :autosave => true
end
end
module ClassMethods
end
def contributor_credited?
true
end
def private?
policy.private?
end
def public?
policy.public?
end
def default_policy
Policy.default
end
# Assigns the default policy when none is set yet.
def policy_or_default
if self.policy.nil?
self.policy = default_policy
end
end
def policy_or_default_if_new
if self.new_record?
policy_or_default
end
end
def default_contributor
User.current_user
end
#when having a sharing_scope policy of Policy::ALL_SYSMO_USERS it is considered to have advanced permissions if any of the permissions do not relate to the projects associated with the resource (ISA or Asset))
#this is a temporary work-around for the loss of the custom_permissions flag when defining a pre-canned permission of shared with sysmo, but editable/downloadable within my project
#other policy sharing scopes are simpler, and are considered to have advanced permissions if there are more than zero permissions defined
def has_advanced_permissions?
if policy.sharing_scope==Policy::ALL_SYSMO_USERS
!(policy.permissions.collect{|p| p.contributor} - projects).empty?
else
policy.permissions.count > 0
end
end
def contributor_or_default_if_new
if self.new_record? && contributor.nil?
self.contributor = default_contributor
end
end
#contributor, or a person who can manage the item when the item was published
def can_publish?
((Ability.new(User.current_user).can? :publish, self) && self.can_manage?) || self.contributor == User.current_user || try_block{self.contributor.user} == User.current_user || (self.can_manage? && self.policy.sharing_scope == Policy::EVERYONE) || Seek::Config.is_virtualliver
end
#use request_permission_summary to retrieve who can manage the item
def people_can_manage
contributor = self.contributor.kind_of?(Person) ? self.contributor : self.contributor.try(:person)
return [[contributor.id, "#{contributor.first_name} #{contributor.last_name}", Policy::MANAGING]] if policy.blank?
creators = is_downloadable? ? self.creators : []
asset_managers = projects.collect(&:asset_managers).flatten
grouped_people_by_access_type = policy.summarize_permissions creators,asset_managers, contributor
grouped_people_by_access_type[Policy::MANAGING]
end
# Defines can_view?/can_edit?/... predicates; the authorization result is
# cached under the key built by #cache_keys. Note: inside the eval'd body
# the local `cache_keys` shadows the method of the same name, but the
# right-hand side calls the method first, so this is safe.
AUTHORIZATION_ACTIONS.each do |action|
eval <<-END_EVAL
def can_#{action}? user = User.current_user
cache_keys = cache_keys(user, "#{action}")
new_record? || Rails.cache.fetch(cache_keys) {((Authorization.is_authorized? "#{action}", nil, self, user) || (Ability.new(user).can? "#{action}".to_sym, self) || (Ability.new(user).can? "#{action}_asset".to_sym, self)) ? :true : :false} == :true
end
END_EVAL
end
#returns a list of the people that can manage this file
#which will be the contributor, and those that have manage permissions
def managers
#FIXME: how to handle projects as contributors - return all people or just specific people (pals or other role)?
people=[]
unless self.contributor.nil?
people << self.contributor.person if self.contributor.kind_of?(User)
people << self.contributor if self.contributor.kind_of?(Person)
end
self.policy.permissions.each do |perm|
unless perm.contributor.nil? || perm.access_type!=Policy::MANAGING
people << (perm.contributor) if perm.contributor.kind_of?(Person)
people << (perm.contributor.person) if perm.contributor.kind_of?(User)
end
end
people.uniq
end
# Builds the cache-key fragments for a can_<action>? result so the cached
# answer is invalidated when the item, its creators, its policy or
# permissions, or the user's group memberships change.
def cache_keys user, action
cache_keys = []
person = user.try(:person)
#action
cache_keys << ["can_#{action}?"]
#item (to invalidate when contributor is changed)
cache_keys << self.cache_key
#item creators (to invalidate when creators are changed)
if self.respond_to? :assets_creators
# sort_by(&:id) rather than sort: model records are not Comparable
cache_keys |= self.assets_creators.sort_by(&:id).collect(&:cache_key)
end
#person to be authorized
cache_keys << person.try(:cache_key)
#policy
cache_keys << policy.cache_key
#permissions
cache_keys |= policy.permissions.sort_by(&:id).collect(&:cache_key)
#group_memberships + favourite_group_memberships
unless person.nil?
cache_keys |= person.group_memberships.sort_by(&:id).collect(&:cache_key)
cache_keys |= person.favourite_group_memberships.sort_by(&:id).collect(&:cache_key)
end
cache_keys
end
end
end
end |
module Refinery
module Pages
# Rails engine for the Refinery Pages extension: registers the plugin,
# mixes helpers/instance methods into the host app controllers, and appends
# the catch-all "marketable" page routes.
class Engine < ::Rails::Engine
  extend Refinery::Engine
  isolate_namespace Refinery
  engine_name :refinery_pages

  config.autoload_paths += %W( #{config.root}/lib )

  before_inclusion do
    Refinery::Plugin.register do |plugin|
      plugin.pathname = root
      plugin.name = 'refinery_pages'
      plugin.menu_match = %r{refinery/page(_part|s_dialog)?s(/preview)?$}
      plugin.url = proc { Refinery::Core::Engine.routes.url_helpers.admin_pages_path }
    end

    ::ApplicationController.send :helper, Refinery::Pages::ContentPagesHelper
    Refinery::AdminController.send :helper, Refinery::Pages::ContentPagesHelper
  end

  after_inclusion do
    Refinery.include_once(::ApplicationController, Refinery::Pages::InstanceMethods)
    Refinery.include_once(Refinery::AdminController, Refinery::Pages::Admin::InstanceMethods)
  end

  initializer "refinery.pages append marketable routes", :after => :set_routes_reloader_hook do
    append_marketable_routes if Refinery::Pages.marketable_urls
  end

  initializer "add marketable route parts to reserved words", :after => :set_routes_reloader_hook do
    add_route_parts_as_reserved_words if Refinery::Pages.marketable_urls
  end

  config.to_prepare do
    Rails.application.config.assets.precompile += %w(
      speakingurl.js
    )
  end

  config.after_initialize do
    Refinery.register_extension(Refinery::Pages)
  end

  protected

  # Appends the catch-all route that serves any remaining path as a page.
  def append_marketable_routes
    Refinery::Core::Engine.routes.append do
      get '*path', :to => 'pages#show', :as => :marketable_page
    end
    Rails.application.routes_reloader.reload!
  end

  # Add any parts of routes as reserved words.
  def add_route_parts_as_reserved_words
    ActiveSupport.on_load(:active_record) do
      # do not add routes with :allow_slug => true
      # BUGFIX: named_routes is a NamedRouteCollection, not an Array — call
      # #to_a first so #reject yields [name, route] pairs and returns an
      # Array instead of failing / returning a Hash-like collection.
      included_routes = Rails.application.routes.named_routes.to_a.reject{ |name, route| route.defaults[:allow_slug] }
      route_paths = included_routes.map { |name, route| route.path.spec }
      route_paths.reject! { |path| path.to_s =~ %r{^/(rails|refinery)}}
      Refinery::Pages.friendly_id_reserved_words |= route_paths.map { |path|
        path.to_s.gsub(%r{^/}, '').to_s.split('(').first.to_s.split(':').first.to_s.split('/')
      }.flatten.reject { |w| w =~ %r{_|\.} }.uniq
    end
  end
end
end
end
Call `.to_a` on Rails.application.routes.named_routes before filtering:
`reject` should be done on an Array, not on the NamedRouteCollection.
module Refinery
module Pages
# Rails engine for the Refinery Pages extension: registers the plugin,
# mixes helpers/instance methods into the host app controllers, and appends
# the catch-all "marketable" page routes.
class Engine < ::Rails::Engine
extend Refinery::Engine
isolate_namespace Refinery
engine_name :refinery_pages
config.autoload_paths += %W( #{config.root}/lib )
before_inclusion do
Refinery::Plugin.register do |plugin|
plugin.pathname = root
plugin.name = 'refinery_pages'
plugin.menu_match = %r{refinery/page(_part|s_dialog)?s(/preview)?$}
plugin.url = proc { Refinery::Core::Engine.routes.url_helpers.admin_pages_path }
end
::ApplicationController.send :helper, Refinery::Pages::ContentPagesHelper
Refinery::AdminController.send :helper, Refinery::Pages::ContentPagesHelper
end
after_inclusion do
Refinery.include_once(::ApplicationController, Refinery::Pages::InstanceMethods)
Refinery.include_once(Refinery::AdminController, Refinery::Pages::Admin::InstanceMethods)
end
# Both initializers run after the routes reloader so the full route set is
# known when the marketable routes / reserved words are computed.
initializer "refinery.pages append marketable routes", :after => :set_routes_reloader_hook do
append_marketable_routes if Refinery::Pages.marketable_urls
end
initializer "add marketable route parts to reserved words", :after => :set_routes_reloader_hook do
add_route_parts_as_reserved_words if Refinery::Pages.marketable_urls
end
config.to_prepare do
Rails.application.config.assets.precompile += %w(
speakingurl.js
)
end
config.after_initialize do
Refinery.register_extension(Refinery::Pages)
end
protected
# Appends the catch-all route that serves any remaining path as a page.
def append_marketable_routes
Refinery::Core::Engine.routes.append do
get '*path', :to => 'pages#show', :as => :marketable_page
end
Rails.application.routes_reloader.reload!
end
# Add any parts of routes as reserved words.
def add_route_parts_as_reserved_words
ActiveSupport.on_load(:active_record) do
# do not add routes with :allow_slug => true
# (.to_a is required: named_routes is a NamedRouteCollection, not an Array)
included_routes = Rails.application.routes.named_routes.to_a.reject{ |name, route| route.defaults[:allow_slug] }
route_paths = included_routes.map { |name, route| route.path.spec }
route_paths.reject! { |path| path.to_s =~ %r{^/(rails|refinery)}}
Refinery::Pages.friendly_id_reserved_words |= route_paths.map { |path|
path.to_s.gsub(%r{^/}, '').to_s.split('(').first.to_s.split(':').first.to_s.split('/')
}.flatten.reject { |w| w =~ %r{_|\.} }.uniq
end
end
end
end
end
|
# Gem version constant for pomodoro_beeminder.
module PomodoroBeeminder
  VERSION = '0.0.1'
end
Bump to 0.0.2
# Gem version constant for pomodoro_beeminder.
module PomodoroBeeminder
  VERSION = '0.0.2'
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.