CombinedText stringlengths 4 3.42M |
|---|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module PoemToday
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Eastern Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
config.autoload_paths += Dir[Rails.root.join('app', 'models', '{**/}')]
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.assets.paths << "#{Rails.root}/app/assets/fonts"
config.assets.precompile += %w( .svg .eot .woff .ttf )S
config.serve_static_assets = true
I18n.enforce_available_locales = false
end
end
revert
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module PoemToday
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Eastern Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
config.autoload_paths += Dir[Rails.root.join('app', 'models', '{**/}')]
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.serve_static_assets = true
I18n.enforce_available_locales = false
end
end
|
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
AWS.config(:logger => Logger.new($stdout))
AWS.config(:log_level => :debug)
AWS.config(:http_wire_trace => true)
describe "Q3" do
before do
client.list_queues[:queue_urls].each do |queue_url|
client.delete_queue(:queue_url => queue_url)
end
end
after do
client.list_queues[:queue_urls].each do |queue_url|
client.delete_queue(:queue_url => queue_url)
end
end
context 'Actions' do
context 'Queue' do
context "CreateQueue" do
it 'should create queue' do
client.create_queue(:queue_name => 'myqueue001')
expect(client.list_queues[:queue_urls]).to include('http://localhost/*/myqueue001')
end
end
context "ListQueues" do
it 'should list queues' do
1.upto(5) do |i|
client.create_queue(:queue_name => "myqueue00#{i}")
end
expect(client.list_queues[:queue_urls]).to eq %w(
http://localhost/*/myqueue001
http://localhost/*/myqueue002
http://localhost/*/myqueue003
http://localhost/*/myqueue004
http://localhost/*/myqueue005
)
end
end
context "GetQueueUrl" do
it 'should get queue url' do
client.create_queue(queue_name: 'myqueue001')
res = client.get_queue_url(queue_name: 'myqueue001')
expect(res[:queue_url]).to eq('http://localhost/*/myqueue001')
end
it 'should raise error when non existent queue is specified' do
expect {
client.get_queue_url(queue_name: 'myqueue002')
}.to raise_error(AWS::SQS::Errors::NonExistentQueue)
end
end
context "GetQueueAttributes" do
it 'should get queue attributes' do
now = Time.now.to_i
client.create_queue(queue_name: 'myqueue001')
res = client.get_queue_attributes(queue_url: 'http://localhost/*/myqueue001')
expect(res[:attributes]["CreateTimestamp"].to_i).to be_within(5).of(now)
expect(res[:attributes]["LastModifiedTimestamp"].to_i).to be_within(5).of(now)
expect(res[:attributes]["VisibilityTimeout"]).to eq("30")
expect(res[:attributes]["MessageRetentionPeriod"]).to eq("345600")
expect(res[:attributes]["MaximumMessageSize"]).to eq("262144")
expect(res[:attributes]["DelaySeconds"]).to eq("0")
expect(res[:attributes]["ReceiveMessageWaitTimeSeconds"]).to eq("0")
expect(res[:attributes]["ApproximateNumberOfMessages"]).to eq("0")
expect(res[:attributes]["ApproximateNumberOfMessagesNotVisible"]).to eq("0")
expect(res[:attributes]["ApproximateNumberOfMessagesDelayed"]).to eq("0")
end
it 'should get precise ApproximateNumberOfMessages*' do
queue = q3.queues.create('myqueue001')
queue.send_message('hello', delay_seconds: 5)
queue.send_message('hello')
queue.send_message('hello')
queue.receive_message
res = client.get_queue_attributes(queue_url: 'http://localhost/*/myqueue001')
expect(res[:attributes]["ApproximateNumberOfMessages"]).to eq("1")
expect(res[:attributes]["ApproximateNumberOfMessagesNotVisible"]).to eq("1")
expect(res[:attributes]["ApproximateNumberOfMessagesDelayed"]).to eq("1")
end
end
context "SetQueueAttributes" do
it 'should set queue attributes' do
client.create_queue(queue_name: 'myqueue001')
client.set_queue_attributes(
queue_url: 'http://localhost/*/myqueue001',
attributes: {
'VisibilityTimeout' => '1',
'MessageRetentionPeriod' => '2',
'MaximumMessageSize' => '3',
'DelaySeconds' => '4',
'ReceiveMessageWaitTimeSeconds' => '5',
}
)
res = client.get_queue_attributes(queue_url: 'http://localhost/*/myqueue001')
expect(res[:attributes]["VisibilityTimeout"]).to eq("1")
expect(res[:attributes]["MessageRetentionPeriod"]).to eq("2")
expect(res[:attributes]["MaximumMessageSize"]).to eq("3")
expect(res[:attributes]["DelaySeconds"]).to eq("4")
expect(res[:attributes]["ReceiveMessageWaitTimeSeconds"]).to eq("5")
end
end
context "DeleteQueue" do
it 'should delete queue' do
client.create_queue(queue_name: 'myqueue001')
client.delete_queue(queue_url: 'http://localhost/*/myqueue001')
expect(client.list_queues[:queue_urls]).not_to include('http://localhost/*/myqueue001')
end
end
end
context 'Message' do
context "SendMessage" do
it 'should send message' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages].first[:body]).to eq('hello')
end
end
context "ReceiveMessage" do
it 'should receive message' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages].first[:body]).to eq('hello')
end
end
context "ChangeMessageVisibility" do
it 'should change message visibility' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
receipt_handle = res[:messages].first[:receipt_handle]
client.change_message_visibility(
queue_url: 'http://localhost/*/myqueue001',
receipt_handle: receipt_handle,
visibility_timeout: 3,
)
sleep 5
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages].first[:body]).to eq('hello')
end
end
context "DeleteMessage" do
it 'should delete message' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
receipt_handle = res[:messages].first[:receipt_handle]
client.delete_message(
queue_url: 'http://localhost/*/myqueue001',
receipt_handle: receipt_handle
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages]).to be_empty
end
end
end
end
context 'Features' do
context 'Message Extention Period' do
it 'with CreateQueue MessageExtentionPeriod' do
queue = q3.queues.create('myqueue001', :message_retention_period => 3)
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
queue.send_message('hello')
sleep 4
message = queue.receive_message
expect(message).to be_nil
end
it 'with SetQueueAttributes MessageExtentionPeriod' do
queue = q3.queues.create('myqueue001')
queue.message_retention_period = 3
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
queue.send_message('hello')
sleep 4
message = queue.receive_message
expect(message).to be_nil
end
end
context 'First In First Out' do
it 'First in first out' do
queue = q3.queues.create('myqueue001', :visibility_timeout => 3)
1.upto(10) do |i|
queue.send_message("hello #{i}")
end
1.upto(10) do |i|
message = queue.receive_message
expect(message.body).to eq("hello #{i}")
end
end
end
context 'Long Polling' do
end
context 'Visibility Timeout' do
it 'with CreateQueue VisibilityTimeout' do
queue = q3.queues.create('myqueue001', :visibility_timeout => 3)
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
it 'with SetQueueAttributes VisibilityTimeout' do
queue = q3.queues.create('myqueue001')
queue.visibility_timeout = 3
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
it 'with ReceiveMessage VisibilityTimeout' do
queue = q3.queues.create('myqueue001')
queue.send_message('hello')
message = queue.receive_message(visibility_timeout: 3)
expect(message.body).to eq('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
end
context 'Delayed Message' do
it 'with CreateQueue DelaySeconds' do
queue = q3.queues.create('myqueue001', :delay_seconds => 3)
queue.send_message('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
it 'with SendMessage DelaySeconds' do
queue = q3.queues.create('myqueue001')
queue.send_message('hello', :delay_seconds => 3)
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
end
end
end
add GetQueueAttributes test
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
AWS.config(:logger => Logger.new($stdout))
AWS.config(:log_level => :debug)
AWS.config(:http_wire_trace => true)
describe "Q3" do
before do
client.list_queues[:queue_urls].each do |queue_url|
client.delete_queue(:queue_url => queue_url)
end
end
after do
client.list_queues[:queue_urls].each do |queue_url|
client.delete_queue(:queue_url => queue_url)
end
end
context 'Actions' do
context 'Queue' do
context "CreateQueue" do
it 'should create queue' do
client.create_queue(:queue_name => 'myqueue001')
expect(client.list_queues[:queue_urls]).to include('http://localhost/*/myqueue001')
end
end
context "ListQueues" do
it 'should list queues' do
1.upto(5) do |i|
client.create_queue(:queue_name => "myqueue00#{i}")
end
expect(client.list_queues[:queue_urls]).to eq %w(
http://localhost/*/myqueue001
http://localhost/*/myqueue002
http://localhost/*/myqueue003
http://localhost/*/myqueue004
http://localhost/*/myqueue005
)
end
end
context "GetQueueUrl" do
it 'should get queue url' do
client.create_queue(queue_name: 'myqueue001')
res = client.get_queue_url(queue_name: 'myqueue001')
expect(res[:queue_url]).to eq('http://localhost/*/myqueue001')
end
it 'should raise error when non existent queue is specified' do
expect {
client.get_queue_url(queue_name: 'myqueue002')
}.to raise_error(AWS::SQS::Errors::NonExistentQueue)
end
end
context "GetQueueAttributes" do
it 'should get queue attributes' do
now = Time.now.to_i
client.create_queue(queue_name: 'myqueue001')
res = client.get_queue_attributes(queue_url: 'http://localhost/*/myqueue001')
expect(res[:attributes]["CreateTimestamp"].to_i).to be_within(5).of(now)
expect(res[:attributes]["LastModifiedTimestamp"].to_i).to be_within(5).of(now)
expect(res[:attributes]["VisibilityTimeout"]).to eq("30")
expect(res[:attributes]["MessageRetentionPeriod"]).to eq("345600")
expect(res[:attributes]["MaximumMessageSize"]).to eq("262144")
expect(res[:attributes]["DelaySeconds"]).to eq("0")
expect(res[:attributes]["ReceiveMessageWaitTimeSeconds"]).to eq("0")
expect(res[:attributes]["ApproximateNumberOfMessages"]).to eq("0")
expect(res[:attributes]["ApproximateNumberOfMessagesNotVisible"]).to eq("0")
expect(res[:attributes]["ApproximateNumberOfMessagesDelayed"]).to eq("0")
end
it 'should get updated LastModifiedTimestamp' do
past = Time.now.to_i
queue = q3.queues.create('myqueue001')
sleep 3
now = Time.now.to_i
queue.visibility_timeout = 1
expect(queue.last_modified_timestamp.to_i).not_to be_within(2).of(past)
expect(queue.last_modified_timestamp.to_i).to be_within(2).of(now)
end
it 'should get precise ApproximateNumberOfMessages*' do
queue = q3.queues.create('myqueue001')
queue.send_message('hello', delay_seconds: 5)
queue.send_message('hello')
queue.send_message('hello')
queue.receive_message
res = client.get_queue_attributes(queue_url: 'http://localhost/*/myqueue001')
expect(res[:attributes]["ApproximateNumberOfMessages"]).to eq("1")
expect(res[:attributes]["ApproximateNumberOfMessagesNotVisible"]).to eq("1")
expect(res[:attributes]["ApproximateNumberOfMessagesDelayed"]).to eq("1")
end
end
context "SetQueueAttributes" do
it 'should set queue attributes' do
client.create_queue(queue_name: 'myqueue001')
client.set_queue_attributes(
queue_url: 'http://localhost/*/myqueue001',
attributes: {
'VisibilityTimeout' => '1',
'MessageRetentionPeriod' => '2',
'MaximumMessageSize' => '3',
'DelaySeconds' => '4',
'ReceiveMessageWaitTimeSeconds' => '5',
}
)
res = client.get_queue_attributes(queue_url: 'http://localhost/*/myqueue001')
expect(res[:attributes]["VisibilityTimeout"]).to eq("1")
expect(res[:attributes]["MessageRetentionPeriod"]).to eq("2")
expect(res[:attributes]["MaximumMessageSize"]).to eq("3")
expect(res[:attributes]["DelaySeconds"]).to eq("4")
expect(res[:attributes]["ReceiveMessageWaitTimeSeconds"]).to eq("5")
end
end
context "DeleteQueue" do
it 'should delete queue' do
client.create_queue(queue_name: 'myqueue001')
client.delete_queue(queue_url: 'http://localhost/*/myqueue001')
expect(client.list_queues[:queue_urls]).not_to include('http://localhost/*/myqueue001')
end
end
end
context 'Message' do
context "SendMessage" do
it 'should send message' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages].first[:body]).to eq('hello')
end
end
context "ReceiveMessage" do
it 'should receive message' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages].first[:body]).to eq('hello')
end
end
context "ChangeMessageVisibility" do
it 'should change message visibility' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
receipt_handle = res[:messages].first[:receipt_handle]
client.change_message_visibility(
queue_url: 'http://localhost/*/myqueue001',
receipt_handle: receipt_handle,
visibility_timeout: 3,
)
sleep 5
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages].first[:body]).to eq('hello')
end
end
context "DeleteMessage" do
it 'should delete message' do
client.create_queue(queue_name: 'myqueue001')
client.send_message(
queue_url: 'http://localhost/*/myqueue001',
message_body: 'hello'
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
receipt_handle = res[:messages].first[:receipt_handle]
client.delete_message(
queue_url: 'http://localhost/*/myqueue001',
receipt_handle: receipt_handle
)
res = client.receive_message(queue_url: 'http://localhost/*/myqueue001')
expect(res[:messages]).to be_empty
end
end
end
end
context 'Features' do
context 'Message Extention Period' do
it 'with CreateQueue MessageExtentionPeriod' do
queue = q3.queues.create('myqueue001', :message_retention_period => 3)
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
queue.send_message('hello')
sleep 4
message = queue.receive_message
expect(message).to be_nil
end
it 'with SetQueueAttributes MessageExtentionPeriod' do
queue = q3.queues.create('myqueue001')
queue.message_retention_period = 3
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
queue.send_message('hello')
sleep 4
message = queue.receive_message
expect(message).to be_nil
end
end
context 'First In First Out' do
it 'First in first out' do
queue = q3.queues.create('myqueue001', :visibility_timeout => 3)
1.upto(10) do |i|
queue.send_message("hello #{i}")
end
1.upto(10) do |i|
message = queue.receive_message
expect(message.body).to eq("hello #{i}")
end
end
end
context 'Long Polling' do
end
context 'Visibility Timeout' do
it 'with CreateQueue VisibilityTimeout' do
queue = q3.queues.create('myqueue001', :visibility_timeout => 3)
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
it 'with SetQueueAttributes VisibilityTimeout' do
queue = q3.queues.create('myqueue001')
queue.visibility_timeout = 3
queue.send_message('hello')
message = queue.receive_message
expect(message.body).to eq('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
it 'with ReceiveMessage VisibilityTimeout' do
queue = q3.queues.create('myqueue001')
queue.send_message('hello')
message = queue.receive_message(visibility_timeout: 3)
expect(message.body).to eq('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
end
context 'Delayed Message' do
it 'with CreateQueue DelaySeconds' do
queue = q3.queues.create('myqueue001', :delay_seconds => 3)
queue.send_message('hello')
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
it 'with SendMessage DelaySeconds' do
queue = q3.queues.create('myqueue001')
queue.send_message('hello', :delay_seconds => 3)
message = queue.receive_message
expect(message).to be_nil
sleep 4
message = queue.receive_message
expect(message.body).to eq('hello')
end
end
end
end
|
module Wovnrb
VERSION = "0.1.79"
end
up version
module Wovnrb
VERSION = "0.1.80"
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module UCUtrade
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
debugging heroku
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module UCUtrade
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
config.assets.initialize_on_precompile = false
end
end
|
#
# Cookbook Name:: audacity
# Recipe:: win
#
# Copyright 2013, Wesleyan University
#
# All rights reserved - Do Not Redistribute
#
# Install our base package
windows_package "Audacity" do
source "http://sw.wesleyan.edu/windows/audacity/audacity-win-2.0.5.exe"
checksum "88e63316304304c8e028062bb117025ce0da7a72e5032e9726e2a1ea3c2b961d"
version "2.0.5"
end
# Install the LAME library
windows_package "LAME for Audacity" do
source "http://sw.wesleyan.edu/windows/audacity/Lame_v3.99.3_for_Windows.exe"
checksum "90e9d629b4e528b77c6f51f185c3e81bb3a4eca7d6d53f049ed9ef7787464e8b"
version "3.99.3"
end
# Install the FFmpeg library
windows_package "FFmpeg for Audacity" do
source "http://sw.wesleyan.edu/windows/audacity/FFmpeg_v0.6.2_for_Audacity_on_Windows.exe"
checksum "96760833b848675460ae97cfed64ec1645643420ec386c7b92da556d91af79ef"
version "0.6.2"
end
I don't know what year it is.
#
# Cookbook Name:: audacity
# Recipe:: win
#
# Copyright 2014, Wesleyan University
#
# All rights reserved - Do Not Redistribute
#
# Install our base package
windows_package "Audacity" do
source "http://sw.wesleyan.edu/windows/audacity/audacity-win-2.0.5.exe"
checksum "88e63316304304c8e028062bb117025ce0da7a72e5032e9726e2a1ea3c2b961d"
version "2.0.5"
end
# Install the LAME library
windows_package "LAME for Audacity" do
source "http://sw.wesleyan.edu/windows/audacity/Lame_v3.99.3_for_Windows.exe"
checksum "90e9d629b4e528b77c6f51f185c3e81bb3a4eca7d6d53f049ed9ef7787464e8b"
version "3.99.3"
end
# Install the FFmpeg library
windows_package "FFmpeg for Audacity" do
source "http://sw.wesleyan.edu/windows/audacity/FFmpeg_v0.6.2_for_Audacity_on_Windows.exe"
checksum "96760833b848675460ae97cfed64ec1645643420ec386c7b92da556d91af79ef"
version "0.6.2"
end
|
class EventHub::Components::Logger
def self.stdout
logger = Logger.new(STDOUT)
logger.formatter = proc do |severity, time, progname, msg|
time_in_string = "#{time.strftime("%Y-%m-%d %H:%M:%S")}.#{"%04d" % (time.usec/100)}"
"#{time_in_string}: #{"%10s" % severity} - #{msg}\n"
end
logger
end
def self.logstash(processor_name, environment, host, port)
#configure logstash with custom fields
config = LogStashLogger.configure do |config|
config.customize_event do |event|
event["app_name"] = processor_name
event["env"] = environment
end
end
LogStashLogger.new([{type: :file, path: "log/#{processor_name}.log", sync: true}])
end
end
changed log path for ruby processors
changed path to logs/ruby instead of logs for better structure
class EventHub::Components::Logger
def self.stdout
logger = Logger.new(STDOUT)
logger.formatter = proc do |severity, time, progname, msg|
time_in_string = "#{time.strftime("%Y-%m-%d %H:%M:%S")}.#{"%04d" % (time.usec/100)}"
"#{time_in_string}: #{"%10s" % severity} - #{msg}\n"
end
logger
end
def self.logstash(processor_name, environment, host, port)
#configure logstash with custom fields
config = LogStashLogger.configure do |config|
config.customize_event do |event|
event["app_name"] = processor_name
event["env"] = environment
end
end
LogStashLogger.new([{type: :file, path: "logs/ruby/#{processor_name}.log", sync: true}])
end
end
|
module CountryCodes
@@codes = Hash.new
File.open('config/countries.tab').each do |record|
parts = record.split("\t")
@@codes[parts[0]] = parts[1].strip
end
#puts "countries = " + @@codes.to_s
def self.country(code)
@@codes[code]
end
def self.code(country)
c = nil
@@codes.each do |key, val|
if(country.downcase.strip == val.downcase)
c = key.downcase
break
end
end
return c
end
def self.valid_code?(code)
@@codes.key?(code)
end
end
be explicit about path to countries.tab data - required to get the tests passing in rubymine, but good practice anyway
module CountryCodes
@@codes = Hash.new
File.open(File.join(RAILS_ROOT, 'config', 'countries.tab')).each do |record|
parts = record.split("\t")
@@codes[parts[0]] = parts[1].strip
end
#puts "countries = " + @@codes.to_s
def self.country(code)
@@codes[code]
end
def self.code(country)
c = nil
@@codes.each do |key, val|
if(country.downcase.strip == val.downcase)
c = key.downcase
break
end
end
return c
end
def self.valid_code?(code)
@@codes.key?(code)
end
end |
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Mapotempo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.assets.initialize_on_precompile = true
# Application config
config.optimize_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'optimizer', expires_in: 60*60*24*10)
config.optimize_url = 'http://localhost:4567/0.1/optimize_tsptw'
config.geocode_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode', expires_in: 60*60*24*10)
config.geocode_reverse_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_reverse', expires_in: 60*60*24*10)
config.geocode_complete_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_complete', expires_in: 60*60*24*10)
config.geocode_ign_referer = 'localhost'
config.geocode_ign_key = nil
config.geocode_complete = false # Build time setting
config.trace_cache_request = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'trace_request', expires_in: 60*60*24*10)
config.trace_cache_result = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'trace_result', expires_in: 60*60*24*10)
config.tomtom_api = 'https://soap.business.tomtom.com/v1.20'
config.delayed_job_use = false
config.self_care = true # Allow subscription and resiliation by the user himself
config.help_url = nil
end
end
ActionView::Base.field_error_proc = Proc.new do |html_tag, instance|
class_attr_index = html_tag.index 'class="'
if class_attr_index
html_tag.insert class_attr_index+7, 'ui-state-error '
else
html_tag.insert html_tag.index('>'), ' class="ui-state-error"'
end
end
module ActiveRecord
module Validations
class AssociatedBubblingValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
(value.is_a?(Enumerable) || value.is_a?(ActiveRecord::Associations::CollectionProxy) ? value : [value]).each do |v|
unless v.valid?
v.errors.full_messages.each do |msg|
record.errors.add(attribute, msg, options.merge(:value => value))
end
end
end
end
end
module ClassMethods
def validates_associated_bubbling(*attr_names)
validates_with AssociatedBubblingValidator, _merge_attributes(attr_names)
end
end
end
end
Fix rails warning
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Mapotempo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.enforce_available_locales = true
I18n.config.enforce_available_locales = true
config.assets.initialize_on_precompile = true
# Application config
config.optimize_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'optimizer', expires_in: 60*60*24*10)
config.optimize_url = 'http://localhost:4567/0.1/optimize_tsptw'
config.geocode_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode', expires_in: 60*60*24*10)
config.geocode_reverse_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_reverse', expires_in: 60*60*24*10)
config.geocode_complete_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_complete', expires_in: 60*60*24*10)
config.geocode_ign_referer = 'localhost'
config.geocode_ign_key = nil
config.geocode_complete = false # Build time setting
config.trace_cache_request = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'trace_request', expires_in: 60*60*24*10)
config.trace_cache_result = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'trace_result', expires_in: 60*60*24*10)
config.tomtom_api = 'https://soap.business.tomtom.com/v1.20'
config.delayed_job_use = false
config.self_care = true # Allow subscription and resiliation by the user himself
config.help_url = nil
end
end
ActionView::Base.field_error_proc = Proc.new do |html_tag, instance|
class_attr_index = html_tag.index 'class="'
if class_attr_index
html_tag.insert class_attr_index+7, 'ui-state-error '
else
html_tag.insert html_tag.index('>'), ' class="ui-state-error"'
end
end
# Extends ActiveRecord validations with an "associated bubbling" validator:
# like validates_associated, but copies each associated record's full error
# messages onto the parent record instead of a generic "is invalid".
module ActiveRecord
  module Validations
    class AssociatedBubblingValidator < ActiveModel::EachValidator
      # Validates each associated record and bubbles its messages up to
      # `record.errors[attribute]`.
      def validate_each(record, attribute, value)
        # Normalize to a collection; a singular association arrives as one record.
        # NOTE(review): CollectionProxy is Enumerable, so the second is_a? check
        # looks redundant -- confirm before simplifying.
        (value.is_a?(Enumerable) || value.is_a?(ActiveRecord::Associations::CollectionProxy) ? value : [value]).each do |v|
          unless v.valid?
            # Copy every message from the invalid associated record.
            v.errors.full_messages.each do |msg|
              record.errors.add(attribute, msg, options.merge(:value => value))
            end
          end
        end
      end
    end
    module ClassMethods
      # Declares bubbling validation on the named associations, mirroring the
      # validates_associated macro.
      def validates_associated_bubbling(*attr_names)
        validates_with AssociatedBubblingValidator, _merge_attributes(attr_names)
      end
    end
  end
end
|
# Chef cookbook metadata for the Citrix CloudPlatform installer.
name 'cloudstack-installer'
maintainer 'Opscode'
maintainer_email 'chirag@clogeny.com'
license 'All rights reserved'
description 'Installs/Configures Citrix CloudPlatform 3.0.6'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
# NOTE(review): these recipe names use a 'cs_installer:' prefix while the
# cookbook is named 'cloudstack-installer', and Chef's qualified recipe syntax
# is 'cookbook::recipe' -- confirm these identifiers are intended.
recipe 'cs_installer:mgmt_server', "Setup the Management Server correctly"
recipe 'cs_installer:setup_zone', "Setup the CloudPlatform Environment(Zone)"
# Supported platforms and cookbook dependencies.
supports 'centos', '>= 6.2'
supports 'rhel', '>= 6.2'
depends 'selinux'
depends 'ntp'
depends 'mysql'
Bump the cookbook patch version.
# Chef cookbook metadata for the Citrix CloudPlatform installer (v0.1.2).
name 'cloudstack-installer'
maintainer 'Opscode'
maintainer_email 'chirag@clogeny.com'
license 'All rights reserved'
description 'Installs/Configures Citrix CloudPlatform 3.0.6'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.2'
# NOTE(review): recipe names use 'cs_installer:' while the cookbook is named
# 'cloudstack-installer'; Chef's qualified recipe syntax is 'cookbook::recipe'
# -- confirm these identifiers are intended.
recipe 'cs_installer:mgmt_server', "Setup the Management Server correctly"
recipe 'cs_installer:setup_zone', "Setup the CloudPlatform Environment(Zone)"
# Supported platforms and cookbook dependencies.
supports 'centos', '>= 6.2'
supports 'rhel', '>= 6.2'
depends 'selinux'
depends 'ntp'
depends 'mysql'
|
require 'xommelier'
require 'xommelier/common'
module Xommelier
module OPML
include Xommelier::Xml
# @!group Simple Types
# A single category path such as "top/sub/leaf", stored as its segments.
class Category < Array
  # Parse a slash-separated category string into its segments.
  def self.from_xommelier(value)
    new(value.split('/'))
  end

  # True when the leading segment is absent (nil).
  # FIX(review): the original read `[0] == nil`, which builds the array
  # literal [0] and compares it with nil -- always false. `self[0]` is the
  # evident intent (inspect the receiver's first element).
  def category?
    self[0].nil?
  end

  # A tag is any entry that is not a category.
  def tag?
    !category?
  end

  # Serialize back to the slash-separated form.
  def to_xommelier
    join('/')
  end
end
# A comma-separated list of Category values, e.g. "a/b,c/d".
class CategoryArray < Array
  # Parse the OPML `category` attribute. `value` may be nil when the
  # attribute is absent, so coerce with #to_s before splitting; a nil value
  # then yields an empty list instead of raising NoMethodError.
  def self.from_xommelier(value)
    new(value.to_s.split(',').map { |category| Category.from_xommelier(category) })
  end

  # Serialize each member and rejoin with commas.
  def to_xommelier
    map { |category| category.to_xommelier }.join(',')
  end
end
# @!group Complex Types
# Base element for the OPML mapping: derives the XML name (the `:as` option)
# from the Ruby name by lower-camelizing it, matching OPML's camelCase
# attribute convention, then defers to the Xml::Element macros.
class Element < Xml::Element
  def self.element(name, options = {})
    options[:as] ||= name.to_s.camelize(:lower)
    super
  end

  def self.attribute(name, options = {})
    options[:as] ||= name.to_s.camelize(:lower)
    super
  end
end
# Optional OPML <head> metadata; each child element may appear at most once.
class Head < Element
  may do
    element :title
    element :date_created, type: Common::Time822    # RFC 822 timestamp
    element :date_modified, type: Common::Time822   # RFC 822 timestamp
    element :owner_name
    element :owner_email
    element :owner_id
    element :docs
    # Window/display-state elements from the OPML 2.0 spec, commented out
    # (not mapped here):
    #element :expansionState
    #element :vertScrollState
    #element :windowTop
    #element :windowLeft
    #element :windowBottom
    #element :windowRight
  end
end
# A single <outline> node. `text` is the only required attribute; the optional
# attributes cover the common, subscription (RSS) and link usages. Outlines
# nest arbitrarily via child outlines.
class Outline < Element
  attribute :text
  may do
    # Common
    attribute :type
    attribute :is_comment, type: Boolean, default: false
    attribute :is_breakpoint, type: Boolean, default: false
    attribute :created, type: Common::Time822
    attribute :category, type: CategoryArray
    # Subscriptions
    attribute :xml_url
    attribute :html_url
    attribute :description
    attribute :language
    attribute :version
    # Links
    attribute :url
  end
  any do
    element :outline, type: Outline
  end
  # Depth-first traversal: yields this node first, then every descendant.
  def each_outline(&block)
    block.call(self)
    outlines.each do |outline|
      outline.each_outline(&block)
    end
  end
end
# The OPML <body>: one or more top-level outlines.
class Body < Element
  many do
    element :outline, type: Outline
  end
  # Yields every outline in the document, depth-first. Unlike
  # Outline#each_outline, the body itself is not yielded.
  def each_outline(&block)
    outlines.each do |outline|
      outline.each_outline(&block)
    end
  end
end
# Document root element <opml>; defaults to OPML version 2.0.
class Opml < Element
  attribute :version, default: '2.0'
  element :head, type: Head
  element :body, type: Body
end
end
end
Fix CategoryArray import in OPML: coerce a nil `category` attribute with `to_s` before splitting.
require 'xommelier'
require 'xommelier/common'
module Xommelier
module OPML
include Xommelier::Xml
# @!group Simple Types
# A single category path such as "top/sub/leaf", stored as its segments.
class Category < Array
  # Parse a slash-separated category string into its segments.
  def self.from_xommelier(value)
    new(value.split('/'))
  end

  # True when the leading segment is absent (nil).
  # FIX(review): the original read `[0] == nil`, which builds the array
  # literal [0] and compares it with nil -- always false. `self[0]` is the
  # evident intent (inspect the receiver's first element).
  def category?
    self[0].nil?
  end

  # A tag is any entry that is not a category.
  def tag?
    !category?
  end

  # Serialize back to the slash-separated form.
  def to_xommelier
    join('/')
  end
end
# A comma-separated list of Category values, e.g. "a/b,c/d".
class CategoryArray < Array
  # Parse the OPML `category` attribute; nil (attribute absent) yields an
  # empty list thanks to the #to_s coercion.
  def self.from_xommelier(value)
    parts = value.to_s.split(',')
    new(parts.map { |part| Category.from_xommelier(part) })
  end

  # Serialize each member and rejoin with commas.
  def to_xommelier
    map(&:to_xommelier).join(',')
  end
end
# @!group Complex Types
# Base element for the OPML mapping: derives the XML name (the `:as` option)
# from the Ruby name by lower-camelizing it, matching OPML's camelCase
# attribute convention, then defers to the Xml::Element macros.
class Element < Xml::Element
  def self.element(name, options = {})
    options[:as] ||= name.to_s.camelize(:lower)
    super
  end

  def self.attribute(name, options = {})
    options[:as] ||= name.to_s.camelize(:lower)
    super
  end
end
# Optional OPML <head> metadata; each child element may appear at most once.
class Head < Element
  may do
    element :title
    element :date_created, type: Common::Time822    # RFC 822 timestamp
    element :date_modified, type: Common::Time822   # RFC 822 timestamp
    element :owner_name
    element :owner_email
    element :owner_id
    element :docs
    # Window/display-state elements from the OPML 2.0 spec, commented out
    # (not mapped here):
    #element :expansionState
    #element :vertScrollState
    #element :windowTop
    #element :windowLeft
    #element :windowBottom
    #element :windowRight
  end
end
# A single <outline> node. `text` is the only required attribute; the optional
# attributes cover the common, subscription (RSS) and link usages. Outlines
# nest arbitrarily via child outlines.
class Outline < Element
  attribute :text
  may do
    # Common
    attribute :type
    attribute :is_comment, type: Boolean, default: false
    attribute :is_breakpoint, type: Boolean, default: false
    attribute :created, type: Common::Time822
    attribute :category, type: CategoryArray
    # Subscriptions
    attribute :xml_url
    attribute :html_url
    attribute :description
    attribute :language
    attribute :version
    # Links
    attribute :url
  end
  any do
    element :outline, type: Outline
  end
  # Depth-first traversal: yields this node first, then every descendant.
  def each_outline(&block)
    block.call(self)
    outlines.each do |outline|
      outline.each_outline(&block)
    end
  end
end
# The OPML <body>: one or more top-level outlines.
class Body < Element
  many do
    element :outline, type: Outline
  end
  # Yields every outline in the document, depth-first. Unlike
  # Outline#each_outline, the body itself is not yielded.
  def each_outline(&block)
    outlines.each do |outline|
      outline.each_outline(&block)
    end
  end
end
# Document root element <opml>; defaults to OPML version 2.0.
class Opml < Element
  attribute :version, default: '2.0'
  element :head, type: Head
  element :body, type: Body
end
end
end
|
# frozen_string_literal: true
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Heliotrope
  # Rails application configuration for the Heliotrope/Fulcrum app.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.1

    config.generators do |g|
      g.test_framework :rspec, spec: true
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Background jobs run through Resque.
    config.active_job.queue_adapter = :resque

    # Add concerns to autoload paths
    config.autoload_paths += %W[#{config.root}/app/presenters/concerns]
    # Add lib directory to autoload paths
    config.autoload_paths << "#{config.root}/lib"
    config.autoload_paths << "#{config.root}/lib/devise"

    # For properly generating URLs and minting DOIs - the app may not by default
    # Outside of a request context the hostname needs to be provided.
    config.hostname = Settings.host

    # Set default host
    Rails.application.routes.default_url_options[:host] = config.hostname

    # Affirmative login means that we only log someone into the application
    # when they actively initiate a login, even if they have an SSO session
    # that we recognize and could log them in automatically.
    #
    # session[:log_me_in] flag is set true when login initiated by user
    #
    # See the KeycardAuthenticatable strategy for more detail.

    # Auto login means that, if we ever access a protected resource (such that
    # the Devise authenticate_user! filter is called), we will automatically
    # sign the user into the application if they have an SSO session active.
    #
    # See the KeycardAuthenticatable strategy for more detail.
    config.auto_login = Settings.auto_login && true

    # Automatic account creation on login
    # Should we create users automatically on first login?
    # This supports convenient single sign-on account provisioning
    #
    # See the KeycardAuthenticatable strategy for more detail.
    config.create_user_on_login = Settings.create_user_on_login && true

    # HELIO-4075 Set java.io.tmpdir to application tmp
    # Ensure tmp directories are defined (Cut and pasted from DBD October 14, 2021 then modified for Fulcrum)
    verbose_init = false
    if verbose_init
      Rails.logger.info "ENV TMPDIR -- BEFORE -- Application Configuration"
      Rails.logger.info "ENV['TMPDIR']=#{ENV['TMPDIR']}"
      Rails.logger.info "ENV['_JAVA_OPTIONS']=#{ENV['_JAVA_OPTIONS']}"
      Rails.logger.info "ENV['JAVA_OPTIONS']=#{ENV['JAVA_OPTIONS']}"
    end

    # Never use /tmp, always use ~/tmp, #627 and http://stackoverflow.com/a/17068331
    tmpdir = Rails.root.join('tmp').to_s
    ENV['TMPDIR'] = tmpdir
    ENV['_JAVA_OPTIONS'] = "-Djava.io.tmpdir=#{tmpdir}" if ENV['_JAVA_OPTIONS'].blank?
    ENV['JAVA_OPTIONS'] = "-Djava.io.tmpdir=#{tmpdir}" if ENV['JAVA_OPTIONS'].blank?

    if verbose_init
      Rails.logger.info "ENV TMPDIR -- AFTER -- Application Configuration"
      Rails.logger.info "ENV['TMPDIR']=#{ENV['TMPDIR']}"
      Rails.logger.info "ENV['_JAVA_OPTIONS']=#{ENV['_JAVA_OPTIONS']}"
      Rails.logger.info "ENV['JAVA_OPTIONS']=#{ENV['JAVA_OPTIONS']}"
      Rails.logger.info `echo $TMPDIR`.to_s
      Rails.logger.info `echo $_JAVA_OPTIONS`.to_s
      Rails.logger.info `echo $JAVA_OPTIONS`.to_s
    end

    # Set the epub engine for cozy-sun-bear
    config.cozy_epub_engine = 'epubjs'

    # See https://github.com/mlibrary/umrdr/commit/4aa4e63349d6f3aa51d76f07aa20faeae6712719
    config.skylight.probes -= ['middleware']

    # Prometheus monitoring, see HELIO-3388
    ENV["PROMETHEUS_MONITORING_DIR"] = ENV["PROMETHEUS_MONITORING_DIR"] || Settings.prometheus_monitoring.dir || Rails.root.join("tmp", "prometheus").to_s
    FileUtils.mkdir_p ENV.fetch("PROMETHEUS_MONITORING_DIR")
    Prometheus::Client.config.data_store = Prometheus::Client::DataStores::DirectFileStore.new(dir: ENV.fetch("PROMETHEUS_MONITORING_DIR"))

    # HELIO-4309
    config.active_record.yaml_column_permitted_classes = [ActiveSupport::HashWithIndifferentAccess]

    # Princesse de Cleves stuff
    if ENV["CIRCLECI"].blank?
      config.princesse_de_cleves_monograph_noid = ActiveFedora::SolrService.query("+isbn_numeric:9781643150383 AND +has_model_ssim:Monograph", rows: 1)&.first&.id
      # FIX(review): only look up the EPUB when the Monograph was actually
      # found; otherwise a nil noid is interpolated into the Solr query.
      if Rails.configuration.princesse_de_cleves_monograph_noid.present?
        config.princesse_de_cleves_epub_noid = ActiveFedora::SolrService.query("+has_model_ssim:FileSet AND +monograph_id_ssim:#{Rails.configuration.princesse_de_cleves_monograph_noid} AND +label_tesim:epub", rows: 1)&.first&.id
      else
        config.princesse_de_cleves_epub_noid = nil
      end
    end

    config.to_prepare do
      # ensure overrides are loaded
      # see https://bibwild.wordpress.com/2016/12/27/a-class_eval-monkey-patching-pattern-with-prepend/
      Dir.glob(Rails.root.join('app', '**', '*_override*.rb')) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end

      # Here we swap out some of the default actor stack, from Hyrax, located
      # (in Hyrax itself) at app/services/default_middleware_stack.rb
      #
      # FIRST IN LINE
      #
      # Insert actor after obtaining lock so we are first in line!
      Hyrax::CurationConcern.actor_factory.insert_after(Hyrax::Actors::OptimisticLockValidator, HeliotropeActor)
      # Maybe register DOIs on update
      Hyrax::CurationConcern.actor_factory.insert_after(HeliotropeActor, RegisterFileSetDoisActor)
      # Heliotrope "importer" style CreateWithFilesActor
      Hyrax::CurationConcern.actor_factory.insert_after(RegisterFileSetDoisActor, CreateWithImportFilesActor)
      #
      # LAST IN LINE
      #
      # Destroy FeaturedRepresentatives on delete
      Hyrax::CurationConcern.actor_factory.insert_after(Hyrax::Actors::CleanupTrophiesActor, FeaturedRepresentativeActor)
    end
  end
end
Don't look up the Princesse de Clèves EPUB when its Monograph is not found.
# frozen_string_literal: true
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Heliotrope
  # Rails application configuration for the Heliotrope/Fulcrum app.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.1

    config.generators do |g|
      g.test_framework :rspec, spec: true
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Background jobs run through Resque.
    config.active_job.queue_adapter = :resque

    # Add concerns to autoload paths
    config.autoload_paths += %W[#{config.root}/app/presenters/concerns]
    # Add lib directory to autoload paths
    config.autoload_paths << "#{config.root}/lib"
    config.autoload_paths << "#{config.root}/lib/devise"

    # For properly generating URLs and minting DOIs - the app may not by default
    # Outside of a request context the hostname needs to be provided.
    config.hostname = Settings.host

    # Set default host
    Rails.application.routes.default_url_options[:host] = config.hostname

    # Affirmative login means that we only log someone into the application
    # when they actively initiate a login, even if they have an SSO session
    # that we recognize and could log them in automatically.
    #
    # session[:log_me_in] flag is set true when login initiated by user
    #
    # See the KeycardAuthenticatable strategy for more detail.

    # Auto login means that, if we ever access a protected resource (such that
    # the Devise authenticate_user! filter is called), we will automatically
    # sign the user into the application if they have an SSO session active.
    #
    # See the KeycardAuthenticatable strategy for more detail.
    # NOTE(review): `x && true` leaves nil/false untouched; presumably meant to
    # coerce a truthy setting to literal true -- confirm intent.
    config.auto_login = Settings.auto_login && true

    # Automatic account creation on login
    # Should we create users automatically on first login?
    # This supports convenient single sign-on account provisioning
    #
    # See the KeycardAuthenticatable strategy for more detail.
    config.create_user_on_login = Settings.create_user_on_login && true

    # HELIO-4075 Set java.io.tmpdir to application tmp
    # Ensure tmp directories are defined (Cut and pasted from DBD October 14, 2021 then modified for Fulcrum)
    verbose_init = false
    if verbose_init
      Rails.logger.info "ENV TMPDIR -- BEFORE -- Application Configuration"
      Rails.logger.info "ENV['TMPDIR']=#{ENV['TMPDIR']}"
      Rails.logger.info "ENV['_JAVA_OPTIONS']=#{ENV['_JAVA_OPTIONS']}"
      Rails.logger.info "ENV['JAVA_OPTIONS']=#{ENV['JAVA_OPTIONS']}"
    end

    # Never use /tmp, always use ~/tmp, #627 and http://stackoverflow.com/a/17068331
    tmpdir = Rails.root.join('tmp').to_s
    ENV['TMPDIR'] = tmpdir
    ENV['_JAVA_OPTIONS'] = "-Djava.io.tmpdir=#{tmpdir}" if ENV['_JAVA_OPTIONS'].blank?
    ENV['JAVA_OPTIONS'] = "-Djava.io.tmpdir=#{tmpdir}" if ENV['JAVA_OPTIONS'].blank?

    if verbose_init
      Rails.logger.info "ENV TMPDIR -- AFTER -- Application Configuration"
      Rails.logger.info "ENV['TMPDIR']=#{ENV['TMPDIR']}"
      Rails.logger.info "ENV['_JAVA_OPTIONS']=#{ENV['_JAVA_OPTIONS']}"
      Rails.logger.info "ENV['JAVA_OPTIONS']=#{ENV['JAVA_OPTIONS']}"
      Rails.logger.info `echo $TMPDIR`.to_s
      Rails.logger.info `echo $_JAVA_OPTIONS`.to_s
      Rails.logger.info `echo $JAVA_OPTIONS`.to_s
    end

    # Set the epub engine for cozy-sun-bear
    config.cozy_epub_engine = 'epubjs'

    # See https://github.com/mlibrary/umrdr/commit/4aa4e63349d6f3aa51d76f07aa20faeae6712719
    config.skylight.probes -= ['middleware']

    # Prometheus monitoring, see HELIO-3388
    ENV["PROMETHEUS_MONITORING_DIR"] = ENV["PROMETHEUS_MONITORING_DIR"] || Settings.prometheus_monitoring.dir || Rails.root.join("tmp", "prometheus").to_s
    FileUtils.mkdir_p ENV.fetch("PROMETHEUS_MONITORING_DIR")
    Prometheus::Client.config.data_store = Prometheus::Client::DataStores::DirectFileStore.new(dir: ENV.fetch("PROMETHEUS_MONITORING_DIR"))

    # HELIO-4309
    config.active_record.yaml_column_permitted_classes = [ActiveSupport::HashWithIndifferentAccess]

    # Princesse de Cleves stuff
    if ENV["CIRCLECI"].blank?
      config.princesse_de_cleves_monograph_noid = ActiveFedora::SolrService.query("+isbn_numeric:9781643150383 AND +has_model_ssim:Monograph", rows: 1)&.first&.id
      # Only resolve the EPUB noid when the Monograph noid was found, so a nil
      # noid is never interpolated into the Solr query.
      if Rails.configuration.princesse_de_cleves_monograph_noid.present?
        config.princesse_de_cleves_epub_noid = ActiveFedora::SolrService.query("+has_model_ssim:FileSet AND +monograph_id_ssim:#{Rails.configuration.princesse_de_cleves_monograph_noid} AND +label_tesim:epub", rows: 1)&.first&.id
      else
        config.princesse_de_cleves_epub_noid = nil
      end
    end

    config.to_prepare do
      # ensure overrides are loaded
      # see https://bibwild.wordpress.com/2016/12/27/a-class_eval-monkey-patching-pattern-with-prepend/
      Dir.glob(Rails.root.join('app', '**', '*_override*.rb')) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end

      # Here we swap out some of the default actor stack, from Hyrax, located
      # (in Hyrax itself) at app/services/default_middleware_stack.rb
      #
      # FIRST IN LINE
      #
      # Insert actor after obtaining lock so we are first in line!
      Hyrax::CurationConcern.actor_factory.insert_after(Hyrax::Actors::OptimisticLockValidator, HeliotropeActor)
      # Maybe register DOIs on update
      Hyrax::CurationConcern.actor_factory.insert_after(HeliotropeActor, RegisterFileSetDoisActor)
      # Heliotrope "importer" style CreateWithFilesActor
      Hyrax::CurationConcern.actor_factory.insert_after(RegisterFileSetDoisActor, CreateWithImportFilesActor)
      #
      # LAST IN LINE
      #
      # Destroy FeaturedRepresentatives on delete
      Hyrax::CurationConcern.actor_factory.insert_after(Hyrax::Actors::CleanupTrophiesActor, FeaturedRepresentativeActor)
    end
  end
end
|
# Altai Private Cloud
# Copyright (C) GridDynamics Openstack Core Team, GridDynamics
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU General Public License
require "rubygems"
require "uuid"

log("Start to install nova")
log("Start to install nova-openstack")

# Install the Essex nova services, the client, the network extensions package
# and ntp. FIX(review): nova-networks-ext was missing from the list even
# though the recipe configures nova-network below.
%w( openstack-nova-essex-api
    openstack-nova-essex-network
    openstack-nova-essex-objectstore
    openstack-nova-essex-scheduler
    openstack-nova-essex-volume
    openstack-nova-essex-xvpvncproxy
    python-novaclient-essex
    nova-networks-ext
    ntp).each do |package_name|
  package package_name
end

# Create the nova and dns MySQL databases with their service accounts.
mysql_create_database "nova" do
  user :nova
  password node["mysql-nova-password"]
end

mysql_create_database "dns" do
  user :dns
  password node["mysql-dns-password"]
end

# Render nova configuration files, readable only by the nova service user.
template "/etc/nova/nova.conf" do
  source "nova/nova.conf.erb"
  mode 00600
  owner "nova"
  group "nobody"
end

template "/etc/nova/api-paste.ini" do
  source "nova/api-paste.ini.erb"
  mode 00600
  owner "nova"
  group "nobody"
end

# Initialize/migrate the nova database schema.
execute "db sync" do
  command "nova-manage db sync"
end

# Enable and (re)start time sync and the nova services.
%w(ntpd nova-api nova-network nova-scheduler nova-objectstore
   nova-xvpvncproxy).each do |svc_name|
  service svc_name do
    action [:enable, :restart]
  end
end

#try "set ip_forward" do
#  code <<-EOH
#  #FIXME - this doesn't work in Jenkins testbed
#  #echo "net.ipv4.ip_forward = 1" > /etc/sysctl.d/ip_forward
#  #sysctl -p
#  EOH
#end

# FIX(review): corrected "succesfully" typo in the log message.
log("nova was successfully installed")
Install the nova-networks-ext package alongside the other nova packages.
# Altai Private Cloud
# Copyright (C) GridDynamics Openstack Core Team, GridDynamics
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU General Public License
require "rubygems"
require "uuid"

log("Start to install nova")
log("Start to install nova-openstack")

# Install the Essex nova services, the client, the network extensions package
# and ntp.
%w( openstack-nova-essex-api
    openstack-nova-essex-network
    openstack-nova-essex-objectstore
    openstack-nova-essex-scheduler
    openstack-nova-essex-volume
    openstack-nova-essex-xvpvncproxy
    python-novaclient-essex
    nova-networks-ext
    ntp).each do |package_name|
  package package_name
end

# Create the nova and dns MySQL databases with their service accounts.
mysql_create_database "nova" do
  user :nova
  password node["mysql-nova-password"]
end

mysql_create_database "dns" do
  user :dns
  password node["mysql-dns-password"]
end

# Render nova configuration files, readable only by the nova service user.
template "/etc/nova/nova.conf" do
  source "nova/nova.conf.erb"
  mode 00600
  owner "nova"
  group "nobody"
end

template "/etc/nova/api-paste.ini" do
  source "nova/api-paste.ini.erb"
  mode 00600
  owner "nova"
  group "nobody"
end

# Initialize/migrate the nova database schema.
execute "db sync" do
  command "nova-manage db sync"
end

# Enable and (re)start time sync and the nova services.
%w(ntpd nova-api nova-network nova-scheduler nova-objectstore
   nova-xvpvncproxy).each do |svc_name|
  service svc_name do
    action [:enable, :restart]
  end
end

#try "set ip_forward" do
#  code <<-EOH
#  #FIXME - this doesn't work in Jenkins testbed
#  #echo "net.ipv4.ip_forward = 1" > /etc/sysctl.d/ip_forward
#  #sysctl -p
#  EOH
#end

# FIX(review): corrected "succesfully" typo in the log message.
log("nova was successfully installed")
|
require 'cuffsert/cfarguments'
require 'cuffsert/cfstates'
require 'cuffsert/cli_args'
require 'cuffsert/metadata'
require 'cuffsert/presenters'
require 'cuffsert/rxcfclient'
require 'rx'
require 'uri'
# TODO:
# - Stop using file: that we anyway need to special-case in cfarguments
# - default value for meta.metadata when stack_path is local file
# - selector and metadata are mandatory and need guards accordingly
# - validate_and_urlify belongs in metadata.rb
# - execute should use helpers and not know details of statuses
module CuffSert
  # Turn a stack path argument into a URI. Bare local paths are expanded and
  # converted into file:// URIs; only s3 and file schemes are accepted.
  # Raises RuntimeError for missing local files or unsupported schemes.
  def self.validate_and_urlify(stack_path)
    if stack_path =~ /^[A-Za-z0-9]+:/
      stack_uri = URI.parse(stack_path)
    else
      normalized = File.expand_path(stack_path)
      unless File.exist?(normalized)
        raise "Local file #{normalized} does not exist"
      end
      stack_uri = URI.join('file:///', normalized)
    end
    unless ['s3', 'file'].include?(stack_uri.scheme)
      raise "Uri #{stack_uri.scheme} is not supported"
    end
    stack_uri
  end

  # A change set needs operator confirmation when it deletes a resource or
  # may replace one on modify, unless the user passed the dangerous-ok flag.
  def self.need_confirmation(meta, change_set)
    return false if meta.dangerous_ok
    change_set[:changes].any? do |change|
      change[:action] == 'Delete' || (
        change[:action] == 'Modify' &&
        ['True', 'Conditional'].include?(change[:replacement])
      )
    end
  end

  # Small wrappers so execute reads as intent rather than argument plumbing.
  def self.create_stack(client, meta)
    client.create_stack(CuffSert.as_create_stack_args(meta))
  end

  def self.update_stack(client, meta)
    client.update_stack(CuffSert.as_update_stack_args(meta))
  end

  def self.delete_stack(client, meta)
    client.delete_stack(CuffSert.as_delete_stack_args(meta))
  end

  # Create, recreate or update the stack described by meta and return a
  # concatenated stream of the resulting CloudFormation events.
  # REFACTOR(review): the original reassigned `found = nil` to fall through
  # into the create branch; explicit branches make the dispatch readable.
  def self.execute(meta, client: RxCFClient.new)
    sources = []
    found = client.find_stack_blocking(meta)
    if found && INPROGRESS_STATES.include?(found[:stack_status])
      raise 'Stack operation already in progress'
    end
    if found.nil?
      sources << self.create_stack(client, meta)
    elsif found[:stack_status] == 'ROLLBACK_COMPLETE'
      # A stack stuck in ROLLBACK_COMPLETE can only be deleted; recreate it.
      sources << self.delete_stack(client, meta)
      sources << self.create_stack(client, meta)
    else
      sources << self.update_stack(client, meta)
    end
    Rx::Observable.concat(*sources)
  end

  # CLI entry point: parse args, resolve the single stack path and present
  # the resulting event stream.
  def self.run(argv)
    cli_args = CuffSert.parse_cli_args(argv)
    meta = CuffSert.build_meta(cli_args)
    if cli_args[:stack_path].nil? || cli_args[:stack_path].size != 1
      raise 'Requires exactly one stack path'
    end
    stack_path = cli_args[:stack_path][0]
    meta.stack_uri = CuffSert.validate_and_urlify(stack_path)
    events = CuffSert.execute(meta)
    RawPresenter.new(events)
  end
end
Refactor `execute` for clearer flow; change-set-based updates will soon complicate it.
require 'cuffsert/cfarguments'
require 'cuffsert/cfstates'
require 'cuffsert/cli_args'
require 'cuffsert/metadata'
require 'cuffsert/presenters'
require 'cuffsert/rxcfclient'
require 'rx'
require 'uri'
# TODO:
# - Stop using file: that we anyway need to special-case in cfarguments
# - default value for meta.metadata when stack_path is local file
# - selector and metadata are mandatory and need guards accordingly
# - validate_and_urlify belongs in metadata.rb
# - execute should use helpers and not know details of statuses
# CuffSert entry points: stack path validation, the confirmation policy and
# the create/recreate/update dispatch against CloudFormation.
module CuffSert
  # Convert a stack path argument into an s3:// or file:// URI; local paths
  # are expanded and must exist. Raises RuntimeError otherwise.
  def self.validate_and_urlify(stack_path)
    stack_uri =
      if stack_path =~ /^[A-Za-z0-9]+:/
        URI.parse(stack_path)
      else
        local = File.expand_path(stack_path)
        raise "Local file #{local} does not exist" unless File.exist?(local)
        URI.join('file:///', local)
      end
    scheme = stack_uri.scheme
    raise "Uri #{scheme} is not supported" unless %w[s3 file].include?(scheme)
    stack_uri
  end

  # Confirmation is required for deletes and possibly-replacing modifies,
  # unless the user opted in to dangerous operations.
  def self.need_confirmation(meta, change_set)
    return false if meta.dangerous_ok
    change_set[:changes].any? do |change|
      action = change[:action]
      next true if action == 'Delete'
      action == 'Modify' && ['True', 'Conditional'].include?(change[:replacement])
    end
  end

  # Thin argument-building wrappers around the client operations.
  def self.create_stack(client, meta)
    client.create_stack(CuffSert.as_create_stack_args(meta))
  end

  def self.update_stack(client, meta)
    client.update_stack(CuffSert.as_update_stack_args(meta))
  end

  def self.delete_stack(client, meta)
    client.delete_stack(CuffSert.as_delete_stack_args(meta))
  end

  # Dispatch on the stack's current state and return the concatenated event
  # stream for the chosen operation(s).
  def self.execute(meta, client: RxCFClient.new)
    found = client.find_stack_blocking(meta)
    if found && INPROGRESS_STATES.include?(found[:stack_status])
      raise 'Stack operation already in progress'
    end
    sources =
      if found.nil?
        [self.create_stack(client, meta)]
      elsif found[:stack_status] == 'ROLLBACK_COMPLETE'
        # ROLLBACK_COMPLETE stacks cannot be updated; delete then recreate.
        [self.delete_stack(client, meta), self.create_stack(client, meta)]
      else
        [self.update_stack(client, meta)]
      end
    Rx::Observable.concat(*sources)
  end

  # CLI entry point: parse arguments, resolve the single stack path, run and
  # hand the event stream to the presenter.
  def self.run(argv)
    cli_args = CuffSert.parse_cli_args(argv)
    meta = CuffSert.build_meta(cli_args)
    paths = cli_args[:stack_path]
    raise 'Requires exactly one stack path' if paths.nil? || paths.size != 1
    meta.stack_uri = CuffSert.validate_and_urlify(paths[0])
    RawPresenter.new(CuffSert.execute(meta))
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Kozuchi
  # Rails application configuration for Kozuchi.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    config.eager_load_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    config.active_record.observers = :user_observer

    # FIX(review): removed `config.encoding = "utf-8"` -- it only configured
    # template encoding for Ruby 1.8 and is a no-op on Ruby 1.9+ where the
    # default source encoding is already UTF-8.

    # Controller caches live on disk under tmp/cache.
    config.action_controller.cache_store = :file_store, File.join(::Rails.root.to_s, "tmp/cache")

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # (here: some string columns are declared with the BINARY attribute).
    config.active_record.schema_format = :sql

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    config.assets.precompile += ['all_media.css', 'print.css']
  end

  # Module-level accessors for news/policy update dates (set elsewhere --
  # TODO confirm which initializer assigns them).
  class << self
    attr_accessor :news_updated_on, :personal_info_policy_updated_on, :privacy_policy_updated_on
  end
end
Remove config.encoding
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Kozuchi
  # Rails application configuration for Kozuchi.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    config.eager_load_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    config.active_record.observers = :user_observer

    # Controller caches live on disk under tmp/cache.
    config.action_controller.cache_store = :file_store, File.join(::Rails.root.to_s, "tmp/cache")

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # (here: because some string columns are declared with the BINARY attribute).
    config.active_record.schema_format = :sql

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    config.assets.precompile += ['all_media.css', 'print.css']
  end

  # Module-level accessors for news/policy update dates (set elsewhere --
  # TODO confirm which initializer assigns them).
  class << self
    attr_accessor :news_updated_on, :personal_info_policy_updated_on, :privacy_policy_updated_on
  end
end
|
#
# Cookbook Name:: impala
# Recipe:: hbase
#
# Copyright 2015, Cloudera Inc.
#
# All rights reserved - Do Not Redistribute
#
# Setup HBase
# Prepare passwordless SSH for the HBase setup: authorize the dev user's own
# key against itself and pre-trust the git host so later fetches are
# non-interactive.
# NOTE(review): github.mtv.cloudera.com is an internal host -- confirm it is
# reachable from the target environment (public GitHub would be github.com).
bash 'setup_hbase_ssh' do
  user node['impala_dev']['username']
  code <<-EOH
cd /home/#{node['impala_dev']['username']}
cat .ssh/id_rsa.pub >> .ssh/authorized_keys
chmod 600 .ssh/authorized_keys
ssh-keyscan -H github.mtv.cloudera.com >> .ssh/known_hosts
chmod 600 .ssh/known_hosts
EOH
end
Scan the SSH host key of public GitHub (github.com) instead of the internal mirror.
#
# Cookbook Name:: impala
# Recipe:: hbase
#
# Copyright 2015, Cloudera Inc.
#
# All rights reserved - Do Not Redistribute
#
# Setup HBase
# Prepare passwordless SSH for the HBase setup: authorize the dev user's own
# key against itself and pre-trust public GitHub's host key so later fetches
# are non-interactive.
bash 'setup_hbase_ssh' do
  user node['impala_dev']['username']
  code <<-EOH
cd /home/#{node['impala_dev']['username']}
cat .ssh/id_rsa.pub >> .ssh/authorized_keys
chmod 600 .ssh/authorized_keys
ssh-keyscan -H github.com >> .ssh/known_hosts
chmod 600 .ssh/known_hosts
EOH
end
|
# Virtus/ActiveModel mapping of an Exact Online sales invoice resource.
module Exact
  class SalesInvoice < Base
    include ActiveModel::Model
    include Virtus.model

    # REST service name and GUID field used when talking to the Exact API.
    # FIX(review): constants frozen -- unfrozen string/array constants are
    # silently mutable.
    EXACT_SERVICE = 'salesinvoice'.freeze
    EXACT_GUID = 'InvoiceID'.freeze

    # Attributes no longer sent to the API (presumably dropped before sync --
    # confirm against Base).
    OBSOLETE_INSTANCE_VARIABLES = ['TaxSchedule',
                                   'TaxScheduleCode',
                                   'TaxScheduleDescription'].freeze

    # Monetary amounts are kept as String -- presumably to preserve the API's
    # decimal representation exactly; confirm before converting to BigDecimal.
    attribute :AmountDC, String
    attribute :AmountDiscount, String
    attribute :AmountDiscountExclVat, String
    attribute :AmountFC, String
    attribute :AmountFCExclVat, String
    attribute :Created, DateTime
    attribute :Creator, String
    attribute :CreatorFullName, String
    attribute :Currency, String
    attribute :Description, String
    attribute :Discount, String
    attribute :DiscountType, Integer
    attribute :Division, Integer
    attribute :DeliverTo, String
    attribute :DeliverToAddress, String
    attribute :DeliverToContactPerson, String
    attribute :DeliverToContactPersonFullName, String
    attribute :DeliverToName, String
    attribute :Document, String
    attribute :DocumentNumber, Integer
    attribute :DocumentSubject, String
    attribute :DueDate, DateTime
    attribute :ExtraDutyAmountFC, String
    attribute :GAccountAmountFC, String
    attribute :InvoiceDate, DateTime
    attribute :InvoiceID, String
    attribute :InvoiceTo, String
    attribute :InvoiceToContactPerson, String
    attribute :InvoiceToContactPersonFullName, String
    attribute :InvoiceToName, String
    attribute :InvoiceNumber, Integer
    attribute :IsExtraDuty, Boolean
    attribute :Journal, String
    attribute :JournalDescription, String
    attribute :Modified, DateTime
    attribute :Modifier, String
    attribute :ModifierFullName, String
    attribute :OrderDate, DateTime
    attribute :OrderedBy, String
    attribute :OrderedByContactPerson, String
    attribute :OrderedByContactPersonFullName, String
    attribute :OrderedByName, String
    attribute :OrderNumber, Integer
    attribute :PaymentCondition, String
    attribute :PaymentConditionDescription, String
    attribute :PaymentReference, String
    attribute :Remarks, String
    attribute :Salesperson, String
    attribute :SalespersonFullName, String
    attribute :StarterSalesInvoiceStatus, String
    attribute :StarterSalesInvoiceStatusDescription, String
    attribute :Status, String
    attribute :StatusDescription, String
    attribute :TaxSchedule, String
    attribute :TaxScheduleCode, String
    attribute :TaxScheduleDescription, String
    attribute :Type, Integer
    attribute :TypeDescription, String
    attribute :VATAmountFC, String
    attribute :VATAmountDC, String
    attribute :Warehouse, String
    attribute :WithholdingTaxAmountFC, String
    attribute :WithholdingTaxBaseAmount, String
    attribute :WithholdingTaxPercentage, String
    attribute :YourRef, String
  end
end
Add SelectionCode attributes to SalesInvoice.
# Virtus/ActiveModel mapping of the Exact Online SalesInvoice resource.
# Attribute names deliberately mirror the remote API field names (CamelCase).
module Exact
  class SalesInvoice < Base
    include ActiveModel::Model
    include Virtus.model

    # Remote REST service name and the field that uniquely identifies a record.
    EXACT_SERVICE = 'salesinvoice'
    EXACT_GUID = 'InvoiceID'

    # Fields no longer returned by the API but kept for previously stored records.
    OBSOLETE_INSTANCE_VARIABLES = ['TaxSchedule',
                                   'TaxScheduleCode',
                                   'TaxScheduleDescription']

    attribute :AmountDC, String
    attribute :AmountDiscount, String
    attribute :AmountDiscountExclVat, String
    attribute :AmountFC, String
    attribute :AmountFCExclVat, String
    attribute :Created, DateTime
    attribute :Creator, String
    attribute :CreatorFullName, String
    attribute :Currency, String
    attribute :Description, String
    attribute :Discount, String
    attribute :DiscountType, Integer
    attribute :Division, Integer
    attribute :DeliverTo, String
    attribute :DeliverToAddress, String
    attribute :DeliverToContactPerson, String
    attribute :DeliverToContactPersonFullName, String
    attribute :DeliverToName, String
    attribute :Document, String
    attribute :DocumentNumber, Integer
    attribute :DocumentSubject, String
    attribute :DueDate, DateTime
    attribute :ExtraDutyAmountFC, String
    attribute :GAccountAmountFC, String
    attribute :InvoiceDate, DateTime
    attribute :InvoiceID, String
    attribute :InvoiceTo, String
    attribute :InvoiceToContactPerson, String
    attribute :InvoiceToContactPersonFullName, String
    attribute :InvoiceToName, String
    attribute :InvoiceNumber, Integer
    attribute :IsExtraDuty, Boolean
    attribute :Journal, String
    attribute :JournalDescription, String
    attribute :Modified, DateTime
    attribute :Modifier, String
    attribute :ModifierFullName, String
    attribute :OrderDate, DateTime
    attribute :OrderedBy, String
    attribute :OrderedByContactPerson, String
    attribute :OrderedByContactPersonFullName, String
    attribute :OrderedByName, String
    attribute :OrderNumber, Integer
    attribute :PaymentCondition, String
    attribute :PaymentConditionDescription, String
    attribute :PaymentReference, String
    attribute :Remarks, String
    attribute :Salesperson, String
    attribute :SalespersonFullName, String
    # Bug fix: these three lines were a syntax error —
    # "attribute :SelectionCode Strings" lacked the comma and misspelled String.
    attribute :SelectionCode, String
    attribute :SelectionCodeCode, String
    attribute :SelectionCodeDescription, String
    attribute :StarterSalesInvoiceStatus, String
    attribute :StarterSalesInvoiceStatusDescription, String
    attribute :Status, String
    attribute :StatusDescription, String
    attribute :TaxSchedule, String
    attribute :TaxScheduleCode, String
    attribute :TaxScheduleDescription, String
    attribute :Type, Integer
    attribute :TypeDescription, String
    attribute :VATAmountFC, String
    attribute :VATAmountDC, String
    attribute :Warehouse, String
    attribute :WithholdingTaxAmountFC, String
    attribute :WithholdingTaxBaseAmount, String
    attribute :WithholdingTaxPercentage, String
    attribute :YourRef, String
  end
end
|
class OrigenTestersApplication < Origen::Application
  # See http://origen.freescale.net/origen/latest/api/Origen/Application/Configuration.html
  # for a full list of the configuration options available

  # Share this plugin's commands with host applications.
  config.shared = {
    :command_launcher => "config/shared_commands.rb"
  }

  # This information is used in headers and email templates, set it specific
  # to your application
  config.name = 'Origen Testers'
  config.initials = 'OrigenTesters'
  self.name = 'origen_testers'
  self.namespace = 'OrigenTesters'
  config.rc_url = "git@github.com:Origen-SDK/origen_testers.git"
  config.release_externally = true

  # Directories/files that 'origen rc' should treat as unmanaged.
  config.unmanaged_dirs = %w[spec/patterns/bin]
  config.unmanaged_files = %w[]

  config.web_directory = "git@github.com:Origen-SDK/Origen-SDK.github.io.git/testers"
  config.web_domain = "http://origen-sdk.org/testers"

  # When false Origen will be less strict about checking for some common coding errors,
  # it is recommended that you leave this to true for better feedback and easier debug.
  # This will be the default setting in Origen v3.
  config.strict_errors = true
  config.semantically_version = true

  # By default all generated output will end up in ./output.
  # Here you can specify an alternative directory entirely, or make it dynamic such that
  # the output ends up in a setup specific directory.
  config.output_directory do
    # Bug fix: guard against $tester not being defined yet (e.g. during
    # 'origen web compile' no environment/tester has been loaded);
    # previously this raised NoMethodError on nil.
    dir = "#{Origen.root}/output/#{$tester.nil? ? '' : $tester.name}"
    # Check if running on windows, if so, substitute :: with _
    dir.gsub!("::", "_") if Origen.os.windows?
    dir
  end

  # Similary for the reference files, generally you want to setup the reference directory
  # structure to mirror that of your output directory structure.
  config.reference_directory do
    # Same nil guard as for the output directory above.
    dir = "#{Origen.root}/.ref/#{$tester.nil? ? '' : $tester.name}"
    # Check if running on windows, if so, substitute :: with _
    dir.gsub!("::", "_") if Origen.os.windows?
    dir
  end

  # Setting this to the spec area for testing of compiler
  config.pattern_output_directory do
    dir = "#{Origen.root}/spec/patterns/atp"
    # Check if running on windows, if so, substitute :: with _
    dir.gsub!("::", "_") if Origen.os.windows?
    dir
  end

  # Run the tests before deploying to generate test coverage numbers
  def before_deploy_site
    Dir.chdir Origen.root do
      system 'origen examples -c'
      system 'origen specs -c'
      dir = "#{Origen.root}/web/output/coverage"
      FileUtils.remove_dir(dir, true) if File.exist?(dir)
      system "mv #{Origen.root}/coverage #{dir}"
    end
  end

  # This will automatically deploy your documentation after every tag
  def after_release_email(tag, note, type, selector, options)
    command = "origen web compile --remote --api --comment 'Release of #{Origen.app.name} #{Origen.app.version}'"
    Dir.chdir Origen.root do
      system command
    end
  end

  # Ensure that all tests pass before allowing a release to continue
  def validate_release
    if !system("origen examples") # || !system("origen specs")
      puts "Sorry but you can't release with failing tests, please fix them and try again."
      exit 1
    else
      puts "All tests passing, proceeding with release process!"
    end
  end

  # Help to find patterns based on an iterator
  config.pattern_name_translator do |name|
    if name == 'dummy_name'
      { :source => 'timing', :output => 'timing' }
    else
      name.gsub(/_b\d/, '_bx')
    end
  end

  if current? # Standalone only configs
    # By block iterator
    config.pattern_iterator do |iterator|
      iterator.key = :by_block
      iterator.loop do |&pattern|
        $nvm.blocks.each do |block|
          pattern.call(block)
        end
      end
      iterator.setup do |block|
        blk = $nvm.find_block_by_id(block.id)
        blk.select
        blk
      end
      iterator.pattern_name do |name, block|
        name.gsub('_bx', "_b#{block.id}")
      end
    end

    # By setting iterator
    config.pattern_iterator do |iterator|
      iterator.key = :by_setting
      iterator.loop do |settings, &pattern|
        settings.each do |setting|
          pattern.call(setting)
        end
      end
      iterator.pattern_name do |name, setting|
        name.gsub('_x', "_#{setting}")
      end
    end
  end # standalone only configs

  # Set up lint test
  config.lint_test = {
    # Require the lint tests to pass before allowing a release to proceed
    :run_on_tag => true,
    # Auto correct violations where possible whenever 'origen lint' is run
    :auto_correct => true,
    # Limit the testing for large legacy applications
    #:level => :easy,
    # Run on these directories/files by default
    #:files => ["lib", "config/application.rb"],
  }
end
Minor fix for web compile when $tester is not yet defined.
# Origen application/plugin configuration for Origen Testers.
class OrigenTestersApplication < Origen::Application
# See http://origen.freescale.net/origen/latest/api/Origen/Application/Configuration.html
# for a full list of the configuration options available
# Share this plugin's commands with host applications.
config.shared = {
:command_launcher => "config/shared_commands.rb"
}
# This information is used in headers and email templates, set it specific
# to your application
config.name = 'Origen Testers'
config.initials = 'OrigenTesters'
self.name = 'origen_testers'
self.namespace = 'OrigenTesters'
config.rc_url = "git@github.com:Origen-SDK/origen_testers.git"
config.release_externally = true
# Directories/files that 'origen rc' should treat as unmanaged.
config.unmanaged_dirs = %w[spec/patterns/bin]
config.unmanaged_files = %w[]
config.web_directory = "git@github.com:Origen-SDK/Origen-SDK.github.io.git/testers"
config.web_domain = "http://origen-sdk.org/testers"
# When false Origen will be less strict about checking for some common coding errors,
# it is recommended that you leave this to true for better feedback and easier debug.
# This will be the default setting in Origen v3.
config.strict_errors = true
config.semantically_version = true
# to handle for web compile where environment/tester not yet defined
# NOTE(review): tester_name is captured once, when this class body loads;
# if $tester is only set later, the output/reference directory blocks below
# still see the load-time value — confirm this is the intended behavior.
tester_name = $tester.nil? ? '' : $tester.name
# By default all generated output will end up in ./output.
# Here you can specify an alternative directory entirely, or make it dynamic such that
# the output ends up in a setup specific directory.
config.output_directory do
dir = "#{Origen.root}/output/#{tester_name}"
# Check if running on windows, if so, substitute :: with _
dir.gsub!("::","_") if Origen.os.windows?
dir
end
# Similary for the reference files, generally you want to setup the reference directory
# structure to mirror that of your output directory structure.
config.reference_directory do
dir = "#{Origen.root}/.ref/#{tester_name}"
# Check if running on windows, if so, substitute :: with _
dir.gsub!("::","_") if Origen.os.windows?
dir
end
# Setting this to the spec area for testing of compiler
config.pattern_output_directory do
dir = "#{Origen.root}/spec/patterns/atp"
# Check if running on windows, if so, substitute :: with _
dir.gsub!("::","_") if Origen.os.windows?
dir
end
# Run the tests before deploying to generate test coverage numbers
def before_deploy_site
Dir.chdir Origen.root do
system 'origen examples -c'
system 'origen specs -c'
dir = "#{Origen.root}/web/output/coverage"
FileUtils.remove_dir(dir, true) if File.exist?(dir)
system "mv #{Origen.root}/coverage #{dir}"
end
end
# This will automatically deploy your documentation after every tag
def after_release_email(tag, note, type, selector, options)
command = "origen web compile --remote --api --comment 'Release of #{Origen.app.name} #{Origen.app.version}'"
Dir.chdir Origen.root do
system command
end
end
# Ensure that all tests pass before allowing a release to continue
def validate_release
if !system("origen examples") # || !system("origen specs")
puts "Sorry but you can't release with failing tests, please fix them and try again."
exit 1
else
puts "All tests passing, proceeding with release process!"
end
end
# Help to find patterns based on an iterator
config.pattern_name_translator do |name|
if name == 'dummy_name'
{ :source => 'timing', :output => 'timing' }
else
name.gsub(/_b\d/, '_bx')
end
end
if current? # Standalone only configs
# By block iterator: runs a pattern once per $nvm block.
config.pattern_iterator do |iterator|
iterator.key = :by_block
iterator.loop do |&pattern|
$nvm.blocks.each do |block|
pattern.call(block)
end
end
iterator.setup do |block|
blk = $nvm.find_block_by_id(block.id)
blk.select
blk
end
iterator.pattern_name do |name, block|
name.gsub('_bx', "_b#{block.id}")
end
end
# By setting iterator: runs a pattern once per supplied setting value.
config.pattern_iterator do |iterator|
iterator.key = :by_setting
iterator.loop do |settings, &pattern|
settings.each do |setting|
pattern.call(setting)
end
end
iterator.pattern_name do |name, setting|
name.gsub('_x', "_#{setting}")
end
end
end # standalone only configs
# Set up lint test
config.lint_test = {
# Require the lint tests to pass before allowing a release to proceed
:run_on_tag => true,
# Auto correct violations where possible whenever 'origen lint' is run
:auto_correct => true,
# Limit the testing for large legacy applications
#:level => :easy,
# Run on these directories/files by default
#:files => ["lib", "config/application.rb"],
}
end
|
module Yatedo
  # Current gem version.
  VERSION = '1.1.0'
end
Bump version to 1.2.0
module Yatedo
  # Current gem version.
  VERSION = '1.2.0'
end
|
# Callback framework for DAV resources. Extending a class with
# DAV::Callbacks gives it before/after hooks whose execution can be
# restricted by a matcher applied to the resource.
module DAV
module Callbacks
# Pairs a matcher with a callable unit. initialize installs a singleton
# #call specialized for the matcher's type; the instance-method #call
# below is the fallback for any other matcher type.
class Filter < Struct.new(:matcher, :unit)
def initialize(*args)
super(*args)
if matcher.respond_to? :matches?
# Matcher exposes a #matches?(resource) protocol.
def self.call(resource, *args)
unit.call(resource, *args) if matcher.matches? resource
end
elsif matcher.is_a? String
# String matchers are fnmatch globs applied to the resource URI path.
def self.call(resource, *args)
if File.fnmatch? matcher, resource.uri.path
unit.call(resource, *args)
end
end
elsif matcher.is_a? Regexp
# Regexp matchers are tested against the resource URI path.
def self.call(resource, *args)
unit.call(resource, *args) if matcher =~ resource.uri.path
end
end
end
# Fallback: no recognized matcher type, always invoke the unit.
def call(resource, *args)
unit.call resource, *args
end
end
# Holds the per-method before/after callback queues for one class.
class Collection
def initialize(klass)
@klass = klass
@queues = {}
# Yielding the queues hash lets #clone below populate the copy.
yield @queues if block_given?
end
# Appends a Filter to the "<slot>_<method>" queue. When no real pattern
# was given, the pattern argument is actually the unit ("unit, pattern =
# pattern" shifts it over and leaves pattern nil).
def add(slot, method_sym, pattern, unit)
unless String === pattern or Regexp === pattern
unit, pattern = pattern
end
unless unit.respond_to? :call
# NOTE(review): Proc.new without an explicit block (capturing the
# caller's block) is deprecated in Ruby 2.7 and removed in 3.0 —
# pass &block explicitly before upgrading.
unit = Symbol === unit ? method(unit) : Proc.new
end
@queues["#{ slot }_#{ method_sym }"] << Filter.new(pattern, unit)
end
# Creates empty before/after queues for a newly wrapped method.
def register(method_sym)
@queues["before_#{ method_sym }"] = []
@queues["after_#{ method_sym }"] = []
end
# Runs every callback in the slot's queue; a callback returning exactly
# false cancels the chain via throw(:cancel).
def run(slot, method_sym, obj, *args)
@queues["#{ slot }_#{ method_sym }"].
all? { |callback| callback.call(obj, *args) != false } or cancel!
end
# Copies the collection with per-queue (shallow) clones, so subclasses
# can add callbacks without mutating the parent's queues.
def clone
Collection.new @klass do |qs|
@queues.each { |k, callbacks| qs[k] = callbacks.clone }
end
end
protected
def cancel!
throw :cancel
end
end
attr_reader :callbacks
# Gives the extended class a Collection plus an instance-level #callbacks
# accessor that delegates to the class.
def self.extended(base)
base.instance_variable_set :@callbacks, Collection.new(base)
base.class_eval do
def callbacks
self.class.callbacks
end
end
end
# Subclasses start from a copy of the parent's callbacks.
def inherited(base)
super base
base.instance_variable_set :@callbacks, callbacks.clone
end
# Wraps each named method so before/after queues run around the original
# implementation (kept as <name>_without_callbacks).
def define_callbacks(*methods)
methods.each do |method_sym|
alias_method :"#{ method_sym }_without_callbacks", method_sym
class_eval <<-RUBY
def #{ method_sym }(*args)
catch(:cancel) do
callbacks.run(:before, :#{ method_sym }, self, *args)
result = #{ method_sym }_without_callbacks(*args)
callbacks.run(:after, :#{ method_sym }, self, *args)
result
end
end
RUBY
callbacks.register method_sym
end
end
# Registers a before-hook; returns self for chaining.
def before(method_sym, pattern = nil, unit = nil, &block)
callbacks.add :before, method_sym, pattern, unit, &block
self
end
# Registers an after-hook; returns self for chaining.
def after(method_sym, pattern = nil, unit = nil, &block)
callbacks.add :after, method_sym, pattern, unit, &block
self
end
end
end
Added TODO.
# Callback framework for DAV resources. Extending a class with
# DAV::Callbacks gives it before/after hooks whose execution can be
# restricted by a matcher applied to the resource.
module DAV
module Callbacks
# Pairs a matcher with a callable unit. initialize installs a singleton
# #call specialized for the matcher's type; the instance-method #call
# below is the fallback for any other matcher type.
class Filter < Struct.new(:matcher, :unit)
def initialize(*args)
super(*args)
if matcher.respond_to? :matches?
# Matcher exposes a #matches?(resource) protocol.
def self.call(resource, *args)
unit.call(resource, *args) if matcher.matches? resource
end
elsif matcher.is_a? String
# String matchers are fnmatch globs applied to the resource URI path.
def self.call(resource, *args)
if File.fnmatch? matcher, resource.uri.path
unit.call(resource, *args)
end
end
elsif matcher.is_a? Regexp
# Regexp matchers are tested against the resource URI path.
def self.call(resource, *args)
unit.call(resource, *args) if matcher =~ resource.uri.path
end
end
end
# Fallback: no recognized matcher type, always invoke the unit.
def call(resource, *args)
unit.call resource, *args
end
end
# Holds the per-method before/after callback queues for one class.
class Collection
def initialize(klass)
@klass = klass
@queues = {}
# Yielding the queues hash lets #clone below populate the copy.
yield @queues if block_given?
end
# Appends a Filter to the "<slot>_<method>" queue. When no real pattern
# was given, the pattern argument is actually the unit ("unit, pattern =
# pattern" shifts it over and leaves pattern nil).
def add(slot, method_sym, pattern, unit)
unless String === pattern or Regexp === pattern
unit, pattern = pattern
end
unless unit.respond_to? :call
# NOTE(review): Proc.new without an explicit block (capturing the
# caller's block) is deprecated in Ruby 2.7 and removed in 3.0 —
# pass &block explicitly before upgrading.
unit = Symbol === unit ? method(unit) : Proc.new
end
@queues["#{ slot }_#{ method_sym }"] << Filter.new(pattern, unit)
end
# Creates empty before/after queues for a newly wrapped method.
def register(method_sym)
@queues["before_#{ method_sym }"] = []
@queues["after_#{ method_sym }"] = []
end
# Runs every callback in the slot's queue; a callback returning exactly
# false cancels the chain via throw(:cancel).
def run(slot, method_sym, obj, *args)
@queues["#{ slot }_#{ method_sym }"].
all? { |callback| callback.call(obj, *args) != false } or cancel!
end
# Copies the collection with per-queue (shallow) clones, so subclasses
# can add callbacks without mutating the parent's queues.
def clone
Collection.new @klass do |qs|
@queues.each { |k, callbacks| qs[k] = callbacks.clone }
end
end
protected
def cancel!
# TODO log.info about canceled...
throw :cancel
end
end
attr_reader :callbacks
# Gives the extended class a Collection plus an instance-level #callbacks
# accessor that delegates to the class.
def self.extended(base)
base.instance_variable_set :@callbacks, Collection.new(base)
base.class_eval do
def callbacks
self.class.callbacks
end
end
end
# Subclasses start from a copy of the parent's callbacks.
def inherited(base)
super base
base.instance_variable_set :@callbacks, callbacks.clone
end
# Wraps each named method so before/after queues run around the original
# implementation (kept as <name>_without_callbacks).
def define_callbacks(*methods)
methods.each do |method_sym|
alias_method :"#{ method_sym }_without_callbacks", method_sym
class_eval <<-RUBY
def #{ method_sym }(*args)
catch(:cancel) do
callbacks.run(:before, :#{ method_sym }, self, *args)
result = #{ method_sym }_without_callbacks(*args)
callbacks.run(:after, :#{ method_sym }, self, *args)
result
end
end
RUBY
callbacks.register method_sym
end
end
# Registers a before-hook; returns self for chaining.
def before(method_sym, pattern = nil, unit = nil, &block)
callbacks.add :before, method_sym, pattern, unit, &block
self
end
# Registers an after-hook; returns self for chaining.
def after(method_sym, pattern = nil, unit = nil, &block)
callbacks.add :after, method_sym, pattern, unit, &block
self
end
end
end
module Fastbill
  module Automatic
    # Represents an incoming Fastbill webhook notification payload.
    class Webhook
      attr_reader :type, :customer, :subscription, :payment

      def initialize(attributes = {})
        set_attributes(attributes)
      end

      # Builds a Webhook from a raw JSON payload string.
      def self.parse(params)
        new(JSON.parse(params))
      end

      # Assigns each payload entry to an instance variable named after the
      # (downcased) key; the three known nested resources are wrapped in
      # their model classes first.
      def set_attributes(attributes)
        attributes.each_pair do |key, value|
          wrapped =
            case key
            when "customer"     then Customer.new(value)
            when "subscription" then Subscription.new(value)
            when "payment"      then Payment.new(value)
            else value
            end
          instance_variable_set("@#{key.downcase}", wrapped)
        end
      end
    end
  end
end
Use more descriptive names to set instance variables
module Fastbill
  module Automatic
    # Represents an incoming Fastbill webhook notification payload.
    class Webhook
      attr_reader :type, :customer, :subscription, :payment

      def initialize(attributes = {})
        set_attributes(attributes)
      end

      # Builds a Webhook from a raw JSON payload string.
      def self.parse(params)
        new(JSON.parse(params))
      end

      # Assigns payload entries to instance variables. The three known
      # nested resources get dedicated ivars wrapped in their model
      # classes; everything else is stored under its (downcased) key.
      def set_attributes(attributes)
        attributes.each_pair do |key, value|
          case key
          when "customer"
            @customer = Customer.new(value)
          when "subscription"
            @subscription = Subscription.new(value)
          when "payment"
            @payment = Payment.new(value)
          else
            instance_variable_set("@#{key.downcase}", value)
          end
        end
      end
    end
  end
end
|
# frozen_string_literal: true
require_relative '../config/environment'
module DbPyrUpdate
class Base
  # file: path to a YAML file containing an array of rep records.
  # Bug fix: YAML.load(File.open(file)) never closed the file handle;
  # YAML.load_file reads and closes it.
  def initialize(file)
    @reps = YAML.load_file(file)
  end
end
# Creates or refreshes a Rep record for every entry in the YAML file.
class Reps < Base
def call
@reps.each do |yaml_rep|
db_rep = Rep.find_or_create_by(bioguide_id: yaml_rep['id']['bioguide'])
update_rep(db_rep, yaml_rep)
puts "Updated #{db_rep.official_full}"
end
end
private
# Applies name, current-term, photo and capitol-office data, then saves.
def update_rep(db_rep, yaml_rep)
name = yaml_rep['name']
# The last entry in 'terms' is treated as the rep's current term.
term = yaml_rep['terms'].last
db_rep.tap do |rep|
update_rep_name(rep, name)
update_rep_term_info(rep, term)
update_rep_photo(rep)
update_rep_capitol_office(rep, term)
rep.active = true
end
db_rep.save
end
def update_rep_name(rep, name)
rep.official_full = name['official_full']
rep.first = name['first']
rep.middle = name['middle']
rep.last = name['last']
rep.suffix = name['suffix']
rep.nickname = name['nickname']
end
def update_rep_term_info(rep, term)
# Zero-pad single-digit district codes (e.g. "3" -> "03") to match the
# District.code format used for lookup below.
dis_code = format('%d', term['district']) if term['district']
dis_code = dis_code.size == 1 ? "0#{dis_code}" : dis_code if dis_code
rep.role = determine_current_rep_role(term)
rep.state = State.find_by(abbr: term['state'])
rep.district = District.where(code: dis_code, state: rep.state).take
rep.party = term['party']
rep.url = term['url']
rep.contact_form = term['contact_form']
# '0%o' zero-pads the senate class; %o is octal, which matches decimal
# for the only valid classes (1-3).
rep.senate_class = format('0%o', term['class']) if term['class']
end
# Maps the YAML term type ('sen'/'rep') to a display role; any other
# value is passed through unchanged.
def determine_current_rep_role(term)
if term['type'] == 'sen'
'United States Senator'
elsif term['type'] == 'rep'
'United States Representative'
else
term['type']
end
end
def update_rep_capitol_office(rep, term)
address_ary = term['address'].split(' ')
cap_office = OfficeLocation.find_or_create_by(
office_type: 'capitol',
bioguide_id: rep.bioguide_id
)
cap_office.tap do |off|
update_basic_office_info(off, rep)
update_phone_fax_and_hours(off, term)
update_cap_office_address(address_ary, off)
end
cap_office.add_v_card
end
def update_basic_office_info(off, rep)
off.office_id = "#{rep.bioguide_id}-capitol"
off.rep = rep
off.active = true
end
def update_phone_fax_and_hours(off, term)
off.phone = term['phone']
off.fax = term['fax']
off.hours = term['hours']
end
# The address string ends with "... city state zip": pop those three off
# the end and treat the remainder as the street address, expanding the
# 'HOB' abbreviation.
def update_cap_office_address(address_ary, off)
off.zip = address_ary.pop
off.state = address_ary.pop
off.city = address_ary.pop
off.address = address_ary.
join(' ').
delete(';').
sub('HOB', 'House Office Building')
end
def update_rep_photo(rep)
rep.fetch_avatar_data
rep.add_photo
end
# End of private methods
end
# Marks every rep listed in the historical YAML file — and all of their
# office locations — as inactive.
class HistoricalReps < Base
  def call
    # Performance fix: collect the bioguide ids up front and fetch all
    # matching reps in a single query, instead of one Rep.find_by per
    # YAML record (N+1 queries). Reps with no DB record are implicitly
    # skipped by the where() lookup, matching the old `next if blank?`.
    bioguide_ids = @reps.map { |h_rep| h_rep['id']['bioguide'] }
    Rep.where(bioguide_id: bioguide_ids).each do |rep|
      rep.update(active: false)
      rep.office_locations.each { |office| office.update(active: false) }
      puts "Retired #{rep.official_full}"
    end
  end
end
# Syncs social-media handles and ids from the YAML file onto Rep records.
class Socials < Base
def call
@reps.each do |social|
rep = Rep.find_or_create_by(bioguide_id: social['id']['bioguide'])
update_rep_socials(rep, social)
rep.save
puts "Updated socials for #{rep.official_full}"
end
end
private
# Copies each network's handle and numeric id onto the rep.
def update_rep_socials(rep, social)
update_facebook(rep, social)
update_twitter(rep, social)
update_youtube(rep, social)
update_instagram(rep, social)
rep.googleplus = social['social']['googleplus']
end
def update_instagram(rep, social)
rep.instagram = social['social']['instagram']
rep.instagram_id = social['social']['instagram_id']
end
def update_youtube(rep, social)
rep.youtube = social['social']['youtube']
rep.youtube_id = social['social']['youtube_id']
end
def update_twitter(rep, social)
rep.twitter = social['social']['twitter']
rep.twitter_id = social['social']['twitter_id']
end
def update_facebook(rep, social)
rep.facebook = social['social']['facebook']
rep.facebook_id = social['social']['facebook_id']
end
# End of private methods
end
# Creates/updates district offices from the YAML file, then marks any
# district office no longer present in the file as inactive.
class OfficeLocations < Base
def call
# Ids of every office seen in the YAML file during this run.
@active_offices = []
@reps.each do |yaml_office|
next if yaml_office['offices'].blank?
find_or_create_offices(yaml_office)
end
# Any district office id not touched above is no longer listed: retire it.
district_offices = OfficeLocation.where(office_type: 'district').map(&:id)
inactive_offices = district_offices - @active_offices
OfficeLocation.find(inactive_offices).each { |o| o.update(active: false) }
end
private
def find_or_create_offices(yaml_office)
yaml_office['offices'].each do |yaml_off|
office = OfficeLocation.find_or_create_by(
bioguide_id: yaml_office['id']['bioguide'],
office_id: yaml_off['id'],
office_type: 'district'
)
update_location_info(office, yaml_off)
update_other_office_info(office, yaml_off)
@active_offices << office.id
puts "Updated #{office.rep.official_full}'s #{office.city} office"
end
end
def update_location_info(office, yaml_off)
office.office_id = yaml_off['id']
office.suite = yaml_off['suite']
office.phone = yaml_off['phone']
office.address = yaml_off['address']
office.building = yaml_off['building']
office.city = yaml_off['city']
office.state = yaml_off['state']
office.zip = yaml_off['zip']
office.latitude = yaml_off['latitude']
office.longitude = yaml_off['longitude']
end
def update_other_office_info(office, yaml_off)
office.fax = yaml_off['fax']
office.hours = yaml_off['hours']
office.active = true
office.add_v_card
end
# End of private methods
end
end
Optimization: retire historical reps with one batched query instead of one query per rep
# frozen_string_literal: true
require_relative '../config/environment'
module DbPyrUpdate
class Base
  # file: path to a YAML file containing an array of rep records.
  # Bug fix: YAML.load(File.open(file)) never closed the file handle;
  # YAML.load_file reads and closes it.
  def initialize(file)
    @reps = YAML.load_file(file)
  end
end
# Creates or refreshes a Rep record for every entry in the YAML file.
class Reps < Base
def call
@reps.each do |yaml_rep|
db_rep = Rep.find_or_create_by(bioguide_id: yaml_rep['id']['bioguide'])
update_rep(db_rep, yaml_rep)
puts "Updated #{db_rep.official_full}"
end
end
private
# Applies name, current-term, photo and capitol-office data, then saves.
def update_rep(db_rep, yaml_rep)
name = yaml_rep['name']
# The last entry in 'terms' is treated as the rep's current term.
term = yaml_rep['terms'].last
db_rep.tap do |rep|
update_rep_name(rep, name)
update_rep_term_info(rep, term)
update_rep_photo(rep)
update_rep_capitol_office(rep, term)
rep.active = true
end
db_rep.save
end
def update_rep_name(rep, name)
rep.official_full = name['official_full']
rep.first = name['first']
rep.middle = name['middle']
rep.last = name['last']
rep.suffix = name['suffix']
rep.nickname = name['nickname']
end
def update_rep_term_info(rep, term)
# Zero-pad single-digit district codes (e.g. "3" -> "03") to match the
# District.code format used for lookup below.
dis_code = format('%d', term['district']) if term['district']
dis_code = dis_code.size == 1 ? "0#{dis_code}" : dis_code if dis_code
rep.role = determine_current_rep_role(term)
rep.state = State.find_by(abbr: term['state'])
rep.district = District.where(code: dis_code, state: rep.state).take
rep.party = term['party']
rep.url = term['url']
rep.contact_form = term['contact_form']
# '0%o' zero-pads the senate class; %o is octal, which matches decimal
# for the only valid classes (1-3).
rep.senate_class = format('0%o', term['class']) if term['class']
end
# Maps the YAML term type ('sen'/'rep') to a display role; any other
# value is passed through unchanged.
def determine_current_rep_role(term)
if term['type'] == 'sen'
'United States Senator'
elsif term['type'] == 'rep'
'United States Representative'
else
term['type']
end
end
def update_rep_capitol_office(rep, term)
address_ary = term['address'].split(' ')
cap_office = OfficeLocation.find_or_create_by(
office_type: 'capitol',
bioguide_id: rep.bioguide_id
)
cap_office.tap do |off|
update_basic_office_info(off, rep)
update_phone_fax_and_hours(off, term)
update_cap_office_address(address_ary, off)
end
cap_office.add_v_card
end
def update_basic_office_info(off, rep)
off.office_id = "#{rep.bioguide_id}-capitol"
off.rep = rep
off.active = true
end
def update_phone_fax_and_hours(off, term)
off.phone = term['phone']
off.fax = term['fax']
off.hours = term['hours']
end
# The address string ends with "... city state zip": pop those three off
# the end and treat the remainder as the street address, expanding the
# 'HOB' abbreviation.
def update_cap_office_address(address_ary, off)
off.zip = address_ary.pop
off.state = address_ary.pop
off.city = address_ary.pop
off.address = address_ary.
join(' ').
delete(';').
sub('HOB', 'House Office Building')
end
def update_rep_photo(rep)
rep.fetch_avatar_data
rep.add_photo
end
# End of private methods
end
# Marks every rep listed in the historical YAML file — and all of their
# office locations — as inactive.
class HistoricalReps < Base
def call
# Collect all bioguide ids up front so the reps can be fetched in a
# single query (avoids one Rep lookup per YAML record).
bioguide_ids = @reps.map { |h_rep| h_rep['id']['bioguide'] }
Rep.where(bioguide_id: bioguide_ids).each do |rep|
rep.update(active: false)
rep.office_locations.each { |office| office.update(active: false) }
puts "Retired #{rep.official_full}"
end
end
end
# Syncs social-media handles and ids from the YAML file onto Rep records.
class Socials < Base
def call
@reps.each do |social|
rep = Rep.find_or_create_by(bioguide_id: social['id']['bioguide'])
update_rep_socials(rep, social)
rep.save
puts "Updated socials for #{rep.official_full}"
end
end
private
# Copies each network's handle and numeric id onto the rep.
def update_rep_socials(rep, social)
update_facebook(rep, social)
update_twitter(rep, social)
update_youtube(rep, social)
update_instagram(rep, social)
rep.googleplus = social['social']['googleplus']
end
def update_instagram(rep, social)
rep.instagram = social['social']['instagram']
rep.instagram_id = social['social']['instagram_id']
end
def update_youtube(rep, social)
rep.youtube = social['social']['youtube']
rep.youtube_id = social['social']['youtube_id']
end
def update_twitter(rep, social)
rep.twitter = social['social']['twitter']
rep.twitter_id = social['social']['twitter_id']
end
def update_facebook(rep, social)
rep.facebook = social['social']['facebook']
rep.facebook_id = social['social']['facebook_id']
end
# End of private methods
end
# Creates/updates district offices from the YAML file, then marks any
# district office no longer present in the file as inactive.
class OfficeLocations < Base
def call
# Ids of every office seen in the YAML file during this run.
@active_offices = []
@reps.each do |yaml_office|
next if yaml_office['offices'].blank?
find_or_create_offices(yaml_office)
end
# Any district office id not touched above is no longer listed: retire it.
district_offices = OfficeLocation.where(office_type: 'district').map(&:id)
inactive_offices = district_offices - @active_offices
OfficeLocation.find(inactive_offices).each { |o| o.update(active: false) }
end
private
def find_or_create_offices(yaml_office)
yaml_office['offices'].each do |yaml_off|
office = OfficeLocation.find_or_create_by(
bioguide_id: yaml_office['id']['bioguide'],
office_id: yaml_off['id'],
office_type: 'district'
)
update_location_info(office, yaml_off)
update_other_office_info(office, yaml_off)
@active_offices << office.id
puts "Updated #{office.rep.official_full}'s #{office.city} office"
end
end
def update_location_info(office, yaml_off)
office.office_id = yaml_off['id']
office.suite = yaml_off['suite']
office.phone = yaml_off['phone']
office.address = yaml_off['address']
office.building = yaml_off['building']
office.city = yaml_off['city']
office.state = yaml_off['state']
office.zip = yaml_off['zip']
office.latitude = yaml_off['latitude']
office.longitude = yaml_off['longitude']
end
def update_other_office_info(office, yaml_off)
office.fax = yaml_off['fax']
office.hours = yaml_off['hours']
office.active = true
office.add_v_card
end
# End of private methods
end
end
|
#
# Cookbook Name:: osqa
# Recipe:: default
#
# Copyright 2011, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "tools"
include_recipe "apache"
include_recipe "memcached"
include_recipe "python"

# Build dependencies for the Python database drivers installed below.
package "python-dev"
package "libmysqlclient-dev"
package "libpq-dev"

python_directory = "/opt/osqa-python"

python_virtualenv python_directory

# Bug fix: use the canonical PyPI distribution names ("Django", "Markdown",
# "python-openid", "MySQL-python"); the previous lowercased/underscored
# spellings do not match the published package names.
python_package "Django" do
  python_virtualenv python_directory
  version "1.6.11"
end

python_package "html5lib" do
  python_virtualenv python_directory
  version "0.999"
end

python_package "Markdown" do
  python_virtualenv python_directory
  version "2.4"
end

python_package "python-memcached" do
  python_virtualenv python_directory
  version "1.53"
end

python_package "python-openid" do
  python_virtualenv python_directory
  version "2.2.5"
end

python_package "MySQL-python" do
  python_virtualenv python_directory
  version "1.2.3"
end

python_package "psycopg2" do
  python_virtualenv python_directory
  version "2.4.5"
end

python_package "South" do
  python_virtualenv python_directory
  version "0.7.6"
end

apache_module "rewrite"
apache_module "wsgi"
# Configure each OSQA site declared in the node attributes: Apache vhost,
# checkout, upload dir, generated settings_local.py and a daily backup job.
node[:osqa][:sites].each do |site|
  site_name = site[:name]
  site_aliases = site[:aliases] || []
  directory = site[:directory] || "/srv/#{site_name}"
  # Accept either user/group names or numeric ids in the attributes.
  site_user = site[:user] || node[:osqa][:user]
  site_user = Etc.getpwuid(site_user).name if site_user.is_a?(Integer)
  site_group = site[:group] || node[:osqa][:group] || Etc.getpwnam(site_user).gid
  site_group = Etc.getgrgid(site_group).name if site_group.is_a?(Integer)
  database_name = site[:database_name] || node[:osqa][:database_name]
  database_user = site[:database_user] || node[:osqa][:database_user]
  # Bug fix: this previously read site[:database_user], silently using the
  # per-site database USER NAME as the password whenever one was set.
  database_password = site[:database_password] || node[:osqa][:database_password]
  backup_name = site[:backup]

  ssl_certificate site_name do
    domains [site_name] + site_aliases
    notifies :reload, "service[apache2]"
  end

  apache_site site_name do
    template "apache.erb"
    directory directory
    variables :user => site_user, :group => site_group, :aliases => site_aliases, :python_home => python_directory
  end

  directory directory do
    owner site_user
    group site_group
    mode 0o755
  end

  # Triggered by the git sync below whenever the checkout changes.
  execute "osqa-migrate" do
    action :nothing
    command "python manage.py migrate forum"
    cwd "#{directory}/osqa"
    user site_user
    group site_group
    notifies :reload, "service[apache2]"
  end

  git "#{directory}/osqa" do
    action :sync
    repository "https://git.openstreetmap.org/public/osqa.git"
    revision "live"
    user site_user
    group site_group
    notifies :run, "execute[osqa-migrate]"
  end

  directory "#{directory}/upfiles" do
    user site_user
    group site_group
    mode 0o755
  end

  template "#{directory}/osqa/osqa.wsgi" do
    source "osqa.wsgi.erb"
    owner site_user
    group site_group
    mode 0o644
    variables :directory => directory
    notifies :reload, "service[apache2]"
  end

  # Derive settings_local.py from the dist template, substituting the
  # database credentials and site-specific settings line by line.
  settings = edit_file "#{directory}/osqa/settings_local.py.dist" do |line|
    line.gsub!(/^( *)'ENGINE': '.*',/, "\\1'ENGINE': 'django.db.backends.postgresql_psycopg2',")
    line.gsub!(/^( *)'NAME': '.*',/, "\\1'NAME': '#{database_name}',")
    line.gsub!(/^( *)'USER': '.*',/, "\\1'USER': '#{database_user}',")
    line.gsub!(/^( *)'PASSWORD': '.*',/, "\\1'PASSWORD': '#{database_password}',")
    line.gsub!(/^ALLOWED_HOSTS = .*/, "ALLOWED_HOSTS = ('help.openstreetmap.org',)")
    line.gsub!(/^CACHE_BACKEND = .*/, "CACHE_BACKEND = 'memcached://127.0.0.1:11211/'")
    line.gsub!(%r{^APP_URL = 'http://'}, "APP_URL = 'https://#{site_name}'")
    line.gsub!(%r{^TIME_ZONE = 'America/New_York'}, "TIME_ZONE = 'Europe/London'")
    line.gsub!(/^DISABLED_MODULES = \[([^\]]+)\]/, "DISABLED_MODULES = [\\1, 'localauth', 'facebookauth', 'oauthauth']")
    line
  end

  file "#{directory}/osqa/settings_local.py" do
    owner site_user
    group site_group
    mode 0o644
    content settings
    notifies :reload, "service[apache2]"
  end

  template "/etc/cron.daily/#{backup_name}-backup" do
    source "backup.cron.erb"
    owner "root"
    group "root"
    mode 0o755
    variables :name => backup_name, :directory => directory, :user => site_user, :database => database_name
  end
end
Correct some Python package names to their canonical PyPI names
#
# Cookbook Name:: osqa
# Recipe:: default
#
# Copyright 2011, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "tools"
include_recipe "apache"
include_recipe "memcached"
include_recipe "python"
# Build dependencies for the Python database drivers installed below.
package "python-dev"
package "libmysqlclient-dev"
package "libpq-dev"
python_directory = "/opt/osqa-python"
python_virtualenv python_directory
# Package names below use the canonical PyPI distribution names.
python_package "Django" do
python_virtualenv python_directory
version "1.6.11"
end
python_package "html5lib" do
python_virtualenv python_directory
version "0.999"
end
python_package "Markdown" do
python_virtualenv python_directory
version "2.4"
end
python_package "python-memcached" do
python_virtualenv python_directory
version "1.53"
end
python_package "python-openid" do
python_virtualenv python_directory
version "2.2.5"
end
python_package "MySQL-python" do
python_virtualenv python_directory
version "1.2.3"
end
python_package "psycopg2" do
python_virtualenv python_directory
version "2.4.5"
end
python_package "South" do
python_virtualenv python_directory
version "0.7.6"
end
apache_module "rewrite"
apache_module "wsgi"
# Configure each OSQA site declared in the node attributes: TLS
# certificate, Apache vhost, source checkout, Django settings and a
# daily backup cron job.
node[:osqa][:sites].each do |site|
  site_name = site[:name]
  site_aliases = site[:aliases] || []
  directory = site[:directory] || "/srv/#{site_name}"

  # Users and groups may be configured by name or numeric id; resolve
  # numeric ids to names.
  site_user = site[:user] || node[:osqa][:user]
  site_user = Etc.getpwuid(site_user).name if site_user.is_a?(Integer)
  site_group = site[:group] || node[:osqa][:group] || Etc.getpwnam(site_user).gid
  site_group = Etc.getgrgid(site_group).name if site_group.is_a?(Integer)

  database_name = site[:database_name] || node[:osqa][:database_name]
  database_user = site[:database_user] || node[:osqa][:database_user]
  # FIX: this previously read site[:database_user], so a per-site database
  # password was silently ignored and the user name was used instead.
  database_password = site[:database_password] || node[:osqa][:database_password]
  backup_name = site[:backup]

  ssl_certificate site_name do
    domains [site_name] + site_aliases
    notifies :reload, "service[apache2]"
  end

  apache_site site_name do
    template "apache.erb"
    directory directory
    variables :user => site_user, :group => site_group, :aliases => site_aliases, :python_home => python_directory
  end

  directory directory do
    owner site_user
    group site_group
    mode 0o755
  end

  # Run Django migrations only when triggered by a checkout change, then
  # reload Apache to pick up the new code.
  execute "osqa-migrate" do
    action :nothing
    command "python manage.py migrate forum"
    cwd "#{directory}/osqa"
    user site_user
    group site_group
    notifies :reload, "service[apache2]"
  end

  git "#{directory}/osqa" do
    action :sync
    repository "https://git.openstreetmap.org/public/osqa.git"
    revision "live"
    user site_user
    group site_group
    notifies :run, "execute[osqa-migrate]"
  end

  # Upload directory for user-attached files.
  # NOTE(review): this resource uses `user` where the sibling directory
  # resource above uses `owner`; confirm this is intentional.
  directory "#{directory}/upfiles" do
    user site_user
    group site_group
    mode 0o755
  end

  template "#{directory}/osqa/osqa.wsgi" do
    source "osqa.wsgi.erb"
    owner site_user
    group site_group
    mode 0o644
    variables :directory => directory
    notifies :reload, "service[apache2]"
  end

  # Adapt the stock settings template: PostgreSQL backend, site-specific
  # credentials, allowed host, memcached, UK time zone, and disable the
  # local/Facebook/OAuth authentication modules.
  settings = edit_file "#{directory}/osqa/settings_local.py.dist" do |line|
    line.gsub!(/^( *)'ENGINE': '.*',/, "\\1'ENGINE': 'django.db.backends.postgresql_psycopg2',")
    line.gsub!(/^( *)'NAME': '.*',/, "\\1'NAME': '#{database_name}',")
    line.gsub!(/^( *)'USER': '.*',/, "\\1'USER': '#{database_user}',")
    line.gsub!(/^( *)'PASSWORD': '.*',/, "\\1'PASSWORD': '#{database_password}',")
    line.gsub!(/^ALLOWED_HOSTS = .*/, "ALLOWED_HOSTS = ('help.openstreetmap.org',)")
    line.gsub!(/^CACHE_BACKEND = .*/, "CACHE_BACKEND = 'memcached://127.0.0.1:11211/'")
    line.gsub!(%r{^APP_URL = 'http://'}, "APP_URL = 'https://#{site_name}'")
    line.gsub!(%r{^TIME_ZONE = 'America/New_York'}, "TIME_ZONE = 'Europe/London'")
    line.gsub!(/^DISABLED_MODULES = \[([^\]]+)\]/, "DISABLED_MODULES = [\\1, 'localauth', 'facebookauth', 'oauthauth']")
    line
  end

  file "#{directory}/osqa/settings_local.py" do
    owner site_user
    group site_group
    mode 0o644
    content settings
    notifies :reload, "service[apache2]"
  end

  # Daily cron job backing up the site's files and database.
  template "/etc/cron.daily/#{backup_name}-backup" do
    source "backup.cron.erb"
    owner "root"
    group "root"
    mode 0o755
    variables :name => backup_name, :directory => directory, :user => site_user, :database => database_name
  end
end
|
# Fetches the "most popular" (trending) video list from the YouTube Data
# API v3 videos endpoint.
class YoutubeTrends
  GOOGLE_API_YOUTUBE_TRENDS_URL = 'https://www.googleapis.com/youtube/v3/videos'
  MAX_RESULTS_PER_REQUEST = 50

  def initialize
  end

  # Performs a GET against the videos endpoint and returns the raw body.
  # FIX: +params+ was previously ignored; it is now forwarded to the URI
  # builder so callers can override the defaults.
  def apiRequestVideoList params = nil
    uri = getUriTrendsVideo params
    Net::HTTP.get(uri)
  end

  # Builds the full request URI, merging +params+ over the defaults.
  def getUriTrendsVideo params = nil
    URI(GOOGLE_API_YOUTUBE_TRENDS_URL + buildQueryString(params))
  end

  # Default query parameters for the "mostPopular" chart.
  # NOTE(review): the API key is hard-coded in source control; consider
  # loading it from an environment variable or configuration instead.
  def getDefaultParamsQueryString
    {
      :part => 'contentDetails',
      :chart => 'mostPopular',
      :regionCode => 'IN',
      :maxResults => MAX_RESULTS_PER_REQUEST,
      :key => 'AIzaSyDu_K050qbIQQnw3ZJ2MTLS1lYssdh_B6E'
    }
  end

  # Builds a '?key=value&...' query string from the defaults merged with
  # +params+ (params win on conflicts).
  #
  # FIX: the original called merge(params) and discarded the return value,
  # so caller-supplied parameters never reached the query string. Stray
  # debug output was also removed.
  def buildQueryString params
    query_params = getDefaultParamsQueryString
    query_params = query_params.merge(params) unless params.nil?
    '?' + query_params.map { |key, value| "#{key}=#{value}" }.join('&')
  end
end
Get trends
# Fetches the "most popular" (trending) video list from the YouTube Data
# API v3, following pagination tokens.
class YoutubeTrends
  GOOGLE_API_YOUTUBE_TRENDS_URL = 'https://www.googleapis.com/youtube/v3/videos'
  MAX_RESULTS_PER_REQUEST = 50
  # Upper bound on pages fetched per call (matches the original counter).
  MAX_PAGES = 6

  def initialize
  end

  # Fetches up to MAX_PAGES pages of the most-popular list, following
  # nextPageToken. Returns an array with one 'items' array appended per
  # page (nested, matching the original << behaviour).
  # FIX: caller-supplied +params+ were previously ignored; debug puts
  # statements removed.
  def apiRequestVideoList params = nil
    item_list = []
    next_page_token = nil
    remaining_pages = MAX_PAGES
    while remaining_pages > 0
      uri = getUriTrendsVideo((params || {}).merge(:pageToken => next_page_token))
      response = Net::HTTP.get(uri)
      parsed_response = JSON.parse(response)
      item_list << parsed_response['items']
      break unless parsed_response.key?('nextPageToken')
      next_page_token = parsed_response['nextPageToken']
      remaining_pages -= 1
    end
    item_list
  end

  # Builds the full request URI, merging +params+ over the defaults.
  def getUriTrendsVideo params = nil
    URI(GOOGLE_API_YOUTUBE_TRENDS_URL + buildQueryString(params))
  end

  # Default query parameters for the "mostPopular" chart.
  # NOTE(review): the API key is hard-coded in source control; consider
  # loading it from an environment variable or configuration instead.
  def getDefaultParamsQueryString
    {
      :part => 'contentDetails',
      :chart => 'mostPopular',
      :regionCode => 'IN',
      :maxResults => MAX_RESULTS_PER_REQUEST,
      :key => 'AIzaSyDu_K050qbIQQnw3ZJ2MTLS1lYssdh_B6E'
    }
  end

  # Builds a '?key=value&...' query string from the defaults merged with
  # +params+; nil-valued parameters are skipped, as before.
  #
  # FIX: merge(params) raised TypeError when params was nil - the default
  # for getUriTrendsVideo - crashing any call without explicit params.
  def buildQueryString params
    query_params = getDefaultParamsQueryString.merge(params || {})
    pairs = query_params.reject { |_key, value| value.nil? }
                        .map { |key, value| "#{key}=#{value}" }
    '?' + pairs.join('&')
  end
end
# frozen_string_literal: true

require_relative "boot"

# Load only the Rails frameworks this application uses.
require "rails"
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"
require "ostruct"
require "csv"
# Google Calendar integration dependencies.
require "google/apis/calendar_v3"
require "googleauth"
require "google/api_client/client_secrets"
require "googleauth/stores/file_token_store"

Bundler.require(*Rails.groups)

module Gobierto
  class Application < Rails::Application
    # I18n: load nested locale files; Spanish is the default locale.
    config.i18n.load_path += Dir[Rails.root.join("config", "locales", "**", "*.{rb,yml}").to_s]
    config.i18n.default_locale = :es
    config.i18n.available_locales = [:es, :en, :ca]

    config.generators do |g|
      g.test_framework :minitest, spec: false, fixture: true
    end

    # Exposes translations to JavaScript via i18n-js.
    config.middleware.use I18n::JS::Middleware

    # NOTE(review): CORS is wide open ("*" origin and method) on every
    # response; confirm this is intentional.
    config.action_dispatch.default_headers.merge!("Access-Control-Allow-Origin" => "*",
                                                  "Access-Control-Request-Method" => "*")

    config.active_job.queue_adapter = :async

    # Required code paths
    required_paths = [
      "#{config.root}/lib",
      "#{config.root}/lib/validators",
      "#{config.root}/lib/constraints",
      "#{config.root}/lib/errors",
      "#{config.root}/lib/ibm_notes",
      "#{config.root}/lib/liquid"
    ]
    config.autoload_paths += required_paths
    config.eager_load_paths += required_paths

    # Auth Strategies: every directory under vendor/auth_strategies is
    # expected to provide a lib/initializer, loaded here.
    base_strategies_path = %w(vendor auth_strategies)
    available_strategies = Dir.chdir(config.root.join(*base_strategies_path)) do
      Dir.glob('*').select{ |file| File.directory?(file) }
    end
    available_strategies.each do |strategy|
      require_dependency config.root.join(*base_strategies_path).join(strategy, 'lib', 'initializer')
    end

    # Hooks for engines to register sass overrides/theme dependencies.
    config.engine_sass_config_overrides = []
    config.engine_sass_theme_dependencies = []

    # Do not add wrapper .field_with_errors around form fields with validation errors
    config.action_view.field_error_proc = proc { |html_tag, _instance| html_tag }

    config.time_zone = "Madrid"
  end
end

# Base classes of the pub/sub layer, loaded eagerly.
require_dependency "app/publishers/base"
require_dependency "app/subscribers/base"
Load dynamically engine overrides from vendor
# frozen_string_literal: true

require_relative "boot"

# Load only the Rails frameworks this application uses.
require "rails"
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"
require "ostruct"
require "csv"
# Google Calendar integration dependencies.
require "google/apis/calendar_v3"
require "googleauth"
require "google/api_client/client_secrets"
require "googleauth/stores/file_token_store"

Bundler.require(*Rails.groups)

module Gobierto
  class Application < Rails::Application
    # I18n: load nested locale files; Spanish is the default locale.
    config.i18n.load_path += Dir[Rails.root.join("config", "locales", "**", "*.{rb,yml}").to_s]
    config.i18n.default_locale = :es
    config.i18n.available_locales = [:es, :en, :ca]

    config.generators do |g|
      g.test_framework :minitest, spec: false, fixture: true
    end

    # Exposes translations to JavaScript via i18n-js.
    config.middleware.use I18n::JS::Middleware

    # NOTE(review): CORS is wide open ("*" origin and method) on every
    # response; confirm this is intentional.
    config.action_dispatch.default_headers.merge!("Access-Control-Allow-Origin" => "*",
                                                  "Access-Control-Request-Method" => "*")

    config.active_job.queue_adapter = :async

    # Required code paths
    required_paths = [
      "#{config.root}/lib",
      "#{config.root}/lib/validators",
      "#{config.root}/lib/constraints",
      "#{config.root}/lib/errors",
      "#{config.root}/lib/ibm_notes",
      "#{config.root}/lib/liquid"
    ]
    config.autoload_paths += required_paths
    config.eager_load_paths += required_paths

    # Auth Strategies: every directory under vendor/auth_strategies is
    # expected to provide a lib/initializer, loaded here.
    base_strategies_path = %w(vendor auth_strategies)
    available_strategies = Dir.chdir(config.root.join(*base_strategies_path)) do
      Dir.glob('*').select{ |file| File.directory?(file) }
    end
    available_strategies.each do |strategy|
      require_dependency config.root.join(*base_strategies_path).join(strategy, 'lib', 'initializer')
    end

    # Engine Overrides: every directory under vendor/gobierto_engines is
    # expected to provide a lib/initializer, loaded here. Engines can
    # register sass overrides/theme dependencies via the two arrays below.
    config.engine_sass_config_overrides = []
    config.engine_sass_theme_dependencies = []
    base_engines_path = %w(vendor gobierto_engines)
    available_engines = Dir.chdir(config.root.join(*base_engines_path)) do
      Dir.glob('*').select { |item| File.directory?(item) }
    end
    available_engines.each do |engine_dir|
      require_dependency config.root.join(*base_engines_path).join(engine_dir, "lib", "initializer")
    end

    # Do not add wrapper .field_with_errors around form fields with validation errors
    config.action_view.field_error_proc = proc { |html_tag, _instance| html_tag }

    config.time_zone = "Madrid"
  end
end

# Base classes of the pub/sub layer, loaded eagerly.
require_dependency "app/publishers/base"
require_dependency "app/subscribers/base"
|
module Geos
  # FFI wrapper around a native GEOS prepared geometry, used for fast
  # repeated spatial predicate evaluation against one geometry.
  class PreparedGeometry
    include Geos::Tools

    attr_reader :ptr

    # A prepared geometry owns a native pointer and cannot be safely
    # duplicated.
    undef :clone, :dup

    # Prepares +geom+. When +auto_free+ is true the native structure is
    # released by the AutoPointer callback; otherwise the caller owns it.
    def initialize(geom, auto_free = true)
      release_callback = auto_free ? self.class.method(:release) : self.class.method(:no_release)
      @ptr = FFI::AutoPointer.new(
        FFIGeos.GEOSPrepare_r(Geos.current_handle, geom.ptr),
        release_callback
      )
      @geom = geom
      @ptr.autorelease = false unless auto_free
    end

    def self.no_release(ptr) #:nodoc:
    end

    def self.release(ptr) #:nodoc:
      FFIGeos.GEOSPreparedGeom_destroy_r(Geos.current_handle, ptr)
    end

    # Each spatial predicate validates its argument and delegates to the
    # corresponding native GEOSPrepared* function.
    {
      :contains? => :GEOSPreparedContains_r,
      :contains_properly? => :GEOSPreparedContainsProperly_r,
      :covered_by? => :GEOSPreparedCoveredBy_r,
      :covers? => :GEOSPreparedCovers_r,
      :crosses? => :GEOSPreparedCrosses_r,
      :disjoint? => :GEOSPreparedDisjoint_r,
      :intersects? => :GEOSPreparedIntersects_r,
      :overlaps? => :GEOSPreparedOverlaps_r,
      :touches? => :GEOSPreparedTouches_r,
      :within? => :GEOSPreparedWithin_r
    }.each do |predicate, native_function|
      define_method(predicate) do |geom|
        check_geometry(geom)
        bool_result(FFIGeos.send(native_function, Geos.current_handle, ptr, geom.ptr))
      end
    end
  end
end
Add an attr_reader to get at the wrapped geometry (cmeiklejohn's patch).
module Geos
  # FFI wrapper around a native GEOS prepared geometry, used for fast
  # repeated spatial predicate evaluation against one geometry.
  class PreparedGeometry
    include Geos::Tools

    # +geometry+ exposes the source geometry this object was prepared from.
    attr_reader :ptr, :geometry

    # A prepared geometry owns a native pointer and cannot be safely
    # duplicated.
    undef :clone, :dup

    # Prepares +geom+. When +auto_free+ is true the native structure is
    # released by the AutoPointer callback; otherwise the caller owns it.
    def initialize(geom, auto_free = true)
      release_callback = auto_free ? self.class.method(:release) : self.class.method(:no_release)
      @ptr = FFI::AutoPointer.new(
        FFIGeos.GEOSPrepare_r(Geos.current_handle, geom.ptr),
        release_callback
      )
      @geometry = geom
      @ptr.autorelease = false unless auto_free
    end

    def self.no_release(ptr) #:nodoc:
    end

    def self.release(ptr) #:nodoc:
      FFIGeos.GEOSPreparedGeom_destroy_r(Geos.current_handle, ptr)
    end

    # Each spatial predicate validates its argument and delegates to the
    # corresponding native GEOSPrepared* function.
    {
      :contains? => :GEOSPreparedContains_r,
      :contains_properly? => :GEOSPreparedContainsProperly_r,
      :covered_by? => :GEOSPreparedCoveredBy_r,
      :covers? => :GEOSPreparedCovers_r,
      :crosses? => :GEOSPreparedCrosses_r,
      :disjoint? => :GEOSPreparedDisjoint_r,
      :intersects? => :GEOSPreparedIntersects_r,
      :overlaps? => :GEOSPreparedOverlaps_r,
      :touches? => :GEOSPreparedTouches_r,
      :within? => :GEOSPreparedWithin_r
    }.each do |predicate, native_function|
      define_method(predicate) do |geom|
        check_geometry(geom)
        bool_result(FFIGeos.send(native_function, Geos.current_handle, ptr, geom.ptr))
      end
    end
  end
end
|
module DbSchema
  # Entry point of the schema-definition DSL: evaluates a user-supplied
  # block and collects table/enum/extension definition objects.
  class DSL
    attr_reader :block

    def initialize(block)
      @block = block
      @schema = []
    end

    # Runs the stored block against self and returns the collected
    # definitions.
    def schema
      block.call(self)
      @schema
    end

    # Declares a table; the block is evaluated by a TableYielder which
    # gathers the table's fields, indices, checks and foreign keys.
    def table(name, &block)
      table_yielder = TableYielder.new(name, block)
      @schema << Definitions::Table.new(
        name,
        fields: table_yielder.fields,
        indices: table_yielder.indices,
        checks: table_yielder.checks,
        foreign_keys: table_yielder.foreign_keys
      )
    end

    # Declares an enum type.
    def enum(name, values)
      @schema << Definitions::Enum.new(name.to_sym, values.map(&:to_sym))
    end

    # Declares a database extension.
    def extension(name)
      @schema << Definitions::Extension.new(name.to_sym)
    end

    # Receives the body of a `table` declaration and accumulates the
    # table's components.
    class TableYielder
      attr_reader :table_name

      def initialize(table_name, block)
        @table_name = table_name
        block.call(self)
      end

      # One shorthand method per registered field type (e.g. `t.integer :id`).
      DbSchema::Definitions::Field.registry.keys.each do |type|
        define_method(type) do |name, **options|
          field(name, type, options)
        end
      end

      def primary_key(name)
        fields << Definitions::Field::Integer.new(name, primary_key: true)
      end

      def field(name, type, **options)
        fields << Definitions::Field.build(name, type, options)
      end

      # Declares an index. Plain symbols are ascending fields, strings are
      # expressions; a trailing hash (or keyword args) maps field names or
      # expressions to :asc/:desc/:asc_nulls_first/:desc_nulls_last.
      def index(*columns, name: nil, unique: false, using: :btree, where: nil, **ordered_fields)
        # TODO: refactor to one big hash with all columns of the index
        if columns.last.is_a?(Hash)
          *unordered_columns, ordered_expressions = columns
        else
          unordered_columns = columns
          ordered_expressions = {}
        end

        index_columns = unordered_columns.map do |column_name|
          if column_name.is_a?(String)
            Definitions::Index::Expression.new(column_name)
          else
            Definitions::Index::TableField.new(column_name)
          end
        end + ordered_fields.merge(ordered_expressions).map do |column_name, column_order_options|
          options = case column_order_options
          when :asc
            {}
          when :desc
            { order: :desc }
          when :asc_nulls_first
            { nulls: :first }
          when :desc_nulls_last
            { order: :desc, nulls: :last }
          else
            raise ArgumentError, 'Only :asc, :desc, :asc_nulls_first and :desc_nulls_last options are supported.'
          end

          if column_name.is_a?(String)
            Definitions::Index::Expression.new(column_name, **options)
          else
            Definitions::Index::TableField.new(column_name, **options)
          end
        end

        # Default name: <table>_<col1>_<col2>_index
        index_name = name || "#{table_name}_#{index_columns.map(&:index_name_segment).join('_')}_index"

        indices << Definitions::Index.new(
          name: index_name,
          columns: index_columns,
          unique: unique,
          type: using,
          condition: where
        )
      end

      def check(name, condition)
        checks << Definitions::CheckConstraint.new(name: name, condition: condition)
      end

      # Declares a foreign key; +references+ is either :table or
      # [:table, :field, ...].
      def foreign_key(*fkey_fields, references:, name: nil, on_update: :no_action, on_delete: :no_action, deferrable: false)
        fkey_name = name || :"#{table_name}_#{fkey_fields.first}_fkey"

        if references.is_a?(Array)
          # [:table, :field]
          referenced_table, *referenced_keys = references

          foreign_keys << Definitions::ForeignKey.new(
            name: fkey_name,
            fields: fkey_fields,
            table: referenced_table,
            keys: referenced_keys,
            on_delete: on_delete,
            on_update: on_update,
            deferrable: deferrable
          )
        else
          # :table
          foreign_keys << Definitions::ForeignKey.new(
            name: fkey_name,
            fields: fkey_fields,
            table: references,
            on_delete: on_delete,
            on_update: on_update,
            deferrable: deferrable
          )
        end
      end

      # Unknown method names are treated as custom field types (e.g. enums).
      def method_missing(method_name, name, *args, &block)
        options = args.first || {}
        fields << Definitions::Field::Custom.new(
          name,
          type_name: method_name,
          **options
        )
      end

      def fields
        @fields ||= []
      end

      def indices
        @indices ||= []
      end

      def checks
        @checks ||= []
      end

      def foreign_keys
        @foreign_keys ||= []
      end
    end
  end
end
refactor index handling in DSL
module DbSchema
  # Entry point of the schema-definition DSL: evaluates a user-supplied
  # block and collects table/enum/extension definition objects.
  class DSL
    attr_reader :block

    def initialize(block)
      @block = block
      @schema = []
    end

    # Runs the stored block against self and returns the collected
    # definitions.
    def schema
      block.call(self)
      @schema
    end

    # Declares a table; the block is evaluated by a TableYielder which
    # gathers the table's fields, indices, checks and foreign keys.
    def table(name, &block)
      table_yielder = TableYielder.new(name, block)
      @schema << Definitions::Table.new(
        name,
        fields: table_yielder.fields,
        indices: table_yielder.indices,
        checks: table_yielder.checks,
        foreign_keys: table_yielder.foreign_keys
      )
    end

    # Declares an enum type.
    def enum(name, values)
      @schema << Definitions::Enum.new(name.to_sym, values.map(&:to_sym))
    end

    # Declares a database extension.
    def extension(name)
      @schema << Definitions::Extension.new(name.to_sym)
    end

    # Receives the body of a `table` declaration and accumulates the
    # table's components.
    class TableYielder
      attr_reader :table_name

      def initialize(table_name, block)
        @table_name = table_name
        block.call(self)
      end

      # One shorthand method per registered field type (e.g. `t.integer :id`).
      DbSchema::Definitions::Field.registry.keys.each do |type|
        define_method(type) do |name, **options|
          field(name, type, options)
        end
      end

      def primary_key(name)
        fields << Definitions::Field::Integer.new(name, primary_key: true)
      end

      def field(name, type, **options)
        fields << Definitions::Field.build(name, type, options)
      end

      # Declares an index. Plain symbols are ascending fields, strings are
      # expressions; a trailing hash (or keyword args) maps field names or
      # expressions to :asc/:desc/:asc_nulls_first/:desc_nulls_last.
      def index(*columns, name: nil, unique: false, using: :btree, where: nil, **ordered_fields)
        if columns.last.is_a?(Hash)
          *ascending_columns, ordered_expressions = columns
        else
          ascending_columns = columns
          ordered_expressions = {}
        end

        # Normalize everything into one { column => order } hash.
        columns_data = ascending_columns.each_with_object({}) do |column_name, columns|
          columns[column_name] = :asc
        end.merge(ordered_fields).merge(ordered_expressions)

        index_columns = columns_data.map do |column_name, column_order_options|
          options = case column_order_options
          when :asc
            {}
          when :desc
            { order: :desc }
          when :asc_nulls_first
            { nulls: :first }
          when :desc_nulls_last
            { order: :desc, nulls: :last }
          else
            raise ArgumentError, 'Only :asc, :desc, :asc_nulls_first and :desc_nulls_last options are supported.'
          end

          if column_name.is_a?(String)
            Definitions::Index::Expression.new(column_name, **options)
          else
            Definitions::Index::TableField.new(column_name, **options)
          end
        end

        # Default name: <table>_<col1>_<col2>_index
        index_name = name || "#{table_name}_#{index_columns.map(&:index_name_segment).join('_')}_index"

        indices << Definitions::Index.new(
          name: index_name,
          columns: index_columns,
          unique: unique,
          type: using,
          condition: where
        )
      end

      def check(name, condition)
        checks << Definitions::CheckConstraint.new(name: name, condition: condition)
      end

      # Declares a foreign key; +references+ is either :table or
      # [:table, :field, ...].
      def foreign_key(*fkey_fields, references:, name: nil, on_update: :no_action, on_delete: :no_action, deferrable: false)
        fkey_name = name || :"#{table_name}_#{fkey_fields.first}_fkey"

        if references.is_a?(Array)
          # [:table, :field]
          referenced_table, *referenced_keys = references

          foreign_keys << Definitions::ForeignKey.new(
            name: fkey_name,
            fields: fkey_fields,
            table: referenced_table,
            keys: referenced_keys,
            on_delete: on_delete,
            on_update: on_update,
            deferrable: deferrable
          )
        else
          # :table
          foreign_keys << Definitions::ForeignKey.new(
            name: fkey_name,
            fields: fkey_fields,
            table: references,
            on_delete: on_delete,
            on_update: on_update,
            deferrable: deferrable
          )
        end
      end

      # Unknown method names are treated as custom field types (e.g. enums).
      def method_missing(method_name, name, *args, &block)
        options = args.first || {}
        fields << Definitions::Field::Custom.new(
          name,
          type_name: method_name,
          **options
        )
      end

      # FIX: method_missing without respond_to_missing? left respond_to?
      # inconsistent with actual behaviour; any method name can act as a
      # custom field type declaration, so report true.
      def respond_to_missing?(_method_name, _include_private = false)
        true
      end

      def fields
        @fields ||= []
      end

      def indices
        @indices ||= []
      end

      def checks
        @checks ||= []
      end

      def foreign_keys
        @foreign_keys ||= []
      end
    end
  end
end
|
#
# Cookbook Name:: printers
# Recipe:: mac
#
# Copyright 2013, Wesleyan University
#
# All rights reserved - Do Not Redistribute
#

# Printer driver packages, installed from DMG images hosted on the
# local Chef server. Declared in a table and installed in order.
[
  {
    :name => "HP Printer Drivers",
    :app => "HewlettPackardPrinterDrivers",
    :volumes_dir => "HewlettPackard Printer Drivers",
    :dmg_name => "HPPrinterDrivers2.14",
    :source => "http://ims-chef.wesleyan.edu/os_x/hp_printer/HPPrinterDrivers2.14.dmg",
    :checksum => "c14f44c8e009954870ae90fdec63fea91ea1716f06a9fefc5a2cbe139c3dc9d2",
    :type => "pkg",
    :package_id => "com.apple.pkg.HewlettPackardPrinterDrivers",
    :version => "2.14.0"
  },
  {
    :name => "Xerox Printer Drivers",
    :app => "XeroxPrinterDrivers",
    :volumes_dir => "Xerox Printer Drivers",
    :dmg_name => "XeroxPrinterDrivers2.3.dmg",
    :source => "http://ims-chef.wesleyan.edu/os_x/xerox_printer/XeroxPrinterDrivers2.3.dmg",
    :checksum => "36c9e37bc1c7d5c3edc4273c28e6e19628de2faf2e974a0ef6fbd0fb0c70c80e",
    :type => "pkg",
    :package_id => "com.apple.pkg.XeroxPrinterDrivers",
    :version => "2.3.0"
  },
  {
    :name => "EPSON Printer Drivers",
    :app => "EPSONPrinterDrivers",
    :volumes_dir => "EPSON Printer Drivers",
    :dmg_name => "EPSONPrinterDrivers2.15",
    :source => "http://ims-chef.wesleyan.edu/os_x/epson_printer/EPSONPrinterDrivers2.15.dmg",
    :checksum => "f9f5658ff11b9933b9f777c15a1d433dfdb1c3131693fb8e0382a5372ba293b6",
    :type => "pkg",
    :package_id => "com.apple.pkg.EPSONPrinterDrivers",
    :version => "2.15.0"
  },
  {
    :name => "Pharos Popup Printer Client",
    :app => "Popup",
    :volumes_dir => "Popup",
    :dmg_name => "pharos_client_labs-8.4.1",
    :source => "http://ims-chef.wesleyan.edu/os_x/pharos_printer/pharos_client_labs-8.4.1.dmg",
    :checksum => "dd25cb018bc251bc9a749508c8596f6406fa850d0bab12c473530fbabcec8bf0",
    :type => "pkg",
    :package_id => "com.pharos.popup",
    :version => "8.4.1"
  },
  {
    :name => "Epson 3880 Driver",
    :app => "Epson3880",
    :volumes_dir => "Epson",
    :dmg_name => "Epson",
    :source => "http://ims-chef.wesleyan.edu/os_x/epson_printer/Epson.dmg",
    :checksum => "b620b0761a4ad1a5aa6a38124716aede665e6429e8d5b154cb5bb2d467eef247",
    :type => "mpkg",
    :package_id => "com.epson.drivers.3880",
    :version => "1.0.0"
  },
  {
    :name => "Epson 9800 Driver",
    :app => "Epson9800",
    :volumes_dir => "Epson",
    :dmg_name => "Epson",
    :source => "http://ims-chef.wesleyan.edu/os_x/epson_printer/Epson.dmg",
    :checksum => "b620b0761a4ad1a5aa6a38124716aede665e6429e8d5b154cb5bb2d467eef247",
    :type => "mpkg",
    :package_id => "com.epson.drivers.9800",
    :version => "1.0.0"
  }
].each do |driver|
  dmg_package driver[:name] do
    app driver[:app]
    volumes_dir driver[:volumes_dir]
    dmg_name driver[:dmg_name]
    source driver[:source]
    checksum driver[:checksum]
    action :install
    type driver[:type]
    package_id driver[:package_id]
    version driver[:version]
  end
end

# Printer queues, in the original declaration order. A PPD path installs
# the printer with that model; nil removes the queue.
[
  ["ArtLib-HP4105", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 4100 Series.gz"],
  ["Allb204-HP4015-BW", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 4000 Series.gz"],
  ["awks112-bw-hp5100n", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 5100 Series.gz"],
  ["AWKS112-color-HP5550", "/Library/Printers/PPDs/Contents/Resources/HP Color LaserJet 5550.gz"],
  ["CFA-HP5200-BW", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 5200 Series.gz"],
  ["cfalab-xerox7760gx-color", "/Library/Printers/PPDs/Contents/Resources/Xerox Phaser 7760GX.gz"],
  ["Fisk-HP4015BW", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 4000 Series.gz"],
  ["HAS-HP4015BW", nil],
  ["HASLab_color_HP3800", nil],
  ["LRC-HP4015BW2", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 4000 Series.gz"],
  ["PAC-HP4015BW2", nil],
  ["PAC-HP4015BW", nil],
  ["PAC-HP4015BW-2", nil],
  ["PACLab_color_HPCP3505n", nil],
  ["QAC-HP4015-BW", nil],
  ["SCIC-HP4200", "/Library/Printers/PPDs/Contents/Resources/hp LaserJet 4200 Series.gz"],
  ["ST-HP4015-BW2", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 4000 Series.gz"],
  ["ST-HP4015BW", "/Library/Printers/PPDs/Contents/Resources/HP LaserJet 4000 Series.gz"],
  ["STLab-color-HPCP3505", "/Library/Printers/PPDs/Contents/Resources/HP Color LaserJet CP3505.gz"],
  ["SciLi-Color-HPCP4520", nil],
  ["SciLi-hpm602bw", nil],
  ["UsdanLobby-RicohBW", "drv:///sample.drv/laserjet.ppd"]
].each do |printer_name, ppd|
  printers printer_name do
    if ppd.nil?
      action :remove
    else
      model ppd
    end
  end
end

printers "Printers" do
  action :set_default
end
Now removing all printers.
#
# Cookbook Name:: printers
# Recipe:: mac
#
# Copyright 2013, Wesleyan University
#
# All rights reserved - Do Not Redistribute
#

# Printer driver packages, installed from DMG images hosted on the
# local Chef server. Declared in a table and installed in order.
[
  {
    :name => "EPSON Printer Drivers",
    :app => "EPSONPrinterDrivers",
    :volumes_dir => "EPSON Printer Drivers",
    :dmg_name => "EPSONPrinterDrivers2.15",
    :source => "http://ims-chef.wesleyan.edu/os_x/epson_printer/EPSONPrinterDrivers2.15.dmg",
    :checksum => "f9f5658ff11b9933b9f777c15a1d433dfdb1c3131693fb8e0382a5372ba293b6",
    :type => "pkg",
    :package_id => "com.apple.pkg.EPSONPrinterDrivers",
    :version => "2.15.0"
  },
  {
    :name => "Pharos Popup Printer Client",
    :app => "Popup",
    :volumes_dir => "Popup",
    :dmg_name => "pharos_client_labs-8.4.1",
    :source => "http://ims-chef.wesleyan.edu/os_x/pharos_printer/pharos_client_labs-8.4.1.dmg",
    :checksum => "dd25cb018bc251bc9a749508c8596f6406fa850d0bab12c473530fbabcec8bf0",
    :type => "pkg",
    :package_id => "com.pharos.popup",
    :version => "8.4.1"
  },
  {
    :name => "Epson 3880 Driver",
    :app => "Epson3880",
    :volumes_dir => "Epson",
    :dmg_name => "Epson",
    :source => "http://ims-chef.wesleyan.edu/os_x/epson_printer/Epson.dmg",
    :checksum => "b620b0761a4ad1a5aa6a38124716aede665e6429e8d5b154cb5bb2d467eef247",
    :type => "mpkg",
    :package_id => "com.epson.drivers.3880",
    :version => "1.0.0"
  },
  {
    :name => "Epson 9800 Driver",
    :app => "Epson9800",
    :volumes_dir => "Epson",
    :dmg_name => "Epson",
    :source => "http://ims-chef.wesleyan.edu/os_x/epson_printer/Epson.dmg",
    :checksum => "b620b0761a4ad1a5aa6a38124716aede665e6429e8d5b154cb5bb2d467eef247",
    :type => "mpkg",
    :package_id => "com.epson.drivers.9800",
    :version => "1.0.0"
  }
].each do |driver|
  dmg_package driver[:name] do
    app driver[:app]
    volumes_dir driver[:volumes_dir]
    dmg_name driver[:dmg_name]
    source driver[:source]
    checksum driver[:checksum]
    action :install
    type driver[:type]
    package_id driver[:package_id]
    version driver[:version]
  end
end

# All legacy printer queues are being removed, in the original order.
%w[
  ArtLib-HP4105
  Allb204-HP4015-BW
  awks112-bw-hp5100n
  AWKS112-color-HP5550
  CFA-HP5200-BW
  cfalab-xerox7760gx-color
  Fisk-HP4015BW
  HAS-HP4015BW
  HASLab_color_HP3800
  LRC-HP4015BW2
  PAC-HP4015BW2
  PAC-HP4015BW
  PAC-HP4015BW-2
  PACLab_color_HPCP3505n
  QAC-HP4015-BW
  SCIC-HP4200
  ST-HP4015-BW2
  ST-HP4015BW
  STLab-color-HPCP3505
  SciLi-Color-HPCP4520
  SciLi-hpm602bw
  UsdanLobby-RicohBW
].each do |printer_name|
  printers printer_name do
    action :remove
  end
end

printers "Printers" do
  action :set_default
end
|
module Yuba
  # Controller mixin that captures a :view_model option passed to #render
  # and exposes the view model's public methods as view assigns.
  module Rendering
    # Intercepts render to remember the :view_model option, if present.
    #
    # FIX: guard against render calls with no matching options hash - the
    # original dereferenced the find result unconditionally, raising
    # NoMethodError on nil for plain render calls.
    def render(*args)
      view_model_hash = args.find { |arg| arg.is_a?(Hash) && arg[:view_model] }
      @_view_model = view_model_hash[:view_model] if view_model_hash
      super
    end

    # Regular view assigns plus the view model's exported values.
    def view_assigns
      super.merge(view_model_assigns)
    end

    private

    # Keep @_view_model itself out of the ivars copied into the view.
    # NOTE(review): assumes super returns a Set-like object whose #merge
    # accepts a single symbol; verify against the framework version in use.
    def _protected_ivars
      super.merge(:@_view_model)
    end

    # Builds { method_name => value } from the view model's own public
    # methods, skipping :call and :initialize.
    def view_model_assigns
      return {} unless @_view_model
      # TODO: get all public methods between self and Yuba::ViewModel
      # now get only in self
      methods = @_view_model.public_methods(false)
      methods.reject! do |method_name|
        %i[call initialize].include?(method_name)
      end
      methods.inject({}) do |hash, method_name|
        hash[method_name] = @_view_model.public_send(method_name)
        hash
      end
    end
  end
end
Fix error on render without view_model options
module Yuba
  # Controller mixin that captures a :view_model option passed to #render
  # and exposes the view model's public methods as view assigns.
  module Rendering
    # Remember the :view_model render option (when given) before
    # delegating to the framework's render.
    def render(*args)
      options = args.detect { |candidate| candidate.is_a?(Hash) && candidate[:view_model] }
      @_view_model = options[:view_model] unless options.nil?
      super
    end

    # Regular view assigns plus the view model's exported values.
    def view_assigns
      super.merge(view_model_assigns)
    end

    private

    # Keep @_view_model itself out of the ivars copied into the view.
    def _protected_ivars
      super.merge(:@_view_model)
    end

    # Builds { method_name => value } from the view model's own public
    # methods, skipping :call and :initialize.
    def view_model_assigns
      model = @_view_model
      return {} if model.nil?
      # TODO: get all public methods between self and Yuba::ViewModel
      # now get only in self
      exported = model.public_methods(false) - %i[call initialize]
      exported.each_with_object({}) do |method_name, assigns|
        assigns[method_name] = model.public_send(method_name)
      end
    end
  end
end
|
require 'nokogiri'
require 'cgi'

module Deface
  # Translates ERB templates to and from an HTML-parseable encoding so
  # that overrides can be applied with Nokogiri.
  class Parser
    # converts erb to markup
    #
    # ERB tags inside HTML open tags / attributes are encoded as
    # data-erb-* attributes; all remaining ERB tags become
    # <code erb-loud>/<code erb-silent> elements with escaped contents.
    def self.erb_markup!(source)
      #all opening html tags that contain <% %> blocks
      source.scan(/<\w+[^<>]+(?:<%.*?%>[^<>]*)+/m).each do |line|
        #regexs to catch <% %> inside attributes id="<% something %>" - with double, single or no quotes
        erb_attrs_regexs = [/([\w-]+)(\s?=\s?)(")([^"]*<%.*?%>[^"]*)/m,
                            /([\w-]+)(\s?=\s?)(')([^']*<%.*?%>[^']*)'/m,
                            /([\w-]+)(\s?=\s?)()(<%.*?%>)(?:\s|>|\z)/m]

        # Rewrite each matching attribute to data-erb-<attr>="<escaped erb>".
        replace_line = erb_attrs_regexs.inject(line.clone) do |replace_line, regex|
          replace_line = line.scan(regex).inject(replace_line) do |replace_line, match|
            replace_line.sub("#{match[0]}#{match[1]}#{match[2]}#{match[3]}#{match[2]}") { |m| m = " data-erb-#{match[0]}=\"#{CGI.escapeHTML(match[3])}\"" }
          end
          replace_line
        end

        i = -1
        #catch all <% %> inside tags id <p <%= test %>> , not inside attrs
        replace_line.scan(/(<%.*?%>)/m).each do |match|
          replace_line.sub!(match[0]) { |m| m = " data-erb-#{i += 1}=\"#{CGI.escapeHTML(match[0])}\"" }
        end

        source.sub!(line) { |m| m = replace_line }
      end

      #replaces all <% %> not inside opening html tags
      replacements = [ {"<%=" => "<code erb-loud>"},
                       {"<%" => "<code erb-silent>"},
                       {"%>" => "</code>"} ]
      replacements.each{ |h| h.each { |replace, with| source.gsub! replace, with } }

      # HTML-escape the contents of the generated <code> elements.
      source.scan(/(<code.*?>)((?:(?!<\/code>)[\s\S])*)(<\/code>)/).each do |match|
        source.sub!("#{match[0]}#{match[1]}#{match[2]}") { |m| m = "#{match[0]}#{CGI.escapeHTML(match[1])}#{match[2]}" }
      end

      source
    end

    # undoes ERB markup generated by Deface::Parser::ERB
    #
    def self.undo_erb_markup!(source)
      # Restore the <code> placeholders back to ERB delimiters.
      replacements = [ {"<code erb-silent>" => '<%'},
                       {"<code erb-loud>" => '<%='},
                       {"</code>" => '%>'}]
      replacements.each{ |h| h.each { |replace, with| source.gsub! replace, with } }

      # Numbered data-erb-N attributes become bare ERB tags again.
      source.scan(/data-erb-(\d+)+=(['"])(.*?)\2/m).each do |match|
        source.gsub!("data-erb-#{match[0]}=#{match[1]}#{match[2]}#{match[1]}") { |m| m = CGI.unescapeHTML(match[2]) }
      end

      # Named data-erb-<attr> attributes become <attr>="<erb>" again.
      source.scan(/data-erb-([\w-]+)+=(["'])(.*?)\2/m).each do |match|
        source.gsub!("data-erb-#{match[0]}=#{match[1]}#{match[2]}#{match[1]}") { |m| "#{match[0]}=#{match[1]}#{CGI.unescapeHTML(match[2])}#{match[1]}" }
      end

      #un-escape changes from Nokogiri and erb-markup!
      source.scan(/(<%.*?)((?:(?!%>)[\s\S])*)(%>)/).each do |match|
        source.gsub!("#{match[0]}#{match[1]}#{match[2]}") { |m| m = "#{match[0]}#{ CGI.unescapeHTML match[1] }#{match[2]}" }
      end

      source
    end

    # Parses the encoded template with the appropriate Nokogiri entry
    # point: full document, body fragment, or plain fragment.
    def self.convert(source)
      erb_markup!(source)

      if source =~ /<html.*?(?:(?!>)[\s\S])*>/
        Nokogiri::HTML::Document.parse(source)
      elsif source =~ /<body.*?(?:(?!>)[\s\S])*>/
        Nokogiri::HTML::Document.parse(source).css('body').first
      else
        Nokogiri::HTML::DocumentFragment.parse(source)
      end
    end
  end
end
Force encoding for source when using Nokogiri::HTML::DocumentFragment
This prevents the encoding error "output error : unknown encoding ASCII-8BIT".
require 'nokogiri'
require 'cgi'
# Deface::Parser translates ERB templates into HTML-parseable markup and
# back again. Raw <% %> tags cannot survive an HTML parse, so erb_markup!
# rewrites them into <code> elements / data-erb-* attributes, and
# undo_erb_markup! reverses the transformation.
module Deface
class Parser
# converts erb to markup
#
# Rewrites every ERB block in +source+ (mutated in place) into HTML-safe
# placeholders:
#   * ERB inside an attribute value  -> data-erb-<attr>="<escaped erb>"
#   * ERB directly inside a tag      -> data-erb-0, data-erb-1, ... attrs
#   * ERB anywhere else              -> <code erb-silent> / <code erb-loud>
# Returns the mutated source string.
def self.erb_markup!(source)
#all opening html tags that contain <% %> blocks
source.scan(/<\w+[^<>]+(?:<%.*?%>[^<>]*)+/m).each do |line|
#regexs to catch <% %> inside attributes id="<% something %>" - with double, single or no quotes
erb_attrs_regexs = [/([\w-]+)(\s?=\s?)(")([^"]*<%.*?%>[^"]*)/m,
/([\w-]+)(\s?=\s?)(')([^']*<%.*?%>[^']*)'/m,
/([\w-]+)(\s?=\s?)()(<%.*?%>)(?:\s|>|\z)/m]
# apply each attribute regex in turn, accumulating replacements on a
# clone so the scan still runs against the untouched original line
replace_line = erb_attrs_regexs.inject(line.clone) do |replace_line, regex|
replace_line = line.scan(regex).inject(replace_line) do |replace_line, match|
replace_line.sub("#{match[0]}#{match[1]}#{match[2]}#{match[3]}#{match[2]}") { |m| m = " data-erb-#{match[0]}=\"#{CGI.escapeHTML(match[3])}\"" }
end
replace_line
end
i = -1
#catch all <% %> inside tags id <p <%= test %>> , not inside attrs
replace_line.scan(/(<%.*?%>)/m).each do |match|
replace_line.sub!(match[0]) { |m| m = " data-erb-#{i += 1}=\"#{CGI.escapeHTML(match[0])}\"" }
end
source.sub!(line) { |m| m = replace_line }
end
#replaces all <% %> not inside opening html tags
replacements = [ {"<%=" => "<code erb-loud>"},
{"<%" => "<code erb-silent>"},
{"%>" => "</code>"} ]
replacements.each{ |h| h.each { |replace, with| source.gsub! replace, with } }
# escape the ruby code inside the generated <code> elements so the HTML
# parser does not interpret it as markup
source.scan(/(<code.*?>)((?:(?!<\/code>)[\s\S])*)(<\/code>)/).each do |match|
source.sub!("#{match[0]}#{match[1]}#{match[2]}") { |m| m = "#{match[0]}#{CGI.escapeHTML(match[1])}#{match[2]}" }
end
source
end
# undoes ERB markup generated by Deface::Parser::ERB
#
# Reverses erb_markup!: restores <% / <%= / %> delimiters from the
# placeholder <code> elements, expands data-erb-* attributes back into raw
# ERB, and unescapes the HTML-escaped ruby code. Mutates and returns
# +source+.
def self.undo_erb_markup!(source)
# erb-loud must map back before erb-silent's plain "<%" so "=" survives
replacements = [ {"<code erb-silent>" => '<%'},
{"<code erb-loud>" => '<%='},
{"</code>" => '%>'}]
replacements.each{ |h| h.each { |replace, with| source.gsub! replace, with } }
# numbered placeholders held whole ERB tags inside an opening HTML tag
source.scan(/data-erb-(\d+)+=(['"])(.*?)\2/m).each do |match|
source.gsub!("data-erb-#{match[0]}=#{match[1]}#{match[2]}#{match[1]}") { |m| m = CGI.unescapeHTML(match[2]) }
end
# named placeholders held attribute values containing ERB
source.scan(/data-erb-([\w-]+)+=(["'])(.*?)\2/m).each do |match|
source.gsub!("data-erb-#{match[0]}=#{match[1]}#{match[2]}#{match[1]}") { |m| "#{match[0]}=#{match[1]}#{CGI.unescapeHTML(match[2])}#{match[1]}" }
end
#un-escape changes from Nokogiri and erb-markup!
source.scan(/(<%.*?)((?:(?!%>)[\s\S])*)(%>)/).each do |match|
source.gsub!("#{match[0]}#{match[1]}#{match[2]}") { |m| m = "#{match[0]}#{ CGI.unescapeHTML match[1] }#{match[2]}" }
end
source
end
# Converts +source+ into a Nokogiri node tree: first rewrites ERB via
# erb_markup!, then picks the parse mode from the tags present (full
# document, body element, or bare fragment).
def self.convert(source)
erb_markup!(source)
if source =~ /<html.*?(?:(?!>)[\s\S])*>/
Nokogiri::HTML::Document.parse(source)
elsif source =~ /<body.*?(?:(?!>)[\s\S])*>/
Nokogiri::HTML::Document.parse(source).css('body').first
else
# force UTF-8 to avoid "unknown encoding ASCII-8BIT" from libxml
source = source.force_encoding('utf-8') if source.respond_to?(:force_encoding)
Nokogiri::HTML::DocumentFragment.parse(source)
end
end
end
end
|
#
# Cookbook Name:: tile
# Recipe:: default
#
# Copyright 2013, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "apache"
include_recipe "git"
include_recipe "nodejs"
include_recipe "postgresql"
include_recipe "tools"
blocks = data_bag_item("tile", "blocks")
apache_module "alias"
apache_module "expires"
apache_module "headers"
apache_module "remoteip"
apache_module "rewrite"
apache_module "tile" do
conf "tile.conf.erb"
end
tilecaches = search(:node, "roles:tilecache").sort_by { |n| n[:hostname] }
apache_site "default" do
action [ :disable ]
end
apache_site "tile.openstreetmap.org" do
template "apache.erb"
variables :caches => tilecaches
end
template "/etc/logrotate.d/apache2" do
source "logrotate.apache.erb"
owner "root"
group "root"
mode 0644
end
directory "/srv/tile.openstreetmap.org" do
owner "tile"
group "tile"
mode 0755
end
package "renderd"
service "renderd" do
action [ :enable, :start ]
supports :status => false, :restart => true, :reload => false
end
directory "/srv/tile.openstreetmap.org/tiles" do
owner "tile"
group "tile"
mode 0755
end
template "/etc/renderd.conf" do
source "renderd.conf.erb"
owner "root"
group "root"
mode 0644
notifies :reload, resources(:service => "apache2")
notifies :restart, resources(:service => "renderd")
end
remote_directory "/srv/tile.openstreetmap.org/html" do
source "html"
owner "tile"
group "tile"
mode 0755
files_owner "tile"
files_group "tile"
files_mode 0644
end
template "/srv/tile.openstreetmap.org/html/index.html" do
source "index.html.erb"
owner "tile"
group "tile"
mode 0644
end
package "python-cairo"
package "python-mapnik"
directory "/srv/tile.openstreetmap.org/cgi-bin" do
owner "tile"
group "tile"
mode 0755
end
template "/srv/tile.openstreetmap.org/cgi-bin/export" do
source "export.erb"
owner "tile"
group "tile"
mode 0755
variables :blocks => blocks
end
template "/srv/tile.openstreetmap.org/cgi-bin/debug" do
source "debug.erb"
owner "tile"
group "tile"
mode 0755
end
template "/etc/cron.hourly/export" do
source "export.cron.erb"
owner "root"
group "root"
mode 0755
end
directory "/srv/tile.openstreetmap.org/data" do
owner "tile"
group "tile"
mode 0755
end
node[:tile][:data].each do |name,data|
url = data[:url]
file = "/srv/tile.openstreetmap.org/data/#{File.basename(url)}"
directory = "/srv/tile.openstreetmap.org/data/#{data[:directory]}"
directory directory do
owner "tile"
group "tile"
mode 0755
end
if file =~ /\.tgz$/
package "tar"
execute file do
action :nothing
command "tar -zxf #{file} -C #{directory}"
user "tile"
group "tile"
end
elsif file =~ /\.tar\.bz2$/
package "tar"
execute file do
action :nothing
command "tar -jxf #{file} -C #{directory}"
user "tile"
group "tile"
end
elsif file =~ /\.zip$/
package "unzip"
execute file do
action :nothing
command "unzip -qq -o #{file} -d #{directory}"
user "tile"
group "tile"
end
end
if data[:processed]
original = "#{directory}/#{data[:original]}"
processed = "#{directory}/#{data[:processed]}"
package "gdal-bin"
execute processed do
action :nothing
command "ogr2ogr #{processed} #{original}"
user "tile"
group "tile"
subscribes :run, resources(:execute => file), :immediately
end
end
# Rebuild shapefile indexes after this data set's archive is unpacked.
execute "#{file}_shapeindex" do
action :nothing
command "find #{directory} -type f -iname '*.shp' -print0 | xargs -0 --no-run-if-empty shapeindex --shape_files"
user "tile"
# fixed: this line duplicated `user "tile"`; every sibling execute resource
# in this recipe runs as tile:tile, so the second line should set the group
group "tile"
subscribes :run, resources(:execute => file), :immediately
end
remote_file file do
if data[:refresh]
action :create
use_conditional_get true
else
action :create_if_missing
end
source url
owner "tile"
group "tile"
mode 0644
notifies :run, resources(:execute => file), :immediately
notifies :restart, resources(:service => "renderd")
end
end
nodejs_package "carto"
nodejs_package "millstone"
directory "/srv/tile.openstreetmap.org/styles" do
owner "tile"
group "tile"
mode 0755
end
node[:tile][:styles].each do |name,details|
style_directory = "/srv/tile.openstreetmap.org/styles/#{name}"
tile_directory = "/srv/tile.openstreetmap.org/tiles/#{name}"
template "/usr/local/bin/update-lowzoom-#{name}" do
source "update-lowzoom.erb"
owner "root"
group "root"
mode 0755
variables :style => name
end
template "/etc/init.d/update-lowzoom-#{name}" do
source "update-lowzoom.init.erb"
owner "root"
group "root"
mode 0755
variables :style => name
end
service "update-lowzoom-#{name}" do
action :disable
supports :restart => true
end
directory tile_directory do
owner "tile"
group "tile"
mode 0755
end
details[:tile_directories].each do |directory|
directory[:min_zoom].upto(directory[:max_zoom]) do |zoom|
directory "#{directory[:name]}/#{zoom}" do
owner "www-data"
group "www-data"
mode 0755
end
link "#{tile_directory}/#{zoom}" do
to "#{directory[:name]}/#{zoom}"
owner "tile"
group "tile"
end
end
end
file "#{tile_directory}/planet-import-complete" do
action :create_if_missing
owner "tile"
group "tile"
mode 0444
end
git style_directory do
action :sync
repository details[:repository]
revision details[:revision]
user "tile"
group "tile"
end
link "#{style_directory}/data" do
to "/srv/tile.openstreetmap.org/data"
owner "tile"
group "tile"
end
execute "#{style_directory}/project.mml" do
action :nothing
command "carto project.mml > project.xml"
cwd style_directory
user "tile"
group "tile"
subscribes :run, "git[#{style_directory}]"
notifies :restart, "service[renderd]", :immediately
notifies :restart, "service[update-lowzoom-#{name}]"
end
end
package "postgis"
postgresql_user "jburgess" do
cluster node[:tile][:database][:cluster]
superuser true
end
postgresql_user "tomh" do
cluster node[:tile][:database][:cluster]
superuser true
end
postgresql_user "tile" do
cluster node[:tile][:database][:cluster]
end
postgresql_user "www-data" do
cluster node[:tile][:database][:cluster]
end
postgresql_database "gis" do
cluster node[:tile][:database][:cluster]
owner "tile"
end
postgresql_extension "postgis" do
cluster node[:tile][:database][:cluster]
database "gis"
end
[ "geography_columns",
"planet_osm_nodes",
"planet_osm_rels",
"planet_osm_ways",
"raster_columns",
"raster_overviews",
"spatial_ref_sys" ].each do |table|
postgresql_table table do
cluster node[:tile][:database][:cluster]
database "gis"
owner "tile"
permissions "tile" => :all
end
end
[ "geometry_columns",
"planet_osm_line",
"planet_osm_point",
"planet_osm_polygon",
"planet_osm_roads" ].each do |table|
postgresql_table table do
cluster node[:tile][:database][:cluster]
database "gis"
owner "tile"
permissions "tile" => :all, "www-data" => :select
end
end
postgresql_munin "gis" do
cluster node[:tile][:database][:cluster]
database "gis"
end
#file node[:tile][:node_file] do
# owner "tile"
# group "www-data"
# mode 0640
#end
directory "/var/log/tile" do
owner "tile"
group "tile"
mode 0755
end
package "osm2pgsql"
package "osmosis"
package "ruby"
package "rubygems"
package "libproj-dev"
package "libxml2-dev"
gem_package "proj4rb"
gem_package "libxml-ruby"
gem_package "mmap"
remote_directory "/usr/local/lib/site_ruby" do
source "ruby"
owner "root"
group "root"
mode 0755
files_owner "root"
files_group "root"
files_mode 0644
end
template "/usr/local/bin/expire-tiles" do
source "expire-tiles.erb"
owner "root"
group "root"
mode 0755
end
template "/etc/sudoers.d/tile" do
source "sudoers.erb"
owner "root"
group "root"
mode 0440
end
directory "/var/lib/replicate" do
owner "tile"
group "tile"
mode 0755
end
template "/var/lib/replicate/configuration.txt" do
source "replicate.configuration.erb"
owner "tile"
group "tile"
mode 0644
end
template "/usr/local/bin/replicate" do
source "replicate.erb"
owner "root"
group "root"
mode 0755
end
template "/etc/init.d/replicate" do
source "replicate.init.erb"
owner "root"
group "root"
mode 0755
end
service "replicate" do
action [ :enable, :start ]
supports :restart => true
subscribes :restart, resources(:template => "/usr/local/bin/replicate")
subscribes :restart, resources(:template => "/etc/init.d/replicate")
end
template "/etc/logrotate.d/replicate" do
source "replicate.logrotate.erb"
owner "root"
group "root"
mode 0644
end
template "/usr/local/bin/render-lowzoom" do
source "render-lowzoom.erb"
owner "root"
group "root"
mode 0755
end
template "/etc/cron.d/render-lowzoom" do
source "render-lowzoom.cron.erb"
owner "root"
group "root"
mode 0644
end
template "/etc/rsyslog.d/20-renderd.conf" do
source "renderd.rsyslog.erb"
owner "root"
group "root"
mode 0644
notifies :restart, "service[rsyslog]"
end
package "libfilesys-df-perl"
template "/usr/local/bin/cleanup-tiles" do
source "cleanup-tiles.erb"
owner "root"
group "root"
mode 0755
end
tile_directories = node[:tile][:styles].collect do |name,style|
style[:tile_directories].collect { |directory| directory[:name] }
end.flatten.sort.uniq
template "/etc/cron.d/cleanup-tiles" do
source "cleanup-tiles.cron.erb"
owner "root"
group "root"
mode 0644
variables :directories => tile_directories
end
munin_plugin "mod_tile_fresh"
munin_plugin "mod_tile_response"
munin_plugin "mod_tile_zoom"
munin_plugin "mod_tile_latency"
munin_plugin "renderd_processed"
munin_plugin "renderd_queue"
munin_plugin "renderd_zoom"
munin_plugin "renderd_zoom_time"
munin_plugin "replication_delay" do
conf "munin.erb"
end
Manage the renderd_queue_time munin plugin
#
# Cookbook Name:: tile
# Recipe:: default
#
# Copyright 2013, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "apache"
include_recipe "git"
include_recipe "nodejs"
include_recipe "postgresql"
include_recipe "tools"
blocks = data_bag_item("tile", "blocks")
apache_module "alias"
apache_module "expires"
apache_module "headers"
apache_module "remoteip"
apache_module "rewrite"
apache_module "tile" do
conf "tile.conf.erb"
end
tilecaches = search(:node, "roles:tilecache").sort_by { |n| n[:hostname] }
apache_site "default" do
action [ :disable ]
end
apache_site "tile.openstreetmap.org" do
template "apache.erb"
variables :caches => tilecaches
end
template "/etc/logrotate.d/apache2" do
source "logrotate.apache.erb"
owner "root"
group "root"
mode 0644
end
directory "/srv/tile.openstreetmap.org" do
owner "tile"
group "tile"
mode 0755
end
package "renderd"
service "renderd" do
action [ :enable, :start ]
supports :status => false, :restart => true, :reload => false
end
directory "/srv/tile.openstreetmap.org/tiles" do
owner "tile"
group "tile"
mode 0755
end
template "/etc/renderd.conf" do
source "renderd.conf.erb"
owner "root"
group "root"
mode 0644
notifies :reload, resources(:service => "apache2")
notifies :restart, resources(:service => "renderd")
end
remote_directory "/srv/tile.openstreetmap.org/html" do
source "html"
owner "tile"
group "tile"
mode 0755
files_owner "tile"
files_group "tile"
files_mode 0644
end
template "/srv/tile.openstreetmap.org/html/index.html" do
source "index.html.erb"
owner "tile"
group "tile"
mode 0644
end
package "python-cairo"
package "python-mapnik"
directory "/srv/tile.openstreetmap.org/cgi-bin" do
owner "tile"
group "tile"
mode 0755
end
template "/srv/tile.openstreetmap.org/cgi-bin/export" do
source "export.erb"
owner "tile"
group "tile"
mode 0755
variables :blocks => blocks
end
template "/srv/tile.openstreetmap.org/cgi-bin/debug" do
source "debug.erb"
owner "tile"
group "tile"
mode 0755
end
template "/etc/cron.hourly/export" do
source "export.cron.erb"
owner "root"
group "root"
mode 0755
end
directory "/srv/tile.openstreetmap.org/data" do
owner "tile"
group "tile"
mode 0755
end
node[:tile][:data].each do |name,data|
url = data[:url]
file = "/srv/tile.openstreetmap.org/data/#{File.basename(url)}"
directory = "/srv/tile.openstreetmap.org/data/#{data[:directory]}"
directory directory do
owner "tile"
group "tile"
mode 0755
end
if file =~ /\.tgz$/
package "tar"
execute file do
action :nothing
command "tar -zxf #{file} -C #{directory}"
user "tile"
group "tile"
end
elsif file =~ /\.tar\.bz2$/
package "tar"
execute file do
action :nothing
command "tar -jxf #{file} -C #{directory}"
user "tile"
group "tile"
end
elsif file =~ /\.zip$/
package "unzip"
execute file do
action :nothing
command "unzip -qq -o #{file} -d #{directory}"
user "tile"
group "tile"
end
end
if data[:processed]
original = "#{directory}/#{data[:original]}"
processed = "#{directory}/#{data[:processed]}"
package "gdal-bin"
execute processed do
action :nothing
command "ogr2ogr #{processed} #{original}"
user "tile"
group "tile"
subscribes :run, resources(:execute => file), :immediately
end
end
# Rebuild shapefile indexes after this data set's archive is unpacked.
execute "#{file}_shapeindex" do
action :nothing
command "find #{directory} -type f -iname '*.shp' -print0 | xargs -0 --no-run-if-empty shapeindex --shape_files"
user "tile"
# fixed: this line duplicated `user "tile"`; every sibling execute resource
# in this recipe runs as tile:tile, so the second line should set the group
group "tile"
subscribes :run, resources(:execute => file), :immediately
end
remote_file file do
if data[:refresh]
action :create
use_conditional_get true
else
action :create_if_missing
end
source url
owner "tile"
group "tile"
mode 0644
notifies :run, resources(:execute => file), :immediately
notifies :restart, resources(:service => "renderd")
end
end
nodejs_package "carto"
nodejs_package "millstone"
directory "/srv/tile.openstreetmap.org/styles" do
owner "tile"
group "tile"
mode 0755
end
node[:tile][:styles].each do |name,details|
style_directory = "/srv/tile.openstreetmap.org/styles/#{name}"
tile_directory = "/srv/tile.openstreetmap.org/tiles/#{name}"
template "/usr/local/bin/update-lowzoom-#{name}" do
source "update-lowzoom.erb"
owner "root"
group "root"
mode 0755
variables :style => name
end
template "/etc/init.d/update-lowzoom-#{name}" do
source "update-lowzoom.init.erb"
owner "root"
group "root"
mode 0755
variables :style => name
end
service "update-lowzoom-#{name}" do
action :disable
supports :restart => true
end
directory tile_directory do
owner "tile"
group "tile"
mode 0755
end
details[:tile_directories].each do |directory|
directory[:min_zoom].upto(directory[:max_zoom]) do |zoom|
directory "#{directory[:name]}/#{zoom}" do
owner "www-data"
group "www-data"
mode 0755
end
link "#{tile_directory}/#{zoom}" do
to "#{directory[:name]}/#{zoom}"
owner "tile"
group "tile"
end
end
end
file "#{tile_directory}/planet-import-complete" do
action :create_if_missing
owner "tile"
group "tile"
mode 0444
end
git style_directory do
action :sync
repository details[:repository]
revision details[:revision]
user "tile"
group "tile"
end
link "#{style_directory}/data" do
to "/srv/tile.openstreetmap.org/data"
owner "tile"
group "tile"
end
execute "#{style_directory}/project.mml" do
action :nothing
command "carto project.mml > project.xml"
cwd style_directory
user "tile"
group "tile"
subscribes :run, "git[#{style_directory}]"
notifies :restart, "service[renderd]", :immediately
notifies :restart, "service[update-lowzoom-#{name}]"
end
end
package "postgis"
postgresql_user "jburgess" do
cluster node[:tile][:database][:cluster]
superuser true
end
postgresql_user "tomh" do
cluster node[:tile][:database][:cluster]
superuser true
end
postgresql_user "tile" do
cluster node[:tile][:database][:cluster]
end
postgresql_user "www-data" do
cluster node[:tile][:database][:cluster]
end
postgresql_database "gis" do
cluster node[:tile][:database][:cluster]
owner "tile"
end
postgresql_extension "postgis" do
cluster node[:tile][:database][:cluster]
database "gis"
end
[ "geography_columns",
"planet_osm_nodes",
"planet_osm_rels",
"planet_osm_ways",
"raster_columns",
"raster_overviews",
"spatial_ref_sys" ].each do |table|
postgresql_table table do
cluster node[:tile][:database][:cluster]
database "gis"
owner "tile"
permissions "tile" => :all
end
end
[ "geometry_columns",
"planet_osm_line",
"planet_osm_point",
"planet_osm_polygon",
"planet_osm_roads" ].each do |table|
postgresql_table table do
cluster node[:tile][:database][:cluster]
database "gis"
owner "tile"
permissions "tile" => :all, "www-data" => :select
end
end
postgresql_munin "gis" do
cluster node[:tile][:database][:cluster]
database "gis"
end
#file node[:tile][:node_file] do
# owner "tile"
# group "www-data"
# mode 0640
#end
directory "/var/log/tile" do
owner "tile"
group "tile"
mode 0755
end
package "osm2pgsql"
package "osmosis"
package "ruby"
package "rubygems"
package "libproj-dev"
package "libxml2-dev"
gem_package "proj4rb"
gem_package "libxml-ruby"
gem_package "mmap"
remote_directory "/usr/local/lib/site_ruby" do
source "ruby"
owner "root"
group "root"
mode 0755
files_owner "root"
files_group "root"
files_mode 0644
end
template "/usr/local/bin/expire-tiles" do
source "expire-tiles.erb"
owner "root"
group "root"
mode 0755
end
template "/etc/sudoers.d/tile" do
source "sudoers.erb"
owner "root"
group "root"
mode 0440
end
directory "/var/lib/replicate" do
owner "tile"
group "tile"
mode 0755
end
template "/var/lib/replicate/configuration.txt" do
source "replicate.configuration.erb"
owner "tile"
group "tile"
mode 0644
end
template "/usr/local/bin/replicate" do
source "replicate.erb"
owner "root"
group "root"
mode 0755
end
template "/etc/init.d/replicate" do
source "replicate.init.erb"
owner "root"
group "root"
mode 0755
end
service "replicate" do
action [ :enable, :start ]
supports :restart => true
subscribes :restart, resources(:template => "/usr/local/bin/replicate")
subscribes :restart, resources(:template => "/etc/init.d/replicate")
end
template "/etc/logrotate.d/replicate" do
source "replicate.logrotate.erb"
owner "root"
group "root"
mode 0644
end
template "/usr/local/bin/render-lowzoom" do
source "render-lowzoom.erb"
owner "root"
group "root"
mode 0755
end
template "/etc/cron.d/render-lowzoom" do
source "render-lowzoom.cron.erb"
owner "root"
group "root"
mode 0644
end
template "/etc/rsyslog.d/20-renderd.conf" do
source "renderd.rsyslog.erb"
owner "root"
group "root"
mode 0644
notifies :restart, "service[rsyslog]"
end
package "libfilesys-df-perl"
template "/usr/local/bin/cleanup-tiles" do
source "cleanup-tiles.erb"
owner "root"
group "root"
mode 0755
end
tile_directories = node[:tile][:styles].collect do |name,style|
style[:tile_directories].collect { |directory| directory[:name] }
end.flatten.sort.uniq
template "/etc/cron.d/cleanup-tiles" do
source "cleanup-tiles.cron.erb"
owner "root"
group "root"
mode 0644
variables :directories => tile_directories
end
munin_plugin "mod_tile_fresh"
munin_plugin "mod_tile_latency"
munin_plugin "mod_tile_response"
munin_plugin "mod_tile_zoom"
munin_plugin "renderd_processed"
munin_plugin "renderd_queue"
munin_plugin "renderd_queue_time"
munin_plugin "renderd_zoom"
munin_plugin "renderd_zoom_time"
munin_plugin "replication_delay" do
conf "munin.erb"
end
|
module FontAwesome
module Rails
# Version of the bundled Font Awesome asset files.
FA_VERSION = "4.3.0".freeze
# Gem version: the Font Awesome version plus a gem-release digit.
VERSION = "4.3.0.0".freeze
end
end
Bumped version to 4.4.0.0
module FontAwesome
module Rails
# Version of the bundled Font Awesome asset files.
FA_VERSION = "4.4.0".freeze
# Gem version: the Font Awesome version plus a gem-release digit.
VERSION = "4.4.0.0".freeze
end
end
|
module Zxcvbn
# Gem release version. Frozen so the constant cannot be mutated in place.
VERSION = "0.1.0".freeze
end
Version 0.1.1
https://github.com/envato/zxcvbn-ruby/compare/v0.1.0...v0.1.1
module Zxcvbn
# Gem release version.
VERSION = "0.1.1".freeze
end
|
# frozen_string_literal: true
require_relative "boot"
require "rails/all"
require_relative "bundler_helper"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups(BundlerHelper.database))
# Load asset_sync early
require_relative 'asset_sync'
module Diaspora
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.1
# Use classic autoloader for now
config.autoloader = :classic
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W[#{config.root}/app]
config.autoload_once_paths += %W[#{config.root}/lib]
# Allow to decode Time from serialized columns
config.active_record.yaml_column_permitted_classes = [Time]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# CSRF protection is specified manually in ApplicationController with
# protect_from_forgery - the Rails 5.2+ default of enabling it everywhere
# (pulled in by load_defaults 6.1 above) breaks federation, so turn the
# default off here.
config.action_controller.default_protect_from_forgery = false
# Enable the asset pipeline
config.assets.enabled = true
# Speed up precompile by not loading the environment
config.assets.initialize_on_precompile = false
# See lib/tasks/assets.rake: non_digest_assets
config.assets.non_digest_assets = %w[branding/logos/asterisk.png]
# Configure generators values. Many other options are available, be sure to check the documentation.
config.generators do |g|
g.template_engine :haml
g.test_framework :rspec
end
# Setup action mailer early
config.action_mailer.default_url_options = {
protocol: AppConfig.pod_uri.scheme,
host: AppConfig.pod_uri.authority
}
config.action_mailer.asset_host = AppConfig.pod_uri.to_s
# Authenticate API requests that carry an OAuth2 bearer token; requests
# with an unknown or expired token are rejected via invalid_token!.
config.middleware.use Rack::OAuth2::Server::Resource::Bearer, "OpenID Connect" do |req|
Api::OpenidConnect::OAuthAccessToken
.valid(Time.zone.now.utc).find_by(token: req.access_token) || req.invalid_token!
end
end
end
Rails.application.routes.default_url_options[:host] = AppConfig.pod_uri.host
Rails.application.routes.default_url_options[:port] = AppConfig.pod_uri.port
Disable the default CSRF protection.
This was added in Rails 5.2 defaults, but we upgraded from 5.1 defaults to 6.1, so we didn't notice until now.
closes #8374
# frozen_string_literal: true
require_relative "boot"
require "rails/all"
require_relative "bundler_helper"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups(BundlerHelper.database))
# Load asset_sync early
require_relative 'asset_sync'
# Rails application definition for the diaspora* pod.
module Diaspora
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.1
# Use classic autoloader for now
config.autoloader = :classic
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W[#{config.root}/app]
config.autoload_once_paths += %W[#{config.root}/lib]
# Allow to decode Time from serialized columns
config.active_record.yaml_column_permitted_classes = [Time]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# We specify CSRF protection manually in ApplicationController with
# protect_from_forgery - having it enabled anywhere by default breaks
# federation.
config.action_controller.default_protect_from_forgery = false
# Enable the asset pipeline
config.assets.enabled = true
# Speed up precompile by not loading the environment
config.assets.initialize_on_precompile = false
# See lib/tasks/assets.rake: non_digest_assets
config.assets.non_digest_assets = %w[branding/logos/asterisk.png]
# Configure generators values. Many other options are available, be sure to check the documentation.
config.generators do |g|
g.template_engine :haml
g.test_framework :rspec
end
# Setup action mailer early
config.action_mailer.default_url_options = {
protocol: AppConfig.pod_uri.scheme,
host: AppConfig.pod_uri.authority
}
config.action_mailer.asset_host = AppConfig.pod_uri.to_s
# Authenticate API requests that carry an OAuth2 bearer token; requests
# with an unknown or expired token are rejected via invalid_token!.
config.middleware.use Rack::OAuth2::Server::Resource::Bearer, "OpenID Connect" do |req|
Api::OpenidConnect::OAuthAccessToken
.valid(Time.zone.now.utc).find_by(token: req.access_token) || req.invalid_token!
end
end
end
Rails.application.routes.default_url_options[:host] = AppConfig.pod_uri.host
Rails.application.routes.default_url_options[:port] = AppConfig.pod_uri.port
|
# encoding: UTF-8
require 'delegate'
module Deferring
# Wraps an association object (via SimpleDelegator) and keeps a deferred,
# in-memory copy of its records in @values so membership changes can be
# made without touching the underlying association immediately.
class Foo < SimpleDelegator
attr_reader :name, :values
# name:                 association name (used by #klass as a fallback)
# original_association: the association object being wrapped
def initialize(name, original_association)
super(original_association)
@name = name
# Lazy materialization: the proxy replaces @values with a cloned record
# array the first time it is accessed.
@values = VirtualProxy.new { @values = original_association.to_a.clone }
end
alias_method :association, :__getobj__
# Replaces the deferred set by looking up records for the given ids;
# blank ids are dropped first.
def ids=(ids)
ids = Array(ids).reject { |id| id.blank? }
@values = klass.find(ids)
end
# Ids of the records currently in the deferred set.
def ids
@values.map(&:id)
end
# Replaces the deferred set, keeping only records not already present.
def values=(records)
@values = records.select { |record| add_record?(record) }
end
# True when record is present and no record with the same id is already
# in the deferred set.
def add_record?(record)
return false unless record
!(values.detect { |value| value.id == record.id })
end
def add_record(record)
values.push(record)
end
# NOTE(review): looks the record up twice via klass.find — confirm this is
# acceptable, and that find raising on unknown ids is the intended behavior.
def add_by_id(id)
add_record(klass.find(id)) if add_record?(klass.find(id))
end
# Removes the deferred record with the given id, if any.
def remove_by_id(id)
if record = values.detect { |value| value.id == id }
values.delete(record)
end
end
# Array-style reads and writes operate on the deferred copy.
delegate :[],
:<<,
:delete,
:size,
:length,
to: :values
# Builds record(s) on the association, appends them to the deferred set,
# and returns the full deferred set.
# NOTE(review): args is forwarded as a single array (no splat) — confirm
# the association's build accepts that form.
def build(*args)
result = association.build(args)
values.concat(result)
values
end
# Creates record(s) on the association (raising on validation failure),
# appends them to the deferred set, and returns the full deferred set.
def create!(*args)
result = association.create!(args)
values.concat(result)
values
end
# Record class of the association; falls back to deriving it from the
# association name when the wrapped object exposes no klass.
def klass
if association.respond_to?(:klass)
association.klass
else
name.singularize.classify.constantize
end
end
end
end
Use Object#tap in build and create! so each returns the association's result while still appending it to the deferred values.
# encoding: UTF-8
require 'delegate'
module Deferring
# Wraps an association object (via SimpleDelegator) and keeps a deferred,
# in-memory copy of its records in @values so membership changes can be
# made without touching the underlying association immediately.
class Foo < SimpleDelegator
attr_reader :name, :values
# name:                 association name (used by #klass as a fallback)
# original_association: the association object being wrapped
def initialize(name, original_association)
super(original_association)
@name = name
# Lazy materialization: the proxy replaces @values with a cloned record
# array the first time it is accessed.
@values = VirtualProxy.new { @values = original_association.to_a.clone }
end
alias_method :association, :__getobj__
# Replaces the deferred set by looking up records for the given ids;
# blank ids are dropped first.
def ids=(ids)
ids = Array(ids).reject { |id| id.blank? }
@values = klass.find(ids)
end
# Ids of the records currently in the deferred set.
def ids
@values.map(&:id)
end
# Replaces the deferred set, keeping only records not already present.
def values=(records)
@values = records.select { |record| add_record?(record) }
end
# True when record is present and no record with the same id is already
# in the deferred set.
def add_record?(record)
return false unless record
!(values.detect { |value| value.id == record.id })
end
def add_record(record)
values.push(record)
end
# NOTE(review): looks the record up twice via klass.find — confirm this is
# acceptable, and that find raising on unknown ids is the intended behavior.
def add_by_id(id)
add_record(klass.find(id)) if add_record?(klass.find(id))
end
# Removes the deferred record with the given id, if any.
def remove_by_id(id)
if record = values.detect { |value| value.id == id }
values.delete(record)
end
end
# Array-style reads and writes operate on the deferred copy.
delegate :[],
:<<,
:delete,
:size,
:length,
to: :values
# Builds record(s) on the association, appends them to the deferred set,
# and returns the association's build result (tap returns its receiver).
# NOTE(review): args is forwarded as a single array (no splat) — confirm
# the association's build accepts that form.
def build(*args)
association.build(args).tap do |result|
values.concat(result)
end
end
# Creates record(s) on the association (raising on validation failure),
# appends them to the deferred set, and returns the created result.
def create!(*args)
association.create!(args).tap do |result|
values.concat(result)
end
end
# Record class of the association; falls back to deriving it from the
# association name when the wrapped object exposes no klass.
def klass
if association.respond_to?(:klass)
association.klass
else
name.singularize.classify.constantize
end
end
end
end
|
module Zxcvbn
# Gem release version. Frozen so the constant cannot be mutated in place.
VERSION = "0.0.3".freeze
end
Bump version to 0.1.0
module Zxcvbn
# Gem release version. Frozen so the constant cannot be mutated in place.
VERSION = "0.1.0".freeze
end
|
<%# Generator template: the class name is filled in from the generator's
    options at generation time. %>
class <%= options[:user_class_name] %> < ActiveRecord::Base
# Valid roles, taken from the Typus roles configuration.
ROLE = Typus::Configuration.roles.keys.sort
# Locales supported by Typus.
LANGUAGE = Typus.locales
enable_as_typus_user
end
Force set_table_name so the generated Typus user model uses the configured admin users table name.
<%# Generator template: the class name is filled in from the generator's
    options at generation time. %>
class <%= options[:user_class_name] %> < ActiveRecord::Base
# NOTE(review): "#{admin_users_table_name}" is interpolated when the
# generated model loads, so admin_users_table_name must be resolvable at
# runtime — TODO confirm.
set_table_name "#{admin_users_table_name}"
# Valid roles, taken from the Typus roles configuration.
ROLE = Typus::Configuration.roles.keys.sort
# Locales supported by Typus.
LANGUAGE = Typus.locales
enable_as_typus_user
end
|
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require 'action_controller/railtie'
require 'dm-rails/railtie'
# require 'action_mailer/railtie'
# require 'active_resource/railtie'
# require 'rails/test_unit/railtie'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Yogo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
# Filter the confirmation field too, so the plaintext password never
# reaches the logs through either parameter.
config.filter_parameters += [:password, :password_confirmation]
end
end
require 'yogo/setting'
Filter the password_confirmation
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require 'action_controller/railtie'
require 'dm-rails/railtie'
# require 'action_mailer/railtie'
# require 'active_resource/railtie'
# require 'rails/test_unit/railtie'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Yogo
  # Main Rails application configuration for Yogo.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    # :password_confirmation is filtered alongside :password so neither appears in logs.
    config.filter_parameters += [:password, :password_confirmation]
  end
end
# Not sure the best place to require this yet
require 'yogo/setting'
|
module Depth
  # Library version string.
  VERSION = '0.0.2'
end
Version bump
module Depth
  # Library version string.
  VERSION = '0.1.0'
end
|
require 'ohm/contrib'
# Activity feed entry (Ohm-backed model). Records that a user performed an
# action on a subject, optionally involving a second object.
class Activity < OurOhm
  include Ohm::Timestamping

  # The acting user.
  reference :user, GraphUser
  # Polymorphic references: each stores both the id and the class of the target.
  generic_reference :subject
  generic_reference :object
  # Name of the performed action.
  attribute :action

  # Returns all activities in which +search_for+ appears as subject or object;
  # when +search_for+ is a GraphUser, activities it performed are included too.
  def self.for(search_for)
    res = find(subject_id: search_for.id, subject_class: search_for.class) | find(object_id: search_for.id, object_class: search_for.class)
    if search_for.class == GraphUser
      res |= find(user_id: search_for.id)
    end
    res
  end
end
# Mixin that lets including models record an Activity entry.
module ActivitySubject
  # Records that +user+ performed +action+ on +subject+, optionally involving
  # +object+. +sub_action+ is accepted for interface compatibility but unused.
  def activity(user, action, subject, sub_action = :to, object = nil)
    Activity.create(user: user, action: action, subject: subject, object: object)
  end
end
Use the new Ruby 1.9 hash syntax
require 'ohm/contrib'
# Activity feed entry (Ohm-backed model). Records that a user performed an
# action on a subject, optionally involving a second object.
class Activity < OurOhm
  include Ohm::Timestamping

  # The acting user.
  reference :user, GraphUser
  # Polymorphic references: each stores both the id and the class of the target.
  generic_reference :subject
  generic_reference :object
  # Name of the performed action.
  attribute :action

  # Returns all activities in which +search_for+ appears as subject or object;
  # when +search_for+ is a GraphUser, activities it performed are included too.
  def self.for(search_for)
    res = find(subject_id: search_for.id, subject_class: search_for.class) | find(object_id: search_for.id, object_class: search_for.class)
    if search_for.class == GraphUser
      res |= find(user_id: search_for.id)
    end
    res
  end
end
# Mixin that lets including models record an Activity entry.
module ActivitySubject
  # Records that +user+ performed +action+ on +subject+, optionally involving +object+.
  # NOTE(review): +sub_action+ is accepted but never used — confirm whether it can be dropped.
  def activity(user, action, subject, sub_action = :to ,object = nil)
    Activity.create(user: user,action: action, subject: subject, object: object)
  end
end
module GitHubPages
  # Computes and applies the Jekyll configuration defaults and hard overrides
  # used when building sites for GitHub Pages.
  class Configuration
    # Plugins which are activated by default
    DEFAULT_PLUGINS = %w(
      jekyll-coffeescript
      jekyll-gist
      jekyll-github-metadata
      jekyll-paginate
      jekyll-textile-converter
    ).freeze

    # Plugins allowed by GitHub Pages
    PLUGIN_WHITELIST = %w(
      jekyll-coffeescript
      jekyll-feed
      jekyll-gist
      jekyll-github-metadata
      jekyll-mentions
      jekyll-paginate
      jekyll-redirect-from
      jekyll-seo-tag
      jekyll-sitemap
      jekyll-textile-converter
      jemoji
    ).freeze

    # Default, user overwritable options
    DEFAULTS = {
      "jailed" => false,
      "gems" => DEFAULT_PLUGINS,
      "kramdown" => {
        "input" => "GFM",
        "hard_wrap" => false
      }
    }.freeze

    # Jekyll defaults merged with Pages defaults.
    MERGED_DEFAULTS = Jekyll::Utils.deep_merge_hashes(
      Jekyll::Configuration::DEFAULTS,
      DEFAULTS
    ).freeze

    # Options which GitHub Pages sets, regardless of the user-specified value
    #
    # The following values are also overridden by GitHub Pages, but are not
    # overridden locally, for practical purposes:
    # * source
    # * destination
    # * jailed
    # * verbose
    # * incremental
    # * GH_ENV
    OVERRIDES = {
      "lsi" => false,
      "safe" => true,
      # NOTE(review): a random plugins directory — presumably to prevent
      # loading plugins from a local _plugins dir; confirm intent.
      "plugins" => SecureRandom.hex,
      "whitelist" => PLUGIN_WHITELIST,
      "highlighter" => "rouge",
      "kramdown" => {
        "template" => "",
        "math_engine" => "mathjax",
        "syntax_highlighter" => "rouge"
      },
      "gist" => {
        "noscript" => false
      }
    }.freeze

    # These configuration settings have corresponding instance variables on
    # Jekyll::Site and need to be set properly when the config is updated.
    CONFIGS_WITH_METHODS = %w(
      safe lsi highlighter baseurl exclude include future unpublished
      show_drafts limit_posts keep_files gems
    ).freeze

    class << self
      # True once #set! has already run for +site+.
      def processed?(site)
        site.instance_variable_get(:@_github_pages_processed) == true
      end

      # Marks +site+ as processed so #set is idempotent.
      def processed(site)
        site.instance_variable_set :@_github_pages_processed, true
      end

      # In development, setting DISABLE_WHITELIST=1 lifts the plugin whitelist.
      def disable_whitelist?
        Jekyll.env == "development" && ENV["DISABLE_WHITELIST"] == "1"
      end

      # Given a user's config, determines the effective configuration by building a user
      # configuration sandwich with our overrides overriding the user's specified
      # values which themselves override our defaults.
      #
      # Returns the effective Configuration
      #
      # Note: this is a highly modified version of Jekyll#configuration
      def effective_config(user_config)
        # Merge user config into defaults
        config = Jekyll::Utils.deep_merge_hashes(MERGED_DEFAULTS, user_config)
          .fix_common_issues
          .add_default_collections
        # Merge overwrites into user config
        config = Jekyll::Utils.deep_merge_hashes config, OVERRIDES
        # Ensure we have those gems we want.
        config["gems"] = Array(config["gems"]) | DEFAULT_PLUGINS
        config["whitelist"] = config["whitelist"] | config["gems"] if disable_whitelist?
        config
      end

      # Set the site's configuration. Implemented as an `after_reset` hook.
      # Equivalent #set! function contains the code of interest. This function
      # guards against double-processing via the value in #processed.
      def set(site)
        return if processed? site
        set!(site)
        processed(site)
      end

      # Set the site's configuration with all the proper defaults and overrides.
      # Should be called by #set to protect against multiple processings.
      def set!(site)
        config = effective_config(site.config)
        # Assign everything to the site
        site.instance_variable_set :@config, config
        # Ensure all settings with Jekyll::Site setter methods are applied.
        CONFIGS_WITH_METHODS.each do |opt|
          site.public_send("#{opt}=", site.config[opt])
        end
      end
    end
  end
end
just look for the DISABLE_WHITELIST flag
module GitHubPages
  # Computes and applies the Jekyll configuration defaults and hard overrides
  # used when building sites for GitHub Pages.
  class Configuration
    # Plugins which are activated by default
    DEFAULT_PLUGINS = %w(
      jekyll-coffeescript
      jekyll-gist
      jekyll-github-metadata
      jekyll-paginate
      jekyll-textile-converter
    ).freeze

    # Plugins allowed by GitHub Pages
    PLUGIN_WHITELIST = %w(
      jekyll-coffeescript
      jekyll-feed
      jekyll-gist
      jekyll-github-metadata
      jekyll-mentions
      jekyll-paginate
      jekyll-redirect-from
      jekyll-seo-tag
      jekyll-sitemap
      jekyll-textile-converter
      jemoji
    ).freeze

    # Default, user overwritable options
    DEFAULTS = {
      "jailed" => false,
      "gems" => DEFAULT_PLUGINS,
      "kramdown" => {
        "input" => "GFM",
        "hard_wrap" => false
      }
    }.freeze

    # Jekyll defaults merged with Pages defaults.
    MERGED_DEFAULTS = Jekyll::Utils.deep_merge_hashes(
      Jekyll::Configuration::DEFAULTS,
      DEFAULTS
    ).freeze

    # Options which GitHub Pages sets, regardless of the user-specified value
    #
    # The following values are also overridden by GitHub Pages, but are not
    # overridden locally, for practical purposes:
    # * source
    # * destination
    # * jailed
    # * verbose
    # * incremental
    # * GH_ENV
    OVERRIDES = {
      "lsi" => false,
      "safe" => true,
      # NOTE(review): a random plugins directory — presumably to prevent
      # loading plugins from a local _plugins dir; confirm intent.
      "plugins" => SecureRandom.hex,
      "whitelist" => PLUGIN_WHITELIST,
      "highlighter" => "rouge",
      "kramdown" => {
        "template" => "",
        "math_engine" => "mathjax",
        "syntax_highlighter" => "rouge"
      },
      "gist" => {
        "noscript" => false
      }
    }.freeze

    # These configuration settings have corresponding instance variables on
    # Jekyll::Site and need to be set properly when the config is updated.
    CONFIGS_WITH_METHODS = %w(
      safe lsi highlighter baseurl exclude include future unpublished
      show_drafts limit_posts keep_files gems
    ).freeze

    class << self
      # True once #set! has already run for +site+.
      def processed?(site)
        site.instance_variable_get(:@_github_pages_processed) == true
      end

      # Marks +site+ as processed so #set is idempotent.
      def processed(site)
        site.instance_variable_set :@_github_pages_processed, true
      end

      # In development, any non-empty DISABLE_WHITELIST value lifts the
      # plugin whitelist (not only "1").
      def disable_whitelist?
        Jekyll.env == "development" && !ENV["DISABLE_WHITELIST"].to_s.empty?
      end

      # Given a user's config, determines the effective configuration by building a user
      # configuration sandwich with our overrides overriding the user's specified
      # values which themselves override our defaults.
      #
      # Returns the effective Configuration
      #
      # Note: this is a highly modified version of Jekyll#configuration
      def effective_config(user_config)
        # Merge user config into defaults
        config = Jekyll::Utils.deep_merge_hashes(MERGED_DEFAULTS, user_config)
          .fix_common_issues
          .add_default_collections
        # Merge overwrites into user config
        config = Jekyll::Utils.deep_merge_hashes config, OVERRIDES
        # Ensure we have those gems we want.
        config["gems"] = Array(config["gems"]) | DEFAULT_PLUGINS
        config["whitelist"] = config["whitelist"] | config["gems"] if disable_whitelist?
        config
      end

      # Set the site's configuration. Implemented as an `after_reset` hook.
      # Equivalent #set! function contains the code of interest. This function
      # guards against double-processing via the value in #processed.
      def set(site)
        return if processed? site
        set!(site)
        processed(site)
      end

      # Set the site's configuration with all the proper defaults and overrides.
      # Should be called by #set to protect against multiple processings.
      def set!(site)
        config = effective_config(site.config)
        # Assign everything to the site
        site.instance_variable_set :@config, config
        # Ensure all settings with Jekyll::Site setter methods are applied.
        CONFIGS_WITH_METHODS.each do |opt|
          site.public_send("#{opt}=", site.config[opt])
        end
      end
    end
  end
end
|
module Devise
  # Holds devise schema information. To use it, just include its methods
  # and overwrite the apply_schema method.
  module Schema
    # Creates email, encrypted_password and password_salt.
    #
    # == Options
    # * :null - When true, allow columns to be null.
    # * :encryptor - The encryptor going to be used, necessary for setting the proper encrypter password length.
    def authenticatable(options={})
      null = options[:null] || false
      encryptor = options[:encryptor] || (respond_to?(:encryptor) ? self.encryptor : :sha1)
      apply_schema :email, String, :null => null, :limit => 100
      apply_schema :encrypted_password, String, :null => null, :limit => Devise::ENCRYPTORS_LENGTH[encryptor]
      # NOTE(review): bcrypt-generated salts are longer than 20 characters;
      # this limit may truncate them — confirm against supported encryptors.
      apply_schema :password_salt, String, :null => null, :limit => 20
    end

    # Creates confirmation_token, confirmed_at and confirmation_sent_at.
    def confirmable
      apply_schema :confirmation_token, String, :limit => 20
      apply_schema :confirmed_at, DateTime
      apply_schema :confirmation_sent_at, DateTime
    end

    # Creates reset_password_token.
    def recoverable
      apply_schema :reset_password_token, String, :limit => 20
    end

    # Creates remember_token and remember_created_at.
    def rememberable
      apply_schema :remember_token, String, :limit => 20
      apply_schema :remember_created_at, DateTime
    end

    # Creates sign_in_count, current_sign_in_at, last_sign_in_at,
    # current_sign_in_ip, last_sign_in_ip.
    def trackable
      apply_schema :sign_in_count, Integer
      apply_schema :current_sign_in_at, DateTime
      apply_schema :last_sign_in_at, DateTime
      apply_schema :current_sign_in_ip, String
      apply_schema :last_sign_in_ip, String
    end

    # Creates failed_attempts, unlock_token and locked_at
    def lockable
      apply_schema :failed_attempts, Integer, :default => 0
      apply_schema :unlock_token, String, :limit => 20
      apply_schema :locked_at, DateTime
    end

    # Overwrite with specific modification to create your own schema.
    def apply_schema(name, type, options={})
      raise NotImplementedError
    end
  end
end
bcrypt requires the salt to be over 29 chars
module Devise
  # Holds devise schema information. To use it, just include its methods
  # and overwrite the apply_schema method.
  module Schema
    # Creates email, encrypted_password and password_salt.
    #
    # == Options
    # * :null - When true, allow columns to be null.
    # * :encryptor - The encryptor going to be used, necessary for setting the proper encrypter password length.
    def authenticatable(options={})
      null = options[:null] || false
      encryptor = options[:encryptor] || (respond_to?(:encryptor) ? self.encryptor : :sha1)
      apply_schema :email, String, :null => null, :limit => 100
      apply_schema :encrypted_password, String, :null => null, :limit => Devise::ENCRYPTORS_LENGTH[encryptor]
      # Limit of 30 leaves room for bcrypt salts, which exceed 29 characters.
      apply_schema :password_salt, String, :null => null, :limit => 30
    end

    # Creates confirmation_token, confirmed_at and confirmation_sent_at.
    def confirmable
      apply_schema :confirmation_token, String, :limit => 20
      apply_schema :confirmed_at, DateTime
      apply_schema :confirmation_sent_at, DateTime
    end

    # Creates reset_password_token.
    def recoverable
      apply_schema :reset_password_token, String, :limit => 20
    end

    # Creates remember_token and remember_created_at.
    def rememberable
      apply_schema :remember_token, String, :limit => 20
      apply_schema :remember_created_at, DateTime
    end

    # Creates sign_in_count, current_sign_in_at, last_sign_in_at,
    # current_sign_in_ip, last_sign_in_ip.
    def trackable
      apply_schema :sign_in_count, Integer
      apply_schema :current_sign_in_at, DateTime
      apply_schema :last_sign_in_at, DateTime
      apply_schema :current_sign_in_ip, String
      apply_schema :last_sign_in_ip, String
    end

    # Creates failed_attempts, unlock_token and locked_at
    def lockable
      apply_schema :failed_attempts, Integer, :default => 0
      apply_schema :unlock_token, String, :limit => 20
      apply_schema :locked_at, DateTime
    end

    # Overwrite with specific modification to create your own schema.
    def apply_schema(name, type, options={})
      raise NotImplementedError
    end
  end
end
|
# Recalculates the opinion/authority graph over facts, fact relations and
# users, and can export the whole graph as a LoadDsl script.
class FactGraph
  # Convenience entry point: run a recalculation on a fresh instance.
  def self.recalculate
    new.recalculate
  end

  # Logs +x+ with a timestamp to STDERR and flushes STDOUT so interleaved
  # progress output (see .export) appears promptly.
  def debug x
    @logger ||= Logger.new(STDERR)
    @logger.info "#{Time.now} #{x}"
    $stdout.flush
  end

  # NOTE(review): most pipeline steps are commented out and only a single
  # fact-relation pass runs — presumably a temporary debugging state;
  # confirm before relying on recalculation results.
  def recalculate
    # calculate_authority
    # calculate_user_opinions_of_all_base_facts
    # 5.times do |i|
    i = 1
    calculate_fact_relation_influencing_opinions i
    # calculate_fact_opinions i
    # end
    # cut_off_top
  end

  def cut_off_top
    Fact.cut_off_top
  end

  def calculate_fact_relation_influencing_opinions i
    debug "Calculating fact relation influencing opinions (#{i})"
    FactRelation.all.to_a.each {|f| f.calculate_influencing_opinion}
  end

  def calculate_fact_opinions i
    debug "Calculating fact opinions (#{i})"
    Fact.all.to_a.each do |f|
      f.calculate_opinion
      f.reposition_in_top
    end
  end

  def calculate_user_opinions_of_all_base_facts
    debug "Calculating user opinions on basefacts"
    Basefact.all.to_a.each {|f| f.calculate_user_opinion }
  end

  def calculate_authority
    debug "Calculating Authority"
    Authority.run_calculation(authority_calculators)
  end

  # The map/reduce jobs that together compute authority and credibility.
  def authority_calculators
    [
      MapReduce::FactAuthority,
      MapReduce::ChannelAuthority,
      MapReduce::TopicAuthority,
      MapReduce::FactCredibility,
      MapReduce::FactRelationCredibility
    ]
  end

  # Writes believers/disbelievers/doubters lines for +fact+ when present.
  def self.export_opiniated(writer,fact,prefix="")
    writer.write(prefix + LoadDsl.export_believers(fact.opiniated(:believes))) if fact.opiniated(:believes).size > 0
    writer.write(prefix + LoadDsl.export_disbelievers(fact.opiniated(:disbelieves))) if fact.opiniated(:disbelieves).size > 0
    writer.write(prefix + LoadDsl.export_doubters(fact.opiniated(:doubts))) if fact.opiniated(:doubts).size > 0
  end

  # Exports users, sites, facts, fact relations and channels as a LoadDsl
  # script written to +writer+. Pass :verbose => true to print progress dots.
  def self.export(writer, options={})
    # NOTE(review): `and` binds looser than `=`, so this assigns only
    # has_key?(:verbose) — :verbose => false still counts as verbose.
    # Likely should be `&&`; confirm intent.
    verbose = options.has_key?(:verbose) and options[:verbose]
    writer.write(LoadDsl.export_header)
    GraphUser.all.each do |gu|
      writer.write(LoadDsl.export_user(gu))
      print "." if verbose
    end
    writer.write("\n")
    Site.all.each do |s|
      writer.write(LoadDsl.export_site(s))
      print "." if verbose
    end
    writer.write("\n")
    # '' first emits facts with no matching creator; each user's facts follow
    # under an "activate user" header.
    ([''] + GraphUser.all.to_a.map {|gu| gu.id}).each do |x|
      fs = Fact.find(:created_by_id => x)
      if x != '' && fs.size > 0
        writer.write("\n")
        writer.write(LoadDsl.export_activate_user(GraphUser[x]))
      end
      fs.each do |fact|
        writer.write(" "+LoadDsl.export_fact(fact))
        self.export_opiniated(writer,fact," ")
        print "." if verbose
      end
    end
    ([''] + GraphUser.all.to_a.map {|gu| gu.id}).each do |x|
      fs = FactRelation.find(:created_by_id => x)
      if x != '' && fs.size > 0
        writer.write("\n")
        writer.write(LoadDsl.export_activate_user(GraphUser[x]))
      end
      fs.each do |fact_relation|
        writer.write(" "+LoadDsl.export_fact_relation(fact_relation))
        self.export_opiniated(writer,fact_relation," ")
        print "." if verbose
      end
      puts if verbose
    end
    GraphUser.all.each do |gu|
      if ChannelList.new(gu).channels.size > 0
        writer.write("\n")
        writer.write(LoadDsl.export_activate_user(gu))
      end
      #use Channel.find because we also want deleted channels
      Channel.find(:created_by_id => gu.id ).each do |channel|
        writer.write(" "+LoadDsl.export_channel(channel))
        channel.sorted_internal_facts.each do |f|
          if f and f.data_id
            writer.write(" "+LoadDsl.export_add_fact(f))
          end
        end
        channel.sorted_delete_facts.each do |f|
          if f and f.data_id
            writer.write(" "+LoadDsl.export_del_fact(f))
          end
        end
        channel.contained_channels.each do |ch|
          writer.write(" "+LoadDsl.export_sub_channel(ch))
        end
      end
    end
    writer.write(LoadDsl.export_footer)
  end
end
Revert "Disabled most of the calculation to debug newrelic"
This reverts commit 850252db5459565dfee638d76751d506eea43bc7.
# Recalculates the opinion/authority graph over facts, fact relations and
# users, and can export the whole graph as a LoadDsl script.
class FactGraph
  # Convenience entry point: run a full recalculation on a fresh instance.
  def self.recalculate
    new.recalculate
  end

  # Logs +x+ with a timestamp to STDERR and flushes STDOUT so interleaved
  # progress output (see .export) appears promptly. Returns $stdout.
  def debug(x)
    @logger ||= Logger.new(STDERR)
    @logger.info "#{Time.now} #{x}"
    $stdout.flush
  end

  # Full recalculation pipeline: authority first, then base-fact user
  # opinions, then five alternating passes over relations and facts,
  # finally trimming the top list.
  def recalculate
    calculate_authority
    calculate_user_opinions_of_all_base_facts
    5.times do |i|
      calculate_fact_relation_influencing_opinions i
      calculate_fact_opinions i
    end
    cut_off_top
  end

  def cut_off_top
    Fact.cut_off_top
  end

  def calculate_fact_relation_influencing_opinions(i)
    debug "Calculating fact relation influencing opinions (#{i})"
    FactRelation.all.to_a.each { |f| f.calculate_influencing_opinion }
  end

  def calculate_fact_opinions(i)
    debug "Calculating fact opinions (#{i})"
    Fact.all.to_a.each do |f|
      f.calculate_opinion
      f.reposition_in_top
    end
  end

  def calculate_user_opinions_of_all_base_facts
    debug "Calculating user opinions on basefacts"
    Basefact.all.to_a.each { |f| f.calculate_user_opinion }
  end

  def calculate_authority
    debug "Calculating Authority"
    Authority.run_calculation(authority_calculators)
  end

  # The map/reduce jobs that together compute authority and credibility.
  def authority_calculators
    [
      MapReduce::FactAuthority,
      MapReduce::ChannelAuthority,
      MapReduce::TopicAuthority,
      MapReduce::FactCredibility,
      MapReduce::FactRelationCredibility
    ]
  end

  # Writes believers/disbelievers/doubters lines for +fact+ when present.
  def self.export_opiniated(writer, fact, prefix = "")
    writer.write(prefix + LoadDsl.export_believers(fact.opiniated(:believes))) if fact.opiniated(:believes).size > 0
    writer.write(prefix + LoadDsl.export_disbelievers(fact.opiniated(:disbelieves))) if fact.opiniated(:disbelieves).size > 0
    writer.write(prefix + LoadDsl.export_doubters(fact.opiniated(:doubts))) if fact.opiniated(:doubts).size > 0
  end

  # Exports users, sites, facts, fact relations and channels as a LoadDsl
  # script written to +writer+. Pass :verbose => true to print progress dots.
  def self.export(writer, options = {})
    # BUGFIX: the original `verbose = options.has_key?(:verbose) and
    # options[:verbose]` assigned only has_key? (since `and` binds looser
    # than `=`), so :verbose => false still enabled verbose output.
    verbose = options.has_key?(:verbose) && options[:verbose]
    writer.write(LoadDsl.export_header)
    GraphUser.all.each do |gu|
      writer.write(LoadDsl.export_user(gu))
      print "." if verbose
    end
    writer.write("\n")
    Site.all.each do |s|
      writer.write(LoadDsl.export_site(s))
      print "." if verbose
    end
    writer.write("\n")
    # '' first emits facts with no matching creator; each user's facts follow
    # under an "activate user" header.
    ([''] + GraphUser.all.to_a.map { |gu| gu.id }).each do |x|
      fs = Fact.find(:created_by_id => x)
      if x != '' && fs.size > 0
        writer.write("\n")
        writer.write(LoadDsl.export_activate_user(GraphUser[x]))
      end
      fs.each do |fact|
        writer.write(" " + LoadDsl.export_fact(fact))
        self.export_opiniated(writer, fact, " ")
        print "." if verbose
      end
    end
    ([''] + GraphUser.all.to_a.map { |gu| gu.id }).each do |x|
      fs = FactRelation.find(:created_by_id => x)
      if x != '' && fs.size > 0
        writer.write("\n")
        writer.write(LoadDsl.export_activate_user(GraphUser[x]))
      end
      fs.each do |fact_relation|
        writer.write(" " + LoadDsl.export_fact_relation(fact_relation))
        self.export_opiniated(writer, fact_relation, " ")
        print "." if verbose
      end
      puts if verbose
    end
    GraphUser.all.each do |gu|
      if ChannelList.new(gu).channels.size > 0
        writer.write("\n")
        writer.write(LoadDsl.export_activate_user(gu))
      end
      # use Channel.find because we also want deleted channels
      Channel.find(:created_by_id => gu.id).each do |channel|
        writer.write(" " + LoadDsl.export_channel(channel))
        channel.sorted_internal_facts.each do |f|
          if f && f.data_id
            writer.write(" " + LoadDsl.export_add_fact(f))
          end
        end
        channel.sorted_delete_facts.each do |f|
          if f && f.data_id
            writer.write(" " + LoadDsl.export_del_fact(f))
          end
        end
        channel.contained_channels.each do |ch|
          writer.write(" " + LoadDsl.export_sub_channel(ch))
        end
      end
    end
    writer.write(LoadDsl.export_footer)
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(:default, Rails.env) if defined?(Bundler)
module TravisRails
  # Main Rails application; also computes javascript expansion lists from
  # the files on disk at boot time.
  class Application < Rails::Application
    # Maps each given expansion type to the sorted list of its javascript files.
    def self.javascript_expansions(*types)
      types.inject({}) { |expansions, type| expansions.merge(type => javascripts(type)) }
    end

    # All .js files under public/javascripts/<dir>, paths relative to
    # public/javascripts, sorted.
    def self.javascripts(dir)
      root = Rails.root.join('public/javascripts')
      Dir[root.join(dir.to_s).join('**/*.js')].map { |file| file.sub("#{root.to_s}/", '') }.sort
    end

    # Bundled third-party scripts served via the :vendor and :jasmine expansions.
    vendor = %w(jquery-1.4.4.min.js jquery.timeago.js underscore handlebars backbone pusher-1.6.min.js unobtrusive_flash) # socky jquery-ui-1.8.9.highlight.min.js
    jasmine = %w(jasmine jasmine-html)
    expansions = javascript_expansions(:lib, :app, :tests)
    # Files containing 'helper' are sorted first — presumably so helpers load
    # before the specs that use them; confirm.
    expansions[:tests].sort! { |lft, rgt| lft.include?('helper') ? -1 : rgt.include?('helper') ? 1 : lft <=> rgt }
    expansions.merge!(:vendor => vendor.map { |name| "vendor/#{name}" })
    expansions.merge!(:jasmine => jasmine.map { |name| "vendor/#{name}" })
    config.autoload_paths << config.paths.app.views.to_a.first
    config.action_view.javascript_expansions = expansions
    # Default template encoding for Ruby 1.9.
    config.encoding = "utf-8"
    # Filter sensitive parameters from the log file.
    config.filter_parameters += [:password]
    # config.action_controller.logger = Logger.new(STDOUT)
    config.serve_static_assets = true
    ActiveRecord::Base.include_root_in_json = false
  end
end
Remove :lib from the expansions in application.rb
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(:default, Rails.env) if defined?(Bundler)
module TravisRails
  # Main Rails application; also computes javascript expansion lists from
  # the files on disk at boot time.
  class Application < Rails::Application
    # Maps each given expansion type to the sorted list of its javascript files.
    def self.javascript_expansions(*types)
      types.inject({}) { |expansions, type| expansions.merge(type => javascripts(type)) }
    end

    # All .js files under public/javascripts/<dir>, paths relative to
    # public/javascripts, sorted.
    def self.javascripts(dir)
      root = Rails.root.join('public/javascripts')
      Dir[root.join(dir.to_s).join('**/*.js')].map { |file| file.sub("#{root.to_s}/", '') }.sort
    end

    # Bundled third-party scripts served via the :vendor and :jasmine expansions.
    vendor = %w(jquery-1.4.4.min.js jquery.timeago.js underscore handlebars backbone pusher-1.6.min.js unobtrusive_flash) # socky jquery-ui-1.8.9.highlight.min.js
    jasmine = %w(jasmine jasmine-html)
    # :lib intentionally omitted from the expansions here.
    expansions = javascript_expansions(:app, :tests)
    # Files containing 'helper' are sorted first — presumably so helpers load
    # before the specs that use them; confirm.
    expansions[:tests].sort! { |lft, rgt| lft.include?('helper') ? -1 : rgt.include?('helper') ? 1 : lft <=> rgt }
    expansions.merge!(:vendor => vendor.map { |name| "vendor/#{name}" })
    expansions.merge!(:jasmine => jasmine.map { |name| "vendor/#{name}" })
    config.autoload_paths << config.paths.app.views.to_a.first
    config.action_view.javascript_expansions = expansions
    # Default template encoding for Ruby 1.9.
    config.encoding = "utf-8"
    # Filter sensitive parameters from the log file.
    config.filter_parameters += [:password]
    # config.action_controller.logger = Logger.new(STDOUT)
    config.serve_static_assets = true
    ActiveRecord::Base.include_root_in_json = false
  end
end
|
#!/usr/bin/env ruby
require 'github_api'
require 'json'
require 'colorize'
require 'benchmark'
require_relative 'github_changelog_generator/parser'
require_relative 'github_changelog_generator/generator'
require_relative 'github_changelog_generator/version'
module GitHubChangelogGenerator
class ChangelogGenerator
attr_accessor :options, :all_tags, :github
PER_PAGE_NUMBER = 30
def initialize
@options = Parser.parse_options
if options[:verbose]
puts 'Input options:'
pp options
puts ''
end
github_token
github_options = {per_page: PER_PAGE_NUMBER}
github_options[:oauth_token] = @github_token unless @github_token.nil?
github_options[:endpoint] = options[:github_endpoint] unless options[:github_endpoint].nil?
github_options[:site] = options[:github_endpoint] unless options[:github_site].nil?
@github = Github.new github_options
@generator = Generator.new(@options)
@all_tags = self.get_all_tags
@pull_requests = self.get_filtered_pull_requests
if @options[:issues]
@issues = self.get_all_issues
else
@issues = []
end
@tag_times_hash = {}
end
def print_json(json)
puts JSON.pretty_generate(json)
end
def exec_command(cmd)
exec_cmd = "cd #{$project_path} and #{cmd}"
%x[#{exec_cmd}]
end
def get_all_closed_pull_requests
if @options[:verbose]
print "Fetching pull requests...\r"
end
response = @github.pull_requests.list @options[:user], @options[:project], :state => 'closed'
pull_requests = []
page_i = 0
response.each_page do |page|
page_i += PER_PAGE_NUMBER
count_pages = response.count_pages
print "Fetching pull requests... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
pull_requests.concat(page)
end
print " \r"
if @options[:verbose]
puts "Received pull requests: #{pull_requests.count}"
end
pull_requests
end
def get_filtered_pull_requests
pull_requests = self.get_all_closed_pull_requests
unless @options[:pull_request_labels].nil?
if @options[:verbose]
puts 'Filter all pull requests by labels.'
end
filtered_pull_requests = pull_requests.select { |pull_request|
#fetch this issue to get labels array
issue = @github.issues.get @options[:user], @options[:project], pull_request.number
#compare is there any labels from @options[:labels] array
issue_without_labels = !issue.labels.map { |label| label.name }.any?
if @options[:verbose]
puts "Filter request \##{issue.number}."
end
if @options[:pull_request_labels].any?
select_by_label = (issue.labels.map { |label| label.name } & @options[:pull_request_labels]).any?
else
select_by_label = false
end
select_by_label | issue_without_labels
}
if @options[:verbose]
puts "Filtered pull requests with specified labels and w/o labels: #{filtered_pull_requests.count}"
end
return filtered_pull_requests
end
pull_requests
end
def compund_changelog
log = "# Changelog\n\n"
if @options[:last]
log += self.generate_log_between_tags(self.all_tags[0], self.all_tags[1])
elsif @options[:tag1] and @options[:tag2]
tag1 = @options[:tag1]
tag2 = @options[:tag2]
tags_strings = []
self.all_tags.each { |x| tags_strings.push(x['name']) }
if tags_strings.include?(tag1)
if tags_strings.include?(tag2)
hash = Hash[tags_strings.map.with_index.to_a]
index1 = hash[tag1]
index2 = hash[tag2]
log += self.generate_log_between_tags(self.all_tags[index1], self.all_tags[index2])
else
puts "Can't find tag #{tag2} -> exit"
exit
end
else
puts "Can't find tag #{tag1} -> exit"
exit
end
else
log += self.generate_log_for_all_tags
end
log += "\n\n\\* *This changelog was generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"
output_filename = "#{@options[:output]}"
File.open(output_filename, 'w') { |file| file.write(log) }
puts "Done! Generated log placed in #{`pwd`.strip!}/#{output_filename}"
end
def generate_log_for_all_tags
log = ''
# Async fetching tags:
threads = []
@all_tags.each { |tag|
threads << Thread.new { self.get_time_of_tag(tag) }
}
threads.each { |thr| thr.join }
if @options[:verbose]
puts "Sorting tags.."
end
@all_tags.sort_by! { |x| self.get_time_of_tag(x) }.reverse!
if @options[:verbose]
puts "Generating log.."
end
(1 ... self.all_tags.size).each { |index|
log += self.generate_log_between_tags(self.all_tags[index], self.all_tags[index-1])
}
log += generate_log_between_tags(nil, self.all_tags.last)
log
end
def is_megred(number)
@github.pull_requests.merged? @options[:user], @options[:project], number
end
def get_all_tags
if @options[:verbose]
print "Fetching tags...\r"
end
response = @github.repos.tags @options[:user], @options[:project]
tags = []
page_i = 0
count_pages = response.count_pages
response.each_page do |page|
page_i += PER_PAGE_NUMBER
print "Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
tags.concat(page)
end
print " \r"
if @options[:verbose]
puts "Found #{tags.count} tags"
end
tags
end
def github_token
if @options[:token]
return @github_token ||= @options[:token]
end
env_var = ENV.fetch 'CHANGELOG_GITHUB_TOKEN', nil
unless env_var
puts "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found.".yellow
puts "This script can make only 50 requests to GitHub API per hour without token!".yellow
end
@github_token ||= env_var
end
def generate_log_between_tags(older_tag, newer_tag)
if newer_tag.nil?
puts "Can't find tag -> terminate"
exit 1
end
newer_tag_time = self.get_time_of_tag(newer_tag)
newer_tag_name = newer_tag['name']
filtered_pull_requests = delete_by_time(@pull_requests, :merged_at, newer_tag_time, older_tag)
filtered_issues = delete_by_time(@issues, :closed_at, newer_tag_time, older_tag)
older_tag_name = older_tag.nil? ? nil : older_tag['name']
if @options[:filter_issues_by_milestone]
#delete excess irrelevant issues (according milestones)
filtered_issues.select! { |issue|
if issue.milestone.nil?
true
else
#check, that this milestone in tag list:
milestone_is_tag = @all_tags.find { |tag|
tag.name == issue.milestone.title
}
milestone_is_tag.nil?
end
}
#add missed issues (according milestones)
issues_to_add = @issues.select { |issue|
if issue.milestone.nil?
false
else
#check, that this milestone in tag list:
milestone_is_tag = @all_tags.find { |tag|
tag.name == issue.milestone.title
}
if milestone_is_tag.nil?
false
else
issue.milestone.title == newer_tag_name
end
end
}
filtered_issues |= issues_to_add
end
self.create_log(filtered_pull_requests, filtered_issues, newer_tag_name, newer_tag_time, older_tag_name)
end
def delete_by_time(array, hash_key, newer_tag_time, older_tag = nil)
older_tag_time = self.get_time_of_tag(older_tag)
array.select { |req|
if req[hash_key]
t = Time.parse(req[hash_key]).utc - 60
if older_tag_time.nil?
tag_in_range_old = true
else
tag_in_range_old = t > older_tag_time
end
tag_in_range_new = t <= newer_tag_time
tag_in_range = (tag_in_range_old) && (tag_in_range_new)
tag_in_range
else
false
end
}
end
# @param [Array] pull_requests
# @param [Array] issues
# @param [String] newer_tag_name
# @param [String] newer_tag_time
# @param [String] older_tag_name
# @return [String]
def create_log(pull_requests, issues, newer_tag_name, newer_tag_time, older_tag_name = nil)
github_site = options[:github_site] || 'https://github.com'
project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"
# Generate tag name and link
log = "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name})\n"
if @options[:compare_link] && older_tag_name
# Generate compare link
log += "[Full Changelog](#{project_url}/compare/#{older_tag_name}...#{newer_tag_name})\n"
end
#Generate date string:
time_string = newer_tag_time.strftime @options[:format]
log += "#### #{time_string}\n"
if @options[:pulls]
# Generate pull requests:
pull_requests.each { |pull_request|
merge = @generator.get_string_for_pull_request(pull_request)
log += "- #{merge}"
} if pull_requests
end
if @options[:issues]
# Generate issues:
if issues
issues.sort! { |x, y|
if x.labels.any? && y.labels.any?
x.labels[0].name <=> y.labels[0].name
else
if x.labels.any?
1
else
if y.labels.any?
-1
else
0
end
end
end
}.reverse!
end
issues.each { |dict|
is_bug = false
is_enhancement = false
dict.labels.each { |label|
if label.name == 'bug'
is_bug = true
end
if label.name == 'enhancement'
is_enhancement = true
end
}
intro = 'Closed issue'
if is_bug
intro = 'Fixed bug'
end
if is_enhancement
intro = 'Implemented enhancement'
end
enc_string = @generator.encapsulate_string dict[:title]
merge = "*#{intro}:* #{enc_string} [\\##{dict[:number]}](#{dict.html_url})\n\n"
log += "- #{merge}"
}
end
log
end
def get_time_of_tag(tag_name)
if tag_name.nil?
return nil
end
if @tag_times_hash[tag_name['name']]
return @tag_times_hash[tag_name['name']]
end
github_git_data_commits_get = @github.git_data.commits.get @options[:user], @options[:project], tag_name['commit']['sha']
time_string = github_git_data_commits_get['committer']['date']
Time.parse(time_string)
@tag_times_hash[tag_name['name']] = Time.parse(time_string)
end
# Fetch every closed issue (pull requests excluded) and filter by the
# configured labels.
#
# @return [Array] issues whose labels intersect @options[:labels], plus
#   unlabeled issues when @options[:add_issues_wo_labels] is set
def get_all_issues
  if @options[:verbose]
    print "Fetching closed issues...\r"
  end
  response = @github.issues.list user: @options[:user], repo: @options[:project], state: 'closed', filter: 'all', labels: nil
  issues = []
  page_i = 0
  count_pages = response.count_pages
  response.each_page do |page|
    page_i += PER_PAGE_NUMBER
    print "Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
    issues.concat(page)
  end
  print "                               \r"
  if @options[:verbose]
    puts "Received issues: #{issues.count}"
  end
  # remove pull requests from issues (the issues API returns both; PRs carry
  # a pull_request attribute):
  issues.select! { |x|
    x.pull_request == nil
  }
  if @options[:verbose]
    puts "Filtering issues with labels #{@options[:labels]}#{@options[:add_issues_wo_labels] ? ' and w/o labels' : ''}"
  end
  filtered_issues = issues.select { |issue|
    # keep issues carrying at least one label from @options[:labels]
    (issue.labels.map { |label| label.name } & @options[:labels]).any?
  }
  if @options[:add_issues_wo_labels]
    issues_wo_labels = issues.select {
      # add issues without any labels
      |issue| !issue.labels.map { |label| label.name }.any?
    }
    filtered_issues.concat(issues_wo_labels)
  end
  if @options[:verbose]
    puts "Filtered issues: #{filtered_issues.count}"
  end
  filtered_issues
end
end
# Run the generator when this file is executed directly (not when required).
# NOTE(review): 'compund_changelog' is a typo for 'compound_changelog', kept
# to match the method name as defined on the class.
if __FILE__ == $0
  GitHubChangelogGenerator::ChangelogGenerator.new.compund_changelog
end
end
thread safety fix
#!/usr/bin/env ruby
require 'github_api'
require 'json'
require 'colorize'
require 'benchmark'
require_relative 'github_changelog_generator/parser'
require_relative 'github_changelog_generator/generator'
require_relative 'github_changelog_generator/version'
module GitHubChangelogGenerator
class ChangelogGenerator
attr_accessor :options, :all_tags, :github
PER_PAGE_NUMBER = 30
# Build the generator: parse CLI options, configure the GitHub client,
# then prefetch tags, pull requests and (optionally) issues.
def initialize
  @options = Parser.parse_options
  if options[:verbose]
    puts 'Input options:'
    pp options
    puts ''
  end
  github_token
  github_options = {per_page: PER_PAGE_NUMBER}
  github_options[:oauth_token] = @github_token unless @github_token.nil?
  github_options[:endpoint] = options[:github_endpoint] unless options[:github_endpoint].nil?
  # BUGFIX: :site was filled from :github_endpoint while being guarded on
  # :github_site; it must use the :github_site option itself.
  github_options[:site] = options[:github_site] unless options[:github_site].nil?
  @github = Github.new github_options
  @generator = Generator.new(@options)
  @all_tags = self.get_all_tags
  @pull_requests = self.get_filtered_pull_requests
  if @options[:issues]
    @issues = self.get_all_issues
  else
    @issues = []
  end
  @tag_times_hash = {}
end
# Pretty-print a JSON-serializable structure to standard output.
#
# @param [Object] json any structure JSON.pretty_generate accepts
def print_json(json)
  rendered = JSON.pretty_generate(json)
  $stdout.puts rendered
end
# Run a shell command from the project directory ($project_path).
#
# @param [String] cmd the command to execute
# @return [String] the command's standard output
def exec_command(cmd)
  # BUGFIX: 'and' is not a shell operator; '&&' chains cd with the command so
  # cmd only runs after the directory change succeeds.
  exec_cmd = "cd #{$project_path} && #{cmd}"
  %x[#{exec_cmd}]
end
# Fetch every closed pull request for the configured repository, following
# pagination.
#
# @return [Array] all closed pull requests
def get_all_closed_pull_requests
  if @options[:verbose]
    print "Fetching pull requests...\r"
  end
  response = @github.pull_requests.list @options[:user], @options[:project], :state => 'closed'
  pull_requests = []
  page_i = 0
  # Hoisted out of the loop (loop-invariant), matching get_all_tags and
  # get_all_issues.
  count_pages = response.count_pages
  response.each_page do |page|
    page_i += PER_PAGE_NUMBER
    print "Fetching pull requests... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
    pull_requests.concat(page)
  end
  print "                               \r"
  if @options[:verbose]
    puts "Received pull requests: #{pull_requests.count}"
  end
  pull_requests
end
# Fetch closed pull requests and, when :pull_request_labels is configured,
# keep only those whose labels intersect it, plus label-less PRs.
#
# @return [Array] the pull requests to include in the changelog
def get_filtered_pull_requests
  pull_requests = self.get_all_closed_pull_requests
  unless @options[:pull_request_labels].nil?
    if @options[:verbose]
      puts 'Filter all pull requests by labels.'
    end
    filtered_pull_requests = pull_requests.select { |pull_request|
      # fetch this issue to get its labels array
      # NOTE(review): one extra API request per PR (N+1); the PR payload here
      # does not carry labels.
      issue = @github.issues.get @options[:user], @options[:project], pull_request.number
      # compare: is there any label from the configured label list?
      issue_without_labels = !issue.labels.map { |label| label.name }.any?
      if @options[:verbose]
        puts "Filter request \##{issue.number}."
      end
      if @options[:pull_request_labels].any?
        select_by_label = (issue.labels.map { |label| label.name } & @options[:pull_request_labels]).any?
      else
        select_by_label = false
      end
      select_by_label | issue_without_labels
    }
    if @options[:verbose]
      puts "Filtered pull requests with specified labels and w/o labels: #{filtered_pull_requests.count}"
    end
    return filtered_pull_requests
  end
  pull_requests
end
# Build the full changelog and write it to @options[:output].
# NOTE(review): the method name is a typo for 'compound_changelog'; renaming
# would break external callers, so it is kept as-is.
def compund_changelog
  log = "# Changelog\n\n"
  if @options[:last]
    # Only the range between the two most recent tags.
    log += self.generate_log_between_tags(self.all_tags[0], self.all_tags[1])
  elsif @options[:tag1] and @options[:tag2]
    # Explicit tag pair requested on the command line.
    tag1 = @options[:tag1]
    tag2 = @options[:tag2]
    tags_strings = []
    self.all_tags.each { |x| tags_strings.push(x['name']) }
    if tags_strings.include?(tag1)
      if tags_strings.include?(tag2)
        # Map tag name -> index into @all_tags.
        hash = Hash[tags_strings.map.with_index.to_a]
        index1 = hash[tag1]
        index2 = hash[tag2]
        log += self.generate_log_between_tags(self.all_tags[index1], self.all_tags[index2])
      else
        puts "Can't find tag #{tag2} -> exit"
        exit
      end
    else
      puts "Can't find tag #{tag1} -> exit"
      exit
    end
  else
    log += self.generate_log_for_all_tags
  end
  log += "\n\n\\* *This changelog was generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"
  output_filename = "#{@options[:output]}"
  File.open(output_filename, 'w') { |file| file.write(log) }
  puts "Done! Generated log placed in #{`pwd`.strip!}/#{output_filename}"
end
# Generate changelog sections for every tag, newest first.
#
# @return [String] the concatenated markdown for all tags
def generate_log_for_all_tags
  log = ''
  if @options[:verbose]
    puts "Fetching tags dates.."
  end
  # Async fetching tags:
  threads = []
  @all_tags.each { |tag|
    # Pass the shared cache explicitly so every thread writes into the same
    # hash. NOTE(review): plain Hash writes from multiple threads are not
    # synchronized — safe only under MRI's GVL; confirm for other runtimes.
    threads << Thread.new { self.get_time_of_tag(tag, @tag_times_hash) }
  }
  threads.each { |thr| thr.join }
  if @options[:verbose]
    puts "Sorting tags.."
  end
  # All times are cached by now, so these lookups do not hit the network.
  @all_tags.sort_by! { |x| self.get_time_of_tag(x) }.reverse!
  if @options[:verbose]
    puts "Generating log.."
  end
  (1 ... self.all_tags.size).each { |index|
    log += self.generate_log_between_tags(self.all_tags[index], self.all_tags[index-1])
  }
  # Oldest tag has no lower bound.
  log += generate_log_between_tags(nil, self.all_tags.last)
  log
end
# Whether the pull request with the given number was actually merged (closed
# PRs may have been rejected instead).
# NOTE(review): 'is_megred' is a typo for 'is_merged'; not renamed here since
# callers outside this view may depend on the existing name.
#
# @param [Integer] number pull request number
def is_megred(number)
  @github.pull_requests.merged? @options[:user], @options[:project], number
end
# Fetch every tag of the configured repository, page by page.
#
# @return [Array] all tags returned by the GitHub API
def get_all_tags
  print "Fetching tags...\r" if @options[:verbose]
  response = @github.repos.tags @options[:user], @options[:project]
  tags = []
  fetched = 0
  total_pages = response.count_pages
  response.each_page do |page|
    fetched += PER_PAGE_NUMBER
    print "Fetching tags... #{fetched}/#{total_pages * PER_PAGE_NUMBER}\r"
    tags.concat(page)
  end
  print "                               \r"
  puts "Found #{tags.count} tags" if @options[:verbose]
  tags
end
# Resolve and memoize the GitHub OAuth token: the -t CLI option wins,
# otherwise the CHANGELOG_GITHUB_TOKEN environment variable is used.
#
# @return [String, nil] the token, or nil when none is configured
def github_token
  cli_token = @options[:token]
  return @github_token ||= cli_token if cli_token
  env_var = ENV.fetch 'CHANGELOG_GITHUB_TOKEN', nil
  if env_var.nil?
    puts "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found.".yellow
    puts "This script can make only 50 requests to GitHub API per hour without token!".yellow
  end
  @github_token ||= env_var
end
# Generate one changelog section covering the range (older_tag, newer_tag].
#
# @param [Hash, nil] older_tag lower bound tag; nil for the oldest section
# @param [Hash] newer_tag the tag this section describes (must not be nil)
# @return [String] the markdown for this tag
def generate_log_between_tags(older_tag, newer_tag)
  if newer_tag.nil?
    puts "Can't find tag -> terminate"
    exit 1
  end
  newer_tag_time = self.get_time_of_tag(newer_tag)
  newer_tag_name = newer_tag['name']
  # Keep only PRs/issues whose merge/close time falls inside this tag's range.
  filtered_pull_requests = delete_by_time(@pull_requests, :merged_at, newer_tag_time, older_tag)
  filtered_issues = delete_by_time(@issues, :closed_at, newer_tag_time, older_tag)
  older_tag_name = older_tag.nil? ? nil : older_tag['name']
  if @options[:filter_issues_by_milestone]
    # Drop issues whose milestone names some other tag (they belong to that
    # tag's section instead):
    filtered_issues.select! { |issue|
      if issue.milestone.nil?
        true
      else
        # check that this milestone is in the tag list:
        milestone_is_tag = @all_tags.find { |tag|
          tag.name == issue.milestone.title
        }
        milestone_is_tag.nil?
      end
    }
    # Add issues whose milestone names exactly this tag, regardless of their
    # close time:
    issues_to_add = @issues.select { |issue|
      if issue.milestone.nil?
        false
      else
        # check that this milestone is in the tag list:
        milestone_is_tag = @all_tags.find { |tag|
          tag.name == issue.milestone.title
        }
        if milestone_is_tag.nil?
          false
        else
          issue.milestone.title == newer_tag_name
        end
      end
    }
    # Set union keeps each issue at most once.
    filtered_issues |= issues_to_add
  end
  self.create_log(filtered_pull_requests, filtered_issues, newer_tag_name, newer_tag_time, older_tag_name)
end
# Keep only records whose timestamp at hash_key lies in the window
# (older_tag_time, newer_tag_time]. Records missing the key are dropped.
# NOTE(review): 60 seconds are subtracted from each timestamp before
# comparing — presumably clock-skew slack; confirm with the original author.
#
# @param [Array] array pull requests or issues
# @param [Symbol] hash_key :merged_at or :closed_at
# @param [Time] newer_tag_time upper bound (inclusive)
# @param [Hash, nil] older_tag lower bound tag, nil for no lower bound
# @return [Array] the records inside the window
def delete_by_time(array, hash_key, newer_tag_time, older_tag = nil)
  lower_bound = self.get_time_of_tag(older_tag)
  array.select do |record|
    raw_time = record[hash_key]
    next false unless raw_time
    stamp = Time.parse(raw_time).utc - 60
    above_lower = lower_bound.nil? || stamp > lower_bound
    above_lower && stamp <= newer_tag_time
  end
end
# Render the markdown changelog section for a single tag: header link,
# optional compare link, date line, then pull requests and issues.
#
# @param [Array] pull_requests pull requests to list under this tag
# @param [Array] issues issues to list under this tag
# @param [String] newer_tag_name
# @param [Time] newer_tag_time time of the newer tag (strftime is called on it)
# @param [String, nil] older_tag_name previous tag, used for the compare link
# @return [String] the markdown section for this tag
def create_log(pull_requests, issues, newer_tag_name, newer_tag_time, older_tag_name = nil)
  github_site = options[:github_site] || 'https://github.com'
  project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"
  # Generate tag name and link
  log = "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name})\n"
  if @options[:compare_link] && older_tag_name
    # Generate compare link
    log += "[Full Changelog](#{project_url}/compare/#{older_tag_name}...#{newer_tag_name})\n"
  end
  # Generate date string:
  time_string = newer_tag_time.strftime @options[:format]
  log += "#### #{time_string}\n"
  if @options[:pulls]
    # Generate pull requests:
    pull_requests.each { |pull_request|
      merge = @generator.get_string_for_pull_request(pull_request)
      log += "- #{merge}"
    } if pull_requests
  end
  if @options[:issues]
    # Generate issues:
    if issues
      # Order by the first label's name; labeled issues compare greater than
      # unlabeled ones, then the whole order is reversed.
      issues.sort! { |x, y|
        if x.labels.any? && y.labels.any?
          x.labels[0].name <=> y.labels[0].name
        else
          if x.labels.any?
            1
          else
            if y.labels.any?
              -1
            else
              0
            end
          end
        end
      }.reverse!
    end
    issues.each { |dict|
      # Choose the intro wording from the issue labels; 'enhancement' wins
      # over 'bug' because it is assigned last.
      is_bug = false
      is_enhancement = false
      dict.labels.each { |label|
        if label.name == 'bug'
          is_bug = true
        end
        if label.name == 'enhancement'
          is_enhancement = true
        end
      }
      intro = 'Closed issue'
      if is_bug
        intro = 'Fixed bug'
      end
      if is_enhancement
        intro = 'Implemented enhancement'
      end
      enc_string = @generator.encapsulate_string dict[:title]
      merge = "*#{intro}:* #{enc_string} [\\##{dict[:number]}](#{dict.html_url})\n\n"
      log += "- #{merge}"
    }
  end
  log
end
# Resolve the commit time of a tag, memoizing into the given cache hash. The
# cache is passed explicitly so the threaded prefetch in
# generate_log_for_all_tags can share one hash deliberately.
#
# @param [Hash, nil] tag_name tag hash from the GitHub API ('name', 'commit' => {'sha'})
# @param [Hash] tag_times_hash cache of tag name => Time
# @return [Time, nil] the tag's committer date, or nil when tag_name is nil
def get_time_of_tag(tag_name, tag_times_hash = @tag_times_hash)
  return nil if tag_name.nil?
  # BUGFIX: read AND write through the parameter; the original read the
  # parameter but returned/wrote @tag_times_hash, defeating the injection.
  cached = tag_times_hash[tag_name['name']]
  return cached if cached
  commit = @github.git_data.commits.get @options[:user], @options[:project], tag_name['commit']['sha']
  tag_times_hash[tag_name['name']] = Time.parse(commit['committer']['date'])
end
# Fetch every closed issue (pull requests excluded) and filter by the
# configured labels.
#
# @return [Array] issues whose labels intersect @options[:labels], plus
#   unlabeled issues when @options[:add_issues_wo_labels] is set
def get_all_issues
  if @options[:verbose]
    print "Fetching closed issues...\r"
  end
  response = @github.issues.list user: @options[:user], repo: @options[:project], state: 'closed', filter: 'all', labels: nil
  issues = []
  page_i = 0
  count_pages = response.count_pages
  response.each_page do |page|
    page_i += PER_PAGE_NUMBER
    print "Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
    issues.concat(page)
  end
  print "                               \r"
  if @options[:verbose]
    puts "Received issues: #{issues.count}"
  end
  # remove pull requests from issues (the issues API returns both; PRs carry
  # a pull_request attribute):
  issues.select! { |x|
    x.pull_request == nil
  }
  if @options[:verbose]
    puts "Filtering issues with labels #{@options[:labels]}#{@options[:add_issues_wo_labels] ? ' and w/o labels' : ''}"
  end
  filtered_issues = issues.select { |issue|
    # keep issues carrying at least one label from @options[:labels]
    (issue.labels.map { |label| label.name } & @options[:labels]).any?
  }
  if @options[:add_issues_wo_labels]
    issues_wo_labels = issues.select {
      # add issues without any labels
      |issue| !issue.labels.map { |label| label.name }.any?
    }
    filtered_issues.concat(issues_wo_labels)
  end
  if @options[:verbose]
    puts "Filtered issues: #{filtered_issues.count}"
  end
  filtered_issues
end
end
# Run the generator when this file is executed directly (not when required).
# NOTE(review): 'compund_changelog' is a typo for 'compound_changelog', kept
# to match the method name as defined on the class.
if __FILE__ == $0
  GitHubChangelogGenerator::ChangelogGenerator.new.compund_changelog
end
end
|
require 'rest-client'
require 'json'
module Discordrb::API
APIBASE = 'https://discordapp.com/api'
module_function
# Ban a user from a server and delete their messages from the last message_days days
def ban_user(token, server_id, user_id, message_days)
RestClient.put(
"#{APIBASE}/guilds/#{server_id}/bans/#{user_id}?delete-message-days=#{message_days}",
Authorization: token
)
end
# Get a server's banned users
def bans(token, server_id)
RestClient.get(
"#{APIBASE}/guilds/#{server_id}/bans",
Authorization: token
)
end
# Login to the server
def login(email, password)
RestClient.post(
"#{APIBASE}/auth/login",
email: email,
password: password
)
end
# Logout from the server
def logout(token)
RestClient.post(
"#{APIBASE}/auth/logout",
Authorization: token
)
end
# Create a server
def create_server(token, name, region)
RestClient.post(
"#{APIBASE}/guilds",
{ 'name' => name, 'region' => region }.to_json,
Authorization: token,
content_type: :json
)
end
# Leave a server
# BUGFIX: the Authorization header referenced `token`, which was never a
# parameter (always NameError inside a module_function); take the token as
# the first argument like every other endpoint wrapper in this module.
def leave_server(token, server_id)
  RestClient.delete(
    "#{APIBASE}/guilds/#{server_id}",
    Authorization: token
  )
end
# Get a channel's data
def channel(token, channel_id)
RestClient.get(
"#{APIBASE}/channels/#{channel_id}",
Authorization: token
)
end
# Create a channel
def create_channel(token, server_id, name, type)
RestClient.post(
"#{APIBASE}/guilds/#{server_id}/channels",
{ 'name' => name, 'type' => type }.to_json,
Authorization: token,
content_type: :json
)
end
# Update a channel's data
def update_channel(token, channel_id, name, topic, position = 0)
RestClient.patch(
"#{APIBASE}/channels/#{channel_id}",
{ 'name' => name, 'position' => position, 'topic' => topic }.to_json,
Authorization: token,
content_type: :json
)
end
# Delete a channel
def delete_channel(token, channel_id)
RestClient.delete(
"#{APIBASE}/channels/#{channel_id}",
Authorization: token
)
end
# Join a server using an invite
def join_server(token, invite_id)
RestClient.post(
"#{APIBASE}/invite/#{invite_id}",
Authorization: token
)
end
# Create a private channel
# BUGFIX: the Authorization header used @token, which is always nil inside a
# module_function; use the token parameter like every other endpoint here.
def create_private(token, bot_user_id, user_id)
  RestClient.post(
    "#{APIBASE}/users/#{bot_user_id}/channels",
    { 'recipient_id' => user_id }.to_json,
    Authorization: token,
    content_type: :json
  )
end
# Create an instant invite from a server or a channel id
def create_invite(token, id, max_age = 0, max_uses = 0, temporary = false, xkcd = false)
RestClient.post(
"#{APIBASE}/channels/#{id}/invites",
{ 'max_age' => max_age, 'max_uses' => max_uses, 'temporary' => temporary, 'xkcdpass' => xkcd }.to_json,
Authorization: token,
content_type: :json
)
end
# Send a message to a channel
# BUGFIX: the payload wrote `tts => tts` — the *value* of the local variable
# as the hash key (i.e. `false => false`), so the TTS flag never reached the
# API under its field name; use the string key 'tts'.
def send_message(token, channel_id, message, mentions = [], tts = false)
  RestClient.post(
    "#{APIBASE}/channels/#{channel_id}/messages",
    { 'content' => message, 'mentions' => mentions, 'tts' => tts }.to_json,
    Authorization: token,
    content_type: :json
  )
end
# Delete a message
def delete_message(token, channel_id, message_id)
RestClient.delete(
"#{APIBASE}/channels/#{channel_id}/messages/#{message_id}",
Authorization: token
)
end
# Edit a message
def edit_message(token, channel_id, message, mentions = [])
RestClient.patch(
"#{APIBASE}/channels/#{channel_id}/messages",
{ 'content' => message, 'mentions' => mentions }.to_json,
Authorization: token,
content_type: :json
)
end
# Acknowledge that a message has been received
# The last acknowledged message will be sent in the ready packet,
# so this is an easy way to catch up on messages
def acknowledge_message(token, channel_id, message_id)
RestClient.post(
"#{APIBASE}/channels/#{channel_id}/messages/#{message_id}/ack",
Authorization: token
)
end
# Send a file as a message to a channel
def send_file(token, channel_id, file, filename = 'filename')
RestClient.post(
"#{APIBASE}/channels/#{channel_id}/messages",
(filename.to_sym) => file,
Authorization: token
)
end
# Create a role (parameters such as name and colour will have to be set by update_role afterwards)
def create_role(token, server_id)
RestClient.post(
"#{APIBASE}/guilds/#{server_id}/roles",
Authorization: token
)
end
# Update a role
# Permissions are the Discord defaults; allowed: invite creation, reading/sending messages,
# sending TTS messages, embedding links, sending files, reading the history, mentioning everybody,
# connecting to voice, speaking and voice activity (push-to-talk isn't mandatory)
def update_role(token, server_id, role_id, name, colour, hoist = false, packed_permissions = 36953089)
RestClient.patch(
"#{APIBASE}/guilds/#{server_id}/roles/#{role_id}",
{ 'color' => colour, 'name' => name, 'hoist' => hoist, 'permissions' => packed_permissions }.to_json,
Authorization: token,
content_type: :json
)
end
# Delete a role
def delete_role(token, server_id, role_id)
RestClient.delete(
"#{APIBASE}/guilds/#{server_id}/roles/#{role_id}",
Authorization: token
)
end
# Update a user's roles
def update_user_roles(token, server_id, user_id, roles)
RestClient.patch(
"#{APIBASE}/guilds/#{server_id}/members/#{user_id}",
{ 'roles' => roles }.to_json,
Authorization: token,
content_type: :json
)
end
# Update a user's permission overrides in a channel
def update_user_overrides(token, channel_id, user_id, allow, deny)
RestClient.put(
"#{APIBASE}/channels/#{channel_id}/permissions/#{user_id}",
{ 'type' => 'member', 'id' => user_id, 'allow' => allow, 'deny' => deny }.to_json,
Authorization: token,
content_type: :json
)
end
# Update a role's permission overrides in a channel
def update_role_overrides(token, channel_id, role_id, allow, deny)
RestClient.put(
"#{APIBASE}/channels/#{channel_id}/permissions/#{role_id}",
{ 'type' => 'role', 'id' => role_id, 'allow' => allow, 'deny' => deny }.to_json,
Authorization: token,
content_type: :json
)
end
# Get the gateway to be used
def gateway(token)
RestClient.get(
"#{APIBASE}/gateway",
Authorization: token
)
end
# Start typing (needs to be resent every 5 seconds to keep up the typing)
def start_typing(token, channel_id)
RestClient.post(
"#{APIBASE}/channels/#{channel_id}/typing",
Authorization: token
)
end
# Get user data
def user(token, user_id)
RestClient.get(
"#{APIBASE}/users/#{user_id}",
Authorization: token
)
end
# Update user data
def update_user(token, email, password, new_username, avatar, new_password = nil)
RestClient.patch(
"#{APIBASE}/users/@me",
{ 'avatar' => avatar, 'email' => email, 'new_password' => new_password, 'password' => password, 'username' => new_username }.to_json,
Authorization: token,
content_type: :json
)
end
# Get a list of messages from a channel's history
def channel_log(token, channel_id, amount, before = nil, after = nil)
RestClient.get(
"#{APIBASE}/channels/#{channel_id}/messages?limit=#{amount}#{"&before=#{before}" if before}#{"&after=#{after}" if after}",
Authorization: token
)
end
end
Fix accidental @ in API
require 'rest-client'
require 'json'
module Discordrb::API
APIBASE = 'https://discordapp.com/api'
module_function
# Ban a user from a server and delete their messages from the last message_days days
def ban_user(token, server_id, user_id, message_days)
RestClient.put(
"#{APIBASE}/guilds/#{server_id}/bans/#{user_id}?delete-message-days=#{message_days}",
Authorization: token
)
end
# Get a server's banned users
def bans(token, server_id)
RestClient.get(
"#{APIBASE}/guilds/#{server_id}/bans",
Authorization: token
)
end
# Login to the server
def login(email, password)
RestClient.post(
"#{APIBASE}/auth/login",
email: email,
password: password
)
end
# Logout from the server
def logout(token)
RestClient.post(
"#{APIBASE}/auth/logout",
Authorization: token
)
end
# Create a server
def create_server(token, name, region)
RestClient.post(
"#{APIBASE}/guilds",
{ 'name' => name, 'region' => region }.to_json,
Authorization: token,
content_type: :json
)
end
# Leave a server
# BUGFIX: the Authorization header referenced `token`, which was never a
# parameter (always NameError inside a module_function); take the token as
# the first argument like every other endpoint wrapper in this module.
def leave_server(token, server_id)
  RestClient.delete(
    "#{APIBASE}/guilds/#{server_id}",
    Authorization: token
  )
end
# Get a channel's data
def channel(token, channel_id)
RestClient.get(
"#{APIBASE}/channels/#{channel_id}",
Authorization: token
)
end
# Create a channel
def create_channel(token, server_id, name, type)
RestClient.post(
"#{APIBASE}/guilds/#{server_id}/channels",
{ 'name' => name, 'type' => type }.to_json,
Authorization: token,
content_type: :json
)
end
# Update a channel's data
def update_channel(token, channel_id, name, topic, position = 0)
RestClient.patch(
"#{APIBASE}/channels/#{channel_id}",
{ 'name' => name, 'position' => position, 'topic' => topic }.to_json,
Authorization: token,
content_type: :json
)
end
# Delete a channel
def delete_channel(token, channel_id)
RestClient.delete(
"#{APIBASE}/channels/#{channel_id}",
Authorization: token
)
end
# Join a server using an invite
def join_server(token, invite_id)
RestClient.post(
"#{APIBASE}/invite/#{invite_id}",
Authorization: token
)
end
# Create a private channel
def create_private(token, bot_user_id, user_id)
RestClient.post(
"#{APIBASE}/users/#{bot_user_id}/channels",
{ 'recipient_id' => user_id }.to_json,
Authorization: token,
content_type: :json
)
end
# Create an instant invite from a server or a channel id
def create_invite(token, id, max_age = 0, max_uses = 0, temporary = false, xkcd = false)
RestClient.post(
"#{APIBASE}/channels/#{id}/invites",
{ 'max_age' => max_age, 'max_uses' => max_uses, 'temporary' => temporary, 'xkcdpass' => xkcd }.to_json,
Authorization: token,
content_type: :json
)
end
# Send a message to a channel
# BUGFIX: the payload wrote `tts => tts` — the *value* of the local variable
# as the hash key (i.e. `false => false`), so the TTS flag never reached the
# API under its field name; use the string key 'tts'.
def send_message(token, channel_id, message, mentions = [], tts = false)
  RestClient.post(
    "#{APIBASE}/channels/#{channel_id}/messages",
    { 'content' => message, 'mentions' => mentions, 'tts' => tts }.to_json,
    Authorization: token,
    content_type: :json
  )
end
# Delete a message
def delete_message(token, channel_id, message_id)
RestClient.delete(
"#{APIBASE}/channels/#{channel_id}/messages/#{message_id}",
Authorization: token
)
end
# Edit a message
def edit_message(token, channel_id, message, mentions = [])
RestClient.patch(
"#{APIBASE}/channels/#{channel_id}/messages",
{ 'content' => message, 'mentions' => mentions }.to_json,
Authorization: token,
content_type: :json
)
end
# Acknowledge that a message has been received
# The last acknowledged message will be sent in the ready packet,
# so this is an easy way to catch up on messages
def acknowledge_message(token, channel_id, message_id)
RestClient.post(
"#{APIBASE}/channels/#{channel_id}/messages/#{message_id}/ack",
Authorization: token
)
end
# Send a file as a message to a channel
def send_file(token, channel_id, file, filename = 'filename')
RestClient.post(
"#{APIBASE}/channels/#{channel_id}/messages",
(filename.to_sym) => file,
Authorization: token
)
end
# Create a role (parameters such as name and colour will have to be set by update_role afterwards)
def create_role(token, server_id)
RestClient.post(
"#{APIBASE}/guilds/#{server_id}/roles",
Authorization: token
)
end
# Update a role
# Permissions are the Discord defaults; allowed: invite creation, reading/sending messages,
# sending TTS messages, embedding links, sending files, reading the history, mentioning everybody,
# connecting to voice, speaking and voice activity (push-to-talk isn't mandatory)
def update_role(token, server_id, role_id, name, colour, hoist = false, packed_permissions = 36953089)
RestClient.patch(
"#{APIBASE}/guilds/#{server_id}/roles/#{role_id}",
{ 'color' => colour, 'name' => name, 'hoist' => hoist, 'permissions' => packed_permissions }.to_json,
Authorization: token,
content_type: :json
)
end
# Delete a role
def delete_role(token, server_id, role_id)
RestClient.delete(
"#{APIBASE}/guilds/#{server_id}/roles/#{role_id}",
Authorization: token
)
end
# Update a user's roles
def update_user_roles(token, server_id, user_id, roles)
RestClient.patch(
"#{APIBASE}/guilds/#{server_id}/members/#{user_id}",
{ 'roles' => roles }.to_json,
Authorization: token,
content_type: :json
)
end
# Update a user's permission overrides in a channel
def update_user_overrides(token, channel_id, user_id, allow, deny)
RestClient.put(
"#{APIBASE}/channels/#{channel_id}/permissions/#{user_id}",
{ 'type' => 'member', 'id' => user_id, 'allow' => allow, 'deny' => deny }.to_json,
Authorization: token,
content_type: :json
)
end
# Update a role's permission overrides in a channel
def update_role_overrides(token, channel_id, role_id, allow, deny)
RestClient.put(
"#{APIBASE}/channels/#{channel_id}/permissions/#{role_id}",
{ 'type' => 'role', 'id' => role_id, 'allow' => allow, 'deny' => deny }.to_json,
Authorization: token,
content_type: :json
)
end
# Get the gateway to be used
def gateway(token)
RestClient.get(
"#{APIBASE}/gateway",
Authorization: token
)
end
# Start typing (needs to be resent every 5 seconds to keep up the typing)
def start_typing(token, channel_id)
RestClient.post(
"#{APIBASE}/channels/#{channel_id}/typing",
Authorization: token
)
end
# Get user data
def user(token, user_id)
RestClient.get(
"#{APIBASE}/users/#{user_id}",
Authorization: token
)
end
# Update user data
def update_user(token, email, password, new_username, avatar, new_password = nil)
RestClient.patch(
"#{APIBASE}/users/@me",
{ 'avatar' => avatar, 'email' => email, 'new_password' => new_password, 'password' => password, 'username' => new_username }.to_json,
Authorization: token,
content_type: :json
)
end
# Get a list of messages from a channel's history
def channel_log(token, channel_id, amount, before = nil, after = nil)
RestClient.get(
"#{APIBASE}/channels/#{channel_id}/messages?limit=#{amount}#{"&before=#{before}" if before}#{"&after=#{after}" if after}",
Authorization: token
)
end
end
|
class GraphUser < OurOhm
def graph_user
return self
end
reference :user, lambda { |id| id && User.find(id) }
set :believes_facts, Basefact
set :doubts_facts, Basefact
set :disbelieves_facts, Basefact
private :believes_facts, :doubts_facts, :disbelieves_facts
collection :created_facts, Basefact, :created_by
define_memoized_method :internal_channels do
Channel.find(:created_by_id => self.id).except(:discontinued => 'true').sort
end
# Lazily build and memoize the ChannelManager for this user.
# BUGFIX: was `@channel_manager || ChannelManager.new(self)`, which never
# assigns the ivar and so constructs a fresh manager on every call.
def channel_manager
  @channel_manager ||= ChannelManager.new(self)
end
delegate :editable_channels_for, :to => :channel_manager
define_memoized_method :channels do
channels = self.internal_channels.to_a
channels.delete(self.created_facts_channel)
channels.unshift(self.created_facts_channel)
channels.delete(self.stream)
channels.unshift(self.stream )
channels
end
reference :stream, Channel::UserStream
def create_stream
self.stream = Channel::UserStream.create(:created_by => self)
save
end
after :create, :create_stream
reference :created_facts_channel, Channel::CreatedFacts
def create_created_facts_channel
self.created_facts_channel = Channel::CreatedFacts.create(:created_by => self)
save
end
after :create, :create_created_facts_channel
collection :activities, Activity, :user
after :create, :calculate_authority
attribute :cached_authority
index :cached_authority
# Recompute this user's cached authority from the authority of the real facts
# they created, and refresh their score in the global top-users sorted set.
# NOTE(review): if any created fact has influencing_authority 0 the product
# is 0 and Math.log2 yields -Infinity — confirm inputs are always positive.
def calculate_authority
  self.cached_authority = 1.0 + Math.log2(self.real_created_facts.inject(1) { |result, fact| result * fact.influencing_authority})
  self.class.key[:top_users].zadd(self.cached_authority, id)
  self.save
end
def remove_from_top_users
self.class.key[:top_users].zrem(id)
end
after :delete, :remove_from_top_users
# The nr highest-authority users, best first, from the top_users sorted set.
# NOTE(review): `.map(&GraphUser)` relies on GraphUser responding to to_proc
# (turning stored ids into instances) — confirm; an explicit
# `map { |id| GraphUser[id] }` would be clearer.
def self.top(nr = 10)
  self.key[:top_users].zrevrange(0,nr-1).map(&GraphUser)
end
def authority
self.cached_authority || 1.0
end
def rounded_authority
auth = [self.authority.to_f, 1.0].max
sprintf('%.1f', auth)
end
# Look up the opinion set matching the given opinion kind.
# Example: user.facts_he(:beliefs)
#
# @param [Symbol, String] type one of the belief/doubt/disbelief aliases
# @return the matching fact set
# @raise [RuntimeError] when the kind is not recognised
def facts_he(type)
  type = type.to_sym
  belief_check(type)
  case type
  when :beliefs, :believes
    believes_facts
  when :doubts
    doubts_facts
  when :disbeliefs, :disbelieves
    disbelieves_facts
  else
    raise "invalid opinion"
  end
end
def has_opinion?(type, fact)
facts_he(type).include?(fact)
end
# The user's current opinion on the given fact.
#
# @return [Symbol, nil] :beliefs, :doubts or :disbeliefs — nil when the user
#   holds no opinion on the fact
def opinion_on(fact)
  %i[beliefs doubts disbeliefs].find { |kind| self.has_opinion?(kind, fact) }
end
def facts
facts_he(:believes) | facts_he(:doubts) | facts_he(:disbelieves)
end
def real_facts
facts.find_all { |fact| fact.class == Fact }
end
def real_created_facts
created_facts.find_all { |fact| fact.class == Fact }
end
def update_opinion(type, fact)
# Remove existing opinion by user
remove_opinions(fact)
facts_he(type) << fact
end
def remove_opinions(fact)
[:believes, :doubts, :disbelieves].each do |type|
facts_he(type).delete(fact)
end
end
end
removed whitespace
class GraphUser < OurOhm
def graph_user
return self
end
reference :user, lambda { |id| id && User.find(id) }
set :believes_facts, Basefact
set :doubts_facts, Basefact
set :disbelieves_facts, Basefact
private :believes_facts, :doubts_facts, :disbelieves_facts
collection :created_facts, Basefact, :created_by
define_memoized_method :internal_channels do
Channel.find(:created_by_id => self.id).except(:discontinued => 'true').sort
end
# Lazily build and memoize the ChannelManager for this user.
# BUGFIX: was `@channel_manager || ChannelManager.new(self)`, which never
# assigns the ivar and so constructs a fresh manager on every call.
def channel_manager
  @channel_manager ||= ChannelManager.new(self)
end
delegate :editable_channels_for, :to => :channel_manager
define_memoized_method :channels do
channels = self.internal_channels.to_a
channels.delete(self.created_facts_channel)
channels.unshift(self.created_facts_channel)
channels.delete(self.stream)
channels.unshift(self.stream )
channels
end
reference :stream, Channel::UserStream
def create_stream
self.stream = Channel::UserStream.create(:created_by => self)
save
end
after :create, :create_stream
reference :created_facts_channel, Channel::CreatedFacts
def create_created_facts_channel
self.created_facts_channel = Channel::CreatedFacts.create(:created_by => self)
save
end
after :create, :create_created_facts_channel
collection :activities, Activity, :user
after :create, :calculate_authority
attribute :cached_authority
index :cached_authority
def calculate_authority
self.cached_authority = 1.0 + Math.log2(self.real_created_facts.inject(1) { |result, fact| result * fact.influencing_authority})
self.class.key[:top_users].zadd(self.cached_authority, id)
self.save
end
def remove_from_top_users
self.class.key[:top_users].zrem(id)
end
after :delete, :remove_from_top_users
def self.top(nr = 10)
self.key[:top_users].zrevrange(0,nr-1).map(&GraphUser)
end
def authority
self.cached_authority || 1.0
end
def rounded_authority
auth = [self.authority.to_f, 1.0].max
sprintf('%.1f', auth)
end
# user.facts_he(:beliefs)
def facts_he(type)
type = type.to_sym
belief_check(type)
if [:beliefs,:believes].include?(type)
believes_facts
elsif [:doubts].include?(type)
doubts_facts
elsif [:disbeliefs,:disbelieves].include?(type)
disbelieves_facts
else
raise "invalid opinion"
end
end
def has_opinion?(type, fact)
facts_he(type).include?(fact)
end
def opinion_on(fact)
[:beliefs, :doubts, :disbeliefs].each do |opinion|
return opinion if self.has_opinion?(opinion,fact)
end
return nil
end
def facts
facts_he(:believes) | facts_he(:doubts) | facts_he(:disbelieves)
end
def real_facts
facts.find_all { |fact| fact.class == Fact }
end
def real_created_facts
created_facts.find_all { |fact| fact.class == Fact }
end
def update_opinion(type, fact)
# Remove existing opinion by user
remove_opinions(fact)
facts_he(type) << fact
end
def remove_opinions(fact)
[:believes, :doubts, :disbelieves].each do |type|
facts_he(type).delete(fact)
end
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require core functionalities
require File.expand_path('../../lib/core', __FILE__)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module MonsoonDashboard
# Rails application configuration for the Monsoon dashboard (Elektra).
# Most settings are driven by environment variables; the defaults below
# apply when the corresponding variable is unset.
class Application < Rails::Application
# Enable the React addons bundle shipped by react-rails.
config.react.addons = true
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
#config.autoload_paths += %W(#{config.root}/plugins)
config.autoload_paths << Rails.root.join('lib')
# Use memory for caching, file cache needs some work for working with docker
# Not sure if this really makes sense because every passenger thread will have its own cache
config.cache_store = :memory_store
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
config.middleware.insert_before Rack::Sendfile, "DebugHeadersMiddleware"
require 'prometheus/client/rack/collector'
# build a map from the plugins
# (mount point -> mount point; used below to label request metrics by plugin)
plugin_mount_points = {}
Core::PluginsManager.available_plugins.each{|plugin| plugin_mount_points[plugin.mount_point] = plugin.mount_point}
# Collect per-request Prometheus metrics labelled by method/host/path/
# controller/action and, when derivable from the URL, the plugin name.
config.middleware.insert_after ActionDispatch::DebugExceptions, Prometheus::Client::Rack::Collector do |env|
{
method: env['REQUEST_METHOD'].downcase,
host: env['HTTP_HOST'].to_s,
# just take the first component of the path as a label
path: env['REQUEST_PATH'][0, env['REQUEST_PATH'].index('/',1) || 20 ],
controller: env.fetch("action_dispatch.request.path_parameters",{}).fetch(:controller,''),
action: env.fetch("action_dispatch.request.path_parameters",{}).fetch(:action,''),
# Project-scoped URLs carry the plugin in the 4th path segment,
# domain-scoped URLs in the 3rd.
plugin: if env.fetch("action_dispatch.request.path_parameters",{}).fetch(:project_id,false)
plugin_mount_points[env['REQUEST_PATH'].split("/")[3]] || ""
elsif env.fetch("action_dispatch.request.path_parameters",{}).fetch(:domain_id, false)
plugin_mount_points[env['REQUEST_PATH'].split("/")[2]] || ""
else
''
end
}
end
require 'prometheus/client/rack/exporter'
# Expose the scrape endpoint right after the collector in the stack.
config.middleware.insert_after Prometheus::Client::Rack::Collector, Prometheus::Client::Rack::Exporter
config.middleware.use "RevisionMiddleware"
############# ENSURE EDGE MODE FOR IE ###############
config.action_dispatch.default_headers["X-UA-Compatible"]="IE=edge,chrome=1"
############# KEYSTONE ENDPOINT ##############
# Prefer the AUTHORITY_SERVICE_* variables (e.g. from Kubernetes service
# discovery); fall back to the explicit endpoint variable.
config.keystone_endpoint = if ENV['AUTHORITY_SERVICE_HOST'] && ENV['AUTHORITY_SERVICE_PORT']
proto = ENV['AUTHORITY_SERVICE_PROTO'] || 'http'
host = ENV['AUTHORITY_SERVICE_HOST']
port = ENV['AUTHORITY_SERVICE_PORT']
"#{proto}://#{host}:#{port}/v3"
else
ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']
end
config.debug_api_calls = ENV.has_key?('DEBUG_API_CALLS')
config.debug_policy_engine = ENV.has_key?('DEBUG_POLICY_ENGINE')
# SSL peer verification is on unless ELEKTRA_SSL_VERIFY_PEER says otherwise
# (only the literal string 'true' re-enables it when the variable is set).
config.ssl_verify_peer = if ENV.has_key?('ELEKTRA_SSL_VERIFY_PEER')
ENV['ELEKTRA_SSL_VERIFY_PEER'].to_s=='true'
else
true
end
############## REGION ###############
# NOTE(review): the fallback is an Array while the env value is a String --
# confirm that consumers of default_region handle both shapes.
config.default_region = ENV['MONSOON_DASHBOARD_REGION'] || ['eu-de-1','staging','europe']
############## CLOUD ADMIN ###############
config.cloud_admin_domain = ENV.fetch('MONSOON_OPENSTACK_CLOUDADMIN_DOMAIN', 'ccadmin')
config.cloud_admin_project = ENV.fetch('MONSOON_OPENSTACK_CLOUDADMIN_PROJECT', 'cloud_admin')
############## SERVICE USER #############
config.service_user_id = ENV['MONSOON_OPENSTACK_AUTH_API_USERID']
config.service_user_password = ENV['MONSOON_OPENSTACK_AUTH_API_PASSWORD']
config.service_user_domain_name = ENV['MONSOON_OPENSTACK_AUTH_API_DOMAIN']
config.default_domain = ENV['MONSOON_DASHBOARD_DEFAULT_DOMAIN'] || 'monsoon3'
# Mailer configuration for inquiries/requests
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
address: ENV['MONSOON_DASHBOARD_MAIL_SERVER'],
port: ENV['MONSOON_DASHBOARD_MAIL_SERVER_PORT'] || 25,
enable_starttls_auto: false
}
config.action_mailer.default_options = {
from: 'Converged Cloud <noreply+ConvergedCloud@sap.corp>'
}
end
end
Use the ELEKTRA_SSL_VERIFY_PEER environment variable to disable ssl_verify_peer in Fog
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require core functionalities
require File.expand_path('../../lib/core', __FILE__)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module MonsoonDashboard
# Rails application configuration for the Monsoon dashboard (Elektra).
# Most settings are driven by environment variables; the defaults below
# apply when the corresponding variable is unset.
class Application < Rails::Application
# Enable the React addons bundle shipped by react-rails.
config.react.addons = true
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
#config.autoload_paths += %W(#{config.root}/plugins)
config.autoload_paths << Rails.root.join('lib')
# Use memory for caching, file cache needs some work for working with docker
# Not sure if this really makes sense because every passenger thread will have its own cache
config.cache_store = :memory_store
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
config.middleware.insert_before Rack::Sendfile, "DebugHeadersMiddleware"
require 'prometheus/client/rack/collector'
# build a map from the plugins
# (mount point -> mount point; used below to label request metrics by plugin)
plugin_mount_points = {}
Core::PluginsManager.available_plugins.each{|plugin| plugin_mount_points[plugin.mount_point] = plugin.mount_point}
# Collect per-request Prometheus metrics labelled by method/host/path/
# controller/action and, when derivable from the URL, the plugin name.
config.middleware.insert_after ActionDispatch::DebugExceptions, Prometheus::Client::Rack::Collector do |env|
{
method: env['REQUEST_METHOD'].downcase,
host: env['HTTP_HOST'].to_s,
# just take the first component of the path as a label
path: env['REQUEST_PATH'][0, env['REQUEST_PATH'].index('/',1) || 20 ],
controller: env.fetch("action_dispatch.request.path_parameters",{}).fetch(:controller,''),
action: env.fetch("action_dispatch.request.path_parameters",{}).fetch(:action,''),
# Project-scoped URLs carry the plugin in the 4th path segment,
# domain-scoped URLs in the 3rd.
plugin: if env.fetch("action_dispatch.request.path_parameters",{}).fetch(:project_id,false)
plugin_mount_points[env['REQUEST_PATH'].split("/")[3]] || ""
elsif env.fetch("action_dispatch.request.path_parameters",{}).fetch(:domain_id, false)
plugin_mount_points[env['REQUEST_PATH'].split("/")[2]] || ""
else
''
end
}
end
require 'prometheus/client/rack/exporter'
# Expose the scrape endpoint right after the collector in the stack.
config.middleware.insert_after Prometheus::Client::Rack::Collector, Prometheus::Client::Rack::Exporter
config.middleware.use "RevisionMiddleware"
############# ENSURE EDGE MODE FOR IE ###############
config.action_dispatch.default_headers["X-UA-Compatible"]="IE=edge,chrome=1"
############# KEYSTONE ENDPOINT ##############
# Prefer the AUTHORITY_SERVICE_* variables (e.g. from Kubernetes service
# discovery); fall back to the explicit endpoint variable.
config.keystone_endpoint = if ENV['AUTHORITY_SERVICE_HOST'] && ENV['AUTHORITY_SERVICE_PORT']
proto = ENV['AUTHORITY_SERVICE_PROTO'] || 'http'
host = ENV['AUTHORITY_SERVICE_HOST']
port = ENV['AUTHORITY_SERVICE_PORT']
"#{proto}://#{host}:#{port}/v3"
else
ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']
end
config.debug_api_calls = ENV.has_key?('DEBUG_API_CALLS')
config.debug_policy_engine = ENV.has_key?('DEBUG_POLICY_ENGINE')
# SSL peer verification is on by default for both the app and Excon/Fog.
config.ssl_verify_peer = true
Excon.defaults[:ssl_verify_peer] = true
# Setting ELEKTRA_SSL_VERIFY_PEER=false (exact string) disables verification
# everywhere, e.g. for landscapes with self-signed certificates.
if ENV.has_key?('ELEKTRA_SSL_VERIFY_PEER') and ENV['ELEKTRA_SSL_VERIFY_PEER'] == 'false'
config.ssl_verify_peer = false
# set ssl_verify_peer for Excon that is used in FOG to talk with openstack services
Excon.defaults[:ssl_verify_peer] = false
end
puts "=> SSL verify: #{config.ssl_verify_peer}"
############## REGION ###############
# NOTE(review): the fallback is an Array while the env value is a String --
# confirm that consumers of default_region handle both shapes.
config.default_region = ENV['MONSOON_DASHBOARD_REGION'] || ['eu-de-1','staging','europe']
############## CLOUD ADMIN ###############
config.cloud_admin_domain = ENV.fetch('MONSOON_OPENSTACK_CLOUDADMIN_DOMAIN', 'ccadmin')
config.cloud_admin_project = ENV.fetch('MONSOON_OPENSTACK_CLOUDADMIN_PROJECT', 'cloud_admin')
############## SERVICE USER #############
config.service_user_id = ENV['MONSOON_OPENSTACK_AUTH_API_USERID']
config.service_user_password = ENV['MONSOON_OPENSTACK_AUTH_API_PASSWORD']
config.service_user_domain_name = ENV['MONSOON_OPENSTACK_AUTH_API_DOMAIN']
config.default_domain = ENV['MONSOON_DASHBOARD_DEFAULT_DOMAIN'] || 'monsoon3'
# Mailer configuration for inquiries/requests
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
address: ENV['MONSOON_DASHBOARD_MAIL_SERVER'],
port: ENV['MONSOON_DASHBOARD_MAIL_SERVER_PORT'] || 25,
enable_starttls_auto: false
}
config.action_mailer.default_options = {
from: 'Converged Cloud <noreply+ConvergedCloud@sap.corp>'
}
end
end
|
require "git"
require "git-ssh-wrapper"
module GithubHerokuDeployer
  # Deploys a GitHub repository to Heroku: the repo is cloned (or pulled)
  # into a local per-repo cache folder and then force-pushed to the Heroku
  # git remote over SSH, authenticating with a caller-supplied private key.
  class Git
    # @param options [Hash] with keys:
    #   :heroku_repo [String] Heroku git remote URL
    #   :github_repo [String] GitHub clone URL (also keys the local cache dir)
    #   :id_rsa      [String] private key material used for SSH authentication
    #   :logger      [#info]  receives progress messages and command output
    #   :repo_dir    [String] root directory holding the local repo caches
    def initialize(options)
      @heroku_repo = options[:heroku_repo]
      @github_repo = options[:github_repo]
      @id_rsa = options[:id_rsa]
      @logger = options[:logger]
      @repo_dir = options[:repo_dir]
    end

    # Force-push the cached repo to Heroku. Yields the repo to the optional
    # block before pushing so callers can modify it (commit files, etc.).
    # Pushes branch:branch so the remote branch keeps the local name.
    def push_app_to_heroku(remote="heroku", branch="master", &block)
      wrapper = ssh_wrapper
      # Re-create the remote so a stale URL from a previous run is replaced.
      run "cd #{repo.dir}; git remote rm #{remote}" if repo.remote(remote).url
      repo.add_remote(remote, @heroku_repo)
      yield(repo) if block_given?
      @logger.info "deploying #{repo.dir} to #{repo.remote(remote).url} from branch #{branch}"
      run "cd #{repo.dir}; env #{wrapper.git_ssh} git push -f #{remote} #{branch}:#{branch}"
    ensure
      # Guard: ssh_wrapper itself may have raised before assignment.
      wrapper.unlink if wrapper
    end

    # Local working copy, cloned or updated on first access.
    def repo
      @repo ||= setup_repo
    end

    def setup_repo
      clone_or_pull
      open
    end

    # Cache directory for this GitHub repo, created on first access.
    def folder
      @folder ||= setup_folder
    end

    def setup_folder
      # CRC32 of the clone URL gives a stable, filesystem-safe folder name.
      folder = File.join(@repo_dir, Zlib.crc32(@github_repo).to_s)
      FileUtils.mkdir_p(folder)
      folder
    end

    def clone_or_pull
      !exists_locally? ? clone : pull
    end

    # True when the cache folder already contains a git checkout.
    def exists_locally?
      # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
      File.exist?(File.join(folder, ".git", "config"))
    end

    def clone
      wrapper = ssh_wrapper
      @logger.info "cloning #{@github_repo} to #{folder}"
      run "env #{wrapper.git_ssh} git clone #{@github_repo} #{folder}"
    ensure
      wrapper.unlink if wrapper
    end

    def pull
      wrapper = ssh_wrapper
      dir = Dir.pwd # need to cd back to here
      @logger.info "pulling from #{folder}"
      run "cd #{folder}; env #{wrapper.git_ssh} git pull; cd #{dir}"
    ensure
      wrapper.unlink if wrapper
    end

    def open
      ::Git.open(folder)
    end

    def ssh_wrapper
      GitSSHWrapper.new(private_key_path: id_rsa_path)
    end

    # Materialize the private key into a temp file and return its path.
    # The Tempfile is kept in an ivar so the GC finalizer cannot unlink the
    # backing file while an external git process is still using it.
    def id_rsa_path
      @id_rsa_file = Tempfile.new("id_rsa")
      @id_rsa_file.write(@id_rsa)
      @id_rsa_file.rewind
      @id_rsa_file.path
    end

    # Run a shell command, logging its combined stdout/stderr on success and
    # raising CommandException (with the output as message) on failure.
    def run(command)
      result = `#{command} 2>&1`
      status = $?.exitstatus
      if status == 0
        @logger.info result
      else
        raise GithubHerokuDeployer::CommandException, result
      end
    end
  end
end
Revert "keep branch:as-branch"
This reverts commit abc635a9646f7e369a1f06efa624696144dad2d6.
require "git"
require "git-ssh-wrapper"

module GithubHerokuDeployer
  # Deploys a GitHub repository to Heroku: the repo is cloned (or pulled)
  # into a local per-repo cache folder and then force-pushed to the Heroku
  # git remote over SSH, authenticating with a caller-supplied private key.
  class Git
    # @param options [Hash] with keys:
    #   :heroku_repo [String] Heroku git remote URL
    #   :github_repo [String] GitHub clone URL (also keys the local cache dir)
    #   :id_rsa      [String] private key material used for SSH authentication
    #   :logger      [#info]  receives progress messages and command output
    #   :repo_dir    [String] root directory holding the local repo caches
    def initialize(options)
      @heroku_repo = options[:heroku_repo]
      @github_repo = options[:github_repo]
      @id_rsa = options[:id_rsa]
      @logger = options[:logger]
      @repo_dir = options[:repo_dir]
    end

    # Force-push the cached repo to Heroku. Yields the repo to the optional
    # block before pushing so callers can modify it (commit files, etc.).
    # Pushes the branch under the remote's default ref mapping.
    def push_app_to_heroku(remote="heroku", branch="master", &block)
      wrapper = ssh_wrapper
      # Re-create the remote so a stale URL from a previous run is replaced.
      run "cd #{repo.dir}; git remote rm #{remote}" if repo.remote(remote).url
      repo.add_remote(remote, @heroku_repo)
      yield(repo) if block_given?
      @logger.info "deploying #{repo.dir} to #{repo.remote(remote).url} from branch #{branch}"
      run "cd #{repo.dir}; env #{wrapper.git_ssh} git push -f #{remote} #{branch}"
    ensure
      # Guard: ssh_wrapper itself may have raised before assignment.
      wrapper.unlink if wrapper
    end

    # Local working copy, cloned or updated on first access.
    def repo
      @repo ||= setup_repo
    end

    def setup_repo
      clone_or_pull
      open
    end

    # Cache directory for this GitHub repo, created on first access.
    def folder
      @folder ||= setup_folder
    end

    def setup_folder
      # CRC32 of the clone URL gives a stable, filesystem-safe folder name.
      folder = File.join(@repo_dir, Zlib.crc32(@github_repo).to_s)
      FileUtils.mkdir_p(folder)
      folder
    end

    def clone_or_pull
      !exists_locally? ? clone : pull
    end

    # True when the cache folder already contains a git checkout.
    def exists_locally?
      # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
      File.exist?(File.join(folder, ".git", "config"))
    end

    def clone
      wrapper = ssh_wrapper
      @logger.info "cloning #{@github_repo} to #{folder}"
      run "env #{wrapper.git_ssh} git clone #{@github_repo} #{folder}"
    ensure
      wrapper.unlink if wrapper
    end

    def pull
      wrapper = ssh_wrapper
      dir = Dir.pwd # need to cd back to here
      @logger.info "pulling from #{folder}"
      run "cd #{folder}; env #{wrapper.git_ssh} git pull; cd #{dir}"
    ensure
      wrapper.unlink if wrapper
    end

    def open
      ::Git.open(folder)
    end

    def ssh_wrapper
      GitSSHWrapper.new(private_key_path: id_rsa_path)
    end

    # Materialize the private key into a temp file and return its path.
    # The Tempfile is kept in an ivar so the GC finalizer cannot unlink the
    # backing file while an external git process is still using it.
    def id_rsa_path
      @id_rsa_file = Tempfile.new("id_rsa")
      @id_rsa_file.write(@id_rsa)
      @id_rsa_file.rewind
      @id_rsa_file.path
    end

    # Run a shell command, logging its combined stdout/stderr on success and
    # raising CommandException (with the output as message) on failure.
    def run(command)
      result = `#{command} 2>&1`
      status = $?.exitstatus
      if status == 0
        @logger.info result
      else
        raise GithubHerokuDeployer::CommandException, result
      end
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
$: << File.expand_path('../../lib', __FILE__)
require 'kor'
require 'securerandom'
module Kor
# Rails application configuration for the kor application.
class Application < Rails::Application
config.autoload_paths << "#{Rails.root}/lib"
config.assets.js_compressor = :uglifier
# Standalone asset bundles that are not reachable from the default manifests.
config.assets.precompile += ["kor.js", "blaze.js", "master.css", "blaze.css", "kor_index.js", "kor_index.css"]
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.available_locales = [:de, :en]
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
config.active_job.queue_adapter = :delayed_job
# Read-only (GET/OPTIONS) cross-origin access for the configured origins.
config.middleware.insert_before 0, "Rack::Cors" do
allow do
origins *Kor::Config.instance['allowed_origins']
resource '*', headers: :any, methods: [:get, :options]
end
end
# Keep empty arrays in params instead of deep-munging them to nil.
config.action_dispatch.perform_deep_munge = false
# Configure ActionMailer from kor's own config file: pick the delivery
# method and apply its matching "<method>_settings" section (if any).
initializer 'action_mailer.set_configs' do
if mc = Kor::Config.instance['mail']
dm = mc['delivery_method'].to_sym
config.action_mailer.delivery_method = dm
c = (mc["#{dm}_settings"] || {}).symbolize_keys
config.action_mailer.send("#{dm}_settings=".to_sym, c)
end
end
end
end
# TODO: better test fields_controller.rb
# TODO: better test generators_controller.rb
# TODO: test putting a whole authority group to the clipboard
# TODO: test random query for more than 4 entities
# TODO: test mailers and that they are used
# TODO: move all js templates to misc.html.erb or partial them from there
# TODO: test downloads_controller
# TODO: make sure there are tests for storing serialized attributes: dataset,
# properties, datings, synonyms, relationship properties
# TODO: merge entity group tables?
# TODO: add @javascript tag to all feature tests
# TODO: when deleting relationships and that completely empties the second or a
# higher page, the previous page should be loaded
# TODO: integration tests for tools: mass_destroy, add_to_authority_group,
# add_to_user_group, move_to_collection, remove_from_authority_group,
# remove_from_user_group
# TODO: integration test for reset clipboard
# TODO: make sure in js kind_id == 1 isn't assumed to ensure medium kind
# TODO: remove new_datings_attributes and existing_datings_attributes
# TODO: upgrade elasticsearch
# TODO: add tests for the command line tool
# TODO: make sure that time zones are handled correctly from http content type to db
# TODO: angular: remove flashing of unloaded page areas and remove flashing of strange "select <some HEX>" within media relations
# TODO: handle stale object errors on json apis
# TODO: use jbuilder without exception for api responses
# TODO: make image and video styles configurable live
# TODO: develop commenting policy
# TODO: replace fake_authentication and classic data_helper
# TODO: check helpers for redundant code
# TODO: remove redundant code and comments from old js files
# TODO: remove comments everywhere
# TODO: re-enable still extraction for videos
# TODO: make an indicator for not-yet-processed media (use special dummy)
# TODO: use https://github.com/bkeepers/dotenv
# TODO: session panel is not visible on welcome page
# TODO: unify test setup steps
# TODO: unify default params and sanitation for pagination scopes
# TODO: instead of describing config defaults in the readme, refer to kor.defaults.yml which should also yield descriptions
# TODO: clean up translation files (remove obsolete models)
# TODO: when replacing sprockets, simulate checksum behaviour to provoke correct cache expiries
# TODO: use json.extract! whenever possible
# TODO: replace extended json views with customized json views
# TODO: in json responses, include errors for models
# TODO: unify save.json.jbuilder files
# TODO: handle base errors on riot pages
# TODO: make the busy wheel only show when necessary (e.g. doesn't switch off after error)
# TODO: make all json endpoints comply with response policy
# TODO: rename Field.show_label to Field.label
# TODO: fix spinning wheel for riot, angular and all other ajax
# TODO: use zlib from stdlib instead of the gem?
# TODO: use "un" from stdlib to find graph communities?
# TODO: use neo transactions to effectively clear the store after tests
# TODO: change denied redirect to denied action rendering
# TODO: use http://errbit.com/ instead of custom exception logger
# TODO: test changing of kind inheritance to update their entities dataset and
# that relation inheritance uses the ancestry to show available relations
# when creating relationships
# TODO: add consistent optimistic locking
# TODO: clean up widget directory
# * main ticket: https://github.com/coneda/kor/issues/53
# * Import Erlangen CRM, how to expose functionality?
# * TODO (15h = 8h):
# * only a single type for relationship ends (3h)
# https://github.com/coneda/kor/issues/94
# * expose hierarchy in API (1h): https://github.com/coneda/kor/issues/113
# * more tests for inheritable relations? (3h)
# * make existing tests pass (3h)
# * wrap up (5h)
# * feedback loops (8h)
removed comments
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
$: << File.expand_path('../../lib', __FILE__)
require 'kor'
require 'securerandom'
module Kor
# Rails application configuration for the kor application.
class Application < Rails::Application
config.autoload_paths << "#{Rails.root}/lib"
config.assets.js_compressor = :uglifier
# Standalone asset bundles that are not reachable from the default manifests.
config.assets.precompile += ["kor.js", "blaze.js", "master.css", "blaze.css", "kor_index.js", "kor_index.css"]
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.available_locales = [:de, :en]
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
config.active_job.queue_adapter = :delayed_job
# Read-only (GET/OPTIONS) cross-origin access for the configured origins.
config.middleware.insert_before 0, "Rack::Cors" do
allow do
origins *Kor::Config.instance['allowed_origins']
resource '*', headers: :any, methods: [:get, :options]
end
end
# Keep empty arrays in params instead of deep-munging them to nil.
config.action_dispatch.perform_deep_munge = false
# Configure ActionMailer from kor's own config file: pick the delivery
# method and apply its matching "<method>_settings" section (if any).
initializer 'action_mailer.set_configs' do
if mc = Kor::Config.instance['mail']
dm = mc['delivery_method'].to_sym
config.action_mailer.delivery_method = dm
c = (mc["#{dm}_settings"] || {}).symbolize_keys
config.action_mailer.send("#{dm}_settings=".to_sym, c)
end
end
end
end
# TODO: better test fields_controller.rb
# TODO: better test generators_controller.rb
# TODO: test putting a whole authority group to the clipboard
# TODO: test random query for more than 4 entities
# TODO: test mailers and that they are used
# TODO: move all js templates to misc.html.erb or partial them from there
# TODO: test downloads_controller
# TODO: make sure there are tests for storing serialized attributes: dataset,
# properties, datings, synonyms, relationship properties
# TODO: merge entity group tables?
# TODO: add @javascript tag to all feature tests
# TODO: when deleting relationships and that completely empties the second or a
# higher page, the previous page should be loaded
# TODO: integration tests for tools: mass_destroy, add_to_authority_group,
# add_to_user_group, move_to_collection, remove_from_authority_group,
# remove_from_user_group
# TODO: integration test for reset clipboard
# TODO: make sure in js kind_id == 1 isn't assumed to ensure medium kind
# TODO: remove new_datings_attributes and existing_datings_attributes
# TODO: upgrade elasticsearch
# TODO: add tests for the command line tool
# TODO: make sure that time zones are handled correctly from http content type to db
# TODO: angular: remove flashing of unloaded page areas and remove flashing of strange "select <some HEX>" within media relations
# TODO: handle stale object errors on json apis
# TODO: use jbuilder without exception for api responses
# TODO: make image and video styles configurable live
# TODO: develop commenting policy
# TODO: replace fake_authentication and classic data_helper
# TODO: check helpers for redundant code
# TODO: remove redundant code and comments from old js files
# TODO: remove comments everywhere
# TODO: re-enable still extraction for videos
# TODO: make an indicator for not-yet-processed media (use special dummy)
# TODO: use https://github.com/bkeepers/dotenv
# TODO: session panel is not visible on welcome page
# TODO: unify test setup steps
# TODO: unify default params and sanitation for pagination scopes
# TODO: instead of describing config defaults in the readme, refer to kor.defaults.yml which should also yield descriptions
# TODO: clean up translation files (remove obsolete models)
# TODO: when replacing sprockets, simulate checksum behaviour to provoke correct cache expiries
# TODO: use json.extract! whenever possible
# TODO: replace extended json views with customized json views
# TODO: in json responses, include errors for models
# TODO: unify save.json.jbuilder files
# TODO: handle base errors on riot pages
# TODO: make the busy wheel only show when necessary (e.g. doesn't switch off after error)
# TODO: make all json endpoints comply with response policy
# TODO: rename Field.show_label to Field.label
# TODO: fix spinning wheel for riot, angular and all other ajax
# TODO: use zlib from stdlib instead of the gem?
# TODO: use "un" from stdlib to find graph communities?
# TODO: use neo transactions to effectively clear the store after tests
# TODO: change denied redirect to denied action rendering
# TODO: use http://errbit.com/ instead of custom exception logger
# TODO: test changing of kind inheritance to update their entities dataset and
# that relation inheritance uses the ancestry to show available relations
# when creating relationships
# TODO: add consistent optimistic locking
# TODO: clean up widget directory
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module GloboDns
# Logger decorator that duplicates every message into an in-memory StringIO
# (retrievable via #string) while also forwarding it to the wrapped logger,
# tagging error/warning lines.
# NOTE(review): ActiveSupport is a *module*, so `class ... < ActiveSupport`
# raises "superclass must be a Class (Module given)" at load time. The
# helpers used below (tags_text, current_tags, @logger) suggest a
# tagged-logging parent was intended -- confirm and fix the superclass.
class StringIOLogger < ActiveSupport
# class StringIOLogger
# include ActiveSupport::TaggedLogging
def initialize(logger)
super(logger)
@sio = StringIO.new('', 'w')
@sio_logger = Logger.new(@sio)
end
# Write to both the internal StringIO logger and the wrapped logger,
# prefixing the current tag stack (tags_text comes from the parent).
def add(severity, message = nil, progname = nil, &block)
message = (block_given? ? block.call : progname) if message.nil?
@sio_logger.add(severity, "#{tags_text}#{message}", progname)
@logger.add(severity, "#{tags_text}#{message}", progname)
end
# Everything logged so far, as a single String.
def string
@sio.string
end
# Tag error lines with 'ERROR' for the duration of the call.
def error(*args)
current_tags << 'ERROR'
rv = super(*args)
current_tags.pop
rv
end
# Tag warning lines with 'WARNING' for the duration of the call.
def warn(*args)
current_tags << 'WARNING'
rv = super(*args)
current_tags.pop
rv
end
end
end
Trying to fix the error 'superclass must be a Class (Module given)'
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module GloboDns
# Logger decorator that duplicates every message into an in-memory StringIO
# (retrievable via #string) while also forwarding it to the wrapped logger,
# tagging error/warning lines.
# NOTE(review): ActiveSupport::TestCase is a Class (so this loads), but it
# is a test-case base class, not a logger; it does not provide tags_text,
# current_tags or @logger used below. Confirm the intended superclass.
class StringIOLogger < ActiveSupport::TestCase
# class StringIOLogger
# include ActiveSupport::TaggedLogging
def initialize(logger)
super(logger)
@sio = StringIO.new('', 'w')
@sio_logger = Logger.new(@sio)
end
# Write to both the internal StringIO logger and the wrapped logger,
# prefixing the current tag stack (tags_text comes from the parent).
def add(severity, message = nil, progname = nil, &block)
message = (block_given? ? block.call : progname) if message.nil?
@sio_logger.add(severity, "#{tags_text}#{message}", progname)
@logger.add(severity, "#{tags_text}#{message}", progname)
end
# Everything logged so far, as a single String.
def string
@sio.string
end
# Tag error lines with 'ERROR' for the duration of the call.
def error(*args)
current_tags << 'ERROR'
rv = super(*args)
current_tags.pop
rv
end
# Tag warning lines with 'WARNING' for the duration of the call.
def warn(*args)
current_tags << 'WARNING'
rv = super(*args)
current_tags.pop
rv
end
end
end
|
# frozen_string_literal: true
require 'money'
# Use the application's I18n data for currency/number formatting instead of
# the Money gem's built-in locale tables.
Money.locale_backend = :i18n
Explicitly set Money rounding to ROUND_HALF_EVEN
This is the current default value, but it will change in
the Money gem in some future release (v7).
People can start adhering to that value by setting
Money.rounding_mode = BigDecimal::ROUND_HALF_UP
in their own application. For now, ROUND_HALF_EVEN is what we have used so
far, and we should not change it for existing applications.
Ref: https://github.com/RubyMoney/money/pull/883
# frozen_string_literal: true
require 'money'
# Use the application's I18n data for currency/number formatting instead of
# the Money gem's built-in locale tables.
Money.locale_backend = :i18n
# Pin the historical default explicitly: the Money gem plans to change its
# default rounding mode in a future major release, and existing applications
# should keep banker's rounding unchanged.
# Ref: https://github.com/RubyMoney/money/pull/883
Money.rounding_mode = BigDecimal::ROUND_HALF_EVEN
|
#
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/resource'
module Halite
module SpecHelper
# Utility methods to patch a resource or provider class in to Chef for the
# duration of a block.
#
# @since 1.0.0
# @api private
module Patcher
# Patch a class in to Chef for the duration of a block.
#
# @param name [String, Symbol] Name to create in snake-case (eg. :my_name).
# @param klass [Class] Class to patch in.
# @param mod [Module] Optional module to create a constant in.
# @param block [Proc] Block to execute while the patch is available.
# @return [void]
def self.patch(name, klass, mod=nil, &block)
# Each patch_* layer is a no-op on Chef versions lacking the relevant
# registry; the nesting guarantees every registration is undone on unwind
# (each layer cleans up in its own ensure).
patch_descendants_tracker(klass) do
patch_node_map(name, klass) do
patch_priority_map(name, klass) do
patch_recipe_dsl(name, klass) do
if mod
patch_module(mod, name, klass, &block)
else
block.call
end
end
end
end
end
end
# Perform any post-class-creation cleanup tasks to deal with compile time
# global registrations.
#
# @since 1.0.4
# @param name [String, Symbol] Name of the class that was created in
# snake-case (eg. :my_name).
# @param klass [Class] Newly created class.
# @return [void]
def self.post_create_cleanup(name, klass)
# Remove from DescendantsTracker.
Chef::Mixin::DescendantsTracker.direct_descendants(klass.superclass).delete(klass)
# Remove from the priority maps.
if priority_map = priority_map_for(klass)
# Make sure we add name in there too because anonymous classes don't
# get a priority map registration by default.
removed_keys = remove_from_node_map(priority_map, klass) | [name.to_sym]
# This ivar is used down in #patch_priority_map to re-add the correct
# keys based on the class definition.
klass.instance_variable_set(:@halite_original_priority_keys, removed_keys)
end
# Remove from the global node map.
# (The removed keys are stashed on the class so the patch helpers can
# restore exactly the original registrations later.)
if defined?(Chef::Resource.node_map)
removed_keys = remove_from_node_map(Chef::Resource.node_map, klass)
# Used down in patch_node_map.
klass.instance_variable_set(:@halite_original_nodemap_keys, removed_keys)
end
end
# Patch an object in to a global namespace for the duration of a block.
#
# @param mod [Module] Namespace to patch in to.
# @param name [String, Symbol] Name to create in snake-case (eg. :my_name).
# @param obj Object to patch in.
# @param block [Proc] Block to execute while the name is available.
# @return [void]
def self.patch_module(mod, name, obj, &block)
class_name = Chef::Mixin::ConvertToClassName.convert_to_class_name(name.to_s)
if mod.const_defined?(class_name, false)
old_class = mod.const_get(class_name, false)
# We are only allowed to patch over things installed by patch_module
raise "#{mod.name}::#{class_name} is already defined" if !old_class.instance_variable_get(:@poise_patch_module)
# Remove it before setting to avoid the redefinition warning
mod.send(:remove_const, class_name)
end
# Tag our objects so we know we are allowed to overwrite those, but not other stuff.
obj.instance_variable_set(:@poise_patch_module, true)
mod.const_set(class_name, obj)
begin
block.call
ensure
# Same as above, have to remove before set because warnings
# (and restore whatever constant we displaced, if any).
mod.send(:remove_const, class_name)
mod.const_set(class_name, old_class) if old_class
end
end
# Patch an object in to Chef's DescendantsTracker system for the duration
# of a code block.
#
# @param klass [Class] Class to patch in.
# @param block [Proc] Block to execute while the patch is available.
# @return [void]
def self.patch_descendants_tracker(klass, &block)
begin
# Re-add to tracking.
# (post_create_cleanup removed this registration right after the class
# was defined; restore it only while the block runs.)
Chef::Mixin::DescendantsTracker.store_inherited(klass.superclass, klass)
block.call
ensure
# Clean up after ourselves.
Chef::Mixin::DescendantsTracker.direct_descendants(klass.superclass).delete(klass)
end
end
# Patch a class in to its node_map. This is not used in 12.4+.
#
# @param name [Symbol] Name to patch in.
# @param klass [Class] Resource class to patch in.
# @param block [Proc] Block to execute while the patch is available.
# @return [void]
def self.patch_node_map(name, klass, &block)
# No-op on Chef versions where classes carry no node_map.
return block.call unless defined?(klass.node_map)
begin
# Technically this is set to true on >=12.4, but this should work.
# Re-register every key the class originally claimed, plus +name+.
keys = klass.instance_variable_get(:@halite_original_nodemap_keys) | [name.to_sym]
keys.each do |key|
klass.node_map.set(key, klass)
end
block.call
ensure
remove_from_node_map(klass.node_map, klass)
end
end
# Patch a resource in to Chef's recipe DSL for the duration of a code
# block. This is a no-op before Chef 12.4.
#
# @param name [Symbol] Name to patch in.
# @param klass [Class] Resource class to patch in.
# @param block [Proc] Block to execute while the patch is available.
# @return [void]
def self.patch_recipe_dsl(name, klass, &block)
# Only applies to resources, and only when the DSL registration API exists.
return block.call unless defined?(Chef::DSL::Resources.add_resource_dsl) && klass < Chef::Resource
begin
Chef::DSL::Resources.add_resource_dsl(name)
block.call
ensure
Chef::DSL::Resources.remove_resource_dsl(name)
end
end
# Patch a class in to the correct priority map for the duration of a code
# block. This is a no-op before Chef 12.4.
#
# @since 1.0.4
# @param name [Symbol] Name to patch in.
# @param klass [Class] Resource or provider class to patch in.
# @param block [Proc] Block to execute while the patch is available.
# @return [void]
def self.patch_priority_map(name, klass, &block)
  priority_map = priority_map_for(klass)
  return block.call unless priority_map
  begin
    # Fall back to the requested name when post_create_cleanup never stashed
    # the original keys for this class; `nil.each` would raise otherwise.
    keys = klass.instance_variable_get(:@halite_original_priority_keys) || [name.to_sym]
    keys.each do |key|
      # Unlike patch_node_map, this has to be an array!
      priority_map.set(key, [klass])
    end
    block.call
  ensure
    remove_from_node_map(priority_map, klass)
  end
end
private

# Find the global priority map for a class.
#
# NOTE(review): the `private` marker above has no effect on `def self.`
# singleton methods, so these helpers remain technically public — confirm
# before relying on their visibility.
#
# @since 1.0.4
# @param klass [Class] Resource or provider class to look up.
# @return [nil, Chef::Platform::ResourcePriorityMap, Chef::Platform::ProviderPriorityMap]
def self.priority_map_for(klass)
  if defined?(Chef.resource_priority_map) && klass < Chef::Resource
    Chef.resource_priority_map
  elsif defined?(Chef.provider_priority_map) && klass < Chef::Provider
    Chef.provider_priority_map
  end
end
# Remove a value from a Chef::NodeMap. Returns the keys that were removed.
#
# @since 1.0.4
# @param node_map [Chef::NodeMap] Node map to remove from.
# @param value [Object] Value to remove.
# @return [Array<Symbol>]
def self.remove_from_node_map(node_map, value)
  # Sigh.
  removed_keys = []
  # 12.4.1+ switched this to a private accessor and lazy init.
  # Pass `true` so respond_to? also sees private methods; without it we fall
  # through to reading @map directly, which is nil before the lazy accessor
  # has ever run.
  map = if node_map.respond_to?(:map, true)
    node_map.send(:map)
  else
    node_map.instance_variable_get(:@map)
  end
  map.each do |key, matchers|
    matchers.delete_if do |matcher|
      # In 12.4+ this value is an array of classes, before that it is the class.
      if matcher[:value].is_a?(Array)
        matcher[:value].include?(value)
      else
        matcher[:value] == value
      end && removed_keys << key # Track removed keys in a hacky way.
    end
  end
  removed_keys
end
end
end
end
Need to ask `respond_to?` to include private methods too.
That is as dirty as it sounds.
#
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/resource'
module Halite
  module SpecHelper
    # Utility methods to patch a resource or provider class in to Chef for the
    # duration of a block.
    #
    # @since 1.0.0
    # @api private
    module Patcher
      # Patch a class in to Chef for the duration of a block.
      #
      # Composes all the individual patchers below; each nested patcher undoes
      # itself when the innermost block returns, even on error.
      #
      # @param name [String, Symbol] Name to create in snake-case (eg. :my_name).
      # @param klass [Class] Class to patch in.
      # @param mod [Module] Optional module to create a constant in.
      # @param block [Proc] Block to execute while the patch is available.
      # @return [void]
      def self.patch(name, klass, mod=nil, &block)
        patch_descendants_tracker(klass) do
          patch_node_map(name, klass) do
            patch_priority_map(name, klass) do
              patch_recipe_dsl(name, klass) do
                if mod
                  patch_module(mod, name, klass, &block)
                else
                  block.call
                end
              end
            end
          end
        end
      end

      # Perform any post-class-creation cleanup tasks to deal with compile time
      # global registrations.
      #
      # @since 1.0.4
      # @param name [String, Symbol] Name of the class that was created in
      #   snake-case (eg. :my_name).
      # @param klass [Class] Newly created class.
      # @return [void]
      def self.post_create_cleanup(name, klass)
        # Remove from DescendantsTracker.
        Chef::Mixin::DescendantsTracker.direct_descendants(klass.superclass).delete(klass)
        # Remove from the priority maps.
        if priority_map = priority_map_for(klass)
          # Make sure we add name in there too because anonymous classes don't
          # get a priority map registration by default.
          removed_keys = remove_from_node_map(priority_map, klass) | [name.to_sym]
          # This ivar is used down in #patch_priority_map to re-add the correct
          # keys based on the class definition.
          klass.instance_variable_set(:@halite_original_priority_keys, removed_keys)
        end
        # Remove from the global node map.
        if defined?(Chef::Resource.node_map)
          removed_keys = remove_from_node_map(Chef::Resource.node_map, klass)
          # Used down in patch_node_map.
          klass.instance_variable_set(:@halite_original_nodemap_keys, removed_keys)
        end
      end

      # Patch an object in to a global namespace for the duration of a block.
      #
      # @param mod [Module] Namespace to patch in to.
      # @param name [String, Symbol] Name to create in snake-case (eg. :my_name).
      # @param obj Object to patch in.
      # @param block [Proc] Block to execute while the name is available.
      # @return [void]
      def self.patch_module(mod, name, obj, &block)
        class_name = Chef::Mixin::ConvertToClassName.convert_to_class_name(name.to_s)
        if mod.const_defined?(class_name, false)
          old_class = mod.const_get(class_name, false)
          # We are only allowed to patch over things installed by patch_module
          raise "#{mod.name}::#{class_name} is already defined" if !old_class.instance_variable_get(:@poise_patch_module)
          # Remove it before setting to avoid the redefinition warning
          mod.send(:remove_const, class_name)
        end
        # Tag our objects so we know we are allowed to overwrite those, but not other stuff.
        obj.instance_variable_set(:@poise_patch_module, true)
        mod.const_set(class_name, obj)
        begin
          block.call
        ensure
          # Same as above, have to remove before set because warnings
          mod.send(:remove_const, class_name)
          mod.const_set(class_name, old_class) if old_class
        end
      end

      # Patch an object in to Chef's DescendantsTracker system for the duration
      # of a code block.
      #
      # @param klass [Class] Class to patch in.
      # @param block [Proc] Block to execute while the patch is available.
      # @return [void]
      def self.patch_descendants_tracker(klass, &block)
        begin
          # Re-add to tracking.
          Chef::Mixin::DescendantsTracker.store_inherited(klass.superclass, klass)
          block.call
        ensure
          # Clean up after ourselves.
          Chef::Mixin::DescendantsTracker.direct_descendants(klass.superclass).delete(klass)
        end
      end

      # Patch a class in to its node_map. This is not used in 12.4+.
      #
      # @param name [Symbol] Name to patch in.
      # @param klass [Class] Resource class to patch in.
      # @param block [Proc] Block to execute while the patch is available.
      # @return [void]
      def self.patch_node_map(name, klass, &block)
        return block.call unless defined?(klass.node_map)
        begin
          # Technically this is set to true on >=12.4, but this should work.
          keys = klass.instance_variable_get(:@halite_original_nodemap_keys) | [name.to_sym]
          keys.each do |key|
            klass.node_map.set(key, klass)
          end
          block.call
        ensure
          remove_from_node_map(klass.node_map, klass)
        end
      end

      # Patch a resource in to Chef's recipe DSL for the duration of a code
      # block. This is a no-op before Chef 12.4.
      #
      # @param name [Symbol] Name to patch in.
      # @param klass [Class] Resource class to patch in.
      # @param block [Proc] Block to execute while the patch is available.
      # @return [void]
      def self.patch_recipe_dsl(name, klass, &block)
        return block.call unless defined?(Chef::DSL::Resources.add_resource_dsl) && klass < Chef::Resource
        begin
          Chef::DSL::Resources.add_resource_dsl(name)
          block.call
        ensure
          Chef::DSL::Resources.remove_resource_dsl(name)
        end
      end

      # Patch a class in to the correct priority map for the duration of a code
      # block. This is a no-op before Chef 12.4.
      #
      # @since 1.0.4
      # @param name [Symbol] Name to patch in.
      # @param klass [Class] Resource or provider class to patch in.
      # @param block [Proc] Block to execute while the patch is available.
      # @return [void]
      def self.patch_priority_map(name, klass, &block)
        priority_map = priority_map_for(klass)
        return block.call unless priority_map
        begin
          # Unlike patch_node_map, this has to be an array!
          klass.instance_variable_get(:@halite_original_priority_keys).each do |key|
            priority_map.set(key, [klass])
          end
          block.call
        ensure
          remove_from_node_map(priority_map, klass)
        end
      end

      private

      # Find the global priority map for a class.
      #
      # NOTE(review): the `private` marker above has no effect on `def self.`
      # singleton methods, so these helpers remain technically public.
      #
      # @since 1.0.4
      # @param klass [Class] Resource or provider class to look up.
      # @return [nil, Chef::Platform::ResourcePriorityMap, Chef::Platform::ProviderPriorityMap]
      def self.priority_map_for(klass)
        if defined?(Chef.resource_priority_map) && klass < Chef::Resource
          Chef.resource_priority_map
        elsif defined?(Chef.provider_priority_map) && klass < Chef::Provider
          Chef.provider_priority_map
        end
      end

      # Remove a value from a Chef::NodeMap. Returns the keys that were removed.
      #
      # @since 1.0.4
      # @param node_map [Chef::NodeMap] Node map to remove from.
      # @param value [Object] Value to remove.
      # @return [Array<Symbol>]
      def self.remove_from_node_map(node_map, value)
        # Sigh.
        removed_keys = []
        # 12.4.1+ switched this to a private accessor and lazy init; the
        # second respond_to? argument makes private methods visible too.
        map = if node_map.respond_to?(:map, true)
          node_map.send(:map)
        else
          node_map.instance_variable_get(:@map)
        end
        map.each do |key, matchers|
          matchers.delete_if do |matcher|
            # In 12.4+ this value is an array of classes, before that it is the class.
            if matcher[:value].is_a?(Array)
              matcher[:value].include?(value)
            else
              matcher[:value] == value
            end && removed_keys << key # Track removed keys in a hacky way.
          end
        end
        removed_keys
      end
    end
  end
end
|
require File.expand_path('../boot', __FILE__)

require "rails/all"

# Require the gems listed in the Gemfile. Bundler may be absent when the app
# is booted without `bundle exec`, hence the defined? guard.
if defined?(Bundler)
  Bundler.require(:default, Rails.env)
end

module Shouldifollow
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = "1.1"

    # Disable Ruby's buffering of stdout in dev mode so foreman receives output.
    if Rails.env.development?
      $stdout.sync = true
    end
  end
end
enable compression
require File.expand_path('../boot', __FILE__)

require "rails/all"

# Require the gems listed in the Gemfile. Bundler may be absent when the app
# is booted without `bundle exec`, hence the defined? guard.
if defined?(Bundler)
  Bundler.require(:default, Rails.env)
end

module Shouldifollow
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = "1.1"

    # Disable Ruby's buffering of stdout in dev mode so foreman receives output.
    if Rails.env.development?
      $stdout.sync = true
    end

    # Enable gzip/deflate compression of HTTP responses.
    config.middleware.use Rack::Deflater
  end
end
|
# Gem version constant for heading_with_title.
module HeadingWithTitle
  VERSION = "0.0.3"
end
Bump to 0.0.4
# Gem version constant for heading_with_title.
module HeadingWithTitle
  VERSION = "0.0.4"
end
|
require File.expand_path('boot', __dir__)

require 'rails/all'
require_relative 'application_name'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Load environment variables from .env before the application configures itself.
Dotenv::Railtie.load

module TPS
  class Application < Rails::Application
    config.load_defaults 6.0

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # The default autoloader since Rails 6.0 defaults is zeitwerk.
    # However, to split the work, we will move to zeitwerk only in a future PR.
    config.autoloader = :classic

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :fr
    config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')]
    config.i18n.available_locales = [:fr]

    # Eager-load lib/ alongside the app code.
    config.paths.add "#{config.root}/lib", eager_load: true

    config.assets.paths << Rails.root.join('app', 'assets', 'javascript')
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
    config.assets.precompile += ['.woff']

    config.active_job.queue_adapter = :delayed_job

    # The default list used to be accessible through `ActionView::Base.sanitized_allowed_tags`,
    # but a regression in Rails 6.0 makes it unavailable.
    # It should be fixed in Rails 6.1.
    # See https://github.com/rails/rails/issues/39586
    # default_allowed_tags = ActionView::Base.sanitized_allowed_tags
    default_allowed_tags = ['strong', 'em', 'b', 'i', 'p', 'code', 'pre', 'tt', 'samp', 'kbd', 'var', 'sub', 'sup', 'dfn', 'cite', 'big', 'small', 'address', 'hr', 'br', 'div', 'span', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'ul', 'ol', 'li', 'dl', 'dt', 'dd', 'abbr', 'acronym', 'a', 'img', 'blockquote', 'del', 'ins']
    config.action_view.sanitized_allowed_tags = default_allowed_tags + ['u']

    # Since Rails 5.0, this option is enabled by default.
    # However we keep it disabled for now, because many of our specs and factories
    # do not build the required associations properly.
    # TODO: fix the specs, and enable this option.
    config.active_record.belongs_to_required_by_default = false

    # Some mobile browsers have a behaviour where, although they will delete the session
    # cookie when the browser shutdowns, they will still serve a cached version
    # of the page on relaunch.
    # The CSRF token in the HTML is then mismatched with the CSRF token in the session cookie
    # (because the session cookie has been cleared). This causes form submissions to fail with
    # a "ActionController::InvalidAuthenticityToken" exception.
    # To prevent this, tell browsers to never cache the HTML of a page.
    # (This doesn’t affect assets files, which are still sent with the proper cache headers).
    #
    # See https://github.com/rails/rails/issues/21948
    config.action_dispatch.default_headers['Cache-Control'] = 'no-store, no-cache'

    config.to_prepare do
      # Make main application helpers available in administrate
      Administrate::ApplicationController.helper(TPS::Application.helpers)
    end

    config.middleware.use Rack::Attack
    config.middleware.use Flipper::Middleware::Memoizer, preload_all: true

    config.ds_weekly_overview = ENV['APP_NAME'] == 'tps'
    config.ds_autosave = {
      debounce_delay: 3000,
      status_visible_duration: 6000
    }

    config.skylight.probes += [:graphql]
  end
end
config: don't explicitly eager-load lib/
Rationale:
- `lib/` is supposed to contain code mostly independent from Rails;
- By default, Rails doesn't eager-load `lib/` anymore (this used to be
the case, but hasn't been for a few releases).
If this commit triggers some errors, then these errors should be fixed
(rather than `lib/` being added back to the load path).
require File.expand_path('boot', __dir__)

require 'rails/all'
require_relative 'application_name'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Load environment variables from .env before the application configures itself.
Dotenv::Railtie.load

module TPS
  class Application < Rails::Application
    config.load_defaults 6.0

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # The default autoloader since Rails 6.0 defaults is zeitwerk.
    # However, to split the work, we will move to zeitwerk only in a future PR.
    config.autoloader = :classic

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :fr
    config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')]
    config.i18n.available_locales = [:fr]

    config.assets.paths << Rails.root.join('app', 'assets', 'javascript')
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
    config.assets.precompile += ['.woff']

    config.active_job.queue_adapter = :delayed_job

    # The default list used to be accessible through `ActionView::Base.sanitized_allowed_tags`,
    # but a regression in Rails 6.0 makes it unavailable.
    # It should be fixed in Rails 6.1.
    # See https://github.com/rails/rails/issues/39586
    # default_allowed_tags = ActionView::Base.sanitized_allowed_tags
    default_allowed_tags = ['strong', 'em', 'b', 'i', 'p', 'code', 'pre', 'tt', 'samp', 'kbd', 'var', 'sub', 'sup', 'dfn', 'cite', 'big', 'small', 'address', 'hr', 'br', 'div', 'span', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'ul', 'ol', 'li', 'dl', 'dt', 'dd', 'abbr', 'acronym', 'a', 'img', 'blockquote', 'del', 'ins']
    config.action_view.sanitized_allowed_tags = default_allowed_tags + ['u']

    # Since Rails 5.0, this option is enabled by default.
    # However we keep it disabled for now, because many of our specs and factories
    # do not build the required associations properly.
    # TODO: fix the specs, and enable this option.
    config.active_record.belongs_to_required_by_default = false

    # Some mobile browsers have a behaviour where, although they will delete the session
    # cookie when the browser shutdowns, they will still serve a cached version
    # of the page on relaunch.
    # The CSRF token in the HTML is then mismatched with the CSRF token in the session cookie
    # (because the session cookie has been cleared). This causes form submissions to fail with
    # a "ActionController::InvalidAuthenticityToken" exception.
    # To prevent this, tell browsers to never cache the HTML of a page.
    # (This doesn’t affect assets files, which are still sent with the proper cache headers).
    #
    # See https://github.com/rails/rails/issues/21948
    config.action_dispatch.default_headers['Cache-Control'] = 'no-store, no-cache'

    config.to_prepare do
      # Make main application helpers available in administrate
      Administrate::ApplicationController.helper(TPS::Application.helpers)
    end

    config.middleware.use Rack::Attack
    config.middleware.use Flipper::Middleware::Memoizer, preload_all: true

    config.ds_weekly_overview = ENV['APP_NAME'] == 'tps'
    config.ds_autosave = {
      debounce_delay: 3000,
      status_visible_duration: 6000
    }

    config.skylight.probes += [:graphql]
  end
end
|
require 'rest-client'
require 'faye/websocket'
require 'eventmachine'
require 'discordrb/endpoints/endpoints'
require 'discordrb/events/message'
require 'discordrb/events/typing'
require 'discordrb/events/lifetime'
require 'discordrb/events/presence'
require 'discordrb/events/voice-state-update'
require 'discordrb/events/channel-create'
require 'discordrb/events/channel-update'
require 'discordrb/events/channel-delete'
require 'discordrb/events/guild-member-update'
require 'discordrb/events/guild-role-create'
require 'discordrb/events/guild-role-delete'
require 'discordrb/events/guild-role-update'
require 'discordrb/exceptions'
require 'discordrb/data'
module Discordrb
class Bot
include Discordrb::Events
# Create a bot, log in with the given credentials, and set up empty caches
# for event handlers, channels and users.
def initialize(email, password, debug = false)
  # Refuse to run when the placeholder credentials from the example files
  # were left in place.
  if email.end_with? "example.com"
    puts "You have to replace the login details in the example files with your own!"
    exit
  end

  @email, @password, @debug = email, password, debug
  @token = login
  @event_handlers = {}
  @channels = {}
  @users = {}
end
# Start the bot: spawn the heartbeat thread, then connect to the WebSocket
# gateway, reconnecting (and re-authenticating) 5 seconds after any
# disconnect. This method never returns.
def run
  # Handle heartbeats
  @heartbeat_interval = 1
  @heartbeat_active = false
  @heartbeat_thread = Thread.new do
    # `loop` is the idiomatic Ruby infinite loop (was `while true do`).
    loop do
      sleep @heartbeat_interval
      send_heartbeat if @heartbeat_active
    end
  end

  loop do
    websocket_connect
    debug("Disconnected! Attempting to reconnect in 5 seconds.")
    sleep 5
    @token = login
  end
end
# Return the Channel with the given ID, fetching it from the API and
# caching it on first use.
def channel(id)
  debug("Obtaining data for channel with id #{id}")
  cached = @channels[id]
  return cached if cached

  raw = RestClient.get(Discordrb::Endpoints::CHANNELS + "/#{id}", Authorization: @token)
  @channels[id] = Channel.new(JSON.parse(raw), self)
end
# Return the DM channel for the given user ID, creating it via the API on
# first use and caching it afterwards.
def private_channel(id)
  debug("Creating private channel with user id #{id}")
  cached = @private_channels[id]
  return cached if cached

  payload = { 'recipient_id' => id }
  raw = RestClient.post(Discordrb::Endpoints::USERS + "/#{@bot_user.id}/channels", payload.to_json, Authorization: @token, content_type: :json)
  @private_channels[id] = Channel.new(JSON.parse(raw), self)
end
# Look up a cached user by numeric ID (populated by the presence/READY
# handlers); returns nil when the user has not been seen yet.
def user(id)
  @users[id]
end

# Look up a cached server by numeric ID (populated on READY); returns nil
# when the server is unknown.
def server(id)
  @servers[id]
end
# Post a text message to the given channel via the REST API.
def send_message(channel_id, content)
  debug("Sending message to #{channel_id} with content '#{content}'")
  payload = {
    'content' => content.to_s,
    'mentions' => []
  }
  RestClient.post(Discordrb::Endpoints::CHANNELS + "/#{channel_id}/messages", payload.to_json, Authorization: @token, content_type: :json)
end
# Toggle debug logging at runtime; when truthy, #debug prints to stdout.
def debug=(value)
  @debug = value
end
# Register a handler for MESSAGE_CREATE (every incoming message).
def message(attributes = {}, &block)
  register_event(MessageEvent, attributes, block)
end

# Register a handler for the READY event (initial connection complete).
def ready(attributes = {}, &block)
  register_event(ReadyEvent, attributes, block)
end

# Register a handler that runs when the WebSocket connection closes.
def disconnected(attributes = {}, &block)
  register_event(DisconnectEvent, attributes, block)
end

# Register a handler for TYPING_START notifications.
def typing(attributes = {}, &block)
  register_event(TypingEvent, attributes, block)
end

# Register a handler for PRESENCE_UPDATE (status/game changes).
def presence(attributes = {}, &block)
  register_event(PresenceEvent, attributes, block)
end

# Register a handler that fires only when the bot user is mentioned.
def mention(attributes = {}, &block)
  register_event(MentionEvent, attributes, block)
end

# Handle channel creation
# Attributes:
# * type: Channel type ('text' or 'voice')
# * name: Channel name
def channel_create(attributes = {}, &block)
  register_event(ChannelCreateEvent, attributes, block)
end

# Handle channel update
# Attributes:
# * type: Channel type ('text' or 'voice')
# * name: Channel name
def channel_update(attributes = {}, &block)
  register_event(ChannelUpdateEvent, attributes, block)
end

# Handle channel deletion
# Attributes:
# * type: Channel type ('text' or 'voice')
# * name: Channel name
def channel_delete(attributes = {}, &block)
  register_event(ChannelDeleteEvent, attributes, block)
end

# Handle a change to a voice state.
# This includes joining a voice channel or changing mute or deaf state.
# Attributes:
# * from: User whose voice state changed
# * mute: server mute status
# * deaf: server deaf status
# * self_mute: self mute status
# * self_deaf: self deaf status
# * channel: channel the user joined
def voice_state_update(attributes = {}, &block)
  register_event(VoiceStateUpdateEvent, attributes, block)
end
# Unregister a previously added event handler object.
# A handler whose event class was never registered is now ignored instead
# of raising NoMethodError on nil.
def remove_handler(handler)
  clazz = event_class(handler.class)
  handlers = @event_handlers[clazz]
  handlers.delete(handler) if handlers
end
# Register an event handler object (also available as #<<).
# Creates the handler list for this event class on first use, so adding a
# handler for a never-registered class no longer raises on `nil <<`.
def add_handler(handler)
  clazz = event_class(handler.class)
  (@event_handlers[clazz] ||= []) << handler
end
# Print a timestamped log line, but only when debug mode is enabled.
def debug(message)
  return unless @debug
  puts "[DEBUG @ #{Time.now.to_s}] #{message}"
end
# Shovel shorthand: `bot << handler` registers a handler object.
alias_method :<<, :add_handler

# Everything below is internal packet-handling machinery.
private
# Internal handler for PRESENCE_UPDATE: cache the user (creating it on
# first sight), add them to the server member list when they come online,
# and record the new status and game.
def update_presence(data)
  server = @servers[data['guild_id'].to_i]
  return unless server

  user_id = data['user']['id'].to_i
  user = @users[user_id] ||= User.new(data['user'], self)

  status = data['status'].to_sym
  if status != :offline && server.members.none? { |m| m.id == user.id }
    server.members << user
  end

  user.status = status
  user.game_id = data['game_id']
end
# Internal handler for VOICE_STATE_UPDATE: copy the mute/deaf flags onto
# the cached user and move them to the new voice channel (nil = left).
def update_voice_state(data)
  server = @servers[data['guild_id'].to_i]
  return unless server

  user = @users[data['user_id'].to_i]
  user.server_mute = data['mute']
  user.server_deaf = data['deaf']
  user.self_mute = data['self_mute']
  user.self_deaf = data['self_deaf']

  channel_id = data['channel_id']
  user.move(channel_id ? @channels[channel_id.to_i] : nil)
end
# Internal handler for CHANNEL_CREATE: build the channel, attach it to its
# server's channel list, and cache it by ID.
def create_channel(data)
  channel = Channel.new(data, self)
  channel.server.channels << channel
  @channels[channel.id] = channel
end
# Internal handler for CHANNEL_UPDATE: rebuild the channel from the packet
# and copy its data onto the cached instance (no-op for unknown channels).
def update_channel(data)
  channel = Channel.new(data, self)
  # NOTE(review): this lookup is unused below — presumably vestigial, but
  # kept in case Channel#server memoizes state; confirm and drop.
  _server = channel.server
  existing = @channels[channel.id]
  existing.update_from(channel) if existing
end
# Internal handler for CHANNEL_DELETE: drop the channel from the global
# cache and from its server's channel list.
# Uses Hash#delete instead of assigning nil so the cache does not keep a
# stale key around (lookups behave the same, but key?/size stay accurate).
def delete_channel(data)
  channel = Channel.new(data, self)
  server = channel.server
  @channels.delete(channel.id)
  server.channels.reject! { |c| c.id == channel.id }
end
# Internal handler for GUILD_MEMBER_UPDATE: resolve the packet's role IDs
# against the server's role list and push them onto the cached user.
def update_guild_member(data)
  server = @servers[data['guild_id'].to_i]
  roles = data['roles'].map do |raw_role_id|
    wanted = raw_role_id.to_i
    server.roles.find { |r| r.id == wanted }
  end
  @users[data['user']['id'].to_i].update_roles(server, roles)
end
# Internal handler for GUILD_ROLE_UPDATE: build a fresh Role from the
# packet and copy it over the matching cached role on the server.
def update_guild_role(data)
  role_data = data['role']
  server = @servers[data['guild_id'].to_i]
  replacement = Role.new(role_data, self, server)
  wanted_id = role_data['id'].to_i
  existing = server.roles.find { |r| r.id == wanted_id }
  existing.update_from(replacement)
end
# Internal handler for GUILD_ROLE_CREATE: construct the new role and
# register it on its server.
def create_guild_role(data)
  server = @servers[data['guild_id'].to_i]
  server.add_role(Role.new(data['role'], self, server))
end
# Internal handler for GUILD_ROLE_DELETE: remove the role (by numeric ID)
# from its server.
def delete_guild_role(data)
  server = @servers[data['guild_id'].to_i]
  server.delete_role(data['role']['id'].to_i)
end
# Internal handler for MESSAGE_CREATE
# Intentionally a no-op hook; the public MessageEvent is raised by the
# websocket dispatcher itself.
def create_message(data); end

# Internal handler for TYPING_START
# Intentionally a no-op hook, mirroring create_message.
def start_typing(data); end
# Authenticate against the Discord API and return the session token.
#
# Retries up to 100 times (5 seconds apart) on DNS failures
# ("No such host is known.") and Cloudflare 523 responses.
# Raises HTTPStatusException for HTTP error codes and
# InvalidAuthenticationException when the response has no token.
def login
  debug("Logging in")
  # `retry` re-runs the whole method body; the `|| 0` keeps any previous
  # attempt count instead of resetting it.
  login_attempts = login_attempts || 0

  # Login
  login_response = RestClient.post Discordrb::Endpoints::LOGIN, :email => @email, :password => @password
  raise HTTPStatusException.new(login_response.code) if login_response.code >= 400

  # Parse response
  login_response_object = JSON.parse(login_response)
  raise InvalidAuthenticationException unless login_response_object['token']

  debug("Received token: #{login_response_object['token']}")
  login_response_object['token']
rescue StandardError => e
  # NOTE: this was `rescue Exception`, which also swallowed SignalException
  # and SystemExit; StandardError keeps Ctrl-C and `exit` working.
  response_code = login_response.nil? ? 0 : login_response.code
  if login_attempts < 100 && (e.inspect.include?("No such host is known.") || response_code == 523)
    debug("Login failed! Reattempting in 5 seconds. #{100 - login_attempts} attempts remaining.")
    debug("Error was: #{e.inspect}")
    sleep 5
    login_attempts += 1
    retry
  else
    debug("Login failed permanently after #{login_attempts + 1} attempts")
    # Apparently we get a 400 if the password or username is incorrect. In that case, tell the user
    debug("Are you sure you're using the correct username and password?") if e.class == RestClient::BadRequest
    # Bare `raise` re-raises the current exception (was `raise $!`).
    raise
  end
end
# Fetch the current WebSocket gateway URL from the API.
def get_gateway
  gateway_json = RestClient.get(Discordrb::Endpoints::GATEWAY, authorization: @token)
  JSON.parse(gateway_json)["url"]
end
# Resolve the gateway URL, then run the EventMachine loop with a WebSocket
# client wired to the websocket_* callback methods.
def websocket_connect
  debug("Attempting to get gateway URL...")
  websocket_hub = get_gateway
  debug("Success! Gateway URL is #{websocket_hub}.")
  debug("Now running bot")

  EM.run do
    @ws = Faye::WebSocket::Client.new(websocket_hub)
    @ws.on(:open) { |event| websocket_open(event) }
    @ws.on(:message) { |event| websocket_message(event) }
    @ws.on(:error) { |event| debug(event.message) }
    @ws.on(:close) do |event|
      websocket_close(event)
      @ws = nil
    end
  end
end
# Dispatch one raw WebSocket packet: parse it, run the matching internal
# handler to update caches, then raise the corresponding public event.
def websocket_message(event)
  begin
    debug("Received packet #{event.data}")

    # Parse packet
    packet = JSON.parse(event.data)
    raise "Invalid Packet" unless packet['op'] == 0 # TODO

    data = packet['d']
    case packet['t']
    when "READY"
      # Activate the heartbeats
      @heartbeat_interval = data['heartbeat_interval'].to_f / 1000.0
      @heartbeat_active = true
      debug("Desired heartbeat_interval: #{@heartbeat_interval}")

      bot_user_id = data['user']['id'].to_i

      # Initialize servers
      @servers = {}
      data['guilds'].each do |element|
        server = Server.new(element, self)
        @servers[server.id] = server

        # Initialize users
        # NOTE(review): the inner block parameter shadows the outer guild
        # `element`; it works, but rename one when touching this code.
        server.members.each do |element|
          unless @users[element.id]
            @users[element.id] = element
          else
            # If the user is already cached, just add the new roles
            @users[element.id].merge_roles(server, element.roles[server.id])
          end
        end

        # Save the bot user
        @bot_user = @users[bot_user_id]
      end

      # Add private channels
      @private_channels = {}
      data['private_channels'].each do |element|
        channel = Channel.new(element, self)
        @channels[channel.id] = channel
        @private_channels[channel.recipient.id] = channel
      end

      # Make sure to raise the event
      raise_event(ReadyEvent.new)
    when "MESSAGE_CREATE"
      create_message(data)
      message = Message.new(data, self)
      event = MessageEvent.new(message, self)
      raise_event(event)

      # Additionally raise a MentionEvent when the bot user is mentioned.
      if message.mentions.any? { |user| user.id == @bot_user.id }
        event = MentionEvent.new(message, self)
        raise_event(event)
      end
    when "TYPING_START"
      start_typing(data)
      event = TypingEvent.new(data, self)
      raise_event(event)
    when "PRESENCE_UPDATE"
      update_presence(data)
      event = PresenceEvent.new(data, self)
      raise_event(event)
    when "VOICE_STATE_UPDATE"
      update_voice_state(data)
      event = VoiceStateUpdateEvent.new(data, self)
      raise_event(event)
    when "CHANNEL_CREATE"
      create_channel(data)
      event = ChannelCreateEvent.new(data, self)
      raise_event(event)
    when "CHANNEL_UPDATE"
      update_channel(data)
      event = ChannelUpdateEvent.new(data, self)
      raise_event(event)
    when "CHANNEL_DELETE"
      delete_channel(data)
      event = ChannelDeleteEvent.new(data, self)
      raise_event(event)
    when "GUILD_MEMBER_UPDATE"
      update_guild_member(data)
      event = GuildMemberUpdateEvent.new(data, self)
      raise_event(event)
    when "GUILD_ROLE_UPDATE"
      update_guild_role(data)
      event = GuildRoleUpdateEvent.new(data, self)
      raise_event(event)
    when "GUILD_ROLE_CREATE"
      create_guild_role(data)
      event = GuildRoleCreateEvent.new(data, self)
      raise_event(event)
    when "GUILD_ROLE_DELETE"
      delete_guild_role(data)
      event = GuildRoleDeleteEvent.new(data, self)
      raise_event(event)
    end
  rescue Exception => e
    # NOTE(review): rescuing Exception also swallows SignalException and
    # SystemExit; consider narrowing to StandardError.
    debug("Exception: #{e.inspect}")
    e.backtrace.each {|line| debug(line) }
  end
end
def websocket_close(event)
  # Log the disconnect details, notify listeners, then stop the EM reactor
  # so websocket_connect returns to its caller.
  [
    "Disconnected from WebSocket!",
    " (Reason: #{event.reason})",
    " (Code: #{event.code})"
  ].each { |line| debug(line) }
  raise_event(DisconnectEvent.new)
  EM.stop
end
def websocket_open(event)
  # Gateway handshake: identify ourselves with opcode 2 as soon as the
  # socket opens.
  properties = {
    # These values don't appear to impact bot functionality in any way.
    "$os" => "#{RUBY_PLATFORM}",
    "$browser" => "discordrb",
    "$device" => "discordrb",
    "$referrer" => "",
    "$referring_domain" => ""
  }
  payload = {
    "op" => 2, # Packet identifier
    "d" => {   # Packet data
      "v" => 2, # Another identifier
      "token" => @token,
      "properties" => properties
    }
  }
  @ws.send(payload.to_json)
end
def raise_event(event)
  # Dispatch an event instance to every handler registered for its class.
  debug("Raised a #{event.class}")
  matching = @event_handlers[event.class] || []
  matching.each { |handler| handler.match(event) }
end
def register_event(clazz, attributes, block)
  # Build the matching handler, store it under the event class, and hand it
  # back so it can be removed later via remove_handler.
  handler = handler_class(clazz).new(attributes, block)
  (@event_handlers[clazz] ||= []) << handler
  handler
end
def send_heartbeat
  # Heartbeat frame: opcode 1 carrying the current time in milliseconds.
  millis = Time.now.strftime("%s%L").to_i
  debug("Sending heartbeat at #{millis}")
  @ws.send({ 'op' => 1, 'd' => millis }.to_json)
end
def class_from_string(str)
  # Resolve a fully qualified constant name like "Foo::Bar" to the constant
  # itself, walking the namespaces from Object down.
  str.split('::').reduce(Object) { |namespace, name| namespace.const_get(name) }
end
def event_class(handler_class)
  # Map e.g. MessageEventHandler to MessageEvent; nil for non-handler names.
  name = handler_class.to_s
  return nil unless name.end_with?("Handler")
  class_from_string(name[0...-7])
end
def handler_class(event_class)
  # Inverse of event_class: MessageEvent -> MessageEventHandler.
  class_from_string("#{event_class}Handler")
end
end
end
Replace the requests in Bot with calls to API
require 'rest-client'
require 'faye/websocket'
require 'eventmachine'
require 'discordrb/endpoints/endpoints'
require 'discordrb/events/message'
require 'discordrb/events/typing'
require 'discordrb/events/lifetime'
require 'discordrb/events/presence'
require 'discordrb/events/voice-state-update'
require 'discordrb/events/channel-create'
require 'discordrb/events/channel-update'
require 'discordrb/events/channel-delete'
require 'discordrb/events/guild-member-update'
require 'discordrb/events/guild-role-create'
require 'discordrb/events/guild-role-delete'
require 'discordrb/events/guild-role-update'
require 'discordrb/api'
require 'discordrb/exceptions'
require 'discordrb/data'
module Discordrb
class Bot
include Discordrb::Events
# Creates a bot and logs in immediately (network I/O happens in the
# constructor); the websocket itself is only opened later by #run.
# email/password - Discord credentials; debug - enable verbose logging.
def initialize(email, password, debug = false)
# Make sure people replace the login details in the example files...
if email.end_with? "example.com"
puts "You have to replace the login details in the example files with your own!"
# NOTE(review): terminates the whole process, not just this constructor.
exit
end
@debug = debug
@email = email
@password = password
@token = login
@event_handlers = {}
@channels = {}
@users = {}
end
# Starts the bot and blocks forever: spawns the heartbeat thread, then
# loops connecting to the websocket and logging in again after every
# disconnect.
def run
# Handle heartbeats
@heartbeat_interval = 1
@heartbeat_active = false
@heartbeat_thread = Thread.new do
while true do
sleep @heartbeat_interval
# Only beat once READY has supplied the real interval.
send_heartbeat if @heartbeat_active
end
end
while true do
websocket_connect
debug("Disconnected! Attempting to reconnect in 5 seconds.")
sleep 5
# Presumably the token can go stale across disconnects — re-login.
@token = login
end
end
def channel(id)
  # Resolve a channel by id, hitting the REST API only on a cache miss.
  debug("Obtaining data for channel with id #{id}")
  cached = @channels[id]
  return cached if cached
  fetched = Channel.new(JSON.parse(API.channel(@token, id)), self)
  @channels[id] = fetched
end
def private_channel(id)
  # Open (or reuse) a direct-message channel with the user given by +id+.
  # Returns the cached Channel when one already exists.
  debug("Creating private channel with user id #{id}")
  cached = @private_channels[id]
  return cached if cached
  # Fix: the old `data` hash built here was never used — API.create_private
  # takes the ids directly.
  response = API.create_private(@token, @bot_user.id, id)
  channel = Channel.new(JSON.parse(response), self)
  @private_channels[id] = channel
end
def user(id)
  # Look up a cached user; nil when the id is unknown.
  @users.fetch(id, nil)
end
def server(id)
  # Look up a cached server; nil when the id is unknown.
  @servers.fetch(id, nil)
end
def send_message(channel_id, content)
  # Post +content+ to the channel with the given id via the REST API.
  # Fix: the old `data` hash built here was never used — API.send_message
  # receives the content directly.
  debug("Sending message to #{channel_id} with content '#{content}'")
  API.send_message(@token, channel_id, content)
end
def debug=(enabled)
  # Toggle verbose logging at runtime.
  @debug = enabled
end
# Event-subscription DSL. Each generated method registers a handler for one
# event class and returns the handler so it can later be passed to
# remove_handler.
#
# channel_create / channel_update / channel_delete accept the attributes:
# * type: Channel type ('text' or 'voice')
# * name: Channel name
#
# voice_state_update handles voice-channel joins and mute/deaf changes and
# accepts the attributes:
# * from: User whose voice state changed
# * mute: server mute status
# * deaf: server deaf status
# * self_mute: self mute status
# * self_deaf: self deaf status
# * channel: channel the user joined
{
  message: MessageEvent,
  ready: ReadyEvent,
  disconnected: DisconnectEvent,
  typing: TypingEvent,
  presence: PresenceEvent,
  mention: MentionEvent,
  channel_create: ChannelCreateEvent,
  channel_update: ChannelUpdateEvent,
  channel_delete: ChannelDeleteEvent,
  voice_state_update: VoiceStateUpdateEvent
}.each do |method_name, event_klass|
  define_method(method_name) do |attributes = {}, &block|
    register_event(event_klass, attributes, block)
  end
end
def remove_handler(handler)
  # Unregister a handler previously returned by one of the DSL methods.
  @event_handlers[event_class(handler.class)].delete(handler)
end
def add_handler(handler)
  # Register an externally constructed handler instance.
  @event_handlers[event_class(handler.class)] << handler
end
def debug(message)
  # Timestamped log line on stdout, emitted only while debug mode is on.
  return unless @debug
  puts "[DEBUG @ #{Time.now.to_s}] #{message}"
end
alias_method :<<, :add_handler
private
# Internal handler for PRESENCE_UPDATE
# Updates the cached status/game of a user, creating and caching the user
# when it isn't known yet, and adding it to the server's member list when
# it comes online.
def update_presence(data)
user_id = data['user']['id'].to_i
server_id = data['guild_id'].to_i
server = @servers[server_id]
# Ignore presences for servers we don't know about.
return if !server
user = @users[user_id]
if !user
user = User.new(data['user'], self)
@users[user_id] = user
end
status = data['status'].to_sym
if status != :offline
# Make sure an online user appears in the server's member list.
if !(server.members.find {|u| u.id == user.id })
server.members << user
end
end
user.status = status
user.game_id = data['game_id']
end
# Internal handler for VOICE_STATE_UPDATE (comment previously said
# VOICE_STATUS_UPDATE, which doesn't match the dispatcher's packet type).
# Applies mute/deaf flags and moves the user to the channel named in the
# payload.
def update_voice_state(data)
user_id = data['user_id'].to_i
server_id = data['guild_id'].to_i
server = @servers[server_id]
return if !server
user = @users[user_id]
user.server_mute = data['mute']
user.server_deaf = data['deaf']
user.self_mute = data['self_mute']
user.self_deaf = data['self_deaf']
channel_id = data['channel_id']
channel = nil
if channel_id
channel = @channels[channel_id.to_i]
end
# A nil channel means the user left voice entirely.
user.move(channel)
end
# Internal handler for CHANNEL_CREATE
def create_channel(data)
  # Build the channel, attach it to its server's list, and cache it.
  channel = Channel.new(data, self)
  channel.server.channels << channel
  @channels[channel.id] = channel
end
# Internal handler for CHANNEL_UPDATE
def update_channel(data)
  # Refresh the cached channel in place so existing references stay valid.
  # Fix: removed the unused `server` local the old code assigned.
  channel = Channel.new(data, self)
  old_channel = @channels[channel.id]
  return unless old_channel
  old_channel.update_from(channel)
end
# Internal handler for CHANNEL_DELETE
def delete_channel(data)
  # Drop the channel from the global cache and from its server's list.
  channel = Channel.new(data, self)
  server = channel.server
  # Fix: delete the key instead of assigning nil, so dead ids don't
  # accumulate in @channels (lookups behave the same either way).
  @channels.delete(channel.id)
  server.channels.reject! {|c| c.id == channel.id}
end
# Internal handler for GUILD_MEMBER_UPDATE
def update_guild_member(data)
  # Re-resolve the member's role list against the server's known roles.
  server = @servers[data['guild_id'].to_i]
  resolved_roles = data['roles'].map do |raw_id|
    wanted_id = raw_id.to_i
    server.roles.find { |role| role.id == wanted_id }
  end
  member = @users[data['user']['id'].to_i]
  member.update_roles(server, resolved_roles)
end
# Internal handler for GUILD_ROLE_UPDATE
def update_guild_role(data)
  # Copy the new attributes onto the cached role object in place.
  server = @servers[data['guild_id'].to_i]
  role_data = data['role']
  replacement = Role.new(role_data, self, server)
  target_id = role_data['id'].to_i
  existing = server.roles.find { |role| role.id == target_id }
  existing.update_from(replacement)
end
# Internal handler for GUILD_ROLE_CREATE
def create_guild_role(data)
  # Register a newly created role with its server.
  server = @servers[data['guild_id'].to_i]
  server.add_role(Role.new(data['role'], self, server))
end
# Internal handler for GUILD_ROLE_DELETE
def delete_guild_role(data)
  # Remove the role from its server's role list.
  doomed_id = data['role']['id'].to_i
  @servers[data['guild_id'].to_i].delete_role(doomed_id)
end
# Internal handler for MESSAGE_CREATE — intentionally a no-op hook; the
# dispatcher builds the Message/event objects itself.
def create_message(data); end
# Internal handler for TYPING_START — intentionally a no-op hook.
def start_typing(data); end
# Authenticate against the REST API and return the session token.
# Host-resolution failures and 523s are retried up to 100 times, five
# seconds apart; anything else is re-raised after being logged.
def login
  debug("Logging in")
  login_attempts ||= 0
  # Login
  login_response = API.login(@email, @password)
  raise HTTPStatusException.new(login_response.code) if login_response.code >= 400
  # Parse response
  login_response_object = JSON.parse(login_response)
  raise InvalidAuthenticationException unless login_response_object['token']
  debug("Received token: #{login_response_object['token']}")
  login_response_object['token']
rescue StandardError => e
  # Fix: rescue StandardError rather than Exception so signals and
  # SystemExit still propagate instead of being retried.
  response_code = login_response.nil? ? 0 : login_response.code
  if login_attempts < 100 && (e.inspect.include?("No such host is known.") || response_code == 523)
    debug("Login failed! Reattempting in 5 seconds. #{100 - login_attempts} attempts remaining.")
    debug("Error was: #{e.inspect}")
    sleep 5
    login_attempts += 1
    retry
  else
    debug("Login failed permanently after #{login_attempts + 1} attempts")
    # Apparently we get a 400 if the password or username is incorrect. In that case, tell the user
    debug("Are you sure you're using the correct username and password?") if e.is_a?(RestClient::BadRequest)
    # Fix: bare `raise` re-raises the current exception; `raise $!` was the
    # non-idiomatic spelling of the same thing.
    raise
  end
end
def get_gateway
  # Fetch the current gateway websocket URL from the REST API.
  JSON.parse(API.gateway(@token))["url"]
end
# Resolve the gateway URL, then open the websocket and wire up callbacks.
# EM.run blocks the calling thread until the reactor is stopped (see
# websocket_close), so this method only returns after a disconnect.
def websocket_connect
debug("Attempting to get gateway URL...")
websocket_hub = get_gateway
debug("Success! Gateway URL is #{websocket_hub}.")
debug("Now running bot")
EM.run {
@ws = Faye::WebSocket::Client.new(websocket_hub)
@ws.on :open do |event|; websocket_open(event); end
@ws.on :message do |event|; websocket_message(event); end
@ws.on :error do |event|; debug(event.message); end
# Clear @ws on close so nothing tries to write to a dead socket.
@ws.on :close do |event|; websocket_close(event); @ws = nil; end
}
end
# Central gateway dispatcher: parses a raw websocket frame, updates the
# local cache for the packet type, then raises the corresponding library
# event so user-registered handlers run.
def websocket_message(event)
begin
debug("Received packet #{event.data}")
# Parse packet
packet = JSON.parse(event.data)
# Only opcode 0 (dispatch) frames are handled so far.
raise "Invalid Packet" unless packet['op'] == 0 # TODO
data = packet['d']
case packet['t']
when "READY"
# Activate the heartbeats
# The server reports the interval in milliseconds; we store seconds.
@heartbeat_interval = data['heartbeat_interval'].to_f / 1000.0
@heartbeat_active = true
debug("Desired heartbeat_interval: #{@heartbeat_interval}")
bot_user_id = data['user']['id'].to_i
# Initialize servers
@servers = {}
data['guilds'].each do |element|
server = Server.new(element, self)
@servers[server.id] = server
# Initialize users
# NOTE(review): the inner block param |element| shadows the outer guild
# |element|; works, but renaming one would be clearer.
server.members.each do |element|
unless @users[element.id]
@users[element.id] = element
else
# If the user is already cached, just add the new roles
@users[element.id].merge_roles(server, element.roles[server.id])
end
end
# Save the bot user
@bot_user = @users[bot_user_id]
end
# Add private channels
@private_channels = {}
data['private_channels'].each do |element|
channel = Channel.new(element, self)
@channels[channel.id] = channel
@private_channels[channel.recipient.id] = channel
end
# Make sure to raise the event
raise_event(ReadyEvent.new)
when "MESSAGE_CREATE"
create_message(data)
message = Message.new(data, self)
event = MessageEvent.new(message, self)
raise_event(event)
# Also raise a MentionEvent when the bot user itself is mentioned.
if message.mentions.any? { |user| user.id == @bot_user.id }
event = MentionEvent.new(message, self)
raise_event(event)
end
when "TYPING_START"
start_typing(data)
event = TypingEvent.new(data, self)
raise_event(event)
when "PRESENCE_UPDATE"
update_presence(data)
event = PresenceEvent.new(data, self)
raise_event(event)
when "VOICE_STATE_UPDATE"
update_voice_state(data)
event = VoiceStateUpdateEvent.new(data, self)
raise_event(event)
when "CHANNEL_CREATE"
create_channel(data)
event = ChannelCreateEvent.new(data, self)
raise_event(event)
when "CHANNEL_UPDATE"
update_channel(data)
event = ChannelUpdateEvent.new(data, self)
raise_event(event)
when "CHANNEL_DELETE"
delete_channel(data)
event = ChannelDeleteEvent.new(data, self)
raise_event(event)
when "GUILD_MEMBER_UPDATE"
update_guild_member(data)
event = GuildMemberUpdateEvent.new(data, self)
raise_event(event)
when "GUILD_ROLE_UPDATE"
update_guild_role(data)
event = GuildRoleUpdateEvent.new(data, self)
raise_event(event)
when "GUILD_ROLE_CREATE"
create_guild_role(data)
event = GuildRoleCreateEvent.new(data, self)
raise_event(event)
when "GUILD_ROLE_DELETE"
delete_guild_role(data)
event = GuildRoleDeleteEvent.new(data, self)
raise_event(event)
end
# NOTE(review): rescuing Exception (not StandardError) also swallows
# SignalException/SystemExit raised inside handlers; presumably deliberate
# to keep the bot alive — confirm before changing.
rescue Exception => e
debug("Exception: #{e.inspect}")
e.backtrace.each {|line| debug(line) }
end
end
def websocket_close(event)
  # Log the disconnect details, notify listeners, then stop the EM reactor
  # so websocket_connect returns to its caller.
  [
    "Disconnected from WebSocket!",
    " (Reason: #{event.reason})",
    " (Code: #{event.code})"
  ].each { |line| debug(line) }
  raise_event(DisconnectEvent.new)
  EM.stop
end
def websocket_open(event)
  # Gateway handshake: identify ourselves with opcode 2 as soon as the
  # socket opens.
  properties = {
    # These values don't appear to impact bot functionality in any way.
    "$os" => "#{RUBY_PLATFORM}",
    "$browser" => "discordrb",
    "$device" => "discordrb",
    "$referrer" => "",
    "$referring_domain" => ""
  }
  payload = {
    "op" => 2, # Packet identifier
    "d" => {   # Packet data
      "v" => 2, # Another identifier
      "token" => @token,
      "properties" => properties
    }
  }
  @ws.send(payload.to_json)
end
def raise_event(event)
  # Dispatch an event instance to every handler registered for its class.
  debug("Raised a #{event.class}")
  matching = @event_handlers[event.class] || []
  matching.each { |handler| handler.match(event) }
end
def register_event(clazz, attributes, block)
  # Build the matching handler, store it under the event class, and hand it
  # back so it can be removed later via remove_handler.
  handler = handler_class(clazz).new(attributes, block)
  (@event_handlers[clazz] ||= []) << handler
  handler
end
def send_heartbeat
  # Heartbeat frame: opcode 1 carrying the current time in milliseconds.
  millis = Time.now.strftime("%s%L").to_i
  debug("Sending heartbeat at #{millis}")
  @ws.send({ 'op' => 1, 'd' => millis }.to_json)
end
def class_from_string(str)
  # Resolve a fully qualified constant name like "Foo::Bar" to the constant
  # itself, walking the namespaces from Object down.
  str.split('::').reduce(Object) { |namespace, name| namespace.const_get(name) }
end
def event_class(handler_class)
  # Map e.g. MessageEventHandler to MessageEvent; nil for non-handler names.
  name = handler_class.to_s
  return nil unless name.end_with?("Handler")
  class_from_string(name[0...-7])
end
def handler_class(event_class)
  # Inverse of event_class: MessageEvent -> MessageEventHandler.
  class_from_string("#{event_class}Handler")
end
end
end
|
# Homebrew formula for HISAT, a fast spliced aligner for RNA-seq reads.
class Hisat < Formula
homepage "http://ccb.jhu.edu/software/hisat/"
# tag "bioinformatics"
# doi "10.1038/nmeth.3317"
url "http://ccb.jhu.edu/software/hisat/downloads/hisat-0.1.5-beta-source.zip"
sha256 "0a58d820297fae2f90a783bdb714621a6051fe2bde0a60d518cb3672eeda2210"
# Upstream names the release "0.1.5-beta"; Homebrew uses "0.1.5b".
version "0.1.5b"
bottle do
root_url "https://homebrew.bintray.com/bottles-science"
cellar :any
sha256 "d9540bb34062a9037d402352f99a61f6cda54732460d3eab39f439a4ea449fdf" => :yosemite
sha256 "30639176fdd893df3b6d64761a09de3a44ada9cb038771296651e2277dabf6f8" => :mavericks
sha256 "12d5c4dc85b63920c6e0e6b7808336fd7818111e88e940552ec663557e0d4b60" => :mountain_lion
end
def install
system "make"
# Install the main binary plus all hisat-* helper executables.
bin.install "hisat", Dir["hisat-*"]
doc.install Dir["doc/*"]
end
test do
# hisat exits with status 1 when run without arguments, hence the 1.
assert_match "HISAT", shell_output("hisat 2>&1", 1)
end
end
hisat 0.1.6b
Closes #2475.
Signed-off-by: Torsten Seemann <feac082ffa4709ec52de15779c15d209ea844842@seemann.id.au>
# Homebrew formula for HISAT, a fast spliced aligner for RNA-seq reads.
class Hisat < Formula
homepage "http://ccb.jhu.edu/software/hisat/"
# tag "bioinformatics"
# doi "10.1038/nmeth.3317"
url "http://ccb.jhu.edu/software/hisat/downloads/hisat-0.1.6-beta-source.zip"
sha256 "69fbd79d8f29b221aa72f0db33148d67d44a3e2cfe16dadf0663a58b7741ff9c"
# Upstream names the release "0.1.6-beta"; Homebrew uses "0.1.6b".
version "0.1.6b"
bottle do
root_url "https://homebrew.bintray.com/bottles-science"
cellar :any
# NOTE(review): bottle shas are unchanged from 0.1.5b — presumably stale
# until new bottles are built; confirm before relying on them.
sha256 "d9540bb34062a9037d402352f99a61f6cda54732460d3eab39f439a4ea449fdf" => :yosemite
sha256 "30639176fdd893df3b6d64761a09de3a44ada9cb038771296651e2277dabf6f8" => :mavericks
sha256 "12d5c4dc85b63920c6e0e6b7808336fd7818111e88e940552ec663557e0d4b60" => :mountain_lion
end
def install
system "make"
# Install the main binary plus all hisat-* helper executables.
bin.install "hisat", Dir["hisat-*"]
doc.install Dir["doc/*"]
end
test do
# hisat exits with status 1 when run without arguments, hence the 1.
assert_match "HISAT", shell_output("hisat 2>&1", 1)
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require_relative '../lib/same_site_security/middleware'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Signon
# True when the app is configured for MySQL — the default adapter unless
# SIGNONOTRON2_DB_ADAPTER says otherwise.
def self.mysql?
ENV.fetch("SIGNONOTRON2_DB_ADAPTER", "mysql") == "mysql"
end
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'London'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
I18n.config.enforce_available_locales = true
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
# Note: filter_parameters are treated as regexes, so :password also matches
# current_password, password_confirmation and password-strength-score
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
config.assets.version = '1.0'
#config.middleware.insert_before Warden::Manager, Slimmer::App, config.slimmer.to_hash
# Prevent ActionDispatch::RemoteIp::IpSpoofAttackError when the client set a Client-IP
# header and the request IP was interrogated.
#
# In our infrastructure, the protection this would give is provided by nginx, so
# disabling it solves the above problem and doesn't give us additional risk.
config.action_dispatch.ip_spoofing_check = false
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
config.assets.precompile += %w(password-strength-indicator.js)
config.to_prepare do
Doorkeeper::ApplicationController.layout "application"
end
config.autoload_paths << Rails.root.join('lib')
config.active_job.queue_adapter = :sidekiq
# Insert at position 0 so the SameSite middleware wraps every request.
config.middleware.insert_before 0, SameSiteSecurity::Middleware
# NOTE(review): this setting was removed in Rails 5; drop it when upgrading.
config.active_record.raise_in_transactional_callbacks = true
end
end
Remove raise_in_transactional_callbacks
It has been removed in Rails 5
It was added in Rails 4.1/4.2 as a temporary solution to a problem that is no longer relevant in Rails 5
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require_relative '../lib/same_site_security/middleware'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Signon
# True when the app is configured for MySQL — the default adapter unless
# SIGNONOTRON2_DB_ADAPTER says otherwise.
def self.mysql?
ENV.fetch("SIGNONOTRON2_DB_ADAPTER", "mysql") == "mysql"
end
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'London'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
I18n.config.enforce_available_locales = true
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
# Note: filter_parameters are treated as regexes, so :password also matches
# current_password, password_confirmation and password-strength-score
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
config.assets.version = '1.0'
#config.middleware.insert_before Warden::Manager, Slimmer::App, config.slimmer.to_hash
# Prevent ActionDispatch::RemoteIp::IpSpoofAttackError when the client set a Client-IP
# header and the request IP was interrogated.
#
# In our infrastructure, the protection this would give is provided by nginx, so
# disabling it solves the above problem and doesn't give us additional risk.
config.action_dispatch.ip_spoofing_check = false
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
config.assets.precompile += %w(password-strength-indicator.js)
config.to_prepare do
Doorkeeper::ApplicationController.layout "application"
end
config.autoload_paths << Rails.root.join('lib')
config.active_job.queue_adapter = :sidekiq
# Insert at position 0 so the SameSite middleware wraps every request.
config.middleware.insert_before 0, SameSiteSecurity::Middleware
end
end
|
require 'rest-client'
require 'faye/websocket'
require 'eventmachine'
require 'json'
require 'discordrb/endpoints/endpoints'
require 'discordrb/exceptions'

module Discordrb
  # Minimal first-cut bot: authenticates over REST, then connects to the
  # websocket hub. Event handling is still stubbed out.
  class Bot
    def initialize(email, password)
      @email = email
      @password = password
      @token = login()
      websocket_connect()
    end

    private

    # POST the credentials and return the session token.
    # Raises HTTPStatusException on HTTP error codes and
    # InvalidAuthenticationException when the response carries no token.
    def login
      login_response = RestClient.post Discordrb::Endpoints::LOGIN, :email => @email, :password => @password
      raise HTTPStatusException.new(login_response.code) if login_response.code >= 400
      login_response_object = JSON.parse(login_response)
      # Fix: 'token' must be a string key — the bare word `token` referenced
      # an undefined local and raised NameError at runtime.
      raise InvalidAuthenticationException unless login_response_object['token']
      login_response_object['token']
    end

    def websocket_connect
      EM.run {
        @ws = Faye::WebSocket::Client.new(Discordrb::Endpoints::WEBSOCKET_HUB)
        # Fix: the :open event must go to websocket_open; it previously
        # called websocket_message, leaving websocket_open dead code.
        @ws.on :open do |event|; websocket_open(event); end
        @ws.on :message do |event|; websocket_message(event); end
        @ws.on :close do |event|
          websocket_close(event)
          @ws = nil
        end
      }
    end

    # Stub handlers, to be filled in as the gateway protocol is implemented.
    def websocket_message(event)
    end

    def websocket_close(event)
    end

    def websocket_open(event)
    end
  end
end
Fix use of the bare word token where the string key 'token' was intended
require 'rest-client'
require 'faye/websocket'
require 'eventmachine'
require 'json'
require 'discordrb/endpoints/endpoints'
require 'discordrb/exceptions'

module Discordrb
  # Minimal first-cut bot: authenticates over REST, then connects to the
  # websocket hub. Event handling is still stubbed out.
  class Bot
    def initialize(email, password)
      @email = email
      @password = password
      @token = login()
      websocket_connect()
    end

    private

    # POST the credentials and return the session token.
    # Raises HTTPStatusException on HTTP error codes and
    # InvalidAuthenticationException when the response carries no token.
    def login
      login_response = RestClient.post Discordrb::Endpoints::LOGIN, :email => @email, :password => @password
      raise HTTPStatusException.new(login_response.code) if login_response.code >= 400
      login_response_object = JSON.parse(login_response)
      raise InvalidAuthenticationException unless login_response_object['token']
      login_response_object['token']
    end

    def websocket_connect
      EM.run {
        @ws = Faye::WebSocket::Client.new(Discordrb::Endpoints::WEBSOCKET_HUB)
        # Fix: the :open event must go to websocket_open; it previously
        # called websocket_message, leaving websocket_open dead code.
        @ws.on :open do |event|; websocket_open(event); end
        @ws.on :message do |event|; websocket_message(event); end
        @ws.on :close do |event|
          websocket_close(event)
          @ws = nil
        end
      }
    end

    # Stub handlers, to be filled in as the gateway protocol is implemented.
    def websocket_message(event)
    end

    def websocket_close(event)
    end

    def websocket_open(event)
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'syslog/logger'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# load ENV variables from .env file if it exists
env_file = File.expand_path("../../.env", __FILE__)
if File.exist?(env_file)
require 'dotenv'
Dotenv.load! env_file
end
# load ENV variables from container environment if json file exists
# see https://github.com/phusion/baseimage-docker#envvar_dumps
env_json_file = "/etc/container_environment.json"
if File.exist?(env_json_file)
env_vars = JSON.parse(File.read(env_json_file))
env_vars.each { |k, v| ENV[k] = v }
end
# default values for some ENV variables
ENV['APPLICATION'] ||= "spionone"
ENV['SITENAMELONG'] ||= "DataCite API"
ENV['TRUSTED_IP'] ||= "10.0.10.1"
module Spinone
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
config.autoload_paths += Dir["#{config.root}/app/models/**/**", "#{config.root}/app/controllers/**/"]
# add assets from Ember app
config.assets.paths << "#{Rails.root}/frontend/bower_components"
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
# TODO: do I need to add salt here?
config.filter_parameters += [:password, :authentication_token]
# Use a different cache store
# dalli uses ENV['MEMCACHE_SERVERS']
ENV['MEMCACHE_SERVERS'] ||= ENV['HOSTNAME']
config.cache_store = :dalli_store, nil, { :namespace => ENV['APPLICATION'], :compress => true }
# Skip validation of locale
I18n.enforce_available_locales = false
# Disable IP spoofing check
config.action_dispatch.ip_spoofing_check = false
# compress responses with deflate or gzip
config.middleware.use Rack::Deflater
# set Active Job queueing backend
config.active_job.queue_adapter = :sidekiq
# Minimum Sass number precision required by bootstrap-sass
#::Sass::Script::Value::Number.precision = [8, ::Sass::Script::Value::Number.precision].max
# parameter keys that are not explicitly permitted will raise error
config.action_controller.action_on_unpermitted_parameters = :raise
end
end
set default log level
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'syslog/logger'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# load ENV variables from .env file if it exists
env_file = File.expand_path("../../.env", __FILE__)
if File.exist?(env_file)
require 'dotenv'
Dotenv.load! env_file
end
# load ENV variables from container environment if json file exists
# see https://github.com/phusion/baseimage-docker#envvar_dumps
env_json_file = "/etc/container_environment.json"
if File.exist?(env_json_file)
env_vars = JSON.parse(File.read(env_json_file))
env_vars.each { |k, v| ENV[k] = v }
end
# default values for some ENV variables
ENV['APPLICATION'] ||= "spionone"
ENV['SITENAMELONG'] ||= "DataCite API"
ENV['LOG_LEVEL'] ||= "info"
ENV['TRUSTED_IP'] ||= "10.0.10.1"
module Spinone
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
config.autoload_paths += Dir["#{config.root}/app/models/**/**", "#{config.root}/app/controllers/**/"]
# add assets from Ember app
config.assets.paths << "#{Rails.root}/frontend/bower_components"
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
# TODO: do I need to add salt here?
config.filter_parameters += [:password, :authentication_token]
# Use a different cache store
# dalli uses ENV['MEMCACHE_SERVERS']
ENV['MEMCACHE_SERVERS'] ||= ENV['HOSTNAME']
config.cache_store = :dalli_store, nil, { :namespace => ENV['APPLICATION'], :compress => true }
# Skip validation of locale
I18n.enforce_available_locales = false
# Disable IP spoofing check
config.action_dispatch.ip_spoofing_check = false
# compress responses with deflate or gzip
config.middleware.use Rack::Deflater
# set Active Job queueing backend
config.active_job.queue_adapter = :sidekiq
# Minimum Sass number precision required by bootstrap-sass
#::Sass::Script::Value::Number.precision = [8, ::Sass::Script::Value::Number.precision].max
# parameter keys that are not explicitly permitted will raise error
config.action_controller.action_on_unpermitted_parameters = :raise
end
end
|
require_relative 'boot'
require 'rails/all'
# require 'active_record/connection_adapters/postgis_adapter/railtie'
# require "./lib/middleware/catch_json_parse_errors.rb"
require './app/controllers/concerns/json_response_helper.rb'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module OneclickCore
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
config.time_zone = 'Eastern Time (US & Canada)'
config.i18n.available_locales = [:en, :es]
config.i18n.default_locale = :en
# Load different Service Types
config.autoload_paths += %W(#{config.root}/app/models/services)
# Set default CORS settings
# Inserted at position 0 so CORS headers are applied before anything else.
config.middleware.insert_before 0, Rack::Cors do
allow do
origins '*' # /http:\/\/localhost:(\d*)/
resource '*',
# headers: ['Origin', 'X-Requested-With', 'Content-Type', 'Accept',
# 'Authorization', 'X-User-Token', 'X-User-Email',
# 'Access-Control-Request-Headers', 'Access-Control-Request-Method'
# ],
headers: :any, # fixes CORS errors on OPTIONS requests
methods: [:get, :post, :put, :delete, :options]
end
end
# Sends back appropriate JSON 400 response if a bad JSON request is sent.
config.middleware.insert_before Rack::Head, JsonResponseHelper::CatchJsonParseErrors
end
end
Use optional ENV to set the time zone
# Rails application configuration for the OneclickCore JSON API.
require_relative 'boot'
require 'rails/all'
# require 'active_record/connection_adapters/postgis_adapter/railtie'
# require "./lib/middleware/catch_json_parse_errors.rb"
# Required explicitly because it is referenced in the middleware stack below,
# before the Rails autoloader is available.
require './app/controllers/concerns/json_response_helper.rb'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module OneclickCore
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Time zone is overridable per-deployment via the TIME_ZONE environment
    # variable; defaults to US Eastern when unset.
    config.time_zone = ENV['TIME_ZONE'] || 'Eastern Time (US & Canada)'
    # English and Spanish translations are supported; English is the default.
    config.i18n.available_locales = [:en, :es]
    config.i18n.default_locale = :en

    # Load different Service Types
    config.autoload_paths += %W(#{config.root}/app/models/services)

    # Set default CORS settings
    # NOTE(review): origins '*' accepts requests from any site -- presumably
    # intentional for a public API; confirm before tightening.
    config.middleware.insert_before 0, Rack::Cors do
      allow do
        origins '*' # /http:\/\/localhost:(\d*)/
        resource '*',
                 # headers: ['Origin', 'X-Requested-With', 'Content-Type', 'Accept',
                 #          'Authorization', 'X-User-Token', 'X-User-Email',
                 #          'Access-Control-Request-Headers', 'Access-Control-Request-Method'
                 #         ],
                 headers: :any, # fixes CORS errors on OPTIONS requests
                 methods: [:get, :post, :put, :delete, :options]
      end
    end

    # Sends back appropriate JSON 400 response if a bad JSON request is sent.
    config.middleware.insert_before Rack::Head, JsonResponseHelper::CatchJsonParseErrors
  end
end
|
# encoding: utf-8
module Imap::Backup
  module Account; end

  # Wraps one IMAP folder on an already-open connection, exposing the
  # message UIDs it contains and fetching individual messages by UID.
  class Account::Folder
    # Message attributes requested from the server on fetch.
    REQUESTED_ATTRIBUTES = ['RFC822', 'FLAGS', 'INTERNALDATE']

    def initialize(connection, folder)
      @connection = connection
      @folder = folder
    end

    # Returns all message UIDs in the folder in ascending order,
    # or [] when the folder does not exist on the server.
    def uids
      imap = @connection.imap
      imap.examine(@folder)
      imap.uid_search(['ALL']).sort
    rescue Net::IMAP::NoResponseError
      Imap::Backup.logger.warn "Folder '#{@folder}' does not exist"
      []
    end

    # Fetches one message by UID; returns the attribute hash from the
    # server (body re-tagged as UTF-8 on Ruby >= 1.9), or nil when the
    # folder does not exist.
    def fetch(uid)
      imap = @connection.imap
      imap.examine(@folder)
      response = imap.uid_fetch([uid.to_i], REQUESTED_ATTRIBUTES)
      message = response[0][1]
      message['RFC822'].force_encoding('utf-8') if RUBY_VERSION > '1.9'
      message
    rescue Net::IMAP::NoResponseError
      Imap::Backup.logger.warn "Folder '#{@folder}' does not exist"
      nil
    end
  end
end
Style refactor: use attributes not instance variables
# encoding: utf-8
module Imap::Backup
  module Account; end

  # Wraps one IMAP folder on an already-open connection, exposing the
  # message UIDs it contains and fetching individual messages by UID.
  class Account::Folder
    # Message attributes requested from the server on fetch.
    # Frozen: shared constants must not be mutated at runtime.
    REQUESTED_ATTRIBUTES = ['RFC822', 'FLAGS', 'INTERNALDATE'].freeze

    attr_reader :connection
    attr_reader :folder

    def initialize(connection, folder)
      @connection = connection
      @folder = folder
    end

    # Returns all message UIDs in the folder in ascending order,
    # or [] when the folder does not exist on the server.
    def uids
      connection.imap.examine(folder)
      connection.imap.uid_search(['ALL']).sort
    rescue Net::IMAP::NoResponseError # exception value is not used
      Imap::Backup.logger.warn "Folder '#{folder}' does not exist"
      []
    end

    # Fetches one message by UID; returns the attribute hash from the
    # server (body re-tagged as UTF-8 on Ruby >= 1.9), or nil when the
    # folder does not exist.
    def fetch(uid)
      connection.imap.examine(folder)
      message = connection.imap.uid_fetch([uid.to_i], REQUESTED_ATTRIBUTES)[0][1]
      message['RFC822'].force_encoding('utf-8') if RUBY_VERSION > '1.9'
      message
    rescue Net::IMAP::NoResponseError # exception value is not used
      Imap::Backup.logger.warn "Folder '#{folder}' does not exist"
      nil
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
  # NOTE: the two Bundler.require lines above are mutually exclusive
  # alternatives. Having both active required the :assets group in every
  # environment, defeating the precompile-only setup, so the lazy-compile
  # variant is disabled.
end

module Whbab
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :fr

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = 'utf-8'

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    # config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
Remove the lazy asset-compilation line, since assets are precompiled before being served
require File.expand_path('../boot', __FILE__)
require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module Whbab
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # French-language application.
    config.i18n.default_locale = :fr

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = 'utf-8'

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    # config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require core functionalities
require File.expand_path('../../lib/core', __FILE__)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module MonsoonDashboard
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    #config.autoload_paths += %W(#{config.root}/plugins)
    config.autoload_paths << Rails.root.join('lib')

    # Use memory for caching, file cache needs some work for working with docker
    # Not sure if this really makes sense because every Passenger thread will have its own cache
    config.cache_store = :memory_store

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true

    # Debug middlewares are inserted before Rack::Sendfile so they observe the
    # request as early as possible in the stack.
    config.middleware.insert_before Rack::Sendfile, "DebugEnvMiddleware"
    config.middleware.insert_before Rack::Sendfile, "DebugHeadersMiddleware"

    # Prometheus request metrics: Collector records per-request metrics,
    # Exporter serves them (on /metrics by default).
    # NOTE(review): with no label block, the Collector's default labels include
    # the full request path, which can cause unbounded metric-label cardinality
    # on an app with dynamic URLs -- consider restricting the labels.
    require 'prometheus/client/rack/collector'
    config.middleware.insert_after ActionDispatch::DebugExceptions, Prometheus::Client::Rack::Collector
    require 'prometheus/client/rack/exporter'
    config.middleware.insert_after Prometheus::Client::Rack::Collector, Prometheus::Client::Rack::Exporter

    config.middleware.use "RevisionMiddleware"

    ############# ENSURE EDGE MODE FOR IE ###############
    config.action_dispatch.default_headers["X-UA-Compatible"]="IE=edge,chrome=1"

    ############# KEYSTONE ENDPOINT ##############
    # Prefer the AUTHORITY_SERVICE_* service-discovery variables; otherwise fall
    # back to the explicit endpoint URL (may be nil if neither is set).
    config.keystone_endpoint = if ENV['AUTHORITY_SERVICE_HOST'] && ENV['AUTHORITY_SERVICE_PORT']
      proto = ENV['AUTHORITY_SERVICE_PROTO'] || 'http'
      host = ENV['AUTHORITY_SERVICE_HOST']
      port = ENV['AUTHORITY_SERVICE_PORT']
      "#{proto}://#{host}:#{port}/v3"
    else
      ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']
    end

    # Presence of these env vars toggles extra debug output.
    config.debug_api_calls = ENV.has_key?('DEBUG_API_CALLS')
    config.debug_policy_engine = ENV.has_key?('DEBUG_POLICY_ENGINE')

    ############## REGION ###############
    # NOTE(review): the fallback here is an Array of candidate regions while the
    # env var yields a String -- confirm downstream code handles both shapes.
    config.default_region = ENV['MONSOON_DASHBOARD_REGION'] || ['eu-de-1','staging','europe']

    ############# SERVICE USER #############
    config.service_user_id = ENV['MONSOON_OPENSTACK_AUTH_API_USERID']
    config.service_user_password = ENV['MONSOON_OPENSTACK_AUTH_API_PASSWORD']
    config.service_user_domain_name = ENV['MONSOON_OPENSTACK_AUTH_API_DOMAIN']
    config.default_domain = ENV['MONSOON_DASHBOARD_DEFAULT_DOMAIN'] || 'monsoon3'
  end
end
Avoid metric label sprawl
For now we only add the first component of the REQUEST_PATH as a label.
This gives us at least the domain
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require core functionalities
require File.expand_path('../../lib/core', __FILE__)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module MonsoonDashboard
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    #config.autoload_paths += %W(#{config.root}/plugins)
    config.autoload_paths << Rails.root.join('lib')

    # Use memory for caching, file cache needs some work for working with docker
    # Not sure if this really makes sense because every Passenger thread will have its own cache
    config.cache_store = :memory_store

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true

    # Debug middlewares are inserted before Rack::Sendfile so they observe the
    # request as early as possible in the stack.
    config.middleware.insert_before Rack::Sendfile, "DebugEnvMiddleware"
    config.middleware.insert_before Rack::Sendfile, "DebugHeadersMiddleware"

    # Prometheus request metrics. The label block below restricts labels to
    # keep metric cardinality bounded.
    require 'prometheus/client/rack/collector'
    config.middleware.insert_after ActionDispatch::DebugExceptions, Prometheus::Client::Rack::Collector do |env|
      {
        method: env['REQUEST_METHOD'].downcase,
        host: env['HTTP_HOST'].to_s,
        # just take the first component of the path as a label:
        # slice up to (not including) the second '/', or the first 20
        # characters when there is no second '/'.
        # NOTE(review): assumes REQUEST_PATH is always present in env --
        # confirm this holds under every Rack server in use.
        path: env['REQUEST_PATH'][0, env['REQUEST_PATH'].index('/',1) || 20 ],
      }
    end
    require 'prometheus/client/rack/exporter'
    config.middleware.insert_after Prometheus::Client::Rack::Collector, Prometheus::Client::Rack::Exporter

    config.middleware.use "RevisionMiddleware"

    ############# ENSURE EDGE MODE FOR IE ###############
    config.action_dispatch.default_headers["X-UA-Compatible"]="IE=edge,chrome=1"

    ############# KEYSTONE ENDPOINT ##############
    # Prefer the AUTHORITY_SERVICE_* service-discovery variables; otherwise fall
    # back to the explicit endpoint URL (may be nil if neither is set).
    config.keystone_endpoint = if ENV['AUTHORITY_SERVICE_HOST'] && ENV['AUTHORITY_SERVICE_PORT']
      proto = ENV['AUTHORITY_SERVICE_PROTO'] || 'http'
      host = ENV['AUTHORITY_SERVICE_HOST']
      port = ENV['AUTHORITY_SERVICE_PORT']
      "#{proto}://#{host}:#{port}/v3"
    else
      ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']
    end

    # Presence of these env vars toggles extra debug output.
    config.debug_api_calls = ENV.has_key?('DEBUG_API_CALLS')
    config.debug_policy_engine = ENV.has_key?('DEBUG_POLICY_ENGINE')

    ############## REGION ###############
    # NOTE(review): the fallback here is an Array of candidate regions while the
    # env var yields a String -- confirm downstream code handles both shapes.
    config.default_region = ENV['MONSOON_DASHBOARD_REGION'] || ['eu-de-1','staging','europe']

    ############# SERVICE USER #############
    config.service_user_id = ENV['MONSOON_OPENSTACK_AUTH_API_USERID']
    config.service_user_password = ENV['MONSOON_OPENSTACK_AUTH_API_PASSWORD']
    config.service_user_domain_name = ENV['MONSOON_OPENSTACK_AUTH_API_DOMAIN']
    config.default_domain = ENV['MONSOON_DASHBOARD_DEFAULT_DOMAIN'] || 'monsoon3'
  end
end
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)

module Lifestreams
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Generate RSpec files (instead of Test::Unit) from Rails generators.
    config.generators do |g|
      g.test_framework :rspec
    end

    # Deliver mail over SMTP and surface delivery failures instead of
    # swallowing them.
    config.action_mailer.delivery_method = :smtp
    config.action_mailer.raise_delivery_errors = true
  end
end
autoload lib/
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)

module Lifestreams
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Make everything under lib/ autoloadable as well.
    config.autoload_paths << File.join(config.root, "lib")

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Generate RSpec files (instead of Test::Unit) from Rails generators.
    config.generators do |g|
      g.test_framework :rspec
    end

    # Deliver mail over SMTP and surface delivery failures instead of
    # swallowing them.
    config.action_mailer.delivery_method = :smtp
    config.action_mailer.raise_delivery_errors = true
  end
end
|
module InstagramReporter
  # Gem version, following Semantic Versioning.
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = "0.1.5".freeze
end
bumped version to 0.1.6
module InstagramReporter
  # Gem version, following Semantic Versioning.
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = "0.1.6".freeze
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module AmahiHDA
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    # Includes the HDA-specific :passwd variants so they never reach the logs.
    config.filter_parameters += [:password, :passwd, :passwd_confirm]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.1'

    # initialize tabs app variable
    # (presumably populated by plugins at boot -- verify against the plugin loader)
    config.tabs = []
  end
end
############################################
# load all Amahi platform plugins installed
############################################
# Re-open the application class to discover and load installed plugins
# from <Rails.root>/plugins at boot time.
module AmahiHDA
  class Application < Rails::Application
    PLUGIN_LOCATION = File.join(Rails.root, 'plugins')
    amahi_plugins = []
    # Scan plugin directories in sorted (deterministic) order.
    Dir.glob(File.join(PLUGIN_LOCATION, '*')).sort.each do |dir|
      file = "#{dir}/config/amahi-plugin.yml"
      if File.file?(file) and File.readable?(file)
        # NOTE(review): YAML.load on the plugin manifest can instantiate
        # arbitrary Ruby objects; presumably acceptable because plugins are
        # installed locally by the administrator -- confirm.
        plugin = YAML.load(File.read(file)).symbolize_keys
        plugin[:dir] = File.basename(dir)
        amahi_plugins << plugin
        # Make the plugin's lib/ requirable, then load its main class file.
        $LOAD_PATH << "#{dir}/lib"
        Kernel.require plugin[:class].underscore
      end
    end
    # stick them in an app-wide variable for when it's needed by the app
    config.amahi_plugins = amahi_plugins
  end
end
work-around for fedora 19 bug #979133
https://bugzilla.redhat.com/show_bug.cgi?id=979133
# FIXME - temporary work-around for Fedora 19
# (https://bugzilla.redhat.com/show_bug.cgi?id=979133): psych 2.0.0 is not on
# the default load path there, so add its gem locations explicitly.
$LOAD_PATH << '/usr/lib64/gems/ruby/psych-2.0.0/lib' << '/usr/share/gems/gems/psych-2.0.0/lib'

require File.expand_path('../boot', __FILE__)
require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module AmahiHDA
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    # Includes the HDA-specific :passwd variants so they never reach the logs.
    config.filter_parameters += [:password, :passwd, :passwd_confirm]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.1'

    # initialize tabs app variable
    # (presumably populated by plugins at boot -- verify against the plugin loader)
    config.tabs = []
  end
end
############################################
# load all Amahi platform plugins installed
############################################
# Re-open the application class to discover and load installed plugins
# from <Rails.root>/plugins at boot time.
module AmahiHDA
  class Application < Rails::Application
    PLUGIN_LOCATION = File.join(Rails.root, 'plugins')
    amahi_plugins = []
    # Scan plugin directories in sorted (deterministic) order.
    Dir.glob(File.join(PLUGIN_LOCATION, '*')).sort.each do |dir|
      file = "#{dir}/config/amahi-plugin.yml"
      if File.file?(file) and File.readable?(file)
        # NOTE(review): YAML.load on the plugin manifest can instantiate
        # arbitrary Ruby objects; presumably acceptable because plugins are
        # installed locally by the administrator -- confirm.
        plugin = YAML.load(File.read(file)).symbolize_keys
        plugin[:dir] = File.basename(dir)
        amahi_plugins << plugin
        # Make the plugin's lib/ requirable, then load its main class file.
        $LOAD_PATH << "#{dir}/lib"
        Kernel.require plugin[:class].underscore
      end
    end
    # stick them in an app-wide variable for when it's needed by the app
    config.amahi_plugins = amahi_plugins
  end
end
|
require 'rubygems'
require 'sequel'
require 'fileutils'
require 'safe_yaml'
# NOTE: This converter requires Sequel and the MySQL gems.
# The MySQL gem can be difficult to install on OS X. Once you have MySQL
# installed, running the following commands should work:
# $ sudo gem install sequel
# $ sudo gem install mysql -- --with-mysql-config=/usr/local/mysql/bin/mysql_config
module Jekyll
  # WordPress-to-Jekyll migrator: reads posts (and optionally comments,
  # categories and tags) from a WordPress MySQL database and writes one
  # Markdown file with YAML front matter per post into _posts/.
  module WordPress
    # Main migrator function. Call this to perform the migration.
    #
    # dbname:: The name of the database
    # user:: The database user name
    # pass:: The database user's password
    # host:: The address of the MySQL database host. Default: 'localhost'
    # options:: A hash table of configuration options.
    #
    # Supported options are:
    #
    # :table_prefix:: Prefix of database tables used by WordPress.
    #                 Default: 'wp_'
    # :clean_entities:: If true, convert non-ASCII characters to HTML
    #                   entities in the posts, comments, titles, and
    #                   names. Requires the 'htmlentities' gem to
    #                   work. Default: true.
    # :comments:: If true, migrate post comments too. Comments
    #             are saved in the post's YAML front matter.
    #             Default: true.
    # :categories:: If true, save the post's categories in its
    #               YAML front matter.
    # :tags:: If true, save the post's tags in its
    #         YAML front matter.
    # :more_excerpt:: If true, when a post has no excerpt but
    #                 does have a <!-- more --> tag, use the
    #                 preceding post content as the excerpt.
    #                 Default: true.
    # :more_anchor:: If true, convert a <!-- more --> tag into
    #                two HTML anchors with ids "more" and
    #                "more-NNN" (where NNN is the post number).
    #                Default: true.
    # :status:: Array of allowed post statuses. Only
    #           posts with matching status will be migrated.
    #           Known statuses are :publish, :draft, :private,
    #           and :revision. If this is nil or an empty
    #           array, all posts are migrated regardless of
    #           status. Default: [:publish].
    #
    def self.process(dbname, user, pass, host='localhost', options={})
      options = {
        :table_prefix   => 'wp_',
        :clean_entities => true,
        :comments       => true,
        :categories     => true,
        :tags           => true,
        :more_excerpt   => true,
        :more_anchor    => true,
        :status         => [:publish] # :draft, :private, :revision
      }.merge(options)

      if options[:clean_entities]
        begin
          require 'htmlentities'
        rescue LoadError
          STDERR.puts "Could not require 'htmlentities', so the " +
                      ":clean_entities option is now disabled."
          options[:clean_entities] = false
        end
      end

      FileUtils.mkdir_p("_posts")

      db = Sequel.mysql(dbname, :user => user, :password => pass,
                        :host => host, :encoding => 'utf8')

      px = options[:table_prefix]

      # Fetch each post joined with its author's details.
      posts_query = "
        SELECT
          posts.ID            AS `id`,
          posts.guid          AS `guid`,
          posts.post_type     AS `type`,
          posts.post_status   AS `status`,
          posts.post_title    AS `title`,
          posts.post_name     AS `slug`,
          posts.post_date     AS `date`,
          posts.post_content  AS `content`,
          posts.post_excerpt  AS `excerpt`,
          posts.comment_count AS `comment_count`,
          users.display_name  AS `author`,
          users.user_login    AS `author_login`,
          users.user_email    AS `author_email`,
          users.user_url      AS `author_url`
        FROM #{px}posts AS `posts`
          LEFT JOIN #{px}users AS `users`
            ON posts.post_author = users.ID"

      # NOTE: status values are interpolated into the SQL; they come from the
      # caller-supplied :status option (symbols), not from untrusted input.
      if options[:status] and not options[:status].empty?
        status = options[:status][0]
        posts_query << "
        WHERE posts.post_status = '#{status.to_s}'"
        options[:status][1..-1].each do |status|
          posts_query << " OR
          posts.post_status = '#{status.to_s}'"
        end
      end

      db[posts_query].each do |post|
        process_post(post, db, options)
      end
    end

    # Converts one post row into a Markdown file with YAML front matter
    # under _posts/, pulling its categories/tags and comments from +db+
    # according to +options+ (see .process).
    def self.process_post(post, db, options)
      px = options[:table_prefix]

      title = post[:title]
      if options[:clean_entities]
        title = clean_entities(title)
      end

      # Fall back to a slug derived from the title when WordPress has none.
      slug = post[:slug]
      if !slug or slug.empty?
        slug = sluggify(title)
      end

      date = post[:date] || Time.now
      name = "%02d-%02d-%02d-%s.markdown" % [date.year, date.month,
                                             date.day, slug]

      content = post[:content].to_s
      if options[:clean_entities]
        content = clean_entities(content)
      end

      excerpt = post[:excerpt].to_s

      more_index = content.index(/<!-- *more *-->/)
      # FIXME: more_anchor is never assigned a non-nil value, so the
      # 'more_anchor' front-matter key is always dropped by delete_if below.
      more_anchor = nil
      if more_index
        if options[:more_excerpt] and
           (post[:excerpt].nil? or post[:excerpt].empty?)
          excerpt = content[0...more_index]
        end
        if options[:more_anchor]
          content.sub!(/<!-- *more *-->/,
                       "<a id=\"more\"></a>" +
                       "<a id=\"more-#{post[:id]}\"></a>")
        end
      end

      # Collect categories and tags via the WP taxonomy tables.
      categories = []
      tags = []
      if options[:categories] or options[:tags]
        cquery =
          "SELECT
             terms.name AS `name`,
             ttax.taxonomy AS `type`
           FROM
             #{px}terms AS `terms`,
             #{px}term_relationships AS `trels`,
             #{px}term_taxonomy AS `ttax`
           WHERE
             trels.object_id = '#{post[:id]}' AND
             trels.term_taxonomy_id = ttax.term_taxonomy_id AND
             terms.term_id = ttax.term_id"
        db[cquery].each do |term|
          if options[:categories] and term[:type] == "category"
            if options[:clean_entities]
              categories << clean_entities(term[:name])
            else
              categories << term[:name]
            end
          elsif options[:tags] and term[:type] == "post_tag"
            if options[:clean_entities]
              tags << clean_entities(term[:name])
            else
              tags << term[:name]
            end
          end
        end
      end

      # Collect approved (non-spam) comments, sorted by comment id.
      comments = []
      if options[:comments] and post[:comment_count].to_i > 0
        cquery =
          "SELECT
             comment_ID AS `id`,
             comment_author AS `author`,
             comment_author_email AS `author_email`,
             comment_author_url AS `author_url`,
             comment_date AS `date`,
             comment_date_gmt AS `date_gmt`,
             comment_content AS `content`
           FROM #{px}comments
           WHERE
             comment_post_ID = '#{post[:id]}' AND
             comment_approved != 'spam'"
        db[cquery].each do |comment|
          comcontent = comment[:content].to_s
          if comcontent.respond_to?(:force_encoding)
            comcontent.force_encoding("UTF-8")
          end
          if options[:clean_entities]
            comcontent = clean_entities(comcontent)
          end
          comauthor = comment[:author].to_s
          if options[:clean_entities]
            comauthor = clean_entities(comauthor)
          end
          comments << {
            'id'           => comment[:id].to_i,
            'author'       => comauthor,
            'author_email' => comment[:author_email].to_s,
            'author_url'   => comment[:author_url].to_s,
            'date'         => comment[:date].to_s,
            'date_gmt'     => comment[:date_gmt].to_s,
            'content'      => comcontent,
          }
        end
        comments.sort!{ |a,b| a['id'] <=> b['id'] }
      end

      # Get the relevant fields as a hash, delete empty fields and
      # convert to YAML for the header.
      data = {
        'layout'        => post[:type].to_s,
        'status'        => post[:status].to_s,
        'published'     => (post[:status].to_s == "publish"),
        'title'         => title.to_s,
        'author'        => post[:author].to_s,
        'author_login'  => post[:author_login].to_s,
        'author_email'  => post[:author_email].to_s,
        'author_url'    => post[:author_url].to_s,
        'excerpt'       => excerpt,
        'more_anchor'   => more_anchor,
        'wordpress_id'  => post[:id],
        'wordpress_url' => post[:guid].to_s,
        'date'          => date,
        'categories'    => options[:categories] ? categories : nil,
        'tags'          => options[:tags] ? tags : nil,
        'comments'      => options[:comments] ? comments : nil,
      }.delete_if { |k,v| v.nil? || v == '' }.to_yaml

      # Write out the data and content to file
      File.open("_posts/#{name}", "w") do |f|
        f.puts data
        f.puts "---"
        f.puts content
      end
    end

    # Encodes non-ASCII characters in +text+ as named HTML entities, then
    # restores the characters that would otherwise corrupt HTML markup.
    # (The literal entity names below were corrupted to their decoded form
    # in a previous revision, making the gsub! calls no-ops; restored here.)
    def self.clean_entities( text )
      if text.respond_to?(:force_encoding)
        text.force_encoding("UTF-8")
      end
      text = HTMLEntities.new.encode(text, :named)
      # We don't want to convert these, it would break all
      # HTML tags in the post and comments.
      text.gsub!("&amp;", "&")
      text.gsub!("&lt;", "<")
      text.gsub!("&gt;", ">")
      text.gsub!("&quot;", '"')
      text.gsub!("&apos;", "'")
      text.gsub!("&#47;", "/")
      text
    end

    # Turns a title into a URL-safe slug: lowercase, non-alphanumeric runs
    # collapsed to single hyphens. Uses the optional 'unidecode' gem to
    # transliterate non-ASCII characters when available.
    def self.sluggify( title )
      begin
        require 'unidecode'
        title = title.to_ascii
      rescue LoadError
        STDERR.puts "Could not require 'unidecode'. If your post titles have non-ASCII characters, you could get nicer permalinks by installing unidecode."
      end
      title.downcase.gsub(/[^0-9A-Za-z]+/, " ").strip.gsub(" ", "-")
    end
  end
end
Fixes YAML encoding in wordpress migration. Fixes mojombo/jekyll#644.
require 'rubygems'
require 'sequel'
require 'fileutils'
require 'psych'
require 'safe_yaml'
# NOTE: This converter requires Sequel and the MySQL gems.
# The MySQL gem can be difficult to install on OS X. Once you have MySQL
# installed, running the following commands should work:
# $ sudo gem install sequel
# $ sudo gem install mysql -- --with-mysql-config=/usr/local/mysql/bin/mysql_config
module Jekyll
module WordPress
# Main migrator function. Call this to perform the migration.
#
# dbname:: The name of the database
# user:: The database user name
# pass:: The database user's password
# host:: The address of the MySQL database host. Default: 'localhost'
# options:: A hash table of configuration options.
#
# Supported options are:
#
# :table_prefix:: Prefix of database tables used by WordPress.
# Default: 'wp_'
# :clean_entities:: If true, convert non-ASCII characters to HTML
# entities in the posts, comments, titles, and
# names. Requires the 'htmlentities' gem to
# work. Default: true.
# :comments:: If true, migrate post comments too. Comments
# are saved in the post's YAML front matter.
# Default: true.
# :categories:: If true, save the post's categories in its
# YAML front matter.
# :tags:: If true, save the post's tags in its
# YAML front matter.
# :more_excerpt:: If true, when a post has no excerpt but
# does have a <!-- more --> tag, use the
# preceding post content as the excerpt.
# Default: true.
# :more_anchor:: If true, convert a <!-- more --> tag into
# two HTML anchors with ids "more" and
# "more-NNN" (where NNN is the post number).
# Default: true.
# :status:: Array of allowed post statuses. Only
# posts with matching status will be migrated.
# Known statuses are :publish, :draft, :private,
# and :revision. If this is nil or an empty
# array, all posts are migrated regardless of
# status. Default: [:publish].
#
def self.process(dbname, user, pass, host='localhost', options={})
options = {
:table_prefix => 'wp_',
:clean_entities => true,
:comments => true,
:categories => true,
:tags => true,
:more_excerpt => true,
:more_anchor => true,
:status => [:publish] # :draft, :private, :revision
}.merge(options)
if options[:clean_entities]
begin
require 'htmlentities'
rescue LoadError
STDERR.puts "Could not require 'htmlentities', so the " +
":clean_entities option is now disabled."
options[:clean_entities] = false
end
end
FileUtils.mkdir_p("_posts")
db = Sequel.mysql(dbname, :user => user, :password => pass,
:host => host, :encoding => 'utf8')
px = options[:table_prefix]
posts_query = "
SELECT
posts.ID AS `id`,
posts.guid AS `guid`,
posts.post_type AS `type`,
posts.post_status AS `status`,
posts.post_title AS `title`,
posts.post_name AS `slug`,
posts.post_date AS `date`,
posts.post_content AS `content`,
posts.post_excerpt AS `excerpt`,
posts.comment_count AS `comment_count`,
users.display_name AS `author`,
users.user_login AS `author_login`,
users.user_email AS `author_email`,
users.user_url AS `author_url`
FROM #{px}posts AS `posts`
LEFT JOIN #{px}users AS `users`
ON posts.post_author = users.ID"
if options[:status] and not options[:status].empty?
status = options[:status][0]
posts_query << "
WHERE posts.post_status = '#{status.to_s}'"
options[:status][1..-1].each do |status|
posts_query << " OR
posts.post_status = '#{status.to_s}'"
end
end
db[posts_query].each do |post|
process_post(post, db, options)
end
end
def self.process_post(post, db, options)
px = options[:table_prefix]
title = post[:title]
if options[:clean_entities]
title = clean_entities(title)
end
slug = post[:slug]
if !slug or slug.empty?
slug = sluggify(title)
end
date = post[:date] || Time.now
name = "%02d-%02d-%02d-%s.markdown" % [date.year, date.month,
date.day, slug]
content = post[:content].to_s
if options[:clean_entities]
content = clean_entities(content)
end
excerpt = post[:excerpt].to_s
more_index = content.index(/<!-- *more *-->/)
more_anchor = nil
if more_index
if options[:more_excerpt] and
(post[:excerpt].nil? or post[:excerpt].empty?)
excerpt = content[0...more_index]
end
if options[:more_anchor]
more_link = "more"
content.sub!(/<!-- *more *-->/,
"<a id=\"more\"></a>" +
"<a id=\"more-#{post[:id]}\"></a>")
end
end
categories = []
tags = []
if options[:categories] or options[:tags]
cquery =
"SELECT
terms.name AS `name`,
ttax.taxonomy AS `type`
FROM
#{px}terms AS `terms`,
#{px}term_relationships AS `trels`,
#{px}term_taxonomy AS `ttax`
WHERE
trels.object_id = '#{post[:id]}' AND
trels.term_taxonomy_id = ttax.term_taxonomy_id AND
terms.term_id = ttax.term_id"
db[cquery].each do |term|
if options[:categories] and term[:type] == "category"
if options[:clean_entities]
categories << clean_entities(term[:name])
else
categories << term[:name]
end
elsif options[:tags] and term[:type] == "post_tag"
if options[:clean_entities]
tags << clean_entities(term[:name])
else
tags << term[:name]
end
end
end
end
comments = []
if options[:comments] and post[:comment_count].to_i > 0
cquery =
"SELECT
comment_ID AS `id`,
comment_author AS `author`,
comment_author_email AS `author_email`,
comment_author_url AS `author_url`,
comment_date AS `date`,
comment_date_gmt AS `date_gmt`,
comment_content AS `content`
FROM #{px}comments
WHERE
comment_post_ID = '#{post[:id]}' AND
comment_approved != 'spam'"
db[cquery].each do |comment|
comcontent = comment[:content].to_s
if comcontent.respond_to?(:force_encoding)
comcontent.force_encoding("UTF-8")
end
if options[:clean_entities]
comcontent = clean_entities(comcontent)
end
comauthor = comment[:author].to_s
if options[:clean_entities]
comauthor = clean_entities(comauthor)
end
comments << {
'id' => comment[:id].to_i,
'author' => comauthor,
'author_email' => comment[:author_email].to_s,
'author_url' => comment[:author_url].to_s,
'date' => comment[:date].to_s,
'date_gmt' => comment[:date_gmt].to_s,
'content' => comcontent,
}
end
comments.sort!{ |a,b| a['id'] <=> b['id'] }
end
# Get the relevant fields as a hash, delete empty fields and
# convert to YAML for the header.
data = {
'layout' => post[:type].to_s,
'status' => post[:status].to_s,
'published' => (post[:status].to_s == "publish"),
'title' => title.to_s,
'author' => post[:author].to_s,
'author_login' => post[:author_login].to_s,
'author_email' => post[:author_email].to_s,
'author_url' => post[:author_url].to_s,
'excerpt' => excerpt,
'more_anchor' => more_anchor,
'wordpress_id' => post[:id],
'wordpress_url' => post[:guid].to_s,
'date' => date,
'categories' => options[:categories] ? categories : nil,
'tags' => options[:tags] ? tags : nil,
'comments' => options[:comments] ? comments : nil,
}.delete_if { |k,v| v.nil? || v == '' }.to_yaml
# Write out the data and content to file
File.open("_posts/#{name}", "w") do |f|
f.puts data
f.puts "---"
f.puts content
end
end
def self.clean_entities( text )
if text.respond_to?(:force_encoding)
text.force_encoding("UTF-8")
end
text = HTMLEntities.new.encode(text, :named)
# We don't want to convert these, it would break all
# HTML tags in the post and comments.
text.gsub!("&", "&")
text.gsub!("<", "<")
text.gsub!(">", ">")
text.gsub!(""", '"')
text.gsub!("'", "'")
text.gsub!("/", "/")
text
end
def self.sluggify( title )
begin
require 'unidecode'
title = title.to_ascii
rescue LoadError
STDERR.puts "Could not require 'unidecode'. If your post titles have non-ASCII characters, you could get nicer permalinks by installing unidecode."
end
title.downcase.gsub(/[^0-9A-Za-z]+/, " ").strip.gsub(" ", "-")
end
end
end
|
require File.expand_path('../boot', __FILE__)
# see http://mongoid.org/docs/installation/configuration.html
# require 'rails/all'
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module TomatoesApp
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
config.action_view.javascript_expansions[:defaults] = %w()
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
end
end
disable mongoid model preloading
see http://mongoid.org/docs/rails/railties.html
require File.expand_path('../boot', __FILE__)
# see http://mongoid.org/docs/installation/configuration.html
# require 'rails/all'
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module TomatoesApp
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
config.action_view.javascript_expansions[:defaults] = %w()
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# disable mongoid model preloading (see http://mongoid.org/docs/rails/railties.html)
config.mongoid.preload_models = false
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module CDB3
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.action_controller.permit_all_parameters = true
console do
require "pry"
config.console = Pry
end
end
end
attempt font assets fix
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module CDB3
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.action_controller.permit_all_parameters = true
config.assets.paths << Rails.root.join('vendor', 'assets', 'fonts')
console do
require "pry"
config.console = Pry
end
end
end
|
require File.expand_path('../boot', __FILE__)
# Require only the libraries we are using
require 'active_model/railtie'
require 'active_record/railtie'
require 'action_controller/railtie'
# require 'action_mailer/railtie'
# require 'active_resource/railtie'
require 'sprockets/railtie'
Bundler.require(:default, Rails.env)
require 'carrierwave/orm/activerecord'
require 'dotenv'
Dotenv.load
module ZineDistro
# ZineDistro configuration
class Application < Rails::Application
config.generators do |g|
g.test_framework :rspec,
fixtures: true,
view_specs: false,
helper_specs: false,
routing_specs: false,
controller_specs: true,
feature_specs: true
g.fixture_replacement :factory_girl, dir: 'spec/factories'
g.helper false
g.template_engine :slim
g.stylesheets false
g.stylesheet_engine = :sass
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = 'utf-8'
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
I18n.enforce_available_locales = true
config.i18n.available_locales = [:en]
config.i18n.default_locale = :en
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Version of your assets, change this if you want to expire all your assets
config.assets.enabled = true
config.assets.version = '1.0'
config.assets.initialize_on_precompile = false
# Add the fonts to asset pipeline
config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
config.assets.precompile += %w( .svg .eot .woff .ttf )
end
end
Switch generator from slim to haml
require File.expand_path('../boot', __FILE__)
# Require only the libraries we are using
require 'active_model/railtie'
require 'active_record/railtie'
require 'action_controller/railtie'
# require 'action_mailer/railtie'
# require 'active_resource/railtie'
require 'sprockets/railtie'
Bundler.require(:default, Rails.env)
require 'carrierwave/orm/activerecord'
require 'dotenv'
Dotenv.load
module ZineDistro
  # ZineDistro configuration
  class Application < Rails::Application
    config.generators do |g|
      g.test_framework :rspec,
        fixtures: true,
        view_specs: false,
        helper_specs: false,
        routing_specs: false,
        controller_specs: true,
        feature_specs: true
      g.fixture_replacement :factory_girl, dir: 'spec/factories'
      g.helper false
      g.template_engine :haml
      g.stylesheets false
      g.stylesheet_engine = :sass
    end
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = 'utf-8'
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    I18n.enforce_available_locales = true
    config.i18n.available_locales = [:en]
    config.i18n.default_locale = :en
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Version of your assets, change this if you want to expire all your assets
    config.assets.enabled = true
    config.assets.version = '1.0'
    config.assets.initialize_on_precompile = false
    # Add the fonts to asset pipeline
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
    # Fix: bare extensions like ".svg" never match a logical path —
    # Sprockets compares precompile strings with fnmatch — so the font
    # files were not being precompiled. Use "*.ext" globs instead.
    config.assets.precompile += %w( *.svg *.eot *.woff *.ttf )
  end
end
|
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Railsgirls
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
end
end
Fix error: uninitialized constant URI::Generic
require_relative 'boot'
require 'uri'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Railsgirls
  # Top-level Rails application class; relies entirely on framework
  # defaults (no custom configuration beyond environment files).
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
  end
end
|
require_relative 'boot'
require 'rails/all'
require 'multi_json'
if defined?(Bundler)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
end
module UnipeptWeb
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# JavaScript files you want as :defaults (application.js is always included).
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = 'utf-8'
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
config.versions = {
unipept: '4.3.2',
gem: '2.0',
uniprot: '2020.01'
}
config.api_host = 'api.unipept.ugent.be'
MultiJson.use :Oj
end
end
Bump version number
require_relative 'boot'
require 'rails/all'
require 'multi_json'
if defined?(Bundler)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
end
module UnipeptWeb
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# JavaScript files you want as :defaults (application.js is always included).
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = 'utf-8'
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
config.versions = {
unipept: '4.3.3',
gem: '2.1.1',
uniprot: '2020.01'
}
config.api_host = 'api.unipept.ugent.be'
MultiJson.use :Oj
end
end
|
# Version constant for the Drift gem.
module Drift
  VERSION = '0.2.0'
end
Version 0.3.0
# Version constant for the Drift gem.
module Drift
  VERSION = '0.3.0'
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module BasementWeatherServer
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
Set Timezone to Atlantic time
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module BasementWeatherServer
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Atlantic Time (Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
|
require 'rubygems'
require 'ffi-rzmq'
require 'eventmachine'
require 'uri'
require 'resolv'
require 'ipaddr'
require 'dripdrop/message'
require 'dripdrop/node/nodelet'
require 'dripdrop/handlers/base'
require 'dripdrop/handlers/zeromq'
require 'dripdrop/handlers/websockets'
require 'dripdrop/handlers/http'
class DripDrop
class Node
ZCTX = ZMQ::Context.new 1
attr_reader :zm_reactor, :routing, :nodelets
attr_accessor :debug
def initialize(opts={},&block)
@block = block
@thread = nil # Thread containing the reactors
@routing = {} # Routing table
@debug = opts[:debug]
@recipients_for = {}
@handler_default_opts = {:debug => @debug}
@nodelets = {} # Cache of registered nodelets
@zctx = ZCTX
end
# Starts the reactors and runs the block passed to initialize.
# This is non-blocking.
def start
@thread = Thread.new do
EM.error_handler {|e| self.error_handler e}
EM.run { action }
end
end
# When subclassing +DripDrop::Node+ you probably want to define this method
# Otherwise it will attempt to run the @block passed into +DripDrop::Node.new+
def action
if @block
self.instance_eval(&@block)
else
raise "Could not start, no block or specified"
end
end
# If the reactor has started, this blocks until the thread
# running the reactor joins. This should block forever
# unless +stop+ is called.
def join
if @thread
@thread.join
else
raise "Can't join on a node that isn't yet started"
end
end
# Blocking version of start, equivalent to +start+ then +join+
def start!
self.start
self.join
end
# Stops the reactors. If you were blocked on #join, that will unblock.
def stop
EM.stop
end
# Defines a new route. Routes are the recommended way to instantiate
# handlers. For example:
#
# route :stats_pub, :zmq_publish, 'tcp://127.0.0.1:2200', :bind
# route :stats_sub, :zmq_subscribe, stats_pub.address, :connect
#
# Will make the following methods available within the reactor block:
# stats_pub # A regular zmq_publish handler
# :stats_sub # A regular zmq_subscribe handler
#
# See the docs for +routes_for+ for more info in grouping routes for
# nodelets and maintaining sanity in larger apps
def route(name,handler_type,*handler_args)
route_full(nil, name, handler_type, *handler_args)
end
# Probably not useful for most, apps. This is used internally to
# create a route for a given nodelet.
def route_full(nodelet, name, handler_type, *handler_args)
# If we're in a route_for block, prepend appropriately
full_name = (nodelet && nodelet.name) ? "#{nodelet.name}_#{name}".to_sym : name
handler = self.send(handler_type, *handler_args)
@routing[full_name] = handler
# Define the route name as a singleton method
(class << self; self; end).class_eval do
define_method(full_name) { handler }
end
handler
end
# DEPRECATED, will be deleted in 0.8
def routes_for(nodelet_name,&block)
$stderr.write "routes_for is now deprecated, use nodelet instead"
nlet = nodelet(nodelet_name,&block)
block.call(nlet)
end
# Nodelets are a way of segmenting a DripDrop::Node. This can be used
# for both organization and deployment. One might want the production
# deployment of an app to be broken across multiple servers or processes
# for instance:
#
# nodelet :heartbeat do |nlet|
# nlet.route :ticker, :zmq_publish, 'tcp://127.0.0.1', :bind
# EM::PeriodicalTimer.new(1) do
# nlet.ticker.send_message(:name => 'tick')
# end
# end
#
# Nodelets can also be subclassed, for instance:
#
# class SpecialNodelet < DripDrop::Node::Nodelet
# def action
# nlet.route :ticker, :zmq_publish, 'tcp://127.0.0.1', :bind
# EM::PeriodicalTimer.new(1) do
# nlet.ticker.send_message(:name => 'tick')
# end
# end
# end
#
# nodelet :heartbeat, SpecialNodelet
#
# If you specify a block, Nodelet#action will be ignored and the block
# will be run
def nodelet(name,klass=Nodelet,&block)
nlet = @nodelets[name] ||= klass.new(self,name,routing)
if block
block.call(nlet)
else
nlet.action
end
nlet
end
# Creates a ZMQ::SUB type socket. Can only receive messages via +on_recv+.
# zmq_subscribe sockets have a +topic_filter+ option, which restricts which
# messages they can receive. It takes a regexp as an option.
def zmq_subscribe(address,socket_ctype,opts={},&block)
zmq_handler(DripDrop::ZMQSubHandler,ZMQ::SUB,address,socket_ctype,opts)
end
# Creates a ZMQ::PUB type socket, can only send messages via +send_message+
def zmq_publish(address,socket_ctype,opts={})
zmq_handler(DripDrop::ZMQPubHandler,ZMQ::PUB,address,socket_ctype,opts)
end
# Creates a ZMQ::PULL type socket. Can only receive messages via +on_recv+
def zmq_pull(address,socket_ctype,opts={},&block)
zmq_handler(DripDrop::ZMQPullHandler,ZMQ::PULL,address,socket_ctype,opts)
end
# Creates a ZMQ::PUSH type socket, can only send messages via +send_message+
#
# Params:
#   address      - transport address string
#   socket_ctype - :bind or :connect
#   opts         - handler options merged over the node defaults
def zmq_push(address,socket_ctype,opts={})
  zmq_handler(DripDrop::ZMQPushHandler,ZMQ::PUSH,address,socket_ctype,opts)
end
# Creates a ZMQ::XREP type socket, both sends and receives. XREP sockets are
# extremely powerful, so their functionality is currently limited. XREP
# sockets in DripDrop can reply to the original source of the message.
#
# Receiving with XREP sockets in DripDrop is different than other types of sockets, on_recv
# passes 2 arguments to its callback, +message+, and +response+. A minimal example is shown below:
#
#   zmq_xrep(z_addr, :bind).on_recv do |message,response|
#     response.send_message(message)
#   end
#
def zmq_xrep(address,socket_ctype,opts={})
  zmq_handler(DripDrop::ZMQXRepHandler,ZMQ::XREP,address,socket_ctype,opts)
end
# Creates a ZMQ::XREQ type socket (the requesting counterpart of XREP).
# See the documentation for +zmq_xrep+ for more info
def zmq_xreq(address,socket_ctype,opts={})
  zmq_handler(DripDrop::ZMQXReqHandler,ZMQ::XREQ,address,socket_ctype,opts)
end
# Binds an EM websocket connection to +address+. takes blocks for
# +on_open+, +on_recv+, +on_close+ and +on_error+.
#
# For example +on_recv+ could be used to echo incoming messages thusly:
#   websocket(addr).on_open {|conn|
#     conn.send_message(:name => 'ws_open_ack')
#   }.on_recv {|msg,conn|
#     conn.send(msg)
#   }.on_close {|conn|
#   }.on_error {|reason,conn|
#   }
#
# The +conn+ object that's passed into the handlers is not
# the +DripDrop::WebSocketHandler+ object, but an em-websocket object.
def websocket(address,opts={})
  # +address+ must be a parseable URI (e.g. ws://127.0.0.1:8080)
  uri = URI.parse(address)
  h_opts = handler_opts_given(opts)
  DripDrop::WebSocketHandler.new(uri,h_opts)
end
# Starts a new Thin HTTP server listening on address.
# Can have an +on_recv+ handler that gets passed +msg+ and +response+ args.
#   http_server(addr) {|msg,response| response.send_message(msg)}
#
# Params:
#   address - URL string the server listens on
#   opts    - handler options merged over the node defaults
#   block   - forwarded to the HTTPServerHandler
def http_server(address,opts={},&block)
  uri = URI.parse(address)
  h_opts = handler_opts_given(opts)
  DripDrop::HTTPServerHandler.new(uri, h_opts,&block)
end
# An EM HTTP client.
# Example:
#   client = http_client(addr)
#   client.send_message(:name => 'name', :body => 'hi') do |resp_msg|
#     puts resp_msg.inspect
#   end
#
# +address+ is parsed as a URI and handed, together with the node's default
# handler options merged with +opts+, to the HTTP client handler.
def http_client(address,opts={})
  DripDrop::HTTPClientHandler.new(URI.parse(address), handler_opts_given(opts))
end
# An inprocess pub/sub queue that works similarly to EM::Channel,
# but has manually specified identifiers for subscribers, letting you
# delete subscribers without any id bookkeeping.
#
# Broadcasts +data+ to every callback registered (via +recv_internal+)
# under the channel +dest+.
#
# Returns false when nobody is subscribed to +dest+, otherwise the
# array of callbacks that were invoked.
def send_internal(dest,data)
  registered = @recipients_for[dest]
  return false unless registered
  callbacks = registered.values
  return false unless callbacks
  callbacks.each { |callback| callback.call(data) }
end
# Defines a subscriber to the channel +dest+, to receive messages from
# +send_internal+. +identifier+ uniquely names this receiver within the
# channel and can later be passed to +remove_recv_internal+.
def recv_internal(dest,identifier,&block)
  channel = @recipients_for[dest]
  if channel
    channel[identifier] = block
  else
    @recipients_for[dest] = { identifier => block }
  end
end
# Deletes a subscriber to the channel +dest+, previously registered under
# +identifier+ by +recv_internal+.
#
# Returns false when the channel doesn't exist, otherwise the deleted
# callback (or nil when the identifier was unknown).
def remove_recv_internal(dest,identifier)
  channel = @recipients_for[dest]
  channel ? channel.delete(identifier) : false
end
# Catch all error handler. Writes the exception class, message and
# tab-indented backtrace to stderr. Installed as the EventMachine error
# handler when the node is started.
# NOTE(review): no trailing newline is written after the backtrace.
def error_handler(e)
  $stderr.write "#{e.class}: #{e.message}\n\t#{e.backtrace.join("\n\t")}"
end
private
# Builds a ZeroMQ handler of class +klass+ wired into the EM reactor.
#
# Params:
#   klass        - handler class, e.g. DripDrop::ZMQPubHandler
#   sock_type    - ZMQ socket constant, e.g. ZMQ::PUB
#   address      - transport address string, passed verbatim to EM::ZeroMQ
#   socket_ctype - :bind or :connect
#   opts         - handler options merged over the node defaults
#
# The previous implementation parsed +address+, resolved 'localhost' via
# DNS and assembled a z_addr string — but none of those values were ever
# used; the connection has always been created with the raw +address+.
# That dead code (and its per-call DNS lookup) is removed; the connection
# setup is unchanged.
def zmq_handler(klass, sock_type, address, socket_ctype, opts={})
  h_opts = handler_opts_given(opts)
  connection = EM::ZeroMQ.create @zctx, sock_type, socket_ctype, address, klass.new(h_opts)
  handler = connection.handler
  handler.connection = connection
  handler.post_setup
  handler
end
# Returns a new Hash: the node-wide default handler options with +opts+
# merged on top (caller-supplied keys win). Neither input is mutated.
def handler_opts_given(opts)
  defaults = @handler_default_opts
  defaults.merge(opts)
end
end
end
# Syntax cleanup (commit note retained as a comment; the bare text was not valid Ruby)
require 'rubygems'
require 'ffi-rzmq'
require 'eventmachine'
require 'uri'
require 'resolv'
require 'ipaddr'
require 'dripdrop/message'
require 'dripdrop/node/nodelet'
require 'dripdrop/handlers/base'
require 'dripdrop/handlers/zeromq'
require 'dripdrop/handlers/websockets'
require 'dripdrop/handlers/http'
class DripDrop
class Node
ZCTX = ZMQ::Context.new 1
attr_reader :zm_reactor, :routing, :nodelets
attr_accessor :debug
# Builds a node. The block, if given, is instance_eval'd by +action+ when
# the reactor starts.
#
# Params:
#   opts  - :debug toggles handler debug output
#   block - reactor setup code, run in this node's context
def initialize(opts={},&block)
  @block = block
  @thread = nil # Thread containing the reactors
  @routing = {} # Routing table
  @debug = opts[:debug]
  @recipients_for = {} # Channel name => {identifier => callback} for send/recv_internal
  @handler_default_opts = {:debug => @debug}
  @nodelets = {} # Cache of registered nodelets
  @zctx = ZCTX # Shared, process-wide ZMQ context
end
# Starts the reactors and runs the block passed to initialize.
# This is non-blocking: the EM reactor runs on a dedicated thread
# (kept in @thread so +join+/+stop+ can reach it), with +error_handler+
# installed as the EM catch-all.
def start
  @thread = Thread.new do
    EM.error_handler {|e| self.error_handler e}
    EM.run { action }
  end
end
# Blocking version of +start+: kicks off the reactor thread, then blocks
# until it finishes (i.e. until +stop+ is called).
def start!
  start
  join
end
# Stops the reactors. If you were blocked on #join, that will unblock.
def stop
  EM.stop
end
# When subclassing +DripDrop::Node+ you probably want to override this
# method. Otherwise it instance_evals the block passed into
# +DripDrop::Node.new+ in this node's context.
#
# Raises a RuntimeError when there is neither a block nor an overridden
# #action. (The original error string was garbled: "no block or
# specified"; it now names both missing pieces.)
def action
  if @block
    self.instance_eval(&@block)
  else
    raise "Could not start, no block or #action specified"
  end
end
# If the reactor has started, this blocks until the thread
# running the reactor joins. This should block forever
# unless +stop+ is called.
#
# Raises a RuntimeError if called before +start+ (when @thread is nil).
def join
  if @thread
    @thread.join
  else
    raise "Can't join on a node that isn't yet started"
  end
end
# Defines a new route. Routes are the recommended way to instantiate
# handlers. For example:
#
#   route :stats_pub, :zmq_publish, 'tcp://127.0.0.1:2200', :bind
#   route :stats_sub, :zmq_subscribe, stats_pub.address, :connect
#
# Will make the following methods available within the reactor block:
#   stats_pub # A regular zmq_publish handler
#   stats_sub # A regular zmq_subscribe handler
#
# See the docs for +routes_for+ for more info in grouping routes for
# nodelets and maintaining sanity in larger apps
def route(name,handler_type,*handler_args)
  # Nil nodelet => no name prefixing; see +route_full+.
  route_full(nil, name, handler_type, *handler_args)
end
# Probably not useful for most, apps. This is used internally to
# create a route for a given nodelet.
#
# Params:
#   nodelet      - owning nodelet, or nil for a plain top-level route
#   name         - route name; prefixed with "<nodelet.name>_" when owned
#   handler_type - a builder method name on this node (:zmq_publish, ...)
#   handler_args - forwarded to that builder
#
# Returns the created handler.
def route_full(nodelet, name, handler_type, *handler_args)
  # If we're in a route_for block, prepend appropriately
  full_name = (nodelet && nodelet.name) ? "#{nodelet.name}_#{name}".to_sym : name
  # Dispatch to the builder (e.g. zmq_publish) and remember the handler.
  handler = self.send(handler_type, *handler_args)
  @routing[full_name] = handler
  # Define the route name as a singleton method on this node instance only.
  (class << self; self; end).class_eval do
    define_method(full_name) { handler }
  end
  handler
end
# DEPRECATED, will be deleted in 0.8
#
# Delegates to +nodelet+, which already invokes the given block with the
# nodelet instance. The old implementation called the block a *second* time
# here, so every route defined in the block was registered twice; that
# double invocation is removed. Returns the nodelet.
def routes_for(nodelet_name, &block)
  $stderr.write "routes_for is now deprecated, use nodelet instead"
  nodelet(nodelet_name, &block)
end
# Nodelets are a way of segmenting a DripDrop::Node. This can be used
# for both organization and deployment. One might want the production
# deployment of an app to be broken across multiple servers or processes
# for instance:
#
# nodelet :heartbeat do |nlet|
# nlet.route :ticker, :zmq_publish, 'tcp://127.0.0.1', :bind
# EM::PeriodicalTimer.new(1) do
# nlet.ticker.send_message(:name => 'tick')
# end
# end
#
# Nodelets can also be subclassed, for instance:
#
# class SpecialNodelet < DripDrop::Node::Nodelet
# def action
# nlet.route :ticker, :zmq_publish, 'tcp://127.0.0.1', :bind
# EM::PeriodicalTimer.new(1) do
# nlet.ticker.send_message(:name => 'tick')
# end
# end
# end
#
# nodelet :heartbeat, SpecialNodelet
#
# If you specify a block, Nodelet#action will be ignored and the block
# will be run
def nodelet(name,klass=Nodelet,&block)
nlet = @nodelets[name] ||= klass.new(self,name,routing)
if block
block.call(nlet)
else
nlet.action
end
nlet
end
# Creates a ZMQ::SUB type socket. Can only receive messages via +on_recv+.
# zmq_subscribe sockets have a +topic_filter+ option, which restricts which
# messages they can receive. It takes a regexp as an option.
#
# The explicit &block parameter was accepted but never used, which was
# misleading; it has been dropped. Callers that pass a block are unaffected
# (Ruby ignores an unclaimed block), exactly as before.
def zmq_subscribe(address,socket_ctype,opts={})
  zmq_handler(DripDrop::ZMQSubHandler,ZMQ::SUB,address,socket_ctype,opts)
end
# Creates a ZMQ::PUB type socket, can only send messages via +send_message+
def zmq_publish(address,socket_ctype,opts={})
zmq_handler(DripDrop::ZMQPubHandler,ZMQ::PUB,address,socket_ctype,opts)
end
# Creates a ZMQ::PULL type socket. Can only receive messages via +on_recv+
#
# The explicit &block parameter was accepted but never used; it has been
# dropped (backward compatible — an unclaimed block is simply ignored).
def zmq_pull(address,socket_ctype,opts={})
  zmq_handler(DripDrop::ZMQPullHandler,ZMQ::PULL,address,socket_ctype,opts)
end
# Creates a ZMQ::PUSH type socket, can only send messages via +send_message+
def zmq_push(address,socket_ctype,opts={})
zmq_handler(DripDrop::ZMQPushHandler,ZMQ::PUSH,address,socket_ctype,opts)
end
# Creates a ZMQ::XREP type socket, both sends and receivesc XREP sockets are extremely
# powerful, so their functionality is currently limited. XREP sockets in DripDrop can reply
# to the original source of the message.
#
# Receiving with XREP sockets in DripDrop is different than other types of sockets, on_recv
# passes 2 arguments to its callback, +message+, and +response+. A minimal example is shown below:
#
#
# zmq_xrep(z_addr, :bind).on_recv do |message,response|
# response.send_message(message)
# end
#
def zmq_xrep(address,socket_ctype,opts={})
zmq_handler(DripDrop::ZMQXRepHandler,ZMQ::XREP,address,socket_ctype,opts)
end
# See the documentation for +zmq_xrep+ for more info
def zmq_xreq(address,socket_ctype,opts={})
zmq_handler(DripDrop::ZMQXReqHandler,ZMQ::XREQ,address,socket_ctype,opts)
end
# Binds an EM websocket connection to +address+. takes blocks for
# +on_open+, +on_recv+, +on_close+ and +on_error+.
#
# For example +on_recv+ could be used to echo incoming messages thusly:
# websocket(addr).on_open {|conn|
# ws.send_message(:name => 'ws_open_ack')
# }.on_recv {|msg,conn|
# conn.send(msg)
# }.on_close {|conn|
# }.on_error {|reason,conn|
# }
#
# The +ws+ object that's passed into the handlers is not
# the +DripDrop::WebSocketHandler+ object, but an em-websocket object.
def websocket(address,opts={})
uri = URI.parse(address)
h_opts = handler_opts_given(opts)
DripDrop::WebSocketHandler.new(uri,h_opts)
end
# Starts a new Thin HTTP server listening on address.
# Can have an +on_recv+ handler that gets passed +msg+ and +response+ args.
# http_server(addr) {|msg,response| response.send_message(msg)}
def http_server(address,opts={},&block)
uri = URI.parse(address)
h_opts = handler_opts_given(opts)
DripDrop::HTTPServerHandler.new(uri, h_opts,&block)
end
# An EM HTTP client.
# Example:
# client = http_client(addr)
# client.send_message(:name => 'name', :body => 'hi') do |resp_msg|
# puts resp_msg.inspect
# end
def http_client(address,opts={})
uri = URI.parse(address)
h_opts = handler_opts_given(opts)
DripDrop::HTTPClientHandler.new(uri, h_opts)
end
# An inprocess pub/sub queue that works similarly to EM::Channel,
# but has manually specified identifiers for subscribers letting you
# more easily delete subscribers without crazy id tracking.
#
# This is useful for situations where you want to broadcast messages across your app,
# but need a way to properly delete listeners.
#
# +dest+ is the name of the pub/sub channel.
# +data+ is any type of ruby var you'd like to send.
def send_internal(dest,data)
return false unless @recipients_for[dest]
blocks = @recipients_for[dest].values
return false unless blocks
blocks.each do |block|
block.call(data)
end
end
# Defines a subscriber to the channel +dest+, to receive messages from +send_internal+.
# +identifier+ is a unique identifier for this receiver.
# The identifier can be used by +remove_recv_internal+
def recv_internal(dest,identifier,&block)
if @recipients_for[dest]
@recipients_for[dest][identifier] = block
else
@recipients_for[dest] = {identifier => block}
end
end
# Deletes a subscriber to the channel +dest+ previously identified by a
# reciever created with +recv_internal+
def remove_recv_internal(dest,identifier)
return false unless @recipients_for[dest]
@recipients_for[dest].delete(identifier)
end
# Catch all error handler
def error_handler(e)
$stderr.write "#{e.class}: #{e.message}\n\t#{e.backtrace.join("\n\t")}"
end
private
# Builds a ZeroMQ handler of class +klass+ wired into the EM reactor.
#
# Params:
#   klass        - handler class, e.g. DripDrop::ZMQPubHandler
#   sock_type    - ZMQ socket constant, e.g. ZMQ::PUB
#   address      - transport address string, passed verbatim to EM::ZeroMQ
#   socket_ctype - :bind or :connect
#   opts         - handler options merged over the node defaults
#
# The previous implementation parsed +address+, resolved 'localhost' via
# DNS and assembled a z_addr string — but none of those values were ever
# used; the connection has always been created with the raw +address+.
# That dead code (and its per-call DNS lookup) is removed; the connection
# setup is unchanged.
def zmq_handler(klass, sock_type, address, socket_ctype, opts={})
  h_opts = handler_opts_given(opts)
  connection = EM::ZeroMQ.create @zctx, sock_type, socket_ctype, address, klass.new(h_opts)
  handler = connection.handler
  handler.connection = connection
  handler.post_setup
  handler
end
def handler_opts_given(opts)
@handler_default_opts.merge(opts)
end
end
end
# ----------------------------------------------------------------------
# Description: It is currently used to set up various methods for each instance
# of the CapybaraDriver Base. It has access to the envs.json file
# and a sharedpasswords.yaml file (if created).
#
# Original Date: August 20th 2011
require 'base64'
require 'capybara'
require 'capybara/dsl'
require 'json'
require 'log4r'
begin
require 'chunky_png'
rescue
end
require 'rspec'
require 'selenium-webdriver'
require 'uri'
# Top-level namespace for the Kaiki acceptance-test tooling.
module Kaiki
end
# Namespace for the Capybara-backed driver implementation.
module Kaiki::CapybaraDriver
end
# This file is currently incomplete, due to the migration from the
# Selenium WebDriver to Capybara.
class Kaiki::CapybaraDriver::Base
  include Log4r
  include Capybara::DSL
  # The basename of the json file that contains all environment information.
  ENVS_FILE = "envs.json"
  # The default timeout for Waits.
  DEFAULT_TIMEOUT = 4
  # The default dimensions for the headless display.
  DEFAULT_DIMENSIONS = "1600x900x24"
  # The file that contains shared passwords for test users.
  SHARED_PASSWORDS_FILE = "shared_passwords.yaml"
  # Public: Gets/Sets the driver used to power the browser. Gets/Sets whether
  # the browser is headless. Gets/Sets the overridden puts method.
  attr_accessor :driver, :is_headless, :puts_method, :headless, :pause_time, :log
# Public: Initialize a CapybaraDriver instance; reads in necessary variables
# from the envs file. Sets variables for each instance of CapybaraDriver (the
# use of nil means make a new one). @record is a hash containing notes that
# the "user" needs to keep, like the document number just created. Writes to
# an external, timestamped log file under features/logs.
#
# Parameters:
#   username - Name used by the user to log in with.
#   password - Password for said user.
#   options  - Environment options (:envs, :pause_time, :is_headless,
#              :firefox_profile, :firefox_path).
#
# Returns nothing.
def initialize(username, password, options={})
  @username = username
  @password = password
  # Strip CR/LF from each line so the whole file parses as one JSON string.
  @standard_envs = JSON.parse(IO.readlines(ENVS_FILE).map{ |l| \
    l.gsub(/[\r\n]/, '') }.join(""))
  # Restrict to the requested environments when :envs is given.
  @envs = options[:envs] ?
    @standard_envs.select { |k,v| options[:envs].include? k } :
    @standard_envs
  if @envs.empty?
    # NOTE(review): if @envs is empty while options[:envs] is nil,
    # options[:envs].first raises NoMethodError — confirm callers always
    # pass :envs whenever envs.json may be empty.
    @envs = {
      options[:envs].first => { "code" => options[:envs].first, "url" \
        => options[:envs].first }
    }
  end
  # With exactly one environment, select it up front.
  if @envs.keys.size == 1
    @env = @envs.keys.first
  end
  @pause_time = options[:pause_time] || 2
  @is_headless = options[:is_headless]
  @firefox_profile_name = options[:firefox_profile]
  @firefox_path = options[:firefox_path]
  @record = {}
  @log = Logger.new 'debug_log'
  # One log file per run, named by timestamp.
  file_outputter = FileOutputter.new 'file', :filename => File.join(Dir::pwd,\
    'features', 'logs', Time.now.strftime("%Y.%m.%d-%H.%M.%S"))
  @log.outputters = file_outputter
  @log.level = DEBUG
end
# Public: Desired url for the browser to navigate to.
#
# Falls back to the standard Mosaic URL pattern when the selected
# environment has no explicit 'url' entry. Previously an @env that was
# missing from @envs raised NoMethodError (nil['url']); such environments
# now use the fallback URL as well.
#
# Returns the URL String.
def url
  env_config = @envs[@env] || {}
  env_config['url'] || "https://kr-#{@env}.mosaic.arizona.edu/kra-#{@env}"
end
# Public: Changes focus to the outermost page element (leaves any frame).
#
# Returns nothing.
def switch_default_content
  driver.switch_to.default_content
end
# Public: Changes focus to the given frame by frame id.
#
# Parameters:
#   id - id of the frame to focus.
#
# Returns nothing.
def select_frame(id)
  driver.switch_to.frame id
end
# Public: Switch to the default tab/window/frame, and backdoor login as `user`
#
# Parameters:
#   user - the user to be backdoored as.
#
# On a timeout the page is assumed broken: we navigate back home and retry
# up to two more times before re-raising.
#
# Returns nothing.
def backdoor_as(user)
  switch_default_content
  retries = 2
  begin
    @log.debug " backdoor_as: Waiting up to #{DEFAULT_TIMEOUT} " \
      "seconds to find(:name, 'backdoorId')..."
    fill_in('backdoorId', :with => "#{user}")
  rescue Selenium::WebDriver::Error::TimeOutError => error
    raise error if retries == 0
    @log.debug " backdoor_as: Page is likely boned. Navigating back home..."
    visit base_path
    retries -= 1
    retry
  end
  click_button 'login'
end
# Public: Logs in to the Kuali system using the backdoor method
# for the given user. Or log out and log back in as user.
#
# Parameters:
#   user - the user to be logged in as
#
# NOTE(review): @login_method is not set anywhere in this class — confirm a
# subclass or caller assigns it before relying on the :backdoor branch.
#
# Returns nothing.
def login_as(user)
  if @login_method == :backdoor
    backdoor_as(user)
  else
    logout
    visit base_path
    login_via_webauth_with user
  end
end
# Public: Logs out by focusing the top-level page and pressing 'logout'.
#
# Returns nothing.
def logout
  switch_default_content
  click_button 'logout'
end
# Public: The path component of {#url}, used as the base for navigation.
#
# Returns the path String.
def base_path
  URI.parse(url).path
end
# Public: The scheme+host portion of {#url}.
#
# Returns a String like "https://example.edu".
def host
  parsed = URI.parse(url)
  "#{parsed.scheme}://#{parsed.host}"
end
# Public: Login via Webauth with a specific username, and optional password.
# If no password is given, it will be retrieved from the
# shared_passwords.yml file. Also checks if we logged in correctly.
#
# Parameters:
#   username - Username to log in with.
#   password - Password to log in with.
#
# Fix: the `@driver.execute_script "window.close()"` calls that used to
# follow each `raise WebauthAuthenticationError.new` were unreachable
# (placed after the raise) and have been removed; the unused `status`
# local is gone too. Observable behavior is unchanged.
#
# Returns nothing.
def login_via_webauth_with(username, password=nil)
  password ||= self.class.shared_password_for username
  sleep 1
  fill_in 'NetID', :with => username
  fill_in 'Password', :with => password
  click_button('LOGIN')
  sleep 1
  begin
    # Probe for the status element; raises (and is rescued) when absent.
    find(:id, 'status')
    if has_content? "You entered an invalid NetID or password"
      raise WebauthAuthenticationError.new
    elsif has_content? "Password is a required field"
      raise WebauthAuthenticationError.new
    end
  rescue Selenium::WebDriver::Error::NoSuchElementError,
         Capybara::ElementNotFound
  end
  begin
    # Follow the "your password is expiring" interstitial when present.
    expiring_password_link = find(:link_text, "Go there now")
    expiring_password_link.click if expiring_password_link
  rescue Selenium::WebDriver::Error::NoSuchElementError,
         Capybara::ElementNotFound
    return
  end
end
# Public: Highlights the current elements being interacted with: the element
# itself gets a red border, and each requested ancestor a progressively
# darker red one.
#
# Parameters:
#   method    - Actual id, name, title, etc.
#   locator   - By id, name, title, etc. or the element.
#   ancestors - number of parent nodes to also outline (default 0).
#
# Fixes: the `wait` local (a Selenium Wait that was never used) is removed;
# the border style string was missing a space between the continued lines
# ("solid 1px" + "rgb(...)" produced the invalid CSS "solid 1pxrgb(...)"),
# now corrected.
#
# Returns nothing.
def highlight(method, locator, ancestors=0)
  @log.debug " highlight: Waiting up to #{DEFAULT_TIMEOUT} " \
    "seconds to find_element(#{method}, #{locator})..."
  wait_for(method, locator)
  element = find(method, locator)
  @driver.execute_script("hlt = function(c) { c.style.border='solid 1px " \
    "rgb(255, 16, 16)'; }; return hlt(arguments[0]);", element)
  parents = ""
  red = 255
  ancestors.times do
    parents << ".parentNode"
    red -= (12*8 / ancestors)
    @driver.execute_script("hlt = function(c) { c#{parents}.style.border='" \
      "solid 1px rgb(#{red}, 0, 0)'; };" \
      "return hlt(arguments[0]);", element)
  end
end
# Public: "Maximize" the current window using Selenium's
# `manage.window.resize_to`. This script does not use the
# window manager's "maximize" capability, but rather resizes
# the window. By default, it positions the window 64 pixels
# below and to the right of the top left corner, and sizes the
# window to be 128 pixels smaller both vretically and horizontally
# than the available space.
#
# Parameters:
# x - Pixels from the left edge the window starts.
# y - Pixels from the top edge the window starts.
# w - How much smaller you want the browser window than the monitor.
# h - How much smaller you want the browser window than the monitor.
#
# Returns nothing.
def maximize_ish(x = 64, y = 64, w = -128, h = -128)
if is_headless
x = 0; y = 0; w = -2; h = -2
end
width = w
height = h
width = "window.screen.availWidth - #{-w}" if w <= 0
height = "window.screen.availHeight - #{-h}" if h <= 0
if is_headless
@driver.manage.window.position = Selenium::WebDriver::Point.new(0,0)
max_width, max_height = @driver.execute_script("return" \
"[window.screen.availWidth, window.screen.availHeight];")
@driver.manage.window.resize_to(max_width, max_height)
else
@driver.manage.window.position = Selenium::WebDriver::Point.new(40,30)
max_width, max_height = @driver.execute_script("return" \
"[window.screen.availWidth, window.screen.availHeight];")
@driver.manage.window.resize_to(max_width-900, max_height-100)
end
@driver.execute_script %[
if (window.screen) {
window.moveTo(#{x}, #{y});
window.resizeTo(#{width}, #{height});
};
]
end
# Public: Set `@screenshot_dir` to base/<timestamp> and create the
# directory if it doesn't exist.
#
# Fix: `Dir::exists?` was deprecated and removed in Ruby 3.2; replaced
# with `Dir.exist?` (identical semantics).
#
# Parameters:
#   base - parent directory for screenshot folders.
#
# Returns nothing.
def mk_screenshot_dir(base)
  @screenshot_dir = File.join(base, Time.now.strftime("%Y-%m-%d.%H"))
  return if Dir.exist? @screenshot_dir
  Dir.mkdir(@screenshot_dir)
end
# Public: Sleep for `time` seconds when given, otherwise for `@pause_time`.
#
# Returns nothing.
def pause(time = nil)
  @log.debug " breathing..."
  sleep(time || @pause_time)
end
# Public: Take a screenshot, and save it to `@screenshot_dir` by the name
# `#{name}.png`; echoes the saved path.
#
# Parameters:
#   name - basename (without extension) for the screenshot file.
#
# Returns nothing.
def screenshot(name)
  @driver.save_screenshot(File.join(@screenshot_dir, "#{name}.png"))
  puts "Screenshot saved to " + File.join(@screenshot_dir, "#{name}.png")
end
# Public: Start a browser session by choosing a Firefox profile,
# setting the Capybara driver and settings, and visiting the
# #base_path.
#
# Returns nothing.
def start_session
  @download_dir = File.join(Dir::pwd, 'features', 'downloads')
  # NOTE(review): Dir::exists? was removed in Ruby 3.2 — should be Dir.exist?.
  Dir::mkdir(@download_dir) unless Dir::exists? @download_dir
  mk_screenshot_dir(File.join(Dir::pwd, 'features', 'screenshots'))
  # Reuse a named Firefox profile when configured, else a fresh one.
  if @firefox_profile_name
    @profile = Selenium::WebDriver::Firefox::Profile.from_name \
      @firefox_profile_name
  else
    @profile = Selenium::WebDriver::Firefox::Profile.new
  end
  # Auto-download PDFs to @download_dir without prompting; open new
  # windows as tabs (folderList 2 = custom dir, open_newwindow 3 = tab).
  @profile['browser.download.dir'] = @download_dir
  @profile['browser.download.folderList'] = 2
  @profile['browser.helperApps.neverAsk.saveToDisk'] = "application/pdf"
  @profile['browser.link.open_newwindow'] = 3
  if @firefox_path
    Selenium::WebDriver::Firefox.path = @firefox_path
  end
  if is_headless
    @headless = Headless.new(:dimensions => DEFAULT_DIMENSIONS)
    @headless.start
  end
  Capybara.run_server = false
  Capybara.app_host = host
  Capybara.default_wait_time = DEFAULT_TIMEOUT
  Capybara.register_driver :selenium do |app|
    Capybara::Selenium::Driver.new(app, :profile => @profile)
  end
  Capybara.default_driver = :selenium
  visit base_path
  # Keep a handle on the raw Selenium browser for execute_script etc.
  @driver = page.driver.browser
end
# Public: Gathers the shared password for test users to use when logging in
# via WebAuth.
#
# Parameters:
#   username - Username for the shared password.
#
# A YAML key matches when it appears as a substring of +username+
# (String#[] returns the match or nil).
#
# Returns the shared password, or nil when the file or a match is missing.
def self.shared_password_for(username)
  return nil if not File.exist? SHARED_PASSWORDS_FILE
  shared_passwords = File.open(SHARED_PASSWORDS_FILE) \
    { |h| YAML::load_file(h) }
  if shared_passwords.keys.any? { |user| username[user] }
    user_group = shared_passwords.keys.select { |user| username[user] }[0]
    return shared_passwords[user_group]
  end
  nil
end
# Public: Show a visual vertical tab inside a document's layout.
# Accepts the "name" of the tab. Find the name of the tab
# by looking up the `title` of the `input` that is the open
# button. The title is everything after the word "open."
#
# Parameters:
#   name - Name of the tab to toggle.
#
# Returns nothing.
def show_tab(name)
  find(:xpath, "//input[contains(@title, 'open #{name}')]").click
end
# Public: Hide a visual vertical tab inside a document's layout.
# Accepts the "name" of the tab. Find the name of the tab
# by looking up the `title` of the `input` that is the close
# button. The title is everything after the word "close."
#
# Parameters:
#   name - Name of the tab to toggle.
#
# Returns nothing.
def hide_tab(name)
  find(:xpath, "//input[contains(@title, 'close #{name}')]").click
end
# Public: Deselect all `<option>s` within a `<select>`, suppressing the
# `UnsupportedOperationError` Selenium throws for single-select elements.
#
# Parameters:
#   el - the Select support object wrapping the element.
#
# Returns nothing.
def safe_deselect_all(el)
  el.deselect_all
rescue Selenium::WebDriver::Error::UnsupportedOperationError
end
# Public: Check the field that is expressed with `selectors`
# (the first one that is found). `selectors` is typically
# an Array returned by `ApproximationsFactory`, but it
# could be hand-generated.
#
# Parameters:
#   selectors - The identifier of the fields you're looking at.
#
# Returns the result of check_by_xpath for the first selector that works;
# raises Capybara::ElementNotFound (after logging) when none match.
def check_approximate_field(selectors)
  selectors.each do |selector|
    begin
      return check_by_xpath(selector)
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # Swallow and try the next candidate selector.
    end
  end
  @log.error "Failed to check approximate field. Selectors are:\n " \
    "#{selectors.join("\n") }"
  raise Capybara::ElementNotFound
end
# Public: Uncheck the field that is expressed with `selectors`
# (the first one that is found). `selectors` is typically
# an Array returned by `ApproximationsFactory`, but it
# could be hand-generated.
#
# Parameters:
#   selectors - The identifier of the fields you're looking at.
#
# Returns the result of uncheck_by_xpath for the first selector that works;
# raises Capybara::ElementNotFound (after logging) when none match.
def uncheck_approximate_field(selectors)
  selectors.each do |selector|
    begin
      return uncheck_by_xpath(selector)
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # Swallow and try the next candidate selector.
    end
  end
  @log.error "Failed to uncheck approximate field. Selectors are:\n " \
    "#{selectors.join("\n") }"
  raise Capybara::ElementNotFound
end
# Public: Check a field, selecting by xpath.
#
# Parameters:
#   xpath - Xpath of the item you're looking for.
#
# Returns nothing.
def check_by_xpath(xpath)
  @log.debug " Start check_by_xpath(#{xpath})"
  find(:xpath, xpath).set(true)
end
# Public: Uncheck a field, selecting by xpath.
#
# Parameters:
#   xpath - Xpath of the item you're looking for.
#
# Returns nothing.
def uncheck_by_xpath(xpath)
  @log.debug " Start uncheck_by_xpath(#{xpath})"
  find(:xpath, xpath).set(false)
end
# Public: Clicks the first element matched by one of `selectors` (typically
# an Array from `ApproximationsFactory`).
#
# Parameters:
#   selectors - candidate XPath selectors.
#   option    - how to click; "button" or "radio" (see click_by_xpath).
#
# Returns nothing; raises Capybara::ElementNotFound (after logging) when
# no selector matches.
def click_approximate_field(selectors, option = nil)
  selectors.each do |selector|
    begin
      click_by_xpath(selector, option)
      return
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Selenium::WebDriver::Error::InvalidSelectorError,
           Capybara::ElementNotFound
      # Swallow and try the next candidate selector.
    end
  end
  @log.error "Failed to click approximate field. Selectors are:\n " \
    "#{selectors.join("\n") }"
  raise Capybara::ElementNotFound
end
# Public: Clicks on the link or button with the given name.
#
# Parameters:
#   text - Item to be clicked on.
#
# Fix: the debug line used to run *after* the click, so a failing click was
# never logged; it now logs first.
#
# Returns nothing.
def click(text)
  @log.debug " clicking #{text}"
  click_on text
end
# Public: Click a link or button, selecting by xpath.
#
# Parameters:
#   xpath  - Xpath of the item you're looking for.
#   option - "button" clicks the element; "radio" sets it.
#
# NOTE(review): any other +option+ value (including nil) silently does
# nothing — confirm that is intentional.
#
# Returns nothing.
def click_by_xpath(xpath, option)
  @log.debug " Start click_by_xpath(#{xpath})"
  if option == "button"
    find(:xpath, xpath).click
  elsif option == "radio"
    find(:xpath, xpath).set(true)
  end
end
# Public: Same as get_field, but if there are multiple fields
# using the same name.
#
# Parameters:
#   selectors - candidate XPath selectors.
#
# Returns the value of the first field that can be read; raises
# Capybara::ElementNotFound (after logging) when none match.
def get_approximate_field(selectors)
  selectors.each do |selector|
    begin
      return get_field(selector)
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # Swallow and try the next candidate selector.
    end
  end
  @log.error "Failed to get approximate field. Selectors are:\n " \
    "#{selectors.join("\n") }"
  raise Capybara::ElementNotFound
end
# Public: Finds the field addressed by +selector+ and returns its value.
#
# Parameters:
#   selector - XPath of the element.
#   options  - Extra options for narrowing the search.
#              NOTE(review): currently unread by the body — confirm intent.
#
# Fix/idiom: removed the redundant `begin/end` wrapper (it had no rescue)
# and replaced the if/elsif chain with a `case` on the element type.
# Behavior is unchanged.
#
# Returns the text in the given field: the :value of a text input, the
# selected option's text for a single-select, otherwise the stripped text.
def get_field(selector, options={})
  wait_for(:xpath, selector)
  element = find(:xpath, selector)
  case element[:type]
  when "text"
    element[:value]
  when "select-one"
    element.find(:xpath, "option[@selected ='selected']").text
  else
    element.text.strip
  end
end
# Public: Utilizes the Approximation Factory to find the selector type of the
# adjacent field to the item you wish to fill in. It then calls
# set_field with the selector type and value to be filled in.
#
# Parameters:
#   selectors - Input, text area, select, etc.
#   value     - Text to be filled in or chosen from a drop down.
#
# Returns nothing; raises Capybara::ElementNotFound (after logging) when
# no selector matches.
def set_approximate_field(selectors, value=nil)
  selectors.each do |selector|
    begin
      set_field(selector, value)
      return
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # Swallow and try the next candidate selector.
    end
  end
  @log.error "Failed to set approximate field. Selectors are:\n " \
    "#{selectors.join("\n") }"
  raise Capybara::ElementNotFound
end
# Public: Takes in the id of a selector, i.e. input, text area, select, etc.,
# and inputs the value to this field.
#
# Parameters:
#   id    - Input, text area, select, etc.
#   value - Text to be filled in or chosen from a drop down.
#
# Two-stage dispatch: the *shape* of +id+ decides how to locate the element
# and (for some shapes) what node type to assume; the node type then decides
# how the value is written.
#
# Returns nothing.
def set_field(id, value=nil)
  @log.debug " Start set_field(#{id.inspect}, #{value.inspect})"
  if id =~ /@value=/
    # XPath pinning a specific @value — treated as a radio button.
    node_name = 'radio'
    locator = id
  elsif id =~ /^\/\// or id =~ /^id\(".+"\)/
    # Raw XPath (or the id(...) XPath function): probe the node for its tag.
    node_name = nil
    begin
      node = driver.find_element(:xpath, id)
      node_name = node.tag_name.downcase
    rescue Selenium::WebDriver::Error::NoSuchElementError
      # Missing node falls through to the `else` send_keys branch below.
    end
    locator = id
  elsif id =~ /^.+=.+ .+=.+$/
    # "attr=val attr=val" shorthand — also treated as a radio button.
    node_name = 'radio'
    locator = id
  else
    # Otherwise +id+ is a DOM id.
    @log.debug " set_field: Waiting up to #{DEFAULT_TIMEOUT} " \
      "seconds to find_element(:id, #{id})..."
    wait_for(:id, id)
    node = find(:id, id)
    node_name = node.tag_name.downcase
    locator = "//*[@id='#{id}']"
  end
  case node_name
  when 'textarea'
    # NOTE(review): this branch is identical to the 'input' branch below —
    # candidate for extraction into a shared private helper.
    @log.debug " set_field: node_name is #{node_name.inspect}"
    @log.debug " set_field: locator is #{locator.inspect}"
    if not locator['"']
      # Clear any existing value via JS first (only possible when the
      # locator can be embedded inside a double-quoted JS string).
      unless get_field(locator).empty?
        @driver.execute_script("return document.evaluate(\"#{locator}\"," \
          "document, null," \
          "XPathResult.FIRST_ORDERED_NODE_TYPE," \
          "null).singleNodeValue.value = '';", nil)
      end
    else
      @log.warn " set_field: locator (#{locator.inspect}) " \
        "has a \" in it, so... I couldn't check if the input was " \
        "empty. Good luck!"
    end
    @driver.find_element(:xpath, locator).send_keys(value, :tab)
  when 'input'
    @log.debug " set_field: node_name is #{node_name.inspect}"
    @log.debug " set_field: locator is #{locator.inspect}"
    if not locator['"']
      unless get_field(locator).empty?
        @driver.execute_script("return document.evaluate(\"#{locator}\"," \
          "document, null," \
          "XPathResult.FIRST_ORDERED_NODE_TYPE," \
          "null).singleNodeValue.value = '';", nil)
      end
    else
      @log.warn " set_field: locator (#{locator.inspect}) " \
        "has a \" in it, so... I couldn't check if the input was " \
        "empty. Good luck!"
    end
    @driver.find_element(:xpath, locator).send_keys(value, :tab)
  when 'select'
    # Drop-down: clear any multi-select state, then select by visible text.
    @log.debug " set_field: Waiting up to #{DEFAULT_TIMEOUT}" \
      "seconds to find_element(:xpath, #{locator})..."
    wait_for(:xpath, locator)
    select = Selenium::WebDriver::Support::Select.new( \
      @driver.find_element(:xpath, locator))
    safe_deselect_all(select)
    select.select_by(:text, value)
  when 'radio'
    @driver.find_element(:xpath, locator).click
  else
    # Unknown/unprobed node type: just type into it.
    @driver.find_element(:xpath, locator).send_keys(value)
  end
end
# Public: Waits for an element to be findable by `method` and `locator`.
#
# Parameters:
#   method  - Actual id, name, title, etc.
#   locator - By id, name, title, etc. or the element.
#
# NOTE(review): despite the original comment, no Selenium Wait object is
# created here — after a 0.1s sleep this relies on Capybara's `find`
# (which itself waits up to Capybara.default_wait_time) and lets its
# ElementNotFound propagate. Confirm callers expect that behavior.
#
# Returns nothing.
def wait_for(method, locator)
  @log.debug " wait_for: Waiting up to #{DEFAULT_TIMEOUT} " \
    "seconds to find (#{method}, #{locator})..."
  sleep 0.1
  find(method, locator)
end
# Public: Takes in an array of xpaths (such as that provided by the
# ApproximationFactory class) and returns the element for the first
# xpath that matches anything on the page.
#
# Parameters:
#   selectors - The array of xpaths to be searched, tried in order.
#
# Returns the first element that matches the selectors array.
# Raises Capybara::ElementNotFound when no selector matches.
def find_approximate_element(selectors)
  selectors.each do |xpath|
    @log.debug "Finding element at #{xpath}..."
    begin
      return find(:xpath, xpath)
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # This candidate failed; fall through to the next one.
    end
  end
  @log.error "Failed to find approximate element. Selectors are:\n " \
    "#{selectors.join("\n")}"
  raise Capybara::ElementNotFound
end
# Public: Changes WebDriver focus to the most recently opened window or
# tab (the last entry in `@driver.window_handles`).
#
# Returns the result of the driver's switch_to call; callers use this
# method for its side effect only. (The previous comment claiming it
# returns a matching element was a copy-paste leftover.)
def last_window_focus
  @driver.switch_to.window(@driver.window_handles.last)
end
end
ECE-482 Changed lines 344 and 349
Added `if ENV['BUILD_NUMBER'].nil?` and a corresponding `end`, so that when Jenkins runs its builds it does not try to launch two headless displays.
# Description: It is currently used to set up various methods for each instance
# of the CapybaraDriver Base. It has access to the envs.json file
# and a sharedpasswords.yaml file (if created).
#
# Original Date: August 20th 2011
require 'base64'
require 'capybara'
require 'capybara/dsl'
require 'json'
require 'log4r'
begin
require 'chunky_png'
rescue
end
require 'rspec'
require 'selenium-webdriver'
require 'uri'
module Kaiki
end
module Kaiki::CapybaraDriver
end
# This file is currently incomplete, due to the migration from the
# Selenium WebDriver to Capybara.
class Kaiki::CapybaraDriver::Base
include Log4r
include Capybara::DSL
# The basename of the json file that contains all environment information.
ENVS_FILE = "envs.json"
# The default timeout for Waits.
DEFAULT_TIMEOUT = 4
# The default dimensions for the headless display.
DEFAULT_DIMENSIONS = "1600x900x24"
# The file that contains shared passwords for test users.
SHARED_PASSWORDS_FILE = "shared_passwords.yaml"
# Public: Gets/Sets the driver used to power the browser. Gets/Sets whether
# the browser is headless. Gets/Sets the overridden puts method.
attr_accessor :driver, :is_headless, :puts_method, :headless, :pause_time, :log
# Public: Initilize a CapybaraDriver instance, reads in necessary variables
# from the envs file. Sets variables for each instance of CapybaraDriver, the
# use of nil means make a new one. Record is a hash containing notes that the
# "user" needs to keep, like the document number just created. Writes to an
# external log file with information about certain steps through the feature
# files.
#
# Parameters:
# username - Name used by the user to log in with.
# password - Password for said user.
# options - Environment options.
#
# Returns nothing.
def initialize(username, password, options={})
@username = username
@password = password
@standard_envs = JSON.parse(IO.readlines(ENVS_FILE).map{ |l| \
l.gsub(/[\r\n]/, '') }.join(""))
@envs = options[:envs] ?
@standard_envs.select { |k,v| options[:envs].include? k } :
@standard_envs
if @envs.empty?
@envs = {
options[:envs].first => { "code" => options[:envs].first, "url" \
=> options[:envs].first }
}
end
if @envs.keys.size == 1
@env = @envs.keys.first
end
@pause_time = options[:pause_time] || 2
@is_headless = options[:is_headless]
@firefox_profile_name = options[:firefox_profile]
@firefox_path = options[:firefox_path]
@record = {}
@log = Logger.new 'debug_log'
file_outputter = FileOutputter.new 'file', :filename => File.join(Dir::pwd,\
'features', 'logs', Time.now.strftime("%Y.%m.%d-%H.%M.%S"))
@log.outputters = file_outputter
@log.level = DEBUG
end
# Public: Desired url for the browser to navigate to.
#
# Returns the URL configured for the current environment, or the
# conventional Mosaic URL derived from the environment code when the
# envs file has no explicit entry.
def url
  configured = @envs[@env]['url']
  configured || "https://kr-#{@env}.mosaic.arizona.edu/kra-#{@env}"
end
# Public: Changes focus to the outermost page element
#
# Returns nothing.
def switch_default_content
driver.switch_to.default_content
end
# Public: Changes focus to the given frame by frameid
#
# Returns nothing.
def select_frame(id)
driver.switch_to.frame id
end
# Public: Switch to the default tab/window/frame, and backdoor login as `user`
#
# Parameters:
# user - the user to be backdoored as.
#
# Returns nothing.
def backdoor_as(user)
switch_default_content
retries = 2
begin
@log.debug " backdoor_as: Waiting up to #{DEFAULT_TIMEOUT} " \
"seconds to find(:name, 'backdoorId')..."
fill_in('backdoorId', :with => "#{user}")
rescue Selenium::WebDriver::Error::TimeOutError => error
raise error if retries == 0
@log.debug " backdoor_as: Page is likely boned. Navigating back home..."
visit base_path
retries -= 1
retry
end
click_button 'login'
end
# Public: Logs in to the Kuali system using the backdoor method
# for the given user. Or log out and log back in as user.
#
# Parameters:
# user - the user to be logged in as
#
# Returns nothing.
def login_as(user)
if @login_method == :backdoor
backdoor_as(user)
else
logout
visit base_path
login_via_webauth_with user
end
end
# Public: Logs out.
#
# Returns nothing.
def logout
switch_default_content
click_button 'logout'
end
# Public: Defines the base path for url navigation.
#
# Returns nothing.
def base_path
uri = URI.parse url
uri.path
end
# Public: Defines 'host' attribute of {#url}
#
# Returns nothing.
def host
uri = URI.parse url
"#{uri.scheme}://#{uri.host}"
end
# Public: Login via Webauth with a specific username, and optional password.
# If no password is given, it will be retrieved from the
# shared_passwords.yml file. Also checks if we logged in correctly.
#
# Parameters:
#   username - Username to log in with.
#   password - Password to log in with (looked up when nil).
#
# Raises WebauthAuthenticationError when the login form reports a bad
# NetID/password or a missing password.
#
# Returns nothing.
def login_via_webauth_with(username, password=nil)
  password ||= self.class.shared_password_for username
  sleep 1
  fill_in 'NetID', :with => username
  fill_in 'Password', :with => password
  click_button('LOGIN')
  sleep 1
  begin
    # The lookup doubles as a guard: if the status element is absent we
    # jump straight to the rescue below and skip the error checks.
    # (The old `status = ...` assignment was never read.)
    find(:id, 'status')
    if has_content?("You entered an invalid NetID or password") ||
       has_content?("Password is a required field")
      # NOTE: the previous code had `@driver.execute_script "window.close()"`
      # after each raise — unreachable dead code, removed.
      raise WebauthAuthenticationError.new
    end
  rescue Selenium::WebDriver::Error::NoSuchElementError,
         Capybara::ElementNotFound
  end
  begin
    expiring_password_link = find(:link_text, "Go there now")
    expiring_password_link.click if expiring_password_link
  rescue Selenium::WebDriver::Error::NoSuchElementError,
         Capybara::ElementNotFound
    return
  end
end
# Public: Highlights the current elements being interacted with: draws a
# bright red border around the element and progressively darker red
# borders around the requested number of its ancestors.
#
# Parameters:
#   method    - Actual id, name, title, etc.
#   locator   - By id, name, title, etc. or the element.
#   ancestors - Number of parent nodes to also outline (default 0).
#
# Returns nothing.
def highlight(method, locator, ancestors=0)
  @log.debug " highlight: Waiting up to #{DEFAULT_TIMEOUT} " \
    "seconds to find_element(#{method}, #{locator})..."
  # The old `wait = Selenium::WebDriver::Wait.new(...)` was created and
  # never used; `wait_for` already performs the wait.
  wait_for(method, locator)
  element = find(method, locator)
  # BUG FIX: a space is required between "1px" and the color — the old
  # concatenation produced `border='solid 1pxrgb(255, 16, 16)'`, which is
  # invalid CSS and was silently ignored by the browser.
  @driver.execute_script("hlt = function(c) { c.style.border='solid 1px " \
    "rgb(255, 16, 16)'; }; return hlt(arguments[0]);", element)
  parents = ""
  red = 255
  ancestors.times do
    parents << ".parentNode"
    red -= (12 * 8 / ancestors)
    @driver.execute_script("hlt = function(c) { c#{parents}.style.border='" \
      "solid 1px rgb(#{red}, 0, 0)'; };" \
      "return hlt(arguments[0]);", element)
  end
end
# Public: "Maximize" the current window using Selenium's
# `manage.window.resize_to`. This script does not use the
# window manager's "maximize" capability, but rather resizes
# the window. By default, it positions the window 64 pixels
# below and to the right of the top left corner, and sizes the
# window to be 128 pixels smaller both vretically and horizontally
# than the available space.
#
# Parameters:
# x - Pixels from the left edge the window starts.
# y - Pixels from the top edge the window starts.
# w - How much smaller you want the browser window than the monitor.
# h - How much smaller you want the browser window than the monitor.
#
# Returns nothing.
def maximize_ish(x = 64, y = 64, w = -128, h = -128)
if is_headless
x = 0; y = 0; w = -2; h = -2
end
width = w
height = h
width = "window.screen.availWidth - #{-w}" if w <= 0
height = "window.screen.availHeight - #{-h}" if h <= 0
if is_headless
@driver.manage.window.position = Selenium::WebDriver::Point.new(0,0)
max_width, max_height = @driver.execute_script("return" \
"[window.screen.availWidth, window.screen.availHeight];")
@driver.manage.window.resize_to(max_width, max_height)
else
@driver.manage.window.position = Selenium::WebDriver::Point.new(40,30)
max_width, max_height = @driver.execute_script("return" \
"[window.screen.availWidth, window.screen.availHeight];")
@driver.manage.window.resize_to(max_width-900, max_height-100)
end
@driver.execute_script %[
if (window.screen) {
window.moveTo(#{x}, #{y});
window.resizeTo(#{width}, #{height});
};
]
end
# Public: Set `@screenshot_dir` to an hour-stamped directory under `base`,
# and create it on disk if it doesn't exist yet.
#
# Parameters:
#   base - Parent directory for the screenshot folder.
#
# Returns nothing.
def mk_screenshot_dir(base)
  @screenshot_dir = File.join(base, Time.now.strftime("%Y-%m-%d.%H"))
  # Dir.exist? replaces Dir.exists?, which was deprecated and removed in
  # Ruby 3.2.
  Dir.mkdir(@screenshot_dir) unless Dir.exist?(@screenshot_dir)
end
# Public: Pause for `@pause_time` by default, or for `time` seconds.
#
# Parameters:
#   time - Optional number of seconds to sleep instead of @pause_time.
#
# Returns nothing.
def pause(time = nil)
  @log.debug " breathing..."
  duration = time || @pause_time
  sleep duration
end
# Public: Take a screenshot, and save it to `@screenshot_dir` by the name
# `#{name}.png`
#
# Returns nothing.
def screenshot(name)
@driver.save_screenshot(File.join(@screenshot_dir, "#{name}.png"))
puts "Screenshot saved to " + File.join(@screenshot_dir, "#{name}.png")
end
# Public: Start a browser session by choosing a Firefox profile,
# setting the Capybara driver and settings, and visiting the
# #base_path.
#
# Returns nothing.
def start_session
@download_dir = File.join(Dir::pwd, 'features', 'downloads')
Dir::mkdir(@download_dir) unless Dir::exists? @download_dir
mk_screenshot_dir(File.join(Dir::pwd, 'features', 'screenshots'))
if @firefox_profile_name
@profile = Selenium::WebDriver::Firefox::Profile.from_name \
@firefox_profile_name
else
@profile = Selenium::WebDriver::Firefox::Profile.new
end
@profile['browser.download.dir'] = @download_dir
@profile['browser.download.folderList'] = 2
@profile['browser.helperApps.neverAsk.saveToDisk'] = "application/pdf"
@profile['browser.link.open_newwindow'] = 3
if @firefox_path
Selenium::WebDriver::Firefox.path = @firefox_path
end
if ENV['BUILD_NUMBER'].nil?
if is_headless
@headless = Headless.new(:dimensions => DEFAULT_DIMENSIONS)
@headless.start
end
end
Capybara.run_server = false
Capybara.app_host = host
Capybara.default_wait_time = DEFAULT_TIMEOUT
Capybara.register_driver :selenium do |app|
Capybara::Selenium::Driver.new(app, :profile => @profile)
end
Capybara.default_driver = :selenium
visit base_path
@driver = page.driver.browser
end
# Public: Gathers the shared password for test users to use when logging in
# via WebAuth. A user "matches" a key when the key appears as a substring
# of the username (String#[] lookup).
#
# Parameters:
#   username - Username for the shared password.
#
# Returns the shared password, or nil when the passwords file is absent or
# no key matches.
def self.shared_password_for(username)
  return nil unless File.exist? SHARED_PASSWORDS_FILE
  shared_passwords = File.open(SHARED_PASSWORDS_FILE) \
    { |h| YAML::load_file(h) }
  # Single pass with `find` instead of the previous any?-then-select
  # double scan of the key list.
  user_group = shared_passwords.keys.find { |user| username[user] }
  user_group && shared_passwords[user_group]
end
# Public: Show a visual vertical tab inside a document's layout.
# Accepts the "name" of the tab. Find the name of the tab
# by looking up the `title` of the `input` that is the open
# button. The title is everything after the word "open."
#
# Parameters:
# name - Name of the tab to toggle.
#
# Returns nothing.
def show_tab(name)
find(:xpath, "//input[contains(@title, 'open #{name}')]").click
end
# Public: Hide a visual vertical tab inside a document's layout.
# Accepts the "name" of the tab. Find the name of the tab
# by looking up the `title` of the `input` that is the close
# button. The title is everything after the word "close."
#
# Parameters:
# name - Name of the tab to toggle.
#
# Returns nothing.
def hide_tab(name)
find(:xpath, "//input[contains(@title, 'close #{name}')]").click
end
# Public: Deselect every `<option>` within a `<select>`, swallowing the
# `UnsupportedOperationError` Selenium raises for single-select elements.
#
# Parameters:
#   el - The select element wrapper.
#
# Returns nothing.
def safe_deselect_all(el)
  begin
    el.deselect_all
  rescue Selenium::WebDriver::Error::UnsupportedOperationError
    # Single-select lists cannot be deselected; ignore and move on.
  end
end
# Public: Check the field that is expressed with `selectors`
# (the first one that is found). `selectors` is typically
# an Array returned by `ApproximationsFactory`, but it
# could be hand-generated.
#
# Parameters:
#   selectors - The identifier of the fields you're looking at.
#
# Returns the result of checking the first matching field.
# Raises Capybara::ElementNotFound when no selector matches.
def check_approximate_field(selectors)
  selectors.each do |xpath|
    begin
      return check_by_xpath(xpath)
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # Candidate missed; try the next selector.
    end
  end
  @log.error "Failed to check approximate field. Selectors are:\n " \
    "#{selectors.join("\n")}"
  raise Capybara::ElementNotFound
end
# Public: Uncheck the field that is expressed with `selectors`
# (the first one that is found). 'selectors` is typically
# an Array returned by `ApproximationsFactory`, but it
# could be hand-generated.
#
# Parameters:
# selectors - The identifier of the fields you're looking at.
#
# Returns nothing.
def uncheck_approximate_field(selectors)
selectors.each do |selector|
begin
return uncheck_by_xpath(selector)
rescue Selenium::WebDriver::Error::NoSuchElementError,
Selenium::WebDriver::Error::TimeOutError,
Capybara::ElementNotFound
end
end
@log.error "Failed to uncheck approximate field. Selectors are:\n " \
"#{selectors.join("\n") }"
raise Capybara::ElementNotFound
end
# Public: Check a field, selecting by xpath.
#
# Parameters:
# xpath - Xpath of the item you're looking for.
#
# Returns nothing.
def check_by_xpath(xpath)
@log.debug " Start check_by_xpath(#{xpath})"
find(:xpath, xpath).set(true)
end
# Public: Uncheck a field, selecting by xpath.
#
# Parameters:
# xpath - Xpath of the item you're looking for.
#
# Returns nothing.
def uncheck_by_xpath(xpath)
@log.debug " Start uncheck_by_xpath(#{xpath})"
find(:xpath, xpath).set(false)
end
# Public: Utilizes the Approximation Factory to find the selector type of the
# adjacent field to the item you wish to validate.
#
# selectors - Input, text area, select, etc.
# value - Text to be filled in or chosen from a drop down.
#
# Returns nothing.
def click_approximate_field(selectors, option = nil)
selectors.each do |selector|
begin
click_by_xpath(selector, option)
return
rescue Selenium::WebDriver::Error::NoSuchElementError,
Selenium::WebDriver::Error::TimeOutError,
Selenium::WebDriver::Error::InvalidSelectorError,
Capybara::ElementNotFound
end
end
@log.error "Failed to click approximate field. Selectors are:\n " \
"#{selectors.join("\n") }"
raise Capybara::ElementNotFound
end
# Public: Clicks on the link or button with the given name.
#
# Parameters:
# text - Item to be clicked on
#
# Returns nothing.
def click(text)
click_on text
@log.debug " clicking #{text}"
end
# Public: Click a link or button, selecting by xpath.
#
# Parameters:
# xpath - Xpath of the item you're looking for.
#
# Returns nothing.
def click_by_xpath(xpath, option)
@log.debug " Start click_by_xpath(#{xpath})"
if option == "button"
find(:xpath, xpath).click
elsif option == "radio"
find(:xpath, xpath).set(true)
end
end
# Public: Same as get_field, but if there are multiple fields
# using the same name.
#
# selectors - Input, text area, select, etc.
#
# Returns nothing.
def get_approximate_field(selectors)
selectors.each do |selector|
begin
return get_field(selector)
rescue Selenium::WebDriver::Error::NoSuchElementError,
Selenium::WebDriver::Error::TimeOutError,
Capybara::ElementNotFound
end
end
@log.error "Failed to get approximate field. Selectors are:\n " \
"#{selectors.join("\n") }"
raise Capybara::ElementNotFound
end
# Public: Finds the field you are looking for and reads its content.
#
# Parameters:
#   selector - XPath of the input, text area, select, etc.
#   options  - Extra options for narrowing the search (currently unused
#              by this implementation).
#
# Returns the value of a text input, the selected option's text for a
# single-select, or the stripped text of any other element.
def get_field(selector, options={})
  wait_for(:xpath, selector)
  element = find(:xpath, selector)
  # The redundant `begin ... end` wrapper around this conditional was
  # removed; it had no rescue/ensure and served no purpose.
  if element[:type] == "text"
    element[:value]
  elsif element[:type] == "select-one"
    element.find(:xpath, "option[@selected ='selected']").text
  else
    element.text.strip
  end
end
# Public: Utilizes the Approximation Factory to find the selector type of the
# adjacent field to the item you wish to fill in. It then calls
# set_field with the selector type and value to be filled in.
#
# Parameters:
# selectors - Input, text area, select, etc.
# value - Text to be filled in or chosen from a drop down.
#
# Returns nothing.
def set_approximate_field(selectors, value=nil)
selectors.each do |selector|
begin
set_field(selector, value)
return
rescue Selenium::WebDriver::Error::NoSuchElementError,
Selenium::WebDriver::Error::TimeOutError,
Capybara::ElementNotFound
end
end
@log.error "Failed to set approximate field. Selectors are:\n " \
"#{selectors.join("\n") }"
raise Capybara::ElementNotFound
end
# Public: Takes in the id of a selector, i.e. input, text area, select, etc.,
# and inputs the value to this field.
#
# Parameters:
#   id    - Input, text area, select, etc. May be a raw element id, an
#           xpath (starting with // or id("...")), or a radio-style
#           attribute locator.
#   value - Text to be filled in or chosen from a drop down.
#
# Returns nothing.
def set_field(id, value=nil)
  @log.debug " Start set_field(#{id.inspect}, #{value.inspect})"
  if id =~ /@value=/
    node_name = 'radio'
    locator = id
  elsif id =~ /^\/\// or id =~ /^id\(".+"\)/
    node_name = nil
    begin
      node = driver.find_element(:xpath, id)
      node_name = node.tag_name.downcase
    rescue Selenium::WebDriver::Error::NoSuchElementError
    end
    locator = id
  elsif id =~ /^.+=.+ .+=.+$/
    node_name = 'radio'
    locator = id
  else
    @log.debug " set_field: Waiting up to #{DEFAULT_TIMEOUT} " \
      "seconds to find_element(:id, #{id})..."
    wait_for(:id, id)
    node = find(:id, id)
    node_name = node.tag_name.downcase
    locator = "//*[@id='#{id}']"
  end
  case node_name
  when 'textarea', 'input'
    # The 'textarea' and 'input' branches were byte-identical duplicates;
    # they are handled together now, with the clearing logic extracted.
    @log.debug " set_field: node_name is #{node_name.inspect}"
    @log.debug " set_field: locator is #{locator.inspect}"
    clear_field_via_js(locator)
    @driver.find_element(:xpath, locator).send_keys(value, :tab)
  when 'select'
    # BUG FIX: a space was missing after the timeout, logging "4seconds".
    @log.debug " set_field: Waiting up to #{DEFAULT_TIMEOUT} " \
      "seconds to find_element(:xpath, #{locator})..."
    wait_for(:xpath, locator)
    select = Selenium::WebDriver::Support::Select.new( \
      @driver.find_element(:xpath, locator))
    safe_deselect_all(select)
    select.select_by(:text, value)
  when 'radio'
    @driver.find_element(:xpath, locator).click
  else
    @driver.find_element(:xpath, locator).send_keys(value)
  end
end

# Internal: Blank out the text field at `locator` via JavaScript so stale
# contents never mix with the new value. Skipped (with a warning) when the
# locator contains a double quote, because it could not be embedded safely
# into the double-quoted JS string below.
def clear_field_via_js(locator)
  if locator['"']
    @log.warn " set_field: locator (#{locator.inspect}) " \
      "has a \" in it, so... I couldn't check if the input was " \
      "empty. Good luck!"
  else
    unless get_field(locator).empty?
      @driver.execute_script("return document.evaluate(\"#{locator}\"," \
        "document, null," \
        "XPathResult.FIRST_ORDERED_NODE_TYPE," \
        "null).singleNodeValue.value = '';", nil)
    end
  end
end
# Public: Create and execute a lookup for an element located by `method`
# and `locator`, relying on Capybara's implicit wait inside `find`.
#
# Parameters:
#   method  - Lookup strategy (:id, :xpath, :name, ...).
#   locator - Value searched for with that strategy, or the element.
#
# Returns nothing of interest (the found element, used for its side effect).
def wait_for(method, locator)
  message = format(" wait_for: Waiting up to %s seconds to find (%s, %s)...",
                   DEFAULT_TIMEOUT, method, locator)
  @log.debug message
  sleep 0.1
  find(method, locator)
end
# Public: Takes in an array of xpaths (such as that provided by the
# ApproximationFactory class) and returns the element for the first
# xpath that matches anything on the page.
#
# Parameters:
#   selectors - The array of xpaths to be searched, tried in order.
#
# Returns the first element that matches the selectors array.
# Raises Capybara::ElementNotFound when no selector matches.
def find_approximate_element(selectors)
  selectors.each do |xpath|
    @log.debug "Finding element at #{xpath}..."
    begin
      return find(:xpath, xpath)
    rescue Selenium::WebDriver::Error::NoSuchElementError,
           Selenium::WebDriver::Error::TimeOutError,
           Capybara::ElementNotFound
      # This candidate failed; fall through to the next one.
    end
  end
  @log.error "Failed to find approximate element. Selectors are:\n " \
    "#{selectors.join("\n")}"
  raise Capybara::ElementNotFound
end
# Public: Changes WebDriver focus to the most recently opened window or
# tab (the last entry in `@driver.window_handles`).
#
# Returns the result of the driver's switch_to call; callers use this
# method for its side effect only. (The previous comment claiming it
# returns a matching element was a copy-paste leftover.)
def last_window_focus
  @driver.switch_to.window(@driver.window_handles.last)
end
end
|
# Public: Thin client wrapper around Dropio::Api. Each public method
# delegates to the underlying service and then, via #handle, either wraps
# the raw response in the matching Dropio model object or raises a typed
# error for non-200 responses.
class Dropio::Client
  attr_accessor :service

  def initialize
    self.service = Dropio::Api.new
  end

  # --- Drop operations ------------------------------------------------------

  def drop(drop_name, token = nil)
    handle(:drop, self.service.drop(drop_name, token))
  end

  def generate_drop_url(drop)
    self.service.generate_drop_url(drop.name, drop.default_token)
  end

  def create_drop(params = {})
    handle(:drop, self.service.create_drop(params))
  end

  def update_drop(drop)
    params = { :description => drop.description, :admin_email => drop.admin_email,
               :email_key => drop.email_key, :default_view => drop.default_view,
               :chat_password => drop.chat_password, :guests_can_comment => drop.guests_can_comment,
               :guests_can_add => drop.guests_can_add, :guests_can_delete => drop.guests_can_delete,
               :expiration_length => drop.expiration_length, :password => drop.password,
               :admin_password => drop.admin_password, :premium_code => drop.premium_code }
    handle(:drop, self.service.update_drop(drop.name, drop.admin_token, params))
  end

  def empty_drop(drop)
    r = handle(:response, self.service.empty_drop(drop.name, drop.admin_token))
    r["result"]
  end

  def delete_drop(drop)
    r = handle(:response, self.service.delete_drop(drop.name, drop.admin_token))
    r["result"]
  end

  def promote_nick(drop, nick)
    r = handle(:response, self.service.promote_nick(drop.name, nick, drop.admin_token))
    r["result"]
  end

  def drop_upload_code(drop)
    r = handle(:response, self.service.drop_upload_code(drop.name, drop.default_token))
    r["upload_code"]
  end

  # --- Asset creation -------------------------------------------------------
  # Each creator returns the new Dropio::Asset with its back-reference to
  # the owning drop filled in.

  def create_link(drop, url, title = nil, description = nil)
    a = handle(:asset, self.service.create_link(drop.name, url, title, description, drop.default_token))
    a.drop = drop
    a
  end

  def create_note(drop, contents, title = nil, description = nil)
    a = handle(:asset, self.service.create_note(drop.name, contents, title, description, drop.default_token))
    a.drop = drop
    a
  end

  def add_file(drop, file_path, description = nil, convert_to = nil, pingback_url = nil, comment = nil)
    a = handle(:asset, self.service.add_file(drop.name, file_path, description, convert_to, pingback_url, comment, drop.default_token))
    a.drop = drop
    a
  end

  def add_file_from_url(drop, url, description = nil, convert_to = nil, pingback_url = nil)
    a = handle(:asset, self.service.add_file_from_url(drop.name, url, description, convert_to, pingback_url, drop.default_token))
    a.drop = drop
    a
  end

  # --- Asset retrieval and manipulation -------------------------------------

  def assets(drop, page = 1, order = :oldest)
    assets = handle(:assets, self.service.assets(drop.name, page, order, drop.default_token))
    assets.each { |a| a.drop = drop }
    assets
  end

  def asset(drop, asset_name)
    a = handle(:asset, self.service.asset(drop.name, asset_name, drop.default_token))
    a.drop = drop
    a
  end

  def generate_asset_url(asset)
    # BUG FIX: this previously delegated to generate_drop_url, which takes
    # only (drop_name, token) — see #generate_drop_url above — yet three
    # asset-shaped arguments were passed. The asset variant is intended.
    # NOTE(review): confirm Dropio::Api#generate_asset_url exists.
    self.service.generate_asset_url(asset.drop.name, asset.name, asset.drop.default_token)
  end

  def asset_embed_code(asset)
    r = handle(:response, self.service.asset_embed_code(asset.drop.name, asset.name, asset.drop.default_token))
    r["embed_code"]
  end

  def update_asset(asset)
    params = { :title => asset.title, :description => asset.description, :url => asset.url, :contents => asset.contents }
    a = handle(:asset, self.service.update_asset(asset.drop.name, asset.name, params, asset.drop.default_token))
    a.drop = asset.drop
    a
  end

  def delete_asset(asset)
    r = handle(:response, self.service.delete_asset(asset.drop.name, asset.name, asset.drop.default_token))
    r["result"]
  end

  def send_asset_to_drop(asset, target_drop)
    r = handle(:response, self.service.send_asset_to_drop(asset.drop.name, asset.name, target_drop.name, target_drop.default_token, asset.drop.default_token))
    r["result"]
  end

  def send_asset_to_fax(asset, fax_number)
    r = handle(:response, self.service.send_asset_to_fax(asset.drop.name, asset.name, fax_number, asset.drop.default_token))
    r["result"]
  end

  def send_asset_to_emails(asset, emails, message)
    r = handle(:response, self.service.send_asset_to_emails(asset.drop.name, asset.name, emails, message, asset.drop.default_token))
    r["result"]
  end

  def copy_asset(asset, target_drop)
    r = handle(:response, self.service.copy_asset(asset.drop.name, asset.name, target_drop.name, target_drop.default_token, asset.drop.default_token))
    r["result"]
  end

  def move_asset(asset, target_drop)
    r = handle(:response, self.service.move_asset(asset.drop.name, asset.name, target_drop.name, target_drop.default_token, asset.drop.default_token))
    r["result"]
  end

  # --- Comments -------------------------------------------------------------
  # Note: update/delete require the drop's admin token; read/create use the
  # default token.

  def comments(asset, page = 1)
    comments = handle(:comments, self.service.comments(asset.drop.name, asset.name, page, asset.drop.default_token))
    comments.each { |c| c.asset = asset }
    comments
  end

  def create_comment(asset, contents)
    c = handle(:comment, self.service.create_comment(asset.drop.name, asset.name, contents, asset.drop.default_token))
    c.asset = asset
    c
  end

  def comment(asset, comment_id)
    c = handle(:comment, self.service.comment(asset.drop.name, asset.name, comment_id, asset.drop.default_token))
    c.asset = asset
    c
  end

  def update_comment(comment)
    c = handle(:comment, self.service.update_comment(comment.asset.drop.name, comment.asset.name, comment.id, comment.contents, comment.asset.drop.admin_token))
    c.asset = comment.asset
    c
  end

  def delete_comment(comment)
    r = handle(:response, self.service.delete_comment(comment.asset.drop.name, comment.asset.name, comment.id, comment.asset.drop.admin_token))
    r["result"]
  end

  # --- Subscriptions --------------------------------------------------------

  def create_twitter_subscription(drop, username, password, message, events)
    s = handle(:subscription, self.service.create_twitter_subscription(drop.name, username, password, message, events, drop.default_token))
    s.drop = drop
    s
  end

  def create_email_subscription(drop, emails, welcome_message, welcome_subject, welcome_from, message, events)
    s = handle(:subscription, self.service.create_email_subscription(drop.name, emails, welcome_message, welcome_subject, welcome_from, message, events, drop.default_token))
    s.drop = drop
    s
  end

  def create_pingback_subscription(drop, url, events)
    s = handle(:subscription, self.service.create_pingback_subscription(drop.name, url, events, drop.default_token))
    s.drop = drop
    s
  end

  def subscriptions(drop, page = 1)
    subscriptions = handle(:subscriptions, self.service.subscriptions(drop.name, page, drop.admin_token))
    subscriptions.each { |s| s.drop = drop }
    subscriptions
  end

  private

  # Internal: Raise (via parse_response) for non-200 responses, then wrap
  # the payload in the model class matching `type`.
  def handle(type, response)
    if response.code != 200
      parse_response(response)
    end
    case type
    when :drop          then return Dropio::Drop.new(response)
    when :asset         then return Dropio::Asset.new(response)
    when :assets        then return response.collect { |a| Dropio::Asset.new(a) }
    # BUG FIX: this branch previously used the bare constant `Comment`,
    # inconsistent with the namespaced Dropio::Comment used for :comments.
    when :comment       then return Dropio::Comment.new(response)
    when :comments      then return response.collect { |c| Dropio::Comment.new(c) }
    when :subscription  then return Dropio::Subscription.new(response)
    when :subscriptions then return response.collect { |s| Dropio::Subscription.new(s) }
    when :response      then return parse_response(response)
    end
  end

  # Internal: Map an HTTP status code to a return value (200) or a typed
  # Dropio error; anything unrecognized raises a generic RuntimeError.
  def parse_response(response)
    case response.code
    when 200 then return response
    when 400 then raise Dropio::RequestError, parse_error_message(response)
    when 403 then raise Dropio::AuthorizationError, parse_error_message(response)
    when 404 then raise Dropio::MissingResourceError, parse_error_message(response)
    when 500 then raise Dropio::ServerError, "There was a problem connecting to Drop.io."
    else
      raise "Received an unexpected HTTP response: #{response.code} #{response.body}"
    end
  end

  # Internal: Extracts the error message from the response for the
  # exception, falling back to a generic connection message.
  def parse_error_message(error_hash)
    if (error_hash && error_hash.is_a?(Hash) && error_hash["response"] && error_hash["response"]["message"])
      return error_hash["response"]["message"]
    else
      return "There was a problem connecting to Drop.io."
    end
  end
end
Version 2.0: JSON changes
class Dropio::Client
attr_accessor :service
def initialize
self.service = Dropio::Api.new
end
def drop(drop_name, token = nil)
handle(:drop, self.service.drop(drop_name, token))
end
def generate_drop_url(drop)
self.service.generate_drop_url(drop.name,drop.default_token)
end
def create_drop(params = {})
handle(:drop, self.service.create_drop(params))
end
def update_drop(drop)
params = { :description => drop.description, :admin_email => drop.admin_email,
:email_key => drop.email_key, :default_view => drop.default_view,
:chat_password => drop.chat_password, :guests_can_comment => drop.guests_can_comment,
:guests_can_add => drop.guests_can_add, :guests_can_delete => drop.guests_can_delete,
:expiration_length => drop.expiration_length, :password => drop.password,
:admin_password => drop.admin_password, :premium_code => drop.premium_code }
handle(:drop, self.service.update_drop(drop.name,drop.admin_token,params))
end
def empty_drop(drop)
r = handle(:response, self.service.empty_drop(drop.name,drop.admin_token))
r["result"]
end
def delete_drop(drop)
r = handle(:response, self.service.delete_drop(drop.name,drop.admin_token))
r["result"]
end
def promote_nick(drop,nick)
r = handle(:response, self.service.promote_nick(drop.name,nick,drop.admin_token))
r["result"]
end
def drop_upload_code(drop)
r = handle(:response, self.service.drop_upload_code(drop.name,drop.default_token))
r["upload_code"]
end
def create_link(drop, url, title = nil, description = nil)
a = handle(:asset, self.service.create_link(drop.name, url, title, description, drop.default_token))
a.drop = drop
a
end
def create_note(drop, contents, title = nil, description = nil)
a = handle(:asset, self.service.create_note(drop.name, contents, title, description, drop.default_token))
a.drop = drop
a
end
def add_file(drop, file_path, description = nil, convert_to = nil, pingback_url = nil, comment = nil)
a = handle(:asset, self.service.add_file(drop.name, file_path, description, convert_to, pingback_url, comment, drop.default_token))
a.drop = drop
a
end
def add_file_from_url(drop, url, description = nil, convert_to = nil, pingback_url = nil)
a = handle(:asset, self.service.add_file_from_url(drop.name, url, description, convert_to, pingback_url, drop.default_token))
a.drop = drop
a
end
def assets(drop, page = 1, order = :oldest)
assets = handle(:assets, self.service.assets(drop.name,page,order,drop.default_token))
assets.each{|a| a.drop = drop}
assets
end
def asset(drop, asset_name)
a = handle(:asset, self.service.asset(drop.name,asset_name,drop.default_token))
a.drop = drop
a
end
def generate_asset_url(asset)
self.service.generate_drop_url(asset.drop.name, asset.name, asset.drop.default_token)
end
def asset_embed_code(asset)
r = handle(:response, self.service.asset_embed_code(asset.drop.name,asset.name,asset.drop.default_token))
r["embed_code"]
end
def update_asset(asset)
params = { :title => asset.title, :description => asset.description, :url => asset.url, :contents => asset.contents }
a = handle(:asset, self.service.update_asset(asset.drop.name,asset.name,params,asset.drop.default_token))
a.drop = asset.drop
a
end
def delete_asset(asset)
r = handle(:response, self.service.delete_asset(asset.drop.name,asset.name,asset.drop.default_token))
r["result"]
end
def send_asset_to_drop(asset, target_drop)
r = handle(:response, self.service.send_asset_to_drop(asset.drop.name, asset.name, target_drop.name, target_drop.default_token, asset.drop.default_token))
r["result"]
end
def send_asset_to_fax(asset, fax_number)
r = handle(:response, self.service.send_asset_to_fax(asset.drop.name, asset.name, fax_number, asset.drop.default_token))
r["result"]
end
def send_asset_to_emails(asset, emails, message)
r = handle(:response, self.service.send_asset_to_emails(asset.drop.name, asset.name, emails, message, asset.drop.default_token))
r["result"]
end
def copy_asset(asset,target_drop)
r = handle(:response, self.service.copy_asset(asset.drop.name,asset.name,target_drop.name,target_drop.default_token,asset.drop.default_token))
r["result"]
end
def move_asset(asset,target_drop)
r = handle(:response, self.service.move_asset(asset.drop.name,asset.name,target_drop.name,target_drop.default_token,asset.drop.default_token))
r["result"]
end
def comments(asset, page = 1)
comments = handle(:comments, self.service.comments(asset.drop.name,asset.name,page,asset.drop.default_token))
comments.each{|c| c.asset = asset}
comments
end
def create_comment(asset, contents)
c = handle(:comment, self.service.create_comment(asset.drop.name,asset.name,contents,asset.drop.default_token))
c.asset = asset
c
end
def comment(asset, comment_id)
c = handle(:comment, self.service.comment(asset.drop.name,asset.name,comment_id,asset.drop.default_token))
c.asset = asset
c
end
def update_comment(comment)
c = handle(:comment, self.service.update_comment(comment.asset.drop.name,comment.asset.name,comment.id,comment.contents,comment.asset.drop.admin_token))
c.asset = comment.asset
c
end
def delete_comment(comment)
r = handle(:response, self.service.delete_comment(comment.asset.drop.name,comment.asset.name,comment.id,comment.asset.drop.admin_token))
r["result"]
end
def create_twitter_subscription(drop, username,password, message, events)
s = handle(:subscription, self.service.create_twitter_subscription(drop.name, username, password, message, events, drop.default_token))
s.drop = drop
s
end
def create_email_subscription(drop, emails, welcome_message, welcome_subject, welcome_from, message, events)
s = handle(:subscription, self.service.create_email_subscription(drop.name, emails, welcome_message, welcome_subject, welcome_from, message, events, drop.default_token))
s.drop = drop
s
end
def create_pingback_subscription(drop, url, events)
s = handle(:subscription, self.service.create_pingback_subscription(drop.name, url, events, drop.default_token))
s.drop = drop
s
end
def subscriptions(drop, page = 1)
subscriptions = handle(:subscriptions, self.service.subscriptions(drop.name,page,drop.admin_token))
subscriptions.each{|s| s.drop = drop}
subscriptions
end
private
def handle(type, response)
if response.code != 200
parse_response(response)
end
case type
when :drop then return Dropio::Drop.new(response)
when :asset then return Dropio::Asset.new(response)
when :assets then return response['assets'].collect{|a| Dropio::Asset.new(a)}
when :comment then return Comment.new(response)
when :comments then return response['comments'].collect{|c| Dropio::Comment.new(c)}
when :subscription then return Dropio::Subscription.new(response)
when :subscriptions then return response['subscriptions'].collect{|s| Dropio::Subscription.new(s)}
when :response then return parse_response(response)
end
end
def parse_response(response)
case response.code
when 200 then return response
when 400 then raise Dropio::RequestError, parse_error_message(response)
when 403 then raise Dropio::AuthorizationError, parse_error_message(response)
when 404 then raise Dropio::MissingResourceError, parse_error_message(response)
when 500 then raise Dropio::ServerError, "There was a problem connecting to Drop.io."
else
raise "Received an unexpected HTTP response: #{response.code} #{response.body}"
end
end
# Extracts the error message from the response for the exception.
def parse_error_message(error_hash)
if (error_hash && error_hash.is_a?(Hash) && error_hash["response"] && error_hash["response"]["message"])
return error_hash["response"]["message"]
else
return "There was a problem connecting to Drop.io."
end
end
end |
module KnowledgeNetStore
class Engine < ::Rails::Engine
isolate_namespace KnowledgeNetStore
config.to_prepare do
ApplicationController.helper ::ApplicationHelper
end
end
end
add engine 打补丁机制
module KnowledgeNetStore
class Engine < ::Rails::Engine
isolate_namespace KnowledgeNetStore
config.to_prepare do
ApplicationController.helper ::ApplicationHelper
Dir.glob(Rails.root + "app/decorators/knowledge_net_store/**/*_decorator.rb").each do |c|
require_dependency(c)
end
end
end
end
|
version ConfigRoot
require 'pathname'
require 'dotenv'
module ConfigRoot
# make these methods also callable directly
# see for discussion on extend self vs module_function:
# https://github.com/bbatsov/ruby-style-guide/issues/556
extend self
# The app's configuration root directory
# @return [Pathname] path to configuration root
def config_root
Pathname.new(ENV["OOD_APP_CONFIG"] || "/etc/ood/config/apps/myjobs")
end
def load_dotenv_files
Dir.chdir app_root do
# .env.local first, so it can override OOD_APP_CONFIG
Dotenv.load(*dotenv_local_files) unless dotenv_local_files.empty?
# load the rest of the dotenv files
Dotenv.load(*dotenv_files)
end
end
private
# FIXME: if Rails is always guarenteed to be defined
# here, including requiring from a bin/setup-production, then lets drop this
#
def rails_env
(defined?(Rails) && Rails.env) || ENV['RAILS_ENV']
end
# FIXME: if Rails.root is always guarenteed to be defined
# here, including in a bin/setup-production, then lets drop this
#
# The app's root directory
# @return [Pathname] path to configuration root
def app_root
Pathname.new(File.expand_path("../../", __FILE__))
end
def dotenv_local_files
[
(app_root.join(".env.#{rails_env}.local") unless rails_env.nil?),
(app_root.join(".env.local") unless rails_env == "test"),
].compact
end
def dotenv_files
[
config_root.join("env"),
(app_root.join(".env.#{rails_env}") unless rails_env.nil?),
app_root.join(".env")
].compact
end
end
|
require 'rubygems'
gem 'httparty'
require 'httparty'
require 'activesupport'
require File.dirname(__FILE__) + '/ebay_products/data'
class EbayProducts
include HTTParty
base_uri "open.api.ebay.com"
default_params :responseencoding => 'XML', :callname => "FindProducts", :version => "619", :siteid => 0, :maxentries => 18
attr_reader :query, :appid, :product_id
def initialize(args, appid)
@query = args[:keywords]
@product_id = args[:product_id]
@appid = appid
end
def search
@search ||= self.class.get("/shopping", :query => options, :format => :xml)["FindProductsResponse"]["Product"]
end
def products
@products ||= search.collect {|product| ProductInformation.new(product) }
end
def product
products.first
end
def options
hash = {:appid => @appid}
if @product_id
hash['ProductID.value'.to_sym] = @product_id
hash['ProductID.type'.to_sym] = 'Reference' # assumes product id is of type reference
else
hash[:QueryKeywords] = @query
end
hash
end
end
Handle when only 1 product in result
require 'rubygems'
gem 'httparty'
require 'httparty'
require 'activesupport'
require File.dirname(__FILE__) + '/ebay_products/data'
class EbayProducts
include HTTParty
base_uri "open.api.ebay.com"
default_params :responseencoding => 'XML', :callname => "FindProducts", :version => "619", :siteid => 0, :maxentries => 18
attr_reader :query, :appid, :product_id
def initialize(args, appid)
@query = args[:keywords]
@product_id = args[:product_id]
@appid = appid
end
def search
if @search.blank?
@search = self.class.get("/shopping", :query => options, :format => :xml)["FindProductsResponse"]["Product"]
if @search.is_a? Hash
@search = [@search]
end
end
@search
end
def products
@products ||= search.collect {|product| ProductInformation.new(product) }
end
def product
products.first
end
def options
hash = {:appid => @appid}
if @product_id
hash['ProductID.value'.to_sym] = @product_id
hash['ProductID.type'.to_sym] = 'Reference' # assumes product id is of type reference
else
hash[:QueryKeywords] = @query
end
hash
end
end |
module LdapBinder
module CryptoSupport
#
# Prepares a password for use with LDAP
#
def prepare_password(password, salt=create_salt)
pwdigest = Digest::SHA1.digest("#{password}#{salt}")
"{SSHA}" + Base64.encode64("#{pwdigest}#{salt}").chomp!.tap { | s | puts "HASH = #{s}" }
end
#
# Creates a salt to use for better hashing of a password
#
def create_salt(login='default_login')
Digest::SHA1.hexdigest("--#{Time.now.to_s}--#{login}--")
end
end
end
Removed puts.
module LdapBinder
module CryptoSupport
#
# Prepares a password for use with LDAP
#
def prepare_password(password, salt=create_salt)
pwdigest = Digest::SHA1.digest("#{password}#{salt}")
"{SSHA}" + Base64.encode64("#{pwdigest}#{salt}").chomp! # .tap { | s | puts "HASH = #{s}" }
end
#
# Creates a salt to use for better hashing of a password
#
def create_salt(login='default_login')
Digest::SHA1.hexdigest("--#{Time.now.to_s}--#{login}--")
end
end
end
|
module Ecwid
VERSION = '0.0.1'
end
update version
module Ecwid
VERSION = '0.0.2'
end |
require "formula"
class Hosts < Formula
homepage "https://github.com/xwmx/hosts"
url "https://github.com/xwmx/hosts.git",
:using => :git,
:tag => "3.5.1"
head "https://github.com/xwmx/hosts.git"
depends_on "bash"
depends_on "bash-completion"
def install
bin.install "hosts"
zsh_completion.install "etc/hosts-completion.zsh" => "_hosts"
bash_completion.install "etc/hosts-completion.bash" => "hosts"
end
test do
system "#{bin}/hosts"
end
end
Update hosts to 3.6.0
require "formula"
class Hosts < Formula
homepage "https://github.com/xwmx/hosts"
url "https://github.com/xwmx/hosts.git",
:using => :git,
:tag => "3.6.0"
head "https://github.com/xwmx/hosts.git"
depends_on "bash"
depends_on "bash-completion"
def install
bin.install "hosts"
zsh_completion.install "etc/hosts-completion.zsh" => "_hosts"
bash_completion.install "etc/hosts-completion.bash" => "hosts"
end
test do
system "#{bin}/hosts"
end
end
|
module Egree
VERSION = "0.0.4"
end
Bump Gem version
cancel_url and procedure can now be supplied as options when creating a case.
Document takes an optional filename argument. This filename is visible
in the Egree interface,
module Egree
VERSION = "0.0.5"
end
|
require 'libis-ingester'
require 'libis-tools'
require 'libis-format'
require 'libis/tools/extend/hash'
require 'sidekiq'
require 'sidekiq/api'
require 'singleton'
module Libis
module Ingester
# noinspection RubyResolve
class Initializer
include Singleton
attr_accessor :config, :database
def initialize
@config = nil
@database = nil
end
def self.init(config_file)
initializer = self.instance
initializer.configure(config_file)
initializer.database
initializer.sidekiq
initializer
end
def configure(config_file)
@config = Initializer.load_config(config_file)
raise RuntimeError, "Configuration file '#{config_file}' not found." unless @config
raise RuntimeError, "Missing section 'config' in site config." unless @config.config
::Libis::Ingester.configure do |cfg|
@config.config.each do |key, value|
cfg.send("#{key}=", value)
end
end
if @config.ingester && @config.ingester.task_dir
::Libis::Ingester::Config.require_all(@config.ingester.task_dir)
end
if @config.format_config
Libis::Format::TypeDatabase.instance.load_types(@config.format_config.type_database) if @config.format_config.type_database
Libis::Format::Tools::Fido.add_format(@config.format_config.fido_formats) if @config.format_config.fido_formats
end
self
end
def database
return @database if @database
raise RuntimeError, "Missing section 'database' in site config." unless @config && @config.database
@database = ::Libis::Ingester::Database.new(
(@config.database.config_file || File.join(Libis::Ingester::ROOT_DIR, 'mongoid.yml')),
(@config.database.env || :test)
)
end
def sidekiq
return @sidekiq if @sidekiq
raise RuntimeError, 'Missing sidekiq section in configuration.' unless @config && @config.sidekiq
id = (@config.sidekiq.namespace.gsub(/\s/, '') || 'Ingester' rescue 'Ingester')
Sidekiq.configure_client do |config|
config.redis = {
url: @config.sidekiq.redis_url,
namespace: @config.sidekiq.namespace,
id: "#{id}Client"
}.cleanup
end
Sidekiq.configure_server do |config|
config.redis = {
url: @config.sidekiq.redis_url,
namespace: @config.sidekiq.namespace,
id: "#{id}Server"
}.cleanup
end
@sidekiq = Sidekiq::Client.new
end
def seed_database
raise RuntimeError, 'Database not initialized.' unless @database
sources = []
sources << @config.database.seed_dir if @config.database.seed_dir && Dir.exist?(@config.database.seed_dir)
sources << @config.seed.to_h if @config.seed
@database.setup.seed(*sources)
@database
end
private
def self.load_config(config_file)
raise RuntimeError, "Configuration file '#{config_file}' not found." unless File.exist?(config_file)
config = Libis::Tools::ConfigFile.new({}, preserve_original_keys: false)
config << config_file
config
end
end
end
end
Initializer: config values merge new values for Hashes
require 'libis-ingester'
require 'libis-tools'
require 'libis-format'
require 'libis/tools/extend/hash'
require 'sidekiq'
require 'sidekiq/api'
require 'singleton'
module Libis
module Ingester
# noinspection RubyResolve
class Initializer
include Singleton
attr_accessor :config, :database
def initialize
@config = nil
@database = nil
end
def self.init(config_file)
initializer = self.instance
initializer.configure(config_file)
initializer.database
initializer.sidekiq
initializer
end
def configure(config_file)
@config = Initializer.load_config(config_file)
raise RuntimeError, "Configuration file '#{config_file}' not found." unless @config
raise RuntimeError, "Missing section 'config' in site config." unless @config.config
::Libis::Ingester.configure do |cfg|
@config.config.each do |key, value|
if value.is_a?(Hash)
cfg[key].merge!(value)
else
cfg.send("#{key}=", value)
end
end
end
if @config.ingester && @config.ingester.task_dir
::Libis::Ingester::Config.require_all(@config.ingester.task_dir)
end
if @config.format_config
Libis::Format::TypeDatabase.instance.load_types(@config.format_config.type_database) if @config.format_config.type_database
Libis::Format::Tools::Fido.add_format(@config.format_config.fido_formats) if @config.format_config.fido_formats
end
self
end
def database
return @database if @database
raise RuntimeError, "Missing section 'database' in site config." unless @config && @config.database
@database = ::Libis::Ingester::Database.new(
(@config.database.config_file || File.join(Libis::Ingester::ROOT_DIR, 'mongoid.yml')),
(@config.database.env || :test)
)
end
def sidekiq
return @sidekiq if @sidekiq
raise RuntimeError, 'Missing sidekiq section in configuration.' unless @config && @config.sidekiq
id = (@config.sidekiq.namespace.gsub(/\s/, '') || 'Ingester' rescue 'Ingester')
Sidekiq.configure_client do |config|
config.redis = {
url: @config.sidekiq.redis_url,
namespace: @config.sidekiq.namespace,
id: "#{id}Client"
}.cleanup
end
Sidekiq.configure_server do |config|
config.redis = {
url: @config.sidekiq.redis_url,
namespace: @config.sidekiq.namespace,
id: "#{id}Server"
}.cleanup
end
@sidekiq = Sidekiq::Client.new
end
def seed_database
raise RuntimeError, 'Database not initialized.' unless @database
sources = []
sources << @config.database.seed_dir if @config.database.seed_dir && Dir.exist?(@config.database.seed_dir)
sources << @config.seed.to_h if @config.seed
@database.setup.seed(*sources)
@database
end
private
def self.load_config(config_file)
raise RuntimeError, "Configuration file '#{config_file}' not found." unless File.exist?(config_file)
config = Libis::Tools::ConfigFile.new({}, preserve_original_keys: false)
config << config_file
config
end
end
end
end
|
require 'support/dsl_accessor'
module EventMachine
module IRC
class Client
include DslAccessor
include IRC::Commands
include IRC::Responses
# EventMachine::Connection object to IRC server
# @private
attr_accessor :conn
# IRC server to connect to. Defaults to 127.0.0.1:6667
# attr_accessor :host, :port
dsl_accessor :host, :port
dsl_accessor :realname
dsl_accessor :ssl
# Custom logger
dsl_accessor :logger
# Set of channels that this client is connected to
attr_reader :channels
# Hash of callbacks on events. key is symbol event name.
# value is array of procs to call
# @private
attr_reader :callbacks
# Build a new unconnected IRC client
#
# @param [Hash] options
# @option options [String] :host
# @option options [String] :port
# @option options [Boolean] :ssl
# @option options [String] :realname
#
# @yield [client] new instance for decoration
def initialize(options = {}, &blk)
options.symbolize_keys!
options = {
:host => '127.0.0.1',
:port => '6667',
:ssl => false,
:realname => 'Anonymous Annie'
}.merge!(options)
@host = options[:host]
@port = options[:port]
@ssl = options[:ssl]
@realname = options[:realname]
@channels = Set.new
@callbacks = Hash.new
@connected = false
if block_given?
if blk.arity == 1
yield self
else
instance_eval(&blk)
end
end
end
# Creates a Eventmachine TCP connection with :host and :port. It should be called
# after callbacks are registered.
# @see #on
# @return [EventMachine::Connection]
def connect
self.conn ||= EventMachine::connect(@host, @port, Dispatcher, :parent => self, :ssl => @ssl)
end
# @return [Boolean]
def connected?
@connected
end
# Callbacks
# Register a callback with :name as one of the following, and
# a block with the same number of params.
#
# @example
# on(:join) {|channel| puts channel}
#
# :connect - called after connection to server established
#
# :join
# @param who [String]
# @param channel [String]
# @param names [Array]
#
# :message, :privmsg - called on channel message or nick message
# @param source [String]
# @param target [String]
# @param message [String]
#
# :raw - called for all messages from server
# @param raw_hash [Hash] same format as return of #parse_message
def on(name, &blk)
# TODO: I thought Hash.new([]) would work, but it gets empted out
# TODO: normalize aliases :privmsg, :message, etc
(@callbacks[name.to_sym] ||= []) << blk
end
# Trigger a named callback
def trigger(name, *args)
# TODO: should this be instance_eval(&blk)? prevents it from non-dsl style
(@callbacks[name.to_sym] || []).each {|blk| blk.call(*args)}
end
# Sends raw message to IRC server. Assumes message is correctly formatted
# TODO: what if connect fails? or disconnects?
def send_data(message)
return false unless connected?
message = message + "\r\n"
log Logger::DEBUG, message
self.conn.send_data(message)
end
# EventMachine Callbacks
def receive_data(data)
data.split("\r\n").each do |message|
parsed = parse_message(message)
handle_parsed_message(parsed)
trigger(:raw, parsed)
end
end
# @private
def ready
@connected = true
user('guest', '0', @realname)
trigger(:connect)
end
# @private
def unbind
trigger(:disconnect)
end
def log(*args)
@logger.log(*args) if @logger
end
def run!
EM.epoll
EventMachine.run do
trap("TERM") { EM::stop }
trap("INT") { EM::stop }
connect
log Logger::INFO, "Starting IRC client..."
end
log Logger::INFO, "Stopping IRC client"
@logger.close if @logger
end
end
end
end
update client docs
require 'support/dsl_accessor'
module EventMachine
module IRC
class Client
include DslAccessor
include IRC::Commands
include IRC::Responses
# EventMachine::Connection object to IRC server
# @private
attr_accessor :conn
# @macro dsl_accessor
# Accessor for `$1`
# Defaults to '127.0.0.1'
dsl_accessor :host
# @macro dsl_accessor
# Defaults to '6667'
dsl_accessor :port
# @macro dsl_accessor
dsl_accessor :realname
# @macro dsl_accessor
dsl_accessor :ssl
# @macro dsl_accessor
dsl_accessor :logger
# Set of channels that this client is connected to
attr_reader :channels
# Hash of callbacks on events. key is symbol event name.
# value is array of procs to call
# @private
attr_reader :callbacks
# Build a new unconnected IRC client
#
# @param [Hash] options
# @option options [String] :host
# @option options [String] :port
# @option options [Boolean] :ssl
# @option options [String] :realname
#
# @yield [client] new instance for decoration
def initialize(options = {}, &blk)
options.symbolize_keys!
options = {
:host => '127.0.0.1',
:port => '6667',
:ssl => false,
:realname => 'Anonymous Annie'
}.merge!(options)
@host = options[:host]
@port = options[:port]
@ssl = options[:ssl]
@realname = options[:realname]
@channels = Set.new
@callbacks = Hash.new
@connected = false
if block_given?
if blk.arity == 1
yield self
else
instance_eval(&blk)
end
end
end
# Creates a Eventmachine TCP connection with :host and :port. It should be called
# after callbacks are registered.
# @see #on
# @return [EventMachine::Connection]
def connect
self.conn ||= EventMachine::connect(@host, @port, Dispatcher, :parent => self, :ssl => @ssl)
end
# @return [Boolean]
def connected?
@connected
end
# Start running the client
def run!
EM.epoll
EventMachine.run do
trap("TERM") { EM::stop }
trap("INT") { EM::stop }
connect
log Logger::INFO, "Starting IRC client..."
end
log Logger::INFO, "Stopping IRC client"
@logger.close if @logger
end
# === Callbacks
# Register a callback with `:name` as one of the following, and
# a block with the same number of params.
#
# @example
# on(:join) {|channel| puts channel}
#
# :connect - called after connection to server established
#
# :join
# @param who [String]
# @param channel [String]
# @param names [Array]
#
# :message, :privmsg - called on channel message or nick message
# @param source [String]
# @param target [String]
# @param message [String]
#
# :raw - called for all messages from server
# @param raw_hash [Hash] same format as return of #parse_message
def on(name, &blk)
# TODO: I thought Hash.new([]) would work, but it gets empted out
# TODO: normalize aliases :privmsg, :message, etc
(@callbacks[name.to_sym] ||= []) << blk
end
# Trigger a named callback
# @private
def trigger(name, *args)
# TODO: should this be instance_eval(&blk)? prevents it from non-dsl style
(@callbacks[name.to_sym] || []).each {|blk| blk.call(*args)}
end
# @private
def log(*args)
@logger.log(*args) if @logger
end
# Sends raw message to IRC server. Assumes message is correctly formatted
# TODO: what if connect fails? or disconnects?
# @private
def send_data(message)
return false unless connected?
message = message + "\r\n"
log Logger::DEBUG, message
self.conn.send_data(message)
end
# === EventMachine Callbacks
# @private
def receive_data(data)
data.split("\r\n").each do |message|
parsed = parse_message(message)
handle_parsed_message(parsed)
trigger(:raw, parsed)
end
end
# @private
def ready
@connected = true
user('guest', '0', @realname)
trigger(:connect)
end
# @private
def unbind
trigger(:disconnect)
end
end
end
end |
##
# This class provides methods to be inherited as route definition.
class Midori::API
# Add GET method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# get '/' do
# puts 'Hello World'
# end
#
# Regex as router
# get /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.get(path, &block) end
# Add POST method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# post '/' do
# puts 'Hello World'
# end
#
# Regex as router
# post /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.post(path, &block) end
# Add PUT method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# put '/' do
# puts 'Hello World'
# end
#
# Regex as router
# put /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.put(path, &block) end
# Add DELETE method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# delete '/' do
# puts 'Hello World'
# end
#
# Regex as router
# delete /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.delete(path, &block) end
# Add OPTIONS method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# options '/' do
# puts 'Hello World'
# end
#
# Regex as router
# options /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.options(path, &block) end
# Add LINK method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# link '/' do
# puts 'Hello World'
# end
#
# Regex as router
# link /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.link(path, &block) end
# Add UNLINK method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# unlink '/' do
# puts 'Hello World'
# end
#
# Regex as router
# unlink /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.unlink(path, &unlink) end
METHODS = %w'get post put delete options link unlink' # :nodoc:
# Magics to fill DSL methods through dynamically class method definition
METHODS.each do |method|
define_singleton_method(method) do |*args|
add_route(method.upcase, args[0], args[1]) # args[0]: route, # args[1]: block
end
end
def self.add_route(method, path, block)
@route = Array.new if @route.nil?
@route << Midori::Route.new(method, path, block)
nil
end
private_class_method :add_route
# def self.match(method, route)
#
# end
end
class Midori::Route
attr_accessor :method, :path, :function
def initialize(method, path, function)
@method = method
@path = path
@function = function
end
end
Improve doc
##
# This class provides methods to be inherited as route definition.
class Midori::API
# Add GET method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# get '/' do
# puts 'Hello World'
# end
#
# Regex as router
# get /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.get(path, &block) end
# Add POST method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# post '/' do
# puts 'Hello World'
# end
#
# Regex as router
# post /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.post(path, &block) end
# Add PUT method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# put '/' do
# puts 'Hello World'
# end
#
# Regex as router
# put /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.put(path, &block) end
# Add DELETE method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# delete '/' do
# puts 'Hello World'
# end
#
# Regex as router
# delete /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.delete(path, &block) end
# Add OPTIONS method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# options '/' do
# puts 'Hello World'
# end
#
# Regex as router
# options /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.options(path, &block) end
# Add LINK method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# link '/' do
# puts 'Hello World'
# end
#
# Regex as router
# link /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.link(path, &block) end
# Add UNLINK method as a DSL for route definition
# === Attributes
# * +path+ [+String+, +Regex+] - Accepts as part of path in route definition.
# === Returns
# nil
# === Examples
# String as router
# unlink '/' do
# puts 'Hello World'
# end
#
# Regex as router
# unlink /\/hello\/(.*?)/ do
# puts 'Hello World'
# end
def self.unlink(path, &unlink) end
METHODS = %w'get post put delete options link unlink' # :nodoc:
# Magics to fill DSL methods through dynamically class method definition
METHODS.each do |method|
define_singleton_method(method) do |*args|
add_route(method.upcase, args[0], args[1]) # args[0]: route, # args[1]: block
end
end
def self.add_route(method, path, block)
@route = Array.new if @route.nil?
@route << Midori::Route.new(method, path, block)
nil
end
private_class_method :add_route
# def self.match(method, route)
#
# end
end
class Midori::Route
attr_accessor :method, :path, :function
def initialize(method, path, function)
@method = method
@path = path
@function = function
end
end |
Pod::Spec.new do |s|
s.name = 'sqlite3'
s.version = '3.8.8.1'
s.license = { :type => 'Public Domain', :text => <<-LICENSE
All of the code and documentation in SQLite has been dedicated to the public domain by the authors.
All code authors, and representatives of the companies they work for, have signed affidavits dedicating their contributions to the public domain and originals of those signed affidavits are stored in a firesafe at the main offices of Hwaci.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute the original SQLite code, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means.
LICENSE
}
s.summary = 'SQLite is an embedded SQL database engine'
s.documentation_url = 'https://sqlite.org/docs.html'
s.homepage = 'https://github.com/clemensg/sqlite3pod'
s.authors = { 'Clemens Gruber' => 'clemensgru@gmail.com' }
archive_name = "sqlite-amalgamation-#{s.version.to_s.gsub('.', '0')}"
s.source = { :http => "https://www.sqlite.org/#{Time.now.year}/#{archive_name}.zip" }
s.requires_arc = false
s.default_subspecs = 'common'
s.subspec 'common' do |ss|
ss.source_files = "#{archive_name}/sqlite*.{h,c}"
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DHAVE_USLEEP=1' }
end
s.subspec 'api_armor' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_API_ARMOR=1' }
end
s.subspec 'coldata' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_COLUMN_METADATA=1' }
end
s.subspec 'fts' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1' }
end
s.subspec 'rtree' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_RTREE=1' }
end
s.subspec 'soundex' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_SOUNDEX=1' }
end
s.subspec 'stat3' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_STAT3=1' }
end
s.subspec 'stat4' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_STAT4=1' }
end
s.subspec 'unicode61' do |ss|
ss.dependency 'sqlite3/common'
ss.dependency 'sqlite3/fts'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_FTS4_UNICODE61=1' }
end
s.subspec 'unlock_notify' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_UNLOCK_NOTIFY=1' }
end
end
Bump SQLite 3 version to 3.8.8.2: "Enhance sqlite3_wal_checkpoint_v2(TRUNCATE) interface so that it truncates the WAL file even if there is no checkpoint work to be done."
Signed-off-by: Clemens Gruber <dda41f46a19317797d2fa2442dc67d1578f8ace9@gmail.com>
Pod::Spec.new do |s|
s.name = 'sqlite3'
s.version = '3.8.8.2'
s.license = { :type => 'Public Domain', :text => <<-LICENSE
All of the code and documentation in SQLite has been dedicated to the public domain by the authors.
All code authors, and representatives of the companies they work for, have signed affidavits dedicating their contributions to the public domain and originals of those signed affidavits are stored in a firesafe at the main offices of Hwaci.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute the original SQLite code, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means.
LICENSE
}
s.summary = 'SQLite is an embedded SQL database engine'
s.documentation_url = 'https://sqlite.org/docs.html'
s.homepage = 'https://github.com/clemensg/sqlite3pod'
s.authors = { 'Clemens Gruber' => 'clemensgru@gmail.com' }
archive_name = "sqlite-amalgamation-#{s.version.to_s.gsub('.', '0')}"
s.source = { :http => "https://www.sqlite.org/#{Time.now.year}/#{archive_name}.zip" }
s.requires_arc = false
s.default_subspecs = 'common'
s.subspec 'common' do |ss|
ss.source_files = "#{archive_name}/sqlite*.{h,c}"
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DHAVE_USLEEP=1' }
end
s.subspec 'api_armor' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_API_ARMOR=1' }
end
s.subspec 'coldata' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_COLUMN_METADATA=1' }
end
s.subspec 'fts' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1' }
end
s.subspec 'rtree' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_RTREE=1' }
end
s.subspec 'soundex' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_SOUNDEX=1' }
end
s.subspec 'stat3' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_STAT3=1' }
end
s.subspec 'stat4' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_STAT4=1' }
end
s.subspec 'unicode61' do |ss|
ss.dependency 'sqlite3/common'
ss.dependency 'sqlite3/fts'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_FTS4_UNICODE61=1' }
end
s.subspec 'unlock_notify' do |ss|
ss.dependency 'sqlite3/common'
ss.xcconfig = { 'OTHER_CFLAGS' => '$(inherited) -DSQLITE_ENABLE_UNLOCK_NOTIFY=1' }
end
end
|
module Enumify
  module Model
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # Declares +parameter+ as a symbol-valued enum restricted to +vals+.
      #
      # Options:
      #   :allow_nil - permit a nil value (default false)
      #   :prefix    - true prefixes generated methods with the parameter
      #                name; any other truthy value is used as the prefix
      #   :constant  - true (default) defines an upper-cased constant holding
      #                +vals+; a symbol/string names the constant explicitly
      def enumify(parameter, vals=[], opts={})
        validates_inclusion_of parameter, :in => vals, :allow_nil => !!opts[:allow_nil]

        parameter_string = parameter.to_s

        prefix =
          if opts[:prefix] == true
            "#{parameter_string}_"
          elsif opts[:prefix].present?
            "#{opts[:prefix]}_"
          else
            ''
          end

        constant = opts.fetch(:constant, true)
        if constant
          const_name = constant == true ? parameter_string.pluralize : constant.to_s
          const_set(const_name.upcase, vals)
        end

        # Reader: returns the stored value as a Symbol, or nil when blank.
        define_method parameter_string do
          attr = read_attribute(parameter)
          (attr.nil? || attr.empty?) ? nil : attr.to_sym
        end

        # Writer: sets the value without saving.
        define_method "#{parameter_string}=" do |value|
          send("_set_#{parameter_string}", value, false)
        end

        self.class_eval do
          private

          define_method "_set_#{parameter_string}" do |value, should_save|
            # BUGFIX: the original `value = value and value.to_sym` parsed as
            # `(value = value) and value.to_sym` due to `and`'s low precedence,
            # so the symbol conversion was discarded: string arguments were
            # never normalized and the `old == value` short-circuit below
            # never fired for them.
            value = value && value.to_sym
            old = read_attribute(parameter) ? read_attribute(parameter).to_sym : nil
            return value if old == value

            write_attribute(parameter, value && value.to_s)
            save if should_save
            # Notify the optional <param>_changed hook, but not on initial set.
            send("#{parameter_string}_changed", old, value) if respond_to?("#{parameter_string}_changed", true) && !old.nil?
            return value
          end
        end

        vals.each do |val|
          attribute = prefix + val.to_s
          query_method = "#{attribute}?"
          bang_method = "#{attribute}!"
          raise "Collision in enum values method #{attribute}" if respond_to?(query_method) || respond_to?(bang_method) || respond_to?(attribute)

          # Predicate: does the record currently hold +val+?
          define_method query_method do
            send(parameter_string) == val
          end

          # Bang: set to +val+ and persist immediately.
          define_method bang_method do
            send("_set_#{parameter_string}", val, true)
          end

          scope attribute.to_sym, lambda { where(parameter.to_sym => val.to_s) }
        end

        # We want to first define all the "positive" scopes and only then define
        # the "negation scopes", to make sure they don't override previous scopes
        vals.each do |val|
          # We need to prefix the field with the table name since if this scope will
          # be used in a joined query with other models that have the same enum field then
          # it will fail on ambiguous column name.
          negative_scope = "not_" + prefix + val.to_s
          unless respond_to?(negative_scope)
            scope negative_scope, lambda { where("#{self.table_name}.#{parameter} != ?", val.to_s) }
          end
        end
      end

      alias_method :enum, :enumify
    end
  end
end
Removed redundant quotes.
module Enumify
  module Model
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # Declares +parameter+ as a symbol-valued enum restricted to +vals+.
      #
      # Options:
      #   :allow_nil - permit a nil value (default false)
      #   :prefix    - true prefixes generated methods with the parameter
      #                name; any other truthy value is used as the prefix
      #   :constant  - true (default) defines an upper-cased constant holding
      #                +vals+; a symbol/string names the constant explicitly
      def enumify(parameter, vals=[], opts={})
        validates_inclusion_of parameter, :in => vals, :allow_nil => !!opts[:allow_nil]

        parameter_string = parameter.to_s

        prefix =
          if opts[:prefix] == true
            "#{parameter_string}_"
          elsif opts[:prefix].present?
            "#{opts[:prefix]}_"
          else
            ''
          end

        constant = opts.fetch(:constant, true)
        if constant
          const_name = constant == true ? parameter_string.pluralize : constant.to_s
          const_set(const_name.upcase, vals)
        end

        # Reader: returns the stored value as a Symbol, or nil when blank.
        define_method parameter_string do
          attr = read_attribute(parameter)
          (attr.nil? || attr.empty?) ? nil : attr.to_sym
        end

        # Writer: sets the value without saving.
        define_method "#{parameter_string}=" do |value|
          send("_set_#{parameter_string}", value, false)
        end

        self.class_eval do
          private

          define_method "_set_#{parameter_string}" do |value, should_save|
            # BUGFIX: the original `value = value and value.to_sym` parsed as
            # `(value = value) and value.to_sym` due to `and`'s low precedence,
            # so the symbol conversion was discarded: string arguments were
            # never normalized and the `old == value` short-circuit below
            # never fired for them.
            value = value && value.to_sym
            old = read_attribute(parameter) ? read_attribute(parameter).to_sym : nil
            return value if old == value

            write_attribute(parameter, value && value.to_s)
            save if should_save
            # Notify the optional <param>_changed hook, but not on initial set.
            send("#{parameter_string}_changed", old, value) if respond_to?("#{parameter_string}_changed", true) && !old.nil?
            return value
          end
        end

        vals.each do |val|
          attribute = prefix + val.to_s
          query_method = "#{attribute}?"
          bang_method = "#{attribute}!"
          raise "Collision in enum values method #{attribute}" if respond_to?(query_method) || respond_to?(bang_method) || respond_to?(attribute)

          # Predicate: does the record currently hold +val+?
          define_method query_method do
            send(parameter_string) == val
          end

          # Bang: set to +val+ and persist immediately.
          define_method bang_method do
            send("_set_#{parameter_string}", val, true)
          end

          scope attribute.to_sym, lambda { where(parameter.to_sym => val.to_s) }
        end

        # We want to first define all the "positive" scopes and only then define
        # the "negation scopes", to make sure they don't override previous scopes
        vals.each do |val|
          # We need to prefix the field with the table name since if this scope will
          # be used in a joined query with other models that have the same enum field then
          # it will fail on ambiguous column name.
          negative_scope = "not_" + prefix + val.to_s
          unless respond_to?(negative_scope)
            scope negative_scope, lambda { where("#{self.table_name}.#{parameter} != ?", val.to_s) }
          end
        end
      end

      alias_method :enum, :enumify
    end
  end
end
|
module Lita
  module Handlers
    class UrlShortner < Handler
      # BUGFIX: capture the URL (the original regex escaped both parens and
      # captured nothing, so response.matches[0][0] was nil) and dispatch to
      # :show_url — the original pointed at a non-existent :lookup method.
      route(/^shorten\((.+)\)/i, :show_url, command: true, help: {'shorten URL' => 'returns a shorten url'})

      G_API_KEY = ENV["G_API_KEY"]

      # Replies privately with a shortened version of the URL captured from
      # the "shorten(...)" command.
      def show_url(response)
        url = response.matches[0][0]
        payload = { :longUrl => url.to_s }.to_json
        short_url = generate_url(payload)
        response.reply_privately "Here's your short url: #{short_url}"
      end

      private

      # POSTs the JSON payload to the Google URL shortener API and returns
      # the shortened URL (the "id" field of the parsed response).
      # BUGFIX: `headers` was a local variable of show_url and was not
      # visible here (NameError); build it where it is used.
      def generate_url(url)
        headers = { "Content-Type" => "application/json" }
        resp = HTTParty.post("https://www.googleapis.com/urlshortener/v1/url?key=#{G_API_KEY}", :body => url, :headers => headers).to_json
        resp = JSON.parse(resp)
        resp["id"]
      end
    end

    Lita.register_handler(UrlShortner)
  end
end
debug
module Lita
  module Handlers
    class UrlShortner < Handler
      # BUGFIX: capture the URL (the regex escaped both parens and captured
      # nothing, so response.matches[0][0] was nil) and dispatch to
      # :show_url — :lookup does not exist on this handler.
      route(/^shorten\((.+)\)/i, :show_url, command: true, help: {'shorten URL' => 'returns a shorten url'})

      G_API_KEY = ENV["G_API_KEY"]

      # Replies privately with a shortened version of the URL captured from
      # the "shorten(...)" command.
      # BUGFIX: removed the leftover debug statement `raise.inspect`, which
      # raised a RuntimeError unconditionally on every request.
      def show_url(response)
        url = response.matches[0][0]
        payload = { :longUrl => url.to_s }.to_json
        short_url = generate_url(payload)
        response.reply_privately "Here's your short url: #{short_url}"
      end

      private

      # POSTs the JSON payload to the Google URL shortener API and returns
      # the shortened URL (the "id" field of the parsed response).
      # BUGFIX: `headers` was a local variable of show_url and was not
      # visible here (NameError); build it where it is used.
      def generate_url(url)
        headers = { "Content-Type" => "application/json" }
        resp = HTTParty.post("https://www.googleapis.com/urlshortener/v1/url?key=#{G_API_KEY}", :body => url, :headers => headers).to_json
        resp = JSON.parse(resp)
        resp["id"]
      end
    end

    Lita.register_handler(UrlShortner)
  end
end
module LLT
  class Diff::Parser
    # Mixed into parser result nodes. A Reportable has an id, an occurrence
    # counter (@total) and a hash of nested reportables (@container),
    # provided by HashContainable.
    module Reportable
      include HashContainable

      attr_reader :id, :total

      # +id+ names the node; +total+ seeds the occurrence counter.
      def initialize(id, total = 1)
        super(id)
        @total = total
      end

      # Merges +element+ into the container: when a node with the same id
      # already exists, accumulate its total and recursively merge its
      # children; otherwise store the element itself.
      def add(element)
        if el = @container[element.id]
          el.add_total(element)
          element.container.each do |_, nested_el|
            el.add(nested_el)
          end
        else
          @container[element.id] = element
        end
      end

      # Accumulates another node's total into this one.
      def add_total(element)
        @total += element.total
      end

      # Bumps the occurrence counter by one.
      def increment
        @total += 1
      end

      # XML element name: the including class's demodulized, down-cased name.
      def xml_tag
        self.class.name.scan(/::(\w+)$/)[0].first.downcase
      end

      # Attributes serialized onto the XML element.
      def xml_attributes
        { name: @id, total: @total }
      end

      # Returns a copy of the container sorted by descending total,
      # ties broken by ascending id.
      def sort
        Hash[
          @container.sort do |(a_id, a_r), (b_id, b_r)|
            comp = b_r.total <=> a_r.total
            comp.zero? ? a_id <=> b_id : comp
          end
        ]
      end

      # Sorts the whole tree in place, children first.
      def sort!
        each { |_, el| el.sort! }
        @container = sort
      end

      # This could be implemented with a block as well (which holds
      # whatever code needs to be performed on the cloned instance,
      # but probably not a good idea as this called very often - make
      # it as lean as possibe.
      #
      # NOTE(review): old_clone is presumably aliased to the original #clone
      # elsewhere (alias_method_chain style), and replace_with_clone comes
      # from HashContainable — confirm before refactoring.
      def new_clone
        cloned = old_clone
        cloned.replace_with_clone(:container)
        cloned
      end
    end
  end
end
Simplify Reportable#clone
No need for faking an alias_method_chain, just use super!
module LLT
  class Diff::Parser
    # Mixed into parser result nodes. A Reportable has an id, an occurrence
    # counter (@total) and a hash of nested reportables (@container),
    # provided by HashContainable.
    module Reportable
      include HashContainable

      attr_reader :id, :total

      # +id+ names the node; +total+ seeds the occurrence counter.
      def initialize(id, total = 1)
        super(id)
        @total = total
      end

      # Merges +element+ into the container: when a node with the same id
      # already exists, accumulate its total and recursively merge its
      # children; otherwise store the element itself.
      def add(element)
        if el = @container[element.id]
          el.add_total(element)
          element.container.each do |_, nested_el|
            el.add(nested_el)
          end
        else
          @container[element.id] = element
        end
      end

      # Accumulates another node's total into this one.
      def add_total(element)
        @total += element.total
      end

      # Bumps the occurrence counter by one.
      def increment
        @total += 1
      end

      # XML element name: the including class's demodulized, down-cased name.
      def xml_tag
        self.class.name.scan(/::(\w+)$/)[0].first.downcase
      end

      # Attributes serialized onto the XML element.
      def xml_attributes
        { name: @id, total: @total }
      end

      # Returns a copy of the container sorted by descending total,
      # ties broken by ascending id.
      def sort
        Hash[
          @container.sort do |(a_id, a_r), (b_id, b_r)|
            comp = b_r.total <=> a_r.total
            comp.zero? ? a_id <=> b_id : comp
          end
        ]
      end

      # Sorts the whole tree in place, children first.
      def sort!
        each { |_, el| el.sort! }
        @container = sort
      end

      # This could be implemented with a block as well (which holds
      # whatever code needs to be performed on the cloned instance,
      # but probably not a good idea as this called very often - make
      # it as lean as possibe.
      #
      # Overrides #clone: super performs the ordinary shallow copy, then the
      # container is replaced with its own clone (replace_with_clone comes
      # from HashContainable) so the copy does not share nested state.
      def clone
        cloned = super
        cloned.replace_with_clone(:container)
        cloned
      end
    end
  end
end
|
# encoding: utf-8

module Locomotive
  module Mounter #:nodoc
    # Gem version (pre-release alpha).
    VERSION = '1.0.0.alpha3'
  end
end
bump version to 1.0.0.alpha4
# encoding: utf-8

module Locomotive
  module Mounter #:nodoc
    # Gem version (pre-release alpha).
    VERSION = '1.0.0.alpha4'
  end
end
|
module Lore
  module Model_Shortcuts
    # Installs input and output filters on +attributes+ that replace single
    # and double quotes with their HTML entities (&#39; / &quot;).
    #
    # BUGFIX: the previous implementation called the non-destructive #gsub
    # and discarded the first result, so single quotes were never escaped;
    # the block only returned the double-quote substitution applied to the
    # original, unescaped string. Chain the substitutions instead.
    def html_escape_values_of(*attributes)
      add_input_filter(*attributes) { |a|
        a.to_s.gsub("'", '&#39;').gsub('"', '&quot;')
      }
      add_output_filter(*attributes) { |a|
        a.to_s.gsub("'", '&#39;').gsub('"', '&quot;')
      }
    end
    alias html_encode html_escape_values_of

    # Installs an input filter normalizing +attributes+ to a decimal string:
    # Floats pass through via #to_s; otherwise whitespace is stripped, a
    # comma decimal separator becomes a dot, empty input becomes "0.00" and
    # integer-looking input gains a ".00" suffix.
    def convert_decimal(*attributes)
      add_input_filter(*attributes) { |v|
        if v.is_a?(Float)
          v.to_s
        else
          v = v.to_s.gsub(/\s/, '')
          v.sub!(',', '.')
          if v == ''
            "0.00"
          elsif v.include?('.')
            v
          else
            "#{v}.00"
          end
        end
      }
    end
  end
end
Fixed Model.html_encode
module Lore
  module Model_Shortcuts
    # Registers input and output filters that HTML-escape single and double
    # quotes (&#39; / &quot;) in the given attributes.
    def html_escape_values_of(*attributes)
      quote_filter = proc { |raw|
        raw.to_s.gsub("'", '&#39;').gsub('"', '&quot;')
      }
      add_input_filter(*attributes, &quote_filter)
      add_output_filter(*attributes, &quote_filter)
    end
    alias html_encode html_escape_values_of

    # Registers an input filter that normalizes decimal-valued attributes:
    # Floats pass through as strings; other values are stripped of
    # whitespace, a comma separator becomes a dot, empty input maps to
    # "0.00" and integer-looking input gains a ".00" suffix.
    def convert_decimal(*attributes)
      add_input_filter(*attributes) { |raw|
        next raw.to_s if raw.is_a?(Float)

        cleaned = raw.to_s.gsub(/\s/, '')
        cleaned = cleaned.sub(',', '.')
        if cleaned.empty?
          "0.00"
        elsif cleaned.include?('.')
          cleaned
        else
          "#{cleaned}.00"
        end
      }
    end
  end
end
|
module MailBoxValidator
  # Gem version.
  VERSION = "1.1.0"
end
Bump version
module MailBoxValidator
  # Gem version.
  VERSION = "1.1.1"
end
|
require 'rails/engine'
require 'sprockets/railtie'
# Since we serve our assets directly through apache on an appliance after they
# are pre-compiled, there is no need to have sass/coffeescript loaded in the
# application, so we can save a bit of resources by not loading these two.
#
# That said, we still need to load both of these when pre-compiling the assets
# in production mode, so if Rake is defined, load things like we used to.
#
# For this to work properly, it is dependent on patternfly/patternfly-sass#150
if ENV["RAILS_ENV"] != "production" || defined?(Rake)
require 'sass-rails'
require 'coffee-rails'
require 'font-fabulous'
require 'patternfly-sass'
else
require 'bootstrap-sass/engine'
require 'font_awesome/sass/rails/engine'
require 'font-fabulous/engine'
require 'patternfly-sass/engine'
end
require 'high_voltage'
require 'lodash-rails'
require 'jquery-hotkeys-rails'
require "novnc-rails"
require 'webpacker'
module ManageIQ
  module UI
    module Classic
      class Engine < ::Rails::Engine
        # Autoload the controller mixins and lib directory; paths are
        # converted to strings because Rails pushes autoload_paths into
        # $LOAD_PATH.
        config.autoload_paths << root.join('app', 'controllers', 'mixins').to_s
        config.autoload_paths << root.join('lib').to_s

        # Compress JS assets in production/test. Uglifier is required lazily
        # so other environments can boot without it; function args/names are
        # kept so code relying on Function#name keeps working.
        if Rails.env.production? || Rails.env.test?
          require 'uglifier'
          config.assets.js_compressor = Uglifier.new(
            :compress => {
              :unused => false,
              :keep_fargs => true,
              :keep_fnames => true
            }
          )
        end

        # Marks this engine as a ManageIQ (vmdb) plugin.
        def vmdb_plugin?
          true
        end
      end
    end
  end
end
Add comment about leak in engine.rb
require 'rails/engine'
require 'sprockets/railtie'
# Since we serve our assets directly through apache on an appliance after they
# are pre-compiled, there is no need to have sass/coffeescript loaded in the
# application, so we can save a bit of resources by not loading these two.
#
# That said, we still need to load both of these when pre-compiling the assets
# in production mode, so if Rake is defined, load things like we used to.
#
# For this to work properly, it is dependent on patternfly/patternfly-sass#150
if ENV["RAILS_ENV"] != "production" || defined?(Rake)
require 'sass-rails'
require 'coffee-rails'
require 'font-fabulous'
require 'patternfly-sass'
else
require 'bootstrap-sass/engine'
require 'font_awesome/sass/rails/engine'
require 'font-fabulous/engine'
require 'patternfly-sass/engine'
end
require 'high_voltage'
require 'lodash-rails'
require 'jquery-hotkeys-rails'
require "novnc-rails"
require 'webpacker'
module ManageIQ
  module UI
    module Classic
      class Engine < ::Rails::Engine
        # NOTE: If you are going to make changes to autoload_paths, please make
        # sure they are all strings. Rails will push these paths into the
        # $LOAD_PATH.
        #
        # More info can be found in the ruby-lang bug:
        #
        #   https://bugs.ruby-lang.org/issues/14372
        #
        config.autoload_paths << root.join('app', 'controllers', 'mixins').to_s
        config.autoload_paths << root.join('lib').to_s

        # Compress JS assets in production/test. Uglifier is required lazily
        # so other environments can boot without it; function args/names are
        # kept so code relying on Function#name keeps working.
        if Rails.env.production? || Rails.env.test?
          require 'uglifier'
          config.assets.js_compressor = Uglifier.new(
            :compress => {
              :unused => false,
              :keep_fargs => true,
              :keep_fnames => true
            }
          )
        end

        # Marks this engine as a ManageIQ (vmdb) plugin.
        def vmdb_plugin?
          true
        end
      end
    end
  end
end
|
require 'ui'
require 'logic'
require 'single_game'
require 'multi_game'
module MasterMind
  module Tobi
    # Command-line interaction helpers for the MasterMind game: reads and
    # validates menu input from stdin and dispatches into game modes.
    module GameHelper
      # Main menu loop: keeps reading input until validate_input? accepts it.
      def user_choice
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid option so as to quit loop
          input = gets.chomp.downcase
          option_chosen = validate_input?(input)
        end
      end

      # Dispatches a main-menu command. Returns false when the menu should be
      # shown again (after help or invalid input), true otherwise; "q" exits
      # the whole process.
      def validate_input?(input)
        case input
        when "p", "play" then play_game
        when "r", "read"
          print_help
          return false # continue loop after displaying help message
        when "q", "quit" then exit
        else # user selects an invalid option
          print UI::INVALID_MESSAGE
          return false
        end
        return true
      end

      # Runs one full game (level then mode selection), then re-displays the
      # main menu.
      def play_game
        game_logic = GameLogic.new(ask_level); sequence = game_logic.generate_sequence
        ask_mode(sequence, game_logic)
        puts ""
        print UI::OPTIONS_MESSAGE + UI::INPUT_PROMPT
        user_choice
      end

      # Asks for single- or multi-player mode and starts the chosen game.
      def ask_mode(sequence, game_logic)
        print UI::MODE_SELECT
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid option so as to quit loop
          input = gets.chomp.downcase
          case input
          when "s", "single" then SinglePlayer.new(sequence, game_logic).start_game
          when "m", "multi" then MultiPlayer.new(sequence, game_logic, hide_guess?).start_game
          else
            print UI::INVALID_MESSAGE
            option_chosen = false
          end
        end
      end

      # Asks whether guesses should be hidden (multi-player); returns a
      # boolean.
      def hide_guess?
        print UI::PASSWORD_MESSAGE
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid option so as to quit loop
          input = gets.chomp.downcase
          case input
          when "y", "yes" then return true
          when "n", "no" then return false
          else # user selects an invalid option
            print UI::INVALID_MESSAGE
            option_chosen = false
          end
        end
      end

      # Prints the help text followed by the main-menu prompt.
      def print_help
        puts UI::HELP_MESSAGE
        print UI::OPTIONS_MESSAGE + UI::INPUT_PROMPT
      end

      # Asks for a difficulty level; returns one of the GameLogic level
      # constants.
      def ask_level
        print UI::LEVEL_MESSAGE
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid level so as to quit loop
          input = gets.chomp.downcase
          case input
          when "b", "beginner" then return GameLogic::BEGINNER
          when "i", "intermediate" then return GameLogic::INTERMEDIATE
          when "a", "advanced" then return GameLogic::ADVANCED
          else # user selects an invalid level
            print UI::INVALID_MESSAGE
            option_chosen = false
          end
        end
      end
    end
  end
end
refactor multi_game
require 'ui'
require 'logic'
require 'single_game'
require 'multi_game'
module MasterMind
  module Tobi
    # Command-line interaction helpers for the MasterMind game: reads and
    # validates menu input from stdin and dispatches into game modes.
    module GameHelper
      # Main menu loop: keeps reading input until validate_input? accepts it.
      def user_choice
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid option so as to quit loop
          input = gets.chomp.downcase
          option_chosen = validate_input?(input)
        end
      end

      # Dispatches a main-menu command. Returns false when the menu should be
      # shown again (after help or invalid input), true otherwise; "q" exits
      # the whole process.
      def validate_input?(input)
        case input
        when "p", "play" then play_game
        when "r", "read"
          print_help
          return false # continue loop after displaying help message
        when "q", "quit" then exit
        else # user selects an invalid option
          print UI::INVALID_MESSAGE
          return false
        end
        return true
      end

      # Runs one full game (level then mode selection), then re-displays the
      # main menu.
      def play_game
        game_logic = GameLogic.new(ask_level); sequence = game_logic.generate_sequence
        ask_mode(sequence, game_logic)
        puts ""
        print UI::OPTIONS_MESSAGE + UI::INPUT_PROMPT
        user_choice
      end

      # Asks for single- or multi-player mode and starts the chosen game.
      def ask_mode(sequence, game_logic)
        print UI::MODE_SELECT
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid option so as to quit loop
          input = gets.chomp.downcase
          case input
          when "s", "single" then SinglePlayer.new(sequence, game_logic).start_game
          when "m", "multi" then MultiPlayer.new(sequence, game_logic, hide_guess?).start_game
          else
            print UI::INVALID_MESSAGE
            option_chosen = false
          end
        end
      end

      # Asks whether guesses should be hidden (multi-player); returns a
      # boolean.
      def hide_guess?
        print UI::PASSWORD_MESSAGE
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid option so as to quit loop
          input = gets.chomp.downcase
          case input
          when "y", "yes" then return true
          when "n", "no" then return false
          else # user selects an invalid option
            print UI::INVALID_MESSAGE
            option_chosen = false
          end
        end
      end

      # Prints the help text followed by the main-menu prompt.
      def print_help
        puts UI::HELP_MESSAGE
        print UI::OPTIONS_MESSAGE + UI::INPUT_PROMPT
      end

      # Asks for a difficulty level; returns one of the GameLogic level
      # constants.
      def ask_level
        print UI::LEVEL_MESSAGE
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid level so as to quit loop
          input = gets.chomp.downcase
          case input
          when "b", "beginner" then return GameLogic::BEGINNER
          when "i", "intermediate" then return GameLogic::INTERMEDIATE
          when "a", "advanced" then return GameLogic::ADVANCED
          else # user selects an invalid level
            print UI::INVALID_MESSAGE
            option_chosen = false
          end
        end
      end

      # Combined prompt helper: when +level+ is truthy it behaves like
      # #ask_level, otherwise like #hide_guess?.
      #
      # NOTE(review): this always prints UI::LEVEL_MESSAGE, even in the
      # yes/no branch where #hide_guess? printed UI::PASSWORD_MESSAGE, and it
      # duplicates both methods above — confirm intended before switching
      # callers over.
      def user_helper(level)
        print UI::LEVEL_MESSAGE
        option_chosen = false
        while !option_chosen
          option_chosen = true # assume user selects valid level so as to quit loop
          input = gets.chomp.downcase
          if level
            case input
            when "b", "beginner" then return GameLogic::BEGINNER
            when "i", "intermediate" then return GameLogic::INTERMEDIATE
            when "a", "advanced" then return GameLogic::ADVANCED
            else # user selects an invalid level
              print UI::INVALID_MESSAGE
              option_chosen = false
            end
          else
            case input
            when "y", "yes" then return true
            when "n", "no" then return false
            else # user selects an invalid option
              print UI::INVALID_MESSAGE
              option_chosen = false
            end
          end
        end
      end
    end
  end
end
require 'webrat/rack/rack_session'
require File.join(File.dirname(__FILE__), 'base')
module Merb
  module Test
    module World
      # We are going to wait for the Webrat::RackSession to be included on the gem spec of webrat
      class Webrat < Webrat::Session
        include Base

        ## Begin RackSession implementations
        # Raw body of the most recent response.
        def response_body
          @response.body
        end

        # HTTP status code of the most recent response.
        def response_code
          @response.status
        end
        ## End of RackSession implementation

        attr_reader :response

        # Define one request helper per HTTP verb. Each takes
        # (path, data, headers = nil), merges the verb into the env unless a
        # method is already set, folds +data+ into body params (POST) or
        # query params (GET), performs the request (via #request, presumably
        # provided by Base/Webrat::Session — confirm) and memoizes @response.
        %w(get head post put delete).each do |verb|
          define_method(verb) do |*args| # (path, data, headers = nil)
            path, data, headers = *args
            all = (headers || {})
            all.merge!(:method => "#{verb.upcase}") unless all[:method] || all["REQUEST_METHOD"]
            unless data.empty?
              if verb == "post"
                all.merge!(:body_params => data)
              elsif verb == "get"
                all.merge!(:params => data)
              end
            end
            @response = request(path, all)
            # Patch this particular response object so #body always yields a
            # String, whatever the underlying rack body is.
            class << @response
              def body
                super.to_s
              end
            end
            @response
          end
        end
      end
    end
  end
end

World do
  Merb::Test::World::Webrat.new
end
Ignoring the webrat/rack/rack_session require
# require 'webrat/rack/rack_session'
require File.join(File.dirname(__FILE__), 'base')
module Merb
  module Test
    module World
      # We are going to wait for the Webrat::RackSession to be included on the gem spec of webrat
      class Webrat < Webrat::Session
        include Base

        ## Begin RackSession implementations
        # Raw body of the most recent response.
        def response_body
          @response.body
        end

        # HTTP status code of the most recent response.
        def response_code
          @response.status
        end
        ## End of RackSession implementation

        attr_reader :response

        # Define one request helper per HTTP verb. Each takes
        # (path, data, headers = nil), merges the verb into the env unless a
        # method is already set, folds +data+ into body params (POST) or
        # query params (GET), performs the request (via #request, presumably
        # provided by Base/Webrat::Session — confirm) and memoizes @response.
        %w(get head post put delete).each do |verb|
          define_method(verb) do |*args| # (path, data, headers = nil)
            path, data, headers = *args
            all = (headers || {})
            all.merge!(:method => "#{verb.upcase}") unless all[:method] || all["REQUEST_METHOD"]
            unless data.empty?
              if verb == "post"
                all.merge!(:body_params => data)
              elsif verb == "get"
                all.merge!(:params => data)
              end
            end
            @response = request(path, all)
            # Patch this particular response object so #body always yields a
            # String, whatever the underlying rack body is.
            class << @response
              def body
                super.to_s
              end
            end
            @response
          end
        end
      end
    end
  end
end

World do
  Merb::Test::World::Webrat.new
end
|
# frozen_string_literal: true

module MetalArchives
  ##
  # Abstract model class
  #
  class Base
    ##
    # Generic shallow copy constructor
    #
    def initialize(attributes = {})
      raise Errors::NotImplementedError, "no :id property in model" unless respond_to? :id

      set(**attributes)
    end

    ##
    # Set properties
    #
    def set(**attributes)
      attributes.each { |key, value| instance_variable_set(:"@#{key}", value) }
    end

    ##
    # Returns true if two objects have the same type and id
    #
    def ==(other)
      other.is_a?(self.class) &&
        id == other.id
    end

    ##
    # Fetch, parse and load the data
    #
    # [Raises]
    # - rdoc-ref:Errors::InvalidIDError when no id
    # - rdoc-ref:Errors::APIError when receiving a status code >= 400 (except 404)
    #
    def load!
      raise Errors::InvalidIDError, "no id present" unless id

      # Use constructor to set attributes
      set(**assemble)

      @loaded = true
      MetalArchives.cache[id] = self
    rescue StandardError => e
      # Don't cache invalid requests
      MetalArchives.cache.delete id
      raise e
    end

    ##
    # Whether or not the object is currently loaded
    #
    def loaded?
      @loaded ||= false
    end

    ##
    # Whether or not the object is currently cached
    #
    def cached?
      loaded? && MetalArchives.cache.include?(id)
    end

    ##
    # String representation
    #
    # BUGFIX: this was previously defined below the +protected+ marker, which
    # made +inspect+ a protected method and broke explicit calls such as
    # +object.inspect+. Object#inspect is conventionally public, so it is
    # defined here, above the +protected+ section.
    #
    def inspect
      "#<#{self.class.name} @id=#{id}>"
    end

    protected

    ##
    # Fetch the data and assemble the model
    #
    # Override this method
    #
    # [Raises]
    # - rdoc-ref:Errors::InvalidIDError when no or invalid id
    # - rdoc-ref:Errors::APIError when receiving a status code >= 400 (except 404)
    #
    def assemble
      raise Errors::NotImplementedError, "method :assemble not implemented"
    end

    class << self
      ##
      # Declared properties
      #
      def properties
        @properties ||= {}
      end

      protected

      ##
      # Defines a model property.
      #
      # [+name+]
      #   Name of the property
      #
      # [+opts+]
      #   [+type+]
      #     Data type of property (a constant)
      #
      #     Default: +String+
      #
      #   [+multiple+]
      #     Whether or not the property has multiple values (which
      #     turns it into an +Array+ of +type+)
      #
      def property(name, opts = {})
        properties[name] = opts

        # property
        define_method(name) do
          # Load only when not loaded or id property
          load! unless loaded? || name == :id

          instance_variable_get(:"@#{name}")
        end

        # property?
        define_method("#{name}?") do
          send(name).present?
        end

        # property=
        define_method("#{name}=") do |value|
          return instance_variable_set(:"@#{name}", value) if value.nil?

          # Check value type
          type = opts[:type] || String
          if opts[:multiple]
            raise Errors::TypeError, "invalid type #{value.class}, must be Array for #{name}" unless value.is_a? Array

            value.each do |val|
              raise Errors::TypeError, "invalid type #{val.class}, must be #{type} for #{name}" unless val.is_a? type
            end
          else
            raise Errors::TypeError, "invalid type #{value.class}, must be #{type} for #{name}" unless value.is_a? type
          end

          instance_variable_set(:"@#{name}", value)
        end
      end

      ##
      # Defines a model enum property.
      #
      # [+name+]
      #   Name of the property
      #
      # [+opts+]
      #   [+values+]
      #     Required. An array of possible values
      #
      #   [+multiple+]
      #     Whether or not the property has multiple values (which
      #     turns it into an +Array+ of +type+)
      #
      def enum(name, opts)
        raise ArgumentError, "opts[:values] is required" unless opts && opts[:values]

        properties[name] = opts

        # property
        define_method(name) do
          load! unless loaded?

          instance_variable_get(:"@#{name}")
        end

        # property?
        define_method("#{name}?") do
          load! unless loaded? && instance_variable_defined?("@#{name}")

          property = instance_variable_get(:"@#{name}")
          property.respond_to?(:empty?) ? !property.empty? : !property.nil?
        end

        # property=
        define_method("#{name}=") do |value|
          # Check enum type
          if opts[:multiple]
            raise Errors::TypeError, "invalid enum value #{value}, must be Array for #{name}" unless value.is_a? Array

            value.each do |val|
              raise Errors::TypeError, "invalid enum value #{val} for #{name}" unless opts[:values].include? val
            end
          else
            raise Errors::TypeError, "invalid enum value #{value} for #{name}" unless opts[:values].include? value
          end

          instance_variable_set(:"@#{name}", value)
        end
      end

      ##
      # Defines a model boolean property. This method is an alias for <tt>enum name, :values => [true, false]</tt>
      #
      # [+name+]
      #   Name of the property
      #
      # [+opts+]
      #   [+multiple+]
      #     Whether or not the property has multiple values (which
      #     turns it into an +Array+ of +type+)
      #
      def boolean(name, opts = {})
        enum name, opts.merge(values: [true, false])
      end
    end
  end
end
Fix model inspect method
# frozen_string_literal: true

module MetalArchives
  ##
  # Abstract model class
  #
  class Base
    ##
    # Generic shallow copy constructor
    #
    def initialize(attributes = {})
      raise Errors::NotImplementedError, "no :id property in model" unless respond_to? :id

      set(**attributes)
    end

    ##
    # Set properties
    #
    def set(**attributes)
      attributes.each { |key, value| instance_variable_set(:"@#{key}", value) }
    end

    ##
    # Returns true if two objects have the same type and id
    #
    def ==(other)
      other.is_a?(self.class) &&
        id == other.id
    end

    ##
    # Fetch, parse and load the data
    #
    # [Raises]
    # - rdoc-ref:Errors::InvalidIDError when no id
    # - rdoc-ref:Errors::APIError when receiving a status code >= 400 (except 404)
    #
    def load!
      raise Errors::InvalidIDError, "no id present" unless id

      # Use constructor to set attributes
      set(**assemble)

      @loaded = true
      MetalArchives.cache[id] = self
    rescue StandardError => e
      # Don't cache invalid requests
      MetalArchives.cache.delete id
      raise e
    end

    ##
    # Whether or not the object is currently loaded
    #
    def loaded?
      @loaded ||= false
    end

    ##
    # Whether or not the object is currently cached
    #
    def cached?
      loaded? && MetalArchives.cache.include?(id)
    end

    ##
    # String representation
    #
    # NOTE(review): reads @id and @name directly (no lazy load is
    # triggered); models without a :name property will render an empty
    # @name — confirm intended.
    #
    def inspect
      "#<#{self.class.name} @id=#{@id} @name=\"#{@name}\">"
    end

    protected

    ##
    # Fetch the data and assemble the model
    #
    # Override this method
    #
    # [Raises]
    # - rdoc-ref:Errors::InvalidIDError when no or invalid id
    # - rdoc-ref:Errors::APIError when receiving a status code >= 400 (except 404)
    #
    def assemble
      raise Errors::NotImplementedError, "method :assemble not implemented"
    end

    class << self
      ##
      # Declared properties
      #
      def properties
        @properties ||= {}
      end

      protected

      ##
      # Defines a model property.
      #
      # [+name+]
      #   Name of the property
      #
      # [+opts+]
      #   [+type+]
      #     Data type of property (a constant)
      #
      #     Default: +String+
      #
      #   [+multiple+]
      #     Whether or not the property has multiple values (which
      #     turns it into an +Array+ of +type+)
      #
      def property(name, opts = {})
        properties[name] = opts

        # property
        define_method(name) do
          # Load only when not loaded or id property
          load! unless loaded? || name == :id

          instance_variable_get(:"@#{name}")
        end

        # property?
        define_method("#{name}?") do
          send(name).present?
        end

        # property=
        define_method("#{name}=") do |value|
          return instance_variable_set(:"@#{name}", value) if value.nil?

          # Check value type
          type = opts[:type] || String
          if opts[:multiple]
            raise Errors::TypeError, "invalid type #{value.class}, must be Array for #{name}" unless value.is_a? Array

            value.each do |val|
              raise Errors::TypeError, "invalid type #{val.class}, must be #{type} for #{name}" unless val.is_a? type
            end
          else
            raise Errors::TypeError, "invalid type #{value.class}, must be #{type} for #{name}" unless value.is_a? type
          end

          instance_variable_set(:"@#{name}", value)
        end
      end

      ##
      # Defines a model enum property.
      #
      # [+name+]
      #   Name of the property
      #
      # [+opts+]
      #   [+values+]
      #     Required. An array of possible values
      #
      #   [+multiple+]
      #     Whether or not the property has multiple values (which
      #     turns it into an +Array+ of +type+)
      #
      def enum(name, opts)
        raise ArgumentError, "opts[:values] is required" unless opts && opts[:values]

        properties[name] = opts

        # property
        define_method(name) do
          load! unless loaded?

          instance_variable_get(:"@#{name}")
        end

        # property?
        define_method("#{name}?") do
          load! unless loaded? && instance_variable_defined?("@#{name}")

          property = instance_variable_get(:"@#{name}")
          property.respond_to?(:empty?) ? !property.empty? : !property.nil?
        end

        # property=
        define_method("#{name}=") do |value|
          # Check enum type
          if opts[:multiple]
            raise Errors::TypeError, "invalid enum value #{value}, must be Array for #{name}" unless value.is_a? Array

            value.each do |val|
              raise Errors::TypeError, "invalid enum value #{val} for #{name}" unless opts[:values].include? val
            end
          else
            raise Errors::TypeError, "invalid enum value #{value} for #{name}" unless opts[:values].include? value
          end

          instance_variable_set(:"@#{name}", value)
        end
      end

      ##
      # Defines a model boolean property. This method is an alias for <tt>enum name, :values => [true, false]</tt>
      #
      # [+name+]
      #   Name of the property
      #
      # [+opts+]
      #   [+multiple+]
      #     Whether or not the property has multiple values (which
      #     turns it into an +Array+ of +type+)
      #
      def boolean(name, opts = {})
        enum name, opts.merge(values: [true, false])
      end
    end
  end
end
|
class Etcds
  # Gem version.
  VERSION = "0.1.1"
end
Bumped version number
class Etcds
  # Gem version.
  VERSION = "0.1.2"
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.