CombinedText stringlengths 4 3.42M |
|---|
require 'logger'
module ActiveAdmin
  # Configuration container for the active_admin editor extension.
  module Editor
    class << self
      # Returns the current Configuration (memoized).
      def configuration
        @configuration ||= Configuration.new
      end

      # Yields the Configuration for block-style setup.
      def configure
        yield configuration
      end
    end

    # Holds all editor settings: AWS credentials, S3 bucket, stylesheets
    # and client-side template paths.
    class Configuration
      # AWS credentials
      attr_accessor :aws_access_key_id
      attr_accessor :aws_access_secret

      # The s3 bucket to store uploads.
      attr_accessor :s3_bucket

      # Base directory to store the uploaded files in the bucket. Defaults to
      # 'uploads'.
      attr_accessor :storage_dir

      # wysiwyg stylesheets that get included in the backend and the frontend.
      attr_accessor :stylesheets

      # Paths to the client-side templates in the asset pipeline.
      attr_accessor :template_paths

      # Reader with a lazy default of 'uploads'.
      def storage_dir
        @storage_dir ||= 'uploads'
      end

      # Strips a single leading and trailing slash before storing.
      def storage_dir=(dir)
        @storage_dir = dir.to_s.gsub(/(^\/|\/$)/, '')
      end

      # Defaults to the bundled wysiwyg stylesheet.
      def stylesheets
        @stylesheets ||= [ 'active_admin/editor/wysiwyg.css' ]
      end

      # True when every AWS/S3 setting needed for uploads is present.
      # NOTE(review): relies on ActiveSupport's #present?.
      def s3_configured?
        aws_access_key_id.present? &&
          aws_access_secret.present? &&
          s3_bucket.present?
      end

      # A mutable copy of PARSER_RULES (defined elsewhere in the gem).
      def parser_rules
        @parser_rules ||= PARSER_RULES.dup
      end

      # User-supplied template paths merged over the built-in defaults.
      def template_paths
        defaults = {
          toolbar: 'active_admin/editor/templates/toolbar',
          uploader: 'active_admin/editor/templates/uploader'
        }
        @template_paths ? @template_paths.reverse_merge!(defaults) : defaults
      end
    end
  end
end
Adds description to attribute template_paths
require 'logger'
module ActiveAdmin
  # Configuration namespace for the active_admin editor extension.
  module Editor
    class << self
      # Returns the shared Configuration, building it on first access.
      def configuration
        @configuration ||= Configuration.new
      end

      # Yields the shared Configuration so callers can set options in a block.
      def configure
        yield configuration
      end
    end

    # Holds all editor settings: AWS credentials, S3 bucket, stylesheets
    # and client-side template paths.
    class Configuration
      # AWS credentials
      attr_accessor :aws_access_key_id
      attr_accessor :aws_access_secret
      # The s3 bucket to store uploads.
      attr_accessor :s3_bucket
      # Base directory to store the uploaded files in the bucket. Defaults to
      # 'uploads'.
      attr_accessor :storage_dir
      # wysiwyg stylesheets that get included in the backend and the frontend.
      attr_accessor :stylesheets
      # paths to client side templates in the asset pipeline
      attr_accessor :template_paths

      # Reader with a lazy default of 'uploads'.
      def storage_dir
        @storage_dir ||= 'uploads'
      end

      # Normalizes the directory by trimming a leading and a trailing slash.
      def storage_dir=(dir)
        @storage_dir = dir.to_s.gsub(%r{^/|/$}, '')
      end

      # Defaults to the bundled wysiwyg stylesheet.
      def stylesheets
        @stylesheets ||= ['active_admin/editor/wysiwyg.css']
      end

      # True when every AWS/S3 setting needed for uploads is present.
      def s3_configured?
        [aws_access_key_id, aws_access_secret, s3_bucket].all?(&:present?)
      end

      # A mutable copy of PARSER_RULES (defined elsewhere in the gem).
      def parser_rules
        @parser_rules ||= PARSER_RULES.dup
      end

      # User-supplied template paths merged over the built-in defaults.
      def template_paths
        defaults = {
          toolbar: 'active_admin/editor/templates/toolbar',
          uploader: 'active_admin/editor/templates/uploader'
        }
        return defaults unless @template_paths
        @template_paths.reverse_merge!(defaults)
      end
    end
  end
end
|
require 'singleton'
require 'active_decorator/helpers'
module ActiveDecorator
  # Singleton that extends model instances with their corresponding
  # *Decorator module (e.g. User -> UserDecorator), recursively for arrays
  # and lazily for ActiveRecord relations.
  class Decorator
    include Singleton

    def initialize
      # Cache of model class => decorator module. nil is a valid cached
      # value meaning "no decorator exists for this class".
      @@decorators = {}
    end

    # Decorates obj in place:
    # - nil           => returns nil, untouched
    # - Array         => decorates every element
    # - AR::Relation  => hooks #to_a so records get decorated on materialization
    # - anything else => extends obj with its decorator module, if one exists
    def decorate(obj)
      return if obj.nil?
      if obj.is_a? Array
        obj.each do |r|
          decorate r
        end
      elsif defined?(ActiveRecord) && obj.is_a?(ActiveRecord::Relation) && !obj.respond_to?(:to_a_with_decorator)
        class << obj
          def to_a_with_decorator
            to_a_without_decorator.tap do |arr|
              ActiveDecorator::Decorator.instance.decorate arr
            end
          end
          alias_method_chain :to_a, :decorator
        end
      else
        d = decorator_for obj.class
        return obj unless d
        obj.extend d unless obj.is_a? d
      end
    end

    private

    # Resolves the decorator module for model_class ("FooDecorator" for Foo)
    # and caches the result. BUGFIX: nil results are now cached as well, so
    # classes without a decorator no longer trigger a repeated, expensive
    # constantize call on every lookup.
    def decorator_for(model_class)
      return @@decorators[model_class] if @@decorators.has_key? model_class
      decorator_name = "#{model_class.name}Decorator"
      d = decorator_name.constantize
      d.send :include, ActiveDecorator::Helpers
      @@decorators[model_class] = d
    rescue NameError
      @@decorators[model_class] = nil
    end
  end
end
Cache nil results for decorator_for.
This avoids the expensive and repeated calls to constantize for classes where there isn't a corresponding Decorator implementation.
require 'singleton'
require 'active_decorator/helpers'
module ActiveDecorator
  # Singleton that extends model instances with their corresponding
  # *Decorator module (e.g. User -> UserDecorator), recursively for arrays
  # and lazily for ActiveRecord relations.
  class Decorator
    include Singleton

    def initialize
      # Cache of model class => decorator module. nil is a valid cached
      # value meaning "no decorator exists for this class".
      # NOTE(review): a class variable reset whenever the singleton is
      # (re)initialized — assumed intentional for process-wide caching.
      @@decorators = {}
    end

    # Decorates obj in place:
    # - nil           => returns nil, untouched
    # - Array         => decorates every element
    # - AR::Relation  => hooks #to_a so records get decorated on materialization
    # - anything else => extends obj with its decorator module, if one exists
    def decorate(obj)
      return if obj.nil?
      if obj.is_a? Array
        obj.each do |r|
          decorate r
        end
      elsif defined?(ActiveRecord) && obj.is_a?(ActiveRecord::Relation) && !obj.respond_to?(:to_a_with_decorator)
        class << obj
          def to_a_with_decorator
            to_a_without_decorator.tap do |arr|
              ActiveDecorator::Decorator.instance.decorate arr
            end
          end
          alias_method_chain :to_a, :decorator
        end
      else
        d = decorator_for obj.class
        return obj unless d
        obj.extend d unless obj.is_a? d
      end
    end

    private

    # Resolves the decorator module for model_class ("FooDecorator" for Foo)
    # and caches the result. nil results are cached too, so classes without
    # a decorator don't pay for constantize on every lookup.
    def decorator_for(model_class)
      return @@decorators[model_class] if @@decorators.has_key? model_class
      decorator_name = "#{model_class.name}Decorator"
      d = decorator_name.constantize
      d.send :include, ActiveDecorator::Helpers
      @@decorators[model_class] = d
    rescue NameError
      @@decorators[model_class] = nil
    end
  end
end
|
module ActiveDelegate
  autoload :ReadWrite, 'active_delegate/read_write'
  autoload :Dirty, 'active_delegate/dirty'
  autoload :Localized, 'active_delegate/localized'

  # Delegates an association's attributes (readers, writers and dirty
  # tracking) onto the owning ActiveRecord model class.
  class Attributes
    # Initialize attributes
    # model   - the model class the delegations are defined on
    # options - see #default_options; :to names the target association
    def initialize(model, options)
      @model = model
      @options = default_options.merge(options)
      delegate_attributes
      save_delegated_attributes
      redefine_build_association
    end

    private

    # Get default options
    def default_options
      { except: [], only: [], allow_nil: false, to: [], prefix: nil, localized: false }
    end

    # Get association reflection, raising when the association is missing
    def association_reflection
      assoc_name = @options.fetch(:to)
      reflection = @model.reflect_on_association(assoc_name)
      return reflection unless reflection.nil?
      raise "#{@model.name} don't have the association #{assoc_name}"
    end

    # Get model association class
    def association_class
      association_reflection.klass
    end

    # Get association attribute names
    def association_attribute_names
      association_class.attribute_names
    end

    # Default excluded attributes: primary key, timestamps and, for
    # polymorphic associations, the <as>_type/<as>_id columns
    def default_excluded_attributes
      assoc_as = association_reflection.options[:as]
      poly_attr = [:"#{assoc_as}_type", :"#{assoc_as}_id"] if assoc_as.present?
      [:id, :created_at, :updated_at] + poly_attr.to_a
    end

    # Get delegatable attributes, filtered by :only / :except
    def delegatable_attributes
      attributes = association_attribute_names.map(&:to_sym)
      attributes = attributes & @options[:only].to_a if @options[:only].present?
      attributes = attributes - @options[:except].to_a if @options[:except].present?
      attributes = attributes - default_excluded_attributes
      attributes.map(&:to_sym)
    end

    # Get localized delegatable attributes (empty unless localized: true)
    def localized_attributes
      attributes = delegatable_attributes
      localized = Localized.localized_methods(attributes) if @options[:localized].present?
      localized.to_a.map(&:to_sym)
    end

    # Get delegatable methods: read/write accessors plus dirty-tracking
    def delegatable_methods
      attributes = delegatable_attributes + localized_attributes
      readwrite = ReadWrite.readwrite_methods(attributes)
      dirty = Dirty.dirty_methods(attributes)
      methods = readwrite + dirty
      methods.map(&:to_sym)
    end

    # Delegate attributes
    def delegate_attributes
      options = { to: @options[:to], allow_nil: @options[:allow_nil], prefix: @options[:prefix] }
      @model.delegate(*delegatable_methods, options)
    end

    # Redefine the association reader so it auto-builds a record when nil
    def redefine_build_association
      assoc_name = @options[:to]
      @model.class_eval do
        class_eval <<-EOM, __FILE__, __LINE__ + 1
          def #{assoc_name}
            super || send(:build_#{assoc_name})
          end
        EOM
      end
    end

    # Get attribute prefix (the association name when prefix: true)
    def attribute_prefix
      prefix = @options[:prefix]
      prefix.is_a?(TrueClass) ? @options[:to] : prefix
    end

    # Get prefixed attributes
    def prefix_attributes(attributes)
      if @options[:prefix].present?
        attributes.map { |a| :"#{attribute_prefix}_#{a}" }
      else
        attributes
      end
    end

    # Save delegated attributes on the model class as
    # <table>_attribute_names (and <table>_localized_attribute_names),
    # appending to any names already registered by previous delegations
    def save_delegated_attributes
      dl_atable = association_reflection.klass.table_name
      dl_method = :"#{dl_atable}_attribute_names"
      delegated = prefix_attributes(delegatable_attributes)
      define_attribute_names_and_types(delegated)
      delegated = @model.try(dl_method).to_a.concat(delegated)
      @model.send(:define_singleton_method, dl_method) { delegated }

      if @options[:localized].present?
        localized = prefix_attributes(localized_attributes)
        lc_method = :"#{dl_atable}_localized_attribute_names"
        @model.send(:define_singleton_method, lc_method) { localized }
      end
    end

    # Define attribute names, types and defaults on the model for any
    # delegated attributes the association table does not already declare
    def define_attribute_names_and_types(attributes)
      existing = association_attribute_names.map(&:to_sym)
      undefined = attributes.reject { |a| a.in? existing }

      undefined.each do |attrib|
        # Strip the prefix to look up column defaults and cast types by
        # the association's own column name.
        attr_name = attrib.to_s.sub("#{attribute_prefix}_", '')
        attr_deft = @options[:default] || association_class.column_defaults["#{attr_name}"]
        cast_type = @options[:cast_type] || association_class.attribute_types["#{attr_name}"]
        @model.attribute(attrib, cast_type, default: attr_deft)

        if @options[:alias].present?
          @model.attribute(@options[:alias], cast_type, default: attr_deft)
          @model.alias_attribute(@options[:alias], attrib)
        end
      end
    end
  end
end
Disable default attribute setting:
it does not work as expected.
module ActiveDelegate
  autoload :ReadWrite, 'active_delegate/read_write'
  autoload :Dirty, 'active_delegate/dirty'
  autoload :Localized, 'active_delegate/localized'

  # Delegates an association's attributes (readers, writers and dirty
  # tracking) onto the owning ActiveRecord model class.
  class Attributes
    # Initialize attributes
    # model   - the model class the delegations are defined on
    # options - see #default_options; :to names the target association
    def initialize(model, options)
      @model = model
      @options = default_options.merge(options)
      delegate_attributes
      save_delegated_attributes
      redefine_build_association
    end

    private

    # Get default options
    def default_options
      { except: [], only: [], allow_nil: false, to: [], prefix: nil, localized: false }
    end

    # Get association reflection, raising when the association is missing
    def association_reflection
      assoc_name = @options.fetch(:to)
      reflection = @model.reflect_on_association(assoc_name)
      return reflection unless reflection.nil?
      raise "#{@model.name} don't have the association #{assoc_name}"
    end

    # Get model association class
    def association_class
      association_reflection.klass
    end

    # Get association attribute names
    def association_attribute_names
      association_class.attribute_names
    end

    # Default excluded attributes: primary key, timestamps and, for
    # polymorphic associations, the <as>_type/<as>_id columns
    def default_excluded_attributes
      assoc_as = association_reflection.options[:as]
      poly_attr = [:"#{assoc_as}_type", :"#{assoc_as}_id"] if assoc_as.present?
      [:id, :created_at, :updated_at] + poly_attr.to_a
    end

    # Get delegatable attributes, filtered by :only / :except
    def delegatable_attributes
      attributes = association_attribute_names.map(&:to_sym)
      attributes = attributes & @options[:only].to_a if @options[:only].present?
      attributes = attributes - @options[:except].to_a if @options[:except].present?
      attributes = attributes - default_excluded_attributes
      attributes.map(&:to_sym)
    end

    # Get localized delegatable attributes (empty unless localized: true)
    def localized_attributes
      attributes = delegatable_attributes
      localized = Localized.localized_methods(attributes) if @options[:localized].present?
      localized.to_a.map(&:to_sym)
    end

    # Get delegatable methods: read/write accessors plus dirty-tracking
    def delegatable_methods
      attributes = delegatable_attributes + localized_attributes
      readwrite = ReadWrite.readwrite_methods(attributes)
      dirty = Dirty.dirty_methods(attributes)
      methods = readwrite + dirty
      methods.map(&:to_sym)
    end

    # Delegate attributes
    def delegate_attributes
      options = { to: @options[:to], allow_nil: @options[:allow_nil], prefix: @options[:prefix] }
      @model.delegate(*delegatable_methods, options)
    end

    # Redefine the association reader so it auto-builds a record when nil
    def redefine_build_association
      assoc_name = @options[:to]
      @model.class_eval do
        class_eval <<-EOM, __FILE__, __LINE__ + 1
          def #{assoc_name}
            super || send(:build_#{assoc_name})
          end
        EOM
      end
    end

    # Get attribute prefix (the association name when prefix: true)
    def attribute_prefix
      prefix = @options[:prefix]
      prefix.is_a?(TrueClass) ? @options[:to] : prefix
    end

    # Get prefixed attributes
    def prefix_attributes(attributes)
      if @options[:prefix].present?
        attributes.map { |a| :"#{attribute_prefix}_#{a}" }
      else
        attributes
      end
    end

    # Save delegated attributes on the model class as
    # <table>_attribute_names (and <table>_localized_attribute_names),
    # appending to any names already registered by previous delegations
    def save_delegated_attributes
      dl_atable = association_reflection.klass.table_name
      dl_method = :"#{dl_atable}_attribute_names"
      delegated = prefix_attributes(delegatable_attributes)
      define_attribute_names_and_types(delegated)
      delegated = @model.try(dl_method).to_a.concat(delegated)
      @model.send(:define_singleton_method, dl_method) { delegated }

      if @options[:localized].present?
        localized = prefix_attributes(localized_attributes)
        lc_method = :"#{dl_atable}_localized_attribute_names"
        @model.send(:define_singleton_method, lc_method) { localized }
      end
    end

    # Define attribute names and types on the model for any delegated
    # attributes the association table does not already declare.
    # Intentionally does NOT set defaults (see commit note: default
    # attribute setting did not work as expected).
    def define_attribute_names_and_types(attributes)
      existing = association_attribute_names.map(&:to_sym)
      undefined = attributes.reject { |a| a.in? existing }

      undefined.each do |attrib|
        # Strip the prefix to look up the cast type by the association's
        # own column name.
        attr_name = attrib.to_s.sub("#{attribute_prefix}_", '')
        cast_type = @options[:cast_type] || association_class.attribute_types["#{attr_name}"]
        @model.attribute(attrib, cast_type)

        if @options[:alias].present?
          @model.attribute(@options[:alias], cast_type)
          @model.alias_attribute(@options[:alias], attrib)
        end
      end
    end
  end
end
|
# Gem version namespace for acts_as_translated.
module ActsAsTranslated
  # Current release, following semantic versioning. Frozen so the shared
  # constant cannot be mutated by callers.
  VERSION = '0.0.2'.freeze
end
Bumped to version 0.0.3
# Gem version namespace for acts_as_translated.
module ActsAsTranslated
  # Current release, following semantic versioning. Frozen so the shared
  # constant cannot be mutated by callers.
  VERSION = '0.0.3'.freeze
end
|
require 'json'
require 'uri'
require 'net/http'
module AkamaiRestCcu
  # HTTP client for Akamai's CCU REST API; requests are signed with
  # EdgeGrid credentials.
  class Connection
    # opts - :base_uri, :client_token, :client_secret, :access_token
    def initialize(opts = {})
      opts = opts.dup
      @base_uri = URI(opts[:base_uri])
      @http = Akamai::Edgegrid::HTTP.new(
        @base_uri.host,
        @base_uri.port
      )
      @http.setup_edgegrid(
        client_token: opts[:client_token],
        client_secret: opts[:client_secret],
        access_token: opts[:access_token],
        max_body: 128*1024
      )
    end

    # Issues a GET against path; returns the raw response body.
    def get(path)
      request = Net::HTTP::Get.new(
        URI.join(@base_uri.to_s, path).to_s,
        { 'Content-Type' => 'application/json' }
      )
      send_req(request)
    end

    # Issues a POST with a JSON-encoded payload; returns the response body.
    def post(path, payload = {})
      request = Net::HTTP::Post.new(
        URI.join(@base_uri.to_s, path).to_s,
        { 'Content-Type' => 'application/json' }
      )
      request.body = payload.to_json
      send_req(request)
    end

    private

    # Executes the request and maps HTTP error responses onto library
    # errors. BUGFIX: renamed from `send` to `send_req` so this class no
    # longer shadows Ruby's Object#send, which broke any metaprogramming
    # (e.g. conn.send(:get, ...)) against Connection instances.
    def send_req(request)
      response = @http.request(request)
      case response
      when Net::HTTPBadRequest
        raise Errors::BadRequest, response.body
      when Net::HTTPUnauthorized
        raise Errors::Unauthorized, response.body
      when Net::HTTPForbidden
        raise Errors::Forbidden, response.body
      when Net::HTTPRequestEntityTooLarge
        raise Errors::RequestEntityTooLarge, response.body
      when Net::HTTPTooManyRequests
        raise Errors::TooManyRequests, response.body
      when Net::HTTPInsufficientStorage
        raise Errors::Insufficient, response.body
      when Net::HTTPUnsupportedMediaType
        raise Errors::UnsupportedMediaType, response.body
      when Net::HTTPInternalServerError
        raise Errors::HTTPInternalServerError, response.body
      end
      response.body
    end
  end
end
Rename 'send' to 'send_req'
require 'json'
require 'uri'
require 'net/http'
module AkamaiRestCcu
  # HTTP client for Akamai's CCU REST API; requests are signed with
  # EdgeGrid credentials.
  class Connection
    # opts - :base_uri, :client_token, :client_secret, :access_token
    def initialize(opts = {})
      opts = opts.dup
      @base_uri = URI(opts[:base_uri])
      @http = Akamai::Edgegrid::HTTP.new(
        @base_uri.host,
        @base_uri.port
      )
      @http.setup_edgegrid(
        client_token: opts[:client_token],
        client_secret: opts[:client_secret],
        access_token: opts[:access_token],
        max_body: 128*1024
      )
    end

    # Issues a GET against path; returns the raw response body.
    def get(path)
      request = Net::HTTP::Get.new(
        URI.join(@base_uri.to_s, path).to_s,
        { 'Content-Type' => 'application/json' }
      )
      send_req(request)
    end

    # Issues a POST with a JSON-encoded payload; returns the response body.
    def post(path, payload = {})
      request = Net::HTTP::Post.new(
        URI.join(@base_uri.to_s, path).to_s,
        { 'Content-Type' => 'application/json' }
      )
      request.body = payload.to_json
      send_req(request)
    end

    private

    # Executes the request and maps HTTP error responses onto library
    # errors (named send_req so Object#send keeps its standard meaning).
    # NOTE(review): Errors::Insufficient and Errors::HTTPInternalServerError
    # are named inconsistently with their Net::HTTP counterparts — confirm
    # against the Errors module definition.
    def send_req(request)
      response = @http.request(request)
      case response
      when Net::HTTPBadRequest
        raise Errors::BadRequest, response.body
      when Net::HTTPUnauthorized
        raise Errors::Unauthorized, response.body
      when Net::HTTPForbidden
        raise Errors::Forbidden, response.body
      when Net::HTTPRequestEntityTooLarge
        raise Errors::RequestEntityTooLarge, response.body
      when Net::HTTPTooManyRequests
        raise Errors::TooManyRequests, response.body
      when Net::HTTPInsufficientStorage
        raise Errors::Insufficient, response.body
      when Net::HTTPUnsupportedMediaType
        raise Errors::UnsupportedMediaType, response.body
      when Net::HTTPInternalServerError
        raise Errors::HTTPInternalServerError, response.body
      end
      response.body
    end
  end
end
|
module Benchmark
  # Prints a comparison of benchmark reports, fastest first.
  #
  # Reports respond either to #ips (iterations per second, higher is
  # better) or to #runtime (seconds, lower is better); #label names the
  # report. Does nothing when fewer than two reports are given, since
  # there is nothing to compare.
  def compare(*reports)
    return if reports.size < 2
    iter = false
    sorted = reports.sort do |a, b|
      if a.respond_to? :ips
        iter = true
        b.ips <=> a.ips
      else
        a.runtime <=> b.runtime
      end
    end
    best = sorted.shift
    STDOUT.puts "\nComparison:"
    if iter
      STDOUT.printf "%20s: %10.1f i/s\n", best.label, best.ips
    else
      # BUGFIX: was `best.rjust(20)`, which called rjust on the report
      # object itself rather than on its label.
      STDOUT.puts "#{best.label.rjust(20)}: #{best.runtime}s"
    end
    sorted.each do |report|
      name = report.label
      if iter
        x = (best.ips.to_f / report.ips.to_f)
        STDOUT.printf "%20s: %10.1f i/s - %.2fx slower\n", name, report.ips, x
      else
        # BUGFIX: runtime-based reports don't respond to #ips (that's why
        # we're in this branch); the slowdown factor is this report's
        # runtime relative to the fastest one.
        x = "%.2f" % (report.runtime.to_f / best.runtime.to_f)
        STDOUT.puts "#{name.rjust(20)}: #{report.runtime}s - #{x}x slower"
      end
    end
    STDOUT.puts
  end
  module_function :compare
end
Only compare when more than one report is given
module Benchmark
  # Prints a comparison of benchmark reports, fastest first.
  #
  # Reports respond either to #ips (iterations per second, higher is
  # better) or to #runtime (seconds, lower is better); #label names the
  # report. Does nothing when fewer than two reports are given, since
  # there is nothing to compare.
  def compare(*reports)
    return if reports.size < 2
    iter = false
    sorted = reports.sort do |a, b|
      if a.respond_to? :ips
        iter = true
        b.ips <=> a.ips
      else
        a.runtime <=> b.runtime
      end
    end
    best = sorted.shift
    STDOUT.puts "\nComparison:"
    if iter
      STDOUT.printf "%20s: %10.1f i/s\n", best.label, best.ips
    else
      # BUGFIX: was `best.rjust(20)`, which called rjust on the report
      # object itself rather than on its label.
      STDOUT.puts "#{best.label.rjust(20)}: #{best.runtime}s"
    end
    sorted.each do |report|
      name = report.label
      if iter
        x = (best.ips.to_f / report.ips.to_f)
        STDOUT.printf "%20s: %10.1f i/s - %.2fx slower\n", name, report.ips, x
      else
        # BUGFIX: runtime-based reports don't respond to #ips (that's why
        # we're in this branch); the slowdown factor is this report's
        # runtime relative to the fastest one.
        x = "%.2f" % (report.runtime.to_f / best.runtime.to_f)
        STDOUT.puts "#{name.rjust(20)}: #{report.runtime}s - #{x}x slower"
      end
    end
    STDOUT.puts
  end
  module_function :compare
end
|
require 'spec_helper'
describe 'Ubuntu Stemcell' do
it_behaves_like 'a stemcell'
describe package('apt') do
it { should be_installed }
end
describe package('rpm') do
it { should_not be_installed }
end
context 'installed by base_debootstrap' do
%w(
adduser
apt
apt-utils
bzip2
console-setup
dash
debconf
dhcp3-client
eject
gnupg
ifupdown
initramfs-tools
iproute
iputils-ping
kbd
less
locales
lsb-release
makedev
mawk
module-init-tools
net-tools
netbase
netcat-openbsd
ntpdate
passwd
procps
python
sudo
tasksel
tzdata
ubuntu-keyring
udev
upstart
ureadahead
vim-tiny
whiptail
).each do |pkg|
describe package(pkg) do
it { should be_installed }
end
end
describe file('/etc/lsb-release') do
it { should be_file }
it { should contain 'DISTRIB_RELEASE=10.04' }
it { should contain 'DISTRIB_CODENAME=lucid' }
end
end
context 'installed by base_apt' do
%w(
upstart
build-essential
libssl-dev
lsof
strace
bind9-host
dnsutils
tcpdump
iputils-arping
curl
wget
libcurl3
libcurl4-openssl-dev
bison
libreadline6-dev
libxml2
libxml2-dev
libxslt1.1
libxslt1-dev
zip
unzip
nfs-common
flex
psmisc
apparmor-utils
iptables
sysstat
rsync
openssh-server
traceroute
libncurses5-dev
quota
libaio1
gdb
tripwire
libcap2-bin
libcap-dev
libbz2-dev
cmake
scsitools
mg
htop
module-assistant
debhelper
runit
sudo
uuid-dev
libgcrypt11-dev
).each do |pkg|
describe package(pkg) do
it { should be_installed }
end
end
describe file('/sbin/rescan-scsi-bus.sh') do
it { should be_file }
it { should be_executable }
end
end
describe package('libyaml-dev') do
it {should_not be_installed }
end
context 'installed by system_grub' do
{
'grub' => '0.97-29ubuntu60.10.04.2',
}.each do |pkg, version|
describe package(pkg) do
it { should be_installed.with_version(version) }
end
end
%w(e2fs_stage1_5 stage1 stage2).each do |grub_stage|
describe file("/boot/grub/#{grub_stage}") do
it { should be_file }
end
end
end
context 'installed by system_kernel' do
%w(
linux-image-virtual-lts-backport-oneiric
linux-headers-virtual-lts-backport-oneiric
).each do |pkg|
describe package(pkg) do
it { should be_installed }
end
end
end
context 'installed by image_install_grub' do
describe file('/boot/grub/grub.conf') do
it { should be_file }
it { should contain 'default=0' }
it { should contain 'timeout=1' }
it { should contain 'title Ubuntu 10.04.4 LTS (3.0.0-32-virtual)' }
it { should contain ' root (hd0,0)' }
it { should contain ' kernel /boot/vmlinuz-3.0.0-32-virtual ro root=UUID=' }
it { should contain ' selinux=0' }
it { should contain ' initrd /boot/initrd.img-3.0.0-32-virtual' }
end
describe file('/boot/grub/menu.lst') do
before { pending 'until aws/openstack stop clobbering the symlink with "update-grub"' }
it { should be_linked_to('./grub.conf') }
end
end
context 'installed by bosh_user' do
describe file('/etc/passwd') do
it { should be_file }
it { should contain '/home/vcap:/bin/bash' }
end
end
context 'installed by system_parameters' do
describe file('/var/vcap/bosh/etc/operating_system') do
it { should contain('ubuntu') }
end
end
context 'installed by bosh_harden' do
describe 'disallow unsafe setuid binaries' do
subject { backend.run_command('find / -xdev -perm +6000 -a -type f')[:stdout].split }
it { should match_array(%w(/bin/su /usr/bin/sudo /usr/bin/sudoedit)) }
end
describe 'disallow root login' do
subject { file('/etc/ssh/sshd_config') }
it { should contain /^PermitRootLogin no$/ }
end
end
context 'installed by system-aws-network', exclude_on_vsphere: true do
describe file('/etc/network/interfaces') do
it { should be_file }
it { should contain 'auto eth0' }
it { should contain 'iface eth0 inet dhcp' }
end
end
end
Formatting
require 'spec_helper'
# Serverspec examples verifying the contents of the Ubuntu (10.04/lucid)
# stemcell image: expected packages per build stage, kernel, GRUB setup,
# BOSH user and hardening.
describe 'Ubuntu Stemcell' do
  it_behaves_like 'a stemcell'

  describe package('apt') do
    it { should be_installed }
  end

  describe package('rpm') do
    it { should_not be_installed }
  end

  # Base packages laid down by the debootstrap stage.
  context 'installed by base_debootstrap' do
    %w(
      adduser
      apt
      apt-utils
      bzip2
      console-setup
      dash
      debconf
      dhcp3-client
      eject
      gnupg
      ifupdown
      initramfs-tools
      iproute
      iputils-ping
      kbd
      less
      locales
      lsb-release
      makedev
      mawk
      module-init-tools
      net-tools
      netbase
      netcat-openbsd
      ntpdate
      passwd
      procps
      python
      sudo
      tasksel
      tzdata
      ubuntu-keyring
      udev
      upstart
      ureadahead
      vim-tiny
      whiptail
    ).each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end

    describe file('/etc/lsb-release') do
      it { should be_file }
      it { should contain 'DISTRIB_RELEASE=10.04' }
      it { should contain 'DISTRIB_CODENAME=lucid' }
    end
  end

  # Packages installed on top via apt during image build.
  context 'installed by base_apt' do
    %w(
      upstart
      build-essential
      libssl-dev
      lsof
      strace
      bind9-host
      dnsutils
      tcpdump
      iputils-arping
      curl
      wget
      libcurl3
      libcurl4-openssl-dev
      bison
      libreadline6-dev
      libxml2
      libxml2-dev
      libxslt1.1
      libxslt1-dev
      zip
      unzip
      nfs-common
      flex
      psmisc
      apparmor-utils
      iptables
      sysstat
      rsync
      openssh-server
      traceroute
      libncurses5-dev
      quota
      libaio1
      gdb
      tripwire
      libcap2-bin
      libcap-dev
      libbz2-dev
      cmake
      scsitools
      mg
      htop
      module-assistant
      debhelper
      runit
      sudo
      uuid-dev
      libgcrypt11-dev
    ).each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end

    describe file('/sbin/rescan-scsi-bus.sh') do
      it { should be_file }
      it { should be_executable }
    end
  end

  describe package('libyaml-dev') do
    it { should_not be_installed }
  end

  context 'installed by system_grub' do
    {
      'grub' => '0.97-29ubuntu60.10.04.2',
    }.each do |pkg, version|
      describe package(pkg) do
        it { should be_installed.with_version(version) }
      end
    end

    %w(e2fs_stage1_5 stage1 stage2).each do |grub_stage|
      describe file("/boot/grub/#{grub_stage}") do
        it { should be_file }
      end
    end
  end

  context 'installed by system_kernel' do
    %w(
      linux-image-virtual-lts-backport-oneiric
      linux-headers-virtual-lts-backport-oneiric
    ).each do |pkg|
      describe package(pkg) do
        it { should be_installed }
      end
    end
  end

  context 'installed by image_install_grub' do
    describe file('/boot/grub/grub.conf') do
      it { should be_file }
      it { should contain 'default=0' }
      it { should contain 'timeout=1' }
      it { should contain 'title Ubuntu 10.04.4 LTS (3.0.0-32-virtual)' }
      it { should contain ' root (hd0,0)' }
      it { should contain ' kernel /boot/vmlinuz-3.0.0-32-virtual ro root=UUID=' }
      it { should contain ' selinux=0' }
      it { should contain ' initrd /boot/initrd.img-3.0.0-32-virtual' }
    end

    describe file('/boot/grub/menu.lst') do
      before { pending 'until aws/openstack stop clobbering the symlink with "update-grub"' }
      it { should be_linked_to('./grub.conf') }
    end
  end

  context 'installed by bosh_user' do
    describe file('/etc/passwd') do
      it { should be_file }
      it { should contain '/home/vcap:/bin/bash' }
    end
  end

  context 'installed by system_parameters' do
    describe file('/var/vcap/bosh/etc/operating_system') do
      it { should contain('ubuntu') }
    end
  end

  # Hardening checks: only an allow-listed set of setuid binaries, and no
  # root SSH login.
  context 'installed by bosh_harden' do
    describe 'disallow unsafe setuid binaries' do
      subject { backend.run_command('find / -xdev -perm +6000 -a -type f')[:stdout].split }

      it { should match_array(%w(/bin/su /usr/bin/sudo /usr/bin/sudoedit)) }
    end

    describe 'disallow root login' do
      subject { file('/etc/ssh/sshd_config') }

      it { should contain /^PermitRootLogin no$/ }
    end
  end

  context 'installed by system-aws-network', exclude_on_vsphere: true do
    describe file('/etc/network/interfaces') do
      it { should be_file }
      it { should contain 'auto eth0' }
      it { should contain 'iface eth0 inet dhcp' }
    end
  end
end
|
# Copyright (c) 2009-2012 VMware, Inc.
module Bosh::Cli::Command
  # CLI commands for starting, stopping, restarting and recreating
  # deployment jobs/instances via the BOSH director.
  class JobManagement < Base
    FORCE = 'Proceed even when there are other manifest changes'

    # bosh start
    usage 'start'
    desc 'Start job/instance'
    option '--force', FORCE
    def start_job(job, index = nil)
      change_job_state(:start, job, index)
    end

    # bosh stop (--hard detaches and powers off the VM)
    usage 'stop'
    desc 'Stop job/instance'
    option '--soft', 'Stop process only'
    option '--hard', 'Power off VM'
    option '--force', FORCE
    def stop_job(job, index = nil)
      if hard?
        change_job_state(:detach, job, index)
      else
        change_job_state(:stop, job, index)
      end
    end

    # bosh restart
    usage 'restart'
    desc 'Restart job/instance (soft stop + start)'
    option '--force', FORCE
    def restart_job(job, index = nil)
      change_job_state(:restart, job, index)
    end

    # bosh recreate
    usage 'recreate'
    desc 'Recreate job/instance (hard stop + start)'
    option '--force', FORCE
    def recreate_job(job, index = nil)
      change_job_state(:recreate, job, index)
    end

    private

    # Maps a state-change operation (:start, :stop, :detach, :restart,
    # :recreate) to its descriptions/target state and drives the director
    # through the actual change.
    class JobState
      OPERATION_DESCRIPTIONS = {
        start: 'start %s',
        stop: 'stop %s',
        detach: 'stop %s and power off its VM(s)',
        restart: 'restart %s',
        recreate: 'recreate %s'
      }

      NEW_STATES = {
        start: 'started',
        stop: 'stopped',
        detach: 'detached',
        restart: 'restart',
        recreate: 'recreate'
      }

      COMPLETION_DESCRIPTIONS = {
        start: '%s has been started',
        stop: '%s has been stopped, VM(s) still running',
        detach: '%s has been detached, VM(s) powered off',
        restart: '%s has been restarted',
        recreate: '%s has been recreated'
      }

      # command - the enclosing CLI command (supplies I/O, manifest access
      #           and the director client)
      # force   - skip the "other manifest changes" safety check when truthy
      def initialize(command, force = false)
        @command = command
        @force = force
      end

      # Performs the state change and reports the resulting task status.
      def change(state, job, index)
        job_desc = job_description(job, index)
        op_desc = OPERATION_DESCRIPTIONS.fetch(state) % job_desc
        new_state = NEW_STATES.fetch(state)
        completion_desc = COMPLETION_DESCRIPTIONS.fetch(state) % job_desc.make_green
        status, task_id = perform_vm_state_change(job, index, new_state, op_desc)
        command.task_report(status, task_id, completion_desc)
      end

      private

      attr_reader :command

      def force?
        !!@force
      end

      # "job/index" when an index is given, otherwise just the job name.
      def job_description(job, index)
        index ? "#{job}/#{index}" : "#{job}"
      end

      # Confirms with the user (in interactive mode) and asks the director
      # to change the job state.
      def perform_vm_state_change(job, index, new_state, operation_desc)
        command.say("You are about to #{operation_desc.make_green}")
        manifest = command.prepare_deployment_manifest
        manifest_yaml = Psych.dump(manifest)

        if command.interactive?
          check_if_manifest_changed(manifest)
          unless command.confirmed?("#{operation_desc.capitalize}?")
            command.cancel_deployment
          end
        end

        command.nl
        command.say("Performing `#{operation_desc}'...")
        command.director.change_job_state(manifest['name'], manifest_yaml, job, index, new_state)
      end

      # Refuses to proceed when the manifest has unrelated changes, unless
      # --force was given.
      def check_if_manifest_changed(manifest)
        other_changes_present = command.inspect_deployment_changes(
          manifest, :show_empty_changeset => false)

        if other_changes_present && !force?
          command.err('Cannot perform job management when other deployment changes ' +
                        "are present. Please use `--force' to override.")
        end
      end
    end

    # Shared driver for all four public commands.
    def change_job_state(state, job, index = nil)
      check_arguments(state, job)
      index = valid_index_for(job, index)
      JobState.new(self, force?).change(state, job, index)
    end

    def hard?
      options[:hard]
    end

    def soft?
      options[:soft]
    end

    def force?
      options[:force]
    end

    # Validates auth, job existence and the --hard/--soft combination.
    def check_arguments(operation, job)
      auth_required
      job_must_exist_in_deployment(job)

      if hard? && soft?
        err('Cannot handle both --hard and --soft options, please choose one')
      end

      if !hard_and_soft_options_allowed?(operation) && (hard? || soft?)
        err("--hard and --soft options only make sense for `stop' operation")
      end
    end

    def hard_and_soft_options_allowed?(operation)
      operation == :stop || operation == :detach
    end

    # NOTE(review): duplicate of JobState#check_if_manifest_changed; this
    # copy appears unused by the paths above — confirm before removing.
    def check_if_manifest_changed(manifest)
      return if force?
      other_changes_present = inspect_deployment_changes(
        manifest, :show_empty_changeset => false)

      if other_changes_present
        err('Cannot perform job management when other deployment changes ' +
              "are present. Please use `--force' to override.")
      end
    end
  end
end
Extract VmState so that previously private behavior can be specced
# Copyright (c) 2009-2012 VMware, Inc.
module Bosh::Cli::Command
class JobManagement < Base
FORCE = 'Proceed even when there are other manifest changes'
# bosh start
usage 'start'
desc 'Start job/instance'
option '--force', FORCE
def start_job(job, index = nil)
change_job_state(:start, job, index)
end
# bosh stop
usage 'stop'
desc 'Stop job/instance'
option '--soft', 'Stop process only'
option '--hard', 'Power off VM'
option '--force', FORCE
def stop_job(job, index = nil)
if hard?
change_job_state(:detach, job, index)
else
change_job_state(:stop, job, index)
end
end
# bosh restart
usage 'restart'
desc 'Restart job/instance (soft stop + start)'
option '--force', FORCE
def restart_job(job, index = nil)
change_job_state(:restart, job, index)
end
# bosh recreate
usage 'recreate'
desc 'Recreate job/instance (hard stop + start)'
option '--force', FORCE
def recreate_job(job, index = nil)
change_job_state(:recreate, job, index)
end
private
class JobState
OPERATION_DESCRIPTIONS = {
start: 'start %s',
stop: 'stop %s',
detach: 'stop %s and power off its VM(s)',
restart: 'restart %s',
recreate: 'recreate %s'
}
NEW_STATES = {
start: 'started',
stop: 'stopped',
detach: 'detached',
restart: 'restart',
recreate: 'recreate'
}
COMPLETION_DESCRIPTIONS = {
start: '%s has been started',
stop: '%s has been stopped, VM(s) still running',
detach: '%s has been detached, VM(s) powered off',
restart: '%s has been restarted',
recreate: '%s has been recreated'
}
def initialize(command, vm_state)
@command = command
@vm_state = vm_state
end
def change(state, job, index)
job_desc = job_description(job, index)
op_desc = OPERATION_DESCRIPTIONS.fetch(state) % job_desc
new_state = NEW_STATES.fetch(state)
completion_desc = COMPLETION_DESCRIPTIONS.fetch(state) % job_desc.make_green
status, task_id = perform_vm_state_change(job, index, new_state, op_desc)
command.task_report(status, task_id, completion_desc)
end
private
attr_reader :command, :vm_state
def job_description(job, index)
index ? "#{job}/#{index}" : "#{job}"
end
def perform_vm_state_change(job, index, new_state, operation_desc)
vm_state.change(job, index, new_state, operation_desc)
end
end
class VmState
def initialize(command, force)
@command = command
@force = force
end
def change(job, index, new_state, operation_desc)
command.say("You are about to #{operation_desc.make_green}")
manifest = command.prepare_deployment_manifest
manifest_yaml = Psych.dump(manifest)
if command.interactive?
check_if_manifest_changed(manifest)
unless command.confirmed?("#{operation_desc.capitalize}?")
command.cancel_deployment
end
end
command.nl
command.say("Performing `#{operation_desc}'...")
command.director.change_job_state(manifest['name'], manifest_yaml, job, index, new_state)
end
private
attr_reader :command
def force?
!!@force
end
def check_if_manifest_changed(manifest)
other_changes_present = command.inspect_deployment_changes(
manifest, show_empty_changeset: false)
if other_changes_present && !force?
command.err('Cannot perform job management when other deployment changes ' +
"are present. Please use `--force' to override.")
end
end
end
def change_job_state(state, job, index = nil)
check_arguments(state, job)
index = valid_index_for(job, index)
vm_state = VmState.new(self, force?)
JobState.new(self, vm_state).change(state, job, index)
end
def hard?
options[:hard]
end
def soft?
options[:soft]
end
def force?
options[:force]
end
def check_arguments(operation, job)
auth_required
job_must_exist_in_deployment(job)
if hard? && soft?
err('Cannot handle both --hard and --soft options, please choose one')
end
if !hard_and_soft_options_allowed?(operation) && (hard? || soft?)
err("--hard and --soft options only make sense for `stop' operation")
end
end
def hard_and_soft_options_allowed?(operation)
operation == :stop || operation == :detach
end
# Command-level pending-changes guard: errors out unless --force was
# given or the local manifest matches the deployed state.
def check_if_manifest_changed(manifest)
  return if force?
  other_changes_present = inspect_deployment_changes(
    manifest, :show_empty_changeset => false)
  if other_changes_present
    err('Cannot perform job management when other deployment changes ' +
        "are present. Please use `--force' to override.")
  end
end
end
end
|
require 'aweplug/google_apis'
require 'aweplug/helpers/searchisko_social'
require 'json'
require 'aweplug/helpers/resources'
require 'aweplug/helpers/searchisko'
module Aweplug
module Books
# String refinements used while massaging book metadata.
module StringUtils
  refine String do
    # True when the string is all digits or parses as a Float.
    def numeric?
      return true if self =~ /^\d+$/
      Float(self)
      true
    rescue
      false
    end
    # Strips HTML tags, then accumulates whole sentences (terminated by
    # ., ! or ?) while the running length stays within +max+ characters.
    # Sentences are concatenated without separating spaces.
    def truncate(max: 150)
      sentences = gsub(/<\/?[^>]*>/, "").scan(/[^\.!?]+[\.!?]/).map(&:strip)
      total = 0
      kept = []
      sentences.each do |sentence|
        total += sentence.length
        break if total > max
        kept << sentence
      end
      kept.join
    end
  end
end
class GoogleBooks
include Aweplug::GoogleAPIs
include Aweplug::Helpers::SearchiskoSocial
include Aweplug::Helpers::Resources
using StringUtils
BOOKS_API_SERVICE_NAME = 'books'
BOOKS_API_VERSION = 'v1'
# site - the site/config object; push_to_searchisko - when falsy,
# documents are built but never pushed to Searchisko.
def initialize site, push_to_searchisko
  @site = site
  @push_to_searchisko = push_to_searchisko
  @client = google_client(site, authenticate: site.authenticate_google_books_api)
  @books = @client.discovered_api(BOOKS_API_SERVICE_NAME, BOOKS_API_VERSION)
  # 360 is passed straight to Searchisko.default — presumably a cache
  # TTL or timeout; confirm against the helper's signature.
  @searchisko = Aweplug::Helpers::Searchisko.default site, 360
end
# Fetches book metadata for data['isbn'] from the Google Books API and
# shapes it into a Searchisko document hash. Returns nil when the call
# succeeds but no single matching volume can be identified; prints the
# HTTP status and returns nil on API failure.
def get data
  res = @client.execute!(
    :api_method => @books.volumes.list,
    :parameters => {
      :q => "isbn:#{data['isbn']}"
    }
  )
  if res.success?
    books = JSON.load(res.body)
    if books['totalItems'] == 1
      book = books['items'][0]
    elsif books['totalItems'] > 1
      # See if only one of the books has the correct ISBN_13
      possibles = books['items'].find_all { |b| isbn_13(b) == data['isbn'] }
      if possibles.length == 1
        book = possibles.first
      else
        puts ">1 books found for #{data['isbn']}"
      end
    else
      puts "No results found for isbn: #{data['isbn']}"
    end
    unless book.nil?
      isbn = isbn_13(book) || data['isbn']
      # Thumbnail preference: spreadsheet value, then Google's image
      # links, then the site's placeholder image.
      if !data['thumbnail_url'].nil? && !data['thumbnail_url'].empty?
        thumbnail = data['thumbnail_url']
      elsif book['volumeInfo'].has_key? 'imageLinks'
        thumbnail = book['volumeInfo']['imageLinks']['thumbnail']
      else
        thumbnail = cdn("#{@site.base_url}/images/books/book_noimageavailable.jpg")
      end
      normalized_authors = book['volumeInfo'].has_key?('authors') ? book['volumeInfo']['authors'].collect { |a| normalize 'contributor_profile_by_jbossdeveloper_quickstart_author', a, @searchisko } : []
      # publishedDate may be YYYY, YYYY-MM or YYYY-MM-DD (with - or / as
      # separator); build the most precise DateTime the value allows.
      unless book['volumeInfo']['publishedDate'].nil?
        if m = book['volumeInfo']['publishedDate'].match(/^(\d{4})([-|\/](\d{1,2})([-|\/](\d{1,2}))?)?$/)
          if !m[5].nil?
            published = DateTime.new(m[1].to_i, m[3].to_i, m[5].to_i)
          elsif !m[3].nil?
            published = DateTime.new(m[1].to_i, m[3].to_i)
          else
            published = DateTime.new(m[1].to_i)
          end
        end
      end
      description = book['volumeInfo']['description'].truncate(max: 500) if book['volumeInfo']['description']
      # NOTE(review): 'webReadLink' does not look like a volumeInfo field
      # in the Google Books API (the reader link normally lives under
      # accessInfo as 'webReaderLink') — confirm :web_reader_link is
      # ever populated.
      {
        :sys_title => book['volumeInfo']['title'],
        :sys_description => description,
        :sys_url_view => book['volumeInfo']['canonicalVolumeLink'],
        :authors => book['volumeInfo']['authors'],
        :thumbnail => thumbnail.to_s,
        :isbn => isbn,
        :tags => book['volumeInfo']['categories'],
        :web_reader_link => book['volumeInfo']['webReadLink'],
        :preview_link => book['volumeInfo']['previewLink'],
        :info_link => book['volumeInfo']['infoLink'],
        :publisher => book['volumeInfo']['publisher'],
        :sys_content => book['volumeInfo']['description'],
        :sys_created => published,
        :normalized_authors => normalized_authors,
        :average_rating => book['volumeInfo']['averageRating']
      }
    end
  else
    puts "#{res.status} loading isbn: #{data['isbn']}"
  end
end
# Pushes +book+ to Searchisko unless pushing is disabled or the site is
# running with a development profile. The :normalized_authors key is
# internal only and is stripped from the payload.
def send_to_searchisko book
  return unless @push_to_searchisko
  return if @site.profile =~ /development/
  payload = book.reject { |key, _value| key == :normalized_authors }
  @searchisko.push_content('jbossdeveloper_book', book[:isbn], payload.to_json)
end
private
# Extracts a usable ISBN from the volume's industry identifiers:
# prefers a non-empty ISBN_13, then a numeric non-empty OTHER
# identifier; nil when neither is available.
def isbn_13 book
  return nil unless book['volumeInfo'].has_key?('industryIdentifiers')
  # Flatten [{'type'=>..., 'identifier'=>...}, ...] into a type=>id hash.
  ids = Hash[book['volumeInfo']['industryIdentifiers'].map { |entry| entry.values.flatten }]
  thirteen = ids['ISBN_13']
  return thirteen if ids.has_key?('ISBN_13') && !thirteen.nil? && !thirteen.empty?
  other = ids['OTHER']
  return other if ids.has_key?('OTHER') && !other.nil? && other.numeric? && !other.empty?
  nil
end
end
end
end
Addresses DEVELOPER-1093 (book details)
There are new columns in the spreadsheet that will be used if Google
doesn't have the book in their collection. I mirrored the structure and
keys so both branches run through the same path.
require 'aweplug/google_apis'
require 'aweplug/helpers/searchisko_social'
require 'json'
require 'aweplug/helpers/resources'
require 'aweplug/helpers/searchisko'
module Aweplug
module Books
# String refinements used while massaging book metadata.
module StringUtils
  refine String do
    # True when the string is all digits or parses as a Float.
    def numeric?
      return true if self =~ /^\d+$/
      true if Float(self) rescue false
    end
    # Strips HTML tags, then accumulates whole sentences (terminated by
    # ., ! or ?) while the running length stays within +max+ characters.
    # Sentences are concatenated without separating spaces.
    def truncate(max: 150)
      out = ""
      i = 0
      self.gsub(/<\/?[^>]*>/, "").scan(/[^\.!?]+[\.!?]/).map(&:strip).each do |s|
        i += s.length
        if i > max
          break
        else
          out << s
        end
      end
      out
    end
  end
end
class GoogleBooks
include Aweplug::GoogleAPIs
include Aweplug::Helpers::SearchiskoSocial
include Aweplug::Helpers::Resources
using StringUtils
BOOKS_API_SERVICE_NAME = 'books'
BOOKS_API_VERSION = 'v1'
# site - the site/config object; push_to_searchisko - when falsy,
# documents are built but never pushed to Searchisko.
def initialize site, push_to_searchisko
  @site = site
  @push_to_searchisko = push_to_searchisko
  @client = google_client(site, authenticate: site.authenticate_google_books_api)
  @books = @client.discovered_api(BOOKS_API_SERVICE_NAME, BOOKS_API_VERSION)
  # 360 is passed straight to Searchisko.default — presumably a cache
  # TTL or timeout; confirm against the helper's signature.
  @searchisko = Aweplug::Helpers::Searchisko.default site, 360
end
# Looks up book metadata for data['isbn'] via the Google Books API.
# When Google returns no results, falls back to the spreadsheet columns
# in +data+, shaped like a Google volume so both branches share the
# document-building code below. Returns a Searchisko document hash, or
# nil when no single usable volume was identified; prints the HTTP
# status and returns nil on API failure.
def get data
  res = @client.execute!(
    :api_method => @books.volumes.list,
    :parameters => {
      :q => "isbn:#{data['isbn']}"
    }
  )
  if res.success?
    books = JSON.load(res.body)
    if books['totalItems'] == 1
      book = books['items'][0]
    elsif books['totalItems'] > 1
      # See if only one of the books has the correct ISBN_13
      possibles = books['items'].find_all { |b| isbn_13(b) == data['isbn'] }
      if possibles.length == 1
        book = possibles.first
      else
        puts ">1 books found for #{data['isbn']}"
      end
    else
      puts "No results found for isbn: #{data['isbn']}, attempting to use spreadsheet info"
      # Mirror Google's volumeInfo structure. The keys must be the ones
      # the document hash below actually reads: 'canonicalVolumeLink'
      # (was 'volumeLink') and 'webReadLink' (was 'webReaderLink') —
      # with the old keys the spreadsheet URL values were silently
      # dropped from :sys_url_view and :web_reader_link.
      book = {'volumeInfo' => {'authors' => (data['authors'].nil?) ? [] : data['authors'].split(','),
                               'publishedDate' => data['published_date'],
                               'description' => data['description'],
                               'title' => data['title'],
                               'canonicalVolumeLink' => data['book_url'],
                               'categories' => (data['categories'].nil?) ? [] : data['categories'].split(','),
                               'webReadLink' => data['web_reader_url'],
                               'previewLink' => data['preview_url'],
                               'infoLink' => data['book_url'],
                               'publisher' => data['publisher'],
                               'averageRating' => data['average_rating']
                              }}
    end
    unless book.nil?
      isbn = isbn_13(book) || data['isbn']
      # Thumbnail preference: spreadsheet value, then Google's image
      # links, then the site's placeholder image.
      if !data['thumbnail_url'].nil? && !data['thumbnail_url'].empty?
        thumbnail = data['thumbnail_url']
      elsif book['volumeInfo'].has_key? 'imageLinks'
        thumbnail = book['volumeInfo']['imageLinks']['thumbnail']
      else
        thumbnail = cdn("#{@site.base_url}/images/books/book_noimageavailable.jpg")
      end
      normalized_authors = book['volumeInfo'].has_key?('authors') ? book['volumeInfo']['authors'].collect { |a| normalize 'contributor_profile_by_jbossdeveloper_quickstart_author', a, @searchisko } : []
      # publishedDate may be YYYY, YYYY-MM or YYYY-MM-DD (with - or / as
      # separator); build the most precise DateTime the value allows.
      unless book['volumeInfo']['publishedDate'].nil?
        if m = book['volumeInfo']['publishedDate'].match(/^(\d{4})([-|\/](\d{1,2})([-|\/](\d{1,2}))?)?$/)
          if !m[5].nil?
            published = DateTime.new(m[1].to_i, m[3].to_i, m[5].to_i)
          elsif !m[3].nil?
            published = DateTime.new(m[1].to_i, m[3].to_i)
          else
            published = DateTime.new(m[1].to_i)
          end
        end
      end
      description = book['volumeInfo']['description'].truncate(max: 500) if book['volumeInfo']['description']
      {
        :sys_title => book['volumeInfo']['title'],
        :sys_description => description,
        :sys_url_view => book['volumeInfo']['canonicalVolumeLink'],
        :authors => book['volumeInfo']['authors'],
        :thumbnail => thumbnail.to_s,
        :isbn => isbn,
        :tags => book['volumeInfo']['categories'],
        :web_reader_link => book['volumeInfo']['webReadLink'],
        :preview_link => book['volumeInfo']['previewLink'],
        :info_link => book['volumeInfo']['infoLink'],
        :publisher => book['volumeInfo']['publisher'],
        :sys_content => book['volumeInfo']['description'],
        :sys_created => published,
        :normalized_authors => normalized_authors,
        :average_rating => book['volumeInfo']['averageRating']
      }
    end
  else
    puts "#{res.status} loading isbn: #{data['isbn']}"
  end
end
# Pushes +book+ to Searchisko unless pushing is disabled or the site is
# running with a development profile. The :normalized_authors key is
# internal only and is stripped from the payload.
def send_to_searchisko book
  unless !@push_to_searchisko || @site.profile =~ /development/
    @searchisko.push_content('jbossdeveloper_book',
      book[:isbn],
      book.reject {|k, v| k == :normalized_authors }.to_json)
  end
end
private
# Extracts a usable ISBN from the volume's industry identifiers:
# prefers a non-empty ISBN_13, then a numeric non-empty OTHER
# identifier; nil when neither is available.
def isbn_13 book
  if book['volumeInfo'].has_key?('industryIdentifiers')
    # Flatten [{'type'=>..., 'identifier'=>...}, ...] into a type=>id hash.
    ids = Hash[book['volumeInfo']['industryIdentifiers'].map(&:values).map(&:flatten)]
    if ids.has_key?('ISBN_13') && !ids['ISBN_13'].nil? && !ids['ISBN_13'].empty?
      return ids['ISBN_13']
    elsif ids.has_key?('OTHER') && !ids['OTHER'].nil? && ids['OTHER'].numeric? && !ids['OTHER'].empty?
      return ids['OTHER']
    end
  end
  nil
end
end
end
end
|
# encoding: utf-8
module Backup
module Database
# Dumps a PostgreSQL database (or all databases) via pg_dump/pg_dumpall,
# optionally compressed, into the model's dump path.
class PostgreSQL < Base
  class Error < Backup::Error; end
  ##
  # Name of the database that needs to get dumped.
  # To dump all databases, set this to `:all` or leave blank.
  # +username+ must be a PostgreSQL superuser to run `pg_dumpall`.
  attr_accessor :name
  ##
  # Credentials for the specified database
  attr_accessor :username, :password
  ##
  # If set the pg_dump(all) command is executed as the given user
  attr_accessor :sudo_user
  ##
  # Connectivity options
  attr_accessor :host, :port, :socket
  ##
  # Tables to skip while dumping the database.
  # If `name` is set to :all (or not specified), these are ignored.
  attr_accessor :skip_tables
  ##
  # Tables to dump. This in only valid if `name` is specified.
  # If none are given, the entire database will be dumped.
  attr_accessor :only_tables
  ##
  # Additional "pg_dump" or "pg_dumpall" options
  attr_accessor :additional_options
  def initialize(model, database_id = nil, &block)
    super
    instance_eval(&block) if block_given?
    @name ||= :all
  end
  ##
  # Performs the pgdump command and outputs the dump file
  # in the +dump_path+ using +dump_filename+.
  #
  # <trigger>/databases/PostgreSQL[-<database_id>].sql[.gz]
  def perform!
    super
    pipeline = Pipeline.new
    dump_ext = 'sql'
    pipeline << (dump_all? ? pgdumpall : pgdump)
    # Chain the model's compressor into the pipeline, extending the
    # output extension (e.g. 'sql' -> 'sql.gz').
    model.compressor.compress_with do |command, ext|
      pipeline << command
      dump_ext << ext
    end if model.compressor
    pipeline << "#{ utility(:cat) } > " +
        "'#{ File.join(dump_path, dump_filename) }.#{ dump_ext }'"
    pipeline.run
    if pipeline.success?
      log!(:finished)
    else
      raise Error, "Dump Failed!\n" + pipeline.error_messages
    end
  end
  # Shell command dumping the single database +name+.
  def pgdump
    "#{ password_option }" +
    "#{ sudo_option }" +
    "#{ utility(:pg_dump) } #{ username_option } #{ connectivity_options } " +
    "#{ user_options } #{ tables_to_dump } #{ tables_to_skip } #{ name }"
  end
  # Shell command dumping every database on the server.
  def pgdumpall
    "#{ password_option }" +
    "#{ sudo_option }" +
    "#{ utility(:pg_dumpall) } #{ username_option } " +
    "#{ connectivity_options } #{ user_options }"
  end
  # PGPASSWORD environment prefix (shell-escaped) when a password is set.
  def password_option
    "PGPASSWORD=#{ Shellwords.escape(password) } " if password
  end
  # Runs the dump as +sudo_user+. `-H` sets $HOME to that user's home
  # directory; without it pg_dump(all) starts in the invoking user's
  # cwd and emits `[warn] could not change directory to "/root"`.
  def sudo_option
    "#{ utility(:sudo) } -n -H -u #{ sudo_user } " if sudo_user
  end
  def username_option
    "--username=#{ Shellwords.escape(username) }" if username
  end
  # A unix socket, when given, takes precedence over host/port.
  def connectivity_options
    return "--host='#{ socket }'" if socket
    opts = []
    opts << "--host='#{ host }'" if host
    opts << "--port='#{ port }'" if port
    opts.join(' ')
  end
  def user_options
    Array(additional_options).join(' ')
  end
  def tables_to_dump
    Array(only_tables).map do |table|
      "--table='#{ table }'"
    end.join(' ')
  end
  def tables_to_skip
    Array(skip_tables).map do |table|
      "--exclude-table='#{ table }'"
    end.join(' ')
  end
  # True when every database should be dumped (pg_dumpall path).
  def dump_all?
    name == :all
  end
end
end
end
Fix [warn] could not change directory to "/root"
As mentioned already in #695, this is a common issue with PostgreSQL backups combined with use_sudo.
A better fix than ignoring the logs would be to use sudo -H
# encoding: utf-8
module Backup
module Database
# Dumps a PostgreSQL database (or all databases) via pg_dump/pg_dumpall,
# optionally compressed, into the model's dump path.
class PostgreSQL < Base
  class Error < Backup::Error; end
  ##
  # Name of the database that needs to get dumped.
  # To dump all databases, set this to `:all` or leave blank.
  # +username+ must be a PostgreSQL superuser to run `pg_dumpall`.
  attr_accessor :name
  ##
  # Credentials for the specified database
  attr_accessor :username, :password
  ##
  # If set the pg_dump(all) command is executed as the given user
  attr_accessor :sudo_user
  ##
  # Connectivity options
  attr_accessor :host, :port, :socket
  ##
  # Tables to skip while dumping the database.
  # If `name` is set to :all (or not specified), these are ignored.
  attr_accessor :skip_tables
  ##
  # Tables to dump. This in only valid if `name` is specified.
  # If none are given, the entire database will be dumped.
  attr_accessor :only_tables
  ##
  # Additional "pg_dump" or "pg_dumpall" options
  attr_accessor :additional_options
  def initialize(model, database_id = nil, &block)
    super
    instance_eval(&block) if block_given?
    @name ||= :all
  end
  ##
  # Performs the pgdump command and outputs the dump file
  # in the +dump_path+ using +dump_filename+.
  #
  # <trigger>/databases/PostgreSQL[-<database_id>].sql[.gz]
  def perform!
    super
    pipeline = Pipeline.new
    dump_ext = 'sql'
    pipeline << (dump_all? ? pgdumpall : pgdump)
    # Chain the model's compressor into the pipeline, extending the
    # output extension (e.g. 'sql' -> 'sql.gz').
    model.compressor.compress_with do |command, ext|
      pipeline << command
      dump_ext << ext
    end if model.compressor
    pipeline << "#{ utility(:cat) } > " +
        "'#{ File.join(dump_path, dump_filename) }.#{ dump_ext }'"
    pipeline.run
    if pipeline.success?
      log!(:finished)
    else
      raise Error, "Dump Failed!\n" + pipeline.error_messages
    end
  end
  # Shell command dumping the single database +name+.
  def pgdump
    "#{ password_option }" +
    "#{ sudo_option }" +
    "#{ utility(:pg_dump) } #{ username_option } #{ connectivity_options } " +
    "#{ user_options } #{ tables_to_dump } #{ tables_to_skip } #{ name }"
  end
  # Shell command dumping every database on the server.
  def pgdumpall
    "#{ password_option }" +
    "#{ sudo_option }" +
    "#{ utility(:pg_dumpall) } #{ username_option } " +
    "#{ connectivity_options } #{ user_options }"
  end
  # PGPASSWORD environment prefix (shell-escaped) when a password is set.
  def password_option
    "PGPASSWORD=#{ Shellwords.escape(password) } " if password
  end
  # Runs the dump as +sudo_user+; -H sets $HOME to that user's home so
  # pg_dump(all) does not warn about the invoking user's cwd.
  def sudo_option
    "#{ utility(:sudo) } -n -H -u #{ sudo_user } " if sudo_user
  end
  def username_option
    "--username=#{ Shellwords.escape(username) }" if username
  end
  # A unix socket, when given, takes precedence over host/port.
  def connectivity_options
    return "--host='#{ socket }'" if socket
    opts = []
    opts << "--host='#{ host }'" if host
    opts << "--port='#{ port }'" if port
    opts.join(' ')
  end
  def user_options
    Array(additional_options).join(' ')
  end
  def tables_to_dump
    Array(only_tables).map do |table|
      "--table='#{ table }'"
    end.join(' ')
  end
  def tables_to_skip
    Array(skip_tables).map do |table|
      "--exclude-table='#{ table }'"
    end.join(' ')
  end
  # True when every database should be dumped (pg_dumpall path).
  def dump_all?
    name == :all
  end
end
end
end
|
require 'active_support/core_ext/hash/indifferent_access'
require 'open3'
require 'fileutils'
require 'tmpdir'
require 'yaml'
require 'shellwords'
require 'buildpack/packager/zip_file_excluder'
module Buildpack
module Packager
class Package < Struct.new(:options)
def copy_buildpack_to_temp_dir(temp_dir)
FileUtils.cp_r(File.join(options[:root_dir], '.'), temp_dir)
FileUtils.cp(options[:manifest_path], File.join(temp_dir, 'manifest.yml'))
end
def build_dependencies(temp_dir)
local_cache_directory = options[:cache_dir] || "#{ENV['HOME']}/.buildpack-packager/cache"
FileUtils.mkdir_p(local_cache_directory)
dependency_dir = File.join(temp_dir, "dependencies")
FileUtils.mkdir_p(dependency_dir)
download_dependencies(manifest[:dependencies], local_cache_directory, dependency_dir)
end
def download_dependencies(dependencies, local_cache_directory, dependency_dir)
dependencies.each do |dependency|
safe_uri = uri_without_credentials(dependency['uri'])
translated_filename = uri_cache_path(safe_uri)
local_cached_file = File.expand_path(File.join(local_cache_directory, translated_filename))
if options[:force_download] || !File.exist?(local_cached_file)
puts "Downloading #{dependency['name']} version #{dependency['version']} from: #{safe_uri}"
download_file(dependency['uri'], local_cached_file)
human_readable_size = `du -h #{local_cached_file} | cut -f1`.strip
puts " Using #{dependency['name']} version #{dependency['version']} with size #{human_readable_size}"
from_local_cache = false
else
human_readable_size = `du -h #{local_cached_file} | cut -f1`.strip
puts "Using #{dependency['name']} version #{dependency['version']} from local cache at: #{local_cached_file} with size #{human_readable_size}"
from_local_cache = true
end
ensure_correct_dependency_checksum({
local_cached_file: local_cached_file,
dependency: dependency,
from_local_cache: from_local_cache
})
FileUtils.cp(local_cached_file, dependency_dir)
end
end
def build_zip_file(temp_dir)
FileUtils.rm_rf(zip_file_path)
zip_files(temp_dir, zip_file_path, manifest[:exclude_files])
end
def list
DependenciesPresenter.new(manifest['dependencies']).present
end
def defaults
DefaultVersionsPresenter.new(manifest['default_versions']).present
end
def zip_file_path
Shellwords.escape(File.join(options[:root_dir], zip_file_name))
end
private
def uri_without_credentials(uri_string)
uri = URI(uri_string)
if uri.userinfo
uri.user = "-redacted-" if uri.user
uri.password = "-redacted-" if uri.password
end
uri.to_s.sub("file:", "file://")
end
def uri_cache_path uri
uri.gsub(/[:\/\?&]/, '_')
end
def manifest
@manifest ||= YAML.load_file(options[:manifest_path]).with_indifferent_access
end
def zip_file_name
"#{manifest[:language]}_buildpack#{cached_identifier}-v#{buildpack_version}.zip"
end
def buildpack_version
File.read("#{options[:root_dir]}/VERSION").chomp
end
def cached_identifier
return '' unless options[:mode] == :cached
'-cached'
end
def ensure_correct_dependency_checksum(local_cached_file:, dependency:, from_local_cache:)
if dependency['md5'] != Digest::MD5.file(local_cached_file).hexdigest
if from_local_cache
FileUtils.rm_rf(local_cached_file)
download_file(dependency['uri'], local_cached_file)
ensure_correct_dependency_checksum({
local_cached_file: local_cached_file,
dependency: dependency,
from_local_cache: false
})
else
raise CheckSumError,
"File: #{dependency['name']}, version: #{dependency['version']} downloaded at location #{dependency['uri']}\n\tis reporting a different checksum than the one specified in the manifest."
end
else
puts " #{dependency['name']} version #{dependency['version']} matches the manifest provided md5 checksum of #{dependency['md5']}\n\n"
end
end
def download_file(url, file)
raise "Failed to download file from #{url}" unless system("curl #{url} -o #{file} -L --fail -f")
end
def zip_files(source_dir, zip_file_path, excluded_files)
excluder = ZipFileExcluder.new
manifest_exclusions = excluder.generate_manifest_exclusions excluded_files
gitfile_exclusions = excluder.generate_exclusions_from_git_files source_dir
all_exclusions = manifest_exclusions + ' ' + gitfile_exclusions
`cd #{source_dir} && zip -r #{zip_file_path} ./ #{all_exclusions}`
end
end
end
end
Add retry to file download
- will help prevent flakiness in CI
Signed-off-by: Sam Smith <03a07629efb0a02c7bbe7cc340a4243da1ab861a@gmail.com>
require 'active_support/core_ext/hash/indifferent_access'
require 'open3'
require 'fileutils'
require 'tmpdir'
require 'yaml'
require 'shellwords'
require 'buildpack/packager/zip_file_excluder'
module Buildpack
module Packager
# Assembles a buildpack zip: copies sources, downloads/caches the
# manifest's dependencies (with md5 verification), and zips the result.
class Package < Struct.new(:options)
  # Copies the buildpack sources and the manifest into +temp_dir+.
  def copy_buildpack_to_temp_dir(temp_dir)
    FileUtils.cp_r(File.join(options[:root_dir], '.'), temp_dir)
    FileUtils.cp(options[:manifest_path], File.join(temp_dir, 'manifest.yml'))
  end
  # Downloads (or reuses from the local cache) every manifest dependency
  # into <temp_dir>/dependencies.
  def build_dependencies(temp_dir)
    local_cache_directory = options[:cache_dir] || "#{ENV['HOME']}/.buildpack-packager/cache"
    FileUtils.mkdir_p(local_cache_directory)
    dependency_dir = File.join(temp_dir, "dependencies")
    FileUtils.mkdir_p(dependency_dir)
    download_dependencies(manifest[:dependencies], local_cache_directory, dependency_dir)
  end
  # Fetches each dependency (unless already cached), verifies its md5
  # against the manifest, and copies it into +dependency_dir+.
  # NOTE(review): local_cached_file is interpolated into `du` unescaped;
  # paths containing spaces/metacharacters would break — confirm.
  def download_dependencies(dependencies, local_cache_directory, dependency_dir)
    dependencies.each do |dependency|
      safe_uri = uri_without_credentials(dependency['uri'])
      translated_filename = uri_cache_path(safe_uri)
      local_cached_file = File.expand_path(File.join(local_cache_directory, translated_filename))
      if options[:force_download] || !File.exist?(local_cached_file)
        puts "Downloading #{dependency['name']} version #{dependency['version']} from: #{safe_uri}"
        download_file(dependency['uri'], local_cached_file)
        human_readable_size = `du -h #{local_cached_file} | cut -f1`.strip
        puts " Using #{dependency['name']} version #{dependency['version']} with size #{human_readable_size}"
        from_local_cache = false
      else
        human_readable_size = `du -h #{local_cached_file} | cut -f1`.strip
        puts "Using #{dependency['name']} version #{dependency['version']} from local cache at: #{local_cached_file} with size #{human_readable_size}"
        from_local_cache = true
      end
      ensure_correct_dependency_checksum({
        local_cached_file: local_cached_file,
        dependency: dependency,
        from_local_cache: from_local_cache
      })
      FileUtils.cp(local_cached_file, dependency_dir)
    end
  end
  # Rebuilds the zip from scratch, honouring manifest and git-file
  # exclusions.
  def build_zip_file(temp_dir)
    FileUtils.rm_rf(zip_file_path)
    zip_files(temp_dir, zip_file_path, manifest[:exclude_files])
  end
  def list
    DependenciesPresenter.new(manifest['dependencies']).present
  end
  def defaults
    DefaultVersionsPresenter.new(manifest['default_versions']).present
  end
  # Shell-escaped absolute path of the zip to produce.
  def zip_file_path
    Shellwords.escape(File.join(options[:root_dir], zip_file_name))
  end
  private
  # Redacts userinfo so credentials never reach logs or cache names.
  def uri_without_credentials(uri_string)
    uri = URI(uri_string)
    if uri.userinfo
      uri.user = "-redacted-" if uri.user
      uri.password = "-redacted-" if uri.password
    end
    uri.to_s.sub("file:", "file://")
  end
  # Filesystem-safe cache filename derived from the URI.
  def uri_cache_path uri
    uri.gsub(/[:\/\?&]/, '_')
  end
  def manifest
    @manifest ||= YAML.load_file(options[:manifest_path]).with_indifferent_access
  end
  def zip_file_name
    "#{manifest[:language]}_buildpack#{cached_identifier}-v#{buildpack_version}.zip"
  end
  def buildpack_version
    File.read("#{options[:root_dir]}/VERSION").chomp
  end
  def cached_identifier
    return '' unless options[:mode] == :cached
    '-cached'
  end
  # Verifies the cached file's md5; a stale cache entry is re-downloaded
  # once, after which a mismatch raises CheckSumError.
  def ensure_correct_dependency_checksum(local_cached_file:, dependency:, from_local_cache:)
    if dependency['md5'] != Digest::MD5.file(local_cached_file).hexdigest
      if from_local_cache
        FileUtils.rm_rf(local_cached_file)
        download_file(dependency['uri'], local_cached_file)
        ensure_correct_dependency_checksum({
          local_cached_file: local_cached_file,
          dependency: dependency,
          from_local_cache: false
        })
      else
        raise CheckSumError,
          "File: #{dependency['name']}, version: #{dependency['version']} downloaded at location #{dependency['uri']}\n\tis reporting a different checksum than the one specified in the manifest."
      end
    else
      puts " #{dependency['name']} version #{dependency['version']} matches the manifest provided md5 checksum of #{dependency['md5']}\n\n"
    end
  end
  # Downloads quietly with up to 15 retries (2s apart) to smooth over
  # transient network flakiness; raises when curl ultimately fails.
  # NOTE(review): url/file are interpolated unescaped — assumed free of
  # shell metacharacters; confirm or Shellwords.escape them.
  def download_file(url, file)
    raise "Failed to download file from #{url}" unless system("curl -s --retry 15 --retry-delay 2 #{url} -o #{file} -L --fail -f")
  end
  def zip_files(source_dir, zip_file_path, excluded_files)
    excluder = ZipFileExcluder.new
    manifest_exclusions = excluder.generate_manifest_exclusions excluded_files
    gitfile_exclusions = excluder.generate_exclusions_from_git_files source_dir
    all_exclusions = manifest_exclusions + ' ' + gitfile_exclusions
    `cd #{source_dir} && zip -r #{zip_file_path} ./ #{all_exclusions}`
  end
end
end
end
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
module OSGi
#:nodoc:
# This module is used to identify the packaging task
# that represent a bundle packaging.
#
# Tasks representing bundle packaging should include this module
# to be used by the buildr system properly.
#
module BundlePackaging
end
#monkey patch the Unzip task to support unzipping tgz
#TODO: find out how to apply the patterns (include/exclude) and move this to buildr eventually
# Monkey patch: teach Buildr's Unzip task to unpack gzipped tarballs.
class Buildr::Unzip
  # Extracts the archive into the target directory: gzipped tarballs
  # (*.tgz / *.tar.gz) via Minitar, anything else as a zip file.
  def extract
    # If no paths specified, then no include/exclude patterns
    # specified. Nothing will happen unless we include all files.
    if @paths.empty?
      @paths[nil] = FromPath.new(self, nil)
    end
    # Otherwise, empty unzip creates target as a file when touching.
    mkpath target.to_s
    zip_file_path = zip_file.to_s
    # The previous pattern /\.[t?]gz$/ used a character class by mistake
    # ("t" or "?" before "gz"), so it also matched ".?gz" and missed
    # ".tar.gz"; match both tarball spellings explicitly instead.
    if zip_file_path.match /\.(tgz|tar\.gz)$/
      #un-tar.gz
      @paths.each do |path, patterns|
        patterns.include = ['*'] if patterns.include.nil?
        patterns.exclude = [] if patterns.exclude.nil?
      end
      Zlib::GzipReader.open(zip_file_path) { |tar|
        Archive::Tar::Minitar::Input.open(tar) do |inp|
          inp.each do |entry|
            if included?(entry.full_name)
              trace "Extracting #{entry.full_name}"
              inp.extract_entry(target.to_s, entry)
            end
          end
        end
      }
    else
      Zip::ZipFile.open(zip_file.to_s) do |zip|
        entries = zip.collect
        @paths.each do |path, patterns|
          patterns.map(entries).each do |dest, entry|
            next if entry.directory?
            dest = File.expand_path(dest, target.to_s)
            trace "Extracting #{dest}"
            mkpath File.dirname(dest) rescue nil
            entry.restore_permissions = true
            entry.extract(dest) { true }
          end
        end
      end
    end
    # Let other tasks know we updated the target directory.
    touch target.to_s
  end
  #reads the includes/excludes and apply them to the entry_name
  # True when +entry_name+ falls under a registered path and matches an
  # include pattern without matching any exclude pattern.
  def included?(entry_name)
    @paths.each do |path, patterns|
      return true if path.nil?
      if entry_name =~ /^#{path}/
        if patterns.include.any? { |pattern| File.fnmatch(pattern, entry_name) } &&
           !patterns.exclude.any? { |pattern| File.fnmatch(pattern, entry_name) }
          # trace "tar_entry.full_name " + entry_name + " is included"
          return true
        end
      end
    end
    # trace "tar_entry.full_name " + entry_name + " is excluded"
    return false
  end
end
#
# The task to package a project
# as a OSGi bundle.
#
class BundleTask < ::Buildr::Packaging::Java::JarTask
  include BundlePackaging
  # Artifacts to include under /lib.
  attr_accessor :libs
  def initialize(*args) #:nodoc:
    super
    @libs = []
    # Resolve @libs lazily, at prepare time, so users can assign them
    # after the task is defined.
    prepare do
      unless @libs.nil? || @libs.empty?
        artifacts = Buildr.artifacts(@libs)
        path('lib').include artifacts
        # Bundle-Classpath lists "." plus every embedded lib jar.
        manifest["Bundle-Classpath"] = [".", artifacts.collect {|a| "lib/#{File.basename(a.to_s)}"}].flatten.join(",")
      end
    end
  end
end
# Project extension adding the :bundle packaging type to Buildr projects.
module ActAsOSGiBundle
  include Extension
  protected
  # returns true if the project defines at least one bundle packaging.
  # We keep this method protected and we will call it using send.
  def is_packaging_osgi_bundle()
    packages.each {|package| return true if package.is_a?(::OSGi::BundlePackaging)}
    return false
  end
  # Builds the BundleTask for +file_name+: root-level project resources
  # (minus sources and build output), extra resources from the Java
  # source tree, and a manifest merged from META-INF/MANIFEST.MF when
  # one exists; Bundle-Version is always forced to the project version.
  def package_as_bundle(file_name)
    task = BundleTask.define_task(file_name).tap do |plugin|
      # Custom resource task to grab everything located at the root of the project
      # while leaving the user also specify a resources directory, in case we are in face
      # of a complex project.
      # This is a bit hacky and not fully respecting the project layout, so we might find some alternative later
      # to do the job by extending the layout object, and maybe making this resource task available as a subclass
      # of ResourcesTask.
      p_r = ResourcesTask.define_task
      p_r.send :associate_with, project, :main
      p_r.from("#{project.base_dir}").exclude("**/.*").exclude("**/*.jar").exclude("**/*.java")
      p_r.exclude("src/**").exclude("*src").exclude("*src/**").exclude("build.properties")
      p_r.exclude("bin").exclude("bin/**")
      p_r.exclude("target/**").exclude("target")
      # Non-Java files living alongside the Java sources.
      properties = ResourcesTask.define_task
      properties.send :associate_with, project, :main
      properties.from(File.join(project.base_dir, project.layout[:source, :main, :java])).
        exclude("**/.*").exclude("**/*.java") if File.exists? File.join(project.base_dir, project.layout[:source, :main, :java])
      manifest_location = File.join(project.base_dir, "META-INF", "MANIFEST.MF")
      manifest = project.manifest
      if File.exists?(manifest_location)
        read_m = ::Buildr::Packaging::Java::Manifest.parse(File.read(manifest_location)).main
        manifest = project.manifest.merge(read_m)
      end
      manifest["Bundle-Version"] = project.version # the version of the bundle packaged is ALWAYS the version of the project.
      manifest["Bundle-SymbolicName"] ||= project.name.split(":").last # if it was resetted to nil, we force the id to be added back.
      plugin.with :manifest=> manifest, :meta_inf=>meta_inf
      plugin.with [compile.target, resources.target, p_r.target, properties.target].compact
    end
  end
  # Spec for the :bundle packaging: a jar whose artifact id is the last
  # segment of the project name.
  def package_as_bundle_spec(spec) #:nodoc:
    spec.merge(:type=>:jar, :id => name.split(":").last)
  end
  # Seed OSGi manifest headers from project metadata before the user's
  # project definition runs (so they remain overridable).
  before_define do |project|
    project.manifest["Bundle-SymbolicName"] = project.name.split(":").last
    project.manifest["Bundle-Name"] = project.comment || project.name
    project.manifest["Bundle-Version"] = project.version
  end
end
module BundleProjects #:nodoc
  # Returns the projects
  # that define an OSGi bundle packaging.
  #
  def bundle_projects
    Buildr.projects.flatten.select {|project|
      project.send :is_packaging_osgi_bundle
    }
  end
  # Usable both as a mixin method and as BundleProjects.bundle_projects.
  module_function :bundle_projects
end
end
# Give every Buildr project the OSGi bundle packaging behaviour.
class Buildr::Project
  include OSGi::ActAsOSGiBundle
end
# Convenience namespace exposing bundle_projects to buildfiles.
module Buildr4OSGi
  include OSGi::BundleProjects
end
Also support tar.gz archives, not just tgz files.
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
module OSGi
#:nodoc:
# This module is used to identify the packaging task
# that represent a bundle packaging.
#
# Tasks representing bundle packaging should include this module
# to be used by the buildr system properly.
#
module BundlePackaging
end
#monkey patch the Unzip task to support unzipping tgz
#TODO: find out how to apply the patterns (include/exclude) and move this to buildr eventually
# Monkey patch: teach Buildr's Unzip task to unpack gzipped tarballs.
class Buildr::Unzip
  # Extracts the archive into the target directory: gzipped tarballs
  # (*.tgz / *.tar.gz) via Minitar, anything else as a zip file.
  def extract
    # If no paths specified, then no include/exclude patterns
    # specified. Nothing will happen unless we include all files.
    if @paths.empty?
      @paths[nil] = FromPath.new(self, nil)
    end
    # Otherwise, empty unzip creates target as a file when touching.
    mkpath target.to_s
    zip_file_path = zip_file.to_s
    # One anchored pattern for both tarball spellings. (The or-ed pair
    # /\.[t?]gz$/ and /\.tar\.gz$/ contained a character-class typo —
    # "[t?]" matches "t" or "?" — which also matched ".?gz".)
    if zip_file_path.match /\.(tgz|tar\.gz)$/
      #un-tar.gz
      @paths.each do |path, patterns|
        patterns.include = ['*'] if patterns.include.nil?
        patterns.exclude = [] if patterns.exclude.nil?
      end
      Zlib::GzipReader.open(zip_file_path) { |tar|
        Archive::Tar::Minitar::Input.open(tar) do |inp|
          inp.each do |entry|
            if included?(entry.full_name)
              trace "Extracting #{entry.full_name}"
              inp.extract_entry(target.to_s, entry)
            end
          end
        end
      }
    else
      Zip::ZipFile.open(zip_file.to_s) do |zip|
        entries = zip.collect
        @paths.each do |path, patterns|
          patterns.map(entries).each do |dest, entry|
            next if entry.directory?
            dest = File.expand_path(dest, target.to_s)
            trace "Extracting #{dest}"
            mkpath File.dirname(dest) rescue nil
            entry.restore_permissions = true
            entry.extract(dest) { true }
          end
        end
      end
    end
    # Let other tasks know we updated the target directory.
    touch target.to_s
  end
  #reads the includes/excludes and apply them to the entry_name
  # True when +entry_name+ falls under a registered path and matches an
  # include pattern without matching any exclude pattern.
  def included?(entry_name)
    @paths.each do |path, patterns|
      return true if path.nil?
      if entry_name =~ /^#{path}/
        if patterns.include.any? { |pattern| File.fnmatch(pattern, entry_name) } &&
           !patterns.exclude.any? { |pattern| File.fnmatch(pattern, entry_name) }
          # trace "tar_entry.full_name " + entry_name + " is included"
          return true
        end
      end
    end
    # trace "tar_entry.full_name " + entry_name + " is excluded"
    return false
  end
end
#
# The task to package a project
# as a OSGi bundle.
#
class BundleTask < ::Buildr::Packaging::Java::JarTask
include BundlePackaging
# Artifacts to include under /lib.
attr_accessor :libs
def initialize(*args) #:nodoc:
super
@libs = []
prepare do
unless @libs.nil? || @libs.empty?
artifacts = Buildr.artifacts(@libs)
path('lib').include artifacts
manifest["Bundle-Classpath"] = [".", artifacts.collect {|a| "lib/#{File.basename(a.to_s)}"}].flatten.join(",")
end
end
end
end
module ActAsOSGiBundle
include Extension
protected
# returns true if the project defines at least one bundle packaging.
# We keep this method protected and we will call it using send.
def is_packaging_osgi_bundle()
packages.each {|package| return true if package.is_a?(::OSGi::BundlePackaging)}
return false
end
def package_as_bundle(file_name)
task = BundleTask.define_task(file_name).tap do |plugin|
# Custom resource task to grab everything located at the root of the project
# while leaving the user also specify a resources directory, in case we are in face
# of a complex project.
# This is a bit hacky and not fully respecting the project layout, so we might find some alternative later
# to do the job by extending the layout object, and maybe making this resource task available as a subclass
# of ResourcesTask.
p_r = ResourcesTask.define_task
p_r.send :associate_with, project, :main
p_r.from("#{project.base_dir}").exclude("**/.*").exclude("**/*.jar").exclude("**/*.java")
p_r.exclude("src/**").exclude("*src").exclude("*src/**").exclude("build.properties")
p_r.exclude("bin").exclude("bin/**")
p_r.exclude("target/**").exclude("target")
properties = ResourcesTask.define_task
properties.send :associate_with, project, :main
properties.from(File.join(project.base_dir, project.layout[:source, :main, :java])).
exclude("**/.*").exclude("**/*.java") if File.exists? File.join(project.base_dir, project.layout[:source, :main, :java])
manifest_location = File.join(project.base_dir, "META-INF", "MANIFEST.MF")
manifest = project.manifest
if File.exists?(manifest_location)
read_m = ::Buildr::Packaging::Java::Manifest.parse(File.read(manifest_location)).main
manifest = project.manifest.merge(read_m)
end
manifest["Bundle-Version"] = project.version # the version of the bundle packaged is ALWAYS the version of the project.
manifest["Bundle-SymbolicName"] ||= project.name.split(":").last # if it was resetted to nil, we force the id to be added back.
plugin.with :manifest=> manifest, :meta_inf=>meta_inf
plugin.with [compile.target, resources.target, p_r.target, properties.target].compact
end
end
def package_as_bundle_spec(spec) #:nodoc:
spec.merge(:type=>:jar, :id => name.split(":").last)
end
before_define do |project|
project.manifest["Bundle-SymbolicName"] = project.name.split(":").last
project.manifest["Bundle-Name"] = project.comment || project.name
project.manifest["Bundle-Version"] = project.version
end
end
module BundleProjects #:nodoc
# Returns the projects
# that define an OSGi bundle packaging.
#
def bundle_projects
Buildr.projects.flatten.select {|project|
project.send :is_packaging_osgi_bundle
}
end
module_function :bundle_projects
end
end
class Buildr::Project
include OSGi::ActAsOSGiBundle
end
module Buildr4OSGi
include OSGi::BundleProjects
end |
module Capybara::Harness::Dom
class Field
include Capybara::DSL
attr_accessor :name, :label, :data_type, :through
def initialize(options = {})
self.name = options.delete(:name)
self.label = options.fetch(:label, name.to_s.titleize)
self.data_type = options.fetch(:data_type, :string)
self.through = options.delete(:through)
end
def fill(attrs = {})
value = extract_value(attrs)
case data_type
when :string then
fill_in(label, :with => value)
when :select then
select(value, :from => label)
end
end
def through
@through.to_sym if @through
end
def name
@name.to_sym
end
private
def has_attribute?(attrs)
return attrs[through].has_key?(name) if through && attrs.has_key?(through)
attrs.has_key?(name)
end
def extract_value(attrs)
attrs = attrs[through] unless through.nil?
attrs[name.to_sym]
end
end
end
Change :data_type to :as
module Capybara::Harness::Dom
class Field
include Capybara::DSL
attr_accessor :name, :label, :data_type, :through
attr_accessor :name, :label, :as, :through
def initialize(options = {})
self.name = options.delete(:name)
self.label = options.fetch(:label, name.to_s.titleize)
self.through = options.delete(:through)
@as = options.fetch(:as, :string).to_sym
end
def fill(attrs = {})
value = extract_value(attrs)
case as
when :string then
fill_in(label, :with => value)
when :select then
select(value, :from => label)
end
end
def through
@through.to_sym if @through
end
def name
@name.to_sym
end
private
def has_attribute?(attrs)
return attrs[through].has_key?(name) if through && attrs.has_key?(through)
attrs.has_key?(name)
end
def extract_value(attrs)
attrs = attrs[through] unless through.nil?
attrs[name.to_sym]
end
end
end |
require 'active_support'
#require 'action_controller/record_identifier'
require 'cheddargetter_client'
#require 'rails/record_identifier'
require 'rails/naming'
module CheddargetterClientRails
autoload :Subscription, 'cheddargetter_client_rails/subscription'
def self.included(base)
attr_accessor :has_subscription_options
base.extend ClassMethods
def subscription
@subscription ||= CheddargetterClientRails::Subscription.new
end
def subscription=(value)
@subscription = value
end
def validate_subscription
supplement_subscription_fields
if !skip_cheddargetter && new_record? && !subscription.valid?
errors.add(:subscription, 'problem')
end
end
def supplement_subscription_fields
if subscription.is_a?(ActiveSupport::HashWithIndifferentAccess)
self.subscription = CheddargetterClientRails::Subscription.new(subscription)
end
self.class.shared_columns.each do |subscription_column, user_attribute|
if(subscription_column == :planCode && user_attribute.is_a?(String)) #user can specify planCode as a string
subscription.send(subscription_column.to_s + '=', user_attribute)
else
subscription.send(subscription_column.to_s + '=', send(user_attribute))
end
end
end
def create_subscription
raise ArgumentError, 'Customer code is not set on record.' if !customer_code_column_value && !subscription.customerCode
subscription.customerCode = customer_code_column_value if !subscription.customerCode
subscription.create unless skip_cheddargetter
end
def current_subscription
@current_subscription ||= CheddargetterClientRails::Subscription.get(customer_code_column_value) if customer_code_column_value
end
def destroy_subscription
current_subscription.try(:destroy)
end
def customer_code_column_value
if self.class.send(:customer_code_column)
value = send(self.class.send(:customer_code_column))
value.to_s if value.try(:to_s).present?
end
end
def build_subscription(attributes_hash)
# set attributes from current cheddargetter subscription, then
# replaces any values with supplied data
new_subscription = CheddargetterClientRails::Subscription.new
if old_subscription = current_subscription
old_subscription.instance_variables_hash.each do |key, value|
new_subscription.send(key.to_s + '=', value)
end
end
attributes_hash.each do |key, value|
new_subscription.send(key.to_s + '=', value)
end
self.subscription = new_subscription
new_subscription
end
def save_subscription(attributes_hash)
build_subscription(attributes_hash)
subscription.save
end
def update_subscription
if !new_record?
subscription.customerCode = customer_code_column_value if subscription.customerCode.blank? and customer_code_column_value.present?
if shared_attributes_have_changed? || subscription.fields_present?
subscription.update
end
end
end
def shared_attributes_have_changed?
self.class.shared_columns.collect do |cgkey, column|
self.send(column.to_s + '_changed?')
end.include?(true)
end
end
module ClassMethods
def has_subscription(args = {})
self.class.send(:attr_accessor, :has_subscription_options)
self.has_subscription_options = args
self.customer_code_column = args.delete(:customerCode) || :id
raise ArgumentError.new("Record does not respond to #{customer_code_column.to_s}.") if !responds_to_customer_code_column?
shared = {}
shared[:email] = args.delete(:email) || :email
shared[:firstName] = args.delete(:firstName) || :first_name
shared[:lastName] = args.delete(:lastName) || :last_name
shared[:planCode] = args.delete(:planCode) || :plan_code
args.each do |key, value|
shared[key] = value
end
self.shared_columns = shared
attr_accessor :skip_cheddargetter
attr_accessible :subscription
validate :validate_subscription
after_create :create_subscription
before_destroy :destroy_subscription
after_save :update_subscription
end
def responds_to_customer_code_column?
self.instance_methods.include?(customer_code_column.to_sym) ||
self.column_names.include?(customer_code_column.to_s)
end
def customer_code_column
@customer_code_column
end
def customer_code_column=(column)
@customer_code_column = column
end
def shared_columns
@shared_columns
end
def shared_columns=(columns)
@shared_columns = columns
end
end
end
class ActiveRecord::Base
include CheddargetterClientRails
end
Removing commented lines.
require 'active_support'
require 'cheddargetter_client'
require 'rails/naming'
module CheddargetterClientRails
autoload :Subscription, 'cheddargetter_client_rails/subscription'
def self.included(base)
attr_accessor :has_subscription_options
base.extend ClassMethods
def subscription
@subscription ||= CheddargetterClientRails::Subscription.new
end
def subscription=(value)
@subscription = value
end
def validate_subscription
supplement_subscription_fields
if !skip_cheddargetter && new_record? && !subscription.valid?
errors.add(:subscription, 'problem')
end
end
def supplement_subscription_fields
if subscription.is_a?(ActiveSupport::HashWithIndifferentAccess)
self.subscription = CheddargetterClientRails::Subscription.new(subscription)
end
self.class.shared_columns.each do |subscription_column, user_attribute|
if(subscription_column == :planCode && user_attribute.is_a?(String)) #user can specify planCode as a string
subscription.send(subscription_column.to_s + '=', user_attribute)
else
subscription.send(subscription_column.to_s + '=', send(user_attribute))
end
end
end
def create_subscription
raise ArgumentError, 'Customer code is not set on record.' if !customer_code_column_value && !subscription.customerCode
subscription.customerCode = customer_code_column_value if !subscription.customerCode
subscription.create unless skip_cheddargetter
end
def current_subscription
@current_subscription ||= CheddargetterClientRails::Subscription.get(customer_code_column_value) if customer_code_column_value
end
def destroy_subscription
current_subscription.try(:destroy)
end
def customer_code_column_value
if self.class.send(:customer_code_column)
value = send(self.class.send(:customer_code_column))
value.to_s if value.try(:to_s).present?
end
end
def build_subscription(attributes_hash)
# set attributes from current cheddargetter subscription, then
# replaces any values with supplied data
new_subscription = CheddargetterClientRails::Subscription.new
if old_subscription = current_subscription
old_subscription.instance_variables_hash.each do |key, value|
new_subscription.send(key.to_s + '=', value)
end
end
attributes_hash.each do |key, value|
new_subscription.send(key.to_s + '=', value)
end
self.subscription = new_subscription
new_subscription
end
def save_subscription(attributes_hash)
build_subscription(attributes_hash)
subscription.save
end
def update_subscription
if !new_record?
subscription.customerCode = customer_code_column_value if subscription.customerCode.blank? and customer_code_column_value.present?
if shared_attributes_have_changed? || subscription.fields_present?
subscription.update
end
end
end
def shared_attributes_have_changed?
self.class.shared_columns.collect do |cgkey, column|
self.send(column.to_s + '_changed?')
end.include?(true)
end
end
module ClassMethods
def has_subscription(args = {})
self.class.send(:attr_accessor, :has_subscription_options)
self.has_subscription_options = args
self.customer_code_column = args.delete(:customerCode) || :id
raise ArgumentError.new("Record does not respond to #{customer_code_column.to_s}.") if !responds_to_customer_code_column?
shared = {}
shared[:email] = args.delete(:email) || :email
shared[:firstName] = args.delete(:firstName) || :first_name
shared[:lastName] = args.delete(:lastName) || :last_name
shared[:planCode] = args.delete(:planCode) || :plan_code
args.each do |key, value|
shared[key] = value
end
self.shared_columns = shared
attr_accessor :skip_cheddargetter
attr_accessible :subscription
validate :validate_subscription
after_create :create_subscription
before_destroy :destroy_subscription
after_save :update_subscription
end
def responds_to_customer_code_column?
self.instance_methods.include?(customer_code_column.to_sym) ||
self.column_names.include?(customer_code_column.to_s)
end
def customer_code_column
@customer_code_column
end
def customer_code_column=(column)
@customer_code_column = column
end
def shared_columns
@shared_columns
end
def shared_columns=(columns)
@shared_columns = columns
end
end
end
class ActiveRecord::Base
include CheddargetterClientRails
end |
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Chef
class DelayedEvaluator < Proc
end
module Mixin
module ParamsValidate
# Takes a hash of options, along with a map to validate them. Returns the original
# options hash, plus any changes that might have been made (through things like setting
# default values in the validation map)
#
# For example:
#
# validate({ :one => "neat" }, { :one => { :kind_of => String }})
#
# Would raise an exception if the value of :one above is not a kind_of? string. Valid
# map options are:
#
# :default:: Sets the default value for this parameter.
# :callbacks:: Takes a hash of Procs, which should return true if the argument is valid.
# The key will be inserted into the error message if the Proc does not return true:
# "Option #{key}'s value #{value} #{message}!"
# :kind_of:: Ensure that the value is a kind_of?(Whatever). If passed an array, it will ensure
# that the value is one of those types.
# :respond_to:: Ensure that the value has a given method. Takes one method name or an array of
# method names.
# :required:: Raise an exception if this parameter is missing. Valid values are true or false,
# by default, options are not required.
# :regex:: Match the value of the paramater against a regular expression.
# :equal_to:: Match the value of the paramater with ==. An array means it can be equal to any
# of the values.
def validate(opts, map)
#--
# validate works by taking the keys in the validation map, assuming it's a hash, and
# looking for _pv_:symbol as methods. Assuming it find them, it calls the right
# one.
#++
raise ArgumentError, "Options must be a hash" unless opts.kind_of?(Hash)
raise ArgumentError, "Validation Map must be a hash" unless map.kind_of?(Hash)
map.each do |key, validation|
unless key.kind_of?(Symbol) || key.kind_of?(String)
raise ArgumentError, "Validation map keys must be symbols or strings!"
end
case validation
when true
_pv_required(opts, key)
when false
true
when Hash
validation.each do |check, carg|
check_method = "_pv_#{check.to_s}"
if self.respond_to?(check_method, true)
self.send(check_method, opts, key, carg)
else
raise ArgumentError, "Validation map has unknown check: #{check}"
end
end
end
end
opts
end
def lazy(&block)
DelayedEvaluator.new(&block)
end
def set_or_return(symbol, arg, validation, &block)
iv_symbol = "@#{symbol.to_s}".to_sym
if arg == nil && self.instance_variable_defined?(iv_symbol) == true && !block_given?
ivar = self.instance_variable_get(iv_symbol)
if(ivar.is_a?(DelayedEvaluator))
validate({ symbol => ivar.call }, { symbol => validation })[symbol]
else
ivar
end
else
if(arg.is_a?(DelayedEvaluator))
val = arg
elsif(block_given?)
val = DelayedEvaluator.new(&block)
else
val = validate({ symbol => arg }, { symbol => validation })[symbol]
end
self.instance_variable_set(iv_symbol, val)
end
end
private
# Return the value of a parameter, or nil if it doesn't exist.
def _pv_opts_lookup(opts, key)
if opts.has_key?(key.to_s)
opts[key.to_s]
elsif opts.has_key?(key.to_sym)
opts[key.to_sym]
else
nil
end
end
# Raise an exception if the parameter is not found.
def _pv_required(opts, key, is_required=true)
if is_required
if (opts.has_key?(key.to_s) && !opts[key.to_s].nil?) ||
(opts.has_key?(key.to_sym) && !opts[key.to_sym].nil?)
true
else
raise Exceptions::ValidationFailed, "Required argument #{key} is missing!"
end
end
end
def _pv_equal_to(opts, key, to_be)
value = _pv_opts_lookup(opts, key)
unless value.nil?
passes = false
Array(to_be).each do |tb|
passes = true if value == tb
end
unless passes
raise Exceptions::ValidationFailed, "Option #{key} must be equal to one of: #{to_be.join(", ")}! You passed #{value.inspect}."
end
end
end
# Raise an exception if the parameter is not a kind_of?(to_be)
def _pv_kind_of(opts, key, to_be)
value = _pv_opts_lookup(opts, key)
unless value.nil?
passes = false
Array(to_be).each do |tb|
passes = true if value.kind_of?(tb)
end
unless passes
raise Exceptions::ValidationFailed, "Option #{key} must be a kind of #{to_be}! You passed #{value.inspect}."
end
end
end
# Raise an exception if the parameter does not respond to a given set of methods.
def _pv_respond_to(opts, key, method_name_list)
value = _pv_opts_lookup(opts, key)
unless value.nil?
Array(method_name_list).each do |method_name|
unless value.respond_to?(method_name)
raise Exceptions::ValidationFailed, "Option #{key} must have a #{method_name} method!"
end
end
end
end
# Assert that parameter returns false when passed a predicate method.
# For example, :cannot_be => :blank will raise a Exceptions::ValidationFailed
# error value.blank? returns a 'truthy' (not nil or false) value.
#
# Note, this will *PASS* if the object doesn't respond to the method.
# So, to make sure a value is not nil and not blank, you need to do
# both :cannot_be => :blank *and* :cannot_be => :nil (or :required => true)
def _pv_cannot_be(opts, key, predicate_method_base_name)
value = _pv_opts_lookup(opts, key)
predicate_method = (predicate_method_base_name.to_s + "?").to_sym
if value.respond_to?(predicate_method)
if value.send(predicate_method)
raise Exceptions::ValidationFailed, "Option #{key} cannot be #{predicate_method_base_name}"
end
end
end
# Assign a default value to a parameter.
def _pv_default(opts, key, default_value)
value = _pv_opts_lookup(opts, key)
if value == nil
opts[key] = default_value
end
end
# Check a parameter against a regular expression.
def _pv_regex(opts, key, regex)
value = _pv_opts_lookup(opts, key)
if value != nil
passes = false
[ regex ].flatten.each do |r|
if value != nil
if r.match(value.to_s)
passes = true
end
end
end
unless passes
raise Exceptions::ValidationFailed, "Option #{key}'s value #{value} does not match regular expression #{regex.inspect}"
end
end
end
# Check a parameter against a hash of proc's.
def _pv_callbacks(opts, key, callbacks)
raise ArgumentError, "Callback list must be a hash!" unless callbacks.kind_of?(Hash)
value = _pv_opts_lookup(opts, key)
if value != nil
callbacks.each do |message, zeproc|
if zeproc.call(value) != true
raise Exceptions::ValidationFailed, "Option #{key}'s value #{value} #{message}!"
end
end
end
end
# Allow a parameter to default to @name
def _pv_name_attribute(opts, key, is_name_attribute=true)
if is_name_attribute
if opts[key] == nil
opts[key] = self.instance_variable_get("@name")
end
end
end
end
end
end
Remove block parameter support from #set_or_return
Conflicts:
lib/chef/mixin/params_validate.rb
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Chef
class DelayedEvaluator < Proc
end
module Mixin
module ParamsValidate
# Takes a hash of options, along with a map to validate them. Returns the original
# options hash, plus any changes that might have been made (through things like setting
# default values in the validation map)
#
# For example:
#
# validate({ :one => "neat" }, { :one => { :kind_of => String }})
#
# Would raise an exception if the value of :one above is not a kind_of? string. Valid
# map options are:
#
# :default:: Sets the default value for this parameter.
# :callbacks:: Takes a hash of Procs, which should return true if the argument is valid.
# The key will be inserted into the error message if the Proc does not return true:
# "Option #{key}'s value #{value} #{message}!"
# :kind_of:: Ensure that the value is a kind_of?(Whatever). If passed an array, it will ensure
# that the value is one of those types.
# :respond_to:: Ensure that the value has a given method. Takes one method name or an array of
# method names.
# :required:: Raise an exception if this parameter is missing. Valid values are true or false,
# by default, options are not required.
# :regex:: Match the value of the paramater against a regular expression.
# :equal_to:: Match the value of the paramater with ==. An array means it can be equal to any
# of the values.
def validate(opts, map)
#--
# validate works by taking the keys in the validation map, assuming it's a hash, and
# looking for _pv_:symbol as methods. Assuming it find them, it calls the right
# one.
#++
raise ArgumentError, "Options must be a hash" unless opts.kind_of?(Hash)
raise ArgumentError, "Validation Map must be a hash" unless map.kind_of?(Hash)
map.each do |key, validation|
unless key.kind_of?(Symbol) || key.kind_of?(String)
raise ArgumentError, "Validation map keys must be symbols or strings!"
end
case validation
when true
_pv_required(opts, key)
when false
true
when Hash
validation.each do |check, carg|
check_method = "_pv_#{check.to_s}"
if self.respond_to?(check_method, true)
self.send(check_method, opts, key, carg)
else
raise ArgumentError, "Validation map has unknown check: #{check}"
end
end
end
end
opts
end
def lazy(&block)
DelayedEvaluator.new(&block)
end
def set_or_return(symbol, arg, validation)
iv_symbol = "@#{symbol.to_s}".to_sym
if arg == nil && self.instance_variable_defined?(iv_symbol) == true
ivar = self.instance_variable_get(iv_symbol)
if(ivar.is_a?(DelayedEvaluator))
validate({ symbol => ivar.call }, { symbol => validation })[symbol]
else
ivar
end
else
if(arg.is_a?(DelayedEvaluator))
val = arg
else
val = validate({ symbol => arg }, { symbol => validation })[symbol]
end
self.instance_variable_set(iv_symbol, val)
end
end
private
# Return the value of a parameter, or nil if it doesn't exist.
def _pv_opts_lookup(opts, key)
if opts.has_key?(key.to_s)
opts[key.to_s]
elsif opts.has_key?(key.to_sym)
opts[key.to_sym]
else
nil
end
end
# Raise an exception if the parameter is not found.
def _pv_required(opts, key, is_required=true)
if is_required
if (opts.has_key?(key.to_s) && !opts[key.to_s].nil?) ||
(opts.has_key?(key.to_sym) && !opts[key.to_sym].nil?)
true
else
raise Exceptions::ValidationFailed, "Required argument #{key} is missing!"
end
end
end
def _pv_equal_to(opts, key, to_be)
value = _pv_opts_lookup(opts, key)
unless value.nil?
passes = false
Array(to_be).each do |tb|
passes = true if value == tb
end
unless passes
raise Exceptions::ValidationFailed, "Option #{key} must be equal to one of: #{to_be.join(", ")}! You passed #{value.inspect}."
end
end
end
# Raise an exception if the parameter is not a kind_of?(to_be)
def _pv_kind_of(opts, key, to_be)
value = _pv_opts_lookup(opts, key)
unless value.nil?
passes = false
Array(to_be).each do |tb|
passes = true if value.kind_of?(tb)
end
unless passes
raise Exceptions::ValidationFailed, "Option #{key} must be a kind of #{to_be}! You passed #{value.inspect}."
end
end
end
# Raise an exception if the parameter does not respond to a given set of methods.
def _pv_respond_to(opts, key, method_name_list)
value = _pv_opts_lookup(opts, key)
unless value.nil?
Array(method_name_list).each do |method_name|
unless value.respond_to?(method_name)
raise Exceptions::ValidationFailed, "Option #{key} must have a #{method_name} method!"
end
end
end
end
# Assert that parameter returns false when passed a predicate method.
# For example, :cannot_be => :blank will raise a Exceptions::ValidationFailed
# error value.blank? returns a 'truthy' (not nil or false) value.
#
# Note, this will *PASS* if the object doesn't respond to the method.
# So, to make sure a value is not nil and not blank, you need to do
# both :cannot_be => :blank *and* :cannot_be => :nil (or :required => true)
def _pv_cannot_be(opts, key, predicate_method_base_name)
value = _pv_opts_lookup(opts, key)
predicate_method = (predicate_method_base_name.to_s + "?").to_sym
if value.respond_to?(predicate_method)
if value.send(predicate_method)
raise Exceptions::ValidationFailed, "Option #{key} cannot be #{predicate_method_base_name}"
end
end
end
# Assign a default value to a parameter.
def _pv_default(opts, key, default_value)
value = _pv_opts_lookup(opts, key)
if value == nil
opts[key] = default_value
end
end
# Check a parameter against a regular expression.
def _pv_regex(opts, key, regex)
value = _pv_opts_lookup(opts, key)
if value != nil
passes = false
[ regex ].flatten.each do |r|
if value != nil
if r.match(value.to_s)
passes = true
end
end
end
unless passes
raise Exceptions::ValidationFailed, "Option #{key}'s value #{value} does not match regular expression #{regex.inspect}"
end
end
end
# Check a parameter against a hash of proc's.
def _pv_callbacks(opts, key, callbacks)
raise ArgumentError, "Callback list must be a hash!" unless callbacks.kind_of?(Hash)
value = _pv_opts_lookup(opts, key)
if value != nil
callbacks.each do |message, zeproc|
if zeproc.call(value) != true
raise Exceptions::ValidationFailed, "Option #{key}'s value #{value} #{message}!"
end
end
end
end
# Allow a parameter to default to @name
def _pv_name_attribute(opts, key, is_name_attribute=true)
if is_name_attribute
if opts[key] == nil
opts[key] = self.instance_variable_get("@name")
end
end
end
end
end
end
|
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'pathname'
require 'chef/mixin/shell_out'
require 'chef/provider/user'
class Chef
class Provider
class User
class Useradd < Chef::Provider::User
include Chef::Mixin::ShellOut
UNIVERSAL_OPTIONS = [[:comment, "-c"], [:gid, "-g"], [:password, "-p"], [:shell, "-s"], [:uid, "-u"]]
def create_user
command = compile_command("useradd") do |useradd|
useradd.concat(universal_options)
useradd.concat(useradd_options)
end
shell_out!(*command)
end
def manage_user
if universal_options != ""
command = compile_command("usermod") do |u|
u.concat(universal_options)
end
shell_out!(*command)
end
end
def remove_user
command = [ "userdel" ]
command << "-r" if managing_home_dir?
command << new_resource.username
shell_out!(*command)
end
def check_lock
# we can get an exit code of 1 even when it's successful on
# rhel/centos (redhat bug 578534). See additional error checks below.
passwd_s = shell_out!("passwd", "-S", new_resource.username, :returns => [0,1])
if whyrun_mode? && passwd_s.stdout.empty? && passwd_s.stderr.match(/does not exist/)
# if we're in whyrun mode and the user is not yet created we assume it would be
return false
end
raise Chef::Exceptions::User, "Cannot determine if #{@new_resource} is locked!" if passwd_s.stdout.empty?
status_line = passwd_s.stdout.split(' ')
case status_line[1]
when /^P/
@locked = false
when /^N/
@locked = false
when /^L/
@locked = true
end
unless passwd_s.exitstatus == 0
raise_lock_error = false
if ['redhat', 'centos'].include?(node[:platform])
passwd_version_check = shell_out!('rpm -q passwd')
passwd_version = passwd_version_check.stdout.chomp
unless passwd_version == 'passwd-0.73-1'
raise_lock_error = true
end
else
raise_lock_error = true
end
raise Chef::Exceptions::User, "Cannot determine if #{new_resource} is locked!" if raise_lock_error
end
@locked
end
def lock_user
shell_out!("usermod", "-L", new_resource.username)
end
def unlock_user
shell_out!("usermod", "-U", new_resource.username)
end
def compile_command(base_command)
base_command = Array(base_command)
yield base_command
base_command << new_resource.username
base_command
end
def universal_options
@universal_options ||=
begin
opts = []
# magic allows UNIVERSAL_OPTIONS to be overridden in a subclass
self.class::UNIVERSAL_OPTIONS.each do |field, option|
update_options(field, option, opts)
end
if updating_home?
if managing_home_dir?
Chef::Log.debug("#{new_resource} managing the users home directory")
opts << "-d" << new_resource.home << "-m"
else
Chef::Log.debug("#{new_resource} setting home to #{new_resource.home}")
opts << "-d" << new_resource.home
end
end
opts << "-o" if new_resource.non_unique || new_resource.supports[:non_unique]
opts
end
end
def update_options(field, option, opts)
if @current_resource.send(field).to_s != new_resource.send(field).to_s
if new_resource.send(field)
Chef::Log.debug("#{new_resource} setting #{field} to #{new_resource.send(field)}")
opts << option << new_resource.send(field).to_s
end
end
end
def useradd_options
opts = []
opts << "-r" if new_resource.system
opts
end
def updating_home?
# will return false if paths are equivalent
# Pathname#cleanpath does a better job than ::File::expand_path (on both unix and windows)
# ::File.expand_path("///tmp") == ::File.expand_path("/tmp") => false
# ::File.expand_path("\\tmp") => "C:/tmp"
return true if @current_resource.home.nil? && new_resource.home
new_resource.home and Pathname.new(@current_resource.home).cleanpath != Pathname.new(new_resource.home).cleanpath
end
def managing_home_dir?
new_resource.manage_home || new_resource.supports[:manage_home]
end
end
end
end
end
universal_options in useradd provider is an array.
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'pathname'
require 'chef/mixin/shell_out'
require 'chef/provider/user'
class Chef
  class Provider
    class User
      # Provider that manages local user accounts via the useradd /
      # usermod / userdel family of commands.
      class Useradd < Chef::Provider::User
        include Chef::Mixin::ShellOut

        # (attribute, command-line flag) pairs shared by useradd and usermod.
        UNIVERSAL_OPTIONS = [[:comment, "-c"], [:gid, "-g"], [:password, "-p"], [:shell, "-s"], [:uid, "-u"]]

        # Create the user with useradd, passing both the universal options
        # and the useradd-only options.
        def create_user
          command = compile_command("useradd") do |useradd|
            useradd.concat(universal_options)
            useradd.concat(useradd_options)
          end
          shell_out!(*command)
        end

        # Update the user with usermod, but only when at least one managed
        # attribute actually differs from the current state.
        def manage_user
          unless universal_options.empty?
            command = compile_command("usermod") do |u|
              u.concat(universal_options)
            end
            shell_out!(*command)
          end
        end

        # Remove the user; also removes the home directory (-r) when this
        # resource manages it.
        def remove_user
          command = [ "userdel" ]
          command << "-r" if managing_home_dir?
          command << new_resource.username
          shell_out!(*command)
        end

        # Determine whether the account is locked by parsing `passwd -S`.
        # Returns true/false; raises Chef::Exceptions::User when the lock
        # state cannot be determined.
        def check_lock
          # we can get an exit code of 1 even when it's successful on
          # rhel/centos (redhat bug 578534). See additional error checks below.
          passwd_s = shell_out!("passwd", "-S", new_resource.username, :returns => [0,1])
          if whyrun_mode? && passwd_s.stdout.empty? && passwd_s.stderr.match(/does not exist/)
            # if we're in whyrun mode and the user is not yet created we assume it would be
            return false
          end
          # Fixed: use new_resource (as the message below does) instead of the
          # inconsistent @new_resource ivar reference.
          raise Chef::Exceptions::User, "Cannot determine if #{new_resource} is locked!" if passwd_s.stdout.empty?
          # Second whitespace-delimited field of `passwd -S` output is the
          # status flag: P* = usable password, N* = no password, L* = locked.
          status_line = passwd_s.stdout.split(' ')
          case status_line[1]
          when /^P/
            @locked = false
          when /^N/
            @locked = false
          when /^L/
            @locked = true
          end
          unless passwd_s.exitstatus == 0
            # Only treat a non-zero exit as fatal when it is not the known
            # harmless passwd-0.73-1 behavior on RHEL/CentOS (RH bug 578534).
            raise_lock_error = false
            if ['redhat', 'centos'].include?(node[:platform])
              passwd_version_check = shell_out!('rpm -q passwd')
              passwd_version = passwd_version_check.stdout.chomp
              unless passwd_version == 'passwd-0.73-1'
                raise_lock_error = true
              end
            else
              raise_lock_error = true
            end
            raise Chef::Exceptions::User, "Cannot determine if #{new_resource} is locked!" if raise_lock_error
          end
          @locked
        end

        def lock_user
          shell_out!("usermod", "-L", new_resource.username)
        end

        def unlock_user
          shell_out!("usermod", "-U", new_resource.username)
        end

        # Build a command array: the base command, options appended by the
        # caller's block, then the username last.
        def compile_command(base_command)
          base_command = Array(base_command)
          yield base_command
          base_command << new_resource.username
          base_command
        end

        # Options shared by useradd and usermod, derived from the delta
        # between the current and desired resource state. Memoized so the
        # create/manage paths see a consistent view.
        def universal_options
          @universal_options ||=
            begin
              opts = []
              # magic allows UNIVERSAL_OPTIONS to be overridden in a subclass
              self.class::UNIVERSAL_OPTIONS.each do |field, option|
                update_options(field, option, opts)
              end
              if updating_home?
                if managing_home_dir?
                  Chef::Log.debug("#{new_resource} managing the users home directory")
                  opts << "-d" << new_resource.home << "-m"
                else
                  Chef::Log.debug("#{new_resource} setting home to #{new_resource.home}")
                  opts << "-d" << new_resource.home
                end
              end
              opts << "-o" if new_resource.non_unique || new_resource.supports[:non_unique]
              opts
            end
        end

        # Append `option value` to opts when the desired value for +field+
        # is set and differs from the current resource's value.
        def update_options(field, option, opts)
          if @current_resource.send(field).to_s != new_resource.send(field).to_s
            if new_resource.send(field)
              Chef::Log.debug("#{new_resource} setting #{field} to #{new_resource.send(field)}")
              opts << option << new_resource.send(field).to_s
            end
          end
        end

        # Options that apply only to useradd (never usermod).
        def useradd_options
          opts = []
          opts << "-r" if new_resource.system
          opts
        end

        # True when the desired home directory differs from the current one.
        def updating_home?
          # will return false if paths are equivalent
          # Pathname#cleanpath does a better job than ::File::expand_path (on both unix and windows)
          # ::File.expand_path("///tmp") == ::File.expand_path("/tmp") => false
          # ::File.expand_path("\\tmp") => "C:/tmp"
          return true if @current_resource.home.nil? && new_resource.home
          # `&&` instead of the low-precedence `and` keyword (same semantics here).
          new_resource.home && Pathname.new(@current_resource.home).cleanpath != Pathname.new(new_resource.home).cleanpath
        end

        # True when this resource is responsible for the home directory.
        def managing_home_dir?
          new_resource.manage_home || new_resource.supports[:manage_home]
        end
      end
    end
  end
end
|
require "cassandra/0.7"
module Chronologic::Service::Schema
mattr_accessor :write_opts
mattr_accessor :logger
self.write_opts = {:consistency => Cassandra::Consistency::QUORUM}
MAX_SUBSCRIPTIONS = 50_000
MAX_TIMELINES = 50_000
def self.create_object(key, attrs)
log "create_object(#{key})"
connection.insert(:Object, key, attrs, write_opts)
end
def self.remove_object(object_key)
log("remove_object(#{object_key})")
connection.remove(:Object, object_key, write_opts)
end
def self.object_for(object_key)
log("object_for(#{object_key})")
# Note: this will only pull 100 columns for objects. This seems like a
# reasonable default, but we'll look back at this comment some day and
# laugh. ~AKK
case object_key
when String
connection.get(:Object, object_key)
when Array
return {} if object_key.empty?
connection.multi_get(:Object, object_key)
end
end
def self.create_subscription(timeline_key, subscriber_key, backlink_key='')
log("create_subscription(#{timeline_key}, #{subscriber_key}, #{backlink_key})")
connection.insert(:Subscription, subscriber_key, {timeline_key => backlink_key}, write_opts)
end
# Remove subscriber_key's subscription to timeline_key.
def self.remove_subscription(timeline_key, subscriber_key)
  # Fixed: the log message was missing its closing parenthesis.
  log("remove_subscription(#{timeline_key}, #{subscriber_key})")
  connection.remove(:Subscription, subscriber_key, timeline_key)
end
def self.subscribers_for(timeline_key)
log("subscribers_for(#{timeline_key})")
case timeline_key
when String
connection.get(:Subscription, timeline_key, :count => MAX_SUBSCRIPTIONS).keys
when Array
return [] if timeline_key.empty?
connection.multi_get(:Subscription, timeline_key, :count => MAX_SUBSCRIPTIONS).map { |k, v| v.keys }.flatten
end
end
def self.followers_for(timeline_key)
connection.get(:Subscription, timeline_key, :count => MAX_SUBSCRIPTIONS).values
end
def self.create_event(event_key, data)
log("create_event(#{event_key})")
connection.insert(:Event, event_key, data, write_opts)
end
def self.update_event(event_key, data)
log("update_event(#{event_key})")
connection.insert(:Event, event_key, data, write_opts)
end
def self.remove_event(event_key)
log("remove_event(#{event_key})")
connection.remove(:Event, event_key)
end
def self.event_exists?(event_key)
log("event_exists?(#{event_key.inspect})")
connection.exists?(:Event, event_key)
end
def self.event_for(event_key)
log("event_for(#{event_key.inspect})")
# Note: this will only pull 100 columns for events. This seems like a
# reasonable default, but we'll look back at this comment some day and
# laugh. ~AKK
case event_key
when Array
return {} if event_key.empty?
connection.multi_get(:Event, event_key)
when String
connection.get(:Event, event_key)
end
end
def self.create_timeline_event(timeline, uuid, event_key)
log("create_timeline_event(#{timeline}, #{uuid}, #{event_key})")
connection.insert(:Timeline, timeline, {uuid => event_key}, write_opts)
end
def self.timeline_for(timeline, options={})
log("timeline_for(#{timeline}, #{options.inspect})")
count = options[:per_page] || 20
start = options[:page] || nil # Cassandra seems OK with a nil offset
case timeline
when String
connection.get(
:Timeline,
timeline,
:start => start,
:count => count,
# AKK: it would be nice to figure out how not to need to reverse
# this so that clients don't have to reverse it again to get
# reverse-chronological listings
:reversed => true
)
when Array
return {} if timeline.empty?
connection.multi_get(:Timeline, timeline)
end
end
def self.timeline_events_for(timeline, options={})
log("timeline_events_for(#{timeline})")
case timeline
when String
timeline_for(timeline, options)
when Array
timeline_for(timeline).inject({}) do |hsh, (timeline_key, column)|
hsh.update(timeline_key => column.values)
end
end
end
def self.remove_timeline_event(timeline, uuid)
log("remove_timeline_event(#{timeline}, #{uuid})")
connection.remove(:Timeline, timeline, uuid)
end
def self.timeline_count(timeline)
# Used to use connection.count_columns here, but it doesn't seem
# to respect the :count option. There is a fix for this in rjackson's fork,
# need to see if its merged into fauna and included in a release. ~AKK
# But in the meantime, nothing in Gowalla is using the page count so we're
# going to hardcode this obviously incorrect value for the time being.
-1
end
# Lookup events on the specified timeline(s) and return all the events
# referenced by those timelines.
#
# timeline_keys - one or more String timeline_keys to fetch events from
#
# Returns a flat array of events
def self.fetch_timelines(timeline_keys, per_page=20, page='')
event_keys = timeline_events_for(
timeline_keys,
:per_page => per_page,
:page => page
).values.flatten
event_for(event_keys.uniq).
map do |k, e|
Chronologic::Event.load_from_columns(e).tap do |event|
event.key = k
end
end
end
# Fetch objects referenced by events and correctly populate the event objects
#
# events - an array of Chronologic::Event objects to populate
#
# Returns a flat array of Chronologic::Event objects with their object
# references populated.
def self.fetch_objects(events)
  # Collect every distinct object key referenced by any event's object map.
  object_keys = events.map { |e| e.objects.values }.flatten.uniq
  objects = object_for(object_keys)
  events.map do |e|
    e.tap do
      e.objects.each do |type, keys|
        # An event may reference a single object or a list of objects per
        # type; replace each key (or list of keys) with the fetched data.
        if keys.is_a?(Array)
          e.objects[type] = keys.map { |k| objects[k] }
        else
          e.objects[type] = objects[keys]
        end
      end
    end
  end
end
# Convert a flat array of Chronologic::Events into a properly hierarchical
# timeline.
#
# events - an array of Chronologic::Event objects, each possibly referencing
# other events
#
# Returns a flat array of Chronologic::Event objects with their subevent
# references correctly populated.
def self.reify_timeline(events)
  # Index events by key so a subevent's parent can be found in O(1).
  event_index = events.inject({}) { |idx, e| idx.update(e.key => e) }
  timeline_index = events.inject([]) do |timeline, e|
    if e.subevent? && event_index.has_key?(e.parent)
      # AKK: something is weird about Hashie::Dash or Event in that if you
      # push objects onto subevents, they are added to an object that is
      # referenced by all instances of event. So, these dup'ing hijinks are
      subevents = event_index[e.parent].subevents.dup
      subevents << e
      event_index[e.parent].subevents = subevents
    else
      # Top-level event (or a subevent whose parent isn't in this batch):
      # keep it as a timeline entry.
      timeline << e.key
    end
    timeline
  end
  # Resolve the surviving keys back to their (now nested) events.
  timeline_index.map { |key| event_index[key] }
end
def self.batch
connection.batch { yield }
end
def self.connection
Chronologic.connection
end
def self.log(msg)
return unless logger
logger.debug(msg)
end
end
Read events at QUORUM consistency to address consistency-related bugs.
require "cassandra/0.7"
module Chronologic::Service::Schema
mattr_accessor :write_opts
mattr_accessor :consistent_read_opts
mattr_accessor :logger
self.write_opts = {:consistency => Cassandra::Consistency::QUORUM}
self.consistent_read_opts = {:consistency => Cassandra::Consistency::QUORUM}
MAX_SUBSCRIPTIONS = 50_000
MAX_TIMELINES = 50_000
def self.create_object(key, attrs)
log "create_object(#{key})"
connection.insert(:Object, key, attrs, write_opts)
end
def self.remove_object(object_key)
log("remove_object(#{object_key})")
connection.remove(:Object, object_key, write_opts)
end
def self.object_for(object_key)
log("object_for(#{object_key})")
# Note: this will only pull 100 columns for objects. This seems like a
# reasonable default, but we'll look back at this comment some day and
# laugh. ~AKK
case object_key
when String
connection.get(:Object, object_key)
when Array
return {} if object_key.empty?
connection.multi_get(:Object, object_key)
end
end
def self.create_subscription(timeline_key, subscriber_key, backlink_key='')
log("create_subscription(#{timeline_key}, #{subscriber_key}, #{backlink_key})")
connection.insert(:Subscription, subscriber_key, {timeline_key => backlink_key}, write_opts)
end
# Remove subscriber_key's subscription to timeline_key.
def self.remove_subscription(timeline_key, subscriber_key)
  # Fixed: the log message was missing its closing parenthesis.
  log("remove_subscription(#{timeline_key}, #{subscriber_key})")
  connection.remove(:Subscription, subscriber_key, timeline_key)
end
def self.subscribers_for(timeline_key)
log("subscribers_for(#{timeline_key})")
case timeline_key
when String
connection.get(:Subscription, timeline_key, :count => MAX_SUBSCRIPTIONS).keys
when Array
return [] if timeline_key.empty?
connection.multi_get(:Subscription, timeline_key, :count => MAX_SUBSCRIPTIONS).map { |k, v| v.keys }.flatten
end
end
def self.followers_for(timeline_key)
connection.get(:Subscription, timeline_key, :count => MAX_SUBSCRIPTIONS).values
end
def self.create_event(event_key, data)
log("create_event(#{event_key})")
connection.insert(:Event, event_key, data, write_opts)
end
def self.update_event(event_key, data)
log("update_event(#{event_key})")
connection.insert(:Event, event_key, data, write_opts)
end
def self.remove_event(event_key)
log("remove_event(#{event_key})")
connection.remove(:Event, event_key, write_opts)
end
def self.event_exists?(event_key)
log("event_exists?(#{event_key.inspect})")
connection.exists?(:Event, event_key, consistent_read_opts)
end
# Fetch one event (String key) or many events (Array of keys) from the
# Event column family, reading at the configured QUORUM consistency.
def self.event_for(event_key)
  log("event_for(#{event_key.inspect})")
  # Note: this will only pull 100 columns for events. This seems like a
  # reasonable default, but we'll look back at this comment some day and
  # laugh. ~AKK
  case event_key
  when Array
    # An empty multi-get would be a wasted round trip; short-circuit.
    return {} if event_key.empty?
    connection.multi_get(:Event, event_key, consistent_read_opts)
  when String
    connection.get(:Event, event_key, consistent_read_opts)
  end
end
def self.create_timeline_event(timeline, uuid, event_key)
log("create_timeline_event(#{timeline}, #{uuid}, #{event_key})")
connection.insert(:Timeline, timeline, {uuid => event_key}, write_opts)
end
def self.timeline_for(timeline, options={})
log("timeline_for(#{timeline}, #{options.inspect})")
count = options[:per_page] || 20
start = options[:page] || nil # Cassandra seems OK with a nil offset
case timeline
when String
connection.get(
:Timeline,
timeline,
:start => start,
:count => count,
# AKK: it would be nice to figure out how not to need to reverse
# this so that clients don't have to reverse it again to get
# reverse-chronological listings
:reversed => true
)
when Array
return {} if timeline.empty?
connection.multi_get(:Timeline, timeline)
end
end
def self.timeline_events_for(timeline, options={})
log("timeline_events_for(#{timeline})")
case timeline
when String
timeline_for(timeline, options)
when Array
timeline_for(timeline).inject({}) do |hsh, (timeline_key, column)|
hsh.update(timeline_key => column.values)
end
end
end
def self.remove_timeline_event(timeline, uuid)
log("remove_timeline_event(#{timeline}, #{uuid})")
connection.remove(:Timeline, timeline, uuid)
end
def self.timeline_count(timeline)
# Used to use connection.count_columns here, but it doesn't seem
# to respect the :count option. There is a fix for this in rjackson's fork,
# need to see if its merged into fauna and included in a release. ~AKK
# But in the meantime, nothing in Gowalla is using the page count so we're
# going to hardcode this obviously incorrect value for the time being.
-1
end
# Lookup events on the specified timeline(s) and return all the events
# referenced by those timelines.
#
# timeline_keys - one or more String timeline_keys to fetch events from
#
# Returns a flat array of events
def self.fetch_timelines(timeline_keys, per_page=20, page='')
  # Collect the event keys referenced by each requested timeline.
  event_keys = timeline_events_for(
    timeline_keys,
    :per_page => per_page,
    :page => page
  ).values.flatten
  # Load each referenced event exactly once and rehydrate it, tagging
  # each loaded event with its row key.
  event_for(event_keys.uniq).
    map do |k, e|
      Chronologic::Event.load_from_columns(e).tap do |event|
        event.key = k
      end
    end
end
# Fetch objects referenced by events and correctly populate the event objects
#
# events - an array of Chronologic::Event objects to populate
#
# Returns a flat array of Chronologic::Event objects with their object
# references populated.
def self.fetch_objects(events)
object_keys = events.map { |e| e.objects.values }.flatten.uniq
objects = object_for(object_keys)
events.map do |e|
e.tap do
e.objects.each do |type, keys|
if keys.is_a?(Array)
e.objects[type] = keys.map { |k| objects[k] }
else
e.objects[type] = objects[keys]
end
end
end
end
end
# Convert a flat array of Chronologic::Events into a properly hierarchical
# timeline.
#
# events - an array of Chronologic::Event objects, each possibly referencing
# other events
#
# Returns a flat array of Chronologic::Event objects with their subevent
# references correctly populated.
def self.reify_timeline(events)
  # Index events by key so a subevent's parent can be found in O(1).
  event_index = events.inject({}) { |idx, e| idx.update(e.key => e) }
  timeline_index = events.inject([]) do |timeline, e|
    if e.subevent? && event_index.has_key?(e.parent)
      # AKK: something is weird about Hashie::Dash or Event in that if you
      # push objects onto subevents, they are added to an object that is
      # referenced by all instances of event. So, these dup'ing hijinks are
      subevents = event_index[e.parent].subevents.dup
      subevents << e
      event_index[e.parent].subevents = subevents
    else
      # Top-level event (or a subevent whose parent isn't in this batch):
      # keep it as a timeline entry.
      timeline << e.key
    end
    timeline
  end
  # Resolve the surviving keys back to their (now nested) events.
  timeline_index.map { |key| event_index[key] }
end
def self.batch
connection.batch { yield }
end
def self.connection
Chronologic.connection
end
# Emit a debug message when a logger has been configured; no-op otherwise.
def self.log(msg)
  logger.debug(msg) if logger
end
end
|
# Copyright (c) 2006-2010 Nick Sieger <nicksieger@gmail.com>
# See the file LICENSE.txt included with the distribution for
# software license details.
require 'fileutils'
module CI #:nodoc:
module Reporter #:nodoc:
# Writes test-suite results out as XML report files under a single
# base directory.
class ReportManager
  # prefix - String used both for the report directory name (downcased)
  #          and the report filename prefix (upcased).
  # The base directory can be overridden with the CI_REPORTS env var.
  def initialize(prefix)
    @basedir = ENV['CI_REPORTS'] || File.expand_path("#{Dir.getwd}/#{prefix.downcase}/reports")
    @basename = "#{@basedir}/#{prefix.upcase}"
    FileUtils.mkdir_p(@basedir)
  end

  # Serializes +suite+ to "<PREFIX>-<sanitized suite name>.xml".
  # NOTE(review): suites whose sanitized names collide overwrite each
  # other's report file.
  def write_report(suite)
    File.open("#{@basename}-#{suite.name.gsub(/[^a-zA-Z0-9]+/, '-')}.xml", "w") do |f|
      f << suite.to_xml
    end
  end
end
end
end
Prevent XML result files from being overwritten (occurs when multiple suites share the same name).
# Copyright (c) 2006-2010 Nick Sieger <nicksieger@gmail.com>
# See the file LICENSE.txt included with the distribution for
# software license details.
require 'fileutils'
module CI #:nodoc:
module Reporter #:nodoc:
# Writes test-suite results out as XML report files, ensuring each
# suite gets a unique filename so results are never overwritten.
class ReportManager
  # Upper bound on filename sidesteps, to prevent endless loops.
  MAX_SIDESTEPS = 100000

  # prefix - String used both for the report directory name (downcased)
  #          and the report filename prefix (upcased).
  # The base directory can be overridden with the CI_REPORTS env var.
  def initialize(prefix)
    @basedir = ENV['CI_REPORTS'] || File.expand_path("#{Dir.getwd}/#{prefix.downcase}/reports")
    @basename = "#{@basedir}/#{prefix.upcase}"
    FileUtils.mkdir_p(@basedir)
  end

  # Serializes +suite+ to XML at a unique path (see #filename_for).
  def write_report(suite)
    File.open(filename_for(suite), "w") do |f|
      f << suite.to_xml
    end
  end

  private

  # Creates a unique filename per suite to prevent results from being
  # overwritten. If a result file already exists, an index is appended:
  #   SPEC-MailsController.xml
  #   SPEC-MailsController.0.xml
  #   SPEC-MailsController.1.xml
  #   ...
  #   SPEC-MailsController.N.xml
  # with N < MAX_SIDESTEPS, to prevent endless sidestep loops.
  def filename_for(suite)
    basename = "#{@basename}-#{suite.name.gsub(/[^a-zA-Z0-9]+/, '-')}"
    suffix = "xml"
    # the initial filename, e.g. SPEC-MailsController.xml
    filename = [basename, suffix].join(".")
    # if the initial filename is already in use, sidestep, starting
    # with SPEC-MailsController.0.xml
    i = 0
    # File.exist? instead of the deprecated File.exists?
    # (the alias was removed in Ruby 3.2)
    while File.exist?(filename) && i < MAX_SIDESTEPS
      filename = [basename, i, suffix].join(".")
      i += 1
    end
    filename
  end
end
end
end
|
# November 2015, Chris Van Heuveln
#
# Copyright (c) 2015 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative 'cisco_cmn_utils'
require_relative 'node_util'
require_relative 'pim'
require_relative 'vrf'
require_relative 'vni'
# Add some interface-specific constants to the Cisco namespace
module Cisco
IF_SWITCHPORT_MODE = {
disabled: '',
access: 'access',
trunk: 'trunk',
fex_fabric: 'fex-fabric',
tunnel: 'dot1q-tunnel',
fabricpath: 'fabricpath',
}
# Interface - node utility class for general interface config management
class Interface < NodeUtil
attr_reader :name
def initialize(name, instantiate=true)
fail TypeError unless name.is_a?(String)
fail ArgumentError unless name.length > 0
@name = name.downcase
create if instantiate
end
# Build a hash of all known interfaces, keyed by downcased interface
# name, without instantiating (i.e. configuring) them on the device.
def self.interfaces
  names = config_get('interface', 'all_interfaces')
  return {} if names.nil?
  names.each_with_object({}) do |id, hash|
    key = id.downcase
    hash[key] = Interface.new(key, false)
  end
end
def create
feature_vlan_set(true) if @name[/vlan/i]
config_set('interface', 'create', @name)
end
def destroy
config_set('interface', 'destroy', @name)
end
########################################################
# PROPERTIES #
########################################################
def access_vlan
config_get('interface', 'access_vlan', @name)
end
def access_vlan=(vlan)
config_set('interface', 'access_vlan', @name, vlan)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def ipv4_acl_in
config_get('interface', 'ipv4_acl_in', @name)
end
# Apply (or remove, when val is '') the inbound IPv4 ACL on this
# interface.
def ipv4_acl_in=(val)
  if val != ''
    state = ''
  else
    # Removing: re-read the currently applied ACL name, because the
    # "no" form of the command needs the existing value.
    state = 'no'
    val = ipv4_acl_in
  end
  # Nothing to do when removing and no ACL is currently applied.
  return unless val && val != ''
  config_set('interface', 'ipv4_acl_in', @name, state, val)
end
def default_ipv4_acl_in
config_get_default('interface', 'ipv4_acl_in')
end
def ipv4_acl_out
config_get('interface', 'ipv4_acl_out', @name)
end
def ipv4_acl_out=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv4_acl_out
end
return unless val && val != ''
config_set('interface', 'ipv4_acl_out', @name, state, val)
end
def default_ipv4_acl_out
config_get_default('interface', 'ipv4_acl_out')
end
def ipv6_acl_in
config_get('interface', 'ipv6_acl_in', @name)
end
def ipv6_acl_in=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv6_acl_in
end
return unless val && val != ''
config_set('interface', 'ipv6_acl_in', @name, state, val)
end
def default_ipv6_acl_in
config_get_default('interface', 'ipv6_acl_in')
end
def ipv6_acl_out
config_get('interface', 'ipv6_acl_out', @name)
end
def ipv6_acl_out=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv6_acl_out
end
return unless val && val != ''
config_set('interface', 'ipv6_acl_out', @name, state, val)
end
def default_ipv6_acl_out
config_get_default('interface', 'ipv6_acl_out')
end
def default_access_vlan
config_get_default('interface', 'access_vlan')
end
def channel_group
config_get('interface', 'channel_group', @name)
end
def channel_group=(val)
fail "channel_group is not supported on #{@name}" unless
@name[/Ethernet/i]
# 'force' is needed by cli_nxos to handle the case where a port-channel
# interface is created prior to the channel-group cli; in which case
# the properties of the port-channel interface will be different from
# the ethernet interface. 'force' is not needed if the port-channel is
# created as a result of the channel-group cli but since it does no
# harm we will use it every time.
if val
state = ''
force = 'force'
else
state = 'no'
val = force = ''
end
config_set('interface',
'channel_group', @name, state, val, force)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_channel_group
config_get_default('interface', 'channel_group')
end
def description
config_get('interface', 'description', @name)
end
def description=(desc)
fail TypeError unless desc.is_a?(String)
if desc.strip.empty?
config_set('interface', 'description', @name, 'no', '')
else
config_set('interface', 'description', @name, '', desc)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_description
config_get_default('interface', 'description')
end
def encapsulation_dot1q
config_get('interface', 'encapsulation_dot1q', @name)
end
def encapsulation_dot1q=(val)
if val.to_s.empty?
config_set('interface', 'encapsulation_dot1q', @name, 'no', '')
else
config_set('interface', 'encapsulation_dot1q', @name, '', val)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_encapsulation_dot1q
config_get_default('interface', 'encapsulation_dot1q')
end
def fabricpath_feature
FabricpathGlobal.fabricpath_feature
end
def fabricpath_feature_set(fabricpath_set)
FabricpathGlobal.fabricpath_feature_set(fabricpath_set)
end
def fex_feature
fex = config_get('fex', 'feature')
fail 'fex_feature not found' if fex.nil?
fex.to_sym
end
def fex_feature_set(fex_set)
curr = fex_feature
return if curr == fex_set
case fex_set
when :enabled
config_set('fex', 'feature_install', '') if curr == :uninstalled
config_set('fex', 'feature', '')
when :disabled
config_set('fex', 'feature', 'no') if curr == :enabled
return
when :installed
config_set('fex', 'feature_install', '') if curr == :uninstalled
when :uninstalled
config_set('fex', 'feature', 'no') if curr == :enabled
config_set('fex', 'feature_install', 'no')
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def ipv4_addr_mask
config_get('interface', 'ipv4_addr_mask', @name)
end
def ipv4_addr_mask_set(addr, mask)
check_switchport_disabled
if addr.nil? || addr == default_ipv4_address
config_set('interface', 'ipv4_addr_mask', @name, 'no', '')
else
config_set('interface', 'ipv4_addr_mask', @name, '',
"#{addr}/#{mask}")
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def ipv4_address
val = ipv4_addr_mask
return default_ipv4_address if val.nil?
# val is [[addr, mask], [addr, mask secondary]] - we just want the addr
val.shift.first
end
def default_ipv4_address
config_get_default('interface', 'ipv4_address')
end
def ipv4_netmask_length
val = ipv4_addr_mask
return default_ipv4_netmask_length if val.nil?
# val is [[addr, mask], [addr, mask secondary]] - we just want the mask
val.shift.last.to_i
end
def default_ipv4_netmask_length
config_get_default('interface', 'ipv4_netmask_length')
end
def ipv4_pim_sparse_mode
config_get('interface', 'ipv4_pim_sparse_mode', @name)
end
def ipv4_pim_sparse_mode=(state)
check_switchport_disabled
Pim.feature_enable unless Pim.feature_enabled
config_set('interface', 'ipv4_pim_sparse_mode', @name,
state ? '' : 'no')
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_ipv4_pim_sparse_mode
config_get_default('interface', 'ipv4_pim_sparse_mode')
end
def ipv4_proxy_arp
config_get('interface', 'ipv4_proxy_arp', @name)
end
def ipv4_proxy_arp=(proxy_arp)
check_switchport_disabled
no_cmd = (proxy_arp ? '' : 'no')
config_set('interface', 'ipv4_proxy_arp', @name, no_cmd)
end
def default_ipv4_proxy_arp
config_get_default('interface', 'ipv4_proxy_arp')
end
# Select the config lookup key for the ipv4 redirects property;
# loopback interfaces use a different command than other types.
def ipv4_redirects_lookup_string
  @name =~ /loopback/i ? 'ipv4_redirects_loopback' : 'ipv4_redirects_other_interfaces'
end
def ipv4_redirects
config_get('interface', ipv4_redirects_lookup_string, @name)
end
def ipv4_redirects=(redirects)
check_switchport_disabled
no_cmd = (redirects ? '' : 'no')
config_set('interface', ipv4_redirects_lookup_string, @name, no_cmd)
end
def default_ipv4_redirects
config_get_default('interface', ipv4_redirects_lookup_string)
end
def feature_lacp?
config_get('interface', 'feature_lacp')
end
def feature_lacp_set(val)
return if feature_lacp? == val
config_set('interface', 'feature_lacp', val ? '' : 'no')
end
def mtu
config_get('interface', 'mtu', @name)
end
def mtu=(val)
check_switchport_disabled
config_set('interface', 'mtu', @name, '', val)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_mtu
config_get_default('interface', 'mtu')
end
def speed
config_get('interface', 'speed', @name)
end
def speed=(val)
if node.product_id =~ /C31\d\d/
fail 'Changing interface speed is not permitted on this platform'
end
config_set('interface', 'speed', @name, val)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_speed
config_get_default('interface', 'speed')
end
def duplex
config_get('interface', 'duplex', @name)
end
def duplex=(val)
if node.product_id =~ /C31\d\d/
fail 'Changing interface duplex is not permitted on this platform'
end
config_set('interface', 'duplex', @name, val)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_duplex
config_get_default('interface', 'duplex')
end
# Select the config lookup key for auto-negotiation; ethernet and
# port-channel (ether-channel) interfaces each have their own variant.
def negotiate_auto_lookup_string
  case @name
  when /Ethernet/i     then 'negotiate_auto_ethernet'
  when /port-channel/i then 'negotiate_auto_portchannel'
  else                      'negotiate_auto_other_interfaces'
  end
end
def negotiate_auto
config_get('interface', negotiate_auto_lookup_string, @name)
end
def negotiate_auto=(negotiate_auto)
lookup = negotiate_auto_lookup_string
no_cmd = (negotiate_auto ? '' : 'no')
begin
config_set('interface', lookup, @name, no_cmd)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
end
def default_negotiate_auto
config_get_default('interface', negotiate_auto_lookup_string)
end
def shutdown
config_get('interface', 'shutdown', @name)
end
def shutdown=(state)
no_cmd = (state ? '' : 'no')
config_set('interface', 'shutdown', @name, no_cmd)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
# Default shutdown state for this interface, which varies by interface
# type and (for ethernet) by the two system-wide switchport defaults.
def default_shutdown
  case @name
  when /Ethernet/i
    # Ethernet defaults depend on whether switchport and shutdown are
    # enabled by default system-wide; pick the matching lookup key.
    def_sw = system_default_switchport
    def_shut = system_default_switchport_shutdown
    if def_sw && def_shut
      lookup = 'shutdown_ethernet_switchport_shutdown'
    elsif def_sw && !def_shut
      lookup = 'shutdown_ethernet_switchport_noshutdown'
    elsif !def_sw && def_shut
      lookup = 'shutdown_ethernet_noswitchport_shutdown'
    elsif !def_sw && !def_shut
      lookup = 'shutdown_ethernet_noswitchport_noshutdown'
    else
      fail "Error: def_sw #{def_sw}, def_shut #{def_shut}"
    end
  when /loopback/i
    lookup = 'shutdown_loopback'
  when /port-channel/i # EtherChannel
    lookup = 'shutdown_ether_channel'
  when /Vlan/i
    lookup = 'shutdown_vlan'
  else
    lookup = 'shutdown_unknown'
  end
  config_get_default('interface', lookup)
end
def switchport
# This is "switchport", not "switchport mode"
config_get('interface', 'switchport', @name)
end
def switchport_enable(val=true)
config_set('interface', 'switchport', @name, val ? '' : 'no')
end
# switchport_autostate_exclude is exclusive to switchport interfaces
def switchport_autostate_exclude
config_get('interface',
'switchport_autostate_exclude', @name)
end
def switchport_autostate_exclude=(val)
# cannot configure autostate unless feature vlan is enabled
fail('switchport mode must be configured before ' \
'switchport autostate') unless switchport
feature_vlan_set(true)
config_set('interface', 'switchport_autostate_exclude',
@name, val ? '' : 'no')
end
def default_switchport_autostate_exclude
config_get_default('interface', 'switchport_autostate_exclude')
end
# Select the config lookup key for switchport mode; ethernet and
# port-channel interfaces each have their own command variant.
def switchport_mode_lookup_string
  return 'switchport_mode_ethernet' if @name =~ /Ethernet/i
  return 'switchport_mode_port_channel' if @name =~ /port-channel/i
  'switchport_mode_other_interfaces'
end
def switchport_mode
mode = config_get('interface', switchport_mode_lookup_string, @name)
return mode.nil? ? :disabled : IF_SWITCHPORT_MODE.key(mode)
rescue IndexError
# Assume this is an interface that doesn't support switchport.
# Do not raise exception since the providers will prefetch this property
# regardless of interface type.
return :disabled
end
def switchport_enable_and_mode(mode_set)
switchport_enable unless switchport
if :fabricpath == mode_set
fabricpath_feature_set(:enabled) unless :enabled == fabricpath_feature
elsif :fex_fabric == mode_set
fex_feature_set(:enabled) unless :enabled == fex_feature
end
config_set('interface', switchport_mode_lookup_string, @name, '',
IF_SWITCHPORT_MODE[mode_set])
rescue RuntimeError
raise "[#{@name}] switchport_mode is not supported on this interface"
end
# Set the switchport mode for this interface.
#
# mode_set - one of the IF_SWITCHPORT_MODE keys (:disabled, :access,
#            :trunk, :fex_fabric, :tunnel, :fabricpath).
# Raises ArgumentError for unknown modes, RuntimeError (wrapped
# CliError) when the device rejects the configuration.
def switchport_mode=(mode_set)
  # no system default switchport
  # int e1/1
  #   switchport
  #   switchport mode [access|trunk|fex|...]
  # Hash#key? instead of keys.include? — same check, no intermediate array.
  fail ArgumentError unless IF_SWITCHPORT_MODE.key?(mode_set)
  case mode_set
  when :disabled
    if switchport
      # Note: turn off switchport command, not switchport mode
      config_set('interface', 'switchport', @name, 'no')
    end
  when :default
    # NOTE(review): :default is not an IF_SWITCHPORT_MODE key, so the
    # guard above appears to make this branch unreachable — confirm.
    if :disabled == default_switchport_mode
      config_set('interface', switchport_mode_lookup_string,
                 @name, 'no', '')
    else
      switchport_enable_and_mode(mode_set)
    end
  else
    switchport_enable_and_mode(mode_set)
  end # case
rescue Cisco::CliError => e
  raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_switchport_mode
return :disabled unless system_default_switchport
IF_SWITCHPORT_MODE.key(
config_get_default('interface', switchport_mode_lookup_string))
end
def switchport_trunk_allowed_vlan
config_get('interface', 'switchport_trunk_allowed_vlan', @name)
end
def switchport_trunk_allowed_vlan=(val)
if val.nil?
config_set(
'interface', 'switchport_trunk_allowed_vlan', @name, 'no', '')
else
config_set(
'interface', 'switchport_trunk_allowed_vlan', @name, '', val)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_switchport_trunk_allowed_vlan
config_get_default('interface', 'switchport_trunk_allowed_vlan')
end
def switchport_trunk_native_vlan
config_get('interface', 'switchport_trunk_native_vlan', @name)
end
def switchport_trunk_native_vlan=(val)
if val.nil?
config_set(
'interface', 'switchport_trunk_native_vlan', @name, 'no', '')
else
config_set(
'interface', 'switchport_trunk_native_vlan', @name, '', val)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
# vlan_mapping & vlan_mapping_enable
# Hardware & Cli Dependencies:
# - F3 linecards only
# - vdc
# - limit-resource
# - bridge-domain
# - feature vni
# - switchport mode
# Getter: Builds an array of vlan_mapping commands currently
# on the device.
# cli: switchport vlan mapping 2 200
# switchport vlan mapping 4 400
# array: [['2', '200'], ['4', '400']]
#
def default_vlan_mapping
config_get_default('interface', 'vlan_mapping')
end
def vlan_mapping
config_get('interface', 'vlan_mapping', @name).each(&:compact!)
end
def vlan_mapping=(should_list)
Vni.feature_vni_enable unless Vni.feature_vni_enabled
# Process a hash of vlan_mapping cmds from delta_add_remove().
# The vlan_mapping cli does not allow commands to be updated, they must
# first be removed if there is a change.
delta_hash = Utils.delta_add_remove(should_list, vlan_mapping,
:updates_not_allowed)
return if delta_hash.values.flatten.empty?
# Process :remove first to ensure "update" commands will not fail.
[:remove, :add].each do |action|
CiscoLogger.debug("vlan_mapping delta #{@get_args}\n"\
"#{action}: #{delta_hash[action]}")
delta_hash[action].each do |original, translated|
state = (action == :add) ? '' : 'no'
config_set('interface', 'vlan_mapping', @name,
state, original, translated)
end
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
# cli: switchport vlan mapping enable
def default_vlan_mapping_enable
config_get_default('interface', 'vlan_mapping_enable')
end
def vlan_mapping_enable
config_get('interface', 'vlan_mapping_enable', @name)
end
def vlan_mapping_enable=(state)
config_set('interface', 'vlan_mapping_enable', @name,
state ? '' : 'no')
end
def default_switchport_trunk_native_vlan
config_get_default('interface', 'switchport_trunk_native_vlan')
end
def system_default_switchport
# This command is a user-configurable system default.
config_get('interface', 'system_default_switchport')
end
def system_default_switchport_shutdown
# This command is a user-configurable system default.
config_get('interface', 'system_default_switchport_shutdown')
end
def system_default_svi_autostate
# This command is a user-configurable system default.
config_get('interface', 'system_default_svi_autostate')
end
def switchport_vtp_mode_capable?
!switchport_mode.to_s.match(/(access|trunk)/).nil?
end
def switchport_vtp
return false unless switchport_vtp_mode_capable?
config_get('interface', 'vtp', @name)
end
def switchport_vtp=(vtp_set)
return false unless switchport_vtp_mode_capable?
no_cmd = (vtp_set) ? '' : 'no'
config_set('interface', 'vtp', @name, no_cmd)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def svi_cmd_allowed?(cmd)
fail "[#{@name}] Invalid interface type for command [#{cmd}]" unless
@name[/vlan/i]
end
# svi_autostate is exclusive to svi interfaces
def svi_autostate
return nil unless @name[/^vlan/i]
config_get('interface', 'svi_autostate', @name)
end
def svi_autostate=(val)
check_switchport_disabled
svi_cmd_allowed?('autostate')
config_set('interface', 'svi_autostate', @name, val ? '' : 'no')
end
def default_svi_autostate
system_default_svi_autostate
end
def feature_vlan?
config_get('interface', 'feature_vlan')
end
def feature_vlan_set(val)
return if feature_vlan? == val
config_set('interface', 'feature_vlan', val ? '' : 'no')
end
# svi_management is exclusive to svi interfaces
def svi_management
return nil unless @name[/^vlan/i]
config_get('interface', 'svi_management', @name)
end
def svi_management=(val)
check_switchport_disabled
svi_cmd_allowed?('management')
config_set('interface', 'svi_management', @name, val ? '' : 'no')
end
def default_svi_management
config_get_default('interface', 'svi_management')
end
def default_switchport_vtp
config_get_default('interface', 'vtp')
end
def switchport_vtp_feature?
config_get('vtp', 'feature')
end
def check_switchport_disabled
fail "#{caller[0][/`.*'/][1..-2]} cannot be set unless switchport mode" \
' is disabled' unless switchport_mode == :disabled
end
def vrf
config_get('interface', 'vrf', @name)
end
def vrf=(vrf)
fail TypeError unless vrf.is_a?(String)
if vrf.empty?
config_set('interface', 'vrf', @name, 'no', '')
else
config_set('interface', 'vrf', @name, '', vrf)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_vrf
config_get_default('interface', 'vrf')
end
end # Class
end # Module
Bugfix for vlan_mapping getter
Symptom:
N3k gets error: undefined method `each' for nil:NilClass
Problem:
config_get should return the YAML default_value if it doesn't find a match
but the platform exclude causes it to return nil instead.
config_get('interface', 'vlan_mapping', @name).each(&:compact!)
vlan_mapping:
_exclude: [/N(3|5|6|9)/]
default_value: []
Minitest didn't catch it because the test was skipped due to a platform check.
# November 2015, Chris Van Heuveln
#
# Copyright (c) 2015 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative 'cisco_cmn_utils'
require_relative 'node_util'
require_relative 'pim'
require_relative 'vrf'
require_relative 'vni'
# Add some interface-specific constants to the Cisco namespace
module Cisco
IF_SWITCHPORT_MODE = {
disabled: '',
access: 'access',
trunk: 'trunk',
fex_fabric: 'fex-fabric',
tunnel: 'dot1q-tunnel',
fabricpath: 'fabricpath',
}
# Interface - node utility class for general interface config management
class Interface < NodeUtil
attr_reader :name
def initialize(name, instantiate=true)
fail TypeError unless name.is_a?(String)
fail ArgumentError unless name.length > 0
@name = name.downcase
create if instantiate
end
def self.interfaces
hash = {}
intf_list = config_get('interface', 'all_interfaces')
return hash if intf_list.nil?
intf_list.each do |id|
id = id.downcase
hash[id] = Interface.new(id, false)
end
hash
end
def create
feature_vlan_set(true) if @name[/vlan/i]
config_set('interface', 'create', @name)
end
def destroy
config_set('interface', 'destroy', @name)
end
########################################################
# PROPERTIES #
########################################################
def access_vlan
config_get('interface', 'access_vlan', @name)
end
def access_vlan=(vlan)
config_set('interface', 'access_vlan', @name, vlan)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def ipv4_acl_in
config_get('interface', 'ipv4_acl_in', @name)
end
def ipv4_acl_in=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv4_acl_in
end
return unless val && val != ''
config_set('interface', 'ipv4_acl_in', @name, state, val)
end
def default_ipv4_acl_in
config_get_default('interface', 'ipv4_acl_in')
end
def ipv4_acl_out
config_get('interface', 'ipv4_acl_out', @name)
end
def ipv4_acl_out=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv4_acl_out
end
return unless val && val != ''
config_set('interface', 'ipv4_acl_out', @name, state, val)
end
def default_ipv4_acl_out
config_get_default('interface', 'ipv4_acl_out')
end
def ipv6_acl_in
config_get('interface', 'ipv6_acl_in', @name)
end
def ipv6_acl_in=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv6_acl_in
end
return unless val && val != ''
config_set('interface', 'ipv6_acl_in', @name, state, val)
end
def default_ipv6_acl_in
config_get_default('interface', 'ipv6_acl_in')
end
def ipv6_acl_out
config_get('interface', 'ipv6_acl_out', @name)
end
def ipv6_acl_out=(val)
if val != ''
state = ''
else
state = 'no'
val = ipv6_acl_out
end
return unless val && val != ''
config_set('interface', 'ipv6_acl_out', @name, state, val)
end
def default_ipv6_acl_out
config_get_default('interface', 'ipv6_acl_out')
end
def default_access_vlan
config_get_default('interface', 'access_vlan')
end
def channel_group
config_get('interface', 'channel_group', @name)
end
def channel_group=(val)
fail "channel_group is not supported on #{@name}" unless
@name[/Ethernet/i]
# 'force' is needed by cli_nxos to handle the case where a port-channel
# interface is created prior to the channel-group cli; in which case
# the properties of the port-channel interface will be different from
# the ethernet interface. 'force' is not needed if the port-channel is
# created as a result of the channel-group cli but since it does no
# harm we will use it every time.
if val
state = ''
force = 'force'
else
state = 'no'
val = force = ''
end
config_set('interface',
'channel_group', @name, state, val, force)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_channel_group
config_get_default('interface', 'channel_group')
end
def description
config_get('interface', 'description', @name)
end
def description=(desc)
fail TypeError unless desc.is_a?(String)
if desc.strip.empty?
config_set('interface', 'description', @name, 'no', '')
else
config_set('interface', 'description', @name, '', desc)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_description
config_get_default('interface', 'description')
end
def encapsulation_dot1q
config_get('interface', 'encapsulation_dot1q', @name)
end
def encapsulation_dot1q=(val)
if val.to_s.empty?
config_set('interface', 'encapsulation_dot1q', @name, 'no', '')
else
config_set('interface', 'encapsulation_dot1q', @name, '', val)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_encapsulation_dot1q
config_get_default('interface', 'encapsulation_dot1q')
end
def fabricpath_feature
FabricpathGlobal.fabricpath_feature
end
def fabricpath_feature_set(fabricpath_set)
FabricpathGlobal.fabricpath_feature_set(fabricpath_set)
end
def fex_feature
fex = config_get('fex', 'feature')
fail 'fex_feature not found' if fex.nil?
fex.to_sym
end
def fex_feature_set(fex_set)
curr = fex_feature
return if curr == fex_set
case fex_set
when :enabled
config_set('fex', 'feature_install', '') if curr == :uninstalled
config_set('fex', 'feature', '')
when :disabled
config_set('fex', 'feature', 'no') if curr == :enabled
return
when :installed
config_set('fex', 'feature_install', '') if curr == :uninstalled
when :uninstalled
config_set('fex', 'feature', 'no') if curr == :enabled
config_set('fex', 'feature_install', 'no')
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def ipv4_addr_mask
config_get('interface', 'ipv4_addr_mask', @name)
end
def ipv4_addr_mask_set(addr, mask)
check_switchport_disabled
if addr.nil? || addr == default_ipv4_address
config_set('interface', 'ipv4_addr_mask', @name, 'no', '')
else
config_set('interface', 'ipv4_addr_mask', @name, '',
"#{addr}/#{mask}")
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def ipv4_address
val = ipv4_addr_mask
return default_ipv4_address if val.nil?
# val is [[addr, mask], [addr, mask secondary]] - we just want the addr
val.shift.first
end
def default_ipv4_address
config_get_default('interface', 'ipv4_address')
end
def ipv4_netmask_length
val = ipv4_addr_mask
return default_ipv4_netmask_length if val.nil?
# val is [[addr, mask], [addr, mask secondary]] - we just want the mask
val.shift.last.to_i
end
def default_ipv4_netmask_length
config_get_default('interface', 'ipv4_netmask_length')
end
def ipv4_pim_sparse_mode
config_get('interface', 'ipv4_pim_sparse_mode', @name)
end
def ipv4_pim_sparse_mode=(state)
check_switchport_disabled
Pim.feature_enable unless Pim.feature_enabled
config_set('interface', 'ipv4_pim_sparse_mode', @name,
state ? '' : 'no')
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_ipv4_pim_sparse_mode
config_get_default('interface', 'ipv4_pim_sparse_mode')
end
def ipv4_proxy_arp
config_get('interface', 'ipv4_proxy_arp', @name)
end
def ipv4_proxy_arp=(proxy_arp)
check_switchport_disabled
no_cmd = (proxy_arp ? '' : 'no')
config_set('interface', 'ipv4_proxy_arp', @name, no_cmd)
end
def default_ipv4_proxy_arp
config_get_default('interface', 'ipv4_proxy_arp')
end
def ipv4_redirects_lookup_string
case @name
when /loopback/i
return 'ipv4_redirects_loopback'
else
return 'ipv4_redirects_other_interfaces'
end
end
def ipv4_redirects
config_get('interface', ipv4_redirects_lookup_string, @name)
end
def ipv4_redirects=(redirects)
check_switchport_disabled
no_cmd = (redirects ? '' : 'no')
config_set('interface', ipv4_redirects_lookup_string, @name, no_cmd)
end
def default_ipv4_redirects
config_get_default('interface', ipv4_redirects_lookup_string)
end
def feature_lacp?
config_get('interface', 'feature_lacp')
end
def feature_lacp_set(val)
return if feature_lacp? == val
config_set('interface', 'feature_lacp', val ? '' : 'no')
end
def mtu
config_get('interface', 'mtu', @name)
end
def mtu=(val)
check_switchport_disabled
config_set('interface', 'mtu', @name, '', val)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_mtu
config_get_default('interface', 'mtu')
end
def speed
config_get('interface', 'speed', @name)
end
def speed=(val)
if node.product_id =~ /C31\d\d/
fail 'Changing interface speed is not permitted on this platform'
end
config_set('interface', 'speed', @name, val)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_speed
config_get_default('interface', 'speed')
end
def duplex
config_get('interface', 'duplex', @name)
end
def duplex=(val)
if node.product_id =~ /C31\d\d/
fail 'Changing interface duplex is not permitted on this platform'
end
config_set('interface', 'duplex', @name, val)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_duplex
config_get_default('interface', 'duplex')
end
def negotiate_auto_lookup_string
case @name
when /Ethernet/i
return 'negotiate_auto_ethernet'
when /port-channel/i # Ether-channel
return 'negotiate_auto_portchannel'
else
return 'negotiate_auto_other_interfaces'
end
end
def negotiate_auto
config_get('interface', negotiate_auto_lookup_string, @name)
end
def negotiate_auto=(negotiate_auto)
lookup = negotiate_auto_lookup_string
no_cmd = (negotiate_auto ? '' : 'no')
begin
config_set('interface', lookup, @name, no_cmd)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
end
def default_negotiate_auto
config_get_default('interface', negotiate_auto_lookup_string)
end
def shutdown
config_get('interface', 'shutdown', @name)
end
def shutdown=(state)
no_cmd = (state ? '' : 'no')
config_set('interface', 'shutdown', @name, no_cmd)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_shutdown
case @name
when /Ethernet/i
def_sw = system_default_switchport
def_shut = system_default_switchport_shutdown
if def_sw && def_shut
lookup = 'shutdown_ethernet_switchport_shutdown'
elsif def_sw && !def_shut
lookup = 'shutdown_ethernet_switchport_noshutdown'
elsif !def_sw && def_shut
lookup = 'shutdown_ethernet_noswitchport_shutdown'
elsif !def_sw && !def_shut
lookup = 'shutdown_ethernet_noswitchport_noshutdown'
else
fail "Error: def_sw #{def_sw}, def_shut #{def_shut}"
end
when /loopback/i
lookup = 'shutdown_loopback'
when /port-channel/i # EtherChannel
lookup = 'shutdown_ether_channel'
when /Vlan/i
lookup = 'shutdown_vlan'
else
lookup = 'shutdown_unknown'
end
config_get_default('interface', lookup)
end
def switchport
# This is "switchport", not "switchport mode"
config_get('interface', 'switchport', @name)
end
def switchport_enable(val=true)
config_set('interface', 'switchport', @name, val ? '' : 'no')
end
# switchport_autostate_exclude is exclusive to switchport interfaces
def switchport_autostate_exclude
config_get('interface',
'switchport_autostate_exclude', @name)
end
def switchport_autostate_exclude=(val)
# cannot configure autostate unless feature vlan is enabled
fail('switchport mode must be configured before ' \
'switchport autostate') unless switchport
feature_vlan_set(true)
config_set('interface', 'switchport_autostate_exclude',
@name, val ? '' : 'no')
end
def default_switchport_autostate_exclude
config_get_default('interface', 'switchport_autostate_exclude')
end
def switchport_mode_lookup_string
case @name
when /Ethernet/i
return 'switchport_mode_ethernet'
when /port-channel/i
return 'switchport_mode_port_channel'
else
return 'switchport_mode_other_interfaces'
end
end
def switchport_mode
mode = config_get('interface', switchport_mode_lookup_string, @name)
return mode.nil? ? :disabled : IF_SWITCHPORT_MODE.key(mode)
rescue IndexError
# Assume this is an interface that doesn't support switchport.
# Do not raise exception since the providers will prefetch this property
# regardless of interface type.
return :disabled
end
def switchport_enable_and_mode(mode_set)
switchport_enable unless switchport
if :fabricpath == mode_set
fabricpath_feature_set(:enabled) unless :enabled == fabricpath_feature
elsif :fex_fabric == mode_set
fex_feature_set(:enabled) unless :enabled == fex_feature
end
config_set('interface', switchport_mode_lookup_string, @name, '',
IF_SWITCHPORT_MODE[mode_set])
rescue RuntimeError
raise "[#{@name}] switchport_mode is not supported on this interface"
end
def switchport_mode=(mode_set)
# no system default switchport
# int e1/1
# switchport
# switchport mode [access|trunk|fex|...]
fail ArgumentError unless IF_SWITCHPORT_MODE.keys.include? mode_set
case mode_set
when :disabled
if switchport
# Note: turn off switchport command, not switchport mode
config_set('interface', 'switchport', @name, 'no')
end
when :default
if :disabled == default_switchport_mode
config_set('interface', switchport_mode_lookup_string,
@name, 'no', '')
else
switchport_enable_and_mode(mode_set)
end
else
switchport_enable_and_mode(mode_set)
end # case
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_switchport_mode
return :disabled unless system_default_switchport
IF_SWITCHPORT_MODE.key(
config_get_default('interface', switchport_mode_lookup_string))
end
def switchport_trunk_allowed_vlan
config_get('interface', 'switchport_trunk_allowed_vlan', @name)
end
def switchport_trunk_allowed_vlan=(val)
if val.nil?
config_set(
'interface', 'switchport_trunk_allowed_vlan', @name, 'no', '')
else
config_set(
'interface', 'switchport_trunk_allowed_vlan', @name, '', val)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_switchport_trunk_allowed_vlan
config_get_default('interface', 'switchport_trunk_allowed_vlan')
end
def switchport_trunk_native_vlan
config_get('interface', 'switchport_trunk_native_vlan', @name)
end
def switchport_trunk_native_vlan=(val)
if val.nil?
config_set(
'interface', 'switchport_trunk_native_vlan', @name, 'no', '')
else
config_set(
'interface', 'switchport_trunk_native_vlan', @name, '', val)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
# vlan_mapping & vlan_mapping_enable
# Hardware & Cli Dependencies:
# - F3 linecards only
# - vdc
# - limit-resource
# - bridge-domain
# - feature vni
# - switchport mode
# Getter: Builds an array of vlan_mapping commands currently
# on the device.
# cli: switchport vlan mapping 2 200
# switchport vlan mapping 4 400
# array: [['2', '200'], ['4', '400']]
#
def default_vlan_mapping
config_get_default('interface', 'vlan_mapping')
end
def vlan_mapping
match = config_get('interface', 'vlan_mapping', @name)
match.each(&:compact!) unless match.nil?
match
end
def vlan_mapping=(should_list)
Vni.feature_vni_enable unless Vni.feature_vni_enabled
# Process a hash of vlan_mapping cmds from delta_add_remove().
# The vlan_mapping cli does not allow commands to be updated, they must
# first be removed if there is a change.
delta_hash = Utils.delta_add_remove(should_list, vlan_mapping,
:updates_not_allowed)
return if delta_hash.values.flatten.empty?
# Process :remove first to ensure "update" commands will not fail.
[:remove, :add].each do |action|
CiscoLogger.debug("vlan_mapping delta #{@get_args}\n"\
"#{action}: #{delta_hash[action]}")
delta_hash[action].each do |original, translated|
state = (action == :add) ? '' : 'no'
config_set('interface', 'vlan_mapping', @name,
state, original, translated)
end
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
# cli: switchport vlan mapping enable
def default_vlan_mapping_enable
config_get_default('interface', 'vlan_mapping_enable')
end
def vlan_mapping_enable
config_get('interface', 'vlan_mapping_enable', @name)
end
def vlan_mapping_enable=(state)
config_set('interface', 'vlan_mapping_enable', @name,
state ? '' : 'no')
end
def default_switchport_trunk_native_vlan
config_get_default('interface', 'switchport_trunk_native_vlan')
end
def system_default_switchport
# This command is a user-configurable system default.
config_get('interface', 'system_default_switchport')
end
def system_default_switchport_shutdown
# This command is a user-configurable system default.
config_get('interface', 'system_default_switchport_shutdown')
end
def system_default_svi_autostate
# This command is a user-configurable system default.
config_get('interface', 'system_default_svi_autostate')
end
def switchport_vtp_mode_capable?
!switchport_mode.to_s.match(/(access|trunk)/).nil?
end
def switchport_vtp
return false unless switchport_vtp_mode_capable?
config_get('interface', 'vtp', @name)
end
def switchport_vtp=(vtp_set)
return false unless switchport_vtp_mode_capable?
no_cmd = (vtp_set) ? '' : 'no'
config_set('interface', 'vtp', @name, no_cmd)
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def svi_cmd_allowed?(cmd)
fail "[#{@name}] Invalid interface type for command [#{cmd}]" unless
@name[/vlan/i]
end
# svi_autostate is exclusive to svi interfaces
def svi_autostate
return nil unless @name[/^vlan/i]
config_get('interface', 'svi_autostate', @name)
end
def svi_autostate=(val)
check_switchport_disabled
svi_cmd_allowed?('autostate')
config_set('interface', 'svi_autostate', @name, val ? '' : 'no')
end
def default_svi_autostate
system_default_svi_autostate
end
def feature_vlan?
config_get('interface', 'feature_vlan')
end
def feature_vlan_set(val)
return if feature_vlan? == val
config_set('interface', 'feature_vlan', val ? '' : 'no')
end
# svi_management is exclusive to svi interfaces
def svi_management
return nil unless @name[/^vlan/i]
config_get('interface', 'svi_management', @name)
end
def svi_management=(val)
check_switchport_disabled
svi_cmd_allowed?('management')
config_set('interface', 'svi_management', @name, val ? '' : 'no')
end
def default_svi_management
config_get_default('interface', 'svi_management')
end
def default_switchport_vtp
config_get_default('interface', 'vtp')
end
def switchport_vtp_feature?
config_get('vtp', 'feature')
end
def check_switchport_disabled
fail "#{caller[0][/`.*'/][1..-2]} cannot be set unless switchport mode" \
' is disabled' unless switchport_mode == :disabled
end
def vrf
config_get('interface', 'vrf', @name)
end
def vrf=(vrf)
fail TypeError unless vrf.is_a?(String)
if vrf.empty?
config_set('interface', 'vrf', @name, 'no', '')
else
config_set('interface', 'vrf', @name, '', vrf)
end
rescue Cisco::CliError => e
raise "[#{@name}] '#{e.command}' : #{e.clierror}"
end
def default_vrf
config_get_default('interface', 'vrf')
end
end # Class
end # Module
|
module CoffeeAssetPaths
VERSION = "1.0.1"
end
version
module CoffeeAssetPaths
VERSION = "1.0.2"
end
|
require_relative 'api_runner'
module Unipept::Commands
class Taxa2Tree < ApiRunner
def initialize(args, opts, cmd)
super
# JSON is the default format for this command
args[:format] = 'json' unless args[:format]
unless %w[url html json].include? args[:format]
warn "Format #{args[:format]} is not supported by taxa2tree. Use html, url or json (default)."
exit 1
end
if options[:format] == 'html'
# Overwrite the URL for this command, since it's possible that it uses HTML generated by the server.
@url = "#{@host}/api/v1/#{cmd.name}.html"
elsif args[:format] == 'url'
@link = true
end
end
def required_fields
['taxon_id']
end
def batch_size
return arguments.length unless arguments.empty?
return File.foreach(options[:input]).inject(0) { |c, _| c + 1 } if options[:input]
@stdin_contents = $stdin.readlines
@stdin_contents.length
end
def input_iterator
return arguments.each unless arguments.empty?
return IO.foreach(options[:input]) if options[:input]
@stdin_contents.each
end
protected
def filter_result(json_response)
# We do not filter here, since select is not supported by the taxa2tree-command
[JSON[json_response]] rescue []
end
def construct_request_body(input)
if input.empty? && input[0].include?(',')
data = input.map do |item|
splitted = item.rstrip.split ','
splitted[1] = splitted[1].to_i
splitted
end
{
counts: Hash[data],
link: @link
}
else
{
input: input,
link: @link
}
end
end
end
end
Fix HTML output of taxa2tree
require_relative 'api_runner'
module Unipept::Commands
class Taxa2Tree < ApiRunner
def initialize(args, opts, cmd)
super
# JSON is the default format for this command
args[:format] = 'json' unless args[:format]
unless %w[url html json].include? args[:format]
warn "Format #{args[:format]} is not supported by taxa2tree. Use html, url or json (default)."
exit 1
end
if options[:format] == 'html'
# Overwrite the URL for this command, since it's possible that it uses HTML generated by the server.
@url = "#{@host}/api/v1/#{cmd.name}.html"
elsif args[:format] == 'url'
@link = true
end
end
def required_fields
['taxon_id']
end
def batch_size
return arguments.length unless arguments.empty?
return File.foreach(options[:input]).inject(0) { |c, _| c + 1 } if options[:input]
@stdin_contents = $stdin.readlines
@stdin_contents.length
end
def input_iterator
return arguments.each unless arguments.empty?
return IO.foreach(options[:input]) if options[:input]
@stdin_contents.each
end
protected
def filter_result(response)
return response if response.start_with?('<!DOCTYPE')
# We do not filter here, since select is not supported by the taxa2tree-command
[JSON[response]] rescue []
end
def construct_request_body(input)
if input.empty? && input[0].include?(',')
data = input.map do |item|
splitted = item.rstrip.split ','
splitted[1] = splitted[1].to_i
splitted
end
{
counts: Hash[data],
link: @link
}
else
{
input: input,
link: @link
}
end
end
end
end
|
require 'logger'
module Concurrent
module Concern
# Include where logging is needed
#
# @!visibility private
module Logging
include Logger::Severity
# Logs through {Configuration#logger}, it can be overridden by setting @logger
# @param [Integer] level one of Logger::Severity constants
# @param [String] progname e.g. a path of an Actor
# @param [String, nil] message when nil block is used to generate the message
# @yieldreturn [String] a message
def log(level, progname, message = nil, &block)
#NOTE: Cannot require 'concurrent/configuration' above due to circular references.
# Assume that the gem has been initialized if we've gotten this far.
(@logger || Concurrent.global_logger).call level, progname, message, &block
rescue => error
$stderr.puts "`Concurrent.configuration.logger` failed to log #{[level, progname, message, block]}\n" +
"#{error.message} (#{error.class})\n#{error.backtrace.join "\n"}"
end
end
end
end
Fixed yardoc errors.
require 'logger'
module Concurrent
module Concern
# Include where logging is needed
#
# @!visibility private
module Logging
include Logger::Severity
# Logs through {Concurrent.global_logger}, it can be overridden by setting @logger
# @param [Integer] level one of Logger::Severity constants
# @param [String] progname e.g. a path of an Actor
# @param [String, nil] message when nil block is used to generate the message
# @yieldreturn [String] a message
def log(level, progname, message = nil, &block)
#NOTE: Cannot require 'concurrent/configuration' above due to circular references.
# Assume that the gem has been initialized if we've gotten this far.
(@logger || Concurrent.global_logger).call level, progname, message, &block
rescue => error
$stderr.puts "`Concurrent.configuration.logger` failed to log #{[level, progname, message, block]}\n" +
"#{error.message} (#{error.class})\n#{error.backtrace.join "\n"}"
end
end
end
end
|
module CucumberAnalytics
VERSION = '1.0.0'
end
Version bump
Gem version bumped for v1.1.0 release.
module CucumberAnalytics
VERSION = '1.1.0'
end
|
# rubocop:disable Metrics/LineLength
module DatastaxRails
module Schema
# Methods relating to maintaining Solr indexes and configuration
module Solr
# Generates a SOLR schema file. The default schema template included with DSR can handle
# most normal circumstances for indexing. When a customized template is required, it can
# be placed in the application's config/solr directory using the naming convention
# column_family-schema.xml.erb. It will be processed as a normal ERB file. See the DSR version
# for examples.
def generate_solr_schema(model)
@fields = []
@copy_fields = []
@fulltext_fields = []
if model <= WideStorageModel
@primary_key = "(#{model.primary_key},#{model.cluster_by})"
else
@primary_key = model.primary_key
end
@custom_fields = ''
@columns = model.attribute_definitions.values
@fields.sort! { |a, b| a[:name] <=> b[:name] }
@copy_fields.sort! { |a, b| a[:source] <=> b[:source] }
@fulltext_fields.sort!
if Rails.root.join('config', 'solr', "#{model.column_family}-schema.xml.erb").exist?
say "Using custom schema for #{model.name}", :subitem
ERB.new(Rails.root.join('config', 'solr', "#{model.column_family}-schema.xml.erb").read, 0, '>')
.result(binding)
elsif Rails.root.join('config', 'solr', 'application-schema.xml.erb').exist?
say 'Using application default schema', :subitem
ERB.new(Rails.root.join('config', 'solr', 'application-schema.xml.erb').read, 0, '>').result(binding)
else
ERB.new(File.read(File.join(File.dirname(__FILE__), '..', '..', '..', 'config', 'schema.xml.erb')), 0, '>')
.result(binding)
end
end
# Sends a command to Solr instructing it to reindex the data. The data is reindexed in the background,
# and the new index is swapped in once it is finished.
def reindex_solr(model, destructive = false)
reload_solr_core(model, true, destructive)
end
# Sends a command to Solr instructing it to reload the core for a given model.
# This is for making sure that solr knows the model changes when it comes to multiple
# datacenter deployments.
def reload_solr_core(model, reindex = false, destructive = false)
url = "#{DatastaxRails::Base.solr_base_url}/admin/cores?action=RELOAD&name=#{DatastaxRails::Base.config[:keyspace]}.#{model.column_family}&reindex=#{reindex}&deleteAll=#{destructive}"
say "Posting reindex command to '#{url}'", :subitem if reindex.eql?(true)
`curl -s -X POST '#{url}' -H 'Content-type:text/xml; charset=utf-8'`
say 'Reindexing will run in the background', :subitem if reindex.eql?(true)
end
# Creates the initial Solr Core. This is required once the first time a Solr schema is uploaded.
# It will cause the data to be indexed in the background.
#
# @param model the DatastaxRails model to create a Solr core for
def create_solr_core(model)
url = "#{DatastaxRails::Base.solr_base_url}/admin/cores?action=CREATE&name=#{DatastaxRails::Base.config[:keyspace]}.#{model.column_family}"
say "Posting create command to '#{url}'", :subitem
# Shells out to curl; the response body is discarded (fire-and-forget).
`curl -s -X POST '#{url}' -H 'Content-type:text/xml; charset=utf-8'`
end
# Uploads the necessary configuration files for solr to function
# The solrconfig and stopwords files can be overridden on a per-model basis
# by creating a file called config/solr/column_family-solrconfig.xml or
# config/solr/column_family-stopwords.txt
#
# Files are only posted when forced, when the column family is new, or when
# their SHA1 digest differs from the digest recorded in the schema migrations
# table. Returns the number of files that were (re)uploaded.
#
# @param model   the DatastaxRails model whose Solr configuration to upload
# @param force   when true, upload every file regardless of digest changes
# @param reindex when true, trigger a background reindex after a schema change
#
# TODO: find a way to upload arbitrary files automatically (e.g., additional stopwords lists)
# TODO: Simplify this method
def upload_solr_configuration(model, force = false, reindex = true) # rubocop:disable all
  reload_solr_core(model, false, false)
  count = 0
  # These instance variables are interpolated into the ERB templates below.
  @live_indexing = model.live_indexing
  @solr_commit_time = model.solr_commit_time || (@live_indexing ? '1000' : '5000')
  @ram_buffer_size = model.ram_buffer_size || (@live_indexing ? '2000' : '100')
  @lucene_match_version = model.lucene_match_version
  # Per-model override in config/solr beats the gem's bundled solrconfig template.
  if Rails.root.join('config', 'solr', "#{model.column_family}-solrconfig.xml").exist?
    say 'Using custom solrconfig file', :subitem
    solrconfig = ERB.new(Rails.root.join('config', 'solr', "#{model.column_family}-solrconfig.xml").read).result(binding)
  else
    solrconfig = ERB.new(File.read(File.join(File.dirname(__FILE__), '..', '..', '..', 'config', 'solrconfig.xml.erb'))).result(binding)
  end
  if Rails.root.join('config', 'solr', "#{model.column_family}-stopwords.txt").exist?
    say 'Using custom stopwords file', :subitem
    stopwords = Rails.root.join('config', 'solr', "#{model.column_family}-stopwords.txt").read
  else
    stopwords = File.read(File.join(File.dirname(__FILE__), '..', '..', '..', 'config', 'stopwords.txt'))
  end
  schema = generate_solr_schema(model)
  solrconfig_digest = Digest::SHA1.hexdigest(solrconfig)
  stopwords_digest = Digest::SHA1.hexdigest(stopwords)
  schema_digest = Digest::SHA1.hexdigest(schema)
  # A column family without a solr_query column has never had a core created.
  newcf = !column_exists?(model.column_family, 'solr_query')
  force ||= newcf
  results = DatastaxRails::Cql::Select.new(SchemaMigration, ['*']).conditions(cf: model.column_family).execute
  sm_digests = results.first || {}
  # NOTE(review): @keyspace is set outside this method (presumably by the
  # including schema class); other URLs use DatastaxRails::Base.config[:keyspace].
  solr_url = "#{DatastaxRails::Base.solr_base_url}/resource/#{@keyspace}.#{model.column_family}"
  uri = URI.parse(solr_url)
  http = Net::HTTP.new(uri.host, uri.port)
  if uri.scheme == 'https'
    http.use_ssl = true
    http.cert = OpenSSL::X509::Certificate.new(Rails.root.join('config', 'datastax_rails.crt').read)
    http.key = OpenSSL::PKey::RSA.new(Rails.root.join('config', 'datastax_rails.key').read)
    http.ca_path = Rails.root.join('config', 'sade_ca.crt').to_s
    http.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end
  http.read_timeout = 300
  if force || solrconfig_digest != sm_digests['solrconfig']
    count += 1
    loop do
      say "Posting Solr Config file to '#{solr_url}/solrconfig.xml'", :subitem
      http.post(uri.path + '/solrconfig.xml', solrconfig, 'Content-type' => 'text/xml; charset=utf-8')
      if Rails.env.production?
        sleep(5)
        resp = http.get(uri.path + '/solrconfig.xml')
        # BUGFIX: `continue` is not a Ruby keyword and raised NoMethodError at
        # runtime; `next` restarts the loop so the upload is retried until OK.
        next unless resp.message == 'OK'
      end
      break
    end
    DatastaxRails::Cql::Update.new(SchemaMigration, cf: model.column_family).columns(solrconfig: solrconfig_digest).execute
  end
  if force || stopwords_digest != sm_digests['stopwords']
    count += 1
    loop do
      say "Posting Solr Stopwords file to '#{solr_url}/stopwords.txt'", :subitem
      http.post(uri.path + '/stopwords.txt', stopwords, 'Content-type' => 'text/xml; charset=utf-8')
      if Rails.env.production?
        sleep(5)
        resp = http.get(uri.path + '/stopwords.txt')
        next unless resp.message == 'OK' # BUGFIX: was `continue` (NoMethodError)
      end
      break
    end
    DatastaxRails::Cql::Update.new(SchemaMigration, cf: model.column_family).columns(stopwords: stopwords_digest).execute
  end
  if force || schema_digest != sm_digests['digest']
    count += 1
    loop do
      say "Posting Solr Schema file to '#{solr_url}/schema.xml'", :subitem
      http.post(uri.path + '/schema.xml', schema, 'Content-type' => 'text/xml; charset=utf-8')
      if Rails.env.production?
        sleep(5)
        resp = http.get(uri.path + '/schema.xml')
        next unless resp.message == 'OK' # BUGFIX: was `continue` (NoMethodError)
      end
      break
    end
    DatastaxRails::Cql::Update.new(SchemaMigration, cf: model.column_family).columns(digest: schema_digest).execute
    # A brand-new column family needs its core created; an existing one only
    # needs a reindex when the schema actually changed.
    if newcf
      create_solr_core(model)
    elsif reindex
      reindex_solr(model)
    end
  end
  count
end
end
end
end
Run reindex post command in the background
# rubocop:disable Metrics/LineLength
module DatastaxRails
  module Schema
    # Methods relating to maintaining Solr indexes and configuration
    module Solr
      # Generates a SOLR schema file. The default schema template included with DSR can handle
      # most normal circumstances for indexing. When a customized template is required, it can
      # be placed in the application's config/solr directory using the naming convention
      # column_family-schema.xml.erb. It will be processed as a normal ERB file. See the DSR version
      # for examples.
      #
      # @param model the DatastaxRails model to generate a schema for
      # @return [String] the rendered schema XML
      def generate_solr_schema(model)
        @fields = []
        @copy_fields = []
        @fulltext_fields = []
        # Wide-storage models use a compound primary key (partition key + cluster column).
        if model <= WideStorageModel
          @primary_key = "(#{model.primary_key},#{model.cluster_by})"
        else
          @primary_key = model.primary_key
        end
        @custom_fields = ''
        @columns = model.attribute_definitions.values
        @fields.sort! { |a, b| a[:name] <=> b[:name] }
        @copy_fields.sort! { |a, b| a[:source] <=> b[:source] }
        @fulltext_fields.sort!
        # Template lookup order: per-model override, app-wide override, gem default.
        if Rails.root.join('config', 'solr', "#{model.column_family}-schema.xml.erb").exist?
          say "Using custom schema for #{model.name}", :subitem
          ERB.new(Rails.root.join('config', 'solr', "#{model.column_family}-schema.xml.erb").read, 0, '>')
             .result(binding)
        elsif Rails.root.join('config', 'solr', 'application-schema.xml.erb').exist?
          say 'Using application default schema', :subitem
          ERB.new(Rails.root.join('config', 'solr', 'application-schema.xml.erb').read, 0, '>').result(binding)
        else
          ERB.new(File.read(File.join(File.dirname(__FILE__), '..', '..', '..', 'config', 'schema.xml.erb')), 0, '>')
             .result(binding)
        end
      end

      # Sends a command to Solr instructing it to reindex the data. The data is reindexed in the background,
      # and the new index is swapped in once it is finished.
      #
      # @param model       the DatastaxRails model whose data should be reindexed
      # @param destructive when true, existing index data is deleted before reindexing
      def reindex_solr(model, destructive = false)
        reload_solr_core(model, true, destructive)
      end

      # Sends a command to Solr instructing it to reload the core for a given model.
      # This is for making sure that solr knows the model changes when it comes to multiple
      # datacenter deployments.
      #
      # @param model       the DatastaxRails model whose Solr core should be reloaded
      # @param reindex     when true, Solr also reindexes the core's data
      # @param destructive when true (and reindexing), existing index data is deleted first
      def reload_solr_core(model, reindex = false, destructive = false)
        url = "#{DatastaxRails::Base.solr_base_url}/admin/cores?action=RELOAD&name=#{DatastaxRails::Base.config[:keyspace]}.#{model.column_family}&reindex=#{reindex}&deleteAll=#{destructive}"
        say "Posting reindex command to '#{url}'", :subitem if reindex.eql?(true)
        # The trailing '&' backgrounds curl so a long reindex does not block us.
        `curl -s -X POST '#{url}' -H 'Content-type:text/xml; charset=utf-8' &`
        say 'Reindexing will run in the background', :subitem if reindex.eql?(true)
      end

      # Creates the initial Solr Core. This is required once the first time a Solr schema is uploaded.
      # It will cause the data to be indexed in the background.
      #
      # @param model the DatastaxRails model to create a Solr core for
      def create_solr_core(model)
        url = "#{DatastaxRails::Base.solr_base_url}/admin/cores?action=CREATE&name=#{DatastaxRails::Base.config[:keyspace]}.#{model.column_family}"
        say "Posting create command to '#{url}'", :subitem
        `curl -s -X POST '#{url}' -H 'Content-type:text/xml; charset=utf-8'`
      end

      # Uploads the necessary configuration files for solr to function
      # The solrconfig and stopwords files can be overridden on a per-model basis
      # by creating a file called config/solr/column_family-solrconfig.xml or
      # config/solr/column_family-stopwords.txt
      #
      # Files are only posted when forced, when the column family is new, or when
      # their SHA1 digest differs from the digest recorded in the schema migrations
      # table. Returns the number of files that were (re)uploaded.
      #
      # @param model   the DatastaxRails model whose Solr configuration to upload
      # @param force   when true, upload every file regardless of digest changes
      # @param reindex when true, trigger a background reindex after a schema change
      #
      # TODO: find a way to upload arbitrary files automatically (e.g., additional stopwords lists)
      # TODO: Simplify this method
      def upload_solr_configuration(model, force = false, reindex = true) # rubocop:disable all
        reload_solr_core(model, false, false)
        count = 0
        # Interpolated into the ERB templates below.
        @live_indexing = model.live_indexing
        @solr_commit_time = model.solr_commit_time || (@live_indexing ? '1000' : '5000')
        @ram_buffer_size = model.ram_buffer_size || (@live_indexing ? '2000' : '100')
        @lucene_match_version = model.lucene_match_version
        if Rails.root.join('config', 'solr', "#{model.column_family}-solrconfig.xml").exist?
          say 'Using custom solrconfig file', :subitem
          solrconfig = ERB.new(Rails.root.join('config', 'solr', "#{model.column_family}-solrconfig.xml").read).result(binding)
        else
          solrconfig = ERB.new(File.read(File.join(File.dirname(__FILE__), '..', '..', '..', 'config', 'solrconfig.xml.erb'))).result(binding)
        end
        if Rails.root.join('config', 'solr', "#{model.column_family}-stopwords.txt").exist?
          say 'Using custom stopwords file', :subitem
          stopwords = Rails.root.join('config', 'solr', "#{model.column_family}-stopwords.txt").read
        else
          stopwords = File.read(File.join(File.dirname(__FILE__), '..', '..', '..', 'config', 'stopwords.txt'))
        end
        schema = generate_solr_schema(model)
        solrconfig_digest = Digest::SHA1.hexdigest(solrconfig)
        stopwords_digest = Digest::SHA1.hexdigest(stopwords)
        schema_digest = Digest::SHA1.hexdigest(schema)
        # A column family without a solr_query column has never had a core created.
        newcf = !column_exists?(model.column_family, 'solr_query')
        force ||= newcf
        results = DatastaxRails::Cql::Select.new(SchemaMigration, ['*']).conditions(cf: model.column_family).execute
        sm_digests = results.first || {}
        # NOTE(review): @keyspace is set outside this method; other URLs use
        # DatastaxRails::Base.config[:keyspace].
        solr_url = "#{DatastaxRails::Base.solr_base_url}/resource/#{@keyspace}.#{model.column_family}"
        uri = URI.parse(solr_url)
        http = Net::HTTP.new(uri.host, uri.port)
        if uri.scheme == 'https'
          http.use_ssl = true
          http.cert = OpenSSL::X509::Certificate.new(Rails.root.join('config', 'datastax_rails.crt').read)
          http.key = OpenSSL::PKey::RSA.new(Rails.root.join('config', 'datastax_rails.key').read)
          http.ca_path = Rails.root.join('config', 'sade_ca.crt').to_s
          http.verify_mode = OpenSSL::SSL::VERIFY_NONE
        end
        http.read_timeout = 300
        if force || solrconfig_digest != sm_digests['solrconfig']
          count += 1
          loop do
            say "Posting Solr Config file to '#{solr_url}/solrconfig.xml'", :subitem
            http.post(uri.path + '/solrconfig.xml', solrconfig, 'Content-type' => 'text/xml; charset=utf-8')
            if Rails.env.production?
              sleep(5)
              resp = http.get(uri.path + '/solrconfig.xml')
              # BUGFIX: `continue` is not a Ruby keyword (raised NoMethodError);
              # `next` restarts the loop so the upload is retried until OK.
              next unless resp.message == 'OK'
            end
            break
          end
          DatastaxRails::Cql::Update.new(SchemaMigration, cf: model.column_family).columns(solrconfig: solrconfig_digest).execute
        end
        if force || stopwords_digest != sm_digests['stopwords']
          count += 1
          loop do
            say "Posting Solr Stopwords file to '#{solr_url}/stopwords.txt'", :subitem
            http.post(uri.path + '/stopwords.txt', stopwords, 'Content-type' => 'text/xml; charset=utf-8')
            if Rails.env.production?
              sleep(5)
              resp = http.get(uri.path + '/stopwords.txt')
              next unless resp.message == 'OK' # BUGFIX: was `continue`
            end
            break
          end
          DatastaxRails::Cql::Update.new(SchemaMigration, cf: model.column_family).columns(stopwords: stopwords_digest).execute
        end
        if force || schema_digest != sm_digests['digest']
          count += 1
          loop do
            say "Posting Solr Schema file to '#{solr_url}/schema.xml'", :subitem
            http.post(uri.path + '/schema.xml', schema, 'Content-type' => 'text/xml; charset=utf-8')
            if Rails.env.production?
              sleep(5)
              resp = http.get(uri.path + '/schema.xml')
              next unless resp.message == 'OK' # BUGFIX: was `continue`
            end
            break
          end
          DatastaxRails::Cql::Update.new(SchemaMigration, cf: model.column_family).columns(digest: schema_digest).execute
          if newcf
            create_solr_core(model)
          elsif reindex
            reindex_solr(model)
          end
        end
        count
      end
    end
  end
end
|
# Copyright 2006-2008 by Mike Bailey. All rights reserved.
unless Capistrano::Configuration.respond_to?(:instance)
abort "deprec2 requires Capistrano 2"
end
require "#{File.dirname(__FILE__)}/recipes/canonical"
require "#{File.dirname(__FILE__)}/recipes/deprec"
require "#{File.dirname(__FILE__)}/recipes/deprecated"
require "#{File.dirname(__FILE__)}/recipes/chef"
require "#{File.dirname(__FILE__)}/recipes/app/mongrel"
require "#{File.dirname(__FILE__)}/recipes/app/passenger"
require "#{File.dirname(__FILE__)}/recipes/db/mysql"
require "#{File.dirname(__FILE__)}/recipes/db/postgresql"
require "#{File.dirname(__FILE__)}/recipes/db/sqlite"
require "#{File.dirname(__FILE__)}/recipes/db/couchdb"
require "#{File.dirname(__FILE__)}/recipes/ruby/mri"
require "#{File.dirname(__FILE__)}/recipes/ruby/ree"
require "#{File.dirname(__FILE__)}/recipes/web/apache"
require "#{File.dirname(__FILE__)}/recipes/web/nginx"
require "#{File.dirname(__FILE__)}/recipes/git"
require "#{File.dirname(__FILE__)}/recipes/gitosis"
require "#{File.dirname(__FILE__)}/recipes/svn"
require "#{File.dirname(__FILE__)}/recipes/integrity"
require "#{File.dirname(__FILE__)}/recipes/users"
require "#{File.dirname(__FILE__)}/recipes/ssh"
require "#{File.dirname(__FILE__)}/recipes/php"
# require "#{File.dirname(__FILE__)}/recipes/scm/trac"
require "#{File.dirname(__FILE__)}/recipes/aoe"
require "#{File.dirname(__FILE__)}/recipes/xen"
require "#{File.dirname(__FILE__)}/recipes/xentools"
require "#{File.dirname(__FILE__)}/recipes/ddclient"
require "#{File.dirname(__FILE__)}/recipes/ntp"
require "#{File.dirname(__FILE__)}/recipes/logrotate"
require "#{File.dirname(__FILE__)}/recipes/ssl"
require "#{File.dirname(__FILE__)}/recipes/postfix"
require "#{File.dirname(__FILE__)}/recipes/memcache"
require "#{File.dirname(__FILE__)}/recipes/monit"
require "#{File.dirname(__FILE__)}/recipes/network"
require "#{File.dirname(__FILE__)}/recipes/nagios"
require "#{File.dirname(__FILE__)}/recipes/heartbeat"
require "#{File.dirname(__FILE__)}/recipes/ubuntu"
require "#{File.dirname(__FILE__)}/recipes/lvm"
require "#{File.dirname(__FILE__)}/recipes/vnstat"
require "#{File.dirname(__FILE__)}/recipes/sphinx"
require "#{File.dirname(__FILE__)}/recipes/utils"
require "#{File.dirname(__FILE__)}/recipes/apt_mirror"
# require "#{File.dirname(__FILE__)}/recipes/wordpress" Not working
require "#{File.dirname(__FILE__)}/recipes/wpmu"
require "#{File.dirname(__FILE__)}/recipes/ar_sendmail"
require "#{File.dirname(__FILE__)}/recipes/starling"
Removed apt_mirror
# Copyright 2006-2008 by Mike Bailey. All rights reserved.
unless Capistrano::Configuration.respond_to?(:instance)
abort "deprec2 requires Capistrano 2"
end
require "#{File.dirname(__FILE__)}/recipes/canonical"
require "#{File.dirname(__FILE__)}/recipes/deprec"
require "#{File.dirname(__FILE__)}/recipes/deprecated"
require "#{File.dirname(__FILE__)}/recipes/chef"
require "#{File.dirname(__FILE__)}/recipes/app/mongrel"
require "#{File.dirname(__FILE__)}/recipes/app/passenger"
require "#{File.dirname(__FILE__)}/recipes/db/mysql"
require "#{File.dirname(__FILE__)}/recipes/db/postgresql"
require "#{File.dirname(__FILE__)}/recipes/db/sqlite"
require "#{File.dirname(__FILE__)}/recipes/db/couchdb"
require "#{File.dirname(__FILE__)}/recipes/ruby/mri"
require "#{File.dirname(__FILE__)}/recipes/ruby/ree"
require "#{File.dirname(__FILE__)}/recipes/web/apache"
require "#{File.dirname(__FILE__)}/recipes/web/nginx"
require "#{File.dirname(__FILE__)}/recipes/git"
require "#{File.dirname(__FILE__)}/recipes/gitosis"
require "#{File.dirname(__FILE__)}/recipes/svn"
require "#{File.dirname(__FILE__)}/recipes/integrity"
require "#{File.dirname(__FILE__)}/recipes/users"
require "#{File.dirname(__FILE__)}/recipes/ssh"
require "#{File.dirname(__FILE__)}/recipes/php"
# require "#{File.dirname(__FILE__)}/recipes/scm/trac"
require "#{File.dirname(__FILE__)}/recipes/aoe"
require "#{File.dirname(__FILE__)}/recipes/xen"
require "#{File.dirname(__FILE__)}/recipes/xentools"
require "#{File.dirname(__FILE__)}/recipes/ddclient"
require "#{File.dirname(__FILE__)}/recipes/ntp"
require "#{File.dirname(__FILE__)}/recipes/logrotate"
require "#{File.dirname(__FILE__)}/recipes/ssl"
require "#{File.dirname(__FILE__)}/recipes/postfix"
require "#{File.dirname(__FILE__)}/recipes/memcache"
require "#{File.dirname(__FILE__)}/recipes/monit"
require "#{File.dirname(__FILE__)}/recipes/network"
require "#{File.dirname(__FILE__)}/recipes/nagios"
require "#{File.dirname(__FILE__)}/recipes/heartbeat"
require "#{File.dirname(__FILE__)}/recipes/ubuntu"
require "#{File.dirname(__FILE__)}/recipes/lvm"
require "#{File.dirname(__FILE__)}/recipes/vnstat"
require "#{File.dirname(__FILE__)}/recipes/sphinx"
require "#{File.dirname(__FILE__)}/recipes/utils"
# require "#{File.dirname(__FILE__)}/recipes/apt_mirror"
# require "#{File.dirname(__FILE__)}/recipes/wordpress" Not working
require "#{File.dirname(__FILE__)}/recipes/wpmu"
require "#{File.dirname(__FILE__)}/recipes/ar_sendmail"
require "#{File.dirname(__FILE__)}/recipes/starling"
|
module DirectiveRecord
  module Query
    # Composes raw SQL SELECT statements from a symbolic options hash
    # (:select, :where, :group_by, :order_by, :limit, :offset, :aggregates,
    # :numerize_aliases) scoped to a model class. Subclasses supply the
    # dialect specifics: path/aggregate delimiters, aggregate SQL, alias
    # quoting and GROUP BY ALL support.
    class SQL
      # @param base the model class the query is built for
      def initialize(base)
        @base = base
      end

      # Builds the SQL string. Bare arguments become the :select option;
      # a trailing hash supplies the remaining options.
      def to_sql(*args)
        options = extract_options(args)
        validate_options! options
        prepare_options! options
        normalize_options! options
        parse_joins! options
        prepend_base_alias! options
        finalize_options! options
        compose_sql options
      end

      private

      # Delimiter for flattening association paths into table aliases (dialect-specific).
      def path_delimiter
        raise NotImplementedError
      end

      # Delimiter between aggregate method and path in aliases (dialect-specific).
      def aggregate_delimiter
        raise NotImplementedError
      end

      # SQL for an aggregated select expression (dialect-specific).
      def select_aggregate_sql(method, path)
        raise NotImplementedError
      end

      # Alias used for an aggregated select expression, e.g. "sum:amount".
      def select_aggregate_sql_alias(method, path)
        quote_alias("#{method}#{aggregate_delimiter}#{path}")
      end

      # SQL emitted when :group_by => :all is requested (dialect-specific).
      def group_by_all_sql
        raise NotImplementedError
      end

      # The model class this query is scoped to.
      def base
        @base
      end

      # Short table alias built from the first letter of each table-name segment.
      def base_alias
        @base_alias ||= quote_alias(base.table_name.split("_").collect{|x| x[0]}.join(""))
      end

      # Hook for dialects that must quote aliases; identity by default.
      def quote_alias(sql_alias)
        sql_alias
      end

      # Deep-copies the trailing options hash; bare args become :select.
      def extract_options(args)
        options = args.extract_options!.deep_dup
        options.reverse_merge! :select => (args.empty? ? "*" : args)
        options
      end

      # Rejects unknown option keys early.
      def validate_options!(options)
        options.assert_valid_keys :select, :where, :group_by, :order_by, :limit, :offset, :aggregates, :numerize_aliases
      end

      # Subclass hook executed before normalization.
      def prepare_options!(options); end

      # Normalizes all options in place and drops blank ones.
      def normalize_options!(options)
        normalize_select!(options)
        normalize_where!(options)
        normalize_group_by!(options)
        normalize_order_by!(options)
        options.reject!{|k, v| v.blank?}
      end

      # Expands :select into SQL expressions, recording decimal scales,
      # aggregate aliases and (optionally) numbered aliases along the way.
      def normalize_select!(options)
        select = to_array! options, :select
        select.uniq!
        options[:scales] = select.inject({}) do |hash, sql|
          if scale = column_for(sql).try(:scale)
            hash[sql] = scale
          end
          hash
        end
        options[:aggregated] = {}
        options[:aliases] = {}
        options[:select] = options[:select].inject([]) do |array, path|
          sql, sql_alias = ((path == ".*") ? "#{base_alias}.*" : path), nil
          if aggregate_method = (options[:aggregates] || {})[path]
            sql = select_aggregate_sql(aggregate_method, path)
            sql_alias = options[:aggregated][path] = select_aggregate_sql_alias(aggregate_method, path)
          end
          if scale = options[:scales][path]
            sql = "ROUND(#{sql}, #{scale})"
            sql_alias ||= quote_alias(path)
          end
          if options[:numerize_aliases]
            sql = sql.gsub(/ AS .*$/, "")
            sql_alias = options[:aliases][prepend_base_alias(sql_alias || sql)] = "c#{array.size + 1}"
          end
          array << [sql, sql_alias].compact.join(" AS ")
          array
        end
      end

      # Splits :where statements into WHERE vs HAVING. A statement belongs in
      # WHERE when its leading token resolves to an actual column; anything
      # else (aggregates, derived expressions) must be filtered with HAVING.
      def normalize_where!(options)
        regexp = /^\S+/
        # BUGFIX: previously partitioned on aggregate-alias membership only,
        # which sent derived non-column expressions into WHERE.
        where, having = (to_array!(options, :where) || []).partition do |statement|
          !column_for(statement.strip.match(regexp).to_s).nil?
        end
        unless (attrs = base.scope_attributes).blank?
          sql = base.send(:sanitize_sql_for_conditions, attrs, "").gsub(/``.`(\w+)`/) { $1 }
          where << sql
        end
        options[:where], options[:having] = where, having.collect do |statement|
          # BUGFIX: fall back to the original path — the old code substituted
          # nil (an empty string) for paths without an aggregated alias,
          # corrupting the HAVING clause.
          statement.strip.gsub(regexp){|path| options[:aggregated][path] || path}
        end
        [:where, :having].each do |key|
          value = options[key]
          options[key] = (value.collect{|x| "(#{x})"}.join(" AND ") unless value.empty?)
        end
      end

      # Replaces the :all sentinel with the dialect's GROUP BY ALL SQL.
      def normalize_group_by!(options)
        group_by = to_array! options, :group_by
        group_by.clear.push(group_by_all_sql) if group_by == [:all]
      end

      # Derives a default order from :group_by and rewrites each entry with
      # its aggregate SQL and ROUND(...) scale where applicable.
      def normalize_order_by!(options)
        options[:order_by] ||= (options[:group_by] || []).collect do |path|
          direction = (path.to_s == "date") ? "DESC" : "ASC"
          "#{path} #{direction}"
        end
        to_array!(options, :order_by).collect! do |x|
          path, direction = x.split " "
          scale = options[:scales][path]
          select = begin
            if aggregate_method = (options[:aggregates] || {})[path]
              select_aggregate_sql(aggregate_method, path)
            else
              path
            end
          end
          "#{scale ? "ROUND(#{select}, #{scale})" : select} #{direction.upcase if direction}"
        end
      end

      # Wraps a scalar option value in an array, in place.
      def to_array!(options, key)
        if value = options[key]
          options[key] = [value].flatten
        end
      end

      # Resolves an association path like "orders.total" to a column object,
      # or nil when the path does not map to a column.
      def column_for(path)
        segments = path.split(".")
        column = segments.pop
        model = segments.inject(base) do |klass, association|
          klass.reflect_on_association(association.to_sym).klass
        end
        model.columns_hash[column]
      rescue
        nil
      end

      # Translates the association paths referenced in the options into
      # LEFT JOIN clauses, handling has-many-through bridge tables.
      def parse_joins!(options)
        return if (paths = extract_paths(options)).empty?
        regexp = /INNER JOIN `([^`]+)`( `[^`]+`)? ON `[^`]+`.`([^`]+)` = `[^`]+`.`([^`]+)`/
        options[:joins] = paths.collect do |path|
          joins, associations = [], []
          path.split(".").inject(base) do |klass, association|
            association = association.to_sym
            table_joins = klass.joins(association).to_sql.scan regexp
            concerns_bridge_table = table_joins.size == 2
            bridge_table_as = nil
            table_joins.each_with_index do |table_join, index|
              concerns_bridge_table_join = concerns_bridge_table && index == 0
              join_table, possible_alias, join_table_column, table_column = table_join
              table_as = (klass == base) ? base_alias : quote_alias(associations.join(path_delimiter))
              join_table_as = quote_alias((associations + [association]).join(path_delimiter))
              if concerns_bridge_table
                if concerns_bridge_table_join
                  join_table_as = bridge_table_as = quote_alias("#{(associations + [association]).join(path_delimiter)}_bridge_table")
                else
                  table_as = bridge_table_as
                end
              end
              joins.push "LEFT JOIN #{join_table} #{join_table_as} ON #{join_table_as}.#{join_table_column} = #{table_as}.#{table_column}"
            end
            associations << association
            klass.reflect_on_association(association).klass
          end
          joins
        end.flatten.uniq.join("\n")
      end

      # Collects the distinct association paths referenced by the relevant
      # options, ignoring quoted string literals.
      def extract_paths(options)
        options.inject([]) do |paths, (key, value)|
          if [:select, :where, :group_by, :having].include?(key)
            value = value.join " " if value.is_a?(Array)
            paths.concat value.gsub(/((?<![\\])['"])((?:.(?!(?<![\\])\1))*.?)\1/, " ").scan(/[a-zA-Z_]+\.[a-zA-Z_\.]+/).collect{|x| x.split(".")[0..-2].join "."}
          else
            paths
          end
        end.uniq
      end

      # Qualifies bare column references in every clause with the base alias.
      def prepend_base_alias!(options)
        [:select, :where, :having, :group_by, :order_by].each do |key|
          if value = options[key]
            options[key] = prepend_base_alias value, options[:aliases]
          end
        end
      end

      # Prefixes bare base-table columns with the base alias and collapses
      # association paths into their flattened table aliases.
      def prepend_base_alias(sql, aliases = {})
        columns = base.columns_hash.keys
        sql = sql.join ", " if sql.is_a?(Array)
        sql.gsub(/("[^"]*"|'[^']*'|[a-zA-Z_#{aggregate_delimiter}]+(\.[a-zA-Z_\*]+)*)/) do
          columns.include?($1) ? "#{base_alias}.#{$1}" : begin
            if (string = $1).match /^([a-zA-Z_\.]+)\.([a-zA-Z_\*]+)$/
              path, column = $1, $2
              "#{quote_alias path.gsub(".", path_delimiter)}.#{column}"
            else
              string
            end
          end
        end
      end

      # Subclass hook executed after normalization, before composing SQL.
      def finalize_options!(options); end

      # Assembles the final statement, one clause per line.
      def compose_sql(options)
        sql = ["SELECT #{options[:select]}", "FROM #{base.table_name} #{base_alias}", options[:joins]].compact
        [:where, :group_by, :having, :order_by, :limit, :offset].each do |key|
          if value = options[key]
            keyword = key.to_s.upcase.gsub("_", " ")
            sql << "#{keyword} #{value}"
          end
        end
        sql.join "\n"
      end
    end
  end
end
Improved determining whether to use WHERE or HAVING statements
module DirectiveRecord
module Query
# Composes raw SQL SELECT statements from a symbolic options hash
# (:select, :where, :group_by, :order_by, :limit, :offset, :aggregates,
# :numerize_aliases) scoped to a model class. Subclasses supply dialect
# specifics: path/aggregate delimiters, aggregate SQL, alias quoting.
class SQL
# Stores the model class the query is built for.
def initialize(base)
@base = base
end
# Builds the SQL string; bare args become :select, a trailing hash the rest.
def to_sql(*args)
options = extract_options(args)
validate_options! options
prepare_options! options
normalize_options! options
parse_joins! options
prepend_base_alias! options
finalize_options! options
compose_sql options
end
private
# Delimiter for flattening association paths into table aliases (dialect-specific).
def path_delimiter
raise NotImplementedError
end
# Delimiter between aggregate method and path in aliases (dialect-specific).
def aggregate_delimiter
raise NotImplementedError
end
# SQL for an aggregated select expression (dialect-specific).
def select_aggregate_sql(method, path)
raise NotImplementedError
end
# Alias used for an aggregated select expression, e.g. "sum:amount".
def select_aggregate_sql_alias(method, path)
quote_alias("#{method}#{aggregate_delimiter}#{path}")
end
# SQL emitted when :group_by => :all is requested (dialect-specific).
def group_by_all_sql
raise NotImplementedError
end
# The model class this query is scoped to.
def base
@base
end
# Short table alias: first letter of each underscore-separated name segment.
def base_alias
@base_alias ||= quote_alias(base.table_name.split("_").collect{|x| x[0]}.join(""))
end
# Hook for dialects that must quote aliases; identity by default.
def quote_alias(sql_alias)
sql_alias
end
# Deep-copies the trailing options hash; bare args become :select.
def extract_options(args)
options = args.extract_options!.deep_dup
options.reverse_merge! :select => (args.empty? ? "*" : args)
options
end
# Rejects unknown option keys early.
def validate_options!(options)
options.assert_valid_keys :select, :where, :group_by, :order_by, :limit, :offset, :aggregates, :numerize_aliases
end
# Subclass hook executed before normalization.
def prepare_options!(options); end
# Normalizes all options in place, then drops blank ones.
def normalize_options!(options)
normalize_select!(options)
normalize_where!(options)
normalize_group_by!(options)
normalize_order_by!(options)
options.reject!{|k, v| v.blank?}
end
# Expands :select into SQL, recording decimal scales, aggregate aliases
# and (optionally) numbered aliases along the way.
def normalize_select!(options)
select = to_array! options, :select
select.uniq!
options[:scales] = select.inject({}) do |hash, sql|
if scale = column_for(sql).try(:scale)
hash[sql] = scale
end
hash
end
options[:aggregated] = {}
options[:aliases] = {}
options[:select] = options[:select].inject([]) do |array, path|
sql, sql_alias = ((path == ".*") ? "#{base_alias}.*" : path), nil
if aggregate_method = (options[:aggregates] || {})[path]
sql = select_aggregate_sql(aggregate_method, path)
sql_alias = options[:aggregated][path] = select_aggregate_sql_alias(aggregate_method, path)
end
if scale = options[:scales][path]
sql = "ROUND(#{sql}, #{scale})"
sql_alias ||= quote_alias(path)
end
if options[:numerize_aliases]
sql = sql.gsub(/ AS .*$/, "")
sql_alias = options[:aliases][prepend_base_alias(sql_alias || sql)] = "c#{array.size + 1}"
end
array << [sql, sql_alias].compact.join(" AS ")
array
end
end
# Splits :where statements into WHERE vs HAVING: a statement goes into
# WHERE when its leading token resolves to an actual column, otherwise
# (aggregates, derived expressions) it becomes a HAVING condition with
# its leading path replaced by the aggregate alias when one exists.
def normalize_where!(options)
regexp = /^\S+/
where, having = (to_array!(options, :where) || []).partition do |statement|
!column_for(statement.strip.match(regexp).to_s).nil?
end
unless (attrs = base.scope_attributes).blank?
sql = base.send(:sanitize_sql_for_conditions, attrs, "").gsub(/``.`(\w+)`/) { $1 }
where << sql
end
options[:where], options[:having] = where, having.collect do |statement|
statement.strip.gsub(regexp){|path| options[:aggregated][path] || path}
end
[:where, :having].each do |key|
value = options[key]
options[key] = (value.collect{|x| "(#{x})"}.join(" AND ") unless value.empty?)
end
end
# Replaces the :all sentinel with the dialect's GROUP BY ALL SQL.
def normalize_group_by!(options)
group_by = to_array! options, :group_by
group_by.clear.push(group_by_all_sql) if group_by == [:all]
end
# Derives a default order from :group_by and rewrites each entry with its
# aggregate SQL and ROUND(...) scale where applicable.
def normalize_order_by!(options)
options[:order_by] ||= (options[:group_by] || []).collect do |path|
direction = (path.to_s == "date") ? "DESC" : "ASC"
"#{path} #{direction}"
end
to_array!(options, :order_by).collect! do |x|
path, direction = x.split " "
scale = options[:scales][path]
select = begin
if aggregate_method = (options[:aggregates] || {})[path]
select_aggregate_sql(aggregate_method, path)
else
path
end
end
"#{scale ? "ROUND(#{select}, #{scale})" : select} #{direction.upcase if direction}"
end
end
# Wraps a scalar option value in an array, in place.
def to_array!(options, key)
if value = options[key]
options[key] = [value].flatten
end
end
# Resolves an association path like "orders.total" to a column object,
# or nil when the path does not map to a column.
def column_for(path)
segments = path.split(".")
column = segments.pop
model = segments.inject(base) do |klass, association|
klass.reflect_on_association(association.to_sym).klass
end
model.columns_hash[column]
rescue
nil
end
# Translates the association paths referenced in the options into
# LEFT JOIN clauses, handling has-many-through bridge tables.
def parse_joins!(options)
return if (paths = extract_paths(options)).empty?
regexp = /INNER JOIN `([^`]+)`( `[^`]+`)? ON `[^`]+`.`([^`]+)` = `[^`]+`.`([^`]+)`/
options[:joins] = paths.collect do |path|
joins, associations = [], []
path.split(".").inject(base) do |klass, association|
association = association.to_sym
table_joins = klass.joins(association).to_sql.scan regexp
concerns_bridge_table = table_joins.size == 2
bridge_table_as = nil
table_joins.each_with_index do |table_join, index|
concerns_bridge_table_join = concerns_bridge_table && index == 0
join_table, possible_alias, join_table_column, table_column = table_join
table_as = (klass == base) ? base_alias : quote_alias(associations.join(path_delimiter))
join_table_as = quote_alias((associations + [association]).join(path_delimiter))
if concerns_bridge_table
if concerns_bridge_table_join
join_table_as = bridge_table_as = quote_alias("#{(associations + [association]).join(path_delimiter)}_bridge_table")
else
table_as = bridge_table_as
end
end
joins.push "LEFT JOIN #{join_table} #{join_table_as} ON #{join_table_as}.#{join_table_column} = #{table_as}.#{table_column}"
end
associations << association
klass.reflect_on_association(association).klass
end
joins
end.flatten.uniq.join("\n")
end
# Collects the distinct association paths referenced by the relevant
# options, stripping quoted string literals first.
def extract_paths(options)
options.inject([]) do |paths, (key, value)|
if [:select, :where, :group_by, :having].include?(key)
value = value.join " " if value.is_a?(Array)
paths.concat value.gsub(/((?<![\\])['"])((?:.(?!(?<![\\])\1))*.?)\1/, " ").scan(/[a-zA-Z_]+\.[a-zA-Z_\.]+/).collect{|x| x.split(".")[0..-2].join "."}
else
paths
end
end.uniq
end
# Qualifies bare column references in every clause with the base alias.
def prepend_base_alias!(options)
[:select, :where, :having, :group_by, :order_by].each do |key|
if value = options[key]
options[key] = prepend_base_alias value, options[:aliases]
end
end
end
# Prefixes bare base-table columns with the base alias and collapses
# association paths into their flattened table aliases.
def prepend_base_alias(sql, aliases = {})
columns = base.columns_hash.keys
sql = sql.join ", " if sql.is_a?(Array)
sql.gsub(/("[^"]*"|'[^']*'|[a-zA-Z_#{aggregate_delimiter}]+(\.[a-zA-Z_\*]+)*)/) do
columns.include?($1) ? "#{base_alias}.#{$1}" : begin
if (string = $1).match /^([a-zA-Z_\.]+)\.([a-zA-Z_\*]+)$/
path, column = $1, $2
"#{quote_alias path.gsub(".", path_delimiter)}.#{column}"
else
string
end
end
end
end
# Subclass hook executed after normalization, before composing SQL.
def finalize_options!(options); end
# Assembles the final statement, one clause per line.
def compose_sql(options)
sql = ["SELECT #{options[:select]}", "FROM #{base.table_name} #{base_alias}", options[:joins]].compact
[:where, :group_by, :having, :order_by, :limit, :offset].each do |key|
if value = options[key]
keyword = key.to_s.upcase.gsub("_", " ")
sql << "#{keyword} #{value}"
end
end
sql.join "\n"
end
end
end
end
|
module Docs
  # Scraper configuration for the React Native documentation (release 0.21).
  class ReactNative < React
    self.type = 'react'
    self.name = 'React Native'
    self.slug = 'react_native'
    self.release = '0.21'
    self.base_url = 'https://facebook.github.io/react-native/docs/'
    self.root_path = 'getting-started.html'
    self.links = {
      home: 'https://facebook.github.io/react-native/',
      code: 'https://github.com/facebook/react-native'
    }

    options[:root_title] = 'React Native Documentation'
    options[:only_patterns] = nil
    options[:skip] = %w(videos.html transforms.html troubleshooting.html)

    # Collapse the duplicated /docs path segment produced by some links.
    options[:fix_urls] = lambda do |url|
      url.sub! 'docs/docs', 'docs'
      url
    end

    options[:attribution] = <<-HTML
© 2016 Facebook Inc.<br>
Licensed under the Creative Commons Attribution 4.0 International Public License.
    HTML
  end
end
Update React Native documentation (0.22)
module Docs
  # Scraper configuration for the React Native documentation (release 0.22).
  class ReactNative < React
    self.type = 'react'
    self.name = 'React Native'
    self.slug = 'react_native'
    self.release = '0.22'
    self.base_url = 'https://facebook.github.io/react-native/docs/'
    self.root_path = 'getting-started.html'
    self.links = {
      home: 'https://facebook.github.io/react-native/',
      code: 'https://github.com/facebook/react-native'
    }

    options[:root_title] = 'React Native Documentation'
    options[:only_patterns] = nil
    options[:skip] = %w(videos.html transforms.html troubleshooting.html)

    # Collapse the duplicated /docs path segment produced by some links.
    options[:fix_urls] = lambda do |url|
      url.sub! 'docs/docs', 'docs'
      url
    end

    options[:attribution] = <<-HTML
© 2016 Facebook Inc.<br>
Licensed under the Creative Commons Attribution 4.0 International Public License.
    HTML
  end
end
|
# Homebrew formula for REAPR, which evaluates genome assembly accuracy
# from mapped paired-end reads.
class Reapr < Formula
desc "Evaluates accuracy of a genome assembly using mapped paired end reads"
homepage "http://www.sanger.ac.uk/science/tools/reapr"
# doi "10.1186/gb-2013-14-5-r47"
# tag "bioinformatics"
url "ftp://ftp.sanger.ac.uk/pub/resources/software/reapr/Reapr_1.0.18.tar.gz"
sha256 "6d691b5b49c58aef332e771d339e32097a7696e9c68bd8f16808b46d648b6660"
bottle do
cellar :any
sha256 "c2b05bab93220b81d44f8a6b61b4c59f9eba5eed183a4ad0f28fe1f46ff9f14a" => :el_capitan
sha256 "aa1812958b54fd5974f18f1ed7e084a6441b7111639574995b62762700c009e9" => :yosemite
sha256 "6d390b5a36e6cc7b56a6fd86b28e443f4dbf073f714bddc866d3781a024a46f6" => :mavericks
end
depends_on "bamtools"
depends_on "htslib"
depends_on "r" => [:recommended, :run] # only needed for the test
depends_on "samtools-0.1"
depends_on "smalt"
# PDF manual, installed into doc below.
resource "manual" do
url "ftp://ftp.sanger.ac.uk/pub/resources/software/reapr/Reapr_1.0.18.manual.pdf"
sha256 "304b7b7b725abc285791d8be3b2aaf6f4afeb38852ce91fa5635dc0a9913a517"
end
# Sample data used by the brew test block.
resource "test_data" do
url "ftp://ftp.sanger.ac.uk/pub/resources/software/reapr/Reapr_1.0.18.test_data.tar.gz"
sha256 "6ef426e56c4854cdbb22d7012aca29d22b072de5e63f505be11229df76b12840"
end
# Perl dependency vendored into libexec for the task_*.pl scripts.
# NOTE(review): resource is named File::Spec::Link but the tarball is
# File-Copy-Link — verify the intended module name.
resource "File::Spec::Link" do
url "http://search.cpan.org/CPAN/authors/id/R/RM/RMBARKER/File-Copy-Link-0.140.tar.gz"
sha256 "2063656dcd38bade43dc7f1e2ef5f1b6a8086c2f15d37b334189bd2a28e8ffeb"
end
def install
# Vendor the Perl module under libexec so the wrapper scripts can find it.
ENV.prepend_create_path "PERL5LIB", libexec+"lib/perl5"
resource("File::Spec::Link").stage do
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
system "make"
system "make", "install"
end
# snpomatic uses the BSD 'ulong' type, which macOS does not define.
if OS.mac?
inreplace "third_party/snpomatic/src/snpomatic.h",
"using namespace std ;",
"using namespace std ;\n#define ulong u_long"
end
system "make", "-C", "third_party/tabix"
system "make", "-C", "third_party/snpomatic"
system "make", "-C", "src",
"CFLAGS=-I#{Formula["bamtools"].opt_include}/bamtools"
doc.install %w[README changelog.txt licence.txt]
doc.install resource("manual")
(pkgshare/"test").install resource("test_data")
cd "src" do
libexec.install %w[
bam2fcdEstimate bam2fragCov bam2insert
bam2perfect fa2gaps fa2gc make_plots n50 scaff2contig
task_break task_fcdrate task_gapresize task_score task_stats
task_facheck.pl task_perfectfrombam.pl task_perfectmap.pl
task_pipeline.pl task_plots.pl task_preprocess.pl task_smaltmap.pl
task_summary.pl reapr.pl
]
end
bin.install_symlink libexec+"reapr.pl" => "reapr"
# Pin the exact external tools reapr shells out to onto libexec's PATH.
libexec.install_symlink Formula["htslib"].opt_bin => "tabix"
libexec.install_symlink Formula["smalt"].opt_bin/"smalt" => "smalt"
libexec.install_symlink Formula["samtools-0.1"].opt_bin/"samtools" => "samtools"
libexec.install "third_party/snpomatic/findknownsnps"
# Wrap bin scripts so PERL5LIB points at the vendored module.
bin.env_script_all_files(libexec, :PERL5LIB => ENV["PERL5LIB"])
ln_s bin/"reapr", prefix/"reapr"
end
test do
cp_r Dir[pkgshare/"test/*"], testpath
system "./test.sh"
end
end
reapr: use vendored samtools-0.1 to deconflict CI
Closes #3657.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula for REAPR (revision 1): identical upstream source, but the
# external samtools-0.1 dependency is replaced by the samtools copy vendored
# inside the REAPR tarball to avoid a conflict on CI.
class Reapr < Formula
  desc "Evaluates accuracy of a genome assembly using mapped paired end reads"
  homepage "http://www.sanger.ac.uk/science/tools/reapr"
  # doi "10.1186/gb-2013-14-5-r47"
  # tag "bioinformatics"
  url "ftp://ftp.sanger.ac.uk/pub/resources/software/reapr/Reapr_1.0.18.tar.gz"
  sha256 "6d691b5b49c58aef332e771d339e32097a7696e9c68bd8f16808b46d648b6660"
  # Bumped because the install changed (vendored samtools) without a new upstream version.
  revision 1

  bottle do
    cellar :any
    sha256 "c2b05bab93220b81d44f8a6b61b4c59f9eba5eed183a4ad0f28fe1f46ff9f14a" => :el_capitan
    sha256 "aa1812958b54fd5974f18f1ed7e084a6441b7111639574995b62762700c009e9" => :yosemite
    sha256 "6d390b5a36e6cc7b56a6fd86b28e443f4dbf073f714bddc866d3781a024a46f6" => :mavericks
  end

  depends_on "bamtools"
  depends_on "htslib"
  depends_on "r" => [:recommended, :run] # only needed for the test
  depends_on "smalt"

  # PDF manual, installed alongside the text docs below.
  resource "manual" do
    url "ftp://ftp.sanger.ac.uk/pub/resources/software/reapr/Reapr_1.0.18.manual.pdf"
    sha256 "304b7b7b725abc285791d8be3b2aaf6f4afeb38852ce91fa5635dc0a9913a517"
  end

  # Sample data exercised by the `test do` block via test.sh.
  resource "test_data" do
    url "ftp://ftp.sanger.ac.uk/pub/resources/software/reapr/Reapr_1.0.18.test_data.tar.gz"
    sha256 "6ef426e56c4854cdbb22d7012aca29d22b072de5e63f505be11229df76b12840"
  end

  # Perl dependency for the driver scripts.
  # NOTE(review): the resource is named "File::Spec::Link" but the tarball is
  # File-Copy-Link (which provides File::Spec::Link) — verify the name is intended.
  resource "File::Spec::Link" do
    url "http://search.cpan.org/CPAN/authors/id/R/RM/RMBARKER/File-Copy-Link-0.140.tar.gz"
    sha256 "2063656dcd38bade43dc7f1e2ef5f1b6a8086c2f15d37b334189bd2a28e8ffeb"
  end

  def install
    # Vendored Perl modules live under libexec; PERL5LIB is baked into the
    # wrapper scripts at the bottom via env_script_all_files.
    ENV.prepend_create_path "PERL5LIB", libexec+"lib/perl5"
    resource("File::Spec::Link").stage do
      system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
      system "make"
      system "make", "install"
    end
    if OS.mac?
      # macOS lacks the glibc `ulong` typedef that snpomatic relies on.
      inreplace "third_party/snpomatic/src/snpomatic.h",
        "using namespace std ;",
        "using namespace std ;\n#define ulong u_long"
    end
    # use the vendored samtools-0.1 to avoid CI conflicts
    system "make", "-C", "third_party/samtools"
    system "make", "-C", "third_party/samtools", "razip"
    system "make", "-C", "third_party/tabix"
    system "make", "-C", "third_party/snpomatic"
    system "make", "-C", "src",
           "CFLAGS=-I#{Formula["bamtools"].opt_include}/bamtools"
    doc.install %w[README changelog.txt licence.txt]
    doc.install resource("manual")
    (pkgshare/"test").install resource("test_data")
    # Install the compiled tools and Perl scripts privately; only `reapr`
    # itself is exposed on PATH (symlinked below).
    cd "src" do
      libexec.install %w[
        bam2fcdEstimate bam2fragCov bam2insert
        bam2perfect fa2gaps fa2gc make_plots n50 scaff2contig
        task_break task_fcdrate task_gapresize task_score task_stats
        task_facheck.pl task_perfectfrombam.pl task_perfectmap.pl
        task_pipeline.pl task_plots.pl task_preprocess.pl task_smaltmap.pl
        task_summary.pl reapr.pl
      ]
    end
    bin.install_symlink libexec/"reapr.pl" => "reapr"
    # NOTE(review): this links htslib's entire opt_bin directory under the
    # name "tabix" — confirm a directory symlink (not the tabix binary) is intended.
    libexec.install_symlink Formula["htslib"].opt_bin => "tabix"
    libexec.install_symlink Formula["smalt"].opt_bin/"smalt" => "smalt"
    # Install the freshly-built vendored samtools (plus bcftools and misc
    # helpers) under libexec so reapr finds them without a samtools formula.
    cd "third_party/samtools" do
      libexec.install %w[samtools razip]
      (libexec/"share/man/man1").install "samtools.1"
    end
    cd "third_party/samtools/bcftools" do
      libexec.install %w[bcftools vcfutils.pl]
      (libexec/"share/doc/bcftools").install "bcf.tex"
    end
    cd "third_party/samtools/misc" do
      (libexec/"samtools-misc").install Dir["*.java"]
      (libexec/"samtools-misc").install Dir["*.pl"]
      (libexec/"samtools-misc").install Dir["*.py"]
      (libexec/"samtools-misc").install %w[
        maq2sam-long maq2sam-short md5sum-lite seqtk wgsim
      ]
    end
    libexec.install "third_party/snpomatic/findknownsnps"
    bin.env_script_all_files(libexec, :PERL5LIB => ENV["PERL5LIB"])
    ln_s bin/"reapr", prefix/"reapr"
  end

  test do
    cp_r Dir[pkgshare/"test/*"], testpath
    system "./test.sh"
  end
end
|
# Homebrew formula for RECON 1.08, a de-novo repeat-family identification
# tool used by RepeatModeler.
class Recon < Formula
  homepage "http://www.repeatmasker.org/RepeatModeler.html"
  # doi "10.1101/gr.88502"
  # tag "bioinformatics"
  url "http://www.repeatmasker.org/RepeatModeler/RECON-1.08.tar.gz"
  sha256 "699765fa49d18dbfac9f7a82ecd054464b468cb7521abe9c2bd8caccf08ee7d8"

  bottle do
    cellar :any
    sha256 "b3e222db9633614433c6cabd5c2ee46c78f18194421cb4e8df0820608eb90d22" => :yosemite
    sha256 "c3d875ca2a2715e03be0cb439d3848c6bc5fb19e80bec51ea2d296bbdcf03d27" => :mavericks
    sha256 "5b1e6e98ae5a2b9dc18ca9d7a1de74db21fe1b7918498e12019e7a3e72ff12d1" => :mountain_lion
    sha256 "d07b99e6c7f1ae0b4f6d461ce4386afb3e9ad6ef8436abc743c768943d532c5e" => :x86_64_linux
  end

  def install
    # The driver script ships with an empty $path; point it at our bin.
    inreplace "scripts/recon.pl", '$path = "";', "$path = \"#{bin}\";"
    bin.mkdir
    system "make", "-C", "src"
    system "make", "-C", "src", "install", "BINDIR=#{bin}", "MANDIR=#{man}"
    bin.install Dir["scripts/*"]
    doc.install %w[00README COPYRIGHT INSTALL LICENSE]
  end

  test do
    # recon.pl prints a usage message and exits 255 when given no arguments.
    assert_match "usage", shell_output("#{bin}/recon.pl 2>&1", 255)
  end
end
recon: update 1.08 bottle.
# Homebrew formula for RECON 1.08 (rebottled), a de-novo repeat-family
# identification tool used by RepeatModeler.
class Recon < Formula
  homepage "http://www.repeatmasker.org/RepeatModeler.html"
  # doi "10.1101/gr.88502"
  # tag "bioinformatics"
  url "http://www.repeatmasker.org/RepeatModeler/RECON-1.08.tar.gz"
  sha256 "699765fa49d18dbfac9f7a82ecd054464b468cb7521abe9c2bd8caccf08ee7d8"

  bottle do
    cellar :any_skip_relocation
    sha256 "cd1fe441ff386d786943598239ec0dd39a3806f156f469b30e4039314f42abab" => :high_sierra
    sha256 "9d28e681fdc14d5f10906a20e55c04d93384b6d73975ab06157d42c2a578f2ac" => :sierra
    sha256 "7a99aac95cb168f1fa13db23186fea687750d488cde5d869b98403d10b516248" => :el_capitan
  end

  def install
    # The driver script ships with an empty $path; point it at our bin.
    inreplace "scripts/recon.pl", '$path = "";', "$path = \"#{bin}\";"
    bin.mkdir
    system "make", "-C", "src"
    system "make", "-C", "src", "install", "BINDIR=#{bin}", "MANDIR=#{man}"
    bin.install Dir["scripts/*"]
    doc.install %w[00README COPYRIGHT INSTALL LICENSE]
  end

  test do
    # recon.pl prints a usage message and exits 255 when given no arguments.
    assert_match "usage", shell_output("#{bin}/recon.pl 2>&1", 255)
  end
end
|
#!/usr/bin/false
NAME = "emscripten"
VERSION = "1.37.27"
RELEASE = "1"
FILES = [
[
"https://github.com/kripken/emscripten/archive/#{VERSION}.tar.gz",
"a345032415362a0a66e4886ecd751f6394237ff764b1f1c40dde25410792991c",
"emscripten-#{VERSION}.tgz"
],
[
"https://github.com/kripken/emscripten-fastcomp/archive/#{VERSION}.tar.gz",
"409055d32dca9788b7ef15fbe81bd1df82a0ab91337f15be3254c11d5743043a",
"emscripten_fastcomp-#{VERSION}.tgz"
],
[
"https://github.com/kripken/emscripten-fastcomp-clang/archive/#{VERSION}.tar.gz",
"bd532912eab4e52bd83f603c7fb4d2fe770b99ede766e2b9a82f5f3f68f4a168",
"emscripten_fastcomp_clang-#{VERSION}.tgz"
]
]
INSTALL_CMAKE = true
Update build scripts for Emscripten 1.38.11
#!/usr/bin/false
NAME = "emscripten"
VERSION = "1.38.11"
RELEASE = "1"
FILES = [
[
"https://github.com/kripken/emscripten/archive/#{VERSION}.tar.gz",
"5521e8eefbee284b6a72797c7f63ce606d37647930cd8f4d48d45d02c4e1da95",
"emscripten-#{VERSION}.tgz"
],
[
"https://github.com/kripken/emscripten-fastcomp/archive/#{VERSION}.tar.gz",
"55ddc1b1f045a36ac34ab60bb0e1a0370a40249eba8d41cd4e427be95beead18",
"emscripten_fastcomp-#{VERSION}.tgz"
],
[
"https://github.com/kripken/emscripten-fastcomp-clang/archive/#{VERSION}.tar.gz",
"1d2ac9f8dab54f0f17e4a77c3cd4653fe9f890831ef6e405320850fd7351f795",
"emscripten_fastcomp_clang-#{VERSION}.tgz"
]
]
INSTALL_CMAKE = true
|
require 'fileutils'
# FIXME remove DummyKeyGenerator and this require in 4.1
require 'active_support/key_generator'
require 'rails/engine'
module Rails
# In Rails 3.0, a Rails::Application object was introduced which is nothing more than
# an Engine but with the responsibility of coordinating the whole boot process.
#
# == Initialization
#
# Rails::Application is responsible for executing all railties and engines
# initializers. It also executes some bootstrap initializers (check
# Rails::Application::Bootstrap) and finishing initializers, after all the others
# are executed (check Rails::Application::Finisher).
#
# == Configuration
#
# Besides providing the same configuration as Rails::Engine and Rails::Railtie,
# the application object has several specific configurations, for example
# "cache_classes", "consider_all_requests_local", "filter_parameters",
# "logger" and so forth.
#
# Check Rails::Application::Configuration to see them all.
#
# == Routes
#
# The application object is also responsible for holding the routes and reloading routes
# whenever the files change in development.
#
# == Middlewares
#
# The Application is also responsible for building the middleware stack.
#
# == Booting process
#
# The application is also responsible for setting up and executing the booting
# process. From the moment you require "config/application.rb" in your app,
# the booting process goes like this:
#
# 1) require "config/boot.rb" to setup load paths
# 2) require railties and engines
# 3) Define Rails.application as "class MyApp::Application < Rails::Application"
# 4) Run config.before_configuration callbacks
# 5) Load config/environments/ENV.rb
# 6) Run config.before_initialize callbacks
# 7) Run Railtie#initializer defined by railties, engines and application.
# One by one, each engine sets up its load paths, routes and runs its config/initializers/* files.
# 8) Custom Railtie#initializers added by railties, engines and applications are executed
# 9) Build the middleware stack and run to_prepare callbacks
# 10) Run config.before_eager_load and eager_load! if eager_load is true
# 11) Run config.after_initialize callbacks
#
class Application < Engine
autoload :Bootstrap, 'rails/application/bootstrap'
autoload :Configuration, 'rails/application/configuration'
autoload :Finisher, 'rails/application/finisher'
autoload :Railties, 'rails/engine/railties'
autoload :RoutesReloader, 'rails/application/routes_reloader'
class << self
def inherited(base)
raise "You cannot have more than one Rails::Application" if Rails.application
super
Rails.application = base.instance
Rails.application.add_lib_to_load_path!
ActiveSupport.run_load_hooks(:before_configuration, base.instance)
end
end
attr_accessor :assets, :sandbox
alias_method :sandbox?, :sandbox
attr_reader :reloaders
delegate :default_url_options, :default_url_options=, to: :routes
def initialize
super
@initialized = false
@reloaders = []
@routes_reloader = nil
@env_config = nil
@ordered_railties = nil
@railties = nil
end
# Returns true if the application is initialized.
def initialized?
@initialized
end
# Implements call according to the Rack API. It simply
# dispatches the request to the underlying middleware stack.
def call(env)
env["ORIGINAL_FULLPATH"] = build_original_fullpath(env)
super(env)
end
# Reload application routes regardless if they changed or not.
def reload_routes!
routes_reloader.reload!
end
# Return the application's KeyGenerator
def key_generator
  # number of iterations selected based on consultation with the google security
  # team. Details at https://github.com/rails/rails/pull/6952#issuecomment-7661220
  @caching_key_generator ||= begin
    if config.secret_key_base
      # secret_key_base present: derive keys via KeyGenerator and wrap in a
      # CachingKeyGenerator so repeated salts don't re-run key derivation.
      key_generator = ActiveSupport::KeyGenerator.new(config.secret_key_base, iterations: 1000)
      ActiveSupport::CachingKeyGenerator.new(key_generator)
    else
      # Legacy fallback: deprecated secret_token with no key derivation
      # (DummyKeyGenerator; scheduled for removal — see FIXME at top of file).
      ActiveSupport::DummyKeyGenerator.new(config.secret_token)
    end
  end
end
# Stores some of the Rails initial environment parameters which
# will be used by middlewares and engines to configure themselves.
# Currently stores:
#
# * "action_dispatch.parameter_filter" => config.filter_parameters
# * "action_dispatch.redirect_filter" => config.filter_redirect
# * "action_dispatch.secret_token" => config.secret_token,
# * "action_dispatch.show_exceptions" => config.action_dispatch.show_exceptions
# * "action_dispatch.show_detailed_exceptions" => config.consider_all_requests_local
# * "action_dispatch.logger" => Rails.logger
# * "action_dispatch.backtrace_cleaner" => Rails.backtrace_cleaner
# * "action_dispatch.key_generator" => key_generator
# * "action_dispatch.http_auth_salt" => config.action_dispatch.http_auth_salt
# * "action_dispatch.signed_cookie_salt" => config.action_dispatch.signed_cookie_salt
# * "action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt
# * "action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt
#
def env_config
@env_config ||= begin
if config.secret_key_base.nil?
ActiveSupport::Deprecation.warn "You didn't set config.secret_key_base in config/initializers/secret_token.rb file. " +
"This should be used instead of the old deprecated config.secret_token in order to use the new EncryptedCookieStore. " +
"To convert safely to the encrypted store (without losing existing cookies and sessions), see http://guides.rubyonrails.org/upgrading_ruby_on_rails.html#action-pack"
if config.secret_token.blank?
raise "You must set config.secret_key_base in your app's config"
end
end
super.merge({
"action_dispatch.parameter_filter" => config.filter_parameters,
"action_dispatch.redirect_filter" => config.filter_redirect,
"action_dispatch.secret_token" => config.secret_token,
"action_dispatch.show_exceptions" => config.action_dispatch.show_exceptions,
"action_dispatch.show_detailed_exceptions" => config.consider_all_requests_local,
"action_dispatch.logger" => Rails.logger,
"action_dispatch.backtrace_cleaner" => Rails.backtrace_cleaner,
"action_dispatch.key_generator" => key_generator,
"action_dispatch.http_auth_salt" => config.action_dispatch.http_auth_salt,
"action_dispatch.signed_cookie_salt" => config.action_dispatch.signed_cookie_salt,
"action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt,
"action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt
})
end
end
## Rails internal API
# This method is called just after an application inherits from Rails::Application,
# allowing the developer to load classes in lib and use them during application
# configuration.
#
# class MyApplication < Rails::Application
# require "my_backend" # in lib/my_backend
# config.i18n.backend = MyBackend
# end
#
# Notice this method takes into consideration the default root path. So if you
# are changing config.root inside your application definition or having a custom
# Rails application, you will need to add lib to $LOAD_PATH on your own in case
# you need to load files in lib/ during the application configuration as well.
def add_lib_to_load_path! #:nodoc:
  # Prepend the application's lib/ directory to the load path so code in
  # lib/ is requirable during application configuration.
  path = File.join config.root, 'lib'
  # File.exist? — File.exists? is deprecated and warns on modern Rubies.
  $LOAD_PATH.unshift(path) if File.exist?(path)
end
def require_environment! #:nodoc:
environment = paths["config/environment"].existent.first
require environment if environment
end
def routes_reloader #:nodoc:
@routes_reloader ||= RoutesReloader.new
end
# Returns an array of file paths appended with a hash of
# directories-extensions suitable for ActiveSupport::FileUpdateChecker
# API.
def watchable_args #:nodoc:
  # Copies so callers can't mutate the config-held collections.
  watch_files = config.watchable_files.dup
  watch_dirs = config.watchable_dirs.dup
  # Every autoload path is also watched for .rb changes.
  ActiveSupport::Dependencies.autoload_paths.each do |autoload_path|
    watch_dirs[autoload_path.to_s] = [:rb]
  end
  [watch_files, watch_dirs]
end
# Initialize the application passing the given group. By default, the
# group is :default but sprockets precompilation passes group equals
# to assets if initialize_on_precompile is false to avoid booting the
# whole app.
def initialize!(group=:default) #:nodoc:
raise "Application has been already initialized." if @initialized
run_initializers(group, self)
@initialized = true
self
end
def initializers #:nodoc:
Bootstrap.initializers_for(self) +
railties_initializers(super) +
Finisher.initializers_for(self)
end
def config #:nodoc:
@config ||= Application::Configuration.new(find_root_with_flag("config.ru", Dir.pwd))
end
def to_app #:nodoc:
self
end
def helpers_paths #:nodoc:
config.helpers_paths
end
protected
alias :build_middleware_stack :app
def run_tasks_blocks(app) #:nodoc:
railties.each { |r| r.run_tasks_blocks(app) }
super
require "rails/tasks"
config = self.config
task :environment do
config.eager_load = false
require_environment!
end
end
def run_generators_blocks(app) #:nodoc:
railties.each { |r| r.run_generators_blocks(app) }
super
end
def run_runner_blocks(app) #:nodoc:
railties.each { |r| r.run_runner_blocks(app) }
super
end
def run_console_blocks(app) #:nodoc:
railties.each { |r| r.run_console_blocks(app) }
super
end
# Returns the ordered railties for this application considering railties_order.
def ordered_railties #:nodoc:
@ordered_railties ||= begin
order = config.railties_order.map do |railtie|
if railtie == :main_app
self
elsif railtie.respond_to?(:instance)
railtie.instance
else
railtie
end
end
all = (railties - order)
all.push(self) unless (all + order).include?(self)
order.push(:all) unless order.include?(:all)
index = order.index(:all)
order[index] = all
order.reverse.flatten
end
end
def railties_initializers(current) #:nodoc:
  # Splice this application's own initializers (+current+) into the ordered
  # railtie list at the application's position; every other railtie
  # contributes its own initializers.
  ordered_railties.flat_map do |railtie|
    railtie == self ? current : railtie.initializers
  end
end
def reload_dependencies? #:nodoc:
config.reload_classes_only_on_change != true || reloaders.map(&:updated?).any?
end
# Builds the default Rack middleware stack from the application config.
# Ordering below is significant: each middleware sees requests after the
# ones above it and responses after the ones below it.
def default_middleware_stack #:nodoc:
  ActionDispatch::MiddlewareStack.new.tap do |middleware|
    # Captured for the DebugExceptions/Reloader lambdas below.
    app = self
    if rack_cache = load_rack_cache
      require "action_dispatch/http/rack_cache"
      middleware.use ::Rack::Cache, rack_cache
    end
    if config.force_ssl
      middleware.use ::ActionDispatch::SSL, config.ssl_options
    end
    if config.action_dispatch.x_sendfile_header.present?
      middleware.use ::Rack::Sendfile, config.action_dispatch.x_sendfile_header
    end
    if config.serve_static_assets
      middleware.use ::ActionDispatch::Static, paths["public"].first, config.static_cache_control
    end
    # Serialize requests unless the app opted into concurrency (see allow_concurrency?).
    middleware.use ::Rack::Lock unless allow_concurrency?
    middleware.use ::Rack::Runtime
    middleware.use ::Rack::MethodOverride
    middleware.use ::ActionDispatch::RequestId
    # Must come after Rack::MethodOverride to properly log overridden methods
    middleware.use ::Rails::Rack::Logger, config.log_tags
    middleware.use ::ActionDispatch::ShowExceptions, show_exceptions_app
    middleware.use ::ActionDispatch::DebugExceptions, app
    middleware.use ::ActionDispatch::RemoteIp, config.action_dispatch.ip_spoofing_check, config.action_dispatch.trusted_proxies
    # Code reloading between requests in development-style configurations.
    unless config.cache_classes
      middleware.use ::ActionDispatch::Reloader, lambda { app.reload_dependencies? }
    end
    middleware.use ::ActionDispatch::Callbacks
    middleware.use ::ActionDispatch::Cookies
    if config.session_store
      # Under force_ssl, default the session cookie to secure-only unless
      # the app set :secure explicitly.
      if config.force_ssl && !config.session_options.key?(:secure)
        config.session_options[:secure] = true
      end
      middleware.use config.session_store, config.session_options
      middleware.use ::ActionDispatch::Flash
    end
    middleware.use ::ActionDispatch::ParamsParser
    middleware.use ::Rack::Head
    middleware.use ::Rack::ConditionalGet
    middleware.use ::Rack::ETag, "no-cache"
  end
end
def allow_concurrency? #:nodoc:
  # When the flag is left unset, concurrency follows cache_classes
  # (used above to decide whether Rack::Lock wraps the stack).
  concurrency = config.allow_concurrency
  concurrency.nil? ? config.cache_classes : concurrency
end
def load_rack_cache
rack_cache = config.action_dispatch.rack_cache
return unless rack_cache
begin
require 'rack/cache'
rescue LoadError => error
error.message << ' Be sure to add rack-cache to your Gemfile'
raise
end
if rack_cache == true
{
metastore: "rails:/",
entitystore: "rails:/",
verbose: false
}
else
rack_cache
end
end
def show_exceptions_app
config.exceptions_app || ActionDispatch::PublicExceptions.new(Rails.public_path)
end
def build_original_fullpath(env) #:nodoc:
  # Recompose the originally-requested URL path: mount point plus path,
  # with the query string appended only when one is present.
  base = "#{env["SCRIPT_NAME"]}#{env["PATH_INFO"]}"
  query_string = env["QUERY_STRING"]
  query_string.present? ? "#{base}?#{query_string}" : base
end
end
end
Renumbering the comments in the application boot process.
require 'fileutils'
# FIXME remove DummyKeyGenerator and this require in 4.1
require 'active_support/key_generator'
require 'rails/engine'
module Rails
# In Rails 3.0, a Rails::Application object was introduced which is nothing more than
# an Engine but with the responsibility of coordinating the whole boot process.
#
# == Initialization
#
# Rails::Application is responsible for executing all railties and engines
# initializers. It also executes some bootstrap initializers (check
# Rails::Application::Bootstrap) and finishing initializers, after all the others
# are executed (check Rails::Application::Finisher).
#
# == Configuration
#
# Besides providing the same configuration as Rails::Engine and Rails::Railtie,
# the application object has several specific configurations, for example
# "cache_classes", "consider_all_requests_local", "filter_parameters",
# "logger" and so forth.
#
# Check Rails::Application::Configuration to see them all.
#
# == Routes
#
# The application object is also responsible for holding the routes and reloading routes
# whenever the files change in development.
#
# == Middlewares
#
# The Application is also responsible for building the middleware stack.
#
# == Booting process
#
# The application is also responsible for setting up and executing the booting
# process. From the moment you require "config/application.rb" in your app,
# the booting process goes like this:
#
# 1) require "config/boot.rb" to setup load paths
# 2) require railties and engines
# 3) Define Rails.application as "class MyApp::Application < Rails::Application"
# 4) Run config.before_configuration callbacks
# 5) Load config/environments/ENV.rb
# 6) Run config.before_initialize callbacks
# 7) Run Railtie#initializer defined by railties, engines and application.
# One by one, each engine sets up its load paths, routes and runs its config/initializers/* files.
# 8) Custom Railtie#initializers added by railties, engines and applications are executed
# 9) Build the middleware stack and run to_prepare callbacks
# 10) Run config.before_eager_load and eager_load! if eager_load is true
# 11) Run config.after_initialize callbacks
#
class Application < Engine
autoload :Bootstrap, 'rails/application/bootstrap'
autoload :Configuration, 'rails/application/configuration'
autoload :Finisher, 'rails/application/finisher'
autoload :Railties, 'rails/engine/railties'
autoload :RoutesReloader, 'rails/application/routes_reloader'
class << self
def inherited(base)
raise "You cannot have more than one Rails::Application" if Rails.application
super
Rails.application = base.instance
Rails.application.add_lib_to_load_path!
ActiveSupport.run_load_hooks(:before_configuration, base.instance)
end
end
attr_accessor :assets, :sandbox
alias_method :sandbox?, :sandbox
attr_reader :reloaders
delegate :default_url_options, :default_url_options=, to: :routes
def initialize
super
@initialized = false
@reloaders = []
@routes_reloader = nil
@env_config = nil
@ordered_railties = nil
@railties = nil
end
# Returns true if the application is initialized.
def initialized?
@initialized
end
# Implements call according to the Rack API. It simply
# dispatches the request to the underlying middleware stack.
def call(env)
env["ORIGINAL_FULLPATH"] = build_original_fullpath(env)
super(env)
end
# Reload application routes regardless if they changed or not.
def reload_routes!
routes_reloader.reload!
end
# Return the application's KeyGenerator
def key_generator
# number of iterations selected based on consultation with the google security
# team. Details at https://github.com/rails/rails/pull/6952#issuecomment-7661220
@caching_key_generator ||= begin
if config.secret_key_base
key_generator = ActiveSupport::KeyGenerator.new(config.secret_key_base, iterations: 1000)
ActiveSupport::CachingKeyGenerator.new(key_generator)
else
ActiveSupport::DummyKeyGenerator.new(config.secret_token)
end
end
end
# Stores some of the Rails initial environment parameters which
# will be used by middlewares and engines to configure themselves.
# Currently stores:
#
# * "action_dispatch.parameter_filter" => config.filter_parameters
# * "action_dispatch.redirect_filter" => config.filter_redirect
# * "action_dispatch.secret_token" => config.secret_token,
# * "action_dispatch.show_exceptions" => config.action_dispatch.show_exceptions
# * "action_dispatch.show_detailed_exceptions" => config.consider_all_requests_local
# * "action_dispatch.logger" => Rails.logger
# * "action_dispatch.backtrace_cleaner" => Rails.backtrace_cleaner
# * "action_dispatch.key_generator" => key_generator
# * "action_dispatch.http_auth_salt" => config.action_dispatch.http_auth_salt
# * "action_dispatch.signed_cookie_salt" => config.action_dispatch.signed_cookie_salt
# * "action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt
# * "action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt
#
def env_config
@env_config ||= begin
if config.secret_key_base.nil?
ActiveSupport::Deprecation.warn "You didn't set config.secret_key_base in config/initializers/secret_token.rb file. " +
"This should be used instead of the old deprecated config.secret_token in order to use the new EncryptedCookieStore. " +
"To convert safely to the encrypted store (without losing existing cookies and sessions), see http://guides.rubyonrails.org/upgrading_ruby_on_rails.html#action-pack"
if config.secret_token.blank?
raise "You must set config.secret_key_base in your app's config"
end
end
super.merge({
"action_dispatch.parameter_filter" => config.filter_parameters,
"action_dispatch.redirect_filter" => config.filter_redirect,
"action_dispatch.secret_token" => config.secret_token,
"action_dispatch.show_exceptions" => config.action_dispatch.show_exceptions,
"action_dispatch.show_detailed_exceptions" => config.consider_all_requests_local,
"action_dispatch.logger" => Rails.logger,
"action_dispatch.backtrace_cleaner" => Rails.backtrace_cleaner,
"action_dispatch.key_generator" => key_generator,
"action_dispatch.http_auth_salt" => config.action_dispatch.http_auth_salt,
"action_dispatch.signed_cookie_salt" => config.action_dispatch.signed_cookie_salt,
"action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt,
"action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt
})
end
end
## Rails internal API
# This method is called just after an application inherits from Rails::Application,
# allowing the developer to load classes in lib and use them during application
# configuration.
#
# class MyApplication < Rails::Application
# require "my_backend" # in lib/my_backend
# config.i18n.backend = MyBackend
# end
#
# Notice this method takes into consideration the default root path. So if you
# are changing config.root inside your application definition or having a custom
# Rails application, you will need to add lib to $LOAD_PATH on your own in case
# you need to load files in lib/ during the application configuration as well.
def add_lib_to_load_path! #:nodoc:
  # Prepend the application's lib/ directory to the load path so code in
  # lib/ is requirable during application configuration.
  path = File.join config.root, 'lib'
  # File.exist? — File.exists? is deprecated and warns on modern Rubies.
  $LOAD_PATH.unshift(path) if File.exist?(path)
end
def require_environment! #:nodoc:
environment = paths["config/environment"].existent.first
require environment if environment
end
def routes_reloader #:nodoc:
@routes_reloader ||= RoutesReloader.new
end
# Returns an array of file paths appended with a hash of
# directories-extensions suitable for ActiveSupport::FileUpdateChecker
# API.
def watchable_args #:nodoc:
  # Copies so callers can't mutate the config-held collections.
  watch_files = config.watchable_files.dup
  watch_dirs = config.watchable_dirs.dup
  # Every autoload path is also watched for .rb changes.
  ActiveSupport::Dependencies.autoload_paths.each do |autoload_path|
    watch_dirs[autoload_path.to_s] = [:rb]
  end
  [watch_files, watch_dirs]
end
# Initialize the application passing the given group. By default, the
# group is :default but sprockets precompilation passes group equals
# to assets if initialize_on_precompile is false to avoid booting the
# whole app.
def initialize!(group=:default) #:nodoc:
raise "Application has been already initialized." if @initialized
run_initializers(group, self)
@initialized = true
self
end
def initializers #:nodoc:
Bootstrap.initializers_for(self) +
railties_initializers(super) +
Finisher.initializers_for(self)
end
def config #:nodoc:
@config ||= Application::Configuration.new(find_root_with_flag("config.ru", Dir.pwd))
end
def to_app #:nodoc:
self
end
def helpers_paths #:nodoc:
config.helpers_paths
end
protected
alias :build_middleware_stack :app
def run_tasks_blocks(app) #:nodoc:
railties.each { |r| r.run_tasks_blocks(app) }
super
require "rails/tasks"
config = self.config
task :environment do
config.eager_load = false
require_environment!
end
end
def run_generators_blocks(app) #:nodoc:
railties.each { |r| r.run_generators_blocks(app) }
super
end
def run_runner_blocks(app) #:nodoc:
railties.each { |r| r.run_runner_blocks(app) }
super
end
def run_console_blocks(app) #:nodoc:
railties.each { |r| r.run_console_blocks(app) }
super
end
# Returns the ordered railties for this application considering railties_order.
def ordered_railties #:nodoc:
@ordered_railties ||= begin
order = config.railties_order.map do |railtie|
if railtie == :main_app
self
elsif railtie.respond_to?(:instance)
railtie.instance
else
railtie
end
end
all = (railties - order)
all.push(self) unless (all + order).include?(self)
order.push(:all) unless order.include?(:all)
index = order.index(:all)
order[index] = all
order.reverse.flatten
end
end
def railties_initializers(current) #:nodoc:
  # Splice this application's own initializers (+current+) into the ordered
  # railtie list at the application's position; every other railtie
  # contributes its own initializers.
  ordered_railties.flat_map do |railtie|
    railtie == self ? current : railtie.initializers
  end
end
def reload_dependencies? #:nodoc:
config.reload_classes_only_on_change != true || reloaders.map(&:updated?).any?
end
def default_middleware_stack #:nodoc:
ActionDispatch::MiddlewareStack.new.tap do |middleware|
app = self
if rack_cache = load_rack_cache
require "action_dispatch/http/rack_cache"
middleware.use ::Rack::Cache, rack_cache
end
if config.force_ssl
middleware.use ::ActionDispatch::SSL, config.ssl_options
end
if config.action_dispatch.x_sendfile_header.present?
middleware.use ::Rack::Sendfile, config.action_dispatch.x_sendfile_header
end
if config.serve_static_assets
middleware.use ::ActionDispatch::Static, paths["public"].first, config.static_cache_control
end
middleware.use ::Rack::Lock unless allow_concurrency?
middleware.use ::Rack::Runtime
middleware.use ::Rack::MethodOverride
middleware.use ::ActionDispatch::RequestId
# Must come after Rack::MethodOverride to properly log overridden methods
middleware.use ::Rails::Rack::Logger, config.log_tags
middleware.use ::ActionDispatch::ShowExceptions, show_exceptions_app
middleware.use ::ActionDispatch::DebugExceptions, app
middleware.use ::ActionDispatch::RemoteIp, config.action_dispatch.ip_spoofing_check, config.action_dispatch.trusted_proxies
unless config.cache_classes
middleware.use ::ActionDispatch::Reloader, lambda { app.reload_dependencies? }
end
middleware.use ::ActionDispatch::Callbacks
middleware.use ::ActionDispatch::Cookies
if config.session_store
if config.force_ssl && !config.session_options.key?(:secure)
config.session_options[:secure] = true
end
middleware.use config.session_store, config.session_options
middleware.use ::ActionDispatch::Flash
end
middleware.use ::ActionDispatch::ParamsParser
middleware.use ::Rack::Head
middleware.use ::Rack::ConditionalGet
middleware.use ::Rack::ETag, "no-cache"
end
end
# Whether the app may serve concurrent requests. Falls back to
# config.cache_classes when config.allow_concurrency is unset (nil).
def allow_concurrency?
  concurrency = config.allow_concurrency
  concurrency.nil? ? config.cache_classes : concurrency
end
# Resolves the Rack::Cache configuration: nil when disabled, a default
# metastore/entitystore hash when the setting is literally +true+, or
# the user-supplied configuration verbatim otherwise. Raises LoadError
# (with an augmented message) when the rack-cache gem is missing.
def load_rack_cache
  rack_cache = config.action_dispatch.rack_cache
  return unless rack_cache

  begin
    require 'rack/cache'
  rescue LoadError => error
    error.message << ' Be sure to add rack-cache to your Gemfile'
    raise
  end

  return rack_cache unless rack_cache == true

  {
    metastore: "rails:/",
    entitystore: "rails:/",
    verbose: false
  }
end
# The Rack app used to render exceptions: the user-configured
# exceptions_app, or Rails' default public error pages app.
def show_exceptions_app
  configured = config.exceptions_app
  configured || ActionDispatch::PublicExceptions.new(Rails.public_path)
end
# Reconstructs the request's original fullpath from a Rack env hash:
# SCRIPT_NAME + PATH_INFO, with "?QUERY_STRING" appended only when the
# query string is present.
def build_original_fullpath(env) #:nodoc:
  base = "#{env["SCRIPT_NAME"]}#{env["PATH_INFO"]}"
  query_string = env["QUERY_STRING"]
  query_string.present? ? "#{base}?#{query_string}" : base
end
end
end
|
require 'sinatra'
require 'sinatra/reloader' if development?
require 'sinatra/config_file'
require 'erubis'
require 'json'
require 'json/jwt'
require 'securerandom'
require 'rack-flash'
require 'redis-rack'
require 'mail'
require 'rdiscount'
require 'json'
require 'uri'
require_relative 'rcmcsession'
require_relative 'models/rapid_connect_service'
require_relative 'models/claims_set'
require_relative 'models/attributes_claim'
# The RapidConnect application
class RapidConnect < Sinatra::Base
configure :production, :development do
# :nocov: Doesn't run in test environment
use RapidConnectMemcacheSession, memcache_session_expiry: 3600, secure: Sinatra::Base.production?
# :nocov:
end
configure :test do
use Rack::Session::Pool, expire_in: 3600
end
use Rack::UTF8Sanitizer
use Rack::MethodOverride
use Rack::Flash, sweep: true
configure :development do
# :nocov: Doesn't run in test environment
register Sinatra::Reloader
# :nocov:
end
configure :production, :development do
# :nocov: Doesn't run in test environment
enable :logging
register Sinatra::ConfigFile
set :app_root, File.expand_path(File.join(File.dirname(__FILE__), '..'))
config_file File.join(settings.app_root, 'config', 'app_config.yml')
set :app_logfile, File.join(settings.app_root, 'logs', 'app.log')
set :audit_logfile, File.join(settings.app_root, 'logs', 'audit.log')
set :erb, escape_html: true
mail_settings = settings.mail
Mail.defaults do
delivery_method :smtp,
address: 'localhost',
port: '25',
user_name: mail_settings[:user_name],
password: mail_settings[:password],
authentication: :plain,
enable_starttls_auto: true
end
unless settings.respond_to? :hostname
set :hostname, ::URI.parse(settings.issuer).hostname
end
# :nocov:
end
attr_reader :current_version
AUTHORIZE_REGEX = /^AAF-RAPID-EXPORT service="([^"]+)", key="([^"]*)?"$/
# Sets up per-process state (Redis connection and log handles via
# check_reopen) and records the deployed application version string.
def initialize
  super
  check_reopen
  @current_version = '1.4.2-tuakiri1'
end
# (Re)opens process-wide resources after a fork: a fresh Redis
# connection and new handles for the application and audit logs.
# No-op when the current PID already owns the handles.
def check_reopen
  return if @pid == Process.pid
  @redis = Redis.new
  @app_logger = Logger.new(settings.app_logfile)
  # INFO, not DEBUG: at DEBUG the authnrequest before-filter writes full
  # JWT claims sets to app.log, and the DEBUG level (accidentally raised
  # in ed5b3c4) breaks the test suite — see the accompanying commit note.
  @app_logger.level = Logger::INFO
  @app_logger.formatter = Logger::Formatter.new
  @audit_logger = Logger.new(settings.audit_logfile)
  @audit_logger.level = Logger::INFO
  @pid = Process.pid
end
# Rack entry point: ensures logs/Redis are reopened for this process
# (important under pre-forking servers) before dispatching the request.
def call(env)
  check_reopen
  super(env)
end
##
# Marketing Site
##
get '/' do
erb :welcome, layout: nil
end
## Status for load balancer
get '/status' do
  # File.exist? — the File.exists? alias is deprecated and removed in
  # Ruby 3.2, where this endpoint would raise NoMethodError.
  if settings.status_disabled_file && File.exist?(settings.status_disabled_file)
    404
  end
  ## else return a blank 200 page
end
before %r{\A/(login|jwt)/.+}.freeze do
cache_control :no_cache
end
###
# Session Management
###
get '/login/:id' do |id|
shibboleth_login_url = "/Shibboleth.sso/Login?target=/login/shibboleth/#{id}"
if params[:entityID]
shibboleth_login_url = "#{shibboleth_login_url}&entityID=#{params[:entityID]}"
end
redirect shibboleth_login_url
end
get '/login/shibboleth/:id' do |id|
# Process Shibboleth provided login details
if env['HTTP_SHIB_SESSION_ID'] && !env['HTTP_SHIB_SESSION_ID'].empty?
targets = session[:target] || {}
target = targets[id.to_s]
if target
session[:target].delete id.to_s
# As we support more attributes in the future the subject should be extended to hold all of them
subject = {
principal: env['HTTP_PERSISTENT_ID'],
cn: env['HTTP_CN'],
display_name: env['HTTP_DISPLAYNAME'],
given_name: env['HTTP_GIVENNAME'],
surname: env['HTTP_SN'],
mail: env['HTTP_MAIL'],
principal_name: env['HTTP_EPPN'],
scoped_affiliation: env['HTTP_AFFILIATION'],
o: env['HTTP_O'],
shared_token: env['HTTP_AUEDUPERSONSHAREDTOKEN']
}
session[:subject] = subject
if valid_subject?(subject)
@app_logger.info "Established session for #{subject[:cn]}(#{subject[:principal]})"
redirect target
else
session.clear
session[:invalid_target] = target
session[:invalid_subject] = subject
redirect '/invalidsession'
end
else
session.clear
redirect '/serviceunknown'
end
else
403
end
end
get '/logout' do
if session[:subject]
@app_logger.info "Terminated session for #{session[:subject][:cn]}(#{session[:subject][:principal]})"
end
session.clear
if params[:return]
target = params[:return]
else
target = '/'
end
redirect target
end
get '/serviceunknown' do
erb :serviceunknown
end
get '/invalidsession' do
erb :invalidsession
end
# A subject is usable only when every attribute Rapid Connect relies on
# (principal, cn, mail, display_name) was supplied by the IdP.
def valid_subject?(subject)
  %i(principal cn mail display_name).all? { |attr| subject[attr].present? }
end
###
# Service Registration
###
before '/registration*' do
authenticated?
end
get '/registration' do
@organisations = load_organisations
erb :'registration/index'
end
# Loads one registered service from the 'serviceproviders' Redis hash.
# Returns the deserialised RapidConnectService (tagged with its
# identifier), or nil when the identifier is unknown.
def load_service(identifier)
  json = @redis.hget('serviceproviders', identifier)
  return nil if json.nil?
  RapidConnectService.new.from_json(json).tap do |service|
    service.identifier = identifier
  end
end
# Loads every registered service keyed by identifier, sorted by
# identifier (Redis hash ordering is not meaningful).
def load_all_services
  sorted = @redis.hgetall('serviceproviders').sort
  sorted.each_with_object({}) do |(identifier, json), services|
    services[identifier] = RapidConnectService.new.from_json(json).tap do |service|
      service.identifier = identifier
    end
  end
end
# The service attributes a registrant may supply via form parameters.
def service_attrs
  attrs = {}
  %i(organisation name audience endpoint secret).each { |key| attrs[key] = params[key] }
  attrs
end
# Registrant attribution (name and mail) taken from the authenticated
# session subject.
def registrant_attrs
  subject = session[:subject]
  { registrant_name: subject[:cn], registrant_mail: subject[:mail] }
end
# Attributes only an administrator may set, including the enabled flag
# (checkbox param present => true, absent => false).
def admin_supplied_attrs
  attrs = { enabled: !params[:enabled].nil? }
  %i(type registrant_name registrant_mail).each { |key| attrs[key] = params[key] }
  attrs
end
post '/registration/save' do
service = RapidConnectService.new
service.attributes = service_attrs.merge(registrant_attrs)
if service.valid?
identifier = service.identifier!
if @redis.hexists('serviceproviders', identifier)
@organisations = load_organisations
flash[:error] = 'Invalid identifier generated. Please re-submit registration.'
erb :'registration/index'
else
service.enabled = settings.auto_approve_in_test && (settings.federation == 'test')
service.created_at = Time.now.utc.to_i
@redis.hset('serviceproviders', identifier, service.to_json)
send_registration_email(service)
if service.enabled
session[:registration_identifier] = identifier
end
@app_logger.info "New service #{service}, endpoint: #{service.endpoint}, contact email: #{service.registrant_mail}, organisation: #{service.organisation}"
redirect to('/registration/complete')
end
else
@organisations = load_organisations
flash[:error] = "Invalid data supplied: #{service.errors.full_messages.join(', ')}"
erb :'registration/index'
end
end
get '/registration/complete' do
@identifier = nil
@approved = settings.auto_approve_in_test && settings.federation == 'test'
if @approved
@identifier = session[:registration_identifier]
end
erb :'registration/complete'
end
###
# Administration
###
before '/administration*' do
authenticated?
administrator?
end
get '/administration' do
erb :'administration/index'
end
# Administration - Services
get '/administration/services' do
@services = load_all_services
erb :'administration/services/list'
end
get '/administration/services/:identifier' do |identifier|
@identifier = identifier
@service = load_service(identifier)
halt 404 if @service.nil?
erb :'administration/services/show'
end
get '/administration/services/edit/:identifier' do |identifier|
@identifier = identifier
@service = load_service(identifier)
halt 404 if @service.nil?
@organisations = load_organisations
erb :'administration/services/edit'
end
put '/administration/services/update' do
identifier = params[:identifier]
service = load_service(identifier)
if service.nil?
flash[:error] = 'Invalid data supplied'
halt redirect to('/administration/services')
end
service.attributes = service_attrs.merge(admin_supplied_attrs)
if service.valid?
@redis.hset('serviceproviders', identifier, service.to_json)
@app_logger.info "Service #{identifier} updated by #{session[:subject][:principal]} #{session[:subject][:cn]}"
redirect to('/administration/services/' + identifier)
else
flash[:error] = 'Invalid data supplied'
redirect to('/administration/services')
end
end
patch '/administration/services/toggle/:identifier' do |identifier|
service = load_service(identifier)
halt 404 if service.nil?
service.enabled = !service.enabled
@redis.hset('serviceproviders', identifier, service.to_json)
@app_logger.info "Service #{identifier} toggled by #{session[:subject][:principal]} #{session[:subject][:cn]}"
flash[:success] = 'Service modified successfully'
redirect to('/administration/services/' + identifier)
end
delete '/administration/services/delete/:identifier' do |identifier|
service = load_service(identifier)
halt 404 if service.nil?
@redis.hdel('serviceproviders', identifier)
@app_logger.info "Service #{identifier} deleted by #{session[:subject][:principal]} #{session[:subject][:cn]}"
flash[:success] = 'Service deleted successfully'
redirect '/administration/services'
end
# Administration - Administrators
get '/administration/administrators' do
administrators_raw = @redis.hgetall('administrators')
@administrators = administrators_raw.reduce({}) { |map, (k, v)| map.merge(k => JSON.parse(v)) }
erb :'administration/administrators/list'
end
get '/administration/administrators/create' do
erb :'administration/administrators/create'
end
post '/administration/administrators/save' do
identifier = params[:identifier]
if identifier.nil? || identifier.empty?
flash[:error] = 'Invalid form data'
erb :'administration/administrators/create'
else
if @redis.hexists('administrators', identifier)
flash[:error] = 'Administrator already exists'
redirect '/administration/administrators'
else
name = params[:name]
mail = params[:mail]
if name && !name.empty? && mail && !mail.empty?
@redis.hset('administrators', identifier, { 'name' => name, 'mail' => mail }.to_json)
@app_logger.info "Current administrator #{session[:subject][:principal]} #{session[:subject][:cn]} added new administrator #{name}, #{mail}"
flash[:success] = 'Administrator added'
redirect '/administration/administrators'
else
flash[:error] = 'Invalid form data'
erb :'administration/administrators/create'
end
end
end
end
delete '/administration/administrators/delete' do
identifier = params[:identifier]
if identifier.nil? || identifier.empty?
flash[:error] = 'Invalid form data'
else
if identifier == session[:subject][:principal]
flash[:error] = 'Removing your own access is not supported'
else
if @redis.hexists('administrators', identifier)
@redis.hdel('administrators', identifier)
@app_logger.info "Current administrator #{session[:subject][:principal]} #{session[:subject][:cn]} deleted administrator #{identifier}"
flash[:success] = 'Administrator deleted successfully'
else
flash[:error] = 'No such administrator'
end
end
end
redirect '/administration/administrators'
end
###
# JWT
###
before '/jwt/*' do
authenticated?
end
# Builds a colon-delimited URN in the Rapid Connect namespace,
# e.g. binding('jwt', 'research', 'sso').
# NOTE(review): this shadows Kernel#binding within the app class.
def binding(*parts)
  parts.unshift('urn:mace:aaf.edu.au:rapid.aaf.edu.au').join(':')
end
# To enable raptor and other tools to report on rapid like we would any other
# IdP we create a shibboleth styled audit.log file for each service access.
# Fields are on a single line, separated by pipes:
#
# auditEventTime|requestBinding|requestId|relyingPartyId|messageProfileId|
# assertingPartyId|responseBinding|responseId|principalName|authNMethod|
# releasedAttributeId1,releasedAttributeId2,|nameIdentifier|
# assertion1ID,assertion2ID,|
# Writes one pipe-separated, Shibboleth-style audit line for a service
# access (field layout documented in the comment block directly above).
# +attrs+ is the list of released attribute names; the trailing empty
# fields (nameIdentifier, assertion IDs) are intentionally blank.
def audit_log(service, subject, claims, attrs)
  fields = [
    Time.now.utc.strftime('%Y%m%dT%H%M%SZ'), binding(service.type, 'get'),
    service.identifier, claims[:aud], binding('jwt', service.type, 'sso'),
    claims[:iss], binding('jwt', service.type, 'post'), claims[:jti],
    subject[:principal], 'urn:oasis:names:tc:SAML:2.0:ac:classes:XMLDSig',
    attrs.sort.join(','), '', '', ''
  ]
  @audit_logger.info(fields.join('|'))
end
before '/jwt/authnrequest/:type/:identifier' do |type, identifier|
@service = load_service(identifier)
if @service.nil? || @service.type != type
halt 404, 'There is no such endpoint defined please validate the request.'
end
unless @service.enabled
halt 403, "The service \"#{@service.name}\" is unable to process requests at this time."
end
iss = settings.issuer
aud = @service.audience
claim = AttributesClaim.new(iss, aud, session[:subject])
@app_logger.info("Retargeted principal #{session[:subject][:principal]} " \
"for #{aud} as #{claim.attributes[:edupersontargetedid]}")
@claims_set = ClaimsSet.send(type, iss, aud, claim)
@jws = @claims_set.to_jws(@service.secret)
@endpoint = @service.endpoint
@app_logger.info "Provided details for #{session[:subject][:cn]}(#{session[:subject][:mail]}) to service #{@service.name} (#{@service.endpoint})"
@app_logger.debug @claims_set.claims
end
get '/jwt/authnrequest/research/:identifier' do
attrs = @claims_set.claims[:'https://aaf.edu.au/attributes']
audit_log(@service, session['subject'], @claims_set.claims, attrs.keys)
erb :post, layout: :post
end
get '/jwt/authnrequest/auresearch/:identifier' do
attrs = @claims_set.claims[:'https://aaf.edu.au/attributes']
audit_log(@service, session['subject'], @claims_set.claims, attrs.keys)
erb :post, layout: :post
end
get '/jwt/authnrequest/zendesk/:identifier' do
attrs = %w(cn mail edupersontargetedid o)
audit_log(@service, session['subject'], @claims_set.claims, attrs)
redirect "#{@endpoint}?jwt=#{@jws}&return_to=#{params[:return_to]}"
end
get '/developers' do
erb :developers, locals: { text: markdown(:'documentation/developers') }
end
# Flash message categories the views know how to render.
def flash_types
  %i(success warning error)
end
# Before-filter guard: when no subject is in session, stash the
# requested URL under a random id and bounce the browser through
# /login/:id (optionally pinning the IdP via entityID) to establish one.
def authenticated?
  return if session[:subject]
  # Random, URL-safe key so concurrent login flows don't clobber each
  # other's return targets.
  id = SecureRandom.urlsafe_base64(24, false)
  session[:target] ||= {}
  session[:target][id] = request.url
  login_url = "/login/#{id}"
  if params[:entityID]
    login_url = "#{login_url}?entityID=#{params[:entityID]}"
  end
  redirect login_url
end
# Before-filter guard: halts with a 403 "denied" page (and a warning in
# the app log) unless the current subject's principal is listed in the
# 'administrators' Redis hash.
def administrator?
  return if @redis.hexists('administrators', session[:subject][:principal])
  @app_logger.warn "Denied access to administrative area to #{session[:subject][:principal]} #{session[:subject][:cn]}"
  status 403
  halt erb :'administration/administrators/denied'
end
##
# New Service Registration Notification
##
def send_registration_email(service)
mail_settings = settings.mail
settings_hostname = settings.hostname
service_url_research = "https://#{settings.hostname}/jwt/authnrequest/research/#{service.identifier}"
service_url_zendesk = "https://#{settings.hostname}/jwt/authnrequest/zendesk/#{service.identifier}"
if service.enabled
admin_action = "There is a new registration within AAF Rapid Connect that has been automatically approved - but we are letting you know anyway."
else
admin_action = "There is a new registration within AAF Rapid Connect that needs to be enabled."
end
Mail.deliver do
from mail_settings[:from]
to mail_settings[:to]
subject 'New service registration for AAF Rapid Connect'
html_part do
content_type 'text/html; charset=UTF-8'
body "
#{admin_action}
<br><br>
<strong>Details</strong>
<br>
<ul>
<li>Service Name: #{service.name}</li>
<li>Endpoint: #{service.endpoint}</li>
<li>Creator: #{service.registrant_name} (#{service.registrant_mail})</li>
</ul>
<br><br>
Please ensure <strong>all endpoints utilise HTTPS</strong> before enabling.
<br><br>
For more information and to enable this service please view the <a href='https://#{settings_hostname}/administration/services/#{service.identifier}'>full service record</a> in AAF Rapid Connect.
<br><br>
After reviewing and approving the service, please notify the user. We suggest the following template:
<br><hr><br>
To: \"#{service.registrant_name}\" <#{service.registrant_mail}><br>
Subject: service registration on #{settings_hostname}<br>
<br>
Dear #{service.registrant_name}<br>
<br>
Your service #{service.name} has been accepted into the AAF Rapid Connect at #{settings_hostname}<br>
<br>
You can now configure your service to use this login URL :<br>
<a href=\"#{service_url_research}\">#{service_url_research}</a><br>
<br>
Or, alternatively, if your service is a Zendesk instance, please use the following URL instead:<br>
<a href=\"#{service_url_zendesk}\">#{service_url_zendesk}</a><br>
<br>
Please contact AAF support at support@aaf.edu.au if you have any questions or need any assistance with connecting your service to AAF RapidConnect.
"
end
end
end
##
# Export Data
##
before '/export*' do
api_authenticated?
end
get '/export/service/:identifier' do |identifier|
content_type :json
service = load_service(identifier)
halt 404 if service.nil?
{ service: service_as_json(identifier, service) }.to_json
end
get '/export/services' do
content_type :json
services = load_all_services.sort.map do |(id, service)|
service_as_json(id, service)
end
{ services: services }.to_json
end
get '/export/basic' do
content_type :json
services = load_all_services.map do |(id, service)|
service_as_json(id, service).tap do |s|
s[:rapidconnect].delete(:secret)
end
end
{ services: services }.to_json
end
# JSON-ready hash describing a service for the /export endpoints.
# NOTE: the :rapidconnect hash includes the shared :secret — the
# /export/basic route deletes it before rendering.
def service_as_json(id, service)
  { id: id,
    name: service.name,
    created_at: Time.at(service.created_at).utc.xmlschema,
    contact: {
      name: service.registrant_name,
      email: service.registrant_mail,
      type: 'technical'
    },
    rapidconnect: {
      audience: service.audience,
      callback: service.endpoint,
      secret: service.secret,
      endpoints: {
        scholarly: "https://#{settings.hostname}/jwt/authnrequest/research/#{id}"
      }
    },
    enabled: service.enabled,
    organization: service.organisation }
end
# Guard for the /export API: requires exports to be enabled in settings
# and a valid AAF-RAPID-EXPORT Authorization header whose key matches
# the configured export secret.
# NOTE(review): the `secret ==` comparison below is not constant-time;
# a timing-safe compare (e.g. Rack::Utils.secure_compare) may be worth
# adopting — confirm the threat model before changing.
def api_authenticated?
  if settings.export[:enabled]
    authorization = request.env['HTTP_AUTHORIZATION']
    unless authorization && authorization =~ AUTHORIZE_REGEX
      halt 403, 'Invalid authorization token'
    end
    service, secret = authorization.match(AUTHORIZE_REGEX).captures
    unless secret == settings.export[:secret]
      halt 403, 'Invalid authorization header'
    end
    @app_logger.info "Established API session for service #{service}"
  else
    # Hide the API's existence entirely when exports are disabled.
    halt 404
  end
end
##
# Organisation names via FR
##
# Loads the organisation-name list (JSON array, path configured via
# settings.organisations), sorted case-insensitively.
def load_organisations
  # File.read instead of IO.read: IO.read executes a command when the
  # path starts with "|" (flagged by RuboCop Security/IoMethods).
  JSON.parse(File.read(settings.organisations)).sort_by(&:downcase)
end
end
Logging: revert app_logger.level from DEBUG back to INFO.
The DEBUG level was accidentally introduced in ed5b3c4 and breaks tests.
require 'sinatra'
require 'sinatra/reloader' if development?
require 'sinatra/config_file'
require 'erubis'
require 'json'
require 'json/jwt'
require 'securerandom'
require 'rack-flash'
require 'redis-rack'
require 'mail'
require 'rdiscount'
require 'json'
require 'uri'
require_relative 'rcmcsession'
require_relative 'models/rapid_connect_service'
require_relative 'models/claims_set'
require_relative 'models/attributes_claim'
# The RapidConnect application
class RapidConnect < Sinatra::Base
configure :production, :development do
# :nocov: Doesn't run in test environment
use RapidConnectMemcacheSession, memcache_session_expiry: 3600, secure: Sinatra::Base.production?
# :nocov:
end
configure :test do
use Rack::Session::Pool, expire_in: 3600
end
use Rack::UTF8Sanitizer
use Rack::MethodOverride
use Rack::Flash, sweep: true
configure :development do
# :nocov: Doesn't run in test environment
register Sinatra::Reloader
# :nocov:
end
configure :production, :development do
# :nocov: Doesn't run in test environment
enable :logging
register Sinatra::ConfigFile
set :app_root, File.expand_path(File.join(File.dirname(__FILE__), '..'))
config_file File.join(settings.app_root, 'config', 'app_config.yml')
set :app_logfile, File.join(settings.app_root, 'logs', 'app.log')
set :audit_logfile, File.join(settings.app_root, 'logs', 'audit.log')
set :erb, escape_html: true
mail_settings = settings.mail
Mail.defaults do
delivery_method :smtp,
address: 'localhost',
port: '25',
user_name: mail_settings[:user_name],
password: mail_settings[:password],
authentication: :plain,
enable_starttls_auto: true
end
unless settings.respond_to? :hostname
set :hostname, ::URI.parse(settings.issuer).hostname
end
# :nocov:
end
attr_reader :current_version
AUTHORIZE_REGEX = /^AAF-RAPID-EXPORT service="([^"]+)", key="([^"]*)?"$/
def initialize
super
check_reopen
@current_version = '1.4.2-tuakiri1'
end
# (Re)opens process-wide resources after a fork: a fresh Redis
# connection and new handles for the application (INFO level) and audit
# logs. No-op when the current PID already owns the handles.
def check_reopen
  return if @pid == Process.pid
  @redis = Redis.new
  @app_logger = Logger.new(settings.app_logfile)
  @app_logger.level = Logger::INFO
  @app_logger.formatter = Logger::Formatter.new
  @audit_logger = Logger.new(settings.audit_logfile)
  @audit_logger.level = Logger::INFO
  @pid = Process.pid
end
def call(env)
check_reopen
super(env)
end
##
# Marketing Site
##
get '/' do
erb :welcome, layout: nil
end
## Status for load balancer
get '/status' do
  # File.exist? — the File.exists? alias is deprecated and removed in
  # Ruby 3.2, where this endpoint would raise NoMethodError.
  if settings.status_disabled_file && File.exist?(settings.status_disabled_file)
    404
  end
  ## else return a blank 200 page
end
before %r{\A/(login|jwt)/.+}.freeze do
cache_control :no_cache
end
###
# Session Management
###
get '/login/:id' do |id|
shibboleth_login_url = "/Shibboleth.sso/Login?target=/login/shibboleth/#{id}"
if params[:entityID]
shibboleth_login_url = "#{shibboleth_login_url}&entityID=#{params[:entityID]}"
end
redirect shibboleth_login_url
end
get '/login/shibboleth/:id' do |id|
# Process Shibboleth provided login details
if env['HTTP_SHIB_SESSION_ID'] && !env['HTTP_SHIB_SESSION_ID'].empty?
targets = session[:target] || {}
target = targets[id.to_s]
if target
session[:target].delete id.to_s
# As we support more attributes in the future the subject should be extended to hold all of them
subject = {
principal: env['HTTP_PERSISTENT_ID'],
cn: env['HTTP_CN'],
display_name: env['HTTP_DISPLAYNAME'],
given_name: env['HTTP_GIVENNAME'],
surname: env['HTTP_SN'],
mail: env['HTTP_MAIL'],
principal_name: env['HTTP_EPPN'],
scoped_affiliation: env['HTTP_AFFILIATION'],
o: env['HTTP_O'],
shared_token: env['HTTP_AUEDUPERSONSHAREDTOKEN']
}
session[:subject] = subject
if valid_subject?(subject)
@app_logger.info "Established session for #{subject[:cn]}(#{subject[:principal]})"
redirect target
else
session.clear
session[:invalid_target] = target
session[:invalid_subject] = subject
redirect '/invalidsession'
end
else
session.clear
redirect '/serviceunknown'
end
else
403
end
end
get '/logout' do
if session[:subject]
@app_logger.info "Terminated session for #{session[:subject][:cn]}(#{session[:subject][:principal]})"
end
session.clear
if params[:return]
target = params[:return]
else
target = '/'
end
redirect target
end
get '/serviceunknown' do
erb :serviceunknown
end
get '/invalidsession' do
erb :invalidsession
end
def valid_subject?(subject)
subject[:principal].present? &&
subject[:cn].present? &&
subject[:mail].present? &&
subject[:display_name].present?
end
###
# Service Registration
###
before '/registration*' do
authenticated?
end
get '/registration' do
@organisations = load_organisations
erb :'registration/index'
end
def load_service(identifier)
json = @redis.hget('serviceproviders', identifier)
return nil if json.nil?
RapidConnectService.new.from_json(json).tap do |service|
service.identifier = identifier
end
end
def load_all_services
@redis.hgetall('serviceproviders').sort.reduce({}) do |hash, (id, json)|
service = RapidConnectService.new.from_json(json).tap do |s|
s.identifier = id
end
hash.merge(id => service)
end
end
def service_attrs
%i(organisation name audience endpoint secret).reduce({}) do |map, sym|
map.merge(sym => params[sym])
end
end
def registrant_attrs
subject = session[:subject]
{ registrant_name: subject[:cn], registrant_mail: subject[:mail] }
end
def admin_supplied_attrs
base = { enabled: !params[:enabled].nil? }
%i(type registrant_name registrant_mail).reduce(base) do |map, sym|
map.merge(sym => params[sym])
end
end
post '/registration/save' do
service = RapidConnectService.new
service.attributes = service_attrs.merge(registrant_attrs)
if service.valid?
identifier = service.identifier!
if @redis.hexists('serviceproviders', identifier)
@organisations = load_organisations
flash[:error] = 'Invalid identifier generated. Please re-submit registration.'
erb :'registration/index'
else
service.enabled = settings.auto_approve_in_test && (settings.federation == 'test')
service.created_at = Time.now.utc.to_i
@redis.hset('serviceproviders', identifier, service.to_json)
send_registration_email(service)
if service.enabled
session[:registration_identifier] = identifier
end
@app_logger.info "New service #{service}, endpoint: #{service.endpoint}, contact email: #{service.registrant_mail}, organisation: #{service.organisation}"
redirect to('/registration/complete')
end
else
@organisations = load_organisations
flash[:error] = "Invalid data supplied: #{service.errors.full_messages.join(', ')}"
erb :'registration/index'
end
end
get '/registration/complete' do
@identifier = nil
@approved = settings.auto_approve_in_test && settings.federation == 'test'
if @approved
@identifier = session[:registration_identifier]
end
erb :'registration/complete'
end
###
# Administration
###
before '/administration*' do
authenticated?
administrator?
end
get '/administration' do
erb :'administration/index'
end
# Administration - Services
get '/administration/services' do
@services = load_all_services
erb :'administration/services/list'
end
get '/administration/services/:identifier' do |identifier|
@identifier = identifier
@service = load_service(identifier)
halt 404 if @service.nil?
erb :'administration/services/show'
end
get '/administration/services/edit/:identifier' do |identifier|
@identifier = identifier
@service = load_service(identifier)
halt 404 if @service.nil?
@organisations = load_organisations
erb :'administration/services/edit'
end
put '/administration/services/update' do
identifier = params[:identifier]
service = load_service(identifier)
if service.nil?
flash[:error] = 'Invalid data supplied'
halt redirect to('/administration/services')
end
service.attributes = service_attrs.merge(admin_supplied_attrs)
if service.valid?
@redis.hset('serviceproviders', identifier, service.to_json)
@app_logger.info "Service #{identifier} updated by #{session[:subject][:principal]} #{session[:subject][:cn]}"
redirect to('/administration/services/' + identifier)
else
flash[:error] = 'Invalid data supplied'
redirect to('/administration/services')
end
end
patch '/administration/services/toggle/:identifier' do |identifier|
service = load_service(identifier)
halt 404 if service.nil?
service.enabled = !service.enabled
@redis.hset('serviceproviders', identifier, service.to_json)
@app_logger.info "Service #{identifier} toggled by #{session[:subject][:principal]} #{session[:subject][:cn]}"
flash[:success] = 'Service modified successfully'
redirect to('/administration/services/' + identifier)
end
delete '/administration/services/delete/:identifier' do |identifier|
service = load_service(identifier)
halt 404 if service.nil?
@redis.hdel('serviceproviders', identifier)
@app_logger.info "Service #{identifier} deleted by #{session[:subject][:principal]} #{session[:subject][:cn]}"
flash[:success] = 'Service deleted successfully'
redirect '/administration/services'
end
# Administration - Administrators
get '/administration/administrators' do
administrators_raw = @redis.hgetall('administrators')
@administrators = administrators_raw.reduce({}) { |map, (k, v)| map.merge(k => JSON.parse(v)) }
erb :'administration/administrators/list'
end
get '/administration/administrators/create' do
erb :'administration/administrators/create'
end
post '/administration/administrators/save' do
identifier = params[:identifier]
if identifier.nil? || identifier.empty?
flash[:error] = 'Invalid form data'
erb :'administration/administrators/create'
else
if @redis.hexists('administrators', identifier)
flash[:error] = 'Administrator already exists'
redirect '/administration/administrators'
else
name = params[:name]
mail = params[:mail]
if name && !name.empty? && mail && !mail.empty?
@redis.hset('administrators', identifier, { 'name' => name, 'mail' => mail }.to_json)
@app_logger.info "Current administrator #{session[:subject][:principal]} #{session[:subject][:cn]} added new administrator #{name}, #{mail}"
flash[:success] = 'Administrator added'
redirect '/administration/administrators'
else
flash[:error] = 'Invalid form data'
erb :'administration/administrators/create'
end
end
end
end
delete '/administration/administrators/delete' do
identifier = params[:identifier]
if identifier.nil? || identifier.empty?
flash[:error] = 'Invalid form data'
else
if identifier == session[:subject][:principal]
flash[:error] = 'Removing your own access is not supported'
else
if @redis.hexists('administrators', identifier)
@redis.hdel('administrators', identifier)
@app_logger.info "Current administrator #{session[:subject][:principal]} #{session[:subject][:cn]} deleted administrator #{identifier}"
flash[:success] = 'Administrator deleted successfully'
else
flash[:error] = 'No such administrator'
end
end
end
redirect '/administration/administrators'
end
###
# JWT
###
before '/jwt/*' do
authenticated?
end
def binding(*parts)
['urn:mace:aaf.edu.au:rapid.aaf.edu.au', *parts].join(':')
end
# To enable raptor and other tools to report on rapid like we would any other
# IdP we create a shibboleth styled audit.log file for each service access.
# Fields are on a single line, separated by pipes:
#
# auditEventTime|requestBinding|requestId|relyingPartyId|messageProfileId|
# assertingPartyId|responseBinding|responseId|principalName|authNMethod|
# releasedAttributeId1,releasedAttributeId2,|nameIdentifier|
# assertion1ID,assertion2ID,|
def audit_log(service, subject, claims, attrs)
fields = [
Time.now.utc.strftime('%Y%m%dT%H%M%SZ'), binding(service.type, 'get'),
service.identifier, claims[:aud], binding('jwt', service.type, 'sso'),
claims[:iss], binding('jwt', service.type, 'post'), claims[:jti],
subject[:principal], 'urn:oasis:names:tc:SAML:2.0:ac:classes:XMLDSig',
attrs.sort.join(','), '', '', ''
]
@audit_logger.info(fields.join('|'))
end
before '/jwt/authnrequest/:type/:identifier' do |type, identifier|
@service = load_service(identifier)
if @service.nil? || @service.type != type
halt 404, 'There is no such endpoint defined please validate the request.'
end
unless @service.enabled
halt 403, "The service \"#{@service.name}\" is unable to process requests at this time."
end
iss = settings.issuer
aud = @service.audience
claim = AttributesClaim.new(iss, aud, session[:subject])
@app_logger.info("Retargeted principal #{session[:subject][:principal]} " \
"for #{aud} as #{claim.attributes[:edupersontargetedid]}")
@claims_set = ClaimsSet.send(type, iss, aud, claim)
@jws = @claims_set.to_jws(@service.secret)
@endpoint = @service.endpoint
@app_logger.info "Provided details for #{session[:subject][:cn]}(#{session[:subject][:mail]}) to service #{@service.name} (#{@service.endpoint})"
@app_logger.debug @claims_set.claims
end
get '/jwt/authnrequest/research/:identifier' do
attrs = @claims_set.claims[:'https://aaf.edu.au/attributes']
audit_log(@service, session['subject'], @claims_set.claims, attrs.keys)
erb :post, layout: :post
end
get '/jwt/authnrequest/auresearch/:identifier' do
attrs = @claims_set.claims[:'https://aaf.edu.au/attributes']
audit_log(@service, session['subject'], @claims_set.claims, attrs.keys)
erb :post, layout: :post
end
get '/jwt/authnrequest/zendesk/:identifier' do
attrs = %w(cn mail edupersontargetedid o)
audit_log(@service, session['subject'], @claims_set.claims, attrs)
redirect "#{@endpoint}?jwt=#{@jws}&return_to=#{params[:return_to]}"
end
get '/developers' do
erb :developers, locals: { text: markdown(:'documentation/developers') }
end
def flash_types
[:success, :warning, :error]
end
def authenticated?
return if session[:subject]
id = SecureRandom.urlsafe_base64(24, false)
session[:target] ||= {}
session[:target][id] = request.url
login_url = "/login/#{id}"
if params[:entityID]
login_url = "#{login_url}?entityID=#{params[:entityID]}"
end
redirect login_url
end
# Halts with 403 and the "denied" page unless the current subject's principal
# appears in the Redis 'administrators' hash; falls through (nil) for admins.
def administrator?
  return if @redis.hexists('administrators', session[:subject][:principal])
  @app_logger.warn "Denied access to administrative area to #{session[:subject][:principal]} #{session[:subject][:cn]}"
  status 403
  halt erb :'administration/administrators/denied'
end
##
# New Service Registration Notification
#
# Emails the Rapid Connect administrators about a newly registered service,
# including a suggested approval-notification template for the registrant.
##
def send_registration_email(service)
  # Captured into locals because Mail.deliver instance_evals its block, so
  # Sinatra's `settings` helper is not visible inside it.
  mail_settings = settings.mail
  settings_hostname = settings.hostname
  service_url_research = "https://#{settings.hostname}/jwt/authnrequest/research/#{service.identifier}"
  service_url_zendesk = "https://#{settings.hostname}/jwt/authnrequest/zendesk/#{service.identifier}"
  # Auto-approved registrations still notify admins; manual ones ask for action.
  if service.enabled
    admin_action = "There is a new registration within AAF Rapid Connect that has been automatically approved - but we are letting you know anyway."
  else
    admin_action = "There is a new registration within AAF Rapid Connect that needs to be enabled."
  end
  Mail.deliver do
    from mail_settings[:from]
    to mail_settings[:to]
    subject 'New service registration for AAF Rapid Connect'
    html_part do
      content_type 'text/html; charset=UTF-8'
      body "
#{admin_action}
<br><br>
<strong>Details</strong>
<br>
<ul>
<li>Service Name: #{service.name}</li>
<li>Endpoint: #{service.endpoint}</li>
<li>Creator: #{service.registrant_name} (#{service.registrant_mail})</li>
</ul>
<br><br>
Please ensure <strong>all endpoints utilise HTTPS</strong> before enabling.
<br><br>
For more information and to enable this service please view the <a href='https://#{settings_hostname}/administration/services/#{service.identifier}'>full service record</a> in AAF Rapid Connect.
<br><br>
After reviewing and approving the service, please notify the user. We suggest the following template:
<br><hr><br>
To: \"#{service.registrant_name}\" <#{service.registrant_mail}><br>
Subject: service registration on #{settings_hostname}<br>
<br>
Dear #{service.registrant_name}<br>
<br>
Your service #{service.name} has been accepted into the AAF Rapid Connect at #{settings_hostname}<br>
<br>
You can now configure your service to use this login URL :<br>
<a href=\"#{service_url_research}\">#{service_url_research}</a><br>
<br>
Or, alternatively, if your service is a Zendesk instance, please use the following URL instead:<br>
<a href=\"#{service_url_zendesk}\">#{service_url_zendesk}</a><br>
<br>
Please contact AAF support at support@aaf.edu.au if you have any questions or need any assistance with connecting your service to AAF RapidConnect.
"
    end
  end
end
##
# Export Data
#
# Every /export* endpoint requires a valid API authorization header.
##
before '/export*' do
  api_authenticated?
end
# Exports a single service record (including its shared secret) as JSON.
get '/export/service/:identifier' do |identifier|
  content_type :json
  service = load_service(identifier)
  halt 404 if service.nil?
  { service: service_as_json(identifier, service) }.to_json
end
# Exports every service record, sorted, secrets included.
get '/export/services' do
  content_type :json
  services = load_all_services.sort.map do |(id, service)|
    service_as_json(id, service)
  end
  { services: services }.to_json
end
# Exports every service record with the shared secret stripped — safe for
# lower-privilege consumers.
get '/export/basic' do
  content_type :json
  services = load_all_services.map do |(id, service)|
    service_as_json(id, service).tap do |s|
      s[:rapidconnect].delete(:secret)
    end
  end
  { services: services }.to_json
end
# Serialises a service record for the export API.
#
# @param id [String] the service identifier
# @param service [Object] the loaded service record
# @return [Hash] JSON-ready representation — note it includes the shared
#   secret; /export/basic strips it after the fact.
def service_as_json(id, service)
  { id: id,
    name: service.name,
    # created_at is stored as a Unix timestamp; exported as UTC xmlschema.
    created_at: Time.at(service.created_at).utc.xmlschema,
    contact: {
      name: service.registrant_name,
      email: service.registrant_mail,
      type: 'technical'
    },
    rapidconnect: {
      audience: service.audience,
      callback: service.endpoint,
      secret: service.secret,
      endpoints: {
        scholarly: "https://#{settings.hostname}/jwt/authnrequest/research/#{id}"
      }
    },
    enabled: service.enabled,
    organization: service.organisation }
end
# Guard for the export API.
#
# When exporting is enabled, requires an Authorization header matching
# AUTHORIZE_REGEX whose captured secret equals the configured export secret;
# halts 403 otherwise. When exporting is disabled the endpoints 404, making
# them indistinguishable from absent routes.
def api_authenticated?
  if settings.export[:enabled]
    authorization = request.env['HTTP_AUTHORIZATION']
    unless authorization && authorization =~ AUTHORIZE_REGEX
      halt 403, 'Invalid authorization token'
    end
    service, secret = authorization.match(AUTHORIZE_REGEX).captures
    # NOTE(review): plain == leaks timing information when comparing secrets;
    # consider Rack::Utils.secure_compare — confirm the threat model first.
    unless secret == settings.export[:secret]
      halt 403, 'Invalid authorization header'
    end
    @app_logger.info "Established API session for service #{service}"
  else
    halt 404
  end
end
##
# Organisation names via FR
#
# Reads the Federation-Registry-sourced organisations JSON file configured in
# settings and returns the names sorted case-insensitively.
##
def load_organisations
  raw = IO.read(settings.organisations)
  JSON.parse(raw).sort_by { |name| name.downcase }
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "amazon_seller_central"
  s.version = "0.2.11"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["optoro"]
  s.date = "2012-11-19"
  s.description = "This gem is intended to wrap Amazon's SellerCentral pages with a Ruby API. Currently this gem supports accessing buyer feedback only."
  s.email = "dev@optoro.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.md"
  ]
  # Complete file manifest, regenerated by jeweler from the repository.
  s.files = [
    ".document",
    ".rspec",
    ".rvmrc",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "Guardfile",
    "LICENSE.txt",
    "README.md",
    "Rakefile",
    "VERSION",
    "amazon_seller_central.gemspec",
    "lib/amazon_seller_central.rb",
    "lib/amazon_seller_central/configuration.rb",
    "lib/amazon_seller_central/feedback.rb",
    "lib/amazon_seller_central/feedback_page.rb",
    "lib/amazon_seller_central/inventory.rb",
    "lib/amazon_seller_central/inventory_page.rb",
    "lib/amazon_seller_central/listing.rb",
    "lib/amazon_seller_central/listing_set.rb",
    "lib/amazon_seller_central/mechanizer.rb",
    "lib/amazon_seller_central/order.rb",
    "lib/amazon_seller_central/orders_page.rb",
    "lib/amazon_seller_central/page.rb",
    "lib/amazon_seller_central/payment.rb",
    "lib/amazon_seller_central/payments_page.rb",
    "spec/amazon_seller_central_spec.rb",
    "spec/lib/feedback_page_spec.rb",
    "spec/lib/feedback_spec.rb",
    "spec/lib/inventory_page_spec.rb",
    "spec/lib/inventory_spec.rb",
    "spec/lib/listing_set_spec.rb",
    "spec/lib/listing_spec.rb",
    "spec/lib/mechanizer_spec.rb",
    "spec/lib/orders_page_spec.rb",
    "spec/lib/payments_page_spec.rb",
    "spec/spec_helper.rb",
    "spec/support/page_body_regexen.rb",
    "spec/support/page_examples.rb",
    "spec/support/sample_pages.rb",
    "spec/support/sample_pages/Feedback Manager.html",
    "spec/support/sample_pages/Feedback Page 1.html",
    "spec/support/sample_pages/Feedback Page 2.html",
    "spec/support/sample_pages/Feedback Page Last.html",
    "spec/support/sample_pages/Manage Orders.html",
    "spec/support/sample_pages/Payments Page.html",
    "spec/support/sample_pages/Seller Central Homepage.html",
    "spec/support/sample_pages/Seller Central Redirect.html",
    "spec/support/sample_pages/Seller Central.html",
    "spec/support/sample_pages/Settlement Payment Reports 1.html",
    "spec/support/sample_pages/Settlement Payment Reports 2.html",
    "spec/support/sample_pages/another_listings_page.html",
    "spec/support/sample_pages/listings_last_page.html",
    "spec/support/sample_pages/listings_page_1.html",
    "spec/support/sample_pages/listings_page_2.html",
    "spec/support/sample_pages/update_inventory_result_from_last_page.html",
    "spec/support/sample_pages/update_inventory_result_from_page_1.html",
    "spec/support/sample_pages/update_inventory_result_from_page_2.html"
  ]
  s.homepage = "http://github.com/optoro/amazon_seller_central"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "Ruby API to access Amazon's SellerCentral"
  # Jeweler emits three equivalent dependency blocks so the gemspec loads on
  # successively older RubyGems versions (runtime/development split requires
  # RubyGems >= 1.2 and specification_version support).
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<yard>, [">= 0"])
      s.add_development_dependency(%q<guard>, [">= 0"])
      s.add_development_dependency(%q<guard-rspec>, [">= 0"])
      s.add_development_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_development_dependency(%q<libnotify>, [">= 0"])
      s.add_development_dependency(%q<ffi>, [">= 0"])
      s.add_development_dependency(%q<faker>, [">= 0"])
      s.add_development_dependency(%q<fakeweb>, [">= 0"])
      s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
    else
      s.add_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<yard>, [">= 0"])
      s.add_dependency(%q<guard>, [">= 0"])
      s.add_dependency(%q<guard-rspec>, [">= 0"])
      s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_dependency(%q<libnotify>, [">= 0"])
      s.add_dependency(%q<ffi>, [">= 0"])
      s.add_dependency(%q<faker>, [">= 0"])
      s.add_dependency(%q<fakeweb>, [">= 0"])
      s.add_dependency(%q<ruby-debug19>, [">= 0"])
    end
  else
    s.add_dependency(%q<mechanize>, [">= 1.0.0"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<yard>, [">= 0"])
    s.add_dependency(%q<guard>, [">= 0"])
    s.add_dependency(%q<guard-rspec>, [">= 0"])
    s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
    s.add_dependency(%q<libnotify>, [">= 0"])
    s.add_dependency(%q<ffi>, [">= 0"])
    s.add_dependency(%q<faker>, [">= 0"])
    s.add_dependency(%q<fakeweb>, [">= 0"])
    s.add_dependency(%q<ruby-debug19>, [">= 0"])
  end
end
Regenerate gemspec for version 0.2.12
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "amazon_seller_central"
  s.version = "0.2.12"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["optoro"]
  s.date = "2013-05-01"
  s.description = "This gem is intended to wrap Amazon's SellerCentral pages with a Ruby API. Currently this gem supports accessing buyer feedback only."
  s.email = "dev@optoro.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.md"
  ]
  # Complete file manifest, regenerated by jeweler from the repository.
  s.files = [
    ".document",
    ".rspec",
    ".rvmrc",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "Guardfile",
    "LICENSE.txt",
    "README.md",
    "Rakefile",
    "VERSION",
    "amazon_seller_central.gemspec",
    "lib/amazon_seller_central.rb",
    "lib/amazon_seller_central/configuration.rb",
    "lib/amazon_seller_central/feedback.rb",
    "lib/amazon_seller_central/feedback_page.rb",
    "lib/amazon_seller_central/inventory.rb",
    "lib/amazon_seller_central/inventory_page.rb",
    "lib/amazon_seller_central/listing.rb",
    "lib/amazon_seller_central/listing_set.rb",
    "lib/amazon_seller_central/mechanizer.rb",
    "lib/amazon_seller_central/order.rb",
    "lib/amazon_seller_central/orders_page.rb",
    "lib/amazon_seller_central/page.rb",
    "lib/amazon_seller_central/payment.rb",
    "lib/amazon_seller_central/payments_page.rb",
    "spec/amazon_seller_central_spec.rb",
    "spec/lib/feedback_page_spec.rb",
    "spec/lib/feedback_spec.rb",
    "spec/lib/inventory_page_spec.rb",
    "spec/lib/inventory_spec.rb",
    "spec/lib/listing_set_spec.rb",
    "spec/lib/listing_spec.rb",
    "spec/lib/mechanizer_spec.rb",
    "spec/lib/orders_page_spec.rb",
    "spec/lib/payments_page_spec.rb",
    "spec/spec_helper.rb",
    "spec/support/page_body_regexen.rb",
    "spec/support/page_examples.rb",
    "spec/support/sample_pages.rb",
    "spec/support/sample_pages/Feedback Manager.html",
    "spec/support/sample_pages/Feedback Page 1.html",
    "spec/support/sample_pages/Feedback Page 2.html",
    "spec/support/sample_pages/Feedback Page Last.html",
    "spec/support/sample_pages/Manage Orders.html",
    "spec/support/sample_pages/Payments Page.html",
    "spec/support/sample_pages/Seller Central Homepage.html",
    "spec/support/sample_pages/Seller Central Redirect.html",
    "spec/support/sample_pages/Seller Central.html",
    "spec/support/sample_pages/Settlement Payment Reports 1.html",
    "spec/support/sample_pages/Settlement Payment Reports 2.html",
    "spec/support/sample_pages/another_listings_page.html",
    "spec/support/sample_pages/listings_last_page.html",
    "spec/support/sample_pages/listings_page_1.html",
    "spec/support/sample_pages/listings_page_2.html",
    "spec/support/sample_pages/update_inventory_result_from_last_page.html",
    "spec/support/sample_pages/update_inventory_result_from_page_1.html",
    "spec/support/sample_pages/update_inventory_result_from_page_2.html"
  ]
  s.homepage = "http://github.com/optoro/amazon_seller_central"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "Ruby API to access Amazon's SellerCentral"
  # Jeweler emits three equivalent dependency blocks so the gemspec loads on
  # successively older RubyGems versions (runtime/development split requires
  # RubyGems >= 1.2 and specification_version support).
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<yard>, [">= 0"])
      s.add_development_dependency(%q<guard>, [">= 0"])
      s.add_development_dependency(%q<guard-rspec>, [">= 0"])
      s.add_development_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_development_dependency(%q<libnotify>, [">= 0"])
      s.add_development_dependency(%q<ffi>, [">= 0"])
      s.add_development_dependency(%q<faker>, [">= 0"])
      s.add_development_dependency(%q<fakeweb>, [">= 0"])
      s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
    else
      s.add_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<yard>, [">= 0"])
      s.add_dependency(%q<guard>, [">= 0"])
      s.add_dependency(%q<guard-rspec>, [">= 0"])
      s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_dependency(%q<libnotify>, [">= 0"])
      s.add_dependency(%q<ffi>, [">= 0"])
      s.add_dependency(%q<faker>, [">= 0"])
      s.add_dependency(%q<fakeweb>, [">= 0"])
      s.add_dependency(%q<ruby-debug19>, [">= 0"])
    end
  else
    s.add_dependency(%q<mechanize>, [">= 1.0.0"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<yard>, [">= 0"])
    s.add_dependency(%q<guard>, [">= 0"])
    s.add_dependency(%q<guard-rspec>, [">= 0"])
    s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
    s.add_dependency(%q<libnotify>, [">= 0"])
    s.add_dependency(%q<ffi>, [">= 0"])
    s.add_dependency(%q<faker>, [">= 0"])
    s.add_dependency(%q<fakeweb>, [">= 0"])
    s.add_dependency(%q<ruby-debug19>, [">= 0"])
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "amazon_seller_central"
  s.version = "0.2.6"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["optoro"]
  s.date = "2012-07-24"
  s.description = "This gem is intended to wrap Amazon's SellerCentral pages with a Ruby API. Currently this gem supports accessing buyer feedback only."
  s.email = "dev@optoro.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.md"
  ]
  # Complete file manifest, regenerated by jeweler from the repository.
  s.files = [
    ".document",
    ".rspec",
    ".rvmrc",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "Guardfile",
    "LICENSE.txt",
    "README.md",
    "Rakefile",
    "VERSION",
    "amazon_seller_central.gemspec",
    "lib/amazon_seller_central.rb",
    "lib/amazon_seller_central/configuration.rb",
    "lib/amazon_seller_central/feedback.rb",
    "lib/amazon_seller_central/feedback_page.rb",
    "lib/amazon_seller_central/inventory.rb",
    "lib/amazon_seller_central/inventory_page.rb",
    "lib/amazon_seller_central/listing.rb",
    "lib/amazon_seller_central/listing_set.rb",
    "lib/amazon_seller_central/mechanizer.rb",
    "lib/amazon_seller_central/order.rb",
    "lib/amazon_seller_central/orders_page.rb",
    "lib/amazon_seller_central/page.rb",
    "lib/amazon_seller_central/payment.rb",
    "lib/amazon_seller_central/payments_page.rb",
    "spec/amazon_seller_central_spec.rb",
    "spec/lib/feedback_page_spec.rb",
    "spec/lib/feedback_spec.rb",
    "spec/lib/inventory_page_spec.rb",
    "spec/lib/inventory_spec.rb",
    "spec/lib/listing_set_spec.rb",
    "spec/lib/listing_spec.rb",
    "spec/lib/mechanizer_spec.rb",
    "spec/lib/orders_page_spec.rb",
    "spec/lib/payments_page_spec.rb",
    "spec/spec_helper.rb",
    "spec/support/page_body_regexen.rb",
    "spec/support/page_examples.rb",
    "spec/support/sample_pages.rb",
    "spec/support/sample_pages/Feedback Manager.html",
    "spec/support/sample_pages/Feedback Page 1.html",
    "spec/support/sample_pages/Feedback Page 2.html",
    "spec/support/sample_pages/Feedback Page Last.html",
    "spec/support/sample_pages/Manage Orders.html",
    "spec/support/sample_pages/Payments Page.html",
    "spec/support/sample_pages/Seller Central Homepage.html",
    "spec/support/sample_pages/Seller Central Redirect.html",
    "spec/support/sample_pages/Seller Central.html",
    "spec/support/sample_pages/Settlement Payment Reports 1.html",
    "spec/support/sample_pages/Settlement Payment Reports 2.html",
    "spec/support/sample_pages/another_listings_page.html",
    "spec/support/sample_pages/listings_last_page.html",
    "spec/support/sample_pages/listings_page_1.html",
    "spec/support/sample_pages/listings_page_2.html",
    "spec/support/sample_pages/update_inventory_result_from_last_page.html",
    "spec/support/sample_pages/update_inventory_result_from_page_1.html",
    "spec/support/sample_pages/update_inventory_result_from_page_2.html"
  ]
  s.homepage = "http://github.com/optoro/amazon_seller_central"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "Ruby API to access Amazon's SellerCentral"
  # Jeweler emits three equivalent dependency blocks so the gemspec loads on
  # successively older RubyGems versions (runtime/development split requires
  # RubyGems >= 1.2 and specification_version support).
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<yard>, [">= 0"])
      s.add_development_dependency(%q<guard>, [">= 0"])
      s.add_development_dependency(%q<guard-rspec>, [">= 0"])
      s.add_development_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_development_dependency(%q<libnotify>, [">= 0"])
      s.add_development_dependency(%q<ffi>, [">= 0"])
      s.add_development_dependency(%q<faker>, [">= 0"])
      s.add_development_dependency(%q<fakeweb>, [">= 0"])
      s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
    else
      s.add_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<yard>, [">= 0"])
      s.add_dependency(%q<guard>, [">= 0"])
      s.add_dependency(%q<guard-rspec>, [">= 0"])
      s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_dependency(%q<libnotify>, [">= 0"])
      s.add_dependency(%q<ffi>, [">= 0"])
      s.add_dependency(%q<faker>, [">= 0"])
      s.add_dependency(%q<fakeweb>, [">= 0"])
      s.add_dependency(%q<ruby-debug19>, [">= 0"])
    end
  else
    s.add_dependency(%q<mechanize>, [">= 1.0.0"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<yard>, [">= 0"])
    s.add_dependency(%q<guard>, [">= 0"])
    s.add_dependency(%q<guard-rspec>, [">= 0"])
    s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
    s.add_dependency(%q<libnotify>, [">= 0"])
    s.add_dependency(%q<ffi>, [">= 0"])
    s.add_dependency(%q<faker>, [">= 0"])
    s.add_dependency(%q<fakeweb>, [">= 0"])
    s.add_dependency(%q<ruby-debug19>, [">= 0"])
  end
end
Regenerate gemspec for version 0.2.7
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "amazon_seller_central"
  s.version = "0.2.7"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["optoro"]
  s.date = "2012-08-08"
  s.description = "This gem is intended to wrap Amazon's SellerCentral pages with a Ruby API. Currently this gem supports accessing buyer feedback only."
  s.email = "dev@optoro.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.md"
  ]
  # Complete file manifest, regenerated by jeweler from the repository.
  s.files = [
    ".document",
    ".rspec",
    ".rvmrc",
    "CHANGELOG.md",
    "Gemfile",
    "Gemfile.lock",
    "Guardfile",
    "LICENSE.txt",
    "README.md",
    "Rakefile",
    "VERSION",
    "amazon_seller_central.gemspec",
    "lib/amazon_seller_central.rb",
    "lib/amazon_seller_central/configuration.rb",
    "lib/amazon_seller_central/feedback.rb",
    "lib/amazon_seller_central/feedback_page.rb",
    "lib/amazon_seller_central/inventory.rb",
    "lib/amazon_seller_central/inventory_page.rb",
    "lib/amazon_seller_central/listing.rb",
    "lib/amazon_seller_central/listing_set.rb",
    "lib/amazon_seller_central/mechanizer.rb",
    "lib/amazon_seller_central/order.rb",
    "lib/amazon_seller_central/orders_page.rb",
    "lib/amazon_seller_central/page.rb",
    "lib/amazon_seller_central/payment.rb",
    "lib/amazon_seller_central/payments_page.rb",
    "spec/amazon_seller_central_spec.rb",
    "spec/lib/feedback_page_spec.rb",
    "spec/lib/feedback_spec.rb",
    "spec/lib/inventory_page_spec.rb",
    "spec/lib/inventory_spec.rb",
    "spec/lib/listing_set_spec.rb",
    "spec/lib/listing_spec.rb",
    "spec/lib/mechanizer_spec.rb",
    "spec/lib/orders_page_spec.rb",
    "spec/lib/payments_page_spec.rb",
    "spec/spec_helper.rb",
    "spec/support/page_body_regexen.rb",
    "spec/support/page_examples.rb",
    "spec/support/sample_pages.rb",
    "spec/support/sample_pages/Feedback Manager.html",
    "spec/support/sample_pages/Feedback Page 1.html",
    "spec/support/sample_pages/Feedback Page 2.html",
    "spec/support/sample_pages/Feedback Page Last.html",
    "spec/support/sample_pages/Manage Orders.html",
    "spec/support/sample_pages/Payments Page.html",
    "spec/support/sample_pages/Seller Central Homepage.html",
    "spec/support/sample_pages/Seller Central Redirect.html",
    "spec/support/sample_pages/Seller Central.html",
    "spec/support/sample_pages/Settlement Payment Reports 1.html",
    "spec/support/sample_pages/Settlement Payment Reports 2.html",
    "spec/support/sample_pages/another_listings_page.html",
    "spec/support/sample_pages/listings_last_page.html",
    "spec/support/sample_pages/listings_page_1.html",
    "spec/support/sample_pages/listings_page_2.html",
    "spec/support/sample_pages/update_inventory_result_from_last_page.html",
    "spec/support/sample_pages/update_inventory_result_from_page_1.html",
    "spec/support/sample_pages/update_inventory_result_from_page_2.html"
  ]
  s.homepage = "http://github.com/optoro/amazon_seller_central"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "Ruby API to access Amazon's SellerCentral"
  # Jeweler emits three equivalent dependency blocks so the gemspec loads on
  # successively older RubyGems versions (runtime/development split requires
  # RubyGems >= 1.2 and specification_version support).
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_development_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
      s.add_development_dependency(%q<yard>, [">= 0"])
      s.add_development_dependency(%q<guard>, [">= 0"])
      s.add_development_dependency(%q<guard-rspec>, [">= 0"])
      s.add_development_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_development_dependency(%q<libnotify>, [">= 0"])
      s.add_development_dependency(%q<ffi>, [">= 0"])
      s.add_development_dependency(%q<faker>, [">= 0"])
      s.add_development_dependency(%q<fakeweb>, [">= 0"])
      s.add_development_dependency(%q<ruby-debug19>, [">= 0"])
    else
      s.add_dependency(%q<mechanize>, [">= 1.0.0"])
      s.add_dependency(%q<rspec>, ["~> 2.3.0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
      s.add_dependency(%q<rcov>, [">= 0"])
      s.add_dependency(%q<yard>, [">= 0"])
      s.add_dependency(%q<guard>, [">= 0"])
      s.add_dependency(%q<guard-rspec>, [">= 0"])
      s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
      s.add_dependency(%q<libnotify>, [">= 0"])
      s.add_dependency(%q<ffi>, [">= 0"])
      s.add_dependency(%q<faker>, [">= 0"])
      s.add_dependency(%q<fakeweb>, [">= 0"])
      s.add_dependency(%q<ruby-debug19>, [">= 0"])
    end
  else
    s.add_dependency(%q<mechanize>, [">= 1.0.0"])
    s.add_dependency(%q<rspec>, ["~> 2.3.0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
    s.add_dependency(%q<rcov>, [">= 0"])
    s.add_dependency(%q<yard>, [">= 0"])
    s.add_dependency(%q<guard>, [">= 0"])
    s.add_dependency(%q<guard-rspec>, [">= 0"])
    s.add_dependency(%q<rb-inotify>, [">= 0.8.5"])
    s.add_dependency(%q<libnotify>, [">= 0"])
    s.add_dependency(%q<ffi>, [">= 0"])
    s.add_dependency(%q<faker>, [">= 0"])
    s.add_dependency(%q<fakeweb>, [">= 0"])
    s.add_dependency(%q<ruby-debug19>, [">= 0"])
  end
end
|
module DPL
  class Provider
    # Deploys applications to Cloud Foundry using the `cf` CLI, which is
    # downloaded on demand during check_auth.
    class CloudFoundry < Provider
      # Fetches and unpacks the stable cf CLI binary for the current OS.
      def initial_go_tools_install
        context.shell 'test x$TRAVIS_OS_NAME = "xlinux" && rel="linux64-binary" || rel="macosx64"; wget "https://cli.run.pivotal.io/stable?release=${rel}&source=github" -qO cf.tgz && tar -zxvf cf.tgz && rm cf.tgz'
      end

      # Installs the CLI, targets the configured API endpoint and logs in
      # with client credentials when a client_id is supplied, otherwise with
      # username/password.
      def check_auth
        initial_go_tools_install
        context.shell "./cf api #{option(:api)} #{'--skip-ssl-validation' if options[:skip_ssl_validation]}"
        options[:client_id] ? check_client_auth : check_basic_auth
      end

      # A named manifest must exist for an unattended deployment.
      def check_app
        if options[:manifest]
          # File.exists? is deprecated (and removed in Ruby 3.2); File.exist?
          # is the supported spelling.
          error 'Application must have a manifest.yml for unattended deployment' unless File.exist? options[:manifest]
        end
      end

      # Cloud Foundry authenticates with credentials, not an SSH deploy key.
      def needs_key?
        false
      end

      # Pushes the app; the CLI session is always logged out afterwards.
      def push_app
        error 'Failed to push app' unless context.shell("./cf push#{app_name}#{manifest}")
      ensure
        context.shell "./cf logout"
      end

      def cleanup
      end

      def uncleanup
      end

      # CLI argument fragment naming the app. Quoted so app names containing
      # spaces survive shell word-splitting (previously an unquoted name was
      # split into multiple arguments by the shell).
      def app_name
        options[:app_name].nil? ? "" : " '#{options[:app_name]}'"
      end

      # CLI argument fragment selecting the manifest file, when configured.
      def manifest
        options[:manifest].nil? ? "" : " -f #{options[:manifest]}"
      end

      private

      def check_basic_auth
        context.shell "./cf login -u #{option(:username)} -p #{option(:password)} -o '#{option(:organization)}' -s '#{option(:space)}'"
      end

      def check_client_auth
        context.shell "./cf auth #{option(:client_id)} #{option(:client_secret)} --client-credentials"
        context.shell "./cf target -o '#{option(:organization)}' -s '#{option(:space)}'"
      end
    end
  end
end
Allow spaces in the app_name
module DPL
  class Provider
    # Deploys applications to Cloud Foundry using the `cf` CLI, which is
    # downloaded on demand during check_auth.
    class CloudFoundry < Provider
      # Fetches and unpacks the stable cf CLI binary for the current OS.
      def initial_go_tools_install
        context.shell 'test x$TRAVIS_OS_NAME = "xlinux" && rel="linux64-binary" || rel="macosx64"; wget "https://cli.run.pivotal.io/stable?release=${rel}&source=github" -qO cf.tgz && tar -zxvf cf.tgz && rm cf.tgz'
      end

      # Installs the CLI, targets the configured API endpoint and logs in
      # with client credentials when a client_id is supplied, otherwise with
      # username/password.
      def check_auth
        initial_go_tools_install
        context.shell "./cf api #{option(:api)} #{'--skip-ssl-validation' if options[:skip_ssl_validation]}"
        options[:client_id] ? check_client_auth : check_basic_auth
      end

      # A named manifest must exist for an unattended deployment.
      def check_app
        if options[:manifest]
          # File.exists? is deprecated (and removed in Ruby 3.2); File.exist?
          # is the supported spelling.
          error 'Application must have a manifest.yml for unattended deployment' unless File.exist? options[:manifest]
        end
      end

      # Cloud Foundry authenticates with credentials, not an SSH deploy key.
      def needs_key?
        false
      end

      # Pushes the app; the CLI session is always logged out afterwards.
      def push_app
        error 'Failed to push app' unless context.shell("./cf push#{app_name}#{manifest}")
      ensure
        context.shell "./cf logout"
      end

      def cleanup
      end

      def uncleanup
      end

      # CLI argument fragment naming the app, quoted so names with spaces
      # survive shell word-splitting.
      def app_name
        options[:app_name].nil? ? "" : " '#{options[:app_name]}'"
      end

      # CLI argument fragment selecting the manifest file, when configured.
      def manifest
        options[:manifest].nil? ? "" : " -f #{options[:manifest]}"
      end

      private

      def check_basic_auth
        context.shell "./cf login -u #{option(:username)} -p #{option(:password)} -o '#{option(:organization)}' -s '#{option(:space)}'"
      end

      def check_client_auth
        context.shell "./cf auth #{option(:client_id)} #{option(:client_secret)} --client-credentials"
        context.shell "./cf target -o '#{option(:organization)}' -s '#{option(:space)}'"
      end
    end
  end
end
|
# frozen_string_literal: true
require 'dry/schema/message_set'
require 'dry/validation/constants'
require 'dry/validation/message'
module Dry
  module Validation
    # MessageSet is a specialized message set for handling validation messages
    #
    # @api public
    class MessageSet < Schema::MessageSet
      # Return the source set of messages used to produce final evaluated messages
      #
      # @return [Array<Message, Message::Localized, Schema::Message>]
      #
      # @api private
      attr_reader :source_messages

      # Configured locale
      #
      # @return [Symbol]
      #
      # @api public
      attr_reader :locale

      # @api private
      def initialize(messages, options = EMPTY_HASH)
        @locale = options[:locale]
        # Keep an unevaluated copy so messages can be re-rendered later
        # (e.g. under a different locale) via #with.
        @source_messages = options.fetch(:source) { messages.dup }
        super
      end

      # Return a new message set using updated options
      #
      # @return [MessageSet]
      #
      # @api private
      def with(other, new_options = EMPTY_HASH)
        return self if new_options.empty? && other.eql?(messages)
        self.class.new(
          other | select { |err| err.is_a?(Message) },
          options.merge(source: source_messages, **new_options)
        ).freeze
      end

      # Add a new message
      #
      # This is used when result is being prepared
      #
      # @return [MessageSet]
      #
      # @api private
      def add(message)
        # Reset the cached empty-state flag before mutating the set.
        @empty = nil
        source_messages << message
        messages << message
        # Rebuild the nested placeholder structure so the new path is present.
        initialize_placeholders!
        self
      end

      # Filter message set using provided predicates
      #
      # This method is open to any predicate because messages can be anything that
      # implements Message API, thus they can implement whatever predicates you
      # may need.
      #
      # @example get a list of base messages
      #   message_set = contract.(input).errors
      #   message_set.filter(:base?)
      #
      # @param [Array<Symbol>] predicates
      #
      # @return [MessageSet]
      #
      # @api public
      def filter(*predicates)
        messages = select { |msg|
          predicates.all? { |predicate| msg.respond_to?(predicate) && msg.public_send(predicate) }
        }
        # The filtered set is built without the current options (locale etc.).
        self.class.new(messages)
      end

      # @api private
      #
      # Evaluates every lazily-localized message in place, substituting the
      # configured locale and :full option, then memoizes the hash form.
      # NOTE(review): does not call super, so the object itself is not
      # actually frozen — confirm this is intended.
      def freeze
        source_messages.select { |err| err.respond_to?(:evaluate) }.each do |err|
          idx = source_messages.index(err)
          msg = err.evaluate(locale: locale, full: options[:full])
          messages[idx] = msg
        end
        to_h
        self
      end

      private

      # @api private
      def unique_paths
        source_messages.uniq(&:path).map(&:path)
      end

      # @api private
      #
      # Folds each message's dump into the placeholder tree at its path,
      # producing the nested hash returned by #to_h.
      def messages_map
        @messages_map ||= reduce(placeholders) { |hash, msg|
          node = msg.path.reduce(hash) { |a, e| a.is_a?(Hash) ? a[e] : a.last[e] }
          (node[0].is_a?(::Array) ? node[0] : node) << msg.dump
          hash
        }
      end

      # @api private
      #
      # Builds the empty nested hash/array skeleton for every unique message
      # path, so messages_map only has to append dumps at the leaves.
      #
      # rubocop:disable Metrics/AbcSize
      # rubocop:disable Metrics/PerceivedComplexity
      def initialize_placeholders!
        @placeholders = unique_paths.sort_by(&:size).each_with_object(EMPTY_HASH.dup) { |path, hash|
          curr_idx = 0
          last_idx = path.size - 1
          node = hash
          while curr_idx <= last_idx
            key = path[curr_idx]
            next_node =
              if node.is_a?(Array) && key.is_a?(Symbol)
                node_hash = (node << [] << {}).last
                node_hash[key] || (node_hash[key] = curr_idx < last_idx ? {} : [])
              else
                node[key] || (node[key] = curr_idx < last_idx ? {} : [])
              end
            node = next_node
            curr_idx += 1
          end
        }
      end
      # rubocop:enable Metrics/AbcSize
      # rubocop:enable Metrics/PerceivedComplexity
    end
  end
end
Remove old MessageSet to_h code
# frozen_string_literal: true
require 'dry/schema/message_set'
require 'dry/validation/constants'
require 'dry/validation/message'
module Dry
  module Validation
    # MessageSet is a specialized message set for handling validation messages
    #
    # @api public
    class MessageSet < Schema::MessageSet
      # Return the source set of messages used to produce final evaluated messages
      #
      # @return [Array<Message, Message::Localized, Schema::Message>]
      #
      # @api private
      attr_reader :source_messages

      # Configured locale
      #
      # @return [Symbol]
      #
      # @api public
      attr_reader :locale

      # @api private
      def initialize(messages, options = EMPTY_HASH)
        @locale = options[:locale]
        # Keep an unevaluated copy so messages can be re-rendered later
        # (e.g. under a different locale) via #with.
        @source_messages = options.fetch(:source) { messages.dup }
        super
      end

      # Return a new message set using updated options
      #
      # @return [MessageSet]
      #
      # @api private
      def with(other, new_options = EMPTY_HASH)
        return self if new_options.empty? && other.eql?(messages)
        self.class.new(
          other | select { |err| err.is_a?(Message) },
          options.merge(source: source_messages, **new_options)
        ).freeze
      end

      # Add a new message
      #
      # This is used when result is being prepared
      #
      # @return [MessageSet]
      #
      # @api private
      def add(message)
        # Reset the cached empty-state flag before mutating the set.
        @empty = nil
        source_messages << message
        messages << message
        self
      end

      # Filter message set using provided predicates
      #
      # This method is open to any predicate because messages can be anything that
      # implements Message API, thus they can implement whatever predicates you
      # may need.
      #
      # @example get a list of base messages
      #   message_set = contract.(input).errors
      #   message_set.filter(:base?)
      #
      # @param [Array<Symbol>] predicates
      #
      # @return [MessageSet]
      #
      # @api public
      def filter(*predicates)
        messages = select { |msg|
          predicates.all? { |predicate| msg.respond_to?(predicate) && msg.public_send(predicate) }
        }
        # The filtered set is built without the current options (locale etc.).
        self.class.new(messages)
      end

      # @api private
      #
      # Evaluates every lazily-localized message in place, substituting the
      # configured locale and :full option, then memoizes the hash form.
      # NOTE(review): does not call super, so the object itself is not
      # actually frozen — confirm this is intended.
      def freeze
        source_messages.select { |err| err.respond_to?(:evaluate) }.each do |err|
          idx = source_messages.index(err)
          msg = err.evaluate(locale: locale, full: options[:full])
          messages[idx] = msg
        end
        to_h
        self
      end
    end
  end
end
|
module EffectiveResources
  class Engine < ::Rails::Engine
    engine_name 'effective_resources'

    # Make the engine's jobs, validators and controller concerns autoloadable.
    config.autoload_paths += Dir[
      "#{config.root}/jobs/",
      "#{config.root}/lib/validators/",
      "#{config.root}/app/controllers/concerns/"
    ]

    config.eager_load_paths += Dir[
      "#{config.root}/jobs/",
      "#{config.root}/lib/validators/",
      "#{config.root}/app/controllers/concerns/"
    ]

    # Set up our default configuration options.
    initializer 'effective_resources.defaults', before: :load_config_initializers do |app|
      eval File.read("#{config.root}/config/effective_resources.rb")
    end

    # Include acts_as_addressable concern and allow any ActiveRecord object to call it
    initializer 'effective_resources.active_record' do |app|
      # Deferred until ActiveRecord is actually loaded.
      ActiveSupport.on_load :active_record do
        ActiveRecord::Base.extend(ActsAsArchived::Base)
        ActiveRecord::Base.extend(ActsAsEmailForm::Base)
        ActiveRecord::Base.extend(ActsAsTokened::Base)
        ActiveRecord::Base.extend(ActsAsSlugged::Base)
        ActiveRecord::Base.extend(ActsAsStatused::Base)
        ActiveRecord::Base.extend(ActsAsWizard::Base)
        ActiveRecord::Base.extend(ActsAsPurchasableWizard::Base)
        ActiveRecord::Base.extend(HasManyRichTexts::Base)
        ActiveRecord::Base.extend(EffectiveDeviseUser::Base)
        ActiveRecord::Base.extend(EffectiveResource::Base)
        ActiveRecord::Base.include(EffectiveAfterCommit::Base)
      end
    end

    # Define CanCan::Ability#crud and mix in archived/statused ability
    # helpers, but only when CanCanCan is present in the host app.
    initializer 'effective_resources.cancancan' do |app|
      ActiveSupport.on_load :active_record do
        if defined?(CanCan::Ability)
          CanCan::Ability.module_eval do
            CRUD_ACTIONS = [:index, :new, :create, :edit, :update, :show, :destroy]
            def crud
              CRUD_ACTIONS
            end
          end
          CanCan::Ability.include(ActsAsArchived::CanCan)
          CanCan::Ability.include(ActsAsStatused::CanCan)
        end
      end
    end

    # Register the acts_as_archived routes concern
    # resources :things, concerns: :acts_as_archived
    initializer 'effective_resources.routes_concern' do |app|
      app.config.to_prepare do
        ActionDispatch::Routing::Mapper.include(ActsAsArchived::RoutesConcern)
      end
    end

    # Register the flash_messages concern so that it can be called in ActionController
    initializer 'effective_resources.action_controller' do |app|
      ActiveSupport.on_load :action_controller do
        include(Effective::FlashMessages)
      end
    end
  end
end
Use config.to_prepare for all initializers
module EffectiveResources
  class Engine < ::Rails::Engine
    engine_name 'effective_resources'

    # Make the engine's jobs, validators and controller concerns autoloadable.
    config.autoload_paths += Dir[
      "#{config.root}/jobs/",
      "#{config.root}/lib/validators/",
      "#{config.root}/app/controllers/concerns/"
    ]

    config.eager_load_paths += Dir[
      "#{config.root}/jobs/",
      "#{config.root}/lib/validators/",
      "#{config.root}/app/controllers/concerns/"
    ]

    # Set up our default configuration options.
    initializer 'effective_resources.defaults', before: :load_config_initializers do |app|
      eval File.read("#{config.root}/config/effective_resources.rb")
    end

    # Include acts_as_addressable concern and allow any ActiveRecord object to call it
    initializer 'effective_resources.active_record' do |app|
      # to_prepare re-runs on every code reload in development so the
      # extensions survive class reloading.
      app.config.to_prepare do
        ActiveRecord::Base.extend(ActsAsArchived::Base)
        ActiveRecord::Base.extend(ActsAsEmailForm::Base)
        ActiveRecord::Base.extend(ActsAsTokened::Base)
        ActiveRecord::Base.extend(ActsAsSlugged::Base)
        ActiveRecord::Base.extend(ActsAsStatused::Base)
        ActiveRecord::Base.extend(ActsAsWizard::Base)
        ActiveRecord::Base.extend(ActsAsPurchasableWizard::Base)
        ActiveRecord::Base.extend(HasManyRichTexts::Base)
        ActiveRecord::Base.extend(EffectiveDeviseUser::Base)
        ActiveRecord::Base.extend(EffectiveResource::Base)
        ActiveRecord::Base.include(EffectiveAfterCommit::Base)
      end
    end

    # Define CanCan::Ability#crud and mix in archived/statused ability
    # helpers, but only when CanCanCan is present in the host app.
    initializer 'effective_resources.cancancan' do |app|
      app.config.to_prepare do
        if defined?(CanCan::Ability)
          CanCan::Ability.module_eval do
            # NOTE(review): to_prepare runs on every reload, so this constant
            # assignment may emit "already initialized" warnings — confirm.
            CRUD_ACTIONS = [:index, :new, :create, :edit, :update, :show, :destroy]
            def crud
              CRUD_ACTIONS
            end
          end
          CanCan::Ability.include(ActsAsArchived::CanCan)
          CanCan::Ability.include(ActsAsStatused::CanCan)
        end
      end
    end

    # Register the acts_as_archived routes concern
    # resources :things, concerns: :acts_as_archived
    initializer 'effective_resources.routes_concern' do |app|
      app.config.to_prepare do
        ActionDispatch::Routing::Mapper.include(ActsAsArchived::RoutesConcern)
      end
    end

    # Register the flash_messages concern so that it can be called in ActionController
    initializer 'effective_resources.action_controller' do |app|
      app.config.to_prepare do
        ActiveSupport.on_load :action_controller do
          include(Effective::FlashMessages)
        end
      end
    end
  end
end
|
# An answer given by a user to a question.
class Answer < ActiveRecord::Base
  has_many :votes, as: :votable        # polymorphic: votes cast on this answer
  has_many :comments, as: :commentable # polymorphic: comments left on this answer
  belongs_to :question
  # The question pointing back at this answer via questions.answer_id
  # (presumably its accepted/best answer — confirm against schema).
  has_one :best_answered_question, class_name: "Question", foreign_key: "answer_id"
  belongs_to :user
  validates :body, presence: true # an answer must contain text
end
Updated answer associations
# An answer given by a user to a question.
class Answer < ActiveRecord::Base
  has_many :votes, as: :votable        # polymorphic: votes cast on this answer
  has_many :comments, as: :commentable # polymorphic: comments left on this answer
  belongs_to :question
  belongs_to :user
  validates :body, presence: true # an answer must contain text
end
|
# encoding: utf-8
#
# 测死链脚本 ruby 1.9.3
#
# gem版本:typhoeus (0.6.1) mechanize (2.5.1)
#
# 用法:
# ruby script/deadlink.rb
#
# 原理说明:
# 用 mechanize 抓TO_GET_LINK里的页面,找到上面所有链接
# 再用 typhoeus 并发请求这些链接,对失败的请求再用 typhoeus 分别发送请求
# typhoeus 最终调用的是curl。
# 如遇到couldn't_connect couldn't_resolve_host情况,建议调小并发量。
#
# Mechanize log: log/mechanize.log
# deadlink log: tmp/2014-07-10-111111_broken_link.csv
Add dead-link (死链) checker script
# encoding: utf-8
#
# 测死链脚本 ruby 1.9.3
#
# gem版本:typhoeus (0.6.1) mechanize (2.5.1)
#
# 用法:
# ruby script/deadlink.rb
#
# 原理说明:
# 用 mechanize 抓TO_GET_LINK里的页面,找到上面所有链接
# 再用 typhoeus 并发请求这些链接,对失败的请求再用 typhoeus 分别发送请求
# typhoeus 最终调用的是curl。
# 如遇到couldn't_connect couldn't_resolve_host情况,建议调小并发量。
#
# Mechanize log: log/mechanize.log
# deadlink log: tmp/2014-07-10-111111_broken_link.csv
require 'csv'
require 'net/http'
require 'mechanize'
require 'typhoeus'
# HTTP statuses treated as alive: OK plus common redirects.
ALLOW_STATUS = [200, 301, 302, 303]
# CSV report for broken links, e.g. tmp/2014-07-10_broken_link.csv
outfile_path = File.expand_path("../../tmp/#{Time.now.strftime("%Y-%m-%d")}_broken_link.csv", __FILE__)
outfile = CSV.open(outfile_path, 'w')
# Returns true when +href+ looks like a followable URL: it must not be a
# javascript stub, a bare fragment or blank, and must contain 'http'.
def valid_link?(href)
  link = href.to_s
  return false if ["javascript:void(0)", "#", ""].include?(link)
  link.include?('http')
end
# Adds String#to_link: expands a site-relative path ("/foo") into an
# absolute URL on www.xingishere.com; anything else passes through as-is.
class String
  def to_link
    start_with?('/') ? "http://www.xingishere.com#{self}" : self
  end
end
# Adds NilClass#to_link so nil hrefs normalize to an empty string,
# letting callers send #to_link without nil checks.
class NilClass
  def to_link
    ""
  end
end
# Entry pages to crawl for links to check.
TO_TEST_LINK = {
  :index => "http://www.xingishere.com",
  :blogs => "http://www.xingishere.com/blogs",
  :links => "http://www.xingishere.com/links",
}
|
module ElocalApiSupport
  # Gem version (semantic versioning).
  VERSION = '0.1.4'
end
Bump version
module ElocalApiSupport
  # Gem version (semantic versioning).
  VERSION = '0.1.5'
end
|
module FactoryGirl
  module Strategy
    # Build strategy that returns an object which looks persisted but raises
    # if anything tries to touch the database.
    class Stub
      @@next_id = 1000 # shared id sequence so stubs get unique, persisted-looking ids

      def association(runner)
        runner.run(:build_stubbed)
      end

      # Stub out persistence on the built object and fire after_stub callbacks.
      def result(evaluation)
        evaluation.object.tap do |instance|
          stub_database_interaction_on_result(instance)
          evaluation.notify(:after_stub, instance)
        end
      end

      private

      def next_id
        @@next_id += 1
      end

      # Assigns a fake id and redefines persistence methods on the instance's
      # singleton class so any database access raises loudly.
      def stub_database_interaction_on_result(result_instance)
        result_instance.id = next_id
        result_instance.instance_eval do
          def persisted?
            !new_record?
          end
          def new_record?
            id.nil?
          end
          def save(*args)
            raise 'stubbed models are not allowed to access the database'
          end
          def destroy(*args)
            raise 'stubbed models are not allowed to access the database'
          end
          def connection
            raise 'stubbed models are not allowed to access the database'
          end
          def reload
            raise 'stubbed models are not allowed to access the database'
          end
          def update_attribute(*args)
            raise 'stubbed models are not allowed to access the database'
          end
          # update_column writes straight to the database (bypassing
          # callbacks and update_attribute), so it must be stubbed too.
          def update_column(*args)
            raise 'stubbed models are not allowed to access the database'
          end
        end
        # Give the stub a created_at (mimicking a persisted record) when the
        # model doesn't already provide one.
        created_at_missing_default = result_instance.respond_to?(:created_at) && !result_instance.created_at
        result_instance_missing_created_at = !result_instance.respond_to?(:created_at)
        if created_at_missing_default || result_instance_missing_created_at
          result_instance.instance_eval do
            def created_at
              @created_at ||= Time.now
            end
          end
        end
      end
    end
  end
end
Stub update_column to raise, which will replace update_attribute in Rails 4
module FactoryGirl
  module Strategy
    # Build strategy that returns an object which looks persisted but raises
    # if anything tries to touch the database.
    class Stub
      @@next_id = 1000 # shared id sequence so stubs get unique, persisted-looking ids

      def association(runner)
        runner.run(:build_stubbed)
      end

      # Stub out persistence on the built object and fire after_stub callbacks.
      def result(evaluation)
        evaluation.object.tap do |instance|
          stub_database_interaction_on_result(instance)
          evaluation.notify(:after_stub, instance)
        end
      end

      private

      def next_id
        @@next_id += 1
      end

      # Assigns a fake id and redefines persistence methods on the instance's
      # singleton class so any database access raises loudly.
      def stub_database_interaction_on_result(result_instance)
        result_instance.id = next_id
        result_instance.instance_eval do
          def persisted?
            !new_record?
          end
          def new_record?
            id.nil?
          end
          def save(*args)
            raise 'stubbed models are not allowed to access the database'
          end
          def destroy(*args)
            raise 'stubbed models are not allowed to access the database'
          end
          def connection
            raise 'stubbed models are not allowed to access the database'
          end
          def reload
            raise 'stubbed models are not allowed to access the database'
          end
          def update_attribute(*args)
            raise 'stubbed models are not allowed to access the database'
          end
          # update_column bypasses callbacks in Rails 4, so stub it as well.
          def update_column(*args)
            raise 'stubbed models are not allowed to access the database'
          end
        end
        # Give the stub a created_at (mimicking a persisted record) when the
        # model doesn't already provide one.
        created_at_missing_default = result_instance.respond_to?(:created_at) && !result_instance.created_at
        result_instance_missing_created_at = !result_instance.respond_to?(:created_at)
        if created_at_missing_default || result_instance_missing_created_at
          result_instance.instance_eval do
            def created_at
              @created_at ||= Time.now
            end
          end
        end
      end
    end
  end
end
|
module FakeBraintree
  class CreditCard
    include Helpers

    # Wraps a Braintree credit card hash and mimics the gateway's update
    # responses against the in-memory registry.
    def initialize(credit_card_hash, options)
      set_up_credit_card(credit_card_hash, options)
      set_expiration_month_and_year
    end

    # Update the card in the registry. Returns a gzipped 200 response with
    # the merged card XML, or a 404 error payload when the token is unknown.
    def update
      if credit_card_exists_in_registry?
        updated_credit_card = update_credit_card!
        response_for_updated_card(updated_credit_card)
      else
        response_for_card_not_found
      end
    end

    private

    # Defensive copy so callers can't mutate internal state through it.
    def credit_card
      @credit_card.dup
    end

    # Merge our attributes onto the registry entry (mutates the registry).
    def update_credit_card!
      credit_card_from_registry.merge!(credit_card)
    end

    def response_for_updated_card(credit_card)
      gzipped_response(200, credit_card.to_xml(:root => 'credit_card'))
    end

    def credit_card_exists_in_registry?
      FakeBraintree.registry.credit_cards.key?(token)
    end

    def credit_card_from_registry
      FakeBraintree.registry.credit_cards[token]
    end

    def response_for_card_not_found
      gzipped_response(404, FakeBraintree.failure_response.to_xml(:root => 'api_error_response'))
    end

    # The token keying this card in the registry. Extracted so the registry
    # lookups above don't each re-derive it.
    def token
      credit_card['token']
    end

    # "MM/YY" -> month component, when an expiration_date is present.
    def expiration_month
      if credit_card.key?("expiration_date")
        credit_card["expiration_date"].split('/')[0]
      end
    end

    # "MM/YY" -> year component, when an expiration_date is present.
    def expiration_year
      if credit_card.key?("expiration_date")
        credit_card["expiration_date"].split('/')[1]
      end
    end

    def set_up_credit_card(credit_card_hash, options)
      @credit_card = {
        "token" => options[:token],
        "merchant_id" => options[:merchant_id]
      }.merge(credit_card_hash)
    end

    # Split a combined expiration_date into the separate month/year fields
    # Braintree responses expose.
    def set_expiration_month_and_year
      if expiration_month
        @credit_card["expiration_month"] = expiration_month
      end
      if expiration_year
        @credit_card["expiration_year"] = expiration_year
      end
    end
  end
end
De-dupe logic.
module FakeBraintree
  class CreditCard
    include Helpers

    # Wraps a Braintree credit card hash and mimics the gateway's update
    # responses against the in-memory registry.
    def initialize(credit_card_hash, options)
      set_up_credit_card(credit_card_hash, options)
      set_expiration_month_and_year
    end

    # Update the card in the registry. Returns a gzipped 200 response with
    # the merged card XML, or a 404 error payload when the token is unknown.
    def update
      if credit_card_exists_in_registry?
        updated_credit_card = update_credit_card!
        response_for_updated_card(updated_credit_card)
      else
        response_for_card_not_found
      end
    end

    private

    # Defensive copy so callers can't mutate internal state through it.
    def credit_card
      @credit_card.dup
    end

    # Merge our attributes onto the registry entry (mutates the registry).
    def update_credit_card!
      credit_card_from_registry.merge!(credit_card)
    end

    def response_for_updated_card(credit_card)
      gzipped_response(200, credit_card.to_xml(:root => 'credit_card'))
    end

    def credit_card_exists_in_registry?
      FakeBraintree.registry.credit_cards.key?(token)
    end

    def credit_card_from_registry
      FakeBraintree.registry.credit_cards[token]
    end

    def response_for_card_not_found
      gzipped_response(404, FakeBraintree.failure_response.to_xml(:root => 'api_error_response'))
    end

    # "MM/YY" -> month component, when an expiration_date is present.
    def expiration_month
      if credit_card.key?("expiration_date")
        credit_card["expiration_date"].split('/')[0]
      end
    end

    # "MM/YY" -> year component, when an expiration_date is present.
    def expiration_year
      if credit_card.key?("expiration_date")
        credit_card["expiration_date"].split('/')[1]
      end
    end

    def set_up_credit_card(credit_card_hash, options)
      @credit_card = {
        "token" => options[:token],
        "merchant_id" => options[:merchant_id]
      }.merge(credit_card_hash)
    end

    # Split a combined expiration_date into the separate month/year fields
    # Braintree responses expose.
    def set_expiration_month_and_year
      if expiration_month
        @credit_card["expiration_month"] = expiration_month
      end
      if expiration_year
        @credit_card["expiration_year"] = expiration_year
      end
    end

    # The token keying this card in the registry.
    def token
      credit_card['token']
    end
  end
end
|
module Fastlane
  module Actions
    # will make sure a gem is installed. If it's not an appropriate error message is shown
    # this will *not* 'require' the gem
    #
    # @param gem_name [String] name of the gem to look up via RubyGems
    # @return [true] when the gem is installed (or when running tests)
    # @raise [RuntimeError] when the gem is missing and we're not in test mode
    def self.verify_gem!(gem_name)
      begin
        Gem::Specification.find_by_name(gem_name)
      rescue Gem::LoadError
        print_gem_error "Could not find gem '#{gem_name}'"
        print_gem_error ""
        print_gem_error "If you installed fastlane using `sudo gem install fastlane` run"
        print_gem_error "`sudo gem install #{gem_name}` to install the missing gem"
        print_gem_error ""
        print_gem_error "If you use a Gemfile add this to your Gemfile:"
        print_gem_error "gem '#{gem_name}'"
        print_gem_error "and run `bundle install`"
        raise "You have to install the `#{gem_name}`".red unless Helper.is_test?
      end
      true
    end

    # Log a single red error line through the fastlane logger.
    def self.print_gem_error(str)
      Helper.log.error str.red
    end
  end
end
Improved gem_helper documentation
module Fastlane
  module Actions
    # Ensures +gem_name+ is installed locally, printing installation
    # instructions and raising when it is missing. The gem is looked up
    # only — it is never required here.
    def self.verify_gem!(gem_name)
      begin
        # Just check the spec; requiring is left to the caller, since some
        # gems (e.g. cocoapods) are required rather than imported.
        Gem::Specification.find_by_name(gem_name)
      rescue Gem::LoadError
        help_lines = [
          "Could not find gem '#{gem_name}'",
          "",
          "If you installed fastlane using `sudo gem install fastlane` run",
          "`sudo gem install #{gem_name}` to install the missing gem",
          "",
          "If you use a Gemfile add this to your Gemfile:",
          "gem '#{gem_name}'",
          "and run `bundle install`"
        ]
        help_lines.each { |line| print_gem_error(line) }
        raise "You have to install the `#{gem_name}`".red unless Helper.is_test?
      end
      true
    end

    # Print a single line of the error message in red via the logger.
    def self.print_gem_error(str)
      Helper.log.error(str.red)
    end
  end
end
|
# encoding: utf-8
module FiniteMachine
  # An asynchronous messages proxy
  class AsyncProxy
    attr_reader :context

    # Initialize an AsyncProxy
    #
    # @param [Object] context
    #   the context this proxy is associated with
    #
    # @api private
    def initialize(context)
      @context = context
    end

    # Delegate asynchronous event to event queue
    #
    # Any message sent to the proxy is wrapped in an AsyncCall and pushed
    # onto the global event queue for later execution.
    #
    # @api private
    def method_missing(method_name, *args, &block)
      @event_queue = FiniteMachine.event_queue
      @event_queue << AsyncCall.build(@context, Callable.new(method_name), *args, &block)
    end

    # Mirror method_missing: the proxy accepts any message, so it must
    # report that it responds to every method name.
    #
    # @api private
    def respond_to_missing?(method_name, include_private = false)
      true
    end
  end # AsyncProxy
end # FiniteMachine
Change async proxy to use thread context and threadsafe attribute.
# encoding: utf-8
module FiniteMachine
  # An asynchronous messages proxy
  class AsyncProxy
    include Threadable
    include ThreadContext

    # Thread-safe accessor for the wrapped context object.
    attr_threadsafe :context

    # Initialize an AsyncProxy
    #
    # @param [Object] context
    #   the context this proxy is associated with
    #
    # @api private
    def initialize(context)
      self.context = context
    end

    # Delegate asynchronous event to event queue
    #
    # Every message sent to the proxy is wrapped in an AsyncCall and pushed
    # onto event_queue (presumably supplied by the mixed-in ThreadContext —
    # confirm) for later execution.
    #
    # NOTE(review): consider defining respond_to_missing? to mirror this
    # catch-all method_missing.
    #
    # @api private
    def method_missing(method_name, *args, &block)
      event_queue << AsyncCall.build(context, Callable.new(method_name), *args, &block)
    end
  end # AsyncProxy
end # FiniteMachine
|
# encoding: utf-8
require 'ostruct'
require 'csv'
require 'flextures/flextures_base_config'
require 'flextures/flextures_extension_modules'
require 'flextures/flextures'
require 'flextures/flextures_factory'
module Flextures
# data loader
module Loader
PARENT = Flextures
@@table_cache = {}
@@option_cache = {}
# column set default value
COMPLETER = {
binary:->{ 0 },
boolean:->{ false },
date:->{ DateTime.now },
datetime:->{ DateTime.now },
decimal:->{ 0 },
float:->{ 0.0 },
integer:->{ 0 },
string:->{ "" },
text:->{ "" },
time:->{ DateTime.now },
timestamp:->{ DateTime.now },
}
# colum data translate
TRANSLATER = {
binary:->(d){
return d if d.nil?
Base64.decode64(d)
},
boolean:->(d){
return d if d.nil?
!(0==d || ""==d || !d)
},
date:->(d){
return d if d.nil?
return nil if d==""
Date.parse(d.to_s)
},
datetime:->(d){
return d if d.nil?
return nil if d==""
DateTime.parse(d.to_s)
},
decimal:->(d){
return d if d.nil?
d.to_i
},
float:->(d){
return d if d.nil?
d.to_f
},
integer:->(d){
return d if d.nil?
d.to_i
},
string:->(d){
return d if d.nil? or d.is_a?(Hash) or d.is_a?(Array)
d.to_s
},
text:->(d){
return d if d.nil? or d.is_a?(Hash) or d.is_a?(Array)
d.to_s
},
time:->(d){
return d if d.nil?
return nil if d==""
DateTime.parse(d.to_s)
},
timestamp:->(d){
return d if d.nil?
return nil if d==""
DateTime.parse(d.to_s)
},
}
# load fixture datas
#
# example:
# flextures :all # load all table data
# flextures :users, :items # load table data, received arguments
# flextures :users => :users2 # :table_name => :file_name
#
# @params [Hash] fixtures load table data
def self.flextures *fixtures
load_list = parse_flextures_options(*fixtures)
load_list.sort &self.loading_order
load_list.each{ |params| Loader::load params }
end
# @return [Proc] order rule block (user Array#sort_by methd)
def self.loading_order
->(a,b){
a = Flextures::Config.table_load_order.index(a) || -1
b = Flextures::Config.table_load_order.index(b) || -1
b <=> a
}
end
# called by Rspec or Should
# set options
# @params [Hash] options exmple : { cashe: true, dir: "models/users" }
def self.set_options options
@@option_cache ||= {}
@@option_cache.merge!(options)
end
# called by Rspec or Should after filter
# reflesh options
def self.delete_options
@@option_cache = {}
end
# return current option status
# @return [Hash] current option status
def self.flextures_options
@@option_cache
end
# return current cache status
# @return [Hash] current option status
def self.flextures_curent_cache
@@table_cache
end
# load fixture data
# fixture file prefer YAML to CSV
# @params [Hash] format file load format(table name, file name, options...)
def self.load format
file_name, method = file_exist format
if method
send(method, format)
else
puts "Warning: #{file_name} is not exist!" unless format[:silent]
end
end
# load CSV data
# @params [Hash] format file load format(table name, file name, options...)
def self.csv format
type = :csv
file_name, ext = file_exist format, [type]
return unless self.file_loadable? format, file_name
klass, filter = self.create_model_filter format, file_name, type
self.load_csv format, klass, filter, file_name
end
# load YAML data
# @params [Hash] format file load format( table: name, file: name, options...)
def self.yml format
type = :yml
file_name, ext = file_exist format, [type]
return unless self.file_loadable? format, file_name
klass, filter = self.create_model_filter format, file_name, type
self.load_yml format, klass, filter, file_name
end
def self.load_csv format, klass, filter, file_name
attributes = klass.columns.map &:name
CSV.open( file_name ) do |csv|
keys = csv.shift # active record column names
warning "CSV", attributes, keys unless format[:silent]
csv.each do |values|
h = values.extend(Extensions::Array).to_hash(keys)
filter.call h
end
end
file_name
end
def self.load_yml format, klass, filter, file_name
yaml = YAML.load File.open(file_name)
return false unless yaml # if file is empty
attributes = klass.columns.map &:name
yaml.each do |k,h|
warning "YAML", attributes, h.keys unless format[:silent]
filter.call h
end
file_name
end
# if parameter include controller, action value
# load directroy is change
# spec/fixtures/:controller_name/:action_name/
# @return [String] directory path
def self.parse_controller_option options
controller_dir = ["controllers"]
controller_dir<< options[:controller] if options[:controller]
controller_dir<< options[:action] if options[:controller] and options[:action]
File.join(*controller_dir)
end
# if parameter include controller, action value
# load directroy is change
# spec/fixtures/:model_name/:method_name/
# @return [String] directory path
def self.parse_model_options options
model_dir = ["models"]
model_dir<< options[:model] if options[:model]
model_dir<< options[:method] if options[:model] and options[:method]
File.join(*model_dir)
end
# parse flextures function arguments
# @params [Hash] fixtures function arguments
# @return [Array] formatted load options
def self.parse_flextures_options *fixtures
options = {}
options = fixtures.shift if fixtures.size > 1 and fixtures.first.is_a?(Hash)
options[:dir] = self.parse_controller_option( options ) if options[:controller]
options[:dir] = self.parse_model_options( options ) if options[:model]
# :all value load all loadable fixtures
fixtures = Flextures::deletable_tables if fixtures.size==1 and :all == fixtures.first
last_hash = fixtures.last.is_a?(Hash) ? fixtures.pop : {}
load_hash = fixtures.inject({}){ |h,name| h[name.to_sym] = name.to_s; h } # if name is string is buged
load_hash.merge!(last_hash)
load_hash.map { |k,v| { table: k, file: v, loader: :fun }.merge(@@option_cache).merge(options) }
end
# example:
# self.create_stair_list("foo/bar/baz")
# return ["foo/bar/baz","foo/bar","foo",""]
def self.stair_list dir, stair=true
return [dir.to_s] unless stair
l = []
dir.to_s.split("/").inject([]){ |a,d| a<< d; l.unshift(a.join("/")); a }
l<< ""
l
end
# parse format option and return load file info
# @param [Hash] format load file format informations
# @return [Array] [file_name, filt_type(:csv or :yml)]
def self.file_exist format, type = [:csv,:yml]
table_name = format[:table].to_s
file_name = (format[:file] || format[:table]).to_s
base_dir_name = Flextures::Config.fixture_load_directory
self.stair_list(format[:dir], format[:stair]).each do |dir|
file_path = File.join( base_dir_name, dir, file_name )
return ["#{file_path}.csv", :csv] if type.member?(:csv) and File.exist? "#{file_path}.csv"
return ["#{file_path}.yml", :yml] if type.member?(:yml) and File.exist? "#{file_path}.yml"
end
[ File.join(base_dir_name, "#{file_name}.csv"), nil ]
end
# file load check
# file is cached or not exist file don't load
# @return [Bool] lodable is 'true'
def self.file_loadable? format, file_name
table_name = format[:table].to_s.to_sym
# if table data is loaded, use cached data
return if format[:cache] and @@table_cache[table_name] == file_name
@@table_cache[table_name] = file_name
return unless File.exist? file_name
puts "try loading #{file_name}" if !format[:silent] and ![:fun].include?(format[:loader])
true
end
# print warinig message that lack or not exist colum names
def self.warning format, attributes, keys
(attributes-keys).each { |name| puts "Warning: #{format} colum is missing! [#{name}]" }
(keys-attributes).each { |name| puts "Warning: #{format} colum is left over! [#{name}]" }
end
# create filter and table info
def self.create_model_filter format, file_name, type
table_name = format[:table].to_s
klass = PARENT::create_model table_name
# if you use 'rails3_acts_as_paranoid' gem, that is not delete data 'delete_all' method
klass.send (klass.respond_to?(:delete_all!) ? :delete_all! : :delete_all)
filter = ->(h){
filter = create_filter klass, LoadFilter[table_name.to_sym], file_name, type, format
o = klass.new
o = filter.call o, h
o.save( validate: false )
o
}
[klass, filter]
end
# return flextures data translate filter
# translate filter is some functions
# 1. column value is fill, if colum is not nullable
# 2. factory filter
# @params [ActiveRecord::Base] klass ActiveRecord model data
# @params [Proc] factory FactoryFilter
# @params [String] filename
# @params [Symbol] ext file type (:csv or :yml)
# @params [Hash] options other options
# @return [Proc] translate filter
def self.create_filter klass, factory, filename, ext, options
columns = klass.columns
# data translat array to hash
column_hash = columns.inject({}) { |h,col| h[col.name] = col; h }
lack_columns = columns.reject { |c| c.null and c.default }.map{ |o| o.name.to_sym }
# default value shound not be null columns
not_nullable_columns = columns.reject(&:null).map &:name
strict_filter=->(o,h){
# if value is not 'nil', value translate suitable form
h.each{ |k,v| v.nil? || o[k] = (TRANSLATER[column_hash[k].type] && TRANSLATER[column_hash[k].type].call(v)) }
# call FactoryFilter
factory.call(*[o, filename, ext][0,factory.arity]) if factory and !options[:unfilter]
o
}
# receives hased data and translate ActiveRecord Model data
# loose filter correct error values
# strict filter don't correct errora values and raise error
loose_filter=->(o,h){
h.reject! { |k,v| options[:minus].include?(k) } if options[:minus]
# if column name is not include database table columns, those names delete
h.select! { |k,v| column_hash[k] }
strict_filter.call(o,h)
# set default value if value is 'nil'
not_nullable_columns.each{ |k| o[k].nil? && o[k] = (column_hash[k] && COMPLETER[column_hash[k].type] && COMPLETER[column_hash[k].type].call) }
# fill span values if column is not exist
lack_columns.each { |k| o[k].nil? && o[k] = (column_hash[k] && COMPLETER[column_hash[k].type] && COMPLETER[column_hash[k].type].call) }
o
}
(options[:strict]==true) ? strict_filter : loose_filter
end
end
end
Loader: apply the configured table load order (use Array#sort's return value)
# encoding: utf-8
require 'ostruct'
require 'csv'
require 'flextures/flextures_base_config'
require 'flextures/flextures_extension_modules'
require 'flextures/flextures'
require 'flextures/flextures_factory'
module Flextures
# data loader
module Loader
PARENT = Flextures
@@table_cache = {}
@@option_cache = {}
# column set default value
COMPLETER = {
binary:->{ 0 },
boolean:->{ false },
date:->{ DateTime.now },
datetime:->{ DateTime.now },
decimal:->{ 0 },
float:->{ 0.0 },
integer:->{ 0 },
string:->{ "" },
text:->{ "" },
time:->{ DateTime.now },
timestamp:->{ DateTime.now },
}
# colum data translate
TRANSLATER = {
binary:->(d){
return d if d.nil?
Base64.decode64(d)
},
boolean:->(d){
return d if d.nil?
!(0==d || ""==d || !d)
},
date:->(d){
return d if d.nil?
return nil if d==""
Date.parse(d.to_s)
},
datetime:->(d){
return d if d.nil?
return nil if d==""
DateTime.parse(d.to_s)
},
decimal:->(d){
return d if d.nil?
d.to_i
},
float:->(d){
return d if d.nil?
d.to_f
},
integer:->(d){
return d if d.nil?
d.to_i
},
string:->(d){
return d if d.nil? or d.is_a?(Hash) or d.is_a?(Array)
d.to_s
},
text:->(d){
return d if d.nil? or d.is_a?(Hash) or d.is_a?(Array)
d.to_s
},
time:->(d){
return d if d.nil?
return nil if d==""
DateTime.parse(d.to_s)
},
timestamp:->(d){
return d if d.nil?
return nil if d==""
DateTime.parse(d.to_s)
},
}
# load fixture datas
#
# example:
# flextures :all # load all table data
# flextures :users, :items # load table data, received arguments
# flextures :users => :users2 # :table_name => :file_name
#
# @params [Hash] fixtures load table data
def self.flextures *fixtures
load_list = parse_flextures_options(*fixtures)
load_list.sort(&self.loading_order).each{ |params| Loader::load params }
end
# @return [Proc] order rule block (user Array#sort methd)
def self.loading_order
->(a,b){
a = Flextures::Config.table_load_order.index(a) || -1
b = Flextures::Config.table_load_order.index(b) || -1
b <=> a
}
end
# called by Rspec or Should
# set options
# @params [Hash] options exmple : { cashe: true, dir: "models/users" }
def self.set_options options
@@option_cache ||= {}
@@option_cache.merge!(options)
end
# called by Rspec or Should after filter
# reflesh options
def self.delete_options
@@option_cache = {}
end
# return current option status
# @return [Hash] current option status
def self.flextures_options
@@option_cache
end
# return current cache status
# @return [Hash] current option status
def self.flextures_curent_cache
@@table_cache
end
# load fixture data
# fixture file prefer YAML to CSV
# @params [Hash] format file load format(table name, file name, options...)
def self.load format
file_name, method = file_exist format
if method
send(method, format)
else
puts "Warning: #{file_name} is not exist!" unless format[:silent]
end
end
# load CSV data
# @params [Hash] format file load format(table name, file name, options...)
def self.csv format
type = :csv
file_name, ext = file_exist format, [type]
return unless self.file_loadable? format, file_name
klass, filter = self.create_model_filter format, file_name, type
self.load_csv format, klass, filter, file_name
end
# load YAML data
# @params [Hash] format file load format( table: name, file: name, options...)
def self.yml format
type = :yml
file_name, ext = file_exist format, [type]
return unless self.file_loadable? format, file_name
klass, filter = self.create_model_filter format, file_name, type
self.load_yml format, klass, filter, file_name
end
def self.load_csv format, klass, filter, file_name
attributes = klass.columns.map &:name
CSV.open( file_name ) do |csv|
keys = csv.shift # active record column names
warning "CSV", attributes, keys unless format[:silent]
csv.each do |values|
h = values.extend(Extensions::Array).to_hash(keys)
filter.call h
end
end
file_name
end
def self.load_yml format, klass, filter, file_name
yaml = YAML.load File.open(file_name)
return false unless yaml # if file is empty
attributes = klass.columns.map &:name
yaml.each do |k,h|
warning "YAML", attributes, h.keys unless format[:silent]
filter.call h
end
file_name
end
# if parameter include controller, action value
# load directroy is change
# spec/fixtures/:controller_name/:action_name/
# @return [String] directory path
def self.parse_controller_option options
controller_dir = ["controllers"]
controller_dir<< options[:controller] if options[:controller]
controller_dir<< options[:action] if options[:controller] and options[:action]
File.join(*controller_dir)
end
# if parameter include controller, action value
# load directroy is change
# spec/fixtures/:model_name/:method_name/
# @return [String] directory path
def self.parse_model_options options
model_dir = ["models"]
model_dir<< options[:model] if options[:model]
model_dir<< options[:method] if options[:model] and options[:method]
File.join(*model_dir)
end
# parse flextures function arguments
# @params [Hash] fixtures function arguments
# @return [Array] formatted load options
def self.parse_flextures_options *fixtures
options = {}
options = fixtures.shift if fixtures.size > 1 and fixtures.first.is_a?(Hash)
options[:dir] = self.parse_controller_option( options ) if options[:controller]
options[:dir] = self.parse_model_options( options ) if options[:model]
# :all value load all loadable fixtures
fixtures = Flextures::deletable_tables if fixtures.size==1 and :all == fixtures.first
last_hash = fixtures.last.is_a?(Hash) ? fixtures.pop : {}
load_hash = fixtures.inject({}){ |h,name| h[name.to_sym] = name.to_s; h } # if name is string is buged
load_hash.merge!(last_hash)
load_hash.map { |k,v| { table: k, file: v, loader: :fun }.merge(@@option_cache).merge(options) }
end
# example:
# self.create_stair_list("foo/bar/baz")
# return ["foo/bar/baz","foo/bar","foo",""]
def self.stair_list dir, stair=true
return [dir.to_s] unless stair
l = []
dir.to_s.split("/").inject([]){ |a,d| a<< d; l.unshift(a.join("/")); a }
l<< ""
l
end
# parse format option and return load file info
# @param [Hash] format load file format informations
# @return [Array] [file_name, filt_type(:csv or :yml)]
def self.file_exist format, type = [:csv,:yml]
table_name = format[:table].to_s
file_name = (format[:file] || format[:table]).to_s
base_dir_name = Flextures::Config.fixture_load_directory
self.stair_list(format[:dir], format[:stair]).each do |dir|
file_path = File.join( base_dir_name, dir, file_name )
return ["#{file_path}.csv", :csv] if type.member?(:csv) and File.exist? "#{file_path}.csv"
return ["#{file_path}.yml", :yml] if type.member?(:yml) and File.exist? "#{file_path}.yml"
end
[ File.join(base_dir_name, "#{file_name}.csv"), nil ]
end
# file load check
# file is cached or not exist file don't load
# @return [Bool] lodable is 'true'
# Decides whether a fixture file should actually be loaded: skips files
# already recorded in the cache (when :cache is set) and files that do not
# exist on disk; remembers the file name either way.
def self.file_loadable? format, file_name
  cache_key = format[:table].to_s.to_sym
  # if table data is loaded, use cached data
  already_cached = format[:cache] && @@table_cache[cache_key] == file_name
  return if already_cached
  @@table_cache[cache_key] = file_name
  return unless File.exist?(file_name)
  announce = !format[:silent] && ![:fun].include?(format[:loader])
  puts "try loading #{file_name}" if announce
  true
end
# print warinig message that lack or not exist colum names
# Reports column-name mismatches between the model attributes and the keys
# found in the fixture file.
def self.warning format, attributes, keys
  missing = attributes - keys
  surplus = keys - attributes
  missing.each { |name| puts "Warning: #{format} colum is missing! [#{name}]" }
  surplus.each { |name| puts "Warning: #{format} colum is left over! [#{name}]" }
end
# create filter and table info
# Clears the table and returns [model_class, row_builder]; the builder turns
# one fixture row (Hash) into a saved ActiveRecord instance.
def self.create_model_filter format, file_name, type
  table_name = format[:table].to_s
  klass = PARENT::create_model table_name
  # if you use 'rails3_acts_as_paranoid' gem, that is not delete data 'delete_all' method
  klass.send (klass.respond_to?(:delete_all!) ? :delete_all! : :delete_all)
  # NOTE(review): the inner assignment reuses (shadows) the outer `filter`
  # local, rebuilding the translate filter on every row — confirm this
  # per-row rebuild is intentional rather than a one-time setup.
  filter = ->(h){
    filter = create_filter klass, LoadFilter[table_name.to_sym], file_name, type, format
    o = klass.new
    o = filter.call o, h
    # validations are skipped so partial fixture rows can still be stored
    o.save( validate: false )
    o
  }
  [klass, filter]
end
# return flextures data translate filter
# translate filter is some functions
# 1. column value is fill, if colum is not nullable
# 2. factory filter
# @params [ActiveRecord::Base] klass ActiveRecord model data
# @params [Proc] factory FactoryFilter
# @params [String] filename
# @params [Symbol] ext file type (:csv or :yml)
# @params [Hash] options other options
# @return [Proc] translate filter
def self.create_filter klass, factory, filename, ext, options
  columns = klass.columns
  # data translat array to hash (column name => column object)
  column_hash = columns.inject({}) { |h,col| h[col.name] = col; h }
  # columns that must be filled: NOT NULL or lacking a default
  lack_columns = columns.reject { |c| c.null and c.default }.map{ |o| o.name.to_sym }
  # default value shound not be null columns
  # NOTE(review): these are String names while lack_columns holds Symbols and
  # column_hash is String-keyed — the lack_columns lookup below returns nil
  # for Symbol keys; verify that branch ever fills a real value.
  not_nullable_columns = columns.reject(&:null).map &:name
  # strict filter: translate non-nil values to the column type, apply the
  # factory filter, and leave bad values to raise on save
  strict_filter=->(o,h){
    # if value is not 'nil', value translate suitable form
    h.each{ |k,v| v.nil? || o[k] = (TRANSLATER[column_hash[k].type] && TRANSLATER[column_hash[k].type].call(v)) }
    # call FactoryFilter (skipped when :unfilter is requested)
    factory.call(*[o, filename, ext][0,factory.arity]) if factory and !options[:unfilter]
    o
  }
  # receives hased data and translate ActiveRecord Model data
  # loose filter correct error values
  # strict filter don't correct errora values and raise error
  loose_filter=->(o,h){
    h.reject! { |k,v| options[:minus].include?(k) } if options[:minus]
    # if column name is not include database table columns, those names delete
    h.select! { |k,v| column_hash[k] }
    strict_filter.call(o,h)
    # set default value if value is 'nil'
    not_nullable_columns.each{ |k| o[k].nil? && o[k] = (column_hash[k] && COMPLETER[column_hash[k].type] && COMPLETER[column_hash[k].type].call) }
    # fill span values if column is not exist
    lack_columns.each { |k| o[k].nil? && o[k] = (column_hash[k] && COMPLETER[column_hash[k].type] && COMPLETER[column_hash[k].type].call) }
    o
  }
  # strict mode raises on bad values; loose mode (the default) repairs them
  (options[:strict]==true) ? strict_filter : loose_filter
end
end
end
|
# encoding: UTF-8
require 'date'
require 'influxdb'
require 'fluent/output'
require 'fluent/mixin'
# Fluentd buffered output plugin writing events to InfluxDB.
# Each record becomes one point; selected record keys can become InfluxDB
# tags and a per-record retention policy can be honored.
class Fluent::InfluxdbOutput < Fluent::BufferedOutput
  Fluent::Plugin.register_output('influxdb', self)

  include Fluent::HandleTagNameMixin

  config_param :host, :string, :default => 'localhost',
               :desc => "The IP or domain of influxDB, separate with comma."
  config_param :port, :integer, :default => 8086,
               :desc => "The HTTP port of influxDB."
  config_param :dbname, :string, :default => 'fluentd',
               :desc => <<-DESC
The database name of influxDB.
You should create the database and grant permissions at first.
DESC
  config_param :user, :string, :default => 'root',
               :desc => "The DB user of influxDB, should be created manually."
  config_param :password, :string, :default => 'root', :secret => true,
               :desc => "The password of the user."
  config_param :retry, :integer, :default => nil,
               :desc => 'The finite number of retry times. default is infinite'
  config_param :time_key, :string, :default => 'time',
               :desc => 'Use value of this tag if it exists in event instead of event timestamp'
  config_param :time_precision, :string, :default => 's',
               :desc => <<-DESC
The time precision of timestamp.
You should specify either hour (h), minutes (m), second (s),
millisecond (ms), microsecond (u), or nanosecond (n).
DESC
  config_param :use_ssl, :bool, :default => false,
               :desc => "Use SSL when connecting to influxDB."
  config_param :verify_ssl, :bool, :default => true,
               :desc => "Enable/Disable SSL Certs verification when connecting to influxDB via SSL."
  config_param :tag_keys, :array, :default => [],
               :desc => "The names of the keys to use as influxDB tags."
  config_param :sequence_tag, :string, :default => nil,
               :desc => <<-DESC
The name of the tag whose value is incremented for the consecutive simultaneous
events and reset to zero for a new event with the different timestamp.
DESC
  config_param :retention_policy_key, :string, :default => nil,
               :desc => "The key of the key in the record that stores the retention policy name"
  config_param :default_retention_policy, :string, :default => nil,
               :desc => "The name of the default retention policy"

  def initialize
    super
    # state for the optional sequence tag (see :sequence_tag)
    @seq = 0
    @prev_timestamp = nil
  end

  def configure(conf)
    super
  end

  # Connects to InfluxDB and verifies the target database exists when the
  # configured user has enough privilege to list databases.
  def start
    super
    $log.info "Connecting to database: #{@dbname}, host: #{@host}, port: #{@port}, username: #{@user}, use_ssl = #{@use_ssl}, verify_ssl = #{@verify_ssl}"
    # ||= for testing.
    @influxdb ||= InfluxDB::Client.new @dbname, hosts: @host.split(','),
                                               port: @port,
                                               username: @user,
                                               password: @password,
                                               async: false,
                                               retry: @retry,
                                               time_precision: @time_precision,
                                               use_ssl: @use_ssl,
                                               verify_ssl: @verify_ssl
    begin
      existing_databases = @influxdb.list_databases.map { |x| x['name'] }
      unless existing_databases.include? @dbname
        raise Fluent::ConfigError, 'Database ' + @dbname + ' doesn\'t exist. Create it first, please. Existing databases: ' + existing_databases.join(',')
      end
    rescue InfluxDB::AuthenticationError, InfluxDB::Error
      # fix: InfluxDB >= 1.0 reports a missing admin privilege as
      # InfluxDB::Error instead of InfluxDB::AuthenticationError, so rescue
      # both and skip the presence check rather than crash at startup
      $log.info "skip database presence check because '#{@user}' user doesn't have admin privilege. Check '#{@dbname}' exists on influxdb"
    end
  end

  FORMATTED_RESULT_FOR_INVALID_RECORD = ''.freeze

  def format(tag, time, record)
    # TODO: Use tag based chunk separation for more reliability
    if record.empty? || record.has_value?(nil)
      # empty or nil-valued records cannot be written to InfluxDB; drop them
      FORMATTED_RESULT_FOR_INVALID_RECORD
    else
      [tag, time, record].to_msgpack
    end
  end

  def shutdown
    super
    @influxdb.stop!
  end

  # Converts buffered events into points and writes them, batching under the
  # default retention policy until a record requests a different policy.
  def write(chunk)
    points = []
    chunk.msgpack_each do |tag, time, record|
      # an explicit time field in the record wins over the event timestamp
      timestamp = record.delete(@time_key) || time
      if tag_keys.empty?
        values = record
        tags = {}
      else
        values = {}
        tags = {}
        record.each_pair do |k, v|
          if @tag_keys.include?(k)
            # If the tag value is not nil, empty, or a space, add the tag
            if v.to_s.strip != ''
              tags[k] = v
            end
          else
            values[k] = v
          end
        end
      end
      if @sequence_tag
        # disambiguate points that share one timestamp with a counter tag
        if @prev_timestamp == timestamp
          @seq += 1
        else
          @seq = 0
        end
        tags[@sequence_tag] = @seq
        @prev_timestamp = timestamp
      end
      point = {
        :timestamp => timestamp,
        :series => tag,
        :values => values,
        :tags => tags,
      }
      retention_policy = @default_retention_policy
      unless @retention_policy_key.nil?
        retention_policy = record.delete(@retention_policy_key) || @default_retention_policy
        unless points.nil?
          if retention_policy != @default_retention_policy
            # flush the batch collected so far under the default policy, then
            # switch to per-point writes (points stays nil from here on)
            @influxdb.write_points(points, nil, @default_retention_policy)
            points = nil
          end
        end
      end
      if points.nil?
        # per-point write path: each point may carry its own retention policy
        @influxdb.write_points([point], nil, retention_policy)
      else
        points << point
      end
    end
    unless points.nil?
      if @default_retention_policy.nil?
        @influxdb.write_points(points)
      else
        @influxdb.write_points(points, nil, @default_retention_policy)
      end
    end
  end
end
InfluxDB returns a different response code in v1.0 or higher
# encoding: UTF-8
require 'date'
require 'influxdb'
require 'fluent/output'
require 'fluent/mixin'
# Fluentd buffered output plugin writing events to InfluxDB.
# Each record becomes one point; selected record keys can become InfluxDB
# tags and a per-record retention policy can be honored.
class Fluent::InfluxdbOutput < Fluent::BufferedOutput
  Fluent::Plugin.register_output('influxdb', self)

  include Fluent::HandleTagNameMixin

  config_param :host, :string, :default => 'localhost',
               :desc => "The IP or domain of influxDB, separate with comma."
  config_param :port, :integer, :default => 8086,
               :desc => "The HTTP port of influxDB."
  config_param :dbname, :string, :default => 'fluentd',
               :desc => <<-DESC
The database name of influxDB.
You should create the database and grant permissions at first.
DESC
  config_param :user, :string, :default => 'root',
               :desc => "The DB user of influxDB, should be created manually."
  config_param :password, :string, :default => 'root', :secret => true,
               :desc => "The password of the user."
  config_param :retry, :integer, :default => nil,
               :desc => 'The finite number of retry times. default is infinite'
  config_param :time_key, :string, :default => 'time',
               :desc => 'Use value of this tag if it exists in event instead of event timestamp'
  config_param :time_precision, :string, :default => 's',
               :desc => <<-DESC
The time precision of timestamp.
You should specify either hour (h), minutes (m), second (s),
millisecond (ms), microsecond (u), or nanosecond (n).
DESC
  config_param :use_ssl, :bool, :default => false,
               :desc => "Use SSL when connecting to influxDB."
  config_param :verify_ssl, :bool, :default => true,
               :desc => "Enable/Disable SSL Certs verification when connecting to influxDB via SSL."
  config_param :tag_keys, :array, :default => [],
               :desc => "The names of the keys to use as influxDB tags."
  config_param :sequence_tag, :string, :default => nil,
               :desc => <<-DESC
The name of the tag whose value is incremented for the consecutive simultaneous
events and reset to zero for a new event with the different timestamp.
DESC
  config_param :retention_policy_key, :string, :default => nil,
               :desc => "The key of the key in the record that stores the retention policy name"
  config_param :default_retention_policy, :string, :default => nil,
               :desc => "The name of the default retention policy"

  def initialize
    super
    # state for the optional sequence tag (see :sequence_tag)
    @seq = 0
    @prev_timestamp = nil
  end

  def configure(conf)
    super
  end

  # Connects to InfluxDB and verifies the target database exists when the
  # configured user has enough privilege to list databases.
  def start
    super
    $log.info "Connecting to database: #{@dbname}, host: #{@host}, port: #{@port}, username: #{@user}, use_ssl = #{@use_ssl}, verify_ssl = #{@verify_ssl}"
    # ||= for testing.
    @influxdb ||= InfluxDB::Client.new @dbname, hosts: @host.split(','),
                                               port: @port,
                                               username: @user,
                                               password: @password,
                                               async: false,
                                               retry: @retry,
                                               time_precision: @time_precision,
                                               use_ssl: @use_ssl,
                                               verify_ssl: @verify_ssl
    begin
      existing_databases = @influxdb.list_databases.map { |x| x['name'] }
      unless existing_databases.include? @dbname
        raise Fluent::ConfigError, 'Database ' + @dbname + ' doesn\'t exist. Create it first, please. Existing databases: ' + existing_databases.join(',')
      end
    rescue InfluxDB::AuthenticationError, InfluxDB::Error
      # InfluxDB >= 1.0 raises InfluxDB::Error (not AuthenticationError) when
      # the user lacks admin privilege; both just skip the presence check
      $log.info "skip database presence check because '#{@user}' user doesn't have admin privilege. Check '#{@dbname}' exists on influxdb"
    end
  end

  FORMATTED_RESULT_FOR_INVALID_RECORD = ''.freeze

  def format(tag, time, record)
    # TODO: Use tag based chunk separation for more reliability
    if record.empty? || record.has_value?(nil)
      # empty or nil-valued records cannot be written to InfluxDB; drop them
      FORMATTED_RESULT_FOR_INVALID_RECORD
    else
      [tag, time, record].to_msgpack
    end
  end

  def shutdown
    super
    @influxdb.stop!
  end

  # Converts buffered events into points and writes them, batching under the
  # default retention policy until a record requests a different policy.
  def write(chunk)
    points = []
    chunk.msgpack_each do |tag, time, record|
      # an explicit time field in the record wins over the event timestamp
      timestamp = record.delete(@time_key) || time
      if tag_keys.empty?
        values = record
        tags = {}
      else
        values = {}
        tags = {}
        record.each_pair do |k, v|
          if @tag_keys.include?(k)
            # If the tag value is not nil, empty, or a space, add the tag
            if v.to_s.strip != ''
              tags[k] = v
            end
          else
            values[k] = v
          end
        end
      end
      if @sequence_tag
        # disambiguate points that share one timestamp with a counter tag
        if @prev_timestamp == timestamp
          @seq += 1
        else
          @seq = 0
        end
        tags[@sequence_tag] = @seq
        @prev_timestamp = timestamp
      end
      point = {
        :timestamp => timestamp,
        :series => tag,
        :values => values,
        :tags => tags,
      }
      retention_policy = @default_retention_policy
      unless @retention_policy_key.nil?
        retention_policy = record.delete(@retention_policy_key) || @default_retention_policy
        unless points.nil?
          if retention_policy != @default_retention_policy
            # flush the retention policy first
            # (after this, points stays nil and each point is written singly)
            @influxdb.write_points(points, nil, @default_retention_policy)
            points = nil
          end
        end
      end
      if points.nil?
        # per-point write path: each point may carry its own retention policy
        @influxdb.write_points([point], nil, retention_policy)
      else
        points << point
      end
    end
    unless points.nil?
      if @default_retention_policy.nil?
        @influxdb.write_points(points)
      else
        @influxdb.write_points(points, nil, @default_retention_policy)
      end
    end
  end
end
|
#!/usr/bin/env ruby
require 'github_api'
require 'json'
require 'colorize'
require 'benchmark'
require_relative 'github_changelog_generator/parser'
require_relative 'github_changelog_generator/generator'
require_relative 'github_changelog_generator/version'
module GitHubChangelogGenerator
class ChangelogGenerator
attr_accessor :options, :all_tags, :github
PER_PAGE_NUMBER = 30
def initialize
@options = Parser.parse_options
fetch_github_token
github_options = {per_page: PER_PAGE_NUMBER}
github_options[:oauth_token] = @github_token unless @github_token.nil?
github_options[:endpoint] = options[:github_endpoint] unless options[:github_endpoint].nil?
github_options[:site] = options[:github_endpoint] unless options[:github_site].nil?
@github = Github.new github_options
@generator = Generator.new(@options)
@all_tags = self.get_all_tags
@issues, @pull_requests = self.fetch_issues_and_pull_requests
if @options[:pulls]
@pull_requests = self.get_filtered_pull_requests
self.fetch_merged_at_pull_requests
else
@pull_requests = []
end
if @options[:issues]
@issues = self.get_filtered_issues
else
@issues = []
end
fetch_event_for_issues_and_pr
detect_actual_closed_dates
@tag_times_hash = {}
end
def detect_actual_closed_dates
if @options[:verbose]
print "Fetching closed dates for issues...\r"
end
threads = []
@issues.each { |issue|
threads << Thread.new {
find_closed_date_by_commit(issue)
}
}
@pull_requests.each { |pull_request|
threads << Thread.new {
find_closed_date_by_commit(pull_request)
}
}
threads.each { |thr| thr.join }
if @options[:verbose]
puts 'Fetching closed dates for issues: Done!'
end
end
def find_closed_date_by_commit(issue)
unless issue['events'].nil?
#if it's PR -> then find "merged event", in case of usual issue -> fond closed date
compare_string = issue[:merged_at].nil? ? 'closed' : 'merged'
# reverse! - to find latest closed event. (event goes in date order)
issue['events'].reverse!.each { |event|
if event[:event].eql? compare_string
if event[:commit_id].nil?
issue[:actual_date] = issue[:closed_at]
else
begin
commit = @github.git_data.commits.get @options[:user], @options[:project], event[:commit_id]
issue[:actual_date] = commit[:author][:date]
rescue
puts "Warning: can't fetch commit #{event[:commit_id]} probably it referenced from another repo."
issue[:actual_date] = issue[:closed_at]
end
end
break
end
}
end
#TODO: assert issues, that remain without 'actual_date' hash for some reason.
end
def print_json(json)
puts JSON.pretty_generate(json)
end
def fetch_merged_at_pull_requests
if @options[:verbose]
print "Fetching merged dates...\r"
end
response = @github.pull_requests.list @options[:user], @options[:project], :state => 'closed'
pull_requests = []
page_i = 0
response.each_page do |page|
page_i += PER_PAGE_NUMBER
count_pages = response.count_pages
print "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
pull_requests.concat(page)
end
print " \r"
@pull_requests.each { |pr|
fetched_pr = pull_requests.find { |fpr|
fpr.number == pr.number }
pr[:merged_at] = fetched_pr[:merged_at]
pull_requests.delete(fetched_pr)
}
if @options[:verbose]
puts 'Fetching merged dates... Done!'
end
end
def get_filtered_pull_requests
pull_requests = @pull_requests
filtered_pull_requests = pull_requests
unless @options[:include_labels].nil?
filtered_pull_requests = pull_requests.select { |issue|
#add all labels from @options[:incluse_labels] array
(issue.labels.map { |label| label.name } & @options[:include_labels]).any?
}
end
unless @options[:exclude_labels].nil?
filtered_pull_requests = filtered_pull_requests.select { |issue|
#delete all labels from @options[:exclude_labels] array
!(issue.labels.map { |label| label.name } & @options[:exclude_labels]).any?
}
end
if @options[:add_issues_wo_labels]
issues_wo_labels = pull_requests.select {
# add issues without any labels
|issue| !issue.labels.map { |label| label.name }.any?
}
filtered_pull_requests |= issues_wo_labels
end
if @options[:verbose]
puts "Filtered pull requests: #{filtered_pull_requests.count}"
end
filtered_pull_requests
end
def compund_changelog
log = "# Change Log\n\n"
if @options[:unreleased_only]
log += self.generate_log_between_tags(self.all_tags[0], nil)
elsif @options[:tag1] and @options[:tag2]
tag1 = @options[:tag1]
tag2 = @options[:tag2]
tags_strings = []
self.all_tags.each { |x| tags_strings.push(x['name']) }
if tags_strings.include?(tag1)
if tags_strings.include?(tag2)
to_a = tags_strings.map.with_index.to_a
hash = Hash[to_a]
index1 = hash[tag1]
index2 = hash[tag2]
log += self.generate_log_between_tags(self.all_tags[index1], self.all_tags[index2])
else
puts "Can't find tag #{tag2} -> exit"
exit
end
else
puts "Can't find tag #{tag1} -> exit"
exit
end
else
log += self.generate_log_for_all_tags
end
log += "\n\n\\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"
output_filename = "#{@options[:output]}"
File.open(output_filename, 'w') { |file| file.write(log) }
puts 'Done!'
puts "Generated log placed in #{`pwd`.strip!}/#{output_filename}"
end
def generate_log_for_all_tags
fetch_tags_dates
if @options[:verbose]
puts "Sorting tags.."
end
@all_tags.sort_by! { |x| self.get_time_of_tag(x) }.reverse!
if @options[:verbose]
puts "Generating log.."
end
log = ''
if @options[:unreleased] && @all_tags.count != 0
unreleased_log = self.generate_log_between_tags(self.all_tags[0], nil)
if unreleased_log
log += unreleased_log
end
end
(1 ... self.all_tags.size).each { |index|
log += self.generate_log_between_tags(self.all_tags[index], self.all_tags[index-1])
}
if @all_tags.count != 0
log += generate_log_between_tags(nil, self.all_tags.last)
end
log
end
def fetch_tags_dates
if @options[:verbose]
print "Fetching tags dates..\r"
end
# Async fetching tags:
threads = []
i = 0
all = @all_tags.count
@all_tags.each { |tag|
# explicit set @tag_times_hash to write data safety.
threads << Thread.new {
self.get_time_of_tag(tag, @tag_times_hash)
if @options[:verbose]
print "Fetching tags dates: #{i+1}/#{all}\r"
i+=1
end
}
}
print " \r"
threads.each { |thr| thr.join }
if @options[:verbose]
puts 'Fetching tags: Done!'
end
end
def is_megred(number)
@github.pull_requests.merged? @options[:user], @options[:project], number
end
def get_all_tags
if @options[:verbose]
print "Fetching tags...\r"
end
response = @github.repos.tags @options[:user], @options[:project]
tags = []
page_i = 0
count_pages = response.count_pages
response.each_page do |page|
page_i += PER_PAGE_NUMBER
print "Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
tags.concat(page)
end
print " \r"
if @options[:verbose]
puts "Found #{tags.count} tags"
end
tags
end
def fetch_github_token
env_var = @options[:token] ? @options[:token] : (ENV.fetch 'CHANGELOG_GITHUB_TOKEN', nil)
unless env_var
puts "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found.".yellow
puts "This script can make only 50 requests to GitHub API per hour without token!".yellow
end
@github_token ||= env_var
end
def generate_log_between_tags(older_tag, newer_tag)
# older_tag nil - means it's first tag, newer_tag nil - means it unreleased section
filtered_pull_requests = delete_by_time(@pull_requests, :actual_date, older_tag, newer_tag)
filtered_issues = delete_by_time(@issues, :actual_date, older_tag, newer_tag)
newer_tag_name = newer_tag.nil? ? nil : newer_tag['name']
older_tag_name = older_tag.nil? ? nil : older_tag['name']
if @options[:filter_issues_by_milestone]
#delete excess irrelevant issues (according milestones)
filtered_issues = filter_by_milestone(filtered_issues, newer_tag_name, @issues)
filtered_pull_requests = filter_by_milestone(filtered_pull_requests, newer_tag_name, @pull_requests)
end
if filtered_issues.empty? && filtered_pull_requests.empty? && newer_tag.nil?
# do not generate empty unreleased section
return nil
end
self.create_log(filtered_pull_requests, filtered_issues, newer_tag, older_tag_name)
end
def filter_by_milestone(filtered_issues, newer_tag_name, src_array)
filtered_issues.select! { |issue|
# leave issues without milestones
if issue.milestone.nil?
true
else
#check, that this milestone in tag list:
@all_tags.find { |tag| tag.name == issue.milestone.title }.nil?
end
}
unless newer_tag_name.nil?
#add missed issues (according milestones)
issues_to_add = src_array.select { |issue|
if issue.milestone.nil?
false
else
#check, that this milestone in tag list:
milestone_is_tag = @all_tags.find { |tag|
tag.name == issue.milestone.title
}
if milestone_is_tag.nil?
false
else
issue.milestone.title == newer_tag_name
end
end
}
filtered_issues |= issues_to_add
end
filtered_issues
end
def delete_by_time(array, hash_key, older_tag = nil, newer_tag = nil)
raise 'At least one of the tags should be not nil!' if (older_tag.nil? && newer_tag.nil?)
newer_tag_time = self.get_time_of_tag(newer_tag)
older_tag_time = self.get_time_of_tag(older_tag)
array.select { |req|
if req[hash_key]
t = Time.parse(req[hash_key]).utc
if older_tag_time.nil?
tag_in_range_old = true
else
tag_in_range_old = t > older_tag_time
end
if newer_tag_time.nil?
tag_in_range_new = true
else
tag_in_range_new = t <= newer_tag_time
end
tag_in_range = (tag_in_range_old) && (tag_in_range_new)
tag_in_range
else
false
end
}
end
# @param [Array] pull_requests
# @param [Array] issues
# @param [String] older_tag_name
# @return [String]
def create_log(pull_requests, issues, newer_tag, older_tag_name = nil)
newer_tag_time = newer_tag.nil? ? nil : self.get_time_of_tag(newer_tag)
newer_tag_name = newer_tag.nil? ? nil : newer_tag['name']
github_site = options[:github_site] || 'https://github.com'
project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"
if newer_tag.nil?
newer_tag_name = @options[:unreleased_label]
newer_tag_link = 'HEAD'
newer_tag_time = Time.new
else
newer_tag_link = newer_tag_name
end
log = ''
log += generate_header(log, newer_tag_name, newer_tag_link, newer_tag_time, older_tag_name, project_url)
if @options[:issues]
# Generate issues:
issues_a = []
enhancement_a = []
bugs_a =[]
issues.each { |dict|
added = false
dict.labels.each { |label|
if label.name == 'bug'
bugs_a.push dict
added = true
next
end
if label.name == 'enhancement'
enhancement_a.push dict
added = true
next
end
}
unless added
issues_a.push dict
end
}
log += generate_log_from_array(enhancement_a, @options[:enhancement_prefix])
log += generate_log_from_array(bugs_a, @options[:bug_prefix])
log += generate_log_from_array(issues_a, @options[:issue_prefix])
end
if @options[:pulls]
# Generate pull requests:
log += generate_log_from_array(pull_requests, @options[:merge_prefix])
end
log
end
def generate_log_from_array(issues, prefix)
log = ''
if options[:simple_list].nil? && issues.any?
log += "#{prefix}\n\n"
end
if issues.any?
issues.each { |issue|
merge_string = @generator.get_string_for_issue(issue)
log += "- #{merge_string}\n\n"
}
end
log
end
def generate_header(log, newer_tag_name, newer_tag_name2, newer_tag_time, older_tag_name, project_url)
#Generate date string:
time_string = newer_tag_time.strftime @options[:format]
# Generate tag name and link
if newer_tag_name.equal? @options[:unreleased_label]
log += "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name2})\n\n"
else
log += "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name2}) (#{time_string})\n\n"
end
if @options[:compare_link] && older_tag_name
# Generate compare link
log += "[Full Changelog](#{project_url}/compare/#{older_tag_name}...#{newer_tag_name2})\n\n"
end
log
end
def get_time_of_tag(tag_name, tag_times_hash = @tag_times_hash)
if tag_name.nil?
return nil
end
if tag_times_hash[tag_name['name']]
return @tag_times_hash[tag_name['name']]
end
github_git_data_commits_get = @github.git_data.commits.get @options[:user], @options[:project], tag_name['commit']['sha']
time_string = github_git_data_commits_get['committer']['date']
@tag_times_hash[tag_name['name']] = Time.parse(time_string)
end
def get_filtered_issues
issues = @issues
filtered_issues = issues
unless @options[:include_labels].nil?
filtered_issues = issues.select { |issue|
#add all labels from @options[:incluse_labels] array
(issue.labels.map { |label| label.name } & @options[:include_labels]).any?
}
end
unless @options[:exclude_labels].nil?
filtered_issues = filtered_issues.select { |issue|
#delete all labels from @options[:exclude_labels] array
!(issue.labels.map { |label| label.name } & @options[:exclude_labels]).any?
}
end
if @options[:add_issues_wo_labels]
issues_wo_labels = issues.select {
# add issues without any labels
|issue| !issue.labels.map { |label| label.name }.any?
}
filtered_issues |= issues_wo_labels
end
if @options[:verbose]
puts "Filtered issues: #{filtered_issues.count}"
end
filtered_issues
end
def fetch_issues_and_pull_requests
if @options[:verbose]
print "Fetching closed issues...\r"
end
response = @github.issues.list user: @options[:user], repo: @options[:project], state: 'closed', filter: 'all', labels: nil
issues = []
page_i = 0
count_pages = response.count_pages
response.each_page do |page|
page_i += PER_PAGE_NUMBER
print "Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
issues.concat(page)
end
print " \r"
if @options[:verbose]
puts "Received issues: #{issues.count}"
end
# remove pull request from issues:
issues_wo_pr = issues.select { |x|
x.pull_request == nil
}
pull_requests = issues.select { |x|
x.pull_request != nil
}
return issues_wo_pr, pull_requests
end
def fetch_event_for_issues_and_pr
if @options[:verbose]
print "Fetching events for issues and PR: 0/#{@issues.count + @pull_requests.count}\r"
end
# Async fetching events:
fetch_events_async(@issues + @pull_requests)
#to clear line from prev print
print " \r"
if @options[:verbose]
puts 'Fetching events for issues and PR: Done!'
end
end
def fetch_events_async(issues)
i = 0
max_thread_number = 50
threads = []
issues.each_slice(max_thread_number) { |issues_slice|
issues_slice.each { |issue|
threads << Thread.new {
obj = @github.issues.events.list user: @options[:user], repo: @options[:project], issue_number: issue['number']
issue[:events] = obj.body
print "Fetching events for issues and PR: #{i+1}/#{@issues.count + @pull_requests.count}\r"
i +=1
}
}
threads.each { |thr| thr.join }
threads = []
}
end
end
if __FILE__ == $0
GitHubChangelogGenerator::ChangelogGenerator.new.compund_changelog
end
end
Fix the crash (TypeError on appending nil) when the unreleased section is empty and the `--unreleased-only` option is used.
#!/usr/bin/env ruby
require 'github_api'
require 'json'
require 'colorize'
require 'benchmark'
require_relative 'github_changelog_generator/parser'
require_relative 'github_changelog_generator/generator'
require_relative 'github_changelog_generator/version'
module GitHubChangelogGenerator
class ChangelogGenerator
attr_accessor :options, :all_tags, :github
PER_PAGE_NUMBER = 30
def initialize
@options = Parser.parse_options
fetch_github_token
github_options = {per_page: PER_PAGE_NUMBER}
github_options[:oauth_token] = @github_token unless @github_token.nil?
github_options[:endpoint] = options[:github_endpoint] unless options[:github_endpoint].nil?
github_options[:site] = options[:github_endpoint] unless options[:github_site].nil?
@github = Github.new github_options
@generator = Generator.new(@options)
@all_tags = self.get_all_tags
@issues, @pull_requests = self.fetch_issues_and_pull_requests
if @options[:pulls]
@pull_requests = self.get_filtered_pull_requests
self.fetch_merged_at_pull_requests
else
@pull_requests = []
end
if @options[:issues]
@issues = self.get_filtered_issues
else
@issues = []
end
fetch_event_for_issues_and_pr
detect_actual_closed_dates
@tag_times_hash = {}
end
def detect_actual_closed_dates
if @options[:verbose]
print "Fetching closed dates for issues...\r"
end
threads = []
@issues.each { |issue|
threads << Thread.new {
find_closed_date_by_commit(issue)
}
}
@pull_requests.each { |pull_request|
threads << Thread.new {
find_closed_date_by_commit(pull_request)
}
}
threads.each { |thr| thr.join }
if @options[:verbose]
puts 'Fetching closed dates for issues: Done!'
end
end
def find_closed_date_by_commit(issue)
unless issue['events'].nil?
#if it's PR -> then find "merged event", in case of usual issue -> fond closed date
compare_string = issue[:merged_at].nil? ? 'closed' : 'merged'
# reverse! - to find latest closed event. (event goes in date order)
issue['events'].reverse!.each { |event|
if event[:event].eql? compare_string
if event[:commit_id].nil?
issue[:actual_date] = issue[:closed_at]
else
begin
commit = @github.git_data.commits.get @options[:user], @options[:project], event[:commit_id]
issue[:actual_date] = commit[:author][:date]
rescue
puts "Warning: can't fetch commit #{event[:commit_id]} probably it referenced from another repo."
issue[:actual_date] = issue[:closed_at]
end
end
break
end
}
end
#TODO: assert issues, that remain without 'actual_date' hash for some reason.
end
def print_json(json)
puts JSON.pretty_generate(json)
end
def fetch_merged_at_pull_requests
if @options[:verbose]
print "Fetching merged dates...\r"
end
response = @github.pull_requests.list @options[:user], @options[:project], :state => 'closed'
pull_requests = []
page_i = 0
response.each_page do |page|
page_i += PER_PAGE_NUMBER
count_pages = response.count_pages
print "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
pull_requests.concat(page)
end
print " \r"
@pull_requests.each { |pr|
fetched_pr = pull_requests.find { |fpr|
fpr.number == pr.number }
pr[:merged_at] = fetched_pr[:merged_at]
pull_requests.delete(fetched_pr)
}
if @options[:verbose]
puts 'Fetching merged dates... Done!'
end
end
def get_filtered_pull_requests
pull_requests = @pull_requests
filtered_pull_requests = pull_requests
unless @options[:include_labels].nil?
filtered_pull_requests = pull_requests.select { |issue|
#add all labels from @options[:incluse_labels] array
(issue.labels.map { |label| label.name } & @options[:include_labels]).any?
}
end
unless @options[:exclude_labels].nil?
filtered_pull_requests = filtered_pull_requests.select { |issue|
#delete all labels from @options[:exclude_labels] array
!(issue.labels.map { |label| label.name } & @options[:exclude_labels]).any?
}
end
if @options[:add_issues_wo_labels]
issues_wo_labels = pull_requests.select {
# add issues without any labels
|issue| !issue.labels.map { |label| label.name }.any?
}
filtered_pull_requests |= issues_wo_labels
end
if @options[:verbose]
puts "Filtered pull requests: #{filtered_pull_requests.count}"
end
filtered_pull_requests
end
def compund_changelog
log = "# Change Log\n\n"
if @options[:unreleased_only]
log += self.generate_log_between_tags(self.all_tags[0], nil)
elsif @options[:tag1] and @options[:tag2]
tag1 = @options[:tag1]
tag2 = @options[:tag2]
tags_strings = []
self.all_tags.each { |x| tags_strings.push(x['name']) }
if tags_strings.include?(tag1)
if tags_strings.include?(tag2)
to_a = tags_strings.map.with_index.to_a
hash = Hash[to_a]
index1 = hash[tag1]
index2 = hash[tag2]
log += self.generate_log_between_tags(self.all_tags[index1], self.all_tags[index2])
else
puts "Can't find tag #{tag2} -> exit"
exit
end
else
puts "Can't find tag #{tag1} -> exit"
exit
end
else
log += self.generate_log_for_all_tags
end
log += "\n\n\\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*"
output_filename = "#{@options[:output]}"
File.open(output_filename, 'w') { |file| file.write(log) }
puts 'Done!'
puts "Generated log placed in #{`pwd`.strip!}/#{output_filename}"
end
def generate_log_for_all_tags
fetch_tags_dates
if @options[:verbose]
puts "Sorting tags.."
end
@all_tags.sort_by! { |x| self.get_time_of_tag(x) }.reverse!
if @options[:verbose]
puts "Generating log.."
end
log = ''
if @options[:unreleased] && @all_tags.count != 0
unreleased_log = self.generate_log_between_tags(self.all_tags[0], nil)
if unreleased_log
log += unreleased_log
end
end
(1 ... self.all_tags.size).each { |index|
log += self.generate_log_between_tags(self.all_tags[index], self.all_tags[index-1])
}
if @all_tags.count != 0
log += generate_log_between_tags(nil, self.all_tags.last)
end
log
end
# Resolves the commit date of every tag in @all_tags, in parallel, and
# caches the results in @tag_times_hash (via #get_time_of_tag).
#
# One thread per tag performs the API call.
#
# Fixes: the shared progress counter was incremented from multiple
# threads without synchronization (a data race) — now guarded by a
# mutex; the line-clearing print happened *before* joining the threads,
# so late progress output could survive — now printed after the join.
def fetch_tags_dates
  if @options[:verbose]
    print "Fetching tags dates..\r"
  end

  # Async fetching tags:
  threads = []
  progress_mutex = Mutex.new
  i = 0
  all = @all_tags.count
  @all_tags.each do |tag|
    # get_time_of_tag writes into the explicitly-passed @tag_times_hash,
    # so every thread updates the shared cache.
    threads << Thread.new do
      self.get_time_of_tag(tag, @tag_times_hash)
      if @options[:verbose]
        progress_mutex.synchronize do
          print "Fetching tags dates: #{i + 1}/#{all}\r"
          i += 1
        end
      end
    end
  end
  threads.each(&:join)

  print " \r"
  if @options[:verbose]
    puts 'Fetching tags: Done!'
  end
end
# Returns whether the pull request with the given number was merged
# (as opposed to closed without merging), via the GitHub API.
#
# NOTE: the name keeps its historical typo ("megred") because callers
# may rely on it; consider adding a correctly-spelled alias.
#
# @param number [Integer] pull request number
# @return [Boolean]
def is_megred(number)
  @github.pull_requests.merged? @options[:user], @options[:project], number
end
# Fetches every tag of the configured repository from the GitHub API,
# following pagination.
#
# NOTE(review): the per-page progress line is printed regardless of
# @options[:verbose], unlike the surrounding messages — confirm whether
# that is intentional.
#
# @return [Array] raw tag hashes as returned by the API
def get_all_tags
  if @options[:verbose]
    print "Fetching tags...\r"
  end

  response = @github.repos.tags @options[:user], @options[:project]

  tags = []
  page_i = 0
  count_pages = response.count_pages
  response.each_page do |page|
    page_i += PER_PAGE_NUMBER
    print "Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
    tags.concat(page)
  end
  # Overwrite the progress line.
  print " \r"
  if @options[:verbose]
    puts "Found #{tags.count} tags"
  end

  tags
end
# Resolves the GitHub API token: an explicit -t/--token option wins,
# otherwise the CHANGELOG_GITHUB_TOKEN environment variable is used.
# Warns (but continues) when no token is available, since
# unauthenticated API access is heavily rate-limited.
#
# Fix: redundant `x ? x : y` ternary replaced with idiomatic `||`
# (token is a String or nil, so the semantics are identical).
#
# @return [String, nil] the token, or nil when none is configured
def fetch_github_token
  env_var = @options[:token] || ENV.fetch('CHANGELOG_GITHUB_TOKEN', nil)

  unless env_var
    puts "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found.".yellow
    puts "This script can make only 50 requests to GitHub API per hour without token!".yellow
  end

  @github_token ||= env_var
end
# Builds one changelog section covering the range (older_tag, newer_tag].
#
# older_tag == nil means "from the beginning of history" (the first tag's
# section); newer_tag == nil means the unreleased section (up to HEAD).
# Issues and PRs are filtered by closing time and, optionally, by
# milestone. An empty unreleased section is suppressed.
#
# @return [String] markdown for the section ('' when suppressed)
def generate_log_between_tags(older_tag, newer_tag)
  # older_tag nil - means it's first tag, newer_tag nil - means it unreleased section
  filtered_pull_requests = delete_by_time(@pull_requests, :actual_date, older_tag, newer_tag)
  filtered_issues = delete_by_time(@issues, :actual_date, older_tag, newer_tag)

  newer_tag_name = newer_tag.nil? ? nil : newer_tag['name']
  older_tag_name = older_tag.nil? ? nil : older_tag['name']

  if @options[:filter_issues_by_milestone]
    #delete excess irrelevant issues (according milestones)
    filtered_issues = filter_by_milestone(filtered_issues, newer_tag_name, @issues)
    filtered_pull_requests = filter_by_milestone(filtered_pull_requests, newer_tag_name, @pull_requests)
  end

  if filtered_issues.empty? && filtered_pull_requests.empty? && newer_tag.nil?
    # do not generate empty unreleased section
    return ''
  end

  self.create_log(filtered_pull_requests, filtered_issues, newer_tag, older_tag_name)
end
# Milestone-based filtering for one section.
#
# First removes (in place, via select!) every issue whose milestone
# matches *any* tag name — those belong to the section of that tag, not
# this one. Then, when +newer_tag_name+ is given, pulls in every issue
# from +src_array+ whose milestone is exactly that tag.
#
# @param filtered_issues [Array] issues already filtered by time
# @param newer_tag_name [String, nil] upper-bound tag of the section
# @param src_array [Array] the full issue/PR list to pull extras from
# @return [Array] the adjusted issue list
def filter_by_milestone(filtered_issues, newer_tag_name, src_array)
  filtered_issues.select! { |issue|
    # leave issues without milestones
    if issue.milestone.nil?
      true
    else
      #check, that this milestone in tag list:
      @all_tags.find { |tag| tag.name == issue.milestone.title }.nil?
    end
  }
  unless newer_tag_name.nil?
    #add missed issues (according milestones)
    issues_to_add = src_array.select { |issue|
      if issue.milestone.nil?
        false
      else
        #check, that this milestone in tag list:
        milestone_is_tag = @all_tags.find { |tag|
          tag.name == issue.milestone.title
        }

        if milestone_is_tag.nil?
          false
        else
          # keep only issues whose milestone is exactly this section's tag
          issue.milestone.title == newer_tag_name
        end
      end
    }

    filtered_issues |= issues_to_add
  end
  filtered_issues
end
# Selects the elements of +array+ whose +hash_key+ timestamp falls in
# the half-open interval (older_tag time, newer_tag time]. A nil tag
# leaves that side of the interval unbounded; elements missing the
# timestamp key are dropped.
#
# @raise [RuntimeError] when both tags are nil
def delete_by_time(array, hash_key, older_tag = nil, newer_tag = nil)
  raise 'At least one of the tags should be not nil!' if older_tag.nil? && newer_tag.nil?

  upper_bound = self.get_time_of_tag(newer_tag)
  lower_bound = self.get_time_of_tag(older_tag)

  array.select do |req|
    timestamp = req[hash_key]
    next false unless timestamp

    time = Time.parse(timestamp).utc
    after_older = lower_bound.nil? || time > lower_bound
    before_newer = upper_bound.nil? || time <= upper_bound
    after_older && before_newer
  end
end
# Renders one complete changelog section for a tag (or the unreleased
# section when +newer_tag+ is nil): header, categorized issues, then
# pull requests.
#
# @param [Array] pull_requests pull requests closed in this range
# @param [Array] issues issues closed in this range
# @param [Hash, nil] newer_tag tag bounding the section (nil => unreleased)
# @param [String] older_tag_name lower-bound tag name for the compare link
# @return [String] markdown for the section
def create_log(pull_requests, issues, newer_tag, older_tag_name = nil)
  newer_tag_time = newer_tag.nil? ? nil : self.get_time_of_tag(newer_tag)
  newer_tag_name = newer_tag.nil? ? nil : newer_tag['name']

  github_site = options[:github_site] || 'https://github.com'
  project_url = "#{github_site}/#{@options[:user]}/#{@options[:project]}"

  if newer_tag.nil?
    # Unreleased section: label it, link it to HEAD, and bound it by "now".
    newer_tag_name = @options[:unreleased_label]
    newer_tag_link = 'HEAD'
    newer_tag_time = Time.new
  else
    newer_tag_link = newer_tag_name
  end

  log = ''
  log += generate_header(log, newer_tag_name, newer_tag_link, newer_tag_time, older_tag_name, project_url)

  if @options[:issues]
    # Generate issues:
    issues_a = []
    enhancement_a = []
    bugs_a =[]

    # Bucket each issue by label.
    # NOTE(review): `next` here only advances to the next *label*, so an
    # issue labelled both 'bug' and 'enhancement' lands in both lists —
    # confirm whether that is intended.
    issues.each { |dict|
      added = false
      dict.labels.each { |label|
        if label.name == 'bug'
          bugs_a.push dict
          added = true
          next
        end
        if label.name == 'enhancement'
          enhancement_a.push dict
          added = true
          next
        end
      }
      unless added
        issues_a.push dict
      end
    }

    log += generate_log_from_array(enhancement_a, @options[:enhancement_prefix])
    log += generate_log_from_array(bugs_a, @options[:bug_prefix])
    log += generate_log_from_array(issues_a, @options[:issue_prefix])
  end

  if @options[:pulls]
    # Generate pull requests:
    log += generate_log_from_array(pull_requests, @options[:merge_prefix])
  end

  log
end
# Renders one markdown sub-section: an optional +prefix+ heading
# (omitted in simple-list mode) followed by one bullet line per issue.
# Returns '' for an empty issue list.
def generate_log_from_array(issues, prefix)
  return '' unless issues.any?

  lines = []
  lines << "#{prefix}\n\n" if options[:simple_list].nil?
  issues.each do |issue|
    merge_string = @generator.get_string_for_issue(issue)
    lines << "- #{merge_string}\n\n"
  end
  lines.join
end
# Renders the "## [tag](link) (date)" heading for a section, plus an
# optional "Full Changelog" compare link. The date is omitted for the
# unreleased section.
#
# NOTE(review): the +log+ parameter is appended to and returned, and the
# only visible caller also does `log += generate_header(log, ...)` —
# with a non-empty +log+ that would duplicate content. It works today
# because the caller always passes ''. Confirm before reusing elsewhere.
def generate_header(log, newer_tag_name, newer_tag_name2, newer_tag_time, older_tag_name, project_url)
  #Generate date string:
  time_string = newer_tag_time.strftime @options[:format]

  # Generate tag name and link
  if newer_tag_name.equal? @options[:unreleased_label]
    log += "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name2})\n\n"
  else
    log += "## [#{newer_tag_name}](#{project_url}/tree/#{newer_tag_name2}) (#{time_string})\n\n"
  end

  if @options[:compare_link] && older_tag_name
    # Generate compare link
    log += "[Full Changelog](#{project_url}/compare/#{older_tag_name}...#{newer_tag_name2})\n\n"
  end

  log
end
# Returns the commit Time of +tag_name+ (a tag hash from the API),
# memoizing results in +tag_times_hash+.
#
# Fix: the original read the cache through the +tag_times_hash+
# parameter but returned and wrote through @tag_times_hash directly, so
# passing any hash other than @tag_times_hash silently used the wrong
# cache. The parameter is now used consistently (callers all pass
# @tag_times_hash, so behavior is unchanged for them).
#
# @param tag_name [Hash, nil] tag hash with 'name' and 'commit' keys
# @param tag_times_hash [Hash] cache of tag name => Time
# @return [Time, nil] nil when tag_name is nil
def get_time_of_tag(tag_name, tag_times_hash = @tag_times_hash)
  return nil if tag_name.nil?

  cached = tag_times_hash[tag_name['name']]
  return cached if cached

  # Cache miss: look up the tagged commit and parse its committer date.
  commit = @github.git_data.commits.get @options[:user], @options[:project], tag_name['commit']['sha']
  tag_times_hash[tag_name['name']] = Time.parse(commit['committer']['date'])
end
# Applies the include/exclude label filters from @options to @issues and
# optionally re-adds label-less issues.
#
# @return [Array] the filtered issue list
def get_filtered_issues
  issues = @issues
  filtered_issues = issues

  include_labels = @options[:include_labels]
  exclude_labels = @options[:exclude_labels]

  unless include_labels.nil?
    # Keep only issues carrying at least one of the requested labels.
    filtered_issues = issues.select do |issue|
      label_names = issue.labels.map { |label| label.name }
      (label_names & include_labels).any?
    end
  end

  unless exclude_labels.nil?
    # Drop issues carrying any of the excluded labels.
    filtered_issues = filtered_issues.reject do |issue|
      label_names = issue.labels.map { |label| label.name }
      (label_names & exclude_labels).any?
    end
  end

  if @options[:add_issues_wo_labels]
    # Re-add issues that have no labels at all.
    unlabeled = issues.select do |issue|
      !issue.labels.map { |label| label.name }.any?
    end
    filtered_issues |= unlabeled
  end

  puts "Filtered issues: #{filtered_issues.count}" if @options[:verbose]
  filtered_issues
end
# Downloads all closed issues for the project (following pagination) and
# splits them into plain issues and pull requests — the GitHub issues
# API returns both, with PRs carrying a non-nil +pull_request+ field.
#
# @return [(Array, Array)] pair of [issues_without_prs, pull_requests]
def fetch_issues_and_pull_requests
  if @options[:verbose]
    print "Fetching closed issues...\r"
  end
  response = @github.issues.list user: @options[:user], repo: @options[:project], state: 'closed', filter: 'all', labels: nil
  issues = []
  page_i = 0
  count_pages = response.count_pages
  response.each_page do |page|
    page_i += PER_PAGE_NUMBER
    print "Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}\r"
    issues.concat(page)
  end

  # Overwrite the progress line.
  print " \r"
  if @options[:verbose]
    puts "Received issues: #{issues.count}"
  end

  # remove pull request from issues:
  issues_wo_pr = issues.select { |x|
    x.pull_request == nil
  }
  pull_requests = issues.select { |x|
    x.pull_request != nil
  }
  return issues_wo_pr, pull_requests
end
# Downloads the event list of every issue and pull request, reporting
# progress when verbose.
def fetch_event_for_issues_and_pr
  total = @issues.count + @pull_requests.count
  print "Fetching events for issues and PR: 0/#{total}\r" if @options[:verbose]

  # Events are fetched concurrently:
  fetch_events_async(@issues + @pull_requests)

  #to clear line from prev print
  print " \r"

  puts 'Fetching events for issues and PR: Done!' if @options[:verbose]
end
# Fetches the events of every element of +issues+ from the GitHub API,
# keeping at most 50 requests in flight at a time, and stores each
# result on the issue under :events.
#
# Fix: the shared progress counter was incremented from many threads
# without synchronization (a data race); it is now guarded by a mutex.
#
# @param issues [Array] issues and/or pull requests to annotate
def fetch_events_async(issues)
  progress = 0
  total = @issues.count + @pull_requests.count
  progress_mutex = Mutex.new
  max_thread_number = 50

  issues.each_slice(max_thread_number) do |issues_slice|
    threads = issues_slice.map do |issue|
      Thread.new do
        response = @github.issues.events.list user: @options[:user], repo: @options[:project], issue_number: issue['number']
        issue[:events] = response.body
        progress_mutex.synchronize do
          progress += 1
          print "Fetching events for issues and PR: #{progress}/#{total}\r"
        end
      end
    end
    # Wait for the whole slice before starting the next one, bounding the
    # number of concurrent API requests.
    threads.each(&:join)
  end
end
end
if __FILE__ == $0
GitHubChangelogGenerator::ChangelogGenerator.new.compund_changelog
end
end
|
module Gitlab
  module Satellite
    class CheckoutFailed < StandardError; end
    class CommitFailed < StandardError; end
    class PushFailed < StandardError; end

    # A satellite is a working-copy clone of a project's bare repository.
    # It provides a working directory for operations (commits, merges,
    # file edits) that cannot be performed on the bare repo directly.
    #
    # Fix: deprecated File.exists? (removed in Ruby 3.2) replaced with
    # File.exist?.
    class Satellite
      include Gitlab::Popen

      # Branch used to "park" the satellite so real branches can be
      # deleted and recreated without name clashes.
      PARKING_BRANCH = "__parking_branch"

      attr_accessor :project

      def initialize(project)
        @project = project
      end

      def log(message)
        Gitlab::Satellite::Logger.error(message)
      end

      # Resets the satellite to a clean state in sync with the bare repo.
      def clear_and_update!
        project.ensure_satellite_exists

        @repo = nil
        clear_working_dir!
        delete_heads!
        remove_remotes!
        update_from_source!
      end

      # Clones the project's bare repository into the satellite path.
      #
      # @return [Boolean] true when the clone succeeded
      def create
        output, status = popen(%W(git clone -- #{project.repository.path_to_repo} #{path}),
                               Gitlab.config.satellites.path)
        log("PID: #{project.id}: git clone #{project.repository.path_to_repo} #{path}")
        log("PID: #{project.id}: -> #{output}")

        if status.zero?
          true
        else
          log("Failed to create satellite for #{project.name_with_namespace}")
          false
        end
      end

      def exists?
        File.exist? path
      end

      # * Locks the satellite
      # * Changes the current directory to the satellite's working dir
      # * Yields
      def lock
        project.ensure_satellite_exists

        File.open(lock_file, "w+") do |f|
          begin
            f.flock File::LOCK_EX
            yield
          ensure
            # Always release the lock, even when the block raised.
            f.flock File::LOCK_UN
          end
        end
      end

      def lock_file
        create_locks_dir unless File.exist?(lock_files_dir)
        File.join(lock_files_dir, "satellite_#{project.id}.lock")
      end

      def path
        File.join(Gitlab.config.satellites.path, project.path_with_namespace)
      end

      def repo
        project.ensure_satellite_exists

        @repo ||= Grit::Repo.new(path)
      end

      def destroy
        FileUtils.rm_rf(path)
      end

      private

      # Clear the working directory
      def clear_working_dir!
        repo.git.reset(hard: true)
        repo.git.clean(f: true, d: true, x: true)
      end

      # Deletes all branches except the parking branch
      #
      # This ensures we have no name clashes or issues updating branches when
      # working with the satellite.
      def delete_heads!
        heads = repo.heads.map(&:name)

        # update or create the parking branch
        repo.git.checkout(default_options({ B: true }), PARKING_BRANCH)

        # remove the parking branch from the list of heads ...
        heads.delete(PARKING_BRANCH)
        # ... and delete all others
        heads.each { |head| repo.git.branch(default_options({ D: true }), head) }
      end

      # Deletes all remotes except origin
      #
      # This ensures we have no remote name clashes or issues updating branches when
      # working with the satellite.
      def remove_remotes!
        remotes = repo.git.remote.split(' ')
        remotes.delete('origin')
        remotes.each { |name| repo.git.remote(default_options, 'rm', name) }
      end

      # Updates the satellite from bare repo
      #
      # Note: this will only update remote branches (i.e. origin/*)
      def update_from_source!
        repo.git.remote(default_options, 'set-url', :origin, project.repository.path_to_repo)
        repo.git.fetch(default_options, :origin)
      end

      # Options for every Grit git invocation: raise on failure and honour
      # the configured timeout.
      def default_options(options = {})
        { raise: true, timeout: true }.merge(options)
      end

      # Create directory for storing
      # satellites lock files
      def create_locks_dir
        FileUtils.mkdir_p(lock_files_dir)
      end

      def lock_files_dir
        @lock_files_dir ||= File.join(Gitlab.config.satellites.path, "tmp")
      end
    end
  end
end
Add autoloads for lib/gitlab/satellite/files/*
These files don't match their naming convention, but for organizational
purposes it makes sense.
module Gitlab
  module Satellite
    # These files don't match the usual autoload naming convention, so they
    # are mapped explicitly.
    autoload :DeleteFileAction, 'gitlab/satellite/files/delete_file_action'
    autoload :EditFileAction, 'gitlab/satellite/files/edit_file_action'
    autoload :FileAction, 'gitlab/satellite/files/file_action'
    autoload :NewFileAction, 'gitlab/satellite/files/new_file_action'

    class CheckoutFailed < StandardError; end
    class CommitFailed < StandardError; end
    class PushFailed < StandardError; end

    # A satellite is a working-copy clone of a project's bare repository.
    # It provides a working directory for operations (commits, merges,
    # file edits) that cannot be performed on the bare repo directly.
    #
    # Fix: deprecated File.exists? (removed in Ruby 3.2) replaced with
    # File.exist?.
    class Satellite
      include Gitlab::Popen

      # Branch used to "park" the satellite so real branches can be
      # deleted and recreated without name clashes.
      PARKING_BRANCH = "__parking_branch"

      attr_accessor :project

      def initialize(project)
        @project = project
      end

      def log(message)
        Gitlab::Satellite::Logger.error(message)
      end

      # Resets the satellite to a clean state in sync with the bare repo.
      def clear_and_update!
        project.ensure_satellite_exists

        @repo = nil
        clear_working_dir!
        delete_heads!
        remove_remotes!
        update_from_source!
      end

      # Clones the project's bare repository into the satellite path.
      #
      # @return [Boolean] true when the clone succeeded
      def create
        output, status = popen(%W(git clone -- #{project.repository.path_to_repo} #{path}),
                               Gitlab.config.satellites.path)
        log("PID: #{project.id}: git clone #{project.repository.path_to_repo} #{path}")
        log("PID: #{project.id}: -> #{output}")

        if status.zero?
          true
        else
          log("Failed to create satellite for #{project.name_with_namespace}")
          false
        end
      end

      def exists?
        File.exist? path
      end

      # * Locks the satellite
      # * Changes the current directory to the satellite's working dir
      # * Yields
      def lock
        project.ensure_satellite_exists

        File.open(lock_file, "w+") do |f|
          begin
            f.flock File::LOCK_EX
            yield
          ensure
            # Always release the lock, even when the block raised.
            f.flock File::LOCK_UN
          end
        end
      end

      def lock_file
        create_locks_dir unless File.exist?(lock_files_dir)
        File.join(lock_files_dir, "satellite_#{project.id}.lock")
      end

      def path
        File.join(Gitlab.config.satellites.path, project.path_with_namespace)
      end

      def repo
        project.ensure_satellite_exists

        @repo ||= Grit::Repo.new(path)
      end

      def destroy
        FileUtils.rm_rf(path)
      end

      private

      # Clear the working directory
      def clear_working_dir!
        repo.git.reset(hard: true)
        repo.git.clean(f: true, d: true, x: true)
      end

      # Deletes all branches except the parking branch
      #
      # This ensures we have no name clashes or issues updating branches when
      # working with the satellite.
      def delete_heads!
        heads = repo.heads.map(&:name)

        # update or create the parking branch
        repo.git.checkout(default_options({ B: true }), PARKING_BRANCH)

        # remove the parking branch from the list of heads ...
        heads.delete(PARKING_BRANCH)
        # ... and delete all others
        heads.each { |head| repo.git.branch(default_options({ D: true }), head) }
      end

      # Deletes all remotes except origin
      #
      # This ensures we have no remote name clashes or issues updating branches when
      # working with the satellite.
      def remove_remotes!
        remotes = repo.git.remote.split(' ')
        remotes.delete('origin')
        remotes.each { |name| repo.git.remote(default_options, 'rm', name) }
      end

      # Updates the satellite from bare repo
      #
      # Note: this will only update remote branches (i.e. origin/*)
      def update_from_source!
        repo.git.remote(default_options, 'set-url', :origin, project.repository.path_to_repo)
        repo.git.fetch(default_options, :origin)
      end

      # Options for every Grit git invocation: raise on failure and honour
      # the configured timeout.
      def default_options(options = {})
        { raise: true, timeout: true }.merge(options)
      end

      # Create directory for storing
      # satellites lock files
      def create_locks_dir
        FileUtils.mkdir_p(lock_files_dir)
      end

      def lock_files_dir
        @lock_files_dir ||= File.join(Gitlab.config.satellites.path, "tmp")
      end
    end
  end
end
|
module GnuplotRB
##
# This module contains methods that should be mixed into
# plottable classes. It includes OptionHandling and
# implements several plotting methods.
module Plottable
include OptionHandling
##
# @private
# Abstract placeholder: every class mixing in Plottable must override
# this with a real implementation.
def plot(*_)
  raise NotImplementedError, 'You should implement #plot in classes that are Plottable!'
end
##
# In this gem #method_missing is used both to handle
# options and to handle plotting to specific terminal.
#
# == Options handling
# === Overview
# You may set options using #option_name(option_value) method.
# A new object will be constructed with selected option set.
# And finally you can get current value of any option using
# #options_name without arguments.
# === Arguments
# * *option_value* - value to set an option. If none given
# method will just return current option's value
# === Examples
# plot = Splot.new
# new_plot = plot.title('Awesome plot')
# plot.title #=> nil
# new_plot.title #=> 'Awesome plot'
#
# == Plotting to specific term
# === Overview
# Gnuplot offers possibility to output graphics to many image formats.
# The easiest way to to so is to use #to_<plot_name> methods.
# === Arguments
# * *options* - set of options related to terminal (size, font etc).
# Be careful, some terminals have their own specific options.
# === Examples
# # font options specific for png term
# multiplot.to_png('./result.png', size: [300, 500], font: ['arial', 12])
# # font options specific for svg term
# content = multiplot.to_svg(size: [100, 100], fname: 'Arial', fsize: 12)
def method_missing(meth_id, *args)
  meth = meth_id.id2name
  case
  when meth[0..2] == 'to_'
    # #to_<term> -> plot to that terminal (e.g. #to_png); unknown
    # terminals fall through to the default NoMethodError via super.
    term = meth[3..-1]
    super unless OptionHandling.valid_terminal?(term)
    to_specific_term(term, *args)
  when meth[-1] == '!'
    # #option!(value) mutates this object's option in place.
    option!(meth[0..-2].to_sym, *args)
  when meth[-1] == '='
    # #option = value behaves like #option! but returns the new value.
    option!(meth[0..-2].to_sym, *args)
    option(meth[0..-2].to_sym)
  else
    # Plain #option(value): clone with option set, or read current value
    # when called without arguments.
    option(meth_id, *args)
  end
end
##
# @return [true] for existing methods and
# #to_|term_name| when name is a valid terminal type.
# @return [false] otherwise
#
# Fix: accepts the optional +include_all+ flag to match the signature of
# Object#respond_to? — without it, callers passing the second argument
# get an ArgumentError (and Ruby emits a warning).
def respond_to?(meth_id, include_all = false)
  # Next line is here to force iRuby use #to_iruby
  # instead of #to_svg.
  return super if defined? IRuby
  meth = meth_id.id2name
  term = meth[0..2] == 'to_' && OptionHandling.valid_terminal?(meth[3..-1])
  term || super
end
##
# This method is used to embed plottable objects into iRuby notebooks. There is
# {a notebook}[http://nbviewer.ipython.org/github/dilcom/gnuplotrb/blob/master/notebooks/basic_usage.ipynb]
# with examples of its usage.
#
# @return [Array(String, String)] a [mime_type, rendered_data] pair
def to_iruby
  # MIME types iRuby can render inline; anything else falls back to svg.
  available_terminals = {
    'png' => 'image/png',
    'pngcairo' => 'image/png',
    'jpeg' => 'image/jpeg',
    'svg' => 'image/svg+xml',
    'dumb' => 'text/plain'
  }
  # The term option may be a bare name or a [name, options] pair.
  terminal, options = term.is_a?(Array) ? [term[0], term[1]] : [term, {}]
  terminal = 'svg' unless available_terminals.keys.include?(terminal)
  [available_terminals[terminal], send("to_#{terminal}".to_sym, **options)]
end
##
# @private
# Output plot to specific terminal (possibly some file).
# Explicit use should be avoided. This method is called from #method_missing
# when it handles method names like #to_png(options).
#
# @param trminal [String] terminal name ('png', 'svg' etc)
# @param path [String] path to output file, if none given it will output to temp file
# and then read it and return binary contents of file
# @param options [Hash] used in #plot
# @example
# ## plot here may be Plot, Splot, Multiplot or any other plottable class
# plot.to_png('./result.png', size: [300, 500])
# contents = plot.to_svg(size: [100, 100])
# plot.to_dumb('./result.txt', size: [30, 15])
def to_specific_term(terminal, path = nil, **options)
  if path
    # Plot straight into the requested file and return the plot result.
    result = plot(term: [terminal, options], output: path)
  else
    # No path given: plot into a temp file, return its binary contents,
    # and clean the temp file up afterwards.
    path = Dir::Tmpname.make_tmpname(terminal, 0)
    plot(term: [terminal, options], output: path)
    result = File.binread(path)
    File.delete(path)
  end
  result
end
##
# @return [Terminal] terminal object linked with this Plottable object
# (lazily created on first access and memoized).
def own_terminal
  @terminal ||= Terminal.new
end
##
# @!method xrange(value = nil)
# @!method yrange(value = nil)
# @!method title(value = nil)
# @!method option_name(value = nil)
# Clone existing object and set new options value in created one or just return
# existing value if nil given.
#
# Method is handled by #method_missing.
#
# You may set options using #option_name(option_value) method.
# A new object will be constructed with selected option set.
# And finally you can get current value of any option using
# #options_name without arguments.
#
# Available options are listed in Plot, Splot, Multiplot etc class top level doc.
#
# @param value new value for option
# @return new object with option_name set to *value* if value given
# @return old option value if no value given
#
# @example
# plot = Splot.new
# new_plot = plot.title('Awesome plot')
# plot.title #=> nil
# new_plot.title #=> 'Awesome plot'
##
# @!method xrange!(value)
# @!method yrange!(value)
# @!method title!(value)
# @!method option_name!(value)
# Set value for an option.
#
# Method is handled by #method_missing.
#
# You may set options using obj.option_name!(option_value) or
# obj.option_name = option_value methods.
#
# Available options are listed in Plot, Splot, Multiplot etc class top level doc.
#
# @param value new value for option
# @return self
#
# @example
# plot = Splot.new
# plot.title #=> nil
# plot.title!('Awesome plot')
# plot.title #=> 'Awesome plot'
#
# @example
# plot = Splot.new
# plot.title #=> nil
# plot.title = 'Awesome plot'
# plot.title #=> 'Awesome plot'
##
# @!method to_png(path = nil, **options)
# @!method to_svg(path = nil, **options)
# @!method to_gif(path = nil, **options)
# @!method to_canvas(path = nil, **options)
# Output to plot to according image format.
#
# All of #to_|terminal_name| methods are handled with #method_missing.
#
# Gnuplot offers possibility to output graphics to many image formats.
# The easiest way to to so is to use #to_<plot_name> methods.
#
# @param path [String] path to save plot file to.
# @param options [Hash] specific terminal options like 'size',
# 'font' etc
#
# @return [String] contents of plotted file unless path given
# @return self if path given
#
# @example
# # font options specific for png term
# multiplot.to_png('./result.png', size: [300, 500], font: ['arial', 12])
# # font options specific for svg term
# content = multiplot.to_svg(size: [100, 100], fname: 'Arial', fsize: 12)
end
end
Avoid warning for the new respond_to? method signature
module GnuplotRB
##
# This module contains methods that should be mixed into
# plottable classes. It includes OptionHandling and
# implements several plotting methods.
module Plottable
include OptionHandling
##
# @private
# Abstract placeholder: every class mixing in Plottable must override
# this with a real implementation.
def plot(*_)
  raise NotImplementedError, 'You should implement #plot in classes that are Plottable!'
end
##
# In this gem #method_missing is used both to handle
# options and to handle plotting to specific terminal.
#
# == Options handling
# === Overview
# You may set options using #option_name(option_value) method.
# A new object will be constructed with selected option set.
# And finally you can get current value of any option using
# #options_name without arguments.
# === Arguments
# * *option_value* - value to set an option. If none given
# method will just return current option's value
# === Examples
# plot = Splot.new
# new_plot = plot.title('Awesome plot')
# plot.title #=> nil
# new_plot.title #=> 'Awesome plot'
#
# == Plotting to specific term
# === Overview
# Gnuplot offers possibility to output graphics to many image formats.
# The easiest way to to so is to use #to_<plot_name> methods.
# === Arguments
# * *options* - set of options related to terminal (size, font etc).
# Be careful, some terminals have their own specific options.
# === Examples
# # font options specific for png term
# multiplot.to_png('./result.png', size: [300, 500], font: ['arial', 12])
# # font options specific for svg term
# content = multiplot.to_svg(size: [100, 100], fname: 'Arial', fsize: 12)
def method_missing(meth_id, *args)
  meth = meth_id.id2name
  case
  when meth[0..2] == 'to_'
    # #to_<term> -> plot to that terminal (e.g. #to_png); unknown
    # terminals fall through to the default NoMethodError via super.
    term = meth[3..-1]
    super unless OptionHandling.valid_terminal?(term)
    to_specific_term(term, *args)
  when meth[-1] == '!'
    # #option!(value) mutates this object's option in place.
    option!(meth[0..-2].to_sym, *args)
  when meth[-1] == '='
    # #option = value behaves like #option! but returns the new value.
    option!(meth[0..-2].to_sym, *args)
    option(meth[0..-2].to_sym)
  else
    # Plain #option(value): clone with option set, or read current value
    # when called without arguments.
    option(meth_id, *args)
  end
end
##
# @return [true] for existing methods and
# #to_|term_name| when name is a valid terminal type.
# @return [false] otherwise
#
# The +include_all+ flag matches the signature of Object#respond_to?
# and is forwarded implicitly via +super+.
def respond_to?(meth_id, include_all=false)
  # Next line is here to force iRuby use #to_iruby
  # instead of #to_svg.
  return super if defined? IRuby
  meth = meth_id.id2name
  term = meth[0..2] == 'to_' && OptionHandling.valid_terminal?(meth[3..-1])
  term || super
end
##
# This method is used to embed plottable objects into iRuby notebooks. There is
# {a notebook}[http://nbviewer.ipython.org/github/dilcom/gnuplotrb/blob/master/notebooks/basic_usage.ipynb]
# with examples of its usage.
def to_iruby
available_terminals = {
'png' => 'image/png',
'pngcairo' => 'image/png',
'jpeg' => 'image/jpeg',
'svg' => 'image/svg+xml',
'dumb' => 'text/plain'
}
terminal, options = term.is_a?(Array) ? [term[0], term[1]] : [term, {}]
terminal = 'svg' unless available_terminals.keys.include?(terminal)
[available_terminals[terminal], send("to_#{terminal}".to_sym, **options)]
end
##
# @private
# Output plot to specific terminal (possibly some file).
# Explicit use should be avoided. This method is called from #method_missing
# when it handles method names like #to_png(options).
#
# @param trminal [String] terminal name ('png', 'svg' etc)
# @param path [String] path to output file, if none given it will output to temp file
# and then read it and return binary contents of file
# @param options [Hash] used in #plot
# @example
# ## plot here may be Plot, Splot, Multiplot or any other plottable class
# plot.to_png('./result.png', size: [300, 500])
# contents = plot.to_svg(size: [100, 100])
# plot.to_dumb('./result.txt', size: [30, 15])
def to_specific_term(terminal, path = nil, **options)
if path
result = plot(term: [terminal, options], output: path)
else
path = Dir::Tmpname.make_tmpname(terminal, 0)
plot(term: [terminal, options], output: path)
result = File.binread(path)
File.delete(path)
end
result
end
##
# @return [Terminal] terminal object linked with this Plottable object
def own_terminal
@terminal ||= Terminal.new
end
##
# @!method xrange(value = nil)
# @!method yrange(value = nil)
# @!method title(value = nil)
# @!method option_name(value = nil)
# Clone existing object and set new options value in created one or just return
# existing value if nil given.
#
# Method is handled by #method_missing.
#
# You may set options using #option_name(option_value) method.
# A new object will be constructed with selected option set.
# And finally you can get current value of any option using
# #options_name without arguments.
#
# Available options are listed in Plot, Splot, Multiplot etc class top level doc.
#
# @param value new value for option
# @return new object with option_name set to *value* if value given
# @return old option value if no value given
#
# @example
# plot = Splot.new
# new_plot = plot.title('Awesome plot')
# plot.title #=> nil
# new_plot.title #=> 'Awesome plot'
##
# @!method xrange!(value)
# @!method yrange!(value)
# @!method title!(value)
# @!method option_name!(value)
# Set value for an option.
#
# Method is handled by #method_missing.
#
# You may set options using obj.option_name!(option_value) or
# obj.option_name = option_value methods.
#
# Available options are listed in Plot, Splot, Multiplot etc class top level doc.
#
# @param value new value for option
# @return self
#
# @example
# plot = Splot.new
# plot.title #=> nil
# plot.title!('Awesome plot')
# plot.title #=> 'Awesome plot'
#
# @example
# plot = Splot.new
# plot.title #=> nil
# plot.title = 'Awesome plot'
# plot.title #=> 'Awesome plot'
##
# @!method to_png(path = nil, **options)
# @!method to_svg(path = nil, **options)
# @!method to_gif(path = nil, **options)
# @!method to_canvas(path = nil, **options)
# Output to plot to according image format.
#
# All of #to_|terminal_name| methods are handled with #method_missing.
#
# Gnuplot offers possibility to output graphics to many image formats.
# The easiest way to to so is to use #to_<plot_name> methods.
#
# @param path [String] path to save plot file to.
# @param options [Hash] specific terminal options like 'size',
# 'font' etc
#
# @return [String] contents of plotted file unless path given
# @return self if path given
#
# @example
# # font options specific for png term
# multiplot.to_png('./result.png', size: [300, 500], font: ['arial', 12])
# # font options specific for svg term
# content = multiplot.to_svg(size: [100, 100], fname: 'Arial', fsize: 12)
end
end
|
require 'i18n/backend/base'
require 'i18n/backend/active_record/translation'
module I18n
  module Backend
    # Backend that stores and looks up translations in an ActiveRecord
    # table (see I18n::Backend::ActiveRecord::Translation).
    class ActiveRecord < Base
      # Nothing to reload: translations live in the database.
      def reload!
      end

      # Persists a nested translations hash for +locale+, one row per
      # flattened dotted key. Existing rows for each key (and its
      # subtree) are removed first.
      def store_translations(locale, data)
        separator = I18n.default_separator # TODO allow to pass as an option?
        wind_keys(data).each do |key, v|
          Translation.locale(locale).lookup(expand_keys(key, separator), separator).delete_all
          Translation.create(:locale => locale.to_s, :key => key, :value => v)
        end
      end

      # Locales that have at least one stored translation; returns []
      # when the table does not exist yet (e.g. before migrations run).
      def available_locales
        begin
          Translation.available_locales
        rescue ::ActiveRecord::StatementInvalid
          []
        end
      end

      protected

      # Looks up +key+ (scoped) for +locale+. Returns the value on an
      # exact key match, a nested symbolized hash when the key matches a
      # subtree, or nil when nothing matches.
      def lookup(locale, key, scope = [], separator = nil)
        return unless key
        separator ||= I18n.default_separator
        key = (Array(scope) + Array(key)).join(separator)

        result = Translation.locale(locale).lookup(key, separator).all

        if result.empty?
          return nil
        elsif result.first.key == key
          return result.first.value
        else
          # Strip the "key<separator>" prefix from each row and rebuild a
          # nested hash from the remaining dotted keys.
          chop_range = (key.size + separator.size)..-1
          result = result.inject({}) do |hash, r|
            hash[r.key.slice(chop_range)] = r.value
            hash
          end
          deep_symbolize_keys(unwind_keys(result))
        end
      end

      # For a key :'foo.bar.baz' return ['foo', 'foo.bar', 'foo.bar.baz']
      def expand_keys(key, separator = I18n.default_separator)
        key.to_s.split(separator).inject([]) do |keys, key|
          keys << [keys.last, key].compact.join(separator)
        end
      end
    end
  end
end
remove whitespace
require 'i18n/backend/base'
require 'i18n/backend/active_record/translation'
module I18n
  module Backend
    # Backend that stores and looks up translations in an ActiveRecord
    # table (see I18n::Backend::ActiveRecord::Translation).
    class ActiveRecord < Base
      # Nothing to reload: translations live in the database.
      def reload!
      end

      # Persists a nested translations hash for +locale+, one row per
      # flattened dotted key. Existing rows for each key (and its
      # subtree) are removed first.
      def store_translations(locale, data)
        separator = I18n.default_separator # TODO allow to pass as an option?
        wind_keys(data).each do |key, v|
          Translation.locale(locale).lookup(expand_keys(key, separator), separator).delete_all
          Translation.create(:locale => locale.to_s, :key => key, :value => v)
        end
      end

      # Locales that have at least one stored translation; returns []
      # when the table does not exist yet (e.g. before migrations run).
      def available_locales
        begin
          Translation.available_locales
        rescue ::ActiveRecord::StatementInvalid
          []
        end
      end

      protected

      # Looks up +key+ (scoped) for +locale+. Returns the value on an
      # exact key match, a nested symbolized hash when the key matches a
      # subtree, or nil when nothing matches.
      def lookup(locale, key, scope = [], separator = nil)
        return unless key
        separator ||= I18n.default_separator
        key = (Array(scope) + Array(key)).join(separator)

        result = Translation.locale(locale).lookup(key, separator).all

        if result.empty?
          return nil
        elsif result.first.key == key
          return result.first.value
        else
          # Strip the "key<separator>" prefix from each row and rebuild a
          # nested hash from the remaining dotted keys.
          chop_range = (key.size + separator.size)..-1
          result = result.inject({}) do |hash, r|
            hash[r.key.slice(chop_range)] = r.value
            hash
          end
          deep_symbolize_keys(unwind_keys(result))
        end
      end

      # For a key :'foo.bar.baz' return ['foo', 'foo.bar', 'foo.bar.baz']
      def expand_keys(key, separator = I18n.default_separator)
        key.to_s.split(separator).inject([]) do |keys, key|
          keys << [keys.last, key].compact.join(separator)
        end
      end
    end
  end
end
|
# Namespace for the i18n Rails helpers gem.
module I18nRailsHelpers
  # Gem version string.
  VERSION = "1.4.4"
end
Bump to 1.4.5
# Namespace for the i18n Rails helpers gem.
module I18nRailsHelpers
  # Gem version string.
  VERSION = "1.4.5"
end
|
module Inesita
  # Watches the application directory and notifies every connected
  # livereload websocket which asset changed.
  class AppFilesListener
    include Singleton

    # Captured once at load time; used to relativize watched file paths.
    CURRENT_DIR = Dir.pwd

    def initialize
      @websockets = []
      listener = Listen.to(Config::APP_DIR) do |modified, added, _removed|
        (modified + added).each do |file|
          @websockets.each do |ws|
            ws.send transform_filename(file)
          end
        end
      end
      listener.start
    end

    # Registers a websocket to be notified about file changes.
    def add_ws(ws)
      @websockets << ws
    end

    # Unregisters a websocket.
    def rm_ws(ws)
      @websockets.delete(ws)
    end

    # Converts an absolute file path into a "prefix|name|ext" message for
    # the livereload client.
    #
    # Fix: the original used String#sub!, mutating the caller's string in
    # place (and returning nil on no match); the non-destructive
    # String#sub is used instead.
    def transform_filename(filename)
      relative = filename.sub(CURRENT_DIR, '')
      path = relative.split('/')
      path.delete('')
      path.delete(Config::APP_DIR)
      path = path.join('/').split('.')
      prefix = Config::ASSETS_PREFIX
      name = path.first
      # Map source extensions to the asset type the client reloads.
      # NOTE(review): ext is nil for any other extension — confirm that
      # is intended.
      ext = if path.include?('rb') || path.include?('js')
              'js'
            elsif path.include?('sass') || path.include?('css')
              'css'
            end
      "#{prefix}|#{name}|#{ext}"
    end
  end
end
Fix stylesheet livereload
module Inesita
  # Watches the application directory and notifies every connected
  # livereload websocket which asset changed.
  class AppFilesListener
    include Singleton

    # Captured once at load time; used to relativize watched file paths.
    CURRENT_DIR = Dir.pwd

    def initialize
      @websockets = []
      listener = Listen.to(Config::APP_DIR) do |modified, added, _removed|
        (modified + added).each do |file|
          @websockets.each do |ws|
            ws.send transform_filename(file)
          end
        end
      end
      listener.start
    end

    # Registers a websocket to be notified about file changes.
    def add_ws(ws)
      @websockets << ws
    end

    # Unregisters a websocket.
    def rm_ws(ws)
      @websockets.delete(ws)
    end

    # Converts an absolute file path into a "prefix|name|ext" message for
    # the livereload client. Stylesheet changes always report the single
    # combined 'stylesheet' asset.
    #
    # Fix: the original used String#sub!, mutating the caller's string in
    # place (and returning nil on no match); the non-destructive
    # String#sub is used instead.
    def transform_filename(filename)
      relative = filename.sub(CURRENT_DIR, '')
      path = relative.split('/')
      path.delete('')
      path.delete(Config::APP_DIR)
      path = path.join('/').split('.')
      prefix = Config::ASSETS_PREFIX
      name = path.first
      if path.include?('rb') || path.include?('js')
        "#{prefix}|#{name}|js"
      elsif path.include?('sass') || path.include?('css')
        "#{prefix}|stylesheet|css"
      end
    end
  end
end
|
#
# Copyright (c) 2012 Kannan Manickam <arangamani.kannan@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
module JenkinsApi
  class Client
    # Client gem release version.
    VERSION = "0.2.1"
  end
end
bumped version to 0.3.0
#
# Copyright (c) 2012 Kannan Manickam <arangamani.kannan@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
module JenkinsApi
  class Client
    # Semantic version components; PRE is nil for final releases.
    MAJOR = 0
    MINOR = 3
    TINY = 0
    PRE = nil

    # Dotted version string, e.g. "0.3.0" (or "0.3.0.rc1" when PRE is set).
    VERSION = begin
      parts = [MAJOR, MINOR, TINY]
      parts << PRE unless PRE.nil?
      parts.join('.')
    end
  end
end
|
module JquerySlickRails
  # Gem release version (tracks the bundled slick version plus a patch).
  VERSION = "1.5.8.1"
end
Version 1.5.8.2. Looks like we have to stick with the original eot font file to avoid issues in IE
module JquerySlickRails
  # Gem release version (tracks the bundled slick version plus a patch).
  VERSION = "1.5.8.2"
end
|
require_relative "./data_structures/string"
require_relative "./data_structures/number"
module JsonTestData
  # Generates example JSON documents from a (subset of) JSON Schema.
  class JsonSchema
    attr_accessor :schema

    # schema - a JSON Schema document as a JSON string.
    def initialize(schema)
      @schema = JSON.parse(schema, symbolize_names: true)
    end

    # Returns a JSON string of example data conforming to the schema.
    def generate_example
      generate(schema).to_json
    end

    private

    # Produces an example scalar for a non-object, non-array schema node.
    def generate_data(obj)
      case obj.fetch(:type)
      when "number"
        JsonTestData::Number.create(obj)
      when "integer"
        JsonTestData::Number.create(obj)
      when "boolean"
        true
      when "string"
        JsonTestData::String.create(obj)
      end
    end

    # Dispatches on the node's :type.
    def generate(obj)
      if is_object?(obj)
        generate_object(obj)
      elsif is_array?(obj)
        generate_array(obj)
      else
        generate_data(obj)
      end
    end

    def is_object?(property)
      property.fetch(:type, nil) == "object"
    end

    def is_array?(property)
      property.fetch(:type, nil) == "array"
    end

    # Builds an example hash for an "object" node, honouring :minProperties.
    def generate_object(object)
      obj = {}
      if object.has_key?(:properties)
        object.fetch(:properties).each do |k, v|
          # BUG FIX: properties without a :type were assigned nil and then
          # unconditionally overwritten by generate(v), which raised
          # KeyError inside generate_data. Only generate when :type exists.
          obj[k] = v.has_key?(:type) ? generate(v) : nil
        end
      end
      if object.has_key?(:minProperties) && obj.length < object.fetch(:minProperties, 0)
        (object.fetch(:minProperties) - obj.length).times do
          key = JsonTestData::String.create({type: "string"})
          obj[key] = nil
        end
      end
      obj.size == 0 ? {} : obj
    end

    # Builds an example array for an "array" node, honouring :minItems
    # (defaults to 1). NOTE: records the default by mutating the node.
    def generate_array(object)
      object.fetch(:minItems, nil) || object[:minItems] = 1
      if object.fetch(:items, nil) && object.fetch(:items).has_key?(:type)
        Array.new(object.fetch(:minItems)).map { generate(object.fetch(:items)) }
      else
        Array.new(object.fetch(:minItems))
      end
    end
  end
end
Delete unused block argument
require_relative "./data_structures/string"
require_relative "./data_structures/number"
module JsonTestData
  # Generates example JSON documents from a (subset of) JSON Schema.
  class JsonSchema
    attr_accessor :schema

    # schema - a JSON Schema document as a JSON string.
    def initialize(schema)
      @schema = JSON.parse(schema, symbolize_names: true)
    end

    # Returns a JSON string of example data conforming to the schema.
    def generate_example
      generate(schema).to_json
    end

    private

    # Produces an example scalar for a non-object, non-array schema node.
    # Raises KeyError when the node has no :type.
    def generate_data(obj)
      case obj.fetch(:type)
      when "number"
        JsonTestData::Number.create(obj)
      when "integer"
        JsonTestData::Number.create(obj)
      when "boolean"
        true
      when "string"
        JsonTestData::String.create(obj)
      end
    end

    # Dispatches on the node's :type.
    def generate(obj)
      if is_object?(obj)
        generate_object(obj)
      elsif is_array?(obj)
        generate_array(obj)
      else
        generate_data(obj)
      end
    end

    def is_object?(property)
      property.fetch(:type, nil) == "object"
    end

    def is_array?(property)
      property.fetch(:type, nil) == "array"
    end

    # Builds an example hash for an "object" node, honouring :minProperties.
    def generate_object(object)
      obj = {}
      if object.has_key?(:properties)
        object.fetch(:properties).each do |k, v|
          # NOTE(review): when v has no :type this nil is immediately
          # overwritten by generate(v) below, which raises KeyError in
          # generate_data — the guard looks intended to skip generation
          # for untyped properties; confirm and fix.
          obj[k] = nil unless v.has_key?(:type)
          obj[k] = generate(v)
        end
      end
      if object.has_key?(:minProperties) && obj.length < object.fetch(:minProperties, 0)
        (object.fetch(:minProperties) - obj.length).times do
          key = JsonTestData::String.create({type: "string"})
          obj[key] = nil
        end
      end
      obj.size == 0 ? {} : obj
    end

    # Builds an example array for an "array" node, honouring :minItems
    # (defaults to 1). NOTE: records the default by mutating the node.
    def generate_array(object)
      object.fetch(:minItems, nil) || object[:minItems] = 1
      if object.fetch(:items, nil) && object.fetch(:items).has_key?(:type)
        Array.new(object.fetch(:minItems)).map { generate(object.fetch(:items)) }
      else
        Array.new(object.fetch(:minItems))
      end
    end
  end
end
|
module JsonapiCompliable
  # Gem release version.
  VERSION = "0.6.3"
end
bump version
module JsonapiCompliable
  # Gem release version.
  VERSION = "0.6.4"
end
|
module Lambra
  # Compiles and executes Lambra source on the Rubinius VM.
  class CodeLoader
    # Parses and runs a string of Lambra code.
    def self.evaluate(string)
      ast = Lambra::Parser.parse string
      execute(ast)
    end

    # Compiles the AST to Rubinius bytecode and runs it inside the global
    # scope. NOTE(review): relies on Rubinius-internal APIs
    # (CompiledMethod, StaticScope, BlockEnvironment) — version sensitive.
    def self.execute(ast)
      visitor = BytecodeCompiler.new
      gen = visitor.compile(ast)
      gen.encode
      cm = gen.package Rubinius::CompiledMethod
      require_relative '../bootstrap'
      env = GlobalScope
      # Prefer the AST's filename (set by execute_file) for diagnostics.
      file = if ast.respond_to?(:filename) && ast.filename
        ast.filename
      else
        '(eval)'
      end
      line, binding, instance = ast.line, env.send(:binding), env
      # cm = Noscript::Compiler.compile_eval(code, binding.variables, file, line)
      cm.scope = Rubinius::StaticScope.new(GlobalScope)
      cm.name = :__lambra__
      script = Rubinius::CompiledMethod::Script.new(cm, file, true)
      be = Rubinius::BlockEnvironment.new
      script.eval_binding = binding
      # script.eval_source = string
      cm.scope.script = script
      be.under_context(binding.variables, cm)
      be.from_eval!
      be.call_on_instance(instance)
    end

    # Parses and runs a Lambra source file, tagging the AST with its
    # filename for error reporting.
    def self.execute_file(name)
      ast = Lambra::Parser.parse IO.read(name)
      ast.define_singleton_method(:filename) { name }
      execute(ast)
    end
  end
end
Make it work with rbx-head
module Lambra
  # Compiles and executes Lambra source on the Rubinius VM.
  class CodeLoader
    # Parses and runs a string of Lambra code.
    def self.evaluate(string)
      ast = Lambra::Parser.parse string
      execute(ast)
    end

    # Compiles the AST to Rubinius bytecode and runs it inside the global
    # scope. NOTE(review): uses Rubinius-internal APIs; this variant omits
    # eval_binding/from_eval! setup compared with older revisions.
    def self.execute(ast)
      visitor = BytecodeCompiler.new
      gen = visitor.compile(ast)
      gen.encode
      cm = gen.package Rubinius::CompiledMethod
      require_relative '../bootstrap'
      env = GlobalScope
      # Prefer the AST's filename (set by execute_file) for diagnostics.
      file = if ast.respond_to?(:filename) && ast.filename
        ast.filename
      else
        '(eval)'
      end
      line, binding, instance = ast.line, env.send(:binding), env
      # cm = Noscript::Compiler.compile_eval(code, binding.variables, file, line)
      cm.scope = Rubinius::StaticScope.new(GlobalScope)
      cm.name = :__lambra__
      script = Rubinius::CompiledMethod::Script.new(cm, file, true)
      be = Rubinius::BlockEnvironment.new
      cm.scope.script = script
      be.under_context(binding.variables, cm)
      be.call_on_instance(instance)
    end

    # Parses and runs a Lambra source file, tagging the AST with its
    # filename for error reporting.
    def self.execute_file(name)
      ast = Lambra::Parser.parse IO.read(name)
      ast.define_singleton_method(:filename) { name }
      execute(ast)
    end
  end
end
|
module LLT
  class Diff::Parser
    # Shared reporting behaviour for diff statistic containers: tracks how
    # often an element occurred (+total+) plus diff counters (+right+,
    # +wrong+, +unique+), and supports merging, sorting and XML output.
    module Reportable
      include HashContainable

      attr_reader :id, :total, :right, :wrong, :unique

      def initialize(id, total = 1)
        super(id)
        @total = total
      end

      # Resets the diff counters here and in all nested elements.
      def init_diff
        @wrong = 0
        @unique = 0
        each { |_, el| el.init_diff }
      end

      # Merges +element+ into the container: sums totals for known ids and
      # recurses into nested elements; unknown ids are stored as-is.
      def add(element)
        if el = @container[element.id]
          el.add_total(element)
          element.container.each do |_, nested_el|
            el.add(nested_el)
          end
        else
          @container[element.id] = element
        end
      end

      def add_total(element)
        @total += element.total
      end

      def increment
        @total += 1
      end

      # Tag name derived from the including class, e.g. Foo::Bar => "bar".
      def xml_tag
        self.class.name.scan(/::(\w+)$/)[0].first.downcase
      end

      def xml_attributes
        { name: @id, total: @total, right: @right, wrong: @wrong, unique: @unique }
      end

      # Returns the container sorted by descending total, then by id.
      def sort
        Hash[
          @container.sort do |(a_id, a_r), (b_id, b_r)|
            comp = b_r.total <=> a_r.total
            comp.zero? ? a_id <=> b_id : comp
          end
        ]
      end

      def sort!
        each { |_, el| el.sort! }
        @container = sort
      end

      # This could be implemented with a block as well (which holds
      # whatever code needs to be performed on the cloned instance),
      # but probably not a good idea as this is called very often - keep
      # it as lean as possible.
      def clone
        cloned = super
        cloned.replace_with_clone(:container)
        cloned
      end
    end
  end
end
Add Reportable#add_wrong
module LLT
  class Diff::Parser
    # Shared reporting behaviour for diff statistic containers: tracks how
    # often an element occurred (+total+) plus diff counters (+right+,
    # +wrong+, +unique+), and supports merging, sorting and XML output.
    module Reportable
      include HashContainable

      attr_reader :id, :total, :right, :wrong, :unique

      def initialize(id, total = 1)
        super(id)
        @total = total
      end

      # Resets the diff counters here and in all nested elements.
      def init_diff
        @wrong = 0
        @unique = 0
        each { |_, el| el.init_diff }
      end

      # Merges +element+ into the container: sums totals for known ids and
      # recurses into nested elements; unknown ids are stored as-is.
      def add(element)
        if el = @container[element.id]
          el.add_total(element)
          element.container.each do |_, nested_el|
            el.add(nested_el)
          end
        else
          @container[element.id] = element
        end
      end

      def add_total(element)
        @total += element.total
      end

      # Records a wrong diff result, optionally counting it as unique too.
      def add_wrong(unique = nil)
        @wrong += 1
        @unique += 1 if unique
      end

      def increment
        @total += 1
      end

      # Tag name derived from the including class, e.g. Foo::Bar => "bar".
      def xml_tag
        self.class.name.scan(/::(\w+)$/)[0].first.downcase
      end

      def xml_attributes
        { name: @id, total: @total, right: @right, wrong: @wrong, unique: @unique }
      end

      # Returns the container sorted by descending total, then by id.
      def sort
        Hash[
          @container.sort do |(a_id, a_r), (b_id, b_r)|
            comp = b_r.total <=> a_r.total
            comp.zero? ? a_id <=> b_id : comp
          end
        ]
      end

      def sort!
        each { |_, el| el.sort! }
        @container = sort
      end

      # This could be implemented with a block as well (which holds
      # whatever code needs to be performed on the cloned instance),
      # but probably not a good idea as this is called very often - keep
      # it as lean as possible.
      def clone
        cloned = super
        cloned.replace_with_clone(:container)
        cloned
      end
    end
  end
end
|
require "logstash/inputs/base"
require "logstash/namespace"
# Receive events using the lumberjack protocol.
#
# This is mainly to receive events shipped with lumberjack,
# <http://github.com/jordansissel/lumberjack>
class LogStash::Inputs::Lumberjack < LogStash::Inputs::Base
  config_name "lumberjack"
  plugin_status "experimental"

  # the address to listen on.
  config :host, :validate => :string, :default => "0.0.0.0"

  # the port to listen on.
  config :port, :validate => :number, :required => true

  # ssl certificate to use
  config :ssl_certificate, :validate => :string, :required => true

  # ssl key to use
  config :ssl_key, :validate => :string, :required => true

  # ssl key passphrase to use
  config :ssl_key_passphrase, :validate => :password

  # TODO(sissel): Add CA to authenticate clients with.

  public
  # Sets up the lumberjack TLS server on the configured host/port.
  def register
    require "lumberjack/server"
    @logger.info("Starting lumberjack input listener", :address => "#{@host}:#{@port}")
    @lumberjack = Lumberjack::Server.new(:address => @host, :port => @port,
      :ssl_certificate => @ssl_certificate, :ssl_key => @ssl_key,
      :ssl_key_passphrase => @ssl_key_passphrase)
  end # def register

  public
  # Receives lumberjack events forever, converting each into a logstash
  # event pushed onto +output_queue+. Strings are forced to UTF-8 because
  # the wire protocol delivers raw bytes.
  def run(output_queue)
    @lumberjack.run do |l|
      # "host" and "file" identify the shipper and form the event source.
      source = "lumberjack://#{l.delete("host")}/#{l.delete("file")}".force_encoding("UTF-8")
      event = to_event(l.delete("line").force_encoding("UTF-8"), source)
      # take any remaining fields in the lumberjack event and merge it as a
      # field in the logstash event.
      l.each do |key, value|
        event[key.force_encoding("UTF-8")] = value.force_encoding("UTF-8")
      end
      output_queue << event
    end
  end # def run
end # class LogStash::Inputs::Lumberjack
- don't force encoding, lumberjack does that now internally.
require "logstash/inputs/base"
require "logstash/namespace"
# Receive events using the lumberjack protocol.
#
# This is mainly to receive events shipped with lumberjack,
# <http://github.com/jordansissel/lumberjack>
class LogStash::Inputs::Lumberjack < LogStash::Inputs::Base
  config_name "lumberjack"
  plugin_status "experimental"

  # the address to listen on.
  config :host, :validate => :string, :default => "0.0.0.0"

  # the port to listen on.
  config :port, :validate => :number, :required => true

  # ssl certificate to use
  config :ssl_certificate, :validate => :string, :required => true

  # ssl key to use
  config :ssl_key, :validate => :string, :required => true

  # ssl key passphrase to use
  config :ssl_key_passphrase, :validate => :password

  # TODO(sissel): Add CA to authenticate clients with.

  public
  # Sets up the lumberjack TLS server on the configured host/port.
  def register
    require "lumberjack/server"
    @logger.info("Starting lumberjack input listener", :address => "#{@host}:#{@port}")
    @lumberjack = Lumberjack::Server.new(:address => @host, :port => @port,
      :ssl_certificate => @ssl_certificate, :ssl_key => @ssl_key,
      :ssl_key_passphrase => @ssl_key_passphrase)
  end # def register

  public
  # Receives lumberjack events forever, converting each into a logstash
  # event pushed onto +output_queue+. Encoding is handled by the
  # lumberjack library, so no force_encoding is needed here.
  def run(output_queue)
    @lumberjack.run do |l|
      # "host" and "file" identify the shipper and form the event source.
      source = "lumberjack://#{l.delete("host")}/#{l.delete("file")}"
      event = to_event(l.delete("line"), source)
      # take any remaining fields in the lumberjack event and merge it as a
      # field in the logstash event.
      l.each do |key, value|
        event[key] = value
      end
      output_queue << event
    end
  end # def run
end # class LogStash::Inputs::Lumberjack
|
# A document stored in MongoDB whose binary payload and thumbnail live in
# GridFS; full-text search is provided by Tire and heavy processing runs
# in a background Resque task.
class Document
  include Mongoid::Document
  include Mongoid::Timestamps
  include Mongoid::Pagination
  include Finder

  field :title, type: String
  field :category, type: String
  field :published_at, type: Date
  field :description, type: String
  field :original_title, type: String
  field :original_filename, type: String
  field :information, type: Hash
  field :fontspecs, type: Hash, default: {}
  field :last_analysis_at, type: Time
  field :processed_text, type: String
  field :state, type: Symbol, default: :waiting
  field :public, type: Boolean, default: false
  field :percentage, type: Integer, default: 0
  field :file_id, type: Moped::BSON::ObjectId
  field :thumbnail_file_id, type: Moped::BSON::ObjectId

  belongs_to :project
  has_many :pages
  has_many :fact_registers
  has_many :named_entities
  has_and_belongs_to_many :people, index: true

  validates_presence_of :file_id
  validates_presence_of :original_filename

  before_save :set_default_title
  after_create :enqueue_process
  after_destroy :destroy_gridfs_files

  scope :public, -> { where(public: true) }

  include Tire::Model::Search
  include Tire::Model::Callbacks

  tire do
    mapping do
      indexes :title, analyzer: "snowball", boost: 100
      indexes :original_title, analyzer: "snowball", boost: 90
      indexes :pages, analyzer: "snowball"
    end
  end

  # JSON document sent to the search index: titles plus each page's text
  # (keyed by page number) with HTML tags stripped.
  def to_indexed_json
    fields = {
      title: title,
      original_title: original_title,
      pages: {},
    }
    pages.each do |page|
      fields[:pages][page.num] = page.text.gsub(/<[^<]+?>/, "")
    end
    fields.to_json
  end

  # GridFS file holding the original upload, or nil when not yet stored.
  def file
    if file_id
      Mongoid::GridFS.namespace_for(:documents).get(file_id)
    end
  end

  def file=(file_or_path)
    fs = Mongoid::GridFS.namespace_for(:documents).put(file_or_path)
    self.file_id = fs.id
    fs
  end

  # GridFS file holding the thumbnail, or nil when not yet generated.
  def thumbnail_file
    if thumbnail_file_id
      Mongoid::GridFS.namespace_for(:thumbnails).get(thumbnail_file_id)
    end
  end

  def thumbnail_file=(file_or_path)
    fs = Mongoid::GridFS.namespace_for(:thumbnails).put(file_or_path)
    self.thumbnail_file_id = fs.id
    fs
  end

  def readable?
    true
  end

  def geocoded?
    true
  end

  def exportable?
    true
  end

  def processed?
    true
  end

  def completed?
    percentage == 100
  end

  protected

  # Falls back to the uploaded file name when no title was provided.
  def set_default_title
    if self.title.blank?
      self.title = self.original_filename
    end
  end

  def enqueue_process
    logger.info "Enqueue processing task for document with id #{id}"
    Resque.enqueue(DocumentProcessBootstrapTask, id)
  end

  # BUG FIX: only destroy GridFS files that actually exist. A document
  # destroyed before processing finishes may lack a payload or thumbnail,
  # and calling destroy on nil raised NoMethodError.
  def destroy_gridfs_files
    file.destroy if file
    thumbnail_file.destroy if thumbnail_file
  end
end
document: Only destroy GridFS files if they exist
# A document stored in MongoDB whose binary payload and thumbnail live in
# GridFS; full-text search is provided by Tire and heavy processing runs
# in a background Resque task.
class Document
  include Mongoid::Document
  include Mongoid::Timestamps
  include Mongoid::Pagination
  include Finder

  field :title, type: String
  field :category, type: String
  field :published_at, type: Date
  field :description, type: String
  field :original_title, type: String
  field :original_filename, type: String
  field :information, type: Hash
  field :fontspecs, type: Hash, default: {}
  field :last_analysis_at, type: Time
  field :processed_text, type: String
  field :state, type: Symbol, default: :waiting
  field :public, type: Boolean, default: false
  field :percentage, type: Integer, default: 0
  field :file_id, type: Moped::BSON::ObjectId
  field :thumbnail_file_id, type: Moped::BSON::ObjectId

  belongs_to :project
  has_many :pages
  has_many :fact_registers
  has_many :named_entities
  has_and_belongs_to_many :people, index: true

  validates_presence_of :file_id
  validates_presence_of :original_filename

  before_save :set_default_title
  after_create :enqueue_process
  after_destroy :destroy_gridfs_files

  scope :public, -> { where(public: true) }

  include Tire::Model::Search
  include Tire::Model::Callbacks

  tire do
    mapping do
      indexes :title, analyzer: "snowball", boost: 100
      indexes :original_title, analyzer: "snowball", boost: 90
      indexes :pages, analyzer: "snowball"
    end
  end

  # JSON document sent to the search index: titles plus each page's text
  # (keyed by page number) with HTML tags stripped.
  def to_indexed_json
    fields = {
      title: title,
      original_title: original_title,
      pages: {},
    }
    pages.each do |page|
      fields[:pages][page.num] = page.text.gsub(/<[^<]+?>/, "")
    end
    fields.to_json
  end

  # GridFS file holding the original upload, or nil when not yet stored.
  def file
    if file_id
      Mongoid::GridFS.namespace_for(:documents).get(file_id)
    end
  end

  def file=(file_or_path)
    fs = Mongoid::GridFS.namespace_for(:documents).put(file_or_path)
    self.file_id = fs.id
    fs
  end

  # GridFS file holding the thumbnail, or nil when not yet generated.
  def thumbnail_file
    if thumbnail_file_id
      Mongoid::GridFS.namespace_for(:thumbnails).get(thumbnail_file_id)
    end
  end

  def thumbnail_file=(file_or_path)
    fs = Mongoid::GridFS.namespace_for(:thumbnails).put(file_or_path)
    self.thumbnail_file_id = fs.id
    fs
  end

  def readable?
    true
  end

  def geocoded?
    true
  end

  def exportable?
    true
  end

  def processed?
    true
  end

  def completed?
    percentage == 100
  end

  protected

  # Falls back to the uploaded file name when no title was provided.
  def set_default_title
    if self.title.blank?
      self.title = self.original_filename
    end
  end

  def enqueue_process
    logger.info "Enqueue processing task for document with id #{id}"
    Resque.enqueue(DocumentProcessBootstrapTask, id)
  end

  # Only destroy GridFS files that actually exist; half-processed
  # documents may have no payload or thumbnail yet.
  def destroy_gridfs_files
    file.destroy if file
    thumbnail_file.destroy if thumbnail_file
  end
end
|
use gem/rip versions of deps instead of vendoring
module Sprockets
class SourceFile
# Reads the file once and returns its SourceLine objects, flagging every
# line that belongs to a PDoc comment block. Memoized in @lines.
def source_lines
  @lines ||= begin
    lines = []
    comments = []
    File.open(pathname.absolute_location, 'rb') do |file|
      file.each do |line|
        lines << line = SourceLine.new(self, line, file.lineno)
        # Accumulate lines from the start of a PDoc comment onwards.
        if line.begins_pdoc_comment? || comments.any?
          comments << line
        end
        # When the multiline comment closes, mark the collected lines as
        # comments only if it was a PDoc comment, then start over.
        if line.ends_multiline_comment?
          if line.ends_pdoc_comment?
            comments.each { |l| l.comment! }
          end
          comments.clear
        end
      end
    end
    lines
  end
end
end
end |
module MiqToolsServices
  # Thin wrapper around the minigit gem exposing higher-level git queries
  # used by the tools services.
  class MiniGit
    include ServiceMixin

    # All MiniGit methods return stdout which always has a trailing newline
    # that is never wanted, so remove it always.
    def delegate_to_service(method_name, *args)
      super.chomp
    end

    attr_reader :path_to_repo

    def initialize(path_to_repo)
      @path_to_repo = path_to_repo
      service # initialize the service
    end

    # Lazily builds the capturing MiniGit client for the repo.
    def service
      @service ||= begin
        require 'minigit'
        ::MiniGit.debug = true
        ::MiniGit::Capturing.new(File.expand_path(path_to_repo))
      end
    end

    # Bugzilla ids referenced in the commit message of +ref+.
    def bugzilla_ids(ref)
      Bugzilla.ids_in_git_commit_message(commit_message(ref))
    end

    # Checks out +ref+ for the duration of the block, restoring the
    # original branch afterwards (even on error).
    def temporarily_checkout(ref)
      ref = ref_name(ref)
      orig_ref = current_branch
      checkout(ref) unless ref == orig_ref
      yield
    ensure
      checkout(orig_ref) unless ref == orig_ref
    end

    # SHAs reachable from +ref+ but not from +since_commit+, oldest first.
    def new_commits(since_commit, ref = "HEAD")
      rev_list({:reverse => true}, "#{since_commit}..#{ref}").split("\n")
    end

    def commit_message(commit)
      show({:pretty => "fuller"}, "--stat", "--summary", commit)
    end

    # Symbolic name for +ref+, falling back to the ref itself.
    def ref_name(ref)
      name = rev_parse("--abbrev-ref", ref)
      name.empty? ? ref : name
    end

    def author_name(ref)
      log("-1", "--format=\"%an\"", ref)
    end

    def author_email(ref)
      log("-1", "--format=\"%ae\"", ref)
    end

    def subject(ref)
      log("-1", "--format=\"%s\"", ref)
    end

    # Current branch name, or the SHA when in detached HEAD state.
    def current_branch
      ref = ref_name("HEAD")
      ref == "HEAD" ? current_ref : ref
    end

    def current_ref
      rev_parse("HEAD")
    end

    # All local branch names, with the current-branch "*" marker stripped.
    def branches
      branch.split("\n").collect do |b|
        b = b[1..-1] if b.start_with?("*")
        b.strip
      end
    end

    def destroy_branch(branch_name)
      branch("-D", branch_name)
    end

    # Maps each file changed between the two commits to the line numbers
    # added or retained in the new version. With one argument, diffs
    # against its parent.
    def diff_details(commit1, commit2 = nil)
      if commit2.nil?
        commit2 = commit1
        commit1 = "#{commit1}~"
      end
      output = diff("--patience", "-U0", "--no-color", "#{commit1}...#{commit2}")
      ret = Hash.new { |h, k| h[k] = [] }
      path = line_number = nil
      # each_with_object returns ret, which is this method's return value.
      output.lines.each_with_object(ret) do |line, h|
        case line
        when /^--- (?:a\/)?/
          next
        when /^\+\+\+ (?:b\/)?(.+)/
          path = $1.chomp
        when /^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/
          line_number = $1.to_i
        when /^([ +-])/
          if $1 != "-"
            h[path] << line_number
            line_number += 1
          end
        end
      end
    end

    # File names changed between the two commits (or against the parent).
    def diff_file_names(commit1, commit2 = nil)
      if commit2.nil?
        commit2 = commit1
        commit1 = "#{commit1}~"
      end
      diff("--name-only", "#{commit1}...#{commit2}").split
    end

    #
    # Pull Request specific methods
    #

    def self.pr_branch(pr_number)
      "pr/#{pr_number}"
    end
    delegate :pr_branch, :to => :class

    def self.pr_number(branch)
      branch.split("/").last.to_i
    end
    delegate :pr_number, :to => :class

    # NOTE(review): returns MatchData/nil rather than a strict boolean.
    def self.pr_branch?(branch)
      branch =~ %r{^pr/\d+$}
    end

    def pr_branch?(branch = nil)
      branch ||= current_branch
      self.class.pr_branch?(branch)
    end

    # True when +branch+ merges cleanly into +into_branch+; the trial
    # merge is always aborted.
    def mergeable?(branch = nil, into_branch = "master")
      branch ||= current_branch
      temporarily_checkout(into_branch) do
        begin
          merge("--no-commit", "--no-ff", branch)
          return true
        rescue ::MiniGit::GitError
          return false
        ensure
          merge("--abort")
        end
      end
    end

    def update_pr_branch(branch = nil, remote = "upstream")
      create_or_update_pr_branch(branch || current_branch, remote)
    end

    def create_pr_branch(branch, remote = "upstream")
      create_or_update_pr_branch(branch, remote)
    end

    private

    # Fetches the PR head into a local pr/<n> branch and hard-resets.
    def create_or_update_pr_branch(branch, remote)
      fetch("-fu", remote, "refs/pull/#{pr_number(branch)}/head:#{branch}")
      reset("--hard")
    end
  end
end
Read the difference between two commits one line at a time.
For large changes between two commits, reading the whole difference
into memory is expensive.
module MiqToolsServices
  # Thin wrapper around the minigit gem exposing higher-level git queries
  # used by the tools services.
  class MiniGit
    include ServiceMixin

    # All MiniGit methods return stdout which always has a trailing newline
    # that is never wanted, so remove it always.
    def delegate_to_service(method_name, *args)
      super.chomp
    end

    attr_reader :path_to_repo

    def initialize(path_to_repo)
      @path_to_repo = path_to_repo
      service # initialize the service
    end

    # Lazily builds the capturing MiniGit client for the repo.
    def service
      @service ||= begin
        require 'minigit'
        ::MiniGit.debug = true
        ::MiniGit::Capturing.new(File.expand_path(path_to_repo))
      end
    end

    # Bugzilla ids referenced in the commit message of +ref+.
    def bugzilla_ids(ref)
      Bugzilla.ids_in_git_commit_message(commit_message(ref))
    end

    # Checks out +ref+ for the duration of the block, restoring the
    # original branch afterwards (even on error).
    def temporarily_checkout(ref)
      ref = ref_name(ref)
      orig_ref = current_branch
      checkout(ref) unless ref == orig_ref
      yield
    ensure
      checkout(orig_ref) unless ref == orig_ref
    end

    # SHAs reachable from +ref+ but not from +since_commit+, oldest first.
    def new_commits(since_commit, ref = "HEAD")
      rev_list({:reverse => true}, "#{since_commit}..#{ref}").split("\n")
    end

    def commit_message(commit)
      show({:pretty => "fuller"}, "--stat", "--summary", commit)
    end

    # Symbolic name for +ref+, falling back to the ref itself.
    def ref_name(ref)
      name = rev_parse("--abbrev-ref", ref)
      name.empty? ? ref : name
    end

    def author_name(ref)
      log("-1", "--format=\"%an\"", ref)
    end

    def author_email(ref)
      log("-1", "--format=\"%ae\"", ref)
    end

    def subject(ref)
      log("-1", "--format=\"%s\"", ref)
    end

    # Current branch name, or the SHA when in detached HEAD state.
    def current_branch
      ref = ref_name("HEAD")
      ref == "HEAD" ? current_ref : ref
    end

    def current_ref
      rev_parse("HEAD")
    end

    # All local branch names, with the current-branch "*" marker stripped.
    def branches
      branch.split("\n").collect do |b|
        b = b[1..-1] if b.start_with?("*")
        b.strip
      end
    end

    def destroy_branch(branch_name)
      branch("-D", branch_name)
    end

    # Maps each file changed between the two commits to the line numbers
    # added or retained in the new version. The diff is consumed one line
    # at a time so a large diff is not duplicated in memory.
    def diff_details(commit1, commit2 = nil)
      if commit2.nil?
        commit2 = commit1
        commit1 = "#{commit1}~"
      end
      output = diff("--patience", "-U0", "--no-color", "#{commit1}...#{commit2}")
      ret = Hash.new { |h, k| h[k] = [] }
      path = line_number = nil
      output.each_line do |line|
        case line
        when /^--- (?:a\/)?/
          next
        when /^\+\+\+ (?:b\/)?(.+)/
          path = $1.chomp
        when /^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/
          line_number = $1.to_i
        when /^([ +-])/
          if $1 != "-"
            ret[path] << line_number
            line_number += 1
          end
        end
      end
      ret
    end

    # File names changed between the two commits (or against the parent).
    def diff_file_names(commit1, commit2 = nil)
      if commit2.nil?
        commit2 = commit1
        commit1 = "#{commit1}~"
      end
      diff("--name-only", "#{commit1}...#{commit2}").split
    end

    #
    # Pull Request specific methods
    #

    def self.pr_branch(pr_number)
      "pr/#{pr_number}"
    end
    delegate :pr_branch, :to => :class

    def self.pr_number(branch)
      branch.split("/").last.to_i
    end
    delegate :pr_number, :to => :class

    # NOTE(review): returns MatchData/nil rather than a strict boolean.
    def self.pr_branch?(branch)
      branch =~ %r{^pr/\d+$}
    end

    def pr_branch?(branch = nil)
      branch ||= current_branch
      self.class.pr_branch?(branch)
    end

    # True when +branch+ merges cleanly into +into_branch+; the trial
    # merge is always aborted.
    def mergeable?(branch = nil, into_branch = "master")
      branch ||= current_branch
      temporarily_checkout(into_branch) do
        begin
          merge("--no-commit", "--no-ff", branch)
          return true
        rescue ::MiniGit::GitError
          return false
        ensure
          merge("--abort")
        end
      end
    end

    def update_pr_branch(branch = nil, remote = "upstream")
      create_or_update_pr_branch(branch || current_branch, remote)
    end

    def create_pr_branch(branch, remote = "upstream")
      create_or_update_pr_branch(branch, remote)
    end

    private

    # Fetches the PR head into a local pr/<n> branch and hard-resets.
    def create_or_update_pr_branch(branch, remote)
      fetch("-fu", remote, "refs/pull/#{pr_number(branch)}/head:#{branch}")
      reset("--hard")
    end
  end
end
# Define built-in column types, with default values for valid attributes
ModalFields.define do
  string :limit=>255
  text :limit=>nil
  integer :limit=>nil
  float
  decimal :scale=>nil, :precision=>nil
  datetime
  time
  date
  binary :limit=>nil
  boolean
end

# Treat :timestamp columns as :datetime fields.
ModalFields.alias :timestamp=>:datetime
Add timestamp to the predefined field types
# Define built-in column types, with default values for valid attributes
ModalFields.define do
  string :limit=>255
  text :limit=>nil
  integer :limit=>nil
  float
  decimal :scale=>nil, :precision=>nil
  datetime
  time
  date
  binary :limit=>nil
  boolean
  timestamp
end

# Keep the legacy mapping of :timestamp columns onto :datetime as well.
ModalFields.alias :timestamp=>:datetime
|
require "scan_info_search_index"
module Groonga
class ScanInfoData
# Range of expression codes covered by this scan entry.
attr_accessor :start
attr_accessor :end
# Match operator (e.g. MATCH/NEAR/SIMILAR/QUORUM) and how this entry
# combines with the previous one (AND/OR/...).
attr_accessor :op
attr_accessor :logical_op
attr_accessor :query
attr_accessor :args
attr_accessor :search_indexes
attr_accessor :flags
# Operator-specific parameters: NEAR interval, SIMILAR threshold,
# QUORUM threshold.
attr_accessor :max_interval
attr_accessor :similarity_threshold
attr_accessor :quorum_threshold
attr_accessor :start_position
attr_accessor :weight
# Creates an empty scan entry starting (and initially ending) at code
# offset +start+, with neutral operator defaults.
def initialize(start)
  @start = start
  @end = start
  @op = Operator::NOP
  @logical_op = Operator::OR
  @query = nil
  @args = []
  @search_indexes = []
  @flags = ScanInfo::Flags::PUSH
  @max_interval = nil
  @similarity_threshold = nil
  @quorum_threshold = nil
  @start_position = nil
  @weight = 0
end
# Chooses the index-resolution strategy based on the match operator.
def match_resolve_index
  if near_search?
    match_near_resolve_index
  elsif similar_search?
    match_similar_resolve_index
  elsif quorum_match?
    match_quorum_resolve_index
  else
    match_generic_resolve_index
  end
end
# Resolves indexes for a selector procedure call: the first arg is the
# procedure itself, the rest are its operands.
def call_relational_resolve_indexes
  procedure, *args = *@args
  return unless procedure.selector?
  selector_op = procedure.selector_operator
  args.each do |arg|
    call_relational_resolve_index(arg, selector_op)
  end
end
private
# A NEAR/NEAR2 search takes exactly (target, query, max_interval).
def near_search?
  (@op == Operator::NEAR or @op == Operator::NEAR2) and @args.size == 3
end

# Resolves the index for a NEAR search and extracts query/max interval.
def match_near_resolve_index
  arg = @args[0]
  case arg
  when Expression
    match_resolve_index_expression(arg)
  when Accessor
    match_resolve_index_accessor(arg)
  when Indexable
    match_resolve_index_indexable(arg)
  else
    message =
      "The first argument of NEAR/NEAR2 must be Expression, Accessor or Indexable: #{arg.class}"
    raise ErrorMessage, message
  end
  self.query = @args[1]
  self.max_interval = @args[2].value
end

# A SIMILAR search takes exactly (target, query, similarity_threshold).
def similar_search?
  @op == Operator::SIMILAR and @args.size == 3
end

# Resolves the index for a SIMILAR search and extracts query/threshold.
def match_similar_resolve_index
  arg = @args[0]
  case arg
  when Expression
    match_resolve_index_expression(arg)
  when Accessor
    match_resolve_index_accessor(arg)
  when IndexColumn
    match_resolve_index_index_column(arg)
  when Indexable
    match_resolve_index_indexable(arg)
  else
    message =
      "The first argument of SIMILAR must be Expression, Accessor or Indexable: #{arg.class}"
    raise ErrorMessage, message
  end
  self.query = @args[1]
  self.similarity_threshold = @args[2].value
end

# A QUORUM match takes exactly (target, query, quorum_threshold).
def quorum_match?
  @op == Operator::QUORUM and @args.size == 3
end
# Resolves the index for a QUORUM match: the first argument is the match
# target, the second the query, the third the quorum threshold.
def match_quorum_resolve_index
  arg = @args[0]
  case arg
  when Expression
    match_resolve_index_expression(arg)
  when Accessor
    match_resolve_index_accessor(arg)
  when Indexable
    match_resolve_index_indexable(arg)
  else
    message =
      "The first argument of QUORUM must be Expression, Accessor or Indexable: #{arg.class}"
    # BUG FIX: was `raise ErrorMesesage` (typo), which raised NameError
    # instead of the intended ErrorMessage (cf. the NEAR/SIMILAR variants).
    raise ErrorMessage, message
  end
  self.query = @args[1]
  self.quorum_threshold = @args[2].value
end
# Generic resolution: each argument is either a match target (resolve its
# index) or the query itself. Index search is disabled afterwards for
# regexps the index cannot answer.
def match_generic_resolve_index
  @args.each do |arg|
    case arg
    when Expression
      match_resolve_index_expression(arg)
    when Accessor
      match_resolve_index_accessor(arg)
    when IndexColumn
      match_resolve_index_index_column(arg)
    when Indexable
      match_resolve_index_indexable(arg)
    when Procedure
      break
    else
      self.query = arg
    end
  end
  if @op == Operator::REGEXP and not index_searchable_regexp?(@query)
    @search_indexes.clear
  end
end
# Returns true when the regexp literal held in +pattern+ is simple enough
# to be answered by an index search: no escape-class metacharacters or
# backreferences, no character classes/alternation/quantifiers/anchors,
# ".*" only as a complete unit, and groups only of the "(?-mix:...)" form
# produced by Regexp#to_s.
def index_searchable_regexp?(pattern)
  return false if pattern.nil?
  paren = :outside
  dot = false
  previous_char = nil
  pattern.value.each_char do |char|
    if previous_char == "\\"
      # Escaped sequences the index cannot evaluate.
      case char
      when "Z"
        return false
      when "b", "B"
        return false
      when "d", "D", "h", "H", "p", "s", "S", "w", "W"
        return false
      when "X"
        return false
      when "k", "g", "1", "2", "3", "4", "5", "6", "7", "8", "9"
        return false
      when "\\"
        # An escaped backslash: consume both characters.
        previous_char = nil
        next
      end
    else
      case paren
      when :starting
        # Only the exact sequence "(?-mix:" is accepted for groups.
        case char
        when "?"
          return false if previous_char != "("
        when "-"
          return false if previous_char != "?"
        when "m"
          return false if previous_char != "-"
        when "i"
          return false if previous_char != "m"
        when "x"
          return false if previous_char != "i"
        when ":"
          return false if previous_char != "x"
          paren = :inside
        else
          return false
        end
      else
        case char
        when "("
          return false unless paren == :outside
          paren = :starting
        when ")"
          return false unless paren == :inside
          paren = :outside
        when "."
          return false if dot
          dot = true
        when "*"
          # "*" is only allowed immediately after ".".
          return false unless dot
          dot = false
        when "[", "]", "|", "?", "+", "{", "}", "^", "$"
          return false
        else
          # A lone "." (not followed by "*") is not index searchable.
          return false if dot
        end
      end
    end
    previous_char = char
  end
  return false if dot
  return false unless paren == :outside
  true
end
# Walks all codes of the match-target expression, resolving indexes for
# each referenced column/accessor/scorer call.
def match_resolve_index_expression(expression)
  codes = expression.codes
  n_codes = codes.size
  i = 0
  while i < n_codes
    i = match_resolve_index_expression_codes(expression, codes, i, n_codes)
  end
end
# Processes expression codes starting at index +i+, registering any search
# index referenced by a column/accessor operand or scorer procedure call.
# Returns the index of the next unprocessed code.
def match_resolve_index_expression_codes(expression, codes, i, n_codes)
  code = codes[i]
  value = code.value
  return i + 1 if value.nil?
  case value
  when Accessor, Column
    index_info, offset =
      match_resolve_index_expression_find_index(expression,
                                                codes, i, n_codes)
    i += offset - 1
    if index_info
      if value.is_a?(Accessor)
        self.flags |= ScanInfo::Flags::ACCESSOR
      end
      weight, offset = codes[i].weight
      i += offset
      put_search_index(index_info.index, index_info.section_id, weight)
    end
  when Procedure
    unless value.scorer?
      # BUG FIX: the message referenced `scorer.name`, but the local
      # `scorer` is only assigned below, so raising here crashed with
      # NoMethodError on nil instead of reporting the procedure name.
      message = "procedure must be scorer: <#{value.name}>"
      raise ErrorMessage, message
    end
    scorer = value
    i += 1
    index_info, offset =
      match_resolve_index_expression_find_index(expression,
                                                codes, i, n_codes)
    i += offset
    if index_info
      # Remember where the scorer's extra arguments begin (0 when the
      # call has none before the CALL code).
      scorer_args_expr_offset = 0
      if codes[i].op != Operator::CALL
        scorer_args_expr_offset = i
      end
      while i < n_codes and codes[i].op != Operator::CALL
        i += 1
      end
      weight, offset = codes[i].weight
      i += offset
      put_search_index(index_info.index,
                       index_info.section_id,
                       weight,
                       scorer,
                       expression,
                       scorer_args_expr_offset)
    end
  when Table
    raise ErrorMessage, "invalid match target: <#{value.name}>"
  end
  i + 1
end
# Finds the index (and section) for the operand at codes[i]. Returns
# [index_info, offset] where offset is how many codes were consumed.
def match_resolve_index_expression_find_index(expression, codes, i, n_codes)
  code = codes[i]
  value = code.value
  index_info = nil
  offset = 1
  case value
  when Accessor
    accessor = value
    index_info = accessor.find_index(@op)
    if index_info
      # Keep the accessor itself as the search target when it has more
      # steps and the found index is not its own object.
      if accessor.have_next? and index_info.index != accessor.object
        index_info = IndexInfo.new(accessor, index_info.section_id)
      end
    end
  when FixedSizeColumn, VariableSizeColumn
    index_info = value.find_index(@op)
  when IndexColumn
    index = value
    section_id = 0
    rest_n_codes = n_codes - i
    # "index[n]" appears as IndexColumn, UINT32/INT32 Bulk, GET_MEMBER;
    # section ids are 1-origin (0 means "no section").
    if rest_n_codes >= 2 and
       codes[i + 1].value.is_a?(Bulk) and
       (codes[i + 1].value.domain_id == ID::UINT32 or
        codes[i + 1].value.domain_id == ID::INT32) and
       codes[i + 2].op == Operator::GET_MEMBER
      section_id = codes[i + 1].value.value + 1
      offset += 2
    end
    index = Context.instance[index.id]
    index_info = IndexInfo.new(index, section_id)
  end
  [index_info, offset]
end
# Registers the index behind an accessor expression code, preferring the
# accessor itself when the chain must be followed at search time.
def match_resolve_index_expression_accessor(expr_code)
  accessor = expr_code.value
  self.flags |= ScanInfo::Flags::ACCESSOR
  index_info = accessor.find_index(op)
  return if index_info.nil?

  section_id = index_info.section_id
  weight = expr_code.weight
  if accessor.next
    put_search_index(accessor, section_id, weight)
  else
    put_search_index(index_info.index, section_id, weight)
  end
end

# Registers the index behind a data-column expression code, if any.
def match_resolve_index_expression_data_column(expr_code)
  column = expr_code.value
  index_info = column.find_index(op)
  return if index_info.nil?
  put_search_index(index_info.index, index_info.section_id, expr_code.weight)
end
# Registers +index+ itself (section 0, weight 1) as the search index.
#
# Fix: the expression may hold the index column from a different
# context, so re-resolve it through the current context by ID before
# using it — the same lookup match_resolve_index_expression_find_index
# performs for its IndexColumn case.
def match_resolve_index_index_column(index)
  index = Context.instance[index.id]
  put_search_index(index, 0, 1)
end
# Registers the index (weight 1) that makes +indexable+ searchable for
# the current operator, if one exists.
def match_resolve_index_indexable(indexable)
  info = indexable.find_index(op)
  put_search_index(info.index, info.section_id, 1) unless info.nil?
end

# Registers the index for a match against an accessor and marks the scan
# as accessor-based. A chained accessor is registered as-is so the chain
# can be followed at search time.
def match_resolve_index_accessor(accessor)
  self.flags |= ScanInfo::Flags::ACCESSOR
  info = accessor.find_index(op)
  return if info.nil?

  target = accessor.next ? accessor : info.index
  put_search_index(target, info.section_id, 1)
end
# Dispatches index resolution for one argument of a selector-procedure
# call: accessors and indexables may contribute an index, a Bulk literal
# becomes the query.
def call_relational_resolve_index(object, selector_op)
  if object.is_a?(Accessor)
    call_relational_resolve_index_accessor(object, selector_op)
  elsif object.is_a?(Bulk)
    self.query = object
  elsif object.is_a?(Indexable)
    call_relational_resolve_index_indexable(object, selector_op)
  end
end

# Registers the index (weight 1) usable for +selector_op+ on an
# indexable target, if any.
def call_relational_resolve_index_indexable(indexable, selector_op)
  info = indexable.find_index(selector_op)
  put_search_index(info.index, info.section_id, 1) unless info.nil?
end

# Same as above for an accessor target; also marks the scan as
# accessor-based.
def call_relational_resolve_index_accessor(accessor, selector_op)
  self.flags |= ScanInfo::Flags::ACCESSOR
  info = accessor.find_index(selector_op)
  put_search_index(info.index, info.section_id, 1) unless info.nil?
end
# Appends a ScanInfoSearchIndex entry for this scan. The data's own base
# @weight is added on top of the per-code weight; extra args (scorer,
# expression, scorer argument offset) are passed through untouched.
def put_search_index(index, section_id, weight, *args)
  @search_indexes << ScanInfoSearchIndex.new(index,
                                             section_id,
                                             weight + @weight,
                                             *args)
end
end
end
ScanInfoData#match_resolve_index_index_column: refer index
require "scan_info_search_index"
module Groonga
class ScanInfoData
attr_accessor :start                 # index of the first expression code covered
attr_accessor :end                   # index of the last expression code covered
attr_accessor :op                    # match operator (defaults to Operator::NOP)
attr_accessor :logical_op            # combining operator (defaults to Operator::OR)
attr_accessor :query                 # query value extracted from the arguments
attr_accessor :args                  # raw argument list of the scanned operation
attr_accessor :search_indexes        # resolved ScanInfoSearchIndex entries
attr_accessor :flags                 # ScanInfo::Flags bit set
attr_accessor :max_interval          # NEAR/NEAR2 max interval (nil otherwise)
attr_accessor :similarity_threshold  # SIMILAR threshold (nil otherwise)
attr_accessor :quorum_threshold      # QUORUM threshold (nil otherwise)
attr_accessor :start_position
attr_accessor :weight                # base weight added to every search index

# @param start [Integer] index of the first expression code this scan
#   info covers; the covered range initially ends at the same code.
def initialize(start)
  @start = start
  @end = start
  @op = Operator::NOP
  @logical_op = Operator::OR
  @query = nil
  @args = []
  @search_indexes = []
  @flags = ScanInfo::Flags::PUSH
  @max_interval = nil
  @similarity_threshold = nil
  @quorum_threshold = nil
  @start_position = nil
  @weight = 0
end
# Resolves the search index for a match-family operator, trying the
# specialized search kinds before the generic path.
def match_resolve_index
  return match_near_resolve_index    if near_search?
  return match_similar_resolve_index if similar_search?
  return match_quorum_resolve_index  if quorum_match?
  match_generic_resolve_index
end

# Resolves indexes for a selector-procedure call: every argument of the
# call may contribute an index (or become the query).
def call_relational_resolve_indexes
  procedure, *rest = *@args
  return unless procedure.selector?

  selector_op = procedure.selector_operator
  rest.each { |argument| call_relational_resolve_index(argument, selector_op) }
end

private

# True when the operator/arguments describe a NEAR/NEAR2 search.
def near_search?
  @args.size == 3 && (@op == Operator::NEAR || @op == Operator::NEAR2)
end
# Resolves the search index for a NEAR/NEAR2 match.
# @args is [target, query, max_interval].
def match_near_resolve_index
  arg = @args[0]
  case arg
  when Expression
    match_resolve_index_expression(arg)
  when Accessor
    match_resolve_index_accessor(arg)
  when Indexable
    match_resolve_index_indexable(arg)
  else
    message =
      "The first argument of NEAR/NEAR2 must be Expression, Accessor or Indexable: #{arg.class}"
    raise ErrorMessage, message
  end
  self.query = @args[1]
  self.max_interval = @args[2].value
end

# True when the operator/arguments describe a SIMILAR search.
def similar_search?
  @op == Operator::SIMILAR and @args.size == 3
end

# Resolves the search index for a SIMILAR match.
# @args is [target, query, similarity_threshold].
def match_similar_resolve_index
  arg = @args[0]
  case arg
  when Expression
    match_resolve_index_expression(arg)
  when Accessor
    match_resolve_index_accessor(arg)
  when IndexColumn
    match_resolve_index_index_column(arg)
  when Indexable
    match_resolve_index_indexable(arg)
  else
    # NOTE(review): the message omits IndexColumn, which the case above
    # also accepts — confirm whether the message should mention it.
    message =
      "The first argument of SIMILAR must be Expression, Accessor or Indexable: #{arg.class}"
    raise ErrorMessage, message
  end
  self.query = @args[1]
  self.similarity_threshold = @args[2].value
end

# True when the operator/arguments describe a QUORUM match.
def quorum_match?
  @op == Operator::QUORUM and @args.size == 3
end
# Resolves the search index for a QUORUM match.
# @args is [target, query, quorum_threshold].
def match_quorum_resolve_index
  arg = @args[0]
  case arg
  when Expression
    match_resolve_index_expression(arg)
  when Accessor
    match_resolve_index_accessor(arg)
  when Indexable
    match_resolve_index_indexable(arg)
  else
    message =
      "The first argument of QUORUM must be Expression, Accessor or Indexable: #{arg.class}"
    # Fix: was `raise ErrorMesesage` (typo), which raised NameError
    # instead of the intended ErrorMessage.
    raise ErrorMessage, message
  end
  self.query = @args[1]
  self.quorum_threshold = @args[2].value
end
# Resolves search indexes for a plain match: each argument may
# contribute an index; a non-index argument becomes the query; a
# Procedure argument ends the scan (scorers are handled in the
# expression-code path).
def match_generic_resolve_index
  @args.each do |arg|
    case arg
    when Expression
      match_resolve_index_expression(arg)
    when Accessor
      match_resolve_index_accessor(arg)
    when IndexColumn
      match_resolve_index_index_column(arg)
    when Indexable
      match_resolve_index_indexable(arg)
    when Procedure
      break
    else
      self.query = arg
    end
  end
  # REGEXP can only use an index when the pattern is simple enough to be
  # answered by an index search; otherwise force a sequential scan.
  if @op == Operator::REGEXP and not index_searchable_regexp?(@query)
    @search_indexes.clear
  end
end
# Returns whether +pattern+ (a constant holding the regexp source; read
# via #value) is simple enough to be answered with an index search.
#
# Only patterns equivalent to literal character sequences qualify —
# optionally wrapped in the exact option group prefix "(?-mix:" and
# using ".*" runs. Anchors, character classes, back references,
# repetition operators and most escapes force a sequential search.
def index_searchable_regexp?(pattern)
  return false if pattern.nil?

  paren = :outside   # :outside / :starting (inside "(?-mix" prefix) / :inside
  dot = false        # saw "." that is not yet followed by "*"
  previous_char = nil
  pattern.value.each_char do |char|
    if previous_char == "\\"
      # Escaped character: reject escapes with regexp semantics.
      case char
      when "Z"                                                    # end-of-string anchor
        return false
      when "b", "B"                                               # word boundaries
        return false
      when "d", "D", "h", "H", "p", "s", "S", "w", "W"            # character classes
        return false
      when "X"                                                    # grapheme cluster
        return false
      when "k", "g", "1", "2", "3", "4", "5", "6", "7", "8", "9"  # back references
        return false
      when "\\"
        # Escaped backslash: consume both characters so the next one is
        # not treated as escaped.
        previous_char = nil
        next
      end
    else
      case paren
      when :starting
        # Only the exact prefix "(?-mix:" is accepted, one character at
        # a time.
        case char
        when "?"
          return false if previous_char != "("
        when "-"
          return false if previous_char != "?"
        when "m"
          return false if previous_char != "-"
        when "i"
          return false if previous_char != "m"
        when "x"
          return false if previous_char != "i"
        when ":"
          return false if previous_char != "x"
          paren = :inside
        else
          return false
        end
      else
        case char
        when "("
          return false unless paren == :outside
          paren = :starting
        when ")"
          return false unless paren == :inside
          paren = :outside
        when "."
          return false if dot    # ".." is not indexable
          dot = true
        when "*"
          return false unless dot  # "*" only allowed right after "."
          dot = false
        when "[", "]", "|", "?", "+", "{", "}", "^", "$"
          return false           # classes, alternation, repetition, anchors
        else
          return false if dot    # "." must be followed by "*"
        end
      end
    end
    previous_char = char
  end
  return false if dot            # trailing lone "."
  return false unless paren == :outside  # unclosed group
  true
end
# Walks every code of +expression+, letting
# match_resolve_index_expression_codes consume one run at a time and
# report where the next run starts.
def match_resolve_index_expression(expression)
  codes = expression.codes
  size = codes.size
  position = 0
  until position >= size
    position = match_resolve_index_expression_codes(expression, codes,
                                                    position, size)
  end
end
# Scans one run of expression codes starting at +i+ and registers any
# search index that can drive the match operator.
#
# Recognized shapes:
# * Accessor/Column: look up an index for the target and register it with
#   the code's weight.
# * Procedure: must be a scorer; the following codes identify the indexed
#   column and the scorer's argument expression (terminated by CALL).
# * Table: not a valid match target; raises.
#
# @param expression [Expression] the expression being scanned
# @param codes [Array] the expression's codes
# @param i [Integer] index of the code to process
# @param n_codes [Integer] total number of codes
# @return [Integer] index of the next unprocessed code
def match_resolve_index_expression_codes(expression, codes, i, n_codes)
  code = codes[i]
  value = code.value
  return i + 1 if value.nil?

  case value
  when Accessor, Column
    index_info, offset =
      match_resolve_index_expression_find_index(expression,
                                                codes, i, n_codes)
    i += offset - 1
    if index_info
      if value.is_a?(Accessor)
        self.flags |= ScanInfo::Flags::ACCESSOR
      end
      weight, offset = codes[i].weight
      i += offset
      put_search_index(index_info.index, index_info.section_id, weight)
    end
  when Procedure
    unless value.scorer?
      # Fix: the message interpolated the local `scorer`, which is only
      # assigned below and is always nil here, so building the message
      # raised NoMethodError instead of the intended error. Report the
      # offending procedure (`value`) and balance the brackets.
      message = "procedure must be scorer: <#{value.name}>"
      raise ErrorMessage, message
    end
    scorer = value
    i += 1
    index_info, offset =
      match_resolve_index_expression_find_index(expression,
                                                codes, i, n_codes)
    i += offset
    if index_info
      # Remember where the scorer's argument expression starts so it can
      # be re-evaluated per record; 0 means "no extra arguments".
      scorer_args_expr_offset = 0
      if codes[i].op != Operator::CALL
        scorer_args_expr_offset = i
      end
      # Skip to the CALL code that terminates the scorer arguments.
      while i < n_codes and codes[i].op != Operator::CALL
        i += 1
      end
      weight, offset = codes[i].weight
      i += offset
      put_search_index(index_info.index,
                       index_info.section_id,
                       weight,
                       scorer,
                       expression,
                       scorer_args_expr_offset)
    end
  when Table
    raise ErrorMessage, "invalid match target: <#{value.name}>"
  end
  i + 1
end
# Finds the index (and section) that can drive a match against the code
# at +codes[i]+.
#
# @return [(IndexInfo, Integer)] the index info (nil when no usable
#   index exists) and the number of codes consumed (grows when a section
#   is selected via the GET_MEMBER form below).
def match_resolve_index_expression_find_index(expression, codes, i, n_codes)
  code = codes[i]
  value = code.value
  index_info = nil
  offset = 1
  case value
  when Accessor
    accessor = value
    index_info = accessor.find_index(@op)
    if index_info
      # For a chained accessor whose index is not the accessed object
      # itself, keep the accessor so the chain is walked at search time.
      if accessor.have_next? and index_info.index != accessor.object
        index_info = IndexInfo.new(accessor, index_info.section_id)
      end
    end
  when FixedSizeColumn, VariableSizeColumn
    index_info = value.find_index(@op)
  when IndexColumn
    index = value
    section_id = 0
    rest_n_codes = n_codes - i
    # `index[section]` form: a 32-bit integer literal followed by
    # GET_MEMBER selects a 1-origin section of a multi-section index.
    if rest_n_codes >= 2 and
      codes[i + 1].value.is_a?(Bulk) and
      (codes[i + 1].value.domain_id == ID::UINT32 or
       codes[i + 1].value.domain_id == ID::INT32) and
      codes[i + 2].op == Operator::GET_MEMBER
      section_id = codes[i + 1].value.value + 1
      offset += 2
    end
    # Re-resolve the index column through the current context by ID.
    index = Context.instance[index.id]
    index_info = IndexInfo.new(index, section_id)
  end
  [index_info, offset]
end
# Registers the index behind an accessor expression code, preferring the
# accessor itself when the chain must be followed at search time.
def match_resolve_index_expression_accessor(expr_code)
  accessor = expr_code.value
  self.flags |= ScanInfo::Flags::ACCESSOR
  index_info = accessor.find_index(op)
  return if index_info.nil?

  section_id = index_info.section_id
  weight = expr_code.weight
  if accessor.next
    put_search_index(accessor, section_id, weight)
  else
    put_search_index(index_info.index, section_id, weight)
  end
end

# Registers the index behind a data-column expression code, if any.
def match_resolve_index_expression_data_column(expr_code)
  column = expr_code.value
  index_info = column.find_index(op)
  return if index_info.nil?
  put_search_index(index_info.index, index_info.section_id, expr_code.weight)
end

# Registers +index+ itself (section 0, weight 1). The expression may
# hold the index column from another context, so re-resolve it by ID
# through the current context first.
def match_resolve_index_index_column(index)
  index = Context.instance[index.id]
  put_search_index(index, 0, 1)
end

# Registers the index (weight 1) that makes +indexable+ searchable for
# the current operator, if any.
def match_resolve_index_indexable(indexable)
  index_info = indexable.find_index(op)
  return if index_info.nil?
  put_search_index(index_info.index, index_info.section_id, 1)
end

# Registers the index for an accessor match and marks the scan as
# accessor-based; a chained accessor is registered as-is.
def match_resolve_index_accessor(accessor)
  self.flags |= ScanInfo::Flags::ACCESSOR
  index_info = accessor.find_index(op)
  return if index_info.nil?

  if accessor.next
    put_search_index(accessor, index_info.section_id, 1)
  else
    put_search_index(index_info.index, index_info.section_id, 1)
  end
end

# Dispatches index resolution for one argument of a selector-procedure
# call: accessors/indexables may contribute an index, a Bulk literal
# becomes the query.
def call_relational_resolve_index(object, selector_op)
  case object
  when Accessor
    call_relational_resolve_index_accessor(object, selector_op)
  when Bulk
    self.query = object
  when Indexable
    call_relational_resolve_index_indexable(object, selector_op)
  end
end

# Registers the index (weight 1) usable for +selector_op+ on an
# indexable target, if any.
def call_relational_resolve_index_indexable(indexable, selector_op)
  index_info = indexable.find_index(selector_op)
  return if index_info.nil?
  put_search_index(index_info.index, index_info.section_id, 1)
end

# Same as above for an accessor target; also marks the scan as
# accessor-based.
def call_relational_resolve_index_accessor(accessor, selector_op)
  self.flags |= ScanInfo::Flags::ACCESSOR
  index_info = accessor.find_index(selector_op)
  return if index_info.nil?
  put_search_index(index_info.index, index_info.section_id, 1)
end

# Appends a ScanInfoSearchIndex entry; the data's base @weight is added
# on top of the per-code weight.
def put_search_index(index, section_id, weight, *args)
  search_index = ScanInfoSearchIndex.new(index,
                                         section_id,
                                         weight + @weight,
                                         *args)
  @search_indexes << search_index
end
end
end
|
# encoding: utf-8
module Mutant
  class Predicate
    # Base class for predicates on object attributes
    class Attribute < self
      include Concord.new(:attribute_name, :expectation)

      private

      # Return value for object
      #
      # @param [Object] object
      #
      # @return [Object]
      #
      # @api private
      #
      def value(object)
        object.public_send(attribute_name)
      end

      # Regexp based attribute predicate
      class Regexp < self

        # Test for match
        #
        # @param [Object] object
        #
        # @return [true]
        #   if attribute value matches expectation
        #
        # @return [false]
        #   otherwise
        #
        # @api private
        #
        def match?(object)
          !!(expectation =~ value(object))
        end

      end # Regexp

      # Equality based attribute predicate
      class Equality < self

        # Notation of the form "code:<1-6 hex digits>"
        PATTERN = /\Acode:(?<code>[[:xdigit:]]{1,6})\z/.freeze

        # Test if class handles string
        #
        # @param [String] notation
        #
        # @return [Filter]
        #   if notation matches pattern
        #
        # @return [nil]
        #   otherwise
        #
        # @api private
        #
        def self.handle(notation)
          match = PATTERN.match(notation)
          new(:code, match[:code]) if match
        end

        # Test for match
        #
        # @param [Object] object
        #
        # @return [true]
        #   if attribute value matches expectation
        #
        # @return [false]
        #   otherwise
        #
        # @api private
        #
        def match?(object)
          expectation.eql?(value(object))
        end

      end # Equality
    end # Attribute
  end # Predicate
end # Mutant
Remove dead code
# encoding: utf-8
module Mutant
  class Predicate
    # Base class for predicates on object attributes
    class Attribute < self
      include Concord.new(:attribute_name, :expectation)

      private

      # Return value for object
      #
      # @param [Object] object
      #
      # @return [Object]
      #
      # @api private
      #
      def value(object)
        object.public_send(attribute_name)
      end

      # Regexp based attribute predicate
      class Regexp < self

        # Test for match
        #
        # @param [Object] object
        #
        # @return [true]
        #   if attribute value matches expectation
        #
        # @return [false]
        #   otherwise
        #
        # @api private
        #
        def match?(object)
          !!(expectation =~ value(object))
        end

      end # Regexp

      # Equality based attribute predicate
      class Equality < self

        # Notation of the form "code:<1-6 hex digits>"
        PATTERN = /\Acode:(?<code>[[:xdigit:]]{1,6})\z/.freeze

        # Test for match
        #
        # @param [Object] object
        #
        # @return [true]
        #   if attribute value matches expectation
        #
        # @return [false]
        #   otherwise
        #
        # @api private
        #
        def match?(object)
          expectation.eql?(value(object))
        end

      end # Equality
    end # Attribute
  end # Predicate
end # Mutant
|
module Nanoc::Int
# The class responsible for keeping track of all loaded plugins, such as
# filters ({Nanoc::Filter}) and data sources ({Nanoc::DataSource}).
#
# @api private
class PluginRegistry
extend Nanoc::Int::Memoization
# A module that contains class methods for plugins. It provides functions
# for setting identifiers, registering plugins and finding plugins. Plugin
# classes should extend this module.
module PluginMethods
  # @overload identifiers(*identifiers)
  #
  #   Sets the identifiers for this plugin.
  #
  #   @param [Array<Symbol>] identifiers A list of identifiers to assign to
  #     this plugin.
  #
  #   @return [void]
  #
  # @overload identifiers
  #
  #   @return [Array<Symbol>] The identifiers for this plugin
  def identifiers(*identifiers)
    if identifiers.empty?
      Nanoc::Int::PluginRegistry.instance.identifiers_of(superclass, self)
    else
      register(self, *identifiers)
    end
  end

  # @overload identifier(identifier)
  #
  #   Sets the identifier for this plugin.
  #
  #   @param [Symbol] identifier An identifier to assign to this plugin.
  #
  #   @return [void]
  #
  # @overload identifier
  #
  #   @return [Symbol] The first identifier for this plugin
  def identifier(identifier = nil)
    if identifier
      identifiers(identifier)
    else
      Nanoc::Int::PluginRegistry.instance.identifiers_of(superclass, self).first
    end
  end

  # Registers the given class as a plugin with the given identifier.
  #
  # @param [Class, String] class_or_name The class to register, or a
  #   string containing the class name to register.
  #
  # @param [Array<Symbol>] identifiers A list of identifiers to assign to
  #   this plugin.
  #
  # @return [void]
  def register(class_or_name, *identifiers)
    # Find plugin class: walk up to the topmost ancestor that still
    # responds to #register (the plugin root, e.g. Nanoc::Filter).
    klass = self
    klass = klass.superclass while klass.superclass.respond_to?(:register)

    # Register
    registry = Nanoc::Int::PluginRegistry.instance
    registry.register(klass, class_or_name, *identifiers)
  end

  # @return [Hash<Symbol, Class>] All plugins of this type, with keys
  #   being the identifiers and values the plugin classes
  def all
    Nanoc::Int::PluginRegistry.instance.find_all(self)
  end

  # Returns the plugin with the given name (identifier)
  #
  # @param [String] name The name of the plugin class to find
  #
  # @return [Class] The plugin class with the given name
  def named(name)
    Nanoc::Int::PluginRegistry.instance.find(self, name)
  end
end
# Returns the shared {PluginRegistry} instance, creating it if none exists
# yet.
#
# @return [Nanoc::Int::PluginRegistry] The shared plugin registry
def self.instance
  @instance ||= new
end

# Creates a new plugin registry. This should usually not be necessary; it
# is recommended to use the shared instance (obtained from
# {Nanoc::Int::PluginRegistry.instance}).
def initialize
  # per-superclass map: identifier Symbol -> class (or class-name String)
  @identifiers_to_classes = {}
  # per-superclass map: "::Anchored::ClassName" -> [identifier Symbols]
  @classes_to_identifiers = {}
end
# Registers the given class as a plugin.
#
# @param [Class] superclass The superclass of the plugin. For example:
#   {Nanoc::Filter}.
#
# @param [Class, String] class_or_name The class to register. This can be
#   a string, in which case it will be automatically converted to a proper
#   class at lookup. For example: `Nanoc::Filters::ERB`,
#   `"Nanoc::Filters::Haml"`.
#
# @param [Symbol] identifiers One or more symbols identifying the class.
#   For example: `:haml`, :`erb`.
#
# @return [void]
def register(superclass, class_or_name, *identifiers)
  @identifiers_to_classes[superclass] ||= {}
  @classes_to_identifiers[superclass] ||= {}

  identifiers.each do |identifier|
    # forward mapping for lookup by identifier ...
    @identifiers_to_classes[superclass][identifier.to_sym] = class_or_name
    # ... and reverse mapping used by #identifiers_of
    (@classes_to_identifiers[superclass][name_for_class(class_or_name)] ||= []) << identifier.to_sym
  end
end

# @param [Class] superclass The superclass of the plugin. For example:
#   {Nanoc::Filter}.
#
# @param [Class] klass The class to get the identifiers for.
#
# @return [Array<Symbol>] An array of identifiers for the given class
def identifiers_of(superclass, klass)
  (@classes_to_identifiers[superclass] || {})[name_for_class(klass)] || []
end
# Finds the plugin that is a subclass of the given class and has the given
# name.
#
# @param [Class] klass The class of the plugin to return
#
# @param [Symbol] name The name of the plugin to return
#
# @return [Class, nil] The plugin with the given name
def find(klass, name)
  @identifiers_to_classes[klass] ||= {}
  resolve(@identifiers_to_classes[klass][name.to_sym], klass)
end

# Returns all plugins of the given class, keyed by identifier.
#
# @param [Class] klass The class of the plugin to return
#
# @return [Enumerable<Class>] A collection of class plugins
def find_all(klass)
  @identifiers_to_classes[klass] ||= {}
  res = {}
  # Fix: #resolve was called with the identifier (`k`) as its second
  # argument while #find passes the plugin superclass; #resolve is
  # memoized on its full argument list, so that split the memoization
  # cache for the same entry. Pass `klass` for consistency with #find.
  @identifiers_to_classes[klass].each_pair { |k, v| res[k] = resolve(v, klass) }
  res
end
# Returns a list of all plugins. The returned list of plugins is an array
# with array elements in the following format:
#
#   { :class => ..., :superclass => ..., :identifiers => ... }
#
# @return [Array<Hash>] A list of all plugins in the format described
def all
  plugins = []
  @identifiers_to_classes.each_pair do |superclass, submap|
    submap.each_pair do |identifier, klass|
      # Find existing plugin
      existing_plugin = plugins.find do |p|
        p[:class] == klass && p[:superclass] == superclass
      end

      if existing_plugin
        # Add identifier to existing plugin (kept sorted for stable output)
        existing_plugin[:identifiers] << identifier
        existing_plugin[:identifiers] = existing_plugin[:identifiers].sort_by(&:to_s)
      else
        # Create new plugin
        plugins << {
          class: klass,
          superclass: superclass,
          identifiers: [identifier],
        }
      end
    end
  end
  plugins
end
protected
# Resolves a registry entry to a plugin class. Entries registered by
# name (String) are looked up as constants; class entries are returned
# as-is.
#
# Module#const_get accepts a fully qualified name ("A::B::C") since
# Ruby 2.0, so splitting the name and walking the constants manually is
# unnecessary.
def resolve(class_or_name, _klass)
  if class_or_name.is_a?(String)
    Kernel.const_get(class_or_name)
  else
    class_or_name
  end
end
memoize :resolve
# Normalizes a class (or class-name string) to a "::"-anchored name so
# that "Foo::Bar" and "::Foo::Bar" map to the same registry key.
def name_for_class(klass)
  name = klass.to_s
  name.start_with?('::') ? name : "::#{name}"
end
end
end
Simplify PluginRegistry#resolve
As of Ruby 2.0, Module#const_get accepts a fully qualified constant name
(e.g. "Nanoc::Filters::ERB"), so splitting the name and resolving each
segment individually is no longer necessary.
module Nanoc::Int
# The class responsible for keeping track of all loaded plugins, such as
# filters ({Nanoc::Filter}) and data sources ({Nanoc::DataSource}).
#
# @api private
class PluginRegistry
extend Nanoc::Int::Memoization
# A module that contains class methods for plugins. It provides functions
# for setting identifiers, registering plugins and finding plugins. Plugin
# classes should extend this module.
module PluginMethods
  # @overload identifiers(*identifiers)
  #
  #   Sets the identifiers for this plugin.
  #
  #   @param [Array<Symbol>] identifiers A list of identifiers to assign to
  #     this plugin.
  #
  #   @return [void]
  #
  # @overload identifiers
  #
  #   @return [Array<Symbol>] The identifiers for this plugin
  def identifiers(*identifiers)
    if identifiers.empty?
      Nanoc::Int::PluginRegistry.instance.identifiers_of(superclass, self)
    else
      register(self, *identifiers)
    end
  end

  # @overload identifier(identifier)
  #
  #   Sets the identifier for this plugin.
  #
  #   @param [Symbol] identifier An identifier to assign to this plugin.
  #
  #   @return [void]
  #
  # @overload identifier
  #
  #   @return [Symbol] The first identifier for this plugin
  def identifier(identifier = nil)
    if identifier
      identifiers(identifier)
    else
      Nanoc::Int::PluginRegistry.instance.identifiers_of(superclass, self).first
    end
  end

  # Registers the given class as a plugin with the given identifier.
  #
  # @param [Class, String] class_or_name The class to register, or a
  #   string containing the class name to register.
  #
  # @param [Array<Symbol>] identifiers A list of identifiers to assign to
  #   this plugin.
  #
  # @return [void]
  def register(class_or_name, *identifiers)
    # Find plugin class: walk up to the topmost ancestor that still
    # responds to #register (the plugin root, e.g. Nanoc::Filter).
    klass = self
    klass = klass.superclass while klass.superclass.respond_to?(:register)

    # Register
    registry = Nanoc::Int::PluginRegistry.instance
    registry.register(klass, class_or_name, *identifiers)
  end

  # @return [Hash<Symbol, Class>] All plugins of this type, with keys
  #   being the identifiers and values the plugin classes
  def all
    Nanoc::Int::PluginRegistry.instance.find_all(self)
  end

  # Returns the plugin with the given name (identifier)
  #
  # @param [String] name The name of the plugin class to find
  #
  # @return [Class] The plugin class with the given name
  def named(name)
    Nanoc::Int::PluginRegistry.instance.find(self, name)
  end
end
# Returns the shared {PluginRegistry} instance, creating it if none exists
# yet.
#
# @return [Nanoc::Int::PluginRegistry] The shared plugin registry
def self.instance
  @instance ||= new
end

# Creates a new plugin registry. This should usually not be necessary; it
# is recommended to use the shared instance (obtained from
# {Nanoc::Int::PluginRegistry.instance}).
def initialize
  # per-superclass map: identifier Symbol -> class (or class-name String)
  @identifiers_to_classes = {}
  # per-superclass map: "::Anchored::ClassName" -> [identifier Symbols]
  @classes_to_identifiers = {}
end
# Registers the given class as a plugin.
#
# @param [Class] superclass The superclass of the plugin. For example:
#   {Nanoc::Filter}.
#
# @param [Class, String] class_or_name The class to register. This can be
#   a string, in which case it will be automatically converted to a proper
#   class at lookup. For example: `Nanoc::Filters::ERB`,
#   `"Nanoc::Filters::Haml"`.
#
# @param [Symbol] identifiers One or more symbols identifying the class.
#   For example: `:haml`, :`erb`.
#
# @return [void]
def register(superclass, class_or_name, *identifiers)
  @identifiers_to_classes[superclass] ||= {}
  @classes_to_identifiers[superclass] ||= {}

  identifiers.each do |identifier|
    # forward mapping for lookup by identifier ...
    @identifiers_to_classes[superclass][identifier.to_sym] = class_or_name
    # ... and reverse mapping used by #identifiers_of
    (@classes_to_identifiers[superclass][name_for_class(class_or_name)] ||= []) << identifier.to_sym
  end
end

# @param [Class] superclass The superclass of the plugin. For example:
#   {Nanoc::Filter}.
#
# @param [Class] klass The class to get the identifiers for.
#
# @return [Array<Symbol>] An array of identifiers for the given class
def identifiers_of(superclass, klass)
  (@classes_to_identifiers[superclass] || {})[name_for_class(klass)] || []
end
# Finds the plugin that is a subclass of the given class and has the given
# name.
#
# @param [Class] klass The class of the plugin to return
#
# @param [Symbol] name The name of the plugin to return
#
# @return [Class, nil] The plugin with the given name
def find(klass, name)
  @identifiers_to_classes[klass] ||= {}
  resolve(@identifiers_to_classes[klass][name.to_sym], klass)
end

# Returns all plugins of the given class, keyed by identifier.
#
# @param [Class] klass The class of the plugin to return
#
# @return [Enumerable<Class>] A collection of class plugins
def find_all(klass)
  @identifiers_to_classes[klass] ||= {}
  res = {}
  # Fix: #resolve was called with the identifier (`k`) as its second
  # argument while #find passes the plugin superclass; #resolve is
  # memoized on its full argument list, so that split the memoization
  # cache for the same entry. Pass `klass` for consistency with #find.
  @identifiers_to_classes[klass].each_pair { |k, v| res[k] = resolve(v, klass) }
  res
end
# Returns a list of all plugins. The returned list of plugins is an array
# with array elements in the following format:
#
#   { :class => ..., :superclass => ..., :identifiers => ... }
#
# @return [Array<Hash>] A list of all plugins in the format described
def all
  plugins = []
  @identifiers_to_classes.each_pair do |superclass, submap|
    submap.each_pair do |identifier, klass|
      # Find existing plugin
      existing_plugin = plugins.find do |p|
        p[:class] == klass && p[:superclass] == superclass
      end

      if existing_plugin
        # Add identifier to existing plugin (kept sorted for stable output)
        existing_plugin[:identifiers] << identifier
        existing_plugin[:identifiers] = existing_plugin[:identifiers].sort_by(&:to_s)
      else
        # Create new plugin
        plugins << {
          class: klass,
          superclass: superclass,
          identifiers: [identifier],
        }
      end
    end
  end
  plugins
end
protected
# Resolves a registry entry to a plugin class: name strings are looked
# up as constants (const_get handles qualified names), classes pass
# through untouched.
def resolve(class_or_name, _klass)
  return class_or_name unless class_or_name.is_a?(String)
  Kernel.const_get(class_or_name)
end
memoize :resolve
# Normalizes a class (or class-name string) to a "::"-anchored name so
# that "Foo::Bar" and "::Foo::Bar" map to the same registry key.
def name_for_class(klass)
  name = klass.to_s
  name.start_with?('::') ? name : "::#{name}"
end
end
end
|
module Nerve
module ServiceCheck
class BaseServiceCheck
include Utils
include Logging
# @param opts [Hash] 'timeout' (seconds, may be fractional),
#   'rise'/'fall' (consecutive successes/failures needed to flip state)
def initialize(opts={})
  # Fix: the default timeout is fractional (0.1s) but configured values
  # were truncated with #to_i, so e.g. 'timeout' => 0.5 became 0 seconds.
  # Parse the configured timeout as a float instead.
  @timeout = opts['timeout'] ? opts['timeout'].to_f : 0.1
  @rise = opts['rise'] ? opts['rise'].to_i : 1
  @fall = opts['fall'] ? opts['fall'].to_i : 1

  # sliding window of recent results, sized to cover both thresholds
  @check_buffer = RingBuffer.new([@rise, @fall].max)
  @last_result = nil
end
# Runs one health check and folds it into the rise/fall debouncing
# state machine.
#
# @return [true, false] the debounced state: goes down only after @fall
#   consecutive failures, up only after @rise consecutive successes;
#   otherwise keeps the previous state.
def up?
  # do the check; errors and timeouts count as a failed check
  check_result = ignore_errors do
    Timeout::timeout(@timeout) do
      check
    end
  end

  # this is the first check -- seed the whole buffer so the rise/fall
  # windows are full from the start
  if @last_result == nil
    @last_result = check_result
    @check_buffer.size.times {@check_buffer.push check_result}
    # Fix: this logged the never-assigned @check_result (always nil);
    # log the local check result instead.
    log.info "initial service check returned #{check_result}"
  end

  log.debug "service check returned #{check_result}"
  @check_buffer.push(check_result)

  # we've failed if the last @fall times are false
  unless @check_buffer.last(@fall).reduce(:|)
    log.info "service check transitions to down after #{@fall} failures" if @last_result
    @last_result = false
  end

  # we've succeeded if the last @rise times is true
  if @check_buffer.last(@rise).reduce(:&)
    log.info "service check transitions to up after #{@rise} successes" unless @last_result
    @last_result = true
  end

  # otherwise return the last result
  return @last_result
end
end
end
end
properly return initial result
module Nerve
module ServiceCheck
class BaseServiceCheck
include Utils
include Logging
# @param opts [Hash] 'timeout' (seconds), 'rise'/'fall' (consecutive
#   successes/failures needed to flip state)
def initialize(opts={})
  # NOTE(review): #to_i truncates fractional timeouts ('timeout' => 0.5
  # becomes 0) even though the default is 0.1 — likely wants #to_f;
  # confirm intended behavior.
  @timeout = opts['timeout'] ? opts['timeout'].to_i : 0.1
  @rise = opts['rise'] ? opts['rise'].to_i : 1
  @fall = opts['fall'] ? opts['fall'].to_i : 1

  # sliding window of recent results, sized to cover both thresholds
  @check_buffer = RingBuffer.new([@rise, @fall].max)
  @last_result = nil
end
# Runs one health check and folds it into the rise/fall debouncing
# state machine.
#
# @return [true, false] the debounced state: goes down only after @fall
#   consecutive failures, up only after @rise consecutive successes;
#   otherwise keeps the previous state.
def up?
  # do the check; errors and timeouts count as a failed check
  check_result = ignore_errors do
    Timeout::timeout(@timeout) do
      check
    end
  end

  # this is the first check -- initialize buffer (seed it fully so the
  # rise/fall windows are complete from the start)
  if @last_result == nil
    @last_result = check_result
    @check_buffer.size.times {@check_buffer.push check_result}
    log.info "initial service check returned #{check_result}"
  end

  log.debug "service check returned #{check_result}"
  @check_buffer.push(check_result)

  # we've failed if the last @fall times are false
  unless @check_buffer.last(@fall).reduce(:|)
    log.info "service check transitions to down after #{@fall} failures" if @last_result
    @last_result = false
  end

  # we've succeeded if the last @rise times is true
  if @check_buffer.last(@rise).reduce(:&)
    log.info "service check transitions to up after #{@rise} successes" unless @last_result
    @last_result = true
  end

  # otherwise return the last result
  return @last_result
end
end
end
end
|
module NetSuiteRails
module RecordSync
# Installs the sync machinery on the including model: the inheritable
# per-class settings container, the class/instance API, and the
# sync/poll triggers.
def self.included(klass)
  klass.class_eval do
    class_attribute :netsuite_settings
    self.netsuite_settings = {
      before_netsuite_push: [],
      after_netsuite_push: [],
      after_netsuite_pull: [],
      netsuite_sync: :read,
      netsuite_field_map: {},
      netsuite_field_hints: {},
      netsuite_record_class: nil,
    }

    cattr_accessor :netsuite_custom_record_type_id
    cattr_accessor :netsuite_sync_options
  end

  klass.send(:extend, ClassMethods)
  klass.send(:include, InstanceMethods)

  SyncTrigger.attach(klass)
  PollTrigger.attach(klass)
end
module ClassMethods
  # Polls NetSuite for changed records of this model.
  def netsuite_poll(opts = {})
    RecordSync::PollManager.poll(self, opts)
  end

  attr_accessor :netsuite_custom_record_type_id
  attr_accessor :netsuite_sync_options

  # TODO is there a better way to implement callback chains?
  # https://github.com/rails/rails/blob/0c0f278ab20f3042cdb69604166e18a61f8605ad/activesupport/lib/active_support/callbacks.rb#L491

  # Registers (when given a callback or block) and returns the callbacks
  # run before a push to NetSuite.
  def before_netsuite_push(callback = nil, &block)
    self.netsuite_settings[:before_netsuite_push] << (callback || block) if callback || block
    self.netsuite_settings[:before_netsuite_push]
  end

  # Registers/returns the callbacks run after a push to NetSuite.
  def after_netsuite_push(callback = nil, &block)
    self.netsuite_settings[:after_netsuite_push] << (callback || block) if callback || block
    self.netsuite_settings[:after_netsuite_push]
  end

  # Registers/returns the callbacks run after a pull from NetSuite.
  def after_netsuite_pull(callback = nil, &block)
    self.netsuite_settings[:after_netsuite_pull] << (callback || block) if callback || block
    self.netsuite_settings[:after_netsuite_pull]
  end

  # Sets (or returns) the local-attribute => NetSuite-field mapping.
  def netsuite_field_map(field_mapping = nil)
    if !field_mapping.nil?
      self.netsuite_settings[:netsuite_field_map] = field_mapping
    end

    self.netsuite_settings[:netsuite_field_map]
  end

  # Sets (or returns) per-field transformation hints.
  def netsuite_field_hints(list = nil)
    if !list.nil?
      self.netsuite_settings[:netsuite_field_hints] = list
    end

    self.netsuite_settings[:netsuite_field_hints]
  end

  # TODO persist type for CustomRecordRef
  # Sets (or returns) the NetSuite record class backing this model; for
  # custom records the custom record type id is stored as well.
  def netsuite_record_class(record_class = nil, custom_record_type_id = nil)
    if !record_class.nil?
      self.netsuite_settings[:netsuite_record_class] = record_class
      self.netsuite_custom_record_type_id = custom_record_type_id
    end

    self.netsuite_settings[:netsuite_record_class]
  end

  # there is a model level of this method in order to be based on the model level record class
  def netsuite_custom_record?
    self.netsuite_record_class == NetSuite::Records::CustomRecord
  end

  # :read, :write_only, :read_write
  # Sets (or returns) the sync mode for this model.
  def netsuite_sync(flag = nil, opts = {})
    if !flag.nil?
      self.netsuite_sync_options = opts
      self.netsuite_settings[:netsuite_sync] = flag
    end

    self.netsuite_settings[:netsuite_sync]
  end
end
module InstanceMethods
attr_writer :netsuite_manual_fields

# Extra NetSuite fields to include on the next push, beyond the
# configured field map.
def netsuite_manual_fields
  @netsuite_manual_fields ||= []
end

# these methods are here for easy model override
def netsuite_sync_options
  self.class.netsuite_sync_options
end

def netsuite_sync
  self.class.netsuite_sync
end

def netsuite_record_class
  self.class.netsuite_record_class
end

def netsuite_field_map
  self.class.netsuite_field_map
end

def netsuite_field_hints
  self.class.netsuite_field_hints
end

# assumes netsuite_id field on activerecord

# True while a pull is populating this model (set by
# netsuite_extract_from_record).
def netsuite_pulling?
  @netsuite_pulling ||= false
end

# True once a pull has completed for this instance.
def netsuite_pulled?
  @netsuite_pulled ||= false
end
# Fetches the backing NetSuite record and copies its fields onto this
# model.
def netsuite_pull(opts = {})
  # TODO need to support the opts hash
  netsuite_extract_from_record(netsuite_pull_record)
end

# Fetches the backing NetSuite record by internal id.
def netsuite_pull_record
  # TODO support use_external_id / netsuite_external_id
  if netsuite_custom_record?
    # custom records must also be looked up by their type id
    NetSuite::Records::CustomRecord.get(
      internal_id: self.netsuite_id,
      type_id: self.class.netsuite_custom_record_type_id
    )
  else
    self.netsuite_record_class.get(self.netsuite_id)
  end
end

# Pushes local changes up to NetSuite.
def netsuite_push(opts = {})
  NetSuiteRails::RecordSync::PushManager.push(self, opts)
end
# TODO move this login into separate service object
# TODO move this login into separate service object
# Copies fields from +netsuite_record+ onto this model using the
# configured field map, custom field list, association reflection and
# transformation hints. Sets @netsuite_pulling during the copy,
# fires the after-pull callbacks, then marks the model as pulled.
def netsuite_extract_from_record(netsuite_record)
  Rails.logger.info "NetSuite: Pull #{netsuite_record.class} #{netsuite_record.internal_id}"

  @netsuite_pulling = true

  field_hints = self.netsuite_field_hints
  custom_field_list = self.netsuite_field_map[:custom_field_list] || {}
  all_field_list = self.netsuite_field_map.except(:custom_field_list) || {}
  all_field_list.merge!(custom_field_list)

  # TODO should have a helper module for common push/pull methods
  reflection_attributes = NetSuiteRails::RecordSync::PushManager.relationship_attributes_list(self)

  # handle non-collection associations
  association_keys = reflection_attributes.values.reject(&:collection?).map(&:name)

  all_field_list.each do |local_field, netsuite_field|
    is_custom_field = custom_field_list.keys.include?(local_field)

    # a Proc mapping performs the copy itself
    if netsuite_field.is_a?(Proc)
      netsuite_field.call(self, netsuite_record, :pull)
      next
    end

    # NOTE(review): the inline rescue silently substitutes "" for any
    # error while reading a custom field — confirm this is intended.
    field_value = if is_custom_field
      netsuite_record.custom_field_list.send(netsuite_field).value rescue ""
    else
      netsuite_record.send(netsuite_field)
    end

    if field_value.blank?
      # TODO possibly nil out the local value?
      next
    end

    if association_keys.include?(local_field)
      # resolve the referenced record locally by its NetSuite internal id
      field_value = reflection_attributes[local_field].
        klass.
        where(netsuite_id: field_value.internal_id).
        first_or_initialize
    elsif is_custom_field
      field_value = NetSuiteRails::RecordSync::PullManager.extract_custom_field_value(field_value)
    else
      # then it's not a custom field
    end

    # TODO should we just check for nil? vs present?
    if field_hints.has_key?(local_field) && field_value.present?
      field_value = NetSuiteRails::Transformations.transform(field_hints[local_field], field_value, :pull)
    end

    self.send(:"#{local_field}=", field_value)
  end

  netsuite_execute_callbacks(self.class.after_netsuite_pull, netsuite_record)

  @netsuite_pulling = false
  @netsuite_pulled = true
end
# True when this model has not yet been linked to a record in NetSuite
# (i.e. no netsuite_id is stored locally).
def new_netsuite_record?
  netsuite_id.blank?
end
# True when this model is backed by a NetSuite custom record type
# rather than a standard record class.
def netsuite_custom_record?
  netsuite_record_class == NetSuite::Records::CustomRecord
end
# TODO this should be protected; it needs to be pushed down to the Push/Pull manager level
# Runs each registered callback against this record. A Symbol is
# dispatched as a method name on self; anything else (a Proc/lambda) is
# evaluated in this instance's context via instance_exec.
def netsuite_execute_callbacks(list, record)
  list.each do |cb|
    case cb
    when Symbol then send(cb, record)
    else instance_exec(record, &cb)
    end
  end
end
end
end
end
Adding convenience method to determine the sync mode for a class
module NetSuiteRails
module RecordSync
# Hook run when a model includes NetSuiteRails::RecordSync.
# Installs per-class settings with safe defaults, mixes in the class- and
# instance-level API, and attaches the save/poll triggers.
def self.included(klass)
  klass.class_eval do
    class_attribute :netsuite_settings
    # defaults: callback chains start empty and :read is the sync direction
    self.netsuite_settings = {
      before_netsuite_push: [],
      after_netsuite_push: [],
      after_netsuite_pull: [],
      netsuite_sync: :read,
      netsuite_field_map: {},
      netsuite_field_hints: {},
      netsuite_record_class: nil,
    }
    cattr_accessor :netsuite_custom_record_type_id
    cattr_accessor :netsuite_sync_options
  end
  klass.send(:extend, ClassMethods)
  klass.send(:include, InstanceMethods)
  SyncTrigger.attach(klass)
  PollTrigger.attach(klass)
end
module ClassMethods
  # Pulls changed NetSuite records for this model via the PollManager.
  def netsuite_poll(opts = {})
    RecordSync::PollManager.poll(self, opts)
  end
  # NOTE(review): these attr_accessors sit on top of the cattr_accessors
  # declared in self.included and shadow them on the class itself —
  # confirm which pair is intended to win.
  attr_accessor :netsuite_custom_record_type_id
  attr_accessor :netsuite_sync_options
  # TODO is there a better way to implement callback chains?
  # https://github.com/rails/rails/blob/0c0f278ab20f3042cdb69604166e18a61f8605ad/activesupport/lib/active_support/callbacks.rb#L491
  # Registers a callback (method-name Symbol or block) to run before a
  # push; called with no argument it returns the current callback list.
  def before_netsuite_push(callback = nil, &block)
    self.netsuite_settings[:before_netsuite_push] << (callback || block) if callback || block
    self.netsuite_settings[:before_netsuite_push]
  end
  # Same contract as before_netsuite_push, for the after-push hook.
  def after_netsuite_push(callback = nil, &block)
    self.netsuite_settings[:after_netsuite_push] << (callback || block) if callback || block
    self.netsuite_settings[:after_netsuite_push]
  end
  # Same contract as before_netsuite_push, for the after-pull hook.
  def after_netsuite_pull(callback = nil, &block)
    self.netsuite_settings[:after_netsuite_pull] << (callback || block) if callback || block
    self.netsuite_settings[:after_netsuite_pull]
  end
  # Getter/setter: maps local attribute names to NetSuite field names
  # (an optional :custom_field_list sub-hash holds custom fields).
  def netsuite_field_map(field_mapping = nil)
    if !field_mapping.nil?
      self.netsuite_settings[:netsuite_field_map] = field_mapping
    end
    self.netsuite_settings[:netsuite_field_map]
  end
  # Getter/setter: per-field transformation hints applied on push/pull.
  def netsuite_field_hints(list = nil)
    if !list.nil?
      self.netsuite_settings[:netsuite_field_hints] = list
    end
    self.netsuite_settings[:netsuite_field_hints]
  end
  # TODO persist type for CustomRecordRef
  # Getter/setter: the NetSuite record class backing this model; for
  # custom records the type id is stored alongside it.
  def netsuite_record_class(record_class = nil, custom_record_type_id = nil)
    if !record_class.nil?
      self.netsuite_settings[:netsuite_record_class] = record_class
      self.netsuite_custom_record_type_id = custom_record_type_id
    end
    self.netsuite_settings[:netsuite_record_class]
  end
  # a model-level twin of this method exists so it can be driven off a
  # model-level netsuite_record_class override
  def netsuite_custom_record?
    self.netsuite_record_class == NetSuite::Records::CustomRecord
  end
  # :read, :write_only, :read_write
  # Getter/setter for the sync direction; opts are stashed in
  # netsuite_sync_options.
  def netsuite_sync(flag = nil, opts = {})
    if !flag.nil?
      self.netsuite_sync_options = opts
      self.netsuite_settings[:netsuite_sync] = flag
    end
    self.netsuite_settings[:netsuite_sync]
  end
end
module InstanceMethods
  # Fields the caller wants pushed regardless of dirty tracking.
  attr_writer :netsuite_manual_fields
  def netsuite_manual_fields
    @netsuite_manual_fields ||= []
  end
  # these methods are here for easy model override
  def netsuite_sync_options
    self.class.netsuite_sync_options
  end
  def netsuite_sync
    self.class.netsuite_sync
  end
  def netsuite_record_class
    self.class.netsuite_record_class
  end
  def netsuite_field_map
    self.class.netsuite_field_map
  end
  def netsuite_field_hints
    self.class.netsuite_field_hints
  end
  # assumes netsuite_id field on activerecord
  # True while netsuite_extract_from_record is copying values onto self.
  def netsuite_pulling?
    @netsuite_pulling ||= false
  end
  # True once a pull has completed for this instance.
  def netsuite_pulled?
    @netsuite_pulled ||= false
  end
  # Convenience: should sync jobs for this record run asynchronously?
  # Explicit per-class :sync_mode wins; otherwise fall back to the global
  # NetSuiteRails::Configuration.netsuite_sync_mode.
  def netsuite_async_jobs?
    self.netsuite_sync_options[:sync_mode] == :async || (self.netsuite_sync_options[:sync_mode].blank? && NetSuiteRails::Configuration.netsuite_sync_mode == :async)
  end
  # TODO need to support the opts hash
  # Fetches the linked NetSuite record and copies its values onto self.
  def netsuite_pull(opts = {})
    # TODO need to support the opts hash
    netsuite_extract_from_record(netsuite_pull_record)
  end
  # Fetches the raw NetSuite record for this model's netsuite_id;
  # custom records additionally need their type id.
  def netsuite_pull_record
    # TODO support use_external_id / netsuite_external_id
    if netsuite_custom_record?
      NetSuite::Records::CustomRecord.get(
        internal_id: self.netsuite_id,
        type_id: self.class.netsuite_custom_record_type_id
      )
    else
      self.netsuite_record_class.get(self.netsuite_id)
    end
  end
  # Pushes this record's local state up to NetSuite.
  def netsuite_push(opts = {})
    NetSuiteRails::RecordSync::PushManager.push(self, opts)
  end
  # TODO move this logic into separate service object
  # Copies values from an already-fetched NetSuite record onto this
  # model's attributes using the class-level field map / hints; sets the
  # pulling/pulled flags around the work.
  def netsuite_extract_from_record(netsuite_record)
    Rails.logger.info "NetSuite: Pull #{netsuite_record.class} #{netsuite_record.internal_id}"
    @netsuite_pulling = true
    field_hints = self.netsuite_field_hints
    # custom fields live under :custom_field_list; fold them into one map
    custom_field_list = self.netsuite_field_map[:custom_field_list] || {}
    all_field_list = self.netsuite_field_map.except(:custom_field_list) || {}
    all_field_list.merge!(custom_field_list)
    # TODO should have a helper module for common push/pull methods
    reflection_attributes = NetSuiteRails::RecordSync::PushManager.relationship_attributes_list(self)
    # handle non-collection associations
    association_keys = reflection_attributes.values.reject(&:collection?).map(&:name)
    all_field_list.each do |local_field, netsuite_field|
      is_custom_field = custom_field_list.keys.include?(local_field)
      # a Proc mapping handles the whole transfer itself
      if netsuite_field.is_a?(Proc)
        netsuite_field.call(self, netsuite_record, :pull)
        next
      end
      # NOTE(review): the bare rescue maps any custom-field read error to ""
      field_value = if is_custom_field
        netsuite_record.custom_field_list.send(netsuite_field).value rescue ""
      else
        netsuite_record.send(netsuite_field)
      end
      if field_value.blank?
        # TODO possibly nil out the local value?
        next
      end
      if association_keys.include?(local_field)
        # resolve a NetSuite reference to a local record by netsuite_id
        field_value = reflection_attributes[local_field].
          klass.
          where(netsuite_id: field_value.internal_id).
          first_or_initialize
      elsif is_custom_field
        field_value = NetSuiteRails::RecordSync::PullManager.extract_custom_field_value(field_value)
      else
        # then it's not a custom field
      end
      # TODO should we just check for nil? vs present?
      if field_hints.has_key?(local_field) && field_value.present?
        field_value = NetSuiteRails::Transformations.transform(field_hints[local_field], field_value, :pull)
      end
      self.send(:"#{local_field}=", field_value)
    end
    netsuite_execute_callbacks(self.class.after_netsuite_pull, netsuite_record)
    @netsuite_pulling = false
    @netsuite_pulled = true
  end
  # True when no NetSuite record is linked to this model yet.
  def new_netsuite_record?
    self.netsuite_id.blank?
  end
  def netsuite_custom_record?
    self.netsuite_record_class == NetSuite::Records::CustomRecord
  end
  # TODO this should be protected; it needs to be pushed down to the Push/Pull manager level
  # Runs each callback: Symbols dispatch as method names, anything else
  # is instance_exec'd in this record's context.
  def netsuite_execute_callbacks(list, record)
    list.each do |callback|
      if callback.is_a?(Symbol)
        self.send(callback, record)
      else
        instance_exec(record, &callback)
      end
    end
  end
end
end
end
|
require 'omniauth-oauth2'
require 'active_support/core_ext/string'
module OmniAuth
module Strategies
# OmniAuth OAuth2 strategy for Authic.
# Each customer has their own subdomain, so :subdomain must be supplied
# via the strategy options before the client can be built.
class Authic < OmniAuth::Strategies::OAuth2
  option :name, "authic"
  option :scope, "email"
  option :subdomain, "" # Comes in from config
  option :domain, "authic.com"

  uid { raw_info['id'] }

  info do
    { :name => raw_info['name'], :email => raw_info['email'] }
  end

  extra do
    { 'raw_info' => raw_info }
  end

  # Forward the authic_action request param so the Authic server knows
  # whether to serve the signup or the signin page.
  def request_phase
    options.authorize_params[:authic_action] = request.params["authic_action"]
    super
  end

  # Point the OAuth2 client at the customer's subdomain before building it.
  def client
    raise "You must specify your Authic subdomain in setup i.e. :subdomain => 'mysubdomain'" if options[:subdomain].blank?
    site = "https://#{options[:subdomain]}.#{options[:domain]}"
    options[:client_options][:site] = site
    super
  end

  # The authenticated user's profile as parsed JSON.
  def raw_info
    @raw_info ||= access_token.get("/authic_user_info.json").parsed
  end
end
end
end
Added more use info fields and toggle-able SSL for testing
require 'omniauth-oauth2'
require 'active_support/core_ext/string'
module OmniAuth
module Strategies
class Authic < OmniAuth::Strategies::OAuth2
option :name, "authic"
option :scope, "email"
option :subdomain, "" # Comes in from config
option :domain, "authic.com"
option :ssl, true
uid{ raw_info['id'] }
info do
{
:email => raw_info['email'],
:first_name => raw_info['first_name'],
:last_name => raw_info['last_name'],
:full_name => raw_info['full_name'],
:mobile => raw_info['last_name'],
:phone => raw_info['phone'],
:birth_date => raw_info['birth_date'],
:groups => raw_info['groups'],
:roles => raw_info['roles']
}
end
extra do
{
'raw_info' => raw_info
}
end
def request_phase
# Add authic_action into the params mix. This will let the Authic server know to serve up the signup or signin page
options.authorize_params[:authic_action] = request.params["authic_action"]
super
end
def client
raise "You must specify your Authic subdomain in setup i.e. :subdomain => 'mysubdomain'" if options[:subdomain].blank?
scheme = options[:ssl] ? "https" : "http"
# Make sure we set the site correctly before creating a client
options[:client_options][:site] = "#{scheme}://#{options[:subdomain]}.#{options[:domain]}"
super
end
def raw_info
@raw_info ||= access_token.get("/authic_user_info.json").parsed
end
end
end
end |
require 'omniauth-oauth'
require 'multi_json'
module OmniAuth
module Strategies
# OmniAuth OAuth 1.0 strategy for douban.com.
class Douban < OmniAuth::Strategies::OAuth
  option :name, 'douban'
  option :sign_in, true

  def initialize(*args)
    super
    # endpoints taken from https://github.com/intridea/omniauth/blob/0-3-stable/oa-oauth/lib/omniauth/strategies/oauth/douban.rb#L15-21
    options.client_options = {
      :access_token_path => '/service/auth/access_token',
      :authorize_path => '/service/auth/authorize',
      :realm => 'OmniAuth',
      :request_token_path => '/service/auth/request_token',
      :site => 'http://www.douban.com'
    }
  end

  # Builds the OAuth consumer from the configured key/secret.
  def consumer
    ::OAuth::Consumer.new(options.consumer_key, options.consumer_secret, options.client_options)
  end

  # BUG FIX: Douban returns the user id in the access-token params as
  # :douban_user_id; :id is absent, which left uid nil.
  uid { access_token.params[:douban_user_id] }

  # adapted from https://github.com/intridea/omniauth/blob/0-3-stable/oa-oauth/lib/omniauth/strategies/oauth/douban.rb#L38-53
  info do
    {
      :nickname => raw_info['db:uid']['$t'],
      :name => raw_info['title']['$t'],
      :location => raw_info['location'] ? raw_info['location']['$t'] : nil,
      :image => raw_info['link'].find { |l| l['@rel'] == 'icon' }['@href'],
      :description => raw_info['content']['$t'],
      :urls => {
        'Douban' => raw_info['link'].find { |l| l['@rel'] == 'alternate' }['@href']
      }
    }
  end

  extra do
    { :raw_info => raw_info }
  end

  # Fetches the authenticated user's profile (Atom-flavoured JSON).
  def raw_info
    @raw_info ||= MultiJson.decode(access_token.get('http://api.douban.com/people/%40me?alt=json').body)
  rescue ::Errno::ETIMEDOUT
    raise ::Timeout::Error
  end
end
end
end
fix uid nil problem
require 'omniauth-oauth'
require 'multi_json'
module OmniAuth
module Strategies
# OmniAuth OAuth 1.0 strategy for douban.com.
class Douban < OmniAuth::Strategies::OAuth
  option :name, 'douban'
  option :sign_in, true

  def initialize(*args)
    super
    # endpoints taken from https://github.com/intridea/omniauth/blob/0-3-stable/oa-oauth/lib/omniauth/strategies/oauth/douban.rb#L15-21
    options.client_options = {
      :access_token_path => '/service/auth/access_token',
      :authorize_path => '/service/auth/authorize',
      :realm => 'OmniAuth',
      :request_token_path => '/service/auth/request_token',
      :site => 'http://www.douban.com'
    }
  end

  # Builds the OAuth consumer from the configured key/secret.
  def consumer
    ::OAuth::Consumer.new(options.consumer_key, options.consumer_secret, options.client_options)
  end

  # Douban exposes the user id as :douban_user_id in the token params.
  uid { access_token.params[:douban_user_id] }

  # adapted from https://github.com/intridea/omniauth/blob/0-3-stable/oa-oauth/lib/omniauth/strategies/oauth/douban.rb#L38-53
  info do
    {
      :nickname => raw_info['db:uid']['$t'],
      :name => raw_info['title']['$t'],
      :location => raw_info['location'] ? raw_info['location']['$t'] : nil,
      :image => raw_info['link'].find { |l| l['@rel'] == 'icon' }['@href'],
      :description => raw_info['content']['$t'],
      :urls => {
        'Douban' => raw_info['link'].find { |l| l['@rel'] == 'alternate' }['@href']
      }
    }
  end

  extra do
    { :raw_info => raw_info }
  end

  # Fetches the authenticated user's profile (Atom-flavoured JSON).
  def raw_info
    @raw_info ||= MultiJson.decode(access_token.get('http://api.douban.com/people/%40me?alt=json').body)
  rescue ::Errno::ETIMEDOUT
    raise ::Timeout::Error
  end
end
end
end |
# lots of stuff taken from https://github.com/yzhang/omniauth/commit/eafc5ff8115bcc7d62c461d4774658979dd0a48e
require 'omniauth-oauth2'
module OmniAuth
module Strategies
# OmniAuth OAuth2 strategy for kaixin001.com.
class Kaixin < OmniAuth::Strategies::OAuth2
  option :client_options, {
    :site => 'https://api.kaixin001.com/',
    :authorize_url => '/oauth2/authorize',
    :token_url => '/oauth2/access_token',
    :token_method => :get
  }

  uid { raw_info['id'] }

  info do
    {
      'nickname' => raw_info['login'],
      "uid" => raw_info['login'],
      "gender" => (raw_info['gender'] == '0' ? 'Male' : 'Female'),
      # BUG FIX: this entry had a stray '}' and a missing comma, which
      # made the whole file a syntax error.
      "image" => raw_info['logo50'],
      'name' => raw_info['name'],
      'urls' => {
        'Kaixin' => "http://www.kaixin001.com/"
      }
    }
  end

  # Fetches the current user's profile. (Also dropped the no-op
  # request_phase override that only called super, and the `puts`
  # debug output of the raw response.)
  def raw_info
    @raw_info ||= MultiJson.decode(access_token.get("/users/me.json?access_token=#{@access_token.token}").body)
  rescue ::Errno::ETIMEDOUT
    raise ::Timeout::Error
  end
end
end
end
# require 'omniauth/strategies/oauth2'
#
# module OmniAuth
# module Strategies
#
# # Authenticate to Kaixin001 utilizing OAuth 2.0 and retrieve
# # basic user information.
# #
# # OAuth 2.0 - Kaixin001 Documentation
# # http://wiki.open.kaixin001.com/
# #
# # Apply kaixin001 key here:
# # http://www.kaixin001.com/platform/rapp/rapp.php
# # adapted from https://github.com/yzhang/omniauth/commit/eafc5ff8115bcc7d62c461d4774658979dd0a48e
#
# class Kaixin < OmniAuth::Strategies::OAuth2
# def initialize(*args)
# super
# # taken from https://github.com/intridea/omniauth/blob/0-3-stable/oa-oauth/lib/omniauth/strategies/oauth/tqq.rb#L15-24
# puts options.inspect
# options.client_options = {
# :site => 'https://api.kaixin001.com/',
# :authorize_url => '/oauth2/authorize',
# :token_url => '/oauth2/access_token',
# :token_method => :get
# }
# end
#
# info do
# {
# :uid => raw_info['uid'],
# :user_info => raw_info['data']['name'],
# :location => raw_info['data']['location'],
# :image => raw_info['data']['head'],
# :description => raw_info['description'],
# :extra => {
# 'user_hash' => user_data,
# }
# }
# end
#
# extra do
# { :raw_info => raw_info }
# end
#
# def callback_phase
#
# if request.params['error'] || request.params['error_reason']
# raise CallbackError.new(request.params['error'], request.params['error_description'] || request.params['error_reason'], request.params['error_uri'])
# end
#
# self.access_token = build_access_token
# self.access_token = client.auth_code.refresh_token(access_token.refresh_token) if access_token.expired?
#
# super
# rescue ::OAuth2::Error, CallbackError => e
# fail!(:invalid_credentials, e)
# rescue ::MultiJson::DecodeError => e
# fail!(:invalid_response, e)
# rescue ::Timeout::Error, ::Errno::ETIMEDOUT => e
# fail!(:timeout, e)
# end
#
# credentials do
# prune!({
# 'expires' => access_token.expires?,
# 'expires_at' => access_token.expires_at
# })
# end
#
# def user_info
# {
# 'uid' => raw_info['uid'],
# 'name' => raw_info['name'],
# 'gender' => raw_info['gender'],
# }
# end
#
# def raw_info
# @raw_info ||= MultiJson.decode(access_token.get("/users/me.json?access_token=#{@access_token.token}").body)
# rescue ::Errno::ETIMEDOUT
# raise ::Timeout::Error
# end
# end
# end
# end
oops. forgot a comma
# lots of stuff taken from https://github.com/yzhang/omniauth/commit/eafc5ff8115bcc7d62c461d4774658979dd0a48e
require 'omniauth-oauth2'
module OmniAuth
module Strategies
# OmniAuth OAuth2 strategy for kaixin001.com.
class Kaixin < OmniAuth::Strategies::OAuth2
  option :client_options, {
    :site => 'https://api.kaixin001.com/',
    :authorize_url => '/oauth2/authorize',
    :token_url => '/oauth2/access_token',
    :token_method => :get
  }

  uid { raw_info['id'] }

  info do
    {
      'nickname' => raw_info['login'],
      "uid" => raw_info['login'],
      "gender" => (raw_info['gender'] == '0' ? 'Male' : 'Female'),
      "image" => raw_info['logo50'],
      'name' => raw_info['name'],
      'urls' => {
        'Kaixin' => "http://www.kaixin001.com/"
      }
    }
  end

  # Fetches the current user's profile.
  # FIX: removed the leftover `puts @raw_info.inspect` debug output and
  # the no-op request_phase override that only called super.
  def raw_info
    @raw_info ||= MultiJson.decode(access_token.get("/users/me.json?access_token=#{@access_token.token}").body)
  rescue ::Errno::ETIMEDOUT
    raise ::Timeout::Error
  end
end
end
end
# require 'omniauth/strategies/oauth2'
#
# module OmniAuth
# module Strategies
#
# # Authenticate to Kaixin001 utilizing OAuth 2.0 and retrieve
# # basic user information.
# #
# # OAuth 2.0 - Kaixin001 Documentation
# # http://wiki.open.kaixin001.com/
# #
# # Apply kaixin001 key here:
# # http://www.kaixin001.com/platform/rapp/rapp.php
# # adapted from https://github.com/yzhang/omniauth/commit/eafc5ff8115bcc7d62c461d4774658979dd0a48e
#
# class Kaixin < OmniAuth::Strategies::OAuth2
# def initialize(*args)
# super
# # taken from https://github.com/intridea/omniauth/blob/0-3-stable/oa-oauth/lib/omniauth/strategies/oauth/tqq.rb#L15-24
# puts options.inspect
# options.client_options = {
# :site => 'https://api.kaixin001.com/',
# :authorize_url => '/oauth2/authorize',
# :token_url => '/oauth2/access_token',
# :token_method => :get
# }
# end
#
# info do
# {
# :uid => raw_info['uid'],
# :user_info => raw_info['data']['name'],
# :location => raw_info['data']['location'],
# :image => raw_info['data']['head'],
# :description => raw_info['description'],
# :extra => {
# 'user_hash' => user_data,
# }
# }
# end
#
# extra do
# { :raw_info => raw_info }
# end
#
# def callback_phase
#
# if request.params['error'] || request.params['error_reason']
# raise CallbackError.new(request.params['error'], request.params['error_description'] || request.params['error_reason'], request.params['error_uri'])
# end
#
# self.access_token = build_access_token
# self.access_token = client.auth_code.refresh_token(access_token.refresh_token) if access_token.expired?
#
# super
# rescue ::OAuth2::Error, CallbackError => e
# fail!(:invalid_credentials, e)
# rescue ::MultiJson::DecodeError => e
# fail!(:invalid_response, e)
# rescue ::Timeout::Error, ::Errno::ETIMEDOUT => e
# fail!(:timeout, e)
# end
#
# credentials do
# prune!({
# 'expires' => access_token.expires?,
# 'expires_at' => access_token.expires_at
# })
# end
#
# def user_info
# {
# 'uid' => raw_info['uid'],
# 'name' => raw_info['name'],
# 'gender' => raw_info['gender'],
# }
# end
#
# def raw_info
# @raw_info ||= MultiJson.decode(access_token.get("/users/me.json?access_token=#{@access_token.token}").body)
# rescue ::Errno::ETIMEDOUT
# raise ::Timeout::Error
# end
# end
# end
# end
|
require 'omniauth-oauth'
require 'multi_json'
module OmniAuth
module Strategies
# OmniAuth OAuth strategy for VATSIM SSO.
class Vatsim < OmniAuth::Strategies::OAuth
  option :name, 'vatsim'
  option :client_options, {
    site: 'http://sso.hardern.net/server', # default to demo site
    authorize_path: '/auth/pre_login/?',
    request_token_path: '/api/login_token',
    access_token_path: '/api/login_return',
  }

  uid do
    parse_callback['id']
  end

  info do
    {
      id: parse_callback['id'],
      name_first: parse_callback['name_first'],
      name_last: parse_callback['name_last'],
      rating: parse_callback['rating'],
      pilot_rating: parse_callback['pilot_rating'],
      email: parse_callback['email'],
      experience: parse_callback['experience'],
      reg_date: parse_callback['reg_date'],
      country: parse_callback['country'],
      region: parse_callback['region'],
      division: parse_callback['division'],
      subdivision: parse_callback['subdivision']
    }
  end

  # Parse the callback for user information
  def parse_callback
    MultiJson.decode(self.extra['access_token'].params.keys[1])['user']
  end

  # Customize the OAuth request phase to handle VATSIM SSO
  def request_phase
    request_token = consumer.get_request_token({oauth_callback: callback_url}, options.request_params) do |response_body|
      # Debug the response body
      log :debug, response_body.inspect
      # Log errors
      if MultiJson.decode(response_body)['request']['result'] == 'fail'
        # BUG FIX: was ['request']['result']['message'] — ['result'] is the
        # String 'fail', so indexing it with ['message'] always logged nil.
        log :error, MultiJson.decode(response_body)['request']['message']
      end
      # symbolize string keys returned by VATSIM SSO
      MultiJson.decode(response_body)['token'].symbolize_keys
    end
    session['oauth'] ||= {}
    session['oauth'][name.to_s] = {
      'callback_confirmed': request_token.callback_confirmed?,
      'request_token': request_token.token,
      'request_secret': request_token.secret
    }
    if request_token.callback_confirmed?
      redirect request_token.authorize_url(options[:authorize_params])
    else
      redirect request_token.authorize_url(options[:authorize_params].merge(oauth_callback: callback_url))
    end
  rescue ::Timeout::Error => e
    fail!(:timeout, e)
  rescue ::Net::HTTPFatalError, ::OpenSSL::SSL::SSLError => e
    fail!(:service_unavailable, e)
  end # def request_phase
end # class Vatsim
end # module Strategies
end # module OmniAuth
Amends previous commit for the correct position of the failure message returned in the response body
require 'omniauth-oauth'
require 'multi_json'
module OmniAuth
module Strategies
# OmniAuth OAuth strategy for VATSIM SSO.
class Vatsim < OmniAuth::Strategies::OAuth
  option :name, 'vatsim'
  option :client_options, {
    site: 'http://sso.hardern.net/server', # default to demo site
    authorize_path: '/auth/pre_login/?',
    request_token_path: '/api/login_token',
    access_token_path: '/api/login_return',
  }

  uid do
    parse_callback['id']
  end

  info do
    {
      id: parse_callback['id'],
      name_first: parse_callback['name_first'],
      name_last: parse_callback['name_last'],
      rating: parse_callback['rating'],
      pilot_rating: parse_callback['pilot_rating'],
      email: parse_callback['email'],
      experience: parse_callback['experience'],
      reg_date: parse_callback['reg_date'],
      country: parse_callback['country'],
      region: parse_callback['region'],
      division: parse_callback['division'],
      subdivision: parse_callback['subdivision']
    }
  end

  # Parse the callback for user information
  def parse_callback
    MultiJson.decode(self.extra['access_token'].params.keys[1])['user']
  end

  # Customize the OAuth request phase to handle VATSIM SSO
  def request_phase
    request_token = consumer.get_request_token({oauth_callback: callback_url}, options.request_params) do |response_body|
      # Debug the response body
      log :debug, response_body.inspect
      # Log errors
      if MultiJson.decode(response_body)['request']['result'] == 'fail'
        log :error, MultiJson.decode(response_body)['request']['message']
      end
      # symbolize string keys returned by VATSIM SSO
      MultiJson.decode(response_body)['token'].symbolize_keys
    end
    session['oauth'] ||= {}
    # BUG FIX: hash-rocket string keys on purpose — the `'key':` shorthand
    # creates Symbol keys, but omniauth-oauth's callback phase looks the
    # token up with string keys ('request_token' / 'request_secret').
    session['oauth'][name.to_s] = {
      'callback_confirmed' => request_token.callback_confirmed?,
      'request_token' => request_token.token,
      'request_secret' => request_token.secret
    }
    if request_token.callback_confirmed?
      redirect request_token.authorize_url(options[:authorize_params])
    else
      redirect request_token.authorize_url(options[:authorize_params].merge(oauth_callback: callback_url))
    end
  rescue ::Timeout::Error => e
    fail!(:timeout, e)
  rescue ::Net::HTTPFatalError, ::OpenSSL::SSL::SSLError => e
    fail!(:service_unavailable, e)
  end # def request_phase
end # class Vatsim
end # module Strategies
end # module OmniAuth
|
module PageObject
  module PagePopulator
    #
    # Populates every matched TextField, TextArea, SelectList, FileField,
    # Checkbox and Radio Button on the page from the given Hash. Elements
    # are located by matching each Hash key against the name used when the
    # element was declared on the page.
    #
    # Checkbox and Radio Button values must be true or false.
    #
    # @example
    #   class ExamplePage
    #     include PageObject
    #
    #     text_field(:username, :id => 'username_id')
    #     checkbox(:active, :id => 'active_id')
    #   end
    #
    #   ...
    #
    #   @browser = Watir::Browser.new :firefox
    #   example_page = ExamplePage.new(@browser)
    #   example_page.populate_page_with :username => 'a name', :active => true
    #
    # @param data [Hash] values to write into the page. Keys may be
    #   strings or symbols. Values must be strings for TextField,
    #   TextArea, SelectList and FileField, and true/false for a
    #   Checkbox or RadioButton.
    #
    def populate_page_with(data)
      data.each do |field, value|
        populate_checkbox(field, value) if is_checkbox?(field) && is_enabled?(field)
        populate_radiobuttongroup(field, value) if is_radiobuttongroup?(field)
        populate_radiobutton(field, value) if is_radiobutton?(field) && is_enabled?(field)
        populate_select_list(field, value) if is_select_list?(field)
        populate_text(field, value) if is_text?(field) && is_enabled?(field)
      end
    end

    private

    def populate_text(field, value)
      send("#{field}=", value)
    end

    def populate_checkbox(field, checked)
      checked ? send("check_#{field}") : send("uncheck_#{field}")
    end

    def populate_radiobutton(field, selected)
      send("select_#{field}") if selected
    end

    def populate_radiobuttongroup(field, value)
      send("select_#{field}", value)
    end

    def populate_select_list(field, value)
      send("#{field}=", value)
    end

    # a select list also answers to "#{field}=", so rule it out first
    def is_text?(field)
      !is_select_list?(field) && respond_to?("#{field}=".to_sym)
    end

    def is_checkbox?(field)
      respond_to?("check_#{field}".to_sym)
    end

    def is_radiobutton?(field)
      respond_to?("select_#{field}".to_sym)
    end

    def is_radiobuttongroup?(field)
      is_radiobutton?(field) && respond_to?("#{field}_values")
    end

    def is_select_list?(field)
      respond_to?("#{field}_options".to_sym)
    end

    # radio button groups have no single backing element, so they are
    # treated as never individually enabled here
    def is_enabled?(field)
      return false if is_radiobuttongroup?(field)
      element = send("#{field}_element")
      return true if element.tag_name == "textarea"
      element.enabled? && element.visible?
    end
  end
end
attempt to improve page populator
module PageObject
  module PagePopulator
    #
    # This method will populate all matched page TextFields,
    # TextAreas, SelectLists, FileFields, Checkboxes, and Radio Buttons from the
    # Hash passed as an argument. The way it finds an element is by
    # matching the Hash key to the name you provided when declaring
    # the element on your page.
    #
    # Checkbox and Radio Button values must be true or false.
    #
    # @example
    #   class ExamplePage
    #     include PageObject
    #
    #     text_field(:username, :id => 'username_id')
    #     checkbox(:active, :id => 'active_id')
    #   end
    #
    #   ...
    #
    #   @browser = Watir::Browser.new :firefox
    #   example_page = ExamplePage.new(@browser)
    #   example_page.populate_page_with :username => 'a name', :active => true
    #
    # @param data [Hash] the data to use to populate this page. The key
    #   can be either a string or a symbol. The value must be a string
    #   for TextField, TextArea, SelectList, and FileField and must be true or
    #   false for a Checkbox or RadioButton.
    #
    def populate_page_with(data)
      data.each do |key, value|
        # BUG FIX: removed `debugger if $VERBOSE` — Kernel#debugger is
        # undefined unless a debugger library is loaded, so running with
        # warnings enabled crashed page population.
        element = self.send("#{key}_element")
        # dispatch on the element's class rather than probing respond_to?
        case element
        when PageObject::Elements::TextArea, PageObject::Elements::TextField
          populate_text(key, value) if is_enabled?(key)
        when PageObject::Elements::RadioButton
          populate_radiobutton(key, value) if is_enabled?(key)
        when PageObject::Elements::CheckBox
          populate_checkbox(key, value) if is_enabled?(key)
        when PageObject::Elements::SelectList
          populate_select_list(key, value)
        when Array
          populate_radiobuttongroup(key, value) if is_radiobuttongroup?(key)
        else
          # NOTE(review): unknown element types are only reported, not
          # populated — confirm this fallback is intended
          puts "FIX ME: #{element}, class #{element.class}"
        end
      end
    end

    private

    def populate_text(key, value)
      self.send "#{key}=", value
    end

    def populate_checkbox(key, value)
      return self.send "check_#{key}" if value
      return self.send "uncheck_#{key}"
    end

    def populate_radiobutton(key, value)
      return self.send "select_#{key}" if value
    end

    def populate_radiobuttongroup(key, value)
      return self.send("select_#{key}", value)
    end

    def populate_select_list(key, value)
      self.send "#{key}=", value
    end

    def is_radiobuttongroup?(key)
      respond_to?("#{key}_values")
    end

    # radio button groups have no single backing element, so they are
    # treated as never individually enabled here
    def is_enabled?(key)
      return false if is_radiobuttongroup?(key)
      return true if (self.send "#{key}_element").tag_name == "textarea"
      element = self.send("#{key}_element")
      element.enabled? and element.visible?
    end
  end
end
|
# AcrossLite is a file format used by the New York Times to distribute crosswords.
#
# Binary format: http://code.google.com/p/puz/
# Text format: http://www.litsoft.com/across/docs/AcrossTextFormat.pdf
#
# provides:
# AcrossLiteBinary : read, write
# AcrossLiteText : read, write
require 'ostruct'
module Pangrid
GRID_CHARS = {:black => '.', :null => '.'}
# Rolling 16-bit checksum used by the binary AcrossLite (.puz) format:
# for each byte, rotate the running sum right by one bit (within 16 bits),
# add the byte, and keep the result in 0..0xffff.
class Checksum
  attr_accessor :sum

  # Convenience: checksum of a whole string from a zero seed.
  def self.of_string(s)
    c = new(0)
    c.add_string(s)
    c.sum
  end

  def initialize(seed)
    @sum = seed
  end

  # Folds a single byte into the checksum.
  def add_char(b)
    low = sum & 0x0001
    @sum = sum >> 1
    @sum = sum | 0x8000 if low == 1
    @sum = (sum + b) & 0xffff
  end

  # Folds every byte of +s+ into the checksum.
  # (Was `s.bytes.map { ... }` — map built a throwaway result array for a
  # purely side-effecting loop.)
  def add_string(s)
    s.each_byte { |b| add_char(b) }
  end

  # Like add_string but appends the trailing NUL that the .puz format
  # includes after non-empty strings; empty strings contribute nothing.
  def add_string_0(s)
    add_string(s + "\0") unless s.empty?
  end
end
# Helpers shared by the AcrossLite plugins for converting between packed
# grid strings and Cell matrices.
module AcrossLiteUtils
  # String -> Cell[][]
  # Expands a packed solution string into rows of Cells; '.' marks a
  # black square.
  def unpack_solution(xw, s)
    cells = s.chars.map do |ch|
      Cell.new(:solution => ch == '.' ? :black : ch)
    end
    cells.each_slice(xw.width).to_a
  end

  # {xw | solution = Cell[][]} -> String
  def pack_solution(xw)
    # acrosslite doesn't support non-rectangular grids, so null squares
    # are written as black ('.') as well
    xw.to_array(GRID_CHARS).map(&:join).join
  end

  # {xw | solution = Cell[][]} -> String
  def empty_fill(xw)
    # converting from another format we typically have no fill info (fill
    # is internal to the acrosslite player), so emit an all-'-' grid
    xw.to_array(GRID_CHARS) { |c| '-' }.map(&:join).join
  end
end
# Binary format
class AcrossLiteBinary < Plugin
include AcrossLiteUtils
# crossword, checksums
attr_accessor :xw, :cs
HEADER_FORMAT = "v A12 v V2 A4 v2 A12 c2 v3"
HEADER_CHECKSUM_FORMAT = "c2 v3"
EXT_HEADER_FORMAT = "A4 v2"
EXTENSIONS = %w(LTIM GRBS RTBL GEXT)
FILE_MAGIC = "ACROSS&DOWN\0"
# Fresh crossword container plus a scratch struct for the checksums
# computed while reading/writing.
def initialize
  @xw = XWord.new
  @xw.extensions = []
  @cs = OpenStruct.new
end
# Parses a binary AcrossLite (.puz) file into the XWord held in @xw.
#
# data - raw file bytes (String); reinterpreted as ISO-8859-1, the
#        encoding the .puz format uses.
#
# Returns the populated XWord. Fails (via check) on an unrecognised
# file, an unknown extension section, or a checksum mismatch.
def read(data)
  s = data.force_encoding("ISO-8859-1")
  # locate the magic string; the header starts 2 bytes earlier, where the
  # global checksum (first "v" of HEADER_FORMAT) is stored
  i = s.index(FILE_MAGIC)
  check("Could not recognise AcrossLite binary file") { i }
  # read the header (0x34 bytes laid out per HEADER_FORMAT)
  h_start, h_end = i - 2, i - 2 + 0x34
  header = s[h_start .. h_end]
  cs.global, _, cs.cib, cs.masked_low, cs.masked_high,
  xw.version, _, cs.scrambled, _,
  xw.width, xw.height, xw.n_clues, xw.puzzle_type, xw.scrambled_state =
  header.unpack(HEADER_FORMAT)
  # solution and fill = blocks of w*h bytes each
  size = xw.width * xw.height
  xw.solution = unpack_solution xw, s[h_end, size]
  xw.fill = s[h_end + size, size]
  s = s[h_end + 2 * size .. -1]
  # title, author, copyright, clues * n, notes = zero-terminated strings
  # (the split's final element keeps the remainder for the extension loop)
  xw.title, xw.author, xw.copyright, *xw.clues, xw.notes, s =
  s.split("\0", xw.n_clues + 5)
  # extensions: 8-byte header + len bytes data + \0
  while (s.length > 8) do
    e = OpenStruct.new
    e.section, e.len, e.checksum = s.unpack(EXT_HEADER_FORMAT)
    check("Unrecognised extension #{e.section}") { EXTENSIONS.include? e.section }
    size = 8 + e.len + 1
    # stop quietly on a truncated trailing section
    break if s.length < size
    e.data = s[8 ... size]
    # dispatches to read_ltim / read_grbs / read_rtbl / read_gext
    self.send(:"read_#{e.section.downcase}", e)
    xw.extensions << e
    s = s[size .. -1]
  end
  # verify checksums
  check("Failed checksum") { checksums == cs }
  process_extensions
  unpack_clues
  xw
end
# Serialises the XWord +xw+ into the binary AcrossLite (.puz) layout:
# header, solution grid, fill grid, zero-terminated string block, then
# any extension sections. Returns the file contents as an ISO-8859-1
# String.
def write(xw)
  @xw = xw
  # fill in some fields that might not be present (checksums needs this)
  pack_clues
  xw.n_clues = xw.clues.length
  xw.fill ||= empty_fill(xw)
  xw.puzzle_type ||= 1
  xw.scrambled_state ||= 0
  xw.version = "1.3"
  xw.notes ||= ""
  xw.extensions ||= []
  # extensions
  xw.encode_rebus!
  if not xw.rebus.empty?
    # GRBS: per-square rebus index grid (0 = no rebus in that square)
    e = OpenStruct.new
    e.section = "GRBS"
    e.grid = xw.to_array({:black => 0, :null => 0}) {|s|
      s.rebus? ? s.solution.symbol.to_i : 0
    }.flatten
    xw.extensions << e
    # RTBL: table mapping rebus index -> [long answer, short form]
    e = OpenStruct.new
    e.section = "RTBL"
    e.rebus = {}
    xw.rebus.each do |long, (k, short)|
      e.rebus[k] = [long, short]
    end
    xw.extensions << e
  end
  # calculate checksums
  @cs = checksums
  h = [cs.global, FILE_MAGIC, cs.cib, cs.masked_low, cs.masked_high,
  xw.version + "\0", 0, cs.scrambled, "\0" * 12,
  xw.width, xw.height, xw.n_clues, xw.puzzle_type, xw.scrambled_state]
  header = h.pack(HEADER_FORMAT)
  strings = [xw.title, xw.author, xw.copyright] + xw.clues + [xw.notes]
  strings = strings.map {|x| x + "\0"}.join
  # everything is forced to ISO-8859-1 so the pieces concatenate cleanly
  [header, pack_solution(xw), xw.fill, strings, write_extensions].map {|x|
  x.force_encoding("ISO-8859-1")
  }.join
end
private
# sort incoming clues in xw.clues -> across and down
def unpack_clues
across, down = xw.number
clues = across.map {|x| [x, :a]} + down.map {|x| [x, :d]}
clues.sort!
xw.across_clues = []
xw.down_clues = []
clues.zip(xw.clues).each do |(n, dir), clue|
if dir == :a
xw.across_clues << clue
else
xw.down_clues << clue
end
end
end
# combine across and down clues -> xw.clues
def pack_clues
across, down = xw.number
clues = across.map {|x| [x, :a]} + down.map {|x| [x, :d]}
clues.sort!
ac, dn = xw.across_clues.dup, xw.down_clues.dup
xw.clues = []
clues.each do |n, dir|
if dir == :a
xw.clues << ac.shift
else
xw.clues << dn.shift
end
end
check("Extra across clue") { ac.empty? }
check("Extra down clue") { dn.empty? }
end
def get_extension(s)
return nil unless xw.extensions
xw.extensions.find {|e| e.section == s}
end
def process_extensions
# record these for file inspection, though they're unlikely to be useful
if (ltim = get_extension("LTIM"))
xw.time_elapsed = ltim.elapsed
xw.paused
end
# we need both grbs and rtbl
grbs, rtbl = get_extension("GRBS"), get_extension("RTBL")
if grbs and rtbl
grbs.grid.each_with_index do |n, i|
if n > 0 and (v = rtbl.rebus[n])
x, y = i % xw.width, i / xw.width
cell = xw.solution[y][x]
cell.solution = Rebus.new(v[0])
end
end
end
end
def read_ltim(e)
m = e.data.match /^(\d+),(\d+)\0$/
check("Could not read extension LTIM") { m }
e.elapsed = m[1].to_i
e.stopped = m[2] == "1"
end
def write_ltim(e)
e.elapsed.to_s + "," + (e.stopped ? "1" : "0") + "\0"
end
def read_rtbl(e)
rx = /(([\d ]\d):(\w+);)/
m = e.data.match /^#{rx}*\0$/
check("Could not read extension RTBL") { m }
e.rebus = {}
e.data.scan(rx).each {|_, k, v|
e.rebus[k.to_i] = [v, '-']
}
end
def write_rtbl(e)
e.rebus.keys.sort.map {|x|
x.to_s.rjust(2) + ":" + e.rebus[x][0] + ";"
}.join
end
def read_gext(e)
e.grid = e.data.bytes
end
def write_gext(e)
e.grid.map(&:chr).join
end
def read_grbs(e)
e.grid = e.data.bytes.map {|b| b == 0 ? 0 : b - 1 }
end
def write_grbs(e)
e.grid.map {|x| x == 0 ? 0 : x + 1}.map(&:chr).join
end
def write_extensions
xw.extensions.map {|e|
e.data = self.send(:"write_#{e.section.downcase}", e)
e.len = e.data.length
e.data += "\0"
e.checksum = Checksum.of_string(e.data)
[e.section, e.len, e.checksum].pack(EXT_HEADER_FORMAT) +
e.data
}.join
end
# checksums
def text_checksum(seed)
c = Checksum.new(seed)
c.add_string_0 xw.title
c.add_string_0 xw.author
c.add_string_0 xw.copyright
xw.clues.each {|cl| c.add_string cl}
if (xw.version == '1.3')
c.add_string_0 xw.notes
end
c.sum
end
def header_checksum
h = [xw.width, xw.height, xw.n_clues, xw.puzzle_type, xw.scrambled_state]
Checksum.of_string h.pack(HEADER_CHECKSUM_FORMAT)
end
def global_checksum
c = Checksum.new header_checksum
c.add_string pack_solution(xw)
c.add_string xw.fill
text_checksum c.sum
end
def magic_checksums
mask = "ICHEATED".bytes
sums = [
text_checksum(0),
Checksum.of_string(xw.fill),
Checksum.of_string(pack_solution(xw)),
header_checksum
]
l, h = 0, 0
sums.each_with_index do |sum, i|
l = (l << 8) | (mask[3 - i] ^ (sum & 0xff))
h = (h << 8) | (mask[7 - i] ^ (sum >> 8))
end
[l, h]
end
def checksums
c = OpenStruct.new
c.masked_low, c.masked_high = magic_checksums
c.cib = header_checksum
c.global = global_checksum
c.scrambled = 0
c
end
end
# Text format
class AcrossLiteText < Plugin
include AcrossLiteUtils
attr_accessor :xw, :rebus
def initialize
@xw = XWord.new
end
def read(data)
s = data.each_line.map(&:strip)
# first line must be <ACROSS PUZZLE> or <ACROSS PUZZLE V2>
xw.version = { "<ACROSS PUZZLE>" => 1, "<ACROSS PUZZLE V2>" => 2 }[s.shift]
check("Could not recognise Across Lite text file") { !xw.version.nil? }
header, section = "START", []
s.each do |line|
if line =~ /^<(.*)>/
process_section header, section
header = $1
section = []
else
section << line
end
end
process_section header, section
xw
end
def write(xw)
@xw = xw
# scan the grid for rebus squares and replace them with lookup keys
xw.encode_rebus!
sections = [
['TITLE', [xw.title]],
['AUTHOR', [xw.author]],
['COPYRIGHT', [xw.copyright]],
['SIZE', ["#{xw.height}x#{xw.width}"]],
['GRID', write_grid],
['REBUS', write_rebus],
['ACROSS', xw.across_clues],
['DOWN', xw.down_clues],
['NOTEPAD', xw.notes.to_s.split("\n")]
]
out = ["<ACROSS PUZZLE V2>"]
sections.each do |h, s|
next if s.nil? || s.empty?
out << "<#{h}>"
s.each {|l| out << " #{l}"}
end
out.join("\n") + "\n"
end
private
def process_section(header, section)
case header
when "START"
return
when "TITLE", "AUTHOR", "COPYRIGHT"
check { section.length == 1 }
xw[header.downcase] = section[0]
when "NOTEPAD"
xw.notes = section.join("\n")
when "SIZE"
check { section.length == 1 && section[0] =~ /^\d+x\d+/ }
xw.height, xw.width = section[0].split('x').map(&:to_i)
when "GRID"
check { xw.width && xw.height }
check { section.length == xw.height }
check { section.all? {|line| line.length == xw.width } }
xw.solution = unpack_solution xw, section.join
when "REBUS"
check { section.length > 0 }
check("Text format v1 does not support <REBUS>") {xw.version == 2}
# flag list (currently MARK or nothing)
xw.mark = section[0] == "MARK;"
section.shift if xw.mark
section.each do |line|
check { line =~ /^.+:.+:.$/ }
sym, long, short = line.split(':')
xw.each_cell do |c|
if c.solution == sym
c.solution = Rebus.new(long, short)
end
end
end
xw.encode_rebus!
when "ACROSS"
xw.across_clues = section
when "DOWN"
xw.down_clues = section
else
raise PuzzleFormatError, "Unrecognised header #{header}"
end
end
def write_grid
xw.to_array(GRID_CHARS).map(&:join)
end
def write_rebus
out = []
out << "MARK;" if xw.mark
xw.rebus.keys.sort.each do |long|
key, short = xw.rebus[long]
out << "#{key}:#{long}:#{short}"
end
out
end
end
end # module Pangrid
Better AcrossLite output support for empty grids and missing clues.
# AcrossLite is a file format used by the New York Times to distribute crosswords.
#
# Binary format: http://code.google.com/p/puz/
# Text format: http://www.litsoft.com/across/docs/AcrossTextFormat.pdf
#
# provides:
# AcrossLiteBinary : read, write
# AcrossLiteText : read, write
require 'ostruct'
module Pangrid
# Character maps used when flattening a grid to text: GRID_CHARS for the
# solution grid (missing/null squares become '?'), FILL_CHARS for the
# player-fill grid (missing squares become '-', AcrossLite's empty marker).
GRID_CHARS = {:black => '.', :null => '?'}
FILL_CHARS = {:black => '.', :null => '-'}
# Rotate-and-add 16-bit checksum used by the AcrossLite binary format.
class Checksum
  attr_accessor :sum

  # Convenience: checksum of a single string, starting from a zero seed.
  def self.of_string s
    checksum = new(0)
    checksum.add_string(s)
    checksum.sum
  end

  def initialize(seed)
    @sum = seed
  end

  # Rotate the 16-bit sum right by one bit, then add the byte (mod 2**16).
  def add_char(b)
    carry = @sum & 0x0001
    rotated = @sum >> 1
    rotated |= 0x8000 unless carry.zero?
    @sum = (rotated + b) & 0xffff
  end

  # Feed every byte of +s+ into the checksum.
  def add_string(s)
    s.bytes.map {|byte| add_char(byte)}
  end

  # Feed +s+ followed by a NUL terminator; empty strings are skipped entirely.
  def add_string_0(s)
    add_string(s + "\0") unless s.empty?
  end
end
# Grid (de)serialization helpers shared by the binary and text formats.
module AcrossLiteUtils
  # String -> Cell[][]
  # Expand a flat width*height character string into rows of Cells;
  # '.' marks a black square, any other character is a solution letter.
  def unpack_solution(xw, s)
    s.each_char.map {|c|
      Cell.new(:solution => c == '.' ? :black : c)
    }.each_slice(xw.width).to_a
  end
  # {xw | solution = Cell[][]} -> String
  def pack_solution(xw)
    # acrosslite doesn't support non-rectangular grids, so map null squares to
    # black too
    # NOTE(review): GRID_CHARS actually maps :null to '?', not to the black
    # square character -- confirm whether the comment or the constant is right.
    xw.to_array(GRID_CHARS).map(&:join).join
  end
  # {xw | solution = Cell[][]} -> String
  # Build an all-empty fill grid ('-' everywhere except black squares).
  def empty_fill(xw)
    # when converting from another format -> binary we won't typically have fill
    # information, since that is an internal property of the acrosslite player
    grid = xw.to_array(FILL_CHARS) {|c| '-'}
    grid.map(&:join).join
  end
end
# Binary format
class AcrossLiteBinary < Plugin
include AcrossLiteUtils
# crossword, checksums
attr_accessor :xw, :cs
HEADER_FORMAT = "v A12 v V2 A4 v2 A12 c2 v3"
HEADER_CHECKSUM_FORMAT = "c2 v3"
EXT_HEADER_FORMAT = "A4 v2"
EXTENSIONS = %w(LTIM GRBS RTBL GEXT)
FILE_MAGIC = "ACROSS&DOWN\0"
def initialize
@xw = XWord.new
@cs = OpenStruct.new
@xw.extensions = []
end
# Parse a binary AcrossLite (.puz) byte string into @xw and return it.
# Layout: a 0x34-byte header starting 2 bytes before the magic string,
# solution + fill grids of width*height bytes each, NUL-terminated strings
# (title, author, copyright, n_clues clues, notes), then optional extension
# sections. check() raises on a missing magic string, an unknown extension
# or a checksum mismatch.
def read(data)
  # byte-oriented format: force a single-byte encoding so indexing is per byte
  s = data.force_encoding("ISO-8859-1")
  i = s.index(FILE_MAGIC)
  check("Could not recognise AcrossLite binary file") { i }
  # read the header
  h_start, h_end = i - 2, i - 2 + 0x34
  header = s[h_start .. h_end]
  cs.global, _, cs.cib, cs.masked_low, cs.masked_high,
    xw.version, _, cs.scrambled, _,
    xw.width, xw.height, xw.n_clues, xw.puzzle_type, xw.scrambled_state =
    header.unpack(HEADER_FORMAT)
  # solution and fill = blocks of w*h bytes each
  size = xw.width * xw.height
  xw.solution = unpack_solution xw, s[h_end, size]
  xw.fill = s[h_end + size, size]
  s = s[h_end + 2 * size .. -1]
  # title, author, copyright, clues * n, notes = zero-terminated strings
  # (the split limit leaves everything after the last string in s for the
  # extension loop below)
  xw.title, xw.author, xw.copyright, *xw.clues, xw.notes, s =
    s.split("\0", xw.n_clues + 5)
  # extensions: 8-byte header + len bytes data + \0
  while (s.length > 8) do
    e = OpenStruct.new
    e.section, e.len, e.checksum = s.unpack(EXT_HEADER_FORMAT)
    check("Unrecognised extension #{e.section}") { EXTENSIONS.include? e.section }
    size = 8 + e.len + 1
    break if s.length < size
    e.data = s[8 ... size]
    # dispatches to read_ltim / read_grbs / read_rtbl / read_gext
    self.send(:"read_#{e.section.downcase}", e)
    xw.extensions << e
    s = s[size .. -1]
  end
  # verify checksums
  check("Failed checksum") { checksums == cs }
  process_extensions
  unpack_clues
  xw
end
# Serialize +xw+ to a binary .puz string (written as format version 1.3).
# Fields the source format may not provide (fill grid, metadata strings,
# flags) are defaulted first so string packing and checksum calculation
# never see nil; rebus squares are emitted as GRBS/RTBL extension sections.
def write(xw)
  @xw = xw
  # fill in some fields that might not be present (checksums needs this)
  pack_clues
  xw.clues = xw.clues.map(&:to_s)
  xw.n_clues = xw.clues.length
  xw.fill ||= empty_fill(xw)
  xw.puzzle_type ||= 1
  xw.scrambled_state ||= 0
  xw.version = "1.3"
  xw.notes ||= ""
  xw.extensions ||= []
  xw.title ||= ""
  xw.author ||= ""
  xw.copyright ||= ""
  # extensions
  xw.encode_rebus!
  if not xw.rebus.empty?
    # GRBS: per-square rebus key grid (0 = no rebus)
    e = OpenStruct.new
    e.section = "GRBS"
    e.grid = xw.to_array({:black => 0, :null => 0}) {|s|
      s.rebus? ? s.solution.symbol.to_i : 0
    }.flatten
    xw.extensions << e
    # RTBL: key -> [long answer, short form] lookup table
    e = OpenStruct.new
    e.section = "RTBL"
    e.rebus = {}
    xw.rebus.each do |long, (k, short)|
      e.rebus[k] = [long, short]
    end
    xw.extensions << e
  end
  # calculate checksums
  @cs = checksums
  h = [cs.global, FILE_MAGIC, cs.cib, cs.masked_low, cs.masked_high,
    xw.version + "\0", 0, cs.scrambled, "\0" * 12,
    xw.width, xw.height, xw.n_clues, xw.puzzle_type, xw.scrambled_state]
  header = h.pack(HEADER_FORMAT)
  strings = [xw.title, xw.author, xw.copyright] + xw.clues + [xw.notes]
  strings = strings.map {|x| x + "\0"}.join
  # grids/header may contain high bytes; unify encodings before joining
  [header, pack_solution(xw), xw.fill, strings, write_extensions].map {|x|
    x.force_encoding("ISO-8859-1")
  }.join
end
private
# sort incoming clues in xw.clues -> across and down
def unpack_clues
across, down = xw.number
clues = across.map {|x| [x, :a]} + down.map {|x| [x, :d]}
clues.sort!
xw.across_clues = []
xw.down_clues = []
clues.zip(xw.clues).each do |(n, dir), clue|
if dir == :a
xw.across_clues << clue
else
xw.down_clues << clue
end
end
end
# combine across and down clues -> xw.clues
def pack_clues
across, down = xw.number
clues = across.map {|x| [x, :a]} + down.map {|x| [x, :d]}
clues.sort!
ac, dn = xw.across_clues.dup, xw.down_clues.dup
xw.clues = []
clues.each do |n, dir|
if dir == :a
xw.clues << ac.shift
else
xw.clues << dn.shift
end
end
check("Extra across clue") { ac.empty? }
check("Extra down clue") { dn.empty? }
end
# Return the parsed extension section named +s+, or nil when it is absent
# (or when no extensions were read at all).
def get_extension(s)
  (xw.extensions || []).find {|ext| ext.section == s}
end
# Fold parsed extension data back into the crossword model: timing info
# from LTIM, and rebus squares from the GRBS grid + RTBL lookup table.
def process_extensions
  # record these for file inspection, though they're unlikely to be useful
  if (ltim = get_extension("LTIM"))
    xw.time_elapsed = ltim.elapsed
    # NOTE(review): this bare attribute read is a no-op -- presumably meant
    # `xw.paused = ltim.stopped`; confirm against XWord's interface.
    xw.paused
  end
  # we need both grbs and rtbl
  grbs, rtbl = get_extension("GRBS"), get_extension("RTBL")
  if grbs and rtbl
    grbs.grid.each_with_index do |n, i|
      if n > 0 and (v = rtbl.rebus[n])
        # convert the flat grid index back to (x, y) coordinates
        x, y = i % xw.width, i / xw.width
        cell = xw.solution[y][x]
        cell.solution = Rebus.new(v[0])
      end
    end
  end
end
# LTIM extension: "<seconds elapsed>,<stopped flag>\0".
def read_ltim(e)
  fields = e.data.match(/^(\d+),(\d+)\0$/)
  check("Could not read extension LTIM") { fields }
  e.elapsed = fields[1].to_i
  e.stopped = fields[2] == "1"
end
# Serialize elapsed seconds and the stopped flag back to the LTIM wire
# format: "<elapsed>,<0|1>\0".
def write_ltim(e)
  "#{e.elapsed},#{e.stopped ? '1' : '0'}\0"
end
# RTBL extension: "nn:WORD;" entries terminated by a NUL; keys may be
# space-padded to two characters. Short forms are unknown here, so '-'.
def read_rtbl(e)
  entry = /(([\d ]\d):(\w+);)/
  check("Could not read extension RTBL") { e.data.match(/^#{entry}*\0$/) }
  e.rebus = {}
  e.data.scan(entry) do |_, key, word|
    e.rebus[key.to_i] = [word, '-']
  end
end
# Emit the rebus lookup table sorted by key, each key right-justified to
# two characters: "nn:WORD;..." (NUL terminator is added by the caller).
def write_rtbl(e)
  e.rebus.keys.sort.map do |key|
    "#{key.to_s.rjust(2)}:#{e.rebus[key][0]};"
  end.join
end
# GEXT extension: one style/flag byte per grid square.
def read_gext(e)
  e.grid = e.data.each_byte.to_a
end
# Pack the per-square flag bytes back into a binary string.
def write_gext(e)
  e.grid.pack("C*")
end
# GRBS extension: 0 = no rebus; a byte n > 0 refers to RTBL key n - 1.
def read_grbs(e)
  e.grid = e.data.each_byte.map {|b| b.zero? ? 0 : b - 1}
end
# Inverse of read_grbs: shift nonzero rebus keys up by one and pack to bytes.
def write_grbs(e)
  e.grid.map {|key| key.zero? ? 0 : key + 1}.pack("C*")
end
# Serialize every extension section: an 8-byte header (name, data length,
# checksum) followed by the data and a NUL. Note the checksum covers the
# data *including* the trailing NUL, while len excludes it.
def write_extensions
  xw.extensions.map {|e|
    # dispatches to write_ltim / write_grbs / write_rtbl / write_gext
    e.data = self.send(:"write_#{e.section.downcase}", e)
    e.len = e.data.length
    e.data += "\0"
    e.checksum = Checksum.of_string(e.data)
    [e.section, e.len, e.checksum].pack(EXT_HEADER_FORMAT) +
    e.data
  }.join
end
# checksums
# Checksum over the metadata strings: title/author/copyright (and, from
# format version 1.3, notes) include their trailing NUL and are skipped
# when empty; clues are summed without a terminator.
def text_checksum(seed)
  c = Checksum.new(seed)
  c.add_string_0 xw.title
  c.add_string_0 xw.author
  c.add_string_0 xw.copyright
  xw.clues.each {|cl| c.add_string cl}
  if (xw.version == '1.3')
    c.add_string_0 xw.notes
  end
  c.sum
end
# Checksum of the packed CIB fields (dimensions, clue count, type, state).
def header_checksum
  h = [xw.width, xw.height, xw.n_clues, xw.puzzle_type, xw.scrambled_state]
  Checksum.of_string h.pack(HEADER_CHECKSUM_FORMAT)
end
# Whole-file checksum: CIB header, then solution and fill grids, then the
# text strings, each folded into the running sum.
def global_checksum
  c = Checksum.new header_checksum
  c.add_string pack_solution(xw)
  c.add_string xw.fill
  text_checksum c.sum
end
# The two "masked" checksums: the low and high bytes of four partial sums
# are XORed byte-wise against the literal mask "ICHEATED" and packed into
# 32-bit low/high halves.
def magic_checksums
  mask = "ICHEATED".bytes
  sums = [
    text_checksum(0),
    Checksum.of_string(xw.fill),
    Checksum.of_string(pack_solution(xw)),
    header_checksum
  ]
  l, h = 0, 0
  sums.each_with_index do |sum, i|
    l = (l << 8) | (mask[3 - i] ^ (sum & 0xff))
    h = (h << 8) | (mask[7 - i] ^ (sum >> 8))
  end
  [l, h]
end
# Bundle every checksum the header needs; scrambling is not supported, so
# the scrambled checksum is always 0.
def checksums
  c = OpenStruct.new
  c.masked_low, c.masked_high = magic_checksums
  c.cib = header_checksum
  c.global = global_checksum
  c.scrambled = 0
  c
end
end
# Text format
class AcrossLiteText < Plugin
include AcrossLiteUtils
attr_accessor :xw, :rebus
def initialize
@xw = XWord.new
end
def read(data)
s = data.each_line.map(&:strip)
# first line must be <ACROSS PUZZLE> or <ACROSS PUZZLE V2>
xw.version = { "<ACROSS PUZZLE>" => 1, "<ACROSS PUZZLE V2>" => 2 }[s.shift]
check("Could not recognise Across Lite text file") { !xw.version.nil? }
header, section = "START", []
s.each do |line|
if line =~ /^<(.*)>/
process_section header, section
header = $1
section = []
else
section << line
end
end
process_section header, section
xw
end
# Serialize +xw+ to the AcrossLite V2 text format. Sections with no
# content are omitted entirely; missing clue lists are padded with
# "(no clue)" placeholders so the output always has one clue per entry.
def write(xw)
  @xw = xw
  # scan the grid for rebus squares and replace them with lookup keys
  xw.encode_rebus!
  # fill in dummy clues if none exist
  across, down = xw.number
  if xw.across_clues.empty?
    xw.across_clues = ["(no clue)"]*across.length
  end
  if xw.down_clues.empty?
    xw.down_clues = ["(no clue)"]*down.length
  end
  # section order follows the AcrossLite text-format specification;
  # SIZE is height-first, matching process_section's read order
  sections = [
    ['TITLE', [xw.title]],
    ['AUTHOR', [xw.author]],
    ['COPYRIGHT', [xw.copyright]],
    ['SIZE', ["#{xw.height}x#{xw.width}"]],
    ['GRID', write_grid],
    ['REBUS', write_rebus],
    ['ACROSS', xw.across_clues],
    ['DOWN', xw.down_clues],
    ['NOTEPAD', xw.notes.to_s.split("\n")]
  ]
  out = ["<ACROSS PUZZLE V2>"]
  sections.each do |h, s|
    next if s.nil? || s.empty?
    out << "<#{h}>"
    s.each {|l| out << " #{l}"}
  end
  out.join("\n") + "\n"
end
private
# Dispatch one parsed <SECTION> body into the crossword model.
# Raises PuzzleFormatError (directly or via check) on malformed input.
def process_section(header, section)
  case header
  when "START"
    # preamble before the first section header; nothing to store
    return
  when "TITLE", "AUTHOR", "COPYRIGHT"
    check { section.length == 1 }
    xw[header.downcase] = section[0]
  when "NOTEPAD"
    xw.notes = section.join("\n")
  when "SIZE"
    check { section.length == 1 && section[0] =~ /^\d+x\d+/ }
    # height comes first, mirroring the order write() emits
    xw.height, xw.width = section[0].split('x').map(&:to_i)
  when "GRID"
    # SIZE must have been seen already so the dimensions are known
    check { xw.width && xw.height }
    check { section.length == xw.height }
    check { section.all? {|line| line.length == xw.width } }
    xw.solution = unpack_solution xw, section.join
  when "REBUS"
    check { section.length > 0 }
    check("Text format v1 does not support <REBUS>") {xw.version == 2}
    # flag list (currently MARK or nothing)
    xw.mark = section[0] == "MARK;"
    section.shift if xw.mark
    # each remaining line is key:long:short; swap matching grid squares
    # for Rebus cells
    section.each do |line|
      check { line =~ /^.+:.+:.$/ }
      sym, long, short = line.split(':')
      xw.each_cell do |c|
        if c.solution == sym
          c.solution = Rebus.new(long, short)
        end
      end
    end
    xw.encode_rebus!
  when "ACROSS"
    xw.across_clues = section
  when "DOWN"
    xw.down_clues = section
  else
    raise PuzzleFormatError, "Unrecognised header #{header}"
  end
end
# One string per row: black squares as '.', missing squares as '?'.
def write_grid
  xw.to_array(GRID_CHARS).map {|row| row.join}
end
# Build the <REBUS> section body: the optional "MARK;" flag line followed
# by one "key:long:short" line per rebus entry, sorted by the long form.
def write_rebus
  lines = []
  lines << "MARK;" if xw.mark
  xw.rebus.keys.sort.each do |long|
    key, short = xw.rebus[long]
    lines << "#{key}:#{long}:#{short}"
  end
  lines
end
end
end # module Pangrid
|
module PgHero
  module Methods
    # Vacuum/analyze housekeeping queries and helpers.
    module Maintenance
      # http://www.postgresql.org/docs/9.1/static/routine-vacuuming.html#VACUUM-FOR-WRAPAROUND
      # "the system will shut down and refuse to start any new transactions
      # once there are fewer than 1 million transactions left until wraparound"
      # warn when 10,000,000 transactions left
      #
      # Lists tables whose remaining transaction-id headroom is below
      # +threshold+, taking the worse of the table itself and its TOAST
      # relation via GREATEST. The default +max_value+ is 2^31 minus the
      # 1M-transaction shutdown margin (2147483648 - 1000000).
      def transaction_id_danger(threshold: 10000000, max_value: 2146483648)
        max_value = max_value.to_i
        threshold = threshold.to_i
        select_all <<-SQL
          SELECT
            n.nspname AS schema,
            c.relname AS table,
            #{quote(max_value)} - GREATEST(AGE(c.relfrozenxid), AGE(t.relfrozenxid)) AS transactions_left
          FROM
            pg_class c
          INNER JOIN
            pg_catalog.pg_namespace n ON n.oid = c.relnamespace
          LEFT JOIN
            pg_class t ON c.reltoastrelid = t.oid
          WHERE
            c.relkind = 'r'
            AND (#{quote(max_value)} - GREATEST(AGE(c.relfrozenxid), AGE(t.relfrozenxid))) < #{quote(threshold)}
          ORDER BY
            2, 1
        SQL
      end

      # Tables within 2M transactions of autovacuum's forced
      # anti-wraparound vacuum (autovacuum_freeze_max_age).
      def autovacuum_danger
        max_value = select_one("SHOW autovacuum_freeze_max_age").to_i
        transaction_id_danger(threshold: 2000000, max_value: max_value)
      end

      # Last manual/auto vacuum and analyze timestamps for every user table.
      def maintenance_info
        select_all <<-SQL
          SELECT
            schemaname AS schema,
            relname AS table,
            last_vacuum,
            last_autovacuum,
            last_analyze,
            last_autoanalyze
          FROM
            pg_stat_user_tables
          ORDER BY
            1, 2
        SQL
      end

      # Run ANALYZE (optionally VERBOSE) on a single table. Always returns true.
      def analyze(table, verbose: false)
        execute "ANALYZE #{verbose ? "VERBOSE " : ""}#{quote_table_name(table)}"
        true
      end

      # ANALYZE every user table (optionally filtered by name and minimum
      # size), each inside its own short-timeout transaction; records a
      # per-table :success flag rather than aborting the whole run when one
      # table fails.
      def analyze_tables(verbose: false, min_size: nil, tables: nil)
        tables = table_stats(table: tables).reject { |s| %w(information_schema pg_catalog).include?(s[:schema]) }
        tables = tables.select { |s| s[:size_bytes] > min_size } if min_size
        tables.map { |s| s.slice(:schema, :table) }.each do |stats|
          begin
            with_transaction(lock_timeout: 5000, statement_timeout: 120000) do
              analyze "#{stats[:schema]}.#{stats[:table]}", verbose: verbose
            end
            success = true
          rescue ActiveRecord::StatementInvalid => e
            $stderr.puts e.message
            success = false
          end
          stats[:success] = success
        end
      end
    end
  end
end
Fixed transaction id danger sorting [skip ci]
module PgHero
  module Methods
    # Vacuum/analyze housekeeping queries and helpers.
    module Maintenance
      # http://www.postgresql.org/docs/9.1/static/routine-vacuuming.html#VACUUM-FOR-WRAPAROUND
      # "the system will shut down and refuse to start any new transactions
      # once there are fewer than 1 million transactions left until wraparound"
      # warn when 10,000,000 transactions left
      #
      # Lists tables whose remaining transaction-id headroom is below
      # +threshold+, taking the worse of the table itself and its TOAST
      # relation via GREATEST. The default +max_value+ is 2^31 minus the
      # 1M-transaction shutdown margin (2147483648 - 1000000). Results are
      # ordered most-endangered first.
      def transaction_id_danger(threshold: 10000000, max_value: 2146483648)
        max_value = max_value.to_i
        threshold = threshold.to_i
        select_all <<-SQL
          SELECT
            n.nspname AS schema,
            c.relname AS table,
            #{quote(max_value)} - GREATEST(AGE(c.relfrozenxid), AGE(t.relfrozenxid)) AS transactions_left
          FROM
            pg_class c
          INNER JOIN
            pg_catalog.pg_namespace n ON n.oid = c.relnamespace
          LEFT JOIN
            pg_class t ON c.reltoastrelid = t.oid
          WHERE
            c.relkind = 'r'
            AND (#{quote(max_value)} - GREATEST(AGE(c.relfrozenxid), AGE(t.relfrozenxid))) < #{quote(threshold)}
          ORDER BY
            3, 1, 2
        SQL
      end

      # Tables within 2M transactions of autovacuum's forced
      # anti-wraparound vacuum (autovacuum_freeze_max_age).
      def autovacuum_danger
        max_value = select_one("SHOW autovacuum_freeze_max_age").to_i
        transaction_id_danger(threshold: 2000000, max_value: max_value)
      end

      # Last manual/auto vacuum and analyze timestamps for every user table.
      def maintenance_info
        select_all <<-SQL
          SELECT
            schemaname AS schema,
            relname AS table,
            last_vacuum,
            last_autovacuum,
            last_analyze,
            last_autoanalyze
          FROM
            pg_stat_user_tables
          ORDER BY
            1, 2
        SQL
      end

      # Run ANALYZE (optionally VERBOSE) on a single table. Always returns true.
      def analyze(table, verbose: false)
        execute "ANALYZE #{verbose ? "VERBOSE " : ""}#{quote_table_name(table)}"
        true
      end

      # ANALYZE every user table (optionally filtered by name and minimum
      # size), each inside its own short-timeout transaction; records a
      # per-table :success flag rather than aborting the whole run when one
      # table fails.
      def analyze_tables(verbose: false, min_size: nil, tables: nil)
        tables = table_stats(table: tables).reject { |s| %w(information_schema pg_catalog).include?(s[:schema]) }
        tables = tables.select { |s| s[:size_bytes] > min_size } if min_size
        tables.map { |s| s.slice(:schema, :table) }.each do |stats|
          begin
            with_transaction(lock_timeout: 5000, statement_timeout: 120000) do
              analyze "#{stats[:schema]}.#{stats[:table]}", verbose: verbose
            end
            success = true
          rescue ActiveRecord::StatementInvalid => e
            $stderr.puts e.message
            success = false
          end
          stats[:success] = success
        end
      end
    end
  end
end
|
module Phantomjs
  module Binaries
    # Version of the PhantomJS binary that this gem wraps.
    PHANTOM_VERSION = "1.9.2"
    # Gem release version: the PhantomJS version plus a packaging revision.
    VERSION = PHANTOM_VERSION + ".4"
  end
end
bump version
module Phantomjs
  module Binaries
    # Version of the PhantomJS binary that this gem wraps.
    PHANTOM_VERSION = "1.9.8"
    # Gem release version: the PhantomJS version plus a packaging revision.
    VERSION = PHANTOM_VERSION + ".0"
  end
end
|
module Physiqual
  module Imputers
    # Base class for imputation strategies. Subclasses implement
    # process_impute; this class decides whether imputation is needed or
    # even possible for a given series before delegating.
    class Imputer
      include ActiveSupport::Callbacks
      define_callbacks :process_impute

      # Hook for subclasses: replace nil/-1 entries in +_array+.
      def process_impute(_array)
        fail 'Subclass does not implement process_impute! method.'
      end

      # Public entry point; builds an instance and runs the private impute!.
      def self.impute!(*args)
        new.send(:impute!, *args)
      end

      private

      # Returns +array+ untouched when nothing needs imputation (or when it
      # contains strings), an array of nils when no usable values remain,
      # the single usable value repeated when only one remains (previously
      # that sole value was discarded and all nils were returned), and
      # otherwise delegates to the subclass via the callback chain.
      def impute!(array)
        nr_values_to_be_imputed = array.count { |elem| need_imputation?(elem) }
        # Only one usable value: repeat it, since interpolation needs two points.
        return Array.new(array.size, single_value(array)) if nr_values_to_be_imputed + 1 == array.size
        # No usable values at all: nothing to interpolate from.
        return Array.new(array.size, nil) if nr_values_to_be_imputed >= array.size
        # Return if no nils or -1's
        return array unless array.any? { |elem| need_imputation?(elem) }
        # Return array if the array contains a string
        return array if array.any? { |elem| elem.is_a? String }
        impute_callback array: array
      end

      def impute_callback(array:)
        run_callbacks :process_impute do
          process_impute array
        end
      end

      protected

      # True for the missing-value markers that must be imputed.
      def need_imputation?(value)
        [nil, -1].include? value
      end

      # The sole value in +array+ that does not need imputation.
      def single_value(array)
        array.find { |elem| !need_imputation?(elem) }
      end
    end
  end
end
Also handle the case where only a single value does not need imputation.
module Physiqual
  module Imputers
    # Base class for imputation strategies. Subclasses implement
    # process_impute; this class decides whether imputation is needed or
    # even possible for a given series before delegating.
    class Imputer
      include ActiveSupport::Callbacks
      define_callbacks :process_impute

      # Hook for subclasses: replace nil/-1 entries in +_array+.
      def process_impute(_array)
        fail 'Subclass does not implement process_impute! method.'
      end

      # Public entry point; builds an instance and runs the private impute!.
      def self.impute!(*args)
        new.send(:impute!, *args)
      end

      private

      # Returns +array+ untouched when nothing needs imputation (or when it
      # contains strings), an array of nils when every value needs
      # imputation, the single usable value repeated when only one remains,
      # and otherwise delegates to the subclass via the callback chain.
      def impute!(array)
        nr_values_to_be_imputed = array.count { |elem| need_imputation?(elem) }
        # Only one usable value: repeat it, since interpolation needs two points.
        return Array.new(array.size, single_value(array)) if nr_values_to_be_imputed + 1 == array.size
        # Return an array of nils if all values need imputation
        return Array.new(array.size, nil) if nr_values_to_be_imputed == array.size
        # Return if no nils or -1's
        return array unless array.any? { |elem| need_imputation?(elem) }
        # Return array if the array contains a string
        return array if array.any? { |elem| elem.is_a? String }
        impute_callback array: array
      end

      def impute_callback(array:)
        run_callbacks :process_impute do
          process_impute array
        end
      end

      protected

      # True for the missing-value markers that must be imputed.
      def need_imputation?(value)
        [nil, -1].include? value
      end

      # The sole value in +array+ that does not need imputation.
      # Bug fix: previously called `need_imputation` (missing the `?`),
      # raising NoMethodError whenever the single-value branch ran.
      def single_value(array)
        array.find { |elem| !need_imputation?(elem) }
      end
    end
  end
end
|
require 'project_compat'
module Acts
module Authorized
module PolicyBasedAuthorization
def self.included klass
klass.extend ClassMethods
klass.class_eval do
belongs_to :contributor, :polymorphic => true unless method_defined? :contributor
after_initialize :contributor_or_default_if_new
#checks a policy exists, and if missing resorts to using a private policy
after_initialize :policy_or_default_if_new
include ProjectCompat unless method_defined? :projects
belongs_to :policy, :required_access_to_owner => :manage, :autosave => true
end
end
module ClassMethods
end
def contributor_credited?
true
end
def private?
policy.private?
end
def public?
policy.public?
end
def default_policy
Policy.default
end
def policy_or_default
if self.policy.nil?
self.policy = default_policy
end
end
def policy_or_default_if_new
if self.new_record?
policy_or_default
end
end
def default_contributor
User.current_user
end
#when having a sharing_scope policy of Policy::ALL_SYSMO_USERS it is considered to have advanced permissions if any of the permissions do not relate to the projects associated with the resource (ISA or Asset))
#this is a temporary work-around for the loss of the custom_permissions flag when defining a pre-canned permission of shared with sysmo, but editable/downloadable within my project
#other policy sharing scopes are simpler, and are considered to have advanced permissions if there are more than zero permissions defined
def has_advanced_permissions?
  if policy.sharing_scope==Policy::ALL_SYSMO_USERS
    # advanced iff some permission targets a contributor outside this item's projects
    !(policy.permissions.collect{|p| p.contributor} - projects).empty?
  else
    policy.permissions.count > 0
  end
end
def contributor_or_default_if_new
if self.new_record? && contributor.nil?
self.contributor = default_contributor
end
end
#contributor or person who can manage the item and the item was published
# True when any of: the Ability grants :publish and the user can manage;
# the current user is (or owns) the contributor; the user can manage an
# already-public item; or the virtual-liver config short-circuits the check.
def can_publish?
  ((Ability.new(User.current_user).can? :publish, self) && self.can_manage?) || self.contributor == User.current_user || try_block{self.contributor.user} == User.current_user || (self.can_manage? && self.policy.sharing_scope == Policy::EVERYONE) || Seek::Config.is_virtualliver
end
#use request_permission_summary to retrieve who can manage the item
def people_can_manage
contributor = self.contributor.kind_of?(Person) ? self.contributor : self.contributor.try(:person)
return [[contributor.id, "#{contributor.first_name} #{contributor.last_name}", Policy::MANAGING]] if policy.blank?
creators = is_downloadable? ? self.creators : []
asset_managers = projects.collect(&:asset_managers).flatten
grouped_people_by_access_type = policy.summarize_permissions creators,asset_managers, contributor
grouped_people_by_access_type[Policy::MANAGING]
end
AUTHORIZATION_ACTIONS.each do |action|
eval <<-END_EVAL
def can_#{action}? user = User.current_user
if Seek::Config.auth_caching_enabled
key = cache_keys(user, "#{action}")
new_record? || Rails.cache.fetch(key) {perform_auth(user,"#{action}") ? :true : :false} == :true
else
new_record? || perform_auth(user,"#{action}")
end
end
END_EVAL
end
def perform_auth user,action
(Authorization.is_authorized? action, nil, self, user) || (Ability.new(user).can? action.to_sym, self) || (Ability.new(user).can? "#{action}_asset".to_sym, self)
end
#returns a list of the people that can manage this file
#which will be the contributor, and those that have manage permissions
def managers
#FIXME: how to handle projects as contributors - return all people or just specific people (pals or other role)?
people=[]
unless self.contributor.nil?
people << self.contributor.person if self.contributor.kind_of?(User)
people << self.contributor if self.contributor.kind_of?(Person)
end
self.policy.permissions.each do |perm|
unless perm.contributor.nil? || perm.access_type!=Policy::MANAGING
people << (perm.contributor) if perm.contributor.kind_of?(Person)
people << (perm.contributor.person) if perm.contributor.kind_of?(User)
end
end
people.uniq
end
def cache_keys user, action
#start off with the keys for the person
keys = generate_person_key(user.try(:person))
#action
keys << "can_#{action}?"
#item (to invalidate when contributor is changed)
keys << self.cache_key
#item creators (to invalidate when creators are changed)
if self.respond_to? :assets_creators
keys |= self.assets_creators.sort_by(&:id).collect(&:cache_key)
end
#policy
keys << policy.cache_key
#permissions
keys |= policy.permissions.sort_by(&:id).collect(&:cache_key)
keys
end
def generate_person_key person
keys = [person.try(:cache_key)]
#group_memberships + favourite_group_memberships
unless person.nil?
keys |= person.group_memberships.sort_by(&:id).collect(&:cache_key)
keys |= person.favourite_group_memberships.sort_by(&:id).collect(&:cache_key)
end
keys
end
end
end
end
auth_caching_enabled is now read only once, when the class is created, rather than on every request; as a consequence, changing the flag now requires an application restart.
require 'project_compat'
module Acts
module Authorized
module PolicyBasedAuthorization
def self.included klass
klass.extend ClassMethods
klass.class_eval do
belongs_to :contributor, :polymorphic => true unless method_defined? :contributor
after_initialize :contributor_or_default_if_new
#checks a policy exists, and if missing resorts to using a private policy
after_initialize :policy_or_default_if_new
include ProjectCompat unless method_defined? :projects
belongs_to :policy, :required_access_to_owner => :manage, :autosave => true
end
end
module ClassMethods
end
def contributor_credited?
true
end
def private?
policy.private?
end
def public?
policy.public?
end
def default_policy
Policy.default
end
def policy_or_default
if self.policy.nil?
self.policy = default_policy
end
end
def policy_or_default_if_new
if self.new_record?
policy_or_default
end
end
def default_contributor
User.current_user
end
#when having a sharing_scope policy of Policy::ALL_SYSMO_USERS it is considered to have advanced permissions if any of the permissions do not relate to the projects associated with the resource (ISA or Asset))
#this is a temporary work-around for the loss of the custom_permissions flag when defining a pre-canned permission of shared with sysmo, but editable/downloadable within my project
#other policy sharing scopes are simpler, and are considered to have advanced permissions if there are more than zero permissions defined
def has_advanced_permissions?
  if policy.sharing_scope==Policy::ALL_SYSMO_USERS
    # advanced iff some permission targets a contributor outside this item's projects
    !(policy.permissions.collect{|p| p.contributor} - projects).empty?
  else
    policy.permissions.count > 0
  end
end
def contributor_or_default_if_new
if self.new_record? && contributor.nil?
self.contributor = default_contributor
end
end
#contritutor or person who can manage the item and the item was published
def can_publish?
((Ability.new(User.current_user).can? :publish, self) && self.can_manage?) || self.contributor == User.current_user || try_block{self.contributor.user} == User.current_user || (self.can_manage? && self.policy.sharing_scope == Policy::EVERYONE) || Seek::Config.is_virtualliver
end
#use request_permission_summary to retrieve who can manage the item
def people_can_manage
contributor = self.contributor.kind_of?(Person) ? self.contributor : self.contributor.try(:person)
return [[contributor.id, "#{contributor.first_name} #{contributor.last_name}", Policy::MANAGING]] if policy.blank?
creators = is_downloadable? ? self.creators : []
asset_managers = projects.collect(&:asset_managers).flatten
grouped_people_by_access_type = policy.summarize_permissions creators,asset_managers, contributor
grouped_people_by_access_type[Policy::MANAGING]
end
# Define can_view?/can_edit?/... dynamically, one per authorization action.
# Seek::Config.auth_caching_enabled is consulted ONCE, while this class body
# is evaluated, so each generated method contains either the cached or the
# uncached path with no per-call flag check -- changing the flag therefore
# requires an application restart to take effect.
AUTHORIZATION_ACTIONS.each do |action|
  if Seek::Config.auth_caching_enabled
    eval <<-END_EVAL
      def can_#{action}? user = User.current_user
        key = cache_keys(user, "#{action}")
        new_record? || Rails.cache.fetch(key) {perform_auth(user,"#{action}") ? :true : :false} == :true
      end
    END_EVAL
  else
    eval <<-END_EVAL
      def can_#{action}? user = User.current_user
        new_record? || perform_auth(user,"#{action}")
      end
    END_EVAL
  end
end
def perform_auth user,action
(Authorization.is_authorized? action, nil, self, user) || (Ability.new(user).can? action.to_sym, self) || (Ability.new(user).can? "#{action}_asset".to_sym, self)
end
#returns a list of the people that can manage this file
#which will be the contributor, and those that have manage permissions
def managers
#FIXME: how to handle projects as contributors - return all people or just specific people (pals or other role)?
people=[]
unless self.contributor.nil?
people << self.contributor.person if self.contributor.kind_of?(User)
people << self.contributor if self.contributor.kind_of?(Person)
end
self.policy.permissions.each do |perm|
unless perm.contributor.nil? || perm.access_type!=Policy::MANAGING
people << (perm.contributor) if perm.contributor.kind_of?(Person)
people << (perm.contributor.person) if perm.contributor.kind_of?(User)
end
end
people.uniq
end
# Assembles the list of cache key fragments used to cache a can_<action>?
# result for +user+. The combined key changes — invalidating the cached
# verdict — whenever the person, their group memberships, this item, its
# creators, its policy or the policy's permissions change.
def cache_keys user, action
#start off with the keys for the person
keys = generate_person_key(user.try(:person))
#action
keys << "can_#{action}?"
#item (to invalidate when contributor is changed)
keys << self.cache_key
#item creators (to invalidate when creators are changed)
if self.respond_to? :assets_creators
keys |= self.assets_creators.sort_by(&:id).collect(&:cache_key)
end
#policy
keys << policy.cache_key
#permissions
keys |= policy.permissions.sort_by(&:id).collect(&:cache_key)
keys
end
# Builds the cache key fragments contributed by a person: the person's own
# cache key plus those of every group membership and favourite group
# membership (sorted by id for a stable ordering). Returns [nil] when the
# person is nil.
def generate_person_key(person)
  keys = [person.try(:cache_key)]
  return keys if person.nil?
  keys |= person.group_memberships.sort_by(&:id).collect(&:cache_key)
  keys | person.favourite_group_memberships.sort_by(&:id).collect(&:cache_key)
end
end
end
end
|
require 'redis'
module PrivatePub
# This class is an extension for the Faye::RackAdapter.
# It is used inside of PrivatePub.faye_app.
class FayeExtension
# Sets up the process-wide Redis connection used for subscription tracking.
# NOTE(review): the default redis_address of "" is truthy, so the
# `|| '127.0.0.1'` fallback only fires when nil is passed explicitly — an
# empty string would be used as the host verbatim. Confirm callers.
def initialize(redis_address = "", redis_port = 6379, redis_password = nil)
puts "initialize faye extension, address: #{redis_address || '127.0.0.1'}, port: #{redis_port}"
if redis_password.nil?
Redis.current = Redis.new(host: redis_address || '127.0.0.1', port: redis_port)
else
Redis.current = Redis.new(host: redis_address || '127.0.0.1', port: redis_port, password: redis_password)
end
# Redundant: `new` returns the instance regardless of initialize's value.
return self
end
# Callback to handle incoming Faye messages. This authenticates both
# subscribe and publish calls.
def incoming(message, callback)
#puts "\n#{Time.now} incoming, msg", message
if message["channel"] == "/meta/subscribe"
authenticate_subscribe(message)
elsif message["channel"] !~ %r{^/meta/}
authenticate_publish(message)
else
# Remaining /meta/* traffic (connect, disconnect, unsubscribe, ...)
# is used to keep the Redis subscription bookkeeping fresh.
maintain_channel_subscriptions(message)
end
# Mirror the channel into the payload so subscribers can tell which
# channel a message belongs to.
message['data']['channel'] ||= message['channel'] if message['data']
callback.call(message)
end
private
# Ensure the subscription signature is correct and that it has not expired.
# For /feed/actor channels, additionally records the subscriber's clientId
# (with a timestamp) in the Redis 'subscriptions' hash.
def authenticate_subscribe(message)
subscription = PrivatePub.subscription(:channel => message["subscription"], :timestamp => message["ext"]["private_pub_timestamp"])
#Redis.current.hset('log', "#{Time.now.to_i}_auth", {called_method: "authenticate_subscribe", message_subscription: message["subscription"], client_id: message['clientId']})
if message["ext"]["private_pub_signature"] != subscription[:signature]
message["error"] = "Incorrect signature."
elsif PrivatePub.signature_expired? message["ext"]["private_pub_timestamp"].to_i
message["error"] = "Signature has expired."
elsif message["subscription"].index('/feed/actor') == 0
current_subsciptions = Redis.current.hgetall('subscriptions')
present_subscription = current_subsciptions[message["subscription"]]
puts "already present sub? #{present_subscription}"
if present_subscription
# SECURITY NOTE(review): eval of a string read back from Redis; only
# safe while nothing else can write this hash. Consider JSON instead.
client_ids = eval(present_subscription)[:client_ids]
client_ids[message['clientId']] = Time.now.to_i
else
client_ids = {message['clientId'] => Time.now.to_i}
end
## begin try
begin
puts "writing sub: #{client_ids}"
Redis.current.hset('subscriptions', message["subscription"], {time: Time.now.to_i, client_ids: client_ids, called_method: "authenticate_subscribe"})
rescue Exception => e
# NOTE(review): rescuing Exception also swallows signals/SystemExit;
# StandardError would be safer.
puts "\nException: #{e}\n"
end
## end try
end
end
# Ensures the secret token is correct before publishing.
def authenticate_publish(message)
if PrivatePub.config[:secret_token].nil?
raise Error, "No secret_token config set, ensure private_pub.yml is loaded properly."
elsif message["ext"].nil? || (message["ext"]["private_pub_token"] != PrivatePub.config[:secret_token] && !credentials_valid?(message))
message["error"] = "Incorrect or no token."
else
# Strip the token so it is never broadcast to subscribers.
message["ext"]["private_pub_token"] = nil
end
end
# True when the message carries a SHA1 digest over
# [secret_token, channel, timestamp] matching our own computation.
def credentials_valid?(message)
return message['ext']['private_pub_signature'] == Digest::SHA1.hexdigest([PrivatePub.config[:secret_token], message['channel'], message['ext']['private_pub_timestamp']].join)
end
# Keeps the Redis 'subscriptions' hash in sync with client lifecycle
# events: refreshes timestamps on activity and removes clientIds (or the
# whole channel entry) on /meta/disconnect.
def maintain_channel_subscriptions(message)
begin ## begin try
current_subsciptions = Redis.current.hgetall('subscriptions')
puts "current_subsciptions: #{current_subsciptions}"
#Redis.current.hset('log', "#{Time.now.to_i}_inco", {called_method: "incoming", message: message, current_subsciptions: current_subsciptions})
return unless current_subsciptions
message_client_id = message['clientId']
# SECURITY NOTE(review): eval of Redis-stored strings (see above).
key = current_subsciptions.find{|k, v| eval(v)[:client_ids] != nil && eval(v)[:client_ids][message_client_id]}
puts "this user's subscription: #{key ? key.first + " --> " + key.last : "NONE"}"
return unless key && key.first.index('/feed/actor') == 0
channel = key.first
channel_hash = eval(current_subsciptions[channel])
if message['channel'] == '/meta/disconnect'
if channel_hash[:client_ids].length > 1
puts "disconnect, deleting one client id #{message_client_id}, setting new timestamp: #{Time.now.to_i}"
channel_hash[:time] = Time.now.to_i
channel_hash[:called_method] = "incoming"
channel_hash[:client_ids].delete(message_client_id)
Redis.current.hset('subscriptions', channel, channel_hash)
else
# Last client for this channel is gone: drop the whole entry.
puts "disconnect, deleting user's channel"
Redis.current.hdel('subscriptions', channel)
end
else
channel_hash[:time] = Time.now.to_i
channel_hash[:called_method] = "incoming"
channel_hash[:client_ids] = cleanup_client_id_timestamps(channel_hash[:client_ids], message_client_id)
puts "updating timestamps for subscription of channel #{channel}: #{channel_hash}"
Redis.current.hset('subscriptions', channel, channel_hash)
end # don't do anything for /meta/unsubscribe
rescue Exception => e
puts "\nException: #{e}\n"
end ## end try
end
# Refreshes the timestamp for the current clientId and prunes clientIds
# whose last-seen timestamp exceeds the keep-alive grace period.
# NOTE(review): deletes entries from the hash while iterating it.
def cleanup_client_id_timestamps(client_ids, current_client_id)
client_ids.each do |client_id, timestamp|
if client_id == current_client_id
puts "found id, updating: #{client_id}"
client_ids[client_id] = Time.now.to_i
elsif Time.now.to_i - timestamp > 50 # 2 * the keep alive time
puts "deleting: #{client_id}"
client_ids.delete(client_id)
end
end
return client_ids
end
end
end
Made log messages more comprehensive.
require 'redis'
module PrivatePub
# This class is an extension for the Faye::RackAdapter.
# It is used inside of PrivatePub.faye_app.
class FayeExtension
# Sets up the process-wide Redis connection used for subscription tracking.
# NOTE(review): the default redis_address of "" is truthy, so the
# `|| '127.0.0.1'` fallback only fires when nil is passed explicitly.
def initialize(redis_address = "", redis_port = 6379, redis_password = nil)
puts "initialize faye extension, address: #{redis_address || '127.0.0.1'}, port: #{redis_port}"
if redis_password.nil?
Redis.current = Redis.new(host: redis_address || '127.0.0.1', port: redis_port)
else
Redis.current = Redis.new(host: redis_address || '127.0.0.1', port: redis_port, password: redis_password)
end
# Redundant: `new` returns the instance regardless of initialize's value.
return self
end
# Callback to handle incoming Faye messages. This authenticates both
# subscribe and publish calls.
def incoming(message, callback)
#puts "\n#{Time.now} incoming, msg", message
if message["channel"] == "/meta/subscribe"
authenticate_subscribe(message)
elsif message["channel"] !~ %r{^/meta/}
authenticate_publish(message)
else
# Remaining /meta/* traffic keeps the Redis bookkeeping fresh.
maintain_channel_subscriptions(message)
end
# Mirror the channel into the payload so subscribers can tell which
# channel a message belongs to.
message['data']['channel'] ||= message['channel'] if message['data']
callback.call(message)
end
private
# Ensure the subscription signature is correct and that it has not expired.
# For /feed/actor channels, additionally records the subscriber's clientId
# (with a timestamp) in the Redis 'subscriptions' hash.
def authenticate_subscribe(message)
subscription = PrivatePub.subscription(:channel => message["subscription"], :timestamp => message["ext"]["private_pub_timestamp"])
#Redis.current.hset('log', "#{Time.now.to_i}_auth", {called_method: "authenticate_subscribe", message_subscription: message["subscription"], client_id: message['clientId']})
if message["ext"]["private_pub_signature"] != subscription[:signature]
message["error"] = "Incorrect signature."
elsif PrivatePub.signature_expired? message["ext"]["private_pub_timestamp"].to_i
message["error"] = "Signature has expired."
elsif message["subscription"].index('/feed/actor') == 0
puts "\nincoming new subscription: #{message["subscription"]}"
current_subsciptions = Redis.current.hgetall('subscriptions')
present_subscription = current_subsciptions[message["subscription"]]
puts "already present sub by this actor? #{!present_subscription.nil? && present_subscription.length != 0}"
if present_subscription
# SECURITY NOTE(review): eval of a string read back from Redis; only
# safe while nothing else can write this hash. Consider JSON instead.
client_ids = eval(present_subscription)[:client_ids]
client_ids[message['clientId']] = Time.now.to_i
else
client_ids = {message['clientId'] => Time.now.to_i}
end
## begin try
begin
puts "writing new subscription, now #{client_ids.length} channels in total"
Redis.current.hset('subscriptions', message["subscription"], {time: Time.now.to_i, client_ids: client_ids, called_method: "authenticate_subscribe"})
rescue Exception => e
# NOTE(review): rescuing Exception also swallows signals/SystemExit;
# StandardError would be safer.
puts "\nException: #{e}\n"
end
## end try
end
end
# Ensures the secret token is correct before publishing.
def authenticate_publish(message)
if PrivatePub.config[:secret_token].nil?
raise Error, "No secret_token config set, ensure private_pub.yml is loaded properly."
elsif message["ext"].nil? || (message["ext"]["private_pub_token"] != PrivatePub.config[:secret_token] && !credentials_valid?(message))
message["error"] = "Incorrect or no token."
else
# Strip the token so it is never broadcast to subscribers.
message["ext"]["private_pub_token"] = nil
end
end
# True when the message carries a SHA1 digest over
# [secret_token, channel, timestamp] matching our own computation.
def credentials_valid?(message)
return message['ext']['private_pub_signature'] == Digest::SHA1.hexdigest([PrivatePub.config[:secret_token], message['channel'], message['ext']['private_pub_timestamp']].join)
end
# Keeps the Redis 'subscriptions' hash in sync with client lifecycle
# events: refreshes timestamps on activity and removes clientIds (or the
# whole channel entry) on /meta/disconnect.
def maintain_channel_subscriptions(message)
begin ## begin try
current_subsciptions = Redis.current.hgetall('subscriptions')
puts "\n#{Time.now} incoming, current_subsciptions: #{current_subsciptions.keys.join(', ')}"
#Redis.current.hset('log', "#{Time.now.to_i}_inco", {called_method: "incoming", message: message, current_subsciptions: current_subsciptions})
return unless current_subsciptions
message_client_id = message['clientId']
# SECURITY NOTE(review): eval of Redis-stored strings (see above).
key = current_subsciptions.find{|k, v| eval(v)[:client_ids] != nil && eval(v)[:client_ids][message_client_id]}
puts "this user's subscription: #{key ? ("#{key.first} --> #{eval(key.last)[:client_ids].length} channels, latest update: #{Time.at(eval(key.last)[:time])}") : "NONE, returning"}"
return unless key && key.first.index('/feed/actor') == 0
channel = key.first
channel_hash = eval(current_subsciptions[channel])
if message['channel'] == '/meta/disconnect'
if channel_hash[:client_ids].length > 1
puts "\ndisconnect, deleting channel, setting new subscription timestamp: #{Time.now}. channels left: #{channel_hash[:client_ids].length - 1}"
channel_hash[:time] = Time.now.to_i
channel_hash[:called_method] = "incoming"
channel_hash[:client_ids].delete(message_client_id)
Redis.current.hset('subscriptions', channel, channel_hash)
else
# Last client for this channel is gone: drop the whole entry.
puts "disconnect, deleting user's subscription (no channels left)"
Redis.current.hdel('subscriptions', channel)
end
else
channel_hash[:time] = Time.now.to_i
channel_hash[:called_method] = "incoming"
puts "performing cleanup for subscription:"
channel_hash[:client_ids] = cleanup_client_id_timestamps(channel_hash[:client_ids], message_client_id)
puts "updating timestamps for subscription of channel #{channel}: #{channel_hash[:client_ids].length} channels, latest update: #{Time.now}"
Redis.current.hset('subscriptions', channel, channel_hash)
end # don't do anything for /meta/unsubscribe
rescue Exception => e
puts "\nException: #{e}\n"
end ## end try
end
# Refreshes the timestamp for the current clientId and prunes clientIds
# whose last-seen timestamp exceeds the keep-alive grace period.
# NOTE(review): deletes entries from the hash while iterating it.
def cleanup_client_id_timestamps(client_ids, current_client_id)
client_ids.each_with_index do |(client_id, timestamp), index|
if client_id == current_client_id
puts "index #{index}: found id, updating"
client_ids[client_id] = Time.now.to_i
elsif Time.now.to_i - timestamp > 50 # 2 * the keep alive time
puts "index #{index}: deleting, grace time exceeded"
client_ids.delete(client_id)
else
puts "index #{index}: omitting delete, grace time not exceeded"
end
end
return client_ids
end
end
end
|
module PropertySolutions
# Gem version string, bumped per release.
VERSION = '0.0.20'
end
Updated version file
module PropertySolutions
# Gem version string, bumped per release.
VERSION = '0.0.21'
end
|
#!/usr/bin/ruby
require 'rubygems'
require 'mechanize'
require 'nokogiri'
require 'highline/import'
require 'stringio'
# Change based on Semester
$term = '01'
$year = '2016'
$frequency = 1 # Number of Seconds between check requests
$name = '' # Display name, prompted for in main
$failed_adds = 0 # Consecutive failed registration attempts (reset/raised in check_courses)
# Shared Mechanize session used for every HTTP request in this script.
$agent = Mechanize.new
$agent.redirect_ok = true
$agent.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.11 Safari/535.19'
# NOTE(review): disables TLS certificate verification for all requests.
$agent.verify_mode = OpenSSL::SSL::VERIFY_NONE
# Tiny ANSI helper: lets any string colorize itself for terminal output.
class String
  # Wraps self in an ANSI escape sequence. +c+ is a color name symbol
  # (:black..:white) or a raw numeric ANSI code, used verbatim when the
  # name is unknown.
  def color(c)
    code = {
      black: 30, red: 31, green: 32, yellow: 33,
      blue: 34, magenta: 35, cyan: 36, white: 37
    }[c] || c
    "\e[#{code}m#{self}\e[0m"
  end
end
# Logs in to the Banner self-service portal via CAS using the shared
# $agent session. Returns true on success, false when the response body
# reports invalid credentials.
def login(username, password)
  cas = $agent.get('https://auth.vt.edu/login?service=https://webapps.banner.vt.edu/banner-cas-prod/authorized/banner/SelfService')
  form = cas.forms.first
  form.set_fields(username: username, password: password)
  # A body mentioning invalid credentials means the login failed.
  form.submit.body.match(/Invalid username or password/).nil?
end
# Gets Course Information
# Scrapes the Banner timetable comments page for +crn+. Returns a hash with
# :title and :crn and, when the tables parse, :days/:begin/:end/:room/:exam
# and :instructor/:type/:status/:seats/:capacity. Returns false when the
# page cannot be fetched; any parse failure marks the course as Full.
def get_course(crn)
begin
course_details = Nokogiri::HTML($agent.get("https://banweb.banner.vt.edu/ssb/prod/HZSKVTSC.P_ProcComments?CRN=#{crn}&TERM=#{$term}&YEAR=#{$year}").body)
rescue
return false # Failed to get course
end
# Flatten table to make it easier to work with
course = {}
data_set = false
course[:title] = course_details.css('td.title').last.text.gsub(/-\ +/, '')
course[:crn] = crn
# Will catch a botched 'get' of the course info
# # Got a couple exceptions where it was trying to get the text of a null object
begin
# A header row containing 'Days' or 'Instructor' flags which fields the
# *next* row carries; data_set remembers that across iterations.
course_details.css('table table tr').each_with_index do |row|
# If we have a data_set
case data_set
when :rowA
[:i, :days, :begin, :end, :room, :exam].each_with_index do |el, i|
if row.css('td')[i] then
course[el] = row.css('td')[i].text
end
end
when :rowB
[ :instructor, :type, :status, :seats, :capacity ].each_with_index do |el, i|
course[el] = row.css('td')[i].text
end
end
data_set = false
# Is there a dataset?
row.css('td').each do |cell|
case cell.text
when 'Days'
data_set = :rowA
when 'Instructor'
data_set = :rowB
end
end
end
rescue
course[:seats] = 'Full'
end
return course
end
# Registers you for the given CRN, returns true if successful, false if not
# When +remove+ is a CRN, that course is dropped in the same submission to
# make room; if the add then fails, the dropped course is re-registered
# (raising when even that fails).
def register_crn(crn, remove)
begin
# Follow Path
$agent.get('https://banweb.banner.vt.edu/ssb/prod/twbkwbis.P_GenMenu?name=bmenu.P_MainMnu')
reg = $agent.get('https://banweb.banner.vt.edu/ssb/prod/hzskstat.P_DispRegStatPage')
drop_add = reg.link_with(href: "/ssb/prod/bwskfreg.P_AddDropCrse?term_in=#{$year}#{$term}").click
# Fill in CRN Box and Submit
crn_entry = drop_add.form_with(action: '/ssb/prod/bwckcoms.P_Regs')
drop_add_html = Nokogiri::HTML(drop_add.body)
# Removing the old class if one was specified
# Counter to keep track of empty rows
# # Starts at -2 because counter was picking up the rows before the first class and I needed it to be
# # accurate for troubleshooting
counter = -2
if remove != ''
drop_add_html.css('table table tr').each_with_index do |row, i|
# Looks down the table to find the row with the CRN that needs to be removed
if row.css('td')[1] != nil
if row.css('td')[1].text =~ /#{remove}/
# Changes the drop down for the 'Drop' column for the CRN
crn_entry.field_with(id: "action_id#{i - 3 - counter}").options[0].select
elsif row.css('td')[1].text =~ /^\d{5}$/ then
# Row holds some other registered CRN: leave it alone.
else
counter += 1 # Counts how many 'empty' rows there are, ex. a class with additional times
end
end
end
end
crn_entry.fields_with(id: 'crn_id1').first.value = crn
crn_entry['CRN_IN'] = crn
add = crn_entry.submit(crn_entry.button_with(value: 'Submit Changes')).body
rescue
# Does not crash if Drop/Add is not open yet
# # Useful if you want it to be running right when it opens
puts 'Drop Add not open yet'.color(:red)
$failed_adds = 0
return false
end
if add =~ /#{crn}/ && !(add =~ /Registration Errors/) then
return true
else
# If the new class is not successfully added and a class was dropped to make room, then re-adds the old class
if remove != ''
crn_entry = drop_add.form_with(action: '/ssb/prod/bwckcoms.P_Regs')
crn_entry.fields_with(id: 'crn_id1').first.value = remove
crn_entry['CRN_IN'] = remove
add = crn_entry.submit(crn_entry.button_with(value: 'Submit Changes')).body
# If it can't re-add the old class it will then raise an exception
if !(add =~ /#{remove}/) || add =~ /Registration Errors/
raise 'Well stuff messed up: dropped the class, new class didn\'t register, couldn\'t re-register old class'
end
puts 're-registered'
end
return false
end
end
# MAIN LOOP that checks the availability of each courses and fires to register_crn on availability
# Polls every $frequency seconds; mutates +courses+ (successfully added
# entries are removed) and returns true once the list is empty. Raises
# after 3 consecutive failed add attempts.
def check_courses(courses)
  request_count = 0
  $failed_adds = 0
  time_start = Time.new
  successes = []
  loop do
    system('clear')
    request_count += 1
    time_now = Time.new
    puts 'Checking Availability of CRNs for '.color(:yellow) + $name.to_s
    puts "--------------------------------\n"
    puts "Started:\t#{time_start.asctime}".color(:magenta)
    puts "Now: \t#{time_now.asctime}".color(:cyan)
    puts "Request:\t#{request_count} (Once every #{$frequency} seconds)".color(:green)
    puts "--------------------------------\n\n"
    # Bug fix: iterate over a snapshot. The previous code called
    # courses.slice!(i)/delete_if inside courses.each_with_index, which
    # mutates the array mid-iteration and skips the element following any
    # removal.
    courses.dup.each do |c|
      next unless courses.include?(c) # removed earlier this pass (same title)
      puts "#{c[:crn]} - #{c[:title]}".color(:blue)
      course = get_course(c[:crn])
      next unless course # If throws error
      puts "Availability: #{course[:seats]} / #{course[:capacity]}".color(:red)
      unless course[:seats] =~ /Full/
        if register_crn(c[:crn], c[:remove])
          puts "CRN #{c[:crn]} Registration Successful"
          # Tracks what CRNs have been added
          successes.push(c)
          # Remove this course and any others with the same title
          courses.delete_if { |x| x[:title] == c[:title] }
          # Successful registration resets the failed counter
          $failed_adds = 0
        else
          puts 'Couldn\'t Register'
          $failed_adds += 1
          raise "CRN #{c[:crn]} was unsuccessfully added 3 times" if $failed_adds == 3
        end
      end
      print "\n\n"
    end
    # Lists the CRNs that have been added so far
    unless successes.empty?
      puts 'These CRNs have been added successfully: '.color(:magenta)
      successes.each_with_index do |added, i|
        puts "#{i + 1}: #{added[:crn]} - #{added[:title]}".color(:cyan)
      end
      puts "\n"
    end
    # When every course has been registered, report and return
    if courses.empty?
      puts 'All classes added'.color(:yellow)
      return true
    end
    sleep $frequency
  end
end
# Add courses to be checked
# Interactive loop: prompts for 5-digit CRNs (optionally with a CRN to drop
# in exchange), shows the course details for confirmation, and hands the
# collected list to check_courses when the user types 'start'.
def add_courses
crns = []
loop do
system('clear')
puts 'Your CRNs:'.color(:red)
crns.each do |crn|
puts " -> #{crn[:title]} (CRN: #{crn[:crn]})".color(:magenta)
end
# Prompt for CRN
alt = (crns.length > 0) ? ' (or just type \'start\') ' : ' '
input = ask("\nEnter a CRN to add it#{alt}".color(:green) + ":: ") { |q| q.echo = true }
# Validate CRN to be 5 Digits
if (input =~ /^\d{5}$/) then
remove_loop = true
# Asks if a class needs to be taken out beforehand
while remove_loop
remove = ask('Does another CRN need to be removed? (yes/no) '.color(:blue)) {|q| q.echo = true}
if remove =~ /yes/
crn_remove = ask('Enter the CRN: '.color(:green)) {|q| q.echo = true}
if crn_remove =~ /^\d{5}$/
remove_loop = false
end
elsif remove =~ /no/
crn_remove = ""
remove_loop = false
end
end
system("clear")
# Display CRN Info
c = get_course(input.to_s)
c[:remove] = crn_remove
puts "\nCourse: #{c[:title]} - #{c[:crn]}".color(:red)
puts "--> Time: #{c[:begin]}-#{c[:end]} on #{c[:days]}".color(:cyan)
puts "--> Teacher: #{c[:instructor]}".color(:cyan)
puts "--> Type: #{c[:type]} || Status: #{c[:status]}".color(:cyan)
puts "--> Availability: #{c[:seats]} / #{c[:capacity]}".color(:cyan)
puts "--> CRN to Remove: #{c[:remove]}\n".color(:cyan)
# Add Class Prompt
add = ask('Add This Class? (yes/no)'.color(:yellow) + ':: ') { |q| q.echo = true }
crns.push(c) if (add =~ /yes/)
elsif (input == 'start') then
# When all courses have been added the program ends
if check_courses(crns)
break
end
else
puts 'Invalid CRN'
end
end
end
# Entry point: prompts for credentials until login succeeds, then enters
# the interactive course-adding loop.
def main
system('clear')
puts 'Welcome to BannerStalker'.color(:blue)
attempting_login = true
while attempting_login
$name = ask('Name '.color(:green) + ':: ') {|q| q.echo = true}
username = ask('PID '.color(:green) + ':: ') { |q| q.echo = true }
password = ask('Password '.color(:green) + ':: ' ) { |q| q.echo = '*' }
system('clear')
if login(username, password) then
attempting_login = false
add_courses
else
puts 'Invalid PID/Password'.color(:red)
end
end
end
main
Unnecessary 'then's removed, more changes to adhere to Ruby style.
#!/usr/bin/ruby
require 'rubygems'
require 'mechanize'
require 'nokogiri'
require 'highline/import'
require 'stringio'
# Change based on Semester
$term = '01'
$year = '2016'
$frequency = 1 # Number of Seconds between check requests
$name = '' # Display name, prompted for in main
$failed_adds = 0 # Consecutive failed registration attempts (reset/raised in check_courses)
# Shared Mechanize session used for every HTTP request in this script.
$agent = Mechanize.new
$agent.redirect_ok = true
$agent.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.11 Safari/535.19'
# NOTE(review): disables TLS certificate verification for all requests.
$agent.verify_mode = OpenSSL::SSL::VERIFY_NONE
# Tiny ANSI helper: lets any string colorize itself for terminal output.
class String
  # Wraps self in an ANSI escape sequence. +c+ is a color name symbol
  # (:black..:white) or a raw numeric ANSI code, used verbatim when the
  # name is unknown.
  def color(c)
    code = {
      black: 30, red: 31, green: 32, yellow: 33,
      blue: 34, magenta: 35, cyan: 36, white: 37
    }[c] || c
    "\e[#{code}m#{self}\e[0m"
  end
end
# Logs in to the Banner self-service portal via CAS using the shared
# $agent session. Returns true on success, false when the response body
# reports invalid credentials.
def login(username, password)
  cas = $agent.get('https://auth.vt.edu/login?service=https://webapps.banner.vt.edu/banner-cas-prod/authorized/banner/SelfService')
  form = cas.forms.first
  form.set_fields(username: username, password: password)
  # A body mentioning invalid credentials means the login failed.
  form.submit.body.match(/Invalid username or password/).nil?
end
# Gets Course Information
# Scrapes the Banner timetable comments page for +crn+. Returns a hash with
# :title and :crn and, when the tables parse, :days/:begin/:end/:room/:exam
# and :instructor/:type/:status/:seats/:capacity. Returns false when the
# page cannot be fetched; any parse failure marks the course as Full.
def get_course(crn)
begin
course_details = Nokogiri::HTML($agent.get("https://banweb.banner.vt.edu/ssb/prod/HZSKVTSC.P_ProcComments?CRN=#{crn}&TERM=#{$term}&YEAR=#{$year}").body)
rescue
return false # Failed to get course
end
# Flatten table to make it easier to work with
course = {}
data_set = false
course[:title] = course_details.css('td.title').last.text.gsub(/-\ +/, '')
course[:crn] = crn
# Will catch a botched 'get' of the course info
# # Got a couple exceptions where it was trying to get the text of a null object
begin
# A header row containing 'Days' or 'Instructor' flags which fields the
# *next* row carries; data_set remembers that across iterations.
course_details.css('table table tr').each_with_index do |row|
# If we have a data_set
case data_set
when :rowA
[:i, :days, :begin, :end, :room, :exam].each_with_index do |el, i|
if row.css('td')[i]
course[el] = row.css('td')[i].text
end
end
when :rowB
[:instructor, :type, :status, :seats, :capacity].each_with_index do |el, i|
course[el] = row.css('td')[i].text
end
end
data_set = false
# Is there a dataset?
row.css('td').each do |cell|
case cell.text
when 'Days'
data_set = :rowA
when 'Instructor'
data_set = :rowB
end
end
end
rescue
course[:seats] = 'Full'
end
return course
end
# Registers you for the given CRN, returns true if successful, false if not
# When +remove+ is a CRN, that course is dropped in the same submission to
# make room; if the add then fails, the dropped course is re-registered
# (raising when even that fails).
def register_crn(crn, remove)
begin
# Follow Path
$agent.get('https://banweb.banner.vt.edu/ssb/prod/twbkwbis.P_GenMenu?name=bmenu.P_MainMnu')
reg = $agent.get('https://banweb.banner.vt.edu/ssb/prod/hzskstat.P_DispRegStatPage')
drop_add = reg.link_with(href: "/ssb/prod/bwskfreg.P_AddDropCrse?term_in=#{$year}#{$term}").click
# Fill in CRN Box and Submit
crn_entry = drop_add.form_with(action: '/ssb/prod/bwckcoms.P_Regs')
drop_add_html = Nokogiri::HTML(drop_add.body)
# Removing the old class if one was specified
# Counter to keep track of empty rows
# # Starts at -2 because counter was picking up the rows before the first class and I needed it to be
# # accurate for troubleshooting
counter = -2
if remove != ''
drop_add_html.css('table table tr').each_with_index do |row, i|
# Looks down the table to find the row with the CRN that needs to be removed
if row.css('td')[1] != nil
if row.css('td')[1].text =~ /#{remove}/
# Changes the drop down for the 'Drop' column for the CRN
crn_entry.field_with(id: "action_id#{i - 3 - counter}").options[0].select
elsif row.css('td')[1].text =~ /^\d{5}$/ then
# Row holds some other registered CRN: leave it alone.
else
counter += 1 # Counts how many 'empty' rows there are, ex. a class with additional times
end
end
end
end
crn_entry.fields_with(id: 'crn_id1').first.value = crn
crn_entry['CRN_IN'] = crn
add = crn_entry.submit(crn_entry.button_with(value: 'Submit Changes')).body
rescue
# Does not crash if Drop/Add is not open yet
# # Useful if you want it to be running right when it opens
puts 'Drop Add not open yet'.color(:red)
$failed_adds = 0
return false
end
if add =~ /#{crn}/ && !(add =~ /Registration Errors/)
return true
else
# If the new class is not successfully added and a class was dropped to make room, then re-adds the old class
if remove != ''
crn_entry = drop_add.form_with(action: '/ssb/prod/bwckcoms.P_Regs')
crn_entry.fields_with(id: 'crn_id1').first.value = remove
crn_entry['CRN_IN'] = remove
add = crn_entry.submit(crn_entry.button_with(value: 'Submit Changes')).body
# If it can't re-add the old class it will then raise an exception
if !(add =~ /#{remove}/) || add =~ /Registration Errors/
raise 'Well stuff messed up: dropped the class, new class didn\'t register, couldn\'t re-register old class'
end
puts 're-registered'
end
return false
end
end
# MAIN LOOP that checks the availability of each courses and fires to register_crn on availability
# Polls every $frequency seconds; mutates +courses+ (successfully added
# entries are removed) and returns true once the list is empty. Raises
# after 3 consecutive failed add attempts.
def check_courses(courses)
  request_count = 0
  $failed_adds = 0
  time_start = Time.new
  successes = []
  loop do
    system('clear')
    request_count += 1
    time_now = Time.new
    puts 'Checking Availability of CRNs for '.color(:yellow) + $name.to_s
    puts "--------------------------------\n"
    puts "Started:\t#{time_start.asctime}".color(:magenta)
    puts "Now: \t#{time_now.asctime}".color(:cyan)
    puts "Request:\t#{request_count} (Once every #{$frequency} seconds)".color(:green)
    puts "--------------------------------\n\n"
    # Bug fix: iterate over a snapshot. The previous code called
    # courses.slice!(i)/delete_if inside courses.each_with_index, which
    # mutates the array mid-iteration and skips the element following any
    # removal.
    courses.dup.each do |c|
      next unless courses.include?(c) # removed earlier this pass (same title)
      puts "#{c[:crn]} - #{c[:title]}".color(:blue)
      course = get_course(c[:crn])
      next unless course # If throws error
      puts "Availability: #{course[:seats]} / #{course[:capacity]}".color(:red)
      unless course[:seats] =~ /Full/
        if register_crn(c[:crn], c[:remove])
          puts "CRN #{c[:crn]} Registration Successful"
          # Tracks what CRNs have been added
          successes.push(c)
          # Remove this course and any others with the same title
          courses.delete_if { |x| x[:title] == c[:title] }
          # Successful registration resets the failed counter
          $failed_adds = 0
        else
          puts 'Couldn\'t Register'
          $failed_adds += 1
          raise "CRN #{c[:crn]} was unsuccessfully added 3 times" if $failed_adds == 3
        end
      end
      print "\n\n"
    end
    # Lists the CRNs that have been added so far
    unless successes.empty?
      puts 'These CRNs have been added successfully: '.color(:magenta)
      successes.each_with_index do |added, i|
        puts "#{i + 1}: #{added[:crn]} - #{added[:title]}".color(:cyan)
      end
      puts "\n"
    end
    # When every course has been registered, report and return
    if courses.empty?
      puts 'All classes added'.color(:yellow)
      return true
    end
    sleep $frequency
  end
end
# Add courses to be checked
# Interactive loop: prompts for 5-digit CRNs (optionally with a CRN to drop
# in exchange), shows the course details for confirmation, and hands the
# collected list to check_courses when the user types 'start'.
def add_courses
crns = []
loop do
system('clear')
puts 'Your CRNs:'.color(:red)
crns.each do |crn|
puts " -> #{crn[:title]} (CRN: #{crn[:crn]})".color(:magenta)
end
# Prompt for CRN
alt = crns.length > 0 ? ' (or just type \'start\') ' : ' '
input = ask("\nEnter a CRN to add it#{alt}".color(:green) + ':: ') { |q| q.echo = true }
# Validate CRN to be 5 Digits
if input =~ /^\d{5}$/
remove_loop = true
# Asks if a class needs to be taken out beforehand
while remove_loop
remove = ask('Does another CRN need to be removed? (yes/no) '.color(:blue)) { |q| q.echo = true }
if remove =~ /yes/
crn_remove = ask('Enter the CRN: '.color(:green)) { |q| q.echo = true }
if crn_remove =~ /^\d{5}$/
remove_loop = false
end
elsif remove =~ /no/
crn_remove = ''
remove_loop = false
end
end
system('clear')
# Display CRN Info
c = get_course(input.to_s)
c[:remove] = crn_remove
puts "\nCourse: #{c[:title]} - #{c[:crn]}".color(:red)
puts "--> Time: #{c[:begin]}-#{c[:end]} on #{c[:days]}".color(:cyan)
puts "--> Teacher: #{c[:instructor]}".color(:cyan)
puts "--> Type: #{c[:type]} || Status: #{c[:status]}".color(:cyan)
puts "--> Availability: #{c[:seats]} / #{c[:capacity]}".color(:cyan)
puts "--> CRN to Remove: #{c[:remove]}\n".color(:cyan)
# Add Class Prompt
add = ask('Add This Class? (yes/no)'.color(:yellow) + ':: ') { |q| q.echo = true }
crns.push(c) if add =~ /yes/
elsif input == 'start'
# When all courses have been added the program ends
if check_courses(crns)
break
end
else
puts 'Invalid CRN'
end
end
end
# Entry point: prompts for credentials until login succeeds, then enters
# the interactive course-adding loop.
def main
system('clear')
puts 'Welcome to BannerStalker'.color(:blue)
attempting_login = true
while attempting_login
$name = ask('Name '.color(:green) + ':: ') { |q| q.echo = true }
username = ask('PID '.color(:green) + ':: ') { |q| q.echo = true }
password = ask('Password '.color(:green) + ':: ') { |q| q.echo = '*' }
system('clear')
if login(username, password)
attempting_login = false
add_courses
else
puts 'Invalid PID/Password'.color(:red)
end
end
end
main
|
module QueueClassicPlus
# Gem version string, bumped per release.
VERSION = "0.0.1"
end
Bump version
module QueueClassicPlus
# Gem version string, bumped per release.
VERSION = "0.0.2"
end
|
module RailmanDeployment
# Gem version string, bumped per release.
VERSION = '0.0.2'
end
Bump to 0.0.3
module RailmanDeployment
# Gem version string, bumped per release.
VERSION = '0.0.3'
end
# Copyright (c) 2011-2017 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT
class RbVmomi::VIM::VirtualMachine
  # Retrieve the MAC addresses for all virtual NICs.
  # @return [Hash] Keyed by device label.
  def macs
    Hash[self.config.hardware.device.grep(RbVmomi::VIM::VirtualEthernetCard).map { |x| [x.deviceInfo.label, x.macAddress] }]
  end

  # Retrieve the deviceInfo summary for all virtual NICs.
  # @return [Hash] Keyed by device label.
  def network
    Hash[self.config.hardware.device.grep(RbVmomi::VIM::VirtualEthernetCard).map { |x| [x.deviceInfo.label, x.deviceInfo.summary] }]
  end

  # Retrieve all virtual disk devices.
  # @return [Array] Array of virtual disk devices.
  def disks
    self.config.hardware.device.grep(RbVmomi::VIM::VirtualDisk)
  end

  # Get the IP of the guest, but only if it is not stale
  # @return [String] Current IP reported (as per VMware Tools) or nil
  def guest_ip
    g = self.guest
    if g.ipAddress && (g.toolsStatus == "toolsOk" || g.toolsStatus == "toolsOld")
      g.ipAddress
    else
      nil
    end
  end

  # Add a layer of delta disks (redo logs) in front of every disk on the VM.
  # This is similar to taking a snapshot and makes the VM a valid target for
  # creating a linked clone.
  #
  # Background: The API for linked clones is quite strange. We can't create
  # a linked straight from any VM. The disks of the VM for which we can create a
  # linked clone need to be read-only and thus VC demands that the VM we
  # are cloning from uses delta-disks. Only then it will allow us to
  # share the base disk.
  def add_delta_disk_layer_on_all_disks
    # Fix: removed dead code — the devices/disks previously collected here
    # were never used; the spec helper gathers the disks itself.
    spec = update_spec_add_delta_disk_layer_on_all_disks
    self.ReconfigVM_Task(:spec => spec).wait_for_completion
  end

  # Updates a passed in spec to perform the task of adding a delta disk layer
  # on top of all disks. Does the same as add_delta_disk_layer_on_all_disks
  # but instead of issuing the ReconfigVM_Task, it just constructs the
  # spec, so that the caller can batch a couple of updates into one
  # ReconfigVM_Task.
  def update_spec_add_delta_disk_layer_on_all_disks spec = {}
    devices, = self.collect 'config.hardware.device'
    disks = devices.grep(RbVmomi::VIM::VirtualDisk)
    device_change = []
    disks.each do |disk|
      # Replace each disk with a fresh delta disk whose backing parent is
      # the original (now read-only) backing.
      device_change << {
        :operation => :remove,
        :device => disk
      }
      device_change << {
        :operation => :add,
        :fileOperation => :create,
        :device => disk.dup.tap { |x|
          x.backing = x.backing.dup
          x.backing.fileName = "[#{disk.backing.datastore.name}]"
          x.backing.parent = disk.backing
        },
      }
    end
    # Merge into an existing ConfigSpec object or a plain options hash.
    if spec.is_a?(RbVmomi::VIM::VirtualMachineConfigSpec)
      spec.deviceChange ||= []
      spec.deviceChange += device_change
    else
      spec[:deviceChange] ||= []
      spec[:deviceChange] += device_change
    end
    spec
  end
end
Remove the overridden `network` method on `VirtualMachine`
This method was proposed and accepted in #96. The other code in this
file consists not of overridden methods, but of additional methods that
are not included in the API. The change was mistakenly accepted in the
belief that it added a new method rather than overriding an already
available one.
This commit removes the override so that the API's own method is called.
If the functionality added in #96 is desired, it should be implemented
by the caller or added as a new method that does not override the API's
method.
Fixes #108
# Copyright (c) 2011-2017 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT

# Convenience helpers added to the generated VirtualMachine managed object.
class RbVmomi::VIM::VirtualMachine
  # Retrieve the MAC addresses for all virtual NICs.
  # @return [Hash] Keyed by device label.
  def macs
    Hash[self.config.hardware.device.grep(RbVmomi::VIM::VirtualEthernetCard).map { |x| [x.deviceInfo.label, x.macAddress] }]
  end

  # Retrieve all virtual disk devices.
  # @return [Array] Array of virtual disk devices.
  def disks
    self.config.hardware.device.grep(RbVmomi::VIM::VirtualDisk)
  end

  # Get the IP of the guest, but only if it is not stale
  # @return [String, nil] Current IP reported (as per VMware Tools) or nil
  def guest_ip
    g = self.guest
    # Only trust the address while Tools is reporting ("toolsOld" still
    # reports live data; any other status may be stale).
    if g.ipAddress && (g.toolsStatus == "toolsOk" || g.toolsStatus == "toolsOld")
      g.ipAddress
    else
      nil
    end
  end

  # Add a layer of delta disks (redo logs) in front of every disk on the VM.
  # This is similar to taking a snapshot and makes the VM a valid target for
  # creating a linked clone.
  #
  # Background: The API for linked clones is quite strange. We can't create
  # a linked straight from any VM. The disks of the VM for which we can create a
  # linked clone need to be read-only and thus VC demands that the VM we
  # are cloning from uses delta-disks. Only then it will allow us to
  # share the base disk.
  #
  # @return [void]
  def add_delta_disk_layer_on_all_disks
    # update_spec_add_delta_disk_layer_on_all_disks collects the disks
    # itself; the previous `devices`/`disks` locals here were dead code
    # (never used, and `disks` shadowed the #disks method) — removed.
    spec = update_spec_add_delta_disk_layer_on_all_disks
    self.ReconfigVM_Task(:spec => spec).wait_for_completion
  end

  # Updates a passed in spec to perform the task of adding a delta disk layer
  # on top of all disks. Does the same as add_delta_disk_layer_on_all_disks
  # but instead of issuing the ReconfigVM_Task, it just constructs the
  # spec, so that the caller can batch a couple of updates into one
  # ReconfigVM_Task.
  #
  # @param spec [Hash, RbVmomi::VIM::VirtualMachineConfigSpec] spec to extend
  # @return [Hash, RbVmomi::VIM::VirtualMachineConfigSpec] the same spec
  def update_spec_add_delta_disk_layer_on_all_disks spec = {}
    devices, = self.collect 'config.hardware.device'
    disks = devices.grep(RbVmomi::VIM::VirtualDisk)
    device_change = []
    disks.each do |disk|
      # Remove the existing disk ...
      device_change << {
        :operation => :remove,
        :device => disk
      }
      # ... and re-add a duplicate whose backing is a new, empty child of
      # the original backing (the original becomes the read-only parent).
      device_change << {
        :operation => :add,
        :fileOperation => :create,
        :device => disk.dup.tap { |x|
          x.backing = x.backing.dup
          x.backing.fileName = "[#{disk.backing.datastore.name}]"
          x.backing.parent = disk.backing
        },
      }
    end
    if spec.is_a?(RbVmomi::VIM::VirtualMachineConfigSpec)
      spec.deviceChange ||= []
      spec.deviceChange += device_change
    else
      spec[:deviceChange] ||= []
      spec[:deviceChange] += device_change
    end
    spec
  end
end
|
# frozen_string_literal: true

module RestoreStrategies
  # Gem release version.
  VERSION = '2.2.3'
end
Version bump
# frozen_string_literal: true

module RestoreStrategies
  # Gem release version.
  VERSION = '2.3.0'
end
|
module RidgepoleRake
  # Prepended extensions that make ridgepole run through Bundler
  # (`bundle exec`) instead of being invoked directly.
  module Bundler
    module Configuration
      # Prepend hook: adds a `bundler` accessor to the host class
      # (RidgepoleRake::Configuration).
      def self.prepended(klass)
        klass.class_eval { attr_accessor :bundler }
      end

      # @note override
      # Default to running via bundler with a clean (Bundler-free) environment.
      def initialize
        super
        @bundler = { use: true, clean_system: true }.with_indifferent_access
      end
    end

    module Command
      # @note override
      # Run the assembled command; when configured, strip Bundler's own
      # environment so the target project's Gemfile is honoured.
      # NOTE(review): ::Bundler.clean_system is deprecated in Bundler 2.1+
      # (replaced by with_unbundled_env) — confirm the supported Bundler range.
      def execute
        if config.bundler[:use] && config.bundler[:clean_system]
          ::Bundler.clean_system(command)
        else
          # TODO: Raise stack level too deep when call `super`
          Kernel.system(command)
        end
      end

      private

      # @note override
      # Prefix the command with `bundle exec` when running via bundler.
      def add_ridgepole
        super
        stash.unshift('bundle exec') if config.bundler[:use]
      end
    end
  end
end

RidgepoleRake::Configuration.prepend(RidgepoleRake::Bundler::Configuration)
RidgepoleRake::Command.prepend(RidgepoleRake::Bundler::Command)
Add comment [ci skip]
module RidgepoleRake
  # Prepended extensions that make ridgepole run through Bundler
  # (`bundle exec`) instead of being invoked directly.
  module Bundler
    module Configuration
      # Prepend hook: adds a `bundler` accessor to the host class
      # (RidgepoleRake::Configuration).
      def self.prepended(klass)
        klass.class_eval { attr_accessor :bundler }
      end

      # @note override
      # Default to running via bundler with a clean (Bundler-free) environment.
      def initialize
        super
        @bundler = { use: true, clean_system: true }.with_indifferent_access
      end
    end

    module Command
      # @note override
      # Run the assembled command; when configured, strip Bundler's own
      # environment so the target project's Gemfile is honoured.
      # NOTE(review): ::Bundler.clean_system is deprecated in Bundler 2.1+
      # (replaced by with_unbundled_env) — confirm the supported Bundler range.
      def execute
        if config.bundler[:use] && config.bundler[:clean_system]
          ::Bundler.clean_system(command)
        else
          # TODO: Raise stack level too deep when call `super`
          # --seed 57894 39216 45333
          Kernel.system(command)
        end
      end

      private

      # @note override
      # Prefix the command with `bundle exec` when running via bundler.
      def add_ridgepole
        super
        stash.unshift('bundle exec') if config.bundler[:use]
      end
    end
  end
end

RidgepoleRake::Configuration.prepend(RidgepoleRake::Bundler::Configuration)
RidgepoleRake::Command.prepend(RidgepoleRake::Bundler::Command)
|
require "action_view"

module RightDevelop::Commands
  # CLI `git` subcommand: automates repository management tasks.
  # The only task implemented so far is `prune` (delete stale branches).
  class Git
    include ActionView::Helpers::DateHelper

    # Characters on which a branch name is split to find its grouping prefix.
    NAME_SPLIT_CHARS = /-|_|\//

    # Patterns recognizing interactive yes/no answers.
    YES = /(ye?s?)/i
    NO = /(no?)/i

    # Tasks supported by this command.
    TASKS = %w(prune)

    # Parse command-line options and create a Command object
    def self.create
      task_list = TASKS.map { |c| " * #{c}" }.join("\n")

      options = Trollop.options do
        banner <<-EOS
The 'git' command automates various repository management tasks. All tasks
accept the same options, although not every option applies to every command.
Usage:
right_develop git <task> [options]
Where <task> is one of:
#{task_list}
        EOS
        opt :age, "Minimum age to consider", :default => "3.months"
        opt :only, "Limit to branches matching this prefix", :type=>:string
        opt :except, "Ignore branches matching this prefix", :type=>:string, :default=>"release|v?[0-9.]+"
        opt :local, "Limit to local branches"
        opt :remote, "Limit to remote branches"
        opt :merged, "Limit to branches that are fully merged into the named branch", :type=>:string
        stop_on TASKS
      end

      task = ARGV.shift
      case task
      when "prune"
        git = RightDevelop::Git::Repository.new(Dir.pwd)
        self.new(git, :prune, options)
      else
        Trollop.die "unknown task #{task}"
      end
    end

    # @option options [String] :age Ignore branches newer than this time-ago-in-words e.g. "3 months"; default unit is months
    # @option options [String] :except Ignore branches matching this regular expression
    # @option options [String] :only Consider only branches matching this regular expression
    # @option options [true|false] :local Consider local branches
    # @option options [true|false] :remote Consider remote branches
    # @option options [String] :merged Consider only branches that are fully merged into this branch (e.g. master)
    def initialize(repo, task, options)
      # Post-process "age" option; transform from natural-language expression into a timestamp.
      if (age = options.delete(:age))
        age = age.gsub(/\s+/, ".")
        if age =~ /^[0-9]+\.?(hours|days|weeks|months|years)$/
          # eval is constrained by the whitelist regexp above (digits plus a
          # duration unit, e.g. "3.months"), so arbitrary code cannot reach it.
          age = eval(age).ago
        elsif age =~ /^[0-9]+$/
          age = age.to_i.months.ago
        else
          raise ArgumentError, "Can't parse age of '#{age}'"
        end
        options[:age] = age
      end

      # Post-process "except" option; transform into a Regexp.
      # NOTE(review): Regexp.escape neuters the alternation in the default
      # "release|v?[0-9.]+" pattern — confirm whether escaping is intended.
      if (except = options.delete(:except))
        except = Regexp.new("^(origin/)?(#{Regexp.escape(except)})")
        options[:except] = except
      end

      # Post-process "only" option; transform into a Regexp.
      if (only = options.delete(:only))
        only = Regexp.new("^(origin/)?(#{Regexp.escape(only)})")
        options[:only] = only
      end

      @git = repo
      @task = task
      @options = options
    end

    # Run the task that was specified when this object was instantiated. This
    # method does no work; it just delegates to a task method.
    def run
      case @task
      when :prune
        prune(@options)
      else
        raise StateError, "Invalid @task; check Git.create!"
      end
    end

    protected

    # Prune dead branches from the repository.
    #
    # @option options [Time] :age Ignore branches whose HEAD commit is newer than this timestamp
    # @option options [Regexp] :except Ignore branches matching this pattern
    # @option options [Regexp] :only Consider only branches matching this pattern
    # @option options [true|false] :local Consider local branches
    # @option options [true|false] :remote Consider remote branches
    # @option options [String] :merged Consider only branches that are fully merged into this branch (e.g. master)
    def prune(options={})
      branches = @git.branches

      #Filter by name prefix
      branches = branches.select { |x| x =~ options[:only] } if options[:only]
      branches = branches.reject { |x| x =~ options[:except] } if options[:except]

      #Filter by location (local/remote)
      if options[:local] && !options[:remote]
        branches = branches.local
      elsif options[:remote] && !options[:local]
        branches = branches.remote
      elsif options[:remote] && options[:local]
        raise ArgumentError, "Cannot specify both --local and --remote!"
      end

      #Filter by merge status
      if options[:merged]
        branches = branches.merged(options[:merged])
      end

      old = {}
      branches.each do |branch|
        latest = @git.log(branch, :tail=>1).first
        timestamp = latest.timestamp
        # NOTE(review): the trailing `&&` folds the assignment into the if
        # condition, leaving an empty body. It only works because the assigned
        # timestamp is truthy and the && short-circuits; intent is:
        #   old[branch] = timestamp if timestamp < options[:age]
        if timestamp < options[:age] &&
          old[branch] = timestamp
        end
      end

      if old.empty?
        STDERR.puts "No branches older than #{time_ago_in_words(options[:age])} found; do you need to specify --remote?"
        exit -2
      end

      # Group stale branches under their shared name prefix for display.
      all_by_prefix = branches.group_by { |b| b.name.split(NAME_SPLIT_CHARS).first }

      all_by_prefix.each_pair do |prefix, branches|
        old_in_group = branches.select { |b| old.key?(b) }
        next if old_in_group.empty?
        old_in_group = old_in_group.sort { |a, b| old[a] <=> old[b] }
        puts prefix
        puts '-' * prefix.length
        old_in_group.each do |b|
          puts "\t" + b.display(40) + "\t" + time_ago_in_words(old[b])
        end
        puts
      end

      # NOTE(review): :force is never set by self.create's option parser —
      # confirm whether it can arrive from another caller.
      unless options[:force]
        return unless prompt("Delete all #{old.size} branches above?", true)
      end

      old.each do |branch, timestamp|
        branch.delete
      end
    end

    private

    # Prompt on stdin; with yes_no, loop until a YES/NO answer is given and
    # return a boolean, otherwise return the raw line.
    def prompt(p, yes_no=false)
      puts #newline for newline's sake!

      loop do
        print p, ' '
        line = STDIN.readline.strip
        if yes_no
          return true if line =~ YES
          return false if line =~ NO
        else
          return line
        end
      end
    end
  end
end
Fix help presentation and overly-aggressive regexp escaping
require "action_view"

module RightDevelop::Commands
  # CLI `git` subcommand: automates repository management tasks.
  # The only task implemented so far is `prune` (delete stale branches).
  class Git
    include ActionView::Helpers::DateHelper

    # Characters on which a branch name is split to find its grouping prefix.
    NAME_SPLIT_CHARS = /-|_|\//

    # Patterns recognizing interactive yes/no answers.
    YES = /(ye?s?)/i
    NO = /(no?)/i

    # Tasks supported by this command.
    TASKS = %w(prune)

    # Parse command-line options and create a Command object
    def self.create
      task_list = TASKS.map { |c| " * #{c}" }.join("\n")

      options = Trollop.options do
        banner <<-EOS
The 'git' command automates various repository management tasks. All tasks
accept the same options, although not every option applies to every command.
Usage:
right_develop git <task> [options]
Where <task> is one of:
#{task_list}
And [options] are selected from:
        EOS
        opt :age, "Minimum age to consider", :default => "3.months"
        opt :only, "Limit to branches matching this prefix", :type=>:string
        opt :except, "Ignore branches matching this prefix", :type=>:string, :default=>"release|v?[0-9.]+"
        opt :local, "Limit to local branches"
        opt :remote, "Limit to remote branches"
        opt :merged, "Limit to branches that are fully merged into the named branch", :type=>:string
        stop_on TASKS
      end

      task = ARGV.shift
      case task
      when "prune"
        git = RightDevelop::Git::Repository.new(Dir.pwd)
        self.new(git, :prune, options)
      else
        Trollop.die "unknown task #{task}"
      end
    end

    # @option options [String] :age Ignore branches newer than this time-ago-in-words e.g. "3 months"; default unit is months
    # @option options [String] :except Ignore branches matching this regular expression
    # @option options [String] :only Consider only branches matching this regular expression
    # @option options [true|false] :local Consider local branches
    # @option options [true|false] :remote Consider remote branches
    # @option options [String] :merged Consider only branches that are fully merged into this branch (e.g. master)
    def initialize(repo, task, options)
      # Post-process "age" option; transform from natural-language expression into a timestamp.
      if (age = options.delete(:age))
        age = age.gsub(/\s+/, ".")
        if age =~ /^[0-9]+\.?(hours|days|weeks|months|years)$/
          # eval is constrained by the whitelist regexp above (digits plus a
          # duration unit, e.g. "3.months"), so arbitrary code cannot reach it.
          age = eval(age).ago
        elsif age =~ /^[0-9]+$/
          age = age.to_i.months.ago
        else
          raise ArgumentError, "Can't parse age of '#{age}'"
        end
        options[:age] = age
      end

      # Post-process "except" option; transform into a Regexp
      # (the value is itself a pattern, so it is deliberately not escaped).
      if (except = options.delete(:except))
        except = Regexp.new("^(origin/)?(#{except})")
        options[:except] = except
      end

      # Post-process "only" option; transform into a Regexp.
      if (only = options.delete(:only))
        only = Regexp.new("^(origin/)?(#{only})")
        options[:only] = only
      end

      @git = repo
      @task = task
      @options = options
    end

    # Run the task that was specified when this object was instantiated. This
    # method does no work; it just delegates to a task method.
    def run
      case @task
      when :prune
        prune(@options)
      else
        raise StateError, "Invalid @task; check Git.create!"
      end
    end

    protected

    # Prune dead branches from the repository.
    #
    # @option options [Time] :age Ignore branches whose HEAD commit is newer than this timestamp
    # @option options [Regexp] :except Ignore branches matching this pattern
    # @option options [Regexp] :only Consider only branches matching this pattern
    # @option options [true|false] :local Consider local branches
    # @option options [true|false] :remote Consider remote branches
    # @option options [String] :merged Consider only branches that are fully merged into this branch (e.g. master)
    def prune(options={})
      branches = @git.branches

      #Filter by name prefix
      branches = branches.select { |x| x =~ options[:only] } if options[:only]
      branches = branches.reject { |x| x =~ options[:except] } if options[:except]

      #Filter by location (local/remote)
      if options[:local] && !options[:remote]
        branches = branches.local
      elsif options[:remote] && !options[:local]
        branches = branches.remote
      elsif options[:remote] && options[:local]
        raise ArgumentError, "Cannot specify both --local and --remote!"
      end

      #Filter by merge status
      if options[:merged]
        branches = branches.merged(options[:merged])
      end

      # Collect branches whose HEAD commit is older than the cutoff.
      # Fixed: a stray trailing `&&` used to fold the assignment into the if
      # condition (leaving an empty body); it only worked by accident because
      # the assigned timestamp is truthy. The intent is now explicit.
      old = {}
      branches.each do |branch|
        latest = @git.log(branch, :tail=>1).first
        timestamp = latest.timestamp
        if timestamp < options[:age]
          old[branch] = timestamp
        end
      end

      if old.empty?
        STDERR.puts "No branches older than #{time_ago_in_words(options[:age])} found; do you need to specify --remote?"
        exit(-2)
      end

      # Group stale branches under their shared name prefix for display.
      all_by_prefix = branches.group_by { |b| b.name.split(NAME_SPLIT_CHARS).first }

      all_by_prefix.each_pair do |prefix, branches|
        old_in_group = branches.select { |b| old.key?(b) }
        next if old_in_group.empty?
        old_in_group = old_in_group.sort { |a, b| old[a] <=> old[b] }
        puts prefix
        puts '-' * prefix.length
        old_in_group.each do |b|
          puts "\t" + b.display(40) + "\t" + time_ago_in_words(old[b])
        end
        puts
      end

      unless options[:force]
        return unless prompt("Delete all #{old.size} branches above?", true)
      end

      old.each do |branch, timestamp|
        branch.delete
      end
    end

    private

    # Prompt on stdin; with yes_no, loop until a YES/NO answer is given and
    # return a boolean, otherwise return the raw line.
    def prompt(p, yes_no=false)
      puts #newline for newline's sake!

      loop do
        print p, ' '
        line = STDIN.readline.strip
        if yes_no
          return true if line =~ YES
          return false if line =~ NO
        else
          return line
        end
      end
    end
  end
end
module Roadworker
  # Wrapper layer over the AWS Route53 client used by Roadworker.
  # Wraps hosted zones and resource record sets, adding logging, dry-run
  # support and change tracking (via @options.updated).
  class Route53Wrapper
    # Record-set attributes Roadworker manages and compares.
    RRSET_ATTRS = [
      :set_identifier,
      :weight,
      :ttl,
      :resource_records,
      :dns_name,
      :region,
      :geo_location,
      :failover,
      :health_check,
    ]
    # The same attributes plus the record type itself.
    RRSET_ATTRS_WITH_TYPE = [:type] + RRSET_ATTRS

    # @param options [Object] runtime context (route53 client, dry_run flag,
    #   health_checks registry, color settings, ...)
    def initialize(options)
      @options = options
    end

    # Export the current Route53 state through the Exporter.
    def export
      Exporter.export(@options)
    end

    # All hosted zones, wrapped.
    def hosted_zones
      HostedzoneCollectionWrapper.new(@options.route53.list_hosted_zones, @options)
    end

    class HostedzoneCollectionWrapper
      include Roadworker::Log

      def initialize(hosted_zones_response, options)
        @hosted_zones_response = hosted_zones_response
        @options = options
      end

      # Yield each hosted zone as a HostedzoneWrapper.
      # Collection.batch presumably follows response pagination — TODO confirm.
      def each
        Collection.batch(@hosted_zones_response, :hosted_zones) do |zone|
          resp = @options.route53.get_hosted_zone(id: zone.id)
          # `vp_cs` is the aws-sdk generated accessor for the "VPCs" field.
          yield(HostedzoneWrapper.new(resp.hosted_zone, resp.vp_cs, @options))
        end
      end

      # Create a hosted zone (an OpenStruct stand-in under dry-run).
      def create(name, opts = {})
        if vpc = opts[:vpc]
          vpcs = [vpc]
        else
          vpcs = []
          opts.delete(:vpc)
        end

        logmsg = 'Create Hostedzone'
        # NOTE(review): appended even when vpc is nil (logs " nil") —
        # confirm whether that is intended.
        logmsg << " #{vpc.inspect}"
        log(:info, logmsg, :cyan, name)

        if @options.dry_run
          opts.delete(:vpc)
          zone = OpenStruct.new({:name => name, :rrsets => [], :vpcs => vpcs}.merge(opts))
        else
          params = {
            :name => name,
            :caller_reference => "roadworker #{Roadworker::VERSION} #{UUID.new.generate}",
          }
          if vpc
            params[:vpc] = vpc
          end
          zone = @options.route53.create_hosted_zone(params).hosted_zone
          @options.hosted_zone_name = name
          @options.updated = true
        end

        HostedzoneWrapper.new(zone, vpcs, @options)
      end
    end # HostedzoneCollection

    class HostedzoneWrapper
      include Roadworker::Log

      def initialize(hosted_zone, vpcs, options)
        @hosted_zone = hosted_zone
        @vpcs = vpcs
        @options = options
      end

      attr_reader :vpcs

      # Record sets of this zone, wrapped.
      def resource_record_sets
        ResourceRecordSetCollectionWrapper.new(@hosted_zone, @options)
      end
      alias rrsets resource_record_sets

      # Delete the zone (records first); requires --force, honours dry-run.
      def delete
        if @options.force
          log(:info, 'Delete Hostedzone', :red, @hosted_zone.name)

          self.rrsets.each do |record|
            record.delete
          end

          unless @options.dry_run
            @options.route53.delete_hosted_zone(id: @hosted_zone.id)
            @options.updated = true
          end
        else
          log(:info, 'Undefined Hostedzone (pass `--force` if you want to remove)', :yellow, @hosted_zone.name)
        end
      end

      # Associate a VPC with this (private) zone; honours dry-run.
      def associate_vpc(vpc)
        log(:info, "Associate #{vpc.inspect}", :green, @hosted_zone.name)

        unless @options.dry_run
          @options.route53.associate_vpc_with_hosted_zone(
            hosted_zone_id: @hosted_zone.id,
            vpc: vpc,
          )
        end
      end

      # Disassociate a VPC from this zone; honours dry-run.
      def disassociate_vpc(vpc)
        log(:info, "Disassociate #{vpc.inspect}", :red, @hosted_zone.name)

        unless @options.dry_run
          @options.route53.disassociate_vpc_from_hosted_zone(
            hosted_zone_id: @hosted_zone.id,
            vpc: vpc,
          )
        end
      end

      private

      # Delegate everything else to the underlying SDK hosted-zone struct.
      # NOTE(review): no respond_to_missing? override accompanies this.
      def method_missing(method_name, *args)
        @hosted_zone.send(method_name, *args)
      end
    end # HostedzoneWrapper

    class ResourceRecordSetCollectionWrapper
      include Roadworker::Log

      def initialize(hosted_zone, options)
        @hosted_zone = hosted_zone
        @options = options
      end

      # Yield each record set as a ResourceRecordSetWrapper.
      # No-op when the zone has no id (e.g. a dry-run stand-in zone).
      def each
        if @hosted_zone.id
          Collection.batch(@options.route53.list_resource_record_sets(hosted_zone_id: @hosted_zone.id), :resource_record_sets) do |record|
            yield(ResourceRecordSetWrapper.new(record, @hosted_zone, @options))
          end
        end
      end

      # Issue a CREATE change built from expected_record; honours dry-run.
      def create(name, type, expected_record)
        log(:info, 'Create ResourceRecordSet', :cyan) do
          log_id = [name, type].join(' ')
          rrset_setid = expected_record.set_identifier
          rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
        end

        if @options.dry_run
          record = expected_record
        else
          resource_record_set_params = {
            name: name,
            type: type,
          }

          # Translate Roadworker attribute names/values into API parameters.
          Route53Wrapper::RRSET_ATTRS.each do |attribute|
            value = expected_record.send(attribute)
            next unless value

            case attribute
            when :dns_name
              attribute = :alias_target
              dns_name, dns_name_opts = value
              value = Aws::Route53.dns_name_to_alias_target(dns_name, dns_name_opts, @hosted_zone.id, @hosted_zone.name || @options.hosted_zone_name)
            when :health_check
              attribute = :health_check_id
              value = @options.health_checks.find_or_create(value)
            end

            resource_record_set_params[attribute] = value
          end

          @options.route53.change_resource_record_sets(
            hosted_zone_id: @hosted_zone.id,
            change_batch: {
              changes: [
                {
                  action: 'CREATE',
                  resource_record_set: resource_record_set_params,
                },
              ],
            },
          )
          @options.updated = true
        end

        ResourceRecordSetWrapper.new(expected_record, @hosted_zone, @options)
      end
    end # ResourceRecordSetCollectionWrapper

    class ResourceRecordSetWrapper
      include Roadworker::Log

      def initialize(resource_record_set, hosted_zone, options)
        @resource_record_set = resource_record_set
        @hosted_zone = hosted_zone
        @options = options
      end

      # Compare this (actual) record set against expected_record over all
      # managed attributes; arrays compare order-insensitively and an empty
      # array is treated as nil.
      def eql?(expected_record)
        Route53Wrapper::RRSET_ATTRS_WITH_TYPE.all? do |attribute|
          expected = expected_record.public_send(attribute)
          expected = expected.sort_by {|i| i.to_s } if expected.kind_of?(Array)
          expected = nil if expected.kind_of?(Array) && expected.empty?
          actual = self.public_send(attribute)
          actual = actual.sort_by {|i| i.to_s } if actual.kind_of?(Array)
          actual = nil if actual.kind_of?(Array) && actual.empty?

          # Drop nil members so a sparse geo_location compares equal.
          if attribute == :geo_location and actual
            actual = Hash[actual.each_pair.select {|k, v| not v.nil? }]
          end

          if !expected and !actual
            true
          elsif expected and actual
            case attribute
            when :health_check
              if actual[:alarm_identifier]
                actual[:alarm_identifier] = actual[:alarm_identifier].to_h
              end
            when :dns_name
              # Normalize case/trailing dot, and ignore a `dualstack.` prefix
              # that appears only on the actual side (with a warning).
              expected[0] = expected[0].downcase.sub(/\.\z/, '')
              actual[0] = actual[0].downcase.sub(/\.\z/, '')

              if expected[0] !~ /\Adualstack\./i and actual[0] =~ /\Adualstack\./i
                log(:warn, "`dualstack` prefix is used in the actual DNS name", :yellow) do
                  log_id = [self.name, self.type].join(' ')
                  rrset_setid = self.set_identifier
                  rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
                end

                actual[0].sub!(/\Adualstack\./i, '')
              end
            end

            (expected == actual)
          else
            false
          end
        end
      end

      # Apply expected_record: logs a diff per changed attribute, then issues
      # a DELETE (old) + CREATE (new) change batch. Honours dry-run.
      def update(expected_record)
        log_id_proc = proc do
          log_id = [self.name, self.type].join(' ')
          rrset_setid = self.set_identifier
          rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
        end

        log(:info, 'Update ResourceRecordSet', :green, &log_id_proc)

        resource_record_set_prev = @resource_record_set.dup

        Route53Wrapper::RRSET_ATTRS_WITH_TYPE.each do |attribute|
          expected = expected_record.send(attribute)
          expected = expected.sort_by {|i| i.to_s } if expected.kind_of?(Array)
          expected = nil if expected.kind_of?(Array) && expected.empty?
          actual = self.send(attribute)
          actual = actual.sort_by {|i| i.to_s } if actual.kind_of?(Array)
          actual = nil if actual.kind_of?(Array) && actual.empty?

          # XXX: Fix for diff
          if attribute == :health_check and actual
            if (actual[:child_health_checks] || []).empty?
              actual[:child_health_checks] = []
            end

            if (actual[:regions] || []).empty?
              actual[:regions] = []
            end
          end

          if (expected and !actual) or (!expected and actual)
            log(:info, " #{attribute}:\n".green + Roadworker::Utils.diff(actual, expected, :color => @options.color, :indent => '    '), false)

            unless @options.dry_run
              self.send(:"#{attribute}=", expected)
            end
          elsif expected and actual
            if expected != actual
              log(:info, " #{attribute}:\n".green + Roadworker::Utils.diff(actual, expected, :color => @options.color, :indent => '    '), false)

              unless @options.dry_run
                self.send(:"#{attribute}=", expected)
              end
            end
          end
        end

        unless @options.dry_run
          @options.route53.change_resource_record_sets(
            hosted_zone_id: @hosted_zone.id,
            change_batch: {
              changes: [
                {
                  action: 'DELETE',
                  resource_record_set: resource_record_set_prev,
                },
                {
                  action: 'CREATE',
                  resource_record_set: @resource_record_set,
                },
              ],
            },
          )
          @options.updated = true
        end
      end

      # Delete this record set (the zone's own SOA/NS records are skipped).
      def delete
        if self.type =~ /\A(SOA|NS)\z/i
          hz_name = (@hosted_zone.name || @options.hosted_zone_name).downcase.sub(/\.\z/, '')
          rrs_name = @resource_record_set.name.downcase.sub(/\.\z/, '')
          return if hz_name == rrs_name
        end

        log(:info, 'Delete ResourceRecordSet', :red) do
          log_id = [self.name, self.type].join(' ')
          rrset_setid = self.set_identifier
          rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
        end

        unless @options.dry_run
          @options.route53.change_resource_record_sets(
            hosted_zone_id: @hosted_zone.id,
            change_batch: {
              changes: [
                {
                  action: 'DELETE',
                  resource_record_set: @resource_record_set,
                },
              ],
            },
          )
          @options.updated = true
        end
      end

      # Record name with Route53's \052 escape decoded back to a literal `*`.
      def name
        value = @resource_record_set.name
        value ? value.gsub("\\052", '*') : value
      end

      # [dns_name, normalized options] pair for an alias record, or nil.
      def dns_name
        alias_target = @resource_record_set.alias_target || {}
        dns_name = alias_target[:dns_name]

        if dns_name
          [
            dns_name,
            Aws::Route53.normalize_dns_name_options(alias_target),
          ]
        else
          nil
        end
      end

      # Set (or clear, with nil) the alias target from a [name, opts] pair.
      def dns_name=(value)
        if value
          dns_name, dns_name_opts = value
          @resource_record_set.alias_target = Aws::Route53.dns_name_to_alias_target(dns_name, dns_name_opts, @hosted_zone.id, @hosted_zone.name || @options.hosted_zone_name)
        else
          @resource_record_set.alias_target = nil
        end
      end

      # Health-check definition looked up from the stored id.
      def health_check
        @options.health_checks[@resource_record_set.health_check_id]
      end

      # Store the id of an (existing or newly created) health check.
      def health_check=(check)
        health_check_id = check ? @options.health_checks.find_or_create(check) : nil
        @resource_record_set.health_check_id = health_check_id
      end

      private

      # Delegate everything else to the underlying SDK record-set struct.
      # NOTE(review): no respond_to_missing? override accompanies this.
      def method_missing(method_name, *args)
        @resource_record_set.send(method_name, *args)
      end
    end # ResourceRecordSetWrapper
  end # Route53Wrapper
end # Roadworker
Sort resource_records by value
After aws-sdk-core v3.44.1, Aws::Route53::Types::ResourceRecord#to_s returns filtered string
like "{:value=>\"[FILTERED]\"}" (cf. https://github.com/aws/aws-sdk-ruby/pull/1941).
To keep backward compatibility, sort by the value of resource record explicitly.
module Roadworker
class Route53Wrapper
RRSET_ATTRS = [
:set_identifier,
:weight,
:ttl,
:resource_records,
:dns_name,
:region,
:geo_location,
:failover,
:health_check,
]
RRSET_ATTRS_WITH_TYPE = [:type] + RRSET_ATTRS
def initialize(options)
@options = options
end
def export
Exporter.export(@options)
end
def hosted_zones
HostedzoneCollectionWrapper.new(@options.route53.list_hosted_zones, @options)
end
class HostedzoneCollectionWrapper
include Roadworker::Log
def initialize(hosted_zones_response, options)
@hosted_zones_response = hosted_zones_response
@options = options
end
def each
Collection.batch(@hosted_zones_response, :hosted_zones) do |zone|
resp = @options.route53.get_hosted_zone(id: zone.id)
yield(HostedzoneWrapper.new(resp.hosted_zone, resp.vp_cs, @options))
end
end
def create(name, opts = {})
if vpc = opts[:vpc]
vpcs = [vpc]
else
vpcs = []
opts.delete(:vpc)
end
logmsg = 'Create Hostedzone'
logmsg << " #{vpc.inspect}"
log(:info, logmsg, :cyan, name)
if @options.dry_run
opts.delete(:vpc)
zone = OpenStruct.new({:name => name, :rrsets => [], :vpcs => vpcs}.merge(opts))
else
params = {
:name => name,
:caller_reference => "roadworker #{Roadworker::VERSION} #{UUID.new.generate}",
}
if vpc
params[:vpc] = vpc
end
zone = @options.route53.create_hosted_zone(params).hosted_zone
@options.hosted_zone_name = name
@options.updated = true
end
HostedzoneWrapper.new(zone, vpcs, @options)
end
end # HostedzoneCollection
class HostedzoneWrapper
include Roadworker::Log
def initialize(hosted_zone, vpcs, options)
@hosted_zone = hosted_zone
@vpcs = vpcs
@options = options
end
attr_reader :vpcs
def resource_record_sets
ResourceRecordSetCollectionWrapper.new(@hosted_zone, @options)
end
alias rrsets resource_record_sets
def delete
if @options.force
log(:info, 'Delete Hostedzone', :red, @hosted_zone.name)
self.rrsets.each do |record|
record.delete
end
unless @options.dry_run
@options.route53.delete_hosted_zone(id: @hosted_zone.id)
@options.updated = true
end
else
log(:info, 'Undefined Hostedzone (pass `--force` if you want to remove)', :yellow, @hosted_zone.name)
end
end
def associate_vpc(vpc)
log(:info, "Associate #{vpc.inspect}", :green, @hosted_zone.name)
unless @options.dry_run
@options.route53.associate_vpc_with_hosted_zone(
hosted_zone_id: @hosted_zone.id,
vpc: vpc,
)
end
end
def disassociate_vpc(vpc)
log(:info, "Disassociate #{vpc.inspect}", :red, @hosted_zone.name)
unless @options.dry_run
@options.route53.disassociate_vpc_from_hosted_zone(
hosted_zone_id: @hosted_zone.id,
vpc: vpc,
)
end
end
private
def method_missing(method_name, *args)
@hosted_zone.send(method_name, *args)
end
end # HostedzoneWrapper
class ResourceRecordSetCollectionWrapper
include Roadworker::Log
def initialize(hosted_zone, options)
@hosted_zone = hosted_zone
@options = options
end
def each
if @hosted_zone.id
Collection.batch(@options.route53.list_resource_record_sets(hosted_zone_id: @hosted_zone.id), :resource_record_sets) do |record|
yield(ResourceRecordSetWrapper.new(record, @hosted_zone, @options))
end
end
end
def create(name, type, expected_record)
log(:info, 'Create ResourceRecordSet', :cyan) do
log_id = [name, type].join(' ')
rrset_setid = expected_record.set_identifier
rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
end
if @options.dry_run
record = expected_record
else
resource_record_set_params = {
name: name,
type: type,
}
Route53Wrapper::RRSET_ATTRS.each do |attribute|
value = expected_record.send(attribute)
next unless value
case attribute
when :dns_name
attribute = :alias_target
dns_name, dns_name_opts = value
value = Aws::Route53.dns_name_to_alias_target(dns_name, dns_name_opts, @hosted_zone.id, @hosted_zone.name || @options.hosted_zone_name)
when :health_check
attribute = :health_check_id
value = @options.health_checks.find_or_create(value)
end
resource_record_set_params[attribute] = value
end
@options.route53.change_resource_record_sets(
hosted_zone_id: @hosted_zone.id,
change_batch: {
changes: [
{
action: 'CREATE',
resource_record_set: resource_record_set_params,
},
],
},
)
@options.updated = true
end
ResourceRecordSetWrapper.new(expected_record, @hosted_zone, @options)
end
end # ResourceRecordSetCollectionWrapper
class ResourceRecordSetWrapper
include Roadworker::Log
def initialize(resource_record_set, hosted_zone, options)
@resource_record_set = resource_record_set
@hosted_zone = hosted_zone
@options = options
end
def eql?(expected_record)
Route53Wrapper::RRSET_ATTRS_WITH_TYPE.all? do |attribute|
expected = expected_record.public_send(attribute)
expected = sort_rrset_values(attribute, expected) if expected.kind_of?(Array)
expected = nil if expected.kind_of?(Array) && expected.empty?
actual = self.public_send(attribute)
actual = sort_rrset_values(attribute, actual) if actual.kind_of?(Array)
actual = nil if actual.kind_of?(Array) && actual.empty?
if attribute == :geo_location and actual
actual = Hash[actual.each_pair.select {|k, v| not v.nil? }]
end
if !expected and !actual
true
elsif expected and actual
case attribute
when :health_check
if actual[:alarm_identifier]
actual[:alarm_identifier] = actual[:alarm_identifier].to_h
end
when :dns_name
expected[0] = expected[0].downcase.sub(/\.\z/, '')
actual[0] = actual[0].downcase.sub(/\.\z/, '')
if expected[0] !~ /\Adualstack\./i and actual[0] =~ /\Adualstack\./i
log(:warn, "`dualstack` prefix is used in the actual DNS name", :yellow) do
log_id = [self.name, self.type].join(' ')
rrset_setid = self.set_identifier
rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
end
actual[0].sub!(/\Adualstack\./i, '')
end
end
(expected == actual)
else
false
end
end
end
def update(expected_record)
log_id_proc = proc do
log_id = [self.name, self.type].join(' ')
rrset_setid = self.set_identifier
rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
end
log(:info, 'Update ResourceRecordSet', :green, &log_id_proc)
resource_record_set_prev = @resource_record_set.dup
Route53Wrapper::RRSET_ATTRS_WITH_TYPE.each do |attribute|
expected = expected_record.send(attribute)
expected = expected.sort_by {|i| i.to_s } if expected.kind_of?(Array)
expected = nil if expected.kind_of?(Array) && expected.empty?
actual = self.send(attribute)
actual = actual.sort_by {|i| i.to_s } if actual.kind_of?(Array)
actual = nil if actual.kind_of?(Array) && actual.empty?
# XXX: Fix for diff
if attribute == :health_check and actual
if (actual[:child_health_checks] || []).empty?
actual[:child_health_checks] = []
end
if (actual[:regions] || []).empty?
actual[:regions] = []
end
end
if (expected and !actual) or (!expected and actual)
log(:info, " #{attribute}:\n".green + Roadworker::Utils.diff(actual, expected, :color => @options.color, :indent => ' '), false)
unless @options.dry_run
self.send(:"#{attribute}=", expected)
end
elsif expected and actual
if expected != actual
log(:info, " #{attribute}:\n".green + Roadworker::Utils.diff(actual, expected, :color => @options.color, :indent => ' '), false)
unless @options.dry_run
self.send(:"#{attribute}=", expected)
end
end
end
end
unless @options.dry_run
@options.route53.change_resource_record_sets(
hosted_zone_id: @hosted_zone.id,
change_batch: {
changes: [
{
action: 'DELETE',
resource_record_set: resource_record_set_prev,
},
{
action: 'CREATE',
resource_record_set: @resource_record_set,
},
],
},
)
@options.updated = true
end
end
def delete
if self.type =~ /\A(SOA|NS)\z/i
hz_name = (@hosted_zone.name || @options.hosted_zone_name).downcase.sub(/\.\z/, '')
rrs_name = @resource_record_set.name.downcase.sub(/\.\z/, '')
return if hz_name == rrs_name
end
log(:info, 'Delete ResourceRecordSet', :red) do
log_id = [self.name, self.type].join(' ')
rrset_setid = self.set_identifier
rrset_setid ? (log_id + " (#{rrset_setid})") : log_id
end
unless @options.dry_run
@options.route53.change_resource_record_sets(
hosted_zone_id: @hosted_zone.id,
change_batch: {
changes: [
{
action: 'DELETE',
resource_record_set: @resource_record_set,
},
],
},
)
@options.updated = true
end
end
def name
value = @resource_record_set.name
value ? value.gsub("\\052", '*') : value
end
def dns_name
alias_target = @resource_record_set.alias_target || {}
dns_name = alias_target[:dns_name]
if dns_name
[
dns_name,
Aws::Route53.normalize_dns_name_options(alias_target),
]
else
nil
end
end
def dns_name=(value)
if value
dns_name, dns_name_opts = value
@resource_record_set.alias_target = Aws::Route53.dns_name_to_alias_target(dns_name, dns_name_opts, @hosted_zone.id, @hosted_zone.name || @options.hosted_zone_name)
else
@resource_record_set.alias_target = nil
end
end
def health_check
@options.health_checks[@resource_record_set.health_check_id]
end
def health_check=(check)
health_check_id = check ? @options.health_checks.find_or_create(check) : nil
@resource_record_set.health_check_id = health_check_id
end
private
# Returns +values+ sorted with a deterministic key. For :resource_records
# the key is each record's :value — Aws::Route53::Types::ResourceRecord#to_s
# returns a filtered string like "{:value=>\"[FILTERED]\"}" since
# aws-sdk-core v3.44.1 (https://github.com/aws/aws-sdk-ruby/pull/1941),
# so #to_s is not a usable key there. Everything else sorts by #to_s.
def sort_rrset_values(attribute, values)
  if attribute == :resource_records
    values.sort_by { |record| record[:value] }
  else
    values.sort_by(&:to_s)
  end
end
def method_missing(method_name, *args)
@resource_record_set.send(method_name, *args)
end
end # ResourceRecordSetWrapper
end # Route53Wrapper
end # Roadworker
|
# frozen_string_literal: true
module RussianInflect
# Base class for all russian_inflect errors so callers can rescue one type.
UnknownError = Class.new(StandardError)
# Raised when the requested grammatical case is not one of the known cases.
class UnknownCaseException < UnknownError
  def initialize(gcase)
    super "Unknown grammatical case: `#{gcase}'."
  end
end
# Raised when no inflection rule matches the given word.
class UnknownRuleException < UnknownError
  def initialize(word)
    super "Can't find rule for `#{word}'."
  end
end
# Holds inflection rules for one word type and applies them to words.
class Dictionary
  def initialize(rules)
    @rules = rules
  end

  # Inflects +word+ into the grammatical case +gcase+; falls back to the
  # unchanged word when no rule matches.
  def inflect(word, gcase)
    type = RussianInflect::Detector.new(word).word_type
    apply(word, gcase, rule_for(word, type))
  rescue UnknownRuleException
    word
  end

  private

  # Picks a matching rule: whole-word exceptions win over suffix rules.
  def rule_for(word, type)
    find(word, @rules[type][:exceptions], true) ||
      find(word, @rules[type][:suffixes]) ||
      raise(UnknownRuleException, word)
  end

  def find(word, scoped_rules, match_whole_word = false)
    return nil unless scoped_rules
    scoped_rules.detect { |rule| match?(word, rule, match_whole_word) }
  end

  # A rule matches when one of its test strings equals either the whole
  # word or the word's trailing characters.
  def match?(word, rule, match_whole_word = false)
    rule[:test].any? do |chars|
      candidate =
        if match_whole_word
          word
        else
          word.slice([word.length - chars.length, 0].max..-1)
        end
      candidate == chars
    end
  end

  # Applies the modificator string: '.' keeps the word as is, '-' drops
  # the last character, any other character is appended.
  def apply(word, gcase, rule)
    result = word.dup
    modificator_for(gcase, rule).each_char do |char|
      if char == '-'
        result.slice!(-1)
      elsif char != '.'
        result << char
      end
    end
    result
  end

  # Looks up the case-specific modificator inside the rule.
  def modificator_for(gcase, rule)
    case gcase.to_sym
    when NOMINATIVE then '.'
    when GENITIVE then rule[:mods][0]
    when DATIVE then rule[:mods][1]
    when ACCUSATIVE then rule[:mods][2]
    when INSTRUMENTAL then rule[:mods][3]
    when PREPOSITIONAL then rule[:mods][4]
    else raise UnknownCaseException, gcase
    end
  end
end
end
Remove unnecessary conditions and variable assignments from rule_for method
# frozen_string_literal: true
module RussianInflect
# Base class for all russian_inflect errors so callers can rescue one type.
UnknownError = Class.new(StandardError)
# Raised when the requested grammatical case is not one of the known cases.
class UnknownCaseException < UnknownError
  def initialize(gcase)
    super "Unknown grammatical case: `#{gcase}'."
  end
end
# Raised when no inflection rule matches the given word.
class UnknownRuleException < UnknownError
  def initialize(word)
    super "Can't find rule for `#{word}'."
  end
end
# Holds inflection rules for one word type and applies them to words.
class Dictionary
  def initialize(rules)
    @rules = rules
  end
  # Inflects +word+ into the grammatical case +gcase+; falls back to the
  # unchanged word when no rule matches.
  def inflect(word, gcase)
    type = RussianInflect::Detector.new(word).word_type
    apply(word, gcase, rule_for(word, type)) # find the rule and apply it for the given case
  rescue UnknownRuleException
    word
  end
  private
  # Find a matching rule: whole-word exceptions win over suffix rules.
  def rule_for(word, type)
    find(word, @rules[type][:exceptions], true) ||
      find(word, @rules[type][:suffixes]) ||
      raise(UnknownRuleException, word)
  end
  def find(word, scoped_rules, match_whole_word = false)
    return if scoped_rules.nil?
    scoped_rules.detect { |rule| match?(word, rule, match_whole_word) }
  end
  # Check whether the rule matches the whole word or the word's ending.
  def match?(word, rule, match_whole_word = false)
    # word = UnicodeUtils.downcase(word)
    rule[:test].any? do |chars|
      test = match_whole_word ? word : word.slice([word.length - chars.length, 0].max..-1)
      test == chars
    end
  end
  # Apply the rule's modificator: '.' keeps the word as is, '-' drops the
  # last character, any other character is appended.
  def apply(word, gcase, rule)
    modificator = modificator_for(gcase, rule)
    result = word.dup
    modificator.each_char do |char|
      case char
      when '.' then nil
      when '-' then result.slice!(-1)
      else result << char
      end
    end
    result
  end
  # Pick the modificator from the rule for the given grammatical case.
  def modificator_for(gcase, rule)
    case gcase.to_sym
    when NOMINATIVE then '.'
    when GENITIVE then rule[:mods][0]
    when DATIVE then rule[:mods][1]
    when ACCUSATIVE then rule[:mods][2]
    when INSTRUMENTAL then rule[:mods][3]
    when PREPOSITIONAL then rule[:mods][4]
    else raise UnknownCaseException, gcase
    end
  end
end
end
|
require 'builder'
module SamlIdp
# Builds the <ds:Signature> XML element for a SAML document from a
# signed-info builder, which supplies the SignedInfo XML and the signature
# value.
class SignatureBuilder
  attr_accessor :signed_info_builder
  def initialize(signed_info_builder)
    self.signed_info_builder = signed_info_builder
  end
  # The full signature element as markup. NOTE: the result is memoized in
  # @raw, so repeated calls return the first build.
  def raw
    @raw ||= builder.tag! "ds:Signature", "xmlns:ds" => "http://www.w3.org/2000/09/xmldsig#" do |signature|
      signature << signed_info
      signature.tag! "ds:SignatureValue", signature_value
      signature.KeyInfo xmlns: "http://www.w3.org/2000/09/xmldsig#" do |key_info|
        key_info.tag! "ds:X509Data" do |x509|
          x509.tag! "ds:X509Certificate", x509_certificate
        end
      end
    end
  end
  # Certificate advertised in KeyInfo, taken from the global configuration.
  def x509_certificate
    SamlIdp.config.x509_certificate
  end
  private :x509_certificate
  # SignedInfo XML produced by the wrapped builder.
  def signed_info
    signed_info_builder.raw
  end
  private :signed_info
  # Signature bytes over the SignedInfo.
  def signature_value
    signed_info_builder.signed
  end
  private :signature_value
  # Shared XmlMarkup instance (memoized).
  def builder
    @builder ||= Builder::XmlMarkup.new
  end
  private :builder
end
end
Don't use a memoized XML builder; rebuilding it on every call is safer.
require 'builder'
module SamlIdp
# Builds the <ds:Signature> XML element for a SAML document from a
# signed-info builder, which supplies the SignedInfo XML and the signature
# value.
class SignatureBuilder
  attr_accessor :signed_info_builder

  def initialize(signed_info_builder)
    self.signed_info_builder = signed_info_builder
  end

  # Assembles the signature element from scratch on every call; a fresh
  # XmlMarkup instance is used so repeated calls never share builder state.
  def raw
    xml = Builder::XmlMarkup.new
    xml.tag! "ds:Signature", "xmlns:ds" => "http://www.w3.org/2000/09/xmldsig#" do |sig|
      sig << signed_info
      sig.tag! "ds:SignatureValue", signature_value
      sig.KeyInfo xmlns: "http://www.w3.org/2000/09/xmldsig#" do |info|
        info.tag! "ds:X509Data" do |data|
          data.tag! "ds:X509Certificate", x509_certificate
        end
      end
    end
  end

  private

  # Certificate advertised in KeyInfo, taken from the global configuration.
  def x509_certificate
    SamlIdp.config.x509_certificate
  end

  # SignedInfo XML produced by the wrapped builder.
  def signed_info
    signed_info_builder.raw
  end

  # Signature bytes over the SignedInfo.
  def signature_value
    signed_info_builder.signed
  end
end
end
|
#
# ServerEngine
#
# Copyright (C) 2012-2013 Sadayuki Furuhashi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module ServerEngine
require 'logger'
class DaemonLogger < Logger
# Logger writing either to an IO object or to a file path with
# multiprocess-safe rotation.
#
# config keys: :log_level (default 'debug'), :log_rotate_age (default 5),
# :log_rotate_size (default 1048576 bytes).
def initialize(logdev, config={})
  rotate_age = config[:log_rotate_age] || 5
  rotate_size = config[:log_rotate_size] || 1048576
  # Created unconditionally; stays pathless (inactive) while logdev is an IO.
  @file_dev = MultiprocessFileLogDevice.new(nil,
    shift_age: rotate_age, shift_size: rotate_size)
  super(nil)
  self.level = config[:log_level] || 'debug'
  self.logdev = logdev
end
# Points the logger at either an IO-like object (responds to write and
# close) or a path string handled by the multiprocess-safe file device.
def logdev=(logdev)
  # overwrites Logger's @logdev variable
  if logdev.respond_to?(:write) and logdev.respond_to?(:close)
    # IO
    @logdev = logdev
    @logdev.sync = true if @logdev.respond_to?(:sync=)
    @file_dev.path = nil
  else
    # path string
    @file_dev.path = logdev
    @logdev = @file_dev
  end
  logdev
end
# Overrides Logger#add. Emits when severity >= current level; the message
# may come positionally, from the block, or fall back to progname (same
# contract as stdlib Logger#add).
def add(severity, message = nil, progname = nil, &block)
  if severity < @level
    return true
  end
  if message.nil?
    if block_given?
      message = yield
    else
      message = progname
      progname = nil
    end
  end
  progname ||= @progname
  # SEVERITY_FORMATS_ is indexed by severity+1 because TRACE is -1.
  self << format_message(SEVERITY_FORMATS_[severity+1], Time.now, progname, message)
  true
end
# Extends stdlib severities with TRACE, one step below DEBUG.
module Severity
  include Logger::Severity
  TRACE = -1
end
include Severity
# Severity label lookup, indexed by severity+1 (TRACE is -1).
SEVERITY_FORMATS_ = %w(TRACE DEBUG INFO WARN ERROR FATAL ANY)
# Accepts a severity name ('trace'..'fatal') or its numeric value (as an
# integer or string) and assigns the corresponding Logger level.
# Raises ArgumentError for anything else.
def level=(expr)
  str = expr.to_s
  pair = {
    'fatal' => FATAL,
    'error' => ERROR,
    'warn' => WARN,
    'info' => INFO,
    'debug' => DEBUG,
    'trace' => TRACE,
  }.find { |name, value| str == name || str == value.to_s }
  raise ArgumentError, "invalid log level: #{expr}" unless pair
  super(pair.last)
end
# True when TRACE-level messages would be emitted.
def trace?; @level <= TRACE; end
# Re-opens the file device (e.g. after external log rotation); raises on
# failure.
def reopen!
  @file_dev.reopen!
  nil
end
# Like #reopen! but reports the outcome as a boolean instead of raising.
def reopen
  reopen!
  true
rescue
  # TODO log?
  false
end
# Closes the file-backed device. An IO object passed as logdev is not
# closed here.
def close
  @file_dev.close
  nil
end
# Append-mode file log device whose size-based rotation is safe across
# multiple processes logging to the same path (coordinated via flock on
# the live file).
class MultiprocessFileLogDevice
  def initialize(path, opts={})
    @shift_age = opts[:shift_age] || 7
    @shift_size = opts[:shift_size] || 1024*1024
    @mutex = Mutex.new
    self.path = path
  end

  def write(data)
    # it's hard to remove this synchronize because IO#write raises
    # Errno::ENOENT if IO#reopen is running concurrently.
    @mutex.synchronize do
      unless @file
        return nil
      end
      log_rotate_or_reopen
      @file.write(data)
    end
  rescue Exception => e
    warn "log writing failed: #{e}"
  end

  # Switches the device to a new path (nil deactivates it); the previous
  # file is closed after the new one is opened.
  def path=(path)
    @mutex.synchronize do
      old_file = @file
      file = open_logfile(path)
      begin
        @file = file
        @path = path
        file = old_file
      ensure
        file.close if file
      end
    end
    return path
  end

  def close
    @mutex.synchronize do
      @file.close
      @file = nil
    end
    nil
  end

  attr_reader :path

  # Re-opens the current path (e.g. after external rotation).
  def reopen!
    @mutex.synchronize do
      if @file
        @file.reopen(@path, 'a')
        @file.sync = true
      end
    end
    true
  end

  # for compatibility with Logger::LogDevice
  def dev
    @file
  end

  # for compatibility with Logger::LogDevice
  def filename
    @path
  end

  private

  def open_logfile(path)
    return nil unless path
    file = File.open(path, 'a')
    file.sync = true
    return file
  end

  # Called with @mutex held: rotates when the file exceeds @shift_size, or
  # just follows the new file when another process already rotated it.
  def log_rotate_or_reopen
    stat = @file.stat
    if stat.size <= @shift_size
      return
    end
    # inter-process locking
    retry_limit = 8
    retry_sleep = 0.1
    begin
      # 1) other process is log-rotating now
      # 2) other process log rotated
      # 3) no active processes
      lock = File.open(@path, File::WRONLY | File::APPEND)
      begin
        lock.flock(File::LOCK_EX)
        ino = lock.stat.ino
        # BUGFIX: also require ino == stat.ino — if another process rotated
        # between our stat and taking the lock, our @file no longer matches
        # the live path and we must follow it instead of rotating again.
        if ino == File.stat(@path).ino and ino == stat.ino
          # 3) our oversized file is still the live one: rotate it here
          log_rotate
        else
          # 1)/2) reopen in place. BUGFIX: do not call reopen! here —
          # @mutex is already held and Mutex is not re-entrant.
          @file.reopen(@path, 'a')
          @file.sync = true
        end
      ensure
        # BUGFIX: was `rescue`, which leaked the lock fd on the success
        # path and silently swallowed rotation errors.
        lock.close
      end
    rescue Errno::ENOENT => e
      raise e if retry_limit <= 0
      sleep retry_sleep
      retry_limit -= 1
      retry_sleep *= 2
      retry
    end
  rescue => e
    warn "log rotation inter-process lock failed: #{e}"
  end

  # Shifts path.N -> path.N+1, moves the live file to path.0 and reopens.
  def log_rotate
    (@shift_age-2).downto(0) do |i|
      old_path = "#{@path}.#{i}"
      shift_path = "#{@path}.#{i+1}"
      if FileTest.exist?(old_path)
        File.rename(old_path, shift_path)
      end
    end
    File.rename(@path, "#{@path}.0")
    @file.reopen(@path, 'a')
    @file.sync = true
  rescue => e
    warn "log rotation failed: #{e}"
  end
end
end
end
Fix multi-process log rotation
#
# ServerEngine
#
# Copyright (C) 2012-2013 Sadayuki Furuhashi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module ServerEngine
require 'logger'
# Logger writing either to an IO object or to a file path with size-based,
# multiprocess-safe rotation (see the nested MultiprocessFileLogDevice).
class DaemonLogger < Logger
  # config keys: :log_level (default 'debug'), :log_rotate_age (default 5),
  # :log_rotate_size (default 1048576 bytes).
  def initialize(logdev, config={})
    rotate_age = config[:log_rotate_age] || 5
    rotate_size = config[:log_rotate_size] || 1048576
    # Created unconditionally; stays pathless (inactive) while logdev is an IO.
    @file_dev = MultiprocessFileLogDevice.new(nil,
      shift_age: rotate_age, shift_size: rotate_size)
    super(nil)
    self.level = config[:log_level] || 'debug'
    self.logdev = logdev
  end
  # Accepts an IO-like object (responds to write and close) or a path string.
  def logdev=(logdev)
    # overwrites Logger's @logdev variable
    if logdev.respond_to?(:write) and logdev.respond_to?(:close)
      # IO
      @logdev = logdev
      @logdev.sync = true if @logdev.respond_to?(:sync=)
      @file_dev.path = nil
    else
      # path string
      @file_dev.path = logdev
      @logdev = @file_dev
    end
    logdev
  end
  # override add method; same contract as stdlib Logger#add: message may
  # come positionally, from the block, or fall back to progname.
  def add(severity, message = nil, progname = nil, &block)
    if severity < @level
      return true
    end
    if message.nil?
      if block_given?
        message = yield
      else
        message = progname
        progname = nil
      end
    end
    progname ||= @progname
    # SEVERITY_FORMATS_ is indexed by severity+1 because TRACE is -1.
    self << format_message(SEVERITY_FORMATS_[severity+1], Time.now, progname, message)
    true
  end
  # Extends stdlib severities with TRACE, one step below DEBUG.
  module Severity
    include Logger::Severity
    TRACE = -1
  end
  include Severity
  # Severity label lookup, indexed by severity+1 (TRACE is -1).
  SEVERITY_FORMATS_ = %w(TRACE DEBUG INFO WARN ERROR FATAL ANY)
  # Accepts a severity name ('trace'..'fatal') or its numeric value (as an
  # integer or string).
  def level=(expr)
    case expr.to_s
    when 'fatal', FATAL.to_s
      e = FATAL
    when 'error', ERROR.to_s
      e = ERROR
    when 'warn', WARN.to_s
      e = WARN
    when 'info', INFO.to_s
      e = INFO
    when 'debug', DEBUG.to_s
      e = DEBUG
    when 'trace', TRACE.to_s
      e = TRACE
    else
      raise ArgumentError, "invalid log level: #{expr}"
    end
    super(e)
  end
  # True when TRACE-level messages would be emitted.
  def trace?; @level <= TRACE; end
  # Re-opens the file device (e.g. after external rotation); raises on failure.
  def reopen!
    @file_dev.reopen!
    nil
  end
  # Like #reopen! but reports the outcome as a boolean instead of raising.
  def reopen
    begin
      reopen!
      return true
    rescue
      # TODO log?
      return false
    end
  end
  # Closes the file-backed device; an IO passed as logdev is not closed here.
  def close
    @file_dev.close
    nil
  end
  # Append-mode file device whose size-based rotation is safe across
  # multiple processes logging to the same path (coordinated via flock on
  # the live file).
  class MultiprocessFileLogDevice
    def initialize(path, opts={})
      @shift_age = opts[:shift_age] || 7
      @shift_size = opts[:shift_size] || 1024*1024
      @mutex = Mutex.new
      self.path = path
    end
    def write(data)
      # it's hard to remove this synchronize because IO#write raises
      # Errno::ENOENT if IO#reopen is running concurrently.
      @mutex.synchronize do
        unless @file
          return nil
        end
        log_rotate_or_reopen
        @file.write(data)
      end
    rescue Exception => e
      warn "log writing failed: #{e}"
    end
    # Switches to a new path (nil deactivates the device); the previous
    # file is closed after the new one is opened.
    def path=(path)
      @mutex.synchronize do
        old_file = @file
        file = open_logfile(path)
        begin
          @file = file
          @path = path
          file = old_file
        ensure
          file.close if file
        end
      end
      return path
    end
    def close
      @mutex.synchronize do
        @file.close
        @file = nil
      end
      nil
    end
    attr_reader :path
    # Re-opens the current path (e.g. after external rotation).
    def reopen!
      @mutex.synchronize do
        if @file
          @file.reopen(@path, 'a')
          @file.sync = true
        end
      end
      true
    end
    # for compatibility with Logger::LogDevice
    def dev
      @file
    end
    # for compatibility with Logger::LogDevice
    def filename
      @path
    end
    private
    def open_logfile(path)
      return nil unless path
      file = File.open(path, 'a')
      file.sync = true
      return file
    end
    # Called with @mutex held: rotates when the file exceeds @shift_size,
    # or follows the new file when another process already rotated it.
    def log_rotate_or_reopen
      stat = @file.stat
      if stat.size <= @shift_size
        return
      end
      # inter-process locking
      retry_limit = 8
      retry_sleep = 0.1
      begin
        # 1) other process is log-rotating now
        # 2) other process log rotated
        # 3) no active processes
        lock = File.open(@path, File::WRONLY | File::APPEND)
        begin
          lock.flock(File::LOCK_EX)
          ino = lock.stat.ino
          if ino == File.stat(@path).ino and ino == stat.ino
            # 3) our oversized file is still the live one: rotate it here
            log_rotate
          else
            # 1)/2) reopen in place; reopen! would re-take the already
            # held (non-reentrant) @mutex.
            @file.reopen(@path, 'a')
            @file.sync = true
          end
        ensure
          lock.close
        end
      rescue Errno::ENOENT => e
        raise e if retry_limit <= 0
        sleep retry_sleep
        retry_limit -= 1
        retry_sleep *= 2
        retry
      end
    rescue => e
      warn "log rotation inter-process lock failed: #{e}"
    end
    # Shifts path.N -> path.N+1, moves the live file to path.0 and reopens.
    def log_rotate
      (@shift_age-2).downto(0) do |i|
        old_path = "#{@path}.#{i}"
        shift_path = "#{@path}.#{i+1}"
        if FileTest.exist?(old_path)
          File.rename(old_path, shift_path)
        end
      end
      File.rename(@path, "#{@path}.0")
      @file.reopen(@path, 'a')
      @file.sync = true
    rescue => e
      warn "log rotation failed: #{e}"
    end
  end
end
end
|
module Serverspec
module Commands
# RedHat/CentOS-specific command strings used by serverspec matchers.
class RedHat < Linux
  def check_access_by_user file, user, access
    # Redhat-specific
    "runuser -s sh -c \"test -#{access} #{file}\" #{user}"
  end

  # Service must be "on" for runlevel 3 in chkconfig output.
  def check_enabled service
    "chkconfig --list #{escape(service)} | grep 3:on"
  end

  # BUGFIX: anchor the repo id at line start so e.g. "base" does not also
  # match "openstack-base".
  def check_repository(repository)
    "yum repolist | grep ^#{escape(repository)}"
  end

  # BUGFIX: plain `yum repolist` only lists enabled repos and omits the
  # status column; `repolist all` prints the enabled/disabled column this
  # grep inspects.
  def check_repository_enabled(repository)
    "yum repolist all | grep ^#{escape(repository)} | grep enabled"
  end

  def check_installed package
    "rpm -q #{escape(package)}"
  end
end
end
end
Fix the repository check commands
module Serverspec
module Commands
# RedHat/CentOS-specific command strings used by serverspec matchers.
class RedHat < Linux
  def check_access_by_user file, user, access
    # Redhat-specific
    "runuser -s sh -c \"test -#{access} #{file}\" #{user}"
  end
  # Service must be "on" for runlevel 3 in chkconfig output.
  def check_enabled service
    "chkconfig --list #{escape(service)} | grep 3:on"
  end
  # Repo id anchored at line start so similarly named repos don't match.
  def check_repository(repository)
    "yum repolist | grep ^#{escape(repository)}"
  end
  # `repolist all` prints the enabled/disabled column this grep inspects.
  def check_repository_enabled(repository)
    "yum repolist all | grep ^#{escape(repository)} | grep enabled"
  end
  def check_installed package
    "rpm -q #{escape(package)}"
  end
end
end
end
|
module SimplePagesRails
  # Gem version.
  VERSION = '0.3.1'
end
bump version
module SimplePagesRails
  # Gem version.
  VERSION = '0.4.0'
end
|
require 'mail'
require 'socket'
module SimpleBackup
module Utils
# Sends the backup notification mail (via sendmail), summarizing backed-up
# sources, disk usage and the collected log buffer.
class Mailer
  @@logger = Logger.instance

  def initialize()
    @to = []
    @cc = []
    @bcc = []
    @hostname = Socket.gethostbyname(Socket.gethostname).first
  end

  # DSL-style configuration setters.
  def subject_prefix(prefix)
    @subject_prefix = prefix
  end

  def from(from)
    @from = from
  end

  def to(to)
    @to << to
  end

  def cc(cc)
    @cc << cc
  end

  def bcc(bcc)
    @bcc << bcc
  end

  # Builds and delivers the notification. The subject is marked [FAILED]
  # when the overall backup status is :failed.
  def send
    @@logger.info "Setting sender to: #{@from}"
    from = @from
    @@logger.scope_start :info, "Adding recipients:"
    to = @to
    to.each do |mail|
      @@logger.info "to: #{mail}"
    end
    cc = @cc
    cc.each do |mail|
      @@logger.info "cc: #{mail}"
    end
    bcc = @bcc
    bcc.each do |mail|
      @@logger.info "bcc: #{mail}"
    end
    @@logger.scope_end
    @subject_prefix += '[FAILED]' if SimpleBackup.status == :failed
    subject = "%s Backup %s for %s" % [@subject_prefix, TIMESTAMP, @hostname]
    @@logger.debug "Subject: #{subject}"
    body = get_body
    # The locals above are captured by the Mail.new block so its DSL calls
    # receive our values.
    mail = Mail.new do
      from from
      to to
      cc cc
      bcc bcc
      subject subject.strip
      body body
    end
    mail.delivery_method :sendmail
    @@logger.debug "Setting delivery method to sendmail"
    mail.deliver
    @@logger.info "Notification sent"
  end

  private

  def get_body
    sources = ''
    SimpleBackup::Sources.instance.each do |name, source|
      sources += " - %s\n" % source.desc
    end
    body = <<MAIL
Hi,
Backup #{TIMESTAMP} was created!
Backup contains:
#{sources}
Disk usage after backup:
#{disk_usage}
Backup log:
------------
#{@@logger.buffer.join("\n")}
------------
Have a nice day,
SimpleBackup
--
Mail was sent automatically
Do not respond!
MAIL
    body
  end

  # Renders a per-mount disk usage table; mounts over the high-usage
  # threshold are flagged with "(!!)".
  # BUGFIX: corrected "send"->"sent" and "treshold"->"threshold" in the
  # user-facing text (the Utils::Disk::high_usage_treshold method name is
  # external API and is left as is).
  def disk_usage
    content = "%16s %25s %12s %12s %12s %12s\n" % ['Mount', 'Filesystem', 'Size', 'Used', 'Available', 'Percent used']
    usage = Utils::Disk::usage
    usage[:mounts].each do |m|
      percent_usage = (m[:percent] * 100).to_s
      percent_usage = '(!!) ' + percent_usage if m[:high_usage_exceeded]
      content += "%16s %25s %8s MiB %8s MiB %8s MiB %11s%%\n" % [m[:mount], m[:fs], m[:size], m[:used], m[:available], percent_usage]
    end
    content += "\nHigh usage threshold exceeded!\nMax usage is #{usage[:high_usage]} where threshold is set to #{Utils::Disk::high_usage_treshold}\n" if usage[:high_usage_exceeded]
    content
  end
end
end
end
Fix a typo in notification mail signature
require 'mail'
require 'socket'
module SimpleBackup
module Utils
# Sends the backup notification mail (via sendmail), summarizing backed-up
# sources, disk usage and the collected log buffer.
class Mailer
  @@logger = Logger.instance

  def initialize()
    @to = []
    @cc = []
    @bcc = []
    @hostname = Socket.gethostbyname(Socket.gethostname).first
  end

  # DSL-style configuration setters.
  def subject_prefix(prefix)
    @subject_prefix = prefix
  end

  def from(from)
    @from = from
  end

  def to(to)
    @to << to
  end

  def cc(cc)
    @cc << cc
  end

  def bcc(bcc)
    @bcc << bcc
  end

  # Builds and delivers the notification. The subject is marked [FAILED]
  # when the overall backup status is :failed.
  def send
    @@logger.info "Setting sender to: #{@from}"
    from = @from
    @@logger.scope_start :info, "Adding recipients:"
    to = @to
    to.each do |mail|
      @@logger.info "to: #{mail}"
    end
    cc = @cc
    cc.each do |mail|
      @@logger.info "cc: #{mail}"
    end
    bcc = @bcc
    bcc.each do |mail|
      @@logger.info "bcc: #{mail}"
    end
    @@logger.scope_end
    @subject_prefix += '[FAILED]' if SimpleBackup.status == :failed
    subject = "%s Backup %s for %s" % [@subject_prefix, TIMESTAMP, @hostname]
    @@logger.debug "Subject: #{subject}"
    body = get_body
    # The locals above are captured by the Mail.new block so its DSL calls
    # receive our values.
    mail = Mail.new do
      from from
      to to
      cc cc
      bcc bcc
      subject subject.strip
      body body
    end
    mail.delivery_method :sendmail
    @@logger.debug "Setting delivery method to sendmail"
    mail.deliver
    @@logger.info "Notification sent"
  end

  private

  def get_body
    sources = ''
    SimpleBackup::Sources.instance.each do |name, source|
      sources += " - %s\n" % source.desc
    end
    body = <<MAIL
Hi,
Backup #{TIMESTAMP} was created!
Backup contains:
#{sources}
Disk usage after backup:
#{disk_usage}
Backup log:
------------
#{@@logger.buffer.join("\n")}
------------
Have a nice day,
SimpleBackup
--
Mail was sent automatically
Do not respond!
MAIL
    body
  end

  # Renders a per-mount disk usage table; mounts over the high-usage
  # threshold are flagged with "(!!)".
  # BUGFIX: corrected "treshold"->"threshold" in the user-facing text (the
  # Utils::Disk::high_usage_treshold method name is external API and is
  # left as is).
  def disk_usage
    content = "%16s %25s %12s %12s %12s %12s\n" % ['Mount', 'Filesystem', 'Size', 'Used', 'Available', 'Percent used']
    usage = Utils::Disk::usage
    usage[:mounts].each do |m|
      percent_usage = (m[:percent] * 100).to_s
      percent_usage = '(!!) ' + percent_usage if m[:high_usage_exceeded]
      content += "%16s %25s %8s MiB %8s MiB %8s MiB %11s%%\n" % [m[:mount], m[:fs], m[:size], m[:used], m[:available], percent_usage]
    end
    content += "\nHigh usage threshold exceeded!\nMax usage is #{usage[:high_usage]} where threshold is set to #{Utils::Disk::high_usage_treshold}\n" if usage[:high_usage_exceeded]
    content
  end
end
end
end
|
module Sorge
class Engine
# Drives one task: turns upstream completions into pending panes, asks the
# task's trigger which pane to run next, and executes panes on an async
# worker. All shared state is guarded by @mutex.
class TaskOperator
  # An upstream completion: which task finished, at which pane time.
  Event = Struct.new(:task_name, :time)
  # Outcome of one execution: success flag, next state, emitted times.
  TaskResult = Struct.new(:successed?, :state, :emitted)
  # Mutable context handed to the user-defined trigger.
  TriggerContext = Struct.new(:state, :jobflow_status)
  def initialize(engine, task_name)
    @engine = engine
    @task = @engine.app.tasks[task_name]
    @state = {}
    @trigger_state = {}
    @pending = PaneSet.new
    @running = [] # Array<Pane>
    @finished = []
    @position = Time.at(0)
    @worker = AsyncWorker.new(@engine, :default)
    @mutex = Mutex.new
  end
  # Enqueues an externally posted event (no upstream task attached) and
  # returns a status snapshot.
  def post(time, jobflow_status)
    @mutex.synchronize do
      ns_enqueue([Event[nil, time]], jobflow_status)
      ns_collect_status
    end
  end
  # Restores persisted status and restarts any panes that were running.
  def resume(task_status)
    @mutex.synchronize do
      @state = task_status.state
      @trigger_state = task_status.trigger_state
      @pending = task_status.pending
      @running = task_status.running
      @finished = task_status.finished
      @position = task_status.position
      @running.each { @worker.post { perform } }
    end
  end
  # Force-runs every pending pane at once, bypassing the trigger.
  def flush
    @mutex.synchronize do
      ps = @pending.panes
      @pending = PaneSet[]
      @running += ps
      ps.each { @worker.post { perform } }
      ns_collect_status
    end
  end
  def stop
    @worker.stop
  end
  def wait_stop
    @worker.wait_stop
    @mutex.synchronize { ns_collect_status }
  end
  def kill
    @worker.kill
  end
  # Converts upstream finish times into events and enqueues them.
  def update(jobflow_status)
    @mutex.synchronize do
      events = []
      @task.upstreams.each do |task_name, _|
        jobflow_status[task_name].finished.each do |time|
          events << Event[task_name, time]
        end
      end
      ns_enqueue(events, jobflow_status)
      ns_collect_status
    end
  end
  private
  # ns_* methods assume @mutex is already held by the caller.
  # Appends events; when idle, asks the trigger for the next pane and
  # starts it on the worker.
  def ns_enqueue(events, jobflow_status)
    ns_append_events(events)
    return unless @running.empty?
    target = ns_shift_pending(jobflow_status)
    return if target.nil?
    @running += [target]
    @worker.post { perform }
  end
  # Buckets each event into a pane keyed by the task's truncated time.
  def ns_append_events(events)
    events.each do |event|
      time = @task.time_trunc.call(event.time)
      @pending = @pending.add(time, event.task_name)
    end
  end
  # Lets the trigger split pending panes into ready/pending; runs the
  # first ready pane and keeps the rest queued.
  def ns_shift_pending(jobflow_status)
    context = TriggerContext[@trigger_state.dup, jobflow_status]
    ready, pending = @task.trigger.call(@pending.panes, context)
    target, *rest = ready
    @pending = PaneSet[*rest, *pending]
    @trigger_state = context.state
    target
  end
  # Runs on the worker thread; only the execute call happens unlocked.
  def perform
    pane = @mutex.synchronize { @running.first }
    result = execute(pane)
    @mutex.synchronize { ns_update_status(result) }
  end
  def execute(pane)
    context = DSL::TaskContext[pane.time, @state.dup, pane]
    task_instance = @task.new(context)
    result = task_instance.invoke
    TaskResult[result, context.state, task_instance.emitted]
  end
  # Commits a finished pane: keeps the new state and, on success, records
  # finish times and advances the watermark position.
  def ns_update_status(result)
    @state = result.state
    pane, *@running = @running
    return unless result.successed?
    @finished += result.emitted.empty? ? [pane.time] : result.emitted
    @position = [@position, pane.time].max
  end
  # Snapshots status and clears @finished so finish times are delivered to
  # downstreams exactly once.
  def ns_collect_status
    status = ns_build_status
    @finished = []
    status
  end
  def ns_build_status
    TaskStatus[
      @state,
      @trigger_state,
      @pending,
      @running,
      @finished,
      @position
    ].freeze!
  end
end
end
end
Flush tasks one by one
module Sorge
class Engine
# Drives one task: turns upstream completions into pending panes, asks the
# task's trigger which pane to run next, and executes panes on an async
# worker. All shared state is guarded by @mutex.
class TaskOperator
  # An upstream completion: which task finished, at which pane time.
  Event = Struct.new(:task_name, :time)
  # Outcome of one execution: success flag, next state, emitted times.
  TaskResult = Struct.new(:successed?, :state, :emitted)
  # Mutable context handed to the user-defined trigger.
  TriggerContext = Struct.new(:state, :jobflow_status)
  def initialize(engine, task_name)
    @engine = engine
    @task = @engine.app.tasks[task_name]
    @state = {}
    @trigger_state = {}
    @pending = PaneSet.new
    @running = [] # Array<Pane>
    @finished = []
    @position = Time.at(0)
    @worker = AsyncWorker.new(@engine, :default)
    @mutex = Mutex.new
  end
  # Enqueues an externally posted event (no upstream task attached) and
  # returns a status snapshot.
  def post(time, jobflow_status)
    @mutex.synchronize do
      ns_enqueue([Event[nil, time]], jobflow_status)
      ns_collect_status
    end
  end
  # Restores persisted status and restarts any panes that were running.
  def resume(task_status)
    @mutex.synchronize do
      @state = task_status.state
      @trigger_state = task_status.trigger_state
      @pending = task_status.pending
      @running = task_status.running
      @finished = task_status.finished
      @position = task_status.position
      @running.each { @worker.post { perform } }
    end
  end
  # Force-runs a single pending pane, bypassing the trigger; panes are
  # flushed one by one, and nothing happens while a pane is running.
  def flush
    @mutex.synchronize do
      return if @pending.empty?
      return unless @running.empty?
      target, *rest = @pending.panes
      @pending = PaneSet[*rest]
      @running += [target]
      @worker.post { perform }
      ns_collect_status
    end
  end
  def stop
    @worker.stop
  end
  def wait_stop
    @worker.wait_stop
    @mutex.synchronize { ns_collect_status }
  end
  def kill
    @worker.kill
  end
  # Converts upstream finish times into events and enqueues them.
  def update(jobflow_status)
    @mutex.synchronize do
      events = []
      @task.upstreams.each do |task_name, _|
        jobflow_status[task_name].finished.each do |time|
          events << Event[task_name, time]
        end
      end
      ns_enqueue(events, jobflow_status)
      ns_collect_status
    end
  end
  private
  # ns_* methods assume @mutex is already held by the caller.
  # Appends events; when idle, asks the trigger for the next pane and
  # starts it on the worker.
  def ns_enqueue(events, jobflow_status)
    ns_append_events(events)
    return unless @running.empty?
    target = ns_shift_pending(jobflow_status)
    return if target.nil?
    @running += [target]
    @worker.post { perform }
  end
  # Buckets each event into a pane keyed by the task's truncated time.
  def ns_append_events(events)
    events.each do |event|
      time = @task.time_trunc.call(event.time)
      @pending = @pending.add(time, event.task_name)
    end
  end
  # Lets the trigger split pending panes into ready/pending; runs the
  # first ready pane and keeps the rest queued.
  def ns_shift_pending(jobflow_status)
    context = TriggerContext[@trigger_state.dup, jobflow_status]
    ready, pending = @task.trigger.call(@pending.panes, context)
    target, *rest = ready
    @pending = PaneSet[*rest, *pending]
    @trigger_state = context.state
    target
  end
  # Runs on the worker thread; only the execute call happens unlocked.
  def perform
    pane = @mutex.synchronize { @running.first }
    result = execute(pane)
    @mutex.synchronize { ns_update_status(result) }
  end
  def execute(pane)
    context = DSL::TaskContext[pane.time, @state.dup, pane]
    task_instance = @task.new(context)
    result = task_instance.invoke
    TaskResult[result, context.state, task_instance.emitted]
  end
  # Commits a finished pane: keeps the new state and, on success, records
  # finish times and advances the watermark position.
  def ns_update_status(result)
    @state = result.state
    pane, *@running = @running
    return unless result.successed?
    @finished += result.emitted.empty? ? [pane.time] : result.emitted
    @position = [@position, pane.time].max
  end
  # Snapshots status and clears @finished so finish times are delivered to
  # downstreams exactly once.
  def ns_collect_status
    status = ns_build_status
    @finished = []
    status
  end
  def ns_build_status
    TaskStatus[
      @state,
      @trigger_state,
      @pending,
      @running,
      @finished,
      @position
    ].freeze!
  end
end
end
end
|
module Spree
module Search
# Product search backed by Elasticsearch; falls back to the regular
# ActiveRecord scope chain when no keywords are given.
class Elasticsearch < Spree::Core::Search::Base
  def retrieve_products
    @products_scope = get_base_scope
    curr_page = page || 1
    # @products is only set here for the keyword-less (ActiveRecord) branch.
    if keywords.nil?
      @products = @products_scope.includes([:master => :prices])
    end
    if !@products.nil?
      unless Spree::Config.show_products_without_price
        @products = @products.where("spree_prices.amount IS NOT NULL").where("spree_prices.currency" => Spree::Config[:presentation_currency] || current_currency)
      end
    end
    if keywords.nil?
      @products = @products.page(curr_page).per(per_page)
    else
      # Elasticsearch branch: paginate the search scope and materialize records.
      @products = @products_scope.page(curr_page).per(per_page).records
    end
    @products
  end
  protected
  # ActiveRecord scope for keyword-less browsing; ES search scope otherwise.
  def get_base_scope
    if keywords.nil?
      base_scope = Spree::Product.active
      base_scope = base_scope.in_taxon(taxon) unless taxon.blank?
      base_scope = add_search_scopes(base_scope)
      base_scope = base_scope.descend_by_created_at
      base_scope
    else
      elasticsearch_query = build_es_query
      base_scope = Spree::Product.es_search(elasticsearch_query)
      base_scope
    end
  end
  # Simple query_string query against the product name field.
  def build_es_query
    query = {
      "query" => {
        "query_string" => {
          "default_field" => "name",
          "query" => keywords
        }
      }
    }
    query
  end
end
end
end
Products without images will not be displayed
module Spree
module Search
# Product search backed by Elasticsearch; falls back to the regular
# ActiveRecord scope chain when no keywords are given. Products without
# images are filtered out of the keyword-less branch.
class Elasticsearch < Spree::Core::Search::Base
  def retrieve_products
    @products_scope = get_base_scope
    curr_page = page || 1
    # @products is only set here for the keyword-less (ActiveRecord) branch.
    if keywords.nil?
      @products = @products_scope.includes([:master => :prices])
    end
    if !@products.nil?
      unless Spree::Config.show_products_without_price
        @products = @products.where("spree_prices.amount IS NOT NULL").where("spree_prices.currency" => Spree::Config[:presentation_currency] || current_currency)
      end
      # BUGFIX: `images.lengh` raised NoMethodError; use `length`.
      # NOTE(review): this select materializes the relation into an Array
      # before the .page/.per call below — confirm pagination still works
      # on this branch.
      @products = @products.select { |product| product.images.length > 0 }
    end
    if keywords.nil?
      @products = @products.page(curr_page).per(per_page)
    else
      # Elasticsearch branch: paginate the search scope and materialize records.
      @products = @products_scope.page(curr_page).per(per_page).records
    end
    @products
  end

  protected

  # ActiveRecord scope for keyword-less browsing; ES search scope otherwise.
  def get_base_scope
    if keywords.nil?
      base_scope = Spree::Product.active
      base_scope = base_scope.in_taxon(taxon) unless taxon.blank?
      base_scope = add_search_scopes(base_scope)
      base_scope = base_scope.descend_by_created_at
      base_scope
    else
      elasticsearch_query = build_es_query
      base_scope = Spree::Product.es_search(elasticsearch_query)
      base_scope
    end
  end

  # Simple query_string query against the product name field.
  def build_es_query
    query = {
      "query" => {
        "query_string" => {
          "default_field" => "name",
          "query" => keywords
        }
      }
    }
    query
  end
end
end
end
|
require 'pathname'
module Sprockets
# `AssetAttributes` is a wrapper similar to `Pathname` that provides
# some helper accessors.
#
# These methods should be considered internalish.
# `AssetAttributes` is a wrapper similar to `Pathname` that provides
# some helper accessors.
#
# These methods should be considered internalish.
class AssetAttributes
  attr_reader :environment, :pathname
  def initialize(environment, path)
    @environment = environment
    @pathname = path.is_a?(Pathname) ? path : Pathname.new(path.to_s)
  end
  # Returns candidate paths to search the load path for: the path itself
  # plus its directory-index form ("foo/index.js" for "foo.js").
  def search_paths
    paths = [pathname.to_s]
    if pathname.basename(extensions.join).to_s != 'index'
      path_without_extensions = extensions.inject(pathname) { |p, ext| p.sub(ext, '') }
      index_path = path_without_extensions.join("index#{extensions.join}").to_s
      paths << index_path
    end
    paths
  end
  # Reverse guess logical path for fully expanded path.
  #
  # This has some known issues. For example, if a file is shadowed in the
  # path but is required relatively, its logical path will be incorrect.
  def logical_path
    raise ArgumentError unless pathname.absolute?
    if root_path = environment.paths.detect { |path| pathname.to_s[path] }
      path = pathname.relative_path_from(Pathname.new(root_path)).to_s
      path = engine_extensions.inject(path) { |p, ext| p.sub(ext, '') }
      path = "#{path}#{engine_format_extension}" unless format_extension
      path
    else
      raise FileOutsidePaths, "#{pathname} isn't in paths: #{environment.paths.join(', ')}"
    end
  end
  # Returns `Array` of extension `String`s.
  #
  #     "foo.js.coffee"
  #     # => [".js", ".coffee"]
  #
  def extensions
    @extensions ||= @pathname.basename.to_s.scan(/\.[^.]+/)
  end
  # Returns the format extension.
  #
  #     "foo.js.coffee"
  #     # => ".js"
  #
  def format_extension
    extensions.detect { |ext|
      @environment.mime_types(ext) && !@environment.engines(ext)
    }
  end
  # Returns an `Array` of engine extensions.
  #
  #     "foo.js.coffee.erb"
  #     # => [".coffee", ".erb"]
  #
  def engine_extensions
    exts = extensions
    # Everything after the format extension is an engine extension.
    if offset = extensions.index(format_extension)
      exts = extensions[offset+1..-1]
    end
    exts.select { |ext| @environment.engines(ext) }
  end
  # Returns engine classes.
  def engines
    engine_extensions.map { |ext| @environment.engines(ext) }
  end
  # Returns all processors to run on the path.
  def processors
    environment.preprocessors(content_type) +
      engines.reverse +
      environment.postprocessors(content_type)
  end
  # Returns the content type for the pathname. Falls back to `application/octet-stream`.
  def content_type
    @content_type ||= begin
      if format_extension.nil?
        engine_content_type || 'application/octet-stream'
      else
        @environment.mime_types(format_extension) ||
          engine_content_type ||
          'application/octet-stream'
      end
    end
  end
  private
  # Returns implicit engine content type.
  #
  # `.coffee` files carry an implicit `application/javascript`
  # content type.
  def engine_content_type
    engines.reverse.each do |engine|
      if engine.respond_to?(:default_mime_type) && engine.default_mime_type
        return engine.default_mime_type
      end
    end
    nil
  end
  # Extension matching the implicit engine content type, if any.
  def engine_format_extension
    if content_type = engine_content_type
      environment.extension_for_mime_type(content_type)
    end
  end
end
end
Speed up logical_path
require 'pathname'
module Sprockets
# `AssetAttributes` is a wrapper similar to `Pathname` that provides
# some helper accessors.
#
# These methods should be considered internalish.
class AssetAttributes
attr_reader :environment, :pathname
# environment supplies mime/engine lookups; path may be a String or Pathname.
def initialize(environment, path)
  @environment = environment
  @pathname = path.is_a?(Pathname) ? path : Pathname.new(path.to_s)
end
# Returns candidate paths to search the load path for: the path itself
# plus its directory-index form ("foo/index.js" for "foo.js").
def search_paths
  paths = [pathname.to_s]
  if pathname.basename(extensions.join).to_s != 'index'
    path_without_extensions = extensions.inject(pathname) { |p, ext| p.sub(ext, '') }
    index_path = path_without_extensions.join("index#{extensions.join}").to_s
    paths << index_path
  end
  paths
end
# Reverse guess logical path for fully expanded path.
#
# This has some known issues. For example, if a file is shadowed in the
# path but is required relatively, its logical path will be incorrect.
def logical_path
  if root_path = environment.paths.detect { |path| pathname.to_s[path] }
    # Plain string prefix-strip instead of the slower
    # Pathname#relative_path_from on this hot path.
    path = pathname.to_s.sub("#{root_path}/", '')
    path = engine_extensions.inject(path) { |p, ext| p.sub(ext, '') }
    path = "#{path}#{engine_format_extension}" unless format_extension
    path
  else
    raise FileOutsidePaths, "#{pathname} isn't in paths: #{environment.paths.join(', ')}"
  end
end
# Returns `Array` of extension `String`s.
#
# "foo.js.coffee"
# # => [".js", ".coffee"]
#
def extensions
@extensions ||= @pathname.basename.to_s.scan(/\.[^.]+/)
end
# Returns the format extension.
#
# "foo.js.coffee"
# # => ".js"
#
def format_extension
extensions.detect { |ext|
@environment.mime_types(ext) && !@environment.engines(ext)
}
end
# Returns an `Array` of engine extensions.
#
# "foo.js.coffee.erb"
# # => [".coffee", ".erb"]
#
def engine_extensions
exts = extensions
if offset = extensions.index(format_extension)
exts = extensions[offset+1..-1]
end
exts.select { |ext| @environment.engines(ext) }
end
# Returns engine classes.
def engines
engine_extensions.map { |ext| @environment.engines(ext) }
end
# Returns all processors to run on the path.
def processors
environment.preprocessors(content_type) +
engines.reverse +
environment.postprocessors(content_type)
end
# Returns the content type for the pathname. Falls back to `application/octet-stream`.
def content_type
@content_type ||= begin
if format_extension.nil?
engine_content_type || 'application/octet-stream'
else
@environment.mime_types(format_extension) ||
engine_content_type ||
'application/octet-stream'
end
end
end
private
# Returns implicit engine content type.
#
# `.coffee` files carry an implicit `application/javascript`
# content type.
def engine_content_type
engines.reverse.each do |engine|
if engine.respond_to?(:default_mime_type) && engine.default_mime_type
return engine.default_mime_type
end
end
nil
end
def engine_format_extension
if content_type = engine_content_type
environment.extension_for_mime_type(content_type)
end
end
end
end
|
# -*- encoding: utf-8 -*-
# Gem specification for babelish: a CSV <-> localization-file converter.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'babelish/version'
Gem::Specification.new do |s|
  s.name        = 'babelish'
  s.version     = Babelish::VERSION
  # Derive the release date at build time. A hard-coded literal
  # ('2015-11-09') silently goes stale on every subsequent release.
  s.date        = Time.now.strftime("%Y-%m-%d")
  s.summary     = "CSV converter for localization files"
  s.description = "This set of commands converts a CSV file to the following formats:
- .strings (iOS)
- .xml (Android)
- .json
- .php"
  s.authors = ["François Benaiteau", "Markus Paeschke"]
  s.email = ['francois.benaiteau@gmail.com', 'markus.paeschke@gmail.com']
  s.homepage = 'http://netbe.github.io/Babelish/'
  s.license = 'MIT'
  s.add_dependency "thor"
  s.add_dependency "google_drive", "~> 1.0.1"
  s.add_dependency "nokogiri"
  # google_drive dependency to ask for mail and password
  s.add_dependency "highline"
  # json support
  s.add_dependency "json"
  s.add_development_dependency "rake"
  s.add_development_dependency "test-unit"
  s.add_development_dependency "simplecov"
  s.add_development_dependency "yard"
  # Package exactly what git tracks.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_path = 'lib'
end
Add correct date for gem automatically
# -*- encoding: utf-8 -*-
# Gem specification for babelish: a CSV <-> localization-file converter.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'babelish/version'
Gem::Specification.new do |s|
  s.name        = 'babelish'
  s.version     = Babelish::VERSION
  # Release date is derived at build time so it never goes stale.
  s.date        = Time.now.strftime("%Y-%m-%d")
  s.summary     = "CSV converter for localization files"
  s.description = "This set of commands converts a CSV file to the following formats:
- .strings (iOS)
- .xml (Android)
- .json
- .php"
  s.authors = ["François Benaiteau", "Markus Paeschke"]
  s.email = ['francois.benaiteau@gmail.com', 'markus.paeschke@gmail.com']
  s.homepage = 'http://netbe.github.io/Babelish/'
  s.license = 'MIT'
  s.add_dependency "thor"
  s.add_dependency "google_drive", "~> 1.0.1"
  s.add_dependency "nokogiri"
  # google_drive dependency to ask for mail and password
  s.add_dependency "highline"
  # json support
  s.add_dependency "json"
  s.add_development_dependency "rake"
  s.add_development_dependency "test-unit"
  s.add_development_dependency "simplecov"
  s.add_development_dependency "yard"
  # Package exactly what git tracks.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_path = 'lib'
end
|
# Chef attributes for the golang cookbook.
default[:golang] = {
  # can be "stable" or "tip"
  :version => "stable",
  :multi => {
    # Installable Go versions. go1.0.3 removed: it does not compile on
    # Ubuntu 14.04 (Trusty).
    :versions => %w(go1.1.2 go1.2.2 go1.3.3),
    :default_version => "go1.3.3",
    # Short aliases mapping to concrete patch releases.
    :aliases => {
      "go1.1" => "go1.1.2",
      "go1.2" => "go1.2.2"
    }
  }
}
Drop go 1.0.3
It doesn't compile on Trusty.
# Chef attributes for the golang cookbook.
default[:golang] = {
  # can be "stable" or "tip"
  :version => "stable",
  :multi => {
    # Installable Go versions (go1.0.3 was dropped earlier: it does not
    # compile on Ubuntu Trusty).
    :versions => %w(go1.1.2 go1.2.2 go1.3.3),
    :default_version => "go1.3.3",
    # Short aliases mapping to concrete patch releases.
    :aliases => {
      "go1.1" => "go1.1.2",
      "go1.2" => "go1.2.2"
    }
  }
}
|
#--
# Author:: Daniel DeLeo (<dan@opscode.com>)
# Copyright:: Copyright (c) 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'timeout'
require 'win32/open3'
class Chef
  class ShellOut
    module Windows
      #--
      # Missing lots of features from the UNIX version, such as
      # environment, cwd, etc.

      # Runs @command via Open3, streaming its stdout/stderr into
      # @stdout/@stderr until both pipes reach EOF or `timeout` expires.
      #
      # Returns self.
      # Raises Chef::Exceptions::CommandTimeout when the command runs
      # longer than `timeout` seconds.
      def run_command
        Chef::Log.debug("sh(#{@command})")
        # win32 open4 is really just open3.
        Open3.popen3(@command) do |stdin,stdout,stderr|
          # FIX: removed `@child_pid = child_pid` -- `child_pid` was never
          # defined (Open3 does not expose the child's pid), so that line
          # raised NameError on every invocation.
          @finished_stdout = false
          @finished_stderr = false
          stdin.close
          stdout.sync = true
          stderr.sync = true
          Timeout.timeout(timeout) do
            # Busy-poll both pipes until each has hit EOF.
            loop do
              read_stdout(stdout)
              read_stderr(stderr)
              break if (@finished_stdout && @finished_stderr)
            end
            @status = $?
          end
        end
        self
      rescue Timeout::Error
        raise Chef::Exceptions::CommandTimeout, "command timed out:\n#{format_for_exception}"
      end
      # Non-blocking read of up to 8096 bytes of stdout into @stdout.
      # Marks @finished_stdout on EOF; EAGAIN (no data yet) is ignored.
      def read_stdout(stdout)
        return nil if @finished_stdout
        if chunk = stdout.read_nonblock(8096)
          @stdout << chunk
        end
      rescue EOFError
        @finished_stdout = true
      rescue Errno::EAGAIN
      end
      # Non-blocking read of up to 8096 bytes of stderr into @stderr.
      # Marks @finished_stderr on EOF; EAGAIN (no data yet) is ignored.
      def read_stderr(stderr)
        return nil if @finished_stderr
        if chunk = stderr.read_nonblock(8096)
          @stderr << chunk
        end
      rescue EOFError
        @finished_stderr = true
      rescue Errno::EAGAIN
      end
    end
  end
end
Remove the @child_pid assignment: the child pid is never available when using Open3.
#--
# Author:: Daniel DeLeo (<dan@opscode.com>)
# Copyright:: Copyright (c) 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'timeout'
require 'win32/open3'
class Chef
  class ShellOut
    module Windows
      #--
      # Missing lots of features from the UNIX version, such as
      # environment, cwd, etc.

      # Runs @command via Open3, streaming its stdout/stderr into
      # @stdout/@stderr until both pipes reach EOF or `timeout` expires.
      #
      # Returns self.
      # Raises Chef::Exceptions::CommandTimeout when the command runs
      # longer than `timeout` seconds.
      def run_command
        Chef::Log.debug("sh(#{@command})")
        # win32 open4 is really just open3.
        Open3.popen3(@command) do |stdin,stdout,stderr|
          @finished_stdout = false
          @finished_stderr = false
          stdin.close
          stdout.sync = true
          stderr.sync = true
          Timeout.timeout(timeout) do
            # Busy-poll both pipes until each has hit EOF.
            loop do
              read_stdout(stdout)
              read_stderr(stderr)
              break if (@finished_stdout && @finished_stderr)
            end
            # NOTE(review): $? after an Open3 block -- presumably set by the
            # popen3 child reaping; confirm it reflects this command's status.
            @status = $?
          end
        end
        self
      rescue Timeout::Error
        raise Chef::Exceptions::CommandTimeout, "command timed out:\n#{format_for_exception}"
      end
      # Non-blocking read of up to 8096 bytes of stdout into @stdout.
      # Marks @finished_stdout on EOF; EAGAIN (no data yet) is ignored.
      def read_stdout(stdout)
        return nil if @finished_stdout
        if chunk = stdout.read_nonblock(8096)
          @stdout << chunk
        end
      rescue EOFError
        @finished_stdout = true
      rescue Errno::EAGAIN
      end
      # Non-blocking read of up to 8096 bytes of stderr into @stderr.
      # Marks @finished_stderr on EOF; EAGAIN (no data yet) is ignored.
      def read_stderr(stderr)
        return nil if @finished_stderr
        if chunk = stderr.read_nonblock(8096)
          @stderr << chunk
        end
      rescue EOFError
        @finished_stderr = true
      rescue Errno::EAGAIN
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.