CombinedText stringlengths 4 3.42M |
|---|
require 'yajl'
require 'set'
module Remodel
  # Base class for objects persisted to redis as JSON-encoded attribute hashes.
  class Entity
    # Mass-assigns +attributes+ through their writers; keys without a writer are ignored.
    def initialize(attributes = {})
      @attributes = {}
      attributes.each do |key, value|
        send("#{key}=", value) if respond_to? "#{key}="
      end
    end

    # Builds and immediately persists a new entity.
    def self.create(attributes = {})
      new(attributes).save
    end

    # Instantiates an entity from its JSON representation.
    def self.from_json(json)
      new(parse(json))
    end

    # Loads the entity stored under +key+; raises EntityNotFound if missing.
    def self.find(key)
      from_json(fetch(key))
    end

    # Persists the entity, generating a fresh key on first save. Returns self.
    def save
      self.key = self.class.next_key if key.nil?
      redis.set(key, to_json)
      self
    end

    # Re-reads the stored attributes and forgets memoized has_many collections.
    def reload
      initialize(self.class.parse(self.class.fetch(key)))
      reset_collections
      self
    end

    def to_json
      Yajl::Encoder.encode(self.class.pack(@attributes))
    end

    def self.parse(json)
      unpack(Yajl::Parser.parse(json))
    end

  protected

    # Overrides the default key prefix (first letter of the class name).
    def self.set_key_prefix(prefix)
      raise InvalidKeyPrefix unless prefix =~ /^[a-z]+$/
      @key_prefix = prefix
    end

    # Declares a persisted attribute with reader/writer; +options[:mapper]+
    # controls how the value is (un)packed for JSON.
    def self.property(name, options = {})
      name = name.to_sym
      mapper[name] = options[:mapper] || DefaultMapper
      define_method(name) { @attributes[name] }
      define_method("#{name}=") { |value| @attributes[name] = value }
    end

    # Declares a lazily-built, memoized Collection of associated entities.
    def self.has_many(name, options)
      name = name.to_sym
      define_method(name) do
        var = "@collection_#{name}".to_sym
        if instance_variable_defined? var
          instance_variable_get var
        else
          instance_variable_set var, Collection.new(options[:class], "#{key}:#{name}")
        end
      end
    end

  private

    # Every subclass automatically gets a :key property.
    def self.inherited(subclass)
      subclass.property(:key)
    end

    # Drops memoized has_many collections (used by reload).
    def reset_collections
      instance_variables.each do |var|
        remove_instance_variable(var) if var =~ /^@collection_/
      end
    end

    def self.fetch(key)
      redis.get(key) || raise(EntityNotFound)
    end

    # Allocates the next sequential key, e.g. "c:42", via a redis counter.
    def self.next_key
      next_val = redis.incr("#{key_prefix}:seq")
      "#{key_prefix}:#{next_val}"
    end

    def self.key_prefix
      @key_prefix ||= name[0,1].downcase
    end

    # Converts the attribute hash into JSON-safe values via each property's mapper.
    def self.pack(attributes)
      result = {}
      attributes.each do |key, value|
        result[key] = mapper[key].pack(value)
      end
      result
    end

    # Inverse of pack: symbolizes keys and restores values via the mappers.
    def self.unpack(attributes)
      result = {}
      attributes.each do |key, value|
        key = key.to_sym
        result[key] = mapper[key].unpack(value)
      end
      result
    end

    def self.mapper
      @mapper ||= {}
    end

    def self.redis
      Remodel.redis
    end

    # Instance-level convenience alias for the module-wide connection.
    def redis
      Remodel.redis
    end
  end
end
A little refactoring: remove the instance-level `redis` helper (use `self.class.redis` in `save`) and inline `reset_collections` into `reload`.
require 'yajl'
require 'set'
module Remodel
  # Base class for objects persisted to redis as JSON-encoded attribute hashes.
  class Entity
    # Mass-assigns +attributes+ through their writers; keys without a writer are ignored.
    def initialize(attributes = {})
      @attributes = {}
      attributes.each do |key, value|
        send("#{key}=", value) if respond_to? "#{key}="
      end
    end

    # Builds and immediately persists a new entity.
    def self.create(attributes = {})
      new(attributes).save
    end

    # Instantiates an entity from its JSON representation.
    def self.from_json(json)
      new(parse(json))
    end

    # Loads the entity stored under +key+; raises EntityNotFound if missing.
    def self.find(key)
      from_json(fetch(key))
    end

    # Persists the entity, generating a fresh key on first save. Returns self.
    def save
      self.key = self.class.next_key if key.nil?
      self.class.redis.set(key, to_json)
      self
    end

    # Re-reads the stored attributes and forgets memoized has_many collections.
    def reload
      initialize(self.class.parse(self.class.fetch(key)))
      instance_variables.each do |var|
        remove_instance_variable(var) if var =~ /^@collection_/
      end
      self
    end

    def to_json
      Yajl::Encoder.encode(self.class.pack(@attributes))
    end

    def self.parse(json)
      unpack(Yajl::Parser.parse(json))
    end

  protected

    # Overrides the default key prefix (first letter of the class name).
    def self.set_key_prefix(prefix)
      raise InvalidKeyPrefix unless prefix =~ /^[a-z]+$/
      @key_prefix = prefix
    end

    # Declares a persisted attribute; +options[:mapper]+ controls JSON (un)packing.
    def self.property(name, options = {})
      name = name.to_sym
      mapper[name] = options[:mapper] || DefaultMapper
      define_method(name) { @attributes[name] }
      define_method("#{name}=") { |value| @attributes[name] = value }
    end

    # Declares a lazily-built, memoized Collection of associated entities.
    def self.has_many(name, options)
      name = name.to_sym
      define_method(name) do
        var = "@collection_#{name}".to_sym
        if instance_variable_defined? var
          instance_variable_get var
        else
          instance_variable_set var, Collection.new(options[:class], "#{key}:#{name}")
        end
      end
    end

  private

    # Every subclass automatically gets a :key property.
    def self.inherited(subclass)
      subclass.property(:key)
    end

    def self.fetch(key)
      redis.get(key) || raise(EntityNotFound)
    end

    # Allocates the next sequential key, e.g. "c:42", via a redis counter.
    def self.next_key
      next_val = redis.incr("#{key_prefix}:seq")
      "#{key_prefix}:#{next_val}"
    end

    def self.key_prefix
      @key_prefix ||= name[0,1].downcase
    end

    # Converts the attribute hash into JSON-safe values via each property's mapper.
    def self.pack(attributes)
      result = {}
      attributes.each do |key, value|
        result[key] = mapper[key].pack(value)
      end
      result
    end

    # Inverse of pack: symbolizes keys and restores values via the mappers.
    def self.unpack(attributes)
      result = {}
      attributes.each do |key, value|
        key = key.to_sym
        result[key] = mapper[key].unpack(value)
      end
      result
    end

    def self.mapper
      @mapper ||= {}
    end

    def self.redis
      Remodel.redis
    end
  end
end
require "resque-latency/version"
require 'resque-latency/server'
module Resque
  # Enqueues a job, stamping the payload with the enqueue time so that queue
  # latency can be measured when the job is later dequeued.
  def Job.create(queue, klass, *args)
    Resque.validate(klass, queue)
    if Resque.inline?
      # Instantiating a Resque::Job and calling perform on it so callbacks run
      # decode(encode(args)) to ensure that args are normalized in the same manner as a non-inline job
      new(:inline, {'class' => klass, 'args' => decode(encode(args))}).perform
    else
      Resque.push(queue, 'class' => klass.to_s, 'args' => args, 'timestamp' => Time.now.utc.to_i)
    end
  end

  # Records "<latency_seconds>:<measured_at_epoch>" for the job's queue before
  # delegating to the normal constructor.
  def Job.new(queue, payload)
    # latency queue: resque:latency:queue_name
    # guard: inline jobs and jobs enqueued before this gem was installed carry no
    # 'timestamp'; previously nil.to_i produced an absurd epoch-sized latency value
    if payload && payload['timestamp']
      key = ['latency', queue].join(':')
      latency = Time.now.utc.to_i - payload['timestamp'].to_i
      redis.set key, [ latency.to_s, Time.now.utc.to_i ].join(':')
    end
    super
  end

  # Most recent recorded latency for +queue+ in seconds, or nil when nothing
  # has been recorded yet (previously raised NoMethodError on nil).
  def latency(queue)
    value = redis.get("latency:#{queue}")
    value && value.split(':').first.to_i
  end

  # Time the latency for +queue+ was last recorded, or nil when none recorded.
  def latency_updated_at(queue)
    value = redis.get("latency:#{queue}")
    value && Time.at(value.split(':').last.to_i)
  end
end
Added a comment explaining how latency is stored in redis.
require "resque-latency/version"
require 'resque-latency/server'
module Resque
  # Enqueues a job, stamping the payload with the enqueue time so that queue
  # latency can be measured when the job is later dequeued.
  def Job.create(queue, klass, *args)
    Resque.validate(klass, queue)
    if Resque.inline?
      # Instantiating a Resque::Job and calling perform on it so callbacks run
      # decode(encode(args)) to ensure that args are normalized in the same manner as a non-inline job
      new(:inline, {'class' => klass, 'args' => decode(encode(args))}).perform
    else
      Resque.push(queue, 'class' => klass.to_s, 'args' => args, 'timestamp' => Time.now.utc.to_i)
    end
  end

  def Job.new(queue, payload)
    # latency queue: resque:latency:queue_name
    # store the latency of this job in seconds and the current timestamp in the key
    # timestamp is stored in seconds since epoch UTC.
    # delimited by :
    # guard: inline jobs and jobs enqueued before this gem was installed carry no
    # 'timestamp'; previously nil.to_i produced an absurd epoch-sized latency value
    if payload && payload['timestamp']
      key = ['latency', queue].join(':')
      latency = Time.now.utc.to_i - payload['timestamp'].to_i
      redis.set key, [ latency.to_s, Time.now.utc.to_i ].join(':')
    end
    super
  end

  # Most recent recorded latency for +queue+ in seconds, or nil when nothing
  # has been recorded yet (previously raised NoMethodError on nil).
  def latency(queue)
    value = redis.get("latency:#{queue}")
    value && value.split(':').first.to_i
  end

  # Time the latency for +queue+ was last recorded, or nil when none recorded.
  def latency_updated_at(queue)
    value = redis.get("latency:#{queue}")
    value && Time.at(value.split(':').last.to_i)
  end
end
|
module Rms
  # Raised when RMS login fails; +cause+ carries the underlying error.
  # NOTE: now inherits from StandardError (was Exception) so a plain
  # `rescue => e` can catch it; subclassing Exception directly is a Ruby anti-pattern.
  class LoginFailedError < StandardError
    attr_accessor :cause
  end

  # Mechanize-based connection to Rakuten RMS.
  class Connection < ::Mechanize
    DEF_TIMEOUT = 180
    DEF_AGENT = 'Windows IE 7'
    DEF_MAX_HISTORY = 1
    DEF_ENCODING = 'euc-jp'
    LOGIN_URL = "https://glogin.rms.rakuten.co.jp/?sp_id=1"

    def initialize(auth1_id ,auth1_pwd ,auth2_id ,auth2_pwd)
      super()
      @auth_parameters = auth_parameter(auth1_id,
                                        auth1_pwd,
                                        auth2_id,
                                        auth2_pwd)
      self.read_timeout = DEF_TIMEOUT
      self.user_agent_alias = DEF_AGENT
      self.max_history = DEF_MAX_HISTORY
      self
    end

    # login and move to top menu
    def connect
      login_page = get(LOGIN_URL)
      # R-login
      form = login_page.forms[0]
      form.field_with(:name => 'login_id').value = @auth_parameters[:AUTH1_ID]
      form.field_with(:name => 'passwd').value = @auth_parameters[:AUTH1_PWD]
      page = set_enc(form.click_button)
    end

    def get(*params)
      set_enc(super(*params))
    end

    # Sets the page encoding from its charset meta tag, defaulting to euc-jp.
    # NOTE(review): /charset=(.*)\"/ is greedy and may capture past the charset
    # value when the line contains another double quote -- confirm against real pages.
    def set_enc(page)
      if page.body.to_s.tosjis =~ /charset=(.*)\"/
        ec = $1
        if ec =~ /^[xX]\-(.*)/
          ec = $1
        end
        page.encoding = ec
      else
        page.encoding = DEF_ENCODING
      end
      page
    end

    # Validates the four credentials (must be non-empty Strings) and returns them as a hash.
    def auth_parameter(auth1_id ,auth1_pwd ,auth2_id ,auth2_pwd)
      if !auth1_id || !auth1_id.is_a?(String) || auth1_id.strip == '' ||
         !auth1_pwd || !auth1_pwd.is_a?(String) || auth1_pwd.strip == '' ||
         !auth2_id || !auth2_id.is_a?(String) || auth2_id.strip == '' || # bug fix: previously re-checked auth2_pwd here, so a blank auth2_id slipped through
         !auth2_pwd || !auth2_pwd.is_a?(String) || auth2_pwd.strip == ''
        raise "invalid auth_params"
      end
      {:AUTH1_ID => auth1_id,
       :AUTH1_PWD => auth1_pwd,
       :AUTH2_ID => auth2_id,
       :AUTH2_PWD => auth2_pwd}
    end
  end
end
Implements the first authentication step (R-Login) with a success check, then the Rakuten member login.
# -*- coding: utf-8 -*-
module Rms
  # Raised when RMS login fails; +cause+ carries the underlying error.
  # NOTE: now inherits from StandardError (was Exception) so a plain
  # `rescue => e` can catch it; subclassing Exception directly is a Ruby anti-pattern.
  class LoginFailedError < StandardError
    attr_accessor :cause
  end

  # Mechanize-based connection to Rakuten RMS, performing the two-step
  # (R-Login + Rakuten member) authentication.
  class Connection < ::Mechanize
    DEF_TIMEOUT = 180
    DEF_AGENT = 'Windows IE 7'
    DEF_MAX_HISTORY = 1
    DEF_ENCODING = 'euc-jp'
    LOGIN_URL = "https://glogin.rms.rakuten.co.jp/?sp_id=1"
    # Marker text shown by RMS after a successful R-Login (kept in Shift_JIS to match page bodies).
    WRD_R_LOGIN_SUCCESS = 'R-Login IDの認証を行いました。'.tosjis

    def initialize(auth1_id ,auth1_pwd ,auth2_id ,auth2_pwd)
      super()
      @auth_parameters = auth_parameter(auth1_id,
                                        auth1_pwd,
                                        auth2_id,
                                        auth2_pwd)
      self.read_timeout = DEF_TIMEOUT
      self.user_agent_alias = DEF_AGENT
      self.max_history = DEF_MAX_HISTORY
      self
    end

    # login and move to top menu
    def connect
      # R-login
      login_page1 = get(LOGIN_URL)
      form = login_page1.forms[0]
      form.field_with(:name => 'login_id').value = @auth_parameters[:AUTH1_ID]
      form.field_with(:name => 'passwd').value = @auth_parameters[:AUTH1_PWD]
      login_page2 = set_enc(form.click_button)
      unless login_page2.body.to_s.tosjis.index(WRD_R_LOGIN_SUCCESS)
        raise LoginFailedError.new('R-Login failed.')
      end
      # Rakuten Member Login
      form = login_page2.forms[0]
      form.field_with(:name => 'user_id').value = @auth_parameters[:AUTH2_ID]
      form.field_with(:name => 'user_passwd').value = @auth_parameters[:AUTH2_PWD]
      announce_page = set_enc(form.click_button)
    end

    def get(*params)
      set_enc(super(*params))
    end

    # Sets the page encoding from its charset meta tag, defaulting to euc-jp.
    # NOTE(review): /charset=(.*)\"/ is greedy and may capture past the charset
    # value when the line contains another double quote -- confirm against real pages.
    def set_enc(page)
      if page.body.to_s.tosjis =~ /charset=(.*)\"/
        ec = $1
        if ec =~ /^[xX]\-(.*)/
          ec = $1
        end
        page.encoding = ec
      else
        page.encoding = DEF_ENCODING
      end
      page
    end

    # Validates the four credentials (must be non-empty Strings) and returns them as a hash.
    def auth_parameter(auth1_id ,auth1_pwd ,auth2_id ,auth2_pwd)
      if !auth1_id || !auth1_id.is_a?(String) || auth1_id.strip == '' ||
         !auth1_pwd || !auth1_pwd.is_a?(String) || auth1_pwd.strip == '' ||
         !auth2_id || !auth2_id.is_a?(String) || auth2_id.strip == '' || # bug fix: previously re-checked auth2_pwd here, so a blank auth2_id slipped through
         !auth2_pwd || !auth2_pwd.is_a?(String) || auth2_pwd.strip == ''
        raise "invalid auth_params"
      end
      {:AUTH1_ID => auth1_id,
       :AUTH1_PWD => auth1_pwd,
       :AUTH2_ID => auth2_id,
       :AUTH2_PWD => auth2_pwd}
    end
  end
end
|
module RockRMS
  # Base error for all RockRMS client failures.
  class Error < StandardError; end

  # One subclass of Error per HTTP failure mode, generated from the names.
  %w[BadGateway BadRequest Forbidden GatewayTimeout InternalServerError
     NotFound ServiceUnavailable Unauthorized].each do |name|
    const_set(name, Class.new(Error))
  end
end
require 'faraday'
module FaradayMiddleware
  # Translates HTTP failure statuses on completed responses into RockRMS errors.
  class RockRMSErrorHandler < Faraday::Response::Middleware
    ERROR_STATUSES = 400..600

    # Raises the most specific RockRMS error for env[:status]; any other status
    # inside ERROR_STATUSES raises the generic RockRMS::Error. Successful
    # statuses pass through untouched.
    def on_complete(env)
      status = env[:status]
      error =
        if status == 400 then RockRMS::BadRequest
        elsif status == 401 then RockRMS::Unauthorized
        elsif status == 403 then RockRMS::Forbidden
        elsif status == 404 then RockRMS::NotFound
        elsif status == 500 then RockRMS::InternalServerError
        elsif status == 502 then RockRMS::BadGateway
        elsif status == 503 then RockRMS::ServiceUnavailable
        elsif status == 504 then RockRMS::GatewayTimeout
        elsif ERROR_STATUSES.cover?(status) then RockRMS::Error
        end
      raise error, error_message(env) if error
    end

    private

    # "status: url body" summary used in every raised message.
    def error_message(env)
      "#{env[:status]}: #{env[:url]} #{env[:body]}"
    end
  end
end
Add Cloudflare error
module RockRMS
  # Base error for all RockRMS client failures.
  class Error < StandardError; end

  # One subclass of Error per HTTP failure mode, generated from the names.
  %w[BadGateway BadRequest CloudflareError Forbidden GatewayTimeout
     InternalServerError NotFound ServiceUnavailable Unauthorized].each do |name|
    const_set(name, Class.new(Error))
  end
end
require 'faraday'
module FaradayMiddleware
  # Translates HTTP failure statuses on completed responses into RockRMS errors.
  class RockRMSErrorHandler < Faraday::Response::Middleware
    ERROR_STATUSES = 400..600

    # Raises the most specific RockRMS error for env[:status] (including 520,
    # Cloudflare's "unknown origin error"); any other status inside
    # ERROR_STATUSES raises the generic RockRMS::Error.
    def on_complete(env)
      status = env[:status]
      error =
        if status == 400 then RockRMS::BadRequest
        elsif status == 401 then RockRMS::Unauthorized
        elsif status == 403 then RockRMS::Forbidden
        elsif status == 404 then RockRMS::NotFound
        elsif status == 500 then RockRMS::InternalServerError
        elsif status == 502 then RockRMS::BadGateway
        elsif status == 503 then RockRMS::ServiceUnavailable
        elsif status == 504 then RockRMS::GatewayTimeout
        elsif status == 520 then RockRMS::CloudflareError
        elsif ERROR_STATUSES.cover?(status) then RockRMS::Error
        end
      raise error, error_message(env) if error
    end

    private

    # "status: url body" summary used in every raised message.
    def error_message(env)
      "#{env[:status]}: #{env[:url]} #{env[:body]}"
    end
  end
end
|
require sprintf('%s/../../%s', File.dirname(File.expand_path(__FILE__)), 'path_helper')
require 'json'
require 'net/https'
require 'socket'
require 'uri'
# TODO use @cache_timeout to invalidate data cached here
class Rouster
##
# facter
#
# runs facter, returns parsed hash of { fact1 => value1, factN => valueN }
#
# parameters
# * [cache] - whether to store/return cached facter data, if available
# * [custom_facts] - whether to include custom facts in return (uses -p argument)
def facter(cache=true, custom_facts=true)
  # short-circuit on the memoized hash when caching is enabled
  return self.facts if cache.true? && !self.facts.nil?
  output = self.run(sprintf('facter %s', custom_facts.true? ? '-p' : ''))
  parsed = Hash.new()
  output.split("\n").each do |line|
    next unless line.match(/(\S*?)\s\=\>\s(.*)/)
    parsed[$1] = $2
  end
  self.facts = parsed if cache.true?
  parsed
end
##
# get_catalog
#
# not completely implemented method to get a compiled catalog about a node (based on its facts) from a puppetmaster
#
# original implementation used the catalog face, which does not actually work. switched to an API call, but still need to convert facts into PSON
#
# parameters
# * [hostname] - hostname of node to return catalog for, if not specified, will use `hostname --fqdn`
# * [puppetmaster] - hostname of puppetmaster to use in API call, defaults to 'puppet'
# * [facts] - hash of facts to pass to puppetmaster
# * [puppetmaster_port] - port to talk to the puppetmaster on, defaults to 8140
def get_catalog(hostname=nil, puppetmaster=nil, facts=nil, puppetmaster_port=8140)
  # post https://<puppetmaster>/catalog/<node>?facts_format=pson&facts=<pson URL encoded> == ht to patrick@puppetlabs
  certname     = hostname.nil? ? self.run('hostname --fqdn').chomp : hostname
  puppetmaster = puppetmaster.nil? ? 'puppet' : puppetmaster
  facts        = facts.nil? ? self.facter() : facts
  %w(fqdn hostname operatingsystem operatingsystemrelease osfamily rubyversion).each do |required|
    raise ArgumentError.new(sprintf('missing required fact[%s]', required)) unless facts.has_key?(required)
  end
  # deliberate early exit: facts must be converted to PSON before the call below can work
  raise InternalError.new('need to finish conversion of facts to PSON')
  facts.to_pson # this does not work, but needs to
  json = nil
  url  = sprintf('https://%s:%s/catalog/%s?facts_format=pson&facts=%s', puppetmaster, puppetmaster_port, certname, facts)
  uri  = URI.parse(url)
  begin
    res  = Net::HTTP.get(uri)
    json = res.to_json
  rescue => e
    # bug fix: message was missing the closing ']' after the interpolated exception
    raise ExternalError.new("calling[#{url}] led to exception[#{e}]")
  end
  json
end
##
# get_puppet_errors
#
# parses input for puppet errors, returns array of strings
#
# parameters
# * [input] - string to look at, defaults to self.get_output()
def get_puppet_errors(input=nil)
  # collect the ANSI-colored "err:" lines; nil when there are none
  matches = (input.nil? ? self.get_output() : input).scan(/35merr:.*/)
  matches.empty? ? nil : matches
end
##
# get_puppet_notices
#
# parses input for puppet notices, returns array of strings
#
# parameters
# * [input] - string to look at, defaults to self.get_output()
def get_puppet_notices(input=nil)
  # collect the ANSI-colored "notice:" lines; nil when there are none
  matches = (input.nil? ? self.get_output() : input).scan(/36mnotice:.*/)
  matches.empty? ? nil : matches
end
##
# get_puppet_version
#
# executes `puppet --version` and returns parsed version string or nil
def get_puppet_version
  # nil when puppet is not in the PATH or the output does not look like a version
  return nil unless self.is_in_path?('puppet')
  output = self.run('puppet --version')
  output.match(/([\d\.]*)\s/) ? $1 : nil
end
##
# hiera
#
# returns hiera results from self
#
# parameters
# * <key> - hiera key to look up
# * [config] - path to hiera configuration -- this is only optional if you have a hiera.yaml file in ~/vagrant
def hiera(key, config=nil)
  raise NotImplementedError.new() # TODO implement this
end
##
# parse_catalog
#
# looks at the ['data']['resources'] keys in catalog for Files, Groups, Packages, Services and Users, returns hash of expectations compatible with validate_*
#
# this is a very lightly tested implementation, please open issues as necessary
#
# parameters
# * <catalog> - JSON string or Hash representation of catalog, typically from get_catalog()
def parse_catalog(catalog)
  classes   = Array.new() # bug fix: was nil, so classes.push() raised NoMethodError
  resources = Hash.new()  # bug fix: was nil, so resources[name] = ... raised NoMethodError
  results   = Hash.new()
  if catalog.is_a?(String)
    begin
      # bug fix: the parsed result was discarded, leaving catalog a String and
      # breaking the has_key? checks below
      catalog = JSON.parse!(catalog)
    rescue
      raise InternalError.new(sprintf('unable to parse catalog[%s] as JSON', catalog))
    end
  end
  unless catalog.has_key?('data') and catalog['data'].has_key?('classes')
    raise InternalError.new(sprintf('catalog does not contain a classes key[%s]', catalog))
  end
  unless catalog.has_key?('data') and catalog['data'].has_key?('resources')
    raise InternalError.new(sprintf('catalog does not contain a resources key[%s]', catalog))
  end
  raw_resources = catalog['data']['resources']
  raw_resources.each do |r|
    # samples of each type of resource are available at
    # https://github.com/chorankates/rouster/issues/20#issuecomment-18635576
    #
    # we can do a lot better here
    type = r['type']
    case type
    when 'Class'
      classes.push(r['title'])
    when 'File'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]      = :file
      resources[name][:directory] = false
      resources[name][:ensure]    = r['ensure'] ||= 'present'
      resources[name][:file]      = true
      resources[name][:group]     = r['parameters'].has_key?('group') ? r['parameters']['group'] : nil
      resources[name][:mode]      = r['parameters'].has_key?('mode') ? r['parameters']['mode'] : nil
      resources[name][:owner]     = r['parameters'].has_key?('owner') ? r['parameters']['owner'] : nil
      resources[name][:contains]  = r.has_key?('content') ? r['content'] : nil
    when 'Group'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]   = :group
      resources[name][:ensure] = r['ensure'] ||= 'present'
      resources[name][:gid]    = r['parameters'].has_key?('gid') ? r['parameters']['gid'] : nil
    when 'Package'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]    = :package
      resources[name][:ensure]  = r['ensure'] ||= 'present'
      resources[name][:version] = r['ensure'] =~ /\d/ ? r['ensure'] : nil
    when 'Service'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]   = :service
      resources[name][:ensure] = r['ensure'] ||= 'present'
      resources[name][:state]  = r['ensure']
    when 'User'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]   = :user
      resources[name][:ensure] = r['ensure'] ||= 'present'
      resources[name][:home]   = r['parameters'].has_key?('home') ? r['parameters']['home'] : nil
      resources[name][:gid]    = r['parameters'].has_key?('gid') ? r['parameters']['gid'] : nil
      resources[name][:group]  = r['parameters'].has_key?('groups') ? r['parameters']['groups'] : nil
      resources[name][:shell]  = r['parameters'].has_key?('shell') ? r['parameters']['shell'] : nil
      resources[name][:uid]    = r['parameters'].has_key?('uid') ? r['parameters']['uid'] : nil
    else
      raise NotImplementedError.new(sprintf('parsing support for [%s] is incomplete', type))
    end
  end
  # remove all falsy (nil/false) values so validate_* only sees meaningful keys;
  # delete_if avoids the previous mutation-while-iterating over each_pair
  resources.each_key do |name|
    resources[name].delete_if { |_k, v| !v }
  end
  results[:classes]   = classes
  results[:resources] = resources
  results
end
##
# remove_existing_certs
#
# ... removes existing certificates - really only useful when called on a puppetmaster
# useful in testing environments where you want to destroy/rebuild agents without rebuilding the puppetmaster every time (think autosign)
#
# parameters
# * <puppetmaster> - string/partial regex of certificate names to keep
def remove_existing_certs (puppetmaster)
  hosts = Array.new()
  res = self.run('puppet cert list --all')
  res.each_line do |line|
    next if line.match(/#{puppetmaster}/)
    # bug fix: only collect a host when this line actually contains a signed cert;
    # previously every non-matching line pushed the stale (or nil) previous capture
    if (m = line.match(/^\+\s"(.*?)"/))
      hosts.push(m[1])
    end
  end
  hosts.each do |host|
    self.run(sprintf('puppet cert --clean %s', host))
  end
end
##
# run_puppet
#
# ... runs puppet on self, returns nothing
#
# currently supports 2 methods of running puppet:
# * master - runs '/sbin/service puppet once -t'
# * supported options
# * expected_exitcode - string/integer/array of acceptable exit code(s)
# * masterless - runs 'puppet apply <options>' after determining version of puppet running and adjusting arguments
# * supported options
# * expected_exitcode - string/integer/array of acceptable exit code(s)
# * hiera_config - path to hiera configuration -- only supported by Puppet 3.0+
# * manifest_file - string/array of strings of paths to manifest(s) to apply
# * manifest_dir - string/array of strings of directories containing manifest(s) to apply - is recursive
# * module_dir - path to module directory -- currently a required parameter, is this correct?
#
# parameters
# * [mode] - method to run puppet, defaults to 'master'
# * [opts] - hash of additional options
def run_puppet(mode='master', passed_opts=nil)
  passed_opts ||= {} # bug fix: merge!(nil) raised TypeError when no opts were passed
  if mode.eql?('master')
    opts = {
      :expected_exitcode => 0
    }.merge!(passed_opts)
    self.run('/sbin/service puppet once -t', opts[:expected_exitcode])
  elsif mode.eql?('masterless')
    opts = {
      :expected_exitcode => 2,
      :hiera_config => nil,
      :manifest_file => nil, # can be a string or array, will 'puppet apply' each
      :manifest_dir => nil,  # can be a string or array, will 'puppet apply' each module in the dir (recursively)
      :module_dir => nil
    }.merge!(passed_opts)
    ## validate required arguments
    raise InternalError.new(sprintf('invalid hiera config specified[%s]', opts[:hiera_config])) unless self.is_file?(opts[:hiera_config])
    raise InternalError.new(sprintf('invalid module dir specified[%s]', opts[:module_dir])) unless self.is_dir?(opts[:module_dir])
    puppet_version = self.get_puppet_version() # hiera_config specification is only supported in >3.0
    # NOTE(review): the version comparison below is a lexicographic string compare
    # ('10.0' < '3.0'); consider Gem::Version -- left as-is to avoid changing behavior
    if opts[:manifest_file]
      opts[:manifest_file] = opts[:manifest_file].class.eql?(Array) ? opts[:manifest_file] : [opts[:manifest_file]]
      opts[:manifest_file].each do |file|
        raise InternalError.new(sprintf('invalid manifest file specified[%s]', file)) unless self.is_file?(file)
        self.run(sprintf('puppet apply %s --modulepath=%s %s', (puppet_version > '3.0') ? "--hiera_config=#{opts[:hiera_config]}" : '', opts[:module_dir], file), opts[:expected_exitcode])
      end
    end
    if opts[:manifest_dir]
      opts[:manifest_dir] = opts[:manifest_dir].class.eql?(Array) ? opts[:manifest_dir] : [opts[:manifest_dir]]
      opts[:manifest_dir].each do |dir|
        raise InternalError.new(sprintf('invalid manifest dir specified[%s]', dir)) unless self.is_dir?(dir)
        manifests = self.files(dir, '*.pp', true)
        manifests.each do |m|
          self.run(sprintf('puppet apply %s --modulepath=%s %s', (puppet_version > '3.0') ? "--hiera_config=#{opts[:hiera_config]}" : '', opts[:module_dir], m), opts[:expected_exitcode])
        end
      end
    end
  else
    raise InternalError.new(sprintf('unknown mode [%s]', mode))
  end
end
end
The run_puppet master-mode command is changed to 'puppet agent -t' for accurate exit codes.
require sprintf('%s/../../%s', File.dirname(File.expand_path(__FILE__)), 'path_helper')
require 'json'
require 'net/https'
require 'socket'
require 'uri'
# TODO use @cache_timeout to invalidate data cached here
class Rouster
##
# facter
#
# runs facter, returns parsed hash of { fact1 => value1, factN => valueN }
#
# parameters
# * [cache] - whether to store/return cached facter data, if available
# * [custom_facts] - whether to include custom facts in return (uses -p argument)
def facter(cache=true, custom_facts=true)
  # short-circuit on the memoized hash when caching is enabled
  return self.facts if cache.true? && !self.facts.nil?
  output = self.run(sprintf('facter %s', custom_facts.true? ? '-p' : ''))
  parsed = Hash.new()
  output.split("\n").each do |line|
    next unless line.match(/(\S*?)\s\=\>\s(.*)/)
    parsed[$1] = $2
  end
  self.facts = parsed if cache.true?
  parsed
end
##
# get_catalog
#
# not completely implemented method to get a compiled catalog about a node (based on its facts) from a puppetmaster
#
# original implementation used the catalog face, which does not actually work. switched to an API call, but still need to convert facts into PSON
#
# parameters
# * [hostname] - hostname of node to return catalog for, if not specified, will use `hostname --fqdn`
# * [puppetmaster] - hostname of puppetmaster to use in API call, defaults to 'puppet'
# * [facts] - hash of facts to pass to puppetmaster
# * [puppetmaster_port] - port to talk to the puppetmaster on, defaults to 8140
def get_catalog(hostname=nil, puppetmaster=nil, facts=nil, puppetmaster_port=8140)
  # post https://<puppetmaster>/catalog/<node>?facts_format=pson&facts=<pson URL encoded> == ht to patrick@puppetlabs
  certname     = hostname.nil? ? self.run('hostname --fqdn').chomp : hostname
  puppetmaster = puppetmaster.nil? ? 'puppet' : puppetmaster
  facts        = facts.nil? ? self.facter() : facts
  %w(fqdn hostname operatingsystem operatingsystemrelease osfamily rubyversion).each do |required|
    raise ArgumentError.new(sprintf('missing required fact[%s]', required)) unless facts.has_key?(required)
  end
  # deliberate early exit: facts must be converted to PSON before the call below can work
  raise InternalError.new('need to finish conversion of facts to PSON')
  facts.to_pson # this does not work, but needs to
  json = nil
  url  = sprintf('https://%s:%s/catalog/%s?facts_format=pson&facts=%s', puppetmaster, puppetmaster_port, certname, facts)
  uri  = URI.parse(url)
  begin
    res  = Net::HTTP.get(uri)
    json = res.to_json
  rescue => e
    # bug fix: message was missing the closing ']' after the interpolated exception
    raise ExternalError.new("calling[#{url}] led to exception[#{e}]")
  end
  json
end
##
# get_puppet_errors
#
# parses input for puppet errors, returns array of strings
#
# parameters
# * [input] - string to look at, defaults to self.get_output()
def get_puppet_errors(input=nil)
  # collect the ANSI-colored "err:" lines; nil when there are none
  matches = (input.nil? ? self.get_output() : input).scan(/35merr:.*/)
  matches.empty? ? nil : matches
end
##
# get_puppet_notices
#
# parses input for puppet notices, returns array of strings
#
# parameters
# * [input] - string to look at, defaults to self.get_output()
def get_puppet_notices(input=nil)
  # collect the ANSI-colored "notice:" lines; nil when there are none
  matches = (input.nil? ? self.get_output() : input).scan(/36mnotice:.*/)
  matches.empty? ? nil : matches
end
##
# get_puppet_version
#
# executes `puppet --version` and returns parsed version string or nil
def get_puppet_version
  # nil when puppet is not in the PATH or the output does not look like a version
  return nil unless self.is_in_path?('puppet')
  output = self.run('puppet --version')
  output.match(/([\d\.]*)\s/) ? $1 : nil
end
##
# hiera
#
# returns hiera results from self
#
# parameters
# * <key> - hiera key to look up
# * [config] - path to hiera configuration -- this is only optional if you have a hiera.yaml file in ~/vagrant
def hiera(key, config=nil)
  raise NotImplementedError.new() # TODO implement this
end
##
# parse_catalog
#
# looks at the ['data']['resources'] keys in catalog for Files, Groups, Packages, Services and Users, returns hash of expectations compatible with validate_*
#
# this is a very lightly tested implementation, please open issues as necessary
#
# parameters
# * <catalog> - JSON string or Hash representation of catalog, typically from get_catalog()
def parse_catalog(catalog)
  classes   = Array.new() # bug fix: was nil, so classes.push() raised NoMethodError
  resources = Hash.new()  # bug fix: was nil, so resources[name] = ... raised NoMethodError
  results   = Hash.new()
  if catalog.is_a?(String)
    begin
      # bug fix: the parsed result was discarded, leaving catalog a String and
      # breaking the has_key? checks below
      catalog = JSON.parse!(catalog)
    rescue
      raise InternalError.new(sprintf('unable to parse catalog[%s] as JSON', catalog))
    end
  end
  unless catalog.has_key?('data') and catalog['data'].has_key?('classes')
    raise InternalError.new(sprintf('catalog does not contain a classes key[%s]', catalog))
  end
  unless catalog.has_key?('data') and catalog['data'].has_key?('resources')
    raise InternalError.new(sprintf('catalog does not contain a resources key[%s]', catalog))
  end
  raw_resources = catalog['data']['resources']
  raw_resources.each do |r|
    # samples of each type of resource are available at
    # https://github.com/chorankates/rouster/issues/20#issuecomment-18635576
    #
    # we can do a lot better here
    type = r['type']
    case type
    when 'Class'
      classes.push(r['title'])
    when 'File'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]      = :file
      resources[name][:directory] = false
      resources[name][:ensure]    = r['ensure'] ||= 'present'
      resources[name][:file]      = true
      resources[name][:group]     = r['parameters'].has_key?('group') ? r['parameters']['group'] : nil
      resources[name][:mode]      = r['parameters'].has_key?('mode') ? r['parameters']['mode'] : nil
      resources[name][:owner]     = r['parameters'].has_key?('owner') ? r['parameters']['owner'] : nil
      resources[name][:contains]  = r.has_key?('content') ? r['content'] : nil
    when 'Group'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]   = :group
      resources[name][:ensure] = r['ensure'] ||= 'present'
      resources[name][:gid]    = r['parameters'].has_key?('gid') ? r['parameters']['gid'] : nil
    when 'Package'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]    = :package
      resources[name][:ensure]  = r['ensure'] ||= 'present'
      resources[name][:version] = r['ensure'] =~ /\d/ ? r['ensure'] : nil
    when 'Service'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]   = :service
      resources[name][:ensure] = r['ensure'] ||= 'present'
      resources[name][:state]  = r['ensure']
    when 'User'
      name = r['title']
      resources[name] = Hash.new()
      resources[name][:type]   = :user
      resources[name][:ensure] = r['ensure'] ||= 'present'
      resources[name][:home]   = r['parameters'].has_key?('home') ? r['parameters']['home'] : nil
      resources[name][:gid]    = r['parameters'].has_key?('gid') ? r['parameters']['gid'] : nil
      resources[name][:group]  = r['parameters'].has_key?('groups') ? r['parameters']['groups'] : nil
      resources[name][:shell]  = r['parameters'].has_key?('shell') ? r['parameters']['shell'] : nil
      resources[name][:uid]    = r['parameters'].has_key?('uid') ? r['parameters']['uid'] : nil
    else
      raise NotImplementedError.new(sprintf('parsing support for [%s] is incomplete', type))
    end
  end
  # remove all falsy (nil/false) values so validate_* only sees meaningful keys;
  # delete_if avoids the previous mutation-while-iterating over each_pair
  resources.each_key do |name|
    resources[name].delete_if { |_k, v| !v }
  end
  results[:classes]   = classes
  results[:resources] = resources
  results
end
##
# remove_existing_certs
#
# ... removes existing certificates - really only useful when called on a puppetmaster
# useful in testing environments where you want to destroy/rebuild agents without rebuilding the puppetmaster every time (think autosign)
#
# parameters
# * <puppetmaster> - string/partial regex of certificate names to keep
def remove_existing_certs (puppetmaster)
  hosts = Array.new()
  res = self.run('puppet cert list --all')
  res.each_line do |line|
    next if line.match(/#{puppetmaster}/)
    # bug fix: only collect a host when this line actually contains a signed cert;
    # previously every non-matching line pushed the stale (or nil) previous capture
    if (m = line.match(/^\+\s"(.*?)"/))
      hosts.push(m[1])
    end
  end
  hosts.each do |host|
    self.run(sprintf('puppet cert --clean %s', host))
  end
end
##
# run_puppet
#
# ... runs puppet on self, returns nothing
#
# currently supports 2 methods of running puppet:
# * master - runs 'puppet agent -t' (comment updated to match the code below)
# * supported options
# * expected_exitcode - string/integer/array of acceptable exit code(s)
# * masterless - runs 'puppet apply <options>' after determining version of puppet running and adjusting arguments
# * supported options
# * expected_exitcode - string/integer/array of acceptable exit code(s)
# * hiera_config - path to hiera configuration -- only supported by Puppet 3.0+
# * manifest_file - string/array of strings of paths to manifest(s) to apply
# * manifest_dir - string/array of strings of directories containing manifest(s) to apply - is recursive
# * module_dir - path to module directory -- currently a required parameter, is this correct?
#
# parameters
# * [mode] - method to run puppet, defaults to 'master'
# * [opts] - hash of additional options
def run_puppet(mode='master', passed_opts=nil)
  passed_opts ||= {} # bug fix: merge!(nil) raised TypeError when no opts were passed
  if mode.eql?('master')
    opts = {
      :expected_exitcode => 0
    }.merge!(passed_opts)
    self.run('puppet agent -t', opts[:expected_exitcode])
  elsif mode.eql?('masterless')
    opts = {
      :expected_exitcode => 2,
      :hiera_config => nil,
      :manifest_file => nil, # can be a string or array, will 'puppet apply' each
      :manifest_dir => nil,  # can be a string or array, will 'puppet apply' each module in the dir (recursively)
      :module_dir => nil
    }.merge!(passed_opts)
    ## validate required arguments
    raise InternalError.new(sprintf('invalid hiera config specified[%s]', opts[:hiera_config])) unless self.is_file?(opts[:hiera_config])
    raise InternalError.new(sprintf('invalid module dir specified[%s]', opts[:module_dir])) unless self.is_dir?(opts[:module_dir])
    puppet_version = self.get_puppet_version() # hiera_config specification is only supported in >3.0
    # NOTE(review): the version comparison below is a lexicographic string compare
    # ('10.0' < '3.0'); consider Gem::Version -- left as-is to avoid changing behavior
    if opts[:manifest_file]
      opts[:manifest_file] = opts[:manifest_file].class.eql?(Array) ? opts[:manifest_file] : [opts[:manifest_file]]
      opts[:manifest_file].each do |file|
        raise InternalError.new(sprintf('invalid manifest file specified[%s]', file)) unless self.is_file?(file)
        self.run(sprintf('puppet apply %s --modulepath=%s %s', (puppet_version > '3.0') ? "--hiera_config=#{opts[:hiera_config]}" : '', opts[:module_dir], file), opts[:expected_exitcode])
      end
    end
    if opts[:manifest_dir]
      opts[:manifest_dir] = opts[:manifest_dir].class.eql?(Array) ? opts[:manifest_dir] : [opts[:manifest_dir]]
      opts[:manifest_dir].each do |dir|
        raise InternalError.new(sprintf('invalid manifest dir specified[%s]', dir)) unless self.is_dir?(dir)
        manifests = self.files(dir, '*.pp', true)
        manifests.each do |m|
          self.run(sprintf('puppet apply %s --modulepath=%s %s', (puppet_version > '3.0') ? "--hiera_config=#{opts[:hiera_config]}" : '', opts[:module_dir], m), opts[:expected_exitcode])
        end
      end
    end
  else
    raise InternalError.new(sprintf('unknown mode [%s]', mode))
  end
end
end
|
require sprintf('%s/../../%s', File.dirname(File.expand_path(__FILE__)), 'path_helper')
require 'json'
require 'socket'
class Rouster
  # Runs `facter` on the VM and returns its raw output serialized as JSON.
  #
  # use_cache    - when truthy, reuse previously collected facts instead of re-running facter
  # custom_facts - when truthy, pass '-p' so puppet-distributed custom facts are included
  def facter(use_cache=true, custom_facts=true)
    # Return the cached facts instead of shelling out again. The original used
    # `use_cache.true?` (a NoMethodError on plain booleans unless a helper
    # defines Object#true? elsewhere) and lacked the explicit `return`, so the
    # cache was computed but never actually used.
    return self.facts if use_cache && !self.facts.nil?

    json = nil
    res  = self.run(sprintf('facter %s', custom_facts ? '-p' : ''))

    begin
      # NOTE(review): to_json *serializes* the raw output string and cannot
      # fail, which makes this rescue dead code; JSON parsing may have been
      # intended here -- confirm against callers before changing further.
      json = res.to_json
    rescue
      raise InternalError.new(sprintf('unable to parse[%s] as JSON', res))
    end

    self.facts = res if use_cache

    json
  end

  # TODO we should be able to run this without upping the box in question --
  # just need to be able to talk to the same puppetmaster, which means we should 'require puppet' instead of shelling out
  #
  # Fetches the puppet catalog for +hostname+ (defaults to the VM's FQDN when
  # nil) and returns it serialized as JSON.
  def get_catalog(hostname)
    certname = hostname.nil? ? self.run('hostname --fqdn') : hostname
    json     = nil
    res      = self.run(sprintf('puppet catalog find %s', certname))

    begin
      # NOTE(review): same to_json-vs-parse concern as in #facter.
      json = res.to_json
    rescue
      raise InternalError.new(sprintf('unable to parse[%s] as JSON', res))
    end

    json
  end

  # Returns the puppet error lines found in +input+ (or, when nil, in the last
  # command's output), or nil when none are present. '35m' is the remnant of
  # the ANSI color escape puppet prints before 'err:'.
  def get_puppet_errors(input = nil)
    str    = input.nil? ? self.get_output() : input
    errors = str.scan(/35merr:.*/)

    errors.empty? ? nil : errors
  end

  # Returns the puppet notice lines found in +input+ (or, when nil, in the
  # last command's output), or nil when none are present ('36m' is the notice
  # color code).
  def get_puppet_notices(input = nil)
    str     = input.nil? ? self.get_output() : input
    notices = str.scan(/36mnotice:.*/)

    notices.empty? ? nil : notices
  end

  # TODO parse into a hash that can be passed to the validate_* methods
  #
  # Accepts either a catalog JSON string or an already-parsed Hash and returns
  # the catalog's resources array. Raises InternalError on unparseable input
  # or a catalog without data/resources keys.
  def parse_catalog(catalog)
    resources = nil

    # support either JSON or already parsed Hash
    if catalog.is_a?(String)
      begin
        # The original discarded the parse result, leaving +catalog+ a String
        # and crashing on the has_key? calls below; capture it.
        catalog = JSON.parse!(catalog)
      rescue
        raise InternalError.new(sprintf('unable to parse catalog[%s] as JSON', catalog))
      end
    end

    unless catalog.has_key?('data') and catalog['data'].has_key?('resources')
      raise InternalError.new(sprintf('catalog does not contain a resources key[%s]', catalog))
    end

    resources = catalog['data']['resources']
  end

  # Kicks off a single puppet agent run on the VM.
  # TODO should we make this more flexible?
  def run_puppet
    self.run('/sbin/service puppet once -t')
  end
end
Add comments/examples regarding catalog resources (#20)
require sprintf('%s/../../%s', File.dirname(File.expand_path(__FILE__)), 'path_helper')
require 'json'
require 'socket'
class Rouster
  # Runs `facter` on the VM and returns its raw output serialized as JSON.
  #
  # use_cache    - when truthy, reuse previously collected facts instead of re-running facter
  # custom_facts - when truthy, pass '-p' so puppet-distributed custom facts are included
  def facter(use_cache=true, custom_facts=true)
    # Return the cached facts instead of shelling out again. The original used
    # `use_cache.true?` (a NoMethodError on plain booleans unless a helper
    # defines Object#true? elsewhere) and lacked the explicit `return`, so the
    # cache was computed but never actually used.
    return self.facts if use_cache && !self.facts.nil?

    json = nil
    res  = self.run(sprintf('facter %s', custom_facts ? '-p' : ''))

    begin
      # NOTE(review): to_json *serializes* the raw output string and cannot
      # fail, which makes this rescue dead code; JSON parsing may have been
      # intended here -- confirm against callers before changing further.
      json = res.to_json
    rescue
      raise InternalError.new(sprintf('unable to parse[%s] as JSON', res))
    end

    self.facts = res if use_cache

    json
  end

  # TODO we should be able to run this without upping the box in question --
  # just need to be able to talk to the same puppetmaster, which means we should 'require puppet' instead of shelling out
  #
  # Fetches the puppet catalog for +hostname+ (defaults to the VM's FQDN when
  # nil) and returns it serialized as JSON.
  def get_catalog(hostname)
    certname = hostname.nil? ? self.run('hostname --fqdn') : hostname
    json     = nil
    res      = self.run(sprintf('puppet catalog find %s', certname))

    begin
      # NOTE(review): same to_json-vs-parse concern as in #facter.
      json = res.to_json
    rescue
      raise InternalError.new(sprintf('unable to parse[%s] as JSON', res))
    end

    json
  end

  # Returns the puppet error lines found in +input+ (or, when nil, in the last
  # command's output), or nil when none are present. '35m' is the remnant of
  # the ANSI color escape puppet prints before 'err:'.
  def get_puppet_errors(input = nil)
    str    = input.nil? ? self.get_output() : input
    errors = str.scan(/35merr:.*/)

    errors.empty? ? nil : errors
  end

  # Returns the puppet notice lines found in +input+ (or, when nil, in the
  # last command's output), or nil when none are present ('36m' is the notice
  # color code).
  def get_puppet_notices(input = nil)
    str     = input.nil? ? self.get_output() : input
    notices = str.scan(/36mnotice:.*/)

    notices.empty? ? nil : notices
  end

  # TODO parse into a hash that can be passed to the validate_* methods
  #
  # Accepts either a catalog JSON string or an already-parsed Hash, validates
  # the data/classes and data/resources keys, and gathers both collections.
  def parse_catalog(catalog)
    classes   = nil
    resources = nil
    results   = nil

    # support either JSON or already parsed Hash
    if catalog.is_a?(String)
      begin
        # The original discarded the parse result, leaving +catalog+ a String
        # and crashing on the has_key? calls below; capture it.
        catalog = JSON.parse!(catalog)
      rescue
        raise InternalError.new(sprintf('unable to parse catalog[%s] as JSON', catalog))
      end
    end

    unless catalog.has_key?('data') and catalog['data'].has_key?('classes')
      raise InternalError.new(sprintf('catalog does not contain a classes key[%s]', catalog))
    end
    classes = catalog['data']['classes']

    unless catalog.has_key?('data') and catalog['data'].has_key?('resources')
      raise InternalError.new(sprintf('catalog does not contain a resources key[%s]', catalog))
    end
    resources = catalog['data']['resources']

    resources.each do |r|
      # first array element -- looks like this element comes before each set of resources for the class in question
      # {"exported"=>false, "type"=>"Class", "title"=>"P4users", "tags"=>["class", "p4users", "baseclass", "node", "default"]}
      # file resource
      # {"exported"=>false, "file"=>"/etc/puppet/modules/p4users/manifests/init.pp", "parameters"=>{"owner"=>"root", "group"=>"root", "ensure"=>"present", "source"=>"puppet:///modules/p4users/p4"}, "line"=>34, "type"=>"File", "title"=>"/usr/local/bin/p4", "tags"=>["file", "class", "p4users", "baseclass", "node", "default"]}
      # stage resource
      # {"exported"=>false, "parameters"=>{"name"=>"main"}, "type"=>"Stage", "title"=>"main", "tags"=>["stage"]}
      # node resource
      # {"exported"=>false, "type"=>"Node", "title"=>"default", "tags"=>["node", "default", "class"]}
      # file resource with a stage
      # {"exported"=>false, "file"=>"/etc/puppet/manifests/templates.pp", "parameters"=>{"before"=>"Stage[main]"}, "line"=>18, "type"=>"Stage", "title"=>"first", "tags"=>["stage", "first", "class"]}
    end

    # NOTE(review): +results+ is never populated (see the TODO above), so this
    # method currently always returns nil; +classes+ and +resources+ are
    # gathered but unused pending the planned hash output.
    results
  end

  # Kicks off a single puppet agent run on the VM.
  # TODO should we make this more flexible?
  def run_puppet
    self.run('/sbin/service puppet once -t')
  end
end
module Rtlize
  # Originally ported from http://github.com/ded/R2
  #
  # Rewrites left-to-right CSS into its right-to-left equivalent: property
  # names are mirrored (margin-left <-> margin-right, ...) and direction-
  # sensitive values (floats, text-align, 4-value shorthands, shadows) are
  # flipped. Rules whose selector contains ".rtl" are left untouched, and
  # /*!= begin(no-rtl) */ ... /*!= end(no-rtl) */ comment markers disable
  # transformation for a span of rules.
  class RTLizer
    # Property-name mirror table. The unprefixed corner-radius properties are
    # spelled 'border-{top,bottom}-{left,right}-radius'; the previous
    # 'border-radius-bottomleft'-style entries were not valid CSS (only the
    # '-moz-' prefixed legacy forms use that spelling) and have been removed.
    @property_map = {
      'margin-left' => 'margin-right',
      'margin-right' => 'margin-left',
      'padding-left' => 'padding-right',
      'padding-right' => 'padding-left',
      'border-left' => 'border-right',
      'border-right' => 'border-left',
      'border-left-width' => 'border-right-width',
      'border-right-width' => 'border-left-width',
      'border-left-style' => 'border-right-style',
      'border-right-style' => 'border-left-style',
      'border-left-color' => 'border-right-color',
      'border-right-color' => 'border-left-color',
      'border-bottom-right-radius' => 'border-bottom-left-radius',
      'border-bottom-left-radius' => 'border-bottom-right-radius',
      '-webkit-border-bottom-right-radius' => '-webkit-border-bottom-left-radius',
      '-webkit-border-bottom-left-radius' => '-webkit-border-bottom-right-radius',
      '-moz-border-radius-bottomright' => '-moz-border-radius-bottomleft',
      '-moz-border-radius-bottomleft' => '-moz-border-radius-bottomright',
      'border-top-right-radius' => 'border-top-left-radius',
      'border-top-left-radius' => 'border-top-right-radius',
      '-webkit-border-top-right-radius' => '-webkit-border-top-left-radius',
      '-webkit-border-top-left-radius' => '-webkit-border-top-right-radius',
      '-moz-border-radius-topright' => '-moz-border-radius-topleft',
      '-moz-border-radius-topleft' => '-moz-border-radius-topright',
      'left' => 'right',
      'right' => 'left',
    }
    # Value transformation table: maps a property name to the class method
    # that knows how to flip that property's value.
    @value_map = {
      'border-color' => :quad,
      'border-style' => :quad,
      'border-width' => :quad,
      'padding' => :quad,
      'margin' => :quad,
      'text-align' => :rtltr,
      'float' => :rtltr,
      'clear' => :rtltr,
      'direction' => :direction,
      'border-radius' => :quad_radius,
      '-webkit-border-radius' => :quad_radius,
      '-moz-border-radius' => :quad_radius,
      'box-shadow' => :box_shadow,
      '-webkit-box-shadow' => :box_shadow,
      '-moz-box-shadow' => :box_shadow,
    }
    class << self
      # Transforms a complete stylesheet, rule by rule. The no_invert flag
      # persists across rules so the begin/end(no-rtl) markers can bracket
      # several rules.
      def transform(css)
        no_invert = false
        css.gsub(/([^{]+\{[^}]+\})+?/) do |rule|
          # Break rule into selector|declaration parts
          parts = rule.match(/([^{]+)\{([^}]+)/)
          if parts && !parts[1].gsub(/\/\*[\s\S]+?\*\//, '').match(/\.rtl\b/) # Don't transform rules that include the selector ".rtl" (remove comments first)
            selector, declarations = parts[1..2]
            # The CSS comment must start with "!" in order to be considered as important by the YUI compressor, otherwise, it will be removed by the asset pipeline before reaching this processor.
            if selector.match(/\/\*!= begin\(no-rtl\) \*\//)
              no_invert = true
              # selector.gsub!(/\/\*!= begin\(no-rtl\) \*\//, '')
            elsif selector.match(/\/\*!= end\(no-rtl\) \*\//)
              no_invert = false
              # selector.gsub!(/\/\*!= end\(no-rtl\) \*\//, '')
            end
            selector + '{' + self.transform_declarations(declarations, no_invert) + '}'
          else
            rule
          end
        end
      end

      # Transforms a single rule body. The (?!base64) lookahead keeps data-URI
      # payloads (which contain ';base64') from being split apart.
      def transform_declarations(declarations, no_invert = false)
        declarations.split(/;(?!base64)/).map do |decl|
          m = decl.match(/([^:]+):(.+)$/)
          if m && !no_invert
            prop, val = m[1..2]
            # Get the property, without comments or spaces, to be able to find it.
            prop_name = prop.strip.split(' ').last
            if @property_map[prop_name]
              prop = prop.sub(prop_name, @property_map[prop_name])
            end
            if @value_map[prop_name]
              val = val.sub(val.strip, self.send(@value_map[prop_name], val.strip))
            end
            prop + ':' + val + ';'
          elsif m
            decl + ';'
          else
            decl
          end
        end.join
      end

      # 'left' <-> 'right'; any other value passes through unchanged.
      def rtltr(v)
        v == 'left' ? 'right' : v == 'right' ? 'left' : v
      end

      # 'ltr' <-> 'rtl'; any other value passes through unchanged.
      def direction(v)
        v == 'ltr' ? 'rtl' : v == 'rtl' ? 'ltr' : v
      end

      # Swaps the left/right components of a 4-value shorthand.
      def quad(v)
        # 1px 2px 3px 4px => 1px 4px 3px 2px
        m = v.split(/\s+/)
        m.length == 4 ? [m[0], m[3], m[2], m[1]].join(' ') : v
      end

      # Mirrors a 4-value border-radius shorthand corner-by-corner.
      def quad_radius(v)
        # 1px 2px 3px 4px => 1px 2px 4px 3px
        # since border-radius: top-left top-right bottom-right bottom-left
        # will be border-radius: top-right top-left bottom-left bottom-right
        m = v.split(/\s+/)
        m.length == 4 ? [m[1], m[0], m[3], m[2]].join(' ') : v
      end

      # Negates the first (horizontal offset) length of each comma-separated
      # shadow, leaving colors and the remaining lengths untouched.
      def box_shadow(v)
        found = false
        v.gsub(/rgba\([^)]*\)|,|#\S*|[-0-9px]+/) do |m|
          if m == ","
            # this property can take several comma-seperated values, we account for that, and transform each one correctly.
            found = false
            m
          elsif m.match(/rgba\([^)]*\)|#\S*/) || found
            m
          else
            found = true
            m.to_i.zero? ? m : m.gsub(m.to_i.to_s, (-1 * m.to_i).to_s)
          end
        end
      end
    end
  end
end
Remove incorrect border-radius properties
module Rtlize
  # Originally ported from http://github.com/ded/R2
  #
  # Rewrites left-to-right CSS into its right-to-left equivalent: property
  # names are mirrored (margin-left <-> margin-right, ...) and direction-
  # sensitive values (floats, text-align, 4-value shorthands, shadows) are
  # flipped. Rules whose selector contains ".rtl" are left untouched, and
  # /*!= begin(no-rtl) */ ... /*!= end(no-rtl) */ comment markers disable
  # transformation for a span of rules.
  class RTLizer
    # Property-name mirror table: keys are replaced by their values (and
    # vice versa, since every pair appears in both directions).
    @property_map = {
      'margin-left' => 'margin-right',
      'margin-right' => 'margin-left',
      'padding-left' => 'padding-right',
      'padding-right' => 'padding-left',
      'border-left' => 'border-right',
      'border-right' => 'border-left',
      'border-left-width' => 'border-right-width',
      'border-right-width' => 'border-left-width',
      'border-left-style' => 'border-right-style',
      'border-right-style' => 'border-left-style',
      'border-left-color' => 'border-right-color',
      'border-right-color' => 'border-left-color',
      'border-bottom-right-radius' => 'border-bottom-left-radius',
      'border-bottom-left-radius' => 'border-bottom-right-radius',
      '-webkit-border-bottom-right-radius' => '-webkit-border-bottom-left-radius',
      '-webkit-border-bottom-left-radius' => '-webkit-border-bottom-right-radius',
      '-moz-border-radius-bottomright' => '-moz-border-radius-bottomleft',
      '-moz-border-radius-bottomleft' => '-moz-border-radius-bottomright',
      'border-top-right-radius' => 'border-top-left-radius',
      'border-top-left-radius' => 'border-top-right-radius',
      '-webkit-border-top-right-radius' => '-webkit-border-top-left-radius',
      '-webkit-border-top-left-radius' => '-webkit-border-top-right-radius',
      '-moz-border-radius-topright' => '-moz-border-radius-topleft',
      '-moz-border-radius-topleft' => '-moz-border-radius-topright',
      'left' => 'right',
      'right' => 'left',
    }
    # Value transformation table: maps a property name to the class method
    # that knows how to flip that property's value.
    @value_map = {
      'border-color' => :quad,
      'border-style' => :quad,
      'border-width' => :quad,
      'padding' => :quad,
      'margin' => :quad,
      'text-align' => :rtltr,
      'float' => :rtltr,
      'clear' => :rtltr,
      'direction' => :direction,
      'border-radius' => :quad_radius,
      '-webkit-border-radius' => :quad_radius,
      '-moz-border-radius' => :quad_radius,
      'box-shadow' => :box_shadow,
      '-webkit-box-shadow' => :box_shadow,
      '-moz-box-shadow' => :box_shadow,
    }
    class << self
      # Transforms a complete stylesheet, rule by rule. Note that no_invert
      # persists across rules so the begin/end(no-rtl) markers can bracket
      # several consecutive rules.
      def transform(css)
        no_invert = false
        css.gsub(/([^{]+\{[^}]+\})+?/) do |rule|
          # Break rule into selector|declaration parts
          parts = rule.match(/([^{]+)\{([^}]+)/)
          if parts && !parts[1].gsub(/\/\*[\s\S]+?\*\//, '').match(/\.rtl\b/) # Don't transform rules that include the selector ".rtl" (remove comments first)
            selector, declarations = parts[1..2]
            # The CSS comment must start with "!" in order to be considered as important by the YUI compressor, otherwise, it will be removed by the asset pipeline before reaching this processor.
            if selector.match(/\/\*!= begin\(no-rtl\) \*\//)
              no_invert = true
              # selector.gsub!(/\/\*!= begin\(no-rtl\) \*\//, '')
            elsif selector.match(/\/\*!= end\(no-rtl\) \*\//)
              no_invert = false
              # selector.gsub!(/\/\*!= end\(no-rtl\) \*\//, '')
            end
            selector + '{' + self.transform_declarations(declarations, no_invert) + '}'
          else
            rule
          end
        end
      end
      # Transforms a single rule body. The (?!base64) lookahead keeps data-URI
      # payloads (which contain ';base64') from being split apart.
      def transform_declarations(declarations, no_invert = false)
        declarations.split(/;(?!base64)/).map do |decl|
          m = decl.match(/([^:]+):(.+)$/)
          if m && !no_invert
            prop, val = m[1..2]
            # Get the property, without comments or spaces, to be able to find it.
            prop_name = prop.strip.split(' ').last
            if @property_map[prop_name]
              prop = prop.sub(prop_name, @property_map[prop_name])
            end
            if @value_map[prop_name]
              val = val.sub(val.strip, self.send(@value_map[prop_name], val.strip))
            end
            prop + ':' + val + ';'
          elsif m
            decl + ';'
          else
            decl
          end
        end.join
      end
      # 'left' <-> 'right'; any other value passes through unchanged.
      def rtltr(v)
        v == 'left' ? 'right' : v == 'right' ? 'left' : v
      end
      # 'ltr' <-> 'rtl'; any other value passes through unchanged.
      def direction(v)
        v == 'ltr' ? 'rtl' : v == 'rtl' ? 'ltr' : v
      end
      # Swaps the left/right components of a 4-value shorthand.
      def quad(v)
        # 1px 2px 3px 4px => 1px 4px 3px 2px
        m = v.split(/\s+/)
        m.length == 4 ? [m[0], m[3], m[2], m[1]].join(' ') : v
      end
      # Mirrors a 4-value border-radius shorthand corner-by-corner.
      def quad_radius(v)
        # 1px 2px 3px 4px => 1px 2px 4px 3px
        # since border-radius: top-left top-right bottom-right bottom-left
        # will be border-radius: top-right top-left bottom-left bottom-right
        m = v.split(/\s+/)
        m.length == 4 ? [m[1], m[0], m[3], m[2]].join(' ') : v
      end
      # Negates the first (horizontal offset) length of each comma-separated
      # shadow, leaving colors and the remaining lengths untouched.
      def box_shadow(v)
        found = false
        v.gsub(/rgba\([^)]*\)|,|#\S*|[-0-9px]+/) do |m|
          if m == ","
            # this property can take several comma-seperated values, we account for that, and transform each one correctly.
            found = false
            m
          elsif m.match(/rgba\([^)]*\)|#\S*/) || found
            m
          else
            found = true
            m.to_i.zero? ? m : m.gsub(m.to_i.to_s, (-1 * m.to_i).to_s)
          end
        end
      end
    end
  end
end
module Rubarb
  # Gem version string (MAJOR.MINOR.PATCH).
  VERSION = "0.0.1"
end
Fixed the version declaration — should have checked that sooner.
class Rubarb
  # Gem version string (MAJOR.MINOR.PATCH). Declared on the class itself
  # (rather than a wrapping module) in this revision.
  VERSION = "0.0.1"
end
|
require File.dirname(__FILE__) + '/spec'
class Object
  class << self
    # Lookup missing generators using const_missing. This allows any
    # generator to reference another without having to know its location:
    # RubyGems, ~/.rubigen/generators, and APP_ROOT/generators.
    def lookup_missing_generator(class_id)
      if md = /(.+)Generator$/.match(class_id.to_s)
        # e.g. FooBarGenerator -> "foo_bar"; demodulize/underscore are
        # ActiveSupport-style core extensions assumed to be loaded elsewhere.
        name = md.captures.first.demodulize.underscore
        RubiGen::Base.active.lookup(name).klass
      else
        # Not a *Generator constant: defer to the original const_missing.
        const_missing_before_generators(class_id)
      end
    end
    # Install the hook at most once: keep the original const_missing reachable
    # under a new name, then point const_missing at the generator lookup.
    unless respond_to?(:const_missing_before_generators)
      alias_method :const_missing_before_generators, :const_missing
      alias_method :const_missing, :lookup_missing_generator
    end
  end
end
# User home directory lookup adapted from RubyGems.
def Dir.user_home
  # Resolution order: HOME, then the Windows USERPROFILE and
  # HOMEDRIVE+HOMEPATH pairs, finally falling back to expanding '~'.
  if ENV['HOME']
    ENV['HOME']
  elsif ENV['USERPROFILE']
    ENV['USERPROFILE']
  elsif ENV['HOMEDRIVE'] and ENV['HOMEPATH']
    # HOMEDRIVE already ends in a colon (e.g. "C:"); the previous
    # "#{HOMEDRIVE}:#{HOMEPATH}" form produced invalid paths like
    # "C::\Users\x". Concatenate directly instead.
    "#{ENV['HOMEDRIVE']}#{ENV['HOMEPATH']}"
  else
    File.expand_path '~'
  end
end
module RubiGen
  # Generator lookup is managed by a list of sources which return specs
  # describing where to find and how to create generators. This module
  # provides class methods for manipulating the source list and looking up
  # generator specs, and an #instance wrapper for quickly instantiating
  # generators by name.
  #
  # A spec is not a generator: it's a description of where to find
  # the generator and how to create it. A source is anything that
  # yields generators from #each. PathSource and GemGeneratorSource are provided.
  module Lookup
    # Hook: including classes gain the ClassMethods below and start out with
    # the component source list.
    def self.included(base)
      base.extend(ClassMethods)
      base.use_component_sources!
    end
    # Convenience method to instantiate another generator.
    def instance(generator_name, args, runtime_options = {})
      self.class.instance(generator_name, args, runtime_options)
    end
    module ClassMethods
      # The list of sources where we look, in order, for generators.
      # NOTE(review): read/write_inheritable_attribute are Rails-2-era
      # class-inheritable attribute helpers; they must be provided by the
      # surrounding project for this to work.
      def sources
        if read_inheritable_attribute(:sources).blank?
          if superclass == RubiGen::Base
            # Inherit the superclass's sources minus any application sources.
            superclass_sources = superclass.sources
            diff = superclass_sources.inject([]) do |mem, source|
              found = false
              application_sources.each { |app_source| found ||= true if app_source == source}
              mem << source unless found
              mem
            end
            write_inheritable_attribute(:sources, diff)
          end
          use_component_sources! if read_inheritable_attribute(:sources).blank?
        end
        read_inheritable_attribute(:sources)
      end
      # Add a source to the end of the list.
      def append_sources(*args)
        sources.concat(args.flatten)
        invalidate_cache!
      end
      # Add a source to the beginning of the list.
      def prepend_sources(*args)
        # Snapshot the current list before resetting, then write it back
        # behind the new entries.
        sources = self.sources
        reset_sources
        write_inheritable_array(:sources, args.flatten + sources)
        invalidate_cache!
      end
      # Reset the source list.
      def reset_sources
        write_inheritable_attribute(:sources, [])
        invalidate_cache!
      end
      # Use application generators (app, ?).
      def use_application_sources!(*filters)
        reset_sources
        write_inheritable_attribute(:sources, application_sources(filters))
      end
      # Builds the application source list: the builtin app_generators path
      # plus the filtered user/gem sources.
      def application_sources(filters = [])
        filters.unshift 'app'
        app_sources = []
        app_sources << PathSource.new(:builtin, File.join(File.dirname(__FILE__), %w[.. .. app_generators]))
        app_sources << filtered_sources(filters)
        app_sources.flatten
      end
      # Use component generators (test_unit, etc).
      # 1. Current application. If APP_ROOT is defined we know we're
      # generating in the context of this application, so search
      # APP_ROOT/generators.
      # 2. User home directory. Search ~/.rubigen/generators.
      # 3. RubyGems. Search for gems containing /{scope}_generators folder.
      # 4. Builtins. None currently.
      #
      # Search can be filtered by passing one or more prefixes.
      # e.g. use_component_sources!(:rubygems) means it will also search in the following
      # folders:
      # 5. User home directory. Search ~/.rubigen/rubygems_generators.
      # 6. RubyGems. Search for gems containing /rubygems_generators folder.
      def use_component_sources!(*filters)
        reset_sources
        new_sources = []
        if defined? ::APP_ROOT
          new_sources << PathSource.new(:root, "#{::APP_ROOT}/generators")
          new_sources << PathSource.new(:vendor, "#{::APP_ROOT}/vendor/generators")
          new_sources << PathSource.new(:plugins, "#{::APP_ROOT}/vendor/plugins/*/**/generators")
        end
        new_sources << filtered_sources(filters)
        write_inheritable_attribute(:sources, new_sources.flatten)
      end
      # Sources common to application and component lookup: the user's
      # ~/.rubigen directory and (when RubyGems is loaded) installed gems.
      def filtered_sources(filters)
        new_sources = []
        new_sources << PathFilteredSource.new(:user, "#{Dir.user_home}/.rubigen/", *filters)
        if Object.const_defined?(:Gem)
          new_sources << GemPathSource.new(*filters)
        end
        new_sources
      end
      # Lookup knows how to find generators' Specs from a list of Sources.
      # Searches the sources, in order, for the first matching name.
      def lookup(generator_name)
        @found ||= {}
        generator_name = generator_name.to_s.downcase
        @found[generator_name] ||= cache.find { |spec| spec.name == generator_name }
        unless @found[generator_name]
          # Fuzzy fallback: "fb" matches "foo_bar" via the f.*?b.*? pattern.
          # NOTE(review): "#{chars}" interpolates an Array; this builds the
          # intended pattern only under Ruby 1.8's Array#to_s (join)
          # semantics -- on 1.9+ it yields an inspect string. Confirm the
          # targeted Ruby version before relying on this branch.
          chars = generator_name.scan(/./).map{|c|"#{c}.*?"}
          rx = /^#{chars}$/
          gns = cache.select {|spec| spec.name =~ rx }
          @found[generator_name] ||= gns.first if gns.length == 1
          raise GeneratorError, "Pattern '#{generator_name}' matches more than one generator: #{gns.map{|sp|sp.name}.join(', ')}" if gns.length > 1
        end
        @found[generator_name] or raise GeneratorError, "Couldn't find '#{generator_name}' generator"
      end
      # Convenience method to lookup and instantiate a generator.
      def instance(generator_name, args = [], runtime_options = {})
        lookup(generator_name).klass.new(args, full_options(runtime_options))
      end
      private
      # Lookup and cache every generator from the source list.
      def cache
        @cache ||= sources.inject([]) { |cache, source| cache + source.to_a }
      end
      # Clear the cache whenever the source list changes.
      def invalidate_cache!
        @cache = nil
      end
    end
  end
  # Sources enumerate (yield from #each) generator specs which describe
  # where to find and how to create generators. Enumerable is mixed in so,
  # for example, source.collect will retrieve every generator.
  # Sources may be assigned a label to distinguish them.
  class Source
    include Enumerable
    attr_reader :label
    def initialize(label)
      @label = label
    end
    # The each method must be implemented in subclasses.
    # The base implementation raises an error.
    def each
      raise NotImplementedError
    end
    # Return a convenient sorted list of all generator names.
    def names(filter = nil)
      inject([]) do |mem, spec|
        case filter
        when :visible
          mem << spec.name if spec.visible?
        end
        mem
      end.sort
    end
  end
  # PathSource looks for generators in a filesystem directory.
  class PathSource < Source
    attr_reader :path
    def initialize(label, path)
      super label
      @path = File.expand_path path
    end
    # Yield each eligible subdirectory.
    def each
      Dir["#{path}/[a-z]*"].each do |dir|
        if File.directory?(dir)
          yield Spec.new(File.basename(dir), dir, label)
        end
      end
    end
    # Two path sources are equal when they point at the same directory.
    def ==(source)
      self.class == source.class && path == source.path
    end
  end
  # PathSource whose directory name is expanded from a list of filter
  # prefixes, e.g. ~/.rubigen/{rubygems_,}generators.
  class PathFilteredSource < PathSource
    attr_reader :filters
    def initialize(label, path, *filters)
      super label, File.join(path, "#{filter_str(filters)}generators")
    end
    # Builds the "{foo_,bar_,}" glob fragment from the filter list (empty
    # string when there are no filters).
    def filter_str(filters)
      @filters = filters.first.is_a?(Array) ? filters.first : filters
      return "" if @filters.blank?
      filter_str = @filters.map {|filter| "#{filter}_"}.join(",")
      filter_str += ","
      "{#{filter_str}}"
    end
    def ==(source)
      self.class == source.class && path == source.path && filters == source.filters && label == source.label
    end
  end
  # Base class for gem-backed sources; always labelled :RubyGems.
  class AbstractGemSource < Source
    def initialize
      super :RubyGems
    end
  end
  # GemPathSource looks for generators within any RubyGem's /{filter_}generators/**/<generator_name>_generator.rb file.
  class GemPathSource < AbstractGemSource
    attr_accessor :filters
    def initialize(*filters)
      super()
      @filters = filters
    end
    # Yield each generator within rails_generator subdirectories.
    def each
      generator_full_paths.each do |generator|
        yield Spec.new(File.basename(generator).sub(/_generator.rb$/, ''), File.dirname(generator), label)
      end
    end
    def ==(source)
      self.class == source.class && filters == source.filters
    end
    private
    # Reduces Gem::cache (a legacy pre-Gem::Specification API) down to the
    # newest version of each installed gem, then collects every matching
    # *_generator.rb file from those gems.
    def generator_full_paths
      @generator_full_paths ||=
        Gem::cache.inject({}) do |latest, name_gem|
          name, gem = name_gem
          hem = latest[gem.name]
          latest[gem.name] = gem if hem.nil? or gem.version > hem.version
          latest
        end.values.inject([]) do |mem, gem|
          Dir[gem.full_gem_path + "/#{filter_str}generators/**/*_generator.rb"].each do |generator|
            mem << generator
          end
          mem
        end.reverse
    end
    # NOTE(review): near-duplicate of PathFilteredSource#filter_str --
    # candidate for consolidation.
    def filter_str
      @filters = filters.first.is_a?(Array) ? filters.first : filters
      return "" if filters.blank?
      filter_str = filters.map {|filter| "#{filter}_"}.join(",")
      filter_str += ","
      "{#{filter_str}}"
    end
  end
end
Use #active in more places to ensure the correct RubiGen::Base class or subclass is used
require File.dirname(__FILE__) + '/spec'
class Object
  class << self
    # Lookup missing generators using const_missing. This allows any
    # generator to reference another without having to know its location:
    # RubyGems, ~/.rubigen/generators, and APP_ROOT/generators.
    def lookup_missing_generator(class_id)
      if md = /(.+)Generator$/.match(class_id.to_s)
        # e.g. FooBarGenerator -> "foo_bar"; demodulize/underscore are
        # ActiveSupport-style core extensions assumed to be loaded elsewhere.
        name = md.captures.first.demodulize.underscore
        RubiGen::Base.active.lookup(name).klass
      else
        # Not a *Generator constant: defer to the original const_missing.
        const_missing_before_generators(class_id)
      end
    end
    # Install the hook at most once: keep the original const_missing reachable
    # under a new name, then point const_missing at the generator lookup.
    unless respond_to?(:const_missing_before_generators)
      alias_method :const_missing_before_generators, :const_missing
      alias_method :const_missing, :lookup_missing_generator
    end
  end
end
# User home directory lookup adapted from RubyGems.
def Dir.user_home
  # Resolution order: HOME, then the Windows USERPROFILE and
  # HOMEDRIVE+HOMEPATH pairs, finally falling back to expanding '~'.
  if ENV['HOME']
    ENV['HOME']
  elsif ENV['USERPROFILE']
    ENV['USERPROFILE']
  elsif ENV['HOMEDRIVE'] and ENV['HOMEPATH']
    # HOMEDRIVE already ends in a colon (e.g. "C:"); the previous
    # "#{HOMEDRIVE}:#{HOMEPATH}" form produced invalid paths like
    # "C::\Users\x". Concatenate directly instead.
    "#{ENV['HOMEDRIVE']}#{ENV['HOMEPATH']}"
  else
    File.expand_path '~'
  end
end
module RubiGen
  # Generator lookup is managed by a list of sources which return specs
  # describing where to find and how to create generators. This module
  # provides class methods for manipulating the source list and looking up
  # generator specs, and an #instance wrapper for quickly instantiating
  # generators by name.
  #
  # A spec is not a generator: it's a description of where to find
  # the generator and how to create it. A source is anything that
  # yields generators from #each. PathSource and GemGeneratorSource are provided.
  module Lookup
    # Hook: including classes gain the ClassMethods below and start out with
    # the component source list.
    def self.included(base)
      base.extend(ClassMethods)
      base.use_component_sources!
    end
    # Convenience method to instantiate another generator.
    # Routes through .active so the currently-active RubiGen::Base subclass
    # performs the lookup.
    def instance(generator_name, args, runtime_options = {})
      self.class.active.instance(generator_name, args, runtime_options)
    end
    module ClassMethods
      # The list of sources where we look, in order, for generators.
      # NOTE(review): read/write_inheritable_attribute are Rails-2-era
      # class-inheritable attribute helpers; they must be provided by the
      # surrounding project for this to work.
      def sources
        if read_inheritable_attribute(:sources).blank?
          if superclass == RubiGen::Base
            # Inherit the superclass's sources minus any application sources.
            superclass_sources = superclass.sources
            diff = superclass_sources.inject([]) do |mem, source|
              found = false
              application_sources.each { |app_source| found ||= true if app_source == source}
              mem << source unless found
              mem
            end
            write_inheritable_attribute(:sources, diff)
          end
          use_component_sources! if read_inheritable_attribute(:sources).blank?
        end
        read_inheritable_attribute(:sources)
      end
      # Add a source to the end of the list.
      def append_sources(*args)
        sources.concat(args.flatten)
        invalidate_cache!
      end
      # Add a source to the beginning of the list.
      def prepend_sources(*args)
        # Snapshot the current list before resetting, then write it back
        # behind the new entries.
        sources = self.sources
        reset_sources
        write_inheritable_array(:sources, args.flatten + sources)
        invalidate_cache!
      end
      # Reset the source list.
      def reset_sources
        write_inheritable_attribute(:sources, [])
        invalidate_cache!
      end
      # Use application generators (app, ?).
      def use_application_sources!(*filters)
        reset_sources
        write_inheritable_attribute(:sources, application_sources(filters))
      end
      # Builds the application source list: the builtin app_generators path
      # plus the filtered user/gem sources.
      def application_sources(filters = [])
        filters.unshift 'app'
        app_sources = []
        app_sources << PathSource.new(:builtin, File.join(File.dirname(__FILE__), %w[.. .. app_generators]))
        app_sources << filtered_sources(filters)
        app_sources.flatten
      end
      # Use component generators (test_unit, etc).
      # 1. Current application. If APP_ROOT is defined we know we're
      # generating in the context of this application, so search
      # APP_ROOT/generators.
      # 2. User home directory. Search ~/.rubigen/generators.
      # 3. RubyGems. Search for gems containing /{scope}_generators folder.
      # 4. Builtins. None currently.
      #
      # Search can be filtered by passing one or more prefixes.
      # e.g. use_component_sources!(:rubygems) means it will also search in the following
      # folders:
      # 5. User home directory. Search ~/.rubigen/rubygems_generators.
      # 6. RubyGems. Search for gems containing /rubygems_generators folder.
      def use_component_sources!(*filters)
        reset_sources
        new_sources = []
        if defined? ::APP_ROOT
          new_sources << PathSource.new(:root, "#{::APP_ROOT}/generators")
          new_sources << PathSource.new(:vendor, "#{::APP_ROOT}/vendor/generators")
          new_sources << PathSource.new(:plugins, "#{::APP_ROOT}/vendor/plugins/*/**/generators")
        end
        new_sources << filtered_sources(filters)
        write_inheritable_attribute(:sources, new_sources.flatten)
      end
      # Sources common to application and component lookup: the user's
      # ~/.rubigen directory and (when RubyGems is loaded) installed gems.
      def filtered_sources(filters)
        new_sources = []
        new_sources << PathFilteredSource.new(:user, "#{Dir.user_home}/.rubigen/", *filters)
        if Object.const_defined?(:Gem)
          new_sources << GemPathSource.new(*filters)
        end
        new_sources
      end
      # Lookup knows how to find generators' Specs from a list of Sources.
      # Searches the sources, in order, for the first matching name.
      def lookup(generator_name)
        @found ||= {}
        generator_name = generator_name.to_s.downcase
        @found[generator_name] ||= cache.find { |spec| spec.name == generator_name }
        unless @found[generator_name]
          # Fuzzy fallback: "fb" matches "foo_bar" via the f.*?b.*? pattern.
          # NOTE(review): "#{chars}" interpolates an Array; this builds the
          # intended pattern only under Ruby 1.8's Array#to_s (join)
          # semantics -- on 1.9+ it yields an inspect string. Confirm the
          # targeted Ruby version before relying on this branch.
          chars = generator_name.scan(/./).map{|c|"#{c}.*?"}
          rx = /^#{chars}$/
          gns = cache.select {|spec| spec.name =~ rx }
          @found[generator_name] ||= gns.first if gns.length == 1
          raise GeneratorError, "Pattern '#{generator_name}' matches more than one generator: #{gns.map{|sp|sp.name}.join(', ')}" if gns.length > 1
        end
        @found[generator_name] or raise GeneratorError, "Couldn't find '#{generator_name}' generator"
      end
      # Convenience method to lookup and instantiate a generator.
      # Delegates the lookup to .active (assumed defined on RubiGen::Base
      # elsewhere) so subclasses resolve against the correct source list.
      def instance(generator_name, args = [], runtime_options = {})
        active.lookup(generator_name).klass.new(args, full_options(runtime_options))
      end
      private
      # Lookup and cache every generator from the source list.
      def cache
        @cache ||= sources.inject([]) { |cache, source| cache + source.to_a }
      end
      # Clear the cache whenever the source list changes.
      def invalidate_cache!
        @cache = nil
      end
    end
  end
  # Sources enumerate (yield from #each) generator specs which describe
  # where to find and how to create generators. Enumerable is mixed in so,
  # for example, source.collect will retrieve every generator.
  # Sources may be assigned a label to distinguish them.
  class Source
    include Enumerable
    attr_reader :label
    def initialize(label)
      @label = label
    end
    # The each method must be implemented in subclasses.
    # The base implementation raises an error.
    def each
      raise NotImplementedError
    end
    # Return a convenient sorted list of all generator names.
    def names(filter = nil)
      inject([]) do |mem, spec|
        case filter
        when :visible
          mem << spec.name if spec.visible?
        end
        mem
      end.sort
    end
  end
  # PathSource looks for generators in a filesystem directory.
  class PathSource < Source
    attr_reader :path
    def initialize(label, path)
      super label
      @path = File.expand_path path
    end
    # Yield each eligible subdirectory.
    def each
      Dir["#{path}/[a-z]*"].each do |dir|
        if File.directory?(dir)
          yield Spec.new(File.basename(dir), dir, label)
        end
      end
    end
    # Two path sources are equal when they point at the same directory.
    def ==(source)
      self.class == source.class && path == source.path
    end
  end
  # PathSource whose directory name is expanded from a list of filter
  # prefixes, e.g. ~/.rubigen/{rubygems_,}generators.
  class PathFilteredSource < PathSource
    attr_reader :filters
    def initialize(label, path, *filters)
      super label, File.join(path, "#{filter_str(filters)}generators")
    end
    # Builds the "{foo_,bar_,}" glob fragment from the filter list (empty
    # string when there are no filters).
    def filter_str(filters)
      @filters = filters.first.is_a?(Array) ? filters.first : filters
      return "" if @filters.blank?
      filter_str = @filters.map {|filter| "#{filter}_"}.join(",")
      filter_str += ","
      "{#{filter_str}}"
    end
    def ==(source)
      self.class == source.class && path == source.path && filters == source.filters && label == source.label
    end
  end
  # Base class for gem-backed sources; always labelled :RubyGems.
  class AbstractGemSource < Source
    def initialize
      super :RubyGems
    end
  end
  # GemPathSource looks for generators within any RubyGem's /{filter_}generators/**/<generator_name>_generator.rb file.
  class GemPathSource < AbstractGemSource
    attr_accessor :filters
    def initialize(*filters)
      super()
      @filters = filters
    end
    # Yield each generator within rails_generator subdirectories.
    def each
      generator_full_paths.each do |generator|
        yield Spec.new(File.basename(generator).sub(/_generator.rb$/, ''), File.dirname(generator), label)
      end
    end
    def ==(source)
      self.class == source.class && filters == source.filters
    end
    private
    # Reduces Gem::cache (a legacy pre-Gem::Specification API) down to the
    # newest version of each installed gem, then collects every matching
    # *_generator.rb file from those gems.
    def generator_full_paths
      @generator_full_paths ||=
        Gem::cache.inject({}) do |latest, name_gem|
          name, gem = name_gem
          hem = latest[gem.name]
          latest[gem.name] = gem if hem.nil? or gem.version > hem.version
          latest
        end.values.inject([]) do |mem, gem|
          Dir[gem.full_gem_path + "/#{filter_str}generators/**/*_generator.rb"].each do |generator|
            mem << generator
          end
          mem
        end.reverse
    end
    # NOTE(review): near-duplicate of PathFilteredSource#filter_str --
    # candidate for consolidation.
    def filter_str
      @filters = filters.first.is_a?(Array) ? filters.first : filters
      return "" if filters.blank?
      filter_str = filters.map {|filter| "#{filter}_"}.join(",")
      filter_str += ","
      "{#{filter_str}}"
    end
  end
end
|
module Ruboto
  # Gem version; the '.rc.0' suffix marks this as a release candidate.
  VERSION = '0.8.0.rc.0'
end
* Bumped version to 0.8.0 for release.
module Ruboto
  # Gem version string (final 0.8.0 release).
  VERSION = '0.8.0'
end
|
module Rugged
  # Both Version and VERSION point at the same string for compatibility with
  # callers using either constant name; 'b13' marks a beta build.
  Version = VERSION = '0.24.0b13'
end
Update version to b14
module Rugged
  # Both Version and VERSION point at the same string for compatibility with
  # callers using either constant name; 'b14' marks a beta build.
  Version = VERSION = '0.24.0b14'
end
|
# Recipes for using RVM on a server with capistrano.
module Capistrano
  Configuration.instance(true).load do
    # Taken from the capistrano code.
    def _cset(name, *args, &block)
      unless exists?(name)
        set(name, *args, &block)
      end
    end

    # Build the rvm-shell invocation that wraps remote commands so they run
    # under the selected ruby/gemset.
    _cset :rvm_shell do
      shell = File.join(rvm_bin_path, "rvm-shell")
      ruby = fetch(:rvm_ruby_string_evaluated).strip
      case ruby
      when "release_path"
        shell = "rvm_path=#{rvm_path} #{shell} --path '#{release_path}'"
      when "local"
        ruby = (ENV['GEM_HOME'] || "").gsub(/.*\//, "")
        raise "Failed to get ruby version from GEM_HOME. Please make sure rvm is loaded!" if ruby.empty?
        shell = "rvm_path=#{rvm_path} #{shell} '#{ruby}'"
      else
        shell = "rvm_path=#{rvm_path} #{shell} '#{ruby}'" unless ruby.empty?
      end
      shell
    end

    if fetch(:rvm_require_role,nil).nil?
      _cset :default_shell do
        fetch(:rvm_shell)
      end
    else
      # Only wrap commands in rvm-shell on servers that hold :rvm_require_role.
      class << self
        def run(cmd, options={}, &block)
          if options[:eof].nil? && !cmd.include?(sudo)
            options = options.merge(:eof => !block_given?)
          end
          shell = options[:shell]
          options[:shell] = false
          parallel(options) do |session|
            if shell.nil?
              session.when "in?(:#{fetch(:rvm_require_role,nil)})", command_with_shell(cmd, fetch(:rvm_shell)), &block
            end
            session.else command_with_shell(cmd, shell), &block
          end
        end

        def command_with_shell(cmd, shell=nil)
          if shell == false
            cmd
          else
            "#{shell || "sh"} -c '#{cmd.gsub(/'/) { |m| "'\\''" }}'"
          end
        end
      end
    end

    # Let users set the type of their rvm install.
    _cset(:rvm_type, :user)
    # Define rvm_path
    # This is used in the default_shell command to pass the required variable to rvm-shell, allowing
    # rvm to boostrap using the proper path. This is being lost in Capistrano due to the lack of a
    # full environment.
    _cset(:rvm_path) do
      case rvm_type
      when :root, :system
        "/usr/local/rvm"
      when :local, :user, :default
        "$HOME/.rvm/"
      else
        rvm_type.to_s.empty? ? "$HOME/.rvm" : rvm_type.to_s
      end
    end
    # Let users override the rvm_bin_path
    _cset(:rvm_bin_path) do
      case rvm_type
      when :root, :system
        "/usr/local/rvm/bin"
      when :local, :user, :default
        "$HOME/.rvm/bin"
      else
        rvm_type.to_s.empty? ? "#{rvm_path}/bin" : rvm_type.to_s
      end
    end
    set :rvm_ruby_string_evaluated do
      value = fetch(:rvm_ruby_string, :default)
      if value.to_sym == :local
        value = ENV['GEM_HOME'].gsub(/.*\//,"")
      end
      value.to_s
    end
    # Let users configure a path to export/import gemsets
    _cset(:rvm_gemset_path, "#{rvm_path}/gemsets")
    # Use the default ruby on the server, by default :)
    _cset(:rvm_ruby_string, :default)
    # Default sudo state
    _cset(:rvm_install_with_sudo, false)
    # Let users set the install type and shell of their choice.
    _cset(:rvm_install_type, :stable)
    _cset(:rvm_install_shell, :bash)
    # Let users set the (re)install for ruby.
    _cset(:rvm_install_ruby, :install)
    _cset(:rvm_install_ruby_threads, "$(cat /proc/cpuinfo 2>/dev/null | (grep vendor_id || echo 'vendor_id : Other';) | wc -l)")
    # Pass no special params to the ruby build by default
    _cset(:rvm_install_ruby_params, '')
    # Additional rvm packages to install.
    _cset(:rvm_install_pkgs, [])
    # By default system installations add deploying user to rvm group. also try :all
    _cset(:rvm_add_to_group, fetch(:user,"$USER"))

    namespace :rvm do
      # Run +command+ with curl forced into silent/show-error mode, preserving
      # any user .curlrc and restoring the exit status of the command.
      def run_silent_curl(command)
        run <<-EOF.gsub(/[\s\n]+/, ' '), :shell => "#{rvm_install_shell}"
__LAST_STATUS=0;
export CURL_HOME="${TMPDIR:-${HOME}}/.rvm-curl-config.$$";
mkdir ${CURL_HOME}/;
{
[[ -r ${HOME}/.curlrc ]] && cat ${HOME}/.curlrc;
echo "silent";
echo "show-error";
} > $CURL_HOME/.curlrc;
#{command} || __LAST_STATUS=$?;
rm -rf $CURL_HOME;
exit ${__LAST_STATUS}
        EOF
      end

      # FIX: run `sg` through sudo for system-wide installs. Without sudo the
      # group switch fails when the deploy user's session does not yet reflect
      # its rvm group membership (e.g. right after install_rvm added it); the
      # non-system branch is unchanged.
      def with_rvm_group(command)
        case rvm_type
        when :root, :system
          "#{sudo} sg rvm -c \"#{command}\""
        else
          command
        end
      end

      # Define a task restricted to :rvm_require_role when that role is set.
      def rvm_task(name,&block)
        if fetch(:rvm_require_role,nil).nil?
          task name, &block
        else
          task name, :roles => fetch(:rvm_require_role), &block
        end
      end

      desc <<-EOF
Install RVM of the given choice to the server.
By default RVM "stable" is installed, change with:
set :rvm_install_type, :head
By default BASH is used for installer, change with:
set :rvm_install_shell, :zsh
      EOF
      rvm_task :install_rvm do
        command_fetch = "curl -L get.rvm.io"
        command_install = case rvm_type
        when :root, :system
          if fetch(:use_sudo, true) == false && rvm_install_with_sudo == false
            raise "
:use_sudo is set to 'false' but sudo is needed to install rvm_type: #{rvm_type}.
You can enable use_sudo within rvm for use only by this install operation by adding to deploy.rb: set :rvm_install_with_sudo, true
"
          else
            "#{sudo} "
          end
        else
          ''
        end
        command_install << "#{rvm_install_shell} -s #{rvm_install_type} --path #{rvm_path}"
        case rvm_type
        when :root, :system
          command_install << " --add-to-rvm-group #{[rvm_add_to_group].flatten.map(&:to_s).join(",")}"
        end
        run_silent_curl "#{command_fetch} | #{command_install}"
      end

      desc <<-EOF
Install RVM ruby to the server, create gemset if needed.
By default ruby is installed, you can reinstall with:
set :rvm_install_ruby, :reinstall
By default ruby is compiled using all CPU cores, change with:
set :rvm_install_ruby_threads, :reinstall
By default BASH is used for installer, change with:
set :rvm_install_shell, :zsh
      EOF
      rvm_task :install_ruby do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "
ruby can not be installed when using :rvm_ruby_string => :#{ruby}
"
        else
          command_install = ""
          autolibs_flag = fetch(:rvm_autolibs_flag, 2).to_s
          # Autolibs modes that never install requirements themselves.
          autolibs_flag_no_requirements = %w(
            0 disable disabled
            1 read read-only
            2 fail read-fail
          ).include?( autolibs_flag )
          if autolibs_flag_no_requirements
            command_install << with_rvm_group("#{File.join(rvm_bin_path, "rvm")} --autolibs=#{autolibs_flag} #{rvm_install_ruby} #{ruby} -j #{rvm_install_ruby_threads} #{rvm_install_ruby_params}")
          else
            if fetch(:use_sudo, true) == false && rvm_install_with_sudo == false
              raise "
:use_sudo is set to 'false' but sudo is needed to install requirements with autolibs '#{autolibs_flag}'.
You can enable use_sudo within rvm for use only by this ruby install operation by adding to deploy.rb: set :rvm_install_with_sudo, true
"
            else
              command_install << "#{sudo} #{File.join(rvm_bin_path, "rvm")} --autolibs=#{autolibs_flag} requirements #{ruby}"
              command_install << "; "
              command_install << with_rvm_group("#{File.join(rvm_bin_path, "rvm")} --autolibs=1 #{rvm_install_ruby} #{ruby} -j #{rvm_install_ruby_threads} #{rvm_install_ruby_params}")
            end
          end
          if gemset
            command_install << "; "
            command_install << with_rvm_group("#{File.join(rvm_bin_path, "rvm")} #{ruby} do rvm gemset create #{gemset}")
          end
          run_silent_curl command_install
        end
      end

      desc <<-EOF
Install RVM packages to the server.
This must come before the 'rvm:install_ruby' task is called.
The package list is empty by default. Specifiy the packages to install with:
set :rvm_install_pkgs, %w[libyaml curl]
Full list of packages available at https://rvm.io/packages/ or by running 'rvm pkg'.
      EOF
      rvm_task :install_pkgs do
        rvm_install_pkgs.each do |pkg|
          run "#{File.join(rvm_bin_path, "rvm")} pkg install #{pkg}", :shell => "#{rvm_install_shell}"
        end
      end

      desc "Create gemset"
      rvm_task :create_gemset do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "
gemset can not be created when using :rvm_ruby_string => :#{ruby}
"
        else
          if gemset
            run with_rvm_group("#{File.join(rvm_bin_path, "rvm")} #{ruby} do rvm gemset create #{gemset}"), :shell => "#{rvm_install_shell}"
          end
        end
      end

      desc <<-EOF
Import file contents to the current RVM ruby gemset.
The gemset filename must match :rvm_ruby_string.gems and be located in :rvm_gemset_path.
:rvm_gemset_path defaults to :rvm_path/gemsets
The gemset can be created with 'cap rvm:gemset_export'.
      EOF
      rvm_task :import_gemset do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "gemset can not be imported when using :rvm_ruby_string => :#{ruby}"
        else
          if gemset
            run "#{File.join(rvm_bin_path, "rvm-shell")} #{fetch(:rvm_ruby_string_evaluated)} rvm gemset import #{File.join(rvm_gemset_path, "#{fetch(:rvm_ruby_string_evaluated)}.gems")}", :shell => "#{rvm_install_shell}"
          end
        end
      end

      desc <<-EOF
Export the current RVM ruby gemset contents to a file.
The gemset filename will match :rvm_ruby_string.gems and be located in :rvm_gemset_path.
:rvm_gemset_path defaults to :rvm_path/gemsets
The gemset can be imported with 'cap rvm:gemset_import'.
      EOF
      rvm_task :export_gemset do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "gemset can not be imported when using :rvm_ruby_string => :#{ruby}"
        else
          if gemset
            run "#{File.join(rvm_bin_path, "rvm-shell")} #{fetch(:rvm_ruby_string_evaluated)} rvm gemset export > #{File.join(rvm_gemset_path, "#{fetch(:rvm_ruby_string_evaluated)}.gems")}", :shell => "#{rvm_install_shell}"
          end
        end
      end

      desc "Install a gem, 'cap rvm:install_gem GEM=my_gem'."
      rvm_task :install_gem do
        run "#{File.join(rvm_bin_path, "rvm")} #{fetch(:rvm_ruby_string_evaluated)} do gem install #{ENV['GEM']}", :shell => "#{rvm_install_shell}"
      end

      desc "Uninstall a gem, 'cap rvm:uninstall_gem GEM=my_gem'."
      rvm_task :uninstall_gem do
        run "#{File.join(rvm_bin_path, "rvm")} #{fetch(:rvm_ruby_string_evaluated)} do gem uninstall --no-executables #{ENV['GEM']}", :shell => "#{rvm_install_shell}"
      end
    end
  end if Capistrano.const_defined? :Configuration and Capistrano::Configuration.methods.map(&:to_sym).include? :instance
end
# E.g, to use ree and rails 3:
#
# require 'rvm/capistrano'
# set :rvm_ruby_string, "ree@rails3"
#
Prefix the `sg` group-switch command with `sudo` for system-wide installs.
# Recipes for using RVM on a server with capistrano.
module Capistrano
  Configuration.instance(true).load do
    # Taken from the capistrano code.
    def _cset(name, *args, &block)
      unless exists?(name)
        set(name, *args, &block)
      end
    end

    # Build the rvm-shell invocation that wraps remote commands so they run
    # under the selected ruby/gemset.
    _cset :rvm_shell do
      shell = File.join(rvm_bin_path, "rvm-shell")
      ruby = fetch(:rvm_ruby_string_evaluated).strip
      case ruby
      when "release_path"
        shell = "rvm_path=#{rvm_path} #{shell} --path '#{release_path}'"
      when "local"
        ruby = (ENV['GEM_HOME'] || "").gsub(/.*\//, "")
        raise "Failed to get ruby version from GEM_HOME. Please make sure rvm is loaded!" if ruby.empty?
        shell = "rvm_path=#{rvm_path} #{shell} '#{ruby}'"
      else
        shell = "rvm_path=#{rvm_path} #{shell} '#{ruby}'" unless ruby.empty?
      end
      shell
    end

    if fetch(:rvm_require_role,nil).nil?
      _cset :default_shell do
        fetch(:rvm_shell)
      end
    else
      # Only wrap commands in rvm-shell on servers that hold :rvm_require_role.
      class << self
        def run(cmd, options={}, &block)
          if options[:eof].nil? && !cmd.include?(sudo)
            options = options.merge(:eof => !block_given?)
          end
          shell = options[:shell]
          options[:shell] = false
          parallel(options) do |session|
            if shell.nil?
              session.when "in?(:#{fetch(:rvm_require_role,nil)})", command_with_shell(cmd, fetch(:rvm_shell)), &block
            end
            session.else command_with_shell(cmd, shell), &block
          end
        end

        def command_with_shell(cmd, shell=nil)
          if shell == false
            cmd
          else
            "#{shell || "sh"} -c '#{cmd.gsub(/'/) { |m| "'\\''" }}'"
          end
        end
      end
    end

    # Let users set the type of their rvm install.
    _cset(:rvm_type, :user)
    # Define rvm_path
    # This is used in the default_shell command to pass the required variable to rvm-shell, allowing
    # rvm to boostrap using the proper path. This is being lost in Capistrano due to the lack of a
    # full environment.
    _cset(:rvm_path) do
      case rvm_type
      when :root, :system
        "/usr/local/rvm"
      when :local, :user, :default
        "$HOME/.rvm/"
      else
        rvm_type.to_s.empty? ? "$HOME/.rvm" : rvm_type.to_s
      end
    end
    # Let users override the rvm_bin_path
    _cset(:rvm_bin_path) do
      case rvm_type
      when :root, :system
        "/usr/local/rvm/bin"
      when :local, :user, :default
        "$HOME/.rvm/bin"
      else
        rvm_type.to_s.empty? ? "#{rvm_path}/bin" : rvm_type.to_s
      end
    end
    set :rvm_ruby_string_evaluated do
      value = fetch(:rvm_ruby_string, :default)
      if value.to_sym == :local
        value = ENV['GEM_HOME'].gsub(/.*\//,"")
      end
      value.to_s
    end
    # Let users configure a path to export/import gemsets
    _cset(:rvm_gemset_path, "#{rvm_path}/gemsets")
    # Use the default ruby on the server, by default :)
    _cset(:rvm_ruby_string, :default)
    # Default sudo state
    _cset(:rvm_install_with_sudo, false)
    # Let users set the install type and shell of their choice.
    _cset(:rvm_install_type, :stable)
    _cset(:rvm_install_shell, :bash)
    # Let users set the (re)install for ruby.
    _cset(:rvm_install_ruby, :install)
    _cset(:rvm_install_ruby_threads, "$(cat /proc/cpuinfo 2>/dev/null | (grep vendor_id || echo 'vendor_id : Other';) | wc -l)")
    # Pass no special params to the ruby build by default
    _cset(:rvm_install_ruby_params, '')
    # Additional rvm packages to install.
    _cset(:rvm_install_pkgs, [])
    # By default system installations add deploying user to rvm group. also try :all
    _cset(:rvm_add_to_group, fetch(:user,"$USER"))

    namespace :rvm do
      # Run +command+ with curl forced into silent/show-error mode, preserving
      # any user .curlrc and restoring the exit status of the command.
      def run_silent_curl(command)
        run <<-EOF.gsub(/[\s\n]+/, ' '), :shell => "#{rvm_install_shell}"
__LAST_STATUS=0;
export CURL_HOME="${TMPDIR:-${HOME}}/.rvm-curl-config.$$";
mkdir ${CURL_HOME}/;
{
[[ -r ${HOME}/.curlrc ]] && cat ${HOME}/.curlrc;
echo "silent";
echo "show-error";
} > $CURL_HOME/.curlrc;
#{command} || __LAST_STATUS=$?;
rm -rf $CURL_HOME;
exit ${__LAST_STATUS}
        EOF
      end

      # For system-wide installs, run +command+ under the rvm group via
      # `sudo sg`; other install types run the command unchanged.
      def with_rvm_group(command)
        case rvm_type
        when :root, :system
          "#{sudo} sg rvm -c \"#{command}\""
        else
          command
        end
      end

      # Define a task restricted to :rvm_require_role when that role is set.
      def rvm_task(name,&block)
        if fetch(:rvm_require_role,nil).nil?
          task name, &block
        else
          task name, :roles => fetch(:rvm_require_role), &block
        end
      end

      desc <<-EOF
Install RVM of the given choice to the server.
By default RVM "stable" is installed, change with:
set :rvm_install_type, :head
By default BASH is used for installer, change with:
set :rvm_install_shell, :zsh
      EOF
      rvm_task :install_rvm do
        command_fetch = "curl -L get.rvm.io"
        command_install = case rvm_type
        when :root, :system
          if fetch(:use_sudo, true) == false && rvm_install_with_sudo == false
            raise "
:use_sudo is set to 'false' but sudo is needed to install rvm_type: #{rvm_type}.
You can enable use_sudo within rvm for use only by this install operation by adding to deploy.rb: set :rvm_install_with_sudo, true
"
          else
            "#{sudo} "
          end
        else
          ''
        end
        command_install << "#{rvm_install_shell} -s #{rvm_install_type} --path #{rvm_path}"
        case rvm_type
        when :root, :system
          command_install << " --add-to-rvm-group #{[rvm_add_to_group].flatten.map(&:to_s).join(",")}"
        end
        run_silent_curl "#{command_fetch} | #{command_install}"
      end

      desc <<-EOF
Install RVM ruby to the server, create gemset if needed.
By default ruby is installed, you can reinstall with:
set :rvm_install_ruby, :reinstall
By default ruby is compiled using all CPU cores, change with:
set :rvm_install_ruby_threads, :reinstall
By default BASH is used for installer, change with:
set :rvm_install_shell, :zsh
      EOF
      rvm_task :install_ruby do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "
ruby can not be installed when using :rvm_ruby_string => :#{ruby}
"
        else
          command_install = ""
          autolibs_flag = fetch(:rvm_autolibs_flag, 2).to_s
          # Autolibs modes that never install requirements themselves.
          autolibs_flag_no_requirements = %w(
            0 disable disabled
            1 read read-only
            2 fail read-fail
          ).include?( autolibs_flag )
          if autolibs_flag_no_requirements
            command_install << with_rvm_group("#{File.join(rvm_bin_path, "rvm")} --autolibs=#{autolibs_flag} #{rvm_install_ruby} #{ruby} -j #{rvm_install_ruby_threads} #{rvm_install_ruby_params}")
          else
            if fetch(:use_sudo, true) == false && rvm_install_with_sudo == false
              raise "
:use_sudo is set to 'false' but sudo is needed to install requirements with autolibs '#{autolibs_flag}'.
You can enable use_sudo within rvm for use only by this ruby install operation by adding to deploy.rb: set :rvm_install_with_sudo, true
"
            else
              command_install << "#{sudo} #{File.join(rvm_bin_path, "rvm")} --autolibs=#{autolibs_flag} requirements #{ruby}"
              command_install << "; "
              command_install << with_rvm_group("#{File.join(rvm_bin_path, "rvm")} --autolibs=1 #{rvm_install_ruby} #{ruby} -j #{rvm_install_ruby_threads} #{rvm_install_ruby_params}")
            end
          end
          if gemset
            command_install << "; "
            command_install << with_rvm_group("#{File.join(rvm_bin_path, "rvm")} #{ruby} do rvm gemset create #{gemset}")
          end
          run_silent_curl command_install
        end
      end

      desc <<-EOF
Install RVM packages to the server.
This must come before the 'rvm:install_ruby' task is called.
The package list is empty by default. Specifiy the packages to install with:
set :rvm_install_pkgs, %w[libyaml curl]
Full list of packages available at https://rvm.io/packages/ or by running 'rvm pkg'.
      EOF
      rvm_task :install_pkgs do
        rvm_install_pkgs.each do |pkg|
          run "#{File.join(rvm_bin_path, "rvm")} pkg install #{pkg}", :shell => "#{rvm_install_shell}"
        end
      end

      desc "Create gemset"
      rvm_task :create_gemset do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "
gemset can not be created when using :rvm_ruby_string => :#{ruby}
"
        else
          if gemset
            run with_rvm_group("#{File.join(rvm_bin_path, "rvm")} #{ruby} do rvm gemset create #{gemset}"), :shell => "#{rvm_install_shell}"
          end
        end
      end

      desc <<-EOF
Import file contents to the current RVM ruby gemset.
The gemset filename must match :rvm_ruby_string.gems and be located in :rvm_gemset_path.
:rvm_gemset_path defaults to :rvm_path/gemsets
The gemset can be created with 'cap rvm:gemset_export'.
      EOF
      rvm_task :import_gemset do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "gemset can not be imported when using :rvm_ruby_string => :#{ruby}"
        else
          if gemset
            run "#{File.join(rvm_bin_path, "rvm-shell")} #{fetch(:rvm_ruby_string_evaluated)} rvm gemset import #{File.join(rvm_gemset_path, "#{fetch(:rvm_ruby_string_evaluated)}.gems")}", :shell => "#{rvm_install_shell}"
          end
        end
      end

      desc <<-EOF
Export the current RVM ruby gemset contents to a file.
The gemset filename will match :rvm_ruby_string.gems and be located in :rvm_gemset_path.
:rvm_gemset_path defaults to :rvm_path/gemsets
The gemset can be imported with 'cap rvm:gemset_import'.
      EOF
      rvm_task :export_gemset do
        ruby, gemset = fetch(:rvm_ruby_string_evaluated).to_s.strip.split /@/
        if %w( release_path default ).include? "#{ruby}"
          raise "gemset can not be imported when using :rvm_ruby_string => :#{ruby}"
        else
          if gemset
            run "#{File.join(rvm_bin_path, "rvm-shell")} #{fetch(:rvm_ruby_string_evaluated)} rvm gemset export > #{File.join(rvm_gemset_path, "#{fetch(:rvm_ruby_string_evaluated)}.gems")}", :shell => "#{rvm_install_shell}"
          end
        end
      end

      desc "Install a gem, 'cap rvm:install_gem GEM=my_gem'."
      rvm_task :install_gem do
        run "#{File.join(rvm_bin_path, "rvm")} #{fetch(:rvm_ruby_string_evaluated)} do gem install #{ENV['GEM']}", :shell => "#{rvm_install_shell}"
      end

      desc "Uninstall a gem, 'cap rvm:uninstall_gem GEM=my_gem'."
      rvm_task :uninstall_gem do
        run "#{File.join(rvm_bin_path, "rvm")} #{fetch(:rvm_ruby_string_evaluated)} do gem uninstall --no-executables #{ENV['GEM']}", :shell => "#{rvm_install_shell}"
      end
    end
  end if Capistrano.const_defined? :Configuration and Capistrano::Configuration.methods.map(&:to_sym).include? :instance
end
# E.g, to use ree and rails 3:
#
# require 'rvm/capistrano'
# set :rvm_ruby_string, "ree@rails3"
#
|
module Rworkflow
  # Redis-backed workflow: objects move between per-state redis lists
  # according to a Lifecycle, with counters, transition logging and cleanup.
  class Flow
    STATE_SUCCESSFUL = :successful
    STATE_FAILED = :failed
    STATES_TERMINAL = [STATE_FAILED, STATE_SUCCESSFUL].freeze
    STATES_FAILED = [STATE_FAILED].freeze
    REDIS_NS = 'flow'.freeze
    WORKFLOW_REGISTRY = "#{REDIS_NS}:__registry".freeze

    attr_accessor :id
    attr_reader :lifecycle

    def initialize(id)
      @id = id
      @redis_key = "#{REDIS_NS}:#{id}"
      @storage = RedisRds::Hash.new(@redis_key)
      @flow_data = RedisRds::Hash.new("#{@redis_key}__data")
      @processing = RedisRds::Hash.new("#{@redis_key}__processing")
      load_lifecycle
    end

    # Load the persisted lifecycle; any parse error leaves @lifecycle nil.
    def load_lifecycle
      serialized = @storage.get(:lifecycle)
      unless serialized.nil?
        raw = self.class.serializer.load(serialized)
        @lifecycle = Rworkflow::Lifecycle.unserialize(raw) unless raw.nil?
      end
    rescue
      @lifecycle = nil
    end
    private :load_lifecycle

    def lifecycle=(new_lifecycle)
      @lifecycle = new_lifecycle
      @storage.set(:lifecycle, self.class.serializer.dump(@lifecycle.serialize))
    end

    def finished?
      return false unless started?
      total = counters.reduce(0) do |sum, pair|
        self.class.terminal?(pair[0]) ? sum : (sum + pair[1].to_i)
      end
      return total == 0
    end

    def status
      status = 'Running'
      status = successful? ? 'Finished' : 'Failed' if finished?
      return status
    end

    def created_at
      return @created_at ||= begin Time.zone.at(get(:created_at, 0)) end
    end

    def started?
      return !get(:start_time).nil?
    end

    def name
      return get(:name, @id)
    end

    def name=(name)
      return set(:name, name)
    end

    def start_time
      return Time.zone.at(get(:start_time, 0))
    end

    def finish_time
      return Time.zone.at(get(:finish_time, 0))
    end

    def expected_duration
      return Float::INFINITY
    end

    def valid?
      return !@lifecycle.nil?
    end

    def count(state)
      return get_list(state).size
    end

    # Stored counters snapshot, falling back to a live atomic read.
    def counters
      counters = @storage.get(:counters)
      if !counters.nil?
        counters = begin
          self.class.serializer.load(counters)
        rescue => e
          Rails.logger.error("Error loading stored flow counters: #{e.message}")
          nil
        end
      end
      return counters || counters!
    end

    # fetches counters atomically
    def counters!
      counters = { processing: 0 }
      names = @lifecycle.states.keys
      results = RedisRds::Object.connection.multi do
        self.class::STATES_TERMINAL.each { |name| get_list(name).size }
        names.each { |name| get_list(name).size }
        @processing.getall
      end
      (self.class::STATES_TERMINAL + names).each do |name|
        counters[name] = results.shift.to_i
      end
      counters[:processing] = results.shift.reduce(0) { |sum, pair| sum + pair.last.to_i }
      return counters
    end
    private :counters!

    # Pop a batch from +state_name+, deserialize and yield it; unparseable
    # entries go to STATE_FAILED. Always clears the fetcher's processing
    # marker and terminates the flow once nothing is left.
    def fetch(fetcher_id, state_name)
      @processing.set(fetcher_id, 1)
      list = get_state_list(state_name)
      unless list.nil?
        failed = []
        cardinality = @lifecycle.states[state_name].cardinality
        cardinality = get(:start_count).to_i if cardinality == Lifecycle::CARDINALITY_ALL_STARTED
        force_list_complete = @lifecycle.states[state_name].policy == State::STATE_POLICY_WAIT
        raw_objects = list.lpop(cardinality, force_list_complete)
        unless raw_objects.empty?
          objects = raw_objects.map do |raw_object|
            begin
              self.class.serializer.load(raw_object)
            rescue StandardError => _
              failed << raw_object
              nil
            end
          end.compact
          @processing.set(fetcher_id, objects.size)
          unless failed.empty?
            push(failed, STATE_FAILED)
            Rails.logger.error("Failed to parse #{failed.size} in workflow #{@id} for fetcher id #{fetcher_id} at state #{state_name}")
          end
          yield(objects) if block_given?
        end
      end
    ensure
      @processing.remove(fetcher_id)
      terminate if finished?
    end

    def list_objects(state_name, limit = -1)
      list = get_list(state_name)
      return list.get(0, limit).map {|object| self.class.serializer.load(object)}
    end

    def get_state_list(state_name)
      list = nil
      state = @lifecycle.states[state_name]
      if !state.nil?
        list = get_list(state_name)
      else
        Rails.logger.error("Tried accessing invalid state #{state_name} for workflow #{id}")
      end
      return list
    end
    private :get_state_list

    # Finalize under a redis mutex: snapshot counters + clear state lists for
    # public flows, or remove every key for private ones.
    def terminate
      mutex = RedisRds::Mutex.new(self.id)
      mutex.synchronize do
        if !self.cleaned_up?
          set(:finish_time, Time.now.to_i)
          post_process
          if self.public?
            counters = counters!
            counters[:processing] = 0 # Some worker might have increased the processing flag at that time even if there is no more jobs to be done
            @storage.setnx(:counters, self.class.serializer.dump(counters))
            states_cleanup
          else
            self.cleanup
          end
        end
      end
    end

    # Hook for subclasses, invoked by terminate before cleanup.
    def post_process
    end
    protected :post_process

    def metadata_string
      return "Rworkflow: #{self.name}"
    end

    def cleaned_up?
      return states_list.all? { |name| !get_list(name).exists? }
    end

    def states_list
      states = self.class::STATES_TERMINAL
      states += @lifecycle.states.keys if valid?
      return states
    end

    # Invalid transitions are logged and ignored.
    def transition(from_state, name, objects)
      objects = Array.wrap(objects)
      to_state = begin
        lifecycle.transition(from_state, name)
      rescue Rworkflow::StateError => e
        Rails.logger.error("Error transitioning: #{e}")
        nil
      end
      if !to_state.nil?
        push(objects, to_state)
        log(from_state, name, objects.size)
      end
    end

    def logging?
      return get(:logging, false)
    end

    def log(from_state, transition, num_objects)
      logger.incrby("#{from_state}__#{transition}", num_objects.to_i) if logging?
    end

    def logger
      return @logger ||= begin
        RedisRds::Hash.new("#{@redis_key}__logger")
      end
    end

    def logs
      logs = {}
      if valid? && logging?
        state_transition_counters = logger.getall
        state_transition_counters.each do |state_transition, counter|
          state, transition = state_transition.split('__')
          logs[state] = {} unless logs.key?(state)
          logs[state][transition] = counter.to_i
        end
      end
      return logs
    end

    def get_state_cardinality(state_name)
      cardinality = @lifecycle.states[state_name].cardinality
      cardinality = self.get(:start_count).to_i if cardinality == Rworkflow::Lifecycle::CARDINALITY_ALL_STARTED
      return cardinality
    end

    def set(key, value)
      @flow_data.set(key, self.class.serializer.dump(value))
    end

    def get(key, default = nil)
      value = @flow_data.get(key)
      value = if value.nil? then default else self.class.serializer.load(value) end
      return value
    end

    def incr(key, value = 1)
      return @flow_data.incrby(key, value)
    end

    def push(objects, state)
      objects = Array.wrap(objects)
      return 0 if objects.empty?
      list = get_list(state)
      list.rpush(objects.map { |object| self.class.serializer.dump(object) })
      return objects.size
    end
    private :push

    def get_list(name)
      return RedisRds::List.new("#{@redis_key}:lists:#{name}")
    end
    private :get_list

    def cleanup
      return if Rails.env.test?
      states_cleanup
      logger.delete
      @processing.delete
      self.class.unregister(self)
      @flow_data.delete
      @storage.delete
    end

    def states_cleanup
      return if Rails.env.test?
      states_list.each { |name| get_list(name).delete }
    end
    protected :states_cleanup

    def start(objects)
      objects = Array.wrap(objects)
      set(:start_time, Time.now.to_i)
      set(:start_count, objects.size)
      push(objects, lifecycle.initial)
      log(lifecycle.initial, 'initial', objects.size)
    end

    # FIX: the parameter shadowed the +counters+ method, so the original
    # `(counters || counters)` was always the (possibly nil) parameter and
    # raised NoMethodError when called without arguments (e.g. from failed?).
    # `self.counters` restores the intended fallback to live counters.
    def total_objects_processed(counters = nil)
      return (counters || self.counters).reduce(0) do |sum, pair|
        if self.class.terminal?(pair[0])
          sum + pair[1]
        else
          sum
        end
      end
    end

    # FIX: same parameter-shadowing fallback fix as total_objects_processed.
    def total_objects(counters = nil)
      return (counters || self.counters).reduce(0) { |sum, pair| sum + pair[1] }
    end

    # FIX: same parameter-shadowing fallback fix as total_objects_processed.
    def total_objects_failed(counters = nil)
      return (counters || self.counters).reduce(0) do |sum, pair|
        if self.class.failure?(pair[0])
          sum + pair[1]
        else
          sum
        end
      end
    end

    def successful?
      return false if !finished?
      return !failed?
    end

    def failed?
      return false if !finished?
      return total_objects_failed > 0
    end

    def public?
      return @public ||= begin get(:public, false) end
    end

    class << self
      def create(lifecycle, name = '', options = {})
        id = generate_id(name)
        workflow = new(id)
        workflow.name = name
        workflow.lifecycle = lifecycle
        workflow.set(:created_at, Time.now.to_i)
        workflow.set(:public, options.fetch(:public, false))
        workflow.set(:logging, options.fetch(:logging, true))
        register(workflow)
        return workflow
      end

      # Id encodes the flow class so read_flow_class can reconstruct it.
      def generate_id(workflow_name)
        now = Time.now.to_f
        random = Random.new(now)
        return "#{name}__#{workflow_name}__#{(Time.now.to_f * 1000).to_i}__#{random.rand(now).to_i}"
      end
      private :generate_id

      def cleanup(id)
        workflow = new(id)
        workflow.cleanup
      end

      def get_public_workflows(options = {})
        return registry.public_flows(options.reverse_merge(parent_class: self)).map { |id| load(id) }
      end

      def get_private_workflows(options = {})
        return registry.private_flows(options.reverse_merge(parent_class: self)).map { |id| load(id) }
      end

      def all(options = {})
        return registry.all(options.reverse_merge(parent_class: self)).map { |id| load(id) }
      end

      def load(id, klass = nil)
        workflow = nil
        klass = read_flow_class(id) if klass.nil?
        workflow = klass.new(id) if klass.respond_to?(:new)
        return workflow
      end

      # Recover the flow class from the id's first "__" segment.
      def read_flow_class(id)
        klass = nil
        raw_class = id.split('__').first
        klass = begin
          raw_class.constantize
        rescue NameError => _
          Rails.logger.warn("Unknown flow class for workflow id #{id}")
          nil
        end if !raw_class.nil?
        return klass
      end

      def registered?(workflow)
        return registry.include?(workflow)
      end

      def register(workflow)
        registry.add(workflow)
      end

      def unregister(workflow)
        registry.remove(workflow)
      end

      def terminal?(state)
        return self::STATES_TERMINAL.include?(state)
      end

      def failure?(state)
        return self::STATES_FAILED.include?(state)
      end

      def registry
        return @registry ||= begin
          FlowRegistry.new(Rworkflow::VERSION.to_s)
        end
      end

      def serializer
        YAML
      end
    end
  end
end
Lint lib/rworkflow/flow.rb (style-only cleanup).
module Rworkflow
class Flow
# Objects that finish end up in one of these two terminal states.
STATE_SUCCESSFUL = :successful
STATE_FAILED = :failed
STATES_TERMINAL = [STATE_FAILED, STATE_SUCCESSFUL].freeze
STATES_FAILED = [STATE_FAILED].freeze
# Namespace prefix for every redis key created by a flow.
REDIS_NS = 'flow'.freeze
WORKFLOW_REGISTRY = "#{REDIS_NS}:__registry".freeze
attr_accessor :id
# Assigned through the custom lifecycle= writer, which also persists it.
attr_reader :lifecycle
# @param id [String] unique workflow identifier; all redis keys derive from it.
def initialize(id)
  @id = id
  @redis_key = "#{REDIS_NS}:#{id}"
  # Main storage hash: serialized lifecycle and final counters snapshot.
  @storage = RedisRds::Hash.new(@redis_key)
  # Arbitrary per-flow key/value data (name, timestamps, flags).
  @flow_data = RedisRds::Hash.new("#{@redis_key}__data")
  # Per-fetcher in-flight object counts.
  @processing = RedisRds::Hash.new("#{@redis_key}__processing")
  load_lifecycle
end
# Load and deserialize the lifecycle stored in redis; any error while
# reading or parsing leaves @lifecycle nil, making the flow invalid.
def load_lifecycle
  serialized = @storage.get(:lifecycle)
  unless serialized.nil?
    raw = self.class.serializer.load(serialized)
    @lifecycle = Rworkflow::Lifecycle.unserialize(raw) unless raw.nil?
  end
rescue
  @lifecycle = nil
end
private :load_lifecycle
# Assign a new lifecycle and persist its serialized form to redis.
def lifecycle=(new_lifecycle)
  @lifecycle = new_lifecycle
  @storage.set(:lifecycle, self.class.serializer.dump(@lifecycle.serialize))
end
# A started flow is finished once no objects remain outside terminal states.
def finished?
  return false unless started?
  pending = counters.inject(0) do |acc, (state, count)|
    self.class.terminal?(state) ? acc : acc + count.to_i
  end
  pending.zero?
end
# Human-readable status: 'Running', 'Finished' or 'Failed'.
def status
  return 'Running' unless finished?
  successful? ? 'Finished' : 'Failed'
end
# Creation timestamp (zone-aware), memoized; epoch 0 when never set.
def created_at
  @created_at ||= Time.zone.at(get(:created_at, 0))
end
# A flow counts as started once :start_time has been recorded.
def started?
  recorded = get(:start_time)
  !recorded.nil?
end
# Display name; defaults to the flow id when none was set.
def name
  get(:name, @id)
end
# Persist the display name in the flow data hash.
def name=(name)
  set(:name, name)
end
# Start timestamp (zone-aware); epoch 0 when the flow never started.
def start_time
  Time.zone.at(get(:start_time, 0))
end
# Finish timestamp (zone-aware); epoch 0 when the flow never terminated.
def finish_time
  Time.zone.at(get(:finish_time, 0))
end
# Subclasses may override with an actual estimate; by default a flow's
# duration is unbounded.
def expected_duration
  Float::INFINITY
end
# A flow is valid once a lifecycle has been loaded or assigned.
def valid?
  !@lifecycle.nil?
end
# Number of objects currently queued in +state+.
def count(state)
  get_list(state).size
end
# Counters snapshot stored at termination, falling back to a live atomic
# read (counters!) when no snapshot exists or it cannot be deserialized.
def counters
  counters = @storage.get(:counters)
  if !counters.nil?
    counters = begin
      self.class.serializer.load(counters)
    rescue => e
      Rails.logger.error("Error loading stored flow counters: #{e.message}")
      nil
    end
  end
  return counters || counters!
end
# fetches counters atomically: all state list sizes plus the processing hash
# are read inside one redis MULTI, then the replies are matched back to state
# names in the same order the commands were queued.
def counters!
  counters = { processing: 0 }
  names = @lifecycle.states.keys
  results = RedisRds::Object.connection.multi do
    self.class::STATES_TERMINAL.each { |name| get_list(name).size }
    names.each { |name| get_list(name).size }
    @processing.getall
  end
  (self.class::STATES_TERMINAL + names).each do |name|
    counters[name] = results.shift.to_i
  end
  # Last reply is the processing hash: sum the per-fetcher in-flight counts.
  counters[:processing] = results.shift.reduce(0) { |sum, pair| sum + pair.last.to_i }
  return counters
end
private :counters!
# Pop up to the state's cardinality of objects from +state_name+'s list,
# deserialize them and yield the batch. Entries that fail to deserialize are
# pushed to STATE_FAILED. The fetcher is tracked in @processing for the
# duration so live counters include in-flight objects; the ensure block
# always clears that marker and terminates the flow once nothing is left.
def fetch(fetcher_id, state_name)
  @processing.set(fetcher_id, 1)
  list = get_state_list(state_name)
  unless list.nil?
    failed = []
    cardinality = @lifecycle.states[state_name].cardinality
    cardinality = get(:start_count).to_i if cardinality == Lifecycle::CARDINALITY_ALL_STARTED
    # NOTE(review): appears to ask lpop for a complete batch only under the
    # WAIT policy — confirm against RedisRds::List#lpop.
    force_list_complete = @lifecycle.states[state_name].policy == State::STATE_POLICY_WAIT
    raw_objects = list.lpop(cardinality, force_list_complete)
    unless raw_objects.empty?
      objects = raw_objects.map do |raw_object|
        begin
          self.class.serializer.load(raw_object)
        rescue StandardError => _
          failed << raw_object
          nil
        end
      end.compact
      @processing.set(fetcher_id, objects.size)
      unless failed.empty?
        push(failed, STATE_FAILED)
        Rails.logger.error("Failed to parse #{failed.size} in workflow #{@id} for fetcher id #{fetcher_id} at state #{state_name}")
      end
      yield(objects) if block_given?
    end
  end
ensure
  @processing.remove(fetcher_id)
  terminate if finished?
end
# Deserialized objects currently queued in +state_name+ (all by default).
def list_objects(state_name, limit = -1)
  raw_entries = get_list(state_name).get(0, limit)
  raw_entries.map do |entry|
    self.class.serializer.load(entry)
  end
end
# Redis list for +state_name+, or nil (with an error log) when the state is
# not part of the lifecycle.
def get_state_list(state_name)
  if @lifecycle.states[state_name].nil?
    Rails.logger.error("Tried accessing invalid state #{state_name} for workflow #{id}")
    nil
  else
    get_list(state_name)
  end
end
private :get_state_list
# Finalize the flow under a redis mutex: record the finish time, run the
# post_process hook, then either snapshot the final counters and clear the
# state lists (public flows) or remove every key entirely (private flows).
def terminate
  mutex = RedisRds::Mutex.new(self.id)
  mutex.synchronize do
    if !self.cleaned_up?
      set(:finish_time, Time.now.to_i)
      post_process
      if self.public?
        counters = counters!
        counters[:processing] = 0 # Some worker might have increased the processing flag at that time even if there is no more jobs to be done
        # setnx: keep the first snapshot if a racing worker already wrote one.
        @storage.setnx(:counters, self.class.serializer.dump(counters))
        states_cleanup
      else
        self.cleanup
      end
    end
  end
end
# Hook for subclasses; invoked once inside terminate before cleanup.
def post_process; end
protected :post_process
# Short label identifying this flow, e.g. for job metadata.
def metadata_string
  "Rworkflow: #{self.name}"
end
# True once no state list key remains in redis.
def cleaned_up?
  states_list.none? { |state| get_list(state).exists? }
end
# All state names: the terminal states plus, when a lifecycle is loaded,
# its own states.
def states_list
  base = self.class::STATES_TERMINAL
  valid? ? base + @lifecycle.states.keys : base
end
# Move +objects+ from +from_state+ through the lifecycle transition +name+.
# An invalid transition is logged and ignored (objects are not moved).
def transition(from_state, name, objects)
  objects = Array.wrap(objects)
  to_state = begin
    lifecycle.transition(from_state, name)
  rescue Rworkflow::StateError => e
    Rails.logger.error("Error transitioning: #{e}")
    nil
  end
  if !to_state.nil?
    push(objects, to_state)
    log(from_state, name, objects.size)
  end
end
# Whether transition logging was enabled for this flow (off by default).
def logging?
  get(:logging, false)
end
# Increment the per-(state, transition) counter when logging is enabled.
def log(from_state, transition, num_objects)
  return unless logging?
  logger.incrby("#{from_state}__#{transition}", num_objects.to_i)
end
# Redis hash holding the transition counters, created lazily.
def logger
  @logger ||= RedisRds::Hash.new("#{@redis_key}__logger")
end
def logs
logs = {}
if valid? && logging?
state_transition_counters = logger.getall
state_transition_counters.each do |state_transition, counter|
state, transition = state_transition.split('__')
logs[state] = {} unless logs.key?(state)
logs[state][transition] = counter.to_i
end
end
return logs
end
def get_state_cardinality(state_name)
cardinality = @lifecycle.states[state_name].cardinality
cardinality = self.get(:start_count).to_i if cardinality == Rworkflow::Lifecycle::CARDINALITY_ALL_STARTED
return cardinality
end
def set(key, value)
@flow_data.set(key, self.class.serializer.dump(value))
end
def get(key, default = nil)
value = @flow_data.get(key)
value = value.nil? ? default : self.class.serializer.load(value)
return value
end
def incr(key, value = 1)
return @flow_data.incrby(key, value)
end
def push(objects, state)
objects = Array.wrap(objects)
return 0 if objects.empty?
list = get_list(state)
list.rpush(objects.map { |object| self.class.serializer.dump(object) })
return objects.size
end
private :push
def get_list(name)
return RedisRds::List.new("#{@redis_key}:lists:#{name}")
end
private :get_list
def cleanup
return if Rails.env.test?
states_cleanup
logger.delete
@processing.delete
self.class.unregister(self)
@flow_data.delete
@storage.delete
end
def states_cleanup
return if Rails.env.test?
states_list.each { |name| get_list(name).delete }
end
protected :states_cleanup
def start(objects)
objects = Array.wrap(objects)
set(:start_time, Time.now.to_i)
set(:start_count, objects.size)
push(objects, lifecycle.initial)
log(lifecycle.initial, 'initial', objects.size)
end
def total_objects_processed(counters = nil)
return (counters || counters).reduce(0) do |sum, pair|
if self.class.terminal?(pair[0])
sum + pair[1]
else
sum
end
end
end
def total_objects(counters = nil)
return (counters || counters).reduce(0) { |sum, pair| sum + pair[1] }
end
def total_objects_failed(counters = nil)
return (counters || counters).reduce(0) do |sum, pair|
if self.class.failure?(pair[0])
sum + pair[1]
else
sum
end
end
end
def successful?
return false if !finished?
return !failed?
end
def failed?
return false if !finished?
return total_objects_failed > 0
end
def public?
return @public ||= begin get(:public, false) end
end
class << self
def create(lifecycle, name = '', options = {})
id = generate_id(name)
workflow = new(id)
workflow.name = name
workflow.lifecycle = lifecycle
workflow.set(:created_at, Time.now.to_i)
workflow.set(:public, options.fetch(:public, false))
workflow.set(:logging, options.fetch(:logging, true))
register(workflow)
return workflow
end
def generate_id(workflow_name)
now = Time.now.to_f
random = Random.new(now)
return "#{name}__#{workflow_name}__#{(Time.now.to_f * 1000).to_i}__#{random.rand(now).to_i}"
end
private :generate_id
def cleanup(id)
workflow = new(id)
workflow.cleanup
end
def get_public_workflows(options = {})
return registry.public_flows(options.reverse_merge(parent_class: self)).map { |id| load(id) }
end
def get_private_workflows(options = {})
return registry.private_flows(options.reverse_merge(parent_class: self)).map { |id| load(id) }
end
def all(options = {})
return registry.all(options.reverse_merge(parent_class: self)).map { |id| load(id) }
end
def load(id, klass = nil)
workflow = nil
klass = read_flow_class(id) if klass.nil?
workflow = klass.new(id) if klass.respond_to?(:new)
return workflow
end
def read_flow_class(id)
klass = nil
raw_class = id.split('__').first
if !raw_class.nil?
klass = begin
raw_class.constantize
rescue NameError => _
Rails.logger.warn("Unknown flow class for workflow id #{id}")
nil
end
end
return klass
end
def registered?(workflow)
return registry.include?(workflow)
end
def register(workflow)
registry.add(workflow)
end
def unregister(workflow)
registry.remove(workflow)
end
def terminal?(state)
return self::STATES_TERMINAL.include?(state)
end
def failure?(state)
return self::STATES_FAILED.include?(state)
end
def registry
return @registry ||= begin
FlowRegistry.new(Rworkflow::VERSION.to_s)
end
end
def serializer
YAML
end
end
end
end
|
##
# Define version
module S3Repo
VERSION = '0.1.1'
end
bump version
##
# Define version
module S3Repo
VERSION = '0.1.2'
end
|
module Saddle
VERSION = '0.1.0.rc1'
end
Remove rc1 from the version.
module Saddle
VERSION = '0.1.0'
end
|
module SassSpec::Util
class << self
# Normalizes the whitespace in the given CSS to make it easier to compare
# across implementations.
def normalize_output(css)
css.gsub(/(?:\r?\n)+/, "\n")
end
# Normalizes the path names and whitespace in the given error message.
def normalize_error(error)
# TODO(nweiz): Delete path normalization when sass/libsass#2861 is fixed.
error.gsub(/(?:\/todo_|_todo\/)/, "/") # hide todo pre/suffix
.gsub(/\/libsass\-[a-z]+\-tests\//, "/") # hide test directory
.gsub(/\/libsass\-[a-z]+\-issues\//, "/libsass-issues/") # normalize issue specs
.gsub(/(([\w\/.\-\\:]+?[\/\\])|([\/\\]|(?!:\s)))spec[\/\\]+/, "/sass/spec/") # normalize abs paths
.sub(/(?:\r?\n)*\z/, "\n") # make sure we have exactly one trailing linefeed
.sub(/\A(?:\r?\s)+\z/, "") # clear the whole file if only whitespace
.gsub(/\r\n/, "\n") # remove Windows line feeds
end
# Yields each directory in `path`, from the outermost to the innermost.
def each_directory(path)
return to_enum(__method__, path) unless block_given?
path_so_far = nil
Pathname.new(path).each_filename do |dir|
if path_so_far.nil?
path_so_far = String.new(dir)
else
path_so_far << File::SEPARATOR << dir
end
yield path_so_far
end
end
end
end
Always compare outputs in UTF-8 mode
module SassSpec::Util
class << self
# Normalizes the whitespace in the given CSS to make it easier to compare
# across implementations.
def normalize_output(css)
css.gsub(/(?:\r?\n)+/, "\n").force_encoding("utf-8")
end
# Normalizes the path names and whitespace in the given error message.
def normalize_error(error)
# TODO(nweiz): Delete path normalization when sass/libsass#2861 is fixed.
error.gsub(/(?:\/todo_|_todo\/)/, "/") # hide todo pre/suffix
.gsub(/\/libsass\-[a-z]+\-tests\//, "/") # hide test directory
.gsub(/\/libsass\-[a-z]+\-issues\//, "/libsass-issues/") # normalize issue specs
.gsub(/(([\w\/.\-\\:]+?[\/\\])|([\/\\]|(?!:\s)))spec[\/\\]+/, "/sass/spec/") # normalize abs paths
.sub(/(?:\r?\n)*\z/, "\n") # make sure we have exactly one trailing linefeed
.sub(/\A(?:\r?\s)+\z/, "") # clear the whole file if only whitespace
.gsub(/\r\n/, "\n") # remove Windows line feeds
.force_encoding("utf-8")
end
# Yields each directory in `path`, from the outermost to the innermost.
def each_directory(path)
return to_enum(__method__, path) unless block_given?
path_so_far = nil
Pathname.new(path).each_filename do |dir|
if path_so_far.nil?
path_so_far = String.new(dir)
else
path_so_far << File::SEPARATOR << dir
end
yield path_so_far
end
end
end
end
|
module Schlep
VERSION = "0.1.0"
end
Bump version to 0.2.0
module Schlep
VERSION = "0.2.0"
end
|
require 'scm/util'
require 'pathname'
module SCM
class Repository
include Util
# The path of the repository
attr_reader :path
#
# Creates a new repository.
#
# @param [String] path
# The path to the repository.
#
def initialize(path)
@path = Pathname.new(File.expand_path(path))
end
#
# Creates a new repository.
#
# @param [String] path
# Path to the repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Repository]
# The newly created repository.
#
# @abstract
#
def self.create(path,options={})
new(path)
end
#
# Clones a remote repository.
#
# @param [URI, String] uri
# The URI of the remote repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Boolean]
# Specifies whether the clone was successful.
#
# @abstract
#
def self.clone(uri,options={})
false
end
#
# Queries the status of the files.
#
# @param [Array] paths
# The optional paths to query.
#
# @return [Hash{String => Symbol}]
# The file paths and their statuses.
#
# @abstract
#
def status(*paths)
{}
end
#
# Adds files or directories to the repository.
#
# @param [Array] paths
# The paths of the files/directories to add.
#
# @abstract
#
def add(*paths)
end
#
# Moves a file or directory.
#
# @param [String] source
# The path of the source file/directory.
#
# @param [String] dest
# The new destination path.
#
# @param [Boolean] force
# Specifies whether to force the move.
#
# @abstract
#
def move(source,dest,force=false)
end
#
# Removes files or directories.
#
# @param [String, Array] paths
# The path(s) to remove.
#
# @param [Hash] options
# Additional options.
#
# @option options [Boolean] :force (false)
# Specifies whether to forcibly remove the files/directories.
#
# @option options [Boolean] :recursive (false)
# Specifies whether to recursively remove the files/directories.
#
# @abstract
#
def remove(paths,options={})
end
#
# Makes a commit.
#
# @param [String] message
# The message for the commit.
#
# @param [Hash] options
# Commit options.
#
# @option options [String] :paths
# The path of the file to commit.
#
# @return [Boolean]
# Specifies whether the commit was successfully made.
#
# @abstract
#
def commit(message=nil,options={})
false
end
#
# Lists branches.
#
# @return [Array<String>]
# The branch names.
#
# @abstract
#
def branches
[]
end
#
# The current branch.
#
# @return [String]
# The name of the current branch.
#
# @abstract
#
def current_branch
end
#
# Swtiches to a branch.
#
# @param [String, Symbol] name
# The name of the branch to switch to.
#
# @return [Boolean]
# Specifies whether the branch was successfully switched.
#
# @abstract
#
def switch_branch(name)
false
end
#
# Deletes a branch.
#
# @param [String] name
# The name of the branch to delete.
#
# @return [Boolean]
# Specifies whether the branch was successfully deleted.
#
# @abstract
#
def delete_branch(name)
false
end
#
# Lists tags.
#
# @return [Array<String>]
# The tag names.
#
# @abstract
#
def tags
[]
end
#
# Tags a release.
#
# @param [String] name
# The name for the tag.
#
# @param [String] commit
# The specific commit to make the tag at.
#
# @return [Boolean]
# Specifies whether the tag was successfully created.
#
# @abstract
#
def tag(name,commit=nil)
false
end
#
# Deletes a tag.
#
# @param [String] name
# The name of the tag.
#
# @return [Boolean]
# Specifies whether the tag was successfully deleted.
#
# @abstract
#
def delete_tag(name)
false
end
#
# Prints a log of commits.
#
# @param [String] :commit
# Commit to begin the log at.
#
# @param [String] :paths
# File to list commits for.
#
# @abstract
#
def log(options={})
false
end
#
# Pushes changes to the remote repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Boolean]
# Specifies whether the changes were successfully pushed.
#
# @abstract
#
def push(options={})
false
end
#
# Pulls changes from the remote repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Boolean]
# Specifies whether the changes were successfully pulled.
#
# @abstract
#
def pull(options={})
false
end
#
# Lists commits.
#
# @param [Hash] options
# Additional options.
#
# @return [Enumerator<SCM::Commit>]
# The commits within the repository.
#
# @raise [NotImplementedError]
# If a subclass does not provide its own implementation.
#
# @abstract
#
def commits(options={})
raise(NotImplementedError,"This method is not implemented for #{self.class}")
end
#
# Converts the repository to a String.
#
# @return [String]
# The path of the repository.
#
def to_s
@path.to_s
end
protected
#
# Runs a command within the repository.
#
# @param [Symbol] command
# The command to run.
#
# @param [Array] arguments
# Additional arguments to pass to the command.
#
# @return [Boolean]
# Specifies whether the SVN command exited successfully.
#
def run(command,*arguments)
Dir.chdir(@path) { super(command,*arguments) }
end
#
# Runs a command as a separate process.
#
# @param [Symbol] command
# The command to run.
#
# @param [Array] arguments
# Additional arguments to pass to the command.
#
# @yield [line]
# The given block will be passed each line read-in.
#
# @yieldparam [String] line
# A line read from the program.
#
# @return [IO]
# The stdout of the command being ran.
#
def popen(command,*arguments)
Dir.chdir(@path) { super(command,*arguments) }
end
end
end
Added Repository#inspect.
require 'scm/util'
require 'pathname'
module SCM
class Repository
include Util
# The path of the repository
attr_reader :path
#
# Creates a new repository.
#
# @param [String] path
# The path to the repository.
#
def initialize(path)
@path = Pathname.new(File.expand_path(path))
end
#
# Creates a new repository.
#
# @param [String] path
# Path to the repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Repository]
# The newly created repository.
#
# @abstract
#
def self.create(path,options={})
new(path)
end
#
# Clones a remote repository.
#
# @param [URI, String] uri
# The URI of the remote repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Boolean]
# Specifies whether the clone was successful.
#
# @abstract
#
def self.clone(uri,options={})
false
end
#
# Queries the status of the files.
#
# @param [Array] paths
# The optional paths to query.
#
# @return [Hash{String => Symbol}]
# The file paths and their statuses.
#
# @abstract
#
def status(*paths)
{}
end
#
# Adds files or directories to the repository.
#
# @param [Array] paths
# The paths of the files/directories to add.
#
# @abstract
#
def add(*paths)
end
#
# Moves a file or directory.
#
# @param [String] source
# The path of the source file/directory.
#
# @param [String] dest
# The new destination path.
#
# @param [Boolean] force
# Specifies whether to force the move.
#
# @abstract
#
def move(source,dest,force=false)
end
#
# Removes files or directories.
#
# @param [String, Array] paths
# The path(s) to remove.
#
# @param [Hash] options
# Additional options.
#
# @option options [Boolean] :force (false)
# Specifies whether to forcibly remove the files/directories.
#
# @option options [Boolean] :recursive (false)
# Specifies whether to recursively remove the files/directories.
#
# @abstract
#
def remove(paths,options={})
end
#
# Makes a commit.
#
# @param [String] message
# The message for the commit.
#
# @param [Hash] options
# Commit options.
#
# @option options [String] :paths
# The path of the file to commit.
#
# @return [Boolean]
# Specifies whether the commit was successfully made.
#
# @abstract
#
def commit(message=nil,options={})
false
end
#
# Lists branches.
#
# @return [Array<String>]
# The branch names.
#
# @abstract
#
def branches
[]
end
#
# The current branch.
#
# @return [String]
# The name of the current branch.
#
# @abstract
#
def current_branch
end
#
# Swtiches to a branch.
#
# @param [String, Symbol] name
# The name of the branch to switch to.
#
# @return [Boolean]
# Specifies whether the branch was successfully switched.
#
# @abstract
#
def switch_branch(name)
false
end
#
# Deletes a branch.
#
# @param [String] name
# The name of the branch to delete.
#
# @return [Boolean]
# Specifies whether the branch was successfully deleted.
#
# @abstract
#
def delete_branch(name)
false
end
#
# Lists tags.
#
# @return [Array<String>]
# The tag names.
#
# @abstract
#
def tags
[]
end
#
# Tags a release.
#
# @param [String] name
# The name for the tag.
#
# @param [String] commit
# The specific commit to make the tag at.
#
# @return [Boolean]
# Specifies whether the tag was successfully created.
#
# @abstract
#
def tag(name,commit=nil)
false
end
#
# Deletes a tag.
#
# @param [String] name
# The name of the tag.
#
# @return [Boolean]
# Specifies whether the tag was successfully deleted.
#
# @abstract
#
def delete_tag(name)
false
end
#
# Prints a log of commits.
#
# @param [String] :commit
# Commit to begin the log at.
#
# @param [String] :paths
# File to list commits for.
#
# @abstract
#
def log(options={})
false
end
#
# Pushes changes to the remote repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Boolean]
# Specifies whether the changes were successfully pushed.
#
# @abstract
#
def push(options={})
false
end
#
# Pulls changes from the remote repository.
#
# @param [Hash] options
# Additional options.
#
# @return [Boolean]
# Specifies whether the changes were successfully pulled.
#
# @abstract
#
def pull(options={})
false
end
#
# Lists commits.
#
# @param [Hash] options
# Additional options.
#
# @return [Enumerator<SCM::Commit>]
# The commits within the repository.
#
# @raise [NotImplementedError]
# If a subclass does not provide its own implementation.
#
# @abstract
#
def commits(options={})
raise(NotImplementedError,"This method is not implemented for #{self.class}")
end
#
# Converts the repository to a String.
#
# @return [String]
# The path of the repository.
#
def to_s
@path.to_s
end
#
# Inspects the Repository.
#
# @return [String]
# The repository class name and path.
#
def inspect
"#<#{self.class}: #{@path}>"
end
protected
#
# Runs a command within the repository.
#
# @param [Symbol] command
# The command to run.
#
# @param [Array] arguments
# Additional arguments to pass to the command.
#
# @return [Boolean]
# Specifies whether the SVN command exited successfully.
#
def run(command,*arguments)
Dir.chdir(@path) { super(command,*arguments) }
end
#
# Runs a command as a separate process.
#
# @param [Symbol] command
# The command to run.
#
# @param [Array] arguments
# Additional arguments to pass to the command.
#
# @yield [line]
# The given block will be passed each line read-in.
#
# @yieldparam [String] line
# A line read from the program.
#
# @return [IO]
# The stdout of the command being ran.
#
def popen(command,*arguments)
Dir.chdir(@path) { super(command,*arguments) }
end
end
end
|
module Scrape
class Maneger
attr_reader :agent, :url
def initialize(current_user)
@agent = Mechanize.new
@base = 'http://beatmania-clearlamp.com/'
@url = []
@current_user = current_user
search
end
def sync
go
end
private
def maneger_register(title, state)
return false unless Sheet.exists?(title: title)
sheet_id = Sheet.find_by(title: title).id
score = @current_user.scores.find_by(sheet_id: sheet_id)
return false if score.state <= state
score.update_with_logs({ 'sheet_id' => sheet_id, 'state' => state }, nil, nil)
true
end
def go(url = @url)
# @urlがなければ収集終了
return false if url.count == 0
# 配列の数だけ収集
url.each { |u| extract(u) }
true
end
def folder_specific(html)
data = nil
html.xpath('//div[@class="list"]').each do |node|
cnt = 0
node.xpath('dl/dd[@class="level l12"]').each { |_| cnt += 1 }
data = node if 150 < cnt
end
data
end
def data_shaping(data)
data = data.to_s.split('</table>')
elems = nil
data.each do |d|
next unless d.index('level l12')
elems = d.split('</dl>')
end
elems
end
def extract(url)
html = Nokogiri::HTML.parse(@agent.get(@base + url).body, nil, 'UTF-8')
# Level12フォルダの特定
data = folder_specific(html)
return false unless data
# HTMLを整形
elems = data_shaping(data)
return false unless elems
# HTMLから曲名と状態を抽出し,登録する
elems.each do |elem|
break if elem.index('</div>')
preparation_register(elem)
end
true
end
def preparation_register(elem)
state = value(elem.split('<dt class="')[1].split('">')[0])
title = title_check(elem.split('<dd class="musicName">')[1].split('</dd>')[0].strip)
title = gigadelic_innocentwalls(title, elem)
maneger_register(title, state.to_i)
end
def gigadelic_innocentwalls(title, e)
return title if title != 'gigadelic' && title != 'Innocent Walls'
if e.split('<dl class="')[1].split('">')[0].index('hyper')
title += '[H]'
else
title += '[A]'
end
title
end
# クリアランプマネージャとの表記ゆれに対応
def title_check(e)
e.gsub!('&', '&')
return e if Sheet.exists?(title: e)
case e
when %(ピアノ協奏曲第1番"蠍火") then e = %(ピアノ協奏曲第1番”蠍火”)
when %(キャトられ 恋はモ~モク) then e = %(キャトられ恋はモ~モク)
when %(†渚の小悪魔ラヴリィ〜レイディオ†(IIDX EDIT)) then e = %(†渚の小悪魔ラヴリィ~レイディオ†(IIDX EDIT))
when %(疾風迅雷 †LEGGENDARIA) then e = %(疾風迅雷†LEGGENDARIA)
when %(We're so Happy (P*Light Remix) IIDX Ver.) then e = %(We're so Happy (P*Light Remix) IIDX ver.)
when %(Verflucht †LEGGENDARIA) then e = %(Verflucht†LEGGENDARIA)
when %(Sigmund †LEGGENDARIA) then e = %(Sigmund†LEGGENDARIA)
when %(invoker †LEGGENDARIA) then e = %(invoker†LEGGENDARIA)
when %(Feel The Beat †LEGGENDARIA) then e = %(Feel The Beat†LEGGENDARIA)
when %(Close the World feat.a☆ru †LEGGENDARIA) then e = %(Close the World feat.a☆ru†LEGGENDARIA)
when %(Session 9-Chronicles-) then e = %(Session 9 -Chronicles-)
when %(恋する☆宇宙戦争っ!!) then e = %(恋する☆宇宙戦争っ!!)
when %(ワルツ第17番 ト短調"大犬のワルツ") then e = %(ワルツ第17番 ト短調”大犬のワルツ”)
when %(Ancient Scapes †LEGGENDARIA) then e = %(Ancient Scapes†LEGGENDARIA)
when %(Scripted Connection⇒A mix) then e = %(Scripted Connection⇒ A mix)
when %(Colors(radio edit)) then e = %(Colors (radio edit))
when %(EΛΠIΣ) then e = %(ΕΛΠΙΣ)
when %(Timepiece phase II(CN Ver.)) then e = %(Timepiece phase II (CN Ver.))
when %(Hollywood Galaxy (DJ NAGAI Remix)) then e = %(Hollywood Galaxy(DJ NAGAI Remix))
when %(表裏一体!?怪盗いいんちょの悩み♥) then e = %(表裏一体!?怪盗いいんちょの悩み)
end
e
end
def value(e)
hash = { 'FC' => 0, 'EX' => 1, 'H' => 2, 'C' => 3, 'E' => 4, 'A' => 5, 'F' => 6, 'NO' => 7 }
hash[e]
end
def search(current_user = @current_user)
# ユーザの探索
search_user(current_user)
# そのユーザのページのURLを配列に格納
html = Nokogiri::HTML.parse(@agent.page.body, nil, 'UTF-8')
html.xpath('//table/tbody/tr').each do |tr|
collect_user_page(tr)
end
end
def collect_user_page(tr)
cnt = 0
tmp = ''
tr.xpath('td').each do |td|
tmp = td.to_s.split('a href="/')[1].split('"')[0] if cnt == 1
tmp = '' if cnt == 6 && td.text != iidxid
cnt += 1
end
@url.push(tmp + 'sp/') unless tmp == ''
end
def search_user(current_user = @current_user)
@agent.get(@base + 'djdata/')
@agent.page.encoding = 'UTF-8'
form = @agent.page.forms[2]
iidxid = current_user.iidxid.delete('-')
form.searchWord = iidxid
@agent.submit(form)
end
end
end
変数の状態管理を修正
module Scrape
class Maneger
attr_reader :agent, :url
def initialize(current_user)
@agent = Mechanize.new
@base = 'http://beatmania-clearlamp.com/'
@url = []
@current_user = current_user
search
end
def sync
go
end
private
def maneger_register(title, state)
return false unless Sheet.exists?(title: title)
sheet_id = Sheet.find_by(title: title).id
score = @current_user.scores.find_by(sheet_id: sheet_id)
return false if score.state <= state
score.update_with_logs({ 'sheet_id' => sheet_id, 'state' => state }, nil, nil)
true
end
def go(url = @url)
# @urlがなければ収集終了
return false if url.count == 0
# 配列の数だけ収集
url.each { |u| extract(u) }
true
end
def folder_specific(html)
data = nil
html.xpath('//div[@class="list"]').each do |node|
cnt = 0
node.xpath('dl/dd[@class="level l12"]').each { |_| cnt += 1 }
data = node if 150 < cnt
end
data
end
def data_shaping(data)
data = data.to_s.split('</table>')
elems = nil
data.each do |d|
next unless d.index('level l12')
elems = d.split('</dl>')
end
elems
end
def extract(url)
html = Nokogiri::HTML.parse(@agent.get(@base + url).body, nil, 'UTF-8')
# Level12フォルダの特定
data = folder_specific(html)
return false unless data
# HTMLを整形
elems = data_shaping(data)
return false unless elems
# HTMLから曲名と状態を抽出し,登録する
elems.each do |elem|
break if elem.index('</div>')
preparation_register(elem)
end
true
end
def preparation_register(elem)
state = value(elem.split('<dt class="')[1].split('">')[0])
title = title_check(elem.split('<dd class="musicName">')[1].split('</dd>')[0].strip)
title = gigadelic_innocentwalls(title, elem)
maneger_register(title, state.to_i)
end
def gigadelic_innocentwalls(title, e)
return title if title != 'gigadelic' && title != 'Innocent Walls'
if e.split('<dl class="')[1].split('">')[0].index('hyper')
title += '[H]'
else
title += '[A]'
end
title
end
# クリアランプマネージャとの表記ゆれに対応
def title_check(e)
e.gsub!('&', '&')
return e if Sheet.exists?(title: e)
case e
when %(ピアノ協奏曲第1番"蠍火") then e = %(ピアノ協奏曲第1番”蠍火”)
when %(キャトられ 恋はモ~モク) then e = %(キャトられ恋はモ~モク)
when %(†渚の小悪魔ラヴリィ〜レイディオ†(IIDX EDIT)) then e = %(†渚の小悪魔ラヴリィ~レイディオ†(IIDX EDIT))
when %(疾風迅雷 †LEGGENDARIA) then e = %(疾風迅雷†LEGGENDARIA)
when %(We're so Happy (P*Light Remix) IIDX Ver.) then e = %(We're so Happy (P*Light Remix) IIDX ver.)
when %(Verflucht †LEGGENDARIA) then e = %(Verflucht†LEGGENDARIA)
when %(Sigmund †LEGGENDARIA) then e = %(Sigmund†LEGGENDARIA)
when %(invoker †LEGGENDARIA) then e = %(invoker†LEGGENDARIA)
when %(Feel The Beat †LEGGENDARIA) then e = %(Feel The Beat†LEGGENDARIA)
when %(Close the World feat.a☆ru †LEGGENDARIA) then e = %(Close the World feat.a☆ru†LEGGENDARIA)
when %(Session 9-Chronicles-) then e = %(Session 9 -Chronicles-)
when %(恋する☆宇宙戦争っ!!) then e = %(恋する☆宇宙戦争っ!!)
when %(ワルツ第17番 ト短調"大犬のワルツ") then e = %(ワルツ第17番 ト短調”大犬のワルツ”)
when %(Ancient Scapes †LEGGENDARIA) then e = %(Ancient Scapes†LEGGENDARIA)
when %(Scripted Connection⇒A mix) then e = %(Scripted Connection⇒ A mix)
when %(Colors(radio edit)) then e = %(Colors (radio edit))
when %(EΛΠIΣ) then e = %(ΕΛΠΙΣ)
when %(Timepiece phase II(CN Ver.)) then e = %(Timepiece phase II (CN Ver.))
when %(Hollywood Galaxy (DJ NAGAI Remix)) then e = %(Hollywood Galaxy(DJ NAGAI Remix))
when %(表裏一体!?怪盗いいんちょの悩み♥) then e = %(表裏一体!?怪盗いいんちょの悩み)
end
e
end
def value(e)
hash = { 'FC' => 0, 'EX' => 1, 'H' => 2, 'C' => 3, 'E' => 4, 'A' => 5, 'F' => 6, 'NO' => 7 }
hash[e]
end
def search(current_user = @current_user)
# ユーザの探索
search_user(current_user)
# そのユーザのページのURLを配列に格納
html = Nokogiri::HTML.parse(@agent.page.body, nil, 'UTF-8')
html.xpath('//table/tbody/tr').each do |tr|
collect_user_page(tr)
end
end
def collect_user_page(tr)
cnt = 0
tmp = ''
tr.xpath('td').each do |td|
tmp = td.to_s.split('a href="/')[1].split('"')[0] if cnt == 1
tmp = '' if cnt == 6 && td.text != @iidxid
cnt += 1
end
@url.push(tmp + 'sp/') unless tmp == ''
end
def search_user(current_user = @current_user)
@agent.get(@base + 'djdata/')
@agent.page.encoding = 'UTF-8'
form = @agent.page.forms[2]
@iidxid = current_user.iidxid.delete('-')
form.searchWord = @iidxid
@agent.submit(form)
end
end
end
|
module Shanty
VERSION = "0.0.7"
end
Bumped version to v0.0.8
module Shanty
VERSION = "0.0.8"
end
|
module Sheets
VERSION = "0.2.2"
end
version bump
module Sheets
VERSION = "0.2.4"
end
|
require 'pty'
require 'shellwords'
require 'fileutils'
require 'timeout'
module Shipit
class Command
MAX_READ = 64.kilobytes
Error = Class.new(StandardError)
Failed = Class.new(Error)
NotFound = Class.new(Error)
Denied = Class.new(Error)
attr_reader :out, :code, :chdir, :env, :args, :pid, :timeout
def initialize(*args, default_timeout: 5.minutes.to_i, env: {}, chdir:)
@args, options = parse_arguments(args)
@timeout = options['timeout'.freeze] || options[:timeout] || default_timeout
@env = env
@chdir = chdir.to_s
end
def to_s
@args.join(' ')
end
def interpolate_environment_variables(argument)
return argument.map { |a| interpolate_environment_variables(a) } if argument.is_a?(Array)
argument.gsub(/(\$\w+)/) do |variable|
variable.sub!('$', '')
Shellwords.escape(@env.fetch(variable) { ENV[variable] })
end
end
def success?
code == 0
end
def exit_message
"#{self} exited with status #{@code}"
end
def run
output = []
stream do |out|
output << out
end
output.join
end
def run!
output = []
stream! do |out|
output << out
end
output.join
end
def with_full_path
old_path = ENV['PATH']
ENV['PATH'] = "#{ENV['PATH']}:#{Shipit::Engine.root.join('lib', 'snippets')}"
yield
ensure
ENV['PATH'] = old_path
end
def interpolated_arguments
interpolate_environment_variables(@args)
end
def start
return if @started
child_in = @out = @pid = nil
FileUtils.mkdir_p(@chdir)
with_full_path do
begin
@out, child_in, @pid = PTY.spawn(@env, *interpolated_arguments, chdir: @chdir)
child_in.close
rescue Errno::ENOENT
raise NotFound, "#{Shellwords.split(interpolated_arguments.first).first}: command not found"
rescue Errno::EACCES
raise Denied, "#{Shellwords.split(interpolated_arguments.first).first}: Permission denied"
end
end
@started = true
self
end
def stream(&block)
start
begin
read_stream(@out, &block)
rescue Timeout::Error => error
@code = 'timeout'
yield red("No output received in the last #{timeout} seconds.") + "\n"
terminate!(&block)
raise error
rescue Errno::EIO # Somewhat expected on Linux: http://stackoverflow.com/a/10306782
end
_, status = Process.waitpid2(@pid)
@code = status.exitstatus
self
end
def check_status
end
def red(text)
"\033[1;31m#{text}\033[0m"
end
def stream!(&block)
stream(&block)
raise Failed.new(exit_message) unless success?
self
end
def read_stream(io)
loop do
with_timeout do
yield io.readpartial(MAX_READ)
end
end
rescue EOFError
end
def with_timeout(&block)
return yield unless timeout
Timeout.timeout(timeout, &block)
end
def terminate!(&block)
kill_and_wait('INT', 5, &block)
kill_and_wait('INT', 2, &block)
kill_and_wait('TERM', 5, &block)
kill_and_wait('TERM', 2, &block)
kill('KILL', &block)
rescue Errno::ECHILD
true # much success
ensure
begin
read_stream(@out, &block)
rescue
end
end
def kill_and_wait(sig, wait, &block)
kill(sig, &block)
Timeout.timeout(wait) do
read_stream(@out, &block)
end
rescue Timeout::Error
end
def kill(sig)
yield red("Sending SIG#{sig} to PID #{@pid}\n")
Process.kill(sig, @pid)
end
def parse_arguments(arguments)
options = {}
args = arguments.flatten.map do |argument|
case argument
when Hash
options.merge!(argument.values.first)
argument.keys.first
else
argument
end
end
return args, options
end
end
end
Remove dead code
require 'pty'
require 'shellwords'
require 'fileutils'
require 'timeout'
module Shipit
class Command
MAX_READ = 64.kilobytes
Error = Class.new(StandardError)
Failed = Class.new(Error)
NotFound = Class.new(Error)
Denied = Class.new(Error)
attr_reader :out, :code, :chdir, :env, :args, :pid, :timeout
def initialize(*args, default_timeout: 5.minutes.to_i, env: {}, chdir:)
@args, options = parse_arguments(args)
@timeout = options['timeout'.freeze] || options[:timeout] || default_timeout
@env = env
@chdir = chdir.to_s
end
def to_s
@args.join(' ')
end
def interpolate_environment_variables(argument)
return argument.map { |a| interpolate_environment_variables(a) } if argument.is_a?(Array)
argument.gsub(/(\$\w+)/) do |variable|
variable.sub!('$', '')
Shellwords.escape(@env.fetch(variable) { ENV[variable] })
end
end
def success?
code == 0
end
def exit_message
"#{self} exited with status #{@code}"
end
def run
output = []
stream do |out|
output << out
end
output.join
end
def run!
output = []
stream! do |out|
output << out
end
output.join
end
def with_full_path
old_path = ENV['PATH']
ENV['PATH'] = "#{ENV['PATH']}:#{Shipit::Engine.root.join('lib', 'snippets')}"
yield
ensure
ENV['PATH'] = old_path
end
def interpolated_arguments
interpolate_environment_variables(@args)
end
def start
return if @started
child_in = @out = @pid = nil
FileUtils.mkdir_p(@chdir)
with_full_path do
begin
@out, child_in, @pid = PTY.spawn(@env, *interpolated_arguments, chdir: @chdir)
child_in.close
rescue Errno::ENOENT
raise NotFound, "#{Shellwords.split(interpolated_arguments.first).first}: command not found"
rescue Errno::EACCES
raise Denied, "#{Shellwords.split(interpolated_arguments.first).first}: Permission denied"
end
end
@started = true
self
end
def stream(&block)
start
begin
read_stream(@out, &block)
rescue Timeout::Error => error
@code = 'timeout'
yield red("No output received in the last #{timeout} seconds.") + "\n"
terminate!(&block)
raise error
rescue Errno::EIO # Somewhat expected on Linux: http://stackoverflow.com/a/10306782
end
_, status = Process.waitpid2(@pid)
@code = status.exitstatus
self
end
def red(text)
"\033[1;31m#{text}\033[0m"
end
def stream!(&block)
stream(&block)
raise Failed.new(exit_message) unless success?
self
end
def read_stream(io)
loop do
with_timeout do
yield io.readpartial(MAX_READ)
end
end
rescue EOFError
end
def with_timeout(&block)
return yield unless timeout
Timeout.timeout(timeout, &block)
end
def terminate!(&block)
kill_and_wait('INT', 5, &block)
kill_and_wait('INT', 2, &block)
kill_and_wait('TERM', 5, &block)
kill_and_wait('TERM', 2, &block)
kill('KILL', &block)
rescue Errno::ECHILD
true # much success
ensure
begin
read_stream(@out, &block)
rescue
end
end
def kill_and_wait(sig, wait, &block)
kill(sig, &block)
Timeout.timeout(wait) do
read_stream(@out, &block)
end
rescue Timeout::Error
end
def kill(sig)
yield red("Sending SIG#{sig} to PID #{@pid}\n")
Process.kill(sig, @pid)
end
# Normalizes a mixed argument list into [args, options].
#
# +arguments+ may nest arrays and may contain single-pair Hashes of the
# form { argument => options_hash }: the pair's key joins the positional
# arguments while its value is merged into the returned options Hash.
def parse_arguments(arguments)
options = {}
args = arguments.flatten.map do |argument|
if argument.is_a?(Hash)
key, value = argument.first
options.merge!(value)
key
else
argument
end
end
[args, options]
end
end
end
|
# Shoppe's gem version string, exposed as a frozen constant.
module Shoppe
VERSION = '1.2'.freeze
end
Billing Address on orders view
# Shoppe's gem version string, exposed as a frozen constant.
module Shoppe
VERSION = '1.2.1'.freeze
end
|
require 'naturally'
require 'simctl/object'
module SimCtl
# A simulator runtime as reported by `simctl list runtimes`.
class Runtime < Object
attr_reader :availability, :buildversion, :identifier, :name, :version

# Runtimes are equal when their identifiers match; anything that is not
# a Runtime (including nil) compares unequal.
def ==(other)
other.is_a?(Runtime) && other.identifier == identifier
end

# Returns the latest available runtime
#
# @param type [String] type (ios, watchos, tvos)
# @return [SimCtl::Runtime] the latest available runtime
def self.latest(type)
candidates = SimCtl.list_runtimes.where(name: /#{type}/i)
Naturally.sort_by(candidates, :version).last
end
end
end
Add runtime.type
require 'naturally'
require 'simctl/object'
module SimCtl
# A simulator runtime as reported by `simctl list runtimes`.
class Runtime < Object
attr_reader :availability, :buildversion, :identifier, :name, :type, :version

# OS family of this runtime (:ios, :watchos, :tvos), derived from the
# first word of #name. Memoized after the first call.
def type
@type ||= name.split(' ').first.downcase.to_sym
end

# Runtimes are equal when their identifiers match; anything that is not
# a Runtime (including nil) compares unequal.
def ==(other)
other.is_a?(Runtime) && other.identifier == identifier
end

# Returns the latest available runtime
#
# @param type [String] type (ios, watchos, tvos)
# @return [SimCtl::Runtime] the latest available runtime
def self.latest(type)
candidates = SimCtl.list_runtimes.where(name: /#{type}/i)
Naturally.sort_by(candidates, :version).last
end
end
end
|
# Unused stub class (removed upstream as dead code per the commit note).
class SkyRunner::Exec
# No-op placeholder; returns nil.
def exec_async
end
end
Remove unused file
|
# This file is part of ruby-snowmath.
# Copyright (c) 2013 Noel Raymond Cower. All rights reserved.
# See COPYING for license details.
require 'snow-math'
module Snow
#
# Provides basic support for converting Snow math objects to Ruby arrays. In
# addition, it also provides rudimentary support for each, map, and map! for
# all math types.
#
# For example:
#
# # Arrays of cells by column
# Mat4[*(1 .. 16)].group_by {
# |cell|
# (cell.floor - 1) % 4
# }
#
# # Arrays of cells by row
# Mat4[*(1 .. 16)].group_by {
# |cell|
# ((cell - 1) / 4).floor
# } # => { 0 => [1, 2, 3, 4],
# # 1 => [5, 6, 7, 8],
# # 2 => [9, 10, 11, 12],
# # 3 => [13, 14, 15, 16] }
#
# Note that these examples are only showing that you can use these types like
# most others that include the Enumerable module. The above examples are not
# sane ways to get columns or rows out of a Mat4.
#
module ArraySupport
include ::Enumerable

# Builds a plain Ruby Array holding every element of self, in index order.
#
# call-seq: to_a -> new_ary
def to_a
Array.new(self.length) { |index| fetch(index) }
end

# Yields each element in index order and returns self. Without a block,
# returns an Enumerator.
#
# call-seq:
#   each { |elem| block } -> self
#   each -> Enumerator
def each
return to_enum(:each) unless block_given?
self.length.times { |index| yield(fetch(index)) }
self
end

# Replaces each element in place with the block's return value and
# returns self. The block must return the same kind of object it was
# given (Vec3 in, Vec3 out; Numeric in, Numeric out). Without a block,
# returns an Enumerator.
#
# call-seq:
#   map! { |elem| block } -> self
#   map! -> Enumerator
def map!
return to_enum(:map!) unless block_given?
self.length.times { |index| store(index, yield(fetch(index))) }
self
end

# Non-destructive form of map!: duplicates self and maps the duplicate
# in place. Same block contract as map!. Without a block, returns an
# Enumerator.
#
# call-seq:
#   map { |elem| block } -> new object
#   map -> Enumerator
def map(&block)
return to_enum(:map) unless block_given?
dup.map!(&block)
end
end
# Mix the Enumerable-style helpers into each scalar math type provided by
# the C extension.
class Vec2 ; include ::Snow::ArraySupport ; end
class Vec3 ; include ::Snow::ArraySupport ; end
class Vec4 ; include ::Snow::ArraySupport ; end
class Quat ; include ::Snow::ArraySupport ; end
class Mat3 ; include ::Snow::ArraySupport ; end
class Mat4 ; include ::Snow::ArraySupport ; end
# The typed-array classes only exist when the C extension was built with
# them, so each reopen is guarded by const_defined?.
if const_defined?(:Vec2Array)
class Vec2Array
include ::Snow::ArraySupport
#
# Duplicates the Vec2Array and returns it.
#
# call-seq: dup -> new vec2_array
#
def dup
self.class.new(self)
end
end
end
if const_defined?(:Vec3Array)
class Vec3Array
include ::Snow::ArraySupport
#
# Duplicates the Vec3Array and returns it.
#
# call-seq: dup -> new vec3_array
#
def dup
self.class.new(self)
end
end
end
if const_defined?(:Vec4Array)
class Vec4Array
include ::Snow::ArraySupport
#
# Duplicates the Vec4Array and returns it.
#
# call-seq: dup -> new vec4_array
#
def dup
self.class.new(self)
end
end
end
if const_defined?(:QuatArray)
class QuatArray
include ::Snow::ArraySupport
#
# Duplicates the QuatArray and returns it.
#
# call-seq: dup -> new quat_array
#
def dup
self.class.new(self)
end
end
end
if const_defined?(:Mat3Array)
class Mat3Array
include ::Snow::ArraySupport
#
# Duplicates the Mat3Array and returns it.
#
# call-seq: dup -> new mat3_array
#
def dup
self.class.new(self)
end
end
end
if const_defined?(:Mat4Array)
class Mat4Array
include ::Snow::ArraySupport
#
# Duplicates the Mat4Array and returns it.
#
# call-seq: dup -> new mat4_array
#
def dup
self.class.new(self)
end
end
end
end
Alias dup to clone for arrays.
# This file is part of ruby-snowmath.
# Copyright (c) 2013 Noel Raymond Cower. All rights reserved.
# See COPYING for license details.
require 'snow-math'
module Snow
#
# Provides basic support for converting Snow math objects to Ruby arrays. In
# addition, it also provides rudimentary support for each, map, and map! for
# all math types.
#
# For example:
#
# # Arrays of cells by column
# Mat4[*(1 .. 16)].group_by {
# |cell|
# (cell.floor - 1) % 4
# }
#
# # Arrays of cells by row
# Mat4[*(1 .. 16)].group_by {
# |cell|
# ((cell - 1) / 4).floor
# } # => { 0 => [1, 2, 3, 4],
# # 1 => [5, 6, 7, 8],
# # 2 => [9, 10, 11, 12],
# # 3 => [13, 14, 15, 16] }
#
# Note that these examples are only showing that you can use these types like
# most others that include the Enumerable module. The above examples are not
# sane ways to get columns or rows out of a Mat4.
#
module ArraySupport
include ::Enumerable

# Builds a plain Ruby Array holding every element of self, in index order.
#
# call-seq: to_a -> new_ary
def to_a
Array.new(self.length) { |index| fetch(index) }
end

# Yields each element in index order and returns self. Without a block,
# returns an Enumerator.
#
# call-seq:
#   each { |elem| block } -> self
#   each -> Enumerator
def each
return to_enum(:each) unless block_given?
self.length.times { |index| yield(fetch(index)) }
self
end

# Replaces each element in place with the block's return value and
# returns self. The block must return the same kind of object it was
# given (Vec3 in, Vec3 out; Numeric in, Numeric out). Without a block,
# returns an Enumerator.
#
# call-seq:
#   map! { |elem| block } -> self
#   map! -> Enumerator
def map!
return to_enum(:map!) unless block_given?
self.length.times { |index| store(index, yield(fetch(index))) }
self
end

# Non-destructive form of map!: duplicates self and maps the duplicate
# in place. Same block contract as map!. Without a block, returns an
# Enumerator.
#
# call-seq:
#   map { |elem| block } -> new object
#   map -> Enumerator
def map(&block)
return to_enum(:map) unless block_given?
dup.map!(&block)
end
end
# Mix the Enumerable-style helpers into each scalar math type provided by
# the C extension.
class Vec2 ; include ::Snow::ArraySupport ; end
class Vec3 ; include ::Snow::ArraySupport ; end
class Vec4 ; include ::Snow::ArraySupport ; end
class Quat ; include ::Snow::ArraySupport ; end
class Mat3 ; include ::Snow::ArraySupport ; end
class Mat4 ; include ::Snow::ArraySupport ; end
# The typed-array classes only exist when the C extension was built with
# them, so each reopen is guarded by const_defined?. clone is aliased to
# dup so both copying entry points produce a fresh backing array.
if const_defined?(:Vec2Array)
class Vec2Array
include ::Snow::ArraySupport
#
# Duplicates the Vec2Array and returns it.
#
# call-seq: dup -> new vec2_array
#
def dup
self.class.new(self)
end
alias_method :clone, :dup
end
end
if const_defined?(:Vec3Array)
class Vec3Array
include ::Snow::ArraySupport
#
# Duplicates the Vec3Array and returns it.
#
# call-seq: dup -> new vec3_array
#
def dup
self.class.new(self)
end
alias_method :clone, :dup
end
end
if const_defined?(:Vec4Array)
class Vec4Array
include ::Snow::ArraySupport
#
# Duplicates the Vec4Array and returns it.
#
# call-seq: dup -> new vec4_array
#
def dup
self.class.new(self)
end
alias_method :clone, :dup
end
end
if const_defined?(:QuatArray)
class QuatArray
include ::Snow::ArraySupport
#
# Duplicates the QuatArray and returns it.
#
# call-seq: dup -> new quat_array
#
def dup
self.class.new(self)
end
alias_method :clone, :dup
end
end
if const_defined?(:Mat3Array)
class Mat3Array
include ::Snow::ArraySupport
#
# Duplicates the Mat3Array and returns it.
#
# call-seq: dup -> new mat3_array
#
def dup
self.class.new(self)
end
alias_method :clone, :dup
end
end
if const_defined?(:Mat4Array)
class Mat4Array
include ::Snow::ArraySupport
#
# Duplicates the Mat4Array and returns it.
#
# call-seq: dup -> new mat4_array
#
def dup
self.class.new(self)
end
alias_method :clone, :dup
end
end
end
|
# encoding: utf-8
require "sockjs/buffer"
module SockJS
# Base class for SockJS transport adapters. Subclasses register
# themselves (see .inherited) and are matched by URL prefix + HTTP method.
class Adapter
# Response Content-Type per payload flavour; ||= avoids clobbering an
# already-set constant when the file is reloaded.
CONTENT_TYPES ||= {
plain: "text/plain; charset=UTF-8",
html: "text/html; charset=UTF-8",
javascript: "application/javascript; charset=UTF-8",
event_stream: "text/event-stream; charset=UTF-8"
}
class << self
attr_accessor :prefix, :method, :subclasses, :session_class
end
# Class-level defaults; subclasses inherit copies via .inherited below.
self.method ||= "GET"
self.subclasses ||= Array.new
# NOTE(review): "SessionWitchCachedMessages" looks like a typo for
# "SessionWithCachedMessages" — verify against the defining file before renaming.
self.session_class ||= SessionWitchCachedMessages
# TODO: refactor the following two methods: just find the prefix and check the method later on, so we won't need the second method at all.
# Finds the first registered adapter matching both prefix and HTTP method.
def self.handler(prefix, method)
self.subclasses.find do |handler|
handler.prefix === prefix && handler.method == method
end
end
# Finds an adapter matching the prefix but NOT the method, so the caller
# can answer 405 Method Not Allowed.
def self.match_handler_for_http_405(prefix, method)
self.subclasses.find do |handler|
handler.prefix === prefix && handler.method != method
end
end
# Registers every subclass and copies the class-level configuration down.
def self.inherited(subclass)
Adapter.subclasses << subclass
subclass.method = self.method
subclass.prefix = self.prefix
subclass.session_class = self.session_class
end
# Instance methods.
attr_reader :connection, :options, :buffer
def initialize(connection, options)
@connection, @options, @buffer = connection, options, Buffer.new
end
# True when this transport class appears in options[:disabled_transports].
def disabled?
disabled_transports = @options[:disabled_transports] || Array.new
return disabled_transports.include?(self.class)
end
# TODO: Make it use the adapter user uses.
def response_class
SockJS::Thin::Response
end
# Memoizes the response object for this request.
def response(*args, &block)
@response ||= self.response_class.new(*args, &block)
end
# Writes head + optional body and finishes the response in one go.
def write_response(request, status, headers, body, &block)
self.response(request, status, headers, &block)
@response.write_head
@response.write(body) unless body.nil?
@response.finish
return @response
end
# Frames a payload for the wire; the base framing just appends a newline.
def format_frame(payload)
"#{payload}\n"
end
# Buffers an outgoing frame.
# NOTE(review): this overrides Object#send; also format_frame accepts a
# single argument, so any extra *args here would raise ArgumentError —
# confirm callers never pass extras.
def send(data, *args)
@buffer << self.format_frame(data, *args)
end
# Flushes the buffered frames and closes the response.
def finish
@response.finish(@buffer.to_frame)
end
# 1) There's no session -> create it. AND CONTINUE
# 2) There's a session:
#    a) It's closing -> Send c[3000,"Go away!"] AND END
#    b) It's open:
#       i) There IS NOT any consumer -> OK. AND CONTINUE
#       i) There IS a consumer -> Send c[2010,"Another con still open"] AND END
# TODO: use this method
def get_session(request, response, preamble = nil)
match = request.path_info.match(self.class.prefix)
if session = self.connection.sessions[match[1]]
if session.closing?
session.close
return nil
elsif session.open? && session.response.nil?
return session
elsif session.open? && session.response
session.close(2010, "Another connection still open")
return nil
end
else
response.write(preamble) if preamble
session = self.connection.create_session(match[1], self)
session.open!
return session
end
end
# Resolves/creates the session, then starts polling its buffer.
def try_timer_if_valid(request, response, preamble = nil)
session = self.get_session(request, response, preamble)
self.init_timer(response, session, 0.1) if session
end
# Polls the session buffer every +interval+ seconds, writing formatted
# frames to the response; cancels itself and finishes the response when
# a close frame ("c...") comes through. Empty heartbeats ("a[]") are
# suppressed.
# NOTE(review): String#chomp! returns nil when there is no trailing
# newline, in which case format_frame would emit just "\n" — confirm
# process_buffer always yields newline-terminated data.
def init_timer(response, session, interval)
timer = EM::PeriodicTimer.new(interval) do
if data = session.process_buffer
response_data = format_frame(data.chomp!)
puts "~ Responding with #{response_data.inspect}"
response.write(response_data) unless data == "a[]\n" # FIXME
if data[0] == "c" # close frame. TODO: Do this by raising an exception or something, this is a mess :o Actually ... do we need here some 5s timeout as well?
timer.cancel
response.finish
end
end
end
end
end
end
SockJS::Adapter#handle, so we can use super to access response and session easily.
# encoding: utf-8
require "sockjs/buffer"
module SockJS
# Base class for SockJS transport adapters. Subclasses register
# themselves (see .inherited) and are matched by URL prefix + HTTP method.
class Adapter
# Response Content-Type per payload flavour; ||= avoids clobbering an
# already-set constant when the file is reloaded.
CONTENT_TYPES ||= {
plain: "text/plain; charset=UTF-8",
html: "text/html; charset=UTF-8",
javascript: "application/javascript; charset=UTF-8",
event_stream: "text/event-stream; charset=UTF-8"
}
class << self
attr_accessor :prefix, :method, :subclasses, :session_class
end
# Class-level defaults; subclasses inherit copies via .inherited below.
self.method ||= "GET"
self.subclasses ||= Array.new
# NOTE(review): "SessionWitchCachedMessages" looks like a typo for
# "SessionWithCachedMessages" — verify against the defining file before renaming.
self.session_class ||= SessionWitchCachedMessages
# TODO: refactor the following two methods: just find the prefix and check the method later on, so we won't need the second method at all.
# Finds the first registered adapter matching both prefix and HTTP method.
def self.handler(prefix, method)
self.subclasses.find do |handler|
handler.prefix === prefix && handler.method == method
end
end
# Finds an adapter matching the prefix but NOT the method, so the caller
# can answer 405 Method Not Allowed.
def self.match_handler_for_http_405(prefix, method)
self.subclasses.find do |handler|
handler.prefix === prefix && handler.method != method
end
end
# Registers every subclass and copies the class-level configuration down.
def self.inherited(subclass)
Adapter.subclasses << subclass
subclass.method = self.method
subclass.prefix = self.prefix
subclass.session_class = self.session_class
end
# Instance methods.
attr_reader :connection, :options, :buffer
def initialize(connection, options)
@connection, @options, @buffer = connection, options, Buffer.new
end
# True when this transport class appears in options[:disabled_transports].
def disabled?
disabled_transports = @options[:disabled_transports] || Array.new
return disabled_transports.include?(self.class)
end
# TODO: Make it use the adapter user uses.
def response_class
SockJS::Thin::Response
end
# Memoizes the response object for this request.
def response(*args, &block)
@response ||= self.response_class.new(*args, &block)
end
# Writes head + optional body and finishes the response in one go.
def write_response(request, status, headers, body, &block)
self.response(request, status, headers, &block)
@response.write_head
@response.write(body) unless body.nil?
@response.finish
return @response
end
# Frames a payload for the wire; the base framing just appends a newline.
def format_frame(payload)
"#{payload}\n"
end
# Buffers an outgoing frame.
# NOTE(review): this overrides Object#send; also format_frame accepts a
# single argument, so any extra *args here would raise ArgumentError —
# confirm callers never pass extras.
def send(data, *args)
@buffer << self.format_frame(data, *args)
end
# Flushes the buffered frames and closes the response.
def finish
@response.finish(@buffer.to_frame)
end
# Common entry point for transport handlers: builds the response, tags
# it with the request's session id, resolves the session and hands both
# to the block (subclasses can call this via super).
def handle(request, status, &block)
response = self.response(request, status)
response.set_session_id(request.session_id)
session = self.get_session(request, response) # TODO: preamble
block.call(response, session)
end
# 1) There's no session -> create it. AND CONTINUE
# 2) There's a session:
#    a) It's closing -> Send c[3000,"Go away!"] AND END
#    b) It's open:
#       i) There IS NOT any consumer -> OK. AND CONTINUE
#       i) There IS a consumer -> Send c[2010,"Another con still open"] AND END
def get_session(request, response, preamble = nil)
match = request.path_info.match(self.class.prefix)
if session = self.connection.sessions[match[1]]
if session.closing?
session.close
return nil
elsif session.open? && session.response.nil?
return session
elsif session.open? && session.response
session.close(2010, "Another connection still open")
return nil
end
else
response.write(preamble) if preamble
session = self.connection.create_session(match[1], self)
session.open!
return session
end
end
# Resolves/creates the session, then starts polling its buffer.
def try_timer_if_valid(request, response, preamble = nil)
session = self.get_session(request, response, preamble)
self.init_timer(response, session, 0.1) if session
end
# Polls the session buffer every +interval+ seconds, writing formatted
# frames to the response; cancels itself and finishes the response when
# a close frame ("c...") comes through. Empty heartbeats ("a[]") are
# suppressed.
# NOTE(review): String#chomp! returns nil when there is no trailing
# newline, in which case format_frame would emit just "\n" — confirm
# process_buffer always yields newline-terminated data.
def init_timer(response, session, interval)
timer = EM::PeriodicTimer.new(interval) do
if data = session.process_buffer
response_data = format_frame(data.chomp!)
puts "~ Responding with #{response_data.inspect}"
response.write(response_data) unless data == "a[]\n" # FIXME
if data[0] == "c" # close frame. TODO: Do this by raising an exception or something, this is a mess :o Actually ... do we need here some 5s timeout as well?
timer.cancel
response.finish
end
end
end
end
end
end
|
require 'fileutils'
# Git plumbing for a deployable stack: keeps a local clone of the stack's
# repository up to date under @stack.git_path.
class StackCommands < Commands
# @param stack the stack whose repository is managed (provides
#   #git_path, #branch, #repo_git_url, #deploys_path).
def initialize(stack)
@stack = stack
end

# Fetches the stack's branch, cloning the repository first if no local
# clone exists yet. Dir.exist? replaces the deprecated Dir.exists?
# (removed in Ruby 3.2).
def fetch
create_directories
if Dir.exist?(@stack.git_path)
git('fetch', 'origin', @stack.branch, env: env, chdir: @stack.git_path)
else
git('clone', '--single-branch', '--branch', @stack.branch, @stack.repo_git_url, @stack.git_path, env: env, chdir: @stack.deploys_path)
end
end

# Ensures the deploys directory tree exists before any git operation.
def create_directories
FileUtils.mkdir_p(@stack.deploys_path)
end
end
Remove single branch
require 'fileutils'
class StackCommands < Commands
def initialize(stack)
@stack = stack
end
def fetch
create_directories
if Dir.exists?(@stack.git_path)
git('fetch', 'origin', @stack.branch, env: env, chdir: @stack.git_path)
else
git('clone', '--branch', @stack.branch, @stack.repo_git_url, @stack.git_path, env: env, chdir: @stack.deploys_path)
end
end
def create_directories
FileUtils.mkdir_p(@stack.deploys_path)
end
end
|
require 'stax/aws/rds'
module Stax
# Registers the `rds` subcommand on any Thor CLI that includes this module.
module Rds
def self.included(thor)
thor.desc('rds COMMAND', 'RDS subcommands')
thor.subcommand(:rds, Cmd::Rds)
end
end
module Cmd
# Thor subcommand exposing RDS inspection/ops for the current stack.
class Rds < SubCommand
# Sections surfaced by the top-level `stax info` command.
stax_info :clusters, :instances, :endpoints
# Status values rendered green when healthy/in-sync/complete.
COLORS = {
available: :green,
'in-sync': :green,
Complete: :green,
Active: :green,
}
no_commands do
# CloudFormation DBCluster resources belonging to this stack.
def stack_db_clusters
Aws::Cfn.resources_by_type(my.stack_name, 'AWS::RDS::DBCluster')
end
# CloudFormation DBInstance resources belonging to this stack.
def stack_db_instances
Aws::Cfn.resources_by_type(my.stack_name, 'AWS::RDS::DBInstance')
end
# RDS descriptions for the stack's DBCluster resources.
def stack_rds_clusters
filter = { name: 'db-cluster-id', values: stack_db_clusters.map(&:physical_resource_id) }
Aws::Rds.clusters(filters: [filter])
end
# RDS descriptions for the stack's DBInstance resources.
def stack_rds_instances
filter = { name: 'db-instance-id', values: stack_db_instances.map(&:physical_resource_id) }
Aws::Rds.instances(filters: [filter])
end
# CloudFormation DBSubnetGroup resources belonging to this stack.
def stack_db_subnet_groups
Aws::Cfn.resources_by_type(my.stack_name, 'AWS::RDS::DBSubnetGroup')
end
end
desc 'ls', 'list clusters with members'
# One row per cluster followed by indented rows for its member instances.
# NOTE(review): color() is called with one argument here but with
# (value, COLORS) in #clusters/#instances below — confirm which signature
# the color helper expects.
def ls
debug("RDS databases for #{my.stack_name}")
stack_rds_clusters.map do |c|
cluster = [ c.db_cluster_identifier, 'cluster', color(c.status), c.engine ]
instances = c.db_cluster_members.map do |m|
role = m.is_cluster_writer ? 'writer' : 'reader'
i = Aws::Rds.instances(filters: [ { name: 'db-instance-id', values: [ m.db_instance_identifier ] } ]).first
[ '- ' + i.db_instance_identifier, role, color(i.db_instance_status), i.engine, i.availability_zone, i.db_instance_class ]
end
[ cluster ] + instances
end.flatten(1).tap do |list|
print_table list
end
end
desc 'clusters', 'list db clusters for stack'
def clusters
debug("RDS DB clusters for #{my.stack_name}")
print_table stack_rds_clusters.map { |c|
[c.db_cluster_identifier, c.engine, c.engine_version, color(c.status, COLORS), c.cluster_create_time]
}
end
desc 'members', 'list db cluster members for stack'
def members
stack_rds_clusters.each do |c|
debug("RDS DB members for cluster #{c.db_cluster_identifier}")
print_table c.db_cluster_members.map { |m|
role = m.is_cluster_writer ? 'writer' : 'reader'
[m.db_instance_identifier, role, m.db_cluster_parameter_group_status]
}
end
end
desc 'instances', 'list db instances for stack'
def instances
debug("RDS DB instances for #{my.stack_name}")
print_table stack_rds_instances.map { |i|
[i.db_instance_identifier, i.engine, i.engine_version, color(i.db_instance_status, COLORS), i.db_instance_class, i.db_subnet_group&.vpc_id, i.availability_zone]
}
end
desc 'endpoints', 'list db instance endpoints'
# Writer/reader endpoints per cluster, then per-instance endpoints.
def endpoints
stack_rds_clusters.each do |c|
debug("RDS DB endpoints for cluster #{c.db_cluster_identifier}")
print_table [
['writer', c.endpoint, c.port, c.hosted_zone_id],
['reader', c.reader_endpoint, c.port, c.hosted_zone_id],
]
end
debug("RDS DB instance endpoints for #{my.stack_name}")
print_table stack_rds_instances.map { |i|
[i.db_instance_identifier, i.endpoint&.address, i.endpoint&.port, i.endpoint&.hosted_zone_id]
}
end
desc 'subnets', 'list db subnet groups'
def subnets
stack_db_subnet_groups.map do |r|
Aws::Rds.subnet_groups(db_subnet_group_name: r.physical_resource_id)
end.flatten.each do |g|
debug("Subnets for group #{g.db_subnet_group_name}")
print_table g.subnets.map { |s|
[s&.subnet_availability_zone&.name, s&.subnet_identifier, color(s&.subnet_status, COLORS)]
}
end
end
desc 'failover', 'failover clusters'
# NOTE(review): Thor's method_option takes :desc, not :description — the
# help text below may be silently ignored; verify against Thor docs.
method_option :target, type: :string, default: nil, description: 'id of instance to promote'
# Interactively fails over each cluster, optionally promoting --target.
def failover
stack_rds_clusters.each do |c|
if yes?("Failover #{c.db_cluster_identifier}?", :yellow)
resp = Aws::Rds.client.failover_db_cluster(db_cluster_identifier: c.db_cluster_identifier, target_db_instance_identifier: options[:target])
puts "failing over #{resp.db_cluster.db_cluster_identifier}"
end
end
end
end
end
end
use new version of colors method
require 'stax/aws/rds'
module Stax
# Registers the `rds` subcommand on any Thor CLI that includes this module.
module Rds
def self.included(thor)
thor.desc('rds COMMAND', 'RDS subcommands')
thor.subcommand(:rds, Cmd::Rds)
end
end
module Cmd
# Thor subcommand exposing RDS inspection/ops for the current stack.
class Rds < SubCommand
# Sections surfaced by the top-level `stax info` command.
stax_info :clusters, :instances, :endpoints
# Status values rendered green when healthy/in-sync/complete.
# NOTE(review): COLORS is no longer passed to color() anywhere in this
# version — presumably the helper now picks it up implicitly; confirm.
COLORS = {
available: :green,
'in-sync': :green,
Complete: :green,
Active: :green,
}
no_commands do
# CloudFormation DBCluster resources belonging to this stack.
def stack_db_clusters
Aws::Cfn.resources_by_type(my.stack_name, 'AWS::RDS::DBCluster')
end
# CloudFormation DBInstance resources belonging to this stack.
def stack_db_instances
Aws::Cfn.resources_by_type(my.stack_name, 'AWS::RDS::DBInstance')
end
# RDS descriptions for the stack's DBCluster resources.
def stack_rds_clusters
filter = { name: 'db-cluster-id', values: stack_db_clusters.map(&:physical_resource_id) }
Aws::Rds.clusters(filters: [filter])
end
# RDS descriptions for the stack's DBInstance resources.
def stack_rds_instances
filter = { name: 'db-instance-id', values: stack_db_instances.map(&:physical_resource_id) }
Aws::Rds.instances(filters: [filter])
end
# CloudFormation DBSubnetGroup resources belonging to this stack.
def stack_db_subnet_groups
Aws::Cfn.resources_by_type(my.stack_name, 'AWS::RDS::DBSubnetGroup')
end
end
desc 'ls', 'list clusters with members'
# One row per cluster followed by indented rows for its member instances.
def ls
debug("RDS databases for #{my.stack_name}")
stack_rds_clusters.map do |c|
cluster = [ c.db_cluster_identifier, 'cluster', color(c.status), c.engine ]
instances = c.db_cluster_members.map do |m|
role = m.is_cluster_writer ? 'writer' : 'reader'
i = Aws::Rds.instances(filters: [ { name: 'db-instance-id', values: [ m.db_instance_identifier ] } ]).first
[ '- ' + i.db_instance_identifier, role, color(i.db_instance_status), i.engine, i.availability_zone, i.db_instance_class ]
end
[ cluster ] + instances
end.flatten(1).tap do |list|
print_table list
end
end
desc 'clusters', 'list db clusters for stack'
def clusters
debug("RDS DB clusters for #{my.stack_name}")
print_table stack_rds_clusters.map { |c|
[c.db_cluster_identifier, c.engine, c.engine_version, color(c.status), c.cluster_create_time]
}
end
desc 'members', 'list db cluster members for stack'
def members
stack_rds_clusters.each do |c|
debug("RDS DB members for cluster #{c.db_cluster_identifier}")
print_table c.db_cluster_members.map { |m|
role = m.is_cluster_writer ? 'writer' : 'reader'
[m.db_instance_identifier, role, m.db_cluster_parameter_group_status]
}
end
end
desc 'instances', 'list db instances for stack'
def instances
debug("RDS DB instances for #{my.stack_name}")
print_table stack_rds_instances.map { |i|
[i.db_instance_identifier, i.engine, i.engine_version, color(i.db_instance_status), i.db_instance_class, i.db_subnet_group&.vpc_id, i.availability_zone]
}
end
desc 'endpoints', 'list db instance endpoints'
# Writer/reader endpoints per cluster, then per-instance endpoints.
def endpoints
stack_rds_clusters.each do |c|
debug("RDS DB endpoints for cluster #{c.db_cluster_identifier}")
print_table [
['writer', c.endpoint, c.port, c.hosted_zone_id],
['reader', c.reader_endpoint, c.port, c.hosted_zone_id],
]
end
debug("RDS DB instance endpoints for #{my.stack_name}")
print_table stack_rds_instances.map { |i|
[i.db_instance_identifier, i.endpoint&.address, i.endpoint&.port, i.endpoint&.hosted_zone_id]
}
end
desc 'subnets', 'list db subnet groups'
def subnets
stack_db_subnet_groups.map do |r|
Aws::Rds.subnet_groups(db_subnet_group_name: r.physical_resource_id)
end.flatten.each do |g|
debug("Subnets for group #{g.db_subnet_group_name}")
print_table g.subnets.map { |s|
[s&.subnet_availability_zone&.name, s&.subnet_identifier, color(s&.subnet_status)]
}
end
end
desc 'failover', 'failover clusters'
# NOTE(review): Thor's method_option takes :desc, not :description — the
# help text below may be silently ignored; verify against Thor docs.
method_option :target, type: :string, default: nil, description: 'id of instance to promote'
# Interactively fails over each cluster, optionally promoting --target.
def failover
stack_rds_clusters.each do |c|
if yes?("Failover #{c.db_cluster_identifier}?", :yellow)
resp = Aws::Rds.client.failover_db_cluster(db_cluster_identifier: c.db_cluster_identifier, target_db_instance_identifier: options[:target])
puts "failing over #{resp.db_cluster.db_cluster_identifier}"
end
end
end
end
end
end
|
require 'rubygems/package'
require 'fileutils'
require 'tempfile'
require 'zlib'
module Stove
# Builds the gzipped tarball that gets uploaded to the Supermarket.
class Packager
include Logify
# Globs (relative to the cookbook root) allowed into the tarball.
ACCEPTABLE_FILES = [
'README.*',
'CHANGELOG.*',
'metadata.json',
'attributes/*.rb',
'definitions/*.rb',
'files/**/*',
'libraries/*.rb',
'providers/*.rb',
'recipes/*.rb',
'resources/*.rb',
'templates/**/*',
].freeze
ACCEPTABLE_FILES_LIST = ACCEPTABLE_FILES.join(',').freeze
# Editor droppings (vim swapfiles, backup~ files) excluded from packaging.
TMP_FILES = [
/^(?:.*[\\\/])?\.[^\\\/]+\.sw[p-z]$/,
/~$/,
].freeze
# The cookbook to package.
#
# @return [Stove::Cookbook]
attr_reader :cookbook
# Create a new packager instance.
#
# @param [Stove::Cookbook]
#   the cookbook to package
def initialize(cookbook)
@cookbook = cookbook
end
# A map from physical file path to tarball file path
#
# @example
#   # Assuming +cookbook.name+ is 'apt'
#
#   {
#     '/home/user/apt-cookbook/metadata.json' => 'apt/metadata.json',
#     '/home/user/apt-cookbook/README.md' => 'apt/README.md'
#   }
#
# @return [Hash<String, String>]
#   the map of file paths
def packaging_slip
root = File.expand_path(cookbook.path)
path = File.join(root, "{#{ACCEPTABLE_FILES_LIST}}")
# NOTE(review): the block parameter +path+ below shadows the local above.
Dir.glob(path, File::FNM_DOTMATCH)
.reject { |path| %w(. ..).include?(File.basename(path)) }
.reject { |path| TMP_FILES.any? { |regex| path.match(regex) } }
.map { |path| [path, path.sub(/^#{Regexp.escape(root)}/, cookbook.name)] }
.reduce({}) do |map, (cookbook_file, tarball_file)|
map[cookbook_file] = tarball_file
map
end
end
# Builds a gzipped tarball of the cookbook and returns it as a rewound
# Tempfile. metadata.json is generated on the fly and always removed in
# the ensure clause.
# NOTE(review): the Tempfile is not opened in binary mode here; on
# Windows this risks newline translation corrupting the archive (fixed
# upstream in sethvargo/stove#66).
def tarball
# Generate the metadata.json on the fly
metadata_json = File.join(cookbook.path, 'metadata.json')
File.open(metadata_json, 'wb') do |file|
file.write(cookbook.metadata.to_json)
end
io = tar(File.dirname(cookbook.path), packaging_slip)
tgz = gzip(io)
tempfile = Tempfile.new([cookbook.name, '.tar.gz'])
while buffer = tgz.read(1024)
tempfile.write(buffer)
end
tempfile.rewind
tempfile
ensure
if defined?(metadata_json)
File.delete(metadata_json)
end
end
#
# Create a tar file from the given root and packaging slip
#
# @param [String] root
#   the root where the tar files are being created
# @param [Hash<String, String>] slip
#   the map from physical file path to tarball file path
#
# @return [StringIO]
#   the io object that contains the tarball contents
#
def tar(root, slip)
io = StringIO.new('')
Gem::Package::TarWriter.new(io) do |tar|
slip.each do |original_file, tarball_file|
mode = File.stat(original_file).mode
if File.directory?(original_file)
tar.mkdir(tarball_file, mode)
else
tar.add_file(tarball_file, mode) do |tf|
File.open(original_file, 'rb') { |f| tf.write(f.read) }
end
end
end
end
io.rewind
io
end
#
# GZip the given IO object (like a File or StringIO).
#
# @param [IO] io
#   the io object to gzip
#
# @return [IO]
#   the gzipped IO object
#
def gzip(io)
gz = StringIO.new('')
z = Zlib::GzipWriter.new(gz)
z.write(io.string)
z.close
# z was closed to write the gzip footer, so
# now we need a new StringIO
StringIO.new(gz.string)
end
end
end
Prevent line-ending modification
See https://github.com/sethvargo/stove/issues/66
require 'rubygems/package'
require 'fileutils'
require 'tempfile'
require 'zlib'
module Stove
# Builds the gzipped tarball that gets uploaded to the Supermarket.
class Packager
include Logify
# Globs (relative to the cookbook root) allowed into the tarball.
ACCEPTABLE_FILES = [
'README.*',
'CHANGELOG.*',
'metadata.json',
'attributes/*.rb',
'definitions/*.rb',
'files/**/*',
'libraries/*.rb',
'providers/*.rb',
'recipes/*.rb',
'resources/*.rb',
'templates/**/*',
].freeze
ACCEPTABLE_FILES_LIST = ACCEPTABLE_FILES.join(',').freeze
# Editor droppings (vim swapfiles, backup~ files) excluded from packaging.
TMP_FILES = [
/^(?:.*[\\\/])?\.[^\\\/]+\.sw[p-z]$/,
/~$/,
].freeze
# The cookbook to package.
#
# @return [Stove::Cookbook]
attr_reader :cookbook
# Create a new packager instance.
#
# @param [Stove::Cookbook]
#   the cookbook to package
def initialize(cookbook)
@cookbook = cookbook
end
# A map from physical file path to tarball file path
#
# @example
#   # Assuming +cookbook.name+ is 'apt'
#
#   {
#     '/home/user/apt-cookbook/metadata.json' => 'apt/metadata.json',
#     '/home/user/apt-cookbook/README.md' => 'apt/README.md'
#   }
#
# @return [Hash<String, String>]
#   the map of file paths
def packaging_slip
root = File.expand_path(cookbook.path)
path = File.join(root, "{#{ACCEPTABLE_FILES_LIST}}")
# NOTE(review): the block parameter +path+ below shadows the local above.
Dir.glob(path, File::FNM_DOTMATCH)
.reject { |path| %w(. ..).include?(File.basename(path)) }
.reject { |path| TMP_FILES.any? { |regex| path.match(regex) } }
.map { |path| [path, path.sub(/^#{Regexp.escape(root)}/, cookbook.name)] }
.reduce({}) do |map, (cookbook_file, tarball_file)|
map[cookbook_file] = tarball_file
map
end
end
# Builds a gzipped tarball of the cookbook and returns it as a rewound
# Tempfile. metadata.json is generated on the fly and always removed in
# the ensure clause.
def tarball
# Generate the metadata.json on the fly
metadata_json = File.join(cookbook.path, 'metadata.json')
File.open(metadata_json, 'wb') do |file|
file.write(cookbook.metadata.to_json)
end
io = tar(File.dirname(cookbook.path), packaging_slip)
tgz = gzip(io)
# Open the tempfile with explicit binary flags so no platform newline
# translation corrupts the archive (see sethvargo/stove#66).
tempfile = Tempfile.new([cookbook.name, '.tar.gz'], Dir.tmpdir, mode: File::RDWR|File::CREAT|File::EXCL|File::BINARY)
while buffer = tgz.read(1024)
tempfile.write(buffer)
end
tempfile.rewind
tempfile
ensure
if defined?(metadata_json)
File.delete(metadata_json)
end
end
#
# Create a tar file from the given root and packaging slip
#
# @param [String] root
#   the root where the tar files are being created
# @param [Hash<String, String>] slip
#   the map from physical file path to tarball file path
#
# @return [StringIO]
#   the io object that contains the tarball contents
#
def tar(root, slip)
# Binary-mode StringIO for the same line-ending-safety reason as above.
io = StringIO.new('', 'r+b')
Gem::Package::TarWriter.new(io) do |tar|
slip.each do |original_file, tarball_file|
mode = File.stat(original_file).mode
if File.directory?(original_file)
tar.mkdir(tarball_file, mode)
else
tar.add_file(tarball_file, mode) do |tf|
File.open(original_file, 'rb') { |f| tf.write(f.read) }
end
end
end
end
io.rewind
io
end
#
# GZip the given IO object (like a File or StringIO).
#
# @param [IO] io
#   the io object to gzip
#
# @return [IO]
#   the gzipped IO object
#
def gzip(io)
gz = StringIO.new('')
z = Zlib::GzipWriter.new(gz)
z.write(io.string)
z.close
# z was closed to write the gzip footer, so
# now we need a new StringIO
StringIO.new(gz.string)
end
end
end
|
module Straight
# This module should be included into your own class to extend it with Order functionality.
# For example, if you have a ActiveRecord model called Order, you can include OrderModule into it
# and you'll now be able to do everything to check order's status, but you'll also get AR Database storage
# functionality, its validations etc.
#
# The right way to implement this would be to do it the other way: inherit from Straight::Order, then
# include ActiveRecord, but at this point ActiveRecord doesn't work this way. Furthermore, some other libraries, like Sequel,
# also require you to inherit from them. Thus, the module.
#
# When this module is included, it doesn't actually *include* all the methods, some are prepended (see Ruby docs on #prepend).
# It is important specifically for getters and setters and as a general rule only getters and setters are prepended.
#
# If you don't want to bother yourself with modules, please use Straight::Order class and simply create new instances of it.
# However, if you are contributing to the library, all new functionality should go to either Straight::OrderModule::Includable or
# Straight::OrderModule::Prependable (most likely the former).
module OrderModule
# Only add getters and setters for those properties in the extended class
# that don't already have them. This is very useful with ActiveRecord for example
# where we don't want to override AR getters and setters that set attribtues.
# Hook run when OrderModule is included into a host class.
# Adds plain attr readers/writers for the order fields, but only where
# the host class does not already define them (e.g. ActiveRecord models
# bring their own getters/setters which must not be shadowed).
def self.included(base)
  base.class_eval do
    [:amount, :amount_paid, :address, :gateway, :keychain_id, :status, :tid].each do |field|
      attr_reader field unless base.method_defined?(field)
      attr_writer field unless base.method_defined?("#{field}=")
    end
    # Prependable holds getters/setters that must run *before* the host
    # class's own (Module#prepend); Includable holds everything else.
    prepend Prependable
    include Includable
  end
end
# Worth noting that statuses above 1 are immutable. That is, an order status cannot be changed
# if it is more than 1. It makes sense because if an order is paid (2) or expired (5), nothing
# else should be able to change the status back. Similarly, if an order is overpaid (4) or
# underpaid (3), it requires admin supervision and possibly a new order to be created.
STATUSES = {
new: 0, # no transactions received
unconfirmed: 1, # transaction has been received doesn't have enough confirmations yet
paid: 2, # transaction received with enough confirmations and the correct amount
underpaid: 3, # amount that was received in a transaction was not enough
overpaid: 4, # amount that was received in a transaction was too large
expired: 5, # too much time passed since creating an order
canceled: 6, # user decides to economize
}
attr_reader :old_status
class IncorrectAmount < Exception; end
# If you are defining methods in this module, it means you most likely want to
# call super() somehwere inside those methods. An example would be the #status=
# setter. We do our thing, then call super() so that the class this module is prepended to
# could do its thing. For instance, if we included it into ActiveRecord, then after
# #status= is executed, it would call ActiveRecord model setter #status=
#
# In short, the idea is to let the class we're being prepended to do its magic
# after out methods are finished.
module Prependable
# Checks #transaction and returns one of the STATUSES based
# on the meaning of each status and the contents of transaction
# If as_sym is set to true, then each status is returned as Symbol, otherwise
# an equivalent Integer from STATUSES is returned.
def status(as_sym: false, reload: false)
  # Let the class we are prepended to (e.g. an AR model) supply the
  # stored status first, if it defines its own #status.
  if defined?(super)
    begin
      @status = super
    # if no method with arguments found in the class
    # we're prepending to, then let's use a standard getter
    # with no argument.
    rescue ArgumentError
      @status = super()
    end
  end
  # Prohibit status update if the order was paid in some way.
  # This is just a caching workaround so we don't query
  # the blockchain needlessly. The actual safety switch is in the setter.
  # Therefore, even if you remove the following line, status won't actually
  # be allowed to change.
  if @status && @status > 1
    return as_sym ? STATUSES.invert[@status] : @status
  end
  if reload || !@status
    t = transaction(reload: reload)
    # Derive the status from the last transaction seen for the order's
    # address: confirmation count first, then received vs expected amount.
    self.status = if t.nil?
      STATUSES[:new]
    else
      if t[:confirmations] >= gateway.confirmations_required
        if t[:total_amount] == amount
          STATUSES[:paid]
        elsif t[:total_amount] < amount
          STATUSES[:underpaid]
        else
          STATUSES[:overpaid]
        end
      else
        STATUSES[:unconfirmed]
      end
    end
  end
  as_sym ? STATUSES.invert[@status] : @status
end
# Sets the order status, records the previous one in @old_status and
# notifies the gateway when the value actually changed.
# Returns false (without touching anything) for statuses above 1,
# which are final and immutable.
def status=(new_status)
  # Prohibit status update if the order was paid in some way,
  # so statuses above 1 are in fact immutable.
  return false if @status && @status > 1
  self.tid = transaction[:tid] if transaction
  # Pay special attention to the order of these statements. If you place
  # the assignment @status = new_status below the callback call,
  # you may get a "Stack level too deep" error if the callback checks
  # for the status and it's nil (therefore, force reload and the cycle continues).
  #
  # The order in which these statements currently are prevents that error, because
  # by the time a callback checks the status it's already set.
  @status_changed = (@status != new_status)
  @old_status = @status
  @status = new_status
  gateway.order_status_changed(self) if status_changed?
  super if defined?(super)
end
# True when the most recent #status= assignment actually changed the
# value (set as a side effect of the setter).
def status_changed?
  @status_changed
end
# True when the order's current status belongs to the "paid" family:
# paid, overpaid or underpaid.
def paid_order?
  paid_statuses = STATUSES.values_at(:paid, :overpaid, :underpaid)
  paid_statuses.include?(@status)
end
end
module Includable
# Returns an array of transactions for the order's address, each as a hash:
# [ {tid: "feba9e7bfea...", amount: 1202000, ...} ]
#
# An order is supposed to have only one transaction to its address, but we cannot
# always guarantee that (especially when a merchant decides to reuse the address
# for some reason -- he shouldn't but you know people).
#
# Therefore, this method returns all of the transactions.
# For compliance, there's also a #transaction method which always returns
# the last transaction made to the address.
# Transactions seen for the order's address, fetched from the gateway
# on first access or when an explicit reload is requested; otherwise
# the cached list is returned.
def transactions(reload: false)
  if reload || !@transactions
    @transactions = gateway.fetch_transactions_for(address)
  end
  @transactions
end
# Last transaction made to the address. Always use this method to check whether a transaction
# for this order has arrived. We pick last and not first because an address may be reused and we
# always assume it's the last transaction that we want to check.
def transaction(reload: false)
  # NOTE(review): this picks .first, while the comment above speaks of
  # the *last* transaction — presumably the gateway returns the list
  # newest-first; confirm against Gateway#fetch_transactions_for before
  # changing either side.
  transactions(reload: reload).first
end
# Starts a loop which calls #status(reload: true) according to the schedule
# determined in @status_check_schedule. This method is supposed to be
# called in a separate thread, for example:
#
# Thread.new do
# order.start_periodic_status_check
# end
#
# `duration` argument (value is in seconds) allows you to
# control in what time an order expires. In other words, we
# keep checking for new transactions until the time passes.
# Then we stop and set Order's status to STATUS[:expired]. See
# #check_status_on_schedule for the implementation details.
def start_periodic_status_check(duration: 600)
  # Delegates to the recursive #check_status_on_schedule loop.
  check_status_on_schedule(duration: duration)
end
# Recursion here! Keeps calling itself according to the schedule until
# either the status changes or the schedule tells it to stop.
def check_status_on_schedule(period: 10, iteration_index: 0, duration: 600, time_passed: 0)
  self.status(reload: true)
  time_passed += period
  if duration >= time_passed
    # Still inside the expiration window: keep polling while the order
    # is in a mutable state (status < 2), at intervals chosen by the
    # gateway's schedule callback.
    if self.status < 2
      schedule = gateway.status_check_schedule.call(period, iteration_index)
      sleep period
      check_status_on_schedule(
        period: schedule[:period],
        iteration_index: schedule[:iteration_index],
        duration: duration,
        time_passed: time_passed
      )
    end
  elsif self.status < 2
    # Time ran out before the order reached a final state: expire it.
    self.status = STATUSES[:expired]
  end
end
# JSON serialization of the minimal hash representation below.
def to_json
  to_h.to_json
end
# Minimal hash representation of the order.
def to_h
  { status: status, amount: amount, address: address, tid: tid }
end
# Converts a satoshi value to BTC.
#
# field: the satoshi Integer to convert; defaults to the order's
#        #amount, so all existing callers are unaffected. Passing e.g.
#        amount_paid lets the same conversion be reused for other fields.
# as:    :number (default) returns a numeric value; :string returns a
#        formatted string.
def amount_in_btc(field: amount, as: :number)
  a = Satoshi.new(field, from_unit: :satoshi, to_unit: :btc)
  as == :string ? a.to_unit(as: :string) : a.to_unit
end
end
end
# Instances of this class are generated when we'd like to start watching
# some addresses to check whether a transaction containing a certain amount
# has arrived to it.
#
# It is worth noting that instances do not know how store themselves anywhere,
# so as the class is written here, those instances are only supposed to exist
# in memory. Storing orders is entirely up to you.
class Order
  include OrderModule
  def initialize
    # Fresh orders start as STATUSES[:new] (0).
    @status = 0
  end
end
end
Make the method converting an amount to BTC accept an optional field
module Straight
# This module should be included into your own class to extend it with Order functionality.
# For example, if you have a ActiveRecord model called Order, you can include OrderModule into it
# and you'll now be able to do everything to check order's status, but you'll also get AR Database storage
# funcionality, its validations etc.
#
# The right way to implement this would be to do it the other way: inherit from Straight::Order, then
# include ActiveRecord, but at this point ActiveRecord doesn't work this way. Furthermore, some other libraries, like Sequel,
# also require you to inherit from them. Thus, the module.
#
# When this module is included, it doesn't actually *include* all the methods, some are prepended (see Ruby docs on #prepend).
# It is important specifically for getters and setters and as a general rule only getters and setters are prepended.
#
# If you don't want to bother yourself with modules, please use Straight::Order class and simply create new instances of it.
# However, if you are contributing to the library, all new funcionality should go to either Straight::OrderModule::Includable or
# Straight::OrderModule::Prependable (most likely the former).
module OrderModule
# Only add getters and setters for those properties in the extended class
# that don't already have them. This is very useful with ActiveRecord for example
# where we don't want to override AR getters and setters that set attribtues.
def self.included(base)
base.class_eval do
[:amount, :amount_paid, :address, :gateway, :keychain_id, :status, :tid].each do |field|
attr_reader field unless base.method_defined?(field)
attr_writer field unless base.method_defined?("#{field}=")
end
prepend Prependable
include Includable
end
end
# Worth noting that statuses above 1 are immutable. That is, an order status cannot be changed
# if it is more than 1. It makes sense because if an order is paid (2) or expired (5), nothing
# else should be able to change the status back. Similarly, if an order is overpaid (4) or
# underpaid (3), it requires admin supervision and possibly a new order to be created.
STATUSES = {
new: 0, # no transactions received
unconfirmed: 1, # transaction has been received doesn't have enough confirmations yet
paid: 2, # transaction received with enough confirmations and the correct amount
underpaid: 3, # amount that was received in a transaction was not enough
overpaid: 4, # amount that was received in a transaction was too large
expired: 5, # too much time passed since creating an order
canceled: 6, # user decides to economize
}
attr_reader :old_status
class IncorrectAmount < Exception; end
# If you are defining methods in this module, it means you most likely want to
# call super() somehwere inside those methods. An example would be the #status=
# setter. We do our thing, then call super() so that the class this module is prepended to
# could do its thing. For instance, if we included it into ActiveRecord, then after
# #status= is executed, it would call ActiveRecord model setter #status=
#
# In short, the idea is to let the class we're being prepended to do its magic
# after out methods are finished.
module Prependable
# Checks #transaction and returns one of the STATUSES based
# on the meaning of each status and the contents of transaction
# If as_sym is set to true, then each status is returned as Symbol, otherwise
# an equivalent Integer from STATUSES is returned.
def status(as_sym: false, reload: false)
if defined?(super)
begin
@status = super
# if no method with arguments found in the class
# we're prepending to, then let's use a standard getter
# with no argument.
rescue ArgumentError
@status = super()
end
end
# Prohibit status update if the order was paid in some way.
# This is just a caching workaround so we don't query
# the blockchain needlessly. The actual safety switch is in the setter.
# Therefore, even if you remove the following line, status won't actually
# be allowed to change.
if @status && @status > 1
return as_sym ? STATUSES.invert[@status] : @status
end
if reload || !@status
t = transaction(reload: reload)
self.status = if t.nil?
STATUSES[:new]
else
if t[:confirmations] >= gateway.confirmations_required
if t[:total_amount] == amount
STATUSES[:paid]
elsif t[:total_amount] < amount
STATUSES[:underpaid]
else
STATUSES[:overpaid]
end
else
STATUSES[:unconfirmed]
end
end
end
as_sym ? STATUSES.invert[@status] : @status
end
def status=(new_status)
# Prohibit status update if the order was paid in some way,
# so statuses above 1 are in fact immutable.
return false if @status && @status > 1
self.tid = transaction[:tid] if transaction
# Pay special attention to the order of these statements. If you place
# the assignment @status = new_status below the callback call,
# you may get a "Stack level too deep" error if the callback checks
# for the status and it's nil (therefore, force reload and the cycle continues).
#
# The order in which these statements currently are prevents that error, because
# by the time a callback checks the status it's already set.
@status_changed = (@status != new_status)
@old_status = @status
@status = new_status
gateway.order_status_changed(self) if status_changed?
super if defined?(super)
end
def status_changed?
@status_changed
end
# True when the order's current status belongs to the "paid" family:
# paid, overpaid or underpaid.
def paid_order?
  paid_statuses = STATUSES.values_at(:paid, :overpaid, :underpaid)
  paid_statuses.include?(@status)
end
end
module Includable
# Returns an array of transactions for the order's address, each as a hash:
# [ {tid: "feba9e7bfea...", amount: 1202000, ...} ]
#
# An order is supposed to have only one transaction to its address, but we cannot
# always guarantee that (especially when a merchant decides to reuse the address
# for some reason -- he shouldn't but you know people).
#
# Therefore, this method returns all of the transactions.
# For compliance, there's also a #transaction method which always returns
# the last transaction made to the address.
# Transactions seen for the order's address, fetched from the gateway
# on first access or when an explicit reload is requested; otherwise
# the cached list is returned.
def transactions(reload: false)
  if reload || !@transactions
    @transactions = gateway.fetch_transactions_for(address)
  end
  @transactions
end
# Last transaction made to the address. Always use this method to check whether a transaction
# for this order has arrived. We pick last and not first because an address may be reused and we
# always assume it's the last transaction that we want to check.
def transaction(reload: false)
transactions(reload: reload).first
end
# Starts a loop which calls #status(reload: true) according to the schedule
# determined in @status_check_schedule. This method is supposed to be
# called in a separate thread, for example:
#
# Thread.new do
# order.start_periodic_status_check
# end
#
# `duration` argument (value is in seconds) allows you to
# control in what time an order expires. In other words, we
# keep checking for new transactions until the time passes.
# Then we stop and set Order's status to STATUS[:expired]. See
# #check_status_on_schedule for the implementation details.
def start_periodic_status_check(duration: 600)
check_status_on_schedule(duration: duration)
end
# Recursion here! Keeps calling itself according to the schedule until
# either the status changes or the schedule tells it to stop.
def check_status_on_schedule(period: 10, iteration_index: 0, duration: 600, time_passed: 0)
self.status(reload: true)
time_passed += period
if duration >= time_passed # Stop checking if status is >= 2
if self.status < 2
schedule = gateway.status_check_schedule.call(period, iteration_index)
sleep period
check_status_on_schedule(
period: schedule[:period],
iteration_index: schedule[:iteration_index],
duration: duration,
time_passed: time_passed
)
end
elsif self.status < 2
self.status = STATUSES[:expired]
end
end
def to_json
to_h.to_json
end
def to_h
{ status: status, amount: amount, address: address, tid: tid }
end
# Converts a satoshi value to BTC. `field` defaults to the order's
# #amount but any satoshi Integer can be passed (e.g. amount_paid).
# With as: :string a formatted string is returned, otherwise a number.
def amount_in_btc(field: amount, as: :number)
  a = Satoshi.new(field, from_unit: :satoshi, to_unit: :btc)
  as == :string ? a.to_unit(as: :string) : a.to_unit
end
end
end
# Instances of this class are generated when we'd like to start watching
# some addresses to check whether a transaction containing a certain amount
# has arrived to it.
#
# It is worth noting that instances do not know how store themselves anywhere,
# so as the class is written here, those instances are only supposed to exist
# in memory. Storing orders is entirely up to you.
class Order
include OrderModule
def initialize
@status = 0
end
end
end
|
require 'fastlane_core'
require 'credentials_manager'
module Supply
  class Options
    # Builds (and memoizes) the configuration options understood by
    # supply. Each option can also be provided via its SUPPLY_* env var.
    def self.available_options
      @options ||= [
        FastlaneCore::ConfigItem.new(key: :package_name,
                                     env_name: "SUPPLY_PACKAGE_NAME",
                                     short_option: "-p",
                                     description: "The package name of the Application to modify",
                                     default_value: CredentialsManager::AppfileConfig.try_fetch_value(:package_name)),
        FastlaneCore::ConfigItem.new(key: :track,
                                     short_option: "-a",
                                     env_name: "SUPPLY_TRACK",
                                     description: "The Track to upload the Application to: production, beta, alpha",
                                     default_value: 'production',
                                     verify_block: proc do |value|
                                       available = %w(production beta alpha)
                                       raise "Invalid '#{value}', must be #{available.join(', ')}".red unless available.include? value
                                     end),
        FastlaneCore::ConfigItem.new(key: :metadata_path,
                                     env_name: "SUPPLY_METADATA_PATH",
                                     short_option: "-m",
                                     optional: true,
                                     description: "Path to the directory containing the metadata files",
                                     default_value: (Dir["./fastlane/metadata/android"] + Dir["./metadata"]).first,
                                     verify_block: proc do |value|
                                       raise "Could not find folder".red unless File.directory? value
                                     end),
        FastlaneCore::ConfigItem.new(key: :key,
                                     env_name: "SUPPLY_KEY",
                                     description: "The p12 File used to authenticate with Google",
                                     default_value: Dir["*.p12"].first || CredentialsManager::AppfileConfig.try_fetch_value(:keyfile),
                                     verify_block: proc do |value|
                                       raise "Could not find p12 file at path '#{File.expand_path(value)}'".red unless File.exist?(File.expand_path(value))
                                     end),
        FastlaneCore::ConfigItem.new(key: :issuer,
                                     short_option: "-i",
                                     env_name: "SUPPLY_ISSUER",
                                     description: "The issuer of the p12 file (email address of the service account)",
                                     default_value: CredentialsManager::AppfileConfig.try_fetch_value(:issuer)),
        FastlaneCore::ConfigItem.new(key: :apk,
                                     env_name: "SUPPLY_APK",
                                     description: "Path to the APK file to upload",
                                     short_option: "-b",
                                     default_value: Dir["*.apk"].first,
                                     optional: true,
                                     verify_block: proc do |value|
                                       raise "Could not find apk file at path '#{value}'".red unless File.exist?(value)
                                       # Bug fix: the original checked value.end_with?(value),
                                       # which is always true; verify the extension instead.
                                       raise "apk file is not an apk".red unless value.end_with?('.apk')
                                     end)
      ]
    end
  end
end
Improved default values
require 'fastlane_core'
require 'credentials_manager'
module Supply
  class Options
    # Builds (and memoizes) the configuration options understood by
    # supply. Each option can also be provided via its SUPPLY_* env var.
    def self.available_options
      @options ||= [
        FastlaneCore::ConfigItem.new(key: :package_name,
                                     env_name: "SUPPLY_PACKAGE_NAME",
                                     short_option: "-p",
                                     description: "The package name of the Application to modify",
                                     default_value: CredentialsManager::AppfileConfig.try_fetch_value(:package_name)),
        FastlaneCore::ConfigItem.new(key: :track,
                                     short_option: "-a",
                                     env_name: "SUPPLY_TRACK",
                                     description: "The Track to upload the Application to: production, beta, alpha",
                                     default_value: 'production',
                                     verify_block: proc do |value|
                                       available = %w(production beta alpha)
                                       raise "Invalid value '#{value}', must be #{available.join(', ')}".red unless available.include? value
                                     end),
        FastlaneCore::ConfigItem.new(key: :metadata_path,
                                     env_name: "SUPPLY_METADATA_PATH",
                                     short_option: "-m",
                                     optional: true,
                                     description: "Path to the directory containing the metadata files",
                                     default_value: (Dir["./fastlane/metadata/android"] + Dir["./metadata"]).first,
                                     verify_block: proc do |value|
                                       raise "Could not find folder".red unless File.directory? value
                                     end),
        FastlaneCore::ConfigItem.new(key: :key,
                                     env_name: "SUPPLY_KEY",
                                     description: "The p12 File used to authenticate with Google",
                                     default_value: Dir["*.p12"].first || CredentialsManager::AppfileConfig.try_fetch_value(:keyfile),
                                     verify_block: proc do |value|
                                       raise "Could not find p12 file at path '#{File.expand_path(value)}'".red unless File.exist?(File.expand_path(value))
                                     end),
        FastlaneCore::ConfigItem.new(key: :issuer,
                                     short_option: "-i",
                                     env_name: "SUPPLY_ISSUER",
                                     description: "The issuer of the p12 file (email address of the service account)",
                                     default_value: CredentialsManager::AppfileConfig.try_fetch_value(:issuer)),
        FastlaneCore::ConfigItem.new(key: :apk,
                                     env_name: "SUPPLY_APK",
                                     description: "Path to the APK file to upload",
                                     short_option: "-b",
                                     default_value: Dir["*.apk"].last || Dir[File.join("app", "build", "outputs", "apk", "app-Release.apk")].last,
                                     optional: true,
                                     verify_block: proc do |value|
                                       raise "Could not find apk file at path '#{value}'".red unless File.exist?(value)
                                       # Bug fix: the original checked value.end_with?(value),
                                       # which is always true; verify the extension instead.
                                       raise "apk file is not an apk".red unless value.end_with?('.apk')
                                     end)
      ]
    end
  end
end
|
require_relative 'schema'
module Tarantool16
class DB
attr :conn
# Creates a DB handle for +host+. Initializes the schema caches
# (@spaces is filled lazily via _space_future), registers the standard
# system-space field definitions, then opens the connection using the
# subclass-provided Connection class.
def initialize(host, opts = {})
  @host = host
  @opts = opts.dup
  @future = nil
  @spaces = nil
  @defined_fields = {}
  _fill_standard_spaces
  @conn = self.class::Connection.new(@host, @opts)
end
# Registers user-supplied field definitions for a space, keyed by
# space id or name (symbols are normalized to strings). When the space
# schema is already loaded the definition is applied immediately;
# conflicting definitions registered under both the space's id and its
# name raise.
def define_fields(sid, fields)
  sid = sid.to_s if sid.is_a?(Symbol)
  @defined_fields[sid] = fields
  if @spaces && (sp = @spaces[sid])
    if sp.sid && sp.name && !sp.name.empty?
      rf1 = @defined_fields[sp.sid]
      rf2 = @defined_fields[sp.name]
      if rf1 && rf2 && rf1 != rf2
        raise "Misconfigured defined fields for #{sp.name_sid}"
      end
    end
    sp.fields = fields
  end
end
# Seeds @defined_fields with the layout of the _index system space so
# its rows can be decoded before any user definitions exist.
def _fill_standard_spaces
  rf = @defined_fields
  rf[SPACE_INDEX] =
    [%w{sid num}, %w{iid num}, %w{name str},
     %w{type str}, %w{unique num}, %w{part_count num},
     {name: 'parts', type: [:num, :str], tail: true}]
end
# Mutual-exclusion hook; concrete connection subclasses must override
# this with their own locking strategy.
def _synchronized
  raise "Override #_synchronized"
end
UNDEF = Object.new.freeze
# Resolves +name+ to a SchemaSpace once the schema future completes,
# yielding the space to the block, or reporting the error via +cb+.
def _with_space(name, cb)
  future = @future || _space_future
  future.then_blk do |r|
    unless r.ok?
      cb.call r
    else
      sps = r.data
      sp = sps[name]
      # Fall back to the string form of a symbolic space name and cache
      # the result under the symbol. (Bug fix: the original tested
      # `Symbol == name`, which is never true — a Symbol never equals
      # the Symbol class — so the fallback could never fire.)
      if sp.nil? && name.is_a?(Symbol)
        sp = sps[name.to_s]
        sps[name] = sp unless sp.nil?
      end
      if sp.nil?
        cb.call Option.error(SchemaError, "space #{name} not found")
      else
        yield sp
      end
    end
  end
end
# Returns a future that resolves to the spaces map, loading the schema
# on first use: the _space system space is selected first, then (on
# success) the _index space; both callbacks run under _synchronized.
# On failure the cached future (and, for index errors, the spaces
# cache) is cleared so a later call can retry.
def _space_future
  _synchronized do
    return @future if @future
    future = @future = self.class::SchemaFuture.new
    # Pre-declare so fill_spaces can close over the variable before
    # fill_indexes is assigned.
    fill_indexes = nil
    spaces = nil
    fill_spaces = lambda do|r|
      unless r.ok?
        future.set r
        _synchronized do
          @future = nil
        end
      else
        _synchronized do
          _fill_spaces(r.data)
          spaces = @spaces
          _select(SPACE_INDEX, 0, [], 0, 2**30, :all, false, fill_indexes)
        end
      end
    end
    fill_indexes = lambda do |r|
      unless r.ok?
        future.set r
        _synchronized do
          @future = nil
          @spaces = nil
        end
      else
        _synchronized do
          _fill_indices(spaces, r.data)
          future.set Option.ok(spaces)
        end
      end
    end
    _select(SPACE_SPACE, 0, [], 0, 2**30, :all, false, fill_spaces)
    return future
  end
end
# Rebuilds the @spaces map from _space system-space rows, indexing
# each SchemaSpace under its numeric id, its name string and its name
# symbol. User-registered field definitions take precedence over the
# ones stored in the row itself.
def _fill_spaces(rows)
  @spaces = {}
  rows.each do |row|
    fields = @defined_fields[row[0]] || @defined_fields[row[2]] || row[6]
    sp = SchemaSpace.new(row[0], row[2], fields)
    @spaces[row[0]] = sp
    @spaces[sp.name] = sp
    @spaces[sp.name.to_sym] = sp
  end
end
# Populates each space's index list from _index system-space rows.
# Handles both on-disk formats (backported from the new-Tarantool fix):
# * new format: row[4] is an options Hash and row[5] holds
#   [field_no, type] pairs;
# * old format: options are flattened into the row, with part field
#   numbers at every even offset from 6 on.
def _fill_indices(spaces, rows)
  rows.
    map{|row|
      if row[4].is_a? Hash
        # new format
        [row[0], [row[2], row[1], row[3], row[5].map(&:first)]]
      else
        [row[0], [row[2], row[1], row[3], 6.step(row.size-1, 2).map{|i| row[i]}]]
      end
    }.
    group_by{|sid, _| sid}.
    each do |sid, inds|
      sp = spaces[sid]
      sp.indices = inds.map{|_sid, ind| ind}
    end
end
# Inserts a tuple into space +sno+, reporting via callback +cb+.
# Fast path: a numeric space id with a positional tuple goes straight
# to the connection. Otherwise the space schema is resolved first so a
# Hash tuple can be mapped to positional form and, when +need_hash+,
# result rows are wrapped back into hashes.
def _insert(sno, tuple, need_hash, cb)
  if !need_hash && sno.is_a?(Integer) && tuple.is_a?(Array)
    return conn._insert(sno, tuple, cb)
  end
  _with_space(sno, cb) do |sp|
    _tuple = tuple.is_a?(Hash) ? sp.map_tuple(tuple) : tuple
    _cb = need_hash ? sp.wrap_cb(cb) : cb
    conn._insert(sp.sid, _tuple, _cb)
  end
end
# Replaces a tuple in space +sno+; same fast-path/schema-resolution
# logic as #_insert.
def _replace(sno, tuple, need_hash, cb)
  if !need_hash && sno.is_a?(Integer) && tuple.is_a?(Array)
    return conn._replace(sno, tuple, cb)
  end
  _with_space(sno, cb) do |sp|
    _tuple = tuple.is_a?(Hash) ? sp.map_tuple(tuple) : tuple
    _cb = need_hash ? sp.wrap_cb(cb) : cb
    conn._replace(sp.sid, _tuple, _cb)
  end
end
# Deletes by key from space +sno+, index +ino+ (defaulting to the
# primary index when a positional key is given). The fast path skips
# schema resolution entirely.
def _delete(sno, ino, key, need_hash, cb)
  ino = 0 if ino.nil? && key.is_a?(Array)
  if !need_hash && sno.is_a?(Integer) && ino.is_a?(Integer) && key.is_a?(Array)
    return conn._delete(sno, ino, key, cb)
  end
  _with_space(sno, cb) do |sp|
    sp.get_ino(ino, key, ITERATOR_EQ, cb) do |_ino, _key|
      _cb = need_hash ? sp.wrap_cb(cb) : cb
      conn._delete(sp.sid, _ino, _key, _cb)
    end
  end
end
# Selects tuples from space +sno+. Normalizes the key (nil -> []),
# the index (primary for positional keys) and the iterator (ALL for
# empty array/hash keys, otherwise resolved by name), then either goes
# straight to the connection or resolves names through the schema.
def _select(sno, ino, key, offset, limit, iterator, need_hash, cb)
  key = [] if key.nil?
  ino = 0 if ino.nil? && key.is_a?(Array)
  unless iterator.is_a?(Integer)
    if key.empty? && (Array === key || Hash === key)
      iterator = ITERATOR_ALL
    else
      iterator = ::Tarantool16.iter(iterator)
    end
  end
  if sno.is_a?(Integer) && ino.is_a?(Integer) && (key.is_a?(Array) || key.nil?)
    return conn._select(sno, ino, key, offset, limit, iterator, cb)
  end
  _with_space(sno, cb) do |sp|
    sp.get_ino(ino, key, iterator, cb) do |_ino, _key|
      _cb = need_hash ? sp.wrap_cb(cb) : cb
      conn._select(sp.sid, _ino, _key, offset, limit, iterator, _cb)
    end
  end
end
# Updates tuples matching +key+ in space +sno+ with operation list
# +ops+ ([op, field_no, value] triples).
def _update(sno, ino, key, ops, need_hash, cb)
  ino = 0 if ino.nil? && key.is_a?(Array)
  # ops can be sent as-is when every operation addresses its field by
  # number. (Bug fix: the original tested ops[1] for every element
  # instead of a[1], so only the second operation was ever inspected.)
  ops_good = ops.is_a?(Array) && ops.all?{|a| a[1].is_a?(Integer)}
  if sno.is_a?(Integer) && ino.is_a?(Integer) && key.is_a?(Array) && ops_good
    return conn._update(sno, ino, key, ops, cb)
  end
  _with_space(sno, cb) do |sp|
    sp.get_ino(ino, key, ITERATOR_EQ, cb) do |_ino, _key|
      _ops = ops_good ? ops : sp.map_ops(ops)
      _cb = need_hash ? sp.wrap_cb(cb) : cb
      conn._update(sp.sid, _ino, _key, _ops, _cb)
    end
  end
end
# Thin delegators: forward remote procedure call, server-side eval and
# ping requests straight to the underlying connection.
def _call(name, args, cb)
  conn._call(name, args, cb)
end
def _eval(expr, args, cb)
  conn._eval(expr, args, cb)
end
def _ping(cb)
  conn._ping(cb)
end
end
end
fix parsing index definitions for new tarantool
require_relative 'schema'
module Tarantool16
class DB
attr :conn
def initialize(host, opts = {})
@host = host
@opts = opts.dup
@future = nil
@spaces = nil
@defined_fields = {}
_fill_standard_spaces
@conn = self.class::Connection.new(@host, @opts)
end
def define_fields(sid, fields)
sid = sid.to_s if sid.is_a?(Symbol)
@defined_fields[sid] = fields
if @spaces && (sp = @spaces[sid])
if sp.sid && sp.name && !sp.name.empty?
rf1 = @defined_fields[sp.sid]
rf2 = @defined_fields[sp.name]
if rf1 && rf2 && rf1 != rf2
raise "Misconfigured defined fields for #{sp.name_sid}"
end
end
sp.fields = fields
end
end
def _fill_standard_spaces
rf = @defined_fields
rf[SPACE_INDEX] =
[%w{sid num}, %w{iid num}, %w{name str},
%w{type str}, %w{unique num}, %w{part_count num},
{name: 'parts', type: [:num, :str], tail: true}]
end
def _synchronized
raise "Override #_synchronized"
end
UNDEF = Object.new.freeze
# Resolves +name+ to a SchemaSpace once the schema future completes,
# yielding the space to the block, or reporting the error via +cb+.
def _with_space(name, cb)
  future = @future || _space_future
  future.then_blk do |r|
    unless r.ok?
      cb.call r
    else
      sps = r.data
      sp = sps[name]
      # Fall back to the string form of a symbolic space name and cache
      # the result under the symbol. (Bug fix: the original tested
      # `Symbol == name`, which is never true — a Symbol never equals
      # the Symbol class — so the fallback could never fire.)
      if sp.nil? && name.is_a?(Symbol)
        sp = sps[name.to_s]
        sps[name] = sp unless sp.nil?
      end
      if sp.nil?
        cb.call Option.error(SchemaError, "space #{name} not found")
      else
        yield sp
      end
    end
  end
end
def _space_future
_synchronized do
return @future if @future
future = @future = self.class::SchemaFuture.new
fill_indexes = nil
spaces = nil
fill_spaces = lambda do|r|
unless r.ok?
future.set r
_synchronized do
@future = nil
end
else
_synchronized do
_fill_spaces(r.data)
spaces = @spaces
_select(SPACE_INDEX, 0, [], 0, 2**30, :all, false, fill_indexes)
end
end
end
fill_indexes = lambda do |r|
unless r.ok?
future.set r
_synchronized do
@future = nil
@spaces = nil
end
else
_synchronized do
_fill_indices(spaces, r.data)
future.set Option.ok(spaces)
end
end
end
_select(SPACE_SPACE, 0, [], 0, 2**30, :all, false, fill_spaces)
return future
end
end
def _fill_spaces(rows)
@spaces = {}
rows.each do |row|
fields = @defined_fields[row[0]] || @defined_fields[row[2]] || row[6]
sp = SchemaSpace.new(row[0], row[2], fields)
@spaces[row[0]] = sp
@spaces[sp.name] = sp
@spaces[sp.name.to_sym] = sp
end
end
# Populates each space's index list from _index system-space rows.
# Two on-disk formats exist: the new Tarantool format (row[4] is an
# options Hash, row[5] holds [field_no, type] pairs) and the old one
# (flattened row with part field numbers at even offsets from 6).
def _fill_indices(spaces, rows)
  per_space = Hash.new { |h, k| h[k] = [] }
  rows.each do |row|
    parts =
      if row[4].is_a?(Hash)
        # new format
        row[5].map(&:first)
      else
        # old format
        6.step(row.size - 1, 2).map { |i| row[i] }
      end
    per_space[row[0]] << [row[2], row[1], row[3], parts]
  end
  per_space.each do |sid, index_list|
    spaces[sid].indices = index_list
  end
end
def _insert(sno, tuple, need_hash, cb)
if !need_hash && sno.is_a?(Integer) && tuple.is_a?(Array)
return conn._insert(sno, tuple, cb)
end
_with_space(sno, cb) do |sp|
_tuple = tuple.is_a?(Hash) ? sp.map_tuple(tuple) : tuple
_cb = need_hash ? sp.wrap_cb(cb) : cb
conn._insert(sp.sid, _tuple, _cb)
end
end
def _replace(sno, tuple, need_hash, cb)
if !need_hash && sno.is_a?(Integer) && tuple.is_a?(Array)
return conn._replace(sno, tuple, cb)
end
_with_space(sno, cb) do |sp|
_tuple = tuple.is_a?(Hash) ? sp.map_tuple(tuple) : tuple
_cb = need_hash ? sp.wrap_cb(cb) : cb
conn._replace(sp.sid, _tuple, _cb)
end
end
def _delete(sno, ino, key, need_hash, cb)
ino = 0 if ino.nil? && key.is_a?(Array)
if !need_hash && sno.is_a?(Integer) && ino.is_a?(Integer) && key.is_a?(Array)
return conn._delete(sno, ino, key, cb)
end
_with_space(sno, cb) do |sp|
sp.get_ino(ino, key, ITERATOR_EQ, cb) do |_ino, _key|
_cb = need_hash ? sp.wrap_cb(cb) : cb
conn._delete(sp.sid, _ino, _key, _cb)
end
end
end
def _select(sno, ino, key, offset, limit, iterator, need_hash, cb)
key = [] if key.nil?
ino = 0 if ino.nil? && key.is_a?(Array)
unless iterator.is_a?(Integer)
if key.empty? && (Array === key || Hash === key)
iterator = ITERATOR_ALL
else
iterator = ::Tarantool16.iter(iterator)
end
end
if sno.is_a?(Integer) && ino.is_a?(Integer) && (key.is_a?(Array) || key.nil?)
return conn._select(sno, ino, key, offset, limit, iterator, cb)
end
_with_space(sno, cb) do |sp|
sp.get_ino(ino, key, iterator, cb) do |_ino, _key|
_cb = need_hash ? sp.wrap_cb(cb) : cb
conn._select(sp.sid, _ino, _key, offset, limit, iterator, _cb)
end
end
end
# Updates tuples matching +key+ in space +sno+ with operation list
# +ops+ ([op, field_no, value] triples).
def _update(sno, ino, key, ops, need_hash, cb)
  ino = 0 if ino.nil? && key.is_a?(Array)
  # ops can be sent as-is when every operation addresses its field by
  # number. (Bug fix: the original tested ops[1] for every element
  # instead of a[1], so only the second operation was ever inspected.)
  ops_good = ops.is_a?(Array) && ops.all?{|a| a[1].is_a?(Integer)}
  if sno.is_a?(Integer) && ino.is_a?(Integer) && key.is_a?(Array) && ops_good
    return conn._update(sno, ino, key, ops, cb)
  end
  _with_space(sno, cb) do |sp|
    sp.get_ino(ino, key, ITERATOR_EQ, cb) do |_ino, _key|
      _ops = ops_good ? ops : sp.map_ops(ops)
      _cb = need_hash ? sp.wrap_cb(cb) : cb
      conn._update(sp.sid, _ino, _key, _ops, _cb)
    end
  end
end
def _call(name, args, cb)
conn._call(name, args, cb)
end
def _eval(expr, args, cb)
conn._eval(expr, args, cb)
end
def _ping(cb)
conn._ping(cb)
end
end
end
|
task 'assets:precompile:before' do
  require 'uglifier'
  # Refuse to precompile outside production-like environments so
  # unminified assets can't sneak out.
  unless %w{profile production}.include? Rails.env
    raise "rake assets:precompile should only be run in RAILS_ENV=production, you are risking unminified assets"
  end
  # Ensure we ALWAYS do a clean build
  # We use many .erbs that get out of date quickly, especially with plugins
  puts "Purging temp files"
  `rm -fr #{Rails.root}/tmp/cache`
  # Prefer the node uglifyjs binary (it also produces source maps) when
  # it is on the PATH and not explicitly disabled.
  if Rails.configuration.assets.js_compressor == :uglifier && !`which uglifyjs`.empty? && !ENV['SKIP_NODE_UGLIFY']
    $node_uglify = true
  end
  puts "Bundling assets"
  # in the past we applied a patch that removed asset postfixes, but it is terrible practice
  # leaving very complicated build issues
  # https://github.com/rails/sprockets-rails/issues/49
  require 'sprockets'
  require 'digest/sha1'
  # Needed for proper source maps with a CDN
  load "#{Rails.root}/lib/global_path.rb"
  include GlobalPath
  if $node_uglify
    # Node handles minification later, so disable sprockets' compressor;
    # also skip sprockets' gzip step since we gzip explicitly afterwards.
    Rails.configuration.assets.js_compressor = nil
    module ::Sprockets
      # TODO: https://github.com/rails/sprockets-rails/pull/342
      # Rails.configuration.assets.gzip = false
      class Base
        def skip_gzip?
          true
        end
      end
    end
  end
end
task 'assets:precompile:css' => 'environment' do
  # Compiles the stylesheet targets for every multisite connection,
  # unless explicitly skipped via DONT_PRECOMPILE_CSS.
  if ENV["DONT_PRECOMPILE_CSS"] == "1"
    STDERR.puts "Skipping CSS precompilation, ensure CSS lives in a shared directory across hosts"
  else
    STDERR.puts "Start compiling CSS: #{Time.zone.now}"
    RailsMultisite::ConnectionManagement.each_connection do |db|
      # Heroku precompiles assets before db migration, so tables may not exist.
      # css will get precompiled during first request instead in that case.
      if ActiveRecord::Base.connection.table_exists?(ColorScheme.table_name)
        STDERR.puts "Compiling css for #{db}"
        [:desktop, :mobile, :desktop_rtl, :mobile_rtl].each do |target|
          STDERR.puts "target: #{target} #{DiscourseStylesheets.compile(target)}"
        end
      end
    end
    STDERR.puts "Done compiling CSS: #{Time.zone.now}"
  end
end
# Absolute path to the compiled assets directory.
def assets_path
  "#{Rails.root}/public/assets"
end
# Minifies assets_path/+from+ into +to+ using the node uglifyjs binary,
# emitting a source map whose root/URL are CDN-aware.
# Exits the process (status 1) on failure, echoing uglifyjs output.
def compress_node(from,to)
  to_path = "#{assets_path}/#{to}"
  assets = cdn_relative_path("/assets")
  source_map_root = assets + ((d=File.dirname(from)) == "." ? "" : "/#{d}")
  source_map_url = cdn_path "/assets/#{to}.map"
  cmd = "uglifyjs '#{assets_path}/#{from}' -p relative -c -m -o '#{to_path}' --source-map-root '#{source_map_root}' --source-map '#{assets_path}/#{to}.map' --source-map-url '#{source_map_url}'"
  STDERR.puts cmd
  result = `#{cmd} 2>&1`
  unless $?.success?
    STDERR.puts result
    exit 1
  end
  result
end
# Pure-ruby fallback: minifies with the Uglifier gem and writes both
# the compressed file (with a trailing sourceMappingURL pointer) and
# its source map alongside.
def compress_ruby(from,to)
  data = File.read("#{assets_path}/#{from}")
  uglified, map = Uglifier.new(comments: :none,
                               screw_ie8: true,
                               source_filename: File.basename(from),
                               output_filename: File.basename(to)
                              )
                          .compile_with_map(data)
  dest = "#{assets_path}/#{to}"
  File.write(dest, uglified << "\n//# sourceMappingURL=#{cdn_path "/assets/#{to}.map"}")
  File.write(dest + ".map", map)
end
# Writes a maximally-compressed (-9) gzip copy of +path+ to "#{path}.gz",
# leaving the original file in place.
def gzip(path)
  STDERR.puts "gzip #{path}"
  STDERR.puts `gzip -f -c -9 #{path} > #{path}.gz`
end
# Writes a brotli-compressed copy of +path+ to "#{path}.bl", but only when
# COMPRESS_BROTLI is set (the brotli binary is not assumed to exist).
def brotli(path)
  if ENV['COMPRESS_BROTLI']
    STDERR.puts "brotli #{path}"
    STDERR.puts `brotli --quality 11 --input #{path} --output #{path}.bl`
  end
end
# Minifies +from+ into +to+, preferring the node uglifyjs CLI and falling
# back to the pure-Ruby Uglifier implementation.
#
# BUG FIX: the original `@has_uglifyjs ||= !`which uglifyjs`.empty?` never
# memoized a false result, so on hosts without uglifyjs the `which` shell-out
# re-ran for every single asset. Memoize with defined? instead so the probe
# happens exactly once either way.
def compress(from, to)
  @has_uglifyjs = !`which uglifyjs`.empty? unless defined?(@has_uglifyjs)
  if @has_uglifyjs
    compress_node(from, to)
  else
    compress_ruby(from, to)
  end
end
# Yields a runner proc to the caller. Each `runner.call { work }` either
# executes the work inline, or (when CONCURRENT=1) schedules it on a
# Concurrent::Future; in that case all futures are awaited before returning.
def concurrent?
  if ENV["CONCURRENT"] != "1"
    yield(Proc.new { |&job| job.call })
  else
    pending = []
    yield(Proc.new { |&job| pending << Concurrent::Future.execute { job.call } })
    pending.each(&:wait!)
  end
end
# Wraps the stock assets:precompile: once sprockets has written the asset
# files, compile per-site CSS, then (when $node_uglify was set by the
# :before task) minify each .js bundle with source maps and produce
# gzip/brotli variants, updating the sprockets manifest afterwards.
task 'assets:precompile' => 'assets:precompile:before' do
  # Run after assets:precompile
  Rake::Task["assets:precompile:css"].invoke

  if $node_uglify
    puts "Compressing Javascript and Generating Source Maps"
    manifest = Sprockets::Manifest.new(assets_path)

    concurrent? do |proc|
      to_skip = Rails.configuration.assets.skip_minification || []
      manifest.files
        .select { |k, v| k =~ /\.js$/ }
        .each do |file, info|
          path = "#{assets_path}/#{file}"
          # The unminified original is kept next to the minified file with a
          # leading underscore (foo.js -> _foo.js); its presence marks "done".
          _file = (d = File.dirname(file)) == "." ? "_#{file}" : "#{d}/_#{File.basename(file)}"
          _path = "#{assets_path}/#{_file}"

          # FIX: File.exists? is deprecated and removed in Ruby 3.2.
          if File.exist?(_path)
            STDERR.puts "Skipping: #{file} already compressed"
          else
            STDERR.puts "Compressing: #{file}"
            proc.call do
              # We can specify some files to never minify
              unless (ENV["DONT_MINIFY"] == "1") || to_skip.include?(info['logical_path'])
                FileUtils.mv(path, _path)
                compress(_file, file)
              end
              # Refresh manifest metadata so it matches the minified output.
              info["size"] = File.size(path)
              info["mtime"] = File.mtime(path).iso8601
              gzip(path)
              brotli(path)
            end
          end
        end
    end

    # protected
    manifest.send :save
  end
end
# Rename the CONCURRENT environment variable to SPROCKETS_CONCURRENT.
# Preflight for assets:precompile: refuses non-production builds, purges
# stale tmp caches, decides whether the node uglifyjs CLI will be used
# ($node_uglify), and patches Sprockets so compression happens later.
task 'assets:precompile:before' do
  require 'uglifier'
  unless %w{profile production}.include? Rails.env
    raise "rake assets:precompile should only be run in RAILS_ENV=production, you are risking unminified assets"
  end
  # Ensure we ALWAYS do a clean build
  # We use many .erbs that get out of date quickly, especially with plugins
  puts "Purging temp files"
  `rm -fr #{Rails.root}/tmp/cache`
  # Prefer the node uglifyjs CLI when configured, installed and not opted out.
  if Rails.configuration.assets.js_compressor == :uglifier && !`which uglifyjs`.empty? && !ENV['SKIP_NODE_UGLIFY']
    $node_uglify = true
  end
  puts "Bundling assets"
  # in the past we applied a patch that removed asset postfixes, but it is terrible practice
  # leaving very complicated build issues
  # https://github.com/rails/sprockets-rails/issues/49
  require 'sprockets'
  require 'digest/sha1'
  # Needed for proper source maps with a CDN
  load "#{Rails.root}/lib/global_path.rb"
  include GlobalPath
  if $node_uglify
    # Minification and gzipping happen in assets:precompile instead, so turn
    # off sprockets' own compressor and gzip step.
    Rails.configuration.assets.js_compressor = nil
    module ::Sprockets
      # TODO: https://github.com/rails/sprockets-rails/pull/342
      # Rails.configuration.assets.gzip = false
      class Base
        def skip_gzip?
          true
        end
      end
    end
  end
end
# Compiles the CSS bundles for every multisite connection, unless
# DONT_PRECOMPILE_CSS=1 (used when compiled CSS lives on a shared volume).
task 'assets:precompile:css' => 'environment' do
  if ENV["DONT_PRECOMPILE_CSS"] == "1"
    STDERR.puts "Skipping CSS precompilation, ensure CSS lives in a shared directory across hosts"
  else
    STDERR.puts "Start compiling CSS: #{Time.zone.now}"
    RailsMultisite::ConnectionManagement.each_connection do |db|
      # Heroku precompiles assets before db migration, so tables may not exist.
      # css will get precompiled during first request instead in that case.
      if ActiveRecord::Base.connection.table_exists?(ColorScheme.table_name)
        STDERR.puts "Compiling css for #{db}"
        # One bundle per device/ltr-rtl combination.
        [:desktop, :mobile, :desktop_rtl, :mobile_rtl].each do |target|
          STDERR.puts "target: #{target} #{DiscourseStylesheets.compile(target)}"
        end
      end
    end
    STDERR.puts "Done compiling CSS: #{Time.zone.now}"
  end
end
# Absolute path of the compiled-assets directory under the Rails root.
def assets_path
  File.join(Rails.root, "public", "assets")
end
# Minifies +from+ into +to+ (both relative to assets_path) by shelling out
# to the uglifyjs CLI, producing a source map whose root/url are CDN-aware
# (via cdn_relative_path / cdn_path from GlobalPath).
# Exits the whole process with status 1 if uglifyjs fails; returns the
# CLI's combined output on success.
def compress_node(from,to)
  to_path = "#{assets_path}/#{to}"
  assets = cdn_relative_path("/assets")
  # The source map root must point at the directory containing the original.
  source_map_root = assets + ((d=File.dirname(from)) == "." ? "" : "/#{d}")
  source_map_url = cdn_path "/assets/#{to}.map"
  cmd = "uglifyjs '#{assets_path}/#{from}' -p relative -c -m -o '#{to_path}' --source-map-root '#{source_map_root}' --source-map '#{assets_path}/#{to}.map' --source-map-url '#{source_map_url}'"
  STDERR.puts cmd
  result = `#{cmd} 2>&1`
  unless $?.success?
    STDERR.puts result
    exit 1
  end
  result
end
# Minifies +from+ into +to+ with the pure-Ruby Uglifier gem (fallback used
# when the node uglifyjs CLI is unavailable). Writes the minified file with
# an appended sourceMappingURL comment pointing at the CDN, plus the .map.
def compress_ruby(from,to)
  data = File.read("#{assets_path}/#{from}")
  uglified, map = Uglifier.new(comments: :none,
                               screw_ie8: true,
                               source_filename: File.basename(from),
                               output_filename: File.basename(to)
                              )
                          .compile_with_map(data)
  dest = "#{assets_path}/#{to}"
  File.write(dest, uglified << "\n//# sourceMappingURL=#{cdn_path "/assets/#{to}.map"}")
  File.write(dest + ".map", map)
end
# Writes a maximally-compressed (-9) gzip copy of +path+ to "#{path}.gz",
# leaving the original file in place.
def gzip(path)
  STDERR.puts "gzip #{path}"
  STDERR.puts `gzip -f -c -9 #{path} > #{path}.gz`
end
# Writes a brotli-compressed copy of +path+ to "#{path}.bl", but only when
# COMPRESS_BROTLI is set (the brotli binary is not assumed to exist).
def brotli(path)
  if ENV['COMPRESS_BROTLI']
    STDERR.puts "brotli #{path}"
    STDERR.puts `brotli --quality 11 --input #{path} --output #{path}.bl`
  end
end
# Minifies +from+ into +to+, preferring the node uglifyjs CLI and falling
# back to the pure-Ruby Uglifier implementation.
#
# BUG FIX: the original `@has_uglifyjs ||= !`which uglifyjs`.empty?` never
# memoized a false result, so on hosts without uglifyjs the `which` shell-out
# re-ran for every single asset. Memoize with defined? instead so the probe
# happens exactly once either way.
def compress(from, to)
  @has_uglifyjs = !`which uglifyjs`.empty? unless defined?(@has_uglifyjs)
  if @has_uglifyjs
    compress_node(from, to)
  else
    compress_ruby(from, to)
  end
end
# Yields a runner proc to the caller. Each `runner.call { work }` either
# executes the work inline, or (when SPROCKETS_CONCURRENT=1) schedules it on
# a Concurrent::Future; in that case all futures are awaited before returning.
def concurrent?
  if ENV["SPROCKETS_CONCURRENT"] != "1"
    yield(Proc.new { |&job| job.call })
  else
    pending = []
    yield(Proc.new { |&job| pending << Concurrent::Future.execute { job.call } })
    pending.each(&:wait!)
  end
end
# Wraps the stock assets:precompile: once sprockets has written the asset
# files, compile per-site CSS, then (when $node_uglify was set by the
# :before task) minify each .js bundle with source maps and produce
# gzip/brotli variants, updating the sprockets manifest afterwards.
task 'assets:precompile' => 'assets:precompile:before' do
  # Run after assets:precompile
  Rake::Task["assets:precompile:css"].invoke

  if $node_uglify
    puts "Compressing Javascript and Generating Source Maps"
    manifest = Sprockets::Manifest.new(assets_path)

    concurrent? do |proc|
      to_skip = Rails.configuration.assets.skip_minification || []
      manifest.files
        .select { |k, v| k =~ /\.js$/ }
        .each do |file, info|
          path = "#{assets_path}/#{file}"
          # The unminified original is kept next to the minified file with a
          # leading underscore (foo.js -> _foo.js); its presence marks "done".
          _file = (d = File.dirname(file)) == "." ? "_#{file}" : "#{d}/_#{File.basename(file)}"
          _path = "#{assets_path}/#{_file}"

          # FIX: File.exists? is deprecated and removed in Ruby 3.2.
          if File.exist?(_path)
            STDERR.puts "Skipping: #{file} already compressed"
          else
            STDERR.puts "Compressing: #{file}"
            proc.call do
              # We can specify some files to never minify
              unless (ENV["DONT_MINIFY"] == "1") || to_skip.include?(info['logical_path'])
                FileUtils.mv(path, _path)
                compress(_file, file)
              end
              # Refresh manifest metadata so it matches the minified output.
              info["size"] = File.size(path)
              info["mtime"] = File.mtime(path).iso8601
              gzip(path)
              brotli(path)
            end
          end
        end
    end

    # protected
    manifest.send :save
  end
end
# ----------------------------------------------------------------------------
# Rake tasks for generating and removing the GraphQL schema artifact.
namespace :graphql do
  task :generate_schema do
    puts 'Generating GraphQL Schema...'
    Schema.generate
  end

  task :remove_schema do
    Schema.remove
  end
end
# Hook schema generation/removal into the asset build lifecycle.
Rake::Task['assets:precompile'].enhance ['graphql:generate_schema']
Rake::Task['assets:clobber'].enhance ['graphql:remove_schema']
# The GraphQL schema rake tasks are commented out below for now.
# namespace :graphql do
# task :generate_schema do
# puts 'Generating GraphQL Schema...'
# Schema.generate
# end
#
# task :remove_schema do
# Schema.remove
# end
# end
#
# Rake::Task['assets:precompile'].enhance ['graphql:generate_schema']
# Rake::Task['assets:clobber'].enhance ['graphql:remove_schema']
# ----------------------------------------------------------------------------
# Copyright 2011-2015, The Trustees of Indiana University and Northwestern
# University. Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# --- END LICENSE_HEADER BLOCK ---
namespace :avalon do
# Wipes the entire Fedora repository via ActiveFedora's cleaner.
# Destructive — intended for development/test environments only.
task clean: :environment do
  require 'active_fedora/cleaner'
  ActiveFedora::Cleaner.clean!
end
desc "Migrate Avalon 5.x to 6.x"
# Umbrella task: migrate the Fedora repository first, then the database.
task migrate: :environment do
  Rake::Task['avalon:migrate:repo'].invoke
  Rake::Task['avalon:migrate:db'].invoke
end
namespace :migrate do
desc "Migrate all my objects"
# Migrates Fedora objects class-by-class. Requires CONFIRM=yes because this
# migration has known metadata issues (see the wiki link in the warning).
task repo: :environment do
  unless ENV['CONFIRM'] == 'yes'
    $stderr.puts <<-EOC
WARNING: This migration task currently has known issues.
For example, some metadata is not migrated or is migrated incorrectly.
This migration task is part of a larger migration process. More info can be found at:
https://wiki.dlib.indiana.edu/display/VarVideo/Avalon+5+to+6+Database+Migration
Please run `rake avalon:migrate:repo CONFIRM=yes` to confirm.
    EOC
    exit 1
  end
  #disable callbacks
  Admin::Collection.skip_callback(:save, :around, :reindex_members)
  ::MediaObject.skip_callback(:save, :before, :update_dependent_properties!)
  # Order matters: collections before media objects before their subresources.
  models = [Admin::Collection, ::MediaObject, ::MasterFile, ::Derivative, ::Lease]
  migrator = FedoraMigrate::ClassOrderedRepositoryMigrator.new('avalon', { class_order: models })
  migrator.migrate_objects
  migrator
end
desc "Migrate my database"
# Repoints Bookmarks, AvalonClips and AvalonMarkers from their Avalon 5 ids
# to the migrated Avalon 6 object ids. Per-record progress is tracked in
# MigrationStatus so the task can be safely re-run; completed records skip.
task db: :environment do
  # Bookmarks reference a MediaObject via document_id.
  Bookmark.all.each do |b|
    status_record = MigrationStatus.find_or_create_by(source_class: Bookmark.name, f3_pid: "Bookmark:#{b.id}")
    next if status_record.status == "completed"
    status_record.update_attributes status: "migrate", log: nil
    begin
      # Look up the new object by its legacy identifier first, then by id.
      obj = MediaObject.where("identifier_ssim:\"#{b.document_id}\"").first
      obj ||= MediaObject.where(id: b.document_id).first
      raise FedoraMigrate::Errors::MigrationError, "Media Object with Avalon 5 ID #{b.document_id} could not be found" unless obj
      b.document_id = obj.id
      b.save!
      status_record.update_attribute :status, "completed"
    rescue StandardError => e
      status_record.update_attributes status: "failed", log: %{#{e.class.name}: "#{e.message}"}
    end
  end
  # Clips reference a MasterFile via the last path segment of their source.
  AvalonClip.all.each do |anno|
    status_record = MigrationStatus.find_or_create_by(source_class: AvalonClip.name, f3_pid: "AvalonClip:#{anno.id}")
    next if status_record.status == "completed"
    status_record.update_attributes status: "migrate", log: nil
    begin
      old_id = anno.source.split('/').last
      mf = MasterFile.where("identifier_ssim:\"#{old_id}\"").first
      mf ||= MasterFile.where(id: old_id).first
      raise FedoraMigrate::Errors::MigrationError, "Master File with Avalon 5 ID #{old_id} could not be found" unless mf
      anno.master_file = mf
      anno.save!
      status_record.update_attribute :status, "completed"
    rescue StandardError => e
      status_record.update_attributes status: "failed", log: %{#{e.class.name}: "#{e.message}"}
    end
  end
  # Markers are handled identically to clips.
  AvalonMarker.all.each do |anno|
    status_record = MigrationStatus.find_or_create_by(source_class: AvalonMarker.name, f3_pid: "AvalonMarker:#{anno.id}")
    next if status_record.status == "completed"
    status_record.update_attributes status: "migrate", log: nil
    begin
      old_id = anno.source.split('/').last
      mf = MasterFile.where("identifier_ssim:\"#{old_id}\"").first
      mf ||= MasterFile.where(id: old_id).first
      raise FedoraMigrate::Errors::MigrationError, "Master File with Avalon 5 ID #{old_id} could not be found" unless mf
      anno.master_file = mf
      anno.save!
      status_record.update_attribute :status, "completed"
    rescue StandardError => e
      status_record.update_attributes status: "failed", log: %{#{e.class.name}: "#{e.message}"}
    end
  end
end
desc "Cleanup failed bookmarks"
# Destroys any Bookmark whose MediaObject no longer exists, then prints a
# deleted/passed/failed summary.
task bookmark_cleanup: :environment do
  deleted_count = 0
  passed_count = 0
  failed_count = 0
  Bookmark.all.each do |b|
    if MediaObject.where(id: b.document_id).count > 0
      passed_count += 1
    else
      begin
        b.destroy
        deleted_count += 1
      rescue StandardError => e
        # FIX: was `rescue Exception`, which also swallowed SignalException /
        # SystemExit and made the task impossible to interrupt cleanly.
        puts "Failed to delete #{b.id}"
        failed_count += 1
        puts e.message
      end
    end
  end
  puts "Deleted: #{deleted_count} Passed: #{passed_count} Failed: #{failed_count}"
end
end
desc 'migrate databases for the rails app and the active annotations gem'
# NOTE(review): the exit status of these shell-outs is not checked, so a
# failing migration is silently ignored here.
task :db_migrate do
  `rake db:migrate`
  `rails generate active_annotations:install`
end
# Fans out start/stop/status/restart to each dependent service. Each entry
# must have matching "<service>:start" etc. tasks defined elsewhere.
namespace :services do
  services = ["jetty", "felix", "delayed_job"]
  desc "Start Avalon's dependent services"
  task :start do
    services.map { |service| Rake::Task["#{service}:start"].invoke }
  end
  desc "Stop Avalon's dependent services"
  task :stop do
    services.map { |service| Rake::Task["#{service}:stop"].invoke }
  end
  desc "Status of Avalon's dependent services"
  task :status do
    services.map { |service| Rake::Task["#{service}:status"].invoke }
  end
  desc "Restart Avalon's dependent services"
  task :restart do
    services.map { |service| Rake::Task["#{service}:restart"].invoke }
  end
end
namespace :assets do
  desc "Clears javascripts/cache and stylesheets/cache"
  # Removes every non-dotfile from the legacy asset cache directories.
  task :clear => :environment do
    FileUtils.rm(Dir['public/javascripts/cache/[^.]*'])
    FileUtils.rm(Dir['public/stylesheets/cache/[^.]*'])
  end
end
namespace :derivative do
  desc "Sets streaming urls for derivatives based on configured content_path in avalon.yml"
  task :set_streams => :environment do
    # Small batch size keeps memory bounded while touching every derivative.
    Derivative.find_each({},{batch_size:5}) do |derivative|
      derivative.set_streaming_locations!
      derivative.save!
    end
  end
end
namespace :batch do
  desc "Starts Avalon batch ingest"
  task :ingest => :environment do
    # Starts the ingest process
    require 'avalon/batch/ingest'
    # WithLocking prevents two ingest runs from overlapping.
    WithLocking.run(name: 'batch_ingest') do
      logger.info "<< Scanning for new batch packages in existing collections >>"
      Admin::Collection.all.each do |collection|
        Avalon::Batch::Ingest.new(collection).ingest
      end
    end
  end
end
namespace :user do
desc "Create user (assumes identity authentication)"
# Creates an Identity + User for avalon_username/avalon_password and adds
# the user to each group in the comma-separated avalon_groups list.
task :create => :environment do
  if ENV['avalon_username'].nil? or ENV['avalon_password'].nil?
    abort "You must specify a username and password. Example: rake avalon:user:create avalon_username=user@example.edu avalon_password=password avalon_groups=group1,group2"
  end
  require 'avalon/role_controls'
  username = ENV['avalon_username'].dup
  password = ENV['avalon_password']
  # FIX: avalon_groups is optional, but calling split on nil raised
  # NoMethodError; `.to_s` makes a missing variable mean "no groups".
  groups = ENV['avalon_groups'].to_s.split(",")
  Identity.create!(email: username, password: password, password_confirmation: password)
  User.create!(username: username, email: username)
  groups.each do |group|
    Avalon::RoleControls.add_role(group) unless Avalon::RoleControls.role_exists? group
    Avalon::RoleControls.add_user_role(username, group)
  end
  puts "User #{username} created and added to groups #{groups}"
end
desc "Delete user"
# Removes the Identity and User records for avalon_username and strips the
# user from every role they held.
task :delete => :environment do
  if ENV['avalon_username'].nil?
    abort "You must specify a username Example: rake avalon:user:delete avalon_username=user@example.edu"
  end
  require 'avalon/role_controls'
  username = ENV['avalon_username'].dup
  # Capture roles before the user records are destroyed.
  groups = Avalon::RoleControls.user_roles username
  Identity.where(email: username).destroy_all
  User.where(username: username).destroy_all
  groups.each do |group|
    Avalon::RoleControls.remove_user_role(username, group)
  end
  puts "Deleted user #{username} and removed them from groups #{groups}"
end
desc "Change password (assumes identity authentication)"
task :passwd => :environment do
  if ENV['avalon_username'].nil? or ENV['avalon_password'].nil?
    abort "You must specify a username and password. Example: rake avalon:user:passwd avalon_username=user@example.edu avalon_password=password"
  end
  username = ENV['avalon_username'].dup
  password = ENV['avalon_password']
  # Updates every Identity matching the email (normally exactly one).
  Identity.where(email: username).each {|identity| identity.password = password; identity.save}
  puts "Updated password for user #{username}"
end
end
namespace :test do
  desc "Create a test media object"
  # Builds a MediaObject with `master_files` master files (default 1) using
  # the FactoryGirl factories from spec/, then prints the new object's id.
  task :media_object => :environment do
    require 'factory_girl'
    require 'faker'
    Dir[Rails.root.join("spec/factories/**/*.rb")].each {|f| require f}
    mf_count = [ENV['master_files'].to_i,1].max
    mo = FactoryGirl.create(:media_object)
    mf_count.times do |i|
      FactoryGirl.create(:master_file_with_derivative, mediaobject: mo)
    end
    puts mo.id
  end
end
desc 'Reindex all Avalon objects'
# @example RAILS_ENV=production bundle exec rake avalon:reindex would do a single threaded production environment reindex
# @example RAILS_ENV=production bundle exec rake avalon:reindex[2] would do a dual threaded production environment reindex
task :reindex, [:threads] => :environment do |t, args|
  descendants = ActiveFedora::Base.descendant_uris(ActiveFedora.fedora.base_uri)
  descendants.shift # remove the root
  # FIX: `args[:threads].to_i || 1` never fell back to 1 — to_i cannot return
  # nil, so a missing argument produced `in_threads: 0` instead of 1.
  thread_count = args[:threads] ? args[:threads].to_i : 1
  Parallel.map(descendants, in_threads: thread_count) do |uri|
    begin
      ActiveFedora::Base.find(ActiveFedora::Base.uri_to_id(uri)).update_index
      puts "#{uri} reindexed"
    rescue
      puts "Error reindexing #{uri}"
    end
  end
end
desc "Identify invalid Avalon Media Objects"
# Prints the id and validation errors of every MediaObject failing valid?.
task :validate => :environment do
  MediaObject.find_each({},{batch_size:5}) {|mo| puts "#{mo.id}: #{mo.errors.full_messages}" if !mo.valid? }
end
namespace :variations do
  desc "Import playlists/bookmarks from Variation export"
  # Imports a Variations JSON export (one entry per user) into Avalon
  # playlists, playlist items and markers. Existing data for the affected
  # users is snapshotted into temp SQL tables first so re-importing does not
  # create duplicates. Prints per-category error and creation counts at end.
  task :import => :environment do
    if ENV['filename'].nil?
      abort "You must specify a file. Example: rake avalon:variations:import filename=export.json"
    end
    puts "Importing JSON file: #{ENV['filename']}"
    unless File.file?(ENV['filename'])
      abort "Could not find specified file"
    end
    require 'json'
    require 'htmlentities'
    f = File.open(ENV['filename'])
    s = f.read()
    import_json = JSON.parse(s)
    f.close()
    # Running tallies for the summary printed at the end.
    user_count = 0
    new_user_count = 0
    user_errors = []
    new_playlist_count = 0
    playlist_errors = []
    item_count = 0
    new_item_count = 0
    item_errors = []
    bookmark_count = 0
    new_bookmark_count = 0
    bookmark_errors = []
    # Setup temporary tables to hold existing playlist data. Allows for re-importing of bookmark data without creating duplicates.
    conn = ActiveRecord::Base.connection
    conn.execute("DROP TABLE IF EXISTS temp_playlist")
    conn.execute("DROP TABLE IF EXISTS temp_playlist_item")
    conn.execute("DROP TABLE IF EXISTS temp_marker")
    conn.execute("CREATE TABLE temp_playlist (id int primary key, title text, user_id int)")
    conn.execute("CREATE TABLE temp_playlist_item (id int primary key, playlist_id int, user_id int, clip_id int, master_file text, position int, title text, start_time int, end_time int)")
    conn.execute("CREATE TABLE temp_marker (id int primary key, playlist_item_id int, master_file text, title text, start_time int)")
    # Save existing playlist/item/marker data for users being imported
    puts "Compiling existing avalon marker data"
    usernames = import_json.collect{|user|user['username']}
    userids = User.where(username: usernames).collect(&:id)
    userids.each do |user_id|
      print "."
      playlist = Playlist.where(user_id: user_id, title:'Variations Bookmarks').first
      next if playlist.nil?
      sql = ActiveRecord::Base.send(:sanitize_sql_array, ["INSERT INTO temp_playlist VALUES (?, ?, ?)", playlist.id, playlist.title, playlist.user_id])
      conn.execute(sql)
      playlist.items.each do |item|
        begin
          sql = ActiveRecord::Base.send(:sanitize_sql_array, ["INSERT INTO temp_playlist_item VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", item.id, item.playlist_id, playlist.user_id, item.clip_id, item.master_file.pid, item.position, item.title, item.start_time, item.end_time])
          conn.execute(sql)
          item.marker.each do |marker|
            sql = ActiveRecord::Base.send(:sanitize_sql_array, ["INSERT INTO temp_marker VALUES (?,?,?,?,?)", marker.id, item.id, marker.master_file.pid, marker.title, marker.start_time])
            conn.execute(sql)
          end
        rescue Exception => e
          # NOTE(review): rescuing Exception here also swallows interrupts.
          puts " Bad existing playlist item"
        end
      end
    end
    # Import each user's playlist
    import_json.each do |user|
      user_count += 1
      # Create missing users with a derived @indiana.edu email address.
      user_obj = User.find_by_username(user['username'])
      unless user_obj.present?
        user_obj = User.create(username: user['username'], email: "#{user['username']}@indiana.edu")
        unless user_obj.persisted?
          user_errors += [{username: user['username'], errors: user_obj.errors.full_messages}]
        end
        new_user_count += 1
      end
      playlist_name = user['playlist_name']
      puts "Importing user #{user['username']}"
      puts " playlist name: #{playlist_name}"
      playlist_obj = Playlist.where(user_id: user_obj, title: playlist_name).first
      unless playlist_obj.present?
        playlist_obj = Playlist.create(user: user_obj, title: playlist_name, visibility: 'private')
        unless playlist_obj.persisted?
          playlist_errors += [{username: user['username'], title: playlist_name, errors: playlist_obj.errors.full_messages}]
        end
        new_playlist_count += 1
      end
      user['playlist_item'].each do |playlist_item|
        container = playlist_item['container_string']
        # Variations exports are HTML-entity encoded; decode before storing.
        comment = HTMLEntities.new.decode(playlist_item['comment'])
        title = HTMLEntities.new.decode(playlist_item['name'])
        mf_obj = MasterFile.where("identifier_ssim:#{container}").first
        unless mf_obj.present?
          item_errors += [{username: user['username'], playlist_id: playlist_obj.id, container: container, title: title, errors: ['Masterfile not found']}]
          next
        end
        item_count += 1
        puts " Importing playlist item #{title}"
        # Skip items already captured in the pre-import snapshot.
        sql = ActiveRecord::Base.send(:sanitize_sql_array, ["SELECT id FROM temp_playlist_item WHERE playlist_id=? and master_file=? and title=?", playlist_obj.id, mf_obj.id, title])
        playlist_item_id = conn.exec_query(sql)
        pi_obj = !playlist_item_id.empty? ? PlaylistItem.find(playlist_item_id.first['id']) : []
        unless pi_obj.present?
          clip_obj = AvalonClip.create(title: title, master_file: mf_obj, start_time: 0, comment: comment)
          pi_obj = PlaylistItem.create(clip: clip_obj, playlist: playlist_obj)
          unless pi_obj.persisted?
            item_errors += [{username: user['username'], playlist_id: playlist_obj.id, container: container, title: title, errors: pi_obj.errors.full_messages}]
            next
          end
          new_item_count += 1
        end
        playlist_item['bookmark'].each do |bookmark|
          bookmark_count += 1
          bookmark_name = HTMLEntities.new.decode(bookmark['name'])
          # Skip bookmarks already captured in the pre-import snapshot.
          sql = ActiveRecord::Base.send(:sanitize_sql_array, ["SELECT id FROM temp_marker WHERE playlist_item_id=? and title=? and start_time=?", pi_obj.id, bookmark_name, bookmark['start_time']])
          bookmark_id = conn.exec_query(sql)
          bookmark_obj = !bookmark_id.empty? ? AvalonMarker.find(bookmark_id.first['id']) : []
          unless bookmark_obj.present?
            marker_obj = AvalonMarker.create(playlist_item: pi_obj, title: bookmark_name, master_file: mf_obj, start_time: bookmark['start_time'])
            unless marker_obj.persisted?
              bookmark_errors += [{username: user['username'], playlist_id: playlist_obj.id, playlist_item_id: pi_obj.id, container: container, playlist_item_title: title, bookmark_title: bookmark_name, bookmark_start_time: bookmark['start_time'], errors: marker_obj.errors.full_messages}]
              next
            end
            new_bookmark_count += 1
          end
          puts " Importing bookmark #{bookmark_name} (#{bookmark['start_time']})"
        end
      end
    end
    # Snapshot tables are no longer needed once the import completes.
    conn.execute("DROP TABLE IF EXISTS temp_playlist")
    conn.execute("DROP TABLE IF EXISTS temp_playlist_item")
    conn.execute("DROP TABLE IF EXISTS temp_marker")
    puts "------------------------------------------------------------------------------------"
    puts "Errors"
    puts " user_errors = #{user_errors}" if user_errors.present?
    puts " playlist_errors = #{playlist_errors}" if playlist_errors.present?
    puts " item_errors = #{item_errors}" if item_errors.present?
    puts " bookmark_errors = #{bookmark_errors}" if bookmark_errors.present?
    puts "------------------------------------------------------------------------------------"
    puts "Imported #{user_count} users with #{bookmark_count} bookmarks in #{item_count} valid playlist items"
    puts " Created #{new_user_count} new users (#{user_errors.length} errors)"
    puts " Created #{new_playlist_count} new playlists (#{playlist_errors.length} errors)"
    puts " Created #{new_item_count} new playlist items (#{item_errors.length} errors)"
    puts " Created #{new_bookmark_count} new bookmarks (#{bookmark_errors.length} errors)"
  end
end
end
# Add the avalon:index_for_speed rake task.
# Copyright 2011-2015, The Trustees of Indiana University and Northwestern
# University. Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# --- END LICENSE_HEADER BLOCK ---
namespace :avalon do
task clean: :environment do
require 'active_fedora/cleaner'
ActiveFedora::Cleaner.clean!
end
desc "Migrate Avalon 5.x to 6.x"
task migrate: :environment do
Rake::Task['avalon:migrate:repo'].invoke
Rake::Task['avalon:migrate:db'].invoke
end
namespace :migrate do
desc "Migrate all my objects"
task repo: :environment do
unless ENV['CONFIRM'] == 'yes'
$stderr.puts <<-EOC
WARNING: This migration task currently has known issues.
For example, some metadata is not migrated or is migrated incorrectly.
This migration task is part of a larger migration process. More info can be found at:
https://wiki.dlib.indiana.edu/display/VarVideo/Avalon+5+to+6+Database+Migration
Please run `rake avalon:migrate:repo CONFIRM=yes` to confirm.
EOC
exit 1
end
#disable callbacks
Admin::Collection.skip_callback(:save, :around, :reindex_members)
::MediaObject.skip_callback(:save, :before, :update_dependent_properties!)
models = [Admin::Collection, ::MediaObject, ::MasterFile, ::Derivative, ::Lease]
migrator = FedoraMigrate::ClassOrderedRepositoryMigrator.new('avalon', { class_order: models })
migrator.migrate_objects
migrator
end
desc "Migrate my database"
task db: :environment do
Bookmark.all.each do |b|
status_record = MigrationStatus.find_or_create_by(source_class: Bookmark.name, f3_pid: "Bookmark:#{b.id}")
next if status_record.status == "completed"
status_record.update_attributes status: "migrate", log: nil
begin
obj = MediaObject.where("identifier_ssim:\"#{b.document_id}\"").first
obj ||= MediaObject.where(id: b.document_id).first
raise FedoraMigrate::Errors::MigrationError, "Media Object with Avalon 5 ID #{b.document_id} could not be found" unless obj
b.document_id = obj.id
b.save!
status_record.update_attribute :status, "completed"
rescue StandardError => e
status_record.update_attributes status: "failed", log: %{#{e.class.name}: "#{e.message}"}
end
end
AvalonClip.all.each do |anno|
status_record = MigrationStatus.find_or_create_by(source_class: AvalonClip.name, f3_pid: "AvalonClip:#{anno.id}")
next if status_record.status == "completed"
status_record.update_attributes status: "migrate", log: nil
begin
old_id = anno.source.split('/').last
mf = MasterFile.where("identifier_ssim:\"#{old_id}\"").first
mf ||= MasterFile.where(id: old_id).first
raise FedoraMigrate::Errors::MigrationError, "Master File with Avalon 5 ID #{old_id} could not be found" unless mf
anno.master_file = mf
anno.save!
status_record.update_attribute :status, "completed"
rescue StandardError => e
status_record.update_attributes status: "failed", log: %{#{e.class.name}: "#{e.message}"}
end
end
AvalonMarker.all.each do |anno|
status_record = MigrationStatus.find_or_create_by(source_class: AvalonMarker.name, f3_pid: "AvalonMarker:#{anno.id}")
next if status_record.status == "completed"
status_record.update_attributes status: "migrate", log: nil
begin
old_id = anno.source.split('/').last
mf = MasterFile.where("identifier_ssim:\"#{old_id}\"").first
mf ||= MasterFile.where(id: old_id).first
raise FedoraMigrate::Errors::MigrationError, "Master File with Avalon 5 ID #{old_id} could not be found" unless mf
anno.master_file = mf
anno.save!
status_record.update_attribute :status, "completed"
rescue StandardError => e
status_record.update_attributes status: "failed", log: %{#{e.class.name}: "#{e.message}"}
end
end
end
desc "Cleanup failed bookmarks"
# Destroys any Bookmark whose MediaObject no longer exists, then prints a
# deleted/passed/failed summary.
task bookmark_cleanup: :environment do
  deleted_count = 0
  passed_count = 0
  failed_count = 0
  Bookmark.all.each do |b|
    if MediaObject.where(id: b.document_id).count > 0
      passed_count += 1
    else
      begin
        b.destroy
        deleted_count += 1
      rescue StandardError => e
        # FIX: was `rescue Exception`, which also swallowed SignalException /
        # SystemExit and made the task impossible to interrupt cleanly.
        puts "Failed to delete #{b.id}"
        failed_count += 1
        puts e.message
      end
    end
  end
  puts "Deleted: #{deleted_count} Passed: #{passed_count} Failed: #{failed_count}"
end
end
desc 'migrate databases for the rails app and the active annotations gem'
task :db_migrate do
`rake db:migrate`
`rails generate active_annotations:install`
end
desc "Index MasterFiles and subresources to take advantage of SpeedyAF"
# Re-indexes every MasterFile and each of its declared attached files,
# printing a compact progress trace to stderr: "m[" per master file, the
# first letter of each attached file's name, then "]".
task :index_for_speed do
  MasterFile.find_each do |mf|
    $stderr.print "m["
    mf.update_index;
    mf.declared_attached_files.each_pair do |name, file|
      $stderr.print name.to_s[0]
      # Not every attached file supports external indexing; guard with respond_to?.
      file.update_external_index if file.respond_to?(:update_external_index)
    end
    $stderr.print "]"
  end
  $stderr.puts
end
namespace :services do
services = ["jetty", "felix", "delayed_job"]
desc "Start Avalon's dependent services"
task :start do
services.map { |service| Rake::Task["#{service}:start"].invoke }
end
desc "Stop Avalon's dependent services"
task :stop do
services.map { |service| Rake::Task["#{service}:stop"].invoke }
end
desc "Status of Avalon's dependent services"
task :status do
services.map { |service| Rake::Task["#{service}:status"].invoke }
end
desc "Restart Avalon's dependent services"
task :restart do
services.map { |service| Rake::Task["#{service}:restart"].invoke }
end
end
namespace :assets do
desc "Clears javascripts/cache and stylesheets/cache"
task :clear => :environment do
FileUtils.rm(Dir['public/javascripts/cache/[^.]*'])
FileUtils.rm(Dir['public/stylesheets/cache/[^.]*'])
end
end
namespace :derivative do
desc "Sets streaming urls for derivatives based on configured content_path in avalon.yml"
task :set_streams => :environment do
Derivative.find_each({},{batch_size:5}) do |derivative|
derivative.set_streaming_locations!
derivative.save!
end
end
end
namespace :batch do
desc "Starts Avalon batch ingest"
task :ingest => :environment do
# Starts the ingest process
require 'avalon/batch/ingest'
WithLocking.run(name: 'batch_ingest') do
logger.info "<< Scanning for new batch packages in existing collections >>"
Admin::Collection.all.each do |collection|
Avalon::Batch::Ingest.new(collection).ingest
end
end
end
end
namespace :user do
desc "Create user (assumes identity authentication)"
# Creates an Identity + User for avalon_username/avalon_password and adds
# the user to each group in the comma-separated avalon_groups list.
task :create => :environment do
  if ENV['avalon_username'].nil? or ENV['avalon_password'].nil?
    abort "You must specify a username and password. Example: rake avalon:user:create avalon_username=user@example.edu avalon_password=password avalon_groups=group1,group2"
  end
  require 'avalon/role_controls'
  username = ENV['avalon_username'].dup
  password = ENV['avalon_password']
  # FIX: avalon_groups is optional, but calling split on nil raised
  # NoMethodError; `.to_s` makes a missing variable mean "no groups".
  groups = ENV['avalon_groups'].to_s.split(",")
  Identity.create!(email: username, password: password, password_confirmation: password)
  User.create!(username: username, email: username)
  groups.each do |group|
    Avalon::RoleControls.add_role(group) unless Avalon::RoleControls.role_exists? group
    Avalon::RoleControls.add_user_role(username, group)
  end
  puts "User #{username} created and added to groups #{groups}"
end
desc "Delete user"
task :delete => :environment do
if ENV['avalon_username'].nil?
abort "You must specify a username Example: rake avalon:user:delete avalon_username=user@example.edu"
end
require 'avalon/role_controls'
username = ENV['avalon_username'].dup
groups = Avalon::RoleControls.user_roles username
Identity.where(email: username).destroy_all
User.where(username: username).destroy_all
groups.each do |group|
Avalon::RoleControls.remove_user_role(username, group)
end
puts "Deleted user #{username} and removed them from groups #{groups}"
end
desc "Change password (assumes identity authentication)"
task :passwd => :environment do
if ENV['avalon_username'].nil? or ENV['avalon_password'].nil?
abort "You must specify a username and password. Example: rake avalon:user:passwd avalon_username=user@example.edu avalon_password=password"
end
username = ENV['avalon_username'].dup
password = ENV['avalon_password']
Identity.where(email: username).each {|identity| identity.password = password; identity.save}
puts "Updated password for user #{username}"
end
end
namespace :test do
desc "Create a test media object"
task :media_object => :environment do
require 'factory_girl'
require 'faker'
Dir[Rails.root.join("spec/factories/**/*.rb")].each {|f| require f}
mf_count = [ENV['master_files'].to_i,1].max
mo = FactoryGirl.create(:media_object)
mf_count.times do |i|
FactoryGirl.create(:master_file_with_derivative, mediaobject: mo)
end
puts mo.id
end
end
desc 'Reindex all Avalon objects'
# @example RAILS_ENV=production bundle exec rake avalon:reindex would do a single threaded production environment reindex
# @example RAILS_ENV=production bundle exec rake avalon:reindex[2] would do a dual threaded production environment reindex
task :reindex, [:threads] => :environment do |t, args|
  descendants = ActiveFedora::Base.descendant_uris(ActiveFedora.fedora.base_uri)
  descendants.shift # remove the root
  # FIX: `args[:threads].to_i || 1` never fell back to 1 — to_i cannot return
  # nil, so a missing argument produced `in_threads: 0` instead of 1.
  thread_count = args[:threads] ? args[:threads].to_i : 1
  Parallel.map(descendants, in_threads: thread_count) do |uri|
    begin
      ActiveFedora::Base.find(ActiveFedora::Base.uri_to_id(uri)).update_index
      puts "#{uri} reindexed"
    rescue
      puts "Error reindexing #{uri}"
    end
  end
end
desc "Identify invalid Avalon Media Objects"
task :validate => :environment do
MediaObject.find_each({},{batch_size:5}) {|mo| puts "#{mo.id}: #{mo.errors.full_messages}" if !mo.valid? }
end
namespace :variations do
  desc "Import playlists/bookmarks from Variation export"
  # Imports a Variations JSON export (one entry per user) into Avalon playlists,
  # playlist items, and markers. Re-runnable: existing rows are snapshotted into
  # temp SQL tables and matched against, so re-imports do not create duplicates.
  task :import => :environment do
    if ENV['filename'].nil?
      abort "You must specify a file. Example: rake avalon:variations:import filename=export.json"
    end
    puts "Importing JSON file: #{ENV['filename']}"
    unless File.file?(ENV['filename'])
      abort "Could not find specified file"
    end
    require 'json'
    require 'htmlentities'
    f = File.open(ENV['filename'])
    s = f.read()
    import_json = JSON.parse(s)
    f.close()
    # Running tallies and per-category error collections for the summary report below.
    user_count = 0
    new_user_count = 0
    user_errors = []
    new_playlist_count = 0
    playlist_errors = []
    item_count = 0
    new_item_count = 0
    item_errors = []
    bookmark_count = 0
    new_bookmark_count = 0
    bookmark_errors = []
    # Setup temporary tables to hold existing playlist data. Allows for re-importing of bookmark data without creating duplicates.
    conn = ActiveRecord::Base.connection
    conn.execute("DROP TABLE IF EXISTS temp_playlist")
    conn.execute("DROP TABLE IF EXISTS temp_playlist_item")
    conn.execute("DROP TABLE IF EXISTS temp_marker")
    conn.execute("CREATE TABLE temp_playlist (id int primary key, title text, user_id int)")
    conn.execute("CREATE TABLE temp_playlist_item (id int primary key, playlist_id int, user_id int, clip_id int, master_file text, position int, title text, start_time int, end_time int)")
    conn.execute("CREATE TABLE temp_marker (id int primary key, playlist_item_id int, master_file text, title text, start_time int)")
    # Save existing playlist/item/marker data for users being imported
    puts "Compiling existing avalon marker data"
    usernames = import_json.collect{|user|user['username']}
    userids = User.where(username: usernames).collect(&:id)
    userids.each do |user_id|
      print "."
      playlist = Playlist.where(user_id: user_id, title:'Variations Bookmarks').first
      next if playlist.nil?
      sql = ActiveRecord::Base.send(:sanitize_sql_array, ["INSERT INTO temp_playlist VALUES (?, ?, ?)", playlist.id, playlist.title, playlist.user_id])
      conn.execute(sql)
      playlist.items.each do |item|
        begin
          sql = ActiveRecord::Base.send(:sanitize_sql_array, ["INSERT INTO temp_playlist_item VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", item.id, item.playlist_id, playlist.user_id, item.clip_id, item.master_file.pid, item.position, item.title, item.start_time, item.end_time])
          conn.execute(sql)
          item.marker.each do |marker|
            sql = ActiveRecord::Base.send(:sanitize_sql_array, ["INSERT INTO temp_marker VALUES (?,?,?,?,?)", marker.id, item.id, marker.master_file.pid, marker.title, marker.start_time])
            conn.execute(sql)
          end
        rescue Exception => e
          # NOTE(review): `rescue Exception` is very broad (swallows interrupts);
          # consider narrowing to StandardError.
          puts " Bad existing playlist item"
        end
      end
    end
    # Import each user's playlist
    import_json.each do |user|
      user_count += 1
      user_obj = User.find_by_username(user['username'])
      unless user_obj.present?
        # Create a stub account for users that do not exist yet.
        user_obj = User.create(username: user['username'], email: "#{user['username']}@indiana.edu")
        unless user_obj.persisted?
          user_errors += [{username: user['username'], errors: user_obj.errors.full_messages}]
        end
        new_user_count += 1
      end
      playlist_name = user['playlist_name']
      puts "Importing user #{user['username']}"
      puts " playlist name: #{playlist_name}"
      playlist_obj = Playlist.where(user_id: user_obj, title: playlist_name).first
      unless playlist_obj.present?
        playlist_obj = Playlist.create(user: user_obj, title: playlist_name, visibility: 'private')
        unless playlist_obj.persisted?
          playlist_errors += [{username: user['username'], title: playlist_name, errors: playlist_obj.errors.full_messages}]
        end
        new_playlist_count += 1
      end
      user['playlist_item'].each do |playlist_item|
        container = playlist_item['container_string']
        # Variations exports HTML-encode free text; decode before storing.
        comment = HTMLEntities.new.decode(playlist_item['comment'])
        title = HTMLEntities.new.decode(playlist_item['name'])
        mf_obj = MasterFile.where("identifier_ssim:#{container}").first
        unless mf_obj.present?
          item_errors += [{username: user['username'], playlist_id: playlist_obj.id, container: container, title: title, errors: ['Masterfile not found']}]
          next
        end
        item_count += 1
        puts " Importing playlist item #{title}"
        # Reuse an existing playlist item (snapshotted into the temp table) when one matches.
        sql = ActiveRecord::Base.send(:sanitize_sql_array, ["SELECT id FROM temp_playlist_item WHERE playlist_id=? and master_file=? and title=?", playlist_obj.id, mf_obj.id, title])
        playlist_item_id = conn.exec_query(sql)
        pi_obj = !playlist_item_id.empty? ? PlaylistItem.find(playlist_item_id.first['id']) : []
        unless pi_obj.present?
          clip_obj = AvalonClip.create(title: title, master_file: mf_obj, start_time: 0, comment: comment)
          pi_obj = PlaylistItem.create(clip: clip_obj, playlist: playlist_obj)
          unless pi_obj.persisted?
            item_errors += [{username: user['username'], playlist_id: playlist_obj.id, container: container, title: title, errors: pi_obj.errors.full_messages}]
            next
          end
          new_item_count += 1
        end
        playlist_item['bookmark'].each do |bookmark|
          bookmark_count += 1
          bookmark_name = HTMLEntities.new.decode(bookmark['name'])
          # Skip bookmarks already imported in a previous run (matched via temp_marker).
          sql = ActiveRecord::Base.send(:sanitize_sql_array, ["SELECT id FROM temp_marker WHERE playlist_item_id=? and title=? and start_time=?", pi_obj.id, bookmark_name, bookmark['start_time']])
          bookmark_id = conn.exec_query(sql)
          bookmark_obj = !bookmark_id.empty? ? AvalonMarker.find(bookmark_id.first['id']) : []
          unless bookmark_obj.present?
            marker_obj = AvalonMarker.create(playlist_item: pi_obj, title: bookmark_name, master_file: mf_obj, start_time: bookmark['start_time'])
            unless marker_obj.persisted?
              bookmark_errors += [{username: user['username'], playlist_id: playlist_obj.id, playlist_item_id: pi_obj.id, container: container, playlist_item_title: title, bookmark_title: bookmark_name, bookmark_start_time: bookmark['start_time'], errors: marker_obj.errors.full_messages}]
              next
            end
            new_bookmark_count += 1
          end
          puts " Importing bookmark #{bookmark_name} (#{bookmark['start_time']})"
        end
      end
    end
    # Clean up the temporary scratch tables.
    conn.execute("DROP TABLE IF EXISTS temp_playlist")
    conn.execute("DROP TABLE IF EXISTS temp_playlist_item")
    conn.execute("DROP TABLE IF EXISTS temp_marker")
    puts "------------------------------------------------------------------------------------"
    puts "Errors"
    puts " user_errors = #{user_errors}" if user_errors.present?
    puts " playlist_errors = #{playlist_errors}" if playlist_errors.present?
    puts " item_errors = #{item_errors}" if item_errors.present?
    puts " bookmark_errors = #{bookmark_errors}" if bookmark_errors.present?
    puts "------------------------------------------------------------------------------------"
    puts "Imported #{user_count} users with #{bookmark_count} bookmarks in #{item_count} valid playlist items"
    puts " Created #{new_user_count} new users (#{user_errors.length} errors)"
    puts " Created #{new_playlist_count} new playlists (#{playlist_errors.length} errors)"
    puts " Created #{new_item_count} new playlist items (#{item_errors.length} errors)"
    puts " Created #{new_bookmark_count} new bookmarks (#{bookmark_errors.length} errors)"
  end
end
end
|
namespace :coyote do
  desc "Checks for new MCA images"
  # Pages through the MCA CMS attachment_images API, creating/updating local
  # Image records (and an initial Description from the title when none exists).
  task :update_mca => :environment do
    require 'multi_json'
    require 'open-uri'
    @website = Website.first
    limit = 1000
    offset = 0
    # Once a baseline of images exists, only ask the CMS for recently updated ones.
    if Image.all.count > 10 #kludge
      updated_at = (Time.zone.now - 1.minute).iso8601
    else
      updated_at = nil
    end
    length = 1
    root = "https://cms.mcachicago.org"
    updated = 0
    created = 0
    errors = 0
    # Page until the API returns an empty page.
    while length != 0 do
      #some images have a null updated at
      if updated_at
        url = root + "/api/v1/attachment_images?updated_at=#{updated_at}&offset=#{offset}&limit=#{limit}"
      else
        url = root + "/api/v1/attachment_images?offset=#{offset}&limit=#{limit}"
      end
      Rails.logger.info "grabbing images for #{url}"
      begin
        content = open(url, { "Content-Type" => "application/json", ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}).read
      rescue OpenURI::HTTPError => error
        response = error.io
        Rails.logger.error response.string
        # BUG FIX: the original only set length = 0 and fell through, so
        # JSON.parse below raised NameError on the undefined `content`.
        break
      end
      begin
        images = JSON.parse(content)
      rescue JSON::ParserError
        # BUG FIX: narrowed from `rescue Exception` (which swallows interrupts),
        # and stop paging instead of falling through with `images` undefined.
        Rails.logger.error "JSON parsing exception"
        break
      end
      length = images.length
      Rails.logger.info "length is #{length}"
      Rails.logger.info "Created: #{created}"
      Rails.logger.info "Updated: #{updated}"
      Rails.logger.info "Errors: #{errors}"
      Rails.logger.info "Total: #{errors + updated + created}"
      Rails.logger.info "Our total: #{@website.images.count}"
      images.each do |i|
        begin
          image = Image.find_or_create_by(canonical_id: i["id"], website: @website)
          if image.new_record?
            image.website = @website
            group = Group.find_or_create_by(title: i["group"])
            group.save if group.new_record?
            image.group = group
            image.path = i["thumb_url"]
            image.created_at = i["created_at"]
            image.updated_at = i["updated_at"]
            image.save
            #create initial description field
            Rails.logger.info "created image #{image.id} from canonical id #{image.canonical_id}"
            created += 1
          else
            #update
            image.path = i["thumb_url"]
            image.updated_at = i["updated_at"]
            if image.save
              Rails.logger.info "updated image #{image.id} from canonical id #{image.canonical_id}"
              updated += 1
            else
              Rails.logger.error "save failed"
              errors += 1
            end
          end
          #create description if none are handy
          if image.descriptions.length == 0 && !i["title"].blank?
            d = Description.new(text: i["title"], locale: "en", metum_id: 2, image_id: image.id, status_id: 1, user_id: 1)
            if d.save
              Rails.logger.info "description #{d.id} for image #{image.id} saved"
            else
              Rails.logger.error "description save failed"
            end
          end
        rescue => e
          # Narrowed from `rescue Exception` so interrupts/exits still propagate.
          Rails.logger.error "image creation error"
          Rails.logger.error i
          Rails.logger.error e
          errors += 1
        end
      end
      offset += limit
    end
    Rails.logger.info "--- Totals for #{Time.now} ---"
    Rails.logger.info "Created: #{created}"
    Rails.logger.info "Updated: #{updated}"
    Rails.logger.info "Errors: #{errors}"
    Rails.logger.info "Total: #{errors + updated + created}"
    Rails.logger.info "Our total: #{@website.images.count}"
    Rails.logger.info "---"
  end
end
Don't create descriptions from image titles in the update_mca task.
namespace :coyote do
  desc "Checks for new MCA images"
  # Pages through the MCA CMS attachment_images API, creating/updating local
  # Image records. Automatic description creation from titles is intentionally
  # disabled (kept commented out below).
  task :update_mca => :environment do
    require 'multi_json'
    require 'open-uri'
    @website = Website.first
    limit = 1000
    offset = 0
    # Once a baseline of images exists, only ask the CMS for recently updated ones.
    if Image.all.count > 10 #kludge
      updated_at = (Time.zone.now - 1.minute).iso8601
    else
      updated_at = nil
    end
    length = 1
    root = "https://cms.mcachicago.org"
    updated = 0
    created = 0
    errors = 0
    # Page until the API returns an empty page.
    while length != 0 do
      #some images have a null updated at
      if updated_at
        url = root + "/api/v1/attachment_images?updated_at=#{updated_at}&offset=#{offset}&limit=#{limit}"
      else
        url = root + "/api/v1/attachment_images?offset=#{offset}&limit=#{limit}"
      end
      Rails.logger.info "grabbing images for #{url}"
      begin
        content = open(url, { "Content-Type" => "application/json", ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}).read
      rescue OpenURI::HTTPError => error
        response = error.io
        Rails.logger.error response.string
        # BUG FIX: the original only set length = 0 and fell through, so
        # JSON.parse below raised NameError on the undefined `content`.
        break
      end
      begin
        images = JSON.parse(content)
      rescue JSON::ParserError
        # BUG FIX: narrowed from `rescue Exception` (which swallows interrupts),
        # and stop paging instead of falling through with `images` undefined.
        Rails.logger.error "JSON parsing exception"
        break
      end
      length = images.length
      Rails.logger.info "length is #{length}"
      Rails.logger.info "Created: #{created}"
      Rails.logger.info "Updated: #{updated}"
      Rails.logger.info "Errors: #{errors}"
      Rails.logger.info "Total: #{errors + updated + created}"
      Rails.logger.info "Our total: #{@website.images.count}"
      images.each do |i|
        begin
          image = Image.find_or_create_by(canonical_id: i["id"], website: @website)
          if image.new_record?
            image.website = @website
            group = Group.find_or_create_by(title: i["group"])
            group.save if group.new_record?
            image.group = group
            image.path = i["thumb_url"]
            image.created_at = i["created_at"]
            image.updated_at = i["updated_at"]
            image.save
            #create initial description field
            Rails.logger.info "created image #{image.id} from canonical id #{image.canonical_id}"
            created += 1
          else
            #update
            image.path = i["thumb_url"]
            image.updated_at = i["updated_at"]
            if image.save
              Rails.logger.info "updated image #{image.id} from canonical id #{image.canonical_id}"
              updated += 1
            else
              Rails.logger.error "save failed"
              errors += 1
            end
          end
          #create description if none are handy
          #if image.descriptions.length == 0 and !i["title"].blank?
          #d = Description.new(text: i["title"], locale: "en", metum_id: 2, image_id: image.id, status_id: 1, user_id: 1)
          #if d.save
          #Rails.logger.info "description #{d.id} for image #{image.id} saved"
          #else
          #Rails.logger.error "description save failed"
          #end
          #end
        rescue => e
          # Narrowed from `rescue Exception` so interrupts/exits still propagate.
          Rails.logger.error "image creation error"
          Rails.logger.error i
          Rails.logger.error e
          errors += 1
        end
      end
      offset += limit
    end
    Rails.logger.info "--- Totals for #{Time.now} ---"
    Rails.logger.info "Created: #{created}"
    Rails.logger.info "Updated: #{updated}"
    Rails.logger.info "Errors: #{errors}"
    Rails.logger.info "Total: #{errors + updated + created}"
    Rails.logger.info "Our total: #{@website.images.count}"
    Rails.logger.info "---"
  end
end
|
# FIX: removed the surrounding `begin`/`end` wrapper -- it had no rescue/ensure
# clause and was a no-op.
namespace :db do
  namespace :import do
    # Import organization email addresses from a CSV file, 1000 rows per batch.
    task :emails, [:file] => :environment do |t, args|
      # pass variables like so bundle exec rake db:import:emails[db/emails.csv]
      #Organization.set_gmaps4rails_options!({ :check_process => true })
      Organization.import_emails(args[:file], 1000)
    end
  end
end
This appears to make everything go green, but we have to make sure the patched lifecycle doesn't bite us in a long-running Ruby process: after the rake task has run, does Organization revert to its normal behavior on each subsequent request?
begin
  namespace :db do
    namespace :import do
      # Import organization email addresses, suppressing gmaps4rails address
      # validation errors for the duration of the import.
      task :emails, [:file] => :environment do |t, args|
        # pass variables like so bundle exec rake db:import:emails[db/emails.csv]
        #Organization.acts_as_gmappable({ :check_process => true })
        # Load the full Rails environment explicitly before patching the model.
        require File.expand_path("../../config/environment", File.dirname(__FILE__))
        #class Organization
        #private
        # NOTE(review): `private :remove_errors_with_address do ... end` passes the
        # block to `private`, which ignores it -- this almost certainly does NOT
        # redefine remove_errors_with_address; it only changes its visibility.
        # Confirm the intended override (likely
        # `define_method(:remove_errors_with_address) do ... end`).
        Organization.instance_eval do
          private :remove_errors_with_address do
            # Intended behavior: re-add every validation error except the
            # gmaps4rails address error, logging each one.
            errors_hash = errors.to_hash
            errors.clear
            errors_hash.each do |key, value|
              logger.warn "#{key} --> #{value}"
              if key.to_s != 'gmaps4rails_address'
                errors.add(key, value)
              end
            end
          end
        end
        Organization.import_emails(args[:file],1000)
      end
    end
  end
end
|
# Rakefile to execute fastqc for given items
namespace :quanto do
  # executable
  core = File.join(PROJ_ROOT, "exe", "quanto-core")
  # setup working dir
  workdir = ENV['workdir'] || PROJ_ROOT
  table_dir = File.join(workdir, "tables")
  list_available = ENV['list_available'] || File.join(table_dir, "experiments.available.tab")
  fastqc_dir = ENV['fastqc_dir'] || File.join(workdir, "fastqc")
  checksum_table = ENV['checksum_table'] || File.join(table_dir, "dra", "fastqlist")
  # logging
  date = Time.now.strftime("%Y%m%d-%H%M")
  logdir = File.join(PROJ_ROOT, "log", date)
  logfile = File.join(logdir, "exec.log")
  logdir_job = File.join(logdir, "job")
  logdir_ftp = File.join(logdir, "ftp")
  logdir_uge = File.join(logdir, "uge")
  logdir_table = File.join(logdir, "tables")
  # Rake directory/file tasks that materialize the log layout on demand.
  directory logdir
  directory logdir_job
  directory logdir_ftp
  directory logdir_uge
  file logfile => logdir do |t|
    touch t.name
  end
  file logdir_table => logdir do |t|
    mkdir_p t.name
    cp_r Dir.glob("#{table_dir}/*tab"), t.name
  end
  # Append a message line to the given log file.
  def logwrite(logfile, m)
    open(logfile, "a"){|f| f.puts(m) }
  end
  desc "option: workdir, fastqc_dir"
  task :exec => [list_available, logfile, logdir_job, logdir_ftp, logdir_uge, logdir_table] do
    list_records = Quanto::Records::IO.read(list_available)
    logwrite(logfile, "#{Time.now}: Number of total target experiments: #{list_records.size}")
    # Process experiments in batches of 50,000, one UGE array job per batch.
    grouped_records = list_records.each_slice(50000).to_a
    grouped_records.each_with_index do |records, i|
      # Wait until no Quanto jobs remain in the queue before submitting the next batch.
      while !`#{QSUB.gsub(/qsub$/,"qstat")} | grep Quanto`.empty? do
        sleep 300
      end
      logwrite(logfile, "#{Time.now}: Start FastQC execution #{i}/#{grouped_records.size}")
      # Create process list for array job
      process_list = File.join(logdir, "process_list_#{i}.txt")
      open(process_list, "w") do |f|
        records.each do |record| # BUG FIX: was `|records|`, shadowing the batch array
          exp_id = record[0]
          acc_id = record[1]
          layout = record[2]
          logdir_exp = File.join(logdir_job, exp_id.sub(/...$/,""))
          mkdir_p logdir_exp
          logfile_job = File.join(logdir_exp, exp_id + ".log")
          f.puts("#{acc_id} #{exp_id} #{layout} #{logfile_job}")
        end
      end
      # Submit array job
      qsub_args = [
        "-N Quanto.#{Time.now.strftime("%Y%m%d-%H%M")}",
        "-j y",
        "-o #{logdir_uge}",
        "-t 1-#{records.size}", # BUG FIX: was `list.size` -- `list` is undefined here
      ]
      fastqc_args = [
        "--fastqc-dir #{fastqc_dir}",
        "--ftp-connection-pool #{logdir_ftp}",
        "--fastq-checksum #{checksum_table}",
        "--job-list #{process_list}",
      ]
      mes = `#{QSUB} #{qsub_args.join("\s")} #{core} #{fastqc_args.join("\s")}`
      logwrite(logfile, "#{Time.now}: #{mes}")
    end
  end
end
Fixed wrong usage of variables: the inner block parameter shadowed the `records` batch array, and the qsub `-t` range referenced an undefined `list`.
# Rakefile to execute fastqc for given items
namespace :quanto do
  # executable
  core = File.join(PROJ_ROOT, "exe", "quanto-core")
  # setup working dir
  workdir = ENV['workdir'] || PROJ_ROOT
  table_dir = File.join(workdir, "tables")
  list_available = ENV['list_available'] || File.join(table_dir, "experiments.available.tab")
  fastqc_dir = ENV['fastqc_dir'] || File.join(workdir, "fastqc")
  checksum_table = ENV['checksum_table'] || File.join(table_dir, "dra", "fastqlist")
  # logging
  date = Time.now.strftime("%Y%m%d-%H%M")
  logdir = File.join(PROJ_ROOT, "log", date)
  logfile = File.join(logdir, "exec.log")
  logdir_job = File.join(logdir, "job")
  logdir_ftp = File.join(logdir, "ftp")
  logdir_uge = File.join(logdir, "uge")
  logdir_table = File.join(logdir, "tables")
  # Rake directory/file tasks that materialize the per-run log layout on demand.
  directory logdir
  directory logdir_job
  directory logdir_ftp
  directory logdir_uge
  file logfile => logdir do |t|
    touch t.name
  end
  file logdir_table => logdir do |t|
    mkdir_p t.name
    cp_r Dir.glob("#{table_dir}/*tab"), t.name
  end
  # Append a message line to the given log file.
  def logwrite(logfile, m)
    open(logfile, "a"){|f| f.puts(m) }
  end
  desc "option: workdir, fastqc_dir"
  task :exec => [list_available, logfile, logdir_job, logdir_ftp, logdir_uge, logdir_table] do
    list_records = Quanto::Records::IO.read(list_available)
    logwrite(logfile, "#{Time.now}: Number of total target experiments: #{list_records.size}")
    # Process experiments in batches of 50,000, one UGE array job per batch.
    grouped_records = list_records.each_slice(50000).to_a
    grouped_records.each_with_index do |records, i|
      # Wait until no Quanto jobs remain in the queue before submitting the next batch.
      while !`#{QSUB.gsub(/qsub$/,"qstat")} | grep Quanto`.empty? do
        sleep 300
      end
      logwrite(logfile, "#{Time.now}: Start FastQC execution #{i}/#{grouped_records.size}")
      # Create process list for array job
      process_list = File.join(logdir, "process_list_#{i}.txt")
      open(process_list, "w") do |f|
        # Each record is [experiment_id, accession_id, layout].
        records.each do |record|
          exp_id = record[0]
          acc_id = record[1]
          layout = record[2]
          logdir_exp = File.join(logdir_job, exp_id.sub(/...$/,""))
          mkdir_p logdir_exp
          logfile_job = File.join(logdir_exp, exp_id + ".log")
          f.puts("#{acc_id} #{exp_id} #{layout} #{logfile_job}")
        end
      end
      # Submit array job
      qsub_args = [
        "-N Quanto.#{Time.now.strftime("%Y%m%d-%H%M")}",
        "-j y",
        "-o #{logdir_uge}",
        "-t 1-#{records.size}",
      ]
      fastqc_args = [
        "--fastqc-dir #{fastqc_dir}",
        "--ftp-connection-pool #{logdir_ftp}",
        "--fastq-checksum #{checksum_table}",
        "--job-list #{process_list}",
      ]
      mes = `#{QSUB} #{qsub_args.join("\s")} #{core} #{fastqc_args.join("\s")}`
      logwrite(logfile, "#{Time.now}: #{mes}")
    end
  end
end
|
namespace :hudson do
  # Directory where Cucumber junit/html reports are written.
  # NOTE(review): `def` inside a namespace block defines a method on Object,
  # not one scoped to this namespace -- confirm there are no name clashes.
  def report_path
    "hudson/reports/features/"
  end
  if defined? Cucumber
    # Clear reports, migrate, and prepare the test DB before the Cucumber suite.
    Cucumber::Rake::Task.new({:cucumber => [:report_setup, 'db:migrate', 'db:test:prepare']}) do |t|
      t.cucumber_opts = %{--profile default --format junit --out #{report_path} --format html --out #{report_path}/report.html}
    end
  end
  # Start each CI run with an empty report directory.
  task :report_setup do
    rm_rf report_path
    mkdir_p report_path
  end
  # Run the RSpec suite with ci_reporter output enabled.
  task :spec => ["hudson:setup:rspec", 'db:migrate', 'db:test:prepare', 'rake:spec']
  namespace :setup do
    task :pre_ci do
      ENV["CI_REPORTS"] = 'hudson/reports/spec/'
      gem 'ci_reporter'
      require 'ci/reporter/rake/rspec'
    end
    task :rspec => [:pre_ci, "ci:setup:rspec"]
  end
end
Add hudson:everything task which will run both Cucumber and RSpec suites without failing until the very end.
namespace :hudson do
  # Directory where Cucumber junit/html reports are written.
  # NOTE(review): `def` inside a namespace block defines a method on Object,
  # not one scoped to this namespace -- confirm there are no name clashes.
  def report_path
    "hudson/reports/features/"
  end
  if defined? Cucumber
    # Clear reports, migrate, and prepare the test DB before the Cucumber suite.
    Cucumber::Rake::Task.new({:cucumber => [:report_setup, 'db:migrate', 'db:test:prepare']}) do |t|
      t.cucumber_opts = %{--profile default --format junit --out #{report_path} --format html --out #{report_path}/report.html}
    end
  end
  # Start each CI run with an empty report directory.
  task :report_setup do
    rm_rf report_path
    mkdir_p report_path
  end
  desc "Run the cucumber and RSpec tests, but don't fail until both suites have run."
  task :everything do
    tasks = %w{ hudson:cucumber hudson:spec }
    exceptions = []
    # Invoke each suite, deferring any failure until both have run.
    tasks.each do |t|
      begin
        Rake::Task[t].invoke
      rescue => e
        exceptions << e
      end
    end
    exceptions.each do |e|
      puts "Exception encountered:"
      puts "#{e}\n#{e.backtrace.join("\n")}"
    end
    # FIX: idiomatic emptiness check (was `if exceptions.size > 0`).
    raise "Test failures" unless exceptions.empty?
  end
  # Run the RSpec suite with ci_reporter output enabled.
  task :spec => ["hudson:setup:rspec", 'db:migrate', 'db:test:prepare', 'rake:spec']
  namespace :setup do
    task :pre_ci do
      ENV["CI_REPORTS"] = 'hudson/reports/spec/'
      gem 'ci_reporter'
      require 'ci/reporter/rake/rspec'
    end
    task :rspec => [:pre_ci, "ci:setup:rspec"]
  end
end
namespace :ikemen do
  desc "ikemenDBの初期セットアップを行う"
  # Full initial setup of the ikemen DB.
  task :setup=> :environment do
    # Run every setup step:
    # add the compare users
    Rake::Task["ikemen:db_user"].execute
    # wipe the noun list completely
    CompareNoun.delete_all
    # then add the nouns
    Rake::Task["ikemen:db_noun"].execute
  end
  desc "compareUserのTweetをcompareNounテーブルへ追加"
  task :db_noun => :environment do
    # Load each CompareUser and refresh the CompareNoun table from their tweets.
    CompareUser.all.each do |compare_user|
      Analyze::glow_compare_nouns(compare_user)
    end
  end
  desc "seedのtwitterIDリストを元に、CompareUsersテーブルへIDを追加"
  task :db_user => :environment do
    # Rebuild the compare_users table from the seed data:
    #
    # delete all CompareUser rows
    CompareUser.delete_all
    # then re-insert them via db:seed
    Rake::Task["db:seed"].execute
  end
end
twitter_idを指定して得点を算出するtaskを追加
namespace :ikemen do
  desc "ikemenDBの初期セットアップを行う"
  # Full initial setup of the ikemen DB.
  task :setup=> :environment do
    # Run every setup step:
    # add the compare users
    Rake::Task["ikemen:db_user"].execute
    # wipe the noun list completely
    CompareNoun.delete_all
    # then add the nouns
    Rake::Task["ikemen:db_noun"].execute
  end
  desc "compareUserのTweetをcompareNounテーブルへ追加"
  task :db_noun => :environment do
    # Load each CompareUser and refresh the CompareNoun table from their tweets.
    CompareUser.all.each do |compare_user|
      Analyze::glow_compare_nouns(compare_user)
    end
  end
  desc "seedのtwitterIDリストを元に、CompareUsersテーブルへIDを追加"
  task :db_user => :environment do
    # Rebuild the compare_users table from the seed data:
    #
    # delete all CompareUser rows
    CompareUser.delete_all
    # then re-insert them via db:seed
    Rake::Task["db:seed"].execute
  end
  desc "twitter_idを指定して、イケメン度を算出 [ rails ikemen:point TWITTER_ID='xxxxxxxxxxx' ]"
  # Score the account given by the TWITTER_ID environment variable.
  task :point => :environment do
    twitter_id = ENV['TWITTER_ID'].to_s
    # Progress dots while the analysis runs.
    puts "."
    puts ".."
    puts "..."
    point = Analyze::point_with_twitter_id(twitter_id)
    puts "point -> #{point}"
  end
end
|
require 'csv'
namespace :import do
  desc 'Import user data from CSV (export cms_users.csv from legacy app)'
  task :users, [:csv_file_path] => :environment do |t, args|
    csv_file_path = args[:csv_file_path]
    unless csv_file_path
      # BUG FIX: was `puts "..." and return` -- `return` inside a rake task block
      # raises LocalJumpError at runtime; `next` ends the task cleanly.
      puts "Usage: rake 'import:users[/PATH/TO/FILE.csv]'"
      next
    end
    CSV.foreach(csv_file_path, headers: true) do |row|
      # BUG FIX: capture the username from the row up front -- in the original,
      # `user` was nil inside rescue when User.new itself raised, so the
      # "SKIPPED" message crashed with NoMethodError.
      username = row['username']
      begin
        user = User.new(
          username: username,
          encrypted_password: row['password'],
          legacy_password: true,
          email: row['email'] || "#{username}@cfb-hasenheide.de"
        )
        # Legacy rows may not satisfy current validations; import them verbatim.
        user.save!(validate: false)
        puts "IMPORTED: #{username}"
      rescue => e
        # Narrowed from `rescue Exception` so interrupts/exits still propagate.
        puts "SKIPPED: #{username} (#{e})"
        next
      end
    end
  end
end
destroy all users before importing
require 'csv'
namespace :import do
  desc 'Import user data from CSV (export cms_users.csv from legacy app)'
  task :users, [:csv_file_path] => :environment do |t, args|
    csv_file_path = args[:csv_file_path]
    unless csv_file_path
      # BUG FIX: was `puts "..." and return` -- `return` inside a rake task block
      # raises LocalJumpError at runtime; `next` ends the task cleanly.
      puts "Usage: rake 'import:users[/PATH/TO/FILE.csv]'"
      next
    end
    # Start from a clean slate: remove every existing user before importing.
    User.destroy_all
    puts "DESTROYED: all existing users"
    CSV.foreach(csv_file_path, headers: true) do |row|
      username = row['username']
      begin
        user = User.new(
          username: username,
          encrypted_password: row['password'],
          legacy_password: true,
          email: row['email'] || "#{username}@cfb-hasenheide.de"
        )
        # Legacy rows may not satisfy current validations; import them verbatim.
        user.save!(validate: false)
        puts "IMPORTED: #{username}"
      rescue => e
        # Narrowed from `rescue Exception` so interrupts/exits still propagate.
        puts "SKIPPED: #{username} (#{e})"
        next
      end
    end
  end
end
|
require 'spreadsheet'
require 'tiny_tds'
require 'csv'
#Export addresses from legacy SQL Server database as a CSV file to be manually cleansed with AddressFinder batch service
#usage: $ rake export_from_SQL_SERVER[sql-server-ip-address] > output-file.csv
task :export_from_SQL_SERVER, [:ip] do |t, args|
  client = TinyTds::Client.new username: "bfnz2", password: "bfnz", host: args[:ip]
  result = client.execute("select * from subscribers")
  puts CSV.generate_line(["id", "first_name", "last_name", "address"])
  result.each do |r|
    # Strip the trailing 4-digit postcode from the legacy city/town field.
    # FIX: removed the unused `post_code` local (a MatchData that was never read).
    city_town = r['city_town'] ? r['city_town'].gsub(/ \d{4}$/, '') : ''
    # Join street / suburb / town, then collapse the separators left by blank parts.
    address = (r['address'] ? r['address'] : '') + ', ' + (r['suburb'] ? r['suburb'] : '') + ', ' + city_town
    address.gsub!(/(, ){2,}/, ', ')
    address.gsub!(/, $/, '')
    puts CSV.generate_line([r['id'], r['first_name'], r['last_name'], address])
    # BUG FIX: removed the leftover debug `break if counter > 10`, which silently
    # truncated the export to the first 11 subscribers.
  end
end
# Import data from ASP version of Bibles for NZ
# run with : rake import[sql-server-ip-address,path-to-cleansed-address-file]
# (note: no space between arguments on the command line)
task :import, [:ip, :path_to_cleansed_addresses] do |t, args|
  # copy code in here when it works
  # TODO(review): still a stub -- the working import code currently lives in the
  # :temp task below.
end
# run with : rake import[sql-server-ip-address,path-to-cleansed-address-file]
# (note: no space between arguments on the command line)
# WIP migration: loads all legacy lookup data, wipes the target tables, and
# (so far) imports customers only. Orders/shipments creation is still stubbed
# out below.
task :temp, [:ip, :path_to_cleansed_addresses] => :environment do |t, args|
  sql_client = TinyTds::Client.new username: "bfnz2", password: "bfnz", host: args[:ip]
  territorial_authorities = get_territorial_authorities
  addresses = get_cleansed_addresses(args[:path_to_cleansed_addresses])
  old_subscribers = get_old_subscribers(sql_client, addresses, territorial_authorities)
  # The lookups below are loaded for the not-yet-written order/shipment steps.
  old_items = get_old_items(sql_client, get_new_items)
  old_requests = get_old_requests_by_subscriber(sql_client)
  old_shipments, unique_shipment_dates = get_old_shipments_by_subscriber_and_shipments(sql_client)
  old_how_heard = get_old_how_heard(sql_client, Order.method_of_discoveries)
  old_method_received = get_old_method_received(sql_client, Order.method_receiveds)
  #TODO: further_contact, bad address
  #TODO: need to create customers, orders, and shipments
  # Wipe the target tables before re-importing (children first).
  Shipment.delete_all
  Order.delete_all
  Customer.delete_all
  # unique_shipment_dates.keys.each do |date|
  #   shipment = Shipment.create()
  #   shipment.created_at = date
  #   shipment.save
  #   unique_shipment_dates[date] = shipment.id
  # end
  sub_counter = 0
  old_subscribers.each do |sub_id, sub|
    customer = Customer.create(sub)
    sub[:new_id] = customer.id
    # BUG FIX: was `#{id}` -- `id` is undefined here and raised NameError
    # whenever a customer failed validation. Use the legacy subscriber id.
    puts "#{sub_id} - #{customer.errors.full_messages.join(",")}" if customer.errors.any?
    sub_counter += 1 if customer.persisted?
  end
  puts "#{sub_counter} customers created"
  # (WIP scaffolding retained below for the remaining migration steps.)
  # Shipment.find_each do |shipment|
  #   puts "#{shipment.id}, #{shipment.created_at}, #{shipment.updated_at}"
  # end
  # counter = 0
  # result.each do |r|
  #   customer = Customer.create(
  #     territorial_authority_id: ta_id,
  #     first_name: r['first_name'],
  #     last_name: r['last_name'],
  #     address: address_info[:full_address],
  #     suburb: address_info[:suburb],
  #     city_town: city_town,
  #     post_code: postcode,
  #     ta: address_info[:ta],
  #     pxid: address_info[:pxid],
  #     phone: r['phone'],
  #     email: r['email'],
  #     title: title,
  #     tertiary_student: r['tertiary_student'],
  #     tertiary_institution: r['institution'],
  #     admin_notes: r['admin_notes'],
  #     coordinator_notes: r['coordinator_notes'],
  #     old_subscriber_id: old_id,
  #     old_system_address: r['address'],
  #     old_system_suburb: r['suburb'],
  #     old_system_city_town: r['city_town']
  #   )
  #   puts "#{id} - #{customer.errors.full_messages.join(",")}" if customer.errors.any?
  #   counter += 1 if customer.persisted?
  #   break if counter > 10
  # end
  #puts "Imported #{counter} customers"
end
# Index every Item in the database by its code: { code => [id, title] }.
def get_new_items
  lookup = {}
  Item.find_each do |item|
    lookup[item.code] = [item.id, item.title]
  end
  lookup
end
# Index every TerritorialAuthority by name: { name => id }.
def get_territorial_authorities
  lookup = {}
  TerritorialAuthority.find_each do |authority|
    lookup[authority.name] = authority.id
  end
  lookup
end
# Load manually cleansed AddressFinder results from a CSV file, keyed by the
# legacy subscriber id (integer).
def get_cleansed_addresses(path)
  cleansed = {}
  CSV.foreach(path, :headers => true) do |row|
    subscriber_id = row['id'].to_i
    cleansed[subscriber_id] = {
      full_address: row['full address'],
      suburb: row['suburb'],
      city: row['city'],
      postcode: row['postcode'],
      ta: row['territorial authority'],
      pxid: row['pxid']
    }
  end
  cleansed
end
# Build { old_subscriber_id => attribute hash for Customer.create } from the
# legacy subscribers table, merging in the cleansed address data.
def get_old_subscribers(sql_client, addresses, territorial_authorities)
  # NOTE(review): hard-coded id list -- looks like a WIP test subset; confirm
  # this is widened before the real import.
  result = sql_client.execute("select * from subscribers where id in (44586,11452,26895,26845,46245,30628)")
  old_subscribers = {}
  result.each do |r|
    old_id = r['id'].to_i
    address_info = addresses[old_id]
    ta_name = address_info[:ta]
    ta_id = nil
    if ta_name
      ta_id = territorial_authorities[ta_name]
    end
    # Prefer the cleansed postcode; otherwise pull a trailing 4-digit code
    # from the legacy city/town field.
    postcode = nil
    if address_info[:postcode]
      postcode = address_info[:postcode]
    elsif r['city_town'].match(/\d{4}$/)
      postcode = r['city_town'].match(/\d{4}$/)[0]
    end
    # Prefer the cleansed city; otherwise strip the postcode off the legacy field.
    city_town = nil
    if address_info[:city]
      city_town = address_info[:city]
    else
      city_town = r['city_town'].gsub(/ \d{4}$/, '')
    end
    # Derive a salutation from the legacy gender field.
    title = nil
    if r['gender'] == 'Male'
      title = 'Mr'
    elsif r['gender'] == 'Female'
      title = 'Ms'
    end
    old_subscribers[old_id] = {
      territorial_authority_id: ta_id,
      first_name: r['first_name'],
      last_name: r['last_name'],
      address: address_info[:full_address],
      suburb: address_info[:suburb],
      city_town: city_town,
      post_code: postcode,
      ta: address_info[:ta],
      pxid: address_info[:pxid],
      phone: r['phone'],
      email: r['email'],
      title: title,
      tertiary_student: r['tertiary_student'],
      tertiary_institution: r['institution'],
      admin_notes: r['admin_notes'],
      coordinator_notes: r['coordinator_notes'],
      old_subscriber_id: old_id,
      # Keep the raw legacy address fields for auditing.
      old_system_address: r['address'],
      old_system_suburb: r['suburb'],
      old_system_city_town: r['city_town'],
      created_at: int_to_date_time(r['date_entered'])
    }
  end
  old_subscribers
end
# Map legacy item ids to { name:, new_item_id: }. Known legacy item names are
# resolved to the id of the corresponding new Item via new_items (a
# { code => [id, title] } lookup); unknown names get new_item_id: nil.
def get_old_items(sql_client, new_items)
  # FIX: replaced the if/elsif chain with a name -> code lookup table.
  name_to_code = {
    'Recovery Version' => 'R',
    'Basic Elements 1' => 'X1',
    'Basic Elements 2' => 'X2',
    'Basic Elements 3' => 'X3'
  }
  result = sql_client.execute("select * from items order by ship_order")
  old_items = {}
  result.each do |r|
    code = name_to_code[r['name']]
    new_item = code && new_items[code]
    old_items[r['id'].to_i] = { name: r['name'], new_item_id: new_item && new_item[0] }
  end
  old_items
end
# Group legacy request rows by subscriber id:
# { subscriber_id => [{ date_requested:, item_id: }, ...] }
def get_old_requests_by_subscriber(sql_client)
  requests_by_subscriber = {}
  sql_client.execute("select * from requests").each do |row|
    request = {
      date_requested: int_to_date_time(row['date_requested']),
      item_id: row['item_id']
    }
    (requests_by_subscriber[row['subscriber_id']] ||= []) << request
  end
  requests_by_subscriber
end
# Group legacy shipment rows by subscriber id, and collect the distinct ship
# dates. Returns [shipments_by_subscriber, unique_shipment_dates] where the
# latter maps each distinct date to 1 (placeholder to be replaced by a new
# Shipment id later in the migration).
def get_old_shipments_by_subscriber_and_shipments(sql_client)
  shipments_by_subscriber = {}
  unique_shipment_dates = {}
  sql_client.execute("select * from shipments").each do |row|
    entry = {
      date_shipped: int_to_date_time(row['date_shipped']),
      item_id: row['item_id']
    }
    (shipments_by_subscriber[row['subscriber_id']] ||= []) << entry
    unique_shipment_dates[entry[:date_shipped]] = 1
  end
  [shipments_by_subscriber, unique_shipment_dates]
end
# returns hash: {old how_heard_id => new method_discovered enum index}
# Legacy labels without a mapping are omitted from the result, matching the
# original case statement (which had no else branch).
def get_old_how_heard(sql_client, new_method_of_discoveries)
  # FIX: replaced the case/when chain with a legacy-label -> enum-key table.
  label_to_key = {
    'Unknown' => 'unknown',
    'Mail' => 'mail_disc',
    'Uni Lit' => 'uni_lit',
    'Non-uni Lit' => 'non_uni_lit',
    'Other Ad' => 'other_ad',
    'Word of Mouth' => 'word_of_mouth',
    'Internet' => 'website',
    'Other' => 'other_disc'
  }
  old_how_heard = {}
  sql_client.execute("select * from how_heard").each do |r|
    key = label_to_key[r['how_heard_short']]
    old_how_heard[r['id']] = new_method_of_discoveries[key] if key
  end
  old_how_heard
end
# returns hash: {old method_received_id => new method_received enum index}
# Legacy labels without a mapping are omitted from the result, matching the
# original case statement (which had no else branch).
def get_old_method_received(sql_client, new_method_receiveds)
  # FIX: replaced the case/when chain with a legacy-label -> enum-key table.
  label_to_key = {
    'Mail' => 'mail',
    'Phone' => 'phone',
    'Personally delivered' => 'personally_delivered',
    'Internet' => 'internet',
    'Other' => 'other'
  }
  old_method_received = {}
  sql_client.execute("select * from method_received").each do |r|
    key = label_to_key[r['method_received']]
    old_method_received[r['id']] = new_method_receiveds[key] if key
  end
  old_method_received
end
# The legacy SQL Server tables store timestamps as integers formatted
# YYYYMMDDhhmmss (e.g. 20150131235959); parse one into a DateTime.
# Raises ArgumentError if the integer does not match that layout.
def int_to_date_time(int)
  DateTime.strptime(int.to_s, '%Y%m%d%H%M%S')
end
Migration (work in progress)
require 'spreadsheet'
require 'tiny_tds'
require 'csv'
#Export addresses from legacy SQL Server database as a CSV file to be manually cleansed with AddressFinder batch service
#usage: $ rake export_from_SQL_SERVER_with_existing[sql-server-ip-address,path-to-existing-cleansed-addresses] > output-file.csv
# (note: no space between arguments on the command line)
# Export legacy addresses as CSV on stdout, skipping subscribers whose
# addresses were already cleansed in the given file.
task :export_from_SQL_SERVER_with_existing, [:ip, :path_to_existing_cleansed_addresses] do |t, args|
  already_cleansed = get_cleansed_addresses(args[:path_to_existing_cleansed_addresses])
  client = TinyTds::Client.new username: "bfnz2", password: "bfnz", host: args[:ip]
  export_addresses(client, already_cleansed).each do |row|
    puts CSV.generate_line(row)
  end
end
#usage: $ rake export_from_SQL_SERVER[sql-server-ip-address] > output-file.csv
# Export every legacy address as CSV on stdout (no cleansed-address file).
task :export_from_SQL_SERVER, [:ip] do |t, args|
  client = TinyTds::Client.new username: "bfnz2", password: "bfnz", host: args[:ip]
  export_addresses(client, {}).each do |row|
    puts CSV.generate_line(row)
  end
end
# Builds the CSV rows (header row plus one row per subscriber) used for the
# AddressFinder cleansing batch. Subscribers already present in
# +existing_addresses+ (keyed by legacy id) are skipped.
# Returns an array of row arrays ready for CSV.generate_line.
def export_addresses(sql_client, existing_addresses)
  result = sql_client.execute("select * from subscribers")
  addresses = []
  addresses << ["id", "first_name", "last_name", "address"]
  result.each do |r|
    sub_id = r['id']
    next if existing_addresses.has_key?(sub_id)
    # Legacy data appends the 4-digit post code to city_town; strip it so only
    # the town name is exported. (A dead `post_code` MatchData assignment that
    # was never used has been removed.)
    city_town = ''
    city_town = r['city_town'].gsub(/ \d{4}$/, '') if r['city_town']
    address = (r['address'] || '') + ', ' + (r['suburb'] || '') + ', ' + city_town
    # Collapse the empty segments left by missing address parts.
    address.gsub!(/(, ){2,}/, ', ')
    address.gsub!(/, $/, '')
    addresses << [sub_id, r['first_name'], r['last_name'], address]
  end
  addresses
end
# Import data from ASP version of Bibles for NZ
# run with : rake import[sql-server-ip-address,path-to-cleansed-address-file]
# (note: no space between arguments on the command line)
# Placeholder for the final import task; the working migration code currently
# lives in the :temp task in this file and will be moved here once stable.
task :import, [:ip, :path_to_cleansed_addresses] do |t, args|
  # copy code in here when it works
end
# run with : rake import[sql-server-ip-address,path-to-cleansed-address-file]
# (note: no space between arguments on the command line)
# One-shot migration: wipes Shipments/Orders/Customers and rebuilds them from
# the legacy SQL Server database plus the manually cleansed address CSV.
# Prints a count (and any validation errors) for each record type created.
task :temp, [:ip, :path_to_cleansed_addresses] => :environment do |t, args|
  sql_client = TinyTds::Client.new username: "bfnz2", password: "bfnz", host: args[:ip]
  territorial_authorities = get_territorial_authorities
  addresses = get_cleansed_addresses(args[:path_to_cleansed_addresses])
  old_subscribers, old_subscribers_other_info = get_old_subscribers(sql_client, addresses, territorial_authorities)
  old_items = get_old_items(sql_client, get_new_items)
  old_requests = get_old_requests_by_subscriber(sql_client)
  old_shipments, unique_shipment_dates = get_old_shipments_by_subscriber_and_shipments(sql_client)
  old_how_heard = get_old_how_heard(sql_client, Order.method_of_discoveries)
  old_method_received = get_old_method_received(sql_client, Order.method_receiveds)
  #TODO: further_contact, bad address
  #TODO: split out adding cleansed address stuff, so that I can run the import from SQL Server without cleansed addresses, then run a separate task to add the cleansed addresses. That way, we can test importing all of the data without having the cleansed addresses
  # Clear out data from any previous run. Order items are detached one by one
  # first so the join records don't linger after Order.delete_all (which skips
  # callbacks).
  Shipment.delete_all()
  Order.find_each do |order|
    order.items.each do |item|
      order.items.delete(item)
    end
  end
  Order.delete_all()
  Customer.delete_all()
  # Create one Shipment per distinct legacy ship date, then overwrite the map
  # value with the new Shipment id so orders can be linked to it below.
  ship_counter = 0
  unique_shipment_dates.keys.each do |date|
    shipment = Shipment.create()
    shipment.created_at = date
    shipment.save
    puts "#{shipment.errors.full_messages.join(",")}" if shipment.errors.any?
    ship_counter += 1 if shipment.persisted?
    unique_shipment_dates[date] = shipment.id
  end
  puts "#{ship_counter} shipments created"
  # Create a Customer per legacy subscriber; the new id is recorded back onto
  # the attribute hash so the request loop can look it up.
  sub_counter = 0
  old_subscribers.each do |sub_id, sub|
    customer = Customer.create(sub)
    sub[:new_id] = customer.id
    puts "#{sub_id} - #{customer.errors.full_messages.join(",")}" if customer.errors.any?
    sub_counter += 1 if customer.persisted?
  end
  puts "#{sub_counter} customers created"
  # Each legacy request becomes an Order with a single item; when the legacy
  # data has a shipment for that subscriber+item, link the order to it.
  req_counter = 0
  ship_order_counter = 0
  old_requests.each do |sub_id, requests|
    if old_subscribers[sub_id]
      # (new_cust_id is unused; new_customer_id below re-reads the same value)
      new_cust_id = old_subscribers[sub_id][:new_id]
      requests.each do |request|
        old_item_id = request[:item_id]
        # NOTE(review): assumes every legacy request's item_id exists in
        # old_items — a missing id would raise NoMethodError here.
        new_item_id = old_items[old_item_id][:new_item_id]
        item = Item.find(new_item_id)
        request_date = request[:date_requested]
        method_of_discovery = old_how_heard[old_subscribers_other_info[sub_id][:how_heard_id]]
        method_received = old_method_received[old_subscribers_other_info[sub_id][:method_received_id]]
        new_customer_id = old_subscribers[sub_id][:new_id]
        order = Order.create(
          method_of_discovery: method_of_discovery,
          created_at: request_date,
          method_received: method_received,
          customer_id: new_customer_id)
        request[:new_order_id] = order.id
        puts "#{order.id} - #{order.errors.full_messages.join(",")}" if order.errors.any?
        order.items << item
        order.save
        req_counter += 1 if order.persisted?
        if old_shipments[sub_id] && old_shipments[sub_id].has_key?(old_item_id)
          date_shipped = old_shipments[sub_id][old_item_id]
          if unique_shipment_dates.has_key?(date_shipped)
            new_shipment_id = unique_shipment_dates[date_shipped]
            order.shipment_id = new_shipment_id
            order.save
            ship_order_counter += 1
          else
            puts "Error: shipment for date not found: #{date_shipped}"
          end
        end
      end
    end
  end
  puts "#{req_counter} orders created"
  puts "#{ship_order_counter} orders shipped"
end
# Maps item code => [new item id, item title] for every Item in the new system.
def get_new_items
  items_by_code = {}
  Item.find_each do |record|
    items_by_code[record.code] = [record.id, record.title]
  end
  items_by_code
end
# Maps territorial authority name => id, for address lookups during import.
def get_territorial_authorities
  ids_by_name = {}
  TerritorialAuthority.find_each do |authority|
    ids_by_name[authority.name] = authority.id
  end
  ids_by_name
end
# Reads the manually cleansed address CSV (AddressFinder batch output).
# Returns {legacy subscriber id (Integer) => address attribute hash}.
def get_cleansed_addresses(path)
  CSV.foreach(path, headers: true).each_with_object({}) do |row, cleansed|
    cleansed[row['id'].to_i] = {
      full_address: row['full address'],
      suburb: row['suburb'],
      city: row['city'],
      postcode: row['postcode'],
      ta: row['territorial authority'],
      pxid: row['pxid']
    }
  end
end
# Builds Customer attribute hashes from legacy subscribers, merging in the
# manually cleansed address data. Returns two hashes keyed by legacy id:
#   1. {old_id => customer attribute hash}
#   2. {old_id => {how_heard_id:, method_received_id:}} — kept separately
#      because those ids map to Order enums, not Customer columns.
def get_old_subscribers(sql_client, addresses, territorial_authorities)
  # NOTE(review): the hard-coded id list looks like a development test subset
  # — confirm before running the full migration.
  result = sql_client.execute("select * from subscribers where id in (44586,11452,26895,26845,46245,30628)")
  old_subscribers = {}
  old_subscribers_other_info = {}
  result.each do |r|
    old_id = r['id'].to_i
    old_subscribers_other_info[old_id] = {how_heard_id: r['how_heard_id'], method_received_id: r['method_received_id']}
    # NOTE(review): raises NoMethodError below if the cleansed CSV has no row
    # for this subscriber (address_info would be nil).
    address_info = addresses[old_id]
    ta_name = address_info[:ta]
    ta_id = nil
    if ta_name
      ta_id = territorial_authorities[ta_name]
    end
    # Prefer the cleansed postcode; otherwise fall back to the trailing four
    # digits of the legacy city_town field.
    postcode = nil
    if address_info[:postcode]
      postcode = address_info[:postcode]
    elsif r['city_town'].match(/\d{4}$/)
      postcode = r['city_town'].match(/\d{4}$/)[0]
    end
    # Prefer the cleansed city; otherwise the legacy city_town minus postcode.
    city_town = nil
    if address_info[:city]
      city_town = address_info[:city]
    else
      city_town = r['city_town'].gsub(/ \d{4}$/, '')
    end
    # Derive a salutation from gender; left nil when gender is unknown.
    title = nil
    if r['gender'] == 'Male'
      title = 'Mr'
    elsif r['gender'] == 'Female'
      title = 'Ms'
    end
    old_subscribers[old_id] = {
      territorial_authority_id: ta_id,
      first_name: r['first_name'],
      last_name: r['last_name'],
      address: address_info[:full_address],
      suburb: address_info[:suburb],
      city_town: city_town,
      post_code: postcode,
      ta: address_info[:ta],
      pxid: address_info[:pxid],
      phone: r['phone'],
      email: r['email'],
      title: title,
      tertiary_student: r['tertiary_student'],
      tertiary_institution: r['institution'],
      admin_notes: r['admin_notes'],
      coordinator_notes: r['coordinator_notes'],
      old_subscriber_id: old_id,
      # Raw legacy address fields are kept for auditing alongside the
      # cleansed values.
      old_system_address: r['address'],
      old_system_suburb: r['suburb'],
      old_system_city_town: r['city_town'],
      created_at: int_to_date_time(r['date_entered'])
    }
  end
  [old_subscribers, old_subscribers_other_info]
end
# Maps legacy item rows to the new Item records.
# Returns {old item id (Integer) => {name:, new_item_id:}}; new_item_id is nil
# for legacy items with no counterpart in the new system.
def get_old_items(sql_client, new_items)
  code_by_legacy_name = {
    'Recovery Version' => 'R',
    'Basic Elements 1' => 'X1',
    'Basic Elements 2' => 'X2',
    'Basic Elements 3' => 'X3'
  }
  sql_client.execute("select * from items order by ship_order").each_with_object({}) do |row, lookup|
    code = code_by_legacy_name[row['name']]
    lookup[row['id'].to_i] = {name: row['name'], new_item_id: code ? new_items[code][0] : nil}
  end
end
# Groups legacy requests by subscriber.
# Returns {subscriber_id => [{date_requested:, item_id:}, ...]} preserving
# the order rows are returned by the query.
def get_old_requests_by_subscriber(sql_client)
  requests_by_subscriber = {}
  sql_client.execute("select * from requests").each do |row|
    entry = {date_requested: int_to_date_time(row['date_requested']), item_id: row['item_id']}
    (requests_by_subscriber[row['subscriber_id']] ||= []) << entry
  end
  requests_by_subscriber
end
# Builds two hashes from the legacy shipments table:
#   1. {subscriber_id => {item_id => date_shipped}} — if a subscriber has
#      multiple shipments of the same item, the last row read wins.
#   2. {date_shipped => 1} — the set of distinct shipment timestamps.
# (Removed a `shipment` hash that was built and discarded on every row.)
def get_old_shipments_by_subscriber_and_shipments(sql_client)
  result = sql_client.execute("select * from shipments")
  old_shipments_by_subscriber = {}
  unique_shipment_dates = {}
  result.each do |r|
    sub_id = r['subscriber_id']
    date_shipped = int_to_date_time(r['date_shipped'])
    item_id = r['item_id']
    (old_shipments_by_subscriber[sub_id] ||= {})[item_id] = date_shipped
    unique_shipment_dates[date_shipped] = 1
  end
  [old_shipments_by_subscriber, unique_shipment_dates]
end
# Maps legacy how_heard rows to the new method_of_discovery enum.
# Returns {old how_heard id => new method_of_discovery enum index}; rows with
# an unrecognised how_heard_short value are omitted.
def get_old_how_heard(sql_client, new_method_of_discoveries)
  translation = {
    'Unknown' => 'unknown',
    'Mail' => 'mail_disc',
    'Uni Lit' => 'uni_lit',
    'Non-uni Lit' => 'non_uni_lit',
    'Other Ad' => 'other_ad',
    'Word of Mouth' => 'word_of_mouth',
    'Internet' => 'website',
    'Other' => 'other_disc'
  }
  old_how_heard = {}
  sql_client.execute("select * from how_heard").each do |row|
    new_name = translation[row['how_heard_short']]
    old_how_heard[row['id']] = new_method_of_discoveries[new_name] unless new_name.nil?
  end
  old_how_heard
end
# Maps legacy method_received rows to the new method_received enum.
# Returns {old method_received id => new method_received enum index}; rows
# with an unrecognised value are omitted.
def get_old_method_received(sql_client, new_method_receiveds)
  translation = {
    'Mail' => 'mail',
    'Phone' => 'phone',
    'Personally delivered' => 'personally_delivered',
    'Internet' => 'internet',
    'Other' => 'other'
  }
  old_method_received = {}
  sql_client.execute("select * from method_received").each do |row|
    new_name = translation[row['method_received']]
    old_method_received[row['id']] = new_method_receiveds[new_name] unless new_name.nil?
  end
  old_method_received
end
# The legacy tables store timestamps as integers like 20150131235959
# (YYYYMMDDhhmmss); convert one to a DateTime.
def int_to_date_time(int)
  timestamp_format = '%Y%m%d%H%M%S'
  DateTime.strptime(int.to_s, timestamp_format)
end
|
namespace :db do
  namespace :couch do
    desc "Load views in db/couch/* into the configured couchdb instance"
    task :migrate => :environment do
      Dir["db/couch/**/*.js"].each do |file|
        # Collapse multi-line JS strings and strip /* ... */ comments so the
        # design document parses as JSON.
        source = File.read(file).gsub(/\n\s*/, '').gsub(/\/\*.*?\*\//, '')
        document = JSON.parse(source)
        document['_id'] ||= "_design/#{File.basename(file, '.js')}"
        document['language'] ||= 'javascript'
        db = CouchRest::Model::Base.database
        existing = begin
          db.get(document['_id'])
        rescue
          # CouchRest raises when the document does not exist yet.
          nil
        end
        # Replace any stored copy wholesale: delete first, then save fresh.
        db.delete_doc(existing) unless existing.nil?
        db.save_doc(document)
      end
    end
  end
end
Refactor design document creation/update using CouchRest::Design
# Load every design document under db/couch/ into CouchDB, updating any
# existing copy in place via CouchRest::Design.
namespace :db do
  namespace :couch do
    desc "Load views in db/couch/* into the configured couchdb instance"
    task :migrate => :environment do
      Dir["db/couch/**/*.js"].each do |file|
        source = File.read(file).
          gsub(/\n\s*/, ''). # Our JS multiline string implementation :-p
          gsub(/\/\*.*?\*\//, '') # And strip multiline comments as well.
        document = CouchRest::Design.new JSON.parse(source)
        document.database = CouchRest::Model::Base.database
        document['_id'] ||= "_design/#{File.basename(file, '.js')}"
        document['language'] ||= 'javascript'
        # rescue nil: CouchRest raises when the document does not exist yet.
        current = document.database.get(document.id) rescue nil
        if current.nil?
          document.save
        else
          # NOTE(review): merges the new definition into the stored doc (so
          # its _rev is preserved) — presumably CouchRest::Document#update;
          # confirm it replaces stale view definitions rather than only
          # adding new keys.
          current.update(document)
          current.save
        end
      end
    end
  end
end
|
require 'csv'
require 'date'
# Report-generation tasks: each renders a report to CSV and writes it via
# Report.write, printing a status line. The status messages contained the
# literal text "#(unknown)" (not valid Ruby interpolation, printed verbatim);
# they have been restored to interpolate the report filename.
namespace :report do
  desc 'Generate CSV file with ALM stats for public sources'
  task :alm_stats => :environment do
    filename = "alm_stats.csv"
    sources = Source.installed.without_private
    csv = AlmStatsReport.new(sources).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with ALM stats for private and public sources'
  task :alm_private_stats => :environment do
    filename = "alm_private_stats.csv"
    sources = Source.installed
    csv = AlmStatsReport.new(sources).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Mendeley stats'
  task :mendeley_stats => :environment do
    filename = "mendeley_stats.csv"
    # check that source is installed
    source = Source.visible.where(name: "mendeley").first
    next if source.nil?
    csv = MendeleyReport.new(source).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with PMC usage stats'
  task :pmc => :environment do
    # check that source is installed
    source = Source.visible.where(name: "pmc").first
    next if source.nil?
    # FORMAT/MONTH/YEAR select a by-month report; without FORMAT the
    # cumulative report is generated instead.
    if ENV['FORMAT']
      filename = "pmc_#{ENV['FORMAT']}.csv"
      report = PmcByMonthReport.new(source, format: ENV['FORMAT'], month: ENV['MONTH'], year: ENV['YEAR'])
    else
      filename = "pmc_stats.csv"
      report = PmcReport.new(source)
    end
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with PMC HTML usage stats over time'
  task :pmc_html_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "html"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:pmc"].invoke
    Rake::Task["report:pmc"].reenable
  end

  desc 'Generate CSV file with PMC PDF usage stats over time'
  task :pmc_pdf_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "pdf"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:pmc"].invoke
    Rake::Task["report:pmc"].reenable
  end

  desc 'Generate CSV file with PMC combined usage stats over time'
  task :pmc_combined_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "combined"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:pmc"].invoke
    Rake::Task["report:pmc"].reenable
  end

  desc 'Generate CSV file with PMC cumulative usage stats'
  task :pmc_stats => :environment do
    ENV['FORMAT'] = nil
    ENV['MONTH'] = nil
    ENV['YEAR'] = nil
    Rake::Task["report:pmc"].invoke
    Rake::Task["report:pmc"].reenable
  end

  desc 'Generate CSV file with Counter usage stats'
  task :counter => :environment do
    # check that source is installed
    source = Source.visible.where(name: "counter").first
    next if source.nil?
    if ENV['FORMAT']
      filename = "counter_#{ENV['FORMAT']}.csv"
      report = CounterByMonthReport.new(source, format: ENV['FORMAT'], month: ENV['MONTH'], year: ENV['YEAR'])
    else
      filename = "counter_stats.csv"
      report = CounterReport.new(source)
    end
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Counter HTML usage stats over time'
  task :counter_html_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "html"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:counter"].invoke
    Rake::Task["report:counter"].reenable
  end

  desc 'Generate CSV file with Counter PDF usage stats over time'
  task :counter_pdf_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "pdf"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:counter"].invoke
    Rake::Task["report:counter"].reenable
  end

  desc 'Generate CSV file with Counter XML usage stats over time'
  task :counter_xml_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "xml"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:counter"].invoke
    Rake::Task["report:counter"].reenable
  end

  desc 'Generate CSV file with Counter combined usage stats over time'
  task :counter_combined_stats => :environment do
    date = Time.zone.now - 1.year
    ENV['FORMAT'] = "combined"
    ENV['MONTH'] = date.month.to_s
    ENV['YEAR'] = date.year.to_s
    Rake::Task["report:counter"].invoke
    Rake::Task["report:counter"].reenable
  end

  desc 'Generate CSV file with cumulative Counter usage stats'
  task :counter_stats => :environment do
    ENV['FORMAT'] = nil
    ENV['MONTH'] = nil
    ENV['YEAR'] = nil
    Rake::Task["report:counter"].invoke
    Rake::Task["report:counter"].reenable
  end

  desc 'Generate CSV file with combined ALM stats'
  task :combined_stats => :environment do
    filename = "alm_report.csv"
    csv = AlmCombinedStatsReport.new(
      alm_report: AlmStatsReport.new(Source.installed.without_private),
      pmc_report: PmcReport.new(Source.visible.where(name: "pmc").first),
      counter_report: CounterReport.new(Source.visible.where(name:"counter").first),
      mendeley_report: MendeleyReport.new(Source.visible.where(name:"mendeley").first)
    ).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with combined ALM private and public stats'
  task :combined_private_stats => :environment do
    filename = "alm_private_report.csv"
    csv = AlmCombinedStatsReport.new(
      alm_report: AlmStatsReport.new(Source.installed),
      pmc_report: PmcReport.new(Source.visible.where(name: "pmc").first),
      counter_report: CounterReport.new(Source.visible.where(name:"counter").first),
      mendeley_report: MendeleyReport.new(Source.visible.where(name:"mendeley").first)
    ).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Zip reports'
  task :zip => :environment do
    folderpath = "#{Rails.root}/data/report_#{Time.zone.now.to_date}"
    if !Dir.exist? folderpath
      puts "No reports to compress."
    elsif Report.zip_file && Report.zip_folder
      puts "Reports have been compressed."
    else
      puts "Reports could not be compressed."
    end
  end

  desc 'Generate all article stats reports'
  task :all_stats => [:environment, :alm_stats, :mendeley_stats, :pmc_html_stats, :pmc_pdf_stats, :pmc_combined_stats, :pmc_stats, :counter_html_stats, :counter_pdf_stats, :counter_xml_stats, :counter_combined_stats, :counter_stats, :combined_stats, :alm_private_stats, :combined_private_stats, :zip]
end
Small refactoring step: remove conditionals that generate different kinds of reports in report.rake
require 'csv'
require 'date'
# Report-generation tasks (refactored variant with per-task report objects).
# Fixes applied: the status messages contained the literal "#(unknown)" (not
# valid Ruby interpolation, printed verbatim) — restored to "#{filename}";
# :pmc_pdf_stats wrote to "pmc_html.csv" (copy-paste error) — now "pmc_pdf.csv".
namespace :report do
  desc 'Generate CSV file with ALM stats for public sources'
  task :alm_stats => :environment do
    filename = "alm_stats.csv"
    sources = Source.installed.without_private
    csv = AlmStatsReport.new(sources).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with ALM stats for private and public sources'
  task :alm_private_stats => :environment do
    filename = "alm_private_stats.csv"
    sources = Source.installed
    csv = AlmStatsReport.new(sources).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Mendeley stats'
  task :mendeley_stats => :environment do
    filename = "mendeley_stats.csv"
    # check that source is installed
    source = Source.visible.where(name: "mendeley").first
    next if source.nil?
    csv = MendeleyReport.new(source).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with PMC HTML usage stats over time'
  task :pmc_html_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "pmc").first
    next if source.nil?
    filename = "pmc_html.csv"
    date = Time.zone.now - 1.year
    report = PmcByMonthReport.new(source, format: "html", month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with PMC PDF usage stats over time'
  task :pmc_pdf_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "pmc").first
    next if source.nil?
    # was "pmc_html.csv", which clobbered the HTML report's output file
    filename = "pmc_pdf.csv"
    date = Time.zone.now - 1.year
    report = PmcByMonthReport.new(source, format: "pdf", month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with PMC combined usage stats over time'
  task :pmc_combined_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "pmc").first
    next if source.nil?
    filename = "pmc_combined.csv"
    date = Time.zone.now - 1.year
    report = PmcByMonthReport.new(source, format: "combined", month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with PMC cumulative usage stats'
  task :pmc_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "pmc").first
    next if source.nil?
    filename = "pmc_stats.csv"
    report = PmcReport.new(source)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Counter HTML usage stats over time'
  task :counter_html_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "counter").first
    next if source.nil?
    filename = "counter_html.csv"
    date = Time.zone.now - 1.year
    report = CounterByMonthReport.new(source, format: 'html', month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Counter PDF usage stats over time'
  task :counter_pdf_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "counter").first
    next if source.nil?
    filename = "counter_pdf.csv"
    date = Time.zone.now - 1.year
    report = CounterByMonthReport.new(source, format: 'pdf', month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Counter XML usage stats over time'
  task :counter_xml_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "counter").first
    next if source.nil?
    filename = "counter_xml.csv"
    date = Time.zone.now - 1.year
    report = CounterByMonthReport.new(source, format: 'xml', month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with Counter combined usage stats over time'
  task :counter_combined_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "counter").first
    next if source.nil?
    filename = "counter_combined.csv"
    date = Time.zone.now - 1.year
    report = CounterByMonthReport.new(source, format: 'combined', month: date.month.to_s, year: date.year.to_s)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with cumulative Counter usage stats'
  task :counter_stats => :environment do
    # check that source is installed
    source = Source.visible.where(name: "counter").first
    next if source.nil?
    filename = "counter_stats.csv"
    report = CounterReport.new(source)
    csv = report.to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with combined ALM stats'
  task :combined_stats => :environment do
    filename = "alm_report.csv"
    csv = AlmCombinedStatsReport.new(
      alm_report: AlmStatsReport.new(Source.installed.without_private),
      pmc_report: PmcReport.new(Source.visible.where(name: "pmc").first),
      counter_report: CounterReport.new(Source.visible.where(name:"counter").first),
      mendeley_report: MendeleyReport.new(Source.visible.where(name:"mendeley").first)
    ).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Generate CSV file with combined ALM private and public stats'
  task :combined_private_stats => :environment do
    filename = "alm_private_report.csv"
    csv = AlmCombinedStatsReport.new(
      alm_report: AlmStatsReport.new(Source.installed),
      pmc_report: PmcReport.new(Source.visible.where(name: "pmc").first),
      counter_report: CounterReport.new(Source.visible.where(name:"counter").first),
      mendeley_report: MendeleyReport.new(Source.visible.where(name:"mendeley").first)
    ).to_csv
    if csv.nil?
      puts "No data for report \"#{filename}\"."
    elsif Report.write(filename, csv)
      puts "Report \"#{filename}\" has been written."
    else
      puts "Report \"#{filename}\" could not be written."
    end
  end

  desc 'Zip reports'
  task :zip => :environment do
    folderpath = "#{Rails.root}/data/report_#{Time.zone.now.to_date}"
    if !Dir.exist? folderpath
      puts "No reports to compress."
    elsif Report.zip_file && Report.zip_folder
      puts "Reports have been compressed."
    else
      puts "Reports could not be compressed."
    end
  end

  desc 'Generate all article stats reports'
  task :all_stats => [:environment, :alm_stats, :mendeley_stats, :pmc_html_stats, :pmc_pdf_stats, :pmc_combined_stats, :pmc_stats, :counter_html_stats, :counter_pdf_stats, :counter_xml_stats, :counter_combined_stats, :counter_stats, :combined_stats, :alm_private_stats, :combined_private_stats, :zip]
end
|
Add rake task to register Contacts with router
Hard coding HMRC for now. Relates to
https://www.pivotaltracker.com/s/projects/1010882/stories/67458078
namespace :router do
  # Shared setup: build the router-api client used by the tasks below.
  task :router_environment => :environment do
    require 'plek'
    require 'gds_api/router'
    @router_api = GdsApi::Router.new(Plek.current.find('router-api'))
  end

  task :register_backend => :router_environment do
    backend_url = Plek.current.find('contacts', :force_http => true) + "/"
    @router_api.add_backend('contacts', backend_url)
  end

  task :register_routes => :router_environment do
    # NOTE: HMRC is hard-coded for now (see related story).
    @router_api.add_route('/contact/hm-revenue-customs', 'prefix', 'contacts')
  end

  desc "Register Contacts application and routes with the router"
  task :register => [ :register_backend, :register_routes ]
end
|
namespace :router do
  # Shared setup: build the router-api client used by the tasks below.
  task :router_environment => :environment do
    require 'plek'
    require 'gds_api/router'
    @router_api = GdsApi::Router.new(Plek.current.find('router-api'))
  end
  task :register => [:register_backend, :register_browse]
  task :register_backend => :router_environment do
    @router_api.add_backend('collections', Plek.current.find('collections', :force_http => true) + "/")
  end
  # Old redirects are removed, new ones added, then the /browse routes are
  # pointed at this application.
  task :register_browse => [:unregister_browse_redirects, :register_browse_redirects, :router_environment] do
    routes = [
      %w(/browse prefix),
      %w(/browse.json exact),
    ]
    routes.each do |path, type|
      @router_api.add_route(path, type, 'collections')
    end
    @router_api.commit_routes
  end
  task :register_browse_redirects => :router_environment do
    routes = [
      %w(/visas-immigration /browse/visas-immigration),
      %w(/business /browse/business),
    ]
    routes.each do |path, destination|
      # Register as 'exact' (was 'prefix'): the pre-existing redirects are
      # 'exact' routes, which take precedence over prefix routes, so prefix
      # redirects here would silently never win.
      @router_api.add_redirect_route(path, 'exact', destination)
    end
    @router_api.commit_routes
  end
  # Remove the legacy redirects that used to live under /browse itself.
  task :unregister_browse_redirects => :router_environment do
    routes = [
      %w(/browse/business exact),
      %w(/browse/visas-immigration exact),
    ]
    routes.each do |path, type|
      begin
        @router_api.delete_route(path, type)
      rescue GdsApi::HTTPNotFound
        # the router api returns a 404 if the route doesn't already exist
      end
    end
    @router_api.commit_routes
  end
end
Register redirects as exact routes
The existing redirects are 'exact' routes, so they override the prefix
routes here.
There isn't a good reason for these to be prefixes, so change them to
'exact'.
namespace :router do
  # Shared setup: build the router-api client used by the tasks below.
  task :router_environment => :environment do
    require 'plek'
    require 'gds_api/router'
    @router_api = GdsApi::Router.new(Plek.current.find('router-api'))
  end
  # Register the application backend and all /browse routes with the router.
  task :register => [:register_backend, :register_browse]
  task :register_backend => :router_environment do
    @router_api.add_backend('collections', Plek.current.find('collections', :force_http => true) + "/")
  end
  # Old redirects are removed, new ones added (prerequisite order), then the
  # /browse prefix and /browse.json routes are pointed at this application.
  task :register_browse => [:unregister_browse_redirects, :register_browse_redirects, :router_environment] do
    routes = [
      %w(/browse prefix),
      %w(/browse.json exact),
    ]
    routes.each do |path, type|
      @router_api.add_route(path, type, 'collections')
    end
    @router_api.commit_routes
  end
  # Redirect the old top-level paths to their /browse equivalents. These are
  # registered as 'exact' so they take precedence over any prefix route.
  task :register_browse_redirects => :router_environment do
    routes = [
      %w(/visas-immigration /browse/visas-immigration),
      %w(/business /browse/business),
    ]
    routes.each do |path, destination|
      @router_api.add_redirect_route(path, 'exact', destination)
    end
    @router_api.commit_routes
  end
  # Remove the legacy redirects that used to live under /browse itself.
  task :unregister_browse_redirects => :router_environment do
    routes = [
      %w(/browse/business exact),
      %w(/browse/visas-immigration exact),
    ]
    routes.each do |path, type|
      begin
        @router_api.delete_route(path, type)
      rescue GdsApi::HTTPNotFound
        # the router api returns a 404 if the route doesn't already exist
      end
    end
    @router_api.commit_routes
  end
end
|
namespace :sample do
desc 'Reset Auto Increment Ids'
# Re-sync each table's Postgres id sequence with the rows actually present.
# The demo task inserts records with explicit :id values, which leaves the
# sequences untouched; restarting them at 1 afterwards makes the next
# implicit insert collide with an existing id (the cause of the demo
# insertion errors). setval(..., max(id) + 1, false) is correct for both an
# empty table (next id is 1) and a populated one (next id is max + 1).
task reset: :environment do
  sequences = {
    Alert     => %w[alerts alerts_id_seq],
    Order     => %w[orders orders_id_seq],
    OrderItem => %w[order_items order_items_id_seq],
    Project   => %w[projects projects_id_seq],
    Product   => %w[products products_id_seq],
    Staff     => %w[staff staff_id_seq]
  }
  sequences.each do |model, (table, sequence)|
    model.connection.execute(
      "SELECT setval('#{sequence}', COALESCE(MAX(id), 0) + 1, false) FROM #{table}"
    )
  end
end
desc 'Generates demo data'
# Seeds a fixed demo dataset for the app. Every row carries an explicit :id so
# records can reference each other by number; explicit ids bypass the Postgres
# id sequences (see the reset task above for the consequence).
# NOTE(review): several models seeded here (Cloud, ProductAnswer, ProductType,
# ProductTypeQuestion, ProjectQuestion, Approval, StaffProject) have no
# matching sequence reset in sample:reset -- confirm whether that matters.
task demo: :environment do
# Demo accounts: all share the password "jellyfish" and secret 'jellyfish-token'.
Staff.create!([
{ id: 4, first_name: "Unused", last_name: "Staff", email: "unused@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 0, current_sign_in_at: nil, last_sign_in_at: nil, current_sign_in_ip: nil, last_sign_in_ip: nil, role: 0, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 2, first_name: "ManageIQ", last_name: "Staff", email: "miq@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 17, current_sign_in_at: "2015-02-06 17:04:10", last_sign_in_at: "2015-02-06 16:57:41", current_sign_in_ip: "54.172.90.47", last_sign_in_ip: "54.172.90.47", role: 1, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 3, first_name: "User", last_name: "Staff", email: "user@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 4, current_sign_in_at: "2015-02-13 18:00:54", last_sign_in_at: "2015-02-12 19:37:15", current_sign_in_ip: "128.229.4.2", last_sign_in_ip: "128.229.4.2", role: 0, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 5, first_name: "All", last_name: "Users", email: "projectjellyfish@bah.com", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 0, current_sign_in_at: nil, last_sign_in_at: nil, current_sign_in_ip: nil, last_sign_in_ip: nil, role: 0, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 1, first_name: "Admin", last_name: "Staff", email: "admin@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 36, current_sign_in_at: "2015-02-18 00:39:32", last_sign_in_at: "2015-02-17 20:28:51", current_sign_in_ip: "127.0.0.1", last_sign_in_ip: "108.45.125.67", role: 1, deleted_at: nil, secret: 'jellyfish-token'}
])
# Cloud providers products can be attached to (referenced below by cloud_id).
Cloud.create!([
{ id: 1, name: "AWS", description: nil, extra: "{}", deleted_at: nil},
{ id: 2, name: "Azure", description: nil, extra: "{}", deleted_at: nil},
{ id: 3, name: "Rackspace", description: nil, extra: "{}", deleted_at: nil},
{ id: 4, name: "VMware", description: nil, extra: nil, deleted_at: nil},
{ id: 5, name: "Google", description: nil, extra: nil, deleted_at: nil},
{ id: 6, name: "Other", description: nil, extra: nil, deleted_at: nil},
{ id: 7, name: "OpenStack", description: nil, extra: nil, deleted_at: nil}
])
# Catalog products. "--CHEF-ROLE--" is a literal placeholder value, not a
# template that gets substituted here.
Product.create!([
{ id: 1, name: "Small", description: "Small EC2 Instance", service_type_id: 8, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_ec2.png", options: {}, deleted_at: nil, product_type_id: 1, setup_price: "1.99", hourly_price: "0.001", monthly_price: "0.05"},
{ id: 2, name: "Medium", description: "Medium EC2 Instance", service_type_id: 8, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_ec2.png", options: {}, deleted_at: nil, product_type_id: 1, setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075"},
{ id: 3, name: "Large", description: "Large EC2 Instance", service_type_id: 8, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_ec2.png", options: {}, deleted_at: nil, product_type_id: 1, setup_price: "3.99", hourly_price: "0.0055", monthly_price: "0.12"},
{ id: 5, name: "Medium MySQL", description: "Medium MySQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "1.99", hourly_price: "0.004", monthly_price: "0.1"},
{ id: 6, name: "Medium PostgreSQL", description: "Medium PostgreSQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "2.99", hourly_price: "0.004", monthly_price: "0.25"},
{ id: 7, name: "Large PostgreSQL", description: "Large PostgreSQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "3.99", hourly_price: "0.009", monthly_price: "0.5"},
{ id: 8, name: "Medium Aurora", description: "Medium Aurora", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "4.99", hourly_price: "0.015", monthly_price: "0.95"},
{ id: 9, name: "Large SQL Server", description: "Large SQL Server", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "5.99", hourly_price: "0.025", monthly_price: "1.29"},
{ id: 11, name: "West Coast Storage", description: "Normal, Northern California", service_type_id: 5, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_s3.png", options: {}, deleted_at: nil, product_type_id: 4, setup_price: "0.99", hourly_price: "0.001", monthly_price: "0.05"},
{ id: 4, name: "Small MySQL", description: "Small MySQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "1.0", hourly_price: "1.0", monthly_price: "1.0"},
{ id: 16, name: "LAMP Stack", description: "Linux, Apache, MySQL, PHP", service_type_id: 0, service_catalog_id: 0, cloud_id: 1, chef_role: "0", active: true, img: "products/php.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 17, name: "LAMP Stack", description: "Linux, Apache, MySQL, PHP", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "0", active: true, img: "products/php.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "20.0", hourly_price: "20.0", monthly_price: "20.0"},
{ id: 18, name: "Rails Stack", description: "Ruby on Rails Stack", service_type_id: 0, service_catalog_id: 0, cloud_id: 1, chef_role: "0", active: true, img: "products/rails.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 19, name: "MEAN Stack", description: "MongoDB, ExpressJS, AngularJS, NodeJS.", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "0", active: true, img: "products/mean.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 20, name: "Sr. Java Developer", description: "", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/woman.png", options: nil, deleted_at: nil, product_type_id: 7, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 21, name: "Sr. System Administrator", description: "Sr. System Administrator", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/woman.png", options: nil, deleted_at: nil, product_type_id: 7, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 22, name: "Project Manager", description: "Project Manager", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/man.png", options: nil, deleted_at: nil, product_type_id: 7, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 23, name: "JIRA Project", description: "A project in corporate JIRA instance.", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "0", active: true, img: "products/jira.png", options: nil, deleted_at: nil, product_type_id: 6, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 24, name: "Confluence Project", description: "Confluence Project", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/confluence.png", options: nil, deleted_at: nil, product_type_id: 6, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 25, name: "Bugzilla Instance", description: "Bugzilla Instance", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/bugzilla.png", options: nil, deleted_at: nil, product_type_id: 6, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 26, name: "1GB NetApps Storage", description: "NetApps Storage", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/netapp.png", options: nil, deleted_at: nil, product_type_id: 4, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 10, name: "S3 Storage", description: "", service_type_id: 5, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_s3.png", options: {}, deleted_at: nil, product_type_id: 4, setup_price: "1.0", hourly_price: "1.0", monthly_price: "1.0"},
{ id: 28, name: "Teradata", description: "Teradata", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/teradata.png", options: nil, deleted_at: nil, product_type_id: 2, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 31, name: "RHEL 6 Large ", description: "Large RHEL 6 Instance", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "--CHEF-ROLE--", active: true, img: "products/redhat.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 32, name: "RHEL 6 Medium", description: "RHEL 6 Medium", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "--CHEF-ROLE--", active: true, img: "products/redhat.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 30, name: "RHEL 6 Small ", description: "Small RHEL 6 Instance", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "--CHEF-ROLE--", active: true, img: "products/redhat.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 33, name: "Apache Web Server ", description: "Apache Web Server", service_type_id: 0, service_catalog_id: 0, cloud_id: 7, chef_role: "--CHEF-ROLE--", active: true, img: "products/apache.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 34, name: "MS Exchange Server", description: "MS Exchange Server", service_type_id: 0, service_catalog_id: 0, cloud_id: 2, chef_role: "0", active: true, img: "products/exchange.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 27, name: "100 Node Hadoop Cluster", description: nil, service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/hadoop.png", options: nil, deleted_at: nil, product_type_id: 2, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 29, name: "10 Node Hadoop Cluster", description: nil, service_type_id: 0, service_catalog_id: 0, cloud_id: 1, chef_role: "0", active: true, img: "products/hadoop.png", options: nil, deleted_at: nil, product_type_id: 2, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"}
])
# Per-product answers to the product-type questions defined further down.
# NOTE(review): rows with product_id 12-15 reference products that are not
# created above (product ids jump from 11 to 16) -- confirm this is intended.
ProductAnswer.create!([
{ id: 1, product_id: 1, product_type_question_id: 1, answer: "t2.micro"},
{ id: 2, product_id: 1, product_type_question_id: 2, answer: "20"},
{ id: 3, product_id: 2, product_type_question_id: 1, answer: "m3.medium"},
{ id: 4, product_id: 2, product_type_question_id: 2, answer: "40"},
{ id: 5, product_id: 3, product_type_question_id: 1, answer: "m3.large"},
{ id: 6, product_id: 3, product_type_question_id: 2, answer: "80"},
{ id: 7, product_id: 4, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 8, product_id: 4, product_type_question_id: 7, answer: "mysql"},
{ id: 9, product_id: 4, product_type_question_id: 8, answer: "20"},
{ id: 11, product_id: 5, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 12, product_id: 5, product_type_question_id: 7, answer: "mysql"},
{ id: 13, product_id: 5, product_type_question_id: 8, answer: "40"},
{ id: 14, product_id: 5, product_type_question_id: 9, answer: "magnetic"},
{ id: 15, product_id: 6, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 16, product_id: 6, product_type_question_id: 7, answer: "posgresql"},
{ id: 17, product_id: 6, product_type_question_id: 8, answer: "40"},
{ id: 18, product_id: 6, product_type_question_id: 9, answer: "ssd"},
{ id: 19, product_id: 7, product_type_question_id: 6, answer: "db.m3.large"},
{ id: 20, product_id: 7, product_type_question_id: 7, answer: "postgresql"},
{ id: 21, product_id: 7, product_type_question_id: 8, answer: "120"},
{ id: 22, product_id: 7, product_type_question_id: 9, answer: "ssd"},
{ id: 23, product_id: 8, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 24, product_id: 8, product_type_question_id: 7, answer: "aurora"},
{ id: 25, product_id: 8, product_type_question_id: 8, answer: "40"},
{ id: 26, product_id: 8, product_type_question_id: 9, answer: "magnetic"},
{ id: 27, product_id: 9, product_type_question_id: 6, answer: "db.m3.xlarge"},
{ id: 28, product_id: 9, product_type_question_id: 7, answer: "sqlserver"},
{ id: 29, product_id: 9, product_type_question_id: 8, answer: "120"},
{ id: 30, product_id: 9, product_type_question_id: 9, answer: "ssd"},
{ id: 31, product_id: 10, product_type_question_id: 10, answer: "normal"},
{ id: 33, product_id: 11, product_type_question_id: 10, answer: "normal"},
{ id: 34, product_id: 11, product_type_question_id: 11, answer: "us-west-1"},
{ id: 35, product_id: 12, product_type_question_id: 10, answer: "normal"},
{ id: 36, product_id: 12, product_type_question_id: 11, answer: "us-west-2"},
{ id: 37, product_id: 13, product_type_question_id: 10, answer: "reduced"},
{ id: 38, product_id: 13, product_type_question_id: 11, answer: ""},
{ id: 39, product_id: 14, product_type_question_id: 10, answer: "reduced"},
{ id: 40, product_id: 14, product_type_question_id: 11, answer: "us-west-1"},
{ id: 41, product_id: 15, product_type_question_id: 10, answer: "reduced"},
{ id: 42, product_id: 15, product_type_question_id: 11, answer: "us-west-2"},
{ id: 10, product_id: 4, product_type_question_id: 9, answer: "standard"},
{ id: 43, product_id: 26, product_type_question_id: 10, answer: "normal"},
{ id: 44, product_id: 26, product_type_question_id: 11, answer: "us-west-1"},
{ id: 32, product_id: 10, product_type_question_id: 11, answer: "us-west-2"},
{ id: 45, product_id: 27, product_type_question_id: 3, answer: "4"},
{ id: 46, product_id: 27, product_type_question_id: 4, answer: "40"},
{ id: 47, product_id: 27, product_type_question_id: 5, answer: "2"},
{ id: 48, product_id: 28, product_type_question_id: 3, answer: "4"},
{ id: 49, product_id: 28, product_type_question_id: 4, answer: "40"},
{ id: 50, product_id: 28, product_type_question_id: 5, answer: "2"},
{ id: 51, product_id: 29, product_type_question_id: 3, answer: "4"},
{ id: 52, product_id: 29, product_type_question_id: 4, answer: "40"},
{ id: 53, product_id: 29, product_type_question_id: 5, answer: "1"}
])
# Product categories (referenced by Product#product_type_id above).
ProductType.create!([
{ id: 1, name: "Infrastructure", description: "Available Infrastructure"},
{ id: 5, name: "Platforms", description: "Available Platforms\n"},
{ id: 3, name: "Databases", description: "Available Database"},
{ id: 2, name: "Big Data", description: "Available Big Data Solutions"},
{ id: 6, name: "Applications", description: "Available Applications"},
{ id: 4, name: "Storage", description: "Available Storage"},
{ id: 7, name: "Staff", description: "Available Staff"}
])
# Form-field definitions per product type; :options pairs are [value, label].
ProductTypeQuestion.create!([
{ id: 1, product_type_id: 1, label: "Instance Size", field_type: "select", placeholder: "", help: "", options: [["t2.micro", "t2.micro"], ["m3.medium", "m3.medium"], ["m3.large", "m3.large"]], default: "m3.medium", required: true, load_order: 0, manageiq_key: "instance_size"},
{ id: 2, product_type_id: 1, label: "Disk Size", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "40", required: true, load_order: 1, manageiq_key: "disk_size"},
{ id: 3, product_type_id: 2, label: "RAM", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "4", required: true, load_order: 0, manageiq_key: "ram_size"},
{ id: 4, product_type_id: 2, label: "Disk Size", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "40", required: true, load_order: 1, manageiq_key: "disk_size"},
{ id: 5, product_type_id: 2, label: "CPU", field_type: "select", placeholder: "", help: "", options: [["1", "1 CPU"], ["2", "2 CPUs"], ["4", "4 CPUs"], ["6", "6 CPUs"], ["8", "8 CPUs"]], default: "1", required: true, load_order: 2, manageiq_key: "cpu_count"},
{ id: 6, product_type_id: 3, label: "Instance Size", field_type: "select", placeholder: "", help: "", options: [["db.m3.medium", "db.m3.medium"], ["db.m3.large", "db.m3.large"], ["db.m3.xlarge", "db.m3.xlarge"]], default: "db.m3.medium", required: true, load_order: 0, manageiq_key: "instance_size"},
{ id: 7, product_type_id: 3, label: "DB Engine", field_type: "select", placeholder: "", help: "", options: [["aurora", "Aurora"], ["mysql", "MySQL"], ["postgresql", "PostgreSQL"], ["sqlserver", "SQL Server"]], default: "", required: true, load_order: 1, manageiq_key: "db_engine"},
{ id: 8, product_type_id: 3, label: "Disk Size", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "40", required: true, load_order: 2, manageiq_key: "disk_size"},
{ id: 9, product_type_id: 3, label: "Storage Type", field_type: "select", placeholder: "", help: "", options: [["standard", "standard"], ["gp2", "gp2"], ["io1", "io1"]], default: "ssd", required: true, load_order: 3, manageiq_key: "storage_type"},
{ id: 10, product_type_id: 4, label: "Storage Redundancy", field_type: "select", placeholder: "", help: "", options: [["normal", "Normal"], ["reduced", "Reduced"]], default: "normal", required: true, load_order: 0, manageiq_key: "availability"},
{ id: 11, product_type_id: 4, label: "Region", field_type: "select", placeholder: "", help: "", options: [["", "US Standard"], ["us-west-1", "US-West (Northern California)"], ["us-west-2", "US-West (Oregon)"], ["EU", "EU (Ireland)"], ["ap-northeast-1", "Asia Pacific (Tokyo)"], ["ap-southeast-1", "Asia Pacific (Singapore)"], ["ap-southeast-2", "Asia Pacific (Sydney)"]], default: "", required: true, load_order: 1, manageiq_key: "region"}
])
# Demo projects. "--CC--" and "--STAFF_ID--" are literal placeholder strings.
Project.create!([
{ id: 3, name: "Blog", description: "Project description", cc: "--CC--", budget: 2000.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images/128x128-wordpress.png", deleted_at: nil, spent: "1800.0", status: 0, approval: 1},
{ id: 4, name: "Cloud File Share", description: "Project description", cc: "--CC--", budget: 123654.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images//cloud-checkmark-128.png", deleted_at: nil, spent: "0.0", status: 0, approval: 1},
{ id: 1, name: "Project 1", description: "Project description", cc: "--CC--", budget: 123654.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images/documentation.png", deleted_at: nil, spent: "0.0", status: 0, approval: 0},
{ id: 2, name: "Mobile App API", description: "Project description", cc: "--CC--", budget: 3000.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images/icon-mobile-orange.png", deleted_at: nil, spent: "2000.0", status: 0, approval: 1},
{ id: 5, name: "Cloud Exchange", description: nil, cc: nil, budget: 1000000000.0, staff_id: nil, start_date: "2015-02-12", end_date: "2016-02-11", img: nil, deleted_at: nil, spent: "0.0", status: 0, approval: 0},
{ id: 6, name: "Project Jellyfish Demo", description: nil, cc: nil, budget: 10000.0, staff_id: nil, start_date: "2015-02-13", end_date: "2015-03-13", img: nil, deleted_at: nil, spent: "0.0", status: 0, approval: 0}
])
# Questions asked when a project is created; field_type is an integer enum.
ProjectQuestion.create!([
{ id: 1, question: "Project Description", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 2},
{ id: 2, question: "Project Charge Code", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 2},
{ id: 3, question: "Maintenance Day", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 3},
{ id: 4, question: "Performed Maintenance", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 0},
{ id: 5, question: "Default provisioning location", help_text: "", required: true, deleted_at: nil, load_order: nil, options: ["East Coast Data Center", "West Coast Data Center", "Classified Data Center"], field_type: 1},
{ id: 6, question: "Will this run in production?", help_text: "", required: true, deleted_at: nil, load_order: nil, options: ["Yes", "No"], field_type: 1},
{ id: 7, question: "FISMA Classification", help_text: "", required: true, deleted_at: nil, load_order: nil, options: ["Low", "Medium", "High"], field_type: 1},
{ id: 8, question: "Period of Performance", help_text: "in months", required: nil, deleted_at: nil, load_order: 1, options: nil, field_type: 2}
])
# Approval decisions by staff 3 for projects 1 and 2.
Approval.create!([
{ id: 1, staff_id: 3, project_id: 1, approved: false, reason: nil},
{ id: 2, staff_id: 3, project_id: 2, approved: true, reason: nil}
])
# Orders, all placed by staff 1; items are attached below.
Order.create!([
{ id: 1, staff_id: 1, engine_response: nil, active: nil, options: {}, deleted_at: nil, total: 0.0},
{ id: 2, staff_id: 1, engine_response: nil, active: nil, options: {}, deleted_at: nil, total: 0.0},
{ id: 3, staff_id: 1, engine_response: nil, active: nil, options: {}, deleted_at: nil, total: 0.0},
{ id: 4, staff_id: 1, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 5, staff_id: 1, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 6, staff_id: 1, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 7, staff_id: 1, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 8, staff_id: 1, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 9, staff_id: 1, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0}
])
# Order line items; :product / :project are looked up from the rows created
# above (Product.where(...).first returns nil if the id is missing).
OrderItem.create!([
{ id: 9, order_id: 3, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "54048bdc-3cab-4e71-85ca-3b50a3879a31", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 8, order_id: 2, cloud_id: 1, :product => Product.where(id: 5).first, service_id: nil, provision_status: 2, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "4f249639-17ca-493d-8548-9b0728bfc99b", setup_price: "1.99", hourly_price: "0.004", monthly_price: "0.1", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 4, order_id: 1, cloud_id: 1, :product => Product.where(id: 10).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "0c01b271-fcc6-4fdd-9dab-21f3f2f44e59", setup_price: "0.99", hourly_price: "0.01", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 10, order_id: 3, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "e8e488c2-ca19-4d6f-aaf1-42d28050904d", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 12, order_id: 3, cloud_id: 1, :product => Product.where(id: 10).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "ee0164e6-89b7-451f-8351-8fd3d52d4eee", setup_price: "0.99", hourly_price: "0.001", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 1, order_id: 1, cloud_id: 1, :product => Product.where(id: 1).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "422d7851-23ad-4525-b4e9-fad1ad0ce797", setup_price: "1.99", hourly_price: "0.05", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 13, order_id: 3, cloud_id: 1, :product => Product.where(id: 11).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "4d5fc121-9ff6-4464-9529-d279a6b9ac41", setup_price: "0.99", hourly_price: "0.001", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 5, order_id: 2, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "020d8618-e2b2-4a3f-9390-a086d4fdc84a", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 6, order_id: 2, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "152a5fb2-708c-412c-9187-3030d07089fd", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 11, order_id: 3, cloud_id: 1, :product => Product.where(id: 7).first, service_id: nil, provision_status: 2, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "8402db1c-b0ca-43b0-9e65-d442be7683ed", setup_price: "3.99", hourly_price: "0.009", monthly_price: "0.5", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 15, order_id: 4, cloud_id: 1, :product => Product.where(id: 4).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: 3306, miq_id: nil, ip_address: nil, hostname: nil, uuid: "a9e59602-36bf-430f-be92-14f329a99c4a", setup_price: "1.0", hourly_price: "1.0", monthly_price: "1.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 2, order_id: 1, cloud_id: 1, :product => Product.where(id: 1).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "7ee39a34-8fb2-4cf4-979a-9ae4d480b6e6", setup_price: "1.99", hourly_price: "0.05", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 3, order_id: 1, cloud_id: 1, :product => Product.where(id: 6).first, service_id: nil, provision_status: 1, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "69ea7d91-e7bb-4854-9ff2-bcd167fe6a71", setup_price: "2.99", hourly_price: "0.09", monthly_price: "0.25", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 17, order_id: 5, cloud_id: 1, :product => Product.where(id: 5).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "4b0185f0-c309-4a1a-b3be-c9c0438b945d", setup_price: "1.99", hourly_price: "0.004", monthly_price: "0.1", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 18, order_id: 6, cloud_id: 1, :product => Product.where(id: 1).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "5a459228-b301-42e1-a121-e927cfbfca54", setup_price: "1.99", hourly_price: "0.001", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 19, order_id: 7, cloud_id: 1, :product => Product.where(id: 16).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "ce160133-9e2c-4766-923c-d237659de8e6", setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 20, order_id: 8, cloud_id: 1, :product => Product.where(id: 18).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "44642c1d-2fb9-41d8-9acf-d57e87da61fd", setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 21, order_id: 9, cloud_id: 2, :product => Product.where(id: 34).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "add8e14e-6ac2-4476-a9f5-84c6b351a716", setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
])
# Sample alerts tied to the order items above.
Alert.create!([
{ id: 1, project_id: 3, staff_id: 0, status: "CRITICAL", message: "$200 of $2,000 budget remaining. Please increase funding or instance will be retired.", start_date: nil, end_date: nil, order_item_id: 6},
{ id: 2, project_id: 2, staff_id: 0, status: "WARNING", message: "Medium PostgreSQL is approaching capacity. Please increase DB size or add addtional resources to avoid service interruptions.", start_date: nil, end_date: nil, order_item_id: 3}
])
# Staff-to-project memberships.
StaffProject.create!([
{ id: 1, staff_id: 3, project_id: 1},
{ id: 2, staff_id: 3, project_id: 2},
{ id: 4, staff_id: 3, project_id: 5},
{ id: 5, staff_id: 5, project_id: 3},
{ id: 6, staff_id: 2, project_id: 3}
])
end
end
Auto-increment IDs need to be manually set to max(id) + 1 of the last record inserted; otherwise a DB reset leaves the sequence at 1, which is what caused the demo insertion errors.
# Sample/demo data rake tasks.
#
# Postgres-specific: every bulk insert below uses explicit ids, so each
# model's autoincrement sequence is bumped to max(id) + 1 afterwards with
# ALTER SEQUENCE; otherwise later implicit inserts would collide with the
# seeded rows.
namespace :sample do
desc 'Reset Auto Increment Ids'
task reset: :environment do
# Restart every sequence that sample:demo seeds with explicit ids.
# Previously only six sequences were reset here, leaving the Cloud,
# ProductAnswer, ProductType, ProductTypeQuestion, ProjectQuestion,
# Approval and StaffProject sequences stale after a reset.
Alert.connection.execute('ALTER SEQUENCE alerts_id_seq RESTART 1')
Order.connection.execute('ALTER SEQUENCE orders_id_seq RESTART 1')
OrderItem.connection.execute('ALTER SEQUENCE order_items_id_seq RESTART 1')
Project.connection.execute('ALTER SEQUENCE projects_id_seq RESTART 1')
Product.connection.execute('ALTER SEQUENCE products_id_seq RESTART 1')
Staff.connection.execute('ALTER SEQUENCE staff_id_seq RESTART 1')
Cloud.connection.execute('ALTER SEQUENCE clouds_id_seq RESTART 1')
ProductAnswer.connection.execute('ALTER SEQUENCE product_answers_id_seq RESTART 1')
ProductType.connection.execute('ALTER SEQUENCE product_types_id_seq RESTART 1')
ProductTypeQuestion.connection.execute('ALTER SEQUENCE product_type_questions_id_seq RESTART 1')
ProjectQuestion.connection.execute('ALTER SEQUENCE project_questions_id_seq RESTART 1')
Approval.connection.execute('ALTER SEQUENCE approvals_id_seq RESTART 1')
StaffProject.connection.execute('ALTER SEQUENCE staff_projects_id_seq RESTART 1')
end
desc 'Generates demo data'
task demo: :environment do
# Staff accounts (explicit ids; sequence bumped after insert).
Staff.create!([
{ id: 4, first_name: "Unused", last_name: "Staff", email: "unused@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 0, current_sign_in_at: nil, last_sign_in_at: nil, current_sign_in_ip: nil, last_sign_in_ip: nil, role: 0, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 2, first_name: "ManageIQ", last_name: "Staff", email: "miq@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 17, current_sign_in_at: "2015-02-06 17:04:10", last_sign_in_at: "2015-02-06 16:57:41", current_sign_in_ip: "54.172.90.47", last_sign_in_ip: "54.172.90.47", role: 1, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 3, first_name: "User", last_name: "Staff", email: "user@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 4, current_sign_in_at: "2015-02-13 18:00:54", last_sign_in_at: "2015-02-12 19:37:15", current_sign_in_ip: "128.229.4.2", last_sign_in_ip: "128.229.4.2", role: 0, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 5, first_name: "All", last_name: "Users", email: "projectjellyfish@bah.com", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 0, current_sign_in_at: nil, last_sign_in_at: nil, current_sign_in_ip: nil, last_sign_in_ip: nil, role: 0, deleted_at: nil, secret: 'jellyfish-token'},
{ id: 1, first_name: "Admin", last_name: "Staff", email: "admin@projectjellyfish.org", phone: nil, password: "jellyfish", reset_password_token: nil, reset_password_sent_at: nil, remember_created_at: nil, sign_in_count: 36, current_sign_in_at: "2015-02-18 00:39:32", last_sign_in_at: "2015-02-17 20:28:51", current_sign_in_ip: "127.0.0.1", last_sign_in_ip: "108.45.125.67", role: 1, deleted_at: nil, secret: 'jellyfish-token'}
])
Staff.connection.execute("ALTER SEQUENCE staff_id_seq RESTART #{Staff.all.order('id DESC').first.id + 1}")
Cloud.create!([
{ id: 1, name: "AWS", description: nil, extra: "{}", deleted_at: nil},
{ id: 2, name: "Azure", description: nil, extra: "{}", deleted_at: nil},
{ id: 3, name: "Rackspace", description: nil, extra: "{}", deleted_at: nil},
{ id: 4, name: "VMware", description: nil, extra: nil, deleted_at: nil},
{ id: 5, name: "Google", description: nil, extra: nil, deleted_at: nil},
{ id: 6, name: "Other", description: nil, extra: nil, deleted_at: nil},
{ id: 7, name: "OpenStack", description: nil, extra: nil, deleted_at: nil}
])
Cloud.connection.execute("ALTER SEQUENCE clouds_id_seq RESTART #{Cloud.all.order('id DESC').first.id + 1}")
Product.create!([
{ id: 1, name: "Small", description: "Small EC2 Instance", service_type_id: 8, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_ec2.png", options: {}, deleted_at: nil, product_type_id: 1, setup_price: "1.99", hourly_price: "0.001", monthly_price: "0.05"},
{ id: 2, name: "Medium", description: "Medium EC2 Instance", service_type_id: 8, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_ec2.png", options: {}, deleted_at: nil, product_type_id: 1, setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075"},
{ id: 3, name: "Large", description: "Large EC2 Instance", service_type_id: 8, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_ec2.png", options: {}, deleted_at: nil, product_type_id: 1, setup_price: "3.99", hourly_price: "0.0055", monthly_price: "0.12"},
{ id: 5, name: "Medium MySQL", description: "Medium MySQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "1.99", hourly_price: "0.004", monthly_price: "0.1"},
{ id: 6, name: "Medium PostgreSQL", description: "Medium PostgreSQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "2.99", hourly_price: "0.004", monthly_price: "0.25"},
{ id: 7, name: "Large PostgreSQL", description: "Large PostgreSQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "3.99", hourly_price: "0.009", monthly_price: "0.5"},
{ id: 8, name: "Medium Aurora", description: "Medium Aurora", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "4.99", hourly_price: "0.015", monthly_price: "0.95"},
{ id: 9, name: "Large SQL Server", description: "Large SQL Server", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "5.99", hourly_price: "0.025", monthly_price: "1.29"},
{ id: 11, name: "West Coast Storage", description: "Normal, Northern California", service_type_id: 5, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_s3.png", options: {}, deleted_at: nil, product_type_id: 4, setup_price: "0.99", hourly_price: "0.001", monthly_price: "0.05"},
{ id: 4, name: "Small MySQL", description: "Small MySQL", service_type_id: 3, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_rds.png", options: {}, deleted_at: nil, product_type_id: 3, setup_price: "1.0", hourly_price: "1.0", monthly_price: "1.0"},
{ id: 16, name: "LAMP Stack", description: "Linux, Apache, MySQL, PHP", service_type_id: 0, service_catalog_id: 0, cloud_id: 1, chef_role: "0", active: true, img: "products/php.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 17, name: "LAMP Stack", description: "Linux, Apache, MySQL, PHP", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "0", active: true, img: "products/php.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "20.0", hourly_price: "20.0", monthly_price: "20.0"},
{ id: 18, name: "Rails Stack", description: "Ruby on Rails Stack", service_type_id: 0, service_catalog_id: 0, cloud_id: 1, chef_role: "0", active: true, img: "products/rails.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 19, name: "MEAN Stack", description: "MongoDB, ExpressJS, AngularJS, NodeJS.", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "0", active: true, img: "products/mean.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 20, name: "Sr. Java Developer", description: "", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/woman.png", options: nil, deleted_at: nil, product_type_id: 7, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 21, name: "Sr. System Administrator", description: "Sr. System Administrator", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/woman.png", options: nil, deleted_at: nil, product_type_id: 7, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 22, name: "Project Manager", description: "Project Manager", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/man.png", options: nil, deleted_at: nil, product_type_id: 7, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 23, name: "JIRA Project", description: "A project in corporate JIRA instance.", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "0", active: true, img: "products/jira.png", options: nil, deleted_at: nil, product_type_id: 6, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 24, name: "Confluence Project", description: "Confluence Project", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/confluence.png", options: nil, deleted_at: nil, product_type_id: 6, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 25, name: "Bugzilla Instance", description: "Bugzilla Instance", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/bugzilla.png", options: nil, deleted_at: nil, product_type_id: 6, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 26, name: "1GB NetApps Storage", description: "NetApps Storage", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/netapp.png", options: nil, deleted_at: nil, product_type_id: 4, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 10, name: "S3 Storage", description: "", service_type_id: 5, service_catalog_id: 1, cloud_id: 1, chef_role: "--CHEF-ROLE--", active: true, img: "products/aws_s3.png", options: {}, deleted_at: nil, product_type_id: 4, setup_price: "1.0", hourly_price: "1.0", monthly_price: "1.0"},
{ id: 28, name: "Teradata", description: "Teradata", service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/teradata.png", options: nil, deleted_at: nil, product_type_id: 2, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 31, name: "RHEL 6 Large ", description: "Large RHEL 6 Instance", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "--CHEF-ROLE--", active: true, img: "products/redhat.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 32, name: "RHEL 6 Medium", description: "RHEL 6 Medium", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "--CHEF-ROLE--", active: true, img: "products/redhat.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 30, name: "RHEL 6 Small ", description: "Small RHEL 6 Instance", service_type_id: 0, service_catalog_id: 0, cloud_id: 4, chef_role: "--CHEF-ROLE--", active: true, img: "products/redhat.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 33, name: "Apache Web Server ", description: "Apache Web Server", service_type_id: 0, service_catalog_id: 0, cloud_id: 7, chef_role: "--CHEF-ROLE--", active: true, img: "products/apache.png", options: nil, deleted_at: nil, product_type_id: 1, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 34, name: "MS Exchange Server", description: "MS Exchange Server", service_type_id: 0, service_catalog_id: 0, cloud_id: 2, chef_role: "0", active: true, img: "products/exchange.png", options: nil, deleted_at: nil, product_type_id: 5, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 27, name: "100 Node Hadoop Cluster", description: nil, service_type_id: 0, service_catalog_id: 0, cloud_id: 6, chef_role: "0", active: true, img: "products/hadoop.png", options: nil, deleted_at: nil, product_type_id: 2, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"},
{ id: 29, name: "10 Node Hadoop Cluster", description: nil, service_type_id: 0, service_catalog_id: 0, cloud_id: 1, chef_role: "0", active: true, img: "products/hadoop.png", options: nil, deleted_at: nil, product_type_id: 2, setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0"}
])
Product.connection.execute("ALTER SEQUENCE products_id_seq RESTART #{Product.all.order('id DESC').first.id + 1}")
# NOTE(review): several answers below reference product_ids 12-15, which are
# not created by this task — confirm whether those rows are intentional or
# leftovers from removed products.
ProductAnswer.create!([
{ id: 1, product_id: 1, product_type_question_id: 1, answer: "t2.micro"},
{ id: 2, product_id: 1, product_type_question_id: 2, answer: "20"},
{ id: 3, product_id: 2, product_type_question_id: 1, answer: "m3.medium"},
{ id: 4, product_id: 2, product_type_question_id: 2, answer: "40"},
{ id: 5, product_id: 3, product_type_question_id: 1, answer: "m3.large"},
{ id: 6, product_id: 3, product_type_question_id: 2, answer: "80"},
{ id: 7, product_id: 4, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 8, product_id: 4, product_type_question_id: 7, answer: "mysql"},
{ id: 9, product_id: 4, product_type_question_id: 8, answer: "20"},
{ id: 11, product_id: 5, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 12, product_id: 5, product_type_question_id: 7, answer: "mysql"},
{ id: 13, product_id: 5, product_type_question_id: 8, answer: "40"},
{ id: 14, product_id: 5, product_type_question_id: 9, answer: "magnetic"},
{ id: 15, product_id: 6, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 16, product_id: 6, product_type_question_id: 7, answer: "postgresql"}, # fixed typo: was "posgresql", which matches no DB Engine option
{ id: 17, product_id: 6, product_type_question_id: 8, answer: "40"},
{ id: 18, product_id: 6, product_type_question_id: 9, answer: "ssd"},
{ id: 19, product_id: 7, product_type_question_id: 6, answer: "db.m3.large"},
{ id: 20, product_id: 7, product_type_question_id: 7, answer: "postgresql"},
{ id: 21, product_id: 7, product_type_question_id: 8, answer: "120"},
{ id: 22, product_id: 7, product_type_question_id: 9, answer: "ssd"},
{ id: 23, product_id: 8, product_type_question_id: 6, answer: "db.m3.medium"},
{ id: 24, product_id: 8, product_type_question_id: 7, answer: "aurora"},
{ id: 25, product_id: 8, product_type_question_id: 8, answer: "40"},
{ id: 26, product_id: 8, product_type_question_id: 9, answer: "magnetic"},
{ id: 27, product_id: 9, product_type_question_id: 6, answer: "db.m3.xlarge"},
{ id: 28, product_id: 9, product_type_question_id: 7, answer: "sqlserver"},
{ id: 29, product_id: 9, product_type_question_id: 8, answer: "120"},
{ id: 30, product_id: 9, product_type_question_id: 9, answer: "ssd"},
{ id: 31, product_id: 10, product_type_question_id: 10, answer: "normal"},
{ id: 33, product_id: 11, product_type_question_id: 10, answer: "normal"},
{ id: 34, product_id: 11, product_type_question_id: 11, answer: "us-west-1"},
{ id: 35, product_id: 12, product_type_question_id: 10, answer: "normal"},
{ id: 36, product_id: 12, product_type_question_id: 11, answer: "us-west-2"},
{ id: 37, product_id: 13, product_type_question_id: 10, answer: "reduced"},
{ id: 38, product_id: 13, product_type_question_id: 11, answer: ""},
{ id: 39, product_id: 14, product_type_question_id: 10, answer: "reduced"},
{ id: 40, product_id: 14, product_type_question_id: 11, answer: "us-west-1"},
{ id: 41, product_id: 15, product_type_question_id: 10, answer: "reduced"},
{ id: 42, product_id: 15, product_type_question_id: 11, answer: "us-west-2"},
{ id: 10, product_id: 4, product_type_question_id: 9, answer: "standard"},
{ id: 43, product_id: 26, product_type_question_id: 10, answer: "normal"},
{ id: 44, product_id: 26, product_type_question_id: 11, answer: "us-west-1"},
{ id: 32, product_id: 10, product_type_question_id: 11, answer: "us-west-2"},
{ id: 45, product_id: 27, product_type_question_id: 3, answer: "4"},
{ id: 46, product_id: 27, product_type_question_id: 4, answer: "40"},
{ id: 47, product_id: 27, product_type_question_id: 5, answer: "2"},
{ id: 48, product_id: 28, product_type_question_id: 3, answer: "4"},
{ id: 49, product_id: 28, product_type_question_id: 4, answer: "40"},
{ id: 50, product_id: 28, product_type_question_id: 5, answer: "2"},
{ id: 51, product_id: 29, product_type_question_id: 3, answer: "4"},
{ id: 52, product_id: 29, product_type_question_id: 4, answer: "40"},
{ id: 53, product_id: 29, product_type_question_id: 5, answer: "1"}
])
ProductAnswer.connection.execute("ALTER SEQUENCE product_answers_id_seq RESTART #{ProductAnswer.all.order('id DESC').first.id + 1}")
ProductType.create!([
{ id: 1, name: "Infrastructure", description: "Available Infrastructure"},
{ id: 5, name: "Platforms", description: "Available Platforms\n"},
{ id: 3, name: "Databases", description: "Available Database"},
{ id: 2, name: "Big Data", description: "Available Big Data Solutions"},
{ id: 6, name: "Applications", description: "Available Applications"},
{ id: 4, name: "Storage", description: "Available Storage"},
{ id: 7, name: "Staff", description: "Available Staff"}
])
ProductType.connection.execute("ALTER SEQUENCE product_types_id_seq RESTART #{ProductType.all.order('id DESC').first.id + 1}")
ProductTypeQuestion.create!([
{ id: 1, product_type_id: 1, label: "Instance Size", field_type: "select", placeholder: "", help: "", options: [["t2.micro", "t2.micro"], ["m3.medium", "m3.medium"], ["m3.large", "m3.large"]], default: "m3.medium", required: true, load_order: 0, manageiq_key: "instance_size"},
{ id: 2, product_type_id: 1, label: "Disk Size", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "40", required: true, load_order: 1, manageiq_key: "disk_size"},
{ id: 3, product_type_id: 2, label: "RAM", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "4", required: true, load_order: 0, manageiq_key: "ram_size"},
{ id: 4, product_type_id: 2, label: "Disk Size", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "40", required: true, load_order: 1, manageiq_key: "disk_size"},
{ id: 5, product_type_id: 2, label: "CPU", field_type: "select", placeholder: "", help: "", options: [["1", "1 CPU"], ["2", "2 CPUs"], ["4", "4 CPUs"], ["6", "6 CPUs"], ["8", "8 CPUs"]], default: "1", required: true, load_order: 2, manageiq_key: "cpu_count"},
{ id: 6, product_type_id: 3, label: "Instance Size", field_type: "select", placeholder: "", help: "", options: [["db.m3.medium", "db.m3.medium"], ["db.m3.large", "db.m3.large"], ["db.m3.xlarge", "db.m3.xlarge"]], default: "db.m3.medium", required: true, load_order: 0, manageiq_key: "instance_size"},
{ id: 7, product_type_id: 3, label: "DB Engine", field_type: "select", placeholder: "", help: "", options: [["aurora", "Aurora"], ["mysql", "MySQL"], ["postgresql", "PostgreSQL"], ["sqlserver", "SQL Server"]], default: "", required: true, load_order: 1, manageiq_key: "db_engine"},
{ id: 8, product_type_id: 3, label: "Disk Size", field_type: "text", placeholder: "Size in GBs", help: "", options: nil, default: "40", required: true, load_order: 2, manageiq_key: "disk_size"},
{ id: 9, product_type_id: 3, label: "Storage Type", field_type: "select", placeholder: "", help: "", options: [["standard", "standard"], ["gp2", "gp2"], ["io1", "io1"]], default: "ssd", required: true, load_order: 3, manageiq_key: "storage_type"},
{ id: 10, product_type_id: 4, label: "Storage Redundancy", field_type: "select", placeholder: "", help: "", options: [["normal", "Normal"], ["reduced", "Reduced"]], default: "normal", required: true, load_order: 0, manageiq_key: "availability"},
{ id: 11, product_type_id: 4, label: "Region", field_type: "select", placeholder: "", help: "", options: [["", "US Standard"], ["us-west-1", "US-West (Northern California)"], ["us-west-2", "US-West (Oregon)"], ["EU", "EU (Ireland)"], ["ap-northeast-1", "Asia Pacific (Tokyo)"], ["ap-southeast-1", "Asia Pacific (Singapore)"], ["ap-southeast-2", "Asia Pacific (Sydney)"]], default: "", required: true, load_order: 1, manageiq_key: "region"}
])
ProductTypeQuestion.connection.execute("ALTER SEQUENCE product_type_questions_id_seq RESTART #{ProductTypeQuestion.all.order('id DESC').first.id + 1}")
Project.create!([
{ id: 1, name: "Project 1", description: "Project description", cc: "--CC--", budget: 123654.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images/documentation.png", deleted_at: nil, spent: "0.0", status: 0, approval: 0},
{ id: 2, name: "Mobile App API", description: "Project description", cc: "--CC--", budget: 3000.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images/icon-mobile-orange.png", deleted_at: nil, spent: "2000.0", status: 0, approval: 1},
{ id: 3, name: "Blog", description: "Project description", cc: "--CC--", budget: 2000.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images/128x128-wordpress.png", deleted_at: nil, spent: "1800.0", status: 0, approval: 1},
{ id: 4, name: "Cloud File Share", description: "Project description", cc: "--CC--", budget: 123654.0, staff_id: "--STAFF_ID--", start_date: "2015-02-06", end_date: "2015-11-06", img: "images//cloud-checkmark-128.png", deleted_at: nil, spent: "0.0", status: 0, approval: 1},
{ id: 5, name: "Cloud Exchange", description: nil, cc: nil, budget: 1000000000.0, staff_id: nil, start_date: "2015-02-12", end_date: "2016-02-11", img: nil, deleted_at: nil, spent: "0.0", status: 0, approval: 0},
{ id: 6, name: "Project Jellyfish Demo", description: nil, cc: nil, budget: 10000.0, staff_id: nil, start_date: "2015-02-13", end_date: "2015-03-13", img: nil, deleted_at: nil, spent: "0.0", status: 0, approval: 0}
])
Project.connection.execute("ALTER SEQUENCE projects_id_seq RESTART #{Project.all.order('id DESC').first.id + 1}")
ProjectQuestion.create!([
{ id: 1, question: "Project Description", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 2},
{ id: 2, question: "Project Charge Code", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 2},
{ id: 3, question: "Maintenance Day", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 3},
{ id: 4, question: "Performed Maintenance", help_text: "", required: true, deleted_at: nil, load_order: nil, options: [], field_type: 0},
{ id: 5, question: "Default provisioning location", help_text: "", required: true, deleted_at: nil, load_order: nil, options: ["East Coast Data Center", "West Coast Data Center", "Classified Data Center"], field_type: 1},
{ id: 6, question: "Will this run in production?", help_text: "", required: true, deleted_at: nil, load_order: nil, options: ["Yes", "No"], field_type: 1},
{ id: 7, question: "FISMA Classification", help_text: "", required: true, deleted_at: nil, load_order: nil, options: ["Low", "Medium", "High"], field_type: 1},
{ id: 8, question: "Period of Performance", help_text: "in months", required: nil, deleted_at: nil, load_order: 1, options: nil, field_type: 2}
])
ProjectQuestion.connection.execute("ALTER SEQUENCE project_questions_id_seq RESTART #{ProjectQuestion.all.order('id DESC').first.id + 1}")
Approval.create!([
{ id: 1, staff_id: 3, project_id: 1, approved: false, reason: nil},
{ id: 2, staff_id: 3, project_id: 2, approved: true, reason: nil}
])
Approval.connection.execute("ALTER SEQUENCE approvals_id_seq RESTART #{Approval.all.order('id DESC').first.id + 1}")
Order.create!([
{ id: 1, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: {}, deleted_at: nil, total: 0.0},
{ id: 2, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: {}, deleted_at: nil, total: 0.0},
{ id: 3, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: {}, deleted_at: nil, total: 0.0},
{ id: 4, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 5, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 6, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 7, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 8, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0},
{ id: 9, :staff => Staff.where(id: 1).first, engine_response: nil, active: nil, options: nil, deleted_at: nil, total: 0.0}
])
Order.connection.execute("ALTER SEQUENCE orders_id_seq RESTART #{Order.all.order('id DESC').first.id + 1}")
OrderItem.create!([
{ id: 9, order_id: 3, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "54048bdc-3cab-4e71-85ca-3b50a3879a31", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 8, order_id: 2, cloud_id: 1, :product => Product.where(id: 5).first, service_id: nil, provision_status: 2, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "4f249639-17ca-493d-8548-9b0728bfc99b", setup_price: "1.99", hourly_price: "0.004", monthly_price: "0.1", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 4, order_id: 1, cloud_id: 1, :product => Product.where(id: 10).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "0c01b271-fcc6-4fdd-9dab-21f3f2f44e59", setup_price: "0.99", hourly_price: "0.01", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 10, order_id: 3, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "e8e488c2-ca19-4d6f-aaf1-42d28050904d", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 12, order_id: 3, cloud_id: 1, :product => Product.where(id: 10).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "ee0164e6-89b7-451f-8351-8fd3d52d4eee", setup_price: "0.99", hourly_price: "0.001", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 1, order_id: 1, cloud_id: 1, :product => Product.where(id: 1).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "422d7851-23ad-4525-b4e9-fad1ad0ce797", setup_price: "1.99", hourly_price: "0.05", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 13, order_id: 3, cloud_id: 1, :product => Product.where(id: 11).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "4d5fc121-9ff6-4464-9529-d279a6b9ac41", setup_price: "0.99", hourly_price: "0.001", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 5, order_id: 2, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "020d8618-e2b2-4a3f-9390-a086d4fdc84a", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 6, order_id: 2, cloud_id: 1, :product => Product.where(id: 2).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "152a5fb2-708c-412c-9187-3030d07089fd", setup_price: "2.99", hourly_price: "0.0025", monthly_price: "0.075", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 11, order_id: 3, cloud_id: 1, :product => Product.where(id: 7).first, service_id: nil, provision_status: 2, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "8402db1c-b0ca-43b0-9e65-d442be7683ed", setup_price: "3.99", hourly_price: "0.009", monthly_price: "0.5", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 15, order_id: 4, cloud_id: 1, :product => Product.where(id: 4).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: 3306, miq_id: nil, ip_address: nil, hostname: nil, uuid: "a9e59602-36bf-430f-be92-14f329a99c4a", setup_price: "1.0", hourly_price: "1.0", monthly_price: "1.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 2, order_id: 1, cloud_id: 1, :product => Product.where(id: 1).first, service_id: nil, provision_status: 0, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "7ee39a34-8fb2-4cf4-979a-9ae4d480b6e6", setup_price: "1.99", hourly_price: "0.05", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 3, order_id: 1, cloud_id: 1, :product => Product.where(id: 6).first, service_id: nil, provision_status: 1, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "69ea7d91-e7bb-4854-9ff2-bcd167fe6a71", setup_price: "2.99", hourly_price: "0.09", monthly_price: "0.25", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 17, order_id: 5, cloud_id: 1, :product => Product.where(id: 5).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 2).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "4b0185f0-c309-4a1a-b3be-c9c0438b945d", setup_price: "1.99", hourly_price: "0.004", monthly_price: "0.1", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 18, order_id: 6, cloud_id: 1, :product => Product.where(id: 1).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 4).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "5a459228-b301-42e1-a121-e927cfbfca54", setup_price: "1.99", hourly_price: "0.001", monthly_price: "0.05", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 19, order_id: 7, cloud_id: 1, :product => Product.where(id: 16).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "ce160133-9e2c-4766-923c-d237659de8e6", setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 20, order_id: 8, cloud_id: 1, :product => Product.where(id: 18).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "44642c1d-2fb9-41d8-9acf-d57e87da61fd", setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
{ id: 21, order_id: 9, cloud_id: 2, :product => Product.where(id: 34).first, service_id: nil, provision_status: nil, deleted_at: nil, :project => Project.where(id: 3).first, host: nil, port: nil, miq_id: nil, ip_address: nil, hostname: nil, uuid: "add8e14e-6ac2-4476-a9f5-84c6b351a716", setup_price: "10.0", hourly_price: "10.0", monthly_price: "10.0", payload_to_miq: nil, payload_reply_from_miq: nil, payload_response_from_miq: nil, latest_alert_id: nil},
])
OrderItem.connection.execute("ALTER SEQUENCE order_items_id_seq RESTART #{OrderItem.all.order('id DESC').first.id + 1}")
Alert.create!([
{ id: 1, project_id: 3, staff_id: 0, status: "CRITICAL", message: "$200 of $2,000 budget remaining. Please increase funding or instance will be retired.", start_date: nil, end_date: nil, order_item_id: 6},
{ id: 2, project_id: 2, staff_id: 0, status: "WARNING", message: "Medium PostgreSQL is approaching capacity. Please increase DB size or add addtional resources to avoid service interruptions.", start_date: nil, end_date: nil, order_item_id: 3}
])
Alert.connection.execute("ALTER SEQUENCE alerts_id_seq RESTART #{Alert.all.order('id DESC').first.id + 1}")
StaffProject.create!([
{ id: 1, staff_id: 3, project_id: 1},
{ id: 2, staff_id: 3, project_id: 2},
{ id: 4, staff_id: 3, project_id: 5},
{ id: 5, staff_id: 5, project_id: 3},
{ id: 6, staff_id: 2, project_id: 3}
])
StaffProject.connection.execute("ALTER SEQUENCE staff_projects_id_seq RESTART #{StaffProject.all.order('id DESC').first.id + 1}")
end
end
|
# coding: utf-8
# Dependencies for the scrape tasks below. ('csv' was previously required
# twice; require is idempotent, but the duplicate line was redundant.)
require 'csv'
require 'date'
require 'open-uri'
require 'nokogiri'
# English month names, used to recognise month-prefixed date strings in
# scraped Wikipedia list items and section headings.
MONTHS = %w(January February March April May June July August September October November December).freeze

# Regex alternation of Canadian jurisdiction names (each Regexp.escape'd,
# joined with '|') for interpolation into patterns like /#{JURISDICTIONS}/.
# Includes both 'Quebec' and 'Québec' spellings plus generic terms
# ('Canadian', 'Federal') that appear in scraped text. Frozen, as constants
# should be immutable.
JURISDICTIONS = [
  'Canada',
  'Canadian',
  'Federal',
  'Alberta',
  'British Columbia',
  'Manitoba',
  'New Brunswick',
  'Newfoundland and Labrador',
  'Northwest Territories',
  'Nova Scotia',
  'Nunavut',
  'Ontario',
  'Prince Edward Island',
  'Saskatchewan',
  'Quebec',
  'Québec',
  'Yukon',
].map do |jurisdiction|
  Regexp.escape(jurisdiction)
end.join('|').freeze
namespace :scrape do
desc "Scrape Government Site"
# Scrapes the Public Service Commission's table of election leave dates and
# upserts one Election per table row.
task :govt => [:environment,] do
source = 'http://www.psc-cfp.gc.ca/plac-acpl/leave-conge/ann2-eng.htm'
doc = Nokogiri::HTML(open(source))
doc.xpath('//tr').each do |tr|
# Skip header rows.
next if tr.at_css('th')
tds = tr.css('td')
# Replace <br> with spaces so the cell reads as one line of text.
tds[1].css('br').each{|br| br.replace(' ')}
# "type (notes)" -> type plus optional parenthesized notes.
type, notes = tds[1].text.downcase.match(/\A([^(]+?)(?: \(([^)]+)\))?\z/)[1..2]
if %w(federal provincial territorial).include?(type)
type = 'general'
end
scope = nil
# Municipal rows name the class of municipality; keep it as the scope.
if ['cities, towns and villages', 'hamlets', 'municipalities', 'resort villages', 'rural municipalities'].include?(type)
scope = type
type = 'municipal'
end
Election.create_or_update({
start_date: Date.parse(tds[2].text),
jurisdiction: tds[0].text,
type: type,
scope: scope,
notes: notes,
source: source,
})
end
end
desc "Scrape Wikipedia page"
# Scrapes Wikipedia's per-year "Canadian electoral calendar" pages.
task :wiki => :environment do
# Parses one year page at +href+ and upserts an Election per list item.
def parse_wiki(href, year)
source = "http://en.wikipedia.org#{href}"
doc = Nokogiri::HTML(open(source))
doc.xpath('//div[@id="mw-content-text"]/ul/li').each do |li|
# Items usually read "Month day: description"; drop " elections, YYYY".
date, text = li.text.sub(/ elections?, #{year}/, '').split(/:| - /)
# If the item doesn't start with a month, prepend the preceding heading.
unless MONTHS.include?(date.split(' ')[0])
date = li.at_xpath('parent::*/preceding-sibling::h2[1]').text + date
date = date.gsub('[edit]','')
end
if text && !text[/leadership|co-spokesperson|referendum|plebiscite|school/i]
# NOTE: the slice! calls below destructively consume pieces of +text+;
# their order matters, and $1 must be read immediately after each one.
type = text.slice!(/by-election|general|municipal/)
jurisdiction = text.slice!(/#{JURISDICTIONS}/)
text.slice!(/\(([^)]+)\)/)
scope = $1
text.slice!(/in (\S+)/)
division = $1
if jurisdiction.nil? || jurisdiction.strip.empty?
text.slice!(/provincial/)
# Red links (pages that don't exist) can't be followed; just log them.
if li.at_css('a/@title[contains("does not exist")]')
puts li.text
else
# Follow the item's first link and infer the jurisdiction from there.
doc = Nokogiri::HTML(open("http://en.wikipedia.org#{li.at_css('a')[:href]}"))
if doc.at_css('.infobox th')
jurisdiction = doc.at_css('.infobox th').text.slice!(/#{JURISDICTIONS}/) ||
doc.at_css('h1.firstHeading span').text.slice!(/#{JURISDICTIONS}/)
end
division = text.strip.slice!(/.+/)
end
end
if jurisdiction == 'Federal'
jurisdiction = 'Canada'
end
unless text.strip.empty?
# puts "Warning: Unrecognized text #{text.inspect}"
end
Election.create_or_update({
start_date: Date.parse("#{date} #{year}"),
jurisdiction: jurisdiction,
election_type: type,
scope: scope,
division: division,
source: source,
})
end
end
end
current_year = Date.today.year
doc = Nokogiri::HTML(open('http://en.wikipedia.org/wiki/Canadian_electoral_calendar'))
doc.xpath('//div[@id="mw-content-text"]/ul/li/a').each do |a|
# NOTE(review): 2007 is hard-coded (current_year is computed but unused) —
# looks like a temporary backfill setting; confirm before relying on it.
if a.text.to_i >= 2007 #current_year
parse_wiki(a[:href], a.text)
end
end
end
desc "Scrape Municipal page"
# Scrapes ICURR's table of municipal election dates by jurisdiction.
task :muni => :environment do
source = 'http://www.icurr.org/research/municipal_facts/Elections/index.php'
doc = Nokogiri::HTML(open(source))
doc.xpath('//table/tbody//tr').each do |tr|
# The right-hand cell holds several <br>-separated lines.
texts = tr.at_xpath('.//td[@class="rcell"]').to_s.split('<br>').map do |html|
Nokogiri::HTML(html).text.strip
end
texts.each_with_index do |text,index|
# Lines starting with a month name are election dates.
if MONTHS.include?(text.split(' ')[0])
jurisdiction = tr.at_xpath('.//td[@class="lcell"]').text
if jurisdiction == 'Québec'
jurisdiction = 'Quebec'
end
notes = nil
scope = nil
# The preceding line, when present, is a "For <scope> (notes):" label.
if index.nonzero?
texts[index - 1].slice!(/\(([^)]+)\):\z/)
notes = $1
scope = texts[index - 1].gsub("\n", '').sub(/\AFor /, '').sub(/:\z/, '').downcase.strip
end
Election.create_or_update({
start_date: Date.parse(text),
jurisdiction: jurisdiction,
type: 'municipal',
scope: scope,
notes: notes,
source: source,
})
end
end
end
end
end
Looking through nested lists; need to find out why election_type is set to "News" so often.
# coding: utf-8
require 'csv'
require 'date'
require 'open-uri'
require 'csv'
require 'nokogiri'
# Month names used to recognise strings that begin with a date.
MONTHS = %w(January February March April May June July August September October November December)
# Regexp alternation source string matching any Canadian jurisdiction name;
# interpolated into the scraping regexes below.
JURISDICTIONS = [
'Canada',
'Canadian',
'Federal',
'Alberta',
'British Columbia',
'Manitoba',
'New Brunswick',
'Newfoundland and Labrador',
'Northwest Territories',
'Nova Scotia',
'Nunavut',
'Ontario',
'Prince Edward Island',
'Saskatchewan',
'Quebec',
'Québec',
'Yukon',
].map do |jurisdiction|
Regexp.escape(jurisdiction)
end.join('|')
namespace :scrape do
desc "Scrape Government Site"
# Scrapes the Public Service Commission's table of election leave dates and
# upserts one Election per table row.
task :govt => [:environment,] do
source = 'http://www.psc-cfp.gc.ca/plac-acpl/leave-conge/ann2-eng.htm'
doc = Nokogiri::HTML(open(source))
doc.xpath('//tr').each do |tr|
# Skip header rows.
next if tr.at_css('th')
tds = tr.css('td')
# Replace <br> with spaces so the cell reads as one line of text.
tds[1].css('br').each{|br| br.replace(' ')}
# "type (notes)" -> type plus optional parenthesized notes.
type, notes = tds[1].text.downcase.match(/\A([^(]+?)(?: \(([^)]+)\))?\z/)[1..2]
if %w(federal provincial territorial).include?(type)
type = 'general'
end
scope = nil
# Municipal rows name the class of municipality; keep it as the scope.
if ['cities, towns and villages', 'hamlets', 'municipalities', 'resort villages', 'rural municipalities'].include?(type)
scope = type
type = 'municipal'
end
Election.create_or_update({
start_date: Date.parse(tds[2].text),
jurisdiction: tds[0].text,
type: type,
scope: scope,
notes: notes,
source: source,
})
end
end
desc "Scrape Wikipedia page"
# Scrapes Wikipedia's per-year "Canadian electoral calendar" pages.
task :wiki => :environment do
# Parses one year page at +href+, dispatching each list item to parse_line.
def parse_wiki(href, year)
source = "http://en.wikipedia.org#{href}"
doc = Nokogiri::HTML(open(source))
doc.xpath('//div[@id="mw-content-text"]/ul/li').each do |li|
# Items usually read "Month day: description"; drop " elections, YYYY".
date, text = li.text.sub(/ elections?, #{year}/, '').split(/:| - /)
# If the item doesn't start with a month, prepend the preceding heading.
unless MONTHS.include?(date.split(' ')[0])
date = li.at_xpath('parent::*/preceding-sibling::h2[1]').text + date
date = date.gsub('[edit]','')
end
if text
parse_line(source, li, year, date, text)
end
#if there is a nested list (one date and many elections)
# NOTE(review): the nested items' text may still contain heading words —
# a possible source of bogus election_type values; verify output.
if MONTHS.include?(date.split(' ')[0]) && !text
li.xpath('.//li').each do |nested_li|
# Keep only the date itself; the nested items follow on later lines.
date = date.split("\n")[0]
text = nested_li.text
parse_line(source, nested_li, year, date, text)
end
end
end
end
# Extracts type/jurisdiction/scope/division from one list item's text and
# upserts an Election. NOTE: the slice! calls destructively consume +text+;
# their order matters, and $1 must be read immediately after each one.
def parse_line(source, li, year, date, text)
if !text[/leadership|co-spokesperson|referendum|plebiscite|school/i]
type = text.slice!(/by-election|general|municipal/)
jurisdiction = text.slice!(/#{JURISDICTIONS}/)
text.slice!(/\(([^)]+)\)/)
scope = $1
text.slice!(/in (\S+)/)
division = $1
if jurisdiction.nil? || jurisdiction.strip.empty?
text.slice!(/provincial/)
# Red links (non-existent pages) and link-less items can't be followed.
if li.at_css('a/@title[contains("does not exist")]') || !li.at_css('a')
puts li.text
else
# Follow the item's first link and infer the jurisdiction from there.
doc = Nokogiri::HTML(open("http://en.wikipedia.org#{li.at_css('a')[:href]}"))
if doc.at_css('.infobox th')
jurisdiction = doc.at_css('.infobox th').text.slice!(/#{JURISDICTIONS}/) ||
doc.at_css('h1.firstHeading span').text.slice!(/#{JURISDICTIONS}/)
end
division = text.strip.slice!(/.+/)
end
end
if jurisdiction == 'Federal'
jurisdiction = 'Canada'
end
unless text.strip.empty?
puts "Warning: Unrecognized text #{text.inspect}"
end
Election.create_or_update({
start_date: Date.parse("#{date} #{year}"),
jurisdiction: jurisdiction,
election_type: type,
scope: scope,
division: division,
source: source,
})
end
end
current_year = Date.today.year
doc = Nokogiri::HTML(open('http://en.wikipedia.org/wiki/Canadian_electoral_calendar'))
doc.xpath('//div[@id="mw-content-text"]/ul/li/a').each do |a|
# Only scrape the current year's page and later ones.
if a.text.to_i >= current_year
parse_wiki(a[:href], a.text)
end
end
end
desc "Scrape Municipal page"
# Scrapes ICURR's table of municipal election dates by jurisdiction.
task :muni => :environment do
source = 'http://www.icurr.org/research/municipal_facts/Elections/index.php'
doc = Nokogiri::HTML(open(source))
doc.xpath('//table/tbody//tr').each do |tr|
# The right-hand cell holds several <br>-separated lines.
texts = tr.at_xpath('.//td[@class="rcell"]').to_s.split('<br>').map do |html|
Nokogiri::HTML(html).text.strip
end
texts.each_with_index do |text,index|
# Lines starting with a month name are election dates.
if MONTHS.include?(text.split(' ')[0])
jurisdiction = tr.at_xpath('.//td[@class="lcell"]').text
if jurisdiction == 'Québec'
jurisdiction = 'Quebec'
end
notes = nil
scope = nil
# The preceding line, when present, is a "For <scope> (notes):" label.
if index.nonzero?
texts[index - 1].slice!(/\(([^)]+)\):\z/)
notes = $1
scope = texts[index - 1].gsub("\n", '').sub(/\AFor /, '').sub(/:\z/, '').downcase.strip
end
Election.create_or_update({
start_date: Date.parse(text),
jurisdiction: jurisdiction,
type: 'municipal',
scope: scope,
notes: notes,
source: source,
})
end
end
end
end
end
|
# coding: utf-8
namespace :import do
# Imports one rink (Patinoire) per spreadsheet row, creating the named
# borough (Arrondissement) on the fly.
desc 'Add rinks from Google Spreadsheets'
task :google => :environment do
  require 'csv'
  require 'open-uri'

  csv_url = 'https://docs.google.com/spreadsheet/pub?hl=en_US&hl=en_US&key=0AtzgYYy0ZABtdEgwenRMR2MySmU5NFBDVk5wc1RQVEE&single=true&gid=0&output=csv'
  CSV.parse(open(csv_url).read, headers: true) do |row|
    # Find or create the borough named in the row.
    arrondissement = Arrondissement.find_or_initialize_by_nom_arr row['nom_arr']
    arrondissement.source = 'docs.google.com'
    arrondissement.save!

    # Drop columns that are not Patinoire attributes before mass-assignment.
    %w(nom_arr extra source_url).each { |column| row.delete(column) }

    patinoire = Patinoire.find_or_initialize_by_parc_and_genre_and_disambiguation_and_arrondissement_id row['parc'], row['genre'], row['disambiguation'], arrondissement.id
    patinoire.attributes = row.to_hash
    patinoire.source = 'docs.google.com'
    patinoire.save!
  end
end
desc 'Add contact info from Google Spreadsheets'
# Attaches contact details (name, email, phone and extension) to the borough
# named in each row's "Authority" column.
task :contacts => :environment do
require 'csv'
require 'open-uri'
CSV.parse(open('https://docs.google.com/spreadsheet/pub?hl=en_US&hl=en_US&key=0AtzgYYy0ZABtdFMwSF94MjRxcW1yZ1JYVkdqM1Fzanc&single=true&gid=0&output=csv').read, headers: true) do |row|
arrondissement = Arrondissement.find_or_initialize_by_nom_arr row['Authority']
arrondissement.attributes = {
# "Name, Title" — either part may be missing.
name: [row['Name'], row['Title']].compact.join(', '),
email: row['Email'],
# Phone digits only, with any "x123" extension removed…
tel: row['Phone'] && row['Phone'].sub(/x\d+/, '').gsub(/\D/, ''),
# …and the extension captured separately.
ext: row['Phone'] && row['Phone'][/x(\d+)/, 1],
}
arrondissement.source ||= 'docs.google.com'
arrondissement.save!
end
end
desc 'Add rinks from Sherlock and add addresses to rinks from donnees.ville.montreal.qc.ca'
# Screen-scrapes the city's Sherlock document line by line: borough header
# lines set the current borough and its contact info; following lines are
# parsed as rinks and matched against existing Patinoire records.
task :sherlock => :environment do
require 'iconv'
require 'open-uri'
# Loose phone number match: 3-3-4 digits with any single separator character.
TEL_REGEX = /\d{3}.?\d{3}.?\d{4}/
GENRE_REGEX = /C|PP|PPL|PSE|anneau de glace|étang avec musique|rond de glace|sentier glacé|patinoire réfrigérée du Canadien de Montréal/
# List of boroughs represented on donnees.ville.montreal.qc.ca
ARRONDISSEMENTS_FROM_XML = Arrondissement.where(source: 'donnees.ville.montreal.qc.ca').all.map(&:nom_arr)
# Updates the rink described by +attributes+ within +arrondissement+, or
# creates it when no match exists; +text+ is the raw line, for messages.
def update_patinoires(arrondissement, attributes, text)
# Find the rink to update.
matches = Patinoire.where(attributes.slice(:parc, :genre, :disambiguation).merge(arrondissement_id: arrondissement.id)).all
# If no rink found, switch PP for PPL.
if matches.empty? && ARRONDISSEMENTS_FROM_XML.include?(arrondissement.nom_arr)
matches = Patinoire.where(attributes.slice(:parc, :disambiguation).merge(genre: attributes[:genre] == 'PP' ? 'PPL' : 'PP', arrondissement_id: arrondissement.id)).all
end
# If single match found, just update address.
if matches.size > 1
puts %("#{text}" matches many rinks)
elsif attributes[:parc] == 'Sir-Wilfrid-Laurier'
# @note Sherlock uses nord, sud, but XML uses no 1, no 2, no 3. Do nothing.
elsif matches.size == 1
matches.first.update_attributes attributes.slice(:adresse, :tel, :ext).select{|k,v| v.present?}
# Special case.
if text[/2 PSE/]
Patinoire.where(attributes.slice(:parc, :genre).merge(arrondissement_id: arrondissement.id, disambiguation: 'no 2')).first.update_attributes attributes.slice(:adresse, :tel, :ext).select{|k,v| v.present?}
end
elsif matches.empty?
# There's only one rink in Pratt park. vleduc@ville.montreal.qc.ca
unless attributes[:parc] == 'Pratt'
# donnees.ville.montreal.qc.ca should generally have all a borough's rinks.
if ARRONDISSEMENTS_FROM_XML.include?(arrondissement.nom_arr)
puts %("#{text}" matches no rink. Creating!)
end
arrondissement.patinoires.create! attributes.slice(:genre, :disambiguation, :parc, :adresse, :tel, :ext).merge(source: 'ville.montreal.qc.ca')
end
end
end
# Parser state carried across lines: current borough name and phone/ext.
nom_arr = nil
tel = nil
ext = nil
# Alternates "no 1"/"no 2" for the duplicated Sherlock lines handled below.
flip = 1
# As the source data is poorly formatted, go line by line with regex.
open('http://www11.ville.montreal.qc.ca/sherlock2/servlet/template/sherlock%2CAfficherDocumentInternet.vm/nodocument/154').each do |line|
# Normalize encoding and whitespace. NOTE(review): decode_html_entities
# appears to be a String extension defined elsewhere in the app — confirm.
line = Iconv.conv('UTF-8', 'ISO-8859-1', line).gsub(/[[:space:]]/, ' ').decode_html_entities.chomp
text = ActionController::Base.helpers.strip_tags(line)
# If it's a borough header:
if match = line[%r{<strong>([^<]+)</strong>.+\d+ patinoires}, 1]
nom_arr = match.gsub("\u0096", '—') # fix dashes
tel = line[TEL_REGEX]
ext = line[/poste (\d+)/, 1]
else
attributes = {}
# If it's a rink:
if genre = line[/[^>]\b(#{GENRE_REGEX})\b/, 1]
attributes = {
genre: genre,
tel: text[TEL_REGEX] || tel,
ext: ext,
parc: text[/\A([^(,*]+)/, 1].andand.strip,
adresse: text[/,(?: \()?((?:[^()](?! 514))+)/, 1].andand.strip,
patinoire: text[/\bet \(?(#{GENRE_REGEX})\)/, 1].andand.strip,
disambiguation: text[/\((nord|sud|petite|grande)\)/, 1].andand.strip,
extra: text.scan(/\((1 M|LA|abri|cabane|chalet|chalet fermé|chalet pas toujours ouvert|pas de chalet|roulotte|toilettes)\)/).flatten.map(&:strip),
}
# If it's a rink, with no rink type specified:
elsif line[/\A(Parc <strong>|<strong>Bassin\b)/]
attributes = {
parc: text[/\A([^(,*]+)/, 1].andand.strip,
adresse: text[/,(?: \()?((?:[^()](?! 514))+)/, 1].andand.strip,
extra: [],
}
end
unless attributes.empty?
raw = Marshal.load(Marshal.dump(attributes)) # deep copy
# Append attributes.
if text['*']
attributes[:disambiguation] = 'réfrigérée'
end
if attributes[:genre] == 'étang avec musique'
attributes[:extra] << 'musique'
end
# From joseeboudreau@ville.montreal.qc.ca
if %w(Gohier Hartenstein).include? attributes[:parc]
attributes[:extra] << 'glissade'
end
if text[/réfrigérée/]
attributes[:description] = 'Patinoire réfrigérée'
attributes[:disambiguation] = 'réfrigérée'
end
# Clean attributes.
attributes[:parc].slice!(/\A(Parc|Patinoire) /)
if attributes[:parc][Patinoire::PREPOSITIONS]
attributes[:parc][Patinoire::PREPOSITIONS] = attributes[:parc][Patinoire::PREPOSITIONS].downcase
end
if attributes[:tel]
attributes[:tel].delete!('^0-9')
end
# Map attributes.
if attributes[:patinoire] == 'rond de glace'
attributes[:patinoire] = 'PPL'
end
attributes[:extra].map! do |v|
{'1 M' => 'musique', 'LA' => 'location et aiguisage'}[v] || v
end
attributes[:genre] = {
'sentier glacé' => 'PP',
'anneau de glace' => 'PP',
'étang avec musique' => 'PP', # Jarry
'rond de glace' => 'PPL',
'patinoire réfrigérée du Canadien de Montréal' => 'PSE',
}[attributes[:genre]] || attributes[:genre]
# Sherlock has the wrong park name.
attributes[:parc] = {
'Bleu Blanc Bouge Le Carignan' => 'Le Carignan',
'Bleu Blanc Bouge de Saint-Michel' => 'François-Perrault',
'Bleu Blanc Bouge' => 'Willibrord', # must be after above
"de l'école Dalpé-Viau" => 'école Dalbé-Viau',
'de Kent' => 'Kent',
'Dr-Bernard-Paquette' => 'Dr-Bernard-Paquet',
'Georges-Vernot' => 'George-Vernot',
'Lasalle' => 'LaSalle',
'Pierre-E.-Trudeau' => 'Pierre-E-Trudeau',
"Terrain de piste et pelouse attenant à l'aréna Martin-Brodeur" => 'Saint-Léonard',
}[attributes[:parc]] || attributes[:parc]
# Sherlock has the wrong rink description.
if nom_arr == 'Saint-Laurent'
attributes[:description] = case attributes[:genre]
when 'PSE'
'Patinoire extérieure'
when 'PPL', 'PP'
if attributes[:parc] == 'Beaudet'
'Sentier de glace'
else
'Rond de glace'
end
else
attributes[:description]
end
end
# Special case.
if text[/2 PSE/]
attributes[:disambiguation] = 'no 1'
elsif text == "Parc Beaubien, 6633, 6e Avenue (PSE) (chalet) "
attributes[:disambiguation] = 'nord'
end
# There are identical lines in Sherlock.
if (attributes[:parc] == 'Eugène-Dostie' && attributes[:genre] == 'PSE') || (attributes[:parc] == 'Alexander' && attributes[:genre] == 'PPL') || (attributes[:parc] == 'À-Ma-Baie' && attributes[:genre] == 'PPL')
attributes[:disambiguation] = "no #{flip}"
flip = flip == 1 ? 2 : 1
end
# Sherlock has useless address.
if attributes[:adresse] == 'PAT'
attributes[:adresse] = nil
end
# Sherlock has the wrong rink type.
attributes[:genre] = case attributes[:parc]
when 'Maurice-Cullen'
'PPL'
when 'Champdoré'
'PSE'
when 'Chamberland'
'PSE' # from joseeboudreau@ville.montreal.qc.ca
when 'Kent'
if attributes[:disambiguation].nil?
'PPL'
else
attributes[:genre]
end
when 'Oakwood'
'PPL'
when 'Van Horne'
'PP'
else
attributes[:genre]
end
# Fill in missing genre.
if ['Pilon', 'Saint-Léonard', 'Bassin Bonsecours'].include? attributes[:parc]
attributes[:genre] ||= 'PPL'
end
if ['Camille'].include? attributes[:parc]
attributes[:genre] ||= 'PSE'
end
# Fix dashes.
nom_arr = {
'Côte-des-Neiges–Notre-Dame-de-Grâce' => 'Côte-des-Neiges—Notre-Dame-de-Grâce',
"L'Île-Bizard–Sainte-Geneviève" => "L'Île-Bizard—Sainte-Geneviève",
'Mercier–Hochelaga-Maisonneuve' => 'Mercier—Hochelaga-Maisonneuve',
'Rivière-des-Prairies–Pointe-aux-Trembles' => 'Rivière-des-Prairies—Pointe-aux-Trembles',
'Rosemont–La Petite-Patrie' => 'Rosemont—La Petite-Patrie',
'Villeray–Saint-Michel–Parc-Extension' => 'Villeray—Saint-Michel—Parc-Extension',
}[nom_arr] || nom_arr
# Create boroughs and rinks.
arrondissement = Arrondissement.find_or_initialize_by_nom_arr nom_arr
arrondissement.source ||= 'ville.montreal.qc.ca'
arrondissement.save!
update_patinoires arrondissement, attributes, text
# A line naming a second rink type ("… et (PPL)") describes two rinks.
if attributes[:patinoire]
update_patinoires arrondissement, attributes.merge(genre: attributes[:patinoire]), text
end
# Check if any text has been omitted from extraction.
rest = raw.reduce(text.dup) do |s,(_,v)|
if Array === v
v.each do |x|
s.sub!(x, '')
end
elsif v
s.sub!(v, '')
end
s
end.sub(%r{\bet\b|\((demi-glace|anciennement Marc-Aurèle-Fortin|Habitations Jeanne-Mance|lac aux Castors|Paul-Émile-Léger|rue D'Iberville/rue de Rouen|St-Anthony)\)}, '').gsub(/\p{Punct}/, '').strip
puts %(didn't extract "#{rest}" from "#{text}") unless rest.empty?
end
end
end
end
end
Fix spacing (strip stray whitespace from imported email addresses).
# coding: utf-8
namespace :import do
# Imports one rink (Patinoire) per spreadsheet row, creating the named
# borough (Arrondissement) on the fly.
desc 'Add rinks from Google Spreadsheets'
task :google => :environment do
require 'csv'
require 'open-uri'
CSV.parse(open('https://docs.google.com/spreadsheet/pub?hl=en_US&hl=en_US&key=0AtzgYYy0ZABtdEgwenRMR2MySmU5NFBDVk5wc1RQVEE&single=true&gid=0&output=csv').read, headers: true) do |row|
arrondissement = Arrondissement.find_or_initialize_by_nom_arr row['nom_arr']
arrondissement.source = 'docs.google.com'
arrondissement.save!
# Drop columns that are not Patinoire attributes before mass-assignment.
row.delete('nom_arr')
row.delete('extra')
row.delete('source_url')
patinoire = Patinoire.find_or_initialize_by_parc_and_genre_and_disambiguation_and_arrondissement_id row['parc'], row['genre'], row['disambiguation'], arrondissement.id
patinoire.attributes = row.to_hash
patinoire.source = 'docs.google.com'
patinoire.save!
end
end
desc 'Add contact info from Google Spreadsheets'
# Attaches contact details (name, email, phone and extension) to the borough
# named in each row's "Authority" column.
task :contacts => :environment do
  require 'csv'
  require 'open-uri'
  CSV.parse(open('https://docs.google.com/spreadsheet/pub?hl=en_US&hl=en_US&key=0AtzgYYy0ZABtdFMwSF94MjRxcW1yZ1JYVkdqM1Fzanc&single=true&gid=0&output=csv').read, headers: true) do |row|
    arrondissement = Arrondissement.find_or_initialize_by_nom_arr row['Authority']
    arrondissement.attributes = {
      # "Name, Title" — either part may be missing.
      name: [row['Name'], row['Title']].compact.join(', '),
      # Guard against a blank Email cell (nil.strip would raise
      # NoMethodError), matching the nil guards on the Phone column below.
      email: row['Email'] && row['Email'].strip,
      # Phone digits only, with any "x123" extension removed…
      tel: row['Phone'] && row['Phone'].sub(/x\d+/, '').gsub(/\D/, ''),
      # …and the extension captured separately.
      ext: row['Phone'] && row['Phone'][/x(\d+)/, 1],
    }
    arrondissement.source ||= 'docs.google.com'
    arrondissement.save!
  end
end
desc 'Add rinks from Sherlock and add addresses to rinks from donnees.ville.montreal.qc.ca'
# Screen-scrapes the city's Sherlock document line by line: borough header
# lines set the current borough and its contact info; following lines are
# parsed as rinks and matched against existing Patinoire records.
task :sherlock => :environment do
require 'iconv'
require 'open-uri'
# Loose phone number match: 3-3-4 digits with any single separator character.
TEL_REGEX = /\d{3}.?\d{3}.?\d{4}/
GENRE_REGEX = /C|PP|PPL|PSE|anneau de glace|étang avec musique|rond de glace|sentier glacé|patinoire réfrigérée du Canadien de Montréal/
# List of boroughs represented on donnees.ville.montreal.qc.ca
ARRONDISSEMENTS_FROM_XML = Arrondissement.where(source: 'donnees.ville.montreal.qc.ca').all.map(&:nom_arr)
# Updates the rink described by +attributes+ within +arrondissement+, or
# creates it when no match exists; +text+ is the raw line, for messages.
def update_patinoires(arrondissement, attributes, text)
# Find the rink to update.
matches = Patinoire.where(attributes.slice(:parc, :genre, :disambiguation).merge(arrondissement_id: arrondissement.id)).all
# If no rink found, switch PP for PPL.
if matches.empty? && ARRONDISSEMENTS_FROM_XML.include?(arrondissement.nom_arr)
matches = Patinoire.where(attributes.slice(:parc, :disambiguation).merge(genre: attributes[:genre] == 'PP' ? 'PPL' : 'PP', arrondissement_id: arrondissement.id)).all
end
# If single match found, just update address.
if matches.size > 1
puts %("#{text}" matches many rinks)
elsif attributes[:parc] == 'Sir-Wilfrid-Laurier'
# @note Sherlock uses nord, sud, but XML uses no 1, no 2, no 3. Do nothing.
elsif matches.size == 1
matches.first.update_attributes attributes.slice(:adresse, :tel, :ext).select{|k,v| v.present?}
# Special case.
if text[/2 PSE/]
Patinoire.where(attributes.slice(:parc, :genre).merge(arrondissement_id: arrondissement.id, disambiguation: 'no 2')).first.update_attributes attributes.slice(:adresse, :tel, :ext).select{|k,v| v.present?}
end
elsif matches.empty?
# There's only one rink in Pratt park. vleduc@ville.montreal.qc.ca
unless attributes[:parc] == 'Pratt'
# donnees.ville.montreal.qc.ca should generally have all a borough's rinks.
if ARRONDISSEMENTS_FROM_XML.include?(arrondissement.nom_arr)
puts %("#{text}" matches no rink. Creating!)
end
arrondissement.patinoires.create! attributes.slice(:genre, :disambiguation, :parc, :adresse, :tel, :ext).merge(source: 'ville.montreal.qc.ca')
end
end
end
# Parser state carried across lines: current borough name and phone/ext.
nom_arr = nil
tel = nil
ext = nil
# Alternates "no 1"/"no 2" for the duplicated Sherlock lines handled below.
flip = 1
# As the source data is poorly formatted, go line by line with regex.
open('http://www11.ville.montreal.qc.ca/sherlock2/servlet/template/sherlock%2CAfficherDocumentInternet.vm/nodocument/154').each do |line|
# Normalize encoding and whitespace. NOTE(review): decode_html_entities
# appears to be a String extension defined elsewhere in the app — confirm.
line = Iconv.conv('UTF-8', 'ISO-8859-1', line).gsub(/[[:space:]]/, ' ').decode_html_entities.chomp
text = ActionController::Base.helpers.strip_tags(line)
# If it's a borough header:
if match = line[%r{<strong>([^<]+)</strong>.+\d+ patinoires}, 1]
nom_arr = match.gsub("\u0096", '—') # fix dashes
tel = line[TEL_REGEX]
ext = line[/poste (\d+)/, 1]
else
attributes = {}
# If it's a rink:
if genre = line[/[^>]\b(#{GENRE_REGEX})\b/, 1]
attributes = {
genre: genre,
tel: text[TEL_REGEX] || tel,
ext: ext,
parc: text[/\A([^(,*]+)/, 1].andand.strip,
adresse: text[/,(?: \()?((?:[^()](?! 514))+)/, 1].andand.strip,
patinoire: text[/\bet \(?(#{GENRE_REGEX})\)/, 1].andand.strip,
disambiguation: text[/\((nord|sud|petite|grande)\)/, 1].andand.strip,
extra: text.scan(/\((1 M|LA|abri|cabane|chalet|chalet fermé|chalet pas toujours ouvert|pas de chalet|roulotte|toilettes)\)/).flatten.map(&:strip),
}
# If it's a rink, with no rink type specified:
elsif line[/\A(Parc <strong>|<strong>Bassin\b)/]
attributes = {
parc: text[/\A([^(,*]+)/, 1].andand.strip,
adresse: text[/,(?: \()?((?:[^()](?! 514))+)/, 1].andand.strip,
extra: [],
}
end
unless attributes.empty?
raw = Marshal.load(Marshal.dump(attributes)) # deep copy
# Append attributes.
if text['*']
attributes[:disambiguation] = 'réfrigérée'
end
if attributes[:genre] == 'étang avec musique'
attributes[:extra] << 'musique'
end
# From joseeboudreau@ville.montreal.qc.ca
if %w(Gohier Hartenstein).include? attributes[:parc]
attributes[:extra] << 'glissade'
end
if text[/réfrigérée/]
attributes[:description] = 'Patinoire réfrigérée'
attributes[:disambiguation] = 'réfrigérée'
end
# Clean attributes.
attributes[:parc].slice!(/\A(Parc|Patinoire) /)
if attributes[:parc][Patinoire::PREPOSITIONS]
attributes[:parc][Patinoire::PREPOSITIONS] = attributes[:parc][Patinoire::PREPOSITIONS].downcase
end
if attributes[:tel]
attributes[:tel].delete!('^0-9')
end
# Map attributes.
if attributes[:patinoire] == 'rond de glace'
attributes[:patinoire] = 'PPL'
end
attributes[:extra].map! do |v|
{'1 M' => 'musique', 'LA' => 'location et aiguisage'}[v] || v
end
attributes[:genre] = {
'sentier glacé' => 'PP',
'anneau de glace' => 'PP',
'étang avec musique' => 'PP', # Jarry
'rond de glace' => 'PPL',
'patinoire réfrigérée du Canadien de Montréal' => 'PSE',
}[attributes[:genre]] || attributes[:genre]
# Sherlock has the wrong park name.
attributes[:parc] = {
'Bleu Blanc Bouge Le Carignan' => 'Le Carignan',
'Bleu Blanc Bouge de Saint-Michel' => 'François-Perrault',
'Bleu Blanc Bouge' => 'Willibrord', # must be after above
"de l'école Dalpé-Viau" => 'école Dalbé-Viau',
'de Kent' => 'Kent',
'Dr-Bernard-Paquette' => 'Dr-Bernard-Paquet',
'Georges-Vernot' => 'George-Vernot',
'Lasalle' => 'LaSalle',
'Pierre-E.-Trudeau' => 'Pierre-E-Trudeau',
"Terrain de piste et pelouse attenant à l'aréna Martin-Brodeur" => 'Saint-Léonard',
}[attributes[:parc]] || attributes[:parc]
# Sherlock has the wrong rink description.
if nom_arr == 'Saint-Laurent'
attributes[:description] = case attributes[:genre]
when 'PSE'
'Patinoire extérieure'
when 'PPL', 'PP'
if attributes[:parc] == 'Beaudet'
'Sentier de glace'
else
'Rond de glace'
end
else
attributes[:description]
end
end
# Special case.
if text[/2 PSE/]
attributes[:disambiguation] = 'no 1'
elsif text == "Parc Beaubien, 6633, 6e Avenue (PSE) (chalet) "
attributes[:disambiguation] = 'nord'
end
# There are identical lines in Sherlock.
if (attributes[:parc] == 'Eugène-Dostie' && attributes[:genre] == 'PSE') || (attributes[:parc] == 'Alexander' && attributes[:genre] == 'PPL') || (attributes[:parc] == 'À-Ma-Baie' && attributes[:genre] == 'PPL')
attributes[:disambiguation] = "no #{flip}"
flip = flip == 1 ? 2 : 1
end
# Sherlock has useless address.
if attributes[:adresse] == 'PAT'
attributes[:adresse] = nil
end
# Sherlock has the wrong rink type.
attributes[:genre] = case attributes[:parc]
when 'Maurice-Cullen'
'PPL'
when 'Champdoré'
'PSE'
when 'Chamberland'
'PSE' # from joseeboudreau@ville.montreal.qc.ca
when 'Kent'
if attributes[:disambiguation].nil?
'PPL'
else
attributes[:genre]
end
when 'Oakwood'
'PPL'
when 'Van Horne'
'PP'
else
attributes[:genre]
end
# Fill in missing genre.
if ['Pilon', 'Saint-Léonard', 'Bassin Bonsecours'].include? attributes[:parc]
attributes[:genre] ||= 'PPL'
end
if ['Camille'].include? attributes[:parc]
attributes[:genre] ||= 'PSE'
end
# Fix dashes.
nom_arr = {
'Côte-des-Neiges–Notre-Dame-de-Grâce' => 'Côte-des-Neiges—Notre-Dame-de-Grâce',
"L'Île-Bizard–Sainte-Geneviève" => "L'Île-Bizard—Sainte-Geneviève",
'Mercier–Hochelaga-Maisonneuve' => 'Mercier—Hochelaga-Maisonneuve',
'Rivière-des-Prairies–Pointe-aux-Trembles' => 'Rivière-des-Prairies—Pointe-aux-Trembles',
'Rosemont–La Petite-Patrie' => 'Rosemont—La Petite-Patrie',
'Villeray–Saint-Michel–Parc-Extension' => 'Villeray—Saint-Michel—Parc-Extension',
}[nom_arr] || nom_arr
# Create boroughs and rinks.
arrondissement = Arrondissement.find_or_initialize_by_nom_arr nom_arr
arrondissement.source ||= 'ville.montreal.qc.ca'
arrondissement.save!
update_patinoires arrondissement, attributes, text
# A line naming a second rink type ("… et (PPL)") describes two rinks.
if attributes[:patinoire]
update_patinoires arrondissement, attributes.merge(genre: attributes[:patinoire]), text
end
# Check if any text has been omitted from extraction.
rest = raw.reduce(text.dup) do |s,(_,v)|
if Array === v
v.each do |x|
s.sub!(x, '')
end
elsif v
s.sub!(v, '')
end
s
end.sub(%r{\bet\b|\((demi-glace|anciennement Marc-Aurèle-Fortin|Habitations Jeanne-Mance|lac aux Castors|Paul-Émile-Léger|rue D'Iberville/rue de Rouen|St-Anthony)\)}, '').gsub(/\p{Punct}/, '').strip
puts %(didn't extract "#{rest}" from "#{text}") unless rest.empty?
end
end
end
end
end
|
namespace :tariff do
desc 'Installs Trade Tariff, creates relevant records, imports national data'
# Aggregate task: runs the install:* tasks defined further down, in order.
task install: %w[environment
install:taric:sections
install:taric:section_notes
install:taric:chapter_notes
install:chief:static_national_data
install:chief:standing_data]
desc 'Reindex relevant entities on ElasticSearch'
task reindex: %w[environment] do
TradeTariffBackend.reindex
end
desc 'Download and apply Taric and CHIEF data'
# Delegates to tariff:sync:apply (defined in the sync namespace below).
task sync: %w[environment sync:apply]
desc "Restore missing chief records files"
# Re-inserts rows from the CSV dumps under data/missing_chief_records into
# the corresponding Chief::* tables.
task restore_missing_chief_records: :environment do
  require "csv"

  # Custom converter: the dump files spell missing values as the literal
  # string "NULL"; turn those back into nil on read.
  CSV::Converters[:null_to_nil] = lambda do |field|
    field == "NULL" ? nil : field
  end

  %w(comm tamf tbl9 mfcm tame).each do |table_name|
    csv_path = File.join(Rails.root, "data", "missing_chief_records", "#{table_name}.csv")
    CSV.read(csv_path, headers: true, header_converters: :symbol, converters: [:null_to_nil]).each do |row|
      # Table name maps to its model, e.g. "comm" -> Chief::Comm.
      "Chief::#{table_name.capitalize}".constantize.insert row.to_hash
    end
    puts "#{table_name} table processed"
  end
end
desc "Process missing chief records files"
# Runs the CHIEF transformer over every not-yet-processed MFCM and TAME record.
task process_missing_chief_records: :environment do
  ChiefTransformer::Processor.new(Chief::Mfcm.unprocessed.all, Chief::Tame.unprocessed.all).process
end
namespace :sync do
desc 'Update database by downloading and then applying CHIEF and TARIC updates via worker'
task update: [:environment, :class_eager_load] do
# Enqueued to run asynchronously on the background worker.
UpdatesSynchronizerWorker.perform_async
end
desc 'Download pending Taric and CHIEF update files, Update tariff_updates table'
task download: [:environment, :class_eager_load] do
TariffSynchronizer.download
end
desc 'Apply pending updates Taric and CHIEF'
task apply: [:environment, :class_eager_load] do
TariffSynchronizer.apply
end
desc 'Transform CHIEF updates'
task transform: %w[environment] do
require 'chief_transformer'
# Transform imported intermediate Chief records to insert/change national measures
# MODE comes from the environment; defaults to :update when absent/blank.
mode = ENV["MODE"].try(:to_sym).presence || :update
ChiefTransformer.instance.invoke(mode)
# Reindex ElasticSearch to see new/updated commodities
Rake::Task['tariff:reindex'].execute
end
desc 'Rollback to specific date in the past'
# DATE is required; KEEP is optional and forwarded as-is.
task rollback: %w[environment class_eager_load] do
if ENV['DATE']
TariffSynchronizer.rollback(ENV['DATE'], ENV['KEEP'])
else
raise ArgumentError.new("Please set the date using environment variable 'DATE'")
end
end
end
namespace :install do
  desc "Load Green Page (SearchReference) entities from reference file"
  task green_pages: :environment do
    ImportSearchReferences.reload
  end

  namespace :taric do
    desc "Add Sections and associate to Chapters"
    task sections: :environment do
      load(File.join(Rails.root, 'db', 'import_sections.rb'))
    end

    desc "Dump Section notes"
    task dump_section_notes: :environment do
      # Write each SectionNote as db/notes/sections/<section_id>.yaml.
      # (Dropped the useless `section_note = ...each` assignment.)
      SectionNote.all.each do |section_note|
        section_file = "db/notes/sections/#{section_note.section_id}.yaml"
        File.open(section_file, 'w') do |out|
          section_doc = {
            section: section_note.section_id,
            content: section_note.content
          }
          YAML::dump(section_doc, out)
        end
      end
    end

    desc "Load Section notes into database"
    task section_notes: :environment do
      Dir[Rails.root.join('db', 'notes', 'sections', '*')].each do |file|
        begin
          note = YAML.load(File.read(file))
          section_note = SectionNote.find(section_id: note[:section]) || SectionNote.new(section_id: note[:section])
          section_note.content = note[:content]
          section_note.save
        rescue StandardError => e
          # A bad file should not abort loading the remaining notes.
          puts "Error loading: #{file}, #{e}"
        end
      end
    end

    desc "Dump Chapter notes"
    task dump_chapter_notes: :environment do
      # Write each ChapterNote as db/notes/chapters/<section>_<chapter>.yaml.
      # (Fixed the misspelled "chatper" locals and useless assignment.)
      ChapterNote.all.each do |chapter_note|
        chapter_file = "db/notes/chapters/#{chapter_note.section_id}_#{chapter_note.chapter_id.to_i}.yaml"
        File.open(chapter_file, 'w') do |out|
          chapter_doc = {
            section: chapter_note.section_id,
            chapter: chapter_note.chapter_id.to_i,
            # Strip undefined bytes so YAML serialization cannot fail on
            # badly-encoded legacy content.
            content: chapter_note.content.force_encoding("ASCII-8BIT").encode('UTF-8', undef: :replace, replace: '')
          }
          YAML::dump(chapter_doc, out)
        end
      end
    end

    desc "Load Chapter notes into database"
    task chapter_notes: :environment do
      Dir[Rails.root.join('db', 'notes', 'chapters', '*')].each do |file|
        begin
          note = YAML.load(File.read(file))
          chapter_note = ChapterNote.find(section_id: note[:section],
                                          chapter_id: note[:chapter].to_s) || ChapterNote.new(section_id: note[:section], chapter_id: note[:chapter].to_s)
          chapter_note.content = note[:content]
          chapter_note.save
        rescue StandardError => e
          # Report and continue, consistent with the section_notes task above
          # (previously this begin block had no rescue, so one bad file
          # aborted the whole task).
          puts "Error loading: #{file}, #{e}"
        end
      end
    end
  end

  namespace :chief do
    desc "Load Static National Data"
    task static_national_data: :environment do
      # Each line of the SQL file is executed as its own statement.
      Sequel::Model.db.transaction do
        File.readlines(Rails.root.join('db', 'chief', 'static_national_data_insert.sql')).each do |line|
          Sequel::Model.db.run(line.strip)
        end
      end
    end

    desc "Load Chief Standing data used for Transformation"
    task standing_data: :environment do
      load(File.join(Rails.root, 'db', 'chief_standing_data.rb'))
    end
  end
end
desc 'Removes additional Trade Tariff entries'
# Aggregate task: tears down the data created by tariff:install.
task remove: %w[environment
remove:taric:sections
remove:chief:standing_data
remove:chief:static_national_data]
namespace :remove do
namespace :updates do
desc "Remove pending tariff_update entries"
task pending: :environment do
Sequel::Model.db.run("DELETE FROM tariff_updates WHERE state = 'P'");
end
end
namespace :taric do
desc "Remove Sections and Chapter<->Section association records"
task sections: :environment do
Section.dataset.delete
Sequel::Model.db.run('DELETE FROM chapters_sections');
end
end
namespace :chief do
desc "Remove Static National data for CHIEF"
task static_national_data: :environment do
# Mirrors the insert script used by install:chief:static_national_data.
Sequel::Model.db.transaction do
File.readlines(Rails.root.join('db', 'chief', 'static_national_data_delete.sql')).each do |line|
Sequel::Model.db.run(line)
end
end
end
desc "Remove CHIEF standing data"
task standing_data: :environment do
[Chief::CountryCode, Chief::CountryGroup, Chief::MeasureTypeAdco, Chief::DutyExpression,
Chief::MeasureTypeCond, Chief::MeasureTypeFootnote, Chief::MeasurementUnit].each do |chief_model|
chief_model.truncate
end
end
end
end
namespace :support do
  desc 'Fix CHIEF initial seed last effective dates'
  task fix_chief: :environment do
    Chief::Tame.unprocessed
               .order(:msrgp_code, :msr_type, :tty_code)
               .distinct(:msrgp_code, :msr_type, :tty_code)
               .where(tar_msr_no: nil).each do |ref_tame|
      tames = Chief::Tame.unprocessed
                         .where(msrgp_code: ref_tame.msrgp_code,
                                msr_type: ref_tame.msr_type,
                                tty_code: ref_tame.tty_code)
                         .order(Sequel.asc(:fe_tsmp))
                         .all
      next unless tames.count { |tame| tame.le_tsmp.blank? } > 1
      # Close each open-ended record (blank le_tsmp) at the start date of the
      # record that follows it. each_cons pairs neighbours directly, replacing
      # the O(n^2) Array#index lookup which also misbehaved when two records
      # compared equal.
      tames.each_cons(2) do |tame, successor|
        Chief::Tame.filter(tame.pk_hash).update(le_tsmp: successor.fe_tsmp) if tame.le_tsmp.blank?
      end
    end
  end
  desc "Create failed measures report"  # fixed 'feiled' typo
  task failed_measures_report: %w[environment] do
    require "csv"
    items = []
    CSV.open("data/failed-measures-report.csv", "wb", { col_sep: ";" }) do |csv|
      csv << ["Goods Nomenclature", "Measure Type", "Update File", "Errors", "Candidate Measure", "Notes"]
      Dir["data/measures/*"].select { |f| f.include?("failed") }.sort.each do |path|
        puts "Processing #{path}"
        origin = path.sub("-failed.json.txt", ".txt").split("/").last
        # File.foreach closes the handle when done; the previous File.open
        # leaked a file descriptor per input file.
        File.foreach(path) do |line|
          record = JSON.parse(line)
          items << [
            record["goods_nomenclature_item_id"],
            record["measure_type_id"],
            origin,
            record["errors"],
            record
          ]
        end
      end
      items.uniq { |i| [i[0], i[1], i[3]] }.each { |item| csv << item }
    end
    extra_namespaces = {
      'xmlns:oub' => 'urn:publicid:-:DGTAXUD:TARIC:MESSAGE:1.0',
      'xmlns:env' => "urn:publicid:-:DGTAXUD:GENERAL:ENVELOPE:1.0"
    }
    items = items.map { |i| i[0] }.uniq.sort
    CSV.open("data/failed-measures-report-taric.csv", "wb", { col_sep: ";" }) do |csv|
      Dir["data/taric/*"].select { |path| path > "data/taric/2017-05-31_TGB17101.xml" }.sort.each do |path|
        # Parse each TARIC file once (block form closes the file); previously
        # the XML was re-opened and re-parsed for every failed item.
        doc = File.open(path) { |f| Nokogiri::XML(f) }
        origin = path.split("/").last
        items.each do |item|
          puts "Processing #{item} #{path}"
          matches = doc.xpath(
            "//oub:goods.nomenclature/oub:goods.nomenclature.item.id[contains(text(), '#{item}')]",
            extra_namespaces
          )
          matches.each do |m|
            start_date = m.parent.children.select { |c| c.name == 'validity.start.date' }.first.try(:text)
            end_date = m.parent.children.select { |c| c.name == 'validity.end.date' }.first.try(:text)
            csv << [item, origin, start_date, end_date]
          end
        end
      end
    end
  end
end
namespace :audit do
  desc "Traverse all TARIC tables and perform conformance validations on all the records"
  task verify: [:environment, :class_eager_load] do
    # Optional comma-separated MODELS env var narrows the audit scope;
    # SINCE and AUDIT_LOG are passed straight through to the auditor.
    model_names = ENV['MODELS'] ? ENV['MODELS'].split(',') : []
    TradeTariffBackend::Auditor.new(model_names, ENV["SINCE"], ENV["AUDIT_LOG"]).run
  end
end
end
Add script for checking codes on EU website
namespace :tariff do
# Aggregate install task: seeds TARIC sections/notes and CHIEF national
# data in dependency order.
desc 'Installs Trade Tariff, creates relevant records, imports national data'
task install: %w[environment
                 install:taric:sections
                 install:taric:section_notes
                 install:taric:chapter_notes
                 install:chief:static_national_data
                 install:chief:standing_data]
desc 'Reindex relevant entities on ElasticSearch'
task reindex: %w[environment] do
  TradeTariffBackend.reindex
end
# Convenience alias: download happens inside sync:apply's dependency chain.
desc 'Download and apply Taric and CHIEF data'
task sync: %w[environment sync:apply]
desc "Restore missing chief records files"
task restore_missing_chief_records: :environment do
  require "csv"
  # Custom converter: the CSV dumps encode SQL NULL as the literal string
  # "NULL". (The previous `field && field == "NULL"` guard was redundant:
  # nil == "NULL" is already false, so nil fields fall through unchanged.)
  CSV::Converters[:null_to_nil] = lambda do |field|
    field == "NULL" ? nil : field
  end
  ["comm", "tamf", "tbl9", "mfcm", "tame"].each do |table_name|
    file_path = File.join(Rails.root, "data", "missing_chief_records", "#{table_name}.csv")
    rows = CSV.read(file_path, headers: true, header_converters: :symbol, converters: [:null_to_nil])
    rows.each do |line|
      # Raw insert, bypassing model validations: these are trusted dumps.
      "Chief::#{table_name.capitalize}".constantize.insert line.to_hash
    end
    puts "#{table_name} table processed"
  end
end
desc "Process missing chief records files"
task process_missing_chief_records: :environment do
  # Run the CHIEF transformer over every MFCM/TAME row not yet processed
  # (typically after tariff:restore_missing_chief_records).
  processor = ChiefTransformer::Processor.new(Chief::Mfcm.unprocessed.all, Chief::Tame.unprocessed.all)
  processor.process
end
namespace :sync do
  desc 'Update database by downloading and then applying CHIEF and TARIC updates via worker'
  task update: [:environment, :class_eager_load] do
    # Enqueue on the background worker rather than blocking the rake process.
    UpdatesSynchronizerWorker.perform_async
  end
  desc 'Download pending Taric and CHIEF update files, Update tariff_updates table'
  task download: [:environment, :class_eager_load] do
    TariffSynchronizer.download
  end
  desc 'Apply pending updates Taric and CHIEF'
  task apply: [:environment, :class_eager_load] do
    TariffSynchronizer.apply
  end
  desc 'Transform CHIEF updates'
  task transform: %w[environment] do
    require 'chief_transformer'
    # Transform imported intermediate Chief records to insert/change national measures
    # MODE env var selects :initial or :update (default) transformer mode.
    mode = ENV["MODE"].try(:to_sym).presence || :update
    ChiefTransformer.instance.invoke(mode)
    # Reindex ElasticSearch to see new/updated commodities
    Rake::Task['tariff:reindex'].execute
  end
  desc 'Rollback to specific date in the past'
  task rollback: %w[environment class_eager_load] do
    # DATE is required; KEEP optionally preserves downloaded update files.
    if ENV['DATE']
      TariffSynchronizer.rollback(ENV['DATE'], ENV['KEEP'])
    else
      raise ArgumentError.new("Please set the date using environment variable 'DATE'")
    end
  end
end
namespace :install do
  desc "Load Green Page (SearchReference) entities from reference file"
  task green_pages: :environment do
    ImportSearchReferences.reload
  end
  namespace :taric do
    desc "Add Sections and associate to Chapters"
    task sections: :environment do
      load(File.join(Rails.root, 'db', 'import_sections.rb'))
    end
    desc "Dump Section notes"
    task dump_section_notes: :environment do
      # Serialise every SectionNote to db/notes/sections/<section>.yaml.
      # (Dropped the unused `section_note =` result assignment.)
      SectionNote.all.each do |section_note|
        section_file = "db/notes/sections/#{section_note.section_id}.yaml"
        File.open(section_file, 'w') do |out|
          section_doc = {
            section: section_note.section_id,
            content: section_note.content
          }
          YAML::dump(section_doc, out)
        end
      end
    end
    desc "Load Section notes into database"
    task section_notes: :environment do
      Dir[Rails.root.join('db','notes','sections','*')].each do |file|
        begin
          note = YAML.load(File.read(file))
          section_note = SectionNote.find(section_id: note[:section]) || SectionNote.new(section_id: note[:section])
          section_note.content = note[:content]
          section_note.save
        rescue StandardError => e
          puts "Error loading: #{file}, #{e}"
        end
      end
    end
    desc "Dump Chapter notes"
    task dump_chapter_notes: :environment do
      # Fixed 'chatper' typos and removed the unused result assignment.
      ChapterNote.all.each do |chapter_note|
        chapter_file = "db/notes/chapters/#{chapter_note.section_id}_#{chapter_note.chapter_id.to_i}.yaml"
        File.open(chapter_file, 'w') do |out|
          chapter_doc = {
            section: chapter_note.section_id,
            chapter: chapter_note.chapter_id.to_i,
            # Legacy content can contain bytes that are invalid UTF-8; drop them.
            content: chapter_note.content.force_encoding("ASCII-8BIT").encode('UTF-8', undef: :replace, replace: '')
          }
          YAML::dump(chapter_doc, out)
        end
      end
    end
    desc "Load Chapter notes into database"
    task chapter_notes: :environment do
      Dir[Rails.root.join('db','notes','chapters','*')].each do |file|
        begin
          note = YAML.load(File.read(file))
          chapter_note = ChapterNote.find(section_id: note[:section],
            chapter_id: note[:chapter].to_s) || ChapterNote.new(section_id: note[:section], chapter_id: note[:chapter].to_s)
          chapter_note.content = note[:content]
          chapter_note.save
        rescue StandardError => e
          # Previously there was no rescue here, so one malformed YAML file
          # aborted the whole import; report and continue like :section_notes.
          puts "Error loading: #{file}, #{e}"
        end
      end
    end
  end
  namespace :chief do
    desc "Load Static National Data"
    task static_national_data: :environment do
      # One INSERT per line, all inside a single transaction.
      Sequel::Model.db.transaction do
        File.readlines(Rails.root.join('db', 'chief', 'static_national_data_insert.sql')).each do |line|
          Sequel::Model.db.run(line.strip)
        end
      end
    end
    desc "Load Chief Standing data used for Transformation"
    task standing_data: :environment do
      load(File.join(Rails.root, 'db', 'chief_standing_data.rb'))
    end
  end
end
# Aggregate task: invokes the remove:* sub-tasks below to strip out the
# seeded TARIC sections and CHIEF national reference data.
desc 'Removes additional Trade Tariff entries'
task remove: %w[environment
                remove:taric:sections
                remove:chief:standing_data
                remove:chief:static_national_data]
namespace :remove do
  namespace :updates do
    desc "Remove pending tariff_update entries"
    task pending: :environment do
      # 'P' marks updates that were downloaded but never applied.
      # (Dropped the stray trailing semicolon.)
      Sequel::Model.db.run("DELETE FROM tariff_updates WHERE state = 'P'")
    end
  end
  namespace :taric do
    desc "Remove Sections and Chapter<->Section association records"
    task sections: :environment do
      Section.dataset.delete
      # chapters_sections is a plain join table with no Sequel model, so
      # clear it with raw SQL.
      Sequel::Model.db.run('DELETE FROM chapters_sections')
    end
  end
  namespace :chief do
    desc "Remove Static National data for CHIEF"
    task static_national_data: :environment do
      # Mirror of install:chief:static_national_data - run the DELETE
      # statements inside one transaction so a failure rolls everything back.
      Sequel::Model.db.transaction do
        File.readlines(Rails.root.join('db', 'chief', 'static_national_data_delete.sql')).each do |line|
          Sequel::Model.db.run(line)
        end
      end
    end
    desc "Remove CHIEF standing data"
    task standing_data: :environment do
      [Chief::CountryCode, Chief::CountryGroup, Chief::MeasureTypeAdco, Chief::DutyExpression,
       Chief::MeasureTypeCond, Chief::MeasureTypeFootnote, Chief::MeasurementUnit].each do |chief_model|
        chief_model.truncate
      end
    end
  end
end
namespace :support do
  desc 'Fix CHIEF initial seed last effective dates'
  task fix_chief: :environment do
    Chief::Tame.unprocessed
               .order(:msrgp_code, :msr_type, :tty_code)
               .distinct(:msrgp_code, :msr_type, :tty_code)
               .where(tar_msr_no: nil).each do |ref_tame|
      tames = Chief::Tame.unprocessed
                         .where(msrgp_code: ref_tame.msrgp_code,
                                msr_type: ref_tame.msr_type,
                                tty_code: ref_tame.tty_code)
                         .order(Sequel.asc(:fe_tsmp))
                         .all
      next unless tames.count { |tame| tame.le_tsmp.blank? } > 1
      # Close each open-ended record (blank le_tsmp) at the start date of the
      # record that follows it. each_cons pairs neighbours directly, replacing
      # the O(n^2) Array#index lookup which also misbehaved when two records
      # compared equal.
      tames.each_cons(2) do |tame, successor|
        Chief::Tame.filter(tame.pk_hash).update(le_tsmp: successor.fe_tsmp) if tame.le_tsmp.blank?
      end
    end
  end
  desc "Create failed measures report"  # fixed 'feiled' typo
  task failed_measures_report: %w[environment] do
    require "csv"
    items = []
    CSV.open("data/failed-measures-report.csv", "wb", { col_sep: ";" }) do |csv|
      csv << ["Goods Nomenclature", "Measure Type", "Update File", "Errors", "Candidate Measure", "Notes"]
      Dir["data/measures/*"].select { |f| f.include?("failed") }.sort.each do |path|
        puts "Processing #{path}"
        origin = path.sub("-failed.json.txt", ".txt").split("/").last
        # File.foreach closes the handle when done; the previous File.open
        # leaked a file descriptor per input file.
        File.foreach(path) do |line|
          record = JSON.parse(line)
          items << [
            record["goods_nomenclature_item_id"],
            record["measure_type_id"],
            origin,
            record["errors"],
            record
          ]
        end
      end
      items.uniq { |i| [i[0], i[1], i[3]] }.each { |item| csv << item }
    end
    extra_namespaces = {
      'xmlns:oub' => 'urn:publicid:-:DGTAXUD:TARIC:MESSAGE:1.0',
      'xmlns:env' => "urn:publicid:-:DGTAXUD:GENERAL:ENVELOPE:1.0"
    }
    items = items.map { |i| i[0] }.uniq.sort
    CSV.open("data/failed-measures-report-taric.csv", "wb", { col_sep: ";" }) do |csv|
      Dir["data/taric/*"].select { |path| path > "data/taric/2017-05-31_TGB17101.xml" }.sort.each do |path|
        # Parse each TARIC file once (block form closes the file); previously
        # the XML was re-opened and re-parsed for every failed item.
        doc = File.open(path) { |f| Nokogiri::XML(f) }
        origin = path.split("/").last
        items.each do |item|
          puts "Processing #{item} #{path}"
          matches = doc.xpath(
            "//oub:goods.nomenclature/oub:goods.nomenclature.item.id[contains(text(), '#{item}')]",
            extra_namespaces
          )
          matches.each do |m|
            start_date = m.parent.children.select { |c| c.name == 'validity.start.date' }.first.try(:text)
            end_date = m.parent.children.select { |c| c.name == 'validity.end.date' }.first.try(:text)
            csv << [item, origin, start_date, end_date]
          end
        end
      end
    end
  end
  desc "Check codes on EU website, put your codes in `codes` array"
  task check_codes_on_eu: %w[environment] do
    require 'net/http'
    codes = []  # fill in the TARIC codes to verify before running
    not_on_eu = []
    codes.each do |code|
      puts "checking #{code}"
      url = "http://ec.europa.eu/taxation_customs/dds2/taric/measures.jsp?Lang=en&SimDate=20180105&Area=&MeasType=&StartPub=&EndPub=&MeasText=&GoodsText=&Taric=#{code}&search_text=goods&textSearch=&LangDescr=en&OrderNum=&Regulation=&measStartDat=&measEndDat="
      uri = URI(url)
      body = Net::HTTP.get(uri)
      # The results page contains this heading only when the code exists.
      not_on_eu << code unless body.include? "TARIC measure information"
    end
    puts "completed"  # fixed 'compeled' typo
    puts not_on_eu
  end
end
namespace :audit do
  desc "Traverse all TARIC tables and perform conformance validations on all the records"
  task verify: [:environment, :class_eager_load] do
    # Optional comma-separated MODELS env var narrows the audit scope;
    # SINCE and AUDIT_LOG are passed straight through to the auditor.
    model_names = ENV['MODELS'] ? ENV['MODELS'].split(',') : []
    TradeTariffBackend::Auditor.new(model_names, ENV["SINCE"], ENV["AUDIT_LOG"]).run
  end
end
end
|
module Taxjar
  # Semantic version of the Taxjar gem.
  module Version
    module_function

    def major
      1
    end

    def minor
      2
    end

    def patch
      1
    end

    # Pre-release tag; nil for stable releases.
    def pre
      nil
    end

    # @return [Hash] version parts keyed by name
    def to_h
      { major: major, minor: minor, patch: patch, pre: pre }
    end

    # @return [Array] non-nil version parts, in order
    def to_a
      [major, minor, patch, pre].compact
    end

    # @return [String] dotted version string, e.g. "1.2.1"
    def to_s
      to_a.join('.')
    end
  end
end
1.2.2
module Taxjar
  # Semantic version of the Taxjar gem.
  module Version
    module_function

    def major
      1
    end

    def minor
      2
    end

    def patch
      2
    end

    # Pre-release tag; nil for stable releases.
    def pre
      nil
    end

    # @return [Hash] version parts keyed by name
    def to_h
      { major: major, minor: minor, patch: patch, pre: pre }
    end

    # @return [Array] non-nil version parts, in order
    def to_a
      [major, minor, patch, pre].compact
    end

    # @return [String] dotted version string, e.g. "1.2.2"
    def to_s
      to_a.join('.')
    end
  end
end
|
module Taxjar
  # Semantic version of the Taxjar gem.
  module Version
    module_function

    def major
      1
    end

    def minor
      3
    end

    def patch
      1
    end

    # Pre-release tag; nil for stable releases.
    def pre
      nil
    end

    # @return [Hash] version parts keyed by name
    def to_h
      { major: major, minor: minor, patch: patch, pre: pre }
    end

    # @return [Array] non-nil version parts, in order
    def to_a
      [major, minor, patch, pre].compact
    end

    # @return [String] dotted version string, e.g. "1.3.1"
    def to_s
      to_a.join('.')
    end
  end
end
1.3.2
module Taxjar
  # Semantic version of the Taxjar gem.
  module Version
    module_function

    def major
      1
    end

    def minor
      3
    end

    def patch
      2
    end

    # Pre-release tag; nil for stable releases.
    def pre
      nil
    end

    # @return [Hash] version parts keyed by name
    def to_h
      { major: major, minor: minor, patch: patch, pre: pre }
    end

    # @return [Array] non-nil version parts, in order
    def to_a
      [major, minor, patch, pre].compact
    end

    # @return [String] dotted version string, e.g. "1.3.2"
    def to_s
      to_a.join('.')
    end
  end
end
|
module Taxjar
  # Semantic version of the Taxjar gem.
  module Version
    module_function

    def major
      1
    end

    def minor
      3
    end

    def patch
      2
    end

    # Pre-release tag; nil for stable releases.
    def pre
      nil
    end

    # @return [Hash] version parts keyed by name
    def to_h
      { major: major, minor: minor, patch: patch, pre: pre }
    end

    # @return [Array] non-nil version parts, in order
    def to_a
      [major, minor, patch, pre].compact
    end

    # @return [String] dotted version string, e.g. "1.3.2"
    def to_s
      to_a.join('.')
    end
  end
end
1.4.0
module Taxjar
  # Semantic version of the Taxjar gem.
  module Version
    module_function

    def major
      1
    end

    def minor
      4
    end

    def patch
      0
    end

    # Pre-release tag; nil for stable releases.
    def pre
      nil
    end

    # @return [Hash] version parts keyed by name
    def to_h
      { major: major, minor: minor, patch: patch, pre: pre }
    end

    # @return [Array] non-nil version parts, in order
    def to_a
      [major, minor, patch, pre].compact
    end

    # @return [String] dotted version string, e.g. "1.4.0"
    def to_s
      to_a.join('.')
    end
  end
end
|
# coding: utf-8
require "gemoji"
class String
  # Single code points rendered one column wide that are not covered by the
  # range-based rules in #twidth below.
  CHAR_CODES_OF_WIDTH_1 = [1608, 1641, 1782, 3232,
    3237, 3248, 3267, 3589, 3665, 3844, 5026, 5046, 5072, 5603,
    5608, 7447, 7461, 7500, 7506, 8198, 8203, 8451, 8461, 8469, 8545, 9000, 9166,
    11015, 57643, 58141, 58370, 58381,
    58387]
  # Multi-codepoint sequences (emoji + variation selectors etc.) rendered
  # one / two columns wide; counted and removed before the per-char pass.
  MULTI_CHAR_OF_WIDTH_1 = %w{ ☺️ ❤️ ♍️ ☔️ ‾᷄ ‾᷅ ⁻̫ ✖️ 😂 ☀︎ ❓ ⁉️ ☁︎ ⬇️ ❄️ ✌️ ♨️ 6⃣ ♻️ ♒️
    ✏️ 🇨🇳 ☁️ ✈️ ☀️ ♥️ ⚡️ ✔️ 🇰🇷 ⌛️ }
  MULTI_CHAR_OF_WIDTH_2 = %w{ ・᷄ ・᷅ ㊙️ }

  # Terminal display width of the string (2 columns for fullwidth chars,
  # 0 for combining marks, 1 otherwise).
  #
  # FIX: this method previously called gsub! on the receiver, so merely
  # measuring a string destructively stripped its emoji (and made tljust
  # return corrupted data). All scanning now happens on a private copy.
  def twidth
    text = dup
    result = 0
    MULTI_CHAR_OF_WIDTH_1.each do |c|
      if text.include?(c)
        result += 1 * text.scan(c).size
        text.gsub!(c, '')
      end
    end
    MULTI_CHAR_OF_WIDTH_2.each do |c|
      if text.include?(c)
        result += 2 * text.scan(c).size
        text.gsub!(c, '')
      end
    end
    # acc (not `result`) avoids shadowing the outer accumulator.
    text.chars.inject(result) do |acc, c|
      case c.ord
      when (0..0x7F) # Basic Latin
        acc + 1
      when (0x80..0xFF) # Latin-1 Supplement
        acc + 1
      when (0x100..0x17F) # Latin Extended-A
        acc + 1
      when (0x180..0x24F) # Latin Extended-B
        acc + 1
      when (0x2C60..0x2C7F) # Latin Extended-C
        acc + 1
      when (0xA720..0xA7FF) # Latin Extended-D
        acc + 1
      when (0xAB30..0xAB6F) # Latin Extended-E
        acc + 1
      when (0x1E00..0x1EFF) # Latin Extended Additional
        acc + 1
      when (0xFB00..0xFB06) # Latin Ligatures
        acc + 1
      when (0x250..0x2AF) # IPA Extensions
        acc + 1
      when (0x300..0x36F) # Combining Diacritical Marks
        acc
      when (0x1DC0..0x1DFF) # Combining Diacritical Marks Supplement
        acc
      when (0x2B0..0x2FF) # Spacing Modifier Letters
        acc + 1
      when (0x370..0x3FF) # Greek and Coptic
        acc + 1
      when (0x400..0x482) # Cyrillic
        acc + 1
      when (0x530..0x58F) # Armenian
        acc + 1
      when (0x2070..0x209F) # Superscripts and Subscripts
        acc + 1
      when (0x2190..0x21FF) # Arrows
        acc + 1
      when (0x2200..0x22FF) # Mathematical Operators
        acc + 1
      when (0x2500..0x257F) # Box Drawing
        acc + 1
      when (0x2580..0x259F) # Block Elements
        acc + 1
      when (0x25A0..0x25FF) # Geometric Shapes
        acc + 1
      when (0x2600..0x26FF) # Miscellaneous Symbols
        acc + 1
      when (0x2700..0x27BF) # Dingbats
        acc + 1
      # http://www.unicode.org/charts/PDF/U2000.pdf
      # General Punctuation, Range: 2000-206F
      when (0x2012..0x2027)
        acc + 1
      when (0x2030..0x205E)
        acc + 1
      # http://www.unicode.org/charts/PDF/U20D0.pdf
      # Combining Diacritical Marks for Symbols, Range: 20D0-20FF
      when (0x20D0..0x20DC)
        acc
      # http://www.unicode.org/charts/PDF/U0600.pdf
      # Arabic, Range: 0600-06FF
      when (0x610..0x614) # Honorifics
        acc
      when 0x615 # Koranic annotation sign
        acc
      when 0x616 # Extended Arabic mark
        acc
      when (0x617..0x61A) # Koranic annotation signs
        acc
      when (0x6D6..0x6DC) # Koranic annotation signs
        acc
      # http://www.unicode.org/charts/PDF/U0900.pdf
      # Devanagari, Range: 0900-097F
      when (0x941..0x948) # Dependent vowel signs
        acc
      # http://www.unicode.org/charts/PDF/U0A80.pdf
      # Gujarati, Range: 0A80-0AFF
      when (0xAC1..0xAC8) # Dependent vowel signs
        acc
      # http://www.unicode.org/charts/PDF/U0B00.pdf
      # Oriya, Range: 0B00-0B7F
      when (0xB66..0xB77)
        acc + 1
      # http://www.unicode.org/charts/PDF/U0E00.pdf
      # Thai, Range: 0E00-0E7F
      when (0xE34..0xE3A) # Vowels
        acc
      when (0xE48..0xE4B) # Tone marks
        acc
      # http://www.unicode.org/charts/PDF/U0F00.pdf
      # Tibetan, Range: 0F00-0FFF
      when (0xF3A..0xF47)
        acc + 1
      # http://www.unicode.org/charts/PDF/UFF00.pdf
      # Halfwidth and Fullwidth Forms, Range: FF00-FFEF
      when (0xFF01..0xFF5E) # Fullwidth ASCII variants
        acc + 2
      when (0xFF5F..0xFF60) # Fullwidth brackets
        acc + 2
      when (0xFF61..0xFF64) # Halfwidth CJK punctuation
        acc + 1
      when (0xFF65..0xFF9F) # Halfwidth Katakana variants
        acc + 1
      when (0xFFA0..0xFFDC) # Halfwidth Hangul variants
        acc + 1
      when (0xFFE0..0xFFE6) # Fullwidth symbol variants
        acc + 2
      when (0xFFE8..0xFFEE) # Halfwidth symbol variants
        acc + 1
      when *CHAR_CODES_OF_WIDTH_1
        acc + 1
      when lambda { |ord| Emoji.find_by_unicode([ord].pack('U*')) }
        acc + 1
      else
        acc + 2
      end
    end
  end

  # Left-justify to +width+ display columns; returns self unchanged when
  # already at least that wide. twidth is computed once (the old double call
  # was wasteful and, combined with the mutation bug above, wrong).
  def tljust(width)
    w = twidth
    w < width ? self + ' ' * (width - w) : self
  end
end
module Terminal
  # Plain-text table renderer aware of terminal display widths
  # (relies on String#twidth / String#tljust defined in this file).
  class Table
    attr_accessor :rows
    attr_accessor :headings
    attr_accessor :column_widths
    attr_accessor :new_line_symbol

    # object: a Hash, an Enumerable of Hashes/rows, or nil.
    # options:
    #   :use_new_line_symbol - render embedded newlines as a visible glyph
    #   :flatten             - normalise hashes so all rows share the same keys
    #   :only / :except      - whitelist / blacklist hash keys
    def initialize(object = nil, options = {})
      @rows = []
      @headings = []
      @column_widths = []
      @new_line_symbol = options[:use_new_line_symbol] ? '⏎' : ' '
      if options[:flatten]
        raise 'should be an array' unless object.respond_to?(:each)
        all_keys = object.each.map(&:keys).flatten.map(&:to_sym).uniq
        object.each do |hash|
          all_keys.each { |key| hash[key] = '' if hash[key].nil? }
        end
      end
      if object
        if object.is_a?(Hash)
          add_hash(object, options)
        elsif object.respond_to?(:each)
          object.each { |o| add_object(o, options) }
        else
          add_object(object, options)
        end
      end
      yield self if block_given?
      recalculate_column_widths!
    end

    # Append one row; hash-like objects set the headings as a side effect.
    def add_object(object, options)
      if object.respond_to?(:to_hash)
        add_hash(object.to_hash, options)
      elsif object.respond_to?(:each)
        @rows << object
      end
    end

    def add_hash(hash, options)
      if options[:only]
        hash.keep_if { |k, v| options[:only].map(&:to_sym).include?(k.to_sym) }
      elsif options[:except]
        hash.delete_if { |k, v| options[:except].map(&:to_sym).include?(k.to_sym) }
      end
      @headings = hash.keys.map(&:to_s)
      @rows << hash.values
    end

    def headings=(headings)
      @headings = headings
    end

    # Normalises newlines in cell text and recomputes per-column widths
    # using display width (twidth), not character count.
    def recalculate_column_widths!
      @rows = rows.map { |row| row.map { |item| item.to_s.gsub("\r\n", @new_line_symbol).gsub("\n", @new_line_symbol).gsub("\r", @new_line_symbol) } }
      if @rows.count > 0
        (0...@rows.first.size).each do |col|
          @column_widths[col] = @rows.map { |row| row[col].to_s.twidth }.max
        end
      end
      if @headings.count > 0
        (0...@headings.size).each do |col|
          @column_widths[col] = [@column_widths[col] || 0, @headings[col].twidth].max
        end
      end
    end

    def to_s
      recalculate_column_widths!
      result = ''
      border = '+' + @column_widths.map { |w| '-' * (w + 2) }.join('+') + '+' + "\n"
      if @headings.count > 0
        result += border
        cells = @headings.each_with_index.map { |cell, i| cell.to_s.tljust(@column_widths[i]) }
        result += '| ' + cells.join(' | ') + " |\n"
      end
      result += border
      @rows.each do |row|
        cells = row.each_with_index.map { |cell, i| cell.to_s.tljust(@column_widths[i]) }
        result += '| ' + cells.join(' | ') + " |\n"
      end
      result + border
    end

    class << self
      # FIX: previously referenced the nonexistent constants
      # String::CHARS_OF_WIDTH_OF_1 / CHARS_OF_WIDTH_OF_0 and always raised
      # NameError; this String patch only defines CHAR_CODES_OF_WIDTH_1.
      def special_tokens
        String::CHAR_CODES_OF_WIDTH_1
      end
    end
  end
end
Letterlike Symbols
# coding: utf-8
require "gemoji"
class String
  # Single code points rendered one column wide that are not covered by the
  # range-based rules in #twidth below.
  CHAR_CODES_OF_WIDTH_1 = [1608, 1641, 1782, 3232,
    3237, 3248, 3267, 3589, 3665, 3844, 5026, 5046, 5072, 5603,
    5608, 7447, 7461, 7500, 7506, 8198, 8203, 8545, 9000, 9166,
    11015, 57643, 58141, 58370, 58381,
    58387]
  # Multi-codepoint sequences (emoji + variation selectors etc.) rendered
  # one / two columns wide; counted and removed before the per-char pass.
  MULTI_CHAR_OF_WIDTH_1 = %w{ ☺️ ❤️ ♍️ ☔️ ‾᷄ ‾᷅ ⁻̫ ✖️ 😂 ☀︎ ❓ ⁉️ ☁︎ ⬇️ ❄️ ✌️ ♨️ 6⃣ ♻️ ♒️
    ✏️ 🇨🇳 ☁️ ✈️ ☀️ ♥️ ⚡️ ✔️ 🇰🇷 ⌛️ }
  MULTI_CHAR_OF_WIDTH_2 = %w{ ・᷄ ・᷅ ㊙️ }

  # Terminal display width of the string (2 columns for fullwidth chars,
  # 0 for combining marks, 1 otherwise).
  #
  # FIX: this method previously called gsub! on the receiver, so merely
  # measuring a string destructively stripped its emoji (and made tljust
  # return corrupted data). All scanning now happens on a private copy.
  def twidth
    text = dup
    result = 0
    MULTI_CHAR_OF_WIDTH_1.each do |c|
      if text.include?(c)
        result += 1 * text.scan(c).size
        text.gsub!(c, '')
      end
    end
    MULTI_CHAR_OF_WIDTH_2.each do |c|
      if text.include?(c)
        result += 2 * text.scan(c).size
        text.gsub!(c, '')
      end
    end
    # acc (not `result`) avoids shadowing the outer accumulator.
    text.chars.inject(result) do |acc, c|
      case c.ord
      when (0..0x7F) # Basic Latin
        acc + 1
      when (0x80..0xFF) # Latin-1 Supplement
        acc + 1
      when (0x100..0x17F) # Latin Extended-A
        acc + 1
      when (0x180..0x24F) # Latin Extended-B
        acc + 1
      when (0x2C60..0x2C7F) # Latin Extended-C
        acc + 1
      when (0xA720..0xA7FF) # Latin Extended-D
        acc + 1
      when (0xAB30..0xAB6F) # Latin Extended-E
        acc + 1
      when (0x1E00..0x1EFF) # Latin Extended Additional
        acc + 1
      when (0xFB00..0xFB06) # Latin Ligatures
        acc + 1
      when (0x250..0x2AF) # IPA Extensions
        acc + 1
      when (0x300..0x36F) # Combining Diacritical Marks
        acc
      when (0x1DC0..0x1DFF) # Combining Diacritical Marks Supplement
        acc
      when (0x2B0..0x2FF) # Spacing Modifier Letters
        acc + 1
      when (0x370..0x3FF) # Greek and Coptic
        acc + 1
      when (0x400..0x482) # Cyrillic
        acc + 1
      when (0x530..0x58F) # Armenian
        acc + 1
      when (0x2070..0x209F) # Superscripts and Subscripts
        acc + 1
      when (0x2100..0x214F) # Letterlike Symbols
        acc + 1
      when (0x2190..0x21FF) # Arrows
        acc + 1
      when (0x2200..0x22FF) # Mathematical Operators
        acc + 1
      when (0x2500..0x257F) # Box Drawing
        acc + 1
      when (0x2580..0x259F) # Block Elements
        acc + 1
      when (0x25A0..0x25FF) # Geometric Shapes
        acc + 1
      when (0x2600..0x26FF) # Miscellaneous Symbols
        acc + 1
      when (0x2700..0x27BF) # Dingbats
        acc + 1
      # http://www.unicode.org/charts/PDF/U2000.pdf
      # General Punctuation, Range: 2000-206F
      when (0x2012..0x2027)
        acc + 1
      when (0x2030..0x205E)
        acc + 1
      # http://www.unicode.org/charts/PDF/U20D0.pdf
      # Combining Diacritical Marks for Symbols, Range: 20D0-20FF
      when (0x20D0..0x20DC)
        acc
      # http://www.unicode.org/charts/PDF/U0600.pdf
      # Arabic, Range: 0600-06FF
      when (0x610..0x614) # Honorifics
        acc
      when 0x615 # Koranic annotation sign
        acc
      when 0x616 # Extended Arabic mark
        acc
      when (0x617..0x61A) # Koranic annotation signs
        acc
      when (0x6D6..0x6DC) # Koranic annotation signs
        acc
      # http://www.unicode.org/charts/PDF/U0900.pdf
      # Devanagari, Range: 0900-097F
      when (0x941..0x948) # Dependent vowel signs
        acc
      # http://www.unicode.org/charts/PDF/U0A80.pdf
      # Gujarati, Range: 0A80-0AFF
      when (0xAC1..0xAC8) # Dependent vowel signs
        acc
      # http://www.unicode.org/charts/PDF/U0B00.pdf
      # Oriya, Range: 0B00-0B7F
      when (0xB66..0xB77)
        acc + 1
      # http://www.unicode.org/charts/PDF/U0E00.pdf
      # Thai, Range: 0E00-0E7F
      when (0xE34..0xE3A) # Vowels
        acc
      when (0xE48..0xE4B) # Tone marks
        acc
      # http://www.unicode.org/charts/PDF/U0F00.pdf
      # Tibetan, Range: 0F00-0FFF
      when (0xF3A..0xF47)
        acc + 1
      # http://www.unicode.org/charts/PDF/UFF00.pdf
      # Halfwidth and Fullwidth Forms, Range: FF00-FFEF
      when (0xFF01..0xFF5E) # Fullwidth ASCII variants
        acc + 2
      when (0xFF5F..0xFF60) # Fullwidth brackets
        acc + 2
      when (0xFF61..0xFF64) # Halfwidth CJK punctuation
        acc + 1
      when (0xFF65..0xFF9F) # Halfwidth Katakana variants
        acc + 1
      when (0xFFA0..0xFFDC) # Halfwidth Hangul variants
        acc + 1
      when (0xFFE0..0xFFE6) # Fullwidth symbol variants
        acc + 2
      when (0xFFE8..0xFFEE) # Halfwidth symbol variants
        acc + 1
      when *CHAR_CODES_OF_WIDTH_1
        acc + 1
      when lambda { |ord| Emoji.find_by_unicode([ord].pack('U*')) }
        acc + 1
      else
        acc + 2
      end
    end
  end

  # Left-justify to +width+ display columns; returns self unchanged when
  # already at least that wide. twidth is computed once (the old double call
  # was wasteful and, combined with the mutation bug above, wrong).
  def tljust(width)
    w = twidth
    w < width ? self + ' ' * (width - w) : self
  end
end
module Terminal
  # Plain-text table renderer aware of terminal display widths
  # (relies on String#twidth / String#tljust defined in this file).
  class Table
    attr_accessor :rows
    attr_accessor :headings
    attr_accessor :column_widths
    attr_accessor :new_line_symbol

    # object: a Hash, an Enumerable of Hashes/rows, or nil.
    # options:
    #   :use_new_line_symbol - render embedded newlines as a visible glyph
    #   :flatten             - normalise hashes so all rows share the same keys
    #   :only / :except      - whitelist / blacklist hash keys
    def initialize(object = nil, options = {})
      @rows = []
      @headings = []
      @column_widths = []
      @new_line_symbol = options[:use_new_line_symbol] ? '⏎' : ' '
      if options[:flatten]
        raise 'should be an array' unless object.respond_to?(:each)
        all_keys = object.each.map(&:keys).flatten.map(&:to_sym).uniq
        object.each do |hash|
          all_keys.each { |key| hash[key] = '' if hash[key].nil? }
        end
      end
      if object
        if object.is_a?(Hash)
          add_hash(object, options)
        elsif object.respond_to?(:each)
          object.each { |o| add_object(o, options) }
        else
          add_object(object, options)
        end
      end
      yield self if block_given?
      recalculate_column_widths!
    end

    # Append one row; hash-like objects set the headings as a side effect.
    def add_object(object, options)
      if object.respond_to?(:to_hash)
        add_hash(object.to_hash, options)
      elsif object.respond_to?(:each)
        @rows << object
      end
    end

    def add_hash(hash, options)
      if options[:only]
        hash.keep_if { |k, v| options[:only].map(&:to_sym).include?(k.to_sym) }
      elsif options[:except]
        hash.delete_if { |k, v| options[:except].map(&:to_sym).include?(k.to_sym) }
      end
      @headings = hash.keys.map(&:to_s)
      @rows << hash.values
    end

    def headings=(headings)
      @headings = headings
    end

    # Normalises newlines in cell text and recomputes per-column widths
    # using display width (twidth), not character count.
    def recalculate_column_widths!
      @rows = rows.map { |row| row.map { |item| item.to_s.gsub("\r\n", @new_line_symbol).gsub("\n", @new_line_symbol).gsub("\r", @new_line_symbol) } }
      if @rows.count > 0
        (0...@rows.first.size).each do |col|
          @column_widths[col] = @rows.map { |row| row[col].to_s.twidth }.max
        end
      end
      if @headings.count > 0
        (0...@headings.size).each do |col|
          @column_widths[col] = [@column_widths[col] || 0, @headings[col].twidth].max
        end
      end
    end

    def to_s
      recalculate_column_widths!
      result = ''
      border = '+' + @column_widths.map { |w| '-' * (w + 2) }.join('+') + '+' + "\n"
      if @headings.count > 0
        result += border
        cells = @headings.each_with_index.map { |cell, i| cell.to_s.tljust(@column_widths[i]) }
        result += '| ' + cells.join(' | ') + " |\n"
      end
      result += border
      @rows.each do |row|
        cells = row.each_with_index.map { |cell, i| cell.to_s.tljust(@column_widths[i]) }
        result += '| ' + cells.join(' | ') + " |\n"
      end
      result + border
    end

    class << self
      # FIX: previously referenced the nonexistent constants
      # String::CHARS_OF_WIDTH_OF_1 / CHARS_OF_WIDTH_OF_0 and always raised
      # NameError; this String patch only defines CHAR_CODES_OF_WIDTH_1.
      def special_tokens
        String::CHAR_CODES_OF_WIDTH_1
      end
    end
  end
end
|
# coding: utf-8
require "gemoji"
class String
  # Combining marks and other zero-width code points.
  CHAR_CODES_OF_WIDTH_0 = [768, 769, 776, 780, 785, 800, 801, 802, 804, 805, 807, 808,
    809, 811, 820, 821, 822, 823, 840, 847, 860, 862, 863, 865,
    1552, 1553, 1554, 1555, 1556, 1557, 1558, 1560, 1561, 1756,
    2370, 2760, 3267, 3636, 3659, 7620, 7621, 8408, 8409, 8411]
  # Single code points rendered one column wide that are not covered by the
  # range-based rules in #twidth below.
  CHAR_CODES_OF_WIDTH_1 = [660, 661, 662, 666,
    706, 707, 713, 714, 715, 717, 726, 728, 730, 757, 758, 920,
    921, 927, 931, 949, 969, 1013, 1014, 1044, 1053, 1072, 1076,
    1079, 1090, 1096, 1342, 1608, 1641, 1782, 2919, 2920, 3232,
    3237, 3589, 3665, 3844, 3900, 3901, 5026, 5046, 5072, 5603,
    5608, 7447, 7461, 7500, 7506, 8198, 8203, 8212, 8214, 8216, 8217,
    8220, 8221, 8226, 8230, 8242, 8248, 8251, 8254, 8316, 8317,
    8318, 8320, 8330, 8333, 8334, 8451, 8461, 8469, 8545, 8592,
    8594, 8595, 8704, 8711, 8730, 8736, 8743, 8745, 8746, 8750,
    8804, 8805, 8806, 8807, 8857, 9000, 9166, 9472, 9473, 9484,
    9488, 9517, 9518, 9531, 9573, 9581, 9582, 9583, 9584, 9587,
    9600, 9604, 9608, 9612, 9633, 9649, 9651, 9660, 9661, 9670,
    9675, 9678, 9679, 9697, 9728, 9729, 9730, 9733, 9734, 9752,
    9756, 9758, 9786, 9787, 9794, 9818, 9825, 9829, 9834, 9836,
    9996, 9999, 10004, 10008, 10023, 10023, 10026, 10047, 10048,
    10084, 10085, 10086, 10102, 11015, 57643, 58141, 58370, 58381,
    58387]
  # Multi-codepoint sequences (emoji + variation selectors etc.) rendered
  # one / two columns wide; counted and removed before the per-char pass.
  MULTI_CHAR_OF_WIDTH_1 = %w{ ☺️ ❤️ ♍️ ☔️ ‾᷄ ‾᷅ ⁻̫ ✖️ 😂 ☀︎ ❓ ⁉️ ☁︎ ⬇️ ❄️ ✌️ ♨️ 6⃣ ♻️ ♒️
    ✏️ 🇨🇳 ☁️ ✈️ ☀️ ♥️ ⚡️ ✔️ 🇰🇷 ⌛️ }
  MULTI_CHAR_OF_WIDTH_2 = %w{ ・᷄ ・᷅ ㊙️ }

  # Terminal display width of the string (2 columns for fullwidth chars,
  # 0 for combining marks, 1 otherwise).
  #
  # FIX: this method previously called gsub! on the receiver, so merely
  # measuring a string destructively stripped its emoji (and made tljust
  # return corrupted data). All scanning now happens on a private copy.
  def twidth
    text = dup
    result = 0
    MULTI_CHAR_OF_WIDTH_1.each do |c|
      if text.include?(c)
        result += 1 * text.scan(c).size
        text.gsub!(c, '')
      end
    end
    MULTI_CHAR_OF_WIDTH_2.each do |c|
      if text.include?(c)
        result += 2 * text.scan(c).size
        text.gsub!(c, '')
      end
    end
    # acc (not `result`) avoids shadowing the outer accumulator.
    text.chars.inject(result) do |acc, c|
      case c.ord
      when (0..0x7F) # Basic Latin
        acc + 1
      when (0x80..0xFF) # Latin-1 Supplement
        acc + 1
      when (0x100..0x17F) # Latin Extended-A
        acc + 1
      when (0x180..0x24F) # Latin Extended-B
        acc + 1
      when (0x2C60..0x2C7F) # Latin Extended-C
        acc + 1
      when (0xA720..0xA7FF) # Latin Extended-D
        acc + 1
      when (0xAB30..0xAB6F) # Latin Extended-E
        acc + 1
      when (0x1E00..0x1EFF) # Latin Extended Additional
        acc + 1
      when (0xFB00..0xFB06) # Latin Ligatures
        acc + 1
      # http://www.unicode.org/charts/PDF/UFF00.pdf
      when (0xFF01..0xFF5E) # Fullwidth ASCII variants
        acc + 2
      when (0xFF5F..0xFF60) # Fullwidth brackets
        acc + 2
      when (0xFF61..0xFF64) # Halfwidth CJK punctuation
        acc + 1
      when (0xFF65..0xFF9F) # Halfwidth Katakana variants
        acc + 1
      when (0xFFA0..0xFFDC) # Halfwidth Hangul variants
        acc + 1
      when *CHAR_CODES_OF_WIDTH_0
        acc
      when *CHAR_CODES_OF_WIDTH_1
        acc + 1
      when lambda { |ord| Emoji.find_by_unicode([ord].pack('U*')) }
        acc + 1
      else
        acc + 2
      end
    end
  end

  # Left-justify to +width+ display columns; returns self unchanged when
  # already at least that wide. twidth is computed once (the old double call
  # was wasteful and, combined with the mutation bug above, wrong).
  def tljust(width)
    w = twidth
    w < width ? self + ' ' * (width - w) : self
  end
end
module Terminal
  # Plain-text table renderer aware of terminal display widths
  # (relies on String#twidth / String#tljust defined in this file).
  class Table
    attr_accessor :rows
    attr_accessor :headings
    attr_accessor :column_widths
    attr_accessor :new_line_symbol

    # object: a Hash, an Enumerable of Hashes/rows, or nil.
    # options:
    #   :use_new_line_symbol - render embedded newlines as a visible glyph
    #   :flatten             - normalise hashes so all rows share the same keys
    #   :only / :except      - whitelist / blacklist hash keys
    def initialize(object = nil, options = {})
      @rows = []
      @headings = []
      @column_widths = []
      @new_line_symbol = options[:use_new_line_symbol] ? '⏎' : ' '
      if options[:flatten]
        raise 'should be an array' unless object.respond_to?(:each)
        all_keys = object.each.map(&:keys).flatten.map(&:to_sym).uniq
        object.each do |hash|
          all_keys.each { |key| hash[key] = '' if hash[key].nil? }
        end
      end
      if object
        if object.is_a?(Hash)
          add_hash(object, options)
        elsif object.respond_to?(:each)
          object.each { |o| add_object(o, options) }
        else
          add_object(object, options)
        end
      end
      yield self if block_given?
      recalculate_column_widths!
    end

    # Append one row; hash-like objects set the headings as a side effect.
    def add_object(object, options)
      if object.respond_to?(:to_hash)
        add_hash(object.to_hash, options)
      elsif object.respond_to?(:each)
        @rows << object
      end
    end

    def add_hash(hash, options)
      if options[:only]
        hash.keep_if { |k, v| options[:only].map(&:to_sym).include?(k.to_sym) }
      elsif options[:except]
        hash.delete_if { |k, v| options[:except].map(&:to_sym).include?(k.to_sym) }
      end
      @headings = hash.keys.map(&:to_s)
      @rows << hash.values
    end

    def headings=(headings)
      @headings = headings
    end

    # Normalises newlines in cell text and recomputes per-column widths
    # using display width (twidth), not character count.
    def recalculate_column_widths!
      @rows = rows.map { |row| row.map { |item| item.to_s.gsub("\r\n", @new_line_symbol).gsub("\n", @new_line_symbol).gsub("\r", @new_line_symbol) } }
      if @rows.count > 0
        (0...@rows.first.size).each do |col|
          @column_widths[col] = @rows.map { |row| row[col].to_s.twidth }.max
        end
      end
      if @headings.count > 0
        (0...@headings.size).each do |col|
          @column_widths[col] = [@column_widths[col] || 0, @headings[col].twidth].max
        end
      end
    end

    def to_s
      recalculate_column_widths!
      result = ''
      border = '+' + @column_widths.map { |w| '-' * (w + 2) }.join('+') + '+' + "\n"
      if @headings.count > 0
        result += border
        cells = @headings.each_with_index.map { |cell, i| cell.to_s.tljust(@column_widths[i]) }
        result += '| ' + cells.join(' | ') + " |\n"
      end
      result += border
      @rows.each do |row|
        cells = row.each_with_index.map { |cell, i| cell.to_s.tljust(@column_widths[i]) }
        result += '| ' + cells.join(' | ') + " |\n"
      end
      result + border
    end

    class << self
      # FIX: previously referenced the nonexistent constants
      # String::CHARS_OF_WIDTH_OF_1 / CHARS_OF_WIDTH_OF_0 (NameError);
      # the actual constants are CHAR_CODES_OF_WIDTH_1 / _0.
      def special_tokens
        String::CHAR_CODES_OF_WIDTH_1 + String::CHAR_CODES_OF_WIDTH_0
      end
    end
  end
end
more on FF00–FFEF
# coding: utf-8
require "gemoji"
# Patches String with terminal-display-width helpers used by the table
# renderer: #twidth (display columns) and #tljust (width-aware ljust).
class String
  # Codepoints that render with zero width (combining marks etc.).
  CHAR_CODES_OF_WIDTH_0 = [768, 769, 776, 780, 785, 800, 801, 802, 804, 805, 807, 808,
                           809, 811, 820, 821, 822, 823, 840, 847, 860, 862, 863, 865,
                           1552, 1553, 1554, 1555, 1556, 1557, 1558, 1560, 1561, 1756,
                           2370, 2760, 3267, 3636, 3659, 7620, 7621, 8408, 8409, 8411]
  # Codepoints explicitly known to render one column wide (would otherwise
  # fall through to the default width of 2 below).
  CHAR_CODES_OF_WIDTH_1 = [660, 661, 662, 666,
                           706, 707, 713, 714, 715, 717, 726, 728, 730, 757, 758, 920,
                           921, 927, 931, 949, 969, 1013, 1014, 1044, 1053, 1072, 1076,
                           1079, 1090, 1096, 1342, 1608, 1641, 1782, 2919, 2920, 3232,
                           3237, 3589, 3665, 3844, 3900, 3901, 5026, 5046, 5072, 5603,
                           5608, 7447, 7461, 7500, 7506, 8198, 8203, 8212, 8214, 8216, 8217,
                           8220, 8221, 8226, 8230, 8242, 8248, 8251, 8254, 8316, 8317,
                           8318, 8320, 8330, 8333, 8334, 8451, 8461, 8469, 8545, 8592,
                           8594, 8595, 8704, 8711, 8730, 8736, 8743, 8745, 8746, 8750,
                           8804, 8805, 8806, 8807, 8857, 9000, 9166, 9472, 9473, 9484,
                           9488, 9517, 9518, 9531, 9573, 9581, 9582, 9583, 9584, 9587,
                           9600, 9604, 9608, 9612, 9633, 9649, 9651, 9660, 9661, 9670,
                           9675, 9678, 9679, 9697, 9728, 9729, 9730, 9733, 9734, 9752,
                           9756, 9758, 9786, 9787, 9794, 9818, 9825, 9829, 9834, 9836,
                           9996, 9999, 10004, 10008, 10023, 10023, 10026, 10047, 10048,
                           10084, 10085, 10086, 10102, 11015, 57643, 58141, 58370, 58381,
                           58387]
  # Multi-codepoint sequences (emoji + variation selectors, flags, keycaps)
  # that render one / two columns wide and must be counted as a unit.
  MULTI_CHAR_OF_WIDTH_1 = %w{ ☺️ ❤️ ♍️ ☔️ ‾᷄ ‾᷅ ⁻̫ ✖️ 😂 ☀︎ ❓ ⁉️ ☁︎ ⬇️ ❄️ ✌️ ♨️ 6⃣ ♻️ ♒️
                              ✏️ 🇨🇳 ☁️ ✈️ ☀️ ♥️ ⚡️ ✔️ 🇰🇷 ⌛️ }
  MULTI_CHAR_OF_WIDTH_2 = %w{ ・᷄ ・᷅ ㊙️ }

  # Returns the terminal display width ("t(erminal) width") of the string.
  #
  # Fixed: the previous implementation called gsub! on self, so measuring a
  # string destructively stripped its emoji — and tljust, which calls
  # twidth twice, padded with the wrong amount. All work now happens on a
  # copy of the receiver.
  def twidth
    str = dup
    width = 0
    MULTI_CHAR_OF_WIDTH_1.each do |seq|
      occurrences = str.scan(seq).size
      next if occurrences.zero?
      width += occurrences
      str = str.gsub(seq, '')
    end
    MULTI_CHAR_OF_WIDTH_2.each do |seq|
      occurrences = str.scan(seq).size
      next if occurrences.zero?
      width += 2 * occurrences
      str = str.gsub(seq, '')
    end
    str.each_char.inject(width) do |acc, c|
      case c.ord
      when (0..0x7F) then acc + 1          # Basic Latin
      when (0x80..0xFF) then acc + 1       # Latin-1 Supplement
      when (0x100..0x17F) then acc + 1     # Latin Extended-A
      when (0x180..0x24F) then acc + 1     # Latin Extended-B
      when (0x2C60..0x2C7F) then acc + 1   # Latin Extended-C
      when (0xA720..0xA7FF) then acc + 1   # Latin Extended-D
      when (0xAB30..0xAB6F) then acc + 1   # Latin Extended-E
      when (0x1E00..0x1EFF) then acc + 1   # Latin Extended Additional
      when (0xFB00..0xFB06) then acc + 1   # Latin Ligatures
      # http://www.unicode.org/charts/PDF/UFF00.pdf
      # Halfwidth and Fullwidth Forms, Range: FF00–FFEF
      when (0xFF01..0xFF5E) then acc + 2   # Fullwidth ASCII variants
      when (0xFF5F..0xFF60) then acc + 2   # Fullwidth brackets
      when (0xFF61..0xFF64) then acc + 1   # Halfwidth CJK punctuation
      when (0xFF65..0xFF9F) then acc + 1   # Halfwidth Katakana variants
      when (0xFFA0..0xFFDC) then acc + 1   # Halfwidth Hangul variants
      when (0xFFE0..0xFFE6) then acc + 2   # Fullwidth symbol variants
      when (0xFFE8..0xFFEE) then acc + 1   # Halfwidth symbol variants
      when *CHAR_CODES_OF_WIDTH_0 then acc
      when *CHAR_CODES_OF_WIDTH_1 then acc + 1
      # Known emoji render one column wide; evaluated last so the explicit
      # tables above win. Requires the gemoji gem (Emoji).
      when lambda { |ord| Emoji.find_by_unicode([ord].pack('U*')) } then acc + 1
      else acc + 2                          # default: assume double-width
      end
    end
  end

  # Left-justifies to +width+ display columns (like ljust, but counting
  # terminal width). Returns self unchanged when already wide enough.
  def tljust(width)
    w = twidth
    w < width ? self + ' ' * (width - w) : self
  end
end
module Terminal
  # Minimal plain-text table renderer with terminal-width-aware padding
  # (relies on String#twidth / String#tljust patched elsewhere in this file).
  class Table
    attr_accessor :rows
    attr_accessor :headings # writer doubles as the heading setter (redundant explicit writer removed)
    attr_accessor :column_widths
    attr_accessor :new_line_symbol

    # object  - nil, a Hash (one row, keys become headings), or an
    #           enumerable of row objects (hashes or arrays).
    # options - :use_new_line_symbol - render newlines as '⏎' instead of ' '
    #           :flatten - pad an array of hashes so all share the same keys
    #           :only / :except - key filters forwarded to #add_hash
    def initialize(object = nil, options = {})
      @rows = []
      @headings = []
      @column_widths = []
      @new_line_symbol = options[:use_new_line_symbol] ? '⏎' : ' '
      if options[:flatten]
        raise 'should be an array' unless object.respond_to?(:each)
        # Fill missing keys with '' so every row has every column.
        all_keys = object.each.map(&:keys).flatten.map(&:to_sym).uniq
        object.each do |hash|
          all_keys.each do |key|
            hash[key] = '' if hash[key].nil?
          end
        end
      end
      if object
        if object.is_a?(Hash)
          add_hash(object, options)
        elsif object.respond_to?(:each)
          object.each { |o| add_object(o, options) }
        else
          add_object(object, options)
        end
      end
      yield self if block_given?
      recalculate_column_widths!
    end

    # Adds a single row object: hash-likes via #add_hash, other enumerables
    # verbatim; anything else is ignored.
    def add_object(object, options)
      if object.respond_to?(:to_hash)
        add_hash(object.to_hash, options)
      elsif object.respond_to?(:each)
        @rows << object
      end
    end

    # Adds one hash as a row, keys becoming the headings.
    # Fixed: filters no longer mutate the caller's hash (previously
    # keep_if/delete_if were applied destructively).
    def add_hash(hash, options)
      if options[:only]
        keep = options[:only].map(&:to_sym)
        hash = hash.select { |k, _| keep.include?(k.to_sym) }
      elsif options[:except]
        drop = options[:except].map(&:to_sym)
        hash = hash.reject { |k, _| drop.include?(k.to_sym) }
      end
      @headings = hash.keys.map(&:to_s)
      @rows << hash.values
    end

    # Normalizes newlines in cells and recomputes @column_widths from the
    # widest cell/heading per column (terminal display widths).
    def recalculate_column_widths!
      @rows = rows.map { |row| row.map { |item| item.to_s.gsub("\r\n", @new_line_symbol).gsub("\n", @new_line_symbol).gsub("\r", @new_line_symbol) } }
      if @rows.count > 0
        (0...@rows.first.size).each do |col|
          @column_widths[col] = @rows.map { |row| row[col].to_s.twidth }.max
        end
      end
      if @headings.count > 0
        (0...@headings.size).each do |col|
          @column_widths[col] = [@column_widths[col] || 0, @headings[col].twidth].max
        end
      end
    end

    # Renders the table as ASCII art with +---+ borders.
    def to_s
      recalculate_column_widths!
      result = ''
      header_and_footer = '+' + @column_widths.map { |w| '-' * (w + 2) }.join('+') + '+' + "\n"
      if @headings.count > 0
        result += header_and_footer
        content = @headings.each_with_index.map { |grid, i| grid.to_s.tljust(@column_widths[i]) }
        result += '| ' + content.join(' | ') + " |\n"
      end
      result += header_and_footer
      @rows.each do |row|
        content = row.each_with_index.map { |grid, i| grid.to_s.tljust(@column_widths[i]) }
        result += '| ' + content.join(' | ') + " |\n"
      end
      result + header_and_footer
    end

    class << self
      # All explicitly-tracked codepoints (width-1 plus width-0 lists).
      # Fixed: previously referenced non-existent String::CHARS_OF_WIDTH_OF_*
      # constants and always raised NameError.
      def special_tokens
        String::CHAR_CODES_OF_WIDTH_1 + String::CHAR_CODES_OF_WIDTH_0
      end
    end
  end
end
|
require "thread_watcher/version"
require "thread_watcher/process_watch"
module ThreadWatcher
  require 'singleton'

  # Process-wide facade over ThreadWatcher::ProcessWatch. Every class
  # method delegates to the singleton instance, so callers can write
  # ThreadWatcher::Monitor.run(...) without managing the instance.
  class Monitor
    include Singleton

    def initialize
      @process_watch = ThreadWatcher::ProcessWatch.new
    end

    # Forwards options and block to ProcessWatch#run.
    def run(options = {}, &block)
      @process_watch.run(options, &block)
    end

    def kill(id)
      @process_watch.kill(id)
    end

    def kill!(id)
      @process_watch.kill!(id)
    end

    def restart(id)
      @process_watch.restart(id)
    end

    def status
      @process_watch.status
    end

    # Class-level convenience API delegating to the singleton instance.
    class << self
      def run(options = {}, &block)
        instance.run(options, &block)
      end

      %i[kill kill! restart].each do |action|
        define_method(action) { |id| instance.public_send(action, id) }
      end

      def status
        instance.status
      end
    end
  end
end
restructured require files
require "thread_watcher/version"
require "thread_watcher/thread_formatter"
require "thread_watcher/thread_holder"
require "thread_watcher/process_watch"
module ThreadWatcher
  require 'singleton' # stdlib; Monitor is a process-wide singleton

  # Facade over ThreadWatcher::ProcessWatch. Each instance method delegates
  # to @process_watch; each class method delegates to the singleton
  # instance, so callers use ThreadWatcher::Monitor.run etc. directly.
  class Monitor
    include Singleton
    def initialize
      @process_watch = ThreadWatcher::ProcessWatch.new
    end
    # Forwards options and block to ProcessWatch#run.
    def run options = {}, &block
      @process_watch.run options, &block
    end
    def self.run options = {}, &block
      instance.run options, &block
    end
    # Delegates to ProcessWatch#kill for the entry identified by id.
    def kill id
      @process_watch.kill id
    end
    def self.kill id
      instance.kill id
    end
    # Delegates to ProcessWatch#kill! (forceful variant).
    def kill! id
      @process_watch.kill! id
    end
    def self.kill! id
      instance.kill! id
    end
    # Delegates to ProcessWatch#restart.
    def restart id
      @process_watch.restart id
    end
    def self.restart id
      instance.restart id
    end
    # Delegates to ProcessWatch#status.
    def status
      @process_watch.status
    end
    def self.status
      instance.status
    end
  end
end
|
module Timers
  # Gem version string.
  VERSION = "3.0.1"
end
Bump version.
module Timers
  # Gem version string.
  VERSION = "4.0.0"
end
|
module Timing
  # Gem version string.
  VERSION = '0.1.0'
end
Set version number
module Timing
  # Gem version string.
  VERSION = '0.0.1'
end
|
# encoding: UTF-8
module Tinder
  # Gem version; the guard avoids redefinition warnings on double load.
  VERSION = '1.9.0' unless defined?(::Tinder::VERSION)
end
Bump version to 1.9.1
# encoding: UTF-8
module Tinder
  # Gem version; the guard avoids redefinition warnings on double load.
  VERSION = '1.9.1' unless defined?(::Tinder::VERSION)
end
|
require 'thor'
require 'active_support/all'
module TogoStanza
module CLI
# `togostanza init NAME`: generates a new stanza-provider skeleton
# (Gemfile, config.ru, Procfile, unicorn config, log dir), then runs
# bundler and initializes a git repository inside it.
class ProviderGenerator < Thor::Group
  include Thor::Actions
  argument :name
  # Thor::Actions looks here for the .erb templates.
  def self.source_root
    File.expand_path('../../../templates/provider', __FILE__)
  end
  def create_files
    template 'gitignore.erb', "#{name}/.gitignore"
    template 'Gemfile.erb', "#{name}/Gemfile"
    template 'config.ru.erb', "#{name}/config.ru"
    template 'Procfile.erb', "#{name}/Procfile"
    template 'config/unicorn.rb.erb', "#{name}/config/unicorn.rb"
    create_file "#{name}/log/.keep"
  end
  def init_repo
    inside name do
      run "bundle"
      run "git init ."
      run "git add -A"
    end
  end
end
# `togostanza stanza new NAME`: generates a new stanza gem skeleton and
# registers it in the provider's Gemfile.
class StanzaGenerator < Thor::Group
  include Thor::Actions
  argument :name
  # Thor::Actions looks here for the .erb templates.
  def self.source_root
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def create_files
    template 'Gemfile.erb', "#{file_name}/Gemfile"
    template 'gemspec.erb', "#{file_name}/#{file_name}.gemspec"
    template 'lib.rb.erb', "#{file_name}/lib/#{file_name}.rb"
    template 'stanza.rb.erb', "#{file_name}/stanza.rb"
    template 'template.hbs.erb', "#{file_name}/template.hbs"
    template 'help.md.erb', "#{file_name}/help.md"
    template 'metadata.json.erb', "#{file_name}/metadata.json"
    create_file "#{file_name}/assets/#{stanza_id}/.keep"
  end
  # Adds the generated gem to the provider's Gemfile.
  def inject_gem
    append_to_file 'Gemfile', "gem '#{file_name}', path: './#{file_name}'\n"
  end
  private
  # Naming helpers derived from the NAME argument (ActiveSupport inflections).
  def stanza_id
    name.underscore.sub(/_stanza$/, '')
  end
  def file_name
    stanza_id + '_stanza'
  end
  def class_name
    file_name.classify
  end
  def title
    stanza_id.titleize
  end
end
# `togostanza stanza modify OLD_NAME NEW_NAME`: renames an existing stanza
# by rewriting names inside the generated files, then renaming files and
# directories. Trailing slashes (from shell tab completion) are stripped.
class NameModifier < Thor::Group
  include Thor::Actions
  argument :name1 ,:type => :string
  argument :name2 ,:type => :string
  def self.source_root
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def replace_description
    name1_chopped = chop_slash(name1)
    name2_chopped = chop_slash(name2)
    gsub_file("#{files_name(name1_chopped)}/help.md", titles(name1_chopped), titles(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/help.md", stanzas_id(name1_chopped), stanzas_id(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/#{files_name(name1_chopped)}.gemspec", files_name(name1_chopped), files_name(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/lib/#{files_name(name1_chopped)}.rb", classes_name(name1_chopped), classes_name(name2_chopped))
    # Older stanzas may predate metadata.json; create it before rewriting.
    if File.exist?("#{files_name(name1_chopped)}/metadata.json") == false then
      template 'metadata.json.erb', "#{file_name}/metadata.json"
    end
    gsub_file("#{files_name(name1_chopped)}/metadata.json", stanzas_id(name1_chopped), stanzas_id(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/stanza.rb", classes_name(name1_chopped), classes_name(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/template.hbs", titles(name1_chopped), titles(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/template.hbs", "assets/#{stanzas_id(name1_chopped)}","assets/#{stanzas_id(name2_chopped)}")
    gsub_file("#{files_name(name1_chopped)}/template.hbs", "#{stanzas_id(name1_chopped)}/resources", "#{stanzas_id(name2_chopped)}/resources")
    # NOTE(review): plain-string replacement can also match longer gem
    # names containing this name — confirm before relying on it.
    gsub_file("Gemfile", "#{files_name(name1_chopped)}", "#{files_name(name2_chopped)}")
  end
  # Renames asset dir, lib file, gemspec, then the stanza directory itself
  # (in that order, since the directory rename must come last).
  def rename_directory
    name1_chopped = chop_slash(name1)
    name2_chopped = chop_slash(name2)
    File.rename("#{files_name(name1_chopped)}/assets/#{stanzas_id(name1_chopped)}", "#{files_name(name1_chopped)}/assets/#{stanzas_id(name2_chopped)}")
    File.rename("#{files_name(name1_chopped)}/lib/#{files_name(name1_chopped)}.rb", "#{files_name(name1_chopped)}/lib/#{files_name(name2_chopped)}.rb")
    File.rename("#{files_name(name1_chopped)}/#{files_name(name1_chopped)}.gemspec", "#{files_name(name1_chopped)}/#{files_name(name2_chopped)}.gemspec")
    File.rename("#{files_name(name1_chopped)}", "#{files_name(name2_chopped)}")
  end
  private
  # Strips one trailing "/" (e.g. from shell completion).
  def chop_slash(name)
    if "#{name[-1]}" == "/" then
      name.chop
    else
      name
    end
  end
  def stanza_id
    name1_chopped = chop_slash(name1)
    name1_chopped.underscore.sub(/_stanza$/, '')
  end
  def file_name
    stanza_id + '_stanza'
  end
  # Parameterized naming helpers (cf. StanzaGenerator's argument-based ones).
  def stanzas_id(name)
    name.underscore.sub(/_stanza$/, '')
  end
  def files_name(name)
    stanzas_id(name) + '_stanza'
  end
  def classes_name(name)
    files_name(name).classify
  end
  def titles(name)
    stanzas_id(name).titleize
  end
end
# `togostanza name NAME`: writes the author name into the stanza templates
# so future generated stanzas carry it.
class NameRegister < Thor::Group
  include Thor::Actions
  argument :name
  # Directory holding the stanza .erb templates.
  def template_dir
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def replace_author
    gsub_file("#{template_dir}/gemspec.erb", /spec.authors\s*=\s\[\'.*\'\]/, "spec.authors = ['#{name}']")
    gsub_file("#{template_dir}/metadata.json.erb", /author":\s".*"/, "author\": \"#{name}\"")
  end
end
# `togostanza mail ADDRESS`: writes the author e-mail address into the
# stanza templates so future generated stanzas carry it.
class MailRegister < Thor::Group
  include Thor::Actions
  argument :name
  # Directory holding the stanza .erb templates.
  def template_dir
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def replace_author
    # Fixed: previously called self.source_root from an instance method,
    # but source_root is not defined on this class (the sibling
    # NameRegister correctly uses template_dir).
    gsub_file("#{template_dir}/gemspec.erb", /spec.email\s*=\s\[\'.*\'\]/, "spec.email = ['#{name}']")
    gsub_file("#{template_dir}/metadata.json.erb", /address":\s".*"/, "address\": \"#{name}\"")
  end
end
# CLI wiring: `togostanza stanza new|modify` subcommands and the top-level
# `init`, `name`, `mail` commands. Consolidated from repeated class
# reopenings; registration order is preserved.
class Stanza < Thor
  register StanzaGenerator, 'new', 'new NAME', 'Creates a new stanza'
  register NameModifier, 'modify', 'modify OLD_NAME NEW_NAME', 'Modify a name of stanza'
end
class Root < Thor
  register NameRegister, 'name', 'name NAME', 'register your name'
  register MailRegister, 'mail', 'mail ADDRESS', 'register your mail'
  register ProviderGenerator, 'init', 'init NAME', 'Creates a new provider'
  desc 'stanza [COMMAND]', ''
  subcommand 'stanza', Stanza
end
end
end
正規表現のために、Gemfileで似た名前を置換してしまう問題が有ったので解消 (Fixed an issue where the regex-based replacement in the Gemfile also replaced similar gem names.)
require 'thor'
require 'active_support/all'
module TogoStanza
module CLI
# `togostanza init NAME`: generates a new stanza-provider skeleton
# (Gemfile, config.ru, Procfile, unicorn config, log dir), then runs
# bundler and initializes a git repository inside it.
class ProviderGenerator < Thor::Group
  include Thor::Actions
  argument :name
  # Thor::Actions looks here for the .erb templates.
  def self.source_root
    File.expand_path('../../../templates/provider', __FILE__)
  end
  def create_files
    template 'gitignore.erb', "#{name}/.gitignore"
    template 'Gemfile.erb', "#{name}/Gemfile"
    template 'config.ru.erb', "#{name}/config.ru"
    template 'Procfile.erb', "#{name}/Procfile"
    template 'config/unicorn.rb.erb', "#{name}/config/unicorn.rb"
    create_file "#{name}/log/.keep"
  end
  def init_repo
    inside name do
      run "bundle"
      run "git init ."
      run "git add -A"
    end
  end
end
# `togostanza stanza new NAME`: generates a new stanza gem skeleton and
# registers it in the provider's Gemfile.
class StanzaGenerator < Thor::Group
  include Thor::Actions
  argument :name
  # Thor::Actions looks here for the .erb templates.
  def self.source_root
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def create_files
    template 'Gemfile.erb', "#{file_name}/Gemfile"
    template 'gemspec.erb', "#{file_name}/#{file_name}.gemspec"
    template 'lib.rb.erb', "#{file_name}/lib/#{file_name}.rb"
    template 'stanza.rb.erb', "#{file_name}/stanza.rb"
    template 'template.hbs.erb', "#{file_name}/template.hbs"
    template 'help.md.erb', "#{file_name}/help.md"
    template 'metadata.json.erb', "#{file_name}/metadata.json"
    create_file "#{file_name}/assets/#{stanza_id}/.keep"
  end
  # Adds the generated gem to the provider's Gemfile.
  def inject_gem
    append_to_file 'Gemfile', "gem '#{file_name}', path: './#{file_name}'\n"
  end
  private
  # Naming helpers derived from the NAME argument (ActiveSupport inflections).
  def stanza_id
    name.underscore.sub(/_stanza$/, '')
  end
  def file_name
    stanza_id + '_stanza'
  end
  def class_name
    file_name.classify
  end
  def title
    stanza_id.titleize
  end
end
# `togostanza stanza modify OLD_NAME NEW_NAME`: renames an existing stanza
# by rewriting names inside the generated files, then renaming files and
# directories. Trailing slashes (from shell tab completion) are stripped.
class NameModifier < Thor::Group
  include Thor::Actions
  argument :name1 ,:type => :string
  argument :name2 ,:type => :string
  def self.source_root
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def replace_description
    name1_chopped = chop_slash(name1)
    name2_chopped = chop_slash(name2)
    gsub_file("#{files_name(name1_chopped)}/help.md", titles(name1_chopped), titles(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/help.md", stanzas_id(name1_chopped), stanzas_id(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/#{files_name(name1_chopped)}.gemspec", files_name(name1_chopped), files_name(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/lib/#{files_name(name1_chopped)}.rb", classes_name(name1_chopped), classes_name(name2_chopped))
    # Older stanzas may predate metadata.json; create it before rewriting.
    if File.exist?("#{files_name(name1_chopped)}/metadata.json") == false then
      template 'metadata.json.erb', "#{file_name}/metadata.json"
    end
    gsub_file("#{files_name(name1_chopped)}/metadata.json", stanzas_id(name1_chopped), stanzas_id(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/stanza.rb", classes_name(name1_chopped), classes_name(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/template.hbs", titles(name1_chopped), titles(name2_chopped))
    gsub_file("#{files_name(name1_chopped)}/template.hbs", "assets/#{stanzas_id(name1_chopped)}","assets/#{stanzas_id(name2_chopped)}")
    gsub_file("#{files_name(name1_chopped)}/template.hbs", "#{stanzas_id(name1_chopped)}/resources", "#{stanzas_id(name2_chopped)}/resources")
    # Quoted regex forms so similar gem names are not also replaced.
    gsub_file("Gemfile", /\'#{files_name(name1_chopped)}\'/, "\'#{files_name(name2_chopped)}\'")
    gsub_file("Gemfile", /\'\.\/#{files_name(name1_chopped)}\'/, "\'\.\/#{files_name(name2_chopped)}\'")
  end
  # Renames asset dir, lib file, gemspec, then the stanza directory itself
  # (in that order, since the directory rename must come last).
  def rename_directory
    name1_chopped = chop_slash(name1)
    name2_chopped = chop_slash(name2)
    # Some stanzas lack the asset directory; create it so the rename succeeds.
    if File.exist?("#{files_name(name1_chopped)}/assets/#{stanzas_id(name1_chopped)}") == false then
      Dir.mkdir("#{files_name(name1_chopped)}/assets/#{stanzas_id(name1_chopped)}")
    end
    File.rename("#{files_name(name1_chopped)}/assets/#{stanzas_id(name1_chopped)}", "#{files_name(name1_chopped)}/assets/#{stanzas_id(name2_chopped)}")
    File.rename("#{files_name(name1_chopped)}/lib/#{files_name(name1_chopped)}.rb", "#{files_name(name1_chopped)}/lib/#{files_name(name2_chopped)}.rb")
    File.rename("#{files_name(name1_chopped)}/#{files_name(name1_chopped)}.gemspec", "#{files_name(name1_chopped)}/#{files_name(name2_chopped)}.gemspec")
    File.rename("#{files_name(name1_chopped)}", "#{files_name(name2_chopped)}")
  end
  private
  # Strips one trailing "/" (e.g. from shell completion).
  def chop_slash(name)
    if "#{name[-1]}" == "/" then
      name.chop
    else
      name
    end
  end
  def stanza_id
    name1_chopped = chop_slash(name1)
    name1_chopped.underscore.sub(/_stanza$/, '')
  end
  def file_name
    stanza_id + '_stanza'
  end
  # Parameterized naming helpers (cf. StanzaGenerator's argument-based ones).
  def stanzas_id(name)
    name.underscore.sub(/_stanza$/, '')
  end
  def files_name(name)
    stanzas_id(name) + '_stanza'
  end
  def classes_name(name)
    files_name(name).classify
  end
  def titles(name)
    stanzas_id(name).titleize
  end
end
# `togostanza name NAME`: writes the author name into the stanza templates
# so future generated stanzas carry it.
class NameRegister < Thor::Group
  include Thor::Actions
  argument :name
  # Directory holding the stanza .erb templates.
  def template_dir
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def replace_author
    gsub_file("#{template_dir}/gemspec.erb", /spec.authors\s*=\s\[\'.*\'\]/, "spec.authors = ['#{name}']")
    gsub_file("#{template_dir}/metadata.json.erb", /author":\s".*"/, "author\": \"#{name}\"")
  end
end
# `togostanza mail ADDRESS`: writes the author e-mail address into the
# stanza templates so future generated stanzas carry it.
class MailRegister < Thor::Group
  include Thor::Actions
  argument :name
  # Directory holding the stanza .erb templates.
  def template_dir
    File.expand_path('../../../templates/stanza', __FILE__)
  end
  def replace_author
    # Fixed: previously called self.source_root from an instance method,
    # but source_root is not defined on this class (the sibling
    # NameRegister correctly uses template_dir).
    gsub_file("#{template_dir}/gemspec.erb", /spec.email\s*=\s\[\'.*\'\]/, "spec.email = ['#{name}']")
    gsub_file("#{template_dir}/metadata.json.erb", /address":\s".*"/, "address\": \"#{name}\"")
  end
end
# CLI wiring: `togostanza stanza new|modify` subcommands and the top-level
# `init`, `name`, `mail` commands. Consolidated from repeated class
# reopenings; registration order is preserved.
class Stanza < Thor
  register StanzaGenerator, 'new', 'new NAME', 'Creates a new stanza'
  register NameModifier, 'modify', 'modify OLD_NAME NEW_NAME', 'Modify a name of stanza'
end
class Root < Thor
  register NameRegister, 'name', 'name NAME', 'register your name'
  register MailRegister, 'mail', 'mail ADDRESS', 'register your mail'
  register ProviderGenerator, 'init', 'init NAME', 'Creates a new provider'
  desc 'stanza [COMMAND]', ''
  subcommand 'stanza', Stanza
end
end
end
|
module TOML
  # Serializes a (possibly nested) Hash into a TOML string; the result is
  # available via #body after construction.
  class Generator
    attr_reader :body, :doc

    def initialize(doc)
      @body = ""
      @doc = doc
      visit(@doc)
      # (Removed useless `return @body`: a constructor's return value is
      # always the new object; read the output via #body.)
    end

    # Recursively emits +hash+ under the dotted table +path+.
    # Scalar pairs are written first (under a "[path]" header when nested),
    # then sub-hashes are visited as "path.key" tables.
    def visit(hash, path = "")
      hash_pairs = [] # Sub-hashes
      other_pairs = []
      hash.keys.sort.each do |key|
        val = hash[key]
        # TODO: Refactor for other hash-likes (OrderedHash)
        if val.is_a? Hash
          hash_pairs << [key, val]
        else
          other_pairs << [key, val]
        end
      end
      # Handle all the key-values
      if !path.empty? && !other_pairs.empty?
        @body += "[#{path}]\n"
      end
      other_pairs.each do |pair|
        key, val = pair
        @body += "#{key} = #{format(val)}\n"
      end
      @body += "\n" unless other_pairs.empty?
      # Then deal with sub-hashes
      hash_pairs.each do |pair|
        key, hash = pair
        visit(hash, (path.empty? ? key : [path, key].join(".")))
      end
    end#visit

    # Returns the value formatted for TOML.
    # NOTE: shadows Kernel#format inside this class (intentional here).
    def format(val)
      # For most everything this should work just fine.
      val.inspect # TODO: Real escaping and such.
    end
  end#Generator
end#TOML
Add injected to_toml methods for Ruby internal classes (for #4)
module TOML
  # Serializes a (possibly nested) Hash into a TOML string; the result is
  # available via #body after construction. Serialization dispatches on
  # each value's #to_toml, which is monkey-patched onto core classes by
  # self.inject! (custom classes can define their own #to_toml).
  class Generator
    attr_reader :body, :doc
    def initialize(doc)
      # Ensure all the to_toml methods are injected into the base Ruby classes
      # used by TOML.
      self.class.inject!
      @body = ""
      @doc = doc
      visit(@doc)
      return @body
    end
    @@injected = false # Whether or not the injections have already been done.
    # Inject to_toml methods into the Ruby classes used by TOML (booleans,
    # String, Numeric, Array). You can add to_toml methods to your own classes
    # to allow them to be easily serialized by the generator (and it will shout
    # if something doesn't have a to_toml method).
    def self.inject!
      return if @@injected
      TrueClass.instance_eval { define_method(:to_toml) { "true" } }
      FalseClass.instance_eval { define_method(:to_toml) { "false" } }
      String.instance_eval do
        define_method(:to_toml) do
          # TODO: Make sure this is 100% TOML spec-compliant.
          self.inspect
        end
      end
      Numeric.instance_eval { define_method(:to_toml) { self.to_s } }
      Array.instance_eval do
        define_method(:to_toml) do
          # TODO: Add validations to make sure all values are the same type.
          "[" + self.map {|v| v.to_toml }.join(",") + "]"
        end
      end
      @@injected = true
    end#self.inject!
    # Recursively emits +hash+ under the dotted table +path+: scalar pairs
    # first (under "[path]" when nested), then sub-hashes as "path.key".
    def visit(hash, path = "")
      hash_pairs = [] # Sub-hashes
      other_pairs = []
      hash.keys.sort.each do |key|
        val = hash[key]
        # TODO: Refactor for other hash-likes (OrderedHash)
        if val.is_a? Hash
          hash_pairs << [key, val]
        else
          other_pairs << [key, val]
        end
      end
      # Handle all the key-values
      if !path.empty? && !other_pairs.empty?
        @body += "[#{path}]\n"
      end
      other_pairs.each do |pair|
        key, val = pair
        @body += "#{key} = #{format(val)}\n"
      end
      @body += "\n" unless other_pairs.empty?
      # Then deal with sub-hashes
      hash_pairs.each do |pair|
        key, hash = pair
        visit(hash, (path.empty? ? key : [path, key].join(".")))
      end
    end#visit
    # Returns the value formatted for TOML (raises NoMethodError for types
    # without #to_toml). NOTE: shadows Kernel#format inside this class.
    def format(val)
      val.to_toml
    end
  end#Generator
end#TOML
|
require 'httpclient'
require 's3'
module Tootsie
  # Raised when an output URL cannot be stored.
  # NOTE(review): inherits Exception, so a bare `rescue` will NOT catch it;
  # confirm caller expectations before changing to StandardError.
  class IncompatibleOutputError < Exception; end

  # Buffers job output in a temp file (written via #file_name), then stores
  # it at a destination URL (file:, s3: or http(s):) with #put!.
  class Output
    def initialize(url)
      @url = url
      @temp_file = Tempfile.new('tootsie')
      @temp_file.close
      @file_name = @temp_file.path
      @logger = Application.get.logger
    end

    # Put data into the output. Options:
    #
    # * +:content_type+ - content type of the stored data.
    #
    def put!(options = {})
      @logger.info("Storing #{@url}")
      case @url
      when /^file:(.*)/
        FileUtils.cp(@temp_file.path, $1)
        # Fixed: record the final location, as the other branches do
        # (previously @result_url was never set for file: outputs).
        @result_url = @url
      when /^s3:.*/
        s3_options = S3Utilities.parse_uri(@url)
        bucket_name, path = s3_options[:bucket], s3_options[:key]
        File.open(@temp_file.path, 'r') do |file|
          s3_service = Tootsie::Application.get.s3_service
          begin
            object = s3_service.buckets.find(bucket_name).objects.build(path)
            object.acl = s3_options[:acl] || :private
            object.content_type = s3_options[:content_type]
            object.content_type ||= @content_type if @content_type
            object.storage_class = s3_options[:storage_class] || :standard
            object.content = file
            object.save
            @result_url = object.url
          rescue ::S3::Error::NoSuchBucket
            raise IncompatibleOutputError, "Bucket #{bucket_name} not found"
          end
        end
      when /^http(s?):\/\//
        # Fixed: the temp file was opened 'wb' (write mode), which truncates
        # the buffered output before it can be uploaded; open read-only.
        File.open(@temp_file.path, 'rb') do |file|
          HTTPClient.new.post(@url, [],
            {'Content-Type' => @content_type, :data => file})
        end
      else
        raise IncompatibleOutputError, "Don't know to store output URL: #{@url}"
      end
    end

    # Deletes the temp file backing this output.
    def close
      @temp_file.unlink
    end

    attr_reader :url
    attr_reader :result_url
    attr_reader :file_name
    attr_accessor :content_type
  end
end
Fix non-working file:// output.
require 'httpclient'
require 's3'
module Tootsie
  # Raised when an output URL cannot be stored.
  # NOTE(review): inherits Exception, so a bare `rescue` will NOT catch it;
  # confirm caller expectations before changing to StandardError.
  class IncompatibleOutputError < Exception; end

  # Buffers job output in a temp file (written via #file_name), then stores
  # it at a destination URL (file:, s3: or http(s):) with #put!.
  class Output
    def initialize(url)
      @url = url
      @temp_file = Tempfile.new('tootsie')
      @temp_file.close
      @file_name = @temp_file.path
      @logger = Application.get.logger
    end

    # Put data into the output. Options:
    #
    # * +:content_type+ - content type of the stored data.
    #
    def put!(options = {})
      @logger.info("Storing #{@url}")
      case @url
      when /^file:(.*)/
        FileUtils.cp(@temp_file.path, $1)
        @result_url = @url
      when /^s3:.*/
        s3_options = S3Utilities.parse_uri(@url)
        bucket_name, path = s3_options[:bucket], s3_options[:key]
        File.open(@temp_file.path, 'r') do |file|
          s3_service = Tootsie::Application.get.s3_service
          begin
            object = s3_service.buckets.find(bucket_name).objects.build(path)
            object.acl = s3_options[:acl] || :private
            object.content_type = s3_options[:content_type]
            object.content_type ||= @content_type if @content_type
            object.storage_class = s3_options[:storage_class] || :standard
            object.content = file
            object.save
            @result_url = object.url
          rescue ::S3::Error::NoSuchBucket
            raise IncompatibleOutputError, "Bucket #{bucket_name} not found"
          end
        end
      when /^http(s?):\/\//
        # Fixed: the temp file was opened 'wb' (write mode), which truncates
        # the buffered output before it can be uploaded; open read-only.
        File.open(@temp_file.path, 'rb') do |file|
          HTTPClient.new.post(@url, [],
            {'Content-Type' => @content_type, :data => file})
        end
      else
        raise IncompatibleOutputError, "Don't know to store output URL: #{@url}"
      end
    end

    # Deletes the temp file backing this output.
    def close
      @temp_file.unlink
    end

    attr_reader :url
    attr_reader :result_url
    attr_reader :file_name
    attr_accessor :content_type
  end
end
|
require 'rubygems'
require 'json'
require 'uri'
module TorqueSpec
  # Driver mixin for a JBoss AS7 standalone server: lifecycle, deployment
  # and readiness checks via the HTTP management ("domain-api") endpoint.
  # The including class is expected to provide #post.
  module AS7
    # Management port of the AS7 HTTP API.
    def port
      9990
    end

    # Full java command line used to boot a standalone AS7 instance.
    def start_command
      "#{ENV['JAVA_HOME']}/bin/java #{TorqueSpec.jvm_args} -Dorg.jboss.resolver.warning=true -Dsun.rmi.dgc.client.gcInterval=3600000 -Dsun.rmi.dgc.server.gcInterval=3600000 -Dorg.jboss.boot.log.file=#{TorqueSpec.jboss_home}/standalone/log/boot.log -Dlogging.configuration=file:#{TorqueSpec.jboss_home}/standalone/configuration/logging.properties -jar #{TorqueSpec.jboss_home}/jboss-modules.jar -mp #{TorqueSpec.jboss_home}/modules -logmodule org.jboss.logmanager -jaxpmodule javax.xml.jaxp-provider org.jboss.as.standalone -Djboss.home.dir=#{TorqueSpec.jboss_home}"
    end

    # Asks the server to shut down; the connection dropping mid-response
    # (EOFError) is expected and ignored.
    def shutdown
      domain_api( :operation => "shutdown" )
    rescue EOFError
      # ignorable
    end

    # Registers and deploys the archive at +path+. If the initial "add"
    # fails (e.g. a leftover deployment with the same name), undeploys once
    # and retries. Fixed: a second failure is now re-raised instead of
    # being silently swallowed as before.
    def _deploy(path)
      retried = false
      begin
        domain_api( :operation => "add",
                    :address => [ "deployment", addressify(path) ],
                    :url => urlify(path) )
      rescue Exception => e # NOTE(review): broad rescue kept as-is; consider StandardError
        _undeploy(path)
        raise e if retried
        retried = true
        retry
      end
      domain_api( :operation => "deploy",
                  :address => [ "deployment", addressify(path) ] )
    end

    # Removes the deployment registered for +path+.
    def _undeploy(path)
      domain_api( :operation => "remove",
                  :address => [ "deployment", addressify(path) ] )
    end

    # True when the management API reports server-state RUNNING; any
    # error (server not up yet, bad JSON) counts as "not ready".
    def ready?
      response = JSON.parse( domain_api( :operation => "read-attribute",
                                         :name => "server-state") )
      response['outcome']=='success' && response['result']=='RUNNING'
    rescue
      false
    end

    private

    # POSTs a JSON request to the management endpoint (#post comes from the
    # including class).
    def domain_api(params)
      post('/domain-api', params.merge('json.pretty' => 1).to_json)
    end

    # Bare paths become file: URLs; URLs with a scheme pass through.
    def urlify(path)
      URI.parse(path).scheme.nil? ? "file:#{path}" : path
    end

    # Deployment name used in management addresses (just the basename).
    def addressify(path)
      File.basename(path)
    end
  end
end
Adjust to slightly different deployment API under AS7-SNAPSHOT
require 'rubygems'
require 'json'
require 'uri'
module TorqueSpec
  # Driver mixin for a JBoss AS7 standalone server: lifecycle, deployment
  # and readiness checks via the HTTP management ("domain-api") endpoint.
  # The including class is expected to provide #post.
  module AS7
    # Management port of the AS7 HTTP API.
    def port
      9990
    end
    # Full java command line used to boot a standalone AS7 instance.
    def start_command
      "#{ENV['JAVA_HOME']}/bin/java #{TorqueSpec.jvm_args} -Dorg.jboss.resolver.warning=true -Dsun.rmi.dgc.client.gcInterval=3600000 -Dsun.rmi.dgc.server.gcInterval=3600000 -Dorg.jboss.boot.log.file=#{TorqueSpec.jboss_home}/standalone/log/boot.log -Dlogging.configuration=file:#{TorqueSpec.jboss_home}/standalone/configuration/logging.properties -jar #{TorqueSpec.jboss_home}/jboss-modules.jar -mp #{TorqueSpec.jboss_home}/modules -logmodule org.jboss.logmanager -jaxpmodule javax.xml.jaxp-provider org.jboss.as.standalone -Djboss.home.dir=#{TorqueSpec.jboss_home}"
    end
    # Asks the server to shut down; the connection dropping mid-response
    # (EOFError) is expected and ignored.
    def shutdown
      domain_api( :operation => "shutdown" )
    rescue EOFError
      # ignorable
    end
    # Registers and deploys the archive at +path+ (AS7-SNAPSHOT API shape:
    # content is a list of url hashes). If the initial "add" fails,
    # undeploys once and retries; a second failure is re-raised.
    def _deploy(path)
      once = true
      begin
        domain_api( :operation => "add",
                    :address => [ "deployment", addressify(path) ],
                    :content => [ { :url=>urlify(path)} ] )
      rescue Exception=>e # NOTE(review): broad rescue — consider StandardError
        _undeploy(path)
        if once
          once = false
          retry
        else
          raise e
        end
      end
      domain_api( :operation => "deploy",
                  :address => [ "deployment", addressify(path) ] )
    end
    # Removes the deployment registered for +path+.
    def _undeploy(path)
      domain_api( :operation => "remove",
                  :address => [ "deployment", addressify(path) ] )
    end
    # True when the management API reports server-state RUNNING; any error
    # (server not up yet, bad JSON) counts as "not ready".
    def ready?
      response = JSON.parse( domain_api( :operation => "read-attribute",
                                         :name => "server-state") )
      response['outcome']=='success' && response['result']=='RUNNING'
    rescue
      false
    end
    private
    # POSTs a JSON request to the management endpoint (#post comes from the
    # including class).
    def domain_api(params)
      post('/domain-api', params.merge('json.pretty' => 1).to_json)
    end
    # Bare paths become file: URLs; URLs with a scheme pass through.
    def urlify(path)
      URI.parse(path).scheme.nil? ? "file:#{path}" : path
    end
    # Deployment name used in management addresses (just the basename).
    def addressify(path)
      File.basename(path)
    end
  end
end
|
require 'json'
require 'pathname'
require 'org-ruby'
require 'ostruct'
require_relative 'file_bundle'
module Trackler
  # Track is a collection of exercises in a given language.
  # Configuration is read lazily from tracks/<id>/config.json under +root+.
  class Track
    TOPICS = %w(about installation tests learning resources)

    # Lightweight handle on an image file under the track directory.
    Image = Struct.new(:path) do
      def exists?
        File.exist?(path)
      end

      # File-extension-derived type, e.g. :svg or :png.
      def type
        File.extname(path).sub('.', '').to_sym
      end
    end

    attr_reader :id, :root, :file_bundle

    def initialize(id, root)
      @id = id
      @root = Pathname.new(root)
      @file_bundle = FileBundle.new(dir.join("global"))
    end

    def exists?
      File.exist?(dir)
    end

    def active?
      !!config["active"]
    end

    # Inactive but already has implementations.
    def upcoming?
      !active? && implementations.length > 0
    end

    # Inactive and no implementations yet.
    def planned?
      !active? && implementations.length.zero?
    end

    def implementations
      @implementations ||= Implementations.new(repository, active_slugs, root, self)
    end

    def problems
      warn "DEPRECATION WARNING: A track only has implementations, call track.implementations instead"
      implementations
    end

    def checklist_issue
      config.fetch("checklist_issue", 1)
    end

    def gitter
      config["gitter"]
    end

    def icon_path
      icon.path if icon.exists?
    end

    # Prefers the SVG icon, falling back to PNG.
    def icon
      @icon ||= svg_icon.exists? ? svg_icon : png_icon
    end

    def language
      config['language'].to_s.strip
    end

    def repository
      config['repository'].to_s.strip
    end

    # Regexp matching test files; defaults to /test/i when unconfigured.
    def test_pattern
      if config.key?('test_pattern')
        Regexp.new(config['test_pattern'])
      else
        /test/i
      end
    end

    def ignore_pattern
      config.fetch('ignore_pattern', 'example')
    end

    # OpenStruct of rendered docs keyed by topic (see TOPICS).
    def docs(image_path: DocFile::DEFAULT_IMAGE_PATH)
      OpenStruct.new(docs_by_topic(image_path))
    end

    def img(file_path)
      Image.new(File.join(dir, file_path))
    end

    # Most common file extension among docs/*, defaulting to 'md'.
    def doc_format
      default_format = 'md'
      path = File.join(dir, "docs", "*.*")
      most_popular_format(path) || default_format
    end

    def global_zip
      @zip ||= file_bundle.zip
    end

    # Every slug mentioned in the configuration.
    def slugs
      active_slugs + foregone_slugs + deprecated_slugs
    end

    def dir
      root.join("tracks", id)
    end

    # Rendered EXERCISE_README_INSERT doc.
    # Fixed: dropped the stray `docfile =` assignment to an unused local.
    def hints
      DocFile.find(basename: 'EXERCISE_README_INSERT', track_dir: dir).render
    end

    private

    def active_slugs
      (config["exercises"] || []).map { |ex| ex["slug"] }
    end

    def foregone_slugs
      config["foregone"] || []
    end

    def deprecated_slugs
      config["deprecated"] || []
    end

    def most_popular_format(path)
      formats = Dir.glob(path).map do |filename|
        File.extname(filename).sub(/^\./, '')
      end
      formats.max_by { |format| formats.count(format) }
    end

    def config
      @config ||= JSON.parse(File.read(config_filename))
    end

    def config_filename
      File.join(dir, "config.json")
    end

    def docs_by_topic(image_path)
      Hash[
        TOPICS.zip(
          TOPICS.map { |topic|
            DocFile.find(basename: topic.upcase, track_dir: dir).render(image_path: image_path)
          }
        )
      ]
    end

    def svg_icon
      @svg_icon ||= Image.new(File.join(dir, "img/icon.svg"))
    end

    def png_icon
      @png_icon ||= Image.new(File.join(dir, "img/icon.png"))
    end
  end
end
Delete stray assignment
require 'json'
require 'pathname'
require 'org-ruby'
require 'ostruct'
require_relative 'file_bundle'
module Trackler
  # Track is a collection of exercises in a given language.
  # Configuration is read lazily from tracks/<id>/config.json under +root+.
  class Track
    TOPICS = %w(about installation tests learning resources)
    # Lightweight handle on an image file under the track directory.
    Image = Struct.new(:path) do
      def exists?
        File.exist?(path)
      end
      # File-extension-derived type, e.g. :svg or :png.
      def type
        File.extname(path).sub('.', '').to_sym
      end
    end
    attr_reader :id, :root, :file_bundle
    def initialize(id, root)
      @id = id
      @root = Pathname.new(root)
      @file_bundle = FileBundle.new(dir.join("global"))
    end
    def exists?
      File.exist?(dir)
    end
    def active?
      !!config["active"]
    end
    # Inactive but already has implementations.
    def upcoming?
      !active? && implementations.length > 0
    end
    # Inactive and no implementations yet.
    def planned?
      !active? && implementations.length.zero?
    end
    def implementations
      @implementations ||= Implementations.new(repository, active_slugs, root, self)
    end
    def problems
      warn "DEPRECATION WARNING: A track only has implementations, call track.implementations instead"
      implementations
    end
    def checklist_issue
      config.fetch("checklist_issue", 1)
    end
    def gitter
      config["gitter"]
    end
    def icon_path
      icon.path if icon.exists?
    end
    # Prefers the SVG icon, falling back to PNG.
    def icon
      @icon ||= svg_icon.exists? ? svg_icon : png_icon
    end
    def language
      config['language'].to_s.strip
    end
    def repository
      config['repository'].to_s.strip
    end
    # Regexp matching test files; defaults to /test/i when unconfigured.
    def test_pattern
      if config.key?('test_pattern')
        Regexp.new(config['test_pattern'])
      else
        /test/i
      end
    end
    def ignore_pattern
      config.fetch('ignore_pattern', 'example')
    end
    # OpenStruct of rendered docs keyed by topic (see TOPICS).
    def docs(image_path: DocFile::DEFAULT_IMAGE_PATH)
      OpenStruct.new(docs_by_topic(image_path))
    end
    def img(file_path)
      Image.new(File.join(dir, file_path))
    end
    # Most common file extension among docs/*, defaulting to 'md'.
    def doc_format
      default_format = 'md'
      path = File.join(dir, "docs", "*.*")
      most_popular_format(path) || default_format
    end
    def global_zip
      @zip ||= file_bundle.zip
    end
    # Every slug mentioned in the configuration.
    def slugs
      active_slugs + foregone_slugs + deprecated_slugs
    end
    def dir
      root.join("tracks", id)
    end
    # Rendered EXERCISE_README_INSERT doc.
    def hints
      DocFile.find(basename: 'EXERCISE_README_INSERT', track_dir: dir).render
    end
    private
    def active_slugs
      (config["exercises"] || []).map { |ex| ex["slug"] }
    end
    def foregone_slugs
      config["foregone"] || []
    end
    def deprecated_slugs
      config["deprecated"] || []
    end
    def most_popular_format(path)
      formats = Dir.glob(path).map do |filename|
        File.extname(filename).sub(/^\./, '')
      end
      formats.max_by { |format| formats.count(format) }
    end
    # Parsed config.json, memoized.
    def config
      @config ||= JSON.parse(File.read(config_filename))
    end
    def config_filename
      File.join(dir, "config.json")
    end
    def docs_by_topic(image_path)
      Hash[
        TOPICS.zip(
          TOPICS.map { |topic|
            DocFile.find(basename: topic.upcase, track_dir: dir).render(image_path: image_path)
          }
        )
      ]
    end
    def svg_icon
      @svg_icon ||= Image.new(File.join(dir, "img/icon.svg"))
    end
    def png_icon
      @png_icon ||= Image.new(File.join(dir, "img/icon.png"))
    end
  end
end
|
module Transip
class Client
  API_VERSION = '5.0'
  API_SERVICE = 'DomainService'

  attr_accessor :username, :password, :ip, :mode, :hash
  attr_reader :response

  def api_version
    # We use self.class:: here to not use parentclass constant.
    @api_version || self.class::API_VERSION
  end

  def api_service
    @api_service || self.class::API_SERVICE
  end

  # WSDL location for the configured SOAP service.
  def wsdl
    "https://api.transip.nl/wsdl/?service=#{api_service}"
  end

  attr_accessor :debug

  # Options:
  # * username - Your login name on the TransIP website.
  # * ip - needed in production
  # * key / key_file - key is one of your private keys (these can be requested via your Controlpanel). key_file is path to file containing key.
  # * mode - :readonly, :readwrite
  #
  # Example:
  #  transip = Transip.new(:username => 'api_username', :ip => '12.34.12.3', :key => mykey, :mode => 'readwrite') # use this in production
  def initialize(options = {})
    @key = options[:key] || (options[:key_file] && File.read(options[:key_file]))
    @username = options[:username]
    @ip = options[:ip]
    @api_version = options[:api_version]
    @api_service = options[:api_service]
    raise ArgumentError, "The :username, :ip and :key options are required!" if @username.nil? or @key.nil?
    @mode = options[:mode] || :readonly
    @endpoint = options[:endpoint] || 'api.transip.nl'
    if options[:password]
      @password = options[:password]
    end
    @savon_options = {
      :wsdl => wsdl
    }
    # By default we don't want to debug!
    self.turn_off_debugging!
  end

  # By default we don't want to debug!
  # Changing might impact other Savon usages.
  def turn_off_debugging!
    @savon_options[:log] = false # disable logging
    @savon_options[:log_level] = :info # changing the log level
  end

  # Make Savon log to Rails.logger and turn_off_debugging!
  def use_with_rails!
    if Rails.env.production?
      self.turn_off_debugging!
    end
    @savon_options[:logger] = Rails.logger # using the Rails logger
  end

  # yes, i know, it smells bad
  # Turns [a, b] into {0 => a, 1 => b}, the indexed form TransIP expects.
  def convert_array_to_hash(array)
    result = {}
    array.each_with_index do |value, index|
      result[index] = value
    end
    result
  end

  # URL-encodes input for the signature scheme: spaces become %20 (not +)
  # and tildes are left unescaped.
  def urlencode(input)
    output = URI.encode_www_form_component(input)
    output.gsub!('+', '%20')
    output.gsub!('%7E', '~')
    output
  end

  # Recursively serializes parameters into the www-form-style string that
  # is fed into the request signature.
  def serialize_parameters(parameters, key_prefix=nil)
    # Fix: log message previously lacked the closing parenthesis.
    debug_log("serialize_parameters(#{parameters.inspect}, #{key_prefix.inspect})")
    parameters = parameters.to_hash.values.first if parameters.is_a? TransipStruct
    parameters = convert_array_to_hash(parameters) if parameters.is_a? Array
    if not parameters.is_a? Hash
      return urlencode(parameters)
    end
    return "#{key_prefix}=" if parameters.empty?
    encoded_parameters = []
    parameters.each do |key, value|
      next if key.to_s == '@xsi:type'
      encoded_key = (key_prefix.nil?) ? urlencode(key) : "#{key_prefix}[#{urlencode(key)}]"
      if value.is_a?(Hash) or value.is_a?(Array) or value.is_a?(TransipStruct)
        encoded_parameters << serialize_parameters(value, encoded_key)
      else
        encoded_value = urlencode(value)
        encoded_parameters << "#{encoded_key}=#{encoded_value}"
      end
    end
    encoded_parameters = encoded_parameters.join("&")
    debug_log("encoded_parameters:\n#{encoded_parameters.split('&').join("\n")}")
    encoded_parameters
  end

  # does all the techy stuff to calculate transip's sick authentication scheme:
  # a hash with all the request information is subsequently:
  # serialized like a www form
  # SHA512 digested
  # asn1 header added
  # private key encrypted
  # Base64 encoded
  # URL encoded
  # I think the guys at transip were trying to use their entire crypto-toolbox!
  def signature(method, parameters, time, nonce)
    formatted_method = method.to_s.lower_camelcase
    parameters ||= {}
    input = convert_array_to_hash(parameters.values)
    options = {
      '__method' => formatted_method,
      '__service' => api_service,
      '__hostname' => @endpoint,
      '__timestamp' => time,
      '__nonce' => nonce
    }
    input.merge!(options)
    raise "Invalid RSA key" unless @key =~ /-----BEGIN (RSA )?PRIVATE KEY-----(.*)-----END (RSA )?PRIVATE KEY-----/sim
    serialized_input = serialize_parameters(input)
    digest = Digest::SHA512.new.digest(serialized_input)
    asn_header = "\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40"
    # convert asn_header literal to ASCII-8BIT
    if RUBY_VERSION.split('.')[0] == "2"
      asn = asn_header.b + digest
    else
      asn = asn_header + digest
    end
    private_key = OpenSSL::PKey::RSA.new(@key)
    encrypted_asn = private_key.private_encrypt(asn)
    readable_encrypted_asn = Base64.encode64(encrypted_asn)
    urlencode(readable_encrypted_asn)
  end

  # Wraps each "key=value" string in an HTTPI::Cookie.
  def to_cookies(content)
    content.map do |item|
      HTTPI::Cookie.new item
    end
  end

  # Used for authentication
  def cookies(method, parameters)
    time = Time.new.to_i
    # strip out the -'s because transip requires the nonce to be between 6 and 32 chars
    nonce = SecureRandom.uuid.gsub("-", '')
    # Compute the expensive RSA signature once; it is deterministic for a
    # given time/nonce, so it can be reused for the debug output.
    signed = signature(method, parameters, time, nonce)
    result = to_cookies [ "login=#{self.username}",
                          "mode=#{self.mode}",
                          "timestamp=#{time}",
                          "nonce=#{nonce}",
                          "clientVersion=#{api_version}",
                          "signature=#{signed}"
                        ]
    debug_log("signature:\n#{signed}")
    result
  end

  # Same as client method but initializes a brand new fresh client.
  # You have to use this one when you want to re-set the mode (readwrite, readonly),
  # or authentication details of your client.
  def client!
    @client = Savon::Client.new(@savon_options) do
      namespaces(
        "xmlns:enc" => "http://schemas.xmlsoap.org/soap/encoding/"
      )
    end
    return @client
  end

  # Returns a Savon::Client object to be used in the connection.
  # This object is re-used and cached as @client.
  def client
    @client ||= client!
  end

  # Returns Array with all possible SOAP WSDL actions.
  def actions
    client.operations
  end

  # This makes sure that arrays are properly encoded as soap-arrays by Gyoku.
  # Fix: recurse into nested hashes so arrays nested inside them are
  # converted as well (previously only top-level arrays were handled).
  def fix_array_definitions(options)
    result = {}
    options.each do |key, value|
      if value.is_a?(Array) and (value.size > 0)
        entry_name = value.first.class.name.split(":").last
        result[key] = {
          'item' => {:content! => value, :'@xsi:type' => "tns:#{entry_name}"},
          :'@xsi:type' => "tns:ArrayOf#{entry_name}",
          :'@enc:arrayType' => "tns:#{entry_name}[#{value.size}]"
        }
      elsif value.is_a?(Hash)
        result[key] = fix_array_definitions(value)
      else
        result[key] = value
      end
    end
    result
  end

  # converts the savon response object to something we can return to the caller
  # - A TransipStruct object
  # - An array of TransipStructs
  # - nil
  def process_response(response)
    response = response.to_hash.values.first[:return] rescue nil
    TransipStruct.from_soap(response)
  end

  # This is the main request function
  # throws ApiError
  # returns response object (can be TransipStruct or Array of TransipStruct)
  def request(action, options = nil)
    formatted_action = action.to_s.lower_camelcase
    parameters = {
      # for some reason, the transip server wants the body root tag to be
      # the name of the action.
      :message_tag => formatted_action
    }
    options = options.to_hash if options.is_a?(Transip::TransipStruct)
    if options.is_a?(Hash)
      xml_options = fix_array_definitions(options)
    elsif options.nil?
      xml_options = nil
    else
      raise "Invalid parameter format (should be nil, hash or TransipStruct)"
    end
    parameters[:message] = xml_options
    parameters[:cookies] = cookies(action, options)
    debug_log("parameters:\n#{parameters.inspect}")
    response = client.call(action, parameters)
    process_response(response)
  rescue Savon::SOAPFault => e
    raise ApiError.new(e), e.message.sub(/^\(\d+\)\s+/,'') # We raise our own error (FIXME: Correct?).
  end

  private

  # Prints msg when debugging is enabled via #debug.
  def debug_log(msg)
    puts msg if @debug
  end
end
# 'Aliased' by Transip::Client.
class DomainClient < Client;end

# We name it VpsClient instead of VpsService since the latter is already in use by
# the TransipStruct.
class VpsClient < Client
  API_SERVICE = 'VpsService'
end

# Client bound to the ColocationService SOAP endpoint.
class ColocationClient < Client
  API_SERVICE = 'ColocationService'
end

# Client bound to the WebhostingService SOAP endpoint.
class WebhostingClient < Client
  API_SERVICE = 'WebhostingService'
end

# Client bound to the ForwardService SOAP endpoint.
class ForwardClient < Client
  API_SERVICE = 'ForwardService'
end
end
Fix array encoding for nested hash values in fix_array_definitions.
module Transip
class Client
  API_VERSION = '5.0'
  API_SERVICE = 'DomainService'

  attr_accessor :username, :password, :ip, :mode, :hash
  attr_reader :response

  def api_version
    # We use self.class:: here to not use parentclass constant.
    @api_version || self.class::API_VERSION
  end

  def api_service
    @api_service || self.class::API_SERVICE
  end

  # WSDL location for the configured SOAP service.
  def wsdl
    "https://api.transip.nl/wsdl/?service=#{api_service}"
  end

  attr_accessor :debug

  # Options:
  # * username - Your login name on the TransIP website.
  # * ip - needed in production
  # * key / key_file - key is one of your private keys (these can be requested via your Controlpanel). key_file is path to file containing key.
  # * mode - :readonly, :readwrite
  #
  # Example:
  #  transip = Transip.new(:username => 'api_username', :ip => '12.34.12.3', :key => mykey, :mode => 'readwrite') # use this in production
  def initialize(options = {})
    @key = options[:key] || (options[:key_file] && File.read(options[:key_file]))
    @username = options[:username]
    @ip = options[:ip]
    @api_version = options[:api_version]
    @api_service = options[:api_service]
    raise ArgumentError, "The :username, :ip and :key options are required!" if @username.nil? or @key.nil?
    @mode = options[:mode] || :readonly
    @endpoint = options[:endpoint] || 'api.transip.nl'
    if options[:password]
      @password = options[:password]
    end
    @savon_options = {
      :wsdl => wsdl
    }
    # By default we don't want to debug!
    self.turn_off_debugging!
  end

  # By default we don't want to debug!
  # Changing might impact other Savon usages.
  def turn_off_debugging!
    @savon_options[:log] = false # disable logging
    @savon_options[:log_level] = :info # changing the log level
  end

  # Make Savon log to Rails.logger and turn_off_debugging!
  def use_with_rails!
    if Rails.env.production?
      self.turn_off_debugging!
    end
    @savon_options[:logger] = Rails.logger # using the Rails logger
  end

  # yes, i know, it smells bad
  # Turns [a, b] into {0 => a, 1 => b}, the indexed form TransIP expects.
  def convert_array_to_hash(array)
    result = {}
    array.each_with_index do |value, index|
      result[index] = value
    end
    result
  end

  # URL-encodes input for the signature scheme: spaces become %20 (not +)
  # and tildes are left unescaped.
  def urlencode(input)
    output = URI.encode_www_form_component(input)
    output.gsub!('+', '%20')
    output.gsub!('%7E', '~')
    output
  end

  # Recursively serializes parameters into the www-form-style string that
  # is fed into the request signature.
  def serialize_parameters(parameters, key_prefix=nil)
    # Fix: log message previously lacked the closing parenthesis.
    debug_log("serialize_parameters(#{parameters.inspect}, #{key_prefix.inspect})")
    parameters = parameters.to_hash.values.first if parameters.is_a? TransipStruct
    parameters = convert_array_to_hash(parameters) if parameters.is_a? Array
    if not parameters.is_a? Hash
      return urlencode(parameters)
    end
    return "#{key_prefix}=" if parameters.empty?
    encoded_parameters = []
    parameters.each do |key, value|
      next if key.to_s == '@xsi:type'
      encoded_key = (key_prefix.nil?) ? urlencode(key) : "#{key_prefix}[#{urlencode(key)}]"
      if value.is_a?(Hash) or value.is_a?(Array) or value.is_a?(TransipStruct)
        encoded_parameters << serialize_parameters(value, encoded_key)
      else
        encoded_value = urlencode(value)
        encoded_parameters << "#{encoded_key}=#{encoded_value}"
      end
    end
    encoded_parameters = encoded_parameters.join("&")
    debug_log("encoded_parameters:\n#{encoded_parameters.split('&').join("\n")}")
    encoded_parameters
  end

  # does all the techy stuff to calculate transip's sick authentication scheme:
  # a hash with all the request information is subsequently:
  # serialized like a www form
  # SHA512 digested
  # asn1 header added
  # private key encrypted
  # Base64 encoded
  # URL encoded
  # I think the guys at transip were trying to use their entire crypto-toolbox!
  def signature(method, parameters, time, nonce)
    formatted_method = method.to_s.lower_camelcase
    parameters ||= {}
    input = convert_array_to_hash(parameters.values)
    options = {
      '__method' => formatted_method,
      '__service' => api_service,
      '__hostname' => @endpoint,
      '__timestamp' => time,
      '__nonce' => nonce
    }
    input.merge!(options)
    raise "Invalid RSA key" unless @key =~ /-----BEGIN (RSA )?PRIVATE KEY-----(.*)-----END (RSA )?PRIVATE KEY-----/sim
    serialized_input = serialize_parameters(input)
    digest = Digest::SHA512.new.digest(serialized_input)
    asn_header = "\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40"
    # convert asn_header literal to ASCII-8BIT
    if RUBY_VERSION.split('.')[0] == "2"
      asn = asn_header.b + digest
    else
      asn = asn_header + digest
    end
    private_key = OpenSSL::PKey::RSA.new(@key)
    encrypted_asn = private_key.private_encrypt(asn)
    readable_encrypted_asn = Base64.encode64(encrypted_asn)
    urlencode(readable_encrypted_asn)
  end

  # Wraps each "key=value" string in an HTTPI::Cookie.
  def to_cookies(content)
    content.map do |item|
      HTTPI::Cookie.new item
    end
  end

  # Used for authentication
  def cookies(method, parameters)
    time = Time.new.to_i
    # strip out the -'s because transip requires the nonce to be between 6 and 32 chars
    nonce = SecureRandom.uuid.gsub("-", '')
    # Compute the expensive RSA signature once; it is deterministic for a
    # given time/nonce, so it can be reused for the debug output.
    signed = signature(method, parameters, time, nonce)
    result = to_cookies [ "login=#{self.username}",
                          "mode=#{self.mode}",
                          "timestamp=#{time}",
                          "nonce=#{nonce}",
                          "clientVersion=#{api_version}",
                          "signature=#{signed}"
                        ]
    debug_log("signature:\n#{signed}")
    result
  end

  # Same as client method but initializes a brand new fresh client.
  # You have to use this one when you want to re-set the mode (readwrite, readonly),
  # or authentication details of your client.
  def client!
    @client = Savon::Client.new(@savon_options) do
      namespaces(
        "xmlns:enc" => "http://schemas.xmlsoap.org/soap/encoding/"
      )
    end
    return @client
  end

  # Returns a Savon::Client object to be used in the connection.
  # This object is re-used and cached as @client.
  def client
    @client ||= client!
  end

  # Returns Array with all possible SOAP WSDL actions.
  def actions
    client.operations
  end

  # This makes sure that arrays are properly encoded as soap-arrays by Gyoku,
  # recursing into nested hashes so deeply nested arrays are fixed as well.
  def fix_array_definitions(options)
    result = {}
    options.each do |key, value|
      if value.is_a?(Array) and (value.size > 0)
        entry_name = value.first.class.name.split(":").last
        result[key] = {
          'item' => {:content! => value, :'@xsi:type' => "tns:#{entry_name}"},
          :'@xsi:type' => "tns:ArrayOf#{entry_name}",
          :'@enc:arrayType' => "tns:#{entry_name}[#{value.size}]"
        }
      elsif value.is_a?(Hash)
        result[key] = fix_array_definitions(value)
      else
        result[key] = value
      end
    end
    result
  end

  # converts the savon response object to something we can return to the caller
  # - A TransipStruct object
  # - An array of TransipStructs
  # - nil
  def process_response(response)
    response = response.to_hash.values.first[:return] rescue nil
    TransipStruct.from_soap(response)
  end

  # This is the main request function
  # throws ApiError
  # returns response object (can be TransipStruct or Array of TransipStruct)
  def request(action, options = nil)
    formatted_action = action.to_s.lower_camelcase
    parameters = {
      # for some reason, the transip server wants the body root tag to be
      # the name of the action.
      :message_tag => formatted_action
    }
    options = options.to_hash if options.is_a?(Transip::TransipStruct)
    if options.is_a?(Hash)
      xml_options = fix_array_definitions(options)
    elsif options.nil?
      xml_options = nil
    else
      raise "Invalid parameter format (should be nil, hash or TransipStruct)"
    end
    parameters[:message] = xml_options
    parameters[:cookies] = cookies(action, options)
    debug_log("parameters:\n#{parameters.inspect}")
    response = client.call(action, parameters)
    process_response(response)
  rescue Savon::SOAPFault => e
    raise ApiError.new(e), e.message.sub(/^\(\d+\)\s+/,'') # We raise our own error (FIXME: Correct?).
  end

  private

  # Prints msg when debugging is enabled via #debug.
  def debug_log(msg)
    puts msg if @debug
  end
end
# 'Aliased' by Transip::Client.
class DomainClient < Client;end

# We name it VpsClient instead of VpsService since the latter is already in use by
# the TransipStruct.
class VpsClient < Client
  API_SERVICE = 'VpsService'
end

# Client bound to the ColocationService SOAP endpoint.
class ColocationClient < Client
  API_SERVICE = 'ColocationService'
end

# Client bound to the WebhostingService SOAP endpoint.
class WebhostingClient < Client
  API_SERVICE = 'WebhostingService'
end

# Client bound to the ForwardService SOAP endpoint.
class ForwardClient < Client
  API_SERVICE = 'ForwardService'
end
end |
require 'travis'
require 'travis/model'
require 'travis/support/amqp'
require 'travis/states_cache'
require 'backports'
require 'rack'
require 'rack/protection'
require 'rack/contrib'
require 'dalli'
require 'memcachier'
require 'rack/cache'
require 'rack/attack'
require 'active_record'
require 'redis'
require 'gh'
require 'raven'
require 'sidekiq'
require 'metriks/reporter/logger'
require 'metriks/librato_metrics_reporter'
require 'travis/support/log_subscriber/active_record_metrics'
require 'fileutils'
require 'travis/api/v2/http'
# Rack class implementing the HTTP API.
# Instances respond to #call.
#
# run Travis::Api::App.new
#
# Requires TLS in production.
module Travis::Api
  class App
    autoload :AccessToken, 'travis/api/app/access_token'
    autoload :Base,        'travis/api/app/base'
    autoload :Endpoint,    'travis/api/app/endpoint'
    autoload :Extensions,  'travis/api/app/extensions'
    autoload :Helpers,     'travis/api/app/helpers'
    autoload :Middleware,  'travis/api/app/middleware'
    autoload :Responders,  'travis/api/app/responders'
    autoload :Cors,        'travis/api/app/cors'
    Rack.autoload :SSL, 'rack/ssl'

    # Canned JSON body returned for unhandled errors in production.
    ERROR_RESPONSE = JSON.generate(error: 'Travis encountered an error, sorry :(')

    # Used to track if setup already ran.
    def self.setup?
      @setup ||= false
    end

    # Loads all endpoints and middleware and hooks them up properly.
    # Calls #setup on any middleware and endpoint.
    #
    # This method is not threadsafe, but called when loading
    # the environment, so no biggy.
    def self.setup(options = {})
      setup! unless setup?
      # NOTE(review): options defaults to {}, which is truthy, so this guard
      # always passes — confirm whether `unless options.empty?` was intended.
      Endpoint.set(options) if options
      FileUtils.touch('/tmp/app-initialized')
    end

    def self.new(options = {})
      setup(options)
      super()
    end

    # First 8 characters of the deployed git SHA; used to namespace caches.
    def self.deploy_sha
      @deploy_sha ||= File.exist?(deploy_sha_path) ? File.read(deploy_sha_path)[0..7] : 'deploy-sha'
    end

    def self.deploy_sha_path
      File.expand_path('../../../../.deploy-sha', __FILE__)
    end

    attr_accessor :app

    # Builds the Rack middleware stack and mounts every Endpoint subclass.
    def initialize
      @app = Rack::Builder.app do
        use(Rack::Config) { |env| env['metriks.request.start'] ||= Time.now.utc }
        Rack::Utils::HTTP_STATUS_CODES[420] = "Enhance Your Calm"
        use Rack::Attack
        Rack::Attack.blacklist('block client requesting ruby builds') do |req|
          req.ip == "130.15.4.210"
        end
        Rack::Attack.blacklisted_response = lambda do |env|
          [ 420, {}, ['Enhance Your Calm']]
        end
        use Travis::Api::App::Cors if Travis.env == 'development'
        use Raven::Rack if Endpoint.production?
        use Rack::Protection::PathTraversal
        use Rack::SSL if Endpoint.production?
        use ActiveRecord::ConnectionAdapters::ConnectionManagement
        use ActiveRecord::QueryCache
        memcache_servers = ENV['MEMCACHIER_SERVERS']
        if Travis::Features.feature_active?(:use_rack_cache) && memcache_servers
          use Rack::Cache,
              verbose: true,
              metastore:   "memcached://#{memcache_servers}/meta-#{Travis::Api::App.deploy_sha}",
              entitystore: "memcached://#{memcache_servers}/body-#{Travis::Api::App.deploy_sha}"
        end
        use Rack::Deflater
        use Rack::PostBodyContentTypeParser
        use Rack::JSONP
        use Rack::Config do |env|
          env['travis.global_prefix'] = env['SCRIPT_NAME']
        end
        use Travis::Api::App::Middleware::ScopeCheck
        use Travis::Api::App::Middleware::Logging
        use Travis::Api::App::Middleware::Metriks
        use Travis::Api::App::Middleware::Rewrite
        Endpoint.subclasses.each do |e|
          next if e == SettingsEndpoint # TODO: add something like abstract? method to check if
                                        # class should be registered
          map(e.prefix) { run(e.new) }
        end
      end
    end

    # Rack protocol
    def call(env)
      app.call(env)
    rescue
      if Endpoint.production?
        [500, {'Content-Type' => 'application/json'}, [ERROR_RESPONSE]]
      else
        raise
      end
    end

    private

    # NOTE(review): `private` has no effect on methods defined with `def self.`;
    # the class methods below remain publicly callable.
    def self.console?
      defined? Travis::Console
    end

    def self.setup!
      setup_travis
      load_endpoints
      setup_endpoints
      @setup = true
    end

    def self.setup_travis
      Travis::Amqp.config = Travis.config.amqp
      setup_database_connections
      if Travis.env == 'production' || Travis.env == 'staging'
        Sidekiq.configure_client do |config|
          config.redis = Travis.config.redis.merge(size: 1, namespace: Travis.config.sidekiq.namespace)
        end
      end
      if Travis.env == 'production' and not console?
        setup_monitoring
      end
    end

    def self.setup_database_connections
      Travis::Database.connect
      if Travis.config.logs_database
        Log.establish_connection 'logs_database'
        Log::Part.establish_connection 'logs_database'
      end
    end

    def self.setup_monitoring
      Raven.configure do |config|
        config.dsn = Travis.config.sentry.dsn
      end if Travis.config.sentry
      Travis::LogSubscriber::ActiveRecordMetrics.attach
      Travis::Notification.setup(instrumentation: false)
      if Travis.config.librato
        email, token, source = Travis.config.librato.email,
                               Travis.config.librato.token,
                               Travis.config.librato_source
        # Fix: log the response body — inspecting the bare response object
        # yields no useful detail about the failed Librato call.
        on_error = proc {|ex| puts "librato error: #{ex.message} (#{ex.response.body})"}
        Metriks::LibratoMetricsReporter.new(email, token, source: source, on_error: on_error).start
      end
    end

    def self.load_endpoints
      Backports.require_relative_dir 'app/middleware'
      Backports.require_relative_dir 'app/endpoint'
    end

    def self.setup_endpoints
      Base.subclasses.each(&:setup)
    end
  end
end
Output the response body in the Librato error handler.
require 'travis'
require 'travis/model'
require 'travis/support/amqp'
require 'travis/states_cache'
require 'backports'
require 'rack'
require 'rack/protection'
require 'rack/contrib'
require 'dalli'
require 'memcachier'
require 'rack/cache'
require 'rack/attack'
require 'active_record'
require 'redis'
require 'gh'
require 'raven'
require 'sidekiq'
require 'metriks/reporter/logger'
require 'metriks/librato_metrics_reporter'
require 'travis/support/log_subscriber/active_record_metrics'
require 'fileutils'
require 'travis/api/v2/http'
# Rack class implementing the HTTP API.
# Instances respond to #call.
#
# run Travis::Api::App.new
#
# Requires TLS in production.
module Travis::Api
  class App
    autoload :AccessToken, 'travis/api/app/access_token'
    autoload :Base,        'travis/api/app/base'
    autoload :Endpoint,    'travis/api/app/endpoint'
    autoload :Extensions,  'travis/api/app/extensions'
    autoload :Helpers,     'travis/api/app/helpers'
    autoload :Middleware,  'travis/api/app/middleware'
    autoload :Responders,  'travis/api/app/responders'
    autoload :Cors,        'travis/api/app/cors'
    Rack.autoload :SSL, 'rack/ssl'

    # Canned JSON body returned for unhandled errors in production.
    ERROR_RESPONSE = JSON.generate(error: 'Travis encountered an error, sorry :(')

    # Used to track if setup already ran.
    def self.setup?
      @setup ||= false
    end

    # Loads all endpoints and middleware and hooks them up properly.
    # Calls #setup on any middleware and endpoint.
    #
    # This method is not threadsafe, but called when loading
    # the environment, so no biggy.
    def self.setup(options = {})
      setup! unless setup?
      # NOTE(review): options defaults to {}, which is truthy, so this guard
      # always passes — confirm whether `unless options.empty?` was intended.
      Endpoint.set(options) if options
      FileUtils.touch('/tmp/app-initialized')
    end

    def self.new(options = {})
      setup(options)
      super()
    end

    # First 8 characters of the deployed git SHA; used to namespace caches.
    def self.deploy_sha
      @deploy_sha ||= File.exist?(deploy_sha_path) ? File.read(deploy_sha_path)[0..7] : 'deploy-sha'
    end

    def self.deploy_sha_path
      File.expand_path('../../../../.deploy-sha', __FILE__)
    end

    attr_accessor :app

    # Builds the Rack middleware stack and mounts every Endpoint subclass.
    def initialize
      @app = Rack::Builder.app do
        use(Rack::Config) { |env| env['metriks.request.start'] ||= Time.now.utc }
        Rack::Utils::HTTP_STATUS_CODES[420] = "Enhance Your Calm"
        use Rack::Attack
        Rack::Attack.blacklist('block client requesting ruby builds') do |req|
          req.ip == "130.15.4.210"
        end
        Rack::Attack.blacklisted_response = lambda do |env|
          [ 420, {}, ['Enhance Your Calm']]
        end
        use Travis::Api::App::Cors if Travis.env == 'development'
        use Raven::Rack if Endpoint.production?
        use Rack::Protection::PathTraversal
        use Rack::SSL if Endpoint.production?
        use ActiveRecord::ConnectionAdapters::ConnectionManagement
        use ActiveRecord::QueryCache
        memcache_servers = ENV['MEMCACHIER_SERVERS']
        if Travis::Features.feature_active?(:use_rack_cache) && memcache_servers
          use Rack::Cache,
              verbose: true,
              metastore:   "memcached://#{memcache_servers}/meta-#{Travis::Api::App.deploy_sha}",
              entitystore: "memcached://#{memcache_servers}/body-#{Travis::Api::App.deploy_sha}"
        end
        use Rack::Deflater
        use Rack::PostBodyContentTypeParser
        use Rack::JSONP
        use Rack::Config do |env|
          env['travis.global_prefix'] = env['SCRIPT_NAME']
        end
        use Travis::Api::App::Middleware::ScopeCheck
        use Travis::Api::App::Middleware::Logging
        use Travis::Api::App::Middleware::Metriks
        use Travis::Api::App::Middleware::Rewrite
        Endpoint.subclasses.each do |e|
          next if e == SettingsEndpoint # TODO: add something like abstract? method to check if
                                        # class should be registered
          map(e.prefix) { run(e.new) }
        end
      end
    end

    # Rack protocol
    def call(env)
      app.call(env)
    rescue
      if Endpoint.production?
        [500, {'Content-Type' => 'application/json'}, [ERROR_RESPONSE]]
      else
        raise
      end
    end

    private

    # NOTE(review): `private` has no effect on methods defined with `def self.`;
    # the class methods below remain publicly callable.
    def self.console?
      defined? Travis::Console
    end

    def self.setup!
      setup_travis
      load_endpoints
      setup_endpoints
      @setup = true
    end

    def self.setup_travis
      Travis::Amqp.config = Travis.config.amqp
      setup_database_connections
      if Travis.env == 'production' || Travis.env == 'staging'
        Sidekiq.configure_client do |config|
          config.redis = Travis.config.redis.merge(size: 1, namespace: Travis.config.sidekiq.namespace)
        end
      end
      if Travis.env == 'production' and not console?
        setup_monitoring
      end
    end

    def self.setup_database_connections
      Travis::Database.connect
      if Travis.config.logs_database
        Log.establish_connection 'logs_database'
        Log::Part.establish_connection 'logs_database'
      end
    end

    def self.setup_monitoring
      Raven.configure do |config|
        config.dsn = Travis.config.sentry.dsn
      end if Travis.config.sentry
      Travis::LogSubscriber::ActiveRecordMetrics.attach
      Travis::Notification.setup(instrumentation: false)
      if Travis.config.librato
        email, token, source = Travis.config.librato.email,
                               Travis.config.librato.token,
                               Travis.config.librato_source
        # Log the response body so failed Librato calls are debuggable.
        on_error = proc {|ex| puts "librato error: #{ex.message} (#{ex.response.body})"}
        Metriks::LibratoMetricsReporter.new(email, token, source: source, on_error: on_error).start
      end
    end

    def self.load_endpoints
      Backports.require_relative_dir 'app/middleware'
      Backports.require_relative_dir 'app/endpoint'
    end

    def self.setup_endpoints
      Base.subclasses.each(&:setup)
    end
  end
end
|
module Tripod
  # Gem version string.
  VERSION = "0.7.1"
end
Bump version to 0.7.2.
module Tripod
  # Gem version string.
  VERSION = "0.7.2"
end
|
# frozen-string-literal: true
begin
require "aws-sdk-s3"
if Gem::Version.new(Aws::S3::GEM_VERSION) < Gem::Version.new("1.2.0")
raise "Tus::Storage::S3 requires aws-sdk-s3 version 1.2.0 or above"
end
rescue LoadError
warn "Using aws-sdk 2.x is deprecated and support for it will be removed in tus-server 2.0, use the new aws-sdk-s3 gem instead."
require "aws-sdk"
Aws.eager_autoload!(services: ["S3"])
end
require "tus/info"
require "tus/errors"
require "json"
require "cgi"
require "fiber"
require "stringio"
module Tus
module Storage
class S3
MIN_PART_SIZE = 5 * 1024 * 1024 # 5MB is the minimum part size for S3 multipart uploads
attr_reader :client, :bucket, :prefix, :upload_options
# bucket:         name of the S3 bucket (required)
# prefix:         optional key prefix under which all objects are stored
# upload_options: extra options merged into each initiate_multipart_upload call
# thread_count:   number of worker threads used when copying parts
# client_options: forwarded to Aws::S3::Resource.new (credentials, region, ...)
def initialize(bucket:, prefix: nil, upload_options: {}, thread_count: 10, **client_options)
  resource = Aws::S3::Resource.new(**client_options)
  @client = resource.client
  @bucket = resource.bucket(bucket) or fail(ArgumentError, "the :bucket option was nil")
  @prefix = prefix
  @upload_options = upload_options
  @thread_count = thread_count
end
# Initiates an S3 multipart upload for a new file and records the multipart
# id and an (empty) parts list in +info+ (mutated in place).
#
# uid  - storage key for the new file
# info - tus upload metadata hash
#
# Returns the multipart upload object.
def create_file(uid, info = {})
  tus_info = Tus::Info.new(info)
  options = upload_options.dup
  options[:content_type] = tus_info.metadata["content_type"]
  if filename = tus_info.metadata["filename"]
    # Aws-sdk-s3 doesn't sign non-ASCII characters correctly, and browsers
    # will automatically URI-decode filenames.
    filename = CGI.escape(filename).gsub("+", " ")
    options[:content_disposition] ||= "inline"
    # Fix: interpolate the escaped filename — it was computed above but never
    # used, leaving a corrupted literal placeholder in the header value.
    options[:content_disposition] += "; filename=\"#{filename}\""
  end
  multipart_upload = object(uid).initiate_multipart_upload(options)
  info["multipart_id"] = multipart_upload.id
  info["multipart_parts"] = []
  multipart_upload
end
# Concatenates previously uploaded partial files into one object by copying
# them as parts of a fresh multipart upload, then deletes the partial
# objects and their info sidecars. Aborts the multipart upload on failure.
# Returns the size of the concatenated file.
def concatenate(uid, part_uids, info = {})
  multipart_upload = create_file(uid, info)
  objects = part_uids.map { |part_uid| object(part_uid) }
  parts = copy_parts(objects, multipart_upload)
  info["multipart_parts"].concat parts
  finalize_file(uid, info)
  delete(part_uids.flat_map { |part_uid| [object(part_uid), object("#{part_uid}.info")] })
  # Tus server requires us to return the size of the concatenated file.
  object = client.head_object(bucket: bucket.name, key: object(uid).key)
  object.content_length
rescue => error
  abort_multipart_upload(multipart_upload) if multipart_upload
  raise error
end
# Appends data from +input+ to the in-progress multipart upload, splitting
# it into parts of at least MIN_PART_SIZE bytes (only the final part may be
# smaller). Parts are uploaded on background threads; networking errors are
# swallowed so the bytes uploaded so far can still be reported.
# Returns the number of bytes successfully uploaded.
def patch_file(uid, input, info = {})
  tus_info = Tus::Info.new(info)
  upload_id = info["multipart_id"]
  part_offset = info["multipart_parts"].count
  bytes_uploaded = 0
  jobs = []
  chunk = StringIO.new(input.read(MIN_PART_SIZE).to_s)
  loop do
    next_chunk = StringIO.new(input.read(MIN_PART_SIZE).to_s)
    # merge next chunk into previous if it's smaller than minimum chunk size
    if next_chunk.size < MIN_PART_SIZE
      chunk = StringIO.new(chunk.string + next_chunk.string)
      next_chunk.close
      next_chunk = nil
    end
    # abort if chunk is smaller than 5MB and is not the last chunk
    if chunk.size < MIN_PART_SIZE
      break if (tus_info.length && tus_info.offset) &&
               chunk.size + tus_info.offset < tus_info.length
    end
    thread = upload_part_thread(chunk, uid, upload_id, part_offset += 1)
    jobs << [thread, chunk]
    chunk = next_chunk or break
  end
  begin
    # Joining each thread; Thread#value re-raises any upload exception here.
    jobs.each do |thread, body|
      info["multipart_parts"] << thread.value
      bytes_uploaded += body.size
      body.close
    end
  rescue Seahorse::Client::NetworkingError => exception
    warn "ERROR: #{exception.inspect} occurred during upload"
    # ignore networking errors and return what client has uploaded so far
  end
  bytes_uploaded
end
# Completes the multipart upload using the parts recorded in +info+ and
# removes the multipart bookkeeping keys from +info+.
def finalize_file(uid, info = {})
  upload_id = info["multipart_id"]
  parts = info["multipart_parts"].map do |part|
    { part_number: part["part_number"], etag: part["etag"] }
  end
  multipart_upload = object(uid).multipart_upload(upload_id)
  multipart_upload.complete(multipart_upload: { parts: parts })
  info.delete("multipart_id")
  info.delete("multipart_parts")
end
# Fetches and parses the JSON sidecar info object for +uid+.
# Raises Tus::NotFound when no such object exists.
def read_info(uid)
  stored = object("#{uid}.info").get.body
  JSON.parse(stored.string)
rescue Aws::S3::Errors::NoSuchKey
  raise Tus::NotFound
end
# Serializes +info+ to JSON and writes it to the sidecar info object.
def update_info(uid, info)
  payload = info.to_json
  object("#{uid}.info").put(body: payload)
end
# Streams the object's content, optionally restricted to +range+ (a Range
# of byte offsets). Returns a Response (responds to #each, #length, #close)
# suitable for use directly as a Rack response body.
def get_file(uid, info = {}, range: nil)
  tus_info = Tus::Info.new(info)
  length = range ? range.size : tus_info.length
  range = "bytes=#{range.begin}-#{range.end}" if range
  chunks = object(uid).enum_for(:get, range: range)
  # We return a response object that responds to #each, #length and #close,
  # which the tus server can return directly as the Rack response.
  Response.new(chunks: chunks, length: length)
end
# Deletes the file and its sidecar info object. If the upload is still in
# progress (a multipart id is recorded), aborts the multipart upload first.
def delete_file(uid, info = {})
  if info["multipart_id"]
    multipart_upload = object(uid).multipart_upload(info["multipart_id"])
    abort_multipart_upload(multipart_upload)
    delete [object("#{uid}.info")]
  else
    delete [object(uid), object("#{uid}.info")]
  end
end
# Deletes objects last modified on or before +expiration_date+ and aborts
# stale in-progress multipart uploads (those with no recent part activity).
def expire_files(expiration_date)
  old_objects = bucket.objects.select do |object|
    object.last_modified <= expiration_date
  end
  delete(old_objects)
  bucket.multipart_uploads.each do |multipart_upload|
    # no need to check multipart uploads initiated before expiration date
    next if multipart_upload.initiated > expiration_date
    most_recent_part = multipart_upload.parts.sort_by(&:last_modified).last
    if most_recent_part.nil? || most_recent_part.last_modified <= expiration_date
      abort_multipart_upload(multipart_upload)
    end
  end
end
private
# Spawns a thread uploading one part; the thread's value is the part hash.
def upload_part_thread(body, key, upload_id, part_number)
  Thread.new do
    upload_part(body, key, upload_id, part_number)
  end
end
# Uploads one part of the multipart upload and returns its bookkeeping
# hash ({"part_number" => ..., "etag" => ...}) as stored in the info file.
def upload_part(body, key, upload_id, part_number)
  multipart_upload = object(key).multipart_upload(upload_id)
  multipart_part = multipart_upload.part(part_number)
  response = multipart_part.upload(body: body)
  { "part_number" => part_number, "etag" => response.etag }
end
# Batch-deletes the given S3 objects.
def delete(objects)
  # S3 can delete maximum of 1000 objects in a single request
  objects.each_slice(1000) do |objects_batch|
    delete_params = { objects: objects_batch.map { |object| { key: object.key } } }
    bucket.delete_objects(delete: delete_params)
  end
end
# In order to ensure the multipart upload was successfully aborted,
# we need to check whether all parts have been deleted, and retry
# the abort if the list is nonempty.
# NOTE(review): loops forever if S3 keeps reporting leftover parts —
# consider bounding the retries.
def abort_multipart_upload(multipart_upload)
  loop do
    multipart_upload.abort
    break unless multipart_upload.parts.any?
  end
rescue Aws::S3::Errors::NoSuchUpload
  # multipart upload was successfully aborted or doesn't exist
end
# Copies each source object into the multipart upload as one part, using a
# pool of @thread_count workers fed from a shared queue. Returns the part
# hashes sorted by part number.
def copy_parts(objects, multipart_upload)
  parts = compute_parts(objects, multipart_upload)
  queue = parts.inject(Queue.new) { |queue, part| queue << part }

  threads = @thread_count.times.map { copy_part_thread(queue) }

  threads.flat_map(&:value).sort_by { |part| part["part_number"] }
end
# Builds the #upload_part_copy parameter hash for copying each source
# object into the destination multipart upload (part numbers start at 1).
def compute_parts(objects, multipart_upload)
  objects.each_with_index.map do |source, index|
    {
      bucket:      multipart_upload.bucket_name,
      key:         multipart_upload.object_key,
      upload_id:   multipart_upload.id,
      copy_source: "#{source.bucket_name}/#{source.key}",
      part_number: index + 1,
    }
  end
end
# Worker thread: pops parts off the queue until it's empty and copies each
# one. On error the queue is cleared so sibling workers stop early, then
# the error propagates via Thread#value.
def copy_part_thread(queue)
  Thread.new do
    begin
      results = []
      loop do
        part = queue.deq(true) rescue break # non-blocking pop; break when empty
        results << copy_part(part)
      end
      results
    rescue
      queue.clear
      raise
    end
  end
end
# Performs a single S3 UploadPartCopy call and returns the part metadata.
def copy_part(part)
  response = client.upload_part_copy(part)

  { "part_number" => part[:part_number], "etag" => response.copy_part_result.etag }
end
# Resolves a storage key to an Aws::S3::Object, applying the storage prefix.
def object(key)
  bucket.object([*prefix, key].join("/"))
end
# Rack-compatible streaming body wrapping a chunk enumerator: responds to
# #each, #length and #close. A Fiber drives the enumerator so that #close
# can stop iteration early (e.g. when the client disconnects).
class Response
  def initialize(chunks:, length:)
    @chunks = chunks
    @length = length
  end

  attr_reader :length

  # Yields each chunk; without a block returns an Enumerator.
  def each
    return enum_for(__method__) unless block_given?

    while (chunk = fiber.resume)
      yield chunk
    end
  end

  # Tells the underlying fiber to stop, if it is still running.
  def close
    fiber.resume(:close) if fiber.alive?
  end

  private

  # Lazily-created fiber that yields chunks and exits when resumed with :close.
  def fiber
    @chunks_fiber ||= Fiber.new do
      @chunks.each do |chunk|
        break if Fiber.yield(chunk) == :close
      end
      nil
    end
  end
end
end
end
end
Improve handling aws-sdk fallback in S3 storage
Previously if neither aws-sdk-s3 nor aws-sdk were present, a deprecation
warning for "aws-sdk 2.x" would get printed. We change this behaviour to
first attempt to require aws-sdk after not succeeding with aws-sdk-s3,
and only then print a deprecation warning.
Another undesired behaviour was that if neither aws-sdk-s3 nor aws-sdk
were present, the user would get a LoadError saying that aws-sdk failed
to be required. This isn't what we want, because the user should
actually add aws-sdk-s3, not aws-sdk. We fix that by re-raising the
original aws-sdk-s3 LoadError if aws-sdk also fails to load.
# frozen-string-literal: true
begin
  require "aws-sdk-s3"
  if Gem::Version.new(Aws::S3::GEM_VERSION) < Gem::Version.new("1.2.0")
    raise "Tus::Storage::S3 requires aws-sdk-s3 version 1.2.0 or above"
  end
rescue LoadError => exception
  # Fall back to the legacy aws-sdk 2.x gem; if that is missing too,
  # re-raise the original error so the user is told to install aws-sdk-s3.
  begin
    require "aws-sdk"
    warn "Using aws-sdk 2.x is deprecated and support for it will be removed in tus-server 2.0, use the new aws-sdk-s3 gem instead."
    Aws.eager_autoload!(services: ["S3"])
  rescue LoadError
    raise exception
  end
end
require "tus/info"
require "tus/errors"
require "json"
require "cgi"
require "fiber"
require "stringio"
module Tus
module Storage
# tus storage backend that persists uploads on AWS S3. Each upload is an
# S3 multipart upload; upload metadata lives in a companion "<uid>.info"
# JSON object.
class S3
  MIN_PART_SIZE = 5 * 1024 * 1024 # 5MB is the minimum part size for S3 multipart uploads

  attr_reader :client, :bucket, :prefix, :upload_options

  # Sets up the S3 resource. Extra keyword arguments are forwarded to
  # Aws::S3::Resource.new; `thread_count` bounds the part-copy thread pool.
  def initialize(bucket:, prefix: nil, upload_options: {}, thread_count: 10, **client_options)
    resource = Aws::S3::Resource.new(**client_options)

    @client         = resource.client
    @bucket         = resource.bucket(bucket) or fail(ArgumentError, "the :bucket option was nil")
    @prefix         = prefix
    @upload_options = upload_options
    @thread_count   = thread_count
  end

  # Initiates a multipart upload for the new file and records its id and
  # (empty) part list in the info hash. Returns the MultipartUpload.
  def create_file(uid, info = {})
    tus_info = Tus::Info.new(info)

    options = upload_options.dup
    options[:content_type] = tus_info.metadata["content_type"]

    if filename = tus_info.metadata["filename"]
      # Aws-sdk-s3 doesn't sign non-ASCII characters correctly, and browsers
      # will automatically URI-decode filenames.
      filename = CGI.escape(filename).gsub("+", " ")

      options[:content_disposition] ||= "inline"
      # FIX: interpolate the escaped filename into the header value (the
      # previous literal never embedded it).
      options[:content_disposition] += "; filename=\"#{filename}\""
    end

    multipart_upload = object(uid).initiate_multipart_upload(options)

    info["multipart_id"]    = multipart_upload.id
    info["multipart_parts"] = []

    multipart_upload
  end

  # Concatenates previously uploaded partial files into a single file by
  # copying them as parts of a fresh multipart upload, then deletes the
  # partial files. Returns the size of the concatenated file; aborts the
  # multipart upload on any error.
  def concatenate(uid, part_uids, info = {})
    multipart_upload = create_file(uid, info)

    objects = part_uids.map { |part_uid| object(part_uid) }
    parts   = copy_parts(objects, multipart_upload)

    info["multipart_parts"].concat parts

    finalize_file(uid, info)

    delete(part_uids.flat_map { |part_uid| [object(part_uid), object("#{part_uid}.info")] })

    # Tus server requires us to return the size of the concatenated file.
    object = client.head_object(bucket: bucket.name, key: object(uid).key)
    object.content_length
  rescue => error
    abort_multipart_upload(multipart_upload) if multipart_upload
    raise error
  end

  # Reads data from `input` and appends it to the upload as S3 parts of at
  # least MIN_PART_SIZE each (except possibly the final part). Parts are
  # uploaded in background threads; returns the number of bytes uploaded.
  def patch_file(uid, input, info = {})
    tus_info    = Tus::Info.new(info)
    upload_id   = info["multipart_id"]
    part_offset = info["multipart_parts"].count

    bytes_uploaded = 0
    jobs           = []
    chunk          = StringIO.new(input.read(MIN_PART_SIZE).to_s)

    loop do
      next_chunk = StringIO.new(input.read(MIN_PART_SIZE).to_s)

      # merge next chunk into previous if it's smaller than minimum chunk size
      if next_chunk.size < MIN_PART_SIZE
        chunk = StringIO.new(chunk.string + next_chunk.string)
        next_chunk.close
        next_chunk = nil
      end

      # abort if chunk is smaller than 5MB and is not the last chunk
      if chunk.size < MIN_PART_SIZE
        break if (tus_info.length && tus_info.offset) &&
                 chunk.size + tus_info.offset < tus_info.length
      end

      thread = upload_part_thread(chunk, uid, upload_id, part_offset += 1)
      jobs << [thread, chunk]

      chunk = next_chunk or break
    end

    begin
      jobs.each do |thread, body|
        info["multipart_parts"] << thread.value
        bytes_uploaded += body.size
        body.close
      end
    rescue Seahorse::Client::NetworkingError => exception
      warn "ERROR: #{exception.inspect} occurred during upload"
      # ignore networking errors and return what client has uploaded so far
    end

    bytes_uploaded
  end

  # Completes the multipart upload from the recorded parts and drops the
  # multipart bookkeeping from the info hash.
  def finalize_file(uid, info = {})
    upload_id = info["multipart_id"]
    parts = info["multipart_parts"].map do |part|
      { part_number: part["part_number"], etag: part["etag"] }
    end

    multipart_upload = object(uid).multipart_upload(upload_id)
    multipart_upload.complete(multipart_upload: { parts: parts })

    info.delete("multipart_id")
    info.delete("multipart_parts")
  end

  # Fetches and parses the upload's JSON metadata; raises Tus::NotFound
  # when the ".info" object doesn't exist.
  def read_info(uid)
    response = object("#{uid}.info").get
    JSON.parse(response.body.string)
  rescue Aws::S3::Errors::NoSuchKey
    raise Tus::NotFound
  end

  # Overwrites the upload's JSON metadata object.
  def update_info(uid, info)
    object("#{uid}.info").put(body: info.to_json)
  end

  # Streams the file (optionally a byte range) as a Response object that
  # responds to #each, #length and #close, suitable as a Rack body.
  def get_file(uid, info = {}, range: nil)
    tus_info = Tus::Info.new(info)

    length = range ? range.size : tus_info.length
    range  = "bytes=#{range.begin}-#{range.end}" if range
    chunks = object(uid).enum_for(:get, range: range)

    Response.new(chunks: chunks, length: length)
  end

  # Deletes the file and its metadata; unfinished multipart uploads are
  # aborted instead.
  def delete_file(uid, info = {})
    if info["multipart_id"]
      multipart_upload = object(uid).multipart_upload(info["multipart_id"])
      abort_multipart_upload(multipart_upload)

      delete [object("#{uid}.info")]
    else
      delete [object(uid), object("#{uid}.info")]
    end
  end

  # Deletes objects last modified on or before the expiration date, and
  # aborts multipart uploads whose newest part has expired.
  def expire_files(expiration_date)
    old_objects = bucket.objects.select do |object|
      object.last_modified <= expiration_date
    end

    delete(old_objects)

    bucket.multipart_uploads.each do |multipart_upload|
      # skip multipart uploads initiated after the expiration date — they
      # are too fresh to have any expired parts
      next if multipart_upload.initiated > expiration_date
      most_recent_part = multipart_upload.parts.sort_by(&:last_modified).last
      if most_recent_part.nil? || most_recent_part.last_modified <= expiration_date
        abort_multipart_upload(multipart_upload)
      end
    end
  end

  private

  # Uploads one part in a background thread (result via Thread#value).
  def upload_part_thread(body, key, upload_id, part_number)
    Thread.new { upload_part(body, key, upload_id, part_number) }
  end

  # Uploads a single part and returns its string-keyed metadata hash.
  def upload_part(body, key, upload_id, part_number)
    multipart_upload = object(key).multipart_upload(upload_id)
    multipart_part   = multipart_upload.part(part_number)

    response = multipart_part.upload(body: body)

    { "part_number" => part_number, "etag" => response.etag }
  end

  # Batch-deletes the given objects.
  def delete(objects)
    # S3 can delete maximum of 1000 objects in a single request
    objects.each_slice(1000) do |objects_batch|
      delete_params = { objects: objects_batch.map { |object| { key: object.key } } }
      bucket.delete_objects(delete: delete_params)
    end
  end

  # In order to ensure the multipart upload was successfully aborted,
  # we need to check whether all parts have been deleted, and retry
  # the abort if the list is nonempty.
  def abort_multipart_upload(multipart_upload)
    loop do
      multipart_upload.abort
      break unless multipart_upload.parts.any?
    end
  rescue Aws::S3::Errors::NoSuchUpload
    # multipart upload was successfully aborted or doesn't exist
  end

  # Copies the source objects into the multipart upload as parts using a
  # pool of worker threads; returns part hashes sorted by part number.
  def copy_parts(objects, multipart_upload)
    parts = compute_parts(objects, multipart_upload)
    queue = parts.inject(Queue.new) { |queue, part| queue << part }

    threads = @thread_count.times.map { copy_part_thread(queue) }

    threads.flat_map(&:value).sort_by { |part| part["part_number"] }
  end

  # Builds the #upload_part_copy argument hash for each source object.
  def compute_parts(objects, multipart_upload)
    objects.map.with_index do |object, idx|
      {
        bucket:      multipart_upload.bucket_name,
        key:         multipart_upload.object_key,
        upload_id:   multipart_upload.id,
        copy_source: [object.bucket_name, object.key].join("/"),
        part_number: idx + 1,
      }
    end
  end

  # Worker: pops parts until the queue is empty; on error clears the queue
  # so siblings stop early, then re-raises (surfaces via Thread#value).
  def copy_part_thread(queue)
    Thread.new do
      begin
        results = []
        loop do
          part = queue.deq(true) rescue break # non-blocking pop
          results << copy_part(part)
        end
        results
      rescue
        queue.clear
        raise
      end
    end
  end

  # Performs one UploadPartCopy call and returns the part metadata.
  def copy_part(part)
    response = client.upload_part_copy(part)

    { "part_number" => part[:part_number], "etag" => response.copy_part_result.etag }
  end

  # Resolves a storage key to an Aws::S3::Object, applying the prefix.
  def object(key)
    bucket.object([*prefix, key].join("/"))
  end

  # Rack-compatible streaming body; a Fiber drives the chunk enumerator so
  # that #close can terminate iteration early.
  class Response
    def initialize(chunks:, length:)
      @chunks = chunks
      @length = length
    end

    def length
      @length
    end

    def each
      return enum_for(__method__) unless block_given?

      while (chunk = chunks_fiber.resume)
        yield chunk
      end
    end

    def close
      chunks_fiber.resume(:close) if chunks_fiber.alive?
    end

    private

    def chunks_fiber
      @chunks_fiber ||= Fiber.new do
        @chunks.each do |chunk|
          action = Fiber.yield chunk
          break if action == :close
        end
        nil
      end
    end
  end
end
end
end
|
require 'hashie' unless defined?(Hashie)
module Tvdbr
  # Hash-like wrapper (Hashie::Mash) around a record returned by the TVDB
  # API. Keys are normalized on construction; class macros declare typed
  # accessors for list/date/url properties.
  class DataSet < Hashie::Mash
    attr_reader :parent

    ## INSTANCE METHODS ##

    # Tvdb::DataSet.new(self, { :foo => "bar" })
    # `parent` is the owning client object, kept for follow-up API calls.
    def initialize(parent, source_hash = nil, default = nil, &block)
      @parent = parent
      source_hash = normalize_keys(source_hash) if source_hash.is_a?(Hash)
      super(source_hash, default, &block)
    end

    # Outputs: <#Tvdb::Series actors="..." added=nil added_by=nil>
    def inspect
      ret = "<##{self.class.to_s}"
      self.keys.sort.each do |key|
        ret << " #{key}=#{self[key].inspect}"
      end
      ret << ">"
      ret
    end

    ## CLASS METHODS ##

    # Aliases the original property to the new method name
    #   alias_property :old, :new
    def self.alias_property(original, name)
      define_method(name) { self.send(original) }
    end

    # Turns a property "a | b | c" => ['a', 'b', 'c']
    #   listify :lista, :listb
    # NOTE(review): [1..-1] drops the first character — presumably TVDB
    # lists carry a leading "|"; confirm against actual API data.
    def self.listify(*attrs)
      attrs.each do |a|
        define_method(a) { self[a] ? self[a][1..-1].split("|").map(&:strip) : [] }
      end
    end

    # Turns a property into a date object (nil when missing or unparsable)
    #   dateify :release_date
    def self.dateify(*attrs)
      attrs.each do |a|
        define_method(a) { Time.parse(self[a]) rescue nil if self[a] }
      end
    end

    # Turns a relative image link to a full tvdb link url
    #   absolutize :file_name
    def self.absolutize(*attrs)
      attrs.each do |a|
        define_method(a) { File.join("http://www.thetvdb.com/banners/", self[a]) if self[a] }
      end
    end

    private

    # Translates all keys to underscored form (keys that fail to underscore
    # are kept as-is) and normalizes each value.
    # NOTE(review): despite the original "to a symbol" comment, underscore
    # returns a String — Hashie::Mash stringifies keys anyway.
    def normalize_keys(hash)
      hash.inject({}) do |options, (key, value)|
        options[(underscore(key) rescue key) || key] = normalize_value(value)
        options
      end
    end

    # Normalizes a value for the formatted hash
    # Sometimes TVDB returns a hash with a "__content__" key which needs to be removed
    def normalize_value(val)
      val.respond_to?(:has_key?) && val.has_key?("__content__") ? val["__content__"] : val
    end

    # CamelCase / hyphenated word => snake_case
    def underscore(camel_cased_word)
      word = camel_cased_word.to_s.dup
      word.gsub!(/::/, '/')
      word.gsub!(/([A-Z]+)([A-Z][a-z])/,'\1_\2')
      word.gsub!(/([a-z\d])([A-Z])/,'\1_\2')
      word.tr!("-", "_")
      word.downcase!
      word
    end
  end
end
[data_set] Be more generic about extracting content from nested hashes
require 'hashie' unless defined?(Hashie)
module Tvdbr
  # Hash-like wrapper (Hashie::Mash) around a record returned by the TVDB
  # API. Keys are normalized on construction; class macros declare typed
  # accessors for list/date/url properties.
  class DataSet < Hashie::Mash
    # Raised when the source data cannot be turned into a Mash.
    class InvalidFormat < StandardError; end

    attr_reader :parent

    ## INSTANCE METHODS ##

    # Tvdb::DataSet.new(self, { :foo => "bar" })
    # `parent` is the owning client object, kept for follow-up API calls.
    def initialize(parent, source_hash = nil, default = nil, &block)
      @parent = parent
      source_hash = normalize_keys(source_hash) if source_hash.is_a?(Hash)
      super(source_hash, default, &block)
    rescue ArgumentError => e # #<ArgumentError: wrong number of arguments (0 for 1)>
      raise InvalidFormat, "#{self.class.name} parse failed with source #{source_hash.inspect}"
    end

    # Outputs: <#Tvdb::Series actors="..." added=nil added_by=nil>
    def inspect
      ret = "<##{self.class.to_s}"
      self.keys.sort.each do |key|
        ret << " #{key}=#{self[key].inspect}"
      end
      ret << ">"
      ret
    end

    ## CLASS METHODS ##

    # Aliases the original property to the new method name
    #   alias_property :old, :new
    def self.alias_property(original, name)
      define_method(name) { self.send(original) }
    end

    # Turns a property "a | b | c" => ['a', 'b', 'c']
    #   listify :lista, :listb
    # NOTE(review): [1..-1] drops the first character — presumably TVDB
    # lists carry a leading "|"; confirm against actual API data.
    def self.listify(*attrs)
      attrs.each do |a|
        define_method(a) { self[a] ? self[a][1..-1].split("|").map(&:strip) : [] }
      end
    end

    # Turns a property into a date object (nil when missing or unparsable)
    #   dateify :release_date
    def self.dateify(*attrs)
      attrs.each do |a|
        define_method(a) { Time.parse(self[a]) rescue nil if self[a] }
      end
    end

    # Turns a relative image link to a full tvdb link url
    #   absolutize :file_name
    def self.absolutize(*attrs)
      attrs.each do |a|
        define_method(a) { File.join("http://www.thetvdb.com/banners/", self[a]) if self[a] }
      end
    end

    private

    # Translates all keys to underscored form (keys that fail to underscore
    # are kept as-is) and normalizes each value.
    # NOTE(review): despite the original "to a symbol" comment upstream,
    # underscore returns a String — Hashie::Mash stringifies keys anyway.
    def normalize_keys(hash)
      hash.inject({}) do |options, (key, value)|
        options[(underscore(key) rescue key) || key] = normalize_value(value)
        options
      end
    end

    # Normalizes a value for the formatted hash values
    # TVDB hashes should not contain more hashes
    # Sometimes TVDB returns a hash with content inside which needs to be extracted
    def normalize_value(val)
      if val.is_a?(Hash)
        val = val["__content__"] if val.has_key?("__content__")
        val.to_s
      else # any other value
        val
      end
    end

    # CamelCase / hyphenated word => snake_case
    def underscore(camel_cased_word)
      word = camel_cased_word.to_s.dup
      word.gsub!(/::/, '/')
      word.gsub!(/([A-Z]+)([A-Z][a-z])/,'\1_\2')
      word.gsub!(/([a-z\d])([A-Z])/,'\1_\2')
      word.tr!("-", "_")
      word.downcase!
      word
    end
  end
end
|
#--
###############################################################################
# #
# twitter2jabber - Twitter-to-Jabber gateway. #
# #
# Copyright (C) 2009 Jens Wille #
# #
# Authors: #
# Jens Wille <ww@blackwinter.de> #
# #
# twitter2jabber is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; either version 3 of the License, or (at your option) #
# any later version. #
# #
# twitter2jabber is distributed in the hope that it will be useful, but #
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY #
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with twitter2jabber. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
#++
require 'time'
require 'erb'
require 'rubygems'
require 'twitter'
require 'xmpp4r-simple'
require 'shorturl'
require 'twitter2jabber/version'
# Twitter-to-Jabber gateway: polls the Twitter friends timeline and
# delivers new tweets to Jabber recipients; incoming Jabber chat messages
# from those recipients are interpreted as commands (help, debug, block,
# favorite, reply, length check, or a plain status update).
class Twitter2Jabber
  MAX_LENGTH = 140 # Twitter status length limit

  DEFAULT_PAUSE = 60 # seconds between Twitter polls

  DEFAULT_FORMATS = %w[txt]

  DEFAULT_TEMPLATES = File.expand_path(File.join(File.dirname(__FILE__), %w[.. sample templates]))

  JABBER_NS = 'http://jabber.org/protocol/xhtml-im'
  XHTML_NS = 'http://www.w3.org/1999/xhtml'

  # Builds a gateway and enters the polling loop.
  def self.loop(options, recipients = [], pause = nil, &block)
    new(options).loop(recipients, pause, &block)
  end

  # Builds a gateway and performs a single deliver/post cycle.
  def self.run(options, recipients = [], &block)
    new(options).run(recipients, &block)
  end

  attr_reader :id, :verbose, :debug, :twitter, :jabber, :filter, :formats, :templates, :_erb

  # Connects to Twitter and Jabber. `options` must contain :twitter and
  # :jabber credential hashes (:user/:pass); optional: :verbose, :debug,
  # :filter, :formats, :template_dir. An optional block acts as a filter.
  def initialize(options, &block)
    [:twitter, :jabber].each { |client|
      raise ArgumentError, "#{client} config missing" unless options[client].is_a?(Hash)
    }

    @id = "#{options[:twitter][:user]} -> #{options[:jabber][:user]}"

    @verbose = options[:verbose]
    @debug = options[:debug]

    @twitter = twitter_connect(options[:twitter])
    @jabber = jabber_connect(options[:jabber])

    @filter = options[:filter] || block
    @formats = options[:formats] || DEFAULT_FORMATS

    # template extension ("txt", "html", ...) => template source
    @templates = Dir[
      File.join(options[:template_dir] || DEFAULT_TEMPLATES, 'tweet.*')
    ].inject({}) { |hash, template|
      hash.update(File.extname(template).sub(/\A\./, '') => File.read(template))
    }

    # per-format compiled ERB templates, built on demand
    @_erb = Hash.new { |hash, format|
      template = templates[format]
      hash[format] = template && ERB.new(template)
    }
  end

  # One cycle: deliver new tweets (when `flag` is set) and process
  # incoming Jabber messages.
  def run(recipients = [], seen = {}, flag = true, &block)
    deliver_tweets(recipients, seen, &block) if flag
    post_messages(recipients)
  end

  # Main loop. Jabber messages are checked `ratio` times more often than
  # the Twitter timeline; SIGINT ends the loop.
  def loop(recipients = [], pause = nil, &block)
    pause ||= DEFAULT_PAUSE

    # jabber/twitter ratio
    ratio = 10
    pause /= ratio

    # sleep at least one second
    pause = 1 if pause < 1

    # `seen` marks a tweet id as seen on first lookup, returning false once
    i, seen = 1, Hash.new { |h, k| h[k] = true; false }

    trap(:INT) { i = -1 }

    while i > 0
      run(recipients, seen, i % ratio == 1, &block)
      sleep pause
      i += 1
    end
  end

  # Sends every unseen, unfiltered tweet to all recipients.
  def deliver_tweets(recipients, seen = {}, &block)
    get_tweets.each { |tweet|
      next if seen[tweet.id]

      logt tweet.id

      # apply filters
      next if filter && !filter[tweet]
      next if block && !block[tweet]

      msg = format_tweet(tweet)

      recipients.each { |recipient|
        deliver(recipient, msg)
      }

      sleep 1
    }
  end

  # Processes pending Jabber chat messages from allowed senders as commands.
  def post_messages(recipients = [])
    allowed = %r{\A(?:#{recipients.map { |r| Regexp.escape(r) }.join('|')})\z}i

    jabber.received_messages { |msg|
      next unless msg.type == :chat
      next unless msg.from.bare.to_s =~ allowed

      logj msg.id

      handle_command(msg.body, msg.from)
    }
  end

  private

  # Authenticates against Twitter and verifies the credentials.
  def twitter_connect(options)
    auth = Twitter::HTTPAuth.new(options[:user], options[:pass])
    client = Twitter::Base.new(auth)

    # verify credentials
    client.verify_credentials

    logt "connected #{Time.now}"

    client
  rescue Twitter::TwitterError => err
    raise "Can't connect to Twitter with ID '#{options[:user]}': #{err}"
  end

  # Connects the Jabber client.
  def jabber_connect(options)
    client = Jabber::Simple.new(options[:user], options[:pass])

    logj "connected #{Time.now}"

    client
  rescue Jabber::JabberError => err
    raise "Can't connect to Jabber with JID '#{options[:user]}': #{err}"
  end

  # Fetches the friends timeline sorted chronologically; retries forever
  # on errors after a pause.
  # NOTE(review): the sort_by block also mutates tweet.created_at into a
  # Time object as a side effect — confirm the templates rely on that.
  def get_tweets
    twitter.friends_timeline.sort_by { |tweet|
      tweet.created_at = Time.parse(tweet.created_at)
    }
  rescue Twitter::TwitterError, Timeout::Error
    sleep DEFAULT_PAUSE
    retry
  rescue StandardError => err
    warn "#{err} (#{err.class})"
    sleep DEFAULT_PAUSE
    retry
  end

  # Renders the tweet through each format's ERB template into one Jabber
  # message (`user` and `tweet` are visible to the templates via binding).
  def format_tweet(tweet)
    user = tweet.user

    msg = Jabber::Message.new.set_type(:chat)

    formats.each { |format|
      if erb = _erb[format]
        msg.add_element(format_element(format, erb.result(binding)))
      end
    }

    msg
  end

  # Builds the message body element; HTML bodies are wrapped in an XHTML-IM
  # <html> element.
  # cf. <http://devblog.famundo.com/articles/2006/10/18/ruby-and-xmpp-jabber-part-3-adding-html-to-the-messages>
  def format_element(format, text)
    body = REXML::Element.new('body')

    case format
    when 'html'
      REXML::Text.new(process_html(text), false, body, true, nil, /.^/)

      html = REXML::Element.new('html').add_namespace(JABBER_NS)
      html.add(body.add_namespace(XHTML_NS))
      html
    else
      REXML::Text.new(text, true, body, true, nil, /.^/)
      body
    end
  end

  # Turns @mentions into links to the user's Twitter page.
  def process_html(text)
    text.gsub(/((?:\A|\s)@)(\w+)/, '\1<a href="http://twitter.com/\2">\2</a>')
  end

  # Parses and executes a command message; with `execute` false only the
  # resulting status text is returned (used for length checks).
  def handle_command(body, from, execute = true)
    case body
    when /\Ahe?(?:lp)?\z/i
      deliver(from, <<-HELP) if execute
h[e[lp]] -- Print this help
de[bug] -- Print debug mode
de[bug] on|off -- Turn debug mode on/off
bl[ock] #ID -- Block ID
fa[v[orite]] #ID -- Create favorite #ID
re[ply] #ID[:] [!] STATUS -- Reply to ID (Force if too long)
le[n[gth]] STATUS -- Determine length
[!] STATUS -- Update status (Force if too long)
(Note: Message body must be shorter than #{MAX_LENGTH} characters)
      HELP
    when /\Ade(?:bug)?(?:\s+(on|off))?\z/i
      if execute
        flag = $1.downcase if $1

        case flag
        when 'on'
          @debug = true
        when 'off'
          @debug = false
        end

        deliver(from, "DEBUG = #{debug ? 'on' : 'off'}")
      end
    when /\Abl(?:ock)?\s+#?(\d+)\z/i
      twitter.block($1) if execute && !debug
    when /\Afav?(?:orite)?\s+#?(\d+)\z/i
      twitter.favorite_create($1) if execute && !debug
    else
      options = {}

      if execute && body.sub!(/\Alen?(?:gth)?\s+/i, '')
        if body = handle_command(body, from, false)
          length = body.length
          hint = length <= MAX_LENGTH ? 'OK' : 'TOO LONG'

          deliver(from, "#{length} [#{hint}]: #{body}")
        end

        return
      end

      if body.sub!(/\Are(?:ply)?\s+#?(\d+):?\s+/i, '')
        options[:in_reply_to_status_id] = $1
      end

      if body.sub!(/\A!\s+/, '')
        force = true
      end

      # shorten long URLs to stay under the status length limit
      body.gsub!(/https?:\/\/\S+/) { |match|
        match.length < 30 ? match : ShortURL.shorten(match)
      }

      return body unless execute

      if force || body.length <= MAX_LENGTH
        update(body, options)
      else
        deliver(from, "MSG TOO LONG (> #{MAX_LENGTH}): #{body}")
      end
    end
  end

  # Sends a Jabber message unless in debug mode (then only logs).
  def deliver(recipient, msg)
    if debug
      logj "#{recipient}: #{msg}", true
      return
    end

    jabber.deliver(recipient, msg)
  end

  # Posts a Twitter status update unless in debug mode (then only logs).
  def update(msg, options = {})
    if debug
      logt "#{msg} (#{options.inspect})", true
      return
    end

    twitter.update(msg, options)
  end

  # NOTE(review): `verbose = verbose` as a default references the parameter
  # itself (nil) on modern Rubies, not the attr_reader — verify intent.
  def log(msg, verbose = verbose)
    warn "[#{id}] #{msg}" if verbose
  end

  def logt(msg, verbose = verbose)
    log("TWITTER #{msg}", verbose)
  end

  def logj(msg, verbose = verbose)
    log("JABBER #{msg}", verbose)
  end
end
Log shutdown.
#--
###############################################################################
# #
# twitter2jabber - Twitter-to-Jabber gateway. #
# #
# Copyright (C) 2009 Jens Wille #
# #
# Authors: #
# Jens Wille <ww@blackwinter.de> #
# #
# twitter2jabber is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; either version 3 of the License, or (at your option) #
# any later version. #
# #
# twitter2jabber is distributed in the hope that it will be useful, but #
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY #
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with twitter2jabber. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
#++
require 'time'
require 'erb'
require 'rubygems'
require 'twitter'
require 'xmpp4r-simple'
require 'shorturl'
require 'twitter2jabber/version'
# Twitter-to-Jabber gateway: polls the Twitter friends timeline and
# delivers new tweets to Jabber recipients; incoming Jabber chat messages
# from those recipients are interpreted as commands (help, debug, block,
# favorite, reply, length check, or a plain status update).
class Twitter2Jabber
  MAX_LENGTH = 140 # Twitter status length limit

  DEFAULT_PAUSE = 60 # seconds between Twitter polls

  DEFAULT_FORMATS = %w[txt]

  DEFAULT_TEMPLATES = File.expand_path(File.join(File.dirname(__FILE__), %w[.. sample templates]))

  JABBER_NS = 'http://jabber.org/protocol/xhtml-im'
  XHTML_NS = 'http://www.w3.org/1999/xhtml'

  # Builds a gateway and enters the polling loop.
  def self.loop(options, recipients = [], pause = nil, &block)
    new(options).loop(recipients, pause, &block)
  end

  # Builds a gateway and performs a single deliver/post cycle.
  def self.run(options, recipients = [], &block)
    new(options).run(recipients, &block)
  end

  attr_reader :id, :verbose, :debug, :twitter, :jabber, :filter, :formats, :templates, :_erb

  # Connects to Twitter and Jabber. `options` must contain :twitter and
  # :jabber credential hashes (:user/:pass); optional: :verbose, :debug,
  # :filter, :formats, :template_dir. An optional block acts as a filter.
  def initialize(options, &block)
    [:twitter, :jabber].each { |client|
      raise ArgumentError, "#{client} config missing" unless options[client].is_a?(Hash)
    }

    @id = "#{options[:twitter][:user]} -> #{options[:jabber][:user]}"

    @verbose = options[:verbose]
    @debug = options[:debug]

    @twitter = twitter_connect(options[:twitter])
    @jabber = jabber_connect(options[:jabber])

    @filter = options[:filter] || block
    @formats = options[:formats] || DEFAULT_FORMATS

    # template extension ("txt", "html", ...) => template source
    @templates = Dir[
      File.join(options[:template_dir] || DEFAULT_TEMPLATES, 'tweet.*')
    ].inject({}) { |hash, template|
      hash.update(File.extname(template).sub(/\A\./, '') => File.read(template))
    }

    # per-format compiled ERB templates, built on demand
    @_erb = Hash.new { |hash, format|
      template = templates[format]
      hash[format] = template && ERB.new(template)
    }
  end

  # One cycle: deliver new tweets (when `flag` is set) and process
  # incoming Jabber messages.
  def run(recipients = [], seen = {}, flag = true, &block)
    deliver_tweets(recipients, seen, &block) if flag
    post_messages(recipients)
  end

  # Main loop. Jabber messages are checked `ratio` times more often than
  # the Twitter timeline; SIGINT logs and ends the loop.
  def loop(recipients = [], pause = nil, &block)
    pause ||= DEFAULT_PAUSE

    # jabber/twitter ratio
    ratio = 10
    pause /= ratio

    # sleep at least one second
    pause = 1 if pause < 1

    # `seen` marks a tweet id as seen on first lookup, returning false once
    i, seen = 1, Hash.new { |h, k| h[k] = true; false }

    trap(:INT) {
      log "SIGINT received #{Time.now}, shutting down..."
      i = -1
    }

    while i > 0
      run(recipients, seen, i % ratio == 1, &block)
      sleep pause
      i += 1
    end

    log "KTHXBYE (#{Time.now})"
  end

  # Sends every unseen, unfiltered tweet to all recipients.
  def deliver_tweets(recipients, seen = {}, &block)
    get_tweets.each { |tweet|
      next if seen[tweet.id]

      logt tweet.id

      # apply filters
      next if filter && !filter[tweet]
      next if block && !block[tweet]

      msg = format_tweet(tweet)

      recipients.each { |recipient|
        deliver(recipient, msg)
      }

      sleep 1
    }
  end

  # Processes pending Jabber chat messages from allowed senders as commands.
  def post_messages(recipients = [])
    allowed = %r{\A(?:#{recipients.map { |r| Regexp.escape(r) }.join('|')})\z}i

    jabber.received_messages { |msg|
      next unless msg.type == :chat
      next unless msg.from.bare.to_s =~ allowed

      logj msg.id

      handle_command(msg.body, msg.from)
    }
  end

  private

  # Authenticates against Twitter and verifies the credentials.
  def twitter_connect(options)
    auth = Twitter::HTTPAuth.new(options[:user], options[:pass])
    client = Twitter::Base.new(auth)

    # verify credentials
    client.verify_credentials

    logt "connected #{Time.now}"

    client
  rescue Twitter::TwitterError => err
    raise "Can't connect to Twitter with ID '#{options[:user]}': #{err}"
  end

  # Connects the Jabber client.
  def jabber_connect(options)
    client = Jabber::Simple.new(options[:user], options[:pass])

    logj "connected #{Time.now}"

    client
  rescue Jabber::JabberError => err
    raise "Can't connect to Jabber with JID '#{options[:user]}': #{err}"
  end

  # Fetches the friends timeline sorted chronologically; retries forever
  # on errors after a pause.
  # NOTE(review): the sort_by block also mutates tweet.created_at into a
  # Time object as a side effect — confirm the templates rely on that.
  def get_tweets
    twitter.friends_timeline.sort_by { |tweet|
      tweet.created_at = Time.parse(tweet.created_at)
    }
  rescue Twitter::TwitterError, Timeout::Error
    sleep DEFAULT_PAUSE
    retry
  rescue StandardError => err
    warn "#{err} (#{err.class})"
    sleep DEFAULT_PAUSE
    retry
  end

  # Renders the tweet through each format's ERB template into one Jabber
  # message (`user` and `tweet` are visible to the templates via binding).
  def format_tweet(tweet)
    user = tweet.user

    msg = Jabber::Message.new.set_type(:chat)

    formats.each { |format|
      if erb = _erb[format]
        msg.add_element(format_element(format, erb.result(binding)))
      end
    }

    msg
  end

  # Builds the message body element; HTML bodies are wrapped in an XHTML-IM
  # <html> element.
  # cf. <http://devblog.famundo.com/articles/2006/10/18/ruby-and-xmpp-jabber-part-3-adding-html-to-the-messages>
  def format_element(format, text)
    body = REXML::Element.new('body')

    case format
    when 'html'
      REXML::Text.new(process_html(text), false, body, true, nil, /.^/)

      html = REXML::Element.new('html').add_namespace(JABBER_NS)
      html.add(body.add_namespace(XHTML_NS))
      html
    else
      REXML::Text.new(text, true, body, true, nil, /.^/)
      body
    end
  end

  # Turns @mentions into links to the user's Twitter page.
  def process_html(text)
    text.gsub(/((?:\A|\s)@)(\w+)/, '\1<a href="http://twitter.com/\2">\2</a>')
  end

  # Parses and executes a command message; with `execute` false only the
  # resulting status text is returned (used for length checks).
  def handle_command(body, from, execute = true)
    case body
    when /\Ahe?(?:lp)?\z/i
      deliver(from, <<-HELP) if execute
h[e[lp]] -- Print this help
de[bug] -- Print debug mode
de[bug] on|off -- Turn debug mode on/off
bl[ock] #ID -- Block ID
fa[v[orite]] #ID -- Create favorite #ID
re[ply] #ID[:] [!] STATUS -- Reply to ID (Force if too long)
le[n[gth]] STATUS -- Determine length
[!] STATUS -- Update status (Force if too long)
(Note: Message body must be shorter than #{MAX_LENGTH} characters)
      HELP
    when /\Ade(?:bug)?(?:\s+(on|off))?\z/i
      if execute
        flag = $1.downcase if $1

        case flag
        when 'on'
          @debug = true
        when 'off'
          @debug = false
        end

        deliver(from, "DEBUG = #{debug ? 'on' : 'off'}")
      end
    when /\Abl(?:ock)?\s+#?(\d+)\z/i
      twitter.block($1) if execute && !debug
    when /\Afav?(?:orite)?\s+#?(\d+)\z/i
      twitter.favorite_create($1) if execute && !debug
    else
      options = {}

      if execute && body.sub!(/\Alen?(?:gth)?\s+/i, '')
        if body = handle_command(body, from, false)
          length = body.length
          hint = length <= MAX_LENGTH ? 'OK' : 'TOO LONG'

          deliver(from, "#{length} [#{hint}]: #{body}")
        end

        return
      end

      if body.sub!(/\Are(?:ply)?\s+#?(\d+):?\s+/i, '')
        options[:in_reply_to_status_id] = $1
      end

      if body.sub!(/\A!\s+/, '')
        force = true
      end

      # shorten long URLs to stay under the status length limit
      body.gsub!(/https?:\/\/\S+/) { |match|
        match.length < 30 ? match : ShortURL.shorten(match)
      }

      return body unless execute

      if force || body.length <= MAX_LENGTH
        update(body, options)
      else
        deliver(from, "MSG TOO LONG (> #{MAX_LENGTH}): #{body}")
      end
    end
  end

  # Sends a Jabber message unless in debug mode (then only logs).
  def deliver(recipient, msg)
    if debug
      logj "#{recipient}: #{msg}", true
      return
    end

    jabber.deliver(recipient, msg)
  end

  # Posts a Twitter status update unless in debug mode (then only logs).
  def update(msg, options = {})
    if debug
      logt "#{msg} (#{options.inspect})", true
      return
    end

    twitter.update(msg, options)
  end

  # NOTE(review): `verbose = verbose` as a default references the parameter
  # itself (nil) on modern Rubies, not the attr_reader — verify intent.
  def log(msg, verbose = verbose)
    warn "[#{id}] #{msg}" if verbose
  end

  def logt(msg, verbose = verbose)
    log("TWITTER #{msg}", verbose)
  end

  def logj(msg, verbose = verbose)
    log("JABBER #{msg}", verbose)
  end
end
|
require 'singleton'
# Singleton wrapper around the Twitter REST client, configured from Rails
# secrets.
class TwitterClient
  include Singleton

  def initialize
    # Singleton#initialize runs only once per process, so the ||= guard
    # is effectively moot.
    @client ||= initialize_client
  end

  # Posts `content` as a status update.
  def send_tweet content
    @client.update content
  end

  private

  # Builds the REST client from Rails secrets.
  def initialize_client
    Twitter::REST::Client.new do |config|
      config.consumer_key = Rails.application.secrets.twitter_consumer_key
      config.consumer_secret = Rails.application.secrets.twitter_consumer_secret
      config.access_token = Rails.application.secrets.twitter_access_token
      config.access_token_secret = Rails.application.secrets.twitter_access_secret
    end
  end
end
Hotfix: load Twitter credentials from either the secrets file or environment variables
require 'singleton'
# Singleton wrapper around the Twitter REST client. Credentials are read
# from Rails secrets, falling back to environment variables.
class TwitterClient
  include Singleton

  def initialize
    secrets = Rails.application.secrets
    @twitter_consumer_key    = secrets.twitter_consumer_key    || ENV["TWITTER_CONSUMER_KEY"]
    @twitter_consumer_secret = secrets.twitter_consumer_secret || ENV["TWITTER_CONSUMER_SECRET"]
    @twitter_access_token    = secrets.twitter_access_token    || ENV["TWITTER_ACCESS_TOKEN"]
    @twitter_access_secret   = secrets.twitter_access_secret   || ENV["TWITTER_ACCESS_SECRET"]
    # Singleton#initialize runs exactly once per process, so the previous
    # `@client ||=` guard was dead code — plain assignment suffices.
    @client = initialize_client
  end

  # Posts `content` as a status update.
  def send_tweet(content)
    @client.update(content)
  end

  private

  # Builds the REST client from the credentials gathered in #initialize.
  def initialize_client
    Twitter::REST::Client.new do |config|
      config.consumer_key        = @twitter_consumer_key
      config.consumer_secret     = @twitter_consumer_secret
      config.access_token        = @twitter_access_token
      config.access_token_secret = @twitter_access_secret
    end
  end
end
|
require 'typhoeus/hydra/callbacks'
require 'typhoeus/hydra/connect_options'
require 'typhoeus/hydra/stubbing'
module Typhoeus
class Hydra
include ConnectOptions
include Stubbing
extend Callbacks
# Sets up the libcurl multi handle, a pool of easy handles, memoization
# and caching state, and the pending-request queue.
#
# Options: :initial_pool_size (default 10), :max_concurrency (default 200).
def initialize(options = {})
  @memoize_requests = true
  @multi = Multi.new
  @easy_pool = []
  initial_pool_size = options[:initial_pool_size] || 10
  @max_concurrency = options[:max_concurrency] || 200
  initial_pool_size.times { @easy_pool << Easy.new }
  @memoized_requests = {}
  @retrieved_from_cache = {}
  @queued_requests = []
  @running_requests = 0

  self.stubs = []
  @active_stubs = []
end
# Global default hydra instance (lazily created).
def self.hydra
  @hydra ||= new
end
# Replaces the global default hydra instance.
def self.hydra=(val)
  @hydra = val
end
# clears the queue thus aborting the rest of the run
def abort
  @queued_requests.clear
end
# Removes both cache callbacks, disabling external response caching.
def clear_cache_callbacks
  @cache_setter = nil
  @cache_getter = nil
end
# Queues all pending requests without the concurrency limit and performs
# the multi loop without waiting on responses.
def fire_and_forget
  @queued_requests.each {|r| queue(r, false)}
  @multi.fire_and_forget
end
# Queues a request for execution. Stubbed requests are answered directly;
# live requests respect the concurrency limit and, for GETs, are memoized
# so identical URLs within one run share a single response.
def queue(request, obey_concurrency_limit = true)
  return if assign_to_stub(request)

  # At this point, we are running over live HTTP. Make sure we haven't
  # disabled live requests.
  check_allow_net_connect!(request)

  if @running_requests >= @max_concurrency && obey_concurrency_limit
    @queued_requests << request
  else
    if request.method == :get
      if @memoize_requests && @memoized_requests.has_key?(request.url)
        if response = @retrieved_from_cache[request.url]
          # the first GET for this URL already completed via the cache —
          # reuse its response immediately
          request.response = response
          request.call_handlers
        else
          # an identical GET is in flight; piggyback on its response
          @memoized_requests[request.url] << request
        end
      else
        @memoized_requests[request.url] = [] if @memoize_requests
        get_from_cache_or_queue(request)
      end
    else
      get_from_cache_or_queue(request)
    end
  end
end
# Executes all queued requests: first plays back responses for any active
# stubs, then performs the libcurl multi loop. Memoization caches are
# reset afterwards.
def run
  while !@active_stubs.empty?
    m = @active_stubs.first
    while request = m.requests.shift
      response = m.response
      response.request = request
      handle_request(request, response)
    end
    @active_stubs.delete(m)
  end

  @multi.perform
  @memoized_requests = {}
  @retrieved_from_cache = {}
end
def disable_memoization
@memoize_requests = false
end
def cache_getter(&block)
@cache_getter = block
end
def cache_setter(&block)
@cache_setter = block
end
def on_complete(&block)
@on_complete = block
end
def on_complete=(proc)
@on_complete = proc
end
def get_from_cache_or_queue(request)
if @cache_getter
val = @cache_getter.call(request)
if val
@retrieved_from_cache[request.url] = val
handle_request(request, val, false)
else
@multi.add(get_easy_object(request))
end
else
@multi.add(get_easy_object(request))
end
end
private :get_from_cache_or_queue
# Check out an Easy handle from the pool (allocating one if the pool is
# empty), configure it from +request+, and wire up completion callbacks.
# Counts the request as running.
def get_easy_object(request)
@running_requests += 1
easy = @easy_pool.pop || Easy.new
easy.verbose = request.verbose
# HTTP auth; auth_method maps to a CURLAUTH_* constant when given.
if request.username || request.password
auth = { :username => request.username, :password => request.password }
auth[:method] = Typhoeus::Easy::AUTH_TYPES["CURLAUTH_#{request.auth_method.to_s.upcase}".to_sym] if request.auth_method
easy.auth = auth
end
# Proxy server; proxy_type maps to a CURLPROXY_* constant when given.
if request.proxy
proxy = { :server => request.proxy }
proxy[:type] = Typhoeus::Easy::PROXY_TYPES["CURLPROXY_#{request.proxy_type.to_s.upcase}".to_sym] if request.proxy_type
easy.proxy = proxy if request.proxy
end
if request.proxy_username || request.proxy_password
auth = { :username => request.proxy_username, :password => request.proxy_password }
auth[:method] = Typhoeus::Easy::AUTH_TYPES["CURLAUTH_#{request.proxy_auth_method.to_s.upcase}".to_sym] if request.proxy_auth_method
easy.proxy_auth = auth
end
# Copy the remaining request attributes onto the handle.
easy.url = request.url
easy.method = request.method
easy.params = request.params if request.method == :post && !request.params.nil?
easy.headers = request.headers if request.headers
easy.request_body = request.body if request.body
easy.timeout = request.timeout if request.timeout
easy.connect_timeout = request.connect_timeout if request.connect_timeout
easy.follow_location = request.follow_location if request.follow_location
easy.max_redirects = request.max_redirects if request.max_redirects
easy.disable_ssl_peer_verification if request.disable_ssl_peer_verification
easy.ssl_cert = request.ssl_cert
easy.ssl_cert_type = request.ssl_cert_type
easy.ssl_key = request.ssl_key
easy.ssl_key_type = request.ssl_key_type
easy.ssl_key_password = request.ssl_key_password
easy.ssl_cacert = request.ssl_cacert
easy.ssl_capath = request.ssl_capath
# NOTE(review): verbose was already assigned above; this re-assignment
# is redundant.
easy.verbose = request.verbose
# Success and failure both free a concurrency slot, deliver the
# response, and return the handle to the pool.
easy.on_success do |easy|
queue_next
handle_request(request, response_from_easy(easy, request))
release_easy_object(easy)
end
easy.on_failure do |easy|
queue_next
handle_request(request, response_from_easy(easy, request))
release_easy_object(easy)
end
easy.set_headers
easy
end
private :get_easy_object
# Called when a request finishes: free the concurrency slot and start
# the next waiting request, if any.
# NOTE(review): Array#pop dequeues the most recently queued request
# (LIFO); confirm callers do not expect FIFO (#shift) ordering.
def queue_next
@running_requests -= 1
queue(@queued_requests.pop) unless @queued_requests.empty?
end
private :queue_next
def release_easy_object(easy)
easy.reset
@easy_pool.push easy
end
private :release_easy_object
# Finish a request: attach the response, fire the global hooks, write to
# the user-provided cache, run the per-request handlers, and replay the
# response to any requests memoized on the same URL.
#
# live_request - false when the response came from the cache, so it is
#                not written back into the cache.
def handle_request(request, response, live_request = true)
request.response = response
self.class.run_global_hooks_for(:after_request_before_on_complete,
request)
if live_request && request.cache_timeout && @cache_setter
@cache_setter.call(request)
end
@on_complete.call(response) if @on_complete
request.call_handlers
# Deliver the same response to every GET memoized on this URL.
if requests = @memoized_requests[request.url]
requests.each do |r|
r.response = response
r.call_handlers
end
end
end
private :handle_request
# Build a Typhoeus::Response from a completed Easy handle, copying over
# the status code, headers, body, libcurl timing metrics and the final
# URL after redirects.
def response_from_easy(easy, request)
attributes = {
:code => easy.response_code,
:headers => easy.response_header,
:body => easy.response_body,
:time => easy.total_time_taken,
:start_transfer_time => easy.start_transfer_time,
:app_connect_time => easy.app_connect_time,
:pretransfer_time => easy.pretransfer_time,
:connect_time => easy.connect_time,
:name_lookup_time => easy.name_lookup_time,
:effective_url => easy.effective_url,
:request => request
}
Response.new(attributes)
end
private :response_from_easy
end
end
abort(): updated comments
require 'typhoeus/hydra/callbacks'
require 'typhoeus/hydra/connect_options'
require 'typhoeus/hydra/stubbing'
module Typhoeus
class Hydra
include ConnectOptions
include Stubbing
extend Callbacks
def initialize(options = {})
@memoize_requests = true
@multi = Multi.new
@easy_pool = []
initial_pool_size = options[:initial_pool_size] || 10
@max_concurrency = options[:max_concurrency] || 200
initial_pool_size.times { @easy_pool << Easy.new }
@memoized_requests = {}
@retrieved_from_cache = {}
@queued_requests = []
@running_requests = 0
self.stubs = []
@active_stubs = []
end
def self.hydra
@hydra ||= new
end
def self.hydra=(val)
@hydra = val
end
#
# Abort the run on a best-effort basis.
#
# It won't abort the current burst of @max_concurrency requests,
# however it won't fire the rest of the queued requests so the run
# will be aborted as soon as possible...
#
def abort
@queued_requests.clear
end
def clear_cache_callbacks
@cache_setter = nil
@cache_getter = nil
end
def fire_and_forget
@queued_requests.each {|r| queue(r, false)}
@multi.fire_and_forget
end
def queue(request, obey_concurrency_limit = true)
return if assign_to_stub(request)
# At this point, we are running over live HTTP. Make sure we haven't
# disabled live requests.
check_allow_net_connect!(request)
if @running_requests >= @max_concurrency && obey_concurrency_limit
@queued_requests << request
else
if request.method == :get
if @memoize_requests && @memoized_requests.has_key?(request.url)
if response = @retrieved_from_cache[request.url]
request.response = response
request.call_handlers
else
@memoized_requests[request.url] << request
end
else
@memoized_requests[request.url] = [] if @memoize_requests
get_from_cache_or_queue(request)
end
else
get_from_cache_or_queue(request)
end
end
end
def run
while !@active_stubs.empty?
m = @active_stubs.first
while request = m.requests.shift
response = m.response
response.request = request
handle_request(request, response)
end
@active_stubs.delete(m)
end
@multi.perform
@memoized_requests = {}
@retrieved_from_cache = {}
end
def disable_memoization
@memoize_requests = false
end
def cache_getter(&block)
@cache_getter = block
end
def cache_setter(&block)
@cache_setter = block
end
def on_complete(&block)
@on_complete = block
end
def on_complete=(proc)
@on_complete = proc
end
def get_from_cache_or_queue(request)
if @cache_getter
val = @cache_getter.call(request)
if val
@retrieved_from_cache[request.url] = val
handle_request(request, val, false)
else
@multi.add(get_easy_object(request))
end
else
@multi.add(get_easy_object(request))
end
end
private :get_from_cache_or_queue
def get_easy_object(request)
@running_requests += 1
easy = @easy_pool.pop || Easy.new
easy.verbose = request.verbose
if request.username || request.password
auth = { :username => request.username, :password => request.password }
auth[:method] = Typhoeus::Easy::AUTH_TYPES["CURLAUTH_#{request.auth_method.to_s.upcase}".to_sym] if request.auth_method
easy.auth = auth
end
if request.proxy
proxy = { :server => request.proxy }
proxy[:type] = Typhoeus::Easy::PROXY_TYPES["CURLPROXY_#{request.proxy_type.to_s.upcase}".to_sym] if request.proxy_type
easy.proxy = proxy if request.proxy
end
if request.proxy_username || request.proxy_password
auth = { :username => request.proxy_username, :password => request.proxy_password }
auth[:method] = Typhoeus::Easy::AUTH_TYPES["CURLAUTH_#{request.proxy_auth_method.to_s.upcase}".to_sym] if request.proxy_auth_method
easy.proxy_auth = auth
end
easy.url = request.url
easy.method = request.method
easy.params = request.params if request.method == :post && !request.params.nil?
easy.headers = request.headers if request.headers
easy.request_body = request.body if request.body
easy.timeout = request.timeout if request.timeout
easy.connect_timeout = request.connect_timeout if request.connect_timeout
easy.follow_location = request.follow_location if request.follow_location
easy.max_redirects = request.max_redirects if request.max_redirects
easy.disable_ssl_peer_verification if request.disable_ssl_peer_verification
easy.ssl_cert = request.ssl_cert
easy.ssl_cert_type = request.ssl_cert_type
easy.ssl_key = request.ssl_key
easy.ssl_key_type = request.ssl_key_type
easy.ssl_key_password = request.ssl_key_password
easy.ssl_cacert = request.ssl_cacert
easy.ssl_capath = request.ssl_capath
easy.verbose = request.verbose
easy.on_success do |easy|
queue_next
handle_request(request, response_from_easy(easy, request))
release_easy_object(easy)
end
easy.on_failure do |easy|
queue_next
handle_request(request, response_from_easy(easy, request))
release_easy_object(easy)
end
easy.set_headers
easy
end
private :get_easy_object
def queue_next
@running_requests -= 1
queue(@queued_requests.pop) unless @queued_requests.empty?
end
private :queue_next
def release_easy_object(easy)
easy.reset
@easy_pool.push easy
end
private :release_easy_object
def handle_request(request, response, live_request = true)
request.response = response
self.class.run_global_hooks_for(:after_request_before_on_complete,
request)
if live_request && request.cache_timeout && @cache_setter
@cache_setter.call(request)
end
@on_complete.call(response) if @on_complete
request.call_handlers
if requests = @memoized_requests[request.url]
requests.each do |r|
r.response = response
r.call_handlers
end
end
end
private :handle_request
def response_from_easy(easy, request)
Response.new(:code => easy.response_code,
:headers => easy.response_header,
:body => easy.response_body,
:time => easy.total_time_taken,
:start_transfer_time => easy.start_transfer_time,
:app_connect_time => easy.app_connect_time,
:pretransfer_time => easy.pretransfer_time,
:connect_time => easy.connect_time,
:name_lookup_time => easy.name_lookup_time,
:effective_url => easy.effective_url,
:request => request)
end
private :response_from_easy
end
end
|
require 'uri'
require 'uri_mapper/uri_builder'
require 'uri_mapper/path'
require 'uri_mapper/query'
require 'uri_mapper/subdomains'
# TODO (2013-08-25) Consider responsibilities: does Uri split things into
# parts, or does <component>#build ?
#
# TODO (2013-08-25) Make a testing plan, this'll get complicated
#
module UriMapper
class Uri
extend UriBuilder
# TODO (2013-08-25) alias_component, use both :scheme and :protocol
component :scheme
component :path, :class => Path
component :query, :class => Query
component :subdomains, :class => Subdomains, :depends => [:host]
component :host, :depends => [:subdomains, :domains] do
(subdomains.to_a + domains.raw).join('.')
end
component :domains, :depends => [:host] do
@uri.host.split('.').last(2)
end
def initialize(string)
@components = {}
@uri = URI.parse(string)
end
def map(component = nil, &block)
Uri.new(@uri.to_s).map!(component, &block)
end
alias_method :change, :map
def get(component_name)
if self.class.component_names.include?(component_name)
public_send(component_name)
else
raise "Unknown component: #{component_name}"
end
end
def set(component_name, replacement)
get(component_name).reload(replacement)
end
def map!(component = nil)
# No component requested, just yield the whole thing
if not component
yield self
return self
end
# Components with static changes, just merge them in
if component.is_a? Hash
component.each do |name, replacement|
set(name, replacement)
end
return self
end
# Component and a block
replacement = yield get(component)
set(component, replacement)
self
end
alias_method :change!, :map!
def to_s
uri = @uri.dup
uri.scheme = scheme.to_s
uri.host = host.to_s
uri.path = path.to_s
uri.query = query.to_s
uri.to_s
end
end
end
Some minor reformatting
require 'uri'
require 'uri_mapper/uri_builder'
require 'uri_mapper/path'
require 'uri_mapper/query'
require 'uri_mapper/subdomains'
# TODO (2013-08-25) Consider responsibilities: does Uri split things into
# parts, or does <component>#build ?
#
# TODO (2013-08-25) Make a testing plan, this'll get complicated
#
module UriMapper
class Uri
extend UriBuilder
# TODO (2013-08-25) alias_component, use both :scheme and :protocol
component :scheme
component :path, :class => Path
component :query, :class => Query
component :subdomains, :class => Subdomains, :depends => [:host]
component :host, :depends => [:subdomains, :domains] do
(subdomains.to_a + domains.raw).join('.')
end
component :domains, :depends => [:host] do
@uri.host.split('.').last(2)
end
def initialize(string)
@components = {}
@uri = URI.parse(string)
end
# Fetch the value of a named URI component.
# Raises for a component name that was never declared via +component+.
def get(component_name)
unless self.class.component_names.include?(component_name)
raise "Unknown component: #{component_name}"
end
public_send(component_name)
end
def set(component_name, replacement)
get(component_name).reload(replacement)
end
def map(component = nil, &block)
Uri.new(@uri.to_s).map!(component, &block)
end
alias_method :change, :map
# Mutate this Uri in place and return self.
#
# map!               { |uri| ... }  - yield the whole Uri to the block.
# map!(name => val, ...)            - merge static replacements.
# map!(:name)        { |val| ... }  - yield one component, store result.
def map!(component = nil)
if not component
# No component requested, just yield the whole thing
yield self
elsif component.is_a? Hash
# Components with static changes, just merge them in
component.each do |name, replacement|
set(name, replacement)
end
else
# Component and a block
replacement = yield get(component)
set(component, replacement)
end
self
end
alias_method :change!, :map!
# Render the URI as a string, pushing each mapped component back into a
# copy of the underlying URI object before serializing it.
def to_s
@uri.dup.tap do |uri|
uri.scheme = scheme.to_s
uri.host = host.to_s
uri.path = path.to_s
uri.query = query.to_s
end.to_s
end
end
end
|
require 'capybara'
require 'capybara/poltergeist'
module Valanga
class Client
LOGIN_PAGE = "https://p.eagate.573.jp/gate/p/login.html"
attr_reader :session
def initialize(username, password)
Capybara.register_driver :poltergeist do |app|
Capybara::Poltergeist::Driver.new(app, js_errors: false)
end
@session = Capybara::Session.new(:poltergeist)
login!(username, password)
end
def music
@music ||= Music.new(@session)
end
private
def login!(username, password)
@session.visit LOGIN_PAGE
@session.fill_in 'KID', with: username
@session.fill_in 'pass', with: password
@session.click_on "規約に同意してログイン"
unless @session.current_path == "/gate/p/mypage/index.html"
raise LoginError, session.find(:xpath, '//div[@class="error_text_box"]/p').text
end
end
end
end
includes Valanga::MusicSearch in Valanga::Client
require 'capybara'
require 'capybara/poltergeist'
module Valanga
class Client
include MusicSearch
LOGIN_PAGE = "https://p.eagate.573.jp/gate/p/login.html"
attr_reader :session
def initialize(username, password)
Capybara.register_driver :poltergeist do |app|
Capybara::Poltergeist::Driver.new(app, js_errors: false)
end
@session = Capybara::Session.new(:poltergeist)
login!(username, password)
end
def music
@music ||= Music.new(@session)
end
private
# Log in to the eAMUSEMENT gate page with the given credentials.
# Raises LoginError (carrying the on-page error text) when the
# post-login redirect does not land on the mypage URL.
def login!(username, password)
@session.visit LOGIN_PAGE
@session.fill_in 'KID', with: username
@session.fill_in 'pass', with: password
# Button label: "agree to the terms and log in"
@session.click_on "規約に同意してログイン"
unless @session.current_path == "/gate/p/mypage/index.html"
raise LoginError, session.find(:xpath, '//div[@class="error_text_box"]/p').text
end
end
end
end
|
class VersionHelper
require 'Versionomy'
def initialize(command)
@command = command
end
def check_version
color "Checking for zappifest update...", :green
latest_zappifest_stable = parse_latest_zappifest_version(`brew info zappifest`)
prompt_for_update(latest_zappifest_stable) if update_required?(latest_zappifest_stable)
rescue
puts "Failed to check zappifest update - please check manually by running `brew info zappifest`"
end
private
def prompt_for_update(version)
if agree update_message(version)
update_zappifest
elsif @command.name == "publish"
puts "You need to update to the latest version in order to publish"
exit
end
end
# True when the Homebrew-published version is newer than the locally
# running VERSION constant, using Versionomy's version ordering.
def update_required?(latest_zappifest_stable)
Versionomy.parse(latest_zappifest_stable) > VERSION
end
def update_message(version)
"A new zappifest version is available (#{version}). Do you want to upgrade ? (yes/no)"
end
# Pull the latest stable version out of `brew info` output: it is the
# last whitespace-separated token on the first line.
def parse_latest_zappifest_version(cmd)
cmd.lines.first.split.last
end
def update_zappifest
puts "Updating zappifest..."
system "brew update && brew upgrade zappifest"
end
end
address comments
class VersionHelper
require 'Versionomy'
def initialize(command)
@command = command
end
def check_version
color "Checking for zappifest update...", :green
@latest_zappifest_stable = `brew info zappifest`.split("\n")[0].split(" ")[-1]
prompt_for_update if update_required?
rescue
puts "Failed to check zappifest update - please check manually by running `brew info zappifest`"
end
private
def prompt_for_update
if agree update_message
update_zappifest
elsif @command.name == "publish"
puts "You need to update to the latest version in order to publish a plugin"
exit
end
end
def update_required?
Versionomy.parse(@latest_zappifest_stable) > VERSION
end
def update_message
"A new zappifest version is available (#{@latest_zappifest_stable}). Do you want to upgrade ? (yes/no)"
end
def update_zappifest
puts "Updating zappifest..."
system "brew update && brew upgrade zappifest"
end
end
|
require 'json'
require 'villein/tags'
module Villein
##
# Villein::Client allows you to order existing serf agent.
# You will need RPC address and agent name to command.
class Client
##
# for serf command failures
class SerfError < Exception; end
##
# Error for the given argument exceeds the limit of serf when setting tags and sending events.
class LengthExceedsLimitError < SerfError; end
##
# Error for connection failures
class SerfConnectionError < SerfError; end
##
# Error when an called serf command is not found.
class SerfCommandNotFound < SerfError; end
##
# Error when an operation is not supported by the current version.
class InsufficientVersionError < SerfError; end
# rpc_addr - "host:port" of the serf agent's RPC endpoint.
# name     - agent node name; looked up via `serf info` when omitted.
# serf     - path to the serf executable.
# silence  - stored for #silence? (see attr_writer :silence).
def initialize(rpc_addr, name: nil, serf: 'serf', silence: true)
@rpc_addr = rpc_addr
@name = name
@serf = serf
# Bug fix: was hard-coded to true, silently ignoring the silence:
# keyword argument.
@silence = silence
retrieve_name unless @name
end
def silence?() !!@silence; end
attr_writer :silence
attr_reader :name, :rpc_addr, :serf
##
# Returns a result of `serf info`.
# This may raise InsufficientVersionError when `serf info` is not supported.
def info
JSON.parse call_serf('info', '-format', 'json')
rescue SerfCommandNotFound
raise InsufficientVersionError, 'serf v0.6.0 or later is required to run `serf info`.'
end
def event(name, payload, coalesce: true)
options = []
unless coalesce
options << '-coalesce=false'
end
call_serf 'event', *options, name, payload
end
def query(name, payload, node: nil, tag: nil, timeout: nil, no_ack: false)
# TODO: version check
options = ['-format', 'json']
if node
node = [node] unless node.respond_to?(:each)
node.each do |n|
options << "-node=#{n}"
end
end
if tag
tag = [tag] unless tag.respond_to?(:each)
tag.each do |t|
options << "-tag=#{t}"
end
end
if timeout
options << "-timeout=#{timeout}"
end
if no_ack
options << "-no-ack"
end
out = call_serf('query', *options, name, payload)
JSON.parse(out)
end
def join(addr, replay: false)
options = []
if replay
options << '-replay'
end
call_serf 'join', *options, addr
end
def leave
call_serf 'leave'
end
def force_leave(node)
call_serf 'force-leave', node
end
def members(status: nil, name: nil, tags: {})
options = ['-format', 'json']
options.push('-status', status.to_s) if status
options.push('-name', name.to_s) if name
tags.each do |tag, val|
options.push('-tag', "#{tag}=#{val}")
end
json = call_serf('members', *options)
response = JSON.parse(json)
response["members"]
end
##
# Returns Villein::Tags object for the current agent.
# Villein::Tags provides high-level API for tagging agents.
def tags
@tags ||= Tags.new(self)
end
##
# Get tag from the agent.
# Using Villein::Client#tags method is recommended. It provides high-level API via +Villein::Tags+.
def get_tags
me = members(name: self.name)[0]
me["tags"]
end
##
# Remove tag from the agent.
# Using Villein::Client#tags method is recommended. It provides high-level API via +Villein::Tags+.
def delete_tag(key)
call_serf 'tags', '-delete', key
end
##
# Set tag to the agent.
# Using Villein::Client#tags method is recommended. It provides high-level API via +Villein::Tags+.
def set_tag(key, val)
call_serf 'tags', '-set', "#{key}=#{val}"
end
private
def retrieve_name
@name = self.info["agent"]["name"]
end
# Invoke the serf CLI with the given subcommand and arguments and return
# its combined stdout+stderr output (stderr is merged via err: option).
# Raises a SerfError subclass when the command exits non-zero.
def call_serf(cmd, *args)
status, out = IO.popen([@serf, cmd, "-rpc-addr=#{rpc_addr}", *args, err: [:child, :out]], 'r') do |io|
# Bug fix: drain the pipe BEFORE waiting on the child. Waiting first
# deadlocks when the child blocks writing more output than the pipe
# buffer holds.
output = io.read
_, s = Process.waitpid2(io.pid)
[s, output]
end
unless status.success?
# Map well-known serf error messages to specific exception classes.
case out
when /^Error connecting to Serf agent:/
raise SerfConnectionError, out.chomp
when /exceeds limit of \d+ bytes$/
raise LengthExceedsLimitError, out.chomp
when /^Available commands are:/
raise SerfCommandNotFound
else
raise SerfError, out.chomp
end
end
out
end
end
end
Read first, then wait for process end
require 'json'
require 'villein/tags'
module Villein
##
# Villein::Client allows you to order existing serf agent.
# You will need RPC address and agent name to command.
class Client
##
# for serf command failures
class SerfError < Exception; end
##
# Error for the given argument exceeds the limit of serf when setting tags and sending events.
class LengthExceedsLimitError < SerfError; end
##
# Error for connection failures
class SerfConnectionError < SerfError; end
##
# Error when an called serf command is not found.
class SerfCommandNotFound < SerfError; end
##
# Error when an operation is not supported by the current version.
class InsufficientVersionError < SerfError; end
# rpc_addr - "host:port" of the serf agent's RPC endpoint.
# name     - agent node name; looked up via `serf info` when omitted.
# serf     - path to the serf executable.
# silence  - stored for #silence? (see attr_writer :silence).
def initialize(rpc_addr, name: nil, serf: 'serf', silence: true)
@rpc_addr = rpc_addr
@name = name
@serf = serf
# Bug fix: was hard-coded to true, silently ignoring the silence:
# keyword argument.
@silence = silence
retrieve_name unless @name
end
def silence?() !!@silence; end
attr_writer :silence
attr_reader :name, :rpc_addr, :serf
##
# Returns a result of `serf info`.
# This may raise InsufficientVersionError when `serf info` is not supported.
def info
JSON.parse call_serf('info', '-format', 'json')
rescue SerfCommandNotFound
raise InsufficientVersionError, 'serf v0.6.0 or later is required to run `serf info`.'
end
def event(name, payload, coalesce: true)
options = []
unless coalesce
options << '-coalesce=false'
end
call_serf 'event', *options, name, payload
end
def query(name, payload, node: nil, tag: nil, timeout: nil, no_ack: false)
# TODO: version check
options = ['-format', 'json']
if node
node = [node] unless node.respond_to?(:each)
node.each do |n|
options << "-node=#{n}"
end
end
if tag
tag = [tag] unless tag.respond_to?(:each)
tag.each do |t|
options << "-tag=#{t}"
end
end
if timeout
options << "-timeout=#{timeout}"
end
if no_ack
options << "-no-ack"
end
out = call_serf('query', *options, name, payload)
JSON.parse(out)
end
def join(addr, replay: false)
options = []
if replay
options << '-replay'
end
call_serf 'join', *options, addr
end
def leave
call_serf 'leave'
end
def force_leave(node)
call_serf 'force-leave', node
end
def members(status: nil, name: nil, tags: {})
options = ['-format', 'json']
options.push('-status', status.to_s) if status
options.push('-name', name.to_s) if name
tags.each do |tag, val|
options.push('-tag', "#{tag}=#{val}")
end
json = call_serf('members', *options)
response = JSON.parse(json)
response["members"]
end
##
# Returns Villein::Tags object for the current agent.
# Villein::Tags provides high-level API for tagging agents.
def tags
@tags ||= Tags.new(self)
end
##
# Get tag from the agent.
# Using Villein::Client#tags method is recommended. It provides high-level API via +Villein::Tags+.
def get_tags
me = members(name: self.name)[0]
me["tags"]
end
##
# Remove tag from the agent.
# Using Villein::Client#tags method is recommended. It provides high-level API via +Villein::Tags+.
def delete_tag(key)
call_serf 'tags', '-delete', key
end
##
# Set tag to the agent.
# Using Villein::Client#tags method is recommended. It provides high-level API via +Villein::Tags+.
def set_tag(key, val)
call_serf 'tags', '-set', "#{key}=#{val}"
end
private
def retrieve_name
@name = self.info["agent"]["name"]
end
# Invoke the serf CLI with the given subcommand and arguments and return
# its combined stdout+stderr output (stderr is merged via the err: option).
# Raises a SerfError subclass when the command exits non-zero.
def call_serf(cmd, *args)
status, out = IO.popen([@serf, cmd, "-rpc-addr=#{rpc_addr}", *args, err: [:child, :out]], 'r') do |io|
# Read before waiting: waiting first can deadlock when the child
# blocks writing more output than the pipe buffer holds.
o = io.read
_, s = Process.waitpid2(io.pid)
[s, o]
end
unless status.success?
# Map well-known serf error messages to specific exception classes.
case out
when /^Error connecting to Serf agent:/
raise SerfConnectionError, out.chomp
when /exceeds limit of \d+ bytes$/
raise LengthExceedsLimitError, out.chomp
when /^Available commands are:/
raise SerfCommandNotFound
else
raise SerfError, out.chomp
end
end
out
end
end
end
|
require 'thor'
module Vim
module Flavor
class CLI < Thor
desc 'install', 'Install Vim plugins according to VimFlavor file.'
method_option :vimfiles_path,
:desc => 'Where to install Vim plugins.',
:banner => 'DIR'
def install
Facade.new().install(
options[:vimfiles_path] || ENV['HOME'].to_vimfiles_path
)
end
end
end
end
Define CLI#default_vimfiles_path
require 'thor'
module Vim
module Flavor
# Command-line interface for vim-flavor, built on Thor.
class CLI < Thor
desc 'install', 'Install Vim plugins according to VimFlavor file.'
method_option :vimfiles_path,
:desc => 'Where to install Vim plugins.',
:banner => 'DIR'
# Install flavors into --vimfiles-path, or the default vimfiles
# location when the option is not given.
def install
Facade.new().install(
# Consistency fix: use the helper instead of duplicating the
# ENV['HOME'].to_vimfiles_path expression inline.
options[:vimfiles_path] || default_vimfiles_path
)
end
no_tasks do
# Default plugin installation target, derived from $HOME.
def default_vimfiles_path
ENV['HOME'].to_vimfiles_path
end
end
end
end
end
|
module Weaver
VERSION = "0.6.5"
end
version bump
module Weaver
# Gem version; frozen so the shared string constant cannot be mutated
# in place.
VERSION = "0.6.6".freeze
end
|
class Whatsa::Scraper
attr_reader :query, :page
WIKISEARCH = 'https://en.wikipedia.org/w/index.php?search='
def initialize(term)
# only keep word chars, turn everything between each 'word' to a single '+'
# and remove '+'s at the beginning and end if they're there
@query = term.gsub(/\W+/, '+').gsub(/(\A\+|\+\z)/, '')
@page = Nokogiri::HTML(open(WIKISEARCH + self.query))
end
def results_page?
!self.page.css('.searchresults').empty?
end
def not_found?
!self.page.css('.mw-search-nonefound').empty?
end
def article?
!self.page.css('#ca-nstab-main').empty? && self.page.css('#disambigbox').empty?
end
end
feces = Whatsa::Scraper.new("feces") # search term that exists exactly
poop = Whatsa::Scraper.new("poop") # search term that disambiguates
jiggly = Whatsa::Scraper.new("jiggly") # search term that has results
gg = Whatsa::Scraper.new("gobblegobble") # search term that has no results
binding.pry
added #disambig?
class Whatsa::Scraper
attr_reader :query, :page
WIKISEARCH = 'https://en.wikipedia.org/w/index.php?search='
def initialize(term)
# only keep word chars, turn everything between each 'word' to a single '+'
# and remove '+'s at the beginning and end if they're there
@query = term.gsub(/\W+/, '+').gsub(/(\A\+|\+\z)/, '')
@page = Nokogiri::HTML(open(WIKISEARCH + self.query))
end
def results_page?
!self.page.css('.searchresults').empty?
end
def not_found?
!self.page.css('.mw-search-nonefound').empty?
end
def article?
!self.page.css('#ca-nstab-main').empty? && !disambig?
end
# True when the page is a Wikipedia disambiguation page, identified by
# the presence of the #disambigbox element.
def disambig?
self.page.css('#disambigbox').any?
end
end
feces = Whatsa::Scraper.new("feces") # search term that goes to an article
poop = Whatsa::Scraper.new("poop") # search term that disambiguates
jiggly = Whatsa::Scraper.new("jiggly") # search term that has results
gg = Whatsa::Scraper.new("gobblegobble") # search term that has no results
binding.pry
|
require 'rest_client'
module WhitLi
class Client
attr_accessor :api_key
def initialize api_key
@api_key = api_key
end
def import_token fb_id, token
api_call "user/importToken", "post", { :uid => fb_id, :oauth_token => token }
end
def populate fb_id
api_call "user/populate", "get", { :uid => fb_id }
end
def import_generic request_body
api_call "user/importGeneric", "put", { :RequestBody => request_body }
end
def get fb_id, key_id, schema = "fb"
api_call "key/get", "get", { :uid => fb_id, :key_id => key_id, :schema => schema }
end
def compare fb_id_1, fb_id_2, context_id, schema = "fb"
api_call "key/compare", "get", { :uid1 => fb_id_1, :uid2 => fb_id_2, :context_id => context_id, :schema => schema }
end
private
def api_call path, method = "get", params = {}
params = params.merge({:api_key => @api_key, :format => WhitLi::Config::FORMAT})
begin
response = RestClient.send method, [WhitLi::Config::API_URL, path].join("/")+"."+params[:format], { :params => params }
rescue => e
raise_errors e.response
end
raise_errors response
WhitLi::Mash.from_json response.body
end
def raise_errors response
case response.code.to_i
when 400
data = Mash.from_json(response.body)
raise WhitLi::Errors::GeneralError.new(data), "(#{data.status}): #{data.message}"
when 403
raise WhitLi::Errors::AccessDeniedError, "(#{response.code}): #{response.message}"
when 405, 401
raise WhitLi::Errors::UnauthorizedError, "(#{response.code}): #{response.message}"
when 404
raise WhitLi::Errors::NotFoundError, "(#{response.code}): #{response.message}"
when 500
raise WhitLi::Errors::InformWhitLiError, "WhitLi had an internal error. (#{response.code}): #{response.message}"
when 502..503
raise WhitLi::Errors::UnavailableError, "(#{response.code}): #{response.message}"
end
end
end
end
pending
require 'rest_client'
module WhitLi
class Client
attr_accessor :api_key
def initialize api_key
@api_key = api_key
end
def import_token fb_id, token
api_call "user/importToken", "post", { :uid => fb_id, :oauth_token => token }
end
def populate fb_id
api_call "user/populate", "get", { :uid => fb_id }
end
def import_generic request_body
api_call "user/importGeneric", "put", { :RequestBody => request_body }
end
def get fb_id, key_id, schema = "fb"
api_call "key/get", "get", { :uid => fb_id, :key_id => key_id, :schema => schema }
end
def compare fb_id_1, fb_id_2, context_id, schema = "fb"
api_call "key/compare", "get", { :uid1 => fb_id_1, :uid2 => fb_id_2, :context_id => context_id, :schema => schema }
end
private
def raise_response_error response
#pending
end
# Perform a request against the WhitLi REST API.
#
# path   - endpoint path, e.g. "user/populate".
# method - HTTP verb as a String ("get", "post", "put").
# params - query parameters; :api_key and :format are merged in.
#
# Returns a WhitLi::Mash built from the JSON response body.
def api_call path, method = "get", params = {}
params = params.merge({:api_key => @api_key, :format => WhitLi::Config::FORMAT})
begin
response = RestClient.send method, [WhitLi::Config::API_URL, path].join("/")+"."+params[:format], { :params => params }
rescue => e
raise_errors e.response
# Bug fix: raise_errors only raises for the status codes it knows
# about; previously execution fell through here and crashed with a
# NoMethodError on the nil `response` below. Re-raise the original
# exception instead.
raise
end
WhitLi::Mash.from_json response.body
end
def raise_errors response
case response.code.to_i
when 400
data = Mash.from_json(response.body)
raise WhitLi::Errors::GeneralError.new(data), "(#{data.status}): #{data.message}"
when 403
raise WhitLi::Errors::AccessDeniedError, "(#{response.code}): #{response.message}"
when 405, 401
raise WhitLi::Errors::UnauthorizedError, "(#{response.code}): #{response.message}"
when 404
raise WhitLi::Errors::NotFoundError, "(#{response.code}): #{response.message}"
when 500
raise WhitLi::Errors::InformWhitLiError, "WhitLi had an internal error. (#{response.code}): #{response.message}"
when 502..503
raise WhitLi::Errors::UnavailableError, "(#{response.code}): #{response.message}"
end
end
end
end |
module Parse
class Results
def initialize
@result = nil
end
# Depth-first search +hash+ (and any nested hashes among its values)
# for +key+; return the first matching value, or nil when absent.
#
# Fixes two defects in the previous version: the result was cached in
# the @result instance variable across calls (so a failed lookup could
# return a stale value from an earlier call), and the scan kept
# iterating after a match was already found.
def pull_from_hash(hash, key)
return hash[key] if hash.include?(key)
hash.each_pair do |_k, v|
if v.class == Hash
found = pull_from_hash(v, key)
return found unless found.nil?
end
end
nil
end
end
# Extract portions of text from Wiki article
class Text < Results
attr_reader :api_return
def initialize(api_return, prop='extract')
@request = self.pull_from_hash(api_return, prop)
if @request.class == Array
@request = self.pull_from_hash(@request[0], "*")
end
end
# Returns the requested number of paragraphs of a Wiki article
#
# quantity - the Number of paragraphs to be returned starting from the top
# of the article. Defaults is to get the first paragraph.
#
# Returns up to +quantity+ paragraphs (as an array of "...</p>" strings)
# from the start of the article; clamps to the number available.
def paragraph(quantity)
# Break the article into individual paragraphs and store in an array.
start = @request.split("</p>")
# Re-add the closing paragraph HTML tags.
# NOTE(review): the fragment after the final "</p>" (if any) also gets
# a "</p>" appended here — confirm that trailing text is intended to
# count as a paragraph.
start.each do |string|
string << "</p>"
end
# Check to make sure the quantity being requested is not more paragraphs
# than exist.
#
# Return the correct number of paragraphs assigned to new_arr
if start.length < quantity
quantity = start.length - 1
new_arr = start[0..quantity]
else
quantity = quantity - 1
new_arr = start[0..quantity]
end
end
# Removes HTML tags from a String
#
# string - a String that contains HTML tags.
#
# Returns the string without HTML tags.
# Removes HTML tags from a String.
#
# string - a String that may contain HTML tags.
#
# Returns the string with all tags stripped.
def only_text(string)
string.gsub(%r{</?.*?>}, '')
end
# Return the text from the sidebar, if one exists
# def sidebar
# @sidebar = content_split(0)
# end
# Return the image from the sidebar, if one exists
def sidebar_image
binding.pry
img_name = content_split(0)[/(?<= image = )\S*/]
img_name = img_name[0..-2]
img_name_call = Api::Call.new(img_name, :prop => "imageinfo", :iiprop => true)
img_name_2 = img_name_call.call_api
img_array = pull_from_hash(img_name_2, "imageinfo")
img_array[0]["url"]
end
# Return all refrences as an array
def refs
@content = content_split(1, 2)
#add all references to an array. still in wiki markup
@content.scan(/<ref>(.*?)<\/ref>/)
end
# Return all paragraphs under a given heading
#
# header = the name of the header as a String
# paras = the number of paragraphs
def find_header(header)
# Find the requested header
start = @request.index(header)
# Find next instance of the tag.
end_first_tag = start + @request[start..-1].index("h2") + 3
# Find
start_next_tag = @request[end_first_tag..-1].index("h2") +
end_first_tag - 2
# Select substring of requested text.
@request[end_first_tag..start_next_tag]
end
# splits the content into side bar and everything else.
# this method is for Parsing methods that use the raw markup from the revisions call.
# specify start as 0 for sidebar content, for everything else specify 1 ..2
# TODO:split the content from the catagory info
def content_split(start, finish=nil)
@content = @request.split("'''")
if finish == nil
return @content[start]
else
return @content[start..finish].join
end
end
end
class Media < Results
attr_reader :api_return
def initialize(api_return, prop)
@request = self.pull_from_hash(api_return, prop)
end
# Return a hash containing an array of urls and an array of image titles.
#
def list_images
# Call API for initial list of images
isolated_list = @request
# Parse JSON object for list of image titles
image_title_array = []
isolated_list.each do |key, value|
image_title_array << value["title"]
end
# Make API call for individual image links
img_url_call_array = []
image_title_array.each do |title|
individual_img_call = Api::Call.new(title, :prop => "imageinfo", :iiprop => true)
img_url_call_array << individual_img_call.call_api
end
# Pull pages object containing imageinfo array out from JSON object
imageinfo_array = []
img_url_call_array.each do |object|
imageinfo_array << pull_from_hash(object, "pages")
end
# Pull imageinfo array out of nested hash
info_array = []
imageinfo_array.each do |object|
info_array << pull_from_hash(object, "imageinfo")
end
# Pull each URL and place in an array
url_array = []
info_array.each do |array|
url_array << array[0]["url"]
end
return { urls: url_array, titles: image_title_array }
end
end
end
Fix Text#sidebar_image: remove the leftover binding.pry debug call and resolve the image URL via the "pages" object.
module Parse
class Results
def initialize
@result = nil
end
def pull_from_hash(hash, key)
@hash = hash
@key = key
if @hash.include?(@key)
@result = @hash[@key]
else
@hash.each_pair do |k, v|
if v.class == Hash
pull_from_hash(v, @key)
end
end
end
@result
end
end
# Extract portions of text from Wiki article
class Text < Results
attr_reader :api_return
def initialize(api_return, prop='extract')
@request = self.pull_from_hash(api_return, prop)
if @request.class == Array
@request = self.pull_from_hash(@request[0], "*")
end
end
# Returns the requested number of paragraphs of a Wiki article
#
# quantity - the Number of paragraphs to be returned starting from the top
# of the article. Defaults is to get the first paragraph.
#
def paragraph(quantity)
# Break the article into individual paragraphs and store in an array.
start = @request.split("</p>")
# Re-add the closing paragraph HTML tags.
start.each do |string|
string << "</p>"
end
# Check to make sure the quantity being requested is not more paragraphs
# than exist.
#
# Return the correct number of paragraphs assigned to new_arr
if start.length < quantity
quantity = start.length - 1
new_arr = start[0..quantity]
else
quantity = quantity - 1
new_arr = start[0..quantity]
end
end
# Removes HTML tags from a String
#
# string - a String that contains HTML tags.
#
# Returns the string without HTML tags.
def only_text(string)
no_html_tags = string.gsub(/<\/?.*?>/,'')
end
# Return the text from the sidebar, if one exists
# def sidebar
# @sidebar = content_split(0)
# end
# Return the image from the sidebar, if one exists
def sidebar_image
img_name = content_split(0)[/(image\s* =\s*).*?(g|f)/]
image_name = img_name.split("= ")[1]
img_name_call = Api::Call.new('File:'+image_name, :prop => "imageinfo", :iiprop => true)
get_url = img_name_call.call_api
img_name_2 = pull_from_hash(get_url, "pages")
img_array = pull_from_hash(img_name_2, "imageinfo")
img_array[0]["url"]
end
# Return all refrences as an array
def refs
@content = content_split(1, 2)
#add all references to an array. still in wiki markup
@content.scan(/<ref>(.*?)<\/ref>/)
end
# Return all paragraphs under a given heading
#
# header = the name of the header as a String
# paras = the number of paragraphs
def find_header(header)
# Find the requested header
start = @request.index(header)
# Find next instance of the tag.
end_first_tag = start + @request[start..-1].index("h2") + 3
# Find
start_next_tag = @request[end_first_tag..-1].index("h2") +
end_first_tag - 2
# Select substring of requested text.
@request[end_first_tag..start_next_tag]
end
# splits the content into side bar and everything else.
# this method is for Parsing methods that use the raw markup from the revisions call.
# specify start as 0 for sidebar content, for everything else specify 1 ..2
# TODO:split the content from the catagory info
def content_split(start, finish=nil)
@content = @request.split("'''")
if finish == nil
return @content[start]
else
return @content[start..finish].join
end
end
end
class Media < Results
attr_reader :api_return
def initialize(api_return, prop)
@request = self.pull_from_hash(api_return, prop)
end
# Return a hash containing an array of urls and an array of image titles.
#
def list_images
# Call API for initial list of images
isolated_list = @request
# Parse JSON object for list of image titles
image_title_array = []
isolated_list.each do |key, value|
image_title_array << value["title"]
end
# Make API call for individual image links
img_url_call_array = []
image_title_array.each do |title|
individual_img_call = Api::Call.new(title, :prop => "imageinfo", :iiprop => true)
img_url_call_array << individual_img_call.call_api
end
# Pull pages object containing imageinfo array out from JSON object
imageinfo_array = []
img_url_call_array.each do |object|
imageinfo_array << pull_from_hash(object, "pages")
end
# Pull imageinfo array out of nested hash
info_array = []
imageinfo_array.each do |object|
info_array << pull_from_hash(object, "imageinfo")
end
# Pull each URL and place in an array
url_array = []
info_array.each do |array|
url_array << array[0]["url"]
end
return { urls: url_array, titles: image_title_array }
end
end
end
|
# Gem version constant (semantic versioning).
module Wraith
  VERSION = '1.3.6'
end
Bump version to 1.3.7.
# Gem version constant (semantic versioning).
module Wraith
  VERSION = '1.3.7'
end
|
# Gem version constant (semantic versioning).
module Wraith
  VERSION = '1.3.4'
end
Bump version to 1.3.5.
# Gem version constant (semantic versioning).
module Wraith
  VERSION = '1.3.5'
end
|
require 'rest_client'
require 'json'
require 'cgi'
require 'securerandom'
require 'active_support/core_ext/hash/conversions'
module WxPay
module Service
GATEWAY_URL = 'https://api.mch.weixin.qq.com'.freeze
SANDBOX_GATEWAY_URL = 'https://api.mch.weixin.qq.com/sandboxnew'.freeze
FRAUD_GATEWAY_URL = 'https://fraud.mch.weixin.qq.com'.freeze
def self.generate_authorize_url(redirect_uri, state = nil)
state ||= SecureRandom.hex 16
"https://open.weixin.qq.com/connect/oauth2/authorize?appid=#{WxPay.appid}&redirect_uri=#{CGI::escape redirect_uri}&response_type=code&scope=snsapi_base&state=#{state}"
end
def self.authenticate(authorization_code, options = {})
options = WxPay.extra_rest_client_options.merge(options)
payload = {
appid: options.delete(:appid) || WxPay.appid,
secret: options.delete(:appsecret) || WxPay.appsecret,
code: authorization_code,
grant_type: 'authorization_code'
}
url = "https://api.weixin.qq.com/sns/oauth2/access_token"
::JSON.parse(RestClient::Request.execute(
{
method: :get,
headers: {params: payload},
url: url
}.merge(options)
), quirks_mode: true)
end
def self.get_sandbox_signkey(mch_id = WxPay.mch_id, options = {})
params = {
mch_id: mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/getsignkey", xmlify_payload(params))))
yield r if block_given?
r
end
def self.authenticate_from_weapp(js_code, options = {})
options = WxPay.extra_rest_client_options.merge(options)
payload = {
appid: options.delete(:appid) || WxPay.appid,
secret: options.delete(:appsecret) || WxPay.appsecret,
js_code: js_code,
grant_type: 'authorization_code'
}
url = "https://api.weixin.qq.com/sns/jscode2session"
::JSON.parse(RestClient::Request.execute(
{
method: :get,
headers: {params: payload},
url: url
}.merge(options)
), quirks_mode: true)
end
INVOKE_UNIFIEDORDER_REQUIRED_FIELDS = [:body, :out_trade_no, :total_fee, :spbill_create_ip, :notify_url, :trade_type]
def self.invoke_unifiedorder(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_UNIFIEDORDER_REQUIRED_FIELDS)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/unifiedorder", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_CLOSEORDER_REQUIRED_FIELDS = [:out_trade_no]
def self.invoke_closeorder(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_CLOSEORDER_REQUIRED_FIELDS)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/closeorder", make_payload(params), options)))
yield r if block_given?
r
end
GENERATE_APP_PAY_REQ_REQUIRED_FIELDS = [:prepayid, :noncestr]
def self.generate_app_pay_req(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
partnerid: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
package: 'Sign=WXPay',
timestamp: Time.now.to_i.to_s
}.merge(params)
check_required_options(params, GENERATE_APP_PAY_REQ_REQUIRED_FIELDS)
params[:sign] = WxPay::Sign.generate(params)
params
end
GENERATE_JS_PAY_REQ_REQUIRED_FIELDS = [:prepayid, :noncestr]
def self.generate_js_pay_req(params, options = {})
check_required_options(params, GENERATE_JS_PAY_REQ_REQUIRED_FIELDS)
params = {
appId: options.delete(:appid) || WxPay.appid,
package: "prepay_id=#{params.delete(:prepayid)}",
key: options.delete(:key) || WxPay.key,
nonceStr: params.delete(:noncestr),
timeStamp: Time.now.to_i.to_s,
signType: 'MD5'
}.merge(params)
params[:paySign] = WxPay::Sign.generate(params)
params
end
INVOKE_REFUND_REQUIRED_FIELDS = [:out_refund_no, :total_fee, :refund_fee, :op_user_id]
# out_trade_no 和 transaction_id 是二选一(必填)
def self.invoke_refund(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', ''),
}.merge(params)
params[:op_user_id] ||= params[:mch_id]
check_required_options(params, INVOKE_REFUND_REQUIRED_FIELDS)
warn("WxPay Warn: missing required option: out_trade_no or transaction_id must have one") if ([:out_trade_no, :transaction_id] & params.keys) == []
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/secapi/pay/refund", make_payload(params), options)))
yield r if block_given?
r
end
REFUND_QUERY_REQUIRED_FIELDS = [:out_trade_no]
def self.refund_query(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, ORDER_QUERY_REQUIRED_FIELDS)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/refundquery", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_TRANSFER_REQUIRED_FIELDS = [:partner_trade_no, :openid, :check_name, :amount, :desc, :spbill_create_ip]
def self.invoke_transfer(params, options = {})
params = {
mch_appid: options.delete(:appid) || WxPay.appid,
mchid: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key
}.merge(params)
check_required_options(params, INVOKE_TRANSFER_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/promotion/transfers", make_payload(params), options)))
yield r if block_given?
r
end
GETTRANSFERINFO_FIELDS = [:partner_trade_no]
def self.gettransferinfo(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key
}.merge(params)
check_required_options(params, GETTRANSFERINFO_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/gettransferinfo", make_payload(params), options)))
yield r if block_given?
r
end
# 获取加密银行卡号和收款方用户名的RSA公钥
def self.risk_get_public_key(options = {})
params = {
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key,
sign_type: 'MD5'
}
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE,
gateway_url: FRAUD_GATEWAY_URL
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/risk/getpublickey", make_payload(params), options)))
yield r if block_given?
r
end
PAY_BANK_FIELDS = [:enc_bank_no, :enc_true_name, :bank_code, :amount, :desc]
def self.pay_bank(params, options = {})
params = {
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key,
}.merge(params)
check_required_options(params, PAY_BANK_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaysptrans/pay_bank", make_payload(params), options)))
yield r if block_given?
r
end
QUERY_BANK_FIELDS = [:partner_trade_no]
def self.query_bank(params, options = {})
params = {
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key,
}.merge(params)
check_required_options(params, QUERY_BANK_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaysptrans/query_bank", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_REVERSE_REQUIRED_FIELDS = [:out_trade_no]
def self.invoke_reverse(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_REVERSE_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/secapi/pay/reverse", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_MICROPAY_REQUIRED_FIELDS = [:body, :out_trade_no, :total_fee, :spbill_create_ip, :auth_code]
def self.invoke_micropay(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_MICROPAY_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/micropay", make_payload(params), options)))
yield r if block_given?
r
end
ORDER_QUERY_REQUIRED_FIELDS = [:out_trade_no]
def self.order_query(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/orderquery", make_payload(params), options)))
check_required_options(params, ORDER_QUERY_REQUIRED_FIELDS)
yield r if block_given?
r
end
DOWNLOAD_BILL_REQUIRED_FIELDS = [:bill_date, :bill_type]
def self.download_bill(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', ''),
}.merge(params)
check_required_options(params, DOWNLOAD_BILL_REQUIRED_FIELDS)
r = invoke_remote("/pay/downloadbill", make_payload(params), options)
yield r if block_given?
r
end
DOWNLOAD_FUND_FLOW_REQUIRED_FIELDS = [:bill_date, :account_type]
def self.download_fund_flow(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key
}.merge(params)
check_required_options(params, DOWNLOAD_FUND_FLOW_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = invoke_remote("/pay/downloadfundflow", make_payload(params, WxPay::Sign::SIGN_TYPE_HMAC_SHA256), options)
yield r if block_given?
r
end
def self.sendgroupredpack(params, options={})
params = {
wxappid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
#check_required_options(params, INVOKE_MICROPAY_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/sendgroupredpack", make_payload(params), options)))
yield r if block_given?
r
end
def self.sendredpack(params, options={})
params = {
wxappid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
#check_required_options(params, INVOKE_MICROPAY_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/sendredpack", make_payload(params), options)))
yield r if block_given?
r
end
# 用于商户对已发放的红包进行查询红包的具体信息,可支持普通红包和裂变包。
GETHBINFO_FIELDS = [:mch_billno, :bill_type]
def self.gethbinfo(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key
}.merge(params)
check_required_options(params, GETHBINFO_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/gethbinfo", make_payload(params), options)))
yield r if block_given?
r
end
class << self
private
def get_gateway_url
return SANDBOX_GATEWAY_URL if WxPay.sandbox_mode?
GATEWAY_URL
end
def check_required_options(options, names)
return unless WxPay.debug_mode?
names.each do |name|
warn("WxPay Warn: missing required option: #{name}") unless options.has_key?(name)
end
end
def xmlify_payload(params, sign_type = WxPay::Sign::SIGN_TYPE_MD5)
sign = WxPay::Sign.generate(params, sign_type)
"<xml>#{params.except(:key).sort.map { |k, v| "<#{k}>#{v}</#{k}>" }.join}<sign>#{sign}</sign></xml>"
end
def make_payload(params, sign_type = WxPay::Sign::SIGN_TYPE_MD5)
# TODO: Move this out
if WxPay.sandbox_mode? && !WxPay.manual_get_sandbox_key?
r = get_sandbox_signkey
if r['return_code'] == WxPay::Result::SUCCESS_FLAG
params = params.merge(
mch_id: r['mch_id'] || WxPay.mch_id,
key: r['sandbox_signkey']
)
else
warn("WxPay Warn: fetch sandbox sign key failed #{r['return_msg']}")
end
end
xmlify_payload(params, sign_type)
end
def invoke_remote(url, payload, options = {})
options = WxPay.extra_rest_client_options.merge(options)
gateway_url = options.delete(:gateway_url) || get_gateway_url
url = "#{gateway_url}#{url}"
RestClient::Request.execute(
{
method: :post,
url: url,
payload: payload,
headers: { content_type: 'application/xml' }
}.merge(options)
)
end
end
end
end
feat: add profit sharing
require 'rest_client'
require 'json'
require 'cgi'
require 'securerandom'
require 'active_support/core_ext/hash/conversions'
module WxPay
module Service
GATEWAY_URL = 'https://api.mch.weixin.qq.com'.freeze
SANDBOX_GATEWAY_URL = 'https://api.mch.weixin.qq.com/sandboxnew'.freeze
FRAUD_GATEWAY_URL = 'https://fraud.mch.weixin.qq.com'.freeze
def self.generate_authorize_url(redirect_uri, state = nil)
state ||= SecureRandom.hex 16
"https://open.weixin.qq.com/connect/oauth2/authorize?appid=#{WxPay.appid}&redirect_uri=#{CGI::escape redirect_uri}&response_type=code&scope=snsapi_base&state=#{state}"
end
def self.authenticate(authorization_code, options = {})
options = WxPay.extra_rest_client_options.merge(options)
payload = {
appid: options.delete(:appid) || WxPay.appid,
secret: options.delete(:appsecret) || WxPay.appsecret,
code: authorization_code,
grant_type: 'authorization_code'
}
url = "https://api.weixin.qq.com/sns/oauth2/access_token"
::JSON.parse(RestClient::Request.execute(
{
method: :get,
headers: {params: payload},
url: url
}.merge(options)
), quirks_mode: true)
end
def self.get_sandbox_signkey(mch_id = WxPay.mch_id, options = {})
params = {
mch_id: mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/getsignkey", xmlify_payload(params))))
yield r if block_given?
r
end
def self.authenticate_from_weapp(js_code, options = {})
options = WxPay.extra_rest_client_options.merge(options)
payload = {
appid: options.delete(:appid) || WxPay.appid,
secret: options.delete(:appsecret) || WxPay.appsecret,
js_code: js_code,
grant_type: 'authorization_code'
}
url = "https://api.weixin.qq.com/sns/jscode2session"
::JSON.parse(RestClient::Request.execute(
{
method: :get,
headers: {params: payload},
url: url
}.merge(options)
), quirks_mode: true)
end
INVOKE_UNIFIEDORDER_REQUIRED_FIELDS = [:body, :out_trade_no, :total_fee, :spbill_create_ip, :notify_url, :trade_type]
def self.invoke_unifiedorder(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_UNIFIEDORDER_REQUIRED_FIELDS)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/unifiedorder", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_CLOSEORDER_REQUIRED_FIELDS = [:out_trade_no]
def self.invoke_closeorder(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_CLOSEORDER_REQUIRED_FIELDS)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/closeorder", make_payload(params), options)))
yield r if block_given?
r
end
GENERATE_APP_PAY_REQ_REQUIRED_FIELDS = [:prepayid, :noncestr]
def self.generate_app_pay_req(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
partnerid: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
package: 'Sign=WXPay',
timestamp: Time.now.to_i.to_s
}.merge(params)
check_required_options(params, GENERATE_APP_PAY_REQ_REQUIRED_FIELDS)
params[:sign] = WxPay::Sign.generate(params)
params
end
GENERATE_JS_PAY_REQ_REQUIRED_FIELDS = [:prepayid, :noncestr]
def self.generate_js_pay_req(params, options = {})
check_required_options(params, GENERATE_JS_PAY_REQ_REQUIRED_FIELDS)
params = {
appId: options.delete(:appid) || WxPay.appid,
package: "prepay_id=#{params.delete(:prepayid)}",
key: options.delete(:key) || WxPay.key,
nonceStr: params.delete(:noncestr),
timeStamp: Time.now.to_i.to_s,
signType: 'MD5'
}.merge(params)
params[:paySign] = WxPay::Sign.generate(params)
params
end
INVOKE_REFUND_REQUIRED_FIELDS = [:out_refund_no, :total_fee, :refund_fee, :op_user_id]
# out_trade_no 和 transaction_id 是二选一(必填)
def self.invoke_refund(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', ''),
}.merge(params)
params[:op_user_id] ||= params[:mch_id]
check_required_options(params, INVOKE_REFUND_REQUIRED_FIELDS)
warn("WxPay Warn: missing required option: out_trade_no or transaction_id must have one") if ([:out_trade_no, :transaction_id] & params.keys) == []
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/secapi/pay/refund", make_payload(params), options)))
yield r if block_given?
r
end
REFUND_QUERY_REQUIRED_FIELDS = [:out_trade_no]
def self.refund_query(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, ORDER_QUERY_REQUIRED_FIELDS)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/refundquery", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_TRANSFER_REQUIRED_FIELDS = [:partner_trade_no, :openid, :check_name, :amount, :desc, :spbill_create_ip]
def self.invoke_transfer(params, options = {})
params = {
mch_appid: options.delete(:appid) || WxPay.appid,
mchid: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key
}.merge(params)
check_required_options(params, INVOKE_TRANSFER_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/promotion/transfers", make_payload(params), options)))
yield r if block_given?
r
end
GETTRANSFERINFO_FIELDS = [:partner_trade_no]
def self.gettransferinfo(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key
}.merge(params)
check_required_options(params, GETTRANSFERINFO_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaymkttransfers/gettransferinfo", make_payload(params), options)))
yield r if block_given?
r
end
# 获取加密银行卡号和收款方用户名的RSA公钥
def self.risk_get_public_key(options = {})
params = {
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key,
sign_type: 'MD5'
}
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE,
gateway_url: FRAUD_GATEWAY_URL
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/risk/getpublickey", make_payload(params), options)))
yield r if block_given?
r
end
PAY_BANK_FIELDS = [:enc_bank_no, :enc_true_name, :bank_code, :amount, :desc]
def self.pay_bank(params, options = {})
params = {
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key,
}.merge(params)
check_required_options(params, PAY_BANK_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaysptrans/pay_bank", make_payload(params), options)))
yield r if block_given?
r
end
QUERY_BANK_FIELDS = [:partner_trade_no]
def self.query_bank(params, options = {})
params = {
mch_id: options.delete(:mch_id) || WxPay.mch_id,
nonce_str: SecureRandom.uuid.tr('-', ''),
key: options.delete(:key) || WxPay.key,
}.merge(params)
check_required_options(params, QUERY_BANK_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/mmpaysptrans/query_bank", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_REVERSE_REQUIRED_FIELDS = [:out_trade_no]
def self.invoke_reverse(params, options = {})
params = {
appid: options.delete(:appid) || WxPay.appid,
mch_id: options.delete(:mch_id) || WxPay.mch_id,
key: options.delete(:key) || WxPay.key,
nonce_str: SecureRandom.uuid.tr('-', '')
}.merge(params)
check_required_options(params, INVOKE_REVERSE_REQUIRED_FIELDS)
options = {
ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
verify_ssl: OpenSSL::SSL::VERIFY_NONE
}.merge(options)
r = WxPay::Result.new(Hash.from_xml(invoke_remote("/secapi/pay/reverse", make_payload(params), options)))
yield r if block_given?
r
end
INVOKE_MICROPAY_REQUIRED_FIELDS = [:body, :out_trade_no, :total_fee, :spbill_create_ip, :auth_code]
# Submits a micropay (customer-present barcode) payment.
#
# params  - request fields; see INVOKE_MICROPAY_REQUIRED_FIELDS.
# options - may override :appid, :mch_id, :key, :apiclient_cert,
#           :apiclient_key plus extra RestClient options.
#
# Yields the WxPay::Result when a block is given; returns the result.
def self.invoke_micropay(params, options = {})
  defaults = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    key: options.delete(:key) || WxPay.key,
    nonce_str: SecureRandom.uuid.tr('-', '')
  }
  params = defaults.merge(params)
  check_required_options(params, INVOKE_MICROPAY_REQUIRED_FIELDS)
  ssl_defaults = {
    ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
    ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
    verify_ssl: OpenSSL::SSL::VERIFY_NONE
  }
  options = ssl_defaults.merge(options)
  response = invoke_remote("/pay/micropay", make_payload(params), options)
  result = WxPay::Result.new(Hash.from_xml(response))
  yield result if block_given?
  result
end
ORDER_QUERY_REQUIRED_FIELDS = [:out_trade_no]
# Queries an order's payment status.
#
# params  - request fields; :out_trade_no is required.
# options - may override :appid, :mch_id, :key plus extra RestClient options.
#
# BUGFIX: required-field validation previously ran AFTER the remote call,
# so a request with missing fields was still sent to the gateway before the
# debug warning fired. Validate first, matching every other method here.
#
# Yields the WxPay::Result when a block is given; returns the result.
def self.order_query(params, options = {})
  params = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    key: options.delete(:key) || WxPay.key,
    nonce_str: SecureRandom.uuid.tr('-', '')
  }.merge(params)
  check_required_options(params, ORDER_QUERY_REQUIRED_FIELDS)
  r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/orderquery", make_payload(params), options)))
  yield r if block_given?
  r
end
DOWNLOAD_BILL_REQUIRED_FIELDS = [:bill_date, :bill_type]
# Downloads the reconciliation bill for a given date.
#
# params  - request fields; :bill_date and :bill_type are required.
# options - may override :appid, :mch_id, :key plus extra RestClient options.
#
# Returns the raw response body (not a WxPay::Result — the bill endpoint
# responds with plain text, not XML). Yields it when a block is given.
def self.download_bill(params, options = {})
  defaults = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    key: options.delete(:key) || WxPay.key,
    nonce_str: SecureRandom.uuid.tr('-', ''),
  }
  params = defaults.merge(params)
  check_required_options(params, DOWNLOAD_BILL_REQUIRED_FIELDS)
  response = invoke_remote("/pay/downloadbill", make_payload(params), options)
  yield response if block_given?
  response
end
DOWNLOAD_FUND_FLOW_REQUIRED_FIELDS = [:bill_date, :account_type]
# Downloads the fund-flow statement for a given date and account type.
#
# params  - request fields; :bill_date and :account_type are required.
# options - may override :appid, :mch_id, :key, :apiclient_cert,
#           :apiclient_key plus extra RestClient options.
#
# This endpoint mandates HMAC-SHA256 signing and a client certificate.
# Returns the raw response body; yields it when a block is given.
def self.download_fund_flow(params, options = {})
  defaults = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    nonce_str: SecureRandom.uuid.tr('-', ''),
    key: options.delete(:key) || WxPay.key
  }
  params = defaults.merge(params)
  check_required_options(params, DOWNLOAD_FUND_FLOW_REQUIRED_FIELDS)
  ssl_defaults = {
    ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
    ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
    verify_ssl: OpenSSL::SSL::VERIFY_NONE
  }
  options = ssl_defaults.merge(options)
  response = invoke_remote("/pay/downloadfundflow", make_payload(params, WxPay::Sign::SIGN_TYPE_HMAC_SHA256), options)
  yield response if block_given?
  response
end
# Sends a group (fission) red packet via the marketing-transfers API.
# Note this endpoint uses :wxappid rather than :appid in the payload.
#
# NOTE(review): no required-field validation is performed here; the original
# commented-out check referenced the micropay field list, which does not
# apply to this endpoint.
#
# Yields the WxPay::Result when a block is given; returns the result.
def self.sendgroupredpack(params, options={})
  defaults = {
    wxappid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    key: options.delete(:key) || WxPay.key,
    nonce_str: SecureRandom.uuid.tr('-', '')
  }
  params = defaults.merge(params)
  ssl_defaults = {
    ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
    ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
    verify_ssl: OpenSSL::SSL::VERIFY_NONE
  }
  options = ssl_defaults.merge(options)
  response = invoke_remote("/mmpaymkttransfers/sendgroupredpack", make_payload(params), options)
  result = WxPay::Result.new(Hash.from_xml(response))
  yield result if block_given?
  result
end
# Sends a single red packet via the marketing-transfers API.
# Note this endpoint uses :wxappid rather than :appid in the payload.
#
# NOTE(review): no required-field validation is performed here (the original
# commented-out check used the unrelated micropay field list).
#
# Yields the WxPay::Result when a block is given; returns the result.
def self.sendredpack(params, options={})
  defaults = {
    wxappid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    key: options.delete(:key) || WxPay.key,
    nonce_str: SecureRandom.uuid.tr('-', '')
  }
  params = defaults.merge(params)
  ssl_defaults = {
    ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
    ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
    verify_ssl: OpenSSL::SSL::VERIFY_NONE
  }
  options = ssl_defaults.merge(options)
  response = invoke_remote("/mmpaymkttransfers/sendredpack", make_payload(params), options)
  result = WxPay::Result.new(Hash.from_xml(response))
  yield result if block_given?
  result
end
# Lets a merchant query the details of an already-issued red packet;
# supports both normal and fission (group) red packets.
GETHBINFO_FIELDS = [:mch_billno, :bill_type]
# params  - request fields; :mch_billno and :bill_type are required.
# options - may override :appid, :mch_id, :key, :apiclient_cert,
#           :apiclient_key plus extra RestClient options.
#
# Yields the WxPay::Result when a block is given; returns the result.
def self.gethbinfo(params, options = {})
  defaults = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    nonce_str: SecureRandom.uuid.tr('-', ''),
    key: options.delete(:key) || WxPay.key
  }
  params = defaults.merge(params)
  check_required_options(params, GETHBINFO_FIELDS)
  ssl_defaults = {
    ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
    ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
    verify_ssl: OpenSSL::SSL::VERIFY_NONE
  }
  options = ssl_defaults.merge(options)
  response = invoke_remote("/mmpaymkttransfers/gethbinfo", make_payload(params), options)
  result = WxPay::Result.new(Hash.from_xml(response))
  yield result if block_given?
  result
end
PROFITSHARINGADDRECEIVER = [:nonce_str, :receiver]
# Adds a profit-sharing receiver.
#
# params  - request fields; :receiver is required (nonce_str is supplied).
# options - may override :appid, :mch_id, :key plus extra RestClient options.
#
# Signed with HMAC-SHA256 as this endpoint requires.
# Yields the WxPay::Result when a block is given; returns the result.
def self.profitsharingaddreceiver(params, options = {})
  defaults = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    nonce_str: SecureRandom.uuid.tr('-', ''),
    key: options.delete(:key) || WxPay.key
  }
  params = defaults.merge(params)
  check_required_options(params, PROFITSHARINGADDRECEIVER)
  response = invoke_remote("/pay/profitsharingaddreceiver", make_payload(params, WxPay::Sign::SIGN_TYPE_HMAC_SHA256), options)
  result = WxPay::Result.new(Hash.from_xml(response))
  yield result if block_given?
  result
end
PROFITSHARINGREMOVERECEIVER = [:nonce_str, :receiver]
# Removes a profit-sharing receiver.
#
# params  - request fields; :receiver is required (nonce_str is supplied).
# options - may override :appid, :mch_id, :key plus extra RestClient options.
#
# BUGFIX: the required-field check previously validated against
# PROFITSHARINGADDRECEIVER (a copy-paste slip); it now uses this method's
# own PROFITSHARINGREMOVERECEIVER constant. The two lists are currently
# identical, so behavior is unchanged, but they can now diverge safely.
#
# Yields the WxPay::Result when a block is given; returns the result.
def self.profitsharingremovereceiver(params, options = {})
  params = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    nonce_str: SecureRandom.uuid.tr('-', ''),
    key: options.delete(:key) || WxPay.key
  }.merge(params)
  check_required_options(params, PROFITSHARINGREMOVERECEIVER)
  r = WxPay::Result.new(Hash.from_xml(invoke_remote("/pay/profitsharingremovereceiver", make_payload(params, WxPay::Sign::SIGN_TYPE_HMAC_SHA256), options)))
  yield r if block_given?
  r
end
# Single profit-sharing request: splits a settled transaction among the
# configured receivers.
PROFITSHARING = [:nonce_str, :receivers, :transaction_id, :out_order_no]
# params  - request fields; see PROFITSHARING for the required keys.
# options - may override :appid, :mch_id, :key, :apiclient_cert,
#           :apiclient_key plus extra RestClient options.
#
# Secure endpoint: client certificate plus HMAC-SHA256 signing.
# Yields the WxPay::Result when a block is given; returns the result.
def self.profitsharing(params, options = {})
  defaults = {
    appid: options.delete(:appid) || WxPay.appid,
    mch_id: options.delete(:mch_id) || WxPay.mch_id,
    nonce_str: SecureRandom.uuid.tr('-', ''),
    key: options.delete(:key) || WxPay.key
  }
  params = defaults.merge(params)
  check_required_options(params, PROFITSHARING)
  ssl_defaults = {
    ssl_client_cert: options.delete(:apiclient_cert) || WxPay.apiclient_cert,
    ssl_client_key: options.delete(:apiclient_key) || WxPay.apiclient_key,
    verify_ssl: OpenSSL::SSL::VERIFY_NONE
  }
  options = ssl_defaults.merge(options)
  response = invoke_remote("/secapi/pay/profitsharing", make_payload(params, WxPay::Sign::SIGN_TYPE_HMAC_SHA256), options)
  result = WxPay::Result.new(Hash.from_xml(response))
  yield result if block_given?
  result
end
class << self
private
# Returns the gateway base URL, honoring sandbox mode.
def get_gateway_url
  WxPay.sandbox_mode? ? SANDBOX_GATEWAY_URL : GATEWAY_URL
end
# Warns (debug mode only) about any required request field missing from
# the options hash. Purely advisory — never raises.
def check_required_options(options, names)
  return unless WxPay.debug_mode?
  missing = names.reject { |name| options.has_key?(name) }
  missing.each { |name| warn("WxPay Warn: missing required option: #{name}") }
end
# Serializes params (minus the secret :key, sorted by key) into the XML
# request body, appending the computed signature.
def xmlify_payload(params, sign_type = WxPay::Sign::SIGN_TYPE_MD5)
  sign = WxPay::Sign.generate(params, sign_type)
  fields = params.except(:key).sort.map { |k, v| "<#{k}>#{v}</#{k}>" }
  "<xml>#{fields.join}<sign>#{sign}</sign></xml>"
end
# Builds the signed XML payload. In sandbox mode (unless the caller manages
# the sandbox key manually) it first swaps in the sandbox signing key.
# TODO: Move this out
def make_payload(params, sign_type = WxPay::Sign::SIGN_TYPE_MD5)
  if WxPay.sandbox_mode? && !WxPay.manual_get_sandbox_key?
    key_response = get_sandbox_signkey
    if key_response['return_code'] != WxPay::Result::SUCCESS_FLAG
      # Best effort: fall through and sign with the configured key.
      warn("WxPay Warn: fetch sandbox sign key failed #{key_response['return_msg']}")
    else
      params = params.merge(
        mch_id: key_response['mch_id'] || WxPay.mch_id,
        key: key_response['sandbox_signkey']
      )
    end
  end
  xmlify_payload(params, sign_type)
end
# POSTs the XML payload to the gateway and returns the raw response.
# options may carry :gateway_url to override the default gateway, plus any
# RestClient::Request options (SSL certs, timeouts, ...).
def invoke_remote(url, payload, options = {})
  merged = WxPay.extra_rest_client_options.merge(options)
  gateway = merged.delete(:gateway_url) || get_gateway_url
  request = {
    method: :post,
    url: "#{gateway}#{url}",
    payload: payload,
    headers: { content_type: 'application/xml' }
  }
  RestClient::Request.execute(request.merge(merged))
end
end
end
end
|
# frozen_string_literal: true
module XmlHasher
  # A lightweight XML tree node used while converting a document to a Hash.
  class Node
    attr_accessor :name, :attributes, :children, :text

    # name - the tag name of this node (symbol or string).
    def initialize(name)
      @name = name
      @attributes = {}
      @children = []
    end

    # Converts the subtree rooted at this node into { name => content },
    # collapsing empty content (no text, attributes or children) to nil.
    def to_hash
      value = content
      value = nil if value.empty?
      { name => value }
    end

    private

    # Non-empty text wins; otherwise merge attributes with child data.
    def content
      return text unless text.nil? || text.empty?
      prepare_attributes.merge(prepare_children)
    end

    # Attributes with nil or blank values are dropped.
    def prepare_attributes
      result = {}
      attributes.each do |key, value|
        result[key] = value unless value.nil? || value.to_s.empty?
      end
      result
    end

    # A single child is inlined; repeated tag names collapse into arrays.
    def prepare_children
      return children.first.to_hash if children.size == 1
      children.group_by(&:name).each_with_object({}) do |(key, nodes), data|
        if nodes.length == 1
          data.merge!(nodes.first.to_hash)
        else
          data[key] = nodes.map { |node| node.to_hash[node.name] }
        end
      end
    end
  end
end
Small improvement: build Node#to_hash in a single expression instead of mutating the result hash (also fixes the `retult` local-variable typo)
# frozen_string_literal: true
module XmlHasher
  # One node of an XML tree, convertible (with its subtree) into a Hash.
  class Node
    attr_accessor :name, :attributes, :children, :text

    # name - the tag name of this node (symbol or string).
    def initialize(name)
      @name = name
      @attributes = {}
      @children = []
    end

    # { name => content }, where content collapses to nil when the node has
    # no text, no attributes and no children.
    def to_hash
      data = content
      { name => (data.empty? ? nil : data) }
    end

    private

    # Prefer non-empty text; otherwise combine attributes and child data.
    def content
      (text && !text.empty?) ? text : prepare_attributes.merge(prepare_children)
    end

    # Keep only attributes whose value is present and non-blank.
    def prepare_attributes
      attributes.reject { |_key, value| value.nil? || value.to_s.empty? }
    end

    # One child is inlined directly; duplicate tag names become arrays.
    def prepare_children
      if children.size == 1
        children.first.to_hash
      else
        children.group_by(&:name).each_with_object({}) do |(key, nodes), data|
          next data.merge!(nodes.first.to_hash) if nodes.length == 1
          data[key] = nodes.map { |node| node.to_hash[node.name] }
        end
      end
    end
  end
end
|
# Yasuri gem namespace — holds only the release version constant here.
module Yasuri
# Gem version string, bumped on each release.
VERSION = "2.0.11"
end
Update version for 2.0.12
# Yasuri gem namespace — holds only the release version constant here.
module Yasuri
# Gem version string, bumped on each release.
VERSION = "2.0.12"
end
|
require "zebra/zpl/printable"
module Zebra
  module Zpl
    # A text element on a ZPL label. Position (x/y) and data come from the
    # shared Printable mixin; this class adds font sizing, multipliers and
    # print-mode handling plus the ZPL rendering itself.
    #
    # FIX: the `print_mode` reader was defined twice with identical bodies;
    # the redundant second definition has been removed (no behavior change).
    class Text
      include Printable

      # Validated font size, used for the ^CF command in #to_zpl.
      attr_reader :font_size

      def font_size=(f)
        FontSize.validate_font_size f
        @font_size = f
      end

      def print_mode=(mode)
        PrintMode.validate_mode mode
        @print_mode = mode
      end

      # Defaults to NORMAL when never assigned.
      def print_mode
        @print_mode || PrintMode::NORMAL
      end

      # Horizontal scale factor; defaults to 1x.
      def h_multiplier
        @h_multiplier || HorizontalMultiplier::VALUE_1
      end

      # Vertical scale factor; defaults to 1x.
      def v_multiplier
        @v_multiplier || VerticalMultiplier::VALUE_1
      end

      def h_multiplier=(multiplier)
        HorizontalMultiplier.validate_multiplier multiplier
        @h_multiplier = multiplier
      end

      def v_multiplier=(multiplier)
        VerticalMultiplier.validate_multiplier multiplier
        @v_multiplier = multiplier
      end

      # Renders this element as a ZPL command string.
      def to_zpl
        check_attributes
        # ["A#{x}", y, rotation, font_size, h_multiplier, v_multiplier, print_mode, "\"#{data}\""].join(",")
        # "^FO25,25^FB600,100,0,C,0^FDFoo^FS"
        "^CF0,#{font_size},#{font_size}^FO#{x},#{y}^FB600,4,0,L,0^FD#{data}^FS"
      end

      private

      # Extends Printable's validation: font_size is mandatory.
      def check_attributes
        super
        raise MissingAttributeError.new("the font_size to be used is not given") unless @font_size
      end
    end
  end
end
Add justification attribute to text label builder
require "zebra/zpl/printable"
module Zebra
  module Zpl
    # A text element on a ZPL label. Position (x/y), data and justification
    # come from the shared Printable mixin; this class adds font sizing,
    # multipliers and print-mode handling plus the ZPL rendering itself.
    #
    # FIX: the `print_mode` reader was defined twice with identical bodies;
    # the redundant second definition has been removed (no behavior change).
    class Text
      include Printable

      # Validated font size, used for the ^CF command in #to_zpl.
      attr_reader :font_size

      def font_size=(f)
        FontSize.validate_font_size f
        @font_size = f
      end

      def print_mode=(mode)
        PrintMode.validate_mode mode
        @print_mode = mode
      end

      # Defaults to NORMAL when never assigned.
      def print_mode
        @print_mode || PrintMode::NORMAL
      end

      # Horizontal scale factor; defaults to 1x.
      def h_multiplier
        @h_multiplier || HorizontalMultiplier::VALUE_1
      end

      # Vertical scale factor; defaults to 1x.
      def v_multiplier
        @v_multiplier || VerticalMultiplier::VALUE_1
      end

      def h_multiplier=(multiplier)
        HorizontalMultiplier.validate_multiplier multiplier
        @h_multiplier = multiplier
      end

      def v_multiplier=(multiplier)
        VerticalMultiplier.validate_multiplier multiplier
        @v_multiplier = multiplier
      end

      # Renders this element as a ZPL command string; `justification` is
      # presumably supplied by Printable — TODO confirm against the mixin.
      def to_zpl
        check_attributes
        # ["A#{x}", y, rotation, font_size, h_multiplier, v_multiplier, print_mode, "\"#{data}\""].join(",")
        # "^FO25,25^FB600,100,0,C,0^FDFoo^FS"
        "^CF0,#{font_size},#{font_size}^FO#{x},#{y}^FB800,4,0,#{justification},0^FD#{data}^FS"
      end

      private

      # Extends Printable's validation: font_size is mandatory.
      def check_attributes
        super
        raise MissingAttributeError.new("the font_size to be used is not given") unless @font_size
      end
    end
  end
end
|
module ZK
  module Group
    @@mutex = Mutex.new unless defined?(@@mutex)

    DEFAULT_ROOT = '/_zk/groups'

    # @private
    DEFAULT_PREFIX = 'm'.freeze

    class << self
      # @private
      def mutex
        @@mutex
      end

      # The path under which all groups will be created.
      # defaults to DEFAULT_ROOT if not set
      def zk_root
        @@mutex.synchronize { @@zk_root ||= DEFAULT_ROOT }
      end

      # Sets the default global zk root path.
      def zk_root=(path)
        @@mutex.synchronize { @@zk_root = path.dup.freeze }
      end
    end

    def self.new(*args)
      ZK::Group::Group.new(*args)
    end

    # The basis for forming different kinds of Groups with customizable
    # membership policies.
    class Group
      extend Forwardable
      include Logging
      include Common

      def_delegators :@mutex, :synchronize
      protected :synchronize

      # the ZK Client instance
      attr_reader :zk

      # the name for this group
      attr_reader :name

      # the absolute root path of this group, generally, this can be left at the default
      attr_reader :root

      # the combination of `"#{root}/#{name}"`
      attr_reader :path

      # @return [ZK::Stat] the stat from the last time we either set or retrieved
      #   data from the server.
      # @private
      attr_accessor :last_stat

      # Prefix used for creating sequential nodes under {#path} that represent membership.
      # The default is 'm', so for the path `/_zk/groups/foo` a member path would look like
      # `/zkgroups/foo/m000000078`
      #
      # @return [String] the prefix
      attr_accessor :prefix

      def initialize(zk, name, opts={})
        @orig_zk = zk
        @zk = GroupExceptionTranslator.new(zk, self)
        raise ArgumentError, "name must not be nil" unless name
        @name = name.to_s
        @root = opts[:root] || ZK::Group.zk_root
        @prefix = opts[:prefix] || DEFAULT_PREFIX
        @path = File.join(@root, @name)
        # Monitor (re-entrant), so methods holding the lock may call each other.
        @mutex = Monitor.new
        @created = false
        @known_members = []
        @membership_subscriptions = []

        # ThreadedCallback will queue calls to the block and deliver them one at a time
        # on their own thread. This guarantees order and simplifies locking.
        @broadcast_callback = ThreadedCallback.new { |event| broadcast_membership_change!(event) }

        @membership_ev_sub = zk.register(path, :only => :child) do |event|
          @broadcast_callback.call(event)
        end

        @on_connected_sub = zk.on_connected do |event|
          @broadcast_callback.call(event)
        end

        validate!
      end

      # stop receiving event notifications, tracking membership changes, etc.
      # XXX: what about memberships?
      def close
        synchronize do
          return unless @created
          @created = false
          @broadcast_callback.shutdown
          @on_connected_sub.unsubscribe
          @membership_ev_sub.unsubscribe
          @known_members.clear
          @membership_subscriptions.each(&:unsubscribe)
        end
      end

      # this is "are we set up" not "did *we* create the group"
      def created?
        synchronize { !!@created }
      end

      # does the group exist already?
      def exists?
        zk.exists?(path)
      end

      # creates this group, does not raise an exception if the group already
      # exists.
      #
      # @return [String,nil] String containing the path of this group if
      #   created, nil if group already exists
      #
      # @overload create()
      #   creates this group with empty data
      #
      # @overload create(data)
      #   creates this group with the given data. if the group already exists
      #   the data will not be written.
      #
      #   @param [String] data the data to be set for this group
      #
      def create(*args)
        synchronize do
          begin
            create!(*args)
          rescue Exceptions::GroupAlreadyExistsError
            # ok, this is a little odd, if you call create! and it fails, semantically
            # in this method we're supposed to catch the exception and return. The problem
            # is that the @known_members and @last_stat won't have been set up. we want
            # both of these methods available, so we need to set that state up here, but
            # only if create! fails in this particular way
            @created = true
            broadcast_membership_change!
            nil
          end
        end
      end

      # same as {#create} but raises an exception if the group already exists
      #
      # @raise [Exceptions::GroupAlreadyExistsError] if the group already exists
      def create!(*args)
        ensure_root_exists!

        data = args.empty? ? '' : args.first

        synchronize do
          zk.create(path, data).tap do
            logger.debug { "create!(#{path.inspect}, #{data.inspect}) succeeded, setting initial state" }
            @created = true
            broadcast_membership_change!
          end
        end
      end

      # Creates a Member object that represents 'belonging' to this group.
      #
      # The basic behavior is creating a unique path under the {#path} (using
      # a sequential, ephemeral node).
      #
      # You may receive notification that the member was created before this method
      # returns your Member instance. "heads up"
      #
      # @overload join(opts={})
      #   join the group and set the node's data to blank
      #
      #   @option opts [Class] :member_class (ZK::Group::Member) an alternate
      #     class to manage membership in the group. if this is set to nil,
      #     no Member will be created and just the created path will be
      #     returned
      #
      # @overload join(data, opts={})
      #   join the group and set the node's initial data
      #
      #   @option opts [Class] :member_class (ZK::Group::Member) an alternate
      #     class to manage membership in the group. If this is set to nil,
      #     no Member will be created and just the created path will be
      #     returned
      #
      #   @param data [String] (nil) the data this node should have to start
      #     with, default is no data
      #
      # @return [Member] used to control a single member of the group
      #
      def join(*args)
        opts = args.extract_options!
        data = args.first || ''

        member_class = opts.fetch(:member_class, Member)

        member_path = zk.create("#{path}/#{prefix}", data, :sequence => true, :ephemeral => true)

        member_class ? member_class.new(@orig_zk, self, member_path) : member_path
      end

      # returns the current list of member names, sorted.
      #
      # @option opts [true,false] :absolute (false) return member information
      #   as absolute znode paths.
      #
      # @option opts [true,false] :watch (true) causes a watch to be set on
      #   this group's znode for child changes. This will cause the on_membership_change
      #   callback to be triggered, when delivered.
      #
      def member_names(opts={})
        watch = opts.fetch(:watch, true)
        absolute = opts.fetch(:absolute, false)

        zk.children(path, :watch => watch).sort.tap do |rval|
          rval.map! { |n| File.join(path, n) } if absolute
        end
      end

      # Register a block to be called back when the group membership changes.
      #
      # Notifications will be delivered concurrently (i.e. using the client's
      # threadpool), but serially. In other words, when notification is
      # delivered to us that the group membership has changed, we queue up
      # notifications for all callbacks before handling the next event. This
      # way each callback will see the same sequence of updates every other
      # callback sees in order. They just may receive the notifications at
      # different times.
      #
      # @note Due to the way ZooKeeper works, it's possible that you may not see every
      #   change to the membership of the group. That is *very* important to know.
      #   ZooKeeper _may batch updates_, so you can see a jump of members, especially
      #   if they're added very quickly. DO NOT assume you will receive a callback for _each
      #   individual membership added_.
      #
      # @options opts [true,false] :absolute (false) block will be called with members
      #   as absolute paths
      #
      # @yield [last_members,current_members] called when membership of the
      #   current group changes.
      #
      # @yieldparam [Array] last_members the last known membership list of the group
      #
      # @yieldparam [Array] current_members the list of members just retrieved from zookeeper
      #
      def on_membership_change(opts={}, &blk)
        MembershipSubscription.new(self, opts, blk).tap do |ms|
          # the watch is registered in create!
          synchronize { @membership_subscriptions << ms }
        end
      end

      # called by the MembershipSubscription object to deregister itself
      # @private
      def unregister(subscription)
        synchronize do
          @membership_subscriptions.delete(subscription)
        end
        nil
      end

      # @private
      def broadcast_membership_change!(_ignored=nil)
        # FIX: hold the group lock for the entire broadcast. @known_members is
        # read and replaced here while close/create! mutate the same state from
        # other threads; without the lock two queued events could interleave.
        # @mutex is a Monitor (re-entrant), so the created? call below is safe.
        synchronize do
          logger.debug { "#{__method__} received event #{_ignored.inspect}" }

          # we might get an on_connected event before creation
          unless created?
            logger.debug { "uh, created? #{created?} so returning" }
            return
          end

          last_members, @known_members = @known_members, member_names(:watch => true)

          logger.debug { "last_members: #{last_members.inspect}" }
          logger.debug { "@known_members: #{@known_members.inspect}" }

          # we do this check so that on a connected event, we can run this callback
          # without producing false positives
          #
          if last_members == @known_members
            logger.debug { "membership data did not actually change, not notifying" }
          else
            @membership_subscriptions.each do |sub|
              lm, km = last_members.dup, @known_members.dup
              sub.notify(lm, km)
            end
          end
        end
      end

      private
        # Creates a Member instance for this Group. This its own method to allow
        # subclasses to override. By default, uses Member
        def create_member(znode_path, member_klass)
          logger.debug { "created member #{znode_path.inspect} returning object" }
          member_klass.new(@orig_zk, self, znode_path)
        end

        def ensure_root_exists!
          zk.mkdir_p(root)
        end

        def validate!
          raise ArgumentError, "root must start with '/'" unless @root.start_with?('/')
        end
    end # Group
  end # Group
end # ZK
Synchronize access to broadcast_membership_change!
Just to be safe: hold the group lock while reading and replacing the known-members state and notifying subscribers.
module ZK
module Group
# Global lock guarding the module-level zk_root configuration below.
@@mutex = Mutex.new unless defined?(@@mutex)
DEFAULT_ROOT = '/_zk/groups'
# @private
DEFAULT_PREFIX = 'm'.freeze
class << self
# @private
def mutex
@@mutex
end
# The path under which all groups will be created.
# defaults to DEFAULT_ROOT if not set
def zk_root
@@mutex.synchronize { @@zk_root ||= DEFAULT_ROOT }
end
# Sets the default global zk root path.
def zk_root=(path)
@@mutex.synchronize { @@zk_root = path.dup.freeze }
end
end
# Convenience constructor: ZK::Group.new(...) builds a ZK::Group::Group.
def self.new(*args)
ZK::Group::Group.new(*args)
end
# The basis for forming different kinds of Groups with customizable
# membership policies.
class Group
extend Forwardable
include Logging
include Common
# synchronize delegates to @mutex (a Monitor — re-entrant), so methods
# holding the lock may safely call other synchronized methods.
def_delegators :@mutex, :synchronize
protected :synchronize
# the ZK Client instance
attr_reader :zk
# the name for this group
attr_reader :name
# the absolute root path of this group, generally, this can be left at the default
attr_reader :root
# the combination of `"#{root}/#{name}"`
attr_reader :path
# @return [ZK::Stat] the stat from the last time we either set or retrieved
# data from the server.
# @private
attr_accessor :last_stat
# Prefix used for creating sequential nodes under {#path} that represent membership.
# The default is 'm', so for the path `/_zk/groups/foo` a member path would look like
# `/zkgroups/foo/m000000078`
#
# @return [String] the prefix
attr_accessor :prefix
# @param zk   the ZK client (wrapped for exception translation; the raw
#   client is kept in @orig_zk for constructing Member objects)
# @param name [String,#to_s] the group name; must not be nil
# @param opts [Hash] :root and :prefix overrides
def initialize(zk, name, opts={})
@orig_zk = zk
@zk = GroupExceptionTranslator.new(zk, self)
raise ArgumentError, "name must not be nil" unless name
@name = name.to_s
@root = opts[:root] || ZK::Group.zk_root
@prefix = opts[:prefix] || DEFAULT_PREFIX
@path = File.join(@root, @name)
@mutex = Monitor.new
@created = false
@known_members = []
@membership_subscriptions = []
# ThreadedCallback will queue calls to the block and deliver them one at a time
# on their own thread. This guarantees order and simplifies locking.
@broadcast_callback = ThreadedCallback.new { |event| broadcast_membership_change!(event) }
@membership_ev_sub = zk.register(path, :only => :child) do |event|
@broadcast_callback.call(event)
end
@on_connected_sub = zk.on_connected do |event|
@broadcast_callback.call(event)
end
validate!
end
# stop receiving event notifications, tracking membership changes, etc.
# XXX: what about memberships?
def close
synchronize do
return unless @created
@created = false
@broadcast_callback.shutdown
@on_connected_sub.unsubscribe
@membership_ev_sub.unsubscribe
@known_members.clear
@membership_subscriptions.each(&:unsubscribe)
end
end
# this is "are we set up" not "did *we* create the group"
def created?
synchronize { !!@created }
end
# does the group exist already?
def exists?
zk.exists?(path)
end
# creates this group, does not raise an exception if the group already
# exists.
#
# @return [String,nil] String containing the path of this group if
# created, nil if group already exists
#
# @overload create()
# creates this group with empty data
#
# @overload create(data)
# creates this group with the given data. if the group already exists
# the data will not be written.
#
# @param [String] data the data to be set for this group
#
def create(*args)
synchronize do
begin
create!(*args)
rescue Exceptions::GroupAlreadyExistsError
# ok, this is a little odd, if you call create! and it fails, semantically
# in this method we're supposed to catch the exception and return. The problem
# is that the @known_members and @last_stat won't have been set up. we want
# both of these methods available, so we need to set that state up here, but
# only if create! fails in this particular way
@created = true
broadcast_membership_change!
nil
end
end
end
# same as {#create} but raises an exception if the group already exists
#
# @raise [Exceptions::GroupAlreadyExistsError] if the group already exists
def create!(*args)
ensure_root_exists!
data = args.empty? ? '' : args.first
synchronize do
zk.create(path, data).tap do
logger.debug { "create!(#{path.inspect}, #{data.inspect}) succeeded, setting initial state" }
@created = true
broadcast_membership_change!
end
end
end
# Creates a Member object that represents 'belonging' to this group.
#
# The basic behavior is creating a unique path under the {#path} (using
# a sequential, ephemeral node).
#
# You may receive notification that the member was created before this method
# returns your Member instance. "heads up"
#
# @overload join(opts={})
# join the group and set the node's data to blank
#
# @option opts [Class] :member_class (ZK::Group::Member) an alternate
# class to manage membership in the group. if this is set to nil,
# no Member will be created and just the created path will be
# returned
#
# @overload join(data, opts={})
# join the group and set the node's initial data
#
# @option opts [Class] :member_class (ZK::Group::Member) an alternate
# class to manage membership in the group. If this is set to nil,
# no Member will be created and just the created path will be
# returned
#
# @param data [String] (nil) the data this node should have to start
# with, default is no data
#
# @return [Member] used to control a single member of the group
#
def join(*args)
opts = args.extract_options!
data = args.first || ''
member_class = opts.fetch(:member_class, Member)
member_path = zk.create("#{path}/#{prefix}", data, :sequence => true, :ephemeral => true)
member_class ? member_class.new(@orig_zk, self, member_path) : member_path
end
# returns the current list of member names, sorted.
#
# @option opts [true,false] :absolute (false) return member information
# as absolute znode paths.
#
# @option opts [true,false] :watch (true) causes a watch to be set on
# this group's znode for child changes. This will cause the on_membership_change
# callback to be triggered, when delivered.
#
def member_names(opts={})
watch = opts.fetch(:watch, true)
absolute = opts.fetch(:absolute, false)
zk.children(path, :watch => watch).sort.tap do |rval|
rval.map! { |n| File.join(path, n) } if absolute
end
end
# Register a block to be called back when the group membership changes.
#
# Notifications will be delivered concurrently (i.e. using the client's
# threadpool), but serially. In other words, when notification is
# delivered to us that the group membership has changed, we queue up
# notifications for all callbacks before handling the next event. This
# way each callback will see the same sequence of updates every other
# callback sees in order. They just may receive the notifications at
# different times.
#
# @note Due to the way ZooKeeper works, it's possible that you may not see every
# change to the membership of the group. That is *very* important to know.
# ZooKeeper _may batch updates_, so you can see a jump of members, especially
# if they're added very quickly. DO NOT assume you will receive a callback for _each
# individual membership added_.
#
# @options opts [true,false] :absolute (false) block will be called with members
# as absolute paths
#
# @yield [last_members,current_members] called when membership of the
# current group changes.
#
# @yieldparam [Array] last_members the last known membership list of the group
#
# @yieldparam [Array] current_members the list of members just retrieved from zookeeper
#
def on_membership_change(opts={}, &blk)
MembershipSubscription.new(self, opts, blk).tap do |ms|
# the watch is registered in create!
synchronize { @membership_subscriptions << ms }
end
end
# called by the MembershipSubscription object to deregister itself
# @private
def unregister(subscription)
synchronize do
@membership_subscriptions.delete(subscription)
end
nil
end
# @private
def broadcast_membership_change!(_ignored=nil)
# Hold the group lock for the whole broadcast: @known_members is read and
# replaced here while close/create! mutate the same state from other
# threads. @mutex is a Monitor (re-entrant), so created? below is safe.
synchronize do
logger.debug { "#{__method__} received event #{_ignored.inspect}" }
# we might get an on_connected event before creation
unless created?
logger.debug { "uh, created? #{created?} so returning" }
return
end
last_members, @known_members = @known_members, member_names(:watch => true)
logger.debug { "last_members: #{last_members.inspect}" }
logger.debug { "@known_members: #{@known_members.inspect}" }
# we do this check so that on a connected event, we can run this callback
# without producing false positives
#
if last_members == @known_members
logger.debug { "membership data did not actually change, not notifying" }
else
@membership_subscriptions.each do |sub|
lm, km = last_members.dup, @known_members.dup
sub.notify(lm, km)
end
end
end
end
private
# Creates a Member instance for this Group. This its own method to allow
# subclasses to override. By default, uses Member
def create_member(znode_path, member_klass)
logger.debug { "created member #{znode_path.inspect} returning object" }
member_klass.new(@orig_zk, self, znode_path)
end
def ensure_root_exists!
zk.mkdir_p(root)
end
def validate!
raise ArgumentError, "root must start with '/'" unless @root.start_with?('/')
end
end # Group
end # Group
end # ZK
|
[Update] KCUserKit (0.1.14)
# CocoaPods podspec for KCUserKit 0.1.14 (KingsChat user kit for iOS 8+).
# NOTE(review): the summary is still the template placeholder text.
Pod::Spec.new do |s|
s.name = "KCUserKit"
s.version = "0.1.14"
s.summary = "A short description of KCUserKit."
s.license = 'MIT'
s.author = { "Emil Wojtaszek" => "emil@appunite.com" }
s.source = { :git => "git@git.appunite.com:newmedia/kingschat-user-ios.git", :tag => s.version.to_s }
s.platform = :ios, '8.0'
s.requires_arc = true
s.source_files = 'KCUserKit/*.{h,m}'
s.homepage = 'https://www.appunite.com'
# kingschat
s.dependency 'KCEnvironmentKit'
s.dependency 'KCSStorageClient/Core'
# networking
s.dependency 'AFNetworking', '~> 2.6'
s.dependency 'AFgzipRequestSerializer'
# phone validation
s.dependency 'APAddressBook', '~> 0.2'
s.dependency 'libPhoneNumber-iOS', '~> 0.8'
# storage
s.dependency 'Objective-LevelDB-appunite', '~> 2.1'
s.dependency 'Mantle', '~> 2.0'
# others
s.dependency 'Valet', '~> 2.0'
end
|
[Update] KCUserKit (0.1.82)
# CocoaPods podspec for KCUserKit 0.1.82 (KingsChat user kit for iOS 8+).
# NOTE(review): the summary is still the template placeholder text.
Pod::Spec.new do |s|
s.name = "KCUserKit"
s.version = "0.1.82"
s.summary = "A short description of KCUserKit."
s.license = 'MIT'
s.author = { "Emil Wojtaszek" => "emil@appunite.com" }
s.source = { :git => "git@git.appunite.com:newmedia/kingschat-user-ios.git", :tag => s.version.to_s }
s.platform = :ios, '8.0'
s.requires_arc = true
s.source_files = 'KCUserKit/*.{h,m}'
s.homepage = 'https://www.appunite.com'
# kingschat
s.dependency 'KCEnvironmentKit'
s.dependency 'KCSStorageClient/Core'
s.dependency 'KCCountriesKit'
# networking
s.dependency 'AFNetworking', '< 3.0'
s.dependency 'AFgzipRequestSerializer'
# phone validation
s.dependency 'APAddressBook', '~> 0.2.2'
s.dependency 'libPhoneNumber-iOS', '~> 0.8'
# storage
s.dependency 'ObjectiveLevelDBappunite', '~> 2.1'
s.dependency 'Mantle', '~> 2.0'
s.dependency 'FastCoding', '~> 3.2'
# others
s.dependency 'Valet', '~> 2.0'
end
|
require 'formula'
# Homebrew formula for argp-standalone 1.3 — a standalone build of glibc's
# argp argument-parsing functions for non-glibc systems.
class ArgpStandalone < Formula
homepage 'http://www.lysator.liu.se/~nisse/misc/'
url 'http://www.lysator.liu.se/~nisse/misc/argp-standalone-1.3.tar.gz'
sha1 '815c560680ebdc11694b88de2f8ec15133e0bfa0'
def patches
# This patch fixes compilation with Clang.
{:p0 =>
"https://trac.macports.org/export/86556/trunk/dports/devel/argp-standalone/files/patch-argp-fmtstream.h"
}
end
def install
system "./configure", "--prefix=#{prefix}"
system "make install"
# The upstream Makefile does not install the library or header itself,
# so copy them into place explicitly.
lib.install 'libargp.a'
include.install 'argp.h'
end
end
argp-standalone: add 1.3 bottle.
require 'formula'
# Homebrew formula for argp-standalone 1.3 — a standalone build of glibc's
# argp argument-parsing functions for non-glibc systems. Ships bottles for
# Lion through Mavericks.
class ArgpStandalone < Formula
homepage 'http://www.lysator.liu.se/~nisse/misc/'
url 'http://www.lysator.liu.se/~nisse/misc/argp-standalone-1.3.tar.gz'
sha1 '815c560680ebdc11694b88de2f8ec15133e0bfa0'
bottle do
cellar :any
sha1 "3e781159b73d2fbbb22ea626e568904d6f72bd2d" => :mavericks
sha1 "51228d446622730ba12dfa33e83d41ad79678fef" => :mountain_lion
sha1 "58936047ba691811df5aa11dbbb4ed2304ef3b8b" => :lion
end
def patches
# This patch fixes compilation with Clang.
{:p0 =>
"https://trac.macports.org/export/86556/trunk/dports/devel/argp-standalone/files/patch-argp-fmtstream.h"
}
end
def install
system "./configure", "--prefix=#{prefix}"
system "make install"
# The upstream Makefile does not install the library or header itself,
# so copy them into place explicitly.
lib.install 'libargp.a'
include.install 'argp.h'
end
end
|
require "formula"
# Homebrew formula for bash-git-prompt 2.2: an informative, configurable
# git prompt for bash.
class BashGitPrompt < Formula
  homepage "https://github.com/magicmonty/bash-git-prompt"
  url "https://github.com/magicmonty/bash-git-prompt/archive/2.2.tar.gz"
  sha1 "392d430b87639e7f85fde02f2b4a37cb43be9aaf"
  head "https://github.com/magicmonty/bash-git-prompt.git"

  def install
    # Upstream ships plain shell scripts; place them under share so users
    # can source them from their shell startup files (see caveats).
    share.install "gitprompt.sh", "gitprompt.fish", "git-prompt-help.sh",
"gitstatus.sh", "prompt-colors.sh"
    (share/"themes").install Dir["themes/*.bgptheme"]
    doc.install "README.md"
  end

  # Post-install instructions shown to the user.
  def caveats; <<-EOS.undent
You should add the following to your .bashrc (or equivalent):
if [ -f "$(brew --prefix)/opt/bash-git-prompt/share/gitprompt.sh" ]; then
source "$(brew --prefix)/opt/bash-git-prompt/share/gitprompt.sh"
fi
    EOS
  end
end
bash-git-prompt 2.2.1
Closes #33005.
Signed-off-by: Jack Nagel <43386ce32af96f5c56f2a88e458cb94cebee3751@gmail.com>
require "formula"
# Homebrew formula for bash-git-prompt 2.2.1: an informative, configurable
# git prompt for bash.
class BashGitPrompt < Formula
  homepage "https://github.com/magicmonty/bash-git-prompt"
  url "https://github.com/magicmonty/bash-git-prompt/archive/2.2.1.tar.gz"
  sha1 "6eec4b05744d4071f831202eee25edc0821f4e09"
  head "https://github.com/magicmonty/bash-git-prompt.git"

  def install
    # Upstream ships plain shell scripts; place them under share so users
    # can source them from their shell startup files (see caveats).
    share.install "gitprompt.sh", "gitprompt.fish", "git-prompt-help.sh",
"gitstatus.sh", "prompt-colors.sh"
    (share/"themes").install Dir["themes/*.bgptheme"]
    doc.install "README.md"
  end

  # Post-install instructions shown to the user.
  def caveats; <<-EOS.undent
You should add the following to your .bashrc (or equivalent):
if [ -f "$(brew --prefix)/opt/bash-git-prompt/share/gitprompt.sh" ]; then
source "$(brew --prefix)/opt/bash-git-prompt/share/gitprompt.sh"
fi
    EOS
  end
end
|
# Extracts the upstream version string embedded in a bottle filename.
class BottleVersion < Version
  # Regexes tried in order against the bottle filename stem; each carries the
  # version as its first capture group.  Order matters: more specific patterns
  # must be attempted before the generic fallbacks below them, so keep new
  # entries in the right position.
  STEM_PATTERNS = [
    # e.g. perforce-2013.1.610569-x86_64.mountain_lion.bottle.tar.gz
    /-([\d\.]+-x86(_64)?)/,
    # e.g. R14B04 from erlang-r14-R14B04.yosemite.bottle.tar.gz
    /erlang-r\d+-(R\d+B\d+(-\d)?)/,
    # e.g. x264-r2197.4.mavericks.bottle.tar.gz
    # e.g. lz4-r114.mavericks.bottle.tar.gz
    /-(r\d+\.?\d*)/,
    # e.g. 00-5.0.5 from zpython-00-5.0.5.mavericks.bottle.tar.gz
    /(00-\d+\.\d+(\.\d+)+)/,
    # e.g. 13-2.9.19 from libpano-13-2.9.19_1.yosemite.bottle.tar.gz
    /\D+-(\d+-[\d\.]+)/,
    # e.g. 1.6.39 from pazpar2-1.6.39.mavericks.bottle.tar.gz
    /-(\d+\.\d+(\.\d+)+)/,
    # e.g. ssh-copy-id-6.2p2.mountain_lion.bottle.tar.gz
    # e.g. icu4c-52.1.mountain_lion.bottle.tar.gz
    /-(\d+\.(\d)+(p(\d)+)?)/,
    # e.g. 0_5_0 from disco-0_5_0.mavericks.bottle.tar.gz
    /-(\d+_\d+(_\d+)+)/,
    # e.g. 20120731 from fontforge-20120731.mavericks.bottle.tar.gz
    /-(\d{8})/,
    # e.g. 2007f from imap-uw-2007f.yosemite.bottle.tar.gz
    /-(\d+[a-z])/,
    # e.g. 22 from ngircd-22.mavericks.bottle.tar.gz
    /-(\d{2})/,
  ].freeze

  # Returns the version captured from +spec+ (a Pathname or path string),
  # deferring to Version._parse when no pattern matches.
  # Refactored from eleven copy-pasted match/return pairs; the match order
  # and every regex are unchanged.
  def self._parse spec
    spec = Pathname.new(spec) unless spec.is_a? Pathname
    stem = spec.stem
    STEM_PATTERNS.each do |pattern|
      m = pattern.match(stem)
      return m.captures.first unless m.nil?
    end
    super
  end
end
bottle_version: support psutils style
Closes #37959.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
# Extracts the upstream version string embedded in a bottle filename.
class BottleVersion < Version
  # Regexes tried in order against the bottle filename stem; each carries the
  # version as its first capture group.  Order matters: more specific patterns
  # must be attempted before the generic fallbacks below them, so keep new
  # entries in the right position.
  STEM_PATTERNS = [
    # e.g. perforce-2013.1.610569-x86_64.mountain_lion.bottle.tar.gz
    /-([\d\.]+-x86(_64)?)/,
    # e.g. R14B04 from erlang-r14-R14B04.yosemite.bottle.tar.gz
    /erlang-r\d+-(R\d+B\d+(-\d)?)/,
    # e.g. x264-r2197.4.mavericks.bottle.tar.gz
    # e.g. lz4-r114.mavericks.bottle.tar.gz
    /-(r\d+\.?\d*)/,
    # e.g. 00-5.0.5 from zpython-00-5.0.5.mavericks.bottle.tar.gz
    /(00-\d+\.\d+(\.\d+)+)/,
    # e.g. 13-2.9.19 from libpano-13-2.9.19_1.yosemite.bottle.tar.gz
    /\D+-(\d+-[\d\.]+)/,
    # e.g. 1.6.39 from pazpar2-1.6.39.mavericks.bottle.tar.gz
    /-(\d+\.\d+(\.\d+)+)/,
    # e.g. ssh-copy-id-6.2p2.mountain_lion.bottle.tar.gz
    # e.g. icu4c-52.1.mountain_lion.bottle.tar.gz
    /-(\d+\.(\d)+(p(\d)+)?)/,
    # e.g. 0_5_0 from disco-0_5_0.mavericks.bottle.tar.gz
    /-(\d+_\d+(_\d+)+)/,
    # e.g. 20120731 from fontforge-20120731.mavericks.bottle.tar.gz
    /-(\d{8})/,
    # e.g. 2007f from imap-uw-2007f.yosemite.bottle.tar.gz
    /-(\d+[a-z])/,
    # e.g. 22 from ngircd-22.mavericks.bottle.tar.gz
    /-(\d{2})/,
    # e.g. p17 from psutils-p17.yosemite.bottle.tar.gz
    /-(p\d{2})/,
  ].freeze

  # Returns the version captured from +spec+ (a Pathname or path string),
  # deferring to Version._parse when no pattern matches.
  # Refactored from twelve copy-pasted match/return pairs; the match order
  # and every regex are unchanged.
  def self._parse spec
    spec = Pathname.new(spec) unless spec.is_a? Pathname
    stem = spec.stem
    STEM_PATTERNS.each do |pattern|
      m = pattern.match(stem)
      return m.captures.first unless m.nil?
    end
    super
  end
end
|
require "hardware"
require "extend/ENV/shared"
# @private
# Stdenv is Homebrew's "standard" build environment: it exports compiler and
# linker flags directly through environment variables (contrast: superenv).
module Stdenv
  include SharedEnvExtension

  # @private
  SAFE_CFLAGS_FLAGS = "-w -pipe".freeze
  DEFAULT_FLAGS = "-march=core2 -msse4".freeze

  # Populates PATH, pkg-config search paths, MAKEFLAGS and the compiler flag
  # variables for a build, then dispatches to the compiler-specific setup
  # method (e.g. #clang) via send.
  # @private
  def setup_build_environment(formula = nil)
    super
    self["HOMEBREW_ENV"] = "std"
    PATH.new(ENV["HOMEBREW_PATH"]).each { |p| prepend_path "PATH", p }

    # Set the default pkg-config search path, overriding the built-in paths
    # Anything in PKG_CONFIG_PATH is searched before paths in this variable
    self["PKG_CONFIG_LIBDIR"] = determine_pkg_config_libdir

    self["MAKEFLAGS"] = "-j#{make_jobs}"

    unless HOMEBREW_PREFIX.to_s == "/usr/local"
      # /usr/local is already an -isystem and -L directory so we skip it
      self["CPPFLAGS"] = "-isystem#{HOMEBREW_PREFIX}/include"
      self["LDFLAGS"] = "-L#{HOMEBREW_PREFIX}/lib"
      # CMake ignores the variables above
      self["CMAKE_PREFIX_PATH"] = HOMEBREW_PREFIX.to_s
    end

    frameworks = HOMEBREW_PREFIX.join("Frameworks")
    if frameworks.directory?
      append "CPPFLAGS", "-F#{frameworks}"
      append "LDFLAGS", "-F#{frameworks}"
      self["CMAKE_FRAMEWORK_PATH"] = frameworks.to_s
    end

    # Os is the default Apple uses for all its stuff so let's trust them
    define_cflags "-Os #{SAFE_CFLAGS_FLAGS}"

    append "LDFLAGS", "-Wl,-headerpad_max_install_names"

    send(compiler)

    # When building with a versioned GNU GCC, ensure its bin dir is on PATH.
    return unless cc =~ GNU_GCC_REGEXP
    gcc_formula = gcc_version_formula($&)
    append_path "PATH", gcc_formula.opt_bin.to_s
  end
  alias generic_setup_build_environment setup_build_environment

  # Extra pkg-config directories; overridden by OS-specific extensions.
  def homebrew_extra_pkg_config_paths
    []
  end

  # Builds the PKG_CONFIG_LIBDIR value, keeping only directories that exist.
  def determine_pkg_config_libdir
    PATH.new(
      HOMEBREW_PREFIX/"lib/pkgconfig",
      HOMEBREW_PREFIX/"share/pkgconfig",
      homebrew_extra_pkg_config_paths,
      "/usr/lib/pkgconfig",
    ).existing
  end

  # Removes the MAKEFLAGS environment variable, causing make to use a single job.
  # This is useful for makefiles with race conditions.
  # When passed a block, MAKEFLAGS is removed only for the duration of the block and is restored after its completion.
  def deparallelize
    old = self["MAKEFLAGS"]
    remove "MAKEFLAGS", /-j\d+/
    if block_given?
      begin
        yield
      ensure
        self["MAKEFLAGS"] = old
      end
    end
    old
  end

  # Define one method per optimization level (e.g. `ENV.O2`) that replaces
  # the current -O flag in all compiler flag variables.
  %w[O3 O2 O1 O0 Os].each do |opt|
    define_method opt do
      remove_from_cflags(/-O./)
      append_to_cflags "-#{opt}"
    end
  end

  # @private
  def determine_cc
    s = super
    DevelopmentTools.locate(s) || Pathname.new(s)
  end

  # Derives the C++ compiler path from the C compiler path by name rewriting.
  # @private
  def determine_cxx
    dir, base = determine_cc.split
    dir/base.to_s.sub("gcc", "g++").sub("clang", "clang++")
  end

  def gcc_4_0
    super
    set_cpu_cflags "-march=nocona -mssse3"
  end

  def gcc_4_2
    super
    set_cpu_cflags
  end

  # Define a setup method per supported versioned GNU GCC (e.g. `gcc-5`).
  GNU_GCC_VERSIONS.each do |n|
    define_method(:"gcc-#{n}") do
      super()
      set_cpu_cflags
    end
  end

  def clang
    super
    replace_in_cflags(/-Xarch_#{Hardware::CPU.arch_32_bit} (-march=\S*)/, '\1')
    # Clang mistakenly enables AES-NI on plain Nehalem
    map = Hardware::CPU.optimization_flags
    map = map.merge(nehalem: "-march=native -Xclang -target-feature -Xclang -aes")
    set_cpu_cflags "-march=native", map
  end

  def minimal_optimization
    define_cflags "-Os #{SAFE_CFLAGS_FLAGS}"
  end
  alias generic_minimal_optimization minimal_optimization

  def no_optimization
    define_cflags SAFE_CFLAGS_FLAGS
  end
  alias generic_no_optimization no_optimization

  # we've seen some packages fail to build when warnings are disabled!
  def enable_warnings
    remove_from_cflags "-w"
  end

  def m64
    append_to_cflags "-m64"
    append "LDFLAGS", "-arch #{Hardware::CPU.arch_64_bit}"
  end

  def m32
    append_to_cflags "-m32"
    append "LDFLAGS", "-arch #{Hardware::CPU.arch_32_bit}"
  end

  # Adds flags for building fat (multi-arch) binaries.
  def universal_binary
    check_for_compiler_universal_support

    append_to_cflags Hardware::CPU.universal_archs.as_arch_flags
    append "LDFLAGS", Hardware::CPU.universal_archs.as_arch_flags

    return if compiler_any_clang?
    return unless Hardware.is_32_bit?
    # Can't mix "-march" for a 32-bit CPU with "-arch x86_64"
    replace_in_cflags(/-march=\S*/, "-Xarch_#{Hardware::CPU.arch_32_bit} \\0")
  end

  # Enables C++11; raises when the selected compiler cannot support it.
  def cxx11
    if compiler == :clang
      append "CXX", "-std=c++11"
      append "CXX", "-stdlib=libc++"
    elsif compiler_with_cxx11_support?(compiler)
      append "CXX", "-std=c++11"
    else
      raise "The selected compiler doesn't support C++11: #{compiler}"
    end
  end

  def libcxx
    append "CXX", "-stdlib=libc++" if compiler == :clang
  end

  def libstdcxx
    append "CXX", "-stdlib=libstdc++" if compiler == :clang
  end

  # @private
  def replace_in_cflags(before, after)
    CC_FLAG_VARS.each do |key|
      self[key] = self[key].sub(before, after) if key?(key)
    end
  end

  # Convenience method to set all C compiler flags in one shot.
  def define_cflags(val)
    CC_FLAG_VARS.each { |key| self[key] = val }
  end

  # Sets architecture-specific flags for every environment variable
  # given in the list `flags`.
  # @private
  def set_cpu_flags(flags, default = DEFAULT_FLAGS, map = Hardware::CPU.optimization_flags)
    # Preserve any existing 32-bit -Xarch_ prefix before stripping old flags.
    cflags =~ /(-Xarch_#{Hardware::CPU.arch_32_bit} )-march=/
    xarch = Regexp.last_match(1).to_s
    remove flags, /(-Xarch_#{Hardware::CPU.arch_32_bit} )?-march=\S*/
    remove flags, /( -Xclang \S+)+/
    remove flags, /-mssse3/
    remove flags, /-msse4(\.\d)?/
    append flags, xarch unless xarch.empty?

    append flags, map.fetch(effective_arch, default)
  end
  alias generic_set_cpu_flags set_cpu_flags

  def x11; end

  # Chooses the CPU family used to look up optimization flags.
  # @private
  def effective_arch
    if ARGV.build_bottle?
      ARGV.bottle_arch || Hardware.oldest_cpu
    elsif Hardware::CPU.intel? && !Hardware::CPU.sse4?
      # If the CPU doesn't support SSE4, we cannot trust -march=native or
      # -march=<cpu family> to do the right thing because we might be running
      # in a VM or on a Hackintosh.
      Hardware.oldest_cpu
    else
      Hardware::CPU.family
    end
  end

  # @private
  def set_cpu_cflags(default = DEFAULT_FLAGS, map = Hardware::CPU.optimization_flags)
    set_cpu_flags CC_FLAG_VARS, default, map
  end

  # Number of parallel make jobs: HOMEBREW_MAKE_JOBS when positive,
  # otherwise the machine's core count.
  def make_jobs
    # '-j' requires a positive integral argument
    if (jobs = self["HOMEBREW_MAKE_JOBS"].to_i).positive?
      jobs
    else
      Hardware::CPU.cores
    end
  end

  # This method does nothing in stdenv since there's no arg refurbishment
  # @private
  def refurbish_args; end
end
require "extend/os/extend/ENV/std"
std: use HOMEBREW_ARCH env var on Linux
Unlike macOS, Linux installations can be installed/deployed on a
variety of hardware. The HOMEBREW_ARCH environment variable
allows the end user to set a custom -march=... option for the
compiler. If the variable is not set, it defaults to "native".
This only applies on Linux; on macOS, behavior remains unchanged.
require "hardware"
require "extend/ENV/shared"
# @private
# Stdenv is Homebrew's "standard" build environment: it exports compiler and
# linker flags directly through environment variables (contrast: superenv).
module Stdenv
  include SharedEnvExtension

  # @private
  SAFE_CFLAGS_FLAGS = "-w -pipe".freeze
  # -march target used on Linux; overridable via the HOMEBREW_ARCH
  # environment variable, defaulting to "native".
  HOMEBREW_ARCH = (ENV["HOMEBREW_ARCH"] || "native").freeze
  DEFAULT_FLAGS = (OS.mac? ? "-march=core2 -msse4" : "-march=#{HOMEBREW_ARCH}").freeze

  # Populates PATH, pkg-config search paths, MAKEFLAGS and the compiler flag
  # variables for a build, then dispatches to the compiler-specific setup
  # method (e.g. #clang) via send.
  # @private
  def setup_build_environment(formula = nil)
    super
    self["HOMEBREW_ENV"] = "std"
    PATH.new(ENV["HOMEBREW_PATH"]).each { |p| prepend_path "PATH", p }

    # Set the default pkg-config search path, overriding the built-in paths
    # Anything in PKG_CONFIG_PATH is searched before paths in this variable
    self["PKG_CONFIG_LIBDIR"] = determine_pkg_config_libdir

    self["MAKEFLAGS"] = "-j#{make_jobs}"

    unless HOMEBREW_PREFIX.to_s == "/usr/local"
      # /usr/local is already an -isystem and -L directory so we skip it
      self["CPPFLAGS"] = "-isystem#{HOMEBREW_PREFIX}/include"
      self["LDFLAGS"] = "-L#{HOMEBREW_PREFIX}/lib"
      # CMake ignores the variables above
      self["CMAKE_PREFIX_PATH"] = HOMEBREW_PREFIX.to_s
    end

    frameworks = HOMEBREW_PREFIX.join("Frameworks")
    if frameworks.directory?
      append "CPPFLAGS", "-F#{frameworks}"
      append "LDFLAGS", "-F#{frameworks}"
      self["CMAKE_FRAMEWORK_PATH"] = frameworks.to_s
    end

    # Os is the default Apple uses for all its stuff so let's trust them
    define_cflags "-Os #{SAFE_CFLAGS_FLAGS}"

    append "LDFLAGS", "-Wl,-headerpad_max_install_names"

    send(compiler)

    # When building with a versioned GNU GCC, ensure its bin dir is on PATH.
    return unless cc =~ GNU_GCC_REGEXP
    gcc_formula = gcc_version_formula($&)
    append_path "PATH", gcc_formula.opt_bin.to_s
  end
  alias generic_setup_build_environment setup_build_environment

  # Extra pkg-config directories; overridden by OS-specific extensions.
  def homebrew_extra_pkg_config_paths
    []
  end

  # Builds the PKG_CONFIG_LIBDIR value, keeping only directories that exist.
  def determine_pkg_config_libdir
    PATH.new(
      HOMEBREW_PREFIX/"lib/pkgconfig",
      HOMEBREW_PREFIX/"share/pkgconfig",
      homebrew_extra_pkg_config_paths,
      "/usr/lib/pkgconfig",
    ).existing
  end

  # Removes the MAKEFLAGS environment variable, causing make to use a single job.
  # This is useful for makefiles with race conditions.
  # When passed a block, MAKEFLAGS is removed only for the duration of the block and is restored after its completion.
  def deparallelize
    old = self["MAKEFLAGS"]
    remove "MAKEFLAGS", /-j\d+/
    if block_given?
      begin
        yield
      ensure
        self["MAKEFLAGS"] = old
      end
    end
    old
  end

  # Define one method per optimization level (e.g. `ENV.O2`) that replaces
  # the current -O flag in all compiler flag variables.
  %w[O3 O2 O1 O0 Os].each do |opt|
    define_method opt do
      remove_from_cflags(/-O./)
      append_to_cflags "-#{opt}"
    end
  end

  # @private
  def determine_cc
    s = super
    DevelopmentTools.locate(s) || Pathname.new(s)
  end

  # Derives the C++ compiler path from the C compiler path by name rewriting.
  # @private
  def determine_cxx
    dir, base = determine_cc.split
    dir/base.to_s.sub("gcc", "g++").sub("clang", "clang++")
  end

  def gcc_4_0
    super
    set_cpu_cflags "-march=nocona -mssse3"
  end

  def gcc_4_2
    super
    set_cpu_cflags
  end

  # Define a setup method per supported versioned GNU GCC (e.g. `gcc-5`).
  GNU_GCC_VERSIONS.each do |n|
    define_method(:"gcc-#{n}") do
      super()
      set_cpu_cflags
    end
  end

  def clang
    super
    replace_in_cflags(/-Xarch_#{Hardware::CPU.arch_32_bit} (-march=\S*)/, '\1')
    # Clang mistakenly enables AES-NI on plain Nehalem
    map = Hardware::CPU.optimization_flags
    map = map.merge(nehalem: "-march=native -Xclang -target-feature -Xclang -aes")
    set_cpu_cflags "-march=native", map
  end

  def minimal_optimization
    define_cflags "-Os #{SAFE_CFLAGS_FLAGS}"
  end
  alias generic_minimal_optimization minimal_optimization

  def no_optimization
    define_cflags SAFE_CFLAGS_FLAGS
  end
  alias generic_no_optimization no_optimization

  # we've seen some packages fail to build when warnings are disabled!
  def enable_warnings
    remove_from_cflags "-w"
  end

  def m64
    append_to_cflags "-m64"
    append "LDFLAGS", "-arch #{Hardware::CPU.arch_64_bit}"
  end

  def m32
    append_to_cflags "-m32"
    append "LDFLAGS", "-arch #{Hardware::CPU.arch_32_bit}"
  end

  # Adds flags for building fat (multi-arch) binaries.
  def universal_binary
    check_for_compiler_universal_support

    append_to_cflags Hardware::CPU.universal_archs.as_arch_flags
    append "LDFLAGS", Hardware::CPU.universal_archs.as_arch_flags

    return if compiler_any_clang?
    return unless Hardware.is_32_bit?
    # Can't mix "-march" for a 32-bit CPU with "-arch x86_64"
    replace_in_cflags(/-march=\S*/, "-Xarch_#{Hardware::CPU.arch_32_bit} \\0")
  end

  # Enables C++11; raises when the selected compiler cannot support it.
  def cxx11
    if compiler == :clang
      append "CXX", "-std=c++11"
      append "CXX", "-stdlib=libc++"
    elsif compiler_with_cxx11_support?(compiler)
      append "CXX", "-std=c++11"
    else
      raise "The selected compiler doesn't support C++11: #{compiler}"
    end
  end

  def libcxx
    append "CXX", "-stdlib=libc++" if compiler == :clang
  end

  def libstdcxx
    append "CXX", "-stdlib=libstdc++" if compiler == :clang
  end

  # @private
  def replace_in_cflags(before, after)
    CC_FLAG_VARS.each do |key|
      self[key] = self[key].sub(before, after) if key?(key)
    end
  end

  # Convenience method to set all C compiler flags in one shot.
  def define_cflags(val)
    CC_FLAG_VARS.each { |key| self[key] = val }
  end

  # Sets architecture-specific flags for every environment variable
  # given in the list `flags`.
  # @private
  def set_cpu_flags(flags, default = DEFAULT_FLAGS, map = Hardware::CPU.optimization_flags)
    # Preserve any existing 32-bit -Xarch_ prefix before stripping old flags.
    cflags =~ /(-Xarch_#{Hardware::CPU.arch_32_bit} )-march=/
    xarch = Regexp.last_match(1).to_s
    remove flags, /(-Xarch_#{Hardware::CPU.arch_32_bit} )?-march=\S*/
    remove flags, /( -Xclang \S+)+/
    remove flags, /-mssse3/
    remove flags, /-msse4(\.\d)?/
    append flags, xarch unless xarch.empty?

    append flags, map.fetch(effective_arch, default)
  end
  alias generic_set_cpu_flags set_cpu_flags

  def x11; end

  # Chooses the CPU family used to look up optimization flags.
  # @private
  def effective_arch
    if ARGV.build_bottle?
      ARGV.bottle_arch || Hardware.oldest_cpu
    elsif Hardware::CPU.intel? && !Hardware::CPU.sse4?
      # If the CPU doesn't support SSE4, we cannot trust -march=native or
      # -march=<cpu family> to do the right thing because we might be running
      # in a VM or on a Hackintosh.
      Hardware.oldest_cpu
    else
      Hardware::CPU.family
    end
  end

  # @private
  def set_cpu_cflags(default = DEFAULT_FLAGS, map = Hardware::CPU.optimization_flags)
    set_cpu_flags CC_FLAG_VARS, default, map
  end

  # Number of parallel make jobs: HOMEBREW_MAKE_JOBS when positive,
  # otherwise the machine's core count.
  def make_jobs
    # '-j' requires a positive integral argument
    if (jobs = self["HOMEBREW_MAKE_JOBS"].to_i).positive?
      jobs
    else
      Hardware::CPU.cores
    end
  end

  # This method does nothing in stdenv since there's no arg refurbishment
  # @private
  def refurbish_args; end
end
require "extend/os/extend/ENV/std"
|
# frozen_string_literal: true
module OS
  module Linux
    # Helpers for querying the glibc version installed on the host system.
    module Glibc
      module_function

      # Version of the host glibc, detected by asking `ldd --version`.
      # The detected value is memoized after the first successful lookup;
      # Version::NULL is returned (and not cached) when nothing parses.
      def system_version
        return @system_version if @system_version

        ldd_output = Utils.popen_read("/usr/bin/ldd", "--version")
        detected = ldd_output[/ (\d+\.\d+)/, 1]
        if detected
          @system_version = Version.new(detected)
        else
          Version::NULL
        end
      end

      # Oldest glibc release that is supported.
      def minimum_version
        Version.new("2.12")
      end

      # True when the host glibc is older than the supported minimum.
      def below_minimum_version?
        current = system_version
        current < minimum_version
      end
    end
  end
end
os/linux/glibc.rb: up minimum glibc version to 2.13
# frozen_string_literal: true
module OS
  module Linux
    # Helpers for querying the glibc version installed on the host system.
    module Glibc
      module_function

      # Version of the host glibc, detected by asking `ldd --version`.
      # The detected value is memoized after the first successful lookup;
      # Version::NULL is returned (and not cached) when nothing parses.
      def system_version
        return @system_version if @system_version

        ldd_output = Utils.popen_read("/usr/bin/ldd", "--version")
        detected = ldd_output[/ (\d+\.\d+)/, 1]
        if detected
          @system_version = Version.new(detected)
        else
          Version::NULL
        end
      end

      # Oldest glibc release that is supported.
      def minimum_version
        Version.new("2.13")
      end

      # True when the host glibc is older than the supported minimum.
      def below_minimum_version?
        current = system_version
        current < minimum_version
      end
    end
  end
end
|
# Lookup table mapping a formula name to the tap it migrated to, used to
# point users at the new home of formulae removed from this repository.
# Frozen so the table cannot be mutated at runtime.
TAP_MIGRATIONS = {
  "aimage" => "homebrew/boneyard",
  "aws-iam-tools" => "homebrew/boneyard",
  "apple-gcc42" => "homebrew/versions",
  "blackbox" => "homebrew/boneyard",
  "boost149" => "homebrew/versions",
  "catdoc" => "homebrew/boneyard",
  "clam" => "homebrew/boneyard",
  "cmucl" => "homebrew/binary",
  "comparepdf" => "homebrew/boneyard",
  "denyhosts" => "homebrew/boneyard",
  "dotwrp" => "homebrew/science",
  "drizzle" => "homebrew/boneyard",
  "dsniff" => "homebrew/boneyard",
  "grads" => "homebrew/binary",
  "hugs98" => "homebrew/boneyard",
  "hwloc" => "homebrew/science",
  "ipopt" => "homebrew/science",
  "iulib" => "homebrew/boneyard",
  "jscoverage" => "homebrew/boneyard",
  "jsl" => "homebrew/binary",
  "kerl" => "homebrew/headonly",
  "kismet" => "homebrew/boneyard",
  "libgtextutils" => "homebrew/science",
  "librets" => "homebrew/boneyard",
  "lmutil" => "homebrew/binary",
  "mlkit" => "homebrew/boneyard",
  "mlton" => "homebrew/boneyard",
  "mydumper" => "homebrew/boneyard",
  "nlopt" => "homebrew/science",
  "octave" => "homebrew/science",
  "opencv" => "homebrew/science",
  "pan" => "homebrew/boneyard",
  "pocl" => "homebrew/science",
  "qfits" => "homebrew/boneyard",
  "qrupdate" => "homebrew/science",
  "slicot" => "homebrew/science",
  "syslog-ng" => "homebrew/boneyard",
  "urweb" => "homebrew/boneyard",
  "wkhtmltopdf" => "homebrew/boneyard",
}.freeze
Remove justniffer
Closes Homebrew/homebrew#27021.
# Lookup table mapping a formula name to the tap it migrated to, used to
# point users at the new home of formulae removed from this repository.
# Frozen so the table cannot be mutated at runtime.
TAP_MIGRATIONS = {
  "aimage" => "homebrew/boneyard",
  "aws-iam-tools" => "homebrew/boneyard",
  "apple-gcc42" => "homebrew/versions",
  "blackbox" => "homebrew/boneyard",
  "boost149" => "homebrew/versions",
  "catdoc" => "homebrew/boneyard",
  "clam" => "homebrew/boneyard",
  "cmucl" => "homebrew/binary",
  "comparepdf" => "homebrew/boneyard",
  "denyhosts" => "homebrew/boneyard",
  "dotwrp" => "homebrew/science",
  "drizzle" => "homebrew/boneyard",
  "dsniff" => "homebrew/boneyard",
  "grads" => "homebrew/binary",
  "hugs98" => "homebrew/boneyard",
  "hwloc" => "homebrew/science",
  "ipopt" => "homebrew/science",
  "iulib" => "homebrew/boneyard",
  "jscoverage" => "homebrew/boneyard",
  "jsl" => "homebrew/binary",
  "justniffer" => "homebrew/boneyard",
  "kerl" => "homebrew/headonly",
  "kismet" => "homebrew/boneyard",
  "libgtextutils" => "homebrew/science",
  "librets" => "homebrew/boneyard",
  "lmutil" => "homebrew/binary",
  "mlkit" => "homebrew/boneyard",
  "mlton" => "homebrew/boneyard",
  "mydumper" => "homebrew/boneyard",
  "nlopt" => "homebrew/science",
  "octave" => "homebrew/science",
  "opencv" => "homebrew/science",
  "pan" => "homebrew/boneyard",
  "pocl" => "homebrew/science",
  "qfits" => "homebrew/boneyard",
  "qrupdate" => "homebrew/science",
  "slicot" => "homebrew/science",
  "syslog-ng" => "homebrew/boneyard",
  "urweb" => "homebrew/boneyard",
  "wkhtmltopdf" => "homebrew/boneyard",
}.freeze
|
require 'active_support/all'
require 'active_command/version'
require 'active_command/configuration'
require 'active_command/concerns/authorizable'
require 'active_command/concerns/chainable'
require 'active_command/concerns/commandable'
require 'active_command/concerns/combinable'
require 'active_command/concerns/respondable'
require 'active_command/concerns/measurable'
require 'active_command/concerns/validatable'
require 'active_command/base'
require 'active_command/backends'
require 'active_command/responders'
require 'active_command/extender' if defined? ActionController
# Top-level namespace: holds the global configuration object and rebuilds
# backends/responders when configuration changes.
module ActiveCommand
  class << self
    # Getter for shared global objects
    # NOTE(review): this attr_reader is immediately shadowed by the memoizing
    # #configuration method defined below, so it is effectively dead code.
    attr_reader :configuration

    # Returns the global [Configuration](ActiveCommand/Configuration) object.
    #
    # @example
    #   ActiveCommand.configuration.responder = :pusher
    def configuration
      @configuration ||= ActiveCommand::Configuration.new
    end

    # Yields the global configuration to a block.
    # @yield [Configuration] global configuration
    #
    # @example
    #   ActiveCommand.configure do |config|
    #     config.responder = :fayer
    #   end
    def configure
      yield configuration if block_given?
      # Re-create backends/responders so configuration changes take effect.
      Backends.create
      Responders.create
    end
  end
end
require rubygems
require 'rubygems'
require 'active_support/all'
require 'active_command/version'
require 'active_command/configuration'
require 'active_command/concerns/authorizable'
require 'active_command/concerns/chainable'
require 'active_command/concerns/commandable'
require 'active_command/concerns/combinable'
require 'active_command/concerns/respondable'
require 'active_command/concerns/measurable'
require 'active_command/concerns/validatable'
require 'active_command/base'
require 'active_command/backends'
require 'active_command/responders'
require 'active_command/extender' if defined? ActionController
# Top-level namespace: holds the global configuration object and rebuilds
# backends/responders when configuration changes.
module ActiveCommand
  class << self
    # Getter for shared global objects
    # NOTE(review): this attr_reader is immediately shadowed by the memoizing
    # #configuration method defined below, so it is effectively dead code.
    attr_reader :configuration

    # Returns the global [Configuration](ActiveCommand/Configuration) object.
    #
    # @example
    #   ActiveCommand.configuration.responder = :pusher
    def configuration
      @configuration ||= ActiveCommand::Configuration.new
    end

    # Yields the global configuration to a block.
    # @yield [Configuration] global configuration
    #
    # @example
    #   ActiveCommand.configure do |config|
    #     config.responder = :fayer
    #   end
    def configure
      yield configuration if block_given?
      # Re-create backends/responders so configuration changes take effect.
      Backends.create
      Responders.create
    end
  end
end
|
require 'log4r'
require 'log4r/configurator'
require 'log4r/outputter/consoleoutputters'
# Register Af's custom log levels with Log4r at load time, before any
# Log4r::Logger instances are created below.
Log4r::Configurator.custom_levels(:DEBUG, :DEBUG_FINE, :DEBUG_MEDIUM, :DEBUG_GROSS, :DETAIL, :INFO, :WARN, :ALARM, :ERROR, :FATAL)
module Af
  # Base class for Af command-line applications: combines option parsing
  # (via ::Af::CommandLiner), Log4r-based per-name loggers, and optional
  # daemonization with output redirection.
  class Application < ::Af::CommandLiner
    DEFAULT_LOG_LEVEL = Log4r::ALL

    opt_group :logging, "logger options", :priority => 100, :hidden => true, :description => <<-DESCRIPTION
These are options associated with logging. By default, logging is turned on when
a process is daemonized.
You can set the log file name in components with --log-dir, --log-file-basename, and --log-file_extension
which will ensure "log dir" exists. You can also set the file simply with --log-file (the path to the
log file must exist).
--log-level is used to turn on and off loggers. Current levels are:
Log4r::#{Log4r::LNAMES.join(', Log4r::')}
the parameter for --log-level should be a formated key/value pair where the key is the name
of the logger ("Process::ExampleProgram" for instance) and log level ("Log4r::DEBUG_MEDIUM") separated by '='
each key/value pair should be separated by a ','. the logger name 'default' can be used as the base application
logger name:
Process::ExampleProgram=Log4r::DEBUG_MEDIUM,Process::ExampleProgram::SubClassThing=Log4r::DEBUG_FINE
or:
default=Log4r::ALL
    DESCRIPTION

    opt :daemon, "run as daemon", :short => :d
    opt :log_dir, "directory to store log files", :default => "/var/log/af", :group => :logging
    opt :log_file_basename, "base name of file to log output", :default => "af", :group => :logging
    opt :log_file_extension, "extension name of file to log output", :default => '.log', :group => :logging
    opt :log_file, "full path name of log file", :type => :string, :env => "AF_LOG_FILE", :group => :logging
    opt :log_all_output, "start logging output", :default => false, :group => :logging
    opt :log_level, "set the levels of one or more loggers", :type => :hash, :env => "AF_LOG_LEVEL", :group => :logging
    opt :log_configuration_file, "load an log4r xml configuration file", :type => :string, :argument_note => 'FILENAME', :group => :logging

    # NOTE(review): :log_file_basebane looks like a typo of :log_file_basename
    # (the opt above and #post_command_line_parsing use @log_file_basename).
    # It is kept for backward compatibility; the correctly spelled accessor
    # is added alongside it.
    attr_accessor :has_errors, :daemon, :log_dir, :log_file, :log_file_basebane, :log_file_extension, :log_all_output, :log_level, :log_configuration_file
    attr_accessor :log_file_basename

    @@singleton = nil

    # Returns the process-wide Application instance.  With +safe+ true a new
    # instance is created on demand; otherwise a missing instance is fatal.
    def self.singleton(safe = false)
      if @@singleton.nil?
        if safe
          @@singleton = new
        else
          fail("Application @@singleton not initialized! Maybe you are using a Proxy before creating an instance? or use SafeProxy")
        end
      end
      return @@singleton
    end

    def initialize
      super
      @@singleton = self
      @loggers = {}
      @logger_levels = {:default => DEFAULT_LOG_LEVEL}
      @log4r_formatter = nil
      @log4r_outputter = {}
      @log4r_name_suffix = ""
      ActiveRecord::ConnectionAdapters::ConnectionPool.initialize_connection_application_name(self.class.database_application_name)
      # Keep stdout/stderr unbuffered so log lines appear promptly.
      $stdout.sync = true
      $stderr.sync = true
      update_opts :log_file_basename, :default => af_name
    end

    # Identifier reported to the database for connections from this process.
    def self.database_application_name
      return "#{self.name}(pid: #{Process.pid})"
    end

    # Application name used as the root logger name.
    def af_name
      return self.class.name
    end

    def log4r_pattern_formatter_format
      return "%l %C %M"
    end

    def log4r_formatter(logger_name = :default)
      logger_name = :default if logger_name == af_name
      return Log4r::PatternFormatter.new(:pattern => log4r_pattern_formatter_format)
    end

    # Memoized stdout outputter per logger name.
    def log4r_outputter(logger_name = :default)
      logger_name = :default if logger_name == af_name
      unless @log4r_outputter.has_key?(logger_name)
        @log4r_outputter[logger_name] = Log4r::StdoutOutputter.new("stdout", :formatter => log4r_formatter(logger_name))
      end
      return @log4r_outputter[logger_name]
    end

    # Configured level for +logger_name+, falling back to DEFAULT_LOG_LEVEL.
    def logger_level(logger_name = :default)
      logger_name = :default if logger_name == af_name
      return @logger_levels[logger_name] || DEFAULT_LOG_LEVEL
    end

    def set_logger_level(new_logger_level, logger_name = :default)
      logger_name = :default if logger_name == af_name
      # BUG FIX: previously wrote to @logger_level, an ivar that is never
      # initialized (#initialize creates @logger_levels), so this method
      # always raised NoMethodError on nil.
      @logger_levels[logger_name] = new_logger_level
    end

    # Fetches (creating and caching on first use) the Log4r logger for
    # +logger_name+, namespaced under af_name.
    def logger(logger_name = :default)
      logger_name = :default if logger_name == af_name
      unless @loggers.has_key?(logger_name)
        l = Log4r::Logger.new(logger_name == :default ? af_name : "#{af_name}::#{logger_name}")
        l.outputters = log4r_outputter(logger_name)
        l.level = logger_level(logger_name)
        l.additive = false
        @loggers[logger_name] = l
      end
      return @loggers[logger_name]
    end

    def self._run(*arguments)
      # this ARGV hack is here for test specs to add script arguments
      ARGV[0..-1] = arguments if arguments.length > 0
      self.new._run
    end

    # Parses the command line and runs pre-work hooks; returns self so the
    # caller can chain #_work.
    def _run(usage = nil, options = {})
      @options = options
      @usage = (usage or "rails runner #{self.class.name}.run [OPTIONS]")
      command_line_options(@options, @usage)
      post_command_line_parsing
      pre_work
      return self
    end

    # Executes the subclass's #work and exits with a failure status when
    # @has_errors was set.
    def _work
      work
      exit @has_errors ? 1 : 0
    end

    def self.run(*arguments)
      application = self.new._run(*arguments)
      application._work
    end

    protected

    def option_handler(option, argument)
    end

    # Overload to do any operations that need to be handled before work is called.
    # call exit if needed. always call super
    def pre_work
      logger.debug_gross "pre work"
    end

    # Applies a {logger name => "Log4r::LEVEL"} hash (e.g. from --log-level).
    def set_logger_levels(log_level_hash)
      # (nlim) We really shouldn't log anything until the log level is set.
      # logger.info "set_logger_levels: #{log_level_hash.map{|k,v| k.to_s + '=>' + v.to_s}.join(',')}"
      logger_level_value = DEFAULT_LOG_LEVEL
      # Fix overriding
      coerced_log_level_hash = log_level_hash.keys.each_with_object({}) { |logger_name, hash|
        logger_level = log_level_hash[logger_name]
        begin
          logger_level_value = logger_level.constantize
        rescue StandardError => e
          # NOTE(review): on a bad level string this keeps the PREVIOUS
          # logger_level_value rather than Log4r::ALL as the message claims —
          # TODO confirm intended behavior before changing.
          logger.error "invalid log level value: #{logger_level} for logger: #{logger_name}, using Log4r::ALL = (0)"
        end
        # Use symbol :default for the Af logger, otherwise, use a string for the key
        hash[logger_name == "default" ? :default : logger_name] = logger_level_value
      }
      @logger_levels.merge!(coerced_log_level_hash)
      @logger_levels.each do |logger_name, logger_level|
        # Get or create the logger by name
        l = logger(logger_name)
        # Make sure the level is overridden
        # NOTE(review): this applies logger_level_value (the LAST coerced
        # value) to every logger, not the per-logger logger_level — looks
        # intentional per the "Fix overriding" comment above, but verify.
        l.level = logger_level_value
        logger.detail "set_logger_levels: #{logger_name} => #{logger_level_value}"
      end
    end

    # Applies parsed options: log4r XML config, logger levels, output
    # redirection to the log file, and daemonization (fork + setsid).
    def post_command_line_parsing
      if @log_configuration_file.present?
        begin
          Log4r::Configurator.load_xml_file(@log_configuration_file)
        rescue StandardError => e
          puts "error while parsing log_configuration_file: #{@log_configuration_file}: #{e.message}"
          puts "continuing ... since this is probably not fatal"
        end
      end
      if @log_level.present?
        set_logger_levels(@log_level)
      end
      if @daemon
        @log_all_output = true
      end
      if @log_all_output
        path = Pathname.new(@log_dir.to_s)
        path.mkpath
        if @log_file.present?
          log_path = @log_file
        else
          log_path = path + "#{@log_file_basename}#{@log_file_extension}"
        end
        # Redirect all process output into the log file.
        $stdout.reopen(log_path, "a")
        $stderr.reopen(log_path, "a")
        $stdout.sync = true
        $stderr.sync = true
      end
      if @daemon
        logger.info "Daemonizing"
        pid = fork
        if pid
          exit 0
        else
          logger.info "forked"
          Process.setsid
          trap 'SIGHUP', 'IGNORE'
          cleanup_after_fork
        end
      end
    end

    # Forked children must not share the parent's DB connection.
    def cleanup_after_fork
      ActiveRecord::Base.connection.reconnect!
    end

    # Mix in to route logging through the (already created) singleton.
    module Proxy
      def af_logger(logger_name = (self.try(:af_name) || "Unknown"))
        return ::Af::Application.singleton.logger(logger_name)
      end

      def af_name
        return ::Af::Application.singleton.af_name
      end
    end

    # Like Proxy, but creates the singleton on demand if needed.
    module SafeProxy
      def af_logger(logger_name = (self.try(:af_name) || "Unknown"))
        return ::Af::Application.singleton(true).logger(logger_name)
      end

      def af_name
        return ::Af::Application.singleton(true).af_name
      end
    end
  end
end
truncate database_application_name before sending to Postgres
require 'log4r'
require 'log4r/configurator'
require 'log4r/outputter/consoleoutputters'
# Register Af's custom log levels with Log4r at load time, before any
# Log4r::Logger instances are created below.
Log4r::Configurator.custom_levels(:DEBUG, :DEBUG_FINE, :DEBUG_MEDIUM, :DEBUG_GROSS, :DETAIL, :INFO, :WARN, :ALARM, :ERROR, :FATAL)
module Af
  # Base class for af command-line applications.
  #
  # Wires together: Log4r-backed named loggers with per-logger level
  # overrides, option parsing inherited from ::Af::CommandLiner, optional
  # redirection of all output into a log file, and daemonization.  A
  # process-wide singleton instance is exposed through the nested
  # Proxy/SafeProxy mixins.
  class Application < ::Af::CommandLiner
    # Level applied to any logger without an explicit override.
    DEFAULT_LOG_LEVEL = Log4r::ALL

    opt_group :logging, "logger options", :priority => 100, :hidden => true, :description => <<-DESCRIPTION
These are options associated with logging. By default, logging is turned on when
a process is daemonized.
You can set the log file name in components with --log-dir, --log-file-basename, and --log-file_extension
which will ensure "log dir" exists. You can also set the file simply with --log-file (the path to the
log file must exist).
--log-level is used to turn on and off loggers. Current levels are:
Log4r::#{Log4r::LNAMES.join(', Log4r::')}
the parameter for --log-level should be a formated key/value pair where the key is the name
of the logger ("Process::ExampleProgram" for instance) and log level ("Log4r::DEBUG_MEDIUM") separated by '='
each key/value pair should be separated by a ','. the logger name 'default' can be used as the base application
logger name:
Process::ExampleProgram=Log4r::DEBUG_MEDIUM,Process::ExampleProgram::SubClassThing=Log4r::DEBUG_FINE
or:
default=Log4r::ALL
    DESCRIPTION

    opt :daemon, "run as daemon", :short => :d
    opt :log_dir, "directory to store log files", :default => "/var/log/af", :group => :logging
    opt :log_file_basename, "base name of file to log output", :default => "af", :group => :logging
    opt :log_file_extension, "extension name of file to log output", :default => '.log', :group => :logging
    opt :log_file, "full path name of log file", :type => :string, :env => "AF_LOG_FILE", :group => :logging
    opt :log_all_output, "start logging output", :default => false, :group => :logging
    opt :log_level, "set the levels of one or more loggers", :type => :hash, :env => "AF_LOG_LEVEL", :group => :logging
    opt :log_configuration_file, "load an log4r xml configuration file", :type => :string, :argument_note => 'FILENAME', :group => :logging

    # Bug fix: the original list only declared the misspelled
    # :log_file_basebane, while the rest of the code uses
    # @log_file_basename.  The correct accessor is added; the misspelled
    # one is kept for backward compatibility with any existing callers.
    attr_accessor :has_errors, :daemon, :log_dir, :log_file, :log_file_basename, :log_file_basebane, :log_file_extension, :log_all_output, :log_level, :log_configuration_file

    @@singleton = nil

    # Return the process-wide Application instance.  When +safe+ is true a
    # bare instance is created on demand; otherwise a missing singleton is
    # treated as a programming error.
    def self.singleton(safe = false)
      if @@singleton.nil?
        if safe
          @@singleton = new
        else
          fail("Application @@singleton not initialized! Maybe you are using a Proxy before creating an instance? or use SafeProxy")
        end
      end
      return @@singleton
    end

    def initialize
      super
      @@singleton = self
      @loggers = {}
      @logger_levels = {:default => DEFAULT_LOG_LEVEL}
      @log4r_formatter = nil
      @log4r_outputter = {}
      @log4r_name_suffix = ""
      # Tag database connections with this process' name for visibility in pg_stat_activity.
      ActiveRecord::ConnectionAdapters::ConnectionPool.initialize_connection_application_name(self.class.database_application_name)
      $stdout.sync = true
      $stderr.sync = true
      update_opts :log_file_basename, :default => af_name
    end

    def self.database_application_name
      # (nlim) Truncate to 63 characters (the Postgres identifier limit) so Postgres stops yelling
      return "#{self.name}(pid: #{Process.pid})".slice(0, 63)
    end

    # Application name used as the root logger name; defaults to the class name.
    def af_name
      return self.class.name
    end

    # Log4r pattern: level, class, message.
    def log4r_pattern_formatter_format
      return "%l %C %M"
    end

    # A fresh formatter for the given logger (logger_name currently unused
    # beyond normalization — kept for subclass overrides).
    def log4r_formatter(logger_name = :default)
      logger_name = :default if logger_name == af_name
      return Log4r::PatternFormatter.new(:pattern => log4r_pattern_formatter_format)
    end

    # Memoized stdout outputter per logger name.
    def log4r_outputter(logger_name = :default)
      logger_name = :default if logger_name == af_name
      unless @log4r_outputter.has_key?(logger_name)
        @log4r_outputter[logger_name] = Log4r::StdoutOutputter.new("stdout", :formatter => log4r_formatter(logger_name))
      end
      return @log4r_outputter[logger_name]
    end

    # Configured level for a logger, falling back to DEFAULT_LOG_LEVEL.
    def logger_level(logger_name = :default)
      logger_name = :default if logger_name == af_name
      return @logger_levels[logger_name] || DEFAULT_LOG_LEVEL
    end

    # Override the configured level of a single logger.
    # Bug fix: this previously wrote to the nonexistent @logger_level
    # (singular) and would raise NoMethodError on nil.
    def set_logger_level(new_logger_level, logger_name = :default)
      logger_name = :default if logger_name == af_name
      @logger_levels[logger_name] = new_logger_level
    end

    # Get (or lazily create) the Log4r logger for logger_name; non-default
    # loggers are namespaced under the application name.
    def logger(logger_name = :default)
      logger_name = :default if logger_name == af_name
      unless @loggers.has_key?(logger_name)
        l = Log4r::Logger.new(logger_name == :default ? af_name : "#{af_name}::#{logger_name}")
        l.outputters = log4r_outputter(logger_name)
        l.level = logger_level(logger_name)
        l.additive = false
        @loggers[logger_name] = l
      end
      return @loggers[logger_name]
    end

    def self._run(*arguments)
      # this ARGV hack is here for test specs to add script arguments
      ARGV[0..-1] = arguments if arguments.length > 0
      self.new._run
    end

    # Parse the command line and run setup hooks; returns self so the
    # caller can chain _work.
    def _run(usage = nil, options = {})
      @options = options
      @usage = (usage or "rails runner #{self.class.name}.run [OPTIONS]")
      command_line_options(@options, @usage)
      post_command_line_parsing
      pre_work
      return self
    end

    # Execute the application's work and exit 1 if any errors were flagged.
    def _work
      work
      exit @has_errors ? 1 : 0
    end

    def self.run(*arguments)
      application = self.new._run(*arguments)
      application._work
    end

    protected

    # Hook for subclasses to react to individual parsed options.
    def option_handler(option, argument)
    end

    # Overload to do any operations that need to be handled before work is called.
    # call exit if needed. always call super
    def pre_work
      logger.debug_gross "pre work"
    end

    # Apply a {logger_name => "Log4r::LEVEL"} hash of overrides: coerce the
    # string level names into Log4r level constants, merge them into
    # @logger_levels, then push the resulting level onto each (possibly
    # newly created) logger.
    def set_logger_levels(log_level_hash)
      # (nlim) We really shouldn't log anything until the log level is set.
      coerced_log_level_hash = log_level_hash.keys.each_with_object({}) { |logger_name, hash|
        logger_level = log_level_hash[logger_name]
        begin
          logger_level_value = logger_level.constantize
        rescue StandardError => e
          logger.error "invalid log level value: #{logger_level} for logger: #{logger_name}, using Log4r::ALL = (0)"
          # Bug fix: actually fall back to the default, as the message says,
          # instead of silently reusing the previously parsed value.
          logger_level_value = DEFAULT_LOG_LEVEL
        end
        # Use symbol :default for the Af logger, otherwise, use a string for the key
        hash[logger_name == "default" ? :default : logger_name] = logger_level_value
      }
      @logger_levels.merge!(coerced_log_level_hash)
      @logger_levels.each do |logger_name, logger_level|
        # Get or create the logger by name
        l = logger(logger_name)
        # Bug fix: apply each logger's own level; the original assigned the
        # last parsed value (logger_level_value) to every logger.
        l.level = logger_level
        logger.detail "set_logger_levels: #{logger_name} => #{logger_level}"
      end
    end

    # Finish setup after option parsing: load an optional Log4r XML config,
    # apply --log-level overrides, redirect stdout/stderr into the log file
    # when capturing output (forced on by --daemon), then daemonize.
    def post_command_line_parsing
      if @log_configuration_file.present?
        begin
          Log4r::Configurator.load_xml_file(@log_configuration_file)
        rescue StandardError => e
          # A broken log configuration is reported but deliberately non-fatal.
          puts "error while parsing log_configuration_file: #{@log_configuration_file}: #{e.message}"
          puts "continuing ... since this is probably not fatal"
        end
      end
      if @log_level.present?
        set_logger_levels(@log_level)
      end
      if @daemon
        # Daemons always capture their output to a file.
        @log_all_output = true
      end
      if @log_all_output
        # Ensure the log directory exists before reopening stdio onto it.
        path = Pathname.new(@log_dir.to_s)
        path.mkpath
        if @log_file.present?
          log_path = @log_file
        else
          log_path = path + "#{@log_file_basename}#{@log_file_extension}"
        end
        $stdout.reopen(log_path, "a")
        $stderr.reopen(log_path, "a")
        $stdout.sync = true
        $stderr.sync = true
      end
      if @daemon
        logger.info "Daemonizing"
        pid = fork
        if pid
          # Parent: exit successfully, leaving the child as the daemon.
          exit 0
        else
          # Child: detach from the controlling terminal and session.
          logger.info "forked"
          Process.setsid
          trap 'SIGHUP', 'IGNORE'
          cleanup_after_fork
        end
      end
    end

    # Repair per-process resources after a fork: the database connection
    # inherited from the parent must not be shared, so force a reconnect.
    def cleanup_after_fork
      ActiveRecord::Base.connection.reconnect!
    end

    # Mixin exposing the application-wide logger/name; requires an existing
    # Application instance (singleton raises otherwise).
    module Proxy
      def af_logger(logger_name = (self.try(:af_name) || "Unknown"))
        return ::Af::Application.singleton.logger(logger_name)
      end

      def af_name
        return ::Af::Application.singleton.af_name
      end
    end

    # Like Proxy, but safe before an Application exists: singleton(true)
    # lazily creates a bare instance on demand.
    module SafeProxy
      def af_logger(logger_name = (self.try(:af_name) || "Unknown"))
        return ::Af::Application.singleton(true).logger(logger_name)
      end

      def af_name
        return ::Af::Application.singleton(true).af_name
      end
    end
  end
end
|
require 'faraday'
require 'faraday_middleware'
require 'json'
require 'active_support'
module Amorail
  # Amorail http client: a thin Faraday wrapper around the amoCRM JSON API
  # that handles cookie-based authorization and maps HTTP error statuses
  # onto Amorail exception classes.
  class Client
    attr_reader :usermail, :api_key, :api_endpoint, :custom_options

    def initialize(api_endpoint: Amorail.config.api_endpoint,
                   api_key: Amorail.config.api_key,
                   usermail: Amorail.config.usermail,
                   custom_options: {})
      @api_endpoint = api_endpoint
      @api_key = api_key
      @usermail = usermail
      @custom_options = custom_options if custom_options.any?
      @connect = Faraday.new(url: api_endpoint) do |faraday|
        faraday.adapter Faraday.default_adapter
        faraday.response :json, content_type: /\bjson$/
        faraday.use :instrumentation
      end
    end

    # Account properties metadata, memoized per client.
    def properties
      @properties ||= Property.new(self)
    end

    # NOTE(review): the fallback returns a new Client, not a Faraday
    # connection; initialize always sets @connect, so the fallback looks
    # like dead code — confirm before relying on it.
    def connect
      @connect || self.class.new
    end

    # Log in with the configured credentials and capture the session cookie.
    def authorize
      puts "\n\n\n\n\n authorize authorize authorize authorize authorize authorize \n\n\n\n"
      self.cookies = nil
      response = post(
        Amorail.config.auth_url,
        'USER_LOGIN' => usermail,
        'USER_HASH' => api_key
      )
      cookie_handler(response)
      response
    end

    # Perform a request, re-authorizing once on a 401 and retrying.
    def safe_request(method, url, params = {})
      send(method, url, params)
    rescue ::Amorail::AmoUnauthorizedError
      authorize
      send(method, url, params)
    end

    def get(url, params = {})
      # NOTE(review): hard-coded If-Modified-Since date left from a
      # debugging session — confirm whether this is still intended.
      dt = 'Tue, 27 Jun 2017 08:56:56'
      puts "\n GEEEETT url=[#{url}] params=[#{params.to_json}] \n"
      # Pull :headers out of params; the remainder becomes query params.
      headers = (params[:headers]) ? params.slice!(*params.keys.map { |x| (x == :headers) ? nil : x })[:headers] : nil
      puts "\n GEEEETT headers=[#{headers.to_json}] params=[#{params.to_json}] dt=[#{dt}] \n"
      response = connect.get(url, params) do |request|
        request.headers['Cookie'] = cookies if cookies.present?
        # The accounts/current endpoint must never be served from cache.
        request.headers['if-modified-since'] = dt unless (url.eql? '/private/api/v2/json/accounts/current')
        puts "\n get_r_headers=[#{request.headers.to_json}] \n\n env=[#{request.env.to_json}]\n\n\n"
      end
      handle_response(response)
    end

    def post(url, params = {})
      puts "\n POST POST url=[#{url}] params=[#{params.to_json}] \n"
      # Pull :headers out of params; the remainder becomes the JSON body.
      headers = (params[:headers]) ? params.slice!(*params.keys.map { |x| (x == :headers) ? nil : x })[:headers] : nil
      puts "\n POST POST headers=[#{headers.to_json}] params=[#{params.to_json}] \n"
      response = connect.post(url) do |request|
        request.headers['Cookie'] = cookies if cookies.present?
        request.headers['Content-Type'] = 'application/json'
        headers&.each { |k, v|
          # Bug fix: the value printed as the literal text "[v]" because
          # the interpolation braces were missing.
          puts "\n header k=[#{k}] val=[#{v}] \n"
          request.headers[k.to_s] = v.to_s
        }
        puts "\n post_r_headers=[#{request.headers.to_json}]\n"
        request.body = params.to_json
      end
      handle_response(response)
    end

    private

    attr_accessor :cookies

    # Remember the session cookie from an auth response.
    def cookie_handler(response)
      # Robustness: a response without a Set-Cookie header used to raise
      # NoMethodError on nil; skip instead of crashing.
      set_cookie = response.headers['set-cookie']
      self.cookies = set_cookie.split('; ')[0] if set_cookie
    end

    # Map HTTP status codes onto Amorail error classes; successful
    # statuses pass the response through unchanged.
    def handle_response(response) # rubocop:disable all
      return response if [200, 201, 204].include? response.status
      case response.status
      when 301
        fail ::Amorail::AmoMovedPermanentlyError
      when 400
        fail ::Amorail::AmoBadRequestError
      when 401
        fail ::Amorail::AmoUnauthorizedError
      when 403
        fail ::Amorail::AmoForbiddenError
      when 404
        fail ::Amorail::AmoNotFoundError
      when 500
        fail ::Amorail::AmoInternalError
      when 502
        fail ::Amorail::AmoBadGatewayError
      when 503
        fail ::Amorail::AmoServiceUnaviableError
      else
        # Bug fix: AmoUnknownError(response.body) tried to *call* the
        # constant as a method (NoMethodError); raise the class with the
        # body as the exception message instead.
        fail ::Amorail::AmoUnknownError, response.body
      end
    end
  end
end
eeeeeeeeeeee
require 'faraday'
require 'faraday_middleware'
require 'json'
require 'active_support'
module Amorail
  # Amorail http client: a thin Faraday wrapper around the amoCRM JSON API
  # that handles cookie-based authorization and maps HTTP error statuses
  # onto Amorail exception classes.
  class Client
    attr_reader :usermail, :api_key, :api_endpoint, :custom_options

    def initialize(api_endpoint: Amorail.config.api_endpoint,
                   api_key: Amorail.config.api_key,
                   usermail: Amorail.config.usermail,
                   custom_options: {})
      @api_endpoint = api_endpoint
      @api_key = api_key
      @usermail = usermail
      @custom_options = custom_options if custom_options.any?
      @connect = Faraday.new(url: api_endpoint) do |faraday|
        faraday.adapter Faraday.default_adapter
        faraday.response :json, content_type: /\bjson$/
        faraday.use :instrumentation
      end
    end

    # Account properties metadata, memoized per client.
    def properties
      @properties ||= Property.new(self)
    end

    # NOTE(review): the fallback returns a new Client, not a Faraday
    # connection; initialize always sets @connect, so the fallback looks
    # like dead code — confirm before relying on it.
    def connect
      @connect || self.class.new
    end

    # Log in with the configured credentials and capture the session cookie.
    def authorize
      puts "\n\n\n\n\n authorize authorize authorize authorize authorize authorize \n\n\n\n"
      self.cookies = nil
      response = post(
        Amorail.config.auth_url,
        'USER_LOGIN' => usermail,
        'USER_HASH' => api_key
      )
      cookie_handler(response)
      response
    end

    # Perform a request, re-authorizing once on a 401 and retrying.
    def safe_request(method, url, params = {})
      send(method, url, params)
    rescue ::Amorail::AmoUnauthorizedError
      authorize
      send(method, url, params)
    end

    def get(url, params = {})
      # NOTE(review): hard-coded If-Modified-Since date left from a
      # debugging session — confirm whether this is still intended.
      dt = 'Tue, 27 Jun 2017 08:56:56'
      puts "\n GEEEETT url=[#{url}] params=[#{params.to_json}] \n"
      # Pull :headers out of params; the remainder becomes query params.
      headers = (params[:headers]) ? params.slice!(*params.keys.map { |x| (x == :headers) ? nil : x })[:headers] : nil
      puts "\n GEEEETT headers=[#{headers.to_json}] params=[#{params.to_json}] dt=[#{dt}] \n"
      response = connect.get(url, params) do |request|
        request.headers['Cookie'] = cookies if cookies.present?
        # The accounts/current endpoint must never be served from cache.
        request.headers['if-modified-since'] = dt unless (url.eql? '/private/api/v2/json/accounts/current')
        puts "\n get_r_headers=[#{request.headers.to_json}] \n\n\n"
      end
      handle_response(response)
    end

    def post(url, params = {})
      puts "\n POST POST url=[#{url}] params=[#{params.to_json}] \n"
      # Pull :headers out of params; the remainder becomes the JSON body.
      headers = (params[:headers]) ? params.slice!(*params.keys.map { |x| (x == :headers) ? nil : x })[:headers] : nil
      puts "\n POST POST headers=[#{headers.to_json}] params=[#{params.to_json}] \n"
      response = connect.post(url) do |request|
        request.headers['Cookie'] = cookies if cookies.present?
        request.headers['Content-Type'] = 'application/json'
        headers&.each { |k, v|
          # Bug fix: the value printed as the literal text "[v]" because
          # the interpolation braces were missing.
          puts "\n header k=[#{k}] val=[#{v}] \n"
          request.headers[k.to_s] = v.to_s
        }
        puts "\n post_r_headers=[#{request.headers.to_json}]\n"
        request.body = params.to_json
      end
      handle_response(response)
    end

    private

    attr_accessor :cookies

    # Remember the session cookie from an auth response.
    def cookie_handler(response)
      # Robustness: a response without a Set-Cookie header used to raise
      # NoMethodError on nil; skip instead of crashing.
      set_cookie = response.headers['set-cookie']
      self.cookies = set_cookie.split('; ')[0] if set_cookie
    end

    # Map HTTP status codes onto Amorail error classes; successful
    # statuses pass the response through unchanged.
    def handle_response(response) # rubocop:disable all
      return response if [200, 201, 204].include? response.status
      case response.status
      when 301
        fail ::Amorail::AmoMovedPermanentlyError
      when 400
        fail ::Amorail::AmoBadRequestError
      when 401
        fail ::Amorail::AmoUnauthorizedError
      when 403
        fail ::Amorail::AmoForbiddenError
      when 404
        fail ::Amorail::AmoNotFoundError
      when 500
        fail ::Amorail::AmoInternalError
      when 502
        fail ::Amorail::AmoBadGatewayError
      when 503
        fail ::Amorail::AmoServiceUnaviableError
      else
        # Bug fix: AmoUnknownError(response.body) tried to *call* the
        # constant as a method (NoMethodError); raise the class with the
        # body as the exception message instead.
        fail ::Amorail::AmoUnknownError, response.body
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.