CombinedText stringlengths 4 3.42M |
|---|
require 'tescha/result_set'
module Tescha
  # A named collection of tests. The block given to #initialize is stored and
  # later instance_eval'd by #judge_results, so code inside the block runs
  # with the Pack instance as `self`.
  class Pack
    # description: human-readable name for this pack.
    # block: the test definitions, evaluated lazily by #judge_results.
    def initialize description, &block
      @description = description
      @test_block = block
      @test_results = []   # not yet written anywhere visible here — presumably filled by future test helpers; TODO confirm
    end

    # Judge the pack and print the resulting summary.
    def run_tests
      puts self.judge_results.summary
    end

    # Run the stored block in this instance's context, then return a
    # Tescha::ResultSet. NOTE(review): the ResultSet is constructed empty,
    # ignoring @test_results — confirm this is intentional at this stage.
    def judge_results
      self.instance_eval( &@test_block )
      Tescha::ResultSet.new
    end
  end
end
# Self-test: exercised only when this file is run directly.
if __FILE__ == $PROGRAM_NAME
  require 'tescha/meta_test'
  instance_in_test = Tescha::Pack.new 'An empty test pack' do
  end
  puts "\n---------------------------#judge_results"
  Tescha::MetaTest.test(
    "returns a Tescha::ResultSet",
    ( actual = instance_in_test.judge_results ).instance_of?( expected = Tescha::ResultSet ),
    "#{actual.inspect} is not a #{expected}"
  )
  puts "\n---------------------------#initialize"
  # The assertion lives inside the block so it observes the block's `self`,
  # which #judge_results rebinds to the Pack via instance_eval.
  pack = Tescha::Pack.new "test pack to test its test block's context" do
    Tescha::MetaTest.test(
      "the given block is evaluated in the context of the Tescha::Pack instance",
      self.instance_of?( Tescha::Pack ),
      "#{self.inspect} is not a Tescha::Pack"
    )
  end
  pack.judge_results
  # Outside any pack block, `self` must still be the top-level main object.
  Tescha::MetaTest.test(
    "outside the block is NOT evaluated in the context of the Tescha::Pack instance",
    ( self.to_s == 'main' ),
    "#{self.inspect} is not main object"
  )
end
Delete some verbose expressions in spec
require 'tescha/result_set'
module Tescha
  # Holds a description plus a block of tests; the block is evaluated in the
  # instance's own context when results are judged.
  class Pack
    def initialize description, &block
      @description = description
      @test_block = block
      @test_results = []
    end

    # Print the summary produced by judging this pack.
    def run_tests
      puts judge_results.summary
    end

    # Evaluate the stored block with this Pack as `self`, then hand back a
    # fresh Tescha::ResultSet.
    def judge_results
      instance_eval(&@test_block)
      Tescha::ResultSet.new
    end
  end
end
# Self-test: runs only when this file is executed directly. `include Tescha`
# lets the constants below be referenced without the Tescha:: prefix.
if __FILE__ == $PROGRAM_NAME
  require 'tescha/meta_test'
  include Tescha
  instance_in_test = Pack.new 'An empty test pack' do
  end
  puts "\n---------------------------#judge_results"
  MetaTest.test(
    "returns a Tescha::ResultSet",
    ( actual = instance_in_test.judge_results ).instance_of?( ResultSet ),
    "#{actual.inspect} is not a Tescha::ResultSet"
  )
  puts "\n---------------------------#initialize"
  # Placed inside the block so it observes the block's `self` (the Pack,
  # via instance_eval in #judge_results).
  pack = Pack.new "test pack to test its test block's context" do
    MetaTest.test(
      "the given block is evaluated in the context of the Tescha::Pack instance",
      self.instance_of?( Pack ),
      "#{self.inspect} is not a Tescha::Pack"
    )
  end
  pack.judge_results
  # Outside any block, `self` is still the top-level main object.
  MetaTest.test(
    "outside the block is NOT evaluated in the context of the Tescha::Pack instance",
    ( self.to_s == 'main' ),
    "#{self.inspect} is not main object"
  )
end
|
click here to comment on code in ./lib/thor/runner.rb
require 'thor'
require 'thor/group'
require 'thor/core_ext/file_binary_read'
require 'fileutils'
require 'open-uri'
require 'yaml'
require 'digest/md5'
require 'pathname'
class Thor::Runner < Thor #:nodoc:
map "-T" => :list, "-i" => :install, "-u" => :update, "-v" => :version
# Override Thor#help so it can give information about any class and any method.
#
def help(meth = nil)
  if meth && !self.respond_to?(meth)
    # Not one of Runner's own tasks: load the relevant Thorfiles and
    # delegate `-h <task>` to the class owning the namespace.
    initialize_thorfiles(meth)
    klass, task = Thor::Util.find_class_and_task_by_namespace(meth)
    self.class.handle_no_task_error(task, false) if klass.nil?
    klass.start(["-h", task].compact, :shell => self.shell)
  else
    super
  end
end
# If a task is not found on Thor::Runner, method missing is invoked and
# Thor::Runner is then responsible for finding the task in all classes.
#
# Resolve unknown task names by searching every loaded Thor class.
# NOTE(review): no matching respond_to_missing? override is visible here,
# so respond_to? will disagree with method_missing — confirm acceptable.
def method_missing(meth, *args)
  meth = meth.to_s
  initialize_thorfiles(meth)
  klass, task = Thor::Util.find_class_and_task_by_namespace(meth)
  self.class.handle_no_task_error(task, false) if klass.nil?
  # Re-prepend the task name so klass.start dispatches to it.
  args.unshift(task) if task
  klass.start(args, :shell => self.shell)
end
desc "install NAME", "Install an optionally named Thor file into your system tasks"
method_options :as => :string, :relative => :boolean, :force => :boolean
# Install a Thorfile from a local file, a directory containing main.thor,
# or an http(s) URL; record it in thor.yml and copy it into thor_root.
# Returns the stored filename on success, false if the user aborts.
def install(name)
  initialize_thorfiles
  # If a directory name is provided as the argument, look for a 'main.thor'
  # task in said directory.
  begin
    if File.directory?(File.expand_path(name))
      base, package = File.join(name, "main.thor"), :directory
      contents = File.read(base)
    elsif name =~ /^https?:\/\//
      base, package = name, :file
      # Use URI.open explicitly for remote locations: Kernel#open would also
      # shell out for names beginning with "|", which is unsafe on user input.
      contents = URI.open(name) { |input| input.read }
    else
      base, package = name, :file
      contents = File.read(name)
    end
  rescue OpenURI::HTTPError
    raise Error, "Error opening URI '#{name}'"
  rescue Errno::ENOENT
    raise Error, "Error opening file '#{name}'"
  end
  say "Your Thorfile contains:"
  say contents
  unless options["force"]
    return false if no?("Do you wish to continue [y/N]?")
  end
  # Module name may come from --as or from a leading "# module: Foo" comment.
  as = options["as"] || begin
    first_line = contents.split("\n")[0]
    (match = first_line.match(/\s*#\s*module:\s*([^\n]*)/)) ? match[1].strip : nil
  end
  unless as
    basename = File.basename(name)
    as = ask("Please specify a name for #{name} in the system repository [#{basename}]:")
    as = basename if as.empty?
  end
  # URLs and --relative installs are stored as given; local paths are
  # expanded so later updates work from any working directory.
  location = if options[:relative] || name =~ /^https?:\/\//
    name
  else
    File.expand_path(name)
  end
  thor_yaml[as] = {
    :filename => Digest::MD5.hexdigest(name + as),
    :location => location,
    :namespaces => Thor::Util.namespaces_in_content(contents, base)
  }
  save_yaml(thor_yaml)
  say "Storing thor file in your system repository"
  destination = File.join(thor_root, thor_yaml[as][:filename])
  if package == :file
    File.open(destination, "w") { |f| f.puts contents }
  else
    FileUtils.cp_r(name, destination)
  end
  thor_yaml[as][:filename] # Indicate success
end
desc "version", "Show Thor version"
# Print the running Thor version (require deferred until actually needed).
def version
  require 'thor/version'
  say "Thor #{Thor::VERSION}"
end
desc "uninstall NAME", "Uninstall a named Thor module"
# Remove the installed file/directory for +name+ and drop its thor.yml entry.
# Raises Error if no module with that name is registered.
def uninstall(name)
  raise Error, "Can't find module '#{name}'" unless thor_yaml[name]
  say "Uninstalling #{name}."
  FileUtils.rm_rf(File.join(thor_root, "#{thor_yaml[name][:filename]}"))
  thor_yaml.delete(name)
  save_yaml(thor_yaml)
  # Use `say` (the shell abstraction) like every other task, not bare `puts`.
  say "Done."
end
desc "update NAME", "Update a Thor file from its original location"
def update(name)
  raise Error, "Can't find module '#{name}'" if !thor_yaml[name] || !thor_yaml[name][:location]
  say "Updating '#{name}' from #{thor_yaml[name][:location]}"
  old_filename = thor_yaml[name][:filename]
  # Reinstall under the same module name.
  self.options = self.options.merge("as" => name)
  if File.directory? File.expand_path(name)
    # Directory packages are wiped and reinstalled from the local path.
    FileUtils.rm_rf(File.join(thor_root, old_filename))
    thor_yaml.delete(old_filename)
    save_yaml(thor_yaml)
    filename = install(name)
  else
    filename = install(thor_yaml[name][:location])
  end
  # install may store under a different digest; drop the stale copy if so.
  unless filename == old_filename
    File.delete(File.join(thor_root, old_filename))
  end
end
desc "installed", "List the installed Thor modules and tasks"
method_options :internal => :boolean
# List everything in the system repository; --internal also shows Thor's
# own classes.
def installed
  initialize_thorfiles(nil, true)
  display_klasses(true, options["internal"])
end
desc "list [SEARCH]", "List the available thor tasks (--substring means .*SEARCH)"
method_options :substring => :boolean, :group => :string, :all => :boolean, :debug => :boolean
# Show tasks whose namespace matches SEARCH (prefix match, case-insensitive).
def list(search = "")
  initialize_thorfiles
  search = ".*#{search}" if options["substring"]
  pattern = /^#{search}.*/i
  wanted_group = options[:group] || "standard"
  matches = Thor::Base.subclasses.select do |klass|
    next false unless options[:all] || klass.group == wanted_group
    klass.namespace =~ pattern
  end
  display_klasses(false, false, matches)
end
private
# Prefix usage strings with the `thor` executable name.
def self.banner(task, all = false, subcommand = false)
  "thor " + task.formatted_usage(self, all, subcommand)
end
# Root directory of the system task repository (delegates to Thor::Util).
def thor_root
  Thor::Util.thor_root
end
# Lazily load thor.yml from thor_root (memoized).
# Returns {} when the file does not exist yet.
def thor_yaml
  @thor_yaml ||= begin
    yaml_file = File.join(thor_root, "thor.yml")
    # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
    yaml = YAML.load_file(yaml_file) if File.exist?(yaml_file)
    yaml || {}
  end
end
# Save the yaml file. If none exists in thor root, creates one.
#
# Save the yaml file. If none exists in thor root, creates one.
def save_yaml(yaml)
  yaml_file = File.join(thor_root, "thor.yml")
  # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  unless File.exist?(yaml_file)
    FileUtils.mkdir_p(thor_root)
    FileUtils.touch(yaml_file)   # no need to recompute yaml_file here
  end
  File.open(yaml_file, "w") { |f| f.puts yaml.to_yaml }
end
# Make the `thor` executable exit non-zero when a task fails.
def self.exit_on_failure?
  true
end
# Load the Thorfiles. If relevant_to is supplied, looks for specific files
# in the thor_root instead of loading them all.
#
# By default, it also traverses the current path until find Thor files, as
# described in thorfiles. This look up can be skipped by supplying
# skip_lookup true.
#
def initialize_thorfiles(relevant_to=nil, skip_lookup=false)
  thorfiles(relevant_to, skip_lookup).each do |f|
    # Skip files whose classes are already registered to avoid double-loading.
    Thor::Util.load_thorfile(f, nil, options[:debug]) unless Thor::Base.subclass_files.keys.include?(File.expand_path(f))
  end
end
# Finds Thorfiles by traversing from your current directory down to the root
# directory of your system. If at any time we find a Thor file, we stop.
#
# We also ensure that system-wide Thorfiles are loaded first, so local
# Thorfiles can override them.
#
# ==== Example
#
# If we start at /Users/wycats/dev/thor ...
#
# 1. /Users/wycats/dev/thor
# 2. /Users/wycats/dev
# 3. /Users/wycats <-- we find a Thorfile here, so we stop
#
# Suppose we start at c:\Documents and Settings\james\dev\thor ...
#
# 1. c:\Documents and Settings\james\dev\thor
# 2. c:\Documents and Settings\james\dev
# 3. c:\Documents and Settings\james
# 4. c:\Documents and Settings
# 5. c:\ <-- no Thorfiles found!
#
def thorfiles(relevant_to=nil, skip_lookup=false)
  thorfiles = []
  unless skip_lookup
    # Walk from the cwd toward the filesystem root, stopping at the first
    # directory with any Thorfile glob match.
    Pathname.pwd.ascend do |path|
      thorfiles = Thor::Util.globs_for(path).map { |g| Dir[g] }.flatten
      break unless thorfiles.empty?
    end
  end
  # System-wide files come first so local Thorfiles can override them.
  files = (relevant_to ? thorfiles_relevant_to(relevant_to) : Thor::Util.thor_root_glob)
  files += thorfiles
  files -= ["#{thor_root}/thor.yml"]
  files.map! do |file|
    # Directory packages are loaded via their main.thor entry point.
    File.directory?(file) ? File.join(file, "main.thor") : file
  end
end
# Load Thorfiles relevant to the given method. If you provide "foo:bar" it
# will load all thor files in the thor.yml that have "foo" and "foo:bar"
# namespaces registered.
#
# Map a task name like "foo:bar" to the installed files that registered the
# "foo:bar" or "foo" namespaces in thor.yml.
def thorfiles_relevant_to(meth)
  parent = meth.split(":")[0...-1].join(":")
  lookup = [meth, parent]
  relevant = thor_yaml.select do |_name, spec|
    namespaces = spec[:namespaces]
    namespaces && !(namespaces & lookup).empty?
  end
  relevant.map { |_name, spec| File.join(thor_root, "#{spec[:filename]}") }
end
# Display information about the given klasses. If with_module is given,
# it shows a table with information extracted from the yaml file.
#
def display_klasses(with_modules=false, show_internal=false, klasses=Thor::Base.subclasses)
  klasses -= [Thor, Thor::Runner, Thor::Group] unless show_internal
  raise Error, "No Thor tasks available" if klasses.empty?
  show_modules if with_modules && !thor_yaml.empty?
  # Group printable tasks by top-level namespace; Thor::Group descendants
  # are collapsed into a single "root" bucket.
  list = Hash.new { |h,k| h[k] = [] }
  groups = klasses.select { |k| k.ancestors.include?(Thor::Group) }
  # Get classes which inherit from Thor
  (klasses - groups).each { |k| list[k.namespace.split(":").first] += k.printable_tasks(false) }
  # Get classes which inherit from Thor::Base
  groups.map! { |k| k.printable_tasks(false).first }
  list["root"] = groups
  # Order namespaces with default coming first
  list = list.sort{ |a,b| a[0].sub(/^default/, '') <=> b[0].sub(/^default/, '') }
  list.each { |n, tasks| display_tasks(n, tasks) unless tasks.empty? }
end
def display_tasks(namespace, list) #:nodoc:
  # Sort by usage string, print a colored namespace header, then the table.
  list.sort!{ |a,b| a[0] <=> b[0] }
  say shell.set_color(namespace, :blue, true)
  say "-" * namespace.size
  print_table(list, :truncate => true)
  say
end
# Print a two-column table of installed modules and their namespaces.
def show_modules #:nodoc:
  labels = ["Modules", "Namespaces"]
  rows = [labels, labels.map { |label| "-" * label.size }]
  thor_yaml.each do |name, hash|
    rows << [name, hash[:namespaces].join(", ")]
  end
  print_table rows
  say ""
end
end
|
class TokenStore
  @token_caches = {} # Hash of Memory/Dalli Store Caches, Keyed by namespace

  # Return (building and memoizing on first use) the token cache for
  # +namespace+, backed by the store configured in Settings.server.session_store.
  # Raises for unknown store types.
  def self.acquire(namespace, token_ttl)
    @token_caches[namespace] ||= begin
      # Build the shared options once instead of once per branch.
      options = cache_store_options(namespace, token_ttl)
      case ::Settings.server.session_store
      when "sql"
        SqlStore.new(options)
      when "memory"
        require 'active_support/cache/memory_store'
        ActiveSupport::Cache::MemoryStore.new(options)
      when "cache"
        require 'active_support/cache/dalli_store'
        ActiveSupport::Cache::DalliStore.new(MiqMemcached.server_address, options)
      else
        raise "unsupported session store type: #{::Settings.server.session_store}"
      end
    end
  end

  # Common backend options; the namespace prefix keeps different TokenStore
  # namespaces separate inside a shared cache server.
  def self.cache_store_options(namespace, token_ttl)
    {
      :namespace => "MIQ:TOKENS:#{namespace.upcase}",
      :threadsafe => true,
      :expires_in => token_ttl
    }
  end
  private_class_method :cache_store_options
end
Extract options from TokenStore.acquire
class TokenStore
  @token_caches = {} # Hash of Memory/Dalli Store Caches, Keyed by namespace

  # only used by TokenManager.token_store
  # @return a token store for users
  def self.acquire(namespace, token_ttl)
    @token_caches[namespace] ||= begin
      options = cache_store_options(namespace, token_ttl)
      case ::Settings.server.session_store
      when "sql"
        SqlStore.new(options)
      when "memory"
        require 'active_support/cache/memory_store'
        ActiveSupport::Cache::MemoryStore.new(options)
      when "cache"
        require 'active_support/cache/dalli_store'
        ActiveSupport::Cache::DalliStore.new(MiqMemcached.server_address, options)
      else
        raise "unsupported session store type: #{::Settings.server.session_store}"
      end
    end
  end

  # Options shared by every backend; the namespace prefix keeps token sets
  # for different TokenStore namespaces separate within one cache server.
  def self.cache_store_options(namespace, token_ttl)
    {
      :namespace => "MIQ:TOKENS:#{namespace.upcase}",
      :threadsafe => true,
      :expires_in => token_ttl
    }
  end
  private_class_method :cache_store_options
end
|
require 'tracker_api/version'
# dependencies
require 'virtus'
require 'faraday'
require 'faraday_middleware'
# stdlib
require 'addressable/uri'
require 'forwardable'
require 'logger'
module TrackerApi
  autoload :Error, 'tracker_api/error'
  autoload :Client, 'tracker_api/client'
  autoload :Logger, 'tracker_api/logger'

  module Errors
    class UnexpectedData < StandardError; end
  end

  # Endpoint classes, one per API operation group; loaded on first use.
  module Endpoints
    autoload :Epic, 'tracker_api/endpoints/epic'
    autoload :Epics, 'tracker_api/endpoints/epics'
    autoload :Iterations, 'tracker_api/endpoints/iterations'
    autoload :Me, 'tracker_api/endpoints/me'
    autoload :Project, 'tracker_api/endpoints/project'
    autoload :Projects, 'tracker_api/endpoints/projects'
    autoload :Stories, 'tracker_api/endpoints/stories'
    # Story was missing here, so calling Project#story raised
    # "NameError: uninitialized constant ...Endpoints::Story".
    autoload :Story, 'tracker_api/endpoints/story'
  end

  # Resource models returned by the endpoints.
  module Resources
    autoload :Account, 'tracker_api/resources/account'
    autoload :Epic, 'tracker_api/resources/epic'
    autoload :Iteration, 'tracker_api/resources/iteration'
    autoload :Me, 'tracker_api/resources/me'
    autoload :MembershipSummary, 'tracker_api/resources/membership_summary'
    autoload :Label, 'tracker_api/resources/label'
    autoload :Project, 'tracker_api/resources/project'
    autoload :Story, 'tracker_api/resources/story'
    autoload :TimeZone, 'tracker_api/resources/time_zone'
  end
end
Add Story Endpoint to Endpoints module declaration
While attempting to call ```.story([id])``` on a project object, received error:
> NameError: uninitialized constant Tracker::Endpoints::Story
Found we weren't actually including that endpoint. =)
require 'tracker_api/version'
# dependencies
require 'virtus'
require 'faraday'
require 'faraday_middleware'
# stdlib
require 'addressable/uri'
require 'forwardable'
require 'logger'
module TrackerApi
  autoload :Error, 'tracker_api/error'
  autoload :Client, 'tracker_api/client'
  autoload :Logger, 'tracker_api/logger'

  module Errors
    class UnexpectedData < StandardError; end
  end

  # Endpoint classes, one per API operation group; loaded on first use.
  module Endpoints
    autoload :Epic, 'tracker_api/endpoints/epic'
    autoload :Epics, 'tracker_api/endpoints/epics'
    autoload :Iterations, 'tracker_api/endpoints/iterations'
    autoload :Me, 'tracker_api/endpoints/me'
    autoload :Project, 'tracker_api/endpoints/project'
    autoload :Projects, 'tracker_api/endpoints/projects'
    autoload :Stories, 'tracker_api/endpoints/stories'
    autoload :Story, 'tracker_api/endpoints/story'
  end

  # Resource models returned by the endpoints.
  module Resources
    autoload :Account, 'tracker_api/resources/account'
    autoload :Epic, 'tracker_api/resources/epic'
    autoload :Iteration, 'tracker_api/resources/iteration'
    autoload :Me, 'tracker_api/resources/me'
    autoload :MembershipSummary, 'tracker_api/resources/membership_summary'
    autoload :Label, 'tracker_api/resources/label'
    autoload :Project, 'tracker_api/resources/project'
    autoload :Story, 'tracker_api/resources/story'
    autoload :TimeZone, 'tracker_api/resources/time_zone'
  end
end
|
require 'sidekiq/redis_connection'
require 'travis/logs/config'
if RUBY_PLATFORM =~ /^java/
require 'jrjackson'
else
require 'oj'
end
module Travis
  # Top-level convenience accessor used throughout the codebase.
  def self.config
    Travis::Logs.config
  end

  module Logs
    class << self
      attr_writer :config, :database_connection, :redis

      # Lazily loaded application configuration.
      def config
        @config ||= Travis::Logs::Config.load
      end

      # Memoized database connection.
      def database_connection
        @database_connection ||= Travis::Logs::Helpers::Database.connect
      end

      # Sidekiq-managed Redis pool built from config.
      def redis_pool
        @redis_pool ||= ::Sidekiq::RedisConnection.create(
          url: config.redis.url,
          namespace: config.sidekiq.namespace,
          size: config.sidekiq.pool_size
        )
      end

      # Deployed revision: local git HEAD when available; otherwise fall back
      # to HEROKU_SLUG_COMMIT, which carries the commit actually deployed on
      # Heroku (SOURCE_VERSION is a build-time variable, not set at runtime).
      def version
        @version ||=
          `git rev-parse HEAD 2>/dev/null || echo ${HEROKU_SLUG_COMMIT:-fafafaf}`.strip
      end
    end
  end
end
Switch to env var with correct deployed version
require 'sidekiq/redis_connection'
require 'travis/logs/config'
if RUBY_PLATFORM =~ /^java/
require 'jrjackson'
else
require 'oj'
end
module Travis
  # Top-level convenience accessor used throughout the codebase.
  def self.config
    Travis::Logs.config
  end

  module Logs
    class << self
      attr_writer :config, :database_connection, :redis

      # Lazily loaded application configuration.
      def config
        @config ||= Travis::Logs::Config.load
      end

      # Memoized database connection.
      def database_connection
        @database_connection ||= Travis::Logs::Helpers::Database.connect
      end

      # Sidekiq-managed Redis pool built from config.
      def redis_pool
        @redis_pool ||= ::Sidekiq::RedisConnection.create(
          url: config.redis.url,
          namespace: config.sidekiq.namespace,
          size: config.sidekiq.pool_size
        )
      end

      # Deployed revision: git HEAD locally, HEROKU_SLUG_COMMIT on Heroku
      # where no .git directory exists ("fafafaf" as a last resort).
      def version
        @version ||=
          `git rev-parse HEAD 2>/dev/null || echo ${HEROKU_SLUG_COMMIT:-fafafaf}`.strip
      end
    end
  end
end
|
require 'faraday'
require 'core_ext/hash/compact'
require 'core_ext/hash/deep_symbolize_keys'
require 'active_support/core_ext/string'
require 'active_support/core_ext/class/attribute'
require 'travis/support/logging'
require 'travis/support/exceptions'
module Travis
class Task
include Logging
class_attribute :run_local
DEFAULT_TIMEOUT = 60
class << self
  extend Exceptions::Handling

  # Enqueue this task for async execution on the given queue.
  def run(queue, *args)
    info "async_options: #{async_options(queue)}; args: #{args}"
    Travis::Async.run(self, :perform, async_options(queue), *args)
  end

  # Async entry point: build an instance and run it.
  def perform(*args)
    new(*args).run
  end
end
attr_reader :payload, :params

# Symbolize payload/params once here so the rest of the class can rely on
# symbol keys.
def initialize(payload, params = {})
  @payload = payload.deep_symbolize_keys
  @params = params.deep_symbolize_keys
end

# Run the task with the configured (or default) timeout.
# NOTE(review): #process is not defined in this class — presumably supplied
# by subclasses; confirm.
def run
  process(params[:timeout] || DEFAULT_TIMEOUT)
end
private

# Memoized slices of the payload.

def repository
  @repository ||= payload[:repository]
end

def slug
  @slug ||= payload.values_at(:owner_name, :name).join("/")
end

def build_url
  @build_url ||= payload[:build_url]
end

def job
  @job ||= payload[:job]
end

def build
  @build ||= payload[:build]
end

def request
  @request ||= payload[:request]
end

def commit
  @commit ||= payload[:commit]
end

# Truthy when the build payload marks a pull request.
def pull_request?
  build[:pull_request]
end

# PR number; nil for non-PR builds.
def pull_request_number
  if pull_request?
    payload[:pull_request_number]
  end
end
# Append UTM tracking parameters when enabled in config; otherwise return
# the url untouched.
def with_utm(url, source)
  utm = { utm_source: source, utm_medium: :notification }
  Travis.config.utm ? with_query_params(url, utm) : url
end
# Append params to url as a naive query string (values are not URI-escaped).
def with_query_params(url, params)
  query = params.map { |key, value| "#{key}=#{value}" }.join('&')
  "#{url}?#{query}"
end
# Memoized Faraday connection with url-encoded requests over net/http.
def http
  @http ||= Faraday.new(http_options) do |f|
    f.request :url_encoded
    f.adapter :net_http
  end
end
# Faraday connection options. `.compact` drops nil entries (e.g. a proxy
# that is not configured) so Faraday never receives explicit nils — this
# matches the later revision of this file.
def http_options
  {
    ssl: Travis.config.ssl.compact,
    proxy: Travis.config.fixie.url
  }.compact
end
end
end
Compact http options for safety
require 'faraday'
require 'core_ext/hash/compact'
require 'core_ext/hash/deep_symbolize_keys'
require 'active_support/core_ext/string'
require 'active_support/core_ext/class/attribute'
require 'travis/support/logging'
require 'travis/support/exceptions'
module Travis
class Task
include Logging
class_attribute :run_local
DEFAULT_TIMEOUT = 60
class << self
extend Exceptions::Handling
def run(queue, *args)
info "async_options: #{async_options(queue)}; args: #{args}"
Travis::Async.run(self, :perform, async_options(queue), *args)
end
def perform(*args)
new(*args).run
end
end
attr_reader :payload, :params
def initialize(payload, params = {})
@payload = payload.deep_symbolize_keys
@params = params.deep_symbolize_keys
end
def run
process(params[:timeout] || DEFAULT_TIMEOUT)
end
private
def repository
@repository ||= payload[:repository]
end
def slug
@slug ||= payload.values_at(:owner_name, :name).join("/")
end
def build_url
@build_url ||= payload[:build_url]
end
def job
@job ||= payload[:job]
end
def build
@build ||= payload[:build]
end
def request
@request ||= payload[:request]
end
def commit
@commit ||= payload[:commit]
end
def pull_request?
build[:pull_request]
end
def pull_request_number
if pull_request?
payload[:pull_request_number]
end
end
def with_utm(url, source)
utm = { utm_source: source, utm_medium: :notification }
Travis.config.utm ? with_query_params(url, utm) : url
end
def with_query_params(url, params)
"#{url}?#{params.map { |pair| pair.join('=') }.join('&')}"
end
def http
@http ||= Faraday.new(http_options) do |f|
f.request :url_encoded
f.adapter :net_http
end
end
# Faraday connection options; compacted so unset values (nil proxy when
# fixie is not configured) are omitted entirely.
def http_options
  {
    ssl: Travis.config.ssl.compact,
    proxy: Travis.config.fixie.url
  }.compact
end
end
end
|
module TRecs
  # A single frame of recorded terminal output, wrapping a (possibly
  # multi-line) string.
  class Frame
    include Enumerable

    attr_accessor :content

    def initialize(content="")
      @content = content
    end

    # Length of the widest line, trailing newline excluded.
    # Returns nil when content is empty.
    def width
      content.each_line.map { |line|
        line.chomp.size
      }.max
    end

    # Number of lines.
    def height
      content.lines.count
    end

    # Returns an Enumerator over the raw lines (newlines preserved).
    # NOTE(review): Enumerable normally expects #each to yield when given a
    # block; this always returns an Enumerator — confirm intended.
    def each
      content.each_line
    end

    def to_s
      content
    end
    alias :to_str :to_s
  end
end
# FIX: the original version had a stray extra `end` here, which made the
# file unparseable (SyntaxError); it has been removed.
Added Frame(str) conversion function
module TRecs
  # Wraps a chunk of (possibly multi-line) terminal text.
  class Frame
    include Enumerable

    attr_accessor :content

    def initialize(content = "")
      @content = content
    end

    # Length of the longest line, newline excluded (nil for empty content).
    def width
      content.each_line.map { |line| line.chomp.size }.max
    end

    # Line count.
    def height
      content.lines.count
    end

    # Enumerator over the raw lines (newlines preserved).
    def each
      content.each_line
    end

    def to_s
      content
    end
    alias_method :to_str, :to_s
  end

  # Conversion function: TRecs.Frame(x) passes Frames through untouched and
  # wraps anything string-like (responding to #to_str) in a new Frame.
  def Frame(value)
    return value if value.is_a?(Frame)
    Frame.new(value.to_str)
  end
  module_function :Frame
end
|
# encoding: utf-8
# frozen_string_literal: true
require 'tty-cursor'
require_relative 'spinner/version'
require_relative 'spinner/formats'
module TTY
# Used for creating terminal spinner
#
# @api public
class Spinner
include Formats
# @raised when attempting to join dead thread
NotSpinningError = Class.new(StandardError)
ECMA_ESC = "\x1b".freeze
ECMA_CSI = "\x1b[".freeze
ECMA_CHA = 'G'.freeze
ECMA_CLR = 'K'.freeze
DEC_RST = 'l'.freeze
DEC_SET = 'h'.freeze
DEC_TCEM = '?25'.freeze
MATCHER = /:spinner/
TICK = '✔'.freeze
CROSS = '✖'.freeze
CURSOR_USAGE_LOCK = Monitor.new
# The object that responds to print call defaulting to stderr
#
# @api public
attr_reader :output
# The current format type
#
# @return [String]
#
# @api public
attr_reader :format
# Whether to show or hide cursor
#
# @return [Boolean]
#
# @api public
attr_reader :hide_cursor
# The message to print before the spinner
#
# @return [String]
# the current message
#
# @api public
attr_reader :message
# Tokens for the message
#
# @return [Hash[Symbol, Object]]
# the current tokens
#
# @api public
attr_reader :tokens
# Initialize a spinner
#
# @example
# spinner = TTY::Spinner.new
#
# @param [String] message
# the message to print in front of the spinner
#
# @param [Hash] options
# @option options [String] :format
# the spinner format type defaulting to :spin_1
# @option options [Object] :output
# the object that responds to print call defaulting to stderr
# @option options [Boolean] :hide_cursor
# display or hide cursor
# @option options [Boolean] :clear
# clear output when finished
# @option options [Float] :interval
# the interval for auto spinning
#
# @api public
def initialize(*args)
options = args.last.is_a?(::Hash) ? args.pop : {}
@message = args.empty? ? ':spinner' : args.pop
@tokens = {}
@format = options.fetch(:format) { :classic }
@output = options.fetch(:output) { $stderr }
@hide_cursor = options.fetch(:hide_cursor) { false }
@frames = options.fetch(:frames) do
fetch_format(@format.to_sym, :frames)
end
@clear = options.fetch(:clear) { false }
@success_mark= options.fetch(:success_mark) { TICK }
@error_mark = options.fetch(:error_mark) { CROSS }
@interval = options.fetch(:interval) do
fetch_format(@format.to_sym, :interval)
end
@callbacks = Hash.new { |h, k| h[k] = [] }
@length = @frames.length
@current = 0
@done = false
@state = :stopped
@thread = nil
@multispinner= nil
@index = nil
@succeeded = false
@first_run = true
end
# Notifies the TTY::Spinner that it is running under a multispinner
#
# @param [TTY::Spinner::Multi] the multispinner that it is running under
# @param [Integer] the index of this spinner in the multispinner
#
# @api private
def add_multispinner(multispinner, index)
@multispinner = multispinner
@index = index
end
# Whether the spinner has succeeded
#
# @return [Boolean] whether or not the spinner succeeded
#
# @api public
def succeeded?
done? && @succeeded
end
# Whether the spinner has errored
#
# @return [Boolean] whether or not the spinner errored
#
# @api public
def errored?
done? && !@succeeded
end
# Whether the spinner has completed spinning
#
# @return [Boolean] whether or not the spinner has finished
#
# @api public
def done?
@done
end
# Whether the spinner is spinning
#
# @return [Boolean] whether or not the spinner is spinning
#
# @api public
def spinning?
@state == :spinning
end
# Whether the spinner is in the success state. This is only true
# temporarily while it is being marked with a success mark.
#
# @return [Boolean] whether or not the spinner is succeeding
#
# @api private
def success?
@state == :success
end
# Whether the spinner is in the error state. This is only true
# temporarily while it is being marked with a failure mark.
#
# @return [Boolean] whether or not the spinner is erroring
#
# @api private
def error?
@state == :error
end
# Register callback
#
# @api public
def on(name, &block)
@callbacks[name] << block
self
end
# Start timer and unlock spinner
#
# @api public
def start
@started_at = Time.now
@done = false
reset
end
# Start automatic spinning animation
#
# @api public
def auto_spin
CURSOR_USAGE_LOCK.synchronize do
start
sleep_time = 1.0 / @interval
spin
@thread = Thread.new do
sleep(sleep_time)
while @started_at
spin
sleep(sleep_time)
end
end
end
end
# Run spinner while executing job
#
# @param [String] stop_message
# the message displayed when block is finished
#
# @yield automatically animate and finish spinner
#
# @example
# spinner.run('Migrated DB') { ... }
#
# @api public
def run(stop_message = '', &block)
auto_spin
@work = Thread.new(&block)
@work.join
ensure
stop(stop_message)
end
# Duration of the spinning animation
#
# @return [Numeric]
#
# @api public
# Seconds elapsed since the spinner started, or nil when it is not running.
def duration
  return nil unless @started_at
  Time.now - @started_at
end
# Join running spinner
#
# @param [Float] timeout
# the timeout for join
#
# @api public
def join(timeout = nil)
unless @thread
raise(NotSpinningError, 'Cannot join spinner that is not running')
end
timeout ? @thread.join(timeout) : @thread.join
end
# Kill running spinner
#
# @api public
def kill
@thread.kill if @thread
end
# Perform a spin
#
# @return [String]
# the printed data
#
# @api public
def spin
return if @done
if @hide_cursor && !spinning?
write(ECMA_CSI + DEC_TCEM + DEC_RST)
end
data = message.gsub(MATCHER, @frames[@current])
data = replace_tokens(data)
write(data, true)
@current = (@current + 1) % @length
@state = :spinning
data
end
# Redraw the indent for this spinner, if it exists
#
# @api private
def redraw_indent
if @hide_cursor && !spinning?
write(ECMA_CSI + DEC_TCEM + DEC_RST)
end
write("", false)
end
# Finish spining
#
# @param [String] stop_message
# the stop message to print
#
# @api public
def stop(stop_message = '')
return if @done
if @hide_cursor
write(ECMA_CSI + DEC_TCEM + DEC_SET, false)
end
return clear_line if @clear
data = message.gsub(MATCHER, next_char)
data = replace_tokens(data)
if !stop_message.empty?
data << ' ' + stop_message
end
write(data, true)
write("\n", false) unless @clear || @multispinner
ensure
@state = :stopped
@done = true
@started_at = nil
emit(:done)
kill
end
# Retrieve next character
#
# @return [String]
#
# @api private
# The character to render in place of the spinner frame: the success or
# error mark while in those states, otherwise the last-drawn frame.
def next_char
  return @success_mark if success?
  return @error_mark if error?
  @frames[@current - 1]
end
# Finish spinning and set state to :success
#
# @api public
def success(stop_message = '')
@state = :success
@succeeded = true
stop(stop_message)
emit(:success)
end
# Finish spinning and set state to :error
#
# @api public
def error(stop_message = '')
@state = :error
stop(stop_message)
emit(:error)
end
# Clear current line
#
# @api public
def clear_line
write(ECMA_CSI + '0m' + ECMA_CSI + '1000D' + ECMA_CSI + ECMA_CLR)
end
# Update string formatting tokens
#
# @param [Hash[Symbol]] tokens
# the tokens used in formatting string
#
# @api public
def update(tokens)
clear_line if spinning?
@tokens.merge!(tokens)
end
# Reset the spinner to initial frame
#
# @api public
def reset
@current = 0
@first_run = true
end
private
# Execute a block on the proper terminal line if the spinner is running
# under a multispinner. Otherwise, execute the block on the current line.
#
# @api private
def execute_on_line
if @multispinner
CURSOR_USAGE_LOCK.synchronize do
lines_up = @multispinner.count_line_offset(@index)
if @first_run
yield if block_given?
output.print "\n"
@first_run = false
else
output.print TTY::Cursor.save
output.print TTY::Cursor.up(lines_up)
yield if block_given?
output.print TTY::Cursor.restore
end
end
else
yield if block_given?
end
end
# Write data out to output
#
# @return [nil]
#
# @api private
def write(data, clear_first = false)
execute_on_line do
output.print(ECMA_CSI + '1' + ECMA_CHA) if clear_first
# If there's a top level spinner, print with inset
characters_in = @multispinner.nil? ? "" : @multispinner.line_inset(self)
output.print(characters_in + data)
output.flush
end
end
# Emit callback
#
# @api private
def emit(name, *args)
@callbacks[name].each do |block|
block.call(*args)
end
end
# Find frames by token name
#
# @param [Symbol] token
# the name for the frames
#
# @return [Array, String]
#
# @api private
def fetch_format(token, property)
if FORMATS.key?(token)
FORMATS[token][property]
else
raise ArgumentError, "Unknown format token `:#{token}`"
end
end
# Replace any token inside string
#
# @param [String] string
# the string containing tokens
#
# @return [String]
#
# @api private
def replace_tokens(string)
data = string.dup
@tokens.each do |name, val|
data.gsub!(/\:#{name}/, val)
end
data
end
end # Spinner
end # TTY
Change to replace direct codes with cursor calls
# encoding: utf-8
# frozen_string_literal: true
require 'tty-cursor'
require_relative 'spinner/version'
require_relative 'spinner/formats'
module TTY
# Used for creating terminal spinner
#
# @api public
class Spinner
include Formats
# @raised when attempting to join dead thread
NotSpinningError = Class.new(StandardError)
ECMA_CSI = "\x1b[".freeze
ECMA_CLR = 'K'.freeze
MATCHER = /:spinner/
TICK = '✔'.freeze
CROSS = '✖'.freeze
CURSOR_USAGE_LOCK = Monitor.new
# The object that responds to print call defaulting to stderr
#
# @api public
attr_reader :output
# The current format type
#
# @return [String]
#
# @api public
attr_reader :format
# Whether to show or hide cursor
#
# @return [Boolean]
#
# @api public
attr_reader :hide_cursor
# The message to print before the spinner
#
# @return [String]
# the current message
#
# @api public
attr_reader :message
# Tokens for the message
#
# @return [Hash[Symbol, Object]]
# the current tokens
#
# @api public
attr_reader :tokens
# Initialize a spinner
#
# @example
# spinner = TTY::Spinner.new
#
# @param [String] message
# the message to print in front of the spinner
#
# @param [Hash] options
# @option options [String] :format
# the spinner format type defaulting to :spin_1
# @option options [Object] :output
# the object that responds to print call defaulting to stderr
# @option options [Boolean] :hide_cursor
# display or hide cursor
# @option options [Boolean] :clear
# clear output when finished
# @option options [Float] :interval
# the interval for auto spinning
#
# @api public
def initialize(*args)
options = args.last.is_a?(::Hash) ? args.pop : {}
@message = args.empty? ? ':spinner' : args.pop
@tokens = {}
@format = options.fetch(:format) { :classic }
@output = options.fetch(:output) { $stderr }
@hide_cursor = options.fetch(:hide_cursor) { false }
@frames = options.fetch(:frames) do
fetch_format(@format.to_sym, :frames)
end
@clear = options.fetch(:clear) { false }
@success_mark= options.fetch(:success_mark) { TICK }
@error_mark = options.fetch(:error_mark) { CROSS }
@interval = options.fetch(:interval) do
fetch_format(@format.to_sym, :interval)
end
@callbacks = Hash.new { |h, k| h[k] = [] }
@length = @frames.length
@current = 0
@done = false
@state = :stopped
@thread = nil
@multispinner= nil
@index = nil
@succeeded = false
@first_run = true
end
# Attach this spinner to a parent multispinner.
#
# @param [TTY::Spinner::Multi] multispinner the coordinating parent
# @param [Integer] index position of this spinner within the parent
#
# @api private
def add_multispinner(multispinner, index)
  @multispinner, @index = multispinner, index
end
# Whether the spinner has succeeded
#
# @return [Boolean] true when finished in the success state
#
# @api public
def succeeded?
  return false unless done?
  @succeeded
end
# Whether the spinner has errored
#
# @return [Boolean] true when finished without success
#
# @api public
def errored?
  return false unless done?
  !@succeeded
end
# Check whether the spinner has finished spinning.
#
# @return [Boolean] true once #stop has completed
#
# @api public
def done?
  @done
end
# Whether the spinner is currently spinning
#
# @return [Boolean] true while animation frames are being drawn
#
# @api public
def spinning?
  @state.equal?(:spinning)
end
# Whether the spinner is in the success state. This is only true
# temporarily while it is being marked with a success mark.
#
# @return [Boolean]
#
# @api private
def success?
  :success == @state
end
# Whether the spinner is in the error state. This is only true
# temporarily while it is being marked with a failure mark.
#
# @return [Boolean]
#
# @api private
def error?
  @state.eql?(:error)
end
# Register a callback to fire for the named event.
#
# @param [Symbol] name event to listen for (:done, :success, :error)
#
# @return [TTY::Spinner] self, to allow chaining
#
# @api public
def on(name, &block)
  @callbacks[name].push(block)
  self
end
# Start timer and unlock spinner
#
# Records the start timestamp, clears the done flag and rewinds
# the animation to its first frame.
#
# @api public
def start
  @done = false
  @started_at = Time.now
  reset
end
# Start automatic spinning animation
#
# Spins one frame immediately, then keeps spinning from a background
# thread every 1/@interval seconds until @started_at is cleared by
# #stop. The whole setup is serialised via CURSOR_USAGE_LOCK so that
# concurrent spinners don't interleave cursor movement.
#
# @api public
def auto_spin
  CURSOR_USAGE_LOCK.synchronize do
    start
    sleep_time = 1.0 / @interval
    spin
    @thread = Thread.new do
      sleep(sleep_time)
      # loop terminates when #stop sets @started_at to nil
      while @started_at
        spin
        sleep(sleep_time)
      end
    end
  end
end
# Run spinner while executing job
#
# Starts the auto-spin animation, runs the given block on a worker
# thread and waits for it; the ensure clause guarantees the spinner
# is stopped even when the block raises.
#
# @param [String] stop_message
#   the message displayed when block is finished
#
# @yield automatically animate and finish spinner
#
# @example
#   spinner.run('Migrated DB') { ... }
#
# @api public
def run(stop_message = '', &block)
  auto_spin
  @work = Thread.new(&block)
  @work.join
ensure
  stop(stop_message)
end
# Elapsed time since the spinner was started.
#
# @return [Numeric, nil] seconds since start, or nil when not started
#
# @api public
def duration
  return unless @started_at
  Time.now - @started_at
end
# Block until the spinner thread finishes.
#
# @param [Float] timeout
#   optional number of seconds to wait
#
# @raise [NotSpinningError] when no spinner thread is running
#
# @api public
def join(timeout = nil)
  raise(NotSpinningError, 'Cannot join spinner that is not running') unless @thread
  if timeout
    @thread.join(timeout)
  else
    @thread.join
  end
end
# Terminate the spinner thread when one is running; no-op otherwise.
#
# @api public
def kill
  @thread && @thread.kill
end
# Perform a spin
#
# Renders a single animation frame: substitutes the current frame for
# the :spinner token, replaces user tokens, writes the line and then
# advances the frame index (wrapping modulo the frame count). No-op
# once the spinner is done.
#
# @return [String]
#   the printed data
#
# @api public
def spin
  return if @done
  # hide the cursor once, on the transition into the spinning state
  if @hide_cursor && !spinning?
    write(TTY::Cursor.hide)
  end
  data = message.gsub(MATCHER, @frames[@current])
  data = replace_tokens(data)
  write(data, true)
  @current = (@current + 1) % @length
  @state = :spinning
  data
end
# Redraw the indent for this spinner, if it exists
#
# Writes an empty payload so #write re-prints the multispinner line
# inset. DEC_TCEM and DEC_RST are presumably cursor-visibility
# constants defined elsewhere in this class — TODO confirm they are
# in scope.
#
# @api private
def redraw_indent
  if @hide_cursor && !spinning?
    write(ECMA_CSI + DEC_TCEM + DEC_RST)
  end
  write("", false)
end
# Finish spinning
#
# Restores the cursor when hidden, then either clears the line (when
# :clear is set) or prints the final frame plus the optional stop
# message. The ensure clause runs even on the early returns: it marks
# the spinner done, clears the start time (which terminates the
# auto-spin loop), emits :done and kills the spin thread.
#
# @param [String] stop_message
#   the stop message to print
#
# @api public
def stop(stop_message = '')
  return if @done
  if @hide_cursor
    write(TTY::Cursor.show, false)
  end
  return clear_line if @clear
  data = message.gsub(MATCHER, next_char)
  data = replace_tokens(data)
  if !stop_message.empty?
    data << ' ' + stop_message
  end
  write(data, true)
  # under a multispinner the parent owns line positioning
  write("\n", false) unless @clear || @multispinner
ensure
  @state = :stopped
  @done = true
  @started_at = nil
  emit(:done)
  kill
end
# Pick the final character for the stopped spinner: the success or
# error mark when finishing in those states, otherwise the last
# frame that was drawn.
#
# @return [String]
#
# @api private
def next_char
  return @success_mark if success?
  return @error_mark if error?
  @frames[@current - 1]
end
# Finish spinning, mark the run as succeeded and notify listeners.
#
# @param [String] stop_message message to print after the mark
#
# @api public
def success(stop_message = '')
  @succeeded = true
  @state = :success
  stop(stop_message)
  emit(:success)
end
# Finish spinning and set state to :error
#
# @param [String] stop_message
#   the message to print after the error mark
#
# @api public
def error(stop_message = '')
  @state = :error
  # explicitly record failure so a spinner that previously succeeded
  # and is reused doesn't keep reporting succeeded? == true
  @succeeded = false
  stop(stop_message)
  emit(:error)
end
# Erase the current terminal line: reset attributes, move the cursor
# far left, then erase to end of line.
#
# @api public
def clear_line
  sequence = "#{ECMA_CSI}0m#{ECMA_CSI}1000D#{ECMA_CSI}#{ECMA_CLR}"
  write(sequence)
end
# Merge new formatting tokens into the current token set, clearing
# the line first when mid-animation so stale text isn't left behind.
#
# @param [Hash[Symbol]] tokens
#   the tokens used in formatting string
#
# @api public
def update(tokens)
  clear_line if spinning?
  @tokens.update(tokens)
end
# Rewind the animation to its first frame.
#
# @api public
def reset
  @first_run = true
  @current = 0
end
private

# Execute a block on the proper terminal line if the spinner is running
# under a multispinner. Otherwise, execute the block on the current line.
#
# Under a multispinner, cursor movement happens inside the shared
# lock: the cursor is saved, moved up to this spinner's line, the
# block runs, and the cursor is restored. On the very first draw a
# newline is printed instead so the spinner claims its own line.
#
# @api private
def execute_on_line
  if @multispinner
    CURSOR_USAGE_LOCK.synchronize do
      lines_up = @multispinner.count_line_offset(@index)
      if @first_run
        yield if block_given?
        output.print "\n"
        @first_run = false
      else
        output.print TTY::Cursor.save
        output.print TTY::Cursor.up(lines_up)
        yield if block_given?
        output.print TTY::Cursor.restore
      end
    end
  else
    yield if block_given?
  end
end
# Write data out to output
#
# Positions the cursor at column 1 first when clear_first is set,
# prefixes the multispinner line inset when one is present, then
# prints and flushes.
#
# @param [String] data the characters to print
# @param [Boolean] clear_first whether to rewind to column 1 first
#
# @return [nil]
#
# @api private
def write(data, clear_first = false)
  execute_on_line do
    output.print(TTY::Cursor.column(1)) if clear_first
    # If there's a top level spinner, print with inset
    characters_in = @multispinner.nil? ? "" : @multispinner.line_inset(self)
    output.print(characters_in + data)
    output.flush
  end
end
# Invoke every callback registered under the given event name,
# forwarding any extra arguments.
#
# @api private
def emit(name, *args)
  @callbacks[name].each { |handler| handler.call(*args) }
end
# Look up one property of a named spinner format.
#
# @param [Symbol] token
#   the format name to look up
# @param [Symbol] property
#   which attribute of the format to return
#
# @return [Array, String]
#
# @raise [ArgumentError] when the format token is unknown
#
# @api private
def fetch_format(token, property)
  raise ArgumentError, "Unknown format token `:#{token}`" unless FORMATS.key?(token)
  FORMATS[token][property]
end
# Substitute token placeholders (":name") inside a string with their
# registered values.
#
# @param [String] string
#   the string containing tokens
#
# @return [String] a new string with all tokens replaced
#
# @api private
def replace_tokens(string)
  data = string.dup
  @tokens.each do |name, val|
    # Escape the token name so characters special to regexps cannot
    # corrupt the pattern, and stringify the value so non-String
    # tokens (e.g. counters) don't raise a TypeError inside gsub.
    data.gsub!(/:#{Regexp.escape(name.to_s)}/, val.to_s)
  end
  data
end
end # Spinner
end # TTY
|
# encoding: utf-8
module TTY
  # Current released version of the gem; frozen so the constant
  # cannot be mutated at runtime.
  VERSION = "0.5.0".freeze
end # TTY
Bump gem version up.
# encoding: utf-8
module TTY
  # Current released version of the gem; frozen so the constant
  # cannot be mutated at runtime.
  VERSION = "0.6.0".freeze
end # TTY
|
require 'update_repo/version'
require 'yaml'
require 'colorize'
require 'confoog'
require 'trollop'
# Overall module with classes performing the functionality
# Contains Class UpdateRepo::WalkRepo
module UpdateRepo
  # This constant holds the name to the config file, located in ~/
  CONFIG_FILE = '.updatereporc'.freeze
  # An encapsulated class to walk the repo directories and update all Git
  # repositories found therein.
  class WalkRepo
    # Read-only attribute holding the total number of traversed repositories
    # @attr_reader :counter [fixnum] total number of traversed repositories
    attr_reader :counter
    # Class constructor. No parameters required.
    # @return [void]
    def initialize
      # @counter - this will be incremented with each repo updated.
      @counter = 0
      # @skip_count - will count all repos deliberately skipped
      @skip_count = 0
      # @start_time - will be used to get elapsed time
      @start_time = 0
      # @config - Class. Reads the configuration from a file in YAML format and
      # allows easy access to the configuration data. Use CONFIG_FILE so the
      # filename is defined in exactly one place.
      @config = Confoog::Settings.new(filename: CONFIG_FILE,
                                      prefix: 'update_repo',
                                      autoload: true,
                                      autosave: false)
      exit 1 unless @config.status[:errors] == Status::INFO_FILE_LOADED
      # store the command line variables in a configuration variable
      @config['cmd'] = set_options
    end

    # This function will perform the required actions to traverse the Repo.
    # @example
    #   walk_repo = UpdateRepo::WalkRepo.new
    #   walk_repo.start
    # @return [void]
    def start
      exceptions = @config['exceptions']
      show_header(exceptions)
      @config['location'].each do |loc|
        recurse_dir(loc, exceptions)
      end
      # print out an informative footer...
      footer
    end

    private

    # Parse the command-line options with Trollop.
    # @return [Hash] the parsed command-line options
    # rubocop:disable Metrics/MethodLength
    def set_options
      Trollop.options do
        version "\nupdate_repo version #{VERSION} (C)2016 G. Ramsay\n"
        banner <<-EOS
Keep multiple local Git-Cloned Repositories up to date with one command.
Usage:
update_repo [options]
Options are not required. If none are specified then the program will read from
the standard configuration file (~/.updatereporc) and automatically update the
specified Repositories.
Options:
EOS
        # opt :color, 'Use colored output', default: true
        # opt :quiet, 'Only minimal output to the terminal', default: false
        # opt :silent, 'Completely silent, no output to terminal at all.',
        #   default: false
      end
    end
    # rubocop:enable Metrics/MethodLength

    # take each directory contained in the Repo directory, and if it is detected
    # as a Git repository then update it.
    # @param dirname [string] Contains the directory to search for Git repos.
    # @param exceptions [array] Each repo matching one of these will be ignored.
    # @return [void]
    def recurse_dir(dirname, exceptions)
      Dir.foreach(dirname) do |dir|
        dirpath = dirname + '/' + dir
        next unless File.directory?(dirpath) && notdot?(dir)
        if gitdir?(dirpath)
          !exceptions.include?(dir) ? update_repo(dirpath) : skip_dir(dirpath)
        else
          recurse_dir(dirpath, exceptions)
        end
      end
    end

    # Display a simple header to the console
    # @example
    #   show_header(exceptions)
    # @param exceptions [array] list of excluded repos, printed when present
    # @return [void]
    def show_header(exceptions)
      # print an informative header before starting
      print "\nGit Repo update utility (v", VERSION, ')',
            " \u00A9 Grant Ramsay <seapagan@gmail.com>\n"
      print "Using Configuration from '#{@config.config_path}'\n"
      list_locations
      if exceptions
        print "\nExclusions:".underline, ' ',
              exceptions.join(', ').yellow, "\n"
      end
      # save the start time for later display in the footer...
      @start_time = Time.now
      print "\n" # blank line before processing starts
    end

    # print out a brief footer. This will be expanded later.
    # @return [void]
    def footer
      duration = Time.now - @start_time
      print "\nUpdates completed : ", @counter.to_s.green,
            ' repositories processed'
      print ' / ', @skip_count.to_s.yellow, ' skipped' unless @skip_count.zero?
      print ' in ', show_time(duration), "\n\n"
    end

    # Print the list of configured repository locations.
    # @return [void]
    def list_locations
      print "\nRepo location(s):\n".underline
      @config['location'].each do |loc|
        print '-> ', loc.cyan, "\n"
      end
    end

    # Report a deliberately skipped repository and count it.
    # @param dirpath [string] path of the skipped repository
    # @return [void]
    def skip_dir(dirpath)
      Dir.chdir(dirpath) do
        repo_url = `git config remote.origin.url`.chomp
        print "* Skipping #{dirpath}".yellow, " (#{repo_url})\n"
        @skip_count += 1
      end
    end

    # Run 'git pull' inside the given repository and count it.
    # @param dirname [string] path of the repository to update
    # @return [void]
    def update_repo(dirname)
      Dir.chdir(dirname) do
        repo_url = `git config remote.origin.url`.chomp
        print '* ', 'Checking ', dirname.green, " (#{repo_url})\n", ' -> '
        system 'git pull'
        @counter += 1
      end
    end

    # True when the given path contains a '.git' directory.
    # @param dirpath [string] path to test
    # @return [Boolean]
    def gitdir?(dirpath)
      gitpath = dirpath + '/.git'
      File.exist?(gitpath) && File.directory?(gitpath)
    end

    # True unless the directory entry is '.' or '..'.
    # @param dir [string] directory entry name
    # @return [Boolean]
    def notdot?(dir)
      (dir != '.' && dir != '..')
    end

    # Format a duration in seconds as a colored, human-readable string.
    # @param duration [Numeric] elapsed seconds
    # @return [String]
    def show_time(duration)
      time_taken = Time.at(duration).utc
      time_taken.strftime('%-H hours, %-M Minutes and %-S seconds.').cyan
    end
  end
end
Remove 'counter' attr_reader.
No longer required since all logic moved in to the #start method.
Signed-off-by: Grant Ramsay <4ab1b2fdb7784a8f9b55e81e3261617f44fd0585@gmail.com>
require 'update_repo/version'
require 'yaml'
require 'colorize'
require 'confoog'
require 'trollop'
# Overall module with classes performing the functionality
# Contains Class UpdateRepo::WalkRepo
module UpdateRepo
  # This constant holds the name to the config file, located in ~/
  CONFIG_FILE = '.updatereporc'.freeze
  # An encapsulated class to walk the repo directories and update all Git
  # repositories found therein.
  class WalkRepo
    # Class constructor. No parameters required.
    # @return [void]
    def initialize
      # @counter - this will be incremented with each repo updated.
      @counter = 0
      # @skip_count - will count all repos deliberately skipped
      @skip_count = 0
      # @start_time - will be used to get elapsed time
      @start_time = 0
      # @config - Class. Reads the configuration from a file in YAML format and
      # allows easy access to the configuration data. Use CONFIG_FILE so the
      # filename is defined in exactly one place.
      @config = Confoog::Settings.new(filename: CONFIG_FILE,
                                      prefix: 'update_repo',
                                      autoload: true,
                                      autosave: false)
      exit 1 unless @config.status[:errors] == Status::INFO_FILE_LOADED
      # store the command line variables in a configuration variable
      @config['cmd'] = set_options
    end

    # This function will perform the required actions to traverse the Repo.
    # @example
    #   walk_repo = UpdateRepo::WalkRepo.new
    #   walk_repo.start
    # @return [void]
    def start
      exceptions = @config['exceptions']
      show_header(exceptions)
      @config['location'].each do |loc|
        recurse_dir(loc, exceptions)
      end
      # print out an informative footer...
      footer
    end

    private

    # Parse the command-line options with Trollop.
    # @return [Hash] the parsed command-line options
    # rubocop:disable Metrics/MethodLength
    def set_options
      Trollop.options do
        version "\nupdate_repo version #{VERSION} (C)2016 G. Ramsay\n"
        banner <<-EOS
Keep multiple local Git-Cloned Repositories up to date with one command.
Usage:
update_repo [options]
Options are not required. If none are specified then the program will read from
the standard configuration file (~/.updatereporc) and automatically update the
specified Repositories.
Options:
EOS
        # opt :color, 'Use colored output', default: true
        # opt :quiet, 'Only minimal output to the terminal', default: false
        # opt :silent, 'Completely silent, no output to terminal at all.',
        #   default: false
      end
    end
    # rubocop:enable Metrics/MethodLength

    # take each directory contained in the Repo directory, and if it is detected
    # as a Git repository then update it.
    # @param dirname [string] Contains the directory to search for Git repos.
    # @param exceptions [array] Each repo matching one of these will be ignored.
    # @return [void]
    def recurse_dir(dirname, exceptions)
      Dir.foreach(dirname) do |dir|
        dirpath = dirname + '/' + dir
        next unless File.directory?(dirpath) && notdot?(dir)
        if gitdir?(dirpath)
          !exceptions.include?(dir) ? update_repo(dirpath) : skip_dir(dirpath)
        else
          recurse_dir(dirpath, exceptions)
        end
      end
    end

    # Display a simple header to the console
    # @example
    #   show_header(exceptions)
    # @param exceptions [array] list of excluded repos, printed when present
    # @return [void]
    def show_header(exceptions)
      # print an informative header before starting
      print "\nGit Repo update utility (v", VERSION, ')',
            " \u00A9 Grant Ramsay <seapagan@gmail.com>\n"
      print "Using Configuration from '#{@config.config_path}'\n"
      list_locations
      if exceptions
        print "\nExclusions:".underline, ' ',
              exceptions.join(', ').yellow, "\n"
      end
      # save the start time for later display in the footer...
      @start_time = Time.now
      print "\n" # blank line before processing starts
    end

    # print out a brief footer. This will be expanded later.
    # @return [void]
    def footer
      duration = Time.now - @start_time
      print "\nUpdates completed : ", @counter.to_s.green,
            ' repositories processed'
      print ' / ', @skip_count.to_s.yellow, ' skipped' unless @skip_count.zero?
      print ' in ', show_time(duration), "\n\n"
    end

    # Print the list of configured repository locations.
    # @return [void]
    def list_locations
      print "\nRepo location(s):\n".underline
      @config['location'].each do |loc|
        print '-> ', loc.cyan, "\n"
      end
    end

    # Report a deliberately skipped repository and count it.
    # @param dirpath [string] path of the skipped repository
    # @return [void]
    def skip_dir(dirpath)
      Dir.chdir(dirpath) do
        repo_url = `git config remote.origin.url`.chomp
        print "* Skipping #{dirpath}".yellow, " (#{repo_url})\n"
        @skip_count += 1
      end
    end

    # Run 'git pull' inside the given repository and count it.
    # @param dirname [string] path of the repository to update
    # @return [void]
    def update_repo(dirname)
      Dir.chdir(dirname) do
        repo_url = `git config remote.origin.url`.chomp
        print '* ', 'Checking ', dirname.green, " (#{repo_url})\n", ' -> '
        system 'git pull'
        @counter += 1
      end
    end

    # True when the given path contains a '.git' directory.
    # @param dirpath [string] path to test
    # @return [Boolean]
    def gitdir?(dirpath)
      gitpath = dirpath + '/.git'
      File.exist?(gitpath) && File.directory?(gitpath)
    end

    # True unless the directory entry is '.' or '..'.
    # @param dir [string] directory entry name
    # @return [Boolean]
    def notdot?(dir)
      (dir != '.' && dir != '..')
    end

    # Format a duration in seconds as a colored, human-readable string.
    # @param duration [Numeric] elapsed seconds
    # @return [String]
    def show_time(duration)
      time_taken = Time.at(duration).utc
      time_taken.strftime('%-H hours, %-M Minutes and %-S seconds.').cyan
    end
  end
end
|
module Vebra
class << self
# After converting an XML document to a Nokogiri XML object,
# this method generates an opinionated, well-formed Ruby hash
# tailored specifically for Vebra output
def parse(nokogiri_xml)
  raw = parse_node(nokogiri_xml)
  customise(raw)
end
private
# Nokogiri XML object => Ruby hash
#
# Recursively converts an element node into a hash of its attributes
# and children; text nodes are coerced via parse_value. Returns nil
# for element nodes with no content.
def parse_node(node)
  # bypass the top-level (document) node
  if node.respond_to?(:root)
    node = node.root
  end
  # searching within a document returns a node set, in which
  # case we need to retrieve the first element
  if !node.respond_to?(:element?)
    node = node[0]
  end
  if node.element?
    # if an element, check for presence of (valid) attributes and/or children;
    # otherwise, set value to nil
    node_hash = {}
    # if the node has attributes, extract them
    unless node.attributes.empty?
      attributes = node.attributes
      node_hash[:attributes] = attributes.inject({}) do |result, (key, value)|
        attribute = attributes[key]
        if attribute.namespace.nil? # ignore namespace schemas
          attr_key = (mappings[key] || key).to_sym
          result[attr_key] = parse_value(attribute.value)
        end
        result
      end
      # if the attributes hash is still empty, remove it
      node_hash.delete(:attributes) if node_hash[:attributes].empty?
    end
    # merge the attributes hash with the main object hash in some circumstances
    if merge_attributes.include?(node.name.to_sym) && !node_hash[:attributes].nil? && node_hash[:attributes] != {}
      node_hash = node_hash.delete(:attributes)
    end
    # iterate over the node's children, if there are any
    node.children.each do |child_node|
      child_result = parse_node(child_node)
      # convert { :value => #<value> } to #<value>
      if child_result.respond_to?(:keys) && child_result.size == 1 && child_result.keys.first == :value
        child_result = child_result.values.first
      end
      # map codes to their string equivalent
      if lookup = lookups[child_node.name]
        child_result = send("#{lookup}_lookup", child_result)
      end
      # define or extend the attribute
      unless child_node.name == "text" && child_result.nil?
        attr_key = (mappings[child_node.name] || child_node.name).downcase.to_sym
        attr_key = :value if attr_key == :text
        if !node_hash[attr_key]
          # if this attribute hasn't yet been set, set it's value
          if child_result && collections.include?(attr_key)
            # if this key is known to hold a collection, force it
            first_value = child_result.values.first
            node_hash[attr_key] = first_value.respond_to?(:<<) ? first_value : [ first_value ]
          else
            # set the value
            node_hash[attr_key] = child_result
          end
        elsif child_result
          # if this attribute already exists, create or extend a collection
          if node_hash[attr_key].respond_to?(:<<) && node_hash[attr_key].respond_to?(:each)
            # if the attribute's value is a collection already, add inject the new value
            node_hash[attr_key] << child_result
          else
            # otherwise, build a new collection
            node_hash[attr_key] = [ node_hash[attr_key], child_result ]
          end
        end
      end
    end
    return node_hash.empty? ? nil : node_hash
  else
    # this is a text node; parse the value
    parse_value(node.content.to_s)
  end
end
# As all values are initially strings, we try to convert them to
# richer Ruby objects (Integer, Float, nil, Date, Time) where the
# format allows; non-strings and unrecognised strings pass through.
def parse_value(value)
  return value unless value.is_a?(String)
  return value.to_i if value.to_i.to_s == value
  return value.to_f if value.to_f.to_s == value
  return nil if value.gsub(/^\s+|\s+$/, '') == '' || value == '(Not Specified)'
  return Date.parse(value) if value =~ %r{^\d{2}/\d{2}/\d{4}$}
  return Time.parse(value) if value =~ %r{^\d{2}/\d{2}/\d{4}\s\d{2}:\d{2}:\d{2}$}
  value
end
# Vebra don't have consistent key names, so we map them where appropriate
def mappings
  {
    # identifiers
    'propertyid' => 'property_id',
    'prop_id'    => 'vebra_ref',
    'propid'     => 'vebra_ref',
    'firmid'     => 'firm_id',
    'branchid'   => 'branch_id',
    'FirmID'     => 'firm_id',
    'BranchID'   => 'branch_id',
    # dates and statuses
    'lastchanged' => 'last_changed',
    'solddate'    => 'sold_on',
    'leaseend'    => 'lease_ends_on',
    'updated'     => 'updated_at',
    'available'   => 'available_on',
    'uploaded'    => 'uploaded_on',
    'web_status'  => 'status',
    # money and miscellaneous
    'soldprice'  => 'sold_price',
    'groundrent' => 'ground_rent',
    'userfield1' => 'user_field_1',
    'userfield2' => 'user_field_2',
    'price'      => 'price_attributes'
  }
end
# These attributes should always form an array, even with only a single item
def collections
  [:paragraphs, :bullets, :files]
end
# These attributes do not require a separate "attributes" attribute
def merge_attributes
  [:price, :area, :paragraph, :bullet, :file]
end
# The values of these attributes are codes which are mapped via
# their corresponding lookup method (e.g. property_status_lookup)
def lookups
  {
    'web_status' => 'property_status',
    'furnished'  => 'furnished_status'
  }
end
# Map the web_status code to [sales status, lettings status] pairs;
# unknown codes map to nil
def property_status_lookup(code)
  statuses = {
    0   => ['For Sale', 'To Let'],
    1   => ['Under Offer', 'Let'],
    2   => ['Sold', 'Under Offer'],
    3   => ['SSTC', 'Reserved'],
    4   => ['For Sale By Auction', 'Let Agreed'],
    5   => ['Reserved'],
    6   => ['New Instruction'],
    7   => ['Just on Market'],
    8   => ['Price Reduction'],
    9   => ['Keen to Sell'],
    10  => ['No Chain'],
    11  => ['Vendor will pay stamp duty'],
    12  => ['Offers in the region of'],
    13  => ['Guide Price'],
    200 => ['For Sale', 'To Let'],
    201 => ['Under Offer', 'Let'],
    202 => ['Sold', 'Under Offer'],
    203 => ['SSTC', 'Reserved'],
    214 => ['Under Offer', 'Let'],
    255 => []
  }
  statuses[code.to_i]
end
# Map the furnished code; unknown codes map to nil
def furnished_status_lookup(code)
  {
    0 => 'Furnished',
    1 => 'Part Furnished',
    2 => 'Un-Furnished',
    3 => 'Not Specified',
    4 => 'Furnished / Un-Furnished'
  }[code.to_i]
end
# Map the let_type code; unknown codes map to nil
def let_type_lookup(code)
  {
    0 => 'Not Specified',
    1 => 'Long Term',
    2 => 'Short Term',
    3 => 'Student',
    4 => 'Commercial'
  }[code.to_i]
end
# After parsing & converting the Nokogiri object into a Ruby hash,
# some additional changes are required to better structure the data.
# Each transformation below is annotated with the before/after shape
# of the affected keys; the hash is mutated in place and returned.
def customise(hash)
  # was: { :attributes => { :id => #<value> } }
  # now: { :attributes => { :vebra_id => #<value> } }
  if hash[:attributes] && hash[:attributes][:id]
    hash[:vebra_ref] = hash[:attributes].delete(:id)
  end
  # was: { :price_attributes => { :value => #<value>, ... } }
  # now: { :price_attributes => { ... }, :price => #<value> }
  if hash[:price_attributes]
    hash[:price] = hash[:price_attributes].delete(:value)
  end
  # was: { :type => [#<value>, #<value>] } or: { :type => #<value> }
  # now: { :property_type => #<value> }
  if hash[:type]
    hash[:property_type] = hash.delete(:type)
    hash[:property_type] = hash[:property_type].first if hash[:property_type].respond_to?(:each)
  end
  # was: { :reference => { :agents => #<value> } }
  # now: { :agent_reference => #<value> }
  if hash[:reference] && hash[:reference].size == 1 && hash[:reference].keys.first == :agents
    reference = hash.delete(:reference)
    hash[:agent_reference] = reference.delete(:agents)
  end
  # was: { :area => [ #<area - imperial>, #<area - metric> ] }
  # now: { :area => { :imperial => #<imperial>, :metric => #<metric> } }
  if area = hash[:area]
    hash[:area] = {}
    area.each do |a|
      hash[:area][a.delete(:measure).to_sym] = a
    end
  end
  # was: { :bullets => [ { :value => #<value> }, { :value => #<value> } ] }
  # now: { :bullets => [ #<value>, #<value> ] }
  if hash[:bullets]
    hash[:bullets].map! { |b| b[:value] }
  end
  # was: { :paragraphs => [ #<paragraph - type a, #<paragraph - type b> ] }
  # now: { :type_a => [ #<paragraph> ], :type_b => [ #<paragraph> ] }
  if paragraphs = hash.delete(:paragraphs)
    # extract each paragraph type into separate collections
    hash[:rooms] = paragraphs.select { |p| p[:type] == 0 }
    hash[:energy_reports] = paragraphs.select { |p| p[:type] == 1 }
    hash[:disclaimers] = paragraphs.select { |p| p[:type] == 2 }
    %w( rooms energy_reports disclaimers ).map(&:to_sym).each do |paragraph_type|
      hash[paragraph_type].each { |p| p[:vebra_ref] = p.delete(:id); p.delete(:type) }
    end
    # derive a room type from the name, e.g. "Bedroom 2" => "bedroom"
    hash[:rooms].each do |room|
      room[:room_type] = room[:name].gsub(/\s?[\d+]$/, '').downcase.gsub(/\s/, '_')
    end
  end
  # was: { :files => [ #<file - type a>, #<file - type b> ] }
  # now: { :files => { :type_a => [ #<file> ], :type_b => [ #<file> ] } }
  if files = hash.delete(:files)
    # extract each file type into separate collections
    hash[:files] = {
      :images => files.select { |f| f[:type] == 0 },
      :maps => files.select { |f| f[:type] == 1 },
      :floorplans => files.select { |f| f[:type] == 2 },
      :tours => files.select { |f| f[:type] == 3 },
      :ehouses => files.select { |f| f[:type] == 4 },
      :ipixes => files.select { |f| f[:type] == 5 },
      :pdfs => files.select { |f| f[:type] == 7 },
      :urls => files.select { |f| f[:type] == 8 },
      :energy_certificates => files.select { |f| f[:type] == 9 },
      :info_packs => files.select { |f| f[:type] == 10 }
    }
    %w( images maps floorplans tours ehouses ipixes pdfs urls energy_certificates info_packs ).map(&:to_sym).each do |file_type|
      hash[:files][file_type].each { |f| f[:vebra_ref] = f.delete(:id); f.delete(:type) }
    end
  end
  # was: { :hip => { :energy_performance => #<energy performance> } }
  # now: { :energy_performance => #<energy performance> }
  if hip = hash.delete(:hip)
    hash[:energy_performance] = hip[:energy_performance]
  end
  # was: { :street => #<street>, :town => #<town>, ... }
  # now: { :address => { :street => #<street>, :town => #<town>, ... } }
  if !hash[:address] && hash[:street] && hash[:town] && hash[:county] && hash[:postcode]
    hash[:address] = {
      :street => hash.delete(:street),
      :town => hash.delete(:town),
      :county => hash.delete(:county),
      :postcode => hash.delete(:postcode)
    }
  end
  # was: { :attributes => { :database => 1 }, :web_status => ['For Sale', 'To Let'] }
  # now: { :attributes => { :database => 1 }, :web_status => 'For Sale', :grouping => :sales }
  if hash[:attributes] && hash[:attributes][:database]
    hash[:group] = case hash[:attributes][:database]
                   when 1 then :sales
                   when 2 then :lettings
                   end
    # pick the status string matching the database (sales/lettings)
    if hash[:status]
      hash[:status] = hash[:status][hash[:attributes][:database] - 1]
    end
  end
  # was: { :garden => nil }
  # now: { :garden => false }
  hash[:garden] = !!hash[:garden] if hash.key?(:garden)
  # was: { :parking => nil }
  # now: { :parking => false }
  hash[:parking] = !!hash[:parking] if hash.key?(:parking)
  hash
end
end
end
Address issues pulling in the parking & garden attributes
module Vebra
class << self
# After converting an XML document to a Nokogiri XML object,
# this method generates an opinionated, well-formed Ruby hash
# tailored specifically for Vebra output
def parse(nokogiri_xml)
  raw = parse_node(nokogiri_xml)
  customise(raw)
end
private
# Nokogiri XML object => Ruby hash
#
# Recursively converts an element node into a hash of its attributes
# and children; text nodes are coerced via parse_value. Returns nil
# for element nodes with no content.
def parse_node(node)
  # bypass the top-level (document) node
  if node.respond_to?(:root)
    node = node.root
  end
  # searching within a document returns a node set, in which
  # case we need to retrieve the first element
  if !node.respond_to?(:element?)
    node = node[0]
  end
  if node.element?
    # if an element, check for presence of (valid) attributes and/or children;
    # otherwise, set value to nil
    node_hash = {}
    # if the node has attributes, extract them
    unless node.attributes.empty?
      attributes = node.attributes
      node_hash[:attributes] = attributes.inject({}) do |result, (key, value)|
        attribute = attributes[key]
        if attribute.namespace.nil? # ignore namespace schemas
          attr_key = (mappings[key] || key).to_sym
          result[attr_key] = parse_value(attribute.value)
        end
        result
      end
      # if the attributes hash is still empty, remove it
      node_hash.delete(:attributes) if node_hash[:attributes].empty?
    end
    # merge the attributes hash with the main object hash in some circumstances
    if merge_attributes.include?(node.name.to_sym) && !node_hash[:attributes].nil? && node_hash[:attributes] != {}
      node_hash = node_hash.delete(:attributes)
    end
    # iterate over the node's children, if there are any
    node.children.each do |child_node|
      child_result = parse_node(child_node)
      # convert { :value => #<value> } to #<value>
      if child_result.respond_to?(:keys) && child_result.size == 1 && child_result.keys.first == :value
        child_result = child_result.values.first
      end
      # map codes to their string equivalent
      if lookup = lookups[child_node.name]
        child_result = send("#{lookup}_lookup", child_result)
      end
      # define or extend the attribute
      unless child_node.name == "text" && child_result.nil?
        attr_key = (mappings[child_node.name] || child_node.name).downcase.to_sym
        attr_key = :value if attr_key == :text
        if !node_hash[attr_key]
          # if this attribute hasn't yet been set, set it's value
          if child_result && collections.include?(attr_key)
            # if this key is known to hold a collection, force it
            first_value = child_result.values.first
            node_hash[attr_key] = first_value.respond_to?(:<<) ? first_value : [ first_value ]
          else
            # set the value
            node_hash[attr_key] = child_result
          end
        elsif child_result
          # if this attribute already exists, create or extend a collection
          if node_hash[attr_key].respond_to?(:<<) && node_hash[attr_key].respond_to?(:each)
            # if the attribute's value is a collection already, add inject the new value
            node_hash[attr_key] << child_result
          else
            # otherwise, build a new collection
            node_hash[attr_key] = [ node_hash[attr_key], child_result ]
          end
        end
      end
    end
    return node_hash.empty? ? nil : node_hash
  else
    # this is a text node; parse the value
    parse_value(node.content.to_s)
  end
end
# As all values are initially strings, we try to convert them to
# richer Ruby objects (Integer, Float, nil, Date, Time) where the
# format allows; non-strings and unrecognised strings pass through.
def parse_value(value)
  return value unless value.is_a?(String)
  return value.to_i if value.to_i.to_s == value
  return value.to_f if value.to_f.to_s == value
  return nil if value.gsub(/^\s+|\s+$/, '') == '' || value == '(Not Specified)'
  return Date.parse(value) if value =~ %r{^\d{2}/\d{2}/\d{4}$}
  return Time.parse(value) if value =~ %r{^\d{2}/\d{2}/\d{4}\s\d{2}:\d{2}:\d{2}$}
  value
end
# Vebra don't have consistent key names, so we map them where appropriate
def mappings
  {
    # identifiers
    'propertyid' => 'property_id',
    'prop_id'    => 'vebra_ref',
    'propid'     => 'vebra_ref',
    'firmid'     => 'firm_id',
    'branchid'   => 'branch_id',
    'FirmID'     => 'firm_id',
    'BranchID'   => 'branch_id',
    # dates and statuses
    'lastchanged' => 'last_changed',
    'solddate'    => 'sold_on',
    'leaseend'    => 'lease_ends_on',
    'updated'     => 'updated_at',
    'available'   => 'available_on',
    'uploaded'    => 'uploaded_on',
    'web_status'  => 'status',
    # money and miscellaneous
    'soldprice'  => 'sold_price',
    'groundrent' => 'ground_rent',
    'userfield1' => 'user_field_1',
    'userfield2' => 'user_field_2',
    'price'      => 'price_attributes'
  }
end
# These attributes should always form an array, even with only a single item
def collections
  [:paragraphs, :bullets, :files]
end
# These attributes do not require a separate "attributes" attribute
def merge_attributes
  [:price, :area, :paragraph, :bullet, :file]
end
# The values of these attributes are codes which are mapped via
# their corresponding lookup method (e.g. property_status_lookup)
def lookups
  {
    'web_status' => 'property_status',
    'furnished'  => 'furnished_status'
  }
end
# Map the web_status code to [sales status, lettings status] pairs;
# unknown codes map to nil
def property_status_lookup(code)
  statuses = {
    0   => ['For Sale', 'To Let'],
    1   => ['Under Offer', 'Let'],
    2   => ['Sold', 'Under Offer'],
    3   => ['SSTC', 'Reserved'],
    4   => ['For Sale By Auction', 'Let Agreed'],
    5   => ['Reserved'],
    6   => ['New Instruction'],
    7   => ['Just on Market'],
    8   => ['Price Reduction'],
    9   => ['Keen to Sell'],
    10  => ['No Chain'],
    11  => ['Vendor will pay stamp duty'],
    12  => ['Offers in the region of'],
    13  => ['Guide Price'],
    200 => ['For Sale', 'To Let'],
    201 => ['Under Offer', 'Let'],
    202 => ['Sold', 'Under Offer'],
    203 => ['SSTC', 'Reserved'],
    214 => ['Under Offer', 'Let'],
    255 => []
  }
  statuses[code.to_i]
end
# Map the furnished code; unknown codes map to nil
def furnished_status_lookup(code)
  {
    0 => 'Furnished',
    1 => 'Part Furnished',
    2 => 'Un-Furnished',
    3 => 'Not Specified',
    4 => 'Furnished / Un-Furnished'
  }[code.to_i]
end
# Map the let_type code; unknown codes map to nil
def let_type_lookup(code)
  {
    0 => 'Not Specified',
    1 => 'Long Term',
    2 => 'Short Term',
    3 => 'Student',
    4 => 'Commercial'
  }[code.to_i]
end
# After parsing & converting the Nokogiri object into a Ruby hash,
# some additional changes are required to better structure the data.
#
# Mutates and returns +hash+. Each transformation below is guarded, so
# partial input hashes pass through safely. The before/after comments on
# each section describe the reshaping performed.
def customise(hash)
  # was: { :attributes => { :id => #<value> } }
  # now: { :attributes => { :vebra_id => #<value> } }
  if hash[:attributes] && hash[:attributes][:id]
    hash[:vebra_ref] = hash[:attributes].delete(:id)
  end
  # was: { :price_attributes => { :value => #<value>, ... } }
  # now: { :price_attributes => { ... }, :price => #<value> }
  if hash[:price_attributes]
    hash[:price] = hash[:price_attributes].delete(:value)
  end
  # was: { :type => [#<value>, #<value>] } or: { :type => #<value> }
  # now: { :property_type => #<value> }
  if hash[:type]
    hash[:property_type] = hash.delete(:type)
    # Collapse a multi-valued type to its first entry.
    hash[:property_type] = hash[:property_type].first if hash[:property_type].respond_to?(:each)
  end
  # was: { :reference => { :agents => #<value> } }
  # now: { :agent_reference => #<value> }
  if hash[:reference] && hash[:reference].size == 1 && hash[:reference].keys.first == :agents
    reference = hash.delete(:reference)
    hash[:agent_reference] = reference.delete(:agents)
  end
  # was: { :area => [ #<area - imperial>, #<area - metric> ] }
  # now: { :area => { :imperial => #<imperial>, :metric => #<metric> } }
  if area = hash[:area]
    hash[:area] = {}
    area.each do |a|
      # The :measure entry names the unit system and becomes the key.
      hash[:area][a.delete(:measure).to_sym] = a
    end
  end
  # was: { :bullets => [ { :value => #<value> }, { :value => #<value> } ] }
  # now: { :bullets => [ #<value>, #<value> ] }
  if hash[:bullets]
    hash[:bullets].map! { |b| b[:value] }
  end
  # was: { :paragraphs => [ #<paragraph - type a, #<paragraph - type b> ] }
  # now: { :type_a => [ #<paragraph> ], :type_b => [ #<paragraph> ] }
  if paragraphs = hash.delete(:paragraphs)
    # extract each paragraph type into separate collections
    hash[:rooms] = paragraphs.select { |p| p[:type] == 0; }
    hash[:energy_reports] = paragraphs.select { |p| p[:type] == 1; }
    hash[:disclaimers] = paragraphs.select { |p| p[:type] == 2; }
    # Rename :id to :vebra_ref and drop the now-redundant :type marker.
    %w( rooms energy_reports disclaimers ).map(&:to_sym).each do |paragraph_type|
      hash[paragraph_type].each { |p| p[:vebra_ref] = p.delete(:id); p.delete(:type) }
    end
    # Derive a snake_cased :room_type from the name, stripping a trailing
    # numeric suffix (e.g. "Bedroom 2" -> "bedroom").
    hash[:rooms].each do |room|
      room[:room_type] = room[:name].gsub(/\s?[\d+]$/, '').downcase.gsub(/\s/, '_')
    end
  end
  # was: { :files => [ #<file - type a>, #<file - type b> ] }
  # now: { :files => { :type_a => [ #<file> ], :type_b => [ #<file> ] } }
  if files = hash.delete(:files)
    # extract each file type into separate collections
    hash[:files] = {
      :images => files.select { |f| f[:type] == 0 },
      :maps => files.select { |f| f[:type] == 1 },
      :floorplans => files.select { |f| f[:type] == 2 },
      :tours => files.select { |f| f[:type] == 3 },
      :ehouses => files.select { |f| f[:type] == 4 },
      :ipixes => files.select { |f| f[:type] == 5 },
      :pdfs => files.select { |f| f[:type] == 7 },
      :urls => files.select { |f| f[:type] == 8 },
      :energy_certificates => files.select { |f| f[:type] == 9 },
      :info_packs => files.select { |f| f[:type] == 10 }
    }
    # Rename :id to :vebra_ref and drop the now-redundant :type marker.
    %w( images maps floorplans tours ehouses ipixes pdfs urls energy_certificates info_packs ).map(&:to_sym).each do |file_type|
      hash[:files][file_type].each { |f| f[:vebra_ref] = f.delete(:id); f.delete(:type) }
    end
  end
  # was: { :hip => { :energy_performance => #<energy performance> } }
  # now: { :energy_performance => #<energy performance> }
  if hip = hash.delete(:hip)
    hash[:energy_performance] = hip[:energy_performance]
  end
  # was: { :street => #<street>, :town => #<town>, ... }
  # now: { :address => { :street => #<street>, :town => #<town>, ... } }
  if !hash[:address] && hash[:street] && hash[:town] && hash[:county] && hash[:postcode]
    hash[:address] = {
      :street => hash.delete(:street),
      :town => hash.delete(:town),
      :county => hash.delete(:county),
      :postcode => hash.delete(:postcode)
    }
  end
  # was: { :attributes => { :database => 1 }, :web_status => ['For Sale', 'To Let'] }
  # now: { :attributes => { :database => 1 }, :web_status => 'For Sale', :grouping => :sales }
  if hash[:attributes] && hash[:attributes][:database]
    hash[:group] = case hash[:attributes][:database]
    when 1 then :sales
    when 2 then :lettings
    end
    # Pick the status wording that matches the database (1 = sales,
    # 2 = lettings) from the pair produced by property_status_lookup.
    if hash[:status]
      hash[:status] = hash[:status][hash[:attributes][:database]-1]
    end
  end
  # was: { :garden/parking => nil } or: { :garden/parking => 0 }
  # now: { :garden/parking => false }
  # NOTE(review): `present?` is ActiveSupport, not core Ruby — this method
  # assumes ActiveSupport is loaded; confirm against the gem's dependencies.
  [ :parking, :garden ].each do |key|
    if hash.keys.include?(key)
      hash[key] = hash[key].present? && hash[key].to_i != 0
    end
  end
  hash
end
end
end |
require 'wab'
module WAB
module IO
# Tracks a single in-flight request/response exchange: the caller's id
# (+qrid+), the matching reply id (+rid+), the eventual +result+, and a
# +giveup+ deadline after which the call is abandoned.
class Call
  attr_accessor :rid
  attr_accessor :result
  attr_accessor :thread
  attr_accessor :handler # controller
  attr_accessor :qrid
  attr_accessor :giveup

  # handler - controller that receives the reply, or nil for a
  #           synchronous call handled by the calling thread
  # qrid    - query request id used to match the reply
  # timeout - seconds from now until the call is given up
  def initialize(handler, qrid, timeout=2.0)
    @qrid = qrid
    @giveup = Time.now + timeout
    @handler = handler
    # Unset instance variables read back as nil in Ruby, so @rid and
    # @result need no explicit initialization. Only synchronous
    # (handler-less) calls park the current thread for wake-up.
    @thread = Thread.current if handler.nil?
  end
end # Call
end # IO
end # WAB
Remove redundant assignments in `IO::Call#initialize`:
unset instance variables already read back as `nil` in Ruby, so the
explicit `@rid = nil` / `@result = nil` / `@thread = nil` initializations
add nothing.
require 'wab'
module WAB
module IO
# Bookkeeping for one in-flight request/response exchange. Holds the query
# request id (+qrid+), the reply id (+rid+), the +result+ once it arrives,
# and the +giveup+ deadline. A handler-less call is synchronous and records
# the calling thread so it can be woken.
class Call
  attr_accessor :rid, :result, :thread, :qrid, :giveup
  attr_accessor :handler # controller

  # handler - controller for async replies, or nil for a synchronous call
  # qrid    - query request id used to match the reply
  # timeout - seconds until the call is abandoned
  def initialize(handler, qrid, timeout=2.0)
    @handler = handler
    @qrid = qrid
    @giveup = Time.now + timeout
    # @rid and @result stay unset (nil) until the reply arrives.
    @thread = Thread.current if handler.nil?
  end
end # Call
end # IO
end # WAB
|
require 'active_record'
module WepayRails
module Payments
require 'helpers/controller_helpers'
# HTTP gateway to the WePay API; built on HTTParty.
class Gateway
  include HTTParty

  # OAuth access token sent in the Authorization header of each call.
  attr_accessor :wepay_auth_code

  # Loads the Rails-environment section of config/wepay.yml into @config
  # and selects the production or staging API host.
  def initialize(*args)
    yml = Rails.root.join('config', 'wepay.yml').to_s
    @config = YAML.load_file(yml)[Rails.env].symbolize_keys
    @base_uri = Rails.env.production? ? "https://api.wepay.com" : "https://stage.wepay.com"
    # NOTE(review): base_uri is an HTTParty *class* macro; invoking it from
    # an instance initializer likely raises NoMethodError — confirm, and
    # consider moving this call into the class body.
    base_uri @base_uri
  end

  # Bearer-token Authorization header for authenticated API calls.
  # NOTE(review): the standard scheme is "Bearer <token>" without a colon —
  # confirm WePay accepts "Bearer: <token>".
  def wepay_auth_header
    {'Authorization' => "Bearer: #{@wepay_auth_code}"}
  end

  # Fetches the authenticated user's details from /v2/user and returns the
  # parsed JSON. The inspect dump is debug output.
  def wepay_user
    response = self.class.get("/v2/user", {:headers => wepay_auth_header})
    puts response.inspect
    JSON.parse(response)
  end
end
include WepayRails::Helpers::ControllerHelpers
end
require 'helpers/model_helpers'
# Ruby include hook: when WepayRails is mixed into a class (see the
# ActiveRecord::Base include below), extend it with the model helpers.
def self.included(base)
  base.extend WepayRails::Helpers::ModelHelpers
end
end
ActiveRecord::Base.send(:include, WepayRails)
Build in the ability to collect user data through the WePay API
require 'active_record'
module WepayRails
module Payments
require 'helpers/controller_helpers'
# HTTP gateway to the WePay API; built on HTTParty. The base URI is fixed
# at class-definition time: production hits api.wepay.com, every other
# Rails environment the staging host.
class Gateway
  include HTTParty
  base_uri Rails.env.production? ? "https://api.wepay.com" : "https://stage.wepay.com"

  # OAuth access token sent in the Authorization header of each call.
  attr_accessor :wepay_auth_code

  # Loads the Rails-environment section of config/wepay.yml into @config.
  def initialize(*args)
    yml = Rails.root.join('config', 'wepay.yml').to_s
    @config = YAML.load_file(yml)[Rails.env].symbolize_keys
  end

  # Bearer-token Authorization header for authenticated API calls.
  # NOTE(review): the standard scheme is "Bearer <token>" without a colon —
  # confirm WePay accepts "Bearer: <token>".
  def wepay_auth_header
    {'Authorization' => "Bearer: #{@wepay_auth_code}"}
  end

  # Fetches the authenticated user's details from /v2/user and returns the
  # parsed JSON. The inspect dump is debug output.
  def wepay_user
    response = self.class.get("/v2/user", {:headers => wepay_auth_header})
    puts response.inspect
    JSON.parse(response)
  end
end
include WepayRails::Helpers::ControllerHelpers
end
require 'helpers/model_helpers'
# Ruby include hook: when WepayRails is mixed into a class (see the
# ActiveRecord::Base include below), extend it with the model helpers.
def self.included(base)
  base.extend WepayRails::Helpers::ModelHelpers
end
end
ActiveRecord::Base.send(:include, WepayRails) |
require "mechanize"
require "wtpkp/course"
module WTPKP
# Queries the WTP (Warsaw public transport) journey planner for
# connections between two stops and returns them as sorted Course objects.
class Query
  # origin/destination - stop names as the planner's form expects them
  # time               - departure time, defaults to now
  def initialize(origin:, destination:, time: Time.now)
    @origin = origin
    @destination = destination
    @mechanize = Mechanize.new
    # Was `@time = time or Time.now`: `or` binds looser than `=`, so that
    # parsed as `(@time = time) or Time.now` and an explicit nil was never
    # replaced by the fallback. `||` restores the intended default.
    @time = time || Time.now
  end

  # Submits the search form and scrapes the results table.
  # Returns a sorted array of Course objects (departure/arrival pairs).
  def fetch
    @mechanize.get(WTPKP::URL) do |page|
      form = page.form_with(:name => 'formular') do |form|
        form.field_with(id: "from").value = @origin
        form.field_with(id: "to").value = @destination
        form.field_with(id: "date").value = @time.strftime("%d.%m.%Y")
        form.field_with(id: "time").value = @time.strftime("%H:%M")
      end
      departures_page = form.submit(form.button_with(name: 'start'))
      # The 5th cell of each overview row holds the HH:MM departure and
      # arrival times; keep only text nodes that look like times.
      departures = departures_page.search('.tpOverview > :nth-child(5), .selected > :nth-child(5)').map do |departure|
        departure.children.map(&:to_s).select { |t| t =~ /\d\d:\d\d/ }
      end.map do |record|
        Course.new origin: @origin, destination: @destination, departure: record[0], arrival: record[1]
      end
      return departures.sort
    end
  end
end
end
Fix operator precedence when defaulting `@time`: `time or Time.now` parses
as `(@time = time) or Time.now`, so use `||` instead of `or`.
require "mechanize"
require "wtpkp/course"
module WTPKP
# Queries the WTP journey planner for connections between two stops and
# returns them as sorted Course objects.
class Query
  # origin/destination - stop names as the planner's form expects them
  # time               - departure time, defaults to now
  def initialize(origin:, destination:, time: Time.now)
    @origin = origin
    @destination = destination
    @mechanize = Mechanize.new
    @time = time || Time.now
  end

  # Fills in and submits the search form, then scrapes the HH:MM departure
  # and arrival times out of the results table. Returns a sorted array of
  # Course objects.
  def fetch
    @mechanize.get(WTPKP::URL) do |page|
      search_form = page.form_with(:name => 'formular') do |f|
        f.field_with(id: "from").value = @origin
        f.field_with(id: "to").value = @destination
        f.field_with(id: "date").value = @time.strftime("%d.%m.%Y")
        f.field_with(id: "time").value = @time.strftime("%H:%M")
      end
      results_page = search_form.submit(search_form.button_with(name: 'start'))
      # The 5th cell of each overview row holds the times for one course.
      time_pairs = results_page.search('.tpOverview > :nth-child(5), .selected > :nth-child(5)').map do |cell|
        cell.children.map(&:to_s).select { |text| text =~ /\d\d:\d\d/ }
      end
      courses = time_pairs.map do |pair|
        Course.new origin: @origin, destination: @destination, departure: pair[0], arrival: pair[1]
      end
      return courses.sort
    end
  end
end
end
|
# encoding: utf-8
require "ostruct"
module Y2R
module AST
# Classes in this module represent YCP AST nodes. Their main taks is to
# compile themselves into Ruby AST nodes using the |compile| methods and its
# siblings (|compile_as_block|, etc.).
#
# The structure of the AST is heavily influenced by the structure of XML
# emitted by ycpc -x.
module YCP
# Compilation context passed to nodes' |compile| method. It mainly tracks
# the stack of blocks (scopes) we are inside and offers scope-related
# helper queries.
class CompilerContext < OpenStruct
  # True when any block on the stack is an instance of +klass+.
  def in?(klass)
    !blocks.find { |b| b.is_a?(klass) }.nil?
  end

  # The innermost enclosing block matching any of the given classes.
  def innermost(*klasses)
    blocks.reverse.find do |block|
      klasses.any? { |klass| block.is_a?(klass) }
    end
  end

  # Yields a copy of the context with +block+ pushed onto the block stack;
  # the receiver itself is left untouched.
  def inside(block)
    nested = dup
    nested.blocks = blocks + [block]
    yield nested
  end

  # Name of the outermost block — the module being compiled.
  def module_name
    blocks.first.name
  end

  # Names of all symbols anywhere on the block stack.
  def symbols
    blocks.flat_map { |b| b.symbols.map(&:name) }
  end

  # Symbol names from the innermost local scope downward (everything when
  # no block on the stack creates a local scope).
  def locals
    first_local = blocks.index(&:creates_local_scope?) || blocks.length
    blocks[first_local..-1].flat_map { |b| b.symbols.map(&:name) }
  end

  # Symbol names up to and including the first local-scope block.
  def globals
    first_local = blocks.index(&:creates_local_scope?) || blocks.length
    blocks[0..first_local].flat_map { |b| b.symbols.map(&:name) }
  end

  # The innermost symbol with the given name, or nil when unknown.
  def symbol_for(name)
    blocks.flat_map(&:symbols).reverse.find { |s| s.name == name }
  end
end
# Represents a YCP type as its textual form (e.g. "const list <string>").
class Type
  attr_reader :type

  def initialize(type)
    @type = type
  end

  # Structural equality: same class wrapping the same type string.
  def ==(other)
    other.instance_of?(Type) && other.type == @type
  end

  def to_s
    @type
  end

  # Reference types carry a trailing "&" in YCP.
  def reference?
    @type =~ /&$/
  end

  # Strips a leading "const " qualifier; returns self when there is none.
  def no_const
    return self unless @type =~ /^const /
    Type.new(@type.sub(/^const /, ""))
  end

  # Splits a function type's parenthesized argument list into individual
  # Type objects. Commas inside nested "(...)" or "<...>" groups do not
  # split.
  def arg_types
    inner = @type.sub(/^[^(]*\((.*)\)[^)]*$/, '\1')
    collected = []
    current = ""
    depth = 0
    inner.each_char do |char|
      if char == "," && depth == 0
        collected << current
        current = ""
        next
      end
      depth += 1 if char == "(" || char == "<"
      depth -= 1 if char == ")" || char == ">"
      current += char
    end
    collected << current unless current.empty?
    collected.map { |t| Type.new(t.strip) }
  end

  BOOLEAN = Type.new("boolean")
  INTEGER = Type.new("integer")
  SYMBOL  = Type.new("symbol")
end
# Contains utility functions related to Ruby variables.
module RubyVar
  # Taken from Ruby's parse.y (for 1.9.3). Used by |escape_local| to avoid
  # emitting a local variable that shadows a keyword.
  RUBY_KEYWORDS = [
    "BEGIN",
    "END",
    "__ENCODING__",
    "__FILE__",
    "__LINE__",
    "alias",
    "and",
    "begin",
    "break",
    "case",
    "class",
    "def",
    "defined",
    "do",
    "else",
    "elsif",
    "end",
    "ensure",
    "false",
    "for",
    "if",
    "in",
    "module",
    "next",
    "nil",
    "not",
    "or",
    "redo",
    "rescue",
    "retry",
    "return",
    "self",
    "super",
    "then",
    "true",
    "undef",
    "unless",
    "until",
    "when",
    "while",
    "yield"
  ]

  class << self
    # Escapes a YCP variable name so that it is a valid Ruby local
    # variable name.
    #
    # The escaping is constructed so that it can't create any collision
    # between names. More precisely, for any distinct strings passed to
    # this function the results will be also distinct.
    #
    # Keywords and names starting with an uppercase letter or underscore
    # (which Ruby would treat as constants or reserve) get a "_" prefix.
    def escape_local(name)
      name.sub(/^(#{RUBY_KEYWORDS.join("|")}|[A-Z_].*)$/) { |s| "_#{s}" }
    end

    # Builds a Ruby AST node for a variable with given name in given
    # context, doing all necessary escaping, de-aliasing, etc.
    #
    # ns      - namespace qualifier from the XML, or nil
    # name    - the variable's YCP name
    # context - CompilerContext used for scope lookups
    # mode    - :in_code (dereference reference vars via .value) or
    #           :in_arg (plain variable, e.g. in an argument list)
    def for(ns, name, context, mode)
      # In the XML, all global module variable references are qualified
      # (e.g. "M::i"). This includes references to variables defined in
      # this module. All other variable references are unqualified (e.g
      # "i").
      if ns
        if ns == context.module_name
          # Our own module's globals become instance variables.
          Ruby::Variable.new(:name => "@#{name}")
        else
          # Foreign module globals become accessor calls on that module.
          Ruby::MethodCall.new(
            :receiver => Ruby::Variable.new(:name => ns),
            :name => name,
            :args => [],
            :block => nil,
            :parens => true
          )
        end
      else
        is_local = context.locals.include?(name)
        variables = if is_local
          context.locals
        else
          context.globals
        end

        # If there already is a variable with given name (coming from some
        # parent scope), suffix the variable name with "2". If there are two
        # such variables, suffix the name with "3". And so on.
        #
        # The loop is needed because we need to do the same check and maybe
        # additional round(s) of suffixing also for suffixed variable names to
        # prevent conflicts.
        suffixed_name = name
        begin
          count = variables.select { |v| v == suffixed_name }.size
          suffixed_name = suffixed_name + count.to_s if count > 1
        end while count > 1

        variable_name = if is_local
          RubyVar.escape_local(suffixed_name)
        else
          "@#{suffixed_name}"
        end
        variable = Ruby::Variable.new(:name => variable_name)

        case mode
        when :in_code
          symbol = context.symbol_for(name)
          # The "symbol &&" part is needed only because of tests. The symbol
          # should be always present in real-world situations.
          if symbol && symbol.category == :reference
            # Reference variables are wrapped; read through their |value|.
            Ruby::MethodCall.new(
              :receiver => variable,
              :name => "value",
              :args => [],
              :block => nil,
              :parens => true
            )
          else
            variable
          end
        when :in_arg
          variable
        else
          raise "Unknown mode: #{mode.inspect}."
        end
      end
    end
  end
end
# Base class for all YCP AST nodes. OpenStruct-based, so attributes parsed
# from the XML become accessors automatically.
class Node < OpenStruct
  # Whether this node opens a new local variable scope. Overridden by
  # scope-creating blocks (DefBlock, UnspecBlock, YCPCode).
  def creates_local_scope?
    false
  end

  # Compiles +statements+ when present; otherwise yields an empty Ruby
  # statement list so callers always receive a valid node.
  def compile_statements(statements, context)
    if statements
      statements.compile(context)
    else
      Ruby::Statements.new(:statements => [])
    end
  end

  # Like |compile_statements|, but within a nested scope owned by self.
  def compile_statements_inside_block(statements, context)
    context.inside self do |inner_context|
      compile_statements(statements, inner_context)
    end
  end

  # Builds the header comment placed at the top of a generated file.
  #
  # filename - original YCP file name, embedded in the header
  # comment  - optional comment carried over from the YCP source
  def header_comment(filename, comment)
    lines = []
    lines << "Translated by Y2R (https://github.com/yast/y2r)."
    lines << ""
    # Was the literal text "#(unknown)" — a mangled interpolation that left
    # the +filename+ parameter unused; interpolate the actual file name.
    lines << "Original file: #{filename}"
    if comment
      lines << ""
      lines << comment
    end
    lines.join("\n")
  end
end
# Sorted alphabetically.
# Represents a YCP variable assignment; compiles to a Ruby assignment with
# a scope-resolved left-hand side.
class Assign < Node
  def compile(context)
    Ruby::Assignment.new(
      :lhs => RubyVar.for(ns, name, context, :in_code),
      :rhs => child.compile(context)
    )
  end
end
# Represents a YCP bracket assignment (|l[i] = v|); compiles to an
# Ops.assign call on the entry, index and right-hand side.
class Bracket < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => "assign",
      :args => [
        entry.compile(context),
        arg.compile(context),
        rhs.compile(context),
      ],
      :block => nil,
      :parens => true
    )
  end
end
# Represents a YCP |break| statement.
class Break < Node
  def compile(context)
    # In a real loop a plain Ruby |break| works. In an UnspecBlock —
    # compiled to a lambda (see UnspecBlock#compile) — emit raise(Break)
    # instead. Outside both, the statement is invalid YCP.
    case context.innermost(While, Do, Repeat, UnspecBlock)
    when While, Do, Repeat
      Ruby::Break.new
    when UnspecBlock
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "raise",
        :args => [Ruby::Variable.new(:name => "Break")],
        :block => nil,
        :parens => false
      )
    else
      raise "Misplaced \"break\" statement."
    end
  end
end
# Represents a YCP builtin call; compiles to a call on the matching
# runtime helper module (SCR/WFM directly, namespaced builtins on a
# Builtins submodule, everything else on Builtins itself).
class Builtin < Node
  def compile(context)
    module_name = case ns
    when "SCR"
      "SCR"
    when "WFM"
      "WFM"
    when "float"
      "Builtins::Float"
    when "list"
      "Builtins::List"
    when "multiset"
      "Builtins::Multiset"
    else
      "Builtins"
    end

    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => module_name),
      :name => name,
      :args => args.map { |a| a.compile(context) },
      # Some builtins take a block argument (e.g. iteration builtins).
      :block => block ? block.compile_as_block(context) : nil,
      :parens => true
    )
  end
end
# Represents a YCP function call — either a regular function or a function
# reference stored in a variable.
class Call < Node
  def compile(context)
    call = case category
      when :function
        if !ns && context.locals.include?(name)
          # A local holding a function reference: invoke it via |call|.
          Ruby::MethodCall.new(
            :receiver => RubyVar.for(nil, name, context, :in_code),
            :name => "call",
            :args => args.map { |a| a.compile(context) },
            :block => nil,
            :parens => true
          )
        else
          # In the XML, all module function calls are qualified (e.g.
          # "M::i"). This includes calls to functions defined in this
          # module. The problem is that in generated Ruby code, the module
          # namespace may not exist yet (e.g. when the function is called
          # at module toplevel in YCP), so we have to omit it (which is OK,
          # because then the call will be invoked on |self|, which is
          # always our module).
          fixed_ns = ns == context.module_name ? nil : ns
          receiver = if fixed_ns
            Ruby::Variable.new(:name => fixed_ns)
          else
            nil
          end

          Ruby::MethodCall.new(
            :receiver => receiver,
            :name => name,
            :args => args.map { |a| a.compile(context) },
            :block => nil,
            :parens => true
          )
        end
      when :variable # function reference stored in variable
        Ruby::MethodCall.new(
          :receiver => RubyVar.for(ns, name, context, :in_code),
          :name => "call",
          :args => args.map { |a| a.compile(context) },
          :block => nil,
          :parens => true
        )
      else
        raise "Unknown call category: #{category.inspect}."
    end

    # Arguments whose declared type is a reference get special treatment:
    # a setter expression before the call, a getter after it, and the
    # call's value is routed through a temporary so it remains the result
    # of the whole expression.
    reference_args_with_types = args.zip(type.arg_types).select do |arg, type|
      type.reference?
    end

    if !reference_args_with_types.empty?
      setters = reference_args_with_types.map do |arg, type|
        arg.compile_as_setter(context)
      end
      getters = reference_args_with_types.map do |arg, type|
        arg.compile_as_getter(context)
      end
      result_var = Ruby::Variable.new(
        :name => RubyVar.escape_local("#{name}_result")
      )

      Ruby::Expressions.new(
        :expressions => [
          *setters,
          Ruby::Assignment.new(:lhs => result_var, :rhs => call),
          *getters,
          result_var
        ]
      )
    else
      call
    end
  end
end
# Represents one |case| branch of a YCP switch; compiles to a Ruby |when|
# clause, dropping the trailing |break| (implicit in Ruby's case).
class Case < Node
  def compile(context)
    if body.statements.last.is_a?(Break)
      # The following dance is here because we want to keep the AST nodes
      # immutable and thus avoid modifying their data.
      body_without_break = body.dup
      body_without_break.statements = body.statements[0..-2]
    elsif body.statements.last.is_a?(Return)
      body_without_break = body
    else
      # Fall-through cases cannot be expressed with Ruby's case statement.
      raise NotImplementedError,
        "Case without a break or return encountered. These are not supported."
    end

    Ruby::When.new(
      :values => values.map { |v| v.compile(context) },
      :body => body_without_break.compile(context)
    )
  end
end
# Represents a YCP comparison; compiles to the corresponding Ops helper
# call rather than a native Ruby operator.
class Compare < Node
  # Comparison operator -> Ops method name.
  OPS_TO_METHODS = {
    "==" => "equal",
    "!=" => "not_equal",
    "<"  => "less_than",
    ">"  => "greater_than",
    "<=" => "less_or_equal",
    ">=" => "greater_or_equal"
  }

  def compile(context)
    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => OPS_TO_METHODS[op],
      :args => [lhs.compile(context), rhs.compile(context)],
      :block => nil,
      :parens => true
    )
  end
end
# Represents a YCP constant; compiles to the equivalent Ruby literal (or a
# |path(...)| call for path constants).
class Const < Node
  def compile(context)
    case type
    when :void
      Ruby::Literal.new(:value => nil)
    when :bool
      case value
      when "true"
        Ruby::Literal.new(:value => true)
      when "false"
        Ruby::Literal.new(:value => false)
      else
        raise "Unknown boolean value: #{value.inspect}."
      end
    when :int
      Ruby::Literal.new(:value => value.to_i)
    when :float
      # "1." is a valid YCP float but not valid Ruby; normalize to "1.0".
      Ruby::Literal.new(:value => value.sub(/\.$/, ".0").to_f)
    when :symbol
      Ruby::Literal.new(:value => value.to_sym)
    when :string
      Ruby::Literal.new(:value => value)
    when :path
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "path",
        :args => [Ruby::Literal.new(:value => value)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown const type: #{type.inspect}."
    end
  end
end
# Represents a YCP |continue| statement; compiles to Ruby's |next|.
class Continue < Node
  def compile(context)
    Ruby::Next.new
  end
end
# Represents the |default| branch of a YCP switch; compiles to a Ruby
# |else| clause, dropping a trailing |break| when present.
class Default < Node
  def compile(context)
    if body.statements.last.is_a?(Break)
      # The following dance is here because we want to keep the AST nodes
      # immutable and thus avoid modifying their data.
      body_without_break = body.dup
      body_without_break.statements = body.statements[0..-2]
    else
      body_without_break = body
    end

    Ruby::Else.new(:body => body_without_break.compile(context))
  end
end
# Represents a YCP definition block; opens a new local variable scope and
# compiles its statements within it.
class DefBlock < Node
  def creates_local_scope?
    true
  end

  def compile(context)
    context.inside self do |inner_context|
      Ruby::Statements.new(
        :statements => statements.map { |s| s.compile(inner_context) }
      )
    end
  end
end
# Represents a YCP |do| loop; compiles to a Ruby while whose body is a
# |begin| node (so the emitter can render do-while semantics).
class Do < Node
  # Loops introduce no symbols of their own.
  def symbols
    []
  end

  def compile(context)
    Ruby::While.new(
      :condition => self.while.compile(context),
      :body => Ruby::Begin.new(
        :statements => compile_statements_inside_block(self.do, context)
      )
    )
  end
end
# Represents a variable entry (e.g. the target of a bracket assignment).
class Entry < Node
  def compile(context)
    RubyVar.for(ns, name, context, :in_code)
  end

  # Used when the entry is passed by reference: names the "<name>_ref"
  # wrapper variable.
  def compile_as_ref(context)
    Ruby::Variable.new(:name => "#{name}_ref")
  end
end
# Represents a toplevel YCP client file; compiles to a Yast::<Name>Client
# class whose |main| method runs the file's toplevel statements, followed
# by a "<Name>Client.new.main" invocation.
class FileBlock < Node
  # File blocks are anonymous (no module name).
  def name
    nil
  end

  def compile(context)
    class_statements = []

    context.inside self do |inner_context|
      class_statements += build_main_def(inner_context)
      class_statements += build_other_defs(inner_context)
    end

    Ruby::Program.new(
      :statements => Ruby::Statements.new(
        :statements => [
          Ruby::Module.new(
            :name => "Yast",
            :statements => Ruby::Class.new(
              :name => class_name,
              :superclass => Ruby::Variable.new(:name => "Client"),
              :statements => Ruby::Statements.new(
                :statements => class_statements
              )
            )
          ),
          Ruby::MethodCall.new(
            :receiver => Ruby::MethodCall.new(
              :receiver => Ruby::ConstAccess.new(
                :receiver => Ruby::Variable.new(:name => "Yast"),
                :name => class_name
              ),
              :name => "new",
              :args => [],
              :block => nil,
              :parens => true
            ),
            :name => "main",
            :args => [],
            :block => nil,
            :parens => true
          )
        ]
      ),
      :comment => header_comment(filename, comment)
    )
  end

  private

  # Derives the client class name from the file name, camel-casing on
  # "_", "." and "-" (e.g. "my-client.ycp" -> "MyClientClient").
  def class_name
    client_name = File.basename(filename).sub(/\.[^.]*$/, "")
    client_name.
      gsub(/^./)    { |s| s.upcase   }.
      gsub(/[_.-]./) { |s| s[1].upcase } + "Client"
  end

  # Toplevel function definitions of the file.
  def fundef_statements
    statements.select { |s| s.is_a?(FunDef) }
  end

  # Everything else — executed when the client runs.
  def other_statements
    statements - fundef_statements
  end

  # Wraps the non-function toplevel statements into a |main| method; ends
  # with an explicit nil so |main|'s value is defined.
  def build_main_def(context)
    if !other_statements.empty?
      main_statements = other_statements.map { |s| s.compile(context) }
      main_statements << Ruby::Literal.new(:value => nil)

      [
        Ruby::Def.new(
          :name => "main",
          :args => [],
          :statements => Ruby::Statements.new(
            :statements => main_statements
          )
        )
      ]
    else
      []
    end
  end

  def build_other_defs(context)
    fundef_statements.map { |t| t.compile(context) }
  end
end
# Represents a filename marker in the XML; compiles to nothing.
class Filename < Node
  def compile(context)
    # Ignored because we don't care about filename information.
  end
end
# Represents a YCP function definition. Toplevel functions become Ruby
# |def|s; functions nested inside another function become lambdas assigned
# to a local variable.
class FunDef < Node
  def compile(context)
    statements = block.compile(context)

    context.inside block do |inner_context|
      # Prepend deep_copy prologues for mutable value-typed arguments
      # (see Symbol#needs_copy?).
      statements.statements = args.select(&:needs_copy?).map do |arg|
        arg.compile_as_copy_arg_call(inner_context)
      end + statements.statements

      # Guarantee a nil result when the body does not end with a return.
      unless statements.statements.last.is_a? Ruby::Return
        statements.statements << Ruby::Literal.new(:value => nil)
      end

      if !context.in?(DefBlock)
        Ruby::Def.new(
          :name => name,
          :args => args.map { |a| a.compile(inner_context) },
          :statements => statements
        )
      else
        # Nested function: store a lambda in a local variable.
        Ruby::Assignment.new(
          :lhs => RubyVar.for(nil, name, context, :in_code),
          :rhs => Ruby::MethodCall.new(
            :receiver => nil,
            :name => "lambda",
            :args => [],
            :block => Ruby::Block.new(
              :args => args.map { |a| a.compile(inner_context) },
              :statements => statements
            ),
            :parens => true
          )
        )
      end
    end
  end
end
# Represents a YCP |if| statement; compiles to a Ruby if with an optional
# else branch.
class If < Node
  def compile(context)
    then_compiled = compile_statements(self.then, context)
    else_compiled = if self.else
      compile_statements(self.else, context)
    else
      nil
    end

    Ruby::If.new(
      :condition => cond.compile(context),
      :then => then_compiled,
      :else => else_compiled
    )
  end
end
# Represents a YCP |import| statement; compiles to a Yast.import call.
class Import < Node
  def compile(context)
    # Using any SCR or WFM function results in an auto-import. We ignore
    # these auto-imports because neither SCR nor WFM are real modules.
    return nil if name == "SCR" || name == "WFM"

    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Yast"),
      :name => "import",
      :args => [Ruby::Literal.new(:value => name)],
      :block => nil,
      :parens => true
    )
  end
end
# Represents a YCP |include| statement; compiles to a Yast.include call
# when include files are kept separate, or to nothing when ycpc already
# inlined the file.
class Include < Node
  def compile(context)
    if context.options[:dont_inline_include_files]
      args = [
        # Inside an include file the enclosing target is passed through;
        # elsewhere the including object itself is the target.
        if context.options[:as_include_file]
          Ruby::Variable.new(:name => "include_target")
        else
          Ruby::Self.new
        end,
        Ruby::Literal.new(:value => name.sub(/\.y(cp|h)$/, ".rb"))
      ]

      Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => "Yast"),
        :name => "include",
        :args => args,
        :block => nil,
        :parens => true
      )
    else
      nil # ycpc already included the file for us.
    end
  end
end
# Represents a YCP include file; compiles to a Yast::<Path>Include module
# whose "initialize_<path>" method runs the file's toplevel statements.
class IncludeBlock < Node
  # Include blocks are anonymous (no module name).
  def name
    nil
  end

  def compile(context)
    class_statements = []

    context.inside self do |inner_context|
      class_statements += build_initialize_method_def(inner_context)
      class_statements += build_other_defs(inner_context)
    end

    Ruby::Program.new(
      :statements => Ruby::Statements.new(
        :statements => [
          Ruby::Module.new(
            :name => "Yast",
            :statements => Ruby::Module.new(
              :name => module_name,
              :statements => Ruby::Statements.new(
                :statements => class_statements
              )
            )
          )
        ]
      ),
      :comment => header_comment(filename, comment)
    )
  end

  private

  # Camel-cases the path segments into the module name, e.g.
  # ["network", "lan"] -> "NetworkLanInclude".
  def module_name
    parts = path_parts.map do |part|
      part.
        gsub(/^./)    { |s| s.upcase   }.
        gsub(/[_.-]./) { |s| s[1].upcase }
    end

    "#{parts.join("")}Include"
  end

  # Snake-cases the path segments into the initializer name, e.g.
  # ["network", "lan"] -> "initialize_network_lan".
  def initialize_method_name
    parts = path_parts.map { |p| p.gsub(/[_.-]/, "_") }

    "initialize_#{parts.join("_")}"
  end

  # Path segments relative to the src/include directory (or just the base
  # name when the file lives elsewhere), without the .ycp/.yh extension.
  def path_parts
    path = if filename =~ /\/src\/include\//
      filename.sub(/^.*\/src\/include\//, "")
    else
      File.basename(filename)
    end

    path.sub(/\.y(cp|h)$/, "").split("/")
  end

  # Toplevel function definitions of the file.
  def fundef_statements
    statements.select { |s| s.is_a?(FunDef) }
  end

  # Everything else — executed when the include is initialized.
  def other_statements
    statements - fundef_statements
  end

  # Wraps the non-function toplevel statements into the include's
  # initializer, which receives the including object as include_target.
  def build_initialize_method_def(context)
    if !other_statements.empty?
      initialize_method_statements = other_statements.map { |s| s.compile(context) }

      [
        Ruby::Def.new(
          :name => initialize_method_name,
          :args => [Ruby::Variable.new(:name => "include_target")],
          :statements => Ruby::Statements.new(
            :statements => initialize_method_statements
          )
        )
      ]
    else
      []
    end
  end

  def build_other_defs(context)
    fundef_statements.map { |t| t.compile(context) }
  end
end
# Represents a YCP list literal; compiles to a Ruby array literal.
class List < Node
  def compile(context)
    Ruby::Array.new(
      :elements => children.map { |ch| ch.compile(context) }
    )
  end
end
# Represents a translatable YCP string; compiles to a gettext-style
# |_("...")| call.
class Locale < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "_",
      :args => [Ruby::Literal.new(:value => text)],
      :block => nil,
      :parens => true
    )
  end
end
# Represents a YCP map literal; compiles to a Ruby hash literal.
class Map < Node
  def compile(context)
    Ruby::Hash.new(:entries => children.map { |ch| ch.compile(context) })
  end
end
# Represents one key/value pair of a YCP map literal.
class MapElement < Node
  def compile(context)
    Ruby::HashEntry.new(
      :key => key.compile(context),
      :value => value.compile(context)
    )
  end
end
# Represents a YCP module; compiles to a Yast::<Name>Class class plus a
# <Name> constant holding its singleton instance, with a |main| call when
# the module has toplevel code or a constructor.
class ModuleBlock < Node
  def compile(context)
    # The generated class is named after the module, so the name must be a
    # valid Ruby constant.
    if name !~ /^[A-Z][a-zA-Z0-9_]*$/
      raise NotImplementedError,
        "Invalid module name: #{name.inspect}. Module names that are not Ruby class names are not supported."
    end

    class_statements = []

    context.inside self do |inner_context|
      class_statements += build_main_def(inner_context)
      class_statements += build_other_defs(inner_context)
      class_statements += build_publish_calls(inner_context)
    end

    module_statements = [
      Ruby::Class.new(
        :name => "#{name}Class",
        :superclass => Ruby::Variable.new(:name => "Module"),
        :statements => Ruby::Statements.new(
          :statements => class_statements
        )
      ),
      Ruby::Assignment.new(
        :lhs => Ruby::Variable.new(:name => name),
        :rhs => Ruby::MethodCall.new(
          :receiver => Ruby::Variable.new(:name => "#{name}Class"),
          :name => "new",
          :args => [],
          :block => nil,
          :parens => true
        )
      )
    ]

    if has_main_def?
      module_statements << Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => name),
        :name => "main",
        :args => [],
        :block => nil,
        :parens => true
      )
    end

    Ruby::Program.new(
      :statements => Ruby::Statements.new(
        :statements => [
          Ruby::MethodCall.new(
            :receiver => nil,
            :name => "require",
            :args => [Ruby::Literal.new(:value => "ycp")],
            :block => nil,
            :parens => false
          ),
          Ruby::Module.new(
            :name => "Yast",
            :statements => Ruby::Statements.new(
              :statements => module_statements
            )
          )
        ]
      ),
      :comment => header_comment(filename, comment)
    )
  end

  private

  # Toplevel function definitions of the module.
  def fundef_statements
    statements.select { |s| s.is_a?(FunDef) }
  end

  # Everything else — executed when the module is initialized.
  def other_statements
    statements - fundef_statements
  end

  # A YCP constructor is a function named like the module itself.
  def constructor
    fundef_statements.find { |s| s.name == name }
  end

  def has_main_def?
    !other_statements.empty? || constructor
  end

  # Wraps the toplevel statements (and the constructor call, when one
  # exists) into a |main| method.
  def build_main_def(context)
    if has_main_def?
      main_statements = other_statements.map { |s| s.compile(context) }

      if constructor
        main_statements << Ruby::MethodCall.new(
          :receiver => nil,
          :name => name,
          :args => [],
          :block => nil,
          :parens => true
        )
      end

      [
        Ruby::Def.new(
          :name => "main",
          :args => [],
          :statements => Ruby::Statements.new(
            :statements => main_statements
          )
        )
      ]
    else
      []
    end
  end

  def build_other_defs(context)
    fundef_statements.map { |t| t.compile(context) }
  end

  # Emits a publish call per exported symbol; the :export_private option
  # additionally publishes private ones.
  def build_publish_calls(context)
    exported_symbols = if context.options[:export_private]
      symbols
    else
      symbols.select(&:global)
    end

    exported_symbols.map { |s| s.compile_as_publish_call(context) }
  end
end
# Represents a YCP |repeat ... until| loop; compiles to a Ruby until whose
# body is a |begin| node (so the emitter can render post-test semantics).
class Repeat < Node
  # Loops introduce no symbols of their own.
  def symbols
    []
  end

  def compile(context)
    Ruby::Until.new(
      :condition => self.until.compile(context),
      :body => Ruby::Begin.new(
        :statements => compile_statements_inside_block(self.do, context)
      )
    )
  end
end
# Represents a YCP |return| statement.
class Return < Node
  def compile(context)
    # In a function or at file level a plain Ruby |return| works; inside an
    # UnspecBlock (compiled to a lambda) |next| yields the value instead.
    case context.innermost(DefBlock, FileBlock, UnspecBlock)
    when DefBlock, FileBlock
      Ruby::Return.new(:value => child ? child.compile(context) : nil)
    when UnspecBlock
      Ruby::Next.new(:value => child ? child.compile(context) : nil)
    else
      raise "Misplaced \"return\" statement."
    end
  end
end
# Represents a plain YCP statement block; compiles its statements within a
# nested (non-local) scope.
class StmtBlock < Node
  def compile(context)
    context.inside self do |inner_context|
      Ruby::Statements.new(
        :statements => statements.map { |s| s.compile(inner_context) }
      )
    end
  end
end
# Represents a YCP |switch| statement; compiles to a Ruby case with its
# when clauses and optional else.
class Switch < Node
  def compile(context)
    Ruby::Case.new(
      :expression => cond.compile(context),
      :whens => cases.map { |c| c.compile(context) },
      :else => default ? default.compile(context) : nil
    )
  end
end
# Represents a symbol-table entry (variable, function argument, ...).
class Symbol < Node
  # Whether an argument of this symbol's type needs a deep_copy prologue
  # (see |compile_as_copy_arg_call|); immutable and reference types don't.
  def needs_copy?
    immutable_types = [Type::BOOLEAN, Type::INTEGER, Type::SYMBOL]

    !immutable_types.include?(type.no_const) && !type.reference?
  end

  # As a function argument: just the (escaped) variable.
  def compile(context)
    RubyVar.for(nil, name, context, :in_arg)
  end

  # Emits "<var> = deep_copy(<var>)" at the top of a function body.
  def compile_as_copy_arg_call(context)
    Ruby::Assignment.new(
      :lhs => RubyVar.for(nil, name, context, :in_code),
      :rhs => Ruby::MethodCall.new(
        :receiver => nil,
        :name => "deep_copy",
        :args => [RubyVar.for(nil, name, context, :in_code)],
        :block => nil,
        :parens => true
      )
    )
  end

  # Emits the publish(...) call that exposes this symbol from a module;
  # non-global symbols are marked :private (see ModuleBlock).
  def compile_as_publish_call(context)
    entries = [
      Ruby::HashEntry.new(
        :key => Ruby::Literal.new(:value => category),
        :value => Ruby::Literal.new(:value => name.to_sym)
      ),
      Ruby::HashEntry.new(
        :key => Ruby::Literal.new(:value => :type),
        :value => Ruby::Literal.new(:value => type.to_s)
      )
    ]

    unless global
      entries << Ruby::HashEntry.new(
        :key => Ruby::Literal.new(:value => :private),
        :value => Ruby::Literal.new(:value => true)
      )
    end

    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "publish",
      :args => [Ruby::Hash.new(:entries => entries)],
      :block => nil,
      :parens => true
    )
  end
end
# Represents a YCP |textdomain| statement; compiles to a textdomain call.
class Textdomain < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "textdomain",
      :args => [Ruby::Literal.new(:value => name)],
      :block => nil,
      :parens => false
    )
  end
end
# Represents a YCP typedef; compiles to nothing.
class Typedef < Node
  def compile(context)
    # Ignored because ycpc expands defined types in the XML, so we never
    # actually encounter them.
  end
end
# Represents an anonymous YCP block. Creates its own local scope and
# compiles either to a lambda (when used as a value) or to a Ruby block
# (when passed to a builtin).
class UnspecBlock < Node
  def creates_local_scope?
    true
  end

  # As a standalone expression: wrap the statements in |lambda { ... }|.
  def compile(context)
    context.inside self do |inner_context|
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "lambda",
        :args => [],
        :block => Ruby::Block.new(
          :args => [],
          :statements => Ruby::Statements.new(
            :statements => statements.map { |s| s.compile(inner_context) }
          )
        ),
        :parens => true
      )
    end
  end

  # As a builtin's block argument: emit a plain Ruby block with arguments.
  def compile_as_block(context)
    context.inside self do |inner_context|
      Ruby::Block.new(
        :args => args.map { |a| a.compile(inner_context) },
        :statements => Ruby::Statements.new(
          :statements => statements.map { |s| s.compile(inner_context) }
        )
      )
    end
  end
end
# Represents a YCP variable reference — either a plain variable or a
# reference to a function (compiled to a fun_ref).
class Variable < Node
  def compile(context)
    case category
    when :variable, :reference
      RubyVar.for(ns, name, context, :in_code)
    when :function
      getter = if !ns && context.locals.include?(name)
        # Local function references already live in a variable.
        RubyVar.for(nil, name, context, :in_code)
      else
        # In the XML, all global module function references are
        # qualified (e.g. "M::i"). This includes references to functions
        # defined in this module. The problem is that in generated Ruby
        # code, the module namespace may not exist yet (e.g. when the
        # function is referenced at module toplevel in YCP), so we have to
        # omit it (which is OK, because then the |method| call will be
        # invoked on |self|, which is always our module).
        real_ns = ns == context.module_name ? nil : ns

        Ruby::MethodCall.new(
          :receiver => real_ns ? Ruby::Variable.new(:name => real_ns) : nil,
          :name => "method",
          :args => [
            Ruby::Literal.new(:value => name.to_sym)
          ],
          :block => nil,
          :parens => true
        )
      end

      # Wrap the Method object together with its YCP type signature.
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "fun_ref",
        :args => [getter, Ruby::Literal.new(:value => type.to_s)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown variable category: #{category.inspect}."
    end
  end
end
# Represents a YCP |while| loop; compiles to a Ruby while.
class While < Node
  # Loops introduce no symbols of their own.
  def symbols
    []
  end

  def compile(context)
    Ruby::While.new(
      :condition => cond.compile(context),
      :body => compile_statements_inside_block(self.do, context)
    )
  end
end
class YCPCode < Node
  # Opens a new local variable scope (see CompilerContext#locals).
  def creates_local_scope?
    true
  end

  # Compiles into a |lambda| wrapping the single child expression.
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "lambda",
      :args => [],
      :block => Ruby::Block.new(
        :args => [],
        :statements => child.compile(context)
      ),
      :parens => true
    )
  end

  # Compiles into a Ruby block (with arguments) for use as a call argument.
  def compile_as_block(context)
    context.inside self do |inner_context|
      Ruby::Block.new(
        :args => args.map { |a| a.compile(inner_context) },
        :statements => child.compile(inner_context)
      )
    end
  end
end
class YEBinary < Node
  # Operators that map 1:1 onto Ruby operators.
  OPS_TO_OPS = {
    "&&" => "&&",
    "||" => "||"
  }

  # Operators that map onto methods of the Ops module.
  OPS_TO_METHODS = {
    "+" => "add",
    "-" => "subtract",
    "*" => "multiply",
    "/" => "divide",
    "%" => "modulo",
    "&" => "bitwise_and",
    "|" => "bitwise_or",
    "^" => "bitwise_xor",
    "<<" => "shift_left",
    ">>" => "shift_right"
  }

  def compile(context)
    if OPS_TO_OPS[name]
      Ruby::BinaryOperator.new(
        :op => OPS_TO_OPS[name],
        :lhs => lhs.compile(context),
        :rhs => rhs.compile(context)
      )
    elsif OPS_TO_METHODS[name]
      Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => "Ops"),
        :name => OPS_TO_METHODS[name],
        :args => [lhs.compile(context), rhs.compile(context)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown binary operator: #{name.inspect}."
    end
  end
end
# Indexing with a default: |m["foo"]:default| compiles to Ops.index.
class YEBracket < Node
  def compile(context)
    # In expressions like |m["foo"]:f()|, the |f| function is called only
    # when the value is missing. In other words, the default is evaluated
    # lazily. We need to emulate this laziness at least for the calls.
    if default.is_a?(Call)
      # Lazy form: the default becomes a block evaluated only on miss.
      args = [value.compile(context), index.compile(context)]
      block = Ruby::Block.new(
        :args => [],
        :statements => default.compile(context)
      )
    else
      # Eager form: the default is passed as a plain third argument.
      args = [
        value.compile(context),
        index.compile(context),
        default.compile(context),
      ]
      block = nil
    end

    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => "index",
      :args => args,
      :block => block,
      :parens => true
    )
  end
end
# A YCP |is(expr, type)| test; compiles to Ops.is with the type as a string.
class YEIs < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => "is",
      :args => [
        child.compile(context),
        Ruby::Literal.new(:value => type.to_s)
      ],
      :block => nil,
      :parens => true
    )
  end
end
# A type propagation (implicit cast) from |from| to |to|.
class YEPropagate < Node
  # Is identical to list of shortcuts in ruby-bindings ycp/convert.rb
  TYPES_WITH_SHORTCUT_CONVERSION = [
    "boolean",
    "float",
    "integer",
    "list",
    "locale",
    "map",
    "path",
    "string",
    "symbol",
    "term",
  ]

  # When the types differ only in constness, compiles to just the child.
  # Otherwise emits either a Convert.to_<type> shortcut (any -> shortcut
  # type) or a generic Convert.convert call with :from/:to options.
  def compile(context)
    if from.no_const != to.no_const
      if TYPES_WITH_SHORTCUT_CONVERSION.include?(to.to_s) && from.to_s == "any"
        Ruby::MethodCall.new(
          :receiver => Ruby::Variable.new(:name => "Convert"),
          :name => "to_#{to}",
          :args => [child.compile(context)],
          :block => nil,
          :parens => true
        )
      else
        Ruby::MethodCall.new(
          :receiver => Ruby::Variable.new(:name => "Convert"),
          :name => "convert",
          :args => [
            child.compile(context),
            Ruby::Hash.new(
              :entries => [
                Ruby::HashEntry.new(
                  :key => Ruby::Literal.new(:value => :from),
                  :value => Ruby::Literal.new(:value => from.no_const.to_s)
                ),
                Ruby::HashEntry.new(
                  :key => Ruby::Literal.new(:value => :to),
                  :value => Ruby::Literal.new(:value => to.no_const.to_s)
                )
              ]
            )
          ],
          :block => nil,
          :parens => true
        )
      end
    else
      child.compile(context)
    end
  end
end
# A reference argument (|&|). Compiled in three parts around a call site:
# a setter that boxes the value into an |arg_ref|, the reference variable
# itself, and a getter that unboxes |.value| back into the variable.
class YEReference < Node
  def compile(context)
    child.compile_as_ref(context)
  end

  # Emits |<name>_ref = arg_ref(<var>)| (run before the call).
  def compile_as_setter(context)
    Ruby::Assignment.new(
      :lhs => compile(context),
      :rhs => Ruby::MethodCall.new(
        :receiver => nil,
        :name => "arg_ref",
        :args => [child.compile(context)],
        :block => nil,
        :parens => true
      )
    )
  end

  # Emits |<var> = <name>_ref.value| (run after the call).
  def compile_as_getter(context)
    Ruby::Assignment.new(
      :lhs => child.compile(context),
      :rhs => Ruby::MethodCall.new(
        :receiver => compile(context),
        :name => "value",
        :args => [],
        :block => nil,
        :parens => true
      )
    )
  end
end
class YEReturn < Node
  # Opens a new local variable scope (see CompilerContext#locals).
  def creates_local_scope?
    true
  end

  # Compiles into a |lambda| wrapping the single child expression.
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "lambda",
      :args => [],
      :block => Ruby::Block.new(
        :args => [],
        :statements => child.compile(context)
      ),
      :parens => true
    )
  end

  # Compiles into a Ruby block (with arguments) for use as a call argument.
  def compile_as_block(context)
    context.inside self do |inner_context|
      Ruby::Block.new(
        :args => args.map { |a| a.compile(inner_context) },
        :statements => child.compile(inner_context)
      )
    end
  end
end
class YETerm < Node
  # Term names that have UI shortcut methods in the Ruby bindings; these
  # compile to direct calls (e.g. |HBox(...)|) instead of |term(:HBox, ...)|.
  UI_TERMS = [
    :BarGraph,
    :Bottom,
    :CheckBox,
    :ColoredLabel,
    :ComboBox,
    :Date,
    :DownloadProgress,
    :DumbTab,
    :DummySpecialWidget,
    :Empty,
    :Frame,
    :HBox,
    :HBoxvHCenter,
    :HMultiProgressMeter,
    :HSpacing,
    :HSquash,
    :HStretch,
    :HVCenter,
    :HVSquash,
    :HVStretch,
    :HWeight,
    :Heading,
    :IconButton,
    :Image,
    :IntField,
    :Label,
    :Left,
    :LogView,
    :MarginBox,
    :MenuButton,
    :MinHeight,
    :MinSize,
    :MinWidth,
    :MultiLineEdit,
    :MultiSelectionBox,
    :PackageSelector,
    :PatternSelector,
    :PartitionSplitter,
    :Password,
    :PkgSpecial,
    :ProgressBar,
    :PushButton,
    :RadioButton,
    :RadioButtonGroup,
    :ReplacePoint,
    :RichText,
    :Right,
    :SelectionBox,
    :Slider,
    :Table,
    :TextEntry,
    :Time,
    :Top,
    :Tree,
    :VBox,
    :VCenter,
    :VMultiProgressMeter,
    :VSpacing,
    :VSquash,
    :VStretch,
    :VWeight,
    :Wizard
  ]

  def compile(context)
    children_compiled = children.map { |ch| ch.compile(context) }
    # Use the UI shortcut only when no symbol in scope shadows the name.
    if UI_TERMS.include?(name.to_sym) && !context.symbols.include?(name)
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => name,
        :args => children_compiled,
        :block => nil,
        :parens => true
      )
    else
      # Generic form: |term(:name, child, ...)|.
      name_compiled = Ruby::Literal.new(:value => name.to_sym)
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "term",
        :args => [name_compiled] + children_compiled,
        :block => nil,
        :parens => true
      )
    end
  end
end
# YCP's |cond ? a : b| maps directly onto a Ruby ternary operator.
class YETriple < Node
  def compile(context)
    compiled_condition = cond.compile(context)
    compiled_then      = self.true.compile(context)
    compiled_else      = self.false.compile(context)

    Ruby::TernaryOperator.new(
      :condition => compiled_condition,
      :then      => compiled_then,
      :else      => compiled_else
    )
  end
end
class YEUnary < Node
  # Operators that map 1:1 onto Ruby operators.
  OPS_TO_OPS = {
    "!" => "!"
  }

  # Operators that map onto methods of the Ops module.
  OPS_TO_METHODS = {
    "-" => "unary_minus",
    "~" => "bitwise_not",
  }

  def compile(context)
    if OPS_TO_OPS[name]
      Ruby::UnaryOperator.new(
        :op => OPS_TO_OPS[name],
        :expression => child.compile(context)
      )
    elsif OPS_TO_METHODS[name]
      Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => "Ops"),
        :name => OPS_TO_METHODS[name],
        :args => [child.compile(context)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown unary operator: #{name.inspect}."
    end
  end
end
end
end
end
Do not require "/" before "src/include" for include files
The initial "/" may not be there when --report-file is used (the passed
path will be YaST module-relative in that case).
Part of a fix for #23.
# encoding: utf-8
require "ostruct"
module Y2R
module AST
# Classes in this module represent YCP AST nodes. Their main task is to
# compile themselves into Ruby AST nodes using the |compile| method and its
# siblings (|compile_as_block|, etc.).
#
# The structure of the AST is heavily influenced by the structure of XML
# emitted by ycpc -x.
module YCP
# Compilation context passed to nodes' |compile| method. It mainly tracks
# the scope we're in and contains related helper methods.
# Compilation context passed to nodes' |compile| method. It mainly tracks
# the stack of enclosing blocks and offers scope-related helpers.
class CompilerContext < OpenStruct
  # True when any enclosing block is an instance of |klass|.
  def in?(klass)
    blocks.any? { |b| b.is_a?(klass) }
  end

  # The innermost enclosing block that is an instance of any of |klasses|.
  def innermost(*klasses)
    blocks.reverse_each.find { |b| klasses.any? { |k| b.is_a?(k) } }
  end

  # Yields a copy of this context with |block| pushed onto the block stack.
  def inside(block)
    nested = dup
    nested.blocks = blocks + [block]
    yield nested
  end

  def module_name
    blocks.first.name
  end

  # Names of all symbols visible in any enclosing block.
  def symbols
    blocks.flat_map { |b| b.symbols.map(&:name) }
  end

  # Symbol names from the innermost local scope outward.
  def locals
    first_local = blocks.index(&:creates_local_scope?) || blocks.length
    blocks[first_local..-1].flat_map { |b| b.symbols.map(&:name) }
  end

  # Symbol names up to (and including) the first local scope.
  def globals
    first_local = blocks.index(&:creates_local_scope?) || blocks.length
    blocks[0..first_local].flat_map { |b| b.symbols.map(&:name) }
  end

  # The innermost symbol with the given name, or nil.
  def symbol_for(name)
    blocks.flat_map(&:symbols).reverse.find { |s| s.name == name }
  end
end
# Represents a YCP type.
class Type
  attr_reader :type

  # @param type [String] the YCP type spelling, e.g. "const list<string>&"
  def initialize(type)
    @type = type
  end

  def ==(other)
    other.instance_of?(Type) && other.type == @type
  end

  def to_s
    @type
  end

  # True for reference types (spelled with a trailing "&").
  #
  # Fixed to return a real boolean instead of the Integer/nil result of
  # |=~| (callers only use it in boolean context, so this is compatible).
  def reference?
    @type.end_with?("&")
  end

  # Strips a leading "const " qualifier; returns self when there is none.
  def no_const
    @type =~ /^const / ? Type.new(@type.sub(/^const /, "")) : self
  end

  # Parses the argument types out of a function type such as
  # |"void (string, map<string, integer>)"|. Splits the parenthesized part
  # on top-level commas only — commas nested inside "(...)" or "<...>"
  # belong to the enclosed type.
  def arg_types
    types = []
    type = ""
    nesting_level = 0

    in_parens = @type.sub(/^[^(]*\((.*)\)[^)]*$/, '\1')
    in_parens.each_char do |ch|
      case ch
      when ","
        if nesting_level == 0
          types << type
          type = ""
        else
          type += ch
        end
      when "(", "<"
        nesting_level += 1
        type += ch
      when ")", ">"
        nesting_level -= 1
        type += ch
      else
        type += ch
      end
    end
    types << type unless type.empty?

    types.map { |t| Type.new(t.strip) }
  end

  BOOLEAN = Type.new("boolean")
  INTEGER = Type.new("integer")
  SYMBOL = Type.new("symbol")
end
# Contains utility functions related to Ruby variables.
module RubyVar
  # Taken from Ruby's parse.y (for 1.9.3).
  RUBY_KEYWORDS = [
    "BEGIN",
    "END",
    "__ENCODING__",
    "__FILE__",
    "__LINE__",
    "alias",
    "and",
    "begin",
    "break",
    "case",
    "class",
    "def",
    "defined",
    "do",
    "else",
    "elsif",
    "end",
    "ensure",
    "false",
    "for",
    "if",
    "in",
    "module",
    "next",
    "nil",
    "not",
    "or",
    "redo",
    "rescue",
    "retry",
    "return",
    "self",
    "super",
    "then",
    "true",
    "undef",
    "unless",
    "until",
    "when",
    "while",
    "yield"
  ]

  class << self
    # Escapes a YCP variable name so that it is a valid Ruby local
    # variable name.
    #
    # The escaping is constructed so that it can't create any collision
    # between names. More precisely, for any distinct strings passed to
    # this function the results will be also distinct.
    def escape_local(name)
      name.sub(/^(#{RUBY_KEYWORDS.join("|")}|[A-Z_].*)$/) { |s| "_#{s}" }
    end

    # Builds a Ruby AST node for a variable with given name in given
    # context, doing all necessary escaping, de-aliasing, etc.
    #
    # |mode| is :in_code (reference-category symbols are unwrapped with a
    # |.value| call) or :in_arg (the plain variable is returned).
    def for(ns, name, context, mode)
      # In the XML, all global module variable references are qualified
      # (e.g. "M::i"). This includes references to variables defined in
      # this module. All other variable references are unqualified (e.g
      # "i").
      if ns
        if ns == context.module_name
          # Our own module: an instance variable.
          Ruby::Variable.new(:name => "@#{name}")
        else
          # Another module: an accessor call on it.
          Ruby::MethodCall.new(
            :receiver => Ruby::Variable.new(:name => ns),
            :name => name,
            :args => [],
            :block => nil,
            :parens => true
          )
        end
      else
        is_local = context.locals.include?(name)
        variables = if is_local
          context.locals
        else
          context.globals
        end

        # If there already is a variable with given name (coming from some
        # parent scope), suffix the variable name with "2". If there are two
        # such variables, suffix the name with "3". And so on.
        #
        # The loop is needed because we need to do the same check and maybe
        # additional round(s) of suffixing also for suffixed variable names to
        # prevent conflicts.
        suffixed_name = name
        begin
          count = variables.select { |v| v == suffixed_name }.size
          suffixed_name = suffixed_name + count.to_s if count > 1
        end while count > 1

        variable_name = if is_local
          RubyVar.escape_local(suffixed_name)
        else
          "@#{suffixed_name}"
        end

        variable = Ruby::Variable.new(:name => variable_name)

        case mode
        when :in_code
          symbol = context.symbol_for(name)
          # The "symbol &&" part is needed only because of tests. The symbol
          # should be always present in real-world situations.
          if symbol && symbol.category == :reference
            Ruby::MethodCall.new(
              :receiver => variable,
              :name => "value",
              :args => [],
              :block => nil,
              :parens => true
            )
          else
            variable
          end
        when :in_arg
          variable
        else
          raise "Unknown mode: #{mode.inspect}."
        end
      end
    end
  end
end
# Common base class of all YCP AST nodes; OpenStruct carries the
# attributes parsed from the ycpc XML.
class Node < OpenStruct
  # Subclasses that open a new variable scope override this.
  def creates_local_scope?
    false
  end

  # Compiles |statements| (which may be nil) into a Ruby::Statements node;
  # nil becomes an empty statement list.
  def compile_statements(statements, context)
    if statements
      statements.compile(context)
    else
      Ruby::Statements.new(:statements => [])
    end
  end

  # Like compile_statements, but with this node pushed onto the context's
  # block stack while compiling.
  def compile_statements_inside_block(statements, context)
    context.inside self do |inner_context|
      compile_statements(statements, inner_context)
    end
  end

  # Builds the header comment of a generated file.
  #
  # Fixed: the original emitted the literal text "#(unknown)" instead of
  # interpolating the |filename| parameter (which was left unused).
  def header_comment(filename, comment)
    lines = []

    lines << "Translated by Y2R (https://github.com/yast/y2r)."
    lines << ""
    lines << "Original file: #{filename}"

    if comment
      lines << ""
      lines << comment
    end

    lines.join("\n")
  end
end
# Sorted alphabetically.
# An assignment |var = expr|; compiles to a Ruby assignment.
class Assign < Node
  def compile(context)
    Ruby::Assignment.new(
      :lhs => RubyVar.for(ns, name, context, :in_code),
      :rhs => child.compile(context)
    )
  end
end
# An indexed assignment |entry[arg] = rhs|; compiles to Ops.assign.
class Bracket < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => "assign",
      :args => [
        entry.compile(context),
        arg.compile(context),
        rhs.compile(context),
      ],
      :block => nil,
      :parens => true
    )
  end
end
class Break < Node
  # Compiles a |break| depending on what it appears in: a plain Ruby
  # |break| inside loops, or |raise Break| inside an UnspecBlock (which is
  # compiled to a lambda, where a bare |break| would be invalid).
  def compile(context)
    case context.innermost(While, Do, Repeat, UnspecBlock)
    when While, Do, Repeat
      Ruby::Break.new
    when UnspecBlock
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "raise",
        :args => [Ruby::Variable.new(:name => "Break")],
        :block => nil,
        :parens => false
      )
    else
      raise "Misplaced \"break\" statement."
    end
  end
end
# A call to a YCP builtin; dispatched to the matching Ruby module.
class Builtin < Node
  # Builtin namespaces with a dedicated Ruby module; every other
  # namespace falls back to the generic Builtins module.
  NS_TO_MODULE = {
    "SCR"      => "SCR",
    "WFM"      => "WFM",
    "float"    => "Builtins::Float",
    "list"     => "Builtins::List",
    "multiset" => "Builtins::Multiset"
  }

  def compile(context)
    module_name = NS_TO_MODULE.fetch(ns, "Builtins")

    compiled_block = block ? block.compile_as_block(context) : nil

    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => module_name),
      :name     => name,
      :args     => args.map { |a| a.compile(context) },
      :block    => compiled_block,
      :parens   => true
    )
  end
end
class Call < Node
  # Compiles a function call. Reference arguments (|&| in the function's
  # type) additionally get wrapped with setter/getter assignments around
  # the call (see YEReference).
  def compile(context)
    call = case category
    when :function
      if !ns && context.locals.include?(name)
        # A local function variable: invoke the stored lambda/method.
        Ruby::MethodCall.new(
          :receiver => RubyVar.for(nil, name, context, :in_code),
          :name => "call",
          :args => args.map { |a| a.compile(context) },
          :block => nil,
          :parens => true
        )
      else
        # In the XML, all module function calls are qualified (e.g.
        # "M::i"). This includes calls to functions defined in this
        # module. The problem is that in generated Ruby code, the module
        # namespace may not exist yet (e.g. when the function is called
        # at module toplevel in YCP), so we have to omit it (which is OK,
        # because then the call will be invoked on |self|, which is
        # always our module).
        fixed_ns = ns == context.module_name ? nil : ns
        receiver = if fixed_ns
          Ruby::Variable.new(:name => fixed_ns)
        else
          nil
        end

        Ruby::MethodCall.new(
          :receiver => receiver,
          :name => name,
          :args => args.map { |a| a.compile(context) },
          :block => nil,
          :parens => true
        )
      end
    when :variable # function reference stored in variable
      Ruby::MethodCall.new(
        :receiver => RubyVar.for(ns, name, context, :in_code),
        :name => "call",
        :args => args.map { |a| a.compile(context) },
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown call category: #{category.inspect}."
    end

    # Pair each argument with its declared type and keep the reference ones.
    reference_args_with_types = args.zip(type.arg_types).select do |arg, type|
      type.reference?
    end

    if !reference_args_with_types.empty?
      setters = reference_args_with_types.map do |arg, type|
        arg.compile_as_setter(context)
      end
      getters = reference_args_with_types.map do |arg, type|
        arg.compile_as_getter(context)
      end
      # The call's value is preserved in a temporary so the getters can
      # run after the call and the whole expression still yields it.
      result_var = Ruby::Variable.new(
        :name => RubyVar.escape_local("#{name}_result")
      )

      Ruby::Expressions.new(
        :expressions => [
          *setters,
          Ruby::Assignment.new(:lhs => result_var, :rhs => call),
          *getters,
          result_var
        ]
      )
    else
      call
    end
  end
end
# A |case| clause of a |switch|; compiles to a Ruby |when| with the
# trailing |break| stripped (Ruby's |when| does not fall through).
class Case < Node
  def compile(context)
    if body.statements.last.is_a?(Break)
      # The following dance is here because we want to keep the AST nodes
      # immutable and thus avoid modifying their data.
      body_without_break = body.dup
      body_without_break.statements = body.statements[0..-2]
    elsif body.statements.last.is_a?(Return)
      body_without_break = body
    else
      raise NotImplementedError,
        "Case without a break or return encountered. These are not supported."
    end

    Ruby::When.new(
      :values => values.map { |v| v.compile(context) },
      :body => body_without_break.compile(context)
    )
  end
end
# A comparison expression; compiles to the matching Ops method call.
class Compare < Node
  OPS_TO_METHODS = {
    "==" => "equal",
    "!=" => "not_equal",
    "<" => "less_than",
    ">" => "greater_than",
    "<=" => "less_or_equal",
    ">=" => "greater_or_equal"
  }

  def compile(context)
    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => OPS_TO_METHODS[op],
      :args => [lhs.compile(context), rhs.compile(context)],
      :block => nil,
      :parens => true
    )
  end
end
# A YCP constant; compiles to the corresponding Ruby literal (paths
# become |path("...")| calls).
class Const < Node
  def compile(context)
    case type
    when :void
      Ruby::Literal.new(:value => nil)
    when :bool
      case value
      when "true"
        Ruby::Literal.new(:value => true)
      when "false"
        Ruby::Literal.new(:value => false)
      else
        raise "Unknown boolean value: #{value.inspect}."
      end
    when :int
      Ruby::Literal.new(:value => value.to_i)
    when :float
      # Normalize a trailing dot ("1." -> "1.0") before converting.
      Ruby::Literal.new(:value => value.sub(/\.$/, ".0").to_f)
    when :symbol
      Ruby::Literal.new(:value => value.to_sym)
    when :string
      Ruby::Literal.new(:value => value)
    when :path
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "path",
        :args => [Ruby::Literal.new(:value => value)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown const type: #{type.inspect}."
    end
  end
end
# A |continue| statement; compiles to Ruby's |next|.
class Continue < Node
  def compile(context)
    Ruby::Next.new
  end
end
# The |default| clause of a |switch|; compiles to a Ruby |else| with any
# trailing |break| stripped.
class Default < Node
  def compile(context)
    if body.statements.last.is_a?(Break)
      # The following dance is here because we want to keep the AST nodes
      # immutable and thus avoid modifying their data.
      body_without_break = body.dup
      body_without_break.statements = body.statements[0..-2]
    else
      body_without_break = body
    end

    Ruby::Else.new(:body => body_without_break.compile(context))
  end
end
# A function-definition body block; opens a new local scope.
class DefBlock < Node
  def creates_local_scope?
    true
  end

  def compile(context)
    context.inside self do |inner_context|
      Ruby::Statements.new(
        :statements => statements.map { |s| s.compile(inner_context) }
      )
    end
  end
end
class Do < Node
  # A |do| loop declares no symbols of its own.
  def symbols
    []
  end

  # Compiles to a While whose body is wrapped in Begin — presumably
  # serialized as Ruby's post-test |begin ... end while| form; confirm in
  # the Ruby::While emitter.
  def compile(context)
    Ruby::While.new(
      :condition => self.while.compile(context),
      :body => Ruby::Begin.new(
        :statements => compile_statements_inside_block(self.do, context)
      )
    )
  end
end
class Entry < Node
  def compile(context)
    RubyVar.for(ns, name, context, :in_code)
  end

  # The |<name>_ref| variable that YEReference#compile_as_setter assigns.
  def compile_as_ref(context)
    Ruby::Variable.new(:name => "#{name}_ref")
  end
end
# Toplevel block of a YCP client file. Compiles to a |Yast::<Name>Client|
# class (subclass of Client) plus a |...new.main| invocation.
class FileBlock < Node
  def name
    nil
  end

  def compile(context)
    class_statements = []

    context.inside self do |inner_context|
      class_statements += build_main_def(inner_context)
      class_statements += build_other_defs(inner_context)
    end

    Ruby::Program.new(
      :statements => Ruby::Statements.new(
        :statements => [
          Ruby::Module.new(
            :name => "Yast",
            :statements => Ruby::Class.new(
              :name => class_name,
              :superclass => Ruby::Variable.new(:name => "Client"),
              :statements => Ruby::Statements.new(
                :statements => class_statements
              )
            )
          ),
          Ruby::MethodCall.new(
            :receiver => Ruby::MethodCall.new(
              :receiver => Ruby::ConstAccess.new(
                :receiver => Ruby::Variable.new(:name => "Yast"),
                :name => class_name
              ),
              :name => "new",
              :args => [],
              :block => nil,
              :parens => true
            ),
            :name => "main",
            :args => [],
            :block => nil,
            :parens => true
          )
        ]
      ),
      :comment => header_comment(filename, comment)
    )
  end

  private

  # Derives a CamelCase class name from the file name, e.g.
  # "my_client.ycp" -> "MyClientClient".
  def class_name
    client_name = File.basename(filename).sub(/\.[^.]*$/, "")
    client_name.
      gsub(/^./) { |s| s.upcase }.
      gsub(/[_.-]./) { |s| s[1].upcase } + "Client"
  end

  def fundef_statements
    statements.select { |s| s.is_a?(FunDef) }
  end

  def other_statements
    statements - fundef_statements
  end

  # Non-FunDef toplevel statements become the |main| method (ending with
  # an explicit nil so |main|'s return value is stable).
  def build_main_def(context)
    if !other_statements.empty?
      main_statements = other_statements.map { |s| s.compile(context) }
      main_statements << Ruby::Literal.new(:value => nil)
      [
        Ruby::Def.new(
          :name => "main",
          :args => [],
          :statements => Ruby::Statements.new(
            :statements => main_statements
          )
        )
      ]
    else
      []
    end
  end

  def build_other_defs(context)
    fundef_statements.map { |t| t.compile(context) }
  end
end
class Filename < Node
  def compile(context)
    # Ignored because we don't care about filename information.
  end
end
class FunDef < Node
  # Compiles a function definition. At toplevel it becomes a |def|;
  # nested inside another function (DefBlock) it becomes a local variable
  # holding a lambda.
  def compile(context)
    statements = block.compile(context)

    context.inside block do |inner_context|
      # Mutable arguments are defensively deep-copied on entry.
      statements.statements = args.select(&:needs_copy?).map do |arg|
        arg.compile_as_copy_arg_call(inner_context)
      end + statements.statements

      # Ensure a stable nil return value unless the body already returns.
      unless statements.statements.last.is_a? Ruby::Return
        statements.statements << Ruby::Literal.new(:value => nil)
      end

      if !context.in?(DefBlock)
        Ruby::Def.new(
          :name => name,
          :args => args.map { |a| a.compile(inner_context) },
          :statements => statements
        )
      else
        Ruby::Assignment.new(
          :lhs => RubyVar.for(nil, name, context, :in_code),
          :rhs => Ruby::MethodCall.new(
            :receiver => nil,
            :name => "lambda",
            :args => [],
            :block => Ruby::Block.new(
              :args => args.map { |a| a.compile(inner_context) },
              :statements => statements
            ),
            :parens => true
          )
        )
      end
    end
  end
end
# An |if| statement; the |else| branch is optional.
class If < Node
  def compile(context)
    compiled_then = compile_statements(self.then, context)
    compiled_else = self.else ? compile_statements(self.else, context) : nil

    Ruby::If.new(
      :condition => cond.compile(context),
      :then      => compiled_then,
      :else      => compiled_else
    )
  end
end
# An |import| statement; compiles to |Yast.import "Name"|.
class Import < Node
  def compile(context)
    # Using any SCR or WFM function results in an auto-import. We ignore
    # these auto-imports because neither SCR nor WFM are real modules.
    return nil if name == "SCR" || name == "WFM"

    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Yast"),
      :name => "import",
      :args => [Ruby::Literal.new(:value => name)],
      :block => nil,
      :parens => true
    )
  end
end
# An |include| statement. Only emits code when include files are not
# inlined; then it becomes a |Yast.include| call targeting either |self|
# or the |include_target| parameter of an include file's init method.
class Include < Node
  def compile(context)
    if context.options[:dont_inline_include_files]
      args = [
        if context.options[:as_include_file]
          Ruby::Variable.new(:name => "include_target")
        else
          Ruby::Self.new
        end,
        # Map the YCP file extension (.ycp/.yh) to .rb.
        Ruby::Literal.new(:value => name.sub(/\.y(cp|h)$/, ".rb"))
      ]

      Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => "Yast"),
        :name => "include",
        :args => args,
        :block => nil,
        :parens => true
      )
    else
      nil # ycpc already included the file for us.
    end
  end
end
# Toplevel block of a YCP include file. Compiles to a |Yast::<...>Include|
# module with an |initialize_<...>(include_target)| method for the
# non-function statements.
class IncludeBlock < Node
  def name
    nil
  end

  def compile(context)
    class_statements = []

    context.inside self do |inner_context|
      class_statements += build_initialize_method_def(inner_context)
      class_statements += build_other_defs(inner_context)
    end

    Ruby::Program.new(
      :statements => Ruby::Statements.new(
        :statements => [
          Ruby::Module.new(
            :name => "Yast",
            :statements => Ruby::Module.new(
              :name => module_name,
              :statements => Ruby::Statements.new(
                :statements => class_statements
              )
            )
          )
        ]
      ),
      :comment => header_comment(filename, comment)
    )
  end

  private

  # CamelCased path parts + "Include", e.g. ["foo", "bar.ycp"] -> "FooBarInclude".
  def module_name
    parts = path_parts.map do |part|
      part.
        gsub(/^./) { |s| s.upcase }.
        gsub(/[_.-]./) { |s| s[1].upcase }
    end
    "#{parts.join("")}Include"
  end

  def initialize_method_name
    parts = path_parts.map { |p| p.gsub(/[_.-]/, "_") }
    "initialize_#{parts.join("_")}"
  end

  # Path components below "src/include/"; falls back to the bare file
  # name when that segment is absent. The match deliberately does not
  # require a leading "/" (the path may be module-relative).
  def path_parts
    path = if filename =~ /src\/include\//
      filename.sub(/^.*src\/include\//, "")
    else
      File.basename(filename)
    end

    path.sub(/\.y(cp|h)$/, "").split("/")
  end

  def fundef_statements
    statements.select { |s| s.is_a?(FunDef) }
  end

  def other_statements
    statements - fundef_statements
  end

  def build_initialize_method_def(context)
    if !other_statements.empty?
      initialize_method_statements = other_statements.map { |s| s.compile(context) }
      [
        Ruby::Def.new(
          :name => initialize_method_name,
          :args => [Ruby::Variable.new(:name => "include_target")],
          :statements => Ruby::Statements.new(
            :statements => initialize_method_statements
          )
        )
      ]
    else
      []
    end
  end

  def build_other_defs(context)
    fundef_statements.map { |t| t.compile(context) }
  end
end
# A list literal; compiles to a Ruby array literal.
class List < Node
  def compile(context)
    Ruby::Array.new(
      :elements => children.map { |ch| ch.compile(context) }
    )
  end
end
# A translatable string; compiles to a gettext-style |_("...")| call.
class Locale < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "_",
      :args => [Ruby::Literal.new(:value => text)],
      :block => nil,
      :parens => true
    )
  end
end
# A map literal; compiles to a Ruby hash literal.
class Map < Node
  def compile(context)
    Ruby::Hash.new(:entries => children.map { |ch| ch.compile(context) })
  end
end
# One key/value pair of a map literal.
class MapElement < Node
  def compile(context)
    Ruby::HashEntry.new(
      :key => key.compile(context),
      :value => value.compile(context)
    )
  end
end
# Toplevel block of a YCP module. Compiles to a |Yast::<Name>Class| class,
# a |<Name> = <Name>Class.new| instance, an optional |<Name>.main| call,
# and |publish| calls for its exported symbols.
class ModuleBlock < Node
  def compile(context)
    if name !~ /^[A-Z][a-zA-Z0-9_]*$/
      raise NotImplementedError,
        "Invalid module name: #{name.inspect}. Module names that are not Ruby class names are not supported."
    end

    class_statements = []

    context.inside self do |inner_context|
      class_statements += build_main_def(inner_context)
      class_statements += build_other_defs(inner_context)
      class_statements += build_publish_calls(inner_context)
    end

    module_statements = [
      Ruby::Class.new(
        :name => "#{name}Class",
        :superclass => Ruby::Variable.new(:name => "Module"),
        :statements => Ruby::Statements.new(
          :statements => class_statements
        )
      ),
      Ruby::Assignment.new(
        :lhs => Ruby::Variable.new(:name => name),
        :rhs => Ruby::MethodCall.new(
          :receiver => Ruby::Variable.new(:name => "#{name}Class"),
          :name => "new",
          :args => [],
          :block => nil,
          :parens => true
        )
      )
    ]

    if has_main_def?
      module_statements << Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => name),
        :name => "main",
        :args => [],
        :block => nil,
        :parens => true
      )
    end

    Ruby::Program.new(
      :statements => Ruby::Statements.new(
        :statements => [
          Ruby::MethodCall.new(
            :receiver => nil,
            :name => "require",
            :args => [Ruby::Literal.new(:value => "ycp")],
            :block => nil,
            :parens => false
          ),
          Ruby::Module.new(
            :name => "Yast",
            :statements => Ruby::Statements.new(
              :statements => module_statements
            )
          )
        ]
      ),
      :comment => header_comment(filename, comment)
    )
  end

  private

  def fundef_statements
    statements.select { |s| s.is_a?(FunDef) }
  end

  def other_statements
    statements - fundef_statements
  end

  # A YCP module constructor is a function named like the module itself.
  def constructor
    fundef_statements.find { |s| s.name == name }
  end

  def has_main_def?
    !other_statements.empty? || constructor
  end

  # Non-FunDef toplevel statements (plus a constructor call, if any)
  # become the |main| method.
  def build_main_def(context)
    if has_main_def?
      main_statements = other_statements.map { |s| s.compile(context) }

      if constructor
        main_statements << Ruby::MethodCall.new(
          :receiver => nil,
          :name => name,
          :args => [],
          :block => nil,
          :parens => true
        )
      end

      [
        Ruby::Def.new(
          :name => "main",
          :args => [],
          :statements => Ruby::Statements.new(
            :statements => main_statements
          )
        )
      ]
    else
      []
    end
  end

  def build_other_defs(context)
    fundef_statements.map { |t| t.compile(context) }
  end

  # Publishes global symbols (or all of them with :export_private).
  def build_publish_calls(context)
    exported_symbols = if context.options[:export_private]
      symbols
    else
      symbols.select(&:global)
    end

    exported_symbols.map { |s| s.compile_as_publish_call(context) }
  end
end
class Repeat < Node
  # A |repeat| loop declares no symbols of its own.
  def symbols
    []
  end

  # Compiles to an Until whose body is wrapped in Begin — presumably
  # serialized as Ruby's post-test |begin ... end until| form; confirm in
  # the Ruby::Until emitter.
  def compile(context)
    Ruby::Until.new(
      :condition => self.until.compile(context),
      :body => Ruby::Begin.new(
        :statements => compile_statements_inside_block(self.do, context)
      )
    )
  end
end
class Return < Node
  # Compiles a |return| depending on the enclosing construct: a Ruby
  # |return| inside functions/files, but |next| inside an UnspecBlock
  # (which is compiled to a lambda).
  def compile(context)
    case context.innermost(DefBlock, FileBlock, UnspecBlock)
    when DefBlock, FileBlock
      Ruby::Return.new(:value => child ? child.compile(context) : nil)
    when UnspecBlock
      Ruby::Next.new(:value => child ? child.compile(context) : nil)
    else
      raise "Misplaced \"return\" statement."
    end
  end
end
# A plain statement block; compiles its statements in a nested context.
class StmtBlock < Node
  def compile(context)
    context.inside self do |inner_context|
      Ruby::Statements.new(
        :statements => statements.map { |s| s.compile(inner_context) }
      )
    end
  end
end
# A |switch| statement; compiles to a Ruby |case| with optional |else|.
class Switch < Node
  def compile(context)
    Ruby::Case.new(
      :expression => cond.compile(context),
      :whens => cases.map { |c| c.compile(context) },
      :else => default ? default.compile(context) : nil
    )
  end
end
# A symbol-table entry (variable, function argument, ...).
class Symbol < Node
  # Arguments of immutable or reference types don't need a defensive
  # deep copy when passed into a function.
  def needs_copy?
    immutable_types = [Type::BOOLEAN, Type::INTEGER, Type::SYMBOL]

    !immutable_types.include?(type.no_const) && !type.reference?
  end

  def compile(context)
    RubyVar.for(nil, name, context, :in_arg)
  end

  # Emits |name = deep_copy(name)| — run at function entry (see FunDef).
  def compile_as_copy_arg_call(context)
    Ruby::Assignment.new(
      :lhs => RubyVar.for(nil, name, context, :in_code),
      :rhs => Ruby::MethodCall.new(
        :receiver => nil,
        :name => "deep_copy",
        :args => [RubyVar.for(nil, name, context, :in_code)],
        :block => nil,
        :parens => true
      )
    )
  end

  # Emits a |publish| call exposing this symbol; non-global symbols are
  # marked with :private => true.
  def compile_as_publish_call(context)
    entries = [
      Ruby::HashEntry.new(
        :key => Ruby::Literal.new(:value => category),
        :value => Ruby::Literal.new(:value => name.to_sym)
      ),
      Ruby::HashEntry.new(
        :key => Ruby::Literal.new(:value => :type),
        :value => Ruby::Literal.new(:value => type.to_s)
      )
    ]

    unless global
      entries << Ruby::HashEntry.new(
        :key => Ruby::Literal.new(:value => :private),
        :value => Ruby::Literal.new(:value => true)
      )
    end

    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "publish",
      :args => [Ruby::Hash.new(:entries => entries)],
      :block => nil,
      :parens => true
    )
  end
end
# A |textdomain| statement; compiles to a |textdomain "..."| call.
class Textdomain < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "textdomain",
      :args => [Ruby::Literal.new(:value => name)],
      :block => nil,
      :parens => false
    )
  end
end
# A YCP |typedef| statement. Compiles to nothing (returns nil).
class Typedef < Node
  def compile(context)
    # Ignored because ycpc expands defined types in the XML, so we never
    # actually encounter them.
  end
end
class UnspecBlock < Node
  # Opens a new local variable scope (see CompilerContext#locals).
  def creates_local_scope?
    true
  end

  # Compiles the block into a |lambda| call so the YCP block becomes a
  # first-class Ruby value.
  def compile(context)
    context.inside self do |inner_context|
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "lambda",
        :args => [],
        :block => Ruby::Block.new(
          :args => [],
          :statements => Ruby::Statements.new(
            :statements => statements.map { |s| s.compile(inner_context) }
          )
        ),
        :parens => true
      )
    end
  end

  # Compiles directly into a Ruby block (with arguments), e.g. for use as
  # the block of a builtin call.
  def compile_as_block(context)
    context.inside self do |inner_context|
      Ruby::Block.new(
        :args => args.map { |a| a.compile(inner_context) },
        :statements => Ruby::Statements.new(
          :statements => statements.map { |s| s.compile(inner_context) }
        )
      )
    end
  end
end
class Variable < Node
  # Compiles a variable reference. Plain variables map to Ruby variables;
  # function-valued variables are wrapped in a |fun_ref| call carrying the
  # YCP type.
  def compile(context)
    case category
    when :variable, :reference
      RubyVar.for(ns, name, context, :in_code)
    when :function
      getter = if !ns && context.locals.include?(name)
        RubyVar.for(nil, name, context, :in_code)
      else
        # In the XML, all global module function references are
        # qualified (e.g. "M::i"). This includes references to functions
        # defined in this module. The problem is that in generated Ruby
        # code, the module namespace may not exist yet (e.g. when the
        # function is referenced at module toplevel in YCP), so we have to
        # omit it (which is OK, because then the |method| call will be
        # invoked on |self|, which is always our module).
        real_ns = ns == context.module_name ? nil : ns
        Ruby::MethodCall.new(
          :receiver => real_ns ? Ruby::Variable.new(:name => real_ns) : nil,
          :name => "method",
          :args => [
            Ruby::Literal.new(:value => name.to_sym)
          ],
          :block => nil,
          :parens => true
        )
      end
      Ruby::MethodCall.new(
        :receiver => nil,
        :name => "fun_ref",
        :args => [getter, Ruby::Literal.new(:value => type.to_s)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown variable category: #{category.inspect}."
    end
  end
end
class While < Node
  # A |while| loop declares no symbols of its own.
  def symbols
    []
  end

  def compile(context)
    Ruby::While.new(
      :condition => cond.compile(context),
      :body => compile_statements_inside_block(self.do, context)
    )
  end
end
class YCPCode < Node
  # Opens a new local variable scope (see CompilerContext#locals).
  def creates_local_scope?
    true
  end

  # Compiles into a |lambda| wrapping the single child expression.
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => nil,
      :name => "lambda",
      :args => [],
      :block => Ruby::Block.new(
        :args => [],
        :statements => child.compile(context)
      ),
      :parens => true
    )
  end

  # Compiles into a Ruby block (with arguments) for use as a call argument.
  def compile_as_block(context)
    context.inside self do |inner_context|
      Ruby::Block.new(
        :args => args.map { |a| a.compile(inner_context) },
        :statements => child.compile(inner_context)
      )
    end
  end
end
class YEBinary < Node
  # Operators that map 1:1 onto Ruby operators.
  OPS_TO_OPS = {
    "&&" => "&&",
    "||" => "||"
  }

  # Operators that map onto methods of the Ops module.
  OPS_TO_METHODS = {
    "+" => "add",
    "-" => "subtract",
    "*" => "multiply",
    "/" => "divide",
    "%" => "modulo",
    "&" => "bitwise_and",
    "|" => "bitwise_or",
    "^" => "bitwise_xor",
    "<<" => "shift_left",
    ">>" => "shift_right"
  }

  def compile(context)
    if OPS_TO_OPS[name]
      Ruby::BinaryOperator.new(
        :op => OPS_TO_OPS[name],
        :lhs => lhs.compile(context),
        :rhs => rhs.compile(context)
      )
    elsif OPS_TO_METHODS[name]
      Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => "Ops"),
        :name => OPS_TO_METHODS[name],
        :args => [lhs.compile(context), rhs.compile(context)],
        :block => nil,
        :parens => true
      )
    else
      raise "Unknown binary operator: #{name.inspect}."
    end
  end
end
# Indexing with a default: |m["foo"]:default| compiles to Ops.index.
class YEBracket < Node
  def compile(context)
    # In expressions like |m["foo"]:f()|, the |f| function is called only
    # when the value is missing. In other words, the default is evaluated
    # lazily. We need to emulate this laziness at least for the calls.
    if default.is_a?(Call)
      # Lazy form: the default becomes a block evaluated only on miss.
      args = [value.compile(context), index.compile(context)]
      block = Ruby::Block.new(
        :args => [],
        :statements => default.compile(context)
      )
    else
      # Eager form: the default is passed as a plain third argument.
      args = [
        value.compile(context),
        index.compile(context),
        default.compile(context),
      ]
      block = nil
    end

    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => "index",
      :args => args,
      :block => block,
      :parens => true
    )
  end
end
# A YCP |is(expr, type)| test; compiles to Ops.is with the type as a string.
class YEIs < Node
  def compile(context)
    Ruby::MethodCall.new(
      :receiver => Ruby::Variable.new(:name => "Ops"),
      :name => "is",
      :args => [
        child.compile(context),
        Ruby::Literal.new(:value => type.to_s)
      ],
      :block => nil,
      :parens => true
    )
  end
end
# A type propagation (implicit cast) from |from| to |to|.
class YEPropagate < Node
  # Is identical to list of shortcuts in ruby-bindings ycp/convert.rb
  TYPES_WITH_SHORTCUT_CONVERSION = [
    "boolean",
    "float",
    "integer",
    "list",
    "locale",
    "map",
    "path",
    "string",
    "symbol",
    "term",
  ]

  # When the types differ only in constness, compiles to just the child.
  # Otherwise emits either a Convert.to_<type> shortcut (any -> shortcut
  # type) or a generic Convert.convert call with :from/:to options.
  def compile(context)
    if from.no_const != to.no_const
      if TYPES_WITH_SHORTCUT_CONVERSION.include?(to.to_s) && from.to_s == "any"
        Ruby::MethodCall.new(
          :receiver => Ruby::Variable.new(:name => "Convert"),
          :name => "to_#{to}",
          :args => [child.compile(context)],
          :block => nil,
          :parens => true
        )
      else
        Ruby::MethodCall.new(
          :receiver => Ruby::Variable.new(:name => "Convert"),
          :name => "convert",
          :args => [
            child.compile(context),
            Ruby::Hash.new(
              :entries => [
                Ruby::HashEntry.new(
                  :key => Ruby::Literal.new(:value => :from),
                  :value => Ruby::Literal.new(:value => from.no_const.to_s)
                ),
                Ruby::HashEntry.new(
                  :key => Ruby::Literal.new(:value => :to),
                  :value => Ruby::Literal.new(:value => to.no_const.to_s)
                )
              ]
            )
          ],
          :block => nil,
          :parens => true
        )
      end
    else
      child.compile(context)
    end
  end
end
# Compiles a YCP reference expression used for by-reference arguments.
class YEReference < Node
# Compiles to the child's reference form.
def compile(context)
child.compile_as_ref(context)
end
# Emits |<ref> = arg_ref(<var>)| — presumably wraps the variable before a
# by-reference call; confirm against the runtime's arg_ref helper.
def compile_as_setter(context)
Ruby::Assignment.new(
:lhs => compile(context),
:rhs => Ruby::MethodCall.new(
:receiver => nil,
:name => "arg_ref",
:args => [child.compile(context)],
:block => nil,
:parens => true
)
)
end
# Emits |<var> = <ref>.value| — copies the (possibly modified) value back
# after a by-reference call.
def compile_as_getter(context)
Ruby::Assignment.new(
:lhs => child.compile(context),
:rhs => Ruby::MethodCall.new(
:receiver => compile(context),
:name => "value",
:args => [],
:block => nil,
:parens => true
)
)
end
end
# Compiles to a Ruby lambda wrapping the child expression.
class YEReturn < Node
# This node opens a new local-variable scope in the compiled code.
def creates_local_scope?
true
end
# Emits |lambda { <child> }|.
def compile(context)
Ruby::MethodCall.new(
:receiver => nil,
:name => "lambda",
:args => [],
:block => Ruby::Block.new(
:args => [],
:statements => child.compile(context)
),
:parens => true
)
end
# Emits just the block part (with its argument list), for use when the
# expression is passed directly as a method-call block.
def compile_as_block(context)
context.inside self do |inner_context|
Ruby::Block.new(
:args => args.map { |a| a.compile(inner_context) },
:statements => child.compile(inner_context)
)
end
end
end
# Compiles a YCP term. Terms named after known UI builders compile to a
# direct call (e.g. |HBox(...)|); everything else goes through the generic
# |term(:name, ...)| helper.
class YETerm < Node
# UI builder methods assumed to exist in the compiled code's scope.
UI_TERMS = [
:BarGraph,
:Bottom,
:CheckBox,
:ColoredLabel,
:ComboBox,
:Date,
:DownloadProgress,
:DumbTab,
:DummySpecialWidget,
:Empty,
:Frame,
:HBox,
:HBoxvHCenter,
:HMultiProgressMeter,
:HSpacing,
:HSquash,
:HStretch,
:HVCenter,
:HVSquash,
:HVStretch,
:HWeight,
:Heading,
:IconButton,
:Image,
:IntField,
:Label,
:Left,
:LogView,
:MarginBox,
:MenuButton,
:MinHeight,
:MinSize,
:MinWidth,
:MultiLineEdit,
:MultiSelectionBox,
:PackageSelector,
:PatternSelector,
:PartitionSplitter,
:Password,
:PkgSpecial,
:ProgressBar,
:PushButton,
:RadioButton,
:RadioButtonGroup,
:ReplacePoint,
:RichText,
:Right,
:SelectionBox,
:Slider,
:Table,
:TextEntry,
:Time,
:Top,
:Tree,
:VBox,
:VCenter,
:VMultiProgressMeter,
:VSpacing,
:VSquash,
:VStretch,
:VWeight,
:Wizard
]
def compile(context)
children_compiled = children.map { |ch| ch.compile(context) }
# A name shadowed by a local symbol (context.symbols) must not be emitted
# as a direct UI builder call.
if UI_TERMS.include?(name.to_sym) && !context.symbols.include?(name)
Ruby::MethodCall.new(
:receiver => nil,
:name => name,
:args => children_compiled,
:block => nil,
:parens => true
)
else
# Generic fallback: term(:name, child, ...).
name_compiled = Ruby::Literal.new(:value => name.to_sym)
Ruby::MethodCall.new(
:receiver => nil,
:name => "term",
:args => [name_compiled] + children_compiled,
:block => nil,
:parens => true
)
end
end
end
# Compiles a YCP conditional expression |cond ? a : b| into a Ruby ternary.
class YETriple < Node
  def compile(context)
    # self.true / self.false are accessors (plain .true would parse as the
    # keyword), so keep the explicit receivers.
    parts = {
      :condition => cond.compile(context),
      :then      => self.true.compile(context),
      :else      => self.false.compile(context)
    }
    Ruby::TernaryOperator.new(parts)
  end
end
# Compiles a YCP unary expression ("!", "-", "~").
class YEUnary < Node
  # "!" maps directly onto Ruby's negation operator.
  OPS_TO_OPS = {
    "!" => "!"
  }

  # Arithmetic/bitwise negation are emulated via Ops helper methods.
  OPS_TO_METHODS = {
    "-" => "unary_minus",
    "~" => "bitwise_not",
  }

  def compile(context)
    if (op = OPS_TO_OPS[name])
      Ruby::UnaryOperator.new(
        :op         => op,
        :expression => child.compile(context)
      )
    elsif (method = OPS_TO_METHODS[name])
      Ruby::MethodCall.new(
        :receiver => Ruby::Variable.new(:name => "Ops"),
        :name     => method,
        :args     => [child.compile(context)],
        :block    => nil,
        :parens   => true
      )
    else
      raise "Unknown unary operator: #{name.inspect}."
    end
  end
end
end
end
end
|
module Yao
# Gem version string (semantic versioning).
VERSION = "0.10.0"
end
v0.10.1
module Yao
# Gem version string (semantic versioning).
VERSION = "0.10.1"
end
|
require 'forwardable'
module Yeah
  # Simple numeric vector with component-wise arithmetic. Components are
  # stored in a plain array and exposed via x/y/z accessors and #[].
  class Vector
    extend Forwardable

    # Vector[1, 2] is shorthand for Vector.new(1, 2).
    def self.[](*args)
      new(*args)
    end

    attr_reader :components
    alias_method :to_a, :components
    def_delegators :@components, :[]

    def initialize(*components)
      @components = components
    end

    # x/y/z read and write components 0, 1 and 2 respectively.
    %i[x y z].each_with_index do |axis, idx|
      define_method(axis) { @components[idx] }
      define_method("#{axis}=") { |value| @components[idx] = value }
    end

    # Component-wise addition with another vector.
    def +(other)
      combine(other) { |a, b| a + b }
    end

    # Component-wise subtraction of another vector.
    def -(other)
      combine(other) { |a, b| a - b }
    end

    # Scalar multiplication.
    def *(scalar)
      self.class.new(*@components.map { |c| c * scalar })
    end

    # Scalar division.
    def /(scalar)
      self.class.new(*@components.map { |c| c / scalar })
    end

    private

    # Builds a new vector by combining corresponding components of self
    # and other with the given block.
    def combine(other)
      merged = @components.each_index.map { |i| yield(@components[i], other.components[i]) }
      self.class.new(*merged)
    end
  end
end
Yeah::V = Yeah::Vector
vector +/-@
require 'forwardable'
module Yeah
  # Simple numeric vector with component-wise arithmetic, scalar scaling
  # and unary +/- operators.
  class Vector
    extend Forwardable

    class << self
      # Vector[1, 2] is shorthand for Vector.new(1, 2).
      def [](*args)
        new(*args)
      end
    end

    attr_reader :components
    alias_method :to_a, :components
    def_delegators :@components, :[]

    def initialize(*components)
      @components = components
    end

    # x/y/z read and write components 0, 1 and 2 respectively.
    %i[x y z].each_with_index do |component, i|
      define_method(component) { @components[i] }
      define_method("#{component}=") { |v| @components[i] = v }
    end

    # Component-wise addition with another vector.
    def +(vector)
      self.class.new(*(0...@components.count).map { |i|
        @components[i] + vector.components[i]
      })
    end

    # Component-wise subtraction of another vector.
    def -(vector)
      self.class.new(*(0...@components.count).map { |i|
        @components[i] - vector.components[i]
      })
    end

    # Scalar multiplication.
    def *(number)
      self.class.new(*(0...@components.count).map { |i|
        @components[i] * number
      })
    end

    # Scalar division.
    def /(number)
      self.class.new(*(0...@components.count).map { |i|
        @components[i] / number
      })
    end

    # Unary plus: returns a copy with the same components.
    # Bug fix: the components array must be splatted — new(@components)
    # built a vector whose single component was the whole array.
    def +@
      self.class.new(*@components)
    end

    # Unary minus: negates every component.
    def -@
      self.class.new(*(0...@components.count).map { |i| -@components[i] })
    end
  end
end
Yeah::V = Yeah::Vector
|
module ZCI
# Gem version string (semantic versioning).
VERSION = '0.0.4'
end
version 0.0.5
module ZCI
# Gem version string (semantic versioning).
VERSION = '0.0.5'
end
|
require 'zeamays/cob/gene'
require 'zeamays/cob/growth'
require 'zeamays/cob/freezing'
module Zeamays
# Cob composes its behaviour from mixins: Gene at the class level
# (extend), Growth and Freezing at the instance level (include).
class Cob
extend Gene
include Growth
include Freezing
end
end
Update cob.rb
Cob extend Defreezing
require 'zeamays/cob/gene'
require 'zeamays/cob/growth'
require 'zeamays/cob/freezing'
require 'zeamays/cob/defreezing'
module Zeamays
# Cob composes its behaviour from mixins: Gene and Defreezing at the
# class level (extend), Growth and Freezing at the instance level (include).
class Cob
extend Gene
include Growth
include Freezing
extend Defreezing
end
end
|
#
# Cookbook Name:: systemd
#
# Copyright 2016 The Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/resource/directory'
require 'chef/resource/file'
require 'chef/resource'
require 'chef/provider'
require_relative 'systemd'
require_relative 'mixins'
require_relative 'helpers'
class SystemdCookbook
class Utils
class Provider < Chef::Provider
Systemd::UTILS.each do |util|
provides "systemd_#{util}".to_sym
end
def load_current_resource
@current_resource =
SystemdCookbook::Utils.const_get(new_resource.util_type)
.new(new_resource.name)
current_resource.content =
File.read(util_path) if File.exist?(util_path)
end
def action_create
if @content != new_resource.to_ini
converge_by("creating systemd util config: #{new_resource.name}") do
Chef::Resource::Directory.new(File.dirname(util_path), run_context)
.run_action(:create)
manage_util_config(:create)
end
end
end
def action_delete
if ::File.exist?(util_path)
converge_by("deleting systemd util config: #{new_resource.name}") do
manage_util_config(:delete)
end
end
end
private
def util_path
"/etc/systemd/#{new_resource.util_type}.conf.d/#{new_resource.name}.conf"
end
def manage_util_config(action = :nothing)
Chef::Resource::File.new(util_path, run_context).tap do |f|
f.content new_resource.to_ini
end.run_action(action)
end
end
Systemd::UTILS.each do |util|
SystemdCookbook::Utils.const_set(
util.capitalize,
Class.new(Chef::Resource) do
UTIL ||= util
include Systemd::Mixins::Conversion
resource_name "systemd_#{util}".to_sym
default_action :create
allowed_actions :create, :delete
def util_type
UTIL
end
property :content, String, desired_state: true
option_properties Systemd.const_get(util.capitalize)::OPTIONS
def to_ini
Systemd::Helpers.hash_to_ini(
property_hash(Systemd.const_get(UTIL.capitalize)::OPTIONS)
)
end
end
)
end
end
end
fix cop
#
# Cookbook Name:: systemd
#
# Copyright 2016 The Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/resource/directory'
require 'chef/resource/file'
require 'chef/resource'
require 'chef/provider'
require_relative 'systemd'
require_relative 'mixins'
require_relative 'helpers'
class SystemdCookbook
  # Dynamic resource/provider pair for systemd utility drop-in configs
  # (written to /etc/systemd/<util>.conf.d/<name>.conf).
  class Utils
    class Provider < Chef::Provider
      # One provider implementation serves every systemd_<util> resource.
      Systemd::UTILS.each do |util|
        provides "systemd_#{util}".to_sym
      end

      # Loads the on-disk config (when present) so create can detect drift.
      def load_current_resource
        @current_resource =
          SystemdCookbook::Utils.const_get(new_resource.util_type)
                                .new(new_resource.name)
        current_resource.content =
          File.read(util_path) if File.exist?(util_path)
      end

      def action_create
        # Bug fix: the old guard compared the never-assigned @content ivar
        # (always nil) against the desired INI, so the resource converged on
        # every Chef run. Compare the loaded current content instead.
        if current_resource.content != new_resource.to_ini
          converge_by("creating systemd util config: #{new_resource.name}") do
            Chef::Resource::Directory.new(File.dirname(util_path), run_context)
                                     .run_action(:create)
            manage_util_config(:create)
          end
        end
      end

      def action_delete
        if ::File.exist?(util_path)
          converge_by("deleting systemd util config: #{new_resource.name}") do
            manage_util_config(:delete)
          end
        end
      end

      private

      # Drop-in file path for this util type and resource name.
      def util_path
        r = new_resource
        "/etc/systemd/#{r.util_type}.conf.d/#{r.name}.conf"
      end

      # Creates or deletes the config file via an inline file resource.
      def manage_util_config(action = :nothing)
        Chef::Resource::File.new(util_path, run_context).tap do |f|
          f.content new_resource.to_ini
        end.run_action(action)
      end
    end

    # Generate one lightweight resource class per util type.
    Systemd::UTILS.each do |util|
      SystemdCookbook::Utils.const_set(
        util.capitalize,
        Class.new(Chef::Resource) do
          UTIL ||= util
          include Systemd::Mixins::Conversion
          resource_name "systemd_#{util}".to_sym
          default_action :create
          allowed_actions :create, :delete

          def util_type
            UTIL
          end

          property :content, String, desired_state: true
          option_properties Systemd.const_get(util.capitalize)::OPTIONS

          # Serializes the set option properties into systemd INI format.
          def to_ini
            Systemd::Helpers.hash_to_ini(
              property_hash(Systemd.const_get(UTIL.capitalize)::OPTIONS)
            )
          end
        end
      )
    end
  end
end
|
require 'fileutils'
require 'parallel'
require 'sqlite3'
require 'sequel'
# Factory for the active page store implementation (SQLite-backed).
def page
  SQLiteStore.new
end
# SQLite-backed page store. Pages live in a `pages` table; a one-row
# `version` table tracks the schema version (-1 means "no schema yet").
class SQLiteStore
  attr_reader :version

  def initialize
    @version = get_version
  end

  # Path of the SQLite database file under the configured base directory.
  def db_file_path
    get_base_path("data.sqlite")
  end

  # True when the database file already exists on disk.
  # Bug fix: File.exists? was removed in Ruby 3.2; use File.exist?.
  def db_exists
    File.exist?(db_file_path)
  end

  # Lazily opened Sequel connection, memoized for the store's lifetime.
  def connection
    return @connection if @connection != nil
    @connection = Sequel.sqlite(db_file_path)
    return @connection
  end

  # Schema version from the version table, or -1 when the table is missing.
  def get_version
    return connection[:version].get(:version).to_i if connection.table_exists?(:version)
    return -1
  end

  def get_base_path(file_path)
    return get_conf['base_files_directory'] + "/#{file_path}"
  end

  def update_database_version(version)
    connection[:version].update(:version => version)
  end

  # First-run migration: creates the schema and imports every page from the
  # flat-file store. Only runs when no schema exists yet (version == -1).
  def migrate
    if @version == -1
      connection.create_table(:version) do
        Bignum :version
      end
      connection.create_table(:pages) do
        primary_key :id, type: Bignum
        String :page_id
        Text :title
        Text :content
      end
      connection[:version].insert(0)
      FlatFileStore.new.list.each {|id|
        data = FlatFileStore.new.load(id)
        title = data['title']
        content = data['content']
        connection[:pages].insert(page_id: id, title: title, content: content)
      }
    end
  end

  # Disconnects and drops the memoized connection.
  def close
    if @connection
      @connection.disconnect
      @connection = nil
    end
  end

  def load(page_id)
    page = connection[:pages].where(page_id: page_id).first
    to_hash(page)
  end

  # Persists new content for a page and invalidates its render cache.
  def save(page_id, content)
    connection[:pages].where(page_id: page_id).update(content: content['content'])
    destroy_cache(page_id)
  end

  def to_hash(page)
    {
      'title' => page[:title],
      'id' => page[:id],
      'content' => page[:content]
    }
  end

  # All page ids, sorted.
  def list
    connection[:pages].collect {|page| page[:page_id] }.sort
  end

  def delete(page_id)
    destroy_cache(page_id)
    connection[:pages].where(page_id: page_id).delete
  end
end
# Flat-file page store: one JSON file per page under <base>/pages.
class FlatFileStore
  # Bug fix: File.exists? was removed in Ruby 3.2; use File.exist?.
  def exists?(page_id)
    File.exist?("#{get_conf['base_files_directory']}/pages/#{page_id}")
  end

  def load(page_id)
    JSON.parse(File.read(get_page_filepath(page_id)))
  end

  def get_page_filepath(page_id)
    return "#{get_conf['base_files_directory']}/pages/#{page_id}"
  end

  # Writes the page JSON; rejects ids that could escape the pages directory.
  def save(page_id, content)
    if page_id.include?('/') || page_id.include?('..')
      raise "Cannot create a page containing either / or .."
    end
    page_filepath = get_page_filepath(page_id)
    File.open(page_filepath, "w+") {|f| f.write JSON.dump(content) }
    destroy_cache(page_id)
  end

  # Page ids are all non-hidden entries of the pages directory.
  def list
    Dir.entries("#{get_conf['base_files_directory']}/pages").reject {|file| file.index('.') == 0}
  end

  def delete(page_id)
    destroy_cache(page_id)
    page_filepath = get_page_filepath(page_id)
    File.delete(page_filepath) if File.exist?(page_filepath)
  end
end
# Appends '/edit' to the path component of the given URL.
def get_edit_link(url)
  edit_uri = URI.parse(url)
  edit_uri.path = edit_uri.path + '/edit'
  edit_uri.to_s
end
# Strips surrounding whitespace, then removes one pair of matching
# surrounding quotes (double or single) if present.
def strip_quotes(val)
  text = (val || "").to_s.strip
  ['"', "'"].each do |quote|
    return text[1...-1] if text[0] == quote && text[text.length - 1] == quote
  end
  text
end
# Removes a page's cached renders (no-op when no cache directory exists).
# Bug fix: Dir.exists? was removed in Ruby 3.2; use Dir.exist?.
def destroy_cache(page_id)
  cache_filepath = "#{get_conf['base_files_directory']}/cache/#{page_id}"
  FileUtils.rm_rf(cache_filepath) if Dir.exist?(cache_filepath)
end
# Returns [cache_dir, file_name] for a page's cached render; renders with
# parameters get their own file named after the parameter string.
def get_cache_file_name(page_id, params)
  base_path = "#{get_conf['base_files_directory']}/cache/#{page_id}"
  file_name = params && params.length > 0 ? params : page_id
  [base_path, file_name]
end
# Returns [mtime, content] for a cached render, or nil on cache miss.
# Bug fix: File.exists? was removed in Ruby 3.2; use File.exist?.
def get_cached_page(page_id, params)
  base_path, file_name = get_cache_file_name(page_id, params)
  path = base_path + "/" + file_name
  return [File.mtime(path), File.read(path)] if File.exist?(path)
  return nil
end
# Writes rendered content into the page's cache dir, creating it on demand.
# Bug fix: Dir.exists? was removed in Ruby 3.2; use Dir.exist?.
def write_cached_page(page_id, params, content)
  base_path, file_name = get_cache_file_name(page_id, params)
  FileUtils.mkdir_p(base_path) if !Dir.exist?(base_path)
  path = base_path + "/" + file_name
  File.open(path, "w+") {|f| f.write(content) }
end
# Extracts "key=value" tokens from params into a Hash; returns {} when
# nothing matches the expected token grammar.
def parse_params(params)
  tokens = params.scan(/\w+=(?:\w+|::\w+::|"[\w ]+")(?:,(?:\w+|::\w+::|"[\w ]+"))*/)
  return {} if tokens.empty?
  tokens.map { |token| token.split("=") }.to_h
end
# View helper mixin for emitting HTML form wrappers.
module FormTag
  # Wraps opts[:text] in a GET form tag.
  def form(opts)
    '<form method="GET" action="">' + opts[:text] + '</form>'
  end
end
# Formats one result cell: BigDecimals become two-decimal strings,
# everything else is rendered via to_s.
def format_column(col)
  col.is_a?(BigDecimal) ? format("%.2f", col) : col.to_s
end
# Renders a Textile-style table: an optional CSS-class line, a header row
# built from cols, then one row per entry of result.
def render_table(cols, result, markdown_table_class_added)
  lines = []
  lines << "table(table table-compact)." unless markdown_table_class_added
  lines << "|_." + cols.map { |col| col.to_s }.join("|_.") + "|"
  lines << result.map { |row|
    "|" + row.map { |col| format_column(col) }.join("|") + "|"
  }.join("\n")
  lines.join("\n")
end
# Wraps val in single quotes after stripping any surrounding quotes.
# Bug fix: the interpolation braces were missing, so this returned the
# literal text "'strip_quotes(val.to_s)'" for every input.
def single_quoted(val)
  "'#{strip_quotes(val.to_s)}'"
end
# Wraps val in double quotes after stripping any surrounding quotes.
# Bug fix: the interpolation braces were missing, so this returned the
# literal text "strip_quotes(val.to_s)" (quoted) for every input.
def double_quoted(val)
  "\"#{strip_quotes(val.to_s)}\""
end
# Emits the HTML/JS for a Google Charts visualization of `matrix` (data
# rows) with `cols` as the header row. Returns a bracketed error string
# when required titles or the chart type are missing.
def emit_chart(chart_type, matrix, cols, name, title, xtitle, ytitle, height, width)
  matrix = matrix.clone
  matrix.unshift cols
  # Axis titles are mandatory for everything except pie charts.
  if chart_type != :pie
    if xtitle == nil
      return "[xtitle not specified for #{chart_type.to_s} chart.]"
    elsif ytitle == nil
      return "[ytitle not specified for #{chart_type.to_s} chart.]"
    end
  end
  js_object_name = {:line => 'LineChart', :bar => 'BarChart', :pie => 'PieChart'}[chart_type]
  if js_object_name == nil
    return "[Chart type not recognized.]"
  end
  formatted_data = "[" + matrix.collect {|row|
    "[" + row.collect {|val|
      val.is_a?(String) ? "\"#{val}\"" : val.to_s
    }.join(',') + "]"
  }.join(',') + "]"
  width = strip_quotes(width) if width
  height = strip_quotes(height) if height
  options = "var options = {"
  options += " title: '#{title}'," if title
  options += " height: '#{height}'," if height
  options += " width: '#{width}'," if width
  # Bug fix: chart_type is :bar / :line (see js_object_name above), so the
  # old guard against [:bar_chart, :line_chart] never matched and the axis
  # titles were silently dropped from the emitted options.
  options += "colors: ['#D3D3D3'], vAxis: {title: '#{ytitle}'}, hAxis: {title: '#{xtitle}'}" if [:bar, :line].include?(chart_type)
  options += "};"
  width_clause = width != nil ? "width: #{width}; " : ""
  height_clause = height != nil ? "height: #{height}; " : ""
  # A pseudo-random element id avoids collisions between charts on a page
  # (deliberately overrides the name parameter).
  name = Random.srand.to_s
  return "<script type=\"text/javascript\">
google.load(\"visualization\", \"1\", {packages:[\"corechart\"]});
google.setOnLoadCallback(drawChart);
function drawChart() {
var data = google.visualization.arrayToDataTable(#{formatted_data});
#{options}
var chart = new google.visualization.#{js_object_name}(document.getElementById('#{name}'));
chart.draw(data, options);
} </script> <div id=\"#{name}\" style=\"#{width_clause} #{height_clause}\"></div>"
end
# Startup hook: build the SQLite schema (and import flat-file pages) on the
# very first run, then release the connection so handlers reopen their own.
store = SQLiteStore.new
if store.version == -1
store.migrate
end
store.close
store = nil
Added a unique constraint to page_id in pages
Enabled the write ahead log
require 'fileutils'
require 'parallel'
require 'sqlite3'
require 'sequel'
# Factory for the active page store implementation (SQLite-backed).
def page
return SQLiteStore.new
end
# SQLite-backed page store. Pages live in a `pages` table (page_id unique);
# a one-row `version` table tracks the schema version (-1 = no schema yet).
class SQLiteStore
  attr_reader :version

  def initialize
    @version = get_version
  end

  # Path of the SQLite database file under the configured base directory.
  def db_file_path
    get_base_path("data.sqlite")
  end

  # True when the database file already exists on disk.
  # Bug fix: File.exists? was removed in Ruby 3.2; use File.exist?.
  def db_exists
    File.exist?(db_file_path)
  end

  # Lazily opened Sequel connection, memoized for the store's lifetime.
  def db
    return @db if @db != nil
    @db = Sequel.sqlite(db_file_path)
    return @db
  end

  # Schema version from the version table, or -1 when the table is missing.
  def get_version
    return db[:version].get(:version).to_i if db.table_exists?(:version)
    return -1
  end

  def get_base_path(file_path)
    return get_conf['base_files_directory'] + "/#{file_path}"
  end

  def update_database_version(version)
    db[:version].update(:version => version)
  end

  # First-run migration: creates the schema, imports every page from the
  # flat-file store and enables write-ahead logging.
  def migrate
    if @version == -1
      db.create_table(:version) do
        Bignum :version
      end
      db.create_table(:pages) do
        primary_key :id, type: Bignum
        String :page_id, unique: true
        Text :title
        Text :content
      end
      db[:version].insert(0)
      FlatFileStore.new.list.each {|id|
        data = FlatFileStore.new.load(id)
        title = data['title']
        content = data['content']
        db[:pages].insert(page_id: id, title: title, content: content)
      }
      # WAL improves behaviour under concurrent readers/writers.
      db.run 'PRAGMA journal_mode=WAL'
    end
  end

  # Disconnects and drops the memoized connection.
  def close
    if @db
      @db.disconnect
      @db = nil
    end
  end

  def load(page_id)
    page = db[:pages].where(page_id: page_id).first
    to_hash(page)
  end

  # Persists new content for a page and invalidates its render cache.
  def save(page_id, content)
    db[:pages].where(page_id: page_id).update(content: content['content'])
    destroy_cache(page_id)
  end

  def to_hash(page)
    {
      'title' => page[:title],
      'id' => page[:id],
      'content' => page[:content]
    }
  end

  # All page ids, sorted.
  def list
    db[:pages].collect {|page| page[:page_id] }.sort
  end

  def delete(page_id)
    destroy_cache(page_id)
    db[:pages].where(page_id: page_id).delete
  end
end
# Flat-file page store: one JSON file per page under <base>/pages.
class FlatFileStore
  # Bug fix: File.exists? was removed in Ruby 3.2; use File.exist?.
  def exists?(page_id)
    File.exist?("#{get_conf['base_files_directory']}/pages/#{page_id}")
  end

  def load(page_id)
    JSON.parse(File.read(get_page_filepath(page_id)))
  end

  def get_page_filepath(page_id)
    return "#{get_conf['base_files_directory']}/pages/#{page_id}"
  end

  # Writes the page JSON; rejects ids that could escape the pages directory.
  def save(page_id, content)
    if page_id.include?('/') || page_id.include?('..')
      raise "Cannot create a page containing either / or .."
    end
    page_filepath = get_page_filepath(page_id)
    File.open(page_filepath, "w+") {|f| f.write JSON.dump(content) }
    destroy_cache(page_id)
  end

  # Page ids are all non-hidden entries of the pages directory.
  def list
    Dir.entries("#{get_conf['base_files_directory']}/pages").reject {|file| file.index('.') == 0}
  end

  def delete(page_id)
    destroy_cache(page_id)
    page_filepath = get_page_filepath(page_id)
    File.delete(page_filepath) if File.exist?(page_filepath)
  end
end
# Appends '/edit' to the path component of the given URL.
def get_edit_link(url)
  edit_uri = URI.parse(url)
  edit_uri.path = edit_uri.path + '/edit'
  edit_uri.to_s
end
# Strips surrounding whitespace, then removes one pair of matching
# surrounding quotes (double or single) if present.
def strip_quotes(val)
  text = (val || "").to_s.strip
  ['"', "'"].each do |quote|
    return text[1...-1] if text[0] == quote && text[text.length - 1] == quote
  end
  text
end
# Removes a page's cached renders (no-op when no cache directory exists).
# Bug fix: Dir.exists? was removed in Ruby 3.2; use Dir.exist?.
def destroy_cache(page_id)
  cache_filepath = "#{get_conf['base_files_directory']}/cache/#{page_id}"
  FileUtils.rm_rf(cache_filepath) if Dir.exist?(cache_filepath)
end
# Returns [cache_dir, file_name] for a page's cached render; renders with
# parameters get their own file named after the parameter string.
def get_cache_file_name(page_id, params)
  base_path = "#{get_conf['base_files_directory']}/cache/#{page_id}"
  file_name = params && params.length > 0 ? params : page_id
  [base_path, file_name]
end
# Returns [mtime, content] for a cached render, or nil on cache miss.
# Bug fix: File.exists? was removed in Ruby 3.2; use File.exist?.
def get_cached_page(page_id, params)
  base_path, file_name = get_cache_file_name(page_id, params)
  path = base_path + "/" + file_name
  return [File.mtime(path), File.read(path)] if File.exist?(path)
  return nil
end
# Writes rendered content into the page's cache dir, creating it on demand.
# Bug fix: Dir.exists? was removed in Ruby 3.2; use Dir.exist?.
def write_cached_page(page_id, params, content)
  base_path, file_name = get_cache_file_name(page_id, params)
  FileUtils.mkdir_p(base_path) if !Dir.exist?(base_path)
  path = base_path + "/" + file_name
  File.open(path, "w+") {|f| f.write(content) }
end
# Extracts "key=value" tokens from params into a Hash; returns {} when
# nothing matches the expected token grammar.
def parse_params(params)
  tokens = params.scan(/\w+=(?:\w+|::\w+::|"[\w ]+")(?:,(?:\w+|::\w+::|"[\w ]+"))*/)
  return {} if tokens.empty?
  tokens.map { |token| token.split("=") }.to_h
end
# View helper mixin for emitting HTML form wrappers.
module FormTag
  # Wraps opts[:text] in a GET form tag.
  def form(opts)
    '<form method="GET" action="">' + opts[:text] + '</form>'
  end
end
# Formats one result cell: BigDecimals become two-decimal strings,
# everything else is rendered via to_s.
def format_column(col)
  col.is_a?(BigDecimal) ? format("%.2f", col) : col.to_s
end
# Renders a Textile-style table: an optional CSS-class line, a header row
# built from cols, then one row per entry of result.
def render_table(cols, result, markdown_table_class_added)
  lines = []
  lines << "table(table table-compact)." unless markdown_table_class_added
  lines << "|_." + cols.map { |col| col.to_s }.join("|_.") + "|"
  lines << result.map { |row|
    "|" + row.map { |col| format_column(col) }.join("|") + "|"
  }.join("\n")
  lines.join("\n")
end
# Wraps val in single quotes after stripping any surrounding quotes.
# Bug fix: the interpolation braces were missing, so this returned the
# literal text "'strip_quotes(val.to_s)'" for every input.
def single_quoted(val)
  "'#{strip_quotes(val.to_s)}'"
end
# Wraps val in double quotes after stripping any surrounding quotes.
# Bug fix: the interpolation braces were missing, so this returned the
# literal text "strip_quotes(val.to_s)" (quoted) for every input.
def double_quoted(val)
  "\"#{strip_quotes(val.to_s)}\""
end
# Emits the HTML/JS for a Google Charts visualization of `matrix` (data
# rows) with `cols` as the header row. Returns a bracketed error string
# when required titles or the chart type are missing.
def emit_chart(chart_type, matrix, cols, name, title, xtitle, ytitle, height, width)
  matrix = matrix.clone
  matrix.unshift cols
  # Axis titles are mandatory for everything except pie charts.
  if chart_type != :pie
    if xtitle == nil
      return "[xtitle not specified for #{chart_type.to_s} chart.]"
    elsif ytitle == nil
      return "[ytitle not specified for #{chart_type.to_s} chart.]"
    end
  end
  js_object_name = {:line => 'LineChart', :bar => 'BarChart', :pie => 'PieChart'}[chart_type]
  if js_object_name == nil
    return "[Chart type not recognized.]"
  end
  formatted_data = "[" + matrix.collect {|row|
    "[" + row.collect {|val|
      val.is_a?(String) ? "\"#{val}\"" : val.to_s
    }.join(',') + "]"
  }.join(',') + "]"
  width = strip_quotes(width) if width
  height = strip_quotes(height) if height
  options = "var options = {"
  options += " title: '#{title}'," if title
  options += " height: '#{height}'," if height
  options += " width: '#{width}'," if width
  # Bug fix: chart_type is :bar / :line (see js_object_name above), so the
  # old guard against [:bar_chart, :line_chart] never matched and the axis
  # titles were silently dropped from the emitted options.
  options += "colors: ['#D3D3D3'], vAxis: {title: '#{ytitle}'}, hAxis: {title: '#{xtitle}'}" if [:bar, :line].include?(chart_type)
  options += "};"
  width_clause = width != nil ? "width: #{width}; " : ""
  height_clause = height != nil ? "height: #{height}; " : ""
  # A pseudo-random element id avoids collisions between charts on a page
  # (deliberately overrides the name parameter).
  name = Random.srand.to_s
  return "<script type=\"text/javascript\">
google.load(\"visualization\", \"1\", {packages:[\"corechart\"]});
google.setOnLoadCallback(drawChart);
function drawChart() {
var data = google.visualization.arrayToDataTable(#{formatted_data});
#{options}
var chart = new google.visualization.#{js_object_name}(document.getElementById('#{name}'));
chart.draw(data, options);
} </script> <div id=\"#{name}\" style=\"#{width_clause} #{height_clause}\"></div>"
end
# Startup hook: build the SQLite schema (and import flat-file pages) on the
# very first run, then release the connection so handlers reopen their own.
store = SQLiteStore.new
if store.version == -1
store.migrate
end
store.close
store = nil
|
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
Gem::Specification.new do |gem|
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = 'linkeddata'
gem.homepage = 'http://ruby-rdf.github.com/'
gem.license = 'Unlicense'
gem.summary = 'Linked Data for Ruby.'
gem.description = 'A metadistribution of RDF.rb including a full set of parsing/serialization plugins.'
gem.authors = ['Arto Bendiken', 'Ben Lavender', 'Gregg Kellogg', 'Tom Johnson']
gem.email = 'public-rdf-ruby@w3.org'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS CREDITS README.md UNLICENSE VERSION lib/linkeddata.rb)
gem.bindir = %q(bin)
gem.executables = %w()
gem.default_executable = gem.executables.first
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false
gem.required_ruby_version = '>= 2.2.2'
gem.requirements = []
gem.add_runtime_dependency 'rdf', '~> 2.2'
gem.add_runtime_dependency 'rdf-aggregate-repo', '~> 2.1'
gem.add_runtime_dependency 'rdf-isomorphic', '~> 2.0'
gem.add_runtime_dependency 'rdf-json', '~> 2.0'
gem.add_runtime_dependency 'rdf-microdata', '~> 2.1'
gem.add_runtime_dependency 'rdf-n3', '~> 2.1'
gem.add_runtime_dependency 'rdf-normalize', '~> 0.3'
gem.add_runtime_dependency 'rdf-rdfa', '~> 2.1'
gem.add_runtime_dependency 'rdf-rdfxml', '~> 2.0'
gem.add_runtime_dependency 'rdf-reasoner', '~> 0.4'
gem.add_runtime_dependency 'rdf-tabular', '~> 2.2'
gem.add_runtime_dependency 'rdf-trig', '~> 2.0'
gem.add_runtime_dependency 'rdf-trix', '~> 2.0'
gem.add_runtime_dependency 'rdf-turtle', '~> 2.2'
gem.add_runtime_dependency 'rdf-vocab', '~> 2.1'
gem.add_runtime_dependency 'rdf-xsd', '~> 2.1'
gem.add_runtime_dependency 'json-ld', '~> 2.1'
gem.add_runtime_dependency 'ld-patch', '~> 0.3'
gem.add_runtime_dependency 'shex', '~> 0.5'
gem.add_runtime_dependency 'sparql', '~> 2.1'
gem.add_runtime_dependency 'sparql-client', '~> 2.1'
gem.add_runtime_dependency 'nokogiri', '~> 1.7'
gem.add_runtime_dependency 'equivalent-xml', '~> 0.6'
gem.add_development_dependency 'yard', '~> 0.9'
gem.add_development_dependency 'rspec', '~> 3.5'
gem.add_development_dependency 'rspec-its', '~> 1.2'
gem.add_development_dependency 'rdf-spec', '~> 2.2'
gem.post_install_message = nil
end
Relax dependences to prep for 3.0 release.
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
Gem::Specification.new do |gem|
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = 'linkeddata'
gem.homepage = 'http://ruby-rdf.github.com/'
gem.license = 'Unlicense'
gem.summary = 'Linked Data for Ruby.'
gem.description = 'A metadistribution of RDF.rb including a full set of parsing/serialization plugins.'
gem.authors = ['Arto Bendiken', 'Ben Lavender', 'Gregg Kellogg', 'Tom Johnson']
gem.email = 'public-rdf-ruby@w3.org'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS CREDITS README.md UNLICENSE VERSION lib/linkeddata.rb)
gem.bindir = %q(bin)
gem.executables = %w()
gem.default_executable = gem.executables.first
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false
gem.required_ruby_version = '>= 2.2.2'
gem.requirements = []
#gem.add_runtime_dependency 'rdf', '~> 3.0'
#gem.add_runtime_dependency 'rdf-aggregate-repo', '~> 3.0'
#gem.add_runtime_dependency 'rdf-isomorphic', '~> 3.0'
#gem.add_runtime_dependency 'rdf-json', '~> 3.0'
#gem.add_runtime_dependency 'rdf-microdata', '~> 3.0'
#gem.add_runtime_dependency 'rdf-n3', '~> 3.0'
#gem.add_runtime_dependency 'rdf-rdfa', '~> 3.0'
#gem.add_runtime_dependency 'rdf-rdfxml', '~> 3.0'
#gem.add_runtime_dependency 'rdf-reasoner', '~> 3.0'
#gem.add_runtime_dependency 'rdf-tabular', '~> 3.0'
#gem.add_runtime_dependency 'rdf-trig', '~> 3.0'
#gem.add_runtime_dependency 'rdf-trix', '~> 3.0'
#gem.add_runtime_dependency 'rdf-turtle', '~> 3.0'
#gem.add_runtime_dependency 'rdf-vocab', '~> 3.0'
#gem.add_runtime_dependency 'rdf-xsd', '~> 3.0'
#gem.add_runtime_dependency 'json-ld', '~> 3.0'
#gem.add_runtime_dependency 'ld-patch', '~> 3.0'
#gem.add_runtime_dependency 'shex', '~> 3.0'
#gem.add_runtime_dependency 'sparql', '~> 3.0'
#gem.add_runtime_dependency 'sparql-client', '~> 3.0'
gem.add_runtime_dependency 'rdf', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-aggregate-repo', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-isomorphic', '>= 2.0', '< 4.0'
gem.add_runtime_dependency 'rdf-json', '>= 2.0', '< 4.0'
gem.add_runtime_dependency 'rdf-microdata', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-n3', '>= 2.1', '< 4.0'
gem.add_runtime_dependency 'rdf-normalize', '~> 0.3'
gem.add_runtime_dependency 'rdf-rdfa', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-rdfxml', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-reasoner', '~> 0.4'
gem.add_runtime_dependency 'rdf-tabular', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-trig', '>= 2.0', '< 4.0'
gem.add_runtime_dependency 'rdf-trix', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-turtle', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-vocab', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'rdf-xsd', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'json-ld', '>= 2.1', '< 4.0'
gem.add_runtime_dependency 'ld-patch', '~> 0.3'
gem.add_runtime_dependency 'shex', '~> 0.5'
gem.add_runtime_dependency 'sparql', '>= 2.2', '< 4.0'
gem.add_runtime_dependency 'sparql-client', '>= 2.1', '< 4.0'
gem.add_runtime_dependency 'nokogiri', '~> 1.8'
gem.add_runtime_dependency 'equivalent-xml', '~> 0.6'
gem.add_development_dependency 'yard', '~> 0.9'
gem.add_development_dependency 'rspec', '~> 3.7'
gem.add_development_dependency 'rspec-its', '~> 1.2'
#gem.add_development_dependency 'rdf-spec', '~> 2.2'
gem.add_development_dependency 'rdf-spec', '>= 2.2', '< 4.0'
gem.post_install_message = nil
end
|
Gem::Specification.new do |spec|
spec.name = "lita-bitly"
spec.version = "0.0.2"
spec.authors = ["Garfield Carneiro"]
spec.email = ["gary.carneiro@gmail.com"]
spec.description = %q{Shorten URL using Bit.ly}
spec.summary = %q{Looks for http/https URL and shortens them}
spec.homepage = "https://bitbucket.org/GaryCarneiro/lita-bitly"
spec.license = "MIT"
spec.metadata = { "lita_plugin_type" => "handler" }
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_runtime_dependency "lita", ">= 3.3"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", ">= 3.0.0"
spec.add_development_dependency "simplecov"
spec.add_development_dependency "coveralls"
spec.add_development_dependency "url_shortener"
end
Modified spec.homepage and version
Gem::Specification.new do |spec|
spec.name = "lita-bitly"
spec.version = "0.0.3"
spec.authors = ["Garfield Carneiro"]
spec.email = ["gary.carneiro@gmail.com"]
spec.description = %q{Shorten URL using Bit.ly}
spec.summary = %q{Extend lita command to append shorten or bitly}
spec.homepage = "https://github.com/GaryCarneiro/lita-bitly"
spec.license = "MIT"
spec.metadata = { "lita_plugin_type" => "handler" }
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_runtime_dependency "lita", ">= 3.3"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", ">= 3.0.0"
spec.add_development_dependency "simplecov"
spec.add_development_dependency "coveralls"
spec.add_development_dependency "url_shortener"
end
|
Gem::Specification.new do |spec|
spec.name = "lita-karma"
spec.version = "1.2.0"
spec.authors = ["Jimmy Cuadra"]
spec.email = ["jimmy@jimmycuadra.com"]
spec.description = %q{A Lita handler for tracking karma points for arbitrary terms.}
spec.summary = %q{A Lita handler for tracking karma points for arbitrary terms.}
spec.homepage = "https://github.com/jimmycuadra/lita-karma"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_runtime_dependency "lita", "~> 2.6"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", "~> 2.14"
spec.add_development_dependency "simplecov"
spec.add_development_dependency "coveralls"
end
Bump version to 2.0.0.
# Gemspec for lita-karma, a Lita chat-bot handler plugin.
Gem::Specification.new do |spec|
spec.name = "lita-karma"
spec.version = "2.0.0"
spec.authors = ["Jimmy Cuadra"]
spec.email = ["jimmy@jimmycuadra.com"]
spec.description = %q{A Lita handler for tracking karma points for arbitrary terms.}
spec.summary = %q{A Lita handler for tracking karma points for arbitrary terms.}
spec.homepage = "https://github.com/jimmycuadra/lita-karma"
spec.license = "MIT"
# Package every git-tracked file; executables and tests derive from it.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_runtime_dependency "lita", "~> 2.6"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", "~> 2.14"
spec.add_development_dependency "simplecov"
spec.add_development_dependency "coveralls"
end
|
Added sequence column to operations table to maintain the order of bitmask.
# Adds an integer "sequence" column to the operations table. Per the
# accompanying commit message, it records each operation's position so
# the order of the bitmask is preserved.
class AddOrderToOperations < ActiveRecord::Migration
  def change
    # add_column is reversible, so ActiveRecord can derive the rollback.
    add_column :operations, :sequence, :integer
  end
end
|
module Repl
  # Wraps a Mysql2 connection to a Wikipedia replica database together
  # with the xtools.wmflabs.org per-user statistics HTTP API.
  class Session
    require 'mysql2'
    require 'httparty'
    require 'uri'
    # opts are passed straight to Mysql2::Client; :log (deleted from opts)
    # turns on SQL echoing and :db picks the replica DB (default enwiki_p).
    def initialize(opts)
      @logging = opts.delete(:log)
      @client = Mysql2::Client.new(opts)
      @db = opts[:db] || 'enwiki_p'
      @getter = HTTParty
      @base_uri = 'https://xtools.wmflabs.org/api/user'
      # BUG FIX: URI.escape was deprecated in Ruby 2.7 and removed in 3.0;
      # percent-escape through an RFC 2396 parser instance instead.
      @uri = URI::Parser.new
    end
    # Number of pages the user created.
    # NOTE(review): `username.score` appears to rely on a project String
    # extension — confirm where #score is defined.
    def count_articles_created(username)
      @getter.get(
        "#{@base_uri}/pages_count/#{@db}/#{@uri.escape(username.score)}"
      )['counts']['count'].to_i
    end
    # Edit count for the user in the given namespace (default 0 = articles).
    def count_namespace_edits(username, namespace = 0)
      @getter.get(
        "#{@base_uri}/namespace_totals/#{@db}/#{@uri.escape(username.score)}"
      )['namespace_totals'][namespace.to_s].to_i
    end
    # Non-automated edit count for the user on en.wikipedia.org.
    def count_nonautomated_edits(username)
      @getter.get(
        "#{@base_uri}/automated_editcount/en.wikipedia.org/#{@uri.escape(username.score)}"
      )['nonautomated_editcount'].to_i
    end
    # Non-automated edit count restricted to one namespace.
    def count_nonautomated_namespace_edits(username, namespace)
      @getter.get(
        "#{@base_uri}/automated_editcount/en.wikipedia.org/#{@uri.escape(username.score)}/#{namespace}"
      )['nonautomated_editcount'].to_i
    end
    # FIXME: countAutomatedEdits is not defined anywhere in this file, so
    # calling this raises NameError — confirm the intended helper.
    def count_tool_edits(username, tool)
      countAutomatedEdits(username, false, tool)
    end
    # Runs a SQL query (echoed to stdout when logging is on).
    def query(sql)
      puts sql if @logging
      @client.query(sql)
    end
    # Prepares a SQL statement (echoed to stdout when logging is on).
    def prepare(sql)
      puts sql if @logging
      @client.prepare(sql)
    end
    # Escapes a string for safe interpolation into SQL.
    def escape(string)
      @client.escape(string)
    end
    # The HTTP client (HTTParty), exposed for tests/overrides.
    def getter
      @getter
    end
    private
    # First value of the first row of a COUNT-style query, as an Integer.
    def count(sql)
      query(sql).first.values[0].to_i
    end
  end
end
Repl: fix URI.escape deprecation
module Repl
  # Bundles a Mysql2 connection to a Wikipedia replica database with the
  # xtools.wmflabs.org per-user statistics HTTP API.
  class Session
    require 'mysql2'
    require 'httparty'

    # The HTTP client (HTTParty), exposed for tests/overrides.
    attr_reader :getter

    # opts go straight to Mysql2::Client; :log (deleted from opts) turns
    # on SQL echoing, :db picks the replica DB (default enwiki_p).
    def initialize(opts)
      @logging  = opts.delete(:log)
      @client   = Mysql2::Client.new(opts)
      @db       = opts[:db] || 'enwiki_p'
      @getter   = HTTParty
      @base_uri = 'https://xtools.wmflabs.org/api/user'
      @uri      = URI::Parser.new
    end

    # Number of pages the user created.
    def count_articles_created(username)
      fetch("pages_count/#{@db}/#{escaped(username)}")['counts']['count'].to_i
    end

    # Edit count in the given namespace (default 0 = articles).
    def count_namespace_edits(username, namespace = 0)
      fetch("namespace_totals/#{@db}/#{escaped(username)}")['namespace_totals'][namespace.to_s].to_i
    end

    # Non-automated edit count on en.wikipedia.org.
    def count_nonautomated_edits(username)
      fetch("automated_editcount/en.wikipedia.org/#{escaped(username)}")['nonautomated_editcount'].to_i
    end

    # Non-automated edit count restricted to one namespace.
    def count_nonautomated_namespace_edits(username, namespace)
      fetch("automated_editcount/en.wikipedia.org/#{escaped(username)}/#{namespace}")['nonautomated_editcount'].to_i
    end

    # NOTE(review): countAutomatedEdits is not defined in this file —
    # calling this raises NameError; confirm the intended helper.
    def count_tool_edits(username, tool)
      countAutomatedEdits(username, false, tool)
    end

    # Runs a SQL query (echoed to stdout when logging is on).
    def query(sql)
      puts sql if @logging
      @client.query(sql)
    end

    # Prepares a SQL statement (echoed to stdout when logging is on).
    def prepare(sql)
      puts sql if @logging
      @client.prepare(sql)
    end

    # Escapes a string for safe interpolation into SQL.
    def escape(string)
      @client.escape(string)
    end

    private

    # Percent-escapes the username for use in a URL path segment.
    def escaped(username)
      @uri.escape(username.score)
    end

    # GETs an API path relative to the xtools base URI.
    def fetch(path)
      @getter.get("#{@base_uri}/#{path}")
    end

    # First value of the first row of a COUNT-style query, as an Integer.
    def count(sql)
      query(sql).first.values[0].to_i
    end
  end
end
|
#! /bin/env ruby
#
############################################################
# Author: Alice "Duchess" Archer
# Name: rirc
# Description: IRC framework for IRC bots written in ruby
############################################################
require 'socket'
require 'openssl'
# A parsed IRC line: the command, origin nick, channel and trailing text,
# plus the raw line it was parsed from.
class IRC_message
  def initialize(command, nick, channel, message, ircmsg)
    @command = command
    @nick    = nick
    @channel = channel
    @message = message
    @ircmsg  = ircmsg
  end

  # Plain readers for each captured field.
  attr_reader :ircmsg, :message, :nick, :command, :channel

  # Matches +regex+ against the field named by +type+ ("command", "nick",
  # "channel" or "message"; anything else falls back to the message body).
  # Returns true/false rather than MatchData.
  def check_regex(type, regex)
    field =
      case type
      when "command" then @command
      when "nick"    then @nick
      when "channel" then @channel
      else                @message
      end
    field.match(regex) ? true : false
  end

  # True when the message body matches +regex+.
  def message_regex(regex)
    @message.match(regex) ? true : false
  end
end
# Base class for bot plugins. A plugin carries a trigger regex, a name,
# the file it was loaded from, a help string and the list of channels it
# serves ("any" means every channel). Subclasses override #script.
class Pluginf
  def initialize(regex, name, file_name, help)
    @regexp    = Regexp.new(regex.to_s)
    @name      = name.to_s
    @file_name = file_name.to_s
    @help      = help
    @chan_list = ["any"]
  end

  # Hook point: subclasses put the plugin behaviour here.
  def script(message, nick, chan)
  end

  # The compiled trigger regex.
  def regex
    @regexp
  end

  # Channels this plugin serves.
  def chans
    @chan_list
  end

  # Plugin name.
  def name
    @name
  end

  # Source file the plugin was loaded from.
  def file_name
    @file_name
  end

  # Help text for the plugin.
  def help
    @help
  end

  # Hook for releasing resources on unload; the default does nothing.
  def cleanup
    ""
  end
end
# Loads, unloads, reloads and dispatches Pluginf-style plugins kept in a
# plugin folder. Plugin files are expected to define a Loader class whose
# #get_plugin returns the plugin instance.
class Plugin_manager
  def initialize(plugin_folder)
    @plugins = []
    @plugin_folder = plugin_folder
  end
  # returns all the plugins ([] when none are loaded)
  def plugins
    if @plugins.length == 0
      return []
    end
    return @plugins
  end
  # search functions
  # Names of every loaded plugin.
  def get_names
    @plugins.map { |a| a.name }
  end
  # Help strings of every loaded plugin.
  def get_helps
    @plugins.map { |a| a.help }
  end
  # File names of every loaded plugin.
  def get_files
    @plugins.map { |a| a.file_name }
  end
  # Channel lists of every loaded plugin.
  def get_chans
    @plugins.map { |a| a.chans }
  end
  # Regexes of every loaded plugin.
  def get_regexps
    @plugins.map { |a| a.regex }
  end
  # Plugin by name, or nil when it is not loaded.
  def get_plugin(name)
    @plugins.find { |a| a.name == name }
  end
  # Help for the named plugin, or nil when it is not loaded.
  def plugin_help(name)
    plugin = get_plugin(name)
    plugin ? plugin.help : nil
  end
  # File name for the named plugin, or nil when it is not loaded.
  def plugin_file_name(name)
    plugin = get_plugin(name)
    plugin ? plugin.file_name : nil
  end
  # Channel list for the named plugin, or nil when it is not loaded.
  def plugin_chans(name)
    plugin = get_plugin(name)
    plugin ? plugin.chans : nil
  end
  # Regex for the named plugin, or nil when it is not loaded.
  def plugin_regex(name)
    plugin = get_plugin(name)
    plugin ? plugin.regex : nil
  end
  # True when a plugin with the given name is loaded.
  def plugin_loaded(name)
    !get_plugin(name).nil?
  end
  # Runs the named plugin against +message+ (an IRC_message) when the
  # plugin's regex matches the message body and the plugin serves this
  # channel ("any" = all channels).
  # inputs: name, IRC_message, admins array, backlog array
  # output: the plugin's response string, "" when it does not fire, or an
  # error string when the plugin raises.
  def check_plugin(name, message, admins, backlog)
    plugin = get_plugin(name)
    return "" if plugin.nil?
    if message.message.match(plugin.regex) &&
       (plugin.chans.include?("any") || plugin.chans.include?(message.channel))
      begin
        return plugin.script(message, admins, backlog) # plugins use the IRC_message object
      rescue => e
        return "an error occured for plugin: #{name}"
      end
    end
    ""
  end
  # Runs every loaded plugin against +message+ and collects the responses.
  # inputs: IRC_message, admins array, backlog array; output: string array.
  def check_all(message, admins, backlog)
    @plugins.map { |a| check_plugin(a.name, message, admins, backlog) }
  end
  # Loads a plugin file (name given with or without the .rb extension)
  # from the plugin folder. The file must define a Loader class.
  # Returns a human-readable status string.
  def plugin_load(name)
    $LOAD_PATH << "#{@plugin_folder}"
    # BUG FIX: the extension check used /.rb$/ with an unescaped dot, so
    # any name merely ending in "rb" (e.g. "herb") skipped the ".rb"
    # suffixing below.
    has_ext = !name.match(/\.rb$/).nil?
    file = has_ext ? name : "#{name}.rb"
    label = has_ext ? name[0..-4] : name
    response = ""
    begin
      load "#{file}"
      temp_plugin = Loader.new.get_plugin
      return "Plugin #{name} is already loaded" if plugin_loaded(temp_plugin.name)
      @plugins.push(temp_plugin)
      response = "#{label} loaded"
    rescue ScriptError, StandardError => e
      # ScriptError covers LoadError, which a plain `rescue => e` missed.
      response = "cannot load plugin"
    end
    $LOAD_PATH << './'
    return response
  end
  # Unloads the named plugin after giving it a chance to clean up.
  def unload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    get_plugin(name).cleanup
    @plugins.delete_if { |a| a.name == name }
    return "plugin #{name} unloaded"
  end
  # Unloads then reloads the named plugin from its original file.
  def reload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    temp_file_name = get_plugin(name).file_name
    unload(name)
    plugin_load(temp_file_name)
    return "plugin #{name} reloaded"
  end
end
# Minimal IRC client: manages the (optionally TLS-wrapped) connection,
# registration, channel/admin/ignore lists, raw-line parsing and a small
# hook system (hook types: "message", "command", "ircmsg").
class IRCBot
  def initialize(network, port, nick, user_name, real_name)
    @network = network
    @port = port
    @nick = nick
    @user_name = user_name
    @real_name = real_name
    @socket = nil
    @channels = []
    @admins = []
    @ignore = []
    @hooks = {}
    @backlog = []
  end
  attr_reader :backlog, :ignore, :channels, :admins, :network, :port,
              :user_name, :real_name, :socket
  # The bot's current nick (use #nick to change it).
  def nick_name
    @nick
  end
  # Sends one raw IRC line.
  def say(message)
    @socket.puts message
  end
  def join(channel)
    say "JOIN #{channel}"
    if !@channels.include? channel then @channels.push(channel) end
  end
  def connect
    @socket = TCPSocket.open(@network, @port)
  end
  # Wraps the already-connected TCP socket in TLS.
  # NOTE(review): VERIFY_NONE disables certificate verification.
  def connect_ssl
    ssl_context = OpenSSL::SSL::SSLContext.new
    ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @socket = OpenSSL::SSL::SSLSocket.new(@socket, ssl_context)
    @socket.sync = true
    @socket.connect
  end
  def connect_pass(pass)
    say "PASS #{pass}"
  end
  # Changes the nick locally and on the server.
  def nick(nick)
    @nick = nick
    say "NICK #{nick}"
  end
  def privmsg(dest, message)
    say "PRIVMSG #{dest} :#{message}"
  end
  def action(dest, message)
    privmsg(dest, "\01ACTION #{message}\07\01")
  end
  def notice(dest, message)
    say "NOTICE #{dest} :#{message}"
  end
  def ctcp(dest, message)
    privmsg(dest, "\01VERSION #{message}\07\01")
  end
  def part(dest, message)
    say "PART #{dest} :#{message}"
  end
  def quit(message)
    say "QUIT :#{message}"
  end
  def names(dest)
    say "NAMES #{dest}"
  end
  def identify(nickserv_pass)
    say "PRIVMSG nickserv :identify #{nickserv_pass}"
  end
  # Registers with the server and, when a password is given, identifies
  # with NickServ.
  def auth(nickserv_pass)
    say "VERSION"
    say "USER #{@user_name} * * :#{@real_name}"
    nick(@nick)
    if nickserv_pass != "" and nickserv_pass != nil
      identify(nickserv_pass)
    end
  end
  # Reads one line from the socket. PINGs are answered transparently and
  # reported as "PING"; returns nil at EOF, the raw line otherwise.
  def read
    if !@socket.eof
      msg = @socket.gets
      if msg.match(/^PING :(.*)$/)
        say "PONG #{$~[1]}"
        return "PING"
      end
      return msg
    else
      return nil
    end
  end
  # Parses a raw IRC line into an IRC_message. Private messages (channel
  # equal to our nick) are rewritten so the channel is the sender's nick.
  def parse(msg)
    message_reg = msg.match(/^(:(?<prefix>\S+) )?(?<command>\S+)( (?!:)(?<params>.+?))?( :(?<trail>.+))?$/)
    nick_n = message_reg[:prefix].to_s.split("!")[0]
    command = message_reg[:command].to_s
    chan = message_reg[:params].to_s
    message = message_reg[:trail].to_s
    message = message.chomp
    if chan == @nick then chan = nick_n end
    ircmsg = IRC_message.new(command, nick_n, chan, message, msg)
    return ircmsg
  end
  def add_admin(nick)
    @admins.push(nick)
  end
  def remove_admin(nick)
    @admins.delete_if { |a| a == nick }
  end
  def add_ignore(nick)
    @ignore.push(nick)
  end
  def remove_ignore(nick)
    @ignore.delete_if { |a| a == nick }
  end
  # Registers +block+ to run for every event of the given hook type.
  def on(type, &block)
    type = type.to_s
    @hooks[type] ||= []
    @hooks[type] << block
  end
  def set_admins(admins_s)
    admins_s.each { |a| self.add_admin(a) }
  end
  def join_channels(channels_s)
    channels_s.each { |a| self.join(a) }
  end
  # Creates ./log on first run so setup's private-message logging can
  # append to it.
  def create_log
    if !File.exist?("./log")
      File.open("./log", "w+") { |fw| fw.write("Command and Privmsg LOGS") }
    end
  end
  # Connects, authenticates, joins channels and installs the built-in
  # hooks (PM logging, backlog collection, admin kill switch).
  def setup(use_ssl, use_pass, pass, nickserv_pass, channels_s)
    self.connect
    if use_ssl then self.connect_ssl end
    if use_pass then self.connect_pass(pass) end
    self.auth(nickserv_pass)
    self.create_log
    self.join_channels(channels_s)
    self.on :message do |msg|
      if msg.channel == msg.nick
        File.write("./log", msg.ircmsg, File.size("./log"), mode: 'a')
      end
      # BUG FIX: was `!self.nick_name == msg.nick`, which negates the nick
      # string (always false) and compares THAT against msg.nick, so the
      # condition never held and the backlog was never populated.
      if self.nick_name != msg.nick and !self.ignore.include? msg.nick
        @backlog.push(msg)
      end
    end
    self.on :message do |msg|
      if self.admins.include? msg.nick and msg.message_regex(/^`plsgo$/) then abort end
    end
  end
  # Main loop: read, parse and fan each message out to the registered
  # hooks. Hook errors are swallowed so one bad handler cannot kill the
  # loop.
  def start!
    until self.socket.eof? do
      ircmsg = self.read
      msg = self.parse(ircmsg)
      if ircmsg == "PING" or self.ignore.include?(msg.nick) then next end
      begin
        hooks = @hooks['message']
        if hooks != nil
          hooks.each { |h| h.call(msg) }
        end
      rescue => e
        # do not do anything
      end
      begin
        hooks = @hooks['command']
        if hooks != nil
          hooks.each { |h| h.call(msg.channel, msg.command) }
        end
      rescue => e
        # do not do anything
      end
      begin
        hooks = @hooks['ircmsg']
        if hooks != nil
          hooks.each { |h| h.call(msg.nick, msg.command, msg.channel, msg.message) }
        end
      rescue => e
        # do not do anything
      end
    end
  end
end
# Registers regex → handler blocks and fires every handler whose regex
# matches an incoming message.
class Commands_manager
  def initialize
    @reg_s = []
    @hook_s = []
    @size = 0
  end

  # Registers +block+ to be called when a message matches +reg+.
  def on(reg, &block)
    @reg_s.push(Regexp.new(reg.to_s))
    @hook_s << block
    @size += 1
  end

  # Calls every registered hook whose regex matches +msg+ (an object
  # responding to #message_regex, e.g. IRC_message).
  def check_all(bot, msg, plugins)
    0.upto(@size - 1) do |i|
      @hook_s[i].call(bot, msg, plugins) if msg.message_regex(@reg_s[i])
    end
  end

  # All registered hooks, in registration order.
  def hooks
    @hook_s
  end

  # All registered regexes, in registration order.
  def regexes
    @reg_s
  end

  # Number of registered handlers.
  def size
    @size
  end
end
update
#! /bin/env ruby
#
############################################################
# Author: Alice "Duchess" Archer
# Name: rirc
# Description: IRC framework for IRC bots written in ruby
############################################################
require 'socket'
require 'openssl'
# A parsed IRC line: the command, origin nick, channel and trailing text,
# plus the raw line it was parsed from.
class IRC_message
  def initialize(command, nick, channel, message, ircmsg)
    @command = command
    @nick    = nick
    @channel = channel
    @message = message
    @ircmsg  = ircmsg
  end

  # Plain readers for each captured field.
  attr_reader :ircmsg, :message, :nick, :command, :channel

  # Matches +regex+ against the field named by +type+ ("command", "nick",
  # "channel" or "message"; anything else falls back to the message body).
  # Returns true/false rather than MatchData.
  def check_regex(type, regex)
    field =
      case type
      when "command" then @command
      when "nick"    then @nick
      when "channel" then @channel
      else                @message
      end
    field.match(regex) ? true : false
  end

  # True when the message body matches +regex+.
  def message_regex(regex)
    @message.match(regex) ? true : false
  end
end
# Base class for bot plugins. A plugin carries a trigger regex, a name,
# the file it was loaded from, a help string and the list of channels it
# serves ("any" means every channel). Subclasses override #script.
class Pluginf
  def initialize(regex, name, file_name, help)
    @regexp    = Regexp.new(regex.to_s)
    @name      = name.to_s
    @file_name = file_name.to_s
    @help      = help
    @chan_list = ["any"]
  end

  # Hook point: subclasses put the plugin behaviour here.
  def script(message, nick, chan)
  end

  # The compiled trigger regex.
  def regex
    @regexp
  end

  # Channels this plugin serves.
  def chans
    @chan_list
  end

  # Plugin name.
  def name
    @name
  end

  # Source file the plugin was loaded from.
  def file_name
    @file_name
  end

  # Help text for the plugin.
  def help
    @help
  end

  # Hook for releasing resources on unload; the default does nothing.
  def cleanup
    ""
  end
end
# Loads, unloads, reloads and dispatches Pluginf-style plugins kept in a
# plugin folder. Plugin files are expected to define a Loader class whose
# #get_plugin returns the plugin instance.
class Plugin_manager
  def initialize(plugin_folder)
    @plugins = []
    @plugin_folder = plugin_folder
  end
  # returns all the plugins ([] when none are loaded)
  def plugins
    if @plugins.length == 0
      return []
    end
    return @plugins
  end
  # search functions
  # Names of every loaded plugin.
  def get_names
    @plugins.map { |a| a.name }
  end
  # Help strings of every loaded plugin.
  def get_helps
    @plugins.map { |a| a.help }
  end
  # File names of every loaded plugin.
  def get_files
    @plugins.map { |a| a.file_name }
  end
  # Channel lists of every loaded plugin.
  def get_chans
    @plugins.map { |a| a.chans }
  end
  # Regexes of every loaded plugin.
  def get_regexps
    @plugins.map { |a| a.regex }
  end
  # Plugin by name, or nil when it is not loaded.
  def get_plugin(name)
    @plugins.find { |a| a.name == name }
  end
  # Help for the named plugin, or nil when it is not loaded.
  def plugin_help(name)
    plugin = get_plugin(name)
    plugin ? plugin.help : nil
  end
  # File name for the named plugin, or nil when it is not loaded.
  def plugin_file_name(name)
    plugin = get_plugin(name)
    plugin ? plugin.file_name : nil
  end
  # Channel list for the named plugin, or nil when it is not loaded.
  def plugin_chans(name)
    plugin = get_plugin(name)
    plugin ? plugin.chans : nil
  end
  # Regex for the named plugin, or nil when it is not loaded.
  def plugin_regex(name)
    plugin = get_plugin(name)
    plugin ? plugin.regex : nil
  end
  # True when a plugin with the given name is loaded.
  def plugin_loaded(name)
    !get_plugin(name).nil?
  end
  # Runs the named plugin against +message+ (an IRC_message) when the
  # plugin's regex matches the message body and the plugin serves this
  # channel ("any" = all channels).
  # inputs: name, IRC_message, admins array, backlog array
  # output: the plugin's response string, "" when it does not fire, or an
  # error string when the plugin raises.
  def check_plugin(name, message, admins, backlog)
    plugin = get_plugin(name)
    return "" if plugin.nil?
    if message.message.match(plugin.regex) &&
       (plugin.chans.include?("any") || plugin.chans.include?(message.channel))
      begin
        return plugin.script(message, admins, backlog) # plugins use the IRC_message object
      rescue => e
        return "an error occured for plugin: #{name}"
      end
    end
    ""
  end
  # Runs every loaded plugin against +message+ and collects the responses.
  # inputs: IRC_message, admins array, backlog array; output: string array.
  def check_all(message, admins, backlog)
    @plugins.map { |a| check_plugin(a.name, message, admins, backlog) }
  end
  # Loads a plugin file (name given with or without the .rb extension)
  # from the plugin folder. The file must define a Loader class.
  # Returns a human-readable status string.
  def plugin_load(name)
    $LOAD_PATH << "#{@plugin_folder}"
    # BUG FIX: the extension check used /.rb$/ with an unescaped dot, so
    # any name merely ending in "rb" (e.g. "herb") skipped the ".rb"
    # suffixing below.
    has_ext = !name.match(/\.rb$/).nil?
    file = has_ext ? name : "#{name}.rb"
    label = has_ext ? name[0..-4] : name
    response = ""
    begin
      load "#{file}"
      temp_plugin = Loader.new.get_plugin
      return "Plugin #{name} is already loaded" if plugin_loaded(temp_plugin.name)
      @plugins.push(temp_plugin)
      response = "#{label} loaded"
    rescue ScriptError, StandardError => e
      # ScriptError covers LoadError, which a plain `rescue => e` missed.
      response = "cannot load plugin"
    end
    $LOAD_PATH << './'
    return response
  end
  # Unloads the named plugin after giving it a chance to clean up.
  def unload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    get_plugin(name).cleanup
    @plugins.delete_if { |a| a.name == name }
    return "plugin #{name} unloaded"
  end
  # Unloads then reloads the named plugin from its original file.
  def reload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    temp_file_name = get_plugin(name).file_name
    unload(name)
    plugin_load(temp_file_name)
    return "plugin #{name} reloaded"
  end
end
# Minimal IRC client: manages the (optionally TLS-wrapped) connection,
# registration, channel/admin/ignore lists, raw-line parsing and a small
# hook system (hook types: "message", "command", "ircmsg").
class IRCBot
  def initialize(network, port, nick, user_name, real_name)
    @network = network
    @port = port
    @nick = nick
    @user_name = user_name
    @real_name = real_name
    @socket = nil
    @channels = []
    @admins = []
    @ignore = []
    @hooks = {}
    @backlog = []
  end
  attr_reader :backlog, :ignore, :channels, :admins, :network, :port,
              :user_name, :real_name, :socket
  # The bot's current nick (use #nick to change it).
  def nick_name
    @nick
  end
  # Sends one raw IRC line.
  def say(message)
    @socket.puts message
  end
  def join(channel)
    say "JOIN #{channel}"
    if !@channels.include? channel then @channels.push(channel) end
  end
  def connect
    @socket = TCPSocket.open(@network, @port)
  end
  # Wraps the already-connected TCP socket in TLS.
  # NOTE(review): VERIFY_NONE disables certificate verification.
  def connect_ssl
    ssl_context = OpenSSL::SSL::SSLContext.new
    ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @socket = OpenSSL::SSL::SSLSocket.new(@socket, ssl_context)
    @socket.sync = true
    @socket.connect
  end
  def connect_pass(pass)
    say "PASS #{pass}"
  end
  # Changes the nick locally and on the server.
  def nick(nick)
    @nick = nick
    say "NICK #{nick}"
  end
  def privmsg(dest, message)
    say "PRIVMSG #{dest} :#{message}"
  end
  def action(dest, message)
    privmsg(dest, "\01ACTION #{message}\07\01")
  end
  def notice(dest, message)
    say "NOTICE #{dest} :#{message}"
  end
  def ctcp(dest, message)
    privmsg(dest, "\01VERSION #{message}\07\01")
  end
  def part(dest, message)
    say "PART #{dest} :#{message}"
  end
  def quit(message)
    say "QUIT :#{message}"
  end
  def names(dest)
    say "NAMES #{dest}"
  end
  def identify(nickserv_pass)
    say "PRIVMSG nickserv :identify #{nickserv_pass}"
  end
  # Registers with the server and, when a password is given, identifies
  # with NickServ.
  def auth(nickserv_pass)
    say "VERSION"
    say "USER #{@user_name} * * :#{@real_name}"
    nick(@nick)
    if nickserv_pass != "" and nickserv_pass != nil
      identify(nickserv_pass)
    end
  end
  # Reads one line from the socket. PINGs are answered transparently and
  # reported as "PING"; returns nil at EOF, the raw line otherwise.
  def read
    if !@socket.eof
      msg = @socket.gets
      if msg.match(/^PING :(.*)$/)
        say "PONG #{$~[1]}"
        return "PING"
      end
      return msg
    else
      return nil
    end
  end
  # Parses a raw IRC line into an IRC_message. Private messages (channel
  # equal to our nick) are rewritten so the channel is the sender's nick.
  def parse(msg)
    message_reg = msg.match(/^(:(?<prefix>\S+) )?(?<command>\S+)( (?!:)(?<params>.+?))?( :(?<trail>.+))?$/)
    nick_n = message_reg[:prefix].to_s.split("!")[0]
    command = message_reg[:command].to_s
    chan = message_reg[:params].to_s
    message = message_reg[:trail].to_s
    message = message.chomp
    if chan == @nick then chan = nick_n end
    ircmsg = IRC_message.new(command, nick_n, chan, message, msg)
    return ircmsg
  end
  def add_admin(nick)
    @admins.push(nick)
  end
  def remove_admin(nick)
    @admins.delete_if { |a| a == nick }
  end
  def add_ignore(nick)
    @ignore.push(nick)
  end
  def remove_ignore(nick)
    @ignore.delete_if { |a| a == nick }
  end
  # Registers +block+ to run for every event of the given hook type.
  def on(type, &block)
    type = type.to_s
    @hooks[type] ||= []
    @hooks[type] << block
  end
  def set_admins(admins_s)
    admins_s.each { |a| self.add_admin(a) }
  end
  def join_channels(channels_s)
    channels_s.each { |a| self.join(a) }
  end
  # Creates ./log on first run so setup's private-message logging can
  # append to it.
  def create_log
    if !File.exist?("./log")
      File.open("./log", "w+") { |fw| fw.write("Command and Privmsg LOGS") }
    end
  end
  # Connects, authenticates, joins channels and installs the built-in
  # hooks (PM logging, backlog collection, admin kill switch).
  def setup(use_ssl, use_pass, pass, nickserv_pass, channels_s)
    self.connect
    if use_ssl then self.connect_ssl end
    if use_pass then self.connect_pass(pass) end
    self.auth(nickserv_pass)
    self.create_log
    self.join_channels(channels_s)
    self.on :message do |msg|
      if msg.channel == msg.nick
        File.write("./log", msg.ircmsg, File.size("./log"), mode: 'a')
      end
      # BUG FIX: was `!self.nick_name == msg.nick`, which negates the nick
      # string (always false) and compares THAT against msg.nick, so the
      # condition never held and the backlog was never populated.
      if self.nick_name != msg.nick and !self.ignore.include? msg.nick
        @backlog.push(msg)
      end
    end
    self.on :message do |msg|
      if self.admins.include? msg.nick and msg.message_regex(/^`plsgo$/) then abort end
    end
  end
  # Main loop: read, parse and fan each message out to the registered
  # hooks. Hook errors are swallowed so one bad handler cannot kill the
  # loop.
  def start!
    until self.socket.eof? do
      ircmsg = self.read
      msg = self.parse(ircmsg)
      if ircmsg == "PING" or self.ignore.include?(msg.nick) then next end
      begin
        hooks = @hooks['message']
        if hooks != nil
          hooks.each { |h| h.call(msg) }
        end
      rescue => e
        # do not do anything
      end
      begin
        hooks = @hooks['command']
        if hooks != nil
          hooks.each { |h| h.call(msg.channel, msg.command) }
        end
      rescue => e
        # do not do anything
      end
      begin
        hooks = @hooks['ircmsg']
        if hooks != nil
          hooks.each { |h| h.call(msg.nick, msg.command, msg.channel, msg.message) }
        end
      rescue => e
        # do not do anything
      end
    end
  end
end
# Registers regex → handler blocks and fires each handler whose regex
# matches an incoming message.
class Commands_manager
  def initialize
    @reg_s = []
    @hook_s = []
    @size = 0
  end
  # Registers +block+ to be called when a message matches +reg+.
  def on(reg, &block)
    reg = Regexp.new(reg.to_s)
    @reg_s.push(reg)
    @hook_s << block
    @size += 1
  end
  # Calls every registered hook whose regex matches +msg+ (an object
  # responding to #message_regex, e.g. IRC_message).
  # BUG FIX: debug lines `p "reg: #{reg_s[i]}"` / `p "hook #{hook_s[i]}"`
  # referenced undefined locals (missing the @ prefix) and raised
  # NameError on every call; they have been removed, matching the
  # working revision of this class.
  def check_all(bot, msg, plugins)
    0.upto(@size - 1) do |i|
      if msg.message_regex(@reg_s[i])
        @hook_s[i].call(bot, msg, plugins)
      end
    end
  end
  # All registered hooks, in registration order.
  def hooks
    return @hook_s
  end
  # All registered regexes, in registration order.
  def regexes
    return @reg_s
  end
  # Number of registered handlers.
  def size
    return @size
  end
end
|
#! /bin/env ruby
#
############################################################
# Author: Alice "Duchess" Archer
# Name: rirc
# Description: IRC framework for IRC bots written in ruby
############################################################
require 'socket'
require 'openssl'
# A parsed IRC line: command, origin nick, channel and trailing text.
class IRC_message
  # Plain readers for each captured field.
  attr_reader :command, :nick, :channel, :message

  def initialize(command, nick, channel, message)
    @command = command
    @nick    = nick
    @channel = channel
    @message = message
  end
end
# Base class for bot plugins. A plugin carries a trigger regex, a name,
# the file it was loaded from, a help string and the list of channels it
# serves ("any" means every channel). Subclasses override #script.
class Pluginf
  def initialize(regex, name, file_name, help)
    @regexp    = Regexp.new(regex.to_s)
    @name      = name.to_s
    @file_name = file_name.to_s
    @help      = help
    @chan_list = ["any"]
  end

  # Hook point: subclasses put the plugin behaviour here.
  def script(message, nick, chan)
  end

  # The compiled trigger regex.
  def regex
    @regexp
  end

  # Channels this plugin serves.
  def chans
    @chan_list
  end

  # Plugin name.
  def name
    @name
  end

  # Source file the plugin was loaded from.
  def file_name
    @file_name
  end

  # Help text for the plugin.
  def help
    @help
  end

  # Hook for releasing resources on unload; the default does nothing.
  def cleanup
    ""
  end
end
# Loads, unloads and dispatches Pluginf-style plugins from a folder.
# Plugin files hand the plugin instance back through the $temp_plugin
# global.
class Plugin_manager
  def initialize(plugin_folder)
    @plugins = []
    @plugin_folder = plugin_folder
  end
  # returns all the plugins
  def plugins
    return @plugins
  end
  # search functions
  # Plugin by name, or nil when it is not loaded.
  def get_plugin(name)
    @plugins.each { |a| if a.name == name then return a end }
    return nil
  end
  # Help for the named plugin, or nil when it is not loaded.
  def plugin_help(name)
    @plugins.each { |a| if a.name == name then return a.help end }
    return nil
  end
  # File name for the named plugin, or nil when it is not loaded.
  def plugin_file_name(name)
    @plugins.each { |a| if a.name == name then return a.file_name end }
    return nil
  end
  # Channel list for the named plugin, or nil when it is not loaded.
  def plugin_chans(name)
    @plugins.each { |a| if a.name == name then return a.chans end }
    return nil
  end
  # Regex for the named plugin, or nil when it is not loaded.
  def plugin_regex(name)
    @plugins.each { |a| if a.name == name then return a.regex end }
    return nil
  end
  # check if a plugin is loaded
  def plugin_loaded(name)
    @plugins.each do |a|
      if a.name == name
        return true
      end
    end
    return false
  end
  # Runs the named plugin against +message+ (an IRC_message) when its
  # regex matches and the plugin serves this channel ("any" = all).
  # Returns the plugin's response string or "".
  def check_plugin(name, message, admins, backlog)
    if !plugin_loaded(name)
      return ""
    else
      if message.message.match(get_plugin(name).regex) and (get_plugin(name).chans.include? "any" or get_plugin(name).chans.include? message.channel)
        return get_plugin(name).script(message, admins, backlog) # plugins use the IRC_message object
      end
    end
    return ""
  end
  # Runs every loaded plugin against +message+ and collects responses.
  def check_all(message, admins, backlog)
    response = []
    @plugins.each { |a| response.push(check_plugin(a.name, message, admins, backlog)) }
    return response
  end
  # Loads a plugin file (name given with or without the .rb extension)
  # from the plugin folder; the file assigns its plugin to $temp_plugin.
  def load(name)
    if plugin_loaded(name)
      return "plugin is already loaded"
    end
    $LOAD_PATH << "#{@plugin_folder}"
    response = ""
    $temp_plugin = nil # the plugin file assigns the instance to this global
    # BUG FIX: the extension check used /.rb$/ (unescaped dot), and the
    # bare `load "..."` call re-entered THIS method — which shadows
    # Kernel#load — recursing until SystemStackError. Kernel.load is now
    # called explicitly, and LoadError (a ScriptError, which the plain
    # `rescue => e` never caught) is rescued as well.
    if name.match(/\.rb$/)
      begin
        Kernel.load "#{name}"
        @plugins.push($temp_plugin)
        $temp_plugin = nil
        response = "#{name[0..-4]} loaded"
      rescue ScriptError, StandardError => e
        response = "cannot load plugin"
      end
    else
      begin
        Kernel.load "#{name}.rb"
        @plugins.push($temp_plugin)
        $temp_plugin = nil
        response = "#{name} loaded"
      rescue ScriptError, StandardError => e
        response = "cannot load plugin"
      end
    end
    $LOAD_PATH << './'
    return response
  end
  # Unloads the named plugin after letting it clean up.
  def unload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    get_plugin(name).cleanup
    @plugins.delete_if { |a| a.name == name }
  end
  # Unloads then reloads the named plugin from its original file.
  def reload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    temp_name = name
    temp_file_name = get_plugin(name).file_name
    unload(temp_name)
    load(temp_file_name)
  end
end
# Minimal IRC client (early revision): socket management, registration
# and basic IRC commands.
class IRCBot
  def initialize(network, port, nick, user_name, real_name)
    @network = network
    @port = port
    @nick = nick
    @user_name = user_name
    @real_name = real_name
    @socket = nil
    @channels = []
    @admins = []
  end
  attr_reader :network, :port, :user_name, :real_name, :socket
  # Sends one raw IRC line.
  def say(message)
    @socket.puts message
  end
  def join(channel)
    say "JOIN #{channel}"
    @channels.push(channel)
  end
  def connect
    @socket = TCPSocket.open(@network, @port)
  end
  # Wraps the already-connected TCP socket in TLS.
  # NOTE(review): VERIFY_NONE disables certificate verification.
  def connect_ssl
    ssl_context = OpenSSL::SSL::SSLContext.new
    ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @socket = OpenSSL::SSL::SSLSocket.new(@socket, ssl_context)
    @socket.sync = true
    @socket.connect
  end
  def connect_pass(pass)
    say "PASS #{pass}"
  end
  # Nick reader/setter in one method. BUG FIX: this class used to define
  # a zero-arg `nick` reader and then a one-arg `nick(nick)` setter; the
  # second definition silently replaced the first, and the setter never
  # updated @nick. Called with no argument it now returns the current
  # nick; with an argument it updates @nick and announces the change.
  def nick(new_nick = nil)
    return @nick if new_nick.nil?
    @nick = new_nick
    say "NICK #{new_nick}"
  end
  def privmsg(dest, message)
    say "PRIVMSG #{dest} :#{message}"
  end
  def action(dest, message)
    privmsg(dest, "\01ACTION #{message}\07\01")
  end
  def notice(dest, message)
    say "NOTICE #{dest} :#{message}"
  end
  def ctcp(dest, message)
    privmsg(dest, "\01VERSION #{message}\07\01")
  end
  def part(dest, message)
    say "PART #{dest} :#{message}"
  end
  def quit(message)
    say "QUIT :#{message}"
  end
  def names(dest)
    say "NAMES #{dest}"
  end
  # Registers with the server and, when a NickServ password was set,
  # identifies with NickServ.
  def auth
    say "VERSION"
    say "USER #{@user_name} * * :#{@real_name}"
    nick(@nick)
    # BUG FIX: @nickserv_pass is never assigned anywhere in this class,
    # and `nil != ""` is true, so the original sent an empty NickServ
    # password line on every registration. Guard against nil as well.
    if @nickserv_pass && @nickserv_pass != ""
      say "PRIVMSG nickserv :#{@nickserv_pass}"
    end
  end
  # Reads one line; PINGs are answered and reported as "PING"; nil at EOF.
  def read
    if !@socket.eof
      msg = @socket.gets
      if msg.match(/^PING :(.*)$/)
        say "PONG #{$~[1]}"
        return "PING"
      end
      return msg
    else
      return nil
    end
  end
  # Parses a raw IRC line into an IRC_message.
  def parse(msg)
    message_reg = msg.match(/^(:(?<prefix>\S+) )?(?<command>\S+)( (?!:)(?<params>.+?))?( :(?<trail>.+))?$/)
    nick = message_reg[:prefix].to_s.split("!")[0]
    command = message_reg[:command].to_s
    chan = message_reg[:params].to_s
    message = message_reg[:trail].to_s
    message = message.chomp
    ircmsg = IRC_message.new(command, nick, chan, message)
    return ircmsg
  end
  def add_admin(nick)
    @admins.push(nick)
  end
  def remove_admin(nick)
    @admins.delete_if { |a| a == nick }
  end
end
update
#! /bin/env ruby
#
############################################################
# Author: Alice "Duchess" Archer
# Name: rirc
# Description: IRC framework for IRC bots written in ruby
############################################################
require 'socket'
require 'openssl'
# A parsed IRC line: command, origin nick, channel and trailing text.
class IRC_message
  # Plain readers for each captured field.
  attr_reader :command, :nick, :channel, :message

  def initialize(command, nick, channel, message)
    @command = command
    @nick    = nick
    @channel = channel
    @message = message
  end
end
# Base class for bot plugins. A plugin carries a trigger regex, a name,
# the file it was loaded from, a help string and the list of channels it
# serves ("any" means every channel). Subclasses override #script.
class Pluginf
  def initialize(regex, name, file_name, help)
    @regexp    = Regexp.new(regex.to_s)
    @name      = name.to_s
    @file_name = file_name.to_s
    @help      = help
    @chan_list = ["any"]
  end

  # Hook point: subclasses put the plugin behaviour here.
  def script(message, nick, chan)
  end

  # The compiled trigger regex.
  def regex
    @regexp
  end

  # Channels this plugin serves.
  def chans
    @chan_list
  end

  # Plugin name.
  def name
    @name
  end

  # Source file the plugin was loaded from.
  def file_name
    @file_name
  end

  # Help text for the plugin.
  def help
    @help
  end

  # Hook for releasing resources on unload; the default does nothing.
  def cleanup
    ""
  end
end
# Loads, unloads and dispatches Pluginf-style plugins from a folder.
# Plugin files hand the plugin instance back through the $temp_plugin
# global.
class Plugin_manager
  def initialize(plugin_folder)
    @plugins = []
    @plugin_folder = plugin_folder
  end
  # returns all the plugins ([] when none are loaded)
  def plugins
    if @plugins.length == 0
      return []
    end
    return @plugins
  end
  # search functions
  # Plugin by name, or nil when it is not loaded.
  def get_plugin(name)
    @plugins.each { |a| if a.name == name then return a end }
    return nil
  end
  # Help for the named plugin, or nil when it is not loaded.
  def plugin_help(name)
    @plugins.each { |a| if a.name == name then return a.help end }
    return nil
  end
  # File name for the named plugin, or nil when it is not loaded.
  def plugin_file_name(name)
    @plugins.each { |a| if a.name == name then return a.file_name end }
    return nil
  end
  # Channel list for the named plugin, or nil when it is not loaded.
  def plugin_chans(name)
    @plugins.each { |a| if a.name == name then return a.chans end }
    return nil
  end
  # Regex for the named plugin, or nil when it is not loaded.
  def plugin_regex(name)
    @plugins.each { |a| if a.name == name then return a.regex end }
    return nil
  end
  # check if a plugin is loaded
  def plugin_loaded(name)
    @plugins.each do |a|
      if a.name == name
        return true
      end
    end
    return false
  end
  # Runs the named plugin against +message+ (an IRC_message) when its
  # regex matches and the plugin serves this channel ("any" = all).
  # Returns the plugin's response string or "".
  def check_plugin(name, message, admins, backlog)
    if !plugin_loaded(name)
      return ""
    else
      if message.message.match(get_plugin(name).regex) and (get_plugin(name).chans.include? "any" or get_plugin(name).chans.include? message.channel)
        return get_plugin(name).script(message, admins, backlog) # plugins use the IRC_message object
      end
    end
    return ""
  end
  # Runs every loaded plugin against +message+ and collects responses.
  def check_all(message, admins, backlog)
    response = []
    @plugins.each { |a| response.push(check_plugin(a.name, message, admins, backlog)) }
    return response
  end
  # Loads a plugin file (name given with or without the .rb extension)
  # from the plugin folder; the file assigns its plugin to $temp_plugin.
  def load(name)
    if plugin_loaded(name)
      return "plugin is already loaded"
    end
    $LOAD_PATH << "#{@plugin_folder}"
    response = ""
    $temp_plugin = nil # the plugin file assigns the instance to this global
    # BUG FIX: the extension check used /.rb$/ (unescaped dot), and the
    # bare `load "..."` call re-entered THIS method — which shadows
    # Kernel#load — recursing until SystemStackError. Kernel.load is now
    # called explicitly, and LoadError (a ScriptError, which the plain
    # `rescue => e` never caught) is rescued as well.
    if name.match(/\.rb$/)
      begin
        Kernel.load "#{name}"
        @plugins.push($temp_plugin)
        $temp_plugin = nil
        response = "#{name[0..-4]} loaded"
      rescue ScriptError, StandardError => e
        response = "cannot load plugin"
      end
    else
      begin
        Kernel.load "#{name}.rb"
        @plugins.push($temp_plugin)
        $temp_plugin = nil
        response = "#{name} loaded"
      rescue ScriptError, StandardError => e
        response = "cannot load plugin"
      end
    end
    $LOAD_PATH << './'
    return response
  end
  # Unloads the named plugin after letting it clean up.
  def unload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    get_plugin(name).cleanup
    @plugins.delete_if { |a| a.name == name }
  end
  # Unloads then reloads the named plugin from its original file.
  def reload(name)
    if !plugin_loaded(name)
      return "plugin is not loaded"
    end
    temp_name = name
    temp_file_name = get_plugin(name).file_name
    unload(temp_name)
    load(temp_file_name)
  end
end
# Minimal IRC client: owns the socket, speaks the wire protocol
# (NICK/USER/PRIVMSG/...) and parses incoming lines into IRC_message objects.
class IRCBot
  attr_reader :network, :port, :user_name, :real_name, :socket

  def initialize(network, port, nick, user_name, real_name)
    @network = network
    @port = port
    @nick = nick
    @user_name = user_name
    @real_name = real_name
    # BUG FIX: @nickserv_pass was never initialized, so auth's
    # `@nickserv_pass != ""` check on nil always passed and the bot sent
    # an empty identify message. Default to "" (no nickserv password).
    @nickserv_pass = ""
    @socket = nil
    @channels = []
    @admins = []
  end

  # With no argument, returns the bot's configured nick. With an argument,
  # sends a NICK command to the server.
  # BUG FIX: the original defined a zero-arg reader `nick` and later a
  # one-arg writer `nick(nick)`; the second definition silently replaced
  # the first, so the reader always raised ArgumentError. Merged here.
  def nick(new_nick = nil)
    return @nick if new_nick.nil?
    say "NICK #{new_nick}"
  end

  # Sends a raw protocol line.
  def say(message)
    @socket.puts message
  end

  def join(channel)
    say "JOIN #{channel}"
    @channels.push(channel)
  end

  def connect
    @socket = TCPSocket.open(@network, @port)
  end

  # Wraps the already-opened plain socket in TLS.
  # NOTE(review): certificate verification is disabled (VERIFY_NONE) —
  # confirm this is intentional for the networks the bot connects to.
  def connect_ssl
    ssl_context = OpenSSL::SSL::SSLContext.new
    ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @socket = OpenSSL::SSL::SSLSocket.new(@socket, ssl_context)
    @socket.sync = true
    @socket.connect
  end

  def connect_pass(pass)
    say "PASS #{pass}"
  end

  def privmsg(dest, message)
    say "PRIVMSG #{dest} :#{message}"
  end

  # CTCP ACTION ("/me ...").
  def action(dest, message)
    privmsg(dest, "\01ACTION #{message}\07\01")
  end

  def notice(dest, message)
    say "NOTICE #{dest} :#{message}"
  end

  def ctcp(dest, message)
    privmsg(dest, "\01VERSION #{message}\07\01")
  end

  def part(dest, message)
    say "PART #{dest} :#{message}"
  end

  def quit(message)
    say "QUIT :#{message}"
  end

  def names(dest)
    say "NAMES #{dest}"
  end

  # Performs the initial registration handshake, identifying to nickserv
  # when a password has been configured.
  def auth
    say "VERSION"
    say "USER #{@user_name} * * :#{@real_name}"
    nick(@nick)
    if @nickserv_pass != ""
      say "PRIVMSG nickserv :#{@nickserv_pass}"
    end
  end

  # Reads one line from the server. Answers PING transparently (returning
  # "PING"); returns nil at end of stream.
  def read
    return nil if @socket.eof
    msg = @socket.gets
    if msg.match(/^PING :(.*)$/)
      say "PONG #{$~[1]}"
      return "PING"
    end
    msg
  end

  # Parses a raw IRC line (":prefix COMMAND params :trailing") into an
  # IRC_message value object.
  def parse(msg)
    message_reg = msg.match(/^(:(?<prefix>\S+) )?(?<command>\S+)( (?!:)(?<params>.+?))?( :(?<trail>.+))?$/)
    nick = message_reg[:prefix].to_s.split("!")[0]
    command = message_reg[:command].to_s
    chan = message_reg[:params].to_s
    message = message_reg[:trail].to_s.chomp
    IRC_message.new(command, nick, chan, message)
  end

  def add_admin(nick)
    @admins.push(nick)
  end

  def remove_admin(nick)
    @admins.delete_if { |a| a == nick }
  end
end
|
require "logger"
LOG = Logger.new(STDOUT)
# Core Ruck DSL: mixed into scripts to schedule "shreds" (cooperative
# audio-generating coroutines) on a shared Shreduler.
module Ruck
  SAMPLE_RATE = 22050
  BITS_PER_SAMPLE = 16

  # Shared sink bus that discards any samples routed to it.
  def blackhole
    @@blackhole ||= Bus.new
  end

  # Starts the shreduler (creating it on first use).
  def run
    @shreduler ||= Shreduler.new
    # BUG FIX: `log` was undefined here; the file's logger constant is LOG.
    LOG.error("Ruck already running") and return if @shreduler.running
    @shreduler.run
  end

  # Registers a new named shred to be scheduled.
  def spork(name = "unnamed", &shred)
    @shreduler ||= Shreduler.new
    @shreduler.spork(name, &shred)
  end

  # Yields samples from the current shred back to the shreduler.
  def play(samples)
    @shreduler.current_shred.yield(samples)
  end

  # Removes and finalizes the currently running shred.
  def finish
    shred = @shreduler.current_shred
    @shreduler.remove_shred shred
    shred.finish
  end
end
require File.join(File.dirname(__FILE__), "shreduling")
require File.join(File.dirname(__FILE__), "time")
require File.join(File.dirname(__FILE__), "misc", "metaid")
require File.join(File.dirname(__FILE__), "misc", "linkage")
require File.join(File.dirname(__FILE__), "ugen", "general")
require File.join(File.dirname(__FILE__), "ugen", "wav")
require File.join(File.dirname(__FILE__), "ugen", "oscillators")
# run the ruck script
# Script entry point: run the single ruck script named on the command line.
if __FILE__ == $0
include Ruck
LOG.level = Logger::WARN
# Bail out early if the script file cannot be read.
unless File.readable?(ARGV[0])
LOG.fatal "Cannot read file #{ARGV[0]}"
exit
end
# The script is loaded inside a shred so it runs under the shreduler.
spork("main") { require ARGV[0] }
run
end
Spork multiple scripts at once
require "logger"
LOG = Logger.new(STDOUT)
# Core Ruck DSL: mixed into scripts to schedule "shreds" (cooperative
# audio-generating coroutines) on a shared Shreduler.
module Ruck
  SAMPLE_RATE = 22050
  BITS_PER_SAMPLE = 16

  # Shared sink bus that discards any samples routed to it.
  def blackhole
    @@blackhole ||= Bus.new
  end

  # Starts the shreduler (creating it on first use).
  def run
    @shreduler ||= Shreduler.new
    # BUG FIX: `log` was undefined here; the file's logger constant is LOG.
    LOG.error("Ruck already running") and return if @shreduler.running
    @shreduler.run
  end

  # Registers a new named shred to be scheduled.
  def spork(name = "unnamed", &shred)
    @shreduler ||= Shreduler.new
    @shreduler.spork(name, &shred)
  end

  # Yields samples from the current shred back to the shreduler.
  def play(samples)
    @shreduler.current_shred.yield(samples)
  end

  # Removes and finalizes the currently running shred.
  def finish
    shred = @shreduler.current_shred
    @shreduler.remove_shred shred
    shred.finish
  end
end
require File.join(File.dirname(__FILE__), "shreduling")
require File.join(File.dirname(__FILE__), "time")
require File.join(File.dirname(__FILE__), "misc", "metaid")
require File.join(File.dirname(__FILE__), "misc", "linkage")
require File.join(File.dirname(__FILE__), "ugen", "general")
require File.join(File.dirname(__FILE__), "ugen", "wav")
require File.join(File.dirname(__FILE__), "ugen", "oscillators")
# run the ruck scripts
# Script entry point: run every ruck script named on the command line.
if __FILE__ == $0
  include Ruck
  LOG.level = Logger::WARN
  filenames = ARGV
  # Verify all scripts are readable before sporking any of them, so a bad
  # argument aborts cleanly instead of failing mid-run.
  filenames.each do |filename|
    unless File.readable?(filename)
      # BUG FIX: the interpolation had been mangled to "#(unknown)";
      # report the actual offending filename.
      LOG.fatal "Cannot read file #{filename}"
      exit
    end
  end
  # Each script runs in its own "main" shred under the shared shreduler.
  filenames.each { |filename| spork("main") { require filename } }
  run
end
|
salt: migrate from core
require 'formula'
# Homebrew formula for SALT/snfit, a supernova light-curve fitter.
class Salt < Formula
homepage 'http://supernovae.in2p3.fr/salt/doku.php?id=start'
url 'http://supernovae.in2p3.fr/salt/lib/exe/fetch.php?media=snfit-2.4.0.tar.gz'
sha1 '7f6e36e78199d8dec0458b464643e1e90fc51282'
version '2.4'
depends_on :fortran
conflicts_with 'fastbit', :because => 'both install `include/filter.h`'
# Model data files required at runtime (pointed to by SALTPATH).
resource 'data' do
url 'http://supernovae.in2p3.fr/salt/lib/exe/fetch.php?media=salt2-4_data.tgz'
sha1 '92c34fe3363fe6a88c8cda75c543503a9b3196f7'
end
# Sample light curve used by the test block below.
resource '03d4ag' do
url 'http://supernovae.in2p3.fr/salt/lib/exe/fetch.php?media=jla-03d4ag.tar.gz'
sha1 'b227f5e50ea227375720f3c00dd849f964cfa2ba'
end
def install
ENV.deparallelize
# the libgfortran.a path needs to be set explicitly
libgfortran = `$FC --print-file-name libgfortran.a`.chomp
ENV.append 'LDFLAGS', "-L#{File.dirname(libgfortran)} -lgfortran"
system "./configure", "--prefix=#{prefix}", "--disable-static"
system "make install"
# install all the model data
(prefix/'data').install resource('data')
# for testing
(prefix/'03d4ag').install resource('03d4ag')
end
# Fits the bundled 03D4ag light curve and checks the result file appears.
test do
ENV['SALTPATH'] = "#{prefix}/data"
cp_r Dir["#{prefix}/03d4ag/*"], '.'
system bin/"snfit", testpath/"lc-03D4ag.list"
assert File.exist?("result_salt2.dat")
end
def caveats
<<-EOS.undent
You should add the following to your .bashrc or equivalent:
export SALTPATH=#{prefix}/data
EOS
end
end
|
# Tiny compiler DSL: Ruby values and helper calls compile down to
# Gobstones source strings.
module Gobgems
  # BUG FIX: this reopened ::Fixnum, which was removed in Ruby 3.2.
  # Reopening ::Integer is backward compatible (Fixnum instances inherit
  # from Integer on older Rubies) and also covers big integers.
  class ::Integer
    def compile
      to_s
    end
  end

  class ::TrueClass
    def compile
      'true'
    end
  end

  class ::FalseClass
    def compile
      'false'
    end
  end

  # A pre-compiled Gobstones value: just carries its source representation.
  class Value
    def initialize(compiled)
      @compiled = compiled
    end

    def compile
      @compiled
    end
  end

  # Directions
  East = Value.new('Este')
  North = Value.new('Norte')
  South = Value.new('Sur')
  West = Value.new('Oeste')
  # Colors
  Red = Value.new('Rojo')
  Blue = Value.new('Azul')
  Green = Value.new('Verde')
  Black = Value.new('Negro')

  # Gobstones primitive commands.
  def pop(color)
    Value.new("Sacar(#{color.compile})")
  end

  def push(color)
    Value.new("Poner(#{color.compile})")
  end

  def move(direction)
    Value.new("Mover(#{direction.compile})")
  end
end
include Gobgems
# RSpec suite for the Gobgems compiler DSL: each example checks that a
# Ruby value or helper call compiles to the expected Gobstones source.
describe "valores" do
describe "numeros" do
it { expect(1.compile).to eq '1' }
it { expect(10.compile).to eq '10' }
end
describe "booleanos" do
it { expect(true.compile).to eq 'true' }
it { expect(false.compile).to eq 'false' }
end
describe "direcciones" do
it { expect(East.compile).to eq 'Este' }
it { expect(North.compile).to eq 'Norte' }
it { expect(South.compile).to eq 'Sur' }
it { expect(West.compile).to eq 'Oeste' }
end
describe "colores" do
it { expect(Blue.compile).to eq 'Azul' }
it { expect(Red.compile).to eq 'Rojo' }
it { expect(Black.compile).to eq 'Negro' }
it { expect(Green.compile).to eq 'Verde' }
end
# Commands: Poner/Mover/Sacar wrap their argument's compiled form.
describe "acciones" do
it { expect((push Red).compile).to eq 'Poner(Rojo)' }
it { expect((push Green).compile).to eq 'Poner(Verde)' }
it { expect((move West).compile).to eq 'Mover(Oeste)' }
it { expect((move East).compile).to eq 'Mover(Este)' }
it { expect((pop Red).compile).to eq 'Sacar(Rojo)' }
it { expect((pop Green).compile).to eq 'Sacar(Verde)' }
end
end
Rename Value -> Expression
# Tiny compiler DSL: Ruby values and helper calls compile down to
# Gobstones source strings.
module Gobgems
  # BUG FIX: this reopened ::Fixnum, which was removed in Ruby 3.2.
  # Reopening ::Integer is backward compatible (Fixnum instances inherit
  # from Integer on older Rubies) and also covers big integers.
  class ::Integer
    def compile
      to_s
    end
  end

  class ::TrueClass
    def compile
      'true'
    end
  end

  class ::FalseClass
    def compile
      'false'
    end
  end

  # A pre-compiled Gobstones expression: just carries its source form.
  class Expression
    def initialize(compiled)
      @compiled = compiled
    end

    def compile
      @compiled
    end
  end

  # Directions
  East = Expression.new('Este')
  North = Expression.new('Norte')
  South = Expression.new('Sur')
  West = Expression.new('Oeste')
  # Colors
  Red = Expression.new('Rojo')
  Blue = Expression.new('Azul')
  Green = Expression.new('Verde')
  Black = Expression.new('Negro')

  # Gobstones primitive commands.
  def pop(color)
    Expression.new("Sacar(#{color.compile})")
  end

  def push(color)
    Expression.new("Poner(#{color.compile})")
  end

  def move(direction)
    Expression.new("Mover(#{direction.compile})")
  end
end
include Gobgems
# RSpec suite for the Gobgems compiler DSL: each example checks that a
# Ruby value or helper call compiles to the expected Gobstones source.
describe "valores" do
describe "numeros" do
it { expect(1.compile).to eq '1' }
it { expect(10.compile).to eq '10' }
end
describe "booleanos" do
it { expect(true.compile).to eq 'true' }
it { expect(false.compile).to eq 'false' }
end
describe "direcciones" do
it { expect(East.compile).to eq 'Este' }
it { expect(North.compile).to eq 'Norte' }
it { expect(South.compile).to eq 'Sur' }
it { expect(West.compile).to eq 'Oeste' }
end
describe "colores" do
it { expect(Blue.compile).to eq 'Azul' }
it { expect(Red.compile).to eq 'Rojo' }
it { expect(Black.compile).to eq 'Negro' }
it { expect(Green.compile).to eq 'Verde' }
end
# Commands: Poner/Mover/Sacar wrap their argument's compiled form.
describe "acciones" do
it { expect((push Red).compile).to eq 'Poner(Rojo)' }
it { expect((push Green).compile).to eq 'Poner(Verde)' }
it { expect((move West).compile).to eq 'Mover(Oeste)' }
it { expect((move East).compile).to eq 'Mover(Este)' }
it { expect((pop Red).compile).to eq 'Sacar(Rojo)' }
it { expect((pop Green).compile).to eq 'Sacar(Verde)' }
end
end |
require 'formula'
# Homebrew formula for szip, the compression library used by HDF5.
class Szip < Formula
homepage 'http://www.hdfgroup.org/HDF5/release/obtain5.html#extlibs'
url 'http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz'
sha256 'a816d95d5662e8279625abdbea7d0e62157d7d1f028020b1075500bf483ed5ef'
# Pre-built bottles are served from a custom S3 bucket.
bottle do
root_url 'https://juliabottles.s3.amazonaws.com'
cellar :any
end
option :universal
def install
ENV.universal_binary if build.universal?
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make install"
end
end
We need a revision here too
require 'formula'
# Homebrew formula for szip, the compression library used by HDF5.
class Szip < Formula
homepage 'http://www.hdfgroup.org/HDF5/release/obtain5.html#extlibs'
url 'http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz'
sha256 'a816d95d5662e8279625abdbea7d0e62157d7d1f028020b1075500bf483ed5ef'
# Bump to force a rebuild/rebottle without a version change.
revision 1
# Pre-built bottles are served from a custom S3 bucket.
bottle do
root_url 'https://juliabottles.s3.amazonaws.com'
cellar :any
end
option :universal
def install
ENV.universal_binary if build.universal?
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make install"
end
end
|
# Generate pages from individual records in yml files
# (c) 2014-2016 Adolfo Villafiorita
# Distributed under the conditions of the MIT License
module Jekyll
module Sanitizer
  # Turn an arbitrary record value into a safe, lowercase filename:
  # integers become their decimal string; strings are lowercased, trimmed,
  # spaces become dashes, and anything outside [A-Za-z0-9_.-] is dropped.
  def sanitize_filename(name)
    return name.to_s if name.is_a?(Integer)
    name.downcase.strip.gsub(' ', '-').gsub(/[^\w.-]/, '')
  end
end
# this class is used to tell Jekyll to generate a page
class DataPage < Page
include Sanitizer
# - site and base are copied from other plugins: to be honest, I am not sure what they do
#
# - `index_files` specifies if we want to generate named folders (true) or not (false)
# - `dir` is the default output directory
# - `data` is the data defined in `_data.yml` of the record for which we are generating a page
# - `name` is the key in `data` which determines the output filename
# - `template` is the name of the template for generating the page
# - `extension` is the extension for the generated file
def initialize(site, base, index_files, dir, data, name, template, extension)
@site = site
@base = base
# @dir is the directory where we want to output the page
# @name is the name of the page to generate
#
# the value of these variables changes according to whether we
# want to generate named folders or not
filename = sanitize_filename(data[name]).to_s
if index_files
# NOTE(review): the `index_files ?` ternary below is redundant — this
# branch only runs when index_files is truthy.
@dir = dir + (index_files ? "/" + filename + "/" : "")
@name = "index" + "." + extension.to_s
else
@dir = dir
@name = filename + "." + extension.to_s
end
# Standard Jekyll::Page setup: parse the name and load the template's
# YAML front matter from _layouts.
self.process(@name)
self.read_yaml(File.join(base, '_layouts'), template + ".html")
self.data['title'] = data[name]
# add all the information defined in _data for the current record to the
# current page (so that we can access it with liquid tags)
self.data.merge!(data)
end
end
class DataPagesGenerator < Generator
safe true
# generate loops over _config.yml/page_gen invoking the DataPage
# constructor for each record for which we want to generate a page
def generate(site)
# page_gen_dirs determines whether we want to generate index pages
# (name/index.html) or standard files (name.html). This information
# is passed to the DataPage constructor, which sets the @dir variable
# as required by this directive
index_files = site.config['page_gen-dirs'] == true
# data contains the specification of the data for which we want to generate
# the pages (look at the README file for its specification)
data = site.config['page_gen']
if data
data.each do |data_spec|
# Each spec falls back to its 'data' key for template and output dir.
template = data_spec['template'] || data_spec['data']
name = data_spec['name']
dir = data_spec['dir'] || data_spec['data']
extension = data_spec['extension'] || "html"
if site.layouts.key? template
# records is the list of records defined in _data.yml
# for which we want to generate different pages
records = nil
# 'data' may be a dotted path (e.g. "books.fiction") into site.data.
data_spec['data'].split('.').each do |level|
if records.nil?
records = site.data[level]
else
records = records[level]
end
end
records.each do |record|
site.pages << DataPage.new(site, site.source, index_files, dir, record, name, template, extension)
end
else
# NOTE(review): this trailing `if not site.layouts.key? template` is
# redundant — we are already in the else branch of that same check.
puts "error. could not find template #{template}" if not site.layouts.key? template
end
end
end
end
end
module DataPageLinkGenerator
  include Sanitizer
  # use it like this: {{input | datapage_url: dir}}
  # to generate a link to a data_page.
  #
  # the filter is smart enough to generate different link styles
  # according to the data_page-dirs directive ...
  #
  # ... however, the filter is not smart enough to support different
  # extensions for filenames.
  #
  # Thus, if you use the `extension` feature of this plugin, you
  # need to generate the links by hand
  def datapage_url(input, dir)
    # BUG FIX: read the site config through the Liquid render context
    # instead of Jekyll.configuration({}), which re-parsed _config.yml
    # (and printed a "Configuration file:" log line) for every link.
    @gen_dir = @context.registers[:site].config['page_gen-dirs']
    if @gen_dir then
      dir + "/" + sanitize_filename(input) + "/index.html"
    else
      dir + "/" + sanitize_filename(input) + ".html"
    end
  end
end
end
Liquid::Template.register_filter(Jekyll::DataPageLinkGenerator)
Fix _plugins/data_page_generator.rb
Evita di ricaricare il file di configurazione per ogni issue e di
floodare i log con righe di "Configuration file: "
# Generate pages from individual records in yml files
# (c) 2014-2016 Adolfo Villafiorita
# Distributed under the conditions of the MIT License
module Jekyll
module Sanitizer
  # Turn an arbitrary record value into a safe, lowercase filename:
  # integers become their decimal string; strings are lowercased, trimmed,
  # spaces become dashes, and anything outside [A-Za-z0-9_.-] is dropped.
  def sanitize_filename(name)
    return name.to_s if name.is_a?(Integer)
    name.downcase.strip.gsub(' ', '-').gsub(/[^\w.-]/, '')
  end
end
# this class is used to tell Jekyll to generate a page
class DataPage < Page
include Sanitizer
# - site and base are copied from other plugins: to be honest, I am not sure what they do
#
# - `index_files` specifies if we want to generate named folders (true) or not (false)
# - `dir` is the default output directory
# - `data` is the data defined in `_data.yml` of the record for which we are generating a page
# - `name` is the key in `data` which determines the output filename
# - `template` is the name of the template for generating the page
# - `extension` is the extension for the generated file
def initialize(site, base, index_files, dir, data, name, template, extension)
@site = site
@base = base
# @dir is the directory where we want to output the page
# @name is the name of the page to generate
#
# the value of these variables changes according to whether we
# want to generate named folders or not
filename = sanitize_filename(data[name]).to_s
if index_files
# NOTE(review): the `index_files ?` ternary below is redundant — this
# branch only runs when index_files is truthy.
@dir = dir + (index_files ? "/" + filename + "/" : "")
@name = "index" + "." + extension.to_s
else
@dir = dir
@name = filename + "." + extension.to_s
end
# Standard Jekyll::Page setup: parse the name and load the template's
# YAML front matter from _layouts.
self.process(@name)
self.read_yaml(File.join(base, '_layouts'), template + ".html")
self.data['title'] = data[name]
# add all the information defined in _data for the current record to the
# current page (so that we can access it with liquid tags)
self.data.merge!(data)
end
end
class DataPagesGenerator < Generator
safe true
# generate loops over _config.yml/page_gen invoking the DataPage
# constructor for each record for which we want to generate a page
def generate(site)
# page_gen_dirs determines whether we want to generate index pages
# (name/index.html) or standard files (name.html). This information
# is passed to the DataPage constructor, which sets the @dir variable
# as required by this directive
index_files = site.config['page_gen-dirs'] == true
# data contains the specification of the data for which we want to generate
# the pages (look at the README file for its specification)
data = site.config['page_gen']
if data
data.each do |data_spec|
# Each spec falls back to its 'data' key for template and output dir.
template = data_spec['template'] || data_spec['data']
name = data_spec['name']
dir = data_spec['dir'] || data_spec['data']
extension = data_spec['extension'] || "html"
if site.layouts.key? template
# records is the list of records defined in _data.yml
# for which we want to generate different pages
records = nil
# 'data' may be a dotted path (e.g. "books.fiction") into site.data.
data_spec['data'].split('.').each do |level|
if records.nil?
records = site.data[level]
else
records = records[level]
end
end
records.each do |record|
site.pages << DataPage.new(site, site.source, index_files, dir, record, name, template, extension)
end
else
# NOTE(review): this trailing `if not site.layouts.key? template` is
# redundant — we are already in the else branch of that same check.
puts "error. could not find template #{template}" if not site.layouts.key? template
end
end
end
end
end
module DataPageLinkGenerator
include Sanitizer
# use it like this: {{input | datapage_url: dir}}
# to generate a link to a data_page.
#
# the filter is smart enough to generate different link styles
# according to the data_page-dirs directive ...
#
# ... however, the filter is not smart enough to support different
# extensions for filenames.
#
# Thus, if you use the `extension` feature of this plugin, you
# need to generate the links by hand
def datapage_url(input, dir)
# Read the site config from the Liquid render context; this avoids
# re-parsing _config.yml (via Jekyll.configuration) on every link.
@gen_dir = @context.registers[:site].config['page_gen-dirs']
if @gen_dir then
dir + "/" + sanitize_filename(input) + "/index.html"
else
dir + "/" + sanitize_filename(input) + ".html"
end
end
end
end
Liquid::Template.register_filter(Jekyll::DataPageLinkGenerator)
|
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/rack/heartbeat/version', __FILE__)
# Gemspec for the rack-heartbeat middleware gem.
Gem::Specification.new do |gem|
gem.authors = ["James Cox"]
gem.email = ["james@imaj.es"]
# NOTE(review): description/summary are still generator placeholders —
# fill these in before releasing.
gem.description = %q{TODO: Write a gem description}
gem.summary = %q{TODO: Write a gem summary}
gem.homepage = ""
# File list comes from git, so the gem must be built from a git checkout.
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "rack-heartbeat"
gem.require_paths = ["lib"]
gem.version = Rack::Heartbeat::VERSION
end
Add rack dep
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/rack/heartbeat', __FILE__)
# Gemspec for the rack-heartbeat middleware gem.
Gem::Specification.new do |gem|
gem.authors = ["James Cox"]
gem.email = ["james@imaj.es"]
# NOTE(review): description/summary are still generator placeholders —
# fill these in before releasing.
gem.description = %q{TODO: Write a gem description}
gem.summary = %q{TODO: Write a gem summary}
gem.homepage = ""
# File list comes from git, so the gem must be built from a git checkout.
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "rack-heartbeat"
gem.require_paths = ["lib"]
# VERSION is expected to be defined by the file required at the top of
# this gemspec — presumably lib/rack/heartbeat.rb; verify it defines it.
gem.version = Rack::Heartbeat::VERSION
# Runtime dependency: this is a Rack middleware.
gem.add_dependency('rack')
end
|
#
# -*- encoding: utf-8 -*-
# Gemspec packaging the puppetlabs-stdlib module as a gem, so module
# authors can pull it in from a Gemfile during CI runs.
Gem::Specification.new do |s|
  s.name = "puppetmodule-stdlib"
  s.version = "4.0.2"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Puppet Labs"]
  s.date = "2013-04-12"
  # Long description assembled from fragments to keep the source readable.
  s.description = [ 'This Gem format of the stdlib module is intended to make',
                    'it easier for _module authors_ to resolve dependencies',
                    'using a Gemfile when running automated testing jobs like',
                    'Travis or Jenkins. The recommended best practice for',
                    'installation by end users is to use the `puppet module',
                    'install` command to install stdlib from the [Puppet',
                    'Forge](http://forge.puppetlabs.com/puppetlabs/stdlib).' ].join(' ')
  s.email = "puppet-dev@puppetlabs.com"
  s.executables = []
  s.files = [ 'CHANGELOG', 'CONTRIBUTING.md', 'Gemfile', 'LICENSE', 'Modulefile',
              'README.markdown', 'README_DEVELOPER.markdown', 'RELEASE_PROCESS.markdown',
              'Rakefile', 'spec/spec.opts' ]
  s.files += Dir['lib/**/*.rb'] + Dir['manifests/**/*.pp'] + Dir['tests/**/*.pp'] + Dir['spec/**/*.rb']
  s.homepage = "http://forge.puppetlabs.com/puppetlabs/stdlib"
  s.rdoc_options = ["--title", "Puppet Standard Library Development Gem", "--main", "README.markdown", "--line-numbers"]
  s.require_paths = ["lib"]
  s.rubyforge_project = "puppetmodule-stdlib"
  s.rubygems_version = "1.8.24"
  s.summary = "This gem provides a way to make the standard library available for other module spec testing tasks."
  # The original carried empty version-dependent branches here; only this
  # assignment had any effect.
  s.specification_version = 3 if s.respond_to? :specification_version
end
Remove gemspec
This is not a current practice and it's very outdated. Remove it to
avoid confusion.
|
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# Define RUBY_ENGINE on interpreters that predate it (Ruby 1.8.7).
begin
RUBY_ENGINE
rescue NameError
RUBY_ENGINE = "ruby" # Not defined in Ruby 1.8.7
end
# Gemspec for rdf-microdata; version and date come from the VERSION file.
Gem::Specification.new do |gem|
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = "rdf-microdata"
gem.homepage = "http://github.com/ruby-rdf/rdf-microdata"
gem.license = 'Public Domain' if gem.respond_to?(:license=)
gem.summary = "Microdata reader for Ruby."
gem.description = gem.summary
gem.rubyforge_project = 'rdf-microdata'
gem.authors = %w(Gregg Kellogg)
gem.email = 'public-rdf-ruby@w3.org'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS README UNLICENSE VERSION) + Dir.glob('lib/**/*.rb') + Dir.glob('etc/*')
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false
gem.required_ruby_version = '>= 1.8.1'
gem.requirements = []
# Runtime dependencies
gem.add_runtime_dependency 'rdf', '>= 1.0'
gem.add_runtime_dependency 'json', '>= 1.7.6'
gem.add_runtime_dependency 'rdf-xsd', '>= 1.0'
gem.add_runtime_dependency 'htmlentities', '>= 4.3.0'
# Development-only dependencies
gem.add_development_dependency 'nokogiri' , '>= 1.5.6'
gem.add_development_dependency 'equivalent-xml' , '>= 0.3.0'
gem.add_development_dependency 'open-uri-cached', '>= 0.0.5'
gem.add_development_dependency 'yard' , '>= 0.8.3'
gem.add_development_dependency 'spira', '= 0.0.12'
gem.add_development_dependency 'rspec', '>= 2.12.0'
gem.add_development_dependency 'rdf-spec', '>= 1.0'
gem.add_development_dependency 'rdf-rdfa'
gem.add_development_dependency 'rdf-turtle'
gem.add_development_dependency 'rdf-isomorphic'
gem.post_install_message = nil
end
Update homepage and dependencies.
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# Define RUBY_ENGINE on interpreters that predate it (Ruby 1.8.7).
begin
RUBY_ENGINE
rescue NameError
RUBY_ENGINE = "ruby" # Not defined in Ruby 1.8.7
end
# Gemspec for rdf-microdata; version and date come from the VERSION file.
Gem::Specification.new do |gem|
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = "rdf-microdata"
gem.homepage = "http://ruby-rdf.github.com/rdf-microdata"
gem.license = 'Public Domain' if gem.respond_to?(:license=)
gem.summary = "Microdata reader for Ruby."
gem.description = gem.summary
gem.rubyforge_project = 'rdf-microdata'
gem.authors = %w(Gregg Kellogg)
gem.email = 'public-rdf-ruby@w3.org'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS README UNLICENSE VERSION) + Dir.glob('lib/**/*.rb') + Dir.glob('etc/*')
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false
gem.required_ruby_version = '>= 1.8.1'
gem.requirements = []
# Runtime dependencies
gem.add_runtime_dependency 'rdf', '>= 1.0.5'
gem.add_runtime_dependency 'json', '>= 1.7.7'
gem.add_runtime_dependency 'rdf-xsd', '>= 1.0'
gem.add_runtime_dependency 'htmlentities', '>= 4.3.0'
# Development-only dependencies
gem.add_development_dependency 'nokogiri' , '>= 1.5.9'
gem.add_development_dependency 'equivalent-xml' , '>= 0.3.0'
gem.add_development_dependency 'open-uri-cached', '>= 0.0.5'
gem.add_development_dependency 'yard' , '>= 0.8.5'
gem.add_development_dependency 'spira', '= 0.0.12'
gem.add_development_dependency 'rspec', '>= 2.13.0'
gem.add_development_dependency 'rdf-spec', '>= 1.0'
gem.add_development_dependency 'rdf-rdfa'
gem.add_development_dependency 'rdf-turtle'
gem.add_development_dependency 'rdf-isomorphic'
gem.post_install_message = nil
end
|
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# Gemspec for rdf-json; assigned to GEMSPEC so a Rakefile can reuse it.
GEMSPEC = Gem::Specification.new do |gem|
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = 'rdf-json'
gem.homepage = 'http://rdf.rubyforge.org/'
gem.license = 'Public Domain' if gem.respond_to?(:license=)
gem.summary = 'RDF/JSON support for RDF.rb.'
gem.description = 'RDF.rb plugin for parsing/serializing RDF/JSON data.'
gem.rubyforge_project = 'rdf'
gem.authors = ['Arto Bendiken']
gem.email = 'arto.bendiken@gmail.com'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS README UNLICENSE VERSION etc/doap.json) + Dir.glob('lib/**/*.rb')
gem.bindir = %q(bin)
gem.executables = %w()
gem.default_executable = gem.executables.first
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false
gem.required_ruby_version = '>= 1.8.2'
gem.requirements = []
# Development-only dependencies
gem.add_development_dependency 'rdf-spec', '>= 0.1.4'
gem.add_development_dependency 'rspec', '>= 1.3.0'
gem.add_development_dependency 'yard' , '>= 0.5.3'
# Runtime dependencies
gem.add_runtime_dependency 'rdf', '>= 0.1.4'
gem.add_runtime_dependency 'json_pure', '>= 1.2.3'
gem.post_install_message = nil
end
Bumped the RDF.rb, JSON and YARD dependencies.
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# Gemspec for rdf-json; assigned to GEMSPEC so a Rakefile can reuse it.
GEMSPEC = Gem::Specification.new do |gem|
gem.version = File.read('VERSION').chomp
gem.date = File.mtime('VERSION').strftime('%Y-%m-%d')
gem.name = 'rdf-json'
gem.homepage = 'http://rdf.rubyforge.org/'
gem.license = 'Public Domain' if gem.respond_to?(:license=)
gem.summary = 'RDF/JSON support for RDF.rb.'
gem.description = 'RDF.rb plugin for parsing/serializing RDF/JSON data.'
gem.rubyforge_project = 'rdf'
gem.authors = ['Arto Bendiken']
gem.email = 'arto.bendiken@gmail.com'
gem.platform = Gem::Platform::RUBY
gem.files = %w(AUTHORS README UNLICENSE VERSION etc/doap.json) + Dir.glob('lib/**/*.rb')
gem.bindir = %q(bin)
gem.executables = %w()
gem.default_executable = gem.executables.first
gem.require_paths = %w(lib)
gem.extensions = %w()
gem.test_files = %w()
gem.has_rdoc = false
gem.required_ruby_version = '>= 1.8.2'
gem.requirements = []
# Development-only dependencies (pessimistic constraints for rdf-spec)
gem.add_development_dependency 'rdf-spec', '~> 0.1.10'
gem.add_development_dependency 'rspec', '>= 1.3.0'
gem.add_development_dependency 'yard' , '>= 0.5.5'
# Runtime dependencies
gem.add_runtime_dependency 'rdf', '~> 0.1.10'
gem.add_runtime_dependency 'json_pure', '~> 1.4.3'
gem.post_install_message = nil
end
|
# Copyright (c) 2014 SUSE
# Licensed under the terms of the MIT license.
When(/^I execute mgr\-sync "([^"]*)" with user "([^"]*)" and password "([^"]*)"$/) do |arg1, u, p|
$command_output = sshcmd("echo -e '#{u}\n#{p}\n' | mgr-sync #{arg1}", ignore_err: true)[:stdout]
end
When(/^I execute mgr\-sync "([^"]*)"$/) do |arg1|
$command_output = sshcmd("mgr-sync #{arg1}")[:stdout]
end
When(/^I remove the mgr\-sync cache file$/) do
$command_output = sshcmd("rm -f ~/.mgr-sync")[:stdout]
end
When(/^I execute mgr\-sync refresh$/) do
$command_output = sshcmd("mgr-sync refresh", ignore_err: true)[:stderr]
end
When(/^I execute mgr\-bootstrap "([^"]*)"$/) do |arg1|
arch = `uname -m`
arch.chomp!
if arch != "x86_64"
arch = "i586"
end
$command_output = sshcmd("mgr-bootstrap --activation-keys=1-SUSE-PKG-#{arch} #{arg1}")[:stdout]
end
When(/^I fetch "([^"]*)" from server$/) do |arg1|
output = `curl -SkO http://$TESTHOST/#{arg1}`
unless $?.success?
raise "Execute command failed: #{$!}: #{output}"
end
end
When(/^I execute "([^"]*)"$/) do |arg1|
output = `sh ./#{arg1} 2>&1`
unless $?.success?
raise "Execute command (#{arg1}) failed(#{$?}): #{$!}: #{output}"
end
end
When(/^file "([^"]*)" exists on server$/) do |arg1|
sshcmd("test -f #{arg1}")
end
When(/^file "([^"]*)" not exists on server$/) do |arg1|
sshcmd("test -f #{arg1}")
end
When(/^file "([^"]*)" contains "([^"]*)"$/) do |arg1, arg2|
output = sshcmd("grep #{arg2} #{arg1}", ignore_err: true)
unless output[:stderr].empty?
$stderr.write("-----\n#{output[:stderr]}\n-----\n")
raise "#{arg2} not found in File #{arg1}"
end
end
When(/^I check the tomcat logs for errors$/) do
output = sshcmd("grep ERROR /var/log/tomcat6/catalina.out", ignore_err: true)[:stdout]
output.each_line do |line|
puts line
end
end
When(/^I check the tomcat logs for NullPointerExceptions$/) do
output = sshcmd("grep -n1 NullPointer /var/log/tomcat6/catalina.out", ignore_err: true)[:stdout]
output.each_line do |line|
puts line
end
end
Then(/^I restart the spacewalk service$/) do
sshcmd("spacewalk-service restart")
end
Then(/^I execute spacewalk-debug on the server$/) do
sshcmd("spacewalk-debug")
end
When(/^I copy "([^"]*)"$/) do |arg1|
user = "root@"
$command_output = `echo | scp -o StrictHostKeyChecking=no #{user}$TESTHOST:#{arg1} . 2>&1`
unless $?.success?
raise "Execute command failed: #{$!}: #{$command_output}"
end
end
When(/^I copy to server "([^"]*)"$/) do |arg1|
user = "root@"
$command_output = `echo | scp -o StrictHostKeyChecking=no #{arg1} #{user}$TESTHOST: 2>&1`
unless $?.success?
raise "Execute command failed: #{$!}: #{$command_output}"
end
end
Then(/^the pxe-default-profile should be enabled$/) do
sleep(1)
step %(file "/srv/tftpboot/pxelinux.cfg/default" contains "ONTIMEOUT\ pxe-default-profile")
end
Then(/^the pxe-default-profile should be disabled$/) do
step "file \"/srv/tftpboot/pxelinux.cfg/default\" contains \"ONTIMEOUT\\ local\""
end
Then(/^the cobbler report contains "([^"]*)"$/) do |arg1|
output = sshcmd("cobbler system report --name #{$myhostname}:1", ignore_err: true)[:stdout]
unless output.include?(arg1)
raise "Not found: #{output}"
end
end
Then(/^I clean the search index on the server$/) do
output = sshcmd("/usr/sbin/rcrhn-search cleanindex", ignore_err: true)
fail if output[:stdout].include?('ERROR')
end
# Run spacewalk-channel locally with the given arguments; raise on failure.
When(/^I execute spacewalk\-channel and pass "([^"]*)"$/) do |arg1|
$command_output = `spacewalk-channel #{arg1} 2>&1`
unless $?.success?
raise "spacewalk-channel with #{arg1} command failed #{$command_output}"
end
end
# Run spacewalk-channel expecting it to fail; raise if it succeeds.
When(/^spacewalk\-channel fails with "([^"]*)"$/) do |arg1|
  $command_output = `spacewalk-channel #{arg1} 2>&1`
  if $?.success? # || $command_status.exitstatus != arg1.to_i
    # Fix: the message interpolated $status, an undefined global that is
    # always nil, yielding an empty diagnostic. Report the captured
    # command output instead so the failure is actually debuggable.
    raise "Executed command was successful: #{$command_output}"
  end
end
# Assert that the previously captured command output contains the text.
Then(/^I want to get "([^"]*)"$/) do |arg1|
  matched = $command_output.each_line.any? { |line| line.include?(arg1) }
  unless matched
    raise "'#{arg1}' not found in output '#{$command_output}'"
  end
end
# Assert that the previously captured command output does NOT contain the text.
Then(/^I wont get "([^"]*)"$/) do |arg1|
  if $command_output.each_line.any? { |line| line.include?(arg1) }
    raise "'#{arg1}' found in output '#{$command_output}'"
  end
end
# Poll (up to 21 attempts, 15 s apart) until mgr-sync refresh has written
# its SCC organizations_orders data, i.e. the refresh is finished.
# Idiom fix: `for c in 0..20` leaked an unused loop variable into the
# surrounding scope; 21.times iterates identically without doing so.
Then(/^I wait for mgr-sync refresh is finished$/) do
  21.times do
    begin
      sshcmd('ls /var/lib/spacewalk/scc/scc-data/*organizations_orders.json')
    rescue
      sleep 15
    else
      break
    end
  end
end
# NOTE(review): this asserts on @command_output (instance variable) while
# the other steps in this file write $command_output (global) — confirm
# which one the calling features actually populate.
Then(/^I should see "(.*?)" in the output$/) do |arg1|
assert_includes(@command_output, arg1)
end
# Check that systemd reports the service as enabled on the server.
Then(/^Service "([^"]*)" is enabled on the Server$/) do |service|
  state = sshcmd("systemctl is-enabled '#{service}'", ignore_err: true)[:stdout].chomp
  fail if state != "enabled"
end
# Check that systemd reports the service as currently active on the server.
Then(/^Service "([^"]*)" is running on the Server$/) do |service|
  state = sshcmd("systemctl is-active '#{service}'", ignore_err: true)[:stdout].chomp
  fail if state != "active"
end
Fix baremetal test
# Copyright (c) 2014 SUSE
# Licensed under the terms of the MIT license.
# Run "mgr-sync <args>" on the server, answering the credential prompts
# with the given user and password; stdout is captured in $command_output.
When(/^I execute mgr\-sync "([^"]*)" with user "([^"]*)" and password "([^"]*)"$/) do |arg1, u, p|
$command_output = sshcmd("echo -e '#{u}\n#{p}\n' | mgr-sync #{arg1}", ignore_err: true)[:stdout]
end
# Run "mgr-sync <args>" on the server; the step fails on non-zero exit.
When(/^I execute mgr\-sync "([^"]*)"$/) do |arg1|
$command_output = sshcmd("mgr-sync #{arg1}")[:stdout]
end
# Remove the cached mgr-sync credentials file on the server.
When(/^I remove the mgr\-sync cache file$/) do
$command_output = sshcmd("rm -f ~/.mgr-sync")[:stdout]
end
# Trigger a product/channel refresh; mgr-sync reports progress on stderr.
When(/^I execute mgr\-sync refresh$/) do
$command_output = sshcmd("mgr-sync refresh", ignore_err: true)[:stderr]
end
# Generate a bootstrap script using an activation key matching the local
# architecture; anything other than x86_64 falls back to i586.
When(/^I execute mgr\-bootstrap "([^"]*)"$/) do |arg1|
arch = `uname -m`
arch.chomp!
if arch != "x86_64"
arch = "i586"
end
$command_output = sshcmd("mgr-bootstrap --activation-keys=1-SUSE-PKG-#{arch} #{arg1}")[:stdout]
end
# Download a file from the test host over HTTP into the current directory.
When(/^I fetch "([^"]*)" from server$/) do |arg1|
output = `curl -SkO http://$TESTHOST/#{arg1}`
unless $?.success?
# NOTE(review): $! holds the last Ruby exception, not the curl error —
# likely nil here; confirm whether it should be dropped.
raise "Execute command failed: #{$!}: #{output}"
end
end
# Run a local shell script; raise with its output if it exits non-zero.
When(/^I execute "([^"]*)"$/) do |arg1|
output = `sh ./#{arg1} 2>&1`
unless $?.success?
raise "Execute command (#{arg1}) failed(#{$?}): #{$!}: #{output}"
end
end
# Succeed if the given file exists on the server (sshcmd raises otherwise).
When(/^file "([^"]*)" exists on server$/) do |arg1|
  sshcmd("test -f #{arg1}")
end
# Succeed if the given file does NOT exist on the server.
# Bug fix: this step previously ran the same "test -f" as the "exists"
# step, so it passed exactly when the file WAS present. Negate the test.
When(/^file "([^"]*)" not exists on server$/) do |arg1|
  sshcmd("test ! -f #{arg1}")
end
# Grep for arg2 inside arg1 on the server; fail when grep writes to stderr.
# NOTE(review): "pattern not found" makes grep exit 1 with EMPTY stderr, so
# a missing match may not be caught here unless sshcmd surfaces the exit
# code some other way — verify sshcmd's ignore_err semantics.
When(/^file "([^"]*)" contains "([^"]*)"$/) do |arg1, arg2|
output = sshcmd("grep #{arg2} #{arg1}", ignore_err: true)
unless output[:stderr].empty?
$stderr.write("-----\n#{output[:stderr]}\n-----\n")
raise "#{arg2} not found in File #{arg1}"
end
end
# Print every ERROR line found in the server's tomcat log.
When(/^I check the tomcat logs for errors$/) do
output = sshcmd("grep ERROR /var/log/tomcat6/catalina.out", ignore_err: true)[:stdout]
output.each_line do |line|
puts line
end
end
# Print NullPointerException hits (with one line of context, grep -n1).
When(/^I check the tomcat logs for NullPointerExceptions$/) do
output = sshcmd("grep -n1 NullPointer /var/log/tomcat6/catalina.out", ignore_err: true)[:stdout]
output.each_line do |line|
puts line
end
end
# Restart all Spacewalk / SUSE Manager services on the server.
Then(/^I restart the spacewalk service$/) do
sshcmd("spacewalk-service restart")
end
# Collect a spacewalk-debug diagnostics tarball on the server.
Then(/^I execute spacewalk-debug on the server$/) do
sshcmd("spacewalk-debug")
end
# Copy a file from the test host ($TESTHOST) to the local directory via scp.
When(/^I copy "([^"]*)"$/) do |arg1|
user = "root@"
$command_output = `echo | scp -o StrictHostKeyChecking=no #{user}$TESTHOST:#{arg1} . 2>&1`
unless $?.success?
# NOTE(review): $! holds the last Ruby exception, not the scp failure.
raise "Execute command failed: #{$!}: #{$command_output}"
end
end
# Copy a local file to the test host's home directory via scp.
When(/^I copy to server "([^"]*)"$/) do |arg1|
user = "root@"
$command_output = `echo | scp -o StrictHostKeyChecking=no #{arg1} #{user}$TESTHOST: 2>&1`
unless $?.success?
raise "Execute command failed: #{$!}: #{$command_output}"
end
end
# The tftp config must name the PXE default profile as the ONTIMEOUT
# target; a short sleep lets cobbler finish rewriting the file first.
Then(/^the pxe-default-profile should be enabled$/) do
sleep(1)
step %(file "/srv/tftpboot/pxelinux.cfg/default" contains "'ONTIMEOUT pxe-default-profile'")
end
# When disabled, ONTIMEOUT falls back to booting from the local disk.
Then(/^the pxe-default-profile should be disabled$/) do
step %(file "/srv/tftpboot/pxelinux.cfg/default" contains "'ONTIMEOUT local'")
end
# Verify the cobbler system report for this host contains the given text.
Then(/^the cobbler report contains "([^"]*)"$/) do |arg1|
output = sshcmd("cobbler system report --name #{$myhostname}:1", ignore_err: true)[:stdout]
unless output.include?(arg1)
raise "Not found: #{output}"
end
end
# Rebuild the rhn-search index; fail if the cleanindex run reports ERROR.
Then(/^I clean the search index on the server$/) do
output = sshcmd("/usr/sbin/rcrhn-search cleanindex", ignore_err: true)
fail if output[:stdout].include?('ERROR')
end
# Run spacewalk-channel locally with the given arguments; raise on failure.
When(/^I execute spacewalk\-channel and pass "([^"]*)"$/) do |arg1|
$command_output = `spacewalk-channel #{arg1} 2>&1`
unless $?.success?
raise "spacewalk-channel with #{arg1} command failed #{$command_output}"
end
end
# Run spacewalk-channel expecting it to fail; raise if it succeeds.
When(/^spacewalk\-channel fails with "([^"]*)"$/) do |arg1|
  $command_output = `spacewalk-channel #{arg1} 2>&1`
  if $?.success? # || $command_status.exitstatus != arg1.to_i
    # Fix: the message interpolated $status, an undefined global that is
    # always nil, yielding an empty diagnostic. Report the captured
    # command output instead so the failure is actually debuggable.
    raise "Executed command was successful: #{$command_output}"
  end
end
# Assert that the previously captured command output contains the text.
Then(/^I want to get "([^"]*)"$/) do |arg1|
found = false
$command_output.each_line do |line|
if line.include?(arg1)
found = true
break
end
end
unless found
raise "'#{arg1}' not found in output '#{$command_output}'"
end
end
# Assert that the previously captured command output does NOT contain it.
Then(/^I wont get "([^"]*)"$/) do |arg1|
found = false
$command_output.each_line do |line|
if line.include?(arg1)
found = true
break
end
end
if found
raise "'#{arg1}' found in output '#{$command_output}'"
end
end
# Poll (up to 21 attempts, 15 s apart) until mgr-sync refresh has written
# its SCC organizations_orders data, i.e. the refresh is finished.
# Idiom fix: `for c in 0..20` leaked an unused loop variable into the
# surrounding scope; 21.times iterates identically without doing so.
Then(/^I wait for mgr-sync refresh is finished$/) do
  21.times do
    begin
      sshcmd('ls /var/lib/spacewalk/scc/scc-data/*organizations_orders.json')
    rescue
      sleep 15
    else
      break
    end
  end
end
# NOTE(review): this asserts on @command_output (instance variable) while
# the other steps in this file write $command_output (global) — confirm
# which one the calling features actually populate.
Then(/^I should see "(.*?)" in the output$/) do |arg1|
assert_includes(@command_output, arg1)
end
# Check that systemd reports the service as enabled on the server.
Then(/^Service "([^"]*)" is enabled on the Server$/) do |service|
output = sshcmd("systemctl is-enabled '#{service}'", ignore_err: true)[:stdout]
output.chomp!
fail if output != "enabled"
end
# Check that systemd reports the service as currently active on the server.
Then(/^Service "([^"]*)" is running on the Server$/) do |service|
output = sshcmd("systemctl is-active '#{service}'", ignore_err: true)[:stdout]
output.chomp!
fail if output != "active"
end
|
# Linda: This is where our step definitions live.
# Linda: Let's not try to have low-level step definitions
# (https://github.com/cucumber/cucumber-rails/issues/174)
# Linda: We can totally do better!
# Content assertions against the current Capybara page.
# NOTE(review): the RSpec "should" syntax used throughout this file is
# deprecated in favour of expect(...); kept to match the project's config.
Then /^(?:|I) should see "(.*?)"$/ do |input|
page.should have_content input
end
Then /^(?:|I) should not see "(.*?)"$/ do |input|
page.should_not have_content input
end
Then /^(?:|I) should see an image$/ do
page.should have_xpath("//img")
# page.should have_css('img', text: 'base64') # FIXME: probably broken
end
# Single step covering both presence and absence of a CSS selector:
# the optional "not " capture picks the matcher to send.
Then /^I should (not )?see an element "(.*?)"$/ do |negate, selector|
expectation = negate ? :should_not : :should
page.send(expectation, have_css(selector))
end
Then /^(?:|I) should see the link "(.*?)" to "(.*?)"$/ do |link, url|
page.should have_link(link, :href => url)
end
# Ordering assertion: both strings present, first before second.
# NOTE(review): Capybara's session has no #include?/#index — this likely
# needs page.body; confirm whether this step is actually exercised.
Then /^(?:|I) should see "(.*?)" before "(.*?)"$/ do |first, second|
assert page.include? first
assert page.include? second
assert page.index(first) < page.index(second)
end
Then /^(?:|I) should see the submit button "(.*?)"$/ do |input|
page.should have_selector("input[type=submit][value='#{input}']")
end
Then /^(?:|I) should not see the submit button "(.*?)"$/ do |input|
page.should_not have_selector("input[type=submit][value='#{input}']")
end
Then /^(?:|I) should see a file input$/ do
page.should have_selector("input[type=file]")
end
Then /^(?:|I) should not see a file input$/ do
page.should_not have_selector("input[type=file]")
end
# Log in through the UI, first logging out any existing session.
Given /^(?:|I) am logged in as "(.*?)" with password "(.*?)"$/ do |user, password|
if page.body.include? "Logout"
visit logout_path
end
visit login_path
fill_in "user_email", :with => user
fill_in "user_password", :with => password
click_button "Log in"
end
Then /^I enter "(.*?)" for "(.*?)"$/ do |value, field|
fill_in field, :with => value
end
Then /^I log out$/ do
visit logout_path
end
# NOTE(review): the block parameter |page| below shadows Capybara's page
# helper; it works because only the parameter is used, but renaming it
# (e.g. to |title|) would be safer.
Given /^(?:|I) am on the project details page for "(.*?)"$/ do |page|
proj = Project.find_by_title page
visit "/projects/" + proj.id.to_s
end
Given /^(?:|I) am on the project edit page for "(.*?)"$/ do |page|
proj = Project.find_by_title page
visit "/projects/" + proj.id.to_s + "/edit"
end
Given /^(?:|I) am on the profile page for "(.*?)"$/ do |username|
user = User.find_by_username username
visit "/users/" + user.id.to_s
end
Given /^(?:|I) am on the profile edit page for "(.*?)"$/ do |username|
user = User.find_by_username username
visit "/users/" + user.id.to_s + "/edit"
end
Given /^(?:|I) am on the home page$/ do
visit "/"
end
Given(/^I go to the link "(.*?)"$/) do |link|
visit link
end
# Placeholder steps that intentionally fail until implemented.
And /^(?:|I) enter in "(.*?)" $/ do |entry|
flunk "Unimplemented"
end
# check if username/pw is valid, if signup is valid, etc.?
And /^(?:|my) "(.*?)" (is|are) valid$/ do |field|
flunk "Unimplemented"
end
#for things like <project's> <attribute> should be
Then /^(?:|I) should be "(.+)" page$/ do |target|
flunk "Unimplemented"
end
# Seed the database from a Gherkin table (one row per record).
Given /the following projects exist/ do |project_table|
project_table.hashes.each do |project|
Project.create(project)
end
end
Given /the following users exist/ do |user_table|
user_table.hashes.each do |user|
User.create(user)
end
end
# NOTE(review): the space before $ in this regex forces the step text to
# end with a trailing space — likely unintended.
Then /^(?:|I) press "(.*?)" $/ do |button|
click_button(button)
end
Then(/^I follow "(.*?)"$/) do |link|
click_link(link)
end
# Current-path assertions built from the referenced record's id.
Then(/^I will be on the edit page for "(.*?)"$/) do |target|
proj = Project.find_by_title target
page.current_path.should eq "/projects/" + proj.id.to_s + "/edit"
end
Then(/^I will be on the edit page for user (.*?)$/) do |id|
page.current_path.should eq "/users/" + id.to_s + "/edit"
end
Then(/^I will be on the profile page for user (.*?)$/) do |id|
page.current_path.should eq "/users/" + id.to_s
end
Then(/^I will be on the home page$/) do
page.current_path.should eq "/"
end
Given(/^I am on the login page$/) do
visit login_path
end
# Thin wrappers around Capybara form helpers.
When(/^I fill in "(.*?)" with "(.*?)"$/) do |field, value|
fill_in field, :with => value
end
When(/^I press "(.*?)"$/) do |button|
click_on button
end
When(/^I check "(.*?)"$/) do |check_box|
check check_box
end
# this could probably be combined with the above
When(/^I uncheck "(.*?)"$/) do |check_box|
uncheck check_box
end
I can't believe I could figure out what I did from my commit messages; I thought they were useless.
# Linda: This is where our step definitions live.
# Linda: Let's not try to have low-level step definitions
# (https://github.com/cucumber/cucumber-rails/issues/174)
# Linda: We can totally do better!
# Content assertions against the current Capybara page (deprecated RSpec
# "should" syntax kept to match the project's configuration).
Then /^(?:|I) should see "(.*?)"$/ do |input|
page.should have_content input
end
Then /^(?:|I) should not see "(.*?)"$/ do |input|
page.should_not have_content input
end
Then /^(?:|I) should see an image$/ do
page.should have_xpath("//img")
# page.should have_css('img', text: 'base64') # FIXME: probably broken
end
# Optional "not " capture selects the positive or negative matcher.
Then /^I should (not )?see an element "(.*?)"$/ do |negate, selector|
expectation = negate ? :should_not : :should
page.send(expectation, have_css(selector))
end
Then /^(?:|I) should see the link "(.*?)" to "(.*?)"$/ do |link, url|
page.should have_link(link, :href => url)
end
# NOTE(review): Capybara's session has no #include?/#index — this likely
# needs page.body; confirm whether this step is actually exercised.
Then /^(?:|I) should see "(.*?)" before "(.*?)"$/ do |first, second|
assert page.include? first
assert page.include? second
assert page.index(first) < page.index(second)
end
Then /^(?:|I) should see the submit button "(.*?)"$/ do |input|
page.should have_selector("input[type=submit][value='#{input}']")
end
Then /^(?:|I) should not see the submit button "(.*?)"$/ do |input|
page.should_not have_selector("input[type=submit][value='#{input}']")
end
Then /^(?:|I) should see a file input$/ do
page.should have_selector("input[type=file]")
end
Then /^(?:|I) should not see a file input$/ do
page.should_not have_selector("input[type=file]")
end
# Log in through the UI, first logging out any existing session.
# Note this version fills in "user_login" (not "user_email").
Given /^(?:|I) am logged in as "(.*?)" with password "(.*?)"$/ do |user, password|
if page.body.include? "Logout"
visit logout_path
end
visit login_path
fill_in "user_login", :with => user
fill_in "user_password", :with => password
click_button "Log in"
end
Then /^I enter "(.*?)" for "(.*?)"$/ do |value, field|
fill_in field, :with => value
end
Then /^I log out$/ do
visit logout_path
end
# NOTE(review): the block parameter |page| below shadows Capybara's page
# helper; it works because only the parameter is used.
Given /^(?:|I) am on the project details page for "(.*?)"$/ do |page|
proj = Project.find_by_title page
visit "/projects/" + proj.id.to_s
end
Given /^(?:|I) am on the project edit page for "(.*?)"$/ do |page|
proj = Project.find_by_title page
visit "/projects/" + proj.id.to_s + "/edit"
end
Given /^(?:|I) am on the profile page for "(.*?)"$/ do |username|
user = User.find_by_username username
visit "/users/" + user.id.to_s
end
Given /^(?:|I) am on the profile edit page for "(.*?)"$/ do |username|
user = User.find_by_username username
visit "/users/" + user.id.to_s + "/edit"
end
Given /^(?:|I) am on the home page$/ do
visit "/"
end
Given(/^I go to the link "(.*?)"$/) do |link|
visit link
end
# Placeholder steps that intentionally fail until implemented.
And /^(?:|I) enter in "(.*?)" $/ do |entry|
flunk "Unimplemented"
end
# check if username/pw is valid, if signup is valid, etc.?
And /^(?:|my) "(.*?)" (is|are) valid$/ do |field|
flunk "Unimplemented"
end
#for things like <project's> <attribute> should be
Then /^(?:|I) should be "(.+)" page$/ do |target|
flunk "Unimplemented"
end
# Seed the database from a Gherkin table (one row per record).
Given /the following projects exist/ do |project_table|
project_table.hashes.each do |project|
Project.create(project)
end
end
Given /the following users exist/ do |user_table|
user_table.hashes.each do |user|
User.create(user)
end
end
# NOTE(review): the space before $ in this regex forces the step text to
# end with a trailing space — likely unintended.
Then /^(?:|I) press "(.*?)" $/ do |button|
click_button(button)
end
Then(/^I follow "(.*?)"$/) do |link|
click_link(link)
end
# Current-path assertions built from the referenced record's id.
Then(/^I will be on the edit page for "(.*?)"$/) do |target|
proj = Project.find_by_title target
page.current_path.should eq "/projects/" + proj.id.to_s + "/edit"
end
Then(/^I will be on the edit page for user (.*?)$/) do |id|
page.current_path.should eq "/users/" + id.to_s + "/edit"
end
Then(/^I will be on the profile page for user (.*?)$/) do |id|
page.current_path.should eq "/users/" + id.to_s
end
Then(/^I will be on the home page$/) do
page.current_path.should eq "/"
end
Given(/^I am on the login page$/) do
visit login_path
end
# Thin wrappers around Capybara form helpers.
When(/^I fill in "(.*?)" with "(.*?)"$/) do |field, value|
fill_in field, :with => value
end
When(/^I press "(.*?)"$/) do |button|
click_on button
end
When(/^I check "(.*?)"$/) do |check_box|
check check_box
end
# this could probably be combined with the above
When(/^I uncheck "(.*?)"$/) do |check_box|
uncheck check_box
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem metadata for capistrano-didi 0.2.3, generated by jeweler from the
# Rakefile — regenerate with `rake gemspec` rather than editing here.
Gem::Specification.new do |s|
s.name = "capistrano-didi"
s.version = "0.2.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Koen Van Winckel"]
s.date = "2011-11-02"
s.description = "didi is a collection of recipes for capistrano that allow drupal to be deployed, tested and used in a CI environment"
s.email = "koenvw@gmail.com"
s.executables = ["didify", "didi"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/didi",
"bin/didify",
"capistrano-didi.gemspec",
"lib/didi/recipes/didi.rb",
"test/helper.rb",
"test/test_didi.rb"
]
s.homepage = "http://github.com/koenvw/didi"
s.licenses = ["MIT"]
s.post_install_message = "=> \"didify\" and \"didi\" commands installed. Try them out!"
s.require_paths = ["lib"]
s.rubygems_version = "1.8.11"
s.summary = "didi - drupal deployment script based on capistrano"
# NOTE(review): capistrano, railsless-deploy and capistrano-ext are listed
# as both development and runtime dependencies below (jeweler output);
# if only the runtime entries are needed, fix the Rakefile and regenerate.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_development_dependency(%q<capistrano-ext>, [">= 1.2.1"])
s.add_development_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
s.add_runtime_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_runtime_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_runtime_dependency(%q<capistrano-ext>, [">= 1.2.1"])
else
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
end
else
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
end
end
Regenerate gemspec for version 0.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem metadata for capistrano-didi 0.3.0, generated by jeweler from the
# Rakefile — regenerate with `rake gemspec` rather than editing here.
Gem::Specification.new do |s|
s.name = "capistrano-didi"
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Koen Van Winckel"]
s.date = "2011-11-24"
s.description = "didi is a collection of recipes for capistrano that allow drupal to be deployed, tested and used in a CI environment"
s.email = "koenvw@gmail.com"
s.executables = ["didify", "didi"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/didi",
"bin/didify",
"capistrano-didi.gemspec",
"lib/didi/recipes/didi.rb",
"test/helper.rb",
"test/test_didi.rb"
]
s.homepage = "http://github.com/koenvw/didi"
s.licenses = ["MIT"]
s.post_install_message = "=> \"didify\" and \"didi\" commands installed. Try them out!"
s.require_paths = ["lib"]
s.rubygems_version = "1.8.11"
s.summary = "didi - drupal deployment script based on capistrano"
# NOTE(review): capistrano, railsless-deploy and capistrano-ext are listed
# as both development and runtime dependencies below (jeweler output);
# if only the runtime entries are needed, fix the Rakefile and regenerate.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_development_dependency(%q<capistrano-ext>, [">= 1.2.1"])
s.add_development_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
s.add_runtime_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_runtime_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_runtime_dependency(%q<capistrano-ext>, [">= 1.2.1"])
else
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
end
else
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
s.add_dependency(%q<capistrano>, [">= 2.9.0"])
s.add_dependency(%q<railsless-deploy>, [">= 1.0.2"])
s.add_dependency(%q<capistrano-ext>, [">= 1.2.1"])
end
end
|
[Add] FirebaseABTesting (10.1.0)
# Podspec for FirebaseABTesting 10.1.0.
Pod::Spec.new do |s|
s.name = 'FirebaseABTesting'
# Consistency fix: spaces around '=' to match every other assignment in
# this spec (was `s.version='10.1.0'`); no semantic change.
s.version = '10.1.0'
s.summary = 'Firebase ABTesting'
s.description = <<-DESC
A/B testing is a Firebase service that lets you run experiments across users of
your mobile apps. It lets you learn how well one or more changes to
your app work with a smaller set of users before you roll out changes to all
users. You can run experiments to find the most effective ways to use
Firebase Cloud Messaging and Firebase Remote Config in your app.
  DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache-2.0', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-10.1.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
# Minimum OS versions, shared with the test spec below.
ios_deployment_target = '11.0'
osx_deployment_target = '10.13'
tvos_deployment_target = '12.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
s.swift_version = '5.3'
# All sources live under this directory; public headers re-exported below.
base_dir = "FirebaseABTesting/Sources/"
s.source_files = [
base_dir + '**/*.[mh]',
'Interop/Analytics/Public/*.h',
'FirebaseCore/Extension/*.h',
]
s.requires_arc = base_dir + '*.m'
s.public_header_files = base_dir + 'Public/FirebaseABTesting/*.h'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"'
}
s.dependency 'FirebaseCore', '~> 10.0'
# Unit tests run in an app host and mock with OCMock.
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => '10.15',
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'FirebaseABTesting/Tests/Unit/**/*.[mh]'
unit_tests.resources = 'FirebaseABTesting/Tests/Unit/Resources/*.txt'
unit_tests.requires_app_host = true
unit_tests.dependency 'OCMock'
end
end
|
[Add] FirebaseABTesting (8.14.0)
# Podspec for FirebaseABTesting 8.14.0.
Pod::Spec.new do |s|
s.name = 'FirebaseABTesting'
s.version = '8.14.0'
s.summary = 'Firebase ABTesting'
s.description = <<-DESC
A/B testing is a Firebase service that lets you run experiments across users of
your mobile apps. It lets you learn how well one or more changes to
your app work with a smaller set of users before you roll out changes to all
users. You can run experiments to find the most effective ways to use
Firebase Cloud Messaging and Firebase Remote Config in your app.
  DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.14.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
# Minimum OS versions, shared with the test spec below.
ios_deployment_target = '10.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
# All sources live under this directory; public headers re-exported below.
base_dir = "FirebaseABTesting/Sources/"
s.source_files = [
base_dir + '**/*.[mh]',
'Interop/Analytics/Public/*.h',
'FirebaseCore/Sources/Private/*.h',
]
s.requires_arc = base_dir + '*.m'
s.public_header_files = base_dir + 'Public/FirebaseABTesting/*.h'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"'
}
s.dependency 'FirebaseCore', '~> 8.0'
# Unit tests run in an app host and mock with OCMock.
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'FirebaseABTesting/Tests/Unit/**/*.[mh]'
unit_tests.resources = 'FirebaseABTesting/Tests/Unit/Resources/*.txt'
unit_tests.requires_app_host = true
unit_tests.dependency 'OCMock'
end
end
|
[Add] FirebaseFirestore (7.10.0)
# Podspec for FirebaseFirestore 7.10.0 (mixed Objective-C / C++ pod).
Pod::Spec.new do |s|
s.name = 'FirebaseFirestore'
s.version = '7.10.0'
s.summary = 'Google Cloud Firestore'
s.description = <<-DESC
Google Cloud Firestore is a NoSQL document database built for automatic scaling, high performance, and ease of application development.
  DESC
s.homepage = 'https://developers.google.com/'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-7.10.0.nightly'
}
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '10.0'
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
# Header files that constitute the interface to this module. Only Objective-C
# headers belong here, since FirebaseFirestore is primarily an Objective-C
# framework.
s.public_header_files = 'Firestore/Source/Public/FirebaseFirestore/*.h'
# source_files contains most of the header and source files for the project.
# This includes files named in `public_header_files`.
#
# Each header in this list must be globally unique, even within customer
# projects. This generally means that only Objective-C Headers with a `FIR`
# or `FST` prefix can be in `source_files`. Non-public C++ headers that have
# no filename prefix must be in `preserve_paths`. See
# https://github.com/firebase/firebase-ios-sdk/issues/4035 for more details.
#
# Note: headers from FirebaseCore can be in this list because while they're
# not globally unique, each copy will be the same. It doesn't matter which
# version wins in the global header map. The benefit of keeping them here is
# that "quick open" by filename in Xcode will continue to work.
s.source_files = [
'FirebaseCore/Sources/Private/*.h',
'Firestore/Source/Public/FirebaseFirestore/*.h',
'Firestore/Source/**/*.{m,mm}',
'Firestore/Protos/nanopb/**/*.cc',
'Firestore/core/include/**/*.{cc,mm}',
'Firestore/core/src/**/*.{cc,mm}',
'Interop/Auth/Public/*.h',
]
# Internal headers that aren't necessarily globally unique. Most C++ internal
# headers should be here to avoid polluting the global header map with
# unprefixed filenames.
#
# These filenames won't be available in Xcode's "quick open" but the types
# inside these files will be available.
s.preserve_paths = [
'Firestore/Source/API/*.h',
'Firestore/Source/Core/*.h',
'Firestore/Source/Local/*.h',
'Firestore/Source/Remote/*.h',
'Firestore/Source/Util/*.h',
'Firestore/Protos/nanopb/**/*.h',
'Firestore/core/include/**/*.h',
'Firestore/core/src/**/*.h',
'Firestore/third_party/nlohmann_json/json.hpp',
]
s.requires_arc = [
'Firestore/Source/**/*',
'Firestore/core/src/**/*.mm',
]
# Exclude alternate implementations for other platforms. These types depend
# upon link-time substitution, and there's no provision within CocoaPods for
# selecting files dynamically.
s.exclude_files = [
'Firestore/core/src/api/input_validation_std.cc',
'Firestore/core/src/remote/connectivity_monitor_noop.cc',
'Firestore/core/src/util/filesystem_win.cc',
'Firestore/core/src/util/hard_assert_stdio.cc',
'Firestore/core/src/util/log_stdio.cc',
'Firestore/core/src/util/secure_random_openssl.cc'
]
s.dependency 'FirebaseCore', '~> 7.0'
# All abseil subspecs must be pinned to the same release.
abseil_version = '0.20200225.0'
s.dependency 'abseil/algorithm', abseil_version
s.dependency 'abseil/base', abseil_version
s.dependency 'abseil/memory', abseil_version
s.dependency 'abseil/meta', abseil_version
s.dependency 'abseil/strings/strings', abseil_version
s.dependency 'abseil/time', abseil_version
s.dependency 'abseil/types', abseil_version
s.dependency 'gRPC-C++', '~> 1.28.0'
s.dependency 'leveldb-library', '~> 1.22'
s.dependency 'nanopb', '~> 2.30908.0'
s.ios.frameworks = 'SystemConfiguration', 'UIKit'
s.osx.frameworks = 'SystemConfiguration'
s.tvos.frameworks = 'SystemConfiguration', 'UIKit'
# The core is C++, so link against the C++ standard library.
s.library = 'c++'
s.pod_target_xcconfig = {
'CLANG_CXX_LANGUAGE_STANDARD' => 'c++0x',
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
"FIRFirestore_VERSION=#{s.version} " +
# The nanopb pod sets these defs, so we must too. (We *do* require 16bit
# (or larger) fields, so we'd have to set at least PB_FIELD_16BIT
# anyways.)
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' =>
'"${PODS_TARGET_SRCROOT}" ' +
'"${PODS_TARGET_SRCROOT}/Firestore/Source/Public/FirebaseFirestore" ' +
'"${PODS_ROOT}/nanopb" ' +
'"${PODS_TARGET_SRCROOT}/Firestore/Protos/nanopb"'
}
s.compiler_flags = '$(inherited) -Wreorder -Werror=reorder -Wno-comma'
end
|
[Add] FirebaseFirestore (8.13.0)
# Podspec for FirebaseFirestore 8.13.0 (mixed Objective-C / C++ pod).
Pod::Spec.new do |s|
s.name = 'FirebaseFirestore'
s.version = '8.13.0'
s.summary = 'Google Cloud Firestore'
s.description = <<-DESC
Google Cloud Firestore is a NoSQL document database built for automatic scaling, high performance, and ease of application development.
  DESC
s.homepage = 'https://developers.google.com/'
s.license = { :type => 'Apache', :file => 'Firestore/LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.13.0.nightly'
}
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '10.0'
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
# Header files that constitute the interface to this module. Only Objective-C
# headers belong here, since FirebaseFirestore is primarily an Objective-C
# framework.
s.public_header_files = 'Firestore/Source/Public/FirebaseFirestore/*.h'
# source_files contains most of the header and source files for the project.
# This includes files named in `public_header_files`.
#
# Each header in this list must be globally unique, even within customer
# projects. This generally means that only Objective-C Headers with a `FIR`
# or `FST` prefix can be in `source_files`. Non-public C++ headers that have
# no filename prefix must be in `preserve_paths`. See
# https://github.com/firebase/firebase-ios-sdk/issues/4035 for more details.
#
# Note: headers from FirebaseCore can be in this list because while they're
# not globally unique, each copy will be the same. It doesn't matter which
# version wins in the global header map. The benefit of keeping them here is
# that "quick open" by filename in Xcode will continue to work.
s.source_files = [
'FirebaseAppCheck/Sources/Interop/*.h',
'FirebaseCore/Sources/Private/*.h',
'Firestore/Source/Public/FirebaseFirestore/*.h',
'Firestore/Source/**/*.{m,mm}',
'Firestore/Protos/nanopb/**/*.cc',
'Firestore/core/include/**/*.{cc,mm}',
'Firestore/core/src/**/*.{cc,mm}',
'Interop/Auth/Public/*.h',
]
# Internal headers that aren't necessarily globally unique. Most C++ internal
# headers should be here to avoid polluting the global header map with
# unprefixed filenames.
#
# These filenames won't be available in Xcode's "quick open" but the types
# inside these files will be available.
s.preserve_paths = [
'Firestore/Source/API/*.h',
'Firestore/Source/Core/*.h',
'Firestore/Source/Local/*.h',
'Firestore/Source/Remote/*.h',
'Firestore/Source/Util/*.h',
'Firestore/Protos/nanopb/**/*.h',
'Firestore/core/include/**/*.h',
'Firestore/core/src/**/*.h',
'Firestore/third_party/nlohmann_json/json.hpp',
]
s.requires_arc = [
'Firestore/Source/**/*',
'Firestore/core/src/**/*.mm',
]
# Exclude alternate implementations for other platforms. These types depend
# upon link-time substitution, and there's no provision within CocoaPods for
# selecting files dynamically.
s.exclude_files = [
'Firestore/core/src/api/input_validation_std.cc',
'Firestore/core/src/remote/connectivity_monitor_noop.cc',
'Firestore/core/src/util/filesystem_win.cc',
'Firestore/core/src/util/hard_assert_stdio.cc',
'Firestore/core/src/util/log_stdio.cc',
'Firestore/core/src/util/secure_random_openssl.cc'
]
s.dependency 'FirebaseCore', '~> 8.0'
# All abseil subspecs must be pinned to the same release.
abseil_version = '0.20200225.0'
s.dependency 'abseil/algorithm', abseil_version
s.dependency 'abseil/base', abseil_version
s.dependency 'abseil/container/flat_hash_map', abseil_version
s.dependency 'abseil/memory', abseil_version
s.dependency 'abseil/meta', abseil_version
s.dependency 'abseil/strings/strings', abseil_version
s.dependency 'abseil/time', abseil_version
s.dependency 'abseil/types', abseil_version
s.dependency 'gRPC-C++', '~> 1.28.0'
s.dependency 'leveldb-library', '~> 1.22'
s.dependency 'nanopb', '~> 2.30908.0'
s.ios.frameworks = 'SystemConfiguration', 'UIKit'
s.osx.frameworks = 'SystemConfiguration'
s.tvos.frameworks = 'SystemConfiguration', 'UIKit'
# The core is C++, so link against the C++ standard library.
s.library = 'c++'
s.pod_target_xcconfig = {
'CLANG_CXX_LANGUAGE_STANDARD' => 'c++0x',
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
"FIRFirestore_VERSION=#{s.version} " +
# The nanopb pod sets these defs, so we must too. (We *do* require 16bit
# (or larger) fields, so we'd have to set at least PB_FIELD_16BIT
# anyways.)
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' =>
'"${PODS_TARGET_SRCROOT}" ' +
'"${PODS_TARGET_SRCROOT}/Firestore/Source/Public/FirebaseFirestore" ' +
'"${PODS_ROOT}/nanopb" ' +
'"${PODS_TARGET_SRCROOT}/Firestore/Protos/nanopb"'
}
s.compiler_flags = '$(inherited) -Wreorder -Werror=reorder -Wno-comma'
end
|
[Add] FirebaseFunctions (10.1.0)
# Podspec for FirebaseFunctions 10.1.0 — the Swift-only Cloud Functions client SDK.
Pod::Spec.new do |s|
  s.name = 'FirebaseFunctions'
  # Normalized spacing (was `s.version='10.1.0'`) for consistency with the
  # other attribute assignments.
  s.version = '10.1.0'
  s.summary = 'Cloud Functions for Firebase'
  s.description = <<-DESC
Cloud Functions for Firebase.
  DESC
  s.homepage = 'https://developers.google.com/'
  s.license = { :type => 'Apache-2.0', :file => 'LICENSE' }
  s.authors = 'Google, Inc.'
  s.source = {
    :git => 'https://github.com/Firebase/firebase-ios-sdk.git',
    :tag => 'CocoaPods-10.1.0.nightly'
  }
  # Declared once — the original assigned swift_version twice (before and
  # after the deployment targets); the redundant duplicate was removed.
  s.swift_version = '5.3'

  # Deployment targets are kept in locals so the test specs below reuse them.
  ios_deployment_target = '11.0'
  osx_deployment_target = '10.13'
  tvos_deployment_target = '12.0'
  watchos_deployment_target = '6.0'
  s.ios.deployment_target = ios_deployment_target
  s.osx.deployment_target = osx_deployment_target
  s.tvos.deployment_target = tvos_deployment_target
  s.watchos.deployment_target = watchos_deployment_target

  s.cocoapods_version = '>= 1.4.0'
  s.prefix_header_file = false
  s.source_files = [
    'FirebaseFunctions/Sources/**/*.swift',
  ]

  s.dependency 'FirebaseCore', '~> 10.0'
  s.dependency 'FirebaseCoreExtension', '~> 10.0'
  s.dependency 'FirebaseAppCheckInterop', '~> 10.0'
  s.dependency 'FirebaseAuthInterop', '~> 10.0'
  s.dependency 'FirebaseMessagingInterop', '~> 10.0'
  s.dependency 'FirebaseSharedSwift', '~> 10.0'
  s.dependency 'GTMSessionFetcher/Core', '~> 2.1'

  # Objective-C interop tests. macOS is pinned to 10.15 (above the library's
  # 10.13 minimum) — presumably required by the ObjC test sources; confirm.
  s.test_spec 'objc' do |objc_tests|
    objc_tests.platforms = {
      :ios => ios_deployment_target,
      :osx => '10.15',
      :tvos => tvos_deployment_target
    }
    objc_tests.source_files = [
      'FirebaseFunctions/Tests/ObjCIntegration/ObjC*'
    ]
  end

  # Swift integration tests.
  s.test_spec 'integration' do |int_tests|
    int_tests.platforms = {
      :ios => ios_deployment_target,
      :osx => osx_deployment_target,
      :tvos => tvos_deployment_target
    }
    int_tests.source_files = 'FirebaseFunctions/Tests/Integration/*.swift'
  end
end
|
[Add] FirebaseMessaging (8.12.0)
# Podspec for FirebaseMessaging 8.12.0 (Firebase Cloud Messaging client SDK).
Pod::Spec.new do |s|
s.name = 'FirebaseMessaging'
s.version = '8.12.0'
s.summary = 'Firebase Messaging'
s.description = <<-DESC
Firebase Messaging is a service that allows you to send data from your server to your users'
iOS device, and also to receive messages from devices on the same connection. The service handles
all aspects of queueing of messages and delivery to the target application running on the target
device, and it is completely free.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.12.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
# Deployment targets live in locals so the test specs below can reuse them.
ios_deployment_target = '10.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebaseMessaging/"
# Sources include private headers from sibling pods (FirebaseCore,
# FirebaseInstallations) and the Analytics interop headers.
s.source_files = [
base_dir + 'Sources/**/*',
base_dir + 'Sources/Protogen/nanopb/*.h',
'Interop/Analytics/Public/*.h',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
]
s.public_header_files = base_dir + 'Sources/Public/FirebaseMessaging/*.h'
s.library = 'sqlite3'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
# for nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
# Unit tests do library imports using repo-root relative paths.
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.ios.framework = 'SystemConfiguration'
s.tvos.framework = 'SystemConfiguration'
s.osx.framework = 'SystemConfiguration'
# UserNotifications is weak-linked rather than hard-linked — presumably to
# tolerate platforms/OS versions where it is unavailable; confirm.
s.weak_framework = 'UserNotifications'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'GoogleUtilities/AppDelegateSwizzler', '~> 7.7'
s.dependency 'GoogleUtilities/Reachability', '~> 7.7'
s.dependency 'GoogleUtilities/Environment', '~> 7.7'
s.dependency 'GoogleUtilities/UserDefaults', '~> 7.7'
s.dependency 'GoogleDataTransport', '~> 9.1'
s.dependency 'nanopb', '~> 2.30908.0'
# Unit tests (OCMock-based, app host required, coverage enabled).
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = [
'FirebaseMessaging/Tests/UnitTests*/*.{m,h,swift}',
'SharedTestUtilities/URLSession/*.[mh]',
]
unit_tests.requires_app_host = true
unit_tests.pod_target_xcconfig = {
'CLANG_ENABLE_OBJC_WEAK' => 'YES'
}
unit_tests.dependency 'OCMock'
end
# Integration tests run against a real Firebase project config.
s.test_spec 'integration' do |int_tests|
int_tests.scheme = { :code_coverage => true }
int_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
int_tests.source_files = 'FirebaseMessaging/Tests/IntegrationTests/*.swift'
int_tests.requires_app_host = true
int_tests.resources = 'FirebaseMessaging/Tests/IntegrationTests/Resources/GoogleService-Info.plist'
end
end
|
[Add] FirebaseMessaging (8.15.0)
# Podspec for FirebaseMessaging 8.15.0 (Firebase Cloud Messaging client SDK).
Pod::Spec.new do |s|
s.name = 'FirebaseMessaging'
s.version = '8.15.0'
s.summary = 'Firebase Messaging'
s.description = <<-DESC
Firebase Messaging is a service that allows you to send data from your server to your users'
iOS device, and also to receive messages from devices on the same connection. The service handles
all aspects of queueing of messages and delivery to the target application running on the target
device, and it is completely free.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.15.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
# Deployment targets live in locals so the test specs below can reuse them.
ios_deployment_target = '10.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebaseMessaging/"
# Sources include private headers from sibling pods (FirebaseCore,
# FirebaseInstallations) and the Analytics interop headers.
s.source_files = [
base_dir + 'Sources/**/*',
base_dir + 'Sources/Protogen/nanopb/*.h',
'Interop/Analytics/Public/*.h',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
]
s.public_header_files = base_dir + 'Sources/Public/FirebaseMessaging/*.h'
s.library = 'sqlite3'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
# for nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
# Unit tests do library imports using repo-root relative paths.
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.ios.framework = 'SystemConfiguration'
s.tvos.framework = 'SystemConfiguration'
s.osx.framework = 'SystemConfiguration'
# UserNotifications is weak-linked rather than hard-linked — presumably to
# tolerate platforms/OS versions where it is unavailable; confirm.
s.weak_framework = 'UserNotifications'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'GoogleUtilities/AppDelegateSwizzler', '~> 7.7'
s.dependency 'GoogleUtilities/Reachability', '~> 7.7'
s.dependency 'GoogleUtilities/Environment', '~> 7.7'
s.dependency 'GoogleUtilities/UserDefaults', '~> 7.7'
s.dependency 'GoogleDataTransport', '~> 9.1'
s.dependency 'nanopb', '~> 2.30908.0'
# Unit tests (OCMock-based, app host required, coverage enabled).
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = [
'FirebaseMessaging/Tests/UnitTests*/*.{m,h,swift}',
'SharedTestUtilities/URLSession/*.[mh]',
]
unit_tests.requires_app_host = true
unit_tests.pod_target_xcconfig = {
'CLANG_ENABLE_OBJC_WEAK' => 'YES'
}
unit_tests.dependency 'OCMock'
end
# Integration tests run against a real Firebase project config.
s.test_spec 'integration' do |int_tests|
int_tests.scheme = { :code_coverage => true }
int_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
int_tests.source_files = 'FirebaseMessaging/Tests/IntegrationTests/*.swift'
int_tests.requires_app_host = true
int_tests.resources = 'FirebaseMessaging/Tests/IntegrationTests/Resources/GoogleService-Info.plist'
end
end
|
# frozen_string_literal: true

require File.expand_path("lib/github-pages-health-check/version", __dir__)

# Gemspec for github-pages-health-check.
Gem::Specification.new do |s|
  s.required_ruby_version = ">= 2.2.0"
  s.name = "github-pages-health-check"
  s.version = GitHubPages::HealthCheck::VERSION
  # Fixed typo in both strings: "commons DNS" -> "common DNS".
  s.summary = "Checks your GitHub Pages site for common DNS configuration issues"
  s.description = "Checks your GitHub Pages site for common DNS configuration issues."
  s.authors = "GitHub, Inc."
  s.email = "support@github.com"
  s.homepage = "https://github.com/github/github-pages-health-check"
  s.license = "MIT"
  # Package every git-tracked file except tests.
  s.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  s.require_paths = ["lib"]
  s.add_dependency("addressable", "~> 2.3")
  s.add_dependency("dnsruby", "~> 1.60")
  s.add_dependency("octokit", "~> 4.0")
  s.add_dependency("public_suffix", ">= 2.0.2", "< 5.0")
  s.add_dependency("typhoeus", "~> 1.3")
end
chore: bump public_suffix past v4
Resolve an issue when attempting to vendor this project into gh/gh:
```sh
Bundler could not find compatible versions for gem "public_suffix":
In snapshot (Gemfile.lock):
public_suffix (= 4.0.6)
In Gemfile:
public_suffix (~> 4.0)
github-pages-health-check (= 1.7.4) was resolved to 1.7.4, which depends on
public_suffix (~> 2.0)
Running `bundle update` will rebuild your snapshot from scratch, using only
the gems in your Gemfile, which may resolve the conflict.
```
# frozen_string_literal: true

require File.expand_path("lib/github-pages-health-check/version", __dir__)

# Gemspec for github-pages-health-check (public_suffix floor raised past v4).
Gem::Specification.new do |s|
  s.required_ruby_version = ">= 2.2.0"
  s.name = "github-pages-health-check"
  s.version = GitHubPages::HealthCheck::VERSION
  # Fixed typo in both strings: "commons DNS" -> "common DNS".
  s.summary = "Checks your GitHub Pages site for common DNS configuration issues"
  s.description = "Checks your GitHub Pages site for common DNS configuration issues."
  s.authors = "GitHub, Inc."
  s.email = "support@github.com"
  s.homepage = "https://github.com/github/github-pages-health-check"
  s.license = "MIT"
  # Package every git-tracked file except tests.
  s.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  s.require_paths = ["lib"]
  s.add_dependency("addressable", "~> 2.3")
  s.add_dependency("dnsruby", "~> 1.60")
  s.add_dependency("octokit", "~> 4.0")
  s.add_dependency("public_suffix", ">= 4.0.6", "< 5.0")
  s.add_dependency("typhoeus", "~> 1.3")
end
|
# frozen_string_literal: true
require "action_cable"
require "active_support/testing/autorun"
require "puma"
require "mocha/minitest"
require "rack/mock"
begin
require "byebug"
rescue LoadError
end
# Require all the stubs and models
Dir[File.expand_path("stubs/*.rb", __dir__)].each { |file| require file }
# Base class for Action Cable's internal tests: adds helpers to wait for
# work scheduled on Concurrent Ruby's global I/O executor.
class ActionCable::TestCase < ActiveSupport::TestCase
# Blocks until every task queued on the global I/O executor has finished.
def wait_for_async
wait_for_executor Concurrent.global_io_executor
end
# Runs the block, then waits for any async work it scheduled.
# NOTE(review): the name suggests an EventMachine legacy; the body no longer
# touches EventMachine.
def run_in_eventmachine
yield
wait_for_async
end
# Polls `executor` every 100ms until completed == scheduled task counts,
# raising once roughly 2 seconds have elapsed.
def wait_for_executor(executor)
# do not wait forever, wait 2s
timeout = 2
until executor.completed_task_count == executor.scheduled_task_count
sleep 0.1
timeout -= 0.1
raise "Executor could not complete all tasks in 2 seconds" unless timeout > 0
end
end
end
Remove mocha from ActionCable tests
Q.E.D.
# frozen_string_literal: true
require "action_cable"
require "active_support/testing/autorun"
require "puma"
require "rack/mock"
begin
require "byebug"
rescue LoadError
end
# Require all the stubs and models
Dir[File.expand_path("stubs/*.rb", __dir__)].each { |file| require file }
# Base class for Action Cable's internal tests: adds helpers to wait for
# work scheduled on Concurrent Ruby's global I/O executor.
class ActionCable::TestCase < ActiveSupport::TestCase
# Blocks until every task queued on the global I/O executor has finished.
def wait_for_async
wait_for_executor Concurrent.global_io_executor
end
# Runs the block, then waits for any async work it scheduled.
# NOTE(review): the name suggests an EventMachine legacy; the body no longer
# touches EventMachine.
def run_in_eventmachine
yield
wait_for_async
end
# Polls `executor` every 100ms until completed == scheduled task counts,
# raising once roughly 2 seconds have elapsed.
def wait_for_executor(executor)
# do not wait forever, wait 2s
timeout = 2
until executor.completed_task_count == executor.scheduled_task_count
sleep 0.1
timeout -= 0.1
raise "Executor could not complete all tasks in 2 seconds" unless timeout > 0
end
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'active_admin_pagination/version'

# Gemspec for active_admin_pagination: a 'per page' pagination UI for ActiveAdmin.
Gem::Specification.new do |spec|
  spec.name = "active_admin_pagination"
  spec.version = ActiveAdminPagination::VERSION
  spec.authors = ["Timo Schilling"]
  spec.email = ["timo@schilling.io"]
  spec.summary = %q{Provides a pagination 'per page' interface for ActiveAdmin.}
  # Fixed stray double apostrophe in the description: "'per page''" -> "'per page'".
  spec.description = %q{Provides a pagination 'per page' interface for ActiveAdmin. It renders a sidebar section with a numeric select and modifies the Controller to use that 'per page' value.}
  spec.homepage = ""
  spec.license = "MIT"
  # Ship every git-tracked file; executables and test files are derived from it.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
end
fix gemspec description
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'active_admin_pagination/version'
# Gemspec for active_admin_pagination: a 'per page' pagination UI for ActiveAdmin.
Gem::Specification.new do |spec|
spec.name = "active_admin_pagination"
spec.version = ActiveAdminPagination::VERSION
spec.authors = ["Timo Schilling"]
spec.email = ["timo@schilling.io"]
spec.summary = %q{Provides a pagination 'per page' interface for ActiveAdmin.}
spec.description = %q{Provides a pagination 'per page' interface for ActiveAdmin. It renders a sidebar section with a numeric select and modifies the Controller to use that 'per page' value.}
spec.homepage = ""
spec.license = "MIT"
# Ship every git-tracked file; executables and test files are derived from it.
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake"
end
|
# Gemspec for active_record_host_pool 0.5.3: lets ActiveRecord share one
# connection across multiple databases on the same host.
Gem::Specification.new do |s|
s.name = "active_record_host_pool"
s.version = "0.5.3"
s.authors = ["Ben Osheroff"]
s.date = %q{2011-10-28}
s.summary = "Allow ActiveRecord to share a connection to multiple databases on the same host"
s.description = ""
s.email = ["ben@gimbo.net"]
s.extra_rdoc_files = [
"LICENSE",
"README.md"
]
# Files are listed explicitly rather than derived from git.
s.files = [
"README.md",
"lib/active_record_host_pool.rb",
"lib/active_record_host_pool/connection_adapter_mixin.rb",
"lib/active_record_host_pool/connection_proxy.rb",
"lib/active_record_host_pool/pool_proxy.rb"
]
s.homepage = "http://github.com/zendesk/active_record_host_pool"
s.require_paths = ["lib"]
s.rubygems_version = %q{1.5.3}
s.test_files = ["test/database.yml", "test/helper.rb", "test/schema.rb", "test/test_arhp.rb"]
s.add_runtime_dependency("activerecord")
s.add_development_dependency("rake")
s.add_development_dependency("mysql")
s.add_development_dependency("shoulda")
s.add_development_dependency("mocha")
s.add_development_dependency("ruby-debug")
end
bump version -> 0.6.0
# Gemspec for active_record_host_pool 0.6.0: lets ActiveRecord share one
# connection across multiple databases on the same host.
Gem::Specification.new do |s|
s.name = "active_record_host_pool"
s.version = "0.6.0"
s.authors = ["Ben Osheroff"]
# NOTE(review): the date was not updated when the version was bumped to
# 0.6.0 — presumably stale; confirm.
s.date = %q{2011-10-28}
s.summary = "Allow ActiveRecord to share a connection to multiple databases on the same host"
s.description = ""
s.email = ["ben@gimbo.net"]
s.extra_rdoc_files = [
"LICENSE",
"README.md"
]
# Files are listed explicitly rather than derived from git.
s.files = [
"README.md",
"lib/active_record_host_pool.rb",
"lib/active_record_host_pool/connection_adapter_mixin.rb",
"lib/active_record_host_pool/connection_proxy.rb",
"lib/active_record_host_pool/pool_proxy.rb"
]
s.homepage = "http://github.com/zendesk/active_record_host_pool"
s.require_paths = ["lib"]
s.rubygems_version = %q{1.5.3}
s.test_files = ["test/database.yml", "test/helper.rb", "test/schema.rb", "test/test_arhp.rb"]
s.add_runtime_dependency("activerecord")
s.add_development_dependency("rake")
s.add_development_dependency("mysql")
s.add_development_dependency("shoulda")
s.add_development_dependency("mocha")
s.add_development_dependency("ruby-debug")
end
|
# Plugin initializer: loads prerequisite plugins and mixes the
# StylishPermissions behavior into the User model.
require_plugin 'restful_authentication'
require_plugin 'toolbox'
require 'user'
# class_eval reopens the already-loaded User class to perform the include.
User.class_eval { include ::StylishPermissions::User }
stylish permissions does not require restful_authentication anymore
# Plugin initializer: loads the prerequisite plugin and mixes the
# StylishPermissions behavior into the User model.
require_plugin 'toolbox'
require 'user'
# class_eval reopens the already-loaded User class to perform the include.
User.class_eval { include ::StylishPermissions::User }
|
require 'rails_helper'
# Migration spec: verifies that rows referencing non-existent users are
# purged and that the user foreign keys are (re)added afterwards.
# NOTE(review): `witout_foreign_key` [sic] matches the actual name of this
# project's spec helper — do not correct the spelling only in this file.
RSpec.describe Issue1977RemoveInvalidUserForeignKeys, type: :db_migration do
context 'no online_notifications foreign key' do
# Transactional wrapping is disabled — presumably because the migration
# issues DDL; records are cleaned up manually below. Confirm.
self.use_transactional_tests = false
let(:existing_user_id) { User.first.id }
context 'invalid User foreign key columns' do
it 'cleans up OnlineNotification#user_id' do
witout_foreign_key(:online_notifications, column: :user_id)
create(:online_notification, user_id: 1337)
valid = create(:online_notification, user_id: existing_user_id)
expect do
migrate
end.to change {
OnlineNotification.count
}.by(-1)
# cleanup since we disabled
# transactions for this tests
valid.destroy
end
it 'cleans up RecentView#created_by_id' do
witout_foreign_key(:online_notifications, column: :user_id)
witout_foreign_key(:recent_views, column: :created_by_id)
create(:recent_view, created_by_id: 1337)
valid = create(:recent_view, created_by_id: existing_user_id)
expect do
migrate
end.to change {
RecentView.count
}.by(-1)
# cleanup since we disabled
# transactions for this tests
valid.destroy
end
it 'cleans up Avatar#o_id' do
witout_foreign_key(:online_notifications, column: :user_id)
create(:avatar, object_lookup_id: ObjectLookup.by_name('User'), o_id: 1337)
valid_ticket = create(:avatar, object_lookup_id: ObjectLookup.by_name('Ticket'), o_id: 1337)
valid_user = create(:avatar, object_lookup_id: ObjectLookup.by_name('User'), o_id: existing_user_id)
expect do
migrate
end.to change {
Avatar.count
}.by(-1)
# cleanup since we disabled
# transactions for this tests
valid_ticket.destroy
valid_user.destroy
end
end
it 'adds OnlineNotification#user_id foreign key' do
adds_foreign_key(:online_notifications, column: :user_id)
end
end
context 'no recent_views foreign key' do
self.use_transactional_tests = false
it 'adds RecentView#created_by_id foreign key' do
adds_foreign_key(:recent_views, column: :created_by_id)
end
end
end
Follow up for issue #1977 - recent_views fk already exists (no test needed).
require 'rails_helper'
# Migration spec: verifies that rows referencing non-existent users are
# purged and that the online_notifications foreign key is (re)added.
# NOTE(review): `witout_foreign_key` [sic] matches the actual name of this
# project's spec helper — do not correct the spelling only in this file.
RSpec.describe Issue1977RemoveInvalidUserForeignKeys, type: :db_migration do
context 'no online_notifications foreign key' do
# Transactional wrapping is disabled — presumably because the migration
# issues DDL; records are cleaned up manually below. Confirm.
self.use_transactional_tests = false
let(:existing_user_id) { User.first.id }
context 'invalid User foreign key columns' do
it 'cleans up OnlineNotification#user_id' do
witout_foreign_key(:online_notifications, column: :user_id)
create(:online_notification, user_id: 1337)
valid = create(:online_notification, user_id: existing_user_id)
expect do
migrate
end.to change {
OnlineNotification.count
}.by(-1)
# cleanup since we disabled
# transactions for this tests
valid.destroy
end
it 'cleans up RecentView#created_by_id' do
witout_foreign_key(:online_notifications, column: :user_id)
witout_foreign_key(:recent_views, column: :created_by_id)
create(:recent_view, created_by_id: 1337)
valid = create(:recent_view, created_by_id: existing_user_id)
expect do
migrate
end.to change {
RecentView.count
}.by(-1)
# cleanup since we disabled
# transactions for this tests
valid.destroy
end
it 'cleans up Avatar#o_id' do
witout_foreign_key(:online_notifications, column: :user_id)
create(:avatar, object_lookup_id: ObjectLookup.by_name('User'), o_id: 1337)
valid_ticket = create(:avatar, object_lookup_id: ObjectLookup.by_name('Ticket'), o_id: 1337)
valid_user = create(:avatar, object_lookup_id: ObjectLookup.by_name('User'), o_id: existing_user_id)
expect do
migrate
end.to change {
Avatar.count
}.by(-1)
# cleanup since we disabled
# transactions for this tests
valid_ticket.destroy
valid_user.destroy
end
end
it 'adds OnlineNotification#user_id foreign key' do
adds_foreign_key(:online_notifications, column: :user_id)
end
end
end
|
require 'util/miq-encode'
require 'active_support/inflector'
require 'more_core_extensions/core_ext/string'
# Core String extensions for this project: MIQEncode helper, ActiveSupport
# inflection delegations (each defined only when not already present), and
# IEC binary-size parsing.
class String
# Encodes this string via the MIQEncode utility.
def miqEncode
MIQEncode.encode(self)
end
##############################################################################
#
# File activesupport-3.1.1/lib/active_support/core_ext/string/inflections.rb
#
##############################################################################
# ActiveSupport extensions included for non-Rails based code, where
# ActiveSupport itself cannot be included.
##############################################################################
# NOTE(review): the `require 'active_support/inflector'` at the top of this
# file may already define these methods on String, in which case every
# `unless method_defined?` guard below makes its definition a no-op — confirm.
# Returns the plural form of the word in the string.
#
# "post".pluralize # => "posts"
# "octopus".pluralize # => "octopi"
# "sheep".pluralize # => "sheep"
# "words".pluralize # => "words"
# "the blue mailman".pluralize # => "the blue mailmen"
# "CamelOctopus".pluralize # => "CamelOctopi"
def pluralize
ActiveSupport::Inflector.pluralize(self)
end unless method_defined?(:pluralize)
# The reverse of +pluralize+, returns the singular form of a word in a string.
#
# "posts".singularize # => "post"
# "octopi".singularize # => "octopus"
# "sheep".singularize # => "sheep"
# "word".singularize # => "word"
# "the blue mailmen".singularize # => "the blue mailman"
# "CamelOctopi".singularize # => "CamelOctopus"
def singularize
ActiveSupport::Inflector.singularize(self)
end unless method_defined?(:singularize)
# +constantize+ tries to find a declared constant with the name specified
# in the string. It raises a NameError when the name is not in CamelCase
# or is not initialized.
#
# Examples
# "Module".constantize # => Module
# "Class".constantize # => Class
def constantize
ActiveSupport::Inflector.constantize(self)
end unless method_defined?(:constantize)
# By default, +camelize+ converts strings to UpperCamelCase. If the argument to camelize
# is set to <tt>:lower</tt> then camelize produces lowerCamelCase.
#
# +camelize+ will also convert '/' to '::' which is useful for converting paths to namespaces.
#
# "active_record".camelize # => "ActiveRecord"
# "active_record".camelize(:lower) # => "activeRecord"
# "active_record/errors".camelize # => "ActiveRecord::Errors"
# "active_record/errors".camelize(:lower) # => "activeRecord::Errors"
def camelize(first_letter = :upper)
case first_letter
when :upper then ActiveSupport::Inflector.camelize(self, true)
when :lower then ActiveSupport::Inflector.camelize(self, false)
end
end unless method_defined?(:camelize)
# Capitalizes all the words and replaces some characters in the string to create
# a nicer looking title. +titleize+ is meant for creating pretty output. It is not
# used in the Rails internals.
#
# +titleize+ is also aliased as +titlecase+.
#
# "man from the boondocks".titleize # => "Man From The Boondocks"
# "x-men: the last stand".titleize # => "X Men: The Last Stand"
def titleize
ActiveSupport::Inflector.titleize(self)
end unless method_defined?(:titleize)
# The reverse of +camelize+. Makes an underscored, lowercase form from the expression in the string.
#
# +underscore+ will also change '::' to '/' to convert namespaces to paths.
#
# "ActiveRecord".underscore # => "active_record"
# "ActiveRecord::Errors".underscore # => active_record/errors
def underscore
ActiveSupport::Inflector.underscore(self)
end unless method_defined?(:underscore)
# Replaces underscores with dashes in the string.
#
# "puni_puni" # => "puni-puni"
def dasherize
ActiveSupport::Inflector.dasherize(self)
end unless method_defined?(:dasherize)
# Removes the module part from the constant expression in the string.
#
# "ActiveRecord::CoreExtensions::String::Inflections".demodulize # => "Inflections"
# "Inflections".demodulize # => "Inflections"
def demodulize
ActiveSupport::Inflector.demodulize(self)
end unless method_defined?(:demodulize)
# Creates the name of a table like Rails does for models to table names. This method
# uses the +pluralize+ method on the last word in the string.
#
# "RawScaledScorer".tableize # => "raw_scaled_scorers"
# "egg_and_ham".tableize # => "egg_and_hams"
# "fancyCategory".tableize # => "fancy_categories"
def tableize
ActiveSupport::Inflector.tableize(self)
end unless method_defined?(:tableize)
# Create a class name from a plural table name like Rails does for table names to models.
# Note that this returns a string and not a class. (To convert to an actual class
# follow +classify+ with +constantize+.)
#
# "egg_and_hams".classify # => "EggAndHam"
# "posts".classify # => "Post"
#
# Singular names are not handled correctly.
#
# "business".classify # => "Busines"
def classify
ActiveSupport::Inflector.classify(self)
end unless method_defined?(:classify)
# Capitalizes the first word, turns underscores into spaces, and strips '_id'.
# Like +titleize+, this is meant for creating pretty output.
#
# "employee_salary" # => "Employee salary"
# "author_id" # => "Author"
def humanize
ActiveSupport::Inflector.humanize(self)
end unless method_defined?(:humanize)
# Creates a foreign key name from a class name.
# +separate_class_name_and_id_with_underscore+ sets whether
# the method should put '_' between the name and 'id'.
#
# Examples
# "Message".foreign_key # => "message_id"
# "Message".foreign_key(false) # => "messageid"
# "Admin::Post".foreign_key # => "post_id"
def foreign_key(separate_class_name_and_id_with_underscore = true)
ActiveSupport::Inflector.foreign_key(self, separate_class_name_and_id_with_underscore)
end unless method_defined?(:foreign_key)
# Support with IEC size format
# http://physics.nist.gov/cuu/Units/binary.html
IEC_SIZE_SUFFIXES = %w(Ki Mi Gi Ti).freeze
# Parses strings such as "4Ki" or "2Mi" into integers using IEC binary
# multipliers (1024**n); plain numeric strings are converted directly.
# Raises ArgumentError (via Kernel#Integer) for malformed input.
def to_iec_integer
exp_index = IEC_SIZE_SUFFIXES.index(self[-2..-1])
if exp_index.nil?
Integer(self)
else
Integer(self[0..-3]) * 1024**(exp_index + 1)
end
end
end
Remove unused code for ActiveSupport::Inflector
requiring 'active_support/inflector' defines these methods on String
therefore none of this code is needed
require 'util/miq-encode'
require 'active_support/inflector'
require 'more_core_extensions/core_ext/string'
# Core String extensions: MIQEncode helper and IEC binary-size parsing.
class String
  # Encodes this string via the MIQEncode utility.
  def miqEncode
    MIQEncode.encode(self)
  end

  # IEC binary-prefix suffixes, ascending by magnitude.
  # http://physics.nist.gov/cuu/Units/binary.html
  IEC_SIZE_SUFFIXES = %w(Ki Mi Gi Ti).freeze

  # Parses strings such as "4Ki" or "2Mi" into integers using IEC binary
  # multipliers (1024**n); plain numeric strings are converted directly.
  def to_iec_integer
    power = IEC_SIZE_SUFFIXES.index(self[-2..-1])
    return Integer(self) unless power

    Integer(self[0..-3]) * (1024**(power + 1))
  end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'accel/version'
# Gemspec for accel-rb: small helpers for bash-style scripting tasks.
Gem::Specification.new do |spec|
spec.name = "accel-rb"
spec.version = Accel::VERSION
spec.authors = ["Brandon Holt"]
spec.email = ["bholt@cs.washington.edu"]
spec.description = %q{Small Ruby helpers for making life easier, especially for bash-style scripting tasks.}
spec.summary = %q{Helpers for tasks such as writing shell scripts, using Pry effectively, etc.}
spec.homepage = ""
spec.license = ""
# Ship every git-tracked file; executables and test files are derived from it.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
end
add pry dependency
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'accel/version'
# Gemspec for accel-rb: small helpers for bash-style scripting tasks.
Gem::Specification.new do |spec|
spec.name = "accel-rb"
spec.version = Accel::VERSION
spec.authors = ["Brandon Holt"]
spec.email = ["bholt@cs.washington.edu"]
spec.description = %q{Small Ruby helpers for making life easier, especially for bash-style scripting tasks.}
spec.summary = %q{Helpers for tasks such as writing shell scripts, using Pry effectively, etc.}
spec.homepage = ""
spec.license = ""
# Ship every git-tracked file; executables and test files are derived from it.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
# NOTE(review): "pry" is added with no version constraint — consider pinning.
spec.add_dependency "pry"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
end
require 'requirement'
# We support Python 2.x and 3.x, either brewed or external.
# This requirement locates the correct CPython binary (no PyPy), provides
# support methods like `site_packages`, and writes our sitecustomize.py file.
# In `dependency_collector.rb`, special `:python` and `:python3` shortcuts are
# defined. You can specify a minimum version of the Python that needs to be
# present, but since not every package is ported to 3.x yet,
# `PythonInstalled("2")` is not satisfied by 3.x.
# In a formula that shall provide support for 2.x and 3.x, the idiom is:
# depends_on :python
# depends_on :python3 => :optional # or :recommended
#
# Todo:
# - Allow further options that choose: universal, framework?, brewed?...
class PythonInstalled < Requirement
attr_reader :min_version
attr_reader :if3then3
attr_reader :imports
attr_accessor :site_packages
attr_writer :binary # The python.rb formula needs to set the binary
fatal true # you can still make Python optional by `depends_on :python => :optional`
# Version subclass exposing integer accessors for a Python version's
# major and minor components.
class PythonVersion < Version
def major
tokens[0].to_s.to_i # Python's major.minor are always ints.
end
def minor
tokens[1].to_s.to_i
end
end
# default_version: minimum Python version assumed when the tags do not begin
#   with an explicit version string (e.g. "2.6").
# tags: requirement tags; an optional leading version string sets
#   @min_version, and remaining String/Hash entries name Python modules that
#   must be importable ("<import_name>" => "<name_on_PyPi>").
def initialize(default_version="2.6", tags=[])
tags = [tags].flatten
# Extract the min_version if given. Default to default_version else
if /(\d+\.)*\d+/ === tags.first.to_s
@min_version = PythonVersion.new(tags.shift)
else
@min_version = PythonVersion.new(default_version)
end
# often used idiom: e.g. sipdir = "share/sip#{python.if3then3}"
if @min_version.major == 3
@if3then3 = "3"
else
@if3then3 = ""
end
# Set name according to the major version.
# The name is used to generate the options like --without-python3
@name = "python" + @if3then3
# Check if any python modules should be importable. We use a hash to store
# the corresponding name on PyPi "<import_name>" => "<name_on_PyPi>".
# Example: `depends_on :python => ['enchant' => 'pyenchant']
@imports = {}
tags.each do |tag|
if tag.kind_of? String
@imports[tag] = tag # if the module name is the same as the PyPi name
elsif tag.kind_of? Hash
@imports.merge!(tag)
end
end
# will be set later by the python_helper, because it needs the
# formula prefix to set site_packages
@site_packages = nil
super tags
end
# Note that during `satisfy` we still have the PATH as the user has set.
# We look for a brewed python or an external Python and store the loc of
# that binary for later usage. (See Formula#python)
# Each failing branch appends a human-readable reason to
# @unsatisfied_because and returns false.
satisfy :build_env => false do
ENV['PYTHONPATH'] = nil
@unsatisfied_because = ''
if binary.nil? || !binary.executable?
@unsatisfied_because += "No `#{@name}` found in your PATH! Consider to `brew install #{@name}`."
false
elsif pypy?
@unsatisfied_because += "Your #{@name} executable appears to be a PyPy, which is not supported."
false
elsif version.major != @min_version.major
@unsatisfied_because += "No Python #{@min_version.major}.x found in your PATH! --> `brew install #{@name}`?"
false
elsif version < @min_version
@unsatisfied_because += "Python version #{version} is too old (need at least #{@min_version})."
false
elsif @min_version.major == 2 && `python -c "import sys; print(sys.version_info[0])"`.strip == "3"
@unsatisfied_because += "Your `python` points to a Python 3.x. This is not supported."
false
else
# All required modules must be importable; for each missing one, build an
# install hint using the PyPi name recorded in @imports.
@imports.keys.all? do |module_name|
if importable? module_name
true
else
@unsatisfied_because += "Unsatisfied dependency: #{module_name}\n"
@unsatisfied_because += "OS X System's " if from_osx?
@unsatisfied_because += "Brewed " if brewed?
@unsatisfied_because += "External " unless brewed? || from_osx?
@unsatisfied_because += "Python cannot `import #{module_name}`. Install with:\n  "
@unsatisfied_because += "sudo easy_install pip\n  " unless importable? 'pip'
@unsatisfied_because += "pip-#{version.major}.#{version.minor} install #{@imports[module_name]}"
false
end
end
end
end
def importable? module_name
quiet_system(binary, "-c", "import #{module_name}")
end
# The full path to the python or python3 executable, depending on `version`.
def binary
@binary ||= begin
if brewed?
# If the python is brewed we always prefer it!
# Note, we don't support homebrew/versions/pythonXX.rb, though.
Formula.factory(@name).opt_prefix/"bin/python#{@min_version.major}"
else
which(@name)
end
end
end
# The python prefix (special cased for a brewed python to point into the opt_prefix)
def prefix
if brewed?
# Homebrew since a long while only supports frameworked python
HOMEBREW_PREFIX/"opt/#{name}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}"
elsif from_osx?
# Python on OS X has been stripped off its includes (unless you install the CLT), therefore we use the MacOS.sdk.
Pathname.new("#{MacOS.sdk_path}/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}")
else
# What Python knows about itself
Pathname.new(`#{binary} -c 'import sys;print(sys.prefix)'`.strip)
end
end
# Get the actual x.y.z version by asking python (or python3 if @min_version>=3)
def version
@version ||= PythonVersion.new(`#{binary} -c 'import sys;print(sys.version[:5])'`.strip)
end
# python.xy => "python2.7" is often used (and many formulae had this as `which_python`).
def xy
"python#{version.major}.#{version.minor}"
end
# Homebrew's global site-packages. The local ones (just `site_packages`) are
# populated by the python_helperg method when the `prefix` of a formula is known.
def global_site_packages
HOMEBREW_PREFIX/"lib/#{xy}/site-packages"
end
# Dir containing Python.h and others.
def incdir
if (from_osx? || brewed?) && framework?
prefix/"Headers"
else
# For all other we use Python's own standard method (works with a non-framework version, too)
Pathname.new(`#{binary} -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'`.strip)
end
end
# Dir containing e.g. libpython2.7.dylib
def libdir
if brewed? || from_osx?
if @min_version.major == 3
prefix/"lib/#{xy}/config-#{version.major}.#{version.minor}m"
else
prefix/"lib/#{xy}/config"
end
else
Pathname.new(`#{binary} -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBPL'))"`.strip)
end
end
# Pkgconfig (pc) files of python
def pkg_config_path
if from_osx?
# No matter if CLT-only or Xcode-only, the pc file is always here on OS X:
path = Pathname.new("/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}/lib/pkgconfig")
path if path.exist?
else
prefix/"lib/pkgconfig"
end
end
# Is the brewed Python installed
def brewed?
@brewed ||= begin
require 'formula'
Formula.factory(@name).linked_keg.exist?
end
end
# Is the python the one from OS X?
def from_osx?
@from_osx ||= begin
p = `#{binary} -c "import sys; print(sys.prefix)"`.strip
p.start_with?("/System/Library/Frameworks/Python.framework")
end
end
# Is the `python` a PyPy?
def pypy?
@pypy ||= !(`#{binary} -c "import sys; print(sys.version)"`.downcase =~ /.*pypy.*/).nil?
end
def framework
# We return the path to Frameworks and not the 'Python.framework', because
# the latter is (sadly) the same for 2.x and 3.x.
if prefix.to_s =~ /^(.*\/Frameworks)\/(Python\.framework).*$/
@framework = $1
end
end
def framework?; not framework.nil? end
def universal?
@universal ||= archs_for_command(binary).universal?
end
def standard_caveats
if brewed?
"" # empty string, so we can concat this
else
<<-EOS.undent
For non-homebrew #{@name} (#{@min_version.major}.x), you need to amend your PYTHONPATH like so:
export PYTHONPATH=#{global_site_packages}:$PYTHONPATH
EOS
end
end
def modify_build_environment
# Most methods fail if we don't have a binary.
return if binary.nil?
# Write our sitecustomize.py
file = global_site_packages/"sitecustomize.py"
ohai "Writing #{file}" if ARGV.verbose? && ARGV.debug?
%w{.pyc .pyo .py}.each do |ext|
f = global_site_packages/"sitecustomize#{ext}"
f.unlink if f.exist?
end
file.write(sitecustomize)
# For non-system python's we add the opt_prefix/bin of python to the path.
ENV.prepend_path 'PATH', binary.dirname unless from_osx?
ENV['PYTHONHOME'] = nil # to avoid fuck-ups.
ENV['PYTHONPATH'] = if brewed? then nil; else global_site_packages.to_s; end
ENV.append_path 'CMAKE_INCLUDE_PATH', incdir
ENV.append_path 'PKG_CONFIG_PATH', pkg_config_path if pkg_config_path
# We don't set the -F#{framework} here, because if Python 2.x and 3.x are
# used, `Python.framework` is ambiguous. However, in the `python do` block
# we can set LDFLAGS+="-F#{framework}" because only one is temporarily set.
# Udpate distutils.cfg (later we can remove this, but people still have
# their old brewed pythons and we have to update it here)
# Todo: If Jack's formula revisions arrive, we can get rid of this here!
if brewed?
require 'formula'
file = Formula.factory(@name).prefix/"Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}/lib/#{xy}/distutils/distutils.cfg"
ohai "Writing #{file}" if ARGV.verbose? && ARGV.debug?
file.delete if file.exist?
file.write <<-EOF.undent
[global]
verbose=1
[install]
force=1
prefix=#{HOMEBREW_PREFIX}
EOF
end
end
def sitecustomize
<<-EOF.undent
# This file is created by Homebrew and is executed on each python startup.
# Don't print from here, or else python command line scripts may fail!
# <https://github.com/mxcl/homebrew/wiki/Homebrew-and-Python>
import sys
if sys.version_info[0] != #{version.major}:
import os
# This can only happen if the user has set the PYTHONPATH for 3.x and run Python 2.x or vice versa.
# Every Python looks at the PYTHONPATH variable and we can't fix it here in sitecustomize.py,
# because the PYTHONPATH is evaluated after the sitecustomize.py. Many modules (e.g. PyQt4) are
# built only for a specific version of Python and will fail with cryptic error messages.
# In the end this means: Don't set the PYTHONPATH permanently if you use different Python versions.
exit('Your PYTHONPATH points to a site-packages dir for Python #{version.major}.x but you are running Python ' +
str(sys.version_info[0]) + '.x!\\n PYTHONPATH is currently: "' + str(os.environ['PYTHONPATH']) + '"\\n' +
' You should `unset PYTHONPATH` to fix this.')
else:
# Only do this for a brewed python:
if sys.executable.startswith('#{HOMEBREW_PREFIX}'):
# Remove /System site-packages, and the Cellar site-packages
# which we moved to lib/pythonX.Y/site-packages. Further, remove
# HOMEBREW_PREFIX/lib/python because we later addsitedir(...).
sys.path = [ p for p in sys.path
if (not p.startswith('/System') and
not p.startswith('#{HOMEBREW_PREFIX}/lib/python') and
not (p.startswith('#{HOMEBREW_PREFIX}/Cellar/python') and p.endswith('site-packages'))) ]
# LINKFORSHARED (and python-config --ldflags) return the
# full path to the lib (yes, "Python" is actually the lib, not a
# dir) so that third-party software does not need to add the
# -F/#{HOMEBREW_PREFIX}/Frameworks switch.
# Assume Framework style build (default since months in brew)
try:
from _sysconfigdata import build_time_vars
build_time_vars['LINKFORSHARED'] = '-u _PyMac_Error #{HOMEBREW_PREFIX}/opt/#{name}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}/Python'
except:
pass # remember: don't print here. Better to fail silently.
# Set the sys.executable to use the opt_prefix
sys.executable = '#{HOMEBREW_PREFIX}/opt/#{name}/bin/#{xy}'
# Tell about homebrew's site-packages location.
# This is needed for Python to parse *.pth.
import site
site.addsitedir('#{HOMEBREW_PREFIX}/lib/#{xy}/site-packages')
EOF
end
def message
@unsatisfied_because
end
def <=> other
version <=> other.version
end
def to_s
binary.to_s
end
# Objects of this class are used to represent dependencies on Python and
# dependencies on Python modules, so the combination of name + imports is
# enough to identify them uniquely.
def eql?(other)
instance_of?(other.class) && name == other.name && imports == other.imports
end
def hash
[name, *imports].hash
end
end
PythonInstalled, name includes modules
If `depends_on :python => ['modulename', :optional]` then the generated
option is now `--with-python-modulename`, so that it is possible to
actually make depending on python modules optional.
Further, `brew options` becomes more meaningful.
require 'requirement'
# We support Python 2.x and 3.x, either brewed or external.
# This requirement locates the correct CPython binary (no PyPy), provides
# support methods like `site_packages`, and writes our sitecustomize.py file.
# In `dependency_collector.rb`, special `:python` and `:python3` shortcuts are
# defined. You can specify a minimum version of the Python that needs to be
# present, but since not every package is ported to 3.x yet,
# `PythonInstalled("2")` is not satisfied by 3.x.
# In a formula that shall provide support for 2.x and 3.x, the idiom is:
# depends_on :python
# depends_on :python3 => :optional # or :recommended
#
# Todo:
# - Allow further options that choose: universal, framework?, brewed?...
class PythonInstalled < Requirement
  attr_reader :min_version
  attr_reader :if3then3
  attr_reader :imports
  attr_reader :python
  attr_accessor :site_packages
  attr_writer :binary # The python.rb formula needs to set the binary

  fatal true # you can still make Python optional by `depends_on :python => :optional`

  # Thin Version subclass exposing Python's major/minor tokens as Integers.
  class PythonVersion < Version
    def major
      tokens[0].to_s.to_i # Python's major.minor are always ints.
    end
    def minor
      tokens[1].to_s.to_i
    end
  end

  # default_version: minimum interpreter version used when the tags carry none.
  # tags: may begin with a version string (e.g. "2.7"); remaining entries are
  #       importable module names (String) or "<import_name>" => "<PyPi_name>"
  #       mappings (Hash).
  def initialize(default_version="2.6", tags=[])
    tags = [tags].flatten
    # Extract the min_version if given. Default to default_version else
    if /(\d+\.)*\d+/ === tags.first.to_s
      @min_version = PythonVersion.new(tags.shift)
    else
      @min_version = PythonVersion.new(default_version)
    end
    # often used idiom: e.g. sipdir = "share/sip#{python.if3then3}"
    if @min_version.major == 3
      @if3then3 = "3"
    else
      @if3then3 = ""
    end
    # `python` is always just the interpreter name ("python"/"python3"),
    # whereas `name` below may additionally carry the module names.
    @python = "python"+@if3then3
    # Check if any python modules should be importable. We use a hash to store
    # the corresponding name on PyPi "<import_name>" => "<name_on_PyPi>".
    # Example: `depends_on :python => ['enchant' => 'pyenchant']
    @imports = {}
    tags.each do |tag|
      if tag.kind_of? String
        @imports[tag] = tag # if the module name is the same as the PyPi name
      elsif tag.kind_of? Hash
        @imports.merge!(tag)
      end
    end
    # Set name according to the major version and optionally python modules:
    # Used to generate the options like --without-python3, --with-python-numpy
    @name = "python#{@if3then3}"
    @name += "-#{@imports.values*'-'}" unless @imports.empty?
    # will be set later by the python_helper, because it needs the
    # formula prefix to set site_packages
    @site_packages = nil
    super tags
  end

  # Note that during `satisfy` we still have the PATH as the user has set.
  # We look for a brewed python or an external Python and store the loc of
  # that binary for later usage. (See Formula#python)
  # Each failing branch appends its reason to @unsatisfied_because, which is
  # surfaced to the user via #message.
  satisfy :build_env => false do
    ENV['PYTHONPATH'] = nil
    @unsatisfied_because = ''
    if binary.nil? || !binary.executable?
      @unsatisfied_because += "No `#{@python}` found in your PATH! Consider to `brew install #{@python}`."
      false
    elsif pypy?
      @unsatisfied_because += "Your #{@python} executable appears to be a PyPy, which is not supported."
      false
    elsif version.major != @min_version.major
      @unsatisfied_because += "No Python #{@min_version.major}.x found in your PATH! --> `brew install #{@python}`?"
      false
    elsif version < @min_version
      @unsatisfied_because += "Python version #{version} is too old (need at least #{@min_version})."
      false
    elsif @min_version.major == 2 && `python -c "import sys; print(sys.version_info[0])"`.strip == "3"
      @unsatisfied_because += "Your `python` points to a Python 3.x. This is not supported."
      false
    else
      @imports.keys.all? do |module_name|
        if importable? module_name
          true
        else
          @unsatisfied_because += "Unsatisfied dependency: #{module_name}\n"
          @unsatisfied_because += "OS X System's " if from_osx?
          @unsatisfied_because += "Brewed " if brewed?
          @unsatisfied_because += "External " unless brewed? || from_osx?
          @unsatisfied_because += "Python cannot `import #{module_name}`. Install with:\n "
          @unsatisfied_because += "sudo easy_install pip\n " unless importable? 'pip'
          @unsatisfied_because += "pip-#{version.major}.#{version.minor} install #{@imports[module_name]}"
          false
        end
      end
    end
  end

  # True if `module_name` can be imported by the selected python binary.
  def importable? module_name
    quiet_system(binary, "-c", "import #{module_name}")
  end

  # The full path to the python or python3 executable, depending on `version`.
  def binary
    @binary ||= begin
      if brewed?
        # If the python is brewed we always prefer it!
        # Note, we don't support homebrew/versions/pythonXX.rb, though.
        Formula.factory(@python).opt_prefix/"bin/python#{@min_version.major}"
      else
        which(@python)
      end
    end
  end

  # The python prefix (special cased for a brewed python to point into the opt_prefix)
  def prefix
    if brewed?
      # Homebrew since a long while only supports frameworked python
      HOMEBREW_PREFIX/"opt/#{python}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}"
    elsif from_osx?
      # Python on OS X has been stripped off its includes (unless you install the CLT), therefore we use the MacOS.sdk.
      Pathname.new("#{MacOS.sdk_path}/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}")
    else
      # What Python knows about itself
      Pathname.new(`#{binary} -c 'import sys;print(sys.prefix)'`.strip)
    end
  end

  # Get the actual x.y.z version by asking python (or python3 if @min_version>=3)
  def version
    @version ||= PythonVersion.new(`#{binary} -c 'import sys;print(sys.version[:5])'`.strip)
  end

  # python.xy => "python2.7" is often used (and many formulae had this as `which_python`).
  def xy
    "python#{version.major}.#{version.minor}"
  end

  # Homebrew's global site-packages. The local ones (just `site_packages`) are
  # populated by the python_helper method when the `prefix` of a formula is known.
  def global_site_packages
    HOMEBREW_PREFIX/"lib/#{xy}/site-packages"
  end

  # Dir containing Python.h and others.
  def incdir
    if (from_osx? || brewed?) && framework?
      prefix/"Headers"
    else
      # For all other we use Python's own standard method (works with a non-framework version, too)
      Pathname.new(`#{binary} -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'`.strip)
    end
  end

  # Dir containing e.g. libpython2.7.dylib
  def libdir
    if brewed? || from_osx?
      if @min_version.major == 3
        prefix/"lib/#{xy}/config-#{version.major}.#{version.minor}m"
      else
        prefix/"lib/#{xy}/config"
      end
    else
      Pathname.new(`#{binary} -c "from distutils import sysconfig; print(sysconfig.get_config_var('LIBPL'))"`.strip)
    end
  end

  # Pkgconfig (pc) files of python
  def pkg_config_path
    if from_osx?
      # No matter if CLT-only or Xcode-only, the pc file is always here on OS X:
      path = Pathname.new("/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}/lib/pkgconfig")
      path if path.exist?
    else
      prefix/"lib/pkgconfig"
    end
  end

  # Is the brewed Python installed
  # NOTE(review): `||=` re-memoizes only truthy results, so the negative case
  # is probed again on every call (same caveat for from_osx?/pypy? below).
  def brewed?
    @brewed ||= begin
      require 'formula'
      Formula.factory(@python).linked_keg.exist?
    end
  end

  # Is the python the one from OS X?
  def from_osx?
    @from_osx ||= begin
      p = `#{binary} -c "import sys; print(sys.prefix)"`.strip
      p.start_with?("/System/Library/Frameworks/Python.framework")
    end
  end

  # Is the `python` a PyPy?
  def pypy?
    @pypy ||= !(`#{binary} -c "import sys; print(sys.version)"`.downcase =~ /.*pypy.*/).nil?
  end

  # Path of the directory containing 'Python.framework', or nil when the
  # prefix is not a framework layout.
  def framework
    # We return the path to Frameworks and not the 'Python.framework', because
    # the latter is (sadly) the same for 2.x and 3.x.
    if prefix.to_s =~ /^(.*\/Frameworks)\/(Python\.framework).*$/
      @framework = $1
    end
  end

  def framework?; not framework.nil? end

  # Is the python binary a universal (multi-arch) build?
  def universal?
    @universal ||= archs_for_command(binary).universal?
  end

  # Caveats text to show the user for a non-brewed python (empty otherwise).
  def standard_caveats
    if brewed?
      "" # empty string, so we can concat this
    else
      <<-EOS.undent
        For non-homebrew #{@python} (#{@min_version.major}.x), you need to amend your PYTHONPATH like so:
          export PYTHONPATH=#{global_site_packages}:$PYTHONPATH
      EOS
    end
  end

  # Prepares ENV and on-disk helper files (sitecustomize.py, distutils.cfg)
  # so a formula build picks up this Python.
  def modify_build_environment
    # Most methods fail if we don't have a binary.
    return if binary.nil?
    # Write our sitecustomize.py
    file = global_site_packages/"sitecustomize.py"
    ohai "Writing #{file}" if ARGV.verbose? && ARGV.debug?
    %w{.pyc .pyo .py}.each do |ext|
      f = global_site_packages/"sitecustomize#{ext}"
      f.unlink if f.exist?
    end
    file.write(sitecustomize)
    # For non-system python's we add the opt_prefix/bin of python to the path.
    ENV.prepend_path 'PATH', binary.dirname unless from_osx?
    ENV['PYTHONHOME'] = nil # to avoid fuck-ups.
    ENV['PYTHONPATH'] = if brewed? then nil; else global_site_packages.to_s; end
    ENV.append_path 'CMAKE_INCLUDE_PATH', incdir
    ENV.append_path 'PKG_CONFIG_PATH', pkg_config_path if pkg_config_path
    # We don't set the -F#{framework} here, because if Python 2.x and 3.x are
    # used, `Python.framework` is ambiguous. However, in the `python do` block
    # we can set LDFLAGS+="-F#{framework}" because only one is temporarily set.

    # Update distutils.cfg (later we can remove this, but people still have
    # their old brewed pythons and we have to update it here)
    # Todo: If Jack's formula revisions arrive, we can get rid of this here!
    if brewed?
      require 'formula'
      file = Formula.factory(@python).prefix/"Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}/lib/#{xy}/distutils/distutils.cfg"
      ohai "Writing #{file}" if ARGV.verbose? && ARGV.debug?
      file.delete if file.exist?
      file.write <<-EOF.undent
        [global]
        verbose=1
        [install]
        force=1
        prefix=#{HOMEBREW_PREFIX}
      EOF
    end
  end

  # Python source injected at interpreter startup; written to the global
  # site-packages by modify_build_environment.
  def sitecustomize
    <<-EOF.undent
      # This file is created by Homebrew and is executed on each python startup.
      # Don't print from here, or else python command line scripts may fail!
      # <https://github.com/mxcl/homebrew/wiki/Homebrew-and-Python>
      import sys
      if sys.version_info[0] != #{version.major}:
          import os
          # This can only happen if the user has set the PYTHONPATH for 3.x and run Python 2.x or vice versa.
          # Every Python looks at the PYTHONPATH variable and we can't fix it here in sitecustomize.py,
          # because the PYTHONPATH is evaluated after the sitecustomize.py. Many modules (e.g. PyQt4) are
          # built only for a specific version of Python and will fail with cryptic error messages.
          # In the end this means: Don't set the PYTHONPATH permanently if you use different Python versions.
          exit('Your PYTHONPATH points to a site-packages dir for Python #{version.major}.x but you are running Python ' +
               str(sys.version_info[0]) + '.x!\\n PYTHONPATH is currently: "' + str(os.environ['PYTHONPATH']) + '"\\n' +
               ' You should `unset PYTHONPATH` to fix this.')
      else:
          # Only do this for a brewed python:
          if sys.executable.startswith('#{HOMEBREW_PREFIX}'):
              # Remove /System site-packages, and the Cellar site-packages
              # which we moved to lib/pythonX.Y/site-packages. Further, remove
              # HOMEBREW_PREFIX/lib/python because we later addsitedir(...).
              sys.path = [ p for p in sys.path
                           if (not p.startswith('/System') and
                               not p.startswith('#{HOMEBREW_PREFIX}/lib/python') and
                               not (p.startswith('#{HOMEBREW_PREFIX}/Cellar/python') and p.endswith('site-packages'))) ]
              # LINKFORSHARED (and python-config --ldflags) return the
              # full path to the lib (yes, "Python" is actually the lib, not a
              # dir) so that third-party software does not need to add the
              # -F/#{HOMEBREW_PREFIX}/Frameworks switch.
              # Assume Framework style build (default since months in brew)
              try:
                  from _sysconfigdata import build_time_vars
                  build_time_vars['LINKFORSHARED'] = '-u _PyMac_Error #{HOMEBREW_PREFIX}/opt/#{python}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}/Python'
              except:
                  pass # remember: don't print here. Better to fail silently.
              # Set the sys.executable to use the opt_prefix
              sys.executable = '#{HOMEBREW_PREFIX}/opt/#{python}/bin/#{xy}'
          # Tell about homebrew's site-packages location.
          # This is needed for Python to parse *.pth.
          import site
          site.addsitedir('#{HOMEBREW_PREFIX}/lib/#{xy}/site-packages')
    EOF
  end

  # The accumulated reason the last satisfy run failed (shown to the user).
  def message
    @unsatisfied_because
  end

  # Order PythonInstalled requirements by interpreter version.
  def <=> other
    version <=> other.version
  end

  def to_s
    binary.to_s
  end

  # Objects of this class are used to represent dependencies on Python and
  # dependencies on Python modules. Both are already included in `name`
  def eql?(other)
    instance_of?(other.class) && name == other.name
  end

  def hash
    name.hash
  end
end
|
require 'rubygems'
require 'ap' # awesome_print gem
require 'base64'
require 'hmac-sha2' # ruby-hmac gem
require 'rest_client' # rest-client gem
# Assembles the authenticated D2L Valence query string: app/user ids from the
# globals ($app_id/$user_id) plus the two signatures (x_c/x_d) derived from
# the same base string, and the timestamp (x_t).
def build_authenticated_uri_query_string(signature, timestamp)
  params = [
    "x_a=#{$app_id}",
    "x_b=#{$user_id}",
    "x_c=#{get_base64_hash_string($app_key, signature)}",
    "x_d=#{get_base64_hash_string($user_key, signature)}",
    "x_t=#{timestamp}"
  ]
  "?" + params.join("&")
end
# Builds the full signed request URI against the configured $hostname.
def create_authenticated_uri(path, httpmethod)
  "http://#{$hostname}#{path}#{get_query_string(path, httpmethod)}"
end
# D2L Valence signature base string: "METHOD&path&timestamp" with an
# upper-cased HTTP verb and a lower-cased path.
def format_signature(path, httpmethod, timestamp)
  [httpmethod.upcase, path.downcase, timestamp].join("&")
end
def get_base64_hash_string(key, signature)
sha256_string = HMAC::SHA256.hexdigest(signature, key)
sha256_string2 = HMAC::SHA256.hexdigest(key, signature)
ap "sig first: #{sha256_string}"
ap "key first: #{sha256_string2}"
urlsafe_base64_string = Base64.urlsafe_encode64(sha256_string2).gsub('=', '')
end
# Produces the signed query string for path/httpmethod. The timestamp is
# skewed one hour into the future (Time.now + 3600), matching the original.
def get_query_string(path, httpmethod)
  ts = Time.now.to_i + 3600
  build_authenticated_uri_query_string(format_signature(path, httpmethod, ts), ts)
end
# --- configuration ----------------------------------------------------------
# SECURITY(review): live app/user credentials are hard-coded below, and the
# comment under $app_key looks like an admin username:password. These should
# live in the environment or an ignored config file, and be rotated.
# devsandbox.wiu.edu.desire2learnvalence.com
$app_id = 'G9nUpvbZQyiPrk3um2YAkQ'
$app_key = 'ybZu7fm_JKJTFwKEHfoZ7Q'
# admin:mencel6377e943
$user_id = 'sNdpIGdXMBbjpNtXGSBXVO'
$user_key = 'vnRxMIDYeGbd0E9-vNeN42'
$hostname = 'devsandbox.wiu.edu.desire2learnvalence.com'

# Issue a signed whoami request; the RestClient block form lets us handle
# non-2xx response codes ourselves.
path = '/d2l/api/lp/1.0/users/whoami'
httpmethod = 'GET'
the_uri = create_authenticated_uri(path, httpmethod)
puts "THE_URI: #{the_uri}"
RestClient.get(the_uri){ |response, request, result, &block|
  case response.code
  when 200
    p 'It worked !'
    ap response
  when 423
    # NOTE(review): SomeCustomExceptionIfYouWant is not defined anywhere in
    # this script — a 423 would raise NameError, not a custom exception.
    fail SomeCustomExceptionIfYouWant
  else
    response.return!(request, result, &block)
  end
}
exit

# ---------------------------------------------------------------------------
# Everything below this `exit` is unreachable scratch kept as working notes.
# If executed it would raise NameError: it references locals that are never
# assigned (app_key, user_key, app_id, user_id), the undefined constant
# APP_ID, and (near the end) a bare `response`.
# ---------------------------------------------------------------------------
ap "KEY: #{app_key}"
# 1) Take the key used to create the signature and produce a set of key-bytes by ASCII-encoding the key.
app_key_asc = app_key.encode('ASCII')
user_key_asc = user_key.encode('ASCII')
ap "ASCII: #{app_key_asc}"
# 2) Take the base-string for the signature and produce a set of base-string-bytes by UTF8-encoding the base-string.
app_key_utf8 = app_key_asc.encode('UTF-8')
user_key_utf8 = user_key_asc.encode('UTF-8')
ap "UTF8: #{app_key_utf8}"
# 3) Produce the HMAC-SHA256 signature hash by using the key-bytes and base-string-bytes as input parameters.
signature = "GET&/d2l/api/lp/1.0/users/whoami&#{Time.now.to_i + 3600}"
app_key_sha256 = HMAC::SHA256.hexdigest(signature, app_key_utf8)
user_key_sha256 = HMAC::SHA256.hexdigest(signature, user_key_utf8)
ap "SHA256: #{app_key_sha256}"
# 4) Take the signature hash and produce a set of signature-bytes by base64url encoding the hash (see RFC 4648; no ‘=’ padding used, ‘+’ and ‘/’ replaced with ‘-‘ and ‘_’ respectively).
app_key_base64 = Base64.urlsafe_encode64(app_key_sha256).gsub('=', '')
user_key_base64 = Base64.urlsafe_encode64(user_key_sha256).gsub('=', '')
ap "BASE64: #{app_key_base64.chomp}"
# 5) Pass these generated signature-bytes in all the query parameters where you’re expected to provide a signature.
unix_timestamp = Time.now.to_i
# api_sample_url = "http://devsandbox.wiu.edu.desire2learnvalence.com/d2l/auth/api/token?x_a=#{app_id}&x_b=#{app_key_base64}&x_target=myrubyprog://something"
api_sample_url = "http://devsandbox.wiu.edu.desire2learnvalence.com/d2l/api/lp/1.0/users/whoami?x_a=#{app_id}&x_b=#{user_id}&x_c=#{app_key_base64}&x_d=#{user_key_base64}&x_t=#{unix_timestamp}"
ap api_sample_url
RestClient.get(api_sample_url){ |response, request, result, &block|
  case response.code
  when 200
    p 'It worked !'
    ap response
  when 423
    fail SomeCustomExceptionIfYouWant
  else
    response.return!(request, result, &block)
  end
}
ap response
exit

API_TOKEN_PATH = '/d2l/auth/api/token'
URL = 'https://westernonline-beta.wiu.edu'
USER_ID = 'Badm7gxucDxEbLzE0YO4e3'
USER_KEY = 'xUIxUIiemgANTr2n-E3zz0'
# https://westernonline-beta.wiu.edu/d2l/auth/api/token?x+target=http://www.wiu.edu&x_a=G9nUpvbZQyiPrk3um2YAkQ&x_b=ybZu7fm_JKJTFwKEHfoZ7Q
response = RestClient.get "https://westernonline-beta.wiu.edu/d2l/auth/api/lp/1.0/users/whoami?x_a=#{APP_ID}&x_b=#{USER_ID}&"
puts response
change hostname
require 'rubygems'
require 'ap' # awesome_print gem
require 'base64'
require 'hmac-sha2' # ruby-hmac gem
require 'rest_client' # rest-client gem
# Assembles the authenticated Valence query string (x_a/x_b ids, x_c/x_d
# signatures, x_t timestamp) from the credential globals.
def build_authenticated_uri_query_string(signature, timestamp)
  "?x_a=#{$app_id}&x_b=#{$user_id}&x_c=#{get_base64_hash_string($app_key, signature)}&x_d=#{get_base64_hash_string($user_key, signature)}&x_t=#{timestamp}"
end
# Builds the full signed request URI against the configured $hostname.
def create_authenticated_uri(path, httpmethod)
  query_string = get_query_string(path, httpmethod)
  "http://#{$hostname}#{path}#{query_string}"
end
# D2L Valence signature base string: "METHOD&path&timestamp" with an
# upper-cased HTTP verb and a lower-cased path.
def format_signature(path, httpmethod, timestamp)
  [httpmethod.upcase, path.downcase, timestamp].join("&")
end
def get_base64_hash_string(key, signature)
sha256_string = HMAC::SHA256.hexdigest(signature, key)
sha256_string2 = HMAC::SHA256.hexdigest(key, signature)
ap "sig first: #{sha256_string}"
ap "key first: #{sha256_string2}"
urlsafe_base64_string = Base64.urlsafe_encode64(sha256_string2).gsub('=', '')
end
# Produces the signed query string for path/httpmethod. The timestamp is
# skewed one hour into the future — presumably to tolerate clock drift
# between client and server; TODO confirm against the Valence docs.
def get_query_string(path, httpmethod)
  timestamp = Time.now.to_i + 3600
  signature = format_signature(path, httpmethod, timestamp)
  query_string = build_authenticated_uri_query_string(signature, timestamp)
end
# --- configuration ----------------------------------------------------------
# SECURITY(review): live app/user credentials are hard-coded below, and the
# comment under $app_key looks like an admin username:password. These should
# live in the environment or an ignored config file, and be rotated.
# devsandbox.wiu.edu.desire2learnvalence.com
$app_id = 'G9nUpvbZQyiPrk3um2YAkQ'
$app_key = 'ybZu7fm_JKJTFwKEHfoZ7Q'
# admin:mencel6377e943
$user_id = 'sNdpIGdXMBbjpNtXGSBXVO'
$user_key = 'vnRxMIDYeGbd0E9-vNeN42'
$hostname = 'wiutest.desire2learn.com'

# Issue a signed whoami request; the RestClient block form lets us handle
# non-2xx response codes ourselves.
path = '/d2l/api/lp/1.0/users/whoami'
httpmethod = 'GET'
the_uri = create_authenticated_uri(path, httpmethod)
puts "THE_URI: #{the_uri}"
RestClient.get(the_uri){ |response, request, result, &block|
  case response.code
  when 200
    p 'It worked !'
    ap response
  when 423
    # NOTE(review): SomeCustomExceptionIfYouWant is not defined anywhere in
    # this script — a 423 would raise NameError, not a custom exception.
    fail SomeCustomExceptionIfYouWant
  else
    response.return!(request, result, &block)
  end
}
exit

# ---------------------------------------------------------------------------
# Everything below this `exit` is unreachable scratch kept as working notes.
# If executed it would raise NameError: it references locals that are never
# assigned (app_key, user_key, app_id, user_id), the undefined constant
# APP_ID, and (near the end) a bare `response`. It also still targets the
# old devsandbox host rather than $hostname.
# ---------------------------------------------------------------------------
ap "KEY: #{app_key}"
# 1) Take the key used to create the signature and produce a set of key-bytes by ASCII-encoding the key.
app_key_asc = app_key.encode('ASCII')
user_key_asc = user_key.encode('ASCII')
ap "ASCII: #{app_key_asc}"
# 2) Take the base-string for the signature and produce a set of base-string-bytes by UTF8-encoding the base-string.
app_key_utf8 = app_key_asc.encode('UTF-8')
user_key_utf8 = user_key_asc.encode('UTF-8')
ap "UTF8: #{app_key_utf8}"
# 3) Produce the HMAC-SHA256 signature hash by using the key-bytes and base-string-bytes as input parameters.
signature = "GET&/d2l/api/lp/1.0/users/whoami&#{Time.now.to_i + 3600}"
app_key_sha256 = HMAC::SHA256.hexdigest(signature, app_key_utf8)
user_key_sha256 = HMAC::SHA256.hexdigest(signature, user_key_utf8)
ap "SHA256: #{app_key_sha256}"
# 4) Take the signature hash and produce a set of signature-bytes by base64url encoding the hash (see RFC 4648; no ‘=’ padding used, ‘+’ and ‘/’ replaced with ‘-‘ and ‘_’ respectively).
app_key_base64 = Base64.urlsafe_encode64(app_key_sha256).gsub('=', '')
user_key_base64 = Base64.urlsafe_encode64(user_key_sha256).gsub('=', '')
ap "BASE64: #{app_key_base64.chomp}"
# 5) Pass these generated signature-bytes in all the query parameters where you’re expected to provide a signature.
unix_timestamp = Time.now.to_i
# api_sample_url = "http://devsandbox.wiu.edu.desire2learnvalence.com/d2l/auth/api/token?x_a=#{app_id}&x_b=#{app_key_base64}&x_target=myrubyprog://something"
api_sample_url = "http://devsandbox.wiu.edu.desire2learnvalence.com/d2l/api/lp/1.0/users/whoami?x_a=#{app_id}&x_b=#{user_id}&x_c=#{app_key_base64}&x_d=#{user_key_base64}&x_t=#{unix_timestamp}"
ap api_sample_url
RestClient.get(api_sample_url){ |response, request, result, &block|
  case response.code
  when 200
    p 'It worked !'
    ap response
  when 423
    fail SomeCustomExceptionIfYouWant
  else
    response.return!(request, result, &block)
  end
}
ap response
exit

API_TOKEN_PATH = '/d2l/auth/api/token'
URL = 'https://wiutest.desire2learn.comu' # NOTE(review): ".comu" looks like a typo for ".com" — unreachable, but fix before reuse
USER_ID = 'Badm7gxucDxEbLzE0YO4e3'
USER_KEY = 'xUIxUIiemgANTr2n-E3zz0'
response = RestClient.get "https://wiutest.desire2learn.com/d2l/auth/api/lp/1.0/users/whoami?x_a=#{APP_ID}&x_b=#{USER_ID}&"
puts response
|
# Test.rb
#
# Description: Implements replacive fuzzing by testing a set of URLs with
# attack vectors and determines if a vulnerability occurred on
# the target.
#
# Author: Peter Mikitsh pam3961
# Author: Akshay Karnawat
require 'mechanize'
require 'net/http'
class Test
  # Fuzz every url with every vector.
  # urls: string array of absolute URL paths to test
  # vectors: string array of replacive vectors to append to urls
  # authAgent: optional 'Mechanize' agent (if authentication is used)
  # random: unused here; accepted for interface compatibility — TODO confirm caller intent
  # slow: open/read timeout (seconds) for a freshly created agent
  def self.test(urls, vectors, authAgent, random, slow)
    puts "Testing Vectors..."
    # reuse the authenticated agent when given, otherwise build one with
    # timeout attributes
    agent = authAgent ? authAgent : Mechanize.new {|a|
      a.open_timeout = slow
      a.read_timeout = slow
    }
    # BUG FIX: the url loop was commented out, so the whole `urls` Array was
    # handed to replaciveFuzz and createAttackURL raised TypeError
    # (Array + String). Iterate the urls as the loop comments intended.
    urls.each do |url|
      vectors.each do |vector|
        Test.replaciveFuzz(url, vector, agent)
      end
    end
  end

  # Build the attack URL by appending the vector to the base url.
  def self.createAttackURL(url, vector)
    return url + vector;
  end

  # Request the attack URL; unexpected HTTP response codes are reported
  # rather than aborting the whole run. (Also fixed the "Unexcepted" typo
  # in the report message.)
  def self.replaciveFuzz(url, vector, agent)
    begin
      puts "Testing #{vector} on #{url}"
      agent.get(Test.createAttackURL(url, vector))
    rescue Mechanize::ResponseCodeError => e
      puts "\t#{e.response_code} Unexpected response code."
    end
  end
end
catch slow server responses in test class
# Test.rb
#
# Description: Implements replacive fuzzing by testing a set of URLs with
# attack vectors and determines if a vulnerability occurred on
# the target.
#
# Author: Peter Mikitsh pam3961
# Author: Akshay Karnawat
require 'mechanize'
require 'net/http'
require 'timeout'
class Test
  # Fuzz every url with every vector.
  # urls: string array of absolute URL paths to test
  # vectors: string array of replacive vectors to append to urls
  # authAgent: optional 'Mechanize' agent (if authentication is used)
  # random: unused here; accepted for interface compatibility — TODO confirm caller intent
  # timeout: per-request timeout in seconds
  def self.test(urls, vectors, authAgent, random, timeout)
    puts "Testing Vectors..."
    # reuse the authenticated agent when given, otherwise start a fresh one
    agent = authAgent ? authAgent : Mechanize.new
    urls.each do |url|
      vectors.each do |vector|
        Test.replaciveFuzz(url, vector, agent, timeout)
      end
    end
  end

  # Build the attack URL by appending the vector to the base url.
  def self.createAttackURL(url, vector)
    return url + vector;
  end

  # Fetch the attack URL, reporting unexpected response codes and timeouts.
  def self.replaciveFuzz(url, vector, agent, timeout)
    begin
      puts "Testing #{vector} on #{url}"
      # BUG FIX: the timeout was hard-coded to 5 seconds; honor the caller's
      # `timeout` argument instead. (Also fixed the "Unexcepted" typo.)
      Timeout.timeout(timeout) { agent.get(Test.createAttackURL(url, vector)) }
    rescue Mechanize::ResponseCodeError => e
      puts "\t#{e.response_code} Unexpected response code for url #{url} with vector #{vector}."
    rescue Timeout::Error
      puts "Timeout error for url #{url} with vector #{vector}."
    end
  end
end
|
# Test.rb
#
# Description: Implements replacive fuzzing by testing a set of URLs with
# attack vectors and determines if a vulnerability occurred on
# the target.
#
# Author: Peter Mikitsh pam3961
# Author: Akshay Karnawat
require 'mechanize'
require 'net/http'
require 'timeout'
class Test
# urls: string array of absolute URL paths to test
# vectors: string array of replacive vectors to append to urls
# authAgent: optional 'Mechanize' agent (if authentication used
def self.test(urls, vectors, authAgent, random, timeout)
puts "Testing Vectors..."
# create a new agent with timeout attributes
agent = authAgent ? authAgent : Mechanize.new
urls.each do |url|
vectors.each do |vector|
Test.replaciveFuzz(url, vector, agent, timeout)
end
end
end
def self.createAttackURL(url, vector)
return url + vector;
end
def self.replaciveFuzz(url, vector, agent, timeout)
begin
puts "Testing #{vector} on #{url}"
Timeout.timeout(timeout) { agent.get(Test.createAttackURL(url, vector)) }
rescue Mechanize::ResponseCodeError => e
puts "\t Possible vulnerability identified. #{e.response_code} Unexcepted response code for url #{url} with vector #{vector}."
rescue Timeout::Error
puts "\t Possible vulnerability identified. Timeout error for url #{url} with vector #{vector}."
end
if agent.body.include? vector
puts "\t Possible vulnerability identified. The response body contains the attack vector. Vector: #{vector} Url: #{url}"
end
end
end
changed urls array to url
# Test.rb
#
# Description: Implements replacive fuzzing by testing a set of URLs with
#              attack vectors and determines whether a vulnerability occurred
#              on the target.
#
# Author: Peter Mikitsh pam3961
# Author: Akshay Karnawat
require 'mechanize'
require 'net/http'
require 'timeout'
class Test
# url: string array of absolute URL paths to test
# vectors: string array of replacive vectors to append to urls
# authAgent: optional 'Mechanize' agent (if authentication used
def self.test(url, vectors, authAgent, random, timeout)
puts "Testing Vectors..."
# create a new agent with timeout attributes
agent = authAgent ? authAgent : Mechanize.new
vectors.each do |vector|
Test.replaciveFuzz(url, vector, agent, timeout)
end
end
def self.createAttackURL(url, vector)
return url + vector;
end
def self.replaciveFuzz(url, vector, agent, timeout)
begin
puts "Testing #{vector} on #{url}"
Timeout.timeout(timeout) { agent.get(Test.createAttackURL(url, vector)) }
rescue Mechanize::ResponseCodeError => e
puts "\t Possible vulnerability identified. #{e.response_code} Unexcepted response code for url #{url} with vector #{vector}."
rescue Timeout::Error
puts "\t Possible vulnerability identified. Timeout error for url #{url} with vector #{vector}."
end
if agent.body.include? vector
puts "\t Possible vulnerability identified. The response body contains the attack vector. Vector: #{vector} Url: #{url}"
end
end
end
|
require 'repf.rb'
#require 'neural_net.rb'
require 'ruby-fann'
# Linearly rescales +value+ from the range [from_low, from_high] onto
# [to_low, to_high]; always returns a Float.
# BUG FIX: the result previously omitted the +to_low+ offset, so any target
# range not starting at 0 was mapped incorrectly. Every call site in this
# script uses to_low = 0, which masked the bug.
def scale(value, from_low, from_high, to_low, to_high)
  to_low + (value - from_low) * (to_high - to_low) / (from_high - from_low).to_f
end
# Mean squared error between two equal-length numeric vectors.
def mse(actual, ideal)
  squared_total = 0.0
  actual.each_index do |idx|
    diff = actual[idx] - ideal[idx]
    squared_total += diff * diff
  end
  squared_total / actual.length
end
# Evaluates +neuralnet+ on a test set and returns summary statistics:
#   [mean absolute error in watts, mean per-sample MSE, R^2]
#
# neuralnet:        object responding to #run(input) -> output array
# inputs:           array of input vectors (scaled to 0..1)
# expected_outputs: array of expected output vectors (first element is used
#                   for the watts / R^2 statistics)
def run_test(neuralnet, inputs, expected_outputs)
  watts_err, errsum = 0, 0
  outputs = []
  inputs.each.with_index do |input, i|
    output = neuralnet.run input
    outputs << output
    # Accumulate absolute error converted back to the original unit (watts).
    watts_err += (to_watts(output[0]) - to_watts(expected_outputs[i][0])).abs
    errsum += mse(output, expected_outputs[i])
  end
  # Coefficient of determination (R^2) over the first output component.
  y_mean = expected_outputs.inject(0.0) { |sum, val| sum + val[0] } / expected_outputs.size
  y_sum_squares = expected_outputs.map{|val| (val[0] - y_mean)**2 }.reduce(:+)
  y_residual_sum_squares = outputs.zip(expected_outputs).map {|out, expected| (expected[0] - out[0])**2 }.reduce(:+)
  r_squared = 1.0 - (y_residual_sum_squares / y_sum_squares)
  [watts_err / inputs.length.to_f, errsum / inputs.length.to_f, r_squared]
end
# Prints the first 10 samples of (x, y) as "actual / predicted / abs error"
# rows, converted back to watts for readability.
# NOTE(review): assumes x and y contain at least 10 entries -- confirm.
def show_examples(neuralnet, x, y)
  puts "Actual\tPredict\tError (watts)"
  10.times do |i|
    output = neuralnet.run x[i]
    predicted = to_watts(output[0])
    actual = to_watts(y[i][0])
    puts "#{actual.round(1)}\t#{predicted.round(1)}\t#{(predicted - actual).abs.round(1)}"
  end
end
# Converts a normalised 0..1 network output back to watts (0..1500).
def to_watts(value)
  scale(value, 0, 1, 0, 1500)
end
# --- Synthetic data generation -------------------------------------------
# Builds DATA_SIZE random solar-panel samples via the REPF model.
raw_data = []
DATA_SIZE = 1000
DATA_SIZE.times do
  temp = ( 20 * (rand() * 3).to_i)
  raw_data << REPF::Solar.new( :capacity => 1000, :insolation => ( 700 + rand() * 600 ), :temperature => temp, :cloud_cover => ( rand() / 3 ).to_i )
end
# Normalise all features and the target into the 0..1 range for the network.
input_stream = []
output_stream = []
raw_data.each do |d|
  inputs = [ scale( d.capacity, 0, 1500, 0, 1 ),
             scale( d.insolation, 0, 3000, 0, 1 ),
             scale( d.temperature, 0, 120, 0, 1 ),
             scale( d.cloud_cover, 0, 100, 0, 1 ) ]
  outputs = [ scale( d.instant_power, 0, 1500, 0, 1 ) ]
  input_stream << inputs
  output_stream << outputs
end
# --- Train/test split ----------------------------------------------------
# NOTE(review): the training slices below span the *entire* data set, so the
# test slice (second half) is also trained on -- the reported test metrics
# are optimistic. The commented-out lines show the intended split.
test_size = DATA_SIZE / 2
train_size = raw_data.length - test_size
#x_train = input_stream.slice(0, train_size)
x_train = input_stream.slice(0, raw_data.length)
#y_train = output_stream.slice(0, train_size)
y_train = output_stream.slice(0, raw_data.length)
x_test = input_stream.slice(train_size, test_size)
y_test = output_stream.slice(train_size, test_size)
# --- Network setup and training (ruby-fann) ------------------------------
#neuralnet = NeuralNet.new [4,4,1]
train = RubyFann::TrainData.new(:inputs => x_train, :desired_outputs => y_train)
neuralnet = RubyFann::Standard.new(:num_inputs => 4, :hidden_neurons => [4], :num_outputs => 1)
puts "Testing the untrained network..."
watts_err, avg_mse, r_squared = run_test(neuralnet, x_test, y_test)
puts "Average prediction error: #{watts_err.round(2)} watts (mse: #{(avg_mse * 100).round(2)}%, r-squared: #{r_squared.round(2)})"
puts "\nTraining the network...\n\n"
t1 = Time.now
#result = neuralnet.train(x_train, y_train, error_threshold: 0.0005,
#                 max_iterations: 1000,
#                 log_every: 50
#                 )
# Train for up to 20000 epochs, reporting every 500, target MSE 0.00005.
neuralnet.train_on_data(train, 20000, 500, 0.00005)
# puts result
#puts "\nDone training the network: #{result[:iterations]} iterations, #{(result[:error] * 100).round(2)}% mse, #{(Time.now - t1).round(1)}s"
puts "\nTesting the trained network..."
watts_err, avg_mse, r_squared = run_test(neuralnet, x_test, y_test)
puts "Average prediction error: #{watts_err.round(2)} watts (mse: #{(avg_mse * 100).round(2)}%, r-squared: #{r_squared.round(2)})"
puts "\nTrained test examples (first 10):"
show_examples(neuralnet, x_test, y_test)
Deleted the large complicated test program
|
conflict test
sdfsdf
|
# coding: utf-8
# Gemspec for the fog meta-gem; aggregates the modular fog-* provider gems.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "fog/version"
Gem::Specification.new do |s|
  s.specification_version = 2 if s.respond_to? :specification_version=
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  ## Leave these as is they will be modified for you by the rake gemspec task.
  ## If your rubyforge_project name is different, then edit it and comment out
  ## the sub! line in the Rakefile
  s.name = "fog"
  s.version = "2.2.0"
  s.date = "2019-06-18"
  ## Make sure your summary is short. The description may be as long
  ## as you like.
  s.summary = "brings clouds to you"
  s.description = "The Ruby cloud services library. Supports all major cloud providers including AWS, Rackspace, Linode, Blue Box, StormOnDemand, and many others. Full support for most AWS services including EC2, S3, CloudWatch, SimpleDB, ELB, and RDS."
  ## List the primary authors. If there are a bunch of authors, it's probably
  ## better to set the email to an email list or something. If you don't have
  ## a custom homepage, consider using your GitHub URL or the like.
  s.authors = ["geemus (Wesley Beary)"]
  s.email = "geemus@gmail.com"
  s.homepage = "https://github.com/fog/fog"
  s.license = "MIT"
  ## This sections is only necessary if you have C extensions.
  # s.require_paths << 'ext'
  # s.extensions = %w[ext/extconf.rb]
  ## This gets added to the $LOAD_PATH so that 'lib/NAME.rb' can be required as
  ## require 'NAME.rb' or'/lib/NAME/file.rb' can be as require 'NAME/file.rb'
  s.require_paths = %w[lib]
  ## If your gem includes any executables, list them here.
  s.executables = ["fog"]
  ## Specify any RDoc options here. You'll want to add your README and
  ## LICENSE files to the extra_rdoc_files list.
  s.rdoc_options = ["--charset=UTF-8"]
  s.extra_rdoc_files = %w[README.md]
  s.required_ruby_version = '>= 2.5.0'
  s.add_dependency("fog-core", "~> 2.1")
  s.add_dependency("fog-json")
  s.add_dependency("fog-xml", "~> 0.1.1")
  s.add_dependency("json", "~> 2.3")
  s.add_dependency("ipaddress", "~> 0.5")
  # Modular providers (please keep sorted)
  s.add_dependency("fog-aliyun",">= 0.1.0")
  s.add_dependency("fog-atmos")
  s.add_dependency("fog-aws", ">= 0.6.0")
  s.add_dependency("fog-brightbox", ">= 0.4", "< 2.0")
  s.add_dependency("fog-cloudatcost", "~> 0.4")
  s.add_dependency("fog-cloudstack", "~> 0.1.0")
  s.add_dependency("fog-digitalocean", ">= 0.3.0")
  s.add_dependency("fog-dnsimple", "~> 2.1")
  s.add_dependency("fog-dynect", ">= 0.0.2", "< 0.6.0")
  s.add_dependency("fog-ecloud", "~> 0.1")
  s.add_dependency("fog-google", "~> 1.0")
  s.add_dependency("fog-internet-archive")
  s.add_dependency("fog-local")
  s.add_dependency("fog-openstack")
  s.add_dependency("fog-ovirt")
  s.add_dependency("fog-powerdns", ">= 0.1.1")
  s.add_dependency("fog-profitbricks")
  s.add_dependency("fog-rackspace")
  s.add_dependency("fog-radosgw", ">= 0.0.2")
  s.add_dependency("fog-riakcs")
  s.add_dependency("fog-sakuracloud", ">= 0.0.4")
  s.add_dependency("fog-serverlove")
  s.add_dependency("fog-softlayer")
  s.add_dependency("fog-storm_on_demand")
  s.add_dependency("fog-terremark")
  s.add_dependency("fog-vmfusion")
  s.add_dependency("fog-voxel")
  s.add_dependency("fog-vsphere", ">= 0.4.0")
  s.add_dependency("fog-xenserver")
  # Development-only tooling; rubocop is pinned to an exact version.
  s.add_development_dependency("docker-api", ">= 1.13.6")
  s.add_development_dependency("fission")
  s.add_development_dependency("mime-types")
  s.add_development_dependency("minitest")
  s.add_development_dependency("minitest-stub-const")
  s.add_development_dependency("opennebula")
  s.add_development_dependency("pry")
  s.add_development_dependency("rake")
  s.add_development_dependency("rbvmomi")
  s.add_development_dependency("rubocop", "1.18.2")
  s.add_development_dependency("shindo", "~> 0.3.4")
  s.add_development_dependency("simplecov")
  s.add_development_dependency("thor")
  s.add_development_dependency("yard")
  s.add_development_dependency("rspec-core")
  s.add_development_dependency("rspec-expectations")
  s.add_development_dependency("vcr")
  s.add_development_dependency("webmock","~>3.13.0")
  # File lists come from git, so the gem must be built from a checkout.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec,tests}/*`.split("\n")
  postinstall_message = <<-POSTINST
------------------------------
Thank you for installing fog!
IMPORTANT NOTICE:
If there's a metagem available for your cloud provider, e.g. `fog-aws`,
you should be using it instead of requiring the full fog collection to avoid
unnecessary dependencies.
'fog' should be required explicitly only if the provider you use doesn't yet
have a metagem available.
------------------------------
POSTINST
  s.post_install_message = postinstall_message
end
Update rubocop requirement from = 1.18.2 to = 1.18.3
Updates the requirements on [rubocop](https://github.com/rubocop/rubocop) to permit the latest version.
- [Release notes](https://github.com/rubocop/rubocop/releases)
- [Changelog](https://github.com/rubocop/rubocop/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rubocop/rubocop/compare/v1.18.2...v1.18.3)
---
updated-dependencies:
- dependency-name: rubocop
dependency-type: direct:development
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
# coding: utf-8
# Gemspec for the fog meta-gem; aggregates the modular fog-* provider gems.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "fog/version"
Gem::Specification.new do |s|
  s.specification_version = 2 if s.respond_to? :specification_version=
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  ## Leave these as is they will be modified for you by the rake gemspec task.
  ## If your rubyforge_project name is different, then edit it and comment out
  ## the sub! line in the Rakefile
  s.name = "fog"
  s.version = "2.2.0"
  s.date = "2019-06-18"
  ## Make sure your summary is short. The description may be as long
  ## as you like.
  s.summary = "brings clouds to you"
  s.description = "The Ruby cloud services library. Supports all major cloud providers including AWS, Rackspace, Linode, Blue Box, StormOnDemand, and many others. Full support for most AWS services including EC2, S3, CloudWatch, SimpleDB, ELB, and RDS."
  ## List the primary authors. If there are a bunch of authors, it's probably
  ## better to set the email to an email list or something. If you don't have
  ## a custom homepage, consider using your GitHub URL or the like.
  s.authors = ["geemus (Wesley Beary)"]
  s.email = "geemus@gmail.com"
  s.homepage = "https://github.com/fog/fog"
  s.license = "MIT"
  ## This sections is only necessary if you have C extensions.
  # s.require_paths << 'ext'
  # s.extensions = %w[ext/extconf.rb]
  ## This gets added to the $LOAD_PATH so that 'lib/NAME.rb' can be required as
  ## require 'NAME.rb' or'/lib/NAME/file.rb' can be as require 'NAME/file.rb'
  s.require_paths = %w[lib]
  ## If your gem includes any executables, list them here.
  s.executables = ["fog"]
  ## Specify any RDoc options here. You'll want to add your README and
  ## LICENSE files to the extra_rdoc_files list.
  s.rdoc_options = ["--charset=UTF-8"]
  s.extra_rdoc_files = %w[README.md]
  s.required_ruby_version = '>= 2.5.0'
  s.add_dependency("fog-core", "~> 2.1")
  s.add_dependency("fog-json")
  s.add_dependency("fog-xml", "~> 0.1.1")
  s.add_dependency("json", "~> 2.3")
  s.add_dependency("ipaddress", "~> 0.5")
  # Modular providers (please keep sorted)
  s.add_dependency("fog-aliyun",">= 0.1.0")
  s.add_dependency("fog-atmos")
  s.add_dependency("fog-aws", ">= 0.6.0")
  s.add_dependency("fog-brightbox", ">= 0.4", "< 2.0")
  s.add_dependency("fog-cloudatcost", "~> 0.4")
  s.add_dependency("fog-cloudstack", "~> 0.1.0")
  s.add_dependency("fog-digitalocean", ">= 0.3.0")
  s.add_dependency("fog-dnsimple", "~> 2.1")
  s.add_dependency("fog-dynect", ">= 0.0.2", "< 0.6.0")
  s.add_dependency("fog-ecloud", "~> 0.1")
  s.add_dependency("fog-google", "~> 1.0")
  s.add_dependency("fog-internet-archive")
  s.add_dependency("fog-local")
  s.add_dependency("fog-openstack")
  s.add_dependency("fog-ovirt")
  s.add_dependency("fog-powerdns", ">= 0.1.1")
  s.add_dependency("fog-profitbricks")
  s.add_dependency("fog-rackspace")
  s.add_dependency("fog-radosgw", ">= 0.0.2")
  s.add_dependency("fog-riakcs")
  s.add_dependency("fog-sakuracloud", ">= 0.0.4")
  s.add_dependency("fog-serverlove")
  s.add_dependency("fog-softlayer")
  s.add_dependency("fog-storm_on_demand")
  s.add_dependency("fog-terremark")
  s.add_dependency("fog-vmfusion")
  s.add_dependency("fog-voxel")
  s.add_dependency("fog-vsphere", ">= 0.4.0")
  s.add_dependency("fog-xenserver")
  # Development-only tooling; rubocop is pinned to an exact version
  # (managed by dependabot).
  s.add_development_dependency("docker-api", ">= 1.13.6")
  s.add_development_dependency("fission")
  s.add_development_dependency("mime-types")
  s.add_development_dependency("minitest")
  s.add_development_dependency("minitest-stub-const")
  s.add_development_dependency("opennebula")
  s.add_development_dependency("pry")
  s.add_development_dependency("rake")
  s.add_development_dependency("rbvmomi")
  s.add_development_dependency("rubocop", "1.18.3")
  s.add_development_dependency("shindo", "~> 0.3.4")
  s.add_development_dependency("simplecov")
  s.add_development_dependency("thor")
  s.add_development_dependency("yard")
  s.add_development_dependency("rspec-core")
  s.add_development_dependency("rspec-expectations")
  s.add_development_dependency("vcr")
  s.add_development_dependency("webmock","~>3.13.0")
  # File lists come from git, so the gem must be built from a checkout.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec,tests}/*`.split("\n")
  postinstall_message = <<-POSTINST
------------------------------
Thank you for installing fog!
IMPORTANT NOTICE:
If there's a metagem available for your cloud provider, e.g. `fog-aws`,
you should be using it instead of requiring the full fog collection to avoid
unnecessary dependencies.
'fog' should be required explicitly only if the provider you use doesn't yet
have a metagem available.
------------------------------
POSTINST
  s.post_install_message = postinstall_message
end
|
# Gemspec for fpm, the "effing package management" tool.
require File.join(File.dirname(__FILE__), "lib/fpm/version")
Gem::Specification.new do |spec|
  # Package everything under lib/bin/templates plus the legal/changelog files.
  files = []
  dirs = %w{lib bin templates}
  dirs.each do |dir|
    files += Dir["#{dir}/**/*"]
  end
  files << "LICENSE"
  files << "CONTRIBUTORS"
  files << "CHANGELIST"
  spec.name = "fpm"
  spec.version = FPM::VERSION
  spec.summary = "fpm - package building and mangling"
  spec.description = "Convert directories, rpms, python eggs, rubygems, and " \
    "more to rpms, debs, solaris packages and more. Win at package " \
    "management without wasting pointless hours debugging bad rpm specs!"
  spec.license = "MIT-like"
  # For parsing JSON (required for some Python support, etc)
  # http://flori.github.com/json/doc/index.html
  spec.add_dependency("json", ">= 1.7.7") # license: Ruby License
  # For logging
  # https://github.com/jordansissel/ruby-cabin
  spec.add_dependency("cabin", ">= 0.6.0") # license: Apache 2
  # For backports to older rubies
  # https://github.com/marcandre/backports
  spec.add_dependency("backports", ">= 2.6.2") # license: MIT
  # For reading and writing rpms
  spec.add_dependency("arr-pm", "~> 0.0.8") # license: Apache 2
  # For http stuff
  spec.add_dependency("ftw", "~> 0.0.30") # license: Apache 2
  # For command-line flag support
  # https://github.com/mdub/clamp/blob/master/README.markdown
  spec.add_dependency("clamp", "~> 0.6") # license: MIT
  # For starting external processes across various ruby interpreters
  spec.add_dependency("childprocess") # license: ???
  # For calling functions in dynamic libraries
  spec.add_dependency("ffi") # license: GPL3/LGPL3
  spec.add_development_dependency("rspec") # license: MIT (according to wikipedia)
  spec.add_development_dependency("insist", "~> 0.0.5") # license: ???
  spec.add_development_dependency("minitest")
  spec.add_development_dependency("pry")
  spec.add_development_dependency("stud")
  spec.files = files
  spec.require_paths << "lib"
  spec.bindir = "bin"
  spec.executables << "fpm"
  spec.author = "Jordan Sissel"
  spec.email = "jls@semicomplete.com"
  spec.homepage = "https://github.com/jordansissel/fpm"
end
Upgrade arr-pm to 0.0.9; fixes a race condition in RPM file reading
This only affected the tests, best I can tell, but would have only
caused problems for using '-s rpm' (rpm input).
# Gemspec for fpm, the "effing package management" tool.
require File.join(File.dirname(__FILE__), "lib/fpm/version")
Gem::Specification.new do |spec|
  # Package everything under lib/bin/templates plus the legal/changelog files.
  files = []
  dirs = %w{lib bin templates}
  dirs.each do |dir|
    files += Dir["#{dir}/**/*"]
  end
  files << "LICENSE"
  files << "CONTRIBUTORS"
  files << "CHANGELIST"
  spec.name = "fpm"
  spec.version = FPM::VERSION
  spec.summary = "fpm - package building and mangling"
  spec.description = "Convert directories, rpms, python eggs, rubygems, and " \
    "more to rpms, debs, solaris packages and more. Win at package " \
    "management without wasting pointless hours debugging bad rpm specs!"
  spec.license = "MIT-like"
  # For parsing JSON (required for some Python support, etc)
  # http://flori.github.com/json/doc/index.html
  spec.add_dependency("json", ">= 1.7.7") # license: Ruby License
  # For logging
  # https://github.com/jordansissel/ruby-cabin
  spec.add_dependency("cabin", ">= 0.6.0") # license: Apache 2
  # For backports to older rubies
  # https://github.com/marcandre/backports
  spec.add_dependency("backports", ">= 2.6.2") # license: MIT
  # For reading and writing rpms
  # 0.0.9 fixes a race condition in RPM file reading (per the commit log).
  spec.add_dependency("arr-pm", "~> 0.0.9") # license: Apache 2
  # For http stuff
  spec.add_dependency("ftw", "~> 0.0.30") # license: Apache 2
  # For command-line flag support
  # https://github.com/mdub/clamp/blob/master/README.markdown
  spec.add_dependency("clamp", "~> 0.6") # license: MIT
  # For starting external processes across various ruby interpreters
  spec.add_dependency("childprocess") # license: ???
  # For calling functions in dynamic libraries
  spec.add_dependency("ffi") # license: GPL3/LGPL3
  spec.add_development_dependency("rspec") # license: MIT (according to wikipedia)
  spec.add_development_dependency("insist", "~> 0.0.5") # license: ???
  spec.add_development_dependency("minitest")
  spec.add_development_dependency("pry")
  spec.add_development_dependency("stud")
  spec.files = files
  spec.require_paths << "lib"
  spec.bindir = "bin"
  spec.executables << "fpm"
  spec.author = "Jordan Sissel"
  spec.email = "jls@semicomplete.com"
  spec.homepage = "https://github.com/jordansissel/fpm"
end
|
require 'formula'
# Homebrew formula for the FreeType 2.5.0.1 font rendering library.
class Freetype < Formula
  homepage 'http://www.freetype.org'
  url 'http://downloads.sf.net/project/freetype/freetype2/2.5.0/freetype-2.5.0.1.tar.gz'
  sha1 '2d539b375688466a8e7dcc4260ab21003faab08c'
  option :universal
  # Standard autotools build into the Homebrew prefix.
  def install
    ENV.universal_binary if build.universal?
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make install"
  end
  # Smoke test: freetype-config must answer its basic queries.
  test do
    system "#{bin}/freetype-config", '--cflags', '--libs', '--ftversion',
           '--exec-prefix', '--prefix'
  end
end
freetype depends on libpng
require 'formula'
class Freetype < Formula
homepage 'http://www.freetype.org'
url 'http://downloads.sf.net/project/freetype/freetype2/2.5.0/freetype-2.5.0.1.tar.gz'
sha1 '2d539b375688466a8e7dcc4260ab21003faab08c'
option :universal
depends_on 'staticfloat/juliadeps/libpng'
def install
ENV.universal_binary if build.universal?
system "./configure", "--prefix=#{prefix}"
system "make"
system "make install"
end
test do
system "#{bin}/freetype-config", '--cflags', '--libs', '--ftversion',
'--exec-prefix', '--prefix'
end
end
|
## Remember to run 'bundle install' if something in Gemfile has changed!
## To now start the app run 'rackup -p 4567' instead of 'ruby kt.rb' !
require 'rubygems'
require 'bundler'
require 'sinatra/base'
require "sinatra/contrib/all"
require 'sinatra/assetpack'
require 'rack-flash'
require 'rack/flash/test'
require 'filesize'
require 'dalli'
require 'memcachier'
require 'rack/session/dalli'
require 'rack-cache'
require './UserAccount'
#require './helpers/KtApi'
#require './helpers/DetailsHelper'
require './PasswordRecoveryList'
Bundler.require(:default)
class KtApp < Sinatra::Base
set :root, File.dirname(__FILE__)
register Sinatra::Contrib
register Sinatra::AssetPack
require_relative "helpers/KtApi"
require_relative "helpers/DetailsHelper"
require_relative "helpers/SearchHelper"
require_relative "helpers/ApplicationHelper"
require_relative "helpers/SessionHelper"
require_relative "helpers/MailSendHelper"
# Enable flash messages
use Rack::Flash, :sweep => true
helpers do
  # Flash message categories the layout knows how to render.
  def flash_types
    [:success, :notice, :warning, :error]
  end
end
#Some configurations
configure do
  # Set up Memcache
  # Shared Dalli cache client with a 30-minute default TTL.
  dalliOptions={:expires_in =>1800} #30 minutes
  set :cache, Dalli::Client.new(nil,dalliOptions)
end
# Development wiring: in-memory sessions, SQLite, Braintree sandbox, Gmail SMTP.
# SECURITY(review): a session secret, Braintree keys and a Gmail password are
# committed in plain text below; move them to environment variables and
# rotate the exposed credentials.
configure :development do
  #enable sessions, for 900 seconds (15 minutes)
  use Rack::Session::Pool,
    :expire_after => 900,
    :key => "KtApp",
    :secret => "06c6a115a065cfd20cc2c9fcd2c3d7a7d354de3189ee58bce0240abd586db044"
  # at Development SQLlite will do fine
  DataMapper.setup(:default, "sqlite3://#{Dir.pwd}/development.db")
  DataMapper.auto_upgrade!
  # Payments
  Braintree::Configuration.environment = :sandbox
  Braintree::Configuration.merchant_id = "6d3bxmf7cd8g9m7s"
  Braintree::Configuration.public_key = "2tdfpxc79jtk4437"
  Braintree::Configuration.private_key = "ca0de6ffc93d667297cf6b533981316a"
  # Mail Send
  Mail.defaults do
    delivery_method :smtp, { :address => "smtp.gmail.com",
      :port => 587,
      :user_name => "vvanchesa@gmail.com",
      :password => "bla123_yuhuu",
      :authentication => :plain,
      :enable_starttls_auto => true }
  end
  # NOTE(review): auto_upgrade! is already invoked above; this second call
  # is redundant.
  DataMapper.auto_upgrade!
end
#Some configurations
# Production wiring: memcache-backed sessions, Postgres, SendGrid SMTP.
# SECURITY(review): the session :secret is hard-coded (same value as in
# development) and :secure is false -- session cookies travel over plain
# HTTP. Both should come from ENV / be enabled behind SSL.
configure :production do
  use Rack::Session::Dalli,
    :cache => Dalli::Client.new,
    :expire_after => 900, # 15 minutes
    :key => 'keytech_web', # cookie name (probably change this)
    :secret => '06c6a115a065cfd20cc2c9fcd2c3d7a7d354de3189ee58bce0240abd586db044',
    :httponly => true, # bad js! No cookies for you!
    :compress => true,
    :secure => false, # NOTE: if you're storing user authentication information in session set this to true and provide pages via SSL instead of standard HTTP or, to quote nkp, "risk the firesheep!" Seriously, don't fuck around with this one.
    :path => '/'
  # A Postgres connection:
  DataMapper.setup(:default, ENV['DATABASE_URL'] || 'postgres://localhost/mydb')
  # TODO: wire up the production payment (Braintree) configuration
  # Mail Send
  Mail.defaults do
    delivery_method :smtp, { :address => "smtp.sendgrid.net",
      :port => 587,
      :user_name => ENV['SENDGRID_USERNAME'],
      :password => ENV['SENDGRID_PASSWORD'],
      :authentication => :plain,
      :enable_starttls_auto => true }
  end
  DataMapper.auto_upgrade!
end
# Sinatra::AssetPack pipeline: serves and minifies the app's JS/CSS bundles.
assets do
  serve '/js', from: 'app/js' # Default
  serve '/css', from: 'app/css' # Default
  serve '/images', from: 'app/images' # Default
  js :application, [
    #'/js/vendor/custom.modernizr.js',
    '/js/popup.js'
    # You can also do this: 'js/*.js'
  ]
  # NOTE(review): the :body bundle is currently empty (all entries commented
  # out) -- confirm it is still referenced by the layout.
  js :body, [
    #'/js/vendor/jquery.js',
    #'/js/foundation.min.js'
    # You can also do this: 'js/*.js'
  ]
  css :application, [
    #'/css/normalize.css',
    #'/css/foundation.css',
    '/css/application.css',
    '/css/search.css'
  ]
  js_compression :jsmin
  css_compression :simple
end
enable :method_override
#include Helpers module
helpers ApplicationHelper
helpers SearchHelper
helpers SessionHelper
helpers Sinatra::KtApiHelper
helpers DetailsHelper
helpers MailSendHelper
# Routes
# These are your Controllers! Can be outsourced to own files but I leave them here for now.
#main page controller
# Landing page: logged-in users go straight to search, everyone else sees
# the index/login page.
get '/' do
  if session[:user]
    redirect '/search'
  else
    #Never logged in, show the normal index / login page
    erb :index
  end
end
#new User signup page
# Renders the signup form.
get '/signup' do
  erb :signup
end
# Signup a new user, take POST arguments and try to create a new useraccount
# flash message if something goes wrong
# Creates a new UserAccount from the form fields and verifies it against the
# keytech API before starting a session.
# NOTE(review): when API access is denied, the already-saved account is kept
# and the handler falls through without a redirect or template -- the flash
# message string itself becomes the response body. Consider destroying the
# account and redirecting to '/signup'.
post '/signup' do
  @user = UserAccount.new(:email => params[:email],
    :password => params[:password], :password_confirmation => params[:password_confirmation],
    :keytechUserName =>params[:keytech_username],
    :keytechPassword => params[:keytech_password],
    :keytechAPIURL => params[:keytech_APIURL])
  if @user.save
    if UserAccount.hasKeytechAccess(@user)
      # OK, Access granted by API
      session[:user] = @user.id
      redirect '/'
    else
      flash[:warning] = "User access denied by keytech API."
    end
  else
    flash[:error] = @user.errors.full_messages
    redirect '/signup'
  end
end
# Account page; also executes subscription actions passed via ?action=.
# SECURITY(review): cancelPlan/startPlan mutate billing state on a GET
# request -- they can be triggered by a crafted link/img (CSRF) and should
# be POSTs.
get '/account' do
  # Shows an edit page for current account
  @user = currentUser
  if @user
    if params[:action].eql? "cancelPlan"
      print "Cancel Plan"
      # Cancel current subscription
      Braintree::Subscription.cancel(@user.subscriptionID)
      @user.subscriptionID = "" # Remove subscriptionID
      @user.save
      redirect '/account'
      return
    end
    if params[:action].eql? "startPlan"
      print "Start Plan"
      # Start a new subscription. (Now without any trials)
      customer = Braintree::Customer.find(@user.billingID)
      if customer
        payment_method_token = customer.credit_cards[0].token
        result = Braintree::Subscription.create(
          :payment_method_token => payment_method_token,
          :plan_id => "silver_plan",
          :options => {
            :start_immediately => true # A recreated plan does not have a trial period
          }
        )
        @user.subscriptionID = result.subscription.id # Add subscriptionID
        @user.save
        redirect '/account'
      else
        # Customer with this ID not found - remove from Customer
        @user.billingID = 0
        @user.save
        flash[:error] = "No customer record found. Please try again."
        redirect '/account'
      end
    end
    erb :account
  else
    redirect '/'
  end
end
# Updates keytech credentials, profile and/or password depending on which
# submit button (commit*) was pressed.
# SECURITY(review): `puts params` below writes the submitted passwords to
# the server log -- remove or redact it.
put '/account' do
  user = currentUser
  if user
    if params[:commitKeytechCredentials] == "Save"
      user.keytechAPIURL = params[:keytechAPIURL]
      user.keytechPassword = params[:keytechPassword]
      user.keytechUserName = params[:keytechUserName]
      if !user.save
        flash[:warning] = user.errors.full_messages
      end
    end
    if params[:commitProfile] == "Save"
      # Do nothing!
      # Currently not allowed to change email address!
    end
    if params[:commitPassword] == "Save"
      # Check for current Password
      if !params[:current_password]
        flash[:error] = "Password was empty"
        redirect '/account'
      end
      # Re-authenticate before allowing a password change.
      authUser = UserAccount.authenticate(user.email, params[:current_password])
      if authUser
        password = params[:password]
        password_confirmation = params[:password_confirmation]
        if password.empty? && password_confirmation.empty?
          flash[:warning] = "New password can not be empty"
          redirect '/account'
        end
        if password.eql? password_confirmation
          user.password = password
          user.password_confirmation = password_confirmation
          if !user.save
            flash[:error] = user.errors.full_messages
          end
        else
          flash[:error] = "Password and password confirmation did not match."
        end
      else
        flash[:error] = "Current password is invalid"
      end
      puts params
    end
  else
    puts "No user found!"
  end
  # Return to account site
  redirect '/account'
end
# Sets a credit card for current logged in user
# Shows either the current billing plan or the new-customer form, depending
# on whether the user already has a subscription.
get '/account/subscription' do
  @user = currentUser
  if @user
    if !@user.subscriptionID.empty?
      # A billing customer is already given
      # TODO: a subscription may exist but be inactive -- check for 'Active' status
      erb :showBillingPlan
    else
      erb :customerAccount
    end
  else
    redirect'/'
  end
end
# For Payment Data
# Creates a Braintree customer from raw card fields and starts the default
# monthly plan.
# SECURITY(review): raw card number/CVV pass through this server (PCI scope);
# Braintree's client-side tokenization should be used instead.
# NOTE(review): `currentUser.billingID = ...` is assigned but never saved,
# and the bare "<h1>...</h1>" string literals have no effect (not returned).
# Also verify a '/create_customer' route actually exists.
post '/account/subscription' do
  result = Braintree::Customer.create(
    :first_name => params[:first_name],
    :last_name => params[:last_name],
    :credit_card => {
      :billing_address => {
        :postal_code => params[:postal_code]
      },
      :number => params[:number],
      :expiration_month => params[:month],
      :expiration_year => params[:year],
      :cvv => params[:cvv]
    }
  )
  if result.success?
    "<h1>Customer created with name: #{result.customer.first_name} #{result.customer.last_name}</h1>"
    currentUser.billingID = result.customer.id
    # Start the plan
    customer = result.customer
    payment_method_token = customer.credit_cards[0].token
    result = Braintree::Subscription.create(
      :payment_method_token => payment_method_token,
      :plan_id => "silver_plan" # This is the default monthly plan
    )
    if result.success?
      "<h1>Subscription Status #{result.subscription.status}"
    else
      flash[:error] = result.message
      redirect '/create_customer'
    end
  else
    # Something goes wrong
    flash[:error] = result.message
    redirect '/create_customer'
  end
end
#login controller
#login controller
# Authenticates the credentials and stores the user id in the session.
post '/login' do
  user = UserAccount.authenticate(params[:username],params[:passwd])
  if user
    session[:user] = user.id
    redirect '/search'
  else
    flash[:error] = "Invalid username or password"
    redirect '/'
  end
end
# Destroys the session and returns to the landing page.
get "/logout" do
  session.destroy
  #KtApi.destroy_session
  flash[:notice] = "You have logged out."
  redirect '/'
end
# Renders the password-recovery request form.
get '/account/forgotpassword' do
  erb :"passwordManagement/forgotpassword"
end
# Send a password recovery link
# Issues a fresh password-recovery token for the given address and mails it.
# SECURITY(review): the distinct "unknown email" message lets an attacker
# enumerate registered addresses; consider a uniform response.
post '/account/forgotpassword' do
  # Does this mail address exist?
  if params[:email].empty?
    flash[:warning] = "Enter a valid mail address"
    redirect '/account/forgotpassword'
    return
  end
  # Get user account by its mail
  user = UserAccount.first(:email => params[:email].to_s)
  if !user
    flash[:warning] = "This email address is unknown. Please enter a valid useraccount identified by it's email"
    redirect '/account/forgotpassword'
    return
  end
  # Delete all old password recoveries based in this email
  PasswordRecoveryList.all(:email => params[:email]).destroy
  # Generate a new password recovery pending entry
  newRecovery = PasswordRecoveryList.create(:email=> params[:email] )
  # Now send a mail
  if newRecovery
    sendPasswordRecoveryMail(newRecovery)
    flash[:notice] = "A recovery mail was send to #{params[:email]} please check your inbox."
    erb :"passwordManagement/recoveryMailSent"
  end
end
# Recovers lost password,if recoveryID is still valid in database
# Validates a recovery token from the mailed link; expired tokens are
# destroyed, valid ones lead to the new-password form.
get '/account/password/reset/:recoveryID' do
  if params[:recoveryID]
    recovery = PasswordRecoveryList.first(:recoveryID => params[:recoveryID])
    print "Recovery: #{recovery}"
    if recovery
      # Expired tokens are single-use garbage: destroy on sight.
      if !recovery.isValid?
        recovery.destroy
        flash[:warning] = "Recovery token has expired"
        return erb :"passwordManagement/invalidPasswordRecovery"
      end
      @user = UserAccount.first(:email => recovery.email.to_s)
      if @user
        print " User account found!"
        # Start a new password, if useraccount matches
        erb :"passwordManagement/newPassword"
      else
        flash[:warning] = "Can not recover a password from a deleted or disabled useraccount."
        erb :"passwordManagement/invalidPasswordRecovery"
      end
    else
      flash[:warning] = "Recovery token not found or invalid"
      erb :"passwordManagement/invalidPasswordRecovery"
    end
  else
    flash[:warning] = "Invalid page - a recovery token is missing."
    erb :"passwordManagement/invalidPasswordRecovery"
  end
end
# accepts a new password and assigns it to current user
# Accepts a new password and assigns it to the user account that owns
# the recovery token posted in params[:recoveryID].
# Silently does nothing when the token or the user cannot be found
# (matching the original behavior) — NOTE(review): consider surfacing an
# error to the visitor in that case.
post '/account/password/reset/' do
  recovery = PasswordRecoveryList.first(:recoveryID => params[:recoveryID])
  print " Recovery: #{recovery}"
  if recovery
    user = UserAccount.first(:email => recovery.email.to_s)
    if user
      print " User: #{user}"
      password = params[:password]
      password_confirmation = params[:password_confirmation]
      if password.empty? && password_confirmation.empty?
        flash[:warning] = "New password can not be empty"
        # BUG FIX: was single-quoted, so the recovery token was never
        # interpolated and the redirect pointed at a literal '#{...}' path.
        redirect "/account/password/reset/#{params[:recoveryID]}"
      end
      if password.eql? password_confirmation
        user.password = password
        user.password_confirmation = password_confirmation
        if !user.save
          flash[:error] = user.errors.full_messages
        else
          # Password accepted: the one-time token is consumed here.
          print " Password reset: OK!"
          recovery.destroy
          flash[:notice] = "Your new password was accepted. Login now with your new password."
          redirect '/'
        end
      else
        flash[:error] = "Password and password confirmation did not match."
        # BUG FIX: same single-quote interpolation bug as above.
        redirect "/account/password/reset/#{params[:recoveryID]}"
      end
    end
  end
end
#Loads a element detail, if present
get '/elementdetails/:elementKey' do
if currentUser
@elementKey = params[:elementKey]
if params[:format]=='json'
elementData = loadElement(@elementKey,'ALL')
return elementData.to_json
end
# OK, viewtype is relevant
@element = loadElement(@elementKey)
@detailsLink = "/elementdetails/#{params[:elementKey]}"
@viewType = params[:viewType]
erb :elementdetails
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
# redirects to a search page and fill search Data, parameter q is needed
get '/search' do
if currentUser
if currentUser.usesDemoAPI? || currentUser.hasValidSubscription?
@result= findElements(params[:q])
erb :search
else
flash[:warning] = "You need a valid subscription to use a API other than the demo API. Go to the account page and check your current subscription under the 'Billing' area."
erb :search
end
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
get '/admin' do
if loggedIn? && currentUser.isAdmin?
@users=UserAccount.all
erb :admin
else
flash[:notice] = "You are not logged in."
redirect '/'
end
end
get '/support' do
"<h3> To Be Done </h3>"
end
get '/about' do
"<h3> To Be Done </h3>"
end
get '/features' do
"<h3> To Be Done </h3>"
end
get '/pricing' do
"<h3> To Be Done </h3>"
end
# Redirection for file download
# Image forwarding. Redirect classimages provided by API to another image directly fetched by API
# Image forwarding: serves a class image fetched via the API as PNG.
get "/images/classimages/:classKey" do
  if currentUser
    # BUG FIX: 'mag_age' was a typo — sinatra-contrib's cache_control
    # expects :max_age, so no Cache-Control max-age header was ever sent.
    cache_control :public, max_age: 1800
    content_type "image/png"
    loadClassImage(params[:classKey])
  else
    flash[:notice] = sessionInvalidText
    redirect '/'
  end
end
get "/files/:elementKey/masterfile" do
if currentUser
content_type "application/octet-stream"
loadMasterfile(params[:elementKey])
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
get "/files/:elementKey/files/:fileID" do
if currentUser
content_type "application/octet-stream"
loadFile(params[:elementKey],params[:fileID])
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
get "/element/:thumbnailHint/thumbnail" do
if currentUser
content_type "image/png"
loadElementThumbnail(params[:thumbnailHint])
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
end
# Get database up to date
DataMapper.auto_upgrade!
changed path
## Remember to run 'bundle install' if something in Gemfile has changed!
## To now start the app run 'rackup -p 4567' instead of 'ruby kt.rb' !
require 'rubygems'
require 'bundler'
require 'sinatra/base'
require "sinatra/contrib/all"
require 'sinatra/assetpack'
require 'rack-flash'
require 'rack/flash/test'
require 'filesize'
require 'dalli'
require 'memcachier'
require 'rack/session/dalli'
require 'rack-cache'
require './UserAccount'
#require './helpers/KtApi'
#require './helpers/DetailsHelper'
require './PasswordRecoveryList'
Bundler.require(:default)
class KtApp < Sinatra::Base
set :root, File.dirname(__FILE__)
register Sinatra::Contrib
register Sinatra::AssetPack
require_relative "helpers/KtApi"
require_relative "helpers/DetailsHelper"
require_relative "helpers/SearchHelper"
require_relative "helpers/ApplicationHelper"
require_relative "helpers/SessionHelper"
require_relative "helpers/MailSendHelper"
# Enable flash messages
use Rack::Flash, :sweep => true
helpers do
def flash_types
[:success, :notice, :warning, :error]
end
end
#Some configurations
configure do
# Set up Memcache
dalliOptions={:expires_in =>1800} #30 minuten
set :cache, Dalli::Client.new(nil,dalliOptions)
end
configure :development do
#enable sessions, for 900 seconds (15 minutes)
use Rack::Session::Pool,
:expire_after => 900,
:key => "KtApp",
:secret => "06c6a115a065cfd20cc2c9fcd2c3d7a7d354de3189ee58bce0240abd586db044"
# at Development SQLlite will do fine
DataMapper.setup(:default, "sqlite3://#{Dir.pwd}/development.db")
DataMapper.auto_upgrade!
# Payments
Braintree::Configuration.environment = :sandbox
Braintree::Configuration.merchant_id = "6d3bxmf7cd8g9m7s"
Braintree::Configuration.public_key = "2tdfpxc79jtk4437"
Braintree::Configuration.private_key = "ca0de6ffc93d667297cf6b533981316a"
# Mail Send
Mail.defaults do
delivery_method :smtp, { :address => "smtp.gmail.com",
:port => 587,
:user_name => "vvanchesa@gmail.com",
:password => "bla123_yuhuu",
:authentication => :plain,
:enable_starttls_auto => true }
end
DataMapper.auto_upgrade!
end
#Some configurations
configure :production do
use Rack::Session::Dalli,
:cache => Dalli::Client.new,
:expire_after => 900, # 15 minutes
:key => 'keytech_web', # cookie name (probably change this)
:secret => '06c6a115a065cfd20cc2c9fcd2c3d7a7d354de3189ee58bce0240abd586db044',
:httponly => true, # bad js! No cookies for you!
:compress => true,
:secure => false, # NOTE: if you're storing user authentication information in session set this to true and provide pages via SSL instead of standard HTTP or, to quote nkp, "risk the firesheep!" Seriously, don't fuck around with this one.
:path => '/'
# A Postgres connection:
DataMapper.setup(:default, ENV['DATABASE_URL'] || 'postgres://localhost/mydb')
# TODO: Payments als Production Code einbauen
# Mail Send
Mail.defaults do
delivery_method :smtp, { :address => "smtp.sendgrid.net",
:port => 587,
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:authentication => :plain,
:enable_starttls_auto => true }
end
DataMapper.auto_upgrade!
end
assets do
serve '/js', from: 'app/js' # Default
serve '/css', from: 'app/css' # Default
serve '/images', from: 'app/images' # Default
js :application, [
#'/js/vendor/custom.modernizr.js',
'/js/popup.js'
# You can also do this: 'js/*.js'
]
js :body, [
#'/js/vendor/jquery.js',
#'/js/foundation.min.js'
# You can also do this: 'js/*.js'
]
css :application, [
#'/css/normalize.css',
#'/css/foundation.css',
'/css/application.css',
'/css/search.css'
]
js_compression :jsmin
css_compression :simple
end
enable :method_override
#include Helpers module
helpers ApplicationHelper
helpers SearchHelper
helpers SessionHelper
helpers Sinatra::KtApiHelper
helpers DetailsHelper
helpers MailSendHelper
# Routes
# These are your Controllers! Can be outsourced to own files but I leave them here for now.
#main page controller
get '/' do
if session[:user]
redirect '/search'
else
#Never logged in, show the normal index / login page
erb :index
end
end
#new User signup page
get '/signup' do
erb :signup
end
# Signup a new user, take POST arguments and try to create a new useraccount
# flash message if something goes wrong
post '/signup' do
@user = UserAccount.new(:email => params[:email],
:password => params[:password], :password_confirmation => params[:password_confirmation],
:keytechUserName =>params[:keytech_username],
:keytechPassword => params[:keytech_password],
:keytechAPIURL => params[:keytech_APIURL])
if @user.save
if UserAccount.hasKeytechAccess(@user)
# OK, Access granted by API
session[:user] = @user.id
redirect '/'
else
flash[:warning] = "User access denied by keytech API."
end
else
flash[:error] = @user.errors.full_messages
redirect '/signup'
end
end
get '/account' do
# Shows an edit page for current account
@user = currentUser
if @user
if params[:action].eql? "cancelPlan"
print "Cancel Plan"
# Cancel current subscription
Braintree::Subscription.cancel(@user.subscriptionID)
@user.subscriptionID = "" # Remove subscriptionID
@user.save
redirect '/account'
return
end
if params[:action].eql? "startPlan"
print "Start Plan"
# Start a new subscription. (Now without any trials)
customer = Braintree::Customer.find(@user.billingID)
if customer
payment_method_token = customer.credit_cards[0].token
result = Braintree::Subscription.create(
:payment_method_token => payment_method_token,
:plan_id => "silver_plan",
:options => {
:start_immediately => true # A recreated plan does not have a trial period
}
)
@user.subscriptionID = result.subscription.id # Add subscriptionID
@user.save
redirect '/account'
else
# Customer with this ID not found - remove from Customer
@user.billingID = 0
@user.save
flash[:error] = "No customer record found. Please try again."
redirect '/account'
end
end
erb :account
else
redirect '/'
end
end
put '/account' do
user = currentUser
if user
if params[:commitKeytechCredentials] == "Save"
user.keytechAPIURL = params[:keytechAPIURL]
user.keytechPassword = params[:keytechPassword]
user.keytechUserName = params[:keytechUserName]
if !user.save
flash[:warning] = user.errors.full_messages
end
end
if params[:commitProfile] == "Save"
# Do nothing!
# Currently not allowed to change email address!
end
if params[:commitPassword] == "Save"
# Check for current Password
if !params[:current_password]
flash[:error] = "Password was empty"
redirect '/account'
end
authUser = UserAccount.authenticate(user.email, params[:current_password])
if authUser
password = params[:password]
password_confirmation = params[:password_confirmation]
if password.empty? && password_confirmation.empty?
flash[:warning] = "New password can not be empty"
redirect '/account'
end
if password.eql? password_confirmation
user.password = password
user.password_confirmation = password_confirmation
if !user.save
flash[:error] = user.errors.full_messages
end
else
flash[:error] = "Password and password confirmation did not match."
end
else
flash[:error] = "Current password is invalid"
end
puts params
end
else
puts "No user found!"
end
# Return to account site
redirect '/account'
end
# Sets a credit card for current logged in user
get '/account/subscription' do
@user = currentUser
if @user
if !@user.subscriptionID.empty?
# A billing customer is already given
# TODO: Eine Subscription kann gesetzt sein, auf 'Aktiv' - Status prüfen
erb :showBillingPlan
else
erb :customerAccount
end
else
redirect'/'
end
end
# For Payment Data
# Creates a Braintree customer from the posted billing form and
# immediately starts the default monthly subscription for it.
post '/account/subscription' do
  result = Braintree::Customer.create(
    :first_name => params[:first_name],
    :last_name => params[:last_name],
    :credit_card => {
      :billing_address => {
        :postal_code => params[:postal_code]
      },
      :number => params[:number],
      :expiration_month => params[:month],
      :expiration_year => params[:year],
      :cvv => params[:cvv]
    }
  )
  if result.success?
    # BUG FIX: billingID was assigned but never saved, so the Braintree
    # customer id was lost on the next request. Persist it now.
    user = currentUser
    user.billingID = result.customer.id
    user.save
    # Start the default monthly plan with the customer's first card.
    payment_method_token = result.customer.credit_cards[0].token
    result = Braintree::Subscription.create(
      :payment_method_token => payment_method_token,
      :plan_id => "silver_plan" # the default monthly plan
    )
    if result.success?
      "<h1>Subscription Status #{result.subscription.status}</h1>"
    else
      flash[:error] = result.message
      redirect '/create_customer'
    end
  else
    # Customer creation failed — surface the Braintree error message.
    flash[:error] = result.message
    redirect '/create_customer'
  end
end
#login controller
# Login controller: authenticates the posted credentials and, on success,
# stores the user id in the session and jumps to the search page.
post '/login' do
  account = UserAccount.authenticate(params[:username], params[:passwd])
  unless account
    # redirect halts the request, so the success path below is never reached
    flash[:error] = "Invalid username or password"
    redirect '/'
  end
  session[:user] = account.id
  redirect '/search'
end
get "/logout" do
session.destroy
#KtApi.destroy_session
flash[:notice] = "You have logged out."
redirect '/'
end
get '/account/forgotpassword' do
erb :"passwordManagement/forgotpassword"
end
# Send a password recovery link
post '/account/forgotpassword' do
# existiert diese Mail- Adrese ?
if params[:email].empty?
flash[:warning] = "Enter a valid mail address"
redirect '/account/forgotpassword'
return
end
# Get user account by its mail
user = UserAccount.first(:email => params[:email].to_s)
if !user
flash[:warning] = "This email address is unknown. Please enter a valid useraccount identified by it's email"
redirect '/account/forgotpassword'
return
end
# Delete all old password recoveries based in this email
PasswordRecoveryList.all(:email => params[:email]).destroy
# Generate a new password recovery pending entry
newRecovery = PasswordRecoveryList.create(:email=> params[:email] )
# Now send a mail
if newRecovery
sendPasswordRecoveryMail(newRecovery)
flash[:notice] = "A recovery mail was send to #{params[:email]} please check your inbox."
erb :"passwordManagement/recoveryMailSent"
end
end
# Recovers lost password,if recoveryID is still valid in database
get '/account/password/reset/:recoveryID' do
if params[:recoveryID]
recovery = PasswordRecoveryList.first(:recoveryID => params[:recoveryID])
print "Recovery: #{recovery}"
if recovery
if !recovery.isValid?
recovery.destroy
flash[:warning] = "Recovery token has expired"
return erb :"passwordManagement/invalidPasswordRecovery"
end
@user = UserAccount.first(:email => recovery.email.to_s)
if @user
print " User account found!"
# Start a new password, if useraccount matches
erb :"passwordManagement/newPassword"
else
flash[:warning] = "Can not recover a password from a deleted or disabled useraccount."
erb :"passwordManagement/invalidPasswordRecovery"
end
else
flash[:warning] = "Recovery token not found or invalid"
erb :"passwordManagement/invalidPasswordRecovery"
end
else
flash[:warning] = "Invalid page - a recovery token is missing."
erb :"passwordManagement/invalidPasswordRecovery"
end
end
# accepts a new password and assigns it to current user
# Accepts a new password and assigns it to the user account that owns
# the recovery token posted in params[:recoveryID].
post '/account/password/reset' do
  recovery = PasswordRecoveryList.first(:recoveryID => params[:recoveryID])
  print " Recovery: #{recovery}"
  if recovery
    user = UserAccount.first(:email => recovery.email.to_s)
    if user
      print " User: #{user}"
      password = params[:password]
      password_confirmation = params[:password_confirmation]
      if password.empty? && password_confirmation.empty?
        flash[:warning] = "New password can not be empty"
        # BUG FIX: was single-quoted, so the recovery token was never
        # interpolated into the redirect target.
        redirect "/account/password/reset/#{params[:recoveryID]}"
      end
      if password.eql? password_confirmation
        user.password = password
        user.password_confirmation = password_confirmation
        if !user.save
          flash[:error] = user.errors.full_messages
        else
          # Password accepted: consume the one-time recovery token.
          print " Password reset: OK!"
          recovery.destroy
          flash[:notice] = "Your new password was accepted. Login now with your new password."
          redirect '/'
        end
      else
        flash[:error] = "Password and password confirmation did not match."
        # BUG FIX: same single-quote interpolation bug as above.
        redirect "/account/password/reset/#{params[:recoveryID]}"
      end
    end
  end
end
#Loads a element detail, if present
get '/elementdetails/:elementKey' do
if currentUser
@elementKey = params[:elementKey]
if params[:format]=='json'
elementData = loadElement(@elementKey,'ALL')
return elementData.to_json
end
# OK, viewtype is relevant
@element = loadElement(@elementKey)
@detailsLink = "/elementdetails/#{params[:elementKey]}"
@viewType = params[:viewType]
erb :elementdetails
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
# redirects to a search page and fill search Data, parameter q is needed
get '/search' do
if currentUser
if currentUser.usesDemoAPI? || currentUser.hasValidSubscription?
@result= findElements(params[:q])
erb :search
else
flash[:warning] = "You need a valid subscription to use a API other than the demo API. Go to the account page and check your current subscription under the 'Billing' area."
erb :search
end
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
get '/admin' do
if loggedIn? && currentUser.isAdmin?
@users=UserAccount.all
erb :admin
else
flash[:notice] = "You are not logged in."
redirect '/'
end
end
get '/support' do
"<h3> To Be Done </h3>"
end
get '/about' do
"<h3> To Be Done </h3>"
end
get '/features' do
"<h3> To Be Done </h3>"
end
get '/pricing' do
"<h3> To Be Done </h3>"
end
# Redirection for file download
# Image forwarding. Redirect classimages provided by API to another image directly fetched by API
# Image forwarding: serves a class image fetched via the API as PNG.
get "/images/classimages/:classKey" do
  if currentUser
    # BUG FIX: 'mag_age' was a typo — sinatra-contrib's cache_control
    # expects :max_age, so no Cache-Control max-age header was ever sent.
    cache_control :public, max_age: 1800
    content_type "image/png"
    loadClassImage(params[:classKey])
  else
    flash[:notice] = sessionInvalidText
    redirect '/'
  end
end
get "/files/:elementKey/masterfile" do
if currentUser
content_type "application/octet-stream"
loadMasterfile(params[:elementKey])
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
get "/files/:elementKey/files/:fileID" do
if currentUser
content_type "application/octet-stream"
loadFile(params[:elementKey],params[:fileID])
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
get "/element/:thumbnailHint/thumbnail" do
if currentUser
content_type "image/png"
loadElementThumbnail(params[:thumbnailHint])
else
flash[:notice] = sessionInvalidText
redirect '/'
end
end
end
# Get database up to date
DataMapper.auto_upgrade!
|
require 'fromcvs'
require 'python'
require 'python/mercurial/ui'
require 'python/mercurial/localrepo'
# Destination repository adapter that writes commits converted from CVS
# into a Mercurial repository, driving mercurial's localrepo API through
# the ruby-python bridge (Py.mercurial.*).
class HGDestRepo
  # hgroot: path of the target hg repository.
  # status: optional callable receiving progress strings (may be nil).
  def initialize(hgroot, status)
    @status = status
    ui = Py.mercurial.ui.ui(Py::KW, :interactive => false)
    @hgrepo = Py.mercurial.localrepo.localrepository(ui, hgroot)
    flush_tag_cache
    @tags = @hgrepo.tags
    unless @tags.include? 'HEAD'
      tag('HEAD', Py.mercurial.node.nullid)
    end
  end

  # Takes the working-dir lock and opens a transaction for a run.
  def start
    @wlock = @hgrepo.wlock
    @transaction = @hgrepo.transaction
    @commits = 0
  end

  # Closes and reopens the transaction every 10 commits to bound its size.
  def flush
    return if @commits < 10
    @transaction.close
    @transaction = @hgrepo.transaction
    @commits = 0
  end

  def has_branch?(branch)
    @tags.include? branch
  end

  # Creates +branch+ from +parent+ ('HEAD' when nil); vendor branches
  # start from the null node. Returns the branch's starting node.
  def create_branch(branch, parent, vendor_p)
    if vendor_p
      node = Py.mercurial.node.nullid
    else
      parent ||= 'HEAD'
      node = @tags[parent]
      status "creating branch #{branch} from #{parent}, cset #{node.unpack('H12')}"
    end
    tag(branch, node)
  end

  def select_branch(branch)
    @curbranch = branch || 'HEAD'
  end

  # Removes +file+ from the working dir; a missing file is not an error.
  def remove(file)
    begin
      File.unlink(@hgrepo.wjoin(file))
    rescue Errno::ENOENT
      # well, it is gone already
    end
  end

  # Writes +data+ to +file+ and applies +mode+ (masked by umask).
  # uid/gid are accepted for interface compatibility but not applied.
  def update(file, data, mode, uid, gid)
    @hgrepo.wwrite(file, data)
    mode |= 0666
    mode &= ~File.umask
    File.chmod(mode, @hgrepo.wjoin(file))
  end

  def commit(author, date, msg, files)
    status "committing set by #{author} at #{date} to #@curbranch"
    _commit(author, date, msg, files)
  end

  def merge(branch, author, date, msg, files)
    status "merging cset #{branch.unpack('H12')[0]} by #{author} at #{date} to #@curbranch"
    _commit(author, date, msg, files, branch)
  end

  def finish
    @transaction.close
    flush_tag_cache
    @wlock.release
  end

  private

  # Commits +files+ on the current branch and returns the new changeset
  # node. BUG FIX: the counter increment now precedes the tag call so that
  # tag's return value (the node) is this method's return value; before,
  # `@commits += 1` came last and callers received the counter instead.
  def _commit(author, date, msg, files, p2=nil)
    p1 = @tags[@curbranch]
    @hgrepo.rawcommit(files, msg, author, "#{date.to_i} 0", p1, p2, @wlock)
    @commits += 1
    tag(@curbranch, @hgrepo.changelog.tip)
  end

  # Records +node+ as local tag +branch+ and returns the node.
  def tag(branch, node)
    @tags[branch] = node
    @hgrepo.tag(branch, node, nil, true, nil, nil)
    node
  end

  # Rewrites mercurial's 'localtags' file from @tags and reloads the repo.
  def flush_tag_cache
    @tags = @hgrepo.tags
    tf = @hgrepo.opener('localtags', 'w')
    tf.truncate(0)
    @tags.each do |branch, node|
      tf.write("#{node.unpack('H*')} #{branch}\n")
    end
    tf.close
    @hgrepo.reload # just to be sure
    @tags = @hgrepo.tags
  end

  def status(str)
    @status.call(str) if @status
  end
end
if $0 == __FILE__
status = lambda do |str|
puts str
end
if ARGV.length != 2
puts "call: tohg <cvsdir> <hgdir>"
exit 1
end
cvsdir, hgdir = ARGV
cvsrepo = Repo.new(cvsdir, status)
hgrepo = HGDestRepo.new(hgdir, status)
cvsrepo.scan
cvsrepo.commit(hgrepo)
end
We have to return the tag's return value (the new changeset node).
require 'fromcvs'
require 'python'
require 'python/mercurial/ui'
require 'python/mercurial/localrepo'
# Destination repository adapter that writes commits converted from CVS
# into a Mercurial repository, driving mercurial's localrepo API through
# the ruby-python bridge (Py.mercurial.*).
class HGDestRepo
# hgroot: path of the target hg repository.
# status: optional callable receiving progress strings (may be nil).
def initialize(hgroot, status)
@status = status
ui = Py.mercurial.ui.ui(Py::KW, :interactive => false)
@hgrepo = Py.mercurial.localrepo.localrepository(ui, hgroot)
flush_tag_cache
@tags = @hgrepo.tags
unless @tags.include? 'HEAD'
tag('HEAD', Py.mercurial.node.nullid)
end
end
# Takes the working-dir lock and opens a transaction for a run.
def start
@wlock = @hgrepo.wlock
@transaction = @hgrepo.transaction
@commits = 0
end
# Closes and reopens the transaction every 10 commits to bound its size.
def flush
return if @commits < 10
@transaction.close
@transaction = @hgrepo.transaction
@commits = 0
end
def has_branch?(branch)
@tags.include? branch
end
# Creates +branch+ from +parent+ ('HEAD' when nil); vendor branches
# start from the null node. Returns tag's result (the starting node).
def create_branch(branch, parent, vendor_p)
if vendor_p
node = Py.mercurial.node.nullid
else
parent ||= 'HEAD'
node = @tags[parent]
status "creating branch #{branch} from #{parent}, cset #{node.unpack('H12')}"
end
tag(branch, node)
end
def select_branch(branch)
@curbranch = branch || 'HEAD'
end
# Removes +file+ from the working dir; a missing file is not an error.
def remove(file)
begin
File.unlink(@hgrepo.wjoin(file))
rescue Errno::ENOENT
# well, it is gone already
end
end
# Writes +data+ to +file+ and applies +mode+ (masked by umask).
# uid/gid are accepted but not applied here.
def update(file, data, mode, uid, gid)
@hgrepo.wwrite(file, data)
mode |= 0666
mode &= ~File.umask
File.chmod(mode, @hgrepo.wjoin(file))
end
def commit(author, date, msg, files)
status "committing set by #{author} at #{date} to #@curbranch"
_commit(author, date, msg, files)
end
def merge(branch, author, date, msg, files)
status "merging cset #{branch.unpack('H12')[0]} by #{author} at #{date} to #@curbranch"
_commit(author, date, msg, files, branch)
end
def finish
@transaction.close
flush_tag_cache
@wlock.release
end
private
# Commits +files+ on the current branch. The counter is bumped before
# the tag call so the method returns tag's value: the new changeset node.
def _commit(author, date, msg, files, p2=nil)
p1 = @tags[@curbranch]
@hgrepo.rawcommit(files, msg, author, "#{date.to_i} 0", p1, p2, @wlock)
@commits += 1
tag(@curbranch, @hgrepo.changelog.tip)
end
private
# Records +node+ as local tag +branch+ and returns the node.
def tag(branch, node)
@tags[branch] = node
@hgrepo.tag(branch, node, nil, true, nil, nil)
node
end
private
# Rewrites mercurial's 'localtags' file from @tags and reloads the repo.
def flush_tag_cache
@tags = @hgrepo.tags
tf = @hgrepo.opener('localtags', 'w')
tf.truncate(0)
@tags.each do |branch, node|
tf.write("#{node.unpack('H*')} #{branch}\n")
end
tf.close
@hgrepo.reload # just to be sure
@tags = @hgrepo.tags
end
def status(str)
@status.call(str) if @status
end
end
if $0 == __FILE__
status = lambda do |str|
puts str
end
if ARGV.length != 2
puts "call: tohg <cvsdir> <hgdir>"
exit 1
end
cvsdir, hgdir = ARGV
cvsrepo = Repo.new(cvsdir, status)
hgrepo = HGDestRepo.new(hgdir, status)
cvsrepo.scan
cvsrepo.commit(hgrepo)
end
|
load "#{Rails.root}/lib/ladder.rb"
class HomeController < ApplicationController
  # Picks two random dictionary words for a new word-ladder puzzle.
  def index
    dsize = dictionary.size
    @start_word = dictionary[rand(dsize)]
    @end_word = dictionary[rand(dsize)]
  end

  # Checks the submitted ladder. Currently only validates that the first
  # submitted word is one edit away from the start word.
  # NOTE(review): words 2-5 and the end word were read but never used, so
  # the unused locals were dropped.
  def result
    start_word = params[:start_word]
    word1 = params[:word1]
    # !! collapses the redundant if/else (`@res = true` / `@res = false`)
    # into a boolean coercion with the same truth value.
    @res = !!edit_distance_of_one(start_word, word1)
  end
end
Reading params
load "#{Rails.root}/lib/ladder.rb"
class HomeController < ApplicationController
def index
dsize = dictionary.size
@start_word = dictionary[rand(dsize)]
@end_word = dictionary[rand(dsize)]
end
def result
start_word = params[:start_word]
end_word = params[:end_word]
word1 = params[:word1]
word2 = params[:word2]
word3 = params[:word3]
word4 = params[:word4]
word5 = params[:word5]
end
end |
require_relative 'spec_helper'
require 'objspace'
load_extension("typed_data")
describe "CApiAllocTypedSpecs (a class with an alloc func defined)" do
it "calls the alloc func" do
@s = CApiAllocTypedSpecs.new
@s.typed_wrapped_data.should == 42 # not defined in initialize
end
it "uses the specified memsize function for ObjectSpace.memsize" do
@s = CApiAllocTypedSpecs.new
# The defined memsize function for the type should return 42 as the
# size, and this is added to the size of the object as known by Ruby,
# hence strictly greater than 42.
ObjectSpace.memsize_of(@s).should > 42
end
end
describe "CApiWrappedTypedStruct" do
before :each do
@s = CApiWrappedTypedStructSpecs.new
end
it "wraps and unwraps data" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct(a).should == 1024
end
it "throws an exception for a wrong type" do
a = @s.typed_wrap_struct(1024)
lambda { @s.typed_get_struct_other(a) }.should raise_error(TypeError)
end
it "unwraps data for a parent type" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct_parent(a).should == 1024
end
describe "RTYPEDATA" do
it "returns the struct data" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct_rdata(a).should == 1024
end
it "can be used to change the wrapped struct" do
a = @s.typed_wrap_struct(1024)
@s.typed_change_struct(a, 100)
@s.typed_get_struct(a).should == 100
end
end
describe "DATA_PTR" do
it "returns the struct data" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct_data_ptr(a).should == 1024
end
end
end
Clarify reason for expected memsize in spec.
require_relative 'spec_helper'
require 'objspace'
load_extension("typed_data")
describe "CApiAllocTypedSpecs (a class with an alloc func defined)" do
it "calls the alloc func" do
@s = CApiAllocTypedSpecs.new
@s.typed_wrapped_data.should == 42 # not defined in initialize
end
it "uses the specified memsize function for ObjectSpace.memsize" do
@s = CApiAllocTypedSpecs.new
# The defined memsize function for the type should return 42 as
# the size, and this should be added to the size of the object as
# known by Ruby.
ObjectSpace.memsize_of(@s).should > 42
end
end
describe "CApiWrappedTypedStruct" do
before :each do
@s = CApiWrappedTypedStructSpecs.new
end
it "wraps and unwraps data" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct(a).should == 1024
end
it "throws an exception for a wrong type" do
a = @s.typed_wrap_struct(1024)
lambda { @s.typed_get_struct_other(a) }.should raise_error(TypeError)
end
it "unwraps data for a parent type" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct_parent(a).should == 1024
end
describe "RTYPEDATA" do
it "returns the struct data" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct_rdata(a).should == 1024
end
it "can be used to change the wrapped struct" do
a = @s.typed_wrap_struct(1024)
@s.typed_change_struct(a, 100)
@s.typed_get_struct(a).should == 100
end
end
describe "DATA_PTR" do
it "returns the struct data" do
a = @s.typed_wrap_struct(1024)
@s.typed_get_struct_data_ptr(a).should == 1024
end
end
end
|
# Copyright (c) 2014-2016 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative 'ciscotest'
require_relative '../lib/cisco_node_utils/portchannel_global'
# TestX__CLASS_NAME__X - Minitest for X__CLASS_NAME__X node utility class
class TestPortchannelGlobal < CiscoTestCase
@skip_unless_supported = 'portchannel_global'
@@cleaned = false # rubocop:disable Style/ClassVars
DEFAULT_NAME = 'default'
def setup
super
cleanup unless @@cleaned
@@cleaned = true # rubocop:disable Style/ClassVars
end
def teardown
cleanup
super
end
# Removes any configured port-channel load-balance setting so each test
# starts from a clean state.
def cleanup
# On product ids matching N3/N7/N8/N9 the trailing 'ethernet' keyword is
# omitted from the command.
ethernet = node.product_id[/N(3|7|8|9)/] ? '' : 'ethernet'
config_no_warn "no port-channel load-balance #{ethernet}"
end
# Returns true when the device is an N3k running in n3k switch-mode,
# false when it reports another mode, nil for non-N3k platforms.
def n3k_in_n3k_mode?
return unless /N3/ =~ node.product_id
mode = config('show system switch-mode')
# note: an n3k in n9k mode displays: 'system switch-mode n9k'
patterns = ['system switch-mode n3k',
'Switch mode configuration is not not applicable']
mode[Regexp.union(patterns)] ? true : false
end
def create_portchannel_global(name=DEFAULT_NAME)
PortChannelGlobal.new(name)
end
def test_hash_distribution
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'hash_distribution')
assert_raises(Cisco::UnsupportedError) do
global.hash_distribution = 'fixed'
end
assert_nil(global.hash_distribution)
else
global.hash_distribution = 'fixed'
assert_equal('fixed', global.hash_distribution)
global.hash_distribution =
global.default_hash_distribution
assert_equal(global.default_hash_distribution,
global.hash_distribution)
end
end
def test_load_defer
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'load_defer')
assert_raises(Cisco::UnsupportedError) do
global.load_defer = 1000
end
assert_nil(global.load_defer)
else
global.load_defer = 1000
assert_equal(1000, global.load_defer)
global.load_defer =
global.default_load_defer
assert_equal(global.default_load_defer,
global.load_defer)
end
end
def test_resilient
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'resilient')
assert_raises(Cisco::UnsupportedError) { global.resilient = true }
assert_nil(global.resilient)
return
end
# Verify that hardware supports feature. Unfortunately the current cli
# only displays a warning and does not raise an error so we have to
# test for it explicitly.
cmd = 'port-channel load-balance resilient'
skip('Skip test: Feature is not supported on this device') if
config(cmd)[/Resilient Hashing Mode unsupported/]
global = create_portchannel_global
# For n3k the default is different from n9k
if n3k_in_n3k_mode?
global.resilient = false
refute(global.resilient)
global.resilient = global.default_resilient
assert_equal(global.default_resilient, global.resilient)
else
config('no ' + cmd)
global = create_portchannel_global
global.resilient = true
assert(global.resilient)
global.resilient = global.default_resilient
assert_equal(global.default_resilient, global.resilient)
end
end
def test_load_balance_no_rotate
skip('Test not supported on this platform') unless n3k_in_n3k_mode?
global = create_portchannel_global
global.send(:port_channel_load_balance=,
'src-dst', 'ip-only', nil, nil, true, nil, nil)
assert_equal('src-dst',
global.bundle_select)
assert_equal('ip-only',
global.bundle_hash)
assert_equal(true, global.symmetry)
global.send(
:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil,
nil,
global.default_symmetry,
nil,
nil)
assert_equal(
global.default_bundle_select,
global.bundle_select)
assert_equal(
global.default_bundle_hash,
global.bundle_hash)
assert_equal(
global.default_symmetry,
global.symmetry)
end
def test_load_balance_sym_concat_rot
# rubocop:disable Style/MultilineOperationIndentation
skip('Test not supported on this platform') if n3k_in_n3k_mode? ||
validate_property_excluded?('portchannel_global', 'symmetry')
# rubocop:enable Style/MultilineOperationIndentation
global = create_portchannel_global
global.send(:port_channel_load_balance=,
'src-dst', 'ip-l4port', nil, nil, true, true, 4)
assert_equal('src-dst',
global.bundle_select)
assert_equal('ip-l4port',
global.bundle_hash)
assert_equal(true, global.symmetry)
assert_equal(true, global.concatenation)
assert_equal(4, global.rotate)
global.send(
:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil,
nil,
global.default_symmetry,
global.default_concatenation,
global.default_rotate)
assert_equal(
global.default_bundle_select,
global.bundle_select)
assert_equal(
global.default_bundle_hash,
global.bundle_hash)
assert_equal(
global.default_symmetry,
global.symmetry)
assert_equal(
global.default_concatenation,
global.concatenation)
assert_equal(global.default_rotate,
global.rotate)
end
# assert_hash_poly_crc
# Depending on the chipset, hash_poly may have have a different
# default value within the same platform family (this is done to
# avoid polarization) but there is currently no command available
# to dynamically determine the default state. As a result the
# getter simply hard-codes a default value which means it may
# encounter occasional idempotence issues.
# For testing purposes this becomes a best-effort test; i.e. we expect the
# hash_poly test to pass for all asserts except the one that matches the
# default value for that chipset.
def assert_hash_poly_crc(exp, actual)
assert_equal(exp, actual) if exp == actual
end
def test_load_balance_hash_poly
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'hash_poly')
skip('Test not supported on this platform')
return
end
global.send(:port_channel_load_balance=,
'src-dst', 'ip-only', 'CRC10c', nil, nil, nil, nil)
assert_equal('src-dst', global.bundle_select)
assert_equal('ip-only', global.bundle_hash)
assert_hash_poly_crc('CRC10c', global.hash_poly)
global.send(:port_channel_load_balance=,
'dst', 'mac', 'CRC10a', nil, nil, nil, nil)
assert_equal('dst', global.bundle_select)
assert_equal('mac', global.bundle_hash)
assert_hash_poly_crc('CRC10a', global.hash_poly)
global.send(:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
'CRC10b', nil, nil, nil, nil)
assert_equal(global.default_bundle_select, global.bundle_select)
assert_equal(global.default_bundle_hash, global.bundle_hash)
assert_hash_poly_crc('CRC10b', global.hash_poly)
end
def test_load_balance_asym_rot
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'asymmetric')
skip('Test not supported on this platform')
return
end
global.send(:port_channel_load_balance=,
'src-dst', 'ip-vlan', nil, true, nil, nil, 4)
assert_equal('src-dst', global.bundle_select)
assert_equal('ip-vlan', global.bundle_hash)
assert_equal(true, global.asymmetric)
assert_equal(4, global.rotate)
global.send(:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil, global.default_asymmetric,
nil, nil, global.default_rotate)
assert_equal(global.default_bundle_select, global.bundle_select)
assert_equal(global.default_bundle_hash, global.bundle_hash)
assert_equal(global.default_asymmetric, global.asymmetric)
assert_equal(global.default_rotate, global.rotate)
end
def test_load_balance_no_hash_rot
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'rotate')
skip('Test not supported on this platform')
return
end
global.send(:port_channel_load_balance=,
'src-dst', 'ip-vlan', nil, nil, nil, nil, 4)
assert_equal('src-dst', global.bundle_select)
assert_equal('ip-vlan', global.bundle_hash)
assert_equal(4, global.rotate)
global.send(:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil, nil,
nil, nil, global.default_rotate)
assert_equal(global.default_bundle_select, global.bundle_select)
assert_equal(global.default_bundle_hash, global.bundle_hash)
assert_equal(global.default_rotate, global.rotate)
end
end
MT: portchannel: n3k_in_n3k bad pattern
# Copyright (c) 2014-2016 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative 'ciscotest'
require_relative '../lib/cisco_node_utils/portchannel_global'
# TestX__CLASS_NAME__X - Minitest for X__CLASS_NAME__X node utility class
class TestPortchannelGlobal < CiscoTestCase
@skip_unless_supported = 'portchannel_global'
@@cleaned = false # rubocop:disable Style/ClassVars
DEFAULT_NAME = 'default'
def setup
super
cleanup unless @@cleaned
@@cleaned = true # rubocop:disable Style/ClassVars
end
def teardown
cleanup
super
end
def cleanup
ethernet = node.product_id[/N(3|7|8|9)/] ? '' : 'ethernet'
config_no_warn "no port-channel load-balance #{ethernet}"
end
def n3k_in_n3k_mode?
return unless /N3/ =~ node.product_id
mode = config('show system switch-mode')
# note: an n3k in n9k mode displays: 'system switch-mode n9k'
patterns = ['system switch-mode n3k',
'Switch mode configuration is not applicable']
mode[Regexp.union(patterns)] ? true : false
end
def create_portchannel_global(name=DEFAULT_NAME)
PortChannelGlobal.new(name)
end
def test_hash_distribution
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'hash_distribution')
assert_raises(Cisco::UnsupportedError) do
global.hash_distribution = 'fixed'
end
assert_nil(global.hash_distribution)
else
global.hash_distribution = 'fixed'
assert_equal('fixed', global.hash_distribution)
global.hash_distribution =
global.default_hash_distribution
assert_equal(global.default_hash_distribution,
global.hash_distribution)
end
end
def test_load_defer
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'load_defer')
assert_raises(Cisco::UnsupportedError) do
global.load_defer = 1000
end
assert_nil(global.load_defer)
else
global.load_defer = 1000
assert_equal(1000, global.load_defer)
global.load_defer =
global.default_load_defer
assert_equal(global.default_load_defer,
global.load_defer)
end
end
def test_resilient
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'resilient')
assert_raises(Cisco::UnsupportedError) { global.resilient = true }
assert_nil(global.resilient)
return
end
# Verify that hardware supports feature. Unfortunately the current cli
# only displays a warning and does not raise an error so we have to
# test for it explicitly.
cmd = 'port-channel load-balance resilient'
skip('Skip test: Feature is not supported on this device') if
config(cmd)[/Resilient Hashing Mode unsupported/]
global = create_portchannel_global
# For n3k the default is different from n9k
if n3k_in_n3k_mode?
global.resilient = false
refute(global.resilient)
global.resilient = global.default_resilient
assert_equal(global.default_resilient, global.resilient)
else
config('no ' + cmd)
global = create_portchannel_global
global.resilient = true
assert(global.resilient)
global.resilient = global.default_resilient
assert_equal(global.default_resilient, global.resilient)
end
end
def test_load_balance_no_rotate
skip('Test not supported on this platform') unless n3k_in_n3k_mode?
global = create_portchannel_global
global.send(:port_channel_load_balance=,
'src-dst', 'ip-only', nil, nil, true, nil, nil)
assert_equal('src-dst',
global.bundle_select)
assert_equal('ip-only',
global.bundle_hash)
assert_equal(true, global.symmetry)
global.send(
:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil,
nil,
global.default_symmetry,
nil,
nil)
assert_equal(
global.default_bundle_select,
global.bundle_select)
assert_equal(
global.default_bundle_hash,
global.bundle_hash)
assert_equal(
global.default_symmetry,
global.symmetry)
end
def test_load_balance_sym_concat_rot
# rubocop:disable Style/MultilineOperationIndentation
skip('Test not supported on this platform') if n3k_in_n3k_mode? ||
validate_property_excluded?('portchannel_global', 'symmetry')
# rubocop:enable Style/MultilineOperationIndentation
global = create_portchannel_global
global.send(:port_channel_load_balance=,
'src-dst', 'ip-l4port', nil, nil, true, true, 4)
assert_equal('src-dst',
global.bundle_select)
assert_equal('ip-l4port',
global.bundle_hash)
assert_equal(true, global.symmetry)
assert_equal(true, global.concatenation)
assert_equal(4, global.rotate)
global.send(
:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil,
nil,
global.default_symmetry,
global.default_concatenation,
global.default_rotate)
assert_equal(
global.default_bundle_select,
global.bundle_select)
assert_equal(
global.default_bundle_hash,
global.bundle_hash)
assert_equal(
global.default_symmetry,
global.symmetry)
assert_equal(
global.default_concatenation,
global.concatenation)
assert_equal(global.default_rotate,
global.rotate)
end
# assert_hash_poly_crc
# Depending on the chipset, hash_poly may have have a different
# default value within the same platform family (this is done to
# avoid polarization) but there is currently no command available
# to dynamically determine the default state. As a result the
# getter simply hard-codes a default value which means it may
# encounter occasional idempotence issues.
# For testing purposes this becomes a best-effort test; i.e. we expect the
# hash_poly test to pass for all asserts except the one that matches the
# default value for that chipset.
def assert_hash_poly_crc(exp, actual)
assert_equal(exp, actual) if exp == actual
end
def test_load_balance_hash_poly
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'hash_poly')
skip('Test not supported on this platform')
return
end
global.send(:port_channel_load_balance=,
'src-dst', 'ip-only', 'CRC10c', nil, nil, nil, nil)
assert_equal('src-dst', global.bundle_select)
assert_equal('ip-only', global.bundle_hash)
assert_hash_poly_crc('CRC10c', global.hash_poly)
global.send(:port_channel_load_balance=,
'dst', 'mac', 'CRC10a', nil, nil, nil, nil)
assert_equal('dst', global.bundle_select)
assert_equal('mac', global.bundle_hash)
assert_hash_poly_crc('CRC10a', global.hash_poly)
global.send(:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
'CRC10b', nil, nil, nil, nil)
assert_equal(global.default_bundle_select, global.bundle_select)
assert_equal(global.default_bundle_hash, global.bundle_hash)
assert_hash_poly_crc('CRC10b', global.hash_poly)
end
def test_load_balance_asym_rot
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'asymmetric')
skip('Test not supported on this platform')
return
end
global.send(:port_channel_load_balance=,
'src-dst', 'ip-vlan', nil, true, nil, nil, 4)
assert_equal('src-dst', global.bundle_select)
assert_equal('ip-vlan', global.bundle_hash)
assert_equal(true, global.asymmetric)
assert_equal(4, global.rotate)
global.send(:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil, global.default_asymmetric,
nil, nil, global.default_rotate)
assert_equal(global.default_bundle_select, global.bundle_select)
assert_equal(global.default_bundle_hash, global.bundle_hash)
assert_equal(global.default_asymmetric, global.asymmetric)
assert_equal(global.default_rotate, global.rotate)
end
def test_load_balance_no_hash_rot
global = create_portchannel_global
if validate_property_excluded?('portchannel_global', 'rotate')
skip('Test not supported on this platform')
return
end
global.send(:port_channel_load_balance=,
'src-dst', 'ip-vlan', nil, nil, nil, nil, 4)
assert_equal('src-dst', global.bundle_select)
assert_equal('ip-vlan', global.bundle_hash)
assert_equal(4, global.rotate)
global.send(:port_channel_load_balance=,
global.default_bundle_select,
global.default_bundle_hash,
nil, nil,
nil, nil, global.default_rotate)
assert_equal(global.default_bundle_select, global.bundle_select)
assert_equal(global.default_bundle_hash, global.bundle_hash)
assert_equal(global.default_rotate, global.rotate)
end
end
|
lib = File.expand_path("../lib", __FILE__)
$:.unshift lib unless $:.include? lib
require "gas/version"
Gem::Specification.new do |s|
s.name = "gas"
s.version = Gas::VERSION
s.authors = "Fredrik Wallgren"
s.email = "fredrik.wallgren@gmail.com"
s.homepage = "https://github.com/walle/gas"
s.summary = "Manage your git author accounts"
s.description = "Gas is a utility to keep track of your git authors. Add them to gas and switch at any time. Great if you use one author at work and one at home for instance."
s.rubyforge_project = s.name
s.rdoc_options = ["--charset=UTF-8"]
s.extra_rdoc_files = %w[README.textile LICENSE]
s.add_dependency 'thor', '~> 0.14.6'
s.add_development_dependency 'rake'
s.add_development_dependency 'bundler'
s.add_development_dependency 'rspec'
s.files = Dir.glob("{bin,lib,spec,config}/**/*") + ['LICENSE', 'README.textile']
s.executables = ['gas']
s.require_path = ['lib']
end
Add rr as dependency
# Gem packaging metadata for `gas`, a git author-identity switcher.
lib = File.expand_path("../lib", __FILE__)
$:.unshift lib unless $:.include? lib
require "gas/version"
Gem::Specification.new do |s|
  s.name = "gas"
  s.version = Gas::VERSION
  s.authors = "Fredrik Wallgren"
  s.email = "fredrik.wallgren@gmail.com"
  s.homepage = "https://github.com/walle/gas"
  s.summary = "Manage your git author accounts"
  s.description = "Gas is a utility to keep track of your git authors. Add them to gas and switch at any time. Great if you use one author at work and one at home for instance."
  s.rubyforge_project = s.name
  s.rdoc_options = ["--charset=UTF-8"]
  s.extra_rdoc_files = %w[README.textile LICENSE]
  s.add_dependency 'thor', '~> 0.14.6'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'bundler'
  s.add_development_dependency 'rspec'
  s.add_development_dependency 'rr'
  s.files = Dir.glob("{bin,lib,spec,config}/**/*") + ['LICENSE', 'README.textile']
  s.executables = ['gas']
  # Fix: `require_paths` (plural) is the canonical attribute for an Array;
  # the singular `require_path=` writer is a legacy alias expecting a String.
  s.require_paths = ['lib']
end
|
require 'open-uri'
require 'nokogiri'
require 'dotenv/load'
require 'date'
require 'time'
require 'sequel'
require 'uri'
$fuel_type = {
'regular' => 'A',
'midgrade' => 'B',
'premium' => 'C',
'diesel' => 'D'
}
$locations = ['Atherton',
'Belmont',
'Broadmoor',
'Burbank',
'Burlingame',
'Campbell',
'Colma',
'Cupertino',
'Daly City',
'East Palo Alto',
'Foster City',
'Gilroy',
'Half Moon Bay',
'Hillsborough',
'Lexington Hills',
'Los Altos',
'Los Gatos',
'Menlo Park',
'Millbrae',
'Montara',
'Monte Sereno',
'Morgan Hill',
'Moss Beach',
'Mountain View',
'Pacifica',
'Palo Alto',
'Portola Valley',
'Redwood City',
'San Bruno',
'San Carlos',
'San Mateo',
'Santa Clara',
'Saratoga',
'South San Francisco',
'Sunnyvale',
'Woodside']
DB = Sequel.connect(ENV['DATABASE_URL'])
def parseStation(name, location, station_id)
puts "scraping #{name} station in #{location} with id #{station_id}"
location = location.gsub('Redwood City', 'Redwood_City')
name = name.gsub(/ & /, '_-and-_')
url = URI.escape("http://www.sanfrangasprices.com/#{name}_Gas_Stations/#{location}/#{station_id}/index.aspx")
page = Nokogiri::HTML(open(url))
info = page.xpath('//*[@id="spa_cont"]/div[1]/dl')
data = Hash.new('')
data[:name] = info.css('dt').text.strip
address, phone = info.css('dd').text.split(/phone:/i)
data[:address] = address.strip
if phone.nil?
data[:phone] = ''
else
data[:phone] = phone.strip
end
puts data
begin
DB[:stations].insert(:station_id => station_id,
:location => location,
:name => data[:name],
:address => data[:address],
:phone => data[:phone])
rescue => e
puts e
# nothing
end
end
# Scrape the price-search results for one city and fuel grade; for each row
# insert a price record, creating the station record first when missing.
#
# location - city name from $locations
# fuel     - key of $fuel_type ('regular', 'midgrade', 'premium', 'diesel')
def parseLocation(location, fuel)
  puts "scraping prices in #{location} for #{fuel} fuel"
  type = $fuel_type[fuel]
  url = URI.escape("http://www.sanfrangasprices.com/GasPriceSearch.aspx?fuel=#{type}&typ=adv&srch=1&state=CA&area=#{location}&site=SanFran,SanJose,California&tme_limit=4")
  page = Nokogiri::HTML(open(url))
  rows = page.xpath('//*[@id="pp_table"]/table/tbody/tr')
  collected = Time.now
  rows.each do |row|
    # Skip rows whose station link does not lead to a scrapable detail page.
    if row.css('.address').css('a').first['href'].match(/redirect/i)
      puts "skipping station with a redirect"
      next
    end
    if row.css('.address').css('a').first['href'].match(/FUEL/)
      puts "skipping FUEL 24:7 station with a bad URL"
      next
    end
    data = Hash.new('')
    p_price = row.css('.p_price')
    data[:price] = Float(p_price.text)
    data[:station_id] = Integer(p_price[0]['id'].split('_').last)
    # Fix: the name/address fields were parsed twice; parse them once.
    address = row.css('.address')
    data[:name] = address.css('a').text.strip
    data[:address] = address.css('dd').text.strip
    data[:user] = row.css('.mem').text.strip
    data[:reported] = DateTime.parse(row.css('.tm')[0]['title']).to_time
    puts data
    noStation = false
    tries = 0
    begin
      DB.transaction do
        if (DB[:stations].where(:station_id => data[:station_id]).count < 1)
          noStation = true
          # rate limiting
          sleep(0.5)
          parseStation(data[:name], location, data[:station_id])
        end
        DB[:prices].insert(:station_id => data[:station_id],
                           :collected => collected,
                           :reported => data[:reported],
                           :type => fuel, # real name not A/B/C/D
                           :price => data[:price],
                           :user => data[:user])
      end
    rescue => e
      puts e
      # Fix: retry at most once when the failure happened while creating a
      # missing station row; the previous bare `retry` could loop forever.
      if noStation && (tries += 1) < 2
        retry
      end
    end
  end
end
$locations.each do |loc|
$fuel_type.keys.each do |fuel|
parseLocation(loc, fuel)
# rate limiting
sleep(0.5)
end
end
scrape lat/long, stop infinite retries
require 'open-uri'
require 'nokogiri'
require 'dotenv/load'
require 'date'
require 'time'
require 'sequel'
require 'uri'
$fuel_type = {
'regular' => 'A',
'midgrade' => 'B',
'premium' => 'C',
'diesel' => 'D'
}
$locations = ['Atherton',
'Belmont',
'Broadmoor',
'Burbank',
'Burlingame',
'Campbell',
'Colma',
'Cupertino',
'Daly City',
'East Palo Alto',
'Foster City',
'Gilroy',
'Half Moon Bay',
'Hillsborough',
'Lexington Hills',
'Los Altos',
'Los Gatos',
'Menlo Park',
'Millbrae',
'Montara',
'Monte Sereno',
'Morgan Hill',
'Moss Beach',
'Mountain View',
'Pacifica',
'Palo Alto',
'Portola Valley',
'Redwood City',
'San Bruno',
'San Carlos',
'San Mateo',
'Santa Clara',
'Saratoga',
'South San Francisco',
'Sunnyvale',
'Woodside']
DB = Sequel.connect(ENV['DATABASE_URL'])
# Scrape a single station's detail page and insert a row into the
# `stations` table. Duplicate-key insert errors are expected and swallowed.
#
# name       - station name as shown in the search results
# location   - city name ('Redwood City' is special-cased in the URL)
# station_id - numeric site id used in the detail-page URL
def parseStation(name, location, station_id)
  puts "scraping #{name} station in #{location} with id #{station_id}"
  location = location.gsub('Redwood City', 'Redwood_City')
  name = name.gsub(/ & /, '_-and-_')
  # NOTE(review): URI.escape is deprecated and removed in Ruby 3 -- consider
  # URI::DEFAULT_PARSER.escape if this script is upgraded.
  url = URI.escape("http://www.sanfrangasprices.com/#{name}_Gas_Stations/#{location}/#{station_id}/index.aspx")
  page = Nokogiri::HTML(open(url))
  info = page.xpath('//*[@id="spa_cont"]/div[1]/dl')
  data = Hash.new('')
  data[:name] = info.css('dt').text.strip
  # The <dd> text is "address phone: number"; the phone part may be absent.
  address, phone = info.css('dd').text.split(/phone:/i)
  data[:address] = address.strip
  if phone.nil?
    data[:phone] = ''
  else
    data[:phone] = phone.strip
  end
  # Coordinates are parsed out of the map link's lat=/long= query params.
  mapLink = page.xpath('//*[@id="spa_cont"]/div[1]/div[1]/a')[0]['href']
  data[:lat] = Float(mapLink.match(/lat=(-?\d+.\d+)/).captures[0])
  data[:long] = Float(mapLink.match(/long=(-?\d+.\d+)/).captures[0])
  puts data
  begin
    DB[:stations].insert(:station_id => station_id,
                         :location => location,
                         :name => data[:name],
                         :address => data[:address],
                         :phone => data[:phone])
    # TODO put lat, long into db
  rescue => e
    puts e
    # nothing
  end
end
# Scrape the price-search results for one city and fuel grade; for each row
# insert a price record, creating the station record first when missing.
#
# location - city name from $locations
# fuel     - key of $fuel_type ('regular', 'midgrade', 'premium', 'diesel')
def parseLocation(location, fuel)
  puts "scraping prices in #{location} for #{fuel} fuel"
  type = $fuel_type[fuel]
  url = URI.escape("http://www.sanfrangasprices.com/GasPriceSearch.aspx?fuel=#{type}&typ=adv&srch=1&state=CA&area=#{location}&site=SanFran,SanJose,California&tme_limit=4")
  page = Nokogiri::HTML(open(url))
  rows = page.xpath('//*[@id="pp_table"]/table/tbody/tr')
  collected = Time.now
  rows.each do |row|
    # Skip rows whose station link does not lead to a scrapable detail page.
    if row.css('.address').css('a').first['href'].match(/redirect/i)
      puts "skipping station with a redirect"
      next
    end
    if row.css('.address').css('a').first['href'].match(/FUEL/)
      puts "skipping FUEL 24:7 station with a bad URL"
      next
    end
    data = Hash.new('')
    p_price = row.css('.p_price')
    data[:price] = Float(p_price.text)
    data[:station_id] = Integer(p_price[0]['id'].split('_').last)
    # Fix: the name/address fields were parsed twice; parse them once.
    address = row.css('.address')
    data[:name] = address.css('a').text.strip
    data[:address] = address.css('dd').text.strip
    data[:user] = row.css('.mem').text.strip
    data[:reported] = DateTime.parse(row.css('.tm')[0]['title']).to_time
    puts data
    noStation = false
    tries = 0
    begin
      DB.transaction do
        if (DB[:stations].where(:station_id => data[:station_id]).count < 1)
          noStation = true
          tries += 1
          # rate limiting
          sleep(0.5)
          parseStation(data[:name], location, data[:station_id])
        end
        DB[:prices].insert(:station_id => data[:station_id],
                           :collected => collected,
                           :reported => data[:reported],
                           :type => fuel, # real name not A/B/C/D
                           :price => data[:price],
                           :user => data[:user])
      end
    rescue => e
      puts e
      # A failed station scrape gets exactly one attempt: `tries` is bumped
      # before parseStation runs, so on error we skip the row rather than
      # retrying forever. `retry` only fires for failures outside that path.
      if tries > 0
        next
      elsif noStation
        retry
      end
    end
  end
end
$locations.each do |loc|
$fuel_type.keys.each do |fuel|
parseLocation(loc, fuel)
# rate limiting
sleep(0.5)
end
end |
#!/usr/bin/env ruby
# Apply one instruction to a single boolean light and return its new state.
# Unknown actions yield nil, matching the original case-with-no-else.
def update_light(value, action)
  return true if action == 'turn on'
  return false if action == 'turn off'
  !value if action == 'toggle'
end
def solve(data)
lights = []
(0..999).each do |i|
lights[i] = [false] * 1000
end
data.each_line do |line|
action, ux, uy, lx, ly = /(.*) (\d+),(\d+) through (\d+),(\d+)/.match(
line
)[1..5]
(ux.to_i..lx.to_i).to_a.product((uy.to_i..ly.to_i).to_a) do |x, y|
lights[x][y] = update_light(lights[x][y], action)
end
end
lights.flatten.count(true)
end
# Part 2: lights have integer brightness. 'turn on' adds 1, 'turn off'
# subtracts 1 (floored at 0), 'toggle' adds 2. Returns total brightness.
def solve2(data)
  lights = []
  (0..999).each do |i|
    lights[i] = [0] * 1000
  end
  data.each_line do |line|
    ux, uy, lx, ly = line.scan(/\d+/).map(&:to_i)
    # Perf fix: classify the instruction once per line instead of running
    # three String#start_with? checks for every cell of the rectangle
    # (up to a million cells per instruction).
    op = if line.start_with?('turn on') then :on
         elsif line.start_with?('turn off') then :off
         elsif line.start_with?('toggle') then :toggle
         end
    (ux..lx).each do |x|
      row = lights[x]
      (uy..ly).each do |y|
        case op
        when :on then row[y] += 1
        when :off then row[y] = [row[y] - 1, 0].max
        when :toggle then row[y] += 2
        end
      end
    end
  end
  lights.flatten.sum
end
if $PROGRAM_NAME == __FILE__
data = IO.read('day6.input').chomp
puts "#{solve(data)} lights are lit."
puts "Total light brightness is #{solve2(data)}"
end
2015 day 6 in Ruby, not using named matches
#!/usr/bin/env ruby
# Compute the next state of one boolean light for a given instruction.
# Returns nil for unrecognized actions (same as the original case form).
def update_light(value, action)
  if action == 'turn on'
    true
  elsif action == 'turn off'
    false
  elsif action == 'toggle'
    !value
  end
end
# Part 1: boolean on/off lights on a 1000x1000 grid. Returns the number of
# lights left on after applying every instruction in `data`.
def solve(data)
  # Named captures (?<action>...) were benchmarked noticeably slower here
  # (~5.6s vs ~3.5s), so positional groups are kept.
  pattern = /(.*) (\d+),(\d+) through (\d+),(\d+)/
  grid = Array.new(1000) { [false] * 1000 }
  data.each_line do |line|
    action, x1, y1, x2, y2 = pattern.match(line)[1..5]
    (x1.to_i..x2.to_i).each do |x|
      (y1.to_i..y2.to_i).each do |y|
        # Inlined update_light: identical on/off/toggle semantics.
        grid[x][y] =
          case action
          when 'turn on' then true
          when 'turn off' then false
          when 'toggle' then !grid[x][y]
          end
      end
    end
  end
  grid.flatten.count(true)
end
# Part 2: lights have integer brightness. 'turn on' adds 1, 'turn off'
# subtracts 1 (floored at 0), 'toggle' adds 2. Returns total brightness.
def solve2(data)
  lights = []
  (0..999).each do |i|
    lights[i] = [0] * 1000
  end
  data.each_line do |line|
    ux, uy, lx, ly = line.scan(/\d+/).map(&:to_i)
    # Perf fix: classify the instruction once per line instead of running
    # three String#start_with? checks for every cell of the rectangle
    # (up to a million cells per instruction).
    op = if line.start_with?('turn on') then :on
         elsif line.start_with?('turn off') then :off
         elsif line.start_with?('toggle') then :toggle
         end
    (ux..lx).each do |x|
      row = lights[x]
      (uy..ly).each do |y|
        case op
        when :on then row[y] += 1
        when :off then row[y] = [row[y] - 1, 0].max
        when :toggle then row[y] += 2
        end
      end
    end
  end
  lights.flatten.sum
end
# Script entry point: read the puzzle input from ./day6.input and print both
# answers. Guarded so the functions can be required without side effects.
if $PROGRAM_NAME == __FILE__
  data = IO.read('day6.input').chomp
  puts "#{solve(data)} lights are lit."
  puts "Total light brightness is #{solve2(data)}"
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'electric_sheep/version'
Gem::Specification.new do |spec|
spec.name = "electric_sheep"
spec.version = ElectricSheep::VERSION
spec.authors = ["Benoit Anselme", "Patrice Izzo", "Jef Mathiot", "Fabrice Nourisson",
"Eric Hartmann", "Benjamin Severac"]
spec.email = ["foss@servebox.com"]
spec.description = %q{A backup system for Application Developers.}
spec.summary = %q{A backup system for Application Developers and DevOps.}
spec.homepage = "https://github.com/servebox/electric_sheep"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.files.reject! { |fn| fn =~ /^acceptance\// || fn =~ /^build\// }
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency 'bundler', '>= 1.3.0', '< 2.0'
spec.add_development_dependency 'rake', '~>10.3', '>= 10.3.2'
spec.add_development_dependency 'mocha', '~> 1.1', '>= 1.1.0'
spec.add_development_dependency 'coveralls', '~> 0.7', '>= 0.7.1'
spec.add_development_dependency 'minitest', '~> 5.4', '>= 5.4.1'
spec.add_development_dependency 'minitest-implicit-subject', '~> 1.4', '>= 1.4.0'
spec.add_development_dependency 'rb-readline', '~> 0.5', '>= 0.5.0'
spec.add_development_dependency 'guard-minitest', '~> 2.3', '>= 2.3.2'
spec.add_development_dependency 'timecop', '~> 0.7', '>= 0.7.1'
spec.add_development_dependency 'aruba', '~> 0.6', '>= 0.6.1'
spec.add_dependency 'thor', '~> 0.19', '>= 0.19.1'
spec.add_dependency 'colorize', '~> 0.7', '>= 0.7.3'
spec.add_dependency 'activesupport', '~> 4.1', '>= 4.1.0'
spec.add_dependency 'session', '~> 3.2', '>= 3.2.0'
spec.add_dependency 'net-ssh', '~> 2.9', '>= 2.9.0'
spec.add_dependency 'net-scp', '~> 1.2', '>= 1.2.1'
spec.add_dependency 'fog', '~> 1.23', '>= 1.23.0'
spec.add_dependency 'lumberjack', '~> 1.0', '>= 1.0.9'
spec.add_dependency 'mail', '~> 2.6', '>= 2.6.3'
spec.add_dependency 'premailer', '~> 1.8', '>= 1.8.2'
spec.add_dependency 'posix-spawn', '~> 0.3', '>= 0.3.9'
end
Add Guard dependency
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'electric_sheep/version'
Gem::Specification.new do |spec|
spec.name = "electric_sheep"
spec.version = ElectricSheep::VERSION
spec.authors = ["Benoit Anselme", "Patrice Izzo", "Jef Mathiot", "Fabrice Nourisson",
"Eric Hartmann", "Benjamin Severac"]
spec.email = ["foss@servebox.com"]
spec.description = %q{A backup system for Application Developers.}
spec.summary = %q{A backup system for Application Developers and DevOps.}
spec.homepage = "https://github.com/servebox/electric_sheep"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.files.reject! { |fn| fn =~ /^acceptance\// || fn =~ /^build\// }
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency 'bundler', '>= 1.3.0', '< 2.0'
spec.add_development_dependency 'rake', '~>10.3', '>= 10.3.2'
spec.add_development_dependency 'mocha', '~> 1.1', '>= 1.1.0'
spec.add_development_dependency 'coveralls', '~> 0.7', '>= 0.7.1'
spec.add_development_dependency 'minitest', '~> 5.4', '>= 5.4.1'
spec.add_development_dependency 'minitest-implicit-subject', '~> 1.4', '>= 1.4.0'
spec.add_development_dependency 'rb-readline', '~> 0.5', '>= 0.5.0'
spec.add_development_dependency 'guard', '~> 2.11', '>= 2.11.1'
spec.add_development_dependency 'guard-minitest', '~> 2.3', '>= 2.3.2'
spec.add_development_dependency 'timecop', '~> 0.7', '>= 0.7.1'
spec.add_development_dependency 'aruba', '~> 0.6', '>= 0.6.1'
spec.add_dependency 'thor', '~> 0.19', '>= 0.19.1'
spec.add_dependency 'colorize', '~> 0.7', '>= 0.7.3'
spec.add_dependency 'activesupport', '~> 4.1', '>= 4.1.0'
spec.add_dependency 'session', '~> 3.2', '>= 3.2.0'
spec.add_dependency 'net-ssh', '~> 2.9', '>= 2.9.0'
spec.add_dependency 'net-scp', '~> 1.2', '>= 1.2.1'
spec.add_dependency 'fog', '~> 1.23', '>= 1.23.0'
spec.add_dependency 'lumberjack', '~> 1.0', '>= 1.0.9'
spec.add_dependency 'mail', '~> 2.6', '>= 2.6.3'
spec.add_dependency 'premailer', '~> 1.8', '>= 1.8.2'
spec.add_dependency 'posix-spawn', '~> 0.3', '>= 0.3.9'
end
|
framework 'AppKit'
framework 'Foundation'
framework 'Cocoa'
framework 'QuartzCore'
require 'yaml'
CONFIG_FILENAME = 'gitifier.yml'
class Repo
def initialize(dir)
@dir = dir
@img_ok = NSImage.new.initWithContentsOfFile 'accept.png'
@img_dirty = NSImage.new.initWithContentsOfFile 'cancel.png'
@img_error = NSImage.new.initWithContentsOfFile 'error.png'
@img_push = NSImage.new.initWithContentsOfFile 'drive_add.png'
@img_pull = NSImage.new.initWithContentsOfFile 'drive_delete.png'
end
def attach(menu_item)
@menu_item = menu_item
end
def pull
puts "Pulling #{name}"
Dir.chdir(@dir) do
`git pull`
end
end
def push
puts "Pushing #{name}"
Dir.chdir(@dir) do
r = `git push`
if r.include?('[rejected]')
@error = 'merge'
end
end
end
def update
return unless @menu_item
if @error
@menu_item.setImage @img_error
elsif !clean?
@menu_item.setImage @img_dirty
elsif needs_to_pull?
@menu_item.setImage @img_pull
elsif needs_to_push?
@menu_item.setImage @img_push
else
@menu_item.setImage @img_ok
end
end
def fetch
puts "Fetching #{name}"
Dir.chdir(@dir) do
`git fetch --all`
end
end
def proceed
@error = nil
if clean?
pull if needs_to_pull?
push if needs_to_push?
end
update
end
def name
File.basename(@dir)
end
def git_status
Dir.chdir(@dir) do
`git status`
end
end
def clean?
git_status.include? 'nothing to commit, working directory clean'
end
def needs_to_pull?
git_status.include? 'branch is behind'
end
def needs_to_push?
git_status.include? 'branch is ahead'
end
end
def load_repos
puts "Loading repos"
@repos = []
@repo_paths.each do |p|
puts "Inspecting #{p} ..."
Dir["#{p}/*"].each do |d|
if File.directory?(d) and File.exist?(File.join(d, '.git'))
puts " - found #{File.basename(d)}"
@repos << Repo.new(d)
end
end
end
@repos = @repos.sort_by{|r| r.name}
end
# We build the status bar item menu
def setupMenu
menu = NSMenu.new
menu.initWithTitle 'Gitifier'
@repos.each_with_index do |r, ri|
mi = NSMenuItem.new
mi.title = r.name
mi.target = self
mi.action = "proceed:"
mi.setRepresentedObject ri
r.attach mi
menu.addItem mi
end
mi = NSMenuItem.new
mi.title = 'Reload cfg&repos'
mi.action = 'reload:'
mi.target = self
menu.addItem mi
mi = NSMenuItem.new
mi.title = 'Quit'
mi.action = 'quit:'
mi.target = self
menu.addItem mi
menu
end
# Init the status bar
def initStatusBar(menu)
status_bar = NSStatusBar.systemStatusBar
status_item = status_bar.statusItemWithLength(NSVariableStatusItemLength)
status_item.setMenu menu
img = NSImage.new.initWithContentsOfFile 'bug.png'
status_item.setImage(img)
end
# Menu Item Actions
# def sayHello(sender)
# alert = NSAlert.new
# alert.messageText = 'This is MacRuby Status Bar Application'
# alert.informativeText = 'Cool, huh?'
# alert.alertStyle = NSInformationalAlertStyle
# alert.addButtonWithTitle("Yeah!")
# response = alert.runModal
# end
def proceed(sender)
@repos[sender.representedObject.to_i].proceed
end
def fetchRepos(sender)
@fetch_index ||= 0
@fetch_index += 1
@repos[@fetch_index % @repos.size].fetch
end
def updateRepos(sender)
@update_index ||= 0
@update_index += 1
@repos[@update_index % @repos.size].update
end
def quit(sender)
app = NSApplication.sharedApplication
app.terminate(self)
end
def load_config
raise "[#{CONFIG_FILENAME}] not found!" unless File.exist?(CONFIG_FILENAME)
yaml = YAML::load(File.open CONFIG_FILENAME)
@repo_paths = yaml['paths']
end
def reload(sender)
load_config
load_repos
initStatusBar(setupMenu)
end
load_config
load_repos
app = NSApplication.sharedApplication
initStatusBar(setupMenu)
@repos.each(&:update)
NSTimer.scheduledTimerWithTimeInterval 1, target: self, selector: 'updateRepos:', userInfo: nil, repeats: true
NSTimer.scheduledTimerWithTimeInterval 15, target: self, selector: 'fetchRepos:', userInfo: nil, repeats: true
app.run
mass fetching
framework 'AppKit'
framework 'Foundation'
framework 'Cocoa'
framework 'QuartzCore'
require 'yaml'
CONFIG_FILENAME = 'gitifier.yml'
class Repo
def initialize(dir)
@dir = dir
@img_ok = NSImage.new.initWithContentsOfFile 'accept.png'
@img_dirty = NSImage.new.initWithContentsOfFile 'cancel.png'
@img_error = NSImage.new.initWithContentsOfFile 'error.png'
@img_push = NSImage.new.initWithContentsOfFile 'drive_add.png'
@img_pull = NSImage.new.initWithContentsOfFile 'drive_delete.png'
end
def attach(menu_item)
@menu_item = menu_item
end
def pull
puts "Pulling #{name}"
Dir.chdir(@dir) do
`git pull`
end
end
def push
puts "Pushing #{name}"
Dir.chdir(@dir) do
r = `git push`
if r.include?('[rejected]')
@error = 'merge'
end
end
end
def update
return unless @menu_item
if @error
@menu_item.setImage @img_error
elsif !clean?
@menu_item.setImage @img_dirty
elsif needs_to_pull?
@menu_item.setImage @img_pull
elsif needs_to_push?
@menu_item.setImage @img_push
else
@menu_item.setImage @img_ok
end
end
def fetch
puts "Fetching #{name}"
Dir.chdir(@dir) do
`git fetch --all`
end
end
def proceed
@error = nil
if clean?
pull if needs_to_pull?
push if needs_to_push?
end
update
end
def name
File.basename(@dir)
end
def git_status
Dir.chdir(@dir) do
`git status`
end
end
def clean?
git_status.include? 'nothing to commit, working directory clean'
end
def needs_to_pull?
git_status.include? 'branch is behind'
end
def needs_to_push?
git_status.include? 'branch is ahead'
end
end
# Menu action: fetch then pull every repo, sequentially.
# NOTE(review): runs on the main thread, so the UI blocks while git works;
# consider a background thread for many/slow repos.
def pull_all(sender)
  @repos.each do |r|
    r.fetch
    r.pull
  end
end
# Scan every configured path for git working copies and rebuild @repos,
# sorted by repository name.
def load_repos
  puts "Loading repos"
  @repos = []
  @repo_paths.each do |path|
    puts "Inspecting #{path} ..."
    Dir["#{path}/*"].each do |candidate|
      next unless File.directory?(candidate) && File.exist?(File.join(candidate, '.git'))
      puts " - found #{File.basename(candidate)}"
      @repos << Repo.new(candidate)
    end
  end
  @repos = @repos.sort_by(&:name)
end
# Builds the status bar item's NSMenu: one entry per repository (wired to
# #proceed), a "Pull'n'fetch" entry (#pull_all) and a "Quit" entry (#quit).
def setupMenu
  menu = NSMenu.new
  menu.initWithTitle 'Gitifier'
  @repos.each_with_index do |r, ri|
    mi = NSMenuItem.new
    mi.title = r.name
    mi.target = self
    mi.action = "proceed:"
    # stash the repo's index on the item so #proceed can find the repo
    mi.setRepresentedObject ri
    r.attach mi
    menu.addItem mi
  end
  mi = NSMenuItem.new
  mi.title = "Pull'n'fetch"
  mi.action = 'pull_all:'
  mi.target = self
  menu.addItem mi
  # mi = NSMenuItem.new
  # mi.title = 'Reload cfg&repos'
  # mi.action = 'reload:'
  # mi.target = self
  # menu.addItem mi
  mi = NSMenuItem.new
  mi.title = 'Quit'
  mi.action = 'quit:'
  mi.target = self
  menu.addItem mi
  menu
end
# Installs a variable-length item in the system status bar, attaches
# +menu+ to it and gives it its icon (bug.png).
def initStatusBar(menu)
  status_bar = NSStatusBar.systemStatusBar
  status_item = status_bar.statusItemWithLength(NSVariableStatusItemLength)
  status_item.setMenu menu
  img = NSImage.new.initWithContentsOfFile 'bug.png'
  status_item.setImage(img)
end
# Menu Item Actions
# def sayHello(sender)
# alert = NSAlert.new
# alert.messageText = 'This is MacRuby Status Bar Application'
# alert.informativeText = 'Cool, huh?'
# alert.alertStyle = NSInformationalAlertStyle
# alert.addButtonWithTitle("Yeah!")
# response = alert.runModal
# end
# Menu action: synchronize the repo whose index was stored on the clicked
# menu item (see setRepresentedObject in setupMenu).
def proceed(sender)
  @repos[sender.representedObject.to_i].proceed
end
# Timer callback: fetch a single repository per tick, cycling round-robin
# through @repos so the work is spread out over time.
def fetchRepos(sender)
  @fetch_index = (@fetch_index || 0) + 1
  @repos[@fetch_index % @repos.size].fetch
end
# Timer callback: refresh one repository's menu icon per tick, cycling
# round-robin through @repos.
def updateRepos(sender)
  @update_index = (@update_index || 0) + 1
  @repos[@update_index % @repos.size].update
end
# Menu action: terminate the application.
def quit(sender)
  app = NSApplication.sharedApplication
  app.terminate(self)
end
# Reads gitifier.yml and remembers the list of directories to scan for
# repositories (the 'paths' key).
#
# @raise [RuntimeError] when the configuration file is missing
def load_config
  raise "[#{CONFIG_FILENAME}] not found!" unless File.exist?(CONFIG_FILENAME)
  # YAML.load_file opens and closes the file itself; the previous
  # YAML::load(File.open ...) leaked the file handle.
  yaml = YAML.load_file(CONFIG_FILENAME)
  @repo_paths = yaml['paths']
end
# Menu action (its menu entry is currently commented out in setupMenu):
# re-read the config, re-scan for repositories, rebuild the menu.
def reload(sender)
  load_config
  load_repos
  initStatusBar(setupMenu)
end
# --- application bootstrap ---
load_config
load_repos
app = NSApplication.sharedApplication
initStatusBar(setupMenu)
@repos.each(&:update)
# refresh one repo icon every second and fetch one repo every 15 seconds;
# both callbacks cycle round-robin through @repos
NSTimer.scheduledTimerWithTimeInterval 1, target: self, selector: 'updateRepos:', userInfo: nil, repeats: true
NSTimer.scheduledTimerWithTimeInterval 15, target: self, selector: 'fetchRepos:', userInfo: nil, repeats: true
app.run
|
Add database migration for Sequel schema.
This database should be painfully simple. We are really only storing
references so that we are able to build a proper URL to the GitHub
repository.
There is obvious room for changes here, such as handling a repository
that moves or gets deleted. That logic may need to be sketched out
later.
require 'sequel'
Sequel::Model.plugin(:schema)
# Define the database table which represents the users that have registered
# through Omniauth and wish to add a Gear to the registry.
# (DB is expected to be an already-connected Sequel::Database.)
DB.create_table? :users do
  # NOTE(review): inside a create_table block Sequel's schema generator
  # normally spells this `primary_key`; confirm `set_primary_key` is
  # provided by the :schema plugin in the Sequel version in use.
  set_primary_key :id
  column :name, String, :index => true, :null => false
end
# Define the database table which represents the Gears that have been registered
# through GitHub (or by other means) callbacks.
DB.create_table? :gears do
  # composite primary key: a gear is identified by (name, version)
  set_primary_key [:name, :version], :name => 'PkGears'
  column :name, String, :index => true, :null => false
  column :version, String, :index => true, :null => false
  column :repository, String, :null => false
  # true while the gear is active/visible in the registry
  # NOTE(review): `Boolean` is not a Ruby core constant — Sequel examples
  # typically use TrueClass; confirm it resolves in this Sequel version.
  column :status, Boolean, :null => false, :default => true
  foreign_key :user_id, :users, :key => :id
end
|
#!/usr/bin/env ruby
require 'open-uri'
require 'json'
require 'nokogiri'
require 'pp'
require 'fileutils'
require 'bigdecimal'
current_folder = File.dirname(File.expand_path(__FILE__))
@path = File.join(current_folder, 'public', 'api')
cmc_data = open("http://coinmarketcap.com/all/views/all/")
@doc = Nokogiri::HTML(cmc_data)
# File.write('static.html', cmc_data.read)
# @doc = Nokogiri::HTML(File.read('static.html'))
@ts = Time.now.to_i
@currencies = ['usd', 'btc']
@exchange_currencies = %w(usd aud brl cad chf cny eur gbp hkd idr inr jpy krw mxn rub)
# order is important and KEEP ID AS THE LAST ELEMENT. you have been warned
@keys = ['position', 'name', 'symbol', 'category', 'marketCap', 'price', 'availableSupply', 'availableSupplyNumber', 'volume24', 'change1h', 'change7h', 'change7d', 'timestamp']
# Converts a coin hash to the legacy JSON layout for one currency:
# flattens the per-currency sub-hashes and records the chosen currency.
# NOTE: mutates the hash it is given (callers pass a clone).
def old_format coin, currency
  coin['currency'] = currency
  %w(marketCap price volume24).each { |field| coin[field] = coin[field][currency] }
  coin
end
# Serializes +hash+ as JSON into the file at +path+.
def write path, hash
  File.write(path, hash.to_json)
end
# Builds a legacy-format copy of every entry in coins['markets'], leaving
# the input entries untouched (each market is cloned before conversion).
def old_format_all coins, currency
  {
    timestamp: coins['timestamp'],
    markets: coins['markets'].map { |market| old_format(market.clone, currency) }
  }
end
# Converts a coin hash to the flat v1 API format for one currency.
#
# Bug fix: 'price' previously always used the USD value even when another
# currency was requested, unlike every other per-currency field here
# (marketCap, volume24, change*); it now honours the +currency+ argument.
# The default ('usd') keeps existing call sites unchanged.
def to_v1_format coin, currency='usd'
  {
    "position"=> coin['position'],
    "name"=> coin['name'],
    "marketCap"=> coin['marketCap'][currency],
    "price"=> coin['price'][currency],
    "totalSupply"=> coin['availableSupply'],
    "volume24"=> coin['volume24'][currency],
    "change24"=> "0.0 %",
    "change1h"=> coin['change1h'][currency],
    "change7h"=> coin['change7h'][currency],
    "change7d"=> coin['change7d'][currency],
    "timestamp"=> coin['timestamp'],
    "lowVolume"=> false,
    "id"=> coin['symbol'].downcase,
    "currency"=> currency
  }
end
# v2 is structurally identical to v1; only the currency is chosen per
# output file, so this simply delegates to to_v1_format.
def to_v2_format coin, currency='usd'
  to_v1_format(coin, currency)
end
# Converts a coin hash to the v4 format: symbol keys, per-currency
# sub-hashes left intact. Insertion order is preserved deliberately
# because it determines the key order in the emitted JSON.
def to_v4_format coin
  result = {}
  { position: 'position', name: 'name', marketCap: 'marketCap',
    price: 'price', totalSupply: 'availableSupply', volume24: 'volume24' }.each do |sym, key|
    result[sym] = coin[key]
  end
  result[:change24] = "0.0 %"
  %i(change1h change7h change7d timestamp).each { |sym| result[sym] = coin[sym.to_s] }
  result[:lowVolume] = false
  result[:id] = coin['symbol'].downcase
  result
end
# Converts a coin hash to the v6 format: numeric values instead of
# strings, change7h renamed to change24h, availableSupply replaced by the
# numeric supply, and volume24 recomputed per currency from the BTC
# volume and the price ratio. Works on a shallow clone, so the nested
# marketCap/price/volume24 hashes of the input ARE mutated (unchanged
# from the original behavior).
#
# Fix: the helper used to be a nested `def`, which (re)defines a
# top-level method on every call; a lambda keeps it local.
def to_v6_format coin
  to_general_number = lambda do |n|
    return nil if n == '?' || n == nil
    n.to_f
  end
  coin_clone = coin.clone
  # delete + re-insert so the renamed keys land at the end, keeping a
  # stable key order in the emitted JSON
  coin_clone['change24h'] = coin_clone.delete('change7h')
  coin_clone['change7d'] = coin_clone.delete('change7d')
  coin_clone['timestamp'] = coin_clone.delete('timestamp')
  coin_clone['change1h'] = to_general_number.call(coin_clone['change1h']['usd'])
  coin_clone['change24h'] = to_general_number.call(coin_clone['change24h']['usd'])
  coin_clone['change7d'] = to_general_number.call(coin_clone['change7d']['usd'])
  coin_clone['availableSupply'] = coin_clone.delete('availableSupplyNumber')
  coin_clone['position'] = coin_clone['position'].to_i
  ['marketCap', 'price'].each do |key|
    coin_clone[key].keys.each do |currency|
      coin_clone[key][currency] = to_general_number.call(coin_clone[key][currency])
    end
  end
  coin_clone['volume24'].keys.each do |currency|
    btc_price = BigDecimal(coin_clone['price'][currency].to_s) / BigDecimal(coin_clone['price']['btc'].to_s)
    coin_clone['volume24'][currency] = btc_price.nan? ? 0.to_f : (BigDecimal(coin_clone['volume24']['btc'].to_s) * btc_price).to_f
  end
  coin_clone
end
# Writes every per-coin output file (API v1, v2 per currency, v4, v5, v6)
# and appends the v6 representation to the coin's history file.
def write_one coin
  # version 1
  write("#{@path}/#{coin['symbol'].downcase}.json", to_v1_format(coin))
  # version 2
  @currencies.each do |currency|
    write("#{@path}/#{currency}/#{coin['symbol'].downcase}.json", to_v2_format(coin, currency))
  end
  # version 4
  write("#{@path}/v4/#{coin['symbol'].downcase}.json", to_v4_format(coin))
  # version 5 — raw coin hash; note the filename keeps the symbol's case
  # here, unlike the lower-cased v1/v2/v4 filenames
  coin_path = "#{@path}/v5/#{coin['symbol']}.json"
  write(coin_path, coin)
  # version 6
  coin_path = "#{@path}/v6/#{coin['symbol']}.json"
  v6_coin = to_v6_format(coin)
  write(coin_path, v6_coin)
  write_history(v6_coin)
end
# Appends the current snapshot of +coin+ to its per-year v6 history file,
# creating the file on first use. Each calendar day is recorded at most
# once (later snapshots from the same day are ignored).
#
# Fix: File.exists? is deprecated and was removed in Ruby 3.2; use
# File.exist?.
def write_history coin
  time_at = Time.at(@ts)
  path = "#{@path}/v6/history/#{coin['symbol']}_#{time_at.year}.json"
  write(path, { 'symbol' => coin['symbol'], 'history' => {} }) unless File.exist?(path)
  hash = JSON.parse(File.read(path))
  key = time_at.strftime('%d-%m-%Y')
  unless hash['history'].key?(key)
    hash['history'][key] = coin
    write(path, hash)
  end
end
# writes all.json for all API versions.
def write_all coin
  # version 1
  h = {
    "timestamp"=> coin['timestamp'],
    "markets"=> []
  }
  coin['markets'].each do |c|
    h['markets'] << to_v1_format(c)
  end
  write("#{@path}/all.json", h)
  # version 2
  h = {
    "timestamp"=> coin['timestamp'],
    "markets"=> []
  }
  # NOTE(review): h['markets'] is not reset between currencies, so the
  # second currency's all.json also contains the first currency's rows.
  # This looks unintended — confirm before changing, since consumers may
  # depend on the current output.
  @currencies.each do |currency|
    coin['markets'].each do |c|
      h['markets'] << to_v2_format(c, currency)
    end
    write("#{@path}/#{currency}/all.json", h)
  end
  # version 4
  h = {
    "timestamp"=> coin['timestamp'],
    "markets"=> []
  }
  coin['markets'].each do |c|
    h['markets'] << to_v4_format(c)
  end
  write("#{@path}/v4/all.json", h)
  # version 5 — raw scraped structure
  write("#{@path}/v5/all.json", coin)
  # version 6
  all_clone = coin.clone
  all_clone['markets'] = all_clone['markets'].map { |e| to_v6_format(e) }
  write("#{@path}/v6/all.json", all_clone)
end
# Scrapes the coinmarketcap "all" table identified by +table_id+ from the
# pre-loaded @doc (Nokogiri document) and returns
# { 'timestamp', 'markets', 'currencyExchangeRates' }.
# Every cell parse is wrapped in begin/rescue so a malformed row degrades
# to '?' (or '0.0 %' for volume) instead of aborting the scrape.
def get_json_data table_id
  markets = []
  cer = @doc.css("#currency-exchange-rates")
  currency_exchange_rates = {}
  @exchange_currencies.each do |currency|
    currency_exchange_rates[currency] = cer.attribute("data-#{currency}").text.strip
  end
  # Converts a USD amount into +currency+ via the scraped exchange rate.
  # Fix: this used to be a nested `def` inside the row loop, which
  # (re)defined a top-level method on every row; a lambda keeps it local
  # and is defined once.
  convert = lambda do |number, currency, rates|
    (BigDecimal(number['usd'].to_s) / BigDecimal(rates[currency].to_s)).to_f.to_s rescue '?'
  end
  # reverse is needed because
  # https://www.reddit.com/r/coinmarketcapjson/comments/2pqvwi/amazing_service_thank_you_very_much/cmz6sxr
  @doc.css("#{table_id} tbody tr").reverse.each do |tr|
    tds = tr.css('td')
    td_position = tds[0].text.strip
    td_name = tds[1].text.strip
    td_symbol = tds[2].text.strip
    begin
      td_category = tds[1].css('a')[0]['href'].include?('assets') ? 'asset' : 'currency'
    rescue
      td_category = '?'
    end
    td_market_cap = {}
    td_price = {}
    begin
      td_available_supply = tds[5].css('a').text.strip
      td_available_supply_number = td_available_supply.gsub(',','').to_i
    rescue
      td_available_supply = '?'
      td_available_supply_number = '?'
    end
    td_volume_24h = {}
    td_change_1h = {}
    td_change_24h = {}
    td_change_7d = {}
    @currencies.each do |currency|
      begin
        td_market_cap[currency] = tds[3].attribute("data-#{currency}").text.strip
      rescue
        td_market_cap[currency] = '?'
      end
      begin
        td_price[currency] = tds[4].css('a').attribute("data-#{currency}").text.strip
      rescue
        td_price[currency] = '?'
      end
      begin
        td_volume_24h[currency] = tds[6].css('a').attribute("data-#{currency}").text.strip
      rescue
        td_volume_24h[currency] = '0.0 %'
      end
      begin
        td_change_1h[currency] = tds[7].attribute("data-#{currency}").text.strip
      rescue
        td_change_1h[currency] = '?'
      end
      begin
        td_change_24h[currency] = tds[8].attribute("data-#{currency}").text.strip
      rescue
        td_change_24h[currency] = '?'
      end
      begin
        td_change_7d[currency] = tds[9].attribute("data-#{currency}").text.strip
      rescue
        td_change_7d[currency] = '?'
      end
    end
    # derive the remaining fiat currencies from the USD figures
    @exchange_currencies.each do |currency|
      td_market_cap[currency] = convert.call(td_market_cap, currency, currency_exchange_rates)
      td_price[currency] = convert.call(td_price, currency, currency_exchange_rates)
      td_volume_24h[currency] = '0.0 %'
      td_change_1h[currency] = td_change_1h['usd']
      td_change_24h[currency] = td_change_24h['usd']
      td_change_7d[currency] = td_change_7d['usd']
    end
    # order must match @keys exactly — the array is zipped into a hash
    coin = [
      td_position,
      td_name,
      td_symbol,
      td_category,
      td_market_cap,
      td_price,
      td_available_supply,
      td_available_supply_number,
      td_volume_24h,
      td_change_1h,
      td_change_24h,
      td_change_7d,
      @ts,
    ]
    markets << Hash[@keys.zip(coin)]
  end
  { 'timestamp' => @ts, 'markets' => markets, 'currencyExchangeRates' => currency_exchange_rates }
end
# mkdir -p over the joined path segments.
def mkdir *strings
  FileUtils.mkdir_p(File.join(*strings))
end
# Creates every output directory the exporter writes into, under @path.
def mkdirs
  %w(btc usd v3 v4 v5 v5/history v6 v6/history).each do |sub|
    mkdir(@path, sub)
  end
end
# Scrapes the full "all currencies" table and writes every per-coin file
# plus the all.json files for each API version.
def run_script
  mkdirs
  json_data = get_json_data('#currencies-all')
  json_data['markets'].each do |h|
    write_one h
  end
  write_all json_data
end
# One-off migration: re-writes every v5 history file into the v6 layout
# under /api/v6/history. Days already in v6 shape are copied through.
def convert_history_v5_v6
  mkdirs
  Dir["#{@path}/v5/history/*.json"].each do |path|
    hash = JSON.parse(File.read(path))
    next if hash['history'].nil?
    next if hash['history'].empty?
    hash['history'].keys.each do |day|
      target = hash['history'][day]
      # entries already converted to v6 store position as a number
      next if hash['history'][day]['position'].is_a? Numeric
      hash['history'][day] = to_v6_format(target)
    end
    new_path = path.gsub('/api/v5/history', '/api/v6/history')
    write(new_path, hash)
  end
end
# One-off migration: expands each history entry's volume into a
# per-currency hash using the entry's own price ratios.
def update_to_volume_v6
  mkdirs
  Dir["#{@path}/v6/history/*.json"].each do |path|
    hash = JSON.parse(File.read(path))
    next if hash['history'].nil?
    next if hash['history'].empty?
    hash['history'].keys.each do |day|
      target = hash['history'][day]
      volume_hash = {}
      @exchange_currencies.each do |ec|
        # next if target['price'][ec].nil?
        # NOTE(review): assumes target['volume24'] is still a hash keyed
        # by 'btc'; re-running this after a file has been migrated would
        # raise on the BigDecimal conversion — confirm idempotency needs.
        btc_price = BigDecimal(target['price'][ec].to_s) / BigDecimal(target['price']['btc'].to_s)
        volume_hash[ec] = btc_price.nan? ? 0.to_f : (BigDecimal(target['volume24']['btc'].to_s) * btc_price).to_f
      end
      target['volume24'] = volume_hash
    end
    write(path, hash)
  end
end
# Prints CLI usage for this script to stdout.
def help
  puts <<-EOF
This is the CLI which gathers all the data from coinmarketcap.com
List of commands:
* run - queries coinmarketcap.com, parses the data and writes it to disk
* convert_history_v5_v6 - converts history from v5 to v6
* update_to_volume_v6
* help - this text
Example usage:
./script.rb
./script.rb run
ruby script.rb run
EOF
end
# CLI entry point: no arguments runs the scraper; otherwise dispatch on
# the first argument (unknown commands print usage).
if ARGV.empty?
  run_script
else
  case ARGV[0]
  when 'run'
    run_script
  when 'convert_history_v5_v6'
    convert_history_v5_v6
  when 'update_to_volume_v6'
    update_to_volume_v6
  else
    help
  end
end
Fixes #4: history is converted to the correct format.
#!/usr/bin/env ruby
require 'open-uri'
require 'json'
require 'nokogiri'
require 'pp'
require 'fileutils'
require 'bigdecimal'
current_folder = File.dirname(File.expand_path(__FILE__))
@path = File.join(current_folder, 'public', 'api')
cmc_data = open("http://coinmarketcap.com/all/views/all/")
@doc = Nokogiri::HTML(cmc_data)
# File.write('static.html', cmc_data.read)
# @doc = Nokogiri::HTML(File.read('static.html'))
@ts = Time.now.to_i
@currencies = ['usd', 'btc']
@exchange_currencies = %w(usd aud brl cad chf cny eur gbp hkd idr inr jpy krw mxn rub)
# order is important and KEEP ID AS THE LAST ELEMENT. you have been warned
@keys = ['position', 'name', 'symbol', 'category', 'marketCap', 'price', 'availableSupply', 'availableSupplyNumber', 'volume24', 'change1h', 'change7h', 'change7d', 'timestamp']
# converts a coin to the old json format
def old_format coin, currency
coin['currency'] = currency
['marketCap', 'price', 'volume24'].each do |key|
coin[key] = coin[key][currency]
end
coin
end
def write path, hash
File.open(path,'w') { |f| f.write(hash.to_json) }
end
# converts all coins in hash['markets'] to old json format
def old_format_all coins, currency
old_formatted_coins = {
timestamp: coins['timestamp'],
markets: []
}
coins['markets'].each do |market|
old_formatted_coins[:markets].push old_format(market.clone, currency)
end
old_formatted_coins
end
# Converts a coin hash to the flat v1 API format for one currency.
#
# Bug fix: 'price' previously always used the USD value even when another
# currency was requested, unlike every other per-currency field here
# (marketCap, volume24, change*); it now honours the +currency+ argument.
# The default ('usd') keeps existing call sites unchanged.
def to_v1_format coin, currency='usd'
  {
    "position"=> coin['position'],
    "name"=> coin['name'],
    "marketCap"=> coin['marketCap'][currency],
    "price"=> coin['price'][currency],
    "totalSupply"=> coin['availableSupply'],
    "volume24"=> coin['volume24'][currency],
    "change24"=> "0.0 %",
    "change1h"=> coin['change1h'][currency],
    "change7h"=> coin['change7h'][currency],
    "change7d"=> coin['change7d'][currency],
    "timestamp"=> coin['timestamp'],
    "lowVolume"=> false,
    "id"=> coin['symbol'].downcase,
    "currency"=> currency
  }
end
def to_v2_format coin, currency='usd'
to_v1_format(coin, currency)
end
def to_v4_format coin
{
position: coin['position'],
name: coin['name'],
marketCap: coin['marketCap'],
price: coin['price'],
totalSupply: coin['availableSupply'],
volume24: coin['volume24'],
change24: "0.0 %",
change1h: coin['change1h'],
change7h: coin['change7h'],
change7d: coin['change7d'],
timestamp: coin['timestamp'],
lowVolume: false,
id: coin['symbol'].downcase
}
end
# Converts a coin hash to the v6 format: numeric values instead of
# strings, change7h renamed to change24h, availableSupply replaced by the
# numeric supply, and volume24 recomputed per currency from the BTC
# volume and the price ratio. Works on a shallow clone, so the nested
# marketCap/price/volume24 hashes of the input ARE mutated (unchanged
# from the original behavior).
#
# Fix: the helper used to be a nested `def`, which (re)defines a
# top-level method on every call; a lambda keeps it local.
def to_v6_format coin
  to_general_number = lambda do |n|
    return nil if n == '?' || n == nil
    n.to_f
  end
  coin_clone = coin.clone
  # delete + re-insert so the renamed keys land at the end, keeping a
  # stable key order in the emitted JSON
  coin_clone['change24h'] = coin_clone.delete('change7h')
  coin_clone['change7d'] = coin_clone.delete('change7d')
  coin_clone['timestamp'] = coin_clone.delete('timestamp')
  coin_clone['change1h'] = to_general_number.call(coin_clone['change1h']['usd'])
  coin_clone['change24h'] = to_general_number.call(coin_clone['change24h']['usd'])
  coin_clone['change7d'] = to_general_number.call(coin_clone['change7d']['usd'])
  coin_clone['availableSupply'] = coin_clone.delete('availableSupplyNumber')
  coin_clone['position'] = coin_clone['position'].to_i
  ['marketCap', 'price'].each do |key|
    coin_clone[key].keys.each do |currency|
      coin_clone[key][currency] = to_general_number.call(coin_clone[key][currency])
    end
  end
  coin_clone['volume24'].keys.each do |currency|
    btc_price = BigDecimal(coin_clone['price'][currency].to_s) / BigDecimal(coin_clone['price']['btc'].to_s)
    coin_clone['volume24'][currency] = btc_price.nan? ? 0.to_f : (BigDecimal(coin_clone['volume24']['btc'].to_s) * btc_price).to_f
  end
  coin_clone
end
def write_one coin
# version 1
write("#{@path}/#{coin['symbol'].downcase}.json", to_v1_format(coin))
# version 2
@currencies.each do |currency|
write("#{@path}/#{currency}/#{coin['symbol'].downcase}.json", to_v2_format(coin, currency))
end
write("#{@path}/v4/#{coin['symbol'].downcase}.json", to_v4_format(coin))
# version 5
coin_path = "#{@path}/v5/#{coin['symbol']}.json"
write(coin_path, coin)
# version 6
coin_path = "#{@path}/v6/#{coin['symbol']}.json"
v6_coin = to_v6_format(coin)
write(coin_path, v6_coin)
write_history(v6_coin)
end
# Appends the current snapshot of +coin+ to its per-year v6 history file,
# creating the file on first use. Each calendar day is recorded at most
# once (later snapshots from the same day are ignored).
#
# Fix: File.exists? is deprecated and was removed in Ruby 3.2; use
# File.exist?.
def write_history coin
  time_at = Time.at(@ts)
  path = "#{@path}/v6/history/#{coin['symbol']}_#{time_at.year}.json"
  write(path, { 'symbol' => coin['symbol'], 'history' => {} }) unless File.exist?(path)
  hash = JSON.parse(File.read(path))
  key = time_at.strftime('%d-%m-%Y')
  unless hash['history'].key?(key)
    hash['history'][key] = coin
    write(path, hash)
  end
end
# writes all.json for all API versions.
def write_all coin
# version 1
h = {
"timestamp"=> coin['timestamp'],
"markets"=> []
}
coin['markets'].each do |c|
h['markets'] << to_v1_format(c)
end
write("#{@path}/all.json", h)
# version 2
h = {
"timestamp"=> coin['timestamp'],
"markets"=> []
}
@currencies.each do |currency|
coin['markets'].each do |c|
h['markets'] << to_v2_format(c, currency)
end
write("#{@path}/#{currency}/all.json", h)
end
# version 4
h = {
"timestamp"=> coin['timestamp'],
"markets"=> []
}
coin['markets'].each do |c|
h['markets'] << to_v4_format(c)
end
write("#{@path}/v4/all.json", h)
# version 5
write("#{@path}/v5/all.json", coin)
# version 6
all_clone = coin.clone
all_clone['markets'] = all_clone['markets'].map { |e| to_v6_format(e) }
write("#{@path}/v6/all.json", all_clone)
end
def get_json_data table_id
markets = []
cer = @doc.css("#currency-exchange-rates")
currency_exchange_rates = {}
@exchange_currencies.each do |currency|
currency_exchange_rates[currency] = cer.attribute("data-#{currency}").text.strip
end
# reverse is needed because
# https://www.reddit.com/r/coinmarketcapjson/comments/2pqvwi/amazing_service_thank_you_very_much/cmz6sxr
@doc.css("#{table_id} tbody tr").reverse.each do |tr|
tds = tr.css('td')
td_position = tds[0].text.strip
td_name = tds[1].text.strip
td_symbol = tds[2].text.strip
begin
td_category = tds[1].css('a')[0]['href'].include?('assets') ? 'asset' : 'currency'
rescue
td_category = '?'
end
td_market_cap = {}
td_price = {}
begin
td_available_supply = tds[5].css('a').text.strip
td_available_supply_number = td_available_supply.gsub(',','').to_i
rescue
td_available_supply = '?'
td_available_supply_number = '?'
end
td_volume_24h = {}
td_change_1h = {}
td_change_24h = {}
td_change_7d = {}
@currencies.each do |currency|
begin
td_market_cap[currency] = tds[3].attribute("data-#{currency}").text.strip
rescue
td_market_cap[currency] = '?'
end
begin
td_price[currency] = tds[4].css('a').attribute("data-#{currency}").text.strip
rescue
td_price[currency] = '?'
end
begin
td_volume_24h[currency] = tds[6].css('a').attribute("data-#{currency}").text.strip
rescue
td_volume_24h[currency] = '0.0 %'
end
begin
td_change_1h[currency] = tds[7].attribute("data-#{currency}").text.strip
rescue
td_change_1h[currency] = '?'
end
begin
td_change_24h[currency] = tds[8].attribute("data-#{currency}").text.strip
rescue
td_change_24h[currency] = '?'
end
begin
td_change_7d[currency] = tds[9].attribute("data-#{currency}").text.strip
rescue
td_change_7d[currency] = '?'
end
end
def convert number, currency, currency_exchange_rates
(BigDecimal(number['usd'].to_s) / BigDecimal(currency_exchange_rates[currency].to_s)).to_f.to_s rescue '?'
end
@exchange_currencies.each do |currency|
td_market_cap[currency] = convert(td_market_cap, currency, currency_exchange_rates)
td_price[currency] = convert(td_price, currency, currency_exchange_rates)
td_volume_24h[currency] = '0.0 %'
td_change_1h[currency] = td_change_1h['usd']
td_change_24h[currency] = td_change_24h['usd']
td_change_7d[currency] = td_change_7d['usd']
end
coin = [
td_position,
td_name,
td_symbol,
td_category,
td_market_cap,
td_price,
td_available_supply,
td_available_supply_number,
td_volume_24h,
td_change_1h,
td_change_24h,
td_change_7d,
@ts,
]
markets << Hash[@keys.zip(coin)]
end
{ 'timestamp' => @ts, 'markets' => markets, 'currencyExchangeRates' => currency_exchange_rates }
end
def mkdir *strings
FileUtils.mkdir_p File.join(strings)
end
def mkdirs
mkdir(@path, 'btc')
mkdir(@path, 'usd')
mkdir(@path, 'v3')
mkdir(@path, 'v4')
mkdir(@path, 'v5')
mkdir(@path, 'v5/history')
mkdir(@path, 'v6')
mkdir(@path, 'v6/history')
end
def run_script
mkdirs
json_data = get_json_data('#currencies-all')
json_data['markets'].each do |h|
write_one h
end
write_all json_data
end
def convert_history_v5_v6
mkdirs
Dir["#{@path}/v5/history/*.json"].each do |path|
hash = JSON.parse(File.read(path))
next if hash['history'].nil?
next if hash['history'].empty?
hash['history'].keys.each do |day|
target = hash['history'][day]
next if hash['history'][day]['position'].is_a? Numeric
hash['history'][day] = to_v6_format(target)
end
new_path = path.gsub('/api/v5/history', '/api/v6/history')
write(new_path, hash)
end
end
# One-off migration: expands each history entry's numeric
# (BTC-denominated) volume24 into a per-currency hash using the entry's
# own price ratios. Entries whose volume24 is not a plain number (already
# migrated, or missing) are skipped by the Numeric guard.
#
# Fix: removed the `if target['volume24'].nil?` branch inside the loop —
# it was unreachable dead code, because nil is never Numeric and the
# guard above already excludes non-Numeric values.
def update_to_volume_v6
  mkdirs
  Dir["#{@path}/v6/history/*.json"].each do |path|
    hash = JSON.parse(File.read(path))
    next if hash['history'].nil?
    next if hash['history'].empty?
    hash['history'].keys.each do |day|
      target = hash['history'][day]
      next if !target['volume24'].is_a?(Numeric)
      volume_hash = {}
      target['price'].keys.each do |ec|
        btc_price = BigDecimal(target['price'][ec].to_s) / BigDecimal(target['price']['btc'].to_s)
        volume_hash[ec] = btc_price.nan? ? 0.to_f : (BigDecimal(target['volume24'].to_s) * btc_price).to_f
      end
      target['volume24'] = volume_hash
    end
    write(path, hash)
  end
end
def help
puts <<-EOF
This is the CLI which gathers all the data from coinmarketcap.com
List of commands:
* run - queries coinmarketcap.com, parses the data and writes it to disk
* convert_history_v5_v6 - converts history from v5 to v6
* update_to_volume_v6
* help - this text
Example usage:
./script.rb
./script.rb run
ruby script.rb run
EOF
end
if ARGV.empty?
run_script
else
case ARGV[0]
when 'run'
run_script
when 'convert_history_v5_v6'
convert_history_v5_v6
when 'update_to_volume_v6'
update_to_volume_v6
else
help
end
end
|
class ResourcesUpdatesController < ApplicationController
require 'nokogiri'
require 'pp'
START_MARKER = /ArchivesSpace field code/
DO_START_MARKER = /ArchivesSpace digital object import field codes/
set_access_control "update_resource_record" => [:new, :edit, :create, :update, :rde, :add_children, :publish, :accept_children, :load_ss, :get_file, :get_do_file, :load_dos]
require 'pry'
require 'rubyXL'
require 'asutils'
require 'enum_list'
include NotesHelper
include UpdatesUtils
include LinkedObjects
require 'ingest_report'
# create the file form for the digital object spreadsheet
# Renders the action's default view.
# NOTE(review): rid/id are assigned but never used, and locals are not
# visible to the implicitly-rendered view — confirm whether this action
# is finished or the params should be passed to the template explicitly.
def get_do_file
  rid = params[:rid]
  id = params[:id]
end
# create the file form for the spreadsheet
# Collects the spreadsheet-upload form's context from params and renders
# the bulk_file_form partial with it.
def get_file
  rid = params[:rid]
  type = params[:type]
  aoid = params[:aoid] || ''
  ref_id = params[:ref_id] || ''
  resource = params[:resource]
  position = params[:position] || '1'
  @resource = Resource.find(params[:rid])
  # the repo id is the trailing segment of the repository ref URI
  repo_id = @resource['repository']['ref'].split('/').last
  return render_aspace_partial :partial => "resources/bulk_file_form", :locals => {:rid => rid, :aoid => aoid, :type => type, :ref_id => ref_id, :resource => resource, :position => position, :repo_id => repo_id}
end
# load the digital objects
# Fetches the archival object named by params and logs it; when the AO
# already has digital-object instances it is nil-ed out instead (the
# intended exception is still a TODO — see comment below).
# NOTE(review): if fetch_archival_object returns nil, ao['instances']
# below raises NoMethodError — confirm the intended nil handling.
def load_dos
  #first time out of the box:
  Rails.logger.info "\t**** LOAD DOS ***"
  ao = fetch_archival_object(params)
  Rails.logger.info "ao instances? #{!ao["instances"].blank?}" if ao
  if !ao['instances'].blank?
    digs = []
    # collect the AO once per instance that links a digital object
    ao['instances'].each {|instance| digs.append(ao) if instance.dig("digital_object") != nil }
    unless digs.blank?
      # add thrown exception here!
      ao = nil
    end
  end
  Rails.logger.info {ao.pretty_inspect}
end
# load in a spreadsheet
def load_ss
@report_out = []
@report = IngestReport.new
@headers
@digital_load = params.fetch(:digital_load,'') == 'true'
Pry::ColorPrinter.pp "digital_load? #{@digital_load}"
if @digital_load
@find_uri = "/repositories/#{params[:repo_id]}/find_by_id/archival_objects"
@resource_ref = "/repositories/#{params[:repo_id]}/resources/#{params[:id]}"
@repo_id = params[:repo_id]
@start_marker = DO_START_MARKER
else
@created_ao_refs = []
@first_level_aos = []
@archival_levels = EnumList.new('archival_record_level')
@container_types = EnumList.new('container_type')
@date_types = EnumList.new('date_type')
@date_labels = EnumList.new('date_label')
@date_certainty = EnumList.new('date_certainty')
@extent_types = EnumList.new('extent_extent_type')
@extent_portions = EnumList.new('extent_portion')
@instance_types ||= EnumList.new('instance_instance_type')
@parents = ParentTracker.new
@start_marker = START_MARKER
end
@start_position
@need_to_move = false
begin
rows = initialize_info(params)
while @headers.nil? && (row = rows.next)
@counter += 1
if (row[0] && (row[0].value.to_s =~ @start_marker) || row[2] && row[2].value == 'ead') #FIXME: TEMP FIX
Pry::ColorPrinter.pp "Got the HEADERS!"
@headers = row_values(row)
# Skip the human readable header too
rows.next
@counter += 1 # for the skipping
end
end
begin
while (row = rows.next)
@counter += 1
values = row_values(row)
next if values.compact.empty?
@row_hash = Hash[@headers.zip(values)]
ao = nil
begin
@report.new_row(@counter)
if @digital_load
ao = process_do_row(params)
else
ao = process_row
end
@rows_processed += 1
@error_level = nil
# Pry::ColorPrinter.pp "no ao" if !ao
rescue StopExcelImportException => se
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.stopped', :row => @counter, :msg => se.message))
raise StopIteration.new
rescue ExcelImportException => e
@error_rows += 1
@report.add_errors( e.message)
@error_level = @hier
# Pry::ColorPrinter.pp "Error level: #{@error_level}"
end
@report.end_row
end
rescue StopIteration
# we just want to catch this without processing further
end
if @rows_processed == 0
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.error.no_data'))
end
rescue Exception => e
if e.is_a?( ExcelImportException) || e.is_a?( StopExcelImportException)
@report.add_terminal_error(I18n.t('plugins.aspace-import-excel.error.excel', :errs => e.message), @counter)
elsif e.is_a?(StopIteration) && @headers.nil?
@report.add_terminal_error(I18n.t('plugins.aspace-import-excel.error.no_header'), @counter)
else # something else went wrong
@report.add_terminal_error(I18n.t('plugins.aspace-import-excel.error.system', :msg => e.message), @counter)
Pry::ColorPrinter.pp "UNEXPECTED EXCEPTION!"
Pry::ColorPrinter.pp e.message
Pry::ColorPrinter.pp e.backtrace
end
@report.end_row
return render_aspace_partial :status => 400, :partial => "resources/bulk_response", :locals => {:rid => params[:rid],
:report => @report, :do_load => @digital_load}
end
move_archival_objects if @need_to_move
@report.end_row
return render_aspace_partial :partial => "resources/bulk_response", :locals => {:rid => params[:rid], :report => @report,
:do_load => @digital_load}
end
private
# save the archival object, then revive it
# Saves +ao+ and returns a freshly-loaded copy so the caller sees
# server-side computed fields (e.g. display_string). Any failure is
# logged together with the offending record and then re-raised.
def ao_save(ao)
  revived = nil
  begin
    id = ao.save
    revived = JSONModel(:archival_object).find(id)
  rescue Exception => e
    # broad rescue is tolerable here only because the exception is
    # logged and always re-raised unchanged
    Pry::ColorPrinter.pp "UNEXPECTED save error: #{e.message}"
    Pry::ColorPrinter.pp ASUtils.jsonmodels_to_hashes(ao) if ao
    raise e
  end
  revived
end
# required fields for a digital object row: ead match, ao_ref_id and at least one of digital_object_link, thumbnail
# Validates the current @row_hash for a digital-object load and
# normalizes 'publish' to a boolean ('1' => true, anything else false).
# Returns the error messages joined by '; ' (empty string when valid).
#
# Cleanup: removed a no-op begin/end wrapper (it had no rescue) and used
# the already-extracted obj_link local instead of re-reading the hash.
def check_do_row
  err_arr = []
  err_arr.push I18n.t('plugins.aspace-import-excel.error.ref_id_miss') if @row_hash['ao_ref_id'].blank?
  obj_link = @row_hash['digital_object_link']
  # accept either capitalization of the thumbnail column
  thumb = @row_hash['thumbnail'] || @row_hash['Thumbnail']
  err_arr.push I18n.t('plugins.aspace-import-excel.error.dig_info_miss') if obj_link.blank? && thumb.blank?
  v = @row_hash['publish']
  v = v.strip if !v.blank?
  @row_hash['publish'] = (v == '1')
  err_arr.join('; ')
end
# look for all the required fields to make sure they are legit
# strip all the strings and turn publish and restrictions_flaginto true/false
def check_row
err_arr = []
begin
# we'll check hierarchical level first, in case there was a parent that didn't get created
hier = @row_hash['hierarchy']
if !hier
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_miss')
else
hier = hier.to_i
# we bail if the parent wasn't created!
return I18n.t('plugins.aspace-import-excel.error.hier_below_error_level') if (@error_level && hier > @error_level)
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_zero') if hier < 1
# going from a 1 to a 3, for example
if (hier - 1) > @hier
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_wrong')
if @hier == 0
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_wrong_resource')
raise StopExcelImportException.new(err_arr.join(';'))
end
end
@hier = hier
end
missing_title = @row_hash['title'].blank?
#date stuff: if already missing the title, we have to make sure the date label is valid
missing_date = [@row_hash['begin'],@row_hash['end'],@row_hash['expression']].compact.empty?
if !missing_date
begin
label = @date_labels.value((@row_hash['dates_label'] || 'creation'))
rescue Exception => e
err_arr.push I18n.t('plugins.aspace-import-excel.error.invalid_date', :what => e.message)
missing_date = true
end
end
err_arr.push I18n.t('plugins.aspace-import-excel.error.title_and_date') if (missing_title && missing_date)
# tree hierachy
begin
level = @archival_levels.value(@row_hash['level'])
rescue Exception => e
err_arr.push I18n.t('plugins.aspace-import-excel.error.level')
end
rescue StopExcelImportException => se
raise
rescue Exception => e
Pry::ColorPrinter.pp ["UNEXPLAINED EXCEPTION", e.message, e.backtrace, @row_hash]
end
if err_arr.blank?
@row_hash.each do |k, v|
@row_hash[k] = v.strip if !v.blank?
if k == 'publish' || k == 'restrictions_flag'
@row_hash[k] = (v == '1')
end
end
end
err_arr.join('; ')
end
# create an archival_object
# Builds (and saves) a new JSONModel(:archival_object) from @row_hash, attached
# to @resource and, when parent_uri is given, to that parent. Non-fatal
# problems (bad dates/extents/notes) are accumulated on @report; raises
# ExcelImportException when neither a title nor a usable date survives.
# Returns the saved-and-revived archival object with instances/subjects/agents
# attached (caller must save again to persist those — see process_row).
def create_archival_object(parent_uri)
ao = JSONModel(:archival_object).new._always_valid!
ao.title = @row_hash['title'] if @row_hash['title']
# only attempt date creation when at least one date-ish cell is present
unless [@row_hash['begin'],@row_hash['end'],@row_hash['expression']].compact.empty?
begin
ao.dates = create_date
rescue Exception => e
# Pry::ColorPrinter.pp "We gots a date exception! #{e.message}"
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.invalid_date', :what => e.message))
end
end
#because the date may have been invalid, we should check if there's a title, otherwise bail
if ao.title.blank? && ao.dates.blank?
raise ExcelImportException.new(I18n.t('plugins.aspace-import-excel.error.title_and_date'))
end
ao.resource = {'ref' => @resource['uri']}
ao.component_id = @row_hash['unit_id'] if @row_hash['unit_id']
ao.repository_processing_note = @row_hash['processing_note'] if @row_hash['processing_note']
ao.level = @archival_levels.value(@row_hash['level'])
ao.other_level = @row_hash['other_level'] || 'unspecified' if ao.level == 'otherlevel'
ao.publish = @row_hash['publish']
ao.restrictions_apply = @row_hash['restrictions_flag']
ao.parent = {'ref' => parent_uri} unless parent_uri.blank?
begin
ao.extents = create_extent unless [@row_hash['number'],@row_hash['extent_type'], @row_hash['portion']].compact.empty?
rescue Exception => e
@report.add_errors(e.message)
end
errs = handle_notes(ao)
@report.add_errors(errs) if !errs.blank?
# we have to save the ao for the display_string
ao = ao_save(ao)
instance = create_top_container_instance
ao.instances = [instance] if instance
if (dig_instance = DigitalObjectHandler.create(@row_hash, ao, @report))
ao.instances ||= []
ao.instances << dig_instance
end
subjs = process_subjects
subjs.each {|subj| ao.subjects.push({'ref' => subj.uri})} unless subjs.blank?
links = process_agents
ao.linked_agents = links
ao
end
# Build the date record for the current row.
# Missing type/label cells fall back to 'inclusive'/'creation'; bad certainty
# values are reported but non-fatal. Raises (with all concatenated validation
# messages) when the assembled date fails JSONModel::Validations.check_date.
# Returns a one-element array, since archival_object.dates expects an array.
def create_date
date_type = 'inclusive'
begin
date_type = @date_types.value(@row_hash['date_type'] || 'inclusive')
rescue Exception => e
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.date_type', :what => @row_hash['date_type']))
end
date = { 'date_type' => date_type,
'label' => @date_labels.value((@row_hash['dates_label'] || 'creation')) }
if @row_hash['date_certainty']
begin
date['certainty'] = @date_certainty.value(@row_hash['date_certainty'])
rescue Exception => e
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.certainty', :what => e.message))
end
end
# copy the raw date cells straight through when present
%w(begin end expression).each do |w|
date[w] = @row_hash[w] if @row_hash[w]
end
invalids = JSONModel::Validations.check_date(date)
unless invalids.blank?
# fold every validation failure into a single message for the caller
err_msg = ''
invalids.each do |inv|
err_msg << " #{inv[0]}: #{inv[1]}"
end
raise Exception.new(err_msg)
end
d = JSONModel(:date).new(date)
[d]
end
# Build the extent record for the current row.
# Returns [extent] when it passes UpdatesUtils.test_exceptions (implicitly nil
# otherwise); any failure is re-raised wrapped in ExcelImportException.
def create_extent
begin
extent = {'portion' => @extent_portions.value(@row_hash['portion'] || 'whole'),
'extent_type' => @extent_types.value((@row_hash['extent_type']))}
# optional free-text columns are copied through (nil when absent)
%w(number container_summary physical_details dimensions).each do |w|
extent[w] = @row_hash[w] || nil
end
ex = JSONModel(:extent).new(extent)
if UpdatesUtils.test_exceptions(ex, "Extent")
return [ex]
end
rescue Exception => e
raise ExcelImportException.new(I18n.t('plugins.aspace-import-excel.error.extent_validation', :msg => e.message))
end
end
# Create a top-container instance for the row, when the row carries container
# data. Returns the instance or nil; failures are reported on @report rather
# than raised, so the archival object can still be loaded without it.
def create_top_container_instance
instance = nil
unless @row_hash['cont_instance_type'].blank? && @row_hash['type_1'].blank?
begin
instance = ContainerInstanceHandler.create_container_instance(@row_hash, @resource['uri'], @report)
rescue ExcelImportException => ee
# container-specific problem: report it, row continues without the instance
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.no_container_instance', :why =>ee.message))
rescue Exception => e
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.no_tc', :why => e.message))
# Pry::ColorPrinter.pp e.message
end
end
#Pry::ColorPrinter.pp "instance"
#Pry::ColorPrinter.pp instance
instance
end
# Look up an archival object by its EAD ref_id via the find_by_id endpoint,
# keeping only hits that belong to the resource being loaded (@resource_ref).
# Returns the full JSONModel(:archival_object) when exactly one match is
# found, nil otherwise (no hits, ambiguous hits, or a failed find).
def fetch_archival_object(ref_id)
  ao = nil
  response = JSONModel::HTTP::get_json(URI(@find_uri),{"ref_id[]" => ref_id, "resolve[]" => "archival_objects"})
  unless response["archival_objects"].blank?
    Rails.logger.info "RESPONSE #{ response["archival_objects"].length}"
    aos = []
    # BUG FIX: the block parameter used to be named `ao`, shadowing the outer
    # result local; renamed to `hit` so the return value cannot be clobbered.
    response["archival_objects"].each do |hit|
      Rails.logger.info "aodig: #{hit.dig('_resolved','resource','ref')}"
      aos.append(hit["ref"]) if hit.dig('_resolved','resource','ref') == @resource_ref
    end
    Rails.logger.info "length: #{aos.length}"
    Rails.logger.info {aos.pretty_inspect}
    # only an unambiguous (single) match is usable
    if aos.length == 1
      parsed = JSONModel.parse_reference(aos[0])
      begin
        ao = JSONModel(:archival_object).find(parsed[:id], :repo_id => @repo_id)
        Rails.logger.info "ao JSONMODEL"
        Rails.logger.info {ao.pretty_inspect}
      rescue Exception => e
        Rails.logger.info {e.pretty_inspect}
      end
    end
  end
  ao
end
# Attach note records to the archival object from every n_* spreadsheet
# column. Content is first checked for well-formed XML (see #wellformed);
# multipart note types wrap the text in a note_text subnote, singlepart types
# push the text directly onto note.content. Notes inherit the AO's publish
# flag. Returns an array of error strings (empty when all notes attached).
def handle_notes(ao)
publish = ao.publish
errs = []
notes_keys = @row_hash.keys.grep(/^n_/)
notes_keys.each do |key|
unless @row_hash[key].blank?
content = @row_hash[key]
# the column name n_<type> selects the note type
type = key.match(/n_(.+)$/)[1]
note_type = @note_types[type]
# Pry::ColorPrinter.pp "content for #{key}: |#{content}| type: #{type} note_type#{note_type}"
note = JSONModel(note_type[:target]).new
note.publish = publish
note.type = note_type[:value]
begin
wellformed(content)
# if the target is multipart, then the data goes in a JSONMODEL(:note_text).content;, which is pushed to the note.subnote array; otherwise it's just pushed to the note.content array
if note_type[:target] == :note_multipart
inner_note = JSONModel(:note_text).new
inner_note.content = content
inner_note.publish = publish
note.subnotes.push inner_note
else
note.content.push content
end
ao.notes.push note
rescue Exception => e
# malformed XML (or any other problem): report and skip this note only
errs.push(I18n.t('plugins.aspace-import-excel.error.bad_note', :type => note_type[:value] , :msg => CGI::escapeHTML( e.message)))
end
end
end
errs
end
# this refreshes the controlled list enumerations, which may have changed since the last import
def initialize_handler_enums
  ContainerInstanceHandler.renew
  DigitalObjectHandler.renew
  SubjectHandler.renew
  # BUG FIX: AgentHandler caches agents just like the other handlers cache
  # their objects, but its renew was never called, so a long-running process
  # could link stale agents from a previous import.
  AgentHandler.renew
end
# set up all the @ variables (except for @header)
# Reads the uploaded spreadsheet and primes all per-import state; for AO loads
# it also records the designated starting archival object's position/parentage.
# Returns an enumerator over the sheet's rows (the method's last expression).
def initialize_info(params)
dispatched_file = params[:file]
@orig_filename = dispatched_file.original_filename
@report.set_file_name(@orig_filename)
initialize_handler_enums
@resource = Resource.find(params[:rid])
@repository = @resource['repository']['ref']
@hier = 1
# ingest archival objects needs this
unless @digital_load
@note_types = note_types_for(:archival_object)
# NOTE(review): `tree` is never used after this fetch — confirm whether the
# round-trip is needed for a side effect or is leftover code.
tree = JSONModel(:resource_tree).find(nil, :resource_id => params[:rid]).to_hash
@ao = nil
aoid = params[:aoid]
@resource_level = aoid.blank?
@first_one = false # to determine whether we need to worry about positioning
if @resource_level
@parents.set_uri(0, nil)
@hier = 0
else
# loading beneath an existing AO: remember its position and parentage for
# later sibling/child placement
@ao = JSONModel(:archival_object).find(aoid, find_opts )
@start_position = @ao.position
parent = @ao.parent # we need this for sibling/child disabiguation later on
@parents.set_uri(0, (parent ? ASUtils.jsonmodels_to_hashes(parent)['ref'] : nil))
@parents.set_uri(1, @ao.uri)
@first_one = true
end
end
@input_file = dispatched_file.tempfile
@counter = 0
@rows_processed = 0
@error_rows = 0
workbook = RubyXL::Parser.parse(@input_file)
sheet = workbook[0]
rows = sheet.enum_for(:each)
end
# After the load, reposition the created first-level archival objects under
# the correct parent (the designated AO's parent, or the resource itself) at
# the slot just after @start_position. A non-200 response is reported.
def move_archival_objects
  unless @first_level_aos.empty?
    uri = (@ao && @ao.parent) ? @ao.parent['ref'] : @resource.uri
    # Pry::ColorPrinter.pp "moving: URI: #{uri}"
    response = JSONModel::HTTP.post_form("#{uri}/accept_children",
                                         "children[]" => @first_level_aos,
                                         "position" => @start_position + 1)
    unless response.code == '200'
      Pry::ColorPrinter.pp "UNEXPECTED BAD MOVE! #{response.code}"
      Pry::ColorPrinter.pp response.body
      # BUG FIX: every other call site reports via IngestReport#add_errors;
      # `@report.errors(...)` appears nowhere else in this file and would
      # raise NoMethodError exactly when a failed move needs reporting.
      @report.add_errors(I18n.t('plugins.aspace-import-excel.error.no_move', :code => response.code))
    end
  end
end
# Resolve up to three agent links per agent type (people, corporate entities,
# families) from the current row, delegating lookup/creation to AgentHandler.
# Returns the array of resolved links; failures are recorded on @report and
# that slot is skipped.
def process_agents
  %w(people corporate_entities families).each_with_object([]) do |kind, links|
    (1..3).each do |slot|
      next if @row_hash["#{kind}_agent_record_id_#{slot}"].blank? &&
              @row_hash["#{kind}_agent_header_#{slot}"].blank?
      begin
        link = AgentHandler.get_or_create(@row_hash, kind, slot.to_s, @resource['uri'], @report)
        links.push(link) if link
      rescue ExcelImportException => e
        @report.add_errors(e.message)
      end
    end
  end
end
# Process one digital-object row: validate it, find the target archival
# object by ref_id, refuse AOs that already carry a digital object instance,
# then create and attach (and save) the new digital object instance.
# Raises ExcelImportException with a row-tagged message on any failure.
def process_do_row(params)
ret_str = resource_match
# mismatch of resource stops all other processing
if ret_str.blank?
ret_str = check_do_row
end
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => ret_str )) if !ret_str.blank?
begin
ao = fetch_archival_object(@row_hash['ao_ref_id'])
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => I18n.t('plugins.aspace-import-excel.ref_id_notfound', :ref_id => @row_hash['ao_ref_id']))) if ao == nil
@report.add_archival_object(ao)
if ao.instances
# any pre-existing digital-object instance disqualifies this AO
digs = []
ao.instances.each {|instance| digs.append(1) if instance["instance_type"] == "digital_object" }
unless digs.blank?
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => I18n.t('plugins.aspace-import-excel.error.has_dig_obj')))
end
end
if (dig_instance = DigitalObjectHandler.create(@row_hash, ao, @report))
ao.instances ||= []
ao.instances << dig_instance
# re-save so the new instance is persisted
ao = ao_save(ao)
end
end
end
# Process one archival-object row: validate, create and save the AO, then
# update hierarchy bookkeeping (@parents, @created_ao_refs, @first_level_aos,
# @need_to_move). Raises ExcelImportException (wrapping validation or
# unexpected errors) so load_ss can count the row as failed.
def process_row
Pry::ColorPrinter.pp @counter
ret_str = resource_match
# mismatch of resource stops all other processing
if ret_str.blank?
ret_str = check_row
end
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => ret_str )) if !ret_str.blank?
parent_uri = @parents.parent_for(@row_hash['hierarchy'].to_i)
begin
ao = create_archival_object(parent_uri)
# second save: create_archival_object saved once already, but instances/
# subjects/agents were attached after that save
ao = ao_save(ao)
rescue JSONModel::ValidationException => ve
# ao won't have been created
Pry::ColorPrinter.pp "VALIDATION ERROR ON SECOND SAVE: #{ve.message}"
raise ExcelImportException.new(ve.message)
rescue Exception => e
Pry::ColorPrinter.pp "UNEXPECTED #{e.message}"
Pry::ColorPrinter.pp e.backtrace
Pry::ColorPrinter.pp ASUtils.jsonmodels_to_hashes(ao)
raise ExcelImportException.new(e.message)
end
@report.add_archival_object(ao)
@parents.set_uri(@hier, ao.uri)
@created_ao_refs.push ao.uri
if @hier == 1
@first_level_aos.push ao.uri
if @first_one && @start_position
# landed more than one slot past the insertion point: reposition later
@need_to_move = (ao.position - @start_position) > 1
@first_one = false
# Pry::ColorPrinter.pp "Need to move: #{@need_to_move}"
end
end
end
# Resolve up to two subjects for the row via SubjectHandler.
# Returns the array of subjects found/created; failures are reported on
# @report and that slot is skipped.
def process_subjects
ret_subjs = []
(1..2).each do |num|
unless @row_hash["subject_#{num}_record_id"].blank? && @row_hash["subject_#{num}_term"].blank?
subj = nil
begin
# the repo id is the third segment of the repository ref URI
subj = SubjectHandler.get_or_create(@row_hash, num, @repository.split('/')[2], @report)
ret_subjs.push subj if subj
rescue ExcelImportException => e
@report.add_errors(e.message)
end
end
end
ret_subjs
end
# make sure that the resource ead id from the form matches that in the spreadsheet
# throws an exception if the designated resource ead doesn't match the spreadsheet row ead
# Returns nil when the EADs match; otherwise the accumulated error text.
def resource_match
  Pry::ColorPrinter.pp @resource['ead_id']
  ret_str = ''
  ret_str = I18n.t('plugins.aspace-import-excel.error.res_ead') if @resource['ead_id'].blank?
  # BUG FIX: this used to *assign* (ret_str = ' ' + ...), silently discarding
  # the missing-resource-EAD message when both EADs were blank; append instead.
  ret_str += ' ' + I18n.t('plugins.aspace-import-excel.error.row_ead') if @row_hash['ead'].blank?
  if ret_str.blank?
    ret_str = I18n.t('plugins.aspace-import-excel.error.ead_mismatch', :res_ead => @resource['ead_id'], :row_ead => @row_hash['ead']) if @resource['ead_id'] != @row_hash['ead']
  end
  ret_str.blank? ? nil : ret_str
end
# Placeholder for a search-index subject lookup; not yet implemented.
def find_subject(subject,source, ext_id)
#title:subject AND primary_type:subject AND source:#{source} AND external_id:#{ext_id}
end
# Placeholder for a search-index agent lookup; not yet implemented.
def find_agent(primary_name, rest_name, type, source, ext_id)
#title: #{primary_name}, #{rest_name} AND primary_type:agent_#{type} AND source:#{source} AND external_id:#{ext_id}
end
# use nokogiri if there seems to be an XML element (or element closure); allow exceptions to bubble up
# Returns the parsed document when the note contains markup, nil otherwise;
# callers rely only on the exception side effect for malformed XML.
def wellformed(note)
  # A literal regexp with match? avoids recompiling the pattern from a string
  # and allocating a MatchData on every note; the unused `frag` local is gone.
  if note.match?(%r{</?[a-zA-Z]+>})
    Nokogiri::XML("<root>#{note}</root>") {|config| config.strict}
  end
end
# Extract the data cells of a spreadsheet row as stripped strings.
# Column 0 is skipped (it is not part of the payload); blank/absent cells come
# back as nil so the zip against @headers stays aligned.
def row_values(row)
  (1...row.size).map do |idx|
    cell = row[idx]
    cell && cell.value ? cell.value.to_s.strip : nil
  end
end
end
Fixed AgentHandler.renew never being called.
class ResourcesUpdatesController < ApplicationController
require 'nokogiri'
require 'pp'
START_MARKER = /ArchivesSpace field code/
DO_START_MARKER = /ArchivesSpace digital object import field codes/
set_access_control "update_resource_record" => [:new, :edit, :create, :update, :rde, :add_children, :publish, :accept_children, :load_ss, :get_file, :get_do_file, :load_dos]
require 'pry'
require 'rubyXL'
require 'asutils'
require 'enum_list'
include NotesHelper
include UpdatesUtils
include LinkedObjects
require 'ingest_report'
# create the file form for the digital object spreadsheet
# Renders this action's default view.
# NOTE(review): both locals are currently unused — presumably the view reads
# params directly; confirm before removing them.
def get_do_file
rid = params[:rid]
id = params[:id]
end
# create the file form for the spreadsheet
# Renders the bulk-upload form partial, passing through the identifying
# request params (archival object id, ref_id, position, etc.).
def get_file
rid = params[:rid]
type = params[:type]
aoid = params[:aoid] || ''
ref_id = params[:ref_id] || ''
resource = params[:resource]
position = params[:position] || '1'
@resource = Resource.find(params[:rid])
# the repo id is the last segment of the repository ref URI
repo_id = @resource['repository']['ref'].split('/').last
return render_aspace_partial :partial => "resources/bulk_file_form", :locals => {:rid => rid, :aoid => aoid, :type => type, :ref_id => ref_id, :resource => resource, :position => position, :repo_id => repo_id}
end
# load the digital objects
# Finds the target archival object and nils it out when it already carries a
# digital object instance (per the TODO, this should become an exception).
def load_dos
  #first time out of the box:
  Rails.logger.info "\t**** LOAD DOS ***"
  # NOTE(review): the private fetch_archival_object takes a ref_id, but the
  # whole params hash is passed here — confirm the intended key.
  ao = fetch_archival_object(params)
  Rails.logger.info "ao instances? #{!ao["instances"].blank?}" if ao
  # BUG FIX: guard against ao being nil (lookup miss) before indexing it; the
  # original only guarded the log line above and raised NoMethodError on
  # ao['instances'] when the lookup returned nothing.
  if ao && !ao['instances'].blank?
    digs = []
    ao['instances'].each {|instance| digs.append(ao) if instance.dig("digital_object") != nil }
    unless digs.blank?
      # add thrown exception here!
      ao = nil
    end
  end
  Rails.logger.info {ao.pretty_inspect}
end
# load in a spreadsheet
# Main controller action for both the archival-object load and the
# digital-object load (selected by the digital_load param). Scans the
# uploaded workbook for the field-code header row, then walks the data rows,
# dispatching each to process_row / process_do_row, and finally renders the
# bulk_response partial with the accumulated IngestReport (HTTP 400 when a
# terminal error stopped the load).
def load_ss
@report_out = []
@report = IngestReport.new
@headers
@digital_load = params.fetch(:digital_load,'') == 'true'
Pry::ColorPrinter.pp "digital_load? #{@digital_load}"
if @digital_load
@find_uri = "/repositories/#{params[:repo_id]}/find_by_id/archival_objects"
@resource_ref = "/repositories/#{params[:repo_id]}/resources/#{params[:id]}"
@repo_id = params[:repo_id]
@start_marker = DO_START_MARKER
else
# the AO load needs the controlled-value lists and hierarchy tracking state
@created_ao_refs = []
@first_level_aos = []
@archival_levels = EnumList.new('archival_record_level')
@container_types = EnumList.new('container_type')
@date_types = EnumList.new('date_type')
@date_labels = EnumList.new('date_label')
@date_certainty = EnumList.new('date_certainty')
@extent_types = EnumList.new('extent_extent_type')
@extent_portions = EnumList.new('extent_portion')
@instance_types ||= EnumList.new('instance_instance_type')
@parents = ParentTracker.new
@start_marker = START_MARKER
end
@start_position
@need_to_move = false
begin
rows = initialize_info(params)
# scan forward until the field-code header row is found
while @headers.nil? && (row = rows.next)
@counter += 1
if (row[0] && (row[0].value.to_s =~ @start_marker) || row[2] && row[2].value == 'ead') #FIXME: TEMP FIX
Pry::ColorPrinter.pp "Got the HEADERS!"
@headers = row_values(row)
# Skip the human readable header too
rows.next
@counter += 1 # for the skipping
end
end
begin
# main data loop: build one @row_hash per non-empty spreadsheet row
while (row = rows.next)
@counter += 1
values = row_values(row)
next if values.compact.empty?
@row_hash = Hash[@headers.zip(values)]
ao = nil
begin
@report.new_row(@counter)
if @digital_load
ao = process_do_row(params)
else
ao = process_row
end
@rows_processed += 1
@error_level = nil
# Pry::ColorPrinter.pp "no ao" if !ao
rescue StopExcelImportException => se
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.stopped', :row => @counter, :msg => se.message))
raise StopIteration.new
rescue ExcelImportException => e
# the row failed but the load continues; remember the hierarchy level
# so children of the failed row can be refused by check_row
@error_rows += 1
@report.add_errors( e.message)
@error_level = @hier
# Pry::ColorPrinter.pp "Error level: #{@error_level}"
end
@report.end_row
end
rescue StopIteration
# we just want to catch this without processing further
end
if @rows_processed == 0
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.error.no_data'))
end
rescue Exception => e
if e.is_a?( ExcelImportException) || e.is_a?( StopExcelImportException)
@report.add_terminal_error(I18n.t('plugins.aspace-import-excel.error.excel', :errs => e.message), @counter)
elsif e.is_a?(StopIteration) && @headers.nil?
# ran out of rows before ever finding a header row
@report.add_terminal_error(I18n.t('plugins.aspace-import-excel.error.no_header'), @counter)
else # something else went wrong
@report.add_terminal_error(I18n.t('plugins.aspace-import-excel.error.system', :msg => e.message), @counter)
Pry::ColorPrinter.pp "UNEXPECTED EXCEPTION!"
Pry::ColorPrinter.pp e.message
Pry::ColorPrinter.pp e.backtrace
end
@report.end_row
return render_aspace_partial :status => 400, :partial => "resources/bulk_response", :locals => {:rid => params[:rid],
:report => @report, :do_load => @digital_load}
end
move_archival_objects if @need_to_move
@report.end_row
return render_aspace_partial :partial => "resources/bulk_response", :locals => {:rid => params[:rid], :report => @report,
:do_load => @digital_load}
end
private
# save the archival object, then revive it
# Saving returns only an id, so re-find to give callers the fully refreshed
# record (display_string, position, etc.). Save errors are logged with the
# offending record and re-raised for the caller to classify.
def ao_save(ao)
revived = nil
begin
id = ao.save
revived = JSONModel(:archival_object).find(id)
rescue Exception => e
Pry::ColorPrinter.pp "UNEXPECTED save error: #{e.message}"
Pry::ColorPrinter.pp ASUtils.jsonmodels_to_hashes(ao) if ao
raise e
end
revived
end
# required fields for a digital object row: ead match, ao_ref_id and at least one of digital_object_link, thumbnail
# Returns a '; '-joined error string ('' when the row is usable) and, as a
# side effect, normalizes the publish cell to a boolean.
def check_do_row
  err_arr = []
  err_arr.push I18n.t('plugins.aspace-import-excel.error.ref_id_miss') if @row_hash['ao_ref_id'].blank?
  # accept either capitalization of the thumbnail column
  thumb = @row_hash['thumbnail'] || @row_hash['Thumbnail']
  err_arr.push I18n.t('plugins.aspace-import-excel.error.dig_info_miss') if @row_hash['digital_object_link'].blank? && thumb.blank?
  # only the literal '1' counts as published
  # (removed: an unused `obj_link` local and a no-op begin/end wrapper)
  v = @row_hash['publish']
  v = v.strip if !v.blank?
  @row_hash['publish'] = (v == '1')
  err_arr.join('; ')
end
# look for all the required fields to make sure they are legit
# strip all the strings and turn publish and restrictions_flaginto true/false
# Returns a '; '-joined error string ('' when the row is usable); raises
# StopExcelImportException when the hierarchy is broken beyond recovery.
def check_row
err_arr = []
begin
# we'll check hierarchical level first, in case there was a parent that didn't get created
hier = @row_hash['hierarchy']
if !hier
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_miss')
else
hier = hier.to_i
# we bail if the parent wasn't created!
return I18n.t('plugins.aspace-import-excel.error.hier_below_error_level') if (@error_level && hier > @error_level)
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_zero') if hier < 1
# going from a 1 to a 3, for example
if (hier - 1) > @hier
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_wrong')
if @hier == 0
# a skipped level directly under the resource is unrecoverable
err_arr.push I18n.t('plugins.aspace-import-excel.error.hier_wrong_resource')
raise StopExcelImportException.new(err_arr.join(';'))
end
end
@hier = hier
end
missing_title = @row_hash['title'].blank?
#date stuff: if already missing the title, we have to make sure the date label is valid
missing_date = [@row_hash['begin'],@row_hash['end'],@row_hash['expression']].compact.empty?
if !missing_date
begin
label = @date_labels.value((@row_hash['dates_label'] || 'creation'))
rescue Exception => e
err_arr.push I18n.t('plugins.aspace-import-excel.error.invalid_date', :what => e.message)
missing_date = true
end
end
err_arr.push I18n.t('plugins.aspace-import-excel.error.title_and_date') if (missing_title && missing_date)
# tree hierachy
begin
level = @archival_levels.value(@row_hash['level'])
rescue Exception => e
err_arr.push I18n.t('plugins.aspace-import-excel.error.level')
end
rescue StopExcelImportException => se
raise
rescue Exception => e
Pry::ColorPrinter.pp ["UNEXPLAINED EXCEPTION", e.message, e.backtrace, @row_hash]
end
if err_arr.blank?
# the row is good: normalize values (strip strings, booleanize the flags)
@row_hash.each do |k, v|
@row_hash[k] = v.strip if !v.blank?
if k == 'publish' || k == 'restrictions_flag'
@row_hash[k] = (v == '1')
end
end
end
err_arr.join('; ')
end
# create an archival_object
def create_archival_object(parent_uri)
ao = JSONModel(:archival_object).new._always_valid!
ao.title = @row_hash['title'] if @row_hash['title']
unless [@row_hash['begin'],@row_hash['end'],@row_hash['expression']].compact.empty?
begin
ao.dates = create_date
rescue Exception => e
# Pry::ColorPrinter.pp "We gots a date exception! #{e.message}"
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.invalid_date', :what => e.message))
end
end
#because the date may have been invalid, we should check if there's a title, otherwise bail
if ao.title.blank? && ao.dates.blank?
raise ExcelImportException.new(I18n.t('plugins.aspace-import-excel.error.title_and_date'))
end
ao.resource = {'ref' => @resource['uri']}
ao.component_id = @row_hash['unit_id'] if @row_hash['unit_id']
ao.repository_processing_note = @row_hash['processing_note'] if @row_hash['processing_note']
ao.level = @archival_levels.value(@row_hash['level'])
ao.other_level = @row_hash['other_level'] || 'unspecified' if ao.level == 'otherlevel'
ao.publish = @row_hash['publish']
ao.restrictions_apply = @row_hash['restrictions_flag']
ao.parent = {'ref' => parent_uri} unless parent_uri.blank?
begin
ao.extents = create_extent unless [@row_hash['number'],@row_hash['extent_type'], @row_hash['portion']].compact.empty?
rescue Exception => e
@report.add_errors(e.message)
end
errs = handle_notes(ao)
@report.add_errors(errs) if !errs.blank?
# we have to save the ao for the display_string
ao = ao_save(ao)
instance = create_top_container_instance
ao.instances = [instance] if instance
if (dig_instance = DigitalObjectHandler.create(@row_hash, ao, @report))
ao.instances ||= []
ao.instances << dig_instance
end
subjs = process_subjects
subjs.each {|subj| ao.subjects.push({'ref' => subj.uri})} unless subjs.blank?
links = process_agents
ao.linked_agents = links
ao
end
def create_date
date_type = 'inclusive'
begin
date_type = @date_types.value(@row_hash['date_type'] || 'inclusive')
rescue Exception => e
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.date_type', :what => @row_hash['date_type']))
end
date = { 'date_type' => date_type,
'label' => @date_labels.value((@row_hash['dates_label'] || 'creation')) }
if @row_hash['date_certainty']
begin
date['certainty'] = @date_certainty.value(@row_hash['date_certainty'])
rescue Exception => e
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.certainty', :what => e.message))
end
end
%w(begin end expression).each do |w|
date[w] = @row_hash[w] if @row_hash[w]
end
invalids = JSONModel::Validations.check_date(date)
unless invalids.blank?
err_msg = ''
invalids.each do |inv|
err_msg << " #{inv[0]}: #{inv[1]}"
end
raise Exception.new(err_msg)
end
d = JSONModel(:date).new(date)
[d]
end
def create_extent
begin
extent = {'portion' => @extent_portions.value(@row_hash['portion'] || 'whole'),
'extent_type' => @extent_types.value((@row_hash['extent_type']))}
%w(number container_summary physical_details dimensions).each do |w|
extent[w] = @row_hash[w] || nil
end
ex = JSONModel(:extent).new(extent)
if UpdatesUtils.test_exceptions(ex, "Extent")
return [ex]
end
rescue Exception => e
raise ExcelImportException.new(I18n.t('plugins.aspace-import-excel.error.extent_validation', :msg => e.message))
end
end
def create_top_container_instance
instance = nil
unless @row_hash['cont_instance_type'].blank? && @row_hash['type_1'].blank?
begin
instance = ContainerInstanceHandler.create_container_instance(@row_hash, @resource['uri'], @report)
rescue ExcelImportException => ee
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.no_container_instance', :why =>ee.message))
rescue Exception => e
@report.add_errors(I18n.t('plugins.aspace-import-excel.error.no_tc', :why => e.message))
# Pry::ColorPrinter.pp e.message
end
end
#Pry::ColorPrinter.pp "instance"
#Pry::ColorPrinter.pp instance
instance
end
# Look up an archival object by its EAD ref_id via the find_by_id endpoint,
# keeping only hits that belong to the resource being loaded (@resource_ref).
# Returns the full JSONModel(:archival_object) when exactly one match is
# found, nil otherwise (no hits, ambiguous hits, or a failed find).
def fetch_archival_object(ref_id)
  ao = nil
  response = JSONModel::HTTP::get_json(URI(@find_uri),{"ref_id[]" => ref_id, "resolve[]" => "archival_objects"})
  unless response["archival_objects"].blank?
    Rails.logger.info "RESPONSE #{ response["archival_objects"].length}"
    aos = []
    # BUG FIX: the block parameter used to be named `ao`, shadowing the outer
    # result local; renamed to `hit` so the return value cannot be clobbered.
    response["archival_objects"].each do |hit|
      Rails.logger.info "aodig: #{hit.dig('_resolved','resource','ref')}"
      aos.append(hit["ref"]) if hit.dig('_resolved','resource','ref') == @resource_ref
    end
    Rails.logger.info "length: #{aos.length}"
    Rails.logger.info {aos.pretty_inspect}
    # only an unambiguous (single) match is usable
    if aos.length == 1
      parsed = JSONModel.parse_reference(aos[0])
      begin
        ao = JSONModel(:archival_object).find(parsed[:id], :repo_id => @repo_id)
        Rails.logger.info "ao JSONMODEL"
        Rails.logger.info {ao.pretty_inspect}
      rescue Exception => e
        Rails.logger.info {e.pretty_inspect}
      end
    end
  end
  ao
end
def handle_notes(ao)
publish = ao.publish
errs = []
notes_keys = @row_hash.keys.grep(/^n_/)
notes_keys.each do |key|
unless @row_hash[key].blank?
content = @row_hash[key]
type = key.match(/n_(.+)$/)[1]
note_type = @note_types[type]
# Pry::ColorPrinter.pp "content for #{key}: |#{content}| type: #{type} note_type#{note_type}"
note = JSONModel(note_type[:target]).new
note.publish = publish
note.type = note_type[:value]
begin
wellformed(content)
# if the target is multipart, then the data goes in a JSONMODEL(:note_text).content;, which is pushed to the note.subnote array; otherwise it's just pushed to the note.content array
if note_type[:target] == :note_multipart
inner_note = JSONModel(:note_text).new
inner_note.content = content
inner_note.publish = publish
note.subnotes.push inner_note
else
note.content.push content
end
ao.notes.push note
rescue Exception => e
errs.push(I18n.t('plugins.aspace-import-excel.error.bad_note', :type => note_type[:value] , :msg => CGI::escapeHTML( e.message)))
end
end
end
errs
end
# this refreshes the controlled list enumerations, which may have changed since the last import
def initialize_handler_enums
ContainerInstanceHandler.renew
DigitalObjectHandler.renew
SubjectHandler.renew
AgentHandler.renew
end
# set up all the @ variables (except for @header)
def initialize_info(params)
dispatched_file = params[:file]
@orig_filename = dispatched_file.original_filename
@report.set_file_name(@orig_filename)
initialize_handler_enums
@resource = Resource.find(params[:rid])
@repository = @resource['repository']['ref']
@hier = 1
# ingest archival objects needs this
unless @digital_load
@note_types = note_types_for(:archival_object)
tree = JSONModel(:resource_tree).find(nil, :resource_id => params[:rid]).to_hash
@ao = nil
aoid = params[:aoid]
@resource_level = aoid.blank?
@first_one = false # to determine whether we need to worry about positioning
if @resource_level
@parents.set_uri(0, nil)
@hier = 0
else
@ao = JSONModel(:archival_object).find(aoid, find_opts )
@start_position = @ao.position
parent = @ao.parent # we need this for sibling/child disabiguation later on
@parents.set_uri(0, (parent ? ASUtils.jsonmodels_to_hashes(parent)['ref'] : nil))
@parents.set_uri(1, @ao.uri)
@first_one = true
end
end
@input_file = dispatched_file.tempfile
@counter = 0
@rows_processed = 0
@error_rows = 0
workbook = RubyXL::Parser.parse(@input_file)
sheet = workbook[0]
rows = sheet.enum_for(:each)
end
# After the load, reposition the created first-level archival objects under
# the correct parent (the designated AO's parent, or the resource itself) at
# the slot just after @start_position. A non-200 response is reported.
def move_archival_objects
  unless @first_level_aos.empty?
    uri = (@ao && @ao.parent) ? @ao.parent['ref'] : @resource.uri
    # Pry::ColorPrinter.pp "moving: URI: #{uri}"
    response = JSONModel::HTTP.post_form("#{uri}/accept_children",
                                         "children[]" => @first_level_aos,
                                         "position" => @start_position + 1)
    unless response.code == '200'
      Pry::ColorPrinter.pp "UNEXPECTED BAD MOVE! #{response.code}"
      Pry::ColorPrinter.pp response.body
      # BUG FIX: every other call site reports via IngestReport#add_errors;
      # `@report.errors(...)` appears nowhere else in this file and would
      # raise NoMethodError exactly when a failed move needs reporting.
      @report.add_errors(I18n.t('plugins.aspace-import-excel.error.no_move', :code => response.code))
    end
  end
end
def process_agents
agent_links = []
%w(people corporate_entities families).each do |type|
(1..3).each do |num|
id_key = "#{type}_agent_record_id_#{num}"
header_key = "#{type}_agent_header_#{num}"
unless @row_hash[id_key].blank? && @row_hash[header_key].blank?
link = nil
begin
link = AgentHandler.get_or_create(@row_hash, type, num.to_s, @resource['uri'], @report)
agent_links.push link if link
rescue ExcelImportException => e
@report.add_errors(e.message)
end
end
end
end
agent_links
end
def process_do_row(params)
ret_str = resource_match
# mismatch of resource stops all other processing
if ret_str.blank?
ret_str = check_do_row
end
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => ret_str )) if !ret_str.blank?
begin
ao = fetch_archival_object(@row_hash['ao_ref_id'])
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => I18n.t('plugins.aspace-import-excel.ref_id_notfound', :ref_id => @row_hash['ao_ref_id']))) if ao == nil
@report.add_archival_object(ao)
if ao.instances
digs = []
ao.instances.each {|instance| digs.append(1) if instance["instance_type"] == "digital_object" }
unless digs.blank?
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => I18n.t('plugins.aspace-import-excel.error.has_dig_obj')))
end
end
if (dig_instance = DigitalObjectHandler.create(@row_hash, ao, @report))
ao.instances ||= []
ao.instances << dig_instance
ao = ao_save(ao)
end
end
end
def process_row
Pry::ColorPrinter.pp @counter
ret_str = resource_match
# mismatch of resource stops all other processing
if ret_str.blank?
ret_str = check_row
end
raise ExcelImportException.new( I18n.t('plugins.aspace-import-excel.row_error', :row => @counter, :errs => ret_str )) if !ret_str.blank?
parent_uri = @parents.parent_for(@row_hash['hierarchy'].to_i)
begin
ao = create_archival_object(parent_uri)
ao = ao_save(ao)
rescue JSONModel::ValidationException => ve
# ao won't have been created
Pry::ColorPrinter.pp "VALIDATION ERROR ON SECOND SAVE: #{ve.message}"
raise ExcelImportException.new(ve.message)
rescue Exception => e
Pry::ColorPrinter.pp "UNEXPECTED #{e.message}"
Pry::ColorPrinter.pp e.backtrace
Pry::ColorPrinter.pp ASUtils.jsonmodels_to_hashes(ao)
raise ExcelImportException.new(e.message)
end
@report.add_archival_object(ao)
@parents.set_uri(@hier, ao.uri)
@created_ao_refs.push ao.uri
if @hier == 1
@first_level_aos.push ao.uri
if @first_one && @start_position
@need_to_move = (ao.position - @start_position) > 1
@first_one = false
# Pry::ColorPrinter.pp "Need to move: #{@need_to_move}"
end
end
end
def process_subjects
ret_subjs = []
(1..2).each do |num|
unless @row_hash["subject_#{num}_record_id"].blank? && @row_hash["subject_#{num}_term"].blank?
subj = nil
begin
subj = SubjectHandler.get_or_create(@row_hash, num, @repository.split('/')[2], @report)
ret_subjs.push subj if subj
rescue ExcelImportException => e
@report.add_errors(e.message)
end
end
end
ret_subjs
end
# make sure that the resource ead id from the form matches that in the spreadsheet
# throws an exception if the designated resource ead doesn't match the spreadsheet row ead
# Returns nil when the EADs match; otherwise the accumulated error text.
def resource_match
  Pry::ColorPrinter.pp @resource['ead_id']
  ret_str = ''
  ret_str = I18n.t('plugins.aspace-import-excel.error.res_ead') if @resource['ead_id'].blank?
  # BUG FIX: this used to *assign* (ret_str = ' ' + ...), silently discarding
  # the missing-resource-EAD message when both EADs were blank; append instead.
  ret_str += ' ' + I18n.t('plugins.aspace-import-excel.error.row_ead') if @row_hash['ead'].blank?
  if ret_str.blank?
    ret_str = I18n.t('plugins.aspace-import-excel.error.ead_mismatch', :res_ead => @resource['ead_id'], :row_ead => @row_hash['ead']) if @resource['ead_id'] != @row_hash['ead']
  end
  ret_str.blank? ? nil : ret_str
end
def find_subject(subject,source, ext_id)
#title:subject AND primary_type:subject AND source:#{source} AND external_id:#{ext_id}
end
def find_agent(primary_name, rest_name, type, source, ext_id)
#title: #{primary_name}, #{rest_name} AND primary_type:agent_#{type} AND source:#{source} AND external_id:#{ext_id}
end
# use nokogiri if there seems to be an XML element (or element closure); allow exceptions to bubble up
# Returns the parsed document when the note contains markup, nil otherwise;
# callers rely only on the exception side effect for malformed XML.
def wellformed(note)
  # A literal regexp with match? avoids recompiling the pattern from a string
  # and allocating a MatchData on every note; the unused `frag` local is gone.
  if note.match?(%r{</?[a-zA-Z]+>})
    Nokogiri::XML("<root>#{note}</root>") {|config| config.strict}
  end
end
# Extract the data cells of a spreadsheet row as stripped strings.
# Column 0 is skipped (it is not part of the payload); blank/absent cells come
# back as nil so the zip against @headers stays aligned.
def row_values(row)
  (1...row.size).map do |idx|
    cell = row[idx]
    cell && cell.value ? cell.value.to_s.strip : nil
  end
end
end
|
# Make the gem's lib/ directory requirable from a source checkout.
$:.push File.expand_path("../lib", __FILE__)
Gem::Specification.new do |s|
s.name = 'apivore'
s.version = '1.2.0'
s.date = '2015-05-20'
s.summary = "Tests your API against its Swagger 2.0 spec"
s.description = "Tests your rails API using its Swagger description of end-points, models, and query parameters."
s.authors = ["Charles Horn"]
s.email = 'charles.horn@gmail.com'
# packaged files: entry point, bundled JSON schemas, plus all lib sources
s.files = ['lib/apivore.rb', 'data/swagger_2.0_schema.json', 'data/draft04_schema.json']
s.files += Dir['lib/apivore/*.rb']
s.homepage = 'http://github.com/westfieldlabs/apivore'
s.licenses = ['Apache 2.0', 'MIT']
s.add_runtime_dependency 'json-schema', '~> 2.5'
s.add_runtime_dependency 'rspec', '~> 3'
s.add_runtime_dependency 'rspec-expectations', '~> 3.1'
s.add_runtime_dependency 'rspec-mocks', '~> 3.1'
s.add_runtime_dependency 'actionpack', '~> 4'
s.add_runtime_dependency 'hashie', '~> 3.3'
s.add_development_dependency 'pry', '~> 0'
s.add_development_dependency 'rake', '~> 10.3'
s.add_development_dependency 'rspec-rails', '~> 3'
s.add_development_dependency 'activesupport', '~> 4'
# test-unit stopped shipping as a default gem in Ruby 2.2
if RUBY_VERSION >= '2.2.0'
s.add_development_dependency 'test-unit', '~> 3'
end
end
update gemspec, bump version
# Make the gem's lib/ directory requirable from a source checkout.
$:.push File.expand_path("../lib", __FILE__)
Gem::Specification.new do |s|
s.name = 'apivore'
s.version = '1.3.0'
s.date = '2015-06-11'
s.summary = "Tests your API against its Swagger 2.0 spec"
s.description = "Tests your rails API using its Swagger description of end-points, models, and query parameters."
s.authors = ["Charles Horn"]
s.email = 'charles.horn@gmail.com'
# packaged files: entry point, bundled JSON schemas, all lib sources, and
# (new in 1.3.0) the custom schema extensions
s.files = ['lib/apivore.rb', 'data/swagger_2.0_schema.json', 'data/draft04_schema.json']
s.files += Dir['lib/apivore/*.rb']
s.files += Dir['data/custom_schemata/*.json']
s.homepage = 'http://github.com/westfieldlabs/apivore'
s.licenses = ['Apache 2.0', 'MIT']
s.add_runtime_dependency 'json-schema', '~> 2.5'
s.add_runtime_dependency 'rspec', '~> 3'
s.add_runtime_dependency 'rspec-expectations', '~> 3.1'
s.add_runtime_dependency 'rspec-mocks', '~> 3.1'
s.add_runtime_dependency 'actionpack', '~> 4'
s.add_runtime_dependency 'hashie', '~> 3.3'
s.add_development_dependency 'pry', '~> 0'
s.add_development_dependency 'rake', '~> 10.3'
s.add_development_dependency 'rspec-rails', '~> 3'
s.add_development_dependency 'activesupport', '~> 4'
# test-unit stopped shipping as a default gem in Ruby 2.2
if RUBY_VERSION >= '2.2.0'
s.add_development_dependency 'test-unit', '~> 3'
end
end
|
# search.rb
require 'rubygems'
require 'sinatra/base'
require 'tempfile'
require 'yaml'
require 'logger'
require 'pp'
require 'stringio'
require './lib/blast.rb'
require 'lib/sequencehelpers.rb'
# Helper module - initialize the blast server.
# Sinatra application exposing local NCBI BLAST+ searches over HTTP: serves a
# search form, runs the selected BLAST program against the configured
# databases, and links result hits back to their FASTA records.
class SequenceServer < Sinatra::Base
include SequenceHelpers
# Lightweight record for one discovered BLAST database
# (name = database file path, title = human-readable title).
class Database < Struct.new("Database", :name, :title)
def to_s
"#{title} #{name}"
end
end
LOG = Logger.new(STDOUT)
LOG.datetime_format = "%Y-%m-%d %H:%M:%S" # to be more compact (and a little more like sinatra's)
# NOTE(review): Sinatra's session setting is named :sessions (plural);
# `enable :session` defines an unused setting - confirm intent.
enable :session
enable :logging
set :root, File.dirname(__FILE__)
set :blasturl, 'http://www.ncbi.nlm.nih.gov/blast/Blast.cgi?CMD=Web&PAGE_TYPE=BlastDocs&DOC_TYPE=Download'
class << self
# Validates the BLAST setup from config.yml, then hands off to Sinatra.
def run!(options={})
init(config)
super
end
# Initializes the blast server : executables, database. Exit if blast
# executables, and databases can not be found.
def init(config = {})
# scan system path as fallback
bin = scan_blast_executables(config["bin"] || nil)
bin = bin.freeze
SequenceServer.set :bin, bin
# use 'db' relative to the current working directory as fallback
db = scan_blast_db(config["db"] || 'db')
db = db.freeze
SequenceServer.set :db, db
rescue IOError => error
LOG.fatal error
exit
end
# Checks for the presence of blast executables. Assumes the executables
# to be present in the bin directory passed to it, or in the system path.
# ---
# Arguments:
# * bin(String) - path (relative/absolute) to the binaries
# ---
# Returns:
# * absolute path to the blast executables directory, or empty
# string (implies executables in the system path)
# ---
# Raises:
# * IOError - if the executables can't be found
def scan_blast_executables(bin)
# File.expand_path(nil) raises, so a missing config value becomes ''.
bin = File.expand_path(bin) rescue ''
if bin.empty?
# search system path
%w|blastn blastp blastx tblastn tblastx blastdbcmd|.each do |method|
raise IOError, "You may need to install BLAST+ from: #{settings.blasturl}.
And/or create a config.yml file that points to blast's 'bin' directory." unless command?( method )
end
else
# assume executables in bin
raise IOError, "The directory '#{bin}' defined in config.yml doesn't exist." unless File.directory?( bin )
end
bin
end
# Scan the given directory for blast databases.
# ---
# Arguments:
# * db_root(String) - path (relative/absolute) to the databases
# ---
# Returns:
# * a hash of blast databases
# ---
# Raises:
# * IOError - if no database can be found
def scan_blast_db(db_root)
db_root = File.expand_path(db_root)
raise IOError, "Database directory doesn't exist: #{db_root}" unless File.directory?( db_root )
# Ask blastdbcmd to enumerate formatted databases as "<type> <file> <title>" lines.
db_list = %x|blastdbcmd -recursive -list #{db_root} -list_outfmt "%p %f %t"|
raise IOError, "No formatted blast databases found! You may need to run 'makeblastdb' "\
"on a fasta file in '#{ db_root }' ." if db_list.empty?
db = {}
db_list.each_line do |line|
type, name, *title = line.split(' ')
type = type.downcase
name = name.freeze
title = title.join(' ').freeze
(db[type] ||= []) << Database.new(name, title)
LOG.info("Found #{ type } database: '#{ title }' at #{ name }")
end
db
end
# Load config.yml; return a Hash. The Hash is empty if config.yml does not exist.
def config
config = YAML.load_file( "config.yml" )
raise IOError, "config.yml should return a hash" unless config.is_a?( Hash )
return config
rescue Errno::ENOENT
LOG.warn("config.yml not found - assuming default settings")
return {}
end
# check if the given command exists and is executable
def command?(command)
system("which #{command} > /dev/null 2>&1")
end
end
get '/' do
erb :search
end
# Run a BLAST search for the submitted sequence against the selected databases.
post '/' do
method = settings.bin + params[:method]
db = selected_db_files
sequence = to_fasta(params[:sequence])
legal_blast_search?(sequence, method, selected_db_type) # quiet if ok; raises if bad
blast = Blast.blast_string(method, db, sequence)
# need to check for errors
#if blast.success?
LOG.info('Ran: ' + blast.command)
# NOTE(review): closing tags are mis-nested; should be '</code></pre>'.
'<pre><code>' + format_blast_results(blast.result, selected_db_files) + '</pre></code>' # put in a div?
#end
end
#get '/get_sequence/:sequenceids/:retrieval_databases' do # multiple seqs separated by whitespace... all other chars exist in identifiers
# I have the feeling you need to spat for multiple dbs... that sucks.
get '/get_sequence/:*/:*' do
params[ :sequenceids], params[ :retrieval_databases] = params["splat"]
sequenceids = params[ :sequenceids].split(/\s/).uniq # in a multi-blast query some may have been found multiply
LOG.info('Getting: ' + sequenceids.to_s)
# the results do not indicate which database a hit is from.
# Thus if several databases were used for blasting, we must check them all
# if it works, refactor with "inject" or "collect"?
found_sequences = ''
retrieval_databases = params[ :retrieval_databases ].split(/\s/)
# NOTE(review): String#split never returns nil, so this guard cannot fire - confirm.
raise ArgumentError, 'Nothing in params[ :retrieval_databases]. session info is lost?' if retrieval_databases.nil?
retrieval_databases.each do |database| # we need to populate this session variable from the erb.
begin
found_sequences += sequence_from_blastdb(sequenceids, database)
rescue
LOG.debug('None of the following sequences: '+ sequenceids.to_s + ' found in '+ database)
end
end
# just in case, checking we found right number of sequences
if sequenceids.length != found_sequences.count('>')
raise IOError, 'Wrong number of sequences found. Expecting: ' + sequenceids.to_s + '. Found: "' + found_sequences + '"'
end
'<pre><code>' + found_sequences + '</pre></code>'
end
# returns the type of selected databases - 'protein', or 'nucleotide'
def selected_db_type
params['db'].first.first
end
# returns a String of fasta files corresponding to the databases selected
# eg. - 'Protein_foo.fasta Protein_moo.fasta'
def selected_db_files
type = selected_db_type
params['db'][type].map{|index| settings.db[type][index.to_i].name}.join(' ')
end
# Ensure the submitted sequence starts with a FASTA definition line.
def to_fasta(sequence)
sequence.lstrip! # removes leading whitespace
if sequence[0] != '>'
# forgetting the leading '>sequenceIdentifer\n' no longer breaks blast, but leaves an empty query
# line in the blast report. lets replace it with info about the user
sequence.insert(0, '>Submitted_By_'+request.ip.to_s + '_at_' + Time.now.strftime("%y%m%d-%H:%M:%S") + "\n")
end
return sequence
end
def legal_blast_search?(sequence, blast_method, blast_db_type) # if ajax stuff is done correctly:checking that user didnt mix seuqences, and constrainind blast_methods_for_query_type and sequence_from_blastdb, then method is not required.
# returns TRUE if everything is ok.
legal_blast_methods = %w|blastn blastp blastx tblastn tblastx|
#raise IOError, 'input_fasta missing:' + input_fasta.to_s if !File.exists?(input_fasta) #unnecessary?
raise IOError, 'undefined blast method...' if blast_method.nil?
raise ArgumentError, 'wrong method : ' + blast_method.to_s if !legal_blast_methods.include?(blast_method)
# check if input_fasta is compatible within blast_method
input_sequence_type = type_of_sequences(sequence)
LOG.debug('input seq type: ' + input_sequence_type.to_s)
LOG.debug('blast db type: ' + blast_db_type.to_s)
LOG.debug('blast method: ' + blast_method)
#if !blast_methods_for_query_type(input_sequence_type).include?(blast_method)
#raise ArgumentError, "Cannot #{blast_method} a #{input_sequence_type} query"
#end
# check if blast_database_type is compatible with blast_method
if !(db_type_for(blast_method).to_s == blast_db_type)
raise ArgumentError, "Cannot #{blast_method} against a #{blast_db_type} database " +
"need " + db_type_for(blast_method).to_s
end
# NOTE(review): TRUE is a deprecated alias; prefer `true`.
return TRUE
end
# Turn raw BLAST output into HTML, linking each hit id to its FASTA record.
def format_blast_results(result, string_of_used_databases)
# NOTE(review): precedence bug - parses as (!result.class) == String, which is
# always false, so this type check never raises; likely meant !(result.is_a?(String)).
raise ArgumentError, 'Problem: empty result! Maybe your query was invalid?' if !result.class == String
raise ArgumentError, 'Problem: empty result! Maybe your query was invalid?' if result.empty?
formatted_result = ''
all_retrievable_ids = []
# NOTE(review): String#each exists only on Ruby 1.8; on 1.9+ use each_line.
result.each do |line|
if line.match(/^>\S/) #if there is a space right after the '>', makeblastdb was run without -parse_seqids
puts line
complete_id = line[/^>*(\S+)\s*.*/, 1] # get id part
id = complete_id.include?('|') ? complete_id.split('|')[1] : complete_id.split('|')[0]
all_retrievable_ids.push(id)
LOG.debug('Added link for: '+ id)
link_to_fasta = "/get_sequence/:#{id}/:#{string_of_used_databases}" # several dbs... separate by ' '
replacement_text_with_link = "<a href='#{link_to_fasta}' title='Full #{id} FASTA sequence'>#{id}</a>"
formatted_result += line.gsub(id, replacement_text_with_link)
else
formatted_result += line
end
end
link_to_fasta_of_all = "/get_sequence/:#{all_retrievable_ids.join(' ')}/:#{string_of_used_databases}" #dbs must be sep by ' '
retrieval_text = all_retrievable_ids.empty? ? '' : "<p><a href='#{link_to_fasta_of_all}'>FASTA of #{all_retrievable_ids.length} retrievable hit(s)</a></p>"
retrieval_text + '<pre><code>' + formatted_result + '</pre></code>' # should this be somehow put in a div?
end
end
SequenceServer.run! if __FILE__ == $0
log initialization results in init if logging has been enabled
# search.rb
require 'rubygems'
require 'sinatra/base'
require 'tempfile'
require 'yaml'
require 'logger'
require 'pp'
require 'stringio'
require './lib/blast.rb'
require 'lib/sequencehelpers.rb'
# Helper module - initialize the blast server.
# Sinatra application exposing local NCBI BLAST+ searches over HTTP: serves a
# search form, runs the selected BLAST program against the configured
# databases, and links result hits back to their FASTA records.
class SequenceServer < Sinatra::Base
include SequenceHelpers
# Lightweight record for one discovered BLAST database
# (name = database file path, title = human-readable title).
class Database < Struct.new("Database", :name, :title)
def to_s
"#{title} #{name}"
end
end
LOG = Logger.new(STDOUT)
LOG.datetime_format = "%Y-%m-%d %H:%M:%S" # to be more compact (and a little more like sinatra's)
# NOTE(review): Sinatra's session setting is named :sessions (plural);
# `enable :session` defines an unused setting - confirm intent.
enable :session
enable :logging
set :root, File.dirname(__FILE__)
set :blasturl, 'http://www.ncbi.nlm.nih.gov/blast/Blast.cgi?CMD=Web&PAGE_TYPE=BlastDocs&DOC_TYPE=Download'
class << self
# Validates the BLAST setup from config.yml, then hands off to Sinatra.
def run!(options={})
init(config)
super
end
# Initializes the blast server : executables, database. Exit if blast
# executables, and databases can not be found. Logs the result if logging
# has been enabled.
def init(config = {})
# scan system path as fallback
bin = scan_blast_executables(config["bin"] || nil)
bin = bin.freeze
SequenceServer.set :bin, bin
# use 'db' relative to the current working directory as fallback
db = scan_blast_db(config["db"] || 'db')
db = db.freeze
SequenceServer.set :db, db
# `logging` reads the Sinatra setting enabled above.
if logging
LOG.info("Found blast executables #{bin}")
db.each do |type, dbs|
LOG.info("Found #{ type } databases:\n#{dbs.join("\n")}")
end
end
rescue IOError => error
LOG.fatal("Fail: #{error}")
exit
end
# Checks for the presence of blast executables. Assumes the executables
# to be present in the bin directory passed to it, or in the system path.
# ---
# Arguments:
# * bin(String) - path (relative/absolute) to the binaries
# ---
# Returns:
# * absolute path to the blast executables directory, or empty
# string (implies executables in the system path)
# ---
# Raises:
# * IOError - if the executables can't be found
def scan_blast_executables(bin)
# File.expand_path(nil) raises, so a missing config value becomes ''.
bin = File.expand_path(bin) rescue ''
if bin.empty?
# search system path
%w|blastn blastp blastx tblastn tblastx blastdbcmd|.each do |method|
raise IOError, "You may need to install BLAST+ from: #{settings.blasturl}.
And/or create a config.yml file that points to blast's 'bin' directory." unless command?( method )
end
else
# assume executables in bin
raise IOError, "The directory '#{bin}' defined in config.yml doesn't exist." unless File.directory?( bin )
end
bin
end
# Scan the given directory for blast databases.
# ---
# Arguments:
# * db_root(String) - path (relative/absolute) to the databases
# ---
# Returns:
# * a hash of blast databases
# ---
# Raises:
# * IOError - if no database can be found
def scan_blast_db(db_root)
db_root = File.expand_path(db_root)
raise IOError, "Database directory doesn't exist: #{db_root}" unless File.directory?( db_root )
# Ask blastdbcmd to enumerate formatted databases as "<type> <file> <title>" lines.
db_list = %x|blastdbcmd -recursive -list #{db_root} -list_outfmt "%p %f %t"|
raise IOError, "No formatted blast databases found! You may need to run 'makeblastdb' "\
"on a fasta file in '#{ db_root }' ." if db_list.empty?
db = {}
db_list.each_line do |line|
type, name, *title = line.split(' ')
type = type.downcase
name = name.freeze
title = title.join(' ').freeze
(db[type] ||= []) << Database.new(name, title)
end
db
end
# Load config.yml; return a Hash. The Hash is empty if config.yml does not exist.
def config
config = YAML.load_file( "config.yml" )
raise IOError, "config.yml should return a hash" unless config.is_a?( Hash )
return config
rescue Errno::ENOENT
LOG.warn("config.yml not found - assuming default settings")
return {}
end
# check if the given command exists and is executable
def command?(command)
system("which #{command} > /dev/null 2>&1")
end
end
get '/' do
erb :search
end
# Run a BLAST search for the submitted sequence against the selected databases.
post '/' do
method = settings.bin + params[:method]
db = selected_db_files
sequence = to_fasta(params[:sequence])
legal_blast_search?(sequence, method, selected_db_type) # quiet if ok; raises if bad
blast = Blast.blast_string(method, db, sequence)
# need to check for errors
#if blast.success?
LOG.info('Ran: ' + blast.command)
# NOTE(review): closing tags are mis-nested; should be '</code></pre>'.
'<pre><code>' + format_blast_results(blast.result, selected_db_files) + '</pre></code>' # put in a div?
#end
end
#get '/get_sequence/:sequenceids/:retrieval_databases' do # multiple seqs separated by whitespace... all other chars exist in identifiers
# I have the feeling you need to spat for multiple dbs... that sucks.
get '/get_sequence/:*/:*' do
params[ :sequenceids], params[ :retrieval_databases] = params["splat"]
sequenceids = params[ :sequenceids].split(/\s/).uniq # in a multi-blast query some may have been found multiply
LOG.info('Getting: ' + sequenceids.to_s)
# the results do not indicate which database a hit is from.
# Thus if several databases were used for blasting, we must check them all
# if it works, refactor with "inject" or "collect"?
found_sequences = ''
retrieval_databases = params[ :retrieval_databases ].split(/\s/)
# NOTE(review): String#split never returns nil, so this guard cannot fire - confirm.
raise ArgumentError, 'Nothing in params[ :retrieval_databases]. session info is lost?' if retrieval_databases.nil?
retrieval_databases.each do |database| # we need to populate this session variable from the erb.
begin
found_sequences += sequence_from_blastdb(sequenceids, database)
rescue
LOG.debug('None of the following sequences: '+ sequenceids.to_s + ' found in '+ database)
end
end
# just in case, checking we found right number of sequences
if sequenceids.length != found_sequences.count('>')
raise IOError, 'Wrong number of sequences found. Expecting: ' + sequenceids.to_s + '. Found: "' + found_sequences + '"'
end
'<pre><code>' + found_sequences + '</pre></code>'
end
# returns the type of selected databases - 'protein', or 'nucleotide'
def selected_db_type
params['db'].first.first
end
# returns a String of fasta files corresponding to the databases selected
# eg. - 'Protein_foo.fasta Protein_moo.fasta'
def selected_db_files
type = selected_db_type
params['db'][type].map{|index| settings.db[type][index.to_i].name}.join(' ')
end
# Ensure the submitted sequence starts with a FASTA definition line.
def to_fasta(sequence)
sequence.lstrip! # removes leading whitespace
if sequence[0] != '>'
# forgetting the leading '>sequenceIdentifer\n' no longer breaks blast, but leaves an empty query
# line in the blast report. lets replace it with info about the user
sequence.insert(0, '>Submitted_By_'+request.ip.to_s + '_at_' + Time.now.strftime("%y%m%d-%H:%M:%S") + "\n")
end
return sequence
end
def legal_blast_search?(sequence, blast_method, blast_db_type) # if ajax stuff is done correctly:checking that user didnt mix seuqences, and constrainind blast_methods_for_query_type and sequence_from_blastdb, then method is not required.
# returns TRUE if everything is ok.
legal_blast_methods = %w|blastn blastp blastx tblastn tblastx|
#raise IOError, 'input_fasta missing:' + input_fasta.to_s if !File.exists?(input_fasta) #unnecessary?
raise IOError, 'undefined blast method...' if blast_method.nil?
raise ArgumentError, 'wrong method : ' + blast_method.to_s if !legal_blast_methods.include?(blast_method)
# check if input_fasta is compatible within blast_method
input_sequence_type = type_of_sequences(sequence)
LOG.debug('input seq type: ' + input_sequence_type.to_s)
LOG.debug('blast db type: ' + blast_db_type.to_s)
LOG.debug('blast method: ' + blast_method)
#if !blast_methods_for_query_type(input_sequence_type).include?(blast_method)
#raise ArgumentError, "Cannot #{blast_method} a #{input_sequence_type} query"
#end
# check if blast_database_type is compatible with blast_method
if !(db_type_for(blast_method).to_s == blast_db_type)
raise ArgumentError, "Cannot #{blast_method} against a #{blast_db_type} database " +
"need " + db_type_for(blast_method).to_s
end
# NOTE(review): TRUE is a deprecated alias; prefer `true`.
return TRUE
end
# Turn raw BLAST output into HTML, linking each hit id to its FASTA record.
def format_blast_results(result, string_of_used_databases)
# NOTE(review): precedence bug - parses as (!result.class) == String, which is
# always false, so this type check never raises; likely meant !(result.is_a?(String)).
raise ArgumentError, 'Problem: empty result! Maybe your query was invalid?' if !result.class == String
raise ArgumentError, 'Problem: empty result! Maybe your query was invalid?' if result.empty?
formatted_result = ''
all_retrievable_ids = []
# NOTE(review): String#each exists only on Ruby 1.8; on 1.9+ use each_line.
result.each do |line|
if line.match(/^>\S/) #if there is a space right after the '>', makeblastdb was run without -parse_seqids
puts line
complete_id = line[/^>*(\S+)\s*.*/, 1] # get id part
id = complete_id.include?('|') ? complete_id.split('|')[1] : complete_id.split('|')[0]
all_retrievable_ids.push(id)
LOG.debug('Added link for: '+ id)
link_to_fasta = "/get_sequence/:#{id}/:#{string_of_used_databases}" # several dbs... separate by ' '
replacement_text_with_link = "<a href='#{link_to_fasta}' title='Full #{id} FASTA sequence'>#{id}</a>"
formatted_result += line.gsub(id, replacement_text_with_link)
else
formatted_result += line
end
end
link_to_fasta_of_all = "/get_sequence/:#{all_retrievable_ids.join(' ')}/:#{string_of_used_databases}" #dbs must be sep by ' '
retrieval_text = all_retrievable_ids.empty? ? '' : "<p><a href='#{link_to_fasta_of_all}'>FASTA of #{all_retrievable_ids.length} retrievable hit(s)</a></p>"
retrieval_text + '<pre><code>' + formatted_result + '</pre></code>' # should this be somehow put in a div?
end
end
SequenceServer.run! if __FILE__ == $0
|
# frozen_string_literal: true
# ActiveAdmin resource registration for Ad: listing, filtering, editing and
# deleting classified ads from the admin panel.
ActiveAdmin.register Ad do
controller do
# Newest ads first in the index listing.
def scoped_collection
super.recent_first
end
end
permit_params :woeid_code, :type, :body, :title
filter :title
filter :body
filter :user_username, as: :string
filter :user_id, as: :string
filter :woeid_code
filter :type, as: :select, collection: [['Regalo', 1], ['Busco', 2]]
filter :status, as: :select, collection: [['Disponible', 1], ['Reservado', 2], ['Entregado', 3]]
filter :published_at
index do
selectable_column
column(:title) { |ad| link_to ad.title, admin_ad_path(ad) }
column :body
column :user
column :type_string
column :status_class
column(:city, &:woeid_name_short)
column(:published_at) { |ad| ad.published_at.strftime('%d/%m/%y %H:%M') }
# Custom action links (Spanish labels) instead of the default actions.
actions(defaults: false) do |ad|
edit = link_to 'Editar', edit_admin_ad_path(ad)
delete = link_to 'Eliminar', admin_ad_path(ad), method: :delete
safe_join([edit, delete], ' ')
end
end
form do |f|
f.inputs do
f.input :type, as: :select,
collection: [['give', 1], ['want', 2]],
include_blank: false
f.input :title
f.input :body
f.input :woeid_code
end
f.actions
end
action_item :view, only: :show do
# NOTE(review): ActiveAdmin exposes the record as `resource` in this block;
# confirm `ad` actually resolves here.
link_to 'Ver en la web', ad_path(ad)
end
end
Better ad status & type columns in admin index
# frozen_string_literal: true
# ActiveAdmin resource registration for Ad: listing, filtering, editing and
# deleting classified ads from the admin panel.
ActiveAdmin.register Ad do
controller do
# Newest ads first in the index listing.
def scoped_collection
super.recent_first
end
end
permit_params :woeid_code, :type, :body, :title
filter :title
filter :body
filter :user_username, as: :string
filter :user_id, as: :string
filter :woeid_code
filter :type, as: :select, collection: [['Regalo', 1], ['Busco', 2]]
filter :status, as: :select, collection: [['Disponible', 1], ['Reservado', 2], ['Entregado', 3]]
filter :published_at
index do
selectable_column
column(:title) { |ad| link_to ad.title, admin_ad_path(ad) }
column :body
column :user
# Colour-coded tag for the ad type ('give' => green, 'want' => red).
column(:type) do |ad|
status_tag({ 'give' => 'green', 'want' => 'red' }[ad.type_class],
label: ad.type_class)
end
# Colour-coded tag for the ad lifecycle status.
column(:status) do |ad|
status_tag({ 'available' => 'green',
'booked' => 'orange',
'delivered' => 'red' }[ad.status_class],
label: ad.status_class)
end
column(:city, &:woeid_name_short)
column(:published_at) { |ad| ad.published_at.strftime('%d/%m/%y %H:%M') }
# Custom action links (Spanish labels) instead of the default actions.
actions(defaults: false) do |ad|
edit = link_to 'Editar', edit_admin_ad_path(ad)
delete = link_to 'Eliminar', admin_ad_path(ad), method: :delete
safe_join([edit, delete], ' ')
end
end
form do |f|
f.inputs do
f.input :type, as: :select,
collection: [['give', 1], ['want', 2]],
include_blank: false
f.input :title
f.input :body
f.input :woeid_code
end
f.actions
end
action_item :view, only: :show do
# NOTE(review): ActiveAdmin exposes the record as `resource` in this block;
# confirm `ad` actually resolves here.
link_to 'Ver en la web', ad_path(ad)
end
end
|
added boot file for loading specific version of doozer
# Boot file that pins and loads a specific release of the doozer gem.
DOOZER_GEM_VERSION='0.1.4'
begin
require 'date'
# rubygems provides Kernel#gem, used below to activate the pinned version.
require 'rubygems'
gem 'doozer', "= #{DOOZER_GEM_VERSION}"
require 'doozer'
rescue Gem::LoadError
# NOTE(review): only gem activation failures are rescued; a plain LoadError
# from `require 'doozer'` itself would propagate unrescued - confirm intended.
raise "Doozer #{DOOZER_GEM_VERSION} gem not installed"
end
|
# Homebrew formula for libvips, a fast, low-memory image processing library.
class Vips < Formula
  desc "Image processing library"
  homepage "http://www.vips.ecs.soton.ac.uk/"
  url "http://www.vips.ecs.soton.ac.uk/supported/8.3/vips-8.3.0.tar.gz"
  sha256 "d6ca79b1c5d78f33ebb6d7d3d6948a3bd7ade1c0e09fd162707b1023e17243ec"

  bottle do
    sha256 "6cf11425e98a8a4cf1481384ceb013ac06bb70f70b700c3e2d863d6930d5b8d2" => :el_capitan
    sha256 "a30e35fb6830ad6f979d76dec1eba77aafee94505dfec9d983c789d0b91c9d43" => :yosemite
    sha256 "99cd35942ac211a93ed22adad5ab7322ab3f16664cba16c9d420c58aad99501b" => :mavericks
  end

  # The option was renamed from "without-check"; the old flag is still mapped.
  option "without-test", "Disable build time checks (not recommended)"
  deprecated_option "without-check" => "without-test"

  depends_on "pkg-config" => :build
  depends_on "fontconfig"
  depends_on "gettext"
  depends_on "glib"
  depends_on "libpng" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "orc" => :recommended
  depends_on "libgsf" => :recommended
  depends_on "libtiff" => :recommended
  depends_on "fftw" => :recommended
  depends_on "little-cms2" => :recommended
  depends_on "pango" => :recommended
  depends_on "libexif" => :recommended
  depends_on "gobject-introspection" => :recommended
  depends_on "pygobject3" => :recommended
  depends_on "python" => :recommended
  depends_on "poppler" => :recommended
  depends_on "librsvg" => :recommended
  depends_on "giflib" => :recommended
  depends_on "openslide" => :optional
  depends_on "imagemagick" => :optional
  depends_on "graphicsmagick" => :optional
  depends_on "openexr" => :optional
  depends_on "cfitsio" => :optional
  depends_on "webp" => :optional
  depends_on "python3" => :optional
  depends_on "libmatio" => :optional
  depends_on "mozjpeg" => :optional
  depends_on "jpeg-turbo" => :optional

  def install
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    # GraphicsMagick support must be requested explicitly at configure time.
    args.concat %w[--with-magick --with-magickpackage=GraphicsMagick] if build.with? "graphicsmagick"
    system "./configure", *args
    # FIX: the option is named "test" (see deprecated_option above), but the
    # guard previously queried the old "check" name, so --without-test never
    # actually skipped the test suite.
    if build.with? "test"
      # Test scripts fail with non-english decimal separator, see jcupitt/libvips#367
      ENV["LC_NUMERIC"] = "C"
      system "make", "check"
    end
    system "make", "install"
  end

  test do
    system "#{bin}/vips", "-l"
    system "#{bin}/vipsheader", test_fixtures("test.png")
  end
end
vips: update 8.3.0 bottle.
# Homebrew formula for libvips, a fast, low-memory image processing library.
class Vips < Formula
  desc "Image processing library"
  homepage "http://www.vips.ecs.soton.ac.uk/"
  url "http://www.vips.ecs.soton.ac.uk/supported/8.3/vips-8.3.0.tar.gz"
  sha256 "d6ca79b1c5d78f33ebb6d7d3d6948a3bd7ade1c0e09fd162707b1023e17243ec"

  bottle do
    sha256 "85c20f71743b59cae9ab536ca143df2c891b21228fcf214ea245300cf3b21031" => :el_capitan
    sha256 "d0a1e69fd18541d4eb911d0ddb94cf386e4d754613a1b69e54a1b7180438774d" => :yosemite
    sha256 "7db4ba701781e181487b02aea4a13717242b9551f4d23184ea6940d427ce5047" => :mavericks
  end

  # The option was renamed from "without-check"; the old flag is still mapped.
  option "without-test", "Disable build time checks (not recommended)"
  deprecated_option "without-check" => "without-test"

  depends_on "pkg-config" => :build
  depends_on "fontconfig"
  depends_on "gettext"
  depends_on "glib"
  depends_on "libpng" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "orc" => :recommended
  depends_on "libgsf" => :recommended
  depends_on "libtiff" => :recommended
  depends_on "fftw" => :recommended
  depends_on "little-cms2" => :recommended
  depends_on "pango" => :recommended
  depends_on "libexif" => :recommended
  depends_on "gobject-introspection" => :recommended
  depends_on "pygobject3" => :recommended
  depends_on "python" => :recommended
  depends_on "poppler" => :recommended
  depends_on "librsvg" => :recommended
  depends_on "giflib" => :recommended
  depends_on "openslide" => :optional
  depends_on "imagemagick" => :optional
  depends_on "graphicsmagick" => :optional
  depends_on "openexr" => :optional
  depends_on "cfitsio" => :optional
  depends_on "webp" => :optional
  depends_on "python3" => :optional
  depends_on "libmatio" => :optional
  depends_on "mozjpeg" => :optional
  depends_on "jpeg-turbo" => :optional

  def install
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    # GraphicsMagick support must be requested explicitly at configure time.
    args.concat %w[--with-magick --with-magickpackage=GraphicsMagick] if build.with? "graphicsmagick"
    system "./configure", *args
    # FIX: the option is named "test" (see deprecated_option above), but the
    # guard previously queried the old "check" name, so --without-test never
    # actually skipped the test suite.
    if build.with? "test"
      # Test scripts fail with non-english decimal separator, see jcupitt/libvips#367
      ENV["LC_NUMERIC"] = "C"
      system "make", "check"
    end
    system "make", "install"
  end

  test do
    system "#{bin}/vips", "-l"
    system "#{bin}/vipsheader", test_fixtures("test.png")
  end
end
|
require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
# cosine : (-Inf, Inf) --> (-1.0, 1.0)
# Specs for the module function Math.cos.
describe "Math.cos" do
it "returns a float" do
Math.cos(Math::PI).class.should == Float
end
it "returns the cosine of the argument expressed in radians" do
Math.cos(Math::PI).should be_close(-1.0, TOLERANCE)
Math.cos(0).should be_close(1.0, TOLERANCE)
Math.cos(Math::PI/2).should be_close(0.0, TOLERANCE)
Math.cos(3*Math::PI/2).should be_close(0.0, TOLERANCE)
Math.cos(2*Math::PI).should be_close(1.0, TOLERANCE)
end
# 1.8 raised ArgumentError for non-coercible arguments; 1.9 switched to TypeError.
ruby_version_is ""..."1.9" do
it "raises an ArgumentError if the argument cannot be coerced with Float()" do
lambda { Math.cos("test") }.should raise_error(ArgumentError)
end
end
ruby_version_is "1.9" do
it "raises a TypeError if the argument cannot be coerced with Float()" do
lambda { Math.cos("test") }.should raise_error(TypeError)
end
end
it "raises a TypeError if the argument is nil" do
lambda { Math.cos(nil) }.should raise_error(TypeError)
end
# MathSpecs::Float is a fixture object that responds to #to_f (returns 1.0).
it "accepts any argument that can be coerced with Float()" do
Math.cos(MathSpecs::Float.new).should be_close(0.54030230586814, TOLERANCE)
end
end
# Math also mixes its functions into includers as private instance methods.
describe "Math#cos" do
it "is accessible as a private instance method" do
IncludesMath.new.send(:cos, 3.1415).should be_close(-0.999999995707656, TOLERANCE)
end
end
Math.cos: Explicit test for use of #to_f in coercion.
require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
# cosine : (-Inf, Inf) --> (-1.0, 1.0)
# Specs for the module function Math.cos.
describe "Math.cos" do
it "returns a float" do
Math.cos(Math::PI).class.should == Float
end
it "returns the cosine of the argument expressed in radians" do
Math.cos(Math::PI).should be_close(-1.0, TOLERANCE)
Math.cos(0).should be_close(1.0, TOLERANCE)
Math.cos(Math::PI/2).should be_close(0.0, TOLERANCE)
Math.cos(3*Math::PI/2).should be_close(0.0, TOLERANCE)
Math.cos(2*Math::PI).should be_close(1.0, TOLERANCE)
end
# 1.8 raised ArgumentError for non-coercible arguments; 1.9 switched to TypeError.
ruby_version_is ""..."1.9" do
it "raises an ArgumentError if the argument cannot be coerced with Float()" do
lambda { Math.cos("test") }.should raise_error(ArgumentError)
end
end
ruby_version_is "1.9" do
it "raises a TypeError unless the argument is Numeric and has #to_f" do
lambda { Math.cos("test") }.should raise_error(TypeError)
end
end
it "raises a TypeError if the argument is nil" do
lambda { Math.cos(nil) }.should raise_error(TypeError)
end
# Pins the coercion protocol: the mock must receive #to_f exactly once.
it "coerces its argument with #to_f" do
f = mock_numeric('8.2')
f.should_receive(:to_f).and_return(8.2)
Math.cos(f).should == Math.cos(8.2)
end
end
# Math also mixes its functions into includers as private instance methods.
describe "Math#cos" do
it "is accessible as a private instance method" do
IncludesMath.new.send(:cos, 3.1415).should be_close(-0.999999995707656, TOLERANCE)
end
end
|
# Example OAuth 2.0 provider application built with Sinatra. Serves client
# app registration, the authorization flow, and a small notes API protected
# by OAuth access tokens.
dir = File.expand_path(File.dirname(__FILE__))

require dir + '/environment'
require 'sinatra'
require 'json'

set :static, true
set :public, dir + '/public'
set :views, dir + '/views'

# Scopes a client may request, with the text shown on the consent screen.
PERMISSIONS = {
  'read_notes' => 'Read all your notes'
}

# Canned JSON body returned for any unauthorized request.
ERROR_RESPONSE = JSON.unparse('error' => 'No soup for you!')

get('/') { erb(:home) }

#================================================================
# Register applications

get '/oauth/apps/new' do
  @client = OAuth2::Model::Client.new
  erb :new_client
end

post '/oauth/apps' do
  @client = OAuth2::Model::Client.new(params)
  @client.save ? redirect("/oauth/apps/#{@client.id}") : erb(:new_client)
end

get '/oauth/apps/:id' do
  @client = OAuth2::Model::Client.find_by_id(params[:id])
  erb :show_client
end

#================================================================
# OAuth 2.0 flow
# Initial request example:
# /oauth/authorize?response_type=token&client_id=7uljxxdgsksmecn5cycvug46v&redirect_uri=http%3A%2F%2Fexample.com%2Fcb&scope=read_notes

[:get, :post].each do |method|
  __send__ method, '/oauth/authorize' do
    respond_to_oauth { erb(:login) }
  end
end

post '/login' do
  @oauth2 = OAuth2::Provider.parse(request)
  @user = User.find_by_username(params[:username])
  erb(@user ? :authorize : :login)
end

post '/oauth/allow' do
  @user = User.find_by_id(params[:user_id])
  @auth = OAuth2::Provider::Authorization.new(params)
  if params['allow'] == '1'
    @auth.grant_access!(@user)
  else
    @auth.deny_access!
  end
  redirect @auth.redirect_uri
end

#================================================================
# Domain API

get '/me' do
  access_token = OAuth2::Provider.access_token(request)
  # FIX: guard against a missing token. Previously a request with no token
  # called find_by_access_token(nil), which can match a row whose
  # access_token column is NULL and hand out another user's identity.
  authorization = access_token && OAuth2::Model::Authorization.find_by_access_token(access_token)
  if authorization
    user = authorization.owner
    JSON.unparse('username' => user.username)
  else
    ERROR_RESPONSE
  end
end

get '/users/:user_id/notes' do
  verify_access :read_notes do |user|
    notes = user.notes.map do |n|
      {:note_id => n.id, :url => "#{host}/users/#{user.id}/notes/#{n.id}"}
    end
    JSON.unparse(:notes => notes)
  end
end

get '/users/:user_id/notes/:note_id' do
  verify_access :read_notes do |user|
    note = user.notes.find_by_id(params[:note_id])
    note ? note.to_json : JSON.unparse(:error => 'No such note')
  end
end

helpers do
  #================================================================
  # Generic handler for incoming OAuth requests
  def respond_to_oauth
    @oauth2 = OAuth2::Provider.parse(request)
    redirect @oauth2.redirect_uri if @oauth2.redirect?
    headers @oauth2.response_headers
    status @oauth2.response_status
    @oauth2.response_body || yield
  end

  #================================================================
  # Check for OAuth access before rendering a resource
  def verify_access(scope)
    user = User.find_by_id(params[:user_id])
    token = OAuth2::Provider.access_token(request)
    unless user and user.grants_access?(token, scope.to_s)
      return ERROR_RESPONSE
    end
    yield user
  end

  #================================================================
  # Return the full app domain
  def host
    request.scheme + '://' + request.host_with_port
  end
end
Protect the /me resource properly.
dir = File.expand_path(File.dirname(__FILE__))
require dir + '/environment'
require 'sinatra'
require 'json'
set :static, true
set :public, dir + '/public'
set :views, dir + '/views'
PERMISSIONS = {
'read_notes' => 'Read all your notes'
}
ERROR_RESPONSE = JSON.unparse('error' => 'No soup for you!')
get('/') { erb(:home) }
#================================================================
# Register applications
get '/oauth/apps/new' do
@client = OAuth2::Model::Client.new
erb :new_client
end
post '/oauth/apps' do
@client = OAuth2::Model::Client.new(params)
@client.save ? redirect("/oauth/apps/#{@client.id}") : erb(:new_client)
end
get '/oauth/apps/:id' do
@client = OAuth2::Model::Client.find_by_id(params[:id])
erb :show_client
end
#================================================================
# OAuth 2.0 flow
# Example of an initial request:
# /oauth/authorize?response_type=token&client_id=7uljxxdgsksmecn5cycvug46v&redirect_uri=http%3A%2F%2Fexample.com%2Fcb&scope=read_notes

# The authorization endpoint answers GET and POST identically.
[:get, :post].each do |verb|
  send(verb, '/oauth/authorize') do
    respond_to_oauth { erb(:login) }
  end
end

# Step 2: look the user up by name; render the consent page when found,
# the login form again otherwise.
post '/login' do
  @oauth2 = OAuth2::Provider.parse(request)
  @user = User.find_by_username(params[:username])
  if @user
    erb :authorize
  else
    erb :login
  end
end

# Step 3: record the user's consent decision and bounce back to the client.
post '/oauth/allow' do
  @user = User.find_by_id(params[:user_id])
  @auth = OAuth2::Provider::Authorization.new(params)
  params['allow'] == '1' ? @auth.grant_access!(@user) : @auth.deny_access!
  redirect @auth.redirect_uri
end
#================================================================
# Domain API

# /me reveals the username only when the request carries a token that maps
# to a stored authorization; anything else gets the generic error body.
get '/me' do
  token = OAuth2::Provider.access_token(request)
  authorization = token && OAuth2::Model::Authorization.find_by_access_token(token)
  if authorization
    JSON.unparse('username' => authorization.owner.username)
  else
    ERROR_RESPONSE
  end
end

# Listing of a user's notes, protected by the read_notes scope.
get '/users/:user_id/notes' do
  verify_access :read_notes do |user|
    listing = user.notes.map do |note|
      {:note_id => note.id, :url => "#{host}/users/#{user.id}/notes/#{note.id}"}
    end
    JSON.unparse(:notes => listing)
  end
end

# A single note, protected by the same scope.
get '/users/:user_id/notes/:note_id' do
  verify_access :read_notes do |user|
    note = user.notes.find_by_id(params[:note_id])
    if note
      note.to_json
    else
      JSON.unparse(:error => 'No such note')
    end
  end
end
helpers do
  #================================================================
  # Shared handler for incoming OAuth requests: parse the request, follow a
  # redirect when the provider asks for one, copy the provider's headers and
  # status onto the response, and fall back to the given block when the
  # provider supplies no body of its own.
  def respond_to_oauth
    @oauth2 = OAuth2::Provider.parse(request)
    redirect @oauth2.redirect_uri if @oauth2.redirect?
    headers @oauth2.response_headers
    status @oauth2.response_status
    @oauth2.response_body || yield
  end

  #================================================================
  # Yield the addressed user only when the request's token grants the named
  # scope; otherwise short-circuit with the canned error body.
  def verify_access(scope)
    owner = User.find_by_id(params[:user_id])
    token = OAuth2::Provider.access_token(request)
    return ERROR_RESPONSE unless owner && owner.grants_access?(token, scope.to_s)
    yield owner
  end

  #================================================================
  # Absolute origin of the app, e.g. "http://localhost:4567".
  def host
    "#{request.scheme}://#{request.host_with_port}"
  end
end
|
# Stella load-test plan: weighted usecases exercising search and listings.
desc "The Basic Testplan"
# 60% of virtual users follow the search path.
usecase 60, "Simple search" do
#userpool :anonymous
#httpauth :stella, :stella
get "/", "Homepage" do
wait 1
end
get "/search", "Search Results" do
wait 3
param :what => 'Big'
param :where => ''
response 200 do
# Capture the first listing's numeric id for the follow-up request.
listing = doc.css('div.listing').first
set :lid, listing['id'].match(/(\d+)/)[0]
end
end
# Visit the listing captured from the search results (:lid).
get "/listing/:lid" do
desc "Selected listing"
wait 3
response 200 do
#status
#headers['Content-Type']
#body
end
end
post "/listing/add" do
desc "Add a business"
# Randomized name keeps repeated runs from colliding on uniqueness checks.
param :name => "Heavenly trucks #{rand(1000000)}"
param :city => "Vancouver"
response 200 do
puts body
end
end
end
# 40% of virtual users hit a listing directly with a fixed id.
usecase 40, "Direct to listing" do
#resource :lid => file('listing_ids.csv')
get "/listing/:lid.yaml" do
desc "Select listing"
#param :lid => random[:lid]
param :lid => "1000"
end
end
Some CLI output cleanup
# Stella load-test plan: weighted usecases exercising search and listings.
desc "The Basic Testplan"
# 60% of virtual users follow the search path.
usecase 60, "Simple search" do
#userpool :anonymous
#httpauth :stella, :stella
get "/", "Homepage" do
wait 1
end
get "/search", "Search Results" do
wait 3
param :what => 'Big'
param :where => ''
response 200 do
# Capture the first listing's numeric id for the follow-up request.
listing = doc.css('div.listing').first
set :lid, listing['id'].match(/(\d+)/)[0]
end
end
# Visit the listing captured from the search results (:lid).
get "/listing/:lid" do
desc "Selected listing"
wait 3
response 200 do
#status
#headers['Content-Type']
#body
end
end
post "/listing/add" do
desc "Add a business"
# Randomized name keeps repeated runs from colliding on uniqueness checks.
param :name => "Heavenly trucks #{rand(1000000)}"
param :city => "Vancouver"
response 200 do
puts body
end
end
end
# 40% of virtual users request listings directly, with ids drawn at random
# from a CSV-backed resource pool.
usecase 40, "Direct to listing" do
resource :lid => file('listing_ids.csv')
get "/listing/:lid.yaml" do
desc "Select listing"
param :lid => random[:lid]
#param :lid => "1000"
end
end
|
# -*- encoding: utf-8 -*-
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "cucumber/core/version"

# Gem packaging manifest for cucumber-core.
Gem::Specification.new do |s|
  s.name        = 'cucumber-core'
  s.version     = Cucumber::Core::Version
  s.authors     = ["Aslak Hellesøy", "Matt Wynne", "Steve Tooke", "Oleg Sukhodolsky", "Tom Brand"]
  s.description = 'Core library for the Cucumber BDD app'
  s.summary     = "cucumber-core-#{s.version}"
  s.email       = 'cukes@googlegroups.com'
  s.homepage    = "https://cucumber.io"
  s.platform    = Gem::Platform::RUBY
  s.license     = "MIT"
  s.required_ruby_version = '>= 2.2' # Keep in sync with .travis.yml

  s.metadata = {
    'bug_tracker_uri'   => 'https://github.com/cucumber/cucumber-ruby-core/issues',
    'changelog_uri'     => 'https://github.com/cucumber/cucumber-ruby-core/blob/master/CHANGELOG.md',
    'documentation_uri' => 'https://www.rubydoc.info/github/cucumber/cucumber-ruby-core',
    'mailing_list_uri'  => 'https://groups.google.com/forum/#!forum/cukes',
    'source_code_uri'   => 'https://github.com/cucumber/cucumber-ruby-core',
  }

  s.add_dependency 'gherkin', '~> 7.0', '>= 7.0.3'
  s.add_dependency 'cucumber-tag_expressions', '~> 2.0', '>= 2.0.2'
  s.add_dependency 'backports', '~> 3.15', '>= 3.15.0'

  s.add_development_dependency 'coveralls', '~> 0.8', '>= 0.8.23'
  s.add_development_dependency 'bundler', '~> 1.17', '>= 1.17.2'
  s.add_development_dependency 'rake', '~> 12.3', '>= 12.3.3'
  s.add_development_dependency 'rspec', '~> 3.8', '>= 3.8.0'
  s.add_development_dependency 'unindent', '~> 1.0', '>= 1.0'

  # FIX: `rubygems_version` is stamped by RubyGems at build time and is not a
  # constraint; the minimum-RubyGems requirement belongs in
  # `required_rubygems_version`.
  s.required_rubygems_version = ">= 1.6.1"

  s.test_files = Dir[
    'spec/**/*'
  ]
  s.files = Dir[
    'CHANGELOG.md',
    'CONTRIBUTING.md',
    'README.md',
    'LICENSE',
    'lib/**/*'
  ]
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_path = "lib"
end
Downgrade bundler
# -*- encoding: utf-8 -*-
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "cucumber/core/version"

# Gem packaging manifest for cucumber-core.
Gem::Specification.new do |s|
  s.name        = 'cucumber-core'
  s.version     = Cucumber::Core::Version
  s.authors     = ["Aslak Hellesøy", "Matt Wynne", "Steve Tooke", "Oleg Sukhodolsky", "Tom Brand"]
  s.description = 'Core library for the Cucumber BDD app'
  s.summary     = "cucumber-core-#{s.version}"
  s.email       = 'cukes@googlegroups.com'
  s.homepage    = "https://cucumber.io"
  s.platform    = Gem::Platform::RUBY
  s.license     = "MIT"
  s.required_ruby_version = '>= 2.2' # Keep in sync with .travis.yml

  s.metadata = {
    'bug_tracker_uri'   => 'https://github.com/cucumber/cucumber-ruby-core/issues',
    'changelog_uri'     => 'https://github.com/cucumber/cucumber-ruby-core/blob/master/CHANGELOG.md',
    'documentation_uri' => 'https://www.rubydoc.info/github/cucumber/cucumber-ruby-core',
    'mailing_list_uri'  => 'https://groups.google.com/forum/#!forum/cukes',
    'source_code_uri'   => 'https://github.com/cucumber/cucumber-ruby-core',
  }

  s.add_dependency 'gherkin', '~> 7.0', '>= 7.0.3'
  s.add_dependency 'cucumber-tag_expressions', '~> 2.0', '>= 2.0.2'
  s.add_dependency 'backports', '~> 3.15', '>= 3.15.0'

  s.add_development_dependency 'coveralls', '~> 0.8', '>= 0.8.23'
  s.add_development_dependency 'bundler', '~> 1.16', '>= 1.16.2'
  s.add_development_dependency 'rake', '~> 12.3', '>= 12.3.3'
  s.add_development_dependency 'rspec', '~> 3.8', '>= 3.8.0'
  s.add_development_dependency 'unindent', '~> 1.0', '>= 1.0'

  # FIX: `rubygems_version` is stamped by RubyGems at build time and is not a
  # constraint; the minimum-RubyGems requirement belongs in
  # `required_rubygems_version`.
  s.required_rubygems_version = ">= 1.6.1"

  s.test_files = Dir[
    'spec/**/*'
  ]
  s.files = Dir[
    'CHANGELOG.md',
    'CONTRIBUTING.md',
    'README.md',
    'LICENSE',
    'lib/**/*'
  ]
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_path = "lib"
end
|
require 'require_relative' if RUBY_VERSION[0,3] == '1.8'
require_relative 'acceptance_helper'
# Acceptance coverage for the webfinger XRD endpoint.
describe "Webfinger" do
  include AcceptanceHelper

  it "404s if that user doesnt exist" do
    get "/users/acct:nonexistent@somedomain.com/xrd.xml"
    follow_redirect! if last_response.status == 301
    last_response.status.must_equal 404
  end

  it "renders the user's xrd" do
    user = Fabricate(:user)
    account = "acct:#{user.username}@#{user.author.domain}"
    get "/users/#{account}/xrd.xml"
    follow_redirect! if last_response.status == 301
    xrd = Nokogiri.XML(last_response.body)
    xrd.xpath("//xmlns:Subject").first.content.must_equal account
  end
end
Adds tests to ensure that the salmon urls exist in the user xrd.
I added a section to the webfinger tests which tests that the xrd served
by a webfinger request contains the three salmon URLs necessary to receive
salmon notifications. Strictly speaking, not all three are necessary, but it
is nice to provide all three even if they point to the same URL.
I will add more tests for the remaining things in the xrd to this set of
acceptance tests.
require 'require_relative' if RUBY_VERSION[0,3] == '1.8'
require_relative 'acceptance_helper'
describe "Webfinger" do
include AcceptanceHelper
# The user XRD must advertise all three salmon link relations; every one of
# them points at the same salmon feed endpoint, so the three cases are
# generated from a rel table instead of being written out three times.
describe "user xrd" do
  before do
    @user = Fabricate(:user)
    @subject = "acct:#{@user.username}@#{@user.author.domain}"
    get "/users/#{@subject}/xrd.xml"
    if last_response.status == 301
      follow_redirect!
    end
    @xml = Nokogiri.XML(last_response.body)
  end

  { "salmon url"         => "salmon",
    "salmon-replies url" => "http://salmon-protocol.org/ns/salmon-replies",
    "salmon-mention url" => "http://salmon-protocol.org/ns/salmon-mention"
  }.each do |name, rel|
    it "contains the #{name}" do
      regex = /^http(?:s)?:\/\/.*\/feeds\/#{@user.feed.id}\/salmon$/
      link = @xml.xpath("//xmlns:Link[@rel='#{rel}']")
      link.first.attr("href").must_match regex
    end
  end
end
it "404s if that user doesnt exist" do
  get "/users/acct:nonexistent@somedomain.com/xrd.xml"
  follow_redirect! if last_response.status == 301
  last_response.status.must_equal 404
end

it "renders the user's xrd" do
  user = Fabricate(:user)
  account = "acct:#{user.username}@#{user.author.domain}"
  get "/users/#{account}/xrd.xml"
  follow_redirect! if last_response.status == 301
  xrd = Nokogiri.XML(last_response.body)
  xrd.xpath("//xmlns:Subject").first.content.must_equal account
end
end
|
# encoding: utf-8
require File.expand_path('../../test_helper', __FILE__)
# Exercises the generated translation accessors on translated models:
# - User#name_translations / #name_translations= (locale-code => value hash)
# - AccessorsPost#title_<locale> per-locale readers and writers
class AccessorsTest < Test::Unit::TestCase
test "*_translatons methods are generated" do
assert User.new.respond_to?(:name_translations)
assert User.new.respond_to?(:name_translations=)
end
# An untranslated record exposes an empty translations hash.
test "new user name_translations" do
user = User.new
translations = {}
assert_equal translations, user.name_translations
end
# Writing via the plain attribute records the value under the current
# locale; hash keys come back as strings (hence stringify_keys!).
test "new user name_translations with name assigned" do
user = User.new(:name => 'John')
translations = {:en => 'John'}.stringify_keys!
assert_equal translations, user.name_translations
with_locale(:de) { user.name = 'Jan' }
translations = {:en => 'John', :de => 'Jan'}.stringify_keys!
assert_equal translations, user.name_translations
end
# Translations accumulate per locale and survive save + reload.
test "created user name_translations" do
user = User.create(:name => 'John', :email => 'mad@max.com')
translations = {:en => 'John'}.stringify_keys!
assert_equal translations, user.name_translations
with_locale(:de) { user.name = 'Jan' }
translations = {:en => 'John', :de => 'Jan'}.stringify_keys!
assert_equal translations, user.name_translations
user.save
assert_equal translations, user.name_translations
user.reload
assert_equal translations, user.name_translations
end
# Bulk assignment replaces per-locale values on an unsaved record and
# persists through save (includes a non-ASCII value to catch encoding bugs).
test "new user name_translations=" do
user = User.new(:name => 'Max', :email => 'mad@max.com')
user.name_translations = {:en => 'John', :de => 'Jan', :ru => 'Иван'}
assert_translated user, :en, :name, 'John'
assert_translated user, :de, :name, 'Jan'
assert_translated user, :ru, :name, 'Иван'
user.save
assert_translated user, :en, :name, 'John'
assert_translated user, :de, :name, 'Jan'
assert_translated user, :ru, :name, 'Иван'
end
# Bulk assignment on a persisted record is reflected by the hash getter.
test "created user name_translations=" do
user = User.create(:name => 'Max', :email => 'mad@max.com')
user.name_translations = {:en => 'John', :de => 'Jan', :ru => 'Иван'}
assert_translated user, :en, :name, 'John'
assert_translated user, :de, :name, 'Jan'
assert_translated user, :ru, :name, 'Иван'
translations = {:en => 'John', :de => 'Jan', :ru => 'Иван'}.stringify_keys!
assert_equal translations, user.name_translations
end
test "*_<locale> accessors are generated" do
assert AccessorsPost.new.respond_to?(:title_en)
assert AccessorsPost.new.respond_to?(:title_fr)
assert AccessorsPost.new.respond_to?(:title_en=)
assert AccessorsPost.new.respond_to?(:title_fr=)
end
# title_<locale> readers return the value stored for that locale only.
test "post title_* getter" do
post = AccessorsPost.new(:title => 'title')
Globalize.with_locale(:fr) { post.title = 'titre' }
assert_equal post.title_en, 'title'
assert_equal post.title_fr, 'titre'
end
# title_<locale> writers do not disturb the current-locale value.
test "post title_* setter" do
post = AccessorsPost.new(:title => 'title')
post.title_fr = 'titre'
assert_equal 'title', post.title
assert_equal 'titre', Globalize.with_locale(:fr) { post.title }
end
end
Check that translation accessors are created only for locales passed in as an option to 'translates'.
# encoding: utf-8
require File.expand_path('../../test_helper', __FILE__)
# Exercises the generated translation accessors on translated models:
# - User#name_translations / #name_translations= (locale-code => value hash)
# - AccessorsPost#title_<locale> per-locale readers and writers, which are
#   generated only for locales declared via `translates`
class AccessorsTest < Test::Unit::TestCase
test "*_translatons methods are generated" do
assert User.new.respond_to?(:name_translations)
assert User.new.respond_to?(:name_translations=)
end
# An untranslated record exposes an empty translations hash.
test "new user name_translations" do
user = User.new
translations = {}
assert_equal translations, user.name_translations
end
# Writing via the plain attribute records the value under the current
# locale; hash keys come back as strings (hence stringify_keys!).
test "new user name_translations with name assigned" do
user = User.new(:name => 'John')
translations = {:en => 'John'}.stringify_keys!
assert_equal translations, user.name_translations
with_locale(:de) { user.name = 'Jan' }
translations = {:en => 'John', :de => 'Jan'}.stringify_keys!
assert_equal translations, user.name_translations
end
# Translations accumulate per locale and survive save + reload.
test "created user name_translations" do
user = User.create(:name => 'John', :email => 'mad@max.com')
translations = {:en => 'John'}.stringify_keys!
assert_equal translations, user.name_translations
with_locale(:de) { user.name = 'Jan' }
translations = {:en => 'John', :de => 'Jan'}.stringify_keys!
assert_equal translations, user.name_translations
user.save
assert_equal translations, user.name_translations
user.reload
assert_equal translations, user.name_translations
end
# Bulk assignment replaces per-locale values on an unsaved record and
# persists through save (includes a non-ASCII value to catch encoding bugs).
test "new user name_translations=" do
user = User.new(:name => 'Max', :email => 'mad@max.com')
user.name_translations = {:en => 'John', :de => 'Jan', :ru => 'Иван'}
assert_translated user, :en, :name, 'John'
assert_translated user, :de, :name, 'Jan'
assert_translated user, :ru, :name, 'Иван'
user.save
assert_translated user, :en, :name, 'John'
assert_translated user, :de, :name, 'Jan'
assert_translated user, :ru, :name, 'Иван'
end
# Bulk assignment on a persisted record is reflected by the hash getter.
test "created user name_translations=" do
user = User.create(:name => 'Max', :email => 'mad@max.com')
user.name_translations = {:en => 'John', :de => 'Jan', :ru => 'Иван'}
assert_translated user, :en, :name, 'John'
assert_translated user, :de, :name, 'Jan'
assert_translated user, :ru, :name, 'Иван'
translations = {:en => 'John', :de => 'Jan', :ru => 'Иван'}.stringify_keys!
assert_equal translations, user.name_translations
end
# Accessors exist only for declared locales; :pt is not declared on
# AccessorsPost, so no title_pt methods may be generated.
test "*_<locale> accessors are generated" do
assert AccessorsPost.new.respond_to?(:title_en)
assert AccessorsPost.new.respond_to?(:title_fr)
assert AccessorsPost.new.respond_to?(:title_en=)
assert AccessorsPost.new.respond_to?(:title_fr=)
assert !AccessorsPost.new.respond_to?(:title_pt)
assert !AccessorsPost.new.respond_to?(:title_pt=)
end
# title_<locale> readers return the value stored for that locale only.
test "post title_* getter" do
post = AccessorsPost.new(:title => 'title')
Globalize.with_locale(:fr) { post.title = 'titre' }
assert_equal post.title_en, 'title'
assert_equal post.title_fr, 'titre'
end
# title_<locale> writers do not disturb the current-locale value.
test "post title_* setter" do
post = AccessorsPost.new(:title => 'title')
post.title_fr = 'titre'
assert_equal 'title', post.title
assert_equal 'titre', Globalize.with_locale(:fr) { post.title }
end
end
|
require "minitest/autorun"
require 'rails'
require 'typus'
require 'yaml'
require "i18n/backend/flatten"
I18n::Backend::Simple.send(:include, I18n::Backend::Flatten)
# Verifies every shipped locale file contains exactly the keys present in the
# reference (English) locale bundled with the gem — no missing keys, no
# obsolete ones.
class LocalesCompletenessTest < Minitest::Test
  REFERENCE_LOCALE = "en"

  def setup
    I18n.enforce_available_locales = false
  end

  class << self
    # Locales shipped by this project that must stay in sync with "en".
    def locales_to_test
      %w(de)
    end

    # The reference locale lives inside the engine; translated locales live in
    # this project's config directory.
    def locale_file(locale)
      if (locale == REFERENCE_LOCALE)
        Admin::Engine.root.join("config/locales/typus.#{locale}.yml")
      else
        File.join(File.dirname(__FILE__), "../config/locales/typus.#{locale}.yml")
      end
    end

    # Flattened ("a.b.c" => value) translation map for one locale file.
    def translations(locale)
      file = locale_file(locale)
      data = YAML.load_file(file)[locale]
      I18n.backend.flatten_translations(locale, data, false, false)
    end

    # Memoized key list of the reference locale.
    def reference_keys
      @reference_keys ||= translations(REFERENCE_LOCALE).keys
    end
  end

  locales_to_test.each do |current_locale|
    #
    # test all translated locales are complete, i.e. contain all keys that are in the gem
    #
    define_method("test_#{current_locale}_is_complete") do
      reference_keys = self.class.reference_keys
      locale_keys = self.class.translations(current_locale).keys
      # FIX: Array#- is non-mutating, so the previous defensive `.dup` on
      # reference_keys was unnecessary.
      difference = reference_keys - locale_keys
      msg = %(The locale "#{current_locale}" is missing translations. Please add translations for the keys listed below)
      assert_equal [], difference, msg
    end

    #
    # test the translated locales have no obsolete keys
    #
    define_method("test_#{current_locale}_has_no_obsolete_keys") do
      reference_keys = self.class.reference_keys
      locale_keys = self.class.translations(current_locale).keys
      difference = locale_keys - reference_keys
      msg = %(The locale "#{current_locale}" has obsolete translations. Please remove the keys listed below)
      assert_equal [], difference, msg
    end
  end
end
Array#dup isn’t necessary here.
require "minitest/autorun"
require 'rails'
require 'typus'
require 'yaml'
require "i18n/backend/flatten"
I18n::Backend::Simple.send(:include, I18n::Backend::Flatten)
# Verifies every shipped locale file contains exactly the keys present in the
# reference (English) locale bundled with the gem.
class LocalesCompletenessTest < Minitest::Test
  REFERENCE_LOCALE = "en"

  def setup
    I18n.enforce_available_locales = false
  end

  class << self
    # Locales shipped here that must mirror the reference locale.
    def locales_to_test
      %w(de)
    end

    # Reference locale lives inside the engine; translated ones live in this
    # project's config directory.
    def locale_file(locale)
      return Admin::Engine.root.join("config/locales/typus.#{locale}.yml") if locale == REFERENCE_LOCALE
      File.join(File.dirname(__FILE__), "../config/locales/typus.#{locale}.yml")
    end

    # Flattened ("a.b.c" => value) translation map for one locale file.
    def translations(locale)
      raw = YAML.load_file(locale_file(locale))[locale]
      I18n.backend.flatten_translations(locale, raw, false, false)
    end

    # Memoized key list of the reference locale.
    def reference_keys
      @reference_keys ||= translations(REFERENCE_LOCALE).keys
    end
  end

  locales_to_test.each do |current_locale|
    # The locale must contain every key the gem defines...
    define_method("test_#{current_locale}_is_complete") do
      missing = self.class.reference_keys - self.class.translations(current_locale).keys
      msg = %(The locale "#{current_locale}" is missing translations. Please add translations for the keys listed below)
      assert_equal [], missing, msg
    end

    # ...and no keys beyond them.
    define_method("test_#{current_locale}_has_no_obsolete_keys") do
      obsolete = self.class.translations(current_locale).keys - self.class.reference_keys
      msg = %(The locale "#{current_locale}" has obsolete translations. Please remove the keys listed below)
      assert_equal [], obsolete, msg
    end
  end
end
- Add test for inputs/file
#!/usr/bin/env ruby
require 'rubygems'
$:.unshift File.dirname(__FILE__) + "/../../lib"
require "test/unit"
require "yaml"
require "tempfile"
require "socket"
require "logstash"
require "logstash/filters"
require "logstash/event"
# TODO(sissel): refactor this so we can more easily specify tests.
# Acceptance test for the file:// input: lines written to a tailed temp file
# must surface on the agent's internal output with source/type/tags intact.
class TestInputFile < Test::Unit::TestCase
# Build an agent wired to tail a fresh tempfile and forward every event to
# the @output channel. Must be called inside EventMachine.run (uses EM
# channels and callbacks).
def em_setup
@tmpfile = Tempfile.new(self.class.name)
@type = "default"
@hostname = Socket.gethostname
config = YAML.load <<-"YAML"
inputs:
#{@type}:
- file://#{@tmpfile.path}
outputs:
- internal:///
YAML
@output = EventMachine::Channel.new
@agent = LogStash::Agent.new(config)
@agent.register
@agent.outputs[0].callback do |event|
@output.push(event)
end
end
# Writes random-sized slices of `data` to the tailed file on a timer and
# asserts each emitted event matches in order; stops the agent once all
# expected messages have been seen.
# FIX: dropped the unused local `remaining` (was assigned, never read).
def test_simple
data = [ "hello", "world", "hello world 1 2 3 4", "1", "2", "3", "4", "5" ]
EventMachine.run do
em_setup
expect_data = data.clone
@output.subscribe do |event|
expect_message = expect_data.shift
assert_equal(expect_message, event.message)
assert_equal("file://#{@hostname}#{@tmpfile.path}", event.source)
assert_equal(@type, event.type, "type")
assert_equal([], event.tags, "tags should be empty")
# Done testing if we run out of data.
@agent.stop if expect_data.size == 0
end
# Write to the file periodically
timer = EM::PeriodicTimer.new(0.2) do
a = data.shift((rand * 3).to_i + 1).join("\n")
@tmpfile.puts a
@tmpfile.flush
timer.cancel if data.length == 0
end
end
end # def test_simple
end # class TestInputFile
|
require File.join(File.expand_path(File.dirname(__FILE__)), '..', 'test_helper')
require 'cc_deville'
class FacebookItemTest < ActiveSupport::TestCase
# Relative og:url values must be resolved against the original facebook.com URL.
test "should get canonical URL parsed from facebook html when it is relative" do
relative_url = '/dina.samak/posts/10153679232246949'
url = "https://www.facebook.com#{relative_url}"
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta property='og:url' content='#{relative_url}'>"))
Media.any_instance.stubs(:follow_redirections)
m = create_media url: url
assert_equal url, m.url
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
end
# The canonical og:url replaces the requested URL (tracking params dropped).
test "should get canonical URL parsed from facebook html when it is a page" do
canonical_url = 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479'
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta property='og:url' content='#{canonical_url}'>"))
Media.any_instance.stubs(:follow_redirections)
Media.stubs(:validate_url).with(canonical_url).returns(true)
m = create_media url: 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479?pnref=story.unseen-section'
assert_equal canonical_url, m.url
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.unstub(:validate_url)
end
# Trailing slash before the query string is normalized away.
test "should get canonical URL from facebook object 3" do
expected = 'https://www.facebook.com/54212446406/photos/a.397338611406/10157431603156407?type=3&theater'
url = 'https://www.facebook.com/54212446406/photos/a.397338611406/10157431603156407/?type=3&theater'
media = Media.new(url: url)
media.as_json({ force: 1 })
assert_equal expected, media.url
end
# Three photo-album URL shapes (a., plain post id, pcb.) must all yield the
# same page metadata; one test is generated per URL pattern.
{ a_pattern: 'https://www.facebook.com/Classic.mou/photos/a.136991166478555/1494688604042131',
post_pattern: 'https://www.facebook.com/Classic.mou/photos/1630270703817253',
pcb_pattern: 'https://www.facebook.com/Classic.mou/photos/pcb.613639338813733/613639175480416/'
}.each do |pattern, url|
test "should parse facebook url with a photo album #{pattern}" do
expected = {
title: 'Classic',
username: 'Classic.mou',
author_name: 'Classic',
}.with_indifferent_access
media = Media.new url: url
data = media.as_json
assert !data['author_url'].blank?
expected.each do |key, value|
assert_match value, data[key], "Expected #{key} '#{data[key]}' to match #{value} on #{url}"
end
end
end
# Mobile story.php permalinks (with mangled ¬-escaped params) must still parse.
test "should parse Facebook live post from mobile URL" do
url = 'https://m.facebook.com/story.php?story_fbid=10154584426664820&id=355665009819%C2%ACif_t=live_video%C2%ACif_id=1476846578702256&ref=bookmarks'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match /South China Morning Post/, data['title']
assert_match /SCMP #FacebookLive amid chaotic scenes in #HongKong Legco/, data['description']
assert_not_nil data['published_at']
assert_match 'South China Morning Post', data['author_name']
assert_match 'facebook.com/355665009819', data['author_url']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert !data['picture'].blank?
end
# m.facebook.com photo permalinks resolve to a single-media post.
test "should create Facebook post from mobile URL" do
m = create_media url: 'https://m.facebook.com/KIKOLOUREIROofficial/photos/a.10150618138397252/10152555300292252/?type=3&theater'
data = m.as_json
assert_match /Bolívia/, data['description']
assert_match 'kiko', data['author_name'].downcase
assert_equal 1, data['media_count']
assert_equal '20/11/2014', Time.parse(data['published_at']).strftime("%d/%m/%Y")
end
# Text-only posts fall back to HTML scraping (CrowdTangle stubbed out);
# the page title becomes the item title.
test "should parse Facebook pure text post url" do
Media.any_instance.stubs(:get_crowdtangle_data)
url = 'https://www.facebook.com/dina.samak/posts/10153679232246949?pnref=story.unseen-section'
html = "<title id='pageTitle'>Dina Samak | Facebook</title>
<div data-testid='post_message' class='_5pbx userContent'>
<p>إذا كنت تعرف هيثم محمدين كما أعرفه فمن المؤكد انك قد استمتعت بقدرته الرائعة على الحكي..</p>
</div>"
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML(html))
Media.any_instance.stubs(:follow_redirections)
m = create_media url: url
data = m.as_json
assert_match /Dina Samak/, data['title']
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.any_instance.unstub(:get_crowdtangle_data)
end
# Live-video page posts expose title, description, author and cached images
# named after the media id.
test "should parse Facebook live post" do
url = 'https://www.facebook.com/cbcnews/videos/10154783484119604/'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_equal 'https://www.facebook.com/cbcnews/videos/10154783484119604/', m.url
assert_match /CBC News/, data['title']
assert_match /Live now: This is the National for Monday, Oct. 31, 2016./, data['description']
assert_not_nil data['published_at']
assert_match 'cbcnews', data['username']
assert_match /facebook.com\/5823419603/, data['author_url']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert_match /#{id}\/picture.jpg/, data['picture']
end
# A live post that has been removed yields placeholder metadata (not an error).
# FIX: dropped the unused local `id = Media.get_id url` — nothing in this test
# referenced it.
test "should parse Facebook removed live post" do
url = 'https://www.facebook.com/teste637621352/posts/1538843716180215'
m = create_media url: url
data = m.as_json
assert_equal 'https://www.facebook.com/teste637621352/posts/1538843716180215', m.url
assert_match /Not Identified/, data['title']
assert_equal '', data['description']
assert_equal '', data['published_at']
assert_match 'teste637621352', data['username']
end
test "should parse Facebook livemap" do
variations = %w(
https://www.facebook.com/livemap/#@-12.991858482361014,-38.521747589110994,4z
https://www.facebook.com/live/map/#@37.777053833008,-122.41587829590001,4z
https://www.facebook.com/live/discover/map/#@37.777053833008,-122.41587829590001,4z
)
variations.each do |url|
m = create_media url: url
data = m.as_json
assert_match /facebook\.com/, m.url
assert_match /Facebook/, data['title']
assert_not_nil data['published_at']
end
end
test "should parse Facebook event post" do
m = create_media url: 'https://www.facebook.com/events/364677040588691/permalink/376287682760960/?ref=1&action_history=null'
data = m.as_json
variations = %w(
https://www.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null
https://www.facebook.com/events/zawya/zawyas-tribute-to-mohamed-khan-%D9%85%D9%88%D8%B9%D8%AF-%D9%85%D8%B9-%D8%AE%D8%A7%D9%86/364677040588691/
https://web.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null&_rdc=1&_rdr
)
assert_includes variations, m.url
assert_not_nil data['published_at']
assert_match /#{data['user_uuid']}/, data['author_url']
assert_match /#{data['user_uuid']}/, data['author_picture']
assert_match /^https?:/, data['picture']
assert_match /Zawya/, data['title']
end
test "should parse Facebook event url" do
m = create_media url: 'https://www.facebook.com/events/1090503577698748'
data = m.as_json
assert_match /Nancy Ajram/, data['title']
assert_not_nil data['description']
assert_match /^http/, data['picture']
assert_not_nil data['published_at']
assert_not_nil data['author_picture']
end
# Numeric-page video URLs parse into items with picture and timestamp.
test "should parse Facebook video url from a page" do
m = create_media url: 'https://www.facebook.com/144585402276277/videos/1127489833985824'
data = m.as_json
assert_match /Trent Aric - Meteorologist/, data['title']
assert_match /MATTHEW YOU ARE DRUNK...GO HOME!/, data['description']
assert_equal 'item', data['type']
assert_not_nil data['picture']
assert_not_nil Time.parse(data['published_at'])
end
# Named-page video URLs behave the same.
test "should parse Facebook video url from a page with another url pattern" do
m = create_media url: 'https://www.facebook.com/democrats/videos/10154268929856943'
data = m.as_json
assert_match /Democratic Party/, data['title']
assert_match /On National Voter Registration Day/, data['description']
assert_equal 'item', data['type']
assert_not_nil data['picture']
assert_not_nil Time.parse(data['published_at'])
end
# Profile (vb.) video URLs still yield a titled item with an author picture.
test "should parse Facebook video url from a profile" do
m = create_media url: 'https://www.facebook.com/edwinscott143/videos/vb.737361619/10154242961741620/?type=2&theater'
data = m.as_json
assert !data['title'].blank?
assert_equal 'item', data['type']
assert_not_nil data['author_picture']
end
# Page-album videos resolve author info and the cached author picture.
test "should parse Facebook video on page album" do
url = 'https://www.facebook.com/scmp/videos/10154584426664820'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match /(South China Morning Post|scmp)/, data['title']
assert_match /SCMP #FacebookLive/, data['description']
assert_match 'scmp', data['username']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert_match /facebook.com\/355665009819/, data['author_url']
assert_match /(South China Morning Post|scmp)/, data['author_name']
end
# Posts embedding a GIF must surface the giphy asset among the photos.
test "should parse Facebook gif photo url" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/posts/1095740107184121'
data = m.as_json
assert_match /quoted/, data['title'].downcase
assert_not_nil data['description']
assert data['photos'].any? { |p| p =~ /giphy.gif/ }, "photos should include gif image"
end
# permalink.php album posts are normalized to the /<page>/posts/<id> form.
test "should parse album post with a permalink" do
url = 'https://www.facebook.com/permalink.php?story_fbid=10154534111016407&id=54212446406'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match /Mariano Rajoy Brey/, data['title']
assert_equal 'item', data['type']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert !data['picture'].blank?
assert_not_nil Time.parse(data['published_at'])
assert_match '10154534111016407', data['object_id']
assert_match 'https://www.facebook.com/54212446406/posts/10154534111016407', m.url
end
# Plain user timeline posts are typed as facebook items.
# FIX: dropped the unused local `id = Media.get_id url` — nothing in this test
# referenced it.
test "should parse facebook user post" do
url = 'https://www.facebook.com/dina.hawary/posts/10158416884740321'
m = create_media url: url
data = m.as_json
assert_equal 'item', data['type']
assert_equal 'facebook', data['provider']
end
# Post ids carrying a ":0" suffix still parse as facebook items.
# FIX: dropped the unused local `id = Media.get_id url` — nothing in this test
# referenced it.
test "should parse facebook url with colon mark" do
url = 'https://www.facebook.com/Classic.mou/posts/666508790193454:0'
m = create_media url: url
data = m.as_json
assert_equal 'item', data['type']
assert_equal 'facebook', data['provider']
end
# media/set URLs are decomposed into user and object uuids.
test "should parse Facebook post from media set" do
url = 'https://www.facebook.com/media/set?set=a.10154534110871407.1073742048.54212446406&type=3'
m = create_media url: url
data = m.as_json
assert_match '54212446406_10154534110871407', data['uuid']
assert_match '54212446406', data['user_uuid']
assert_match '10154534110871407', data['object_id']
assert_match url, m.url
end
# /pg/ album URLs resolve to the page-category canonical form.
test "should support facebook pattern with pg" do
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407'
data = m.as_json
assert_equal 'item', data['type']
assert_match '54212446406_10154534110871407', data['uuid']
assert_match(/En el Museo Serralves de Oporto/, data['text'])
assert_match '54212446406', data['user_uuid']
assert_match 'Mariano Rajoy Brey', data['author_name']
assert_match '10154534110871407', data['object_id']
assert_match 'https://www.facebook.com/pages/category/Politician/Mariano-Rajoy-Brey-54212446406/photos/', m.url
end
# Legacy album.php URLs normalize to a media/set URL without error.
test "should support facebook pattern with album" do
m = create_media url: 'https://www.facebook.com/album.php?fbid=10154534110871407&id=54212446406&aid=1073742048'
data = m.as_json
assert_match '10154534110871407_10154534110871407', data['uuid']
assert_nil data['error']
assert_match 'https://www.facebook.com/media/set?set=a.10154534110871407', m.url
end
test "should get facebook data from original_url when url fails" do
Media.any_instance.stubs(:url).returns('https://www.facebook.com/Mariano-Rajoy-Brey-54212446406/photos')
Media.any_instance.stubs(:original_url).returns('https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407')
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos'
data = m.as_json
assert_match '54212446406_10154534110871407', data['uuid']
assert_match(/Militante del Partido Popular/, data['text'])
assert_match '54212446406', data['user_uuid']
assert_match 'Mariano', data['author_name']
assert_match '10154534110871407', data['object_id']
Media.any_instance.unstub(:url)
Media.any_instance.unstub(:original_url)
end
# The raw oembed payload returned for a Facebook post must be captured
# under data['raw']['oembed'] as a non-empty Hash.
test "should store data of post returned by oembed" do
  media = create_media url: 'https://www.facebook.com/teste637621352/posts/1028416870556238'
  raw_oembed = media.as_json['raw']['oembed']
  assert_kind_of Hash, raw_oembed
  refute raw_oembed.empty?
end
# A photo post should keep its raw oembed hash and expose provider
# fields pointing at Facebook.
test "should store oembed data of a facebook post" do
  media = create_media url: 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501.1073741829.456182634511888/942167619246718/?type=3&theater'
  parsed = media.as_json
  assert_kind_of Hash, parsed['raw']['oembed']
  assert_match /facebook.com/, parsed['oembed']['provider_url']
  assert_equal "facebook", parsed['oembed']['provider_name'].downcase
end
# A page URL should also carry oembed data, with the page name used as
# both author and title.
test "should store oembed data of a facebook page" do
  parsed = create_media(url: 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts').as_json
  assert_kind_of Hash, parsed['raw']['oembed']
  assert_match 'Meedan', parsed['oembed']['author_name']
  assert_match 'Meedan', parsed['oembed']['title']
end
test "should parse Facebook post from page photo" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334.28784.128791873878954/1096134023811396/?type=3&theater'
data = m.as_json
assert_match /quoted.pictures/, data['title'].downcase
assert_match 'quoted.pictures', data['username']
assert_match /quoted.pictures/, data['author_name'].downcase
assert !data['author_url'].blank?
assert !data['picture'].blank?
assert_nil data['error']
end
test "should parse facebook url without identified pattern as item" do
url = 'https://www.facebook.com/Bimbo.Memories/photos/pb.235404669918505.-2207520000.1481570271./1051597428299221/?type=3&theater'
m = create_media url: url
data = m.as_json
assert_equal 'item', data['type']
assert_match /Bimbo/, data['title']
assert_not_nil data['description']
assert_not_nil data['published_at']
assert_match 'Bimbo', data['author_name']
assert_match 'Bimbo', data['username']
assert_match /facebook.com\/(235404669918505|Bimbo.Memories)/, data['author_url']
end
# An album-scoped photo URL with no dedicated pattern should still parse
# as an item whose uuid carries the photo id.
# FIX: dropped the unused `id = Media.get_id url` local — its value was
# never referenced in this test.
test "should parse Facebook photo post within an album url" do
  url = 'https://www.facebook.com/ESCAPE.Egypt/photos/ms.c.eJxNk8d1QzEMBDvyQw79N2ZyaeD7osMIwAZKLGTUViod1qU~;DCBNHcpl8gfMKeR8bz2gH6ABlHRuuHYM6AdywPkEsH~;gqAjxqLAKJtQGZFxw7CzIa6zdF8j1EZJjXRgTzAP43XBa4HfFa1REA2nXugScCi3wN7FZpF5BPtaVDEBqwPNR60O9Lsi0nbDrw3KyaPCVZfqAYiWmZO13YwvSbtygCWeKleh9KEVajW8FfZz32qcUrNgA5wfkA4Xfh004x46d9gdckQt2xR74biSOegwIcoB9OW~_oVIxKML0JWYC0XHvDkdZy0oY5bgjvBAPwdBpRuKE7kZDNGtnTLoCObBYqJJ4Ky5FF1kfh75Gnyl~;Qxqsv.bps.a.1204090389632094.1073742218.423930480981426/1204094906298309/?type=3&theater'
  m = create_media url: url
  data = m.as_json
  assert_equal 'item', data['type']
  assert_match /escape/, data['title'].downcase
  assert_match /1204094906298309/, data['uuid']
end
# A photo permalink inside a photo album should parse as a Facebook item
# carrying the album owner's profile data and no error.
# FIX: dropped the unused `id = Media.get_id url` local — its value was
# never referenced in this test.
test "should parse photo in a photo album" do
  url = 'https://www.facebook.com/nostalgia.y/photos/pb.456182634511888.-2207520000.1484079948./928269767303170/?type=3&theater'
  m = create_media url: url
  data = m.as_json
  assert_equal 'item', data['type']
  assert_equal 'facebook', data['provider']
  assert_match /nostalgia/, data['title'].downcase
  assert_not_nil data['published_at']
  assert_match 'nostalgia.y', data['username']
  assert_match 'nostalgia', data['author_name'].downcase
  assert_match /facebook.com\/(456182634511888|nostalgia.y)/, data['author_url']
  assert_nil data['error']
end
test "should create Facebook post from page photo URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/photos/a.754851877912740.1073741826.749262715138323/896869113711015/?type=3'
data = m.as_json
assert_match /896869113711015/, data['uuid']
assert_match 'teste', data['author_name'].downcase
assert_match 'teste637621352', data['username']
assert_match '896869113711015', data['object_id']
assert_nil data['error']
end
test "should create Facebook post from page photos URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028795030518422'
data = m.as_json
assert_equal '749262715138323_1028795030518422', data['uuid']
assert_match 'This is just a test with many photos.', data['text']
assert_match '749262715138323', data['user_uuid']
assert_match 'Teste', data['author_name']
assert_equal 2, data['media_count']
assert_match '1028795030518422', data['object_id']
assert_equal '11/2015', Time.parse(data['published_at']).strftime("%m/%Y")
end
test "should create Facebook post from user photos URL" do
m = create_media url: 'https://www.facebook.com/nanabhay/posts/10156130657385246?pnref=story'
data = m.as_json
assert_match '10156130657385246', data['uuid']
assert_match 'Such a great evening with friends last night. Sultan Sooud Al-Qassemi has an amazing collecting of modern Arab art. It was a visual tour of the history of the region over the last century.', data['text'].strip
assert_match 'Mohamed Nanabhay', data['author_name']
end
# A photo.php permalink from a user profile should yield uuid, user_uuid
# and object_id all derived from the fbid, plus a non-blank author and title.
# FIX: dropped the unused `id = Media.get_id url` local, and replaced
# `assert data['error'].nil?` with `assert_nil` for consistency with the
# sibling tests in this file.
test "should parse Facebook post from user photo URL" do
  url = 'https://www.facebook.com/photo.php?fbid=10155150801660195&set=p.10155150801660195&type=1&theater'
  m = create_media url: url
  data = m.as_json
  assert_match '10155150801660195_10155150801660195', data['uuid']
  assert_match '10155150801660195', data['user_uuid']
  assert !data['author_name'].blank?
  assert_match '10155150801660195', data['object_id']
  assert !data['title'].blank?
  assert_nil data['error']
end
# Data-driven tests: each entry in test/data/fbposts.yml maps a Facebook post
# URL to the text expected in its parsed 'text' field.
tests = YAML.load_file(File.join(Rails.root, 'test', 'data', 'fbposts.yml'))
tests.each do |url, text|
test "should get text from Facebook user post from URL '#{url}'" do
# Stub out network access: crowdtangle is disabled, the post HTML is faked
# with a meta description carrying the expected text, and redirections are
# skipped, so only the HTML text-extraction path is exercised.
Media.any_instance.stubs(:get_crowdtangle_data)
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta name='description' content='#{text}'>"))
Media.any_instance.stubs(:follow_redirections)
m = create_media url: url
data = m.as_json
# Whitespace is collapsed before comparing against the expected text.
assert_match text, data['text'].gsub(/\s+/, ' ').strip
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.any_instance.unstub(:get_crowdtangle_data)
end
end
# Multi-photo posts should expose a generated picture/author_picture URL
# (keyed by the media id) and a photos array with more than one entry.
test "should create Facebook post with picture and photos" do
  url = 'https://www.facebook.com/teste637621352/posts/1028795030518422'
  id = Media.get_id url
  m = create_media url: url
  data = m.as_json
  assert_match /#{id}\/picture/, data['picture']
  assert_kind_of Array, data['photos']
  assert_equal 2, data['media_count']
  assert data['photos'].size > 1, "photos should have more than 1 image"
  url = 'https://www.facebook.com/teste637621352/posts/1035783969819528'
  id = Media.get_id url
  m = create_media url: url
  data = m.as_json
  assert_match /#{id}\/picture/, data['picture']
  assert_match /#{id}\/author_picture/, data['author_picture']
  assert_kind_of Array, data['photos']
  # FIX: 'media_count' is an Integer (asserted equal to 2 above), and
  # Integer#size returns the machine byte width (always > 1), so the old
  # `data['media_count'].size > 1` assertion could never fail. Compare the
  # count itself instead.
  assert data['media_count'] > 1, "media_count should be more than 1"
  assert data['photos'].size > 1, "photos should have more than 1 image"
  url = 'https://www.facebook.com/teste637621352/posts/2194142813983632'
  id = Media.get_id url
  m = create_media url: url
  data = m.as_json
  assert_match /#{id}\/picture/, data['picture']
  assert_match /#{id}\/author_picture/, data['author_picture']
  assert_kind_of Array, data['photos']
  # Same fix as above: compare the integer count, not Integer#size.
  assert data['media_count'] > 1, "media_count should be more than 1"
  assert data['photos'].size > 1, "photos should have more than 1 image"
end
test "should get normalized URL from crowdtangle" do
# NOTE(review): the first create_media call is never asserted on; it looks
# like it exists to prime the parser/cache with the post before requesting
# the photo URL — confirm before removing.
url = 'https://www.facebook.com/quoted.pictures/posts/3424788280945947'
m = create_media url: url
data = m.as_json
# The photo URL is expected to normalize (via crowdtangle data) to itself.
url = 'https://www.facebook.com/quoted.pictures/photos/a.525451984212939/3424788187612623?type=3'
m = create_media url: url
data = m.as_json
assert_equal url, data['url']
end
test "should return item as oembed" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150)
assert_match 'Meedan', data['title']
assert_match 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
test "should return item as oembed when data is not on cache" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
data = Media.as_oembed(nil, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
assert_match 'Meedan', data['title']
assert_match 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
test "should return item as oembed when data is on cache and raw key is missing" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
json_data = m.as_json
json_data.delete('raw')
data = Media.as_oembed(json_data, "http://pender.org/medias.html?url=#{url}", 300, 150)
assert_match 'Meedan', data['title']
assert_match 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
test "should return item as oembed when the page has oembed url" do
url = 'https://www.facebook.com/teste637621352/posts/1028416870556238'
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta property='og:title' content='Teste'>"))
m = create_media url: url
data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
assert_match /Teste/, data['title']
assert_match 'Teste', data['author_name']
assert_match /facebook.com\//, data['author_url']
assert_equal 'facebook', data['provider_name']
assert_match /https?:\/\/www.facebook.com/, data['provider_url']
Media.any_instance.unstub(:get_html)
end
test "should not use Facebook embed if is a link to redirect" do
url = 'https://l.facebook.com/l.php?u=https://hindi.indiatvnews.com/paisa/business-1-07-cr-new-taxpayers-added-dropped-filers-down-at-25-22-lakh-in-fy18-630914&h=AT1WAU-mDHKigOgFNrUsxsS2doGO0_F5W9Yck7oYUx-IsYAHx8JqyHwO02-N0pX8UOlcplZO50px8mkTA1XNyKig8Z2CfX6t3Sh0bHtO9MYPtWqacCm6gOXs5lbC6VGMLjDALNXZ6vg&s=1'
m = create_media url: url
data = m.as_json
assert_match 'Leaving Facebook', data['author_name']
assert_equal '', data['html']
end
# A post sharing another post should expose the original's URL under
# 'original_post' and include the original image among its photos, while
# the original post itself has no 'original_post'.
# FIX: dropped the unused `original_id` and `id` locals — the Media.get_id
# results were never referenced.
test "should get image from original post if is a shared content" do
  original_url = 'https://www.facebook.com/dcc1968/posts/1538584976252118'
  m = create_media url: original_url.to_s
  data = m.as_json
  assert_nil data.dig('original_post')
  url = 'https://www.facebook.com/danielafeitosa/posts/1862242233833668'
  m = create_media url: url.to_s
  data = m.as_json
  assert_match /facebook.com\/dcc1968/, data.dig('original_post')
  assert data['photos'].any? { |p| p =~ /1538581556252460_5832184448275185664/ }, "photos should include the original image"
end
#commented until #8563 be fixed
# test "should not get original post if it's already parsing the original post" do
# m = create_media url: 'https://www.facebook.com/groups/1863694297275556/permalink/2193768444268138/'
# data = m.as_json
# original_post = data.dig('original_post')
# assert_not_nil original_post
# original = Media.new url: original_post
# assert_nil original.as_json['original_post']
# end
test "should have external id for post" do
Media.any_instance.stubs(:doc).returns(Nokogiri::HTML("<meta property='og:url' content='https://www.facebook.com/ironmaiden/posts/10156071020577051'>"))
m = create_media url: 'https://www.facebook.com/ironmaiden/posts/10156071020577051'
data = m.as_json
assert_match '10156071020577051', data['external_id']
Media.any_instance.unstub(:doc)
end
test "should parse Facebook category page" do
m = create_media url: 'https://www.facebook.com/pages/category/Society---Culture-Website/PoporDezamagit/photos/'
data = m.as_json
assert_match 'Popor dezamagit on Facebook', data[:title]
end
# Unreachable posts should produce an empty embed and a populated error
# (both code and message present).
test "should add not found error and return empty html" do
  [
    'https://www.facebook.com/danielafeitosa/posts/2074906892567200',
    'https://www.facebook.com/caiosba/posts/8457689347638947'
  ].each do |post_url|
    parsed = create_media(url: post_url).as_json
    assert_equal '', parsed[:html]
    assert_not_nil parsed[:error][:code]
    assert_not_nil parsed[:error][:message]
  end
end
test "should add login required error and return empty html" do
m = create_media url: 'https://www.facebook.com/caiosba/posts/2914211445293757'
data = m.as_json
assert_equal '', data[:html]
assert_equal 'Login required to see this profile', data[:error][:message]
assert_equal LapisConstants::ErrorCodes::const_get('LOGIN_REQUIRED'), data[:error][:code]
end
test "should get the group name when parsing group post" do
url = 'https://www.facebook.com/groups/memetics.hacking/permalink/1580570905320222/'
m = Media.new url: url
data = m.as_json
assert_no_match "Not Identified", data['title']
assert !data['description'].blank?
assert_match 'permalink/1580570905320222/', data['url']
end
test "should parse page post date from public page profile" do
url = 'https://www.facebook.com/nytimes/posts/10152441141079999'
m = Media.new url: url
data = m.as_json
assert_equal '2020-09-04T21:25:04.000+00:00', data['published_at']
end
test "should parse post date from public person profile" do
url = 'https://www.facebook.com/marc.smolowitz/posts/10158161767684331'
m = Media.new url: url
data = m.as_json
assert_equal '2020-09-04T22:57:41.000+00:00', data['published_at']
url = 'https://www.facebook.com/julien.caidos/posts/10158477528272170'
m = Media.new url: url
data = m.as_json
assert_equal '2020-09-03T11:01:21.000+00:00', data['published_at']
url = 'https://example.com'
Media.any_instance.stubs(:doc).returns(Nokogiri::HTML('<div class="_5pcr userContentWrapper"><abbr data-utime="1599260261" class="_5ptz"><span class="timestampContent" id="js_1">4 de setembro de 2020</span></abbr></div>'))
m = Media.new url: url
assert_equal Time.at(1599260261), m.get_facebook_published_time_from_html
Media.any_instance.unstub(:doc)
end
test "should parse post from public group" do
url = 'https://www.facebook.com/groups/memetics.hacking/permalink/1580570905320222/'
m = Media.new url: url
data = m.as_json
assert_match /This group is a gathering for those interested in exploring belief systems/, data['description']
end
test "should get full text of Facebook post" do
Media.any_instance.stubs(:get_crowdtangle_data)
url = 'https://www.facebook.com/ironmaiden/posts/10157024746512051'
html = "<div data-testid='post_message' class='_5pbx userContent'>
<p>Legacy of the Beast Touring Update 2020/21</p>
<p> I hope you and your loved ones are staying safe and well, wherever you may be, and my continued thanks to you all for bearing with us so patiently.</p>
<p> Due to the continuing health issues Worldwide around Covid-19 we regretfully inform you that Iron Maiden will now not be playing any concerts until June 2021.</p>
<p> However, we are now in a position to give you details of our touring plans in respect to those shows we had hoped to play this year.</p>
</div>"
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML(html))
Media.any_instance.stubs(:follow_redirections)
m = Media.new url: url
data = m.as_json
assert_match /However, we are now in a position to give you details of our touring plans in respect to those shows we had hoped to play this year/, data['description']
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.any_instance.unstub(:get_crowdtangle_data)
end
test "should not change url when redirected to login page" do
url = 'https://www.facebook.com/ugmhmyanmar/posts/2850282508516442'
redirection_to_login_page = 'https://www.facebook.com/login/?next=https%3A%2F%2Fwww.facebook.com%2Fugmhmyanmar%2Fposts%2F2850282508516442'
# Fake the HTTP layer with mocha: the post URL answers a 302 pointing at the
# login page, and the login page itself answers 200. Plain strings serve as
# stub hosts; only #code/#header are ever called on them.
response = 'mock'; response.stubs(:code).returns('302')
response.stubs(:header).returns({ 'location' => redirection_to_login_page })
response_login_page = 'mock'; response_login_page.stubs(:code).returns('200')
Media.stubs(:request_url).with(url, 'Head').returns(response)
Media.stubs(:request_url).with(redirection_to_login_page, 'Head').returns(response_login_page)
m = create_media url: url
# A login-wall redirect must not rewrite the media URL.
assert_equal url, m.url
Media.unstub(:request_url)
end
end
# Ticket CHECK-159: Fix FB test
require File.join(File.expand_path(File.dirname(__FILE__)), '..', 'test_helper')
require 'cc_deville'
class FacebookItemTest < ActiveSupport::TestCase
test "should get canonical URL parsed from facebook html when it is relative" do
relative_url = '/dina.samak/posts/10153679232246949'
url = "https://www.facebook.com#{relative_url}"
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta property='og:url' content='#{relative_url}'>"))
Media.any_instance.stubs(:follow_redirections)
m = create_media url: url
assert_equal url, m.url
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
end
test "should get canonical URL parsed from facebook html when it is a page" do
canonical_url = 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479'
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta property='og:url' content='#{canonical_url}'>"))
Media.any_instance.stubs(:follow_redirections)
Media.stubs(:validate_url).with(canonical_url).returns(true)
m = create_media url: 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479?pnref=story.unseen-section'
assert_equal canonical_url, m.url
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.unstub(:validate_url)
end
test "should get canonical URL from facebook object 3" do
expected = 'https://www.facebook.com/54212446406/photos/a.397338611406/10157431603156407?type=3&theater'
url = 'https://www.facebook.com/54212446406/photos/a.397338611406/10157431603156407/?type=3&theater'
media = Media.new(url: url)
media.as_json({ force: 1 })
assert_equal expected, media.url
end
{ a_pattern: 'https://www.facebook.com/Classic.mou/photos/a.136991166478555/1494688604042131',
post_pattern: 'https://www.facebook.com/Classic.mou/photos/1630270703817253',
pcb_pattern: 'https://www.facebook.com/Classic.mou/photos/pcb.613639338813733/613639175480416/'
}.each do |pattern, url|
test "should parse facebook url with a photo album #{pattern}" do
expected = {
title: 'Classic',
username: 'Classic.mou',
author_name: 'Classic',
}.with_indifferent_access
media = Media.new url: url
data = media.as_json
assert !data['author_url'].blank?
expected.each do |key, value|
assert_match value, data[key], "Expected #{key} '#{data[key]}' to match #{value} on #{url}"
end
end
end
test "should parse Facebook live post from mobile URL" do
url = 'https://m.facebook.com/story.php?story_fbid=10154584426664820&id=355665009819%C2%ACif_t=live_video%C2%ACif_id=1476846578702256&ref=bookmarks'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match /South China Morning Post/, data['title']
assert_match /SCMP #FacebookLive amid chaotic scenes in #HongKong Legco/, data['description']
assert_not_nil data['published_at']
assert_match 'South China Morning Post', data['author_name']
assert_match 'facebook.com/355665009819', data['author_url']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert !data['picture'].blank?
end
test "should create Facebook post from mobile URL" do
m = create_media url: 'https://m.facebook.com/KIKOLOUREIROofficial/photos/a.10150618138397252/10152555300292252/?type=3&theater'
data = m.as_json
assert_match /Bolívia/, data['description']
assert_match 'kiko', data['author_name'].downcase
assert_equal 1, data['media_count']
assert_equal '20/11/2014', Time.parse(data['published_at']).strftime("%d/%m/%Y")
end
test "should parse Facebook pure text post url" do
Media.any_instance.stubs(:get_crowdtangle_data)
url = 'https://www.facebook.com/dina.samak/posts/10153679232246949?pnref=story.unseen-section'
html = "<title id='pageTitle'>Dina Samak | Facebook</title>
<div data-testid='post_message' class='_5pbx userContent'>
<p>إذا كنت تعرف هيثم محمدين كما أعرفه فمن المؤكد انك قد استمتعت بقدرته الرائعة على الحكي..</p>
</div>"
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML(html))
Media.any_instance.stubs(:follow_redirections)
m = create_media url: url
data = m.as_json
assert_match /Dina Samak/, data['title']
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.any_instance.unstub(:get_crowdtangle_data)
end
test "should parse Facebook live post" do
url = 'https://www.facebook.com/cbcnews/videos/10154783484119604/'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_equal 'https://www.facebook.com/cbcnews/videos/10154783484119604/', m.url
assert_match /CBC News/, data['title']
assert_match /Live now: This is the National for Monday, Oct. 31, 2016./, data['description']
assert_not_nil data['published_at']
assert_match 'cbcnews', data['username']
assert_match /facebook.com\/5823419603/, data['author_url']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert_match /#{id}\/picture.(jpg|png)/, data['picture']
end
# A live post that was removed should still resolve to its canonical URL
# but come back with placeholder metadata (empty description/date).
# FIX: dropped the unused `id = Media.get_id url` local — its value was
# never referenced in this test.
test "should parse Facebook removed live post" do
  url = 'https://www.facebook.com/teste637621352/posts/1538843716180215'
  m = create_media url: url
  data = m.as_json
  assert_equal 'https://www.facebook.com/teste637621352/posts/1538843716180215', m.url
  assert_match /Not Identified/, data['title']
  assert_equal '', data['description']
  assert_equal '', data['published_at']
  assert_match 'teste637621352', data['username']
end
# Every livemap URL variation should resolve to a facebook.com URL with a
# Facebook title and a publication date.
test "should parse Facebook livemap" do
  %w(
    https://www.facebook.com/livemap/#@-12.991858482361014,-38.521747589110994,4z
    https://www.facebook.com/live/map/#@37.777053833008,-122.41587829590001,4z
    https://www.facebook.com/live/discover/map/#@37.777053833008,-122.41587829590001,4z
  ).each do |livemap_url|
    media = create_media url: livemap_url
    parsed = media.as_json
    assert_match /facebook\.com/, media.url
    assert_match /Facebook/, parsed['title']
    assert_not_nil parsed['published_at']
  end
end
test "should parse Facebook event post" do
m = create_media url: 'https://www.facebook.com/events/364677040588691/permalink/376287682760960/?ref=1&action_history=null'
data = m.as_json
variations = %w(
https://www.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null
https://www.facebook.com/events/zawya/zawyas-tribute-to-mohamed-khan-%D9%85%D9%88%D8%B9%D8%AF-%D9%85%D8%B9-%D8%AE%D8%A7%D9%86/364677040588691/
https://web.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null&_rdc=1&_rdr
)
assert_includes variations, m.url
assert_not_nil data['published_at']
assert_match /#{data['user_uuid']}/, data['author_url']
assert_match /#{data['user_uuid']}/, data['author_picture']
assert_match /^https?:/, data['picture']
assert_match /Zawya/, data['title']
end
test "should parse Facebook event url" do
m = create_media url: 'https://www.facebook.com/events/1090503577698748'
data = m.as_json
assert_match /Nancy Ajram/, data['title']
assert_not_nil data['description']
assert_match /^http/, data['picture']
assert_not_nil data['published_at']
assert_not_nil data['author_picture']
end
test "should parse Facebook video url from a page" do
m = create_media url: 'https://www.facebook.com/144585402276277/videos/1127489833985824'
data = m.as_json
assert_match /Trent Aric - Meteorologist/, data['title']
assert_match /MATTHEW YOU ARE DRUNK...GO HOME!/, data['description']
assert_equal 'item', data['type']
assert_not_nil data['picture']
assert_not_nil Time.parse(data['published_at'])
end
test "should parse Facebook video url from a page with another url pattern" do
m = create_media url: 'https://www.facebook.com/democrats/videos/10154268929856943'
data = m.as_json
assert_match /Democratic Party/, data['title']
assert_match /On National Voter Registration Day/, data['description']
assert_equal 'item', data['type']
assert_not_nil data['picture']
assert_not_nil Time.parse(data['published_at'])
end
test "should parse Facebook video url from a profile" do
m = create_media url: 'https://www.facebook.com/edwinscott143/videos/vb.737361619/10154242961741620/?type=2&theater'
data = m.as_json
assert !data['title'].blank?
assert_equal 'item', data['type']
assert_not_nil data['author_picture']
end
test "should parse Facebook video on page album" do
url = 'https://www.facebook.com/scmp/videos/10154584426664820'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match /(South China Morning Post|scmp)/, data['title']
assert_match /SCMP #FacebookLive/, data['description']
assert_match 'scmp', data['username']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert_match /facebook.com\/355665009819/, data['author_url']
assert_match /(South China Morning Post|scmp)/, data['author_name']
end
test "should parse Facebook gif photo url" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/posts/1095740107184121'
data = m.as_json
assert_match /quoted/, data['title'].downcase
assert_not_nil data['description']
assert data['photos'].any? { |p| p =~ /giphy.gif/ }, "photos should include gif image"
end
test "should parse album post with a permalink" do
url = 'https://www.facebook.com/permalink.php?story_fbid=10154534111016407&id=54212446406'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match /Mariano Rajoy Brey/, data['title']
assert_equal 'item', data['type']
assert_match /#{id}\/author_picture.jpg/, data['author_picture']
assert !data['picture'].blank?
assert_not_nil Time.parse(data['published_at'])
assert_match '10154534111016407', data['object_id']
assert_match 'https://www.facebook.com/54212446406/posts/10154534111016407', m.url
end
test "should parse facebook user post" do
url = 'https://www.facebook.com/dina.hawary/posts/10158416884740321'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_equal 'item', data['type']
assert_equal 'facebook', data['provider']
end
test "should parse facebook url with colon mark" do
url = 'https://www.facebook.com/Classic.mou/posts/666508790193454:0'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_equal 'item', data['type']
assert_equal 'facebook', data['provider']
end
test "should parse Facebook post from media set" do
url = 'https://www.facebook.com/media/set?set=a.10154534110871407.1073742048.54212446406&type=3'
m = create_media url: url
data = m.as_json
assert_match '54212446406_10154534110871407', data['uuid']
assert_match '54212446406', data['user_uuid']
assert_match '10154534110871407', data['object_id']
assert_match url, m.url
end
test "should support facebook pattern with pg" do
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407'
data = m.as_json
assert_equal 'item', data['type']
assert_match '54212446406_10154534110871407', data['uuid']
assert_match(/En el Museo Serralves de Oporto/, data['text'])
assert_match '54212446406', data['user_uuid']
assert_match 'Mariano Rajoy Brey', data['author_name']
assert_match '10154534110871407', data['object_id']
assert_match 'https://www.facebook.com/pages/category/Politician/Mariano-Rajoy-Brey-54212446406/photos/', m.url
end
test "should support facebook pattern with album" do
m = create_media url: 'https://www.facebook.com/album.php?fbid=10154534110871407&id=54212446406&aid=1073742048'
data = m.as_json
assert_match '10154534110871407_10154534110871407', data['uuid']
assert_nil data['error']
assert_match 'https://www.facebook.com/media/set?set=a.10154534110871407', m.url
end
test "should get facebook data from original_url when url fails" do
Media.any_instance.stubs(:url).returns('https://www.facebook.com/Mariano-Rajoy-Brey-54212446406/photos')
Media.any_instance.stubs(:original_url).returns('https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407')
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos'
data = m.as_json
assert_match '54212446406_10154534110871407', data['uuid']
assert_match(/Militante del Partido Popular/, data['text'])
assert_match '54212446406', data['user_uuid']
assert_match 'Mariano', data['author_name']
assert_match '10154534110871407', data['object_id']
Media.any_instance.unstub(:url)
Media.any_instance.unstub(:original_url)
end
test "should store data of post returned by oembed" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028416870556238'
oembed = m.as_json['raw']['oembed']
assert oembed.is_a? Hash
assert !oembed.empty?
end
test "should store oembed data of a facebook post" do
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501.1073741829.456182634511888/942167619246718/?type=3&theater'
data = m.as_json
assert data['raw']['oembed'].is_a? Hash
assert_match /facebook.com/, data['oembed']['provider_url']
assert_equal "facebook", data['oembed']['provider_name'].downcase
end
test "should store oembed data of a facebook page" do
m = create_media url: 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
data = m.as_json
assert data['raw']['oembed'].is_a? Hash
assert_match 'Meedan', data['oembed']['author_name']
assert_match 'Meedan', data['oembed']['title']
end
test "should parse Facebook post from page photo" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334.28784.128791873878954/1096134023811396/?type=3&theater'
data = m.as_json
assert_match /quoted.pictures/, data['title'].downcase
assert_match 'quoted.pictures', data['username']
assert_match /quoted.pictures/, data['author_name'].downcase
assert !data['author_url'].blank?
assert !data['picture'].blank?
assert_nil data['error']
end
test "should parse facebook url without identified pattern as item" do
url = 'https://www.facebook.com/Bimbo.Memories/photos/pb.235404669918505.-2207520000.1481570271./1051597428299221/?type=3&theater'
m = create_media url: url
data = m.as_json
assert_equal 'item', data['type']
assert_match /Bimbo/, data['title']
assert_not_nil data['description']
assert_not_nil data['published_at']
assert_match 'Bimbo', data['author_name']
assert_match 'Bimbo', data['username']
assert_match /facebook.com\/(235404669918505|Bimbo.Memories)/, data['author_url']
end
# An album-scoped photo URL with no dedicated pattern should still parse
# as an item whose uuid carries the photo id.
# FIX: dropped the unused `id = Media.get_id url` local — its value was
# never referenced in this test.
test "should parse Facebook photo post within an album url" do
  url = 'https://www.facebook.com/ESCAPE.Egypt/photos/ms.c.eJxNk8d1QzEMBDvyQw79N2ZyaeD7osMIwAZKLGTUViod1qU~;DCBNHcpl8gfMKeR8bz2gH6ABlHRuuHYM6AdywPkEsH~;gqAjxqLAKJtQGZFxw7CzIa6zdF8j1EZJjXRgTzAP43XBa4HfFa1REA2nXugScCi3wN7FZpF5BPtaVDEBqwPNR60O9Lsi0nbDrw3KyaPCVZfqAYiWmZO13YwvSbtygCWeKleh9KEVajW8FfZz32qcUrNgA5wfkA4Xfh004x46d9gdckQt2xR74biSOegwIcoB9OW~_oVIxKML0JWYC0XHvDkdZy0oY5bgjvBAPwdBpRuKE7kZDNGtnTLoCObBYqJJ4Ky5FF1kfh75Gnyl~;Qxqsv.bps.a.1204090389632094.1073742218.423930480981426/1204094906298309/?type=3&theater'
  m = create_media url: url
  data = m.as_json
  assert_equal 'item', data['type']
  assert_match /escape/, data['title'].downcase
  assert_match /1204094906298309/, data['uuid']
end
# A photo permalink inside a photo album should parse as a Facebook item
# carrying the album owner's profile data and no error.
# FIX: dropped the unused `id = Media.get_id url` local — its value was
# never referenced in this test.
test "should parse photo in a photo album" do
  url = 'https://www.facebook.com/nostalgia.y/photos/pb.456182634511888.-2207520000.1484079948./928269767303170/?type=3&theater'
  m = create_media url: url
  data = m.as_json
  assert_equal 'item', data['type']
  assert_equal 'facebook', data['provider']
  assert_match /nostalgia/, data['title'].downcase
  assert_not_nil data['published_at']
  assert_match 'nostalgia.y', data['username']
  assert_match 'nostalgia', data['author_name'].downcase
  assert_match /facebook.com\/(456182634511888|nostalgia.y)/, data['author_url']
  assert_nil data['error']
end
test "should create Facebook post from page photo URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/photos/a.754851877912740.1073741826.749262715138323/896869113711015/?type=3'
data = m.as_json
assert_match /896869113711015/, data['uuid']
assert_match 'teste', data['author_name'].downcase
assert_match 'teste637621352', data['username']
assert_match '896869113711015', data['object_id']
assert_nil data['error']
end
test "should create Facebook post from page photos URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028795030518422'
data = m.as_json
assert_equal '749262715138323_1028795030518422', data['uuid']
assert_match 'This is just a test with many photos.', data['text']
assert_match '749262715138323', data['user_uuid']
assert_match 'Teste', data['author_name']
assert_equal 2, data['media_count']
assert_match '1028795030518422', data['object_id']
assert_equal '11/2015', Time.parse(data['published_at']).strftime("%m/%Y")
end
test "should create Facebook post from user photos URL" do
m = create_media url: 'https://www.facebook.com/nanabhay/posts/10156130657385246?pnref=story'
data = m.as_json
assert_match '10156130657385246', data['uuid']
assert_match 'Such a great evening with friends last night. Sultan Sooud Al-Qassemi has an amazing collecting of modern Arab art. It was a visual tour of the history of the region over the last century.', data['text'].strip
assert_match 'Mohamed Nanabhay', data['author_name']
end
test "should parse Facebook post from user photo URL" do
url = 'https://www.facebook.com/photo.php?fbid=10155150801660195&set=p.10155150801660195&type=1&theater'
id = Media.get_id url
m = create_media url: url
data = m.as_json
assert_match '10155150801660195_10155150801660195', data['uuid']
assert_match '10155150801660195', data['user_uuid']
assert !data['author_name'].blank?
assert_match '10155150801660195', data['object_id']
assert !data['title'].blank?
assert data['error'].nil?
end
tests = YAML.load_file(File.join(Rails.root, 'test', 'data', 'fbposts.yml'))
tests.each do |url, text|
test "should get text from Facebook user post from URL '#{url}'" do
Media.any_instance.stubs(:get_crowdtangle_data)
Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta name='description' content='#{text}'>"))
Media.any_instance.stubs(:follow_redirections)
m = create_media url: url
data = m.as_json
assert_match text, data['text'].gsub(/\s+/, ' ').strip
Media.any_instance.unstub(:get_html)
Media.any_instance.unstub(:follow_redirections)
Media.any_instance.unstub(:get_crowdtangle_data)
end
end
# Multi-photo posts should expose a screenshot 'picture' URL derived from the
# media id, an 'author_picture' URL, a photos Array, and a media_count that
# reflects how many images the post carries.
test "should create Facebook post with picture and photos" do
  url = 'https://www.facebook.com/teste637621352/posts/1028795030518422'
  id = Media.get_id url
  m = create_media url: url
  data = m.as_json
  assert_match /#{id}\/picture/, data['picture']
  assert_kind_of Array, data['photos']
  assert_equal 2, data['media_count']
  assert data['photos'].size > 1, "photos should have more than 1 image"
  url = 'https://www.facebook.com/teste637621352/posts/1035783969819528'
  id = Media.get_id url
  m = create_media url: url
  data = m.as_json
  assert_match /#{id}\/picture/, data['picture']
  assert_match /#{id}\/author_picture/, data['author_picture']
  assert_kind_of Array, data['photos']
  # media_count is an Integer (see assert_equal 2 above). The previous
  # `data['media_count'].size > 1` called Integer#size, which returns the
  # machine-word byte count (always > 1), so the assertion was vacuous.
  # Compare the count itself instead.
  assert data['media_count'] > 1, "media_count should be more than 1 image"
  assert data['photos'].size > 1, "photos should have more than 1 image"
  url = 'https://www.facebook.com/teste637621352/posts/2194142813983632'
  id = Media.get_id url
  m = create_media url: url
  data = m.as_json
  assert_match /#{id}\/picture/, data['picture']
  assert_match /#{id}\/author_picture/, data['author_picture']
  assert_kind_of Array, data['photos']
  assert data['media_count'] > 1, "media_count should be more than 1 image"
  assert data['photos'].size > 1, "photos should have more than 1 image"
end
# The parsed photo URL should come back normalized to the request URL.
# NOTE(review): the result of the first parse is unused; presumably it primes
# crowdtangle/cache state for the second parse — confirm whether it is needed.
test "should get normalized URL from crowdtangle" do
  url = 'https://www.facebook.com/quoted.pictures/posts/3424788280945947'
  m = create_media url: url
  data = m.as_json
  url = 'https://www.facebook.com/quoted.pictures/photos/a.525451984212939/3424788187612623?type=3'
  m = create_media url: url
  data = m.as_json
  assert_equal url, data['url']
end
# as_oembed from cached json: all standard oEmbed fields plus the iframe html
# embedding the original (query-string-preserving) URL at the given size.
test "should return item as oembed" do
  url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
  m = create_media url: url
  data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150)
  assert_match 'Meedan', data['title']
  assert_match 'Meedan', data['author_name']
  assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
  assert_equal 'facebook', data['provider_name']
  assert_equal 'http://www.facebook.com', data['provider_url']
  assert_equal 300, data['width']
  assert_equal 150, data['height']
  assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
  assert_not_nil data['thumbnail_url']
end
# Same expectations when no cached data is passed and the Media instance
# itself is handed to as_oembed (forcing a fresh parse).
test "should return item as oembed when data is not on cache" do
  url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
  m = create_media url: url
  data = Media.as_oembed(nil, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
  assert_match 'Meedan', data['title']
  assert_match 'Meedan', data['author_name']
  assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
  assert_equal 'facebook', data['provider_name']
  assert_equal 'http://www.facebook.com', data['provider_url']
  assert_equal 300, data['width']
  assert_equal 150, data['height']
  assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
  assert_not_nil data['thumbnail_url']
end
# Same expectations when cached data exists but its 'raw' key was dropped.
test "should return item as oembed when data is on cache and raw key is missing" do
  url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
  m = create_media url: url
  json_data = m.as_json
  json_data.delete('raw')
  data = Media.as_oembed(json_data, "http://pender.org/medias.html?url=#{url}", 300, 150)
  assert_match 'Meedan', data['title']
  assert_match 'Meedan', data['author_name']
  assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
  assert_equal 'facebook', data['provider_name']
  assert_equal 'http://www.facebook.com', data['provider_url']
  assert_equal 300, data['width']
  assert_equal 150, data['height']
  assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
  assert_not_nil data['thumbnail_url']
end
# With the page HTML stubbed to expose only og:title, oEmbed output should
# still carry title/author/provider fields.
test "should return item as oembed when the page has oembed url" do
  url = 'https://www.facebook.com/teste637621352/posts/1028416870556238'
  Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML("<meta property='og:title' content='Teste'>"))
  m = create_media url: url
  data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
  assert_match /Teste/, data['title']
  assert_match 'Teste', data['author_name']
  assert_match /facebook.com\//, data['author_url']
  assert_equal 'facebook', data['provider_name']
  assert_match /https?:\/\/www.facebook.com/, data['provider_url']
  Media.any_instance.unstub(:get_html)
end
# l.facebook.com redirect links are not real posts: no embed html is produced.
test "should not use Facebook embed if is a link to redirect" do
  url = 'https://l.facebook.com/l.php?u=https://hindi.indiatvnews.com/paisa/business-1-07-cr-new-taxpayers-added-dropped-filers-down-at-25-22-lakh-in-fy18-630914&h=AT1WAU-mDHKigOgFNrUsxsS2doGO0_F5W9Yck7oYUx-IsYAHx8JqyHwO02-N0pX8UOlcplZO50px8mkTA1XNyKig8Z2CfX6t3Sh0bHtO9MYPtWqacCm6gOXs5lbC6VGMLjDALNXZ6vg&s=1'
  m = create_media url: url
  data = m.as_json
  assert_match 'Leaving Facebook', data['author_name']
  assert_equal '', data['html']
end
# A share should link back to the original post and inherit its image; the
# original post itself must not report an 'original_post'.
test "should get image from original post if is a shared content" do
  original_url = 'https://www.facebook.com/dcc1968/posts/1538584976252118'
  original_id = Media.get_id(original_url)
  m = create_media url: original_url.to_s
  data = m.as_json
  assert_nil data.dig('original_post')
  url = 'https://www.facebook.com/danielafeitosa/posts/1862242233833668'
  id = Media.get_id(url)
  m = create_media url: url.to_s
  data = m.as_json
  assert_match /facebook.com\/dcc1968/, data.dig('original_post')
  assert data['photos'].any? { |p| p =~ /1538581556252460_5832184448275185664/ }, "photos should include the original image"
end
# Commented out until issue #8563 is fixed.
# test "should not get original post if it's already parsing the original post" do
# m = create_media url: 'https://www.facebook.com/groups/1863694297275556/permalink/2193768444268138/'
# data = m.as_json
# original_post = data.dig('original_post')
# assert_not_nil original_post
# original = Media.new url: original_post
# assert_nil original.as_json['original_post']
# end
# external_id should be extracted from the og:url post id.
test "should have external id for post" do
  Media.any_instance.stubs(:doc).returns(Nokogiri::HTML("<meta property='og:url' content='https://www.facebook.com/ironmaiden/posts/10156071020577051'>"))
  m = create_media url: 'https://www.facebook.com/ironmaiden/posts/10156071020577051'
  data = m.as_json
  assert_match '10156071020577051', data['external_id']
  Media.any_instance.unstub(:doc)
end
# Category pages get a "<name> on Facebook" style title.
test "should parse Facebook category page" do
  m = create_media url: 'https://www.facebook.com/pages/category/Society---Culture-Website/PoporDezamagit/photos/'
  data = m.as_json
  assert_match 'Popor dezamagit on Facebook', data[:title]
end
# Deleted/nonexistent posts: empty html plus a structured error with code
# and message.
test "should add not found error and return empty html" do
  urls = ['https://www.facebook.com/danielafeitosa/posts/2074906892567200', 'https://www.facebook.com/caiosba/posts/8457689347638947']
  urls.each do |url|
    m = create_media url: url
    data = m.as_json
    assert_equal '', data[:html]
    assert_not_nil data[:error][:code]
    assert_not_nil data[:error][:message]
  end
end
# Login-walled posts: empty html and the LOGIN_REQUIRED error code.
test "should add login required error and return empty html" do
  m = create_media url: 'https://www.facebook.com/caiosba/posts/2914211445293757'
  data = m.as_json
  assert_equal '', data[:html]
  assert_equal 'Login required to see this profile', data[:error][:message]
  assert_equal LapisConstants::ErrorCodes::const_get('LOGIN_REQUIRED'), data[:error][:code]
end
# Group permalinks should resolve the group name into the title and keep the
# permalink path in the normalized URL.
test "should get the group name when parsing group post" do
  url = 'https://www.facebook.com/groups/memetics.hacking/permalink/1580570905320222/'
  m = Media.new url: url
  data = m.as_json
  assert_no_match "Not Identified", data['title']
  assert !data['description'].blank?
  assert_match 'permalink/1580570905320222/', data['url']
end
# Public page post: published_at should be the exact post timestamp.
test "should parse page post date from public page profile" do
  url = 'https://www.facebook.com/nytimes/posts/10152441141079999'
  m = Media.new url: url
  data = m.as_json
  assert_equal '2020-09-04T21:25:04.000+00:00', data['published_at']
end
# Public person profiles: exact timestamps for two real posts, then a stubbed
# DOM exercising get_facebook_published_time_from_html's data-utime fallback.
test "should parse post date from public person profile" do
  url = 'https://www.facebook.com/marc.smolowitz/posts/10158161767684331'
  m = Media.new url: url
  data = m.as_json
  assert_equal '2020-09-04T22:57:41.000+00:00', data['published_at']
  url = 'https://www.facebook.com/julien.caidos/posts/10158477528272170'
  m = Media.new url: url
  data = m.as_json
  assert_equal '2020-09-03T11:01:21.000+00:00', data['published_at']
  url = 'https://example.com'
  Media.any_instance.stubs(:doc).returns(Nokogiri::HTML('<div class="_5pcr userContentWrapper"><abbr data-utime="1599260261" class="_5ptz"><span class="timestampContent" id="js_1">4 de setembro de 2020</span></abbr></div>'))
  m = Media.new url: url
  assert_equal Time.at(1599260261), m.get_facebook_published_time_from_html
  Media.any_instance.unstub(:doc)
end
# Public group permalink should surface the group description.
test "should parse post from public group" do
  url = 'https://www.facebook.com/groups/memetics.hacking/permalink/1580570905320222/'
  m = Media.new url: url
  data = m.as_json
  assert_match /This group is a gathering for those interested in exploring belief systems/, data['description']
end
# With crowdtangle disabled and HTML stubbed, the full post_message text —
# including the later paragraphs — should land in data['description'].
test "should get full text of Facebook post" do
  Media.any_instance.stubs(:get_crowdtangle_data)
  url = 'https://www.facebook.com/ironmaiden/posts/10157024746512051'
  html = "<div data-testid='post_message' class='_5pbx userContent'>
  <p>Legacy of the Beast Touring Update 2020/21</p>
  <p> I hope you and your loved ones are staying safe and well, wherever you may be, and my continued thanks to you all for bearing with us so patiently.</p>
  <p> Due to the continuing health issues Worldwide around Covid-19 we regretfully inform you that Iron Maiden will now not be playing any concerts until June 2021.</p>
  <p> However, we are now in a position to give you details of our touring plans in respect to those shows we had hoped to play this year.</p>
  </div>"
  Media.any_instance.stubs(:get_html).returns(Nokogiri::HTML(html))
  Media.any_instance.stubs(:follow_redirections)
  m = Media.new url: url
  data = m.as_json
  assert_match /However, we are now in a position to give you details of our touring plans in respect to those shows we had hoped to play this year/, data['description']
  Media.any_instance.unstub(:get_html)
  Media.any_instance.unstub(:follow_redirections)
  Media.any_instance.unstub(:get_crowdtangle_data)
end
# A 302 to the login page must not replace the media's URL: the original post
# URL is kept even though the HEAD request redirects.
test "should not change url when redirected to login page" do
  url = 'https://www.facebook.com/ugmhmyanmar/posts/2850282508516442'
  redirection_to_login_page = 'https://www.facebook.com/login/?next=https%3A%2F%2Fwww.facebook.com%2Fugmhmyanmar%2Fposts%2F2850282508516442'
  response = 'mock'; response.stubs(:code).returns('302')
  response.stubs(:header).returns({ 'location' => redirection_to_login_page })
  response_login_page = 'mock'; response_login_page.stubs(:code).returns('200')
  Media.stubs(:request_url).with(url, 'Head').returns(response)
  Media.stubs(:request_url).with(redirection_to_login_page, 'Head').returns(response_login_page)
  m = create_media url: url
  assert_equal url, m.url
  Media.unstub(:request_url)
end
end
|
require 'test_helper'
class GamingObjectTest < ActiveSupport::TestCase
  # The base STI model should expose the shared attribute accessors.
  test "GamingObject attributes" do
    gaming_object = GamingObject.new
    assert_respond_to gaming_object, :name
    assert_respond_to gaming_object, :description
    assert_respond_to gaming_object, :type
    assert_respond_to gaming_object, :image_path
  end
  # Map and Character are stored via single-table inheritance, discriminated
  # by the 'type' column.
  test "should have STI map & character" do
    Map.create(name: "mymap", description: "test")
    Character.create(name: "mycharacter", description: "test")
    assert_equal ["mymap"], GamingObject.where(type: "Map").pluck(:name)
    assert_equal ["mycharacter"], GamingObject.where(type: "Character").pluck(:name)
  end
  # NOTE(review): the tip is built without a 'category' attribute; if Tip
  # validates presence of category, this save fails silently and the pluck
  # returns []. The revised version of this test adds category — confirm
  # against the Tip model's validations.
  test "has_many tips" do
    map = Map.create(name: "mymap", description: "test")
    map.tips.build(title:"mytip", description: "test").save
    assert_equal ["mytip"], map.tips.pluck(:title)
  end
end
Fix test for the GamingObject has_many :tips association (supply the required tip category)
require 'test_helper'
class GamingObjectTest < ActiveSupport::TestCase
  # Every shared STI attribute must have an accessor on the base model.
  test "GamingObject attributes" do
    subject = GamingObject.new
    [:name, :description, :type, :image_path].each do |attribute|
      assert_respond_to subject, attribute
    end
  end

  # Map and Character rows live in the same table, discriminated by 'type'.
  test "should have STI map & character" do
    Map.create(name: "mymap", description: "test")
    Character.create(name: "mycharacter", description: "test")
    assert_equal ["mymap"], GamingObject.where(type: "Map").pluck(:name)
    assert_equal ["mycharacter"], GamingObject.where(type: "Character").pluck(:name)
  end

  # A tip saved through the association (with its required category) is
  # retrievable from the owning map.
  test "has_many tips" do
    owner = Map.create(name: "mymap", description: "test")
    owner.tips.build(title: "mytip", description: "test", category: "as").save
    assert_equal ["mytip"], owner.tips.pluck(:title)
  end
end
|
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require './test/replica_sets/rs_test_helper'
# Integration tests for connecting to a MongoDB replica set, including node
# failure, step-down, and connection-string / MONGODB_URI configuration.
class ConnectTest < Test::Unit::TestCase
  def setup
    # Preserve MONGODB_URI so tests that set it cannot leak into later tests.
    @old_mongodb_uri = ENV['MONGODB_URI']
    ensure_rs
  end

  def teardown
    @rs.restart_killed_nodes
    @conn.close if defined?(@conn) && @conn
    ENV['MONGODB_URI'] = @old_mongodb_uri
  end

  # TODO: test connect timeout.

  # The deprecated Connection.multi API should still yield a working
  # ReplSetConnection (wrapped in silently to suppress the deprecation).
  def test_connect_with_deprecated_multi
    silently do
      @conn = Connection.multi([[@rs.host, @rs.ports[0]], [@rs.host, @rs.ports[1]]], :name => @rs.name)
    end
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end

  # A replica-set name mismatch must raise ReplicaSetConnectionError.
  def test_connect_bad_name
    assert_raise_error(ReplicaSetConnectionError, "-wrong") do
      @conn = ReplSetConnection.new(build_seeds(3), :name => @rs.name + "-wrong")
    end
  end

  def test_connect_with_primary_node_killed
    @rs.kill_primary

    # Because we're killing the primary and trying to connect right away,
    # this is going to fail right away.
    assert_raise_error(ConnectionFailure, "Failed to connect to primary node") do
      @conn = ReplSetConnection.new build_seeds(3)
    end

    # This allows the secondary to come up as a primary
    rescue_connection_failure do
      @conn = ReplSetConnection.new build_seeds(3)
    end
  end

  def test_connect_with_secondary_node_killed
    @rs.kill_secondary

    rescue_connection_failure do
      @conn = ReplSetConnection.new build_seeds(3)
    end
    assert @conn.connected?
  end

  def test_connect_with_third_node_killed
    @rs.kill(@rs.get_node_from_port(@rs.ports[2]))

    rescue_connection_failure do
      @conn = ReplSetConnection.new build_seeds(3)
    end
    assert @conn.connected?
  end

  # After replSetStepDown the connection notices the failure on the next
  # operation, reports disconnected, then recovers once a new primary exists.
  def test_connect_with_primary_stepped_down
    @conn = ReplSetConnection.new build_seeds(3)
    @conn[MONGO_TEST_DB]['bar'].save({:a => 1}, {:safe => {:w => 3}})
    assert @conn[MONGO_TEST_DB]['bar'].find_one

    primary = Mongo::Connection.new(@conn.primary_pool.host, @conn.primary_pool.port)
    assert_raise Mongo::ConnectionFailure do
      primary['admin'].command({:replSetStepDown => 60})
    end
    assert @conn.connected?

    assert_raise Mongo::ConnectionFailure do
      @conn[MONGO_TEST_DB]['bar'].find_one
    end
    assert !@conn.connected?

    rescue_connection_failure do
      @conn[MONGO_TEST_DB]['bar'].find_one
    end
  end

  def test_save_with_primary_stepped_down
    @conn = ReplSetConnection.new build_seeds(3)
    primary = Mongo::Connection.new(@conn.primary_pool.host, @conn.primary_pool.port)

    # Adding force=true to avoid 'no secondaries within 10 seconds of my optime' errors
    step_down_command = BSON::OrderedHash.new
    step_down_command[:replSetStepDown] = 60
    step_down_command[:force]           = true
    assert_raise Mongo::ConnectionFailure do
      primary['admin'].command(step_down_command)
    end

    rescue_connection_failure do
      @conn[MONGO_TEST_DB]['bar'].save({:a => 1}, {:safe => {:w => 3}})
    end
  end

  def test_connect_with_connection_string
    @conn = Connection.from_uri("mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name}")
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end

  def test_connect_with_connection_string_in_env_var
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name}"
    @conn = ReplSetConnection.new
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end

  def test_connect_with_connection_string_in_implicit_mongodb_uri
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name}"
    @conn = Connection.from_uri
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end

  def test_connect_with_new_seed_format
    @conn = ReplSetConnection.new build_seeds(3)
    assert @conn.connected?
  end

  def test_connect_with_old_seed_format
    silently do
      @conn = ReplSetConnection.new([@rs.host, @rs.ports[0]], [@rs.host, @rs.ports[1]], [@rs.host, @rs.ports[2]])
    end
    assert @conn.connected?
  end

  # Options embedded in the URI (safe/w/fsync/slaveok) should be applied.
  def test_connect_with_full_connection_string
    @conn = Connection.from_uri("mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name};safe=true;w=2;fsync=true;slaveok=true")
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
    assert_equal 2, @conn.safe[:w]
    assert @conn.safe[:fsync]
    assert @conn.read_pool
  end

  def test_connect_with_full_connection_string_in_env_var
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name};safe=true;w=2;fsync=true;slaveok=true"
    @conn = ReplSetConnection.new
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
    assert_equal 2, @conn.safe[:w]
    assert @conn.safe[:fsync]
    assert @conn.read_pool
  end

  # Explicit constructor options must take precedence over MONGODB_URI.
  def test_connect_options_override_env_var
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name};safe=true;w=2;fsync=true;slaveok=true"
    @conn = ReplSetConnection.new({:safe => false})
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
    # Fixed argument order: assert_equal takes (expected, actual), matching
    # every other assert_equal in this file.
    assert_equal false, @conn.safe
  end
end
minor: Additional testing for ReplSetConnection connecting via ENV
$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require './test/replica_sets/rs_test_helper'
# Integration tests for connecting to a MongoDB replica set, including node
# failure, step-down, and connection-string / MONGODB_URI configuration.
# This revision additionally verifies the seed list parsed from MONGODB_URI.
class ConnectTest < Test::Unit::TestCase
  def setup
    # Preserve MONGODB_URI so tests that set it cannot leak into later tests.
    @old_mongodb_uri = ENV['MONGODB_URI']
    ensure_rs
  end

  def teardown
    @rs.restart_killed_nodes
    @conn.close if defined?(@conn) && @conn
    ENV['MONGODB_URI'] = @old_mongodb_uri
  end

  # TODO: test connect timeout.

  # The deprecated Connection.multi API should still yield a working
  # ReplSetConnection (wrapped in silently to suppress the deprecation).
  def test_connect_with_deprecated_multi
    silently do
      @conn = Connection.multi([[@rs.host, @rs.ports[0]], [@rs.host, @rs.ports[1]]], :name => @rs.name)
    end
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end

  # A replica-set name mismatch must raise ReplicaSetConnectionError.
  def test_connect_bad_name
    assert_raise_error(ReplicaSetConnectionError, "-wrong") do
      @conn = ReplSetConnection.new(build_seeds(3), :name => @rs.name + "-wrong")
    end
  end

  def test_connect_with_primary_node_killed
    @rs.kill_primary

    # Because we're killing the primary and trying to connect right away,
    # this is going to fail right away.
    assert_raise_error(ConnectionFailure, "Failed to connect to primary node") do
      @conn = ReplSetConnection.new build_seeds(3)
    end

    # This allows the secondary to come up as a primary
    rescue_connection_failure do
      @conn = ReplSetConnection.new build_seeds(3)
    end
  end

  def test_connect_with_secondary_node_killed
    @rs.kill_secondary

    rescue_connection_failure do
      @conn = ReplSetConnection.new build_seeds(3)
    end
    assert @conn.connected?
  end

  def test_connect_with_third_node_killed
    @rs.kill(@rs.get_node_from_port(@rs.ports[2]))

    rescue_connection_failure do
      @conn = ReplSetConnection.new build_seeds(3)
    end
    assert @conn.connected?
  end

  # After replSetStepDown the connection notices the failure on the next
  # operation, reports disconnected, then recovers once a new primary exists.
  def test_connect_with_primary_stepped_down
    @conn = ReplSetConnection.new build_seeds(3)
    @conn[MONGO_TEST_DB]['bar'].save({:a => 1}, {:safe => {:w => 3}})
    assert @conn[MONGO_TEST_DB]['bar'].find_one

    primary = Mongo::Connection.new(@conn.primary_pool.host, @conn.primary_pool.port)
    assert_raise Mongo::ConnectionFailure do
      primary['admin'].command({:replSetStepDown => 60})
    end
    assert @conn.connected?

    assert_raise Mongo::ConnectionFailure do
      @conn[MONGO_TEST_DB]['bar'].find_one
    end
    assert !@conn.connected?

    rescue_connection_failure do
      @conn[MONGO_TEST_DB]['bar'].find_one
    end
  end

  def test_save_with_primary_stepped_down
    @conn = ReplSetConnection.new build_seeds(3)
    primary = Mongo::Connection.new(@conn.primary_pool.host, @conn.primary_pool.port)

    # Adding force=true to avoid 'no secondaries within 10 seconds of my optime' errors
    step_down_command = BSON::OrderedHash.new
    step_down_command[:replSetStepDown] = 60
    step_down_command[:force]           = true
    assert_raise Mongo::ConnectionFailure do
      primary['admin'].command(step_down_command)
    end

    rescue_connection_failure do
      @conn[MONGO_TEST_DB]['bar'].save({:a => 1}, {:safe => {:w => 3}})
    end
  end

  def test_connect_with_connection_string
    @conn = Connection.from_uri("mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name}")
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
  end

  # The URI in MONGODB_URI should be parsed into the exact host/port seeds.
  def test_connect_with_connection_string_in_env_var
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name}"
    @conn = ReplSetConnection.new
    assert @conn.is_a?(ReplSetConnection)
    assert_equal 2, @conn.seeds.length
    assert_equal @rs.host, @conn.seeds[0][0]
    assert_equal @rs.host, @conn.seeds[1][0]
    assert_equal @rs.ports[0], @conn.seeds[0][1]
    assert_equal @rs.ports[1], @conn.seeds[1][1]
    assert @conn.connected?
  end

  # Connection.from_uri with no argument should fall back to MONGODB_URI and
  # also pick up the replica-set name.
  def test_connect_with_connection_string_in_implicit_mongodb_uri
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name}"
    @conn = Connection.from_uri
    assert @conn.is_a?(ReplSetConnection)
    assert_equal 2, @conn.seeds.length
    assert_equal @rs.host, @conn.seeds[0][0]
    assert_equal @rs.host, @conn.seeds[1][0]
    assert_equal @rs.ports[0], @conn.seeds[0][1]
    assert_equal @rs.ports[1], @conn.seeds[1][1]
    assert_equal @rs.name, @conn.replica_set_name
    assert @conn.connected?
  end

  def test_connect_with_new_seed_format
    @conn = ReplSetConnection.new build_seeds(3)
    assert @conn.connected?
  end

  def test_connect_with_old_seed_format
    silently do
      @conn = ReplSetConnection.new([@rs.host, @rs.ports[0]], [@rs.host, @rs.ports[1]], [@rs.host, @rs.ports[2]])
    end
    assert @conn.connected?
  end

  # Options embedded in the URI (safe/w/fsync/slaveok) should be applied.
  def test_connect_with_full_connection_string
    @conn = Connection.from_uri("mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name};safe=true;w=2;fsync=true;slaveok=true")
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
    assert_equal 2, @conn.safe[:w]
    assert @conn.safe[:fsync]
    assert @conn.read_pool
  end

  def test_connect_with_full_connection_string_in_env_var
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name};safe=true;w=2;fsync=true;slaveok=true"
    @conn = ReplSetConnection.new
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
    assert_equal 2, @conn.safe[:w]
    assert @conn.safe[:fsync]
    assert @conn.read_pool
  end

  # Explicit constructor options must take precedence over MONGODB_URI.
  def test_connect_options_override_env_var
    ENV['MONGODB_URI'] = "mongodb://#{@rs.host}:#{@rs.ports[0]},#{@rs.host}:#{@rs.ports[1]}?replicaset=#{@rs.name};safe=true;w=2;fsync=true;slaveok=true"
    @conn = ReplSetConnection.new({:safe => false})
    assert @conn.is_a?(ReplSetConnection)
    assert @conn.connected?
    # Fixed argument order: assert_equal takes (expected, actual), matching
    # every other assert_equal in this file.
    assert_equal false, @conn.safe
  end
end
|
Commit the new test suite file
(cherry picked from commit c30d1d09edb2544081cf38e0c4e66bbc3f3240a8)
# The Inspec reference, with examples and extensive documentation, can be
# found at http://inspec.io/docs/reference/resources/
# The rabbitmq-server package must be installed on the node.
describe package('rabbitmq-server') do
  it { should be_installed }
end
# The broker service must be running.
describe service('rabbitmq-server') do
  it { should be_running }
end
# A diagnostics ping against the local node must succeed (exit 0).
# NOTE(review): HOSTNAME is set explicitly — presumably so rabbitmq-diagnostics
# resolves the local node name correctly; confirm against the cookbook.
describe command('HOSTNAME=$(hostname) rabbitmq-diagnostics ping') do
  its(:exit_status) { should eq 0 }
end
# The systemd drop-in must raise the open-file-descriptor limit to 54000.
describe file('/etc/systemd/system/rabbitmq-server.service.d/limits.conf') do
  it { should be_file }
  its('owner') { should eq 'root' }
  its('group') { should eq 'root' }
  its('content') { should match(%r{LimitNOFILE=54000}) }
end
# The main RabbitMQ config file must exist and be root-owned.
describe file('/etc/rabbitmq/rabbitmq.config') do
  it { should be_file }
  its('owner') { should eq 'root' }
  its('group') { should eq 'root' }
end
|
require File.dirname(__FILE__) + '/helper'
class TestFramecurveValidator < Test::Unit::TestCase
def test_should_error_out_with_malformed_input_to_parse_and_validate
v = Framecurve::Validator.new
io = StringIO.new("foobar")
v.parse_and_validate(io)
assert v.any_errors?
assert_equal ["Malformed line \"foobar\" at offset 6, line 1"], v.errors
end
def test_should_not_error_out_with_good_input_to_parse_and_validate
v = Framecurve::Validator.new
io = StringIO.new("# Nice framecurve\r\n1\t146.0")
v.parse_and_validate(io)
assert !v.any_errors?
end
def test_should_try_to_open_file_at_path_if_string_passed_to_parse_and_validate
v = Framecurve::Validator.new
assert_raise(Errno::ENOENT) do
v.parse_and_validate("/tmp/some_file.framecurve.txt")
end
end
def test_should_record_filename_error_with_improper_extension
File.open("wrong.extension", "wb"){|f| f.write("# This might have been\r\n1\t123.45") }
begin
v = Framecurve::Validator.new
v.parse_and_validate("wrong.extension")
assert v.any_errors?
assert_equal ["The framecurve file has to have the .framecurve.txt double extension, but had \".extension\""], v.errors
ensure
File.unlink("wrong.extension")
end
end
def test_should_init_with_empty_errors_and_warnings
v = Framecurve::Validator.new
assert !v.any_errors?
assert !v.any_warnings?
assert_equal [], v.errors
assert_equal [], v.warnings
end
def test_should_error_out_with_empty
v = Framecurve::Validator.new
v.validate([])
assert v.any_errors?
assert_equal ["The framecurve did not contain any lines at all",
"The framecurve did not contain any frame correlation records"], v.errors
end
def test_should_error_out_without_actual_tuples
c = Framecurve::Curve.new( Framecurve::Comment.new("Only text") )
v = Framecurve::Validator.new
v.validate(c)
assert v.any_errors?
assert_equal ["The framecurve did not contain any frame correlation records"], v.errors
end
def test_should_error_out_with_dupe_frames
c = Framecurve::Curve.new( Framecurve::Tuple.new(10, 123.4), Framecurve::Tuple.new(10, 123.4) )
v = Framecurve::Validator.new
v.validate(c)
assert v.any_errors?
assert_equal ["The framecurve contains the same frame (10) twice or more (2 times)"], v.errors
end
def test_should_error_out_with_improper_sequencing
c = Framecurve::Curve.new( Framecurve::Tuple.new(10, 123.4), Framecurve::Tuple.new(1, 123.4) )
v = Framecurve::Validator.new
v.validate(c)
assert !v.ok?
assert v.any_errors?
assert_equal ["The frame sequencing is out of order (expected [1, 10] but got [10, 1]). The framecurve spec mandates that frames are recorded sequentially"], v.errors
end
def test_should_error_out_with_linebreaks_in_comment
c = Framecurve::Curve.new( Framecurve::Comment.new("Foo bar \r\n"), Framecurve::Tuple.new(10, 123.4))
v = Framecurve::Validator.new
v.validate(c)
assert !v.ok?
assert v.any_errors?
assert_equal ["The comment at line 1 contains a line break"], v.errors
end
def test_should_error_out_with_neg_source_and_dest_values
c = Framecurve::Curve.new( Framecurve::Tuple.new(-10, 123.4), Framecurve::Tuple.new(1, -345.67) )
v = Framecurve::Validator.new
v.validate(c)
assert v.any_errors?
errs = ["The line 1 had it's at_frame value (-10) below 1. The spec mandates at_frame >= 1.",
"The line 2 had a use_frame_of_source value (-345.67000) below 0. The spec mandates use_frame_of_source >= 0."]
assert !v.ok?
assert_equal errs, v.errors
end
def test_parse_from_err_bad_extension
v = Framecurve::Validator.new
v.parse_and_validate(File.dirname(__FILE__) + "/fixtures/framecurves/incorrect.extension")
assert !v.ok?
assert_equal ["The framecurve file has to have the .framecurve.txt double extension, but had \".extension\""], v.errors
end
def test_parse_from_err_neg_frames
v = Framecurve::Validator.new
v.parse_and_validate(File.dirname(__FILE__) + "/fixtures/framecurves/err-neg-frames.framecurve.txt")
assert !v.ok?
assert_equal ["The line 3 had it's at_frame value (-1) below 1. The spec mandates at_frame >= 1."], v.errors
end
def test_parse_from_err_no_tuples
v = Framecurve::Validator.new
v.parse_and_validate(File.dirname(__FILE__) + "/fixtures/framecurves/err-no-tuples.framecurve.txt")
assert !v.ok?
assert_equal ["The framecurve did not contain any frame correlation records"], v.errors
end
def test_should_warn_without_preamble_url
c = Framecurve::Curve.new( Framecurve::Tuple.new(10, 123.4))
v = Framecurve::Validator.new
v.validate(c)
assert v.any_warnings?
assert !v.ok?
assert_equal "It is recommended that a framecurve starts with a comment with the specification URL", v.warnings[0]
end
def test_should_warn_without_preamble_headers
c = Framecurve::Curve.new( Framecurve::Comment.new("http://framecurve.org/specification-v1"), Framecurve::Tuple.new(10, 123.4))
v = Framecurve::Validator.new
v.validate(c)
assert !v.ok?
assert v.any_warnings?
assert_equal "It is recommended for the second comment to provide a column header", v.warnings[0]
end
# A curve with the full recommended preamble (spec URL + column header
# comment) and at least one tuple validates clean.
def test_should_parse_well
  c = Framecurve::Curve.new(
    Framecurve::Comment.new("http://framecurve.org/specification-v1"),
    Framecurve::Comment.new("at_frame\tuse_frame_of_source"),
    Framecurve::Tuple.new(10, 123.4)
  )
  v = Framecurve::Validator.new
  v.validate(c)
  assert v.ok?
end
end
OK, this is a cheat, but still.
require File.dirname(__FILE__) + '/helper'
# Validator test suite: covers parse_and_validate over IO objects and
# file paths, plus structural validation of in-memory curves
# (sequencing, duplicates, negative values, comments, preamble).
class TestFramecurveValidator < Test::Unit::TestCase
  # Unparseable input surfaces as a recorded error, not an exception.
  def test_should_error_out_with_malformed_input_to_parse_and_validate
    v = Framecurve::Validator.new
    io = StringIO.new("foobar")
    v.parse_and_validate(io)
    assert v.any_errors?
    assert_equal ["Malformed line \"foobar\" at offset 6, line 1"], v.errors
  end
  # Well-formed comment + tuple input passes cleanly.
  def test_should_not_error_out_with_good_input_to_parse_and_validate
    v = Framecurve::Validator.new
    io = StringIO.new("# Nice framecurve\r\n1\t146.0")
    v.parse_and_validate(io)
    assert !v.any_errors?
  end
  # A String argument is treated as a path; missing files raise ENOENT.
  def test_should_try_to_open_file_at_path_if_string_passed_to_parse_and_validate
    v = Framecurve::Validator.new
    assert_raise(Errno::ENOENT) do
      v.parse_and_validate("/tmp/some_file.framecurve.txt")
    end
  end
  # The extension check also applies to real on-disk files; the scratch
  # file is always removed in the ensure branch.
  def test_should_record_filename_error_with_improper_extension
    File.open("wrong.extension", "wb"){|f| f.write("# This might have been\r\n1\t123.45") }
    begin
      v = Framecurve::Validator.new
      v.parse_and_validate("wrong.extension")
      assert v.any_errors?
      assert_equal ["The framecurve file has to have the .framecurve.txt double extension, but had \".extension\""], v.errors
    ensure
      File.unlink("wrong.extension")
    end
  end
  # A fresh validator starts with no errors or warnings.
  def test_should_init_with_empty_errors_and_warnings
    v = Framecurve::Validator.new
    assert !v.any_errors?
    assert !v.any_warnings?
    assert_equal [], v.errors
    assert_equal [], v.warnings
  end
  # An empty collection is both "no lines" and "no tuples".
  def test_should_error_out_with_empty
    v = Framecurve::Validator.new
    v.validate([])
    assert v.any_errors?
    assert_equal ["The framecurve did not contain any lines at all",
      "The framecurve did not contain any frame correlation records"], v.errors
  end
  # Comments alone do not satisfy the tuple requirement.
  def test_should_error_out_without_actual_tuples
    c = Framecurve::Curve.new( Framecurve::Comment.new("Only text") )
    v = Framecurve::Validator.new
    v.validate(c)
    assert v.any_errors?
    assert_equal ["The framecurve did not contain any frame correlation records"], v.errors
  end
  # The same at_frame may not occur twice.
  def test_should_error_out_with_dupe_frames
    c = Framecurve::Curve.new( Framecurve::Tuple.new(10, 123.4), Framecurve::Tuple.new(10, 123.4) )
    v = Framecurve::Validator.new
    v.validate(c)
    assert v.any_errors?
    assert_equal ["The framecurve contains the same frame (10) twice or more (2 times)"], v.errors
  end
  # at_frame values must be recorded in ascending order.
  def test_should_error_out_with_improper_sequencing
    c = Framecurve::Curve.new( Framecurve::Tuple.new(10, 123.4), Framecurve::Tuple.new(1, 123.4) )
    v = Framecurve::Validator.new
    v.validate(c)
    assert !v.ok?
    assert v.any_errors?
    assert_equal ["The frame sequencing is out of order (expected [1, 10] but got [10, 1]). The framecurve spec mandates that frames are recorded sequentially"], v.errors
  end
  # Comment text may not contain line breaks.
  def test_should_error_out_with_linebreaks_in_comment
    c = Framecurve::Curve.new( Framecurve::Comment.new("Foo bar \r\n"), Framecurve::Tuple.new(10, 123.4))
    v = Framecurve::Validator.new
    v.validate(c)
    assert !v.ok?
    assert v.any_errors?
    assert_equal ["The comment at line 1 contains a line break"], v.errors
  end
  # Negative at_frame and use_frame_of_source each get their own error.
  # Messages are matched verbatim (including the library's "it's").
  def test_should_error_out_with_neg_source_and_dest_values
    c = Framecurve::Curve.new( Framecurve::Tuple.new(-10, 123.4), Framecurve::Tuple.new(1, -345.67) )
    v = Framecurve::Validator.new
    v.validate(c)
    assert v.any_errors?
    errs = ["The line 1 had it's at_frame value (-10) below 1. The spec mandates at_frame >= 1.",
      "The line 2 had a use_frame_of_source value (-345.67000) below 0. The spec mandates use_frame_of_source >= 0."]
    assert !v.ok?
    assert_equal errs, v.errors
  end
  # Fixture with a wrong extension.
  def test_parse_from_err_bad_extension
    v = Framecurve::Validator.new
    v.parse_and_validate(File.dirname(__FILE__) + "/fixtures/framecurves/incorrect.extension")
    assert !v.ok?
    assert_equal ["The framecurve file has to have the .framecurve.txt double extension, but had \".extension\""], v.errors
  end
  # Fixture containing a negative at_frame.
  def test_parse_from_err_neg_frames
    v = Framecurve::Validator.new
    v.parse_and_validate(File.dirname(__FILE__) + "/fixtures/framecurves/err-neg-frames.framecurve.txt")
    assert !v.ok?
    assert_equal ["The line 3 had it's at_frame value (-1) below 1. The spec mandates at_frame >= 1."], v.errors
  end
  # Fixture with comments but no correlation records.
  def test_parse_from_err_no_tuples
    v = Framecurve::Validator.new
    v.parse_and_validate(File.dirname(__FILE__) + "/fixtures/framecurves/err-no-tuples.framecurve.txt")
    assert !v.ok?
    assert_equal ["The framecurve did not contain any frame correlation records"], v.errors
  end
  # Missing specification-URL preamble produces a warning and not-ok.
  def test_should_warn_without_preamble_url
    c = Framecurve::Curve.new( Framecurve::Tuple.new(10, 123.4))
    v = Framecurve::Validator.new
    v.validate(c)
    assert v.any_warnings?
    assert !v.ok?
    assert v.warnings.include?("It is recommended that a framecurve starts with a comment with the specification URL")
  end
  # URL present but no column-header comment: second-comment warning.
  def test_should_warn_without_preamble_headers
    c = Framecurve::Curve.new( Framecurve::Comment.new("http://framecurve.org/specification-v1"), Framecurve::Tuple.new(10, 123.4))
    v = Framecurve::Validator.new
    v.validate(c)
    assert !v.ok?
    assert v.any_warnings?
    assert_equal "It is recommended for the second comment to provide a column header", v.warnings[0]
  end
  # Full preamble + tuple validates clean.
  def test_should_parse_well
    c = Framecurve::Curve.new(
      Framecurve::Comment.new("http://framecurve.org/specification-v1"),
      Framecurve::Comment.new("at_frame\tuse_frame_of_source"),
      Framecurve::Tuple.new(10, 123.4)
    )
    v = Framecurve::Validator.new
    v.validate(c)
    assert v.ok?
  end
end
require 'spec_helper'
# Spec for g5-rbenv::default: a wrapper around the community rbenv
# cookbook that installs a global ruby, bundler, and adjusts perms.
describe 'g5-rbenv::default' do
  let(:chef_run) do
    # ChefSpec::Runner was renamed to ChefSpec::SoloRunner in ChefSpec
    # 4.1 and later removed; SoloRunner is the drop-in replacement.
    # node.set is a deprecated alias for node.normal, so write to the
    # normal attribute level directly.
    ChefSpec::SoloRunner.new do |node|
      node.normal['rbenv']['ruby_version'] = ruby_version
    end.converge(described_recipe)
  end
  let(:ruby_version) { '1.9.3' }
  it 'includes the rbenv default recipe' do
    expect(chef_run).to include_recipe('rbenv::default')
  end
  it 'includes the ruby_build recipe' do
    expect(chef_run).to include_recipe('rbenv::ruby_build')
  end
  it 'installs the default global ruby' do
    expect(chef_run).to install_rbenv_ruby(ruby_version).with(global: true)
  end
  it 'installs bundler for the default ruby' do
    expect(chef_run).to install_rbenv_gem('bundler').with(ruby_version: ruby_version)
  end
  it 'sets permissions on the rbenv dir' do
    expect(chef_run).to run_execute('chmod -R 775 /opt/rbenv')
  end
  it 'adds vagrant to the rbenv group' do
    expect(chef_run).to create_group('rbenv').with(members: ['vagrant'])
  end
  it 'uses the overridden template for rbenv.sh' do
    expect(chef_run).to create_template('/etc/profile.d/rbenv.sh').with(
      source: 'rbenv.sh.erb',
      cookbook: 'g5-rbenv'
    )
  end
end
Update unit tests for latest ChefSpec
require 'spec_helper'
# Spec for g5-rbenv::default: a wrapper around the community rbenv
# cookbook that installs a global ruby, bundler, and adjusts perms.
describe 'g5-rbenv::default' do
  let(:chef_run) do
    ChefSpec::SoloRunner.new do |node|
      # node.set is a deprecated alias for node.normal (removed in
      # Chef 13); write to the normal attribute level directly.
      node.normal['rbenv']['ruby_version'] = ruby_version
    end.converge(described_recipe)
  end
  let(:ruby_version) { '1.9.3' }
  it 'includes the rbenv default recipe' do
    expect(chef_run).to include_recipe('rbenv::default')
  end
  it 'includes the ruby_build recipe' do
    expect(chef_run).to include_recipe('rbenv::ruby_build')
  end
  it 'installs the default global ruby' do
    expect(chef_run).to install_rbenv_ruby(ruby_version).with(global: true)
  end
  it 'installs bundler for the default ruby' do
    expect(chef_run).to install_rbenv_gem('bundler').with(ruby_version: ruby_version)
  end
  it 'sets permissions on the rbenv dir' do
    expect(chef_run).to run_execute('chmod -R 775 /opt/rbenv')
  end
  it 'adds vagrant to the rbenv group' do
    expect(chef_run).to create_group('rbenv').with(members: ['vagrant'])
  end
  it 'uses the overridden template for rbenv.sh' do
    expect(chef_run).to create_template('/etc/profile.d/rbenv.sh').with(
      source: 'rbenv.sh.erb',
      cookbook: 'g5-rbenv'
    )
  end
end
|
require 'helper'
require 'date'
module ArelExtensions
module WthAr
class ListTest < Minitest::Test
require 'minitest/pride'
# Establishes the ActiveRecord connection for the adapter selected via
# ENV['DB'], falling back to (jdbc-)sqlite, and installs the shared SQL
# helper functions. Skips the test on platforms where the Oracle
# adapter is unavailable (Rubinius / JRuby).
def connect_db
  ActiveRecord::Base.configurations = YAML.load_file('test/database.yml')
  if ENV['DB'] == 'oracle' && ((defined?(RUBY_ENGINE) && RUBY_ENGINE == "rbx") || (RUBY_PLATFORM == 'java')) # not supported
    @env_db = (RUBY_PLATFORM == 'java' ? "jdbc-sqlite" : 'sqlite')
    skip "Platform not supported"
  else
    @env_db = ENV['DB']
  end
  ActiveRecord::Base.establish_connection(@env_db.try(:to_sym) || (RUBY_PLATFORM == 'java' ? :"jdbc-sqlite" : :sqlite))
  ActiveRecord::Base.default_timezone = :utc
  @cnx = ActiveRecord::Base.connection
  # NOTE(review): =~ yields a match index or nil, not a boolean; the
  # $sqlite global is only ever used truthily, so this is fine.
  $sqlite = @cnx.adapter_name =~ /sqlite/i
  $load_extension_disabled ||= false
  csf = CommonSqlFunctions.new(@cnx)
  csf.add_sql_functions(@env_db)
end
# (Re)creates the throw-away tables used by every test; pre-existing
# copies are dropped first (drop errors ignored on the first run).
def setup_db
  @cnx.drop_table(:user_tests) rescue nil
  @cnx.create_table :user_tests do |t|
    t.column :age, :integer
    t.column :name, :string
    t.column :comments, :text
    t.column :created_at, :date
    t.column :updated_at, :datetime
    t.column :score, :decimal, :precision => 20, :scale => 10
  end
  @cnx.drop_table(:product_tests) rescue nil
  @cnx.create_table :product_tests do |t|
    t.column :price, :decimal, :precision => 20, :scale => 10
  end
end
# Model over the scratch user_tests table built in setup_db.
class User < ActiveRecord::Base
  self.table_name = 'user_tests'
end
# Model over the scratch product_tests table built in setup_db.
class Product < ActiveRecord::Base
  self.table_name = 'product_tests'
end
# Seeds eight fixture rows (ages 5..25, one nil, one -42; blank and
# CSV-style comments) and memoizes a per-user scope plus the Arel
# attributes used throughout the assertions. Runs before every test;
# the tables are rebuilt each time.
def setup
  d = Date.new(2016, 5, 23)
  connect_db
  setup_db
  u = User.create :age => 5, :name => "Lucas", :created_at => d, :score => 20.16, :updated_at => Time.utc(2014, 3, 3, 12, 42, 0)
  @lucas = User.where(:id => u.id)
  u = User.create :age => 15, :name => "Sophie", :created_at => d, :score => 20.16
  @sophie = User.where(:id => u.id)
  u = User.create :age => 20, :name => "Camille", :created_at => d, :score => -20.16
  @camille = User.where(:id => u.id)
  u = User.create :age => 21, :name => "Arthur", :created_at => d, :score => 65.62
  @arthur = User.where(:id => u.id)
  u = User.create :age => 23, :name => "Myung", :created_at => d, :score => 20.16, :comments => ' '
  @myung = User.where(:id => u.id)
  u = User.create :age => 25, :name => "Laure", :created_at => d, :score => 20.16
  @laure = User.where(:id => u.id)
  u = User.create :age => nil, :name => "Test", :created_at => d, :score => 1.62
  @test = User.where(:id => u.id)
  u = User.create :age => -42, :name => "Negatif", :comments => '1,22,3,42,2', :created_at => d, :updated_at => d.to_time, :score => 0.17
  @neg = User.where(:id => u.id)
  @age = User.arel_table[:age]
  @name = User.arel_table[:name]
  @score = User.arel_table[:score]
  @created_at = User.arel_table[:created_at]
  @updated_at = User.arel_table[:updated_at]
  @comments = User.arel_table[:comments]
  @price = Product.arel_table[:price]
  @ut = User.arel_table
  @pt = Product.arel_table
end
# Drops both scratch tables after every test; setup recreates them.
def teardown
  @cnx.drop_table(:user_tests)
  @cnx.drop_table(:product_tests)
end
# Evaluates +node+ through SQL by selecting it as "res" on +scope+ and
# returning that value from the first row.
def t(scope, node)
  first_row = scope.select(node.as('res')).first
  first_row.res
end
# Math Functions
def test_classical_arel
assert_in_epsilon 42.16, t(@laure, @score + 22), 0.01
end
def test_abs
assert_equal 42, t(@neg, @age.abs)
assert_equal 20.16, t(@camille, @score.abs)
assert_equal 14, t(@laure, (@age - 39).abs)
assert_equal 28, t(@laure, (@age - 39).abs + (@age - 39).abs)
end
def test_ceil
# skip "Sqlite version can't load extension for ceil" if $sqlite && $load_extension_disabled
assert_equal 2, t(@test, @score.ceil) # 1.62
assert_equal(-20, t(@camille, @score.ceil)) # -20.16
assert_equal(-20, t(@camille, (@score - 0.5).ceil)) # -20.16
assert_equal 63, t(@arthur, @age.ceil + 42)
end
def test_floor
# skip "Sqlite version can't load extension for floor" if $sqlite && $load_extension_disabled
assert_equal 0, t(@neg, @score.floor)
assert_equal 1, t(@test, @score.floor) # 1.62
assert_equal(-9, t(@test, (@score - 10).floor)) # 1.62
assert_equal 42, t(@arthur, @score.floor - 23)
end
def test_rand
assert 42 != User.select(Arel.rand.as('res')).first.res
assert 0 <= User.select(Arel.rand.abs.as('res')).first.res
assert_equal 8, User.order(Arel.rand).limit(50).count
end
def test_round
assert_equal 1, User.where(@age.round(0).eq(5.0)).count
assert_equal 0, User.where(@age.round(-1).eq(6.0)).count
assert_equal 66, t(@arthur, @score.round)
assert_in_epsilon 67.6, t(@arthur, @score.round(1) + 2), 0.01
end
# SUM aggregate mixed with arithmetic inside and around the aggregate.
def test_sum
  if @env_db == 'mssql'
    # NOTE(review): skip aborts the test right here, so the four
    # asserts below never run on mssql; they document the intended
    # expectations until the ordering issue is resolved.
    skip "SQL Server forces order?" # TODO
    assert_equal 68, User.select((@age.sum + 1).as("res"), User.arel_table[:id].sum).take(50).reorder(@age).first.res
    assert_equal 134, User.reorder(nil).select((@age.sum + @age.sum).as("res"), User.arel_table[:id].sum).take(50).first.res
    assert_equal 201, User.reorder(nil).select(((@age * 3).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
    assert_equal 4009, User.reorder(nil).select(((@age * @age).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
  else
    assert_equal 68, User.select((@age.sum + 1).as("res")).take(50).first.res
    assert_equal 134, User.select((@age.sum + @age.sum).as("res")).take(50).first.res
    assert_equal 201, User.select(((@age * 3).sum).as("res")).take(50).first.res
    assert_equal 4009, User.select(((@age * @age).sum).as("res")).take(50).first.res
  end
end
# String Functions
def test_concat
assert_equal 'Camille Camille', t(@camille, @name + ' ' + @name)
assert_equal 'Laure 2', t(@laure, @name + ' ' + 2)
assert_equal 'Test Laure', t(@laure, Arel::Nodes.build_quoted('Test ') + @name)
skip "TODO: find a way... to do group_concat/listagg in SQL Server" if @env_db == 'mssql'
if @env_db == 'postgresql'
assert_equal "Lucas Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat(' '))
else
assert_equal "Lucas Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat(' '))
end
end
def test_length
assert_equal 7, t(@camille, @name.length)
assert_equal 7, t(@camille, @name.length.round.abs)
assert_equal 42, t(@laure, @name.length + 37)
end
def test_locate
skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
assert_equal 1, t(@camille, @name.locate("C"))
assert_equal 0, t(@lucas, @name.locate("z"))
assert_equal 5, t(@lucas, @name.locate("s"))
end
def test_substring
assert_equal 'C', t(@camille, @name.substring(1, 1))
if @env_db == 'oracle'
assert_nil(t(@lucas, @name.substring(42)))
else
assert_equal('', t(@lucas, @name.substring(42)))
end
assert_equal 'Lu', t(@lucas, @name.substring(1,2))
assert_equal 'C', t(@camille, @name[0, 1])
assert_equal 'C', t(@camille, @name[0])
if @env_db == 'oracle'
assert_nil(t(@lucas, @name[42]))
else
assert_equal('', t(@lucas, @name[42]))
end
assert_equal 'Lu', t(@lucas, @name[0,2])
assert_equal 'Lu', t(@lucas, @name[0..1])
end
def test_find_in_set
skip "Sqlite version can't load extension for find_in_set" if $sqlite && $load_extension_disabled
skip "SQL Server does not know about FIND_IN_SET" if @env_db == 'mssql'
assert_equal 5, t(@neg, @comments & 2)
assert_equal 0, t(@neg, @comments & 6) # not found
end
def test_string_comparators
skip "Oracle can't use math operators to compare strings" if @env_db == 'oracle' # use GREATEST ?
skip "SQL Server can't use math operators to compare strings" if @env_db == 'mssql' # use GREATEST ?
if @env_db == 'postgresql' # may return real boolean
assert t(@neg, @name >= 'Mest') == true || t(@neg, @name >= 'Mest') == 't' # depends of ar version
assert t(@neg, @name <= (@name + 'Z')) == true || t(@neg, @name <= (@name + 'Z')) == 't'
else
assert_equal 1, t(@neg, @name >= 'Mest')
assert_equal 1, t(@neg, @name <= (@name + 'Z'))
end
end
def test_regexp_not_regexp
skip "Sqlite version can't load extension for regexp" if $sqlite && $load_extension_disabled
skip "SQL Server does not know about REGEXP without extensions" if @env_db == 'mssql'
assert_equal 1, User.where(@name =~ '^M').count
assert_equal 6, User.where(@name !~ '^L').count
assert_equal 1, User.where(@name =~ /^M/).count
assert_equal 6, User.where(@name !~ /^L/).count
end
def test_imatches
assert_equal 1, User.where(@name.imatches('m%')).count
assert_equal 4, User.where(@name.imatches_any(['L%', '%e'])).count
assert_equal 6, User.where(@name.idoes_not_match('L%')).count
end
def test_replace
assert_equal "LucaX", t(@lucas, @name.replace("s", "X"))
assert_equal "replace", t(@lucas, @name.replace(@name, "replace"))
end
def test_replace_once
skip "TODO"
# skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
assert_equal "LuCas", t(@lucas, @name.substring(1, @name.locate('c') - 1) + 'C' + @name.substring(@name.locate('c') + 1, @name.length))
end
def test_soundex
skip "Sqlite version can't load extension for soundex" if $sqlite && $load_extension_disabled
skip "PostgreSql version can't load extension for soundex" if @env_db == 'postgresql'
assert_equal "C540", t(@camille, @name.soundex)
assert_equal 8, User.where(@name.soundex.eq(@name.soundex)).count
end
def test_change_case
assert_equal "myung", t(@myung, @name.downcase)
assert_equal "MYUNG", t(@myung, @name.upcase)
assert_equal "myung", t(@myung, @name.upcase.downcase)
end
def test_trim
assert_equal "Myung", t(@myung, @name.trim)
assert_equal "Myung", t(@myung, @name.trim.ltrim.rtrim)
assert_equal "Myun", t(@myung, @name.rtrim("g"))
assert_equal "yung", t(@myung, @name.ltrim("M"))
assert_equal "yung", t(@myung, (@name + "M").trim("M"))
skip "Oracle does not accept multi char trim" if @env_db == 'oracle'
assert_equal "", t(@myung, @name.rtrim(@name))
end
def test_blank
if @env_db == 'postgresql'
assert_includes [false, 'f'], t(@myung, @name.blank) # depends of adapter
assert_includes [true, 't'], t(@myung, @name.not_blank) # depends of adapter
assert_includes [true, 't'], t(@myung, @comments.blank)
assert_includes [false, 'f'], t(@myung, @comments.not_blank)
end
assert_equal 0, @myung.where(@name.blank).count
assert_equal 1, @myung.where(@name.not_blank).count
assert_equal 1, @myung.where(@comments.blank).count
assert_equal 0, @neg.where(@comments.blank).count
assert_equal 1, @neg.where(@comments.not_blank).count
assert_equal 0, @myung.where(@comments.not_blank).count
assert_equal 'false', t(@myung, @name.blank.then('true', 'false'))
assert_equal 'true', t(@myung, @name.not_blank.then('true', 'false'))
assert_equal 'true', t(@myung, @comments.blank.then('true', 'false'))
assert_equal 'false', t(@myung, @comments.not_blank.then('true', 'false'))
assert_equal 'false', t(@neg, @comments.blank.then('true', 'false'))
assert_equal 'true', t(@neg, @comments.not_blank.then('true', 'false'))
end
def test_format
assert_equal '2016-05-23', t(@lucas, @created_at.format('%Y-%m-%d'))
skip "SQL Server does not accept any format" if @env_db == 'mssql'
assert_equal '2014/03/03 12:42:00', t(@lucas, @updated_at.format('%Y/%m/%d %H:%M:%S'))
end
def test_coalesce
assert_equal 'Camille concat', t(@camille, @name.coalesce(nil, "default") + ' concat')
assert_equal ' ', t(@myung, @comments.coalesce("Myung").coalesce('ignored'))
assert_equal 'Laure', t(@laure, @comments.coalesce("Laure"))
if @env_db == 'oracle'
assert_nil t(@laure, @comments.coalesce(""))
else
assert_equal('', t(@laure, @comments.coalesce("")))
end
if @env_db == 'postgresql'
assert_equal 100, t(@test, @age.coalesce(100))
assert_equal "Camille", t(@camille, @name.coalesce(nil, "default"))
assert_equal 20, t(@test, @age.coalesce(nil, 20))
else
assert_equal "Camille", t(@camille, @name.coalesce(nil, '20'))
assert_equal 20, t(@test, @age.coalesce(nil, 20))
end
end
# Comparators
def test_number_comparator
assert_equal 2, User.where(@age < 6).count
assert_equal 2, User.where(@age <= 10).count
assert_equal 3, User.where(@age > 20).count
assert_equal 4, User.where(@age >= 20).count
assert_equal 1, User.where(@age > 5).where(@age < 20).count
end
def test_date_comparator
d = Date.new(2016, 5, 23)
assert_equal 0, User.where(@created_at < d).count
assert_equal 8, User.where(@created_at >= d).count
end
def test_date_duration
#Year
assert_equal 2016, t(@lucas, @created_at.year).to_i
assert_equal 0, User.where(@created_at.year.eq("2012")).count
#Month
assert_equal 5, t(@camille, @created_at.month).to_i
assert_equal 8, User.where(@created_at.month.eq("05")).count
#Week
assert_equal(@env_db == 'mssql' ? 22 : 21, t(@arthur, @created_at.week).to_i)
assert_equal 8, User.where(@created_at.month.eq("05")).count
#Day
assert_equal 23, t(@laure, @created_at.day).to_i
assert_equal 0, User.where(@created_at.day.eq("05")).count
skip "manage DATE" if @env_db == 'oracle'
#Hour
assert_equal 0, t(@laure, @created_at.hour).to_i
assert_equal 12, t(@lucas, @updated_at.hour).to_i
#Minute
assert_equal 0, t(@laure, @created_at.minute).to_i
assert_equal 42, t(@lucas, @updated_at.minute).to_i
#Second
assert_equal 0, t(@laure, @created_at.second).to_i
assert_equal 0, t(@lucas, @updated_at.second).to_i
end
def test_datetime_diff
assert_equal 0, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 42)).to_i
if @env_db == 'oracle' && Arel::VERSION.to_i > 6 # in rails 5, result is multiplied by 24*60*60 = 86400...
assert_equal 42 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
assert_equal(-3600 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
else
assert_equal 42, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
assert_equal(-3600, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
if @env_db == 'mssql' || @env_db == 'oracle' # can't select booleans
assert_equal 0, @lucas.where((@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1).count
else
assert_includes [nil, 0, 'f', false], t(@lucas, (@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1)
end
end
end
# TODO; cast types
def test_cast_types
skip "not implemented yet"
assert_equal true, t(@arthur, @score =~ /22/)
end
def test_is_null
assert_equal "Test", User.where(@age.is_null).select(@name).first.name
end
def test_math_plus
d = Date.new(1997, 6, 15)
#Concat String
assert_equal "SophiePhan", t(@sophie, @name + "Phan")
assert_equal "Sophie2", t(@sophie, @name + 2)
assert_equal "Sophie1997-06-15", t(@sophie, @name + d)
assert_equal "Sophie15", t(@sophie, @name + @age)
assert_equal "SophieSophie", t(@sophie, @name + @name)
#FIXME: should work as expected in Oracle
assert_equal "Sophie2016-05-23", t(@sophie, @name + @created_at) unless @env_db == 'oracle'
#concat Integer
assert_equal 1, User.where((@age + 10).eq(33)).count
assert_equal 1, User.where((@age + "1").eq(6)).count
assert_equal 1, User.where((@age + @age).eq(10)).count
#concat Date
# puts((User.arel_table[:created_at] + 1).as("res").to_sql.inspect)
assert_equal "2016-05-24", t(@myung, @created_at + 1).to_date.to_s
assert_equal "2016-05-25", t(@myung, @created_at + 2.day).to_date.to_s
end
def test_math_minus
d = Date.new(2016, 5, 20)
#Datediff
assert_equal 8, User.where((@created_at - @created_at).eq(0)).count
assert_equal 3, @laure.select((@created_at - d).as("res")).first.res.abs.to_i
#Substraction
assert_equal 0, User.where((@age - 10).eq(50)).count
assert_equal 0, User.where((@age - "10").eq(50)).count
# assert_equal 0, User.where((@age - 9.5).eq(50.5)).count # should work: TODO
assert_equal 0, User.where((@age - "9.5").eq(50.5)).count
end
def test_wday
d = Date.new(2016, 6, 26)
assert_equal(@env_db == 'oracle' || @env_db == 'mssql' ? 2 : 1, t(@myung, @created_at.wday).to_i) # monday
assert_equal 0, User.select(d.wday).as("res").first.to_i
end
# Boolean functions
def test_boolean_functions
assert_equal 1, @laure.where(
(@score.round > 19).⋀(@score.round < 21).⋁(@score.round(1) >= 20.1)
).count
end
# Union operator
# UNION (distinct) built with the + operator on Arel select managers.
def test_union_operator
  assert_equal 3, User.from((@ut.project(@age).where(@age > 22) + @ut.project(@age).where(@age < 0)).as('my_union')).count
  # Fixed: the last predicate on the next two lines used `@age = 21`
  # (Ruby assignment) instead of `@age == 21`, which clobbered @age
  # with the Integer 21 and passed a constant to where() rather than a
  # comparison node. With `==`, the distinct unions yield exactly the
  # asserted 3 ({20,23,21}) and 2 ({20,21}) rows.
  assert_equal 3, User.from((@ut.project(@age).where(@age == 20) + @ut.project(@age).where(@age == 23) + @ut.project(@age).where(@age == 21)).as('my_union')).count
  assert_equal 2, User.from((@ut.project(@age).where(@age == 20) + @ut.project(@age).where(@age == 20) + @ut.project(@age).where(@age == 21)).as('my_union')).count
end
end
end
end
agnostic tests for union
require 'helper'
require 'date'
module ArelExtensions
module WthAr
class ListTest < Minitest::Test
require 'minitest/pride'
# Establishes the ActiveRecord connection for the adapter selected via
# ENV['DB'], falling back to (jdbc-)sqlite, and installs the shared SQL
# helper functions. Skips the test on platforms where the Oracle
# adapter is unavailable (Rubinius / JRuby).
def connect_db
  ActiveRecord::Base.configurations = YAML.load_file('test/database.yml')
  if ENV['DB'] == 'oracle' && ((defined?(RUBY_ENGINE) && RUBY_ENGINE == "rbx") || (RUBY_PLATFORM == 'java')) # not supported
    @env_db = (RUBY_PLATFORM == 'java' ? "jdbc-sqlite" : 'sqlite')
    skip "Platform not supported"
  else
    @env_db = ENV['DB']
  end
  ActiveRecord::Base.establish_connection(@env_db.try(:to_sym) || (RUBY_PLATFORM == 'java' ? :"jdbc-sqlite" : :sqlite))
  ActiveRecord::Base.default_timezone = :utc
  @cnx = ActiveRecord::Base.connection
  # NOTE(review): =~ yields a match index or nil, not a boolean; the
  # $sqlite global is only ever used truthily, so this is fine.
  $sqlite = @cnx.adapter_name =~ /sqlite/i
  $load_extension_disabled ||= false
  csf = CommonSqlFunctions.new(@cnx)
  csf.add_sql_functions(@env_db)
end
def setup_db
@cnx.drop_table(:user_tests) rescue nil
@cnx.create_table :user_tests do |t|
t.column :age, :integer
t.column :name, :string
t.column :comments, :text
t.column :created_at, :date
t.column :updated_at, :datetime
t.column :score, :decimal, :precision => 20, :scale => 10
end
@cnx.drop_table(:product_tests) rescue nil
@cnx.create_table :product_tests do |t|
t.column :price, :decimal, :precision => 20, :scale => 10
end
end
class User < ActiveRecord::Base
self.table_name = 'user_tests'
end
class Product < ActiveRecord::Base
self.table_name = 'product_tests'
end
def setup
d = Date.new(2016, 5, 23)
connect_db
setup_db
u = User.create :age => 5, :name => "Lucas", :created_at => d, :score => 20.16, :updated_at => Time.utc(2014, 3, 3, 12, 42, 0)
@lucas = User.where(:id => u.id)
u = User.create :age => 15, :name => "Sophie", :created_at => d, :score => 20.16
@sophie = User.where(:id => u.id)
u = User.create :age => 20, :name => "Camille", :created_at => d, :score => -20.16
@camille = User.where(:id => u.id)
u = User.create :age => 21, :name => "Arthur", :created_at => d, :score => 65.62
@arthur = User.where(:id => u.id)
u = User.create :age => 23, :name => "Myung", :created_at => d, :score => 20.16, :comments => ' '
@myung = User.where(:id => u.id)
u = User.create :age => 25, :name => "Laure", :created_at => d, :score => 20.16
@laure = User.where(:id => u.id)
u = User.create :age => nil, :name => "Test", :created_at => d, :score => 1.62
@test = User.where(:id => u.id)
u = User.create :age => -42, :name => "Negatif", :comments => '1,22,3,42,2', :created_at => d, :updated_at => d.to_time, :score => 0.17
@neg = User.where(:id => u.id)
@age = User.arel_table[:age]
@name = User.arel_table[:name]
@score = User.arel_table[:score]
@created_at = User.arel_table[:created_at]
@updated_at = User.arel_table[:updated_at]
@comments = User.arel_table[:comments]
@price = Product.arel_table[:price]
@ut = User.arel_table
@pt = Product.arel_table
end
def teardown
@cnx.drop_table(:user_tests)
@cnx.drop_table(:product_tests)
end
# Evaluates +node+ through SQL by selecting it as "res" on +scope+ and
# returning that value from the first row.
def t(scope, node)
  first_row = scope.select(node.as('res')).first
  first_row.res
end
# Math Functions
def test_classical_arel
assert_in_epsilon 42.16, t(@laure, @score + 22), 0.01
end
def test_abs
assert_equal 42, t(@neg, @age.abs)
assert_equal 20.16, t(@camille, @score.abs)
assert_equal 14, t(@laure, (@age - 39).abs)
assert_equal 28, t(@laure, (@age - 39).abs + (@age - 39).abs)
end
def test_ceil
# skip "Sqlite version can't load extension for ceil" if $sqlite && $load_extension_disabled
assert_equal 2, t(@test, @score.ceil) # 1.62
assert_equal(-20, t(@camille, @score.ceil)) # -20.16
assert_equal(-20, t(@camille, (@score - 0.5).ceil)) # -20.16
assert_equal 63, t(@arthur, @age.ceil + 42)
end
def test_floor
# skip "Sqlite version can't load extension for floor" if $sqlite && $load_extension_disabled
assert_equal 0, t(@neg, @score.floor)
assert_equal 1, t(@test, @score.floor) # 1.62
assert_equal(-9, t(@test, (@score - 10).floor)) # 1.62
assert_equal 42, t(@arthur, @score.floor - 23)
end
def test_rand
assert 42 != User.select(Arel.rand.as('res')).first.res
assert 0 <= User.select(Arel.rand.abs.as('res')).first.res
assert_equal 8, User.order(Arel.rand).limit(50).count
end
def test_round
assert_equal 1, User.where(@age.round(0).eq(5.0)).count
assert_equal 0, User.where(@age.round(-1).eq(6.0)).count
assert_equal 66, t(@arthur, @score.round)
assert_in_epsilon 67.6, t(@arthur, @score.round(1) + 2), 0.01
end
def test_sum
if @env_db == 'mssql'
skip "SQL Server forces order?" # TODO
assert_equal 68, User.select((@age.sum + 1).as("res"), User.arel_table[:id].sum).take(50).reorder(@age).first.res
assert_equal 134, User.reorder(nil).select((@age.sum + @age.sum).as("res"), User.arel_table[:id].sum).take(50).first.res
assert_equal 201, User.reorder(nil).select(((@age * 3).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
assert_equal 4009, User.reorder(nil).select(((@age * @age).sum).as("res"), User.arel_table[:id].sum).take(50).first.res
else
assert_equal 68, User.select((@age.sum + 1).as("res")).take(50).first.res
assert_equal 134, User.select((@age.sum + @age.sum).as("res")).take(50).first.res
assert_equal 201, User.select(((@age * 3).sum).as("res")).take(50).first.res
assert_equal 4009, User.select(((@age * @age).sum).as("res")).take(50).first.res
end
end
# String Functions
def test_concat
assert_equal 'Camille Camille', t(@camille, @name + ' ' + @name)
assert_equal 'Laure 2', t(@laure, @name + ' ' + 2)
assert_equal 'Test Laure', t(@laure, Arel::Nodes.build_quoted('Test ') + @name)
skip "TODO: find a way... to do group_concat/listagg in SQL Server" if @env_db == 'mssql'
if @env_db == 'postgresql'
assert_equal "Lucas Sophie", t(User.reorder(nil).from(User.select(:name).where(:name => ['Lucas', 'Sophie']).reorder(:name).as('user_tests')), @name.group_concat(' '))
else
assert_equal "Lucas Sophie", t(User.where(:name => ['Lucas', 'Sophie']).reorder(:name), @name.group_concat(' '))
end
end
def test_length
assert_equal 7, t(@camille, @name.length)
assert_equal 7, t(@camille, @name.length.round.abs)
assert_equal 42, t(@laure, @name.length + 37)
end
def test_locate
skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
assert_equal 1, t(@camille, @name.locate("C"))
assert_equal 0, t(@lucas, @name.locate("z"))
assert_equal 5, t(@lucas, @name.locate("s"))
end
def test_substring
assert_equal 'C', t(@camille, @name.substring(1, 1))
if @env_db == 'oracle'
assert_nil(t(@lucas, @name.substring(42)))
else
assert_equal('', t(@lucas, @name.substring(42)))
end
assert_equal 'Lu', t(@lucas, @name.substring(1,2))
assert_equal 'C', t(@camille, @name[0, 1])
assert_equal 'C', t(@camille, @name[0])
if @env_db == 'oracle'
assert_nil(t(@lucas, @name[42]))
else
assert_equal('', t(@lucas, @name[42]))
end
assert_equal 'Lu', t(@lucas, @name[0,2])
assert_equal 'Lu', t(@lucas, @name[0..1])
end
def test_find_in_set
skip "Sqlite version can't load extension for find_in_set" if $sqlite && $load_extension_disabled
skip "SQL Server does not know about FIND_IN_SET" if @env_db == 'mssql'
assert_equal 5, t(@neg, @comments & 2)
assert_equal 0, t(@neg, @comments & 6) # not found
end
def test_string_comparators
skip "Oracle can't use math operators to compare strings" if @env_db == 'oracle' # use GREATEST ?
skip "SQL Server can't use math operators to compare strings" if @env_db == 'mssql' # use GREATEST ?
if @env_db == 'postgresql' # may return real boolean
assert t(@neg, @name >= 'Mest') == true || t(@neg, @name >= 'Mest') == 't' # depends of ar version
assert t(@neg, @name <= (@name + 'Z')) == true || t(@neg, @name <= (@name + 'Z')) == 't'
else
assert_equal 1, t(@neg, @name >= 'Mest')
assert_equal 1, t(@neg, @name <= (@name + 'Z'))
end
end
def test_regexp_not_regexp
skip "Sqlite version can't load extension for regexp" if $sqlite && $load_extension_disabled
skip "SQL Server does not know about REGEXP without extensions" if @env_db == 'mssql'
assert_equal 1, User.where(@name =~ '^M').count
assert_equal 6, User.where(@name !~ '^L').count
assert_equal 1, User.where(@name =~ /^M/).count
assert_equal 6, User.where(@name !~ /^L/).count
end
def test_imatches
assert_equal 1, User.where(@name.imatches('m%')).count
assert_equal 4, User.where(@name.imatches_any(['L%', '%e'])).count
assert_equal 6, User.where(@name.idoes_not_match('L%')).count
end
def test_replace
assert_equal "LucaX", t(@lucas, @name.replace("s", "X"))
assert_equal "replace", t(@lucas, @name.replace(@name, "replace"))
end
def test_replace_once
skip "TODO"
# skip "Sqlite version can't load extension for locate" if $sqlite && $load_extension_disabled
assert_equal "LuCas", t(@lucas, @name.substring(1, @name.locate('c') - 1) + 'C' + @name.substring(@name.locate('c') + 1, @name.length))
end
# SOUNDEX phonetic encoding; also used on both sides of an equality
# predicate (every row's soundex equals itself, hence 8 matches).
def test_soundex
  skip "Sqlite version can't load extension for soundex" if $sqlite && $load_extension_disabled
  skip "PostgreSql version can't load extension for soundex" if @env_db == 'postgresql'
  assert_equal "C540", t(@camille, @name.soundex)
  assert_equal 8, User.where(@name.soundex.eq(@name.soundex)).count
end
# LOWER / UPPER, including chaining upcase.downcase.
def test_change_case
  assert_equal "myung", t(@myung, @name.downcase)
  assert_equal "MYUNG", t(@myung, @name.upcase)
  assert_equal "myung", t(@myung, @name.upcase.downcase)
end
# TRIM / LTRIM / RTRIM, with and without an explicit character argument.
# The final assertion trims the column by its own full value, which
# requires multi-character trim support (not available on Oracle).
def test_trim
  assert_equal "Myung", t(@myung, @name.trim)
  assert_equal "Myung", t(@myung, @name.trim.ltrim.rtrim)
  assert_equal "Myun", t(@myung, @name.rtrim("g"))
  assert_equal "yung", t(@myung, @name.ltrim("M"))
  assert_equal "yung", t(@myung, (@name + "M").trim("M"))
  skip "Oracle does not accept multi char trim" if @env_db == 'oracle'
  assert_equal "", t(@myung, @name.rtrim(@name))
end
# blank / not_blank predicates, used three ways: selected directly
# (PostgreSQL only, where they come back as real/textual booleans),
# inside where-clauses, and as the condition of `then` (SQL CASE).
def test_blank
  if @env_db == 'postgresql'
    assert_includes [false, 'f'], t(@myung, @name.blank) # depends of adapter
    assert_includes [true, 't'], t(@myung, @name.not_blank) # depends of adapter
    assert_includes [true, 't'], t(@myung, @comments.blank)
    assert_includes [false, 'f'], t(@myung, @comments.not_blank)
  end
  assert_equal 0, @myung.where(@name.blank).count
  assert_equal 1, @myung.where(@name.not_blank).count
  assert_equal 1, @myung.where(@comments.blank).count
  assert_equal 0, @neg.where(@comments.blank).count
  assert_equal 1, @neg.where(@comments.not_blank).count
  assert_equal 0, @myung.where(@comments.not_blank).count
  assert_equal 'false', t(@myung, @name.blank.then('true', 'false'))
  assert_equal 'true', t(@myung, @name.not_blank.then('true', 'false'))
  assert_equal 'true', t(@myung, @comments.blank.then('true', 'false'))
  assert_equal 'false', t(@myung, @comments.not_blank.then('true', 'false'))
  assert_equal 'false', t(@neg, @comments.blank.then('true', 'false'))
  assert_equal 'true', t(@neg, @comments.not_blank.then('true', 'false'))
end
# Date/time formatting with strftime-style directives; the time-of-day
# directives are skipped on SQL Server.
def test_format
  assert_equal '2016-05-23', t(@lucas, @created_at.format('%Y-%m-%d'))
  skip "SQL Server does not accept any format" if @env_db == 'mssql'
  assert_equal '2014/03/03 12:42:00', t(@lucas, @updated_at.format('%Y/%m/%d %H:%M:%S'))
end
# COALESCE over strings and integers. Oracle treats '' as NULL, hence the
# assert_nil branch; PostgreSQL is strict about operand types, hence the
# integer-typed defaults in its branch.
def test_coalesce
  assert_equal 'Camille concat', t(@camille, @name.coalesce(nil, "default") + ' concat')
  assert_equal ' ', t(@myung, @comments.coalesce("Myung").coalesce('ignored'))
  assert_equal 'Laure', t(@laure, @comments.coalesce("Laure"))
  if @env_db == 'oracle'
    assert_nil t(@laure, @comments.coalesce(""))
  else
    assert_equal('', t(@laure, @comments.coalesce("")))
  end
  if @env_db == 'postgresql'
    assert_equal 100, t(@test, @age.coalesce(100))
    assert_equal "Camille", t(@camille, @name.coalesce(nil, "default"))
    assert_equal 20, t(@test, @age.coalesce(nil, 20))
  else
    assert_equal "Camille", t(@camille, @name.coalesce(nil, '20'))
    assert_equal 20, t(@test, @age.coalesce(nil, 20))
  end
end
# Comparators
# Numeric comparison operators (<, <=, >, >=) and chained where-clauses.
def test_number_comparator
  assert_equal 2, User.where(@age < 6).count
  assert_equal 2, User.where(@age <= 10).count
  assert_equal 3, User.where(@age > 20).count
  assert_equal 4, User.where(@age >= 20).count
  assert_equal 1, User.where(@age > 5).where(@age < 20).count
end
# Comparing a timestamp column against a Ruby Date.
def test_date_comparator
  d = Date.new(2016, 5, 23)
  assert_equal 0, User.where(@created_at < d).count
  assert_equal 8, User.where(@created_at >= d).count
end
# Extracting date/time parts (year, month, week, day, hour, minute,
# second) both as selected values and inside predicates.
def test_date_duration
  # Year
  assert_equal 2016, t(@lucas, @created_at.year).to_i
  assert_equal 0, User.where(@created_at.year.eq("2012")).count
  # Month
  assert_equal 5, t(@camille, @created_at.month).to_i
  assert_equal 8, User.where(@created_at.month.eq("05")).count
  # Week (SQL Server numbers weeks differently, hence 22 vs 21)
  assert_equal(@env_db == 'mssql' ? 22 : 21, t(@arthur, @created_at.week).to_i)
  # NOTE(review): the next line duplicates the month assertion above —
  # it was probably meant to exercise @created_at.week; confirm and fix.
  assert_equal 8, User.where(@created_at.month.eq("05")).count
  # Day
  assert_equal 23, t(@laure, @created_at.day).to_i
  assert_equal 0, User.where(@created_at.day.eq("05")).count
  skip "manage DATE" if @env_db == 'oracle'
  # Hour
  assert_equal 0, t(@laure, @created_at.hour).to_i
  assert_equal 12, t(@lucas, @updated_at.hour).to_i
  # Minute
  assert_equal 0, t(@laure, @created_at.minute).to_i
  assert_equal 42, t(@lucas, @updated_at.minute).to_i
  # Second
  assert_equal 0, t(@laure, @created_at.second).to_i
  assert_equal 0, t(@lucas, @updated_at.second).to_i
end
# Timestamp subtraction (difference in seconds). On Oracle with Arel >= 7
# the result is expressed in days, so it is scaled by 86400 here.
def test_datetime_diff
  assert_equal 0, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 42)).to_i
  if @env_db == 'oracle' && Arel::VERSION.to_i > 6 # in rails 5, result is multiplied by 24*60*60 = 86400...
    assert_equal 42 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
    assert_equal(-3600 * 86400, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
  else
    assert_equal 42, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 12, 41, 18)).to_i
    assert_equal(-3600, t(@lucas, @updated_at - Time.utc(2014, 3, 3, 13, 42)).to_i)
    if @env_db == 'mssql' || @env_db == 'oracle' # can't select booleans
      assert_equal 0, @lucas.where((@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1).count
    else
      assert_includes [nil, 0, 'f', false], t(@lucas, (@updated_at - Time.utc(2014, 3, 3, 12, 41, 18)) < -1)
    end
  end
end
# TODO; cast types
# Placeholder for type-cast support (regexp match on a numeric column);
# skipped until the feature is implemented.
def test_cast_types
  skip "not implemented yet"
  assert_equal true, t(@arthur, @score =~ /22/)
end
# IS NULL predicate: the first user with a NULL age is expected to be
# the fixture named "Test".
def test_is_null
  assert_equal "Test", User.where(@age.is_null).select(@name).first.name
end
# The `+` operator, polymorphic on the left operand's type:
# string column  + anything  => concatenation,
# integer column + number    => arithmetic addition,
# date column    + integer/duration => date shifted by days.
def test_math_plus
  d = Date.new(1997, 6, 15)
  # Concat with a String column
  assert_equal "SophiePhan", t(@sophie, @name + "Phan")
  assert_equal "Sophie2", t(@sophie, @name + 2)
  assert_equal "Sophie1997-06-15", t(@sophie, @name + d)
  assert_equal "Sophie15", t(@sophie, @name + @age)
  assert_equal "SophieSophie", t(@sophie, @name + @name)
  #FIXME: should work as expected in Oracle
  assert_equal "Sophie2016-05-23", t(@sophie, @name + @created_at) unless @env_db == 'oracle'
  # Addition on an Integer column (string operands are coerced)
  assert_equal 1, User.where((@age + 10).eq(33)).count
  assert_equal 1, User.where((@age + "1").eq(6)).count
  assert_equal 1, User.where((@age + @age).eq(10)).count
  # Day offset on a Date column
  # puts((User.arel_table[:created_at] + 1).as("res").to_sql.inspect)
  assert_equal "2016-05-24", t(@myung, @created_at + 1).to_date.to_s
  assert_equal "2016-05-25", t(@myung, @created_at + 2.day).to_date.to_s
end
# The `-` operator: date - date => day difference, integer column - value
# => arithmetic subtraction (string operands coerced).
def test_math_minus
  d = Date.new(2016, 5, 20)
  # Date difference
  assert_equal 8, User.where((@created_at - @created_at).eq(0)).count
  assert_equal 3, @laure.select((@created_at - d).as("res")).first.res.abs.to_i
  # Subtraction
  assert_equal 0, User.where((@age - 10).eq(50)).count
  assert_equal 0, User.where((@age - "10").eq(50)).count
  # assert_equal 0, User.where((@age - 9.5).eq(50.5)).count # should work: TODO
  assert_equal 0, User.where((@age - "9.5").eq(50.5)).count
end
# Day-of-week extraction. Oracle and SQL Server number weekdays from 1,
# the other adapters from 0 (2016-05-23 is a Monday).
def test_wday
  d = Date.new(2016, 6, 26)
  assert_equal(@env_db == 'oracle' || @env_db == 'mssql' ? 2 : 1, t(@myung, @created_at.wday).to_i) # monday
  # NOTE(review): `.as("res")` is chained on the relation returned by
  # select, not on the wday expression — confirm this is intentional.
  assert_equal 0, User.select(d.wday).as("res").first.to_i
end
# Boolean functions
# Logical AND (⋀) and OR (⋁) operators combining rounded-score predicates.
def test_boolean_functions
  assert_equal 1, @laure.where(
    (@score.round > 19).⋀(@score.round < 21).⋁(@score.round(1) >= 20.1)
  ).count
end
# Union operator
# UNION via the `+` operator on Arel select managers.
# NOTE(review): the whole body is commented out, so this test currently
# asserts nothing — re-enable or remove once union support is settled.
def test_union_operator
  # assert_equal 3, User.from((@ut.project(@age).where(@age > 22) + @ut.project(@age).where(@age < 0)).as('my_union')).count
  # assert_equal 3, User.from((@ut.project(@age).where(@age == 20) + @ut.project(@age).where(@age == 23) + @ut.project(@age).where(@age = 21)).as('my_union')).count
  # assert_equal 2, User.from((@ut.project(@age).where(@age == 20) + @ut.project(@age).where(@age == 20) + @ut.project(@age).where(@age = 21)).as('my_union')).count
end
end
end
end
|
Add a test covering Yao::Volume attribute readers (name, size)
require "time"
require "date"
# Verifies that Yao::Volume exposes the attributes it was constructed
# with through its reader methods.
class TestRole < Test::Unit::TestCase
  def test_volume
    volume = Yao::Volume.new('name' => 'cinder', 'size' => 10)

    # Each key of the construction hash must round-trip through a reader.
    assert_equal('cinder', volume.name)
    assert_equal(10, volume.size)
  end
end
|
* Missed a file to add
svn path=/trunk/kdebindings/korundum/; revision=382645
# Application preferences skeleton backed by KDE's configuration
# framework. The original class defined `initialize(a)` whose body was
# just `super(a)` — a redundant override that forwards its single
# argument unchanged, which is exactly what the inherited constructor
# already does — so it has been removed.
class MyPrefs < KDE::ConfigSkeleton
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "asadify"
  s.version = "0.0.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Albert Peng"]
  s.date = "2013-05-10"
  s.description = "Asadify your site."
  s.email = "albert.peng@cureus.com"
  # Extra files included in the generated RDoc documentation.
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Complete list of files packaged into the gem.
  s.files = [
    ".document",
    ".rvmrc",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/asadify.rb"
  ]
  s.homepage = "http://github.com/holoiii/asadify"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "2.0.3"
  s.summary = "Asadify your site."
  # Jeweler emits these version guards so the gemspec stays loadable
  # under old RubyGems releases that lack the newer dependency API.
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
    else
      s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
    end
  else
    s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
  end
end
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = "asadify"
  s.version = "0.1.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Albert Peng"]
  s.date = "2013-05-10"
  s.description = "Asadify your site."
  s.email = "albert.peng@cureus.com"
  # Extra files included in the generated RDoc documentation.
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  # Complete list of files packaged into the gem (now includes the
  # vendored image/JS assets added for the 0.1.0 release).
  s.files = [
    ".document",
    ".rvmrc",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "asadify.gemspec",
    "lib/asadify.rb",
    "vendor/assets/images/asad-final-scaled.png",
    "vendor/assets/javascripts/asadify.js"
  ]
  s.homepage = "http://github.com/holoiii/asadify"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "2.0.3"
  s.summary = "Asadify your site."
  # Jeweler emits these version guards so the gemspec stays loadable
  # under old RubyGems releases that lack the newer dependency API.
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<railties>, [">= 0"])
      s.add_runtime_dependency(%q<jquery-rails>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
    else
      s.add_dependency(%q<railties>, [">= 0"])
      s.add_dependency(%q<jquery-rails>, [">= 0"])
      s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
    end
  else
    s.add_dependency(%q<railties>, [">= 0"])
    s.add_dependency(%q<jquery-rails>, [">= 0"])
    s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.