CombinedText stringlengths 4 3.42M |
|---|
require 'fileutils'
module Smeg
  # Exports a Smeg site to a static output directory: copies public files
  # and page assets, and (for publish!) compresses JS/CSS and renders pages.
  class Exporter
    # NOTE(review): class variable is shared across any subclasses; a class
    # instance variable would be safer if Exporter were ever subclassed.
    @@path = "output"
    class << self
      # Output directory the exporter writes into (default: "output").
      def path; @@path; end
      def path=(path); @@path = path; end

      # Development build: ensure the output dir exists and copy static files.
      # Does not render pages or compress assets (see #publish!).
      def process!
        setup
        copy_public
        copy_assets
      end

      # Full build: wipe the output dir, re-copy everything, compress JS/CSS,
      # then render the index and all pages.
      def publish!
        teardown
        setup
        copy_assets
        copy_public
        compress_assets
        write_index
        write_pages
      end

      protected

      # Remove the whole output directory.
      def teardown
        Smeg.log.debug "Removing directory: #{path}"
        FileUtils.rm_rf path
      end

      # Ensure the output directory exists.
      def setup
        Smeg.log.debug "Creating directory"
        FileUtils.mkdir_p path
      end

      # Render the "index" page to <path>/index.html.
      def write_index
        Smeg.log.debug "Writing Index"
        File.open("#{path}/index.html", "w") {|file| file.write(Page.find("index").render)}
      end

      # Render every page into the output tree.
      # Assumes page.permalink and page.write_path begin with "/" — TODO confirm.
      def write_pages
        Page.all.each do |page|
          Smeg.log.debug "Writing page: #{page.permalink}"
          FileUtils.mkdir_p("#{path}#{page.permalink}")
          File.open("#{path}#{page.write_path}", "w"){|file| file.write(page.render) }
        end
      end

      # Copy each page's assets (images etc.) into the output tree,
      # preserving each asset's permalink path.
      def copy_assets
        Smeg.log.info "Copying images"
        Page.all.each do |page|
          Smeg.log.info "Copying images for #{page.permalink}"
          page.assets.each do |asset|
            # Create the path to the asset: export path + File.dirname(asset permalink)
            FileUtils.mkdir_p "#{path}#{File.dirname(asset[:path])}"
            # Copy the asset from its disk path to its permalink location
            FileUtils.cp asset[:disk_path], "#{path}#{asset[:path]}"
          end
        end
      end

      # Regenerate CSS from LessCSS sources, then copy everything under
      # public/ into the output directory.
      def copy_public
        generate_css_from_less
        Smeg.log.debug "Copying public files"
        Dir["#{Smeg.root_dir}/public/*"].each {|file| FileUtils.cp_r file, path }
      end

      # Run every exported .js/.css file through the bundled YUI Compressor,
      # overwriting each file in place.
      # NOTE(review): shells out with interpolated paths; paths containing
      # spaces or shell metacharacters would break this command.
      def compress_assets
        yui_compressor = File.expand_path("#{File.dirname(__FILE__)}/../../vendor/yui-compressor/yuicompressor-2.4.2.jar")
        Smeg.log.info "Compressing javascript and stylesheets..."
        Dir["#{path}/**/*.{js,css}"].each do |asset|
          system "java -jar #{yui_compressor} #{File.expand_path(asset)} -o #{File.expand_path(asset)}"
        end
      end

      # Compile every public/**/*.less file to a sibling .css file.
      # Logs (rather than raises) on Less syntax errors.
      def generate_css_from_less
        Dir["#{Smeg.root_dir}/public/**/*.less"].each do |lessfile|
          css = File.open(lessfile) {|f| Less::Engine.new(f) }.to_css
          # Local `path` deliberately shadows the class-level accessor: the
          # .css is written next to its .less source, not to the output dir.
          path = "#{File.dirname(lessfile)}/#{File.basename(lessfile, ".less")}.css"
          File.open(path, "w") {|file| file.write(css) }
        end
      rescue Less::SyntaxError => exception
        Smeg.log.error "LessCSS Syntax error\n\n#{exception.message}"
      end
    end
  end
end
Quietened down the logger
require 'fileutils'
module Smeg
  # Exports a Smeg site to a static output directory. This revision logs
  # one info line per phase instead of one per page (quieter output).
  class Exporter
    # NOTE(review): class variable is shared across any subclasses.
    @@path = "output"
    class << self
      # Output directory the exporter writes into (default: "output").
      def path; @@path; end
      def path=(path); @@path = path; end

      # Development build: static files only; no page rendering/compression.
      def process!
        setup
        copy_public
        copy_assets
      end

      # Full build: wipe, re-copy, compress, then render index and pages.
      def publish!
        teardown
        setup
        copy_assets
        copy_public
        compress_assets
        write_index
        write_pages
      end

      protected

      # Remove the whole output directory.
      def teardown
        Smeg.log.debug "Removing directory: #{path}"
        FileUtils.rm_rf path
      end

      # Ensure the output directory exists.
      def setup
        Smeg.log.debug "Creating directory"
        FileUtils.mkdir_p path
      end

      # Render the "index" page to <path>/index.html.
      def write_index
        Smeg.log.debug "Writing Index"
        File.open("#{path}/index.html", "w") {|file| file.write(Page.find("index").render)}
      end

      # Render every page into the output tree (single summary log line).
      def write_pages
        Smeg.log.info "Writing pages..."
        Page.all.each do |page|
          FileUtils.mkdir_p("#{path}#{page.permalink}")
          File.open("#{path}#{page.write_path}", "w"){|file| file.write(page.render) }
        end
      end

      # Copy each page's assets into the output tree (single summary log line).
      def copy_assets
        Smeg.log.info "Copying images..."
        Page.all.each do |page|
          page.assets.each do |asset|
            # Create the path to the asset: export path + File.dirname(asset permalink)
            FileUtils.mkdir_p "#{path}#{File.dirname(asset[:path])}"
            # Copy the asset from its disk path to its permalink location
            FileUtils.cp asset[:disk_path], "#{path}#{asset[:path]}"
          end
        end
      end

      # Regenerate CSS from LessCSS sources, then copy public/ into output.
      def copy_public
        generate_css_from_less
        Smeg.log.debug "Copying public files"
        Dir["#{Smeg.root_dir}/public/*"].each {|file| FileUtils.cp_r file, path }
      end

      # Run every exported .js/.css through the bundled YUI Compressor in place.
      # NOTE(review): interpolated shell command; breaks on paths with spaces.
      def compress_assets
        yui_compressor = File.expand_path("#{File.dirname(__FILE__)}/../../vendor/yui-compressor/yuicompressor-2.4.2.jar")
        Smeg.log.info "Compressing javascript and stylesheets..."
        Dir["#{path}/**/*.{js,css}"].each do |asset|
          system "java -jar #{yui_compressor} #{File.expand_path(asset)} -o #{File.expand_path(asset)}"
        end
      end

      # Compile every public/**/*.less file to a sibling .css file; logs
      # (rather than raises) on Less syntax errors.
      def generate_css_from_less
        Dir["#{Smeg.root_dir}/public/**/*.less"].each do |lessfile|
          css = File.open(lessfile) {|f| Less::Engine.new(f) }.to_css
          # Local `path` shadows the accessor: .css is written beside its source.
          path = "#{File.dirname(lessfile)}/#{File.basename(lessfile, ".less")}.css"
          File.open(path, "w") {|file| file.write(css) }
        end
      rescue Less::SyntaxError => exception
        Smeg.log.error "LessCSS Syntax error\n\n#{exception.message}"
      end
    end
  end
end
require "httparty"
require "active_support/core_ext/string"
require "active_support/core_ext/hash/except"
module Spooky
  # Client for the Ghost blogging platform's public JSON API (v0.1) on a
  # *.ghost.io blog. Credentials are read from ENV (GHOST_SUBDOMAIN /
  # GHOST_CLIENT_ID / GHOST_CLIENT_SECRET) first, then from attrs.
  class Client
    attr_accessor :subdomain, :client_id, :client_secret, :endpoint

    # @param attrs [Hash] :subdomain, :client_id, :client_secret fallbacks
    # @raise [ArgumentError] when any credential is missing
    def initialize(attrs = {})
      @subdomain = ENV["GHOST_SUBDOMAIN"] || attrs[:subdomain]
      @client_id = ENV["GHOST_CLIENT_ID"] || attrs[:client_id]
      @client_secret = ENV["GHOST_CLIENT_SECRET"] || attrs[:client_secret]
      @endpoint = "https://#{subdomain}.ghost.io/ghost/api/v0.1/"
      check_credentials!
    end

    # Fetch methods.

    # GET <endpoint><resource>/[id]?<params>&client_id=…&client_secret=…
    # and return the parsed JSON body.
    # NOTE(review): with empty options the URL contains "?&client_id" —
    # tolerated by servers but untidy. Values are interpolated without
    # URL-encoding; assumes callers never pass values needing escaping —
    # TODO confirm.
    def fetch_json(resource, options = {})
      url = @endpoint.dup
      url << "#{resource}/"
      if options
        # :id becomes part of the path, not a query parameter.
        clean_options = options[:id] ? options.except(:id) : options
        clean_option_params = clean_options.map { |k, v| "#{k}=#{v}" }.join("&")
        url << "#{options[:id]}?#{clean_option_params}"
      end
      url << "&client_id=#{@client_id}&client_secret=#{@client_secret}"
      HTTParty.get(url).parsed_response
    end

    # Fetch a resource and wrap each JSON object in the matching
    # Spooky::<Type> model class (e.g. :posts -> Spooky::Post).
    # Returns a single object when options[:id] is given, an array
    # otherwise, or nil when the response lacks the resource key.
    def fetch(resource, options = {})
      resource_type = resource.to_s.match(/^(\w+)/)[0]
      response = fetch_json(resource, options)[resource_type]
      object = "Spooky::#{resource_type.classify}".constantize
      if response
        array = response.map { |o| object.send(:new, o) }
        # reduce with no block yields the sole element of a 1-item array.
        options[:id] ? array.reduce : array
      end
    end

    # Same as #fetch but always asks Ghost to embed author, tags and post
    # counts. NOTE(review): mutates the caller's options hash.
    def fetch_with_associations(resource, options = {})
      options[:include] = "author,tags,count.posts"
      fetch(resource, options)
    end

    # Utilities.

    # Total number of pagination pages for a resource.
    def pages(resource, options = {})
      fetch_json(resource, options)["meta"]["pagination"]["pages"]
    end

    # Client level object collection methods: defines #posts/#tags/#users
    # plus *_pages, find_*_by_id and find_*_by_slug helpers.
    [:posts, :tags, :users].each do |object|
      define_method(object) do |options = {}|
        fetch_with_associations(object, options)
      end
      define_method("#{object.to_s.singularize}_pages") do |options = {}|
        pages(object, options)
      end
      define_method("find_#{object.to_s.singularize}_by_id") do |id|
        fetch_with_associations(object, id: id)
      end
      define_method("find_#{object.to_s.singularize}_by_slug") do |slug|
        fetch_with_associations("#{object}/slug", id: slug)
      end
    end

    private

    # Raise unless all three credentials are present.
    def check_credentials!
      creds_check = @subdomain && @client_id && @client_secret
      raise ArgumentError, "Credentials must be initialized" unless creds_check
    end
  end
end
:wrench: Add #find_posts_with_tags(tags) method to Spooky::Client
require "httparty"
require "active_support/core_ext/string"
require "active_support/core_ext/hash/except"
module Spooky
  # Client for the Ghost blogging platform's public JSON API (v0.1).
  # Credentials come from ENV (GHOST_*) first, then from the attrs hash.
  class Client
    attr_accessor :subdomain, :client_id, :client_secret, :endpoint

    # @raise [ArgumentError] when any credential is missing
    def initialize(attrs = {})
      @subdomain = ENV["GHOST_SUBDOMAIN"] || attrs[:subdomain]
      @client_id = ENV["GHOST_CLIENT_ID"] || attrs[:client_id]
      @client_secret = ENV["GHOST_CLIENT_SECRET"] || attrs[:client_secret]
      @endpoint = "https://#{subdomain}.ghost.io/ghost/api/v0.1/"
      check_credentials!
    end

    # Fetch methods.

    # GET the resource and return the parsed JSON body.
    # NOTE(review): no URL-encoding of parameter values; with empty
    # options the URL contains a harmless but untidy "?&client_id".
    def fetch_json(resource, options = {})
      url = @endpoint.dup
      url << "#{resource}/"
      if options
        # :id becomes part of the path, not a query parameter.
        clean_options = options[:id] ? options.except(:id) : options
        clean_option_params = clean_options.map { |k, v| "#{k}=#{v}" }.join("&")
        url << "#{options[:id]}?#{clean_option_params}"
      end
      url << "&client_id=#{@client_id}&client_secret=#{@client_secret}"
      HTTParty.get(url).parsed_response
    end

    # Fetch and wrap JSON objects in the matching Spooky::<Type> class.
    # Single object for options[:id], array otherwise, nil on empty.
    def fetch(resource, options = {})
      resource_type = resource.to_s.match(/^(\w+)/)[0]
      response = fetch_json(resource, options)[resource_type]
      object = "Spooky::#{resource_type.classify}".constantize
      if response
        array = response.map { |o| object.send(:new, o) }
        # reduce with no block yields the sole element of a 1-item array.
        options[:id] ? array.reduce : array
      end
    end

    # #fetch with author/tags/post counts embedded.
    # NOTE(review): mutates the caller's options hash.
    def fetch_with_associations(resource, options = {})
      options[:include] = "author,tags,count.posts"
      fetch(resource, options)
    end

    # Utilities.

    # Total number of pagination pages for a resource.
    def pages(resource, options = {})
      fetch_json(resource, options)["meta"]["pagination"]["pages"]
    end

    # Client level object collection methods: #posts/#tags/#users plus
    # *_pages, find_*_by_id and find_*_by_slug helpers.
    [:posts, :tags, :users].each do |object|
      define_method(object) do |options = {}|
        fetch_with_associations(object, options)
      end
      define_method("#{object.to_s.singularize}_pages") do |options = {}|
        pages(object, options)
      end
      define_method("find_#{object.to_s.singularize}_by_id") do |id|
        fetch_with_associations(object, id: id)
      end
      define_method("find_#{object.to_s.singularize}_by_slug") do |slug|
        fetch_with_associations("#{object}/slug", id: slug)
      end
    end

    # Post specific collection lookup methods

    # Posts matching a Ghost tag filter, e.g. "getting-started" or a
    # comma-separated list; interpolated into a `tags:[...]` filter.
    def find_posts_with_tags(tags)
      fetch_with_associations(:posts, filter: "tags:[#{tags}]")
    end

    private

    # Raise unless all three credentials are present.
    def check_credentials!
      creds_check = @subdomain && @client_id && @client_secret
      raise ArgumentError, "Credentials must be initialized" unless creds_check
    end
  end
end
|
module Spout
  # Gem version, assembled as MAJOR.MINOR.TINY[.BUILD].
  module VERSION #:nodoc:
    MAJOR = 0
    MINOR = 11
    TINY = 0
    BUILD = 'beta1' # 'pre', 'rc', 'rc2', nil
    # compact drops BUILD when it is nil (final releases).
    # Frozen so the exposed version string cannot be mutated in place.
    STRING = [MAJOR, MINOR, TINY, BUILD].compact.join('.').freeze
  end
end
Version bump to 0.11.0.beta2
module Spout
  # Gem version, assembled as MAJOR.MINOR.TINY[.BUILD].
  module VERSION #:nodoc:
    MAJOR = 0
    MINOR = 11
    TINY = 0
    BUILD = 'beta2' # 'pre', 'rc', 'rc2', nil
    # compact drops BUILD when it is nil (final releases).
    # Frozen so the exposed version string cannot be mutated in place.
    STRING = [MAJOR, MINOR, TINY, BUILD].compact.join('.').freeze
  end
end
|
module Spout
  # Gem version number, built up from the individual components below.
  module VERSION #:nodoc:
    MAJOR = 0
    MINOR = 10
    TINY  = 0
    BUILD = "beta7" # nil, "pre", "rc", "rc2"
    # A nil BUILD is dropped before joining, yielding e.g. "0.10.0".
    STRING = [MAJOR, MINOR, TINY, BUILD].compact.join(".")
  end
end
Version bump to 0.10.0.beta8
module Spout
  # Gem version number, built up from the individual components below.
  module VERSION #:nodoc:
    MAJOR = 0
    MINOR = 10
    TINY  = 0
    BUILD = "beta8" # nil, "pre", "rc", "rc2"
    # A nil BUILD is dropped before joining, yielding e.g. "0.10.0".
    STRING = [MAJOR, MINOR, TINY, BUILD].compact.join(".")
  end
end
|
module Stall
  # Gem version; frozen so the constant string cannot be mutated in place.
  VERSION = "0.1.3".freeze
end
bump 0.2.0
module Stall
  # Gem version; frozen so the constant string cannot be mutated in place.
  VERSION = "0.2.0".freeze
end
|
module Statsd
  # Boots the statsd UDP server inside an EventMachine reactor and
  # periodically flushes collected stats to Graphite.
  class Runner
    # Default runtime configuration; any key may be overridden via the
    # opts hash passed to .run!.
    def self.default_config
      {
        :host => "0.0.0.0",        # UDP listen address
        :port => 8125,             # UDP listen port
        :daemonize => false,
        :debug => false,
        :flush_interval => 10,     # seconds between Graphite flushes
        :threshold_pct => 90,
        :graphite_host => '127.0.0.1',
        :graphite_port => 2003
      }
    end

    # Start the reactor: open the UDP socket, schedule the periodic
    # Graphite publisher, and optionally daemonize the process.
    def self.run!(opts = {})
      config = self.default_config.merge(opts)
      EM::run do
        server = EM::open_datagram_socket(config[:host], config[:port], Server, config)
        EM::add_periodic_timer(config[:flush_interval]) do
          begin
            EM::connect(config[:graphite_host], config[:graphite_port], Publisher, server)
          rescue
            $stderr.puts "Unable to connect to %s:%s" % [ config[:graphite_host], config[:graphite_port] ] if config[:debug]
          end
        end
        if config[:daemonize]
          # BUG FIX: `'fmt' % a, b` parses as `('fmt' % a), b`, which both
          # under-supplies the two-placeholder format (ArgumentError) and
          # turns the assignment into an array. The format arguments must
          # be passed as a single array.
          app_name = 'statsd %s:%d' % [ config[:host], config[:port] ]
          Daemons.daemonize(:app_name => app_name)
        else
          puts "Now accepting connections on address #{config[:host]}, port #{config[:port]}..."
        end
      end
    end
  end
end
quick bug fix
module Statsd
  # Wires up the EventMachine reactor that runs the statsd daemon and
  # its periodic Graphite publisher.
  class Runner
    # Baseline settings; callers override individual keys via .run!(opts).
    def self.default_config
      {
        :host           => "0.0.0.0",
        :port           => 8125,
        :daemonize      => false,
        :debug          => false,
        :flush_interval => 10,
        :threshold_pct  => 90,
        :graphite_host  => '127.0.0.1',
        :graphite_port  => 2003
      }
    end

    # Launch the UDP listener, schedule the Graphite flush timer, and
    # either daemonize or announce the listening address.
    def self.run!(opts = {})
      config = default_config.merge(opts)
      EM::run do
        server = EM::open_datagram_socket(config[:host], config[:port], Server, config)
        EM::add_periodic_timer(config[:flush_interval]) do
          begin
            EM::connect(config[:graphite_host], config[:graphite_port], Publisher, server)
          rescue
            if config[:debug]
              $stderr.puts "Unable to connect to %s:%s" % [ config[:graphite_host], config[:graphite_port] ]
            end
          end
        end
        if config[:daemonize]
          app_name = 'statsd %s:%d' % [ config[:host], config[:port] ]
          Daemons.daemonize(:app_name => app_name)
        else
          puts "Now accepting connections on address #{config[:host]}, port #{config[:port]}..."
        end
      end
    end
  end
end
|
module TwitterLogin
  # VERSION = "0.1.0"
  # Holds Stripe API configuration at the class level.
  #
  # BUG FIX: the original class body executed bare `api_key` and `null`
  # at load time (NoMethodError/NameError — the file could never be
  # required), and the accessors were instance methods assigning through
  # `self::api_key =`, which has no backing writer. Rewritten as class
  # instance variables with class-method accessors, matching the
  # StripeInit.getApiKey / StripeInit.setApiKey call style the code
  # clearly intends.
  class StripeInit
    @api_key = nil
    @api_base = 'https://api.stripe.com'
    @api_version = nil
    @verify_ssl_certs = true

    # Current Stripe secret key (nil until set).
    def self.getApiKey
      @api_key
    end

    # Store the Stripe secret key (echoed to stdout, as before).
    def self.setApiKey(api_key)
      puts "setting you stripe key"
      puts api_key
      @api_key = api_key
    end
  end
end
Updated: converted the api_key accessors to class methods
module TwitterLogin
  # VERSION = "0.1.0"
  # Holds Stripe API configuration at the class level.
  #
  # BUG FIX: even after the accessors became class methods, the class
  # body still executed bare `api_key` and `null` at load time
  # (NoMethodError/NameError), and `self::api_key = ...` inside
  # setApiKey calls an undefined `api_key=` writer. Replaced with class
  # instance variables, keeping the getApiKey/setApiKey interface.
  class StripeInit
    @api_key = nil
    @api_base = 'https://api.stripe.com'
    @api_version = nil
    @verify_ssl_certs = true

    # Current Stripe secret key (nil until set).
    def self.getApiKey
      @api_key
    end

    # Store the Stripe secret key (echoed to stdout, as before).
    def self.setApiKey(api_key)
      puts "setting you stripe key"
      puts api_key
      @api_key = api_key
    end
  end
end
module Sworn
  # Gem version; frozen so the constant string cannot be mutated in place.
  VERSION = "0.0.1".freeze
end
Releasing v0.0.2
Signed-off-by: Martin Svangren <54669547a225ff20cba8b75a4adca540eef25858@masv.net>
module Sworn
  # Gem version; frozen so the constant string cannot be mutated in place.
  VERSION = "0.0.2".freeze
end
|
require './lib/dc/urls' # For `:environment`-less `render_template()`
namespace :build do
  namespace :embed do
    # Build the document viewer from the sibling `document-viewer` repo
    # and copy the compiled assets back into this app's public/viewer.
    task :viewer do
      puts "Building viewer..."
      # Navigate up and over to the `document-viewer` repo
      # TODO: Stop doing this!
      Dir.chdir '../document-viewer'
      build_dir = "tmp"
      # FIX: File.exists? was deprecated and removed in Ruby 3.2; use
      # File.exist? (applies throughout this namespace).
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      `jammit -f -o #{build_dir}`
      `rm #{build_dir}/*.gz`
      # Rewrite relative image URLs in the compiled CSS in place.
      Dir["#{build_dir}/*.css"].each do |css_file|
        File.open(css_file, 'r+') do |file|
          css = file.read
          css.gsub!(/(\.\.\/)+images/, 'images')
          file.rewind
          file.write(css)
          file.truncate(css.length)
        end
      end
      FileUtils.cp_r("public/images", "#{build_dir}/images")
      # Export back to DocumentCloud
      FileUtils.cp_r("#{build_dir}/images", "../documentcloud/public/viewer")
      # Concatenate viewer.js + templates.js into a single viewer.js.
      `cat #{build_dir}/viewer.js #{build_dir}/templates.js > #{build_dir}/viewer_new.js`
      FileUtils.rm_r(["#{build_dir}/viewer.js", "#{build_dir}/templates.js"])
      FileUtils.mv("#{build_dir}/viewer_new.js", "#{build_dir}/viewer.js")
      FileUtils.cp("#{build_dir}/print.css", "../documentcloud/public/viewer/printviewer.css")
      Dir["#{build_dir}/viewer*"].each do |asset|
        FileUtils.cp(asset, "../documentcloud/public/viewer/#{File.basename(asset)}")
      end
      # Clean up temp build directory
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      # NOTE(review): this task leaves the process chdir'd in
      # ../document-viewer, so later tasks in the same run resolve
      # relative paths from there — consider chdir'ing back.
      puts "Done building viewer"
    end

    # Assemble the page embed from the vendored documentcloud-pages dist.
    task :page do
      puts "Building page embed..."
      vendor_dir = "public/javascripts/vendor/documentcloud-pages"
      page_embed_dir = "public/embed/page"
      loader_dir = "public/embed/loader"
      FileUtils.rm_r(page_embed_dir) if File.exist?(page_embed_dir)
      FileUtils.mkdir(page_embed_dir)
      FileUtils.cp_r(Dir.glob("#{vendor_dir}/dist/*"), page_embed_dir)
      FileUtils.cp_r(Dir.glob("#{vendor_dir}/src/css/vendor/fontello/font"), page_embed_dir)
      `cat #{vendor_dir}/src/js/config/config.js.erb #{page_embed_dir}/enhance.js > #{loader_dir}/enhance.js.erb`
      FileUtils.rm(["#{page_embed_dir}/enhance.js"])
      # Mimic deployment of the loader so that development has a copy too
      File.write("#{loader_dir}/enhance.js", render_template("#{loader_dir}/enhance.js.erb"))
      puts "Done building page embed"
    end

    # Assemble the note embed from the vendored documentcloud-notes dist.
    task :note do
      puts "Building note embed..."
      note_embed_dir = 'public/note_embed'
      FileUtils.rm_r(note_embed_dir) if File.exist?(note_embed_dir)
      FileUtils.mkdir(note_embed_dir)
      FileUtils.cp_r(Dir.glob("public/javascripts/vendor/documentcloud-notes/dist/*"), note_embed_dir)
      # Mimic deployment of the loader so that development has a copy too
      File.write("public/notes/loader.js", render_template("app/views/annotations/embed_loader.js.erb"))
      puts "Done building note embed"
    end

    # Compile the search embed assets via jammit and stage them.
    task :search do
      puts "Building search embed..."
      search_embed_dir = "public/search_embed"
      build_dir = "tmp/build"
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      `jammit -f -o #{build_dir} -c config/search_embed_assets.yml`
      # Rewrite absolute image URLs in the compiled CSS in place.
      Dir["#{build_dir}/*.css"].each do |css_file|
        File.open(css_file, 'r+') do |file|
          css = file.read
          css.gsub!("/images/search_embed", 'images')
          file.rewind
          file.write(css)
          file.truncate(css.length)
        end
      end
      FileUtils.cp_r("public/images/search_embed", "#{build_dir}/images") if File.exist?("public/images/search_embed")
      FileUtils.rm_r(search_embed_dir) if File.exist?(search_embed_dir)
      FileUtils.cp_r(build_dir, search_embed_dir)
      # Clean up temp build directory
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      # Mimic deployment of the loader so that development has a copy too
      File.write("public/embed/loader.js", render_template("app/views/search/embed_loader.js.erb"))
      puts "Done building search embed"
    end

    # Build every embed type (viewer is not included in this revision).
    task :all do
      invoke "build:embed:page"
      invoke "build:embed:note"
      invoke "build:embed:search"
    end
  end

  # Notices for old task names
  task :viewer do puts "REMOVED: Use `build:embed:viewer` instead." end
  task :note_embed do puts "REMOVED: Use `build:embed:note` instead." end
  task :search_embed do puts "REMOVED: Use `build:embed:search` instead." end

  # Render an ERB template file in the current binding.
  def render_template(template_path); ERB.new(File.read(template_path)).result(binding); end
end
Add viewer to `build:embed:all`
require './lib/dc/urls' # For `:environment`-less `render_template()`
namespace :build do
  namespace :embed do
    # Build the document viewer from the sibling `document-viewer` repo
    # and copy the compiled assets back into this app's public/viewer.
    task :viewer do
      puts "Building viewer..."
      # Navigate up and over to the `document-viewer` repo
      # TODO: Stop doing this!
      Dir.chdir '../document-viewer'
      build_dir = "tmp"
      # FIX: File.exists? was deprecated and removed in Ruby 3.2; use
      # File.exist? (applies throughout this namespace).
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      `jammit -f -o #{build_dir}`
      `rm #{build_dir}/*.gz`
      # Rewrite relative image URLs in the compiled CSS in place.
      Dir["#{build_dir}/*.css"].each do |css_file|
        File.open(css_file, 'r+') do |file|
          css = file.read
          css.gsub!(/(\.\.\/)+images/, 'images')
          file.rewind
          file.write(css)
          file.truncate(css.length)
        end
      end
      FileUtils.cp_r("public/images", "#{build_dir}/images")
      # Export back to DocumentCloud
      FileUtils.cp_r("#{build_dir}/images", "../documentcloud/public/viewer")
      # Concatenate viewer.js + templates.js into a single viewer.js.
      `cat #{build_dir}/viewer.js #{build_dir}/templates.js > #{build_dir}/viewer_new.js`
      FileUtils.rm_r(["#{build_dir}/viewer.js", "#{build_dir}/templates.js"])
      FileUtils.mv("#{build_dir}/viewer_new.js", "#{build_dir}/viewer.js")
      FileUtils.cp("#{build_dir}/print.css", "../documentcloud/public/viewer/printviewer.css")
      Dir["#{build_dir}/viewer*"].each do |asset|
        FileUtils.cp(asset, "../documentcloud/public/viewer/#{File.basename(asset)}")
      end
      # Clean up temp build directory
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      # Return to this repo so subsequent tasks resolve relative paths.
      Dir.chdir '../documentcloud'
      puts "Done building viewer"
    end

    # Assemble the page embed from the vendored documentcloud-pages dist.
    task :page do
      puts "Building page embed..."
      vendor_dir = "public/javascripts/vendor/documentcloud-pages"
      page_embed_dir = "public/embed/page"
      loader_dir = "public/embed/loader"
      FileUtils.rm_r(page_embed_dir) if File.exist?(page_embed_dir)
      FileUtils.mkdir(page_embed_dir)
      FileUtils.cp_r(Dir.glob("#{vendor_dir}/dist/*"), page_embed_dir)
      FileUtils.cp_r(Dir.glob("#{vendor_dir}/src/css/vendor/fontello/font"), page_embed_dir)
      `cat #{vendor_dir}/src/js/config/config.js.erb #{page_embed_dir}/enhance.js > #{loader_dir}/enhance.js.erb`
      FileUtils.rm(["#{page_embed_dir}/enhance.js"])
      # Mimic deployment of the loader so that development has a copy too
      File.write("#{loader_dir}/enhance.js", render_template("#{loader_dir}/enhance.js.erb"))
      puts "Done building page embed"
    end

    # Assemble the note embed from the vendored documentcloud-notes dist.
    task :note do
      puts "Building note embed..."
      note_embed_dir = 'public/note_embed'
      FileUtils.rm_r(note_embed_dir) if File.exist?(note_embed_dir)
      FileUtils.mkdir(note_embed_dir)
      FileUtils.cp_r(Dir.glob("public/javascripts/vendor/documentcloud-notes/dist/*"), note_embed_dir)
      # Mimic deployment of the loader so that development has a copy too
      File.write("public/notes/loader.js", render_template("app/views/annotations/embed_loader.js.erb"))
      puts "Done building note embed"
    end

    # Compile the search embed assets via jammit and stage them.
    task :search do
      puts "Building search embed..."
      search_embed_dir = "public/search_embed"
      build_dir = "tmp/build"
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      `jammit -f -o #{build_dir} -c config/search_embed_assets.yml`
      # Rewrite absolute image URLs in the compiled CSS in place.
      Dir["#{build_dir}/*.css"].each do |css_file|
        File.open(css_file, 'r+') do |file|
          css = file.read
          css.gsub!("/images/search_embed", 'images')
          file.rewind
          file.write(css)
          file.truncate(css.length)
        end
      end
      FileUtils.cp_r("public/images/search_embed", "#{build_dir}/images") if File.exist?("public/images/search_embed")
      FileUtils.rm_r(search_embed_dir) if File.exist?(search_embed_dir)
      FileUtils.cp_r(build_dir, search_embed_dir)
      # Clean up temp build directory
      FileUtils.rm_r(build_dir) if File.exist?(build_dir)
      # Mimic deployment of the loader so that development has a copy too
      File.write("public/embed/loader.js", render_template("app/views/search/embed_loader.js.erb"))
      puts "Done building search embed"
    end

    # Build every embed type, including the viewer.
    task :all do
      invoke "build:embed:viewer"
      invoke "build:embed:page"
      invoke "build:embed:note"
      invoke "build:embed:search"
    end
  end

  # Notices for old task names
  task :viewer do puts "REMOVED: Use `build:embed:viewer` instead." end
  task :note_embed do puts "REMOVED: Use `build:embed:note` instead." end
  task :search_embed do puts "REMOVED: Use `build:embed:search` instead." end

  # Render an ERB template file in the current binding.
  def render_template(template_path); ERB.new(File.read(template_path)).result(binding); end
end
|
#!/usr/bin/env ruby
# -*- coding: utf-8 -*-
require 'json'
require 'yaml'
require 'csv'
namespace :dojos do
  desc 'Parseから出力したjsonファイルをベースに、yamlファイルを生成します'
  # Generates the dojo YAML file from a Parse backup JSON export.
  task generate_yaml: :environment do
    dojos = JSON.parse(File.read(Rails.root.join('db', 'parse_backup.json')))['results']
    dojos.sort_by!{ |hash| hash['order'] }
    # Tweak dojo info if needed
    dojos.each do |dojo|
      dojo['description'].strip!
      dojo.delete 'objectId'  # Delete Parse-specific key
      dojo.delete 'createdAt' # This is managed by database
      dojo.delete 'updatedAt' # This is managed by database
    end
    Dojo.dump_attributes_to_yaml(dojos)
  end

  desc '現在のyamlファイルを元にデータベースを更新します'
  # Upserts every dojo record in the database from the YAML file.
  task update_db_by_yaml: :environment do
    dojos = Dojo.load_attributes_from_yaml
    dojos.each do |dojo|
      d = Dojo.find_or_initialize_by(id: dojo['id'])
      d.name = dojo['name']
      d.email = ''
      # Fall back to deriving `order` from the city-code CSV when the
      # YAML entry has no explicit order.
      d.order = dojo['order'] || search_order_number(dojo['name'])
      d.description = dojo['description']
      d.logo = dojo['logo']
      d.tags = dojo['tags']
      d.url = dojo['url']
      # Preserve the original created_at for existing records.
      d.created_at = d.new_record? ? Time.zone.now : dojo['created_at'] || d.created_at
      d.updated_at = Time.zone.now
      d.prefecture_id = dojo['prefecture_id']
      d.save!
    end
  end

  # search order number for google spreadsheets
  # (derives the `order` value from a YAML name like "City (Label)")
  def search_order_number(pre_city)
    if /(?<city>.+)\s\(.+\)/ =~ pre_city
      table = CSV.table(Rails.root.join('db','city_code.csv'))
      row = table.find{ |r| r[:city].to_s.start_with?(city)}
      row ? row[:order] : raise("Not found order by #{pre_city}")
    else
      raise("It is not valid data for #{pre_city}")
    end
  end

  desc '現在のyamlファイルのカラムをソートします'
  # Sorts each dojo's columns within the YAML file.
  task sort_yaml: :environment do
    dojos = Dojo.load_attributes_from_yaml
    # Dojo column should start with 'name' for human-readability
    dojos.map! do |dojo|
      # NOTE(review): sort_by yields [key, value] pairs here, so `a` is
      # the key and `b` the value; `a.last` relies on ActiveSupport's
      # String#last (last character of the key) — confirm this ordering
      # is the one intended.
      dojo.sort_by{|a,b| a.last}.to_h
    end
    Dojo.dump_attributes_to_yaml(dojos)
  end

  desc 'DBからyamlファイルを生成します'
  # One-off migration: prepend each dojo's database id to its YAML entry.
  task migrate_adding_id_to_yaml: :environment do
    dojos = Dojo.load_attributes_from_yaml
    dojos.map! do |dojo|
      d = Dojo.find_by(name: dojo['name'])
      new_dojo = {}
      # Inserted first so 'id' appears at the top of each YAML entry.
      new_dojo['id'] = d.id
      new_dojo.merge!(dojo)
      new_dojo
    end
    Dojo.dump_attributes_to_yaml(dojos)
  end
end
Update order column in yaml migration task
#!/usr/bin/env ruby
# -*- coding: utf-8 -*-
require 'json'
require 'yaml'
require 'csv'
namespace :dojos do
  desc 'Parseから出力したjsonファイルをベースに、yamlファイルを生成します'
  # Generates the dojo YAML file from a Parse backup JSON export.
  task generate_yaml: :environment do
    dojos = JSON.parse(File.read(Rails.root.join('db', 'parse_backup.json')))['results']
    dojos.sort_by!{ |hash| hash['order'] }
    # Tweak dojo info if needed
    dojos.each do |dojo|
      dojo['description'].strip!
      dojo.delete 'objectId'  # Delete Parse-specific key
      dojo.delete 'createdAt' # This is managed by database
      dojo.delete 'updatedAt' # This is managed by database
    end
    Dojo.dump_attributes_to_yaml(dojos)
  end

  desc '現在のyamlファイルを元にデータベースを更新します'
  # Upserts every dojo record in the database from the YAML file.
  task update_db_by_yaml: :environment do
    dojos = Dojo.load_attributes_from_yaml
    dojos.each do |dojo|
      d = Dojo.find_or_initialize_by(id: dojo['id'])
      d.name = dojo['name']
      d.email = ''
      # Fall back to deriving `order` from the city-code CSV.
      d.order = dojo['order'] || search_order_number(dojo['name'])
      d.description = dojo['description']
      d.logo = dojo['logo']
      d.tags = dojo['tags']
      d.url = dojo['url']
      # Preserve the original created_at for existing records.
      d.created_at = d.new_record? ? Time.zone.now : dojo['created_at'] || d.created_at
      d.updated_at = Time.zone.now
      d.prefecture_id = dojo['prefecture_id']
      d.save!
    end
  end

  # search order number for google spreadsheets
  # (derives the `order` value from a YAML name like "City (Label)")
  def search_order_number(pre_city)
    if /(?<city>.+)\s\(.+\)/ =~ pre_city
      table = CSV.table(Rails.root.join('db','city_code.csv'))
      row = table.find{ |r| r[:city].to_s.start_with?(city)}
      row ? row[:order] : raise("Not found order by #{pre_city}")
    else
      raise("It is not valid data for #{pre_city}")
    end
  end

  desc '現在のyamlファイルのカラムをソートします'
  # Sorts each dojo's columns within the YAML file.
  task sort_yaml: :environment do
    dojos = Dojo.load_attributes_from_yaml
    # Dojo column should start with 'name' for human-readability
    dojos.map! do |dojo|
      # NOTE(review): `a` is the [key] side of each pair; `a.last` uses
      # ActiveSupport's String#last — confirm this ordering is intended.
      dojo.sort_by{|a,b| a.last}.to_h
    end
    Dojo.dump_attributes_to_yaml(dojos)
  end

  desc 'DBからyamlファイルを生成します'
  # One-off migration: prepend each dojo's database id and order column
  # to its YAML entry.
  task migrate_adding_id_to_yaml: :environment do
    dojos = Dojo.load_attributes_from_yaml
    dojos.map! do |dojo|
      d = Dojo.find_by(name: dojo['name'])
      new_dojo = {}
      # Inserted first so 'id'/'order' appear at the top of each entry.
      new_dojo['id'] = d.id
      new_dojo['order'] = d.order
      new_dojo.merge!(dojo)
      new_dojo
    end
    Dojo.dump_attributes_to_yaml(dojos)
  end
end
|
require "seed_dummy_data"

namespace :db do
  desc "Add system data"
  # Seeds the database first, then layers the dummy data on top.
  task :dummy => [:environment, :seed] do
    SeedDummyData.run
  end
end
`db:dummy` doesn't invoke `db:seed` anymore
Invoking `db:seed` just before `db:dummy` makes a lot of queries to the database.
This is a problem in heroku's dummy environment because the free database plan limits the number of queries the app can execute
require "seed_dummy_data"

namespace :db do
  desc "Add system data"
  # Depends only on :environment — deliberately does NOT invoke db:seed:
  # seeding right before this task issued many queries, a problem on
  # Heroku's free database plan. Run db:seed separately when needed.
  task dummy: :environment do
    SeedDummyData.run
  end
end
|
namespace :email do
  desc "List queued emails"
  # Dumps every queued email with its integer/string payload and note.
  # Uses Rails 2-style eager-loading finders throughout this namespace.
  task(:list => :environment) do
    print "#{DOMAIN}\n"
    for e in QueuedEmail.find(:all, :include => [
      :queued_email_integers, :queued_email_note, :queued_email_strings, :user])
      print "#{e.id}: from => #{e.user.login}, to => #{e.to_user.login}, flavor => #{e.flavor}, queued => #{e.queued}\n"
      for i in e.queued_email_integers
        print "\t#{i.key.to_s} => #{i.value}\n"
      end
      for i in e.queued_email_strings
        print "\t#{i.key.to_s} => #{i.value}\n"
      end
      if e.queued_email_note
        print "\tNote: #{e.queued_email_note.value}\n"
      end
    end
  end

  desc "Send queued emails"
  # Sends emails that have sat in the queue for at least QUEUE_DELAY,
  # capped at EMAIL_PER_MINUTE per run; sent emails are destroyed.
  task(:send => :environment) do
    count = 0
    for e in QueuedEmail.find(:all)
      if e.queued + QUEUE_DELAY < Time.now() # Has it been queued (and unchanged) for QUEUE_DELAY or more
        if e.send_email
          e.destroy
          count += 1
          if count >= EMAIL_PER_MINUTE
            break
          end
        end
      end
    end
    print "Sent #{count} email(s)\n"
  end

  desc "Purge the email queue without sending anything"
  # Destroys every queued email, logging each one as it goes.
  task(:purge => :environment) do
    for e in QueuedEmail.find(:all)
      print "Purging #{e.id}: from => #{e.user.login}, to => #{e.to_user.login}, flavor => #{e.flavor}, queued => #{e.queued}\n"
      e.destroy
    end
  end
end
More output for email related rake tasks
namespace :email do
  desc "List queued emails"
  # Lists every queued email with its payload; `e.user and e.user.login`
  # tolerates emails whose sender record has been deleted.
  task(:list => :environment) do
    print "#{DOMAIN}, #{RAILS_ENV}\n"
    for e in QueuedEmail.find(:all, :include => [
      :queued_email_integers, :queued_email_note, :queued_email_strings, :user])
      print "#{e.id}: from => #{e.user and e.user.login}, to => #{e.to_user.login}, flavor => #{e.flavor}, queued => #{e.queued}\n"
      for i in e.queued_email_integers
        print "\t#{i.key.to_s} => #{i.value}\n"
      end
      for i in e.queued_email_strings
        print "\t#{i.key.to_s} => #{i.value}\n"
      end
      if e.queued_email_note
        print "\tNote: #{e.queued_email_note.value}\n"
      end
    end
  end

  desc "Send queued emails"
  # Sends mature queued emails (older than QUEUE_DELAY), logging each
  # send, capped at EMAIL_PER_MINUTE per run.
  # NOTE(review): unlike the earlier revision, no summary count line is
  # printed at the end — confirm that is intentional.
  task(:send => :environment) do
    count = 0
    for e in QueuedEmail.find(:all)
      if e.queued + QUEUE_DELAY < Time.now() # Has it been queued (and unchanged) for QUEUE_DELAY or more
        if e.send_email
          print "Sent #{e.flavor} email from #{e.user and e.user.login} to #{e.to_user.login}\n"
          e.destroy
          count += 1
          if count >= EMAIL_PER_MINUTE
            break
          end
        end
      end
    end
  end

  desc "Purge the email queue without sending anything"
  # Destroys every queued email, logging each one as it goes.
  task(:purge => :environment) do
    for e in QueuedEmail.find(:all)
      print "Purging #{e.id}: from => #{e.user and e.user.login}, to => #{e.to_user.login}, flavor => #{e.flavor}, queued => #{e.queued}\n"
      e.destroy
    end
  end
end
|
namespace :ender do
  namespace :node do
    desc "install nvm"
    # Installs nvm via curl (preferred) or wget.
    task :install_nvm do
      begin
        # `which` prints nothing when the binary is missing.
        if `which curl`.size.>(0)
          sh "curl https://raw.github.com/creationix/nvm/master/install.sh | sh"
        else
          sh "wget -qO- https://raw.github.com/creationix/nvm/master/install.sh | sh"
        end
        puts "NVM : https://github.com/creationix/nvm"
        puts "has been installed for your account. NVM is the node/npm version manager."
        puts "If you liked RVM, you will feel like at home."
        puts
      # NOTE(review): rescuing Exception also swallows SystemExit and
      # signal exceptions; a bare `rescue => e` (StandardError) would be
      # safer here.
      rescue Exception => e
        puts e
        puts e.backtrace
      end
    end

    desc "install node version NODE_VERSION (default 0.8.16)"
    # Installs node through nvm, installing nvm first when absent.
    task :install do
      Rake::Task[:install_nvm].invoke unless ENV['NVM_BIN']
      node_version = ENV['NODE_VERSION'] || '0.8.16'
      sh "sh #{ENV['NVM_DIR']}/nvm.sh && nvm install #{node_version}"
    end
  end

  desc "install files from ender"
  # Installs the ender npm package (and node itself when npm is missing).
  task :install do
    Rake::Task['ender:node:install'].invoke unless `which npm`.size.>(0)
    sh 'npm install ender'
  end

  desc "check is ender is installed"
  # Ensures ender is available, installing it on demand.
  task :check do
    Rake::Task['ender:install'].invoke unless `which ender`.size.>(0)
  end

  desc "build ender base jeesh packages (and remove all the rest)"
  # Builds the jeesh bundle into app/assets/javascripts/ender.js and
  # drops the generated minified variant.
  task :build => :check do
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender build jeesh --output #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end

  desc "refresh the build"
  # Rebuilds the existing ender bundle in place.
  task :refresh => :check do
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender refresh --use #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end

  desc "show list of installed packages"
  task :info => :check do
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender info --use #{output_path}"
  end

  desc "display ender help"
  task :help => :check do
    sh "ender"
  end

  desc "add a package to the ender build"
  # Usage: rake ender:add[package]
  task :add, [:package] => [:check] do |t,p|
    puts t.inspect
    puts p.inspect
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender add #{p[:package]} --use #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end

  desc "remove a package from the ender build"
  # Usage: rake ender:remove[package]
  task :remove, [:package] => [:check] do |t,p|
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender remove #{p[:package]} --use #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end
end
small text fixes
namespace :ender do
  namespace :node do
    desc "install nvm"
    # Installs nvm via curl (preferred) or wget.
    task :install_nvm do
      begin
        # `which` prints nothing when the binary is missing.
        if `which curl`.size.>(0)
          sh "curl https://raw.github.com/creationix/nvm/master/install.sh | sh"
        else
          sh "wget -qO- https://raw.github.com/creationix/nvm/master/install.sh | sh"
        end
        puts "NVM : https://github.com/creationix/nvm"
        puts "has been installed for your account. NVM is the node/npm version manager."
        puts "If you liked RVM, you will feel like at home."
        puts
      # NOTE(review): rescuing Exception also swallows SystemExit and
      # signal exceptions; `rescue => e` (StandardError) would be safer.
      rescue Exception => e
        puts e
        puts e.backtrace
      end
    end

    desc "install node version NODE_VERSION (default 0.8.16)"
    # Installs node through nvm, installing nvm first when absent.
    task :install do
      Rake::Task[:install_nvm].invoke unless ENV['NVM_BIN']
      node_version = ENV['NODE_VERSION'] || '0.8.16'
      sh "sh #{ENV['NVM_DIR']}/nvm.sh && nvm install #{node_version}"
    end
  end

  desc "install files from ender"
  # Installs the ender npm package (and node itself when npm is missing).
  task :install do
    Rake::Task['ender:node:install'].invoke unless `which npm`.size.>(0)
    sh 'npm install ender'
  end

  desc "check if ender is installed"
  # Ensures ender is available, installing it on demand.
  task :check do
    Rake::Task['ender:install'].invoke unless `which ender`.size.>(0)
  end

  desc "build ender base Jeesh package (careful it rebuilds from scratch)"
  # Builds the jeesh bundle into app/assets/javascripts/ender.js and
  # drops the generated minified variant.
  task :build => :check do
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender build jeesh --output #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end

  desc "refresh the build"
  # Rebuilds the existing ender bundle in place.
  task :refresh => :check do
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender refresh --use #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end

  desc "show list of installed packages"
  task :info => :check do
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender info --use #{output_path}"
  end

  desc "display ender help"
  task :help => :check do
    sh "ender"
  end

  desc "add a package to the ender build"
  # Usage: rake ender:add[package]
  task :add, [:package] => [:check] do |t,p|
    puts t.inspect
    puts p.inspect
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender add #{p[:package]} --use #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end

  desc "remove a package from the ender build"
  # Usage: rake ender:remove[package]
  task :remove, [:package] => [:check] do |t,p|
    output_path = File.join(Rails.root,'app','assets','javascripts','ender')
    sh "ender remove #{p[:package]} --use #{output_path}"
    FileUtils.rm("#{output_path}.min.js") if File.exist?("#{output_path}.min.js")
  end
end
|
## fixall0 and fixall1 can be run in parallel, fixall2 must be run at the end
desc "Fix all"
# Umbrella task running every fixdb migration pass in order.
task :fixall => [:init, :fixall0, :fixall1, :fixall2, :fixall3] do
end
# Thumbnail regeneration; independent of the other passes.
task :fixall0 => [:init, "fixdb:create_thumbnails"] do
end
# Per-document fixes (questions, votes, comments, tags, ...).
task :fixall1 => [:init, "fixdb:questions", "fixdb:contributions", "fixdb:dates", "fixdb:openid", "fixdb:relocate", "fixdb:votes", "fixdb:counters", "fixdb:sync_counts", "fixdb:last_target_type", "fixdb:fix_moved_comments_and_set_comment_count", "fixdb:comments", "fixdb:widgets", "fixdb:tags", "fixdb:update_answers_favorite"] do
end
# Group/user-level fixes that must follow the per-document passes.
task :fixall2 => [:init, "fixdb:groups", "fixdb:remove_retag_other_tag", "setup:create_reputation_constrains_modes", "fixdb:update_group_notification_config", "fixdb:set_follow_ids", "fixdb:set_friends_lists", "fixdb:fix_twitter_users", "fixdb:fix_facebook_users", "fixdb:set_invitations_perms", "fixdb:set_signup_type", "fixdb:versions", "fixdb:ads", "fixdb:wiki_booleans", "fixdb:themes", "fixdb:update_reputation_keys", "fixdb:votes_to_followers"]
# Membership/tag-follower fixes, run last.
task :fixall3 => [:init, "fixdb:memberships", "fixdb:update_tag_followers_count"] do
end
# Prepares the environment for the fixdb tasks: reopens Question, Answer
# and Group to no-op set_created_at/set_updated_at — presumably so the
# bulk fixes below don't clobber original timestamps (TODO confirm).
task :init => [:environment] do
  class Question
    def set_created_at; end
    def set_updated_at; end
  end
  class Answer
    def set_created_at; end
    def set_updated_at; end
  end
  class Group
    def set_created_at; end
    def set_updated_at; end
  end
  GC.start
end
namespace :fixdb do
task :memberships => [:init] do
user_count= User.count
user_count_i = 0
memberships = []
p "gathering memberships"
User.all.each do |user|
if user[:membership_list]
count = user[:membership_list].count
user.memberships.delete_all
(user[:membership_list] || {}).each do |group_id, membership|
if Group.find(group_id)
membership["_id"] = BSON::ObjectId.new.to_s
membership["group_id"] = group_id
membership["user_id"] = user.id
membership["joined_at"] ||= user.created_at
memberships << membership
end
end
end
user_count_i+=1
puts "#{user_count_i}/#{user_count}"
end
msc = memberships.size
msi = 0
p "creating memberships:"
memberships.each do |m|
Membership.create!(m)
p "#{msi+=1}/#{msc}"
end
User.unset({}, {:membership_list => 1})
p "done creating membership"
end
task :questions => [:init] do
Question.all.each do |question|
question.override(:_random => rand())
question.override(:_random_times => 0.0)
watchers = question.raw_attributes["watchers"]
question.unset(:watchers => true)
if watchers.kind_of?(Array)
question.override(:follower_ids => watchers)
end
end
end
task :contributions => [:init] do
Question.only(:user_id, :contributor_ids).all.each do |question|
question.add_contributor(question.user) if question.user
question.answers.only(:user_id).all.each do |answer|
question.add_contributor(answer.user) if answer.user
end
end
end
task :dates => [:init] do
%w[badges questions comments votes users announcements groups memberships pages reputation_events user_stats versions views_counts].each do |cname|
coll = Mongoid.master.collection(cname)
coll.find.each do |q|
%w[activity_at last_target_date created_at updated_at birthday last_logged_at starts_at ends_at last_activity_at time date].each do |key|
if q[key].is_a?(String)
q[key] = Time.parse(q[key])
end
end
coll.save(q)
end
end
end
task :openid => [:init] do
User.all.each do |user|
next if user.identity_url.blank?
puts "Updating: #{user.login}"
user.push_uniq(:auth_keys => "open_id_#{user[:identity_url]}")
user.unset(:identity_url => 1)
end
end
task :update_answers_favorite => [:init] do
Mongoid.database.collection("favorites").remove
answers = Mongoid.database.collection("answers")
answers.update({ }, {"$set" => {"favorite_counts" => 0}})
end
# Recompute the denormalized vote/flag counters on comments and questions
# from the raw "votes" collection.
task :sync_counts => [:init] do
  votes = Mongoid.database.collection("votes")
  comments = Mongoid.database.collection("comments")
  puts "updating comment's counts"
  comments.find.each do |c|
    print "."
    votes_average=0
    votes.find(:voteable_id => c["_id"]).each do |v|
      votes_average+=v["value"]
    end
    # BUG FIX: the selector previously used c["id"], which is nil on raw
    # Mongo documents (the driver keys the primary key as "_id"), so these
    # updates matched no document and the counters were never written.
    comments.update({:_id => c["_id"]},
    {"$set" => {"votes_count" => votes.find(:voteable_id => c["_id"]).count,
    "votes_average" => votes_average}})
    if c["flags"]
      comments.update({:_id => c["_id"]}, {"$set" => {"flags_count" => c["flags"].size}})
    end
  end
  puts "updating questions's counts"
  Question.all.each do |q|
    print "."
    votes_average=0
    votes.find(:voteable_id => q.id).each do |v|
      votes_average+=v["value"]
    end
    q.override("flags_count" => q.flags.size, "votes_count" => q.votes.size, "votes_average" => votes_average)
  end
end
# Refresh the cached close-request / open-request counters on every question.
task :counters => :init do
  Question.all.each do |question|
    %w[close_requests open_requests].each do |kind|
      question.override(:"#{kind}_count" => question.send(kind).size)
    end
  end
end
task :last_target_type => [:init] do
puts "updating questions#last_target_type"
Question.where({:last_target_type.ne => nil}).all.each do |q|
print "."
if(q.last_target_type != "Comment")
last_target = q.last_target_type.constantize.find(q.last_target_id)
else
data = Mongoid.database.collection("comments").find_one(:_id => q.last_target_id)
last_target = Comment.new(data)
end
if(last_target)
if(last_target.respond_to?(:updated_at) && last_target.updated_at && last_target.updated_at.is_a?(String))
last_target.updated_at = Time.parse(last_target.updated_at)
end
Question.update_last_target(q.id, last_target)
end
end
end
task :votes => [:init] do
puts "updating votes"
comments = Mongoid.database.collection("comments")
comments.update({:votes => nil}, {"$set" => {"votes" => {}}}, :multi => true)
questions = Mongoid.database.collection("questions")
questions.update({:votes => nil}, {"$set" => {"votes" => {}}}, :multi => true)
Group.all.each do |group|
count = 0
Mongoid.database.collection("votes").find({:group_id => group["_id"]}).each do |vote|
vote.delete("group_id")
id = vote.delete("voteable_id")
klass = vote.delete("voteable_type")
collection = comments
if klass == "Question"
collection = questions;
end
count += 1
collection.update({:_id => id}, "$set" => {"votes.#{vote["user_id"]}" => vote["value"]})
end
if count > 0
puts "Updated #{count} #{group["name"]} votes"
end
end
Mongoid.database.collection("votes").drop
end
task :fix_moved_comments_and_set_comment_count => [:init] do
comments = Mongoid.database.collection("comments")
questions = Mongoid.database.collection("questions")
users = Mongoid.database.collection("users")
x = 0
Mongoid.database.collection("comments").find(:_type => "Comment").each do |c|
collection = comments
if c["commentable_type"] == "Question"
collection = questions;
end
parent = collection.find(:_id => c["commentable_id"]).first
if parent && parent["group_id"] != c["group_id"]
c["group_id"] = parent["group_id"]
comments.update({ :_id => c["_id"]}, c)
x += 1
end
# update user's comment count
users.update({ :_id => c["user_id"]}, "$inc" => {"membership_list.#{c['group_id']}.comments_count" => 1})
end
p "#{x} moved comments had the wrong group_id"
end
task :comments => [:init] do
puts "updating comments"
comments = Mongoid.database.collection("comments")
questions = Mongoid.database.collection("questions")
questions.update({}, {"$set" => {:comments => []}})
comments.update({}, {"$set" => {:comments => []}})
Mongoid.database.collection("comments").find(:_type => "Comment").each do |comment|
id = comment.delete("commentable_id")
klass = comment.delete("commentable_type")
collection = comments
%w[created_at updated_at].each do |key|
if comment[key].is_a?(String)
comment[key] = Time.parse(comment[key])
end
end
if klass == "Question"
collection = questions;
end
comment.delete("comments")
collection.update({:_id => id}, "$addToSet" => {:comments => comment})
comments.remove({:_id => comment["_id"]})
end
begin
Mongoid.database.collection("answers").drop
ensure
begin
comments.rename("answers")
rescue
puts "comments collection doesn't exists"
ensure
Answer.override({}, {:_type => "Answer"})
end
end
answers_coll = Mongoid.database.collection("answers")
answers_coll.find().each do |answer|
%w[created_at updated_at].each do |key|
if answer[key].is_a?(String)
answer[key] = Time.parse(answer[key])
end
end
answers_coll.save(answer)
end
puts "updated comments"
end
task :groups => [:init] do
Group.where({:language.in => [nil, '', 'none']}).all.each do |group|
lang = group.description.to_s.language
puts "Updating #{group.name} subdomain='#{group.subdomain}' detected as: #{lang}"
group.language = (lang == :spanish) ? 'es' : 'en'
group.languages = DEFAULT_USER_LANGUAGES
if group.valid?
group.save
else
puts "Invalid group: #{group.errors.full_messages}"
end
end
end
task :relocate => [:init] do
doc = JSON.parse(File.read('data/countries.json'))
i=0
Question.override({:address => nil}, :address => {})
Answer.override({:address => nil}, :address => {})
User.override({:address => nil}, :address => {})
doc.keys.each do |key|
User.where({:country_name => key}).all.each do |u|
p "#{u.login}: before: #{u.country_name}, after: #{doc[key]["address"]["country"]}"
lat = Float(doc[key]["lat"])
lon = Float(doc[key]["lon"])
User.override({:_id => u.id},
{:position => {lat: lat, long: lon},
:address => doc[key]["address"] || {}})
# FIXME
# Comment.override({:user_id => u.id},
# {:position => GeoPosition.new(lat, lon),
# :address => doc[key]["address"]})
Question.override({:user_id => u.id},
{:position => {lat: lat, long: lon},
:address => doc[key]["address"] || {}})
Answer.override({:user_id => u.id},
{:position => {lat: lat, long: lon},
:address => doc[key]["address"] || {}})
end
end
end
task :widgets => [:init] do
c=Group.count
Group.unset({}, [:widgets, :question_widgets, :mainlist_widgets, :external_widgets])
i=0
Group.all.each do |g|
g.reset_widgets!
g.save
p "(#{i+=1}/#{c}) Updated widgets for group #{g.name}"
end
end
task :update_group_notification_config => [:init] do
puts "updating groups notification config"
Group.all.each do |g|
g.notification_opts = GroupNotificationConfig.new
g.save
end
puts "done"
end
# Backfill the Tag collection from each question's embedded tag list:
# bump the counter for tags that already exist, create missing ones.
task :tags => [:init] do
  count = Question.count
  i = 0
  bad_count = 0
  # BUG FIX: the old selector {"$ne" => [], "$ne" => nil} is a Ruby hash
  # literal with a duplicate key — only the last "$ne" survived, so
  # questions with an empty tag array were still scanned. "$nin" expresses
  # both exclusions in a single operator.
  Question.where(:tags => {"$nin" => [nil, []]}).all.each do |q|
    q.tags.each do |tag_name|
      existing_tag = Tag.where(:name => tag_name, :group_id => q.group_id).first
      if existing_tag
        existing_tag.inc(:count, 1)
      else
        tag = Tag.new(:name => tag_name)
        if q.group
          tag.group = q.group
          tag.user = q.group.owner
          # Date the tag at the group's oldest question so ordering stays sane.
          tag.used_at = tag.created_at = tag.updated_at = q.group.questions.where(:created_at=>{:$ne=>nil}).order_by([:created_at, :asc]).first.created_at
          tag.save
        else
          # BUG FIX: was "bad_count += 0", which never incremented the counter.
          bad_count += 1
        end
      end
    end
    p "#{i+=1}/#{count}"
  end
  p "Found #{bad_count} tags on questions without a group"
end
task :remove_retag_other_tag => [:init] do
Group.unset({}, "reputation_constrains.retag_others_tags" => 1 )
end
task :cleanup => [:init] do
p "removing #{Question.where(:group_id => nil).destroy_all} orphan questions"
p "removing #{Answer.where(:group_id => nil).destroy_all} orphan answers"
end
task :set_follow_ids => [:init] do
p "setting nil following_ids to []"
FriendList.override({:following_ids => nil}, {:following_ids => []})
p "setting nil follower_ids to []"
FriendList.override({:follower_ids => nil}, {:follower_ids => []})
p "done"
end
task :set_friends_lists => [:init] do
total = User.count
i = 1
p "updating #{total} users facebook friends list"
User.all.each do |u|
u.send(:initialize_fields)
if u.external_friends_list.nil?
u.send(:create_lists)
end
if u.read_list.nil?
read_list = ReadList.create
u.read_list = read_list
end
p "#{i}/#{total} #{u.login}"
i += 1
end
p "done"
end
task :fix_twitter_users => [:init] do
users = User.where({:twitter_token => {:$ne => nil}})
users.each do |u|
twitter_id = u.twitter_token.split('-').first
p "fixing #{u.login} with twitter id #{twitter_id}"
u["auth_keys"] = [] if u["auth_keys"].nil?
u["auth_keys"] << "twitter_#{twitter_id}"
u["auth_keys"].uniq!
u["twitter_id"] = twitter_id
u["user_info"] = { } if u["user_info"].nil?
u["user_info"]["twitter"] = { "old" => 1}
u.save(:validate => false)
end
end
task :fix_facebook_users => [:init] do
users = User.where({:facebook_id => {:$ne => nil}})
users.each do |u|
facebook_id = u.facebook_id
p "fixing #{u.login} with facebook id #{facebook_id}"
u["auth_keys"] = [] if u["auth_keys"].nil?
u["auth_keys"] << "facebook_#{facebook_id}"
u["auth_keys"].uniq!
u["user_info"] = { } if u["user_info"].nil?
u["user_info"]["facebook"] = { "old" => 1}
u.save(:validate => false)
end
end
task :create_thumbnails => [:init] do
Group.all.each do |g|
begin
puts "Creating thumbnails for #{g.name} #{g.id}"
Jobs::Images.generate_group_thumbnails(g.id)
rescue Mongo::GridFileNotFound => e
puts "error getting #{g.name}'s logo"
end
end
end
# Seed the default invitation permission on every group:
# private groups -> only the owner may invite; public groups -> any user may.
task :set_invitations_perms => [:init] do
  p "setting invitations permissions on groups"
  p "only owners can invite people on private groups by default"
  # BUG FIX: both overrides previously matched {:private => false}, so private
  # groups were never updated and public groups were written twice (ending up
  # as "user" either way). The first override must target private groups.
  Group.override({:private => true}, {:invitations_perms => "owner"})
  p "anyone can invite people on public groups by default"
  Group.override({:private => false}, {:invitations_perms => "user"})
  p "done"
end
task :set_signup_type => [:init] do
p "setting signup type for groups"
Group.override({:openid_only => true}, {:signup_type => "noemail"})
Group.override({:openid_only => false}, {:signup_type => "all"})
p "done"
end
task :versions => [:init] do
Question.only(:versions, :versions_count).each do |question|
next if question.versions.count > 0
question.override({:versions_count => 0})
(question[:versions]||[]).each do |version|
version["created_at"] = version.delete("date")
version["target"] = question
question.version_klass.create!(version)
end
question.unset({:versions => true})
end
Answer.only(:versions, :versions_count).each do |post|
next if post.versions_count.to_i > 0
post.override({:versions_count => 0})
(post[:versions]||[]).each do |version|
version["created_at"] = version.delete("date")
version["target"] = post
post.version_klass.create!(version)
end
post.unset({:versions => true})
end
end
task :ads => [:init] do
collection = Mongoid.database.collection("ads")
counters = {}
collection.find.each do |ad|
group = Group.find(ad["group_id"])
positions = {'context_panel' => "sidebar",
'header' => "header",
'footer' => "footer",
'content' => "navbar"}
widget = nil
if ad['_type'] == "Adsense"
widget = AdsenseWidget.new(:settings =>{:client => ad['google_ad_client'],
:slot => ad['google_ad_slot'],
:width => ad['google_ad_width'],
:height => ad['google_ad_height']})
widget_list = group.mainlist_widgets
widget_list.send(:"#{positions[ad['position']]}") << widget
widget.save
end
end
collection.remove
end
task :wiki_booleans => [:init] do
Answer.override({:wiki=>"0"},{:wiki=>false})
Answer.override({:wiki=>"1"},{:wiki=>true})
Question.override({:wiki=>"0"},{:wiki=>false})
Question.override({:wiki=>"1"},{:wiki=>true})
end
task :themes => [:init] do
theme = Theme.where(:is_default => true).first
if !theme
theme = Theme.create_default
theme.bg_image = File.open(Rails.root+"public/images/back-site.gif")
Jobs::Themes.generate_stylesheet(theme.id)
Group.override({}, :current_theme_id => theme.id)
end
Group.all.each do |g|
if g.has_custom_css? && !g.custom_css.nil?
begin
custom_css = g.custom_css.read
if !custom_css.blank?
theme = Theme.create(:name => "#{g.name}'s theme", :custom_css => custom_css)
Jobs::Themes.generate_stylesheet(theme.id)
end
g.delete_file("custom_css")
rescue
g.delete_file("custom_css")
p "error"
end
end
end
end
task :regenerate_themes => [:init] do
Theme.all.each {|theme| Jobs::Themes.generate_stylesheet(theme.id)}
end
task :update_tag_followers_count => [:init] do
Tag.override({}, {:followers_count => 0.0})
Membership.all.each do |membership|
Tag.increment({:name => {:$in => membership.preferred_tags||[]}, :group_id => membership.group.id}, {:followers_count => 1})
end
end
task :update_reputation_keys => [:init] do
Group.override({}, {"reputation_rewards.post_banned" => -200})
Group.override({}, {"reputation_constrains.ask" => -100})
Group.override({}, {"reputation_constrains.answer" => -300})
ConstrainsConfig.override({}, {"content.ask" => -100})
ConstrainsConfig.override({}, {"content.answer" => -300})
end
task :themes_files => [:init] do
Theme.all.each do |f|
f.stylesheet["content_type"] = "text/css"
f.save
end
Theme.all.each do |f|
f.has_js = false
f.save
end
end
task :fix_themes => [:init] do
Theme.all.each do |theme|
next if !theme[:button_bg_color]
theme.override(:brand_color => theme[:button_bg_color])
end
Theme.unset({}, {:use_button_bg_color => true, :button_fg_color=> true, :button_bg_color=> true, :use_link_bg_color=> true, :link_bg_color=> true, :link_fg_color=> true, :view_fg_color=> true})
Theme.all.each {|theme| Jobs::Themes.generate_stylesheet(theme.id)}
end
task :votes_to_followers => [:init] do
count = Question.count
i=1
Question.all.each do |q|
p "#{i}/#{count}"
q.votes.keys.each do |u|
q.add_follower(User.find(u))
end
i+=1
end
end
end
Reorder fixall tasks
Signed-off-by: David A. Cuadrado <9e86d6d2f3901859480d56aec66da07500c6a5df@gmail.com>
## fixall0 and fixall1 can be ran in parallel, fixall2 must be ran at the end
desc "Fix all"
task :fixall => [:init, :fixall0, :fixall1, :fixall2, :fixall3, :fixall4] do
end
task :fixall0 => [:init, "fixdb:create_thumbnails"] do
end
task :fixall1 => [:init, "fixdb:questions", "fixdb:contributions", "fixdb:dates", "fixdb:openid", "fixdb:relocate", "fixdb:votes", "fixdb:counters", "fixdb:sync_counts", "fixdb:last_target_type"] do
end
task :fixall2 => [:init, "fixdb:fix_moved_comments_and_set_comment_count", "fixdb:comments", "fixdb:widgets", "fixdb:tags", "fixdb:update_answers_favorite"] do
end
task :fixall3 => [:init, "fixdb:groups", "fixdb:remove_retag_other_tag", "setup:create_reputation_constrains_modes", "fixdb:update_group_notification_config", "fixdb:set_follow_ids", "fixdb:set_friends_lists", "fixdb:fix_twitter_users", "fixdb:fix_facebook_users", "fixdb:set_invitations_perms", "fixdb:set_signup_type", "fixdb:versions", "fixdb:ads", "fixdb:wiki_booleans", "fixdb:themes", "fixdb:update_reputation_keys", "fixdb:votes_to_followers"]
task :fixall4 => [:init, "fixdb:memberships", "fixdb:update_tag_followers_count"] do
end
task :init => [:environment] do
class Question
def set_created_at; end
def set_updated_at; end
end
class Answer
def set_created_at; end
def set_updated_at; end
end
class Group
def set_created_at; end
def set_updated_at; end
end
GC.start
end
namespace :fixdb do
task :memberships => [:init] do
user_count= User.count
user_count_i = 0
memberships = []
p "gathering memberships"
User.all.each do |user|
if user[:membership_list]
count = user[:membership_list].count
user.memberships.delete_all
(user[:membership_list] || {}).each do |group_id, membership|
if Group.find(group_id)
membership["_id"] = BSON::ObjectId.new.to_s
membership["group_id"] = group_id
membership["user_id"] = user.id
membership["joined_at"] ||= user.created_at
memberships << membership
end
end
end
user_count_i+=1
puts "#{user_count_i}/#{user_count}"
end
msc = memberships.size
msi = 0
p "creating memberships:"
memberships.each do |m|
Membership.create!(m)
p "#{msi+=1}/#{msc}"
end
User.unset({}, {:membership_list => 1})
p "done creating membership"
end
task :questions => [:init] do
Question.all.each do |question|
question.override(:_random => rand())
question.override(:_random_times => 0.0)
watchers = question.raw_attributes["watchers"]
question.unset(:watchers => true)
if watchers.kind_of?(Array)
question.override(:follower_ids => watchers)
end
end
end
task :contributions => [:init] do
Question.only(:user_id, :contributor_ids).all.each do |question|
question.add_contributor(question.user) if question.user
question.answers.only(:user_id).all.each do |answer|
question.add_contributor(answer.user) if answer.user
end
end
end
task :dates => [:init] do
%w[badges questions comments votes users announcements groups memberships pages reputation_events user_stats versions views_counts].each do |cname|
coll = Mongoid.master.collection(cname)
coll.find.each do |q|
%w[activity_at last_target_date created_at updated_at birthday last_logged_at starts_at ends_at last_activity_at time date].each do |key|
if q[key].is_a?(String)
q[key] = Time.parse(q[key])
end
end
coll.save(q)
end
end
end
task :openid => [:init] do
User.all.each do |user|
next if user.identity_url.blank?
puts "Updating: #{user.login}"
user.push_uniq(:auth_keys => "open_id_#{user[:identity_url]}")
user.unset(:identity_url => 1)
end
end
task :update_answers_favorite => [:init] do
Mongoid.database.collection("favorites").remove
answers = Mongoid.database.collection("answers")
answers.update({ }, {"$set" => {"favorite_counts" => 0}})
end
# Recompute the denormalized vote/flag counters on comments and questions
# from the raw "votes" collection.
task :sync_counts => [:init] do
  votes = Mongoid.database.collection("votes")
  comments = Mongoid.database.collection("comments")
  puts "updating comment's counts"
  comments.find.each do |c|
    print "."
    votes_average=0
    votes.find(:voteable_id => c["_id"]).each do |v|
      votes_average+=v["value"]
    end
    # BUG FIX: the selector previously used c["id"], which is nil on raw
    # Mongo documents (the driver keys the primary key as "_id"), so these
    # updates matched no document and the counters were never written.
    comments.update({:_id => c["_id"]},
    {"$set" => {"votes_count" => votes.find(:voteable_id => c["_id"]).count,
    "votes_average" => votes_average}})
    if c["flags"]
      comments.update({:_id => c["_id"]}, {"$set" => {"flags_count" => c["flags"].size}})
    end
  end
  puts "updating questions's counts"
  Question.all.each do |q|
    print "."
    votes_average=0
    votes.find(:voteable_id => q.id).each do |v|
      votes_average+=v["value"]
    end
    q.override("flags_count" => q.flags.size, "votes_count" => q.votes.size, "votes_average" => votes_average)
  end
end
task :counters => :init do
Question.all.each do |q|
q.override(:close_requests_count => q.close_requests.size)
q.override(:open_requests_count => q.open_requests.size)
end
end
task :last_target_type => [:init] do
puts "updating questions#last_target_type"
Question.where({:last_target_type.ne => nil}).all.each do |q|
print "."
if(q.last_target_type != "Comment")
last_target = q.last_target_type.constantize.find(q.last_target_id)
else
data = Mongoid.database.collection("comments").find_one(:_id => q.last_target_id)
last_target = Comment.new(data)
end
if(last_target)
if(last_target.respond_to?(:updated_at) && last_target.updated_at && last_target.updated_at.is_a?(String))
last_target.updated_at = Time.parse(last_target.updated_at)
end
Question.update_last_target(q.id, last_target)
end
end
end
task :votes => [:init] do
puts "updating votes"
comments = Mongoid.database.collection("comments")
comments.update({:votes => nil}, {"$set" => {"votes" => {}}}, :multi => true)
questions = Mongoid.database.collection("questions")
questions.update({:votes => nil}, {"$set" => {"votes" => {}}}, :multi => true)
Group.all.each do |group|
count = 0
Mongoid.database.collection("votes").find({:group_id => group["_id"]}).each do |vote|
vote.delete("group_id")
id = vote.delete("voteable_id")
klass = vote.delete("voteable_type")
collection = comments
if klass == "Question"
collection = questions;
end
count += 1
collection.update({:_id => id}, "$set" => {"votes.#{vote["user_id"]}" => vote["value"]})
end
if count > 0
puts "Updated #{count} #{group["name"]} votes"
end
end
Mongoid.database.collection("votes").drop
end
task :fix_moved_comments_and_set_comment_count => [:init] do
comments = Mongoid.database.collection("comments")
questions = Mongoid.database.collection("questions")
users = Mongoid.database.collection("users")
x = 0
Mongoid.database.collection("comments").find(:_type => "Comment").each do |c|
collection = comments
if c["commentable_type"] == "Question"
collection = questions;
end
parent = collection.find(:_id => c["commentable_id"]).first
if parent && parent["group_id"] != c["group_id"]
c["group_id"] = parent["group_id"]
comments.update({ :_id => c["_id"]}, c)
x += 1
end
# update user's comment count
users.update({ :_id => c["user_id"]}, "$inc" => {"membership_list.#{c['group_id']}.comments_count" => 1})
end
p "#{x} moved comments had the wrong group_id"
end
task :comments => [:init] do
puts "updating comments"
comments = Mongoid.database.collection("comments")
questions = Mongoid.database.collection("questions")
questions.update({}, {"$set" => {:comments => []}})
comments.update({}, {"$set" => {:comments => []}})
Mongoid.database.collection("comments").find(:_type => "Comment").each do |comment|
id = comment.delete("commentable_id")
klass = comment.delete("commentable_type")
collection = comments
%w[created_at updated_at].each do |key|
if comment[key].is_a?(String)
comment[key] = Time.parse(comment[key])
end
end
if klass == "Question"
collection = questions;
end
comment.delete("comments")
collection.update({:_id => id}, "$addToSet" => {:comments => comment})
comments.remove({:_id => comment["_id"]})
end
begin
Mongoid.database.collection("answers").drop
ensure
begin
comments.rename("answers")
rescue
puts "comments collection doesn't exists"
ensure
Answer.override({}, {:_type => "Answer"})
end
end
answers_coll = Mongoid.database.collection("answers")
answers_coll.find().each do |answer|
%w[created_at updated_at].each do |key|
if answer[key].is_a?(String)
answer[key] = Time.parse(answer[key])
end
end
answers_coll.save(answer)
end
puts "updated comments"
end
task :groups => [:init] do
Group.where({:language.in => [nil, '', 'none']}).all.each do |group|
lang = group.description.to_s.language
puts "Updating #{group.name} subdomain='#{group.subdomain}' detected as: #{lang}"
group.language = (lang == :spanish) ? 'es' : 'en'
group.languages = DEFAULT_USER_LANGUAGES
if group.valid?
group.save
else
puts "Invalid group: #{group.errors.full_messages}"
end
end
end
task :relocate => [:init] do
doc = JSON.parse(File.read('data/countries.json'))
i=0
Question.override({:address => nil}, :address => {})
Answer.override({:address => nil}, :address => {})
User.override({:address => nil}, :address => {})
doc.keys.each do |key|
User.where({:country_name => key}).all.each do |u|
p "#{u.login}: before: #{u.country_name}, after: #{doc[key]["address"]["country"]}"
lat = Float(doc[key]["lat"])
lon = Float(doc[key]["lon"])
User.override({:_id => u.id},
{:position => {lat: lat, long: lon},
:address => doc[key]["address"] || {}})
# FIXME
# Comment.override({:user_id => u.id},
# {:position => GeoPosition.new(lat, lon),
# :address => doc[key]["address"]})
Question.override({:user_id => u.id},
{:position => {lat: lat, long: lon},
:address => doc[key]["address"] || {}})
Answer.override({:user_id => u.id},
{:position => {lat: lat, long: lon},
:address => doc[key]["address"] || {}})
end
end
end
task :widgets => [:init] do
c=Group.count
Group.unset({}, [:widgets, :question_widgets, :mainlist_widgets, :external_widgets])
i=0
Group.all.each do |g|
g.reset_widgets!
g.save(:validate => false)
p "(#{i+=1}/#{c}) Updated widgets for group #{g.name}"
end
end
task :update_group_notification_config => [:init] do
puts "updating groups notification config"
Group.all.each do |g|
g.notification_opts = GroupNotificationConfig.new
g.save
end
puts "done"
end
# Backfill the Tag collection from each question's embedded tag list:
# bump the counter for tags that already exist, create missing ones.
task :tags => [:init] do
  count = Question.count
  i = 0
  bad_count = 0
  # BUG FIX: the old selector {"$ne" => [], "$ne" => nil} is a Ruby hash
  # literal with a duplicate key — only the last "$ne" survived, so
  # questions with an empty tag array were still scanned. "$nin" expresses
  # both exclusions in a single operator.
  Question.where(:tags => {"$nin" => [nil, []]}).all.each do |q|
    q.tags.each do |tag_name|
      existing_tag = Tag.where(:name => tag_name, :group_id => q.group_id).first
      if existing_tag
        existing_tag.inc(:count, 1)
      else
        tag = Tag.new(:name => tag_name)
        if q.group
          tag.group = q.group
          tag.user = q.group.owner
          # Date the tag at the group's oldest question so ordering stays sane.
          tag.used_at = tag.created_at = tag.updated_at = q.group.questions.where(:created_at=>{:$ne=>nil}).order_by([:created_at, :asc]).first.created_at
          tag.save
        else
          # BUG FIX: was "bad_count += 0", which never incremented the counter.
          bad_count += 1
        end
      end
    end
    p "#{i+=1}/#{count}"
  end
  p "Found #{bad_count} tags on questions without a group"
end
task :remove_retag_other_tag => [:init] do
Group.unset({}, "reputation_constrains.retag_others_tags" => 1 )
end
task :cleanup => [:init] do
p "removing #{Question.where(:group_id => nil).destroy_all} orphan questions"
p "removing #{Answer.where(:group_id => nil).destroy_all} orphan answers"
end
task :set_follow_ids => [:init] do
p "setting nil following_ids to []"
FriendList.override({:following_ids => nil}, {:following_ids => []})
p "setting nil follower_ids to []"
FriendList.override({:follower_ids => nil}, {:follower_ids => []})
p "done"
end
task :set_friends_lists => [:init] do
total = User.count
i = 1
p "updating #{total} users facebook friends list"
User.all.each do |u|
u.send(:initialize_fields)
if u.external_friends_list.nil?
u.send(:create_lists)
end
if u.read_list.nil?
read_list = ReadList.create
u.read_list = read_list
end
p "#{i}/#{total} #{u.login}"
i += 1
end
p "done"
end
task :fix_twitter_users => [:init] do
users = User.where({:twitter_token => {:$ne => nil}})
users.each do |u|
twitter_id = u.twitter_token.split('-').first
p "fixing #{u.login} with twitter id #{twitter_id}"
u["auth_keys"] = [] if u["auth_keys"].nil?
u["auth_keys"] << "twitter_#{twitter_id}"
u["auth_keys"].uniq!
u["twitter_id"] = twitter_id
u["user_info"] = { } if u["user_info"].nil?
u["user_info"]["twitter"] = { "old" => 1}
u.save(:validate => false)
end
end
task :fix_facebook_users => [:init] do
users = User.where({:facebook_id => {:$ne => nil}})
users.each do |u|
facebook_id = u.facebook_id
p "fixing #{u.login} with facebook id #{facebook_id}"
u["auth_keys"] = [] if u["auth_keys"].nil?
u["auth_keys"] << "facebook_#{facebook_id}"
u["auth_keys"].uniq!
u["user_info"] = { } if u["user_info"].nil?
u["user_info"]["facebook"] = { "old" => 1}
u.save(:validate => false)
end
end
task :create_thumbnails => [:init] do
Group.all.each do |g|
begin
puts "Creating thumbnails for #{g.name} #{g.id}"
Jobs::Images.generate_group_thumbnails(g.id)
rescue Mongo::GridFileNotFound => e
puts "error getting #{g.name}'s logo"
end
end
end
# Seed the default invitation permission on every group:
# private groups -> only the owner may invite; public groups -> any user may.
task :set_invitations_perms => [:init] do
  p "setting invitations permissions on groups"
  p "only owners can invite people on private groups by default"
  # BUG FIX: both overrides previously matched {:private => false}, so private
  # groups were never updated and public groups were written twice (ending up
  # as "user" either way). The first override must target private groups.
  Group.override({:private => true}, {:invitations_perms => "owner"})
  p "anyone can invite people on public groups by default"
  Group.override({:private => false}, {:invitations_perms => "user"})
  p "done"
end
task :set_signup_type => [:init] do
p "setting signup type for groups"
Group.override({:openid_only => true}, {:signup_type => "noemail"})
Group.override({:openid_only => false}, {:signup_type => "all"})
p "done"
end
task :versions => [:init] do
Question.only(:versions, :versions_count).each do |question|
next if question.versions.count > 0
question.override({:versions_count => 0})
(question[:versions]||[]).each do |version|
version["created_at"] = version.delete("date")
version["target"] = question
question.version_klass.create!(version)
end
question.unset({:versions => true})
end
Answer.only(:versions, :versions_count).each do |post|
next if post.versions_count.to_i > 0
post.override({:versions_count => 0})
(post[:versions]||[]).each do |version|
version["created_at"] = version.delete("date")
version["target"] = post
post.version_klass.create!(version)
end
post.unset({:versions => true})
end
end
task :ads => [:init] do
collection = Mongoid.database.collection("ads")
counters = {}
collection.find.each do |ad|
group = Group.find(ad["group_id"])
positions = {'context_panel' => "sidebar",
'header' => "header",
'footer' => "footer",
'content' => "navbar"}
widget = nil
if ad['_type'] == "Adsense"
widget = AdsenseWidget.new(:settings =>{:client => ad['google_ad_client'],
:slot => ad['google_ad_slot'],
:width => ad['google_ad_width'],
:height => ad['google_ad_height']})
widget_list = group.mainlist_widgets
widget_list.send(:"#{positions[ad['position']]}") << widget
widget.save
end
end
collection.remove
end
task :wiki_booleans => [:init] do
Answer.override({:wiki=>"0"},{:wiki=>false})
Answer.override({:wiki=>"1"},{:wiki=>true})
Question.override({:wiki=>"0"},{:wiki=>false})
Question.override({:wiki=>"1"},{:wiki=>true})
end
task :themes => [:init] do
theme = Theme.where(:is_default => true).first
if !theme
theme = Theme.create_default
theme.bg_image = File.open(Rails.root+"public/images/back-site.gif")
Jobs::Themes.generate_stylesheet(theme.id)
Group.override({}, :current_theme_id => theme.id)
end
Group.all.each do |g|
if g.has_custom_css? && !g.custom_css.nil?
begin
custom_css = g.custom_css.read
if !custom_css.blank?
theme = Theme.create(:name => "#{g.name}'s theme", :custom_css => custom_css)
Jobs::Themes.generate_stylesheet(theme.id)
end
g.delete_file("custom_css")
rescue
g.delete_file("custom_css")
p "error"
end
end
end
end
task :regenerate_themes => [:init] do
Theme.all.each {|theme| Jobs::Themes.generate_stylesheet(theme.id)}
end
task :update_tag_followers_count => [:init] do
Tag.override({}, {:followers_count => 0.0})
Membership.all.each do |membership|
Tag.increment({:name => {:$in => membership.preferred_tags||[]}, :group_id => membership.group.id}, {:followers_count => 1})
end
end
task :update_reputation_keys => [:init] do
Group.override({}, {"reputation_rewards.post_banned" => -200})
Group.override({}, {"reputation_constrains.ask" => -100})
Group.override({}, {"reputation_constrains.answer" => -300})
ConstrainsConfig.override({}, {"content.ask" => -100})
ConstrainsConfig.override({}, {"content.answer" => -300})
end
task :themes_files => [:init] do
Theme.all.each do |f|
f.stylesheet["content_type"] = "text/css"
f.save
end
Theme.all.each do |f|
f.has_js = false
f.save
end
end
task :fix_themes => [:init] do
Theme.all.each do |theme|
next if !theme[:button_bg_color]
theme.override(:brand_color => theme[:button_bg_color])
end
Theme.unset({}, {:use_button_bg_color => true, :button_fg_color=> true, :button_bg_color=> true, :use_link_bg_color=> true, :link_bg_color=> true, :link_fg_color=> true, :view_fg_color=> true})
Theme.all.each {|theme| Jobs::Themes.generate_stylesheet(theme.id)}
end
task :votes_to_followers => [:init] do
count = Question.count
i=1
Question.all.each do |q|
p "#{i}/#{count}"
q.votes.keys.each do |u|
q.add_follower(User.find(u))
end
i+=1
end
end
end
|
require 'pp'
namespace :fixer do
#########################################################################################################
desc "check fixer status mismatches"
task check: [:environment] do
verbose = ENV['VERBOSE']
debug = ENV['DEBUG']
limit = ENV['LIMIT']
ok_to_recover = ENV['RECOVER']
recover_types = Hash[ ENV['RECOVER_TYPES'] ? ENV['RECOVER_TYPES'].split(/\ *,\ */).map{|t| [t, true]} : [] ]
debug and pp recover_types.inspect
puts "Finding all tasks with status mismatch..."
puts "Will try to recover these types: #{ recover_types.keys.inspect }"
mismatched_tasks = Task.get_mismatched_status('working')
mismatched_report = Hash.new{ |h,k| h[k] = 0 }
mismatched_tasks.each do |task|
mismatched_report[task.type] += 1
if limit and limit.to_i <= mismatched_report[task.type]
debug and puts "Limit #{limit} reached for #{task.type}. Skipping eval of task #{task.id}"
next
end
debug and puts "Task #{task.type} #{task.id} has status '#{task.status}' with results: " + task.results.inspect
if recover_types.has_key?(task.type) and ok_to_recover
verbose and puts "Calling #{task.type}.find(#{task.id}).recover!"
task.recover!
verbose and puts "#{task.type}.find(#{task.id}) new status == #{task.status}"
mismatched_report[task.status] += 1
end
end
puts "These tasks with mismatched status were found:"
pp mismatched_report
end
#########################################################################################################
desc "nudge unfinished tasks toward the finish line"
task nudge: [:environment] do
verbose = ENV['VERBOSE']
debug = ENV['DEBUG']
limit = ENV['LIMIT']
ok_to_recover = ENV['RECOVER']
recover_types = Hash[ ENV['RECOVER_TYPES'] ? ENV['RECOVER_TYPES'].split(/\ *,\ */).map{|t| [t, true]} : [] ]
report = Hash.new{ |h,k| h[k] = 0 }
unfinished = Task.incomplete
verbose and puts "Nudging #{unfinished.count} unfinished tasks"
verbose and puts "Will try to recover these types: #{ recover_types.keys.inspect }"
unfinished.find_in_batches do |taskgroup|
taskgroup.each do |task|
if limit and limit.to_i <= report[task.type]
next
end
debug and puts "Task.find(#{task.id}) -> #{task.type}"
report[task.type] += 1
if task.stuck?
report[task.type+'-stuck'] += 1
if ok_to_recover and recover_types.has_key?(task.type)
begin
task.recover!
report[task.type+'-recovered'] += 1
rescue Exceptions::PrivateFileNotFound => err
STDERR.puts err # warn and continue
rescue => err
raise err # re-throw
end
end
end
end
end
verbose and pp report
end
#########################################################################################################
desc "nudge unfinished uploads toward the finish line"
task nudge_uploads: [:environment] do
  verbose = ENV['VERBOSE']
  debug = ENV['DEBUG']
  limit = ENV['LIMIT']
  recover = ENV['RECOVER']
  # Counters default to 0 so they can be incremented without initialization.
  report = Hash.new { |h, k| h[k] = 0 }
  unfinished = Task.upload.incomplete
  verbose and puts "Nudging #{unfinished.count} unfinished Upload tasks"
  unfinished.find_in_batches do |taskgroup|
    taskgroup.each do |task|
      # BUG FIX: report is only ever keyed by symbols (:incomplete,
      # :zero_chunks, :stuck, ...), never by task.type, so the old
      # `report[task.type]` lookup always returned 0 and LIMIT never
      # took effect. Compare against the tasks actually examined.
      if limit and limit.to_i <= report[:incomplete]
        next
      end
      debug and puts "Task::UploadTask.find(#{task.id})"
      report[:incomplete] += 1
      if task.num_chunks == 0
        report[:zero_chunks] += 1
      end
      # Chunked upload that never received its full set of chunks.
      if task.num_chunks > 0 and task.num_chunks != task.chunks_uploaded.size
        report[:chunks_unfinished] += 1
      end
      if task.stuck?
        report[:stuck] += 1
        # Only attempt recovery when the RECOVER env var is set.
        task.recover! if recover
      end
    end
  end
  verbose and pp report
end
#########################################################################################################
desc "set duration from any complete transcoding"
task set_duration: [:environment] do
print "Finding all audio_file records with nil duration... "
affected = AudioFile.where('duration is null')
puts "found #{affected.count}"
fixed = 0
affected.find_in_batches do |afgroup|
afgroup.each do |af|
af.tasks.each do |task|
# Only a finished transcode carries a trustworthy length result.
if task.type == "Tasks::TranscodeTask" and task.status == 'complete'
# Guard each level: results may be nil or missing the info/length keys.
if task.results and task.results['info'] and task.results['info']['length']
puts "audio #{af.id} has nil duration, but task #{task.identifier}:#{task.id} has length #{task.results['info']['length']}"
# to_i truncates; update_attribute skips validations but runs callbacks.
af.update_attribute(:duration, task.results['info']['length'].to_i)
fixed += 1
end
end
end
end
end
puts "Updated #{fixed} audio_files"
end
#########################################################################################################
desc "unfinished uploads"
task unfinished_uploads: [:environment] do
# NOTE(review): the duration-is-null query is used as a proxy for
# audio that never finished processing; hence the progress message.
print "Finding all audio_file records with nil duration... "
affected = AudioFile.where('duration is null')
puts "found #{affected.count}"
affected.find_in_batches do |afgroup|
afgroup.each do |af|
# Report-only: nothing is mutated, we just log failed uploads.
if af.has_failed_upload?
puts "AudioFile.find(#{af.id}) has failed upload. Status==#{af.current_status}"
end
end
end
end
#########################################################################################################
desc "speechmatics sanity check"
task speechmatics_poll: [:environment] do
# No tasks are recovered by default, since that means notifying the user on success,
# which we might not want to do in testing/dev. Set RECOVER env var to trigger task.recover!
ok_to_recover = ENV['RECOVER']
# find all status=created older than N hours
# and verify they exist at SM. If not, cancel them.
ago = Task.work_window
sm_tasks = Task.speechmatics_transcribe.incomplete.where('created_at < ?', ago)
sm_tasks_count = sm_tasks.count
puts "Found #{sm_tasks_count} unfinished Speechmatics tasks older than #{ago}"
report = {'cancelled, no job_id' => 0, 'missing job_id' => 0, 'No SM job found for job_id' => 0, 'recovered' => 0}
# fetch all SM jobs at once to save HTTP overhead.
# TODO ask them to implement sorting, searching, paging.
sm = Speechmatics::Client.new({ :request => { :timeout => 120 } })
sm_jobs = sm.user.jobs.list.jobs
# create lookup hash by job id
sm_jobs_lookup = Hash[ sm_jobs.map{ |smjob| [smjob['id'].to_s, smjob] } ]
sm_tasks.find_in_batches do |taskgroup|
taskgroup.each do |task|
# if we don't have a job_id then it never was created at SM
if !task.extras['job_id']
puts "Task.find(#{task.id}) has no job_id: #{task.inspect}"
# if not recovering, log it and skip to next
if !ok_to_recover
report['missing job_id'] += 1
next
end
begin
task.recover! # should cancel it with err msg if can't reverse lookup job_id
rescue Exception => err
puts "Task.find(#{task.id}).recover failed with #{err}"
next
end
if task.status == "cancelled"
report['cancelled, no job_id'] += 1
elsif task.status == "complete" || task.status == "working"
report['recovered'] += 1
else
puts "Called Task.find(#{task.id}).recover! and ended with status '#{task.status}'"
end
next
end
# lookup SM status
sm_job = sm_jobs_lookup[task.extras['job_id']]
if !sm_job
task.extras[:error] = "No SM job found for job_id"
task.cancel!
puts "No SM job found for task: #{task.inspect}"
report['No SM job found for job_id'] += 1
next
end
puts "Task.find(#{task.id}) looks like this at SM: #{sm_job.inspect}"
if ok_to_recover
task.recover! && report['recovered'] += 1
end
end
end
report.keys.each do |k|
puts "#{k} => #{report[k].to_s}"
end
end
#########################################################################################################
desc "check for audio with no transcript and (optionally) create it"
task transcript_check: [:environment] do
# Dry-run by default; set RECOVER to actually kick off transcript tasks.
ok_to_recover = ENV['RECOVER']
verbose = ENV['VERBOSE']
# Candidates: audio with a known duration but no transcript rows at all.
AudioFile.where('duration is not null').where('id not in (select audio_file_id from transcripts)').find_in_batches do |afgroup|
afgroup.each do |af|
if !af.has_file? && af.original_file_url.blank?
next # can't do anything
end
if af.needs_transcript?
verbose and puts "AudioFile.find(#{af.id}) needs any transcript"
ok_to_recover and af.check_tasks
end
end
end
end # task
#########################################################################################################
desc "fix broken audio file links"
task sc_fix: [:environment] do
filename = ENV['FILE'] or raise "FILE required"
File.readlines(filename).each do |line|
item_id = line.chomp
item = Item.find item_id
item.audio_files.each do |af|
if af.stuck?
puts '='*80
puts "af #{af.id} #{af.current_status}"
puts "token=#{af.item.token}"
puts "url=#{af.url}"
puts "dest_path=#{af.destination_path}"
puts "process_file_url=#{af.process_file_url}"
copy_url = URI(af.tasks.copy.valid.first.identifier)
puts "actual=#{copy_url}"
bucket = copy_url.host
real_token = bucket+'/'+copy_url.path.split(/\//)[1]
puts "real_token=#{real_token}"
cmd = "aws ls #{real_token}"
puts "#{cmd}"
#system(cmd)
aws_info = `#{cmd}`.split("\n")
orig_path = nil
aws_info.grep(/^\|/).slice(2..-1).each do |awsi|
aws_parts = awsi.split(/\ *\|\ */)
puts "aws_parts=#{aws_parts.inspect}"
orig_path = aws_parts[6]
if orig_path.match(/\S/)
orig_filename = File.basename(orig_path)
copier = "aws copy #{bucket}/#{af.item.token}/#{orig_filename} /#{bucket}/#{orig_path}"
deleter = "aws rm /#{bucket}/#{orig_path}"
puts "copier=#{copier}"
puts "deleter=#{deleter}"
system(copier) && system(deleter)
end
end
#resp = Utils::head_resp(af.url, 1)
#puts resp.inspect
end
end
end
end
#########################################################################################################
## migration to fixer.popuparchive.com
desc "migrate storage_id on items and audio_files"
task migrate_storage_ids: [:environment] do
  # my dev env tests show this performs at about 100 rows/sec
  # BUG FIX: ActiveRecord's update_columns takes a Hash of attributes;
  # the old positional form update_columns(:storage_id, value) raises
  # ArgumentError at runtime. Hash form keeps the intended semantics
  # (direct SQL write, no validations or callbacks).
  Item.where(storage_id: nil).find_in_batches do |items|
    items.each do |item|
      item.update_columns(storage_id: item.collection.default_storage_id)
    end
  end
  AudioFile.where(storage_id: nil).find_in_batches do |afs|
    afs.each do |af|
      next unless af.item # orphaned audio_files have nothing to inherit from
      af.update_columns(storage_id: af.item.storage_id)
    end
  end
end
desc "migrate collection storage ids"
task migrate_collection_storage_ids: [:environment] do
  # **********************************************************************************************
  # WARNING: should only run this once per environment since it is destructive and not idempotent.
  # MUST run AFTER migrate_storage_ids
  # **********************************************************************************************
  # we want to assign all existing collections to new storage_configurations
  # so that any new items/audio_files added to those collections will point at
  # the new s3 location.
  # first we create some new StorageConfiguration objects pointing at new AWS location.
  aws_private_store = StorageConfiguration.create(provider: 'AWS', key: ENV['AWS_ACCESS_KEY'], secret: ENV['AWS_SECRET_ACCESS_KEY'], is_public: false, bucket: ENV['AWS_BUCKET'])
  # BUG FIX: the public store was previously created with is_public: false,
  # which silently mapped "public" collections onto a private storage config.
  aws_public_store = StorageConfiguration.create(provider: 'AWS', key: ENV['AWS_ACCESS_KEY'], secret: ENV['AWS_SECRET_ACCESS_KEY'], is_public: true, bucket: ENV['AWS_BUCKET'])
  # second, iterate over all collections and change any whose storage is currently AWS to a new config
  Collection.find_in_batches do |colls|
    colls.each do |coll|
      next unless coll.default_storage.provider == 'AWS'
      if coll.default_storage.is_public
        coll.default_storage_id = aws_public_store.id
      else
        coll.default_storage_id = aws_private_store.id
      end
      coll.save!
    end
  end
end
desc "run all fixer.popuparchive.com migrations"
task pua_migrate: [:environment] do
# Order matters: the collection migration must run after the storage-id
# backfill (see the WARNING on migrate_collection_storage_ids).
# reenable lets each task be invoked again within this same process.
Rake::Task["fixer:migrate_storage_ids"].invoke
Rake::Task["fixer:migrate_storage_ids"].reenable
Rake::Task["fixer:migrate_collection_storage_ids"].invoke
Rake::Task["fixer:migrate_collection_storage_ids"].reenable
end
end
Migrate image_files along with audio files and items.
require 'pp'
namespace :fixer do
#########################################################################################################
desc "check fixer status mismatches"
task check: [:environment] do
verbose = ENV['VERBOSE']
debug = ENV['DEBUG']
limit = ENV['LIMIT']
ok_to_recover = ENV['RECOVER']
recover_types = Hash[ ENV['RECOVER_TYPES'] ? ENV['RECOVER_TYPES'].split(/\ *,\ */).map{|t| [t, true]} : [] ]
debug and pp recover_types.inspect
puts "Finding all tasks with status mismatch..."
puts "Will try to recover these types: #{ recover_types.keys.inspect }"
mismatched_tasks = Task.get_mismatched_status('working')
mismatched_report = Hash.new{ |h,k| h[k] = 0 }
mismatched_tasks.each do |task|
mismatched_report[task.type] += 1
if limit and limit.to_i <= mismatched_report[task.type]
debug and puts "Limit #{limit} reached for #{task.type}. Skipping eval of task #{task.id}"
next
end
debug and puts "Task #{task.type} #{task.id} has status '#{task.status}' with results: " + task.results.inspect
if recover_types.has_key?(task.type) and ok_to_recover
verbose and puts "Calling #{task.type}.find(#{task.id}).recover!"
task.recover!
verbose and puts "#{task.type}.find(#{task.id}) new status == #{task.status}"
mismatched_report[task.status] += 1
end
end
puts "These tasks with mismatched status were found:"
pp mismatched_report
end
#########################################################################################################
desc "nudge unfinished tasks toward the finish line"
task nudge: [:environment] do
verbose = ENV['VERBOSE']
debug = ENV['DEBUG']
limit = ENV['LIMIT']
ok_to_recover = ENV['RECOVER']
recover_types = Hash[ ENV['RECOVER_TYPES'] ? ENV['RECOVER_TYPES'].split(/\ *,\ */).map{|t| [t, true]} : [] ]
report = Hash.new{ |h,k| h[k] = 0 }
unfinished = Task.incomplete
verbose and puts "Nudging #{unfinished.count} unfinished tasks"
verbose and puts "Will try to recover these types: #{ recover_types.keys.inspect }"
unfinished.find_in_batches do |taskgroup|
taskgroup.each do |task|
if limit and limit.to_i <= report[task.type]
next
end
debug and puts "Task.find(#{task.id}) -> #{task.type}"
report[task.type] += 1
if task.stuck?
report[task.type+'-stuck'] += 1
if ok_to_recover and recover_types.has_key?(task.type)
begin
task.recover!
report[task.type+'-recovered'] += 1
rescue Exceptions::PrivateFileNotFound => err
STDERR.puts err # warn and continue
rescue => err
raise err # re-throw
end
end
end
end
end
verbose and pp report
end
#########################################################################################################
desc "nudge unfinished uploads toward the finish line"
task nudge_uploads: [:environment] do
verbose = ENV['VERBOSE']
debug = ENV['DEBUG']
limit = ENV['LIMIT']
recover = ENV['RECOVER']
report = Hash.new{ |h,k| h[k] = 0 }
unfinished = Task.upload.incomplete
verbose and puts "Nudging #{unfinished.count} unfinished Upload tasks"
unfinished.find_in_batches do |taskgroup|
taskgroup.each do |task|
if limit and limit.to_i <= report[task.type]
next
end
debug and puts "Task::UploadTask.find(#{task.id})"
report[:incomplete] += 1
if task.num_chunks == 0
report[:zero_chunks] += 1
end
if task.num_chunks > 0 and task.num_chunks != task.chunks_uploaded.size
report[:chunks_unfinished] += 1
end
if task.stuck?
report[:stuck] += 1
task.recover! if recover
end
end
end
verbose and pp report
end
#########################################################################################################
desc "set duration from any complete transcoding"
task set_duration: [:environment] do
print "Finding all audio_file records with nil duration... "
affected = AudioFile.where('duration is null')
puts "found #{affected.count}"
fixed = 0
affected.find_in_batches do |afgroup|
afgroup.each do |af|
af.tasks.each do |task|
if task.type == "Tasks::TranscodeTask" and task.status == 'complete'
if task.results and task.results['info'] and task.results['info']['length']
puts "audio #{af.id} has nil duration, but task #{task.identifier}:#{task.id} has length #{task.results['info']['length']}"
af.update_attribute(:duration, task.results['info']['length'].to_i)
fixed += 1
end
end
end
end
end
puts "Updated #{fixed} audio_files"
end
#########################################################################################################
desc "unfinished uploads"
task unfinished_uploads: [:environment] do
print "Finding all audio_file records with nil duration... "
affected = AudioFile.where('duration is null')
puts "found #{affected.count}"
affected.find_in_batches do |afgroup|
afgroup.each do |af|
if af.has_failed_upload?
puts "AudioFile.find(#{af.id}) has failed upload. Status==#{af.current_status}"
end
end
end
end
#########################################################################################################
desc "speechmatics sanity check"
task speechmatics_poll: [:environment] do
# No tasks are recovered by default, since that means notifying the user on success,
# which we might not want to do in testing/dev. Set RECOVER env var to trigger task.recover!
ok_to_recover = ENV['RECOVER']
# find all status=created older than N hours
# and verify they exist at SM. If not, cancel them.
ago = Task.work_window
sm_tasks = Task.speechmatics_transcribe.incomplete.where('created_at < ?', ago)
sm_tasks_count = sm_tasks.count
puts "Found #{sm_tasks_count} unfinished Speechmatics tasks older than #{ago}"
report = {'cancelled, no job_id' => 0, 'missing job_id' => 0, 'No SM job found for job_id' => 0, 'recovered' => 0}
# fetch all SM jobs at once to save HTTP overhead.
# TODO ask them to implement sorting, searching, paging.
sm = Speechmatics::Client.new({ :request => { :timeout => 120 } })
sm_jobs = sm.user.jobs.list.jobs
# create lookup hash by job id
sm_jobs_lookup = Hash[ sm_jobs.map{ |smjob| [smjob['id'].to_s, smjob] } ]
sm_tasks.find_in_batches do |taskgroup|
taskgroup.each do |task|
# if we don't have a job_id then it never was created at SM
if !task.extras['job_id']
puts "Task.find(#{task.id}) has no job_id: #{task.inspect}"
# if not recovering, log it and skip to next
if !ok_to_recover
report['missing job_id'] += 1
next
end
begin
task.recover! # should cancel it with err msg if can't reverse lookup job_id
rescue Exception => err
puts "Task.find(#{task.id}).recover failed with #{err}"
next
end
if task.status == "cancelled"
report['cancelled, no job_id'] += 1
elsif task.status == "complete" || task.status == "working"
report['recovered'] += 1
else
puts "Called Task.find(#{task.id}).recover! and ended with status '#{task.status}'"
end
next
end
# lookup SM status
sm_job = sm_jobs_lookup[task.extras['job_id']]
if !sm_job
task.extras[:error] = "No SM job found for job_id"
task.cancel!
puts "No SM job found for task: #{task.inspect}"
report['No SM job found for job_id'] += 1
next
end
puts "Task.find(#{task.id}) looks like this at SM: #{sm_job.inspect}"
if ok_to_recover
task.recover! && report['recovered'] += 1
end
end
end
report.keys.each do |k|
puts "#{k} => #{report[k].to_s}"
end
end
#########################################################################################################
desc "check for audio with no transcript and (optionally) create it"
task transcript_check: [:environment] do
ok_to_recover = ENV['RECOVER']
verbose = ENV['VERBOSE']
AudioFile.where('duration is not null').where('id not in (select audio_file_id from transcripts)').find_in_batches do |afgroup|
afgroup.each do |af|
if !af.has_file? && af.original_file_url.blank?
next # can't do anything
end
if af.needs_transcript?
verbose and puts "AudioFile.find(#{af.id}) needs any transcript"
ok_to_recover and af.check_tasks
end
end
end
end # task
#########################################################################################################
desc "fix broken audio file links"
task sc_fix: [:environment] do
filename = ENV['FILE'] or raise "FILE required"
File.readlines(filename).each do |line|
item_id = line.chomp
item = Item.find item_id
item.audio_files.each do |af|
if af.stuck?
puts '='*80
puts "af #{af.id} #{af.current_status}"
puts "token=#{af.item.token}"
puts "url=#{af.url}"
puts "dest_path=#{af.destination_path}"
puts "process_file_url=#{af.process_file_url}"
copy_url = URI(af.tasks.copy.valid.first.identifier)
puts "actual=#{copy_url}"
bucket = copy_url.host
real_token = bucket+'/'+copy_url.path.split(/\//)[1]
puts "real_token=#{real_token}"
cmd = "aws ls #{real_token}"
puts "#{cmd}"
#system(cmd)
aws_info = `#{cmd}`.split("\n")
orig_path = nil
aws_info.grep(/^\|/).slice(2..-1).each do |awsi|
aws_parts = awsi.split(/\ *\|\ */)
puts "aws_parts=#{aws_parts.inspect}"
orig_path = aws_parts[6]
if orig_path.match(/\S/)
orig_filename = File.basename(orig_path)
copier = "aws copy #{bucket}/#{af.item.token}/#{orig_filename} /#{bucket}/#{orig_path}"
deleter = "aws rm /#{bucket}/#{orig_path}"
puts "copier=#{copier}"
puts "deleter=#{deleter}"
system(copier) && system(deleter)
end
end
#resp = Utils::head_resp(af.url, 1)
#puts resp.inspect
end
end
end
end
#########################################################################################################
## migration to fixer.popuparchive.com
desc "migrate storage_id on items and audio_files"
task migrate_storage_ids: [:environment] do
  # my dev env tests show this performs at about 100 rows/sec
  # BUG FIX: ActiveRecord's update_columns takes a Hash of attributes;
  # the old positional form update_columns(:storage_id, value) raises
  # ArgumentError at runtime. Hash form keeps the intended semantics
  # (direct SQL write, no validations or callbacks).
  Item.where(storage_id: nil).find_in_batches do |items|
    items.each do |item|
      item.update_columns(storage_id: item.collection.default_storage_id)
    end
  end
  AudioFile.where(storage_id: nil).find_in_batches do |afs|
    afs.each do |af|
      next unless af.item # orphaned audio_files have nothing to inherit from
      af.update_columns(storage_id: af.item.storage_id)
    end
  end
  ImageFile.where(storage_id: nil).find_in_batches do |imgs|
    imgs.each do |imgf|
      next unless imgf.imageable # skip images detached from any record
      imgf.update_columns(storage_id: imgf.imageable.storage_id)
    end
  end
end
desc "migrate collection storage ids"
task migrate_collection_storage_ids: [:environment] do
  # **********************************************************************************************
  # WARNING: should only run this once per environment since it is destructive and not idempotent.
  # MUST run AFTER migrate_storage_ids
  # **********************************************************************************************
  # we want to assign all existing collections to new storage_configurations
  # so that any new items/audio_files added to those collections will point at
  # the new s3 location.
  # first we create some new StorageConfiguration objects pointing at new AWS location.
  aws_private_store = StorageConfiguration.create(provider: 'AWS', key: ENV['AWS_ACCESS_KEY'], secret: ENV['AWS_SECRET_ACCESS_KEY'], is_public: false, bucket: ENV['AWS_BUCKET'])
  # BUG FIX: the public store was previously created with is_public: false,
  # which silently mapped "public" collections onto a private storage config.
  aws_public_store = StorageConfiguration.create(provider: 'AWS', key: ENV['AWS_ACCESS_KEY'], secret: ENV['AWS_SECRET_ACCESS_KEY'], is_public: true, bucket: ENV['AWS_BUCKET'])
  # second, iterate over all collections and change any whose storage is currently AWS to a new config
  Collection.find_in_batches do |colls|
    colls.each do |coll|
      next unless coll.default_storage.provider == 'AWS'
      if coll.default_storage.is_public
        coll.default_storage_id = aws_public_store.id
      else
        coll.default_storage_id = aws_private_store.id
      end
      coll.save!
    end
  end
end
desc "run all fixer.popuparchive.com migrations"
task pua_migrate: [:environment] do
# Order matters: the collection migration must run after the storage-id
# backfill (see the WARNING on migrate_collection_storage_ids).
# reenable lets each task be invoked again within this same process.
Rake::Task["fixer:migrate_storage_ids"].invoke
Rake::Task["fixer:migrate_storage_ids"].reenable
Rake::Task["fixer:migrate_collection_storage_ids"].invoke
Rake::Task["fixer:migrate_collection_storage_ids"].reenable
end
end
|
# rubocop:disable Style/FrozenStringLiteralComment
require "asciidoctor"
require "octokit"
require "time"
require "digest/sha1"
def index_l10n_doc(filter_tags, doc_list, get_content)
ActiveRecord::Base.logger.level = Logger::WARN
rebuild = ENV["REBUILD_DOC"]
rerun = ENV["RERUN"] || rebuild || false
filter_tags.call(rebuild, false).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }.each do |tag|
name, commit_sha, tree_sha, ts = tag
puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
stag = Version.where(name: name.gsub("v", "l10n")).first_or_create
next if (stag.commit_sha == commit_sha) && !rerun
stag.commit_sha = commit_sha
stag.tree_sha = tree_sha
stag.committed = ts
stag.save
tag_files = doc_list.call(tree_sha)
doc_files = tag_files.select { |ent| ent.first =~
/^([-_\w]+)\/(
(
git.*
)\.txt)/x
}
puts "Found #{doc_files.size} entries"
doc_limit = ENV["ONLY_BUILD_DOC"]
get_content_f = Proc.new do |source, target|
name = File.join(File.dirname(source), target)
content_file = tag_files.detect { |ent| ent.first == name }
if content_file
new_content = get_content.call (content_file.second)
else
puts "Included file #{name} was not translated. Processing anyway\n"
end
[new_content, name]
end
def expand!(path, content, get_f_content , categories)
content.gsub!(/include::(\S+)\.txt/) do |line|
line.gsub!("include::", "")
if categories[line]
new_content = categories[line]
else
new_content, path = get_f_content.call(path, line)
end
if new_content
expand!(path, new_content, get_f_content, categories)
else
"\n\n[WARNING]\n====\nMissing `#{path}`\n\nSee original version for this content.\n====\n\n"
end
end
return content
end
doc_files.each do |entry|
full_path, sha = entry
lang = File.dirname(full_path)
path = File.basename(full_path, ".txt")
#next if doc_limit && path !~ /#{doc_limit}/
file = DocFile.where(name: path).first_or_create
puts " build: #{path} for #{lang}"
content = get_content.call sha
categories = {}
expand!(full_path, content, get_content_f, categories)
content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1/#{lang}\\2[\\3]")
asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
if rerun || !doc.plain || !doc.html
html = asciidoc.render
html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
line = "<a href='/docs/#{x[1]}/#{lang}'>#{x[1]}[#{x[2]}]</a>"
end
#HTML anchor on hdlist1 (i.e. command options)
html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
text = $1.tr("^A-Za-z0-9-", "")
anchor = "#{path}-#{text}"
"<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
end
doc.plain = asciidoc.source
doc.html = html
doc.save
end
dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: lang).first_or_create
dv.doc_id = doc.id
dv.language = lang
dv.save
end
end
end
# Trim the tag list down to the versions worth indexing:
#  - drop everything older than v2.0
#  - for versions older than v2.17, keep only the newest point release
#    of each minor series
#
# Tags arrive sorted oldest-first; we walk them newest-first so the
# highest point release of a series is seen (and kept) before its older
# siblings. Returns the kept tags, still in oldest-first order.
def drop_uninteresting_tags(tags)
  ret = []
  # reverse_each avoids materializing a reversed copy of the array
  # (Rubocop: Performance/ReverseEach) while keeping the same traversal.
  tags.reverse_each do |tag|
    numeric = Version.version_to_num(tag.first[1..-1])
    # drop anything older than v2.0
    next if numeric < 2000000
    # older than v2.17, take only the highest release of each minor series
    if numeric < 2170000 && !ret.empty?
      newest_kept = Version.version_to_num(ret[0].first[1..-1])
      # same minor series when the numbers match after the patch digits
      # (the last four) are divided away
      next if newest_kept.to_i.div(10000) == numeric.to_i.div(10000)
    end
    # keep everything else
    ret.unshift(tag)
  end
  ret
end
def index_doc(filter_tags, doc_list, get_content)
ActiveRecord::Base.logger.level = Logger::WARN
rebuild = ENV["REBUILD_DOC"]
rerun = ENV["RERUN"] || rebuild || false
tags = filter_tags.call(rebuild).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }
drop_uninteresting_tags(tags).each do |tag|
name, commit_sha, tree_sha, ts = tag
puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
stag = Version.where(name: name.gsub("v", "")).first
next if stag && !rerun
stag = Version.where(name: name.gsub("v", "")).first_or_create
stag.commit_sha = commit_sha
stag.tree_sha = tree_sha
stag.committed = ts
stag.save
tag_files = doc_list.call(tree_sha)
doc_files = tag_files.select { |ent| ent.first =~
/^Documentation\/(
SubmittingPatches |
MyFirstContribution.txt |
(
git.* |
everyday |
howto-index |
user-manual |
diff.* |
fetch.* |
merge.* |
rev.* |
pretty.* |
pull.* |
technical\/.*
)\.txt)/x
}
puts "Found #{doc_files.size} entries"
doc_limit = ENV["ONLY_BUILD_DOC"]
# generate command-list content
generated = {}
cmd = tag_files.detect { |f| f.first =~ /command-list\.txt/ }
if cmd
cmd_list = get_content.call(cmd.second).match(/(### command list.*|# command name.*)/m)[0].split("\n").reject { |l| l =~ /^#/ }.inject({}) do |list, cmd|
name, kind, attr = cmd.split(/\s+/)
list[kind] ||= []
list[kind] << [name, attr]
list
end
generated = cmd_list.keys.inject({}) do |list, category|
links = cmd_list[category].map do |cmd, attr|
if cmd_file = tag_files.detect { |ent| ent.first == "Documentation/#{cmd}.txt" }
if match = get_content.call(cmd_file.second).match(/NAME\n----\n\S+ - (.*)$/)
"linkgit:#{cmd}[1]::\n\t#{attr == 'deprecated' ? '(deprecated) ' : ''}#{match[1]}\n"
end
end
end
list.merge!("Documentation/cmds-#{category}.txt" => links.compact.join("\n"))
end
tools = tag_files.select { |ent| ent.first =~/^mergetools\// }.map do |entry|
path, sha = entry
tool = File.basename path
content = get_content.call sha
merge = (content.include? "can_merge") ? "" : " * #{tool}\n"
diff = (content.include? "can_diff") ? "" : " * #{tool}\n"
[merge, diff]
end
can_merge, can_diff = tools.transpose.map { |strs| strs.join "" }
generated["Documentation/mergetools-diff.txt"] = can_diff
generated["Documentation/mergetools-merge.txt"] = can_merge
get_content_f = Proc.new do |name|
content_file = tag_files.detect { |ent| ent.first == name }
if content_file
new_content = get_content.call (content_file.second)
end
new_content
end
def expand_content(content, path, get_f_content , generated)
content.gsub(/include::(\S+)\.txt\[\]/) do |line|
if File.dirname(path)=="."
new_fname = "#{$1}.txt"
else
new_fname = (Pathname.new(path).dirname + Pathname.new("#{$1}.txt")).cleanpath.to_s
end
if generated[new_fname]
new_content = generated[new_fname]
else
new_content = get_f_content.call(new_fname)
if new_content
expand_content(new_content.force_encoding("UTF-8"), new_fname, get_f_content, generated)
else
puts "#{new_fname} could not be resolved for expansion"
end
end
end
end
doc_files.each do |entry|
path, sha = entry
docname = File.basename(path, ".txt")
next if doc_limit && path !~ /#{doc_limit}/
file = DocFile.where(name: docname).first_or_create
puts " build: #{docname}"
content = expand_content((get_content.call sha).force_encoding("UTF-8"), path, get_content_f, generated)
content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1\\2[\\3]")
asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
if rerun || !doc.plain || !doc.html
html = asciidoc.render
html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
line = "<a href='/docs/#{x[1]}'>#{x[1]}[#{x[2]}]</a>"
end
#HTML anchor on hdlist1 (i.e. command options)
html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
text = $1.tr("^A-Za-z0-9-", "")
anchor = "#{path}-#{text}"
"<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
end
doc.plain = asciidoc.source
doc.html = html
doc.save
end
dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: "en").first_or_create
dv.doc_id = doc.id
dv.language = "en"
dv.save
end
end
Rails.cache.write("latest-version", Version.latest_version.name)
end
end
def github_index_doc(index_fun, repo)
Octokit.auto_paginate = true
if ENV["GITHUB_API_TOKEN"]
@octokit = Octokit::Client.new(access_token: ENV["GITHUB_API_TOKEN"])
else
@octokit = Octokit::Client.new(login: ENV["API_USER"], password: ENV["API_PASS"])
end
repo = ENV["GIT_REPO"] || repo
blob_content = Hash.new do |blobs, sha|
content = Base64.decode64(@octokit.blob(repo, sha, encoding: "base64").content)
blobs[sha] = content.force_encoding("UTF-8")
end
tag_filter = -> (tagname, gettags = true) do
# find all tags
if gettags
tags = @octokit.tags(repo).select { |tag| !tag.nil? && tag.name =~ /v\d([\.\d])+$/ } # just get release tags
if tagname
tags = tags.select { |t| t.name == tagname }
end
else
tags=[Struct.new(:name).new("heads/master")]
end
tags.collect do |tag|
# extract metadata
commit_info = @octokit.commit(repo, tag.name)
commit_sha = commit_info.sha
tree_sha = commit_info.commit.tree.sha
# ts = Time.parse( commit_info.commit.committer.date )
ts = commit_info.commit.committer.date
[tag.name, commit_sha, tree_sha, ts]
end
end
get_content = -> (sha) do blob_content[sha] end
get_file_list = -> (tree_sha) do
tree_info = @octokit.tree(repo, tree_sha, recursive: true)
tree_info.tree.collect { |ent| [ent.path, ent.sha] }
end
send(index_fun, tag_filter, get_file_list, get_content)
end
def local_index_doc(index_fun)
dir = ENV["GIT_REPO"]
Dir.chdir(dir) do
tag_filter = -> (tagname, gettags = true) do
if gettags
# find all tags
tags = `git tag | egrep 'v1|v2'`.strip.split("\n")
tags = tags.select { |tag| tag =~ /v\d([\.\d])+$/ } # just get release tags
if tagname
tags = tags.select { |t| t == tagname }
end
else
tags=["master"]
end
tags.collect do |tag|
# extract metadata
commit_sha = `git rev-parse #{tag}`.chomp
tree_sha = `git rev-parse #{tag}^{tree}`.chomp
tagger = `git cat-file commit #{tag} | grep committer`.chomp.split(" ")
tz = tagger.pop
ts = tagger.pop
ts = Time.at(ts.to_i)
[tag, commit_sha, tree_sha, ts]
end
end
get_content = -> (sha) do `git cat-file blob #{sha}` end
get_file_list = -> (tree_sha) do
entries = `git ls-tree -r #{tree_sha}`.strip.split("\n")
tree = entries. map do |e|
mode, type, sha, path = e.split(" ")
[path, sha]
end
end
send(index_fun, tag_filter, get_file_list, get_content)
end
end
# Rake entry points. The *local* tasks index from a clone on disk
# (ENV["GIT_REPO"]); the *preindex* tasks go through the GitHub API.
task local_index: :environment do
  local_index_doc(:index_doc)
end
task local_index_l10n: :environment do
  local_index_doc(:index_l10n_doc)
end
task preindex: :environment do
  github_index_doc(:index_doc, "gitster/git")
end
task preindex_l10n: :environment do
  github_index_doc(:index_l10n_doc, "jnavila/git-html-l10n")
end
lib/tasks/index.rake: appease Rubocop
Rubocop prefers that we use 'Enumerable#reverse_each' instead of calling
'#reverse' and '#each' separately. This improves performance because
'#each' no longer has to wait for '#reverse' to build the fully reversed
list before enumerating it.
Switch one out for the other for a slight performance win, but mostly to
get Rubocop happy again.
# rubocop:disable Style/FrozenStringLiteralComment
require "asciidoctor"
require "octokit"
require "time"
require "digest/sha1"
def index_l10n_doc(filter_tags, doc_list, get_content)
ActiveRecord::Base.logger.level = Logger::WARN
rebuild = ENV["REBUILD_DOC"]
rerun = ENV["RERUN"] || rebuild || false
filter_tags.call(rebuild, false).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }.each do |tag|
name, commit_sha, tree_sha, ts = tag
puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
stag = Version.where(name: name.gsub("v", "l10n")).first_or_create
next if (stag.commit_sha == commit_sha) && !rerun
stag.commit_sha = commit_sha
stag.tree_sha = tree_sha
stag.committed = ts
stag.save
tag_files = doc_list.call(tree_sha)
doc_files = tag_files.select { |ent| ent.first =~
/^([-_\w]+)\/(
(
git.*
)\.txt)/x
}
puts "Found #{doc_files.size} entries"
doc_limit = ENV["ONLY_BUILD_DOC"]
get_content_f = Proc.new do |source, target|
name = File.join(File.dirname(source), target)
content_file = tag_files.detect { |ent| ent.first == name }
if content_file
new_content = get_content.call (content_file.second)
else
puts "Included file #{name} was not translated. Processing anyway\n"
end
[new_content, name]
end
def expand!(path, content, get_f_content , categories)
content.gsub!(/include::(\S+)\.txt/) do |line|
line.gsub!("include::", "")
if categories[line]
new_content = categories[line]
else
new_content, path = get_f_content.call(path, line)
end
if new_content
expand!(path, new_content, get_f_content, categories)
else
"\n\n[WARNING]\n====\nMissing `#{path}`\n\nSee original version for this content.\n====\n\n"
end
end
return content
end
doc_files.each do |entry|
full_path, sha = entry
lang = File.dirname(full_path)
path = File.basename(full_path, ".txt")
#next if doc_limit && path !~ /#{doc_limit}/
file = DocFile.where(name: path).first_or_create
puts " build: #{path} for #{lang}"
content = get_content.call sha
categories = {}
expand!(full_path, content, get_content_f, categories)
content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1/#{lang}\\2[\\3]")
asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
if rerun || !doc.plain || !doc.html
html = asciidoc.render
html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
line = "<a href='/docs/#{x[1]}/#{lang}'>#{x[1]}[#{x[2]}]</a>"
end
#HTML anchor on hdlist1 (i.e. command options)
html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
text = $1.tr("^A-Za-z0-9-", "")
anchor = "#{path}-#{text}"
"<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
end
doc.plain = asciidoc.source
doc.html = html
doc.save
end
dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: lang).first_or_create
dv.doc_id = doc.id
dv.language = lang
dv.save
end
end
end
# Filter a chronologically sorted tag list down to the versions worth
# indexing: everything from v2.17 on, but only the newest point release
# of each older v2.x series, and nothing before v2.0.
def drop_uninteresting_tags(tags)
  kept = []
  # proceed in reverse-chronological order, as we'll pick only the
  # highest-numbered point release for older versions
  tags.reverse_each do |tag|
    numeric = Version.version_to_num(tag.first[1..-1])
    # drop anything older than v2.0
    next if numeric < 2000000
    # older than v2.17: keep only one release per minor series
    unless kept.empty? || numeric >= 2170000
      newest = Version.version_to_num(kept.first.first[1..-1])
      next if newest.to_i.div(10000) == numeric.to_i.div(10000)
    end
    # keep everything else, restoring chronological order as we go
    kept.unshift(tag)
  end
  kept
end
def index_doc(filter_tags, doc_list, get_content)
ActiveRecord::Base.logger.level = Logger::WARN
rebuild = ENV["REBUILD_DOC"]
rerun = ENV["RERUN"] || rebuild || false
tags = filter_tags.call(rebuild).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }
drop_uninteresting_tags(tags).each do |tag|
name, commit_sha, tree_sha, ts = tag
puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
stag = Version.where(name: name.gsub("v", "")).first
next if stag && !rerun
stag = Version.where(name: name.gsub("v", "")).first_or_create
stag.commit_sha = commit_sha
stag.tree_sha = tree_sha
stag.committed = ts
stag.save
tag_files = doc_list.call(tree_sha)
doc_files = tag_files.select { |ent| ent.first =~
/^Documentation\/(
SubmittingPatches |
MyFirstContribution.txt |
(
git.* |
everyday |
howto-index |
user-manual |
diff.* |
fetch.* |
merge.* |
rev.* |
pretty.* |
pull.* |
technical\/.*
)\.txt)/x
}
puts "Found #{doc_files.size} entries"
doc_limit = ENV["ONLY_BUILD_DOC"]
# generate command-list content
generated = {}
cmd = tag_files.detect { |f| f.first =~ /command-list\.txt/ }
if cmd
cmd_list = get_content.call(cmd.second).match(/(### command list.*|# command name.*)/m)[0].split("\n").reject { |l| l =~ /^#/ }.inject({}) do |list, cmd|
name, kind, attr = cmd.split(/\s+/)
list[kind] ||= []
list[kind] << [name, attr]
list
end
generated = cmd_list.keys.inject({}) do |list, category|
links = cmd_list[category].map do |cmd, attr|
if cmd_file = tag_files.detect { |ent| ent.first == "Documentation/#{cmd}.txt" }
if match = get_content.call(cmd_file.second).match(/NAME\n----\n\S+ - (.*)$/)
"linkgit:#{cmd}[1]::\n\t#{attr == 'deprecated' ? '(deprecated) ' : ''}#{match[1]}\n"
end
end
end
list.merge!("Documentation/cmds-#{category}.txt" => links.compact.join("\n"))
end
tools = tag_files.select { |ent| ent.first =~/^mergetools\// }.map do |entry|
path, sha = entry
tool = File.basename path
content = get_content.call sha
merge = (content.include? "can_merge") ? "" : " * #{tool}\n"
diff = (content.include? "can_diff") ? "" : " * #{tool}\n"
[merge, diff]
end
can_merge, can_diff = tools.transpose.map { |strs| strs.join "" }
generated["Documentation/mergetools-diff.txt"] = can_diff
generated["Documentation/mergetools-merge.txt"] = can_merge
get_content_f = Proc.new do |name|
content_file = tag_files.detect { |ent| ent.first == name }
if content_file
new_content = get_content.call (content_file.second)
end
new_content
end
def expand_content(content, path, get_f_content , generated)
content.gsub(/include::(\S+)\.txt\[\]/) do |line|
if File.dirname(path)=="."
new_fname = "#{$1}.txt"
else
new_fname = (Pathname.new(path).dirname + Pathname.new("#{$1}.txt")).cleanpath.to_s
end
if generated[new_fname]
new_content = generated[new_fname]
else
new_content = get_f_content.call(new_fname)
if new_content
expand_content(new_content.force_encoding("UTF-8"), new_fname, get_f_content, generated)
else
puts "#{new_fname} could not be resolved for expansion"
end
end
end
end
doc_files.each do |entry|
path, sha = entry
docname = File.basename(path, ".txt")
next if doc_limit && path !~ /#{doc_limit}/
file = DocFile.where(name: docname).first_or_create
puts " build: #{docname}"
content = expand_content((get_content.call sha).force_encoding("UTF-8"), path, get_content_f, generated)
content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1\\2[\\3]")
asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
if rerun || !doc.plain || !doc.html
html = asciidoc.render
html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
line = "<a href='/docs/#{x[1]}'>#{x[1]}[#{x[2]}]</a>"
end
#HTML anchor on hdlist1 (i.e. command options)
html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
text = $1.tr("^A-Za-z0-9-", "")
anchor = "#{path}-#{text}"
"<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
end
doc.plain = asciidoc.source
doc.html = html
doc.save
end
dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: "en").first_or_create
dv.doc_id = doc.id
dv.language = "en"
dv.save
end
end
Rails.cache.write("latest-version", Version.latest_version.name)
end
end
def github_index_doc(index_fun, repo)
Octokit.auto_paginate = true
if ENV["GITHUB_API_TOKEN"]
@octokit = Octokit::Client.new(access_token: ENV["GITHUB_API_TOKEN"])
else
@octokit = Octokit::Client.new(login: ENV["API_USER"], password: ENV["API_PASS"])
end
repo = ENV["GIT_REPO"] || repo
blob_content = Hash.new do |blobs, sha|
content = Base64.decode64(@octokit.blob(repo, sha, encoding: "base64").content)
blobs[sha] = content.force_encoding("UTF-8")
end
tag_filter = -> (tagname, gettags = true) do
# find all tags
if gettags
tags = @octokit.tags(repo).select { |tag| !tag.nil? && tag.name =~ /v\d([\.\d])+$/ } # just get release tags
if tagname
tags = tags.select { |t| t.name == tagname }
end
else
tags=[Struct.new(:name).new("heads/master")]
end
tags.collect do |tag|
# extract metadata
commit_info = @octokit.commit(repo, tag.name)
commit_sha = commit_info.sha
tree_sha = commit_info.commit.tree.sha
# ts = Time.parse( commit_info.commit.committer.date )
ts = commit_info.commit.committer.date
[tag.name, commit_sha, tree_sha, ts]
end
end
get_content = -> (sha) do blob_content[sha] end
get_file_list = -> (tree_sha) do
tree_info = @octokit.tree(repo, tree_sha, recursive: true)
tree_info.tree.collect { |ent| [ent.path, ent.sha] }
end
send(index_fun, tag_filter, get_file_list, get_content)
end
def local_index_doc(index_fun)
dir = ENV["GIT_REPO"]
Dir.chdir(dir) do
tag_filter = -> (tagname, gettags = true) do
if gettags
# find all tags
tags = `git tag | egrep 'v1|v2'`.strip.split("\n")
tags = tags.select { |tag| tag =~ /v\d([\.\d])+$/ } # just get release tags
if tagname
tags = tags.select { |t| t == tagname }
end
else
tags=["master"]
end
tags.collect do |tag|
# extract metadata
commit_sha = `git rev-parse #{tag}`.chomp
tree_sha = `git rev-parse #{tag}^{tree}`.chomp
tagger = `git cat-file commit #{tag} | grep committer`.chomp.split(" ")
tz = tagger.pop
ts = tagger.pop
ts = Time.at(ts.to_i)
[tag, commit_sha, tree_sha, ts]
end
end
get_content = -> (sha) do `git cat-file blob #{sha}` end
get_file_list = -> (tree_sha) do
entries = `git ls-tree -r #{tree_sha}`.strip.split("\n")
tree = entries. map do |e|
mode, type, sha, path = e.split(" ")
[path, sha]
end
end
send(index_fun, tag_filter, get_file_list, get_content)
end
end
# Rake entry points. The *local* tasks index from a clone on disk
# (ENV["GIT_REPO"]); the *preindex* tasks go through the GitHub API.
task local_index: :environment do
  local_index_doc(:index_doc)
end
task local_index_l10n: :environment do
  local_index_doc(:index_l10n_doc)
end
task preindex: :environment do
  github_index_doc(:index_doc, "gitster/git")
end
task preindex_l10n: :environment do
  github_index_doc(:index_l10n_doc, "jnavila/git-html-l10n")
end
|
# Bootstrap tasks: create the initial admin account and seed the
# remuneration schedule contributors.
namespace :setup do
  desc "create the admin user"
  task :admin => :environment do
    # permission_level -1 marks the site admin
    admin = User.first(:conditions => {:permission_level => -1})
    if admin
      puts "Admin user already exists"
    else
      admin = User.create({
        :email => "admin@yourdomain.com",
        :login => "siteadmin",
        :password => "p455wd!",
        :password_confirmation => "p455wd!",
        :permission_level => -1
      })
      # NOTE(review): User.create returns an object even when validation
      # fails, so this branch is effectively always taken — consider
      # checking persistence instead.
      if admin
        # fixed: "pasword" typo in the success message
        puts "Created admin user. login: siteadmin, password: p455wd!"
      else
        puts "Error encountered while creating admin"
      end
    end
  end
  desc "create the remuneration schedule contributors"
  task :remunerations => :environment do
    puts 'Setting up remuneration schedule...'
    # seed the fixed set of contributor types, idempotently
    ['tenure', 'peer review', 'capital contribution',
     'time worked', 'capital reinvestment', 'other'].each do |remun|
      RemunerationContributor.find_or_create_by_name(:name => remun, :use => true)
    end
    puts 'Done'
  end
  # fixed: "renumeration" typo in the description
  desc "setup the admin user and remuneration schedule"
  task :all => [:admin, :remunerations]
end
Fix the setup rake task: drop the remuneration schedule seeding and only create the admin user.
# Bootstrap task: create the initial admin account.
namespace :setup do
  desc "create the admin user"
  task :admin => :environment do
    # permission_level -1 marks the site admin
    admin = User.first(:conditions => {:permission_level => -1})
    if admin
      puts "Admin user already exists"
    else
      admin = User.create({
        :email => "admin@yourdomain.com",
        :login => "siteadmin",
        :password => "p455wd!",
        :password_confirmation => "p455wd!",
        :permission_level => -1
      })
      # NOTE(review): User.create returns an object even when validation
      # fails, so this branch is effectively always taken — consider
      # checking persistence instead.
      if admin
        # fixed: "pasword" typo in the success message
        puts "Created admin user. login: siteadmin, password: p455wd!"
      else
        puts "Error encountered while creating admin"
      end
    end
  end
  # fixed: the description still claimed a remuneration schedule, but the
  # :all task only runs :admin now
  desc "setup the admin user"
  task :all => [:admin]
end
|
namespace :empirical do
  # One-shot development bootstrap: tmp dirs, config files, then a freshly
  # created, schema-loaded, seeded database.
  task :setup do
    puts "** Starting setup..."
    puts "** Creating tmp directories..."
    Rake::Task["tmp:create"].invoke
    unless File.exist?("config/database.yml")
      puts "** Copying DB Credentials..."
      `cp config/database.yml.example config/database.yml`
    end
    puts '** Copying env variables...'
    `cp .env-sample .env`
    # remaining steps are plain rake invocations, each with a banner
    [["** Creating database...", "db:create"],
     ["** Loading Schema...", "db:schema:load"],
     ["** Seeding database...", "db:seed"]].each do |message, task_name|
      puts message
      Rake::Task[task_name].invoke
    end
    puts "** Setup complete!"
  end
end
update rake setup task to use structure rather than schema
namespace :empirical do
  # One-shot development bootstrap: tmp dirs, config files, then a freshly
  # created, structure-loaded, seeded database.
  task :setup do
    puts "** Starting setup..."
    puts "** Creating tmp directories..."
    Rake::Task["tmp:create"].invoke
    unless File.exist?("config/database.yml")
      puts "** Copying DB Credentials..."
      `cp config/database.yml.example config/database.yml`
    end
    puts '** Copying env variables...'
    `cp .env-sample .env`
    # remaining steps are plain rake invocations, each with a banner
    [["** Creating database...", "db:create"],
     ["** Loading Structure...", "db:structure:load"],
     ["** Seeding database...", "db:seed"]].each do |message, task_name|
      puts message
      Rake::Task[task_name].invoke
    end
    puts "** Setup complete!"
  end
end
|
# Dev-environment bootstrap: ensure the right Ruby is active, install the
# ember app's JS dependencies, then rebuild and seed the database.
namespace :wildland do
  desc 'Updates libs and database'
  task :setup do
    # Checkout ruby and node versions
    print 'Checking ruby version... '
    # NOTE(review): File.read keeps the trailing newline; confirm the
    # include? match in ruby_version_up_to_date? still succeeds
    needed_ruby_version = File.read('.ruby-version')
    unless ruby_version_up_to_date?(needed_ruby_version)
      puts "out of date. Updating."
      update_ruby(needed_ruby_version)
    else
      puts 'up to date.'
    end
    # JS deps live in the embedded ember app
    Dir.chdir('app-ember') do
      system('npm install')
      system('bower install')
    end
    # full database rebuild + demo data
    system('rake db:drop')
    system('rake db:create')
    system('rake db:migrate')
    system('rake db:setup')
    system('rake demo:seed')
  end
end
# Top-level alias: `rake wildland` runs wildland:setup.
desc 'Gets development environment setup.'
task wildland: 'wildland:setup'
# True when the currently active ruby (per `ruby -v`) matches the wanted
# version string.
def ruby_version_up_to_date?(needed_ruby_version)
  `ruby -v`.include?(needed_ruby_version)
end
# Dispatch to whichever ruby version manager is installed (rvm preferred,
# then rbenv); otherwise just tell the user to upgrade manually.
def update_ruby(version)
  if system("which rvm > /dev/null 2>&1")
    update_ruby_with_rvm(version)
  elsif system("which rbenv > /dev/null 2>&1")
    update_ruby_with_rbenv(version)
  else
    puts "No ruby manager installed. Please manually update to Ruby #{version}"
  end
end
# Switch to +version+ via rvm; if `rvm use` alone didn't get us there,
# install the version first and try again.
def update_ruby_with_rvm(version)
  system("rvm use #{version}")
  return if ruby_version_up_to_date?(version)
  system("rvm install #{version}")
  system("rvm use #{version}")
end
# Placeholder: automatic updates via rbenv are not implemented yet.
def update_ruby_with_rbenv(version)
  puts 'rbenv updater not written.'
end
Fixes #14.
# Dev-environment bootstrap: ensure the right Ruby is active, install JS
# dependencies (new ember-cli-rails layout or the legacy app-ember one),
# then rebuild and seed the database.
namespace :wildland do
  desc 'Updates libs and database'
  task :setup do
    # Checkout ruby and node versions
    print 'Checking ruby version... '
    # NOTE(review): File.read keeps the trailing newline; confirm the
    # include? match in ruby_version_up_to_date? still succeeds
    needed_ruby_version = File.read('.ruby-version')
    unless ruby_version_up_to_date?(needed_ruby_version)
      puts "out of date. Updating."
      update_ruby(needed_ruby_version)
    else
      puts 'up to date.'
    end
    # new layout: plain npm install at the repo root; old layout: delegate
    # to the app-ember based setup
    if ember_cli_rails_installed?
      puts 'ember-cli-rails installed'
      system('npm install')
    else
      puts 'install ember dependencies'
      old_ember_setup
    end
    # full database rebuild + demo data
    system('rake db:drop')
    system('rake db:create')
    system('rake db:migrate')
    system('rake db:setup')
    system('rake demo:seed')
  end
end
# Top-level alias: `rake wildland` runs wildland:setup.
desc 'Gets development environment setup.'
task wildland: 'wildland:setup'
# True when the active ruby (per `ruby -v`) matches the wanted version.
def ruby_version_up_to_date?(needed_ruby_version)
  ruby_version = `ruby -v`
  ruby_version.include?(needed_ruby_version)
end
# Dispatch to whichever ruby version manager is installed (rvm preferred,
# then rbenv); otherwise ask the user to upgrade manually.
def update_ruby(version)
  case
  when system("which rvm > /dev/null 2>&1")
    update_ruby_with_rvm(version)
  when system("which rbenv > /dev/null 2>&1")
    update_ruby_with_rbenv(version)
  else
    puts "No ruby manager installed. Please manually update to Ruby #{version}"
  end
end
# Switch via rvm; install first when `rvm use` alone is not enough.
def update_ruby_with_rvm(version)
  # Try to use the version or install and use
  system("rvm use #{version}")
  unless ruby_version_up_to_date?(version)
    system("rvm install #{version}")
    system("rvm use #{version}")
  end
end
# Placeholder: automatic updates via rbenv are not implemented yet.
def update_ruby_with_rbenv(version)
  puts 'rbenv updater not written.'
end
# True when the repo has been converted to ember-cli-rails: a heroku
# install script plus a top-level package.json.
# fixed: File.exists? is a deprecated alias that was removed in Ruby 3.2;
# File.exist? is the supported spelling on all versions.
def ember_cli_rails_installed?
  File.exist?('bin/heroku_install') && File.exist?('package.json')
end
# Legacy JS dependency install: run npm and bower from within app-ember/.
def old_ember_setup
  Dir.chdir('app-ember') do
    %w[npm bower].each { |tool| system("#{tool} install") }
  end
end
|
[
1. Add a rake task file.
2. Run the tasks with ./bin/rake task_one, ./bin/rake namespace:task_two, etc.
]
# Rake demo tasks showing plain tasks, namespaces, dependencies and
# explicit invocation.
task :task_one do
  puts "This is Task One!"
end
namespace :namespace do
  desc "Task with namespace"
  task :task_two do
    puts "This is Task Two"
  end
  desc "The third task"
  # single prerequisite: the top-level :task_one runs first
  task :task_three => :task_one do
    puts "This is Task Three"
  end
  desc "The fourth task"
  # multiple prerequisites
  task :task_four => [:task_one, :task_two] do
    puts "This is Task Four"
  end
  desc "The fifth task"
  task :task_five do
    # invoke (not execute) honors prerequisites and runs at most once
    Rake::Task['namespace:task_three'].invoke
    puts "This is task five!"
  end
  desc "The task related environment, tell Rake to load full the application environment"
  task :task_six => :environment do
    puts "This is task six"
  end
end
# umbrella task running everything above
task :all => [:task_one, 'namespace:task_two', 'namespace:task_three', 'namespace:task_four', 'namespace:task_five', 'namespace:task_six']
require "optparse"
module Teleport
# The main class for the teleport command line.
class Main
include Constants
include Util
TAR = "#{DIR}.tgz"
def initialize(cmd = :teleport)
cli(cmd)
case @options[:cmd]
when :teleport
$stderr = $stdout
teleport
when :install
$stderr = $stdout
install
when :infer
infer
end
end
# Parse ARGV.
def cli(cmd)
@options = { }
@options[:cmd] = cmd
@options[:file] = "Telfile"
opt = OptionParser.new do |o|
o.banner = "Usage: teleport <hostname>"
o.on("-f", "--file FILE", "use this file instead of Telfile") do |f|
@options[:file] = f
end
o.on("-i", "--infer", "infer a new Telfile from YOUR machine") do |f|
@options[:cmd] = :infer
end
o.on_tail("-h", "--help", "print this help text") do
puts opt
exit(0)
end
end
begin
opt.parse!
rescue OptionParser::InvalidOption, OptionParser::MissingArgument
puts $!
puts opt
exit(1)
end
if @options[:cmd] == :teleport
# print this error message early, to give the user a hint
# instead of complaining about command line arguments
if ARGV.length != 1
puts opt
exit(1)
end
@options[:host] = ARGV.shift
end
end
# Read Telfile
def read_config
if !File.exists?(@options[:file])
fatal("Sadly, I can't find #{@options[:file]} here. Please create one.")
end
@config = Config.new(@options[:file])
end
# Assemble the the tgz before we teleport to the host
def assemble_tgz
banner "Assembling #{TAR}..."
rm_and_mkdir(DIR)
# gem
run("cp", ["-r", "#{File.dirname(__FILE__)}/../../lib", GEM])
# Telfile, if necessary
if @options[:file] != "Telfile"
run("cp", [@options[:file], "Telfile"])
end
# data
run("cp", ["-r", ".", DATA])
# config.sh
File.open("#{DIR}/config", "w") do |f|
f.puts("CONFIG_HOST='#{@options[:host]}'")
f.puts("CONFIG_RUBY='#{@config.ruby}'")
f.puts("CONFIG_RUBYGEMS='#{RUBYGEMS}'")
end
# keys
ssh_key = "#{ENV["HOME"]}/.ssh/#{PUBKEY}"
if File.exists?(ssh_key)
run("cp", [ssh_key, DIR])
end
Dir.chdir(File.dirname(DIR)) do
run("tar", ["cfpz", TAR, File.basename(DIR)])
end
end
# Copy the tgz to the host, then run there.
def ssh_tgz
begin
banner "scp #{TAR} to #{@options[:host]}:#{TAR}..."
args = []
args += @config.ssh_options if @config.ssh_options
args << TAR
args << "#{@options[:host]}:#{TAR}"
run("scp", args)
cmd = [
"cd /tmp",
"(sudo -n echo gub > /dev/null 2> /dev/null || (echo `whoami` could not sudo. && exit 1))",
"sudo rm -rf #{DIR}",
"sudo tar xfpz #{TAR}",
"sudo #{DIR}/gem/teleport/run.sh"
]
banner "ssh to #{@options[:host]} and run..."
args = []
args += @config.ssh_options if @config.ssh_options
args << @options[:host]
args << cmd.join(" && ")
run("ssh", args)
rescue RunError
fatal("Failed!")
end
banner "Success!"
end
# Teleport to the host.
def teleport
read_config
assemble_tgz
ssh_tgz
end
# We're running on the host - install!
def install
Dir.chdir(DATA) do
read_config
end
Install.new(@config)
end
# try to infer a new Telfile based on the current machine
def infer
Infer.new
end
end
end
don't assemble all files - just the ones we need
require "optparse"
module Teleport
# The main class for the teleport command line.
class Main
include Constants
include Util
TAR = "#{DIR}.tgz"
def initialize(cmd = :teleport)
cli(cmd)
case @options[:cmd]
when :teleport
$stderr = $stdout
teleport
when :install
$stderr = $stdout
install
when :infer
infer
end
end
# Parse ARGV.
def cli(cmd)
@options = { }
@options[:cmd] = cmd
opt = OptionParser.new do |o|
o.banner = "Usage: teleport <hostname>"
o.on("-i", "--infer", "infer a new Telfile from YOUR machine") do |f|
@options[:cmd] = :infer
end
o.on_tail("-h", "--help", "print this help text") do
puts opt
exit(0)
end
end
begin
opt.parse!
rescue OptionParser::InvalidOption, OptionParser::MissingArgument
puts $!
puts opt
exit(1)
end
if @options[:cmd] == :teleport
# print this error message early, to give the user a hint
# instead of complaining about command line arguments
if ARGV.length != 1
puts opt
exit(1)
end
@options[:host] = ARGV.shift
end
end
# Read Telfile
def read_config
if !File.exists?("Telfile")
fatal("Sadly, I can't find Telfile here. Please create one.")
end
@config = Config.new("Telfile")
end
# Assemble the the tgz before we teleport to the host
def assemble_tgz
banner "Assembling #{TAR}..."
rm_and_mkdir(DIR)
# gem
run("cp", ["-r", "#{File.dirname(__FILE__)}/../../lib", GEM])
# data
mkdir(DATA)
copy = []
copy << "Telfile"
copy += Dir["files*"]
copy.sort.each { |i| run("cp", ["-r", i, DATA]) }
# config.sh
File.open("#{DIR}/config", "w") do |f|
f.puts("CONFIG_HOST='#{@options[:host]}'")
f.puts("CONFIG_RUBY='#{@config.ruby}'")
f.puts("CONFIG_RUBYGEMS='#{RUBYGEMS}'")
end
# keys
ssh_key = "#{ENV["HOME"]}/.ssh/#{PUBKEY}"
if File.exists?(ssh_key)
run("cp", [ssh_key, DIR])
end
Dir.chdir(File.dirname(DIR)) do
run("tar", ["cfpz", TAR, File.basename(DIR)])
end
end
# Copy the tgz to the host, then run there.
def ssh_tgz
begin
banner "scp #{TAR} to #{@options[:host]}:#{TAR}..."
args = []
args += @config.ssh_options if @config.ssh_options
args << TAR
args << "#{@options[:host]}:#{TAR}"
run("scp", args)
cmd = [
"cd /tmp",
"(sudo -n echo gub > /dev/null 2> /dev/null || (echo `whoami` could not sudo. && exit 1))",
"sudo rm -rf #{DIR}",
"sudo tar xfpz #{TAR}",
"sudo #{DIR}/gem/teleport/run.sh"
]
banner "ssh to #{@options[:host]} and run..."
args = []
args += @config.ssh_options if @config.ssh_options
args << @options[:host]
args << cmd.join(" && ")
run("ssh", args)
rescue RunError
fatal("Failed!")
end
banner "Success!"
end
# Teleport to the host.
def teleport
read_config
assemble_tgz
ssh_tgz
end
# We're running on the host - install!
def install
Dir.chdir(DATA) do
read_config
end
Install.new(@config)
end
# try to infer a new Telfile based on the current machine
def infer
Infer.new
end
end
end
|
module Telos
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.0.1".freeze
end
Bump patch version
module Telos
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.0.2".freeze
end
|
module Termpic
  # Renders an image file in the terminal, one ANSI-coloured space per pixel.
  class Image
    def initialize(path)
      @image = Magick::ImageList.new(path)
    end

    # Print the picture row by row using background-coloured spaces.
    def draw
      @image.rows.times do |row|
        cells = (0...@image.columns).map do |col|
          px = @image.pixel_color(col, row)
          # ImageMagick channels are 16-bit; scale each down to 0..255
          " ".background(px.red / 256, px.green / 256, px.blue / 256)
        end
        puts cells.join
      end
    end
  end
end
extract methods
module Termpic
  # Renders an image file in the terminal via ANSI background colours.
  # Pipeline: rgb_analyze -> ansi_analyze -> puts_ansi.
  class Image
    def initialize(path)
      @image = Magick::ImageList.new(path)
    end

    # Run the full pipeline and print the image.
    def draw
      rgb_analyze
      ansi_analyze
      puts_ansi
    end

    # Extract [r, g, b] (0..255) for every pixel into @rgb, row-major.
    def rgb_analyze
      @rgb = []
      cols = @image.columns
      rows = @image.rows
      rows.times do |y|
        cols.times do |x|
          @rgb[y] ||= []
          pixcel = @image.pixel_color(x, y)
          # ImageMagick channels are 16-bit; scale each down to 0..255
          r = pixcel.red / 256
          g = pixcel.green / 256
          b = pixcel.blue / 256
          @rgb[y] << [r, g, b]
        end
      end
    end

    # Convert @rgb into a single ANSI string stored in @ansi.
    # fixed: the old code used @rgb.map! purely for iteration, which
    # clobbered @rgb with garbage as a side effect; #each leaves it intact.
    def ansi_analyze
      raise "use rgb_analyze before ansi_analyze" unless @rgb
      ret = []
      @rgb.each do |row|
        ret << row.map{|pixcel|
          r, g, b = pixcel
          " ".background(r, g, b)
        }.join
      end
      @ansi = ret.join("\n")
    end

    # Print the pre-rendered ANSI image.
    # fixed: the error message referred to a nonexistent to_ansi method.
    def puts_ansi
      raise "use ansi_analyze before puts_ansi" unless @ansi
      puts @ansi
    end
  end
end
|
# Extensions to core Hash. NOTE(review): this is a monkey-patch; #update
# and #compact deliberately shadow modern built-ins with different
# semantics (documented below).
class Hash
  class << self
    # map any Enumerable into a Hash, like Hash[obj.map ... ]
    # Nil results from the block are dropped.
    def map(obj, &block)
      Hash[
        obj.map do |*args|
          block.call(*args)
        end.compact
      ]
    end
  end # class << self

  # expand update() to accept multiple arguments, NON-destructively
  # eg. {}.update({a: 1}, {b: 2})
  # NOTE(review): built-in Hash#update mutates the receiver; this override
  # returns a modified copy instead. Callers here rely on copy semantics.
  def update(*hashes)
    clone.update!(*hashes)
  end

  # destructive multi-hash merge; returns self
  def update!(*hashes)
    hashes.each do |h|
      h.each { |k, v| self[k] = v }
    end
    self
  end

  # copy of self without the given keys
  def except(*keys)
    clone.except!(*keys)
  end

  # remove the given keys in place; returns self
  def except!(*keys)
    keys.each { |key| delete(key) }
    self
  end

  # sub-hash of the given keys (missing keys come back with nil values);
  # accepts either a key list or a single Enumerable of keys
  def select_keys(*keys)
    if keys.length == 1 and keys.first.class < Enumerable
      keys = keys.first
    end
    Hash.map keys do |k|
      [k, self[k]]
    end
  end

  def select_keys!(*keys)
    replace(select_keys(*keys))
  end

  # drop falsy values; with :blanks also drop empty values, and with
  # :falsy additionally drop zeros.
  # NOTE(review): shadows Ruby 2.4+ Hash#compact (which drops only nils) —
  # this version also drops false values.
  def compact(modifier = nil)
    falsy = modifier == :falsy
    blanks = falsy || modifier == :blanks
    reject do |k, v|
      isblank = blanks && v.respond_to?(:empty?) && v.empty?
      isfalsy = falsy && (v == 0)
      !v || isblank || isfalsy
    end
  end

  def compact!(modifier = nil)
    replace(compact(modifier))
  end

  # map the block's results back to a hash; nil results are dropped
  def hmap(&block)
    Hash[map { |k, v| block.call(k, v) }.compact]
  end

  def hmap!(&block)
    replace(hmap(&block))
  end

  # map keys, but preserve associated values
  # ie. http://apidock.com/rails/v4.2.7/Hash/transform_keys
  def kmap(&block)
    Hash[map do |k, v|
      [block.arity == 1 ? block.call(k) : block.call(k, v), v]
    end]
  end

  def kmap!(&block)
    replace(kmap(&block))
  end

  # map values, but preserve associated keys
  # ie. http://apidock.com/rails/v4.2.7/Hash/transform_values
  def vmap(&block)
    Hash[map do |k, v|
      [k, block.arity == 1 ? block.call(v) : block.call(k, v)]
    end]
  end

  def vmap!(&block)
    replace(vmap(&block))
  end

  # sort by key (or by the block applied to each key), for pretty printing
  def ksort(&block)
    Hash[
      sort_by do |k, v|
        block ? yield(k) : k
      end
    ]
  end

  def ksort!(&block)
    replace(ksort(&block))
  end

  # sort by value (or by the block applied to each value)
  def vsort(&block)
    Hash[
      sort_by do |k, v|
        block ? yield(v) : v
      end
    ]
  end

  def vsort!(&block)
    replace(vsort(&block))
  end

  # set-like difference: drop entries whose key appears in +other+
  def -(other)
    raise TypeError unless other.class <= Hash
    select { |k, v| !other.has_key? k }
  end

  # fixed: a private `replace(hash)` helper here shadowed the public
  # built-in Hash#replace, making h.replace(...) raise NoMethodError for
  # every Hash in the process. The built-in has identical semantics
  # (wholesale content replacement), so the override was removed and the
  # bang methods call it directly.
end
cleanup
class Hash
class << self
# map any Enumerable into a Hash, like Hash[obj.map ... ]
def map(obj, &block)
Hash[
obj.map do |*args|
block.call *args
end.compact
]
end
end # class << self
# expand update() to accept multiple arguments
# eg. {}.update({a: 1}, {b: 2})
def update(*hashes)
clone.update! *hashes
end
def update!(*hashes)
hashes.each do |h|
h.each {|k,v| self[k] = v}
end
self
end
def except(*keys)
clone.except! *keys
end
def except!(*keys)
keys.each { |key| delete(key) }
self
end
def select_keys(*keys)
if keys.length == 1 and keys.first.class < Enumerable
keys = keys.first
end
Hash.map keys do |k|
[ k, self[k] ]
end
end
def select_keys!(*keys)
replace select_keys *keys
end
def compact(modifier = nil)
falsy = modifier == :falsy
blanks = falsy || modifier == :blanks
reject do |k, v|
isblank = blanks && v.respond_to?(:empty?) && v.empty?
isfalsy = falsy && (v == 0)
!v || isblank || isfalsy
end
end
def compact!(modifier = nil)
replace compact(modifier)
end
# map the block's results back to a hash
def hmap(&block)
Hash[ map {|k, v| block.call(k, v) }.compact ]
end
def hmap!(&block)
replace hmap &block
end
# map keys, but preserve associated values
# ie. http://apidock.com/rails/v4.2.7/Hash/transform_keys
def kmap(&block)
Hash[map do |k, v|
[ block.arity == 1 ? block.call(k) : block.call(k, v), v ]
end]
end
def kmap!(&block)
replace kmap &block
end
# map values, but preserve associated keys
# ie. http://apidock.com/rails/v4.2.7/Hash/transform_values
def vmap(&block)
clone.vmap! &block
end
def vmap!(&block)
each do |k, v|
self[k] = block.arity == 1 ? block.call(v) : block.call(k, v)
end
end
# sort by key values, for pretty printing
def ksort(&block)
Hash[
sort_by do |k, v|
if block
yield k
else
k
end
end
]
end
def ksort!(&block)
replace ksort &block
end
def vsort(&block)
Hash[
sort_by do |k, v|
if block
yield v
else
v
end
end
]
end
def vsort!(&block)
replace vsort &block
end
# set like operator
def -(other)
raise TypeError unless other.class <= Hash
select {|k,v| !other.has_key? k}
end
private
# replace contents of hash with new stuff
def replace(hash)
clear
merge! hash
end
end
|
module Three
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "1.0.1".freeze
end
Bump the version.
module Three
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "1.1.0".freeze
end
|
if ENV["ANCIENT_THRIFT"]
$LOAD_PATH.unshift("/Users/eweaver/p/twitter/rails/vendor/gems/thrift-751142/lib")
$LOAD_PATH.unshift("/Users/eweaver/p/twitter/rails/vendor/gems/thrift-751142/ext")
require 'thrift'
else
require 'rubygems'
require 'thrift'
end
require 'rubygems'
require 'thrift_client/thrift'
class ThriftClient
class NoServersAvailable < StandardError; end
DEFAULTS = {
:protocol => Thrift::BinaryProtocol,
:protocol_extra_params => [],
:transport => Thrift::FramedTransport,
:randomize_server_list => true,
:exception_classes => [
IOError,
Thrift::Exception,
Thrift::ProtocolException,
Thrift::ApplicationException,
Thrift::TransportException,
NoServersAvailable],
:raise => true,
:retries => nil,
:server_retry_period => 1,
:server_max_requests => nil,
:timeout => 1,
:timeout_overrides => {},
:defaults => {}
}.freeze
attr_reader :client, :client_class, :server_list, :options
=begin rdoc
Create a new ThriftClient instance. Accepts an internal Thrift client class (such as CassandraRb::Client), a list of servers with ports, and optional parameters.
Valid optional parameters are:
<tt>:protocol</tt>:: Which Thrift protocol to use. Defaults to <tt>Thrift::BinaryProtocol</tt>.
<tt>:protocol_extra_params</tt>:: An array of additional parameters to pass to the protocol initialization call. Defaults to <tt>[]</tt>.
<tt>:transport</tt>:: Which Thrift transport to use. Defaults to <tt>Thrift::FramedTransport</tt>.
<tt>:randomize_server_list</tt>:: Whether to connect to the servers randomly, instead of in order. Defaults to <tt>true</tt>.
<tt>:raise</tt>:: Whether to reraise errors if no responsive servers are found. Defaults to <tt>true</tt>.
<tt>:retries</tt>:: How many times to retry a request. Defaults to the number of servers defined.
<tt>:server_retry_period</tt>:: How many seconds to wait before trying to reconnect after marking all servers as down. Defaults to <tt>1</tt>. Set to <tt>nil</tt> to retry endlessly.
<tt>:server_max_requests</tt>:: How many requests to perform before moving on to the next server in the pool, regardless of error status. Defaults to <tt>nil</tt> (no limit).
<tt>:timeout</tt>:: Specify the default timeout for every call. Defaults to <tt>.
<tt>:timeout_overrides</tt>:: Specify timeouts on a per-method basis. Only work with <tt>Thrift::BufferedTransport</tt>.
<tt>:defaults</tt>:: Specify default values to return on a per-method basis, if <tt>:raise</tt> is set to false.
=end rdoc
# Build a client around +client_class+, defining one proxy entry point for
# every recv_* method the generated Thrift client exposes (recv_foo => foo).
#
# @param client_class the generated Thrift client class (e.g. CassandraRb::Client)
# @param servers      a single server or list of servers, each "host:port"
# @param options      see DEFAULTS / the rdoc above for recognized keys
def initialize(client_class, servers, options = {})
  @options = DEFAULTS.merge(options)
  @client_class = client_class
  @server_list = Array(servers)
  # Read :retries from the merged options, consistent with every other key.
  @retries = @options[:retries] || @server_list.size
  @server_list = @server_list.sort_by { rand } if @options[:randomize_server_list]
  if @options[:timeout_overrides].any?
    # method_defined? is Ruby-version-safe: on 1.9+ instance_methods returns
    # Symbols, so the old instance_methods.include?("timeout=") check never
    # matched and timeout overrides were silently disabled.
    if @options[:transport].method_defined?(:timeout=)
      @set_timeout = true
    else
      warn "ThriftClient: Timeout overrides have no effect with transport type #{@options[:transport]}"
    end
  end
  @live_server_list = @server_list.dup
  @last_retry = Time.now
  @request_count = 0
  @max_requests = @options[:server_max_requests]
  @client_class.instance_methods.each do |method_name|
    # to_s keeps the regexp match working when instance_methods yields
    # Symbols (Ruby 1.9+) as well as Strings (1.8).
    if method_name.to_s =~ /^recv_(.*)$/
      instance_eval("def #{$1}(*args); proxy(:'#{$1}', *args); end")
    end
  end
end
# Force the client to connect to the server.
#
# Pops the next candidate from the live-server list, opens the configured
# transport to it, and wraps it in the protocol/client stack. On
# Thrift::TransportException (e.g. connection refused) the whole method is
# retried, drawing the next server; once the live list empties within the
# retry period, next_server raises NoServersAvailable, which propagates.
def connect!
  server = next_server.to_s.split(":")
  raise ArgumentError, 'Servers must be in the form "host:port"' if server.size != 2
  @transport = @options[:transport].new(
    Thrift::Socket.new(server.first, server.last.to_i, @options[:timeout]))
  @transport.open
  @client = @client_class.new(@options[:protocol].new(@transport, *@options[:protocol_extra_params]))
rescue Thrift::TransportException
  retry
end
# Force the client to disconnect from the server, resetting the
# per-connection request counter and dropping the wrapped client.
def disconnect!
  begin
    @transport.close
  rescue
    # Closing a dead or never-opened transport is a no-op.
  end
  @request_count = 0
  @client = nil
end
private
# Forward +method_name+ (with +args+) to the wrapped Thrift client,
# reconnecting on demand and retrying on the configured exception classes.
# Once the retry budget is exhausted the failure goes to handle_exception
# (re-raise, or return the per-method default).
def proxy(method_name, *args)
  disconnect! if @max_requests and @request_count >= @max_requests
  connect! unless @client
  set_timeout!(method_name) if @set_timeout
  @request_count += 1
  @client.send(method_name, *args)
rescue NoServersAvailable => e
  handle_exception(e, method_name, args)
rescue *@options[:exception_classes] => e
  tries ||= @retries
  # <= 0 instead of == 0: with a zero (or negative) retry budget the
  # original decrement skipped straight past zero and retried forever.
  if (tries -= 1) <= 0
    handle_exception(e, method_name, args)
  else
    disconnect!
    retry
  end
end
# Apply the per-method timeout override (falling back to the global
# :timeout) to the wrapped client before the next call.
def set_timeout!(method_name)
  override = @options[:timeout_overrides][method_name.to_sym]
  @client.timeout = override || @options[:timeout]
end
# Terminal failure handler: re-raise when :raise is configured, otherwise
# answer with the per-method default value (nil if none was configured).
def handle_exception(e, method_name, args)
  if @options[:raise]
    raise e
  else
    @options[:defaults][method_name.to_sym]
  end
end
# Pop the next live server. When the live list is empty, refill it from the
# full server list — unless a retry period is configured and has not yet
# elapsed, in which case NoServersAvailable is raised.
def next_server
  if @live_server_list.empty?
    retry_period = @options[:server_retry_period]
    if retry_period && Time.now < @last_retry + retry_period
      raise NoServersAvailable, "No live servers in #{@server_list.inspect} since #{@last_retry.inspect}."
    end
    @last_retry = Time.now
    @live_server_list = @server_list.dup
  end
  @live_server_list.pop
end
end
Docs.
# Load the Thrift runtime. Setting ANCIENT_THRIFT points at a vendored,
# pre-gem build of the library.
# NOTE(review): these absolute /Users/eweaver paths only exist on the
# original author's machine — confirm whether the ANCIENT_THRIFT branch is
# still needed.
if ENV["ANCIENT_THRIFT"]
  $LOAD_PATH.unshift("/Users/eweaver/p/twitter/rails/vendor/gems/thrift-751142/lib")
  $LOAD_PATH.unshift("/Users/eweaver/p/twitter/rails/vendor/gems/thrift-751142/ext")
  require 'thrift'
else
  require 'rubygems'
  require 'thrift'
end
require 'rubygems'
require 'thrift_client/thrift'
class ThriftClient
  # Raised when every known server has been tried and the retry period has
  # not yet elapsed.
  class NoServersAvailable < StandardError; end

  DEFAULTS = {
    :protocol => Thrift::BinaryProtocol,
    :protocol_extra_params => [],
    :transport => Thrift::FramedTransport,
    :randomize_server_list => true,
    :exception_classes => [
      IOError,
      Thrift::Exception,
      Thrift::ProtocolException,
      Thrift::ApplicationException,
      Thrift::TransportException,
      NoServersAvailable],
    :raise => true,
    :retries => nil,
    :server_retry_period => 1,
    :server_max_requests => nil,
    :timeout => 1,
    :timeout_overrides => {},
    :defaults => {}
  }.freeze

  attr_reader :client, :client_class, :server_list, :options

=begin rdoc
Create a new ThriftClient instance. Accepts an internal Thrift client class (such as CassandraRb::Client), a list of servers with ports, and optional parameters.

Valid optional parameters are:

<tt>:protocol</tt>:: Which Thrift protocol to use. Defaults to <tt>Thrift::BinaryProtocol</tt>.
<tt>:protocol_extra_params</tt>:: An array of additional parameters to pass to the protocol initialization call. Defaults to <tt>[]</tt>.
<tt>:transport</tt>:: Which Thrift transport to use. Defaults to <tt>Thrift::FramedTransport</tt>.
<tt>:randomize_server_list</tt>:: Whether to connect to the servers randomly, instead of in order. Defaults to <tt>true</tt>.
<tt>:raise</tt>:: Whether to reraise errors if no responsive servers are found. Defaults to <tt>true</tt>.
<tt>:retries</tt>:: How many times to retry a request. Defaults to the number of servers defined.
<tt>:server_retry_period</tt>:: How many seconds to wait before trying to reconnect after marking all servers as down. Defaults to <tt>1</tt>. Set to <tt>nil</tt> to retry endlessly.
<tt>:server_max_requests</tt>:: How many requests to perform before moving on to the next server in the pool, regardless of error status. Defaults to <tt>nil</tt> (no limit).
<tt>:timeout</tt>:: Specify the default timeout in seconds. Defaults to <tt>1</tt>.
<tt>:timeout_overrides</tt>:: Specify additional timeouts on a per-method basis, in seconds. Only works with <tt>Thrift::BufferedTransport</tt>.
<tt>:defaults</tt>:: Specify default values to return on a per-method basis, if <tt>:raise</tt> is set to false.
=end rdoc
  def initialize(client_class, servers, options = {})
    @options = DEFAULTS.merge(options)
    @client_class = client_class
    @server_list = Array(servers)
    # Read :retries from the merged options, consistent with every other key.
    @retries = @options[:retries] || @server_list.size
    @server_list = @server_list.sort_by { rand } if @options[:randomize_server_list]
    if @options[:timeout_overrides].any?
      # method_defined? is Ruby-version-safe: on 1.9+ instance_methods
      # returns Symbols, so include?("timeout=") never matched and timeout
      # overrides were silently disabled.
      if @options[:transport].method_defined?(:timeout=)
        @set_timeout = true
      else
        warn "ThriftClient: Timeout overrides have no effect with transport type #{@options[:transport]}"
      end
    end
    @live_server_list = @server_list.dup
    @last_retry = Time.now
    @request_count = 0
    @max_requests = @options[:server_max_requests]
    # Define one proxy entry point per recv_* method on the generated client
    # (recv_foo => foo). to_s keeps the regexp working when instance_methods
    # yields Symbols (Ruby 1.9+).
    @client_class.instance_methods.each do |method_name|
      if method_name.to_s =~ /^recv_(.*)$/
        instance_eval("def #{$1}(*args); proxy(:'#{$1}', *args); end")
      end
    end
  end

  # Force the client to connect to the server. Transport errors retry with
  # the next server; NoServersAvailable propagates once the pool is
  # exhausted within the retry period.
  def connect!
    server = next_server.to_s.split(":")
    raise ArgumentError, 'Servers must be in the form "host:port"' if server.size != 2
    @transport = @options[:transport].new(
      Thrift::Socket.new(server.first, server.last.to_i, @options[:timeout]))
    @transport.open
    @client = @client_class.new(@options[:protocol].new(@transport, *@options[:protocol_extra_params]))
  rescue Thrift::TransportException
    retry
  end

  # Force the client to disconnect from the server and reset the
  # per-connection request counter.
  def disconnect!
    @transport.close rescue nil
    @request_count = 0
    @client = nil
  end

  private

  # Forward +method_name+ to the wrapped client, reconnecting on demand and
  # retrying on the configured exception classes; failures beyond the retry
  # budget are handed to handle_exception.
  def proxy(method_name, *args)
    disconnect! if @max_requests and @request_count >= @max_requests
    connect! unless @client
    set_timeout!(method_name) if @set_timeout
    @request_count += 1
    @client.send(method_name, *args)
  rescue NoServersAvailable => e
    handle_exception(e, method_name, args)
  rescue *@options[:exception_classes] => e
    tries ||= @retries
    # <= 0 (rather than == 0) so a zero/negative retry budget cannot skip
    # past zero and retry forever.
    if (tries -= 1) <= 0
      handle_exception(e, method_name, args)
    else
      disconnect!
      retry
    end
  end

  # Apply the per-method timeout override (or the global default) to the
  # wrapped client before a call.
  def set_timeout!(method_name)
    @client.timeout = @options[:timeout_overrides][method_name.to_sym] || @options[:timeout]
  end

  # Re-raise when :raise is configured; otherwise return the per-method
  # default value, if any.
  def handle_exception(e, method_name, args)
    raise e if @options[:raise]
    @options[:defaults][method_name.to_sym]
  end

  # Pop the next live server, refilling the live list from the full list
  # once the retry period (if any) has elapsed; raises NoServersAvailable
  # while still inside the period.
  def next_server
    if @live_server_list.empty?
      if @options[:server_retry_period] and Time.now < @last_retry + @options[:server_retry_period]
        raise NoServersAvailable, "No live servers in #{@server_list.inspect} since #{@last_retry.inspect}."
      end
      @last_retry = Time.now
      @live_server_list = @server_list.dup
    end
    @live_server_list.pop
  end
end
|
require 'securerandom'
#
# Supporting class for Managing Tokens, i.e. Authentication Tokens for REST API, etc.
#
class TokenManager
  # Option keys callers are never allowed to store on a token themselves.
  RESTRICTED_OPTIONS = [:expires_on]
  DEFAULT_NS = "default"

  # @param namespace [String] logical bucket for tokens; each namespace gets
  #   its own backing store
  # @param options [Hash] supports :token_ttl — default lifetime of generated
  #   tokens (10 minutes)
  def initialize(namespace = DEFAULT_NS, options = {})
    @namespace = namespace
    @options = {:token_ttl => 10.minutes}.merge(options)
  end

  # Generate and store a new random token.
  #
  # @param token_options [Hash] extra data stored with the token;
  #   :token_ttl_override gives this one token a non-default lifetime
  # @return [String] the generated token (32 hex characters)
  def gen_token(token_options = {})
    token = SecureRandom.hex(16)
    ttl = token_options.delete(:token_ttl_override) || token_ttl
    token_data = {:token_ttl => ttl, :expires_on => Time.now.utc + ttl}
    # Expire the store entry after this token's own TTL; using the
    # manager-wide default here would evict overridden tokens at a time that
    # disagrees with their stored :expires_on.
    token_store.write(token,
                      token_data.merge!(prune_token_options(token_options)),
                      :expires_in => ttl)
    token
  end

  # Push a token's expiry forward by its own stored TTL.
  # @return [Hash] empty hash when the token is unknown or already expired
  def reset_token(token)
    token_data = token_store.read(token)
    return {} if token_data.nil?
    ttl = token_data[:token_ttl]
    token_data[:expires_on] = Time.now.utc + ttl
    token_store.write(token,
                      token_data,
                      :expires_in => ttl)
  end

  # Read a token's stored data, or a single field of it when +what+ is given.
  # @return [Hash, Object] {} when the token is invalid
  def token_get_info(token, what = nil)
    return {} unless token_valid?(token)
    what.nil? ? token_store.read(token) : token_store.read(token)[what]
  end

  # @return [Boolean] whether the token is still present in the store
  def token_valid?(token)
    !token_store.read(token).nil?
  end

  # Remove the token from the store immediately.
  def invalidate_token(token)
    token_store.delete(token)
  end

  # The manager-wide default token lifetime.
  def token_ttl
    @options[:token_ttl]
  end

  private

  def token_store
    TokenStore.acquire(@namespace, token_ttl)
  end

  # Strip option keys callers may not set (see RESTRICTED_OPTIONS).
  def prune_token_options(token_options = {})
    token_options.except(*RESTRICTED_OPTIONS)
  end
end
Use the wrapped #token_ttl reader instead of going through @options directly.
require 'securerandom'
#
# Supporting class for Managing Tokens, i.e. Authentication Tokens for REST API, etc.
#
class TokenManager
  # Option keys callers are never allowed to store on a token themselves.
  RESTRICTED_OPTIONS = [:expires_on]
  DEFAULT_NS = "default"

  # @param namespace [String] logical bucket for tokens; each namespace gets
  #   its own backing store
  # @param options [Hash] supports :token_ttl — default lifetime of generated
  #   tokens (10 minutes)
  def initialize(namespace = DEFAULT_NS, options = {})
    @namespace = namespace
    @options = {:token_ttl => 10.minutes}.merge(options)
  end

  # Generate and store a new random token.
  #
  # @param token_options [Hash] extra data stored with the token;
  #   :token_ttl_override gives this one token a non-default lifetime
  # @return [String] the generated token (32 hex characters)
  def gen_token(token_options = {})
    token = SecureRandom.hex(16)
    ttl = token_options.delete(:token_ttl_override) || token_ttl
    token_data = {:token_ttl => ttl, :expires_on => Time.now.utc + ttl}
    # Expire the store entry after this token's own TTL (not the manager-wide
    # token_ttl): an overridden token previously got the default cache
    # expiry, disagreeing with its stored :expires_on.
    token_store.write(token,
                      token_data.merge!(prune_token_options(token_options)),
                      :expires_in => ttl)
    token
  end

  # Push a token's expiry forward by its own stored TTL.
  # @return [Hash] empty hash when the token is unknown or already expired
  def reset_token(token)
    token_data = token_store.read(token)
    return {} if token_data.nil?
    ttl = token_data[:token_ttl]
    token_data[:expires_on] = Time.now.utc + ttl
    token_store.write(token,
                      token_data,
                      :expires_in => ttl)
  end

  # Read a token's stored data, or a single field of it when +what+ is given.
  # @return [Hash, Object] {} when the token is invalid
  def token_get_info(token, what = nil)
    return {} unless token_valid?(token)
    what.nil? ? token_store.read(token) : token_store.read(token)[what]
  end

  # @return [Boolean] whether the token is still present in the store
  def token_valid?(token)
    !token_store.read(token).nil?
  end

  # Remove the token from the store immediately.
  def invalidate_token(token)
    token_store.delete(token)
  end

  # The manager-wide default token lifetime.
  def token_ttl
    @options[:token_ttl]
  end

  private

  def token_store
    TokenStore.acquire(@namespace, token_ttl)
  end

  # Strip option keys callers may not set (see RESTRICTED_OPTIONS).
  def prune_token_options(token_options = {})
    token_options.except(*RESTRICTED_OPTIONS)
  end
end
|
# Reader/writer for EQDSK-format tokamak equilibrium files.
#
# An eqdsk file is a fixed-layout text file: a header line ending in the grid
# dimensions (nrbox, nzbox) followed by the scalars and profiles listed in
# DATANAMES, five numbers per line. Profiles are stored as GSL vectors and
# matrices.
class TokFile::Eqdsk
  attr_accessor :nrbox, :nzbox
  #attr_accessor :rboxlen, :zboxlen, :r0exp, :rboxlft
  # Every datum in an eqdsk file, in file order. :dummy entries are padding
  # values that are read and then overwritten.
  DATANAMES = [
    :rboxlen, :zboxlen, :r0exp, :rboxlft, :dummy,
    :raxis, :zaxis, :psiaxis, :dummy, :b0exp,
    :current, :dummy, :dummy, :dummy, :dummy,
    :dummy, :dummy, :dummy, :dummy, :dummy,
    :t,
    :pr,
    :ttprime,
    :pprime,
    :psi,
    :q,
    :nbound, :nlimiter,
    :bound,
    :limiter
  ]
  # Declare the boundary/limiter accessors up front: when a file ends before
  # the boundary section, #initialize breaks out of the read loop before
  # `self.class.attr_accessor :nbound` runs, and `send(:nbound)` below must
  # still answer nil (instead of raising NoMethodError) so the boundary can
  # be reconstructed.
  attr_accessor :rbound, :zbound, :rlimiter, :zlimiter, :nbound, :nlimiter, :bound, :limiter
  require 'scanf'
  # Split one line of eqdsk numbers into an array of Floats. The lookahead in
  # the regexp splits fused Fortran-style numbers such as "1.0E+00-2.0E+00".
  # NOTE(review): eval on file content executes arbitrary Ruby — only safe on
  # trusted eqdsk files.
  def read(line)
    #@lines_columns[i] = @lines[i].split(/\s+|(?<=\d)[+-]/).map{|s| eval(s)}
    line.sub(/\A\s+/, '').sub(/\s+\Z/, '').split(/\s+|(?<=\d)(?=[+-])/).map{|s| eval(s)}
    #line.scan(/.{15}/).map{|s| eval(s)}
    #ep ['line', line]
    #arr = []
    #res = line.scanf("%16.9E"){|n| arr.push n}
    #line.scanf("%E"){|n| arr.push n[0]}
    #ep ['res', res, arr]
    #ep ['res', arr]
    #arr
  end
  # Return a copy of self in the requested format; only 'eqdsk' is supported.
  def convert(format)
    case format
    when 'eqdsk'
      self.dup
    else
      raise "Conversion from eqdsk to #{format} not implemented yet"
    end
  end
  # Parse the given eqdsk file, creating an accessor for each datum as it is
  # read. If the file ends before the boundary data, the boundary is
  # reconstructed from the psi=0 contour.
  def initialize(file)
    iline = 0
    counter = 0
    #filehandle = File.open(file, 'r')
    lines = File.read(file).split("\n")
    #@datastarts = DATANAMES.inject({}){|h, name| h[name] = nil; h}
    #line1 = filehandle.gets
    #@nrbox, @nzbox = filehandle.gets.split(/\s+/).slice(-2..-1).map{|s| eval(s)}
    # The grid dimensions are the last two fields of the header line.
    @nrbox, @nzbox = lines[0].split(/\s+/).slice(-2..-1).map{|s| eval(s)}
    #@rboxlen, @zboxlen, @r0exp, @rboxlft, dummy = read(filehandle.gets)
    #@raxis, @zaxis, @psiaxis, dummy, @b0exp = read(filehandle.gets)
    #@current, dummy, dummy, dummy, dummy = read(filehandle.gets)
    #dummy, dummy, dummy, dummy, dummy = read(filehandle.gets)
    array = []
    i = 1
    vb2 = TokFile.verbosity > 1
    if vb2
      eputs
      eputs "Reading data from eqdsk file #{file}."
      eputs
    end
    total_size = 0
    DATANAMES.each do |name|
      sz = size(name)
      total_size += sz
      #ep ['name', name, 'size', sz]
      if vb2
        Terminal.erewind(1)
        eputs "#{name}(#{sz})#{Terminal::CLEAR_LINE}" if vb2
      end
      if array.size < sz
        #array += read(filehandle.gets)
        #array += read(lines[i])
        #i+=1
        begin
          # Values come five per line; slice off enough lines to cover sz.
          # Running past the end of the file makes slice return nil, whose
          # NoMethodError (on join) signals truncated data.
          array += read(lines.slice(i...(i+=(sz.to_f/5.0).ceil)).join(' '))
        rescue NoMethodError
          $stderr.puts "Finished reading at #{name}"
          break
        end
        #array += lines.slice(i...(i+=(sz.to_f/5.0).ceil)).join(' ')
        #filehandle.gets.scanf("%e"){|scan| array.push scan[0]}
      end
      if array.size == sz
        data = array
        array = []
      else
        data = []
        while data.size < sz
          data.push array.shift
        end
      end
      self.class.attr_accessor name
      case name
      when :psi
        set(name, GSL::Matrix.alloc(*data.pieces(@nzbox)).transpose)
      when :bound
        # Boundary points are interleaved (r, z) pairs.
        data = data.pieces(@nbound).transpose
        set(:rbound, data[0].to_gslv)
        set(:zbound, data[1].to_gslv)
      when :limiter
        data = data.pieces(@nlimiter).transpose
        set(:rlimiter, data[0].to_gslv)
        set(:zlimiter, data[1].to_gslv)
      else
        case sz
        when 1
          #p [name, 'yy', data]
          set(name, data[0])
        else
          #p [name,'xx', data.inspect[0..2000]]
          #p [name,data.to_gslv.inspect[0..10]]
          set(name, data.to_gslv)
        end
      end
    end
    @r = GSL::Vector.linspace(@rboxlft, @rboxlft+@rboxlen, @nrbox)
    @z = GSL::Vector.linspace(-@zboxlen/2.0, @zboxlen/2.0, @nzbox)
    if vb2
      Terminal.erewind(1)
      eputs "Read total data size of #{total_size.to_f * 8.0/1.0e6} MB"
    end
    if TokFile.verbosity > 0
      eputs <<EOF
--------------------------------------
Tokfile
--------------------------------------
Successfully read an eqdsk file called
#{file}
with the following parameters:
nrbox = #@nrbox
nzbox = #@nzbox
nbound = #@nbound
raxis = #@raxis
--------------------------------------
EOF
    end
    #@lines = File.read(file).split("\n").map{|str| str.sub(/\A\s+/, '').sub(/\s+\Z/, '')}
    #@nrbox, @nzbox = @lines[0].split(/\s+/).slice(-2..-1).map{|s| eval(s)}
    #in_data = true; i = 1
    #@atoms = []
    #@lines_columns = []
    #while in_data
    #@lines_columns[i] = @lines[i].split(/\s+|(?<=\d)[+-]/).map{|s| eval(s)}
    #@atoms += @lines_columns[i]
    #if @nbound = @atoms[start(:nbound)]
    #if @atoms.size > start(:limiter)
    #in_data = false
    #end
    #end
    #i+=1
    #end
    #[:t, :p, :pprime, :ttprime, :psi, :q, :rzbound].each do |var|
    #attr_accessor var
    #st = start(var)
    #set(var, @atoms.slice(st...(st+@nrbox))).to_gslv
    #end
    #@rboxlen, @zboxlen, @r0exp, @rboxlft, dummy = @lines_columns[1]
    #ep ['lines_columns', @lines_columns[1], @lines[1]]
    # nbound is nil when the file ended before the boundary section (see the
    # attr_accessor declaration above).
    if not send(:nbound)
      $stderr.puts("Boundary missing: trying to reconstruct")
      reconstruct_boundary
    end
  end
  # Five 16-character scientific-notation fields per output line.
  FMT="%16.9e" * 5 + "\n"
  # Write this equilibrium back out in eqdsk format.
  def write_file(filename)
    File.open(filename, 'w') do |file|
      printf(file, " EQDSK GEN.BY TOKFILE %d %d\n", @nrbox, @nzbox)
      array = []
      DATANAMES.each do |dname|
        next if [:zlimiter, :nlimiter].include? dname
        sz = size(dname)
        data = sz==1 ? [send(dname)] : send(dname).to_a
        case dname
        when :psi
          data = data.transpose.flatten
        when :bound
          data = [send(:rbound).to_a, send(:zbound).to_a].transpose.flatten
        when :limiter
          data = [send(:rlimiter).to_a, send(:zlimiter).to_a].transpose.flatten
        end
        unless dname==:nbound
          array += data
        end
        #p [dname]
        while array.size > 5
          out = []
          5.times{out.push(array.shift)}
          printf(file, FMT, *out)
        end
        if [:nbound, :limiter].include? dname
          # Clear buffer. Fewer than five values may remain, so size the
          # format string to the buffer — printf(file, FMT, *array) raised
          # ArgumentError when given fewer arguments than FMT's five fields.
          printf(file, "%16.9e"*array.size + "\n", *array)
          array = []
        end
        if dname == :nbound
          printf(file, "%5d%5d\n", data[0], send(:nlimiter))
        end
      end
    end
  end
  # Rebuild the plasma boundary (and limiter) from the psi = 0 contour when
  # the file did not provide boundary data.
  def reconstruct_boundary
    require 'gsl_extras'
    $stderr.puts "Reconstructing boundary"
    contour = GSL::Contour.new(@r, @z, @psi)
    #psikit = GraphKit.quick_create([@r, @z, @psi])
    #psikit.data[0].gp.with = 'pm3d'
    #psikit.gp.view = "map"
    ck = contour.graphkit(0.0)
    #psikit+= GraphKit.quick_create([ck.data[0].x.data, ck.data[0].y.data, ck.data[0].y.data.collect{0.0}])
    #psikit.gnuplot
    @nbound = @nlimiter = ck.data[0].x.data.size
    @rbound = @rlimiter = ck.data[0].x.data
    @zbound = @zlimiter = ck.data[0].y.data
  end
  # Build a multi-window plot of the radial profiles plus a psi map with the
  # boundary overlaid.
  def summary_graphkit
    psivec = GSL::Vector.linspace(@psi.min, 0.0, @nrbox)
    multkit = GraphKit::MultiWindow.new([:pr, :pprime, :t, :ttprime, :q].map{|name|
      kit = GraphKit.quick_create([psivec, send(name)])
      kit.title = name.to_s
      kit.ylabel = nil
      kit.xlabel = 'psi'
      kit
    })
    psikit = GraphKit.quick_create([@r, @z, @psi])
    psikit.data[0].gp.with = 'pm3d'
    psikit.gp.view = "map"
    boundkit = GraphKit.quick_create([@rbound, @zbound, @rbound.collect{0.0}])
    psikit += boundkit
    psikit.gp.key = "off"
    psikit.title = 'psi'
    psikit.xlabel = 'R'
    psikit.ylabel = 'Z'
    multkit.push psikit
    multkit.gp.multiplot = "layout 2,3"
    multkit
  end
  # Number of values stored in the file for the given datum.
  def size(var)
    case var
    #when :nbound, :nlimiter
    #1
    when :bound
      @nbound * 2
    when :limiter
      @nlimiter * 2
    when :psi
      @nrbox * @nzbox
    when :t, :pr, :pprime, :ttprime, :q
      @nrbox
    else
      1
    end
  end
  #def start(var)
  #case var
  #when :t
  #20
  #when :p
  #start(:t) + @nrbox
  #when :ttprime
  #start(:p) + @nrbox
  #when :pprime
  #start(:ttprime) + @nrbox
  #when :psi
  #start(:pprime) + @nrbox
  #when :q
  #start(:psi) + @nrbox * @nzbox
  #when :nbound
  #start(:q) + @nrbox + 1
  #when :rzbound
  #start(:q) + @nrbox + 3
  #when :limiter
  #start(:rzbound) + @nbound * 2
  #else
  #raise "Start of #{var} unknown"
  #end
  #end
  #def
  #def get_int(line, col)
  #end
end
Fixed a bug triggered by missing boundary data in eqdsk files (boundary accessors declared up front; partial output buffers flushed with a correctly sized format string).
# Reader/writer for EQDSK-format tokamak equilibrium files.
#
# An eqdsk file is a fixed-layout text file: a header line ending in the grid
# dimensions (nrbox, nzbox) followed by the scalars and profiles listed in
# DATANAMES, five numbers per line. Profiles are stored as GSL vectors and
# matrices.
class TokFile::Eqdsk
  attr_accessor :nrbox, :nzbox
  #attr_accessor :rboxlen, :zboxlen, :r0exp, :rboxlft
  # Every datum in an eqdsk file, in file order. :dummy entries are padding
  # values that are read and then overwritten.
  DATANAMES = [
    :rboxlen, :zboxlen, :r0exp, :rboxlft, :dummy,
    :raxis, :zaxis, :psiaxis, :dummy, :b0exp,
    :current, :dummy, :dummy, :dummy, :dummy,
    :dummy, :dummy, :dummy, :dummy, :dummy,
    :t,
    :pr,
    :ttprime,
    :pprime,
    :psi,
    :q,
    :nbound, :nlimiter,
    :bound,
    :limiter
  ]
  # Declared up front so send(:nbound) etc. return nil (instead of raising
  # NoMethodError) when the file ends before the boundary section is reached.
  attr_accessor :rbound, :zbound, :rlimiter, :zlimiter, :nbound, :nlimiter, :bound, :limiter
  require 'scanf'
  # Split one line of eqdsk numbers into an array of Floats. The lookahead in
  # the regexp splits fused Fortran-style numbers such as "1.0E+00-2.0E+00".
  # NOTE(review): eval on file content executes arbitrary Ruby — only safe on
  # trusted eqdsk files.
  def read(line)
    #@lines_columns[i] = @lines[i].split(/\s+|(?<=\d)[+-]/).map{|s| eval(s)}
    line.sub(/\A\s+/, '').sub(/\s+\Z/, '').split(/\s+|(?<=\d)(?=[+-])/).map{|s| eval(s)}
    #line.scan(/.{15}/).map{|s| eval(s)}
    #ep ['line', line]
    #arr = []
    #res = line.scanf("%16.9E"){|n| arr.push n}
    #line.scanf("%E"){|n| arr.push n[0]}
    #ep ['res', res, arr]
    #ep ['res', arr]
    #arr
  end
  # Return a copy of self in the requested format; only 'eqdsk' is supported.
  def convert(format)
    case format
    when 'eqdsk'
      self.dup
    else
      raise "Conversion from eqdsk to #{format} not implemented yet"
    end
  end
  # Parse the given eqdsk file, creating an accessor for each datum as it is
  # read. If the file ends before the boundary data, the boundary is
  # reconstructed from the psi=0 contour.
  def initialize(file)
    iline = 0
    counter = 0
    #filehandle = File.open(file, 'r')
    lines = File.read(file).split("\n")
    #@datastarts = DATANAMES.inject({}){|h, name| h[name] = nil; h}
    #line1 = filehandle.gets
    #@nrbox, @nzbox = filehandle.gets.split(/\s+/).slice(-2..-1).map{|s| eval(s)}
    # The grid dimensions are the last two fields of the header line.
    @nrbox, @nzbox = lines[0].split(/\s+/).slice(-2..-1).map{|s| eval(s)}
    #@rboxlen, @zboxlen, @r0exp, @rboxlft, dummy = read(filehandle.gets)
    #@raxis, @zaxis, @psiaxis, dummy, @b0exp = read(filehandle.gets)
    #@current, dummy, dummy, dummy, dummy = read(filehandle.gets)
    #dummy, dummy, dummy, dummy, dummy = read(filehandle.gets)
    array = []
    i = 1
    vb2 = TokFile.verbosity > 1
    if vb2
      eputs
      eputs "Reading data from eqdsk file #{file}."
      eputs
    end
    total_size = 0
    DATANAMES.each do |name|
      sz = size(name)
      total_size += sz
      #ep ['name', name, 'size', sz]
      if vb2
        Terminal.erewind(1)
        eputs "#{name}(#{sz})#{Terminal::CLEAR_LINE}" if vb2
      end
      if array.size < sz
        #array += read(filehandle.gets)
        #array += read(lines[i])
        #i+=1
        begin
          # Values come five per line; slice off enough lines to cover sz.
          # Running past the end of the file makes slice return nil, whose
          # NoMethodError (on join) signals truncated data.
          array += read(lines.slice(i...(i+=(sz.to_f/5.0).ceil)).join(' '))
        rescue NoMethodError
          $stderr.puts "Finished reading at #{name}"
          break
        end
        #array += lines.slice(i...(i+=(sz.to_f/5.0).ceil)).join(' ')
        #filehandle.gets.scanf("%e"){|scan| array.push scan[0]}
      end
      if array.size == sz
        data = array
        array = []
      else
        data = []
        while data.size < sz
          data.push array.shift
        end
      end
      self.class.attr_accessor name
      case name
      when :psi
        set(name, GSL::Matrix.alloc(*data.pieces(@nzbox)).transpose)
      when :bound
        # Boundary points are interleaved (r, z) pairs.
        data = data.pieces(@nbound).transpose
        set(:rbound, data[0].to_gslv)
        set(:zbound, data[1].to_gslv)
      when :limiter
        data = data.pieces(@nlimiter).transpose
        set(:rlimiter, data[0].to_gslv)
        set(:zlimiter, data[1].to_gslv)
      else
        case sz
        when 1
          #p [name, 'yy', data]
          set(name, data[0])
        else
          #p [name,'xx', data.inspect[0..2000]]
          #p [name,data.to_gslv.inspect[0..10]]
          set(name, data.to_gslv)
        end
      end
    end
    @r = GSL::Vector.linspace(@rboxlft, @rboxlft+@rboxlen, @nrbox)
    @z = GSL::Vector.linspace(-@zboxlen/2.0, @zboxlen/2.0, @nzbox)
    if vb2
      Terminal.erewind(1)
      eputs "Read total data size of #{total_size.to_f * 8.0/1.0e6} MB"
    end
    if TokFile.verbosity > 0
      eputs <<EOF
--------------------------------------
Tokfile
--------------------------------------
Successfully read an eqdsk file called
#{file}
with the following parameters:
nrbox = #@nrbox
nzbox = #@nzbox
nbound = #@nbound
raxis = #@raxis
--------------------------------------
EOF
    end
    #@lines = File.read(file).split("\n").map{|str| str.sub(/\A\s+/, '').sub(/\s+\Z/, '')}
    #@nrbox, @nzbox = @lines[0].split(/\s+/).slice(-2..-1).map{|s| eval(s)}
    #in_data = true; i = 1
    #@atoms = []
    #@lines_columns = []
    #while in_data
    #@lines_columns[i] = @lines[i].split(/\s+|(?<=\d)[+-]/).map{|s| eval(s)}
    #@atoms += @lines_columns[i]
    #if @nbound = @atoms[start(:nbound)]
    #if @atoms.size > start(:limiter)
    #in_data = false
    #end
    #end
    #i+=1
    #end
    #[:t, :p, :pprime, :ttprime, :psi, :q, :rzbound].each do |var|
    #attr_accessor var
    #st = start(var)
    #set(var, @atoms.slice(st...(st+@nrbox))).to_gslv
    #end
    #@rboxlen, @zboxlen, @r0exp, @rboxlft, dummy = @lines_columns[1]
    #ep ['lines_columns', @lines_columns[1], @lines[1]]
    # nbound is nil when the file ended before the boundary section (see the
    # attr_accessor declaration above).
    if not send(:nbound)
      $stderr.puts("Boundary missing: trying to reconstruct")
      reconstruct_boundary
    end
  end
  # Five 16-character scientific-notation fields per output line.
  FMT="%16.9e" * 5 + "\n"
  # Write this equilibrium back out in eqdsk format.
  def write_file(filename)
    File.open(filename, 'w') do |file|
      printf(file, " EQDSK GEN.BY TOKFILE %d %d\n", @nrbox, @nzbox)
      array = []
      DATANAMES.each do |dname|
        next if [:zlimiter, :nlimiter].include? dname
        sz = size(dname)
        data = sz==1 ? [send(dname)] : send(dname).to_a
        case dname
        when :psi
          data = data.transpose.flatten
        when :bound
          data = [send(:rbound).to_a, send(:zbound).to_a].transpose.flatten
        when :limiter
          data = [send(:rlimiter).to_a, send(:zlimiter).to_a].transpose.flatten
        end
        unless dname==:nbound
          array += data
        end
        #p [dname]
        while array.size > 5
          out = []
          5.times{out.push(array.shift)}
          printf(file, FMT, *out)
        end
        if [:nbound, :limiter].include? dname
          # Clear buffer — the format string is sized to the remaining
          # values, since fewer than FMT's five fields may be left.
          printf(file, "%16.9e"*array.size + "\n", *array)
          array = []
        end
        if dname == :nbound
          printf(file, "%5d%5d\n", data[0], send(:nlimiter))
        end
      end
    end
  end
  # Rebuild the plasma boundary (and limiter) from the psi = 0 contour when
  # the file did not provide boundary data.
  def reconstruct_boundary
    require 'gsl_extras'
    $stderr.puts "Reconstructing boundary"
    contour = GSL::Contour.new(@r, @z, @psi)
    #psikit = GraphKit.quick_create([@r, @z, @psi])
    #psikit.data[0].gp.with = 'pm3d'
    #psikit.gp.view = "map"
    ck = contour.graphkit(0.0)
    #psikit+= GraphKit.quick_create([ck.data[0].x.data, ck.data[0].y.data, ck.data[0].y.data.collect{0.0}])
    #psikit.gnuplot
    @nbound = @nlimiter = ck.data[0].x.data.size
    @rbound = @rlimiter = ck.data[0].x.data
    @zbound = @zlimiter = ck.data[0].y.data
  end
  # Build a multi-window plot of the radial profiles plus a psi map with the
  # boundary overlaid.
  def summary_graphkit
    psivec = GSL::Vector.linspace(@psi.min, 0.0, @nrbox)
    multkit = GraphKit::MultiWindow.new([:pr, :pprime, :t, :ttprime, :q].map{|name|
      kit = GraphKit.quick_create([psivec, send(name)])
      kit.title = name.to_s
      kit.ylabel = nil
      kit.xlabel = 'psi'
      kit
    })
    psikit = GraphKit.quick_create([@r, @z, @psi])
    psikit.data[0].gp.with = 'pm3d'
    psikit.gp.view = "map"
    boundkit = GraphKit.quick_create([@rbound, @zbound, @rbound.collect{0.0}])
    psikit += boundkit
    psikit.gp.key = "off"
    psikit.title = 'psi'
    psikit.xlabel = 'R'
    psikit.ylabel = 'Z'
    multkit.push psikit
    multkit.gp.multiplot = "layout 2,3"
    multkit
  end
  # Number of values stored in the file for the given datum.
  def size(var)
    case var
    #when :nbound, :nlimiter
    #1
    when :bound
      @nbound * 2
    when :limiter
      @nlimiter * 2
    when :psi
      @nrbox * @nzbox
    when :t, :pr, :pprime, :ttprime, :q
      @nrbox
    else
      1
    end
  end
  #def start(var)
  #case var
  #when :t
  #20
  #when :p
  #start(:t) + @nrbox
  #when :ttprime
  #start(:p) + @nrbox
  #when :pprime
  #start(:ttprime) + @nrbox
  #when :psi
  #start(:pprime) + @nrbox
  #when :q
  #start(:psi) + @nrbox * @nzbox
  #when :nbound
  #start(:q) + @nrbox + 1
  #when :rzbound
  #start(:q) + @nrbox + 3
  #when :limiter
  #start(:rzbound) + @nbound * 2
  #else
  #raise "Start of #{var} unknown"
  #end
  #end
  #def
  #def get_int(line, col)
  #end
end
|
# Ruby wrapper around the Trello API
# Copyright (c) 2012, Jeremy Tregunna
# Use and distribution terms may be found in the file LICENSE included in this distribution.
require 'addressable/uri'
module Trello
  class Client
    class EnterYourPublicKey < StandardError; end
    class EnterYourSecret < StandardError; end

    class << self
      attr_writer :public_key, :secret, :app_name

      # Perform a raw request against the Trello REST API.
      # options[:method] picks the HTTP verb, options[:params] becomes the
      # query string.
      def query(path, options = { :method => :get, :params => {} })
        uri = Addressable::URI.parse("https://api.trello.com#{path}")
        uri.query_values = options[:params]
        access_token.send(options[:method], uri.to_s)
      end

      # Convenience wrappers so callers can write
      # Trello::Client.get(path, params) etc. instead of spelling out query.
      %w{get post put delete}.each do |http_method|
        send(:define_method, http_method) do |path, params = {}|
          query(path, :method => http_method, :params => params)
        end
      end

      protected

      # Build the OAuth consumer; fails loudly when credentials are missing.
      def consumer
        raise EnterYourPublicKey if @public_key.to_s.empty?
        raise EnterYourSecret if @secret.to_s.empty?
        OAuth::Consumer.new(@public_key, @secret, :site => 'https://trello.com',
                                                  :request_token_path => '/1/OAuthGetRequestToken',
                                                  :authorize_path => '/1/OAuthAuthorizeToken',
                                                  :access_token_path => '/1/OAuthGetAccessToken',
                                                  :http_method => :get)
      end

      # Lazily built, memoized access token used for every request.
      def access_token
        return @access_token if @access_token
        @access_token = OAuth::AccessToken.new(consumer)
      end
    end
  end
end
Adds get/post/put/delete convenience helpers on the client.
# Ruby wrapper around the Trello API
# Copyright (c) 2012, Jeremy Tregunna
# Use and distribution terms may be found in the file LICENSE included in this distribution.
require 'addressable/uri'
module Trello
  class Client
    class EnterYourPublicKey < StandardError; end
    class EnterYourSecret < StandardError; end

    class << self
      attr_writer :public_key, :secret, :app_name

      # Issue a request against the Trello REST API; options[:method] selects
      # the HTTP verb and options[:params] becomes the query string.
      def query(path, options = { :method => :get, :params => {} })
        endpoint = Addressable::URI.parse("https://api.trello.com#{path}")
        endpoint.query_values = options[:params]
        access_token.send(options[:method], endpoint.to_s)
      end

      # Define verb-specific shortcuts (get/post/put/delete) on the client.
      %w{get post put delete}.each do |verb|
        send(:define_method, verb) do |path, params = {}|
          query(path, :method => verb, :params => params)
        end
      end

      protected

      # Build the OAuth consumer; fails loudly when credentials are missing.
      def consumer
        raise EnterYourPublicKey if @public_key.to_s.empty?
        raise EnterYourSecret if @secret.to_s.empty?
        OAuth::Consumer.new(@public_key, @secret,
                            :site => 'https://trello.com',
                            :request_token_path => '/1/OAuthGetRequestToken',
                            :authorize_path => '/1/OAuthAuthorizeToken',
                            :access_token_path => '/1/OAuthGetAccessToken',
                            :http_method => :get)
      end

      # Lazily built, memoized access token used for every request.
      def access_token
        @access_token ||= OAuth::AccessToken.new(consumer)
      end
    end
  end
end
|
%w(rubygems yaml socket json net/http uri).each { |lib| require lib }
#####
# This Ruby Script Emulates the Asterisk Gateway Interface (AGI)
# VERSION = '0.1.6'
#####
# If we are testing, then add some methods, $currentCall will be nil if a call did not start this session
# Outside a live Tropo session ($currentCall/$destination unset) the platform
# does not provide log/show, so stub them on every object: log echoes its
# argument back, show wraps it in the same arrow banner used elsewhere.
if $currentCall.nil? && $destination.nil?
  Object.class_eval do
    def log(val)
      val
    end
    def show(val)
      log("====> #{val} <====")
    end
  end
end
# Patch Hash so string-keyed option hashes can be converted to the
# symbol-keyed form the Tropo scripting API expects. Keys that cannot be
# symbolized (e.g. Integers) are kept as-is.
class Hash
  def symbolize_keys
    each_with_object({}) do |(key, value), memo|
      memo[(key.to_sym rescue key) || key] = value
    end
  end

  # In-place variant of #symbolize_keys.
  def symbolize_keys!
    replace(symbolize_keys)
  end
end
#########
# @author Jason Goecke
class TropoAGItate
module Helpers
  ##
  # Strips a single leading and/or trailing double quote from a string,
  # mutating it in place.
  #
  # @param [String] the string to remove the quotes from
  #
  # @return [String] the string with the quotes removed
  def strip_quotes(text)
    # The original compared String#[] results to 34 (the byte value of '"'),
    # which only works on Ruby 1.8 where #[] returns a Fixnum; on 1.9+ it
    # returns a one-character String and the comparison was always false.
    # start_with?/end_with? behave identically on every version.
    text.chop! if text.end_with?('"')
    text.sub!(/\A"/, '') if text.start_with?('"')
    text
  end

  ##
  # Formats the output to the log for consistency
  #
  # @param [String] string to output to the log
  # @return nil
  def show(str)
    log "====> #{str} <===="
  end

  ##
  # Provides the current method's name
  #
  # @return [String] the name of the current method
  def this_method
    caller[0]
    # caller[0][/`([^']*)'/, 1]
  end
end
include Helpers
class Commands
include Helpers
##
# Creates an instance of Command
#
# @param [Object] the currentCall object from Tropo Scripting
# @param [Hash] contains the configuration of the files available as Asterisk Sounds
#
# @return [Object] an instance of Command
def initialize(current_call, tropo_agi_config)
  @current_call = current_call
  @tropo_agi_config = tropo_agi_config
  # Every successful AGI response starts with this prefix; the individual
  # command handlers append the result code/body.
  @agi_response = "200 result="
  @tropo_voice = @tropo_agi_config['tropo']['voice']
  @tropo_recognizer = @tropo_agi_config['tropo']['recognizer']
  # Used to store user request values for SET/GET VARIABLE commands of Asterisk
  # May also be passed in as a JSON string from the Tropo Session API
  if $user_vars
    @user_vars = JSON.parse $user_vars
  else
    @user_vars = {}
  end
  # NOTE(review): asterisk_sound_files is defined elsewhere in this class —
  # presumably it loads the Asterisk sound-file manifest; confirm before
  # relying on its shape.
  @asterisk_sound_files = asterisk_sound_files if @tropo_agi_config['asterisk']['sounds']['enabled']
end
##
# Answers the call via Tropo when it is still ringing; logs a warning for
# any other call state.
# AGI: http://www.voip-info.org/wiki/view/answer
# Tropo: https://www.tropo.com/docs/scripting/answer.htm
#
# @return [String] the response in AGI raw form
def answer
  case @current_call.state
  when 'RINGING'
    @current_call.answer
  else
    show "Warning - invalid call state to invoke an answer: #{@current_call.state.inspect}"
  end
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Initiates an ask to the Tropo call object
# Tropo: https://www.tropo.com/docs/scripting/ask.htm
#
# @param [Hash] the options to be used on the ask method
#
# @return [String] the response in AGI raw form (a JSON body with the
#   recognition result appended to the "200 result=" prefix)
def ask(options={})
  check_state
  # Fill in the configured recognizer/voice when the caller did not choose.
  # NOTE(review): the defaults are written under Symbol keys while the
  # caller-supplied values are read under String keys — the symbolize_keys!
  # below unifies them just before the Tropo call; confirm that is intended.
  options[:args][:recognizer] = @tropo_recognizer if options[:args]['recognizer'].nil?
  options[:args][:voice] = @tropo_voice if options[:args]['voice'].nil?
  # Check for Asterisk sounds
  asterisk_sound_url = fetch_asterisk_sound(options[:args]['prompt'])
  if asterisk_sound_url
    prompt = asterisk_sound_url
  else
    prompt = options[:args]['prompt']
  end
  response = @current_call.ask prompt, options[:args].symbolize_keys!
  # NO_SPEECH / NO_MATCH responses carry no choice object, so only the
  # interpretation can be reported.
  if response.value == 'NO_SPEECH' || response.value == 'NO_MATCH'
    result = { :interpretation => response.value }
  else
    result = { :concept => response.choice.concept,
               :confidence => response.choice.confidence,
               :interpretation => response.choice.interpretation,
               :tag => response.choice.tag }
  end
  @agi_response + result.to_json + "\n"
rescue => e
  log_error(this_method, e)
end
##
# Sets the callerid and calleridname params in Tropo by recording them in
# the user-variable hash (keyed by the lowercased AGI command name).
#
# @param [Hash] the options to be used when setting callerid/calleridname
#
# @return [String] the response in AGI raw form
def callerid(options={})
  key = options[:command].downcase
  @user_vars[key] = options[:args][0]
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Initiates a transfer on Tropo which corresponds to a dial over AGI
# AGI: http://www.voip-info.org/wiki/view/Asterisk+cmd+Dial
# Tropo: https://www.tropo.com/docs/scripting/transfer.htm
#
# @param [Hash] the options used to place the dial
#
# @return [String] the response in AGI raw form
def dial(options={})
  check_state
  args = options.delete(:args) || {}
  destinations = parse_destinations(args.shift.split('&'))
  options = {}
  # Copy the channel variables hash. We need to remove certain variables that
  # cause problems if converted to JSON (specifically: anything with
  # parenthesis in the name)
  vars = @user_vars.clone
  # Convert Asterisk app_dial inputs to Tropo syntax.
  # Only set :timeout when a timeout argument was actually supplied:
  # `if args.count` was always truthy (0 is truthy in Ruby), which forced
  # :timeout => 0 onto every dial that had no explicit timeout.
  options[:timeout] = args.shift.to_i unless args.empty?
  # TODO: We may want to provide some compatibility with Asterisk dial flags
  # like m for MOH, A() to play announcement to called party,
  # D() for post-dial DTMF, L() for call duration limits
  #astflags = args.shift if args.count
  options[:callerID] = vars.delete('CALLERID(num)') if vars.has_key?('CALLERID(num)')
  options[:headers] = set_headers(vars)
  show "Destination: #{destinations.inspect}, Options: #{options.inspect}"
  result = @current_call.transfer destinations, options
  # Map the Tropo result to the Asterisk DIALSTATUS channel variable.
  # result.name is downcased first, so the comparison values must all be
  # lowercase — the original 'callFailure' branch could never match.
  @user_vars['DIALSTATUS'] = case result.name.downcase
                             when 'transfer' then 'ANSWER'
                             when 'success' then 'ANSWER'
                             when 'timeout' then 'NOANSWER'
                             when 'error' then 'CONGESTION'
                             when 'callfailure' then 'CHANUNAVAIL'
                             else 'CONGESTION'
                             end
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Play the given file
# AGI: http://www.voip-info.org/wiki/view/stream+file
#
# The enhanced capability won't work for now, since Adhearsion expects back ASCII single digits
# enhanced_input_example {
# postal_code = input 1, :play => { :prompt => 'Please enter your post code?',
# :timeout => 5,
# :choices => '[5 DIGITS]',
# :terminator => '#' }.to_json
#
# ahn_log.postal_code.debug postal_code
# play "You entered"
# say_digits postal_code
# }
#
# @param [Hash] the options used to play the file back
#
# @return [String] the response in AGI raw form
def file(options={})
  check_state
  # Enhanced mode: the argument may carry a JSON payload of full ask options
  # (the trailing 15 characters are chopped off before parsing); when present
  # they are stored for the following WAIT FOR DIGIT command
  @wait_for_digits_options = parse_input_string options[:args][0], 16
  if @wait_for_digits_options.nil?
    prompt, escape_digits = extract_prompt_and_escape_digits(options[:args][0])
    asterisk_sound_url = fetch_asterisk_sound(prompt)
    prompt = asterisk_sound_url if asterisk_sound_url
    if escape_digits.nil?
      # No escape digits requested: plain playback/TTS, report no keypress
      @current_call.say prompt, :voice => @tropo_voice
      result = @agi_response + "0 endpos=0\n"
    else
      # Timeout is set to 0 so we return immediately after playback
      response = @current_call.ask prompt, { :choices => create_choices(escape_digits),
                                             :choiceMode => 'keypad',
                                             :timeout => 0 }
      result = @agi_response + response.value[0].to_s + " endpos=0\n"
    end
  end
  # NOTE(review): in enhanced (JSON) mode `result` stays nil, so this method
  # returns nil — presumably wait_for_digits later supplies the real
  # response; confirm against the AGI server's expectations
  show "File response: #{response.inspect}"
  result
rescue => e
  log_error(this_method, e)
end
alias :streamfile :file
##
# Grabs all of the SIP headers off of the current session/call
# This is a work around until the $currentCall.getHeaderMap works, currently a bug in the Ruby shim
#
# @return [Hash] all of the SIP headers on the current session
def getheaders
  hash = {}
  # We are accessing the Java object directly, so we get a Java HashMap back.
  # $incomingCall is the string 'nullCall' when no inbound leg exists.
  hash = hashmap_to_hash($incomingCall.getHeaderMap) if $incomingCall != 'nullCall'
  # Surface the Tropo session tag, when present, as a pseudo-header
  hash.merge!({ :tropo_tag => $tropo_tag }) if $tropo_tag
  hash
end
##
# Initiates a hangup to the Tropo call object
# AGI: http://www.voip-info.org/wiki/view/hangup
# Tropo: https://www.tropo.com/docs/scripting/hangup.htm
#
# @return [String] the response in AGI raw form
def hangup
  # Tear down the Tropo call; AGI reports result 1 for a HANGUP
  @current_call.hangup
  "#{@agi_response}1\n"
rescue => e
  log_error(this_method, e)
end
##
# Initiates a conference
# AGI: http://www.voip-info.org/wiki/view/Asterisk+cmd+MeetMe
# Tropo: https://www.tropo.com/docs/scripting/conference.htm
#
# param [Hash] a hash of items
# @return [String] the response in AGI raw form
def meetme(options={})
  check_state
  # MeetMe arguments arrive pipe-delimited; the first element is the
  # conference room id. NOTE(review): `chop` drops its trailing character —
  # presumably a closing quote from the AGI encoding; confirm against the
  # raw EXEC MeetMe input format
  options = options[:args][0].split('|')
  @current_call.conference options[0].chop
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
alias :conference :meetme
##
# Traps any unknown/unsupported commands and logs an error mesage to the Tropo debugger
#
# @param [Object] the arguments used to make the method call
#
# @return [String] the response in AGI raw form
def method_missing(method, *args)
  # Unknown AGI command: log it and answer with the AGI 510 error code
  show "Invalid or unknown command: #{method.inspect}"
  "510 result=Invalid or unknown Command\n"
end
##
# Initiates a recording of the call
# AGI:
# - http://www.voip-info.org/index.php?content_id=3134
# - http://www.voip-info.org/wiki/view/Asterisk+cmd+MixMonitor
# Tropo: https://www.tropo.com/docs/scripting/startcallrecording.htm
#
# @param [Hash] options used to build the startCallRecording
#
# @return [String] the response in AGI raw form
def monitor(options={})
  check_state
  # Tropo's startCallRecording takes the target URI plus the raw options
  # hash (format, transcription settings, etc.) straight through
  @current_call.startCallRecording options[:args]['uri'], options[:args]
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
alias :mixmonitor :monitor
alias :startcallrecording :monitor
##
# Initiates a playback to the Tropo call object for Speech Synthesis/TTS
# AGI: http://www.voip-info.org/index.php?content_id=3168
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] the options used for the Tropo say method
#
# @return [String] the response in AGI raw form
def playback(options={})
  check_state
  # Prefer a hosted Asterisk sound file when one exists for this prompt;
  # otherwise speak the raw text via TTS
  requested = options[:args][0]
  sound_url = fetch_asterisk_sound(requested)
  text = sound_url || requested
  @current_call.say text, :voice => @tropo_voice
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
alias :saynumber :playback
alias :say :playback
##
# Reads a #-terminated string of digits a certain number of times from the user in to the given variable.
# AGI: https://wiki.asterisk.org/wiki/display/AST/Application_Read
# Tropo: https://www.tropo.com/docs/scripting/ask.htm
#
# @param [Hash] the options used for the Tropo ask method
#
# @return [String] the response in the AGI raw form
def read(options={})
  check_state
  # Check to see if the READ arguments were sent in quotes, like from Asterisk-Java
  options[:args] = options[:args][0].split(',', -4) if options[:args].length == 1
  # Defaults: silent prompt, any 1-255 digits, a single attempt, 30s timeout
  prompt, choices, attempts, timeout = 'silence', '[1-255 DIGITS]', 1, 30
  # Set the prompt
  prompt = options[:args][1] if options[:args][1] != ""
  asterisk_sound_url = fetch_asterisk_sound(prompt)
  prompt = asterisk_sound_url if asterisk_sound_url
  # Override the defaults with any provided arguments
  choices = "[1-#{options[:args][2]} DIGITS]" unless options[:args][2].nil? || options[:args][2].empty?
  attempts = options[:args][4] unless options[:args][4].nil? || options[:args][4].empty?
  timeout = options[:args][5].to_f unless options[:args][5].nil? || options[:args][5].empty?
  response = nil
  attempts.to_i.times do
    response = @current_call.ask prompt, { :choices => choices,
                                           :choiceMode => 'keypad',
                                           :terminator => '#',
                                           :timeout => timeout }
    break if response.value
  end
  # Set the variable the user has specified for the value to insert into
  @user_vars[options[:args][0].downcase] = response.value
  @agi_response + "0\n"
rescue => e
  # FIX: added for consistency with every other command — report the error
  # through log_error instead of letting it escape the AGI loop
  log_error(this_method, e)
end
##
# Used to change the voice being used for speech recognition/ASR
#
# @param [Hash] options used set the recognizer
#
# @return [String] the response in AGI raw form
def recognizer(options={})
  # 'default' restores the configured ASR engine; anything else overrides it
  requested = options[:args][0]
  @tropo_recognizer = requested == 'default' ? @tropo_agi_config['tropo']['recognizer'] : requested
  @agi_response + "0\n"
end
##
# Records a user input
# AGI: http://www.voip-info.org/index.php?content_id=3176
# Tropo: https://www.tropo.com/docs/scripting/record.htm
#
# @param [Hash] the options used for the record
#
# @return [String] the response in AGI raw form
def record(options={})
  check_state
  # RECORD FILE arguments arrive space-delimited; the indexes used below
  # imply (filename, format, escape digits, timeout, ..., BEEP, s=silence) —
  # inferred from usage, TODO confirm against the raw AGI input
  options = options[:args][0].split
  # The last argument is the silence clause, e.g. s=5000 (milliseconds)
  silence_timeout = strip_quotes(options[options.length - 1]).split('=')
  beep = true if strip_quotes(options[5]) == 'BEEP'
  options = { :recordURI => strip_quotes(options[0]),
              :silenceTimeout => silence_timeout[1].to_i / 1000,
              :maxTime => strip_quotes(options[3]).to_i,
              :recordFormat => strip_quotes(options[1]),
              :terminator => strip_quotes(options[2]),
              :beep => beep }
  # NOTE(review): the `ssml =` result is never used — looks like leftover;
  # the near-empty <speak> prompt starts recording without audible playback
  ssml =
  @current_call.record '<speak> </speak>', options
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Provides the ability to redirect a call after it is answered
# Tropo: https://www.tropo.com/docs/scripting/redirect.htm
#
# @return [String] the response in AGI raw form
def redirect(options={})
  check_state
  # Hand the (answered) call off to the new destination
  destination = options[:args][0]
  @current_call.redirect destination
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Provides the ability to reject a call before it is answered
# Tropo: https://www.tropo.com/docs/scripting/reject.htm
#
# @return [String] the response in AGI raw form
def reject
  # Decline the call before it is answered
  @current_call.reject
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Provides a RAW say capability
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options used to build the say
#
# @return [String] the response in AGI raw form
def say(options={})
  check_state
  # Raw say: pass the prompt and the full options hash straight to Tropo
  args = options[:args]
  @current_call.say args['prompt'], args
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Plays back digits using SSML
# AGI: http://www.voip-info.org/index.php?content_id=3182
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options used to build the say
# @return [String] the response in AGI raw form
def saydigits(options={})
  check_state
  # Wrap the digits in SSML so TTS reads them out one at a time
  ssml = "<speak><say-as interpret-as='vxml:digits'>#{options[:args][0]}</say-as></speak>"
  @current_call.say ssml, :voice => @tropo_voice
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Speaks back individual characters in a string
# AGI: http://www.voip-info.org/wiki/index.php?page=Asterisk+cmd+SayPhonetic
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options used to build the say
#
# @return [String] the response in AGI raw form
def sayphonetic(options={})
  check_state
  # Space the characters out so TTS reads them one at a time
  text = ''
  options[:args][0].split(//).each do |char|
    text = text + char + ' '
  end
  # FIX: use the session voice ivar. TROPO_VOICE was an undefined constant
  # (every other command in this class speaks with @tropo_voice), so this
  # method always raised NameError and fell into log_error.
  @current_call.say text, :voice => @tropo_voice
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# ====> TO BE IMPLEMENTED <====
#
# Speaks back the time
# AGI:
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options used to build the say
#
# @return [String] the response in AGI raw form
def saytime(options={})
  # ====> TO BE IMPLEMENTED <==== placeholder: acknowledges without speaking
  check_state
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Plays DTMF/touch tone digits to the audio channel
# AGI: http://www.voip-info.org/index.php?content_id=3184
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options used to build the say
#
# @return [String] the response in AGI raw form
def senddtmf(options={})
  check_state
  # Tropo has no native DTMF playback here, so play pre-recorded tone files
  # hosted on Tropo, one per digit
  base_uri = 'http://hosting.tropo.com/49767/www/audio/dtmf/'
  options[:args][0].split(//).each do |char|
    case char
    when '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b', 'c', 'd'
      playback({ :args => [ base_uri + "#{char}.wav" ] })
    when '#'
      playback({ :args => [ base_uri + "#.wav" ] })
    else
      # '*' and anything unexpected: no tone file is handled here
      show "Cannot play DTMF with: #{char.inspect}"
    end
  end
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Returns the current state of the call
# AGI: http://www.voip-info.org/wiki/view/channel+status
#
# @return [String] the AGI response
def status(options={})
  # Map the Tropo call state onto Asterisk CHANNEL STATUS codes:
  # 4 = ringing, 6 = line is up, 0 = down/available
  code = case @current_call.state
         when 'RINGING'  then 4
         when 'ANSWERED' then 6
         else 0
         end
  @agi_response + code.to_s + "\n"
rescue => e
  log_error(this_method, e)
end
##
# Returns the current state of the call
# AGI: http://www.voip-info.org/wiki/view/channel+status
#
# @return [String] the AGI response
def stopcallrecording(options={})
  check_state
  # End an in-progress recording started by monitor/mixmonitor
  @current_call.stopCallRecording
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
alias :monitor_stop :stopcallrecording
alias :mixmonitor_stop :stopcallrecording
##
# Handles the storing/retrieving of User Variables associated to the call
# AGI:
# - http://www.voip-info.org/wiki/view/set+variable
# - http://www.voip-info.org/wiki/view/get+variable
#
# @param [Hash] options used to build the say
#
# @return [String] the response in AGI raw form
def user_vars(options={})
  case options[:action]
  when 'set'
    # "SET VARIABLE name value" arrives as one space-separated string
    name, value = options[:args][0].split(' ')
    @user_vars[strip_quotes(name)] = strip_quotes(value)
    @agi_response + "0\n"
  when 'get'
    stored = @user_vars[strip_quotes(options[:args][0])]
    if stored
      "#{@agi_response}1 (#{stored})\n"
    else
      # Variable has not been set
      @agi_response + "0\n"
    end
  end
rescue => e
  log_error(this_method, e)
end
##
# Used to change the voice being used for speech synthesis/TTS
#
# @param [Hash] options used set the voice
#
# @return [String] the response in AGI raw form
def voice(options={})
  # 'default' restores the configured TTS voice; anything else overrides it
  requested = options[:args][0]
  @tropo_voice = requested == 'default' ? @tropo_agi_config['tropo']['voice'] : requested
  @agi_response + "0\n"
end
##
# Provides the ability to wait a specified period of time
# Tropo: https://www.tropo.com/docs/scripting/wait.htm
#
# @return [String] the response in AGI raw form
def wait(options={})
  # AGI supplies seconds; Tropo's wait expects milliseconds
  seconds = options[:args][0].to_i
  @current_call.wait seconds * 1000
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Asks the user to input digits, may work with touch tones or speech recognition/ASR
# AGI: http://www.voip-info.org/wiki/view/wait+for+digit
# Tropo: https://www.tropo.com/docs/scripting/ask.htm
#
# @param [Hash] options used to build the ask
#
# @return [String] the response in AGI raw form
def wait_for_digits(options={})
  check_state
  if @wait_for_digits_options.nil?
    # Plain AGI WAIT FOR DIGIT: the second token is the timeout in ms
    timeout = strip_quotes(options[:args][0].split(' ')[1]).to_i
    # NOTE(review): -1 means "wait forever" in Asterisk; it is coerced to
    # 1000ms (one second after the division below) — confirm intended
    timeout = 1000 if timeout == -1
    timeout = timeout / 1000
    response = @current_call.ask('', { 'timeout' => timeout,
                                       'choices' => '[1 DIGIT], *, #',
                                       'choiceMode' => 'keypad' })
  else
    # A prior STREAM FILE supplied enhanced JSON ask options; use those
    response = @current_call.ask(@wait_for_digits_options['prompt'], @wait_for_digits_options)
  end
  @agi_response + response.value[0].to_s + "\n"
rescue => e
  log_error(this_method, e)
end
##
# Builds a hash of the available Asterisk Sound files from a JSON file stored on Tropo
#
# @return [Hash] all of the sound files available to be played back
def asterisk_sound_files
  # Downloads the JSON manifest of Asterisk sound files hosted on Tropo
  # (URL from config) and parses it into a Hash
  JSON.parse(Net::HTTP.get(URI.parse(@tropo_agi_config['asterisk']['sounds']['available_files'])))
end
private
##
# Automatically answers the call/session if not explicitly done
def check_state
  case @current_call.state
  when 'DISCONNECTED'
    # Mirror Asterisk's AGI error for commands on a hung-up channel; callers
    # rescue this and log_error passes the 511 string through verbatim
    raise RuntimeError, '511 result=Command Not Permitted on a dead channel'
  when 'RINGING'
    @current_call.answer
    # Sleep to allow audio to settle, in the case of Skype
    sleep 2
  end
  true
end
##
# Converts the choices passed in a STREAM FILE into the requisite comma-delimitted format for Tropo
#
# @param [required, String] escape_digits to convert
def create_choices(escape_digits)
  choices = ''
  # JRuby 1.3.1 (Ruby 1.8 mode) lacks String#each_char, so walk the bytes.
  # FIX: guard with defined?() — the bare constant raised NameError when the
  # script ran outside JRuby (e.g. under MRI in specs).
  if defined?(JRUBY_VERSION) && JRUBY_VERSION == '1.3.1'
    escape_digits.each_byte { |char| choices = choices + char.chr + ',' }
  else
    escape_digits.each_char { |char| choices = choices + char + ',' }
  end
  # Drop the trailing comma
  choices.chop
end
##
# Extracts the prompt and escape digits from a STREAM FILE request
#
# @param [required, String] original_string to extract the prompt and escape digits out of
def extract_prompt_and_escape_digits(original_string)
  # Remove any embedded double-quotes (mutates the argument, as before)
  original_string.gsub!('"', '')
  # Trailing digits and/or * / # are the escape digits; everything before
  # them (right-stripped) is the prompt
  match_data = original_string.match /\d{1,}\#$|\d{1,}$|\d{1,}\*\#$|\d{1,}\#\*$|\d{1,}\*|\#|\*$/
  return original_string, nil if match_data.nil?
  [match_data.pre_match.rstrip, match_data[0]]
end
##
# Returns the URI location of the Asterisk sound file if it is available
#
# @param [String] the name of the sound file to be played
#
# @return [String] the URL to play the file from if the filename exists
def fetch_asterisk_sound(text)
  text = strip_quotes text
  if @tropo_agi_config['asterisk']['sounds']['enabled']
    if @asterisk_sound_files[text]
      # Build the full URL: base_uri + language + the file's relative path
      return @tropo_agi_config['asterisk']['sounds']['base_uri'] + '/' +
             @tropo_agi_config['asterisk']['sounds']['language'] + '/' +
             @asterisk_sound_files[text]
    end
  end
  # Boolean false (not nil) signals "no hosted sound for this prompt"
  false
end
##
# This is a work around until the $currentCall.getHeaderMap works, currently a bug in the Ruby shim
#
# @param [JavaHashMap] the Java HashMap to convert to a Ruby Hash
#
# @return [Hash] the converted native Ruby hash
def hashmap_to_hash(hashmap)
  # Walk the Java key iterator and copy each entry into a native Ruby Hash
  ruby_hash = {}
  java_iter = hashmap.keySet.iterator
  loop do
    break unless java_iter.hasNext
    key = java_iter.next
    ruby_hash[key] = hashmap.get(key)
  end
  ruby_hash
end
##
# Logs formatted errors to the Tropo debugger
#
# @param [String] the aciton that was requested
# @param [String] the error itself
#
# @return [String] the response in AGI raw form
def log_error(action, error)
  @current_call.log '====> Tropo AGI ACTION ERROR - Start <===='
  show "Error: Unable to execute the #{action} request. call_active? #{@current_call.isActive.inspect}"
  show "Error output: #{error.inspect}"
  show "Trace: #{error.backtrace.join("\n")}"
  @current_call.log '====> Tropo AGI ACTION ERROR - End <===='
  # Return an error based on the error encountered
  case error.to_s
  when '511 result=Command Not Permitted on a dead channel'
    # Dead-channel errors (raised by check_state) pass through verbatim
    error.to_s + "\n"
  else
    # Anything else maps to the generic AGI failure result
    @agi_response + "-1\n"
  end
end
##
# Parses the destinations sent over the AGI protocol into an array of dialable destinations
# Also converts the Asterisk style of SIP/ to sip:, the proper SIP URI format
#
# @param [Array] the unformatted destinations to be parsed from AGI
#
# @return [Array] an array of destinations
def parse_destinations(destinations)
  destinations_array = []
  destinations.each do |destination|
    # Strip a leading double-quote if present. FIX: the Fixnum comparison
    # (== 34) only works under Ruby 1.8 byte semantics; the added String
    # comparison keeps it working on 1.9+.
    destination = destination.reverse.chop.reverse if destination[0] == 34 || destination[0] == '"'
    # FIX: the original if/else had two identical branches, making the
    # regexp test dead code — the prefix conversion applies unconditionally
    destinations_array << destination.gsub('SIP/', 'sip:')
  end
  destinations_array
rescue => e
  show "parse_destinations method error: #{e.inspect}"
end
##
# Parses the STREAM FILE for input to see if it is a JSON string and if so return the Hash
#
# @param [String] the string to parse
#
# @return [Hash, nil] the hash if it was JSON, nil if it was not
def parse_input_string(string, leftchop)
  # Drop the trailing (leftchop - 1) characters and any backslash escapes,
  # then try to parse what remains as JSON
  candidate = string[0..-leftchop].gsub("\\", '')
  JSON.parse candidate
rescue => e
  # Not JSON — signal the caller to fall back to plain-string handling
  nil
end
##
# Preps @user_vars to be set as headers
#
# @return [Hash] the formatted headers
def set_headers(vars)
  show "Headers to map: #{vars.inspect}"
  # Prefix every channel variable so it travels as an x-tropo-* SIP header
  vars.inject({}) do |headers, (name, value)|
    headers['x-tropo-' + name] = value
    headers
  end
end
end#end class Commands
##
# Creates a new instance of TropoAGItate
#
# @param [Object] the currentCall object of Tropo
# @param [String] the AGI URI of the AGI server
# @param [Hash] the configuration details of using/not using the built-in Asterisk Sound files
# @return [Object] instance of TropoAGItate
def initialize(current_call, current_app)
  @current_call = current_call
  @current_app = current_app
  # Load the YAML config from this account's Tropo-hosted file area
  @tropo_agi_config = tropo_agi_config
  show "With Configuration #{@tropo_agi_config.inspect}"
  @commands = Commands.new(@current_call, @tropo_agi_config)
rescue => e
  show "Could not find your configuration file. #{e.inspect}"
  # Could not find any config, so failing over to the default location
  failover('sip:9991443146@sip.tropo.com')
  show 'Session sent to default backup location, Now aborting the script'
  abort
end
##
# Executes the loop that sends and receives the AGI messages to and from the AGI server
#
# @return [Boolean] whether the socket is open or not
def run
  if create_socket_connection
    # Pump AGI commands back and forth until Tropo reports the call ended
    while @current_call.isActive
      begin
        command = @agi_client.gets
        show "Raw string: #{command}"
        result = execute_command(command)
        response = @agi_client.write(result)
      rescue => e
        @current_call.log '====> Broken pipe to the AGI server, Adhearsion tends to drop the socket after sending a hangup. <===='
        show "Error is: #{e}"
        @current_call.hangup
      end
    end
    close_socket
  end
end
alias :start :run
##
# Creates the TCP socket connection
#
# @return nil
def create_socket_connection
  @agi_uri = URI.parse @tropo_agi_config['agi']['uri']
  # Default to the standard Asterisk AGI port when the URI omits one
  @agi_uri.port = 4573 if @agi_uri.port.nil?
  @current_call.log "Connecting to AGI server at #{@agi_uri.host}:#{@agi_uri.port}"
  @agi_client = TCPSocket.new(@agi_uri.host, @agi_uri.port)
  # Send the initial AGI environment block; the URI path minus its leading
  # slash becomes the AGI context/script name
  @agi_client.write(initial_message(@agi_uri.host, @agi_uri.port, @agi_uri.path[1..-1]))
  true
rescue => e
  # If we can not open the socket to the AGI server, play/log an error message and hangup the call
  error_message = 'We are unable to connect to the A G I server at this time, please try again later.'
  @current_call.log "====> #{error_message} <===="
  @current_call.log e
  failover(@tropo_agi_config['tropo']['next_sip_uri'])
  false
end
##
# Closes the socket
#
# @return [Boolean] indicates if the socket is open or closed, true if closed, false if open
def close_socket
  # Closing is best-effort: the AGI server may already have dropped the
  # socket. Report the final closed/open state either way.
  @agi_client.close
  @agi_client.closed?
rescue => e
  @agi_client.closed?
end
##
# Sends the initial AGI message to the AGI server
# AGI: http://www.voip-info.org/wiki/view/Asterisk+AGI
#
# @param [String] the hostname of the AGI server
# @param [Integer] the port of the AGI server
# @param [String] the context to be used
#
# @return [String] the response in AGI raw form
def initial_message(agi_host, agi_port, agi_context)
  # Grab the headers and then push them in the initial message
  headers = @commands.getheaders
  rdnis = 'unknown'
  # SBCs signal call diversion via this header; surface it as agi_rdnis
  rdnis = headers['x-sbc-diversion'] if headers['x-sbc-diversion']
  # The heredoc below is the standard AGI environment preamble the AGI
  # server expects before command exchange begins
  <<-MSG
agi_network: yes
agi_network_script: #{agi_context}
agi_request: agi://#{agi_host}:#{agi_port}/#{agi_context}
agi_channel: TROPO/#{@current_call.id}
agi_language: en
agi_type: TROPO
agi_uniqueid: #{@current_call.id}
agi_version: tropo-agi-0.1.0
agi_callerid: #{@current_call.callerID}
agi_calleridname: #{@current_call.callerName}
agi_callingpres: 0
agi_callingani2: 0
agi_callington: 0
agi_callingtns: 0
agi_dnid: #{@current_call.calledID}
agi_rdnis: #{rdnis}
agi_context: #{agi_context}
agi_extension: 1
agi_priority: 1
agi_enhanced: 0.0
agi_accountcode: 0
agi_threadid: #{Thread.current.to_s}
tropo_headers: #{headers.to_json if headers.keys.length > 0}
  MSG
end
##
# Executes the given command from AGI to Tropo
#
# @param [String] the raw AGI form from the AGI server
#
# @return [String] the response to the command in raw AGI form
def execute_command(data)
  # A nil read means the AGI server went away; treat it as a hangup request
  data = "HANGUP" if data.nil?
  options = parse_command(data)
  case options[:action]
  when 'answer', 'hangup'
    @commands.send(options[:action].to_sym)
  when 'set', 'get'
    # SET/GET VARIABLE and SET CALLERID share the same AGI verbs
    if options[:command].downcase == 'variable'
      @commands.user_vars(options)
    elsif options[:command].downcase == 'callerid' || options[:command].downcase == 'calleridname'
      @commands.callerid(options)
    end
  when 'exec', 'stream', 'channel'
    # EXEC <app>, STREAM FILE and CHANNEL STATUS dispatch on the sub-command
    @commands.send(options[:command].downcase.to_sym, options)
  when 'wait'
    @commands.wait_for_digits(options)
  when 'record'
    @commands.record(options)
  else
    show "Invalid or unknown command #{data}"
    return "510 result=Invalid or unknown Command\n"
  end
end
##
# Takes the AGI response from the AGI server, breaks into the arguments
# and returns the commands to be executed stripped of quotes
#
# @param [String] the AGI server response
#
# @return [Hash] the command
def parse_command(data)
  # Break down the command into its component parts:
  # action word, optional (possibly quoted) sub-command, argument tail
  parts = data.match /^(\w+)\s*(\w+|"\w+")?\s*(.*)?$/
  # Unparseable input yields nil (callers treat that as unknown command)
  return if parts.nil?
  part1, part2, part3 = parts[1], parts[2], parts[3]
  command = { :action => part1.downcase }
  command.merge!({ :command => strip_quotes(part2.downcase) }) unless part2.nil?
  command.merge!({ :args => parse_args(part3) }) unless part3.nil? || part3.empty?
  show "command #{command.inspect}"
  command
end
##
# Parses the arguments to strip quotes, put into an array or a hash if JSON
#
# @param [String] the arguments to be parsed
#
# @return [Array, Hash] the parsed arguments
def parse_args(parts)
  begin
    # Enhanced commands may pass a JSON payload; prefer that form
    args = JSON.parse strip_quotes(parts.clone)
  rescue
    # Split with a RegEx, since we may have commas inside of elements as well as
    # delimitting them
    elements = parts.split(/(,|\r\n|\n|\r)(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))/m)
    # Get rid of the extraneous commas
    elements.delete(",")
    args = []
    elements.each do |ele|
      args << strip_quotes(ele)
    end
  end
  args
end
##
# This method fails over to the backup SIP URI or plays the error message if no backup
# provided
#
# @return nil
def failover(location)
  if @current_call.isActive
    @current_call.answer
    if location
      begin
        @current_call.transfer location
      rescue => e
        show "Unable to transfer to your next_sip_uri location #{e}"
      end
    else
      error_message = 'We are unable to connect to the fail over sip U R I. Please try your call again later.'
      @current_call.log "====> #{error_message} <===="
      # NOTE(review): @tropo_voice is only assigned on the Commands class,
      # never on TropoAGItate itself, so this say likely runs with a nil
      # voice — confirm against the Tropo scripting shim's default handling
      @current_call.say error_message, :voice => @tropo_voice
      @current_call.hangup
    end
  end
end
##
# Load the configuration from the current account FTP/WebDAV files of Tropo
#
# @return [Hash] the configuration details
def tropo_agi_config
  # Find the account number this app is running under
  account_data = fetch_account_data
  # Try from the www directory on the Tropo file system
  result = fetch_config_file "/#{account_data[1]}/www/tropo_agi_config/tropo_agi_config.yml"
  return YAML.load(result.body) if result.code == '200'
  show "Can not find config file. #{result.body}"
  # No config file found; initialize rescues this and fails over
  raise RuntimeError, "Configuration file not found"
end
##
# Fetches the account data
#
# @return [Array] the account data details derived from the underlying directory structure
def fetch_account_data
  # The Tropo app's base directory ends with the numeric account id;
  # returns the MatchData whose [1] capture is that id
  /(\d+)$/.match(@current_app.baseDir.to_s)
end
##
# Fetches the configuration file
#
# @param [String] the resource where the file is to be found
#
# @return [Object] the resulting HTTP object
def fetch_config_file(resource)
  # Plain HTTP GET against the Tropo hosting file area; returns the
  # Net::HTTPResponse so callers can inspect code and body
  url = URI.parse("http://hosting.tropo.com")
  Net::HTTP.start(url.host, url.port) {|http|
    http.get resource
  }
end
end#end class TropoAGItate
# Are we running as a spec, or is this live?
# (@tropo_testing is set by the spec harness; nil means production)
if @tropo_testing.nil?
  log "====> Running Tropo-AGI <===="
  # If this is an outbound request place the call
  # see: https://www.tropo.com/docs/scripting/call.htm
  if $destination
    options = {}
    # User may pass in the caller ID to use
    options[:callerID] = $caller_id if $caller_id
    # User may pass in text or voice to use for the channel
    options[:channel] = $channel || 'voice'
    # User may pass in AIM, GTALK, MSN, JABBER, TWITTER, SMS or YAHOO, SMS is default
    options[:network] = $network || 'SMS'
    # Time tropo will wait before hanging up, default is 30
    options[:timeout] = $timeout if $timeout
    # If voice turn the phone number into a Tel URI, but only if not a SIP URI
    $destination = 'tel:+' + $destination if options[:channel].downcase == 'voice' && $destination[0..2] != 'sip'
    log "====> Calling to: #{$destination} - with these options: #{options.inspect} <===="
    # Place the call
    call $destination, options
  end
  # If we have an active call, start running the AGI client
  if $currentCall
    # Create the instance of TropoAGItate with Tropo's currentCall object
    tropo_agi = TropoAGItate.new($currentCall, $currentApp)
    # Start sending/receiving AGI commands via the TCP socket
    tropo_agi.run
  else
    log '====> No Outbound Address Provided - Exiting <===='
  end
end
# First pass implementing magic channel variables
%w(rubygems yaml socket json net/http uri).each { |lib| require lib }
#####
# This Ruby Script Emulates the Asterisk Gateway Interface (AGI)
# VERSION = '0.1.6'
#####
# If we are testing, then add some methods, $currentCall will be nil if a call did not start this session
if $currentCall.nil? && $destination.nil?
  Object.class_eval do
    # Stand-in for Tropo's log(): just echo the value back
    def log(val)
      val
    end
    # Format log output consistently with Helpers#show
    def show(val)
      log("====> #{val} <====")
    end
  end
end
# We patch the Hash class to symbolize our keys
class Hash
  ##
  # Returns a copy of this hash with every key converted to a Symbol
  # (keys that cannot be symbolized are kept as-is).
  def symbolize_keys
    result = {}
    each do |key, value|
      result[(key.to_sym rescue key) || key] = value
    end
    result
  end
  ##
  # Destructive variant of #symbolize_keys.
  def symbolize_keys!
    replace(symbolize_keys)
  end
end
#########
# @author Jason Goecke
class TropoAGItate
module Helpers
  ##
  # Strips a single leading and/or trailing double-quote from a string.
  # NOTE: mutates its argument (chop!/reverse!), matching prior behavior.
  #
  # @param [String] the string to remove the quotes from
  # @return [String] the string with the quotes removed
  def strip_quotes(text)
    # FIX: the bare Fixnum comparison (== 34) only works under Ruby 1.8 /
    # JRuby 1.8-mode, where String#[] yields a byte; the added String
    # comparison keeps this working on Ruby 1.9+.
    last = text[text.length - 1]
    text.chop! if last == 34 || last == '"'
    first = text[0]
    text.reverse!.chop!.reverse! if first == 34 || first == '"'
    text
  end
  ##
  # Formats the output to the log for consistency
  #
  # @param [String] string to output to the log
  # @return nil
  def show(str)
    log "====> #{str} <===="
  end
  ##
  # Provides the current method's name
  # NOTE: returns the raw caller frame ("file:line:in `name'"), not just the
  # bare method name (the commented-out variant extracted only the name)
  #
  # @return [String] the name of the current method
  def this_method
    caller[0]
    # caller[0][/`([^']*)'/, 1]
  end
end
include Helpers
class Commands
include Helpers
##
# Creates an instance of Command
#
# @param [Object] the currentCall object from Tropo Scripting
# @param [Hash] contains the configuration of the files available as Asterisk Sounds
#
# @return [Object] an instance of Command
def initialize(current_call, tropo_agi_config)
  @current_call = current_call
  @tropo_agi_config = tropo_agi_config
  # Common prefix for every AGI success/failure reply
  @agi_response = "200 result="
  # Session defaults for TTS voice and ASR recognizer, overridable via the
  # voice/recognizer commands
  @tropo_voice = @tropo_agi_config['tropo']['voice']
  @tropo_recognizer = @tropo_agi_config['tropo']['recognizer']
  # Used to store user request values for SET/GET VARIABLE commands of Asterisk
  # May also be passed in as a JSON string from the Tropo Session API
  if $user_vars
    @chanvars = TropoAGItate::MagicChannelVariables.new JSON.parse $user_vars
  else
    @chanvars = TropoAGItate::MagicChannelVariables.new
  end
  # Only fetch the hosted sound-file manifest when the feature is enabled
  @asterisk_sound_files = asterisk_sound_files if @tropo_agi_config['asterisk']['sounds']['enabled']
end
##
# Initiates an answer to the Tropo call object based on an answer request via AGI
# AGI: http://www.voip-info.org/wiki/view/answer
# Tropo: https://www.tropo.com/docs/scripting/answer.htm
#
# @return [String] the response in AGI raw form
def answer
  # Only a RINGING call can be answered; anything else is logged and skipped
  if @current_call.state == 'RINGING'
    @current_call.answer
  else
    show "Warning - invalid call state to invoke an answer: #{@current_call.state.inspect}"
  end
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Initiates an ask to the Tropo call object
# Tropo: https://www.tropo.com/docs/scripting/ask.htm
#
# @param [Hash] the options to be used on the ask method
#
# @return [String] the response in AGI raw form
def ask(options={})
  check_state
  # Fall back to session defaults when the request omits a recognizer/voice.
  # Note: the String key is checked but the Symbol key is set; the
  # symbolize_keys! call below unifies them before the Tropo ask
  options[:args][:recognizer] = @tropo_recognizer if options[:args]['recognizer'].nil?
  options[:args][:voice] = @tropo_voice if options[:args]['voice'].nil?
  # Check for Asterisk sounds
  asterisk_sound_url = fetch_asterisk_sound(options[:args]['prompt'])
  if asterisk_sound_url
    prompt = asterisk_sound_url
  else
    prompt = options[:args]['prompt']
  end
  response = @current_call.ask prompt, options[:args].symbolize_keys!
  if response.value == 'NO_SPEECH' || response.value == 'NO_MATCH'
    # Nothing usable was recognized; report only the interpretation marker
    result = { :interpretation => response.value }
  else
    result = { :concept => response.choice.concept,
               :confidence => response.choice.confidence,
               :interpretation => response.choice.interpretation,
               :tag => response.choice.tag }
  end
  # The full recognition result travels back to the AGI server as JSON
  @agi_response + result.to_json + "\n"
rescue => e
  log_error(this_method, e)
end
##
# Sets the callerid params in Tropo
#
# @param [Hash] the options to be used when setting callerid/calleridname
#
# @return [String] the response in AGI raw form
def callerid(options={})
  # Store the requested caller id as a channel variable; the magic channel
  # variable handling picks it up on the next outbound action
  @chanvars['CALLERID'] = options[:args][0]
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Initiates a transfer on Tropo which corresopnds as a dial over AGI
# AGI: http://www.voip-info.org/wiki/view/Asterisk+cmd+Dial
# Tropo: https://www.tropo.com/docs/scripting/transfer.htm
#
# @param [Hash] the options used to place the dial
#
# @return [String] the response in AGI raw form
def dial(options={})
  check_state
  args = options.delete(:args) || {}
  destinations = parse_destinations(args.shift.split('&'))
  options = {}
  # Copy the channel variables hash. We need to remove certain variables that
  # cause problems if converted to JSON (specifically: anything with
  # parenthesis in the name)
  vars = @chanvars.clone
  # Convert Asterisk app_dial inputs to Tropo syntax.
  # FIX: guard on emptiness — in Ruby 0 is truthy, so the old `if args.count`
  # was always true and a missing timeout argument became timeout = 0.
  options[:timeout] = args.shift.to_i unless args.empty?
  # TODO: We may want to provide some compatibility with Asterisk dial flags
  # like m for MOH, A() to play announcement to called party,
  # D() for post-dial DTMF, L() for call duration limits
  #astflags = args.shift if args.count
  options[:callerID] = vars.delete('CALLERID(num)') if vars.has_key?('CALLERID(num)')
  options[:headers] = set_headers(vars)
  show "Destination: #{destinations.inspect}, Options: #{options.inspect}"
  result = @current_call.transfer destinations, options
  # Map the Tropo result to the Asterisk DIALSTATUS channel variable.
  # FIX: result.name is downcased above, so every branch must be lowercase;
  # the old 'callFailure' branch could never match.
  @chanvars['DIALSTATUS'] = case result.name.downcase
                            when 'transfer', 'success' then 'ANSWER'
                            when 'timeout'             then 'NOANSWER'
                            when 'error'               then 'CONGESTION'
                            when 'callfailure'         then 'CHANUNAVAIL'
                            else 'CONGESTION'
                            end
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Play the given file
# AGI: http://www.voip-info.org/wiki/view/stream+file
#
# The enhanced capability won't work for now, since Adhearsion expects back ASCII single digits
# enhanced_input_example {
# postal_code = input 1, :play => { :prompt => 'Please enter your post code?',
# :timeout => 5,
# :choices => '[5 DIGITS]',
# :terminator => '#' }.to_json
#
# ahn_log.postal_code.debug postal_code
# play "You entered"
# say_digits postal_code
# }
#
# @param [Hash] the options used to play the file back
#
# @return [String] the response in AGI raw form
def file(options={})
  check_state
  # Enhanced mode: the argument may carry a JSON payload of full ask options
  # (the trailing 15 characters are chopped off before parsing); when present
  # they are stored for the following WAIT FOR DIGIT command
  @wait_for_digits_options = parse_input_string options[:args][0], 16
  if @wait_for_digits_options.nil?
    prompt, escape_digits = extract_prompt_and_escape_digits(options[:args][0])
    asterisk_sound_url = fetch_asterisk_sound(prompt)
    prompt = asterisk_sound_url if asterisk_sound_url
    if escape_digits.nil?
      # No escape digits requested: plain playback/TTS, report no keypress
      @current_call.say prompt, :voice => @tropo_voice
      result = @agi_response + "0 endpos=0\n"
    else
      # Timeout is set to 0 so we return immediately after playback
      response = @current_call.ask prompt, { :choices => create_choices(escape_digits),
                                             :choiceMode => 'keypad',
                                             :timeout => 0 }
      result = @agi_response + response.value[0].to_s + " endpos=0\n"
    end
  end
  # NOTE(review): in enhanced (JSON) mode `result` stays nil, so this method
  # returns nil — presumably wait_for_digits later supplies the real
  # response; confirm against the AGI server's expectations
  show "File response: #{response.inspect}"
  result
rescue => e
  log_error(this_method, e)
end
alias :streamfile :file
##
# Collects all of the SIP headers for the current session/call.
# Work-around: reads the Java header map off $incomingCall directly, since
# $currentCall.getHeaderMap is currently broken in the Ruby shim.
#
# @return [Hash] the SIP headers on the current session, plus :tropo_tag when $tropo_tag is set
def getheaders
  headers = $incomingCall != 'nullCall' ? hashmap_to_hash($incomingCall.getHeaderMap) : {}
  headers[:tropo_tag] = $tropo_tag if $tropo_tag
  headers
end
##
# Hangs up the current Tropo call.
# AGI: http://www.voip-info.org/wiki/view/hangup
# Tropo: https://www.tropo.com/docs/scripting/hangup.htm
#
# @return [String] the response in AGI raw form
def hangup
  @current_call.hangup
  "#{@agi_response}1\n"
rescue => e
  log_error(this_method, e)
end
##
# Joins the call to a conference (MeetMe emulation).
# AGI: http://www.voip-info.org/wiki/view/Asterisk+cmd+MeetMe
# Tropo: https://www.tropo.com/docs/scripting/conference.htm
#
# @param [Hash] options options[:args][0] is the pipe-delimited MeetMe argument
# @return [String] the response in AGI raw form
def meetme(options={})
  check_state
  args = options[:args][0].split('|')
  # First pipe-delimited field is the room; chop drops its trailing character
  # (presumably the leftover closing quote of the AGI argument — preserved as-is)
  @current_call.conference args.first.chop
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
alias :conference :meetme
##
# Traps any unknown/unsupported AGI command, logs it to the Tropo debugger
# and reports a 510 error back to the AGI server.
#
# @param [Symbol] method the name of the unknown command
# @param [Array] args the arguments passed with the call
#
# @return [String] the response in AGI raw form
def method_missing(method, *args)
  show "Invalid or unknown command: #{method.inspect}"
  "510 result=Invalid or unknown Command\n"
end
##
# Starts a recording of the call (Monitor/MixMonitor emulation).
# AGI:
#  - http://www.voip-info.org/index.php?content_id=3134
#  - http://www.voip-info.org/wiki/view/Asterisk+cmd+MixMonitor
# Tropo: https://www.tropo.com/docs/scripting/startcallrecording.htm
#
# @param [Hash] options options[:args] is the hash handed to startCallRecording;
#   its 'uri' key names the recording destination
#
# @return [String] the response in AGI raw form
def monitor(options={})
  check_state
  recording_options = options[:args]
  @current_call.startCallRecording recording_options['uri'], recording_options
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
alias :mixmonitor :monitor
alias :startcallrecording :monitor
##
# Plays back a prompt via Tropo speech synthesis/TTS.
# AGI: http://www.voip-info.org/index.php?content_id=3168
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options options[:args][0] is the text or sound-file name to play
#
# @return [String] the response in AGI raw form
def playback(options={})
  check_state
  requested = options[:args][0]
  # Substitute the hosted Asterisk sound URL when one exists for this prompt
  # (fetch_asterisk_sound returns false when there is no match)
  text = fetch_asterisk_sound(requested) || requested
  @current_call.say text, :voice => @tropo_voice
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
alias :saynumber :playback
alias :say :playback
##
# Reads a #-terminated string of digits a certain number of times from the user in to the given variable.
# AGI: https://wiki.asterisk.org/wiki/display/AST/Application_Read
# Tropo: https://www.tropo.com/docs/scripting/ask.htm
#
# @param [Hash] options the options used for the Tropo ask method; options[:args] holds,
#   in order: variable name, prompt, max digits, (unused), attempts, timeout
#
# @return [String] the response in the AGI raw form
#   NOTE(review): unlike the sibling commands there is no rescue/log_error clause here,
#   so an ask failure propagates to the caller — confirm that is intentional
def read(options={})
  check_state
  # Check to see if the READ arguments were sent in quotes, like from Asterisk-Java
  options[:args] = options[:args][0].split(',', -4) if options[:args].length == 1
  # Set defaults
  prompt, choices, attempts, timeout = 'silence', '[1-255 DIGITS]', 1, 30
  # Set the prompt
  prompt = options[:args][1] if options[:args][1] != ""
  # Substitute the hosted Asterisk sound URL when the prompt names a stock sound file
  asterisk_sound_url = fetch_asterisk_sound(prompt)
  prompt = asterisk_sound_url if asterisk_sound_url
  # Set other values if provided
  choices = "[1-#{options[:args][2]} DIGITS]" unless options[:args][2].nil? || options[:args][2].empty?
  attempts = options[:args][4] unless options[:args][4].nil? || options[:args][4].empty?
  timeout = options[:args][5].to_f unless options[:args][5].nil? || options[:args][5].empty?
  response = nil
  # Re-prompt until the user enters something or the attempts are exhausted
  attempts.to_i.times do
    response = @current_call.ask prompt, { :choices    => choices,
                                           :choiceMode => 'keypad',
                                           :terminator => '#',
                                           :timeout    => timeout }
    break if response.value
  end
  # Set the variable the user has specified for the value to insert into
  @chanvars[options[:args][0]] = response.value
  @agi_response + "0\n"
end
##
# Changes the voice being used for speech recognition/ASR.
#
# @param [Hash] options options[:args][0] is the recognizer name, or 'default'
#   to fall back to the configured default
#
# @return [String] the response in AGI raw form
def recognizer(options={})
  requested = options[:args][0]
  @tropo_recognizer =
    if requested == 'default'
      @tropo_agi_config['tropo']['recognizer']
    else
      requested
    end
  "#{@agi_response}0\n"
end
##
# Records a user input.
# AGI: http://www.voip-info.org/index.php?content_id=3176
# Tropo: https://www.tropo.com/docs/scripting/record.htm
#
# @param [Hash] options options[:args][0] is the space-delimited AGI RECORD argument:
#   filename, format, escape digits, timeout, then flags (offset, BEEP, s=silence)
#
# @return [String] the response in AGI raw form
def record(options={})
  check_state
  # Split the raw AGI argument into its positional fields
  options = options[:args][0].split
  # The last field is the silence option in "s=<ms>" form
  silence_timeout = strip_quotes(options[options.length - 1]).split('=')
  beep = true if strip_quotes(options[5]) == 'BEEP'
  # Map the Asterisk positional arguments onto Tropo's record options
  options = { :recordURI      => strip_quotes(options[0]),
              :silenceTimeout => silence_timeout[1].to_i / 1000, # Tropo wants seconds, AGI sends ms
              :maxTime        => strip_quotes(options[3]).to_i,
              :recordFormat   => strip_quotes(options[1]),
              :terminator     => strip_quotes(options[2]),
              :beep           => beep }
  # NOTE(review): the ssml assignment result is never used — confirm whether the
  # record response was meant to be inspected or returned
  ssml =
  @current_call.record '<speak> </speak>', options
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Redirects a call after it has been answered.
# Tropo: https://www.tropo.com/docs/scripting/redirect.htm
#
# @param [Hash] options options[:args][0] is the destination to redirect to
#
# @return [String] the response in AGI raw form
def redirect(options={})
  check_state
  @current_call.redirect options[:args][0]
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Rejects a call before it is answered.
# Tropo: https://www.tropo.com/docs/scripting/reject.htm
#
# @return [String] the response in AGI raw form
def reject
  @current_call.reject
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Provides a RAW say capability: passes the prompt and the whole options hash
# straight through to Tropo.
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options options[:args] is the hash handed to say; its 'prompt' key holds the text
#
# @return [String] the response in AGI raw form
def say(options={})
  check_state
  say_options = options[:args]
  @current_call.say say_options['prompt'], say_options
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Plays back digits one at a time using SSML.
# AGI: http://www.voip-info.org/index.php?content_id=3182
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options options[:args][0] holds the digits to speak
# @return [String] the response in AGI raw form
def saydigits(options={})
  check_state
  # Wrap the digits in an SSML say-as so they are spoken individually
  ssml = format("<speak><say-as interpret-as='vxml:digits'>%s</say-as></speak>", options[:args][0])
  @current_call.say ssml, :voice => @tropo_voice
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Speaks back the individual characters of a string, one at a time.
# AGI: http://www.voip-info.org/wiki/index.php?page=Asterisk+cmd+SayPhonetic
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options options[:args][0] is the string whose characters are spoken
#
# @return [String] the response in AGI raw form
def sayphonetic(options={})
  check_state
  # Insert a space after every character so each one is spoken individually
  text = ''
  options[:args][0].split(//).each do |char|
    text = text + char + ' '
  end
  # Fix: use the configured @tropo_voice like every other say in this class;
  # the previous TROPO_VOICE constant is not defined anywhere and raised NameError.
  @current_call.say text, :voice => @tropo_voice
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# ====> TO BE IMPLEMENTED <====
#
# Speaks back the time. Currently a stub: it only answers the call if needed
# and acknowledges the command without speaking anything.
# AGI:
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options the options used to build the say (currently ignored)
#
# @return [String] the response in AGI raw form
def saytime(options={})
  check_state
  @agi_response + "0\n"
rescue => e
  log_error(this_method, e)
end
##
# Plays DTMF/touch tone digits to the audio channel by playing back hosted
# per-digit wav files.
# AGI: http://www.voip-info.org/index.php?content_id=3184
# Tropo: https://www.tropo.com/docs/scripting/say.htm
#
# @param [Hash] options options[:args][0] is the string of DTMF characters to play
#
# @return [String] the response in AGI raw form
def senddtmf(options={})
  check_state
  base_uri = 'http://hosting.tropo.com/49767/www/audio/dtmf/'
  options[:args][0].split(//).each do |char|
    if '1234567890abcd'.include?(char)
      playback({ :args => [ base_uri + "#{char}.wav" ] })
    elsif char == '#'
      playback({ :args => [ base_uri + "#.wav" ] })
    else
      # No hosted audio for this character (e.g. '*')
      show "Cannot play DTMF with: #{char.inspect}"
    end
  end
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Returns the current state of the call mapped to Asterisk channel status codes.
# AGI: http://www.voip-info.org/wiki/view/channel+status
#
# @return [String] the AGI response (4 = ringing, 6 = up, 0 = down/available)
def status(options={})
  status_code = case @current_call.state
                when 'RINGING'  then 4
                when 'ANSWERED' then 6
                else 0
                end
  "#{@agi_response}#{status_code}\n"
rescue => e
  log_error(this_method, e)
end
##
# Stops an in-progress call recording.
# AGI: http://www.voip-info.org/wiki/view/channel+status
#
# @return [String] the response in AGI raw form
def stopcallrecording(options={})
  check_state
  @current_call.stopCallRecording
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
alias :monitor_stop :stopcallrecording
alias :mixmonitor_stop :stopcallrecording
##
# Stores/retrieves the user variables associated with the call.
# AGI:
#  - http://www.voip-info.org/wiki/view/set+variable
#  - http://www.voip-info.org/wiki/view/get+variable
#
# @param [Hash] options options[:action] is 'set' or 'get';
#   options[:args][0] is "key value" for set, or the key for get
#
# @return [String] the response in AGI raw form
def user_vars(options={})
  case options[:action]
  when 'set'
    key, value = options[:args][0].split(' ')
    @chanvars[strip_quotes(key)] = strip_quotes(value)
    "#{@agi_response}0\n"
  when 'get'
    stored = @chanvars[strip_quotes(options[:args][0])]
    if stored
      @agi_response + '1 (' + stored + ")\n"
    else
      # Variable has not been set
      "#{@agi_response}0\n"
    end
  end
rescue => e
  log_error(this_method, e)
end
##
# Changes the voice being used for speech synthesis/TTS.
#
# @param [Hash] options options[:args][0] is the voice name, or 'default'
#   to fall back to the configured default
#
# @return [String] the response in AGI raw form
def voice(options={})
  requested = options[:args][0]
  @tropo_voice =
    if requested == 'default'
      @tropo_agi_config['tropo']['voice']
    else
      requested
    end
  "#{@agi_response}0\n"
end
##
# Waits for the specified period of time.
# Tropo: https://www.tropo.com/docs/scripting/wait.htm
#
# @param [Hash] options options[:args][0] is the wait time in seconds
# @return [String] the response in AGI raw form
def wait(options={})
  # AGI supplies seconds; Tropo's wait expects milliseconds
  @current_call.wait options[:args][0].to_i * 1000
  "#{@agi_response}0\n"
rescue => e
  log_error(this_method, e)
end
##
# Asks the user to input digits; works with touch tones or speech recognition/ASR.
# AGI: http://www.voip-info.org/wiki/view/wait+for+digit
# Tropo: https://www.tropo.com/docs/scripting/ask.htm
#
# @param [Hash] options options[:args][0] is the raw WAIT FOR DIGIT argument
#   (channel and timeout), used only when no enhanced options were stashed
#
# @return [String] the response in AGI raw form
def wait_for_digits(options={})
  check_state
  response =
    if @wait_for_digits_options.nil?
      # The second word of the raw AGI argument is the timeout in milliseconds
      timeout = strip_quotes(options[:args][0].split(' ')[1]).to_i
      timeout = 1000 if timeout == -1 # -1 means "use the default" of one second
      @current_call.ask('', { 'timeout'    => timeout / 1000,
                              'choices'    => '[1 DIGIT], *, #',
                              'choiceMode' => 'keypad' })
    else
      # Enhanced options were stashed by a preceding STREAM FILE carrying JSON
      @current_call.ask(@wait_for_digits_options['prompt'], @wait_for_digits_options)
    end
  @agi_response + response.value[0].to_s + "\n"
rescue => e
  log_error(this_method, e)
end
##
# Builds a hash of the available Asterisk sound files from a JSON file stored on Tropo.
# Performs a blocking HTTP GET against the URL configured at
# @tropo_agi_config['asterisk']['sounds']['available_files'].
#
# @return [Hash] the sound files available for playback (filename => relative path)
def asterisk_sound_files
  JSON.parse(Net::HTTP.get(URI.parse(@tropo_agi_config['asterisk']['sounds']['available_files'])))
end
private
##
# Ensures the call is in a usable state, automatically answering it when still ringing.
#
# @return [Boolean] true when the call may be operated on
# @raise [RuntimeError] the AGI 511 error when the call has already disconnected
def check_state
  state = @current_call.state
  if state == 'DISCONNECTED'
    raise RuntimeError, '511 result=Command Not Permitted on a dead channel'
  elsif state == 'RINGING'
    @current_call.answer
    # Sleep to allow audio to settle, in the case of Skype
    sleep 2
  end
  true
end
##
# Converts the escape digits passed in a STREAM FILE into the requisite
# comma-delimited choices format for Tropo.
#
# @param [required, String] escape_digits the digits to convert
# @return [String] e.g. "1,2,#" for "12#"
def create_choices(escape_digits)
  # Fix: guard the JRuby version probe with defined? — referencing JRUBY_VERSION
  # directly raised NameError on any non-JRuby interpreter.
  # JRuby 1.3.1 does not have the each_char method on the String class.
  if defined?(JRUBY_VERSION) && JRUBY_VERSION == '1.3.1'
    choices = ''
    escape_digits.each_byte { |byte| choices = choices + byte.chr + ',' }
    choices.chop
  else
    escape_digits.split(//).join(',')
  end
end
##
# Splits a STREAM FILE request argument into its prompt and trailing escape digits.
#
# @param [required, String] original_string the raw argument (note: its double
#   quotes are stripped in place)
# @return [Array(String, String)] the prompt and the escape digits (nil when none found)
def extract_prompt_and_escape_digits(original_string)
  original_string.gsub!('"', '')
  # The escape digits are a trailing run of digits, optionally combined with * and/or #
  match_data = original_string.match /\d{1,}\#$|\d{1,}$|\d{1,}\*\#$|\d{1,}\#\*$|\d{1,}\*|\#|\*$/
  return original_string, nil if match_data.nil?
  [match_data.pre_match.rstrip, match_data[0]]
end
##
# Looks up the URI of a hosted Asterisk sound file for the given prompt name.
#
# @param [String] text the name of the sound file to be played
#
# @return [String, false] the URL to play the file from when the filename exists
#   and Asterisk sounds are enabled; false otherwise
def fetch_asterisk_sound(text)
  text = strip_quotes text
  sounds = @tropo_agi_config['asterisk']['sounds']
  if sounds['enabled'] && @asterisk_sound_files[text]
    return sounds['base_uri'] + '/' + sounds['language'] + '/' + @asterisk_sound_files[text]
  end
  false
end
##
# Work-around until $currentCall.getHeaderMap works (currently a bug in the Ruby shim):
# converts a Java HashMap into a native Ruby Hash.
#
# @param [Object] hashmap the Java HashMap to convert
#
# @return [Hash] the converted native Ruby hash
def hashmap_to_hash(hashmap)
  native_hash = {}
  key_iterator = hashmap.keySet.iterator
  # Walk the Java iterator, copying each key/value pair across
  while key_iterator.hasNext
    key = key_iterator.next
    native_hash[key] = hashmap.get(key)
  end
  native_hash
end
##
# Logs formatted errors to the Tropo debugger.
#
# @param [String] action the action that was requested
# @param [Exception] error the error itself
#
# @return [String] the response in AGI raw form: dead-channel (511) errors are
#   passed through verbatim, anything else becomes a generic -1 result
def log_error(action, error)
  @current_call.log '====> Tropo AGI ACTION ERROR - Start <===='
  show "Error: Unable to execute the #{action} request. call_active? #{@current_call.isActive.inspect}"
  show "Error output: #{error.inspect}"
  show "Trace: #{error.backtrace.join("\n")}"
  @current_call.log '====> Tropo AGI ACTION ERROR - End <===='
  if error.to_s == '511 result=Command Not Permitted on a dead channel'
    error.to_s + "\n"
  else
    @agi_response + "-1\n"
  end
end
##
# Parses the destinations sent over the AGI protocol into an array of dialable
# destinations, converting the Asterisk style of SIP/ to sip:, the proper SIP URI format.
#
# @param [Array] destinations the unformatted destinations to be parsed from AGI
#
# @return [Array<String>] an array of dialable destinations
def parse_destinations(destinations)
  destinations.map do |destination|
    # Drop a leading double quote if present. Fix: the old test compared
    # destination[0] to the byte value 34, which only worked on Ruby 1.8 and was
    # always false on 1.9+; destination[0, 1] yields the first character on both.
    destination = destination[1..-1] if destination[0, 1] == '"'
    # Fix: both branches of the former regexp conditional performed this same
    # substitution, so the dead conditional has been removed.
    destination.gsub('SIP/', 'sip:')
  end
rescue => e
  show "parse_destinations method error: #{e.inspect}"
end
##
# Parses a STREAM FILE argument to see whether it is a JSON string.
#
# @param [String] string the string to parse
# @param [Integer] leftchop how many trailing characters to discard before parsing
#
# @return [Hash, nil] the hash when it was JSON, nil when it was not
def parse_input_string(string, leftchop)
  # Trim the trailing AGI decoration and remove escaping back-slashes before parsing
  candidate = string[0..-leftchop].gsub("\\", '')
  JSON.parse candidate
rescue => e
  nil
end
##
# Preps @chanvars to be set as SIP headers by prefixing each key with x-tropo-.
#
# @param [Hash] vars the channel variables to convert
#
# @return [Hash] the formatted headers
def set_headers(vars)
  show "Headers to map: #{vars.inspect}"
  vars.inject({}) do |headers, (key, value)|
    headers['x-tropo-' + key] = value
    headers
  end
end
end#end class Commands
##
# Creates a new instance of TropoAGItate.
#
# @param [Object] current_call the currentCall object of Tropo
# @param [Object] current_app the currentApp object of Tropo, used to locate the
#   account's configuration file
# @return [Object] instance of TropoAGItate
def initialize(current_call, current_app)
  @current_call = current_call
  @current_app = current_app
  # Loads the YAML configuration from the account's hosted file area; raises when missing
  @tropo_agi_config = tropo_agi_config
  show "With Configuration #{@tropo_agi_config.inspect}"
  @commands = Commands.new(@current_call, @tropo_agi_config)
rescue => e
  show "Could not find your configuration file. #{e.inspect}"
  # Could not find any config, so failing over to the default location
  failover('sip:9991443146@sip.tropo.com')
  show 'Session sent to default backup location, Now aborting the script'
  # Stop the whole script: without config there is nothing useful left to do
  abort
end
##
# Executes the loop that sends and receives the AGI messages to and from the AGI server
# until the call is no longer active, then closes the socket.
#
# @return [Boolean] whether the socket ended up closed
def run
  return unless create_socket_connection
  while @current_call.isActive
    begin
      raw_command = @agi_client.gets
      show "Raw string: #{raw_command}"
      @agi_client.write(execute_command(raw_command))
    rescue => e
      @current_call.log '====> Broken pipe to the AGI server, Adhearsion tends to drop the socket after sending a hangup. <===='
      show "Error is: #{e}"
      @current_call.hangup
    end
  end
  close_socket
end
alias :start :run
##
# Creates the TCP socket connection to the AGI server and sends the initial AGI message.
#
# @return [Boolean] true when connected, false when the connection failed
#   (in which case the call is failed over to the configured next_sip_uri)
def create_socket_connection
  @agi_uri = URI.parse @tropo_agi_config['agi']['uri']
  # Default to the standard AGI port when the URI does not carry one
  @agi_uri.port = 4573 if @agi_uri.port.nil?
  @current_call.log "Connecting to AGI server at #{@agi_uri.host}:#{@agi_uri.port}"
  @agi_client = TCPSocket.new(@agi_uri.host, @agi_uri.port)
  # The URI path (sans leading slash) is used as the AGI context
  @agi_client.write(initial_message(@agi_uri.host, @agi_uri.port, @agi_uri.path[1..-1]))
  true
rescue => e
  # If we can not open the socket to the AGI server, play/log an error message and hangup the call
  error_message = 'We are unable to connect to the A G I server at this time, please try again later.'
  @current_call.log "====> #{error_message} <===="
  @current_call.log e
  failover(@tropo_agi_config['tropo']['next_sip_uri'])
  false
end
##
# Closes the socket to the AGI server, ignoring errors from an already-dropped connection.
#
# @return [Boolean] true if the socket is closed, false if still open
def close_socket
  begin
    @agi_client.close
  rescue
    # the AGI server may already have dropped the connection; nothing to do
  end
  @agi_client.closed?
end
##
# Sends the initial AGI environment message to the AGI server.
# AGI: http://www.voip-info.org/wiki/view/Asterisk+AGI
#
# @param [String] agi_host the hostname of the AGI server
# @param [Integer] agi_port the port of the AGI server
# @param [String] agi_context the context to be used
#
# @return [String] the response in AGI raw form
def initial_message(agi_host, agi_port, agi_context)
  # Grab the headers and then push them in the initial message
  headers = @commands.getheaders
  # Map an SBC diversion header onto the AGI rdnis field when present
  rdnis = 'unknown'
  rdnis = headers['x-sbc-diversion'] if headers['x-sbc-diversion']
  # NOTE: the heredoc content below is sent verbatim over the wire; do not re-indent it
  <<-MSG
agi_network: yes
agi_network_script: #{agi_context}
agi_request: agi://#{agi_host}:#{agi_port}/#{agi_context}
agi_channel: TROPO/#{@current_call.id}
agi_language: en
agi_type: TROPO
agi_uniqueid: #{@current_call.id}
agi_version: tropo-agi-0.1.0
agi_callerid: #{@current_call.callerID}
agi_calleridname: #{@current_call.callerName}
agi_callingpres: 0
agi_callingani2: 0
agi_callington: 0
agi_callingtns: 0
agi_dnid: #{@current_call.calledID}
agi_rdnis: #{rdnis}
agi_context: #{agi_context}
agi_extension: 1
agi_priority: 1
agi_enhanced: 0.0
agi_accountcode: 0
agi_threadid: #{Thread.current.to_s}
tropo_headers: #{headers.to_json if headers.keys.length > 0}
  MSG
end
##
# Executes the given command from AGI against the appropriate Tropo command handler.
#
# @param [String] data the raw AGI request from the AGI server (nil is treated as HANGUP)
#
# @return [String] the response to the command in raw AGI form
def execute_command(data)
  data = "HANGUP" if data.nil?
  options = parse_command(data)
  case options[:action]
  when 'answer', 'hangup'
    @commands.send(options[:action].to_sym)
  when 'set', 'get'
    command = options[:command].downcase
    if command == 'variable'
      @commands.user_vars(options)
    elsif command == 'callerid' || command == 'calleridname'
      @commands.callerid(options)
    end
  when 'exec', 'stream', 'channel'
    # The sub-command names the Commands method to invoke
    @commands.send(options[:command].downcase.to_sym, options)
  when 'wait'
    @commands.wait_for_digits(options)
  when 'record'
    @commands.record(options)
  else
    show "Invalid or unknown command #{data}"
    return "510 result=Invalid or unknown Command\n"
  end
end
##
# Takes the AGI request from the AGI server, breaks it into its component parts
# and returns the command to be executed, stripped of quotes.
#
# @param [String] data the AGI server request
#
# @return [Hash, nil] the command (action / command / args), or nil when unparsable
def parse_command(data)
  parts = data.match /^(\w+)\s*(\w+|"\w+")?\s*(.*)?$/
  return if parts.nil?
  command = { :action => parts[1].downcase }
  command[:command] = strip_quotes(parts[2].downcase) unless parts[2].nil?
  command[:args] = parse_args(parts[3]) unless parts[3].nil? || parts[3].empty?
  show "command #{command.inspect}"
  command
end
##
# Parses the AGI arguments: strips quotes and returns an array, or a hash when
# the argument string is JSON.
#
# @param [String] parts the raw arguments to be parsed
#
# @return [Array, Hash] the parsed arguments
def parse_args(parts)
  JSON.parse strip_quotes(parts.clone)
rescue
  # Split with a RegEx, since we may have commas inside of elements as well as
  # delimiting them; the lookahead keeps commas inside quoted elements intact
  pieces = parts.split(/(,|\r\n|\n|\r)(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))/m)
  # Get rid of the extraneous commas captured by the split
  pieces.delete(",")
  pieces.map { |piece| strip_quotes(piece) }
end
##
# Fails the call over to the backup SIP URI, or plays/logs an error message
# and hangs up when no backup location was provided.
#
# @param [String, nil] location the SIP URI to transfer to
# @return [nil]
def failover(location)
  return unless @current_call.isActive
  @current_call.answer
  if location
    begin
      @current_call.transfer location
    rescue => e
      show "Unable to transfer to your next_sip_uri location #{e}"
    end
  else
    error_message = 'We are unable to connect to the fail over sip U R I. Please try your call again later.'
    @current_call.log "====> #{error_message} <===="
    @current_call.say error_message, :voice => @tropo_voice
    @current_call.hangup
  end
end
##
# Loads the configuration from the current account's FTP/WebDAV files on Tropo.
#
# @return [Hash] the configuration details
# @raise [RuntimeError] when the configuration file cannot be found
def tropo_agi_config
  # The account number this app is running under is encoded in its base directory
  account_id = fetch_account_data[1]
  # Try from the www directory on the Tropo file system
  response = fetch_config_file "/#{account_id}/www/tropo_agi_config/tropo_agi_config.yml"
  if response.code == '200'
    YAML.load(response.body)
  else
    show "Can not find config file. #{response.body}"
    # No config file found
    raise RuntimeError, "Configuration file not found"
  end
end
##
# Derives the account data from the application's base directory.
#
# @return [MatchData] match whose [1] capture is the trailing account number
def fetch_account_data
  @current_app.baseDir.to_s.match(/(\d+)$/)
end
##
# Fetches the configuration file from Tropo hosting.
#
# @param [String] resource the resource path where the file is to be found
#
# @return [Net::HTTPResponse] the resulting HTTP response object
def fetch_config_file(resource)
  hosting = URI.parse("http://hosting.tropo.com")
  Net::HTTP.start(hosting.host, hosting.port) do |http|
    http.get resource
  end
end
##
# A special class to mimic some of Asterisk's behavior toward certain
# channel variables: the CALLERID family is stored in a structured sub-hash
# and is readable/writable under several spellings.
class MagicChannelVariables
  include Enumerable
  # @param [Hash] variables initial variables, each stored through #set
  def initialize(variables = {})
    # Fix: @variables was never initialized, so every #set/#get raised
    # NoMethodError on nil; the callerid sub-hash must exist up front.
    @variables = { :callerid => {} }
    variables.each_pair do |k, v|
      set(k, v)
    end
  end
  # Stores a variable, routing the CALLERID spellings into the callerid sub-hash.
  def set(k, v)
    case k
    when "CALLERIDNAME", "CALLERID(name)"
      @variables[:callerid][:name] = v
    when "CALLERIDNUM", "CALLERID(num)"
      @variables[:callerid][:num] = v
    when "CALLERID", "CALLERID(all)"
      # Parse out the callerID details
      # MUST be in the form of "Name"<number>
      # See http://www.voip-info.org/wiki/view/set+callerid
      name, number = v.scan(/"([^"]*)"\s*<([^>]*)>/).first
      @variables[:callerid][:name] = name if !name.nil?
      @variables[:callerid][:num] = number if !number.nil?
    else
      @variables[k] = v
    end
  end
  alias :[]= :set
  # Reads a variable, reassembling "Name"<number> for the CALLERID(all) spellings.
  def get(k)
    case k
    when "CALLERIDNAME", "CALLERID(name)"
      @variables[:callerid][:name]
    when "CALLERIDNUM", "CALLERID(num)"
      @variables[:callerid][:num]
    when "CALLERID", "CALLERID(all)"
      "\"#{@variables[:callerid][:name]}\"<#{@variables[:callerid][:num]}>"
    else
      @variables[k] || nil
    end
  end
  alias :[] :get
  # NOTE(review): Enumerable is included but no #each is defined, so the mixed-in
  # methods raise NoMethodError — confirm whether #each should delegate to @variables.
end
end#end class TropoAGItate
# Are we running as a spec, or is this live?
if @tropo_testing.nil?
  log "====> Running Tropo-AGI <===="
  # If this is an outbound request place the call
  # see: https://www.tropo.com/docs/scripting/call.htm
  if $destination
    options = {}
    # User may pass in the caller ID to use
    options[:callerID] = $caller_id if $caller_id
    # User may pass in text or voice to use for the channel
    options[:channel] = $channel || 'voice'
    # User may pass in AIM, GTALK, MSN, JABBER, TWITTER, SMS or YAHOO, SMS is default
    options[:network] = $network || 'SMS'
    # Time tropo will wait before hanging up, default is 30
    options[:timeout] = $timeout if $timeout
    # If voice turn the phone number into a Tel URI, but only if not a SIP URI
    $destination = 'tel:+' + $destination if options[:channel].downcase == 'voice' && $destination[0..2] != 'sip'
    log "====> Calling to: #{$destination} - with these options: #{options.inspect} <===="
    # Place the call
    call $destination, options
  end
  # If we have an active call, start running the AGI client
  if $currentCall
    # Create the instance of TropoAGItate with Tropo's currentCall object
    tropo_agi = TropoAGItate.new($currentCall, $currentApp)
    # Start sending/receiving AGI commands via the TCP socket
    tropo_agi.run
  else
    # Inbound session with no call and no outbound destination: nothing to do
    log '====> No Outbound Address Provided - Exiting <===='
  end
end
|
=begin
Arachni
Copyright (c) 2010-2011 Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
This is free software; you can copy and distribute and modify
this program under the term of the GPL v2.0 License
(See LICENSE file for details)
=end
require 'webrick'
require 'webrick/https'
require 'openssl'
require 'sinatra/base'
require "rack/csrf"
require 'rack-flash'
require 'erb'
require 'yaml'
require 'cgi'
require 'fileutils'
require 'ap'
module Arachni
module UI
require Arachni::Options.instance.dir['lib'] + 'ui/cli/output'
require Arachni::Options.instance.dir['lib'] + 'framework'
require Arachni::Options.instance.dir['lib'] + 'rpc/xml/client/dispatcher'
require Arachni::Options.instance.dir['lib'] + 'rpc/xml/client/instance'
require Arachni::Options.instance.dir['lib'] + 'ui/web/report_manager'
require Arachni::Options.instance.dir['lib'] + 'ui/web/log'
require Arachni::Options.instance.dir['lib'] + 'ui/web/output_stream'
#
#
# Provides a web user interface for the Arachni Framework using Sinatra.<br/>
#
# @author: Tasos "Zapotek" Laskos
# <tasos.laskos@gmail.com>
# <zapotek@segfault.gr>
# @version: 0.1-pre
#
# @see Arachni::RPC::XML::Client::Instance
# @see Arachni::RPC::XML::Client::Dispatcher
#
module Web
VERSION = '0.1-pre'
class Server < Sinatra::Base
configure do
  # Rack middleware: flash messages, cookie-based sessions and CSRF protection
  use Rack::Flash
  use Rack::Session::Cookie
  use Rack::Csrf, :raise => true
  # Load the WebUI configuration and propagate the client-side SSL settings
  # onto the global Arachni options singleton
  @@conf = YAML::load_file( Arachni::Options.instance.dir['root'] + 'conf/webui.yaml' )
  Arachni::Options.instance.ssl = @@conf['ssl']['client']['enable']
  Arachni::Options.instance.ssl_pkey = @@conf['ssl']['client']['key']
  Arachni::Options.instance.ssl_cert = @@conf['ssl']['client']['cert']
  Arachni::Options.instance.ssl_ca = @@conf['ssl']['client']['ca']
end
helpers do
  # Total number of generated reports.
  def report_count
    settings.reports.all.size
  end
  # True when any of the plugin's options is a required path (a file upload is needed).
  def plugin_has_required_file_option?( options )
    options.any? { |opt| opt['type'] == 'path' && opt['required'] }
  end
  # Serializes redundancy rules back into the "regexp:count" textarea format.
  def format_redundants( rules )
    return if !rules || !rules.is_a?( Array ) || rules.empty?
    rules.select { |rule| rule['regexp'] && rule['count'] }.
      map { |rule| rule['regexp'] + ':' + rule['count'] + "\r\n" }.join
  end
  # HTML-escapes the given string.
  def escape( str )
    CGI.escapeHTML( str )
  end
  # True when the given top-level tab matches the current request path.
  def selected_tab?( tab )
    splits = env['PATH_INFO'].split( '/' )
    ( splits.empty? && tab == '/' ) || splits[1] == tab
  end
  def csrf_token
    Rack::Csrf.csrf_token( env )
  end
  def csrf_tag
    Rack::Csrf.csrf_tag( env )
  end
  # Cached module list (populated by fill_component_cache).
  def modules
    @@modules
  end
  # Cached plugin list (populated by fill_component_cache).
  def plugins
    @@plugins
  end
  # Approximates process memory from its RSS page count.
  def proc_mem( rss )
    # we assume a page size of 4096
    (rss.to_i * 4096 / 1024 / 1024).to_s + 'MB'
  end
  # Formats a number of seconds as zero-padded HH:MM:SS.
  def secs_to_hms( secs )
    secs = secs.to_i
    [secs / 3600, secs / 60 % 60, secs % 60].
      map { |part| part.to_s.rjust( 2, '0' ) }.join( ':' )
  end
end
# Anchor all server asset paths to this file's directory
dir = File.dirname( File.expand_path( __FILE__ ) )
set :views, "#{dir}/server/views"
set :public, "#{dir}/server/public"
set :tmp, "#{dir}/server/tmp"
set :db, "#{dir}/server/db"
set :static, true
set :environment, :development
#
# This will be used for the "owner" field of the helper instance
#
HELPER_OWNER = "WebUI helper"
# Shared logger and report manager, both driven by the global Arachni options
set :log, Log.new( Arachni::Options.instance, settings )
set :reports, ReportManager.new( Arachni::Options.instance, settings )
enable :sessions
configure do
  # shit's on!
  settings.log.webui_started
end
# Yields to the given block and returns its value.
# NOTE(review): the rescue wrapper below is currently disabled, so
# connection failures propagate raw instead of rendering the error page.
def exception_jail( &block )
  # begin
  yield
  # rescue Errno::ECONNREFUSED => e
  #   erb :error, { :layout => true }, :error => 'Remote server has been shut down.'
  # end
end
# Renders the given page; the dispatcher page additionally requires a live
# dispatcher and is handed its statistics.
def show( page, layout = true )
  return erb( page.to_sym, { :layout => layout } ) unless page == :dispatcher
  ensure_dispatcher
  erb :dispatcher, { :layout => true }, :stats => dispatcher_stats
end
#
# Provides an easy way to connect to an instance by port
#
# @param [Integer] port
#
# @return [Arachni::RPC::XML::Client::Instance] client for the instance on that port
# @raise [RuntimeError] when the instance cannot be reached
#
def connect_to_instance( port )
  prep_session
  begin
    return Arachni::RPC::XML::Client::Instance.new( options, port_to_url( port ) )
  rescue StandardError
    # Fix: was `rescue Exception`, which also swallowed SignalException/SystemExit;
    # StandardError covers the connection failures this guards against.
    raise "Instance on port #{port} has shutdown."
  end
end
#
# Converts a port to a URL instance.
#
# @param [Integer] port
#
def port_to_url( port )
uri = URI( session[:dispatcher_url] )
uri.port = port.to_i
uri.to_s
end
#
# Provides easy access to the dispatcher client and handles failure by
# redirecting to the error page.
#
def dispatcher
  begin
    @dispatcher ||= Arachni::RPC::XML::Client::Dispatcher.new( options, session[:dispatcher_url] )
  rescue StandardError => e
    # Fix: was `rescue Exception`, which also trapped SignalException/SystemExit.
    redirect '/dispatcher_error'
  end
end
#
# Provides statistics about running jobs etc using the dispatcher,
# annotating each running job with its paused state.
#
def dispatcher_stats
  stats = dispatcher.stats
  stats['running_jobs'].each do |job|
    begin
      job['paused'] = connect_to_instance( job['port'] ).framework.paused?
    rescue
      # the instance may already be gone; leave the job entry untouched
    end
  end
  stats
end
# Convenience accessor for the global Arachni options singleton.
def options
  Arachni::Options.instance
end
#
# Similar to String#to_i but returns the original object when the String
# is not made up entirely of digits (or is not a String at all).
#
# @param [Object] str
#
# @return [Integer, Object]
#
def to_i( str )
  return str if !str.is_a?( String )
  # only convert when the digit run spans the whole string
  str.match( /\d+/ ).to_s.size == str.size ? str.to_i : str
end
#
# Prepares form params to be used as options for XMLRPC transmission.
#
# @param [Hash] params
#
# @return [Hash] normalized hash
#
def prep_opts( params )
  # these textarea fields arrive as one value per line
  need_to_split = [
    'exclude_cookies',
    'exclude',
    'include'
  ]
  skipped = [ '_csrf', 'modules', 'plugins' ]
  cparams = {}
  params.each_pair do |name, value|
    next if skipped.include?( name ) || ( value.is_a?( String ) && value.empty? )
    # checkbox values arrive as 'on'
    value = true if value == 'on'
    if name == 'cookiejar'
      cparams['cookies'] = Arachni::HTTP.parse_cookiejar( value[:tempfile] )
    elsif need_to_split.include?( name ) && value.is_a?( String )
      cparams[name] = value.split( "\r\n" )
    elsif name == 'redundant'
      # each line is a "regexp:count" rule
      cparams[name] = value.split( "\r\n" ).map do |rule|
        regexp, counter = rule.split( ':', 2 )
        { 'regexp' => regexp, 'count' => counter }
      end
    else
      cparams[name] = to_i( value )
    end
  end
  # audit at least links, forms and cookies when the user selected nothing
  if !cparams['audit_links'] && !cparams['audit_forms'] &&
     !cparams['audit_cookies'] && !cparams['audit_headers']
    cparams['audit_links'] = true
    cparams['audit_forms'] = true
    cparams['audit_cookies'] = true
  end
  cparams
end
# Normalizes the selected modules: '-' when none were submitted,
# '*' when the selection is empty, otherwise the selected names.
def prep_modules( params )
  return ['-'] if !params['modules']
  mods = params['modules'].keys
  mods.empty? ? ['*'] : mods
end
# Maps each selected plugin to its submitted options (empty hash when none).
def prep_plugins( params )
  return {} if !params['plugins']
  params['plugins'].keys.inject({}) do |plugins, name|
    plugins[name] = params['options'][name] || {}
    plugins
  end
end
# Lazily dispatches and connects to a shared helper instance
# (cached in the @@arachni class variable).
def helper_instance
  @@arachni ||= nil
  if !@@arachni
    instance = dispatcher.dispatch( HELPER_OWNER )
    @@arachni = connect_to_instance( instance['port'] )
  end
  @@arachni
rescue
  redirect '/dispatcher/error'
end
# Truthy (the combined component count) when the module/plugin caches are
# populated; false when the class variables are not yet defined.
def component_cache_filled?
  @@modules.size + @@plugins.size
rescue
  false
end
# Populates the module/plugin caches from a helper instance.
def fill_component_cache
  # if the cache was empty we had to spawn a helper instance just for this,
  # so shut it down once we have what we wanted
  do_shutdown = !component_cache_filled?
  @@modules ||= helper_instance.framework.lsmod.dup
  @@plugins ||= helper_instance.framework.lsplug.dup
  helper_instance.service.shutdown! if do_shutdown
end
#
# Makes sure that all systems are go and populates the session with default values
#
def prep_session
    # Ensures a dispatcher URL and sane scan defaults exist in the session.
    session[:dispatcher_url] ||= 'http://localhost:7331'
    ensure_dispatcher
    session['opts'] ||= {}
    # default audit scope and HTTP limits for a fresh session
    session['opts']['settings'] ||= {
        'audit_links' => true,
        'audit_forms' => true,
        'audit_cookies' => true,
        'http_req_limit' => 20,
        'user_agent' => 'Arachni/' + Arachni::VERSION
    }
    session['opts']['modules'] ||= [ '*' ]
    # plugin selections are kept YAML-serialized in the session
    session['opts']['plugins'] ||= YAML::dump( {
        'content_types' => {},
        'healthmap' => {},
        'metamodules' => {}
    } )
    #
    # Garbage collector, zombie killer. Reaps idle processes every 5 seconds.
    #
    # NOTE: stored in a class variable so only one reaper thread exists
    # across all requests.
    @@reaper ||= Thread.new {
        while( true )
            shutdown_zombies
            # sleep via select() so the loop wakes every 5 seconds
            ::IO::select( nil, nil, nil, 5 )
        end
    }
end
#
# Makes sure that we have a dispatcher, if not it redirects the user to
# an appropriate error page.
#
# @return [Bool] true if alive, redirect if not
#
#
# Makes sure that we have a dispatcher, if not it redirects the user to
# an appropriate error page.
#
# @return [Bool] true if alive, redirect if not
#
def ensure_dispatcher
    begin
        dispatcher.alive?
    rescue
        # rescue StandardError only (the original rescued Exception,
        # which also swallows SignalException/SystemExit)
        redirect '/dispatcher/error'
    end
end
#
# Saves the report, shuts down the instance and returns the content as HTML
# to be sent back to the user's browser.
#
# @param [Arachni::RPC::XML::Client::Instance] arachni
#
def save_shutdown_and_show( arachni )
    # save the audit report, kill the instance, then render the saved
    # report in its HTML format
    report = save_and_shutdown( arachni )
    settings.reports.get( 'html', File.basename( report, '.afr' ) )
end
#
# Saves the report and shuts down the instance
#
# @param [Arachni::RPC::XML::Client::Instance] arachni
#
def save_and_shutdown( arachni )
    # Persist the audit results first, then kill the instance; the path
    # of the saved report is returned to the caller.
    saved_path = settings.reports.save( arachni.framework.auditstore )
    arachni.service.shutdown!
    saved_path
end
#
# Kills all running instances
#
def shutdown_all
    # Tries to shut down every job the dispatcher reports as running,
    # preferring a graceful save-then-shutdown for each instance.
    settings.log.dispatcher_global_shutdown( env )
    dispatcher.stats['running_jobs'].each {
        |job|
        begin
            # best case: save the audit report, then shut the instance down
            save_and_shutdown( connect_to_instance( job['port'] ) )
        rescue
            begin
                # saving failed -- force a plain shutdown without a report
                connect_to_instance( job['port'] ).service.shutdown!
            rescue
                # instance is unreachable/unkillable; log it and move on
                settings.log.instance_fucker_wont_die( env, port_to_url( job['port'] ) )
                next
            end
        end
        settings.log.instance_shutdown( env, port_to_url( job['port'] ) )
    }
end
#
# Kills all idle instances
#
# @return [Integer] the number of reaped instances
#
#
# Kills all idle instances
#
# @return [Integer] the number of reaped instances
#
def shutdown_zombies
    reaped = 0
    dispatcher.stats['running_jobs'].each {
        |job|
        begin
            arachni = connect_to_instance( job['port'] )
            begin
                # Only reap instances that are idle AND not our own helper.
                # The original condition was `!job['owner'] != HELPER_OWNER`,
                # which always evaluates to true and therefore reaped the
                # WebUI helper instance as well.
                if !arachni.framework.busy? && job['owner'] != HELPER_OWNER
                    save_and_shutdown( arachni )
                    settings.log.webui_zombie_cleanup( env, port_to_url( job['port'] ) )
                    reaped += 1
                end
            rescue
                # instance died mid-query; nothing to reap
            end
        rescue
            # couldn't connect at all; skip this job
        end
    }
    return reaped
end
get "/" do
prep_session
show :home
end
get "/dispatcher" do
show :dispatcher
end
#
# sets the dispatcher URL
#
post "/dispatcher" do
if !params['url'] || params['url'].empty?
flash[:err] = "URL cannot be empty."
show :dispatcher_error
else
session[:dispatcher_url] = params['url']
settings.log.dispatcher_selected( env, params['url'] )
begin
dispatcher.jobs
settings.log.dispatcher_verified( env, params['url'] )
redirect '/'
rescue
settings.log.dispatcher_error( env, params['url'] )
flash[:err] = "Couldn't find a dispatcher at \"#{escape( params['url'] )}\"."
show :dispatcher_error
end
end
end
#
# shuts down all instances
#
post "/dispatcher/shutdown" do
shutdown_all
redirect '/dispatcher'
end
get '/dispatcher/error' do
show :dispatcher_error
end
#
# starts a scan
#
post "/scan" do
valid = true
begin
URI.parse( params['url'] )
rescue
valid = false
end
if !params['url'] || params['url'].empty?
flash[:err] = "URL cannot be empty."
show :home
elsif !valid
flash[:err] = "Invalid URL."
show :home
else
instance = dispatcher.dispatch( params['url'] )
settings.log.instance_dispatched( env, port_to_url( instance['port'] ) )
settings.log.instance_owner_assigned( env, params['url'] )
arachni = connect_to_instance( instance['port'] )
session['opts']['settings']['url'] = params['url']
session['opts']['settings']['audit_links'] = true if session['opts']['settings']['audit_links']
session['opts']['settings']['audit_forms'] = true if session['opts']['settings']['audit_forms']
session['opts']['settings']['audit_cookies'] = true if session['opts']['settings']['audit_cookies']
session['opts']['settings']['audit_headers'] = true if session['opts']['settings']['audit_headers']
opts = prep_opts( session['opts']['settings'] )
arachni.opts.set( opts )
arachni.modules.load( session['opts']['modules'] )
arachni.plugins.load( YAML::load( session['opts']['plugins'] ) )
arachni.framework.run
settings.log.scan_started( env, params['url'] )
redirect '/instance/' + instance['port'].to_s
end
end
get "/modules" do
fill_component_cache
prep_session
show :modules, true
end
#
# sets modules
#
post "/modules" do
session['opts']['modules'] = prep_modules( params )
flash.now[:notice] = "Modules updated."
show :modules, true
end
get "/plugins" do
fill_component_cache
prep_session
erb :plugins, { :layout => true }
end
#
# sets plugins
#
post "/plugins" do
session['opts']['plugins'] = YAML::dump( prep_plugins( params ) )
flash.now[:notice] = "Plugins updated."
show :plugins, true
end
get "/settings" do
prep_session
erb :settings, { :layout => true }
end
#
# sets general framework settings
#
post "/settings" do
if session['opts']['settings']['url']
url = session['opts']['settings']['url'].dup
session['opts']['settings'] = prep_opts( params )
session['opts']['settings']['url'] = url
end
flash.now[:notice] = "Settings updated."
show :settings, true
end
get "/instance/:port" do
begin
arachni = connect_to_instance( params[:port] )
erb :instance, { :layout => true }, :paused => arachni.framework.paused?, :shutdown => false
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has been shutdown."
erb :instance, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
get "/instance/:port/output" do
begin
arachni = connect_to_instance( params[:port] )
if arachni.framework.busy?
OutputStream.new( arachni, 38 )
else
settings.log.instance_shutdown( env, port_to_url( params[:port] ) )
save_shutdown_and_show( arachni )
end
rescue Errno::ECONNREFUSED
"The server has been shut down."
end
end
post "/*/:port/pause" do
arachni = connect_to_instance( params[:port] )
begin
arachni.framework.pause!
settings.log.instance_paused( env, port_to_url( params[:port] ) )
flash.now[:notice] = "Instance on port #{params[:port]} will pause as soon as the current page is audited."
erb params[:splat][0].to_sym, { :layout => true }, :paused => arachni.framework.paused?, :shutdown => false, :stats => dispatcher_stats
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has been shutdown."
erb params[:splat][0].to_sym, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
post "/*/:port/resume" do
arachni = connect_to_instance( params[:port] )
begin
arachni.framework.resume!
settings.log.instance_resumed( env, port_to_url( params[:port] ) )
flash.now[:ok] = "Instance on port #{params[:port]} resumes."
erb params[:splat][0].to_sym, { :layout => true }, :paused => arachni.framework.paused?, :shutdown => false, :stats => dispatcher_stats
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has been shutdown."
erb params[:splat][0].to_sym, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
post "/*/:port/shutdown" do
arachni = connect_to_instance( params[:port] )
begin
arachni.framework.busy?
settings.log.instance_shutdown( env, port_to_url( params[:port] ) )
begin
save_shutdown_and_show( arachni )
rescue
flash.now[:ok] = "Instance on port #{params[:port]} has been shutdown."
show params[:splat][0].to_sym
ensure
arachni.service.shutdown!
end
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has already been shutdown."
erb params[:splat][0].to_sym, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
get "/reports" do
reports = []
settings.reports.all.each {
|report|
name = File.basename( report, '.afr' )
host, date = name.split( ':', 2 )
reports << {
'host' => host,
'date' => date,
'name' => name
}
}
erb :reports, { :layout => true }, :reports => reports,
:available => settings.reports.available
end
get '/reports/formats' do
erb :report_formats, { :layout => true }, :reports => settings.reports.available
end
post '/reports/delete' do
settings.reports.delete_all
settings.log.reports_deleted( env )
redirect '/reports'
end
post '/report/:name/delete' do
settings.reports.delete( params[:name] )
settings.log.report_deleted( env, params[:name] )
redirect '/reports'
end
get '/report/:name.:type' do
    settings.log.report_converted( env, params[:name] + '.' + params[:type] )
    # serve the converted report with a proper Content-Type derived from
    # its extension, falling back to a generic binary type for unknown
    # formats (without this the browser renders everything as HTML)
    content_type( params[:type], :default => 'application/octet-stream' )
    settings.reports.get( params[:type], params[:name] )
end
get '/log' do
erb :log, { :layout => true }, :entries => settings.log.entry.all.reverse
end
# override run! using this patch: https://github.com/sinatra/sinatra/pull/132
def self.run!( options = {} )
set options
handler = detect_rack_handler
handler_name = handler.name.gsub( /.*::/, '' )
# handler specific options use the lower case handler name as hash key, if present
handler_opts = options[handler_name.downcase.to_sym] || {}
puts "== Sinatra/#{Sinatra::VERSION} has taken the stage " +
"on #{port} for #{environment} with backup from #{handler_name}" unless handler_name =~/cgi/i
handler.run self, handler_opts.merge( :Host => bind, :Port => port ) do |server|
[ :INT, :TERM ].each { |sig| trap( sig ) { quit!( server, handler_name ) } }
set :running, true
end
rescue Errno::EADDRINUSE => e
puts "== Someone is already performing on port #{port}!"
end
def self.prep_webrick
if @@conf['ssl']['server']['key']
pkey = ::OpenSSL::PKey::RSA.new( File.read( @@conf['ssl']['server']['key'] ) )
end
if @@conf['ssl']['server']['cert']
cert = ::OpenSSL::X509::Certificate.new( File.read( @@conf['ssl']['server']['cert'] ) )
end
if @@conf['ssl']['key'] || @@conf['ssl']['cert'] || @@conf['ssl']['ca']
verification = OpenSSL::SSL::VERIFY_PEER | OpenSSL::SSL::VERIFY_FAIL_IF_NO_PEER_CERT
else
verification = ::OpenSSL::SSL::VERIFY_NONE
end
return {
:SSLEnable => @@conf['ssl']['server']['enable'] || false,
:SSLVerifyClient => verification,
:SSLCertName => [ [ "CN", Arachni::Options.instance.server || ::WEBrick::Utils::getservername ] ],
:SSLCertificate => cert,
:SSLPrivateKey => pkey,
:SSLCACertificateFile => @@conf['ssl']['server']['ca']
}
end
run! :host => Arachni::Options.instance.server || ::WEBrick::Utils::getservername,
:port => Arachni::Options.instance.rpc_port || 4567,
:server => %w[ webrick ],
:webrick => prep_webrick
at_exit do
settings.log.webui_shutdown
begin
# shutdown our helper instance
@@arachni ||= nil
@@arachni.service.shutdown! if @@arachni
rescue
end
end
end
end
end
end
added proper content-types when showing reports
=begin
Arachni
Copyright (c) 2010-2011 Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
This is free software; you can copy and distribute and modify
this program under the term of the GPL v2.0 License
(See LICENSE file for details)
=end
require 'webrick'
require 'webrick/https'
require 'openssl'
require 'sinatra/base'
require "rack/csrf"
require 'rack-flash'
require 'erb'
require 'yaml'
require 'cgi'
require 'fileutils'
require 'ap'
module Arachni
module UI
require Arachni::Options.instance.dir['lib'] + 'ui/cli/output'
require Arachni::Options.instance.dir['lib'] + 'framework'
require Arachni::Options.instance.dir['lib'] + 'rpc/xml/client/dispatcher'
require Arachni::Options.instance.dir['lib'] + 'rpc/xml/client/instance'
require Arachni::Options.instance.dir['lib'] + 'ui/web/report_manager'
require Arachni::Options.instance.dir['lib'] + 'ui/web/log'
require Arachni::Options.instance.dir['lib'] + 'ui/web/output_stream'
#
#
# Provides a web user interface for the Arachni Framework using Sinatra.<br/>
#
# @author: Tasos "Zapotek" Laskos
# <tasos.laskos@gmail.com>
# <zapotek@segfault.gr>
# @version: 0.1-pre
#
# @see Arachni::RPC::XML::Client::Instance
# @see Arachni::RPC::XML::Client::Dispatcher
#
module Web
VERSION = '0.1-pre'
class Server < Sinatra::Base
configure do
use Rack::Flash
use Rack::Session::Cookie
use Rack::Csrf, :raise => true
@@conf = YAML::load_file( Arachni::Options.instance.dir['root'] + 'conf/webui.yaml' )
Arachni::Options.instance.ssl = @@conf['ssl']['client']['enable']
Arachni::Options.instance.ssl_pkey = @@conf['ssl']['client']['key']
Arachni::Options.instance.ssl_cert = @@conf['ssl']['client']['cert']
Arachni::Options.instance.ssl_ca = @@conf['ssl']['client']['ca']
end
helpers do
def report_count
settings.reports.all.size
end
def plugin_has_required_file_option?( options )
    # true when any of the plugin's options is a mandatory filesystem path
    options.any? { |opt| opt['type'] == 'path' && opt['required'] }
end
def format_redundants( rules )
    # Turns an array of { 'regexp' => .., 'count' => .. } hashes back into
    # the textarea representation: one "regexp:count" rule per CRLF line.
    # Returns nil for missing/non-Array/empty input.
    return if !rules || !rules.is_a?( Array ) || rules.empty?
    rules.each_with_object( '' ) do |rule, text|
        next if !rule['regexp'] || !rule['count']
        text << rule['regexp'] << ':' << rule['count'] << "\r\n"
    end
end
def escape( str )
    # HTML-escape user-supplied text before embedding it in views
    CGI.escapeHTML( str )
end
def selected_tab?( tab )
splits = env['PATH_INFO'].split( '/' )
( splits.empty? && tab == '/' ) || splits[1] == tab
end
def csrf_token
Rack::Csrf.csrf_token( env )
end
def csrf_tag
Rack::Csrf.csrf_tag( env )
end
def modules
@@modules
end
def plugins
@@plugins
end
def proc_mem( rss )
    # Converts an RSS value (in pages) to a human readable megabyte
    # string; a 4096-byte page size is assumed.
    pages = rss.to_i
    "#{pages * 4096 / 1024 / 1024}MB"
end
def secs_to_hms( secs )
    # Formats a duration in seconds as zero-padded HH:MM:SS.
    total = secs.to_i
    hours = total / 3600
    minutes = ( total / 60 ) % 60
    seconds = total % 60
    format( '%02d:%02d:%02d', hours, minutes, seconds )
end
end
dir = File.dirname( File.expand_path( __FILE__ ) )
set :views, "#{dir}/server/views"
set :public, "#{dir}/server/public"
set :tmp, "#{dir}/server/tmp"
set :db, "#{dir}/server/db"
set :static, true
set :environment, :development
#
# This will be used for the "owner" field of the helper instance
#
HELPER_OWNER = "WebUI helper"
set :log, Log.new( Arachni::Options.instance, settings )
set :reports, ReportManager.new( Arachni::Options.instance, settings )
enable :sessions
configure do
# shit's on!
settings.log.webui_started
end
def exception_jail( &block )
# begin
block.call
# rescue Errno::ECONNREFUSED => e
# erb :error, { :layout => true }, :error => 'Remote server has been shut down.'
# end
end
def show( page, layout = true )
if page == :dispatcher
ensure_dispatcher
erb :dispatcher, { :layout => true }, :stats => dispatcher_stats
else
erb page.to_sym, { :layout => layout }
end
end
#
# Provides an easy way to connect to an instance by port
#
# @param [Integer] port
#
def connect_to_instance( port )
prep_session
begin
return Arachni::RPC::XML::Client::Instance.new( options, port_to_url( port ) )
rescue Exception
raise "Instance on port #{port} has shutdown."
end
end
#
# Converts a port to a URL instance.
#
# @param [Integer] port
#
def port_to_url( port )
uri = URI( session[:dispatcher_url] )
uri.port = port.to_i
uri.to_s
end
#
# Provides easy access to the dispatcher and handles failure
#
def dispatcher
begin
@dispatcher ||= Arachni::RPC::XML::Client::Dispatcher.new( options, session[:dispatcher_url] )
rescue Exception => e
redirect '/dispatcher_error'
end
end
#
# Provides statistics about running jobs etc using the dispatcher
#
def dispatcher_stats
stats = dispatcher.stats
stats['running_jobs'].each {
|job|
begin
job['paused'] = connect_to_instance( job['port'] ).framework.paused?
rescue
end
}
return stats
end
def options
Arachni::Options.instance
end
#
# Similar to String.to_i but it returns the original object if String is not a number
#
#
# Similar to String.to_i but it returns the original object if String is not a number
#
def to_i( str )
    # non-String input passes through untouched
    return str unless str.is_a?( String )
    # convert only when the string consists entirely of digits
    # (note: the empty string converts to 0, matching the original
    # size-comparison check)
    digits = str[/\d+/].to_s
    digits.size == str.size ? str.to_i : str
end
#
# Prepares form params to be used as options for XMLRPC transmission
#
# @param [Hash] params
#
# @return [Hash] normalized hash
#
def prep_opts( params )
need_to_split = [
'exclude_cookies',
'exclude',
'include'
]
cparams = {}
params.each_pair {
|name, value|
next if [ '_csrf', 'modules', 'plugins' ].include?( name ) || ( value.is_a?( String ) && value.empty?)
value = true if value == 'on'
if name == 'cookiejar'
cparams['cookies'] = Arachni::HTTP.parse_cookiejar( value[:tempfile] )
elsif need_to_split.include?( name ) && value.is_a?( String )
cparams[name] = value.split( "\r\n" )
elsif name == 'redundant'
cparams[name] = []
value.split( "\r\n" ).each {
|rule|
regexp, counter = rule.split( ':', 2 )
cparams[name] << {
'regexp' => regexp,
'count' => counter
}
}
else
cparams[name] = to_i( value )
end
}
if !cparams['audit_links'] && !cparams['audit_forms'] &&
!cparams['audit_cookies'] && !cparams['audit_headers']
cparams['audit_links'] = true
cparams['audit_forms'] = true
cparams['audit_cookies'] = true
end
return cparams
end
def prep_modules( params )
return ['-'] if !params['modules']
mods = params['modules'].keys
return ['*'] if mods.empty?
return mods
end
def prep_plugins( params )
plugins = {}
return plugins if !params['plugins']
params['plugins'].keys.each {
|name|
plugins[name] = params['options'][name] || {}
}
return plugins
end
def helper_instance
begin
@@arachni ||= nil
if !@@arachni
instance = dispatcher.dispatch( HELPER_OWNER )
@@arachni = connect_to_instance( instance['port'] )
end
return @@arachni
rescue
redirect '/dispatcher/error'
end
end
def component_cache_filled?
begin
return @@modules.size + @@plugins.size
rescue
return false
end
end
def fill_component_cache
if !component_cache_filled?
do_shutdown = true
else
do_shutdown = false
end
@@modules ||= helper_instance.framework.lsmod.dup
@@plugins ||= helper_instance.framework.lsplug.dup
# shutdown the helper instance, we got what we wanted
helper_instance.service.shutdown! if do_shutdown
end
#
# Makes sure that all systems are go and populates the session with default values
#
def prep_session
session[:dispatcher_url] ||= 'http://localhost:7331'
ensure_dispatcher
session['opts'] ||= {}
session['opts']['settings'] ||= {
'audit_links' => true,
'audit_forms' => true,
'audit_cookies' => true,
'http_req_limit' => 20,
'user_agent' => 'Arachni/' + Arachni::VERSION
}
session['opts']['modules'] ||= [ '*' ]
session['opts']['plugins'] ||= YAML::dump( {
'content_types' => {},
'healthmap' => {},
'metamodules' => {}
} )
#
# Garbage collector, zombie killer. Reaps idle processes every 5 seconds.
#
@@reaper ||= Thread.new {
while( true )
shutdown_zombies
::IO::select( nil, nil, nil, 5 )
end
}
end
#
# Makes sure that we have a dispatcher, if not it redirects the user to
# an appropriate error page.
#
# @return [Bool] true if alive, redirect if not
#
def ensure_dispatcher
begin
dispatcher.alive?
rescue Exception => e
redirect '/dispatcher/error'
end
end
#
# Saves the report, shuts down the instance and returns the content as HTML
# to be sent back to the user's browser.
#
# @param [Arachni::RPC::XML::Client::Instance] arachni
#
def save_shutdown_and_show( arachni )
report = save_and_shutdown( arachni )
settings.reports.get( 'html', File.basename( report, '.afr' ) )
end
#
# Saves the report and shuts down the instance
#
# @param [Arachni::RPC::XML::Client::Instance] arachni
#
def save_and_shutdown( arachni )
report_path = settings.reports.save( arachni.framework.auditstore )
arachni.service.shutdown!
return report_path
end
#
# Kills all running instances
#
def shutdown_all
settings.log.dispatcher_global_shutdown( env )
dispatcher.stats['running_jobs'].each {
|job|
begin
save_and_shutdown( connect_to_instance( job['port'] ) )
rescue
begin
connect_to_instance( job['port'] ).service.shutdown!
rescue
settings.log.instance_fucker_wont_die( env, port_to_url( job['port'] ) )
next
end
end
settings.log.instance_shutdown( env, port_to_url( job['port'] ) )
}
end
#
# Kills all idle instances
#
# @return [Integer] the number of reaped instances
#
#
# Kills all idle instances
#
# @return [Integer] the number of reaped instances
#
def shutdown_zombies
    reaped = 0
    dispatcher.stats['running_jobs'].each {
        |job|
        begin
            arachni = connect_to_instance( job['port'] )
            begin
                # Only reap instances that are idle AND not our own helper.
                # The original condition was `!job['owner'] != HELPER_OWNER`,
                # which always evaluates to true and therefore reaped the
                # WebUI helper instance as well.
                if !arachni.framework.busy? && job['owner'] != HELPER_OWNER
                    save_and_shutdown( arachni )
                    settings.log.webui_zombie_cleanup( env, port_to_url( job['port'] ) )
                    reaped += 1
                end
            rescue
                # instance died mid-query; nothing to reap
            end
        rescue
            # couldn't connect at all; skip this job
        end
    }
    return reaped
end
get "/" do
prep_session
show :home
end
get "/dispatcher" do
show :dispatcher
end
#
# sets the dispatcher URL
#
post "/dispatcher" do
if !params['url'] || params['url'].empty?
flash[:err] = "URL cannot be empty."
show :dispatcher_error
else
session[:dispatcher_url] = params['url']
settings.log.dispatcher_selected( env, params['url'] )
begin
dispatcher.jobs
settings.log.dispatcher_verified( env, params['url'] )
redirect '/'
rescue
settings.log.dispatcher_error( env, params['url'] )
flash[:err] = "Couldn't find a dispatcher at \"#{escape( params['url'] )}\"."
show :dispatcher_error
end
end
end
#
# shuts down all instances
#
post "/dispatcher/shutdown" do
shutdown_all
redirect '/dispatcher'
end
get '/dispatcher/error' do
show :dispatcher_error
end
#
# starts a scan
#
post "/scan" do
valid = true
begin
URI.parse( params['url'] )
rescue
valid = false
end
if !params['url'] || params['url'].empty?
flash[:err] = "URL cannot be empty."
show :home
elsif !valid
flash[:err] = "Invalid URL."
show :home
else
instance = dispatcher.dispatch( params['url'] )
settings.log.instance_dispatched( env, port_to_url( instance['port'] ) )
settings.log.instance_owner_assigned( env, params['url'] )
arachni = connect_to_instance( instance['port'] )
session['opts']['settings']['url'] = params['url']
session['opts']['settings']['audit_links'] = true if session['opts']['settings']['audit_links']
session['opts']['settings']['audit_forms'] = true if session['opts']['settings']['audit_forms']
session['opts']['settings']['audit_cookies'] = true if session['opts']['settings']['audit_cookies']
session['opts']['settings']['audit_headers'] = true if session['opts']['settings']['audit_headers']
opts = prep_opts( session['opts']['settings'] )
arachni.opts.set( opts )
arachni.modules.load( session['opts']['modules'] )
arachni.plugins.load( YAML::load( session['opts']['plugins'] ) )
arachni.framework.run
settings.log.scan_started( env, params['url'] )
redirect '/instance/' + instance['port'].to_s
end
end
get "/modules" do
fill_component_cache
prep_session
show :modules, true
end
#
# sets modules
#
post "/modules" do
session['opts']['modules'] = prep_modules( params )
flash.now[:notice] = "Modules updated."
show :modules, true
end
get "/plugins" do
fill_component_cache
prep_session
erb :plugins, { :layout => true }
end
#
# sets plugins
#
post "/plugins" do
session['opts']['plugins'] = YAML::dump( prep_plugins( params ) )
flash.now[:notice] = "Plugins updated."
show :plugins, true
end
get "/settings" do
prep_session
erb :settings, { :layout => true }
end
#
# sets general framework settings
#
post "/settings" do
if session['opts']['settings']['url']
url = session['opts']['settings']['url'].dup
session['opts']['settings'] = prep_opts( params )
session['opts']['settings']['url'] = url
end
flash.now[:notice] = "Settings updated."
show :settings, true
end
get "/instance/:port" do
begin
arachni = connect_to_instance( params[:port] )
erb :instance, { :layout => true }, :paused => arachni.framework.paused?, :shutdown => false
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has been shutdown."
erb :instance, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
get "/instance/:port/output" do
begin
arachni = connect_to_instance( params[:port] )
if arachni.framework.busy?
OutputStream.new( arachni, 38 )
else
settings.log.instance_shutdown( env, port_to_url( params[:port] ) )
save_shutdown_and_show( arachni )
end
rescue Errno::ECONNREFUSED
"The server has been shut down."
end
end
post "/*/:port/pause" do
arachni = connect_to_instance( params[:port] )
begin
arachni.framework.pause!
settings.log.instance_paused( env, port_to_url( params[:port] ) )
flash.now[:notice] = "Instance on port #{params[:port]} will pause as soon as the current page is audited."
erb params[:splat][0].to_sym, { :layout => true }, :paused => arachni.framework.paused?, :shutdown => false, :stats => dispatcher_stats
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has been shutdown."
erb params[:splat][0].to_sym, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
post "/*/:port/resume" do
arachni = connect_to_instance( params[:port] )
begin
arachni.framework.resume!
settings.log.instance_resumed( env, port_to_url( params[:port] ) )
flash.now[:ok] = "Instance on port #{params[:port]} resumes."
erb params[:splat][0].to_sym, { :layout => true }, :paused => arachni.framework.paused?, :shutdown => false, :stats => dispatcher_stats
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has been shutdown."
erb params[:splat][0].to_sym, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
post "/*/:port/shutdown" do
arachni = connect_to_instance( params[:port] )
begin
arachni.framework.busy?
settings.log.instance_shutdown( env, port_to_url( params[:port] ) )
begin
save_shutdown_and_show( arachni )
rescue
flash.now[:ok] = "Instance on port #{params[:port]} has been shutdown."
show params[:splat][0].to_sym
ensure
arachni.service.shutdown!
end
rescue
flash.now[:notice] = "Instance on port #{params[:port]} has already been shutdown."
erb params[:splat][0].to_sym, { :layout => true }, :shutdown => true, :stats => dispatcher_stats
end
end
get "/reports" do
reports = []
settings.reports.all.each {
|report|
name = File.basename( report, '.afr' )
host, date = name.split( ':', 2 )
reports << {
'host' => host,
'date' => date,
'name' => name
}
}
erb :reports, { :layout => true }, :reports => reports,
:available => settings.reports.available
end
get '/reports/formats' do
erb :report_formats, { :layout => true }, :reports => settings.reports.available
end
post '/reports/delete' do
settings.reports.delete_all
settings.log.reports_deleted( env )
redirect '/reports'
end
post '/report/:name/delete' do
settings.reports.delete( params[:name] )
settings.log.report_deleted( env, params[:name] )
redirect '/reports'
end
get '/report/:name.:type' do
settings.log.report_converted( env, params[:name] + '.' + params[:type] )
content_type( params[:type], :default => 'application/octet-stream' )
settings.reports.get( params[:type], params[:name] )
end
get '/log' do
erb :log, { :layout => true }, :entries => settings.log.entry.all.reverse
end
# override run! using this patch: https://github.com/sinatra/sinatra/pull/132
def self.run!( options = {} )
set options
handler = detect_rack_handler
handler_name = handler.name.gsub( /.*::/, '' )
# handler specific options use the lower case handler name as hash key, if present
handler_opts = options[handler_name.downcase.to_sym] || {}
puts "== Sinatra/#{Sinatra::VERSION} has taken the stage " +
"on #{port} for #{environment} with backup from #{handler_name}" unless handler_name =~/cgi/i
handler.run self, handler_opts.merge( :Host => bind, :Port => port ) do |server|
[ :INT, :TERM ].each { |sig| trap( sig ) { quit!( server, handler_name ) } }
set :running, true
end
rescue Errno::EADDRINUSE => e
puts "== Someone is already performing on port #{port}!"
end
def self.prep_webrick
    # Builds the WEBrick SSL option hash from the 'server' section of
    # conf/webui.yaml. pkey/cert stay nil when not configured.
    if @@conf['ssl']['server']['key']
        pkey = ::OpenSSL::PKey::RSA.new( File.read( @@conf['ssl']['server']['key'] ) )
    end
    if @@conf['ssl']['server']['cert']
        cert = ::OpenSSL::X509::Certificate.new( File.read( @@conf['ssl']['server']['cert'] ) )
    end
    # NOTE(review): this check reads @@conf['ssl']['key'] etc. directly,
    # while every other lookup in this method goes through
    # @@conf['ssl']['server']; if the YAML only defines 'server'/'client'
    # sub-sections this branch can never trigger and client verification
    # always stays at VERIFY_NONE -- confirm intended config schema.
    if @@conf['ssl']['key'] || @@conf['ssl']['cert'] || @@conf['ssl']['ca']
        verification = OpenSSL::SSL::VERIFY_PEER | OpenSSL::SSL::VERIFY_FAIL_IF_NO_PEER_CERT
    else
        verification = ::OpenSSL::SSL::VERIFY_NONE
    end
    return {
        :SSLEnable => @@conf['ssl']['server']['enable'] || false,
        :SSLVerifyClient => verification,
        # default the certificate CN to this host's name
        :SSLCertName => [ [ "CN", Arachni::Options.instance.server || ::WEBrick::Utils::getservername ] ],
        :SSLCertificate => cert,
        :SSLPrivateKey => pkey,
        :SSLCACertificateFile => @@conf['ssl']['server']['ca']
    }
end
run! :host => Arachni::Options.instance.server || ::WEBrick::Utils::getservername,
:port => Arachni::Options.instance.rpc_port || 4567,
:server => %w[ webrick ],
:webrick => prep_webrick
at_exit do
settings.log.webui_shutdown
begin
# shutdown our helper instance
@@arachni ||= nil
@@arachni.service.shutdown! if @@arachni
rescue
end
end
end
end
end
end
|
module Uspec
    # Current gem release version.
    VERSION = '0.2.2'
end
Bumping version to 0.2.3.
module Uspec
VERSION = '0.2.3'
end
|
require 'nokogiri'
require 'pry'
module Vapor
    class Crawler
        # Returns the game elements from a Steam user's games list.
        #
        # @param user [#profile_url] object exposing a Steam profile URL
        #
        # assumes the fetched XML has a <gamesList><games> structure --
        # TODO confirm against a live Steam community response
        def games_for(user)
            doc = fetch("#{user.profile_url}games")
            doc.gamesList.games.elements
        end

        private

        # Fetches the URL's XML representation (?xml=1) and wraps it in
        # Nokogiri's "Slop" decorator so nodes are reachable as methods.
        def fetch(url)
            doc = client.get("#{url}?xml=1").body
            Nokogiri::Slop(doc)
        end

        # memoized HTTP client (HTTPClient gem -- not stdlib)
        def client
            @client ||= HTTPClient.new
        end
    end
end
removing pry call
require 'nokogiri'
module Vapor
class Crawler
def games_for(user)
doc = fetch("#{user.profile_url}games")
doc.gamesList.games.elements
end
private
def fetch(url)
doc = client.get("#{url}?xml=1").body
Nokogiri::Slop(doc)
end
def client
@client ||= HTTPClient.new
end
end
end
|
Loomio::Version::PATCH = 64
bump version
Loomio::Version::PATCH = 65 |
Loomio::Version::PATCH = 17
bump version
Loomio::Version::PATCH = 18 |
Loomio::Version::PATCH = 10
bump version
Loomio::Version::PATCH = 11 |
module Vimdb
VERSION = '0.3.0'
end
Bumped to version 0.4.0
module Vimdb
VERSION = '0.4.0'
end
|
#--
# This file generated automatically. Do not edit!
#++
module Vips
class Image
##
# :singleton-method: system
# :call-seq:
# system(cmd_format) =>
#
# Run an external command.
#
# Input:
# [cmd_format] Command to run, input gchararray
#
# Options:
# [in] Array of input images, input VipsArrayImage
# [in_format] Format for input filename, input gchararray
# [out_format] Format for output filename, input gchararray
#
# Output options:
# [out] Output image, output VipsImage
# [log] Command log, output gchararray
##
# :method: add
# :call-seq:
# add(right) => out
#
# Add two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: subtract
# :call-seq:
# subtract(right) => out
#
# Subtract two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: multiply
# :call-seq:
# multiply(right) => out
#
# Multiply two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: divide
# :call-seq:
# divide(right) => out
#
# Divide two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: relational
# :call-seq:
# relational(right, relational) => out
#
# Relational operation on two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [relational] relational to perform, input VipsOperationRelational
#
# Output:
# [out] Output image, output VipsImage
##
# :method: remainder
# :call-seq:
# remainder(right) => out
#
# Remainder after integer division of two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: boolean
# :call-seq:
# boolean(right, boolean) => out
#
# Boolean operation on two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [boolean] boolean to perform, input VipsOperationBoolean
#
# Output:
# [out] Output image, output VipsImage
##
# :method: math2
# :call-seq:
# math2(right, math2) => out
#
# Binary math operations.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [math2] math to perform, input VipsOperationMath2
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complex2
# :call-seq:
# complex2(right, cmplx) => out
#
# Complex binary operations on two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [cmplx] binary complex operation to perform, input VipsOperationComplex2
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complexform
# :call-seq:
# complexform(right) => out
#
# Form a complex image from two real images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :singleton-method: sum
# :call-seq:
# sum(in) => out
#
# Sum an array of images.
#
# Input:
# [in] Array of input images, input VipsArrayImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: invert
# :call-seq:
# invert() => out
#
# Invert an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: linear
# :call-seq:
# linear(a, b) => out
#
# Calculate (a * in + b).
#
# Input:
# [a] Multiply by this, input VipsArrayDouble
# [b] Add this, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output should be uchar, input gboolean
##
# :method: math
# :call-seq:
# math(math) => out
#
# Apply a math operation to an image.
#
# Input:
# [math] math to perform, input VipsOperationMath
#
# Output:
# [out] Output image, output VipsImage
##
# :method: abs
# :call-seq:
# abs() => out
#
# Absolute value of an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sign
# :call-seq:
# sign() => out
#
# Unit vector of pixel.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: round
# :call-seq:
# round(round) => out
#
# Perform a round function on an image.
#
# Input:
# [round] rounding operation to perform, input VipsOperationRound
#
# Output:
# [out] Output image, output VipsImage
##
# :method: relational_const
# :call-seq:
# relational_const(c, relational) => out
#
# Relational operations against a constant.
#
# Input:
# [c] Array of constants, input VipsArrayDouble
# [relational] relational to perform, input VipsOperationRelational
#
# Output:
# [out] Output image, output VipsImage
##
# :method: remainder_const
# :call-seq:
# remainder_const(c) => out
#
# Remainder after integer division of an image and a constant.
#
# Input:
# [c] Array of constants, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
##
# :method: boolean_const
# :call-seq:
# boolean_const(c, boolean) => out
#
# Boolean operations against a constant.
#
# Input:
# [c] Array of constants, input VipsArrayDouble
# [boolean] boolean to perform, input VipsOperationBoolean
#
# Output:
# [out] Output image, output VipsImage
##
# :method: math2_const
# :call-seq:
# math2_const(c, math2) => out
#
# Pow( @in, @c ).
#
# Input:
# [c] Array of constants, input VipsArrayDouble
# [math2] math to perform, input VipsOperationMath2
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complex
# :call-seq:
# complex(cmplx) => out
#
# Perform a complex operation on an image.
#
# Input:
# [cmplx] complex to perform, input VipsOperationComplex
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complexget
# :call-seq:
# complexget(get) => out
#
# Get a component from a complex image.
#
# Input:
# [get] complex to perform, input VipsOperationComplexget
#
# Output:
# [out] Output image, output VipsImage
##
# :method: avg
# :call-seq:
# avg() => out
#
# Find image average.
#
# Output:
# [out] Output value, output gdouble
##
# :method: min
# :call-seq:
# min() => out
#
# Find image minimum.
#
# Output:
# [out] Output value, output gdouble
#
# Options:
# [size] Number of minimum values to find, input gint
#
# Output options:
# [x] Horizontal position of minimum, output gint
# [y] Vertical position of minimum, output gint
# [out_array] Array of output values, output VipsArrayDouble
# [x_array] Array of horizontal positions, output VipsArrayInt
# [y_array] Array of vertical positions, output VipsArrayInt
##
# :method: max
# :call-seq:
# max() => out
#
# Find image maximum.
#
# Output:
# [out] Output value, output gdouble
#
# Options:
# [size] Number of maximum values to find, input gint
#
# Output options:
# [x] Horizontal position of maximum, output gint
# [y] Vertical position of maximum, output gint
# [out_array] Array of output values, output VipsArrayDouble
# [x_array] Array of horizontal positions, output VipsArrayInt
# [y_array] Array of vertical positions, output VipsArrayInt
##
# :method: deviate
# :call-seq:
# deviate() => out
#
# Find image standard deviation.
#
# Output:
# [out] Output value, output gdouble
##
# :method: stats
# :call-seq:
# stats() => out
#
# Find image average.
#
# Output:
# [out] Output array of statistics, output VipsImage
##
# :method: hist_find
# :call-seq:
# hist_find() => out
#
# Find image histogram.
#
# Output:
# [out] Output histogram, output VipsImage
#
# Options:
# [band] Find histogram of band, input gint
##
# :method: hist_find_ndim
# :call-seq:
# hist_find_ndim() => out
#
# Find n-dimensional image histogram.
#
# Output:
# [out] Output histogram, output VipsImage
#
# Options:
# [bins] Number of bins in each dimension, input gint
##
# :method: hist_find_indexed
# :call-seq:
# hist_find_indexed(index) => out
#
# Find indexed image histogram.
#
# Input:
# [index] Index image, input VipsImage
#
# Output:
# [out] Output histogram, output VipsImage
##
# :method: hough_line
# :call-seq:
# hough_line() => out
#
# Find hough line transform.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [width] horizontal size of parameter space, input gint
# [height] Vertical size of parameter space, input gint
##
# :method: hough_circle
# :call-seq:
# hough_circle() => out
#
# Find hough circle transform.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [scale] Scale down dimensions by this factor, input gint
# [min_radius] Smallest radius to search for, input gint
# [max_radius] Largest radius to search for, input gint
##
# :method: project
# :call-seq:
# project() => columns, rows
#
# Find image projections.
#
# Output:
# [columns] Sums of columns, output VipsImage
# [rows] Sums of rows, output VipsImage
##
# :method: profile
# :call-seq:
# profile() => columns, rows
#
# Find image profiles.
#
# Output:
# [columns] First non-zero pixel in column, output VipsImage
# [rows] First non-zero pixel in row, output VipsImage
##
# :method: measure
# :call-seq:
# measure(h, v) => out
#
# Measure a set of patches on a colour chart.
#
# Input:
# [h] Number of patches across chart, input gint
# [v] Number of patches down chart, input gint
#
# Output:
# [out] Output array of statistics, output VipsImage
#
# Options:
# [left] Left edge of extract area, input gint
# [top] Top edge of extract area, input gint
# [width] Width of extract area, input gint
# [height] Height of extract area, input gint
##
# :method: getpoint
# :call-seq:
# getpoint(x, y) => out_array
#
# Read a point from an image.
#
# Input:
# [x] Point to read, input gint
# [y] Point to read, input gint
#
# Output:
# [out_array] Array of output values, output VipsArrayDouble
##
# :method: copy
# :call-seq:
# copy() => out
#
# Copy an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [bands] Number of bands in image, input gint
# [format] Pixel format in image, input VipsBandFormat
# [coding] Pixel coding, input VipsCoding
# [interpretation] Pixel interpretation, input VipsInterpretation
# [xres] Horizontal resolution in pixels/mm, input gdouble
# [yres] Vertical resolution in pixels/mm, input gdouble
# [xoffset] Horizontal offset of origin, input gint
# [yoffset] Vertical offset of origin, input gint
##
# :method: tilecache
# :call-seq:
# tilecache() => out
#
# Cache an image as a set of tiles.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [tile_width] Tile width in pixels, input gint
# [tile_height] Tile height in pixels, input gint
# [max_tiles] Maximum number of tiles to cache, input gint
# [access] Expected access pattern, input VipsAccess
# [threaded] Allow threaded access, input gboolean
# [persistent] Keep cache between evaluations, input gboolean
##
# :method: linecache
# :call-seq:
# linecache() => out
#
# Cache an image as a set of lines.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [tile_height] Tile height in pixels, input gint
# [access] Expected access pattern, input VipsAccess
# [threaded] Allow threaded access, input gboolean
# [persistent] Keep cache between evaluations, input gboolean
##
# :method: sequential
# :call-seq:
# sequential() => out
#
# Check sequential access.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [trace] trace pixel requests, input gboolean
# [tile_height] Tile height in pixels, input gint
# [access] Expected access pattern, input VipsAccess
##
# :method: cache
# :call-seq:
# cache() => out
#
# Cache an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [tile_width] Tile width in pixels, input gint
# [tile_height] Tile height in pixels, input gint
# [max_tiles] Maximum number of tiles to cache, input gint
##
# :method: embed
# :call-seq:
# embed(x, y, width, height) => out
#
# Embed an image in a larger image.
#
# Input:
# [x] Left edge of input in output, input gint
# [y] Top edge of input in output, input gint
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [extend] How to generate the extra pixels, input VipsExtend
# [background] Colour for background pixels, input VipsArrayDouble
##
# :method: flip
# :call-seq:
# flip(direction) => out
#
# Flip an image.
#
# Input:
# [direction] Direction to flip image, input VipsDirection
#
# Output:
# [out] Output image, output VipsImage
##
# :method: insert
# :call-seq:
# insert(sub, x, y) => out
#
# Insert image @sub into @main at @x, @y.
#
# Input:
# [sub] Sub-image to insert into main image, input VipsImage
# [x] Left edge of sub in main, input gint
# [y] Top edge of sub in main, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [expand] Expand output to hold all of both inputs, input gboolean
# [background] Colour for new pixels, input VipsArrayDouble
##
# :method: join
# :call-seq:
# join(in2, direction) => out
#
# Join a pair of images.
#
# Input:
# [in2] Second input image, input VipsImage
# [direction] Join left-right or up-down, input VipsDirection
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [align] Align on the low, centre or high coordinate edge, input VipsAlign
# [expand] Expand output to hold all of both inputs, input gboolean
# [shim] Pixels between images, input gint
# [background] Colour for new pixels, input VipsArrayDouble
##
# :method: extract_area
# :call-seq:
# extract_area(left, top, width, height) => out
#
# Extract an area from an image.
#
# Input:
# [left] Left edge of extract area, input gint
# [top] Top edge of extract area, input gint
# [width] Width of extract area, input gint
# [height] Height of extract area, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: extract_area
# :call-seq:
# extract_area(left, top, width, height) => out
#
# Extract an area from an image.
#
# Input:
# [left] Left edge of extract area, input gint
# [top] Top edge of extract area, input gint
# [width] Width of extract area, input gint
# [height] Height of extract area, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: extract_band
# :call-seq:
# extract_band(band) => out
#
# Extract band from an image.
#
# Input:
# [band] Band to extract, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [n] Number of bands to extract, input gint
##
# :singleton-method: bandrank
# :call-seq:
# bandrank(in) => out
#
# Band-wise rank of a set of images.
#
# Input:
# [in] Array of input images, input VipsArrayImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [index] Select this band element from sorted list, input gint
##
# :method: bandmean
# :call-seq:
# bandmean() => out
#
# Band-wise average.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: bandbool
# :call-seq:
# bandbool(boolean) => out
#
# Boolean operation across image bands.
#
# Input:
# [boolean] boolean to perform, input VipsOperationBoolean
#
# Output:
# [out] Output image, output VipsImage
##
# :method: replicate
# :call-seq:
# replicate(across, down) => out
#
# Replicate an image.
#
# Input:
# [across] Repeat this many times horizontally, input gint
# [down] Repeat this many times vertically, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: cast
# :call-seq:
# cast(format) => out
#
# Cast an image.
#
# Input:
# [format] Format to cast to, input VipsBandFormat
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [shift] Shift integer values up and down, input gboolean
##
# :method: rot
# :call-seq:
# rot(angle) => out
#
# Rotate an image.
#
# Input:
# [angle] Angle to rotate image, input VipsAngle
#
# Output:
# [out] Output image, output VipsImage
##
# :method: rot45
# :call-seq:
# rot45() => out
#
# Rotate an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [angle] Angle to rotate image, input VipsAngle45
##
# :method: autorot
# :call-seq:
# autorot() => out
#
# Autorotate image by exif tag.
#
# Output:
# [out] Output image, output VipsImage
#
# Output options:
# [angle] Angle image was rotated by, output VipsAngle
##
# :method: recomb
# :call-seq:
# recomb(m) => out
#
# Linear recombination with matrix.
#
# Input:
# [m] matrix of coefficients, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: bandfold
# :call-seq:
# bandfold() => out
#
# Fold up x axis into bands.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [factor] Fold by this factor, input gint
##
# :method: bandunfold
# :call-seq:
# bandunfold() => out
#
# Unfold image bands into x axis.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [factor] Unfold by this factor, input gint
##
# :method: flatten
# :call-seq:
# flatten() => out
#
# Flatten alpha out of an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [background] Background value, input VipsArrayDouble
# [max_alpha] Maximum value of alpha channel, input gdouble
##
# :method: premultiply
# :call-seq:
# premultiply() => out
#
# Premultiply image alpha.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [max_alpha] Maximum value of alpha channel, input gdouble
##
# :method: unpremultiply
# :call-seq:
# unpremultiply() => out
#
# Unpremultiply image alpha.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [max_alpha] Maximum value of alpha channel, input gdouble
##
# :method: grid
# :call-seq:
# grid(tile_height, across, down) => out
#
# Grid an image.
#
# Input:
# [tile_height] chop into tiles this high, input gint
# [across] number of tiles across, input gint
# [down] number of tiles down, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: scale
# :call-seq:
# scale() => out
#
# Scale an image to uchar.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [log] Log scale, input gboolean
# [exp] Exponent for log scale, input gdouble
##
# :method: wrap
# :call-seq:
# wrap() => out
#
# Wrap image origin.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [x] Left edge of input in output, input gint
# [y] Top edge of input in output, input gint
##
# :method: zoom
# :call-seq:
# zoom(xfac, yfac) => out
#
# Zoom an image.
#
# Input:
# [xfac] Horizontal zoom factor, input gint
# [yfac] Vertical zoom factor, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: subsample
# :call-seq:
# subsample(xfac, yfac) => out
#
# Subsample an image.
#
# Input:
# [xfac] Horizontal subsample factor, input gint
# [yfac] Vertical subsample factor, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [point] Point sample, input gboolean
##
# :method: msb
# :call-seq:
# msb() => out
#
# Pick most-significant byte from an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [band] Band to msb, input gint
##
# :method: byteswap
# :call-seq:
# byteswap() => out
#
# Byteswap an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: falsecolour
# :call-seq:
# falsecolour() => out
#
# False-colour an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: gamma
# :call-seq:
# gamma() => out
#
# Gamma an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [exponent] Gamma factor, input gdouble
##
# :singleton-method: black
# :call-seq:
# black(width, height) => out
#
# Make a black image.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [bands] Number of bands in image, input gint
##
# :singleton-method: gaussnoise
# :call-seq:
# gaussnoise(width, height) => out
#
# Make a gaussnoise image.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [mean] Mean of pixels in generated image, input gdouble
# [sigma] Standard deviation of pixels in generated image, input gdouble
##
# :singleton-method: text
# :call-seq:
# text(text) => out
#
# Make a text image.
#
# Input:
# [text] Text to render, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [font] Font to render width, input gchararray
# [width] Maximum image width in pixels, input gint
# [align] Align on the low, centre or high edge, input VipsAlign
# [dpi] DPI to render at, input gint
# [spacing] Line spacing, input gint
##
# :singleton-method: xyz
# :call-seq:
# xyz(width, height) => out
#
# Make an image where pixel values are coordinates.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [csize] Size of third dimension, input gint
# [dsize] Size of fourth dimension, input gint
# [esize] Size of fifth dimension, input gint
##
# :singleton-method: gaussmat
# :call-seq:
# gaussmat(sigma, min_ampl) => out
#
# Make a gaussian image.
#
# Input:
# [sigma] Sigma of Gaussian, input gdouble
# [min_ampl] Minimum amplitude of Gaussian, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [separable] Generate separable Gaussian, input gboolean
# [precision] Generate with this precision, input VipsPrecision
##
# :singleton-method: logmat
# :call-seq:
# logmat(sigma, min_ampl) => out
#
# Make a laplacian of gaussian image.
#
# Input:
# [sigma] Radius of Logmatian, input gdouble
# [min_ampl] Minimum amplitude of Logmatian, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [separable] Generate separable Logmatian, input gboolean
# [precision] Generate with this precision, input VipsPrecision
##
# :singleton-method: eye
# :call-seq:
# eye(width, height) => out
#
# Make an image showing the eye's spatial response.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [factor] Maximum spatial frequency, input gdouble
##
# :singleton-method: grey
# :call-seq:
# grey(width, height) => out
#
# Make a grey ramp image.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
##
# :singleton-method: zone
# :call-seq:
# zone(width, height) => out
#
# Make a zone plate.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
##
# :singleton-method: sines
# :call-seq:
# sines(width, height) => out
#
# Make a 2d sine wave.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [hfreq] Horizontal spatial frequency, input gdouble
# [vfreq] Vertical spatial frequency, input gdouble
##
# :singleton-method: mask_ideal
# :call-seq:
# mask_ideal(width, height, frequency_cutoff) => out
#
# Make an ideal filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :singleton-method: mask_ideal_ring
# :call-seq:
# mask_ideal_ring(width, height, frequency_cutoff, ringwidth) => out
#
# Make an ideal ring filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
# [ringwidth] Ringwidth, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :singleton-method: mask_ideal_band
# :call-seq:
# mask_ideal_band(width, height, frequency_cutoff_x, frequency_cutoff_y, radius) => out
#
# Make an ideal band filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff_x] Frequency cutoff x, input gdouble
# [frequency_cutoff_y] Frequency cutoff y, input gdouble
# [radius] radius of circle, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_butterworth
# :call-seq:
# mask_butterworth(width, height, order, frequency_cutoff, amplitude_cutoff) => out
#
# Make a butterworth filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [order] Filter order, input gdouble
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_butterworth_ring
# :call-seq:
# mask_butterworth_ring(width, height, order, frequency_cutoff, amplitude_cutoff, ringwidth) => out
#
# Make a butterworth ring filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [order] Filter order, input gdouble
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
# [ringwidth] Ringwidth, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_butterworth_band
# :call-seq:
# mask_butterworth_band(width, height, order, frequency_cutoff_x, frequency_cutoff_y, radius, amplitude_cutoff) => out
#
# Make a butterworth_band filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [order] Filter order, input gdouble
# [frequency_cutoff_x] Frequency cutoff x, input gdouble
# [frequency_cutoff_y] Frequency cutoff y, input gdouble
# [radius] radius of circle, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [nodc] Remove DC component, input gboolean
##
# :singleton-method: mask_gaussian
# :call-seq:
# mask_gaussian(width, height, frequency_cutoff, amplitude_cutoff) => out
#
# Make a gaussian filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :singleton-method: mask_gaussian_ring
# :call-seq:
# mask_gaussian_ring(width, height, frequency_cutoff, amplitude_cutoff, ringwidth) => out
#
# Make a gaussian ring filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
# [ringwidth] Ringwidth, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_gaussian_band
# :call-seq:
# mask_gaussian_band(width, height, frequency_cutoff_x, frequency_cutoff_y, radius, amplitude_cutoff) => out
#
# Make a gaussian filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff_x] Frequency cutoff x, input gdouble
# [frequency_cutoff_y] Frequency cutoff y, input gdouble
# [radius] radius of circle, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_fractal
# :call-seq:
# mask_fractal(width, height, fractal_dimension) => out
#
# Make fractal filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [fractal_dimension] Fractal dimension, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :method: buildlut
# :call-seq:
# buildlut() => out
#
# Build a look-up table.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: invertlut
# :call-seq:
# invertlut() => out
#
# Build an inverted look-up table.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [size] LUT size to generate, input gint
##
# :singleton-method: tonelut
# :call-seq:
# tonelut() => out
#
# Build a look-up table.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [in_max] Size of LUT to build, input gint
# [out_max] Maximum value in output LUT, input gint
# [Lb] Lowest value in output, input gdouble
# [Lw] Highest value in output, input gdouble
# [Ps] Position of shadow, input gdouble
# [Pm] Position of mid-tones, input gdouble
# [Ph] Position of highlights, input gdouble
# [S] Adjust shadows by this much, input gdouble
# [M] Adjust mid-tones by this much, input gdouble
# [H] Adjust highlights by this much, input gdouble
##
# :singleton-method: identity
# :call-seq:
# identity() => out
#
# Make a 1d image where pixel values are indexes.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [bands] Number of bands in LUT, input gint
# [ushort] Create a 16-bit LUT, input gboolean
# [size] Size of 16-bit LUT, input gint
##
# :singleton-method: fractsurf
# :call-seq:
# fractsurf(width, height, fractal_dimension) => out
#
# Make a fractal surface.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [fractal_dimension] Fractal dimension, input gdouble
#
# Output:
# [out] Output image, output VipsImage
##
# :singleton-method: radload
# :call-seq:
# radload(filename) => out
#
# Load a radiance image from a file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: ppmload
# :call-seq:
# ppmload(filename) => out
#
# Load ppm from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: csvload
# :call-seq:
# csvload(filename) => out
#
# Load csv from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [skip] Skip this many lines at the start of the file, input gint
# [lines] Read this many lines from the file, input gint
# [whitespace] Set of whitespace characters, input gchararray
# [separator] Set of separator characters, input gchararray
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: matrixload
# :call-seq:
# matrixload(filename) => out
#
# Load matrix from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: analyzeload
# :call-seq:
# analyzeload(filename) => out
#
# Load an analyze6 image.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: rawload
# :call-seq:
# rawload(filename, width, height, bands) => out
#
# Load raw data from a file.
#
# Input:
# [filename] Filename to load from, input gchararray
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [bands] Number of bands in image, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [offset] Offset in bytes from start of file, input guint64
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: vipsload
# :call-seq:
# vipsload(filename) => out
#
# Load vips from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: pngload
# :call-seq:
# pngload(filename) => out
#
# Load png from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: pngload_buffer
# :call-seq:
# pngload_buffer(buffer) => out
#
# Load png from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: matload
# :call-seq:
# matload(filename) => out
#
# Load mat from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: jpegload
# :call-seq:
# jpegload(filename) => out
#
# Load jpeg from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [shrink] Shrink factor on load, input gint
# [fail] Fail on first warning, input gboolean
# [autorotate] Rotate image using exif orientation, input gboolean
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: jpegload_buffer
# :call-seq:
# jpegload_buffer(buffer) => out
#
# Load jpeg from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [shrink] Shrink factor on load, input gint
# [fail] Fail on first warning, input gboolean
# [autorotate] Rotate image using exif orientation, input gboolean
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: webpload
# :call-seq:
# webpload(filename) => out
#
# Load webp from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: webpload_buffer
# :call-seq:
# webpload_buffer(buffer) => out
#
# Load webp from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: tiffload
# :call-seq:
# tiffload(filename) => out
#
# Load tiff from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [page] Load this page from the image, input gint
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: tiffload_buffer
# :call-seq:
# tiffload_buffer(buffer) => out
#
# Load tiff from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [page] Load this page from the image, input gint
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: openslideload
# :call-seq:
# openslideload(filename) => out
#
# Load file with openslide.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [level] Load this level from the file, input gint
# [autocrop] Crop to image bounds, input gboolean
# [associated] Load this associated image, input gchararray
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: magickload
# :call-seq:
# magickload(filename) => out
#
# Load file with imagemagick.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [all_frames] Read all frames from an image, input gboolean
# [density] Canvas resolution for rendering vector formats like SVG, input gchararray
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: magickload_buffer
# :call-seq:
# magickload_buffer(buffer) => out
#
# Load buffer with imagemagick.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [all_frames] Read all frames from an image, input gboolean
# [density] Canvas resolution for rendering vector formats like SVG, input gchararray
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: fitsload
# :call-seq:
# fitsload(filename) => out
#
# Load a fits image.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: openexrload
# :call-seq:
# openexrload(filename) => out
#
# Load an openexr image.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :method: radsave
# :call-seq:
# radsave(filename) =>
#
# Save image to radiance file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: ppmsave
# :call-seq:
# ppmsave(filename) =>
#
# Save image to ppm file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [ascii] save as ascii, input gboolean
# [squash] save as one bit, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: csvsave
# :call-seq:
# csvsave(filename) =>
#
# Save image to csv file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [separator] Separator characters, input gchararray
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: matrixsave
# :call-seq:
# matrixsave(filename) =>
#
# Save image to matrix file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: matrixprint
# :call-seq:
# matrixprint() =>
#
# Print matrix.
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: rawsave
# :call-seq:
# rawsave(filename) =>
#
# Save image to raw file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: rawsave_fd
# :call-seq:
# rawsave_fd(fd) =>
#
# Write raw image to file descriptor.
#
# Input:
# [fd] File descriptor to write to, input gint
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: vipssave
# :call-seq:
# vipssave(filename) =>
#
# Save image to vips file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: dzsave
# :call-seq:
# dzsave(filename) =>
#
# Save image to deep zoom format.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [layout] Directory layout, input VipsForeignDzLayout
# [suffix] Filename suffix for tiles, input gchararray
# [overlap] Tile overlap in pixels, input gint
# [tile_size] Tile size in pixels, input gint
# [centre] Center image in tile, input gboolean
# [depth] Pyramid depth, input VipsForeignDzDepth
# [angle] Rotate image during save, input VipsAngle
# [container] Pyramid container type, input VipsForeignDzContainer
# [properties] Write a properties file to the output directory, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: pngsave
# :call-seq:
# pngsave(filename) =>
#
# Save image to png file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [compression] Compression factor, input gint
# [interlace] Interlace image, input gboolean
# [profile] ICC profile to embed, input gchararray
# [filter] libpng row filter flag(s), input VipsForeignPngFilter
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: pngsave_buffer
# :call-seq:
# pngsave_buffer() => buffer
#
# Save image to png buffer.
#
# Output:
# [buffer] Buffer to save to, output VipsBlob
#
# Options:
# [compression] Compression factor, input gint
# [interlace] Interlace image, input gboolean
# [profile] ICC profile to embed, input gchararray
# [filter] libpng row filter flag(s), input VipsForeignPngFilter
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: jpegsave
# :call-seq:
# jpegsave(filename) =>
#
# Save image to jpeg file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [Q] Q factor, input gint
# [profile] ICC profile to embed, input gchararray
# [optimize_coding] Compute optimal Huffman coding tables, input gboolean
# [interlace] Generate an interlaced (progressive) jpeg, input gboolean
# [no_subsample] Disable chroma subsample, input gboolean
# [trellis_quant] Apply trellis quantisation to each 8x8 block, input gboolean
# [overshoot_deringing] Apply overshooting to samples with extreme values, input gboolean
# [optimize_scans] Split the spectrum of DCT coefficients into separate scans, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: jpegsave_buffer
# :call-seq:
# jpegsave_buffer() => buffer
#
# Save image to jpeg buffer.
#
# Output:
# [buffer] Buffer to save to, output VipsBlob
#
# Options:
# [Q] Q factor, input gint
# [profile] ICC profile to embed, input gchararray
# [optimize_coding] Compute optimal Huffman coding tables, input gboolean
# [interlace] Generate an interlaced (progressive) jpeg, input gboolean
# [no_subsample] Disable chroma subsample, input gboolean
# [trellis_quant] Apply trellis quantisation to each 8x8 block, input gboolean
# [overshoot_deringing] Apply overshooting to samples with extreme values, input gboolean
# [optimize_scans] Split the spectrum of DCT coefficients into separate scans, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: jpegsave_mime
# :call-seq:
# jpegsave_mime() =>
#
# Save image to jpeg mime.
#
# Options:
# [Q] Q factor, input gint
# [profile] ICC profile to embed, input gchararray
# [optimize_coding] Compute optimal Huffman coding tables, input gboolean
# [interlace] Generate an interlaced (progressive) jpeg, input gboolean
# [no_subsample] Disable chroma subsample, input gboolean
# [trellis_quant] Apply trellis quantisation to each 8x8 block, input gboolean
# [overshoot_deringing] Apply overshooting to samples with extreme values, input gboolean
# [optimize_scans] Split the spectrum of DCT coefficients into separate scans, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: webpsave
# :call-seq:
# webpsave(filename) =>
#
# Save image to webp file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [Q] Q factor, input gint
# [lossless] enable lossless compression, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: webpsave_buffer
# :call-seq:
# webpsave_buffer() => buffer
#
# Save image to webp buffer.
#
# Output:
# [buffer] Buffer to save to, output VipsBlob
#
# Options:
# [Q] Q factor, input gint
# [lossless] enable lossless compression, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: tiffsave
# :call-seq:
# tiffsave(filename) =>
#
# Save image to tiff file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [compression] Compression for this file, input VipsForeignTiffCompression
# [Q] Q factor, input gint
# [predictor] Compression prediction, input VipsForeignTiffPredictor
# [profile] ICC profile to embed, input gchararray
# [tile] Write a tiled tiff, input gboolean
# [tile_width] Tile width in pixels, input gint
# [tile_height] Tile height in pixels, input gint
# [pyramid] Write a pyramidal tiff, input gboolean
# [miniswhite] Use 0 for white in 1-bit images, input gboolean
# [squash] Squash images down to 1 bit, input gboolean
# [resunit] Resolution unit, input VipsForeignTiffResunit
# [xres] Horizontal resolution in pixels/mm, input gdouble
# [yres] Vertical resolution in pixels/mm, input gdouble
# [bigtiff] Write a bigtiff image, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: fitssave
# :call-seq:
# fitssave(filename) =>
#
# Save image to fits file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: shrink
# :call-seq:
# shrink(xshrink, yshrink) => out
#
# Shrink an image.
#
# Input:
# [xshrink] Horizontal shrink factor, input gdouble
# [yshrink] Vertical shrink factor, input gdouble
#
# Output:
# [out] Output image, output VipsImage
##
# :method: quadratic
# :call-seq:
# quadratic(coeff) => out
#
# Resample an image with a quadratic transform.
#
# Input:
# [coeff] Coefficient matrix, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate values with this, input VipsInterpolate
##
# :method: affine
# :call-seq:
# affine(matrix) => out
#
# Affine transform of an image.
#
# Input:
# [matrix] Transformation matrix, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [oarea] Area of output to generate, input VipsArrayInt
# [odx] Horizontal output displacement, input gdouble
# [ody] Vertical output displacement, input gdouble
# [idx] Horizontal input displacement, input gdouble
# [idy] Vertical input displacement, input gdouble
##
# :method: similarity
# :call-seq:
# similarity() => out
#
# Similarity transform of an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [scale] Scale by this factor, input gdouble
# [angle] Rotate anticlockwise by this many degrees, input gdouble
# [odx] Horizontal output displacement, input gdouble
# [ody] Vertical output displacement, input gdouble
# [idx] Horizontal input displacement, input gdouble
# [idy] Vertical input displacement, input gdouble
##
# :method: resize
# :call-seq:
# resize(scale) => out
#
# Resize an image.
#
# Input:
# [scale] Scale image by this factor, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [idx] Horizontal input displacement, input gdouble
# [idy] Vertical input displacement, input gdouble
##
# :method: colourspace
# :call-seq:
# colourspace(space) => out
#
# Convert to a new colourspace.
#
# Input:
# [space] Destination colour space, input VipsInterpretation
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [source_space] Source colour space, input VipsInterpretation
##
# :method: Lab2XYZ
# :call-seq:
# Lab2XYZ() => out
#
# Transform cielab to xyz.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [temp] Colour temperature, input VipsArrayDouble
##
# :method: XYZ2Lab
# :call-seq:
# XYZ2Lab() => out
#
# Transform xyz to lab.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [temp] Colour temperature, input VipsArrayDouble
##
# :method: Lab2LCh
# :call-seq:
# Lab2LCh() => out
#
# Transform lab to lch.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LCh2Lab
# :call-seq:
# LCh2Lab() => out
#
# Transform lch to lab.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LCh2CMC
# :call-seq:
# LCh2CMC() => out
#
# Transform lch to cmc.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: CMC2LCh
# :call-seq:
# CMC2LCh() => out
#
# Transform cmc to lch.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: XYZ2Yxy
# :call-seq:
# XYZ2Yxy() => out
#
# Transform xyz to yxy.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: Yxy2XYZ
# :call-seq:
# Yxy2XYZ() => out
#
# Transform yxy to xyz.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: scRGB2XYZ
# :call-seq:
# scRGB2XYZ() => out
#
# Transform scrgb to xyz.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: XYZ2scRGB
# :call-seq:
# XYZ2scRGB() => out
#
# Transform xyz to scrgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabQ2Lab
# :call-seq:
# LabQ2Lab() => out
#
# Unpack a labq image to float lab.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: Lab2LabQ
# :call-seq:
# Lab2LabQ() => out
#
# Transform float lab to labq coding.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabQ2LabS
# :call-seq:
# LabQ2LabS() => out
#
# Unpack a labq image to short lab.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabS2LabQ
# :call-seq:
# LabS2LabQ() => out
#
# Transform short lab to labq coding.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabS2Lab
# :call-seq:
# LabS2Lab() => out
#
# Transform signed short lab to float.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: Lab2LabS
# :call-seq:
# Lab2LabS() => out
#
# Transform float lab to signed short.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: rad2float
# :call-seq:
# rad2float() => out
#
# Unpack radiance coding to float rgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: float2rad
# :call-seq:
# float2rad() => out
#
# Transform float rgb to radiance coding.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabQ2sRGB
# :call-seq:
# LabQ2sRGB() => out
#
# Convert a labq image to srgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sRGB2HSV
# :call-seq:
# sRGB2HSV() => out
#
# Transform srgb to hsv.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: HSV2sRGB
# :call-seq:
# HSV2sRGB() => out
#
# Transform hsv to srgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: icc_import
# :call-seq:
# icc_import() => out
#
# Import from device with icc profile.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [pcs] Set Profile Connection Space, input VipsPCS
# [intent] Rendering intent, input VipsIntent
# [embedded] Use embedded input profile, if available, input gboolean
# [input_profile] Filename to load input profile from, input gchararray
##
# :method: icc_export
# :call-seq:
# icc_export() => out
#
# Output to device with icc profile.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [pcs] Set Profile Connection Space, input VipsPCS
# [intent] Rendering intent, input VipsIntent
# [output_profile] Filename to load output profile from, input gchararray
# [depth] Output device space depth in bits, input gint
##
# :method: icc_transform
# :call-seq:
# icc_transform(output_profile) => out
#
# Transform between devices with icc profiles.
#
# Input:
# [output_profile] Filename to load output profile from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [pcs] Set Profile Connection Space, input VipsPCS
# [intent] Rendering intent, input VipsIntent
# [embedded] Use embedded input profile, if available, input gboolean
# [input_profile] Filename to load input profile from, input gchararray
# [depth] Output device space depth in bits, input gint
##
# :method: dE76
# :call-seq:
# dE76(right) => out
#
# Calculate de76.
#
# Input:
# [right] Right-hand input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: dE00
# :call-seq:
# dE00(right) => out
#
# Calculate de00.
#
# Input:
# [right] Right-hand input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: dECMC
# :call-seq:
# dECMC(right) => out
#
# Calculate decmc.
#
# Input:
# [right] Right-hand input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sRGB2scRGB
# :call-seq:
# sRGB2scRGB() => out
#
# Convert an srgb image to scrgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: scRGB2BW
# :call-seq:
# scRGB2BW() => out
#
# Convert scrgb to bw.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [depth] Output device space depth in bits, input gint
##
# :method: scRGB2sRGB
# :call-seq:
# scRGB2sRGB() => out
#
# Convert an scrgb image to srgb.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [depth] Output device space depth in bits, input gint
##
# :method: maplut
# :call-seq:
# maplut(lut) => out
#
# Map an image through a lut.
#
# Input:
# [lut] Look-up table image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [band] apply one-band lut to this band of in, input gint
##
# :method: percent
# :call-seq:
# percent(percent) => threshold
#
# Find threshold for percent of pixels.
#
# Input:
# [percent] Percent of pixels, input gdouble
#
# Output:
# [threshold] Threshold above which lie percent of pixels, output gint
##
# :method: stdif
# :call-seq:
# stdif(width, height) => out
#
# Statistical difference.
#
# Input:
# [width] Window width in pixels, input gint
# [height] Window height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [a] Weight of new mean, input gdouble
# [s0] New deviation, input gdouble
# [b] Weight of new deviation, input gdouble
# [m0] New mean, input gdouble
##
# :method: hist_cum
# :call-seq:
# hist_cum() => out
#
# Form cumulative histogram.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_match
# :call-seq:
# hist_match(ref) => out
#
# Match two histograms.
#
# Input:
# [ref] Reference histogram, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_norm
# :call-seq:
# hist_norm() => out
#
# Normalise histogram.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_equal
# :call-seq:
# hist_equal() => out
#
# Histogram equalisation.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [band] Equalise with this band, input gint
##
# :method: hist_plot
# :call-seq:
# hist_plot() => out
#
# Plot histogram.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_local
# :call-seq:
# hist_local(width, height) => out
#
# Local histogram equalisation.
#
# Input:
# [width] Window width in pixels, input gint
# [height] Window height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_ismonotonic
# :call-seq:
# hist_ismonotonic() => monotonic
#
# Test for monotonicity.
#
# Output:
# [monotonic] true if in is monotonic, output gboolean
##
# :method: hist_entropy
# :call-seq:
# hist_entropy() => out
#
# Estimate image entropy.
#
# Output:
# [out] Output value, output gdouble
##
# :method: conv
# :call-seq:
# conv(mask) => out
#
# Convolution operation.
#
# Input:
# [mask] Input matrix image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [precision] Convolve with this precision, input VipsPrecision
# [layers] Use this many layers in approximation, input gint
# [cluster] Cluster lines closer than this in approximation, input gint
##
# :method: compass
# :call-seq:
# compass(mask) => out
#
# Convolve with rotating mask.
#
# Input:
# [mask] Input matrix image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [times] Rotate and convolve this many times, input gint
# [angle] Rotate mask by this much between convolutions, input VipsAngle45
# [combine] Combine convolution results like this, input VipsCombine
# [precision] Convolve with this precision, input VipsPrecision
# [layers] Use this many layers in approximation, input gint
# [cluster] Cluster lines closer than this in approximation, input gint
##
# :method: convsep
# :call-seq:
# convsep(mask) => out
#
# Separable convolution operation.
#
# Input:
# [mask] Input matrix image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [precision] Convolve with this precision, input VipsPrecision
# [layers] Use this many layers in approximation, input gint
# [cluster] Cluster lines closer than this in approximation, input gint
##
# :method: fastcor
# :call-seq:
# fastcor(ref) => out
#
# Fast correlation.
#
# Input:
# [ref] Input reference image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: spcor
# :call-seq:
# spcor(ref) => out
#
# Spatial correlation.
#
# Input:
# [ref] Input reference image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sharpen
# :call-seq:
# sharpen() => out
#
# Unsharp masking for print.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [radius] Mask radius, input gint
# [x1] Flat/jaggy threshold, input gdouble
# [y2] Maximum brightening, input gdouble
# [y3] Maximum darkening, input gdouble
# [m1] Slope for flat areas, input gdouble
# [m2] Slope for jaggy areas, input gdouble
##
# :method: gaussblur
# :call-seq:
# gaussblur(sigma) => out
#
# Gaussian blur.
#
# Input:
# [sigma] Sigma of Gaussian, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [min_ampl] Minimum amplitude of Gaussian, input gdouble
# [precision] Convolve with this precision, input VipsPrecision
##
# :method: fwfft
# :call-seq:
# fwfft() => out
#
# Forward fft.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: invfft
# :call-seq:
# invfft() => out
#
# Inverse fft.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [real] Output only the real part of the transform, input gboolean
##
# :method: freqmult
# :call-seq:
# freqmult(mask) => out
#
# Frequency-domain filtering.
#
# Input:
# [mask] Input mask image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: spectrum
# :call-seq:
# spectrum() => out
#
# Make displayable power spectrum.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: phasecor
# :call-seq:
# phasecor(in2) => out
#
# Calculate phase correlation.
#
# Input:
# [in2] Second input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: morph
# :call-seq:
# morph(mask, morph) => out
#
# Morphology operation.
#
# Input:
# [mask] Input matrix image, input VipsImage
# [morph] Morphological operation to perform, input VipsOperationMorphology
#
# Output:
# [out] Output image, output VipsImage
##
# :method: rank
# :call-seq:
# rank(width, height, index) => out
#
# Rank filter.
#
# Input:
# [width] Window width in pixels, input gint
# [height] Window height in pixels, input gint
# [index] Select pixel at index, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: countlines
# :call-seq:
# countlines(direction) => nolines
#
# Count lines in an image.
#
# Input:
# [direction] Countlines left-right or up-down, input VipsDirection
#
# Output:
# [nolines] Number of lines, output gdouble
##
# :method: labelregions
# :call-seq:
# labelregions() => mask
#
# Label regions in an image.
#
# Output:
# [mask] Mask of region labels, output VipsImage
#
# Output options:
  #   [segments] Number of discrete contiguous regions, output gint
##
# :method: draw_rect
# :call-seq:
# draw_rect(ink, left, top, width, height) => image
#
# Paint a rectangle on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [left] Rect to fill, input gint
# [top] Rect to fill, input gint
# [width] Rect to fill, input gint
# [height] Rect to fill, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [fill] Draw a solid object, input gboolean
##
# :method: draw_mask
# :call-seq:
# draw_mask(ink, mask, x, y) => image
#
# Draw a mask on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [mask] Mask of pixels to draw, input VipsImage
# [x] Draw mask here, input gint
# [y] Draw mask here, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
##
# :method: draw_line
# :call-seq:
# draw_line(ink, x1, y1, x2, y2) => image
#
# Draw a line on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [x1] Start of draw_line, input gint
# [y1] Start of draw_line, input gint
# [x2] End of draw_line, input gint
# [y2] End of draw_line, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
##
# :method: draw_circle
# :call-seq:
# draw_circle(ink, cx, cy, radius) => image
#
# Draw a circle on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [cx] Centre of draw_circle, input gint
# [cy] Centre of draw_circle, input gint
# [radius] Radius in pixels, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [fill] Draw a solid object, input gboolean
##
# :method: draw_flood
# :call-seq:
# draw_flood(ink, x, y) => image
#
# Flood-fill an area.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [x] DrawFlood start point, input gint
# [y] DrawFlood start point, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [test] Test pixels in this image, input VipsImage
# [equal] DrawFlood while equal to edge, input gboolean
#
# Output options:
# [left] Left edge of modified area, output gint
# [top] top edge of modified area, output gint
# [width] width of modified area, output gint
# [height] height of modified area, output gint
##
# :method: draw_image
# :call-seq:
# draw_image(sub, x, y) => image
#
# Paint an image into another image.
#
# Input:
# [sub] Sub-image to insert into main image, input VipsImage
# [x] Draw image here, input gint
# [y] Draw image here, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [mode] Combining mode, input VipsCombineMode
##
# :method: draw_smudge
# :call-seq:
# draw_smudge(left, top, width, height) => image
#
# Blur a rectangle on an image.
#
# Input:
# [left] Rect to fill, input gint
# [top] Rect to fill, input gint
# [width] Rect to fill, input gint
# [height] Rect to fill, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
##
# :method: merge
# :call-seq:
# merge(sec, direction, dx, dy) => out
#
# Merge two images.
#
# Input:
# [sec] Secondary image, input VipsImage
  #   [direction] Horizontal or vertical merge, input VipsDirection
# [dx] Horizontal displacement from sec to ref, input gint
# [dy] Vertical displacement from sec to ref, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [mblend] Maximum blend size, input gint
##
# :method: mosaic
# :call-seq:
# mosaic(sec, direction, xref, yref, xsec, ysec) => out
#
# Mosaic two images.
#
# Input:
# [sec] Secondary image, input VipsImage
  #   [direction] Horizontal or vertical mosaic, input VipsDirection
# [xref] Position of reference tie-point, input gint
# [yref] Position of reference tie-point, input gint
# [xsec] Position of secondary tie-point, input gint
# [ysec] Position of secondary tie-point, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [hwindow] Half window size, input gint
# [harea] Half area size, input gint
# [mblend] Maximum blend size, input gint
# [bandno] Band to search for features on, input gint
#
# Output options:
# [dx0] Detected integer offset, output gint
# [dy0] Detected integer offset, output gint
# [scale1] Detected scale, output gdouble
# [angle1] Detected rotation, output gdouble
# [dx1] Detected first-order displacement, output gdouble
# [dy1] Detected first-order displacement, output gdouble
##
# :method: mosaic1
# :call-seq:
# mosaic1(sec, direction, xr1, yr1, xs1, ys1, xr2, yr2, xs2, ys2) => out
#
# First-order mosaic of two images.
#
# Input:
# [sec] Secondary image, input VipsImage
  #   [direction] Horizontal or vertical mosaic, input VipsDirection
# [xr1] Position of first reference tie-point, input gint
# [yr1] Position of first reference tie-point, input gint
# [xs1] Position of first secondary tie-point, input gint
# [ys1] Position of first secondary tie-point, input gint
# [xr2] Position of second reference tie-point, input gint
# [yr2] Position of second reference tie-point, input gint
# [xs2] Position of second secondary tie-point, input gint
# [ys2] Position of second secondary tie-point, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [hwindow] Half window size, input gint
# [harea] Half area size, input gint
# [search] Search to improve tie-points, input gboolean
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [mblend] Maximum blend size, input gint
# [bandno] Band to search for features on, input gint
##
# :method: match
# :call-seq:
# match(sec, xr1, yr1, xs1, ys1, xr2, yr2, xs2, ys2) => out
#
# First-order match of two images.
#
# Input:
# [sec] Secondary image, input VipsImage
# [xr1] Position of first reference tie-point, input gint
# [yr1] Position of first reference tie-point, input gint
# [xs1] Position of first secondary tie-point, input gint
# [ys1] Position of first secondary tie-point, input gint
# [xr2] Position of second reference tie-point, input gint
# [yr2] Position of second reference tie-point, input gint
# [xs2] Position of second secondary tie-point, input gint
# [ys2] Position of second secondary tie-point, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [hwindow] Half window size, input gint
# [harea] Half area size, input gint
# [search] Search to improve tie-points, input gboolean
# [interpolate] Interpolate pixels with this, input VipsInterpolate
##
# :method: globalbalance
# :call-seq:
# globalbalance() => out
#
# Global balance an image mosaic.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [gamma] Image gamma, input gdouble
# [int_output] Integer output, input gboolean
end
end
# regenerate docs
#--
# This file generated automatically. Do not edit!
#++
module Vips
class Image
##
# :singleton-method: system
# :call-seq:
# system(cmd_format) =>
#
# Run an external command.
#
# Input:
# [cmd_format] Command to run, input gchararray
#
# Options:
# [in] Array of input images, input VipsArrayImage
# [in_format] Format for input filename, input gchararray
# [out_format] Format for output filename, input gchararray
#
# Output options:
# [out] Output image, output VipsImage
# [log] Command log, output gchararray
##
# :method: add
# :call-seq:
# add(right) => out
#
# Add two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: subtract
# :call-seq:
# subtract(right) => out
#
# Subtract two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: multiply
# :call-seq:
# multiply(right) => out
#
# Multiply two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: divide
# :call-seq:
# divide(right) => out
#
# Divide two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: relational
# :call-seq:
# relational(right, relational) => out
#
# Relational operation on two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [relational] relational to perform, input VipsOperationRelational
#
# Output:
# [out] Output image, output VipsImage
##
# :method: remainder
# :call-seq:
# remainder(right) => out
#
# Remainder after integer division of two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: boolean
# :call-seq:
# boolean(right, boolean) => out
#
# Boolean operation on two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [boolean] boolean to perform, input VipsOperationBoolean
#
# Output:
# [out] Output image, output VipsImage
##
# :method: math2
# :call-seq:
# math2(right, math2) => out
#
# Binary math operations.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [math2] math to perform, input VipsOperationMath2
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complex2
# :call-seq:
# complex2(right, cmplx) => out
#
# Complex binary operations on two images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
# [cmplx] binary complex operation to perform, input VipsOperationComplex2
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complexform
# :call-seq:
# complexform(right) => out
#
# Form a complex image from two real images.
#
# Input:
# [right] Right-hand image argument, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :singleton-method: sum
# :call-seq:
# sum(in) => out
#
# Sum an array of images.
#
# Input:
# [in] Array of input images, input VipsArrayImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: invert
# :call-seq:
# invert() => out
#
# Invert an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: linear
# :call-seq:
# linear(a, b) => out
#
# Calculate (a * in + b).
#
# Input:
# [a] Multiply by this, input VipsArrayDouble
# [b] Add this, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output should be uchar, input gboolean
##
# :method: math
# :call-seq:
# math(math) => out
#
# Apply a math operation to an image.
#
# Input:
# [math] math to perform, input VipsOperationMath
#
# Output:
# [out] Output image, output VipsImage
##
# :method: abs
# :call-seq:
# abs() => out
#
# Absolute value of an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sign
# :call-seq:
# sign() => out
#
# Unit vector of pixel.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: round
# :call-seq:
# round(round) => out
#
# Perform a round function on an image.
#
# Input:
# [round] rounding operation to perform, input VipsOperationRound
#
# Output:
# [out] Output image, output VipsImage
##
# :method: relational_const
# :call-seq:
# relational_const(c, relational) => out
#
# Relational operations against a constant.
#
# Input:
# [c] Array of constants, input VipsArrayDouble
# [relational] relational to perform, input VipsOperationRelational
#
# Output:
# [out] Output image, output VipsImage
##
# :method: remainder_const
# :call-seq:
# remainder_const(c) => out
#
# Remainder after integer division of an image and a constant.
#
# Input:
# [c] Array of constants, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
##
# :method: boolean_const
# :call-seq:
# boolean_const(c, boolean) => out
#
# Boolean operations against a constant.
#
# Input:
# [c] Array of constants, input VipsArrayDouble
# [boolean] boolean to perform, input VipsOperationBoolean
#
# Output:
# [out] Output image, output VipsImage
##
# :method: math2_const
# :call-seq:
# math2_const(c, math2) => out
#
# Pow( @in, @c ).
#
# Input:
# [c] Array of constants, input VipsArrayDouble
# [math2] math to perform, input VipsOperationMath2
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complex
# :call-seq:
# complex(cmplx) => out
#
# Perform a complex operation on an image.
#
# Input:
# [cmplx] complex to perform, input VipsOperationComplex
#
# Output:
# [out] Output image, output VipsImage
##
# :method: complexget
# :call-seq:
# complexget(get) => out
#
# Get a component from a complex image.
#
# Input:
# [get] complex to perform, input VipsOperationComplexget
#
# Output:
# [out] Output image, output VipsImage
##
# :method: avg
# :call-seq:
# avg() => out
#
# Find image average.
#
# Output:
# [out] Output value, output gdouble
##
# :method: min
# :call-seq:
# min() => out
#
# Find image minimum.
#
# Output:
# [out] Output value, output gdouble
#
# Options:
# [size] Number of minimum values to find, input gint
#
# Output options:
# [x] Horizontal position of minimum, output gint
# [y] Vertical position of minimum, output gint
# [out_array] Array of output values, output VipsArrayDouble
# [x_array] Array of horizontal positions, output VipsArrayInt
# [y_array] Array of vertical positions, output VipsArrayInt
##
# :method: max
# :call-seq:
# max() => out
#
# Find image maximum.
#
# Output:
# [out] Output value, output gdouble
#
# Options:
# [size] Number of maximum values to find, input gint
#
# Output options:
# [x] Horizontal position of maximum, output gint
# [y] Vertical position of maximum, output gint
# [out_array] Array of output values, output VipsArrayDouble
# [x_array] Array of horizontal positions, output VipsArrayInt
# [y_array] Array of vertical positions, output VipsArrayInt
##
# :method: deviate
# :call-seq:
# deviate() => out
#
# Find image standard deviation.
#
# Output:
# [out] Output value, output gdouble
##
# :method: stats
# :call-seq:
# stats() => out
#
# Find image average.
#
# Output:
# [out] Output array of statistics, output VipsImage
##
# :method: hist_find
# :call-seq:
# hist_find() => out
#
# Find image histogram.
#
# Output:
# [out] Output histogram, output VipsImage
#
# Options:
# [band] Find histogram of band, input gint
##
# :method: hist_find_ndim
# :call-seq:
# hist_find_ndim() => out
#
# Find n-dimensional image histogram.
#
# Output:
# [out] Output histogram, output VipsImage
#
# Options:
# [bins] Number of bins in each dimension, input gint
##
# :method: hist_find_indexed
# :call-seq:
# hist_find_indexed(index) => out
#
# Find indexed image histogram.
#
# Input:
# [index] Index image, input VipsImage
#
# Output:
# [out] Output histogram, output VipsImage
##
# :method: hough_line
# :call-seq:
# hough_line() => out
#
# Find hough line transform.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [width] horizontal size of parameter space, input gint
# [height] Vertical size of parameter space, input gint
##
# :method: hough_circle
# :call-seq:
# hough_circle() => out
#
# Find hough circle transform.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [scale] Scale down dimensions by this factor, input gint
# [min_radius] Smallest radius to search for, input gint
# [max_radius] Largest radius to search for, input gint
##
# :method: project
# :call-seq:
# project() => columns, rows
#
# Find image projections.
#
# Output:
# [columns] Sums of columns, output VipsImage
# [rows] Sums of rows, output VipsImage
##
# :method: profile
# :call-seq:
# profile() => columns, rows
#
# Find image profiles.
#
# Output:
# [columns] First non-zero pixel in column, output VipsImage
# [rows] First non-zero pixel in row, output VipsImage
##
# :method: measure
# :call-seq:
# measure(h, v) => out
#
# Measure a set of patches on a colour chart.
#
# Input:
# [h] Number of patches across chart, input gint
# [v] Number of patches down chart, input gint
#
# Output:
# [out] Output array of statistics, output VipsImage
#
# Options:
# [left] Left edge of extract area, input gint
# [top] Top edge of extract area, input gint
# [width] Width of extract area, input gint
# [height] Height of extract area, input gint
##
# :method: getpoint
# :call-seq:
# getpoint(x, y) => out_array
#
# Read a point from an image.
#
# Input:
# [x] Point to read, input gint
# [y] Point to read, input gint
#
# Output:
# [out_array] Array of output values, output VipsArrayDouble
##
# :method: copy
# :call-seq:
# copy() => out
#
# Copy an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [bands] Number of bands in image, input gint
# [format] Pixel format in image, input VipsBandFormat
# [coding] Pixel coding, input VipsCoding
# [interpretation] Pixel interpretation, input VipsInterpretation
# [xres] Horizontal resolution in pixels/mm, input gdouble
# [yres] Vertical resolution in pixels/mm, input gdouble
# [xoffset] Horizontal offset of origin, input gint
# [yoffset] Vertical offset of origin, input gint
##
# :method: tilecache
# :call-seq:
# tilecache() => out
#
# Cache an image as a set of tiles.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [tile_width] Tile width in pixels, input gint
# [tile_height] Tile height in pixels, input gint
# [max_tiles] Maximum number of tiles to cache, input gint
# [access] Expected access pattern, input VipsAccess
# [threaded] Allow threaded access, input gboolean
# [persistent] Keep cache between evaluations, input gboolean
##
# :method: linecache
# :call-seq:
# linecache() => out
#
# Cache an image as a set of lines.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [tile_height] Tile height in pixels, input gint
# [access] Expected access pattern, input VipsAccess
# [threaded] Allow threaded access, input gboolean
# [persistent] Keep cache between evaluations, input gboolean
##
# :method: sequential
# :call-seq:
# sequential() => out
#
# Check sequential access.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [trace] trace pixel requests, input gboolean
# [tile_height] Tile height in pixels, input gint
# [access] Expected access pattern, input VipsAccess
##
# :method: cache
# :call-seq:
# cache() => out
#
# Cache an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [tile_width] Tile width in pixels, input gint
# [tile_height] Tile height in pixels, input gint
# [max_tiles] Maximum number of tiles to cache, input gint
##
# :method: embed
# :call-seq:
# embed(x, y, width, height) => out
#
# Embed an image in a larger image.
#
# Input:
# [x] Left edge of input in output, input gint
# [y] Top edge of input in output, input gint
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [extend] How to generate the extra pixels, input VipsExtend
# [background] Colour for background pixels, input VipsArrayDouble
##
# :method: flip
# :call-seq:
# flip(direction) => out
#
# Flip an image.
#
# Input:
# [direction] Direction to flip image, input VipsDirection
#
# Output:
# [out] Output image, output VipsImage
##
# :method: insert
# :call-seq:
# insert(sub, x, y) => out
#
# Insert image @sub into @main at @x, @y.
#
# Input:
# [sub] Sub-image to insert into main image, input VipsImage
# [x] Left edge of sub in main, input gint
# [y] Top edge of sub in main, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [expand] Expand output to hold all of both inputs, input gboolean
# [background] Colour for new pixels, input VipsArrayDouble
##
# :method: join
# :call-seq:
# join(in2, direction) => out
#
# Join a pair of images.
#
# Input:
# [in2] Second input image, input VipsImage
# [direction] Join left-right or up-down, input VipsDirection
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [align] Align on the low, centre or high coordinate edge, input VipsAlign
# [expand] Expand output to hold all of both inputs, input gboolean
# [shim] Pixels between images, input gint
# [background] Colour for new pixels, input VipsArrayDouble
##
# :singleton-method: arrayjoin
# :call-seq:
# arrayjoin(in) => out
#
# Join an array of images.
#
# Input:
# [in] Array of input images, input VipsArrayImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [across] Number of images across grid, input gint
# [shim] Pixels between images, input gint
# [background] Colour for new pixels, input VipsArrayDouble
# [halign] Align on the left, centre or right, input VipsAlign
# [valign] Align on the top, centre or bottom, input VipsAlign
# [hspacing] Horizontal spacing between images, input gint
# [vspacing] Vertical spacing between images, input gint
##
# :method: crop
# :call-seq:
# crop(left, top, width, height) => out
#
# Extract an area from an image.
#
# Input:
# [left] Left edge of extract area, input gint
# [top] Top edge of extract area, input gint
# [width] Width of extract area, input gint
# [height] Height of extract area, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: extract_area
# :call-seq:
# extract_area(left, top, width, height) => out
#
# Extract an area from an image.
#
# Input:
# [left] Left edge of extract area, input gint
# [top] Top edge of extract area, input gint
# [width] Width of extract area, input gint
# [height] Height of extract area, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: extract_band
# :call-seq:
# extract_band(band) => out
#
# Extract band from an image.
#
# Input:
# [band] Band to extract, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [n] Number of bands to extract, input gint
##
# :method: bandjoin_const
# :call-seq:
# bandjoin_const(c) => out
#
# Append a constant band to an image.
#
# Input:
# [c] Array of constants to add, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
##
# :singleton-method: bandrank
# :call-seq:
# bandrank(in) => out
#
# Band-wise rank of a set of images.
#
# Input:
# [in] Array of input images, input VipsArrayImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [index] Select this band element from sorted list, input gint
##
# :method: bandmean
# :call-seq:
# bandmean() => out
#
# Band-wise average.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: bandbool
# :call-seq:
# bandbool(boolean) => out
#
# Boolean operation across image bands.
#
# Input:
# [boolean] boolean to perform, input VipsOperationBoolean
#
# Output:
# [out] Output image, output VipsImage
##
# :method: replicate
# :call-seq:
# replicate(across, down) => out
#
# Replicate an image.
#
# Input:
# [across] Repeat this many times horizontally, input gint
# [down] Repeat this many times vertically, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: cast
# :call-seq:
# cast(format) => out
#
# Cast an image.
#
# Input:
# [format] Format to cast to, input VipsBandFormat
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [shift] Shift integer values up and down, input gboolean
##
# :method: rot
# :call-seq:
# rot(angle) => out
#
# Rotate an image.
#
# Input:
# [angle] Angle to rotate image, input VipsAngle
#
# Output:
# [out] Output image, output VipsImage
##
# :method: rot45
# :call-seq:
# rot45() => out
#
# Rotate an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [angle] Angle to rotate image, input VipsAngle45
##
# :method: autorot
# :call-seq:
# autorot() => out
#
# Autorotate image by exif tag.
#
# Output:
# [out] Output image, output VipsImage
#
# Output options:
# [angle] Angle image was rotated by, output VipsAngle
##
# :method: recomb
# :call-seq:
# recomb(m) => out
#
# Linear recombination with matrix.
#
# Input:
# [m] matrix of coefficients, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: bandfold
# :call-seq:
# bandfold() => out
#
# Fold up x axis into bands.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [factor] Fold by this factor, input gint
##
# :method: bandunfold
# :call-seq:
# bandunfold() => out
#
# Unfold image bands into x axis.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [factor] Unfold by this factor, input gint
##
# :method: flatten
# :call-seq:
# flatten() => out
#
# Flatten alpha out of an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [background] Background value, input VipsArrayDouble
# [max_alpha] Maximum value of alpha channel, input gdouble
##
# :method: premultiply
# :call-seq:
# premultiply() => out
#
# Premultiply image alpha.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [max_alpha] Maximum value of alpha channel, input gdouble
##
# :method: unpremultiply
# :call-seq:
# unpremultiply() => out
#
# Unpremultiply image alpha.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [max_alpha] Maximum value of alpha channel, input gdouble
##
# :method: grid
# :call-seq:
# grid(tile_height, across, down) => out
#
# Grid an image.
#
# Input:
# [tile_height] chop into tiles this high, input gint
# [across] number of tiles across, input gint
# [down] number of tiles down, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: scale
# :call-seq:
# scale() => out
#
# Scale an image to uchar.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [log] Log scale, input gboolean
# [exp] Exponent for log scale, input gdouble
##
# :method: wrap
# :call-seq:
# wrap() => out
#
# Wrap image origin.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [x] Left edge of input in output, input gint
# [y] Top edge of input in output, input gint
##
# :method: zoom
# :call-seq:
# zoom(xfac, yfac) => out
#
# Zoom an image.
#
# Input:
# [xfac] Horizontal zoom factor, input gint
# [yfac] Vertical zoom factor, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: subsample
# :call-seq:
# subsample(xfac, yfac) => out
#
# Subsample an image.
#
# Input:
# [xfac] Horizontal subsample factor, input gint
# [yfac] Vertical subsample factor, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [point] Point sample, input gboolean
##
# :method: msb
# :call-seq:
# msb() => out
#
# Pick most-significant byte from an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [band] Band to msb, input gint
##
# :method: byteswap
# :call-seq:
# byteswap() => out
#
# Byteswap an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: falsecolour
# :call-seq:
# falsecolour() => out
#
# False-colour an image.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: gamma
# :call-seq:
# gamma() => out
#
# Gamma an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [exponent] Gamma factor, input gdouble
##
# :singleton-method: black
# :call-seq:
# black(width, height) => out
#
# Make a black image.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [bands] Number of bands in image, input gint
##
# :singleton-method: gaussnoise
# :call-seq:
# gaussnoise(width, height) => out
#
# Make a gaussnoise image.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [mean] Mean of pixels in generated image, input gdouble
# [sigma] Standard deviation of pixels in generated image, input gdouble
##
# :singleton-method: text
# :call-seq:
# text(text) => out
#
# Make a text image.
#
# Input:
# [text] Text to render, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
  #   [font] Font to render with, input gchararray
# [width] Maximum image width in pixels, input gint
# [align] Align on the low, centre or high edge, input VipsAlign
# [dpi] DPI to render at, input gint
# [spacing] Line spacing, input gint
##
# :singleton-method: xyz
# :call-seq:
# xyz(width, height) => out
#
# Make an image where pixel values are coordinates.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [csize] Size of third dimension, input gint
# [dsize] Size of fourth dimension, input gint
# [esize] Size of fifth dimension, input gint
##
# :singleton-method: gaussmat
# :call-seq:
# gaussmat(sigma, min_ampl) => out
#
# Make a gaussian image.
#
# Input:
# [sigma] Sigma of Gaussian, input gdouble
# [min_ampl] Minimum amplitude of Gaussian, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [separable] Generate separable Gaussian, input gboolean
# [precision] Generate with this precision, input VipsPrecision
##
# :singleton-method: logmat
# :call-seq:
# logmat(sigma, min_ampl) => out
#
# Make a laplacian of gaussian image.
#
# Input:
# [sigma] Radius of Logmatian, input gdouble
# [min_ampl] Minimum amplitude of Logmatian, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [separable] Generate separable Logmatian, input gboolean
# [precision] Generate with this precision, input VipsPrecision
##
# :singleton-method: eye
# :call-seq:
# eye(width, height) => out
#
# Make an image showing the eye's spatial response.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [factor] Maximum spatial frequency, input gdouble
##
# :singleton-method: grey
# :call-seq:
# grey(width, height) => out
#
# Make a grey ramp image.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
##
# :singleton-method: zone
# :call-seq:
# zone(width, height) => out
#
# Make a zone plate.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
##
# :singleton-method: sines
# :call-seq:
# sines(width, height) => out
#
# Make a 2d sine wave.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [hfreq] Horizontal spatial frequency, input gdouble
# [vfreq] Vertical spatial frequency, input gdouble
##
# :singleton-method: mask_ideal
# :call-seq:
# mask_ideal(width, height, frequency_cutoff) => out
#
# Make an ideal filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :singleton-method: mask_ideal_ring
# :call-seq:
# mask_ideal_ring(width, height, frequency_cutoff, ringwidth) => out
#
# Make an ideal ring filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
# [ringwidth] Ringwidth, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :singleton-method: mask_ideal_band
# :call-seq:
# mask_ideal_band(width, height, frequency_cutoff_x, frequency_cutoff_y, radius) => out
#
# Make an ideal band filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff_x] Frequency cutoff x, input gdouble
# [frequency_cutoff_y] Frequency cutoff y, input gdouble
#   [radius] Radius of circle, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_butterworth
# :call-seq:
# mask_butterworth(width, height, order, frequency_cutoff, amplitude_cutoff) => out
#
# Make a butterworth filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [order] Filter order, input gdouble
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_butterworth_ring
# :call-seq:
# mask_butterworth_ring(width, height, order, frequency_cutoff, amplitude_cutoff, ringwidth) => out
#
# Make a butterworth ring filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [order] Filter order, input gdouble
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
# [ringwidth] Ringwidth, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_butterworth_band
# :call-seq:
# mask_butterworth_band(width, height, order, frequency_cutoff_x, frequency_cutoff_y, radius, amplitude_cutoff) => out
#
# Make a butterworth band filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [order] Filter order, input gdouble
# [frequency_cutoff_x] Frequency cutoff x, input gdouble
# [frequency_cutoff_y] Frequency cutoff y, input gdouble
#   [radius] Radius of circle, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [nodc] Remove DC component, input gboolean
##
# :singleton-method: mask_gaussian
# :call-seq:
# mask_gaussian(width, height, frequency_cutoff, amplitude_cutoff) => out
#
# Make a gaussian filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :singleton-method: mask_gaussian_ring
# :call-seq:
# mask_gaussian_ring(width, height, frequency_cutoff, amplitude_cutoff, ringwidth) => out
#
# Make a gaussian ring filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff] Frequency cutoff, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
# [ringwidth] Ringwidth, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_gaussian_band
# :call-seq:
# mask_gaussian_band(width, height, frequency_cutoff_x, frequency_cutoff_y, radius, amplitude_cutoff) => out
#
# Make a gaussian band filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [frequency_cutoff_x] Frequency cutoff x, input gdouble
# [frequency_cutoff_y] Frequency cutoff y, input gdouble
#   [radius] Radius of circle, input gdouble
# [amplitude_cutoff] Amplitude cutoff, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
##
# :singleton-method: mask_fractal
# :call-seq:
# mask_fractal(width, height, fractal_dimension) => out
#
# Make fractal filter.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [fractal_dimension] Fractal dimension, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [uchar] Output an unsigned char image, input gboolean
# [nodc] Remove DC component, input gboolean
# [reject] Invert the sense of the filter, input gboolean
# [optical] Rotate quadrants to optical space, input gboolean
##
# :method: buildlut
# :call-seq:
# buildlut() => out
#
# Build a look-up table.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: invertlut
# :call-seq:
# invertlut() => out
#
# Build an inverted look-up table.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [size] LUT size to generate, input gint
##
# :singleton-method: tonelut
# :call-seq:
# tonelut() => out
#
# Build a look-up table.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [in_max] Size of LUT to build, input gint
# [out_max] Maximum value in output LUT, input gint
# [Lb] Lowest value in output, input gdouble
# [Lw] Highest value in output, input gdouble
# [Ps] Position of shadow, input gdouble
# [Pm] Position of mid-tones, input gdouble
# [Ph] Position of highlights, input gdouble
# [S] Adjust shadows by this much, input gdouble
# [M] Adjust mid-tones by this much, input gdouble
# [H] Adjust highlights by this much, input gdouble
##
# :singleton-method: identity
# :call-seq:
# identity() => out
#
# Make a 1d image where pixel values are indexes.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [bands] Number of bands in LUT, input gint
# [ushort] Create a 16-bit LUT, input gboolean
# [size] Size of 16-bit LUT, input gint
##
# :singleton-method: fractsurf
# :call-seq:
# fractsurf(width, height, fractal_dimension) => out
#
# Make a fractal surface.
#
# Input:
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [fractal_dimension] Fractal dimension, input gdouble
#
# Output:
# [out] Output image, output VipsImage
##
# :singleton-method: radload
# :call-seq:
# radload(filename) => out
#
# Load a radiance image from a file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: ppmload
# :call-seq:
# ppmload(filename) => out
#
# Load ppm from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: csvload
# :call-seq:
# csvload(filename) => out
#
# Load csv from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [skip] Skip this many lines at the start of the file, input gint
# [lines] Read this many lines from the file, input gint
# [whitespace] Set of whitespace characters, input gchararray
# [separator] Set of separator characters, input gchararray
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: matrixload
# :call-seq:
# matrixload(filename) => out
#
# Load matrix from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: analyzeload
# :call-seq:
# analyzeload(filename) => out
#
# Load an analyze6 image.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: rawload
# :call-seq:
# rawload(filename, width, height, bands) => out
#
# Load raw data from a file.
#
# Input:
# [filename] Filename to load from, input gchararray
# [width] Image width in pixels, input gint
# [height] Image height in pixels, input gint
# [bands] Number of bands in image, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [offset] Offset in bytes from start of file, input guint64
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: vipsload
# :call-seq:
# vipsload(filename) => out
#
# Load vips from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: pngload
# :call-seq:
# pngload(filename) => out
#
# Load png from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: pngload_buffer
# :call-seq:
# pngload_buffer(buffer) => out
#
# Load png from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: matload
# :call-seq:
# matload(filename) => out
#
# Load mat from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: jpegload
# :call-seq:
# jpegload(filename) => out
#
# Load jpeg from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [shrink] Shrink factor on load, input gint
# [fail] Fail on first warning, input gboolean
# [autorotate] Rotate image using exif orientation, input gboolean
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: jpegload_buffer
# :call-seq:
# jpegload_buffer(buffer) => out
#
# Load jpeg from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [shrink] Shrink factor on load, input gint
# [fail] Fail on first warning, input gboolean
# [autorotate] Rotate image using exif orientation, input gboolean
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: webpload
# :call-seq:
# webpload(filename) => out
#
# Load webp from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: webpload_buffer
# :call-seq:
# webpload_buffer(buffer) => out
#
# Load webp from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: tiffload
# :call-seq:
# tiffload(filename) => out
#
# Load tiff from file.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [page] Load this page from the image, input gint
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: tiffload_buffer
# :call-seq:
# tiffload_buffer(buffer) => out
#
# Load tiff from buffer.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [page] Load this page from the image, input gint
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: openslideload
# :call-seq:
# openslideload(filename) => out
#
# Load file with openslide.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
# [level] Load this level from the file, input gint
# [autocrop] Crop to image bounds, input gboolean
# [associated] Load this associated image, input gchararray
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: magickload
# :call-seq:
# magickload(filename) => out
#
# Load file with imagemagick.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [all_frames] Read all frames from an image, input gboolean
# [density] Canvas resolution for rendering vector formats like SVG, input gchararray
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: magickload_buffer
# :call-seq:
# magickload_buffer(buffer) => out
#
# Load buffer with imagemagick.
#
# Input:
# [buffer] Buffer to load from, input VipsBlob
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [all_frames] Read all frames from an image, input gboolean
# [density] Canvas resolution for rendering vector formats like SVG, input gchararray
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: fitsload
# :call-seq:
# fitsload(filename) => out
#
# Load a fits image.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :singleton-method: openexrload
# :call-seq:
# openexrload(filename) => out
#
# Load an openexr image.
#
# Input:
# [filename] Filename to load from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [disc] Open to disc, input gboolean
# [access] Required access pattern for this file, input VipsAccess
#
# Output options:
# [flags] Flags for this file, output VipsForeignFlags
##
# :method: radsave
# :call-seq:
# radsave(filename) =>
#
# Save image to radiance file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: ppmsave
# :call-seq:
# ppmsave(filename) =>
#
# Save image to ppm file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [ascii] save as ascii, input gboolean
# [squash] save as one bit, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: csvsave
# :call-seq:
# csvsave(filename) =>
#
# Save image to csv file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [separator] Separator characters, input gchararray
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: matrixsave
# :call-seq:
# matrixsave(filename) =>
#
# Save image to matrix file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: matrixprint
# :call-seq:
# matrixprint() =>
#
# Print matrix.
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: rawsave
# :call-seq:
# rawsave(filename) =>
#
# Save image to raw file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: rawsave_fd
# :call-seq:
# rawsave_fd(fd) =>
#
# Write raw image to file descriptor.
#
# Input:
# [fd] File descriptor to write to, input gint
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: vipssave
# :call-seq:
# vipssave(filename) =>
#
# Save image to vips file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: dzsave
# :call-seq:
# dzsave(filename) =>
#
# Save image to deep zoom format.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [layout] Directory layout, input VipsForeignDzLayout
# [suffix] Filename suffix for tiles, input gchararray
# [overlap] Tile overlap in pixels, input gint
# [tile_size] Tile size in pixels, input gint
# [centre] Center image in tile, input gboolean
# [depth] Pyramid depth, input VipsForeignDzDepth
# [angle] Rotate image during save, input VipsAngle
# [container] Pyramid container type, input VipsForeignDzContainer
# [properties] Write a properties file to the output directory, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: pngsave
# :call-seq:
# pngsave(filename) =>
#
# Save image to png file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [compression] Compression factor, input gint
# [interlace] Interlace image, input gboolean
# [profile] ICC profile to embed, input gchararray
# [filter] libpng row filter flag(s), input VipsForeignPngFilter
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: pngsave_buffer
# :call-seq:
# pngsave_buffer() => buffer
#
# Save image to png buffer.
#
# Output:
# [buffer] Buffer to save to, output VipsBlob
#
# Options:
# [compression] Compression factor, input gint
# [interlace] Interlace image, input gboolean
# [profile] ICC profile to embed, input gchararray
# [filter] libpng row filter flag(s), input VipsForeignPngFilter
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: jpegsave
# :call-seq:
# jpegsave(filename) =>
#
# Save image to jpeg file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [Q] Q factor, input gint
# [profile] ICC profile to embed, input gchararray
# [optimize_coding] Compute optimal Huffman coding tables, input gboolean
# [interlace] Generate an interlaced (progressive) jpeg, input gboolean
# [no_subsample] Disable chroma subsample, input gboolean
# [trellis_quant] Apply trellis quantisation to each 8x8 block, input gboolean
# [overshoot_deringing] Apply overshooting to samples with extreme values, input gboolean
# [optimize_scans] Split the spectrum of DCT coefficients into separate scans, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: jpegsave_buffer
# :call-seq:
# jpegsave_buffer() => buffer
#
# Save image to jpeg buffer.
#
# Output:
# [buffer] Buffer to save to, output VipsBlob
#
# Options:
# [Q] Q factor, input gint
# [profile] ICC profile to embed, input gchararray
# [optimize_coding] Compute optimal Huffman coding tables, input gboolean
# [interlace] Generate an interlaced (progressive) jpeg, input gboolean
# [no_subsample] Disable chroma subsample, input gboolean
# [trellis_quant] Apply trellis quantisation to each 8x8 block, input gboolean
# [overshoot_deringing] Apply overshooting to samples with extreme values, input gboolean
# [optimize_scans] Split the spectrum of DCT coefficients into separate scans, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: jpegsave_mime
# :call-seq:
# jpegsave_mime() =>
#
# Save image to jpeg mime.
#
# Options:
# [Q] Q factor, input gint
# [profile] ICC profile to embed, input gchararray
# [optimize_coding] Compute optimal Huffman coding tables, input gboolean
# [interlace] Generate an interlaced (progressive) jpeg, input gboolean
# [no_subsample] Disable chroma subsample, input gboolean
# [trellis_quant] Apply trellis quantisation to each 8x8 block, input gboolean
# [overshoot_deringing] Apply overshooting to samples with extreme values, input gboolean
# [optimize_scans] Split the spectrum of DCT coefficients into separate scans, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: webpsave
# :call-seq:
# webpsave(filename) =>
#
# Save image to webp file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [Q] Q factor, input gint
# [lossless] enable lossless compression, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: webpsave_buffer
# :call-seq:
# webpsave_buffer() => buffer
#
# Save image to webp buffer.
#
# Output:
# [buffer] Buffer to save to, output VipsBlob
#
# Options:
# [Q] Q factor, input gint
# [lossless] enable lossless compression, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: tiffsave
# :call-seq:
# tiffsave(filename) =>
#
# Save image to tiff file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [compression] Compression for this file, input VipsForeignTiffCompression
# [Q] Q factor, input gint
# [predictor] Compression prediction, input VipsForeignTiffPredictor
# [profile] ICC profile to embed, input gchararray
# [tile] Write a tiled tiff, input gboolean
# [tile_width] Tile width in pixels, input gint
# [tile_height] Tile height in pixels, input gint
# [pyramid] Write a pyramidal tiff, input gboolean
# [miniswhite] Use 0 for white in 1-bit images, input gboolean
# [squash] Squash images down to 1 bit, input gboolean
# [resunit] Resolution unit, input VipsForeignTiffResunit
# [xres] Horizontal resolution in pixels/mm, input gdouble
# [yres] Vertical resolution in pixels/mm, input gdouble
# [bigtiff] Write a bigtiff image, input gboolean
# [properties] Write a properties document to IMAGEDESCRIPTION, input gboolean
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: fitssave
# :call-seq:
# fitssave(filename) =>
#
# Save image to fits file.
#
# Input:
# [filename] Filename to save to, input gchararray
#
# Options:
# [strip] Strip all metadata from image, input gboolean
# [background] Background value, input VipsArrayDouble
##
# :method: mapim
# :call-seq:
# mapim(index) => out
#
# Resample with a mapim image.
#
# Input:
# [index] Index pixels with this, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
##
# :method: shrink
# :call-seq:
# shrink(xshrink, yshrink) => out
#
# Shrink an image.
#
# Input:
# [xshrink] Horizontal shrink factor, input gdouble
# [yshrink] Vertical shrink factor, input gdouble
#
# Output:
# [out] Output image, output VipsImage
##
# :method: shrinkh
# :call-seq:
# shrinkh(xshrink) => out
#
# Shrink an image horizontally.
#
# Input:
# [xshrink] Horizontal shrink factor, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: shrinkv
# :call-seq:
# shrinkv(yshrink) => out
#
# Shrink an image vertically.
#
# Input:
# [yshrink] Vertical shrink factor, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: shrink2
# :call-seq:
# shrink2(xshrink, yshrink) => out
#
# Shrink an image.
#
# Input:
# [xshrink] Horizontal shrink factor, input gdouble
# [yshrink] Vertical shrink factor, input gdouble
#
# Output:
# [out] Output image, output VipsImage
##
# :method: quadratic
# :call-seq:
# quadratic(coeff) => out
#
# Resample an image with a quadratic transform.
#
# Input:
# [coeff] Coefficient matrix, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate values with this, input VipsInterpolate
##
# :method: affine
# :call-seq:
# affine(matrix) => out
#
# Affine transform of an image.
#
# Input:
# [matrix] Transformation matrix, input VipsArrayDouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [oarea] Area of output to generate, input VipsArrayInt
# [odx] Horizontal output displacement, input gdouble
# [ody] Vertical output displacement, input gdouble
# [idx] Horizontal input displacement, input gdouble
# [idy] Vertical input displacement, input gdouble
##
# :method: similarity
# :call-seq:
# similarity() => out
#
# Similarity transform of an image.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [scale] Scale by this factor, input gdouble
# [angle] Rotate anticlockwise by this many degrees, input gdouble
# [odx] Horizontal output displacement, input gdouble
# [ody] Vertical output displacement, input gdouble
# [idx] Horizontal input displacement, input gdouble
# [idy] Vertical input displacement, input gdouble
##
# :method: resize
# :call-seq:
# resize(scale) => out
#
# Resize an image.
#
# Input:
# [scale] Scale image by this factor, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [vscale] Vertical scale image by this factor, input gdouble
# [idx] Horizontal input displacement, input gdouble
# [idy] Vertical input displacement, input gdouble
##
# :method: colourspace
# :call-seq:
# colourspace(space) => out
#
# Convert to a new colourspace.
#
# Input:
# [space] Destination colour space, input VipsInterpretation
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [source_space] Source colour space, input VipsInterpretation
##
# :method: Lab2XYZ
# :call-seq:
# Lab2XYZ() => out
#
# Transform cielab to xyz.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [temp] Colour temperature, input VipsArrayDouble
##
# :method: XYZ2Lab
# :call-seq:
# XYZ2Lab() => out
#
# Transform xyz to lab.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [temp] Colour temperature, input VipsArrayDouble
##
# :method: Lab2LCh
# :call-seq:
# Lab2LCh() => out
#
# Transform lab to lch.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LCh2Lab
# :call-seq:
# LCh2Lab() => out
#
# Transform lch to lab.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LCh2CMC
# :call-seq:
# LCh2CMC() => out
#
# Transform lch to cmc.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: CMC2LCh
# :call-seq:
# CMC2LCh() => out
#
# Transform cmc to lch.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: XYZ2Yxy
# :call-seq:
# XYZ2Yxy() => out
#
# Transform xyz to yxy.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: Yxy2XYZ
# :call-seq:
# Yxy2XYZ() => out
#
# Transform yxy to xyz.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: scRGB2XYZ
# :call-seq:
# scRGB2XYZ() => out
#
# Transform scrgb to xyz.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: XYZ2scRGB
# :call-seq:
# XYZ2scRGB() => out
#
# Transform xyz to scrgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabQ2Lab
# :call-seq:
# LabQ2Lab() => out
#
# Unpack a labq image to float lab.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: Lab2LabQ
# :call-seq:
# Lab2LabQ() => out
#
# Transform float lab to labq coding.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabQ2LabS
# :call-seq:
# LabQ2LabS() => out
#
# Unpack a labq image to short lab.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabS2LabQ
# :call-seq:
# LabS2LabQ() => out
#
# Transform short lab to labq coding.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabS2Lab
# :call-seq:
# LabS2Lab() => out
#
# Transform signed short lab to float.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: Lab2LabS
# :call-seq:
# Lab2LabS() => out
#
# Transform float lab to signed short.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: rad2float
# :call-seq:
# rad2float() => out
#
# Unpack radiance coding to float rgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: float2rad
# :call-seq:
# float2rad() => out
#
# Transform float rgb to radiance coding.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: LabQ2sRGB
# :call-seq:
# LabQ2sRGB() => out
#
# Convert a labq image to srgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sRGB2HSV
# :call-seq:
# sRGB2HSV() => out
#
# Transform srgb to hsv.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: HSV2sRGB
# :call-seq:
# HSV2sRGB() => out
#
# Transform hsv to srgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: icc_import
# :call-seq:
# icc_import() => out
#
# Import from device with icc profile.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [pcs] Set Profile Connection Space, input VipsPCS
# [intent] Rendering intent, input VipsIntent
# [embedded] Use embedded input profile, if available, input gboolean
# [input_profile] Filename to load input profile from, input gchararray
##
# :method: icc_export
# :call-seq:
# icc_export() => out
#
# Output to device with icc profile.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [pcs] Set Profile Connection Space, input VipsPCS
# [intent] Rendering intent, input VipsIntent
# [output_profile] Filename to load output profile from, input gchararray
# [depth] Output device space depth in bits, input gint
##
# :method: icc_transform
# :call-seq:
# icc_transform(output_profile) => out
#
# Transform between devices with icc profiles.
#
# Input:
# [output_profile] Filename to load output profile from, input gchararray
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [pcs] Set Profile Connection Space, input VipsPCS
# [intent] Rendering intent, input VipsIntent
# [embedded] Use embedded input profile, if available, input gboolean
# [input_profile] Filename to load input profile from, input gchararray
# [depth] Output device space depth in bits, input gint
##
# :method: dE76
# :call-seq:
# dE76(right) => out
#
# Calculate de76.
#
# Input:
# [right] Right-hand input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: dE00
# :call-seq:
# dE00(right) => out
#
# Calculate de00.
#
# Input:
# [right] Right-hand input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: dECMC
# :call-seq:
# dECMC(right) => out
#
# Calculate decmc.
#
# Input:
# [right] Right-hand input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sRGB2scRGB
# :call-seq:
# sRGB2scRGB() => out
#
# Convert an srgb image to scrgb.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: scRGB2BW
# :call-seq:
# scRGB2BW() => out
#
# Convert scrgb to bw.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [depth] Output device space depth in bits, input gint
##
# :method: scRGB2sRGB
# :call-seq:
# scRGB2sRGB() => out
#
# Convert an scrgb image to srgb.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [depth] Output device space depth in bits, input gint
##
# :method: maplut
# :call-seq:
# maplut(lut) => out
#
# Map an image through a lut.
#
# Input:
# [lut] Look-up table image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [band] apply one-band lut to this band of in, input gint
##
# :method: percent
# :call-seq:
# percent(percent) => threshold
#
# Find threshold for percent of pixels.
#
# Input:
# [percent] Percent of pixels, input gdouble
#
# Output:
# [threshold] Threshold above which lie percent of pixels, output gint
##
# :method: stdif
# :call-seq:
# stdif(width, height) => out
#
# Statistical difference.
#
# Input:
# [width] Window width in pixels, input gint
# [height] Window height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [a] Weight of new mean, input gdouble
# [s0] New deviation, input gdouble
# [b] Weight of new deviation, input gdouble
# [m0] New mean, input gdouble
##
# :method: hist_cum
# :call-seq:
# hist_cum() => out
#
# Form cumulative histogram.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_match
# :call-seq:
# hist_match(ref) => out
#
# Match two histograms.
#
# Input:
# [ref] Reference histogram, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_norm
# :call-seq:
# hist_norm() => out
#
# Normalise histogram.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_equal
# :call-seq:
# hist_equal() => out
#
# Histogram equalisation.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [band] Equalise with this band, input gint
##
# :method: hist_plot
# :call-seq:
# hist_plot() => out
#
# Plot histogram.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_local
# :call-seq:
# hist_local(width, height) => out
#
# Local histogram equalisation.
#
# Input:
# [width] Window width in pixels, input gint
# [height] Window height in pixels, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: hist_ismonotonic
# :call-seq:
# hist_ismonotonic() => monotonic
#
# Test for monotonicity.
#
# Output:
# [monotonic] true if in is monotonic, output gboolean
##
# :method: hist_entropy
# :call-seq:
# hist_entropy() => out
#
# Estimate image entropy.
#
# Output:
# [out] Output value, output gdouble
##
# :method: conv
# :call-seq:
# conv(mask) => out
#
# Convolution operation.
#
# Input:
# [mask] Input matrix image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [precision] Convolve with this precision, input VipsPrecision
# [layers] Use this many layers in approximation, input gint
# [cluster] Cluster lines closer than this in approximation, input gint
##
# :method: compass
# :call-seq:
# compass(mask) => out
#
# Convolve with rotating mask.
#
# Input:
# [mask] Input matrix image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [times] Rotate and convolve this many times, input gint
# [angle] Rotate mask by this much between convolutions, input VipsAngle45
# [combine] Combine convolution results like this, input VipsCombine
# [precision] Convolve with this precision, input VipsPrecision
# [layers] Use this many layers in approximation, input gint
# [cluster] Cluster lines closer than this in approximation, input gint
##
# :method: convsep
# :call-seq:
# convsep(mask) => out
#
# Separable convolution operation.
#
# Input:
# [mask] Input matrix image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [precision] Convolve with this precision, input VipsPrecision
# [layers] Use this many layers in approximation, input gint
# [cluster] Cluster lines closer than this in approximation, input gint
##
# :method: fastcor
# :call-seq:
# fastcor(ref) => out
#
# Fast correlation.
#
# Input:
# [ref] Input reference image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: spcor
# :call-seq:
# spcor(ref) => out
#
# Spatial correlation.
#
# Input:
# [ref] Input reference image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: sharpen
# :call-seq:
# sharpen() => out
#
# Unsharp masking for print.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [radius] Mask radius, input gint
# [x1] Flat/jaggy threshold, input gdouble
# [y2] Maximum brightening, input gdouble
# [y3] Maximum darkening, input gdouble
# [m1] Slope for flat areas, input gdouble
# [m2] Slope for jaggy areas, input gdouble
##
# :method: gaussblur
# :call-seq:
# gaussblur(sigma) => out
#
# Gaussian blur.
#
# Input:
# [sigma] Sigma of Gaussian, input gdouble
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [min_ampl] Minimum amplitude of Gaussian, input gdouble
# [precision] Convolve with this precision, input VipsPrecision
##
# :method: fwfft
# :call-seq:
# fwfft() => out
#
# Forward fft.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: invfft
# :call-seq:
# invfft() => out
#
# Inverse fft.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [real] Output only the real part of the transform, input gboolean
##
# :method: freqmult
# :call-seq:
# freqmult(mask) => out
#
# Frequency-domain filtering.
#
# Input:
# [mask] Input mask image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: spectrum
# :call-seq:
# spectrum() => out
#
# Make displayable power spectrum.
#
# Output:
# [out] Output image, output VipsImage
##
# :method: phasecor
# :call-seq:
# phasecor(in2) => out
#
# Calculate phase correlation.
#
# Input:
# [in2] Second input image, input VipsImage
#
# Output:
# [out] Output image, output VipsImage
##
# :method: morph
# :call-seq:
# morph(mask, morph) => out
#
# Morphology operation.
#
# Input:
# [mask] Input matrix image, input VipsImage
# [morph] Morphological operation to perform, input VipsOperationMorphology
#
# Output:
# [out] Output image, output VipsImage
##
# :method: rank
# :call-seq:
# rank(width, height, index) => out
#
# Rank filter.
#
# Input:
# [width] Window width in pixels, input gint
# [height] Window height in pixels, input gint
# [index] Select pixel at index, input gint
#
# Output:
# [out] Output image, output VipsImage
##
# :method: countlines
# :call-seq:
# countlines(direction) => nolines
#
# Count lines in an image.
#
# Input:
# [direction] Countlines left-right or up-down, input VipsDirection
#
# Output:
# [nolines] Number of lines, output gdouble
##
# :method: labelregions
# :call-seq:
# labelregions() => mask
#
# Label regions in an image.
#
# Output:
# [mask] Mask of region labels, output VipsImage
#
# Output options:
# [segments] Number of discrete contiguous regions, output gint
##
# :method: draw_rect
# :call-seq:
# draw_rect(ink, left, top, width, height) => image
#
# Paint a rectangle on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [left] Rect to fill, input gint
# [top] Rect to fill, input gint
# [width] Rect to fill, input gint
# [height] Rect to fill, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [fill] Draw a solid object, input gboolean
##
# :method: draw_mask
# :call-seq:
# draw_mask(ink, mask, x, y) => image
#
# Draw a mask on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [mask] Mask of pixels to draw, input VipsImage
# [x] Draw mask here, input gint
# [y] Draw mask here, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
##
# :method: draw_line
# :call-seq:
# draw_line(ink, x1, y1, x2, y2) => image
#
# Draw a line on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [x1] Start of draw_line, input gint
# [y1] Start of draw_line, input gint
# [x2] End of draw_line, input gint
# [y2] End of draw_line, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
##
# :method: draw_circle
# :call-seq:
# draw_circle(ink, cx, cy, radius) => image
#
# Draw a circle on an image.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [cx] Centre of draw_circle, input gint
# [cy] Centre of draw_circle, input gint
# [radius] Radius in pixels, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [fill] Draw a solid object, input gboolean
##
# :method: draw_flood
# :call-seq:
# draw_flood(ink, x, y) => image
#
# Flood-fill an area.
#
# Input:
# [ink] Colour for pixels, input VipsArrayDouble
# [x] DrawFlood start point, input gint
# [y] DrawFlood start point, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [test] Test pixels in this image, input VipsImage
# [equal] DrawFlood while equal to edge, input gboolean
#
# Output options:
# [left] Left edge of modified area, output gint
# [top] top edge of modified area, output gint
# [width] width of modified area, output gint
# [height] height of modified area, output gint
##
# :method: draw_image
# :call-seq:
# draw_image(sub, x, y) => image
#
# Paint an image into another image.
#
# Input:
# [sub] Sub-image to insert into main image, input VipsImage
# [x] Draw image here, input gint
# [y] Draw image here, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
#
# Options:
# [mode] Combining mode, input VipsCombineMode
##
# :method: draw_smudge
# :call-seq:
# draw_smudge(left, top, width, height) => image
#
# Blur a rectangle on an image.
#
# Input:
# [left] Rect to fill, input gint
# [top] Rect to fill, input gint
# [width] Rect to fill, input gint
# [height] Rect to fill, input gint
#
# Output:
# [image] Image to draw on, input VipsImage
##
# :method: merge
# :call-seq:
# merge(sec, direction, dx, dy) => out
#
# Merge two images.
#
# Input:
# [sec] Secondary image, input VipsImage
# [direction] Horizontal or vertical merge, input VipsDirection
# [dx] Horizontal displacement from sec to ref, input gint
# [dy] Vertical displacement from sec to ref, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [mblend] Maximum blend size, input gint
##
# :method: mosaic
# :call-seq:
# mosaic(sec, direction, xref, yref, xsec, ysec) => out
#
# Mosaic two images.
#
# Input:
# [sec] Secondary image, input VipsImage
# [direction] Horizontal or vertical mosaic, input VipsDirection
# [xref] Position of reference tie-point, input gint
# [yref] Position of reference tie-point, input gint
# [xsec] Position of secondary tie-point, input gint
# [ysec] Position of secondary tie-point, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [hwindow] Half window size, input gint
# [harea] Half area size, input gint
# [mblend] Maximum blend size, input gint
# [bandno] Band to search for features on, input gint
#
# Output options:
# [dx0] Detected integer offset, output gint
# [dy0] Detected integer offset, output gint
# [scale1] Detected scale, output gdouble
# [angle1] Detected rotation, output gdouble
# [dx1] Detected first-order displacement, output gdouble
# [dy1] Detected first-order displacement, output gdouble
##
# :method: mosaic1
# :call-seq:
# mosaic1(sec, direction, xr1, yr1, xs1, ys1, xr2, yr2, xs2, ys2) => out
#
# First-order mosaic of two images.
#
# Input:
# [sec] Secondary image, input VipsImage
# [direction] Horizontal or vertical mosaic, input VipsDirection
# [xr1] Position of first reference tie-point, input gint
# [yr1] Position of first reference tie-point, input gint
# [xs1] Position of first secondary tie-point, input gint
# [ys1] Position of first secondary tie-point, input gint
# [xr2] Position of second reference tie-point, input gint
# [yr2] Position of second reference tie-point, input gint
# [xs2] Position of second secondary tie-point, input gint
# [ys2] Position of second secondary tie-point, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [hwindow] Half window size, input gint
# [harea] Half area size, input gint
# [search] Search to improve tie-points, input gboolean
# [interpolate] Interpolate pixels with this, input VipsInterpolate
# [mblend] Maximum blend size, input gint
# [bandno] Band to search for features on, input gint
##
# :method: match
# :call-seq:
# match(sec, xr1, yr1, xs1, ys1, xr2, yr2, xs2, ys2) => out
#
# First-order match of two images.
#
# Input:
# [sec] Secondary image, input VipsImage
# [xr1] Position of first reference tie-point, input gint
# [yr1] Position of first reference tie-point, input gint
# [xs1] Position of first secondary tie-point, input gint
# [ys1] Position of first secondary tie-point, input gint
# [xr2] Position of second reference tie-point, input gint
# [yr2] Position of second reference tie-point, input gint
# [xs2] Position of second secondary tie-point, input gint
# [ys2] Position of second secondary tie-point, input gint
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [hwindow] Half window size, input gint
# [harea] Half area size, input gint
# [search] Search to improve tie-points, input gboolean
# [interpolate] Interpolate pixels with this, input VipsInterpolate
##
# :method: globalbalance
# :call-seq:
# globalbalance() => out
#
# Global balance an image mosaic.
#
# Output:
# [out] Output image, output VipsImage
#
# Options:
# [gamma] Image gamma, input gdouble
# [int_output] Integer output, input gboolean
end
end
|
#!/usr/bin/ruby
require 'rubygems'
require 'wakame'
require 'wakame/packets'
require 'wakame/service'
require 'wakame/queue_declare'
require 'wakame/vm_manipulator'
module Wakame
# Monitors the liveness of remote wakame agents on behalf of the master
# process.
#
# Agents are tracked in two pools:
#   registered_agents   - agents that announced themselves with a
#                         Wakame::Packets::Register packet
#   unregistered_agents - agents seen only through ping reports so far
#
# A periodic EventMachine timer flags silent agents as TIMEOUT and, after
# a longer grace period, removes them and fires Event::AgentUnMonitored.
#
# Changes from review: removed a stray `puts` debug print in #master_local
# (leftover debug output to stdout) and the unused local `ag` in
# #dump_status.
class AgentMonitor
  include ThreadImmutable

  attr_reader :registered_agents, :unregistered_agents, :master, :gc_period

  # master:: the owning Master instance; its add_subscriber API is used to
  #          consume the 'registry', 'ping' and 'agent_event' queues.
  def initialize(master)
    bind_thread
    @master = master
    @registered_agents = {}
    @unregistered_agents = {}
    # Seconds of ping silence before an agent is marked TIMEOUT; after
    # twice that it is dropped from the monitor entirely.
    @agent_timeout = 31.to_f
    @agent_kill_timeout = @agent_timeout * 2
    @gc_period = 20.to_f

    # GC event trigger for agent timer & status
    calc_agent_timeout = proc {
      #Wakame.log.debug("Started agent GC : agents.size=#{@registered_agents.size}")
      kill_list=[]
      registered_agents.each { |agent_id, agent|
        next if agent.status == Service::Agent::STATUS_OFFLINE
        diff_time = Time.now - agent.last_ping_at
        #Wakame.log.debug "AgentMonitor GC : #{agent_id}: #{diff_time}"
        if diff_time > @agent_timeout.to_f
          agent.status = Service::Agent::STATUS_TIMEOUT
        end

        if diff_time > @agent_kill_timeout.to_f
          kill_list << agent_id
        end
      }
      # Drop long-silent agents from both pools and announce each removal.
      kill_list.each { |agent_id|
        r_agent = @registered_agents.delete(agent_id)
        u_agent = @unregistered_agents.delete(agent_id)
        ED.fire_event(Event::AgentUnMonitored.new(r_agent)) unless r_agent.nil?
        ED.fire_event(Event::AgentUnMonitored.new(u_agent)) unless u_agent.nil?
      }
      #Wakame.log.debug("Finished agent GC")
    }
    @agent_timeout_timer = EventMachine::PeriodicTimer.new(@gc_period, calc_agent_timeout)

    # NOTE(review): eval() on payloads straight off the message queue will
    # execute arbitrary code if the broker is not fully trusted; a safe
    # wire format (Marshal, JSON, ...) would be preferable.
    master.add_subscriber('registry') { |data|
      data = eval(data)
      agent_id = data[:agent_id]
      case data[:type]
      when 'Wakame::Packets::Register'
        register_agent(data)
      when 'Wakame::Packets::UnRegister'
        unregister_agent(agent_id)
      end
    }

    master.add_subscriber('ping') { |data|
      ping = eval(data)
      # Skip the old ping responses before starting master node.
      next if Time.parse(ping[:responded_at]) < master.started_at

      # Variable update function for the common members
      set_report_values = proc { |agent|
        agent.status = Service::Agent::STATUS_ONLINE
        agent.uptime = 0
        agent.last_ping_at = Time.parse(ping[:responded_at])
        agent.attr = ping[:attrs]

        agent.services.clear
        # NOTE(review): ping is accessed both as a Hash (ping[:attrs]) and
        # via a method call (ping.services) -- confirm what eval() actually
        # yields here.
        ping.services.each { |svc_id, i|
          agent.services[svc_id] = master.service_cluster.instances[svc_id]
        }
      }

      agent = agent(ping[:agent_id])
      if agent.nil?
        # First contact: park the agent in the unregistered pool until a
        # Register packet arrives.
        agent = Service::Agent.new(ping[:agent_id])
        set_report_values.call(agent)
        unregistered_agents[ping[:agent_id]]=agent
      else
        set_report_values.call(agent)
      end
      ED.fire_event(Event::AgentPong.new(agent))
    }

    master.add_subscriber('agent_event') { |data|
      response = eval(data)
      #p response
      case response[:type]
      when 'Wakame::Packets::ServiceStatusChanged'
        svc_inst = Service::ServiceInstance.instance_collection[response[:svc_id]]
        if svc_inst
          response_time = Time.parse(response[:responded_at])
          svc_inst.update_status(response[:new_status], response_time, response[:fail_message])
          # tmp_event = Event::ServiceStatusChanged.new(response[:svc_id], svc_inst.property, response[:status], response[:previous_status])
          # tmp_event.time = response_time
          # ED.fire_event(tmp_event)

          # if response[:previous_status] != Service::STATUS_ONLINE && response[:new_status] == Service::STATUS_ONLINE
          #   tmp_event = Event::ServiceOnline.new(tmp_event.instance_id, svc_inst.property)
          #   tmp_event.time = response_time
          #   ED.fire_event(tmp_event)
          # elsif response[:previous_status] != Service::STATUS_OFFLINE && response[:new_status] == Service::STATUS_OFFLINE
          #   tmp_event = Event::ServiceOffline.new(tmp_event.instance_id, svc_inst.property)
          #   tmp_event.time = response_time
          #   ED.fire_event(tmp_event)
          # elsif response[:previous_status] != Service::STATUS_FAIL && response[:new_status] == Service::STATUS_FAIL
          #   tmp_event = Event::ServiceFailed.new(tmp_event.instance_id, svc_inst.property, response[:fail_message])
          #   tmp_event.time = response_time
          #   ED.fire_event(tmp_event)
          # end
        end
      when 'Wakame::Packets::ActorResponse'
        case response[:status]
        when Actor::STATUS_RUNNING
          ED.fire_event(Event::ActorProgress.new(response[:agent_id], response[:token], 0))
        else
          # Any terminal status completes the actor request.
          ED.fire_event(Event::ActorComplete.new(response[:agent_id], response[:token], response[:status]))
        end
      else
        Wakame.log.warn("#{self.class}: Unhandled agent response: #{response[:type]}")
      end
    }
  end

  # Look up an agent in either pool; returns nil when unknown.
  def agent(agent_id)
    registered_agents[agent_id] || unregistered_agents[agent_id]
  end

  # Promote an agent into the registered pool, creating it on first
  # contact, and fire Event::AgentMonitored for newly-tracked agents.
  def register_agent(data)
    agent_id = data[:agent_id]
    agent = registered_agents[agent_id]
    if agent.nil?
      agent = unregistered_agents[agent_id]
      if agent.nil?
        # The agent is going to be registered at first time.
        agent = Service::Agent.new(agent_id)
        registered_agents[agent_id] = agent
      else
        # Move the reference from unregistered group to the registered group.
        registered_agents[agent_id] = unregistered_agents[agent_id]
        unregistered_agents.delete(agent_id)
      end
      Wakame.log.debug("The Agent has been registered: #{data.inspect}")
      #Wakame.log.debug(unregistered_agents)
      ED.fire_event(Event::AgentMonitored.new(agent))
    end
    agent.root_path = data[:root_path]
  end

  # Demote an agent back to the unregistered pool and fire
  # Event::AgentUnMonitored. No-op for unknown agents.
  def unregister_agent(agent_id)
    agent = registered_agents[agent_id]
    if agent
      unregistered_agents[agent_id] = registered_agents[agent_id]
      registered_agents.delete(agent_id)
      ED.fire_event(Event::AgentUnMonitored.new(agent))
    end
  end

  # def bind_agent(service_instance, &filter)
  #   agent_id, agent = @agents.find { |agent_id, agent|
  #     next false if agent.has_service_type?(service_instance.property.class)
  #     filter.call(agent)
  #   }
  #   return nil if agent.nil?
  #   service_instance.bind_agent(agent)
  #   agent
  # end

  # def unbind_agent(service_instance)
  #   service_instance.unbind_agent
  # end

  # Returns the master local agent object; raises until that agent has
  # registered itself.
  def master_local
    agent = registered_agents[@master.master_local_agent_id]
    raise "Master does not identify the master local agent yet." if agent.nil?
    agent
  end

  # Yield each registered agent whose status is ONLINE.
  def each_online(&blk)
    registered_agents.each { |k, v|
      next if v.status != Service::Agent::STATUS_ONLINE
      blk.call(v)
    }
  end

  # Snapshot both pools as {:registered=>[...], :unregistered=>[...]}
  # of per-agent dump_status hashes.
  def dump_status
    res = {:registered=>[], :unregistered=>[]}

    @registered_agents.each { |key, a|
      res[:registered] << a.dump_status
    }
    @unregistered_agents.each { |key, a|
      res[:unregistered] << a.dump_status
    }
    res
  end
end
# The wakame master node process: owns the AMQP connection, the command
# queue, the agent monitor and the service cluster.
class Master
  include Wakame::AMQPClient
  include Wakame::QueueDeclare

  # AMQP queues this process declares and consumes (exchange, queue).
  define_queue 'agent_event', 'agent_event'
  define_queue 'ping', 'ping'
  define_queue 'registry', 'registry'

  attr_reader :command_queue, :agent_monitor, :configuration, :service_cluster, :started_at

  # opts is passed straight through to AMQPClient#connect; post_setup runs
  # in the connect callback, i.e. once the AMQP connection is up.
  def initialize(opts={})
    pre_setup
    connect(opts) {
      post_setup
    }
    Wakame.log.info("Started master process : WAKAME_ROOT=#{Wakame.config.root_path} WAKAME_ENV=#{Wakame.config.environment}")
  end

  # def send_agent_command(command, agent_id=nil)
  #   raise TypeError unless command.is_a? Packets::RequestBase
  #   EM.next_tick {
  #     if agent_id
  #       publish_to('agent_command', "agent_id.#{agent_id}", Marshal.dump(command))
  #     else
  #       publish_to('agent_command', '*', Marshal.dump(command))
  #     end
  #   }
  # end

  # Build (but do not send) an ActorRequest addressed to the given agent;
  # the caller invokes #request on the returned object to dispatch it.
  def actor_request(agent_id, path, *args)
    request = Wakame::Packets::ActorRequest.new(agent_id, Util.gen_id, path, *args)
    ActorRequest.new(self, request)
  end

  # Attributes reported by the agent running on the master host itself.
  # Raises until that agent has registered (see AgentMonitor#master_local).
  def attr
    agent_monitor.master_local.attr
  end

  # Shut down the command queue on teardown.
  def cleanup
    @command_queue.shutdown
  end

  # Id of the agent co-located with this master (set in pre_setup).
  def master_local_agent_id
    @master_local_agent_id
  end

  private
  # Resolve the local agent id: the EC2 instance id when running on EC2,
  # otherwise a stand-alone placeholder constant.
  def determine_agent_id
    if Wakame.config.environment == :EC2
      @master_local_agent_id = VmManipulator::EC2::MetadataService.query_metadata_uri('instance-id')
    else
      @master_local_agent_id = VmManipulator::StandAlone::INSTANCE_ID
    end
  end

  # Work done before connecting: record the start time (used to discard
  # stale pings) and bind the event dispatcher to the reactor thread.
  def pre_setup
    determine_agent_id
    @started_at = Time.now

    EM.barrier {
      Wakame.log.debug("Binding thread info to EventDispatcher.")
      EventDispatcher.instance.bind_thread(Thread.current)
    }
  end

  # Runs inside the EventMachine reactor once AMQP is connected; builds
  # the collaborating subsystems.
  def post_setup
    raise 'has to be put in EM.run context' unless EM.reactor_running?
    @command_queue = CommandQueue.new(self)
    @agent_monitor = AgentMonitor.new(self)

    @service_cluster = Util.new_(Wakame.config.cluster_class, self)
  end
end
# One outstanding actor invocation on a remote agent. Wraps a
# Wakame::Packets::ActorRequest packet, publishes it to the agent's
# command queue and lets the caller block until an Event::ActorComplete
# with the matching token arrives.
class ActorRequest
  attr_reader :master

  # master:: the Master used to publish the packet.
  # packet:: must be a Wakame::Packets::ActorRequest, else TypeError.
  def initialize(master, packet)
    raise TypeError unless packet.is_a?(Wakame::Packets::ActorRequest)
    @master = master
    @packet = packet
    @requested = false
    @event_ticket = nil
    # Thread-safe handoff of the final status from the event handler to
    # the thread blocked in #wait_completion.
    @wait_lock = ::Queue.new
  end

  # Subscribe for the completion event, then publish the packet to the
  # target agent. May only be called once per instance; returns self so
  # callers can chain #wait.
  def request
    raise "The request has already been sent." if @requested
    @event_ticket = ED.subscribe(Event::ActorComplete) { |event|
      if event.token == @packet.token
        # Any of status except RUNNING are accomplishment of the actor request.
        Wakame.log.debug("#{self.class}: The actor request has been completed: token=#{self.token}, status=#{event.status}")
        ED.unsubscribe(@event_ticket)
        @wait_lock.enq(event.status)
      end
    }
    Wakame.log.debug("#{self.class}: Send the actor request: #{@packet.path}@#{@packet.agent_id}, token=#{self.token}")
    master.publish_to('agent_command', "agent_id.#{@packet.agent_id}", @packet.marshal)
    @requested = true
    self
  end

  # Correlation token shared with the remote agent's responses.
  def token
    @packet.token
  end

  def progress
    check_requested?
    raise NotImplementedError
  end

  def cancel
    check_requested?
    raise NotImplementedError
    #master.publish_to('agent_command', "agent_id.#{@packet.agent_id}", Wakame::Packets::ActorCancel.new(@packet.agent_id, ).marshal)
    #ED.unsubscribe(@event_ticket)
  end

  # Block until the actor reports completion; raises RuntimeError on a
  # FAILED status. tout is the limit in seconds (default 30 minutes).
  # NOTE(review): bare timeout() relies on Kernel#timeout from the
  # 'timeout' stdlib being loaded elsewhere -- confirm, and prefer
  # Timeout.timeout in new code.
  def wait_completion(tout=60*30)
    check_requested?
    timeout(tout) {
      Wakame.log.debug("#{self.class}: Waiting a response from the actor: #{@packet.path}@#{@packet.agent_id}, token=#{@packet.token}")
      ret_status = @wait_lock.deq
      Wakame.log.debug("#{self.class}: A response (status=#{ret_status}) back from the actor: #{@packet.path}@#{@packet.agent_id}, token=#{@packet.token}")
      if ret_status == Actor::STATUS_FAILED
        raise RuntimeError, "Failed status has been returned: Actor Request #{token}"
      end
    }
  end
  alias :wait :wait_completion

  private
  # Guard: raises unless #request has been called first. (Named like a
  # predicate but actually raises -- kept for interface compatibility.)
  def check_requested?
    raise "The request has not been sent yet." unless @requested
  end
end
end
Deleted unnecessary lines from previous change(5d2ceb6...)
#!/usr/bin/ruby
require 'rubygems'
require 'wakame'
require 'wakame/packets'
require 'wakame/service'
require 'wakame/queue_declare'
require 'wakame/vm_manipulator'
module Wakame
# Monitors the liveness of remote wakame agents on behalf of the master
# process.
#
# Agents are tracked in two pools:
#   registered_agents   - agents that announced themselves with a
#                         Wakame::Packets::Register packet
#   unregistered_agents - agents seen only through ping reports so far
#
# A periodic EventMachine timer flags silent agents as TIMEOUT and, after
# a longer grace period, removes registered agents and fires
# Event::AgentUnMonitored.
class AgentMonitor
  include ThreadImmutable

  attr_reader :registered_agents, :unregistered_agents, :master, :gc_period

  # master:: the owning Master instance; its add_subscriber API is used to
  #          consume the 'registry', 'ping' and 'agent_event' queues.
  def initialize(master)
    bind_thread
    @master = master
    @registered_agents = {}
    @unregistered_agents = {}
    # Seconds of ping silence before an agent is marked TIMEOUT; after
    # twice that it is dropped from the registered pool.
    @agent_timeout = 31.to_f
    @agent_kill_timeout = @agent_timeout * 2
    @gc_period = 20.to_f

    # GC event trigger for agent timer & status
    calc_agent_timeout = proc {
      #Wakame.log.debug("Started agent GC : agents.size=#{@registered_agents.size}")
      kill_list=[]
      registered_agents.each { |agent_id, agent|
        next if agent.status == Service::Agent::STATUS_OFFLINE
        diff_time = Time.now - agent.last_ping_at
        #Wakame.log.debug "AgentMonitor GC : #{agent_id}: #{diff_time}"
        if diff_time > @agent_timeout.to_f
          agent.status = Service::Agent::STATUS_TIMEOUT
        end

        if diff_time > @agent_kill_timeout.to_f
          kill_list << agent_id
        end
      }
      # Drop long-silent agents from the registered pool and announce each
      # removal. NOTE(review): the unregistered pool is not swept here --
      # unregistered agents that stop pinging linger; confirm intended.
      kill_list.each { |agent_id|
        agent = @registered_agents.delete(agent_id)
        ED.fire_event(Event::AgentUnMonitored.new(agent)) unless agent.nil?
      }
      #Wakame.log.debug("Finished agent GC")
    }
    @agent_timeout_timer = EventMachine::PeriodicTimer.new(@gc_period, calc_agent_timeout)

    # NOTE(review): eval() on payloads straight off the message queue will
    # execute arbitrary code if the broker is not fully trusted; a safe
    # wire format (Marshal, JSON, ...) would be preferable.
    master.add_subscriber('registry') { |data|
      data = eval(data)
      agent_id = data[:agent_id]
      case data[:type]
      when 'Wakame::Packets::Register'
        register_agent(data)
      when 'Wakame::Packets::UnRegister'
        unregister_agent(agent_id)
      end
    }

    master.add_subscriber('ping') { |data|
      ping = eval(data)
      # Skip the old ping responses before starting master node.
      next if Time.parse(ping[:responded_at]) < master.started_at

      # Variable update function for the common members
      set_report_values = proc { |agent|
        agent.status = Service::Agent::STATUS_ONLINE
        agent.uptime = 0
        agent.last_ping_at = Time.parse(ping[:responded_at])
        agent.attr = ping[:attrs]

        agent.services.clear
        # NOTE(review): ping is accessed both as a Hash (ping[:attrs]) and
        # via a method call (ping.services) -- confirm what eval() actually
        # yields here.
        ping.services.each { |svc_id, i|
          agent.services[svc_id] = master.service_cluster.instances[svc_id]
        }
      }

      agent = agent(ping[:agent_id])
      if agent.nil?
        # First contact: park the agent in the unregistered pool until a
        # Register packet arrives.
        agent = Service::Agent.new(ping[:agent_id])
        set_report_values.call(agent)
        unregistered_agents[ping[:agent_id]]=agent
      else
        set_report_values.call(agent)
      end
      ED.fire_event(Event::AgentPong.new(agent))
    }

    master.add_subscriber('agent_event') { |data|
      response = eval(data)
      #p response
      case response[:type]
      when 'Wakame::Packets::ServiceStatusChanged'
        svc_inst = Service::ServiceInstance.instance_collection[response[:svc_id]]
        if svc_inst
          response_time = Time.parse(response[:responded_at])
          svc_inst.update_status(response[:new_status], response_time, response[:fail_message])
          # tmp_event = Event::ServiceStatusChanged.new(response[:svc_id], svc_inst.property, response[:status], response[:previous_status])
          # tmp_event.time = response_time
          # ED.fire_event(tmp_event)

          # if response[:previous_status] != Service::STATUS_ONLINE && response[:new_status] == Service::STATUS_ONLINE
          #   tmp_event = Event::ServiceOnline.new(tmp_event.instance_id, svc_inst.property)
          #   tmp_event.time = response_time
          #   ED.fire_event(tmp_event)
          # elsif response[:previous_status] != Service::STATUS_OFFLINE && response[:new_status] == Service::STATUS_OFFLINE
          #   tmp_event = Event::ServiceOffline.new(tmp_event.instance_id, svc_inst.property)
          #   tmp_event.time = response_time
          #   ED.fire_event(tmp_event)
          # elsif response[:previous_status] != Service::STATUS_FAIL && response[:new_status] == Service::STATUS_FAIL
          #   tmp_event = Event::ServiceFailed.new(tmp_event.instance_id, svc_inst.property, response[:fail_message])
          #   tmp_event.time = response_time
          #   ED.fire_event(tmp_event)
          # end
        end
      when 'Wakame::Packets::ActorResponse'
        case response[:status]
        when Actor::STATUS_RUNNING
          ED.fire_event(Event::ActorProgress.new(response[:agent_id], response[:token], 0))
        else
          # Any terminal status completes the actor request.
          ED.fire_event(Event::ActorComplete.new(response[:agent_id], response[:token], response[:status]))
        end
      else
        Wakame.log.warn("#{self.class}: Unhandled agent response: #{response[:type]}")
      end
    }
  end

  # Look up an agent in either pool; returns nil when unknown.
  def agent(agent_id)
    registered_agents[agent_id] || unregistered_agents[agent_id]
  end

  # Promote an agent into the registered pool, creating it on first
  # contact, and fire Event::AgentMonitored for newly-tracked agents.
  def register_agent(data)
    agent_id = data[:agent_id]
    agent = registered_agents[agent_id]
    if agent.nil?
      agent = unregistered_agents[agent_id]
      if agent.nil?
        # The agent is going to be registered at first time.
        agent = Service::Agent.new(agent_id)
        registered_agents[agent_id] = agent
      else
        # Move the reference from unregistered group to the registered group.
        registered_agents[agent_id] = unregistered_agents[agent_id]
        unregistered_agents.delete(agent_id)
      end
      Wakame.log.debug("The Agent has been registered: #{data.inspect}")
      #Wakame.log.debug(unregistered_agents)
      ED.fire_event(Event::AgentMonitored.new(agent))
    end
    agent.root_path = data[:root_path]
  end

  # Demote an agent back to the unregistered pool and fire
  # Event::AgentUnMonitored. No-op for unknown agents.
  def unregister_agent(agent_id)
    agent = registered_agents[agent_id]
    if agent
      unregistered_agents[agent_id] = registered_agents[agent_id]
      registered_agents.delete(agent_id)
      ED.fire_event(Event::AgentUnMonitored.new(agent))
    end
  end

  # def bind_agent(service_instance, &filter)
  #   agent_id, agent = @agents.find { |agent_id, agent|
  #     next false if agent.has_service_type?(service_instance.property.class)
  #     filter.call(agent)
  #   }
  #   return nil if agent.nil?
  #   service_instance.bind_agent(agent)
  #   agent
  # end

  # def unbind_agent(service_instance)
  #   service_instance.unbind_agent
  # end

  # Returns the master local agent object; raises until that agent has
  # registered itself.
  def master_local
    agent = registered_agents[@master.master_local_agent_id]
    # NOTE(review): leftover debug print to stdout -- consider removing or
    # routing through Wakame.log.debug.
    puts "#{agent} = registered_agents[#{@master.master_local_agent_id}]"
    raise "Master does not identify the master local agent yet." if agent.nil?
    agent
  end

  # Yield each registered agent whose status is ONLINE.
  def each_online(&blk)
    registered_agents.each { |k, v|
      next if v.status != Service::Agent::STATUS_ONLINE
      blk.call(v)
    }
  end

  # Snapshot both pools as {:registered=>[...], :unregistered=>[...]}
  # of per-agent dump_status hashes.
  def dump_status
    ag = []  # NOTE(review): unused local -- candidate for removal
    res = {:registered=>[], :unregistered=>[]}

    @registered_agents.each { |key, a|
      res[:registered] << a.dump_status
    }
    @unregistered_agents.each { |key, a|
      res[:unregistered] << a.dump_status
    }
    res
  end
end
# The wakame master node process: owns the AMQP connection, the command
# queue, the agent monitor and the service cluster.
class Master
  include Wakame::AMQPClient
  include Wakame::QueueDeclare

  # AMQP queues this process declares and consumes (exchange, queue).
  define_queue 'agent_event', 'agent_event'
  define_queue 'ping', 'ping'
  define_queue 'registry', 'registry'

  attr_reader :command_queue, :agent_monitor, :configuration, :service_cluster, :started_at

  # opts is passed straight through to AMQPClient#connect; post_setup runs
  # in the connect callback, i.e. once the AMQP connection is up.
  def initialize(opts={})
    pre_setup
    connect(opts) {
      post_setup
    }
    Wakame.log.info("Started master process : WAKAME_ROOT=#{Wakame.config.root_path} WAKAME_ENV=#{Wakame.config.environment}")
  end

  # def send_agent_command(command, agent_id=nil)
  #   raise TypeError unless command.is_a? Packets::RequestBase
  #   EM.next_tick {
  #     if agent_id
  #       publish_to('agent_command', "agent_id.#{agent_id}", Marshal.dump(command))
  #     else
  #       publish_to('agent_command', '*', Marshal.dump(command))
  #     end
  #   }
  # end

  # Build (but do not send) an ActorRequest addressed to the given agent;
  # the caller invokes #request on the returned object to dispatch it.
  def actor_request(agent_id, path, *args)
    request = Wakame::Packets::ActorRequest.new(agent_id, Util.gen_id, path, *args)
    ActorRequest.new(self, request)
  end

  # Attributes reported by the agent running on the master host itself.
  # Raises until that agent has registered (see AgentMonitor#master_local).
  def attr
    agent_monitor.master_local.attr
  end

  # Shut down the command queue on teardown.
  def cleanup
    @command_queue.shutdown
  end

  # Id of the agent co-located with this master (set in pre_setup).
  def master_local_agent_id
    @master_local_agent_id
  end

  private
  # Resolve the local agent id: the EC2 instance id when running on EC2,
  # otherwise a stand-alone placeholder constant.
  def determine_agent_id
    if Wakame.config.environment == :EC2
      @master_local_agent_id = VmManipulator::EC2::MetadataService.query_metadata_uri('instance-id')
    else
      @master_local_agent_id = VmManipulator::StandAlone::INSTANCE_ID
    end
  end

  # Work done before connecting: record the start time (used to discard
  # stale pings) and bind the event dispatcher to the reactor thread.
  def pre_setup
    determine_agent_id
    @started_at = Time.now

    EM.barrier {
      Wakame.log.debug("Binding thread info to EventDispatcher.")
      EventDispatcher.instance.bind_thread(Thread.current)
    }
  end

  # Runs inside the EventMachine reactor once AMQP is connected; builds
  # the collaborating subsystems.
  def post_setup
    raise 'has to be put in EM.run context' unless EM.reactor_running?
    @command_queue = CommandQueue.new(self)
    @agent_monitor = AgentMonitor.new(self)

    @service_cluster = Util.new_(Wakame.config.cluster_class, self)
  end
end
# Tracks a single actor invocation on a remote agent: #request sends the
# packet and subscribes for the matching completion event; callers then
# block on #wait_completion (aliased #wait) for the final status.
class ActorRequest
  attr_reader :master

  def initialize(master, packet)
    raise TypeError unless packet.is_a?(Wakame::Packets::ActorRequest)
    @master = master
    @packet = packet
    @requested = false
    @event_ticket = nil
    # Thread-safe queue used to hand the final status from the event
    # callback over to the thread blocked in #wait_completion.
    @wait_lock = ::Queue.new
  end

  # Sends the request exactly once. Subscribes to ActorComplete events
  # before publishing so the response cannot be missed. Returns self.
  def request
    raise "The request has already been sent." if @requested
    @event_ticket = ED.subscribe(Event::ActorComplete) { |event|
      if event.token == @packet.token
        # Any of status except RUNNING are accomplishment of the actor request.
        Wakame.log.debug("#{self.class}: The actor request has been completed: token=#{self.token}, status=#{event.status}")
        ED.unsubscribe(@event_ticket)
        @wait_lock.enq(event.status)
      end
    }
    Wakame.log.debug("#{self.class}: Send the actor request: #{@packet.path}@#{@packet.agent_id}, token=#{self.token}")
    master.publish_to('agent_command', "agent_id.#{@packet.agent_id}", @packet.marshal)
    @requested = true
    self
  end

  # Token pairing this request with its completion event.
  def token
    @packet.token
  end

  # Not implemented yet; raises after verifying the request was sent.
  def progress
    check_requested?
    raise NotImplementedError
  end

  # Not implemented yet; raises after verifying the request was sent.
  def cancel
    check_requested?
    raise NotImplementedError
    #master.publish_to('agent_command', "agent_id.#{@packet.agent_id}", Wakame::Packets::ActorCancel.new(@packet.agent_id, ).marshal)
    #ED.unsubscribe(@event_ticket)
  end

  # Blocks until the actor reports a final status or +tout+ seconds pass
  # (Timeout::Error). Raises RuntimeError when the actor reports FAILED.
  def wait_completion(tout=60*30)
    check_requested?
    timeout(tout) {
      Wakame.log.debug("#{self.class}: Waiting a response from the actor: #{@packet.path}@#{@packet.agent_id}, token=#{@packet.token}")
      ret_status = @wait_lock.deq
      Wakame.log.debug("#{self.class}: A response (status=#{ret_status}) back from the actor: #{@packet.path}@#{@packet.agent_id}, token=#{@packet.token}")
      if ret_status == Actor::STATUS_FAILED
        raise RuntimeError, "Failed status has been returned: Actor Request #{token}"
      end
    }
  end
  alias :wait :wait_completion

  private

  # Guard for state queries: the packet must have been sent first.
  def check_requested?
    raise "The request has not been sent yet." unless @requested
  end
end
end
|
# coding: utf-8
require 'wareki/calendar_def'
require 'wareki/era_def'
require 'date'
# Japanese-calendar ("wareki") constants and parsing entry point.
module Wareki
  GREGORIAN_START = 2405160 # Date.new(1873, 1, 1, Date::GREGORIAN).jd
  GREGORIAN_START_YEAR = 1873
  IMPERIAL_START = 1480041 # Date.new(-660, 2, 11, Date::GREGORIAN).jd
  IMPERIAL_START_YEAR = -660
  # "Far future" sentinel used as the open end of the current era.
  if defined? JRUBY_VERSION
    # JRuby backs the year with a Java int, so the max-Fixnum year used on
    # CRuby is out of range there. Use max Integer for jruby.
    DATE_INFINITY = ::Date.new(java.lang.Integer::MAX_VALUE, 12, 31)
  else
    DATE_INFINITY = ::Date.new(2**(0.size * 8 -2) -1, 12, 31) # Use max Fixnum as year.
  end
  # Lookup: numeric year -> per-year calendar definition.
  YEAR_BY_NUM = Hash[*YEAR_DEFS.map{|y| [y.year, y]}.flatten].freeze
  # Lookup: era name -> era definition (northern-court + regular eras).
  ERA_BY_NAME = Hash[*(ERA_NORTH_DEFS + ERA_DEFS).map {|g| [g.name, g]}.flatten]
  ERA_BY_NAME['皇紀'] = ERA_BY_NAME['神武天皇即位紀元'] = Era.new('皇紀', -660, 1480041, DATE_INFINITY.jd)
  # Also register traditional/variant kanji spellings of each era name.
  ERA_BY_NAME.keys.each do |era_name|
    alt_era_name = era_name.tr("宝霊神応暦祥寿斎観寛徳禄万福禎国亀", "寳靈神應曆祥壽斉觀寬德祿萬福禎國龜")
    alt_era_name == era_name and next
    ERA_BY_NAME[alt_era_name] = ERA_BY_NAME[era_name]
  end
  ERA_BY_NAME.freeze
  # Characters accepted as numerals (kanji numerals, ASCII and full-width digits).
  NUM_CHARS = "零〇一二三四五六七八九十卄廿卅丗卌肆百皕千万億兆01234567890123456789"
  ALT_MONTH_NAME = %w(睦月 如月 弥生 卯月 皐月 水無月 文月 葉月 長月 神無月 霜月 師走).freeze
  # Matches strings like "平成三十年閏五月十五日" into era/year/month/day parts.
  REGEX = %r{^
    (?<era_name>西暦|#{ERA_BY_NAME.keys.join('|')})?
    (?:(?<year>[元#{NUM_CHARS}]+)年)?
    (?:(?<is_leap>閏|潤|うるう)?
      (?:(?<month>[正#{NUM_CHARS}]+)月 |
         (?<alt_month>#{ALT_MONTH_NAME.join('|')})))?
    (?:(?<day>[元朔晦#{NUM_CHARS}]+)日)?
  $}x
  class UnsupportedDateRange < StandardError; end
  module_function

  # Parses +str+ as a wareki date, falling back to stdlib ::Date.parse for
  # plain Gregorian input.
  # NOTE(review): `Date` here presumably resolves to Wareki::Date from the
  # required definition files — confirm.
  def parse_to_date(str, start = ::Date::ITALY)
    begin
      Date.parse(str).to_date(start)
    rescue ArgumentError => e
      ::Date.parse(str, true, start)
    end
  end
end
Fix DATE_INFINITY so it works on JRuby (a max-Fixnum year is out of range for JRuby's Date).
# coding: utf-8
require 'wareki/calendar_def'
require 'wareki/era_def'
require 'date'
# Japanese-calendar ("wareki") constants and parsing entry point.
module Wareki
  GREGORIAN_START = 2405160 # Date.new(1873, 1, 1, Date::GREGORIAN).jd
  GREGORIAN_START_YEAR = 1873
  IMPERIAL_START = 1480041 # Date.new(-660, 2, 11, Date::GREGORIAN).jd
  IMPERIAL_START_YEAR = -660
  # "Far future" sentinel used as the open end of the current era.
  if defined? JRUBY_VERSION
    DATE_INFINITY = ::Date.new(java.lang.Integer::MAX_VALUE, 12, 31) # Use max Integer for jruby.
  else
    DATE_INFINITY = ::Date.new(2**(0.size * 8 -2) -1, 12, 31) # Use max Fixnum as year.
  end
  # Lookup: numeric year -> per-year calendar definition.
  YEAR_BY_NUM = Hash[*YEAR_DEFS.map{|y| [y.year, y]}.flatten].freeze
  # Lookup: era name -> era definition (northern-court + regular eras).
  ERA_BY_NAME = Hash[*(ERA_NORTH_DEFS + ERA_DEFS).map {|g| [g.name, g]}.flatten]
  ERA_BY_NAME['皇紀'] = ERA_BY_NAME['神武天皇即位紀元'] = Era.new('皇紀', -660, 1480041, DATE_INFINITY.jd)
  # Also register traditional/variant kanji spellings of each era name.
  ERA_BY_NAME.keys.each do |era_name|
    alt_era_name = era_name.tr("宝霊神応暦祥寿斎観寛徳禄万福禎国亀", "寳靈神應曆祥壽斉觀寬德祿萬福禎國龜")
    alt_era_name == era_name and next
    ERA_BY_NAME[alt_era_name] = ERA_BY_NAME[era_name]
  end
  ERA_BY_NAME.freeze
  # Characters accepted as numerals (kanji numerals, ASCII and full-width digits).
  NUM_CHARS = "零〇一二三四五六七八九十卄廿卅丗卌肆百皕千万億兆01234567890123456789"
  ALT_MONTH_NAME = %w(睦月 如月 弥生 卯月 皐月 水無月 文月 葉月 長月 神無月 霜月 師走).freeze
  # Matches strings like "平成三十年閏五月十五日" into era/year/month/day parts.
  REGEX = %r{^
    (?<era_name>西暦|#{ERA_BY_NAME.keys.join('|')})?
    (?:(?<year>[元#{NUM_CHARS}]+)年)?
    (?:(?<is_leap>閏|潤|うるう)?
      (?:(?<month>[正#{NUM_CHARS}]+)月 |
         (?<alt_month>#{ALT_MONTH_NAME.join('|')})))?
    (?:(?<day>[元朔晦#{NUM_CHARS}]+)日)?
  $}x
  class UnsupportedDateRange < StandardError; end
  module_function

  # Parses +str+ as a wareki date, falling back to stdlib ::Date.parse for
  # plain Gregorian input.
  # NOTE(review): `Date` here presumably resolves to Wareki::Date from the
  # required definition files — confirm.
  def parse_to_date(str, start = ::Date::ITALY)
    begin
      Date.parse(str).to_date(start)
    rescue ArgumentError => e
      ::Date.parse(str, true, start)
    end
  end
end
|
require 'rest_client'
module Wechat
  # Thin HTTP wrapper around RestClient for the WeChat (Weixin) API.
  # Responses are parsed according to their Content-Type and WeChat error
  # codes are translated into Ruby exceptions.
  class Client
    attr_reader :base

    # base - default URL prefix for all requests.
    def initialize(base)
      @base = base
    end

    def get path, header={}
      request(path, header) do |url, header|
        RestClient.get(url, header)
      end
    end

    def post path, payload, header = {}
      request(path, header) do |url, header|
        RestClient.post(url, payload, header)
      end
    end

    # Builds the URL (header[:base] overrides the client default), performs
    # the HTTP call via the given block, and post-processes the response.
    # header[:as] forces the parse mode; :json also sets an Accept header.
    def request path, header={}, &block
      url = "#{header.delete(:base) || self.base}#{path}"
      as = header.delete(:as)
      header.merge!(:accept => :json) if as == :json
      response = yield(url, header)
      raise "Request not OK, response code #{response.code}" if response.code != 200
      parse_response(response, as || :json) do |parse_as, data|
        # Non-JSON payloads, and JSON without a non-zero errcode, pass through.
        break data unless (parse_as == :json && data["errcode"].present? && data["errcode"] != 0)
        case data["errcode"]
        when 0 # for request didn't expect results
          # NOTE(review): errcode == 0 already takes the `break data` path
          # above, so this branch looks unreachable — confirm.
          url =~ /card/ ? data : true # card api return 0 when successful
        # 42001: access_token expired, 40014: invalid access_token, 48001: api unauthorized
        when 40001, 42001, 40014, 48001
          raise AccessTokenExpiredError
        else
          raise ResponseError.new(data['errcode'], data['errmsg'])
        end
      end
    end

    private

    # Decides the parse mode — Content-Type wins, then the caller's +as+
    # hint, then plain text — and yields [parse_mode, parsed_data].
    def parse_response response, as
      content_type = response.headers[:content_type]
      parse_as = {
        /^application\/json/ => :json,
        /^image\/.*/ => :file
      }.inject([]){|memo, match| memo<<match[1] if content_type =~ match[0]; memo}.first || as || :text
      case parse_as
      when :file
        # Image bodies are spooled into a closed Tempfile.
        file = Tempfile.new("tmp")
        file.binmode
        file.write(response.body)
        file.close
        data = file
      when :json
        data = HashWithIndifferentAccess.new_from_hash_copying_default(JSON.parse(response.body))
      when :xml
        # NOTE(review): :xml never comes from the content-type table above,
        # so it is only reachable via an explicit `as` hint — confirm.
        xml = Hash.from_xml(response.body).fetch('xml', {})
        data = HashWithIndifferentAccess.new_from_hash_copying_default(xml)
      else
        data = response.body
      end
      return yield(parse_as, data)
    end
  end
end
Fix Weixin (WeChat) client API: name downloaded temp files using the extension from the Content-Disposition header.
require 'rest_client'
module Wechat
  # Thin HTTP wrapper around RestClient for the WeChat (Weixin) API.
  # Responses are parsed according to their Content-Type and WeChat error
  # codes are translated into Ruby exceptions.
  class Client
    attr_reader :base

    # base - default URL prefix for all requests.
    def initialize(base)
      @base = base
    end

    def get path, header={}
      request(path, header) do |url, header|
        RestClient.get(url, header)
      end
    end

    def post path, payload, header = {}
      request(path, header) do |url, header|
        RestClient.post(url, payload, header)
      end
    end

    # Builds the URL (header[:base] overrides the client default), performs
    # the HTTP call via the given block, and post-processes the response.
    # header[:as] forces the parse mode; :json also sets an Accept header.
    def request path, header={}, &block
      url = "#{header.delete(:base) || self.base}#{path}"
      as = header.delete(:as)
      header.merge!(:accept => :json) if as == :json
      response = yield(url, header)
      raise "Request not OK, response code #{response.code}" if response.code != 200
      parse_response(response, as || :json) do |parse_as, data|
        # Non-JSON payloads, and JSON without a non-zero errcode, pass through.
        break data unless (parse_as == :json && data["errcode"].present? && data["errcode"] != 0)
        case data["errcode"]
        when 0 # for request didn't expect results
          # NOTE(review): errcode == 0 already takes the `break data` path
          # above, so this branch looks unreachable — confirm.
          url =~ /card/ ? data : true # card api return 0 when successful
        # 42001: access_token expired, 40014: invalid access_token, 48001: api unauthorized
        when 40001, 42001, 40014, 48001
          raise AccessTokenExpiredError
        else
          raise ResponseError.new(data['errcode'], data['errmsg'])
        end
      end
    end

    private

    # Decides the parse mode — Content-Type wins, then the caller's +as+
    # hint, then plain text — and yields [parse_mode, parsed_data].
    def parse_response response, as
      content_type = response.headers[:content_type]
      parse_as = {
        /^application\/json/ => :json,
        /^image\/.*/ => :file
      }.inject([]){|memo, match| memo<<match[1] if content_type =~ match[0]; memo}.first || as || :text
      case parse_as
      when :file
        # Keep the extension from Content-Disposition so the temp file's
        # suffix matches the downloaded media type.
        # NOTE(review): raises NoMethodError if the header is absent — confirm
        # every :file response carries Content-Disposition.
        extname = response.headers[:content_disposition][/.*(\..*)\"/, 1]
        file = Tempfile.new(["wx-", extname])
        file.binmode
        file.write(response.body)
        file.close
        data = file
      when :json
        data = HashWithIndifferentAccess.new_from_hash_copying_default(JSON.parse(response.body))
      when :xml
        # NOTE(review): :xml never comes from the content-type table above,
        # so it is only reachable via an explicit `as` hint — confirm.
        xml = Hash.from_xml(response.body).fetch('xml', {})
        data = HashWithIndifferentAccess.new_from_hash_copying_default(xml)
      else
        data = response.body
      end
      return yield(parse_as, data)
    end
  end
end
|
# frozen_string_literal: true
#= Reports message to Sentry about the success or failure of wiki edits
# Reports the outcome of MediaWiki API calls to Sentry.
class WikiResponse
  ###############
  # Entry point #
  ###############

  # Categorizes +response_data+ (a MediaWiki API response hash) and sends
  # a correspondingly-titled message to Sentry.
  def self.capture(response_data, opts)
    message = new(response_data, opts)
    message.parse_api_response
    message.send_to_sentry
  end

  #################
  # Main routines #
  #################

  # opts may carry :current_user, :post_data and :type (used for tagging).
  def initialize(response_data, opts={})
    @response_data = response_data
    @edit_data = response_data['edit']
    @current_user = opts[:current_user] || {}
    @post_data = opts[:post_data]
    @type = opts[:type]
  end

  # Sets @title and @level based on the shape of the API response.
  def parse_api_response
    # A successful edit will have response data like this:
    # {"edit"=>
    #   {"result"=>"Success",
    #    "pageid"=>11543696,
    #    "title"=>"User:Ragesock",
    #    "contentmodel"=>"wikitext",
    #    "oldrevid"=>671572777,
    #    "newrevid"=>674946741,
    #    "newtimestamp"=>"2015-08-07T05:27:43Z"}}
    #
    # A failed edit will have a response like this:
    # {"servedby"=>"mw1135",
    #  "error"=>
    #   {"code"=>"protectedpage",
    #    "info"=>"The \"templateeditor\" right is required to edit this page",
    #    "*"=>"See https://en.wikipedia.org/w/api.php for API usage"}}
    #
    # An edit stopped by the abuse filter will respond like this:
    # {"edit"=>
    #   {"result"=>"Failure",
    #    "code"=>"abusefilter-warning-email",
    #    "info"=>"Hit AbuseFilter: Adding emails in articles",
    #    "warning"=>"[LOTS OF HTML WARNING TEXT]"}}
    if @response_data['error']
      parse_api_error_response
    elsif @edit_data
      parse_api_edit_response
    elsif @response_data['query']
      parse_api_query_response
    elsif @response_data['options']
      parse_api_options_response
    else
      parse_api_unknown_response
    end
  end

  # Ships the parsed title/level plus raw request context to Sentry.
  def send_to_sentry
    Raven.capture_message @title,
                          level: @level,
                          tags: { username: @current_user[:username],
                                  action_type: @type },
                          extra: { response_data: @response_data,
                                   post_data: @post_data,
                                   current_user: @current_user }
  end

  ###################
  # Parsing methods #
  ###################

  private

  def parse_api_edit_response
    if @edit_data['result'] == 'Success'
      @title = "Successful #{@type}"
      @level = 'info'
    else
      parse_failed_edit
    end
  end

  # Annotates failed-edit titles with the kind of failure (CAPTCHA,
  # spam blacklist, or the API error code).
  def parse_failed_edit
    @title = "Failed #{@type}"
    @title += ': CAPTCHA' if @edit_data['captcha']
    @title += ': spamblacklist' if @edit_data['spamblacklist']
    code = @response_data['edit']['code']
    @title += ": #{code}" if @edit_data['code']
    @level = 'warning'
  end

  def parse_api_error_response
    code = @response_data['error']['code']
    # If the OAuth credentials are invalid, we need to flag this.
    # It gets handled by application controller.
    # NOTE(review): @current_user defaults to {} and a Hash has no
    # #update_attributes — confirm an error response always arrives with a
    # real user record.
    if code == 'mwoauth-invalid-authorization'
      @current_user.update_attributes(wiki_token: 'invalid')
    end
    @title = "Failed #{@type}: #{code}"
    @level = 'warning'
  end

  def parse_api_query_response
    @title = "#{@type} query"
    @level = 'info'
  end

  def parse_api_options_response
    if @response_data['warnings']
      @title = "Unexpected warning for #{@type} update"
      @level = 'error'
    else
      @title = "Successful #{@type} update"
      @level = 'info'
    end
  end

  def parse_api_unknown_response
    @title = "Unknown response for #{@type}"
    @level = 'error'
  end
end
Don't log 'Successful edit' or 'tokens query'.
This should cut down the long response time for course enrollment and other actions that involve MediaWiki interaction.
# frozen_string_literal: true
#= Reports message to Sentry about the success or failure of wiki edits
# Reports the outcome of MediaWiki API calls to Sentry, skipping a small
# set of well-known uninteresting messages.
class WikiResponse
  ###############
  # Entry point #
  ###############

  # Categorizes +response_data+ (a MediaWiki API response hash) and sends
  # a correspondingly-titled message to Sentry.
  def self.capture(response_data, opts)
    message = new(response_data, opts)
    message.parse_api_response
    message.send_to_sentry
  end

  #################
  # Main routines #
  #################

  # opts may carry :current_user, :post_data and :type (used for tagging).
  def initialize(response_data, opts={})
    @response_data = response_data
    @edit_data = response_data['edit']
    @current_user = opts[:current_user] || {}
    @post_data = opts[:post_data]
    @type = opts[:type]
  end

  # Sets @title and @level based on the shape of the API response.
  def parse_api_response
    # A successful edit will have response data like this:
    # {"edit"=>
    #   {"result"=>"Success",
    #    "pageid"=>11543696,
    #    "title"=>"User:Ragesock",
    #    "contentmodel"=>"wikitext",
    #    "oldrevid"=>671572777,
    #    "newrevid"=>674946741,
    #    "newtimestamp"=>"2015-08-07T05:27:43Z"}}
    #
    # A failed edit will have a response like this:
    # {"servedby"=>"mw1135",
    #  "error"=>
    #   {"code"=>"protectedpage",
    #    "info"=>"The \"templateeditor\" right is required to edit this page",
    #    "*"=>"See https://en.wikipedia.org/w/api.php for API usage"}}
    #
    # An edit stopped by the abuse filter will respond like this:
    # {"edit"=>
    #   {"result"=>"Failure",
    #    "code"=>"abusefilter-warning-email",
    #    "info"=>"Hit AbuseFilter: Adding emails in articles",
    #    "warning"=>"[LOTS OF HTML WARNING TEXT]"}}
    if @response_data['error']
      parse_api_error_response
    elsif @edit_data
      parse_api_edit_response
    elsif @response_data['query']
      parse_api_query_response
    elsif @response_data['options']
      parse_api_options_response
    else
      parse_api_unknown_response
    end
  end

  # These represent well-known messages that we do not need to capture.
  # Bypassing Sentry capture avoids a performance hit.
  MESSAGES_TO_IGNORE = [
    'Successful edit',
    'tokens query'
  ].freeze

  # Ships the parsed title/level plus raw request context to Sentry,
  # unless the title is on the ignore list above.
  def send_to_sentry
    return if MESSAGES_TO_IGNORE.include?(@title)
    Raven.capture_message @title,
                          level: @level,
                          tags: { username: @current_user[:username],
                                  action_type: @type },
                          extra: { response_data: @response_data,
                                   post_data: @post_data,
                                   current_user: @current_user }
  end

  ###################
  # Parsing methods #
  ###################

  private

  def parse_api_edit_response
    if @edit_data['result'] == 'Success'
      @title = "Successful #{@type}"
      @level = 'info'
    else
      parse_failed_edit
    end
  end

  # Annotates failed-edit titles with the kind of failure (CAPTCHA,
  # spam blacklist, or the API error code).
  def parse_failed_edit
    @title = "Failed #{@type}"
    @title += ': CAPTCHA' if @edit_data['captcha']
    @title += ': spamblacklist' if @edit_data['spamblacklist']
    code = @response_data['edit']['code']
    @title += ": #{code}" if @edit_data['code']
    @level = 'warning'
  end

  def parse_api_error_response
    code = @response_data['error']['code']
    # If the OAuth credentials are invalid, we need to flag this.
    # It gets handled by application controller.
    # NOTE(review): @current_user defaults to {} and a Hash has no
    # #update_attributes — confirm an error response always arrives with a
    # real user record.
    if code == 'mwoauth-invalid-authorization'
      @current_user.update_attributes(wiki_token: 'invalid')
    end
    @title = "Failed #{@type}: #{code}"
    @level = 'warning'
  end

  def parse_api_query_response
    @title = "#{@type} query"
    @level = 'info'
  end

  def parse_api_options_response
    if @response_data['warnings']
      @title = "Unexpected warning for #{@type} update"
      @level = 'error'
    else
      @title = "Successful #{@type} update"
      @level = 'info'
    end
  end

  def parse_api_unknown_response
    @title = "Unknown response for #{@type}"
    @level = 'error'
  end
end
|
require 'wraith'
# Manages the folder layout Wraith saves screenshots into: the working
# shots directory, the history archive, and per-path subfolders.
class Wraith::FolderManager
  attr_reader :wraith

  def initialize(config)
    @wraith = Wraith::Wraith.new(config)
  end

  # Working directory for the current run's screenshots.
  def dir
    wraith.directory
  end

  # Archive directory holding the previous run's screenshots.
  def history_dir
    wraith.history_dir
  end

  def paths
    wraith.paths
  end

  # Configured paths, or — when none are configured — the path list the
  # spider stored on disk.
  def spider_paths
    if !paths
      paths = File.read(wraith.spider_file)
      eval(paths)
    else
      wraith.paths
    end
  end

  # Wipes and recreates the working shots directory.
  def clear_shots_folder
    FileUtils.rm_rf("./#{dir}")
    FileUtils.mkdir_p("#{dir}")
  end

  # Archives the current shots into the history folder.
  # Copies the folder's *contents* ("#{dir}/.") rather than the folder
  # itself: copying `dir` directly nests it inside an existing history
  # folder and breaks gallery generation.
  def copy_old_shots
    FileUtils.cp_r("#{dir}/.", "#{history_dir}/")
  end

  # Restores archived shots back into the working directory.
  def restore_shots
    puts 'restoring'
    FileUtils.cp_r(Dir.glob("#{history_dir}/*"), dir)
  end

  # Creates a shots subfolder (and thumbnails folder) per path; spidered
  # entries have no label, so the path itself is slugified into one.
  def create_folders
    spider_paths.each do |folder_label, path|
      unless path
        path = folder_label
        folder_label = path.gsub('/', '__')
      end
      FileUtils.mkdir_p("#{dir}/thumbnails/#{folder_label}")
      FileUtils.mkdir_p("#{dir}/#{folder_label}")
    end
    puts 'Creating Folders'
  end

  # Tidy up the shots folder, removing unnecessary files
  #
  def tidy_shots_folder(dirs)
    if wraith.mode == 'diffs_only'
      dirs.each do |a, b|
        # If we are running in "diffs_only" mode, and none of the variants
        # show a difference, we remove the file from the shots folder
        if b.none? { |_k, v| v[:data] > 0 }
          FileUtils.rm_rf("#{wraith.directory}/#{a}")
          dirs.delete(a)
        end
      end
    end
  end
end
Copy contents, not the folder
The `FolderManager#copy_old_shots` method, as it currently stands, copies the old folder **inside** the history folder, if the old folder and the history folder exist. This, in turn, causes all kinds of problems (like not being able to generate gallery files, etc).
The correct way to copy folder contents is [apparently](http://stackoverflow.com/a/4645159/98634) via an extra `.`
require 'wraith'
# Manages the folder layout Wraith saves screenshots into: the working
# shots directory, the history archive, and per-path subfolders.
class Wraith::FolderManager
  attr_reader :wraith

  def initialize(config)
    @wraith = Wraith::Wraith.new(config)
  end

  # Working directory for the current run's screenshots.
  def dir
    wraith.directory
  end

  # Archive directory holding the previous run's screenshots.
  def history_dir
    wraith.history_dir
  end

  # Explicitly configured paths, if any.
  def paths
    wraith.paths
  end

  # Configured paths, or the spidered path list stored on disk.
  def spider_paths
    return wraith.paths if paths
    eval(File.read(wraith.spider_file))
  end

  # Wipes and recreates the working shots directory.
  def clear_shots_folder
    FileUtils.rm_rf("./#{dir}")
    FileUtils.mkdir_p("#{dir}")
  end

  # Copies the shots directory's *contents* into the history archive.
  def copy_old_shots
    FileUtils.cp_r("#{dir}/.", "#{history_dir}/")
  end

  # Restores archived shots back into the working directory.
  def restore_shots
    puts 'restoring'
    FileUtils.cp_r(Dir.glob("#{history_dir}/*"), dir)
  end

  # Creates a shots subfolder (and thumbnails folder) per path; spidered
  # entries carry no label, so the path itself is slugified into one.
  def create_folders
    spider_paths.each do |label, target|
      label = label.gsub('/', '__') unless target
      FileUtils.mkdir_p("#{dir}/thumbnails/#{label}")
      FileUtils.mkdir_p("#{dir}/#{label}")
    end
    puts 'Creating Folders'
  end

  # In "diffs_only" mode, drops every path whose variants show no visual
  # difference — removed both from disk and from the +dirs+ hash.
  def tidy_shots_folder(dirs)
    return unless wraith.mode == 'diffs_only'
    dirs.each do |label, variants|
      next unless variants.none? { |_key, info| info[:data] > 0 }
      FileUtils.rm_rf("#{wraith.directory}/#{label}")
      dirs.delete(label)
    end
  end
end
|
require 'fileutils'
require 'pathname'
require 'rexml/document'
require 'spaceship'
require 'json'
require 'rubygems/version'
require 'xcode/install/command'
require 'xcode/install/version'
require 'shellwords'
require 'open3'
require 'fastlane/actions/actions_helper'
module XcodeInstall
CACHE_DIR = Pathname.new("#{ENV['HOME']}/Library/Caches/XcodeInstall")
# Downloads files via the system `curl`, parsing its stderr progress
# output from a scratch file so progress can be shown/reported.
class Curl
  COOKIES_PATH = Pathname.new('/tmp/curl-cookies.txt')

  # @param url: The URL to download
  # @param directory: The directory to download this file into
  # @param cookies: Any cookies we should use for the download (used for auth with Apple)
  # @param output: A PathName for where we want to store the file
  # @param progress: parse and show the progress?
  # @param progress_block: A block that's called whenever we have an updated progress %
  #                        the parameter is a single number that's literally percent (e.g. 1, 50, 80 or 100)
  # @return true when curl reported success, false after three failed attempts
  # rubocop:disable Metrics/AbcSize
  def fetch(url: nil,
            directory: nil,
            cookies: nil,
            output: nil,
            progress: nil,
            progress_block: nil)
    options = cookies.nil? ? [] : ['--cookie', cookies, '--cookie-jar', COOKIES_PATH]

    uri = URI.parse(url)
    # Default the output filename to the remote basename.
    output ||= File.basename(uri.path)
    output = (Pathname.new(directory) + Pathname.new(output)) if directory

    # Piping over all of stderr over to a temporary file
    # the file content looks like this:
    #  0 4766M    0 6835k    0     0   573k      0  2:21:58  0:00:11  2:21:47  902k
    # This way we can parse the current %
    # The header is
    #   % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
    #
    # Discussion for this on GH: https://github.com/KrauseFx/xcode-install/issues/276
    # It was not easily possible to reimplement the same system using built-in methods
    # especially when it comes to resuming downloads
    # Piping over stderror to Ruby directly didn't work, due to the lack of flushing
    # from curl. The only reasonable way to trigger this, is to pipe things directly into a
    # local file, and parse that, and just poll that. We could get real time updates using
    # the `tail` command or similar, however the download task is not time sensitive enough
    # to make this worth the extra complexity, that's why we just poll and
    # wait for the process to be finished
    progress_log_file = File.join(CACHE_DIR, "progress.#{Time.now.to_i}.progress")
    FileUtils.rm_f(progress_log_file)

    retry_options = ['--retry', '3']
    command = [
      'curl',
      '--disable',
      *options,
      *retry_options,
      '--location',
      '--continue-at',
      '-',
      '--output',
      output,
      url
    ].map(&:to_s)
    command_string = command.collect(&:shellescape).join(' ')
    command_string += " 2> #{progress_log_file}" # to not run shellescape on the `2>`

    # Run the curl command in a loop, retry when curl exit status is 18
    # "Partial file. Only a part of the file was transferred."
    # https://curl.haxx.se/mail/archive-2008-07/0098.html
    # https://github.com/KrauseFx/xcode-install/issues/210
    3.times do
      # Non-blocking call of Open3
      # We're not using the block based syntax, as the bacon testing
      # library doesn't seem to support writing tests for it
      stdin, stdout, stderr, wait_thr = Open3.popen3(command_string)

      # Poll the file and see if we're done yet
      while wait_thr.alive?
        sleep(0.5) # it's not critical for this to be real-time
        next unless File.exist?(progress_log_file) # it might take longer for it to be created
        progress_content = File.read(progress_log_file).split("\r").last

        # Print out the progress for the CLI
        if progress
          print "\r#{progress_content}%"
          $stdout.flush
        end

        # Call back the block for other processes that might be interested
        matched = progress_content.match(/^\s*(\d+)/)
        next unless matched && matched.length == 2
        percent = matched[1].to_i
        progress_block.call(percent) if progress_block
      end

      # as we're not making use of the block-based syntax
      # we need to manually close those
      stdin.close
      stdout.close
      stderr.close

      return wait_thr.value.success? if wait_thr.value.success?
    end
    false
  ensure
    # Always clean up the cookie jar and the progress scratch file,
    # whether or not the download succeeded.
    FileUtils.rm_f(COOKIES_PATH)
    FileUtils.rm_f(progress_log_file)
  end
end
# rubocop:disable Metrics/ClassLength
class Installer
attr_reader :xcodes
def initialize
FileUtils.mkdir_p(CACHE_DIR)
end
def cache_dir
CACHE_DIR
end
def current_symlink
File.symlink?(SYMLINK_PATH) ? SYMLINK_PATH : nil
end
def download(version, progress, url = nil, progress_block = nil)
xcode = find_xcode_version(version) if url.nil?
return if url.nil? && xcode.nil?
dmg_file = Pathname.new(File.basename(url || xcode.path))
result = Curl.new.fetch(
url: url || xcode.url,
directory: CACHE_DIR,
cookies: url ? nil : spaceship.cookie,
output: dmg_file,
progress: progress,
progress_block: progress_block
)
result ? CACHE_DIR + dmg_file : nil
end
def find_xcode_version(version)
# By checking for the name and the version we have the best success rate
# Sometimes the user might pass
# "4.3 for Lion"
# or they might pass an actual Gem::Version
# Gem::Version.new("8.0.0")
# which should automatically match with "Xcode 8"
begin
parsed_version = Gem::Version.new(version)
rescue ArgumentError
nil
end
seedlist.each do |current_seed|
return current_seed if current_seed.name == version
return current_seed if parsed_version && current_seed.version == parsed_version
end
nil
end
def exist?(version)
return true if find_xcode_version(version)
false
end
def installed?(version)
installed_versions.map(&:version).include?(version)
end
def installed_versions
installed.map { |x| InstalledXcode.new(x) }.sort do |a, b|
Gem::Version.new(a.version) <=> Gem::Version.new(b.version)
end
end
# Returns an array of `XcodeInstall::Xcode`
# <XcodeInstall::Xcode:0x007fa1d451c390
# @date_modified=2015,
# @name="6.4",
# @path="/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
# @url=
# "https://developer.apple.com/devcenter/download.action?path=/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
# @version=Gem::Version.new("6.4")>,
#
# the resulting list is sorted with the most recent release as first element
def seedlist
@xcodes = Marshal.load(File.read(LIST_FILE)) if LIST_FILE.exist? && xcodes.nil?
all_xcodes = (xcodes || fetch_seedlist)
# We have to set the `installed` value here, as we might still use
# the cached list of available Xcode versions, but have a new Xcode
# installed in the mean-time
cached_installed_versions = installed_versions.map(&:bundle_version)
all_xcodes.each do |current_xcode|
current_xcode.installed = cached_installed_versions.include?(current_xcode.version)
end
all_xcodes.sort_by(&:version)
end
def install_dmg(dmg_path, suffix = '', switch = true, clean = true)
archive_util = '/System/Library/CoreServices/Applications/Archive Utility.app/Contents/MacOS/Archive Utility'
prompt = "Please authenticate for Xcode installation.\nPassword: "
xcode_path = "/Applications/Xcode#{suffix}.app"
if dmg_path.extname == '.xip'
`'#{archive_util}' #{dmg_path}`
xcode_orig_path = dmg_path.dirname + 'Xcode.app'
xcode_beta_path = dmg_path.dirname + 'Xcode-beta.app'
if Pathname.new(xcode_orig_path).exist?
`sudo -p "#{prompt}" mv "#{xcode_orig_path}" "#{xcode_path}"`
elsif Pathname.new(xcode_beta_path).exist?
`sudo -p "#{prompt}" mv "#{xcode_beta_path}" "#{xcode_path}"`
else
out = <<-HELP
No `Xcode.app(or Xcode-beta.app)` found in XIP. Please remove #{dmg_path} if you
suspect a corrupted download or run `xcversion update` to see if the version
you tried to install has been pulled by Apple. If none of this is true,
please open a new GH issue.
HELP
$stderr.puts out.tr("\n", ' ')
return
end
else
mount_dir = mount(dmg_path)
source = Dir.glob(File.join(mount_dir, 'Xcode*.app')).first
if source.nil?
out = <<-HELP
No `Xcode.app` found in DMG. Please remove #{dmg_path} if you suspect a corrupted
download or run `xcversion update` to see if the version you tried to install
has been pulled by Apple. If none of this is true, please open a new GH issue.
HELP
$stderr.puts out.tr("\n", ' ')
return
end
`sudo -p "#{prompt}" ditto "#{source}" "#{xcode_path}"`
`umount "/Volumes/Xcode"`
end
xcode = InstalledXcode.new(xcode_path)
unless xcode.verify_integrity
`sudo rm -rf #{xcode_path}`
return
end
enable_developer_mode
xcode.approve_license
xcode.install_components
if switch
`sudo rm -f #{SYMLINK_PATH}` unless current_symlink.nil?
`sudo ln -sf #{xcode_path} #{SYMLINK_PATH}` unless SYMLINK_PATH.exist?
`sudo xcode-select --switch #{xcode_path}`
puts `xcodebuild -version`
end
FileUtils.rm_f(dmg_path) if clean
end
# rubocop:disable Metrics/ParameterLists
def install_version(version, switch = true, clean = true, install = true, progress = true, url = nil, show_release_notes = true, progress_block = nil)
dmg_path = get_dmg(version, progress, url, progress_block)
fail Informative, "Failed to download Xcode #{version}." if dmg_path.nil?
if install
install_dmg(dmg_path, "-#{version.to_s.split(' ').join('.')}", switch, clean)
else
puts "Downloaded Xcode #{version} to '#{dmg_path}'"
end
open_release_notes_url(version) if show_release_notes && !url
end
def open_release_notes_url(version)
return if version.nil?
xcode = seedlist.find { |x| x.name == version }
`open #{xcode.release_notes_url}` unless xcode.nil? || xcode.release_notes_url.nil?
end
def list_annotated(xcodes_list)
installed = installed_versions.map(&:version)
xcodes_list.map do |x|
xcode_version = x.split(' ').first # exclude "beta N", "for Lion".
xcode_version << '.0' unless xcode_version.include?('.')
installed.include?(xcode_version) ? "#{x} (installed)" : x
end.join("\n")
end
def list
list_annotated(list_versions.sort_by(&:to_f))
end
def rm_list_cache
FileUtils.rm_f(LIST_FILE)
end
def symlink(version)
xcode = installed_versions.find { |x| x.version == version }
`sudo rm -f #{SYMLINK_PATH}` unless current_symlink.nil?
`sudo ln -sf #{xcode.path} #{SYMLINK_PATH}` unless xcode.nil? || SYMLINK_PATH.exist?
end
def symlinks_to
File.absolute_path(File.readlink(current_symlink), SYMLINK_PATH.dirname) if current_symlink
end
def mount(dmg_path)
plist = hdiutil('mount', '-plist', '-nobrowse', '-noverify', dmg_path.to_s)
document = REXML::Document.new(plist)
node = REXML::XPath.first(document, "//key[.='mount-point']/following-sibling::*[1]")
fail Informative, 'Failed to mount image.' unless node
node.text
end
private
def spaceship
@spaceship ||= begin
begin
Spaceship.login(ENV['XCODE_INSTALL_USER'], ENV['XCODE_INSTALL_PASSWORD'])
rescue Spaceship::Client::InvalidUserCredentialsError
raise 'The specified Apple developer account credentials are incorrect.'
rescue Spaceship::Client::NoUserCredentialsError
raise <<-HELP
Please provide your Apple developer account credentials via the
XCODE_INSTALL_USER and XCODE_INSTALL_PASSWORD environment variables.
HELP
end
if ENV.key?('XCODE_INSTALL_TEAM_ID')
Spaceship.client.team_id = ENV['XCODE_INSTALL_TEAM_ID']
end
Spaceship.client
end
end
LIST_FILE = CACHE_DIR + Pathname.new('xcodes.bin')
MINIMUM_VERSION = Gem::Version.new('4.3')
SYMLINK_PATH = Pathname.new('/Applications/Xcode.app')
def enable_developer_mode
`sudo /usr/sbin/DevToolsSecurity -enable`
`sudo /usr/sbin/dseditgroup -o edit -t group -a staff _developer`
end
def get_dmg(version, progress = true, url = nil, progress_block = nil)
if url
path = Pathname.new(url)
return path if path.exist?
end
if ENV.key?('XCODE_INSTALL_CACHE_DIR')
Pathname.glob(ENV['XCODE_INSTALL_CACHE_DIR'] + '/*').each do |fpath|
return fpath if /^xcode_#{version}\.dmg|xip$/ =~ fpath.basename.to_s
end
end
download(version, progress, url, progress_block)
end
def fetch_seedlist
@xcodes = parse_seedlist(spaceship.send(:request, :post,
'/services-account/QH65B2/downloadws/listDownloads.action').body)
names = @xcodes.map(&:name)
@xcodes += prereleases.reject { |pre| names.include?(pre.name) }
File.open(LIST_FILE, 'wb') do |f|
f << Marshal.dump(xcodes)
end
xcodes
end
def installed
result = `mdfind "kMDItemCFBundleIdentifier == 'com.apple.dt.Xcode'" 2>/dev/null`.split("\n")
if result.empty?
result = `find /Applications -maxdepth 1 -name '*.app' -type d -exec sh -c \
'if [ "$(/usr/libexec/PlistBuddy -c "Print :CFBundleIdentifier" \
"{}/Contents/Info.plist" 2>/dev/null)" == "com.apple.dt.Xcode" ]; then echo "{}"; fi' ';'`.split("\n")
end
result
end
def parse_seedlist(seedlist)
fail Informative, seedlist['resultString'] unless seedlist['resultCode'].eql? 0
seeds = Array(seedlist['downloads']).select do |t|
/^Xcode [0-9]/.match(t['name'])
end
xcodes = seeds.map { |x| Xcode.new(x) }.reject { |x| x.version < MINIMUM_VERSION }.sort do |a, b|
a.date_modified <=> b.date_modified
end
xcodes.select { |x| x.url.end_with?('.dmg') || x.url.end_with?('.xip') }
end
def list_versions
seedlist.map(&:name)
end
def prereleases
body = spaceship.send(:request, :get, '/download/').body
links = body.scan(%r{<a.+?href="(.+?/Xcode.+?/Xcode_(.+?)\.(dmg|xip))".*>(.*)</a>})
links = links.map do |link|
parent = link[0].scan(%r{path=(/.*/.*/)}).first.first
match = body.scan(/#{Regexp.quote(parent)}(.+?.pdf)/).first
if match
link + [parent + match.first]
else
link + [nil]
end
end
links = links.map { |pre| Xcode.new_prerelease(pre[1].strip.tr('_', ' '), pre[0], pre[4]) }
if links.count.zero?
rg = %r{platform-title.*Xcode.* beta.*<\/p>}
scan = body.scan(rg)
if scan.count.zero?
rg = %r{Xcode.* GM.*<\/p>}
scan = body.scan(rg)
end
return [] if scan.empty?
version = scan.first.gsub(/<.*?>/, '').gsub(/.*Xcode /, '')
link = body.scan(%r{<button .*"(.+?.(dmg|xip))".*</button>}).first.first
notes = body.scan(%r{<a.+?href="(/go/\?id=xcode-.+?)".*>(.*)</a>}).first.first
links << Xcode.new(version, link, notes)
end
links
end
# Run `hdiutil` with the given arguments and return its stdout.
#
# @param args [Array<String>] arguments passed straight through to hdiutil
# @return [String] captured standard output
# @raise [Informative] when hdiutil exits non-zero. If the target file is
#   actually an HTML page (a login/error page served instead of a DMG),
#   a more helpful message is raised.
def hdiutil(*args)
  io = IO.popen(['hdiutil', *args])
  result = io.read
  io.close
  unless $?.exitstatus.zero?
    file_path = args[-1]
    # BUGFIX: escape the path so filenames containing spaces or shell
    # metacharacters don't break (or subvert) the `file` invocation.
    if `file -b #{Shellwords.escape(file_path)}`.start_with?('HTML')
      fail Informative, "Failed to mount #{file_path}, logging into your account from a browser should tell you what is going wrong."
    end
    fail Informative, 'Failed to invoke hdiutil.'
  end
  result
end
end
# A simulator runtime that can be downloaded and installed for an Xcode
# release, built from one entry of a .dvtdownloadableindex feed.
class Simulator
  attr_reader :version      # Gem::Version parsed from the feed entry
  attr_reader :name         # display name, e.g. "iOS 9.3 Simulator"
  attr_reader :identifier   # package identifier (also names the receipt files)
  attr_reader :source       # download URL for the simulator DMG
  # NOTE: this reader is shadowed by the `xcode` method defined below.
  attr_reader :xcode

  # @param downloadable [Hash] one entry of the feed's 'downloadables' array
  def initialize(downloadable)
    @version = Gem::Version.new(downloadable['version'])
    # The feed uses $(DOWNLOADABLE_*) placeholders that must be substituted.
    @install_prefix = apply_variables(downloadable['userInfo']['InstallPrefix'])
    @name = apply_variables(downloadable['name'])
    @identifier = apply_variables(downloadable['identifier'])
    @source = apply_variables(downloadable['source'])
  end

  # Whether the runtime is already present on disk.
  def installed?
    # FIXME: use downloadables' `InstalledIfAllReceiptsArePresentOrNewer` key
    File.directory?(@install_prefix)
  end

  def installed_string
    installed? ? 'installed' : 'not installed'
  end

  def to_s
    "#{name} (#{installed_string})"
  end

  # The installed Xcode release offering this simulator, if any.
  def xcode
    Installer.new.installed_versions.find do |x|
      x.available_simulators.find do |s|
        s.version == version
      end
    end
  end

  # Download the simulator DMG into the cache directory.
  # @return [Pathname, nil] path of the DMG, or nil on failure
  def download(progress, progress_block = nil)
    result = Curl.new.fetch(
      url: source,
      directory: CACHE_DIR,
      progress: progress,
      progress_block: progress_block
    )
    result ? dmg_path : nil
  end

  # Download and (when should_install) install the simulator package.
  # @raise [Informative] when the download or the installation fails
  def install(progress, should_install)
    dmg_path = download(progress)
    fail Informative, "Failed to download #{@name}." if dmg_path.nil?
    return unless should_install
    prepare_package unless pkg_path.exist?
    puts "Please authenticate to install #{name}..."
    `sudo installer -pkg #{pkg_path} -target /`
    fail Informative, "Could not install #{name}, please try again" unless installed?
    # Copy the receipts into the runtime prefix so Xcode recognises it.
    source_receipts_dir = '/private/var/db/receipts'
    target_receipts_dir = "#{@install_prefix}/System/Library/Receipts"
    FileUtils.mkdir_p(target_receipts_dir)
    FileUtils.cp("#{source_receipts_dir}/#{@identifier}.bom", target_receipts_dir)
    FileUtils.cp("#{source_receipts_dir}/#{@identifier}.plist", target_receipts_dir)
    puts "Successfully installed #{name}"
  end

  # BUGFIX: this was the symbol literal `:private` — a no-op expression —
  # so all of the helpers below were unintentionally public.
  private

  # Repackage the pkg inside the DMG with the correct install-location.
  def prepare_package
    puts 'Mounting DMG'
    mount_location = Installer.new.mount(dmg_path)
    puts 'Expanding pkg'
    expanded_pkg_path = CACHE_DIR + identifier
    FileUtils.rm_rf(expanded_pkg_path)
    `pkgutil --expand #{mount_location}/*.pkg #{expanded_pkg_path}`
    puts "Expanded pkg into #{expanded_pkg_path}"
    puts 'Unmounting DMG'
    `umount #{mount_location}`
    puts 'Setting package installation location'
    package_info_path = expanded_pkg_path + 'PackageInfo'
    package_info_contents = File.read(package_info_path)
    File.open(package_info_path, 'w') do |f|
      # Inject install-location so the pkg lands in the runtime prefix.
      f << package_info_contents.sub('pkg-info', %(pkg-info install-location="#{@install_prefix}"))
    end
    puts 'Rebuilding package'
    `pkgutil --flatten #{expanded_pkg_path} #{pkg_path}`
    FileUtils.rm_rf(expanded_pkg_path)
  end

  # Cache location of the downloaded DMG.
  def dmg_path
    CACHE_DIR + Pathname.new(source).basename
  end

  # Cache location of the repackaged pkg.
  def pkg_path
    CACHE_DIR + "#{identifier}.pkg"
  end

  # Substitute the feed's $(DOWNLOADABLE_*) placeholders (mutates template).
  # @param template [String] value possibly containing placeholders
  # @return [String] the substituted template
  def apply_variables(template)
    variable_map = {
      '$(DOWNLOADABLE_VERSION_MAJOR)' => version.to_s.split('.')[0],
      '$(DOWNLOADABLE_VERSION_MINOR)' => version.to_s.split('.')[1],
      '$(DOWNLOADABLE_IDENTIFIER)' => identifier,
      '$(DOWNLOADABLE_VERSION)' => version.to_s
    }.freeze
    variable_map.each do |key, value|
      next unless template.include?(key)
      template.sub!(key, value)
    end
    template
  end
end
# An Xcode installation found on disk, inspected via its bundle plists
# and `xcodebuild`.
class InstalledXcode
  # Team identifier Apple signs Xcode with; used for integrity checks.
  TEAM_IDENTIFIER = '59GAB85EFG'.freeze
  AUTHORITY = 'Apple Mac OS Application Signing'.freeze

  attr_reader :path
  # NOTE: the readers below are shadowed by the memoizing methods of the
  # same name further down.
  attr_reader :version
  attr_reader :bundle_version
  attr_reader :uuid
  attr_reader :downloadable_index_url
  attr_reader :available_simulators

  # @param path [String, Pathname] path of the Xcode.app bundle
  def initialize(path)
    @path = Pathname.new(path)
  end

  # Marketing version reported by xcodebuild, e.g. "9.4.1" (memoized).
  def version
    @version ||= fetch_version
  end

  def bundle_version
    @bundle_version ||= Gem::Version.new(bundle_version_string)
  end

  def uuid
    @uuid ||= plist_entry(':DVTPlugInCompatibilityUUID')
  end

  # URL of the .dvtdownloadableindex listing this Xcode's simulators.
  # Xcode >= 8.1 serves the feed from the devimages-cdn host.
  def downloadable_index_url
    @downloadable_index_url ||= begin
      if Gem::Version.new(version) >= Gem::Version.new('8.1')
        "https://devimages-cdn.apple.com/downloads/xcode/simulators/index-#{bundle_version}-#{uuid}.dvtdownloadableindex"
      else
        "https://devimages.apple.com.edgekey.net/downloads/xcode/simulators/index-#{bundle_version}-#{uuid}.dvtdownloadableindex"
      end
    end
  end

  # Accept the license: pre-7.3 by writing the agreed-to defaults directly,
  # otherwise through `xcodebuild -license accept`.
  def approve_license
    if Gem::Version.new(version) < Gem::Version.new('7.3')
      license_info_path = File.join(@path, 'Contents/Resources/LicenseInfo.plist')
      license_id = `/usr/libexec/PlistBuddy -c 'Print :licenseID' #{license_info_path}`
      license_type = `/usr/libexec/PlistBuddy -c 'Print :licenseType' #{license_info_path}`
      license_plist_path = '/Library/Preferences/com.apple.dt.Xcode.plist'
      `sudo rm -rf #{license_plist_path}`
      if license_type == 'GM'
        `sudo /usr/libexec/PlistBuddy -c "add :IDELastGMLicenseAgreedTo string #{license_id}" #{license_plist_path}`
        `sudo /usr/libexec/PlistBuddy -c "add :IDEXcodeVersionForAgreedToGMLicense string #{version}" #{license_plist_path}`
      else
        `sudo /usr/libexec/PlistBuddy -c "add :IDELastBetaLicenseAgreedTo string #{license_id}" #{license_plist_path}`
        `sudo /usr/libexec/PlistBuddy -c "add :IDEXcodeVersionForAgreedToBetaLicense string #{version}" #{license_plist_path}`
      end
    else
      `sudo #{@path}/Contents/Developer/usr/bin/xcodebuild -license accept`
    end
  end

  # Simulators offered for this Xcode (memoized); empty when the feed
  # cannot be fetched or parsed.
  def available_simulators
    @available_simulators ||= JSON.parse(`curl -Ls #{downloadable_index_url} | plutil -convert json -o - -`)['downloadables'].map do |downloadable|
      Simulator.new(downloadable)
    end
  rescue JSON::ParserError
    return []
  end

  def install_components
    # starting with Xcode 9, we have `xcodebuild -runFirstLaunch` available to do package
    # postinstalls using a documented option
    if Gem::Version.new(version) >= Gem::Version.new('9')
      `sudo #{@path}/Contents/Developer/usr/bin/xcodebuild -runFirstLaunch`
    else
      Dir.glob("#{@path}/Contents/Resources/Packages/*.pkg").each do |pkg|
        `sudo installer -pkg #{pkg} -target /`
      end
    end
    # Touch the marker file so Xcode skips its first-launch install check.
    osx_build_version = `sw_vers -buildVersion`.chomp
    tools_version = `/usr/libexec/PlistBuddy -c "Print :ProductBuildVersion" "#{@path}/Contents/version.plist"`.chomp
    cache_dir = `getconf DARWIN_USER_CACHE_DIR`.chomp
    `touch #{cache_dir}com.apple.dt.Xcode.InstallCheckCache_#{osx_build_version}_#{tools_version}`
  end

  # This method might take a few ms, this could be improved by implementing https://github.com/KrauseFx/xcode-install/issues/273
  def fetch_version
    output = `DEVELOPER_DIR='' "#{@path}/Contents/Developer/usr/bin/xcodebuild" -version`
    return '0.0' if output.nil? || output.empty? # ¯\_(ツ)_/¯
    output.split("\n").first.split(' ')[1]
  end

  # Whether the bundle passes Gatekeeper assessment and carries Apple's
  # code-signing identity.
  def verify_integrity
    verify_app_security_assessment && verify_app_cert
  end

  # BUGFIX: this was the symbol literal `:private` — a no-op expression —
  # so the helpers below were unintentionally public.
  private

  # Dotted version from the DTXcode plist value, e.g. "0941" -> "9.4.1".
  def bundle_version_string
    digits = plist_entry(':DTXcode').to_i.to_s
    if digits.length < 3
      digits.split(//).join('.')
    else
      "#{digits[0..-3]}.#{digits[-2]}.#{digits[-1]}"
    end
  end

  # NOTE(review): both call sites already pass a leading ':', producing
  # "Print ::Key"; PlistBuddy appears to tolerate this — confirm.
  def plist_entry(keypath)
    `/usr/libexec/PlistBuddy -c "Print :#{keypath}" "#{path}/Contents/Info.plist"`.chomp
  end

  def verify_app_security_assessment
    puts `/usr/sbin/spctl --assess --verbose=4 --type execute #{@path}`
    $?.exitstatus.zero?
  end

  def verify_app_cert
    cert_info = Fastlane::Actions::VerifyBuildAction.gather_cert_info(@path)
    apple_team_identifier_result = cert_info['team_identifier'] == TEAM_IDENTIFIER
    apple_authority_result = cert_info['authority'].include?(AUTHORITY)
    apple_team_identifier_result && apple_authority_result
  end
end
# A version of Xcode we fetched from the Apple Developer Portal
# we can download & install.
#
# Sample object:
# <XcodeInstall::Xcode:0x007fa1d451c390
#  @date_modified=2015,
#  @name="6.4",
#  @path="/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
#  @url=
#   "https://developer.apple.com/devcenter/download.action?path=/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
#  @version=Gem::Version.new("6.4")>,
class Xcode
  attr_reader :date_modified
  # The name might include extra information like "for Lion" or "beta 2"
  attr_reader :name
  attr_reader :path
  attr_reader :url
  attr_reader :version
  attr_reader :release_notes_url
  # Accessor since it's set by the `Installer`
  attr_accessor :installed
  alias installed? installed

  # Builds either from a seedlist hash (url is nil) or from explicit
  # prerelease values (a name string plus url and release-notes path).
  def initialize(json, url = nil, release_notes_url = nil)
    if url.nil?
      initialize_from_seedlist(json)
    else
      initialize_from_prerelease(json, url, release_notes_url)
    end
    @version = begin
      Gem::Version.new(@name.split(' ')[0])
    rescue
      # Names like "X for Lion" may not parse; fall back to the minimum.
      Installer::MINIMUM_VERSION
    end
  end

  def to_s
    "Xcode #{version} -- #{url}"
  end

  def ==(other)
    date_modified == other.date_modified && name == other.name && path == other.path && \
      url == other.url && version == other.version
  end

  def self.new_prerelease(version, url, release_notes_path)
    new('name' => version,
        'files' => [{ 'remotePath' => url.split('=').last }],
        'release_notes_path' => release_notes_path)
  end

  private

  def initialize_from_seedlist(json)
    @date_modified = json['dateModified'].to_i
    @name = json['name'].gsub(/^Xcode /, '')
    @path = json['files'].first['remotePath']
    url_prefix = 'https://developer.apple.com/devcenter/download.action?path='
    @url = "#{url_prefix}#{@path}"
    @release_notes_url = "#{url_prefix}#{json['release_notes_path']}" if json['release_notes_path']
  end

  def initialize_from_prerelease(name, url, release_notes_url)
    @name = name
    @path = url.split('/').last
    url_prefix = 'https://developer.apple.com/'
    @url = "#{url_prefix}#{url}"
    @release_notes_url = "#{url_prefix}#{release_notes_url}"
  end
end
end
Used the `xip` command to extract the XIP archive.
Used the `xip` command instead of Archive Utility to extract the XIP file.
require 'fileutils'
require 'pathname'
require 'rexml/document'
require 'spaceship'
require 'json'
require 'rubygems/version'
require 'xcode/install/command'
require 'xcode/install/version'
require 'shellwords'
require 'open3'
require 'fastlane/actions/actions_helper'
module XcodeInstall
CACHE_DIR = Pathname.new("#{ENV['HOME']}/Library/Caches/XcodeInstall")
# Thin wrapper around the command-line `curl` binary, used for all
# downloads (Xcode archives and simulator packages). Supports resuming,
# cookie-based authentication and textual/percentage progress reporting.
class Curl
# Cookie jar handed to curl for authenticated downloads; removed in `ensure`.
COOKIES_PATH = Pathname.new('/tmp/curl-cookies.txt')
# @param url: The URL to download
# @param directory: The directory to download this file into
# @param cookies: Any cookies we should use for the download (used for auth with Apple)
# @param output: A PathName for where we want to store the file
# @param progress: parse and show the progress?
# @param progress_block: A block that's called whenever we have an updated progress %
# the parameter is a single number that's literally percent (e.g. 1, 50, 80 or 100)
# @return [Boolean] true when curl exited successfully, false otherwise
# rubocop:disable Metrics/AbcSize
def fetch(url: nil,
directory: nil,
cookies: nil,
output: nil,
progress: nil,
progress_block: nil)
options = cookies.nil? ? [] : ['--cookie', cookies, '--cookie-jar', COOKIES_PATH]
# Default the output filename to the last path component of the URL.
uri = URI.parse(url)
output ||= File.basename(uri.path)
output = (Pathname.new(directory) + Pathname.new(output)) if directory
# Piping over all of stderr over to a temporary file
# the file content looks like this:
# 0 4766M 0 6835k 0 0 573k 0 2:21:58 0:00:11 2:21:47 902k
# This way we can parse the current %
# The header is
# % Total % Received % Xferd Average Speed Time Time Time Current
#
# Discussion for this on GH: https://github.com/KrauseFx/xcode-install/issues/276
# It was not easily possible to reimplement the same system using built-in methods
# especially when it comes to resuming downloads
# Piping over stderror to Ruby directly didn't work, due to the lack of flushing
# from curl. The only reasonable way to trigger this, is to pipe things directly into a
# local file, and parse that, and just poll that. We could get real time updates using
# the `tail` command or similar, however the download task is not time sensitive enough
# to make this worth the extra complexity, that's why we just poll and
# wait for the process to be finished
progress_log_file = File.join(CACHE_DIR, "progress.#{Time.now.to_i}.progress")
FileUtils.rm_f(progress_log_file)
retry_options = ['--retry', '3']
command = [
'curl',
'--disable',
*options,
*retry_options,
'--location',
'--continue-at',
'-',
'--output',
output,
url
].map(&:to_s)
command_string = command.collect(&:shellescape).join(' ')
command_string += " 2> #{progress_log_file}" # to not run shellescape on the `2>`
# Run the curl command in a loop, retry when curl exit status is 18
# "Partial file. Only a part of the file was transferred."
# https://curl.haxx.se/mail/archive-2008-07/0098.html
# https://github.com/KrauseFx/xcode-install/issues/210
3.times do
# Non-blocking call of Open3
# We're not using the block based syntax, as the bacon testing
# library doesn't seem to support writing tests for it
stdin, stdout, stderr, wait_thr = Open3.popen3(command_string)
# Poll the file and see if we're done yet
while wait_thr.alive?
sleep(0.5) # it's not critical for this to be real-time
next unless File.exist?(progress_log_file) # it might take longer for it to be created
# Last carriage-return-delimited chunk is curl's most recent status line.
# NOTE(review): if the log file exists but is still empty, `last` is nil
# here and the `.match` call below would raise NoMethodError — confirm
# curl always writes a first chunk before the poll first sees the file.
progress_content = File.read(progress_log_file).split("\r").last
# Print out the progress for the CLI
if progress
print "\r#{progress_content}%"
$stdout.flush
end
# Call back the block for other processes that might be interested
matched = progress_content.match(/^\s*(\d+)/)
next unless matched && matched.length == 2
percent = matched[1].to_i
progress_block.call(percent) if progress_block
end
# as we're not making use of the block-based syntax
# we need to manually close those
stdin.close
stdout.close
stderr.close
# Success ends the retry loop immediately; failure falls through to retry.
return wait_thr.value.success? if wait_thr.value.success?
end
# All three attempts failed.
false
ensure
# Always clean up the cookie jar and the progress log, even on error.
FileUtils.rm_f(COOKIES_PATH)
FileUtils.rm_f(progress_log_file)
end
end
# rubocop:disable Metrics/ClassLength
# Finds, downloads, installs and manages Xcode releases, caching the
# seedlist fetched from the Apple Developer Portal on disk.
class Installer
  attr_reader :xcodes

  def initialize
    FileUtils.mkdir_p(CACHE_DIR)
  end

  def cache_dir
    CACHE_DIR
  end

  # @return [Pathname, nil] SYMLINK_PATH when it currently is a symlink
  def current_symlink
    File.symlink?(SYMLINK_PATH) ? SYMLINK_PATH : nil
  end

  # Download the given Xcode version (or a direct URL) into the cache.
  # @return [Pathname, nil] path of the downloaded archive, nil on failure
  def download(version, progress, url = nil, progress_block = nil)
    xcode = find_xcode_version(version) if url.nil?
    return if url.nil? && xcode.nil?
    dmg_file = Pathname.new(File.basename(url || xcode.path))
    result = Curl.new.fetch(
      url: url || xcode.url,
      directory: CACHE_DIR,
      # Direct URLs need no auth cookie; portal downloads reuse spaceship's.
      cookies: url ? nil : spaceship.cookie,
      output: dmg_file,
      progress: progress,
      progress_block: progress_block
    )
    result ? CACHE_DIR + dmg_file : nil
  end

  # Look up a seedlist entry by exact name or by parsed version.
  def find_xcode_version(version)
    # By checking for the name and the version we have the best success rate
    # Sometimes the user might pass
    #   "4.3 for Lion"
    # or they might pass an actual Gem::Version
    #   Gem::Version.new("8.0.0")
    # which should automatically match with "Xcode 8"
    begin
      parsed_version = Gem::Version.new(version)
    rescue ArgumentError
      nil
    end
    seedlist.each do |current_seed|
      return current_seed if current_seed.name == version
      return current_seed if parsed_version && current_seed.version == parsed_version
    end
    nil
  end

  def exist?(version)
    return true if find_xcode_version(version)
    false
  end

  def installed?(version)
    installed_versions.map(&:version).include?(version)
  end

  # @return [Array<InstalledXcode>] local installations, oldest version first
  def installed_versions
    installed.map { |x| InstalledXcode.new(x) }.sort do |a, b|
      Gem::Version.new(a.version) <=> Gem::Version.new(b.version)
    end
  end

  # Returns an array of `XcodeInstall::Xcode`
  # <XcodeInstall::Xcode:0x007fa1d451c390
  #  @date_modified=2015,
  #  @name="6.4",
  #  @path="/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
  #  @url=
  #   "https://developer.apple.com/devcenter/download.action?path=/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
  #  @version=Gem::Version.new("6.4")>,
  #
  # the resulting list is sorted with the most recent release as first element
  def seedlist
    # NOTE(review): Marshal.load is only safe here because LIST_FILE is
    # written by us in fetch_seedlist; never point it at untrusted data.
    @xcodes = Marshal.load(File.read(LIST_FILE)) if LIST_FILE.exist? && xcodes.nil?
    all_xcodes = (xcodes || fetch_seedlist)
    # We have to set the `installed` value here, as we might still use
    # the cached list of available Xcode versions, but have a new Xcode
    # installed in the mean-time
    cached_installed_versions = installed_versions.map(&:bundle_version)
    all_xcodes.each do |current_xcode|
      current_xcode.installed = cached_installed_versions.include?(current_xcode.version)
    end
    all_xcodes.sort_by(&:version)
  end

  # Install a downloaded archive: extract (XIP) or mount+copy (DMG), verify
  # the signature, accept the license, install components and optionally
  # switch to the new Xcode via xcode-select.
  def install_dmg(dmg_path, suffix = '', switch = true, clean = true)
    prompt = "Please authenticate for Xcode installation.\nPassword: "
    xcode_path = "/Applications/Xcode#{suffix}.app"
    if dmg_path.extname == '.xip'
      `xip -x #{dmg_path}`
      xcode_orig_path = File.join(Dir.pwd, 'Xcode.app')
      xcode_beta_path = File.join(Dir.pwd, 'Xcode-beta.app')
      if Pathname.new(xcode_orig_path).exist?
        `sudo -p "#{prompt}" mv "#{xcode_orig_path}" "#{xcode_path}"`
      elsif Pathname.new(xcode_beta_path).exist?
        `sudo -p "#{prompt}" mv "#{xcode_beta_path}" "#{xcode_path}"`
      else
        out = <<-HELP
No `Xcode.app(or Xcode-beta.app)` found in XIP. Please remove #{dmg_path} if you
suspect a corrupted download or run `xcversion update` to see if the version
you tried to install has been pulled by Apple. If none of this is true,
please open a new GH issue.
HELP
        $stderr.puts out.tr("\n", ' ')
        return
      end
    else
      mount_dir = mount(dmg_path)
      source = Dir.glob(File.join(mount_dir, 'Xcode*.app')).first
      if source.nil?
        out = <<-HELP
No `Xcode.app` found in DMG. Please remove #{dmg_path} if you suspect a corrupted
download or run `xcversion update` to see if the version you tried to install
has been pulled by Apple. If none of this is true, please open a new GH issue.
HELP
        $stderr.puts out.tr("\n", ' ')
        return
      end
      `sudo -p "#{prompt}" ditto "#{source}" "#{xcode_path}"`
      `umount "/Volumes/Xcode"`
    end
    xcode = InstalledXcode.new(xcode_path)
    # Refuse to keep an Xcode that fails Gatekeeper/codesign verification.
    unless xcode.verify_integrity
      `sudo rm -rf #{xcode_path}`
      return
    end
    enable_developer_mode
    xcode.approve_license
    xcode.install_components
    if switch
      `sudo rm -f #{SYMLINK_PATH}` unless current_symlink.nil?
      `sudo ln -sf #{xcode_path} #{SYMLINK_PATH}` unless SYMLINK_PATH.exist?
      `sudo xcode-select --switch #{xcode_path}`
      puts `xcodebuild -version`
    end
    FileUtils.rm_f(dmg_path) if clean
  end

  # Download and (optionally) install a given version end-to-end.
  # @raise [Informative] when the download fails
  # rubocop:disable Metrics/ParameterLists
  def install_version(version, switch = true, clean = true, install = true, progress = true, url = nil, show_release_notes = true, progress_block = nil)
    dmg_path = get_dmg(version, progress, url, progress_block)
    fail Informative, "Failed to download Xcode #{version}." if dmg_path.nil?
    if install
      install_dmg(dmg_path, "-#{version.to_s.split(' ').join('.')}", switch, clean)
    else
      puts "Downloaded Xcode #{version} to '#{dmg_path}'"
    end
    open_release_notes_url(version) if show_release_notes && !url
  end

  def open_release_notes_url(version)
    return if version.nil?
    xcode = seedlist.find { |x| x.name == version }
    `open #{xcode.release_notes_url}` unless xcode.nil? || xcode.release_notes_url.nil?
  end

  # Annotate a list of version names with "(installed)" markers.
  def list_annotated(xcodes_list)
    installed = installed_versions.map(&:version)
    xcodes_list.map do |x|
      xcode_version = x.split(' ').first # exclude "beta N", "for Lion".
      xcode_version << '.0' unless xcode_version.include?('.')
      installed.include?(xcode_version) ? "#{x} (installed)" : x
    end.join("\n")
  end

  def list
    # NOTE(review): to_f truncates at the second dot ("9.4.1" -> 9.4), so
    # ordering is approximate — acceptable for a display listing.
    list_annotated(list_versions.sort_by(&:to_f))
  end

  def rm_list_cache
    FileUtils.rm_f(LIST_FILE)
  end

  # Point /Applications/Xcode.app at an already-installed version.
  def symlink(version)
    xcode = installed_versions.find { |x| x.version == version }
    `sudo rm -f #{SYMLINK_PATH}` unless current_symlink.nil?
    `sudo ln -sf #{xcode.path} #{SYMLINK_PATH}` unless xcode.nil? || SYMLINK_PATH.exist?
  end

  def symlinks_to
    File.absolute_path(File.readlink(current_symlink), SYMLINK_PATH.dirname) if current_symlink
  end

  # Mount a DMG and return its mount point.
  # @raise [Informative] when no mount-point is present in hdiutil's plist
  def mount(dmg_path)
    plist = hdiutil('mount', '-plist', '-nobrowse', '-noverify', dmg_path.to_s)
    document = REXML::Document.new(plist)
    node = REXML::XPath.first(document, "//key[.='mount-point']/following-sibling::*[1]")
    fail Informative, 'Failed to mount image.' unless node
    node.text
  end

  private

  # Memoized, logged-in Spaceship client (credentials from the environment).
  def spaceship
    @spaceship ||= begin
      begin
        Spaceship.login(ENV['XCODE_INSTALL_USER'], ENV['XCODE_INSTALL_PASSWORD'])
      rescue Spaceship::Client::InvalidUserCredentialsError
        raise 'The specified Apple developer account credentials are incorrect.'
      rescue Spaceship::Client::NoUserCredentialsError
        raise <<-HELP
Please provide your Apple developer account credentials via the
XCODE_INSTALL_USER and XCODE_INSTALL_PASSWORD environment variables.
HELP
      end
      if ENV.key?('XCODE_INSTALL_TEAM_ID')
        Spaceship.client.team_id = ENV['XCODE_INSTALL_TEAM_ID']
      end
      Spaceship.client
    end
  end

  LIST_FILE = CACHE_DIR + Pathname.new('xcodes.bin')
  MINIMUM_VERSION = Gem::Version.new('4.3')
  SYMLINK_PATH = Pathname.new('/Applications/Xcode.app')

  def enable_developer_mode
    `sudo /usr/sbin/DevToolsSecurity -enable`
    `sudo /usr/sbin/dseditgroup -o edit -t group -a staff _developer`
  end

  # Resolve the archive to install: an explicit local path, a cached copy
  # from XCODE_INSTALL_CACHE_DIR, or a fresh download.
  def get_dmg(version, progress = true, url = nil, progress_block = nil)
    if url
      path = Pathname.new(url)
      return path if path.exist?
    end
    if ENV.key?('XCODE_INSTALL_CACHE_DIR')
      Pathname.glob(ENV['XCODE_INSTALL_CACHE_DIR'] + '/*').each do |fpath|
        # BUGFIX: the previous pattern /^xcode_#{version}\.dmg|xip$/ parsed
        # as (^xcode_<version>.dmg)|(xip$), so ANY file ending in "xip"
        # matched regardless of version. Group the extension alternation.
        return fpath if /^xcode_#{version}\.(dmg|xip)$/ =~ fpath.basename.to_s
      end
    end
    download(version, progress, url, progress_block)
  end

  # Fetch releases + prereleases from the portal and cache them on disk.
  def fetch_seedlist
    @xcodes = parse_seedlist(spaceship.send(:request, :post,
                                            '/services-account/QH65B2/downloadws/listDownloads.action').body)
    names = @xcodes.map(&:name)
    # Only add prereleases that are not already part of the main seedlist.
    @xcodes += prereleases.reject { |pre| names.include?(pre.name) }
    File.open(LIST_FILE, 'wb') do |f|
      f << Marshal.dump(xcodes)
    end
    xcodes
  end

  # Locate Xcode installations on disk: Spotlight first, then a manual
  # /Applications scan comparing bundle identifiers.
  def installed
    result = `mdfind "kMDItemCFBundleIdentifier == 'com.apple.dt.Xcode'" 2>/dev/null`.split("\n")
    if result.empty?
      result = `find /Applications -maxdepth 1 -name '*.app' -type d -exec sh -c \
'if [ "$(/usr/libexec/PlistBuddy -c "Print :CFBundleIdentifier" \
"{}/Contents/Info.plist" 2>/dev/null)" == "com.apple.dt.Xcode" ]; then echo "{}"; fi' ';'`.split("\n")
    end
    result
  end

  # Convert the raw seedlist payload into Xcode objects, keeping only
  # supported, installable (.dmg/.xip) releases, oldest first.
  # @raise [Informative] when the service reports a non-zero result code
  def parse_seedlist(seedlist)
    fail Informative, seedlist['resultString'] unless seedlist['resultCode'].eql? 0
    seeds = Array(seedlist['downloads']).select do |t|
      /^Xcode [0-9]/.match(t['name'])
    end
    xcodes = seeds.map { |x| Xcode.new(x) }.reject { |x| x.version < MINIMUM_VERSION }.sort do |a, b|
      a.date_modified <=> b.date_modified
    end
    xcodes.select { |x| x.url.end_with?('.dmg') || x.url.end_with?('.xip') }
  end

  def list_versions
    seedlist.map(&:name)
  end

  # Scrape the portal's /download/ page for prerelease Xcodes; falls back
  # to parsing the beta/GM announcement paragraph when no links match.
  def prereleases
    body = spaceship.send(:request, :get, '/download/').body
    links = body.scan(%r{<a.+?href="(.+?/Xcode.+?/Xcode_(.+?)\.(dmg|xip))".*>(.*)</a>})
    links = links.map do |link|
      # Directory of the download; used to find a sibling .pdf (release notes).
      parent = link[0].scan(%r{path=(/.*/.*/)}).first.first
      match = body.scan(/#{Regexp.quote(parent)}(.+?.pdf)/).first
      if match
        link + [parent + match.first]
      else
        link + [nil]
      end
    end
    links = links.map { |pre| Xcode.new_prerelease(pre[1].strip.tr('_', ' '), pre[0], pre[4]) }
    if links.count.zero?
      rg = %r{platform-title.*Xcode.* beta.*<\/p>}
      scan = body.scan(rg)
      if scan.count.zero?
        rg = %r{Xcode.* GM.*<\/p>}
        scan = body.scan(rg)
      end
      return [] if scan.empty?
      version = scan.first.gsub(/<.*?>/, '').gsub(/.*Xcode /, '')
      link = body.scan(%r{<button .*"(.+?.(dmg|xip))".*</button>}).first.first
      notes = body.scan(%r{<a.+?href="(/go/\?id=xcode-.+?)".*>(.*)</a>}).first.first
      links << Xcode.new(version, link, notes)
    end
    links
  end

  # Run `hdiutil` and return its stdout.
  # @raise [Informative] on failure, with a clearer hint when the "DMG"
  #   is actually an HTML login/error page
  def hdiutil(*args)
    io = IO.popen(['hdiutil', *args])
    result = io.read
    io.close
    unless $?.exitstatus.zero?
      file_path = args[-1]
      # BUGFIX: escape the path so filenames containing spaces or shell
      # metacharacters don't break (or subvert) the `file` invocation.
      if `file -b #{Shellwords.escape(file_path)}`.start_with?('HTML')
        fail Informative, "Failed to mount #{file_path}, logging into your account from a browser should tell you what is going wrong."
      end
      fail Informative, 'Failed to invoke hdiutil.'
    end
    result
  end
end
# A simulator runtime that can be downloaded and installed for an Xcode
# release, built from one entry of a .dvtdownloadableindex feed.
class Simulator
  attr_reader :version      # Gem::Version parsed from the feed entry
  attr_reader :name         # display name, e.g. "iOS 9.3 Simulator"
  attr_reader :identifier   # package identifier (also names the receipt files)
  attr_reader :source       # download URL for the simulator DMG
  # NOTE: this reader is shadowed by the `xcode` method defined below.
  attr_reader :xcode

  # @param downloadable [Hash] one entry of the feed's 'downloadables' array
  def initialize(downloadable)
    @version = Gem::Version.new(downloadable['version'])
    # The feed uses $(DOWNLOADABLE_*) placeholders that must be substituted.
    @install_prefix = apply_variables(downloadable['userInfo']['InstallPrefix'])
    @name = apply_variables(downloadable['name'])
    @identifier = apply_variables(downloadable['identifier'])
    @source = apply_variables(downloadable['source'])
  end

  # Whether the runtime is already present on disk.
  def installed?
    # FIXME: use downloadables' `InstalledIfAllReceiptsArePresentOrNewer` key
    File.directory?(@install_prefix)
  end

  def installed_string
    installed? ? 'installed' : 'not installed'
  end

  def to_s
    "#{name} (#{installed_string})"
  end

  # The installed Xcode release offering this simulator, if any.
  def xcode
    Installer.new.installed_versions.find do |x|
      x.available_simulators.find do |s|
        s.version == version
      end
    end
  end

  # Download the simulator DMG into the cache directory.
  # @return [Pathname, nil] path of the DMG, or nil on failure
  def download(progress, progress_block = nil)
    result = Curl.new.fetch(
      url: source,
      directory: CACHE_DIR,
      progress: progress,
      progress_block: progress_block
    )
    result ? dmg_path : nil
  end

  # Download and (when should_install) install the simulator package.
  # @raise [Informative] when the download or the installation fails
  def install(progress, should_install)
    dmg_path = download(progress)
    fail Informative, "Failed to download #{@name}." if dmg_path.nil?
    return unless should_install
    prepare_package unless pkg_path.exist?
    puts "Please authenticate to install #{name}..."
    `sudo installer -pkg #{pkg_path} -target /`
    fail Informative, "Could not install #{name}, please try again" unless installed?
    # Copy the receipts into the runtime prefix so Xcode recognises it.
    source_receipts_dir = '/private/var/db/receipts'
    target_receipts_dir = "#{@install_prefix}/System/Library/Receipts"
    FileUtils.mkdir_p(target_receipts_dir)
    FileUtils.cp("#{source_receipts_dir}/#{@identifier}.bom", target_receipts_dir)
    FileUtils.cp("#{source_receipts_dir}/#{@identifier}.plist", target_receipts_dir)
    puts "Successfully installed #{name}"
  end

  # BUGFIX: this was the symbol literal `:private` — a no-op expression —
  # so all of the helpers below were unintentionally public.
  private

  # Repackage the pkg inside the DMG with the correct install-location.
  def prepare_package
    puts 'Mounting DMG'
    mount_location = Installer.new.mount(dmg_path)
    puts 'Expanding pkg'
    expanded_pkg_path = CACHE_DIR + identifier
    FileUtils.rm_rf(expanded_pkg_path)
    `pkgutil --expand #{mount_location}/*.pkg #{expanded_pkg_path}`
    puts "Expanded pkg into #{expanded_pkg_path}"
    puts 'Unmounting DMG'
    `umount #{mount_location}`
    puts 'Setting package installation location'
    package_info_path = expanded_pkg_path + 'PackageInfo'
    package_info_contents = File.read(package_info_path)
    File.open(package_info_path, 'w') do |f|
      # Inject install-location so the pkg lands in the runtime prefix.
      f << package_info_contents.sub('pkg-info', %(pkg-info install-location="#{@install_prefix}"))
    end
    puts 'Rebuilding package'
    `pkgutil --flatten #{expanded_pkg_path} #{pkg_path}`
    FileUtils.rm_rf(expanded_pkg_path)
  end

  # Cache location of the downloaded DMG.
  def dmg_path
    CACHE_DIR + Pathname.new(source).basename
  end

  # Cache location of the repackaged pkg.
  def pkg_path
    CACHE_DIR + "#{identifier}.pkg"
  end

  # Substitute the feed's $(DOWNLOADABLE_*) placeholders (mutates template).
  # @param template [String] value possibly containing placeholders
  # @return [String] the substituted template
  def apply_variables(template)
    variable_map = {
      '$(DOWNLOADABLE_VERSION_MAJOR)' => version.to_s.split('.')[0],
      '$(DOWNLOADABLE_VERSION_MINOR)' => version.to_s.split('.')[1],
      '$(DOWNLOADABLE_IDENTIFIER)' => identifier,
      '$(DOWNLOADABLE_VERSION)' => version.to_s
    }.freeze
    variable_map.each do |key, value|
      next unless template.include?(key)
      template.sub!(key, value)
    end
    template
  end
end
# An Xcode installation found on disk, inspected via its bundle plists
# and `xcodebuild`.
class InstalledXcode
  # Team identifier Apple signs Xcode with; used for integrity checks.
  TEAM_IDENTIFIER = '59GAB85EFG'.freeze
  AUTHORITY = 'Apple Mac OS Application Signing'.freeze

  attr_reader :path
  # NOTE: the readers below are shadowed by the memoizing methods of the
  # same name further down.
  attr_reader :version
  attr_reader :bundle_version
  attr_reader :uuid
  attr_reader :downloadable_index_url
  attr_reader :available_simulators

  # @param path [String, Pathname] path of the Xcode.app bundle
  def initialize(path)
    @path = Pathname.new(path)
  end

  # Marketing version reported by xcodebuild, e.g. "9.4.1" (memoized).
  def version
    @version ||= fetch_version
  end

  def bundle_version
    @bundle_version ||= Gem::Version.new(bundle_version_string)
  end

  def uuid
    @uuid ||= plist_entry(':DVTPlugInCompatibilityUUID')
  end

  # URL of the .dvtdownloadableindex listing this Xcode's simulators.
  # Xcode >= 8.1 serves the feed from the devimages-cdn host.
  def downloadable_index_url
    @downloadable_index_url ||= begin
      if Gem::Version.new(version) >= Gem::Version.new('8.1')
        "https://devimages-cdn.apple.com/downloads/xcode/simulators/index-#{bundle_version}-#{uuid}.dvtdownloadableindex"
      else
        "https://devimages.apple.com.edgekey.net/downloads/xcode/simulators/index-#{bundle_version}-#{uuid}.dvtdownloadableindex"
      end
    end
  end

  # Accept the license: pre-7.3 by writing the agreed-to defaults directly,
  # otherwise through `xcodebuild -license accept`.
  def approve_license
    if Gem::Version.new(version) < Gem::Version.new('7.3')
      license_info_path = File.join(@path, 'Contents/Resources/LicenseInfo.plist')
      license_id = `/usr/libexec/PlistBuddy -c 'Print :licenseID' #{license_info_path}`
      license_type = `/usr/libexec/PlistBuddy -c 'Print :licenseType' #{license_info_path}`
      license_plist_path = '/Library/Preferences/com.apple.dt.Xcode.plist'
      `sudo rm -rf #{license_plist_path}`
      if license_type == 'GM'
        `sudo /usr/libexec/PlistBuddy -c "add :IDELastGMLicenseAgreedTo string #{license_id}" #{license_plist_path}`
        `sudo /usr/libexec/PlistBuddy -c "add :IDEXcodeVersionForAgreedToGMLicense string #{version}" #{license_plist_path}`
      else
        `sudo /usr/libexec/PlistBuddy -c "add :IDELastBetaLicenseAgreedTo string #{license_id}" #{license_plist_path}`
        `sudo /usr/libexec/PlistBuddy -c "add :IDEXcodeVersionForAgreedToBetaLicense string #{version}" #{license_plist_path}`
      end
    else
      `sudo #{@path}/Contents/Developer/usr/bin/xcodebuild -license accept`
    end
  end

  # Simulators offered for this Xcode (memoized); empty when the feed
  # cannot be fetched or parsed.
  def available_simulators
    @available_simulators ||= JSON.parse(`curl -Ls #{downloadable_index_url} | plutil -convert json -o - -`)['downloadables'].map do |downloadable|
      Simulator.new(downloadable)
    end
  rescue JSON::ParserError
    return []
  end

  def install_components
    # starting with Xcode 9, we have `xcodebuild -runFirstLaunch` available to do package
    # postinstalls using a documented option
    if Gem::Version.new(version) >= Gem::Version.new('9')
      `sudo #{@path}/Contents/Developer/usr/bin/xcodebuild -runFirstLaunch`
    else
      Dir.glob("#{@path}/Contents/Resources/Packages/*.pkg").each do |pkg|
        `sudo installer -pkg #{pkg} -target /`
      end
    end
    # Touch the marker file so Xcode skips its first-launch install check.
    osx_build_version = `sw_vers -buildVersion`.chomp
    tools_version = `/usr/libexec/PlistBuddy -c "Print :ProductBuildVersion" "#{@path}/Contents/version.plist"`.chomp
    cache_dir = `getconf DARWIN_USER_CACHE_DIR`.chomp
    `touch #{cache_dir}com.apple.dt.Xcode.InstallCheckCache_#{osx_build_version}_#{tools_version}`
  end

  # This method might take a few ms, this could be improved by implementing https://github.com/KrauseFx/xcode-install/issues/273
  def fetch_version
    output = `DEVELOPER_DIR='' "#{@path}/Contents/Developer/usr/bin/xcodebuild" -version`
    return '0.0' if output.nil? || output.empty? # ¯\_(ツ)_/¯
    output.split("\n").first.split(' ')[1]
  end

  # Whether the bundle passes Gatekeeper assessment and carries Apple's
  # code-signing identity.
  def verify_integrity
    verify_app_security_assessment && verify_app_cert
  end

  # BUGFIX: this was the symbol literal `:private` — a no-op expression —
  # so the helpers below were unintentionally public.
  private

  # Dotted version from the DTXcode plist value, e.g. "0941" -> "9.4.1".
  def bundle_version_string
    digits = plist_entry(':DTXcode').to_i.to_s
    if digits.length < 3
      digits.split(//).join('.')
    else
      "#{digits[0..-3]}.#{digits[-2]}.#{digits[-1]}"
    end
  end

  # NOTE(review): both call sites already pass a leading ':', producing
  # "Print ::Key"; PlistBuddy appears to tolerate this — confirm.
  def plist_entry(keypath)
    `/usr/libexec/PlistBuddy -c "Print :#{keypath}" "#{path}/Contents/Info.plist"`.chomp
  end

  def verify_app_security_assessment
    puts `/usr/sbin/spctl --assess --verbose=4 --type execute #{@path}`
    $?.exitstatus.zero?
  end

  def verify_app_cert
    cert_info = Fastlane::Actions::VerifyBuildAction.gather_cert_info(@path)
    apple_team_identifier_result = cert_info['team_identifier'] == TEAM_IDENTIFIER
    apple_authority_result = cert_info['authority'].include?(AUTHORITY)
    apple_team_identifier_result && apple_authority_result
  end
end
# A version of Xcode we fetched from the Apple Developer Portal
# we can download & install.
#
# Sample object:
# <XcodeInstall::Xcode:0x007fa1d451c390
#  @date_modified=2015,
#  @name="6.4",
#  @path="/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
#  @url=
#   "https://developer.apple.com/devcenter/download.action?path=/Developer_Tools/Xcode_6.4/Xcode_6.4.dmg",
#  @version=Gem::Version.new("6.4")>,
class Xcode
  attr_reader :date_modified
  # The name might include extra information like "for Lion" or "beta 2"
  attr_reader :name
  attr_reader :path
  attr_reader :url
  attr_reader :version
  attr_reader :release_notes_url
  # Accessor since it's set by the `Installer`
  attr_accessor :installed
  alias installed? installed

  # Builds either from a seedlist hash (url is nil) or from explicit
  # prerelease values (a name string plus url and release-notes path).
  def initialize(json, url = nil, release_notes_url = nil)
    if url.nil?
      initialize_from_seedlist(json)
    else
      initialize_from_prerelease(json, url, release_notes_url)
    end
    @version = begin
      Gem::Version.new(@name.split(' ')[0])
    rescue
      # Names like "X for Lion" may not parse; fall back to the minimum.
      Installer::MINIMUM_VERSION
    end
  end

  def to_s
    "Xcode #{version} -- #{url}"
  end

  def ==(other)
    date_modified == other.date_modified && name == other.name && path == other.path && \
      url == other.url && version == other.version
  end

  def self.new_prerelease(version, url, release_notes_path)
    new('name' => version,
        'files' => [{ 'remotePath' => url.split('=').last }],
        'release_notes_path' => release_notes_path)
  end

  private

  def initialize_from_seedlist(json)
    @date_modified = json['dateModified'].to_i
    @name = json['name'].gsub(/^Xcode /, '')
    @path = json['files'].first['remotePath']
    url_prefix = 'https://developer.apple.com/devcenter/download.action?path='
    @url = "#{url_prefix}#{@path}"
    @release_notes_url = "#{url_prefix}#{json['release_notes_path']}" if json['release_notes_path']
  end

  def initialize_from_prerelease(name, url, release_notes_url)
    @name = name
    @path = url.split('/').last
    url_prefix = 'https://developer.apple.com/'
    @url = "#{url_prefix}#{url}"
    @release_notes_url = "#{url_prefix}#{release_notes_url}"
  end
end
end
|
module Yourub
class Client
# Collected search results (Array of video hashes).
attr_reader :videos
# NOTE(review): the `config` reader generated here is shadowed by the
# `config` method defined below; only the generated writer stays effective.
attr_accessor :config, :extended_info
# Set up a client with empty result state and the default YouTube
# Data API search options.
def initialize
  @extended_info = false
  @categories = []
  @videos = []
  @count_filter = {}
  @api_options = {
    part: 'snippet',
    type: 'video',
    eventType: 'completed',
    order: 'relevance',
    safeSearch: 'none'
  }
end
# Configuration holder (shadows the attr_accessor-generated reader above).
def config
Yourub::Config
end
# Country codes supported by the validator.
def countries
Yourub::Validator.available_countries
end
# Memoized, unauthenticated Google API client keyed by the developer key.
def client
@client ||= Google::APIClient.new(
:key => config.developer_key,
:application_name => config.application_name,
:application_version => config.application_version,
:authorization => nil,
)
end
# Memoized discovery document for the configured YouTube API service/version.
def youtube
@youtube ||= client.discovered_api(config.youtube_api_service_name,
config.youtube_api_version)
end
def search(criteria)
begin
@videos = []
@criteria = Yourub::Validator.confirm(criteria)
search_by_criteria
rescue ArgumentError => e
Yourub.logger.error "#{e}"
end
end
def search_by_criteria
if @criteria.has_key? :id
search_by_id
else
merge_criteria_with_api_options
retrieve_categories
retrieve_videos
end
end
def search_by_id
params = {
:id => @criteria[:id],
:part => 'snippet,statistics',
}
video_response = client.execute!(
:api_method => youtube.videos.list,
:parameters => params
)
entry = Yourub::Reader.parse_videos(video_response)
add_video_to_search_result(entry.first) unless entry.nil?
end
def merge_criteria_with_api_options
mappings = {query: :q, max_results: :maxResults, country: :regionCode}
@api_options.merge! @criteria
@api_options.keys.each do |k|
@api_options[ mappings[k] ] = @api_options.delete(k) if mappings[k]
end
end
def retrieve_categories
if @criteria.has_key? :category
get_categories_for_country(@criteria[:country])
@categories = Yourub::Validator.valid_category(@categories, @criteria[:category])
end
end
def get_categories_for_country(country)
categories_list = video_categories_list_request(country)
categories_list.data.items.each do |cat_result|
category_name = parse_name(cat_result["snippet"]["title"])
@categories.push(cat_result["id"] => category_name)
end
end
def retrieve_videos
consume_criteria do |criteria|
begin
req = search_list_request(criteria)
if @extended_info || Yourub::CountFilter.filter
get_details_and_store req
else
videos = Yourub::Reader.parse_videos(req)
videos.each{ |v| add_video_to_search_result(v) }
end
rescue StandardError => e
Yourub.logger.error "Error #{e} retrieving videos for the criteria: #{criteria.to_s}"
end
end
end
def consume_criteria
to_consume = @api_options
if @criteria[:country]
@criteria[:country].each do |country|
to_consume[:regionCode] = country
consume_categories(to_consume) do |cat|
yield cat
end
end
else
yield to_consume
end
end
def consume_categories(to_consume)
if @categories.size > 0
@categories.each do |cat|
to_consume[:videoCategoryId] = cat.keys[0].to_i
yield to_consume
end
else
yield to_consume
end
end
def get_details_and_store(video_list)
video_list.data.items.each do |video_item|
v = videos_list_request video_item.id.videoId
v = Yourub::Reader.parse_videos(v)
add_video_to_search_result(v.first) if v
end
end
def search_list_request(options)
search_response = client.execute!(
:api_method => youtube.search.list,
:parameters => options
)
end
def videos_list_request(result_video_id)
params = video_params(result_video_id)
video_response = client.execute!(
:api_method => youtube.videos.list,
:parameters => params
)
end
def video_categories_list_request(country)
categories_list = client.execute!(
:api_method => youtube.video_categories.list,
:parameters => {"part" => "snippet","regionCode" => country }
)
end
def video_params(result_video_id)
parameters = {
:id => result_video_id,
:part => 'snippet,statistics,id',
}
unless @extended_info
fields = 'items(id,snippet(title,thumbnails),statistics(viewCount))'
parameters[:fields] = URI::encode(fields)
end
return parameters
end
def add_video_to_search_result(entry)
video = @extended_info ? entry : Yourub::Reader.parse_entry(entry)
if Yourub::CountFilter.accept?(entry)
@videos.push(video)
end
end
def parse_name(name)
return name.gsub("/", "-").downcase.gsub(/\s+/, "")
end
def get_views(id)
request = client.execute!(
:api_method => youtube.videos.list,
:parameters => {:id => id, :part => 'statistics'}
)
v = Yourub::Reader.parse_videos(request)
v ? Yourub::CountFilter.get_views_count(v.first) : nil
end
end
end
Remove the eventType default from the search API options
# Yourub: a thin wrapper around the YouTube Data API (via Google::APIClient).
module Yourub
  # Client performs searches against the YouTube API and collects the
  # parsed results in #videos.
  class Client
    # Parsed search results from the most recent #search call.
    attr_reader :videos
    # NOTE(review): the :config accessor is shadowed by the #config method
    # below, so this reader/writer pair is effectively unused — confirm
    # before relying on it.
    attr_accessor :config, :extended_info

    def initialize()
      # When true, full API entries are stored instead of the reduced form
      # produced by Yourub::Reader.parse_entry.
      @extended_info = false
      @categories, @videos = [], []
      @count_filter = {}
      # Default parameters sent with every search.list request
      # (no :eventType filter — completed events are not required here).
      @api_options= {
        :part => 'snippet',
        :type => 'video',
        :order => 'relevance',
        :safeSearch => 'none',
      }
    end

    # Global configuration (developer key, application name/version, API ids).
    def config
      Yourub::Config
    end

    # Country codes accepted by the validator.
    def countries
      Yourub::Validator.available_countries
    end

    # Lazily built, unauthenticated Google API client.
    def client
      @client ||= Google::APIClient.new(
        :key => config.developer_key,
        :application_name => config.application_name,
        :application_version => config.application_version,
        :authorization => nil,
      )
    end

    # Lazily discovered YouTube service definition.
    def youtube
      @youtube ||= client.discovered_api(config.youtube_api_service_name,
        config.youtube_api_version)
    end

    # Entry point: validate +criteria+ and run the search.
    # Validation failures are logged, not raised.
    def search(criteria)
      begin
        @videos = []
        @criteria = Yourub::Validator.confirm(criteria)
        search_by_criteria
      rescue ArgumentError => e
        Yourub.logger.error "#{e}"
      end
    end

    # Dispatch: an :id criterion is a direct lookup; anything else is a
    # country/category search.
    def search_by_criteria
      if @criteria.has_key? :id
        search_by_id
      else
        merge_criteria_with_api_options
        retrieve_categories
        retrieve_videos
      end
    end

    # Fetch a single video by id and store it (when parseable).
    def search_by_id
      params = {
        :id => @criteria[:id],
        :part => 'snippet,statistics',
      }
      video_response = client.execute!(
        :api_method => youtube.videos.list,
        :parameters => params
      )
      entry = Yourub::Reader.parse_videos(video_response)
      add_video_to_search_result(entry.first) unless entry.nil?
    end

    # Fold user criteria into @api_options, renaming our keys to the
    # parameter names the YouTube API expects.
    def merge_criteria_with_api_options
      mappings = {query: :q, max_results: :maxResults, country: :regionCode}
      @api_options.merge! @criteria
      @api_options.keys.each do |k|
        @api_options[ mappings[k] ] = @api_options.delete(k) if mappings[k]
      end
    end

    # Resolve the requested category name(s) for the requested country.
    def retrieve_categories
      if @criteria.has_key? :category
        get_categories_for_country(@criteria[:country])
        @categories = Yourub::Validator.valid_category(@categories, @criteria[:category])
      end
    end

    # Populate @categories with one {id => normalized_name} hash per
    # category available in +country+.
    def get_categories_for_country(country)
      categories_list = video_categories_list_request(country)
      categories_list.data.items.each do |cat_result|
        category_name = parse_name(cat_result["snippet"]["title"])
        @categories.push(cat_result["id"] => category_name)
      end
    end

    # Run one search.list request per country/category combination and
    # store the results. Per-combination errors are logged and do not
    # abort the remaining requests.
    def retrieve_videos
      consume_criteria do |criteria|
        begin
          req = search_list_request(criteria)
          if @extended_info || Yourub::CountFilter.filter
            get_details_and_store req
          else
            videos = Yourub::Reader.parse_videos(req)
            videos.each{ |v| add_video_to_search_result(v) }
          end
        rescue StandardError => e
          Yourub.logger.error "Error #{e} retrieving videos for the criteria: #{criteria.to_s}"
        end
      end
    end

    # Yield one API-options hash per requested country, or the bare
    # options when no country filter is present.
    def consume_criteria
      to_consume = @api_options
      if @criteria[:country]
        @criteria[:country].each do |country|
          to_consume[:regionCode] = country
          consume_categories(to_consume) do |cat|
            yield cat
          end
        end
      else
        yield to_consume
      end
    end

    # Yield the options once per resolved category, or once unfiltered.
    def consume_categories(to_consume)
      if @categories.size > 0
        @categories.each do |cat|
          to_consume[:videoCategoryId] = cat.keys[0].to_i
          yield to_consume
        end
      else
        yield to_consume
      end
    end

    # For each search hit, fetch the full video record and store it.
    def get_details_and_store(video_list)
      video_list.data.items.each do |video_item|
        v = videos_list_request video_item.id.videoId
        v = Yourub::Reader.parse_videos(v)
        add_video_to_search_result(v.first) if v
      end
    end

    # Raw search.list call; returns the API response.
    def search_list_request(options)
      search_response = client.execute!(
        :api_method => youtube.search.list,
        :parameters => options
      )
    end

    # Raw videos.list call for a single video id.
    def videos_list_request(result_video_id)
      params = video_params(result_video_id)
      video_response = client.execute!(
        :api_method => youtube.videos.list,
        :parameters => params
      )
    end

    # Raw videoCategories.list call for +country+.
    def video_categories_list_request(country)
      categories_list = client.execute!(
        :api_method => youtube.video_categories.list,
        :parameters => {"part" => "snippet","regionCode" => country }
      )
    end

    # Parameters for a videos.list lookup. Without extended info the
    # response is trimmed down to id/title/thumbnails/viewCount.
    def video_params(result_video_id)
      parameters = {
        :id => result_video_id,
        :part => 'snippet,statistics,id',
      }
      unless @extended_info
        fields = 'items(id,snippet(title,thumbnails),statistics(viewCount))'
        # NOTE(review): URI::encode is deprecated (removed in Ruby 3) —
        # consider URI.encode_www_form_component; verify what the API expects.
        parameters[:fields] = URI::encode(fields)
      end
      return parameters
    end

    # Store +entry+ (full or reduced form) if it passes the count filter.
    def add_video_to_search_result(entry)
      video = @extended_info ? entry : Yourub::Reader.parse_entry(entry)
      if Yourub::CountFilter.accept?(entry)
        @videos.push(video)
      end
    end

    # Normalize a category title into a lowercase, slash- and
    # whitespace-free key.
    def parse_name(name)
      return name.gsub("/", "-").downcase.gsub(/\s+/, "")
    end

    # Fetch just the view count for a single video id (nil if unparseable).
    def get_views(id)
      request = client.execute!(
        :api_method => youtube.videos.list,
        :parameters => {:id => id, :part => 'statistics'}
      )
      v = Yourub::Reader.parse_videos(request)
      v ? Yourub::CountFilter.get_views_count(v.first) : nil
    end
  end
end
|
# frozen_string_literal: false
#
# Cookbook:: postgresql
# Library:: default
# Author:: David Crane (<davidc@donorschoose.org>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::Mixin::ShellOut
module PostgresqlCookbook
  module Helpers
    #######
    # Function to truncate value to 4 significant bits, render human readable.
    # Used in server_conf resource:
    #
    # The memory settings (shared_buffers, effective_cache_size, work_mem,
    # maintenance_work_mem and wal_buffers) will be rounded down to keep
    # the 4 most significant bits, so that SHOW will be likely to use a
    # larger divisor. The output is actually a human readable string that
    # ends with "GB", "MB" or "kB" if over 1023, exactly what Postgresql
    # will expect in a postgresql.conf setting. The output may be up to
    # 6.25% less than the original value because of the rounding.
    def binaryround(value)
      # Keep a multiplier which grows through powers of 2
      multiplier = 1

      # Truncate value to 4 most significant bits
      while value >= 16
        value = (value / 2).floor
        multiplier *= 2
      end

      # Factor any remaining powers of 2 into the multiplier
      while value == 2 * (value / 2).floor
        value = (value / 2).floor
        multiplier *= 2
      end

      # Factor enough powers of 2 back into the value to
      # leave the multiplier as a power of 1024 that can
      # be represented as units of "GB", "MB" or "kB".
      if multiplier >= 1024 * 1024 * 1024
        while multiplier > 1024 * 1024 * 1024
          value = 2 * value
          multiplier = (multiplier / 2).floor
        end
        multiplier = 1
        units = 'GB'
      elsif multiplier >= 1024 * 1024
        while multiplier > 1024 * 1024
          value = 2 * value
          multiplier = (multiplier / 2).floor
        end
        multiplier = 1
        units = 'MB'
      elsif multiplier >= 1024
        while multiplier > 1024
          value = 2 * value
          multiplier = (multiplier / 2).floor
        end
        multiplier = 1
        units = 'kB'
      else
        units = ''
      end

      # Now we can return a nice human readable string.
      "#{multiplier * value}#{units}"
    end

    #######
    # Locale Configuration

    # Function to test the date order.
    # Used in recipes/config_initdb.rb to set this attribute:
    #    node.default['postgresql']['config']['datestyle']
    # Returns 'mdy', 'ymd' or 'dmy'.
    def locale_date_order
      # Test locale conversion of mon=11, day=22, year=33
      testtime = DateTime.new(2033, 11, 22, 0, 0, 0, '-00:00')
      #=> #<DateTime: 2033-11-22T00:00:00-0000 ...>

      # %x - Preferred representation for the date alone, no time
      res = testtime.strftime('%x')
      return 'mdy' if res.nil?

      posM = res.index('11')
      posD = res.index('22')
      posY = res.index('33')

      # BUG FIX: these branches were previously spelled `elseif`, which is
      # not a Ruby keyword — Ruby parsed it as a method call nested inside
      # the first branch, so the ymd/dmy cases were unreachable and the
      # method always returned 'mdy'. `elsif` restores the intended logic.
      if posM.nil? || posD.nil? || posY.nil?
        return 'mdy'
      elsif (posY < posM && posM < posD)
        return 'ymd'
      elsif (posD < posM)
        return 'dmy'
      end
      'mdy'
    end

    #######
    # Timezone Configuration
    require 'find'

    # Function to determine where the system stored shared timezone data.
    # Used in recipes/config_initdb.rb to determine where it should have
    # select_default_timezone(tzdir) search.
    def pg_TZDIR
      # System time zone conversions are controlled by a timezone data file
      # identified through environment variables (TZ and TZDIR) and/or file
      # and directory naming conventions specific to the Linux distribution.
      # Each of these timezone names will have been loaded into the PostgreSQL
      # pg_timezone_names view by the package maintainer.
      #
      # Instead of using the timezone name configured as the system default,
      # the PostgreSQL server uses ones named in postgresql.conf settings
      # (timezone and log_timezone). The initdb utility does initialize those
      # settings to the timezone name that corresponds to the system default.
      #
      # The system's timezone name is actually a filename relative to the
      # shared zoneinfo directory. That is usually /usr/share/zoneinfo, but
      # it was /usr/lib/zoneinfo in older distributions and can be anywhere
      # if specified by the environment variable TZDIR. The tzset(3) manpage
      # seems to indicate the following precedence:
      tzdir = nil
      if ::File.directory?('/usr/lib/zoneinfo')
        tzdir = '/usr/lib/zoneinfo'
      else
        share_path = [ENV['TZDIR'], '/usr/share/zoneinfo'].compact.first
        tzdir = share_path if ::File.directory?(share_path)
      end
      tzdir
    end

    #######
    # Function to support select_default_timezone(tzdir), which is
    # used in recipes/config_initdb.rb.
    # Returns false for zone names under "right/" (leap-second aware).
    def validate_zone(tzname)
      # PostgreSQL does not support leap seconds, so this function tests
      # the usual Linux tzname convention to avoid a misconfiguration.
      # Assume that the tzdata package maintainer has kept all timezone
      # data files with support for leap seconds is kept under the
      # so-named "right/" subdir of the shared zoneinfo directory.
      #
      # The original PostgreSQL initdb is not Unix-specific, so it did a
      # very complicated, thorough test in its pg_tz_acceptable() function
      # that I could not begin to understand how to do in ruby :).
      #
      # Testing the tzname is good enough, since a misconfiguration
      # will result in an immediate fatal error when the PostgreSQL
      # service is started, with pgstartup.log messages such as:
      # LOG: time zone "right/US/Eastern" appears to use leap seconds
      # DETAIL: PostgreSQL does not support leap seconds
      if tzname.index('right/') == 0
        false
      else
        true
      end
    end

    # Function to support select_default_timezone(tzdir), which is
    # used in recipes/config_initdb.rb.
    # Returns the shortest valid timezone name matching /etc/localtime,
    # or nil if none could be determined.
    def scan_available_timezones(tzdir)
      # There should be an /etc/localtime zoneinfo file that is a link to
      # (or a copy of) a timezone data file under tzdir, which should have
      # been installed under the "share" directory by the tzdata package.
      #
      # The initdb utility determines which shared timezone file is being
      # used as the system's default /etc/localtime. The timezone name is
      # the timezone file path relative to the tzdir.
      bestzonename = nil

      if tzdir.nil?
        Chef::Log.error('The zoneinfo directory not found (looked for /usr/share/zoneinfo and /usr/lib/zoneinfo)')
      elsif !::File.exist?('/etc/localtime')
        Chef::Log.error('The system zoneinfo file not found (looked for /etc/localtime)')
      elsif ::File.directory?('/etc/localtime')
        Chef::Log.error('The system zoneinfo file not found (/etc/localtime is a directory instead)')
      elsif ::File.symlink?('/etc/localtime')
        # PostgreSQL initdb doesn't use the symlink target, but this
        # certainly will make sense to any system administrator. A full
        # scan of the tzdir to find the shortest filename could result
        # "US/Eastern" instead of "America/New_York" as bestzonename,
        # in spite of what the sysadmin had specified in the symlink.
        # (There are many duplicates under tzdir, with the same timezone
        # content appearing as an average of 2-3 different file names.)
        path = ::File.realdirpath('/etc/localtime')
        bestzonename = path.gsub("#{tzdir}/", '')
      else # /etc/localtime is a file, so scan for it under tzdir
        localtime_content = File.read('/etc/localtime')

        Find.find(tzdir) do |path|
          # Only consider files (skip directories or symlinks)
          next unless !::File.directory?(path) && !::File.symlink?(path)
          # Ignore any file named "posixrules" or "localtime"
          next unless ::File.basename(path) != 'posixrules' && ::File.basename(path) != 'localtime'
          # Do consider if content exactly matches /etc/localtime.
          next unless localtime_content == File.read(path)
          tzname = path.gsub("#{tzdir}/", '')
          next unless validate_zone(tzname)
          # Prefer the shortest name; break length ties alphabetically.
          if bestzonename.nil? ||
             tzname.length < bestzonename.length ||
             (tzname.length == bestzonename.length &&
              (tzname <=> bestzonename) < 0)
            bestzonename = tzname
          end
        end
      end

      bestzonename
    end

    # Function to support select_default_timezone(tzdir), which is
    # used in recipes/config_initdb.rb.
    # Falls back to a POSIX-style Etc/GMT offset zone when no name matches.
    def identify_system_timezone(tzdir)
      resultbuf = scan_available_timezones(tzdir)

      if !resultbuf.nil?
        # Ignore Olson's rather silly "Factory" zone; use GMT instead
        resultbuf = nil if (resultbuf <=> 'Factory') == 0
      else
        # Did not find the timezone. Fallback to use a GMT zone. Note that the
        # Olson timezone database names the GMT-offset zones in POSIX style: plus
        # is west of Greenwich.
        testtime = DateTime.now
        std_ofs = testtime.strftime('%:z').split(':')[0].to_i

        resultbuf = [
          'Etc/GMT',
          -std_ofs > 0 ? '+' : '',
          (-std_ofs).to_s,
        ].join('')
      end

      resultbuf
    end

    #######
    # Function to determine the name of the system's default timezone.
    # Used in recipes/config_initdb.rb to set these attributes:
    #    node.default['postgresql']['config']['log_timezone']
    #    node.default['postgresql']['config']['timezone']
    # Precedence: a valid TZ environment variable, then /etc/localtime.
    def select_default_timezone(tzdir)
      system_timezone = nil

      # Check TZ environment variable
      tzname = ENV['TZ']
      if !tzname.nil? && !tzname.empty? && validate_zone(tzname)
        system_timezone = tzname
      else
        # Nope, so try to identify system timezone from /etc/localtime
        tzname = identify_system_timezone(tzdir)
        system_timezone = tzname if validate_zone(tzname)
      end

      system_timezone
    end

    #######
    # Function to execute an SQL statement in the default database.
    #   Input: Query could be a single String or an Array of String.
    #   Output: A String with |-separated columns and \n-separated rows.
    #           Note an empty output could mean psql couldn't connect.
    # This is easiest for 1-field (1-row, 1-col) results, otherwise
    # it will be complex to parse the results.
    def execute_sql(query, db_name)
      # query could be a String or an Array of String
      statement = query.is_a?(String) ? query : query.join("\n")
      cmd = shell_out("psql -q --tuples-only --no-align -d #{db_name} -f -",
                      user: 'postgres',
                      input: statement)
      # If psql fails, generally the postgresql service is down.
      # Instead of aborting chef with a fatal error, let's just
      # pass these non-zero exitstatus back as empty cmd.stdout.
      if cmd.exitstatus == 0 && !cmd.stderr.empty?
        # An SQL failure is still a zero exitstatus, but then the
        # stderr explains the error, so let's raise that as fatal.
        Chef::Log.fatal("psql failed executing this SQL statement:\n#{statement}")
        Chef::Log.fatal(cmd.stderr)
        raise 'SQL ERROR'
      end
      cmd.stdout.chomp
    end
    # End the Opscode::PostgresqlHelpers module
  end
end
Fix elsif in locale helpers (#490)
Fixes #418
# frozen_string_literal: false
#
# Cookbook:: postgresql
# Library:: default
# Author:: David Crane (<davidc@donorschoose.org>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::Mixin::ShellOut
module PostgresqlCookbook
  module Helpers
    #######
    # Function to truncate value to 4 significant bits, render human readable.
    # Used in server_conf resource:
    #
    # The memory settings (shared_buffers, effective_cache_size, work_mem,
    # maintenance_work_mem and wal_buffers) will be rounded down to keep
    # the 4 most significant bits, so that SHOW will be likely to use a
    # larger divisor. The output is actually a human readable string that
    # ends with "GB", "MB" or "kB" if over 1023, exactly what Postgresql
    # will expect in a postgresql.conf setting. The output may be up to
    # 6.25% less than the original value because of the rounding.
    def binaryround(value)
      # Keep a multiplier which grows through powers of 2
      multiplier = 1

      # Truncate value to 4 most significant bits
      while value >= 16
        value = (value / 2).floor
        multiplier *= 2
      end

      # Factor any remaining powers of 2 into the multiplier
      while value == 2 * (value / 2).floor
        value = (value / 2).floor
        multiplier *= 2
      end

      # Factor enough powers of 2 back into the value to
      # leave the multiplier as a power of 1024 that can
      # be represented as units of "GB", "MB" or "kB".
      if multiplier >= 1024 * 1024 * 1024
        while multiplier > 1024 * 1024 * 1024
          value = 2 * value
          multiplier = (multiplier / 2).floor
        end
        multiplier = 1
        units = 'GB'
      elsif multiplier >= 1024 * 1024
        while multiplier > 1024 * 1024
          value = 2 * value
          multiplier = (multiplier / 2).floor
        end
        multiplier = 1
        units = 'MB'
      elsif multiplier >= 1024
        while multiplier > 1024
          value = 2 * value
          multiplier = (multiplier / 2).floor
        end
        multiplier = 1
        units = 'kB'
      else
        units = ''
      end

      # Now we can return a nice human readable string.
      "#{multiplier * value}#{units}"
    end

    #######
    # Locale Configuration

    # Function to test the date order.
    # Used in recipes/config_initdb.rb to set this attribute:
    #    node.default['postgresql']['config']['datestyle']
    # Returns 'mdy', 'ymd' or 'dmy'.
    def locale_date_order
      # Test locale conversion of mon=11, day=22, year=33
      testtime = DateTime.new(2033, 11, 22, 0, 0, 0, '-00:00')
      #=> #<DateTime: 2033-11-22T00:00:00-0000 ...>

      # %x - Preferred representation for the date alone, no time
      res = testtime.strftime('%x')
      return 'mdy' if res.nil?

      posM = res.index('11')
      posD = res.index('22')
      posY = res.index('33')

      # Derive the order from the relative positions of month/day/year.
      if posM.nil? || posD.nil? || posY.nil?
        return 'mdy'
      elsif (posY < posM && posM < posD)
        return 'ymd'
      elsif (posD < posM)
        return 'dmy'
      end
      'mdy'
    end

    #######
    # Timezone Configuration
    require 'find'

    # Function to determine where the system stored shared timezone data.
    # Used in recipes/config_initdb.rb to determine where it should have
    # select_default_timezone(tzdir) search.
    def pg_TZDIR
      # System time zone conversions are controlled by a timezone data file
      # identified through environment variables (TZ and TZDIR) and/or file
      # and directory naming conventions specific to the Linux distribution.
      # Each of these timezone names will have been loaded into the PostgreSQL
      # pg_timezone_names view by the package maintainer.
      #
      # Instead of using the timezone name configured as the system default,
      # the PostgreSQL server uses ones named in postgresql.conf settings
      # (timezone and log_timezone). The initdb utility does initialize those
      # settings to the timezone name that corresponds to the system default.
      #
      # The system's timezone name is actually a filename relative to the
      # shared zoneinfo directory. That is usually /usr/share/zoneinfo, but
      # it was /usr/lib/zoneinfo in older distributions and can be anywhere
      # if specified by the environment variable TZDIR. The tzset(3) manpage
      # seems to indicate the following precedence:
      tzdir = nil
      if ::File.directory?('/usr/lib/zoneinfo')
        tzdir = '/usr/lib/zoneinfo'
      else
        share_path = [ENV['TZDIR'], '/usr/share/zoneinfo'].compact.first
        tzdir = share_path if ::File.directory?(share_path)
      end
      tzdir
    end

    #######
    # Function to support select_default_timezone(tzdir), which is
    # used in recipes/config_initdb.rb.
    # Returns false for zone names under "right/" (leap-second aware).
    def validate_zone(tzname)
      # PostgreSQL does not support leap seconds, so this function tests
      # the usual Linux tzname convention to avoid a misconfiguration.
      # Assume that the tzdata package maintainer has kept all timezone
      # data files with support for leap seconds is kept under the
      # so-named "right/" subdir of the shared zoneinfo directory.
      #
      # The original PostgreSQL initdb is not Unix-specific, so it did a
      # very complicated, thorough test in its pg_tz_acceptable() function
      # that I could not begin to understand how to do in ruby :).
      #
      # Testing the tzname is good enough, since a misconfiguration
      # will result in an immediate fatal error when the PostgreSQL
      # service is started, with pgstartup.log messages such as:
      # LOG: time zone "right/US/Eastern" appears to use leap seconds
      # DETAIL: PostgreSQL does not support leap seconds
      if tzname.index('right/') == 0
        false
      else
        true
      end
    end

    # Function to support select_default_timezone(tzdir), which is
    # used in recipes/config_initdb.rb.
    # Returns the shortest valid timezone name matching /etc/localtime,
    # or nil if none could be determined.
    def scan_available_timezones(tzdir)
      # There should be an /etc/localtime zoneinfo file that is a link to
      # (or a copy of) a timezone data file under tzdir, which should have
      # been installed under the "share" directory by the tzdata package.
      #
      # The initdb utility determines which shared timezone file is being
      # used as the system's default /etc/localtime. The timezone name is
      # the timezone file path relative to the tzdir.
      bestzonename = nil

      if tzdir.nil?
        Chef::Log.error('The zoneinfo directory not found (looked for /usr/share/zoneinfo and /usr/lib/zoneinfo)')
      elsif !::File.exist?('/etc/localtime')
        Chef::Log.error('The system zoneinfo file not found (looked for /etc/localtime)')
      elsif ::File.directory?('/etc/localtime')
        Chef::Log.error('The system zoneinfo file not found (/etc/localtime is a directory instead)')
      elsif ::File.symlink?('/etc/localtime')
        # PostgreSQL initdb doesn't use the symlink target, but this
        # certainly will make sense to any system administrator. A full
        # scan of the tzdir to find the shortest filename could result
        # "US/Eastern" instead of "America/New_York" as bestzonename,
        # in spite of what the sysadmin had specified in the symlink.
        # (There are many duplicates under tzdir, with the same timezone
        # content appearing as an average of 2-3 different file names.)
        path = ::File.realdirpath('/etc/localtime')
        bestzonename = path.gsub("#{tzdir}/", '')
      else # /etc/localtime is a file, so scan for it under tzdir
        localtime_content = File.read('/etc/localtime')

        Find.find(tzdir) do |path|
          # Only consider files (skip directories or symlinks)
          next unless !::File.directory?(path) && !::File.symlink?(path)
          # Ignore any file named "posixrules" or "localtime"
          next unless ::File.basename(path) != 'posixrules' && ::File.basename(path) != 'localtime'
          # Do consider if content exactly matches /etc/localtime.
          next unless localtime_content == File.read(path)
          tzname = path.gsub("#{tzdir}/", '')
          next unless validate_zone(tzname)
          # Prefer the shortest name; break length ties alphabetically.
          if bestzonename.nil? ||
             tzname.length < bestzonename.length ||
             (tzname.length == bestzonename.length &&
              (tzname <=> bestzonename) < 0)
            bestzonename = tzname
          end
        end
      end

      bestzonename
    end

    # Function to support select_default_timezone(tzdir), which is
    # used in recipes/config_initdb.rb.
    # Falls back to a POSIX-style Etc/GMT offset zone when no name matches.
    def identify_system_timezone(tzdir)
      resultbuf = scan_available_timezones(tzdir)

      if !resultbuf.nil?
        # Ignore Olson's rather silly "Factory" zone; use GMT instead
        resultbuf = nil if (resultbuf <=> 'Factory') == 0
      else
        # Did not find the timezone. Fallback to use a GMT zone. Note that the
        # Olson timezone database names the GMT-offset zones in POSIX style: plus
        # is west of Greenwich.
        testtime = DateTime.now
        std_ofs = testtime.strftime('%:z').split(':')[0].to_i

        resultbuf = [
          'Etc/GMT',
          -std_ofs > 0 ? '+' : '',
          (-std_ofs).to_s,
        ].join('')
      end

      resultbuf
    end

    #######
    # Function to determine the name of the system's default timezone.
    # Used in recipes/config_initdb.rb to set these attributes:
    #    node.default['postgresql']['config']['log_timezone']
    #    node.default['postgresql']['config']['timezone']
    # Precedence: a valid TZ environment variable, then /etc/localtime.
    def select_default_timezone(tzdir)
      system_timezone = nil

      # Check TZ environment variable
      tzname = ENV['TZ']
      if !tzname.nil? && !tzname.empty? && validate_zone(tzname)
        system_timezone = tzname
      else
        # Nope, so try to identify system timezone from /etc/localtime
        tzname = identify_system_timezone(tzdir)
        system_timezone = tzname if validate_zone(tzname)
      end

      system_timezone
    end

    #######
    # Function to execute an SQL statement in the default database.
    #   Input: Query could be a single String or an Array of String.
    #   Output: A String with |-separated columns and \n-separated rows.
    #           Note an empty output could mean psql couldn't connect.
    # This is easiest for 1-field (1-row, 1-col) results, otherwise
    # it will be complex to parse the results.
    def execute_sql(query, db_name)
      # query could be a String or an Array of String
      statement = query.is_a?(String) ? query : query.join("\n")
      cmd = shell_out("psql -q --tuples-only --no-align -d #{db_name} -f -",
                      user: 'postgres',
                      input: statement)
      # If psql fails, generally the postgresql service is down.
      # Instead of aborting chef with a fatal error, let's just
      # pass these non-zero exitstatus back as empty cmd.stdout.
      if cmd.exitstatus == 0 && !cmd.stderr.empty?
        # An SQL failure is still a zero exitstatus, but then the
        # stderr explains the error, so let's raise that as fatal.
        Chef::Log.fatal("psql failed executing this SQL statement:\n#{statement}")
        Chef::Log.fatal(cmd.stderr)
        raise 'SQL ERROR'
      end
      cmd.stdout.chomp
    end
    # End the Opscode::PostgresqlHelpers module
  end
end
|
#
# Cookbook:: runit
# Libraries:: helpers
#
# Author: Joshua Timberman <joshua@chef.io>
# Author: Sean OMeara <sean@chef.io>
# Copyright 2008-2015, Chef Software, Inc. <legal@chef.io>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module RunitCookbook
module Helpers
# include Chef::Mixin::ShellOut if it is not already included in the calling class
def self.included(klass)
unless klass.ancestors.include?(Chef::Mixin::ShellOut)
klass.class_eval { include Chef::Mixin::ShellOut }
end
end
# Default settings for resource properties.
def parsed_sv_bin
return new_resource.sv_bin if new_resource.sv_bin
'/usr/bin/sv'
end
def parsed_sv_dir
return new_resource.sv_dir if new_resource.sv_dir
'/etc/sv'
end
def parsed_service_dir
return new_resource.service_dir if new_resource.service_dir
'/etc/service'
end
def parsed_lsb_init_dir
return new_resource.lsb_init_dir if new_resource.lsb_init_dir
'/etc/init.d'
end
# misc helper functions
def inside_docker?
results = `cat /proc/1/cgroup`.strip.split("\n")
results.any? { |val| /docker/ =~ val }
end
def down_file
"#{sv_dir_name}/down"
end
def env_dir
"#{sv_dir_name}/env"
end
def extra_env_files?
files = []
Dir.glob("#{sv_dir_name}/env/*").each do |f|
files << File.basename(f)
end
return true if files.sort != new_resource.env.keys.sort
false
end
def zap_extra_env_files
Dir.glob("#{sv_dir_name}/env/*").each do |f|
unless new_resource.env.key?(File.basename(f))
File.unlink(f)
Chef::Log.info("removing file #{f}")
end
end
end
def wait_for_service
unless inside_docker?
sleep 1 until ::FileTest.pipe?("#{service_dir_name}/supervise/ok")
if new_resource.log
sleep 1 until ::FileTest.pipe?("#{service_dir_name}/log/supervise/ok")
end
end
end
def runit_sv_works?
sv = shell_out("#{sv_bin} --help")
sv.exitstatus == 100 && sv.stderr =~ /usage: sv .* command service/
end
def runit_send_signal(signal, friendly_name = nil)
friendly_name ||= signal
converge_by("send #{friendly_name} to #{new_resource}") do
shell_out!("#{sv_bin} #{sv_args}#{signal} #{service_dir_name}")
Chef::Log.info("#{new_resource} sent #{friendly_name}")
end
end
def running?
cmd = shell_out("#{sv_bin} #{sv_args}status #{service_dir_name}")
(cmd.stdout =~ /^run:/ && cmd.exitstatus == 0)
end
def log_running?
cmd = shell_out("#{sv_bin} #{sv_args}status #{service_dir_name}/log")
(cmd.stdout =~ /^run:/ && cmd.exitstatus == 0)
end
def enabled?
::File.exist?("#{service_dir_name}/run")
end
def log_service_name
"#{new_resource.service_name}/log"
end
def sv_dir_name
"#{parsed_sv_dir}/#{new_resource.service_name}"
end
def sv_args
sv_args = ''
sv_args += "-w '#{new_resource.sv_timeout}' " unless new_resource.sv_timeout.nil?
sv_args += '-v ' if new_resource.sv_verbose
sv_args
end
def sv_bin
parsed_sv_bin
end
def service_dir_name
"#{new_resource.service_dir}/#{new_resource.service_name}"
end
def log_dir_name
"#{new_resource.service_dir}/#{new_resource.service_name}/log"
end
def template_cookbook
new_resource.cookbook.nil? ? new_resource.cookbook_name.to_s : new_resource.cookbook
end
def default_logger_content
<<-EOS
#!/bin/sh
exec svlogd -tt #{new_resource.log_dir}
EOS
end
# Take the service down and unlink it from the active service directory.
# BUG FIX: use the sv_bin helper (which falls back to /usr/bin/sv) instead
# of the raw new_resource.sv_bin property, which may be nil; every other
# sv invocation in this library already goes through sv_bin.
def disable_service
  shell_out("#{sv_bin} #{sv_args}down #{service_dir_name}")
  FileUtils.rm(service_dir_name)
  # per the documentation, a service should be removed from supervision
  # within 5 seconds of removing the service dir symlink, so we'll sleep for 6.
  # otherwise, runit recreates the 'ok' named pipe too quickly
  sleep(6)
  # runit will recreate the supervise directory and
  # pipes when the service is reenabled
  FileUtils.rm("#{sv_dir_name}/supervise/ok")
end
# Start the service; shell_out! raises if `sv start` fails.
# BUG FIX: use the sv_bin helper (with its /usr/bin/sv fallback) instead
# of the raw property, which may be nil when the caller never set sv_bin.
def start_service
  shell_out!("#{sv_bin} #{sv_args}start #{service_dir_name}")
end
# Stop the service; shell_out! raises if `sv stop` fails.
# BUG FIX: use the sv_bin helper rather than the possibly-nil property.
def stop_service
  shell_out!("#{sv_bin} #{sv_args}stop #{service_dir_name}")
end
# Restart the service; shell_out! raises if `sv restart` fails.
# BUG FIX: use the sv_bin helper rather than the possibly-nil property.
def restart_service
  shell_out!("#{sv_bin} #{sv_args}restart #{service_dir_name}")
end
# Restart the companion log service.
# BUG FIX: use the sv_bin helper rather than the possibly-nil property.
def restart_log_service
  shell_out!("#{sv_bin} #{sv_args}restart #{service_dir_name}/log")
end
# Force-reload the service (sv sends TERM then restarts per runit docs).
# BUG FIX: use the sv_bin helper rather than the possibly-nil property.
def reload_service
  shell_out!("#{sv_bin} #{sv_args}force-reload #{service_dir_name}")
end
# Force-reload the companion log service, but only when it is running.
# BUG FIX: use the sv_bin helper rather than the possibly-nil property.
def reload_log_service
  if log_running?
    shell_out!("#{sv_bin} #{sv_args}force-reload #{service_dir_name}/log")
  end
end
end
end
Delete broken docker check
#
# Cookbook:: runit
# Libraries:: helpers
#
# Author: Joshua Timberman <joshua@chef.io>
# Author: Sean OMeara <sean@chef.io>
# Copyright 2008-2015, Chef Software, Inc. <legal@chef.io>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Helper methods mixed into the runit_service resource/provider. They wrap
# the `sv` command line tool and manage the service's directories under the
# sv dir (definitions) and service dir (active symlinks).
module RunitCookbook
  module Helpers
    # include Chef::Mixin::ShellOut if it is not already included in the calling class
    def self.included(klass)
      unless klass.ancestors.include?(Chef::Mixin::ShellOut)
        klass.class_eval { include Chef::Mixin::ShellOut }
      end
    end

    # Default settings for resource properties.
    def parsed_sv_bin
      return new_resource.sv_bin if new_resource.sv_bin
      '/usr/bin/sv'
    end

    def parsed_sv_dir
      return new_resource.sv_dir if new_resource.sv_dir
      '/etc/sv'
    end

    def parsed_service_dir
      return new_resource.service_dir if new_resource.service_dir
      '/etc/service'
    end

    def parsed_lsb_init_dir
      return new_resource.lsb_init_dir if new_resource.lsb_init_dir
      '/etc/init.d'
    end

    # Path of the "down" flag file that keeps the service stopped at boot.
    def down_file
      "#{sv_dir_name}/down"
    end

    # Directory holding per-variable environment files for the service.
    def env_dir
      "#{sv_dir_name}/env"
    end

    # True when the on-disk env directory does not exactly match the
    # resource's declared env hash.
    def extra_env_files?
      files = []
      Dir.glob("#{sv_dir_name}/env/*").each do |f|
        files << File.basename(f)
      end
      return true if files.sort != new_resource.env.keys.sort
      false
    end

    # Delete env files that are no longer declared on the resource.
    def zap_extra_env_files
      Dir.glob("#{sv_dir_name}/env/*").each do |f|
        unless new_resource.env.key?(File.basename(f))
          File.unlink(f)
          Chef::Log.info("removing file #{f}")
        end
      end
    end

    # Block until runsv has created its supervise 'ok' pipe (and the log
    # service's pipe when logging is enabled). No timeout - polls forever
    # if runsv never starts.
    def wait_for_service
      sleep 1 until ::FileTest.pipe?("#{service_dir_name}/supervise/ok")
      if new_resource.log
        sleep 1 until ::FileTest.pipe?("#{service_dir_name}/log/supervise/ok")
      end
    end

    # Sanity check that the sv binary behaves as expected: `sv --help`
    # exits 100 and prints its usage line to stderr.
    def runit_sv_works?
      sv = shell_out("#{sv_bin} --help")
      sv.exitstatus == 100 && sv.stderr =~ /usage: sv .* command service/
    end

    # Send an arbitrary sv signal/command inside converge_by so Chef
    # records it as a change.
    def runit_send_signal(signal, friendly_name = nil)
      friendly_name ||= signal
      converge_by("send #{friendly_name} to #{new_resource}") do
        shell_out!("#{sv_bin} #{sv_args}#{signal} #{service_dir_name}")
        Chef::Log.info("#{new_resource} sent #{friendly_name}")
      end
    end

    # True when `sv status` reports "run:" and exits cleanly.
    def running?
      cmd = shell_out("#{sv_bin} #{sv_args}status #{service_dir_name}")
      (cmd.stdout =~ /^run:/ && cmd.exitstatus == 0)
    end

    # True when the companion log service is reported as running.
    def log_running?
      cmd = shell_out("#{sv_bin} #{sv_args}status #{service_dir_name}/log")
      (cmd.stdout =~ /^run:/ && cmd.exitstatus == 0)
    end

    # A service is enabled once its run script exists in the service dir.
    def enabled?
      ::File.exist?("#{service_dir_name}/run")
    end

    # Relative name of the log companion service ("<service>/log").
    def log_service_name
      "#{new_resource.service_name}/log"
    end

    # Full path of the service's definition directory under the sv dir.
    def sv_dir_name
      "#{parsed_sv_dir}/#{new_resource.service_name}"
    end

    # Optional CLI flags (-w timeout, -v verbose) passed to sv; each flag
    # carries a trailing space so callers concatenate it before the command.
    def sv_args
      sv_args = ''
      sv_args += "-w '#{new_resource.sv_timeout}' " unless new_resource.sv_timeout.nil?
      sv_args += '-v ' if new_resource.sv_verbose
      sv_args
    end

    # Path to the sv binary (resource property or /usr/bin/sv fallback).
    def sv_bin
      parsed_sv_bin
    end

    # Full path of the service's symlink in the active service directory.
    def service_dir_name
      "#{new_resource.service_dir}/#{new_resource.service_name}"
    end

    # Full path of the log companion directory under the service dir.
    def log_dir_name
      "#{new_resource.service_dir}/#{new_resource.service_name}/log"
    end

    # Cookbook to pull templates from: explicit property or the declaring
    # cookbook.
    def template_cookbook
      new_resource.cookbook.nil? ? new_resource.cookbook_name.to_s : new_resource.cookbook
    end

    # Default log/run script: svlogd with timestamped lines into log_dir.
    def default_logger_content
      <<-EOS
#!/bin/sh
exec svlogd -tt #{new_resource.log_dir}
EOS
    end

    # Take the service down and unlink it from the service directory.
    # BUG FIX (applies to all sv invocations below): use the sv_bin helper
    # (with its /usr/bin/sv fallback) instead of the raw new_resource.sv_bin
    # property, which may be nil; the status/signal methods above already
    # go through sv_bin.
    def disable_service
      shell_out("#{sv_bin} #{sv_args}down #{service_dir_name}")
      FileUtils.rm(service_dir_name)
      # per the documentation, a service should be removed from supervision
      # within 5 seconds of removing the service dir symlink, so we'll sleep for 6.
      # otherwise, runit recreates the 'ok' named pipe too quickly
      sleep(6)
      # runit will recreate the supervise directory and
      # pipes when the service is reenabled
      FileUtils.rm("#{sv_dir_name}/supervise/ok")
    end

    def start_service
      shell_out!("#{sv_bin} #{sv_args}start #{service_dir_name}")
    end

    def stop_service
      shell_out!("#{sv_bin} #{sv_args}stop #{service_dir_name}")
    end

    def restart_service
      shell_out!("#{sv_bin} #{sv_args}restart #{service_dir_name}")
    end

    def restart_log_service
      shell_out!("#{sv_bin} #{sv_args}restart #{service_dir_name}/log")
    end

    def reload_service
      shell_out!("#{sv_bin} #{sv_args}force-reload #{service_dir_name}")
    end

    def reload_log_service
      if log_running?
        shell_out!("#{sv_bin} #{sv_args}force-reload #{service_dir_name}/log")
      end
    end
  end
end
|
#
# Author:: John Dewey (<john@dewey.ws>)
# Cookbook Name:: chef-client
# Library:: helpers
#
# Copyright 2012, John Dewey
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Opscode
  module ChefClient
    # helper methods for use in chef-client recipe code
    module Helpers
      include Chef::Mixin::Language if Chef::VERSION < '11.0.0'
      include Chef::DSL::PlatformIntrospection if Chef::VERSION >= '11.0.0'

      # System user that owns Chef Server files ('chef_server' on Chef >= 11).
      def chef_server_user
        Chef::VERSION >= '11.0.0' ? 'chef_server' : 'chef'
      end

      # Heuristic: does this node run a Chef Server? True when the
      # chef-server recipe is in the run list or (non-Windows) a
      # chef-server / chef-server-ctl binary is on the PATH.
      def chef_server?
        if node['platform'] == 'windows'
          node.recipe?('chef-server')
        else
          Chef::Log.debug("Node has Chef Server Recipe? #{node.recipe?("chef-server")}")
          Chef::Log.debug("Node has Chef Server Executable? #{system("which chef-server > /dev/null 2>&1")}")
          Chef::Log.debug("Node has Chef Server Ctl Executable? #{system("which chef-server-ctl > /dev/null 2>&1")}")
          node.recipe?('chef-server') || system('which chef-server > /dev/null 2>&1') || system('which chef-server-ctl > /dev/null 2>&1')
        end
      end

      # Run a WMI query and return the given property of the first result,
      # or nil when the query matched nothing (Windows only).
      def wmi_property_from_query(wmi_property, wmi_query)
        @wmi = ::WIN32OLE.connect("winmgmts://")
        result = @wmi.ExecQuery(wmi_query)
        return nil unless result.each.count > 0
        result.each.next.send(wmi_property)
      end

      # True when a Windows service named 'chef-client' is registered.
      def chef_client_service_running
        wmi_property_from_query(:name, "select * from Win32_Service where name = 'chef-client'") != nil
      end

      # Administrative owner: the local Administrator account (SID *-500)
      # on Windows, 'root' elsewhere.
      def root_owner
        if ['windows'].include?(node['platform'])
          wmi_property_from_query(:name, "select * from Win32_UserAccount where sid like 'S-1-5-21-%-500' and LocalAccount=True")
        else
          'root'
        end
      end

      # Owner for chef-client directories: the server user on a Chef
      # Server node, the administrative owner otherwise.
      def dir_owner
        if chef_server?
          chef_server_user
        else
          root_owner
        end
      end

      # Administrative group: 'wheel' on BSD/macOS, the local
      # Administrators group (SID S-1-5-32-544) on Windows, else 'root'.
      def root_group
        if %w{ openbsd freebsd mac_os_x mac_os_x_server }.include?(node['platform'])
          'wheel'
        elsif ['windows'].include?(node['platform'])
          wmi_property_from_query(:name, "select * from Win32_Group where SID = 'S-1-5-32-544' AND LocalAccount=TRUE")
        else
          'root'
        end
      end

      # Group for chef-client directories, mirroring dir_owner.
      def dir_group
        if chef_server?
          chef_server_user
        else
          root_group
        end
      end

      # Create the chef-client runtime directories with the appropriate
      # owner/group; the log dir additionally gets restrictive 0750 mode.
      def create_directories
        # dir_owner and dir_group are not found in the block below.
        d_owner = dir_owner
        d_group = dir_group
        %w{run_path cache_path backup_path log_dir conf_dir}.each do |dir|
          directory node['chef_client'][dir] do
            recursive true
            mode 00750 if dir == 'log_dir'
            owner d_owner
            group d_group
          end
        end
      end

      # Locate the chef-client executable: node attribute first, then
      # Chef's SANE_PATHS, then the system PATH; raises when not found.
      def find_chef_client
        if node['platform'] == 'windows'
          # BUG FIX: File.exists? was deprecated and removed in Ruby 3.2;
          # File.exist? is the supported predicate and behaves identically.
          existence_check = :exist?
          # Where will also return files that have extensions matching PATHEXT (e.g.
          # *.bat). We don't want the batch file wrapper, but the actual script.
          which = 'set PATHEXT=.exe & where'
          Chef::Log.debug "Using exist? and 'where', since we're on Windows"
        else
          existence_check = :executable?
          which = 'which'
          Chef::Log.debug "Using executable? and 'which' since we're on Linux"
        end
        chef_in_sane_path = lambda do
          begin
            Chef::Client::SANE_PATHS.map do |p|
              p = "#{p}/chef-client"
              p if ::File.send(existence_check, p)
            end.compact.first
          rescue NameError
            false
          end
        end
        # COOK-635 account for alternate gem paths
        # try to use the bin provided by the node attribute
        if ::File.send(existence_check, node['chef_client']['bin'])
          Chef::Log.debug 'Using chef-client bin from node attributes'
          node['chef_client']['bin']
        # search for the bin in some sane paths
        elsif Chef::Client.const_defined?('SANE_PATHS') && chef_in_sane_path.call
          Chef::Log.debug 'Using chef-client bin from sane path'
          chef_in_sane_path
        # last ditch search for a bin in PATH
        elsif (chef_in_path = %x{#{which} chef-client}.chomp) && ::File.send(existence_check, chef_in_path)
          Chef::Log.debug 'Using chef-client bin from system path'
          chef_in_path
        else
          fail "Could not locate the chef-client bin in any known path. Please set the proper path by overriding the node['chef_client']['bin'] attribute."
        end
      end
    end
  end
end
Ignore FC048 in the helpers library
This rule prefers the use of Mixlib::ShellOut
Should maybe be refactored in the future
For now, just silencing these violations
#
# Author:: John Dewey (<john@dewey.ws>)
# Cookbook Name:: chef-client
# Library:: helpers
#
# Copyright 2012, John Dewey
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Opscode
module ChefClient
# helper methods for use in chef-client recipe code
module Helpers
include Chef::Mixin::Language if Chef::VERSION < '11.0.0'
include Chef::DSL::PlatformIntrospection if Chef::VERSION >= '11.0.0'
# System user that owns Chef Server files ('chef_server' on Chef >= 11).
def chef_server_user
Chef::VERSION >= '11.0.0' ? 'chef_server' : 'chef'
end
# Heuristic: does this node run a Chef Server? True when the chef-server
# recipe is in the run list or (non-Windows) a chef-server /
# chef-server-ctl binary is on the PATH.
def chef_server?
if node['platform'] == 'windows'
node.recipe?('chef-server')
else
Chef::Log.debug("Node has Chef Server Recipe? #{node.recipe?("chef-server")}")
Chef::Log.debug("Node has Chef Server Executable? #{system("which chef-server > /dev/null 2>&1")}") # ~FC048 Prefer Mixlib::ShellOut is ignored here
Chef::Log.debug("Node has Chef Server Ctl Executable? #{system("which chef-server-ctl > /dev/null 2>&1")}") # ~FC048 Prefer Mixlib::ShellOut is ignored here
node.recipe?('chef-server') || system('which chef-server > /dev/null 2>&1') || system('which chef-server-ctl > /dev/null 2>&1') # ~FC048 Prefer Mixlib::ShellOut is ignored here
end
end
# Run a WMI query and return the given property of the first result,
# or nil when the query matched nothing (Windows only).
def wmi_property_from_query(wmi_property, wmi_query)
@wmi = ::WIN32OLE.connect("winmgmts://")
result = @wmi.ExecQuery(wmi_query)
return nil unless result.each.count > 0
result.each.next.send(wmi_property)
end
# True when a Windows service named 'chef-client' is registered.
def chef_client_service_running
wmi_property_from_query(:name, "select * from Win32_Service where name = 'chef-client'") != nil
end
# Administrative owner: the local Administrator account (SID *-500) on
# Windows, 'root' elsewhere.
def root_owner
if ['windows'].include?(node['platform'])
wmi_property_from_query(:name, "select * from Win32_UserAccount where sid like 'S-1-5-21-%-500' and LocalAccount=True")
else
'root'
end
end
# Owner for chef-client directories: server user on a Chef Server node,
# administrative owner otherwise.
def dir_owner
if chef_server?
chef_server_user
else
root_owner
end
end
# Administrative group: 'wheel' on BSD/macOS, local Administrators group
# (SID S-1-5-32-544) on Windows, else 'root'.
def root_group
if %w{ openbsd freebsd mac_os_x mac_os_x_server }.include?(node['platform'])
'wheel'
elsif ['windows'].include?(node['platform'])
wmi_property_from_query(:name, "select * from Win32_Group where SID = 'S-1-5-32-544' AND LocalAccount=TRUE")
else
'root'
end
end
# Group for chef-client directories, mirroring dir_owner.
def dir_group
if chef_server?
chef_server_user
else
root_group
end
end
# Create the chef-client runtime directories with appropriate owner and
# group; the log dir additionally gets restrictive 0750 mode.
def create_directories
# dir_owner and dir_group are not found in the block below.
d_owner = dir_owner
d_group = dir_group
%w{run_path cache_path backup_path log_dir conf_dir}.each do |dir|
directory node['chef_client'][dir] do
recursive true
mode 00750 if dir == 'log_dir'
owner d_owner
group d_group
end
end
end
# Locate the chef-client executable: node attribute first, then Chef's
# SANE_PATHS, then the system PATH; raises when not found.
def find_chef_client
if node['platform'] == 'windows'
# NOTE(review): File.exists? was removed in Ruby 3.2; this should be
# :exist? on modern Rubies - confirm target Ruby version.
existence_check = :exists?
# Where will also return files that have extensions matching PATHEXT (e.g.
# *.bat). We don't want the batch file wrapper, but the actual script.
which = 'set PATHEXT=.exe & where'
Chef::Log.debug "Using exists? and 'where', since we're on Windows"
else
existence_check = :executable?
which = 'which'
Chef::Log.debug "Using executable? and 'which' since we're on Linux"
end
# Lambda (not Proc) so the rescue stays local; returns the first
# existing candidate path, or false when SANE_PATHS is undefined.
chef_in_sane_path = lambda do
begin
Chef::Client::SANE_PATHS.map do |p|
p = "#{p}/chef-client"
p if ::File.send(existence_check, p)
end.compact.first
rescue NameError
false
end
end
# COOK-635 account for alternate gem paths
# try to use the bin provided by the node attribute
if ::File.send(existence_check, node['chef_client']['bin'])
Chef::Log.debug 'Using chef-client bin from node attributes'
node['chef_client']['bin']
# search for the bin in some sane paths
elsif Chef::Client.const_defined?('SANE_PATHS') && chef_in_sane_path.call
Chef::Log.debug 'Using chef-client bin from sane path'
chef_in_sane_path
# last ditch search for a bin in PATH
elsif (chef_in_path = %x{#{which} chef-client}.chomp) && ::File.send(existence_check, chef_in_path) # ~FC048 Prefer Mixlib::ShellOut is ignored here
Chef::Log.debug 'Using chef-client bin from system path'
chef_in_path
else
fail "Could not locate the chef-client bin in any known path. Please set the proper path by overriding the node['chef_client']['bin'] attribute."
end
end
end
end
end
|
require "face_cropper/version"
require "face_cropper/aws_rekognition_face_detector"
require 'mini_magick'
require 'aws-sdk'
# Detects faces in an S3-hosted image (via AWS Rekognition) and uploads a
# cropped file per face back to S3.
class FaceCropper
  # Rectangle describing one face, with coordinates expressed as fractions
  # (0.0..1.0) of the full image, as returned by Rekognition bounding boxes.
  class FaceBox
    attr_reader :top, :left, :height, :width

    def initialize(top:, left:, height:, width:)
      @top = top
      @left = left
      @height = height
      @width = width
    end

    # Crop this face out of the image at image_path, write it next to the
    # original, and return the cropped file's name.
    def crop_face!(image_path)
      image = MiniMagick::Image.open(image_path)
      position = calculate_position(image_width: image.width, image_height: image.height)
      # ImageMagick geometry is WxH+X+Y; position[:y] is the horizontal
      # (left-based) offset and position[:x] the vertical (top-based) one.
      crop_params = "#{position[:width]}x#{position[:height]}+#{position[:y]}+#{position[:x]}"
      image.crop(crop_params)
      # BUG FIX: the original interpolated @image_key, which is never set on
      # FaceBox and always rendered as an empty string; derive the name from
      # the source image instead.
      crop_file = "#{crop_params}_#{File.basename(image_path)}"
      image.write(crop_file)
      crop_file
    end

    # Convert the fractional bounding box into absolute pixel sizes/offsets.
    # BUG FIX: the original referenced `image.width` / `image.height`, but
    # no `image` is in scope here (NameError); use the keyword arguments.
    def calculate_position(image_width:, image_height:)
      {
        width: (@width * image_width).to_i,
        height: (@height * image_height).to_i,
        x: (@top * image_height).to_i,
        y: (@left * image_width).to_i
      }
    end
  end

  # params: :from_bucket, :to_bucket (optional), :image_key, and optional
  # pre-computed :face_details (skips the Rekognition call).
  def initialize(params)
    @from_bucket = params[:from_bucket]
    @to_bucket = params[:to_bucket]
    @image_key = params[:image_key]
    @face_boxis = params[:face_details] # NOTE: name kept for compatibility ("boxis" sic)
  end

  # Detect (or reuse supplied) faces, download the source image, then crop
  # each face and upload it when a destination bucket is configured.
  def crop_and_upload!
    faces = @face_boxis || detect_faces!
    debug_print(faces)
    tmp_original_image_path = download_original_image!
    crop_faces!(faces, tmp_original_image_path)
  end

  private

  # Dump the raw face data when API_DEBUG is set.
  def debug_print(faces)
    pp faces if ENV['API_DEBUG']
  end

  def detect_faces!
    detector = AwsRekognitionFaceDetector.new(bucket: @from_bucket, image_key: @image_key)
    # NOTE(review): `dcetect!` looks like a typo for `detect!` - kept as-is
    # because it must match AwsRekognitionFaceDetector's actual method name.
    detector.dcetect!
  end

  # Fetch the source object from S3 into the working directory; returns the
  # local file name.
  def download_original_image!
    image_body = s3_client.get_object(bucket: @from_bucket, key: @image_key).body.read
    File.basename(@image_key).tap do |image_path|
      File.write(image_path, image_body)
    end
  end

  # Crop every detected face; upload and remove the local crop when a
  # destination bucket was given.
  def crop_faces!(faces, image_path)
    faces.face_details.each_with_index do |detail, index|
      face_box = FaceBox.new(
        width: detail.bounding_box.width,
        height: detail.bounding_box.height,
        top: detail.bounding_box.top,
        left: detail.bounding_box.left
      )
      crop_file = face_box.crop_face!(image_path)
      if @to_bucket
        s3_client.put_object(bucket: @to_bucket, key: crop_file, body: File.read(crop_file))
        File.unlink crop_file
      end
    end
  end

  def s3_client
    @s3_client ||= Aws::S3::Client.new(region: 'us-east-1')
  end
end
Fix debug print so detected faces are logged from detect_faces!
require "face_cropper/version"
require "face_cropper/aws_rekognition_face_detector"
require 'mini_magick'
require 'aws-sdk'
# Detects faces in an S3-hosted image (via AWS Rekognition) and uploads a
# cropped file per face back to S3.
class FaceCropper
  # Rectangle describing one face, with coordinates expressed as fractions
  # (0.0..1.0) of the full image, as returned by Rekognition bounding boxes.
  class FaceBox
    attr_reader :top, :left, :height, :width

    def initialize(top:, left:, height:, width:)
      @top = top
      @left = left
      @height = height
      @width = width
    end

    # Crop this face out of the image at image_path, write it next to the
    # original, and return the cropped file's name.
    def crop_face!(image_path)
      image = MiniMagick::Image.open(image_path)
      position = calculate_position(image_width: image.width, image_height: image.height)
      # ImageMagick geometry is WxH+X+Y; position[:y] is the horizontal
      # (left-based) offset and position[:x] the vertical (top-based) one.
      crop_params = "#{position[:width]}x#{position[:height]}+#{position[:y]}+#{position[:x]}"
      image.crop(crop_params)
      # BUG FIX: the original interpolated @image_key, which is never set on
      # FaceBox and always rendered as an empty string; derive the name from
      # the source image instead.
      crop_file = "#{crop_params}_#{File.basename(image_path)}"
      image.write(crop_file)
      crop_file
    end

    # Convert the fractional bounding box into absolute pixel sizes/offsets.
    # BUG FIX: the original referenced `image.width` / `image.height`, but
    # no `image` is in scope here (NameError); use the keyword arguments.
    def calculate_position(image_width:, image_height:)
      {
        width: (@width * image_width).to_i,
        height: (@height * image_height).to_i,
        x: (@top * image_height).to_i,
        y: (@left * image_width).to_i
      }
    end
  end

  # params: :from_bucket, :to_bucket (optional), :image_key, and optional
  # pre-computed :face_details (skips the Rekognition call).
  def initialize(params)
    @from_bucket = params[:from_bucket]
    @to_bucket = params[:to_bucket]
    @image_key = params[:image_key]
    @face_boxis = params[:face_details] # NOTE: name kept for compatibility ("boxis" sic)
  end

  # Detect (or reuse supplied) faces, download the source image, then crop
  # each face and upload it when a destination bucket is configured.
  def crop_and_upload!
    faces = @face_boxis || detect_faces!
    tmp_original_image_path = download_original_image!
    crop_faces!(faces, tmp_original_image_path)
  end

  private

  # Dump the raw face data when API_DEBUG is set.
  def debug_print(faces)
    pp faces if ENV['API_DEBUG']
  end

  def detect_faces!
    detector = AwsRekognitionFaceDetector.new(bucket: @from_bucket, image_key: @image_key)
    # NOTE(review): `dcetect!` looks like a typo for `detect!` - kept as-is
    # because it must match AwsRekognitionFaceDetector's actual method name.
    detector.dcetect!.tap { |r| debug_print(r) }
  end

  # Fetch the source object from S3 into the working directory; returns the
  # local file name.
  def download_original_image!
    image_body = s3_client.get_object(bucket: @from_bucket, key: @image_key).body.read
    File.basename(@image_key).tap do |image_path|
      File.write(image_path, image_body)
    end
  end

  # Crop every detected face; upload and remove the local crop when a
  # destination bucket was given.
  def crop_faces!(faces, image_path)
    faces.face_details.each_with_index do |detail, index|
      face_box = FaceBox.new(
        width: detail.bounding_box.width,
        height: detail.bounding_box.height,
        top: detail.bounding_box.top,
        left: detail.bounding_box.left
      )
      crop_file = face_box.crop_face!(image_path)
      if @to_bucket
        s3_client.put_object(bucket: @to_bucket, key: crop_file, body: File.read(crop_file))
        File.unlink crop_file
      end
    end
  end

  def s3_client
    @s3_client ||= Aws::S3::Client.new(region: 'us-east-1')
  end
end
|
# Represents a property of a domain object. For example, a price in a product.
# If you need to change how one of these is handled in a specific product,
# then you can use it as a #new argument for a class which inherits from
# Flowlink::ObjectBase, and invokes super in .initialize
# Represents a deferred method call (name + positional args + optional
# block) that can later be dispatched against an arbitrary receiver.
class FieldMethod # TODO: put in Flowlink module
  attr_reader :method_name, :args, :block

  # Build several FieldMethods from a mixed list of bare names and
  # [name, *args] arrays, e.g. multi_new([:foo, [:bar, 1]]).
  def self.multi_new(methods)
    methods.map do |m|
      m = [m].flatten
      FieldMethod.new(m.shift, m)
    end
  end

  # Fold each override into the original list; an override replaces any
  # existing entry with the same method name.
  def self.merge(overrides, original)
    overrides.inject(original) { |a, e| e.merge(a) }
  end

  # args may contain at most one Proc, which is extracted as the block;
  # everything else becomes the (flattened) positional argument list.
  def initialize(method_name, *args)
    @method_name = method_name.to_sym
    @args = args.to_a.flatten
    @block, @args = @args.partition { |arg| arg.is_a? Proc }
    @block = @block[0]
  end

  # Value equality on name + positional args (blocks are ignored, since
  # Procs have no useful equality). Added so lists of FieldMethods can be
  # compared and deduplicated directly.
  def ==(other)
    other.is_a?(self.class) && to_a == other.to_a
  end

  def merge(list)
    # This will put itself into a list of other FieldMethods and overwrite
    # an existing FM with the same name
    list.delete_if { |o_fm| o_fm.method_name == method_name }
    list << self
  end

  def to_a
    [method_name] + args
  end

  # Invoke the stored method on sendable, forwarding args and block.
  def send_to(sendable)
    # we can't splat procs, so this is necessary
    # TODO: use #to_a and reduce cases/enforce SRP on regular arg assembler
    case
    when block && args.empty?
      sendable.send(method_name, &block)
    when block && !args.empty?
      sendable.send(method_name, *args, &block)
    when !block && args.empty?
      sendable.send(method_name)
    when !block && !args.empty?
      sendable.send(method_name, *args)
    end
  end
end
Add FieldMethod#==; add a comment about renaming FieldMethod#merge
# Represents a property of a domain object. For example, a price in a product.
# If you need to change how one of these is handled in a specific product,
# then you can use it as a #new argument for a class which inherits from
# Flowlink::ObjectBase, and invokes super in .initialize
# A deferred method call: a method name plus positional arguments and an
# optional block, dispatchable later against any receiver.
class FieldMethod # TODO: put in Flowlink module
  attr_reader :method_name, :args, :block

  # Turn a mixed list of bare names and [name, *args] arrays into
  # FieldMethod instances.
  def self.multi_new(methods)
    methods.map do |entry|
      parts = [entry].flatten
      FieldMethod.new(parts.first, parts.drop(1))
    end
  end

  # Fold each override into the original list; overrides replace existing
  # entries that share a method name.
  def self.merge(overrides, original)
    result = original
    overrides.each { |override| result = override.merge(result) }
    result
  end

  # At most one Proc may appear among the arguments; it is pulled out as
  # the block while the remaining (flattened) values become positional args.
  def initialize(method_name, *args)
    @method_name = method_name.to_sym
    flattened = args.to_a.flatten
    procs, positional = flattened.partition { |candidate| candidate.is_a? Proc }
    @block = procs[0]
    @args = positional
  end

  # Value equality on method name + positional args (blocks ignored).
  def ==(other)
    other.is_a?(self.class) && to_a == other.to_a
  end

  # Insert self into a list of FieldMethods, replacing any entry with the
  # same method name. Mutates and returns the given list.
  def merge(list) # rename to #override, #hard_merge, or add #override alias?
    list.delete_if { |existing| existing.method_name == method_name }
    list.push(self)
  end

  def to_a
    [method_name] + args
  end

  # Dispatch the stored call against sendable. Procs cannot be splatted,
  # so block and argument presence are handled as separate branches.
  # TODO: use #to_a and reduce cases/enforce SRP on regular arg assembler
  def send_to(sendable)
    if block
      args.empty? ? sendable.send(method_name, &block) : sendable.send(method_name, *args, &block)
    else
      args.empty? ? sendable.send(method_name) : sendable.send(method_name, *args)
    end
  end
end
|
require 'filbert/db_config'
module Filbert
# Thor tasks for capturing Heroku Postgres backups and restoring the most
# recent dump into a local database.
class Task < Thor
  include Thor::Actions

  method_option :app, type: :string, required: true
  desc "backup", "capture and pull latest production snapshot and migrate local database"
  # Capture a backup of the app's follower DB on Heroku, download the dump
  # into ~/.heroku_backups, then prune old dumps.
  def backup
    say "Looking for the follower DB..."
    db_name = run!("heroku pg:info --app #{options[:app]} | grep Followers | awk '/:(.)*/ { print $2 }'").strip
    say "Found the follower: #{db_name}. Capturing..."
    backup_id = run!("heroku pgbackups:capture #{db_name} --expire --app #{options[:app]} | grep backup | awk '/--->/ { print $3}'").strip
    say "Backup id: #{backup_id}"
    say "Fetching backup S3 URL"
    backup_url = run!("heroku pgbackups:url #{backup_id} --app #{options[:app]} ").strip.gsub("\"", "")
    say "Downloading #{backup_url}"
    get backup_url, file_path
    say file_path
    # BUG FIX: pass explicit (empty) args and options so Thor does not try
    # to re-parse backup's own options when cleanup is invoked from here.
    invoke :cleanup, [], {}
  end

  desc "cleanup", "remove backup files older than 12 hours"
  def cleanup
    old_files.each do |file|
      say "Deleting old #{File.basename(file.path)}"
      File.delete file.path
    end
  end

  method_option :config, type: :string, default: "config/database.yml"
  method_option :env, type: :string, default: "development"
  desc "restore", "restore the latest db dump"
  # Restore the newest local dump into the configured database, killing
  # open connections first. PGPASSWORD is cleared again in ensure.
  def restore
    most_recent_file = ordered_dumps.last
    check_dump_ready(most_recent_file)
    say "Restoring: #{db_config.database} <--- #{most_recent_file.path}"
    invoke :kill_connections
    ENV['PGPASSWORD'] = db_config.password
    run! "pg_restore --clean --no-acl --no-owner -U #{db_config.username} -d #{db_config.database} -w #{most_recent_file.path}"
  rescue Errno::ENOENT
    say "Could not find config file #{options[:config]}. Please pass in --config with a path to database.yml"
  ensure
    ENV['PGPASSWORD'] = nil
  end

  method_option :config, type: :string, default: "config/database.yml"
  method_option :env, type: :string, default: "development"
  desc "kill_connections", "Kills all open connections to the db"
  def kill_connections
    database = db_config.database
    user = db_config.username
    ENV['PGPASSWORD'] = db_config.password
    sql = "SELECT pg_terminate_backend(procpid) FROM pg_stat_activity WHERE procpid <> pg_backend_pid();"
    run! "echo \"#{sql}\" | psql -d #{database} -U #{user}"
    say "Killed connections to #{database} as #{user}"
  ensure
    ENV['PGPASSWORD'] = nil
  end

  private

  # Run a shell command, aborting the process with the command's exit
  # status when it fails; returns captured stdout.
  def run!(cmd)
    out = `#{cmd}`
    unless $?.success?
      say "Command exited with status #{$?.to_i}. Exiting.", :red
      exit! $?.exitstatus
    end
    out
  end

  # Dump files older than 12 hours.
  def old_files
    hurdle = Time.now - 60*60*12
    ordered_dumps.select{ |file|
      file.mtime < hurdle
    }
  end

  # All *.dump files in the backups dir, oldest first.
  def ordered_dumps
    Dir.new(backups_dir).select{ |x|
      x.end_with? '.dump'
    }.map { |filename|
      File.new(File.join(backups_dir, filename))
    }.sort_by(&:mtime)
  end

  # Timestamped destination path for a freshly downloaded dump (memoized).
  def file_path
    @filename ||= File.join(backups_dir, "#{options[:app]}_#{Time.now.strftime("%Y-%m-%d_%H-%M-%L")}.dump")
  end

  def backups_dir
    File.join(Dir.home, '.heroku_backups')
  end

  # Exit gracefully when there is nothing to restore.
  def check_dump_ready(most_recent_file)
    if most_recent_file.nil?
      say "Didn't find any backup files in #{backups_dir}"
      exit 0
    end
  end

  # Lazily load database.yml for the selected environment; exits when the
  # environment is missing from the file.
  def db_config
    @db_config ||= begin
      db_config = DbConfig.new(options[:config], options[:env])
      if db_config.config.nil?
        say "Could not find config for \"#{options[:env]}\" in #{options[:config]}"
        exit 0
      end
      db_config
    end
  end
end
end
Make the cleanup invocation work when called from the backup method
require 'filbert/db_config'
module Filbert
# Thor tasks for capturing Heroku Postgres backups and restoring the most
# recent dump into a local database.
class Task < Thor
include Thor::Actions
method_option :app, type: :string, required: true
desc "backup", "capture and pull latest production snapshot and migrate local database"
# Capture a backup of the app's follower DB on Heroku, download the dump
# into ~/.heroku_backups, then prune old dumps.
def backup
say "Looking for the follower DB..."
db_name = run!("heroku pg:info --app #{options[:app]} | grep Followers | awk '/:(.)*/ { print $2 }'").strip
say "Found the follower: #{db_name}. Capturing..."
backup_id = run!("heroku pgbackups:capture #{db_name} --expire --app #{options[:app]} | grep backup | awk '/--->/ { print $3}'").strip
say "Backup id: #{backup_id}"
say "Fetching backup S3 URL"
backup_url = run!("heroku pgbackups:url #{backup_id} --app #{options[:app]} ").strip.gsub("\"", "")
say "Downloading #{backup_url}"
get backup_url, file_path
say file_path
# Explicit empty args/options keep Thor from re-parsing backup's own
# options when invoking cleanup from inside this task.
invoke :cleanup, [], {}
end
desc "cleanup", "remove backup files older than 12 hours"
def cleanup
old_files.each do |file|
say "Deleting old #{File.basename(file.path)}"
File.delete file.path
end
end
method_option :config, type: :string, default: "config/database.yml"
method_option :env, type: :string, default: "development"
desc "restore", "restore the latest db dump"
# Restore the newest local dump into the configured database, killing
# open connections first. PGPASSWORD is cleared again in ensure.
def restore
most_recent_file = ordered_dumps.last
check_dump_ready(most_recent_file)
say "Restoring: #{db_config.database} <--- #{most_recent_file.path}"
invoke :kill_connections
ENV['PGPASSWORD'] = db_config.password
run! "pg_restore --clean --no-acl --no-owner -U #{db_config.username} -d #{db_config.database} -w #{most_recent_file.path}"
rescue Errno::ENOENT
say "Could not find config file #{options[:config]}. Please pass in --config with a path to database.yml"
ensure
ENV['PGPASSWORD'] = nil
end
method_option :config, type: :string, default: "config/database.yml"
method_option :env, type: :string, default: "development"
desc "kill_connections", "Kills all open connections to the db"
def kill_connections
database = db_config.database
user = db_config.username
ENV['PGPASSWORD'] = db_config.password
sql = "SELECT pg_terminate_backend(procpid) FROM pg_stat_activity WHERE procpid <> pg_backend_pid();"
run! "echo \"#{sql}\" | psql -d #{database} -U #{user}"
say "Killed connections to #{database} as #{user}"
ensure
ENV['PGPASSWORD'] = nil
end
private
# Run a shell command, aborting the process with the command's exit
# status when it fails; returns captured stdout.
def run!(cmd)
out = `#{cmd}`
unless $?.success?
say "Command exited with status #{$?.to_i}. Exiting.", :red
exit! $?.exitstatus
end
out
end
# Dump files older than 12 hours.
def old_files
hurdle = Time.now - 60*60*12
ordered_dumps.select{ |file|
file.mtime < hurdle
}
end
# All *.dump files in the backups dir, oldest first.
def ordered_dumps
Dir.new(backups_dir).select{ |x|
x.end_with? '.dump'
}.map { |filename|
File.new(File.join(backups_dir, filename))
}.sort_by(&:mtime)
end
# Timestamped destination path for a freshly downloaded dump (memoized).
def file_path
@filename ||= File.join(backups_dir, "#{options[:app]}_#{Time.now.strftime("%Y-%m-%d_%H-%M-%L")}.dump")
end
def backups_dir
File.join(Dir.home, '.heroku_backups')
end
# Exit gracefully when there is nothing to restore.
def check_dump_ready(most_recent_file)
if most_recent_file.nil?
say "Didn't find any backup files in #{backups_dir}"
exit 0
end
end
# Lazily load database.yml for the selected environment; exits when the
# environment is missing from the file.
def db_config
@db_config ||= begin
db_config = DbConfig.new(options[:config], options[:env])
if db_config.config.nil?
say "Could not find config for \"#{options[:env]}\" in #{options[:config]}"
exit 0
end
db_config
end
end
end
end |
require 'faraday'
require 'faraday_middleware'
require 'ostruct'
require 'findface_api/version'
# Findface API
# Findface API client. Configure an access token first, then call the
# module-level API methods (detect / verify / identify).
module FindfaceApi
  ENDPOINT_URI = "https://api.findface.pro/v#{API_VERSION}/".freeze

  # Exceptions
  module Error
    class ClientError < RuntimeError; end
  end

  # Configuration
  module Configuration
    attr_accessor :access_token, :proxy, :logger, :adapter

    # Yields the module itself so callers can set the accessors in a block.
    def configure
      yield self
      true
    end
  end

  # Connection
  module Connection
    # Memoized Faraday connection: token auth, multipart + JSON request
    # encoding, JSON response parsing, optional proxy/logger/adapter.
    def connection
      raise 'No access token specified' if access_token.nil?
      @connection ||= begin
        conn = Faraday.new ENDPOINT_URI do |c|
          c.authorization :Token, access_token
          c.request :multipart
          c.request :json # either :json or :url_encoded
          c.response :logger, logger, headers: false, bodies: true unless logger.nil?
          c.response :json, content_type: /\bjson$/
          c.proxy proxy unless proxy.nil?
          c.adapter adapter.nil? ? Faraday.default_adapter : adapter
        end
        conn
      end
    end
  end

  # API Entities
  module Entities
    # Bounding box
    # Represents a rectangle on a photo. Usually used as a face's bounding box.
    class BBox
      attr_accessor :x1, :x2, :y1, :y2

      def initialize(x1:, x2:, y1:, y2:)
        @x1, @x2, @y1, @y2 = x1, x2, y1, y2
      end

      def width
        x2 - x1
      end

      def height
        y2 - y1
      end

      def to_h
        { x1: x1, x2: x2, y1: y1, y2: y2 }
      end
    end

    # Face
    # Represents a human face. Note that it might be several faces on a single photo.
    # Different photos of the same person as also considered to be different faces.
    class Face
      attr_reader :id, :timestamp, :photo, :photo_hash, :thumbnail, :bbox, :meta, :galleries
    end
  end

  # Helpers
  module Helpers
    # Destructively convert a hash's keys to symbols (best effort: keys
    # that do not respond to to_sym are kept as-is).
    def symbolize(myhash)
      myhash.keys.each do |key|
        myhash[(key.to_sym rescue key) || key] = myhash.delete(key)
      end
      myhash
    end

    # Build a request payload from the whitelisted option keys plus the
    # mandatory arguments.
    # BUG FIX: the original called options.reject and discarded the result
    # (Hash#reject is non-destructive), so every option key leaked into
    # the payload; filter to the whitelist before merging.
    def request_body(keys, options, **args)
      options.select { |key, _| keys.include?(key) }.merge(args)
    end

    # Substitute :gallery and :meta placeholders in an endpoint path.
    def request_path(path, options)
      path
        .gsub(':gallery', options.fetch(:gallery, :default).to_s)
        .gsub(':meta', options.fetch(:meta, '').to_s)
    end

    # POST data and return the parsed response body hash; raises
    # ClientError on HTTP failure or an API-level error payload.
    def post(uri, data)
      response = connection.post uri, data
      if !response.success? || response.body.include?('code')
        raise FindfaceApi::Error::ClientError, response.body
      end
      response.body
    end
  end

  # API Methods
  module APIMethods
    # Detect faces on a photo; returns an array of BBox entities.
    def detect(photo)
      response = post('detect/', photo: photo)
      response['faces'].map do |box|
        ::FindfaceApi::Entities::BBox.new(**symbolize(box))
      end
    end

    # Compare two photos; returns the raw API response body.
    def verify(photo1, photo2, **options)
      keys = %i(bbox1 bbox2 threshold mf_selector)
      payload = request_body(keys, options, photo1: photo1, photo2: photo2)
      post('verify/', payload)
    end

    # Identify a face against a gallery; returns the results list.
    def identify(photo, **options)
      keys = %i(bbox threshold n mf_selector)
      payload = request_body(keys, options, photo: photo)
      path = request_path('faces/gallery/:gallery/identify/', options)
      response = post(path, payload)
      # BUG FIX: post already returns the parsed body hash; the original's
      # response.body call raised NoMethodError (cf. detect above, which
      # indexes the hash directly).
      response['results']
    end
  end

  extend Configuration
  extend Connection
  extend Helpers
  extend Entities
  extend APIMethods
end
Add custom exception
require 'faraday'
require 'faraday_middleware'
require 'ostruct'
require 'findface_api/version'
# Findface API
module FindfaceApi
ENDPOINT_URI = "https://api.findface.pro/v#{API_VERSION}/".freeze
# Exceptions
module Error
class Error < StandardError; end
class ClientError < Error; end
end
# Configuration
module Configuration
attr_accessor :access_token, :proxy, :logger, :adapter
def configure
yield self
true
end
end
# Connection
module Connection
def connection
raise 'No access token specified' if access_token.nil?
@connection ||= begin
conn = Faraday.new ENDPOINT_URI do |c|
c.authorization :Token, access_token
c.request :multipart
c.request :json # either :json or :url_encoded
c.response :logger, logger, headers: false, bodies: true unless logger.nil?
c.response :json, content_type: /\bjson$/
c.proxy proxy unless proxy.nil?
c.adapter adapter.nil? ? Faraday.default_adapter : adapter
end
conn
end
end
end
# API Entities
module Entities
  # Bounding box.
  # Represents a rectangle on a photo, usually a face's bounding box.
  class BBox
    attr_accessor :x1, :x2, :y1, :y2

    # Builds a box from its corner coordinates.
    def initialize(x1:, x2:, y1:, y2:)
      @x1 = x1
      @x2 = x2
      @y1 = y1
      @y2 = y2
    end

    # Horizontal extent of the box.
    def width
      @x2 - @x1
    end

    # Vertical extent of the box.
    def height
      @y2 - @y1
    end

    # Hash representation using the same keys as the API payload.
    def to_h
      { x1: @x1, x2: @x2, y1: @y1, y2: @y2 }
    end
  end

  # Face
  # Represents a human face. Note that it might be several faces on a single photo.
  # Different photos of the same person as also considered to be different faces.
  class Face
    attr_reader :id, :timestamp, :photo, :photo_hash, :thumbnail, :bbox, :meta, :galleries
  end
end
# Helpers
module Helpers
  # Destructively converts a hash's keys to symbols and returns it.
  def symbolize(myhash)
    myhash.keys.each do |key|
      myhash[(key.to_sym rescue key) || key] = myhash.delete(key)
    end
    myhash
  end

  # Builds a request payload: keeps only whitelisted option keys, then
  # merges in the mandatory arguments.
  #
  # BUG FIX: the original computed the filtered hash with `reject` but
  # discarded the result, so unknown options leaked into the payload.
  def request_body(keys, options, **args)
    options.select { |key, _| keys.include?(key) }.merge(args)
  end

  # Substitutes :gallery / :meta placeholders into an endpoint path.
  def request_path(path, options)
    path
      .gsub(':gallery', options.fetch(:gallery, :default).to_s)
      .gsub(':meta', options.fetch(:meta, '').to_s)
  end

  # POSTs to the API and returns the parsed body; raises ClientError on
  # HTTP failure or when the body carries an error 'code' field.
  def post(uri, data)
    response = connection.post uri, data
    if !response.success? || response.body.include?('code')
      message = response.body.fetch('reason', 'API Error')
      raise FindfaceApi::Error::ClientError, message
    end
    response.body
  end
end
# API Methods
module APIMethods
  # Detects faces on a photo; returns an Array of Entities::BBox.
  def detect(photo)
    response = post('detect/', photo: photo)
    response['faces'].map do |box|
      ::FindfaceApi::Entities::BBox.new(**symbolize(box))
    end
  end

  # Verifies whether two photos contain the same face.
  def verify(photo1, photo2, **options)
    keys = %i(bbox1 bbox2 threshold mf_selector)
    payload = request_body(keys, options, photo1: photo1, photo2: photo2)
    post('verify/', payload)
  end

  # Identifies a face against a gallery; returns the 'results' payload.
  def identify(photo, **options)
    keys = %i(bbox threshold n mf_selector)
    payload = request_body(keys, options, photo: photo)
    path = request_path('faces/gallery/:gallery/identify/', options)
    response = post(path, payload)
    # BUG FIX: `post` already returns the parsed response body (a Hash),
    # so index it directly — calling `.body` on a Hash raised NoMethodError.
    response['results']
  end
end
extend Configuration
extend Connection
extend Helpers
extend Entities
extend APIMethods
end
|
require "fuzzy_record/version"
module FuzzyRecord
  extend ActiveSupport::Concern

  module ClassMethods
    # Fuzzy-searches records. Accepts either a Hash of {attribute => query}
    # or a bare query string, in which case the first of :fuzzy_name,
    # :ident, :name the class responds to is used (falling back to :name).
    #
    # BUG FIXES: `search` was first assigned inside the `each` block,
    # making it block-local and raising NameError afterwards; the sort
    # block used `k`/`v` that were out of scope; `sorter` referenced the
    # undefined local `search`; and `inject(:|)` raised TypeError for
    # mixed Integer/nil match results. The search hash is now built up
    # front, matching uses `any?`, and `search` is passed explicitly.
    def fuzzy_search(args)
      if args.is_a?(Hash)
        search = args
      else
        field = [:fuzzy_name, :ident, :name].find { |f| respond_to?(f) } || :name
        search = { field => args }
      end
      self.select { |record| search.any? { |k, v| matches?(record, k, v) } }
          .sort_by { |record| sorter(record, search) }
    end

    private

    # Turns a query string into a permissive regex source that matches its
    # alphanumeric characters in order with anything in between.
    def generalize(str)
      ".*#{str.gsub(/[^a-zA-Z&0-9]/, "").chars.to_a.join(".*")}.*"
    end

    # Arithmetic mean of an array of numbers.
    def ave(arry)
      arry.sum.to_f / arry.length
    end

    # True-ish when the record's attribute loosely matches the query value.
    def matches?(record, k, v)
      record.send(k) =~ /#{generalize(v)}/i
    end

    # Average distance score across all searched attributes.
    # NOTE(review): relies on a `^` operator on attribute values (not
    # defined on core String) — presumably a similarity metric mixed in
    # elsewhere; confirm before relying on this.
    def sorter(record, search)
      ave(search.map { |k, v| record.send(k) ^ v })
    end
  end
end
ActiveRecord::Base.include(FuzzyRecord)
refactor and generalize
require "fuzzy_record/version"
module FuzzyRecord
  extend ActiveSupport::Concern

  module ClassMethods
    # Fuzzy-searches records. Accepts either a Hash of {attribute => query}
    # or a bare query string, in which case the first of :fuzzy_name,
    # :ident, :name the class responds to is used (falling back to :name).
    #
    # BUG FIXES: `search` was first assigned inside the `each` block,
    # making it block-local and raising NameError afterwards; `sorter`
    # referenced the out-of-scope local `search`; and `inject(:|)` raised
    # TypeError for mixed Integer/nil match results. The search hash is
    # now built up front, matching uses `any?`, and `search` is passed to
    # `sorter` explicitly.
    def fuzzy_search(args)
      if args.is_a?(Hash)
        search = args
      else
        field = [:fuzzy_name, :ident, :name].find { |f| respond_to?(f) } || :name
        search = { field => args }
      end
      self.select { |record| search.any? { |k, v| matches?(record, k, v) } }
          .sort_by { |record| sorter(record, search) }
    end

    private

    # Turns a query string into a permissive regex source that matches its
    # alphanumeric characters in order with anything in between.
    def generalize(str)
      ".*#{str.gsub(/[^a-zA-Z&0-9]/, "").chars.to_a.join(".*")}.*"
    end

    # Arithmetic mean of an array of numbers.
    def ave(arry)
      arry.sum.to_f / arry.length
    end

    # True-ish when the record's attribute loosely matches the query value.
    def matches?(record, k, v)
      record.send(k) =~ /#{generalize(v)}/i
    end

    # Average distance score across all searched attributes.
    # NOTE(review): relies on a `^` operator on attribute values (not
    # defined on core String) — presumably a similarity metric mixed in
    # elsewhere; confirm before relying on this.
    def sorter(record, search)
      ave(search.map { |k, v| record.send(k) ^ v })
    end
  end
end
ActiveRecord::Base.include(FuzzyRecord)
|
# Wraps a game and drives the per-player round state machine
# (two re-roll steps followed by a category pick).
class GameWrapper
  def initialize(game)
    @game = game
  end

  # Score of the player whose round is in progress.
  def current_player_score
    @current_player.score
  end

  # All players, delegated to the underlying game.
  def players
    @game.players
  end

  # Whether any rounds remain, delegated to the underlying game.
  def rounds_left?
    @game.rounds_left?
  end

  # Winning player(s), delegated to the underlying game.
  def winners
    @game.winners
  end

  # Moves on to the next player and resets the round state machine.
  def start_round_for_next_player
    advance_to_next_player
    start_round
  end

  # Selects the next player, wrapping around; the first call picks player 0.
  def advance_to_next_player
    if @current_player.nil?
      @player_index = 0
    else
      @player_index = (@player_index + 1) % players.size
    end
    @current_player = players[@player_index]
  end

  # Builds the three steps of a round. Each step is a pair of
  # [prompt symbol for the UI, callback applied to the user's input].
  def start_round
    @steps = [
      [
        :ask_for_hold_positions,
        ->(hold_positions) do
          # FIXME: we should roll before asking the user for hold positions.
          @current_player.roll_dice
          @current_player.reroll(positions_to_reroll(hold_positions))
        end
      ],
      [
        :ask_for_hold_positions,
        ->(hold_positions) do
          @current_player.reroll(positions_to_reroll(hold_positions))
        end
      ],
      [
        :ask_for_category,
        ->(category) do
          @current_player.select_category(category)
        end
      ],
    ]
    @current_step = 0
  end

  # Prompt symbol the UI should show for the current step.
  def next_step_of_round
    @steps[@current_step][0]
  end

  # Feeds the user's input to the current step's callback, then advances.
  def advance(input_from_user)
    callback = @steps[@current_step][1]
    callback.call(input_from_user)
    advance_current_step(input_from_user)
  end

  # Holding all five dice makes re-rolling pointless, so skip a step.
  def advance_current_step(input_from_user)
    if input_from_user.is_a?(Array) && input_from_user.size == 5
      @current_step += 2
    else
      @current_step += 1
    end
  end

  # A round is over once all three steps have been consumed.
  def round_finished?
    @current_step == 3
  end

  private

  # Positions (0..4) NOT held are the ones to re-roll.
  def positions_to_reroll(hold)
    [0, 1, 2, 3, 4] - hold
  end
end
Extract steps into standalone method
# Wraps a game and drives the per-player round state machine
# (two re-roll steps followed by a category pick).
class GameWrapper
  def initialize(game)
    @game = game
  end

  # Score of the player whose round is in progress.
  def current_player_score
    @current_player.score
  end

  # Delegations to the underlying game.
  def players
    @game.players
  end

  def rounds_left?
    @game.rounds_left?
  end

  def winners
    @game.winners
  end

  # Moves on to the next player and resets the round state machine.
  def start_round_for_next_player
    advance_to_next_player
    start_round
  end

  # Selects the next player, wrapping around; the first call picks player 0.
  def advance_to_next_player
    @player_index = @current_player.nil? ? 0 : (@player_index + 1) % players.size
    @current_player = players[@player_index]
  end

  # The three steps of a round, each a pair of
  # [prompt symbol for the UI, callback applied to the user's input].
  def steps
    first_roll = lambda do |hold_positions|
      # FIXME: we should roll before asking the user for hold positions.
      @current_player.roll_dice
      @current_player.reroll(positions_to_reroll(hold_positions))
    end
    second_roll = lambda do |hold_positions|
      @current_player.reroll(positions_to_reroll(hold_positions))
    end
    pick_category = lambda do |category|
      @current_player.select_category(category)
    end
    [
      [:ask_for_hold_positions, first_roll],
      [:ask_for_hold_positions, second_roll],
      [:ask_for_category, pick_category],
    ]
  end

  def start_round
    @current_step = 0
  end

  # Prompt symbol the UI should show for the current step.
  def next_step_of_round
    steps[@current_step].first
  end

  # Feeds the user's input to the current step's callback, then advances.
  def advance(input_from_user)
    steps[@current_step].last.call(input_from_user)
    advance_current_step(input_from_user)
  end

  # Holding all five dice makes re-rolling pointless, so skip a step.
  def advance_current_step(input_from_user)
    held_everything = input_from_user.is_a?(Array) && input_from_user.size == 5
    @current_step += held_everything ? 2 : 1
  end

  # A round is over once all three steps have been consumed.
  def round_finished?
    @current_step == 3
  end

  private

  # Positions (0..4) NOT held are the ones to re-roll.
  def positions_to_reroll(hold)
    [0, 1, 2, 3, 4] - hold
  end
end
|
module Gateway
class HTTP < Gateway::Base
# purge connection that is stuck in bad state
categorize_error(Net::HTTP::Pipeline::PipelineError, {
:as => :retry,
:for => :pipeline
}) do | gateway |
gateway.purge_current_connection!
end
categorize_error Net::HTTP::Pipeline::Error,
:as => :bad_gateway, :for => :pipeline
categorize_error Net::HTTPError,
:as => :bad_gateway, :for => :all
# It's safe to specify all actions because non-idempotent requests
# will skip retry automatically
categorize_error Timeout::Error, Net::HTTPError, Net::HTTP::Pipeline::ResponseError,
:as => :retry, :for => :all
# Coerces +uri+ into a parsed URI. Existing URI::HTTP(S) objects pass
# through untouched; bare host strings get an "http://" scheme prepended.
def self.normalize_uri(uri)
  return uri if uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
  str = uri.to_s
  str = "http://#{str}" unless str =~ %r{^(http|https)://}
  URI.parse(str)
end
attr_reader :address, :use_ssl, :host, :port
attr_accessor :header
def initialize(name, opts)
super
@address = self.class.normalize_uri(opts[:uri])
@use_ssl = @address.scheme == "https"
@host = @address.host
@port = @address.port
@header = opts[:header] || {}
end
def pipeline(requests, opts={}, &block)
msg = requests.map{|r| absolute_url(r).to_s }.join(',')
execute('pipeline', msg, opts) do |conn|
conn.start unless conn.started?
conn.pipeline(requests.dup, &block)
end
end
def request(req, opts={})
opts = {
:persistent => false,
:retry => false
}.merge(opts) unless idempotent?(req)
action = req.method.downcase.to_sym
execute(action, absolute_url(req), opts) do |conn|
conn.start unless conn.started?
rsp = conn.request(req)
validate_response(req, rsp, valid_responses(opts)) if validate_response?(opts)
rsp
end
end
def absolute_url(req)
address + req.path
end
def head(path, header=nil, opts={})
req = prepare_request(:head, path, nil, header)
request(req, opts)
end
def get(path, header=nil, opts={})
req = prepare_request(:get, path, nil, header)
request(req, opts)
end
def post(path, body=nil, header=nil, opts={})
req = prepare_request(:post, path, body, header)
request(req, opts)
end
def put(path, body=nil, header=nil, opts={})
req = prepare_request(:put, path, body, header)
request(req, opts)
end
def delete(path, header=nil, opts={})
req = prepare_request(:delete, path, nil, header)
request(req, opts)
end
# True for request types that are safe to retry (per RFC 7231 idempotent
# methods); returns nil for everything else (e.g. POST).
def idempotent?(req)
  safe_types = [
    Net::HTTP::Delete, Net::HTTP::Get, Net::HTTP::Head,
    Net::HTTP::Options, Net::HTTP::Put, Net::HTTP::Trace
  ]
  true if safe_types.any? { |klass| req.is_a?(klass) }
end
def validate_response?(opts)
opts.fetch(:validate_response, true)
end
def valid_responses(opts)
opts.fetch(:valid_responses, [ Net::HTTPSuccess ])
end
def validate_response(req, rsp, valid_rsp)
is_valid = valid_rsp.any?{|klass| rsp.is_a?(klass) }
raise Gateway::BadResponse.new(
"Invalid Response",
:status => rsp.code,
:url => absolute_url(req)
) unless is_valid
end
# Builds a Net::HTTP request object for +method+ (e.g. :get -> Net::HTTP::Get),
# merging per-call headers over the gateway-wide defaults.
# A body is only attached for POST/PUT (see allow_body?): a Hash becomes
# form data, an IO-like object (rewind+read) is read in full, anything
# else is stringified.
def prepare_request(method, path, body, header)
  klass = Net::HTTP.const_get method.to_s.capitalize
  header = self.header.merge(header || {})
  req = klass.new path, header
  if allow_body?(req)
    if body.is_a?(Hash)
      req.set_form_data body
    elsif body.respond_to?(:rewind) && body.respond_to?(:read)
      body.rewind
      req.body = body.read
    else
      req.body = body.to_s
    end
  end
  req
end
def allow_body?(req)
req.is_a?(Net::HTTP::Post) || req.is_a?(Net::HTTP::Put)
end
def read_timeout
options[:read_timeout]
end
def open_timeout
options[:open_timeout]
end
protected
# BUG FIX: pipeline calls yield an Array of responses, which has no
# #code/#message — calling them unconditionally raised NoMethodError.
# Fall back to the base implementation for anything response-unlike.
def success_status(resp)
  resp.respond_to?(:code) ? resp.code : super
end

def success_message(resp)
  resp.respond_to?(:message) ? resp.message : super
end
def connect
conn = Net::HTTP.new(host, port)
conn.use_ssl = use_ssl
conn.read_timeout = read_timeout if read_timeout
conn.open_timeout = open_timeout if open_timeout
conn
end
def disconnect(conn)
conn.finish
rescue IOError
end
def reconnect(conn)
disconnect(conn)
conn
end
end
end
Fix success_* methods
Pipeline returns an array of responses (not a single response object), so calling #code/#message on it was breaking.
module Gateway
class HTTP < Gateway::Base
# purge connection that is stuck in bad state
categorize_error(Net::HTTP::Pipeline::PipelineError, {
:as => :retry,
:for => :pipeline
}) do | gateway |
gateway.purge_current_connection!
end
categorize_error Net::HTTP::Pipeline::Error,
:as => :bad_gateway, :for => :pipeline
categorize_error Net::HTTPError,
:as => :bad_gateway, :for => :all
# It's safe to specify all actions because non-idempotent requests
# will skip retry automatically
categorize_error Timeout::Error, Net::HTTPError, Net::HTTP::Pipeline::ResponseError,
:as => :retry, :for => :all
# Coerces +uri+ into a parsed URI. Existing URI::HTTP(S) objects pass
# through untouched; bare host strings get an "http://" scheme prepended.
def self.normalize_uri(uri)
  return uri if uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
  str = uri.to_s
  str = "http://#{str}" unless str =~ %r{^(http|https)://}
  URI.parse(str)
end
attr_reader :address, :use_ssl, :host, :port
attr_accessor :header
def initialize(name, opts)
super
@address = self.class.normalize_uri(opts[:uri])
@use_ssl = @address.scheme == "https"
@host = @address.host
@port = @address.port
@header = opts[:header] || {}
end
def pipeline(requests, opts={}, &block)
msg = requests.map{|r| absolute_url(r).to_s }.join(',')
execute('pipeline', msg, opts) do |conn|
conn.start unless conn.started?
conn.pipeline(requests.dup, &block)
end
end
def request(req, opts={})
opts = {
:persistent => false,
:retry => false
}.merge(opts) unless idempotent?(req)
action = req.method.downcase.to_sym
execute(action, absolute_url(req), opts) do |conn|
conn.start unless conn.started?
rsp = conn.request(req)
validate_response(req, rsp, valid_responses(opts)) if validate_response?(opts)
rsp
end
end
def absolute_url(req)
address + req.path
end
def head(path, header=nil, opts={})
req = prepare_request(:head, path, nil, header)
request(req, opts)
end
def get(path, header=nil, opts={})
req = prepare_request(:get, path, nil, header)
request(req, opts)
end
def post(path, body=nil, header=nil, opts={})
req = prepare_request(:post, path, body, header)
request(req, opts)
end
def put(path, body=nil, header=nil, opts={})
req = prepare_request(:put, path, body, header)
request(req, opts)
end
def delete(path, header=nil, opts={})
req = prepare_request(:delete, path, nil, header)
request(req, opts)
end
def idempotent?(req)
case req
when Net::HTTP::Delete, Net::HTTP::Get, Net::HTTP::Head,
Net::HTTP::Options, Net::HTTP::Put, Net::HTTP::Trace then
true
end
end
def validate_response?(opts)
opts.fetch(:validate_response, true)
end
def valid_responses(opts)
opts.fetch(:valid_responses, [ Net::HTTPSuccess ])
end
def validate_response(req, rsp, valid_rsp)
is_valid = valid_rsp.any?{|klass| rsp.is_a?(klass) }
raise Gateway::BadResponse.new(
"Invalid Response",
:status => rsp.code,
:url => absolute_url(req)
) unless is_valid
end
def prepare_request(method, path, body, header)
klass = Net::HTTP.const_get method.to_s.capitalize
header = self.header.merge(header || {})
req = klass.new path, header
if allow_body?(req)
if body.is_a?(Hash)
req.set_form_data body
elsif body.respond_to?(:rewind) && body.respond_to?(:read)
body.rewind
req.body = body.read
else
req.body = body.to_s
end
end
req
end
def allow_body?(req)
req.is_a?(Net::HTTP::Post) || req.is_a?(Net::HTTP::Put)
end
def read_timeout
options[:read_timeout]
end
def open_timeout
options[:open_timeout]
end
protected
# Pipeline calls yield an Array of responses (no #code/#message); fall
# back to the base implementation for anything that lacks them.
def success_status(resp)
  resp.respond_to?(:code) ? resp.code : super
end

def success_message(resp)
  resp.respond_to?(:message) ? resp.message : super
end
def connect
conn = Net::HTTP.new(host, port)
conn.use_ssl = use_ssl
conn.read_timeout = read_timeout if read_timeout
conn.open_timeout = open_timeout if open_timeout
conn
end
def disconnect(conn)
conn.finish
rescue IOError
end
def reconnect(conn)
disconnect(conn)
conn
end
end
end
|
# The Gday module is the main container for the gday gem.
module Gday
  # The version for the gday gem. Frozen so callers cannot mutate it.
  VERSION = '0.0.5'.freeze
end
Bump to 0.0.6
# The Gday module is the main container for the gday gem.
module Gday
  # The version for the gday gem. Frozen so callers cannot mutate it.
  VERSION = '0.0.6'.freeze
end
|
require_relative 'json_utils'
require 'cgi'
require 'null_logger'
# Base class for GDS API adapters: works out the endpoint URL for a
# platform and builds resource URLs.
class GdsApi::Base
  include GdsApi::JsonUtils

  attr_reader :options

  class << self
    attr_writer :logger
  end

  # Class-wide logger, defaulting to a no-op logger.
  def self.logger
    @logger ||= NullLogger.instance
  end

  # Accepts either (platform, options_hash) or, for backwards
  # compatibility, (platform, endpoint_url_string, options_hash).
  def initialize(platform, options_or_endpoint_url = nil, maybe_options = nil)
    if options_or_endpoint_url.is_a?(String)
      @options = maybe_options || {}
      @options[:endpoint_url] = options_or_endpoint_url
    else
      @options = options_or_endpoint_url || {}
    end
    self.endpoint = options[:endpoint_url] || endpoint_for_platform(adapter_name, platform)
  end

  # Adapter name derived from the class name, e.g. GdsApi::Foo -> "foo".
  def adapter_name
    self.class.to_s.split("::").last.downcase
  end

  # URL for a JSON resource identified by slug, with optional query params.
  # (Removed a useless `base =` local: the string is simply returned.)
  def url_for_slug(slug, options = {})
    "#{base_url}/#{slug}.json#{query_string(options)}"
  end

  private

  attr_accessor :endpoint

  # This should get simpler if we can be more consistent with our domain names
  def endpoint_for_platform(adapter_name, platform)
    if platform == 'development'
      "http://#{adapter_name}.dev.gov.uk"
    else
      # FIX: production-like platforms are served over TLS.
      "https://#{adapter_name}.#{platform}.alphagov.co.uk"
    end
  end

  # Deterministic (sorted), URL-escaped query string; "" when no params.
  def query_string(params)
    return "" if params.empty?
    "?" << params.sort.map { |kv|
      kv.map { |a| CGI.escape(a.to_s) }.join("=")
    }.join("&")
  end
end
We now use https
require_relative 'json_utils'
require 'cgi'
require 'null_logger'
# Base class for GDS API adapters: works out the endpoint URL for a
# platform and builds resource URLs.
class GdsApi::Base
  include GdsApi::JsonUtils

  attr_reader :options

  class << self
    attr_writer :logger
  end

  # Class-wide logger, defaulting to a no-op logger.
  def self.logger
    @logger ||= NullLogger.instance
  end

  # Accepts either (platform, options_hash) or, for backwards
  # compatibility, (platform, endpoint_url_string, options_hash).
  def initialize(platform, options_or_endpoint_url = nil, maybe_options = nil)
    if options_or_endpoint_url.is_a?(String)
      @options = maybe_options || {}
      @options[:endpoint_url] = options_or_endpoint_url
    else
      @options = options_or_endpoint_url || {}
    end
    self.endpoint = options[:endpoint_url] || endpoint_for_platform(adapter_name, platform)
  end

  # Adapter name derived from the class name, e.g. GdsApi::Foo -> "foo".
  def adapter_name
    self.class.to_s.split("::").last.downcase
  end

  # URL for a JSON resource identified by slug, with optional query params.
  # (Removed a useless `base =` local: the string is simply returned.)
  def url_for_slug(slug, options = {})
    "#{base_url}/#{slug}.json#{query_string(options)}"
  end

  private

  attr_accessor :endpoint

  # This should get simpler if we can be more consistent with our domain names
  def endpoint_for_platform(adapter_name, platform)
    if platform == 'development'
      "http://#{adapter_name}.dev.gov.uk"
    else
      "https://#{adapter_name}.#{platform}.alphagov.co.uk"
    end
  end

  # Deterministic (sorted), URL-escaped query string; "" when no params.
  def query_string(params)
    return "" if params.empty?
    "?" << params.sort.map { |kv|
      kv.map { |a| CGI.escape(a.to_s) }.join("=")
    }.join("&")
  end
end
|
module Gems
  # Gem version; guarded so a re-require cannot clobber an existing
  # value, and frozen so callers cannot mutate it.
  VERSION = "0.3.0".freeze unless defined? ::Gems::VERSION
end
Bump version to 0.4.0
module Gems
  # Gem version; guarded so a re-require cannot clobber an existing
  # value, and frozen so callers cannot mutate it.
  VERSION = "0.4.0".freeze unless defined? ::Gems::VERSION
end
|
module Gems
  # Gem version; guarded so a re-require cannot clobber an existing
  # value, and frozen so callers cannot mutate it.
  VERSION = "0.1.0".freeze unless defined? ::Gems::VERSION
end
Bump version to 0.2.0
module Gems
  # Gem version; guarded so a re-require cannot clobber an existing
  # value, and frozen so callers cannot mutate it.
  VERSION = "0.2.0".freeze unless defined? ::Gems::VERSION
end
|
require 'open3'
require_relative 'gitlab_net'
# Handles an incoming SSH connection for GitLab: identifies the key,
# validates the requested git command against the API and execs it.
class GitlabShell
  # (Removed duplicate :repo_name from the accessor list.)
  attr_accessor :key_id, :repo_name, :git_cmd, :repos_path

  def initialize
    # BUG FIX: key ids can have multi-digit numbers; the old /key-[0-9]/
    # pattern truncated e.g. "key-123" to "key-1", breaking API lookups.
    @key_id = /key-[0-9]+/.match(ARGV.join).to_s
    @origin_cmd = ENV['SSH_ORIGINAL_COMMAND']
    @repos_path = GitlabConfig.new.repos_path
  end

  # Dispatches the session: runs the requested git command when allowed,
  # or greets the user for a plain `ssh` with no command.
  def exec
    if @origin_cmd
      parse_cmd
      if git_cmds.include?(@git_cmd)
        ENV['GL_ID'] = @key_id
        if validate_access
          process_cmd
        end
      else
        puts 'Not allowed command'
      end
    else
      user = api.discover(@key_id)
      puts "Welcome to GitLab, #{user && user['name'] || 'Anonymous'}!"
    end
  end

  protected

  # Splits SSH_ORIGINAL_COMMAND into the git command and repository name.
  def parse_cmd
    args = @origin_cmd.split(' ')
    @git_cmd = args.shift
    @repo_name = args.shift
  end

  # Whitelist of git server commands we are willing to exec.
  def git_cmds
    %w(git-upload-pack git-receive-pack git-upload-archive)
  end

  # Execs the git command against the repository's full on-disk path.
  def process_cmd
    repo_full_path = File.join(repos_path, repo_name)
    exec_cmd "#{@git_cmd} #{repo_full_path}"
  end

  # Asks the GitLab API whether this key may run the command on the repo.
  def validate_access
    api.allowed?(@git_cmd, @repo_name, @key_id, '_any')
  end

  # Replaces the current process with the given command.
  def exec_cmd args
    Kernel::exec args
  end

  def api
    GitlabNet.new
  end
end
Fixed bad regex: key IDs with multi-digit numbers (e.g. key-123) were truncated by /key-[0-9]/.
require 'open3'
require_relative 'gitlab_net'
# Handles an incoming SSH connection for GitLab: identifies the key,
# validates the requested git command against the API and execs it.
class GitlabShell
  # (Removed duplicate :repo_name from the accessor list.)
  attr_accessor :key_id, :repo_name, :git_cmd, :repos_path

  def initialize
    # BUG FIX: removed leftover debug output (`puts "---"; puts ARGV`) —
    # writing to stdout here pollutes the SSH session and can corrupt
    # the git protocol stream.
    @key_id = /key-[0-9]+/.match(ARGV.join).to_s
    @origin_cmd = ENV['SSH_ORIGINAL_COMMAND']
    @repos_path = GitlabConfig.new.repos_path
  end

  # Dispatches the session: runs the requested git command when allowed,
  # or greets the user for a plain `ssh` with no command.
  def exec
    if @origin_cmd
      parse_cmd
      if git_cmds.include?(@git_cmd)
        ENV['GL_ID'] = @key_id
        if validate_access
          process_cmd
        end
      else
        puts 'Not allowed command'
      end
    else
      user = api.discover(@key_id)
      puts "Welcome to GitLab, #{user && user['name'] || 'Anonymous'}!"
    end
  end

  protected

  # Splits SSH_ORIGINAL_COMMAND into the git command and repository name.
  def parse_cmd
    args = @origin_cmd.split(' ')
    @git_cmd = args.shift
    @repo_name = args.shift
  end

  # Whitelist of git server commands we are willing to exec.
  def git_cmds
    %w(git-upload-pack git-receive-pack git-upload-archive)
  end

  # Execs the git command against the repository's full on-disk path.
  def process_cmd
    repo_full_path = File.join(repos_path, repo_name)
    exec_cmd "#{@git_cmd} #{repo_full_path}"
  end

  # Asks the GitLab API whether this key may run the command on the repo.
  def validate_access
    api.allowed?(@git_cmd, @repo_name, @key_id, '_any')
  end

  # Replaces the current process with the given command.
  def exec_cmd args
    Kernel::exec args
  end

  def api
    GitlabNet.new
  end
end
|
# coding: utf-8
# A client for the GMO Payment API.
#
# example
# gmo = GMO::Payment::ShopAPI.new({
# shop_id: "foo",
# shop_pass: "bar",
# host: "mul-pay.com",
# locale: "ja"
# })
# result = gmo.post_request("EntryTran.idPass", options)
module GMO
module Payment
module ShopAPIMethods
def initialize(options = {})
@shop_id = options[:shop_id]
@shop_pass = options[:shop_pass]
@host = options[:host]
@locale = options.fetch(:locale, GMO::Const::DEFAULT_LOCALE)
unless @shop_id && @shop_pass && @host
raise ArgumentError, "Initialize must receive a hash with :shop_id, :shop_pass and either :host! (received #{options.inspect})"
end
end
attr_reader :shop_id, :shop_pass, :host, :locale
## 2.1.2.1.取引登録
# これ以降の決済取引で必要となる取引 ID と取引パスワードの発行を行い、取引を開始します。
# ItemCode
# Tax
# TdFlag
# TdTenantName
### @return ###
# AccessID
# AccessPass
# ErrCode
# ErrInfo
### example ###
# gmo.entry_tran({
# order_id: 100,
# job_cd: "AUTH",
# amount: 100
# })
# {"AccessID"=>"a41d83f1f4c908baeda04e6dc03e300c", "AccessPass"=>"d72eca02e28c88f98b9341a33ba46d5d"}
def entry_tran(options = {})
name = "EntryTran.idPass"
required = [:order_id, :job_cd]
required << :amount if options[:job_cd] && options[:job_cd] != "CHECK"
assert_required_options(required, options)
post_request name, options
end
# 【コンビニ払い】
# 2.1.2.1. 取引登録
# これ以降の決済取引で必要となる取引IDと取引パスワードの発行を行い、取引を開始します。
def entry_tran_cvs(options = {})
name = "EntryTranCvs.idPass"
required = [:order_id, :amount]
assert_required_options(required, options)
post_request name, options
end
# 【Pay-easy決済】
# 5.1.2.1. 取引登録
# これ以降の決済取引で必要となる取引IDと取引パスワードの発行を行い、取引を開始します。
def entry_tran_pay_easy(options = {})
name = "EntryTranPayEasy.idPass"
required = [:order_id, :amount]
assert_required_options(required, options)
post_request name, options
end
# 【LINE Pay決済】
# 20.1.2.1. 取引登録
# これ以降の決済取引で必要となる取引IDと取引パスワードの発行を行い、取引を開始します。
def entry_tran_linepay(options = {})
name = "EntryTranLinepay.idPass"
required = [:order_id, :job_cd, :amount]
assert_required_options(required, options)
post_request name, options
end
### @params ###
# OrderID
# JobCd
# Amount
# ItemCode
# Tax
### @return ###
# AccessID
# AccessPass
### example ###
# gmo.entry_tran_brandtoken({
# order_id: "ord12345",
# job_cd: "AUTH",
# item_code: "1000001",
# tax: "0001001",
# amount: 100
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744"}
def entry_tran_brandtoken(options = {})
name = "EntryTranBrandtoken.idPass"
required = [:order_id, :job_cd, :amount]
assert_required_options(required, options)
post_request name, options
end
## 2.2.2.2.決済実行
# 指定されたサイトに会員を登録します。
# return
# ACS
# OrderID
# Forward
# Method
# PayTimes
# Approve
# TranID
# TranDate
# CheckString
# ClientField1
# ClientField2
# ClientField3
### @return ###
# ACS
# OrderID
# Forward
# Method
# PayTimes
# Approve
# TranID
# CheckString
# ClientField1
# ClientField2
# ClientField3
### example ###
# gmo.exec_tran({
# order_id: 100,
# access_id: "a41d83f1f4c908baeda04e6dc03e300c",
# access_pass: "d72eca02e28c88f98b9341a33ba46d5d",
# method: 1,
# pay_times: 1,
# card_no: "4111111111111111",
# expire: "1405", #format YYMM
# client_field_1: "client_field1"
# })
# {"ACS"=>"0", "OrderID"=>"100", "Forward"=>"2a99662", "Method"=>"1", "PayTimes"=>"", "Approve"=>"6294780", "TranID"=>"1302160543111111111111192829", "TranDate"=>"20130216054346", "CheckString"=>"3e455a2168fefc90dbb7db7ef7b0fe82", "ClientField1"=>"client_field1", "ClientField2"=>"", "ClientField3"=>""}
# Executes a card payment transaction (ExecTran).
# Accepts either raw card details (:card_no, :expire) or a :token;
# required keys are validated accordingly before posting.
# (Removed a large block of dead, commented-out argument-mapping code.)
def exec_tran(options = {})
  name = "ExecTran.idPass"
  # ClientFieldFlag must be "1" whenever any client field is supplied.
  if options[:client_field_1] || options[:client_field_2] || options[:client_field_3]
    options[:client_field_flg] = "1"
  else
    options[:client_field_flg] = "0"
  end
  options[:device_category] = "0"
  if options[:token].nil?
    required = [:access_id, :access_pass, :order_id, :card_no, :expire]
  else
    required = [:access_id, :access_pass, :token]
  end
  assert_required_options(required, options)
  post_request name, options
end
# 【コンビニ払い】
# 2.1.2.2. 決済実行
# お客様が入力した情報で後続の決済センターと通信を行い決済を実施し、結果を返します。
def exec_tran_cvs(options = {})
name = "ExecTranCvs.idPass"
required = [:access_id, :access_pass, :order_id, :convenience, :customer_name, :customer_kana, :tel_no, :receipts_disp_11, :receipts_disp_12, :receipts_disp_13]
assert_required_options(required, options)
post_request name, options
end
# 【Pay-easy決済】
# 5.1.2.2. 決済実行
# お客様が入力した情報で後続の決済センターと通信を行い決済を実施し、結果を返します。
def exec_tran_pay_easy(options = {})
name = "ExecTranPayEasy.idPass"
required = [:access_id, :access_pass, :order_id, :customer_name, :customer_kana, :tel_no, :receipts_disp_11, :receipts_disp_12, :receipts_disp_13]
assert_required_options(required, options)
post_request name, options
end
# 【LINE Pay決済】
# 20.1.2.2. 決済実行
def exec_tran_linepay(options = {})
name = "ExecTranLinepay.idPass"
required = [:access_id, :access_pass, :order_id, :ret_url, :error_rcv_url, :product_name]
assert_required_options(required, options)
post_request name, options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# TokenType
# Token
# MemberID
# SeqMode
# TokenSeq
# ClientField1
# ClientField2
# ClientField3
### @return ###
# Status
# OrderID
# Forward
# Approve
# TranID
# TranDate
# ClientField1
# ClientField2
# ClientField3
### example ###
# gmo.exec_tran_brandtoken({
# order_id: "597ae8c36120b23a3c00014e",
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# token_type: :apple_pay,
# token: <Base64 encoded payment data>,
# seq_mode: "1",
# token_seq: 1001,
# client_field_1: "Custom field value 1",
# client_field_2: "Custom field value 2",
# client_field_3: "Custom field value 3"
# })
# => {"Status"=>"CAPTURE", "OrderID"=>"597ae8c36120b23a3c00014e", "Forward"=>"2a99663", "Approve"=>"5487394", "TranID"=>"1707281634111111111111771216", "TranDate"=>"20170728163453", "ClientField1"=>"Custom field value 1", "ClientField2"=>"Custom field value 2", "ClientField3"=>"Custom field value 3"}
def exec_tran_brandtoken(options = {})
name = "ExecTranBrandtoken.idPass"
options[:token_type] = GMO::Const::TOKEN_TYPES_MAP[options[:token_type]]
required = [:access_id, :access_pass, :order_id]
assert_required_options(required, options)
post_request name, options
end
## 2.14.2.1.決済変更
# 仮売上の決済に対して実売上を行います。尚、実行時に仮売上時との金額チェックを行います。
# /payment/AlterTran.idPass
# ShopID
# ShopPass
# AccessID 取引ID
# AccessPass 取引パスワード
# JobCd 処理区分 "SALES"
# Amount 利用金額
### @return ###
# AccessID
# AccessPass
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.alter_tran({
# access_id: "a41d83f1f4c908baeda04e6dc03e300c",
# access_pass: "d72eca02e28c88f98b9341a33ba46d5d",
# job_cd: "SALES",
# amount: 100
# })
# {"AccessID"=>"381d84ae4e6fc37597482573a9569f10", "AccessPass"=>"cc0093ca8758c6616fa0ab9bf6a43e8d", "Forward"=>"2a99662", "Approve"=>"6284199", "TranID"=>"1302140555111111111111193536", "TranDate"=>"20130215110651"}
def alter_tran(options = {})
name = "AlterTran.idPass"
required = [:access_id, :access_pass, :job_cd]
assert_required_options(required, options)
post_request name, options
end
## 2.15.2.1.金額変更
# 決済が完了した取引に対して金額の変更を行います。
### @return ###
# AccessID
# AccessPass
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.change_tran({
# access_id: "a41d83f1f4c908baeda04e6dc03e300c",
# access_pass: "d72eca02e28c88f98b9341a33ba46d5d",
# job_cd: "CAPTURE",
# amount: 100
# })
def change_tran(options = {})
name = "ChangeTran.idPass"
required = [:access_id, :access_pass, :job_cd, :amount]
assert_required_options(required, options)
post_request name, options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# JobCd
# Amount
# Tax
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.change_tran_brandtoken({
# access_id: "21170701482c86c3b88ff72b83bfd363",
# access_pass: "51f36feba120de1e6e29532e5a3a5e3e",
# order_id: "ord10001",
# job_cd: "CAPTURE",
# amount: 2000
# })
# => {"AccessID"=>"21170701482c86c3b88ff72b83bfd363", "AccessPass"=>"51f36feba120de1e6e29532e5a3a5e3e", "Status"=>"CAPTURE", "Forward"=>"2a99663", "Approve"=>"5538477", "TranID"=>"1707311633111111111111771224", "TranDate"=>"20170731163343"}
def change_tran_brandtoken(options = {})
name = "ChangeTranBrandtoken.idPass"
required = [:access_id, :access_pass, :order_id, :job_cd, :amount]
assert_required_options(required, options)
post_request name, options
end
### @params ###
# AccessID
# AccessPass
# OrderID
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.void_tran_brandtoken({
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# order_id: "597ae8c36120b23a3c00014e"
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744", "Status"=>"VOID", "Forward"=>"2a99663", "Approve"=>"5537590", "TranID"=>"1707311610111111111111771219", "TranDate"=>"20170731161007"}
def void_tran_brandtoken(options = {})
name = "VoidTranBrandtoken.idPass"
required = [:access_id, :access_pass, :order_id]
assert_required_options(required, options)
post_request name, options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# Amount
# Tax
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.sales_tran_brandtoken({
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# order_id: "597ae8c36120b23a3c00014e",
# amount: 1000,
# tax: "0001001"
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744", "Status"=>"SALES", "Forward"=>"2a99663", "Approve"=>"5537883", "TranID"=>"1707311620111111111111771220", "TranDate"=>"20170731162256"}
def sales_tran_brandtoken(options = {})
name = "SalesTranBrandtoken.idPass"
required = [:access_id, :access_pass, :order_id, :amount]
assert_required_options(required, options)
post_request name, options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# Amount
# Tax
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.refund_tran_brandtoken({
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# order_id: "597ae8c36120b23a3c00014e",
# amount: 1000,
# tax: "0001001"
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744", "Status"=>"RETURN", "Forward"=>"2a99663", "Approve"=>"5537883", "TranID"=>"1707311620111111111111771220", "TranDate"=>"20170731162256"}
def refund_tran_brandtoken(options = {})
name = "RefundTranBrandtoken.idPass"
required = [:access_id, :access_pass, :order_id, :amount]
assert_required_options(required, options)
post_request name, options
end
## 2.16.2.1.取引状態参照
# 指定したオーダーID の取引情報を取得します。
def search_trade(options = {})
name = "SearchTrade.idPass"
required = [:order_id]
assert_required_options(required, options)
post_request name, options
end
# 13.1.2.1.取引状態参照
# 指定したオーダーIDの取引情報を取得します。
### @params ###
# OrderID
# PayType
### @return ###
# OrderID
# Status
# ProcessDate
# JobCd
# AccessID
# AccessPass
# ItemCode
# Amount
# Tax
# SiteID
# MemberID
# CardNoToken
# Expire
# Method
# PayTimes
# Forward
# TranID
# Approve
# ClientField1
# ClientField2
# ClientField3
# PayType
### example ###
# gmo.search_trade_multi({
# order_id: '598066176120b2235300020b',
# pay_type: 27
# })
# => {"OrderID"=>"598066176120b2235300020b", "Status"=>"CAPTURE", "ProcessDate"=>"20170801202929", "JobCd"=>"CAPTURE", "AccessID"=>"228fc5bc02da46943300c12706d325a2", "AccessPass"=>"090a50ec2f77d92184a18018f07906e5", "ItemCode"=>"0000990", "Amount"=>"557", "Tax"=>"0", "SiteID"=>"", "MemberID"=>"", "CardNoToken"=>"************1111", "Expire"=>"2212", "Method"=>"1", "PayTimes"=>"", "Forward"=>"2a99663", "TranID"=>"1708012029111111111111771228", "Approve"=>"5689128", "ClientField1"=>"", "ClientField2"=>"", "ClientField3"=>"", "PayType"=>"27"}
def search_trade_multi(options = {})
name = "SearchTradeMulti.idPass"
required = [:order_id, :pay_type]
assert_required_options(required, options)
post_request name, options
end
private
# Sends an authenticated API call: merges the shop credentials into the
# args, delegates to +api+, and raises APIError whenever the response
# hash carries an "ErrInfo" error field.
def api_call(name, args = {}, verb = "post", options = {})
  args.merge!({ "ShopID" => @shop_id, "ShopPass" => @shop_pass })
  api(name, args, verb, options) do |response|
    if response.is_a?(Hash) && !response["ErrInfo"].nil?
      raise APIError.new(response, locale)
    end
  end
end
end
end
end
Add cvs_cancel method
# coding: utf-8
# A client for the GMO Payment API.
#
# example
# gmo = GMO::Payment::ShopAPI.new({
# shop_id: "foo",
# shop_pass: "bar",
# host: "mul-pay.com",
# locale: "ja"
# })
# result = gmo.post_request("EntryTran.idPass", options)
module GMO
module Payment
module ShopAPIMethods
# Stores the shop credentials, API host and locale for subsequent requests.
#
# options must contain :shop_id, :shop_pass and :host; :locale falls back
# to GMO::Const::DEFAULT_LOCALE. Raises ArgumentError when any mandatory
# credential is missing.
def initialize(options = {})
  @shop_id = options[:shop_id]
  @shop_pass = options[:shop_pass]
  @host = options[:host]
  @locale = options.fetch(:locale, GMO::Const::DEFAULT_LOCALE)
  unless @shop_id && @shop_pass && @host
    # Fixed message: previously read ":shop_pass and either :host!", a
    # copy/paste garble — all three keys are required together.
    raise ArgumentError, "Initialize must receive a hash with :shop_id, :shop_pass and :host! (received #{options.inspect})"
  end
end
attr_reader :shop_id, :shop_pass, :host, :locale
## 2.1.2.1.取引登録
# これ以降の決済取引で必要となる取引 ID と取引パスワードの発行を行い、取引を開始します。
# ItemCode
# Tax
# TdFlag
# TdTenantName
### @return ###
# AccessID
# AccessPass
# ErrCode
# ErrInfo
### example ###
# gmo.entry_tran({
# order_id: 100,
# job_cd: "AUTH",
# amount: 100
# })
# {"AccessID"=>"a41d83f1f4c908baeda04e6dc03e300c", "AccessPass"=>"d72eca02e28c88f98b9341a33ba46d5d"}
# 2.1.2.1. Transaction registration: issues the AccessID/AccessPass pair
# needed by the later payment steps. :amount is mandatory except when
# JobCd is "CHECK".
def entry_tran(options = {})
  required = [:order_id, :job_cd]
  required << :amount if options[:job_cd] && options[:job_cd] != "CHECK"
  assert_required_options(required, options)
  post_request "EntryTran.idPass", options
end
# 【コンビニ払い】
# 2.1.2.1. 取引登録
# これ以降の決済取引で必要となる取引IDと取引パスワードの発行を行い、取引を開始します。
# Convenience-store payment: transaction registration.
def entry_tran_cvs(options = {})
  assert_required_options([:order_id, :amount], options)
  post_request "EntryTranCvs.idPass", options
end
# 【Pay-easy決済】
# 5.1.2.1. 取引登録
# これ以降の決済取引で必要となる取引IDと取引パスワードの発行を行い、取引を開始します。
# Pay-easy payment: transaction registration.
def entry_tran_pay_easy(options = {})
  assert_required_options([:order_id, :amount], options)
  post_request "EntryTranPayEasy.idPass", options
end
# 【LINE Pay決済】
# 20.1.2.1. 取引登録
# これ以降の決済取引で必要となる取引IDと取引パスワードの発行を行い、取引を開始します。
# LINE Pay payment: transaction registration.
def entry_tran_linepay(options = {})
  assert_required_options([:order_id, :job_cd, :amount], options)
  post_request "EntryTranLinepay.idPass", options
end
### @params ###
# OrderID
# JobCd
# Amount
# ItemCode
# Tax
### @return ###
# AccessID
# AccessPass
### example ###
# gmo.entry_tran_brandtoken({
# order_id: "ord12345",
# job_cd: "AUTH",
# item_code: "1000001",
# tax: "0001001",
# amount: 100
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744"}
# Brand-token payment: transaction registration.
def entry_tran_brandtoken(options = {})
  assert_required_options([:order_id, :job_cd, :amount], options)
  post_request "EntryTranBrandtoken.idPass", options
end
## 2.2.2.2.決済実行
# 指定された取引の決済を実行します。
# return
# ACS
# OrderID
# Forward
# Method
# PayTimes
# Approve
# TranID
# TranDate
# CheckString
# ClientField1
# ClientField2
# ClientField3
### @return ###
# ACS
# OrderID
# Forward
# Method
# PayTimes
# Approve
# TranID
# CheckString
# ClientField1
# ClientField2
# ClientField3
### example ###
# gmo.exec_tran({
# order_id: 100,
# access_id: "a41d83f1f4c908baeda04e6dc03e300c",
# access_pass: "d72eca02e28c88f98b9341a33ba46d5d",
# method: 1,
# pay_times: 1,
# card_no: "4111111111111111",
# expire: "1405", #format YYMM
# client_field_1: "client_field1"
# })
# {"ACS"=>"0", "OrderID"=>"100", "Forward"=>"2a99662", "Method"=>"1", "PayTimes"=>"", "Approve"=>"6294780", "TranID"=>"1302160543111111111111192829", "TranDate"=>"20130216054346", "CheckString"=>"3e455a2168fefc90dbb7db7ef7b0fe82", "ClientField1"=>"client_field1", "ClientField2"=>"", "ClientField3"=>""}
# 2.2.2.2. Payment execution: runs the actual card / token payment for a
# transaction previously registered with entry_tran.
#
# ClientFieldFlag is derived automatically: "1" when any client_field_*
# option is supplied, "0" otherwise. DeviceCategory is always "0".
# Token payments require :access_id, :access_pass and :token; raw-card
# payments require :access_id, :access_pass, :order_id, :card_no, :expire.
#
# (Removed a large block of dead, commented-out argument-mapping code that
# duplicated what post_request already does.)
def exec_tran(options = {})
  name = "ExecTran.idPass"
  client_fields = options.values_at(:client_field_1, :client_field_2, :client_field_3)
  options[:client_field_flg] = client_fields.any? ? "1" : "0"
  options[:device_category] = "0"
  required = if options[:token].nil?
    [:access_id, :access_pass, :order_id, :card_no, :expire]
  else
    [:access_id, :access_pass, :token]
  end
  assert_required_options(required, options)
  post_request name, options
end
# 【コンビニ払い】
# 2.1.2.2. 決済実行
# お客様が入力した情報で後続の決済センターと通信を行い決済を実施し、結果を返します。
# Convenience-store payment: execute the payment at the CVS settlement
# center with the customer's details.
def exec_tran_cvs(options = {})
  assert_required_options([:access_id, :access_pass, :order_id, :convenience, :customer_name, :customer_kana, :tel_no, :receipts_disp_11, :receipts_disp_12, :receipts_disp_13], options)
  post_request "ExecTranCvs.idPass", options
end
# 【Pay-easy決済】
# 5.1.2.2. 決済実行
# お客様が入力した情報で後続の決済センターと通信を行い決済を実施し、結果を返します。
# Pay-easy payment: execute the payment with the customer's details.
def exec_tran_pay_easy(options = {})
  assert_required_options([:access_id, :access_pass, :order_id, :customer_name, :customer_kana, :tel_no, :receipts_disp_11, :receipts_disp_12, :receipts_disp_13], options)
  post_request "ExecTranPayEasy.idPass", options
end
# 【LINE Pay決済】
# 20.1.2.2. 決済実行
# LINE Pay payment: execute the payment (redirect URLs and product name
# are required by the gateway).
def exec_tran_linepay(options = {})
  assert_required_options([:access_id, :access_pass, :order_id, :ret_url, :error_rcv_url, :product_name], options)
  post_request "ExecTranLinepay.idPass", options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# TokenType
# Token
# MemberID
# SeqMode
# TokenSeq
# ClientField1
# ClientField2
# ClientField3
### @return ###
# Status
# OrderID
# Forward
# Approve
# TranID
# TranDate
# ClientField1
# ClientField2
# ClientField3
### example ###
# gmo.exec_tran_brandtoken({
# order_id: "597ae8c36120b23a3c00014e",
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# token_type: :apple_pay,
# token: <Base64 encoded payment data>,
# seq_mode: "1",
# token_seq: 1001,
# client_field_1: "Custom field value 1",
# client_field_2: "Custom field value 2",
# client_field_3: "Custom field value 3"
# })
# => {"Status"=>"CAPTURE", "OrderID"=>"597ae8c36120b23a3c00014e", "Forward"=>"2a99663", "Approve"=>"5487394", "TranID"=>"1707281634111111111111771216", "TranDate"=>"20170728163453", "ClientField1"=>"Custom field value 1", "ClientField2"=>"Custom field value 2", "ClientField3"=>"Custom field value 3"}
# Brand-token payment execution. The caller-friendly :token_type symbol
# (e.g. :apple_pay) is translated to the gateway's code via TOKEN_TYPES_MAP
# before the request is posted.
def exec_tran_brandtoken(options = {})
  options[:token_type] = GMO::Const::TOKEN_TYPES_MAP[options[:token_type]]
  assert_required_options([:access_id, :access_pass, :order_id], options)
  post_request "ExecTranBrandtoken.idPass", options
end
## 2.14.2.1.決済変更
# 仮売上の決済に対して実売上を行います。尚、実行時に仮売上時との金額チェックを行います。
# /payment/AlterTran.idPass
# ShopID
# ShopPass
# AccessID 取引ID
# AccessPass 取引パスワード
# JobCd 処理区分 "SALES"
# Amount 利用金額
### @return ###
# AccessID
# AccessPass
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.alter_tran({
# access_id: "a41d83f1f4c908baeda04e6dc03e300c",
# access_pass: "d72eca02e28c88f98b9341a33ba46d5d",
# job_cd: "SALES",
# amount: 100
# })
# {"AccessID"=>"381d84ae4e6fc37597482573a9569f10", "AccessPass"=>"cc0093ca8758c6616fa0ab9bf6a43e8d", "Forward"=>"2a99662", "Approve"=>"6284199", "TranID"=>"1302140555111111111111193536", "TranDate"=>"20130215110651"}
# 2.14.2.1. Trade alteration: e.g. capture (JobCd "SALES") an authorised
# payment; the gateway cross-checks the amount against the authorisation.
def alter_tran(options = {})
  assert_required_options([:access_id, :access_pass, :job_cd], options)
  post_request "AlterTran.idPass", options
end
## 2.15.2.1.金額変更
# 決済が完了した取引に対して金額の変更を行います。
### @return ###
# AccessID
# AccessPass
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.change_tran({
# access_id: "a41d83f1f4c908baeda04e6dc03e300c",
# access_pass: "d72eca02e28c88f98b9341a33ba46d5d",
# job_cd: "CAPTURE",
# amount: 100
# })
# 2.15.2.1. Amount change on a settled transaction.
def change_tran(options = {})
  assert_required_options([:access_id, :access_pass, :job_cd, :amount], options)
  post_request "ChangeTran.idPass", options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# JobCd
# Amount
# Tax
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.change_tran_brandtoken({
# access_id: "21170701482c86c3b88ff72b83bfd363",
# access_pass: "51f36feba120de1e6e29532e5a3a5e3e",
# order_id: "ord10001",
# job_cd: "CAPTURE",
# amount: 2000
# })
# => {"AccessID"=>"21170701482c86c3b88ff72b83bfd363", "AccessPass"=>"51f36feba120de1e6e29532e5a3a5e3e", "Status"=>"CAPTURE", "Forward"=>"2a99663", "Approve"=>"5538477", "TranID"=>"1707311633111111111111771224", "TranDate"=>"20170731163343"}
# Brand-token payment: change the amount of an existing transaction.
def change_tran_brandtoken(options = {})
  assert_required_options([:access_id, :access_pass, :order_id, :job_cd, :amount], options)
  post_request "ChangeTranBrandtoken.idPass", options
end
### @params ###
# AccessID
# AccessPass
# OrderID
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.void_tran_brandtoken({
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# order_id: "597ae8c36120b23a3c00014e"
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744", "Status"=>"VOID", "Forward"=>"2a99663", "Approve"=>"5537590", "TranID"=>"1707311610111111111111771219", "TranDate"=>"20170731161007"}
# Brand-token payment: void (cancel) a transaction.
def void_tran_brandtoken(options = {})
  assert_required_options([:access_id, :access_pass, :order_id], options)
  post_request "VoidTranBrandtoken.idPass", options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# Amount
# Tax
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.sales_tran_brandtoken({
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# order_id: "597ae8c36120b23a3c00014e",
# amount: 1000,
# tax: "0001001"
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744", "Status"=>"SALES", "Forward"=>"2a99663", "Approve"=>"5537883", "TranID"=>"1707311620111111111111771220", "TranDate"=>"20170731162256"}
# Brand-token payment: capture (sales) an authorised transaction.
def sales_tran_brandtoken(options = {})
  assert_required_options([:access_id, :access_pass, :order_id, :amount], options)
  post_request "SalesTranBrandtoken.idPass", options
end
### @params ###
# AccessID
# AccessPass
# OrderID
# Amount
# Tax
### @return ###
# AccessID
# AccessPass
# Status
# Forward
# Approve
# TranID
# TranDate
### example ###
# gmo.refund_tran_brandtoken({
# access_id: "139f8ec33a07c55f406937c52ce4473d",
# access_pass: "2689b204d2c17192fa35f9269fa7e744",
# order_id: "597ae8c36120b23a3c00014e",
# amount: 1000,
# tax: "0001001"
# })
# => {"AccessID"=>"139f8ec33a07c55f406937c52ce4473d", "AccessPass"=>"2689b204d2c17192fa35f9269fa7e744", "Status"=>"RETURN", "Forward"=>"2a99663", "Approve"=>"5537883", "TranID"=>"1707311620111111111111771220", "TranDate"=>"20170731162256"}
# Brand-token payment: refund a captured transaction.
def refund_tran_brandtoken(options = {})
  assert_required_options([:access_id, :access_pass, :order_id, :amount], options)
  post_request "RefundTranBrandtoken.idPass", options
end
# 【コンビニ払い】
## 2.2.2.1. 支払停止
# コンビニ決済センターとの通信を行い取引の支払停止処理を行います。
# 2.2.2.1. Convenience-store payment: stop payment at the CVS settlement
# center for the given transaction.
def cvs_cancel(options = {})
  assert_required_options([:access_id, :access_pass, :order_id], options)
  post_request "CvsCancel.idPass", options
end
## 2.16.2.1.取引状態参照
# 指定したオーダーID の取引情報を取得します。
# 2.16.2.1. Trade search: returns the stored transaction state for the
# supplied order ID.
def search_trade(options = {})
  required = [:order_id]
  assert_required_options(required, options)
  post_request "SearchTrade.idPass", options
end
# 13.1.2.1.取引状態参照
# 指定したオーダーIDの取引情報を取得します。
### @params ###
# OrderID
# PayType
### @return ###
# OrderID
# Status
# ProcessDate
# JobCd
# AccessID
# AccessPass
# ItemCode
# Amount
# Tax
# SiteID
# MemberID
# CardNoToken
# Expire
# Method
# PayTimes
# Forward
# TranID
# Approve
# ClientField1
# ClientField2
# ClientField3
# PayType
### example ###
# gmo.search_trade_multi({
# order_id: '598066176120b2235300020b',
# pay_type: 27
# })
# => {"OrderID"=>"598066176120b2235300020b", "Status"=>"CAPTURE", "ProcessDate"=>"20170801202929", "JobCd"=>"CAPTURE", "AccessID"=>"228fc5bc02da46943300c12706d325a2", "AccessPass"=>"090a50ec2f77d92184a18018f07906e5", "ItemCode"=>"0000990", "Amount"=>"557", "Tax"=>"0", "SiteID"=>"", "MemberID"=>"", "CardNoToken"=>"************1111", "Expire"=>"2212", "Method"=>"1", "PayTimes"=>"", "Forward"=>"2a99663", "TranID"=>"1708012029111111111111771228", "Approve"=>"5689128", "ClientField1"=>"", "ClientField2"=>"", "ClientField3"=>"", "PayType"=>"27"}
# 13.1.2.1. Multi-payment trade search by order ID and payment type.
def search_trade_multi(options = {})
  required = [:order_id, :pay_type]
  assert_required_options(required, options)
  post_request "SearchTradeMulti.idPass", options
end
private
# Adds the shop credentials to every request; raises APIError when the
# gateway returns an ErrInfo field in its (hash) response.
def api_call(name, args = {}, verb = "post", options = {})
  credentials = { "ShopID" => @shop_id, "ShopPass" => @shop_pass }
  args.merge!(credentials)
  api(name, args, verb, options) do |response|
    raise APIError.new(response, locale) if response.is_a?(Hash) && !response["ErrInfo"].nil?
  end
end
end
end
end
|
##########################################
# = Code Runner GS2 Module
##########################################
#
# Authors: Edmund Highcock
# Copyright: 2009 Edmund Highcock
#
# This is free software released under the GPL v3
#
# This module allows easy running of the plasma turbulence simulation code gs2 using Code Runner, by automatically organising, naming and submitting runs, and analysing the run data.
#
# See Code Runner documentation, or documentation for individual methods.
#
# Notes
#
# index variables, e.g. kx_index, ky_index etc always refer to the 1-based Fortran index, to keep correspondence with the gs2 indices. Element variables, e.g. kx_element, always refer to the 0-based C/ruby index
#
# raw NumRu::NetCDF grids are in Fortran row-major order. This means that when you access grids using the NetCDF function NetCDF#get, you must specify the indices in fortran order (but 0-based!). The NetCDF#get function then returns a C-like NArray with the indices in the opposite order. You can convert this to a Ruby Array using the method NArray#to_a (the indices will still be in the same order).
begin
require "numru/netcdf"
rescue LoadError
eputs "Error: No NetCDF: data analysis for gs2 not possible"
end
class CodeRunner
# This is a customised subclass of CodeRunner::Run which allows CodeRunner to submit and analyse simulations from the gyrokinetic flux tube code GS2, which is principally used for simulating plasmas in magnetic confinement fusion.
#
# It performs two distinct roles: submitting simulations and analysing the data.
#
# = Submitting Simulations
#
# This principally involves generating the input file, which is a very nontrivial task. In order to do this, it maintains a complete record of every possible input parameter for GS2, as well as what datatype that parameter is, and sometimes what values it is allowed to take. This allows that not only to generate the input file, but to check that the input file makes sense. However, although generating the input file works beautifully, the set of sanity checks that it makes is not exhaustive: intelligent use is still required!
#
# In tandem with this, it maintains a whole set of tools for manipulating its database of input parameters. This includes updating their allowed values and also editing and accessing help for every input parameter.
#
# = Analysing Simulations
#
# The amount of analysis possible on GS2 data is enormous, and CodeRunner hasn't got close to getting there. What it can do is:
#
# * Check if the run is complete by comparing the number of completed timesteps against nstep
# * Calculate growth rates for linear runs.
# * Check if non-linear runs have saturated and calculate fluxes for those runs.
# * Automatically plot a huge variety of different graphs, ranging from simple plots of heat flux versus time to three-dimensional plots of the spectrum and potential.
class Gs2 < Run::FortranNamelist
#GS2_CRMOD_VERSION = Version.new(Gem.loaded_specs['gs2crmod'].version.to_s)
GS2_CRMOD_VERSION = Version.new('0.5.0')
# Code-identity predicate used by shared analysis code: this Run class
# drives GS2, not AstroGK.
def agk?
  return false
end
# Code-identity predicate: this Run class drives GS2, not SpectroGK.
def spectrogk?
  return false
end
# Code-identity predicate: this Run class drives GS2, not GryfX.
def gryfx?
  return false
end
CODE_SCRIPT_FOLDER = MODULE_FOLDER = File.dirname(File.expand_path(__FILE__))
# Include the other files
@code_module_folder = folder = File.dirname(File.expand_path(__FILE__)) # i.e. the directory this file is in
setup_namelists(folder)
require folder + '/graphs.rb'
require folder + '/gsl_data.rb'
require folder + '/gsl_data_3d.rb'
require folder + '/check_convergence.rb'
require folder + '/calculations.rb'
require folder + '/ingen.rb'
require folder + '/properties.rb'
require folder + '/test_gs2.rb'
require folder + '/read_netcdf.rb'
NaN = GSL::NAN
# GSL::Neg
eval(%[
], GLOBAL_BINDING)
################################################
# Quantities that are calculated or determined by CodeRunner
# after the simulation has ended, i.e. quantities
# that are not available from the GS2 output files.
################################################
@results = [
:converged,
:decaying,
:es_heat_flux_stav_error,
:es_heat_flux_stav_std_dev,
:es_mom_flux_stav_error,
:es_mom_flux_stav_std_dev,
:es_part_flux_stav_error,
:es_part_flux_stav_std_dev,
:es_heat_flux_stav,
:es_mom_flux_stav,
:es_part_flux_stav,
:frequency_at_ky_at_kx,
:fastest_growing_mode,
:freq_of_max_growth_rate,
:gamma_r,
:gamma_i,
:growth_rates,
:growth_rates_by_ky, # deprecated
:growth_rates_by_kx, # deprecated
:growth_rate_at_ky,
:growth_rate_at_kx,
:growth_rate_at_ky_at_kx,
:hflux_tot_stav,
:hflux_tot_stav_error,
:hflux_tot_stav_std_dev,
:ky,
:ky_spectrum_peak_idx,
:ky_spectrum_peak_ky,
:ky_spectrum_peak_phi2,
:kx_spectrum_peak_kx,
:kx_spectrum_peak_phi2,
:max_growth_rate,
:max_transient_amplification_index_at_ky,
:phi2_tot_stav,
:par_mom_flux_stav,
:perp_mom_flux_stav,
:phi2_zonal,
:run_time,
:real_frequencies,
:real_frequencies_by_ky,
:saturation_time_index,
:saturated,
:shot_time,
:spectrum_check,
:transient_amplification_at_kx,
:transient_amplification_at_ky,
:transient_amplification_at_ky_at_kx,
:transient_es_heat_flux_amplification_at_species_at_kx,
:transient_es_heat_flux_amplification_at_species_at_ky,
:transient_es_heat_flux_amplification_at_species_at_ky_at_kx,
:vspace_check
]
###############################################
# Other useful information about the run
###############################################
@gs2_run_info = [:time, :percent_of_total_time, :checked_converged, :is_a_restart, :restart_id, :restart_run_name, :completed_timesteps, :response_id]
@run_info = @gs2_run_info.dup
##############################################################
# For backwards compatibility with CodeRunner version 0.5.0
##############################################################
@run_info_0_5_0 = {
time: :to_f,
percent_of_total_time: :to_f,
checked_converged: :to_b
}
@results_0_5_0 = {
converged: :to_b,
decaying: :to_b,
:growth_rates => :to_h,
:real_frequencies => :to_h,
# :ky_list => :to_h,
# :kx_list => :to_h,
:growth_rates_by_ky => :to_s,
:real_frequencies_by_ky => :to_s,
:max_growth_rate => :to_f,
:fastest_growing_mode => :to_f,
:freq_of_max_growth_rate => :to_f,
:ky => :to_f,
:gamma_r => :to_f,
:gamma_i => :to_f,
:run_time => :to_f
# :theta_list => :to_h
}
###############################################################
# GS2 is an MPI code: CodeRunner should launch it with the MPI runner.
@uses_mpi = true
# No system-specific "modlet" is required to submit GS2 runs.
@modlet_required = false
@use_graphs = false
# Container for a single potential sample; :ri presumably selects the
# real/imaginary component — TODO confirm against the analysis code.
Phi = Struct.new("Phi", :phi, :ri, :theta_index, :kx_index, :ky_index)
# Parameters used to construct run names; populated per-run.
@naming_pars = []
# def self.finish_setting_up_class
# @@variables += [
# end
# This method, as its name suggests, is called whenever CodeRunner is asked to analyse a run directory.this happens if the run status is not :Complete, or if the user has specified recalc_all(-A on the command line) or reprocess_all (-a on the command line).
#
# the structure of this function is very simple: first it calls get_status to determine the directory status, i.e. :Complete, :Incomplete, :NotStarted or :Failed, then it gets the time, which is the GS2 time at the end of the run, and it also gets the run_time, which is the wall clock time of the run. Finally,if non-linear mode is switched off, it calls calculate_growth_rates_and_frequencies, and if the non-linear mode is switched on, it calls calculate_time_averaged_fluxes.
# Called by CodeRunner whenever this run's directory must be (re)analysed.
# Determines the run status, records progress (as a percentage), and for
# finished runs records the wall-clock time and triggers the physics
# analysis via calculate_results.
def process_directory_code_specific
  run_namelist_backwards_compatibility
  unless @status == :Queueing
    get_status
  end
  eputs "Run #@status: #@run_name" if [:Complete,:Failed].include? @status
  try_to_get_error_file
  @sys = @@successful_trial_system
  return if @status == :NotStarted or @status == :Failed or @status == :Queueing
  begin
    # Express progress as a percentage. The original used integer division
    # and omitted the factor of 100, which disagreed with the rescue branch
    # below and with percent_complete; .to_f and * 100.0 fix both.
    @percent_of_total_time = get_completed_timesteps.to_f / @nstep * 100.0
  rescue
    # Fall back to an estimate from the simulation time when the timestep
    # list cannot be read.
    get_time
    @percent_of_total_time = @time / (@delt*@nstep) * 100.0 rescue 0.0
  end
  return if @status == :Incomplete
  get_run_time
  calculate_results
end
# Dispatch the post-run physics analysis: transient amplifications for
# linear runs, saturation/time-averaged fluxes for nonlinear runs.
# Skipped entirely when CODE_RUNNER_NO_ANALYSIS is set to "true".
def calculate_results
return if ENV['CODE_RUNNER_NO_ANALYSIS'] =~ /true/
eputs "Analysing run"
if @nonlinear_mode == "off"
calculate_transient_amplifications
elsif @nonlinear_mode == "on"
calculate_saturation_time_index
calculate_time_averaged_fluxes
begin
# Spectrum and velocity-space checks are best-effort: any failure is
# deliberately swallowed so it cannot abort the main analysis.
calculate_spectral_checks
calculate_vspace_checks
rescue
end
end
# Ensure downstream readers always see hashes, even when the analysis
# produced nothing.
@growth_rates ||={}
@real_frequencies ||={}
end
# Try to read the runtime in minutes from the GS2 standard out.
def get_run_time
logf(:get_run_time)
# Without a standard-output file there is nothing to parse.
output = @output_file || try_to_get_output_file
return nil unless output
begin
# GS2 prints "total from timer is: <number>" near the end of its output;
# only the tail is scanned to keep this cheap. FileUtils.tail is
# presumably a project extension — not stock Ruby; confirm.
Regexp.new("total from timer is:\\s*#{LongRegexen::NUMBER}", Regexp::IGNORECASE).match FileUtils.tail(output, 300)
logi $~
@run_time = $~[:number].to_f
rescue
# No match or unreadable file: record the run time as unknown.
@run_time = nil
end
end
# Output useful information from the NetCDF file. If no names are provided, output a list of all variables in the NetCDF file. <tt>names</tt> can either be a symbol or an array of symbols, in which case information will be output for the variables with those names. If values are provided, for example :dims,:get, :ndims, this information is retrieved from the file for every variable named.
# ncdump
# ncdump(:hflux)
# ncdump([:hflux, :phi])
# ncdump([:hflux, :phi], :dims)
def ncdump(names=nil, values=nil, extension = '.out.nc')
# Accept a single symbol or an array; normalise to an array of strings
# (NetCDF variable names are strings).
names = [names] unless !names or names.class == Array
names.map!{|name| name.to_s} if names
# Sort alphabetically; emit [name, attribute] pairs when `values` names a
# NetCDF variable method (:dims, :get, :ndims, ...), bare names otherwise.
pp NumRu::NetCDF.open(@run_name + extension).vars(names).to_a.sort{|var1, var2| var1.name <=> var2.name}.map{|var| values ? [var.name, var.send(values)] : var.name.to_sym}
end
# Split this run into "component runs": one per ky mode for linear box
# runs, or one per scan-parameter value for parameter scans. A no-op for
# single-mode non-scan runs, or when the NetCDF output is not yet readable.
def generate_component_runs
@component_runs = []
logf(:generate_component_runs)
return if @grid_option == "single" and @scan_type == "none"
begin
list(:ky) # This will fail unless the run has output the netcdf file
rescue
return
end
return unless @status == :Complete #and @converged
log(@run_name)
if @grid_option == "box" and @nonlinear_mode == "off"
@ky = nil
# raise CRFatal.new("no @ky_list") unless @ky_list
# log list(:ky)
# One component per ky mode, carrying that mode's growth rate and
# real frequency.
list(:ky).each do |id, ky|
component_run = create_component #self.dup
component_run.ky = ky
component_run.gamma_r = @growth_rates[ky]
component_run.gamma_i = @real_frequencies[ky]
log @runner.component_ids
# log('@runner.class', @runner.class)
# @runner.add_component_run(component_run)
end
elsif (not gryfx?) and @scan_type and @scan_type != "none"
# Walk the time series and cut a new component each time the scan
# parameter changes value; each component covers one window of indices.
t = gsl_vector('t')
scan_vals = gsl_vector('scan_parameter_value')
current = scan_vals[0]
start = 0
for i in 0...t.size
if scan_vals[i] != current
component = create_component
component.scan_index_window = [start+1, i] #remember indexes are elements + 1
#ep 'scan_index_window', component.scan_index_window
component.scan_parameter_value = current
# Reset cached results so they are recalculated for this window only.
component.growth_rate_at_ky = nil
component.growth_rate_at_kx = nil
component.growth_rate_at_ky_at_kx = nil
component.calculate_results
current = scan_vals[i]
start = i
end
end
end
end
# Latest simulation time: taken from the NetCDF :t list when available,
# otherwise scraped (last float in the final lines) from <run_name>.out.
def get_time
begin
lt = list(:t)
return lt.values.max if lt.size>0
rescue
end
time = nil
# eputs File.readlines(@run_name +".out").slice(-4..-1).reverse.join( "\n"); gets
raise CRFatal.new("Couldn't find outfile #{@run_name}.out") unless FileTest.exist?(@run_name + ".out")
# FileUtils.tail is presumably a project extension — not stock Ruby; confirm.
tail = FileUtils.tail("#@run_name.out", 4)
#File.readlines(@run_name +".out").slice(-4..-1).reverse.join( "\n")
# sub's block captures the first float matched in the tail.
tail.sub(LongRegexen::FLOAT) do
# eputs $~.inspect
time = $~[:float].to_f
end #if FileTest.exist? (@run_name +".out")
#raise CRFatal.new("couldn't get the time from #{tail}") unless time
@time = time
end
# Number of completed timesteps, inferred from the NetCDF time list: the
# file carries one entry per @nwrite steps plus the initial entry (hence
# the -1). Falls back to a stride of 1 if @nwrite is unset.
def get_completed_timesteps
#raise CRFatal.new("Couldn't find outfile #{@run_name}.out") unless FileTest.exist?(@run_name + ".out")
#p 'try to get completed_timesteps', Dir.pwd, 'nwrite', @nwrite, 'delt', @delt
@completed_timesteps = (list(:t).size - 1) * (@nwrite || 1)
#p 'tried to get completed_timesteps'
#rescue
#`grep time= #@run_name.out`.split.size
# File.read("#@run_name.out").scan(/^\s+time\s*=\s+/).size * @nwrite
end
# True until the run has completed 100% of its requested timesteps.
def incomplete
  percent_complete != 100
end
# Hook invoked by CodeRunner when parameters change between runs;
# GS2 needs no special handling, so this is deliberately a no-op.
def parameter_transition(run)
end
# @@executable_location = nil
# def executable_location
# return "~/gs2_newterm" #(@@executable_location || ($gs2_new_term ? "~/gs2_newterm" : "~/gs2"))
# end
#
# def executable_name
# "gs2"
# end
@code_long = "GS2 Gyrokinetic Flux Tube Code"
@excluded_sub_folders =[]
attr_accessor :theta_list, :ky_list, :ky_graphs, :eigenfunctions, :ky_list, :t_list
attr_accessor :scan_index_window, :scan_parameter_value
class << self
# Wrap the standard check_and_update (aliased to old_check_and_update via
# the project's `aliold` helper) so that the readout list excludes the
# hash/list-valued quantities that cannot be written as single columns.
aliold(:check_and_update)
def check_and_update
old_check_and_update
@readout_list = (@variables + @results - [:growth_rates_by_ky, :growth_rates, :real_frequencies, :real_frequencies_by_ky, :ky_list, :kx_list, :theta_list, :t_list])
end
end
# One tab-separated line of readout values for this run, terminated by a
# newline. Returns "" for unconverged runs, except on single-mode grids
# where convergence is not required.
def data_string
  logf(:data_string)
  return "" if @grid_option != 'single' && !@converged
  logi(@ky, @growth_rates, @real_frequencies)
  fields = rcp.readout_list.inject("") { |acc, (var, _)| acc + "#{send(var) || "0"}\t" }
  fields + "\n"
end
# Completion percentage: exact when the completed-timestep count is known,
# otherwise the time-based estimate recorded during directory processing.
def percent_complete
  return @percent_of_total_time unless @completed_timesteps
  @completed_timesteps.to_f / @nstep.to_f * 100.0
end
# One-line status summary for CodeRunner's run listing: id, name, status,
# runtime, progress, then mode-dependent physics results.
def print_out_line
logf(:print_out_line)
name = @run_name
name += " (res: #@restart_id)" if @restart_id
name += " real_id: #@real_id" if @real_id
# Fixed-width prefix: id:job name status:runtime(nprocs) percent converged.
beginning = sprintf("%2d:%d %-60s %1s:%2.1f(%s) %3s%1s %1s", @id, @job_no, name, @status.to_s[0,1], @run_time.to_f / 60.0, @nprocs.to_s, percent_complete, "%", @converged.to_s)
if @ky
# Single-mode component: this ky's growth rate and real frequency.
beginning += sprintf("%3s %4s %4s", @ky, @growth_rates[@ky], @real_frequencies[@ky])
elsif @nonlinear_mode == "off"
# Linear run: report the fastest-growing mode.
beginning += sprintf("%3s %4s %4s",
@fastest_growing_mode, @max_growth_rate,
@freq_of_max_growth_rate)
elsif @nonlinear_mode == "on"
# p @hflux_tot_stav
# Nonlinear run: saturation flag, fluxes and resolution checks.
beginning += " sat:#{saturated.to_s[0]}"
beginning += sprintf(" hflux:%1.2e", @hflux_tot_stav) if @hflux_tot_stav
beginning += sprintf("+/-%1.2e", @hflux_tot_stav_error) if @hflux_tot_stav_error
beginning += sprintf(" momflux:%1.2e", @es_mom_flux_stav.values.sum) if @es_mom_flux_stav and @es_mom_flux_stav.values[0]
beginning += ' SC:' + @spectrum_check.map{|c| c.to_s}.join(',') if @spectrum_check
beginning += ' VC:' + @vspace_check.map{|c| sprintf("%d", ((c*10.0).to_i rescue -1))}.join(',') if @vspace_check
end
beginning += " ---#{@comment}" if @comment
beginning
end
# Load (and cache) 1-based index => value hashes for NetCDF variables or
# dimensions, e.g. get_list_of(:ky, :t). A trailing `true` argument forces
# a refresh of the cache.
# NOTE(review): `var + :_list` relies on a Symbol#+ extension provided by
# the project (not stock Ruby) — confirm.
def get_list_of(*args)
#args can be any list of e.g. :ky, :kx, :theta, :t ...
logf(:get_list_of)
refresh = args[-1] == true ? true : false
args.pop if args[-1] == true
logd
Dir.chdir(@directory) do
args.each do |var|
# eputs "Loading #{var}"
list_name = var + :_list
log list_name
# self.class.send(:attr_accessor, list_name)
# Reuse the cache for settled runs unless a refresh was requested.
next if (cache[list_name] and [:Failed, :Complete].include? status and not refresh)
cache[list_name] = {}
if netcdf_file.var(var.to_s)
# Variable exists: store its values, 1-indexed to match Fortran.
netcdf_file.var(var.to_s).get.to_a.each_with_index do |value, element|
# print '.'
cache[list_name][element+1]=value
end
else
# Only a dimension of that name exists: store placeholder entries.
netcdf_file.dim(var.to_s).length.times.each do |element|
cache[list_name][element+1]='unknown'
end
end
# eputs send(var+:_list)
end
end
logfc :get_list_of
return cache[args[0] + :_list] if args.size == 1
end
alias :list :get_list_of
# Interactive sanity check of a linear growth rate: fits log(phi2)/2
# against time, prints the slope and overlays the fitted exponential on the
# phi2 time trace. Blocks on `gets` until the user presses Enter, then
# closes the graph.
def visually_check_growth_rate(ky=nil)
logf :visually_check_growth_rate
phi_vec = gsl_vector(:phi2_by_ky_over_time, {ky: ky})
t_vec = gsl_vector(:t)
# Linear fit: 0.5*log(phi2) = constant + growth_rate * t.
constant, growth_rate = GSL::Fit::linear(t_vec, 0.5*GSL::Sf::log(phi_vec)).slice(0..1)
eputs growth_rate
graph = @@phi2tot_vs_time_template.graph(["#{constant} * exp (2 * #{growth_rate} * x)"], [[[t_vec, phi_vec], "u 1:2 title 'phi2tot #{@run_name}' w p"]], {"set_show_commands" => "\nset log y\n", "point_size"=>'1.0'})
# eputs graph.inline_data.inspect
graph.show
gets
graph.kill
end
# Briefly display the phi2-vs-time graph for all ky modes (titled with this
# run's flow shear g_exb), then close it.
def show_graph
thegraph = special_graph('phi2tot_vs_time_all_kys')
thegraph.title += " for g_exb = #{@g_exb.to_f.to_s}"
thegraph.show
sleep 1.5
# @decaying = Feedback.get_boolean("Is the graph decaying?")
thegraph.kill
end
# Configure new_run as a restart of this run: copies parameters and submit
# options, marks it as a restart, enforces the same processor count, names
# it, and copies the restart (and, if applicable, response) files across.
# Returns new_run.
def restart(new_run)
# Copy every set variable from this run onto the new one.
(rcp.variables).each{|v| new_run.set(v, send(v)) if send(v)}
@naming_pars.delete(:preamble)
# Submit options (queue, wall time, ...) carry over unless already set.
SUBMIT_OPTIONS.each{|v| new_run.set(v, self.send(v)) unless new_run.send(v)}
new_run.is_a_restart = true
new_run.ginit_option = "many"
new_run.delt_option = "check_restart"
new_run.restart_id = @id
new_run.restart_run_name = @run_name
@runner.nprocs = @nprocs if @runner.nprocs == "1" # 1 is the default
# Restart files are split per-processor, so the layout must match exactly.
if !new_run.nprocs or new_run.nprocs != @nprocs
raise "Restart must be on the same number of processors as the previous "\
"run: new is #{new_run.nprocs.inspect} and old is #{@nprocs.inspect}"
end
new_run.run_name = nil
new_run.naming_pars = @naming_pars
new_run.update_submission_parameters(new_run.parameter_hash_string, false) if
new_run.parameter_hash
new_run.naming_pars.delete(:restart_id)
new_run.generate_run_name
copy_restart_files(new_run)
# Reuse response matrices when the new run reads them.
if new_run.read_response and new_run.read_response.fortran_true?
new_run.response_id = new_run.restart_id
copy_response_files(new_run)
end
new_run
end
# Copy this run's restart files into new_run's directory under nc/,
# renaming them to new_run's restart file name while keeping each file's
# per-processor suffix (".<n>" or "_ene").
def copy_restart_files(new_run)
eputs 'Copying restart files...', ''
FileUtils.makedirs(new_run.directory + '/nc')
#old_dir = File.dirname(@restart_file)
new_run.restart_file = "#@run_name.nc"
new_run.restart_dir = "nc"
files = list_of_restart_files.map do |file|
@directory + "/" + file
end
files.each_with_index do |file , index|
# Progress counter rewritten in place on the terminal.
eputs "#{index+1} out of #{files.size}"
eputs "\033[2A" # Terminal jargon - go back one line
num = file.scan(/(?:\.\d+|_ene)$/)[0]
#FileUtils.cp("#{old_dir}/#{file}", "nc/#@restart_file#{num}")
FileUtils.cp(file, new_run.directory + "/nc/#{new_run.restart_file}#{num}")
end
end
# Copy this run's response-matrix files into run's directory under
# response/, keeping each file's "_ik_<i>_is_<j>.response" suffix. Prints a
# parameter diff first so the user can judge whether reuse is valid.
def copy_response_files(run)
eputs 'Copying response files...', ''
eputs 'The following run parameters have changed. Are you sure you can use '\
'these response files?'
diff_run_parameters(self, run)
FileUtils.makedirs(run.directory + '/response')
run.response_dir = "response"
files = list_of_response_files.map do |file|
@directory + "/" + file
end
files.each_with_index do |file , index|
# Progress counter rewritten in place on the terminal.
eputs "#{index+1} out of #{files.size}"
eputs "\033[2A" # Terminal jargon - go back one line
response_ext = file.scan(/_ik_\d+_is_\d+.response/)
FileUtils.cp(file, run.directory + "/response/#{run.run_name}#{response_ext[0]}")
end
end
# The following function is essentially the same as the CR differences_between
# function without the runner loading set up code. This could possibly be moved
# to a more general function in CR.
# Print a table of the parameters that differ between two runs (values
# truncated to 10 characters), with the run id always shown first.
def diff_run_parameters(run_1, run_2)
runs = [run_1, run_2]
# NOTE(review): rcp_fetcher is assigned but never used — confirm whether
# `vars` below was meant to come from it rather than from `rcp`.
rcp_fetcher = (runs[0] || @runner.run_class).rcp
vars = rcp.variables.dup + rcp.run_info.dup
# Clean up output by deleting some variables
# Drop everything identical between the two runs, plus bookkeeping fields.
vars.delete_if{|var| runs.map{|r| r.send(var)}.uniq.size == 1}
vars.delete :id
vars.delete :run_name
vars.delete :output_file
vars.delete :error_file
vars.delete :executable
vars.delete :comment
vars.delete :naming_pars
vars.delete :parameter_hash
vars.delete :parameter_hash_string
vars.delete :sys
vars.delete :status
vars.delete :job_no
vars.delete :running
vars.unshift :id
# Fancy table printing
table = vars.map{|var| [var] + runs.map{|r| str = r.instance_eval(var.to_s).to_s;
str.size>10?str[0..9]:str} }
# Column widths: element-wise maximum over all rows.
col_widths = table.map{|row| row.map{|v| v.to_s.size}}.
inject{|o,n| o.zip(n).map{|a| a.max}}
eputs
table.each{|row| i=0; eputs row.map{|v| str = sprintf(" %#{col_widths[i]}s ",
v.to_s); i+=1; str}.join('|'); eputs '-' *
(col_widths.sum + col_widths.size*3 - 1) }
end
# Return a list of restart file paths (relative to the run directory).
# Return a list of restart file paths (relative to the run directory).
#
# Search order: numbered hidden files (.0, .1, ...) in the run directory,
# then NetCDF restart files (*.nc.<n> or *_ene) there, then the same
# pattern inside each subdirectory, and finally a single (unnumbered) .nc
# file in the 'nc' folder.
#
# Fix: the subdirectory loop used `break if files.size == 0`, which stopped
# after the first empty subdirectory and allowed a later empty subdirectory
# to clobber matches found in an earlier one. Stop as soon as a
# subdirectory yields restart files instead.
#
# NOTE: Dir.entries with no argument relies on a CodeRunner core extension
# defaulting to the current directory — confirm against the core patches.
def list_of_restart_files
  Dir.chdir(@directory) do
    files = Dir.entries.grep(/^\.\d+$/)
    files = Dir.entries.grep(/\.nc(?:\.\d|_ene)/) if files.size == 0
    if files.size == 0
      (Dir.entries.find_all{|dir| FileTest.directory? dir} - ['.', '..']).each do |dir|
        files = Dir.entries(dir).grep(/\.nc(?:\.\d|_ene)/).map{|file| dir + "/" + file}
        break if files.size > 0
      end
    end
    # Finds a .nc file (w/o a number) in 'nc' folder if using single restart file
    if files.size == 0
      files = Dir.entries('nc').grep(/\.nc/).map{|file| 'nc' + "/" + file}
    end
    return files
  end
end
alias :lorf :list_of_restart_files
# Return list of response files similar to method for restart files
# Return a list of response matrix file paths (relative to the run
# directory): first from the 'response' subdirectory, then the run
# directory itself, then every other subdirectory.
def list_of_response_files
Dir.chdir(@directory) do
# NOTE: Dir.entries with no argument relies on a CodeRunner core extension
# defaulting to the current directory — confirm against the core patches.
files = Dir.entries('response').grep(/\.response/).map{|file| 'response' +
"/" + file}
files = Dir.entries.grep(/\.response/) if files.size == 0
if files.size == 0
(Dir.entries.find_all{|dir| FileTest.directory? dir} - ['.', '..']).each do |dir|
# NOTE(review): no break here — a later (possibly empty) subdirectory
# overwrites matches from an earlier one; confirm this is intended.
files = Dir.entries(dir).grep(/\.response/).map{|file| dir + "/" + file}
end
end
return files
end
end
# Put restart files in the conventional location, i.e. nc/run_name.proc
def standardize_restart_files
Dir.chdir(@directory) do
FileUtils.makedirs('nc')
list_of_restart_files.each do |file|
# Processor suffix (e.g. ".147") or the "_ene" energy-file suffix.
proc_id = file.scan(/\.\d+$|_ene$/)[0]
#p 'proc_id', proc_id
# Move into the conventional location nc/<run_name>.nc<suffix>.
FileUtils.mv(file, "nc/#@run_name.nc#{proc_id}")
end
end
end
# Delete all the restart files (irreversible!)
#
# Delete every restart file belonging to this run. Irreversible!
# Pass no_confirm: true in +options+ to skip the interactive prompt.
def delete_restart_files(options={})
  puts 'You are about to delete the restart files for:'
  puts @run_name
  unless options[:no_confirm]
    return unless Feedback.get_boolean("This action cannot be reversed. Do you wish to continue?")
  end
  list_of_restart_files.each { |f| FileUtils.rm f }
end
# First letter, lower-cased, of the type of species 1 (e.g. "i" for "Ion").
def species_letter
  first_type = species_type(1)
  first_type.downcase[0, 1]
end
# Return the species type (e.g. "ion", "electron") for the 1-based species
# +index+, reading either the numbered type_<n> variable or the positional
# list of type* variables.
#
# Fix: the original built the accessor name with `:type_ + index.to_sym`,
# but Integer does not respond to +to_sym+, so that branch always raised
# NoMethodError. Build the symbol via string interpolation instead (which
# also avoids relying on the non-core Symbol#+ extension).
def species_type(index)
  if rcp.variables.include? :type_1
    type = send("type_#{index}".to_sym)
  else
    types = rcp.variables.find_all{|var| var.to_s =~ /^type/}.map{|var| send(var)}
    type = types[index.to_i - 1]
  end
  type
end
# Returns true if this run has not been restarted, false if it has. This
# allows one to get data from the final run of a series of restarts.
# True if no other run in the runner restarts from this one, i.e. this run
# is the final link of its restart chain.
def no_restarts
  raise NoRunnerError unless @runner
  @runner.runs.find { |run| run.restart_id == @id }.nil?
end
# Return the ids of every run in this run's restart chain, from the
# original run onwards through each successive restart.
def restart_chain
  # If this run is itself a restart, delegate to the head of the chain.
  return @runner.run_list[@restart_id].restart_chain if @restart_id
  chain = []
  current = @id
  loop do
    chain << current
    successor = @runner.runs.find { |run| run.restart_id == current }
    break unless successor
    current = successor.id
  end
  chain
end
# Determine the current status of the run and store it in @status:
# :Incomplete / :NotStarted while running, and :Complete / :Failed after it
# has finished, judged from the GS2 output and NetCDF files.
#
# NOTE: FileUtils.tail is not standard Ruby; it is a CodeRunner extension.
def get_status
# eputs 'Checking Status'
logf(:get_status)
Dir.chdir(@directory) do
if @running
# While still running we only distinguish "has produced timesteps"
# (a t= line appears in the output) from "not started yet".
if FileTest.exist?(@run_name + ".out") and FileUtils.tail(@run_name + ".out", 5).split(/\n/).size > 4 and FileUtils.tail(@run_name + ".out", 200) =~ /t\=/
@status = :Incomplete
else
@status = :NotStarted
end
else
if FileTest.exist?(@run_name + ".out") and FileUtils.tail(@run_name + ".out", 5).split(/\n/).size > 4
#eputs "HERE", @scan_type
# Linear runs are complete once the frequency has converged.
if @nonlinear_mode == "off" and FileUtils.tail(@run_name + ".out",200) =~ /omega converged/
eputs 'Omega converged...'
@status = :Complete
elsif @scan_type and @scan_type != "none" and FileUtils.tail(@run_name + ".par_scan",200) =~ /scan\s+is\s+complete/i
eputs 'Scan complete...'
@status = :Complete
elsif @nonlinear_mode == "on" or !@omegatol or @omegatol < 0.0 or (@exit_when_converged and @exit_when_converged.fortran_false?)
eputs 'No omegatol'
# No convergence criterion: fall back to counting completed
# timesteps in the NetCDF output file.
if FileTest.exist?(@run_name + ".out.nc")
#p ['pwd', Dir.pwd, netcdf_file, netcdf_file.dim('t'), netcdf_file.dims]
if netcdf_file.dim('t').length > 0
get_completed_timesteps
else
@status = :Failed
return
end
else
eputs "Warning: no netcdf file #@run_name.out.nc"
@status = :Failed
return
end
#ep "completed_timesteps", @completed_timesteps
eputs "#{percent_complete}% of Timesteps Complete"
if percent_complete >= 100.0
@status = :Complete
elsif percent_complete > 5 and FileUtils.tail(output_file, 200) =~ /total from timer is/
# GS2 printed its final timing summary, so it exited cleanly
# even though fewer than nstep timesteps were completed.
@status = :Complete
else
@status = :Failed
end
else
@status = :Failed
end
else
@status=:Failed
end
end
end
end
# Hook called by CodeRunner before submission, allowing GS2 to rewrite the
# job script. When CODE_OPTIONS[:gs2][:list] is set, runs are grouped into
# GS2 "list mode" jobs: a .list file naming each member run is written and
# a single aggregated submission command replaces the individual ones.
#
# NOTE(review): the body uses @runner (a class-level instance variable)
# rather than the +runner+ argument — confirm @runner is set on the class
# before this is called.
def self.modify_job_script(runner, runs_in, script)
if CODE_OPTIONS[:gs2] and CODE_OPTIONS[:gs2][:list]
# An Integer list option fixes the list size; anything else puts all
# runs into a single list job.
if (list_size = CODE_OPTIONS[:gs2][:list]).kind_of? Integer
raise "The total number of runs must be a multiple of the list size!" unless runs_in.size % list_size == 0
pieces = runs_in.pieces(runs_in.size/list_size)
else
pieces = [runs_in]
end
script = ""
pieces.each do |runs|
#ep 'there is a list'
FileUtils.makedirs('job_lists')
jid = "#{runs[0].id}-#{runs[-1].id}"
list_file = "job_lists/gs2_list_#{jid}.list"
File.open(list_file,'w') do |file|
file.puts runs.size
file.puts runs.map{|r| "#{r.relative_directory}/#{r.run_name}"}.join("\n")
end
raise "runs must all have the same nprocs" unless runs.map{|r| r.nprocs}.uniq.size == 1
runs.each do |r|
# Make sure the restart file name includes the relative directory for
# list runs
reldir = r.relative_directory
rdir = r.restart_dir
#puts rdir[0...reldir.size] == reldir, rdir[0...reldir.size], reldir
#raise ""
if rdir
r.restart_dir = reldir + '/' + rdir if not rdir[0...reldir.size] == reldir
else
r.restart_dir = reldir
end
Dir.chdir(r.directory){r.write_input_file}
end
# The list job's first processor dimension is the per-run count
# multiplied by the number of runs in the list.
np = runs[0].nprocs.split('x').map{|n| n.to_i}
np[0] *= runs.size
nprocs = np.map{|n| n.to_s}.join('x')
@runner.nprocs = nprocs
ls = ListSubmitter.new(@runner, nprocs, list_file, jid)
script << ls.run_command
end
end
return script
end
# Builds the submission command for a GS2 "list mode" job, which bundles
# several runs into one MPI invocation driven by a .list file.
class ListSubmitter
include CodeRunner::SYSTEM_MODULE
@uses_mpi = true
attr_reader :executable_location, :executable_name, :parameter_string
attr_reader :job_identifier
def initialize(runner, nprocs, list_file, jid)
@executable_location = runner.executable_location
@executable_name = runner.executable_name
# For list jobs the "parameter" handed to the executable is the list file.
@parameter_string = list_file
@job_identifier = jid
@nprocs = nprocs
end
def rcp
self.class.rcp
end
def self.rcp
@rcp ||= CodeRunner::Run::RunClassPropertyFetcher.new(self)
end
end #class ListSubmitter
# Re-analyse the run directory from scratch: wipe every cached instance
# variable (except the runner reference) plus CodeRunner's cached data
# files, then run process_directory again.
def recheck
logf(:recheck)
Dir.chdir(@directory) do
logi('@runner.object_id', @runner.object_id)
log('@runner.class', @runner.class)
#runner = @runner
# Reset all cached state; @runner must survive so the run stays attached.
instance_variables.each{|var| instance_variable_set(var, nil) unless var == :@runner}
# Stale data files would short-circuit the fresh analysis; ignore if absent.
begin File.delete(".code_runner_run_data") rescue Errno::ENOENT end
begin File.delete("code_runner_results.rb") rescue Errno::ENOENT end
logi(:@checked_converged, @checked_converged)
logi('@runner.object_id after reset', @runner.object_id)
log('@runner.class', @runner.class)
process_directory
end
end
# Carry out all pre-flight preparation and write the GS2 input file:
# response-file copying or creation, restart directory/file setup,
# available CPU time, and processor count. If a block is given it is
# instance_eval'd in place of the standard check_parameters/ingen checks.
#
# Raises CRFatal when no input module providing input_file_text is present.
def generate_input_file(&block)
raise CRFatal("No Input Module File Given or Module Corrupted") unless
methods.include? (:input_file_text)
run_namelist_backwards_compatibility
@user_comments = "Defaults description: #@defaults_file_description. Run description: #@comment"
# If it is a restart default behaviour will be to copy the response files
# from the run being restarted. Specifying a response_id will override this.
if not @is_a_restart and @response_id
@read_response = ".true."
@runner.run_list[@response_id].copy_response_files(self)
elsif @dump_response and @dump_response.fortran_true? and
(not @read_response or not @read_response.fortran_true?)
@response_dir = "response"
FileUtils.makedirs @response_dir
end
# The second test checks that the restart function has not been called
# manually earlier (e.g. in Trinity), but we must check that it is not in
# fact a resubmitted run.
if @restart_id and (not @is_a_restart or @resubmit_id)
@runner.run_list[@restart_id].restart(self)
elsif ((@save_for_restart and @save_for_restart.fortran_true?) or
(@save_for_restart_new and @save_for_restart_new.fortran_true?)) and
(not @is_a_restart or @resubmit_id)
@restart_dir = "nc"
#if CODE_OPTIONS[:gs2] and CODE_OPTIONS[:gs2][:list]
#FileUtils.makedirs "#{@runner.root_folder}/#@restart_dir"
#else
FileUtils.makedirs @restart_dir
#end
@restart_file = "#@run_name.nc"
end
# Let Gs2 know how much wall clock time is available. avail_cpu_time is a GS2 input parameter.
@avail_cpu_time = @wall_mins * 60 if @wall_mins
# Automatically set the number of nodes to be the maximum possible without parallelising over x, if the user has left the number of nodes unspecified.
set_nprocs
if block
##### Allow the user to define their own pre-flight checks and changes
instance_eval(&block)
else
######### Check for errors and inconsistencies
check_parameters
#########
end
write_input_file
######### Generate a report using the ingen tool if possible
ingen unless block
########
end
# Write the GS2 input file (<run_name>.in) into the current directory.
def write_input_file
  File.open("#{@run_name}.in", 'w') do |file|
    file.puts input_file_text
  end
end
# When @nprocs begins with 'x' (e.g. "x8", meaning "<nodes>x8"), choose the
# node count automatically: increase it until the total processor count
# would exceed max_nprocs_no_x, then keep the last value that fits.
def set_nprocs
if (nprocs_in = @nprocs) =~ /^x/
max = max_nprocs_no_x
nodes = 0
@nprocs = "#{nodes}#{nprocs_in}"
loop do
nodes += 1
# actual_number_of_processors reads @nprocs, so update it before testing.
@nprocs = "#{nodes}#{nprocs_in}"
if actual_number_of_processors > max
# One step too far: back off to the previous node count and stop.
nodes -= 1
@nprocs = "#{nodes}#{nprocs_in}"
break
end
end
end
end
# Total processor count implied by the layout string @nprocs, which is a
# list of dimensions separated by 'x' (e.g. "4x8" -> 32).
def actual_number_of_processors
  raise "Please specify the processor layout using the -n or (n:) option" unless @nprocs
  @nprocs.split('x').reduce(1) { |product, dim| product * dim.to_i }
end
alias :anop :actual_number_of_processors
# Rough estimate of the total number of meshpoints in the simulation:
# (kx) * (ky) * theta * pitch-angle * energy * sign * species, where the
# kx/ky factors only appear in "box" (nonlinear flux-tube) mode.
def approximate_grid_size
  pitch_points = (2 * @ngauss + @ntheta/2).to_i
  velocity_space = pitch_points * @negrid * 2 * @nspec
  if @grid_option == "box"
    nkx = (2*(@nx-1)/3 + 1).to_i
    nky = (@naky || (@ny-1)/3 + 1).to_i
    nkx * nky * @ntheta * velocity_space
  else
    @ntheta * velocity_space
  end
end
alias :agridsze :approximate_grid_size
# Gives a guess as to the maximum number of meshpoints which
# can be parallelized (i.e. excluding ntheta)
#
def parallelizable_meshpoints
# theta is excluded because it cannot be parallelized over.
approximate_grid_size / ntheta
end
# Gives a guess as to the maximum number of nodes which can be
# can be utilized on the current system
#
def estimated_nodes
# One node per max_ppn parallelizable meshpoints.
parallelizable_meshpoints / max_ppn
end
alias :estnod :estimated_nodes
# Command-line parameter handed to the GS2 executable: the input file name.
def parameter_string
  "#{@run_name}.in"
end
# Print the GS2-specific commands: class methods defined here but not on
# the generic Run class.
def self.list_code_commands
puts (methods - Run.methods).sort
end
# Prefix stirring-namelist variables with stir_ before delegating to the
# generic namelist writer.
#
# NOTE: Symbol#+ is not core Ruby; it relies on a CodeRunner core extension
# that concatenates symbols.
def self.add_variable_to_namelist(namelist, var, value)
var = :stir_ + var if namelist == :stir
super(namelist, var, value)
end
# Return the banner comment placed at the top of every generated GS2 input
# file, after first migrating any legacy species variable names.
def input_file_header
run_namelist_backwards_compatibility
<<EOF
!==============================================================================
! GS2 INPUT FILE automatically generated by CodeRunner
!==============================================================================
!
! GS2 is a gyrokinetic flux tube initial value turbulence code
! which can be used for fusion or astrophysical plasmas.
!
! See http://gyrokinetics.sourceforge.net
!
! CodeRunner is a framework for the automated running and analysis
! of large simulations.
!
! See http://coderunner.sourceforge.net
!
! Created on #{Time.now.to_s}
! by CodeRunner version #{CodeRunner::CODE_RUNNER_VERSION.to_s}
!
!==============================================================================
EOF
end
# Return the header written at the top of generated defaults files. The
# @defaults_file_description assignment is part of the generated text and
# is evaluated when the defaults file itself is later loaded.
def self.defaults_file_header
<<EOF1
######################################################################
# Automatically generated defaults file for GS2 CodeRunner module #
# #
# This defaults file specifies a set of defaults for GS2 which are #
# used by CodeRunner to set up and run GS2 simulations. #
# #
# Created #{Time.now.to_s} #
# #
######################################################################
@defaults_file_description = ""
EOF1
end
# Customize this method from Run::FortranNamelist by saying when diagnostics are not switched on.
#def namelist_text(namelist, enum = nil)
#hash = rcp.namelists[namelist]
#text = ""
#ext = enum ? "_#{enum}" : ""
#text << "!#{'='*30}\n!#{hash[:description]} #{enum} \n!#{'='*30}\n" if hash[:description]
#text << "&#{namelist}#{ext}\n"
#hash[:variables].each do |var, var_hash|
#code_var = (var_hash[:code_name] or var)
#cr_var = var+ext.to_sym
## ep cr_var, namelist
#if send(cr_var) and (not var_hash[:should_include] or eval(var_hash[:should_include]))
## var_hash[:tests].each{|tst| eval(tst).test(send(cr_var), cr_var)}
#if String::FORTRAN_BOOLS.include? send(cr_var) # var is a Fortran Bool, not really a string
#output = send(cr_var).to_s
#elsif (v = send(cr_var)).kind_of? Complex
#output = "(#{v.real}, #{v.imag})"
#else
#output = send(cr_var).inspect
#end
#text << " #{code_var} = #{output} #{var_hash[:description] ? "! #{var_hash[:description]}": ""}\n"
#elsif namelist == :gs2_diagnostics_knobs or namelist == :diagnostics
#text << " ! #{code_var} not specified --- #{var_hash[:description]}\n"
#end
#end
## # end
#text << "/\n\n"
#text
#end
# Namelists whose unset variables are still listed (commented out) in the
# generated input file, as a reminder of available diagnostics settings.
@namelists_to_print_not_specified = [:gs2_diagnostics_knobs, :diagnostics]
# Fortran source files scanned for namelist definitions; the ingen and
# gs2_diagnostics sources are excluded.
@fortran_namelist_source_file_match = /(?<!ingen|gs2_diagnostics)((\.f9[05])|(\.fpp))$/
# def self.add_code_var
# rcp.namelists.each do |namelist, hash|
# hash[:variables].each do |var, var_hash|
# p var
# var_hash[:code_name] = var_hash[:gs2_name] if var_hash[:gs2_name]
# end
# end
# save_namelists
# end
# Read a Miller-parameterisation GS2 input file and copy its physics
# parameters onto this run: all of &parameters, the &theta_grid_parameters
# (except grid resolution), g_exb from &dist_fn_knobs, the shear and
# pressure-gradient inputs from &theta_grid_eik_knobs, and both species
# parameter namelists (suffixed _1/_2).
def update_physics_parameters_from_miller_input_file(file)
  contents = self.class.parse_input_file(file)
  contents[:parameters].each { |var, val| set(var, val) }
  contents[:theta_grid_parameters].each do |var, val|
    # Resolution settings are deliberately left untouched.
    set(var, val) unless [:ntheta, :nperiod].include?(var)
  end
  contents[:dist_fn_knobs].each do |var, val|
    set(var, val) if [:g_exb].include?(var)
  end
  contents[:theta_grid_eik_knobs].each do |var, val|
    set(var, val) if [:s_hat_input, :beta_prime_input].include?(var)
  end
  contents[:species_parameters_2].each do |var, val|
    set("#{var}_2".to_sym, val)
  end
  contents[:species_parameters_1].each do |var, val|
    set("#{var}_1".to_sym, val)
  end
end
# Regenerate the CodeRunner info file from this run's input file.
def renew_info_file
  Dir.chdir(@directory) { make_info_file("#{@run_name}.in") }
end
# This method overrides a method defined in heuristic_run_methods.rb in the
# CodeRunner source. It is called when CodeRunner cannot find any of its own
# files in the folder being analysed. It takes a GS2 input file and generates a
# CodeRunner info file. This means that GS2 runs which were not run using
# CodeRunner can nonetheless be analysed by it. In order for it to be called
# the -H flag must be specified on the command line.
def run_heuristic_analysis
ep 'run_heuristic_analysis', Dir.pwd
# Find exactly one GS2 input file (*.in, not hidden) in this directory.
infiles = Dir.entries.grep(/^[^\.].*\.in$/)
ep infiles
raise CRMild.new('No input file') unless infiles[0]
raise CRError.new("More than one input file in this directory: \n\t#{infiles}") if infiles.size > 1
input_file = infiles[0]
ep 'asdf'
# Placeholders: the true values are unknown for runs not made by CodeRunner.
@nprocs ||= "1"
@executable ||= "/dev/null"
make_info_file(input_file, false)
end
# Subfolders of the GS2 source tree that are also scanned for sources.
@source_code_subfolders = ['utils', 'geo', 'diagnostics']
attr_accessor :iphi00, :saturation_time #Necessary for back. comp. due to an old bug
folder = File.dirname(File.expand_path(__FILE__)) # i.e. the directory this file is in
# Load the table of species-dependent namelists from its companion file.
SPECIES_DEPENDENT_NAMELISTS = eval(File.read(folder + '/species_dependent_namelists.rb'), binding, folder + '/species_dependent_namelists.rb')
# Flatten the namelist table into {variable => help text}.
SPECIES_DEPENDENT_VARIABLES_WITH_HELP = SPECIES_DEPENDENT_NAMELISTS.values.inject({}) do |hash, namelist_hash|
namelist_hash[:variables].each do |var, var_hash|
hash[var] = var_hash[:help]
end
hash
end
SPECIES_DEPENDENT_VARIABLES = SPECIES_DEPENDENT_VARIABLES_WITH_HELP.keys
SPECIES_DEPENDENT_VARIABLES.each{|var| attr_accessor var} # for backwards compatibility
# Legacy _i/_e (ion/electron) suffixed accessors from very old runs.
# NOTE: name + "_#{n}".to_sym relies on the CodeRunner Symbol#+ extension.
['i', 'e'].each do |n|
SPECIES_DEPENDENT_VARIABLES_WITH_HELP.each do |name, help|
attr_accessor name + "_#{n}".to_sym #for backwards compatibility
end
end
# Legacy mixed-case parameter names (e.g. TiTe, Rmaj) are aliased to the
# modern lower-case reader and writer for backwards compatibility.
legacy_names = %w[
  TiTe
  Rmaj
  R_geo
  invLp_input
  D_hypervisc
  D_hyperres
  D_hyper
  C_par
  C_perp
]
legacy_names.each do |name|
  lower = name.downcase
  alias_method(name.to_sym, lower.to_sym)
  alias_method("#{name}=".to_sym, "#{lower}=".to_sym)
end
# Migrate legacy species variable spellings to the numbered form: var_1
# takes its value from var_i or the bare var, and var_2 from var_e, unless
# the numbered variable is already set.
#
# NOTE: Symbol#+ here relies on the CodeRunner core extension.
def run_namelist_backwards_compatibility
SPECIES_DEPENDENT_VARIABLES.each do |var|
set(var + "_1".to_sym, (send(var + "_1".to_sym) or send(var + "_i".to_sym) or send(var)))
set(var + "_2".to_sym, (send(var + "_2".to_sym) or send(var + "_e".to_sym)))
end
end
# Ask GS2 to halt gracefully by creating the <run_name>.stop sentinel file
# in the run directory.
def stop
  `touch #{@directory}/#{@run_name}.stop`
end
# Open the stdout, stderr and GS2 report files read-only in vim.
def vim_output
system "vim -Ro #{output_file} #{error_file} #@directory/#@run_name.error #@directory/#@run_name.out "
end
alias :vo :vim_output
# Open the GS2 input file read-only in vim.
def vim_input
system "vim -Ro #@directory/#@run_name.in "
end
alias :vi :vim_input
# Open just the standard output file read-only in vim.
def vim_stdout
system "vim -Ro #{output_file} "
end
alias :vo1 :vim_stdout
# Parse the EFIT equilibrium file (@eqfile) and plot (1) the poloidal flux
# psi(R, Z) with the plasma boundary overlaid and (2) the normalized
# pressure, poloidal current function and safety factor profiles.
#
# NOTE(review): parsing assumes the fixed layout with 5 values per line —
# confirm for non-standard equilibrium files.
def plot_efit_file
Dir.chdir(@directory) do
text = File.read(@eqfile)
text_lines = text.split("\n")
first_line = text_lines[0].split(/\s+/)
second_line = text_lines[1].split(/\s+/)
# Grid dimensions and geometry from the two header lines.
nr = first_line[-2].to_i
nz = first_line[-1].to_i
rwidth = second_line[1].to_f
zwidth = second_line[2].to_f
rmag = second_line[3].to_f
# Profiles are written 5 values per line.
nlines = (nr.to_f/5.0).ceil
nlines_psi = ((nr*nz).to_f/5.0).ceil
start = 5
f = text_lines[start...(start+=nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
pres = text_lines[(start)...(start += nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
_ = text_lines[(start)...(start += nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
_ffprime = text_lines[(start)...(start+= nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
psi = text_lines[(start)...(start += nlines_psi)].join(" ")
q = text_lines[(start)...(start += nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
nbound = text_lines[start...start+=1].join(" ").to_i
rz = text_lines[(start)...(start += nbound*2)].join(" ").split(/\s+/)
rz.shift
# De-interleave the alternating R, Z boundary values into two arrays.
rbound, zbound, _ = rz.inject([[], [], true]){|arr,val| arr[2] ? [arr[0].push(val), arr[1], false] : [arr[0], arr[1].push(val), true]}
#rbound.shift
psi = psi.split(/\s+/)
psi.shift
psi.map!{|v| v.to_f}
# Pack the flat psi list into a 2D (r, z) tensor.
psi_arr = SparseTensor.new(2)
k = 0
for i in 0...nz
for j in 0...nr
psi_arr[j,i] = psi[k]
k+=1
end
end
kit = GraphKit.quick_create([((0...nr).to_a.to_gslv - nr/2 - 1 )/(nr-1)*rwidth+rmag, ((0...nz).to_a.to_gslv-nz/2 + 1)/(nz-1) * zwidth, psi_arr], [rbound, zbound, rbound.map{|r| 0}])
kit.gp.contour = ""
kit.gp.view = "map"
#kit.gp.nosurface = ""
kit.gp.cntrparam = "levels 20"
kit.data[0].gp.with = 'l'
kit.data[1].gp.with = 'l lw 2 nocontours'
kit.gnuplot
# Second plot: profiles normalized to their maxima.
kit2 = GraphKit.quick_create([pres/pres.max],[f/f.max],[q/q.max])
kit2.data[0].title = 'Pressure/Max Pressure'
kit2.data[1].title = 'Poloidal current function/Max poloidal current function'
kit2.data[2].title = 'Safety factor/Max Safety factor'
kit2.gnuplot
#p ['f', f, 'p', pres, 'ffprime', ffprime, 'nlines', nlines, 'psi', psi, 'q', q, 'nbound', nbound, 'rbound', rbound, 'zbound', zbound]
end
end
# File extension used by GS2 input files.
def input_file_extension
  return '.in'
end
#This section defines a selection of graphs which are written to a latex file when the CR function write_report is called. To add your own, simply copy one a similar looking graph and modify it to your needs.
# The requirements to use the latex report writing is further specified in CodeRunner.
# Return the list of [graphkit, LaTeX caption] pairs included in the
# write_report output: eigenvalue/eigenfunction graphs for linear runs,
# spectra and flux time-traces for nonlinear runs. Entries guarded by
# run-parameter conditions contribute nil and are removed by compact.
def latex_graphs
#will have a different set of graphs to look at depending on whether linear or nonlinear
if @nonlinear_mode == "off"
#make up a list of graphs that are to be included. The order of the arguments is [code to generate graphkit, LaTeX description]
graphs = [
#[(kit = phi2_by_mode_vs_time({kx_index:1, ky_index:1}); kit.xlabel=%[TEST]; kit.gp.term = "post eps color enhanced size 3.5in,2.33in"; kit.gp.output = "test.eps"; kit), "This is a test graph written into a \LaTeX file. \n\n \\myfigure{test.eps}"]
[(kit = phi2tot_vs_time_graphkit; kit.data[0].title=""; kit.gp.logscale="y"; kit.file_name = "phi2tot.eps"; kit), "Total $\\phi^2$ versus time."],
[(kit = growth_rate_vs_ky_graphkit; kit.data[0].title=""; kit.file_name = "growth_rate_vs_ky.eps"; kit), "Growth rate $\\gamma_E$ as a function of $k_y$ averaged over $k_x$ (if applicable)."],
if @grid_option=="range" then [(kit = graphkit('efnmag', {norm:true, kx_index:1, ky_index: :all}); kit.data.each{|dk| dk.title=""}; kit.gp.logscale="y"; kit.file_name = "efnmag.eps"; kit.data.shift; kit), "Normalized magnitude of the eigenfunction as a function of $\\theta$ for all $k_y$'s in the simulation."] end,
if @grid_option=="single" then [(kit = graphkit('efnmag', {norm:true, kx_index:1, ky_index:1}); kit.data.each{|dk| dk.title=""}; kit.gp.logscale="y"; kit.file_name = "efnmag.eps"; kit), "Normalized magnitude of the eigenfunction as a function of $\\theta$ for all $k_y$'s in the simulation."] end,
].compact
else
graphs = [
[(kit = ky_spectrum_graphkit; kit.gp.logscale="y"; kit.file_name = "ky_spectrum.eps"; kit), "$k_y$ spectrum at the final time step averaged over $k_x$."],
[(kit = kx_spectrum_graphkit; kit.gp.logscale="y"; kit.file_name = "kx_spectrum.eps"; kit), "$k_x$ spectrum at the final time step averaged over $k_y$."],
[(kit = spectrum_graphkit(no_zonal:true); kit.gp.view="map"; kit.gp.logscale="z"; kit.file_name = "spectrum.eps"; kit), "2D spectrum versus $k_x$ and $k_y$ without zonal flows."],
[(kit = hflux_tot_vs_time_graphkit; kit.file_name = "hflux_tot_vs_time.eps"; kit), "Total heat flux $Q_{tot}$ as a function of time."],
[(kit = es_heat_flux_vs_time_graphkit(species_index:1); kit.file_name = "es_heat_1_vs_time.eps"; kit), "Heat flux of species 1 versus time."],
if @nspec > 1 then [(kit = es_heat_flux_vs_time_graphkit(species_index:2); kit.file_name = "es_heat_2_vs_time.eps"; kit), "Heat flux of species 2 versus time."] end,
[(kit = es_heat_flux_vs_ky_graphkit(species_index:1); kit.gp.logscale="y" ; kit.file_name = "es_heat_1_vs_ky.eps"; kit), "Heat flux of species 1 as a function of $k_y$."],
if @nspec > 1 then [(kit = es_heat_flux_vs_ky_graphkit(species_index:2); kit.gp.logscale="y" ; kit.file_name = "es_heat_2_vs_ky.eps"; kit), "Heat flux of species 2 as a function of $k_y$."] end,
[(kit = es_heat_flux_vs_ky_vs_kx_graphkit; kit.gp.view="map" ; kit.file_name = "es_heat_vs_ky_vs_kx.eps"; kit), "2D total heat flux spectrum as a function of $k_x$ and $k_y$."],
[(kit = phi_real_space_graphkit(n0:1, thetamin:get_list_of(:theta).length/2, thetamax:get_list_of(:theta).length/2, gs2_coordinate_factor:1.0); kit.gp.view="map" ; kit.file_name = "phi_real_space.eps"; kit), "Potential fluctuations at the final time step vs GS2 $x$ and $y$ at the outboard midplane."],
[(kit = density_real_space_graphkit(n0:1, species_index:1, thetamin:get_list_of(:theta).length/2, thetamax:get_list_of(:theta).length/2, gs2_coordinate_factor:1.0); kit.gp.view="map" ; kit.file_name = "density_real_space.eps"; kit), "Density fluctuations for species 1 at the final time step vs GS2 $x$ and $y$ at the outboard midplane."],
if @nspec > 1 then [(kit = density_real_space_graphkit(n0:1, species_index:2, thetamin:get_list_of(:theta).length/2, thetamax:get_list_of(:theta).length/2, gs2_coordinate_factor:1.0); kit.gp.view="map" ; kit.file_name = "density_real_space.eps"; kit), "Density fluctuations for species 2 at the final time step vs GS2 $x$ and $y$ at the outboard midplane."] end,
[(kit = es_mom_flux_vs_time_graphkit(species_index:1); kit.file_name = "es_mom_flux_1_vs_time.eps"; kit), "Momentum flux for species 1 as a function of time."],
if @nspec > 1 then [(kit = es_mom_flux_vs_time_graphkit(species_index:2); kit.file_name = "es_mom_flux_2_vs_time.eps"; kit), "Momentum flux for species 2 as a function of time."] end,
[(kit = zonal_spectrum_graphkit; kit.gp.logscale="y"; kit.file_name = "zonal_spectrum.eps"; kit), "Zonal spectrum at the final time step."],
if @write_eigenfunc == ".true." then [(kit = zf_velocity_vs_x_graphkit(theta_index:get_list_of(:theta).length/2); kit.file_name = "zonal_flow_velocity_vs_x.eps"; kit), "Zonal flow velocity avg over time versus x."] end,
if @write_eigenfunc == ".true." and @g_exb then [(kit = zf_velocity_vs_x_graphkit(theta_index:get_list_of(:theta).length/2, add_mean_flow:true); kit.file_name = "zonal_flow_velocity_vs_x_with_mean_flow.eps"; kit), "Zonal flow velocity with mean flow added avg over time versus x."] end,
].compact
end
end
# Renumber this run from @id to +new_id+: rename every output and restart
# file containing the old id, rename the run directory itself, update the
# id-bearing instance variables, and rewrite the info and results files.
#
# Fix: the second branch of the output-file loop previously re-tested
# `f.include? 'v_'`, which was dead code — 'v_' files are renamed and
# skipped (via next) by the first branch — so the redundant test is gone.
def change_id(new_id)
  # Change names for GS2 output files
  Dir.chdir(@directory) do
    Dir.entries().each do |f|
      if f.include? 'v_'
        new_name = f.sub "id_#{@id}", "id_#{new_id}"
        `mv "#{f}" "#{new_name}"`
        next
      end
      if f.include? 'gs2.'
        new_name = f.sub "gs2.#{@id}", "gs2.#{new_id}"
        `mv "#{f}" "#{new_name}"`
        next
      end
    end
  end
  # Change names for GS2 restart files
  Dir.chdir(@directory + '/' + @restart_dir) do
    Dir.entries().each do |f|
      if f.include? 'v_'
        new_name = f.sub "id_#{@id}", "id_#{new_id}"
        `mv "#{f}" "#{new_name}"`
      end
    end
  end
  # Rename the run directory itself.
  new_run_dir = @directory.sub "id_#{@id}", "id_#{new_id}"
  `mv "#{@directory}" "#{new_run_dir}"`
  @directory = new_run_dir
  # Rename variables which go in info and results file
  @run_name.sub! "id_#{@id}", "id_#{new_id}"
  @restart_file.sub! "id_#{@id}", "id_#{new_id}"
  @output_file.sub! "gs2.#{@id}", "gs2.#{new_id}"
  @error_file.sub! "gs2.#{@id}", "gs2.#{new_id}"
  # Change instance variable and write info and results files again
  @id = new_id
  write_results
  write_info
end
end # class GS2
# For backwards compatibility: old configuration-specific run class names
# all resolve to the single Gs2 class.
Gs2BoxNtRun = Gs2CycloneRun = Gs2BoxCollisionalRun = Gs2Jet42982Run = Gs2ArtunRun = Gs2LinskerRun = Gs2BarnesLinskerRun = Gs2BoxMovieRun = Gs2Run = Gs2
end # class CodeRunner
# ep CodeRunner::Gs2CycloneRun.ancestors
# Monkey-patch Float comparison to tolerate tiny differences that arise
# when values round-trip through NetCDF.
#
# WARNING(review): overriding Float#== without also overriding eql?/hash
# makes equality inconsistent with hashing and breaks transitivity —
# confirm nothing relies on exact float comparison or floats as hash keys.
class Float
def <=>(other) # necessary because of netcdf quirks
d = (self - other)
# Relative differences below 1e-10 count as equal.
if d.abs / (self.abs + 1) < 1e-10
return 0
else
return (d / d.abs).to_i
end
end
def ==(other)
return false unless other.kind_of? Numeric
# Equal to within an absolute tolerance of 1e-14.
return (self - other).abs < 1e-14
end
end
# Convenience extensions used by the graph option hashes.
class Hash
# puts self
# Translate physics values in this options hash (e.g. self[:ky]) into the
# corresponding 1-based Fortran indices (e.g. self[:ky_index]) for +run+.
# :strongest_non_zonal_mode selects the kx/ky pair with the largest
# non-zonal spectrum amplitude.
def convert_to_index(run, *names)
if self[:strongest_non_zonal_mode]
ky_element, kx_element = run.gsl_matrix('spectrum_over_ky_over_kx', no_zonal: true).max_index
p self[:kx_index] = kx_element + 1
p self[:ky_index] = ky_element + 1
self[:strongest_non_zonal_mode] = false
end
raise "No names specified" if names.size == 0
names.each do |name|
if name == :kx
# Lagrangian kx values are first converted to Eulerian kx indices.
if lkx = self[:lagrangian_kx]
self[:lagrangian_kx_index] = list(:kx).key(lkx)
end
if lkxi = self[:lagrangian_kx_index] ||= self[:lkx_index]
self[:kx_index] = run.eulerian_kx_index(kx_index: lkxi, ky_index: self[:ky_index], t_index: self[:t_index])
end
end
# Single-mode runs have exactly one kx and one ky.
self[:ky_index] = 1 if name == :ky and run.grid_option == "single"
self[:kx_index] = 1 if name == :kx and run.grid_option == "single"
# NOTE: name + :_index relies on the CodeRunner Symbol#+ extension.
self[name + :_index] ||= run.list(name).key(self[name]) || (raise ("#{name} not specified"))
end
end
# Fill in time-window bookkeeping: a lone :t_index becomes a one-point
# window, and the (1-based) window is converted to 0-based begin/end
# elements, defaulting to the whole time range.
def setup_time_window
self[:t_index_window] ||= [self[:t_index],self[:t_index]] if self[:t_index]
self[:begin_element], self[:end_element] = (self[:t_index_window] ? self[:t_index_window].map{|ind| ind - 1} : [0, -1])
end
end
# TODO: handle exception for no restart files
##########################################
# = Code Runner GS2 Module
##########################################
#
# Authors: Edmund Highcock
# Copyright: 2009 Edmund Highcock
#
# This is free software released under the GPL v3
#
# This module allows easy running of the plasma turbulence simulation code gs2 using Code Runner, by automatically organising, naming and submitting runs, and analysing the run data.
#
# See Code Runner documentation, or documentation for individual methods.
#
# Notes
#
# index variables, e.g. kx_index, ky_index etc always refer to the 1-based Fortran index, to keep correspondence with the gs2 indices. Element variables, e.g. kx_element, always refer to the 0-based C/ruby index
#
# raw NumRu::NetCDF grids are in Fortran row-major order. This means that when you access grids using the NetCDF function NetCDF#get, you must specify the indices in fortran order (but 0-based!). The NetCDF#get function then returns a C-like NArray with the indices in the opposite order. You can convert this to a Ruby Array using the method NArray#to_a (the indices will still be in the same order).
# NetCDF is optional: without it GS2 runs can still be submitted, but data
# analysis is unavailable.
begin
require "numru/netcdf"
rescue LoadError
eputs "Error: No NetCDF: data analysis for gs2 not possible"
end
class CodeRunner
# This is a customised subclass of CodeRunner::Run which allows CodeRunner to submit and analyse simulations from the gyrokinetic flux tube code GS2, which is principally used for simulating plasmas in magnetic confinement fusion.
#
# It performs two distinct roles: submitting simulations and analysing the data.
#
# = Submitting Simulations
#
# This principally involves generating the input file, which is a very nontrivial task. In order to do this, it maintains a complete record of every possible input parameter for GS2, as well as what datatype that parameter is, and sometimes what values it is allowed to take. This allows that not only to generate the input file, but to check that the input file makes sense. However, although generating the input file works beautifully, the set of sanity checks that it makes is not exhaustive: intelligent use is still required!
#
# In tandem with this, it maintains a whole set of tools for manipulating its database of input parameters. This includes updating their allowed values and also editing and accessing help for every input parameter.
#
# = Analysing Simulations
#
# The amount of analysis possible on GS2 data is enormous, and CodeRunner hasn't got close to getting there. What it can do is:
#
# * Check if the run is complete by comparing the number of completed timesteps against nstep
# * Calculate growth rates for linear runs.
# * Check if non-linear runs have saturated and calculate fluxes for those runs.
# * Automatically plot a huge variety of different graphs, ranging from simple plots of heat flux versus time to three-dimensional plots of the spectrum and potential.
class Gs2 < Run::FortranNamelist
#GS2_CRMOD_VERSION = Version.new(Gem.loaded_specs['gs2crmod'].version.to_s)
# Module version; hard-coded rather than read from the gem spec (see the
# commented line above).
GS2_CRMOD_VERSION = Version.new('0.5.0')
# Code-identity predicates: shared analysis code uses these to distinguish
# GS2 from sibling codes; all are false for the base GS2 class.
def agk?
false
end
def spectrogk?
false
end
def gryfx?
false
end
CODE_SCRIPT_FOLDER = MODULE_FOLDER = File.dirname(File.expand_path(__FILE__))
# Include the other files
@code_module_folder = folder = File.dirname(File.expand_path(__FILE__)) # i.e. the directory this file is in
setup_namelists(folder)
# Load the companion source files that make up the rest of this module.
require folder + '/graphs.rb'
require folder + '/gsl_data.rb'
require folder + '/gsl_data_3d.rb'
require folder + '/check_convergence.rb'
require folder + '/calculations.rb'
require folder + '/ingen.rb'
require folder + '/properties.rb'
require folder + '/test_gs2.rb'
require folder + '/read_netcdf.rb'
# Shorthand for GSL's not-a-number constant, used in results processing.
NaN = GSL::NAN
# GSL::Neg
# NOTE(review): evaluating an empty string in GLOBAL_BINDING is a no-op —
# presumably a leftover; confirm before removing.
eval(%[
], GLOBAL_BINDING)
################################################
# Quantities that are calculated or determined by CodeRunner
# after the simulation has ended, i.e. quantities
# that are not available from the GS2 output files.
################################################
# Symbols in this list become stored result fields of each run.
@results = [
:converged,
:decaying,
:es_heat_flux_stav_error,
:es_heat_flux_stav_std_dev,
:es_mom_flux_stav_error,
:es_mom_flux_stav_std_dev,
:es_part_flux_stav_error,
:es_part_flux_stav_std_dev,
:es_heat_flux_stav,
:es_mom_flux_stav,
:es_part_flux_stav,
:frequency_at_ky_at_kx,
:fastest_growing_mode,
:freq_of_max_growth_rate,
:gamma_r,
:gamma_i,
:growth_rates,
:growth_rates_by_ky, # deprecated
:growth_rates_by_kx, # deprecated
:growth_rate_at_ky,
:growth_rate_at_kx,
:growth_rate_at_ky_at_kx,
:hflux_tot_stav,
:hflux_tot_stav_error,
:hflux_tot_stav_std_dev,
:ky,
:ky_spectrum_peak_idx,
:ky_spectrum_peak_ky,
:ky_spectrum_peak_phi2,
:kx_spectrum_peak_kx,
:kx_spectrum_peak_phi2,
:max_growth_rate,
:max_transient_amplification_index_at_ky,
:phi2_tot_stav,
:par_mom_flux_stav,
:perp_mom_flux_stav,
:phi2_zonal,
:run_time,
:real_frequencies,
:real_frequencies_by_ky,
:saturation_time_index,
:saturated,
:shot_time,
:spectrum_check,
:transient_amplification_at_kx,
:transient_amplification_at_ky,
:transient_amplification_at_ky_at_kx,
:transient_es_heat_flux_amplification_at_species_at_kx,
:transient_es_heat_flux_amplification_at_species_at_ky,
:transient_es_heat_flux_amplification_at_species_at_ky_at_kx,
:vspace_check
]
###############################################
# Other useful information about the run
###############################################
@gs2_run_info = [:time, :percent_of_total_time, :checked_converged, :is_a_restart, :restart_id, :restart_run_name, :completed_timesteps, :response_id]
@run_info = @gs2_run_info.dup
##############################################################
# For backwards compatibility with CodeRunner version 0.5.0
##############################################################
# Maps of variable name => coercion method used when reading data files written
# by CodeRunner version 0.5.0 (kept for backwards compatibility).
@run_info_0_5_0 = {
  time: :to_f,
  percent_of_total_time: :to_f,
  checked_converged: :to_b
}
# NOTE: hash syntax unified to the modern `key:` style — the literal previously
# mixed `key: value` and `:key => value` forms. Keys and values are unchanged.
@results_0_5_0 = {
  converged: :to_b,
  decaying: :to_b,
  growth_rates: :to_h,
  real_frequencies: :to_h,
  # ky_list: :to_h,
  # kx_list: :to_h,
  growth_rates_by_ky: :to_s,
  real_frequencies_by_ky: :to_s,
  max_growth_rate: :to_f,
  fastest_growing_mode: :to_f,
  freq_of_max_growth_rate: :to_f,
  ky: :to_f,
  gamma_r: :to_f,
  gamma_i: :to_f,
  run_time: :to_f
  # theta_list: :to_h
}
###############################################################
@uses_mpi = true        # GS2 is launched as an MPI job
@modlet_required = false
@use_graphs = false
# Lightweight container for a single phi value read from the NetCDF file.
Phi = Struct.new("Phi", :phi, :ri, :theta_index, :kx_index, :ky_index)
@naming_pars = []
# def self.finish_setting_up_class
# @@variables += [
# end
# This method, as its name suggests, is called whenever CodeRunner is asked to analyse a run directory. This happens if the run status is not :Complete, or if the user has specified recalc_all (-A on the command line) or reprocess_all (-a on the command line).
#
# The structure of this function is very simple: first it calls get_status to determine the directory status, i.e. :Complete, :Incomplete, :NotStarted or :Failed; then it gets the time, which is the GS2 time at the end of the run, and it also gets the run_time, which is the wall clock time of the run. Finally, if non-linear mode is switched off, it calls calculate_growth_rates_and_frequencies, and if non-linear mode is switched on, it calls calculate_time_averaged_fluxes.
# Called by CodeRunner whenever it analyses a run directory: determines the run
# status, estimates how far through the run got, and (for finished runs)
# triggers the full results analysis.
def process_directory_code_specific
  run_namelist_backwards_compatibility
  get_status unless @status == :Queueing
  eputs "Run #@status: #@run_name" if [:Complete,:Failed].include? @status
  try_to_get_error_file
  @sys = @@successful_trial_system
  return if @status == :NotStarted or @status == :Failed or @status == :Queueing
  begin
    # BUGFIX: this was `get_completed_timesteps/@nstep` — integer division
    # with no *100, which truncated to 0 (or 1) instead of a percentage.
    # Now computed as a float percentage, consistent with the rescue branch.
    @percent_of_total_time = get_completed_timesteps.to_f / @nstep * 100.0
  rescue
    # Timestep count unavailable: estimate progress from the simulation time.
    get_time
    @percent_of_total_time = @time / (@delt*@nstep) * 100.0 rescue 0.0
  end
  return if @status == :Incomplete
  get_run_time
  calculate_results
end
# Run the mode-appropriate analysis: transient amplifications for linear runs,
# saturation time and time-averaged fluxes (plus optional spectral/velocity
# space checks) for nonlinear runs. Can be disabled entirely by setting the
# CODE_RUNNER_NO_ANALYSIS environment variable to "true".
def calculate_results
  return if ENV['CODE_RUNNER_NO_ANALYSIS'] =~ /true/
  eputs "Analysing run"
  case @nonlinear_mode
  when "off"
    calculate_transient_amplifications
  when "on"
    calculate_saturation_time_index
    calculate_time_averaged_fluxes
    begin
      calculate_spectral_checks
      calculate_vspace_checks
    rescue
      # Best effort: these checks are optional diagnostics.
    end
  end
  @growth_rates ||= {}
  @real_frequencies ||= {}
end
# Try to read the runtime in minutes from the GS2 standard out.
# Extract the wall-clock run time from the tail of the GS2 standard output;
# sets @run_time to nil if it cannot be found.
def get_run_time
  logf(:get_run_time)
  output = @output_file || try_to_get_output_file
  return nil unless output
  begin
    # NB: FileUtils.tail is a CodeRunner extension, not stdlib FileUtils.
    Regexp.new("total from timer is:\\s*#{LongRegexen::NUMBER}", Regexp::IGNORECASE).match FileUtils.tail(output, 300)
    logi $~
    # $~ is the MatchData from the match above; a failed match leaves $~ nil,
    # so the next line raises and the rescue sets @run_time to nil.
    @run_time = $~[:number].to_f
  rescue
    @run_time = nil
  end
end
# Output useful information from the NetCDF file. If no names are provided, output a list of all variables in the NetCDF file. <tt>names</tt> can either be a symbol or an array of symbols, in which case information will be output for the variables with those names. If values are provided, for example :dims,:get, :ndims, this information is retrieved from the file for every variable named.
# ncdump
# ncdump(:hflux)
# ncdump([:hflux, :phi])
# ncdump([:hflux, :phi], :dims)
# Dump information about variables in the run's NetCDF file (see the comment
# above for usage examples). With no names, lists all variables; `values` may
# name a NetCDF variable method (e.g. :dims, :get) to fetch per variable.
def ncdump(names=nil, values=nil, extension = '.out.nc')
  # Accept nil (all variables), a single symbol, or an array of symbols.
  names = [names] if names && names.class != Array
  names.map!(&:to_s) if names
  variables = NumRu::NetCDF.open(@run_name + extension).vars(names).to_a
  ordered = variables.sort_by { |var| var.name }
  pp ordered.map { |var| values ? [var.name, var.send(values)] : var.name.to_sym }
end
# For runs covering many modes or scan values, create one child "component
# run" per ky (linear box runs) or per scan-parameter window (parameter
# scans) so that each can be analysed individually.
def generate_component_runs
  @component_runs = []
  logf(:generate_component_runs)
  return if @grid_option == "single" and @scan_type == "none"
  begin
    list(:ky) # This will fail unless the run has output the netcdf file
  rescue
    return
  end
  return unless @status == :Complete #and @converged
  log(@run_name)
  if @grid_option == "box" and @nonlinear_mode == "off"
    # One component per ky, carrying that mode's growth rate and frequency.
    @ky = nil
    # raise CRFatal.new("no @ky_list") unless @ky_list
    # log list(:ky)
    list(:ky).each do |id, ky|
      component_run = create_component #self.dup
      component_run.ky = ky
      component_run.gamma_r = @growth_rates[ky]
      component_run.gamma_i = @real_frequencies[ky]
      log @runner.component_ids
      # log('@runner.class', @runner.class)
      # @runner.add_component_run(component_run)
    end
  elsif (not gryfx?) and @scan_type and @scan_type != "none"
    # Split the time series into windows, one per scan-parameter value.
    t = gsl_vector('t')
    scan_vals = gsl_vector('scan_parameter_value')
    current = scan_vals[0]
    start = 0
    for i in 0...t.size
      if scan_vals[i] != current
        component = create_component
        component.scan_index_window = [start+1, i] #remember indexes are elements + 1
        #ep 'scan_index_window', component.scan_index_window
        component.scan_parameter_value = current
        # Reset cached growth rates so they are recalculated for this window.
        component.growth_rate_at_ky = nil
        component.growth_rate_at_kx = nil
        component.growth_rate_at_ky_at_kx = nil
        component.calculate_results
        current = scan_vals[i]
        start = i
      end
    end
  end
end
# Return the final simulation time of the run, preferring the NetCDF time
# list and falling back to scraping the tail of <run_name>.out; stores the
# result in @time.
def get_time
  begin
    lt = list(:t)
    return lt.values.max if lt.size>0
  rescue
    # NetCDF file missing or unreadable: fall through to the .out file.
  end
  time = nil
  # eputs File.readlines(@run_name +".out").slice(-4..-1).reverse.join( "\n"); gets
  raise CRFatal.new("Couldn't find outfile #{@run_name}.out") unless FileTest.exist?(@run_name + ".out")
  # NB: FileUtils.tail is a CodeRunner extension, not stdlib FileUtils.
  tail = FileUtils.tail("#@run_name.out", 4)
  #File.readlines(@run_name +".out").slice(-4..-1).reverse.join( "\n")
  tail.sub(LongRegexen::FLOAT) do
    # eputs $~.inspect
    time = $~[:float].to_f
  end #if FileTest.exist? (@run_name +".out")
  #raise CRFatal.new("couldn't get the time from #{tail}") unless time
  @time = time
end
# Number of completed timesteps, inferred from the number of saved time points
# times the output interval @nwrite; stored in @completed_timesteps.
def get_completed_timesteps
  #raise CRFatal.new("Couldn't find outfile #{@run_name}.out") unless FileTest.exist?(@run_name + ".out")
  #p 'try to get completed_timesteps', Dir.pwd, 'nwrite', @nwrite, 'delt', @delt
  @completed_timesteps = (list(:t).size - 1) * (@nwrite || 1)
  #p 'tried to get completed_timesteps'
  #rescue
  #`grep time= #@run_name.out`.split.size
  # File.read("#@run_name.out").scan(/^\s+time\s*=\s+/).size * @nwrite
end
# True unless the run has completed 100% of its requested timesteps.
def incomplete
  percent_complete != 100
end
# Hook called by CodeRunner when one run's parameters transition into
# another's; GS2 needs no special handling, so this is deliberately a no-op.
def parameter_transition(run)
end
# @@executable_location = nil
# def executable_location
# return "~/gs2_newterm" #(@@executable_location || ($gs2_new_term ? "~/gs2_newterm" : "~/gs2"))
# end
#
# def executable_name
# "gs2"
# end
@code_long = "GS2 Gyrokinetic Flux Tube Code"
@excluded_sub_folders =[]
attr_accessor :theta_list, :ky_list, :ky_graphs, :eigenfunctions, :ky_list, :t_list
attr_accessor :scan_index_window, :scan_parameter_value
class << self
  # aliold is a CodeRunner helper that aliases the inherited method to
  # old_check_and_update so it can be extended here.
  aliold(:check_and_update)
  def check_and_update
    old_check_and_update
    # Readouts show every variable/result except the large per-mode
    # hashes and coordinate lists.
    @readout_list = (@variables + @results - [:growth_rates_by_ky, :growth_rates, :real_frequencies, :real_frequencies_by_ky, :ky_list, :kx_list, :theta_list, :t_list])
  end
end
# Tab-separated readout of all listed variables/results for this run, used
# when writing results tables.
def data_string
  logf(:data_string)
  # Chained modifiers: for multi-mode (non-single) runs, emit nothing
  # unless the run converged.
  return "" unless @converged unless @grid_option == 'single'
  logi(@ky, @growth_rates, @real_frequencies)
  # log(:@@readout_list, @@readout_list)
  # Missing values are written as "0" so columns stay aligned.
  return rcp.readout_list.inject(""){|str,(var,_)| str+"#{(send(var) || "0")}\t"} + "\n"
  # @ky ? (@@variables + @@results - ).inject(""){|str,(var,type_co)| str+"#{(send(var) || "0")}\t"} + sprintf("%e\t%e\t%e\n", @ky, @growth_rates[@ky], @real_frequencies[@ky]) : (@@variables + @@results).inject(""){|str,(var,type_co)| str+"#{(send(var) || "0")}\t"} + sprintf("%e\t%e\t%e\n", @fastest_growing_mode, @max_growth_rate, @freq_of_max_growth_rate)
end
# Percentage of requested timesteps completed; falls back to the time-based
# estimate when the completed-timestep count is unknown.
def percent_complete
  if @completed_timesteps
    @completed_timesteps.to_f / @nstep.to_f * 100.0
  else
    @percent_of_total_time
  end
end
# One-line summary of the run for CodeRunner's status display: id, name,
# status, run time (minutes), processor count, percent complete, then
# mode-dependent results (growth rate info for linear runs, saturation and
# fluxes for nonlinear runs), and any comment.
def print_out_line
  logf(:print_out_line)
  name = @run_name
  name += " (res: #@restart_id)" if @restart_id
  name += " real_id: #@real_id" if @real_id
  beginning = sprintf("%2d:%d %-60s %1s:%2.1f(%s) %3s%1s %1s", @id, @job_no, name, @status.to_s[0,1], @run_time.to_f / 60.0, @nprocs.to_s, percent_complete, "%", @converged.to_s)
  if @ky
    # Single-mode run: show this mode's growth rate and frequency.
    beginning += sprintf("%3s %4s %4s", @ky, @growth_rates[@ky], @real_frequencies[@ky])
  elsif @nonlinear_mode == "off"
    beginning += sprintf("%3s %4s %4s",
      @fastest_growing_mode, @max_growth_rate,
      @freq_of_max_growth_rate)
  elsif @nonlinear_mode == "on"
    # p @hflux_tot_stav
    beginning += " sat:#{saturated.to_s[0]}"
    beginning += sprintf(" hflux:%1.2e", @hflux_tot_stav) if @hflux_tot_stav
    beginning += sprintf("+/-%1.2e", @hflux_tot_stav_error) if @hflux_tot_stav_error
    beginning += sprintf(" momflux:%1.2e", @es_mom_flux_stav.values.sum) if @es_mom_flux_stav and @es_mom_flux_stav.values[0]
    beginning += ' SC:' + @spectrum_check.map{|c| c.to_s}.join(',') if @spectrum_check
    beginning += ' VC:' + @vspace_check.map{|c| sprintf("%d", ((c*10.0).to_i rescue -1))}.join(',') if @vspace_check
  end
  beginning += " ---#{@comment}" if @comment
  beginning
end
# Read one or more coordinate/index lists (e.g. :ky, :kx, :t) from the NetCDF
# file into the cache as {element_number => value} hashes (1-based, matching
# Fortran indexing). Pass true as the final argument to force a refresh of
# already-cached lists. Returns the list directly when only one is requested.
def get_list_of(*args)
  #args can be any list of e.g. :ky, :kx, :theta, :t ...
  logf(:get_list_of)
  refresh = args[-1] == true ? true : false
  args.pop if args[-1] == true
  logd
  Dir.chdir(@directory) do
    args.each do |var|
      # eputs "Loading #{var}"
      # NB: Symbol#+ is a CodeRunner extension (:ky + :_list == :ky_list).
      list_name = var + :_list
      log list_name
      # self.class.send(:attr_accessor, list_name)
      # Skip work when cached and the run can no longer change.
      next if (cache[list_name] and [:Failed, :Complete].include? status and not refresh)
      cache[list_name] = {}
      if netcdf_file.var(var.to_s)
        # A variable of that name exists: read its values.
        netcdf_file.var(var.to_s).get.to_a.each_with_index do |value, element|
          # print '.'
          cache[list_name][element+1]=value
        end
      else
        # Only a dimension of that name exists: record placeholders.
        netcdf_file.dim(var.to_s).length.times.each do |element|
          cache[list_name][element+1]='unknown'
        end
      end
      # eputs send(var+:_list)
    end
  end
  logfc :get_list_of
  return cache[args[0] + :_list] if args.size == 1
end
alias :list :get_list_of
# Fit a straight line to log(phi^2)/2 vs time for the given ky and display the
# fitted exponential against the data, for visual verification of the measured
# growth rate. Blocks until the user presses Enter, then closes the graph.
def visually_check_growth_rate(ky=nil)
  logf :visually_check_growth_rate
  phi_vec = gsl_vector(:phi2_by_ky_over_time, {ky: ky})
  t_vec = gsl_vector(:t)
  # phi2 ~ exp(2*gamma*t), so the growth rate is the slope of log(phi2)/2.
  constant, growth_rate = GSL::Fit::linear(t_vec, 0.5*GSL::Sf::log(phi_vec)).slice(0..1)
  eputs growth_rate
  graph = @@phi2tot_vs_time_template.graph(["#{constant} * exp (2 * #{growth_rate} * x)"], [[[t_vec, phi_vec], "u 1:2 title 'phi2tot #{@run_name}' w p"]], {"set_show_commands" => "\nset log y\n", "point_size"=>'1.0'})
  # eputs graph.inline_data.inspect
  graph.show
  gets
  graph.kill
end
# Briefly display the phi2-vs-time graph for all kys (e.g. while manually
# classifying runs), then close it.
def show_graph
  thegraph = special_graph('phi2tot_vs_time_all_kys')
  thegraph.title += " for g_exb = #{@g_exb.to_f.to_s}"
  thegraph.show
  sleep 1.5
  # @decaying = Feedback.get_boolean("Is the graph decaying?")
  thegraph.kill
end
# Prepare new_run as a restart of this run: copy parameters and submission
# options, mark it as a restart, regenerate its name, and copy the restart
# (and, if requested, response) files across. Returns new_run.
def restart(new_run)
  (rcp.variables).each{|v| new_run.set(v, send(v)) if send(v)}
  @naming_pars.delete(:preamble)
  SUBMIT_OPTIONS.each{|v| new_run.set(v, self.send(v)) unless new_run.send(v)}
  new_run.is_a_restart = true
  new_run.ginit_option = "many"
  new_run.delt_option = "check_restart"
  new_run.restart_id = @id
  new_run.restart_run_name = @run_name
  @runner.nprocs = @nprocs if @runner.nprocs == "1" # 1 is the default
  # GS2 restart files are written per-processor, so the layout must match.
  if !new_run.nprocs or new_run.nprocs != @nprocs
    raise "Restart must be on the same number of processors as the previous "\
      "run: new is #{new_run.nprocs.inspect} and old is #{@nprocs.inspect}"
  end
  new_run.run_name = nil
  new_run.naming_pars = @naming_pars
  new_run.update_submission_parameters(new_run.parameter_hash_string, false) if
    new_run.parameter_hash
  new_run.naming_pars.delete(:restart_id)
  new_run.generate_run_name
  copy_restart_files(new_run)
  if new_run.read_response and new_run.read_response.fortran_true?
    new_run.response_id = new_run.restart_id
    copy_response_files(new_run)
  end
  new_run
end
# Copy this run's per-processor restart files into new_run's nc/ folder,
# renaming them to match new_run's restart file name.
def copy_restart_files(new_run)
  eputs 'Copying restart files...', ''
  FileUtils.makedirs(new_run.directory + '/nc')
  #old_dir = File.dirname(@restart_file)
  new_run.restart_file = "#@run_name.nc"
  new_run.restart_dir = "nc"
  files = list_of_restart_files.map do |file|
    @directory + "/" + file
  end
  files.each_with_index do |file , index|
    eputs "#{index+1} out of #{files.size}"
    eputs "\033[2A" # ANSI escape: move the cursor up two lines (progress display)
    # Preserve the per-processor suffix (.NNN) or the _ene suffix.
    num = file.scan(/(?:\.\d+|_ene)$/)[0]
    #FileUtils.cp("#{old_dir}/#{file}", "nc/#@restart_file#{num}")
    FileUtils.cp(file, new_run.directory + "/nc/#{new_run.restart_file}#{num}")
  end
end
# Copy this run's response matrix files into run's response/ folder, renaming
# them for the new run; first warns the user about any parameter differences.
def copy_response_files(run)
  eputs 'Copying response files...', ''
  eputs 'The following run parameters have changed. Are you sure you can use '\
    'these response files?'
  diff_run_parameters(self, run)
  FileUtils.makedirs(run.directory + '/response')
  run.response_dir = "response"
  files = list_of_response_files.map do |file|
    @directory + "/" + file
  end
  files.each_with_index do |file , index|
    eputs "#{index+1} out of #{files.size}"
    eputs "\033[2A" # ANSI escape: move the cursor up two lines (progress display)
    # Preserve the _ik_<n>_is_<m>.response suffix for each file.
    response_ext = file.scan(/_ik_\d+_is_\d+.response/)
    FileUtils.cp(file, run.directory + "/response/#{run.run_name}#{response_ext[0]}")
  end
end
# The following function is essentially the same as the CR differences_between
# function without the runner loading set up code. This could possibly be moved
# to a more general function in CR.
# Print a table of the parameters that differ between run_1 and run_2
# (values truncated to 10 characters), excluding bookkeeping variables.
def diff_run_parameters(run_1, run_2)
  runs = [run_1, run_2]
  # NOTE(review): rcp_fetcher is assigned but never used below — the calls
  # use this instance's rcp instead. Possibly an unfinished refactor; confirm.
  rcp_fetcher = (runs[0] || @runner.run_class).rcp
  vars = rcp.variables.dup + rcp.run_info.dup
  # Clean up output by deleting some variables
  vars.delete_if{|var| runs.map{|r| r.send(var)}.uniq.size == 1}
  vars.delete :id
  vars.delete :run_name
  vars.delete :output_file
  vars.delete :error_file
  vars.delete :executable
  vars.delete :comment
  vars.delete :naming_pars
  vars.delete :parameter_hash
  vars.delete :parameter_hash_string
  vars.delete :sys
  vars.delete :status
  vars.delete :job_no
  vars.delete :running
  # id is re-added at the front so rows can be identified.
  vars.unshift :id
  # Fancy table printing
  table = vars.map{|var| [var] + runs.map{|r| str = r.instance_eval(var.to_s).to_s;
    str.size>10?str[0..9]:str} }
  # Column widths: elementwise maximum over all rows.
  col_widths = table.map{|row| row.map{|v| v.to_s.size}}.
    inject{|o,n| o.zip(n).map{|a| a.max}}
  eputs
  table.each{|row| i=0; eputs row.map{|v| str = sprintf(" %#{col_widths[i]}s ",
    v.to_s); i+=1; str}.join('|'); eputs '-' *
    (col_widths.sum + col_widths.size*3 - 1) }
end
# Return a list of restart file paths (relative to the run directory).
# Search order: dot-number files in the run directory, .nc.N/_ene files in the
# run directory, each subdirectory, then a single .nc file in the nc/ folder.
# NB: Dir.entries with no argument is a CodeRunner extension (defaults to '.').
def list_of_restart_files
  Dir.chdir(@directory) do
    files = Dir.entries.grep(/^\.\d+$/)
    files = Dir.entries.grep(/\.nc(?:\.\d|_ene)/) if files.size == 0
    if files.size == 0
      (Dir.entries.find_all{|dir| FileTest.directory? dir} - ['.', '..']).each do |dir|
        files = Dir.entries(dir).grep(/\.nc(?:\.\d|_ene)/).map{|file| dir + "/" + file}
        # BUGFIX: was `break if files.size == 0`, which aborted the search at
        # the first empty subdirectory and overwrote results from directories
        # that DID contain restart files. Stop as soon as files are found.
        break if files.size > 0
      end
    end #if files.size == 0
    # Finds a .nc file (w/o a number) in 'nc' folder if using single restart file
    if files.size == 0
      files = Dir.entries('nc').grep(/\.nc/).map{|file| 'nc' + "/" + file}
    end #if files.size == 0
    return files
  end # Dir.chdir(@directory) do
end
alias :lorf :list_of_restart_files
# Return list of response files similar to method for restart files: the
# response/ folder first, then the run directory, then any subdirectory.
# NB: Dir.entries with no argument is a CodeRunner extension (defaults to '.').
def list_of_response_files
  Dir.chdir(@directory) do
    files = Dir.entries('response').grep(/\.response/).map{|file| 'response' +
      "/" + file}
    files = Dir.entries.grep(/\.response/) if files.size == 0
    if files.size == 0
      # NOTE(review): no break here — if several subdirectories contain
      # response files, the last one wins. Confirm this is intended.
      (Dir.entries.find_all{|dir| FileTest.directory? dir} - ['.', '..']).each do |dir|
        files = Dir.entries(dir).grep(/\.response/).map{|file| dir + "/" + file}
      end
    end
    return files
  end
end
# Put restart files in the conventional location, i.e. nc/run_name.proc
# Put restart files in the conventional location, i.e. nc/run_name.proc,
# moving them from wherever list_of_restart_files found them.
def standardize_restart_files
  Dir.chdir(@directory) do
    FileUtils.makedirs('nc')
    list_of_restart_files.each do |file|
      # Per-processor suffix (.NNN) or the _ene suffix.
      proc_id = file.scan(/\.\d+$|_ene$/)[0]
      #p 'proc_id', proc_id
      FileUtils.mv(file, "nc/#@run_name.nc#{proc_id}")
    end
  end
end
# Delete all the restart files (irreversible!)
#
# Delete all the restart files for this run (irreversible!). Pass
# no_confirm: true to skip the interactive confirmation.
def delete_restart_files(options={})
  puts 'You are about to delete the restart files for:'
  puts @run_name
  # Chained modifiers: only ask for confirmation when not suppressed.
  return unless Feedback.get_boolean("This action cannot be reversed. Do you wish to continue?") unless options[:no_confirm]
  list_of_restart_files.each{|file| FileUtils.rm file}
end
# First letter (lower-cased) of the type of species 1, e.g. "i" for "Ion".
def species_letter
  species_type(1).downcase.slice(0, 1)
end
# Return the type string of the species with the given (1-based) index,
# supporting both numbered variables (type_1, type_2, ...) and a plain list
# of type variables.
def species_type(index)
  if rcp.variables.include? :type_1
    # NB: Symbol#+ and Integer#to_sym are CodeRunner extensions.
    type = send(:type_ + index.to_sym)
  else
    types = rcp.variables.find_all{|var| var.to_s =~ /^type/}.map{|var| send(var)}
    type = types[index.to_i - 1]
  end
  type
end
# Returns true if this run has not been restarted, false if it has. This
# allows one to get data from the final run of a series of restarts.
# Returns true if this run has not been restarted, false if it has. This
# allows one to get data from the final run of a series of restarts.
def no_restarts
  raise NoRunnerError unless @runner
  @runner.runs.none? { |run| run.restart_id == @id }
end
# Return the ids of the whole restart chain this run belongs to, oldest
# first. If this run is itself a restart, delegate to the original run.
def restart_chain
  return @runner.run_list[@restart_id].restart_chain if @restart_id
  chain = []
  current_id = @id
  loop do
    chain << current_id
    successor = @runner.runs.find { |run| run.restart_id == current_id }
    break unless successor
    current_id = successor.id
  end
  chain
end
# Determine and set @status (:Complete, :Incomplete, :NotStarted or :Failed)
# from the GS2 standard output, the .par_scan file and the NetCDF output,
# depending on whether the job is still running and on the run mode.
# NB: FileUtils.tail is a CodeRunner extension, not stdlib FileUtils.
def get_status
  # eputs 'Checking Status'
  logf(:get_status)
  Dir.chdir(@directory) do
    if @running
      # Job still queued/running: it has started once the .out file has
      # content and at least one timestep (t=) has been printed.
      if FileTest.exist?(@run_name + ".out") and FileUtils.tail(@run_name + ".out", 5).split(/\n/).size > 4 and FileUtils.tail(@run_name + ".out", 200) =~ /t\=/
        @status = :Incomplete
      else
        @status = :NotStarted
      end
    else
      if FileTest.exist?(@run_name + ".out") and FileUtils.tail(@run_name + ".out", 5).split(/\n/).size > 4
        #eputs "HERE", @scan_type
        if @nonlinear_mode == "off" and FileUtils.tail(@run_name + ".out",200) =~ /omega converged/
          eputs 'Omega converged...'
          @status = :Complete
        elsif @scan_type and @scan_type != "none" and FileUtils.tail(@run_name + ".par_scan",200) =~ /scan\s+is\s+complete/i
          eputs 'Scan complete...'
          @status = :Complete
        elsif @nonlinear_mode == "on" or !@omegatol or @omegatol < 0.0 or (@exit_when_converged and @exit_when_converged.fortran_false?)
          # No convergence criterion applies: judge by completed timesteps.
          eputs 'No omegatol'
          if FileTest.exist?(@run_name + ".out.nc")
            #p ['pwd', Dir.pwd, netcdf_file, netcdf_file.dim('t'), netcdf_file.dims]
            if netcdf_file.dim('t').length > 0
              get_completed_timesteps
            else
              @status = :Failed
              return
            end
          else
            eputs "Warning: no netcdf file #@run_name.out.nc"
            @status = :Failed
            return
          end
          #ep "completed_timesteps", @completed_timesteps
          eputs "#{percent_complete}% of Timesteps Complete"
          if percent_complete >= 100.0
            @status = :Complete
          elsif percent_complete > 5 and FileUtils.tail(output_file, 200) =~ /total from timer is/
            # Timer summary printed: GS2 exited cleanly before all steps ran.
            @status = :Complete
          else
            @status = :Failed
          end
        else
          @status = :Failed
        end
      else
        @status=:Failed
      end
    end
  end
end
# Class-level hook allowing GS2 to rewrite the job submission script. When
# CODE_OPTIONS[:gs2][:list] is set, runs are grouped into GS2 "list" jobs:
# each group gets a .list file naming its member runs and a single aggregated
# MPI submission command. Returns the (possibly rewritten) script text.
def self.modify_job_script(runner, runs_in, script)
  if CODE_OPTIONS[:gs2] and CODE_OPTIONS[:gs2][:list]
    if (list_size = CODE_OPTIONS[:gs2][:list]).kind_of? Integer
      raise "The total number of runs must be a multiple of the list size!" unless runs_in.size % list_size == 0
      pieces = runs_in.pieces(runs_in.size/list_size)
    else
      # A non-integer (e.g. true) means one list containing all runs.
      pieces = [runs_in]
    end
    script = ""
    pieces.each do |runs|
      #ep 'there is a list'
      FileUtils.makedirs('job_lists')
      jid = "#{runs[0].id}-#{runs[-1].id}"
      list_file = "job_lists/gs2_list_#{jid}.list"
      # The .list file holds the member count then one run path per line.
      File.open(list_file,'w') do |file|
        file.puts runs.size
        file.puts runs.map{|r| "#{r.relative_directory}/#{r.run_name}"}.join("\n")
      end
      raise "runs must all have the same nprocs" unless runs.map{|r| r.nprocs}.uniq.size == 1
      runs.each do |r|
        # Make sure the restart file name includes the relative directory for
        # list runs
        reldir = r.relative_directory
        rdir = r.restart_dir
        #puts rdir[0...reldir.size] == reldir, rdir[0...reldir.size], reldir
        #raise ""
        if rdir
          r.restart_dir = reldir + '/' + rdir if not rdir[0...reldir.size] == reldir
        else
          r.restart_dir = reldir
        end
        Dir.chdir(r.directory){r.write_input_file}
      end
      # A list job runs all members simultaneously: multiply the first
      # dimension of the processor layout by the number of runs.
      np = runs[0].nprocs.split('x').map{|n| n.to_i}
      np[0] *= runs.size
      nprocs = np.map{|n| n.to_s}.join('x')
      # NOTE(review): @runner here is the class-level instance variable, not
      # the `runner` argument — confirm this is intended.
      @runner.nprocs = nprocs
      ls = ListSubmitter.new(@runner, nprocs, list_file, jid)
      script << ls.run_command
    end
  end
  return script
end
# Builds the submission command for a GS2 "list" job (a set of runs launched
# together inside one MPI job); run_command comes from the system module.
class ListSubmitter
  include CodeRunner::SYSTEM_MODULE
  @uses_mpi = true
  attr_reader :executable_location, :executable_name, :parameter_string
  attr_reader :job_identifier
  # runner:    source of the executable location/name
  # nprocs:    aggregated processor layout string, e.g. "32x4"
  # list_file: path to the .list file naming the member runs
  # jid:       job identifier, e.g. "12-19" (first and last run ids)
  def initialize(runner, nprocs, list_file, jid)
    @executable_location = runner.executable_location
    @executable_name = runner.executable_name
    @parameter_string = list_file
    @job_identifier = jid
    @nprocs = nprocs
  end
  def rcp
    self.class.rcp
  end
  # Memoised run-class property fetcher, mirroring the Run class interface.
  def self.rcp
    @rcp ||= CodeRunner::Run::RunClassPropertyFetcher.new(self)
  end
end #class ListSubmitter
# Wipe all cached state for this run (every instance variable except the
# runner reference) and its CodeRunner data files, then re-run the full
# directory analysis from scratch.
def recheck
  logf(:recheck)
  Dir.chdir(@directory) do
    logi('@runner.object_id', @runner.object_id)
    log('@runner.class', @runner.class)
    #runner = @runner
    instance_variables.each{|var| instance_variable_set(var, nil) unless var == :@runner}
    # Missing data files are fine; ignore ENOENT-style failures.
    begin File.delete(".code_runner_run_data") rescue Errno::ENOENT end
    begin File.delete("code_runner_results.rb") rescue Errno::ENOENT end
    logi(:@checked_converged, @checked_converged)
    logi('@runner.object_id after reset', @runner.object_id)
    log('@runner.class', @runner.class)
    process_directory
  end
end
# Generate the GS2 input file for this run: set up response/restart
# directories and files, pass the wall-clock limit to GS2, fix the processor
# layout, run parameter checks (or a user-supplied block), write the file and
# generate an ingen report when possible.
def generate_input_file(&block)
  # BUGFIX: was `raise CRFatal("...")`, calling CRFatal as a method rather
  # than constructing the exception; CRFatal.new matches usage elsewhere
  # in this file.
  raise CRFatal.new("No Input Module File Given or Module Corrupted") unless
    methods.include? (:input_file_text)
  run_namelist_backwards_compatibility
  @user_comments = "Defaults description: #@defaults_file_description. Run description: #@comment"
  # If it is a restart default behaviour will be to copy the response files
  # from the run being restarted. Specifying a response_id will override this.
  if not @is_a_restart and @response_id
    @read_response = ".true."
    @runner.run_list[@response_id].copy_response_files(self)
  elsif @dump_response and @dump_response.fortran_true? and
      (not @read_response or not @read_response.fortran_true?)
    @response_dir = "response"
    FileUtils.makedirs @response_dir
  end
  # The second test checks that the restart function has not been called
  # manually earlier (e.g. in Trinity), but we must check that it is not in
  # fact a resubmitted run.
  if @restart_id and (not @is_a_restart or @resubmit_id)
    @runner.run_list[@restart_id].restart(self)
  elsif ((@save_for_restart and @save_for_restart.fortran_true?) or
      (@save_for_restart_new and @save_for_restart_new.fortran_true?)) and
      (not @is_a_restart or @resubmit_id)
    @restart_dir = "nc"
    #if CODE_OPTIONS[:gs2] and CODE_OPTIONS[:gs2][:list]
    #FileUtils.makedirs "#{@runner.root_folder}/#@restart_dir"
    #else
    FileUtils.makedirs @restart_dir
    #end
    @restart_file = "#@run_name.nc"
  end
  # Let Gs2 know how much wall clock time is available. avail_cpu_time is a GS2 input parameter.
  @avail_cpu_time = @wall_mins * 60 if @wall_mins
  # Automatically set the number of nodes to be the maximum possible without parallelising over x, if the user has left the number of nodes unspecified.
  set_nprocs
  if block
    ##### Allow the user to define their own pre-flight checks and changes
    instance_eval(&block)
  else
    ######### Check for errors and inconsistencies
    check_parameters
    #########
  end
  write_input_file
  ######### Generate a report using the ingen tool if possible
  ingen unless block
  ########
end
# Write the generated namelist text to <run_name>.in in the current
# directory, terminated with a newline.
def write_input_file
  filename = @run_name + ".in"
  File.open(filename, 'w') { |handle| handle.puts input_file_text }
end
# If @nprocs was given as a layout without a node count (e.g. "x8"), find the
# largest node count whose total processor number does not exceed the maximum
# useful parallelisation, and prefix it to the layout.
def set_nprocs
  if (nprocs_in = @nprocs) =~ /^x/
    max = max_nprocs_no_x
    nodes = 0
    @nprocs = "#{nodes}#{nprocs_in}"
    loop do
      nodes += 1
      @nprocs = "#{nodes}#{nprocs_in}"
      if actual_number_of_processors > max
        # Gone one too far: step back to the last valid node count.
        nodes -= 1
        @nprocs = "#{nodes}#{nprocs_in}"
        break
      end
    end
  end
end
# Total number of MPI tasks implied by the 'AxBxC...' processor-layout
# string @nprocs (the product of its dimensions).
def actual_number_of_processors
  raise "Please specify the processor layout using the -n or (n:) option" unless @nprocs
  @nprocs.split('x').map(&:to_i).reduce(1, :*)
end
alias :anop :actual_number_of_processors
# Rough estimate of the total number of grid points in the simulation
# (spatial x pitch-angle x energy x sign x species), used to guess how far
# the run can usefully be parallelised.
def approximate_grid_size
  case @grid_option
  when "box"
    # Box runs: include the dealiased kx (from nx) and ky (naky or from ny) counts.
    (2*(@nx-1)/3+1).to_i * (@naky||(@ny-1)/3+1).to_i * @ntheta * (2 * @ngauss + @ntheta/2).to_i * @negrid * 2 * @nspec
  else
    @ntheta * (2 * @ngauss + @ntheta/2).to_i * @negrid * 2 * @nspec
  end
end
alias :agridsze :approximate_grid_size
# Gives a guess as to the maximum number of meshpoints which
# can be parallelized (i.e. excluding ntheta)
#
# Meshpoints available for domain decomposition (the full grid excluding
# the theta direction, which cannot be parallelised).
def parallelizable_meshpoints
  total = approximate_grid_size
  total / ntheta
end
# Gives a guess as to the maximum number of nodes which can be
# can be utilized on the current system
#
# Guess at the maximum number of nodes that can be utilised on the current
# system: parallelisable meshpoints divided by processors per node.
def estimated_nodes
  meshpoints = parallelizable_meshpoints
  meshpoints / max_ppn
end
alias :estnod :estimated_nodes
# Command-line argument passed to the gs2 executable: the input file name.
def parameter_string
  format("%s.in", @run_name)
end
# Print (sorted) the GS2-specific class methods, i.e. those not inherited
# from the generic Run class.
def self.list_code_commands
  puts (methods - Run.methods).sort
end
# Hook used while building namelists: variables in the &stir namelist are
# stored by CodeRunner with a stir_ prefix to avoid name clashes.
def self.add_variable_to_namelist(namelist, var, value)
  # NB: Symbol#+ is a CodeRunner extension.
  var = :stir_ + var if namelist == :stir
  super(namelist, var, value)
end
# Banner comment written at the top of every generated GS2 input file.
# (Heredoc content is part of the output and must stay at column 0.)
def input_file_header
  run_namelist_backwards_compatibility
  <<EOF
!==============================================================================
!  GS2 INPUT FILE automatically generated by CodeRunner
!==============================================================================
!
!  GS2 is a gyrokinetic flux tube initial value turbulence code
!  which can be used for fusion or astrophysical plasmas.
!
!  See http://gyrokinetics.sourceforge.net
!
!  CodeRunner is a framework for the automated running and analysis
!  of large simulations.
!
!  See http://coderunner.sourceforge.net
!
!  Created on #{Time.now.to_s}
!      by CodeRunner version #{CodeRunner::CODE_RUNNER_VERSION.to_s}
!
!==============================================================================
EOF
end
# Banner written at the top of every generated GS2 defaults file. Note the
# heredoc includes the @defaults_file_description assignment line, which is
# executed when the defaults file is later loaded.
def self.defaults_file_header
  <<EOF1
######################################################################
#  Automatically generated defaults file for GS2 CodeRunner module   #
#                                                                    #
# This defaults file specifies a set of defaults for GS2 which are   #
# used by CodeRunner to set up and run GS2 simulations.              #
#                                                                    #
# Created #{Time.now.to_s}                                           #
#                                                                    #
######################################################################

@defaults_file_description = ""
EOF1
end
# Customize this method from Run::FortranNamelist by saying when diagnostics are not switched on.
#def namelist_text(namelist, enum = nil)
#hash = rcp.namelists[namelist]
#text = ""
#ext = enum ? "_#{enum}" : ""
#text << "!#{'='*30}\n!#{hash[:description]} #{enum} \n!#{'='*30}\n" if hash[:description]
#text << "&#{namelist}#{ext}\n"
#hash[:variables].each do |var, var_hash|
#code_var = (var_hash[:code_name] or var)
#cr_var = var+ext.to_sym
## ep cr_var, namelist
#if send(cr_var) and (not var_hash[:should_include] or eval(var_hash[:should_include]))
## var_hash[:tests].each{|tst| eval(tst).test(send(cr_var), cr_var)}
#if String::FORTRAN_BOOLS.include? send(cr_var) # var is a Fortran Bool, not really a string
#output = send(cr_var).to_s
#elsif (v = send(cr_var)).kind_of? Complex
#output = "(#{v.real}, #{v.imag})"
#else
#output = send(cr_var).inspect
#end
#text << " #{code_var} = #{output} #{var_hash[:description] ? "! #{var_hash[:description]}": ""}\n"
#elsif namelist == :gs2_diagnostics_knobs or namelist == :diagnostics
#text << " ! #{code_var} not specified --- #{var_hash[:description]}\n"
#end
#end
## # end
#text << "/\n\n"
#text
#end
# Diagnostics namelists whose unset variables are still listed (commented
# out) in the generated input file, as a prompt to the user.
@namelists_to_print_not_specified = [:gs2_diagnostics_knobs, :diagnostics]
# Matches Fortran source files scanned for namelist definitions, excluding
# the ingen and gs2_diagnostics files.
@fortran_namelist_source_file_match = /(?<!ingen|gs2_diagnostics)((\.f9[05])|(\.fpp))$/
# def self.add_code_var
# rcp.namelists.each do |namelist, hash|
# hash[:variables].each do |var, var_hash|
# p var
# var_hash[:code_name] = var_hash[:gs2_name] if var_hash[:gs2_name]
# end
# end
# save_namelists
# end
# Read physics parameters from a Miller-style GS2 input file and copy the
# relevant subset onto this run: all of &parameters; &theta_grid_parameters
# except the resolutions; g_exb from &dist_fn_knobs; shear and pressure
# gradient from &theta_grid_eik_knobs; and both species namelists, with
# their variables suffixed _1/_2.
def update_physics_parameters_from_miller_input_file(file)
  input = self.class.parse_input_file(file)
  input[:parameters].each { |var, val| set(var, val) }
  input[:theta_grid_parameters].each do |var, val|
    set(var, val) unless [:ntheta, :nperiod].include? var
  end
  input[:dist_fn_knobs].each do |var, val|
    set(var, val) if [:g_exb].include? var
  end
  input[:theta_grid_eik_knobs].each do |var, val|
    set(var, val) if [:s_hat_input, :beta_prime_input].include? var
  end
  input[:species_parameters_2].each do |var, val|
    set(:"#{var}_2", val)
  end
  input[:species_parameters_1].each do |var, val|
    set(:"#{var}_1", val)
  end
end
# Recreate the CodeRunner info file from the existing input file, working
# inside the run directory.
def renew_info_file
  Dir.chdir(@directory) { make_info_file("#{@run_name}.in") }
end
# This method overrides a method defined in heuristic_run_methods.rb in the
# CodeRunner source. It is called when CodeRunner cannot find any of its own
# files in the folder being analysed. It takes a GS2 input file and generates a
# CodeRunner info file. This means that GS2 runs which were not run using
# CodeRunner can nonetheless be analysed by it. In order for it to be called
# the -H flag must be specified on the command line.
# Heuristic analysis entry point (see the comment above): builds a CodeRunner
# info file from the single GS2 input file found in the current directory, so
# runs not launched through CodeRunner can still be analysed (-H flag).
def run_heuristic_analysis
  ep 'run_heuristic_analysis', Dir.pwd
  # NB: Dir.entries with no argument is a CodeRunner extension (defaults to '.').
  infiles = Dir.entries.grep(/^[^\.].*\.in$/)
  ep infiles
  raise CRMild.new('No input file') unless infiles[0]
  raise CRError.new("More than one input file in this directory: \n\t#{infiles}") if infiles.size > 1
  input_file = infiles[0]
  # (removed leftover debug output: ep 'asdf')
  @nprocs ||= "1"
  @executable ||= "/dev/null"
  make_info_file(input_file, false)
end
@source_code_subfolders = ['utils', 'geo', 'diagnostics']
attr_accessor :iphi00, :saturation_time #Necessary for back. comp. due to an old bug
folder = File.dirname(File.expand_path(__FILE__)) # i.e. the directory this file is in
# Species-dependent namelists are defined in a separate data file and
# evaluated here so that per-species accessors (dens_1, temp_2, ...) can be
# generated programmatically.
SPECIES_DEPENDENT_NAMELISTS = eval(File.read(folder + '/species_dependent_namelists.rb'), binding, folder + '/species_dependent_namelists.rb')
# {variable => help string} collected across all species-dependent namelists.
SPECIES_DEPENDENT_VARIABLES_WITH_HELP = SPECIES_DEPENDENT_NAMELISTS.values.inject({}) do |hash, namelist_hash|
  namelist_hash[:variables].each do |var, var_hash|
    hash[var] = var_hash[:help]
  end
  hash
end
SPECIES_DEPENDENT_VARIABLES = SPECIES_DEPENDENT_VARIABLES_WITH_HELP.keys
SPECIES_DEPENDENT_VARIABLES.each{|var| attr_accessor var} # for backwards compatibility
# Old-style ion/electron accessors, e.g. temp_i, dens_e.
['i', 'e'].each do |n|
  SPECIES_DEPENDENT_VARIABLES_WITH_HELP.each do |name, help|
    # NB: Symbol#+ is a CodeRunner extension.
    attr_accessor name + "_#{n}".to_sym #for backwards compatibility
  end
end
# Variables that were once mixed-case (e.g. :TiTe) are aliased to their
# all-lowercase accessors for backwards compatibility. Note the %w list
# contains literal ':' prefixes, stripped by the sub below.
old_vars = %w[
  :TiTe
  :Rmaj
  :R_geo
  :invLp_input
  :D_hypervisc
  :D_hyperres
  :D_hyper
  :C_par
  :C_perp
].map{|n| n.to_s.sub(/^:/, '').to_sym}
old_vars.each do |var|
  # Reader and writer both alias the lowercase accessor.
  alias_method(var, var.to_s.downcase.to_sym)
  alias_method("#{var}=".to_sym, "#{var.downcase}=".to_sym)
end
# Fill in the numbered species variables (_1 for species 1, _2 for species 2)
# from the legacy _i/_e or unsuffixed forms when the numbered forms are unset.
def run_namelist_backwards_compatibility
  SPECIES_DEPENDENT_VARIABLES.each do |var|
    # NB: Symbol#+ is a CodeRunner extension.
    set(var + "_1".to_sym, (send(var + "_1".to_sym) or send(var + "_i".to_sym) or send(var)))
    set(var + "_2".to_sym, (send(var + "_2".to_sym) or send(var + "_e".to_sym)))
  end
end
# Ask GS2 to exit gracefully by creating <run_name>.stop in the run
# directory (GS2 polls for this file).
def stop
  `touch #{@directory}/#{@run_name}.stop`
end
# Open the job's stdout, stderr and the GS2 .error/.out files read-only in vim.
def vim_output
  system "vim -Ro #{output_file} #{error_file} #@directory/#@run_name.error #@directory/#@run_name.out "
end
alias :vo :vim_output
# Open the GS2 input file read-only in vim.
def vim_input
  system "vim -Ro #@directory/#@run_name.in "
end
alias :vi :vim_input
# Open just the job's standard output read-only in vim.
def vim_stdout
  system "vim -Ro #{output_file} "
end
alias :vo1 :vim_stdout
# Parse the EFIT equilibrium file (@eqfile) and plot (1) the poloidal flux
# contours with the plasma boundary overlaid and (2) the normalised pressure,
# poloidal current and safety-factor profiles.
def plot_efit_file
  Dir.chdir(@directory) do
    text = File.read(@eqfile)
    text_lines = text.split("\n")
    # Header: grid dimensions on line 1, geometry (widths, magnetic axis) on line 2.
    first_line = text_lines[0].split(/\s+/)
    second_line = text_lines[1].split(/\s+/)
    nr = first_line[-2].to_i
    nz = first_line[-1].to_i
    rwidth = second_line[1].to_f
    zwidth = second_line[2].to_f
    rmag = second_line[3].to_f
    # Profile values are written 5 per line, so each 1-D profile spans
    # nlines lines and the 2-D psi map spans nlines_psi lines.
    nlines = (nr.to_f/5.0).ceil
    nlines_psi = ((nr*nz).to_f/5.0).ceil
    start = 5
    # Sequential blocks: f, pressure, (skipped), ff', psi map, q.
    f = text_lines[start...(start+=nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
    pres = text_lines[(start)...(start += nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
    _ = text_lines[(start)...(start += nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
    _ffprime = text_lines[(start)...(start+= nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
    psi = text_lines[(start)...(start += nlines_psi)].join(" ")
    q = text_lines[(start)...(start += nlines)].join(" ").split(nil).map{|s| s.to_f}.to_gslv
    nbound = text_lines[start...start+=1].join(" ").to_i
    # The boundary is written as alternating R, Z values.
    rz = text_lines[(start)...(start += nbound*2)].join(" ").split(/\s+/)
    rz.shift
    # De-interleave into rbound/zbound (the boolean flag alternates).
    rbound, zbound, _ = rz.inject([[], [], true]){|arr,val| arr[2] ? [arr[0].push(val), arr[1], false] : [arr[0], arr[1].push(val), true]}
    #rbound.shift
    psi = psi.split(/\s+/)
    psi.shift
    psi.map!{|v| v.to_f}
    # Reshape the flat psi list into a 2-D (nr x nz) sparse tensor.
    psi_arr = SparseTensor.new(2)
    k = 0
    for i in 0...nz
      for j in 0...nr
        psi_arr[j,i] = psi[k]
        k+=1
      end
    end
    kit = GraphKit.quick_create([((0...nr).to_a.to_gslv - nr/2 - 1 )/(nr-1)*rwidth+rmag, ((0...nz).to_a.to_gslv-nz/2 + 1)/(nz-1) * zwidth, psi_arr], [rbound, zbound, rbound.map{|r| 0}])
    kit.gp.contour = ""
    kit.gp.view = "map"
    #kit.gp.nosurface = ""
    kit.gp.cntrparam = "levels 20"
    kit.data[0].gp.with = 'l'
    kit.data[1].gp.with = 'l lw 2 nocontours'
    kit.gnuplot
    # Second figure: normalised 1-D profiles.
    kit2 = GraphKit.quick_create([pres/pres.max],[f/f.max],[q/q.max])
    kit2.data[0].title = 'Pressure/Max Pressure'
    kit2.data[1].title = 'Poloidal current function/Max poloidal current function'
    kit2.data[2].title = 'Safety factor/Max Safety factor'
    kit2.gnuplot
    #p ['f', f, 'p', pres, 'ffprime', ffprime, 'nlines', nlines, 'psi', psi, 'q', q, 'nbound', nbound, 'rbound', rbound, 'zbound', zbound]
  end
end
# File extension used by GS2 input files.
def input_file_extension
  ".in"
end
#This section defines a selection of graphs which are written to a latex file when the CR function write_report is called. To add your own, simply copy a similar-looking graph and modify it to your needs.
# The requirements to use the latex report writing is further specified in CodeRunner.
# Returns an array of [GraphKit, LaTeX caption] pairs that CodeRunner's
# write_report function embeds in the generated LaTeX report. The graph
# selection differs between linear runs (@nonlinear_mode == "off") and
# nonlinear runs. Entries wrapped in a bare `if ... then ... end` evaluate
# to nil when the guard is false and are stripped by the trailing .compact.
# The array is the value of the if/else and hence the implicit return value.
def latex_graphs
#will have a different set of graphs to look at depending on whether linear or nonlinear
if @nonlinear_mode == "off"
#make up a list of graphs that are to be included. The order of the arguments is [code to generate graphkit, LaTeX description]
graphs = [
#[(kit = phi2_by_mode_vs_time({kx_index:1, ky_index:1}); kit.xlabel=%[TEST]; kit.gp.term = "post eps color enhanced size 3.5in,2.33in"; kit.gp.output = "test.eps"; kit), "This is a test graph written into a \LaTeX file. \n\n \\myfigure{test.eps}"]
[(kit = phi2tot_vs_time_graphkit; kit.data[0].title=""; kit.gp.logscale="y"; kit.file_name = "phi2tot.eps"; kit), "Total $\\phi^2$ versus time."],
[(kit = growth_rate_vs_ky_graphkit; kit.data[0].title=""; kit.file_name = "growth_rate_vs_ky.eps"; kit), "Growth rate $\\gamma_E$ as a function of $k_y$ averaged over $k_x$ (if applicable)."],
if @grid_option=="range" then [(kit = graphkit('efnmag', {norm:true, kx_index:1, ky_index: :all}); kit.data.each{|dk| dk.title=""}; kit.gp.logscale="y"; kit.file_name = "efnmag.eps"; kit.data.shift; kit), "Normalized magnitude of the eigenfunction as a function of $\\theta$ for all $k_y$'s in the simulation."] end,
if @grid_option=="single" then [(kit = graphkit('efnmag', {norm:true, kx_index:1, ky_index:1}); kit.data.each{|dk| dk.title=""}; kit.gp.logscale="y"; kit.file_name = "efnmag.eps"; kit), "Normalized magnitude of the eigenfunction as a function of $\\theta$ for all $k_y$'s in the simulation."] end,
# compact drops the nils produced when the grid_option guards above fail
].compact
else
graphs = [
[(kit = ky_spectrum_graphkit; kit.gp.logscale="y"; kit.file_name = "ky_spectrum.eps"; kit), "$k_y$ spectrum at the final time step averaged over $k_x$."],
[(kit = kx_spectrum_graphkit; kit.gp.logscale="y"; kit.file_name = "kx_spectrum.eps"; kit), "$k_x$ spectrum at the final time step averaged over $k_y$."],
[(kit = spectrum_graphkit(no_zonal:true); kit.gp.view="map"; kit.gp.logscale="z"; kit.file_name = "spectrum.eps"; kit), "2D spectrum versus $k_x$ and $k_y$ without zonal flows."],
[(kit = hflux_tot_vs_time_graphkit; kit.file_name = "hflux_tot_vs_time.eps"; kit), "Total heat flux $Q_{tot}$ as a function of time."],
[(kit = es_heat_flux_vs_time_graphkit(species_index:1); kit.file_name = "es_heat_1_vs_time.eps"; kit), "Heat flux of species 1 versus time."],
if @nspec > 1 then [(kit = es_heat_flux_vs_time_graphkit(species_index:2); kit.file_name = "es_heat_2_vs_time.eps"; kit), "Heat flux of species 2 versus time."] end,
[(kit = es_heat_flux_vs_ky_graphkit(species_index:1); kit.gp.logscale="y" ; kit.file_name = "es_heat_1_vs_ky.eps"; kit), "Heat flux of species 1 as a function of $k_y$."],
if @nspec > 1 then [(kit = es_heat_flux_vs_ky_graphkit(species_index:2); kit.gp.logscale="y" ; kit.file_name = "es_heat_2_vs_ky.eps"; kit), "Heat flux of species 2 as a function of $k_y$."] end,
[(kit = es_heat_flux_vs_ky_vs_kx_graphkit; kit.gp.view="map" ; kit.file_name = "es_heat_vs_ky_vs_kx.eps"; kit), "2D total heat flux spectrum as a function of $k_x$ and $k_y$."],
[(kit = phi_real_space_graphkit(n0:1, thetamin:get_list_of(:theta).length/2, thetamax:get_list_of(:theta).length/2, gs2_coordinate_factor:1.0); kit.gp.view="map" ; kit.file_name = "phi_real_space.eps"; kit), "Potential fluctuations at the final time step vs GS2 $x$ and $y$ at the outboard midplane."],
[(kit = density_real_space_graphkit(n0:1, species_index:1, thetamin:get_list_of(:theta).length/2, thetamax:get_list_of(:theta).length/2, gs2_coordinate_factor:1.0); kit.gp.view="map" ; kit.file_name = "density_real_space.eps"; kit), "Density fluctuations for species 1 at the final time step vs GS2 $x$ and $y$ at the outboard midplane."],
# NOTE(review): this species-2 graph reuses file_name "density_real_space.eps",
# overwriting the species-1 figure above — confirm this is intended.
if @nspec > 1 then [(kit = density_real_space_graphkit(n0:1, species_index:2, thetamin:get_list_of(:theta).length/2, thetamax:get_list_of(:theta).length/2, gs2_coordinate_factor:1.0); kit.gp.view="map" ; kit.file_name = "density_real_space.eps"; kit), "Density fluctuations for species 2 at the final time step vs GS2 $x$ and $y$ at the outboard midplane."] end,
[(kit = es_mom_flux_vs_time_graphkit(species_index:1); kit.file_name = "es_mom_flux_1_vs_time.eps"; kit), "Momentum flux for species 1 as a function of time."],
if @nspec > 1 then [(kit = es_mom_flux_vs_time_graphkit(species_index:2); kit.file_name = "es_mom_flux_2_vs_time.eps"; kit), "Momentum flux for species 2 as a function of time."] end,
[(kit = zonal_spectrum_graphkit; kit.gp.logscale="y"; kit.file_name = "zonal_spectrum.eps"; kit), "Zonal spectrum at the final time step."],
if @write_eigenfunc == ".true." then [(kit = zf_velocity_vs_x_graphkit(theta_index:get_list_of(:theta).length/2); kit.file_name = "zonal_flow_velocity_vs_x.eps"; kit), "Zonal flow velocity avg over time versus x."] end,
if @write_eigenfunc == ".true." and @g_exb then [(kit = zf_velocity_vs_x_graphkit(theta_index:get_list_of(:theta).length/2, add_mean_flow:true); kit.file_name = "zonal_flow_velocity_vs_x_with_mean_flow.eps"; kit), "Zonal flow velocity with mean flow added avg over time versus x."] end,
].compact
end
end
# Renames everything associated with this run so that it refers to +new_id+
# instead of the current @id: GS2 output files, restart files (if any), the
# run directory itself, and the bookkeeping instance variables; finally the
# info and results files are rewritten.
#
# @param new_id the id this run should be renamed to
#
# Side effects: renames files/directories on disk; mutates @run_name,
# @restart_file, @output_file, @error_file, @directory and @id.
# NOTE(review): assumes @run_name/@restart_file/@output_file/@error_file are
# non-nil Strings — confirm against callers.
def change_id(new_id)
  # Change names for GS2 output files
  Dir.chdir(@directory) do
    # BUGFIX: Dir.entries requires a directory argument — the original
    # called Dir.entries() with no argument, which raises ArgumentError.
    # '.' is the run directory thanks to the surrounding chdir.
    Dir.entries('.').each do |f|
      if f.include? 'v_'
        new_name = f.sub "id_#{@id}", "id_#{new_id}"
        # NOTE(review): backtick mv breaks on names containing double
        # quotes; Shellwords escaping or FileUtils.mv would be safer.
        `mv "#{f}" "#{new_name}"`
        next
      end
      # Files containing 'v_' were already renamed and skipped via `next`
      # above, so only the gs2.* pattern needs checking here (the original
      # re-tested 'v_' in this condition, which could never match).
      if f.include? 'gs2.'
        new_name = f.sub "gs2.#{@id}", "gs2.#{new_id}"
        `mv "#{f}" "#{new_name}"`
        next
      end
    end
  end
  begin
    # Change names for GS2 restart files
    Dir.chdir(@directory + '/' + @restart_dir) do
      Dir.entries('.').each do |f|
        if f.include? 'v_'
          new_name = f.sub "id_#{@id}", "id_#{new_id}"
          `mv "#{f}" "#{new_name}"`
        end
      end
    end
  rescue
    # Bare rescue deliberately treats any failure (missing restart
    # directory, nil @restart_dir, ...) as "no restart files".
    eputs 'No restart files detected. Skipping...'
  end
  # Rename the run directory itself.
  new_run_dir = @directory.sub "id_#{@id}", "id_#{new_id}"
  `mv "#{@directory}" "#{new_run_dir}"`
  @directory = new_run_dir
  # Rename variables which go in info and results file
  @run_name.sub! "id_#{@id}", "id_#{new_id}"
  @restart_file.sub! "id_#{@id}", "id_#{new_id}"
  @output_file.sub! "gs2.#{@id}", "gs2.#{new_id}"
  @error_file.sub! "gs2.#{@id}", "gs2.#{new_id}"
  # Change instance variable and write info and results files again
  @id = new_id
  write_results
  write_info
end
end # class GS2
# For backwards compatibility
Gs2BoxNtRun = Gs2CycloneRun = Gs2BoxCollisionalRun = Gs2Jet42982Run = Gs2ArtunRun = Gs2LinskerRun = Gs2BarnesLinskerRun = Gs2BoxMovieRun = Gs2Run = Gs2
end # class CodeRunner
# ep CodeRunner::Gs2CycloneRun.ancestors
# Monkey-patch of Float comparisons: values read back from NetCDF files have
# limited precision, so exact comparison is replaced by tolerance-based
# comparison ("necessary because of netcdf quirks").
# NOTE(review): redefining == without also redefining eql?/hash makes floats
# behave inconsistently as Hash keys, and neither operator is transitive —
# confirm downstream code tolerates this.
class Float
def <=>(other) # necessary because of netcdf quirks
d = (self - other)
# Equal when the difference is tiny relative to self's magnitude;
# the +1 in the denominator guards against magnitudes near zero.
if d.abs / (self.abs + 1) < 1e-10
return 0
else
# Sign of the difference, as -1 or +1.
# NOTE(review): raises FloatDomainError if d is NaN — confirm NaN never
# reaches this comparison.
return (d / d.abs).to_i
end
end
# Equality within an absolute tolerance of 1e-14; non-Numerics are unequal.
def ==(other)
return false unless other.kind_of? Numeric
return (self - other).abs < 1e-14
end
end
# Monkey-patch of Hash used for graph-option hashes: converts value-based
# options (e.g. :kx) into the index-based options (e.g. :kx_index) that the
# plotting code expects, and normalises time-window options.
class Hash
# puts self
# Convert the entries named in +names+ (e.g. :kx, :ky) into corresponding
# *_index entries, using the value lists known to +run+. Mutates self.
def convert_to_index(run, *names)
if self[:strongest_non_zonal_mode]
# Find the (ky, kx) matrix indices of the strongest non-zonal mode;
# +1 converts from 0-based matrix indices to the 1-based index lists.
ky_element, kx_element = run.gsl_matrix('spectrum_over_ky_over_kx', no_zonal: true).max_index
# NOTE(review): the p calls look like leftover debug output; kept as-is
# to preserve behaviour.
p self[:kx_index] = kx_element + 1
p self[:ky_index] = ky_element + 1
self[:strongest_non_zonal_mode] = false
end
raise "No names specified" if names.size == 0
names.each do |name|
if name == :kx
# A Lagrangian kx value (or index) must first be mapped to the
# equivalent Eulerian kx index at the given ky and time index.
if lkx = self[:lagrangian_kx]
# NOTE(review): bare list(:kx) — elsewhere the run receiver is used
# (run.list(name) below); confirm `list` is in scope for Hash here.
self[:lagrangian_kx_index] = list(:kx).key(lkx)
end
if lkxi = self[:lagrangian_kx_index] ||= self[:lkx_index]
self[:kx_index] = run.eulerian_kx_index(kx_index: lkxi, ky_index: self[:ky_index], t_index: self[:t_index])
end
end
# Single-mode runs have exactly one kx and one ky.
self[:ky_index] = 1 if name == :ky and run.grid_option == "single"
self[:kx_index] = 1 if name == :kx and run.grid_option == "single"
# NOTE(review): `name + :_index` relies on a Symbol#+ extension defined
# elsewhere in CodeRunner; plain Ruby Symbols have no + method.
self[name + :_index] ||= run.list(name).key(self[name]) || (raise ("#{name} not specified"))
end
end
# Normalise time-window options: a lone :t_index becomes a one-element
# window, and the (1-based, inclusive) window is converted into 0-based
# :begin_element/:end_element suitable for array slicing (-1 = last).
def setup_time_window
self[:t_index_window] ||= [self[:t_index],self[:t_index]] if self[:t_index]
self[:begin_element], self[:end_element] = (self[:t_index_window] ? self[:t_index_window].map{|ind| ind - 1} : [0, -1])
end
end
|
require 'test_helper'
class ModifierTest < Test::Unit::TestCase
# Build two anonymous page document classes so every modifier operation is
# exercised against both key styles: one whose _id defaults to a compound
# (hash) key, and one with the standard ObjectId key.
def setup
@page_class_with_compound_key = Doc do
# Compound primary key. The stabby lambda and symbol-shorthand hash
# syntax here require Ruby >= 1.9.
key :_id, :default => -> { {n: 42, i: BSON::ObjectId.new} }
key :title, String
key :day_count, Integer, :default => 0
key :week_count, Integer, :default => 0
key :month_count, Integer, :default => 0
key :tags, Array
end
@page_class_with_standard_key = Doc do
key :title, String
key :day_count, Integer, :default => 0
key :week_count, Integer, :default => 0
key :month_count, Integer, :default => 0
key :tags, Array
end
end
# Reload +page+ from the database and assert its three counter columns
# match the expected values.
def assert_page_counts(page, day_count, week_count, month_count)
  page.reload
  checks = [[:day_count, day_count], [:week_count, week_count], [:month_count, month_count]]
  checks.each do |attribute, expected|
    page.send(attribute).should == expected
  end
end
# Assert that none of +keys+ remain on the raw Mongo document backing +page+.
def assert_keys_removed(page_class, page, *keys)
  # Fetch the raw document once — the query does not depend on the key being
  # checked (the original re-queried the collection inside the loop).
  doc = page_class.collection.find_one({:_id => page.id})
  keys.each do |key|
    doc.keys.should_not include(key)
  end
end
# Class-level modifier API: each MongoMapper class method accepts either a
# criteria hash or a list of ids, followed by the keys / modifier hash, and
# optionally an options hash ({:upsert => ..., :safe => ...}) mapped onto the
# driver's $unset/$inc/$set/$push/$pull/$addToSet/$pop update operators.
context "ClassMethods" do
setup do
@page_class = @page_class_with_standard_key
end
# unset -> $unset: removes the named keys from the matched documents.
context "unset" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and keys" do
@page_class.unset({:title => 'Home'}, :title, :tags)
assert_keys_removed @page_class, @page, :title, :tags
assert_keys_removed @page_class, @page2, :title, :tags
end
should "work with ids and keys" do
@page_class.unset(@page.id, @page2.id, :title, :tags)
assert_keys_removed @page_class, @page, :title, :tags
assert_keys_removed @page_class, @page2, :title, :tags
end
context "additional options (upsert & safe)" do
should "be able to pass upsert option" do
new_key_value = DateTime.now.to_s
@page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).tags.should == []
end
should "be able to pass safe option" do
@page_class.create(:title => "Better Be Safe than Sorry")
# Mock the driver call to verify :safe (and :multi) reach the collection.
Mongo::Collection.any_instance.expects(:update).with(
{:title => "Better Be Safe than Sorry"},
{'$unset' => {:tags => 1}},
{:safe => true, :multi => true}
)
@page_class.unset({:title => "Better Be Safe than Sorry"}, :tags, {:safe => true})
end
should "be able to pass both safe and upsert options" do
new_key_value = DateTime.now.to_s
@page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true, :safe => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).tags.should == []
end
end
end
# increment -> $inc with positive amounts.
context "increment" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and modifier hashes" do
@page_class.increment({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 1, 2, 3
assert_page_counts @page2, 1, 2, 3
end
should "work with ids and modifier hash" do
@page_class.increment(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 1, 2, 3
assert_page_counts @page2, 1, 2, 3
end
end
# decrement -> $inc with negated amounts; sign of the input is ignored.
context "decrement" do
setup do
@page = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
@page2 = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
end
should "work with criteria and modifier hashes" do
@page_class.decrement({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 0, 0, 0
assert_page_counts @page2, 0, 0, 0
end
should "work with ids and modifier hash" do
@page_class.decrement(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 0, 0, 0
assert_page_counts @page2, 0, 0, 0
end
should "decrement with positive or negative numbers" do
@page_class.decrement(@page.id, @page2.id, :day_count => -1, :week_count => 2, :month_count => -3)
assert_page_counts @page, 0, 0, 0
assert_page_counts @page2, 0, 0, 0
end
end
# set -> $set: overwrites values, typecasting keys defined on the document.
context "set" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and modifier hashes" do
@page_class.set({:title => 'Home'}, :title => 'Home Revised')
@page.reload
@page.title.should == 'Home Revised'
@page2.reload
@page2.title.should == 'Home Revised'
end
should "work with ids and modifier hash" do
@page_class.set(@page.id, @page2.id, :title => 'Home Revised')
@page.reload
@page.title.should == 'Home Revised'
@page2.reload
@page2.title.should == 'Home Revised'
end
should "typecast values before querying" do
@page_class.key :tags, Set
assert_nothing_raised do
@page_class.set(@page.id, :tags => ['foo', 'bar'].to_set)
@page.reload
@page.tags.should == Set.new(['foo', 'bar'])
end
end
should "not typecast keys that are not defined in document" do
# A raw Set is not BSON-serialisable, so this must raise.
assert_raises(BSON::InvalidDocument) do
@page_class.set(@page.id, :colors => ['red', 'green'].to_set)
end
end
should "set keys that are not defined in document" do
@page_class.set(@page.id, :colors => %w[red green])
@page.reload
@page[:colors].should == %w[red green]
end
context "additional options (upsert & safe)" do
should "be able to pass upsert option" do
new_key_value = DateTime.now.to_s
@page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
should "be able to pass safe option" do
@page_class.create(:title => "Better Be Safe than Sorry")
Mongo::Collection.any_instance.expects(:update).with(
{:title => "Better Be Safe than Sorry"},
{'$set' => {:title => "I like safety."}},
{:safe => true, :multi => true}
)
@page_class.set({:title => "Better Be Safe than Sorry"}, {:title => "I like safety."}, {:safe => true})
end
should "be able to pass both safe and upsert options" do
new_key_value = DateTime.now.to_s
@page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
end
end
# push -> $push: appends a single value to an array key.
context "push" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and modifier hashes" do
@page_class.push({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "work with ids and modifier hash" do
@page_class.push(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
# push_all -> $pushAll: appends several values at once.
context "push_all" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
@tags = %w(foo bar)
end
should "work with criteria and modifier hashes" do
@page_class.push_all({:title => 'Home'}, :tags => @tags)
@page.reload
@page.tags.should == @tags
@page2.reload
@page2.tags.should == @tags
end
should "work with ids and modifier hash" do
@page_class.push_all(@page.id, @page2.id, :tags => @tags)
@page.reload
@page.tags.should == @tags
@page2.reload
@page2.tags.should == @tags
end
end
# pull -> $pull: removes all occurrences of one value from an array key.
context "pull" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
@page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar))
end
should "work with criteria and modifier hashes" do
@page_class.pull({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(bar)
@page2.reload
@page2.tags.should == %w(bar)
end
should "be able to pull with ids and modifier hash" do
@page_class.pull(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(bar)
@page2.reload
@page2.tags.should == %w(bar)
end
end
# pull_all -> $pullAll: removes several values at once.
context "pull_all" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
@page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
end
should "work with criteria and modifier hashes" do
@page_class.pull_all({:title => 'Home'}, :tags => %w(foo bar))
@page.reload
@page.tags.should == %w(baz)
@page2.reload
@page2.tags.should == %w(baz)
end
should "work with ids and modifier hash" do
@page_class.pull_all(@page.id, @page2.id, :tags => %w(foo bar))
@page.reload
@page.tags.should == %w(baz)
@page2.reload
@page2.tags.should == %w(baz)
end
end
# add_to_set -> $addToSet: appends only when the value is absent.
context "add_to_set" do
setup do
@page = @page_class.create(:title => 'Home', :tags => 'foo')
@page2 = @page_class.create(:title => 'Home')
end
should "be able to add to set with criteria and modifier hash" do
@page_class.add_to_set({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "be able to add to set with ids and modifier hash" do
@page_class.add_to_set(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
# push_uniq: MongoMapper alias with the same de-duplicating behaviour.
context "push_uniq" do
setup do
@page = @page_class.create(:title => 'Home', :tags => 'foo')
@page2 = @page_class.create(:title => 'Home')
end
should "be able to push uniq with criteria and modifier hash" do
@page_class.push_uniq({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "be able to push uniq with ids and modifier hash" do
@page_class.push_uniq(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
# pop -> $pop: 1 removes the last element, -1 the first.
context "pop" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
end
should "be able to remove the last element the array" do
@page_class.pop(@page.id, :tags => 1)
@page.reload
@page.tags.should == %w(foo)
end
should "be able to remove the first element of the array" do
@page_class.pop(@page.id, :tags => -1)
@page.reload
@page.tags.should == %w(bar)
end
end
# Option pass-through checked once more at the increment level.
context "additional options (upsert & safe)" do
should "be able to pass upsert option" do
new_key_value = DateTime.now.to_s
@page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
should "be able to pass safe option" do
@page_class.create(:title => "Better Be Safe than Sorry")
# We are trying to increment a key of type string here which should fail
assert_raises(Mongo::OperationFailure) do
@page_class.increment({:title => "Better Be Safe than Sorry"}, {:title => 1}, {:safe => true})
end
end
should "be able to pass both safe and upsert options" do
new_key_value = DateTime.now.to_s
@page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
end
end
# Instance-level modifier API: the same operations called on a document
# instance, which supplies its own id. The whole suite runs twice — once per
# key style — by iterating over the two page classes built in setup.
context "instance methods" do
{:page_class_with_standard_key => "with standard key",
:page_class_with_compound_key => "with compound key"}.each do |page_class, description|
context description do
setup do
# Look up the page class built in the test-case setup by ivar name.
@page_class = instance_variable_get("@#{page_class}")
end
should "be able to unset with keys" do
page = @page_class.create(:title => 'Foo', :tags => %w(foo))
page.unset(:title, :tags)
assert_keys_removed @page_class, page, :title, :tags
end
should "be able to increment with modifier hashes" do
page = @page_class.create
page.increment(:day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts page, 1, 2, 3
end
should "be able to decrement with modifier hashes" do
page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
page.decrement(:day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts page, 0, 0, 0
end
should "always decrement when decrement is called whether number is positive or negative" do
page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
page.decrement(:day_count => -1, :week_count => 2, :month_count => -3)
assert_page_counts page, 0, 0, 0
end
should "be able to set with modifier hashes" do
page = @page_class.create(:title => 'Home')
page.set(:title => 'Home Revised')
page.reload
page.title.should == 'Home Revised'
end
should "be able to push with modifier hashes" do
page = @page_class.create
page.push(:tags => 'foo')
page.reload
page.tags.should == %w(foo)
end
should "be able to push_all with modifier hashes" do
page = @page_class.create
page.push_all(:tags => %w(foo bar))
page.reload
page.tags.should == %w(foo bar)
end
should "be able to pull with criteria and modifier hashes" do
page = @page_class.create(:tags => %w(foo bar))
page.pull(:tags => 'foo')
page.reload
page.tags.should == %w(bar)
end
should "be able to pull_all with criteria and modifier hashes" do
page = @page_class.create(:tags => %w(foo bar baz))
page.pull_all(:tags => %w(foo bar))
page.reload
page.tags.should == %w(baz)
end
should "be able to add_to_set with criteria and modifier hash" do
page = @page_class.create(:tags => 'foo')
page2 = @page_class.create
page.add_to_set(:tags => 'foo')
page2.add_to_set(:tags => 'foo')
page.reload
page.tags.should == %w(foo)
page2.reload
page2.tags.should == %w(foo)
end
should "be able to push uniq with criteria and modifier hash" do
page = @page_class.create(:tags => 'foo')
page2 = @page_class.create
page.push_uniq(:tags => 'foo')
page2.push_uniq(:tags => 'foo')
page.reload
page.tags.should == %w(foo)
page2.reload
page2.tags.should == %w(foo)
end
should "be able to pop with modifier hashes" do
page = @page_class.create(:tags => %w(foo bar))
page.pop(:tags => 1)
page.reload
page.tags.should == %w(foo)
end
should "be able to pass upsert option" do
page = @page_class.create(:title => "Upsert Page")
page.increment({:new_count => 1}, {:upsert => true})
page.reload
page.new_count.should == 1
end
should "be able to pass safe option" do
page = @page_class.create(:title => "Safe Page")
# We are trying to increment a key of type string here which should fail
assert_raises(Mongo::OperationFailure) do
page.increment({:title => 1}, {:safe => true})
end
end
should "be able to pass upsert and safe options" do
page = @page_class.create(:title => "Upsert and Safe Page")
page.increment({:another_count => 1}, {:upsert => true, :safe => true})
page.reload
page.another_count.should == 1
end
end
end
end
end
# Fixes for ruby 1.8.
require 'test_helper'
class ModifierTest < Test::Unit::TestCase
def setup
@page_class_with_compound_key = Doc do
key :_id, :default => lambda { {:n => 42, :i => BSON::ObjectId.new} }
key :title, String
key :day_count, Integer, :default => 0
key :week_count, Integer, :default => 0
key :month_count, Integer, :default => 0
key :tags, Array
end
@page_class_with_standard_key = Doc do
key :title, String
key :day_count, Integer, :default => 0
key :week_count, Integer, :default => 0
key :month_count, Integer, :default => 0
key :tags, Array
end
end
def assert_page_counts(page, day_count, week_count, month_count)
page.reload
page.day_count.should == day_count
page.week_count.should == week_count
page.month_count.should == month_count
end
def assert_keys_removed(page_class, page, *keys)
keys.each do |key|
doc = page_class.collection.find_one({:_id => page.id})
doc.keys.should_not include(key)
end
end
context "ClassMethods" do
setup do
@page_class = @page_class_with_standard_key
end
context "unset" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and keys" do
@page_class.unset({:title => 'Home'}, :title, :tags)
assert_keys_removed @page_class, @page, :title, :tags
assert_keys_removed @page_class, @page2, :title, :tags
end
should "work with ids and keys" do
@page_class.unset(@page.id, @page2.id, :title, :tags)
assert_keys_removed @page_class, @page, :title, :tags
assert_keys_removed @page_class, @page2, :title, :tags
end
context "additional options (upsert & safe)" do
should "be able to pass upsert option" do
new_key_value = DateTime.now.to_s
@page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).tags.should == []
end
should "be able to pass safe option" do
@page_class.create(:title => "Better Be Safe than Sorry")
Mongo::Collection.any_instance.expects(:update).with(
{:title => "Better Be Safe than Sorry"},
{'$unset' => {:tags => 1}},
{:safe => true, :multi => true}
)
@page_class.unset({:title => "Better Be Safe than Sorry"}, :tags, {:safe => true})
end
should "be able to pass both safe and upsert options" do
new_key_value = DateTime.now.to_s
@page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true, :safe => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).tags.should == []
end
end
end
context "increment" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and modifier hashes" do
@page_class.increment({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 1, 2, 3
assert_page_counts @page2, 1, 2, 3
end
should "work with ids and modifier hash" do
@page_class.increment(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 1, 2, 3
assert_page_counts @page2, 1, 2, 3
end
end
context "decrement" do
setup do
@page = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
@page2 = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
end
should "work with criteria and modifier hashes" do
@page_class.decrement({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 0, 0, 0
assert_page_counts @page2, 0, 0, 0
end
should "work with ids and modifier hash" do
@page_class.decrement(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts @page, 0, 0, 0
assert_page_counts @page2, 0, 0, 0
end
should "decrement with positive or negative numbers" do
@page_class.decrement(@page.id, @page2.id, :day_count => -1, :week_count => 2, :month_count => -3)
assert_page_counts @page, 0, 0, 0
assert_page_counts @page2, 0, 0, 0
end
end
context "set" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and modifier hashes" do
@page_class.set({:title => 'Home'}, :title => 'Home Revised')
@page.reload
@page.title.should == 'Home Revised'
@page2.reload
@page2.title.should == 'Home Revised'
end
should "work with ids and modifier hash" do
@page_class.set(@page.id, @page2.id, :title => 'Home Revised')
@page.reload
@page.title.should == 'Home Revised'
@page2.reload
@page2.title.should == 'Home Revised'
end
should "typecast values before querying" do
@page_class.key :tags, Set
assert_nothing_raised do
@page_class.set(@page.id, :tags => ['foo', 'bar'].to_set)
@page.reload
@page.tags.should == Set.new(['foo', 'bar'])
end
end
should "not typecast keys that are not defined in document" do
assert_raises(BSON::InvalidDocument) do
@page_class.set(@page.id, :colors => ['red', 'green'].to_set)
end
end
should "set keys that are not defined in document" do
@page_class.set(@page.id, :colors => %w[red green])
@page.reload
@page[:colors].should == %w[red green]
end
context "additional options (upsert & safe)" do
should "be able to pass upsert option" do
new_key_value = DateTime.now.to_s
@page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
should "be able to pass safe option" do
@page_class.create(:title => "Better Be Safe than Sorry")
Mongo::Collection.any_instance.expects(:update).with(
{:title => "Better Be Safe than Sorry"},
{'$set' => {:title => "I like safety."}},
{:safe => true, :multi => true}
)
@page_class.set({:title => "Better Be Safe than Sorry"}, {:title => "I like safety."}, {:safe => true})
end
should "be able to pass both safe and upsert options" do
new_key_value = DateTime.now.to_s
@page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
end
end
context "push" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
end
should "work with criteria and modifier hashes" do
@page_class.push({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "work with ids and modifier hash" do
@page_class.push(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
context "push_all" do
setup do
@page = @page_class.create(:title => 'Home')
@page2 = @page_class.create(:title => 'Home')
@tags = %w(foo bar)
end
should "work with criteria and modifier hashes" do
@page_class.push_all({:title => 'Home'}, :tags => @tags)
@page.reload
@page.tags.should == @tags
@page2.reload
@page2.tags.should == @tags
end
should "work with ids and modifier hash" do
@page_class.push_all(@page.id, @page2.id, :tags => @tags)
@page.reload
@page.tags.should == @tags
@page2.reload
@page2.tags.should == @tags
end
end
context "pull" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
@page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar))
end
should "work with criteria and modifier hashes" do
@page_class.pull({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(bar)
@page2.reload
@page2.tags.should == %w(bar)
end
should "be able to pull with ids and modifier hash" do
@page_class.pull(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(bar)
@page2.reload
@page2.tags.should == %w(bar)
end
end
context "pull_all" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
@page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
end
should "work with criteria and modifier hashes" do
@page_class.pull_all({:title => 'Home'}, :tags => %w(foo bar))
@page.reload
@page.tags.should == %w(baz)
@page2.reload
@page2.tags.should == %w(baz)
end
should "work with ids and modifier hash" do
@page_class.pull_all(@page.id, @page2.id, :tags => %w(foo bar))
@page.reload
@page.tags.should == %w(baz)
@page2.reload
@page2.tags.should == %w(baz)
end
end
context "add_to_set" do
setup do
@page = @page_class.create(:title => 'Home', :tags => 'foo')
@page2 = @page_class.create(:title => 'Home')
end
should "be able to add to set with criteria and modifier hash" do
@page_class.add_to_set({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "be able to add to set with ids and modifier hash" do
@page_class.add_to_set(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
context "push_uniq" do
setup do
@page = @page_class.create(:title => 'Home', :tags => 'foo')
@page2 = @page_class.create(:title => 'Home')
end
should "be able to push uniq with criteria and modifier hash" do
@page_class.push_uniq({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "be able to push uniq with ids and modifier hash" do
@page_class.push_uniq(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
context "pop" do
setup do
@page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
end
should "be able to remove the last element the array" do
@page_class.pop(@page.id, :tags => 1)
@page.reload
@page.tags.should == %w(foo)
end
should "be able to remove the first element of the array" do
@page_class.pop(@page.id, :tags => -1)
@page.reload
@page.tags.should == %w(bar)
end
end
context "additional options (upsert & safe)" do
should "be able to pass upsert option" do
new_key_value = DateTime.now.to_s
@page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
should "be able to pass safe option" do
@page_class.create(:title => "Better Be Safe than Sorry")
# We are trying to increment a key of type string here which should fail
assert_raises(Mongo::OperationFailure) do
@page_class.increment({:title => "Better Be Safe than Sorry"}, {:title => 1}, {:safe => true})
end
end
should "be able to pass both safe and upsert options" do
new_key_value = DateTime.now.to_s
@page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
@page_class.count(:title => new_key_value).should == 1
@page_class.first(:title => new_key_value).day_count.should == 1
end
end
end
# Instance-level modifier operations, run against both a standard-key and
# a compound-key page class (the classes are set up earlier in the file).
context "instance methods" do
{:page_class_with_standard_key => "with standard key",
:page_class_with_compound_key => "with compound key"}.each do |page_class, description|
context description do
setup do
# Resolve the page class fixture by instance-variable name.
@page_class = instance_variable_get("@#{page_class}")
end
should "be able to unset with keys" do
page = @page_class.create(:title => 'Foo', :tags => %w(foo))
page.unset(:title, :tags)
assert_keys_removed @page_class, page, :title, :tags
end
should "be able to increment with modifier hashes" do
page = @page_class.create
page.increment(:day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts page, 1, 2, 3
end
should "be able to decrement with modifier hashes" do
page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
page.decrement(:day_count => 1, :week_count => 2, :month_count => 3)
assert_page_counts page, 0, 0, 0
end
# decrement is expected to use the magnitude of the amount: mixing
# positive and negative values still subtracts in every case.
should "always decrement when decrement is called whether number is positive or negative" do
page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
page.decrement(:day_count => -1, :week_count => 2, :month_count => -3)
assert_page_counts page, 0, 0, 0
end
should "be able to set with modifier hashes" do
page = @page_class.create(:title => 'Home')
page.set(:title => 'Home Revised')
page.reload
page.title.should == 'Home Revised'
end
should "be able to push with modifier hashes" do
page = @page_class.create
page.push(:tags => 'foo')
page.reload
page.tags.should == %w(foo)
end
should "be able to push_all with modifier hashes" do
page = @page_class.create
page.push_all(:tags => %w(foo bar))
page.reload
page.tags.should == %w(foo bar)
end
should "be able to pull with criteria and modifier hashes" do
page = @page_class.create(:tags => %w(foo bar))
page.pull(:tags => 'foo')
page.reload
page.tags.should == %w(bar)
end
should "be able to pull_all with criteria and modifier hashes" do
page = @page_class.create(:tags => %w(foo bar baz))
page.pull_all(:tags => %w(foo bar))
page.reload
page.tags.should == %w(baz)
end
# add_to_set / push_uniq must not duplicate an existing value.
should "be able to add_to_set with criteria and modifier hash" do
page = @page_class.create(:tags => 'foo')
page2 = @page_class.create
page.add_to_set(:tags => 'foo')
page2.add_to_set(:tags => 'foo')
page.reload
page.tags.should == %w(foo)
page2.reload
page2.tags.should == %w(foo)
end
should "be able to push uniq with criteria and modifier hash" do
page = @page_class.create(:tags => 'foo')
page2 = @page_class.create
page.push_uniq(:tags => 'foo')
page2.push_uniq(:tags => 'foo')
page.reload
page.tags.should == %w(foo)
page2.reload
page2.tags.should == %w(foo)
end
should "be able to pop with modifier hashes" do
page = @page_class.create(:tags => %w(foo bar))
page.pop(:tags => 1)
page.reload
page.tags.should == %w(foo)
end
# As with the class-level tests: :upsert creates missing keys, :safe
# raises Mongo::OperationFailure on a server-side type error.
should "be able to pass upsert option" do
page = @page_class.create(:title => "Upsert Page")
page.increment({:new_count => 1}, {:upsert => true})
page.reload
page.new_count.should == 1
end
should "be able to pass safe option" do
page = @page_class.create(:title => "Safe Page")
# We are trying to increment a key of type string here which should fail
assert_raises(Mongo::OperationFailure) do
page.increment({:title => 1}, {:safe => true})
end
end
should "be able to pass upsert and safe options" do
page = @page_class.create(:title => "Upsert and Safe Page")
page.increment({:another_count => 1}, {:upsert => true, :safe => true})
page.reload
page.another_count.should == 1
end
end
end
end
end
|
require 'watir-webdriver'
require 'headless'
require 'fileutils'
require 'date' #do we need this in ruby >=2.0 ?
module Gucci
module House
class Search
  # Drives the US House lobbying-disclosure search forms
  # (disclosures.house.gov) through a headless Firefox/Watir session and
  # downloads the matching results as a "CSV" file into +download_dir+.
  #
  # opts may contain :download_dir (defaults to Dir.tmpdir), :contributions
  # (truthy selects the contributions search instead of disclosures), and
  # any of the search-field keys understood by make_params.
  attr_accessor :download_dir, :search_params

  def initialize(opts = {})
    @download_dir = opts.delete(:download_dir) || Dir.tmpdir
    @search_type = opts[:contributions] ? :contributions : :disclosures
    opts.delete(:contributions) if opts[:contributions]
    # Remove stale result files by glob, not exact name: the browser names
    # repeated downloads "Disclosures(1).CSV" etc., so deleting only the
    # exact filename could leave an old file for parse_results to read.
    prefix = @search_type == :contributions ? 'Contributions' : 'Disclosures'
    FileUtils.rm_f(Dir.glob("#{@download_dir}/#{prefix}*.CSV"))
    @browser = browser
    @search_params = validate_params(make_params(opts))
    search(@search_params)
  end

  # Boots a headless display, configures a Firefox profile that saves CSV
  # downloads into @download_dir without prompting, and returns a Watir
  # browser already pointed at the search page for @search_type.
  def browser
    headless = Headless.new
    headless.start
    profile = Selenium::WebDriver::Firefox::Profile.new
    profile['browser.download.folderList'] = 2 # 2 = use the custom download dir below
    profile['browser.download.dir'] = @download_dir
    profile["browser.helperApps.neverAsk.saveToDisk"] = "text/csv, application/octet-stream"
    driver = Selenium::WebDriver.for :firefox, :profile => profile
    browser = Watir::Browser.new(driver)
    urls = {:contributions => 'disclosures.house.gov/lc/lcsearch.aspx', :disclosures => 'disclosures.house.gov/ld/ldsearch.aspx' }
    browser.goto urls[@search_type]
    return browser
  end

  # Fills in the two-stage search form: first select the field NAMES in the
  # numbered dropdowns and submit, then fill each field's VALUE control
  # (a second dropdown for enumerated fields, a text box otherwise),
  # submit again, and request the results as a CSV download.
  def search(params)
    param_count = 0
    selected_params = {}
    params.each_key do |param|
      param_count += 1
      selected_params[param_count] = param
      param_id = "DropDownList#{param_count}"
      @browser.select_list(:id => "#{param_id}").select "#{param}"
      sleep 1 # give the ASP.NET page time to post back
    end
    @browser.button(:name => 'cmdSearch').click
    selected_params.keys.sort.each do |param_order|
      # Enumerated fields (those with a valid_params entry) get a value
      # dropdown "DropDownList<n>0"; free-text fields get "TextBox<n>".
      param_id = valid_params.keys.include?(selected_params[param_order]) ? "DropDownList#{param_order}0" : "TextBox#{param_order}"
      if valid_params.keys.include?(selected_params[param_order])
        @browser.select_list(:id => "#{param_id}").select "#{params[selected_params[param_order]]}"
      else
        @browser.text_field(:id => "#{param_id}").set "#{params[selected_params[param_order]]}"
      end
      sleep 1
    end
    @browser.button(:name => 'cmdSearch').click
    @browser.radio(:id => 'RadioButtonList1_1' ).set # for CSV download
    @browser.button(:name => 'cmdDownload').click #download a file of the search results, extension is CSV, but it's actually tab separated
  end

  # Reads the downloaded results file and returns an array of field arrays,
  # newest filing first (header row dropped, trailing column discarded).
  # Disclosure "CSV" files are really tab separated.
  def parse_results()
    filings = []
    results_file = @search_type == :contributions ? 'Contributions.CSV' : 'Disclosures.CSV'
    results_delimiter = @search_type == :contributions ? "," : "\t"
    # File.foreach closes the handle when iteration finishes; the previous
    # Kernel#open call leaked an open file descriptor.
    File.foreach("#{@download_dir}/#{results_file}") do |line|
      line.gsub!('"', '')
      filings << line.split(results_delimiter)[0..-2]
    end
    filings.shift # drop the header row
    filings.sort_by!{|e| e[0].to_i}.reverse! #largest filing_id is newest?
    return filings
  end

  # Yields each result (or returns them all when no block is given) as a
  # Gucci::Mapper keyed by the column names for the active search type.
  def results(&block)
    disclosure_keys = [:filing_id, :registrant_id, :registrant_name, :client_name, :filing_year, :filing_period, :lobbyists]
    contribution_keys = [:filing_id,:house_id,:organization_name,:remaining_items ]
    keys = @search_type == :contributions ? contribution_keys : disclosure_keys
    parsed_results = []
    parse_results.each do |row|
      # Contribution rows: rejoin everything after the third column so it
      # lines up with the single :remaining_items key.
      row = [row[0..2],row[3..-1].join(",")].flatten if @search_type == :contributions
      search_result = Gucci::Mapper[*keys.zip(row).flatten]
      if block_given?
        yield search_result
      else
        parsed_results << search_result
      end
    end
    block_given? ? nil : parsed_results
  end

  # Maps the caller's symbol options onto the form's field labels for the
  # active search type. Missing options become empty strings and are
  # pruned later by validate_params.
  def make_params(search_params)
    @search_type == :contributions ? make_contribution_params(search_params) : make_disclosure_params(search_params)
  end

  # Field labels for the lobbying-disclosures search form.
  def make_disclosure_params(search_params)
    {
      'Registrant Name' => search_params[:registrant_name] || '', #validate?
      'Client Name' => search_params[:client_name] || '', #validate?
      'House ID' => search_params[:house_id] || '', #validate?
      'Filing Period' => search_params[:filing_period] || '',
      'Filing Type' => search_params[:filing_type] || '',
      'Filing Year' => search_params[:filing_year] || '',
      'Issue Code' => search_params[:issue_code] || '',
      'Lobbyist Name' => search_params[:lobbyist_name] || '', #validate?
      'Affiliated Country' => search_params[:affiliated_country] || '',
      'Affiliated Name' => search_params[:affiliated_name] || '', #validate?
      'Amount Reported' => search_params[:amount_reported] || '', #validate?
      'Client Country' => search_params[:client_country] || '',
      'Client PPB Country' => search_params[:client_ppb_country] || '',
      'Client State' => search_params[:client_state] || '',
      'Foreign Entiry PPB Country' => search_params[:foreign_entity_ppb_country] || '', #typo in field name on House form
      'Foreign Entity Country' => search_params[:foreign_entity_country] || '',
      'Foreign Entity Name' => search_params[:foreign_entity_name] || '', #validate?
      'Foreign Entity Ownership' => search_params[:foreign_entity_ownership] || '', #validate?
      'Government Entity' => search_params[:government_entity] || '', #validate?
      'Issue Data' => search_params[:issue_data] || '', #validate?
      'Lobbyist Covered' => search_params[:lobbyist_covered] || '',
      'Lobbyist Covered Position' => search_params[:lobbyist_position] || '', #validate?
      'Lobbyists Full Name Inactive' => search_params[:lobbyist_inactive] || '', #validate?
      'Registrant Country' => search_params[:registrant_country] || '',
      'Registrant PPB Country' => search_params[:registrant_ppb_country] || ''
    }
  end

  # Field labels for the contributions search form.
  def make_contribution_params(search_params)
    {
      'Organization Name' => search_params[:organization_name] || '',
      'House ID' => search_params[:house_id] || '',
      'Filing Period' => search_params[:filing_period] || '',
      'Filing Type' => search_params[:filing_type] || '',
      'PAC' => search_params[:pac] || '',
      'Filing Year' => search_params[:filing_year] || '',
      'Lobbyist Name' => search_params[:lobbyist_name] || '',
      'Contact Name' => search_params[:contact_name] || '',
      'Senate ID' => search_params[:senate_id] || ''
    }
  end

  # Permitted values for the enumerated form fields of the active search type.
  def valid_params
    @search_type == :contributions ? VALID_CONTRIBUTION_PARAMS : VALID_DISCLOSURE_PARAMS
  end

  # Validates the assembled params hash: at least one non-empty value, at
  # most six fields (the form's limit), and every enumerated field's value
  # must be one of the permitted choices. Returns the pruned hash; raises
  # ArgumentError with a description of every violation otherwise.
  def validate_params(params)
    raise ArgumentError, "At least one search parameter must be given, possible parameters are #{valid_params.keys.join(', ')}" if params.values.all? { |x| x.to_s.empty? }
    params.delete_if { |k,v| v.to_s.empty? }
    raise ArgumentError, "No more than six search parameters are permitted" if params.keys.count > 6
    invalid_params = []
    valid_params.each_pair do |k,v|
      if params.keys.include?(k)
        invalid_params.push("#{params[k]} is invalid for #{k}, permitted values are #{v.join(', ')}\n") unless valid_params[k].include?( params[k] )
      end
    end
    raise ArgumentError, "#{invalid_params.count} error(s)\n#{invalid_params.join.chomp}" unless invalid_params.empty?
    params
  end
end
# Country code => label, matching the country dropdowns on the House form.
# Frozen (as are all constants below) so callers cannot mutate shared state.
COUNTRIES = {
  "AFG" => "AFGHANISTAN",
  "ALB" => "ALBANIA",
  "ALG" => "ALGERIA",
  "ASA" => "AMERICAN SAMOA",
  "AND" => "ANDORRA",
  "ANG" => "ANGOLA",
  "ANT" => "ANTIGUA/BARBUDA",
  "ARG" => "ARGENTINA",
  "ARM" => "ARMENIA",
  "ARU" => "ARUBA",
  "AUS" => "AUSTRALIA",
  "AUT" => "AUSTRIA",
  "AZE" => "AZERBAIJAN",
  "BAH" => "BAHAMAS",
  "BRN" => "BAHRAIN",
  "BAN" => "BANGLADESH",
  "BAR" => "BARBADOS",
  "BLR" => "BELARUS",
  "BEL" => "BELGIUM",
  "BIZ" => "BELIZE",
  "BEN" => "BENIN",
  "BER" => "BERMUDA",
  "BHU" => "BHUTAN",
  "BOL" => "BOLIVIA",
  "BIH" => "BOSNIA/HERZEGOVINA",
  "BOT" => "BOTSWANA",
  "BRA" => "BRAZIL",
  "IVB" => "BRITISH VIRGIN ISLANDS",
  "BRU" => "BRUNEI",
  "BUL" => "BULGARIA",
  "BUR" => "BURKINA FASO",
  "BDI" => "BURUNDI",
  "CAM" => "CAMBODIA",
  "CMR" => "CAMEROON",
  "CAN" => "CANADA",
  "CPV" => "CAPE VERDE",
  "CAY" => "CAYMAN ISLANDS",
  "CAF" => "CENTRAL AFRICAN REPUBLIC",
  "CHA" => "CHAD",
  "CHI" => "CHILE",
  "CHN" => "CHINA",
  "COL" => "COLOMBIA",
  "COM" => "COMOROS",
  "COD" => "CONGO, DEMOCRATIC REPBLIC OF THE",
  "CGO" => "CONGO, REPUBLIC OF THE",
  "COK" => "COOK ISLANDS",
  "CRC" => "COSTA RICA",
  "CIV" => "COTE D'IVOIRE",
  "CRO" => "CROATIA (HRVATSKA)",
  "CUB" => "CUBA",
  "CYP" => "CYPRUS",
  "CZE" => "CZECH REPUBLIC",
  "DEN" => "DENMARK",
  "DJI" => "DJIBOUTI",
  "DMA" => "DOMINICA",
  "DOM" => "DOMINICAN REPUBLIC",
  "ECU" => "ECUADOR",
  "EGY" => "EGYPT",
  "ESA" => "EL SALVADOR",
  "GEQ" => "EQUATORIAL GUINEA",
  "ERI" => "ERITREA",
  "EST" => "ESTONIA",
  "ETH" => "ETHIOPIA",
  "FIJ" => "FIJI",
  "FIN" => "FINLAND",
  "FRA" => "FRANCE",
  "GAB" => "GABON",
  "GAM" => "GAMBIA",
  "GEO" => "GEORGIA",
  "GER" => "GERMANY",
  "GHA" => "GHANA",
  "GRE" => "GREECE",
  "GRN" => "GRENADA",
  "GUM" => "GUAM",
  "GUA" => "GUATEMALA",
  "GUI" => "GUINEA",
  "GBS" => "GUINEA-BISSAU",
  "GUY" => "GUYANA",
  "HAI" => "HAITI",
  "HON" => "HONDURAS",
  "HKG" => "HONG KONG",
  "HUN" => "HUNGARY",
  "ISL" => "ICELAND",
  "IND" => "INDIA",
  "INA" => "INDONESIA",
  "IRI" => "IRAN",
  "IRQ" => "IRAQ",
  "IRL" => "IRELAND",
  "ISR" => "ISRAEL",
  "ITA" => "ITALY",
  "JAM" => "JAMAICA",
  "JPN" => "JAPAN",
  "JOR" => "JORDAN",
  "KAZ" => "KAZAKHSTAN",
  "KEN" => "KENYA",
  "PRK" => "KOREA, REPUBLIC OF",
  "KOR" => "KOREA, REPUBLIC OF",
  "KUW" => "KUWAIT",
  "KGZ" => "KYRGYSTAN",
  "LAO" => "LAOS, PEOPLES DEMOCRACTIC REPUBLIC",
  "LAT" => "LATVIA",
  "LIB" => "LEBANON",
  "LES" => "LESOTHO",
  "LBR" => "LIBERIA",
  "LBA" => "LIBYAN ARAB JAMAHIRIYA",
  "LIE" => "LIECHTENSTEIN",
  "LTU" => "LITHUANIA",
  "LUX" => "LUXEMBORG",
  "MKD" => "MACEDONIA",
  "MAD" => "MADAGASCAR",
  "MAW" => "MALAWI",
  "MAS" => "MALAYSIA",
  "MDV" => "MALDIVES",
  "MLI" => "MALI",
  "MLT" => "MALTA",
  "MTN" => "MAURITANIA",
  "MRI" => "MAURITIUS",
  "MEX" => "MEXICO",
  "FSM" => "MICRONESIA, FEDERATED STATES OF",
  "MDA" => "MOLDOVA, REPUBLIC OF",
  "MON" => "MONACO",
  "MGL" => "MONGOLIA",
  "MAR" => "MOROCCO",
  "MOZ" => "MOZAMBIQUE",
  "MYA" => "MYANMAR",
  "NAM" => "NAMIBIA",
  "NRU" => "NAURU",
  "NEP" => "NEPAL",
  "NED" => "NETHERLANDS",
  "AHO" => "NETHERLANDS ANTILLES",
  "NZL" => "NEW ZEALAND",
  "NCA" => "NICARAGUA",
  "NIG" => "NIGER",
  "NGR" => "NIGERIA",
  "NOR" => "NORWAY",
  "OMA" => "OMAN",
  "PAK" => "PAKISTAN",
  "PLW" => "PALAU",
  "PLE" => "PALESTINE",
  "PAN" => "PANAMA",
  "PNG" => "PAPUA NEW GUINEA",
  "PAR" => "PARAGUAY",
  "PER" => "PERU",
  "PHI" => "PHILIPPINES",
  "POL" => "POLAND",
  "POR" => "PORTUGAL",
  "PUR" => "PUERTO RICO",
  "QAT" => "QATAR",
  "ROM" => "ROMANIA",
  "RUS" => "RUSSIAN FEDERATION",
  "RWA" => "RWANDA",
  "SKN" => "SAINT KITTS & NEVIS",
  "LCA" => "SAINT LUCIA",
  "VIN" => "SAINT VINCENT & GRENADINES",
  "SAM" => "SAMOA",
  "SMR" => "SAN MARINO",
  "STP" => "SAO TOME & PRINCIPE",
  "KSA" => "SAUDI ARABIA",
  "SEN" => "SENEGAL",
  "SEY" => "SEYCHELLES",
  "SLE" => "SIERRA LEONE",
  "SIN" => "SINGAPORE",
  "SVK" => "SLOVAKIA (SLOVAK REPUBLIC)",
  "SLO" => "SLOVENIA",
  "SOL" => "SOLOMON ISLANDS",
  "SOM" => "SOMALIA",
  "RSA" => "SOUTH AFRICA",
  "ESP" => "SPAIN",
  "SRI" => "SRI LANKA",
  "SUD" => "SUDAN",
  "SUR" => "SURINAME",
  "SWZ" => "SWAZILAND",
  "SWE" => "SWEDEN",
  "SUI" => "SWITZERLAND",
  "SYR" => "SYRIAN ARAB REPUBLIC",
  "TRE" => "TAIWAN",
  "TJK" => "TAJIKISTAN",
  "TAN" => "TANZANIA, UNITED REPUBLIC OF",
  "THA" => "THAILAND",
  "TOG" => "TOGO",
  "TGA" => "TONGA",
  "TRI" => "TRINIDAD & TOBAGO",
  "TUN" => "TUNISIA",
  "TUR" => "TURKEY",
  "TKM" => "TURKMENISTAN",
  "UGA" => "UGANDA",
  "UKR" => "UKRAINE",
  "UAE" => "UNITED ARAB EMIRATES",
  "GBR" => "UNITED KINGDOM",
  "URU" => "URUGUAY",
  "USA" => "USA",
  "UZB" => "UZBEKISTAN",
  "VAN" => "VANUATU",
  "VEN" => "VENEZUELA",
  "VIE" => "VIETNAM",
  "ISV" => "VIRGIN ISLANDS",
  "YEM" => "YEMEN",
  "YUG" => "YUGOSLAVIA",
  "ZAM" => "ZAMBIA",
  "ZIM" => "ZIMBABWE"
}.freeze
# Filing-type code => report label, as used for the "Filing Type" dropdown.
REPORT_TYPES = {
  "MM" => "Mid-Year Report",
  "MA" => "Mid-Year Amendment Report",
  "MT" => "Mid-Year Termination Report",
  "M@" => "Mid-Year Termination Amendment Report",
  "YY" => "Year-End Report",
  "YA" => "Year-End Amendment Report",
  "YT" => "Year-End Termination Report",
  "Y@" => "Year-End Termination Amendment Report",
  "RR" => "Registration",
  "RA" => "Registration Amendment",
  "Q1" => "1st Quarter Report",
  "1A" => "1st Quarter Amendment Report",
  "1T" => "1st Quarter Termination Report",
  "1@" => "1st Quarter Termination Amendment Report",
  "Q2" => "2nd Quarter Report",
  "2A" => "2nd Quarter Amendment Report",
  "2T" => "2nd Quarter Termination Report",
  "2@" => "2nd Quarter Termination Amendment Report",
  "Q3" => "3rd Quarter Report",
  "3A" => "3rd Quarter Amendment Report",
  "3T" => "3rd Quarter Termination Report",
  "3@" => "3rd Quarter Termination Amendment Report",
  "Q4" => "4th Quarter Report",
  "4A" => "4th Quarter Amendment Report",
  "4T" => "4th Quarter Termination Report",
  "4@" => "4th Quarter Termination Amendment Report",
  "NR" => "New Registrant Using Web Form"
}.freeze
# Issue-area code => label, as used for the "Issue Code" dropdown.
ISSUES = {
  "ACC" => "ACCOUNTING",
  "ADV" => "ADVERTISING",
  "AER" => "AEROSPACE",
  "AGR" => "AGRICULTURE",
  "ALC" => "ALCOHOL AND DRUG ABUSE",
  "ANI" => "ANIMALS",
  "APP" => "APPAREL/CLOTHING INDUSTRY/TEXTILES",
  "ART" => "ARTS/ENTERTAINMENT",
  "AUT" => "AUTOMOTIVE INDUSTRY",
  "AVI" => "AVIATION/AIRCRAFT/AIRLINES",
  "BAN" => "BANKING",
  "BNK" => "BANKRUPTCY",
  "BEV" => "BEVERAGE INDUSTRY",
  "BUD" => "BUDGET/APPROPRIATIONS",
  "CHM" => "CHEMICALS/CHEMICAL INDUSTRY",
  "CIV" => "CIVIL RIGHTS/CIVIL LIBERTIES",
  "CAW" => "CLEAN AIR AND WATER (QUALITY)",
  "CDT" => "COMMODITIES (BIG TICKET)",
  "COM" => "COMMUNICATIONS/BROADCASTING/RADIO/TV",
  "CPI" => "COMPUTER INDUSTRY",
  "CON" => "CONSTITUTION",
  "CSP" => "CONSUMER ISSUES/SAFETY/PRODUCTS",
  "CPT" => "COPYRIGHT/PATENT/TRADEMARK",
  "DEF" => "DEFENSE",
  "DIS" => "DISASTER PLANNING/EMERGENCIES",
  "DOC" => "DISTRICT OF COLUMBIA",
  "ECN" => "ECONOMICS/ECONOMIC DEVELOPMENT",
  "EDU" => "EDUCATION",
  "ENG" => "ENERGY/NUCLEAR",
  "ENV" => "ENVIRONMENT/SUPERFUND",
  "FAM" => "FAMILY ISSUES/ABORTION/ADOPTION",
  "FIN" => "FINANCIAL INSTITUTIONS/INVESTMENTS/SEC",
  "FIR" => "FIREARMS/GUNS/AMMUNITION",
  "FOO" => "FOOD INDUSTRY (SAFETY, LABELING, ETC.)",
  "FOR" => "FOREIGN RELATIONS",
  "FUE" => "FUEL/GAS/OIL",
  "GAM" => "GAMING/GAMBLING/CASINO",
  "GOV" => "GOVERNMENT ISSUES",
  "HCR" => "HEALTH ISSUES",
  "HOM" => "HOMELAND SECURITY",
  "HOU" => "HOUSING",
  "IMM" => "IMMIGRATION",
  "IND" => "INDIAN/NATIVE/AMERICAN AFFAIRS",
  "INS" => "INSURANCE",
  "INT" => "INTELLIGENCE AND SURVEILLANCE",
  "LBR" => "LABOR ISSUES/ANTITRUST/WORKPLACE",
  "LAW" => "LAW ENFORCEMENT/CRIME/CRIMINAL JUSTICE",
  "MAN" => "MANUFACTURING",
  "MAR" => "MARINE/MARITIME/BOATING/FISHERIES",
  "MIA" => "MEDIA (INFORMATION/PUBLISHING)",
  "MED" => "MEDICAL/DISEASE RESEARCH/CLINICAL LABS",
  "MMM" => "MEDICARE/MEDICAID",
  "MON" => "MINTING/MONEY/GOLD STANDARD",
  "NAT" => "NATURAL RESOURCES",
  "PHA" => "PHARMACY",
  "POS" => "POSTAL",
  "RRR" => "RAILROADS",
  "RES" => "REAL ESTATE/LAND USE/CONSERVATION",
  "REL" => "RELIGION",
  "RET" => "RETIREMENT",
  "ROD" => "ROADS/HIGHWAY",
  "SCI" => "SCIENCE/TECHNOLOGY",
  "SMB" => "SMALL BUSINESS",
  "SPO" => "SPORTS/ATHLETICS",
  "TAR" => "TARIFF (MISCELLANEOUS TARIFF BILLS)",
  "TAX" => "TAXATION/INTERNAL REVENUE CODE",
  "TEC" => "TELECOMMUNICATIONS",
  "TOB" => "TOBACCO",
  "TOR" => "TORTS",
  "TRD" => "TRADE (DOMESTIC/FOREIGN)",
  "TRA" => "TRANSPORTATION",
  "TOU" => "TRAVEL/TOURISM",
  "TRU" => "TRUCKING/SHIPPING",
  "UNM" => "UNEMPLOYMENT",
  "URB" => "URBAN DEVELOPMENT/MUNICIPALITIES",
  "UTI" => "UTILITIES",
  "VET" => "VETERANS",
  "WAS" => "WASTE (HAZARD/SOLID/INTERSTATE/NUCLEAR)",
  "WEL" => "WELFARE"
}.freeze
# US state/territory postal codes accepted by the "Client State" field.
STATES = ["AL", "AK", "AS", "AZ", "AR", "CA", "CO", "CT", "DE", "DC", "FL", "GA", "GU", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "PR", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VI", "VA", "WA", "WV", "WI", "WY"].freeze
# Permitted values for each enumerated field of the disclosures form;
# fields absent here are free-text.
VALID_DISCLOSURE_PARAMS = {
  'Filing Period' => ["Mid-Year", "Year-End", "1st Quarter", "2nd Quarter", "3rd Quarter", "4th Quarter"],
  'Filing Type' => REPORT_TYPES.values,
  'Filing Year' => (2009..Date.today.year).to_a,
  'Issue Code' => ISSUES.values,
  'Affiliated Country' => COUNTRIES.values,
  'Client Country' => COUNTRIES.values,
  'Client PPB Country' => COUNTRIES.values,
  'Client State' => STATES,
  'Foreign Entiry PPB Country' => COUNTRIES.values, #typo in field name on House form
  'Foreign Entity Country' => COUNTRIES.values,
  'Lobbyist Covered' => ["True","False"],
  'Registrant Country' => COUNTRIES.values,
  'Registrant PPB Country' => COUNTRIES.values
}.freeze
# Permitted values for the contributions form: only the mid-year and
# year-end (non-termination) report types apply.
VALID_CONTRIBUTION_PARAMS = {
  'Filing Period' => ["Mid-Year", "Year-End"],
  'Filing Type' => REPORT_TYPES.values.grep(/year/i).reject{|v| v=~/termination/i},
  'Filing Year' => (2008..Date.today.year).to_a
}.freeze
end
end
Changed the stale-CSV cleanup to delete by file-name glob (e.g. "Disclosures*.CSV") instead of an exact name, so browser-renamed duplicates such as "Disclosures(1).CSV" are also removed.
require 'watir-webdriver'
require 'headless'
require 'fileutils'
require 'date' #do we need this in ruby >=2.0 ?
module Gucci
module House
# Drives the US House lobbying-disclosure search forms (disclosures.house.gov)
# through a headless Firefox/Watir session and downloads the matching
# results as a "CSV" file into +download_dir+.
class Search
attr_accessor :download_dir, :search_params
# opts may contain :download_dir (defaults to Dir.tmpdir), :contributions
# (truthy selects the contributions search instead of disclosures), and any
# search-field keys understood by make_params.
def initialize(opts={})
@download_dir = opts.delete(:download_dir) || Dir.tmpdir
@search_type = opts[:contributions] ? :contributions : :disclosures
opts.delete(:contributions) if opts[:contributions]
# Delete stale result files by glob so browser-renamed duplicates
# ("Disclosures(1).CSV" etc.) are removed too, not just the exact name.
@search_type == :contributions ? FileUtils.rm_f(Dir.glob("#{@download_dir}/Contributions*.CSV")) : FileUtils.rm_f(Dir.glob("#{@download_dir}/Disclosures*.CSV"))
@browser = browser
@search_params = validate_params(make_params(opts))
search(@search_params)
end
# Boots a headless display, configures a Firefox profile that saves CSV
# downloads into @download_dir without prompting, and returns a Watir
# browser already pointed at the search page for @search_type.
def browser
headless = Headless.new
headless.start
profile = Selenium::WebDriver::Firefox::Profile.new
# folderList 2 = use the custom download directory set below.
profile['browser.download.folderList'] = 2
profile['browser.download.dir'] = @download_dir
profile["browser.helperApps.neverAsk.saveToDisk"] = "text/csv, application/octet-stream"
driver = Selenium::WebDriver.for :firefox, :profile => profile
browser = Watir::Browser.new(driver)
urls = {:contributions => 'disclosures.house.gov/lc/lcsearch.aspx', :disclosures => 'disclosures.house.gov/ld/ldsearch.aspx' }
browser.goto urls[@search_type]
return browser
end
# Fills in the two-stage search form: first select the field NAMES in the
# numbered dropdowns, submit, then fill each field's VALUE control
# (a second dropdown for enumerated fields, a text box otherwise),
# submit again, and request the results as a CSV download.
def search(params)
param_count = 0
selected_params = {}
params.each_key do |param|
param_count += 1
selected_params[param_count] = param
param_id = "DropDownList#{param_count}"
@browser.select_list(:id => "#{param_id}").select "#{param}"
# Give the ASP.NET page time to post back before the next selection.
sleep 1
end
@browser.button(:name => 'cmdSearch').click
selected_params.keys.sort.each do |param_order|
# Enumerated fields (those listed in valid_params) get a value dropdown
# "DropDownList<n>0"; free-text fields get "TextBox<n>".
param_id = valid_params.keys.include?(selected_params[param_order]) ? "DropDownList#{param_order}0" : "TextBox#{param_order}"
if valid_params.keys.include?(selected_params[param_order])
@browser.select_list(:id => "#{param_id}").select "#{params[selected_params[param_order]]}"
else
@browser.text_field(:id => "#{param_id}").set "#{params[selected_params[param_order]]}"
end
sleep 1
end
@browser.button(:name => 'cmdSearch').click
@browser.radio(:id => 'RadioButtonList1_1' ).set # for CSV download
@browser.button(:name => 'cmdDownload').click #download a file of the search results, extension is CSV, but it's actually tab separated
end
# Reads the downloaded results file and returns an array of field arrays,
# newest filing first (header row dropped, trailing column discarded).
def parse_results()
filings = []
results_file = @search_type == :contributions ? 'Contributions.CSV' : 'Disclosures.CSV'
results_delimiter = @search_type == :contributions ? "," : "\t"
# NOTE(review): the Kernel#open handle here is never closed — consider
# File.foreach, which closes it automatically.
open("#{@download_dir}/#{results_file}","r").each_line{|l| l.gsub!('"',''); filings << l.split(results_delimiter)[0..-2]}
filings.shift
filings.sort_by!{|e| e[0].to_i}.reverse! #largest filing_id is newest?
return filings
end
# Yields each result (or returns them all when no block is given) as a
# Gucci::Mapper keyed by the column names for the active search type.
def results(&block)
disclosure_keys = [:filing_id, :registrant_id, :registrant_name, :client_name, :filing_year, :filing_period, :lobbyists]
contribution_keys = [:filing_id,:house_id,:organization_name,:remaining_items ]
keys = @search_type == :contributions ? contribution_keys : disclosure_keys
parsed_results = []
parse_results.each do |row|
# Contribution rows: rejoin everything after the third column so it
# lines up with the single :remaining_items key.
row = [row[0..2],row[3..-1].join(",")].flatten if @search_type == :contributions
# NOTE(review): ||= on a block-local is effectively plain assignment here.
search_result ||= Gucci::Mapper[*keys.zip(row).flatten]
if block_given?
yield search_result
else
parsed_results << search_result
end
end
block_given? ? nil : parsed_results
end
# Maps the caller's symbol options onto the form's field labels for the
# active search type; missing options become empty strings and are pruned
# later by validate_params.
def make_params(search_params)
@search_type == :contributions ? make_contribution_params(search_params) : make_disclosure_params(search_params)
end
# Field labels for the lobbying-disclosures search form.
def make_disclosure_params(search_params)
{
'Registrant Name' => search_params[:registrant_name] || '', #validate?
'Client Name' => search_params[:client_name] || '', #validate?
'House ID' => search_params[:house_id] || '', #validate?
'Filing Period' => search_params[:filing_period] || '',
'Filing Type' => search_params[:filing_type] || '',
'Filing Year' => search_params[:filing_year] || '',
'Issue Code' => search_params[:issue_code] || '',
'Lobbyist Name' => search_params[:lobbyist_name] || '', #validate?
'Affiliated Country' => search_params[:affiliated_country] || '',
'Affiliated Name' => search_params[:affiliated_name] || '', #validate?
'Amount Reported' => search_params[:amount_reported] || '', #validate?
'Client Country' => search_params[:client_country] || '',
'Client PPB Country' => search_params[:client_ppb_country] || '',
'Client State' => search_params[:client_state] || '',
'Foreign Entiry PPB Country' => search_params[:foreign_entity_ppb_country] || '', #typo in field name on House form
'Foreign Entity Country' => search_params[:foreign_entity_country] || '',
'Foreign Entity Name' => search_params[:foreign_entity_name] || '', #validate?
'Foreign Entity Ownership' => search_params[:foreign_entity_ownership] || '', #validate?
'Government Entity' => search_params[:government_entity] || '', #validate?
'Issue Data' => search_params[:issue_data] || '', #validate?
'Lobbyist Covered' => search_params[:lobbyist_covered] || '',
'Lobbyist Covered Position' => search_params[:lobbyist_position] || '', #validate?
'Lobbyists Full Name Inactive' => search_params[:lobbyist_inactive] || '', #validate?
'Registrant Country' => search_params[:registrant_country] || '',
'Registrant PPB Country' => search_params[:registrant_ppb_country] || ''
}
end
# Field labels for the contributions search form.
def make_contribution_params(search_params)
{
'Organization Name' => search_params[:organization_name] || '',
'House ID' => search_params[:house_id] || '',
'Filing Period' => search_params[:filing_period] || '',
'Filing Type' => search_params[:filing_type] || '',
'PAC' => search_params[:pac] || '',
'Filing Year' => search_params[:filing_year] || '',
'Lobbyist Name' => search_params[:lobbyist_name] || '',
'Contact Name' => search_params[:contact_name] || '',
'Senate ID' => search_params[:senate_id] || ''
}
end
# Permitted values for the enumerated fields of the active search type.
def valid_params
@search_type == :contributions ? VALID_CONTRIBUTION_PARAMS : VALID_DISCLOSURE_PARAMS
end
# Validates the assembled params hash: at least one non-empty value, at
# most six fields (the form's limit), and every enumerated field's value
# must be one of the permitted choices. Returns the pruned hash; raises
# ArgumentError describing every violation otherwise.
def validate_params(params)
raise ArgumentError, "At least one search parameter must be given, possible parameters are #{valid_params.keys.join(', ')}" if params.values.all? { |x| x.to_s.empty? }
params.delete_if { |k,v| v.to_s.empty? }
raise ArgumentError, "No more than six search parameters are permitted" if params.keys.count > 6
invalid_params = []
valid_params.each_pair do |k,v|
if params.keys.include?(k)
invalid_params.push("#{params[k]} is invalid for #{k}, permitted values are #{v.join(', ')}\n") unless valid_params[k].include?( params[k] )
end
end
raise ArgumentError, "#{invalid_params.count} error(s)\n#{invalid_params.join.chomp}" unless invalid_params.empty?
params
end
end
COUNTRIES = {
"AFG" => "AFGHANISTAN",
"ALB" => "ALBANIA",
"ALG" => "ALGERIA",
"ASA" => "AMERICAN SAMOA",
"AND" => "ANDORRA",
"ANG" => "ANGOLA",
"ANT" => "ANTIGUA/BARBUDA",
"ARG" => "ARGENTINA",
"ARM" => "ARMENIA",
"ARU" => "ARUBA",
"AUS" => "AUSTRALIA",
"AUT" => "AUSTRIA",
"AZE" => "AZERBAIJAN",
"BAH" => "BAHAMAS",
"BRN" => "BAHRAIN",
"BAN" => "BANGLADESH",
"BAR" => "BARBADOS",
"BLR" => "BELARUS",
"BEL" => "BELGIUM",
"BIZ" => "BELIZE",
"BEN" => "BENIN",
"BER" => "BERMUDA",
"BHU" => "BHUTAN",
"BOL" => "BOLIVIA",
"BIH" => "BOSNIA/HERZEGOVINA",
"BOT" => "BOTSWANA",
"BRA" => "BRAZIL",
"IVB" => "BRITISH VIRGIN ISLANDS",
"BRU" => "BRUNEI",
"BUL" => "BULGARIA",
"BUR" => "BURKINA FASO",
"BDI" => "BURUNDI",
"CAM" => "CAMBODIA",
"CMR" => "CAMEROON",
"CAN" => "CANADA",
"CPV" => "CAPE VERDE",
"CAY" => "CAYMAN ISLANDS",
"CAF" => "CENTRAL AFRICAN REPUBLIC",
"CHA" => "CHAD",
"CHI" => "CHILE",
"CHN" => "CHINA",
"COL" => "COLOMBIA",
"COM" => "COMOROS",
"COD" => "CONGO, DEMOCRATIC REPBLIC OF THE",
"CGO" => "CONGO, REPUBLIC OF THE",
"COK" => "COOK ISLANDS",
"CRC" => "COSTA RICA",
"CIV" => "COTE D'IVOIRE",
"CRO" => "CROATIA (HRVATSKA)",
"CUB" => "CUBA",
"CYP" => "CYPRUS",
"CZE" => "CZECH REPUBLIC",
"DEN" => "DENMARK",
"DJI" => "DJIBOUTI",
"DMA" => "DOMINICA",
"DOM" => "DOMINICAN REPUBLIC",
"ECU" => "ECUADOR",
"EGY" => "EGYPT",
"ESA" => "EL SALVADOR",
"GEQ" => "EQUATORIAL GUINEA",
"ERI" => "ERITREA",
"EST" => "ESTONIA",
"ETH" => "ETHIOPIA",
"FIJ" => "FIJI",
"FIN" => "FINLAND",
"FRA" => "FRANCE",
"GAB" => "GABON",
"GAM" => "GAMBIA",
"GEO" => "GEORGIA",
"GER" => "GERMANY",
"GHA" => "GHANA",
"GRE" => "GREECE",
"GRN" => "GRENADA",
"GUM" => "GUAM",
"GUA" => "GUATEMALA",
"GUI" => "GUINEA",
"GBS" => "GUINEA-BISSAU",
"GUY" => "GUYANA",
"HAI" => "HAITI",
"HON" => "HONDURAS",
"HKG" => "HONG KONG",
"HUN" => "HUNGARY",
"ISL" => "ICELAND",
"IND" => "INDIA",
"INA" => "INDONESIA",
"IRI" => "IRAN",
"IRQ" => "IRAQ",
"IRL" => "IRELAND",
"ISR" => "ISRAEL",
"ITA" => "ITALY",
"JAM" => "JAMAICA",
"JPN" => "JAPAN",
"JOR" => "JORDAN",
"KAZ" => "KAZAKHSTAN",
"KEN" => "KENYA",
"PRK" => "KOREA, REPUBLIC OF",
"KOR" => "KOREA, REPUBLIC OF",
"KUW" => "KUWAIT",
"KGZ" => "KYRGYSTAN",
"LAO" => "LAOS, PEOPLES DEMOCRACTIC REPUBLIC",
"LAT" => "LATVIA",
"LIB" => "LEBANON",
"LES" => "LESOTHO",
"LBR" => "LIBERIA",
"LBA" => "LIBYAN ARAB JAMAHIRIYA",
"LIE" => "LIECHTENSTEIN",
"LTU" => "LITHUANIA",
"LUX" => "LUXEMBORG",
"MKD" => "MACEDONIA",
"MAD" => "MADAGASCAR",
"MAW" => "MALAWI",
"MAS" => "MALAYSIA",
"MDV" => "MALDIVES",
"MLI" => "MALI",
"MLT" => "MALTA",
"MTN" => "MAURITANIA",
"MRI" => "MAURITIUS",
"MEX" => "MEXICO",
"FSM" => "MICRONESIA, FEDERATED STATES OF",
"MDA" => "MOLDOVA, REPUBLIC OF",
"MON" => "MONACO",
"MGL" => "MONGOLIA",
"MAR" => "MOROCCO",
"MOZ" => "MOZAMBIQUE",
"MYA" => "MYANMAR",
"NAM" => "NAMIBIA",
"NRU" => "NAURU",
"NEP" => "NEPAL",
"NED" => "NETHERLANDS",
"AHO" => "NETHERLANDS ANTILLES",
"NZL" => "NEW ZEALAND",
"NCA" => "NICARAGUA",
"NIG" => "NIGER",
"NGR" => "NIGERIA",
"NOR" => "NORWAY",
"OMA" => "OMAN",
"PAK" => "PAKISTAN",
"PLW" => "PALAU",
"PLE" => "PALESTINE",
"PAN" => "PANAMA",
"PNG" => "PAPUA NEW GUINEA",
"PAR" => "PARAGUAY",
"PER" => "PERU",
"PHI" => "PHILIPPINES",
"POL" => "POLAND",
"POR" => "PORTUGAL",
"PUR" => "PUERTO RICO",
"QAT" => "QATAR",
"ROM" => "ROMANIA",
"RUS" => "RUSSIAN FEDERATION",
"RWA" => "RWANDA",
"SKN" => "SAINT KITTS & NEVIS",
"LCA" => "SAINT LUCIA",
"VIN" => "SAINT VINCENT & GRENADINES",
"SAM" => "SAMOA",
"SMR" => "SAN MARINO",
"STP" => "SAO TOME & PRINCIPE",
"KSA" => "SAUDI ARABIA",
"SEN" => "SENEGAL",
"SEY" => "SEYCHELLES",
"SLE" => "SIERRA LEONE",
"SIN" => "SINGAPORE",
"SVK" => "SLOVAKIA (SLOVAK REPUBLIC)",
"SLO" => "SLOVENIA",
"SOL" => "SOLOMON ISLANDS",
"SOM" => "SOMALIA",
"RSA" => "SOUTH AFRICA",
"ESP" => "SPAIN",
"SRI" => "SRI LANKA",
"SUD" => "SUDAN",
"SUR" => "SURINAME",
"SWZ" => "SWAZILAND",
"SWE" => "SWEDEN",
"SUI" => "SWITZERLAND",
"SYR" => "SYRIAN ARAB REPUBLIC",
"TRE" => "TAIWAN",
"TJK" => "TAJIKISTAN",
"TAN" => "TANZANIA, UNITED REPUBLIC OF",
"THA" => "THAILAND",
"TOG" => "TOGO",
"TGA" => "TONGA",
"TRI" => "TRINIDAD & TOBAGO",
"TUN" => "TUNISIA",
"TUR" => "TURKEY",
"TKM" => "TURKMENISTAN",
"UGA" => "UGANDA",
"UKR" => "UKRAINE",
"UAE" => "UNITED ARAB EMIRATES",
"GBR" => "UNITED KINGDOM",
"URU" => "URUGUAY",
"USA" => "USA",
"UZB" => "UZBEKISTAN",
"VAN" => "VANUATU",
"VEN" => "VENEZUELA",
"VIE" => "VIETNAM",
"ISV" => "VIRGIN ISLANDS",
"YEM" => "YEMEN",
"YUG" => "YUGOSLAVIA",
"ZAM" => "ZAMBIA",
"ZIM" => "ZIMBABWE"
}
# Map of two-character House lobbying-report type codes to human-readable names.
# Code scheme: first char = period (M/Y/R/Q1-4 digit), second char = variant
# (A = amendment, T = termination, @ = termination amendment).
REPORT_TYPES = {
"MM" => "Mid-Year Report",
"MA" => "Mid-Year Amendment Report",
"MT" => "Mid-Year Termination Report",
"M@" => "Mid-Year Termination Amendment Report",
"YY" => "Year-End Report",
"YA" => "Year-End Amendment Report",
"YT" => "Year-End Termination Report",
"Y@" => "Year-End Termination Amendment Report",
"RR" => "Registration",
"RA" => "Registration Amendment",
"Q1" => "1st Quarter Report",
"1A" => "1st Quarter Amendment Report",
"1T" => "1st Quarter Termination Report",
"1@" => "1st Quarter Termination Amendment Report",
"Q2" => "2nd Quarter Report",
"2A" => "2nd Quarter Amendment Report",
"2T" => "2nd Quarter Termination Report",
"2@" => "2nd Quarter Termination Amendment Report",
"Q3" => "3rd Quarter Report",
"3A" => "3rd Quarter Amendment Report",
"3T" => "3rd Quarter Termination Report",
"3@" => "3rd Quarter Termination Amendment Report",
"Q4" => "4th Quarter Report",
"4A" => "4th Quarter Amendment Report",
"4T" => "4th Quarter Termination Report",
"4@" => "4th Quarter Termination Amendment Report",
"NR" => "New Registrant Using Web Form"
}
# Map of three-character House lobbying issue-area codes to their official
# descriptions, as used on the disclosure search form.
ISSUES = {
"ACC" => "ACCOUNTING",
"ADV" => "ADVERTISING",
"AER" => "AEROSPACE",
"AGR" => "AGRICULTURE",
"ALC" => "ALCOHOL AND DRUG ABUSE",
"ANI" => "ANIMALS",
"APP" => "APPAREL/CLOTHING INDUSTRY/TEXTILES",
"ART" => "ARTS/ENTERTAINMENT",
"AUT" => "AUTOMOTIVE INDUSTRY",
"AVI" => "AVIATION/AIRCRAFT/AIRLINES",
"BAN" => "BANKING",
"BNK" => "BANKRUPTCY",
"BEV" => "BEVERAGE INDUSTRY",
"BUD" => "BUDGET/APPROPRIATIONS",
"CHM" => "CHEMICALS/CHEMICAL INDUSTRY",
"CIV" => "CIVIL RIGHTS/CIVIL LIBERTIES",
"CAW" => "CLEAN AIR AND WATER (QUALITY)",
"CDT" => "COMMODITIES (BIG TICKET)",
"COM" => "COMMUNICATIONS/BROADCASTING/RADIO/TV",
"CPI" => "COMPUTER INDUSTRY",
"CON" => "CONSTITUTION",
"CSP" => "CONSUMER ISSUES/SAFETY/PRODUCTS",
"CPT" => "COPYRIGHT/PATENT/TRADEMARK",
"DEF" => "DEFENSE",
"DIS" => "DISASTER PLANNING/EMERGENCIES",
"DOC" => "DISTRICT OF COLUMBIA",
"ECN" => "ECONOMICS/ECONOMIC DEVELOPMENT",
"EDU" => "EDUCATION",
"ENG" => "ENERGY/NUCLEAR",
"ENV" => "ENVIRONMENT/SUPERFUND",
"FAM" => "FAMILY ISSUES/ABORTION/ADOPTION",
"FIN" => "FINANCIAL INSTITUTIONS/INVESTMENTS/SEC",
"FIR" => "FIREARMS/GUNS/AMMUNITION",
"FOO" => "FOOD INDUSTRY (SAFETY, LABELING, ETC.)",
"FOR" => "FOREIGN RELATIONS",
"FUE" => "FUEL/GAS/OIL",
"GAM" => "GAMING/GAMBLING/CASINO",
"GOV" => "GOVERNMENT ISSUES",
"HCR" => "HEALTH ISSUES",
"HOM" => "HOMELAND SECURITY",
"HOU" => "HOUSING",
"IMM" => "IMMIGRATION",
"IND" => "INDIAN/NATIVE/AMERICAN AFFAIRS",
"INS" => "INSURANCE",
"INT" => "INTELLIGENCE AND SURVEILLANCE",
"LBR" => "LABOR ISSUES/ANTITRUST/WORKPLACE",
"LAW" => "LAW ENFORCEMENT/CRIME/CRIMINAL JUSTICE",
"MAN" => "MANUFACTURING",
"MAR" => "MARINE/MARITIME/BOATING/FISHERIES",
"MIA" => "MEDIA (INFORMATION/PUBLISHING)",
"MED" => "MEDICAL/DISEASE RESEARCH/CLINICAL LABS",
"MMM" => "MEDICARE/MEDICAID",
"MON" => "MINTING/MONEY/GOLD STANDARD",
"NAT" => "NATURAL RESOURCES",
"PHA" => "PHARMACY",
"POS" => "POSTAL",
"RRR" => "RAILROADS",
"RES" => "REAL ESTATE/LAND USE/CONSERVATION",
"REL" => "RELIGION",
"RET" => "RETIREMENT",
"ROD" => "ROADS/HIGHWAY",
"SCI" => "SCIENCE/TECHNOLOGY",
"SMB" => "SMALL BUSINESS",
"SPO" => "SPORTS/ATHLETICS",
"TAR" => "TARIFF (MISCELLANEOUS TARIFF BILLS)",
"TAX" => "TAXATION/INTERNAL REVENUE CODE",
"TEC" => "TELECOMMUNICATIONS",
"TOB" => "TOBACCO",
"TOR" => "TORTS",
"TRD" => "TRADE (DOMESTIC/FOREIGN)",
"TRA" => "TRANSPORTATION",
"TOU" => "TRAVEL/TOURISM",
"TRU" => "TRUCKING/SHIPPING",
"UNM" => "UNEMPLOYMENT",
"URB" => "URBAN DEVELOPMENT/MUNICIPALITIES",
"UTI" => "UTILITIES",
"VET" => "VETERANS",
"WAS" => "WASTE (HAZARD/SOLID/INTERSTATE/NUCLEAR)",
"WEL" => "WELFARE"
}
# Two-letter USPS codes accepted by the search form: the 50 states, DC, and
# territories (AS, GU, PR, VI).
STATES = ["AL", "AK", "AS", "AZ", "AR", "CA", "CO", "CT", "DE", "DC", "FL", "GA", "GU", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "PR", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VI", "VA", "WA", "WV", "WI", "WY"]
# Whitelist of acceptable values for each disclosure-search field, keyed by the
# field names used on the House search form.
VALID_DISCLOSURE_PARAMS = {
  'Filing Period' => ["Mid-Year", "Year-End", "1st Quarter", "2nd Quarter", "3rd Quarter", "4th Quarter"],
  'Filing Type' => REPORT_TYPES.values,
  # Every year from 2009 through the current year (was `.map{ |y| y }`, an
  # identity map; `to_a` is the direct spelling).
  'Filing Year' => (2009..Date.today.year).to_a,
  'Issue Code' => ISSUES.values,
  'Affiliated Country' => COUNTRIES.values,
  'Client Country' => COUNTRIES.values,
  'Client PPB Country' => COUNTRIES.values,
  'Client State' => STATES,
  'Foreign Entiry PPB Country' => COUNTRIES.values, # typo in field name on House form -- must be kept as-is
  'Foreign Entity Country' => COUNTRIES.values,
  'Lobbyist Covered' => ["True","False"],
  'Registrant Country' => COUNTRIES.values,
  'Registrant PPB Country' => COUNTRIES.values
}
# Whitelist of acceptable values for contribution-report searches. Only
# non-termination Mid-Year/Year-End report types apply here.
VALID_CONTRIBUTION_PARAMS = {
  'Filing Period' => ["Mid-Year", "Year-End"],
  'Filing Type' => REPORT_TYPES.values.grep(/year/i).reject{|v| v=~/termination/i},
  # Every year from 2008 through the current year (was `.map{ |y| y }`, an
  # identity map; `to_a` is the direct spelling).
  'Filing Year' => (2008..Date.today.year).to_a
}
end
end
|
# frozen_string_literal: true
module Hako
# Library version string, following Semantic Versioning.
VERSION = '0.14.0'
end
Version 0.14.1
# frozen_string_literal: true
module Hako
# Library version string, following Semantic Versioning.
VERSION = '0.14.1'
end
|
# frozen_string_literal: true
module Haml
  # This class encapsulates all of the configuration options that Haml
  # understands. Please see the {file:REFERENCE.md#options Haml Reference} to
  # learn how to set the options.
  class Options
    # Class-level (not class-variable) state: valid formats and the keys
    # forwarded to {Haml::Buffer}.
    @valid_formats = [:html4, :html5, :xhtml]
    @buffer_option_keys = [:autoclose, :preserve, :attr_wrapper, :format,
      :encoding, :escape_html, :escape_filter_interpolations, :escape_attrs, :hyphenate_data_attrs, :cdata]

    class << self
      # The default option values.
      # @return Hash
      def defaults
        @defaults ||= Haml::TempleEngine.options.to_hash.merge(encoding: 'UTF-8')
      end

      # An array of valid values for the `:format` option.
      # @return Array
      attr_reader :valid_formats

      # An array of keys that will be used to provide a hash of options to
      # {Haml::Buffer}.
      # @return Hash
      attr_reader :buffer_option_keys

      # Returns a subset of defaults: those that {Haml::Buffer} cares about.
      # @return [{Symbol => Object}] The options hash
      def buffer_defaults
        @buffer_defaults ||= buffer_option_keys.inject({}) do |hash, key|
          hash.merge(key => defaults[key])
        end
      end

      # Wrap a plain options hash in an Options instance; passes an existing
      # Options instance through unchanged.
      def wrap(options)
        if options.is_a?(Options)
          options
        else
          Options.new(options)
        end
      end
    end

    # The character that should wrap element attributes. This defaults to `'`
    # (an apostrophe). Characters of this type within the attributes will be
    # escaped (e.g. by replacing them with `&apos;`) if the character is an
    # apostrophe or a quotation mark.
    attr_reader :attr_wrapper

    # A list of tag names that should be automatically self-closed if they have
    # no content. This can also contain regular expressions that match tag names
    # (or any object which responds to `#===`). Defaults to `['meta', 'img',
    # 'link', 'br', 'hr', 'input', 'area', 'param', 'col', 'base']`.
    attr_accessor :autoclose

    # The encoding to use for the HTML output.
    # This can be a string or an `Encoding` Object. Note that Haml **does not**
    # automatically re-encode Ruby values; any strings coming from outside the
    # application should be converted before being passed into the Haml
    # template. Defaults to `Encoding.default_internal`; if that's not set,
    # defaults to the encoding of the Haml template; if that's `US-ASCII`,
    # defaults to `"UTF-8"`.
    attr_reader :encoding

    # Sets whether or not to escape HTML-sensitive characters in attributes. If
    # this is true, all HTML-sensitive characters in attributes are escaped. If
    # it's set to false, no HTML-sensitive characters in attributes are escaped.
    # If it's set to `:once`, existing HTML escape sequences are preserved, but
    # other HTML-sensitive characters are escaped.
    #
    # Defaults to `true`.
    attr_accessor :escape_attrs

    # Sets whether or not to escape HTML-sensitive characters in script. If this
    # is true, `=` behaves like {file:REFERENCE.md#escaping_html `&=`};
    # otherwise, it behaves like {file:REFERENCE.md#unescaping_html `!=`}. Note
    # that if this is set, `!=` should be used for yielding to subtemplates and
    # rendering partials. See also {file:REFERENCE.md#escaping_html Escaping HTML} and
    # {file:REFERENCE.md#unescaping_html Unescaping HTML}.
    #
    # Defaults to false.
    attr_accessor :escape_html

    # Sets whether or not to escape HTML-sensitive characters in interpolated strings.
    # See also {file:REFERENCE.md#escaping_html Escaping HTML} and
    # {file:REFERENCE.md#unescaping_html Unescaping HTML}.
    #
    # Defaults to the current value of `escape_html`.
    attr_accessor :escape_filter_interpolations

    # The name of the Haml file being parsed.
    # This is only used as information when exceptions are raised. This is
    # automatically assigned when working through ActionView, so it's really
    # only useful for the user to assign when dealing with Haml programatically.
    attr_accessor :filename

    # If set to `true`, Haml will convert underscores to hyphens in all
    # {file:REFERENCE.md#html5_custom_data_attributes Custom Data Attributes} As
    # of Haml 4.0, this defaults to `true`.
    attr_accessor :hyphenate_data_attrs

    # The line offset of the Haml template being parsed. This is useful for
    # inline templates, similar to the last argument to `Kernel#eval`.
    attr_accessor :line

    # Determines the output format. The default is `:html5`. The other options
    # are `:html4` and `:xhtml`. If the output is set to XHTML, then Haml
    # automatically generates self-closing tags and wraps the output of the
    # Javascript and CSS-like filters inside CDATA. When the output is set to
    # `:html5` or `:html4`, XML prologs are ignored. In all cases, an appropriate
    # doctype is generated from `!!!`.
    #
    # If the mime_type of the template being rendered is `text/xml` then a
    # format of `:xhtml` will be used even if the global output format is set to
    # `:html4` or `:html5`.
    attr :format

    # The mime type that the rendered document will be served with. If this is
    # set to `text/xml` then the format will be overridden to `:xhtml` even if
    # it has set to `:html4` or `:html5`.
    attr_accessor :mime_type

    # A list of tag names that should automatically have their newlines
    # preserved using the {Haml::Helpers#preserve} helper. This means that any
    # content given on the same line as the tag will be preserved. For example,
    # `%textarea= "Foo\nBar"` compiles to `<textarea>Foo&#x000A;Bar</textarea>`.
    # Defaults to `['textarea', 'pre']`. See also
    # {file:REFERENCE.md#whitespace_preservation Whitespace Preservation}.
    attr_accessor :preserve

    # If set to `true`, all tags are treated as if both
    # {file:REFERENCE.md#whitespace_removal__and_ whitespace removal} options
    # were present. Use with caution as this may cause whitespace-related
    # formatting errors.
    #
    # Defaults to `false`.
    attr_reader :remove_whitespace

    # Whether or not attribute hashes and Ruby scripts designated by `=` or `~`
    # should be evaluated. If this is `true`, said scripts are rendered as empty
    # strings.
    #
    # Defaults to `false`.
    attr_accessor :suppress_eval

    # Whether to include CDATA sections around javascript and css blocks when
    # using the `:javascript` or `:css` filters.
    #
    # This option also affects the `:sass`, `:scss`, `:less` and `:coffeescript`
    # filters.
    #
    # Defaults to `false` for html, `true` for xhtml. Cannot be changed when using
    # xhtml.
    attr_accessor :cdata

    # The parser class to use. Defaults to Haml::Parser.
    attr_accessor :parser_class

    # The compiler class to use. Defaults to Haml::Compiler.
    attr_accessor :compiler_class

    # Enable template tracing. If true, it will add a 'data-trace' attribute to
    # each tag generated by Haml. The value of the attribute will be the
    # source template name and the line number from which the tag was generated,
    # separated by a colon. On Rails applications, the path given will be a
    # relative path as from the views directory. On non-Rails applications,
    # the path will be the full path.
    attr_accessor :trace

    # Key is filter name in String and value is Class to use. Defaults to {}.
    attr_accessor :filters

    # Start from the class-wide defaults, then overlay any non-nil values for
    # known keys; unknown keys are silently ignored.
    def initialize(values = {}, &block)
      defaults.each {|k, v| instance_variable_set :"@#{k}", v}
      values.each {|k, v| send("#{k}=", v) if defaults.has_key?(k) && !v.nil?}
      yield if block_given?
    end

    # Retrieve an option value.
    # @param key The value to retrieve.
    def [](key)
      send key
    end

    # Set an option value.
    # @param key The key to set.
    # @param value The value to set for the key.
    def []=(key, value)
      send "#{key}=", value
    end

    # Generate boolean-coercing predicate readers (e.g. #escape_attrs?).
    [:escape_attrs, :hyphenate_data_attrs, :remove_whitespace, :suppress_eval].each do |method|
      class_eval(<<-END)
        def #{method}?
          !! @#{method}
        end
      END
    end

    # @return [Boolean] Whether or not the format is XHTML.
    def xhtml?
      not html?
    end

    # @return [Boolean] Whether or not the format is any flavor of HTML.
    def html?
      html4? or html5?
    end

    # @return [Boolean] Whether or not the format is HTML4.
    def html4?
      format == :html4
    end

    # @return [Boolean] Whether or not the format is HTML5.
    def html5?
      format == :html5
    end

    # nil/false fall back to the class-wide default wrapper character.
    def attr_wrapper=(value)
      @attr_wrapper = value || self.class.defaults[:attr_wrapper]
    end

    # Undef :format to suppress warning. It's defined above with the `:attr`
    # macro in order to make it appear in Yard's list of instance attributes.
    undef :format
    def format
      mime_type == "text/xml" ? :xhtml : @format
    end

    def format=(value)
      unless self.class.valid_formats.include?(value)
        raise Haml::Error, "Invalid output format #{value.inspect}"
      end
      @format = value
    end

    # CDATA is forced on for xhtml output regardless of the stored setting.
    undef :cdata
    def cdata
      xhtml? || @cdata
    end

    def remove_whitespace=(value)
      @remove_whitespace = value
    end

    # Normalize the encoding to a String name; US-ASCII is upgraded to UTF-8.
    def encoding=(value)
      return unless value
      @encoding = value.is_a?(Encoding) ? value.name : value.to_s
      @encoding = "UTF-8" if @encoding.upcase == "US-ASCII"
    end

    # Returns a non-default subset of options: those that {Haml::Buffer} cares about.
    # All of the values here are such that when `#inspect` is called on the hash,
    # it can be `Kernel#eval`ed to get the same result back.
    #
    # See {file:REFERENCE.md#options the Haml options documentation}.
    #
    # @return [{Symbol => Object}] The options hash
    def for_buffer
      self.class.buffer_option_keys.inject({}) do |hash, key|
        value = public_send(key)
        if self.class.buffer_defaults[key] != value
          hash[key] = value
        end
        hash
      end
    end

    private

    # Instance-level shorthand for the class-wide defaults.
    def defaults
      self.class.defaults
    end
  end
end
:golf:
# frozen_string_literal: true
module Haml
# This class encapsulates all of the configuration options that Haml
# understands. Please see the {file:REFERENCE.md#options Haml Reference} to
# learn how to set the options.
class Options
@valid_formats = [:html4, :html5, :xhtml]
@buffer_option_keys = [:autoclose, :preserve, :attr_wrapper, :format,
:encoding, :escape_html, :escape_filter_interpolations, :escape_attrs, :hyphenate_data_attrs, :cdata]
class << self
# The default option values.
# @return Hash
def defaults
@defaults ||= Haml::TempleEngine.options.to_hash.merge(encoding: 'UTF-8')
end
# An array of valid values for the `:format` option.
# @return Array
attr_reader :valid_formats
# An array of keys that will be used to provide a hash of options to
# {Haml::Buffer}.
# @return Hash
attr_reader :buffer_option_keys
# Returns a subset of defaults: those that {Haml::Buffer} cares about.
# @return [{Symbol => Object}] The options hash
def buffer_defaults
@buffer_defaults ||= buffer_option_keys.inject({}) do |hash, key|
hash.merge(key => defaults[key])
end
end
def wrap(options)
if options.is_a?(Options)
options
else
Options.new(options)
end
end
end
# The character that should wrap element attributes. This defaults to `'`
# (an apostrophe). Characters of this type within the attributes will be
# escaped (e.g. by replacing them with `'`) if the character is an
# apostrophe or a quotation mark.
attr_reader :attr_wrapper
# A list of tag names that should be automatically self-closed if they have
# no content. This can also contain regular expressions that match tag names
# (or any object which responds to `#===`). Defaults to `['meta', 'img',
# 'link', 'br', 'hr', 'input', 'area', 'param', 'col', 'base']`.
attr_accessor :autoclose
# The encoding to use for the HTML output.
# This can be a string or an `Encoding` Object. Note that Haml **does not**
# automatically re-encode Ruby values; any strings coming from outside the
# application should be converted before being passed into the Haml
# template. Defaults to `Encoding.default_internal`; if that's not set,
# defaults to the encoding of the Haml template; if that's `US-ASCII`,
# defaults to `"UTF-8"`.
attr_reader :encoding
# Sets whether or not to escape HTML-sensitive characters in attributes. If
# this is true, all HTML-sensitive characters in attributes are escaped. If
# it's set to false, no HTML-sensitive characters in attributes are escaped.
# If it's set to `:once`, existing HTML escape sequences are preserved, but
# other HTML-sensitive characters are escaped.
#
# Defaults to `true`.
attr_accessor :escape_attrs
# Sets whether or not to escape HTML-sensitive characters in script. If this
# is true, `=` behaves like {file:REFERENCE.md#escaping_html `&=`};
# otherwise, it behaves like {file:REFERENCE.md#unescaping_html `!=`}. Note
# that if this is set, `!=` should be used for yielding to subtemplates and
# rendering partials. See also {file:REFERENCE.md#escaping_html Escaping HTML} and
# {file:REFERENCE.md#unescaping_html Unescaping HTML}.
#
# Defaults to false.
attr_accessor :escape_html
# Sets whether or not to escape HTML-sensitive characters in interpolated strings.
# See also {file:REFERENCE.md#escaping_html Escaping HTML} and
# {file:REFERENCE.md#unescaping_html Unescaping HTML}.
#
# Defaults to the current value of `escape_html`.
attr_accessor :escape_filter_interpolations
# The name of the Haml file being parsed.
# This is only used as information when exceptions are raised. This is
# automatically assigned when working through ActionView, so it's really
# only useful for the user to assign when dealing with Haml programatically.
attr_accessor :filename
# If set to `true`, Haml will convert underscores to hyphens in all
# {file:REFERENCE.md#html5_custom_data_attributes Custom Data Attributes} As
# of Haml 4.0, this defaults to `true`.
attr_accessor :hyphenate_data_attrs
# The line offset of the Haml template being parsed. This is useful for
# inline templates, similar to the last argument to `Kernel#eval`.
attr_accessor :line
# Determines the output format. The default is `:html5`. The other options
# are `:html4` and `:xhtml`. If the output is set to XHTML, then Haml
# automatically generates self-closing tags and wraps the output of the
# Javascript and CSS-like filters inside CDATA. When the output is set to
# `:html5` or `:html4`, XML prologs are ignored. In all cases, an appropriate
# doctype is generated from `!!!`.
#
# If the mime_type of the template being rendered is `text/xml` then a
# format of `:xhtml` will be used even if the global output format is set to
# `:html4` or `:html5`.
attr :format
# The mime type that the rendered document will be served with. If this is
# set to `text/xml` then the format will be overridden to `:xhtml` even if
# it has set to `:html4` or `:html5`.
attr_accessor :mime_type
# A list of tag names that should automatically have their newlines
# preserved using the {Haml::Helpers#preserve} helper. This means that any
# content given on the same line as the tag will be preserved. For example,
# `%textarea= "Foo\nBar"` compiles to `<textarea>Foo&#x000A;Bar</textarea>`.
# Defaults to `['textarea', 'pre']`. See also
# {file:REFERENCE.md#whitespace_preservation Whitespace Preservation}.
attr_accessor :preserve
# If set to `true`, all tags are treated as if both
# {file:REFERENCE.md#whitespace_removal__and_ whitespace removal} options
# were present. Use with caution as this may cause whitespace-related
# formatting errors.
#
# Defaults to `false`.
attr_reader :remove_whitespace
# Whether or not attribute hashes and Ruby scripts designated by `=` or `~`
# should be evaluated. If this is `true`, said scripts are rendered as empty
# strings.
#
# Defaults to `false`.
attr_accessor :suppress_eval
# Whether to include CDATA sections around javascript and css blocks when
# using the `:javascript` or `:css` filters.
#
# This option also affects the `:sass`, `:scss`, `:less` and `:coffeescript`
# filters.
#
# Defaults to `false` for html, `true` for xhtml. Cannot be changed when using
# xhtml.
attr_accessor :cdata
# The parser class to use. Defaults to Haml::Parser.
attr_accessor :parser_class
# The compiler class to use. Defaults to Haml::Compiler.
attr_accessor :compiler_class
# Enable template tracing. If true, it will add a 'data-trace' attribute to
# each tag generated by Haml. The value of the attribute will be the
# source template name and the line number from which the tag was generated,
# separated by a colon. On Rails applications, the path given will be a
# relative path as from the views directory. On non-Rails applications,
# the path will be the full path.
attr_accessor :trace
# Key is filter name in String and value is Class to use. Defaults to {}.
attr_accessor :filters
def initialize(values = {}, &block)
defaults.each {|k, v| instance_variable_set :"@#{k}", v}
values.each {|k, v| send("#{k}=", v) if defaults.has_key?(k) && !v.nil?}
yield if block_given?
end
# Retrieve an option value.
# @param key The value to retrieve.
def [](key)
send key
end
# Set an option value.
# @param key The key to set.
# @param value The value to set for the key.
def []=(key, value)
send "#{key}=", value
end
[:escape_attrs, :hyphenate_data_attrs, :remove_whitespace, :suppress_eval].each do |method|
class_eval(<<-END)
def #{method}?
!! @#{method}
end
END
end
# @return [Boolean] Whether or not the format is XHTML.
def xhtml?
not html?
end
# @return [Boolean] Whether or not the format is any flavor of HTML.
def html?
html4? or html5?
end
# @return [Boolean] Whether or not the format is HTML4.
def html4?
format == :html4
end
# @return [Boolean] Whether or not the format is HTML5.
def html5?
format == :html5
end
def attr_wrapper=(value)
@attr_wrapper = value || self.class.defaults[:attr_wrapper]
end
# Undef :format to suppress warning. It's defined above with the `:attr`
# macro in order to make it appear in Yard's list of instance attributes.
undef :format
def format
mime_type == "text/xml" ? :xhtml : @format
end
def format=(value)
unless self.class.valid_formats.include?(value)
raise Haml::Error, "Invalid output format #{value.inspect}"
end
@format = value
end
undef :cdata
def cdata
xhtml? || @cdata
end
def remove_whitespace=(value)
@remove_whitespace = value
end
def encoding=(value)
return unless value
@encoding = value.is_a?(Encoding) ? value.name : value.to_s
@encoding = "UTF-8" if @encoding.upcase == "US-ASCII"
end
# Returns a non-default subset of options: those that {Haml::Buffer} cares about.
# All of the values here are such that when `#inspect` is called on the hash,
# it can be `Kernel#eval`ed to get the same result back.
#
# See {file:REFERENCE.md#options the Haml options documentation}.
#
# @return [{Symbol => Object}] The options hash
def for_buffer
self.class.buffer_option_keys.inject({}) do |hash, key|
value = public_send(key)
if self.class.buffer_defaults[key] != value
hash[key] = value
end
hash
end
end
private
def defaults
self.class.defaults
end
end
end
|
require "hamster/immutable"
require "hamster/undefined"
require "hamster/enumerable"
require "hamster/trie"
require "hamster/set"
require "hamster/vector"
require "hamster/nested"
module Hamster
# Convenience constructor: with no pairs and no block, reuse the shared
# EmptyHash singleton; otherwise build a fresh Hash.
def self.hash(pairs = nil, &block)
  if pairs.nil? && block.nil?
    EmptyHash
  else
    Hash.new(pairs, &block)
  end
end
# A `Hamster::Hash` maps from a set of unique keys to corresponding values,
# much like a dictionary maps from words to definitions. Given a key, it can
# efficiently store and retrieve values. Looking up a key given its value is also
# possible, but less efficient. If an existing key is stored again, the new value
# will replace that which was previously stored.
#
# It behaves much like Ruby's built-in Hash, which we will call RubyHash
# for clarity. Like RubyHash, `Hamster::Hash` uses `#hash` and `#eql?` to define
# equality between keys. Keys with the same `#hash` code, but which are not `#eql?`
# to each other, can coexist in the same `Hamster::Hash`. To retrieve a previously
# stored value, the key provided must have the same `#hash` code and be `#eql?`
# to the one used for storage.
#
# A `Hamster::Hash` can be created in several ways:
#
# Hamster.hash('Jane Doe' => 10, 'Jim Doe' => 6)
# Hamster::Hash.new(font_size: 10, font_family: 'Arial')
# Hamster::Hash[first_name: 'John', last_name: 'Smith']
#
# If you want to write your own class which inherits from `Hamster::Hash`, you can
# use either of the latter 2 forms of initialization.
#
# Note that any `Enumerable` object which yields 2-element `[key, value]` arrays
# can be used to initialize a `Hamster::Hash`. So this is also possible:
#
# Hamster::Hash.new([[:first_name, 'John'], [:last_name, 'Smith']])
#
# Like RubyHash, key/value pairs can be added using {#store}. Unlike RubyHash,
# a new hash is returned, and the existing one is left unchanged:
#
# hash = Hamster::Hash[a: 100, b: 200]
# hash.store(:c, 500) # => Hamster::Hash[:a => 100, :b => 200, :c => 500]
# hash # => Hamster::Hash[:a => 100, :b => 200]
#
# You also use {#put}. The difference is that {#put} can optionally take a block which
# is used to calculate the value to be stored:
#
# hash.put(:a) { hash[:b] + 100 } # => Hamster::Hash[:a => 300, :b => 200]
#
# Since it is immutable, all methods which you might expect to "modify" a `Hamster::Hash`
# actually return a new hash and leave the existing one unchanged. This means that the
# `hash[key] = value` syntax used with RubyHashes *cannot* be used with `Hamster::Hash`.
#
# While a `Hamster::Hash` can iterate over its keys (or values), it does not
# guarantee any specific iteration order (unlike RubyHash). Likewise, methods like
# {#flatten} do not guarantee which order the returned key/value pairs will appear
# in.
#
# Like RubyHash, a `Hamster::Hash` can have a default block which is used when
# looking up a key which does not exist in the hash. Unlike RubyHash, the default
# block will only be passed the missing key, not the hash itself:
#
# hash = Hamster::Hash.new { |n| n * 10 }
# hash[5] # => 50
#
# A default block can only be set when creating a `Hamster::Hash` with `Hamster::Hash.new` or
# {Hamster.hash Hamster.hash}, not with {Hamster::Hash.[] Hamster::Hash[]}. Default blocks
# do not survive marshalling and unmarshalling.
#
class Hash
include Immutable
include Enumerable
class << self
  # Create a new `Hash` populated with the given key/value pairs.
  #
  # @example
  #   Hamster::Hash["A" => 1, "B" => 2]
  #   # => Hamster::Hash["A" => 1, "B" => 2]
  #   Hamster::Hash[["A", 1], ["B", 2]]
  #   # => Hamster::Hash["A" => 1, "B" => 2]
  #
  # @return [Hash]
  def [](pairs = nil)
    return empty if pairs.nil? || pairs.empty?
    new(pairs)
  end

  # Return an empty `Hash`. Memoized per class, so a subclass gets an empty
  # instance of itself.
  #
  # @return [Hash]
  def empty
    @empty ||= new
  end

  # "Raw" allocation of a new `Hash`: bypasses #initialize so a modified
  # {Trie} can be wrapped cheaply.
  #
  # @return [Hash]
  # @private
  def alloc(trie = EmptyTrie, block = nil)
    allocate.tap do |hash|
      hash.instance_variable_set(:@trie, trie)
      hash.instance_variable_set(:@default, block)
    end
  end
end
# Remember the default block and build the backing trie (empty when no
# pairs were supplied).
def initialize(pairs = nil, &block)
  @default = block
  @trie = if pairs
    Trie[pairs]
  else
    EmptyTrie
  end
end
# Return the default block if there is one. Otherwise, return `nil`.
#
# @return [Proc]
# Bare reader for the default block captured by #initialize (nil if none).
def default_proc
  @default
end
# Return the number of key/value pairs in this `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].size # => 3
#
# @return [Integer]
# Entry count, delegated to the backing trie.
def size
  @trie.size
end
alias :length :size
# Return `true` if this `Hash` contains no key/value pairs.
#
# @return [Boolean]
# True when the backing trie holds no entries.
def empty?
  @trie.empty?
end
# Return `true` if the given key object is present in this `Hash`. More precisely,
# return `true` if a key with the same `#hash` code, and which is also `#eql?`
# to the given key object is present.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].key?("B") # => true
#
# @param key [Object] The key to check for
# @return [Boolean]
# Membership test, delegated to the backing trie (which uses the key's
# #hash / #eql? identity).
def key?(key)
  @trie.key?(key)
end
alias :has_key? :key?
alias :include? :key?
alias :member? :key?
# Return `true` if this `Hash` has one or more keys which map to the provided value.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].value?(2) # => true
#
# @param value [Object] The value to check for
# @return [Boolean]
# Linear scan over all pairs; bails out on the first value that `==`-matches.
def value?(value)
  each do |_key, stored|
    return true if value == stored
  end
  false
end
alias :has_value? :value?
# Retrieve the value corresponding to the provided key object. If not found, and
# this `Hash` has a default block, the default block is called to provide the
# value.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h["B"] # => 2
# h.get("B") # => 2
# h.get("Elephant") # => nil
#
# # Hamster Hash with a default proc:
# h = Hamster::Hash.new("A" => 1, "B" => 2, "C" => 3) { |key| key.size }
# h.get("B") # => 2
# h.get("Elephant") # => 8
#
# @param key [Object] The key to look up
# @return [Object]
# Look the key up in the trie; on a miss, consult the default block if one
# was given (otherwise nil).
def get(key)
  entry = @trie.get(key)
  return entry[1] if entry
  @default.call(key) if @default
end
alias :[] :get
# Retrieve the value corresponding to the given key object, or use the provided
# default value or block, or otherwise raise a `KeyError`.
#
# @overload fetch(key)
# Retrieve the value corresponding to the given key, or raise a `KeyError`
# if it is not found.
# @param key [Object] The key to look up
# @overload fetch(key) { |key| ... }
# Retrieve the value corresponding to the given key, or call the optional
# code block (with the missing key) and get its return value.
# @yield [key] The key which was not found
# @yieldreturn [Object] Object to return since the key was not found
# @param key [Object] The key to look up
# @overload fetch(key, default)
# Retrieve the value corresponding to the given key, or else return
# the provided `default` value.
# @param key [Object] The key to look up
# @param default [Object] Object to return if the key is not found
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.fetch("B") # => 2
# h.fetch("Elephant") # => KeyError: key not found: "Elephant"
#
# # with a default value:
# h.fetch("B", 99) # => 2
# h.fetch("Elephant", 99) # => 99
#
# # with a block:
# h.fetch("B") { |key| key.size } # => 2
# h.fetch("Elephant") { |key| key.size } # => 8
#
# @return [Object]
# Strict lookup with fallbacks tried in order: stored entry, caller's
# block, explicit default argument; otherwise KeyError.
def fetch(key, default = Undefined)
  entry = @trie.get(key)
  return entry[1] if entry
  return yield(key) if block_given?
  return default if default != Undefined
  raise KeyError, "key not found: #{key.inspect}"
end
# Return a new `Hash` with the existing key/value associations, plus an association
# between the provided key and value. If an equivalent key is already present, its
# associated value will be replaced with the provided one.
#
# If the `value` argument is missing, but an optional code block is provided,
# it will be passed the existing value (or `nil` if there is none) and what it
# returns will replace the existing value. This is useful for "transforming"
# the value associated with a certain key.
#
# Avoid mutating objects which are used as keys. `String`s are an exception --
# unfrozen `String`s which are used as keys are internally duplicated and
# frozen.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2]
# h.put("C", 3)
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.put("B") { |value| value * 10 }
# # => Hamster::Hash["A" => 1, "B" => 20]
#
# @param key [Object] The key to store
# @param value [Object] The value to associate it with
# @yield [value] The previously stored value
# @yieldreturn [Object] The new value to store
# @return [Hash]
# Associate key with value (or with the block's transformation of the
# current value). Returns self when the trie is structurally unchanged.
def put(key, value = yield(get(key)))
  updated = @trie.put(key, value)
  return self if updated.equal?(@trie)
  self.class.alloc(updated, @default)
end
# Return a new `Hash` with a deeply nested value modified to the result of
# the given code block. When traversing the nested `Hash`es and `Vector`s,
# non-existing keys are created with empty `Hash` values.
#
# The code block receives the existing value of the deeply nested key (or
# `nil` if it doesn't exist). This is useful for "transforming" the value
# associated with a certain key.
#
# Note that the original `Hash` and sub-`Hash`es and sub-`Vector`s are left
# unmodified; new data structure copies are created along the path wherever
# needed.
#
# @example
# hash = Hamster::Hash["a" => Hamster::Hash["b" => Hamster::Hash["c" => 42]]]
# hash.update_in("a", "b", "c") { |value| value + 5 }
# # => Hamster::Hash["a" => Hamster::Hash["b" => Hamster::Hash["c" => 47]]]
#
# @param key_path [Object(s)] List of keys which form the path to the key to be modified
# @yield [value] The previously stored value
# @yieldreturn [Object] The new value to store
# @return [Hash]
def update_in(*key_path, &block)
  raise ArgumentError, "must have at least one key in path" if key_path.empty?
  key, *rest = key_path
  # At the last key, transform the current value; otherwise recurse into the
  # nested structure, substituting EmptyHash for missing intermediate keys.
  new_value =
    if rest.empty?
      block.call(get(key))
    else
      fetch(key, EmptyHash).update_in(*rest, &block)
    end
  put(key, new_value)
end
# Equivalent to {#put}; provided to match RubyHash's API. Unlike {#put},
# there is no block form here — `value` is required.
#
# Avoid mutating objects which are used as keys. `String`s are an exception --
# unfrozen `String`s which are used as keys are internally duplicated and
# frozen.
#
# @example
#   Hamster::Hash["A" => 1, "B" => 2].store("C", 3)
#   # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @see #put
# @param key [Object] The key to store
# @param value [Object] The value to associate it with
# @return [Hash]
def store(key, value)
  put(key, value)
end
# Return a new `Hash` with the association for `key` removed. If `key` is not
# present, return `self`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].delete("B")
# # => Hamster::Hash["A" => 1, "C" => 3]
#
# @param key [Object] The key to remove
# @return [Hash]
def delete(key)
  # When `key` is absent the trie comes back unchanged, and derive_new_hash
  # then returns `self` (see #derive_new_hash), as documented above.
  derive_new_hash(@trie.delete(key))
end
# Call the block once for each key/value pair in this `Hash`, passing the key/value
# pair as parameters. No specific iteration order is guaranteed (but the order will
# be stable for any particular `Hash`.)
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].each { |k, v| puts "k=#{k} v=#{v}" }
#
# k=A v=1
# k=B v=2
# k=C v=3
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @return [self]
# Iteration is delegated directly to the backing trie; `self` is returned
# to allow chaining, mirroring RubyHash#each.
def each(&block)
  return to_enum unless block_given?
  @trie.each(&block)
  self
end
alias :each_pair :each
# Call the block once for each key/value pair in this `Hash`, passing the key/value
# pair as parameters. Iteration order will be the opposite of {#each}.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].reverse_each { |k, v| puts "k=#{k} v=#{v}" }
#
# k=C v=3
# k=B v=2
# k=A v=1
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @return [self]
# Reverse iteration is delegated to the trie; `self` is returned for chaining.
def reverse_each(&block)
  return enum_for(:reverse_each) unless block_given?
  @trie.reverse_each(&block)
  self
end
# Call the block once for each key/value pair in this `Hash`, passing the key as a
# parameter.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].each_key { |k| puts "k=#{k}" }
#
# k=A
# k=B
# k=C
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @return [self]
# Yields only the key half of each trie entry.
def each_key
  return enum_for(:each_key) unless block_given?
  @trie.each { |key, _value| yield key }
  self
end
# Call the block once for each key/value pair in this `Hash`, passing the value as a
# parameter.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].each_value { |v| puts "v=#{v}" }
#
# v=1
# v=2
# v=3
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @return [self]
# Yields only the value half of each trie entry.
def each_value
  return enum_for(:each_value) unless block_given?
  @trie.each { |_key, value| yield value }
  self
end
# Call the block once for each key/value pair in this `Hash`, passing the key/value
# pair as parameters. The block should return a `[key, value]` array each time.
# All the returned `[key, value]` arrays will be gathered into a new `Hash`.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.map { |k, v| ["new-#{k}", v * v] }
# # => Hash["new-C" => 9, "new-B" => 4, "new-A" => 1]
#
# @return [Hash]
# Enumerable#map (via super) gathers the block results into an Array of
# [key, value] pairs, which seeds a new Hash that keeps the default block.
def map
  return enum_for(:map) unless block_given?
  empty? ? self : self.class.new(super, &@default)
end
alias :collect :map
# Return a new `Hash` with all the key/value pairs for which the block returns true.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.select { |k, v| v >= 2 }
# # => Hamster::Hash["B" => 2, "C" => 3]
#
# @return [Hash]
# Filtering is delegated to Trie#select; derive_new_hash keeps the default
# block and returns self when nothing was rejected.
def select(&block)
  block ? derive_new_hash(@trie.select(&block)) : enum_for(:select)
end
alias :find_all :select
alias :keep_if :select
# Yield `[key, value]` pairs until one is found for which the block returns true.
# Return that `[key, value]` pair. If the block never returns true, return `nil`.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.find { |k, v| v.even? }
# # => ["B", 2]
#
# @return [Array]
def find
  return enum_for(:find) unless block_given?
  # Early-return from inside the block short-circuits the iteration.
  each { |pair| return pair if yield(pair) }
  nil
end
alias :detect :find
# Return a new `Hash` containing all the key/value pairs from this `Hash` and
# `other`. If no block is provided, the value for entries with colliding keys
# will be that from `other`. Otherwise, the value for each duplicate key is
# determined by calling the block with the key, its value in this `Hash`, and
# its value in `other`.
#
# `other` can be a `Hamster::Hash`, a built-in Ruby `Hash`, or any `Enumerable`
# object which yields `[key, value]` pairs.
#
# @example
# h1 = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h2 = Hamster::Hash["C" => 70, "D" => 80]
# h1.merge(h2)
# # => Hamster::Hash["C" => 70, "A" => 1, "D" => 80, "B" => 2]
# h1.merge(h2) { |key, v1, v2| v1 + v2 }
# # => Hamster::Hash["C" => 73, "A" => 1, "D" => 80, "B" => 2]
#
# @param other [Enumerable] The collection to merge with
# @yieldparam key [Object] The key which was present in both collections
# @yieldparam my_value [Object] The associated value from this `Hash`
# @yieldparam other_value [Object] The associated value from the other collection
# @yieldreturn [Object] The value to associate this key with in the new `Hash`
# @return [Hash]
def merge(other)
  # Without a conflict-resolution block, a bulk insert lets `other` win.
  return derive_new_hash(@trie.bulk_put(other)) unless block_given?
  # With a block, fold `other` in one pair at a time so colliding keys can
  # be resolved by yielding (key, ours, theirs).
  merged = other.reduce(@trie) do |memo, (key, value)|
    existing = memo.get(key)
    if existing
      memo.put(key, yield(key, existing[1], value))
    else
      memo.put(key, value)
    end
  end
  derive_new_hash(merged)
end
# Return a {Vector} which contains all the `[key, value]` pairs in this `Hash`
# as 2-element Arrays, either in their natural sorted order as determined by
# `#<=>`, or if an optional block is supplied, by using the block as a comparator.
# See `Enumerable#sort`.
#
# @example
# h = Hamster::Hash["Dog" => 1, "Elephant" => 2, "Lion" => 3]
# h.sort { |(k1, v1), (k2, v2)| k1.size <=> k2.size }
# # => Hamster::Vector[["Dog", 1], ["Lion", 3], ["Elephant", 2]]
#
# @return [Vector]
def sort
  # Enumerable#sort (via super) yields [key, value] pairs and returns a
  # sorted Array, which is wrapped in an immutable Vector.
  Vector.new(super)
end
# Return a {Vector} which contains all the `[key, value]` pairs in this `Hash`
# as 2-element Arrays. The order which the pairs will appear in is determined by
# passing each pair to the code block to obtain a sort key object, and comparing
# the sort keys using `#<=>`.
# See `Enumerable#sort_by`.
#
# @example
# h = Hamster::Hash["Dog" => 1, "Elephant" => 2, "Lion" => 3]
# h.sort_by { |key, value| key.size }
# # => Hamster::Vector[["Dog", 1], ["Lion", 3], ["Elephant", 2]]
#
# @return [Vector]
def sort_by
  # Enumerable#sort_by (via super) returns a sorted Array of [key, value]
  # pairs, wrapped here in an immutable Vector.
  Vector.new(super)
end
# Return a new `Hash` with the associations for all of the given `keys` removed.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.except("A", "C") # => Hamster::Hash["B" => 2]
#
# @param keys [Array] The keys to remove
# @return [Hash]
# Successively delete each unwanted key; #delete returns self for absent
# keys, so missing keys cost nothing.
def except(*keys)
  keys.inject(self) { |result, key| result.delete(key) }
end
# Return a new `Hash` with only the associations for the `wanted` keys retained.
# If any of the `wanted` keys are not present in this `Hash`, they will not be present
# in the returned `Hash` either.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.slice("B", "C") # => Hamster::Hash["B" => 2, "C" => 3]
#
# @param wanted [Array] The keys to retain
# @return [Hash]
# Build a fresh trie containing only the wanted keys that are present,
# then wrap it, preserving this Hash's default block.
def slice(*wanted)
  selected = Trie.new(0)
  wanted.each do |key|
    selected.put!(key, get(key)) if key?(key)
  end
  self.class.alloc(selected, @default)
end
# Return a {Vector} of the values which correspond to the `wanted` keys.
# If any of the `wanted` keys are not present in this `Hash`, they will be skipped.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.values_at("B", "A") # => Hamster::Vector[2, 1]
#
# @param wanted [Array] The keys to retrieve
# @return [Vector]
# Keep only the keys that are actually present, then look each one up.
def values_at(*wanted)
  present = wanted.select { |key| key?(key) }
  Vector.new(present.map { |key| get(key) }.freeze)
end
# Return a new {Set} populated with the keys from this `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3, "D" => 2].keys
# # => Hamster::Set["D", "C", "B", "A"]
#
# @return [Set]
def keys
  # NOTE(review): the Set is allocated around this Hash's own trie, so the
  # two share backing storage — presumably safe because tries are only
  # modified persistently, but confirm against Set.alloc.
  Set.alloc(@trie)
end
# Return a new {Vector} populated with the values from this `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3, "D" => 2].values
# # => Hamster::Vector[2, 3, 2, 1]
#
# @return [Vector]
def values
  # Materialize the values through the #each_value enumerator; the Array is
  # frozen so the Vector can take ownership without copying.
  Vector.new(each_value.to_a.freeze)
end
# Return a new `Hash` created by using our keys as values, and values as keys.
# If there are multiple values which are equivalent (as determined by `#hash` and
# `#eql?`), only one out of each group of equivalent values will be retained.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3, "D" => 2].invert
# # => Hamster::Hash[1 => "A", 3 => "C", 2 => "B"]
#
# @return [Hash]
# Collect [value, key] pairs and feed them to the constructor; later pairs
# overwrite earlier ones when values collide, as documented above.
def invert
  inverted = []
  each { |key, value| inverted << [value, key] }
  self.class.new(inverted, &@default)
end
# Return a new {Vector} which is a one-dimensional flattening of this `Hash`.
# If `level` is 1, all the `[key, value]` pairs in the hash will be concatenated
# into one {Vector}. If `level` is greater than 1, keys or values which are
# themselves `Array`s or {Vector}s will be recursively flattened into the output
# {Vector}. The depth to which that flattening will be recursively applied is
# determined by `level`.
#
# As a special case, if `level` is 0, each `[key, value]` pair will be a
# separate element in the returned {Vector}.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => [2, 3, 4]]
# h.flatten
# # => Hamster::Vector["A", 1, "B", [2, 3, 4]]
# h.flatten(2)
# # => Hamster::Vector["A", 1, "B", 2, 3, 4]
#
# @param level [Integer] The number of times to recursively flatten the `[key, value]` pairs in this `Hash`.
# @return [Vector]
def flatten(level = 1)
  # level 0 keeps each [key, value] pair as one Vector element.
  return Vector.new(self) if level == 0
  pairs = []
  each { |key, value| pairs.push(key, value) }
  # One flattening level was already consumed by splitting pairs apart.
  pairs.flatten!(level - 1) if level > 1
  Vector.new(pairs.freeze)
end
# Searches through the `Hash`, comparing `obj` with each key (using `#==`).
# When a matching key is found, return the `[key, value]` pair as an array.
# Return `nil` if no match is found.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].assoc("B") # => ["B", 2]
#
# @param obj [Object] The key to search for (using #==)
# @return [Array]
# #find already returns the first matching [key, value] entry, or nil.
def assoc(obj)
  find { |entry| obj == entry[0] }
end
# Searches through the `Hash`, comparing `obj` with each value (using `#==`).
# When a matching value is found, return the `[key, value]` pair as an array.
# Return `nil` if no match is found.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].rassoc(2) # => ["B", 2]
#
# @param obj [Object] The value to search for (using #==)
# @return [Array]
# #find already returns the first matching [key, value] entry, or nil.
def rassoc(obj)
  find { |entry| obj == entry[1] }
end
# Searches through the `Hash`, comparing `value` with each value (using `#==`).
# When a matching value is found, return its associated key object.
# Return `nil` if no match is found.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].key(2) # => "B"
#
# @param value [Object] The value to search for (using #==)
# @return [Object]
def key(value)
  each { |k, v| return k if value == v }
  nil
end
# Return a randomly chosen `[key, value]` pair from this `Hash`. If the hash is empty,
# return `nil`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].sample
# # => ["C", 3]
#
# @return [Array]
def sample
  # NOTE(review): when the hash is empty, rand(0) returns a random Float
  # rather than an Integer index — verify that Trie#at handles that and
  # returns nil, as the documentation above promises.
  @trie.at(rand(size))
end
# Return an empty `Hash` instance, of the same class as this one. Useful if you
# have multiple subclasses of `Hash` and want to treat them polymorphically.
# Maintains the default block, if there is one.
#
# @return [Hash]
# With a default block we must allocate (the canonical empty instance has
# none); otherwise the memoized empty instance is shared.
def clear
  @default ? self.class.alloc(EmptyTrie, @default) : self.class.empty
end
# Return true if `other` has the same type and contents as this `Hash`.
#
# @param other [Object] The collection to compare with
# @return [Boolean]
def eql?(other)
  return true if other.equal?(self)
  # Strict class match (not kind_of?) before comparing the backing tries.
  return false unless instance_of?(other.class)
  @trie.eql?(other.instance_variable_get(:@trie))
end
# Return true if `other` has the same contents as this `Hash`. Will convert
# `other` to a Ruby `Hash` using `#to_hash` if necessary.
#
# @param other [Object] The object to compare with
# @return [Boolean]
# Fall back to comparing plain-Hash conversions for non-Hamster collections.
def ==(other)
  eql?(other) || (other.respond_to?(:to_hash) && to_hash.eql?(other.to_hash))
end
# See `Object#hash`.
# @return [Integer]
def hash
  # Sorting the keys makes the result independent of trie iteration order;
  # this requires all keys to be mutually comparable via #<=> (Array#sort).
  keys.to_a.sort.reduce(0) do |hash, key|
    (hash << 32) - hash + key.hash + get(key).hash
  end
end
# Return the contents of this `Hash` as a programmer-readable `String`. If all the
# keys and values are serializable as Ruby literal strings, the returned string can
# be passed to `eval` to reconstitute an equivalent `Hash`. However, the default
# block (if there is one) will be lost when doing this.
#
# @return [String]
def inspect
  result = "#{self.class}["
  # The separator is empty before the first entry, ', ' afterwards.
  separator = ''
  each do |key, val|
    result << separator << key.inspect << ' => ' << val.inspect
    separator = ', '
  end
  result << "]"
end
# Allows this `Hash` to be printed at the `pry` console, or using `pp` (from the
# Ruby standard library), in a way which takes the amount of horizontal space on
# the screen into account, and which indents nested structures to make them easier
# to read.
#
# @private
def pretty_print(pp)
  pp.group(1, "#{self.class}[", "]") do
    pp.breakable ''
    # seplist separates entries with the default ", "; each entry is its own
    # group so a long value can wrap onto an indented line after " => ".
    pp.seplist(self, nil) do |key, val|
      pp.group do
        key.pretty_print(pp)
        pp.text ' => '
        pp.group(1) do
          pp.breakable ''
          val.pretty_print(pp)
        end
      end
    end
  end
end
# Convert this `Hamster::Hash` to an instance of Ruby's built-in `Hash`.
#
# @return [::Hash]
# Copy every association into a fresh mutable RubyHash. The default block,
# if any, is not carried over.
def to_hash
  result = {}
  each { |key, value| result[key] = value }
  result
end
alias :to_h :to_hash
# Deeply convert to Ruby Hash.
#
# @return [::Hash]
def to_ruby
  # Hamster.to_ruby handles the deep conversion of nested Hamster structures.
  Hamster.to_ruby(self)
end
# @return [::Hash]
# @private
def marshal_dump
  # Serializes only the key/value pairs; the default block (if any) is not
  # part of the dump.
  to_hash
end
# @private
def marshal_load(dictionary)
  # Only @trie is restored; any default block is lost across a
  # marshal round-trip (@default is left unset).
  @trie = Trie[dictionary]
end
private
# Return a new `Hash` which is derived from this one, using a modified {Trie}.
# The new `Hash` will retain the existing default block, if there is one.
#
def derive_new_hash(trie)
  # Unchanged trie means the operation was a no-op: reuse self.
  return self if trie.equal?(@trie)
  if trie.empty?
    # Share the canonical empty instance unless a default block must be kept.
    @default ? self.class.alloc(EmptyTrie, @default) : self.class.empty
  else
    self.class.alloc(trie, @default)
  end
end
end
# The canonical empty `Hash`. Returned by `Hamster.hash` and `Hash[]` when
# invoked with no arguments; also returned by `Hash.empty`. Prefer using this
# one rather than creating many empty hashes using `Hash.new`.
#
EmptyHash = Hamster::Hash.empty
end
Edit Hash documentation.
* Tightened up introduction, removing some words and pushing details
down into method docs.
* Demoted `#store` from introduction. All `#put`, all the time.
* Promoted `#update_in` to introduction, since it is non-obvious and
particularly handy with immutable structures.
* Added missing yield documentation
* Clarified some edge case behaviour. Mostly obvious, but doesn't hurt
to include.
* Assorted typos and grammar fixes.
require "hamster/immutable"
require "hamster/undefined"
require "hamster/enumerable"
require "hamster/trie"
require "hamster/set"
require "hamster/vector"
require "hamster/nested"
module Hamster
def self.hash(pairs = nil, &block)
  # With neither pairs nor a default block, reuse the canonical EmptyHash
  # instead of allocating a new instance.
  (pairs.nil? && block.nil?) ? EmptyHash : Hash.new(pairs, &block)
end
# A `Hamster::Hash` maps a set of unique keys to corresponding values, much
# like a dictionary maps from words to definitions. Given a key, it can store
# and retrieve an associated value in constant time. If an existing key is
# stored again, the new value will replace the old. It behaves much like
# Ruby's built-in Hash, which we will call RubyHash for clarity. Like
# RubyHash, two keys that are `#eql?` to each other and have the same
# `#hash` are considered identical in a `Hamster::Hash`.
#
# A `Hamster::Hash` can be created in several ways:
#
# Hamster.hash('Jane Doe' => 10, 'Jim Doe' => 6)
# Hamster::Hash.new(font_size: 10, font_family: 'Arial')
# Hamster::Hash[first_name: 'John', last_name: 'Smith']
#
# Any `Enumerable` object which yields two-element `[key, value]` arrays
# can be used to initialize a `Hamster::Hash`:
#
# Hamster::Hash.new([[:first_name, 'John'], [:last_name, 'Smith']])
#
# Key/value pairs can be added using {#put}. A new hash is returned and the
# existing one is left unchanged:
#
# hash = Hamster::Hash[a: 100, b: 200]
# hash.put(:c, 500) # => Hamster::Hash[:a => 100, :b => 200, :c => 500]
# hash # => Hamster::Hash[:a => 100, :b => 200]
#
# {#put} can also take a block, which is used to calculate the value to be
# stored.
#
# hash.put(:a) { |current| current + 200 } # => Hamster::Hash[:a => 300, :b => 200]
#
# Since it is immutable, all methods which you might expect to "modify" a
# `Hamster::Hash` actually return a new hash and leave the existing one
# unchanged. This means that the `hash[key] = value` syntax from RubyHash
# *cannot* be used with `Hamster::Hash`.
#
# Nested data structures can easily be updated using {#update_in}:
#
#   hash = Hamster::Hash["a" => Hamster::Vector[Hamster::Hash["c" => 42]]]
#   hash.update_in("a", 0, "c") { |value| value + 5 }
#   # => Hamster::Hash["a" => Hamster::Vector[Hamster::Hash["c" => 47]]]
#
# While a `Hamster::Hash` can iterate over its keys or values, it does not
# guarantee any specific iteration order (unlike RubyHash). Methods like
# {#flatten} do not guarantee the order of returned key/value pairs.
#
# Like RubyHash, a `Hamster::Hash` can have a default block which is used
# when looking up a key that does not exist. Unlike RubyHash, the default
# block will only be passed the missing key, without the hash itself:
#
# hash = Hamster::Hash.new { |missing_key| missing_key * 10 }
# hash[5] # => 50
class Hash
include Immutable
include Enumerable
class << self
# Create a new `Hash` populated with the given key/value pairs.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2] # => Hamster::Hash["A" => 1, "B" => 2]
# Hamster::Hash[["A", 1], ["B", 2]] # => Hamster::Hash["A" => 1, "B" => 2]
#
# @param pairs [::Enumerable] initial content of hash. An empty hash is returned if not provided.
# @return [Hash]
# Reuse the memoized empty instance when given nothing to store.
def [](pairs = nil)
  return empty if pairs.nil? || pairs.empty?
  new(pairs)
end
# Return an empty `Hash`. If used on a subclass, returns an empty instance
# of that class.
#
# @return [Hash]
def empty
  # Memoized per class object, so each subclass gets its own empty instance.
  @empty ||= self.new
end
# "Raw" allocation of a new `Hash`. Used internally to create a new
# instance quickly after obtaining a modified {Trie}.
#
# @return [Hash]
# @private
# Bypasses #initialize entirely: allocate a bare instance and wire in the
# pre-built trie and default block directly.
def alloc(trie = EmptyTrie, block = nil)
  instance = allocate
  instance.instance_variable_set(:@trie, trie)
  instance.instance_variable_set(:@default, block)
  instance
end
end
# @param pairs [::Enumerable] initial content of hash. An empty hash is returned if not provided.
# @yield [key] Optional _default block_ to be stored and used to calculate the default value of a missing key. It will not be yielded during this method. It will not be preserved when marshalling.
# @yieldparam key Key that was not present in the hash.
def initialize(pairs = nil, &block)
  # The default block is stored but never invoked here; see #get.
  @default = block
  @trie = pairs ? Trie[pairs] : EmptyTrie
end
# Return the default block if there is one. Otherwise, return `nil`.
#
# @return [Proc]
def default_proc
  # Set once at construction time (or by ::alloc); nil when absent.
  @default
end
# Return the number of key/value pairs in this `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].size # => 3
#
# @return [Integer]
def size
  # Delegated to the backing trie.
  @trie.size
end
alias :length :size
# Return `true` if this `Hash` contains no key/value pairs.
#
# @return [Boolean]
def empty?
  # Delegated to the backing trie.
  @trie.empty?
end
# Return `true` if the given key object is present in this `Hash`. More precisely,
# return `true` if a key with the same `#hash` code, and which is also `#eql?`
# to the given key object is present.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].key?("B") # => true
#
# @param key [Object] The key to check for
# @return [Boolean]
def key?(key)
  # Membership is delegated to the trie, which matches on #hash and #eql?
  # as described above.
  @trie.key?(key)
end
alias :has_key? :key?
alias :include? :key?
alias :member? :key?
# Return `true` if this `Hash` has one or more keys which map to the provided value.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].value?(2) # => true
#
# @param value [Object] The value to check for
# @return [Boolean]
# Linear scan over all entries; comparison uses the given value's #==.
def value?(value)
  each { |_k, v| return true if value == v }
  false
end
alias :has_value? :value?
# Retrieve the value corresponding to the provided key object. If not found, and
# this `Hash` has a default block, the default block is called to provide the
# value. Otherwise, return `nil`.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h["B"] # => 2
# h.get("B") # => 2
# h.get("Elephant") # => nil
#
# # Hamster Hash with a default proc:
# h = Hamster::Hash.new("A" => 1, "B" => 2, "C" => 3) { |key| key.size }
# h.get("B") # => 2
# h.get("Elephant") # => 8
#
# @param key [Object] The key to look up
# @return [Object]
def get(key)
  entry = @trie.get(key)
  return entry[1] if entry
  # Missing key: consult the default block if present, else nil falls out.
  @default.call(key) if @default
end
alias :[] :get
# Retrieve the value corresponding to the given key object, or use the provided
# default value or block, or otherwise raise a `KeyError`.
#
# @overload fetch(key)
# Retrieve the value corresponding to the given key, or raise a `KeyError`
# if it is not found.
# @param key [Object] The key to look up
# @overload fetch(key) { |key| ... }
# Retrieve the value corresponding to the given key, or call the optional
# code block (with the missing key) and get its return value.
# @yield [key] The key which was not found
# @yieldreturn [Object] Object to return since the key was not found
# @param key [Object] The key to look up
# @overload fetch(key, default)
# Retrieve the value corresponding to the given key, or else return
# the provided `default` value.
# @param key [Object] The key to look up
# @param default [Object] Object to return if the key is not found
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.fetch("B") # => 2
# h.fetch("Elephant") # => KeyError: key not found: "Elephant"
#
# # with a default value:
# h.fetch("B", 99) # => 2
# h.fetch("Elephant", 99) # => 99
#
# # with a block:
# h.fetch("B") { |key| key.size } # => 2
# h.fetch("Elephant") { |key| key.size } # => 8
#
# @return [Object]
def fetch(key, default = Undefined)
  entry = @trie.get(key)
  return entry[1] if entry
  # Precedence for a missing key: block, then explicit default, then raise.
  return yield(key) if block_given?
  return default if default != Undefined
  raise KeyError, "key not found: #{key.inspect}"
end
# Return a new `Hash` with the existing key/value associations, plus an association
# between the provided key and value. If an equivalent key is already present, its
# associated value will be replaced with the provided one.
#
# If the `value` argument is missing, but an optional code block is provided,
# it will be passed the existing value (or `nil` if there is none) and what it
# returns will replace the existing value. This is useful for "transforming"
# the value associated with a certain key.
#
# Avoid mutating objects which are used as keys. `String`s are an exception:
# unfrozen `String`s which are used as keys are internally duplicated and
# frozen. This matches RubyHash's behaviour.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2]
# h.put("C", 3)
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.put("B") { |value| value * 10 }
# # => Hamster::Hash["A" => 1, "B" => 20]
#
# @param key [Object] The key to store
# @param value [Object] The value to associate it with
# @yield [value] The previously stored value, or `nil` if none.
# @yieldreturn [Object] The new value to store
# @return [Hash]
def put(key, value = yield(get(key)))
  # The default-argument trick: the block is evaluated (with the current
  # value, or nil) only when no explicit value argument is supplied.
  updated = @trie.put(key, value)
  updated.equal?(@trie) ? self : self.class.alloc(updated, @default)
end
# Return a new `Hash` with a deeply nested value modified to the result of
# the given code block. When traversing the nested `Hash`es and `Vector`s,
# non-existing keys are created with empty `Hash` values.
#
# The code block receives the existing value of the deeply nested key (or
# `nil` if it doesn't exist). This is useful for "transforming" the value
# associated with a certain key.
#
# Note that the original `Hash` and sub-`Hash`es and sub-`Vector`s are left
# unmodified; new data structure copies are created along the path wherever
# needed.
#
# @example
# hash = Hamster::Hash["a" => Hamster::Hash["b" => Hamster::Hash["c" => 42]]]
# hash.update_in("a", "b", "c") { |value| value + 5 }
# # => Hamster::Hash["a" => Hamster::Hash["b" => Hamster::Hash["c" => 47]]]
#
# @param key_path [::Array<Object>] List of keys which form the path to the key to be modified
# @yield [value] The previously stored value
# @yieldreturn [Object] The new value to store
# @return [Hash]
def update_in(*key_path, &block)
  raise ArgumentError, "must have at least one key in path" if key_path.empty?
  key, *rest = key_path
  # At the last key, transform the current value; otherwise recurse into the
  # nested structure, substituting EmptyHash for missing intermediate keys.
  new_value =
    if rest.empty?
      block.call(get(key))
    else
      fetch(key, EmptyHash).update_in(*rest, &block)
    end
  put(key, new_value)
end
# An alias for {#put} to match RubyHash's API. Does not support {#put}'s
# block form.
#
# @see #put
# @param key [Object] The key to store
# @param value [Object] The value to associate it with
# @return [Hash]
def store(key, value)
  # Thin wrapper over #put; value is required (no block form here).
  put(key, value)
end
# Return a new `Hash` with `key` removed. If `key` is not present, return
# `self`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].delete("B")
# # => Hamster::Hash["A" => 1, "C" => 3]
#
# @param key [Object] The key to remove
# @return [Hash]
def delete(key)
  # When `key` is absent the trie comes back unchanged, and derive_new_hash
  # then returns `self`, as documented above.
  derive_new_hash(@trie.delete(key))
end
# Call the block once for each key/value pair in this `Hash`, passing the key/value
# pair as parameters. No specific iteration order is guaranteed, though the order will
# be stable for any particular `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].each { |k, v| puts "k=#{k} v=#{v}" }
#
# k=A v=1
# k=C v=3
# k=B v=2
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @yield [key, value] Once for each key/value pair.
# @return [self]
# Iteration is delegated directly to the backing trie; `self` is returned
# to allow chaining, mirroring RubyHash#each.
def each(&block)
  return to_enum unless block_given?
  @trie.each(&block)
  self
end
alias :each_pair :each
# Call the block once for each key/value pair in this `Hash`, passing the key/value
# pair as parameters. Iteration order will be the opposite of {#each}.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].reverse_each { |k, v| puts "k=#{k} v=#{v}" }
#
# k=B v=2
# k=C v=3
# k=A v=1
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @yield [key, value] Once for each key/value pair.
# @return [self]
# Reverse iteration is delegated to the trie; `self` is returned for chaining.
def reverse_each(&block)
  return enum_for(:reverse_each) unless block_given?
  @trie.reverse_each(&block)
  self
end
# Call the block once for each key/value pair in this `Hash`, passing the key as a
# parameter. Ordering guarantees are the same as {#each}.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].each_key { |k| puts "k=#{k}" }
#
# k=A
# k=C
# k=B
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @yield [key] Once for each key/value pair.
# @return [self]
# Yields only the key half of each trie entry.
def each_key
  return enum_for(:each_key) unless block_given?
  @trie.each { |key, _value| yield key }
  self
end
# Call the block once for each key/value pair in this `Hash`, passing the value as a
# parameter. Ordering guarantees are the same as {#each}.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].each_value { |v| puts "v=#{v}" }
#
# v=1
# v=3
# v=2
# # => Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
#
# @yield [value] Once for each key/value pair.
# @return [self]
# Yields only the value half of each trie entry.
def each_value
  return enum_for(:each_value) unless block_given?
  @trie.each { |_key, value| yield value }
  self
end
# Call the block once for each key/value pair in this `Hash`, passing the key/value
# pair as parameters. The block should return a `[key, value]` array each time.
# All the returned `[key, value]` arrays will be gathered into a new `Hash`.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.map { |k, v| ["new-#{k}", v * v] }
# # => Hash["new-C" => 9, "new-B" => 4, "new-A" => 1]
#
# @yield [key, value] Once for each key/value pair.
# @return [Hash]
# Enumerable#map (via super) gathers the block results into an Array of
# [key, value] pairs, which seeds a new Hash that keeps the default block.
def map
  return enum_for(:map) unless block_given?
  empty? ? self : self.class.new(super, &@default)
end
alias :collect :map
# Return a new `Hash` with all the key/value pairs for which the block returns true.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.select { |k, v| v >= 2 }
# # => Hamster::Hash["B" => 2, "C" => 3]
#
# @yield [key, value] Once for each key/value pair.
# @yieldreturn Truthy if this pair should be present in the new `Hash`.
# @return [Hash]
# Filtering is delegated to Trie#select; derive_new_hash keeps the default
# block and returns self when nothing was rejected.
def select(&block)
  block ? derive_new_hash(@trie.select(&block)) : enum_for(:select)
end
alias :find_all :select
alias :keep_if :select
# Yield `[key, value]` pairs until one is found for which the block returns true.
# Return that `[key, value]` pair. If the block never returns true, return `nil`.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.find { |k, v| v.even? }
# # => ["B", 2]
#
# @return [Array]
# @yield [key, value] At most once for each key/value pair, until the block returns `true`.
# @yieldreturn Truthy to halt iteration and return the yielded key/value pair.
def find
  return enum_for(:find) unless block_given?
  # Early-return from inside the block short-circuits the iteration.
  each { |pair| return pair if yield(pair) }
  nil
end
alias :detect :find
# Return a new `Hash` containing all the key/value pairs from this `Hash` and
# `other`. If no block is provided, the value for entries with colliding keys
# will be that from `other`. Otherwise, the value for each duplicate key is
# determined by calling the block.
#
# `other` can be a `Hamster::Hash`, a built-in Ruby `Hash`, or any `Enumerable`
# object which yields `[key, value]` pairs.
#
# @example
# h1 = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h2 = Hamster::Hash["C" => 70, "D" => 80]
# h1.merge(h2)
# # => Hamster::Hash["C" => 70, "A" => 1, "D" => 80, "B" => 2]
# h1.merge(h2) { |key, v1, v2| v1 + v2 }
# # => Hamster::Hash["C" => 73, "A" => 1, "D" => 80, "B" => 2]
#
# @param other [::Enumerable] The collection to merge with
# @yieldparam key [Object] The key which was present in both collections
# @yieldparam my_value [Object] The associated value from this `Hash`
# @yieldparam other_value [Object] The associated value from the other collection
# @yieldreturn [Object] The value to associate this key with in the new `Hash`
# @return [Hash]
def merge(other)
  # Without a conflict-resolution block, a bulk insert lets `other` win.
  return derive_new_hash(@trie.bulk_put(other)) unless block_given?
  # With a block, fold `other` in one pair at a time so colliding keys can
  # be resolved by yielding (key, ours, theirs).
  merged = other.reduce(@trie) do |memo, (key, value)|
    existing = memo.get(key)
    if existing
      memo.put(key, yield(key, existing[1], value))
    else
      memo.put(key, value)
    end
  end
  derive_new_hash(merged)
end
# Retrieve the value corresponding to the given key object, or use the provided
# default value or block, or otherwise raise a `KeyError`.
#
# @overload fetch(key)
# Retrieve the value corresponding to the given key, or raise a `KeyError`
# if it is not found.
# @param key [Object] The key to look up
# @overload fetch(key) { |key| ... }
# Return a sorted {Vector} which contains all the `[key, value]` pairs in
# this `Hash` as two-element `Array`s.
#
# @overload sort
# Uses `#<=>` to determine sorted order.
# @overload sort { |(k1, v1), (k2, v2)| ... }
# Uses the block as a comparator to determined sorted order.
#
# @example
# h = Hamster::Hash["Dog" => 1, "Elephant" => 2, "Lion" => 3]
# h.sort { |(k1, v1), (k2, v2)| k1.size <=> k2.size }
# # => Hamster::Vector[["Dog", 1], ["Lion", 3], ["Elephant", 2]]
# @yield [(k1, v1), (k2, v2)] Any number of times with different pairs of key/value associations.
# @yieldreturn [Integer] Negative if the first pair should be sorted
# lower, positive if the latter pair, or 0 if equal.
#
# @see ::Enumerable#sort
#
# @return [Vector]
def sort
  # Enumerable#sort yields the [key, value] pairs; wrap the sorted Array
  # in an immutable Vector.
  Vector.new(super)
end
# Return a {Vector} which contains all the `[key, value]` pairs in this `Hash`
# as two-element Arrays. The order which the pairs will appear in is determined by
# passing each pair to the code block to obtain a sort key object, and comparing
# the sort keys using `#<=>`.
#
# @see ::Enumerable#sort_by
#
# @example
# h = Hamster::Hash["Dog" => 1, "Elephant" => 2, "Lion" => 3]
# h.sort_by { |key, value| key.size }
# # => Hamster::Vector[["Dog", 1], ["Lion", 3], ["Elephant", 2]]
#
# @yield [key, value] Once for each key/value pair.
# @yieldreturn a sort key object for the yielded pair.
# @return [Vector]
def sort_by
  # Delegates to Enumerable#sort_by (pairs yielded to the block), then
  # wraps the resulting Array in an immutable Vector.
  Vector.new(super)
end
# Return a new `Hash` with the associations for all of the given `keys` removed.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.except("A", "C") # => Hamster::Hash["B" => 2]
#
# @param keys [Array] The keys to remove
# @return [Hash]
# Drop the given keys one at a time; each delete returns a new Hash, so
# the receiver is never mutated.
def except(*keys)
  remaining = self
  keys.each { |key| remaining = remaining.delete(key) }
  remaining
end
# Return a new `Hash` with only the associations for the `wanted` keys retained.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.slice("B", "C") # => Hamster::Hash["B" => 2, "C" => 3]
#
# @param wanted [::Enumerable] The keys to retain
# @return [Hash]
# Build a fresh Trie containing only the wanted keys that are present,
# preserving this Hash's default block.
def slice(*wanted)
  selected = Trie.new(0)
  wanted.each do |key|
    selected.put!(key, get(key)) if key?(key)
  end
  self.class.alloc(selected, @default)
end
# Return a {Vector} of the values which correspond to the `wanted` keys.
# If any of the `wanted` keys are not present in this `Hash`, they will be skipped.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => 2, "C" => 3]
# h.values_at("B", "A", "D") # => Hamster::Vector[2, 1]
#
# @param wanted [Array] The keys to retrieve
# @return [Vector]
# Collect the values for the wanted keys (missing keys are skipped),
# preserving argument order.
def values_at(*wanted)
  found = wanted.select { |key| key?(key) }.map { |key| get(key) }
  Vector.new(found.freeze)
end
# Return a new {Set} containing the keys from this `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3, "D" => 2].keys
# # => Hamster::Set["D", "C", "B", "A"]
#
# @return [Set]
def keys
  # Set.alloc wraps this Hash's trie directly — no copying is performed.
  Set.alloc(@trie)
end
# Return a new {Vector} populated with the values from this `Hash`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3, "D" => 2].values
# # => Hamster::Vector[2, 3, 2, 1]
#
# @return [Vector]
def values
  # each_value without a block returns an Enumerator; the Array is frozen
  # so Vector can adopt it without defensive copying.
  Vector.new(each_value.to_a.freeze)
end
# Return a new `Hash` created by using keys as values and values as keys.
# If there are multiple values which are equivalent (as determined by `#hash` and
# `#eql?`), only one out of each group of equivalent values will be
# retained. Which one specifically is undefined.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3, "D" => 2].invert
# # => Hamster::Hash[1 => "A", 3 => "C", 2 => "B"]
#
# @return [Hash]
# Swap keys and values. Duplicate values collapse to a single key
# (which one is retained is undefined). Keeps the default block.
def invert
  inverted = []
  each { |key, value| inverted.push([value, key]) }
  self.class.new(inverted, &@default)
end
# Return a new {Vector} which is a one-dimensional flattening of this `Hash`.
# If `level` is 1, all the `[key, value]` pairs in the hash will be concatenated
# into one {Vector}. If `level` is greater than 1, keys or values which are
# themselves `Array`s or {Vector}s will be recursively flattened into the output
# {Vector}. The depth to which that flattening will be recursively applied is
# determined by `level`.
#
# As a special case, if `level` is 0, each `[key, value]` pair will be a
# separate element in the returned {Vector}.
#
# @example
# h = Hamster::Hash["A" => 1, "B" => [2, 3, 4]]
# h.flatten
# # => Hamster::Vector["A", 1, "B", [2, 3, 4]]
# h.flatten(2)
# # => Hamster::Vector["A", 1, "B", 2, 3, 4]
#
# @param level [Integer] The number of times to recursively flatten the `[key, value]` pairs in this `Hash`.
# @return [Vector]
# Flatten [key, value] pairs into a Vector. level 0 keeps each pair as a
# distinct element; level > 1 recursively flattens nested arrays.
def flatten(level = 1)
  return Vector.new(self) if level == 0
  pairs = []
  each { |key, value| pairs.push(key, value) }
  pairs.flatten!(level - 1) if level > 1
  Vector.new(pairs.freeze)
end
# Searches through the `Hash`, comparing `obj` with each key (using `#==`).
# When a matching key is found, return the `[key, value]` pair as an array.
# Return `nil` if no match is found.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].assoc("B") # => ["B", 2]
#
# @param obj [Object] The key to search for (using #==)
# @return [Array]
# Linear scan for a key equal to obj. `obj ==` is deliberately the
# receiver so custom #== implementations on obj are honored.
def assoc(obj)
  each do |pair|
    return pair if obj == pair[0]
  end
  nil
end
# Searches through the `Hash`, comparing `obj` with each value (using `#==`).
# When a matching value is found, return the `[key, value]` pair as an array.
# Return `nil` if no match is found.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].rassoc(2) # => ["B", 2]
#
# @param obj [Object] The value to search for (using #==)
# @return [Array]
# Linear scan for a value equal to obj; returns its [key, value] pair.
# `obj ==` is deliberately the receiver of the comparison.
def rassoc(obj)
  each do |pair|
    return pair if obj == pair[1]
  end
  nil
end
# Searches through the `Hash`, comparing `value` with each value (using `#==`).
# When a matching value is found, return its associated key object.
# Return `nil` if no match is found.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].key(2) # => "B"
#
# @param value [Object] The value to search for (using #==)
# @return [Object]
# Reverse lookup: first key whose value equals +value+ (via value.==),
# or nil when no value matches.
def key(value)
  each do |pair|
    return pair[0] if value == pair[1]
  end
  nil
end
# Return a randomly chosen `[key, value]` pair from this `Hash`. If the hash is empty,
# return `nil`.
#
# @example
# Hamster::Hash["A" => 1, "B" => 2, "C" => 3].sample
# # => ["C", 3]
#
# @return [Array]
def sample
  # Trie#at(i) returns the entry at position i; rand(size) picks a
  # uniform index. NOTE(review): when the hash is empty, rand(0) returns
  # a Float in [0, 1) — this relies on Trie#at handling that and
  # returning nil; confirm against Trie's implementation.
  @trie.at(rand(size))
end
# Return an empty `Hash` instance, of the same class as this one. Useful if you
# have multiple subclasses of `Hash` and want to treat them polymorphically.
# Maintains the default block, if there is one.
#
# @return [Hash]
# Empty Hash of the same class; the default block (if any) survives.
def clear
  @default ? self.class.alloc(EmptyTrie, @default) : self.class.empty
end
# Return true if `other` has the same type and contents as this `Hash`.
#
# @param other [Object] The collection to compare with
# @return [Boolean]
# Strict equality: identity shortcut, else same class and eql? tries.
def eql?(other)
  other.equal?(self) ||
    (instance_of?(other.class) && @trie.eql?(other.instance_variable_get(:@trie)))
end
# Return true if `other` has the same contents as this `Hash`. Will convert
# `other` to a Ruby `Hash` using `#to_hash` if necessary.
#
# @param other [Object] The object to compare with
# @return [Boolean]
def ==(other)
  # Loose equality: strict eql? first, otherwise compare via ::Hash
  # conversions for anything duck-typed with #to_hash.
  self.eql?(other) || (other.respond_to?(:to_hash) && to_hash.eql?(other.to_hash))
end
# See `Object#hash`.
# @return [Integer]
def hash
  # Order-independent digest: keys are sorted first so equal hashes yield
  # equal results regardless of internal trie layout.
  # NOTE(review): requires all keys to be mutually comparable with <=>,
  # otherwise Array#sort raises ArgumentError — confirm acceptable.
  keys.to_a.sort.reduce(0) do |hash, key|
    (hash << 32) - hash + key.hash + get(key).hash
  end
end
# Return the contents of this `Hash` as a programmer-readable `String`. If all the
# keys and values are serializable as Ruby literal strings, the returned string can
# be passed to `eval` to reconstitute an equivalent `Hash`. The default
# block (if there is one) will be lost when doing this, however.
#
# @return [String]
# Programmer-readable representation, e.g. Hamster::Hash["A" => 1].
# Joining pre-rendered pairs produces the same string the original
# built incrementally.
def inspect
  rendered = []
  each { |key, val| rendered << "#{key.inspect} => #{val.inspect}" }
  "#{self.class}[#{rendered.join(', ')}]"
end
# Allows this `Hash` to be printed at the `pry` console, or using `pp` (from the
# Ruby standard library), in a way which takes the amount of horizontal space on
# the screen into account, and which indents nested structures to make them easier
# to read.
#
# @private
def pretty_print(pp)
  # Outer group: the class name and brackets; seplist inserts commas
  # between pairs and allows line breaks between them.
  pp.group(1, "#{self.class}[", "]") do
    pp.breakable ''
    pp.seplist(self, nil) do |key, val|
      # Each pair is its own group so "key => value" wraps as a unit;
      # the inner group lets a long value break onto its own line.
      pp.group do
        key.pretty_print(pp)
        pp.text ' => '
        pp.group(1) do
          pp.breakable ''
          val.pretty_print(pp)
        end
      end
    end
  end
end
# Convert this `Hamster::Hash` to an instance of Ruby's built-in `Hash`.
#
# @return [::Hash]
# Shallow conversion to a mutable Ruby ::Hash (insertion order follows
# this Hash's iteration order). The default block is not carried over.
def to_hash
  result = {}
  each { |key, value| result.store(key, value) }
  result
end
alias :to_h :to_hash
# Deeply convert to Ruby Hash and other primitives. No `Hamster` objects of
# any type will remain anywhere in the data structure.
#
# @return [::Hash]
def to_ruby
  # Delegates to the module-level deep-conversion helper.
  Hamster.to_ruby(self)
end
# @return [::Hash]
# @private
def marshal_dump
  # Serialize as a plain ::Hash; the default block (not marshalable) is
  # dropped.
  to_hash
end
# @private
def marshal_load(dictionary)
  # Rebuild the trie from the dumped ::Hash. Note: @default is not
  # restored — see marshal_dump.
  @trie = Trie[dictionary]
end
private
# Return a new `Hash` which is derived from this one, using a modified {Trie}.
# The new `Hash` will retain the existing default block, if there is one.
#
# Wrap a (possibly unchanged) trie in a Hash, reusing canonical objects:
# the receiver when the trie is identical, the empty singleton when the
# trie is empty and there is no default block.
def derive_new_hash(trie)
  return self if trie.equal?(@trie)
  if trie.empty?
    @default ? self.class.alloc(EmptyTrie, @default) : self.class.empty
  else
    self.class.alloc(trie, @default)
  end
end
end
# The canonical empty `Hash`. Returned by `Hamster.hash` and `Hash[]` when
# invoked with no arguments; also returned by `Hash.empty`. Prefer using this
# one rather than creating many empty hashes using `Hash.new`.
#
EmptyHash = Hamster::Hash.empty
end
|
require 'singleton'
module Hamster
# Build a cons list from the given items, preserving their order.
def self.list(*items)
  items.reverse.inject(List::Empty.instance) { |memo, item| memo.cons(item) }
end
module List
# Mixin providing Lisp-style c[ad]+r accessors (car, cdr, cadr, caddr, …)
# via method_missing, for any object responding to #head and #tail.
module Cadr
  private

  # Dispatch names matching /^c([ad]+)r$/ to #accessor; everything else
  # falls through to the normal missing-method handling.
  def method_missing(name, *args, &block)
    if name.to_s =~ /^c([ad]+)r$/
      accessor($1)
    else
      super
    end
  end

  # Keep respond_to? consistent with method_missing (Ruby idiom: always
  # pair method_missing with respond_to_missing?).
  def respond_to_missing?(name, include_private = false)
    name.to_s =~ /^c([ad]+)r$/ ? true : super
  end

  # Perform compositions of <tt>car</tt> and <tt>cdr</tt> operations. Their names consist of a 'c', followed by at
  # least one 'a' or 'd', and finally an 'r'. The series of 'a's and 'd's in each function's name is chosen to
  # identify the series of car and cdr operations that is performed by the function. The order in which the 'a's and
  # 'd's appear is the inverse of the order in which the corresponding operations are performed.
  def accessor(sequence)
    sequence.split(//).reverse!.reduce(self) do |memo, char|
      case char
      when "a" then memo.head
      when "d" then memo.tail
      end
    end
  end
end
# The empty-list terminator. A Singleton: every empty list is the same
# object, so identity comparison suffices.
class Empty
  include Singleton
  include Cadr

  # The empty list has no head.
  def head
    nil
  end

  # The tail of the empty list is itself (total — never raises).
  def tail
    self
  end

  # Prepend +item+, producing a one-element list.
  def cons(item)
    Cons.new(item, self)
  end

  def empty?
    true
  end

  def size
    0
  end
  alias_method :length, :size

  # Yields nothing; returns self when called without a block.
  def each
    block_given? or return self
    nil
  end

  # Every transformation of the empty list is the empty list itself.
  def map
    self
  end

  def reduce(memo)
    memo
  end
  alias_method :inject, :reduce

  def filter
    self
  end
  alias_method :select, :filter

  def reject
    self
  end

  def take_while
    self
  end

  def drop_while
    self
  end

  def take(number)
    self
  end

  def drop(number)
    self
  end

  def include?(item)
    false
  end
  alias_method :member?, :include?

  # Immutable: dup/clone return the same singleton instance.
  def dup
    self
  end
  alias_method :clone, :dup
end
# An immutable cons cell: a head value plus a tail list. All operations
# build new lists by recursion; the receiver is never mutated.
class Cons
  include Cadr

  attr_reader :head, :tail

  def initialize(head, tail)
    @head = head
    @tail = tail
  end

  # Prepend +item+, sharing this list as the new tail.
  def cons(item)
    self.class.new(item, self)
  end

  def empty?
    false
  end

  # Length via recursion over the tail.
  def size
    1 + tail.size
  end
  alias_method :length, :size

  # Yield each element front-to-back; returns self when no block given.
  def each(&block)
    return self unless block_given?
    yield(head)
    tail.each(&block)
    nil
  end

  def map(&block)
    return self unless block_given?
    tail.map(&block).cons(yield(head))
  end

  def reduce(memo, &block)
    return memo unless block_given?
    tail.reduce(yield(memo, head), &block)
  end
  alias_method :inject, :reduce

  def filter(&block)
    return self unless block_given?
    rest = tail.filter(&block)
    yield(head) ? rest.cons(head) : rest
  end
  alias_method :select, :filter

  def reject(&block)
    return self unless block_given?
    filter { |item| !yield(item) }
  end

  def take_while(&block)
    return self unless block_given?
    yield(head) ? tail.take_while(&block).cons(head) : Empty.instance
  end

  def drop_while(&block)
    return self unless block_given?
    yield(head) ? tail.drop_while(&block) : self
  end

  def take(number)
    number == 0 ? Empty.instance : tail.take(number - 1).cons(head)
  end

  def drop(number)
    number == 0 ? self : tail.drop(number - 1)
  end

  def include?(item)
    item == head || tail.include?(item)
  end
  alias_method :member?, :include?

  # Structural equality: same class, equal heads, eql? tails.
  def eql?(other)
    return true if other.equal?(self)
    return false unless other.is_a?(self.class)
    other.head == head && other.tail.eql?(tail)
  end
  alias_method :==, :eql?

  # Immutable: dup/clone return self.
  def dup
    self
  end
  alias_method :clone, :dup
end
end
end
Remove the need to use Singleton for Empty list.
module Hamster
# Build a cons list from the given items, preserving their order.
# List::Empty (the module itself) terminates the chain.
def self.list(*items)
  items.reverse.inject(List::Empty) { |memo, item| memo.cons(item) }
end
module List
# Mixin providing Lisp-style c[ad]+r accessors (car, cdr, cadr, caddr, …)
# via method_missing, for any object responding to #head and #tail.
module Cadr
  private

  # Dispatch names matching /^c([ad]+)r$/ to #accessor; everything else
  # falls through to the normal missing-method handling.
  def method_missing(name, *args, &block)
    if name.to_s =~ /^c([ad]+)r$/
      accessor($1)
    else
      super
    end
  end

  # Keep respond_to? consistent with method_missing (Ruby idiom: always
  # pair method_missing with respond_to_missing?).
  def respond_to_missing?(name, include_private = false)
    name.to_s =~ /^c([ad]+)r$/ ? true : super
  end

  # Perform compositions of <tt>car</tt> and <tt>cdr</tt> operations. Their names consist of a 'c', followed by at
  # least one 'a' or 'd', and finally an 'r'. The series of 'a's and 'd's in each function's name is chosen to
  # identify the series of car and cdr operations that is performed by the function. The order in which the 'a's and
  # 'd's appear is the inverse of the order in which the corresponding operations are performed.
  def accessor(sequence)
    sequence.split(//).reverse!.reduce(self) do |memo, char|
      case char
      when "a" then memo.head
      when "d" then memo.tail
      end
    end
  end
end
# The empty list, implemented as a module whose singleton class carries
# the list protocol — the module object itself is the one canonical empty
# list, with no Singleton machinery required.
module Empty
  class << self
    include Cadr

    # The empty list has no head.
    def head
      nil
    end

    # The tail of the empty list is itself.
    def tail
      self
    end

    # Prepend +item+, producing a one-element list.
    def cons(item)
      Cons.new(item, self)
    end

    def empty?
      true
    end

    def size
      0
    end
    alias_method :length, :size

    # Yields nothing; returns self when called without a block.
    def each
      block_given? or return self
      nil
    end

    # Every transformation of the empty list is the empty list itself.
    def map
      self
    end

    def reduce(memo)
      memo
    end
    alias_method :inject, :reduce

    def filter
      self
    end
    alias_method :select, :filter

    def reject
      self
    end

    def take_while
      self
    end

    def drop_while
      self
    end

    def take(number)
      self
    end

    def drop(number)
      self
    end

    def include?(item)
      false
    end
    alias_method :member?, :include?

    # Equal to anything that is itself empty (duck-typed, since Empty is
    # no longer a class shared with other empty lists).
    # NOTE(review): raises NoMethodError when +other+ does not respond to
    # #empty? — confirm callers only compare against list-likes.
    def eql?(other)
      return true if other.equal?(self)
      # return false unless other.is_a?(self.class)
      other.empty?
    end
    alias_method :==, :eql?

    # Immutable: dup/clone return the module itself.
    def dup
      self
    end
    alias_method :clone, :dup
  end
end
# An immutable cons cell: a head value plus a tail list. Terminated by
# the Empty module rather than a singleton instance.
class Cons
  include Cadr

  def initialize(head, tail)
    @head = head
    @tail = tail
  end

  def head
    @head
  end

  def tail
    @tail
  end

  # Prepend +item+, sharing this list as the new tail.
  def cons(item)
    self.class.new(item, self)
  end

  def empty?
    false
  end

  # Length via recursion over the tail.
  def size
    tail.size + 1
  end
  alias_method :length, :size

  # Yield each element front-to-back; returns self when no block given.
  def each(&block)
    block_given? or return self
    yield(head)
    tail.each(&block)
    nil
  end

  def map(&block)
    block_given? or return self
    tail.map(&block).cons(yield(head))
  end

  def reduce(memo, &block)
    block_given? or return memo
    tail.reduce(yield(memo, head), &block)
  end
  alias_method :inject, :reduce

  def filter(&block)
    block_given? or return self
    filtered = tail.filter(&block)
    yield(head) ? filtered.cons(head) : filtered
  end
  alias_method :select, :filter

  def reject(&block)
    block_given? or return self
    filter { |item| !yield(item) }
  end

  # Note: Empty (the module) terminates truncated lists below.
  def take_while(&block)
    block_given? or return self
    yield(head) ? tail.take_while(&block).cons(head) : Empty
  end

  def drop_while(&block)
    block_given? or return self
    yield(head) ? tail.drop_while(&block) : self
  end

  def take(number)
    number == 0 ? Empty : tail.take(number - 1).cons(head)
  end

  def drop(number)
    number == 0 ? self : tail.drop(number - 1)
  end

  def include?(item)
    item == head || tail.include?(item)
  end
  alias_method :member?, :include?

  # Structural equality: same class, equal heads, eql? tails.
  def eql?(other)
    return true if other.equal?(self)
    return false unless other.is_a?(self.class)
    other.head == head && other.tail.eql?(tail)
  end
  alias_method :==, :eql?

  # Immutable: dup/clone return self.
  def dup
    self
  end
  alias_method :clone, :dup
end
end
end
|
require 'test/helper'
# Unit tests for the TypusHelper view helpers (page chrome, flash
# messages, locale switcher). Helper modules are mixed directly into the
# test case so the helpers can be called as plain methods.
class TypusHelperTest < ActiveSupport::TestCase
  include TypusHelper
  include ActionView::Helpers::UrlHelper
  include ActionController::UrlWriter

  # Placeholder smoke tests — the applications/resources helpers are
  # exercised indirectly elsewhere.
  def test_applications
    assert true
  end

  def test_resources
    assert true
  end

  # typus_block returns nil when the requested partial does not exist.
  def test_typus_block
    output = typus_block(:model => 'posts', :location => 'sidebar', :partial => 'pum')
    assert output.nil?
  end

  # page_title combines the configured app name with the humanized action.
  # NOTE(review): `params` appears to satisfy an implicit dependency of
  # page_title — confirm against the helper's implementation.
  def test_page_title
    params = {}
    Typus::Configuration.options[:app_name] = 'whatistypus.com'
    output = page_title('custom_action')
    assert_equal 'whatistypus.com › Custom action', output
  end

  def test_header
    output = header
    expected = <<-HTML
<h1>#{Typus::Configuration.options[:app_name]} </h1>
HTML
    assert_equal expected, output
  end

  # login_info renders the current user's edit link plus a sign-out link.
  def test_login_info
    typus_user = typus_users(:admin)
    output = login_info(typus_user)
    expected = <<-HTML
<ul>
<li>Logged as <a href="http://test.host/admin/typus_users/1/edit">Admin Example (admin)</a></li>
<li><a href="http://test.host/admin/sign_out">Sign out</a></li>
</ul>
HTML
    assert_equal expected, output
  end

  # The flash div takes its CSS class from the message key; an empty
  # flash renders nothing.
  def test_display_flash_message
    message = { :test => 'This is the message.' }
    output = display_flash_message(message)
    expected = <<-HTML
<div id="flash" class="test">
<p>This is the message.</p>
</div>
HTML
    assert_equal expected, output
    message = {}
    output = display_flash_message(message)
    assert output.nil?
  end

  def test_typus_message
    output = typus_message('chunky bacon', 'yay')
    expected = <<-HTML
<div id="flash" class="yay">
<p>chunky bacon</p>
</div>
HTML
    assert_equal expected, output
  end

  # locales renders one switcher link per configured locale; config is
  # restored to a single locale afterwards to avoid test pollution.
  def test_locales
    Typus::Configuration.options[:locales] = [ [ "English", :en ], [ "Español", :es ] ]
    output = locales('set_locale')
    expected = <<-HTML
<ul>
<li>Set language:</li>
<li><a href="set_locale?en">English</a></li>
<li><a href="set_locale?es">Español</a></li>
</ul>
HTML
    assert_equal expected, output
    Typus::Configuration.options[:locales] = [ [ "English", :en ] ]
  end
end
Include ActionView::Helpers::TextHelper for cycle method.
require 'test/helper'
# Unit tests for the TypusHelper view helpers. TextHelper is included as
# well (for helpers that call #cycle); all helper modules are mixed
# directly into the test case so helpers can be called as plain methods.
class TypusHelperTest < ActiveSupport::TestCase
  include TypusHelper
  include ActionView::Helpers::UrlHelper
  include ActionView::Helpers::TextHelper
  include ActionController::UrlWriter

  # Placeholder smoke tests — the applications/resources helpers are
  # exercised indirectly elsewhere.
  def test_applications
    assert true
  end

  def test_resources
    assert true
  end

  # typus_block returns nil when the requested partial does not exist.
  def test_typus_block
    output = typus_block(:model => 'posts', :location => 'sidebar', :partial => 'pum')
    assert output.nil?
  end

  # page_title combines the configured app name with the humanized action.
  # NOTE(review): `params` appears to satisfy an implicit dependency of
  # page_title — confirm against the helper's implementation.
  def test_page_title
    params = {}
    Typus::Configuration.options[:app_name] = 'whatistypus.com'
    output = page_title('custom_action')
    assert_equal 'whatistypus.com › Custom action', output
  end

  def test_header
    output = header
    expected = <<-HTML
<h1>#{Typus::Configuration.options[:app_name]} </h1>
HTML
    assert_equal expected, output
  end

  # login_info renders the current user's edit link plus a sign-out link.
  def test_login_info
    typus_user = typus_users(:admin)
    output = login_info(typus_user)
    expected = <<-HTML
<ul>
<li>Logged as <a href="http://test.host/admin/typus_users/1/edit">Admin Example (admin)</a></li>
<li><a href="http://test.host/admin/sign_out">Sign out</a></li>
</ul>
HTML
    assert_equal expected, output
  end

  # The flash div takes its CSS class from the message key; an empty
  # flash renders nothing.
  def test_display_flash_message
    message = { :test => 'This is the message.' }
    output = display_flash_message(message)
    expected = <<-HTML
<div id="flash" class="test">
<p>This is the message.</p>
</div>
HTML
    assert_equal expected, output
    message = {}
    output = display_flash_message(message)
    assert output.nil?
  end

  def test_typus_message
    output = typus_message('chunky bacon', 'yay')
    expected = <<-HTML
<div id="flash" class="yay">
<p>chunky bacon</p>
</div>
HTML
    assert_equal expected, output
  end

  # locales renders one switcher link per configured locale; config is
  # restored to a single locale afterwards to avoid test pollution.
  def test_locales
    Typus::Configuration.options[:locales] = [ [ "English", :en ], [ "Español", :es ] ]
    output = locales('set_locale')
    expected = <<-HTML
<ul>
<li>Set language:</li>
<li><a href="set_locale?en">English</a></li>
<li><a href="set_locale?es">Español</a></li>
</ul>
HTML
    assert_equal expected, output
    Typus::Configuration.options[:locales] = [ [ "English", :en ] ]
  end
end
module Hawk
  # Gem version. Frozen so accidental in-place mutation raises rather
  # than silently corrupting the constant.
  VERSION = "0.2.1".freeze
end
Bump version to v0.2.2
module Hawk
  # Gem version. Frozen so accidental in-place mutation raises rather
  # than silently corrupting the constant.
  VERSION = "0.2.2".freeze
end
|
require 'yaml'
require 'oj'
require 'pry'
module Hippo
module Screen
GROUPS=Hash.new{|h,k| g=Group.new; g.identifier=k; h[k]=g }
DEFINITIONS=Hash.new
mattr_accessor :enabled_group_ids
# Registers screen definitions on behalf of a single extension, so each
# Definition records which extension declared it.
class DefinitionList
  def initialize(extension_id)
    @extension_id = extension_id
  end

  # Register (or reopen) the definition for +id+ and yield it so the
  # caller can configure it.
  def define(id)
    DEFINITIONS[id] ||= Definition.new(id, @extension_id)
    yield DEFINITIONS[id]
  end
end
# Module-level API: lookup, registration, and enumeration of screens.
class << self
  include Enumerable

  # Look up a screen definition by id; nil when unknown.
  def [](config)
    DEFINITIONS.key?(config) ? DEFINITIONS[config] : nil
  end

  # Yield a DefinitionList scoped to the given extension id.
  def for_extension(id)
    yield DefinitionList.new(id)
  end

  # Yield the (auto-created) group for +id+ so callers can configure it.
  def define_group(id)
    yield GROUPS[id]
  end

  # Enumerate every known screen definition, loading extensions first.
  def each
    Extensions.load_screens
    DEFINITIONS.each_value { |definition| yield definition }
  end

  # Enumerate every known screen group, loading extensions first.
  def each_group
    Extensions.load_screens
    GROUPS.each_value { |group| yield group }
  end

  # Path to the controlling extension's screens config file.
  def config_file
    Hippo::Extensions.controlling.root_path.join("config", "screens.rb")
  end
end
# Visual grouping of screens. Instances are created lazily by the GROUPS
# hash default proc and configured through Screen.define_group.
class Group
  include Concerns::AttrAccessorWithDefault

  attr_accessor_with_default :identifier
  attr_accessor_with_default :title
  attr_accessor_with_default :description
  attr_accessor_with_default :icon
  attr_accessor_with_default :order

  # Serialize for the client. Note: `order` is intentionally not included
  # in the payload.
  def to_json
    Oj.dump({
      id: identifier,
      title: title,
      description: description,
      icon: icon
    }, mode: :compat)
  end
end
# A single admin screen: its metadata, owning extension, and client asset.
class Definition
  include Concerns::AttrAccessorWithDefault

  attr_accessor_with_default :identifier
  attr_accessor_with_default :title
  attr_accessor_with_default :description
  attr_accessor_with_default :icon
  attr_accessor_with_default :group_id
  attr_accessor_with_default :extension
  attr_accessor_with_default :view_class
  attr_accessor_with_default :model_class
  attr_accessor_with_default :model_access, 'read'
  attr_accessor_with_default :asset

  def initialize(id, extension_id)
    self.identifier = id
    @extension_id = extension_id
    @extension = extension_id.underscore.camelize
  end

  # The Group this screen belongs to (auto-created when unknown, via the
  # GROUPS default proc).
  def group
    GROUPS[@group_id]
  end

  # True when the screen's client directory contains a file matching the
  # glob +pattern+.
  def has_file_matching?(pattern)
    Pathname.glob(root_path.join(pattern)).any?
  end

  # Directory that holds this screen's client code inside its extension.
  # Raises when the owning extension cannot be found.
  def root_path
    ext = Hippo::Extensions.for_identifier(@extension_id)
    raise "Unable to find extension '#{@extension_id}' for screen group" unless ext
    ext.root_path.join('client', url_prefix, identifier)
  end

  # The model class backing this screen, or nil when none is configured.
  # (An unused extension lookup that previously preceded the constantize
  # call has been removed — its result was never read.)
  def model
    return nil if @model_class.blank?
    (@extension_id.camelize + '::' + @model_class).constantize
  end

  # A screen with no model is always viewable; otherwise the user needs
  # read access.
  def viewable_by?(user)
    model.nil? || user.can_read?(self)
  end

  # Asset path: values containing '/' are used verbatim; bare names are
  # namespaced under the owning extension's screens directory.
  def asset_path
    (asset && asset =~ /\//) ? asset : "#{@extension_id}/screens/#{asset || identifier}"
  end

  def as_json
    {
      id: identifier,
      title: title,
      icon: icon,
      model: model_class,
      view: view_class,
      access: model_access,
      group_id: group_id,
      extension: extension,
      description: description,
      asset: asset_path
    }
  end

  def to_json
    Oj.dump(as_json, mode: :compat)
  end
end
end
end
Send only the active screens, in declaration order.
require 'yaml'
require 'oj'
require 'pry'
require_relative 'screen/group'
require_relative 'screen/definition'
module Hippo
module Screen
GROUPS=Hash.new{|h,k| g=Group.new; g.identifier=k; h[k]=g }
DEFINITIONS = Hash.new
EXTENSIONS = Hash.new
mattr_accessor :enabled_group_ids
# Registers screen definitions on behalf of a single extension and
# remembers the order in which they were declared.
class DefinitionList
  include Enumerable

  # Ordered ids of the screens this extension declared or extended.
  attr_reader :ids

  def initialize(extension_id)
    @ids = []
    @extension_id = extension_id
  end

  # Register (or reopen) the definition for +id+ and yield it so the
  # caller can configure it.
  def define(id)
    @ids << id
    definition = (DEFINITIONS[id] ||= Definition.new(id, @extension_id))
    yield definition
  end

  # Adopt an already-defined screen into this extension, optionally
  # yielding it for further configuration.
  def extend(id)
    @ids << id
    definition = DEFINITIONS[id]
    definition.extension_id = @extension_id
    yield definition if block_given?
  end

  # Yield each definition in declaration order.
  def each
    @ids.each { |screen_id| yield DEFINITIONS[screen_id] }
  end
end
# Module-level API: lookup, registration, and enumeration of screens.
class << self
  include Enumerable

  # Look up a screen definition by id; nil when unknown.
  def [](config)
    if DEFINITIONS.key?(config)
      DEFINITIONS[config]
    else
      nil
    end
  end

  # Enumerate every definition ever registered, regardless of extension.
  def each
    DEFINITIONS.values.each { |s| yield s }
  end

  # Fetch (creating on first use) the DefinitionList for an extension,
  # optionally yielding it for configuration. Always returns the list.
  def for_extension(id)
    definition = EXTENSIONS[id] ||= DefinitionList.new(id)
    yield definition if block_given?
    definition
  end

  # Yield the (auto-created) group for +id+ so callers can configure it.
  def define_group(id)
    group = GROUPS[id]
    yield group
  end

  # Ids of the screens active for the controlling extension, in the
  # order that extension declared them.
  def active_ids
    Extensions.load_screens
    for_extension(Hippo::Extensions.controlling.identifier).map(&:identifier)
  end

  # Enumerate every known screen group, loading extensions first.
  def each_group
    Extensions.load_screens
    GROUPS.values.each{ | group | yield group }
  end

  # Path to the controlling extension's screens config file.
  def config_file
    Hippo::Extensions.controlling.root_path.join("config", "screens.rb")
  end
end
end
end
|
# frozen_string_literal: true

module HTTP
  # Gem version (string is frozen via the magic comment above).
  VERSION = "4.0.0"
end
Update version to 5.0.0-dev
# frozen_string_literal: true

module HTTP
  # Pre-release development version (string frozen via the magic comment).
  VERSION = "5.0.0-dev"
end
|
require 'bunny'
require 'carrot-top'
require 'securerandom'
require 'hutch/logging'
require 'hutch/exceptions'
module Hutch
class Broker
include Logging
attr_accessor :connection, :channel, :exchange, :api_client,
:default_wait_exchange, :wait_exchanges
def initialize(config = nil)
@config = config || Hutch::Config
@wait_exchanges = {}
end
def connect(options = {})
set_up_amqp_connection
set_up_wait_exchange unless @config[:mq_wait_exchange].nil?
set_up_api_connection if options.fetch(:enable_http_api_use, true)
return unless block_given?
begin
yield
ensure
disconnect
end
end
def disconnect
@channel.close if @channel
@connection.close if @connection
@channel, @connection, @exchange, @api_client = nil, nil, nil, nil
@default_wait_exchange, @wait_exchanges = nil, {}
end
# Connect to RabbitMQ via AMQP. This sets up the main connection and
# channel we use for talking to RabbitMQ. It also ensures the existence of
# the exchange we'll be using.
def set_up_amqp_connection
open_connection!
open_channel!
exchange_name = @config[:mq_exchange]
logger.info "using topic exchange '#{exchange_name}'"
with_bunny_precondition_handler('exchange') do
@exchange = @channel.topic(exchange_name, durable: true)
end
end
# Set up wait exchange as a fanout with queue
def set_up_wait_exchange
wait_exchange_name = @config[:mq_wait_exchange]
logger.info "using fanout wait exchange '#{wait_exchange_name}'"
@default_wait_exchange = declare_wait_exchange(wait_exchange_name)
wait_queue_name = @config[:mq_wait_queue]
logger.info "using wait queue '#{wait_queue_name}'"
declare_wait_queue(@default_wait_exchange, wait_queue_name)
expiration_suffices = (@config[:mq_wait_expiration_suffices] || []).map(&:to_s)
expiration_suffices.each do |suffix|
logger.info "using expiration suffix '_#{suffix}'"
suffix_exchange = declare_wait_exchange("#{wait_exchange_name}_#{suffix}")
@wait_exchanges[suffix] = suffix_exchange
declare_wait_queue(suffix_exchange, "#{wait_queue_name}_#{suffix}")
end
end
# rubocop:disable Metrics/AbcSize
def open_connection!
if @config[:uri] && !@config[:uri].empty?
u = URI.parse(@config[:uri])
@config[:mq_host] = u.host
@config[:mq_port] = u.port
@config[:mq_vhost] = u.path.sub(/^\//, '')
@config[:mq_username] = u.user
@config[:mq_password] = u.password
end
host = @config[:mq_host]
port = @config[:mq_port]
vhost = @config[:mq_vhost]
username = @config[:mq_username]
password = @config[:mq_password]
tls = @config[:mq_tls]
tls_key = @config[:mq_tls_key]
tls_cert = @config[:mq_tls_cert]
protocol = tls ? 'amqps://' : 'amqp://'
sanitized_uri = "#{protocol}#{username}@#{host}:#{port}/#{vhost.sub(/^\//, '')}"
logger.info "connecting to rabbitmq (#{sanitized_uri})"
@connection = Bunny.new(host: host, port: port, vhost: vhost,
tls: tls, tls_key: tls_key, tls_cert: tls_cert,
username: username, password: password,
heartbeat: 30, automatically_recover: true,
network_recovery_interval: 1)
with_bunny_connection_handler(sanitized_uri) do
@connection.start
end
logger.info "connected to RabbitMQ at #{host} as #{username}"
@connection
end
# rubocop:enable Metrics/AbcSize
def open_channel!
logger.info 'opening rabbitmq channel'
@channel = connection.create_channel.tap do |ch|
ch.prefetch(@config[:channel_prefetch]) if @config[:channel_prefetch]
end
end
# Set up the connection to the RabbitMQ management API. Unfortunately, this
# is necessary to do a few things that are impossible over AMQP. E.g.
# listing queues and bindings.
def set_up_api_connection
logger.info "connecting to rabbitmq HTTP API (#{api_config.sanitized_uri})"
with_authentication_error_handler do
with_connection_error_handler do
@api_client = CarrotTop.new(host: api_config.host, port: api_config.port,
user: api_config.username, password: api_config.password,
ssl: api_config.ssl)
@api_client.exchanges
end
end
end
# Create / get a durable queue and apply namespace if it exists.
def queue(name)
with_bunny_precondition_handler('queue') do
namespace = @config[:namespace].to_s.downcase.gsub(/[^-:\.\w]/, '')
name = name.prepend(namespace + ':') unless namespace.empty?
channel.queue(name, durable: true)
end
end
# Return a mapping of queue names to the routing keys they're bound to.
def bindings
results = Hash.new { |hash, key| hash[key] = [] }
@api_client.bindings.each do |binding|
next if binding['destination'] == binding['routing_key']
next unless binding['source'] == @config[:mq_exchange]
next unless binding['vhost'] == @config[:mq_vhost]
results[binding['destination']] << binding['routing_key']
end
results
end
# Bind a queue to the broker's exchange on the routing keys provided. Any
# existing bindings on the queue that aren't present in the array of
# routing keys will be unbound.
def bind_queue(queue, routing_keys)
# Find the existing bindings, and unbind any redundant bindings
queue_bindings = bindings.select { |dest, _keys| dest == queue.name }
queue_bindings.each do |_dest, keys|
keys.reject { |key| routing_keys.include?(key) }.each do |key|
logger.debug "removing redundant binding #{queue.name} <--> #{key}"
queue.unbind(@exchange, routing_key: key)
end
end
# Ensure all the desired bindings are present
routing_keys.each do |routing_key|
logger.debug "creating binding #{queue.name} <--> #{routing_key}"
queue.bind(@exchange, routing_key: routing_key)
end
end
# Each subscriber is run in a thread. This calls Thread#join on each of the
# subscriber threads.
def wait_on_threads(timeout)
# Thread#join returns nil when the timeout is hit. If any return nil,
# the threads didn't all join so we return false.
per_thread_timeout = timeout.to_f / work_pool_threads.length
work_pool_threads.none? { |thread| thread.join(per_thread_timeout).nil? }
end
def stop
@channel.work_pool.kill
end
def requeue(delivery_tag)
@channel.reject(delivery_tag, true)
end
def reject(delivery_tag, requeue=false)
@channel.reject(delivery_tag, requeue)
end
def ack(delivery_tag)
@channel.ack(delivery_tag, false)
end
def nack(delivery_tag)
@channel.nack(delivery_tag, false, false)
end
def publish(routing_key, message, properties = {})
ensure_connection!(routing_key, message)
non_overridable_properties = {
routing_key: routing_key,
timestamp: Time.now.to_i,
content_type: 'application/json'
}
properties[:message_id] ||= generate_id
logger.info("publishing message '#{message.inspect}' to #{routing_key}")
@exchange.publish(JSON.dump(message), { persistent: true }
.merge(properties)
.merge(global_properties)
.merge(non_overridable_properties))
end
def publish_wait(routing_key, message, properties = {})
ensure_connection!(routing_key, message)
if @config[:mq_wait_exchange].nil?
raise_publish_error('wait exchange not defined', routing_key, message)
end
non_overridable_properties = {
routing_key: routing_key,
content_type: 'application/json'
}
properties[:message_id] ||= generate_id
properties[:timestamp] ||= Time.now.to_i
logger.info("publishing message '#{message.inspect}' to wait exchange with routing key #{routing_key}")
exchange = @wait_exchanges.fetch(properties[:expiration].to_s, @default_wait_exchange)
exchange.publish(JSON.dump(message), { persistent: true }
.merge(properties)
.merge(global_properties)
.merge(non_overridable_properties))
end
private
def raise_publish_error(reason, routing_key, message)
msg = "Unable to publish - #{reason}. Message: #{message.inspect}, Routing key: #{routing_key}."
logger.error(msg)
raise PublishError, msg
end
def ensure_connection!(routing_key, message)
raise_publish_error('no connection to broker', routing_key, message) unless @connection
raise_publish_error('connection is closed', routing_key, message) unless @connection.open?
end
def api_config
@api_config ||= OpenStruct.new.tap do |config|
config.host = @config[:mq_api_host]
config.port = @config[:mq_api_port]
config.username = @config[:mq_username]
config.password = @config[:mq_password]
config.ssl = @config[:mq_api_ssl]
config.protocol = config.ssl ? 'https://' : 'http://'
config.sanitized_uri = "#{config.protocol}#{config.username}@#{config.host}:#{config.port}/"
end
end
def with_authentication_error_handler
yield
rescue Net::HTTPServerException => ex
logger.error "HTTP API connection error: #{ex.message.downcase}"
if ex.response.code == '401'
raise AuthenticationError, 'invalid HTTP API credentials'
else
raise
end
end
def with_connection_error_handler
yield
rescue Errno::ECONNREFUSED => ex
logger.error "HTTP API connection error: #{ex.message.downcase}"
raise ConnectionError, "couldn't connect to HTTP API at #{api_config.sanitized_uri}"
end
def with_bunny_precondition_handler(item)
yield
rescue Bunny::PreconditionFailed => ex
logger.error ex.message
s = "RabbitMQ responded with 406 Precondition Failed when creating this #{item}. " \
'Perhaps it is being redeclared with non-matching attributes'
raise WorkerSetupError, s
end
def with_bunny_connection_handler(uri)
yield
rescue Bunny::TCPConnectionFailed => ex
logger.error "amqp connection error: #{ex.message.downcase}"
raise ConnectionError, "couldn't connect to rabbitmq at #{uri}"
end
def work_pool_threads
@channel.work_pool.threads || []
end
def generate_id
SecureRandom.uuid
end
def global_properties
Hutch.global_properties.respond_to?(:call) ? Hutch.global_properties.call : Hutch.global_properties
end
# Declares (or re-opens) a durable fanout exchange used for delayed
# ("wait") messages, translating redeclaration conflicts into
# WorkerSetupError via the precondition handler.
def declare_wait_exchange(name)
  with_bunny_precondition_handler('exchange') do
    @channel.fanout(name, durable: true)
  end
end
# Declares a durable wait queue whose dead-letter exchange points back at
# the main exchange (so expired messages are re-routed for consumption),
# then binds it to the given wait exchange.
def declare_wait_queue(exchange, queue_name)
  with_bunny_precondition_handler('queue') do
    dead_letter_args = { 'x-dead-letter-exchange' => @config[:mq_exchange] }
    queue = @channel.queue(queue_name, durable: true, arguments: dead_letter_args)
    queue.bind(exchange)
  end
end
end
end
Get message_properties before publish
require 'bunny'
require 'carrot-top'
require 'securerandom'
require 'hutch/logging'
require 'hutch/exceptions'
module Hutch
class Broker
include Logging
attr_accessor :connection, :channel, :exchange, :api_client,
:default_wait_exchange, :wait_exchanges
def initialize(config = nil)
@config = config || Hutch::Config
@wait_exchanges = {}
end
def connect(options = {})
set_up_amqp_connection
set_up_wait_exchange unless @config[:mq_wait_exchange].nil?
set_up_api_connection if options.fetch(:enable_http_api_use, true)
return unless block_given?
begin
yield
ensure
disconnect
end
end
def disconnect
@channel.close if @channel
@connection.close if @connection
@channel, @connection, @exchange, @api_client = nil, nil, nil, nil
@default_wait_exchange, @wait_exchanges = nil, {}
end
# Connect to RabbitMQ via AMQP. This sets up the main connection and
# channel we use for talking to RabbitMQ. It also ensures the existence of
# the exchange we'll be using.
def set_up_amqp_connection
open_connection!
open_channel!
exchange_name = @config[:mq_exchange]
logger.info "using topic exchange '#{exchange_name}'"
with_bunny_precondition_handler('exchange') do
@exchange = @channel.topic(exchange_name, durable: true)
end
end
# Set up wait exchange as a fanout with queue
def set_up_wait_exchange
wait_exchange_name = @config[:mq_wait_exchange]
logger.info "using fanout wait exchange '#{wait_exchange_name}'"
@default_wait_exchange = declare_wait_exchange(wait_exchange_name)
wait_queue_name = @config[:mq_wait_queue]
logger.info "using wait queue '#{wait_queue_name}'"
declare_wait_queue(@default_wait_exchange, wait_queue_name)
expiration_suffices = (@config[:mq_wait_expiration_suffices] || []).map(&:to_s)
expiration_suffices.each do |suffix|
logger.info "using expiration suffix '_#{suffix}'"
suffix_exchange = declare_wait_exchange("#{wait_exchange_name}_#{suffix}")
@wait_exchanges[suffix] = suffix_exchange
declare_wait_queue(suffix_exchange, "#{wait_queue_name}_#{suffix}")
end
end
# rubocop:disable Metrics/AbcSize
def open_connection!
if @config[:uri] && !@config[:uri].empty?
u = URI.parse(@config[:uri])
@config[:mq_host] = u.host
@config[:mq_port] = u.port
@config[:mq_vhost] = u.path.sub(/^\//, '')
@config[:mq_username] = u.user
@config[:mq_password] = u.password
end
host = @config[:mq_host]
port = @config[:mq_port]
vhost = @config[:mq_vhost]
username = @config[:mq_username]
password = @config[:mq_password]
tls = @config[:mq_tls]
tls_key = @config[:mq_tls_key]
tls_cert = @config[:mq_tls_cert]
protocol = tls ? 'amqps://' : 'amqp://'
sanitized_uri = "#{protocol}#{username}@#{host}:#{port}/#{vhost.sub(/^\//, '')}"
logger.info "connecting to rabbitmq (#{sanitized_uri})"
@connection = Bunny.new(host: host, port: port, vhost: vhost,
tls: tls, tls_key: tls_key, tls_cert: tls_cert,
username: username, password: password,
heartbeat: 30, automatically_recover: true,
network_recovery_interval: 1)
with_bunny_connection_handler(sanitized_uri) do
@connection.start
end
logger.info "connected to RabbitMQ at #{host} as #{username}"
@connection
end
# rubocop:enable Metrics/AbcSize
def open_channel!
logger.info 'opening rabbitmq channel'
@channel = connection.create_channel.tap do |ch|
ch.prefetch(@config[:channel_prefetch]) if @config[:channel_prefetch]
end
end
# Set up the connection to the RabbitMQ management API. Unfortunately, this
# is necessary to do a few things that are impossible over AMQP. E.g.
# listing queues and bindings.
def set_up_api_connection
logger.info "connecting to rabbitmq HTTP API (#{api_config.sanitized_uri})"
with_authentication_error_handler do
with_connection_error_handler do
@api_client = CarrotTop.new(host: api_config.host, port: api_config.port,
user: api_config.username, password: api_config.password,
ssl: api_config.ssl)
@api_client.exchanges
end
end
end
# Create / get a durable queue and apply namespace if it exists.
def queue(name)
with_bunny_precondition_handler('queue') do
namespace = @config[:namespace].to_s.downcase.gsub(/[^-:\.\w]/, '')
name = name.prepend(namespace + ':') unless namespace.empty?
channel.queue(name, durable: true)
end
end
# Return a mapping of queue names to the routing keys they're bound to.
def bindings
results = Hash.new { |hash, key| hash[key] = [] }
@api_client.bindings.each do |binding|
next if binding['destination'] == binding['routing_key']
next unless binding['source'] == @config[:mq_exchange]
next unless binding['vhost'] == @config[:mq_vhost]
results[binding['destination']] << binding['routing_key']
end
results
end
# Bind a queue to the broker's exchange on the routing keys provided. Any
# existing bindings on the queue that aren't present in the array of
# routing keys will be unbound.
def bind_queue(queue, routing_keys)
# Find the existing bindings, and unbind any redundant bindings
queue_bindings = bindings.select { |dest, _keys| dest == queue.name }
queue_bindings.each do |_dest, keys|
keys.reject { |key| routing_keys.include?(key) }.each do |key|
logger.debug "removing redundant binding #{queue.name} <--> #{key}"
queue.unbind(@exchange, routing_key: key)
end
end
# Ensure all the desired bindings are present
routing_keys.each do |routing_key|
logger.debug "creating binding #{queue.name} <--> #{routing_key}"
queue.bind(@exchange, routing_key: routing_key)
end
end
# Each subscriber is run in a thread. This calls Thread#join on each of the
# subscriber threads.
def wait_on_threads(timeout)
# Thread#join returns nil when the timeout is hit. If any return nil,
# the threads didn't all join so we return false.
per_thread_timeout = timeout.to_f / work_pool_threads.length
work_pool_threads.none? { |thread| thread.join(per_thread_timeout).nil? }
end
def stop
@channel.work_pool.kill
end
def requeue(delivery_tag)
@channel.reject(delivery_tag, true)
end
def reject(delivery_tag, requeue=false)
@channel.reject(delivery_tag, requeue)
end
def ack(delivery_tag)
@channel.ack(delivery_tag, false)
end
def nack(delivery_tag)
@channel.nack(delivery_tag, false, false)
end
def publish(routing_key, message, properties = {})
ensure_connection!(routing_key, message)
non_overridable_properties = {
routing_key: routing_key,
timestamp: Time.now.to_i,
content_type: 'application/json'
}
properties[:message_id] ||= generate_id
logger.info("publishing message '#{message.inspect}' to #{routing_key}")
@exchange.publish(JSON.dump(message), { persistent: true }
.merge(properties)
.merge(global_properties)
.merge(non_overridable_properties))
end
def publish_wait(routing_key, message, properties = {})
ensure_connection!(routing_key, message)
if @config[:mq_wait_exchange].nil?
raise_publish_error('wait exchange not defined', routing_key, message)
end
non_overridable_properties = {
routing_key: routing_key,
content_type: 'application/json'
}
properties[:message_id] ||= generate_id
properties[:timestamp] ||= Time.now.to_i
message_properties = { persistent: true }
.merge(properties)
.merge(global_properties)
.merge(non_overridable_properties)
exchange = @wait_exchanges.fetch(message_properties[:expiration].to_s, @default_wait_exchange)
logger.info("publishing message '#{message.inspect}' to '#{exchange.name}' with routing key '#{routing_key}'")
exchange.publish(JSON.dump(message), message_properties)
end
private
def raise_publish_error(reason, routing_key, message)
msg = "Unable to publish - #{reason}. Message: #{message.inspect}, Routing key: #{routing_key}."
logger.error(msg)
raise PublishError, msg
end
def ensure_connection!(routing_key, message)
raise_publish_error('no connection to broker', routing_key, message) unless @connection
raise_publish_error('connection is closed', routing_key, message) unless @connection.open?
end
def api_config
@api_config ||= OpenStruct.new.tap do |config|
config.host = @config[:mq_api_host]
config.port = @config[:mq_api_port]
config.username = @config[:mq_username]
config.password = @config[:mq_password]
config.ssl = @config[:mq_api_ssl]
config.protocol = config.ssl ? 'https://' : 'http://'
config.sanitized_uri = "#{config.protocol}#{config.username}@#{config.host}:#{config.port}/"
end
end
def with_authentication_error_handler
yield
rescue Net::HTTPServerException => ex
logger.error "HTTP API connection error: #{ex.message.downcase}"
if ex.response.code == '401'
raise AuthenticationError, 'invalid HTTP API credentials'
else
raise
end
end
def with_connection_error_handler
yield
rescue Errno::ECONNREFUSED => ex
logger.error "HTTP API connection error: #{ex.message.downcase}"
raise ConnectionError, "couldn't connect to HTTP API at #{api_config.sanitized_uri}"
end
def with_bunny_precondition_handler(item)
yield
rescue Bunny::PreconditionFailed => ex
logger.error ex.message
s = "RabbitMQ responded with 406 Precondition Failed when creating this #{item}. " \
'Perhaps it is being redeclared with non-matching attributes'
raise WorkerSetupError, s
end
def with_bunny_connection_handler(uri)
yield
rescue Bunny::TCPConnectionFailed => ex
logger.error "amqp connection error: #{ex.message.downcase}"
raise ConnectionError, "couldn't connect to rabbitmq at #{uri}"
end
def work_pool_threads
@channel.work_pool.threads || []
end
def generate_id
SecureRandom.uuid
end
def global_properties
Hutch.global_properties.respond_to?(:call) ? Hutch.global_properties.call : Hutch.global_properties
end
def declare_wait_exchange(name)
with_bunny_precondition_handler('exchange') do
@channel.fanout(name, durable: true)
end
end
def declare_wait_queue(exchange, queue_name)
with_bunny_precondition_handler('queue') do
queue = @channel.queue(
queue_name,
durable: true,
arguments: { 'x-dead-letter-exchange' => @config[:mq_exchange] }
)
queue.bind(exchange)
end
end
end
end
|
require 'integration_helper'
require 'mws/products'
# Integration tests for the MWS Products API, run against each configured
# client. Each test asserts the parsed response is non-empty.
class TestProducts < IntegrationTest
  def test_lists_matching_products
    clients.each do |client|
      res = client.list_matching_products('architecture')
      refute_empty res.parse
    end
  end

  def test_gets_matching_product
    clients.each do |client|
      res = client.get_matching_product('1780935374')
      refute_empty res.parse
    end
  end

  def test_gets_matching_product_for_id
    clients.each do |client|
      res = client.get_matching_product_for_id('ISBN', '9781780935379')
      refute_empty res.parse
    end
  end

  def test_gets_competitive_pricing_for_asin
    clients.each do |client|
      res = client.get_competitive_pricing_for_asin('1780935374')
      refute_empty res.parse
    end
  end

  def test_gets_lowest_offer_listings_for_asin
    clients.each do |client|
      res = client.get_lowest_offer_listings_for_asin('1780935374')
      refute_empty res.parse
    end
  end

  def test_gets_lowest_priced_offers_for_asin
    clients.each do |client|
      res = client.get_lowest_priced_offers_for_asin('1780935374', 'New')
      refute_empty res.parse
    end
  end

  def test_gets_product_categories_for_asin
    clients.each do |client|
      res = client.get_product_categories_for_asin('1780935374')
      refute_empty res.parse
    end
  end

  def test_gets_my_fees_estimate
    clients.each do |client|
      res = client.get_my_fees_estimate(
        marketplace_id: client.primary_marketplace_id,
        id_type: 'ASIN',
        id_value: '1780935374',
        price_to_estimate_fees: {
          listing_price: {
            currency_code: currency_code_for(client.primary_marketplace_id),
            amount: 100
          }
        },
        identifier: '123',
        is_amazon_fulfilled: false
      )
      # Hash#dig walks the nested response in one call; a missing key now
      # fails the assertion (nil) instead of raising KeyError.
      assert res
        .parse
        .dig('FeesEstimateResultList', 'FeesEstimateResult', 'FeesEstimate')
    end
  end

  def test_gets_service_status
    clients.each do |client|
      res = client.get_service_status
      refute_empty res.parse
    end
  end

  private

  CURRENCY_CODES = {
    'A2EUQ1WTGCTBG2' => 'CAD',
    'AAHKV2X7AFYLW' => 'CNY',
    'A1F83G8C2ARO7P' => 'GBP',
    'A21TJRUUN4KGV' => 'INR',
    'A1VC38T7YXB528' => 'JPY',
    'A1AM78C64UM0Y8' => 'MXN',
    'ATVPDKIKX0DER' => 'USD'
  }.freeze

  # Currency for a marketplace id; falls back to EUR for EU marketplaces
  # not listed above.
  def currency_code_for(marketplace_id)
    CURRENCY_CODES.fetch(marketplace_id, 'EUR')
  end
end
Use Hash#dig
require 'integration_helper'
require 'mws/products'
# Integration tests for the MWS Products API, executed against every
# configured client; each asserts the parsed payload is present.
class TestProducts < IntegrationTest
  def test_lists_matching_products
    clients.each do |client|
      response = client.list_matching_products('architecture')
      refute_empty response.parse
    end
  end

  def test_gets_matching_product
    clients.each do |client|
      response = client.get_matching_product('1780935374')
      refute_empty response.parse
    end
  end

  def test_gets_matching_product_for_id
    clients.each do |client|
      response = client.get_matching_product_for_id('ISBN', '9781780935379')
      refute_empty response.parse
    end
  end

  def test_gets_competitive_pricing_for_asin
    clients.each do |client|
      response = client.get_competitive_pricing_for_asin('1780935374')
      refute_empty response.parse
    end
  end

  def test_gets_lowest_offer_listings_for_asin
    clients.each do |client|
      response = client.get_lowest_offer_listings_for_asin('1780935374')
      refute_empty response.parse
    end
  end

  def test_gets_lowest_priced_offers_for_asin
    clients.each do |client|
      response = client.get_lowest_priced_offers_for_asin('1780935374', 'New')
      refute_empty response.parse
    end
  end

  def test_gets_product_categories_for_asin
    clients.each do |client|
      response = client.get_product_categories_for_asin('1780935374')
      refute_empty response.parse
    end
  end

  def test_gets_my_fees_estimate
    clients.each do |client|
      response = client.get_my_fees_estimate(
        marketplace_id: client.primary_marketplace_id,
        id_type: 'ASIN',
        id_value: '1780935374',
        price_to_estimate_fees: {
          listing_price: {
            currency_code: currency_code_for(client.primary_marketplace_id),
            amount: 100
          }
        },
        identifier: '123',
        is_amazon_fulfilled: false
      )
      estimate = response.parse.dig('FeesEstimateResultList', 'FeesEstimateResult', 'FeesEstimate')
      assert estimate
    end
  end

  def test_gets_service_status
    clients.each do |client|
      response = client.get_service_status
      refute_empty response.parse
    end
  end

  private

  CURRENCY_CODES = {
    'A2EUQ1WTGCTBG2' => 'CAD',
    'AAHKV2X7AFYLW' => 'CNY',
    'A1F83G8C2ARO7P' => 'GBP',
    'A21TJRUUN4KGV' => 'INR',
    'A1VC38T7YXB528' => 'JPY',
    'A1AM78C64UM0Y8' => 'MXN',
    'ATVPDKIKX0DER' => 'USD'
  }.freeze

  # Currency for a marketplace id; EUR is the fallback for marketplaces
  # not in the table.
  def currency_code_for(marketplace_id)
    CURRENCY_CODES.fetch(marketplace_id, 'EUR')
  end
end
|
require 'hutch/error_handlers/logger'
require 'logger'
module Hutch
  # Raised when an unknown configuration attribute is read or written.
  class UnknownAttributeError < StandardError; end

  # Global, lazily-initialised configuration store for Hutch. Attributes
  # can be accessed via get/set, []/[]= or dynamic accessors
  # (e.g. Hutch::Config.mq_host) implemented with method_missing.
  module Config
    require 'yaml'

    # Initialises the config hash with defaults. Generalised (backward
    # compatibly) to accept an optional hash of overrides merged over the
    # defaults.
    def self.initialize(params = {})
      @config = {
        mq_host: 'localhost',
        mq_port: 5672,
        mq_exchange: 'hutch', # TODO: should this be required?
        mq_vhost: '/',
        mq_tls: false,
        mq_tls_cert: nil,
        mq_tls_key: nil,
        mq_username: 'guest',
        mq_password: 'guest',
        mq_api_host: 'localhost',
        mq_api_port: 15672,
        mq_api_ssl: false,
        log_level: Logger::INFO,
        require_paths: [],
        autoload_rails: true,
        error_handlers: [Hutch::ErrorHandlers::Logger.new],
        namespace: nil,
        daemonise: false,
        channel_prefetch: 0
      }.merge(params)
    end

    # Reads an attribute; raises UnknownAttributeError for unknown keys.
    def self.get(attr)
      check_attr(attr)
      user_config[attr]
    end

    # Writes an attribute; raises UnknownAttributeError for unknown keys.
    def self.set(attr, value)
      check_attr(attr)
      user_config[attr] = value
    end

    class << self
      alias_method :[], :get
      alias_method :[]=, :set
    end

    def self.check_attr(attr)
      unless user_config.key?(attr)
        raise UnknownAttributeError, "#{attr} is not a valid config attribute"
      end
    end

    # Returns the config hash, initialising defaults on first access.
    def self.user_config
      initialize unless @config
      @config
    end

    # Loads attribute/value pairs from a YAML file-like object.
    # NOTE(review): YAML.load is unsafe on untrusted input; prefer
    # YAML.safe_load if the file can come from outside the deployment.
    def self.load_from_file(file)
      YAML.load(file).each do |attr, value|
        Hutch::Config.send("#{attr}=", value)
      end
    end

    # Dynamic reader/writer methods for known config keys.
    def self.method_missing(method, *args, &block)
      attr = method.to_s.sub(/=$/, '').to_sym
      return super unless user_config.key?(attr)
      if method =~ /=$/
        set(attr, args.first)
      else
        get(attr)
      end
    end

    # Keeps respond_to? consistent with the dynamic accessors above.
    def self.respond_to_missing?(method, include_all = false)
      attr = method.to_s.sub(/=$/, '').to_sym
      user_config.key?(attr) || super
    end

    private

    # Deep-copies a config value via Marshal round-trip.
    def deep_copy(obj)
      Marshal.load(Marshal.dump(obj))
    end
  end
end
optional hash of params to Config constructor
This change allows passing an optional hash of parameters to the Config constructor.
require 'hutch/error_handlers/logger'
require 'logger'
module Hutch
  # Raised when an unknown configuration attribute is read or written.
  class UnknownAttributeError < StandardError; end

  # Global, lazily-initialised configuration store for Hutch. Attributes
  # can be accessed via get/set, []/[]= or dynamic accessors
  # (e.g. Hutch::Config.mq_host) implemented with method_missing.
  module Config
    require 'yaml'

    # Initialises the config hash with defaults, merged with an optional
    # hash of overrides.
    def self.initialize(params={})
      @config = {
        mq_host: 'localhost',
        mq_port: 5672,
        mq_exchange: 'hutch', # TODO: should this be required?
        mq_vhost: '/',
        mq_tls: false,
        mq_tls_cert: nil,
        mq_tls_key: nil,
        mq_username: 'guest',
        mq_password: 'guest',
        mq_api_host: 'localhost',
        mq_api_port: 15672,
        mq_api_ssl: false,
        log_level: Logger::INFO,
        require_paths: [],
        autoload_rails: true,
        error_handlers: [Hutch::ErrorHandlers::Logger.new],
        namespace: nil,
        daemonise: false,
        channel_prefetch: 0
      }.merge(params)
    end

    # Reads an attribute; raises UnknownAttributeError for unknown keys.
    def self.get(attr)
      check_attr(attr)
      user_config[attr]
    end

    # Writes an attribute; raises UnknownAttributeError for unknown keys.
    def self.set(attr, value)
      check_attr(attr)
      user_config[attr] = value
    end

    class << self
      alias_method :[], :get
      alias_method :[]=, :set
    end

    def self.check_attr(attr)
      unless user_config.key?(attr)
        raise UnknownAttributeError, "#{attr} is not a valid config attribute"
      end
    end

    # Returns the config hash, initialising defaults on first access.
    def self.user_config
      initialize unless @config
      @config
    end

    # Loads attribute/value pairs from a YAML file-like object.
    # NOTE(review): YAML.load is unsafe on untrusted input; prefer
    # YAML.safe_load if the file can come from outside the deployment.
    def self.load_from_file(file)
      YAML.load(file).each do |attr, value|
        Hutch::Config.send("#{attr}=", value)
      end
    end

    # Dynamic reader/writer methods for known config keys.
    def self.method_missing(method, *args, &block)
      attr = method.to_s.sub(/=$/, '').to_sym
      return super unless user_config.key?(attr)
      if method =~ /=$/
        set(attr, args.first)
      else
        get(attr)
      end
    end

    # Added so respond_to? agrees with the dynamic accessors provided by
    # method_missing (standard Ruby pairing).
    def self.respond_to_missing?(method, include_all = false)
      attr = method.to_s.sub(/=$/, '').to_sym
      user_config.key?(attr) || super
    end

    private

    # Deep-copies a config value via Marshal round-trip.
    def deep_copy(obj)
      Marshal.load(Marshal.dump(obj))
    end
  end
end
|
# frozen_string_literal: true
require 'base_test_helper'
require 'ffi-gobject_introspection'
GObjectIntrospection::IRepository.prepend_search_path File.join(File.dirname(__FILE__), 'lib')
# Prepended into GObjectIntrospection::IRepository so the locally built
# test libraries in ./lib are used for the known test namespaces; all
# other namespaces fall through to the default lookup.
module LocalSharedLibrary
  def shared_library(namespace)
    local = %w(Everything GIMarshallingTests Regress Utility WarnLib).include?(namespace)
    if local
      File.join(File.dirname(__FILE__), 'lib', "lib#{namespace.downcase}.so")
    else
      super
    end
  end
end
GObjectIntrospection::IRepository.prepend LocalSharedLibrary
# Helpers mixed into Minitest tests for looking up GObject introspection
# data and skipping tests on older gobject-introspection versions.
module IntrospectionTestExtensions
  class << self
    # Memoized detected gobject-introspection version, shared by all tests.
    attr_accessor :version
  end

  def get_introspection_data(namespace, name)
    gir = GObjectIntrospection::IRepository.default
    gir.require namespace, nil
    gir.find_by_name namespace, name
  end

  def get_field_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_field name
  end

  def get_method_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_method name
  end

  def get_property_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_property name
  end

  def get_signal_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_signal name
  end

  def get_vfunc_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_vfunc name
  end

  # Skips the current test when the installed gobject-introspection is
  # older than introduction_version; raises if the guard is newer than
  # any known version (the test would always be skipped).
  # Fix: removed the stray `;` after the parameter list.
  # NOTE(review): versions are compared lexicographically, which happens
  # to work for the current guard set but would misorder e.g. '1.101.0'.
  def skip_below(introduction_version)
    unless LATEST_VERSION >= introduction_version
      raise "Version #{introduction_version} is too new and would always be skipped"
    end
    skip "Introduced in #{introduction_version}" if introduction_version > version
  end

  def version
    IntrospectionTestExtensions.version ||= calculate_version
  end

  # Version => probe symbol introduced in it, newest first; a two-element
  # value probes a toplevel name, a three-element value probes a method.
  # LATEST_VERSION relies on insertion order.
  VERSION_GUARDS = {
    '1.59.4' => %w(Regress test_array_struct_in_none),
    '1.58.3' => %w(Regress TestReferenceCounters),
    '1.57.2' => %w(Regress TestInterface emit_signal),
    '1.55.2' => %w(Regress FOO_FLAGS_SECOND_AND_THIRD),
    '1.53.4' => %w(Regress TestObj name_conflict),
    '1.49.1' => %w(Regress AnonymousUnionAndStruct),
    '1.47.92' => %w(Regress get_variant),
    '1.47.1' => %w(Regress test_noptr_callback)
  }.freeze
  LATEST_VERSION = VERSION_GUARDS.keys.first

  # Determines the installed version by probing guards newest-first and
  # returning the first whose symbol is present.
  def calculate_version
    VERSION_GUARDS.each do |version, (namespace, klass, methodname)|
      result = if methodname
                 get_method_introspection_data(namespace, klass, methodname)
               else
                 get_introspection_data(namespace, klass)
               end
      return version if result
    end
    '1.46.0' # Minimum supported version
  end
end
Minitest::Test.include IntrospectionTestExtensions
Add version check for gobject-introspection 1.61.1
# frozen_string_literal: true
require 'base_test_helper'
require 'ffi-gobject_introspection'
GObjectIntrospection::IRepository.prepend_search_path File.join(File.dirname(__FILE__), 'lib')
# Prepended into GObjectIntrospection::IRepository: resolves the shared
# library for the known test namespaces to the locally built .so files
# in ./lib, delegating to the original lookup otherwise.
module LocalSharedLibrary
  def shared_library(namespace)
    case namespace
    when 'Everything', 'GIMarshallingTests', 'Regress', 'Utility', 'WarnLib'
      File.join(File.dirname(__FILE__), 'lib', "lib#{namespace.downcase}.so")
    else
      super
    end
  end
end
GObjectIntrospection::IRepository.prepend LocalSharedLibrary
# Helpers mixed into Minitest tests for looking up GObject introspection
# data and skipping tests on older gobject-introspection versions.
module IntrospectionTestExtensions
  class << self
    # Memoized detected gobject-introspection version, shared by all tests.
    attr_accessor :version
  end

  def get_introspection_data(namespace, name)
    gir = GObjectIntrospection::IRepository.default
    gir.require namespace, nil
    gir.find_by_name namespace, name
  end

  def get_field_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_field name
  end

  def get_method_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_method name
  end

  def get_property_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_property name
  end

  def get_signal_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_signal name
  end

  def get_vfunc_introspection_data(namespace, klass, name)
    get_introspection_data(namespace, klass).find_vfunc name
  end

  # Skips the current test when the installed gobject-introspection is
  # older than introduction_version; raises if the guard is newer than
  # any known version (the test would always be skipped).
  # Fix: removed the stray `;` after the parameter list.
  # NOTE(review): versions are compared lexicographically, which happens
  # to work for the current guard set but would misorder e.g. '1.101.0'.
  def skip_below(introduction_version)
    unless LATEST_VERSION >= introduction_version
      raise "Version #{introduction_version} is too new and would always be skipped"
    end
    skip "Introduced in #{introduction_version}" if introduction_version > version
  end

  def version
    IntrospectionTestExtensions.version ||= calculate_version
  end

  # Version => probe symbol introduced in it, newest first; a two-element
  # value probes a toplevel name, a three-element value probes a method.
  # LATEST_VERSION relies on insertion order.
  VERSION_GUARDS = {
    '1.61.1' => %w(Regress TestObj emit_sig_with_error),
    '1.59.4' => %w(Regress test_array_struct_in_none),
    '1.58.3' => %w(Regress TestReferenceCounters),
    '1.57.2' => %w(Regress TestInterface emit_signal),
    '1.55.2' => %w(Regress FOO_FLAGS_SECOND_AND_THIRD),
    '1.53.4' => %w(Regress TestObj name_conflict),
    '1.49.1' => %w(Regress AnonymousUnionAndStruct),
    '1.47.92' => %w(Regress get_variant),
    '1.47.1' => %w(Regress test_noptr_callback)
  }.freeze
  LATEST_VERSION = VERSION_GUARDS.keys.first

  # Determines the installed version by probing guards newest-first and
  # returning the first whose symbol is present.
  def calculate_version
    VERSION_GUARDS.each do |version, (namespace, klass, methodname)|
      result = if methodname
                 get_method_introspection_data(namespace, klass, methodname)
               else
                 get_introspection_data(namespace, klass)
               end
      return version if result
    end
    '1.46.0' # Minimum supported version
  end
end
Minitest::Test.include IntrospectionTestExtensions
|
# frozen_string_literal: true
require "digest"
require "addressable/uri"
require "net/http"
require "uri"
require "json"
module Imgix
  # Client for building imgix URLs and sending purge requests to the
  # imgix management API.
  class Client
    DEFAULTS = { use_https: true }.freeze

    def initialize(options = {})
      options = DEFAULTS.merge(options)
      @domain = options[:domain]
      validate_domain!

      @secure_url_token = options[:secure_url_token]
      @api_key = options[:api_key]
      @use_https = options[:use_https]
      @include_library_param = options.fetch(:include_library_param, true)
      @library = options.fetch(:library_param, "rb")
      @version = options.fetch(:library_version, Imgix::VERSION)
    end

    # Builds a Path for the given resource, tagged with the library
    # version (ixlib) parameter unless disabled.
    def path(path)
      p = Path.new(prefix, @secure_url_token, path)
      p.ixlib("#{@library}-#{@version}") if @include_library_param
      p
    end

    # Sends a purge request for the given path; requires an API key.
    def purge(path)
      api_key_error = "A valid API key is required to send purge requests"
      raise api_key_error if @api_key.nil?

      endpoint = URI.parse("https://api.imgix.com/api/v1/purge")
      # Ensure the path has been prefixed with '/'.
      path = path.start_with?("/") ? path : "/#{path}"
      url = prefix + path

      req = create_request(endpoint, url, :json_data_from)
      sock = Net::HTTP.new(endpoint.host, endpoint.port)
      sock.use_ssl = true
      sock.start { |http| http.request(req) }
    end

    # Scheme + domain prefix for all generated URLs.
    def prefix
      "#{@use_https ? 'https' : 'http'}://#{@domain}"
    end

    private

    # Create a request object by specifying its endpoint, resource, and
    # an optional data_fmt.
    #
    # `endpoint` must be a valid URI object
    # `resource` must be a valid URL designating the resource to be purged
    # `data_fmt` must be a valid method or Proc object
    #
    # Specify a `data_fmt` method when a resource (URL) requires
    # additional formatting before being included in the request body.
    # By default, the data format is specified by the `json_data_from`
    # method.
    def create_request(endpoint, resource, data_fmt = :json_data_from)
      req = Net::HTTP::Post.new(endpoint.path)
      req["Content-Type"] = "application/vnd.api+json"
      req["Authorization"] = "Bearer #{@api_key}"
      req["User-Agent"] = "imgix #{@library}-#{@version}"

      # Subject-less `case` was an if/elsif chain in disguise.
      if data_fmt.is_a?(Proc)
        req.body = data_fmt.call(resource)
      elsif data_fmt.is_a?(Symbol)
        req.body = send(data_fmt, resource)
      else
        fmt_arg_error = "`fmt' is required to be of class Symbol or " \
                        "Proc but was found to be\n\s\sof class #{data_fmt.class}\n"
        raise ArgumentError, fmt_arg_error
      end

      req
    end

    # JSON:API purge request body for the given URL.
    def json_data_from(url)
      {
        data: {
          attributes: {
            url: url
          },
          type: "purges"
        }
      }.to_json
    end

    # Validates the configured domain.
    # Fixes: added the missing spaces between the concatenated string
    # fragments (they previously ran together as "fully-qualifieddomain
    # names...") and removed the stray trailing backslash, which
    # continued the line into the following `raise` statement.
    def validate_domain!
      domain_error = "The :domain option must be specified"
      raise ArgumentError, domain_error if @domain.nil?

      domain_error = "Domains must be passed in as fully-qualified " \
                     "domain names and should not include a protocol " \
                     'or any path element, i.e. "example.imgix.net"'
      raise ArgumentError, domain_error if @domain.match(DOMAIN_REGEX).nil?
    end
  end
end
refactor: swap case-when for if-elsif
# frozen_string_literal: true
require "digest"
require "addressable/uri"
require "net/http"
require "uri"
require "json"
module Imgix
  # Client for building imgix URLs and sending purge requests to the
  # imgix management API.
  class Client
    DEFAULTS = { use_https: true }.freeze

    def initialize(options = {})
      options = DEFAULTS.merge(options)
      @domain = options[:domain]
      validate_domain!

      @secure_url_token = options[:secure_url_token]
      @api_key = options[:api_key]
      @use_https = options[:use_https]
      @include_library_param = options.fetch(:include_library_param, true)
      @library = options.fetch(:library_param, "rb")
      @version = options.fetch(:library_version, Imgix::VERSION)
    end

    # Builds a Path for the given resource, tagged with the library
    # version (ixlib) parameter unless disabled.
    def path(path)
      p = Path.new(prefix, @secure_url_token, path)
      p.ixlib("#{@library}-#{@version}") if @include_library_param
      p
    end

    # Sends a purge request for the given path; requires an API key.
    def purge(path)
      api_key_error = "A valid API key is required to send purge requests"
      raise api_key_error if @api_key.nil?

      endpoint = URI.parse("https://api.imgix.com/api/v1/purge")
      # Ensure the path has been prefixed with '/'.
      path = path.start_with?("/") ? path : "/#{path}"
      url = prefix + path

      req = create_request(endpoint, url, :json_data_from)
      sock = Net::HTTP.new(endpoint.host, endpoint.port)
      sock.use_ssl = true
      sock.start { |http| http.request(req) }
    end

    # Scheme + domain prefix for all generated URLs.
    def prefix
      "#{@use_https ? 'https' : 'http'}://#{@domain}"
    end

    private

    # Create a request object by specifying its endpoint, resource, and
    # an optional data_fmt.
    #
    # `endpoint` must be a valid URI object
    # `resource` must be a valid URL designating the resource to be purged
    # `data_fmt` must be a valid method or Proc object
    #
    # Specify a `data_fmt` method when a resource (URL) requires
    # additional formatting before being included in the request body.
    # By default, the data format is specified by the `json_data_from`
    # method.
    def create_request(endpoint, resource, data_fmt = :json_data_from)
      req = Net::HTTP::Post.new(endpoint.path)
      req["Content-Type"] = "application/vnd.api+json"
      req["Authorization"] = "Bearer #{@api_key}"
      req["User-Agent"] = "imgix #{@library}-#{@version}"
      if data_fmt.is_a?(Proc)
        req.body = data_fmt.call(resource)
      elsif data_fmt.is_a?(Symbol)
        req.body = send(data_fmt, resource)
      else
        fmt_arg_error = "`fmt' is required to be of class Symbol or " \
                        "Proc but was found to be\n\s\sof class #{data_fmt.class}\n"
        raise ArgumentError, fmt_arg_error
      end
      req
    end

    # JSON:API purge request body for the given URL.
    def json_data_from(url)
      {
        data: {
          attributes: {
            url: url
          },
          type: "purges"
        }
      }.to_json
    end

    # Validates the configured domain.
    # Fixes: added the missing spaces between the concatenated string
    # fragments and removed the stray trailing backslash, which continued
    # the line into the following `raise` statement.
    def validate_domain!
      domain_error = "The :domain option must be specified"
      raise ArgumentError, domain_error if @domain.nil?

      domain_error = "Domains must be passed in as fully-qualified " \
                     "domain names and should not include a protocol " \
                     'or any path element, i.e. "example.imgix.net"'
      raise ArgumentError, domain_error if @domain.match(DOMAIN_REGEX).nil?
    end
  end
end
|
require "minitest/autorun"
require 'rails'
require 'typus'
require 'yaml'
require "i18n/backend/flatten"
# Enable flattening of nested translation hashes into dotted keys, and
# allow lookups for locales not declared as "available".
I18n::Backend::Simple.send(:include, I18n::Backend::Flatten)
I18n.enforce_available_locales = false
# Verifies that each tested locale file contains exactly the same set of
# translation keys as the English reference locale shipped with Typus.
class LocalesCompletenessTest < Minitest::Test
  REFERENCE_LOCALE = "en"
  LOCALES_TO_TEST = %w(de)

  # The reference locale lives inside the Typus engine; the locales under
  # test live next to this test in ../config/locales.
  def locale_file(locale)
    if (locale == REFERENCE_LOCALE)
      Admin::Engine.root.join("config/locales/typus.#{locale}.yml")
    else
      File.join(File.dirname(__FILE__), "../config/locales/typus.#{locale}.yml")
    end
  end

  # Loads a locale file and flattens its nested hash into dotted keys.
  def translations(locale)
    file = locale_file(locale)
    data = YAML.load_file(file)[locale]
    I18n.backend.flatten_translations(locale, data, false, false)
  end

  def locale_keys(locale)
    translations(locale).keys
  end

  # All keys present in the reference (English) locale.
  def all_keys
    locale_keys(REFERENCE_LOCALE)
  end

  # Two generated test methods per locale: missing keys and obsolete keys.
  LOCALES_TO_TEST.each do |locale|
    define_method("test_#{locale}_is_complete") do
      difference = all_keys - locale_keys(locale)
      msg = %(The locale "#{locale}" is missing translations. Please add translations for the keys listed below)
      assert_equal [], difference, msg
    end

    define_method("test_#{locale}_has_no_obsolete_keys") do
      difference = locale_keys(locale) - all_keys
      msg = %(The locale "#{locale}" has obsolete translations. Please remove the keys listed below)
      assert_equal [], difference, msg
    end
  end
end
Determine the locales to test dynamically …
i.e. depending on which files are found in config/locales.
require "minitest/autorun"
require 'rails'
require 'typus'
require 'yaml'
require "i18n/backend/flatten"
I18n::Backend::Simple.send(:include, I18n::Backend::Flatten)
I18n.enforce_available_locales = false
# Verifies that every shipped locale file contains exactly the same set
# of translation keys as the English reference locale from Typus.
class LocalesCompletenessTest < Minitest::Test
  REFERENCE_LOCALE = "en"

  # Discovers the locales to test from the typus.*.yml files in
  # config/locales; each file's top-level key names its locale.
  def self.locales_to_test
    pattern = File.join(File.dirname(__FILE__), '../config/locales/typus.*.yml')
    Dir.glob(pattern).map { |path| YAML.load_file(path).keys.first.to_s }
  end

  # The reference locale lives inside the Typus engine; the locales under
  # test live next to this test in ../config/locales.
  def locale_file(locale)
    return Admin::Engine.root.join("config/locales/typus.#{locale}.yml") if locale == REFERENCE_LOCALE

    File.join(File.dirname(__FILE__), "../config/locales/typus.#{locale}.yml")
  end

  # Loads a locale file and flattens its nested hash into dotted keys.
  def translations(locale)
    data = YAML.load_file(locale_file(locale))[locale]
    I18n.backend.flatten_translations(locale, data, false, false)
  end

  def locale_keys(locale)
    translations(locale).keys
  end

  # All keys present in the reference (English) locale.
  def all_keys
    locale_keys(REFERENCE_LOCALE)
  end

  # Two generated test methods per discovered locale.
  locales_to_test.each do |locale|
    define_method("test_#{locale}_is_complete") do
      missing = all_keys - locale_keys(locale)
      msg = %(The locale "#{locale}" is missing translations. Please add translations for the keys listed below)
      assert_equal [], missing, msg
    end

    define_method("test_#{locale}_has_no_obsolete_keys") do
      obsolete = locale_keys(locale) - all_keys
      msg = %(The locale "#{locale}" has obsolete translations. Please remove the keys listed below)
      assert_equal [], obsolete, msg
    end
  end
end
|
module IRuby
class Kernel
RED = "\e[31m"
RESET = "\e[0m"
class<< self
attr_accessor :instance
end
attr_reader :session
# Boots the kernel from a Jupyter connection file: loads the JSON config,
# registers this instance globally (Kernel.instance), opens the session,
# redirects $stdout/$stderr so program output is published to the
# frontend, starts the parent-process poller, and selects a backend.
def initialize(config_file)
  @config = MultiJson.load(File.read(config_file))
  IRuby.logger.debug("IRuby kernel start with config #{@config}")
  Kernel.instance = self
  @session = Session.new(@config)
  # Global redirection: all writes to stdout/stderr go to the frontend.
  $stdout = OStream.new(@session, :stdout)
  $stderr = OStream.new(@session, :stderr)
  init_parent_process_poller
  @execution_count = 0
  @backend = create_backend
  @running = true
end
# Prefers the Pry backend, falling back to the plain backend when Pry is
# unavailable or fails to initialise. LoadError is expected (Pry not
# installed) and therefore not logged.
# NOTE(review): `rescue Exception` is deliberately broad here so any
# backend failure falls back, but it also catches SignalException and
# SystemExit raised during construction — confirm this is intended.
def create_backend
  PryBackend.new
rescue Exception => e
  IRuby.logger.warn "Could not load PryBackend: #{e.message}\n#{e.backtrace.join("\n")}" unless LoadError === e
  PlainBackend.new
end
# Announces the starting state, then processes messages until a
# shutdown_request flips @running to false.
def run
  send_status :starting
  dispatch while @running
end
# Receives one message from the shell socket and routes it to the handler
# method named after its msg_type (e.g. execute_request), publishing a
# busy/idle status pair around the handler. Handler errors are published
# on the iopub error channel and the loop continues.
def dispatch
  msg = @session.recv(:reply)
  IRuby.logger.debug "Kernel#dispatch: msg = #{msg}"
  type = msg[:header]['msg_type']
  # Only comm_* / *_request types that this kernel implements may be
  # dispatched; this also blocks arbitrary method invocation via send.
  raise "Unknown message type: #{msg.inspect}" unless type =~ /comm_|_request\Z/ && respond_to?(type)
  begin
    send_status :busy
    send(type, msg)
  ensure
    send_status :idle
  end
rescue Exception => e
  # NOTE(review): rescuing Exception keeps the kernel alive on any
  # failure but also swallows SystemExit/SignalException — confirm
  # this breadth is intended.
  IRuby.logger.debug "Kernel error: #{e.message}\n#{e.backtrace.join("\n")}"
  @session.send(:publish, :error, error_content(e))
end
# Replies with kernel metadata (implementation, language info, banner,
# help links) per protocol version 5.0.
def kernel_info_request(msg)
  @session.send(:reply, :kernel_info_reply,
    protocol_version: '5.0',
    implementation: 'iruby',
    implementation_version: IRuby::VERSION,
    language_info: {
      name: 'ruby',
      version: RUBY_VERSION,
      mimetype: 'application/x-ruby',
      file_extension: '.rb'
    },
    banner: "IRuby #{IRuby::VERSION} (with #{@session.description})",
    help_links: [
      {
        text: "Ruby Documentation",
        url: "https://ruby-doc.org/"
      }
    ],
    status: :ok)
end
# Publishes the kernel execution state (:starting, :busy or :idle) on
# the iopub status channel.
def send_status(status)
  IRuby.logger.debug "Send status: #{status}"
  @session.send(:publish, :status, execution_state: status)
end
def execute_request(msg)
code = msg[:content]['code']
@execution_count += 1 if msg[:content]['store_history']
@session.send(:publish, :execute_input, code: code, execution_count: @execution_count)
content = {
status: :ok,
payload: [],
user_expressions: {},
execution_count: @execution_count
}
result = nil
begin
result = @backend.eval(code, msg[:content]['store_history'])
rescue SystemExit
content[:payload] << { source: :ask_exit }
rescue Exception => e
content = error_content(e)
@session.send(:publish, :error, content)
content[:status] = :error
content[:execution_count] = @execution_count
end
@session.send(:reply, :execute_reply, content)
@session.send(:publish, :execute_result,
data: Display.display(result),
metadata: {},
execution_count: @execution_count) unless result.nil? || msg[:content]['silent']
end
def error_content(e)
rindex = e.backtrace.rindex{|line| line.start_with?(@backend.eval_path)} || -1
backtrace = e.backtrace[0..rindex]
{ ename: e.class.to_s,
evalue: e.message,
traceback: ["#{RED}#{e.class}#{RESET}: #{e.message}", *backtrace] }
end
def is_complete_request(msg)
# FIXME: the code completeness should be judged by using ripper or other Ruby parser
@session.send(:reply, :is_complete_reply,
status: :unknown)
end
def complete_request(msg)
# HACK for #26, only complete last line
code = msg[:content]['code']
if start = code.rindex(/\s|\R/)
code = code[start+1..-1]
start += 1
end
@session.send(:reply, :complete_reply,
matches: @backend.complete(code),
cursor_start: start.to_i,
cursor_end: msg[:content]['cursor_pos'],
metadata: {},
status: :ok)
end
def connect_request(msg)
@session.send(:reply, :connect_reply, Hash[%w(shell_port iopub_port stdin_port hb_port).map {|k| [k, @config[k]] }])
end
def shutdown_request(msg)
@session.send(:reply, :shutdown_reply, msg[:content])
@running = false
end
def history_request(msg)
# we will just send back empty history for now, pending clarification
# as requested in ipython/ipython#3806
@session.send(:reply, :history_reply, history: [])
end
def inspect_request(msg)
# not yet implemented. See (#119).
@session.send(:reply, :inspect_reply, status: :ok, found: false, data: {}, metadata: {})
end
def comm_open(msg)
comm_id = msg[:content]['comm_id']
target_name = msg[:content]['target_name']
Comm.comm[comm_id] = Comm.target[target_name].new(target_name, comm_id)
end
def comm_msg(msg)
Comm.comm[msg[:content]['comm_id']].handle_msg(msg[:content]['data'])
end
def comm_close(msg)
comm_id = msg[:content]['comm_id']
Comm.comm[comm_id].handle_close(msg[:content]['data'])
Comm.comm.delete(comm_id)
end
private
def init_parent_process_poller
pid = ENV.fetch('JPY_PARENT_PID', 0).to_i
return unless pid > 1
case RUBY_PLATFORM
when /mswin/, /mingw/
# TODO
else
@parent_poller = start_parent_process_pollar_unix
end
end
def start_parent_process_pollar_unix
Thread.start do
IRuby.logger.warn("parent process poller thread started.")
loop do
begin
current_ppid = Process.ppid
if current_ppid == 1
IRuby.logger.warn("parent process appears to exited, shutting down.")
exit!(1)
end
sleep 1
rescue Errno::EINTR
# ignored
end
end
end
end
end
end
feat: improve SyntaxError error message
module IRuby
  # Jupyter kernel for Ruby: receives Jupyter protocol messages from the
  # session sockets, dispatches them to per-message-type handler methods,
  # and publishes results, status updates and errors back to the frontend.
  class Kernel
    # ANSI color codes used to highlight the exception class in tracebacks.
    RED = "\e[31m"
    RESET = "\e[0m"
    class<< self
      # Global access to the running kernel instance (set in #initialize).
      attr_accessor :instance
    end
    attr_reader :session
    # config_file - path to the Jupyter connection file (JSON describing
    #               ports, transport and signing key) passed by the frontend.
    def initialize(config_file)
      @config = MultiJson.load(File.read(config_file))
      IRuby.logger.debug("IRuby kernel start with config #{@config}")
      Kernel.instance = self
      @session = Session.new(@config)
      # Route user stdout/stderr through the session so output reaches the client.
      $stdout = OStream.new(@session, :stdout)
      $stderr = OStream.new(@session, :stderr)
      init_parent_process_poller
      @execution_count = 0
      @backend = create_backend
      @running = true
    end
    # Prefer the Pry backend; fall back to the plain backend when Pry is
    # missing (LoadError is expected and not logged) or fails to start.
    def create_backend
      PryBackend.new
    rescue Exception => e
      IRuby.logger.warn "Could not load PryBackend: #{e.message}\n#{e.backtrace.join("\n")}" unless LoadError === e
      PlainBackend.new
    end
    # Blocking main loop; runs until a shutdown_request clears @running.
    def run
      send_status :starting
      while @running
        dispatch
      end
    end
    # Receive one message and invoke the matching handler, bracketing the
    # call with busy/idle status notifications. A handler failure is
    # reported to the client instead of killing the loop (hence the
    # deliberate rescue of Exception).
    def dispatch
      msg = @session.recv(:reply)
      IRuby.logger.debug "Kernel#dispatch: msg = #{msg}"
      type = msg[:header]['msg_type']
      # Only whitelisted protocol message names may be dispatched via send.
      raise "Unknown message type: #{msg.inspect}" unless type =~ /comm_|_request\Z/ && respond_to?(type)
      begin
        send_status :busy
        send(type, msg)
      ensure
        send_status :idle
      end
    rescue Exception => e
      IRuby.logger.debug "Kernel error: #{e.message}\n#{e.backtrace.join("\n")}"
      @session.send(:publish, :error, error_content(e))
    end
    # Answer kernel_info_request with implementation/language metadata.
    def kernel_info_request(msg)
      @session.send(:reply, :kernel_info_reply,
        protocol_version: '5.0',
        implementation: 'iruby',
        implementation_version: IRuby::VERSION,
        language_info: {
          name: 'ruby',
          version: RUBY_VERSION,
          mimetype: 'application/x-ruby',
          file_extension: '.rb'
        },
        banner: "IRuby #{IRuby::VERSION} (with #{@session.description})",
        help_links: [
          {
            text: "Ruby Documentation",
            url: "https://ruby-doc.org/"
          }
        ],
        status: :ok)
    end
    # Publish the kernel execution state (:starting, :busy, :idle) on IOPub.
    def send_status(status)
      IRuby.logger.debug "Send status: #{status}"
      @session.send(:publish, :status, execution_state: status)
    end
    # Evaluate the submitted code in the backend and publish the result.
    # Errors are published as :error messages and reflected in the reply.
    def execute_request(msg)
      code = msg[:content]['code']
      @execution_count += 1 if msg[:content]['store_history']
      @session.send(:publish, :execute_input, code: code, execution_count: @execution_count)
      content = {
        status: :ok,
        payload: [],
        user_expressions: {},
        execution_count: @execution_count
      }
      result = nil
      begin
        result = @backend.eval(code, msg[:content]['store_history'])
      rescue SystemExit
        # `exit` in user code asks the frontend to close the session.
        content[:payload] << { source: :ask_exit }
      rescue Exception => e
        content = error_content(e)
        @session.send(:publish, :error, content)
        content[:status] = :error
        content[:execution_count] = @execution_count
      end
      @session.send(:reply, :execute_reply, content)
      @session.send(:publish, :execute_result,
        data: Display.display(result),
        metadata: {},
        execution_count: @execution_count) unless result.nil? || msg[:content]['silent']
    end
    # Build the Jupyter error payload for an exception. The backtrace is
    # trimmed to frames at or below the backend's eval path; a SyntaxError
    # with no user frame at all (rindex == -1) gets an empty backtrace so
    # kernel internals are not shown to the user.
    def error_content(e)
      rindex = e.backtrace.rindex{|line| line.start_with?(@backend.eval_path)} || -1
      backtrace = SyntaxError === e && rindex == -1 ? [] : e.backtrace[0..rindex]
      { ename: e.class.to_s,
        evalue: e.message,
        traceback: ["#{RED}#{e.class}#{RESET}: #{e.message}", *backtrace] }
    end
    def is_complete_request(msg)
      # FIXME: the code completeness should be judged by using ripper or other Ruby parser
      @session.send(:reply, :is_complete_reply,
        status: :unknown)
    end
    # Tab-completion; only the last whitespace-separated token is completed.
    def complete_request(msg)
      # HACK for #26, only complete last line
      code = msg[:content]['code']
      if start = code.rindex(/\s|\R/)
        code = code[start+1..-1]
        start += 1
      end
      @session.send(:reply, :complete_reply,
        matches: @backend.complete(code),
        cursor_start: start.to_i,
        cursor_end: msg[:content]['cursor_pos'],
        metadata: {},
        status: :ok)
    end
    # Report the socket ports back from the connection config.
    def connect_request(msg)
      @session.send(:reply, :connect_reply, Hash[%w(shell_port iopub_port stdin_port hb_port).map {|k| [k, @config[k]] }])
    end
    # Acknowledge shutdown and stop the #run loop.
    def shutdown_request(msg)
      @session.send(:reply, :shutdown_reply, msg[:content])
      @running = false
    end
    def history_request(msg)
      # we will just send back empty history for now, pending clarification
      # as requested in ipython/ipython#3806
      @session.send(:reply, :history_reply, history: [])
    end
    def inspect_request(msg)
      # not yet implemented. See (#119).
      @session.send(:reply, :inspect_reply, status: :ok, found: false, data: {}, metadata: {})
    end
    # Open a comm channel: instantiate the registered target class.
    def comm_open(msg)
      comm_id = msg[:content]['comm_id']
      target_name = msg[:content]['target_name']
      Comm.comm[comm_id] = Comm.target[target_name].new(target_name, comm_id)
    end
    # Forward a comm data message to its comm instance.
    def comm_msg(msg)
      Comm.comm[msg[:content]['comm_id']].handle_msg(msg[:content]['data'])
    end
    # Close a comm channel and drop its registration.
    def comm_close(msg)
      comm_id = msg[:content]['comm_id']
      Comm.comm[comm_id].handle_close(msg[:content]['data'])
      Comm.comm.delete(comm_id)
    end
    private
    # Watch the Jupyter parent process (pid from JPY_PARENT_PID) so the
    # kernel shuts itself down when the frontend dies. Windows: TODO.
    def init_parent_process_poller
      pid = ENV.fetch('JPY_PARENT_PID', 0).to_i
      return unless pid > 1
      case RUBY_PLATFORM
      when /mswin/, /mingw/
        # TODO
      else
        @parent_poller = start_parent_process_pollar_unix
      end
    end
    # Unix implementation: once the parent dies this process is reparented
    # (ppid becomes 1), at which point we exit hard.
    # NOTE(review): "pollar" is a typo for "poller" — internal name only.
    def start_parent_process_pollar_unix
      Thread.start do
        IRuby.logger.warn("parent process poller thread started.")
        loop do
          begin
            current_ppid = Process.ppid
            if current_ppid == 1
              IRuby.logger.warn("parent process appears to exited, shutting down.")
              exit!(1)
            end
            sleep 1
          rescue Errno::EINTR
            # ignored
          end
        end
      end
    end
  end
end
|
module Iugu
  # Invoice resource backed by the Iugu REST API.
  class Invoice < APIResource
    include Iugu::APIFetch
    include Iugu::APICreate
    include Iugu::APISave
    include Iugu::APIDelete

    # Returns the Customer this invoice belongs to, or false when the
    # invoice carries no customer_id.
    def customer
      customer_id = @attributes["customer_id"]
      customer_id ? Customer.fetch(customer_id) : false
    end

    # Cancels the invoice (PUT /invoices/:id/cancel).
    # Returns true on success; on API failure stores the errors in #errors
    # and returns false.
    def cancel
      perform_action("PUT", "cancel")
    end

    # Refunds the invoice (POST /invoices/:id/refund).
    # Same return contract as #cancel.
    def refund
      perform_action("POST", "refund")
    end

    private

    # Shared request / copy / error-handling cycle for invoice actions.
    def perform_action(http_method, action)
      response = APIRequest.request(http_method, "#{self.class.url(self.id)}/#{action}")
      copy Iugu::Factory.create_from_response(self.class.object_type, response)
      self.errors = nil
      true
    rescue Iugu::RequestWithErrors => ex
      self.errors = ex.errors
      false
    end
  end
end
Adicionando o duplicate no invoice para gerar a segunda via.
module Iugu
  # Invoice resource backed by the Iugu REST API.
  class Invoice < APIResource
    include Iugu::APIFetch
    include Iugu::APICreate
    include Iugu::APISave
    include Iugu::APIDelete

    # Returns the Customer this invoice belongs to, or false when the
    # invoice carries no customer_id.
    def customer
      customer_id = @attributes["customer_id"]
      customer_id ? Customer.fetch(customer_id) : false
    end

    # Cancels the invoice (PUT /invoices/:id/cancel).
    # Returns true on success; on API failure stores the errors in #errors
    # and returns false.
    def cancel
      perform_action("PUT", "cancel")
    end

    # Refunds the invoice (POST /invoices/:id/refund).
    # Same return contract as #cancel.
    def refund
      perform_action("POST", "refund")
    end

    # Issues a second copy ("segunda via") of the invoice with a new due
    # date (POST /invoices/:id/duplicate). Same return contract as #cancel.
    def duplicate(due_date)
      perform_action("POST", "duplicate", due_date: due_date)
    end

    private

    # Shared request / copy / error-handling cycle for invoice actions.
    # payload - optional request body hash.
    def perform_action(http_method, action, payload = nil)
      endpoint = "#{self.class.url(self.id)}/#{action}"
      response =
        if payload
          APIRequest.request(http_method, endpoint, payload)
        else
          APIRequest.request(http_method, endpoint)
        end
      copy Iugu::Factory.create_from_response(self.class.object_type, response)
      self.errors = nil
      true
    rescue Iugu::RequestWithErrors => ex
      self.errors = ex.errors
      false
    end
  end
end
|
module Jekyll
  module Utils
    extend self
    autoload :Platforms, 'jekyll/utils/platforms'
    autoload :Ansi, "jekyll/utils/ansi"

    # Constants for use in #slugify
    SLUGIFY_MODES = %w(raw default pretty)
    SLUGIFY_RAW_REGEXP = Regexp.new('\\s+').freeze
    SLUGIFY_DEFAULT_REGEXP = Regexp.new('[^[:alnum:]]+').freeze
    SLUGIFY_PRETTY_REGEXP = Regexp.new("[^[:alnum:]._~!$&'()+,;=@]+").freeze

    # Takes a slug and turns it into a simple title.
    #
    # Uses the non-bang #capitalize: #capitalize! returns nil when the word
    # is already capitalized, which would inject nils into the mapped array
    # and corrupt the joined result (e.g. "Already-Done" -> " ").
    def titleize_slug(slug)
      slug.split("-").map(&:capitalize).join(" ")
    end

    # Non-destructive version of deep_merge_hashes! See that method.
    #
    # Returns the merged hashes.
    def deep_merge_hashes(master_hash, other_hash)
      deep_merge_hashes!(master_hash.dup, other_hash)
    end

    # Merges a master hash with another hash, recursively.
    #
    # master_hash - the "parent" hash whose values will be overridden
    # other_hash - the other hash whose values will be persisted after the merge
    #
    # This code was lovingly stolen from some random gem:
    # http://gemjack.com/gems/tartan-0.1.1/classes/Hash.html
    #
    # Thanks to whoever made it.
    def deep_merge_hashes!(target, overwrite)
      target.merge!(overwrite) do |key, old_val, new_val|
        if new_val.nil?
          old_val
        else
          mergable?(old_val) && mergable?(new_val) ? deep_merge_hashes(old_val, new_val) : new_val
        end
      end
      # Propagate the overwrite hash's default proc when the target lacks one.
      if target.respond_to?(:default_proc) && overwrite.respond_to?(:default_proc) && target.default_proc.nil?
        target.default_proc = overwrite.default_proc
      end
      target
    end

    # Whether a value can participate in a recursive deep merge.
    def mergable?(value)
      value.is_a?(Hash) || value.is_a?(Drops::Drop)
    end

    # Read array from the supplied hash favouring the singular key
    # and then the plural key, and handling any nil entries.
    #
    # hash - the hash to read from
    # singular_key - the singular key
    # plural_key - the plural key
    #
    # Returns an array
    def pluralized_array_from_hash(hash, singular_key, plural_key)
      [].tap do |array|
        array << (value_from_singular_key(hash, singular_key) || value_from_plural_key(hash, plural_key))
      end.flatten.compact
    end

    # Fetch the value under the singular key, honoring a default_proc.
    def value_from_singular_key(hash, key)
      hash[key] if hash.key?(key) || (hash.default_proc && hash[key])
    end

    # Fetch the value under the plural key, normalizing a String to a
    # word array and compacting an Array; other types yield nil.
    def value_from_plural_key(hash, key)
      if hash.key?(key) || (hash.default_proc && hash[key])
        val = hash[key]
        case val
        when String
          val.split
        when Array
          val.compact
        end
      end
    end

    # Build a new hash whose keys are the block's transformation of the
    # original keys (values are carried over untouched).
    def transform_keys(hash)
      result = {}
      hash.each_key do |key|
        result[yield(key)] = hash[key]
      end
      result
    end

    # Apply #to_sym to all keys in the hash
    #
    # hash - the hash to which to apply this transformation
    #
    # Returns a new hash with symbolized keys
    def symbolize_hash_keys(hash)
      transform_keys(hash) { |key| key.to_sym rescue key }
    end

    # Apply #to_s to all keys in the Hash
    #
    # hash - the hash to which to apply this transformation
    #
    # Returns a new hash with stringified keys
    def stringify_hash_keys(hash)
      transform_keys(hash) { |key| key.to_s rescue key }
    end

    # Parse a date/time and throw an error if invalid
    #
    # input - the date/time to parse
    # msg - (optional) the error message to show the user
    #
    # Returns the parsed date if successful, throws a FatalException
    # if not
    def parse_date(input, msg = "Input could not be parsed.")
      Time.parse(input).localtime
    rescue ArgumentError
      raise Errors::FatalException.new("Invalid date '#{input}': " + msg)
    end

    # Determines whether a given file has a YAML front matter header.
    #
    # Returns true if the YAML front matter is present.
    def has_yaml_header?(file)
      !!(File.open(file, 'rb') { |f| f.readline } =~ /\A---\s*\r?\n/)
    rescue EOFError
      false
    end

    # Slugify a filename or title.
    #
    # string - the filename or title to slugify
    # mode - how string is slugified
    # cased - whether to replace all uppercase letters with their
    # lowercase counterparts
    #
    # When mode is "none", return the given string.
    #
    # When mode is "raw", return the given string,
    # with every sequence of spaces characters replaced with a hyphen.
    #
    # When mode is "default" or nil, non-alphabetic characters are
    # replaced with a hyphen too.
    #
    # When mode is "pretty", some non-alphabetic characters (._~!$&'()+,;=@)
    # are not replaced with hyphen.
    #
    # If cased is true, all uppercase letters in the result string are
    # replaced with their lowercase counterparts.
    #
    # Examples:
    # slugify("The _config.yml file")
    # # => "the-config-yml-file"
    #
    # slugify("The _config.yml file", "pretty")
    # # => "the-_config.yml-file"
    #
    # slugify("The _config.yml file", "pretty", true)
    # # => "The-_config.yml file"
    #
    # Returns the slugified string.
    def slugify(string, mode: nil, cased: false)
      mode ||= 'default'
      return nil if string.nil?
      unless SLUGIFY_MODES.include?(mode)
        return cased ? string : string.downcase
      end
      # Replace each character sequence with a hyphen
      re =
        case mode
        when 'raw'
          SLUGIFY_RAW_REGEXP
        when 'default'
          SLUGIFY_DEFAULT_REGEXP
        when 'pretty'
          # "._~!$&'()+,;=@" is human readable (not URI-escaped) in URL
          # and is allowed in both extN and NTFS.
          SLUGIFY_PRETTY_REGEXP
        end
      # Strip according to the mode
      slug = string.gsub(re, '-')
      # Remove leading/trailing hyphen
      slug.gsub!(/^\-|\-$/i, '')
      slug.downcase! unless cased
      slug
    end

    # Add an appropriate suffix to template so that it matches the specified
    # permalink style.
    #
    # template - permalink template without trailing slash or file extension
    # permalink_style - permalink style, either built-in or custom
    #
    # The returned permalink template will use the same ending style as
    # specified in permalink_style. For example, if permalink_style contains a
    # trailing slash (or is :pretty, which indirectly has a trailing slash),
    # then so will the returned template. If permalink_style has a trailing
    # ":output_ext" (or is :none, :date, or :ordinal) then so will the returned
    # template. Otherwise, template will be returned without modification.
    #
    # Examples:
    # add_permalink_suffix("/:basename", :pretty)
    # # => "/:basename/"
    #
    # add_permalink_suffix("/:basename", :date)
    # # => "/:basename:output_ext"
    #
    # add_permalink_suffix("/:basename", "/:year/:month/:title/")
    # # => "/:basename/"
    #
    # add_permalink_suffix("/:basename", "/:year/:month/:title")
    # # => "/:basename"
    #
    # Returns the updated permalink template
    def add_permalink_suffix(template, permalink_style)
      case permalink_style
      when :pretty
        template << "/"
      when :date, :ordinal, :none
        template << ":output_ext"
      else
        template << "/" if permalink_style.to_s.end_with?("/")
        template << ":output_ext" if permalink_style.to_s.end_with?(":output_ext")
      end
      template
    end

    # Work the same way as Dir.glob but seperating the input into two parts
    # ('dir' + '/' + 'pattern') to make sure the first part('dir') does not act
    # as a pattern.
    #
    # For example, Dir.glob("path[/*") always returns an empty array,
    # because the method fails to find the closing pattern to '[' which is ']'
    #
    # Examples:
    # safe_glob("path[", "*")
    # # => ["path[/file1", "path[/file2"]
    #
    # safe_glob("path", "*", File::FNM_DOTMATCH)
    # # => ["path/.", "path/..", "path/file1"]
    #
    # safe_glob("path", ["**", "*"])
    # # => ["path[/file1", "path[/folder/file2"]
    #
    # dir - the dir where glob will be executed under
    # (the dir will be included to each result)
    # patterns - the patterns (or the pattern) which will be applied under the dir
    # flags - the flags which will be applied to the pattern
    #
    # Returns matched pathes
    def safe_glob(dir, patterns, flags = 0)
      return [] unless Dir.exist?(dir)
      pattern = File.join(Array patterns)
      return [dir] if pattern.empty?
      Dir.chdir(dir) do
        Dir.glob(pattern, flags).map { |f| File.join(dir, f) }
      end
    end
  end
end
add Utils.strip_heredoc
module Jekyll
  module Utils
    extend self
    autoload :Platforms, 'jekyll/utils/platforms'
    autoload :Ansi, "jekyll/utils/ansi"

    # Constants for use in #slugify
    SLUGIFY_MODES = %w(raw default pretty)
    SLUGIFY_RAW_REGEXP = Regexp.new('\\s+').freeze
    SLUGIFY_DEFAULT_REGEXP = Regexp.new('[^[:alnum:]]+').freeze
    SLUGIFY_PRETTY_REGEXP = Regexp.new("[^[:alnum:]._~!$&'()+,;=@]+").freeze

    # Takes an indented string and removes the preceding spaces on each line
    def strip_heredoc(str)
      str.gsub(/^[ \t]{#{(str.scan(/^[ \t]*(?=\S)/).min || "").size}}/, "")
    end

    # Takes a slug and turns it into a simple title.
    #
    # Uses the non-bang #capitalize: #capitalize! returns nil when the word
    # is already capitalized, which would inject nils into the mapped array
    # and corrupt the joined result (e.g. "Already-Done" -> " ").
    def titleize_slug(slug)
      slug.split("-").map(&:capitalize).join(" ")
    end

    # Non-destructive version of deep_merge_hashes! See that method.
    #
    # Returns the merged hashes.
    def deep_merge_hashes(master_hash, other_hash)
      deep_merge_hashes!(master_hash.dup, other_hash)
    end

    # Merges a master hash with another hash, recursively.
    #
    # master_hash - the "parent" hash whose values will be overridden
    # other_hash - the other hash whose values will be persisted after the merge
    #
    # This code was lovingly stolen from some random gem:
    # http://gemjack.com/gems/tartan-0.1.1/classes/Hash.html
    #
    # Thanks to whoever made it.
    def deep_merge_hashes!(target, overwrite)
      target.merge!(overwrite) do |key, old_val, new_val|
        if new_val.nil?
          old_val
        else
          mergable?(old_val) && mergable?(new_val) ? deep_merge_hashes(old_val, new_val) : new_val
        end
      end
      # Propagate the overwrite hash's default proc when the target lacks one.
      if target.respond_to?(:default_proc) && overwrite.respond_to?(:default_proc) && target.default_proc.nil?
        target.default_proc = overwrite.default_proc
      end
      target
    end

    # Whether a value can participate in a recursive deep merge.
    def mergable?(value)
      value.is_a?(Hash) || value.is_a?(Drops::Drop)
    end

    # Read array from the supplied hash favouring the singular key
    # and then the plural key, and handling any nil entries.
    #
    # hash - the hash to read from
    # singular_key - the singular key
    # plural_key - the plural key
    #
    # Returns an array
    def pluralized_array_from_hash(hash, singular_key, plural_key)
      [].tap do |array|
        array << (value_from_singular_key(hash, singular_key) || value_from_plural_key(hash, plural_key))
      end.flatten.compact
    end

    # Fetch the value under the singular key, honoring a default_proc.
    def value_from_singular_key(hash, key)
      hash[key] if hash.key?(key) || (hash.default_proc && hash[key])
    end

    # Fetch the value under the plural key, normalizing a String to a
    # word array and compacting an Array; other types yield nil.
    def value_from_plural_key(hash, key)
      if hash.key?(key) || (hash.default_proc && hash[key])
        val = hash[key]
        case val
        when String
          val.split
        when Array
          val.compact
        end
      end
    end

    # Build a new hash whose keys are the block's transformation of the
    # original keys (values are carried over untouched).
    def transform_keys(hash)
      result = {}
      hash.each_key do |key|
        result[yield(key)] = hash[key]
      end
      result
    end

    # Apply #to_sym to all keys in the hash
    #
    # hash - the hash to which to apply this transformation
    #
    # Returns a new hash with symbolized keys
    def symbolize_hash_keys(hash)
      transform_keys(hash) { |key| key.to_sym rescue key }
    end

    # Apply #to_s to all keys in the Hash
    #
    # hash - the hash to which to apply this transformation
    #
    # Returns a new hash with stringified keys
    def stringify_hash_keys(hash)
      transform_keys(hash) { |key| key.to_s rescue key }
    end

    # Parse a date/time and throw an error if invalid
    #
    # input - the date/time to parse
    # msg - (optional) the error message to show the user
    #
    # Returns the parsed date if successful, throws a FatalException
    # if not
    def parse_date(input, msg = "Input could not be parsed.")
      Time.parse(input).localtime
    rescue ArgumentError
      raise Errors::FatalException.new("Invalid date '#{input}': " + msg)
    end

    # Determines whether a given file has a YAML front matter header.
    #
    # Returns true if the YAML front matter is present.
    def has_yaml_header?(file)
      !!(File.open(file, 'rb') { |f| f.readline } =~ /\A---\s*\r?\n/)
    rescue EOFError
      false
    end

    # Slugify a filename or title.
    #
    # string - the filename or title to slugify
    # mode - how string is slugified
    # cased - whether to replace all uppercase letters with their
    # lowercase counterparts
    #
    # When mode is "none", return the given string.
    #
    # When mode is "raw", return the given string,
    # with every sequence of spaces characters replaced with a hyphen.
    #
    # When mode is "default" or nil, non-alphabetic characters are
    # replaced with a hyphen too.
    #
    # When mode is "pretty", some non-alphabetic characters (._~!$&'()+,;=@)
    # are not replaced with hyphen.
    #
    # If cased is true, all uppercase letters in the result string are
    # replaced with their lowercase counterparts.
    #
    # Examples:
    # slugify("The _config.yml file")
    # # => "the-config-yml-file"
    #
    # slugify("The _config.yml file", "pretty")
    # # => "the-_config.yml-file"
    #
    # slugify("The _config.yml file", "pretty", true)
    # # => "The-_config.yml file"
    #
    # Returns the slugified string.
    def slugify(string, mode: nil, cased: false)
      mode ||= 'default'
      return nil if string.nil?
      unless SLUGIFY_MODES.include?(mode)
        return cased ? string : string.downcase
      end
      # Replace each character sequence with a hyphen
      re =
        case mode
        when 'raw'
          SLUGIFY_RAW_REGEXP
        when 'default'
          SLUGIFY_DEFAULT_REGEXP
        when 'pretty'
          # "._~!$&'()+,;=@" is human readable (not URI-escaped) in URL
          # and is allowed in both extN and NTFS.
          SLUGIFY_PRETTY_REGEXP
        end
      # Strip according to the mode
      slug = string.gsub(re, '-')
      # Remove leading/trailing hyphen
      slug.gsub!(/^\-|\-$/i, '')
      slug.downcase! unless cased
      slug
    end

    # Add an appropriate suffix to template so that it matches the specified
    # permalink style.
    #
    # template - permalink template without trailing slash or file extension
    # permalink_style - permalink style, either built-in or custom
    #
    # The returned permalink template will use the same ending style as
    # specified in permalink_style. For example, if permalink_style contains a
    # trailing slash (or is :pretty, which indirectly has a trailing slash),
    # then so will the returned template. If permalink_style has a trailing
    # ":output_ext" (or is :none, :date, or :ordinal) then so will the returned
    # template. Otherwise, template will be returned without modification.
    #
    # Examples:
    # add_permalink_suffix("/:basename", :pretty)
    # # => "/:basename/"
    #
    # add_permalink_suffix("/:basename", :date)
    # # => "/:basename:output_ext"
    #
    # add_permalink_suffix("/:basename", "/:year/:month/:title/")
    # # => "/:basename/"
    #
    # add_permalink_suffix("/:basename", "/:year/:month/:title")
    # # => "/:basename"
    #
    # Returns the updated permalink template
    def add_permalink_suffix(template, permalink_style)
      case permalink_style
      when :pretty
        template << "/"
      when :date, :ordinal, :none
        template << ":output_ext"
      else
        template << "/" if permalink_style.to_s.end_with?("/")
        template << ":output_ext" if permalink_style.to_s.end_with?(":output_ext")
      end
      template
    end

    # Work the same way as Dir.glob but seperating the input into two parts
    # ('dir' + '/' + 'pattern') to make sure the first part('dir') does not act
    # as a pattern.
    #
    # For example, Dir.glob("path[/*") always returns an empty array,
    # because the method fails to find the closing pattern to '[' which is ']'
    #
    # Examples:
    # safe_glob("path[", "*")
    # # => ["path[/file1", "path[/file2"]
    #
    # safe_glob("path", "*", File::FNM_DOTMATCH)
    # # => ["path/.", "path/..", "path/file1"]
    #
    # safe_glob("path", ["**", "*"])
    # # => ["path[/file1", "path[/folder/file2"]
    #
    # dir - the dir where glob will be executed under
    # (the dir will be included to each result)
    # patterns - the patterns (or the pattern) which will be applied under the dir
    # flags - the flags which will be applied to the pattern
    #
    # Returns matched pathes
    def safe_glob(dir, patterns, flags = 0)
      return [] unless Dir.exist?(dir)
      pattern = File.join(Array patterns)
      return [dir] if pattern.empty?
      Dir.chdir(dir) do
        Dir.glob(pattern, flags).map { |f| File.join(dir, f) }
      end
    end
  end
end
|
require 'open-uri'
require 'nokogiri'
module Johnny5
  # Fetches a URL and strips boilerplate (ads, navigation, comments, local
  # assets) from the HTML, leaving the main readable content.
  class Base
    # Class/id substrings that mark an element as boilerplate.
    LIST_OF_BAD_WORDS = %w{trend gallery bar submit ads-inarticle sponsor shopping widget tool promo shoutbox masthead foot footnote combx com- menu side comments comment bookmarks social links ads related similar footer digg totop metadata sitesub nav sidebar commenting options addcomment leaderboard offscreen job prevlink prevnext navigation reply-link hide hidden sidebox archives vcard tab hyperpuff chicklets trackable advertisement email partners crumbs header share discussion popup _ad ad_ ad- -ad extra community disqus about signup feedback byline trackback login blogroll rss}.uniq
    # Container tags whose class/id/style attributes are inspected.
    TAGS_TO_EXAMINE = %w{div p table tr td pre tbody section ul ol li}
    # Tags removed outright regardless of attributes.
    TAGS_TO_REMOVE = %w{header footer nav script noscript form input}

    def initialize(url)
      @url = url
    end

    # Fetches @url and returns the cleaned HTML as a string.
    # Image and movie URLs are recognized but not yet handled.
    # NOTE(review): open(@url) via open-uri fetches arbitrary URLs — confirm
    # callers only pass trusted input.
    def read
      if @url.to_s.match(/(jpg|png|gif|jpeg|bmp|tiff)$/) # this is an image
        # return an image tag
      elsif @url.to_s.match(/(avi|mov|mpeg|flv)$/) # this is a movie
        # this is a movie, put it in an iframe or something...
      else
        # look for content
        html = Nokogiri::HTML(open(@url))
        trim_unwanted_tags(html)
        return html.to_s
      end
    end

    # Mutates the given Nokogiri document in place, removing boilerplate
    # elements, hidden elements, and links/images with non-http paths.
    # Returns the document.
    def trim_unwanted_tags(nokogiri_object)
      TAGS_TO_EXAMINE.each do |tag|
        LIST_OF_BAD_WORDS.each do |bad_word|
          nokogiri_object.xpath("//#{tag}[contains(@class,\"#{bad_word}\")]").remove if !bad_word.blank? # we have to find a way to make this case insensetive
          nokogiri_object.xpath("//#{tag}[contains(@id,\"#{bad_word}\")]").remove if !bad_word.blank?
        end
        nokogiri_object.xpath("//#{tag}[contains(@style,\"hidden\")]").remove
      end
      TAGS_TO_REMOVE.each do |tag|
        nokogiri_object.css("#{tag}").remove
      end
      # removes css/js that have local paths
      # NOTE(review): assumes every <link> has an href attribute — a link
      # without one would raise NoMethodError on nil; confirm inputs.
      nokogiri_object.css('link').each do |header_object|
        header_object.remove if !header_object.attributes['href'].value.match(/http/)
      end
      # removes images that have local paths, tried including imgur (uses data-src)
      # Fixed: this loop referenced the undefined local `html` instead of the
      # `nokogiri_object` parameter, raising NameError at runtime.
      nokogiri_object.css('img').each do |image|
        image.remove unless image.attributes['src'].value.match(/http/) || image.attributes['data-src'].value.match(/http/)
      end
      return nokogiri_object
    end

    def get_main_content(nokogiri_object)
      # here we want to look at the biggest div and take its largest container
      # somewhere here we also want to remove any borders...
    end

    def get_title(nokogiri_object)
    end

    def get_author(nokogiri_object)
    end

    def get_tags(nokogiri_object)
    end
  end
end
whoops — fixed trim_unwanted_tags: the img-removal loop referenced the undefined local `html` instead of the `nokogiri_object` parameter
require 'open-uri'
require 'nokogiri'
module Johnny5
  # Fetches a URL and strips boilerplate (ads, navigation, comments, local
  # assets) from the HTML, leaving the main readable content.
  class Base
    # Class/id substrings that mark an element as boilerplate.
    LIST_OF_BAD_WORDS = %w{trend gallery bar submit ads-inarticle sponsor shopping widget tool promo shoutbox masthead foot footnote combx com- menu side comments comment bookmarks social links ads related similar footer digg totop metadata sitesub nav sidebar commenting options addcomment leaderboard offscreen job prevlink prevnext navigation reply-link hide hidden sidebox archives vcard tab hyperpuff chicklets trackable advertisement email partners crumbs header share discussion popup _ad ad_ ad- -ad extra community disqus about signup feedback byline trackback login blogroll rss}.uniq
    # Container tags whose class/id/style attributes are inspected.
    TAGS_TO_EXAMINE = %w{div p table tr td pre tbody section ul ol li}
    # Tags removed outright regardless of attributes.
    TAGS_TO_REMOVE = %w{header footer nav script noscript form input}

    def initialize(url)
      @url = url
    end

    # Fetches @url and returns the cleaned HTML as a string.
    # Image and movie URLs are recognized but not yet handled.
    def read
      if @url.to_s.match(/(jpg|png|gif|jpeg|bmp|tiff)$/) # this is an image
        # return an image tag
      elsif @url.to_s.match(/(avi|mov|mpeg|flv)$/) # this is a movie
        # this is a movie, put it in an iframe or something...
      else
        # look for content
        document = Nokogiri::HTML(open(@url))
        trim_unwanted_tags(document)
        return document.to_s
      end
    end

    # Mutates the given Nokogiri document in place, removing boilerplate
    # elements, hidden elements, and links/images with non-http paths.
    # Returns the document.
    def trim_unwanted_tags(document)
      TAGS_TO_EXAMINE.each do |tag|
        LIST_OF_BAD_WORDS.each do |word|
          next if word.blank?
          # we have to find a way to make this case insensetive
          document.xpath(%Q{//#{tag}[contains(@class,"#{word}")]}).remove
          document.xpath(%Q{//#{tag}[contains(@id,"#{word}")]}).remove
        end
        document.xpath(%Q{//#{tag}[contains(@style,"hidden")]}).remove
      end
      TAGS_TO_REMOVE.each { |tag| document.css("#{tag}").remove }
      # removes css/js that have local paths
      document.css('link').each do |link_node|
        link_node.remove unless link_node.attributes['href'].value.match(/http/)
      end
      # removes images that have local paths, tried including imgur (uses data-src)
      document.css('img').each do |img_node|
        next if img_node.attributes['src'].value.match(/http/) || img_node.attributes['data-src'].value.match(/http/)
        img_node.remove
      end
      return document
    end

    def get_main_content(nokogiri_object)
      # here we want to look at the biggest div and take its largest container
      # somewhere here we also want to remove any borders...
    end

    def get_title(nokogiri_object)
    end

    def get_author(nokogiri_object)
    end

    def get_tags(nokogiri_object)
    end
  end
end
require_relative '../test_helper'
class AdminAbilityTest < ActiveSupport::TestCase
# Stubs the Bridge reader endpoint and builds a team with an owner user,
# pinning Team.current for the duration of each test.
def setup
  WebMock.stub_request(:post, /#{Regexp.escape(CONFIG['bridge_reader_url_private'])}.*/)
  @t = create_team
  Team.stubs(:current).returns(@t)
  @u = create_user
  @tu = create_team_user user: u , team: t, role: 'owner'
end
# Unstubs Team.current so the stub does not leak into other test classes.
def teardown
  super
  Team.unstub(:current)
end
# Convenience readers for the fixtures built in setup.
attr_reader :u, :t, :tu
# Owners may destroy projects in their own team but may not create,
# read, or update them; projects from other teams are fully off-limits.
test "owner permissions for project" do
  team_project = create_project team: t
  authored_project = create_project team: t, user: u
  foreign_project = create_project
  with_current_user_and_team(u) do
    a = AdminAbility.new
    assert a.cannot?(:create, Project)
    assert a.can?(:index, team_project)
    assert a.can?(:destroy, team_project)
    assert a.can?(:destroy, authored_project)
    assert a.cannot?(:read, team_project)
    assert a.cannot?(:update, team_project)
    assert a.cannot?(:update, authored_project)
    [:read, :update, :destroy].each do |action|
      assert a.cannot?(action, foreign_project)
    end
  end
end
test "owner permissions for media" do
m = create_valid_media
p = create_project team: t
pm = create_project_media project: p, media: m
own_media = create_valid_media user_id: u.id
own_pm = create_project_media project: p, media: own_media
m2 = create_valid_media
pm2 = create_project_media media: m2
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Media)
assert ability.cannot?(:update, m)
assert ability.cannot?(:update, own_media)
assert ability.cannot?(:destroy, m)
assert ability.cannot?(:destroy, own_media)
assert ability.cannot?(:update, m2)
assert ability.cannot?(:destroy, m2)
end
end
test "owner permissions for project media" do
m = create_valid_media
p = create_project team: t
pm = create_project_media project: p, media: m
own_media = create_valid_media user_id: u.id
own_pm = create_project_media project: p, media: own_media
m2 = create_valid_media
pm2 = create_project_media media: m2
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:destroy, pm)
assert ability.can?(:destroy, own_pm)
assert ability.cannot?(:update, pm)
assert ability.cannot?(:update, own_pm)
assert ability.cannot?(:update, pm2)
assert ability.cannot?(:destroy, pm2)
end
end
test "owner permissions for team" do
t2 = create_team
tu_test = create_team_user team: t2, role: 'owner'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:read, t)
assert ability.can?(:update, t)
assert ability.cannot?(:create, Team)
assert ability.cannot?(:destroy, t)
assert ability.cannot?(:update, t2)
assert ability.cannot?(:destroy, t2)
end
end
test "owner permissions for teamUser" do
u2 = create_user
tu2 = create_team_user team: t, role: 'editor'
tu_other = create_team_user
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, TeamUser)
assert ability.cannot?(:update, tu2)
assert ability.cannot?(:destroy, tu2)
assert ability.cannot?(:update, tu_other)
assert ability.cannot?(:destroy, tu_other)
end
end
test "owner permissions for contact" do
c = create_contact team: t
c1 = create_contact
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Contact)
assert ability.cannot?(:read, c)
assert ability.cannot?(:update, c)
assert ability.cannot?(:destroy, c)
assert ability.cannot?(:update, c1)
assert ability.cannot?(:destroy, c1)
end
end
test "owner permissions for user" do
u2_test = create_user
tu2_test = create_team_user user: u2_test , role: 'contributor'
u_test1 = create_user
tu_test1 = create_team_user team: t, user: u_test1, role: 'editor'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, u)
assert ability.cannot?(:destroy, u)
assert ability.cannot?(:update, u_test1)
assert ability.cannot?(:destroy, u_test1)
tu_test1.update_column(:role, 'journalist')
assert ability.cannot?(:update, u_test1)
assert ability.cannot?(:destroy, u_test1)
tu_test1.update_column(:role, 'contributor')
assert ability.cannot?(:update, u_test1)
assert ability.cannot?(:destroy, u_test1)
assert ability.cannot?(:update, u2_test)
assert ability.cannot?(:destroy, u2_test)
end
end
test "owner cannot see users not member of his teams" do
u_member = create_user
tu_member = create_team_user team: t, user: u_member, role: 'contributor', status: 'member'
u_requested = create_user
tu_requested = create_team_user team: t, user: u_requested, role: 'contributor', status: 'requested'
u_invited = create_user
tu_invited = create_team_user team: t, user: u_invited, role: 'contributor', status: 'invited'
u_banned = create_user
tu_banned = create_team_user team: t, user: u_banned, role: 'contributor', status: 'banned'
u_other_team = create_user
tu_other_team = create_team_user user: u_other_team, role: 'contributor', status: 'member'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, u_member)
assert ability.cannot?(:update, u_member)
assert ability.cannot?(:destroy, u_member)
assert ability.cannot?(:read, u_requested)
assert ability.cannot?(:update, u_requested)
assert ability.cannot?(:destroy, u_requested)
assert ability.cannot?(:read, u_invited)
assert ability.cannot?(:update, u_invited)
assert ability.cannot?(:destroy, u_invited)
assert ability.cannot?(:read, u_banned)
assert ability.cannot?(:update, u_banned)
assert ability.cannot?(:destroy, u_banned)
assert ability.cannot?(:read, u_other_team)
assert ability.cannot?(:update, u_other_team)
assert ability.cannot?(:destroy, u_other_team)
end
end
test "owner permissions for comment" do
p = create_project team: t
pm = create_project_media project: p
mc = create_comment
pm.add_annotation mc
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Comment)
assert ability.cannot?(:update, mc)
assert ability.can?(:destroy, mc)
end
end
test "owner of other team permissions for comment" do
p = create_project team: t
pm = create_project_media project: p
mc = create_comment
pm.add_annotation mc
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:create, Comment)
assert ability.cannot?(:update, mc)
assert ability.cannot?(:destroy, mc)
end
end
test "check annotation permissions" do
# test the create/update/destroy operations
tu.role = 'journalist'
tu.save
p = create_project team: t
pm = create_project_media project: p
c = create_comment annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, c)
assert ability.cannot?(:destroy, c)
end
tu.role = 'owner'; tu.save!
Rails.cache.clear
c.text = 'for testing';c.save!
assert_equal c.text, 'for testing'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, c)
assert ability.can?(:destroy, c)
end
end
test "owner permissions for flag" do
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
f = create_flag flag: 'Mark as graphic', annotator: u, annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, f)
f.flag = 'Graphic content'
assert ability.cannot?(:create, f)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, f)
end
end
test "owner of other team permissions for flag" do
p = create_project team: t
pm = create_project_media project: p
f = create_flag flag: 'Mark as graphic', annotator: u, annotated: pm
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:create, Flag)
assert ability.cannot?(:update, f)
assert ability.cannot?(:destroy, f)
end
end
test "owner permissions for status" do
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
s = create_status status: 'verified', annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, s)
assert ability.can?(:update, s)
assert ability.can?(:destroy, s)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, s)
assert ability.cannot?(:destroy, s)
end
end
test "owner permissions for embed" do
p = create_project team: t
pm = create_project_media project: p
em = create_embed annotated: pm
link = create_valid_media({ type: 'link', team: t })
em_link = create_embed annotated: link
account = create_valid_account team: t
em_account = create_embed annotated: account
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, em)
assert ability.cannot?(:read, em)
assert ability.can?(:update, em)
assert ability.cannot?(:destroy, em)
p.update_column(:team_id, nil)
assert ability.cannot?(:destroy, em)
assert ability.cannot?(:read, em_link)
assert ability.cannot?(:update, em_link)
assert ability.cannot?(:destroy, em_link)
assert ability.can?(:update, em_account)
assert ability.cannot?(:read, em_account)
assert ability.cannot?(:destroy, em_account)
end
end
test "owner permissions for tag" do
p = create_project team: t
pm = create_project_media project: p
tg = create_tag tag: 'media_tag', annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, tg)
assert ability.cannot?(:update, tg)
assert ability.can?(:destroy, tg)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, tg)
assert ability.cannot?(:destroy, tg)
end
end
test "owner of other team permissions for tag" do
p = create_project team: t
pm = create_project_media project: p
tg = create_tag tag: 'media_tag', annotated: pm
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:create, tg)
assert ability.cannot?(:update, tg)
assert ability.cannot?(:destroy, tg)
end
end
test "only admin users can manage all" do
u = create_user
u.is_admin = true
u.save
ability = AdminAbility.new(u)
assert ability.can?(:manage, :all)
end
test "admins can do anything" do
u = create_user
u.is_admin = true
u.save
t = create_team
tu = create_team_user user: u , team: t
p = create_project team: t
own_project = create_project team: t, user: u
p2 = create_project
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:create, Project)
assert ability.can?(:update, p)
assert ability.can?(:update, own_project)
assert ability.can?(:destroy, p)
assert ability.can?(:destroy, own_project)
assert ability.can?(:update, p2)
assert ability.can?(:destroy, p2)
end
end
test "should not read source without user" do
s = create_source user: nil
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
end
end
test "should not read own source" do
s = create_source user: u
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
end
end
test "should not read source from other team user" do
other_user = create_user
tu_other = create_team_user user: other_user , team: create_team, role: 'owner'
s = create_source user: other_user
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
assert ability.cannot?(:update, s)
assert ability.cannot?(:destroy, s)
end
end
test "should not read source from team user" do
same_team_user = create_user
tu_other = create_team_user user: same_team_user, team: t, role: 'contributor'
s = create_source user: same_team_user
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
assert ability.cannot?(:update, s)
assert ability.cannot?(:destroy, s)
end
end
test "should only destroy annotation from user teams" do
p1 = create_project team: t
p2 = create_project team: t
pm1 = create_project_media project: p1
pm2 = create_project_media project: p2
a_from_team = create_annotation annotated: pm1
a2_from_team = create_annotation annotated: pm2
a_from_other_team = create_annotation annotated: create_project_media
with_current_user_and_team(u) do
a = AdminAbility.new
assert a.can?(:destroy, a_from_team)
assert a.can?(:destroy, a2_from_team)
assert a.cannot?(:destroy, a_from_other_team)
end
end
test "should not destroy annotation versions" do
p = create_project team: t
pm = create_project_media project: p
with_current_user_and_team(u) do
s = create_status annotated: pm, status: 'verified'
em = create_embed annotated: pm
s_v = s.versions.last
em_v = em.versions.last
ability = AdminAbility.new
# Status versions
assert ability.can?(:create, s_v)
assert ability.cannot?(:read, s_v)
assert ability.cannot?(:update, s_v)
assert ability.cannot?(:destroy, s_v)
# Embed versions
assert ability.can?(:create, em_v)
assert ability.cannot?(:read, em_v)
assert ability.cannot?(:update, em_v)
assert ability.cannot?(:destroy, em_v)
end
end
test "should access rails_admin if user is team owner" do
p = create_project team: t
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:access, :rails_admin)
end
end
test "should not access rails_admin if user not team owner or admin" do
tu.role = 'contributor'
tu.save
p = create_project team: t
%w(contributor journalist editor).each do |role|
tu.role = role; tu.save!
with_current_user_and_team(u) do
ability = AdminAbility.new
assert !ability.can?(:access, :rails_admin)
end
end
end
test "owner permissions for task" do
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
tk = create_task annotator: u, annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, tk)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, tk)
end
end
test "owner permissions for dynamic annotation" do
p = create_project team: t
pm = create_project_media project: p
da = create_dynamic_annotation annotated: pm
own_da = create_dynamic_annotation annotated: pm, annotator: u
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Dynamic)
assert ability.cannot?(:update, da)
assert ability.can?(:destroy, da)
assert ability.cannot?(:update, own_da)
assert ability.can?(:destroy, own_da)
end
end
# NOTE(review): this test instantiates Ability rather than AdminAbility
# like every other test in this suite, and calls the helper with two
# arguments (@u, @t) instead of one — confirm this is intentional and not
# a copy-paste from AbilityTest.
test "owner permissions for export project data" do
  project = create_project team: @t
  project2 = create_project
  with_current_user_and_team(@u, @t) do
    ability = Ability.new
    assert ability.can?(:export_project, project)
    assert ability.cannot?(:export_project, project2)
  end
end
test "owner permissions to task" do
task = create_task annotator: u, team: t
create_annotation_type annotation_type: 'response'
task.response = { annotation_type: 'response', set_fields: {} }.to_json
task.save!
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, Task)
assert ability.cannot?(:update, task)
assert ability.can?(:destroy, task)
end
end
test "owner of other team permissions for task" do
task = create_task annotator: u, team: t
create_annotation_type annotation_type: 'response'
task.response = { annotation_type: 'response', set_fields: {} }.to_json
task.save!
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:update, Task)
assert ability.cannot?(:update, task)
assert ability.cannot?(:destroy, task)
end
end
# Owners can destroy dynamic-annotation fields in their team, but cannot
# update them (nor the Field class at large).
test "owner permissions to dynamic annotation" do
  p = create_project team: t
  pm = create_project_media project: p
  # Fixed: the hash previously passed `annotated:` twice (p then pm);
  # Ruby keeps only the last value, so `annotated: p` was dead code and
  # emitted a duplicated-key warning.
  task = create_task annotator: u, annotated: pm
  dynamic_field = create_field annotation_id: task.id
  with_current_user_and_team(u) do
    ability = AdminAbility.new
    assert ability.cannot?(:update, DynamicAnnotation::Field)
    assert ability.cannot?(:update, dynamic_field)
    assert ability.can?(:destroy, dynamic_field)
  end
end
# An owner of a different team has no rights over this team's
# dynamic-annotation fields.
test "owner of other team permissions for dynamic annotation" do
  p = create_project team: t
  pm = create_project_media project: p
  # Fixed: the hash previously passed `annotated:` twice (p then pm);
  # Ruby keeps only the last value, so `annotated: p` was dead code and
  # emitted a duplicated-key warning.
  task = create_task annotator: u, annotated: pm
  dynamic_field = create_field annotation_id: task.id
  other_user = create_user
  create_team_user user: other_user, team: create_team, role: 'owner'
  with_current_user_and_team(other_user) do
    ability = AdminAbility.new
    assert ability.cannot?(:update, DynamicAnnotation::Field)
    assert ability.cannot?(:update, dynamic_field)
    assert ability.cannot?(:destroy, dynamic_field)
  end
end
test "owner permissions to dynamic" do
p = create_project team: t
pm = create_project_media project: p
s = create_status annotated: pm, status: 'verified'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:destroy, s)
assert ability.can?(:update, s)
end
end
end
Remove a useless duplicate hash key from the tests
require_relative '../test_helper'
class AdminAbilityTest < ActiveSupport::TestCase
# Stubs the Bridge reader endpoint and builds a team with an owner user,
# pinning Team.current for the duration of each test.
def setup
  WebMock.stub_request(:post, /#{Regexp.escape(CONFIG['bridge_reader_url_private'])}.*/)
  @t = create_team
  Team.stubs(:current).returns(@t)
  @u = create_user
  @tu = create_team_user user: u , team: t, role: 'owner'
end
# Unstubs Team.current so the stub does not leak into other test classes.
def teardown
  super
  Team.unstub(:current)
end
# Convenience readers for the fixtures built in setup.
attr_reader :u, :t, :tu
test "owner permissions for project" do
p = create_project team: t
own_project = create_project team: t, user: u
p2 = create_project
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Project)
assert ability.can?(:index, p)
assert ability.can?(:destroy, p)
assert ability.can?(:destroy, own_project)
assert ability.cannot?(:read, p)
assert ability.cannot?(:update, p)
assert ability.cannot?(:update, own_project)
assert ability.cannot?(:read, p2)
assert ability.cannot?(:update, p2)
assert ability.cannot?(:destroy, p2)
end
end
test "owner permissions for media" do
m = create_valid_media
p = create_project team: t
pm = create_project_media project: p, media: m
own_media = create_valid_media user_id: u.id
own_pm = create_project_media project: p, media: own_media
m2 = create_valid_media
pm2 = create_project_media media: m2
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Media)
assert ability.cannot?(:update, m)
assert ability.cannot?(:update, own_media)
assert ability.cannot?(:destroy, m)
assert ability.cannot?(:destroy, own_media)
assert ability.cannot?(:update, m2)
assert ability.cannot?(:destroy, m2)
end
end
test "owner permissions for project media" do
m = create_valid_media
p = create_project team: t
pm = create_project_media project: p, media: m
own_media = create_valid_media user_id: u.id
own_pm = create_project_media project: p, media: own_media
m2 = create_valid_media
pm2 = create_project_media media: m2
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:destroy, pm)
assert ability.can?(:destroy, own_pm)
assert ability.cannot?(:update, pm)
assert ability.cannot?(:update, own_pm)
assert ability.cannot?(:update, pm2)
assert ability.cannot?(:destroy, pm2)
end
end
test "owner permissions for team" do
t2 = create_team
tu_test = create_team_user team: t2, role: 'owner'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:read, t)
assert ability.can?(:update, t)
assert ability.cannot?(:create, Team)
assert ability.cannot?(:destroy, t)
assert ability.cannot?(:update, t2)
assert ability.cannot?(:destroy, t2)
end
end
test "owner permissions for teamUser" do
u2 = create_user
tu2 = create_team_user team: t, role: 'editor'
tu_other = create_team_user
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, TeamUser)
assert ability.cannot?(:update, tu2)
assert ability.cannot?(:destroy, tu2)
assert ability.cannot?(:update, tu_other)
assert ability.cannot?(:destroy, tu_other)
end
end
test "owner permissions for contact" do
c = create_contact team: t
c1 = create_contact
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Contact)
assert ability.cannot?(:read, c)
assert ability.cannot?(:update, c)
assert ability.cannot?(:destroy, c)
assert ability.cannot?(:update, c1)
assert ability.cannot?(:destroy, c1)
end
end
test "owner permissions for user" do
u2_test = create_user
tu2_test = create_team_user user: u2_test , role: 'contributor'
u_test1 = create_user
tu_test1 = create_team_user team: t, user: u_test1, role: 'editor'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, u)
assert ability.cannot?(:destroy, u)
assert ability.cannot?(:update, u_test1)
assert ability.cannot?(:destroy, u_test1)
tu_test1.update_column(:role, 'journalist')
assert ability.cannot?(:update, u_test1)
assert ability.cannot?(:destroy, u_test1)
tu_test1.update_column(:role, 'contributor')
assert ability.cannot?(:update, u_test1)
assert ability.cannot?(:destroy, u_test1)
assert ability.cannot?(:update, u2_test)
assert ability.cannot?(:destroy, u2_test)
end
end
test "owner cannot see users not member of his teams" do
u_member = create_user
tu_member = create_team_user team: t, user: u_member, role: 'contributor', status: 'member'
u_requested = create_user
tu_requested = create_team_user team: t, user: u_requested, role: 'contributor', status: 'requested'
u_invited = create_user
tu_invited = create_team_user team: t, user: u_invited, role: 'contributor', status: 'invited'
u_banned = create_user
tu_banned = create_team_user team: t, user: u_banned, role: 'contributor', status: 'banned'
u_other_team = create_user
tu_other_team = create_team_user user: u_other_team, role: 'contributor', status: 'member'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, u_member)
assert ability.cannot?(:update, u_member)
assert ability.cannot?(:destroy, u_member)
assert ability.cannot?(:read, u_requested)
assert ability.cannot?(:update, u_requested)
assert ability.cannot?(:destroy, u_requested)
assert ability.cannot?(:read, u_invited)
assert ability.cannot?(:update, u_invited)
assert ability.cannot?(:destroy, u_invited)
assert ability.cannot?(:read, u_banned)
assert ability.cannot?(:update, u_banned)
assert ability.cannot?(:destroy, u_banned)
assert ability.cannot?(:read, u_other_team)
assert ability.cannot?(:update, u_other_team)
assert ability.cannot?(:destroy, u_other_team)
end
end
test "owner permissions for comment" do
p = create_project team: t
pm = create_project_media project: p
mc = create_comment
pm.add_annotation mc
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Comment)
assert ability.cannot?(:update, mc)
assert ability.can?(:destroy, mc)
end
end
test "owner of other team permissions for comment" do
p = create_project team: t
pm = create_project_media project: p
mc = create_comment
pm.add_annotation mc
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:create, Comment)
assert ability.cannot?(:update, mc)
assert ability.cannot?(:destroy, mc)
end
end
test "check annotation permissions" do
# test the create/update/destroy operations
tu.role = 'journalist'
tu.save
p = create_project team: t
pm = create_project_media project: p
c = create_comment annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, c)
assert ability.cannot?(:destroy, c)
end
tu.role = 'owner'; tu.save!
Rails.cache.clear
c.text = 'for testing';c.save!
assert_equal c.text, 'for testing'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, c)
assert ability.can?(:destroy, c)
end
end
test "owner permissions for flag" do
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
f = create_flag flag: 'Mark as graphic', annotator: u, annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, f)
f.flag = 'Graphic content'
assert ability.cannot?(:create, f)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, f)
end
end
test "owner of other team permissions for flag" do
p = create_project team: t
pm = create_project_media project: p
f = create_flag flag: 'Mark as graphic', annotator: u, annotated: pm
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:create, Flag)
assert ability.cannot?(:update, f)
assert ability.cannot?(:destroy, f)
end
end
test "owner permissions for status" do
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
s = create_status status: 'verified', annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, s)
assert ability.can?(:update, s)
assert ability.can?(:destroy, s)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, s)
assert ability.cannot?(:destroy, s)
end
end
test "owner permissions for embed" do
p = create_project team: t
pm = create_project_media project: p
em = create_embed annotated: pm
link = create_valid_media({ type: 'link', team: t })
em_link = create_embed annotated: link
account = create_valid_account team: t
em_account = create_embed annotated: account
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, em)
assert ability.cannot?(:read, em)
assert ability.can?(:update, em)
assert ability.cannot?(:destroy, em)
p.update_column(:team_id, nil)
assert ability.cannot?(:destroy, em)
assert ability.cannot?(:read, em_link)
assert ability.cannot?(:update, em_link)
assert ability.cannot?(:destroy, em_link)
assert ability.can?(:update, em_account)
assert ability.cannot?(:read, em_account)
assert ability.cannot?(:destroy, em_account)
end
end
test "owner permissions for tag" do
p = create_project team: t
pm = create_project_media project: p
tg = create_tag tag: 'media_tag', annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, tg)
assert ability.cannot?(:update, tg)
assert ability.can?(:destroy, tg)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, tg)
assert ability.cannot?(:destroy, tg)
end
end
test "owner of other team permissions for tag" do
p = create_project team: t
pm = create_project_media project: p
tg = create_tag tag: 'media_tag', annotated: pm
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:create, tg)
assert ability.cannot?(:update, tg)
assert ability.cannot?(:destroy, tg)
end
end
test "only admin users can manage all" do
u = create_user
u.is_admin = true
u.save
ability = AdminAbility.new(u)
assert ability.can?(:manage, :all)
end
test "admins can do anything" do
u = create_user
u.is_admin = true
u.save
t = create_team
tu = create_team_user user: u , team: t
p = create_project team: t
own_project = create_project team: t, user: u
p2 = create_project
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:create, Project)
assert ability.can?(:update, p)
assert ability.can?(:update, own_project)
assert ability.can?(:destroy, p)
assert ability.can?(:destroy, own_project)
assert ability.can?(:update, p2)
assert ability.can?(:destroy, p2)
end
end
test "should not read source without user" do
s = create_source user: nil
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
end
end
test "should not read own source" do
s = create_source user: u
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
end
end
test "should not read source from other team user" do
other_user = create_user
tu_other = create_team_user user: other_user , team: create_team, role: 'owner'
s = create_source user: other_user
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
assert ability.cannot?(:update, s)
assert ability.cannot?(:destroy, s)
end
end
test "should not read source from team user" do
same_team_user = create_user
tu_other = create_team_user user: same_team_user, team: t, role: 'contributor'
s = create_source user: same_team_user
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:read, s)
assert ability.cannot?(:update, s)
assert ability.cannot?(:destroy, s)
end
end
test "should only destroy annotation from user teams" do
p1 = create_project team: t
p2 = create_project team: t
pm1 = create_project_media project: p1
pm2 = create_project_media project: p2
a_from_team = create_annotation annotated: pm1
a2_from_team = create_annotation annotated: pm2
a_from_other_team = create_annotation annotated: create_project_media
with_current_user_and_team(u) do
a = AdminAbility.new
assert a.can?(:destroy, a_from_team)
assert a.can?(:destroy, a2_from_team)
assert a.cannot?(:destroy, a_from_other_team)
end
end
test "should not destroy annotation versions" do
p = create_project team: t
pm = create_project_media project: p
with_current_user_and_team(u) do
s = create_status annotated: pm, status: 'verified'
em = create_embed annotated: pm
s_v = s.versions.last
em_v = em.versions.last
ability = AdminAbility.new
# Status versions
assert ability.can?(:create, s_v)
assert ability.cannot?(:read, s_v)
assert ability.cannot?(:update, s_v)
assert ability.cannot?(:destroy, s_v)
# Embed versions
assert ability.can?(:create, em_v)
assert ability.cannot?(:read, em_v)
assert ability.cannot?(:update, em_v)
assert ability.cannot?(:destroy, em_v)
end
end
test "should access rails_admin if user is team owner" do
p = create_project team: t
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:access, :rails_admin)
end
end
test "should not access rails_admin if user not team owner or admin" do
tu.role = 'contributor'
tu.save
p = create_project team: t
%w(contributor journalist editor).each do |role|
tu.role = role; tu.save!
with_current_user_and_team(u) do
ability = AdminAbility.new
assert !ability.can?(:access, :rails_admin)
end
end
end
test "owner permissions for task" do
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
tk = create_task annotator: u, annotated: pm
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, tk)
p.update_column(:team_id, nil)
assert ability.cannot?(:create, tk)
end
end
test "owner permissions for dynamic annotation" do
p = create_project team: t
pm = create_project_media project: p
da = create_dynamic_annotation annotated: pm
own_da = create_dynamic_annotation annotated: pm, annotator: u
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:create, Dynamic)
assert ability.cannot?(:update, da)
assert ability.can?(:destroy, da)
assert ability.cannot?(:update, own_da)
assert ability.can?(:destroy, own_da)
end
end
test "owner permissions for export project data" do
project = create_project team: @t
project2 = create_project
with_current_user_and_team(@u, @t) do
ability = Ability.new
assert ability.can?(:export_project, project)
assert ability.cannot?(:export_project, project2)
end
end
test "owner permissions to task" do
task = create_task annotator: u, team: t
create_annotation_type annotation_type: 'response'
task.response = { annotation_type: 'response', set_fields: {} }.to_json
task.save!
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, Task)
assert ability.cannot?(:update, task)
assert ability.can?(:destroy, task)
end
end
test "owner of other team permissions for task" do
task = create_task annotator: u, team: t
create_annotation_type annotation_type: 'response'
task.response = { annotation_type: 'response', set_fields: {} }.to_json
task.save!
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:update, Task)
assert ability.cannot?(:update, task)
assert ability.cannot?(:destroy, task)
end
end
test "owner permissions to dynamic annotation" do
p = create_project team: t
pm = create_project_media project: p
task = create_task annotator: u, annotated: pm
dynamic_field = create_field annotation_id: task.id
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.cannot?(:update, DynamicAnnotation::Field)
assert ability.cannot?(:update, dynamic_field)
assert ability.can?(:destroy, dynamic_field)
end
end
test "owner of other team permissions for dynamic annotation" do
p = create_project team: t
pm = create_project_media project: p
task = create_task annotator: u, annotated: pm
dynamic_field = create_field annotation_id: task.id
other_user = create_user
create_team_user user: other_user, team: create_team, role: 'owner'
with_current_user_and_team(other_user) do
ability = AdminAbility.new
assert ability.cannot?(:update, DynamicAnnotation::Field)
assert ability.cannot?(:update, dynamic_field)
assert ability.cannot?(:destroy, dynamic_field)
end
end
test "owner permissions to dynamic" do
p = create_project team: t
pm = create_project_media project: p
s = create_status annotated: pm, status: 'verified'
with_current_user_and_team(u) do
ability = AdminAbility.new
assert ability.can?(:destroy, s)
assert ability.can?(:update, s)
end
end
end
|
require "rubygems"
gem "thin", ">=1.2.1"
gem "erector", ">=0.6.7"
gem "selenium-client"
dir = File.dirname(__FILE__)
$LOAD_PATH.unshift File.expand_path("#{dir}/../vendor/lucky-luciano/lib")
require "lucky_luciano"
require "fileutils"
require "tmpdir"
require "timeout"
require "cgi"
require "net/http"
require "selenium/client"
require "optparse"
require "json"
require "erector"
require "#{dir}/js_test_core/configuration"
require "#{dir}/js_test_core/extensions"
require "#{dir}/js_test_core/resources"
require "#{dir}/js_test_core/representations"
require "#{dir}/js_test_core/server"
require "#{dir}/js_test_core/client"
require "#{dir}/js_test_core/app"
# Top-level namespace. Module-level calls (e.g. JsTestCore.some_setting)
# are delegated to the shared Configuration instance.
module JsTestCore
  DEFAULT_HOST = "0.0.0.0"
  DEFAULT_PORT = 8080

  class << self
    # Install the default configuration at load time.
    Configuration.instance = Configuration.new

    # Forward any message the Configuration instance understands;
    # everything else raises NoMethodError via super.
    def method_missing(method_name, *args, &block)
      if Configuration.instance.respond_to?(method_name)
        Configuration.instance.send(method_name, *args, &block)
      else
        super
      end
    end

    # Keep respond_to? consistent with the delegation above — without this,
    # JsTestCore.respond_to?(:delegated_method) would wrongly return false.
    def respond_to_missing?(method_name, include_private = false)
      Configuration.instance.respond_to?(method_name, include_private) || super
    end
  end
end
Removed unused dependencies: the requires for fileutils, tmpdir, cgi, and net/http were dropped; no remaining code in this file uses them.
require "rubygems"
gem "thin", ">=1.2.1"
gem "erector", ">=0.6.7"
gem "selenium-client"
dir = File.dirname(__FILE__)
$LOAD_PATH.unshift File.expand_path("#{dir}/../vendor/lucky-luciano/lib")
require "lucky_luciano"
require "timeout"
require "selenium/client"
require "optparse"
require "json"
require "erector"
require "#{dir}/js_test_core/configuration"
require "#{dir}/js_test_core/extensions"
require "#{dir}/js_test_core/resources"
require "#{dir}/js_test_core/representations"
require "#{dir}/js_test_core/server"
require "#{dir}/js_test_core/client"
require "#{dir}/js_test_core/app"
# Top-level namespace. Module-level calls (e.g. JsTestCore.some_setting)
# are delegated to the shared Configuration instance.
module JsTestCore
  DEFAULT_HOST = "0.0.0.0"
  DEFAULT_PORT = 8080

  class << self
    # Install the default configuration at load time.
    Configuration.instance = Configuration.new

    # Forward any message the Configuration instance understands;
    # everything else raises NoMethodError via super.
    def method_missing(method_name, *args, &block)
      if Configuration.instance.respond_to?(method_name)
        Configuration.instance.send(method_name, *args, &block)
      else
        super
      end
    end

    # Keep respond_to? consistent with the delegation above — without this,
    # JsTestCore.respond_to?(:delegated_method) would wrongly return false.
    def respond_to_missing?(method_name, include_private = false)
      Configuration.instance.respond_to?(method_name, include_private) || super
    end
  end
end
|
require_relative '../test_helper'
class ProjectMediaTest < ActiveSupport::TestCase
# Put Sidekiq into fake testing mode so jobs enqueued during a test are
# collected in memory instead of being pushed to Redis. The require is
# deliberately lazy so sidekiq/testing is only loaded in the test env.
def setup
require 'sidekiq/testing'
Sidekiq::Testing.fake!
super
end
# Creation happens for an anonymous caller, for an owner, for a journalist,
# and for a journalist moving their own media between projects. Note the
# stubs of User.current/Team.current must bracket the role-based checks.
test "should create project media" do
assert_difference 'ProjectMedia.count' do
create_project_media
end
u = create_user
t = create_team
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
m = create_valid_media
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
assert_difference 'ProjectMedia.count' do
create_project_media project: p, media: m
end
# journalist should assign any media
m2 = create_valid_media
# Clear the cache so the role change below is picked up.
Rails.cache.clear
tu.update_column(:role, 'journalist')
pm = nil
assert_difference 'ProjectMedia.count' do
pm = create_project_media project: p, media: m2
end
m3 = create_valid_media
m3.user_id = u.id; m3.save!
assert_difference 'ProjectMedia.count' do
pm = create_project_media project: p, media: m3
pm.project = create_project team: t
pm.save!
end
User.unstub(:current)
Team.unstub(:current)
end
# Both project and media are required; a nil for either fails validation
# and no row is created.
test "should have a project and media" do
assert_no_difference 'ProjectMedia.count' do
assert_raise ActiveRecord::RecordInvalid do
create_project_media project: nil
end
assert_raise ActiveRecord::RecordInvalid do
create_project_media media: nil
end
end
end
# With media: nil, passing a quote or a url is enough — a Media record is
# built implicitly (the url case goes through the stubbed Pender service).
test "should create media if url or quote set" do
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
assert_difference 'ProjectMedia.count', 2 do
create_project_media media: nil, quote: 'Claim report'
create_project_media media: nil, url: url
end
end
# Two different input URLs that Pender normalizes to the same canonical URL
# must resolve to the same Media record.
test "should find media by normalized url" do
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media url: url
url2 = 'http://test2.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
pm = create_project_media url: url2
assert_equal pm.media, m
end
# Reusing a URL that already has a Media record attaches the existing
# record instead of creating a duplicate.
# Fix: corrected the typo "exisitng" in the test description.
test "should create with existing media if url exists" do
m = create_valid_media
pm = create_project_media media: nil, url: m.url
assert_equal m, pm.media
end
# A contributor-role team member can add a new (quote) media to a project.
test "should contributor add a new media" do
t = create_team
u = create_user
p = create_project team: t
tu = create_team_user team: t, user: u, role: 'contributor'
with_current_user_and_team(u, t) do
assert_difference 'ProjectMedia.count' do
create_project_media project: p, quote: 'Claim report'
end
end
end
# Owners may move their media between projects; an editor may save but may
# not destroy someone else's media, while they can destroy their own.
test "should update and destroy project media" do
u = create_user
t = create_team
p = create_project team: t
p2 = create_project team: t
m = create_valid_media user_id: u.id
create_team_user team: t, user: u
pm = create_project_media project: p, media: m, user: u
with_current_user_and_team(u, t) do
pm.project_id = p2.id; pm.save!
pm.reload
assert_equal pm.project_id, p2.id
end
u2 = create_user
tu = create_team_user team: t, user: u2, role: 'editor'
with_current_user_and_team(u2, t) do
pm.save!
end
# Editor destroying another user's media is denied.
assert_raise RuntimeError do
with_current_user_and_team(u2, t) do
pm.destroy!
end
end
pm_own = nil
with_current_user_and_team(u2, t) do
own_media = create_valid_media user: u2
pm_own = create_project_media project: p, media: own_media, user: u2
pm_own.project_id = p2.id; pm_own.save!
pm_own.reload
assert_equal pm_own.project_id, p2.id
end
# Editor destroying their own media is allowed.
assert_nothing_raised RuntimeError do
with_current_user_and_team(u2, t) do
pm_own.disable_es_callbacks = true
pm_own.destroy!
end
end
assert_raise RuntimeError do
with_current_user_and_team(u, t) do
pm_own.disable_es_callbacks = true
pm.destroy!
end
end
end
# Read access in a private team is limited to accepted members; requested
# (not yet accepted) members and outsiders are denied.
test "non members should not read project media in private team" do
u = create_user
t = create_team
p = create_project team: t
m = create_media project: p
pm = create_project_media project: p, media: m
pu = create_user
pt = create_team private: true
create_team_user team: pt, user: pu
pu2 = create_user
create_team_user team: pt, user: pu2, status: 'requested'
pp = create_project team: pt
m = create_media project: pp
ppm = create_project_media project: pp, media: m
# Public-team media is readable without any user context.
ProjectMedia.find_if_can(pm.id)
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(u, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
with_current_user_and_team(pu, pt) do
ProjectMedia.find_if_can(ppm.id)
end
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(pu2, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
end
# With Slack notifications enabled on the team, creating media sends a
# message whose body contains the client base URL and a
# /project/:id/media/:id link matching the created records.
test "should notify Slack when project media is created" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
t.set_slack_notifications_enabled = 1; t.set_slack_webhook = 'https://hooks.slack.com/services/123'; t.set_slack_channel = '#test'; t.save!
with_current_user_and_team(u, t) do
m = create_valid_media
pm = create_project_media project: p, media: m
assert pm.sent_to_slack
msg = pm.slack_notification_message
# verify base URL
assert_match "#{CONFIG['checkdesk_client']}/#{t.slug}", msg
# verify notification URL
match = msg.match(/\/project\/([0-9]+)\/media\/([0-9]+)/)
assert_equal p.id, match[1].to_i
assert_equal pm.id, match[2].to_i
# claim media
m = create_claim_media
pm = create_project_media project: p, media: m
assert pm.sent_to_slack
end
end
# Same as above but with no current user: the notification is still sent
# and a claim creation produces the "A new Claim has been added" wording.
test "should notify Slack when project media is created with empty user" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
t.set_slack_notifications_enabled = 1; t.set_slack_webhook = 'https://hooks.slack.com/services/123'; t.set_slack_channel = '#test'; t.save!
with_current_user_and_team(nil, t) do
m = create_valid_media
pm = create_project_media project: p, media: m, user: nil
assert pm.sent_to_slack
msg = pm.slack_notification_message
# verify base URL
assert_match "#{CONFIG['checkdesk_client']}/#{t.slug}", msg
# verify notification URL
match = msg.match(/\/project\/([0-9]+)\/media\/([0-9]+)/)
assert_equal p.id, match[1].to_i
assert_equal pm.id, match[2].to_i
# claim media
m = create_claim_media
pm = create_project_media project: p, media: m, user: nil
assert pm.sent_to_slack
msg = pm.slack_notification_message
assert_match "A new Claim has been added", msg
end
end
# The Slack message attributes the addition to the current user, not to the
# media's original author.
test "should verify attribution of Slack notifications" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
uu = create_user
m = create_valid_media user: uu
t.set_slack_notifications_enabled = 1; t.set_slack_webhook = 'https://hooks.slack.com/services/123'; t.set_slack_channel = '#test'; t.save!
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, origin: 'http://localhost:3333'
assert pm.sent_to_slack
msg = pm.slack_notification_message
assert_match "*#{u.name}* added a new", msg
end
end
# Pusher is notified on create, for both link and claim media.
test "should notify Pusher when project media is created" do
pm = create_project_media
assert pm.sent_to_pusher
# claim media
t = create_team
p = create_project team: t
m = create_claim_media project_id: p.id
pm = create_project_media project: p, media: m
assert pm.sent_to_pusher
end
# Pusher is also notified on destroy.
test "should notify Pusher when project media is destroyed" do
pm = create_project_media
pm.sent_to_pusher = false
pm.destroy!
assert pm.sent_to_pusher
end
# In production mode the Pusher notification is enqueued as a background
# job (4 jobs per creation here) rather than sent inline.
# NOTE(review): Rails.env is stubbed with a Symbol (:production) while
# Rails.env normally returns a StringInquirer — confirm downstream code
# only checks the value loosely.
test "should notify Pusher in background" do
Rails.stubs(:env).returns(:production)
t = create_team
p = create_project team: t
CheckNotifications::Pusher::Worker.drain
assert_equal 0, CheckNotifications::Pusher::Worker.jobs.size
create_project_media project: p
assert_equal 4, CheckNotifications::Pusher::Worker.jobs.size
CheckNotifications::Pusher::Worker.drain
assert_equal 0, CheckNotifications::Pusher::Worker.jobs.size
Rails.unstub(:env)
end
# Embed overrides are stored per ProjectMedia: two project medias sharing
# one Media keep independent title/description overrides, and the latest
# write wins for each.
test "should update project media embed data" do
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
p1 = create_project
p2 = create_project
pm1 = create_project_media project: p1, media: m
pm2 = create_project_media project: p2, media: m
# fetch data (without overridden)
data = pm1.embed
assert_equal 'test media', data['title']
assert_equal 'add desc', data['description']
# Update media title and description for pm1
info = {title: 'Title A', description: 'Desc A'}.to_json
pm1.embed= info
info = {title: 'Title AA', description: 'Desc AA'}.to_json
pm1.embed= info
# Update media title and description for pm2
info = {title: 'Title B', description: 'Desc B'}.to_json
pm2.embed= info
info = {title: 'Title BB', description: 'Desc BB'}.to_json
pm2.embed= info
# fetch data for pm1
data = pm1.embed
assert_equal 'Title AA', data['title']
assert_equal 'Desc AA', data['description']
# fetch data for pm2
data = pm2.embed
assert_equal 'Title BB', data['title']
assert_equal 'Desc BB', data['description']
end
# annotations('comment') returns only comments annotated on this record.
test "should have annotations" do
pm = create_project_media
c1 = create_comment annotated: pm
c2 = create_comment annotated: pm
c3 = create_comment annotated: nil
assert_equal [c1.id, c2.id].sort, pm.reload.annotations('comment').map(&:id).sort
end
# The permissions JSON exposes the same key set for every role (values may
# differ, keys do not).
# NOTE(review): the "load as editor" block appears twice back-to-back —
# the second copy looks like it was meant to exercise a different role;
# confirm intent before removing.
test "should get permissions" do
u = create_user
t = create_team current_user: u
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
pm = create_project_media project: p, current_user: u
perm_keys = ["read ProjectMedia", "update ProjectMedia", "destroy ProjectMedia", "create Comment", "create Flag",
"create Tag", "create Task", "create Dynamic", "restore ProjectMedia", "embed ProjectMedia", "lock Annotation",
"update Status", "administer Content"].sort
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
# load permissions as owner
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as journalist
tu.update_column(:role, 'journalist')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as contributor
tu.update_column(:role, 'contributor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as authenticated
tu.update_column(:team_id, nil)
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
User.unstub(:current)
Team.unstub(:current)
end
# The creating (current) user is recorded on the project media.
test "should set user when project media is created" do
u = create_user
t = create_team
tu = create_team_user team: t, user: u, role: 'journalist'
p = create_project team: t, user: create_user
pm = nil
with_current_user_and_team(u, t) do
pm = create_project_media project: p
end
assert_equal u, pm.user
end
# Uploading an image file sets the embed title to the file name.
test "should create embed for uploaded image" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
pm = ProjectMedia.new
pm.project_id = create_project.id
pm.file = File.new(File.join(Rails.root, 'test', 'data', 'rails.png'))
pm.disable_es_callbacks = true
pm.save!
assert_equal 'rails.png', pm.embed['title']
end
# The (project, media) pair is unique; a duplicate insert raises.
test "should be unique" do
p = create_project
m = create_valid_media
assert_difference 'ProjectMedia.count' do
create_project_media project: p, media: m
end
assert_no_difference 'ProjectMedia.count' do
assert_raises RuntimeError do
create_project_media project: p, media: m
end
end
end
# Strong-parameters: raw ActionController::Parameters are rejected.
test "should protect attributes from mass assignment" do
raw_params = { project: create_project, user: create_user }
params = ActionController::Parameters.new(raw_params)
assert_raise ActiveModel::ForbiddenAttributesError do
ProjectMedia.create(params)
end
end
# For link media, setting embed fields marks each field as overridden one
# by one; for claim media nothing is ever flagged as overridden.
test "should flag overridden attributes" do
t = create_team
p = create_project team: t
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item", "title": "org_title", "description":"org_desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
pm = create_project_media url: url, project: p
attributes = pm.overridden_embed_attributes
attributes.each{|k| assert_not pm.overridden[k]}
pm.embed={title: 'title'}.to_json
assert pm.overridden['title']
attributes = pm.overridden_embed_attributes
attributes.delete('title')
attributes.each{|k| assert_not pm.overridden[k]}
pm.embed={description: 'description'}.to_json
assert pm.overridden['description']
attributes.delete('description')
attributes.each{|k| assert_not pm.overridden[k]}
pm.embed={username: 'username'}.to_json
assert pm.overridden['username']
attributes.delete('username')
attributes.each{|k| assert_not pm.overridden[k]}
# Claim media
pm = create_project_media quote: 'Claim', project: p
pm.embed={title: 'title', description: 'description', username: 'username'}.to_json
pm.overridden_embed_attributes.each{|k| assert_not pm.overridden[k]}
end
# Team checklist items scoped to specific projects only fire for media in
# those projects; unscoped items fire everywhere.
test "should create auto tasks" do
t = create_team
p1 = create_project team: t
p2 = create_project team: t
t.checklist = [ { 'label' => 'Can you see this automatic task?', 'type' => 'free_text', 'description' => 'This was created automatically', 'projects' => [] }, { 'label' => 'Can you see this automatic task for a project only?', 'type' => 'free_text', 'description' => 'This was created automatically', 'projects' => [p2.id] } ]
t.save!
assert_difference 'Task.length', 1 do
pm1 = create_project_media project: p1
end
assert_difference 'Task.length', 2 do
pm2 = create_project_media project: p2
end
end
# Auto tasks are created even when the acting user is a contributor.
test "should contributor create auto tasks" do
t = create_team
t.checklist = [ { 'label' => 'Can you see this automatic task?', 'type' => 'free_text', 'description' => 'This was created automatically', 'projects' => [] }]
t.save!
u = create_user
p = create_project team: t
tu = create_team_user team: t, user: u, role: 'contributor'
with_current_user_and_team(u, t) do
assert_difference 'Task.length' do
create_project_media project: p
end
end
end
# A ProjectSource exists for link media and for quotes with attribution,
# but not for plain claim media.
test "should get project source" do
t = create_team
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
assert_not_nil pm.project_source
c = create_claim_media
pm = create_project_media project: p, media: c
assert_nil pm.project_source
pm = create_project_media project: p, quote: 'Claim', quote_attributions: {name: 'source name'}.to_json
assert_not_nil pm.project_source
end
# Moving media to another project moves its project source along with it.
test "should move related sources after move media to other projects" do
t = create_team
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
ps = pm.project_source
t2 = create_team
p2 = create_project team: t2
pm.project = p2; pm.save!
assert_equal ps.reload.project_id, p2.id
end
# Creation writes 3 PaperTrail versions in total, of which 1 belongs to the
# ProjectMedia record itself.
test "should have versions" do
m = create_valid_media
t = create_team
p = create_project team: t
u = create_user
create_team_user user: u, team: t, role: 'owner'
pm = nil
User.current = u
assert_difference 'PaperTrail::Version.count', 3 do
pm = create_project_media project: p, media: m, user: u
end
assert_equal 1, pm.versions.count
User.current = nil
end
# belonged_to_project keeps returning true for a past project even after
# the media is moved to another one.
test "should check if project media belonged to a previous project" do
t = create_team
u = create_user
create_team_user user: u, team: t
p = create_project team: t
p2 = create_project team: t
with_current_user_and_team(u, t) do
pm = create_project_media project: p
assert ProjectMedia.belonged_to_project(pm.id, p.id)
pm.project = p2; pm.save!
assert_equal p2, pm.project
assert ProjectMedia.belonged_to_project(pm.id, p.id)
end
end
# Builds a long activity trail (comments, tags, flags, status, embeds,
# tasks, responses) and checks the exact event-type multiset; the paginated
# count excludes some event types and is unaffected by destroying
# comment/tag/flag annotations.
test "should get log" do
create_verification_status_stuff
m = create_valid_media
u = create_user
t = create_team
p = create_project team: t
p2 = create_project team: t
create_team_user user: u, team: t, role: 'owner'
at = create_annotation_type annotation_type: 'response'
ft1 = create_field_type field_type: 'task_reference'
ft2 = create_field_type field_type: 'text'
create_field_instance annotation_type_object: at, field_type_object: ft1, name: 'task'
create_field_instance annotation_type_object: at, field_type_object: ft2, name: 'response'
create_field_instance annotation_type_object: at, field_type_object: ft2, name: 'note'
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, user: u
c = create_comment annotated: pm
tg = create_tag annotated: pm
f = create_flag annotated: pm
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'In Progress'; s.save!
e = create_embed annotated: pm, title: 'Test'
info = { title: 'Foo' }.to_json; pm.embed = info; pm.save!
info = { title: 'Bar' }.to_json; pm.embed = info; pm.save!
pm.project_id = p2.id; pm.save!
t = create_task annotated: pm, annotator: u
t = Task.find(t.id); t.response = { annotation_type: 'response', set_fields: { response: 'Test', task: t.id.to_s, note: 'Test' }.to_json }.to_json; t.save!
t = Task.find(t.id); t.label = 'Test?'; t.save!
r = DynamicAnnotation::Field.where(field_name: 'response').last; r.value = 'Test 2'; r.save!
r = DynamicAnnotation::Field.where(field_name: 'note').last; r.value = 'Test 2'; r.save!
assert_equal ["create_dynamic", "create_dynamic", "create_comment", "create_tag", "create_flag", "create_embed", "update_embed", "update_embed", "update_projectmedia", "create_task", "create_dynamicannotationfield", "create_dynamicannotationfield", "create_dynamicannotationfield", "create_dynamicannotationfield", "update_task", "update_task", "update_dynamicannotationfield", "update_dynamicannotationfield", "update_dynamicannotationfield"].sort, pm.get_versions_log.map(&:event_type).sort
assert_equal 15, pm.get_versions_log_count
c.destroy
assert_equal 15, pm.get_versions_log_count
tg.destroy
assert_equal 15, pm.get_versions_log_count
f.destroy
assert_equal 15, pm.get_versions_log_count
end
end
# project_was reflects previous_project_id once the media is moved.
test "should get previous project" do
p1 = create_project
p2 = create_project
pm = create_project_media project: p1
assert_equal p1, pm.project
assert_nil pm.project_was
pm.previous_project_id = p1.id
pm.project_id = p2.id
pm.save!
assert_equal p1, pm.project_was
assert_equal p2, pm.project
end
# Creating image media produces a reverse_image dynamic annotation.
test "should create annotation when project media with picture is created" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
i = create_uploaded_image
assert_difference "Dynamic.where(annotation_type: 'reverse_image').count" do
create_project_media media: i
end
end
# Setting refresh_media re-fetches from Pender (refresh=1), bumps the
# refresh counter, updates the same embed annotation, and touches
# updated_at.
test "should refresh Pender data" do
create_translation_status_stuff
create_verification_status_stuff(false)
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = random_url
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"1"}}')
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"2"}}')
m = create_media url: url
pm = create_project_media media: m
t1 = pm.updated_at.to_i
em1 = pm.media.pender_embed
assert_not_nil em1
assert_equal '1', JSON.parse(em1.data['embed'])['foo']
assert_equal 1, em1.refreshes_count
# Ensure the updated_at comparison below sees a later timestamp.
sleep 2
pm = ProjectMedia.find(pm.id)
pm.refresh_media = true
pm.save!
t2 = pm.reload.updated_at.to_i
assert t2 > t1
em2 = pm.media.pender_embed
assert_equal '2', JSON.parse(em2.data['embed'])['foo']
assert_equal 2, em2.refreshes_count
assert_equal em1, em2
end
# Migration helper: maps an email to a user id (nil when no such user).
test "should get user id for migration" do
pm = ProjectMedia.new
assert_nil pm.send(:user_id_callback, 'test@test.com')
u = create_user(email: 'test@test.com')
assert_equal u.id, pm.send(:user_id_callback, 'test@test.com')
end
# Migration helper: maps an old project id through the provided mapping.
test "should get project id for migration" do
p = create_project
mapping = Hash.new
pm = ProjectMedia.new
assert_nil pm.send(:project_id_callback, 1, mapping)
mapping[1] = p.id
assert_equal p.id, pm.send(:project_id_callback, 1, mapping)
end
# The set_annotation attribute creates the described annotation inline.
test "should set annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'text').last || create_field_type(field_type: 'text', label: 'Text')
lt = create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'translation', label: 'Translation'
create_field_instance annotation_type_object: at, name: 'translation_text', label: 'Translation Text', field_type_object: ft, optional: false
create_field_instance annotation_type_object: at, name: 'translation_note', label: 'Translation Note', field_type_object: ft, optional: true
create_field_instance annotation_type_object: at, name: 'translation_language', label: 'Translation Language', field_type_object: lt, optional: false
assert_equal 0, Annotation.where(annotation_type: 'translation').count
create_project_media set_annotation: { annotation_type: 'translation', set_fields: { 'translation_text' => 'Foo', 'translation_note' => 'Bar', 'translation_language' => 'pt' }.to_json }.to_json
assert_equal 1, Annotation.where(annotation_type: 'translation').count
end
# check_search_team exposes a CheckSearch scoped to the team.
test "should have reference to search team object" do
pm = create_project_media
assert_kind_of CheckSearch, pm.check_search_team
end
# check_search_project exposes a CheckSearch scoped to the project.
test "should have reference to search project object" do
pm = create_project_media
assert_kind_of CheckSearch, pm.check_search_project
end
# An mt (machine translation) annotation is only created when the project
# declares target languages different from the detected language.
test "should have empty mt annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'language').last || create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'language', label: 'Language'
create_field_instance annotation_type_object: at, name: 'language', label: 'Language', field_type_object: ft, optional: false
ft = DynamicAnnotation::FieldType.where(field_type: 'json').last || create_field_type(field_type: 'json', label: 'JSON structure')
at = create_annotation_type annotation_type: 'mt', label: 'Machine translation'
create_field_instance annotation_type_object: at, name: 'mt_translations', label: 'Machine translations', field_type_object: ft, optional: false
create_bot name: 'Alegre Bot'
t = create_team
p = create_project team: t
text = 'Test'
stub_configs({ 'alegre_host' => 'http://alegre', 'alegre_token' => 'test' }) do
url = CONFIG['alegre_host'] + "/api/languages/identification?text=" + text
response = '{"type":"language","data": [["EN", 1]]}'
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm = create_project_media project: p, quote: text
mt = pm.annotations.where(annotation_type: 'mt').last
assert_nil mt
p.settings = {:languages => ['ar']}; p.save!
pm = create_project_media project: p, quote: text
mt = pm.annotations.where(annotation_type: 'mt').last
assert_not_nil mt
end
end
# update_mt drives the Alegre MT call: an auth failure or an error-typed
# response leaves the translations field empty; a valid response stores
# one translation.
test "should update mt annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'language').last || create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'language', label: 'Language'
create_field_instance annotation_type_object: at, name: 'language', label: 'Language', field_type_object: ft, optional: false
ft = DynamicAnnotation::FieldType.where(field_type: 'json').last || create_field_type(field_type: 'json', label: 'JSON structure')
at = create_annotation_type annotation_type: 'mt', label: 'Machine translation'
create_field_instance annotation_type_object: at, name: 'mt_translations', label: 'Machine translations', field_type_object: ft, optional: false
u = create_user
t = create_team
create_team_user team: t, user: u, role: 'owner'
u = User.find(u.id)
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
p = create_project team: t
p.settings = {:languages => ['ar', 'en']}; p.save!
text = 'Testing'
stub_configs({ 'alegre_host' => 'http://alegre', 'alegre_token' => 'test' }) do
url = CONFIG['alegre_host'] + "/api/languages/identification?text=" + text
response = '{"type":"language","data": [["EN", 1]]}'
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm = create_project_media project: p, quote: text
pm2 = create_project_media project: p, quote: text
Sidekiq::Testing.inline! do
url = CONFIG['alegre_host'] + "/api/mt?from=en&to=ar&text=" + text
# Test with machine translation
response = '{"type":"mt","data": "testing -ar"}'
# Test handle raising an error
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> 'in_valid_token'}).to_return(body: response)
pm.update_mt=1
mt_field = DynamicAnnotation::Field.joins(:annotation).where('annotations.annotation_type' => 'mt', 'annotations.annotated_type' => pm.class.name, 'annotations.annotated_id' => pm.id.to_s, field_type: 'json').first
assert_equal 0, JSON.parse(mt_field.value).size
# Test with valid response
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm.update_mt=1
mt_field = DynamicAnnotation::Field.joins(:annotation).where('annotations.annotation_type' => 'mt', 'annotations.annotated_type' => pm.class.name, 'annotations.annotated_id' => pm.id.to_s, field_type: 'json').first
assert_equal 1, JSON.parse(mt_field.value).size
# Test with type => error
response = '{"type":"error","data": {"message": "Language not supported"}}'
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm2.update_mt=1
mt_field = DynamicAnnotation::Field.joins(:annotation).where('annotations.annotation_type' => 'mt', 'annotations.annotated_type' => pm2.class.name, 'annotations.annotated_id' => pm2.id.to_s, field_type: 'json').first
assert_equal 0, JSON.parse(mt_field.value).size
end
end
User.unstub(:current)
Team.unstub(:current)
end
test "should get dynamic annotation by type" do
create_annotation_type annotation_type: 'foo'
create_annotation_type annotation_type: 'bar'
pm = create_project_media
d1 = create_dynamic_annotation annotation_type: 'foo', annotated: pm
d2 = create_dynamic_annotation annotation_type: 'bar', annotated: pm
assert_equal d1, pm.get_dynamic_annotation('foo')
assert_equal d2, pm.get_dynamic_annotation('bar')
end
test "should get report type" do
c = create_claim_media
l = create_link
m = create_project_media media: c
assert_equal 'claim', m.report_type
m = create_project_media media: l
assert_equal 'link', m.report_type
end
test "should delete project media" do
t = create_team
u = create_user
u2 = create_user
tu = create_team_user team: t, user: u, role: 'owner'
tu = create_team_user team: t, user: u2
p = create_project team: t
pm = create_project_media project: p, quote: 'Claim', user: u2
at = create_annotation_type annotation_type: 'test'
ft = create_field_type
fi = create_field_instance name: 'test', field_type_object: ft, annotation_type_object: at
a = create_dynamic_annotation annotator: u2, annotated: pm, annotation_type: 'test', set_fields: { test: 'Test' }.to_json
RequestStore.store[:disable_es_callbacks] = true
with_current_user_and_team(u, t) do
pm.disable_es_callbacks = true
pm.destroy
end
RequestStore.store[:disable_es_callbacks] = false
end
test "should have oEmbed endpoint" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
pm = create_project_media media: m
assert_equal 'test media', pm.as_oembed[:title]
end
test "should have oEmbed URL" do
RequestStore[:request] = nil
t = create_team private: false
p = create_project team: t
pm = create_project_media project: p
stub_config('checkdesk_base_url', 'https://checkmedia.org') do
assert_equal "https://checkmedia.org/api/project_medias/#{pm.id}/oembed", pm.oembed_url
end
t = create_team private: true
p = create_project team: t
pm = create_project_media project: p
stub_config('checkdesk_base_url', 'https://checkmedia.org') do
assert_equal "https://checkmedia.org/api/project_medias/#{pm.id}/oembed", pm.oembed_url
end
end
test "should get author name for oEmbed" do
u = create_user name: 'Foo Bar'
pm = create_project_media user: u
assert_equal 'Foo Bar', pm.author_name
pm.user = nil
assert_equal '', pm.author_name
end
test "should get author URL for oEmbed" do
url = 'http://twitter.com/test'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"profile"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
u = create_user url: url, provider: 'twitter'
pm = create_project_media user: u
assert_equal url, pm.author_url
pm.user = create_user
assert_equal '', pm.author_url
pm.user = nil
assert_equal '', pm.author_url
end
test "should get author picture for oEmbed" do
u = create_user
pm = create_project_media user: u
assert_match /^http/, pm.author_picture
end
test "should get author username for oEmbed" do
u = create_user login: 'test'
pm = create_project_media user: u
assert_equal 'test', pm.author_username
pm.user = nil
assert_equal '', pm.author_username
end
test "should get author role for oEmbed" do
t = create_team
u = create_user
create_team_user user: u, team: t, role: 'journalist'
p = create_project team: t
pm = create_project_media project: p, user: u
assert_equal 'journalist', pm.author_role
pm.user = create_user
assert_equal 'none', pm.author_role
pm.user = nil
assert_equal 'none', pm.author_role
end
# source_url is the external link for link media, and a checkdesk_client
# URL for claim media.
test "should get source URL for external link for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal url, pm.source_url
c = create_claim_media
pm = create_project_media media: c
assert_match CONFIG['checkdesk_client'], pm.source_url
end
# Tasks with a saved response are "completed"; the rest are "open".
test "should get resolved tasks for oEmbed" do
create_annotation_type annotation_type: 'response'
pm = create_project_media
assert_equal [], pm.completed_tasks
assert_equal 0, pm.completed_tasks_count
t1 = create_task annotated: pm
t1.response = { annotation_type: 'response', set_fields: {} }.to_json
t1.save!
t2 = create_task annotated: pm
assert_equal [t1], pm.completed_tasks
assert_equal [t2], pm.open_tasks
assert_equal 1, pm.completed_tasks_count
end
# comments / comments_count expose comment annotations on the item.
test "should get comments for oEmbed" do
pm = create_project_media
assert_equal [], pm.comments
assert_equal 0, pm.comments_count
c = create_comment annotated: pm
assert_equal [c], pm.comments
assert_equal 1, pm.comments_count
end
# provider is the capitalized link provider for links, and the configured
# app_name for claims.
test "should get provider for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal 'Twitter', pm.provider
c = create_claim_media
pm = create_project_media media: c
stub_config('app_name', 'Check') do
assert_equal 'Check', pm.provider
end
end
# published_at uses the link's published_at when available, otherwise the
# current time (claims).
test "should get published time for oEmbed" do
create_translation_status_stuff
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:30:00"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal '25/01/1989', pm.published_at.strftime('%d/%m/%Y')
c = create_claim_media
pm = create_project_media media: c
assert_equal Time.now.strftime('%d/%m/%Y'), pm.published_at.strftime('%d/%m/%Y')
end
# source_author prefers the link's author_name; for claims it falls back
# to the creating user's name.
test "should get source author for oEmbed" do
u = create_user name: 'Foo'
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","author_name":"Bar"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l, user: u
assert_equal 'Bar', pm.source_author[:author_name]
c = create_claim_media
pm = create_project_media media: c, user: u
assert_equal 'Foo', pm.source_author[:author_name]
end
# Golden-file comparison of the rendered oEmbed HTML: timestamps are
# stubbed and volatile parts (URLs, <head>) are normalized via gsub on
# both expected and actual before comparing.
test "should render oEmbed HTML" do
create_translation_status_stuff
create_verification_status_stuff(false)
Bot::Alegre.delete_all
u = create_user login: 'test', name: 'Test', profile_image: 'http://profile.picture'
c = create_claim_media quote: 'Test'
t = create_team name: 'Test Team', slug: 'test-team'
p = create_project title: 'Test Project', team: t
pm = create_project_media media: c, user: u, project: p
create_comment text: 'A comment', annotated: pm
create_comment text: 'A second comment', annotated: pm
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
ft2 = create_field_type field_type: 'task_reference', label: 'Task Reference'
fi1 = create_field_instance annotation_type_object: at, name: 'response_task', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_task', label: 'Note', field_type_object: ft1
fi3 = create_field_instance annotation_type_object: at, name: 'task_reference', label: 'Task', field_type_object: ft2
t = create_task annotated: pm
t.response = { annotation_type: 'task_response_free_text', set_fields: { response_task: 'Task response', task_reference: t.id.to_s }.to_json }.to_json
t.save!
ProjectMedia.any_instance.stubs(:created_at).returns(Time.parse('2016-06-05'))
ProjectMedia.any_instance.stubs(:updated_at).returns(Time.parse('2016-06-05'))
expected = File.read(File.join(Rails.root, 'test', 'data', "oembed-#{pm.default_media_status_type}.html")).gsub(/project\/[0-9]+\/media\/[0-9]+/, 'url').gsub(/.*<body/m, '<body').gsub('http://localhost:3333', CONFIG['checkdesk_client']).gsub('http://localhost:3000', CONFIG['checkdesk_base_url'])
actual = ProjectMedia.find(pm.id).html.gsub(/project\/[0-9]+\/media\/[0-9]+/, 'url').gsub(/.*<body/m, '<body')
assert_equal expected, actual
ProjectMedia.any_instance.unstub(:created_at)
ProjectMedia.any_instance.unstub(:updated_at)
end
# The oEmbed metadata accessor must always yield a String.
test "should have metadata for oEmbed" do
  item = create_project_media
  metadata = item.metadata
  assert_kind_of String, metadata
end
# Annotating an embedded item must purge cached renditions: exact call
# counts on CcDeville/PenderClient are pinned, so cache-path changes will
# surface here.
test "should clear caches when media is updated" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pm = create_project_media
create_dynamic_annotation annotation_type: 'embed_code', annotated: pm
u = create_user
ProjectMedia.any_instance.unstub(:clear_caches)
CcDeville.expects(:clear_cache_for_url).returns(nil).times(52)
PenderClient::Request.expects(:get_medias).returns(nil).times(16)
Sidekiq::Testing.inline! do
create_comment annotated: pm, user: u
create_task annotated: pm, user: u
end
CcDeville.unstub(:clear_cache_for_url)
PenderClient::Request.unstub(:get_medias)
end
# set_tasks_responses pre-fills auto-task answers keyed by task slug
# ('when' => checklist label 'When?').
test "should respond to auto-tasks on creation" do
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
ft2 = create_field_type field_type: 'task_reference', label: 'Task Reference'
fi1 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_free_text', label: 'Note', field_type_object: ft1
fi3 = create_field_instance annotation_type_object: at, name: 'task_free_text', label: 'Task', field_type_object: ft2
t = create_team
p = create_project team: t
t.checklist = [ { 'label' => 'When?', 'type' => 'free_text', 'description' => '', 'projects' => [] } ]
t.save!
pm = create_project_media(project: p, set_tasks_responses: { 'when' => 'Yesterday' })
t = Task.where(annotation_type: 'task').last
assert_equal 'Yesterday', t.first_response
end
# Auto-answers team checklist tasks from Krzana json+ld payloads via the
# per-task "mapping" (JSONPath match + prefix). Each project gets one
# mapped task type: p => free_text, p2 => geolocation, p3 => datetime.
test "should auto-response for Krzana report" do
at = create_annotation_type annotation_type: 'task_response_geolocation', label: 'Task Response Geolocation'
geotype = create_field_type field_type: 'geojson', label: 'GeoJSON'
create_field_instance annotation_type_object: at, name: 'response_geolocation', field_type_object: geotype
at = create_annotation_type annotation_type: 'task_response_datetime', label: 'Task Response Date Time'
datetime = create_field_type field_type: 'datetime', label: 'Date Time'
create_field_instance annotation_type_object: at, name: 'response_datetime', field_type_object: datetime
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
ft2 = create_field_type field_type: 'task_reference', label: 'Task Reference'
fi1 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_free_text', label: 'Note', field_type_object: ft1
fi3 = create_field_instance annotation_type_object: at, name: 'task_free_text', label: 'Task', field_type_object: ft2
t = create_team
p = create_project team: t
p2 = create_project team: t
p3 = create_project team: t
t.checklist = [ { "label" => "who?", "type" => "free_text", "description" => "",
"mapping" => { "type" => "free_text", "match" => "$.mentions[?(@['@type'] == 'Person')].name", "prefix" => "Suggested by Krzana: "},
"projects" => [p.id] },
{ "label" => "where?", "type" => "geolocation", "description" => "",
"mapping" => { "type" => "geolocation", "match" => "$.mentions[?(@['@type'] == 'Place')]", "prefix" => ""},
"projects" => [p2.id] },
{ "label" => "when?", "type" => "datetime", "description" => "",
"mapping" => { "type" => "datetime", "match" => "dateCreated", "prefix" => ""},
"projects" => [p3.id] }
]
t.save!
pender_url = CONFIG['pender_url_private'] + '/api/medias'
# test empty json+ld
url = 'http://test1.com'
raw = {"json+ld": {}}
response = {'type':'media','data': {'url': url, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
pm = create_project_media project: p, url: url
t = Task.where(annotation_type: 'task', annotated_id: pm.id).last
assert_nil t.first_response
# test with non exist value
url1 = 'http://test11.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person" } ] } }
response = {'type':'media','data': {'url': url1, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url1 } }).to_return(body: response)
pm1 = create_project_media project: p, url: url1
t = Task.where(annotation_type: 'task', annotated_id: pm1.id).last
assert_nil t.first_response
# test with empty value
url12 = 'http://test12.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "" } ] } }
response = {'type':'media','data': {'url': url12, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url12 } }).to_return(body: response)
pm12 = create_project_media project: p, url: url12
t = Task.where(annotation_type: 'task', annotated_id: pm12.id).last
assert_nil t.first_response
# test with single selection
url2 = 'http://test2.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" } ] } }
response = {'type':'media','data': {'url': url2, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
pm2 = create_project_media project: p, url: url2
t = Task.where(annotation_type: 'task', annotated_id: pm2.id).last
assert_equal "Suggested by Krzana: first_name", t.first_response
# test multiple selection (should get first one)
url3 = 'http://test3.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" }, { "@type": "Person", "name": "last_name" } ] } }
response = {'type':'media','data': {'url': url3, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url3 } }).to_return(body: response)
pm3 = create_project_media project: p, url: url3
t = Task.where(annotation_type: 'task', annotated_id: pm3.id).last
assert_equal "Suggested by Krzana: first_name", t.first_response
# test geolocation mapping
url4 = 'http://test4.com'
raw = { "json+ld": {
"mentions": [ { "name": "Delimara Powerplant", "@type": "Place", "geo": { "latitude": 35.83020073454, "longitude": 14.55602645874 } } ]
} }
response = {'type':'media','data': {'url': url4, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url4 } }).to_return(body: response)
pm4 = create_project_media project: p2, url: url4
t = Task.where(annotation_type: 'task', annotated_id: pm4.id).last
# NOTE(review): assertion disabled — geolocation auto-response is exercised
# but not verified; confirm whether this should be re-enabled.
# assert_not_nil t.first_response
# test datetime mapping
url5 = 'http://test5.com'
raw = { "json+ld": { "dateCreated": "2017-08-30T14:22:28+00:00" } }
response = {'type':'media','data': {'url': url5, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url5 } }).to_return(body: response)
pm5 = create_project_media project: p3, url: url5
t = Task.where(annotation_type: 'task', annotated_id: pm5.id).last
# NOTE(review): assertion disabled — same as the geolocation case above.
# assert_not_nil t.first_response
end
# A 409 from Pender (code 9) becomes a validation error, not an exception.
test "should expose conflict error from Pender" do
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"error","data":{"message":"Conflict","code":9}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response, status: 409)
p = create_project
pm = ProjectMedia.new
pm.project = p
pm.url = url
assert !pm.valid?
assert pm.errors.messages.values.flatten.include?('This link is already being parsed, please try again in a few seconds.')
end
# A ProjectSource is created once per author account and reused for other
# media by the same author (and for quote attributions with the same name,
# including non-ASCII names).
test "should create project source" do
t = create_team
p = create_project team: t
u = create_user
create_team_user team: t, user: u, role: 'owner'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
media_url = 'http://www.facebook.com/meedan/posts/123456'
media2_url = 'http://www.facebook.com/meedan/posts/456789'
author_url = 'http://facebook.com/123456'
data = { url: media_url, author_url: author_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: media_url } }).to_return(body: response)
data = { url: media2_url, author_url: author_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: media2_url } }).to_return(body: response)
data = { url: author_url, provider: 'facebook', picture: 'http://fb/p.png', author_name: 'UNIVERSITÄT', username: 'username', title: 'Foo', description: 'Bar', type: 'profile' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: author_url } }).to_return(body: response)
with_current_user_and_team(u, t) do
assert_difference 'ProjectSource.count' do
create_project_media project: p, url: media_url
end
# should not duplicate ProjectSource for same account
assert_no_difference 'ProjectSource.count' do
create_project_media project: p, url: media2_url
end
assert_no_difference 'ProjectSource.count' do
create_project_media project: p, quote: 'Claim', quote_attributions: {name: 'UNIVERSITÄT'}.to_json
end
end
# test move media to project with same source
p2 = create_project team: t
p3 = create_project team: t
with_current_user_and_team(u, t) do
pm = create_project_media project: p2, url: media_url
pm2 = create_project_media project: p3, url: media2_url
assert_nothing_raised do
pm.project = p3
pm.save!
end
end
end
# quote_attributions creates a Source by name and reuses it for later
# claims with the same attribution name.
test "should set quote attributions" do
t = create_team
p = create_project team: t
u = create_user
create_team_user team: t, user: u, role: 'owner'
with_current_user_and_team(u, t) do
assert_difference 'ClaimSource.count', 2 do
pm = create_project_media project: p, quote: 'Claim', quote_attributions: {name: 'source name'}.to_json
s = pm.project_source.source
assert_not_nil pm.project_source
assert_equal s.name, 'source name'
pm2 = create_project_media project: p, quote: 'Claim 2', quote_attributions: {name: 'source name'}.to_json
assert_equal pm2.project_source.source, s
end
end
end
# get_project_source returns nil when the link's account was destroyed.
test "should not get project source" do
p = create_project
l = create_link
a = l.account
a.destroy
l = Link.find(l.id)
pm = create_project_media project: p, media: l
assert_nil pm.send(:get_project_source, p.id)
end
# Creating media under an archived project is a validation error.
test "should not create project media under archived project" do
p = create_project
p.archived = true
p.save!
assert_raises ActiveRecord::RecordInvalid do
create_project_media project: p
end
end
# New items start unarchived; setting and saving the flag must persist.
test "should archive" do
  item = create_project_media
  refute item.archived
  item.archived = true
  item.save!
  assert item.reload.archived
end
# The first as_oembed call writes the embed-code annotation (2 versions);
# subsequent calls are idempotent.
test "should create annotation when is embedded for the first time" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pm = create_project_media
assert_difference 'PaperTrail::Version.count', 2 do
pm.as_oembed
end
assert_no_difference 'PaperTrail::Version.count' do
pm.as_oembed
end
end
# With the browser-extension limit disabled, requests carrying the
# X-Check-Client extension header must be rejected.
test "should not create media through browser extension if team is not allowed to" do
t = create_team
t.set_limits_browser_extension = false
t.save!
p = create_project team: t
assert_raises ActiveRecord::RecordInvalid do
RequestStore.stubs(:[]).with(:request).returns(OpenStruct.new({ headers: { 'X-Check-Client' => 'browser-extension' } }))
create_project_media project: p
RequestStore.unstub(:[])
end
end
# The private mapping_value helper must return nil (not raise) on
# unusable input.
test "should not crash if mapping value is invalid" do
  assert_nothing_raised do
    media_item = ProjectMedia.new
    assert_nil media_item.send(:mapping_value, 'foo', 'bar')
  end
end
# Two owners writing the same embed info in sequence must not corrupt the
# annotation; title_is_overridden? stays callable afterwards.
test "should not crash if another user tries to update media" do
u1 = create_user
u2 = create_user
t = create_team
p = create_project team: t
create_team_user team: t, user: u1, role: 'owner'
create_team_user team: t, user: u2, role: 'owner'
pm = nil
with_current_user_and_team(u1, t) do
pm = create_project_media project: p, user: u1
pm = ProjectMedia.find(pm.id)
info = { title: 'Title' }.to_json
pm.embed = info
pm.save!
end
with_current_user_and_team(u2, t) do
pm = ProjectMedia.find(pm.id)
info = { title: 'Title' }.to_json
pm.embed = info
pm.save!
end
assert_nothing_raised do
embed = pm.get_annotations('embed').last.load
embed.title_is_overridden?
end
end
# Team-defined custom statuses drive the embed's status label and color.
test "should return custom status HTML and color for embed" do
create_translation_status_stuff
create_verification_status_stuff(false)
t = create_team
value = {
label: 'Status',
default: 'stop',
active: 'done',
statuses: [
{ id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
{ id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
]
}
pm = create_project_media
t.send "set_media_#{pm.default_media_status_type.pluralize}", value
t.save!
p = create_project team: t
pm = create_project_media project: p
assert_equal 'stop', pm.last_status
assert_equal '<span id="oembed__status">Stopped</span>', pm.last_status_html
assert_equal '#a00', pm.last_status_color
s = pm.last_status_obj
s.status = 'done'
s.save!
assert_equal '<span id="oembed__status">Done!</span>', pm.last_status_html
assert_equal '#fc3', pm.last_status_color
end
# Core (non-custom) statuses render with an l10n class and fixed colors.
test "should return core status HTML and color for embed" do
create_translation_status_stuff
create_verification_status_stuff(false)
t = create_team
p = create_project team: t
pm = create_project_media project: p
assert_equal "<span id=\"oembed__status\" class=\"l\">status_#{pm.last_status}</span>", pm.last_status_html
assert_equal '#518FFF', pm.last_status_color.upcase
s = pm.last_status_obj
s.status = 'in_progress'
s.save!
assert_equal '<span id="oembed__status" class="l">status_in_progress</span>', pm.last_status_html
assert_equal '#FFBB5D', pm.last_status_color.upcase
end
# description defaults to the claim quote and can be overridden via embed.
test "should get description" do
c = create_claim_media quote: 'Test'
pm = create_project_media media: c
assert_equal 'Test', pm.reload.description
info = { description: 'Test 2' }.to_json
pm.embed = info
pm.save!
assert_equal 'Test 2', pm.reload.description
end
# pender_archive annotation matrix: created only when the media is a link,
# the annotation type exists, the team limit allows screenshots, AND the
# archiver is enabled. The next four tests flip one condition each.
test "should create pender_archive annotation when link is created" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
# Condition flipped: media is a claim, not a link.
test "should not create pender_archive annotation when media is created if media is not a link" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
c = create_claim_media
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: c, project: p
end
end
# Condition flipped: the annotation type was never registered.
test "should not create pender_archive annotation when link is created if there is no annotation type" do
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
# Condition flipped: team screenshot limit is off.
test "should not create pender_archive annotation when link is created if team is not allowed" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = false
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
# Condition flipped: the archiver itself is disabled.
test "should not create pender_archive annotation when link is created if archiver is not enabled" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 0
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
# The archive annotation can be built from pender_embed data...
test "should create pender_archive annotation when link is created using information from pender_embed" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
Link.any_instance.stubs(:pender_embed).returns(OpenStruct.new({ data: { embed: { screenshot_taken: 1, 'archives' => {} }.to_json } }))
assert_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
Link.any_instance.unstub(:pender_embed)
end
# ...and falls back to pender_data when pender_embed raises.
test "should create pender_archive annotation when link is created using information from pender_data" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
Link.any_instance.stubs(:pender_data).returns({ screenshot_taken: 1, 'archives' => {} })
Link.any_instance.stubs(:pender_embed).raises(RuntimeError)
assert_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
Link.any_instance.unstub(:pender_data)
Link.any_instance.unstub(:pender_embed)
end
# contributing_users_count: creator + 4 distinct annotators = 5.
test "should get number of contributing users" do
create_verification_status_stuff
pm = create_project_media
create_comment annotated: pm, annotator: create_user
create_comment annotated: pm, annotator: create_user
create_tag annotated: pm, annotator: create_user
create_task annotated: pm, annotator: create_user
assert_equal 5, pm.contributing_users_count
end
# time_to_status(:first/:last) is measured from creation; Time.now is
# stubbed to advance the clock deterministically.
test "should get time to first and last status" do
create_translation_status_stuff
create_verification_status_stuff(false)
u = create_user
t = create_team
create_team_user user: u, team: t, role: 'owner'
p = create_project team: t
with_current_user_and_team(u, t) do
time = Time.now - 10.minutes
Time.stubs(:now).returns(time)
pm = create_project_media project: p, user: u
assert_equal '', pm.time_to_status(:first)
assert_equal '', pm.time_to_status(:last)
Time.stubs(:now).returns(time + 5.minutes)
s = pm.last_status_obj
s.status = 'In Progress'; s.save!
assert_equal '', pm.time_to_status(:first)
assert_equal 5.minutes.to_i, pm.time_to_status(:last)
Time.stubs(:now).returns(time + 8.minutes)
s = pm.last_status_obj
s.status = ::Workflow::Workflow.core_options(pm, pm.default_media_status_type)[:default]
s.save!
assert_equal 5.minutes.to_i, pm.time_to_status(:first)
assert_equal 8.minutes.to_i, pm.time_to_status(:last)
Time.unstub(:now)
end
end
# Terminal statuses ('verified'/'false') are rejected while a required
# task is still open, and accepted once it is answered.
test "should reject a status of verified if all required tasks are not resolved" do
create_verification_status_stuff
create_annotation_type annotation_type: 'response'
pm = create_project_media
t1 = create_task annotated: pm
t2 = create_task annotated: pm, required: true
t1.response = { annotation_type: 'response', set_fields: {} }.to_json
t1.save!
s = pm.annotations.where(annotation_type: 'verification_status').last.load
assert_raise ActiveRecord::RecordInvalid do
s.status = 'verified'; s.save!
end
assert_raise ActiveRecord::RecordInvalid do
s.status = 'false'; s.save!
end
t2.response = { annotation_type: 'response', set_fields: {} }.to_json
t2.save!
s.status = 'verified'; s.save!
assert_equal s.reload.status, 'verified'
end
# Adding a *required* task to a resolved item reopens it (in_progress);
# a non-required task does not.
test "should back status to active if required task added to resolved item" do
create_translation_status_stuff
create_verification_status_stuff(false)
p = create_project
pm = create_project_media project: p
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'verified'; s.save!
pm = ProjectMedia.find(pm.id)
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm, required: true
assert_equal 'in_progress', pm.last_verification_status
end
# Any activity (comment, tag, task response) with disable_update_status
# off moves a pending item to in_progress — but never downgrades verified.
test "should move pending item to in progress status" do
create_translation_status_stuff
create_verification_status_stuff(false)
stub_config('app_name', 'Check') do
create_annotation_type annotation_type: 'response'
p = create_project
pm = create_project_media project: p
default = 'undetermined'
active = 'in_progress'
s = pm.annotations.where(annotation_type: 'verification_status').last.load
t = create_task annotated: pm
assert_not_equal pm.last_status, active
# add comment
create_comment annotated: pm, disable_update_status: false
assert_equal pm.last_verification_status, active
s.status = default; s.save!
# add tag
create_tag annotated: pm, disable_update_status: false
assert_equal pm.last_verification_status, active
s.status = default; s.save!
# add response
t.response = { annotation_type: 'response', set_fields: {} }.to_json
t.save!
assert_equal pm.last_verification_status, active
# change status to verified and tests autmatic update
s.status = 'verified'; s.save!
create_comment annotated: pm, disable_update_status: false
assert_equal pm.last_verification_status, 'verified'
end
end
# Refreshing media whose author_url changed re-parents the media to the
# new account and deletes the orphaned old account.
test "should update media account when change author_url" do
u = create_user
t = create_team
create_team_user user: u, team: t
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = 'http://www.facebook.com/meedan/posts/123456'
author_url = 'http://facebook.com/123456'
author_normal_url = 'http://www.facebook.com/meedan'
author2_url = 'http://facebook.com/789123'
author2_normal_url = 'http://www.facebook.com/meedan2'
data = { url: url, author_url: author_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
data = { url: url, author_url: author2_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
data = { url: author_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'Foo', description: 'Bar', type: 'profile' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: author_url } }).to_return(body: response)
data = { url: author2_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'NewFoo', description: 'NewBar', type: 'profile' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: author2_url } }).to_return(body: response)
m = create_media url: url, account: nil, account_id: nil
a = m.account
p = create_project team: t
pm = create_project_media media: m, project: p
sleep 1
pm = ProjectMedia.find(pm.id)
with_current_user_and_team(u, t) do
pm.refresh_media = true
end
assert_not_equal a, m.reload.account
assert_nil Account.where(id: a.id).last
end
# A second URL that normalizes to an existing link's URL still creates a
# new ProjectMedia (no uniqueness collision on the normalized form).
test "should create media when normalized URL exists" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
url = 'https://www.facebook.com/Ma3komMona/videos/695409680623722'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
url = 'https://www.facebook.com/Ma3komMona/videos/vb.268809099950451/695409680623722/?type=3&theater'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"https://www.facebook.com/Ma3komMona/videos/695409680623722","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
assert_difference 'ProjectMedia.count' do
pm = ProjectMedia.new
pm.url = url
pm.project = create_project
pm.save!
end
end
# After moving projects, check_search_project_was points at the previous
# project's search object.
test "should get previous project search object" do
p1 = create_project
p2 = create_project
pm = create_project_media project: p1
pm.previous_project_id = p1.id
pm.project_id = p2.id
pm.save!
assert_kind_of CheckSearch, pm.check_search_project_was
end
# Creating a task (status updates enabled) moves a pending item to active.
test "should move media to active status" do
create_verification_status_stuff
stub_config('app_name', 'Check') do
pm = create_project_media
assert_equal 'undetermined', pm.last_verification_status
create_task annotated: pm, disable_update_status: false
assert_equal 'in_progress', pm.reload.last_verification_status
end
end
# Completing an item with an open required task must fail validation.
test "should not complete media if there are pending tasks" do
create_verification_status_stuff
pm = create_project_media
s = pm.last_verification_status_obj
create_task annotated: pm, required: true
assert_equal 'undetermined', s.reload.get_field('verification_status_status').status
assert_raises ActiveRecord::RecordInvalid do
s.status = 'verified'
s.save!
end
end
# Smoke test for the private account_from_author_url helper.
# NOTE(review): relies on @url which is not set in this test — presumably
# nil here; confirm whether a real URL fixture was intended.
test "should get account from author URL" do
s = create_source
pm = create_project_media
assert_nothing_raised do
pm.send :account_from_author_url, @url, s
end
end
# A locked status must not be auto-advanced by new activity...
test "should not move media to active status if status is locked" do
create_verification_status_stuff
stub_config('app_name', 'Check') do
pm = create_project_media
assert_equal 'undetermined', pm.last_verification_status
s = pm.last_verification_status_obj
s.locked = true
s.save!
create_task annotated: pm, disable_update_status: false
assert_equal 'undetermined', pm.reload.last_verification_status
end
end
# ...nor reopened when a required task is added to a resolved item.
test "should not return to active status if required task added to resolved item but status is locked" do
create_translation_status_stuff
create_verification_status_stuff(false)
p = create_project
pm = create_project_media project: p
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'verified'
s.locked = true
s.save!
pm = ProjectMedia.find(pm.id)
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm, required: true
assert_equal 'verified', pm.last_verification_status
end
# target_languages must be valid JSON.
test "should expose target languages" do
pm = create_project_media
assert_nothing_raised do
JSON.parse(pm.target_languages)
end
end
# The permissions payload always includes the status-update key.
test "should have status permission" do
u = create_user
t = create_team
p = create_project team: t
pm = create_project_media project: p
with_current_user_and_team(u, t) do
permissions = JSON.parse(pm.permissions)
assert permissions.has_key?('update Status')
end
end
# Status accessors return nil (not raise) when annotations are gone.
test "should not crash if media does not have status" do
pm = create_project_media
Annotation.delete_all
assert_nothing_raised do
assert_nil pm.last_verification_status_obj
assert_nil pm.last_translation_status_obj
end
end
# is_finished? flips to true once the status reaches a terminal state.
test "should return whether in final state or not" do
create_verification_status_stuff
pm = create_project_media
assert_equal false, pm.is_finished?
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'verified'
s.save!
assert_equal true, pm.is_finished?
end
end
#6939 Add create_translation_status_stuff
require_relative '../test_helper'
class ProjectMediaTest < ActiveSupport::TestCase
def setup
# Use Sidekiq's fake mode so background jobs are queued, not executed, during tests.
require 'sidekiq/testing'
Sidekiq::Testing.fake!
super
end
# Creation works anonymously, as an owner, as a journalist, and when moving one's own media.
test "should create project media" do
assert_difference 'ProjectMedia.count' do
create_project_media
end
u = create_user
t = create_team
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
m = create_valid_media
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
assert_difference 'ProjectMedia.count' do
create_project_media project: p, media: m
end
# journalist should assign any media
m2 = create_valid_media
Rails.cache.clear
tu.update_column(:role, 'journalist')
pm = nil
assert_difference 'ProjectMedia.count' do
pm = create_project_media project: p, media: m2
end
m3 = create_valid_media
m3.user_id = u.id; m3.save!
assert_difference 'ProjectMedia.count' do
pm = create_project_media project: p, media: m3
pm.project = create_project team: t
pm.save!
end
User.unstub(:current)
Team.unstub(:current)
end
# Validation: both project and media are required.
test "should have a project and media" do
assert_no_difference 'ProjectMedia.count' do
assert_raise ActiveRecord::RecordInvalid do
create_project_media project: nil
end
assert_raise ActiveRecord::RecordInvalid do
create_project_media media: nil
end
end
end
# A quote or a URL alone is enough to create the underlying media record.
test "should create media if url or quote set" do
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
assert_difference 'ProjectMedia.count', 2 do
create_project_media media: nil, quote: 'Claim report'
create_project_media media: nil, url: url
end
end
# Two URLs that Pender normalizes to the same canonical URL resolve to the same Media.
test "should find media by normalized url" do
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media url: url
url2 = 'http://test2.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
pm = create_project_media url: url2
assert_equal pm.media, m
end
# Fixed typo in the test description ("exisitng" -> "existing").
# Creating by URL should attach the already-existing Media record rather than a new one.
test "should create with existing media if url exists" do
m = create_valid_media
pm = create_project_media media: nil, url: m.url
assert_equal m, pm.media
end
# Contributors are allowed to add new media to a project.
test "should contributor add a new media" do
t = create_team
u = create_user
p = create_project team: t
tu = create_team_user team: t, user: u, role: 'contributor'
with_current_user_and_team(u, t) do
assert_difference 'ProjectMedia.count' do
create_project_media project: p, quote: 'Claim report'
end
end
end
# Owners may move media between projects; editors may update but not destroy others'
# items, while they can destroy their own.
test "should update and destroy project media" do
u = create_user
t = create_team
p = create_project team: t
p2 = create_project team: t
m = create_valid_media user_id: u.id
create_team_user team: t, user: u
pm = create_project_media project: p, media: m, user: u
with_current_user_and_team(u, t) do
pm.project_id = p2.id; pm.save!
pm.reload
assert_equal pm.project_id, p2.id
end
u2 = create_user
tu = create_team_user team: t, user: u2, role: 'editor'
with_current_user_and_team(u2, t) do
pm.save!
end
assert_raise RuntimeError do
with_current_user_and_team(u2, t) do
pm.destroy!
end
end
pm_own = nil
with_current_user_and_team(u2, t) do
own_media = create_valid_media user: u2
pm_own = create_project_media project: p, media: own_media, user: u2
pm_own.project_id = p2.id; pm_own.save!
pm_own.reload
assert_equal pm_own.project_id, p2.id
end
assert_nothing_raised RuntimeError do
with_current_user_and_team(u2, t) do
pm_own.disable_es_callbacks = true
pm_own.destroy!
end
end
# NOTE(review): disable_es_callbacks is set on pm_own here, but pm is the record
# being destroyed — confirm this asymmetry is intentional (destroy raises before
# callbacks would run, so it may be harmless).
assert_raise RuntimeError do
with_current_user_and_team(u, t) do
pm_own.disable_es_callbacks = true
pm.destroy!
end
end
end
# Private-team media is readable only by accepted members; public-team media by anyone.
test "non members should not read project media in private team" do
u = create_user
t = create_team
p = create_project team: t
m = create_media project: p
pm = create_project_media project: p, media: m
pu = create_user
pt = create_team private: true
create_team_user team: pt, user: pu
pu2 = create_user
create_team_user team: pt, user: pu2, status: 'requested'
pp = create_project team: pt
m = create_media project: pp
ppm = create_project_media project: pp, media: m
ProjectMedia.find_if_can(pm.id)
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(u, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
with_current_user_and_team(pu, pt) do
ProjectMedia.find_if_can(ppm.id)
end
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(pu2, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
end
# Slack is notified on creation when the team has notifications enabled; the message
# embeds the project/media URL.
test "should notify Slack when project media is created" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
t.set_slack_notifications_enabled = 1; t.set_slack_webhook = 'https://hooks.slack.com/services/123'; t.set_slack_channel = '#test'; t.save!
with_current_user_and_team(u, t) do
m = create_valid_media
pm = create_project_media project: p, media: m
assert pm.sent_to_slack
msg = pm.slack_notification_message
# verify base URL
assert_match "#{CONFIG['checkdesk_client']}/#{t.slug}", msg
# verify notification URL
match = msg.match(/\/project\/([0-9]+)\/media\/([0-9]+)/)
assert_equal p.id, match[1].to_i
assert_equal pm.id, match[2].to_i
# claim media
m = create_claim_media
pm = create_project_media project: p, media: m
assert pm.sent_to_slack
end
end
# Slack notification still works when there is no current user.
test "should notify Slack when project media is created with empty user" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
t.set_slack_notifications_enabled = 1; t.set_slack_webhook = 'https://hooks.slack.com/services/123'; t.set_slack_channel = '#test'; t.save!
with_current_user_and_team(nil, t) do
m = create_valid_media
pm = create_project_media project: p, media: m, user: nil
assert pm.sent_to_slack
msg = pm.slack_notification_message
# verify base URL
assert_match "#{CONFIG['checkdesk_client']}/#{t.slug}", msg
# verify notification URL
match = msg.match(/\/project\/([0-9]+)\/media\/([0-9]+)/)
assert_equal p.id, match[1].to_i
assert_equal pm.id, match[2].to_i
# claim media
m = create_claim_media
pm = create_project_media project: p, media: m, user: nil
assert pm.sent_to_slack
msg = pm.slack_notification_message
assert_match "A new Claim has been added", msg
end
end
# The Slack message attributes the action to the current user, not the media's author.
test "should verify attribution of Slack notifications" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
uu = create_user
m = create_valid_media user: uu
t.set_slack_notifications_enabled = 1; t.set_slack_webhook = 'https://hooks.slack.com/services/123'; t.set_slack_channel = '#test'; t.save!
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, origin: 'http://localhost:3333'
assert pm.sent_to_slack
msg = pm.slack_notification_message
assert_match "*#{u.name}* added a new", msg
end
end
# Pusher is notified on creation for both link and claim media.
test "should notify Pusher when project media is created" do
pm = create_project_media
assert pm.sent_to_pusher
# claim media
t = create_team
p = create_project team: t
m = create_claim_media project_id: p.id
pm = create_project_media project: p, media: m
assert pm.sent_to_pusher
end
# Pusher is notified on destruction as well.
test "should notify Pusher when project media is destroyed" do
pm = create_project_media
pm.sent_to_pusher = false
pm.destroy!
assert pm.sent_to_pusher
end
# In production, Pusher notifications go through a background worker (4 jobs per creation).
test "should notify Pusher in background" do
Rails.stubs(:env).returns(:production)
t = create_team
p = create_project team: t
CheckNotifications::Pusher::Worker.drain
assert_equal 0, CheckNotifications::Pusher::Worker.jobs.size
create_project_media project: p
assert_equal 4, CheckNotifications::Pusher::Worker.jobs.size
CheckNotifications::Pusher::Worker.drain
assert_equal 0, CheckNotifications::Pusher::Worker.jobs.size
Rails.unstub(:env)
end
# Per-project embed overrides: two project medias sharing one Media keep independent
# overridden titles/descriptions, each reflecting its latest update.
test "should update project media embed data" do
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
p1 = create_project
p2 = create_project
pm1 = create_project_media project: p1, media: m
pm2 = create_project_media project: p2, media: m
# fetch data (without overridden)
data = pm1.embed
assert_equal 'test media', data['title']
assert_equal 'add desc', data['description']
# Update media title and description for pm1
info = {title: 'Title A', description: 'Desc A'}.to_json
pm1.embed= info
info = {title: 'Title AA', description: 'Desc AA'}.to_json
pm1.embed= info
# Update media title and description for pm2
info = {title: 'Title B', description: 'Desc B'}.to_json
pm2.embed= info
info = {title: 'Title BB', description: 'Desc BB'}.to_json
pm2.embed= info
# fetch data for pm1
data = pm1.embed
assert_equal 'Title AA', data['title']
assert_equal 'Desc AA', data['description']
# fetch data for pm2
data = pm2.embed
assert_equal 'Title BB', data['title']
assert_equal 'Desc BB', data['description']
end
# annotations('comment') returns only this item's comments.
test "should have annotations" do
pm = create_project_media
c1 = create_comment annotated: pm
c2 = create_comment annotated: pm
c3 = create_comment annotated: nil
assert_equal [c1.id, c2.id].sort, pm.reload.annotations('comment').map(&:id).sort
end
# The permissions payload exposes the same key set for every role (values may differ).
test "should get permissions" do
u = create_user
t = create_team current_user: u
tu = create_team_user team: t, user: u, role: 'owner'
p = create_project team: t
pm = create_project_media project: p, current_user: u
perm_keys = ["read ProjectMedia", "update ProjectMedia", "destroy ProjectMedia", "create Comment", "create Flag",
"create Tag", "create Task", "create Dynamic", "restore ProjectMedia", "embed ProjectMedia", "lock Annotation",
"update Status", "administer Content"].sort
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
# load permissions as owner
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as journalist
tu.update_column(:role, 'journalist')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as contributor
tu.update_column(:role, 'contributor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as authenticated
tu.update_column(:team_id, nil)
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
User.unstub(:current)
Team.unstub(:current)
end
# The creating user is recorded on the project media.
test "should set user when project media is created" do
u = create_user
t = create_team
tu = create_team_user team: t, user: u, role: 'journalist'
p = create_project team: t, user: create_user
pm = nil
with_current_user_and_team(u, t) do
pm = create_project_media project: p
end
assert_equal u, pm.user
end
# Uploading an image file populates the embed title with the filename.
test "should create embed for uploaded image" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
pm = ProjectMedia.new
pm.project_id = create_project.id
pm.file = File.new(File.join(Rails.root, 'test', 'data', 'rails.png'))
pm.disable_es_callbacks = true
pm.save!
assert_equal 'rails.png', pm.embed['title']
end
# The same media cannot be added to the same project twice.
test "should be unique" do
p = create_project
m = create_valid_media
assert_difference 'ProjectMedia.count' do
create_project_media project: p, media: m
end
assert_no_difference 'ProjectMedia.count' do
assert_raises RuntimeError do
create_project_media project: p, media: m
end
end
end
# Strong parameters: raw ActionController::Parameters must be rejected.
test "should protect attributes from mass assignment" do
raw_params = { project: create_project, user: create_user }
params = ActionController::Parameters.new(raw_params)
assert_raise ActiveModel::ForbiddenAttributesError do
ProjectMedia.create(params)
end
end
# overridden tracks exactly which embed fields were locally overridden for link media;
# claim media never flags fields as overridden.
test "should flag overridden attributes" do
t = create_team
p = create_project team: t
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item", "title": "org_title", "description":"org_desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
pm = create_project_media url: url, project: p
attributes = pm.overridden_embed_attributes
attributes.each{|k| assert_not pm.overridden[k]}
pm.embed={title: 'title'}.to_json
assert pm.overridden['title']
attributes = pm.overridden_embed_attributes
attributes.delete('title')
attributes.each{|k| assert_not pm.overridden[k]}
pm.embed={description: 'description'}.to_json
assert pm.overridden['description']
attributes.delete('description')
attributes.each{|k| assert_not pm.overridden[k]}
pm.embed={username: 'username'}.to_json
assert pm.overridden['username']
attributes.delete('username')
attributes.each{|k| assert_not pm.overridden[k]}
# Claim media
pm = create_project_media quote: 'Claim', project: p
pm.embed={title: 'title', description: 'description', username: 'username'}.to_json
pm.overridden_embed_attributes.each{|k| assert_not pm.overridden[k]}
end
# Team checklist items spawn auto tasks; project-scoped items only apply in that project.
test "should create auto tasks" do
t = create_team
p1 = create_project team: t
p2 = create_project team: t
t.checklist = [ { 'label' => 'Can you see this automatic task?', 'type' => 'free_text', 'description' => 'This was created automatically', 'projects' => [] }, { 'label' => 'Can you see this automatic task for a project only?', 'type' => 'free_text', 'description' => 'This was created automatically', 'projects' => [p2.id] } ]
t.save!
assert_difference 'Task.length', 1 do
pm1 = create_project_media project: p1
end
assert_difference 'Task.length', 2 do
pm2 = create_project_media project: p2
end
end
# Auto tasks are also created when a contributor adds the media.
test "should contributor create auto tasks" do
t = create_team
t.checklist = [ { 'label' => 'Can you see this automatic task?', 'type' => 'free_text', 'description' => 'This was created automatically', 'projects' => [] }]
t.save!
u = create_user
p = create_project team: t
tu = create_team_user team: t, user: u, role: 'contributor'
with_current_user_and_team(u, t) do
assert_difference 'Task.length' do
create_project_media project: p
end
end
end
# Link media and attributed quotes get a project source; plain claims do not.
test "should get project source" do
t = create_team
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
assert_not_nil pm.project_source
c = create_claim_media
pm = create_project_media project: p, media: c
assert_nil pm.project_source
pm = create_project_media project: p, quote: 'Claim', quote_attributions: {name: 'source name'}.to_json
assert_not_nil pm.project_source
end
# Moving a project media to a new project moves its project source too.
test "should move related sources after move media to other projects" do
t = create_team
p = create_project team: t
m = create_valid_media
pm = create_project_media project: p, media: m
ps = pm.project_source
t2 = create_team
p2 = create_project team: t2
pm.project = p2; pm.save!
assert_equal ps.reload.project_id, p2.id
end
# Creation produces three PaperTrail versions overall, one attached to the project media itself.
test "should have versions" do
m = create_valid_media
t = create_team
p = create_project team: t
u = create_user
create_team_user user: u, team: t, role: 'owner'
pm = nil
User.current = u
assert_difference 'PaperTrail::Version.count', 3 do
pm = create_project_media project: p, media: m, user: u
end
assert_equal 1, pm.versions.count
User.current = nil
end
# belonged_to_project still matches the original project after the media is moved.
test "should check if project media belonged to a previous project" do
t = create_team
u = create_user
create_team_user user: u, team: t
p = create_project team: t
p2 = create_project team: t
with_current_user_and_team(u, t) do
pm = create_project_media project: p
assert ProjectMedia.belonged_to_project(pm.id, p.id)
pm.project = p2; pm.save!
assert_equal p2, pm.project
assert ProjectMedia.belonged_to_project(pm.id, p.id)
end
end
# The versions log records every annotation event; the count excludes later destroys.
test "should get log" do
create_verification_status_stuff
m = create_valid_media
u = create_user
t = create_team
p = create_project team: t
p2 = create_project team: t
create_team_user user: u, team: t, role: 'owner'
at = create_annotation_type annotation_type: 'response'
ft1 = create_field_type field_type: 'task_reference'
ft2 = create_field_type field_type: 'text'
create_field_instance annotation_type_object: at, field_type_object: ft1, name: 'task'
create_field_instance annotation_type_object: at, field_type_object: ft2, name: 'response'
create_field_instance annotation_type_object: at, field_type_object: ft2, name: 'note'
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, user: u
c = create_comment annotated: pm
tg = create_tag annotated: pm
f = create_flag annotated: pm
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'In Progress'; s.save!
e = create_embed annotated: pm, title: 'Test'
info = { title: 'Foo' }.to_json; pm.embed = info; pm.save!
info = { title: 'Bar' }.to_json; pm.embed = info; pm.save!
pm.project_id = p2.id; pm.save!
# NOTE(review): `t` is reassigned from the team to a task below — works, but
# shadows the team variable for the rest of this block.
t = create_task annotated: pm, annotator: u
t = Task.find(t.id); t.response = { annotation_type: 'response', set_fields: { response: 'Test', task: t.id.to_s, note: 'Test' }.to_json }.to_json; t.save!
t = Task.find(t.id); t.label = 'Test?'; t.save!
r = DynamicAnnotation::Field.where(field_name: 'response').last; r.value = 'Test 2'; r.save!
r = DynamicAnnotation::Field.where(field_name: 'note').last; r.value = 'Test 2'; r.save!
assert_equal ["create_dynamic", "create_dynamic", "create_comment", "create_tag", "create_flag", "create_embed", "update_embed", "update_embed", "update_projectmedia", "create_task", "create_dynamicannotationfield", "create_dynamicannotationfield", "create_dynamicannotationfield", "create_dynamicannotationfield", "update_task", "update_task", "update_dynamicannotationfield", "update_dynamicannotationfield", "update_dynamicannotationfield"].sort, pm.get_versions_log.map(&:event_type).sort
assert_equal 15, pm.get_versions_log_count
c.destroy
assert_equal 15, pm.get_versions_log_count
tg.destroy
assert_equal 15, pm.get_versions_log_count
f.destroy
assert_equal 15, pm.get_versions_log_count
end
end
# project_was returns the previous project once previous_project_id is set and saved.
test "should get previous project" do
p1 = create_project
p2 = create_project
pm = create_project_media project: p1
assert_equal p1, pm.project
assert_nil pm.project_was
pm.previous_project_id = p1.id
pm.project_id = p2.id
pm.save!
assert_equal p1, pm.project_was
assert_equal p2, pm.project
end
# Creating picture media generates a reverse_image dynamic annotation.
test "should create annotation when project media with picture is created" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
i = create_uploaded_image
assert_difference "Dynamic.where(annotation_type: 'reverse_image').count" do
create_project_media media: i
end
end
# Setting refresh_media re-fetches Pender data in place (same embed record, bumped
# refreshes_count and updated_at).
test "should refresh Pender data" do
create_translation_status_stuff
create_verification_status_stuff(false)
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = random_url
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"1"}}')
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"2"}}')
m = create_media url: url
pm = create_project_media media: m
t1 = pm.updated_at.to_i
em1 = pm.media.pender_embed
assert_not_nil em1
assert_equal '1', JSON.parse(em1.data['embed'])['foo']
assert_equal 1, em1.refreshes_count
sleep 2
pm = ProjectMedia.find(pm.id)
pm.refresh_media = true
pm.save!
t2 = pm.reload.updated_at.to_i
assert t2 > t1
em2 = pm.media.pender_embed
assert_equal '2', JSON.parse(em2.data['embed'])['foo']
assert_equal 2, em2.refreshes_count
assert_equal em1, em2
end
# Resolving an unknown e-mail yields nil; once a user with that e-mail exists,
# its id is returned by the migration callback.
test "should get user id for migration" do
media_item = ProjectMedia.new
assert_nil media_item.send(:user_id_callback, 'test@test.com')
owner = create_user(email: 'test@test.com')
assert_equal owner.id, media_item.send(:user_id_callback, 'test@test.com')
end
# An id missing from the mapping resolves to nil; once mapped, the target
# project id is returned by the migration callback.
test "should get project id for migration" do
project = create_project
media_item = ProjectMedia.new
lookup = {}
assert_nil media_item.send(:project_id_callback, 1, lookup)
lookup[1] = project.id
assert_equal project.id, media_item.send(:project_id_callback, 1, lookup)
end
# The set_annotation creation parameter produces the requested dynamic annotation.
test "should set annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'text').last || create_field_type(field_type: 'text', label: 'Text')
lt = create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'translation', label: 'Translation'
create_field_instance annotation_type_object: at, name: 'translation_text', label: 'Translation Text', field_type_object: ft, optional: false
create_field_instance annotation_type_object: at, name: 'translation_note', label: 'Translation Note', field_type_object: ft, optional: true
create_field_instance annotation_type_object: at, name: 'translation_language', label: 'Translation Language', field_type_object: lt, optional: false
assert_equal 0, Annotation.where(annotation_type: 'translation').count
create_project_media set_annotation: { annotation_type: 'translation', set_fields: { 'translation_text' => 'Foo', 'translation_note' => 'Bar', 'translation_language' => 'pt' }.to_json }.to_json
assert_equal 1, Annotation.where(annotation_type: 'translation').count
end
# check_search_team exposes a CheckSearch scoped to the team.
test "should have reference to search team object" do
pm = create_project_media
assert_kind_of CheckSearch, pm.check_search_team
end
# check_search_project exposes a CheckSearch scoped to the project.
test "should have reference to search project object" do
pm = create_project_media
assert_kind_of CheckSearch, pm.check_search_project
end
# A machine-translation annotation is created only when the project declares target languages.
test "should have empty mt annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'language').last || create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'language', label: 'Language'
create_field_instance annotation_type_object: at, name: 'language', label: 'Language', field_type_object: ft, optional: false
ft = DynamicAnnotation::FieldType.where(field_type: 'json').last || create_field_type(field_type: 'json', label: 'JSON structure')
at = create_annotation_type annotation_type: 'mt', label: 'Machine translation'
create_field_instance annotation_type_object: at, name: 'mt_translations', label: 'Machine translations', field_type_object: ft, optional: false
create_bot name: 'Alegre Bot'
t = create_team
p = create_project team: t
text = 'Test'
stub_configs({ 'alegre_host' => 'http://alegre', 'alegre_token' => 'test' }) do
url = CONFIG['alegre_host'] + "/api/languages/identification?text=" + text
response = '{"type":"language","data": [["EN", 1]]}'
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm = create_project_media project: p, quote: text
mt = pm.annotations.where(annotation_type: 'mt').last
assert_nil mt
p.settings = {:languages => ['ar']}; p.save!
pm = create_project_media project: p, quote: text
mt = pm.annotations.where(annotation_type: 'mt').last
assert_not_nil mt
end
end
# update_mt stores translations from Alegre: auth failures and 'error' responses leave
# the translations field empty; a valid response stores one translation.
test "should update mt annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'language').last || create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'language', label: 'Language'
create_field_instance annotation_type_object: at, name: 'language', label: 'Language', field_type_object: ft, optional: false
ft = DynamicAnnotation::FieldType.where(field_type: 'json').last || create_field_type(field_type: 'json', label: 'JSON structure')
at = create_annotation_type annotation_type: 'mt', label: 'Machine translation'
create_field_instance annotation_type_object: at, name: 'mt_translations', label: 'Machine translations', field_type_object: ft, optional: false
u = create_user
t = create_team
create_team_user team: t, user: u, role: 'owner'
u = User.find(u.id)
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
p = create_project team: t
p.settings = {:languages => ['ar', 'en']}; p.save!
text = 'Testing'
stub_configs({ 'alegre_host' => 'http://alegre', 'alegre_token' => 'test' }) do
url = CONFIG['alegre_host'] + "/api/languages/identification?text=" + text
response = '{"type":"language","data": [["EN", 1]]}'
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm = create_project_media project: p, quote: text
pm2 = create_project_media project: p, quote: text
Sidekiq::Testing.inline! do
url = CONFIG['alegre_host'] + "/api/mt?from=en&to=ar&text=" + text
# Test with machine translation
response = '{"type":"mt","data": "testing -ar"}'
# Test handle raising an error
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> 'in_valid_token'}).to_return(body: response)
pm.update_mt=1
mt_field = DynamicAnnotation::Field.joins(:annotation).where('annotations.annotation_type' => 'mt', 'annotations.annotated_type' => pm.class.name, 'annotations.annotated_id' => pm.id.to_s, field_type: 'json').first
assert_equal 0, JSON.parse(mt_field.value).size
# Test with valid response
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm.update_mt=1
mt_field = DynamicAnnotation::Field.joins(:annotation).where('annotations.annotation_type' => 'mt', 'annotations.annotated_type' => pm.class.name, 'annotations.annotated_id' => pm.id.to_s, field_type: 'json').first
assert_equal 1, JSON.parse(mt_field.value).size
# Test with type => error
response = '{"type":"error","data": {"message": "Language not supported"}}'
WebMock.stub_request(:get, url).with(:headers => {'X-Alegre-Token'=> CONFIG['alegre_token']}).to_return(body: response)
pm2.update_mt=1
mt_field = DynamicAnnotation::Field.joins(:annotation).where('annotations.annotation_type' => 'mt', 'annotations.annotated_type' => pm2.class.name, 'annotations.annotated_id' => pm2.id.to_s, field_type: 'json').first
assert_equal 0, JSON.parse(mt_field.value).size
end
end
User.unstub(:current)
Team.unstub(:current)
end
# get_dynamic_annotation returns the annotation matching the requested type.
test "should get dynamic annotation by type" do
%w(foo bar).each { |annotation_name| create_annotation_type annotation_type: annotation_name }
item = create_project_media
expected = {}
['foo', 'bar'].each do |annotation_name|
expected[annotation_name] = create_dynamic_annotation annotation_type: annotation_name, annotated: item
end
expected.each do |annotation_name, annotation|
assert_equal annotation, item.get_dynamic_annotation(annotation_name)
end
end
# report_type reflects the class of the underlying media.
test "should get report type" do
claim = create_claim_media
link = create_link
assert_equal 'claim', create_project_media(media: claim).report_type
assert_equal 'link', create_project_media(media: link).report_type
end
# An owner can destroy another member's media together with its dynamic annotations
# (Elasticsearch callbacks disabled to keep the test hermetic).
test "should delete project media" do
t = create_team
u = create_user
u2 = create_user
tu = create_team_user team: t, user: u, role: 'owner'
tu = create_team_user team: t, user: u2
p = create_project team: t
pm = create_project_media project: p, quote: 'Claim', user: u2
at = create_annotation_type annotation_type: 'test'
ft = create_field_type
fi = create_field_instance name: 'test', field_type_object: ft, annotation_type_object: at
a = create_dynamic_annotation annotator: u2, annotated: pm, annotation_type: 'test', set_fields: { test: 'Test' }.to_json
RequestStore.store[:disable_es_callbacks] = true
with_current_user_and_team(u, t) do
pm.disable_es_callbacks = true
pm.destroy
end
RequestStore.store[:disable_es_callbacks] = false
end
# as_oembed exposes the media title.
test "should have oEmbed endpoint" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
pm = create_project_media media: m
assert_equal 'test media', pm.as_oembed[:title]
end
# The oEmbed URL is exposed for media in both public and private teams.
test "should have oEmbed URL" do
RequestStore[:request] = nil
t = create_team private: false
p = create_project team: t
pm = create_project_media project: p
stub_config('checkdesk_base_url', 'https://checkmedia.org') do
assert_equal "https://checkmedia.org/api/project_medias/#{pm.id}/oembed", pm.oembed_url
end
t = create_team private: true
p = create_project team: t
pm = create_project_media project: p
stub_config('checkdesk_base_url', 'https://checkmedia.org') do
assert_equal "https://checkmedia.org/api/project_medias/#{pm.id}/oembed", pm.oembed_url
end
end
# author_name mirrors the user's name, and is blank without a user.
test "should get author name for oEmbed" do
u = create_user name: 'Foo Bar'
pm = create_project_media user: u
assert_equal 'Foo Bar', pm.author_name
pm.user = nil
assert_equal '', pm.author_name
end
# author_url is the user's profile URL only for social-login users.
test "should get author URL for oEmbed" do
url = 'http://twitter.com/test'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"profile"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
u = create_user url: url, provider: 'twitter'
pm = create_project_media user: u
assert_equal url, pm.author_url
pm.user = create_user
assert_equal '', pm.author_url
pm.user = nil
assert_equal '', pm.author_url
end
# The author picture should be an HTTP(S) URL.
test "should get author picture for oEmbed" do
author = create_user
item = create_project_media user: author
assert_match /^http/, item.author_picture
end
# author_username mirrors the user's login, and is blank without a user.
test "should get author username for oEmbed" do
author = create_user login: 'test'
item = create_project_media user: author
assert_equal 'test', item.author_username
item.user = nil
assert_equal '', item.author_username
end
# author_role reflects the creator's team role; 'none' for non-members or no user.
test "should get author role for oEmbed" do
t = create_team
u = create_user
create_team_user user: u, team: t, role: 'journalist'
p = create_project team: t
pm = create_project_media project: p, user: u
assert_equal 'journalist', pm.author_role
pm.user = create_user
assert_equal 'none', pm.author_role
pm.user = nil
assert_equal 'none', pm.author_role
end
# source_url is the external URL for links and an in-app URL for claims.
test "should get source URL for external link for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal url, pm.source_url
c = create_claim_media
pm = create_project_media media: c
assert_match CONFIG['checkdesk_client'], pm.source_url
end
# completed_tasks/open_tasks partition tasks by whether they have a response.
test "should get resolved tasks for oEmbed" do
create_annotation_type annotation_type: 'response'
pm = create_project_media
assert_equal [], pm.completed_tasks
assert_equal 0, pm.completed_tasks_count
t1 = create_task annotated: pm
t1.response = { annotation_type: 'response', set_fields: {} }.to_json
t1.save!
t2 = create_task annotated: pm
assert_equal [t1], pm.completed_tasks
assert_equal [t2], pm.open_tasks
assert_equal 1, pm.completed_tasks_count
end
# comments/comments_count expose the item's comment annotations.
test "should get comments for oEmbed" do
pm = create_project_media
assert_equal [], pm.comments
assert_equal 0, pm.comments_count
c = create_comment annotated: pm
assert_equal [c], pm.comments
assert_equal 1, pm.comments_count
end
# provider is the external site name for links and the app name for claims.
test "should get provider for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal 'Twitter', pm.provider
c = create_claim_media
pm = create_project_media media: c
stub_config('app_name', 'Check') do
assert_equal 'Check', pm.provider
end
end
# published_at is the external publish date for links and now for claims.
test "should get published time for oEmbed" do
create_translation_status_stuff
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:30:00"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal '25/01/1989', pm.published_at.strftime('%d/%m/%Y')
c = create_claim_media
pm = create_project_media media: c
assert_equal Time.now.strftime('%d/%m/%Y'), pm.published_at.strftime('%d/%m/%Y')
end
test "should get source author for oEmbed" do
u = create_user name: 'Foo'
url = 'http://twitter.com/test/123456'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","author_name":"Bar"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l, user: u
assert_equal 'Bar', pm.source_author[:author_name]
c = create_claim_media
pm = create_project_media media: c, user: u
assert_equal 'Foo', pm.source_author[:author_name]
end
# Renders the full oEmbed HTML and compares it, after normalization, against
# a golden fixture file in test/data. Timestamps are stubbed so the output is
# stable; volatile URLs/ids are regex-normalized on both sides before comparison.
test "should render oEmbed HTML" do
create_translation_status_stuff
create_verification_status_stuff(false)
Bot::Alegre.delete_all
u = create_user login: 'test', name: 'Test', profile_image: 'http://profile.picture'
c = create_claim_media quote: 'Test'
t = create_team name: 'Test Team', slug: 'test-team'
p = create_project title: 'Test Project', team: t
pm = create_project_media media: c, user: u, project: p
create_comment text: 'A comment', annotated: pm
create_comment text: 'A second comment', annotated: pm
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
ft2 = create_field_type field_type: 'task_reference', label: 'Task Reference'
fi1 = create_field_instance annotation_type_object: at, name: 'response_task', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_task', label: 'Note', field_type_object: ft1
fi3 = create_field_instance annotation_type_object: at, name: 'task_reference', label: 'Task', field_type_object: ft2
t = create_task annotated: pm
t.response = { annotation_type: 'task_response_free_text', set_fields: { response_task: 'Task response', task_reference: t.id.to_s }.to_json }.to_json
t.save!
ProjectMedia.any_instance.stubs(:created_at).returns(Time.parse('2016-06-05'))
ProjectMedia.any_instance.stubs(:updated_at).returns(Time.parse('2016-06-05'))
expected = File.read(File.join(Rails.root, 'test', 'data', "oembed-#{pm.default_media_status_type}.html")).gsub(/project\/[0-9]+\/media\/[0-9]+/, 'url').gsub(/.*<body/m, '<body').gsub('http://localhost:3333', CONFIG['checkdesk_client']).gsub('http://localhost:3000', CONFIG['checkdesk_base_url'])
actual = ProjectMedia.find(pm.id).html.gsub(/project\/[0-9]+\/media\/[0-9]+/, 'url').gsub(/.*<body/m, '<body')
assert_equal expected, actual
ProjectMedia.any_instance.unstub(:created_at)
ProjectMedia.any_instance.unstub(:updated_at)
end
# metadata must serialize to a String for embedding.
test "should have metadata for oEmbed" do
pm = create_project_media
assert_kind_of String, pm.metadata
end
# Adding annotations must trigger cache purges (CcDeville) and Pender refreshes.
# NOTE(review): the expectation counts (52 / 16) are tied to the current purge
# fan-out per annotation — update them together with the purge logic.
test "should clear caches when media is updated" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pm = create_project_media
create_dynamic_annotation annotation_type: 'embed_code', annotated: pm
u = create_user
ProjectMedia.any_instance.unstub(:clear_caches)
CcDeville.expects(:clear_cache_for_url).returns(nil).times(52)
PenderClient::Request.expects(:get_medias).returns(nil).times(16)
Sidekiq::Testing.inline! do
create_comment annotated: pm, user: u
create_task annotated: pm, user: u
end
CcDeville.unstub(:clear_cache_for_url)
PenderClient::Request.unstub(:get_medias)
end
# set_tasks_responses pre-fills auto-task answers keyed by task slug ('when').
test "should respond to auto-tasks on creation" do
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
ft2 = create_field_type field_type: 'task_reference', label: 'Task Reference'
fi1 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_free_text', label: 'Note', field_type_object: ft1
fi3 = create_field_instance annotation_type_object: at, name: 'task_free_text', label: 'Task', field_type_object: ft2
t = create_team
p = create_project team: t
t.checklist = [ { 'label' => 'When?', 'type' => 'free_text', 'description' => '', 'projects' => [] } ]
t.save!
pm = create_project_media(project: p, set_tasks_responses: { 'when' => 'Yesterday' })
t = Task.where(annotation_type: 'task').last
assert_equal 'Yesterday', t.first_response
end
# Auto-answers checklist tasks from Krzana json+ld metadata returned by Pender.
# Each checklist item carries a JSONPath "mapping"; tasks are auto-responded
# when the path matches, with a per-type response (free_text / geolocation /
# datetime). Each project (p, p2, p3) exercises one mapping type.
test "should auto-response for Krzana report" do
at = create_annotation_type annotation_type: 'task_response_geolocation', label: 'Task Response Geolocation'
geotype = create_field_type field_type: 'geojson', label: 'GeoJSON'
create_field_instance annotation_type_object: at, name: 'response_geolocation', field_type_object: geotype
at = create_annotation_type annotation_type: 'task_response_datetime', label: 'Task Response Date Time'
datetime = create_field_type field_type: 'datetime', label: 'Date Time'
create_field_instance annotation_type_object: at, name: 'response_datetime', field_type_object: datetime
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
ft2 = create_field_type field_type: 'task_reference', label: 'Task Reference'
fi1 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_free_text', label: 'Note', field_type_object: ft1
fi3 = create_field_instance annotation_type_object: at, name: 'task_free_text', label: 'Task', field_type_object: ft2
t = create_team
p = create_project team: t
p2 = create_project team: t
p3 = create_project team: t
t.checklist = [ { "label" => "who?", "type" => "free_text", "description" => "",
"mapping" => { "type" => "free_text", "match" => "$.mentions[?(@['@type'] == 'Person')].name", "prefix" => "Suggested by Krzana: "},
"projects" => [p.id] },
{ "label" => "where?", "type" => "geolocation", "description" => "",
"mapping" => { "type" => "geolocation", "match" => "$.mentions[?(@['@type'] == 'Place')]", "prefix" => ""},
"projects" => [p2.id] },
{ "label" => "when?", "type" => "datetime", "description" => "",
"mapping" => { "type" => "datetime", "match" => "dateCreated", "prefix" => ""},
"projects" => [p3.id] }
]
t.save!
pender_url = CONFIG['pender_url_private'] + '/api/medias'
# test empty json+ld
url = 'http://test1.com'
raw = {"json+ld": {}}
response = {'type':'media','data': {'url': url, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
pm = create_project_media project: p, url: url
t = Task.where(annotation_type: 'task', annotated_id: pm.id).last
assert_nil t.first_response
# test with non exist value
url1 = 'http://test11.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person" } ] } }
response = {'type':'media','data': {'url': url1, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url1 } }).to_return(body: response)
pm1 = create_project_media project: p, url: url1
t = Task.where(annotation_type: 'task', annotated_id: pm1.id).last
assert_nil t.first_response
# test with empty value
url12 = 'http://test12.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "" } ] } }
response = {'type':'media','data': {'url': url12, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url12 } }).to_return(body: response)
pm12 = create_project_media project: p, url: url12
t = Task.where(annotation_type: 'task', annotated_id: pm12.id).last
assert_nil t.first_response
# test with single selection
url2 = 'http://test2.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" } ] } }
response = {'type':'media','data': {'url': url2, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
pm2 = create_project_media project: p, url: url2
t = Task.where(annotation_type: 'task', annotated_id: pm2.id).last
assert_equal "Suggested by Krzana: first_name", t.first_response
# test multiple selection (should get first one)
url3 = 'http://test3.com'
raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" }, { "@type": "Person", "name": "last_name" } ] } }
response = {'type':'media','data': {'url': url3, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url3 } }).to_return(body: response)
pm3 = create_project_media project: p, url: url3
t = Task.where(annotation_type: 'task', annotated_id: pm3.id).last
assert_equal "Suggested by Krzana: first_name", t.first_response
# test geolocation mapping
url4 = 'http://test4.com'
raw = { "json+ld": {
"mentions": [ { "name": "Delimara Powerplant", "@type": "Place", "geo": { "latitude": 35.83020073454, "longitude": 14.55602645874 } } ]
} }
response = {'type':'media','data': {'url': url4, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url4 } }).to_return(body: response)
pm4 = create_project_media project: p2, url: url4
t = Task.where(annotation_type: 'task', annotated_id: pm4.id).last
# assert_not_nil t.first_response
# test datetime mapping
url5 = 'http://test5.com'
raw = { "json+ld": { "dateCreated": "2017-08-30T14:22:28+00:00" } }
response = {'type':'media','data': {'url': url5, 'type': 'item', 'raw': raw}}.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url5 } }).to_return(body: response)
pm5 = create_project_media project: p3, url: url5
t = Task.where(annotation_type: 'task', annotated_id: pm5.id).last
# assert_not_nil t.first_response
# NOTE(review): the geolocation and datetime assertions above are commented
# out, so those two mappings are currently unverified — confirm intent.
end
# A 409 from Pender (code 9) must surface as a user-readable validation error.
test "should expose conflict error from Pender" do
url = 'http://test.com'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"error","data":{"message":"Conflict","code":9}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response, status: 409)
p = create_project
pm = ProjectMedia.new
pm.project = p
pm.url = url
assert !pm.valid?
assert pm.errors.messages.values.flatten.include?('This link is already being parsed, please try again in a few seconds.')
end
# Creating media should create a ProjectSource for the author account once,
# reuse it for further media by the same author (and for quote attributions
# matching the author name), and survive moving media between projects.
test "should create project source" do
t = create_team
p = create_project team: t
u = create_user
create_team_user team: t, user: u, role: 'owner'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
media_url = 'http://www.facebook.com/meedan/posts/123456'
media2_url = 'http://www.facebook.com/meedan/posts/456789'
author_url = 'http://facebook.com/123456'
data = { url: media_url, author_url: author_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: media_url } }).to_return(body: response)
data = { url: media2_url, author_url: author_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: media2_url } }).to_return(body: response)
data = { url: author_url, provider: 'facebook', picture: 'http://fb/p.png', author_name: 'UNIVERSITÄT', username: 'username', title: 'Foo', description: 'Bar', type: 'profile' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: author_url } }).to_return(body: response)
with_current_user_and_team(u, t) do
assert_difference 'ProjectSource.count' do
create_project_media project: p, url: media_url
end
# should not duplicate ProjectSource for same account
assert_no_difference 'ProjectSource.count' do
create_project_media project: p, url: media2_url
end
assert_no_difference 'ProjectSource.count' do
create_project_media project: p, quote: 'Claim', quote_attributions: {name: 'UNIVERSITÄT'}.to_json
end
end
# test move media to project with same source
p2 = create_project team: t
p3 = create_project team: t
with_current_user_and_team(u, t) do
pm = create_project_media project: p2, url: media_url
pm2 = create_project_media project: p3, url: media2_url
assert_nothing_raised do
pm.project = p3
pm.save!
end
end
end
# quote_attributions creates a named Source (and ClaimSource link) and reuses
# the same Source for subsequent claims attributed to the same name.
test "should set quote attributions" do
t = create_team
p = create_project team: t
u = create_user
create_team_user team: t, user: u, role: 'owner'
with_current_user_and_team(u, t) do
assert_difference 'ClaimSource.count', 2 do
pm = create_project_media project: p, quote: 'Claim', quote_attributions: {name: 'source name'}.to_json
s = pm.project_source.source
assert_not_nil pm.project_source
assert_equal s.name, 'source name'
pm2 = create_project_media project: p, quote: 'Claim 2', quote_attributions: {name: 'source name'}.to_json
assert_equal pm2.project_source.source, s
end
end
end
# get_project_source returns nil when the link's account has been destroyed.
test "should not get project source" do
p = create_project
l = create_link
a = l.account
a.destroy
l = Link.find(l.id)
pm = create_project_media project: p, media: l
assert_nil pm.send(:get_project_source, p.id)
end
# Archived projects must reject new media.
test "should not create project media under archived project" do
p = create_project
p.archived = true
p.save!
assert_raises ActiveRecord::RecordInvalid do
create_project_media project: p
end
end
# The archived flag on a ProjectMedia can be toggled and persists.
test "should archive" do
pm = create_project_media
assert !pm.archived
pm.archived = true
pm.save!
assert pm.reload.archived
end
# First as_oembed call records an 'embed_code' annotation (2 versions);
# subsequent calls are no-ops version-wise.
test "should create annotation when is embedded for the first time" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pm = create_project_media
assert_difference 'PaperTrail::Version.count', 2 do
pm.as_oembed
end
assert_no_difference 'PaperTrail::Version.count' do
pm.as_oembed
end
end
# Teams with the browser-extension limit disabled must reject media created
# with the X-Check-Client: browser-extension header.
test "should not create media through browser extension if team is not allowed to" do
t = create_team
t.set_limits_browser_extension = false
t.save!
p = create_project team: t
assert_raises ActiveRecord::RecordInvalid do
RequestStore.stubs(:[]).with(:request).returns(OpenStruct.new({ headers: { 'X-Check-Client' => 'browser-extension' } }))
create_project_media project: p
RequestStore.unstub(:[])
end
end
# mapping_value must tolerate invalid JSONPath input and return nil.
test "should not crash if mapping value is invalid" do
assert_nothing_raised do
pm = ProjectMedia.new
assert_nil pm.send(:mapping_value, 'foo', 'bar')
end
end
# Two owners updating the same item's embed sequentially must not corrupt the
# annotation (title_is_overridden? stays callable afterwards).
test "should not crash if another user tries to update media" do
u1 = create_user
u2 = create_user
t = create_team
p = create_project team: t
create_team_user team: t, user: u1, role: 'owner'
create_team_user team: t, user: u2, role: 'owner'
pm = nil
with_current_user_and_team(u1, t) do
pm = create_project_media project: p, user: u1
pm = ProjectMedia.find(pm.id)
info = { title: 'Title' }.to_json
pm.embed = info
pm.save!
end
with_current_user_and_team(u2, t) do
pm = ProjectMedia.find(pm.id)
info = { title: 'Title' }.to_json
pm.embed = info
pm.save!
end
assert_nothing_raised do
embed = pm.get_annotations('embed').last.load
embed.title_is_overridden?
end
end
# Custom team statuses: last_status_html/last_status_color come from the
# team-defined status labels and styles.
test "should return custom status HTML and color for embed" do
create_translation_status_stuff
create_verification_status_stuff(false)
t = create_team
value = {
label: 'Status',
default: 'stop',
active: 'done',
statuses: [
{ id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
{ id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
]
}
pm = create_project_media
t.send "set_media_#{pm.default_media_status_type.pluralize}", value
t.save!
p = create_project team: t
pm = create_project_media project: p
assert_equal 'stop', pm.last_status
assert_equal '<span id="oembed__status">Stopped</span>', pm.last_status_html
assert_equal '#a00', pm.last_status_color
s = pm.last_status_obj
s.status = 'done'
s.save!
assert_equal '<span id="oembed__status">Done!</span>', pm.last_status_html
assert_equal '#fc3', pm.last_status_color
end
# Core statuses: HTML carries class "l" (localizable key) and built-in colors.
test "should return core status HTML and color for embed" do
create_translation_status_stuff
create_verification_status_stuff(false)
t = create_team
p = create_project team: t
pm = create_project_media project: p
assert_equal "<span id=\"oembed__status\" class=\"l\">status_#{pm.last_status}</span>", pm.last_status_html
assert_equal '#518FFF', pm.last_status_color.upcase
s = pm.last_status_obj
s.status = 'in_progress'
s.save!
assert_equal '<span id="oembed__status" class="l">status_in_progress</span>', pm.last_status_html
assert_equal '#FFBB5D', pm.last_status_color.upcase
end
# description defaults to the claim quote and can be overridden via embed.
test "should get description" do
c = create_claim_media quote: 'Test'
pm = create_project_media media: c
assert_equal 'Test', pm.reload.description
info = { description: 'Test 2' }.to_json
pm.embed = info
pm.save!
assert_equal 'Test 2', pm.reload.description
end
# --- pender_archive annotation: created only when ALL preconditions hold:
# media is a Link, the annotation type exists, the team has the
# keep_screenshot limit enabled, and the archiver setting is on. Each test
# below flips exactly one of these preconditions. ---
test "should create pender_archive annotation when link is created" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
test "should not create pender_archive annotation when media is created if media is not a link" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
c = create_claim_media
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: c, project: p
end
end
test "should not create pender_archive annotation when link is created if there is no annotation type" do
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
test "should not create pender_archive annotation when link is created if team is not allowed" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = false
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
test "should not create pender_archive annotation when link is created if archiver is not enabled" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 0
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
assert_no_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
end
# Screenshot info may come from pender_embed...
test "should create pender_archive annotation when link is created using information from pender_embed" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
Link.any_instance.stubs(:pender_embed).returns(OpenStruct.new({ data: { embed: { screenshot_taken: 1, 'archives' => {} }.to_json } }))
assert_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
Link.any_instance.unstub(:pender_embed)
end
# ...or fall back to pender_data when pender_embed raises.
test "should create pender_archive annotation when link is created using information from pender_data" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
l = create_link
t = create_team
t.archive_pender_archive_enabled = 1
t.set_limits_keep_screenshot = true
t.save!
p = create_project team: t
Link.any_instance.stubs(:pender_data).returns({ screenshot_taken: 1, 'archives' => {} })
Link.any_instance.stubs(:pender_embed).raises(RuntimeError)
assert_difference 'Dynamic.where(annotation_type: "pender_archive").count' do
create_project_media media: l, project: p
end
Link.any_instance.unstub(:pender_data)
Link.any_instance.unstub(:pender_embed)
end
# contributing_users_count counts distinct annotators plus the item creator.
test "should get number of contributing users" do
create_verification_status_stuff
pm = create_project_media
create_comment annotated: pm, annotator: create_user
create_comment annotated: pm, annotator: create_user
create_tag annotated: pm, annotator: create_user
create_task annotated: pm, annotator: create_user
assert_equal 5, pm.contributing_users_count
end
# time_to_status(:first/:last): seconds from creation to the first/latest
# status change; measured here by stubbing Time.now forward.
test "should get time to first and last status" do
create_translation_status_stuff
create_verification_status_stuff(false)
u = create_user
t = create_team
create_team_user user: u, team: t, role: 'owner'
p = create_project team: t
with_current_user_and_team(u, t) do
time = Time.now - 10.minutes
Time.stubs(:now).returns(time)
pm = create_project_media project: p, user: u
assert_equal '', pm.time_to_status(:first)
assert_equal '', pm.time_to_status(:last)
Time.stubs(:now).returns(time + 5.minutes)
s = pm.last_status_obj
s.status = 'In Progress'; s.save!
assert_equal '', pm.time_to_status(:first)
assert_equal 5.minutes.to_i, pm.time_to_status(:last)
Time.stubs(:now).returns(time + 8.minutes)
s = pm.last_status_obj
s.status = ::Workflow::Workflow.core_options(pm, pm.default_media_status_type)[:default]
s.save!
assert_equal 5.minutes.to_i, pm.time_to_status(:first)
assert_equal 8.minutes.to_i, pm.time_to_status(:last)
Time.unstub(:now)
end
end
# Terminal statuses ('verified'/'false') are rejected while required tasks
# remain unanswered, and accepted once all required tasks are resolved.
test "should reject a status of verified if all required tasks are not resolved" do
create_verification_status_stuff
create_annotation_type annotation_type: 'response'
pm = create_project_media
t1 = create_task annotated: pm
t2 = create_task annotated: pm, required: true
t1.response = { annotation_type: 'response', set_fields: {} }.to_json
t1.save!
s = pm.annotations.where(annotation_type: 'verification_status').last.load
assert_raise ActiveRecord::RecordInvalid do
s.status = 'verified'; s.save!
end
assert_raise ActiveRecord::RecordInvalid do
s.status = 'false'; s.save!
end
t2.response = { annotation_type: 'response', set_fields: {} }.to_json
t2.save!
s.status = 'verified'; s.save!
assert_equal s.reload.status, 'verified'
end
# Adding a REQUIRED task to a resolved item reopens it (in_progress);
# a non-required task leaves the status alone.
test "should back status to active if required task added to resolved item" do
create_translation_status_stuff
create_verification_status_stuff(false)
p = create_project
pm = create_project_media project: p
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'verified'; s.save!
pm = ProjectMedia.find(pm.id)
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm, required: true
assert_equal 'in_progress', pm.last_verification_status
end
# Any annotator activity (comment, tag, task response) moves a pending item to
# the active status, but never demotes an already-verified item.
test "should move pending item to in progress status" do
create_translation_status_stuff
create_verification_status_stuff(false)
stub_config('app_name', 'Check') do
create_annotation_type annotation_type: 'response'
p = create_project
pm = create_project_media project: p
default = 'undetermined'
active = 'in_progress'
s = pm.annotations.where(annotation_type: 'verification_status').last.load
t = create_task annotated: pm
assert_not_equal pm.last_status, active
# add comment
create_comment annotated: pm, disable_update_status: false
assert_equal pm.last_verification_status, active
s.status = default; s.save!
# add tag
create_tag annotated: pm, disable_update_status: false
assert_equal pm.last_verification_status, active
s.status = default; s.save!
# add response
t.response = { annotation_type: 'response', set_fields: {} }.to_json
t.save!
assert_equal pm.last_verification_status, active
# change status to verified and tests autmatic update
s.status = 'verified'; s.save!
create_comment annotated: pm, disable_update_status: false
assert_equal pm.last_verification_status, 'verified'
end
end
# Refreshing a media whose author_url changed must point it at the new account
# and garbage-collect the now-orphaned old account.
test "should update media account when change author_url" do
u = create_user
t = create_team
create_team_user user: u, team: t
pender_url = CONFIG['pender_url_private'] + '/api/medias'
url = 'http://www.facebook.com/meedan/posts/123456'
author_url = 'http://facebook.com/123456'
author_normal_url = 'http://www.facebook.com/meedan'
author2_url = 'http://facebook.com/789123'
author2_normal_url = 'http://www.facebook.com/meedan2'
data = { url: url, author_url: author_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
data = { url: url, author_url: author2_url, type: 'item' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
data = { url: author_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'Foo', description: 'Bar', type: 'profile' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: author_url } }).to_return(body: response)
data = { url: author2_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'NewFoo', description: 'NewBar', type: 'profile' }
response = '{"type":"media","data":' + data.to_json + '}'
WebMock.stub_request(:get, pender_url).with({ query: { url: author2_url } }).to_return(body: response)
m = create_media url: url, account: nil, account_id: nil
a = m.account
p = create_project team: t
pm = create_project_media media: m, project: p
# NOTE(review): sleep looks like it papers over an async/timestamp ordering
# issue — confirm whether it is still needed.
sleep 1
pm = ProjectMedia.find(pm.id)
with_current_user_and_team(u, t) do
pm.refresh_media = true
end
assert_not_equal a, m.reload.account
assert_nil Account.where(id: a.id).last
end
# A URL that Pender normalizes to an already-known URL must still create a
# new ProjectMedia (pointing at the existing Link).
test "should create media when normalized URL exists" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
url = 'https://www.facebook.com/Ma3komMona/videos/695409680623722'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
url = 'https://www.facebook.com/Ma3komMona/videos/vb.268809099950451/695409680623722/?type=3&theater'
pender_url = CONFIG['pender_url_private'] + '/api/medias'
response = '{"type":"media","data":{"url":"https://www.facebook.com/Ma3komMona/videos/695409680623722","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
assert_difference 'ProjectMedia.count' do
pm = ProjectMedia.new
pm.url = url
pm.project = create_project
pm.save!
end
end
# After moving projects, check_search_project_was points at the old project's
# search object.
test "should get previous project search object" do
p1 = create_project
p2 = create_project
pm = create_project_media project: p1
pm.previous_project_id = p1.id
pm.project_id = p2.id
pm.save!
assert_kind_of CheckSearch, pm.check_search_project_was
end
# Creating a task (with status updates enabled) activates a pending item.
test "should move media to active status" do
create_verification_status_stuff
stub_config('app_name', 'Check') do
pm = create_project_media
assert_equal 'undetermined', pm.last_verification_status
create_task annotated: pm, disable_update_status: false
assert_equal 'in_progress', pm.reload.last_verification_status
end
end
# Completion is blocked while a required task is unanswered.
test "should not complete media if there are pending tasks" do
create_verification_status_stuff
pm = create_project_media
s = pm.last_verification_status_obj
create_task annotated: pm, required: true
assert_equal 'undetermined', s.reload.get_field('verification_status_status').status
assert_raises ActiveRecord::RecordInvalid do
s.status = 'verified'
s.save!
end
end
# account_from_author_url must be safe to call (no exception) for any source.
test "should get account from author URL" do
s = create_source
pm = create_project_media
assert_nothing_raised do
pm.send :account_from_author_url, @url, s
end
end
# A locked status is immune to automatic activation...
test "should not move media to active status if status is locked" do
create_verification_status_stuff
stub_config('app_name', 'Check') do
pm = create_project_media
assert_equal 'undetermined', pm.last_verification_status
s = pm.last_verification_status_obj
s.locked = true
s.save!
create_task annotated: pm, disable_update_status: false
assert_equal 'undetermined', pm.reload.last_verification_status
end
end
# ...and also immune to being reopened by newly-added required tasks.
test "should not return to active status if required task added to resolved item but status is locked" do
create_translation_status_stuff
create_verification_status_stuff(false)
p = create_project
pm = create_project_media project: p
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'verified'
s.locked = true
s.save!
pm = ProjectMedia.find(pm.id)
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm
assert_equal 'verified', pm.last_verification_status
pm = ProjectMedia.find(pm.id)
create_task annotated: pm, required: true
assert_equal 'verified', pm.last_verification_status
end
# target_languages must return valid JSON.
test "should expose target languages" do
pm = create_project_media
assert_nothing_raised do
JSON.parse(pm.target_languages)
end
end
# Serialized permissions include the status-update capability key.
test "should have status permission" do
u = create_user
t = create_team
p = create_project team: t
pm = create_project_media project: p
with_current_user_and_team(u, t) do
permissions = JSON.parse(pm.permissions)
assert permissions.has_key?('update Status')
end
end
# Status accessors return nil (not raise) when annotations are gone.
test "should not crash if media does not have status" do
pm = create_project_media
Annotation.delete_all
assert_nothing_raised do
assert_nil pm.last_verification_status_obj
assert_nil pm.last_translation_status_obj
end
end
# is_finished? flips to true once the status reaches a terminal value.
test "should return whether in final state or not" do
create_verification_status_stuff
create_translation_status_stuff
pm = create_project_media
assert_equal false, pm.is_finished?
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'verified'
s.save!
assert_equal true, pm.is_finished?
end
end
# ---- file boundary (concatenation artifact) ----
require 'time'
require 'json'
require 'securerandom'
require 'sinatra/base'
require 'slim'
require 'i18n'
require 'i18n/backend/fallbacks'
# Sinatra App for Kalindar, show ics files.
# Sinatra app for Kalindar: renders event lists from one or more ics files
# managed by EventCalendar. Configuration (calendar paths, locale) is read
# once at load time from config.json in the working directory.
class KalindarApp < Sinatra::Base
  # File.read instead of File.new: the old code never closed the handle.
  $conf = JSON.parse(File.read('config.json'))
  $cal = EventCalendar.new($conf['calendar_files'])

  configure do
    I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
    I18n.load_path = Dir[File.join(settings.root, 'locales', '*.yml')]
    I18n.backend.load_translations
    I18n.default_locale = $conf['locale'].to_sym
  end

  # Will use http-verb PUT
  enable :method_override
  # We like pretty html indentation
  set :slim, :pretty => true

  helpers do
    # CSS class for a day list item; weekends get dedicated classes.
    def li_day_class day
      return "sunday" if day.sunday?
      return "saturday" if day.saturday?
      "day"
    end

    # I18n shorthands for templates.
    def t(*args)
      I18n.t(*args)
    end

    def l(*args)
      I18n.l(*args)
    end

    # Build a {day => [events]} hash for every day in +range+
    # (shared by all list routes; previously copy-pasted three times).
    def events_in(range)
      range.each_with_object({}) do |day, acc|
        acc[day] = $cal.find_events day.to_date
      end
    end
  end

  get '/' do
    redirect '/events'
  end

  # Month view: events for 31 days starting on the 1st of the given month.
  get '/events/:year/:month' do
    date = Date.new(params[:year].to_i, params[:month].to_i, 1)
    @events = events_in(date .. date + 30)
    slim :event_list
  end

  # Default list: events from today through the next 30 days.
  get '/events' do
    @events = events_in(DateTime.now .. DateTime.now + 30)
    slim :event_list
  end

  # Flat, chronologically sorted variant of the 30-day window.
  get '/events/twoday' do
    @events = events_in(DateTime.now .. DateTime.now + 30)
    @events = @events.values.flatten.sort_by {|e| e.start_time}
    slim :twoday_list
  end

  # Add new event, save ics file.
  put '/event' do
    errors = EventParamHelper.check_params params
    unless errors.empty?
      # BUGFIX: the original rendered the form here but fell through and
      # created the event anyway; `return` stops request processing.
      return slim :new_event, :locals => {'start_date' => Date.parse(params[:start_day])}
    end
    begin
      event = Event.create_from_params params
    rescue
      return 502, "Eingabefehler"
    end
    $cal.calendars.first.events << event
    $cal.calendars.first.write_back!
    if request.xhr?
      # Events from today to in 30 days
      @events = events_in(DateTime.now .. DateTime.now + 30)
      slim :event_list, :layout => false
    else
      redirect '/'
    end
  end

  # Show new event template.
  get '/event/new/:day' do
    # Aim is to get a new event in every case
    @event = Event.new(RiCal::Component::Event.new($cal.calendars.first))
    @event.dtstart = Date.parse(params[:day])
    slim :new_event, :locals => {'start_date' => Date.parse(params[:day])}
  end

  # Yet empty route.
  get '/event/delete/:uuid' do
    redirect back
  end

  # Show edit view.
  get '/event/edit/:uuid' do
    event = $cal.find_by_uid params[:uuid]
    if event.nil?
      redirect back
    else
      slim :edit_event, :locals => {'event' => event}
    end
  end

  # Edit/save an event, then persist the calendar.
  put '/event/edit/:uuid' do
    event = $cal.find_by_uid(params[:uuid])
    event.update params
    $cal.calendars.first.write_back!
    redirect '/'
  end
end
Allow inclusion in iframes.
require 'time'
require 'json'
require 'securerandom'
require 'sinatra/base'
require 'slim'
require 'i18n'
require 'i18n/backend/fallbacks'
# Sinatra App for Kalindar, show ics files.
# Sinatra app for Kalindar: renders and edits events from ics files.
class KalindarApp < Sinatra::Base
  # Config and calendar are loaded once, at class-load time.
  # NOTE(review): config.json is read relative to the CWD — confirm the
  # app is always started from the project root.
  $conf = JSON.load(File.new('config.json'))
  $cal = EventCalendar.new($conf['calendar_files'])

  configure do
    I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
    I18n.load_path = Dir[File.join(settings.root, 'locales', '*.yml')]
    I18n.backend.load_translations
    I18n.default_locale = $conf['locale'].to_sym
  end

  # Will use http-verb PUT
  enable :method_override
  # We like pretty html indentation
  set :slim, :pretty => true
  # Allow inclusion in iframe.
  set :protection, :except => :frame_options

  helpers do
    # CSS class for a day's <li> in the event list.
    def li_day_class day
      return "sunday" if day.sunday?
      return "saturday" if day.saturday?
      "day"
    end

    # Translation/localization shorthands for templates.
    def t(*args)
      I18n.t(*args)
    end

    def l(*args)
      I18n.l(*args)
    end
  end

  # Build a {day => [events]} hash for each day in the given range.
  # Extracted: this loop was duplicated in four routes.
  def events_by_day(range)
    range.each_with_object({}) do |day, acc|
      acc[day] = $cal.find_events day.to_date
    end
  end

  get '/' do
    redirect '/events'
  end

  # Events from the first of the given month to 31 days later.
  get '/events/:year/:month' do
    date = Date.new(params[:year].to_i, params[:month].to_i, 1)
    @events = events_by_day(date .. date + 30)
    slim :event_list
  end

  # Events from today to in 30 days.
  get '/events' do
    @events = events_by_day(DateTime.now .. DateTime.now + 30)
    slim :event_list
  end

  # Same 30-day window, flattened and sorted chronologically.
  get '/events/twoday' do
    @events = events_by_day(DateTime.now .. DateTime.now + 30)
              .values.flatten.sort_by { |e| e.start_time }
    slim :twoday_list
  end

  # Add new event, save ics file.
  put '/event' do
    errors = EventParamHelper.check_params params
    unless errors.empty?
      # BUGFIX: must return here — previously execution fell through and
      # the invalid event was created anyway after rendering the form.
      return slim :new_event, :locals => {'start_date' => Date.parse(params[:start_day])}
    end
    begin
      event = Event.create_from_params params
    rescue
      # BUGFIX: a bad user input is a client error (400), not a 502
      # Bad Gateway.
      return 400, "Eingabefehler"
    end
    $cal.calendars.first.events << event
    $cal.calendars.first.write_back!
    if request.xhr?
      @events = events_by_day(DateTime.now .. DateTime.now + 30)
      slim :event_list, :layout => false
    else
      redirect '/'
    end
  end

  # Show new event template.
  get '/event/new/:day' do
    # Aim is to get a new event in every case
    @event = Event.new(RiCal::Component::Event.new($cal.calendars.first))
    @event.dtstart = Date.parse(params[:day])
    slim :new_event, :locals => {'start_date' => Date.parse(params[:day])}
  end

  # Yet empty route.
  get '/event/delete/:uuid' do
    redirect back
  end

  # Show edit view.
  get '/event/edit/:uuid' do
    event = $cal.find_by_uid params[:uuid]
    if event.nil?
      redirect back
    else
      slim :edit_event, :locals => {'event' => event}
    end
  end

  # Edit/save an event.
  put '/event/edit/:uuid' do
    # validate_params
    puts params
    event = $cal.find_by_uid(params[:uuid])
    event.update params
    $cal.calendars.first.write_back!
    redirect '/'
  end
end
|
require 'helper'
# Unit tests for Plucky::CriteriaHash — a Hash wrapper that normalizes
# MongoDB query criteria before a query runs: symbol operators
# (:age.gt) to "$gt" modifiers, :id to :_id, string-to-ObjectId
# coercion for configured keys, arrays/sets to $in, and criteria
# merging semantics.
class CriteriaHashTest < Test::Unit::TestCase
  include Plucky

  context "Plucky::CriteriaHash" do
    should "delegate missing methods to the source hash" do
      hash = {:baz => 'wick', :foo => 'bar'}
      criteria = CriteriaHash.new(hash)
      criteria[:foo].should == 'bar'
      criteria[:baz].should == 'wick'
      criteria.keys.to_set.should == [:baz, :foo].to_set
    end

    # Each symbol operator (e.g. :age.gt => 21) becomes a "$op" modifier.
    SymbolOperators.each do |operator|
      should "work with #{operator} symbol operator" do
        CriteriaHash.new(:age.send(operator) => 21)[:age].should == {"$#{operator}" => 21}
      end
    end

    should "handle multiple symbol operators on the same field" do
      CriteriaHash.new(:age.gt => 12, :age.lt => 20)[:age].should == {
        '$gt' => 12, '$lt' => 20
      }
    end

    # clone/dup must deep-copy source, options, and duplicable values.
    context "#initialize_copy" do
      setup do
        @original = CriteriaHash.new({
          :comments => {:_id => 1}, :tags => ['mongo', 'ruby'],
        }, :object_ids => [:_id])
        @cloned = @original.clone
      end

      should "duplicate source hash" do
        @cloned.source.should_not equal(@original.source)
      end

      should "duplicate options hash" do
        @cloned.options.should_not equal(@original.options)
      end

      should "clone duplicable? values" do
        @cloned[:comments].should_not equal(@original[:comments])
        @cloned[:tags].should_not equal(@original[:tags])
      end
    end

    context "#object_ids=" do
      should "work with array" do
        criteria = CriteriaHash.new
        criteria.object_ids = [:_id]
        criteria.object_ids.should == [:_id]
      end

      should "flatten multi-dimensional array" do
        criteria = CriteriaHash.new
        criteria.object_ids = [[:_id]]
        criteria.object_ids.should == [:_id]
      end

      should "raise argument error if not array" do
        assert_raises(ArgumentError) { CriteriaHash.new.object_ids = {} }
        assert_raises(ArgumentError) { CriteriaHash.new.object_ids = nil }
        assert_raises(ArgumentError) { CriteriaHash.new.object_ids = 'foo' }
      end
    end

    # []= applies the normalization rules on assignment.
    context "#[]=" do
      should "leave string values for string keys alone" do
        criteria = CriteriaHash.new
        criteria[:foo] = 'bar'
        criteria[:foo].should == 'bar'
      end

      should "convert string values to object ids for object id keys" do
        id = BSON::ObjectId.new
        criteria = CriteriaHash.new({}, :object_ids => [:_id])
        criteria[:_id] = id.to_s
        criteria[:_id].should == id
      end

      should "convert sets to arrays" do
        criteria = CriteriaHash.new
        criteria[:foo] = [1, 2].to_set
        criteria[:foo].should == {'$in' => [1, 2]}
      end

      should "convert times to utc" do
        time = Time.now
        criteria = CriteriaHash.new
        criteria[:foo] = time
        criteria[:foo].should be_utc
        criteria[:foo].should == time.utc
      end

      should "convert :id to :_id" do
        criteria = CriteriaHash.new
        criteria[:id] = 1
        criteria[:_id].should == 1
        criteria[:id].should be_nil
      end

      should "work with symbol operators" do
        criteria = CriteriaHash.new
        criteria[:_id.in] = ['foo']
        criteria[:_id].should == {'$in' => ['foo']}
      end

      should "set each of the conditions pairs" do
        criteria = CriteriaHash.new
        criteria[:conditions] = {:_id => 'john', :foo => 'bar'}
        criteria[:_id].should == 'john'
        criteria[:foo].should == 'bar'
      end
    end

    context "with id key" do
      should "convert to _id" do
        id = BSON::ObjectId.new
        criteria = CriteriaHash.new(:id => id)
        criteria[:_id].should == id
        criteria[:id].should be_nil
      end

      should "convert id with symbol operator to _id with modifier" do
        id = BSON::ObjectId.new
        criteria = CriteriaHash.new(:id.ne => id)
        criteria[:_id].should == {'$ne' => id}
        criteria[:id].should be_nil
      end
    end

    context "with time value" do
      should "convert to utc if not utc" do
        CriteriaHash.new(:created_at => Time.now)[:created_at].utc?.should be(true)
      end

      should "leave utc alone" do
        CriteriaHash.new(:created_at => Time.now.utc)[:created_at].utc?.should be(true)
      end
    end

    # Bare array values become $in queries, unless already wrapped in a
    # modifier or under a logical operator ($or/$and).
    context "with array value" do
      should "default to $in" do
        CriteriaHash.new(:numbers => [1,2,3])[:numbers].should == {'$in' => [1,2,3]}
      end

      should "use existing modifier if present" do
        CriteriaHash.new(:numbers => {'$all' => [1,2,3]})[:numbers].should == {'$all' => [1,2,3]}
        CriteriaHash.new(:numbers => {'$any' => [1,2,3]})[:numbers].should == {'$any' => [1,2,3]}
      end

      should "not turn value to $in with $or or $and key" do
        CriteriaHash.new(:$or => [{:numbers => 1}, {:numbers => 2}] )[:$or].should == [{:numbers=>1}, {:numbers=>2}]
        CriteriaHash.new(:$and => [{:numbers => 1}, {:numbers => 2}] )[:$and].should == [{:numbers=>1}, {:numbers=>2}]
      end
    end

    context "with set value" do
      should "default to $in and convert to array" do
        CriteriaHash.new(:numbers => [1,2,3].to_set)[:numbers].should == {'$in' => [1,2,3]}
      end

      should "use existing modifier if present and convert to array" do
        CriteriaHash.new(:numbers => {'$all' => [1,2,3].to_set})[:numbers].should == {'$all' => [1,2,3]}
        CriteriaHash.new(:numbers => {'$any' => [1,2,3].to_set})[:numbers].should == {'$any' => [1,2,3]}
      end
    end

    # ObjectId coercion only applies to keys listed in :object_ids.
    context "with string ids for string keys" do
      setup do
        @id      = BSON::ObjectId.new
        @room_id = BSON::ObjectId.new
        @criteria = CriteriaHash.new(:_id => @id.to_s, :room_id => @room_id.to_s)
      end

      should "leave string ids as strings" do
        @criteria[:_id].should == @id.to_s
        @criteria[:room_id].should == @room_id.to_s
        @criteria[:_id].should be_instance_of(String)
        @criteria[:room_id].should be_instance_of(String)
      end
    end

    context "with string ids for object id keys" do
      setup do
        @id      = BSON::ObjectId.new
        @room_id = BSON::ObjectId.new
      end

      should "convert strings to object ids" do
        criteria = CriteriaHash.new({:_id => @id.to_s, :room_id => @room_id.to_s}, :object_ids => [:_id, :room_id])
        criteria[:_id].should == @id
        criteria[:room_id].should == @room_id
        criteria[:_id].should be_instance_of(BSON::ObjectId)
        criteria[:room_id].should be_instance_of(BSON::ObjectId)
      end

      should "convert :id with string value to object id value" do
        criteria = CriteriaHash.new({:id => @id.to_s}, :object_ids => [:_id])
        criteria[:_id].should == @id
      end
    end

    context "with string ids for object id keys (nested)" do
      setup do
        @id1 = BSON::ObjectId.new
        @id2 = BSON::ObjectId.new
        @criteria = CriteriaHash.new({:_id => {'$in' => [@id1.to_s, @id2.to_s]}}, :object_ids => [:_id])
      end

      should "convert strings to object ids" do
        @criteria[:_id].should == {'$in' => [@id1, @id2]}
      end
    end

    # Merging two criteria: matching simple values become $in arrays,
    # modifier hashes are unioned; merge is non-destructive.
    context "#merge" do
      should "work when no keys match" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:baz => 'wick')
        c1.merge(c2).should == CriteriaHash.new(:foo => 'bar', :baz => 'wick')
      end

      should "turn matching keys with simple values into array" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'baz')
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => %w[bar baz]})
      end

      should "unique matching key values" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'bar')
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => %w[bar]})
      end

      should "correctly merge arrays and non-arrays" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => %w[bar baz])
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => %w[bar baz]})
        c2.merge(c1).should == CriteriaHash.new(:foo => {'$in' => %w[bar baz]})
      end

      should "be able to merge two modifier hashes" do
        c1 = CriteriaHash.new('$in' => [1, 2])
        c2 = CriteriaHash.new('$in' => [2, 3])
        c1.merge(c2).should == CriteriaHash.new('$in' => [1, 2, 3])
      end

      should "merge matching keys with a single modifier" do
        c1 = CriteriaHash.new(:foo => {'$in' => [1, 2, 3]})
        c2 = CriteriaHash.new(:foo => {'$in' => [1, 4, 5]})
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => [1, 2, 3, 4, 5]})
      end

      should "merge matching keys with multiple modifiers" do
        c1 = CriteriaHash.new(:foo => {'$in' => [1, 2, 3]})
        c2 = CriteriaHash.new(:foo => {'$all' => [1, 4, 5]})
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => [1, 2, 3], '$all' => [1, 4, 5]})
      end

      should "not update mergee" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'baz')
        c1.merge(c2).should_not equal(c1)
        c1[:foo].should == 'bar'
      end
    end

    context "#merge!" do
      should "merge and replace" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'baz')
        c1.merge!(c2)
        c1[:foo].should == {'$in' => ['bar', 'baz']}
      end
    end

    # A "simple" criteria filters by _id (optionally plus _type for
    # single-collection inheritance) and nothing else.
    context "#simple?" do
      should "be true if only filtering by _id" do
        CriteriaHash.new(:_id => 'id').should be_simple
      end

      should "be true if only filtering by Sci" do
        CriteriaHash.new(:_id => 'id', :_type => 'Foo').should be_simple
      end

      should "be false if querying by anthing other than _id/Sci" do
        CriteriaHash.new(:foo => 'bar').should_not be_simple
      end

      should "be false if querying only by _type" do
        CriteriaHash.new(:_type => 'Foo').should_not be_simple
      end
    end
  end
end
Separate $or and $and tests.
require 'helper'
# Unit tests for Plucky::CriteriaHash — a Hash wrapper that normalizes
# MongoDB query criteria before a query runs (symbol operators, :id to
# :_id, ObjectId coercion, $in conversion, merge semantics).
# This revision splits the $or and $and cases into separate tests.
class CriteriaHashTest < Test::Unit::TestCase
  include Plucky

  context "Plucky::CriteriaHash" do
    should "delegate missing methods to the source hash" do
      hash = {:baz => 'wick', :foo => 'bar'}
      criteria = CriteriaHash.new(hash)
      criteria[:foo].should == 'bar'
      criteria[:baz].should == 'wick'
      criteria.keys.to_set.should == [:baz, :foo].to_set
    end

    # Each symbol operator (e.g. :age.gt => 21) becomes a "$op" modifier.
    SymbolOperators.each do |operator|
      should "work with #{operator} symbol operator" do
        CriteriaHash.new(:age.send(operator) => 21)[:age].should == {"$#{operator}" => 21}
      end
    end

    should "handle multiple symbol operators on the same field" do
      CriteriaHash.new(:age.gt => 12, :age.lt => 20)[:age].should == {
        '$gt' => 12, '$lt' => 20
      }
    end

    context "#initialize_copy" do
      setup do
        @original = CriteriaHash.new({
          :comments => {:_id => 1}, :tags => ['mongo', 'ruby'],
        }, :object_ids => [:_id])
        @cloned = @original.clone
      end

      should "duplicate source hash" do
        @cloned.source.should_not equal(@original.source)
      end

      should "duplicate options hash" do
        @cloned.options.should_not equal(@original.options)
      end

      should "clone duplicable? values" do
        @cloned[:comments].should_not equal(@original[:comments])
        @cloned[:tags].should_not equal(@original[:tags])
      end
    end

    context "#object_ids=" do
      should "work with array" do
        criteria = CriteriaHash.new
        criteria.object_ids = [:_id]
        criteria.object_ids.should == [:_id]
      end

      should "flatten multi-dimensional array" do
        criteria = CriteriaHash.new
        criteria.object_ids = [[:_id]]
        criteria.object_ids.should == [:_id]
      end

      should "raise argument error if not array" do
        assert_raises(ArgumentError) { CriteriaHash.new.object_ids = {} }
        assert_raises(ArgumentError) { CriteriaHash.new.object_ids = nil }
        assert_raises(ArgumentError) { CriteriaHash.new.object_ids = 'foo' }
      end
    end

    context "#[]=" do
      should "leave string values for string keys alone" do
        criteria = CriteriaHash.new
        criteria[:foo] = 'bar'
        criteria[:foo].should == 'bar'
      end

      should "convert string values to object ids for object id keys" do
        id = BSON::ObjectId.new
        criteria = CriteriaHash.new({}, :object_ids => [:_id])
        criteria[:_id] = id.to_s
        criteria[:_id].should == id
      end

      should "convert sets to arrays" do
        criteria = CriteriaHash.new
        criteria[:foo] = [1, 2].to_set
        criteria[:foo].should == {'$in' => [1, 2]}
      end

      should "convert times to utc" do
        time = Time.now
        criteria = CriteriaHash.new
        criteria[:foo] = time
        criteria[:foo].should be_utc
        criteria[:foo].should == time.utc
      end

      should "convert :id to :_id" do
        criteria = CriteriaHash.new
        criteria[:id] = 1
        criteria[:_id].should == 1
        criteria[:id].should be_nil
      end

      should "work with symbol operators" do
        criteria = CriteriaHash.new
        criteria[:_id.in] = ['foo']
        criteria[:_id].should == {'$in' => ['foo']}
      end

      should "set each of the conditions pairs" do
        criteria = CriteriaHash.new
        criteria[:conditions] = {:_id => 'john', :foo => 'bar'}
        criteria[:_id].should == 'john'
        criteria[:foo].should == 'bar'
      end
    end

    context "with id key" do
      should "convert to _id" do
        id = BSON::ObjectId.new
        criteria = CriteriaHash.new(:id => id)
        criteria[:_id].should == id
        criteria[:id].should be_nil
      end

      should "convert id with symbol operator to _id with modifier" do
        id = BSON::ObjectId.new
        criteria = CriteriaHash.new(:id.ne => id)
        criteria[:_id].should == {'$ne' => id}
        criteria[:id].should be_nil
      end
    end

    context "with time value" do
      should "convert to utc if not utc" do
        CriteriaHash.new(:created_at => Time.now)[:created_at].utc?.should be(true)
      end

      should "leave utc alone" do
        CriteriaHash.new(:created_at => Time.now.utc)[:created_at].utc?.should be(true)
      end
    end

    # Bare array values become $in queries, unless already wrapped in a
    # modifier or under a logical operator ($or/$and).
    context "with array value" do
      should "default to $in" do
        CriteriaHash.new(:numbers => [1,2,3])[:numbers].should == {'$in' => [1,2,3]}
      end

      should "use existing modifier if present" do
        CriteriaHash.new(:numbers => {'$all' => [1,2,3]})[:numbers].should == {'$all' => [1,2,3]}
        CriteriaHash.new(:numbers => {'$any' => [1,2,3]})[:numbers].should == {'$any' => [1,2,3]}
      end

      should "not turn value to $in with $or key" do
        CriteriaHash.new(:$or => [{:numbers => 1}, {:numbers => 2}] )[:$or].should == [{:numbers=>1}, {:numbers=>2}]
      end

      should "not turn value to $in with $and key" do
        CriteriaHash.new(:$and => [{:numbers => 1}, {:numbers => 2}] )[:$and].should == [{:numbers=>1}, {:numbers=>2}]
      end
    end

    context "with set value" do
      should "default to $in and convert to array" do
        CriteriaHash.new(:numbers => [1,2,3].to_set)[:numbers].should == {'$in' => [1,2,3]}
      end

      should "use existing modifier if present and convert to array" do
        CriteriaHash.new(:numbers => {'$all' => [1,2,3].to_set})[:numbers].should == {'$all' => [1,2,3]}
        CriteriaHash.new(:numbers => {'$any' => [1,2,3].to_set})[:numbers].should == {'$any' => [1,2,3]}
      end
    end

    # ObjectId coercion only applies to keys listed in :object_ids.
    context "with string ids for string keys" do
      setup do
        @id      = BSON::ObjectId.new
        @room_id = BSON::ObjectId.new
        @criteria = CriteriaHash.new(:_id => @id.to_s, :room_id => @room_id.to_s)
      end

      should "leave string ids as strings" do
        @criteria[:_id].should == @id.to_s
        @criteria[:room_id].should == @room_id.to_s
        @criteria[:_id].should be_instance_of(String)
        @criteria[:room_id].should be_instance_of(String)
      end
    end

    context "with string ids for object id keys" do
      setup do
        @id      = BSON::ObjectId.new
        @room_id = BSON::ObjectId.new
      end

      should "convert strings to object ids" do
        criteria = CriteriaHash.new({:_id => @id.to_s, :room_id => @room_id.to_s}, :object_ids => [:_id, :room_id])
        criteria[:_id].should == @id
        criteria[:room_id].should == @room_id
        criteria[:_id].should be_instance_of(BSON::ObjectId)
        criteria[:room_id].should be_instance_of(BSON::ObjectId)
      end

      should "convert :id with string value to object id value" do
        criteria = CriteriaHash.new({:id => @id.to_s}, :object_ids => [:_id])
        criteria[:_id].should == @id
      end
    end

    context "with string ids for object id keys (nested)" do
      setup do
        @id1 = BSON::ObjectId.new
        @id2 = BSON::ObjectId.new
        @criteria = CriteriaHash.new({:_id => {'$in' => [@id1.to_s, @id2.to_s]}}, :object_ids => [:_id])
      end

      should "convert strings to object ids" do
        @criteria[:_id].should == {'$in' => [@id1, @id2]}
      end
    end

    # Merging two criteria: matching simple values become $in arrays,
    # modifier hashes are unioned; merge is non-destructive.
    context "#merge" do
      should "work when no keys match" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:baz => 'wick')
        c1.merge(c2).should == CriteriaHash.new(:foo => 'bar', :baz => 'wick')
      end

      should "turn matching keys with simple values into array" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'baz')
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => %w[bar baz]})
      end

      should "unique matching key values" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'bar')
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => %w[bar]})
      end

      should "correctly merge arrays and non-arrays" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => %w[bar baz])
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => %w[bar baz]})
        c2.merge(c1).should == CriteriaHash.new(:foo => {'$in' => %w[bar baz]})
      end

      should "be able to merge two modifier hashes" do
        c1 = CriteriaHash.new('$in' => [1, 2])
        c2 = CriteriaHash.new('$in' => [2, 3])
        c1.merge(c2).should == CriteriaHash.new('$in' => [1, 2, 3])
      end

      should "merge matching keys with a single modifier" do
        c1 = CriteriaHash.new(:foo => {'$in' => [1, 2, 3]})
        c2 = CriteriaHash.new(:foo => {'$in' => [1, 4, 5]})
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => [1, 2, 3, 4, 5]})
      end

      should "merge matching keys with multiple modifiers" do
        c1 = CriteriaHash.new(:foo => {'$in' => [1, 2, 3]})
        c2 = CriteriaHash.new(:foo => {'$all' => [1, 4, 5]})
        c1.merge(c2).should == CriteriaHash.new(:foo => {'$in' => [1, 2, 3], '$all' => [1, 4, 5]})
      end

      should "not update mergee" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'baz')
        c1.merge(c2).should_not equal(c1)
        c1[:foo].should == 'bar'
      end
    end

    context "#merge!" do
      should "merge and replace" do
        c1 = CriteriaHash.new(:foo => 'bar')
        c2 = CriteriaHash.new(:foo => 'baz')
        c1.merge!(c2)
        c1[:foo].should == {'$in' => ['bar', 'baz']}
      end
    end

    # A "simple" criteria filters by _id (optionally plus _type for
    # single-collection inheritance) and nothing else.
    context "#simple?" do
      should "be true if only filtering by _id" do
        CriteriaHash.new(:_id => 'id').should be_simple
      end

      should "be true if only filtering by Sci" do
        CriteriaHash.new(:_id => 'id', :_type => 'Foo').should be_simple
      end

      should "be false if querying by anthing other than _id/Sci" do
        CriteriaHash.new(:foo => 'bar').should_not be_simple
      end

      should "be false if querying only by _type" do
        CriteriaHash.new(:_type => 'Foo').should_not be_simple
      end
    end
  end
end
module Leml
  # Gem version string.
  VERSION = '0.2.0'
end
Version bump to v0.2.1.
module Leml
  # Gem version string.
  VERSION = '0.2.1'
end
|
require "lita"
require 'trello'
require 'lita-timing'
require 'review_cards'
require 'new_card'
module Lita
  module Handlers
    # Trello "lean" workflow helper: counts and classifies cards on a
    # board, posts review reminders on a daily timer, and files a card
    # in the Confirmed column when the tc-i18n-hygiene Buildkite
    # pipeline fails.
    class Lean < Handler
      # Seconds a user has to answer during a set-types/set-streams Q&A
      # session (enforced via redis key expiry).
      RESPONSE_TIMEOUT = 300 # seconds
      # Card "type" tags embedded in card names.
      BUG = "[bug]"
      MAINTENANCE = "[maint]"
      TECH = "[tech]"
      FEATURE = "[feature]"
      # Card "stream" tags embedded in card names.
      CONTENT = "[content]"
      DATA = "[data]"
      COMMERCIAL = "[commercial]"
      # Seconds between timer ticks for the review reminder.
      TIMER_INTERVAL = 60

      config :trello_public_key
      config :trello_member_token
      config :development_board_id
      config :old_review_cards_channel
      config :list_id

      on :loaded, :start_timer
      on :buildkite_build_finished, :build_finished

      route(/\Alean count ([a-zA-Z0-9]+)\Z/i, :count, command: true, help: { "lean count [board id]" => "Count cards on the nominated trello board"})
      route(/\Alean breakdown ([a-zA-Z0-9]+)\Z/i, :breakdown, command: true, help: { "lean breakdown [board id]" => "Breakdown of card types on the nominated trello board"})
      route(/\Alean set-types ([a-zA-Z0-9]+)\Z/i, :set_types, command: true, help: { "lean set-types [board id]" => "Begin looping through cards without a type on the nominated trello board"})
      route(/\Alean set-streams ([a-zA-Z0-9]+)\Z/i, :set_streams, command: true, help: { "lean set-streams [board id]" => "Begin looping through cards without a stream on the nominated trello board"})
      route(/\Alean confirmed-cards\Z/i, :list_cards, command: true, help: { "lean confirmed-cards" => "List all cards in the confirmed column" })
      # Single-letter answers used while a Q&A session is active.
      route(/\A([bmtf])\Z/i, :type, command: false)
      route(/\A([cdo])\Z/i, :stream, command: false)

      def start_timer(payload)
        start_review_timer
      end

      # Returns cards listed in Confirmed on the Development board
      def list_cards(response)
        msg = NewCard.new(trello_client, config.list_id).display_confirmed_msg(config.development_board_id)
        response.reply("#{msg}")
      end

      # Creates a card with specified value in the Confirmed column on
      # the Development board when the tc-i18n-hygiene build fails
      def create_confirmed
        new_card = NewCard.new(trello_client, config.list_id).create_new_card
        response = "#{new_card.name}, #{new_card.url}"
        robot.send_message(target, response)
      end

      # Buildkite webhook: only react to failed tc-i18n-hygiene builds.
      def build_finished(payload)
        event = payload[:event]
        if event.pipeline_name == "tc-i18n-hygiene" && !event.passed?
          create_confirmed
        end
      end

      # Returns a count of cards on a Trello board, broken down by
      # the card type
      #
      def breakdown(response)
        board_id = response.args.last
        board = trello_client.find(:boards, board_id)
        board.lists.each do |list|
          stats = list_stats(list)
          response.reply("#{list.name}: #{stats.inspect}")
        end
      end

      # Returns a simple count of cards on a Trello board
      #
      def count(response)
        board_id = response.args.last
        board = trello_client.find(:boards, board_id)
        response.reply("#{board.cards.size} cards on #{board.name}")
      end

      # Set the current channel into Q&A mode, allowing users to loop through
      # the cards on a Trello board and choose a card type
      def set_types(response)
        board_id = response.args.last
        board = trello_client.find(:boards, board_id)
        response.reply("Starting Set Types session for board: #{board.name}")
        response.reply("Note: You have #{RESPONSE_TIMEOUT} seconds between questions to reply")
        select_next_typeless_card_from_board(response, board)
      end

      # Set the current channel into Q&A mode, allowing users to loop through
      # the cards on a Trello board and choose a card stream
      def set_streams(response)
        board_id = response.args.last
        board = trello_client.find(:boards, board_id)
        response.reply("Starting Set Streams session for board: #{board.name}")
        response.reply("Note: You have #{RESPONSE_TIMEOUT} seconds between questions to reply")
        select_next_streamless_card_from_board(response, board)
      end

      # Set the type for a single Trello card. To reach this command, first
      # use the "set-types" command to put a channel into active mode.
      def type(response)
        room_name = response.message.source.room.to_s
        board_id = redis.get("#{room_name}-board-id")
        card_id = redis.get("#{room_name}-card-id")
        board = trello_client.find(:boards, board_id)
        card = trello_client.find(:cards, card_id)
        new_type = case response.message.body
                   when "b", "B" then BUG
                   when "m", "M" then MAINTENANCE
                   when "t", "T" then TECH
                   else
                     FEATURE
                   end
        card.name = "#{new_type} #{card.name}"
        card.save
        select_next_typeless_card_from_board(response, board)
      end

      # Set the stream for a single Trello card. To reach this command, first
      # use the "set-streams" command to put a channel into active mode.
      def stream(response)
        room_name = response.message.source.room.to_s
        board_id = redis.get("#{room_name}-board-id")
        card_id = redis.get("#{room_name}-card-id")
        board = trello_client.find(:boards, board_id)
        card = trello_client.find(:cards, card_id)
        new_stream = case response.message.body
                     when "c", "C" then CONTENT
                     when "d", "D" then DATA
                     else
                       COMMERCIAL
                     end
        card.name = "#{new_stream} #{card.name}"
        card.save
        select_next_streamless_card_from_board(response, board)
      end

      private

      # Post review-column activity to the channel each weekday at 23:00.
      def start_review_timer
        every_with_logged_errors(TIMER_INTERVAL) do |timer|
          daily_at("23:00", [:sunday, :monday, :tuesday, :wednesday, :thursday], "review-column-activity") do
            msg = ReviewCards.new(trello_client).to_msg(config.development_board_id)
            robot.send_message(target, msg) if msg
          end
        end
      end

      def days_in_seconds(days)
        60 * 60 * 24 * days.to_i
      end

      def every_with_logged_errors(interval, &block)
        logged_errors do
          every(interval, &block)
        end
      end

      # Run the block, logging any error so the timer loop keeps running.
      def logged_errors(&block)
        yield
      # BUGFIX: rescue StandardError instead of Exception — rescuing
      # Exception also swallows SignalException/SystemExit and would make
      # the process hard to stop.
      rescue StandardError => e
        puts "Error in timer loop: #{e.inspect}"
      end

      def daily_at(time, day, name, &block)
        Lita::Timing::Scheduled.new(name, redis).daily_at(time, day, &block)
      end

      # Channel the reminders and build-failure cards are posted to.
      def target
        Source.new(room: Lita::Room.find_by_name(config.old_review_cards_channel) || "general")
      end

      # Ask about the next card without a type tag, or end the session.
      def select_next_typeless_card_from_board(response, board)
        room_name = response.message.source.room.to_s
        card = detect_card_with_no_type(board)
        if card
          set_state(room_name, board.id, card.id)
          response.reply(card_to_string(card))
          response.reply("[b]ug [m]aintenance [t]ech [f]eature")
        else
          reset_state(room_name)
          response.reply("All cards have been classified")
        end
      end

      # Ask about the next card without a stream tag, or end the session.
      def select_next_streamless_card_from_board(response, board)
        room_name = response.message.source.room.to_s
        card = detect_card_with_no_stream(board)
        if card
          set_state(room_name, board.id, card.id)
          response.reply(card_to_string(card))
          response.reply("[c]ontent [d]ata c[o]mmercial")
        else
          reset_state(room_name)
          response.reply("All cards have been classified")
        end
      end

      # Remember which board/card a room is currently classifying; keys
      # expire so an abandoned session ends itself.
      def set_state(room_name, board_id, card_id)
        redis.set("#{room_name}-board-id", board_id, ex: RESPONSE_TIMEOUT)
        redis.set("#{room_name}-card-id", card_id, ex: RESPONSE_TIMEOUT)
      end

      def reset_state(room_name)
        redis.del("#{room_name}-board-id")
        redis.del("#{room_name}-card-id")
      end

      def card_to_string(card)
        labels = card.labels.map(&:name)
        "#{card.name} [#{labels.join(", ")}] [#{card.url}]"
      end

      # First card on the board whose name carries none of the type tags.
      def detect_card_with_no_type(board)
        cards = board.cards
        cards.detect { |card|
          !card.name.include?(BUG) &&
          !card.name.include?(MAINTENANCE) &&
          !card.name.include?(TECH) &&
          !card.name.include?(FEATURE)
        }
      end

      # First card on the board whose name carries none of the stream tags.
      def detect_card_with_no_stream(board)
        cards = board.cards
        cards.detect { |card|
          !card.name.include?(CONTENT) &&
          !card.name.include?(DATA) &&
          !card.name.include?(COMMERCIAL)
        }
      end

      # Per-list tally of card counts by type/stream tag.
      def list_stats(list)
        cards = list.cards
        result = {}
        result[:total] = cards.size
        result[:feature] = cards.map(&:name).select {|name| name.include?(FEATURE) }.size
        result[:bug] = cards.map(&:name).select {|name| name.include?(BUG) }.size
        result[:maintenance] = cards.map(&:name).select {|name| name.include?(MAINTENANCE) }.size
        result[:tech] = cards.map(&:name).select {|name| name.include?(TECH) }.size
        result[:unknown_type] = cards.map(&:name).select {|name|
          !name.include?(FEATURE) &&
          !name.include?(BUG) &&
          !name.include?(MAINTENANCE) &&
          !name.include?(TECH)
        }.size
        result[:content] = cards.map(&:name).select {|name| name.include?(CONTENT) }.size
        result[:data] = cards.map(&:name).select {|name| name.include?(DATA) }.size
        result[:commercial] = cards.map(&:name).select {|name| name.include?(COMMERCIAL) }.size
        result[:unknown_stream] = cards.map(&:name).select {|name|
          !name.include?(CONTENT) &&
          !name.include?(DATA) &&
          !name.include?(COMMERCIAL)
        }.size
        result
      end

      # Memoized Trello API client built from handler config.
      def trello_client
        @client ||= Trello::Client.new(
          developer_public_key: config.trello_public_key,
          member_token: config.trello_member_token
        )
      end

      Lita.register_handler(self)
    end
  end
end
Return a list of all feature requests on Trello.
require "lita"
require 'trello'
require 'lita-timing'
require 'review_cards'
require 'new_card'
module Lita
module Handlers
class Lean < Handler
RESPONSE_TIMEOUT = 300 # seconds
BUG = "[bug]"
MAINTENANCE = "[maint]"
TECH = "[tech]"
FEATURE = "[feature]"
CONTENT = "[content]"
DATA = "[data]"
COMMERCIAL = "[commercial]"
TIMER_INTERVAL = 60
config :trello_public_key
config :trello_member_token
config :development_board_id
config :old_review_cards_channel
config :list_id
on :loaded, :start_timer
on :buildkite_build_finished, :build_finished
route(/\Alean count ([a-zA-Z0-9]+)\Z/i, :count, command: true, help: { "lean count [board id]" => "Count cards on the nominated trello board"})
route(/\Alean breakdown ([a-zA-Z0-9]+)\Z/i, :breakdown, command: true, help: { "lean breakdown [board id]" => "Breakdown of card types on the nominated trello board"})
route(/\Alean set-types ([a-zA-Z0-9]+)\Z/i, :set_types, command: true, help: { "lean set-types [board id]" => "Begin looping through cards without a type on the nominated trello board"})
route(/\Alean set-streams ([a-zA-Z0-9]+)\Z/i, :set_streams, command: true, help: { "lean set-streams [board id]" => "Begin looping through cards without a stream on the nominated trello board"})
route(/\Alean confirmed-cards\Z/i, :list_cards, command: true, help: { "lean confirmed-cards" => "List all cards in the confirmed column" })
# BUGFIX: the pattern used "\l" (invalid/unanchored) instead of the "\A"
# string anchor every other route here uses, and the help key did not
# match the actual command ("lean feature-request"); also fixes the
# "baord" typo in the help text.
route(/\Alean list-feature-requests\Z/i, :list_feature_request, command: true, help: { "lean list-feature-requests" => "List all cards on the Feature Request board" })
route(/\A([bmtf])\Z/i, :type, command: false)
route(/\A([cdo])\Z/i, :stream, command: false)
# Lita :loaded hook — starts the recurring review-reminder timer.
def start_timer(payload)
start_review_timer
end
# Returns cards listed in Confirmed on the Development board
def list_cards(response)
msg = NewCard.new(trello_client, config.list_id).display_confirmed_msg(config.development_board_id)
response.reply("#{msg}")
end
# Creates a card with specified value in the Confirmed column on
# the Development board when the tc-i18n-hygiene build fails
def create_confirmed
new_card = NewCard.new(trello_client, config.list_id).create_new_card
# Announce the freshly created card (name + URL) in the configured channel.
response = "#{new_card.name}, #{new_card.url}"
robot.send_message(target, response)
end
# Buildkite webhook handler: when the tc-i18n-hygiene pipeline fails,
# create a card in the Confirmed column.
# Bug fix: this method was missing its closing `end`, which nested
# list_feature_request inside it and left the file one `end` short
# (module Lita was never closed — syntax error at EOF).
def build_finished(payload)
event = payload[:event]
if event.pipeline_name == "tc-i18n-hygiene" && !event.passed?
create_confirmed
end
end
# Returns a list of all cards on the Feature request board
#
def list_feature_request(response)
board_id = "hD3oNZ2P"
board = trello_client.find(:boards, board_id)
board.cards.each do |card|
response.reply("#{card.name}, #{card.url}", "#{card.list.name}")
end
end
# Returns a count of cards on a Trello board, broken down by
# the card type
#
def breakdown(response)
board_id = response.args.last
board = trello_client.find(:boards, board_id)
# One reply per list, e.g. "Doing: {:total=>3, :feature=>1, ...}".
board.lists.each do |list|
stats = list_stats(list)
response.reply("#{list.name}: #{stats.inspect}")
end
end
# Returns a simple count of cards on a Trello board
#
def count(response)
board_id = response.args.last
board = trello_client.find(:boards, board_id)
response.reply("#{board.cards.size} cards on #{board.name}")
end
# Set the current channel into Q&A mode, allowing users to loop through
# the cards on a Trello board and choose a card type
def set_types(response)
board_id = response.args.last
board = trello_client.find(:boards, board_id)
response.reply("Starting Set Types session for board: #{board.name}")
response.reply("Note: You have #{RESPONSE_TIMEOUT} seconds between questions to reply")
select_next_typeless_card_from_board(response, board)
end
# Set the current channel into Q&A mode, allowing users to loop through
# the cards on a Trello board and choose a card stream
def set_streams(response)
board_id = response.args.last
board = trello_client.find(:boards, board_id)
response.reply("Starting Set Streams session for board: #{board.name}")
response.reply("Note: You have #{RESPONSE_TIMEOUT} seconds between questions to reply")
select_next_streamless_card_from_board(response, board)
end
# Set the type for a single Trello card. To reach this command, first
# use the "set-types" command to put a channel into active mode.
def type(response)
# The board/card under review were stashed in redis (keyed by room name)
# by select_next_typeless_card_from_board; keys expire after RESPONSE_TIMEOUT.
room_name = response.message.source.room.to_s
board_id = redis.get("#{room_name}-board-id")
card_id = redis.get("#{room_name}-card-id")
board = trello_client.find(:boards, board_id)
card = trello_client.find(:cards, card_id)
# The route only matches [bmtf], so the else arm handles "f"/"F".
new_type = case response.message.body
when "b", "B" then BUG
when "m", "M" then MAINTENANCE
when "t", "T" then TECH
else
FEATURE
end
# Prefix the chosen tag onto the card title, then move to the next card.
card.name = "#{new_type} #{card.name}"
card.save
select_next_typeless_card_from_board(response, board)
end
# Set the stream for a single Trello card. To reach this command, first
# use the "set-streams" command to put a channel into active mode.
def stream(response)
room_name = response.message.source.room.to_s
board_id = redis.get("#{room_name}-board-id")
card_id = redis.get("#{room_name}-card-id")
board = trello_client.find(:boards, board_id)
card = trello_client.find(:cards, card_id)
# The route only matches [cdo], so the else arm handles "o"/"O".
new_stream = case response.message.body
when "c", "C" then CONTENT
when "d", "D" then DATA
else
COMMERCIAL
end
card.name = "#{new_stream} #{card.name}"
card.save
select_next_streamless_card_from_board(response, board)
end
private
# Poll every TIMER_INTERVAL seconds; the daily_at gate (state in redis)
# ensures the reminder itself fires at most once per day, 23:00 Sun-Thu.
def start_review_timer
every_with_logged_errors(TIMER_INTERVAL) do |timer|
daily_at("23:00", [:sunday, :monday, :tuesday, :wednesday, :thursday], "review-column-activity") do
msg = ReviewCards.new(trello_client).to_msg(config.development_board_id)
robot.send_message(target, msg) if msg
end
end
end
# Convert a whole number of days (Integer or numeric String) to seconds.
def days_in_seconds(days)
days.to_i * 24 * 60 * 60
end
# Run Lita's `every` timer with the block wrapped in error logging.
def every_with_logged_errors(interval, &block)
logged_errors do
every(interval, &block)
end
end
def logged_errors(&block)
yield
# NOTE(review): rescuing Exception also swallows SignalException and
# SystemExit; narrowing to StandardError is likely intended — confirm.
rescue Exception => e
puts "Error in timer loop: #{e.inspect}"
end
# Schedule a named daily job via lita-timing (last-run state kept in redis).
def daily_at(time, day, name, &block)
Lita::Timing::Scheduled.new(name, redis).daily_at(time, day, &block)
end
# Channel for review reminders; falls back to "general" when the
# configured room cannot be found.
def target
Source.new(room: Lita::Room.find_by_name(config.old_review_cards_channel) || "general")
end
# Find the next untagged card and prompt the room for its type; when no
# card remains, clear the session and announce completion.
def select_next_typeless_card_from_board(response, board)
room_name = response.message.source.room.to_s
card = detect_card_with_no_type(board)
if card
set_state(room_name, board.id, card.id)
response.reply(card_to_string(card))
response.reply("[b]ug [m]aintenance [t]ech [f]eature")
else
reset_state(room_name)
response.reply("All cards have been classified")
end
end
# Find the next card without a stream tag and prompt the room; when no
# card remains, clear the session and announce completion.
def select_next_streamless_card_from_board(response, board)
room_name = response.message.source.room.to_s
card = detect_card_with_no_stream(board)
if card
set_state(room_name, board.id, card.id)
response.reply(card_to_string(card))
response.reply("[c]ontent [d]ata c[o]mmercial")
else
reset_state(room_name)
response.reply("All cards have been classified")
end
end
# Remember which board/card a room is classifying; keys expire after
# RESPONSE_TIMEOUT seconds so stale sessions clean themselves up.
def set_state(room_name, board_id, card_id)
redis.set("#{room_name}-board-id", board_id, ex: RESPONSE_TIMEOUT)
redis.set("#{room_name}-card-id", card_id, ex: RESPONSE_TIMEOUT)
end
# Drop a room's classification session immediately.
def reset_state(room_name)
redis.del("#{room_name}-board-id")
redis.del("#{room_name}-card-id")
end
# Render a card for chat output as "<name> [label1, label2] [url]".
def card_to_string(card)
label_names = card.labels.map { |label| label.name }.join(", ")
format("%s [%s] [%s]", card.name, label_names, card.url)
end
# First card on the board whose name carries none of the type tags, or nil.
def detect_card_with_no_type(board)
type_tags = [BUG, MAINTENANCE, TECH, FEATURE]
board.cards.find do |card|
type_tags.none? { |tag| card.name.include?(tag) }
end
end
# First card on the board whose name carries none of the stream tags, or nil.
def detect_card_with_no_stream(board)
stream_tags = [CONTENT, DATA, COMMERCIAL]
board.cards.find do |card|
stream_tags.none? { |tag| card.name.include?(tag) }
end
end
# Tally the cards in a Trello list by type tag and stream tag.
#
# Returns a hash with :total, a count per type tag (:feature, :bug,
# :maintenance, :tech), :unknown_type for cards carrying none of the type
# tags, a count per stream tag (:content, :data, :commercial), and
# :unknown_stream for cards carrying none of the stream tags.
# Insertion order matches the original so `stats.inspect` output in
# #breakdown is unchanged.
#
# Improvement: the original mapped cards.map(&:name) nine separate times;
# the names are now computed once and tallied with count.
def list_stats(list)
names = list.cards.map(&:name)
type_tags = { feature: FEATURE, bug: BUG, maintenance: MAINTENANCE, tech: TECH }
stream_tags = { content: CONTENT, data: DATA, commercial: COMMERCIAL }
result = { total: names.size }
result[:feature] = names.count { |name| name.include?(FEATURE) }
result[:bug] = names.count { |name| name.include?(BUG) }
result[:maintenance] = names.count { |name| name.include?(MAINTENANCE) }
result[:tech] = names.count { |name| name.include?(TECH) }
result[:unknown_type] = names.count { |name| type_tags.values.none? { |tag| name.include?(tag) } }
result[:content] = names.count { |name| name.include?(CONTENT) }
result[:data] = names.count { |name| name.include?(DATA) }
result[:commercial] = names.count { |name| name.include?(COMMERCIAL) }
result[:unknown_stream] = names.count { |name| stream_tags.values.none? { |tag| name.include?(tag) } }
result
end
# Memoized Trello API client built from the handler config.
def trello_client
@client ||= Trello::Client.new(
developer_public_key: config.trello_public_key,
member_token: config.trello_member_token
)
end
Lita.register_handler(self)
end
end
end
|
module ActiveScaffold
# Minimal string-table localization: translations are registered per
# language with .define and looked up with ._.
module Localization
mattr_reader :lang
def self.lang=(value)
@@lang = standardize_name(value)
end
@@l10s = { 'en-us' => {} }
@@lang = 'en-us'
# Translate the string, then fill sprintf placeholders with args.
# Bug fix: args were previously passed to translate (which ignored them)
# instead of to sprintf, so "%s"-style placeholders were never filled in.
def self._(string_to_localize, *args)
sprintf translate(string_to_localize), *args
end
# Translation for the current language, or the input string itself when
# no translation is registered.
def self.translate(string_to_localize)
if @@l10s[@@lang].nil? or @@l10s[@@lang][string_to_localize].nil?
string_to_localize
else
@@l10s[@@lang][string_to_localize]
end
end
# Register translations: define('de-de') { |l| l["Hello"] = "Hallo" }
def self.define(lang = 'en-us')
lang = standardize_name(lang)
@@l10s[lang] ||= {}
yield @@l10s[lang]
end
# Normalize "en_US" / "en-US" style names to "en-us".
def self.standardize_name(value)
tmp = value.split("-") if value["-"]
tmp = value.split("_") if value["_"]
tmp[0].downcase + "-" + tmp[1].downcase
end
end
end
# Global shorthand: make _("...") available everywhere by delegating to
# ActiveScaffold::Localization._ (deliberate monkey-patch of Object).
class Object
def _(*args)
ActiveScaffold::Localization._(*args)
end
end
Fixed sprintf in Localization.
git-svn-id: b2ff2946fad280430023ee7ead77858f96a610fa@305 561dde7e-7729-0410-be8e-ef83869d6c7d
module ActiveScaffold
# Minimal string-table localization: translations are registered per
# language with .define and looked up with ._.
module Localization
mattr_reader :lang
def self.lang=(value)
@@lang = standardize_name(value)
end
@@l10s = { 'en-us' => {} }
@@lang = 'en-us'
# Translate the string, then fill sprintf placeholders with args.
def self._(string_to_localize, *args)
sprintf translate(string_to_localize), *args
end
# Translation for the current language, or the input string itself when
# no translation is registered.
def self.translate(string_to_localize)
if @@l10s[@@lang].nil? or @@l10s[@@lang][string_to_localize].nil?
string_to_localize
else
@@l10s[@@lang][string_to_localize]
end
end
# Register translations: define('de-de') { |l| l["Hello"] = "Hallo" }
def self.define(lang = 'en-us')
lang = standardize_name(lang)
@@l10s[lang] ||= {}
yield @@l10s[lang]
end
# Normalize "en_US" / "en-US" style names to "en-us".
# Bug fix: a value containing neither "-" nor "_" (e.g. "en") left tmp
# nil and raised NoMethodError; such values are now returned downcased.
def self.standardize_name(value)
return value.downcase unless value["-"] || value["_"]
tmp = value.split("-") if value["-"]
tmp = value.split("_") if value["_"]
tmp[0].downcase + "-" + tmp[1].downcase
end
end
end
class Object
def _(*args)
ActiveScaffold::Localization._(*args)
end
end
|
# transferred -> Impuestos trasladado
# detained -> Impuestos retenidos
# tax -> Nombre del Impuesto
# rate -> Tasa
# import -> Importe
#
module MCFDI
# Container for CFDI taxes: transferred (trasladados) and detained
# (retenidos) tax line items.
class Taxes < Base
attr_accessor :transferred, :detained
def initialize
@transferred = []
@detained = []
end
# return total of all transferred taxes.
def total_transferred
return 0 unless @transferred.any?
@transferred.map(&:import).reduce(:+)
end
# return total of all detained taxes.
def total_detained
return 0 unless @detained.any?
@detained.map(&:import).reduce(:+)
end
# return count of all taxes.
def count
@transferred.count + @detained.count
end
# Append transferred taxes; accepts an Array of Hash/Transferred, a Hash,
# or a Transferred.
# Bug fixes: the parameter was named `tax` while the body read an
# undefined `data` (NameError on every call), and array elements were
# checked against the misspelled constant `Tansferred`.
def transferred=(data)
if data.is_a? Array
data.map do |c|
c = Transferred.new(c) unless c.is_a? Transferred
@transferred << c
end
elsif data.is_a? Hash
@transferred << Transferred.new(data)
elsif data.is_a? Transferred
@transferred << data
end
@transferred
end
# Append detained taxes; accepts an Array of Hash/Detained, a Hash,
# or a Detained.
# Bug fixes: parameter/`data` mismatch as above, and array elements used
# `c << Detained.new(c)` (appending to the raw hash) instead of `c =`.
def detained=(data)
if data.is_a? Array
data.map do |c|
c = Detained.new(c) unless c.is_a? Detained
@detained << c
end
elsif data.is_a? Hash
@detained << Detained.new(data)
elsif data.is_a? Detained
@detained << data
end
@detained
end
# return original string of all transferred taxes.
def transferred_original_string
os = []
@transferred.each do |trans|
os += trans.original_string
end
os
end
# return original string of all detained taxes.
def detained_original_string
os = []
@detained.each do |detaind|
os += detaind.original_string
end
os
end
end
# A single transferred (trasladado) tax line: name, rate and amount.
class Transferred
attr_accessor :tax, :rate, :import
# Accepts a Hash of attributes and assigns each through its writer.
def initialize(attributes = {})
attributes.each do |name, value|
send("#{name}=", value)
end
end
# Rate is rounded to two decimal places on assignment.
def rate=(value)
@rate = format('%.2f', value).to_f
end
# Amount is rounded to two decimal places on assignment.
def import=(value)
@import = format('%.2f', value).to_f
end
# [tax, rate, import] triplet, in assignment order.
def original_string
[@tax, @rate, @import]
end
end
# A single detained (retenido) tax line: name, rate and amount.
class Detained
attr_accessor :tax, :rate, :import
# Accepts a Hash of attributes and assigns each through its writer.
def initialize(attributes = {})
attributes.each do |name, value|
send("#{name}=", value)
end
end
# Rate is rounded to two decimal places on assignment.
def rate=(value)
@rate = format('%.2f', value).to_f
end
# Amount is rounded to two decimal places on assignment.
def import=(value)
@import = format('%.2f', value).to_f
end
# [tax, rate, import] triplet, in assignment order.
def original_string
[@tax, @rate, @import]
end
end
end
fix errors
# transferred -> Impuestos trasladado
# detained -> Impuestos retenidos
# tax -> Nombre del Impuesto
# rate -> Tasa
# import -> Importe
#
module MCFDI
# Container for CFDI taxes: transferred (trasladados) and detained
# (retenidos) tax line items.
class Taxes < Base
attr_accessor :transferred, :detained
def initialize
@transferred = []
@detained = []
end
# return total of all transferred taxes.
def total_transferred
return 0 unless @transferred.any?
@transferred.map(&:import).reduce(:+)
end
# return total of all detained taxes.
def total_detained
return 0 unless @detained.any?
@detained.map(&:import).reduce(:+)
end
# return count of all taxes.
def count
@transferred.count + @detained.count
end
# Append transferred taxes; accepts an Array of Hash/Transferred, a Hash,
# or a Transferred.
# Bug fix: array elements were checked against the misspelled constant
# `Tansferred`, raising NameError whenever a Hash appeared in the array.
def transferred=(data)
if data.is_a? Array
data.map do |c|
c = Transferred.new(c) unless c.is_a? Transferred
@transferred << c
end
elsif data.is_a? Hash
@transferred << Transferred.new(data)
elsif data.is_a? Transferred
@transferred << data
end
@transferred
end
# Append detained taxes; accepts an Array of Hash/Detained, a Hash,
# or a Detained.
# Bug fix: array elements used `c << Detained.new(c)` — appending to the
# raw hash instead of replacing it — so raw hashes were stored verbatim.
def detained=(data)
if data.is_a? Array
data.map do |c|
c = Detained.new(c) unless c.is_a? Detained
@detained << c
end
elsif data.is_a? Hash
@detained << Detained.new(data)
elsif data.is_a? Detained
@detained << data
end
@detained
end
# return original string of all transferred taxes.
def transferred_original_string
os = []
@transferred.each do |trans|
os += trans.original_string
end
os
end
# return original string of all detained taxes.
def detained_original_string
os = []
@detained.each do |detaind|
os += detaind.original_string
end
os
end
end
class Transferred
attr_accessor :tax, :rate, :import
def initialize(args = {})
args.each { |key, value| send("#{key}=", value) }
end
def rate=(rate)
@rate = format('%.2f', rate).to_f
end
def import=(import)
@import = format('%.2f', import).to_f
end
def original_string
[@tax, @rate, @import]
end
end
class Detained
attr_accessor :tax, :rate, :import
def initialize(args = {})
args.each { |key, value| send("#{key}=", value) }
end
def rate=(rate)
@rate = format('%.2f', rate).to_f
end
def import=(import)
@import = format('%.2f', import).to_f
end
def original_string
[@tax, @rate, @import]
end
end
end
|
# frozen_string_literal: true
require "open3"
require "optparse"
require "rbconfig"
require "eventmachine"
require "thin"
module EventMachine
# Monkey patch fix for 10deb4
# See https://github.com/eventmachine/eventmachine/issues/569
def self.reactor_running?
(@reactor_running || false)
end
end
require "mail_catcher/version"
module MailCatcher extend self
autoload :Bus, "mail_catcher/bus"
autoload :Mail, "mail_catcher/mail"
autoload :Smtp, "mail_catcher/smtp"
autoload :Web, "mail_catcher/web"
def env
ENV.fetch("MAILCATCHER_ENV", "production")
end
def development?
env == "development"
end
def which?(command)
ENV["PATH"].split(File::PATH_SEPARATOR).any? do |directory|
File.executable?(File.join(directory, command.to_s))
end
end
def windows?
RbConfig::CONFIG["host_os"] =~ /mswin|mingw/
end
def browseable?
windows? or which? "open"
end
def browse url
if windows?
system "start", "/b", url
elsif which? "open"
system "open", url
end
end
def log_exception(message, context, exception)
gems_paths = (Gem.path | [Gem.default_dir]).map { |path| Regexp.escape(path) }
gems_regexp = %r{(?:#{gems_paths.join("|")})/gems/([^/]+)-([\w.]+)/(.*)}
gems_replace = '\1 (\2) \3'
puts "*** #{message}: #{context.inspect}"
puts " Exception: #{exception}"
puts " Backtrace:", *exception.backtrace.map { |line| " #{line.sub(gems_regexp, gems_replace)}" }
puts " Please submit this as an issue at https://github.com/sj26/mailcatcher/issues"
end
@@defaults = {
:smtp_ip => "127.0.0.1",
:smtp_port => "1025",
:http_ip => "127.0.0.1",
:http_port => "1080",
:http_path => "/",
:messages_limit => nil,
:verbose => false,
:daemon => !windows?,
:browse => false,
:quit => true,
}
def options
@@options
end
def quittable?
options[:quit]
end
# Parse command-line options into a new options hash.
#
# arguments - array of CLI tokens (defaults to ARGV); consumed destructively
#             by OptionParser#parse!, matching the previous ARGV behaviour.
# defaults  - base options hash (duplicated, never mutated).
#
# Bug fixes: the defaults parameter previously read the never-assigned
# @defaults (always nil) and was then ignored in favour of @@defaults, and
# the arguments parameter was ignored because parse! was invoked with no
# receiver arguments (always parsing ARGV). Both parameters are now
# honoured. Also made --version print "MailCatcher v..." to match the
# banner and run! output.
def parse! arguments=ARGV, defaults=@@defaults
defaults.dup.tap do |options|
OptionParser.new do |parser|
parser.banner = "Usage: mailcatcher [options]"
parser.version = VERSION
parser.separator ""
parser.separator "MailCatcher v#{VERSION}"
parser.separator ""
parser.on("--ip IP", "Set the ip address of both servers") do |ip|
options[:smtp_ip] = options[:http_ip] = ip
end
parser.on("--smtp-ip IP", "Set the ip address of the smtp server") do |ip|
options[:smtp_ip] = ip
end
parser.on("--smtp-port PORT", Integer, "Set the port of the smtp server") do |port|
options[:smtp_port] = port
end
parser.on("--http-ip IP", "Set the ip address of the http server") do |ip|
options[:http_ip] = ip
end
parser.on("--http-port PORT", Integer, "Set the port address of the http server") do |port|
options[:http_port] = port
end
parser.on("--messages-limit COUNT", Integer, "Only keep up to COUNT most recent messages") do |count|
options[:messages_limit] = count
end
parser.on("--http-path PATH", String, "Add a prefix to all HTTP paths") do |path|
clean_path = Rack::Utils.clean_path_info("/#{path}")
options[:http_path] = clean_path
end
parser.on("--no-quit", "Don't allow quitting the process") do
options[:quit] = false
end
unless windows?
parser.on("-f", "--foreground", "Run in the foreground") do
options[:daemon] = false
end
end
if browseable?
parser.on("-b", "--browse", "Open web browser") do
options[:browse] = true
end
end
parser.on("-v", "--verbose", "Be more verbose") do
options[:verbose] = true
end
parser.on_tail("-h", "--help", "Display this help information") do
puts parser
exit
end
parser.on_tail("--version", "Display the current version") do
puts "MailCatcher v#{VERSION}"
exit
end
end.parse!(arguments)
end
end
def run! options=nil
# If we are passed options, fill in the blanks
options &&= @@defaults.merge options
# Otherwise, parse them from ARGV
options ||= parse!
# Stash them away for later
@@options = options
# If we're running in the foreground sync the output.
unless options[:daemon]
$stdout.sync = $stderr.sync = true
end
puts "Starting MailCatcher v#{VERSION}"
Thin::Logging.debug = development?
Thin::Logging.silent = !development?
# One EventMachine loop...
EventMachine.run do
# Set up an SMTP server to run within EventMachine
rescue_port options[:smtp_port] do
EventMachine.start_server options[:smtp_ip], options[:smtp_port], Smtp
puts "==> #{smtp_url}"
end
# Let Thin set itself up inside our EventMachine loop
# (Skinny/WebSockets just works on the inside)
rescue_port options[:http_port] do
Thin::Server.start(options[:http_ip], options[:http_port], Web)
puts "==> #{http_url}"
end
# Open the web browser before detatching console
if options[:browse]
EventMachine.next_tick do
browse http_url
end
end
# Daemonize, if we should, but only after the servers have started.
if options[:daemon]
EventMachine.next_tick do
if quittable?
puts "*** MailCatcher runs as a daemon by default. Go to the web interface to quit."
else
puts "*** MailCatcher is now running as a daemon that cannot be quit."
end
Process.daemon
end
end
end
end
def quit!
EventMachine.next_tick { EventMachine.stop_event_loop }
end
protected
def smtp_url
"smtp://#{@@options[:smtp_ip]}:#{@@options[:smtp_port]}"
end
def http_url
"http://#{@@options[:http_ip]}:#{@@options[:http_port]}#{@@options[:http_path]}"
end
# Run the given server-startup block, translating EventMachine's opaque
# "no acceptor" RuntimeError into a friendly port-in-use message; any
# other RuntimeError is re-raised.
# Tidied: a `def` body supports rescue directly, so the redundant
# begin/end was dropped, and `exit -1` gained parentheses to avoid the
# ambiguous-argument parse warning.
def rescue_port port
yield
# XXX: EventMachine only spits out RuntimeError with a string description
rescue RuntimeError
if $!.to_s =~ /\bno acceptor\b/
puts "~~> ERROR: Something's using port #{port}. Are you already running MailCatcher?"
puts "==> #{smtp_url}"
puts "==> #{http_url}"
exit(-1)
else
raise
end
end
end
Make 00version output consistent
# frozen_string_literal: true
require "open3"
require "optparse"
require "rbconfig"
require "eventmachine"
require "thin"
module EventMachine
# Monkey patch fix for 10deb4
# See https://github.com/eventmachine/eventmachine/issues/569
def self.reactor_running?
(@reactor_running || false)
end
end
require "mail_catcher/version"
module MailCatcher extend self
autoload :Bus, "mail_catcher/bus"
autoload :Mail, "mail_catcher/mail"
autoload :Smtp, "mail_catcher/smtp"
autoload :Web, "mail_catcher/web"
def env
ENV.fetch("MAILCATCHER_ENV", "production")
end
def development?
env == "development"
end
def which?(command)
ENV["PATH"].split(File::PATH_SEPARATOR).any? do |directory|
File.executable?(File.join(directory, command.to_s))
end
end
def windows?
RbConfig::CONFIG["host_os"] =~ /mswin|mingw/
end
def browseable?
windows? or which? "open"
end
def browse url
if windows?
system "start", "/b", url
elsif which? "open"
system "open", url
end
end
def log_exception(message, context, exception)
gems_paths = (Gem.path | [Gem.default_dir]).map { |path| Regexp.escape(path) }
gems_regexp = %r{(?:#{gems_paths.join("|")})/gems/([^/]+)-([\w.]+)/(.*)}
gems_replace = '\1 (\2) \3'
puts "*** #{message}: #{context.inspect}"
puts " Exception: #{exception}"
puts " Backtrace:", *exception.backtrace.map { |line| " #{line.sub(gems_regexp, gems_replace)}" }
puts " Please submit this as an issue at https://github.com/sj26/mailcatcher/issues"
end
@@defaults = {
:smtp_ip => "127.0.0.1",
:smtp_port => "1025",
:http_ip => "127.0.0.1",
:http_port => "1080",
:http_path => "/",
:messages_limit => nil,
:verbose => false,
:daemon => !windows?,
:browse => false,
:quit => true,
}
def options
@@options
end
def quittable?
options[:quit]
end
# Parse command-line options into a new options hash.
#
# arguments - array of CLI tokens (defaults to ARGV); consumed destructively
#             by OptionParser#parse!, matching the previous ARGV behaviour.
# defaults  - base options hash (duplicated, never mutated).
#
# Bug fixes: the defaults parameter previously read the never-assigned
# @defaults (always nil) and was then ignored in favour of @@defaults, and
# the arguments parameter was ignored because parse! was invoked with no
# receiver arguments (always parsing ARGV). Both parameters are now honoured.
def parse! arguments=ARGV, defaults=@@defaults
defaults.dup.tap do |options|
OptionParser.new do |parser|
parser.banner = "Usage: mailcatcher [options]"
parser.version = VERSION
parser.separator ""
parser.separator "MailCatcher v#{VERSION}"
parser.separator ""
parser.on("--ip IP", "Set the ip address of both servers") do |ip|
options[:smtp_ip] = options[:http_ip] = ip
end
parser.on("--smtp-ip IP", "Set the ip address of the smtp server") do |ip|
options[:smtp_ip] = ip
end
parser.on("--smtp-port PORT", Integer, "Set the port of the smtp server") do |port|
options[:smtp_port] = port
end
parser.on("--http-ip IP", "Set the ip address of the http server") do |ip|
options[:http_ip] = ip
end
parser.on("--http-port PORT", Integer, "Set the port address of the http server") do |port|
options[:http_port] = port
end
parser.on("--messages-limit COUNT", Integer, "Only keep up to COUNT most recent messages") do |count|
options[:messages_limit] = count
end
parser.on("--http-path PATH", String, "Add a prefix to all HTTP paths") do |path|
clean_path = Rack::Utils.clean_path_info("/#{path}")
options[:http_path] = clean_path
end
parser.on("--no-quit", "Don't allow quitting the process") do
options[:quit] = false
end
unless windows?
parser.on("-f", "--foreground", "Run in the foreground") do
options[:daemon] = false
end
end
if browseable?
parser.on("-b", "--browse", "Open web browser") do
options[:browse] = true
end
end
parser.on("-v", "--verbose", "Be more verbose") do
options[:verbose] = true
end
parser.on_tail("-h", "--help", "Display this help information") do
puts parser
exit
end
parser.on_tail("--version", "Display the current version") do
puts "MailCatcher v#{VERSION}"
exit
end
end.parse!(arguments)
end
end
def run! options=nil
# If we are passed options, fill in the blanks
options &&= @@defaults.merge options
# Otherwise, parse them from ARGV
options ||= parse!
# Stash them away for later
@@options = options
# If we're running in the foreground sync the output.
unless options[:daemon]
$stdout.sync = $stderr.sync = true
end
puts "Starting MailCatcher v#{VERSION}"
Thin::Logging.debug = development?
Thin::Logging.silent = !development?
# One EventMachine loop...
EventMachine.run do
# Set up an SMTP server to run within EventMachine
rescue_port options[:smtp_port] do
EventMachine.start_server options[:smtp_ip], options[:smtp_port], Smtp
puts "==> #{smtp_url}"
end
# Let Thin set itself up inside our EventMachine loop
# (Skinny/WebSockets just works on the inside)
rescue_port options[:http_port] do
Thin::Server.start(options[:http_ip], options[:http_port], Web)
puts "==> #{http_url}"
end
# Open the web browser before detatching console
if options[:browse]
EventMachine.next_tick do
browse http_url
end
end
# Daemonize, if we should, but only after the servers have started.
if options[:daemon]
EventMachine.next_tick do
if quittable?
puts "*** MailCatcher runs as a daemon by default. Go to the web interface to quit."
else
puts "*** MailCatcher is now running as a daemon that cannot be quit."
end
Process.daemon
end
end
end
end
def quit!
EventMachine.next_tick { EventMachine.stop_event_loop }
end
protected
def smtp_url
"smtp://#{@@options[:smtp_ip]}:#{@@options[:smtp_port]}"
end
def http_url
"http://#{@@options[:http_ip]}:#{@@options[:http_port]}#{@@options[:http_path]}"
end
# Run the given server-startup block, translating EventMachine's opaque
# "no acceptor" RuntimeError into a friendly port-in-use message; any
# other RuntimeError is re-raised.
# Tidied: a `def` body supports rescue directly, so the redundant
# begin/end was dropped, and `exit -1` gained parentheses to avoid the
# ambiguous-argument parse warning.
def rescue_port port
yield
# XXX: EventMachine only spits out RuntimeError with a string description
rescue RuntimeError
if $!.to_s =~ /\bno acceptor\b/
puts "~~> ERROR: Something's using port #{port}. Are you already running MailCatcher?"
puts "==> #{smtp_url}"
puts "==> #{http_url}"
exit(-1)
else
raise
end
end
end
|
# encoding: UTF-8
require "thor"
module Mango
class Runner < Thor
include Thor::Actions
add_runtime_options!
source_root File.join(File.dirname(__FILE__), "templates")
desc "create /path/to/your/app",
"Creates a new Mango application with a default directory structure and configuration at the path you specify."
def create(destination)
self.destination_root = destination
empty_directory(destination)
template("README.md", File.join(self.destination_root, "README.md"))
template("config.ru", File.join(self.destination_root, "config.ru"))
build_content_path
build_themes_path
end
###############################################################################################
protected
def build_content_path
content_root = File.join(self.destination_root, "content")
empty_directory(content_root)
template("content/index.md", File.join(content_root, "index.md"))
end
def build_themes_path
themes_root = File.join(self.destination_root, "themes")
empty_directory(themes_root)
default_root = File.join(themes_root, "default")
empty_directory(default_root)
build_public_path default_root
build_styles_path default_root
build_views_path default_root
end
###############################################################################################
protected
def build_public_path(destination)
public_root = File.join(destination, "public")
empty_directory(public_root)
create_file(File.join(public_root, "favicon.ico"))
template("themes/default/public/robots.txt", File.join(public_root, "robots.txt"))
build_public_images_path public_root
build_public_javascripts_path public_root
build_public_styles_path public_root
end
def build_public_images_path(destination)
public_images_root = File.join(destination, "images")
empty_directory(public_images_root)
template("themes/default/public/images/particles.gif", File.join(public_images_root, "particles.gif"))
end
def build_public_javascripts_path(destination)
public_javascripts_root = File.join(destination, "javascripts")
empty_directory(public_javascripts_root)
template("themes/default/public/javascripts/fireworks.js", File.join(public_javascripts_root, "fireworks.js"))
template("themes/default/public/javascripts/timer.js", File.join(public_javascripts_root, "timer.js"))
end
# Copy the default theme's public stylesheets into the new app.
def build_public_styles_path(destination)
public_styles_root = File.join(destination, "styles")
# Bug fix: previously called empty_directory(public_javascripts_root),
# which is undefined in this method and raised NameError instead of
# creating the styles directory.
empty_directory(public_styles_root)
template("themes/default/public/styles/fireworks.css", File.join(public_styles_root, "fireworks.css"))
template("themes/default/public/styles/reset.css", File.join(public_styles_root, "reset.css"))
end
def build_styles_path(destination)
styles_root = File.join(destination, "styles")
empty_directory(styles_root)
template("themes/default/styles/screen.sass", File.join(styles_root, "screen.sass"))
end
def build_views_path(destination)
views_root = File.join(destination, "views")
empty_directory(views_root)
template("themes/default/views/404.haml", File.join(views_root, "404.haml"))
template("themes/default/views/layout.haml", File.join(views_root, "layout.haml"))
template("themes/default/views/page.haml", File.join(views_root, "page.haml"))
end
end
end
lib/mango/runner.rb
* Fixed bug
# encoding: UTF-8
require "thor"
module Mango
class Runner < Thor
include Thor::Actions
add_runtime_options!
source_root File.join(File.dirname(__FILE__), "templates")
desc "create /path/to/your/app",
"Creates a new Mango application with a default directory structure and configuration at the path you specify."
def create(destination)
self.destination_root = destination
empty_directory(destination)
template("README.md", File.join(self.destination_root, "README.md"))
template("config.ru", File.join(self.destination_root, "config.ru"))
build_content_path
build_themes_path
end
###############################################################################################
protected
def build_content_path
content_root = File.join(self.destination_root, "content")
empty_directory(content_root)
template("content/index.md", File.join(content_root, "index.md"))
end
def build_themes_path
themes_root = File.join(self.destination_root, "themes")
empty_directory(themes_root)
default_root = File.join(themes_root, "default")
empty_directory(default_root)
build_public_path default_root
build_styles_path default_root
build_views_path default_root
end
###############################################################################################
protected
def build_public_path(destination)
public_root = File.join(destination, "public")
empty_directory(public_root)
create_file(File.join(public_root, "favicon.ico"))
template("themes/default/public/robots.txt", File.join(public_root, "robots.txt"))
build_public_images_path public_root
build_public_javascripts_path public_root
build_public_styles_path public_root
end
def build_public_images_path(destination)
public_images_root = File.join(destination, "images")
empty_directory(public_images_root)
template("themes/default/public/images/particles.gif", File.join(public_images_root, "particles.gif"))
end
def build_public_javascripts_path(destination)
public_javascripts_root = File.join(destination, "javascripts")
empty_directory(public_javascripts_root)
template("themes/default/public/javascripts/fireworks.js", File.join(public_javascripts_root, "fireworks.js"))
template("themes/default/public/javascripts/timer.js", File.join(public_javascripts_root, "timer.js"))
end
def build_public_styles_path(destination)
public_styles_root = File.join(destination, "styles")
empty_directory(public_styles_root)
template("themes/default/public/styles/fireworks.css", File.join(public_styles_root, "fireworks.css"))
template("themes/default/public/styles/reset.css", File.join(public_styles_root, "reset.css"))
end
def build_styles_path(destination)
styles_root = File.join(destination, "styles")
empty_directory(styles_root)
template("themes/default/styles/screen.sass", File.join(styles_root, "screen.sass"))
end
def build_views_path(destination)
views_root = File.join(destination, "views")
empty_directory(views_root)
template("themes/default/views/404.haml", File.join(views_root, "404.haml"))
template("themes/default/views/layout.haml", File.join(views_root, "layout.haml"))
template("themes/default/views/page.haml", File.join(views_root, "page.haml"))
end
end
end
|
module Meem
VERSION = "1.0.0"
end
Bump version to 1.0.1
module Meem
VERSION = "1.0.1"
end
|
module MGit
VERSION = '0.2.5'
end
Bump version to 0.2.6
module MGit
VERSION = '0.2.6'
end
|
class Mill
VERSION = '0.0.6'
end
Bump version to 0.1.
class Mill
VERSION = '0.1'
end
|
# -*- encoding : utf-8 -*-
#A module to tell something to a pseudo next time the bot see it
linael :tell do
help [
"A module to tell something to somebody next time the bot see her",
" ",
"#####Function#####",
"!tell somebody : something => tell something to somebody"
]
on_init do
@tell_list = Hash.new()
end
#add a tell
on :cmd, :tell_add, /^!tell/ do |msg,options|
# Bug fix: IRC nicks are case-insensitive, but lookups were case-sensitive,
# so "Bob" never received a tell stored for "bob". Keys are now downcased
# on both store and lookup.
who_tell = options.who.downcase.gsub(":","")
@tell_list[who_tell] ||= []
@tell_list[who_tell] = @tell_list[who_tell] << [options.from_who,options.all.gsub(/^.*:/,"")]
answer(msg,"Oki doki! I'll tell this to #{who_tell} :)")
end
#tell if in tell_list
# Deliver pending tells when the target joins, changes nick, or speaks.
[:join,:nick,:msg].each do |type|
on type, "tell_on_#{type}" do |msg|
who = msg.who.downcase if type == :join
who = msg.newNick.downcase if type == :nick
who = msg.who.downcase if type == :msg
if @tell_list.has_key?(who)
@tell_list[who].each do |message|
talk(who,"Hey! #{message[0]} told me this: #{message[1]}")
end
@tell_list.delete(who)
end
end
end
end
moar downcase
# -*- encoding : utf-8 -*-
#A module to tell something to a pseudo next time the bot see it
linael :tell do
help [
"A module to tell something to somebody next time the bot see her",
" ",
"#####Function#####",
"!tell somebody : something => tell something to somebody"
]
on_init do
@tell_list = Hash.new()
end
#add a tell
on :cmd, :tell_add, /^!tell/ do |msg,options|
who_tell = options.who.downcase.gsub(":","")
@tell_list[who_tell] ||= []
@tell_list[who_tell] = @tell_list[who_tell] << [options.from_who,options.all.gsub(/^.*:/,"")]
answer(msg,"Oki doki! I'll tell this to #{who_tell} :)")
end
#tell if in tell_list
[:join,:nick,:msg].each do |type|
on type, "tell_on_#{type}" do |msg|
who = msg.who.downcase if type == :join
who = msg.newNick.downcase if type == :nick
who = msg.who.downcase if type == :msg
if @tell_list.has_key?(who)
@tell_list[who].each do |message|
talk(who,"Hey! #{message[0]} told me this: #{message[1]}")
end
@tell_list.delete(who)
end
end
end
end
|
module Moip2
  # HTTP client for the Moip v2 payments API, built on HTTParty.
  # Picks the sandbox or production base URI and wraps GET/POST/PUT
  # with JSON bodies and automatic camelCase/snake_case key conversion.
  class Client
    include HTTParty
    attr_reader :env, :auth, :uri
    # env::  :sandbox or :production (anything else behaves like sandbox)
    # auth:: object responding to #header, used for the Authorization header
    # opts:: extra HTTParty request options merged into every call
    def initialize(env = :sandbox, auth = nil, opts = {})
      @env, @auth, @opts = env.to_sym, auth, opts
      @uri = get_base_uri
      # NOTE(review): base_uri is HTTParty *class*-level state, so the
      # most recently created Client wins for every instance.
      self.class.base_uri @uri
    end
    def sandbox?
      env == :sandbox
    end
    def production?
      env == :production
    end
    # Request options with the JSON content type and auth header merged in.
    # NOTE(review): mutates @opts[:headers] in place on every call
    # (idempotently, since the same two keys are merged each time).
    def opts
      opts = @opts
      opts[:headers] ||= {}
      opts[:headers].merge!(
        {
          "Content-Type" => "application/json",
          "Authorization" => auth.header
        }
      )
      opts
    end
    # POST +resource+ (a Hash) to +path+ as camelCased JSON.
    def post(path, resource)
      options = opts().merge(body: convert_hash_keys_to(:camel_case, resource).to_json)
      resp = self.class.post path, options
      create_response resp
    end
    # PUT +resource+ (a Hash) to +path+ as camelCased JSON.
    def put(path, resource)
      options = opts().merge(body: convert_hash_keys_to(:camel_case, resource).to_json)
      resp = self.class.put path, options
      create_response resp
    end
    # GET +path+ and wrap the parsed response.
    def get(path)
      resp = self.class.get path, opts()
      create_response resp
    end
    private
    # Base URI: overridable via ENV["base_uri"], otherwise chosen by env.
    def get_base_uri
      return ENV["base_uri"] if ENV["base_uri"]
      if production?
        "https://api.moip.com.br"
      else
        "https://sandbox.moip.com.br"
      end
    end
    # Wrap an HTTParty response in a Response with snake_cased keys;
    # raises NotFoundError on HTTP 404.
    def create_response(resp)
      raise NotFoundError, "Resource not found" if resp.code == 404
      Response.new resp, convert_hash_keys_to(:snake_case, resp.parsed_response)
    end
    # NOTE(review): appears unused -- requests authenticate via auth.header
    # in #opts; this also indexes @auth like a Hash, unlike the rest of
    # the class which calls methods on it.  Verify before removing.
    def basic_auth
      { username: @auth[:token], password: @auth[:secret]}
    end
    # Recursively convert hash keys with :camel_case or :snake_case,
    # symbolizing every converted key; arrays are walked element-wise.
    def convert_hash_keys_to(conversion, value)
      case value
      when Array
        value.map { |v| convert_hash_keys_to(conversion, v) }
      when Hash
        Hash[value.map { |k, v| [send(conversion, k).to_sym, convert_hash_keys_to(conversion, v)] }]
      else
        value
      end
    end
    # snake_case -> lowerCamelCase; returns already-camelCased words as-is.
    def camel_case(str)
      return str.to_s if str.to_s !~ /_/ && str.to_s =~ /[A-Z]+.*/
      words = str.to_s.split('_')
      (words[0..0] << words[1..-1].map{|e| e.capitalize}).join
    end
    # CamelCase -> snake_case.
    # NOTE(review): expects a String; a Symbol key would raise NoMethodError.
    def snake_case(str)
      str.gsub(/::/, '/').
        gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
        gsub(/([a-z\d])([A-Z])/,'\1_\2').
        tr("-", "_").
        downcase
    end
  end
end
Set HTTParty's ssl_version option to TLS v1.2 so every API request uses modern TLS.
module Moip2
  # HTTP client for the Moip v2 payments API, built on HTTParty.
  # Picks the sandbox or production base URI and wraps GET/POST/PUT
  # with JSON bodies and automatic camelCase/snake_case key conversion.
  class Client
    include HTTParty
    # FIX: the original `ssl_version = :TLSv1_2` only created a local
    # variable inside the class body -- a silent no-op, leaving the TLS
    # version unpinned.  The HTTParty class option must be set by
    # *calling* the DSL method.
    ssl_version :TLSv1_2
    attr_reader :env, :auth, :uri
    # env::  :sandbox or :production (anything else behaves like sandbox)
    # auth:: object responding to #header, used for the Authorization header
    # opts:: extra HTTParty request options merged into every call
    def initialize(env = :sandbox, auth = nil, opts = {})
      @env, @auth, @opts = env.to_sym, auth, opts
      @uri = get_base_uri
      # NOTE(review): base_uri is HTTParty class-level state; the most
      # recently created Client wins for every instance.
      self.class.base_uri @uri
    end
    def sandbox?
      env == :sandbox
    end
    def production?
      env == :production
    end
    # Request options with the JSON content type and auth header merged in.
    def opts
      opts = @opts
      opts[:headers] ||= {}
      opts[:headers].merge!(
        {
          "Content-Type" => "application/json",
          "Authorization" => auth.header
        }
      )
      opts
    end
    # POST +resource+ (a Hash) to +path+ as camelCased JSON.
    def post(path, resource)
      options = opts().merge(body: convert_hash_keys_to(:camel_case, resource).to_json)
      resp = self.class.post path, options
      create_response resp
    end
    # PUT +resource+ (a Hash) to +path+ as camelCased JSON.
    def put(path, resource)
      options = opts().merge(body: convert_hash_keys_to(:camel_case, resource).to_json)
      resp = self.class.put path, options
      create_response resp
    end
    # GET +path+ and wrap the parsed response.
    def get(path)
      resp = self.class.get path, opts()
      create_response resp
    end
    private
    # Base URI: overridable via ENV["base_uri"], otherwise chosen by env.
    def get_base_uri
      return ENV["base_uri"] if ENV["base_uri"]
      if production?
        "https://api.moip.com.br"
      else
        "https://sandbox.moip.com.br"
      end
    end
    # Wrap an HTTParty response in a Response with snake_cased keys;
    # raises NotFoundError on HTTP 404.
    def create_response(resp)
      raise NotFoundError, "Resource not found" if resp.code == 404
      Response.new resp, convert_hash_keys_to(:snake_case, resp.parsed_response)
    end
    # NOTE(review): appears unused -- requests authenticate via auth.header
    # in #opts.  Verify against callers before removing.
    def basic_auth
      { username: @auth[:token], password: @auth[:secret]}
    end
    # Recursively convert hash keys with :camel_case or :snake_case,
    # symbolizing every converted key; arrays are walked element-wise.
    def convert_hash_keys_to(conversion, value)
      case value
      when Array
        value.map { |v| convert_hash_keys_to(conversion, v) }
      when Hash
        Hash[value.map { |k, v| [send(conversion, k).to_sym, convert_hash_keys_to(conversion, v)] }]
      else
        value
      end
    end
    # snake_case -> lowerCamelCase; returns already-camelCased words as-is.
    def camel_case(str)
      return str.to_s if str.to_s !~ /_/ && str.to_s =~ /[A-Z]+.*/
      words = str.to_s.split('_')
      (words[0..0] << words[1..-1].map{|e| e.capitalize}).join
    end
    # CamelCase -> snake_case.
    def snake_case(str)
      str.gsub(/::/, '/').
        gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
        gsub(/([a-z\d])([A-Z])/,'\1_\2').
        tr("-", "_").
        downcase
    end
  end
end
|
require 'active_support'
require 'mongoid'
require 'mongoid/paranoia'
require 'mongoid/versioning'
module Mongoid
  # Adds undo/redo support to a Mongoid document by combining soft
  # deletes (Mongoid::Paranoia) with document versioning
  # (Mongoid::Versioning).
  module Undo
    extend ActiveSupport::Concern
    include Mongoid::Paranoia
    include Mongoid::Versioning
    # NOTE(review): Mongoid::Callbacks was renamed Mongoid::Interceptable
    # in later Mongoid releases; this include breaks on those versions.
    include Mongoid::Callbacks
    included do
      # Last persistence action (:create/:update/:destroy); excluded
      # from versioning so it never pollutes the version history.
      field :action, type: Symbol, versioned: false
      index deleted_at: 1
      # After each persistence action, stamp `action` directly in the
      # collection (bypassing callbacks/validations), then reload while
      # preserving the in-memory @version captured by after_find.
      [:create, :update, :destroy].each do |action|
        name = :"set_action_after_#{action}"
        define_method name do
          collection.find(atomic_selector).update('$set' => { action: action })
          version = self.instance_variable_get(:@version)
          reload
          self.instance_variable_set :@version, version unless version.nil?
        end
        set_callback action, :after, name
      end
      # Remember the version the document had when loaded, so
      # undoable?/redoable? can compare against the persisted version.
      after_find do
        @version = read_attribute(:version)
      end
      define_model_callbacks :undo, :redo
    end
    # Reverse the last action: toggles soft-delete for creates/destroys
    # (restore if currently deleted, delete otherwise), or rolls an
    # update back to the previously stored version.
    def undo
      run_callbacks __callee__ do
        case action
        when :create, :destroy
          deleted_at.present? ? restore : delete
        when :update
          retrieve
        end
      end
    end
    # Redo is the same toggle/rollback operation applied again.
    alias_method :redo, :undo
    # Whether there is anything to undo.  Creates/destroys can always be
    # toggled; updates only when the persisted version is newer than the
    # one captured at load time.  Returns nil for any other action.
    def undoable?
      case action
      when :create, :destroy
        true
      when :update
        read_attribute(:version).to_i > @version
      end
    end
    alias_method :redoable?, :undoable?
    private
    # Roll the document back to its most recent stored version, keeping
    # the current version counter and updated_at untouched.
    def retrieve
      update_attributes(versions.last.versioned_attributes.except('version', 'updated_at'))
    end
  end
end
Mongoid::Callbacks has been renamed to Mongoid::Interceptable in recent Mongoid releases; include the module under its new name.
require 'active_support'
require 'mongoid'
require 'mongoid/paranoia'
require 'mongoid/versioning'
module Mongoid
module Undo
extend ActiveSupport::Concern
include Mongoid::Paranoia
include Mongoid::Versioning
include Mongoid::Interceptable
included do
field :action, type: Symbol, versioned: false
index deleted_at: 1
[:create, :update, :destroy].each do |action|
name = :"set_action_after_#{action}"
define_method name do
collection.find(atomic_selector).update('$set' => { action: action })
version = self.instance_variable_get(:@version)
reload
self.instance_variable_set :@version, version unless version.nil?
end
set_callback action, :after, name
end
after_find do
@version = read_attribute(:version)
end
define_model_callbacks :undo, :redo
end
def undo
run_callbacks __callee__ do
case action
when :create, :destroy
deleted_at.present? ? restore : delete
when :update
retrieve
end
end
end
alias_method :redo, :undo
def undoable?
case action
when :create, :destroy
true
when :update
read_attribute(:version).to_i > @version
end
end
alias_method :redoable?, :undoable?
private
def retrieve
update_attributes(versions.last.versioned_attributes.except('version', 'updated_at'))
end
end
end
|
module MoSQL
  class SchemaError < StandardError; end;

  # Translates the collection-map configuration (parsed YAML) into a
  # normalized schema, creates the corresponding SQL tables, and
  # transforms MongoDB documents into SQL rows / COPY input.
  class Schema
    include MoSQL::Logging

    # Normalize a list of column specs into [{:source,:name,:type}, ...].
    # Two entry shapes are accepted:
    #   new format: { name => ..., :source => "...", :type => "..." }
    #   old format: { name => type }
    # FIX: previously used ent.delete(:source)/ent.delete(:type), which
    # mutated the caller's configuration in place -- among other things
    # this broke sharing one schema across multiple DBs via YAML
    # references.  Reads keys non-destructively instead.
    def to_array(lst)
      lst.map do |ent|
        if ent.is_a?(Hash) && ent[:source].is_a?(String) && ent[:type].is_a?(String)
          # new configuration format: the column name is whichever key
          # is not :source/:type, independent of key ordering.
          {
            :source => ent[:source],
            :type   => ent[:type],
            :name   => (ent.keys - [:source, :type]).first,
          }
        elsif ent.is_a?(Hash) && ent.keys.length == 1 && ent.values.first.is_a?(String)
          # old configuration format: a single { name => type } pair.
          {
            :source => ent.first.first,
            :name   => ent.first.first,
            :type   => ent.first.last
          }
        else
          raise "Invalid ordered hash entry #{ent.inspect}"
        end
      end
    end

    # Raise if two columns in +spec+ map the same Mongo source field.
    def check_columns!(ns, spec)
      seen = Set.new
      spec[:columns].each do |col|
        if seen.include?(col[:source])
          raise "Duplicate source #{col[:source]} in column definition #{col[:name]} for #{ns}."
        end
        seen.add(col[:source])
      end
    end

    # Normalize one collection spec (columns to array form) and validate it.
    def parse_spec(ns, spec)
      out = spec.dup
      out[:columns] = to_array(spec[:columns])
      check_columns!(ns, out)
      out
    end

    # map:: { dbname => { collection_name => spec } }
    def initialize(map)
      @map = {}
      map.each do |dbname, db|
        @map[dbname] ||= {}
        db.each do |cname, spec|
          @map[dbname][cname] = parse_spec("#{dbname}.#{cname}", spec)
        end
      end
    end

    # Create one SQL table per mapped collection.  With clobber, drops
    # and recreates existing tables; otherwise creates only if missing.
    def create_schema(db, clobber=false)
      @map.values.map(&:values).flatten.each do |collection|
        meta = collection[:meta]
        log.info("Creating table '#{meta[:table]}'...")
        db.send(clobber ? :create_table! : :create_table?, meta[:table]) do
          collection[:columns].each do |col|
            column col[:name], col[:type]
            # The column mapped from Mongo's _id doubles as the primary key.
            if col[:source].to_sym == :_id
              primary_key [col[:name].to_sym]
            end
          end
          if meta[:extra_props]
            column '_extra_props', 'TEXT'
          end
        end
      end
    end

    # Look up the schema for a "db.collection" namespace; nil if unmapped.
    def find_ns(ns)
      db, collection = ns.split(".")
      schema = (@map[db] || {})[collection]
      if schema.nil?
        log.debug("No mapping for ns: #{ns}")
        return nil
      end
      schema
    end

    # Like find_ns, but raises SchemaError for unmapped namespaces.
    def find_ns!(ns)
      schema = find_ns(ns)
      raise SchemaError.new("No mapping for namespace: #{ns}") if schema.nil?
      schema
    end

    # Remove and return the value at dotted path +dotted+ from +obj+,
    # pruning intermediate hashes that are left empty.  Returns nil when
    # an intermediate value is missing or not a hash.
    def fetch_and_delete_dotted(obj, dotted)
      pieces = dotted.split(".")
      breadcrumbs = []
      while pieces.length > 1
        key = pieces.shift
        breadcrumbs << [obj, key]
        obj = obj[key]
        return nil unless obj.is_a?(Hash)
      end
      val = obj.delete(pieces.first)
      # Walk back up, dropping now-empty intermediate hashes.
      breadcrumbs.reverse.each do |parent, key|
        parent.delete(key) if parent[key].empty?
      end
      val
    end

    # Convert one Mongo document into an array of column values in
    # schema order (stringifying BSON binary/ObjectId values), plus a
    # JSON _extra_props column of the leftovers when configured.
    def transform(ns, obj, schema=nil)
      schema ||= find_ns!(ns)
      obj = obj.dup
      row = []
      schema[:columns].each do |col|
        v = fetch_and_delete_dotted(obj, col[:source])
        case v
        when BSON::Binary, BSON::ObjectId
          v = v.to_s
        end
        row << v
      end
      if schema[:meta][:extra_props]
        # Kludgily delete binary blobs from _extra_props -- they may
        # contain invalid UTF-8, which to_json will not properly encode.
        obj.each do |k,v|
          obj.delete(k) if v.is_a?(BSON::Binary)
        end
        row << obj.to_json
      end
      log.debug { "Transformed: #{row.inspect}" }
      row
    end

    # All SQL column names for +schema+, including _extra_props if any.
    def all_columns(schema)
      cols = schema[:columns].map { |col| col[:name] }
      cols << "_extra_props" if schema[:meta][:extra_props]
      cols
    end

    # Bulk-load +objs+ into the mapped table using PostgreSQL COPY.
    def copy_data(db, ns, objs)
      schema = find_ns!(ns)
      db.synchronize do |pg|
        sql = "COPY \"#{schema[:meta][:table]}\" " +
          "(#{all_columns(schema).map {|c| "\"#{c}\""}.join(",")}) FROM STDIN"
        pg.execute(sql)
        objs.each do |o|
          pg.put_copy_data(transform_to_copy(ns, o, schema) + "\n")
        end
        pg.put_copy_end
        begin
          pg.get_result.check
        rescue PGError => e
          # Route through Sequel's error wrapping for a consistent type.
          db.send(:raise_error, e)
        end
      end
    end

    # Escape one value for COPY text format: NULL marker, boolean
    # letters, and backslash-escaped control characters.
    def quote_copy(val)
      case val
      when nil
        "\\N"
      when true
        't'
      when false
        'f'
      else
        val.to_s.gsub(/([\\\t\n\r])/, '\\\\\\1')
      end
    end

    # Join an already-transformed row into one COPY input line.
    def transform_to_copy(ns, row, schema=nil)
      row.map { |c| quote_copy(c) }.join("\t")
    end

    def table_for_ns(ns)
      find_ns!(ns)[:meta][:table]
    end

    def all_mongo_dbs
      @map.keys
    end

    def collections_for_mongo_db(db)
      (@map[db]||{}).keys
    end

    # SQL name of the column mapped from Mongo's _id for +ns+.
    def primary_sql_key_for_ns(ns)
      find_ns!(ns)[:columns].find {|c| c[:source] == '_id'}[:name]
    end
  end
end
Don't mutate the schema while converting it.
Among other reasons, this lets you use the same schema for multiple
DBs by using YAML references.
module MoSQL
  class SchemaError < StandardError; end;

  # Translates the collection-map configuration (parsed YAML) into a
  # normalized schema, creates the corresponding SQL tables, and
  # transforms MongoDB documents into SQL rows / COPY input.
  class Schema
    include MoSQL::Logging

    # Normalize a list of column specs into [{:source,:name,:type}, ...].
    # Two entry shapes are accepted:
    #   new format: { name => ..., :source => "...", :type => "..." }
    #   old format: { name => type }
    # Never mutates +lst+.
    # FIX: after the switch to non-destructive reads, `ent.first.first`
    # picked the literal first key of the hash -- wrongly yielding
    # :source or :type as the column name if either happened to come
    # first.  Derive the name by excluding the reserved keys instead,
    # which restores the order-independent semantics of the original
    # delete-based code.
    def to_array(lst)
      lst.map do |ent|
        if ent.is_a?(Hash) && ent[:source].is_a?(String) && ent[:type].is_a?(String)
          # new configuration format: the column name is whichever key
          # is not :source/:type, independent of key ordering.
          {
            :source => ent.fetch(:source),
            :type   => ent.fetch(:type),
            :name   => (ent.keys - [:source, :type]).first,
          }
        elsif ent.is_a?(Hash) && ent.keys.length == 1 && ent.values.first.is_a?(String)
          # old configuration format: a single { name => type } pair.
          {
            :source => ent.first.first,
            :name   => ent.first.first,
            :type   => ent.first.last
          }
        else
          raise "Invalid ordered hash entry #{ent.inspect}"
        end
      end
    end

    # Raise if two columns in +spec+ map the same Mongo source field.
    def check_columns!(ns, spec)
      seen = Set.new
      spec[:columns].each do |col|
        if seen.include?(col[:source])
          raise "Duplicate source #{col[:source]} in column definition #{col[:name]} for #{ns}."
        end
        seen.add(col[:source])
      end
    end

    # Normalize one collection spec (columns to array form) and validate it.
    def parse_spec(ns, spec)
      out = spec.dup
      out[:columns] = to_array(spec[:columns])
      check_columns!(ns, out)
      out
    end

    # map:: { dbname => { collection_name => spec } }
    def initialize(map)
      @map = {}
      map.each do |dbname, db|
        @map[dbname] ||= {}
        db.each do |cname, spec|
          @map[dbname][cname] = parse_spec("#{dbname}.#{cname}", spec)
        end
      end
    end

    # Create one SQL table per mapped collection.  With clobber, drops
    # and recreates existing tables; otherwise creates only if missing.
    def create_schema(db, clobber=false)
      @map.values.map(&:values).flatten.each do |collection|
        meta = collection[:meta]
        log.info("Creating table '#{meta[:table]}'...")
        db.send(clobber ? :create_table! : :create_table?, meta[:table]) do
          collection[:columns].each do |col|
            column col[:name], col[:type]
            # The column mapped from Mongo's _id doubles as the primary key.
            if col[:source].to_sym == :_id
              primary_key [col[:name].to_sym]
            end
          end
          if meta[:extra_props]
            column '_extra_props', 'TEXT'
          end
        end
      end
    end

    # Look up the schema for a "db.collection" namespace; nil if unmapped.
    def find_ns(ns)
      db, collection = ns.split(".")
      schema = (@map[db] || {})[collection]
      if schema.nil?
        log.debug("No mapping for ns: #{ns}")
        return nil
      end
      schema
    end

    # Like find_ns, but raises SchemaError for unmapped namespaces.
    def find_ns!(ns)
      schema = find_ns(ns)
      raise SchemaError.new("No mapping for namespace: #{ns}") if schema.nil?
      schema
    end

    # Remove and return the value at dotted path +dotted+ from +obj+,
    # pruning intermediate hashes that are left empty.  Returns nil when
    # an intermediate value is missing or not a hash.
    def fetch_and_delete_dotted(obj, dotted)
      pieces = dotted.split(".")
      breadcrumbs = []
      while pieces.length > 1
        key = pieces.shift
        breadcrumbs << [obj, key]
        obj = obj[key]
        return nil unless obj.is_a?(Hash)
      end
      val = obj.delete(pieces.first)
      # Walk back up, dropping now-empty intermediate hashes.
      breadcrumbs.reverse.each do |parent, key|
        parent.delete(key) if parent[key].empty?
      end
      val
    end

    # Convert one Mongo document into an array of column values in
    # schema order (stringifying BSON binary/ObjectId values), plus a
    # JSON _extra_props column of the leftovers when configured.
    def transform(ns, obj, schema=nil)
      schema ||= find_ns!(ns)
      obj = obj.dup
      row = []
      schema[:columns].each do |col|
        v = fetch_and_delete_dotted(obj, col[:source])
        case v
        when BSON::Binary, BSON::ObjectId
          v = v.to_s
        end
        row << v
      end
      if schema[:meta][:extra_props]
        # Kludgily delete binary blobs from _extra_props -- they may
        # contain invalid UTF-8, which to_json will not properly encode.
        obj.each do |k,v|
          obj.delete(k) if v.is_a?(BSON::Binary)
        end
        row << obj.to_json
      end
      log.debug { "Transformed: #{row.inspect}" }
      row
    end

    # All SQL column names for +schema+, including _extra_props if any.
    def all_columns(schema)
      cols = schema[:columns].map { |col| col[:name] }
      cols << "_extra_props" if schema[:meta][:extra_props]
      cols
    end

    # Bulk-load +objs+ into the mapped table using PostgreSQL COPY.
    def copy_data(db, ns, objs)
      schema = find_ns!(ns)
      db.synchronize do |pg|
        sql = "COPY \"#{schema[:meta][:table]}\" " +
          "(#{all_columns(schema).map {|c| "\"#{c}\""}.join(",")}) FROM STDIN"
        pg.execute(sql)
        objs.each do |o|
          pg.put_copy_data(transform_to_copy(ns, o, schema) + "\n")
        end
        pg.put_copy_end
        begin
          pg.get_result.check
        rescue PGError => e
          # Route through Sequel's error wrapping for a consistent type.
          db.send(:raise_error, e)
        end
      end
    end

    # Escape one value for COPY text format: NULL marker, boolean
    # letters, and backslash-escaped control characters.
    def quote_copy(val)
      case val
      when nil
        "\\N"
      when true
        't'
      when false
        'f'
      else
        val.to_s.gsub(/([\\\t\n\r])/, '\\\\\\1')
      end
    end

    # Join an already-transformed row into one COPY input line.
    def transform_to_copy(ns, row, schema=nil)
      row.map { |c| quote_copy(c) }.join("\t")
    end

    def table_for_ns(ns)
      find_ns!(ns)[:meta][:table]
    end

    def all_mongo_dbs
      @map.keys
    end

    def collections_for_mongo_db(db)
      (@map[db]||{}).keys
    end

    # SQL name of the column mapped from Mongo's _id for +ns+.
    def primary_sql_key_for_ns(ns)
      find_ns!(ns)[:columns].find {|c| c[:source] == '_id'}[:name]
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.