CombinedText stringlengths 4 3.42M |
|---|
# OpenNebula Puppet provider for onecluster
#
# License: APLv2
#
# Authors:
# Based upon initial work from Ken Barber
# Modified by Martin Alfke
#
# Copyright
# initial provider had no copyright
# Deutsche Post E-POST Development GmbH - 2014,2015
#
require 'rubygems'
require 'nokogiri'
# CLI-backed provider for the onecluster type: shells out to the OpenNebula
# command line tools and parses their XML output with Nokogiri.
Puppet::Type.type(:onecluster).provide(:cli) do
  desc "onecluster provider"

  # Every OpenNebula CLI tool authenticates via HOME and ONE_AUTH.
  has_command(:onecluster, "onecluster") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end
  has_command(:onedatastore, "onedatastore") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end
  has_command(:onehost, "onehost") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end
  has_command(:onevnet, "onevnet") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end

  mk_resource_methods

  # Create the cluster, then attach every requested host, vnet and datastore.
  def create
    onecluster('create', resource[:name])
    self.debug "We have hosts: #{resource[:hosts]}"
    self.debug "We have vnets: #{resource[:vnets]}"
    resource[:hosts].each { |host|
      self.debug "Adding host #{host} to cluster #{resource[:name]}"
      onecluster('addhost', resource[:name], host)
    }
    resource[:vnets].each { |vnet|
      self.debug "Adding vnet #{vnet} to cluster #{resource[:name]}"
      onecluster('addvnet', resource[:name], vnet)
    }
    resource[:datastores].each { |datastore|
      self.debug "Adding datastore #{datastore} to cluster #{resource[:name]}"
      onecluster('adddatastore', resource[:name], datastore)
    }
    @property_hash[:ensure] = :present
  end

  # Detach all members before deleting; OpenNebula refuses to delete a
  # cluster that still has hosts, vnets or datastores assigned.
  def destroy
    resource[:hosts].each do |host|
      onecluster('delhost', resource[:name], host)
    end
    resource[:vnets].each do |vnet|
      onecluster('delvnet', resource[:name], vnet)
    end
    resource[:datastores].each do |datastore|
      onecluster('deldatastore', resource[:name], datastore)
    end
    onecluster('delete', resource[:name])
    @property_hash.clear
  end

  def exists?
    @property_hash[:ensure] == :present
  end

  # Discover all clusters from `onecluster list -x`, resolving each member
  # ID to its name with one extra CLI call per member.
  def self.instances
    clusters = Nokogiri::XML(onecluster('list', '-x')).root.xpath('/CLUSTER_POOL/CLUSTER')
    clusters.collect do |cluster|
      datastores = cluster.xpath('DATASTORES/ID').collect do |datastore|
        Nokogiri::XML(onedatastore('show', datastore.text, '-x')).root.xpath('/DATASTORE/NAME').text
      end
      hosts = cluster.xpath('HOSTS/ID').collect do |host|
        Nokogiri::XML(onehost('show', host.text, '-x')).root.xpath('/HOST/NAME').text
      end
      vnets = cluster.xpath('VNETS/ID').collect do |vnet|
        # FIX: the xpath must be absolute ('/VNET/NAME'); the relative
        # 'VNET/NAME' is evaluated under the root VNET element and matched
        # nothing, so every vnet name came back empty.
        Nokogiri::XML(onevnet('show', vnet.text, '-x')).root.xpath('/VNET/NAME').text
      end
      new(
        :name => cluster.xpath('./NAME').text,
        :ensure => :present,
        :datastores => datastores,
        :hosts => hosts,
        :vnets => vnets
      )
    end
  end

  # Standard prefetch: pair each managed resource with a discovered provider.
  def self.prefetch(resources)
    clusters = instances
    resources.keys.each do |name|
      if provider = clusters.find{ |cluster| cluster.name == name }
        resources[name].provider = provider
      end
    end
  end

  # Setters diff current state (@property_hash) against the desired value
  # and issue only the add/del commands needed to converge.
  def hosts=(value)
    hosts = @property_hash[:hosts] || []
    (hosts - value).each do |host|
      onecluster('delhost', resource[:name], host)
    end
    (value - hosts).each do |host|
      onecluster('addhost', resource[:name], host)
    end
  end

  def vnets=(value)
    vnets = @property_hash[:vnets] || []
    (vnets - value).each do |vnet|
      onecluster('delvnet', resource[:name], vnet)
    end
    (value - vnets).each do |vnet|
      onecluster('addvnet', resource[:name], vnet)
    end
  end

  def datastores=(value)
    datastores = @property_hash[:datastores] || []
    (datastores - value).each do |datastore|
      onecluster('deldatastore', resource[:name], datastore)
    end
    (value - datastores).each do |datastore|
      onecluster('adddatastore', resource[:name], datastore)
    end
  end
end
typo in vnet collector
# OpenNebula Puppet provider for onecluster
#
# License: APLv2
#
# Authors:
# Based upon initial work from Ken Barber
# Modified by Martin Alfke
#
# Copyright
# initial provider had no copyright
# Deutsche Post E-POST Development GmbH - 2014,2015
#
require 'rubygems'
require 'nokogiri'
# CLI-backed provider for the onecluster type: shells out to the OpenNebula
# command line tools and parses their XML output with Nokogiri.
Puppet::Type.type(:onecluster).provide(:cli) do
  desc "onecluster provider"

  # Every OpenNebula CLI tool authenticates via HOME and ONE_AUTH.
  has_command(:onecluster, "onecluster") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end
  has_command(:onedatastore, "onedatastore") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end
  has_command(:onehost, "onehost") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end
  has_command(:onevnet, "onevnet") do
    environment :HOME => '/root', :ONE_AUTH => '/var/lib/one/.one/one_auth'
  end

  mk_resource_methods

  # Create the cluster, then attach every requested host, vnet and datastore.
  def create
    onecluster('create', resource[:name])
    self.debug "We have hosts: #{resource[:hosts]}"
    self.debug "We have vnets: #{resource[:vnets]}"
    resource[:hosts].each { |host|
      self.debug "Adding host #{host} to cluster #{resource[:name]}"
      onecluster('addhost', resource[:name], host)
    }
    resource[:vnets].each { |vnet|
      self.debug "Adding vnet #{vnet} to cluster #{resource[:name]}"
      onecluster('addvnet', resource[:name], vnet)
    }
    resource[:datastores].each { |datastore|
      self.debug "Adding datastore #{datastore} to cluster #{resource[:name]}"
      onecluster('adddatastore', resource[:name], datastore)
    }
    @property_hash[:ensure] = :present
  end

  # Detach all members before deleting; OpenNebula refuses to delete a
  # cluster that still has hosts, vnets or datastores assigned.
  def destroy
    resource[:hosts].each do |host|
      onecluster('delhost', resource[:name], host)
    end
    resource[:vnets].each do |vnet|
      onecluster('delvnet', resource[:name], vnet)
    end
    resource[:datastores].each do |datastore|
      onecluster('deldatastore', resource[:name], datastore)
    end
    onecluster('delete', resource[:name])
    @property_hash.clear
  end

  def exists?
    @property_hash[:ensure] == :present
  end

  # Discover all clusters from `onecluster list -x`, resolving each member
  # ID to its name with one extra CLI call per member.
  def self.instances
    clusters = Nokogiri::XML(onecluster('list', '-x')).root.xpath('/CLUSTER_POOL/CLUSTER')
    clusters.collect do |cluster|
      datastores = cluster.xpath('DATASTORES/ID').collect do |datastore|
        Nokogiri::XML(onedatastore('show', datastore.text, '-x')).root.xpath('/DATASTORE/NAME').text
      end
      hosts = cluster.xpath('HOSTS/ID').collect do |host|
        Nokogiri::XML(onehost('show', host.text, '-x')).root.xpath('/HOST/NAME').text
      end
      vnets = cluster.xpath('VNETS/ID').collect do |vnet|
        Nokogiri::XML(onevnet('show', vnet.text, '-x')).root.xpath('/VNET/NAME').text
      end
      new(
        :name => cluster.xpath('./NAME').text,
        :ensure => :present,
        :datastores => datastores,
        :hosts => hosts,
        :vnets => vnets
      )
    end
  end

  # Standard prefetch: pair each managed resource with a discovered provider.
  def self.prefetch(resources)
    clusters = instances
    resources.keys.each do |name|
      if provider = clusters.find{ |cluster| cluster.name == name }
        resources[name].provider = provider
      end
    end
  end

  # Setters diff current state (@property_hash) against the desired value
  # and issue only the add/del commands needed to converge.
  def hosts=(value)
    hosts = @property_hash[:hosts] || []
    (hosts - value).each do |host|
      onecluster('delhost', resource[:name], host)
    end
    (value - hosts).each do |host|
      onecluster('addhost', resource[:name], host)
    end
  end

  def vnets=(value)
    vnets = @property_hash[:vnets] || []
    (vnets - value).each do |vnet|
      onecluster('delvnet', resource[:name], vnet)
    end
    (value - vnets).each do |vnet|
      onecluster('addvnet', resource[:name], vnet)
    end
  end

  def datastores=(value)
    datastores = @property_hash[:datastores] || []
    (datastores - value).each do |datastore|
      onecluster('deldatastore', resource[:name], datastore)
    end
    (value - datastores).each do |datastore|
      onecluster('adddatastore', resource[:name], datastore)
    end
  end
end
|
require 'multi_json'
require 'jshintrb'
# In-memory representation of a Zendesk app directory: exposes its files,
# templates, translations, manifest data and assets.
# NOTE: the unused vestigial AppValidationError class has been removed;
# validation errors are produced by the Validations classes instead.
class ZendeskAppsTools::Package
  attr_reader :manifest_path, :source_path

  # dir: root directory of the app; expanded so derived paths are absolute.
  def initialize(dir)
    @dir = File.expand_path(dir)
    @source_path = File.join(@dir, 'app.js')
    @manifest_path = File.join(@dir, 'manifest.json')
  end

  # Every regular file in the package except the tmp/ build directory.
  # Returns the absolute paths as globbed from @dir.
  def files
    Dir["#{@dir}/**/**"].select do |f|
      file = f.sub("#{@dir}/", '')
      # FIX: stat the absolute path (f); the relative path only resolved
      # when the process happened to run from the app root directory.
      File.file?(f) && file !~ %r[^tmp#{File::SEPARATOR}]
    end
  end

  # Run manifest and source validations; returns the combined error list.
  def validate
    ZendeskAppsTools::Validations::Manifest.call(self) +
    ZendeskAppsTools::Validations::Source.call(self)
  end

  # Handlebars templates keyed by basename (without .hdbs), with the
  # trailing newline stripped.
  def templates
    @templates ||= begin
      templates_dir = File.join(@dir, 'templates')
      Dir["#{templates_dir}/*.hdbs"].inject({}) do |h, file|
        str = File.read(file)
        str.chomp!
        h[File.basename(file, File.extname(file))] = str
        h
      end
    end
  end

  # Locale => translations hash; non-default locales are deep-merged over
  # the default locale so missing keys fall back to the default strings.
  def translations
    @translations ||= begin
      translation_dir = File.join(@dir, 'translations')
      default_translations = MultiJson.load(File.read("#{translation_dir}/#{self.default_locale}.json"))
      Dir["#{translation_dir}/*.json"].inject({}) do |h, tr|
        locale = File.basename(tr, File.extname(tr))
        locale_translations = if locale == self.default_locale
          default_translations
        else
          default_translations.deep_merge(MultiJson.load(File.read(tr)))
        end
        h[locale] = locale_translations
        h
      end
    end
  end

  def locales
    translations.keys
  end

  def default_locale
    manifest["default_locale"]
  end

  def translation(en)
    translations[en]
  end

  def name
    manifest["name"] || 'app'
  end

  def author
    {
      :name => manifest['author']['name'],
      :email => manifest['author']['email']
    }
  end

  # Relative asset paths, globbed from inside @dir so results are portable.
  def assets
    @assets ||= begin
      pwd = Dir.pwd
      Dir.chdir(@dir)
      assets = Dir["assets/**/*"]
      Dir.chdir(pwd)
      assets
    end
  end

  def path_to(file)
    File.join(@dir, file)
  end

  # Parsed manifest.json; an unreadable or invalid manifest yields {} so
  # callers can still interrogate the package.
  def manifest
    @manifest ||= begin
      begin
        MultiJson.load( File.read(manifest_path) )
      rescue Errno::ENOENT, Errno::EACCES, MultiJson::DecodeError
        {}
      end
    end
  end
end
remove vestigial Package::AppValidationError
require 'multi_json'
require 'jshintrb'
# In-memory representation of a Zendesk app directory: exposes its files,
# templates, translations, manifest data and assets.
class ZendeskAppsTools::Package
  attr_reader :manifest_path, :source_path

  # dir: root directory of the app; expanded so derived paths are absolute.
  def initialize(dir)
    @dir = File.expand_path(dir)
    @source_path = File.join(@dir, 'app.js')
    @manifest_path = File.join(@dir, 'manifest.json')
  end

  # Every regular file in the package except the tmp/ build directory.
  # Returns the absolute paths as globbed from @dir.
  def files
    Dir["#{@dir}/**/**"].select do |f|
      file = f.sub("#{@dir}/", '')
      # FIX: stat the absolute path (f); the relative path only resolved
      # when the process happened to run from the app root directory.
      File.file?(f) && file !~ %r[^tmp#{File::SEPARATOR}]
    end
  end

  # Run manifest and source validations; returns the combined error list.
  def validate
    ZendeskAppsTools::Validations::Manifest.call(self) +
    ZendeskAppsTools::Validations::Source.call(self)
  end

  # Handlebars templates keyed by basename (without .hdbs), with the
  # trailing newline stripped.
  def templates
    @templates ||= begin
      templates_dir = File.join(@dir, 'templates')
      Dir["#{templates_dir}/*.hdbs"].inject({}) do |h, file|
        str = File.read(file)
        str.chomp!
        h[File.basename(file, File.extname(file))] = str
        h
      end
    end
  end

  # Locale => translations hash; non-default locales are deep-merged over
  # the default locale so missing keys fall back to the default strings.
  def translations
    @translations ||= begin
      translation_dir = File.join(@dir, 'translations')
      default_translations = MultiJson.load(File.read("#{translation_dir}/#{self.default_locale}.json"))
      Dir["#{translation_dir}/*.json"].inject({}) do |h, tr|
        locale = File.basename(tr, File.extname(tr))
        locale_translations = if locale == self.default_locale
          default_translations
        else
          default_translations.deep_merge(MultiJson.load(File.read(tr)))
        end
        h[locale] = locale_translations
        h
      end
    end
  end

  def locales
    translations.keys
  end

  def default_locale
    manifest["default_locale"]
  end

  def translation(en)
    translations[en]
  end

  def name
    manifest["name"] || 'app'
  end

  def author
    {
      :name => manifest['author']['name'],
      :email => manifest['author']['email']
    }
  end

  # Relative asset paths, globbed from inside @dir so results are portable.
  def assets
    @assets ||= begin
      pwd = Dir.pwd
      Dir.chdir(@dir)
      assets = Dir["assets/**/*"]
      Dir.chdir(pwd)
      assets
    end
  end

  def path_to(file)
    File.join(@dir, file)
  end

  # Parsed manifest.json; an unreadable or invalid manifest yields {} so
  # callers can still interrogate the package.
  def manifest
    @manifest ||= begin
      begin
        MultiJson.load( File.read(manifest_path) )
      rescue Errno::ENOENT, Errno::EACCES, MultiJson::DecodeError
        {}
      end
    end
  end
end
|
# Gem version constant for rails_bootstrap_navbar.
module RailsBootstrapNavbar
  VERSION = '2.0.1'
end
bump version
# Gem version constant for rails_bootstrap_navbar.
module RailsBootstrapNavbar
  VERSION = "3.0.0"
end
|
module RailsSettings
  # Settings variant that mirrors every value into Rails.cache so reads
  # can skip the database.
  class CachedSettings < Settings
    # Keep the cache entry in sync whenever a setting is saved.
    after_update :rewrite_cache
    after_create :rewrite_cache
    def rewrite_cache
      Rails.cache.write("settings:#{self.var}", self.value)
    end

    # Drop the cache entry when a setting is destroyed.
    before_destroy { |record| Rails.cache.delete("settings:#{record.var}") }

    # Cached read: a miss falls through to the database via super and
    # stores the result in the cache.
    def self.[](var_name)
      Rails.cache.fetch("settings:#{var_name}") {
        super(var_name)
      }
    end
  end
end
Fix indent.
module RailsSettings
  # Settings variant that mirrors every value into Rails.cache so reads
  # can skip the database.
  class CachedSettings < Settings
    # Keep the cache entry in sync whenever a setting is saved.
    after_update :rewrite_cache
    after_create :rewrite_cache
    def rewrite_cache
      Rails.cache.write("settings:#{self.var}", self.value)
    end

    # Drop the cache entry when a setting is destroyed.
    before_destroy { |record| Rails.cache.delete("settings:#{record.var}") }

    # Cached read: a miss falls through to the database via super and
    # stores the result in the cache.
    def self.[](var_name)
      Rails.cache.fetch("settings:#{var_name}") {
        super(var_name)
      }
    end
  end
end
|
module RailsAdminClone
  # Builds an unsaved deep copy of an ActiveRecord model, cloning has_one,
  # has_many and habtm associations while skipping primary keys, STI
  # inheritance columns and timestamps.
  class ModelCloner
    def initialize(original_model)
      @original_model = original_model
    end

    def original_model
      @original_model
    end

    def class_model
      original_model.class
    end

    # Clone the model and, recursively, its associations.
    def default_clone
      new_object = clone_object(original_model)
      clone_recursively!(original_model, new_object)
      new_object
    end

    # Delegate cloning to a custom method defined on the model itself.
    def method_clone(method)
      original_model.send(method)
    end

    protected

    # True when the class uses strong parameters (no :without_protection).
    def class_with_strong_parameters?(klass)
      defined?(ActiveModel::ForbiddenAttributesProtection) && klass.include?(ActiveModel::ForbiddenAttributesProtection)
    end

    def timestamp_columns
      %w(created_at created_on updated_at updated_on)
    end

    # Attributes never copied: PK, STI column, timestamps.
    def attributes_black_list_from_model(model)
      [model.primary_key, model.inheritance_column] + timestamp_columns
    end

    # Association clones additionally drop the foreign key / polymorphic type.
    def attributes_black_list_from_association(association)
      model = association.class_name.constantize
      attributes = attributes_black_list_from_model(model)
      attributes + [association.try(:foreign_key), association.try(:type)]
    end

    def get_model_attributes_from(object)
      object.attributes.select do |k,v|
        !attributes_black_list_from_model(object.class).include?(k)
      end
    end

    def get_association_attributes_from(object, association)
      object.attributes.select do |k,v|
        !attributes_black_list_from_association(association).include?(k)
      end
    end

    def assign_attributes_for(object, attributes)
      if class_with_strong_parameters?(object.class)
        object.assign_attributes attributes
      else
        object.assign_attributes attributes, without_protection: true
      end
    end

    # deep clone
    def clone_recursively!(old_object, new_object)
      new_object = clone_has_one old_object, new_object
      new_object = clone_habtm old_object, new_object
      new_object = clone_has_many old_object, new_object
      new_object
    end

    # clone object without associations
    def clone_object(old_object)
      object = build_from(old_object)
      assign_attributes_for(object, get_model_attributes_from(old_object))
      object
    end

    # clone has_one associations
    # FIX: has_one ... :through associations have no build_<name> writer
    # (they are derived from the through record), so they must be skipped
    # instead of blindly calling build_#{association.name}.
    def clone_has_one(old_object, new_object)
      old_object.class.reflect_on_all_associations(:has_one).each do |association|
        old_association_model = old_object.send(association.name)
        build_has_one(new_object, association, old_association_model) if build_has_one?(old_object, association)
      end
      new_object
    end

    # Build only when the associated record exists and the association is
    # a direct (non-:through) has_one.
    def build_has_one?(object, association)
      object.send(association.name) && association.options[:through].blank?
    end

    def build_has_one(new_object, association, old_association_model)
      new_object.send(:"build_#{association.name}").tap do |new_association|
        assign_attributes_for(new_association, get_association_attributes_from(old_association_model, association))
        new_association = clone_recursively!(old_association_model, new_association)
      end
    end

    # clone has_many associations (direct ones; :through goes via clone_habtm)
    def clone_has_many(old_object, new_object)
      associations = old_object.class.reflect_on_all_associations(:has_many)
        .select{|a| !a.options.keys.include?(:through)}
      associations.each do |association|
        old_object.send(association.name).each do |old_association|
          new_object.send(association.name).build.tap do |new_association|
            assign_attributes_for(new_association, get_association_attributes_from(old_association, association))
            new_association = clone_recursively!(old_association, new_association)
          end
        end
      end
      new_object
    end

    # clone has_and_belongs_to_many associations by copying the id lists
    def clone_habtm(old_object, new_object)
      associations = old_object.class.reflect_on_all_associations.select do |a|
        a.macro == :has_and_belongs_to_many || (a.macro == :has_many && a.options.keys.include?(:through))
      end
      associations.each do |association|
        method_ids = "#{association.name.to_s.singularize.to_sym}_ids"
        new_object.send(:"#{method_ids}=", old_object.send(method_ids))
      end
      new_object
    end

    def build_from(object)
      object.class.new
    end
  end
end
Fix build has one for has_one :relation, through: :something
module RailsAdminClone
  # Builds an unsaved deep copy of an ActiveRecord model, cloning has_one,
  # has_many and habtm associations while skipping primary keys, STI
  # inheritance columns and timestamps.
  class ModelCloner
    def initialize(original_model)
      @original_model = original_model
    end

    def original_model
      @original_model
    end

    def class_model
      original_model.class
    end

    # Clone the model and, recursively, its associations.
    def default_clone
      new_object = clone_object(original_model)
      clone_recursively!(original_model, new_object)
      new_object
    end

    # Delegate cloning to a custom method defined on the model itself.
    def method_clone(method)
      original_model.send(method)
    end

    protected

    # True when the class uses strong parameters (no :without_protection).
    def class_with_strong_parameters?(klass)
      defined?(ActiveModel::ForbiddenAttributesProtection) && klass.include?(ActiveModel::ForbiddenAttributesProtection)
    end

    def timestamp_columns
      %w(created_at created_on updated_at updated_on)
    end

    # Attributes never copied: PK, STI column, timestamps.
    def attributes_black_list_from_model(model)
      [model.primary_key, model.inheritance_column] + timestamp_columns
    end

    # Association clones additionally drop the foreign key / polymorphic type.
    def attributes_black_list_from_association(association)
      model = association.class_name.constantize
      attributes = attributes_black_list_from_model(model)
      attributes + [association.try(:foreign_key), association.try(:type)]
    end

    def get_model_attributes_from(object)
      object.attributes.select do |k,v|
        !attributes_black_list_from_model(object.class).include?(k)
      end
    end

    def get_association_attributes_from(object, association)
      object.attributes.select do |k,v|
        !attributes_black_list_from_association(association).include?(k)
      end
    end

    def assign_attributes_for(object, attributes)
      if class_with_strong_parameters?(object.class)
        object.assign_attributes attributes
      else
        object.assign_attributes attributes, without_protection: true
      end
    end

    # deep clone
    def clone_recursively!(old_object, new_object)
      new_object = clone_has_one old_object, new_object
      new_object = clone_habtm old_object, new_object
      new_object = clone_has_many old_object, new_object
      new_object
    end

    # clone object without associations
    def clone_object(old_object)
      object = build_from(old_object)
      assign_attributes_for(object, get_model_attributes_from(old_object))
      object
    end

    # clone has_one associations
    def clone_has_one(old_object, new_object)
      old_object.class.reflect_on_all_associations(:has_one).each do |association|
        old_association_model = old_object.send(association.name)
        build_has_one(new_object, association, old_association_model) if build_has_one?(old_object, association)
      end
      new_object
    end

    # Build only when the associated record exists and the association is a
    # direct has_one; :through associations have no build_<name> writer.
    def build_has_one?(object, association)
      object.send(association.name) && association.options[:through].blank?
    end

    def build_has_one(new_object, association, old_association_model)
      new_object.send(:"build_#{association.name}").tap do |new_association|
        assign_attributes_for(new_association, get_association_attributes_from(old_association_model, association))
        new_association = clone_recursively!(old_association_model, new_association)
      end
    end

    # clone has_many associations (direct ones; :through goes via clone_habtm)
    def clone_has_many(old_object, new_object)
      associations = old_object.class.reflect_on_all_associations(:has_many)
        .select{|a| !a.options.keys.include?(:through)}
      associations.each do |association|
        old_object.send(association.name).each do |old_association|
          new_object.send(association.name).build.tap do |new_association|
            assign_attributes_for(new_association, get_association_attributes_from(old_association, association))
            new_association = clone_recursively!(old_association, new_association)
          end
        end
      end
      new_object
    end

    # clone has_and_belongs_to_many associations by copying the id lists
    def clone_habtm(old_object, new_object)
      associations = old_object.class.reflect_on_all_associations.select do |a|
        a.macro == :has_and_belongs_to_many || (a.macro == :has_many && a.options.keys.include?(:through))
      end
      associations.each do |association|
        method_ids = "#{association.name.to_s.singularize.to_sym}_ids"
        new_object.send(:"#{method_ids}=", old_object.send(method_ids))
      end
      new_object
    end

    def build_from(object)
      object.class.new
    end
  end
end
|
require 'singleton'
require 'rails_autoscale_agent/logger'
require 'rails_autoscale_agent/autoscale_api'
require 'rails_autoscale_agent/time_rounder'
require 'rails_autoscale_agent/registration'
# Reporter wakes up every minute to send metrics to the RailsAutoscale API
module RailsAutoscaleAgent
  # Reporter runs a background thread that periodically POSTs collected
  # request metrics to the Rails Autoscale API.
  class Reporter
    include Singleton
    include Logger

    # Start the singleton reporter once; a missing API URL disables it.
    def self.start(config, store)
      if config.api_base_url
        instance.start!(config, store) unless instance.running?
      else
        instance.logger.debug "Reporter not started: RAILS_AUTOSCALE_URL is not set"
      end
    end

    # Register with the API, then report on every config.report_interval tick.
    def start!(config, store)
      @running = true
      Thread.new do
        logger.tagged 'RailsAutoscale' do
          register!(config)
          loop do
            sleep config.report_interval
            begin
              report!(config, store)
            rescue => ex
              # Exceptions in threads other than the main thread will fail silently
              # https://ruby-doc.org/core-2.2.0/Thread.html#class-Thread-label-Exception+handling
              logger.error "Reporter error: #{ex.inspect}"
              logger.error ex.backtrace.join("\n")
            end
          end
        end
      end
    end

    def running?
      @running
    end

    # Pop the pending report from the store and POST it when non-empty.
    def report!(config, store)
      report = store.pop_report
      if report.measurements.any?
        logger.info "Reporting queue times for #{report.measurements.size} requests"
        params = report.to_params(config)
        result = AutoscaleApi.new(config.api_base_url).report_metrics!(params, report.to_csv)
        case result
        when AutoscaleApi::SuccessResponse
          logger.info "Reported successfully"
        when AutoscaleApi::FailureResponse
          logger.error "Reporter failed: #{result.failure_message}"
        end
      else
        logger.debug "Reporter has nothing to report"
      end
    end

    # Register this reporter; the registration response may tune the agent.
    def register!(config)
      params = Registration.new(config).to_params
      result = AutoscaleApi.new(config.api_base_url).register_reporter!(params)
      case result
      when AutoscaleApi::SuccessResponse
        # FIX: only apply settings the API actually returned. Unconditionally
        # assigning result.data['report_interval'] could nil out the interval
        # and break the sleep in start!; also honor max_request_size.
        config.report_interval = result.data['report_interval'] if result.data['report_interval']
        config.max_request_size = result.data['max_request_size'] if result.data['max_request_size']
        logger.info "Reporter starting, will report every #{config.report_interval} seconds"
      when AutoscaleApi::FailureResponse
        logger.error "Reporter failed to register: #{result.failure_message}"
      end
    end
  end
end
Allow registration to update max_request_size
require 'singleton'
require 'rails_autoscale_agent/logger'
require 'rails_autoscale_agent/autoscale_api'
require 'rails_autoscale_agent/time_rounder'
require 'rails_autoscale_agent/registration'
# Reporter wakes up every minute to send metrics to the RailsAutoscale API
module RailsAutoscaleAgent
  # Reporter runs a background thread that periodically POSTs collected
  # request metrics to the Rails Autoscale API.
  class Reporter
    include Singleton
    include Logger

    # Start the singleton reporter once; a missing API URL disables it.
    def self.start(config, store)
      if config.api_base_url
        instance.start!(config, store) unless instance.running?
      else
        instance.logger.debug "Reporter not started: RAILS_AUTOSCALE_URL is not set"
      end
    end

    # Register with the API, then report on every config.report_interval tick.
    def start!(config, store)
      @running = true
      Thread.new do
        logger.tagged 'RailsAutoscale' do
          register!(config)
          loop do
            sleep config.report_interval
            begin
              report!(config, store)
            rescue => ex
              # Exceptions in threads other than the main thread will fail silently
              # https://ruby-doc.org/core-2.2.0/Thread.html#class-Thread-label-Exception+handling
              logger.error "Reporter error: #{ex.inspect}"
              logger.error ex.backtrace.join("\n")
            end
          end
        end
      end
    end

    def running?
      @running
    end

    # Pop the pending report from the store and POST it when non-empty.
    def report!(config, store)
      report = store.pop_report
      if report.measurements.any?
        logger.info "Reporting queue times for #{report.measurements.size} requests"
        params = report.to_params(config)
        result = AutoscaleApi.new(config.api_base_url).report_metrics!(params, report.to_csv)
        case result
        when AutoscaleApi::SuccessResponse
          logger.info "Reported successfully"
        when AutoscaleApi::FailureResponse
          logger.error "Reporter failed: #{result.failure_message}"
        end
      else
        logger.debug "Reporter has nothing to report"
      end
    end

    # Register this reporter; settings are only applied when the API
    # response actually contains them.
    def register!(config)
      params = Registration.new(config).to_params
      result = AutoscaleApi.new(config.api_base_url).register_reporter!(params)
      case result
      when AutoscaleApi::SuccessResponse
        config.report_interval = result.data['report_interval'] if result.data['report_interval']
        config.max_request_size = result.data['max_request_size'] if result.data['max_request_size']
        logger.info "Reporter starting, will report every #{config.report_interval} seconds"
      when AutoscaleApi::FailureResponse
        logger.error "Reporter failed to register: #{result.failure_message}"
      end
    end
  end
end
|
# frozen_string_literal: true
require 'singleton'
require 'rails_autoscale_agent/logger'
require 'rails_autoscale_agent/autoscale_api'
require 'rails_autoscale_agent/time_rounder'
require 'rails_autoscale_agent/registration'
# Reporter wakes up every minute to send metrics to the RailsAutoscale API
module RailsAutoscaleAgent
  # Reporter runs a background thread that collects worker-adapter metrics
  # and periodically ships all measurements to the Rails Autoscale API.
  class Reporter
    include Singleton
    include Logger

    def self.start(config, store)
      instance.start!(config, store) unless instance.started?
    end

    def start!(config, store)
      @started = true
      @worker_adapters = config.worker_adapters.select(&:enabled?)

      # Without an API URL (and outside dev mode) there is nowhere to report.
      if !config.api_base_url && !config.dev_mode?
        logger.info "Reporter not started: #{config.addon_name}_URL is not set"
        return
      end

      Thread.new do
        loop do
          # Registration is retried each cycle until it succeeds.
          register!(config, @worker_adapters) unless @registered
          # Stagger reporting to spread out reports from many processes
          multiplier = 1 - (rand / 4) # between 0.75 and 1.0
          sleep config.report_interval * multiplier
          @worker_adapters.map do |adapter|
            report_exceptions(config) { adapter.collect!(store) }
          end
          report_exceptions(config) { report!(config, store) }
        end
      end
    end

    def started?
      @started
    end

    private

    # Pop the pending report from the store and POST it when non-empty.
    def report!(config, store)
      report = store.pop_report
      if report.measurements.any?
        logger.info "Reporting #{report.measurements.size} measurements"
        params = report.to_params(config)
        result = AutoscaleApi.new(config).report_metrics!(params, report.to_csv)
        case result
        when AutoscaleApi::SuccessResponse
          logger.debug "Reported successfully"
        when AutoscaleApi::FailureResponse
          logger.error "Reporter failed: #{result.failure_message}"
        end
      else
        logger.debug "Reporter has nothing to report"
      end
    end

    # Register this reporter; settings are only applied when the API
    # response actually contains them.
    def register!(config, worker_adapters)
      params = Registration.new(config, worker_adapters).to_params
      result = AutoscaleApi.new(config).register_reporter!(params)
      case result
      when AutoscaleApi::SuccessResponse
        @registered = true
        config.report_interval = result.data['report_interval'] if result.data['report_interval']
        config.max_request_size = result.data['max_request_size'] if result.data['max_request_size']
        worker_adapters_msg = worker_adapters.map { |a| a.class.name }.join(', ')
        logger.info "Reporter starting, will report every #{config.report_interval} seconds or so. Worker adapters: [#{worker_adapters_msg}]"
      when AutoscaleApi::FailureResponse
        logger.error "Reporter failed to register: #{result.failure_message}"
      end
    end

    # Run the block, logging and forwarding any exception to the API; the
    # outer rescue swallows errors raised while reporting the exception so
    # the reporter thread keeps running.
    def report_exceptions(config)
      begin
        yield
      rescue => ex
        # Exceptions in threads other than the main thread will fail silently
        # https://ruby-doc.org/core-2.2.0/Thread.html#class-Thread-label-Exception+handling
        logger.error "Reporter error: #{ex.inspect}"
        AutoscaleApi.new(config).report_exception!(ex)
      end
    rescue => ex
      # An exception was encountered while trying to report the original exception.
      # Swallow the error so the reporter continues to report.
      logger.error "Exception reporting error: #{ex.inspect}"
    end
  end
end
Only report worker metrics from web.1
The agent running on every web dyno is currently reporting the same redundant worker metrics. Since web.1 is always active, we can safely avoid this redundancy by only reporting worker metrics from web.1 (or dev.1 for testing purposes).
# frozen_string_literal: true
require 'singleton'
require 'rails_autoscale_agent/logger'
require 'rails_autoscale_agent/autoscale_api'
require 'rails_autoscale_agent/time_rounder'
require 'rails_autoscale_agent/registration'
# Reporter wakes up every minute to send metrics to the RailsAutoscale API
module RailsAutoscaleAgent
  # Reporter runs a background thread that collects worker-adapter metrics
  # (from web.1 / dev.1 only) and periodically ships all measurements to
  # the Rails Autoscale API.
  class Reporter
    include Singleton
    include Logger

    def self.start(config, store)
      instance.start!(config, store) unless instance.started?
    end

    def start!(config, store)
      @started = true
      @worker_adapters = config.worker_adapters.select(&:enabled?)
      # Numeric part of the dyno name, e.g. "web.1" -> 1.
      @dyno_num = config.dyno.to_s.split('.').last.to_i

      # Without an API URL (and outside dev mode) there is nowhere to report.
      if !config.api_base_url && !config.dev_mode?
        logger.info "Reporter not started: #{config.addon_name}_URL is not set"
        return
      end

      Thread.new do
        loop do
          # Registration is retried each cycle until it succeeds.
          register!(config, @worker_adapters) unless @registered
          # Stagger reporting to spread out reports from many processes
          multiplier = 1 - (rand / 4) # between 0.75 and 1.0
          sleep config.report_interval * multiplier
          # It's redundant to report worker metrics from every web dyno, so only report from web.1
          if @dyno_num == 1
            @worker_adapters.map do |adapter|
              report_exceptions(config) { adapter.collect!(store) }
            end
          end
          report_exceptions(config) { report!(config, store) }
        end
      end
    end

    def started?
      @started
    end

    private

    # Pop the pending report from the store and POST it when non-empty.
    def report!(config, store)
      report = store.pop_report
      if report.measurements.any?
        logger.info "Reporting #{report.measurements.size} measurements"
        params = report.to_params(config)
        result = AutoscaleApi.new(config).report_metrics!(params, report.to_csv)
        case result
        when AutoscaleApi::SuccessResponse
          logger.debug "Reported successfully"
        when AutoscaleApi::FailureResponse
          logger.error "Reporter failed: #{result.failure_message}"
        end
      else
        logger.debug "Reporter has nothing to report"
      end
    end

    # Register this reporter; settings are only applied when the API
    # response actually contains them.
    def register!(config, worker_adapters)
      params = Registration.new(config, worker_adapters).to_params
      result = AutoscaleApi.new(config).register_reporter!(params)
      case result
      when AutoscaleApi::SuccessResponse
        @registered = true
        config.report_interval = result.data['report_interval'] if result.data['report_interval']
        config.max_request_size = result.data['max_request_size'] if result.data['max_request_size']
        worker_adapters_msg = worker_adapters.map { |a| a.class.name }.join(', ')
        logger.info "Reporter starting, will report every #{config.report_interval} seconds or so. Worker adapters: [#{worker_adapters_msg}]"
      when AutoscaleApi::FailureResponse
        logger.error "Reporter failed to register: #{result.failure_message}"
      end
    end

    # Run the block, logging and forwarding any exception to the API; the
    # outer rescue swallows errors raised while reporting the exception so
    # the reporter thread keeps running.
    def report_exceptions(config)
      begin
        yield
      rescue => ex
        # Exceptions in threads other than the main thread will fail silently
        # https://ruby-doc.org/core-2.2.0/Thread.html#class-Thread-label-Exception+handling
        logger.error "Reporter error: #{ex.inspect}"
        AutoscaleApi.new(config).report_exception!(ex)
      end
    rescue => ex
      # An exception was encountered while trying to report the original exception.
      # Swallow the error so the reporter continues to report.
      logger.error "Exception reporting error: #{ex.inspect}"
    end
  end
end
|
# Chef role for the pummelzacken host (Nominatim master).
name "pummelzacken"
description "Master role applied to pummelzacken"

default_attributes(
  # Host addressing: em1 is the internal network, em2 the public one.
  :networking => {
    :interfaces => {
      :internal_ipv4 => {
        :interface => "em1",
        :role => :internal,
        :family => :inet,
        :address => "10.0.0.20"
      },
      :external_ipv4 => {
        :interface => "em2",
        :role => :external,
        :family => :inet,
        :address => "128.40.45.204"
      }
    }
  },
  # PostgreSQL tuning for this machine.
  :postgresql => {
    :settings => {
      :defaults => {
        :listen_addresses => "10.0.0.20",
        :shared_buffers => "10GB",
        :work_mem => "160MB",
        :maintenance_work_mem => "10GB",
        :random_page_cost => "1.5",
        :effective_cache_size => "60GB"
      }
    }
  },
  # Nominatim geocoder: database location, PHP-FPM pools and redirects.
  :nominatim => {
    :flatnode_file => "/ssd/nominatim/nodes.store",
    :database => {
      :cluster => "9.3/main",
      :dbname => "nominatim",
      :postgis => "2.1"
    },
    :fpm_pools => {
      :www => {
        :port => "8000",
        :pm => "dynamic",
        :max_children => "60"
      },
      :bulk => {
        :port => "8001",
        :pm => "static",
        :max_children => "10"
      }
    },
    :redirects => {
      :reverse => "poldi.openstreetmap.org"
    }
  }
)

run_list(
  "role[ucl-wolfson]",
  "role[nominatim-master]"
)
remove redirect of reverse to poldi
# Chef role for the pummelzacken host (Nominatim master).
name "pummelzacken"
description "Master role applied to pummelzacken"

default_attributes(
  # Host addressing: em1 is the internal network, em2 the public one.
  :networking => {
    :interfaces => {
      :internal_ipv4 => {
        :interface => "em1",
        :role => :internal,
        :family => :inet,
        :address => "10.0.0.20"
      },
      :external_ipv4 => {
        :interface => "em2",
        :role => :external,
        :family => :inet,
        :address => "128.40.45.204"
      }
    }
  },
  # PostgreSQL tuning for this machine.
  :postgresql => {
    :settings => {
      :defaults => {
        :listen_addresses => "10.0.0.20",
        :shared_buffers => "10GB",
        :work_mem => "160MB",
        :maintenance_work_mem => "10GB",
        :random_page_cost => "1.5",
        :effective_cache_size => "60GB"
      }
    }
  },
  # Nominatim geocoder: database location and PHP-FPM pools.
  :nominatim => {
    :flatnode_file => "/ssd/nominatim/nodes.store",
    :database => {
      :cluster => "9.3/main",
      :dbname => "nominatim",
      :postgis => "2.1"
    },
    :fpm_pools => {
      :www => {
        :port => "8000",
        :pm => "dynamic",
        :max_children => "60"
      },
      :bulk => {
        :port => "8001",
        :pm => "static",
        :max_children => "10"
      }
    },
    # No redirects: reverse queries are served locally.
    :redirects => {
    }
  }
)

run_list(
  "role[ucl-wolfson]",
  "role[nominatim-master]"
)
|
require 'test_helper'
# Endpoint tests for the helpdesk ticket API.
class TicketsTest < ActiveSupport::TestCase
  include Rack::Test::Methods
  include TestHelpers::AuthHelper
  include TestHelpers::JsonHelper

  def app
    Rails.application
  end

  def setup
    @auth_token = get_auth_token()
  end

  # --------------------------------------------------------------------------- #
  # --- Endpoint testing for:
  # ------- /helpdesk/tickets.json
  # ------- GET
  # --------------------------------------------------------------------------- #
  # POST tests
  # GET tests
  # Test GET for all tickets
  def test_get_heldpesk_tickets
    # FIX: only the first query parameter uses '?'; subsequent ones must be
    # joined with '&'. A literal '#' would also start a URL fragment and
    # truncate the query, so the filter value is sent without it.
    get "/api/helpdesk/ticket.json?filter=all&auth_token=#{@auth_token}"
    actual_ticket = JSON.parse(last_response.body)[0]
    puts "actual_tickets: #{actual_ticket}"
  end

  # Test GET for a ticket with and id
  def test_get_heldpesk_ticket_with_id
    id = 0;
    get "/api/helpdesk/ticket/#{id}.json?auth_token=#{@auth_token}"
    actual_ticket = JSON.parse(last_response.body)[0]
    puts "actual_tickets: #{actual_ticket}"
  end

  # Test POST for a ticket with and id
  def test_post_heldpesk_ticket
    auth_token = auth_token_for(Project.first.user)
    project_id = Project.first.id
    description = "jake renzella's comment :)"
    # FIX: join query parameters with '&' instead of repeated '?'.
    # NOTE(review): description contains spaces/punctuation and should
    # probably be URL-encoded (CGI.escape) — confirm against the API.
    post "/api/helpdesk/ticket.json?project_id=#{project_id}&description=#{description}&auth_token=#{auth_token}"
    actual_ticket = JSON.parse(last_response.body)[0]
    puts "actual_tickets: #{actual_ticket}"
  end
end
QUALITY: Change auth_token to use instance variable
require 'test_helper'
require 'cgi'

# Endpoint tests for /api/helpdesk/ticket(.json): GET all tickets, GET by id,
# and POST to create a ticket.
# NOTE(review): "heldpesk" in the test method names is a historical typo,
# kept so existing test names stay stable.
class TicketsTest < ActiveSupport::TestCase
  include Rack::Test::Methods
  include TestHelpers::AuthHelper
  include TestHelpers::JsonHelper

  # Rack::Test needs the application under test.
  def app
    Rails.application
  end

  def setup
    @auth_token = get_auth_token()
  end

  # --------------------------------------------------------------------------- #
  # --- Endpoint testing for:
  # ------- /helpdesk/tickets.json
  # ------- GET
  # --------------------------------------------------------------------------- #

  # Test GET for all tickets.
  # Bug fix: the original URL was "?filter=#all?auth_token=..." — parameters
  # after the first must be joined with '&', and the raw '#' turned the rest
  # of the URL into a URI fragment, so auth_token was never sent.
  def test_get_heldpesk_tickets
    get "/api/helpdesk/ticket.json?filter=all&auth_token=#{@auth_token}"
    actual_ticket = JSON.parse(last_response.body)[0]
    puts "actual_tickets: #{actual_ticket}"
  end

  # Test GET for a ticket with an id
  def test_get_heldpesk_ticket_with_id
    id = 0
    get "/api/helpdesk/ticket/#{id}.json?auth_token=#{@auth_token}"
    actual_ticket = JSON.parse(last_response.body)[0]
    puts "actual_tickets: #{actual_ticket}"
  end

  # Test POST for a ticket.
  # Bug fix: '&' between parameters (was '?'), and the free-text description
  # is URL-encoded so spaces and punctuation survive the round trip.
  def test_post_heldpesk_ticket
    @auth_token = auth_token_for(Project.first.user)
    project_id = Project.first.id
    description = "jake renzella's comment :)"
    post "/api/helpdesk/ticket.json?project_id=#{project_id}&description=#{CGI.escape(description)}&auth_token=#{@auth_token}"
    actual_ticket = JSON.parse(last_response.body)[0]
    puts "actual_tickets: #{actual_ticket}"
  end
end
|
# encoding: utf-8
# Unit tests for Nanoc::DirectedGraph.  Covers (direct) predecessor and
# successor queries, edge/vertex addition and removal, and the bookkeeping
# of root vertices (vertices with no incoming edges) as the graph mutates.
# The exact expected values below are the test fixtures — do not "tidy" them.
class Nanoc::DirectedGraphTest < MiniTest::Unit::TestCase
include Nanoc::TestHelpers
def test_direct_predecessors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [], graph.direct_predecessors_of(1)
assert_equal [ 1 ], graph.direct_predecessors_of(2)
assert_equal [ 2 ], graph.direct_predecessors_of(3)
end
def test_predecessors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [], graph.predecessors_of(1).sort
assert_equal [ 1 ], graph.predecessors_of(2).sort
assert_equal [ 1, 2 ], graph.predecessors_of(3).sort
end
def test_direct_successors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [ 2 ], graph.direct_successors_of(1)
assert_equal [ 3 ], graph.direct_successors_of(2)
assert_equal [], graph.direct_successors_of(3)
end
def test_successors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [ 2, 3 ], graph.successors_of(1).sort
assert_equal [ 3 ], graph.successors_of(2).sort
assert_equal [], graph.successors_of(3).sort
end
# #edges reports vertex *indices* (0-based positions in the initial list),
# not the vertex values themselves.
def test_edges
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [ [ 0, 1 ], [ 1, 2 ] ], graph.edges.sort
end
def test_edges_with_new_vertices
graph = Nanoc::DirectedGraph.new([ 1 ])
graph.add_edge(1, 2)
graph.add_edge(3, 2)
assert_equal [ [ 0, 1 ], [ 2, 1 ] ], graph.edges.sort
end
def test_add_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal [], graph.successors_of(1)
assert_equal [], graph.predecessors_of(2)
graph.add_edge(1, 2)
assert_equal [ 2 ], graph.successors_of(1)
assert_equal [ 1 ], graph.predecessors_of(2)
end
# add_edge implicitly registers endpoints that were not initial vertices.
def test_add_edge_with_new_vertices
graph = Nanoc::DirectedGraph.new([ 1 ])
graph.add_edge(1, 2)
graph.add_edge(3, 2)
assert graph.vertices.include?(2)
assert graph.vertices.include?(3)
end
def test_remove_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1,2)
assert_equal [ 2 ], graph.successors_of(1)
assert_equal [ 1 ], graph.predecessors_of(2)
graph.remove_edge(1, 2)
assert_equal [], graph.successors_of(1)
assert_equal [], graph.predecessors_of(2)
end
# Builds the complete digraph on {1,2,3}, then strips outgoing edges
# vertex by vertex and checks the remaining adjacency and roots.
def test_delete_edges_from
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 1)
graph.add_edge(2, 3)
graph.add_edge(3, 2)
graph.add_edge(1, 3)
graph.add_edge(3, 1)
assert_equal [ 2, 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ 2, 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1, 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 1, 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
graph.delete_edges_from(1)
assert_equal [ 2, 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ ], graph.direct_successors_of(1).sort
assert_equal [ 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 1, 3 ], graph.direct_successors_of(2).sort
assert_equal [ 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
graph.delete_edges_from(2)
assert_equal [ 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ ], graph.direct_successors_of(1).sort
assert_equal [ 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ ], graph.direct_successors_of(2).sort
assert_equal [ ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([ 3 ]), graph.roots
end
# Mirror of test_delete_edges_from for incoming edges; a vertex whose last
# incoming edge is removed must become a root.
def test_delete_edges_to
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 1)
graph.add_edge(2, 3)
graph.add_edge(3, 2)
graph.add_edge(1, 3)
graph.add_edge(3, 1)
assert_equal [ 2, 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ 2, 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1, 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 1, 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
graph.delete_edges_to(1)
assert_equal [ ], graph.direct_predecessors_of(1).sort
assert_equal [ 2, 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1, 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([ 1 ]), graph.roots
graph.delete_edges_to(2)
assert_equal [ ], graph.direct_predecessors_of(1).sort
assert_equal [ 3 ], graph.direct_successors_of(1).sort
assert_equal [ ], graph.direct_predecessors_of(2).sort
assert_equal [ 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ ], graph.direct_successors_of(3).sort
assert_equal Set.new([ 1, 2 ]), graph.roots
end
def test_delete_vertex
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 1)
graph.add_edge(2, 3)
graph.add_edge(3, 2)
graph.add_edge(1, 3)
graph.add_edge(3, 1)
graph.delete_vertex(2)
assert_equal [ 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
end
# Removing a vertex must promote its now-unreferenced successors to roots.
def test_delete_vertex_resulting_roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal Set.new([ 1 ]), graph.roots
graph.delete_vertex(2)
assert_equal Set.new([ 1, 3 ]), graph.roots
end
def test_should_return_empty_array_for_nonexistant_vertices
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal [], graph.direct_predecessors_of(4)
assert_equal [], graph.predecessors_of(4)
assert_equal [], graph.direct_successors_of(4)
assert_equal [], graph.successors_of(4)
end
def test_roots_after_init
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
end
def test_roots_after_adding_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
assert_equal Set.new([ 1, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 3)
assert_equal Set.new([ 1, 2 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(2, 1)
assert_equal Set.new([ 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal Set.new([ 1 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(3, 1)
assert_equal Set.new([]), graph.roots
end
# Each sub-case rebuilds a fresh graph; removing a nonexistent edge must be
# a harmless no-op (see the "no such edge" case).
def test_roots_after_removing_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.remove_edge(1, 2)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 3)
assert_equal Set.new([ 1, 2 ]), graph.roots
graph.remove_edge(1, 2) # no such edge
assert_equal Set.new([ 1, 2 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(2, 1)
graph.remove_edge(2, 1)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.remove_edge(1, 2)
assert_equal Set.new([ 1, 2 ]), graph.roots
graph.remove_edge(2, 3)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(3, 1)
graph.remove_edge(1, 2)
assert_equal Set.new([ 2 ]), graph.roots
graph.remove_edge(2, 3)
assert_equal Set.new([ 2, 3 ]), graph.roots
graph.remove_edge(3, 1)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
end
end
added example test for directed graph
# encoding: utf-8
# Unit tests for Nanoc::DirectedGraph.  Covers (direct) predecessor and
# successor queries, edge/vertex addition and removal, root bookkeeping,
# and (test_example) the code examples embedded in the API docs.
# The exact expected values below are the test fixtures — do not "tidy" them.
class Nanoc::DirectedGraphTest < MiniTest::Unit::TestCase
include Nanoc::TestHelpers
def test_direct_predecessors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [], graph.direct_predecessors_of(1)
assert_equal [ 1 ], graph.direct_predecessors_of(2)
assert_equal [ 2 ], graph.direct_predecessors_of(3)
end
def test_predecessors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [], graph.predecessors_of(1).sort
assert_equal [ 1 ], graph.predecessors_of(2).sort
assert_equal [ 1, 2 ], graph.predecessors_of(3).sort
end
def test_direct_successors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [ 2 ], graph.direct_successors_of(1)
assert_equal [ 3 ], graph.direct_successors_of(2)
assert_equal [], graph.direct_successors_of(3)
end
def test_successors
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [ 2, 3 ], graph.successors_of(1).sort
assert_equal [ 3 ], graph.successors_of(2).sort
assert_equal [], graph.successors_of(3).sort
end
# #edges reports vertex *indices* (0-based positions in the initial list),
# not the vertex values themselves.
def test_edges
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal [ [ 0, 1 ], [ 1, 2 ] ], graph.edges.sort
end
def test_edges_with_new_vertices
graph = Nanoc::DirectedGraph.new([ 1 ])
graph.add_edge(1, 2)
graph.add_edge(3, 2)
assert_equal [ [ 0, 1 ], [ 2, 1 ] ], graph.edges.sort
end
def test_add_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal [], graph.successors_of(1)
assert_equal [], graph.predecessors_of(2)
graph.add_edge(1, 2)
assert_equal [ 2 ], graph.successors_of(1)
assert_equal [ 1 ], graph.predecessors_of(2)
end
# add_edge implicitly registers endpoints that were not initial vertices.
def test_add_edge_with_new_vertices
graph = Nanoc::DirectedGraph.new([ 1 ])
graph.add_edge(1, 2)
graph.add_edge(3, 2)
assert graph.vertices.include?(2)
assert graph.vertices.include?(3)
end
def test_remove_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1,2)
assert_equal [ 2 ], graph.successors_of(1)
assert_equal [ 1 ], graph.predecessors_of(2)
graph.remove_edge(1, 2)
assert_equal [], graph.successors_of(1)
assert_equal [], graph.predecessors_of(2)
end
# Builds the complete digraph on {1,2,3}, then strips outgoing edges
# vertex by vertex and checks the remaining adjacency and roots.
def test_delete_edges_from
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 1)
graph.add_edge(2, 3)
graph.add_edge(3, 2)
graph.add_edge(1, 3)
graph.add_edge(3, 1)
assert_equal [ 2, 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ 2, 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1, 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 1, 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
graph.delete_edges_from(1)
assert_equal [ 2, 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ ], graph.direct_successors_of(1).sort
assert_equal [ 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 1, 3 ], graph.direct_successors_of(2).sort
assert_equal [ 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
graph.delete_edges_from(2)
assert_equal [ 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ ], graph.direct_successors_of(1).sort
assert_equal [ 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ ], graph.direct_successors_of(2).sort
assert_equal [ ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([ 3 ]), graph.roots
end
# Mirror of test_delete_edges_from for incoming edges; a vertex whose last
# incoming edge is removed must become a root.
def test_delete_edges_to
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 1)
graph.add_edge(2, 3)
graph.add_edge(3, 2)
graph.add_edge(1, 3)
graph.add_edge(3, 1)
assert_equal [ 2, 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ 2, 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1, 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 1, 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1, 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
graph.delete_edges_to(1)
assert_equal [ ], graph.direct_predecessors_of(1).sort
assert_equal [ 2, 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1, 3 ], graph.direct_predecessors_of(2).sort
assert_equal [ 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ 2 ], graph.direct_successors_of(3).sort
assert_equal Set.new([ 1 ]), graph.roots
graph.delete_edges_to(2)
assert_equal [ ], graph.direct_predecessors_of(1).sort
assert_equal [ 3 ], graph.direct_successors_of(1).sort
assert_equal [ ], graph.direct_predecessors_of(2).sort
assert_equal [ 3 ], graph.direct_successors_of(2).sort
assert_equal [ 1, 2 ], graph.direct_predecessors_of(3).sort
assert_equal [ ], graph.direct_successors_of(3).sort
assert_equal Set.new([ 1, 2 ]), graph.roots
end
def test_delete_vertex
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 1)
graph.add_edge(2, 3)
graph.add_edge(3, 2)
graph.add_edge(1, 3)
graph.add_edge(3, 1)
graph.delete_vertex(2)
assert_equal [ 3 ], graph.direct_predecessors_of(1).sort
assert_equal [ 3 ], graph.direct_successors_of(1).sort
assert_equal [ 1 ], graph.direct_predecessors_of(3).sort
assert_equal [ 1 ], graph.direct_successors_of(3).sort
assert_equal Set.new([]), graph.roots
end
# Removing a vertex must promote its now-unreferenced successors to roots.
def test_delete_vertex_resulting_roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal Set.new([ 1 ]), graph.roots
graph.delete_vertex(2)
assert_equal Set.new([ 1, 3 ]), graph.roots
end
def test_should_return_empty_array_for_nonexistant_vertices
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal [], graph.direct_predecessors_of(4)
assert_equal [], graph.predecessors_of(4)
assert_equal [], graph.direct_successors_of(4)
assert_equal [], graph.successors_of(4)
end
def test_roots_after_init
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
end
def test_roots_after_adding_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
assert_equal Set.new([ 1, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 3)
assert_equal Set.new([ 1, 2 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(2, 1)
assert_equal Set.new([ 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
assert_equal Set.new([ 1 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(3, 1)
assert_equal Set.new([]), graph.roots
end
# Each sub-case rebuilds a fresh graph; removing a nonexistent edge must be
# a harmless no-op (see the "no such edge" case).
def test_roots_after_removing_edge
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.remove_edge(1, 2)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 3)
assert_equal Set.new([ 1, 2 ]), graph.roots
graph.remove_edge(1, 2) # no such edge
assert_equal Set.new([ 1, 2 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(2, 1)
graph.remove_edge(2, 1)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.remove_edge(1, 2)
assert_equal Set.new([ 1, 2 ]), graph.roots
graph.remove_edge(2, 3)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
graph = Nanoc::DirectedGraph.new([ 1, 2, 3 ])
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(3, 1)
graph.remove_edge(1, 2)
assert_equal Set.new([ 2 ]), graph.roots
graph.remove_edge(2, 3)
assert_equal Set.new([ 2, 3 ]), graph.roots
graph.remove_edge(3, 1)
assert_equal Set.new([ 1, 2, 3 ]), graph.roots
end
# Verify the code examples embedded in the DirectedGraph API docs via YARD.
def test_example
YARD.parse('../lib/nanoc/base/directed_graph.rb')
assert_examples_correct 'Nanoc::DirectedGraph'
end
end
|
# Copyright (c) 2013 Chananya Freiman (aka GhostWolf)
# Used to generate all the permutations of vector fields of length 2-4 (xyzw, rgba, stpq)
# E.g., xx xy xz xw yx yy yz yw ...
def permutations(v)
  (2..4).flat_map do |length|
    v.repeated_permutation(length).map(&:join)
  end
end
# Rename language keywords and types using #defines where it will reduce the overall size.
# Each entry of language_words is indexed as [0] = original word, [1] = short alias.
# Mutates `data` in place and returns the emitted #define lines prepended to it.
def add_defines(data, language_words)
  emitted = []
  language_words.each do |v|
    pattern = /\b#{v[0]}\b/
    occurrences = data.scan(pattern).length
    next unless occurrences * v[0].length > 0
    # Only worth it when "#define alias word\n" + all alias uses is shorter
    # than spelling the word out everywhere.
    cost_without = occurrences * v[0].length
    cost_with = "#define #{v[1]} #{v[0]}\n".length + occurrences * v[1].length
    if cost_with < cost_without
      emitted << "#define #{v[1]} #{v[0]}"
      data.gsub!(pattern, v[1])
    end
  end
  "\n" + emitted.join("\n") + "\n" + data
end
# Resolves #define values to literal numbers where possible so inline_defines
# can later substitute them. Each entry of `defines` is [full_line, name,
# value] (see get_defines); a resolved numeric value is appended as a fourth
# element (v[3]). Entries whose value cannot be evaluated keep v[3] == nil.
# NOTE(review): uses Kernel#eval on the #define values — only acceptable
# because the input is the author's own shader source, never untrusted data.
def preprocess_defines(defines)
rewrites = {}
# Get inline values for simple number equation #defines
# E.g.: 5 => 5, 5*2 => 10
defines.each { |v|
begin
n = eval(v[2]).to_s()
rewrites[v[1]] = n
v[3] = n
rescue
# Not a plain numeric expression; may still resolve in the second pass.
end
}
# Get inline values for #define equations that have the previously inlined #defines in their values
# E.g.: N/2 => 5/2 => 2.5, assuming N was inlined as 5
defines.each { |v|
if not v[3]
begin
# NOTE(review): s aliases v[2] (same String object), so the gsub! below
# rewrites the stored value in place as well — verify callers don't
# re-read the original value afterwards.
s = v[2]
rewrites.each { |k, n|
s.gsub!(/\b#{k}\b/, n)
}
n = eval(s).to_s()
v[3] = n
rescue
# Still unresolvable (e.g. references a non-numeric define); leave as-is.
end
end
}
end
# Substitutes every #define that preprocess_defines managed to resolve
# (entries with a non-nil v[3]) directly into `data`, removing the #define
# line itself. Mutates `data` in place.
def inline_defines(defines, data)
  # First pass removes the inlined #define lines
  defines.each do |v|
    data.sub!(v[0], "") if v[3]
  end
  # Second pass inlines the values
  defines.each do |v|
    data.gsub!(/\b#{v[1]}\b/, v[3]) if v[3]
  end
end
def rewrite_numbers(data)
# Convert hexadecimal numbers to decimal numbers
data.gsub!(/0x[0-9a-fA-F]+/) { |n|
n.to_i(16)
}
# Remove useless zeroes
data.gsub!(/\b\d*\.?\d+\b/) { |n|
if n["."]
n.to_f().to_s()
else
n.to_i().to_s()
end
}
# Remove useless zeroes
data.gsub!(/[0]+(\.\d+)/, "\\1")
data.gsub!(/(\d+\.)[0]+/, "\\1")
# Change integers to exponent representation if it's shorter
data.gsub!(/(\d+?)(0+)/) {
n = $1
e = $2
if e.size > 2
"#{n}e#{(e.size - 1)}"
else
n + e
end
}
end
# Removes comments (both // line comments and /* */ block comments).
# Mutates `source` in place.
def remove_comments(source)
  source.gsub!(%r{//.*}, "")
  source.gsub!(%r{/\*.*?\*/}m, "")
end
# Splits `data` into alternating [other, struct-definition, other, ...]
# chunks; struct definitions land at the odd indices. `datatypes` is
# accepted for signature parity with the other parse_* helpers but unused.
def parse_structs(data, datatypes)
  struct_pattern = /(struct\s+(?:\w+)\s*{.*?}\s*;)/m
  data.split(struct_pattern)
end
# Renames struct member declarations inside `chunk` (mutated in place) using
# `map` (Hash of old name => new name); only the first occurrence of each
# member is touched (the declaration). `datatypes` is unused here.
def rename_struct_members(chunk, map, datatypes)
  map.each_key do |old_name|
    chunk.sub!(/\b#{old_name}\b/, map[old_name])
  end
end
# Rename the members of all structs
# Renames every struct member (via rename_struct_members), regroups member
# declarations of the same type onto one comma-separated line (via
# group_list), and finally rewrites all ".member" accesses in the rest of the
# source. Returns the rebuilt source string.
# NOTE(review): the inner block parameter `chunk` shadows the outer `chunk`,
# and `tokens` is reassigned inside the #ifdef branch — left as-is, the code
# depends on that exact flow.
def rename_members(data, map, datatypes)
source = ""
parse_structs(data, datatypes).each_with_index { |chunk, i|
# Odd indices are struct definitions (see parse_structs); even are passthrough.
if i % 2 != 0
# Rename the members
rename_struct_members(chunk, map, datatypes)
tokens = chunk.split(/(struct \w+{)(.*?)(};)/m)
source += tokens[1]
# All global variables in global scope can be combined, unless they exist in #ifdefs
outer = tokens[2].gsub(/(#if.*?#endif)/m, "")
source += group_list(outer.scan(/()(#{datatypes})\s+(\w+)(.*?);/))
tokens[2].split(/(#ifdef.*?#endif)/m).each { |chunk|
# Do the same thing inside #ifdefs
if chunk.start_with?("#if")
tokens = chunk.split(/(#ifdef.*?\n)(.*?)(#endif)/m)
source += tokens[1]
source += group_list(tokens[2].scan(/()(#{datatypes})\s+(\w+)(.*?);/))
source += tokens[2].gsub(/(#{datatypes})\s+(\w+)(.*?);/, "")
source += tokens[3]
source += "\n"
else
source += chunk.gsub(/(#{datatypes})\s+(\w+)(.*?);/, "")
end
}
source += "};"
else
source += chunk
end
}
# Rewrite member accesses (".old" -> ".new") across the whole source.
map.each { |v|
source.gsub!(/\.\b#{v[0]}\b/, ".#{map[v[0]]}")
}
return source
end
# Collects the member names declared inside every struct in `data`.
# Returns an array of single-element capture arrays, one per member,
# matching String#scan's shape.
def get_member_names(data, datatypes)
  structs = data.scan(/(struct\s+(?:\w+)\s*{.*?}\s*;)/m)
  per_struct = structs.collect do |struct|
    struct[0].scan(/(?:#{datatypes})\s+(\w+)\s*;/)
  end
  per_struct.flatten(1)
end
# Split the source into function/other chunks.
# Returns an array alternating between passthrough chunks ([text]) and
# function chunks ([signature-head, "(args)", "{body}"]); function chunks sit
# at the odd indices. The function body is delimited by walking matched braces.
def parse_functions(source, datatypes)
  pieces = source.split(/((?:#{datatypes})\s+\w+\s*\(.*?\))/)
  chunks = [[pieces[0]]]
  (1...pieces.size).step(2) do |i|
    signature = pieces[i].split(/\(/)
    remainder = pieces[i + 1]
    # Walk matched braces to find where the function body ends.
    cursor = remainder.index("{") + 1
    depth = 1
    while depth > 0 && cursor < remainder.length
      case remainder[cursor]
      when "}" then depth -= 1
      when "{" then depth += 1
      end
      cursor += 1
    end
    body = remainder[0...cursor]
    trailing = remainder[cursor..remainder.size]
    chunks += [[signature[0], "(" + signature[1], body], [trailing]]
  end
  chunks
end
# Rename function arguments and local variables.
# `data` is one function chunk from parse_functions: [signature-head,
# "(args)", "{body}"]; both the argument list and the body are mutated in
# place. Fresh names come from "a".."z" then "aa".."zz".
# NOTE(review): locals are extracted with split("=") / split(",") on the
# declaration statement — initializer expressions containing commas would
# confuse this; assumed not to occur in the shaders being minified.
def rename_function_locals(data, datatypes)
names = [*("a".."z"), *("aa".."zz")]
arguments = []
locals = []
# Grab all the argument names
data[1].scan(/(?:in\s+|out\s+)?(?:#{datatypes})\s+(\w+)/).each { |argument|
arguments += argument
}
# Short names must always come before longer names
arguments.sort!()
# Collect local declarations from the body (names only, initializers dropped).
data[2].scan(/(#{datatypes}) (.*?);/m).each { |local_list|
local_list[1].split("=")[0].split(",").each { |local|
locals += [local.strip()]
}
}
# Short names must always come before longer names
locals.sort!()
# Rename function arguments
arguments.each { |argument|
name = names.shift()
reg = /\b#{argument}\b/
data[1].sub!(reg, name)
data[2].gsub!(reg, name)
}
# Rename function locals
locals.each { |local|
data[2].gsub!(/\b#{local.strip()}\b/, names.shift())
}
end
# Removes useless whitespace from the source. Preprocessor lines (#...) keep
# their own line; everything else is packed together with the spaces around
# punctuation stripped. Returns a new string; the input is not mutated.
def remove_whitespace(oldsource)
  out = ""
  pending_newline = false
  oldsource.sub(/^\n+/, "").each_line do |line|
    line = line.strip().gsub(/\s{2,}|\t/, " ")
    if line[0] == "#"
      # A directive must start on its own line.
      out += "\n" if pending_newline
      out += line + "\n"
      pending_newline = false
    else
      out += line.sub("\n", "").gsub(/\s*({|}|=|\*|,|\+|\/|>|<|&|\||\[|\]|\(|\)|\-|!|;)\s*/, "\\1")
      pending_newline = true
    end
  end
  out.gsub(/\n+/, "\n")
end
# Recursively marks every function (transitively) referenced from the body of
# main_function.
#
# used_functions  - Hash of name => 1, mutated in place (doubles as the
#                   visited set).
# main_function   - a [signature-head, "(args)", "{body}"] chunk from
#                   parse_functions.
# function_chunks - Hash of function name => chunk for all non-main functions.
#
# Bug fix: the original recursed on every match unconditionally, so mutually
# recursive functions (f calls g, g calls f) recursed forever and raised
# SystemStackError. Skipping functions that are already marked keeps the
# resulting set identical while guaranteeing termination.
def get_used_functions_in_function(used_functions, main_function, function_chunks)
  function_chunks.each { |f|
    match = main_function[2][/\b#{f[0]}\b/]
    if match && !used_functions[match]
      used_functions[match] = 1
      get_used_functions_in_function(used_functions, function_chunks[match], function_chunks)
    end
  }
end
# Removes dead functions
# Parses every shader into chunks, indexes all non-main functions by name,
# collects the set reachable from any shader's main() (via
# get_used_functions_in_function), then rebuilds each shader keeping only
# main and the reachable functions. Mutates `shaders` in place via map!.
def remove_dead_functions(shaders, datatypes)
# NOTE(review): `functions` is never used below — dead local, kept as-is.
functions = []
used_functions = {"main" => 1}
function_chunks = {}
main_chunks = []
shaders.each { |shader|
parse_functions(shader, datatypes).each_with_index { |chunk, i|
# Odd indices are function chunks (see parse_functions).
if i % 2 != 0
# The function name is the second token of the signature head.
name = chunk[0].split(/\s+/)[1]
if name != "main"
function_chunks[name] = chunk
else
main_chunks.push(chunk)
end
end
}
}
# Mark everything transitively reachable from any main().
main_chunks.each { |main_function|
get_used_functions_in_function(used_functions, main_function, function_chunks)
}
# Rebuild each shader, dropping function chunks that were never marked.
shaders.map! { |shader|
source = ""
parse_functions(shader, datatypes).each_with_index { |chunk, i|
if i % 2 != 0
if used_functions[chunk[0].split(/\s+/)[1]]
source += chunk.join("")
end
else
source += chunk.join("")
end
}
source
}
end
# Renames function arguments and local variables of all functions.
# Rebuilds the source with every function chunk passed through
# rename_function_locals; passthrough chunks are copied verbatim.
def rename_locals(oldsource, datatypes)
  result = ""
  parse_functions(oldsource, datatypes).each_with_index do |chunk, i|
    rename_function_locals(chunk, datatypes) if i.odd?
    result += chunk.join("")
  end
  result
end
# Gets all the user defined type names.
# Returns an array of single-element capture arrays (String#scan shape).
def get_struct_names(data)
  struct_decl = /struct\s+(\w+)/
  data.scan(struct_decl)
end
# Gets all the defines and their values.
# Returns triples [full_line, name, stripped_value] for every #define.
def get_defines(data)
  data.scan(/(#define\s+(\w+)\s+([^\n]+))/).map do |line, name, value|
    [line, name, value.strip]
  end
end
# Gets the names of all functions.
# Matches "<datatype> <name>(" and returns the captured names (scan shape).
def get_function_names(data, datatypes)
  signature = /(?:#{datatypes})\s+(\w+)\s*\(/
  data.scan(signature)
end
# Gets the names of all variables with the given qualifier.
# Matches "<qualifier> <datatype> <name>;" and returns the captured names.
def get_variable_names(data, qualifier, datatypes)
  declaration = /#{qualifier}\s+(?:#{datatypes})\s+(\w+)\s*;/
  data.scan(declaration)
end
# Generate a old name to new name mapping and sort the names alphabetically.
# `data` is an array of single-element capture arrays (scan shape); fresh
# short names are consumed from `names` via shift. Returns sorted
# [old, new] pairs; "main" always keeps its own name.
def gen_map(data, names, rewrite)
  # Select new short names for all the functions
  mapping = data.uniq.map do |entry|
    if rewrite && entry[0] != "main"
      [entry[0], names.shift]
    else
      [entry[0], entry[0]]
    end
  end
  # Short names must always come before longer names
  mapping.sort_by { |old_name, _new_name| old_name }
end
# Generate a old name to new name mapping and sort the names alphabetically.
# Like gen_map, but returns a Hash of old name => new name instead of pairs,
# and renames every entry (including "main") when rewrite is set.
def gen_map_map(data, names, rewrite)
  # Select new short names for all the entries.
  pairs = data.uniq.map do |entry|
    rewrite ? [entry[0], names.shift] : [entry[0], entry[0]]
  end
  # Short names must always come before longer names
  pairs.sort_by { |old_name, _new_name| old_name }.to_h
end
# Rewrite tokens based on a map generated by gen_map.
# Replaces every whole-word occurrence of each old name in `data`
# (mutated in place) with its new name.
def rewrite_map(map, data)
  map.each do |old_name, new_name|
    data.gsub!(/\b#{old_name}\b/, new_name)
  end
end
# Regroups variable declarations so that declarations sharing a qualifier and
# datatype are emitted as one comma-separated statement.
# `list` holds [qualifier, datatype, name, rest] tuples (scan shape);
# insertion order of qualifiers/datatypes is preserved.
def group_list(list)
  grouped = {}
  list.each do |qualifier, datatype, name, rest|
    grouped[qualifier] ||= {}
    grouped[qualifier][datatype] ||= []
    grouped[qualifier][datatype].push([name, rest])
  end
  out = ""
  grouped.each do |qualifier, by_type|
    by_type.each do |datatype, decls|
      joined = decls.collect { |name, rest| "#{name}#{rest}" }.join(",")
      out += "#{qualifier} #{datatype} #{joined};"
    end
  end
  out
end
# Combines global variable declarations of the same qualifier/type into
# single comma-separated statements (via group_list). Declarations inside
# #if...#endif regions are grouped within their own region so conditional
# compilation is preserved. Returns the rebuilt source string.
def group_globals(data, datatypes)
source = ""
# All global variables in global scope can be combined, unless they exist in #ifdefs
outer = data.gsub(/(#if.*?#endif)/m, "")
source += group_list(outer.scan(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/))
data.split(/(#if.*?#endif)/m).each { |chunk|
# Do the same thing inside #ifdefs
if chunk[/uniform|attribute|varying|const/]
if chunk.start_with?("#if")
# Split the region into directive line / body / #endif, group the body.
tokens = chunk.split(/(.*?\n)(.*?)(#endif)/m)
source += tokens[1]
source += group_list(tokens[2].scan(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/))
source += tokens[2].gsub(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/, "")
source += tokens[3]
source += "\n"
else
# Outside #ifdefs the declarations were already emitted grouped above,
# so strip the originals from this chunk.
source += chunk.gsub(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/, "")
end
else
source += chunk
end
}
return source
end
# Minify shaders given in an array
# Pipeline: normalize comments/whitespace, collect every struct, function,
# variable and #define name across all shaders, drop functions unreachable
# from main(), then rename everything to short names, inline numeric
# #defines and #define-shorten frequent keywords.
# Returns [minified shaders, uniform+attribute name map, member name map].
def minify_sources(shaders, rewriteall)
# Pool of short replacement names, handed out in order via names.shift.
# NOTE(review): the "Aa".."Zz" and "Aa".."ZZ" ranges overlap, so the pool
# contains duplicate names -- verify renamed identifiers cannot collide.
names = [*("A".."Z"), *("AA".."ZZ"), *("aA".."zZ"), *("Aa".."Zz"), *("Aa".."ZZ"), *("A0".."Z9")]
# Regex fragments matching the built-in GLSL data types.
datatypes = ["float","double","u?int","void","bool","d?mat[2-4](?:x[2-4])?","[ibdu]?vec[2-4]","[iu]?(?:sampler|image)(?:[1-3]D|Cube|Buffer)(?:MSArray|MS|RectShadow|Rect|ArrayShadow|Shadow|Array)?"]
# NOTE(review): `permutations` is not defined anywhere in this file's
# visible scope (a later revision defines and calls
# gen_sizzle_permutations instead) -- verify this resolves at runtime.
sizzle_permutations = permutations(["x", "y", "z", "w"]) + permutations(["r", "g", "b", "a"]) + permutations(["s", "t", "p", "q"])
# GLSL keywords and builtins eligible for #define shortening.
# NOTE(review): entries like `"u","samplerCube"` (a split "usamplerCube"),
# "mageAtomicOr" and "tureOffset" look like typos in this list -- verify.
language_words = ["attribute","const","uniform","varying","buffer","shared","coherent","volatile","restrict","readonly","writeonly","atomic_uint","layout","centroid","flat","smooth","noperspective","patch","sample","break","continue","do","for","while","switch","case","default","if","else","subroutine","in","out","inout","float","double","int","void","bool","true","false","invariant","discard","return","mat2","mat3","mat4","dmat2","dmat3","dmat4","mat2x2","mat2x3","mat2x4","dmat2x2","dmat2x3","dmat2x4","mat3x2","mat3x3","mat3x4","dmat3x2","dmat3x3","dmat3x4","mat4x2","mat4x3","mat4x4","dmat4x2","dmat4x3","dmat4x4","vec2","vec3","vec4","ivec2","ivec3","ivec4","bvec2","bvec3","bvec4","dvec2","dvec3","dvec4","uint","uvec2","uvec3","uvec4","lowp","mediump","highp","precision","sampler1D","sampler2D","sampler3D","samplerCube","sampler1DShadow","sampler2DShadow","samplerCubeShadow","sampler1DArray","sampler2DArray","sampler1DArrayShadow","sampler2DArrayShadow","isampler1D","isampler2D","isampler3D","isamplerCube","isampler1DArray","isampler2DArray","usampler1D","usampler2D","usampler3D","u","samplerCube","usampler1DArray","usampler2DArray","sampler2DRect","sampler2DRectShadow","isampler2DRect","usampler2DRect","samplerBuffer","isamplerBuffer","usamplerBuffer","sampler2DMS","isampler2DMS","usampler2DMS","sampler2DMSArray","isampler2DMSArray","usampler2DMSArray","samplerCubeArray","samplerCubeArrayShadow","isamplerCubeArray","usamplerCubeArray","image1D","iimage1D","uimage1D","image2D","iimage2D","uimage2D","image3D","iimage3D","uimage3D","image2DRect","iimage2DRect","uimage2DRect","imageCube","iimageCube","uimageCube","imageBuffer","iimageBuffer","uimageBuffer","image1DArray","iimage1DArray","uimage1DArray","image2DArray","iimage2DArray","uimage2DArray","imageCubeArray","iimageCubeArray","uimageCubeArray","image2DMS","iimage2DMS","uimage2DMS","image2DMSArray","iimage2DMSArray","uimage2DMSArray","struct","gl_VertexID","gl_InstanceID","gl_PerVertex","gl_Position","gl_PointSi
ze","gl_ClipDistance","gl_PatchVerticesIn","gl_PrimitiveID","gl_InvocationID","gl_TessLevelOuter","gl_TessLevelInner","gl_TessCoord","gl_PrimitiveIDIn","gl_Layer","gl_ViewportIndex","gl_FragCoord","gl_FrontFacing","gl_PointCoord","gl_SampleID","gl_SamplePosition","gl_SampleMaskIn","gl_FragDepth","gl_SampleMask","gl_NumWorkGroups","gl_WorkGroupSize","gl_LocalGroupSize","gl_WorkGroupID","gl_LocalInvocationID","gl_GlobalInvocationID","gl_LocalInvocationIndex","gl_MaxComputeWorkGroupCount","gl_MaxComputeWorkGroupSize","gl_MaxComputeUniformComponents","gl_MaxComputeTextureImageUnits","gl_MaxComputeImageUniforms","gl_MaxComputeAtomicCounters","gl_MaxComputeAtomicCounterBuffers","gl_MaxVertexAttribs","gl_MaxVertexUniformComponents","gl_MaxVaryingComponents","gl_MaxVertexOutputComponents","gl_MaxGeometryInputComponents","gl_MaxGeometryOutputComponents","gl_MaxFragmentInputComponents","gl_MaxVertexTextureImageUnits","gl_MaxCombinedTextureImageUnits","gl_MaxTextureImageUnits","gl_MaxImageUnits","gl_MaxCombinedImageUnitsAndFragmentOutputs","gl_MaxImageSamples","gl_MaxVertexImageUniforms","gl_MaxTessControlImageUniforms","gl_MaxTessEvaluationImageUniforms","gl_MaxGeometryImageUniforms","gl_MaxFragmentImageUniforms","gl_MaxCombinedImageUniforms","gl_MaxFragmentUniformComponents","gl_MaxDrawBuffers","gl_MaxClipDistances","gl_MaxGeometryTextureImageUnits","gl_MaxGeometryOutputVertices","gl_MaxGeometryTotalOutputComponents","gl_MaxGeometryUniformComponents","gl_MaxGeometryVaryingComponents","gl_MaxTessControlInputComponents","gl_MaxTessControlOutputComponents","gl_MaxTessControlTextureImageUnits","gl_MaxTessControlUniformComponents","gl_MaxTessControlTotalOutputComponents","gl_MaxTessEvaluationInputComponents","gl_MaxTessEvaluationOutputComponents","gl_MaxTessEvaluationTextureImageUnits","gl_MaxTessEvaluationUniformComponents","gl_MaxTessPatchComponents","gl_MaxPatchVertices","gl_MaxTessGenLevel","gl_MaxViewports","gl_MaxVertexUniformVectors","gl_MaxFragmentUniformVectors","gl_MaxV
aryingVectors","gl_MaxVertexAtomicCounters","gl_MaxTessControlAtomicCounters","gl_MaxTessEvaluationAtomicCounters","gl_MaxGeometryAtomicCounters","gl_MaxFragmentAtomicCounters","gl_MaxCombinedAtomicCounters","gl_MaxAtomicCounterBindings","gl_MaxVertexAtomicCounterBuffers","gl_MaxTessControlAtomicCounterBuffers","gl_MaxTessEvaluationAtomicCounterBuffers","gl_MaxGeometryAtomicCounterBuffers","gl_MaxFragmentAtomicCounterBuffers","gl_MaxCombinedAtomicCounterBuffers","gl_MaxAtomicCounterBufferSize","gl_MinProgramTexelOffset","gl_MaxProgramTexelOffset","gl_MaxTransformFeedbackBuffers","gl_MaxTransformFeedbackInterleavedComponents","radians","degrees","sin","cos","tan","asin","acos","atan","sinh","cosh","tanh","asinh","acosh","atanh","pow","exp","log","exp2","log2","sqrt","inversesqrt","abs","sign","floor","trunc","round","roundEven","ceil","fract","mod","modf","min","max","clamp","mix","step","smoothstep","isnan","isinf","floatBitsToInt","floatBitsToUint","intBitsToFloat","uintBitsToFloat","fma","frexp","ldexp","packUnorm2x16","packSnorm2x16","packUnorm4x8","packSnorm4x8","unpackUnorm2x16","unpackSnorm2x16","unpackUnorm4x8","unpackSnorm4x8","packDouble2x32","unpackDouble2x32","packHalf2x16","unpackHalf2x16","length","distance","dot","cross","normalize","faceforward","reflect","refract","matrixCompMult","outerProduct","transpose","determinant","inverse","lessThan","lessThanEqual","greaterThan","greaterThanEqual","equal","notEqual","any","all","not","uaddCarry","usubBorrow","umulExtended","imulExtended","bitfieldExtract","bitfieldReverse","bitfieldInsert","bitCount","findLSB","findMSB","atomicCounterIncrement","atomicCounterDecrement","atomicCounter","atomicOP","imageSize","imageLoad","imageStore","imageAtomicAdd","imageAtomicMin","imageAtomicMax","imageAtomicAnd","mageAtomicOr","imageAtomicXor","imageAtomicExchange","imageAtomicCompSwap","dFdx","dFdy","fwidth","interpolateAtCentroid","interpolateAtSample","interpolateAtOffset","noise1","noise2","noise3","noise4","EmitStrea
mVertex","EndStreamPrimitive","EmitVertex","EndPrimitive","barrier","memoryBarrier","groupMemoryBarrier","memoryBarrierAtomicCounter","memoryBarrierShared","memoryBarrierBuffer","memoryBarrierImage","textureSize","textureQueryLod","textureQueryLevels","texture","textureProj","textureLod","tureOffset","texelFetch","texelFetchOffset","textureProjOffset","textureLodOffset","textureProjLod","textureProjLodOffset","textureGrad","textureGradOffset","textureProjGrad","textureProjGradOffset","textureGather","textureGatherOffset","textureGatherOffsets","texture2D","texture2DProj","texture2DLod","texture2DProjLod","textureCube","textureCubeLod"] + sizzle_permutations
structs = []
defines = []
functions = []
uniforms = []
attributes = []
varyings = []
constants = [];
members = [];
# Normalize each shader: strip comments first, then collapse whitespace.
shaders.map! { |shader|
remove_comments(shader)
remove_whitespace(shader)
}
# Get struct names
shaders.each { |shader|
structs += get_struct_names(shader)
}
# Create a regex of all the known data types
datatypes_string = datatypes.concat(structs).join("|")
# Remove dead functions that are not in the call graph of the main() function in any of the inputs
remove_dead_functions(shaders, datatypes_string)
# Get all function/uniform/attribute/varying names, and define names and their values
shaders.each { |shader|
functions += get_function_names(shader, datatypes_string)
uniforms += get_variable_names(shader, "uniform", datatypes_string)
attributes += get_variable_names(shader, "attribute", datatypes_string)
varyings += get_variable_names(shader, "varying", datatypes_string)
constants += get_variable_names(shader, "const", datatypes_string)
defines += get_defines(shader)
members += get_member_names(shader, datatypes_string);
}
# Build old->new name maps. Uniforms and attributes are part of the
# host-program interface, so they are only renamed when requested.
function_map = gen_map(functions, names, true)
struct_map = gen_map(structs, names, true)
uniform_map = gen_map(uniforms, names, rewriteall)
attribute_map = gen_map(attributes, names, rewriteall)
varyings_map = gen_map(varyings, names, true)
constants_map = gen_map(constants, names, true)
member_map = gen_map_map(members, [*("a".."z"), *("A".."Z"), *("aa".."zz")], true)
# Pair every language word with a candidate short #define alias.
language_words = language_words.uniq().map { |v|
[v, names.shift()]
}
# Preprocess #defines to prepare them for inlining
preprocess_defines(defines)
shaders.map! { |shader|
# Inline #defines
inline_defines(defines, shader)
shader = group_globals(shader, datatypes_string)
# Rewrite function names
function_map.each { |function|
shader.gsub!(/\b#{function[0]}\b/, function[1])
}
# Rename function arguments and local variables
shader = rename_locals(shader, datatypes_string)
# Rewrite user defined type names
struct_map.each { |struct|
shader.gsub!(/\b#{struct[0]}\b/, struct[1])
}
# Rewrite uniform names
rewrite_map(uniform_map, shader)
# Rewrite attribute names
rewrite_map(attribute_map, shader)
# Rewrite varying names
rewrite_map(varyings_map, shader)
# Rewrite constant names
rewrite_map(constants_map, shader)
# Rewrite struct member names
shader = rename_members(shader, member_map, datatypes_string)
# Rewrite numbers
rewrite_numbers(shader)
# Alias frequent keywords via #define where it saves bytes
shader = add_defines(shader, language_words)
shader = remove_whitespace(shader).gsub("\n", "\\n")
# If the first line of a shader is a pre-processor directive, it will cause an error when concatenating it, so add a new line
if shader[0] == "#"
shader = "\\n" + shader
end
shader
}
# Callers use the uniform/attribute map to address renamed variables from
# the host program, and the member map for struct field access.
return [shaders, uniform_map.concat(attribute_map), member_map]
end
# Read the files at +paths+ and minify their contents together as one unit.
def minify_files(paths, rewriteall)
  sources = paths.map { |path| IO.read(path) }
  minify_sources(sources, rewriteall)
end
Small fix: call the new gen_sizzle_permutations helper instead of the undefined `permutations`, pass `rewriteall` through to the struct-member map, and return the uniform/attribute mapping as a hash.
# Copyright (c) 2013 Chananya Freiman (aka GhostWolf)
# Generates all vector-swizzle combinations of length 2-4 over the three
# GLSL component sets (xyzw, rgba, stpq), e.g. xx xy xz xw yx yy ...
def gen_sizzle_permutations()
  component_sets = [["x", "y", "z", "w"], ["r", "g", "b", "a"], ["s", "t", "p", "q"]]
  result = []
  (2..4).each do |length|
    component_sets.each do |set|
      result.concat(set.repeated_permutation(length).to_a.map { |combo| combo.join("") })
    end
  end
  result
end
# Rename language keywords/types via #define aliases, but only where the
# alias plus its #define line is shorter than the repeated original word.
# +language_words+ is a list of [word, short_alias] pairs; +data+ is
# mutated in place and the (possibly prefixed) source is returned.
def add_defines(data, language_words)
  emitted = []
  language_words.each do |original, short|
    count = data.scan(/\b#{original}\b/).length
    cost_without = count * original.length
    next unless cost_without > 0
    define_line = "#define #{short} #{original}\n"
    cost_with = define_line.length + count * short.length
    if cost_with < cost_without
      emitted.push("#define #{short} #{original}")
      data.gsub!(/\b#{original}\b/, short)
    end
  end
  "\n" + emitted.join("\n") + "\n" + data
end
# Evaluate #define values so they can later be inlined as plain numbers.
# Each define is [full_line, name, raw_value]; a successfully evaluated
# value is stored into v[3] and remembered in +rewrites+ for the second
# pass. Defines whose values do not evaluate are left untouched.
def preprocess_defines(defines)
rewrites = {}
# Get inline values for simple number equation #defines
# E.g.: 5 => 5, 5*2 => 10
defines.each { |v|
begin
# NOTE(review): eval on shader-supplied text executes arbitrary Ruby;
# acceptable only if this tool is never fed untrusted shaders -- verify.
n = eval(v[2]).to_s()
rewrites[v[1]] = n
v[3] = n
rescue
# Not a pure numeric expression; skip it (may resolve in the second pass)
end
}
# Get inline values for #define equations that have the previously inlined #defines in their values
# E.g.: N/2 => 5/2 => 2.5, assuming N was inlined as 5
defines.each { |v|
if not v[3]
begin
s = v[2]
rewrites.each { |k, n|
s.gsub!(/\b#{k}\b/, n)
}
n = eval(s).to_s()
v[3] = n
rescue
end
end
}
end
# Inline every #define whose value was computed by preprocess_defines
# (stored at index 3). The #define lines themselves are removed first,
# then each use of the name is replaced by its value. +data+ is mutated.
def inline_defines(defines, data)
  defines.each { |define| data.sub!(define[0], "") if define[3] }
  defines.each { |define| data.gsub!(/\b#{define[1]}\b/, define[3]) if define[3] }
end
# Normalize and shorten numeric literals in +data+ in place.
# Fixes over the previous revision:
# - leading-zero strip was unanchored and turned "250.0" into "25.0";
# - trailing-zero strip had no lookahead and turned "1.001" into "1.1";
# - exponent rewrite emitted e.size-1 ("10000" -> "1e3" == 1000) and could
#   fire inside floats/identifiers; it now only rewrites whole integers.
def rewrite_numbers(data)
  # Convert hexadecimal numbers to decimal (String#to_i(16) accepts the 0x prefix)
  data.gsub!(/0x[0-9a-fA-F]+/) { |literal|
    literal.to_i(16)
  }
  # Canonicalize int/float literals (e.g. "007" -> "7", "2.50" -> "2.5")
  data.gsub!(/\b\d*\.?\d+\b/) { |literal|
    literal["."] ? literal.to_f().to_s() : literal.to_i().to_s()
  }
  # Strip redundant leading zeroes before a decimal point ("0.5" -> ".5");
  # the \b keeps zeroes that belong to a larger number ("250.0") intact.
  data.gsub!(/\b0+(\.\d+)/, "\\1")
  # Strip redundant trailing zeroes after a decimal point ("1.000" -> "1.");
  # the lookahead keeps interior zeroes ("1.001") intact.
  data.gsub!(/(\d+\.)0+(?!\d)/, "\\1")
  # Rewrite whole integers with 3+ trailing zeroes in exponent form
  # ("10000" -> "1e4"); the trailing guard skips floats like "1000.".
  data.gsub!(/\b(\d+?)(0+)\b(?!\.)/) {
    mantissa = $1
    zeros = $2
    zeros.size > 2 ? "#{mantissa}e#{zeros.size}" : mantissa + zeros
  }
end
# Strip // line comments and /* */ block comments from +source+ in place.
def remove_comments(source)
  source.gsub!(%r{//.*}, "")
  source.gsub!(%r{/\*.*?\*/}m, "")
end
# Split +data+ into alternating [other, struct-definition, other, ...]
# chunks; the struct definitions are kept because the split pattern is
# captured. (+datatypes+ is accepted for interface parity but unused.)
def parse_structs(data, datatypes)
  struct_pattern = /(struct\s+(?:\w+)\s*{.*?}\s*;)/m
  data.split(struct_pattern)
end
# Replace the first whole-word occurrence of each mapped member name in
# +chunk+ (one struct definition) with its short name. +map+ is a Hash
# of old name => new name; +datatypes+ is unused here.
def rename_struct_members(chunk, map, datatypes)
  map.each_key do |old_name|
    chunk.sub!(/\b#{old_name}\b/, map[old_name])
  end
end
# Rename the members of all structs
# Walks the source struct-by-struct (odd chunks from parse_structs are
# struct definitions), renames members per +map+ (old => new name Hash),
# groups member declarations of the same type into comma lists, and
# finally rewrites ".member" accesses throughout the whole source.
def rename_members(data, map, datatypes)
source = ""
parse_structs(data, datatypes).each_with_index { |chunk, i|
if i % 2 != 0
# Rename the members
rename_struct_members(chunk, map, datatypes)
# tokens[1] = "struct Name{", tokens[2] = member block, tokens[3] = "};"
tokens = chunk.split(/(struct \w+{)(.*?)(};)/m)
source += tokens[1]
# Members outside #if regions can all be combined
outer = tokens[2].gsub(/(#if.*?#endif)/m, "")
source += group_list(outer.scan(/()(#{datatypes})\s+(\w+)(.*?);/))
# NOTE: the inner block intentionally shadows `chunk` and `tokens`
tokens[2].split(/(#ifdef.*?#endif)/m).each { |chunk|
# Do the same thing inside #ifdefs
if chunk.start_with?("#if")
tokens = chunk.split(/(#ifdef.*?\n)(.*?)(#endif)/m)
source += tokens[1]
source += group_list(tokens[2].scan(/()(#{datatypes})\s+(\w+)(.*?);/))
source += tokens[2].gsub(/(#{datatypes})\s+(\w+)(.*?);/, "")
source += tokens[3]
source += "\n"
else
# Unguarded members were already emitted via `outer`; drop them here
source += chunk.gsub(/(#{datatypes})\s+(\w+)(.*?);/, "")
end
}
source += "};"
else
source += chunk
end
}
# Rewrite member accesses (".old" -> ".new") everywhere in the source
map.each { |v|
source.gsub!(/\.\b#{v[0]}\b/, ".#{map[v[0]]}")
}
return source
end
# Collect the declared member names of every struct in +data+.
# Returns an array of single-element arrays, matching String#scan output.
def get_member_names(data, datatypes)
  struct_matches = data.scan(/(struct\s+(?:\w+)\s*{.*?}\s*;)/m)
  struct_matches.collect { |struct_match|
    struct_match[0].scan(/(?:#{datatypes})\s+(\w+)\s*;/)
  }.flatten(1)
end
# Split the source into function/other chunks
# Returns alternating chunks: even entries are one-element arrays of
# non-function text, odd entries are [signature-head, "(args)", "{body}"]
# triples. The body is delimited by counting braces, so nested blocks
# are handled.
# NOTE(review): assumes the argument list itself contains no "(" -- the
# signature regex and the head split below would both misfire; verify.
def parse_functions(source, datatypes)
pass = source.split(/((?:#{datatypes})\s+\w+\s*\(.*?\))/)
chunks = [[pass[0]]]
(1...pass.size).step(2) { |i|
# pass[i] is "<type> <name>(<args>)": split into head and argument list
head = pass[i].split(/\(/)
body_with_extra = pass[i + 1]
start = body_with_extra.index("{")
index = start + 1
level = 1
# Scan forward until the body's opening brace is balanced again
while level > 0 and index < body_with_extra.length do
char = body_with_extra[index]
if char == "}"
level -= 1
elsif char == "{"
level += 1
end
index += 1
end
body = body_with_extra[0...index]
extra = body_with_extra[index..body_with_extra.size]
chunks += [[head[0], "(" + head[1], body], [extra]]
}
return chunks
end
# Rename function arguments and local variables.
# +data+ is an odd chunk from parse_functions
# ([signature-head, "(args)", "{body}"]) and is mutated in place.
def rename_function_locals(data, datatypes)
names = [*("a".."z"), *("aa".."zz")]
arguments = []
locals = []
# Grab all the argument names
data[1].scan(/(?:in\s+|out\s+)?(?:#{datatypes})\s+(\w+)/).each { |argument|
arguments += argument
}
# Short names must always come before longer names
arguments.sort!()
# Collect local declarations from the body; a declaration may list
# several comma-separated names and carry an initializer.
# NOTE(review): split("=")[0] keeps only names before the FIRST "=", so
# "float a=1.0,b;" loses "b" -- verify multi-declarations with
# initializers are not expected here.
data[2].scan(/(#{datatypes}) (.*?);/m).each { |local_list|
local_list[1].split("=")[0].split(",").each { |local|
locals += [local.strip()]
}
}
# Short names must always come before longer names
locals.sort!()
# Rename function arguments
arguments.each { |argument|
name = names.shift()
reg = /\b#{argument}\b/
data[1].sub!(reg, name)
data[2].gsub!(reg, name)
}
# Rename function locals
locals.each { |local|
data[2].gsub!(/\b#{local.strip()}\b/, names.shift())
}
end
# Removes useless whitespace from the source
# Collapses runs of spaces/tabs, strips spacing around punctuation and
# operators, and joins code onto as few lines as possible while keeping
# preprocessor directives on their own lines. Returns a new string.
def remove_whitespace(oldsource)
need_newline = false
source = ""
oldsource = oldsource.sub(/^\n+/, "")
oldsource.each_line { |line|
line = line.strip().gsub(/\s{2,}|\t/, " ")
if line[0] == "#"
# Preprocessor directives must start at the beginning of a line
if need_newline
source += "\n"
end
source += line + "\n"
need_newline = false
else
# Regular code: drop the line break and tighten spacing around punctuation
source += line.sub("\n", "").gsub(/\s*({|}|=|\*|,|\+|\/|>|<|&|\||\[|\]|\(|\)|\-|!|;)\s*/, "\\1")
need_newline = true
end
}
return source.gsub(/\n+/, "\n")
end
# Walk the call graph starting at +main_function+ (a parse_functions
# chunk), marking in +used_functions+ every function whose name appears
# in the body, and transitively in the bodies of those callees.
# +function_chunks+ maps function name => parse_functions chunk.
def get_used_functions_in_function(used_functions, main_function, function_chunks)
  function_chunks.each { |name, chunk|
    if main_function[2][/\b#{name}\b/]
      # Skip functions that are already marked: this avoids redundant
      # re-walks and guards against infinite recursion when shaders
      # contain (invalid) cyclic call graphs. The resulting set is
      # unchanged, since a function's callees are marked the first time
      # its body is walked.
      next if used_functions[name]
      used_functions[name] = 1
      get_used_functions_in_function(used_functions, chunk, function_chunks)
    end
  }
end
# Removes dead functions
# A function is "dead" when it is unreachable from any main() across all
# supplied shaders. Reachability is computed with
# get_used_functions_in_function; each shader is then rebuilt keeping
# only reachable functions. +shaders+ is mutated in place (map!).
def remove_dead_functions(shaders, datatypes)
# NOTE(review): `functions` is assigned but never used below
functions = []
used_functions = {"main" => 1}
function_chunks = {}
main_chunks = []
shaders.each { |shader|
parse_functions(shader, datatypes).each_with_index { |chunk, i|
if i % 2 != 0
# Odd chunks are functions: [head, "(args)", "{body}"];
# the name is the second token of the head ("<type> <name>")
name = chunk[0].split(/\s+/)[1]
if name != "main"
function_chunks[name] = chunk
else
main_chunks.push(chunk)
end
end
}
}
# Mark everything reachable from any shader's main()
main_chunks.each { |main_function|
get_used_functions_in_function(used_functions, main_function, function_chunks)
}
shaders.map! { |shader|
source = ""
parse_functions(shader, datatypes).each_with_index { |chunk, i|
if i % 2 != 0
# Keep a function chunk only if it was marked as used
if used_functions[chunk[0].split(/\s+/)[1]]
source += chunk.join("")
end
else
source += chunk.join("")
end
}
source
}
end
# Renames the arguments and local variables of every function in the
# source (odd chunks from parse_functions are functions; even chunks are
# the text between them). Returns the rebuilt source.
def rename_locals(oldsource, datatypes)
  result = ""
  parse_functions(oldsource, datatypes).each_with_index do |chunk, index|
    rename_function_locals(chunk, datatypes) if index.odd?
    result += chunk.join("")
  end
  result
end
# Gets all the user-defined type names, e.g. "struct Foo" -> [["Foo"]].
def get_struct_names(data)
  struct_pattern = /struct\s+(\w+)/
  data.scan(struct_pattern)
end
# Gets all #defines as [full_line, name, stripped_value] triples.
def get_defines(data)
  data.scan(/(#define\s+(\w+)\s+([^\n]+))/).map do |full, name, value|
    [full, name, value.strip]
  end
end
# Gets the names of all functions: an identifier following a known
# return type and followed by an opening parenthesis.
def get_function_names(data, datatypes)
  signature = /(?:#{datatypes})\s+(\w+)\s*\(/
  data.scan(signature)
end
# Gets the names of all variables declared with the given qualifier
# (e.g. "uniform float u_time;" with qualifier "uniform" -> [["u_time"]]).
def get_variable_names(data, qualifier, datatypes)
  declaration = /#{qualifier}\s+(?:#{datatypes})\s+(\w+)\s*;/
  data.scan(declaration)
end
# Generate an old-name => new-name pair list sorted by the old name.
# "main" is never renamed; when +rewrite+ is false nothing is renamed.
def gen_map(data, names, rewrite)
  data.uniq.collect { |entry|
    old = entry[0]
    renamed = rewrite && old != "main"
    [old, renamed ? names.shift : old]
  }.sort { |left, right| left[0] <=> right[0] }
end
# Like gen_map but the result is a Hash of old name => new name, built
# in alphabetical order of the old names ("main" gets no special casing).
def gen_map_map(data, names, rewrite)
  sorted_pairs = data.uniq.collect { |entry|
    [entry[0], rewrite ? names.shift : entry[0]]
  }.sort { |left, right| left[0] <=> right[0] }
  result = {}
  sorted_pairs.each { |old, fresh| result[old] = fresh }
  result
end
# Rewrite tokens based on a map generated by gen_map: every whole-word
# occurrence of an old name in +data+ becomes the mapped new name.
def rewrite_map(map, data)
  map.each { |old, fresh| data.gsub!(/\b#{old}\b/, fresh) }
end
# Merge declarations sharing a qualifier and data type into one
# comma-separated statement each, e.g. two "uniform float" entries
# become "uniform float a,b;". Entries: [qualifier, datatype, name, trailer].
def group_list(list)
  buckets = Hash.new
  list.each do |qualifier, datatype, name, trailer|
    buckets[qualifier] ||= Hash.new
    (buckets[qualifier][datatype] ||= []) << [name, trailer]
  end
  result = ""
  buckets.each do |qualifier, per_type|
    per_type.each do |datatype, entries|
      joined = entries.map { |name, trailer| "#{name}#{trailer}" }.join(",")
      result << "#{qualifier} #{datatype} #{joined};"
    end
  end
  result
end
# Merge global variable declarations (uniform/attribute/varying/const)
# of the same qualifier and type into single comma-separated statements.
# Declarations inside #if...#endif regions are grouped per region so a
# conditional declaration never escapes its guard.
def group_globals(data, datatypes)
source = ""
# All global variables in global scope can be combined, unless they exist in #ifdefs
outer = data.gsub(/(#if.*?#endif)/m, "")
source += group_list(outer.scan(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/))
data.split(/(#if.*?#endif)/m).each { |chunk|
# Do the same thing inside #ifdefs
if chunk[/uniform|attribute|varying|const/]
if chunk.start_with?("#if")
# tokens[1] = the "#if..." line, tokens[2] = guarded body, tokens[3] = "#endif"
tokens = chunk.split(/(.*?\n)(.*?)(#endif)/m)
source += tokens[1]
source += group_list(tokens[2].scan(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/))
# Keep the guarded body's non-declaration text; its declarations were grouped above
source += tokens[2].gsub(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/, "")
source += tokens[3]
source += "\n"
else
# Unguarded chunk: its declarations were already emitted via `outer`, so drop them here
source += chunk.gsub(/(uniform|attribute|varying|const) (#{datatypes}) (\w+)(.*?);/, "")
end
else
source += chunk
end
}
return source
end
# Minify shaders given in an array
# Pipeline: normalize comments/whitespace, collect every struct, function,
# variable and #define name across all shaders, drop functions unreachable
# from main(), then rename everything to short names, inline numeric
# #defines and #define-shorten frequent keywords.
# Returns [minified shaders, uniform+attribute old=>new hash, member map].
def minify_sources(shaders, rewriteall)
# Pool of short replacement names, handed out in order via names.shift.
# NOTE(review): the "Aa".."Zz" and "Aa".."ZZ" ranges overlap, so the pool
# contains duplicate names -- verify renamed identifiers cannot collide.
names = [*("A".."Z"), *("AA".."ZZ"), *("aA".."zZ"), *("Aa".."Zz"), *("Aa".."ZZ"), *("A0".."Z9")]
# Regex fragments matching the built-in GLSL data types.
datatypes = ["float","double","u?int","void","bool","d?mat[2-4](?:x[2-4])?","[ibdu]?vec[2-4]","[iu]?(?:sampler|image)(?:[1-3]D|Cube|Buffer)(?:MSArray|MS|RectShadow|Rect|ArrayShadow|Shadow|Array)?"]
# GLSL keywords and builtins eligible for #define shortening.
# NOTE(review): entries like `"u","samplerCube"` (a split "usamplerCube"),
# "mageAtomicOr" and "tureOffset" look like typos in this list -- verify.
language_words = ["attribute","const","uniform","varying","buffer","shared","coherent","volatile","restrict","readonly","writeonly","atomic_uint","layout","centroid","flat","smooth","noperspective","patch","sample","break","continue","do","for","while","switch","case","default","if","else","subroutine","in","out","inout","float","double","int","void","bool","true","false","invariant","discard","return","mat2","mat3","mat4","dmat2","dmat3","dmat4","mat2x2","mat2x3","mat2x4","dmat2x2","dmat2x3","dmat2x4","mat3x2","mat3x3","mat3x4","dmat3x2","dmat3x3","dmat3x4","mat4x2","mat4x3","mat4x4","dmat4x2","dmat4x3","dmat4x4","vec2","vec3","vec4","ivec2","ivec3","ivec4","bvec2","bvec3","bvec4","dvec2","dvec3","dvec4","uint","uvec2","uvec3","uvec4","lowp","mediump","highp","precision","sampler1D","sampler2D","sampler3D","samplerCube","sampler1DShadow","sampler2DShadow","samplerCubeShadow","sampler1DArray","sampler2DArray","sampler1DArrayShadow","sampler2DArrayShadow","isampler1D","isampler2D","isampler3D","isamplerCube","isampler1DArray","isampler2DArray","usampler1D","usampler2D","usampler3D","u","samplerCube","usampler1DArray","usampler2DArray","sampler2DRect","sampler2DRectShadow","isampler2DRect","usampler2DRect","samplerBuffer","isamplerBuffer","usamplerBuffer","sampler2DMS","isampler2DMS","usampler2DMS","sampler2DMSArray","isampler2DMSArray","usampler2DMSArray","samplerCubeArray","samplerCubeArrayShadow","isamplerCubeArray","usamplerCubeArray","image1D","iimage1D","uimage1D","image2D","iimage2D","uimage2D","image3D","iimage3D","uimage3D","image2DRect","iimage2DRect","uimage2DRect","imageCube","iimageCube","uimageCube","imageBuffer","iimageBuffer","uimageBuffer","image1DArray","iimage1DArray","uimage1DArray","image2DArray","iimage2DArray","uimage2DArray","imageCubeArray","iimageCubeArray","uimageCubeArray","image2DMS","iimage2DMS","uimage2DMS","image2DMSArray","iimage2DMSArray","uimage2DMSArray","struct","gl_VertexID","gl_InstanceID","gl_PerVertex","gl_Position","gl_PointSi
ze","gl_ClipDistance","gl_PatchVerticesIn","gl_PrimitiveID","gl_InvocationID","gl_TessLevelOuter","gl_TessLevelInner","gl_TessCoord","gl_PrimitiveIDIn","gl_Layer","gl_ViewportIndex","gl_FragCoord","gl_FrontFacing","gl_PointCoord","gl_SampleID","gl_SamplePosition","gl_SampleMaskIn","gl_FragDepth","gl_SampleMask","gl_NumWorkGroups","gl_WorkGroupSize","gl_LocalGroupSize","gl_WorkGroupID","gl_LocalInvocationID","gl_GlobalInvocationID","gl_LocalInvocationIndex","gl_MaxComputeWorkGroupCount","gl_MaxComputeWorkGroupSize","gl_MaxComputeUniformComponents","gl_MaxComputeTextureImageUnits","gl_MaxComputeImageUniforms","gl_MaxComputeAtomicCounters","gl_MaxComputeAtomicCounterBuffers","gl_MaxVertexAttribs","gl_MaxVertexUniformComponents","gl_MaxVaryingComponents","gl_MaxVertexOutputComponents","gl_MaxGeometryInputComponents","gl_MaxGeometryOutputComponents","gl_MaxFragmentInputComponents","gl_MaxVertexTextureImageUnits","gl_MaxCombinedTextureImageUnits","gl_MaxTextureImageUnits","gl_MaxImageUnits","gl_MaxCombinedImageUnitsAndFragmentOutputs","gl_MaxImageSamples","gl_MaxVertexImageUniforms","gl_MaxTessControlImageUniforms","gl_MaxTessEvaluationImageUniforms","gl_MaxGeometryImageUniforms","gl_MaxFragmentImageUniforms","gl_MaxCombinedImageUniforms","gl_MaxFragmentUniformComponents","gl_MaxDrawBuffers","gl_MaxClipDistances","gl_MaxGeometryTextureImageUnits","gl_MaxGeometryOutputVertices","gl_MaxGeometryTotalOutputComponents","gl_MaxGeometryUniformComponents","gl_MaxGeometryVaryingComponents","gl_MaxTessControlInputComponents","gl_MaxTessControlOutputComponents","gl_MaxTessControlTextureImageUnits","gl_MaxTessControlUniformComponents","gl_MaxTessControlTotalOutputComponents","gl_MaxTessEvaluationInputComponents","gl_MaxTessEvaluationOutputComponents","gl_MaxTessEvaluationTextureImageUnits","gl_MaxTessEvaluationUniformComponents","gl_MaxTessPatchComponents","gl_MaxPatchVertices","gl_MaxTessGenLevel","gl_MaxViewports","gl_MaxVertexUniformVectors","gl_MaxFragmentUniformVectors","gl_MaxV
aryingVectors","gl_MaxVertexAtomicCounters","gl_MaxTessControlAtomicCounters","gl_MaxTessEvaluationAtomicCounters","gl_MaxGeometryAtomicCounters","gl_MaxFragmentAtomicCounters","gl_MaxCombinedAtomicCounters","gl_MaxAtomicCounterBindings","gl_MaxVertexAtomicCounterBuffers","gl_MaxTessControlAtomicCounterBuffers","gl_MaxTessEvaluationAtomicCounterBuffers","gl_MaxGeometryAtomicCounterBuffers","gl_MaxFragmentAtomicCounterBuffers","gl_MaxCombinedAtomicCounterBuffers","gl_MaxAtomicCounterBufferSize","gl_MinProgramTexelOffset","gl_MaxProgramTexelOffset","gl_MaxTransformFeedbackBuffers","gl_MaxTransformFeedbackInterleavedComponents","radians","degrees","sin","cos","tan","asin","acos","atan","sinh","cosh","tanh","asinh","acosh","atanh","pow","exp","log","exp2","log2","sqrt","inversesqrt","abs","sign","floor","trunc","round","roundEven","ceil","fract","mod","modf","min","max","clamp","mix","step","smoothstep","isnan","isinf","floatBitsToInt","floatBitsToUint","intBitsToFloat","uintBitsToFloat","fma","frexp","ldexp","packUnorm2x16","packSnorm2x16","packUnorm4x8","packSnorm4x8","unpackUnorm2x16","unpackSnorm2x16","unpackUnorm4x8","unpackSnorm4x8","packDouble2x32","unpackDouble2x32","packHalf2x16","unpackHalf2x16","length","distance","dot","cross","normalize","faceforward","reflect","refract","matrixCompMult","outerProduct","transpose","determinant","inverse","lessThan","lessThanEqual","greaterThan","greaterThanEqual","equal","notEqual","any","all","not","uaddCarry","usubBorrow","umulExtended","imulExtended","bitfieldExtract","bitfieldReverse","bitfieldInsert","bitCount","findLSB","findMSB","atomicCounterIncrement","atomicCounterDecrement","atomicCounter","atomicOP","imageSize","imageLoad","imageStore","imageAtomicAdd","imageAtomicMin","imageAtomicMax","imageAtomicAnd","mageAtomicOr","imageAtomicXor","imageAtomicExchange","imageAtomicCompSwap","dFdx","dFdy","fwidth","interpolateAtCentroid","interpolateAtSample","interpolateAtOffset","noise1","noise2","noise3","noise4","EmitStrea
mVertex","EndStreamPrimitive","EmitVertex","EndPrimitive","barrier","memoryBarrier","groupMemoryBarrier","memoryBarrierAtomicCounter","memoryBarrierShared","memoryBarrierBuffer","memoryBarrierImage","textureSize","textureQueryLod","textureQueryLevels","texture","textureProj","textureLod","tureOffset","texelFetch","texelFetchOffset","textureProjOffset","textureLodOffset","textureProjLod","textureProjLodOffset","textureGrad","textureGradOffset","textureProjGrad","textureProjGradOffset","textureGather","textureGatherOffset","textureGatherOffsets","texture2D","texture2DProj","texture2DLod","texture2DProjLod","textureCube","textureCubeLod"] + gen_sizzle_permutations()
structs = []
defines = []
functions = []
uniforms = []
attributes = []
varyings = []
constants = [];
members = [];
# Normalize each shader: strip comments first, then collapse whitespace.
shaders.map! { |shader|
remove_comments(shader)
remove_whitespace(shader)
}
# Get struct names
shaders.each { |shader|
structs += get_struct_names(shader)
}
# Create a regex of all the known data types
datatypes_string = datatypes.concat(structs).join("|")
# Remove dead functions that are not in the call graph of the main() function in any of the inputs
remove_dead_functions(shaders, datatypes_string)
# Get all function/uniform/attribute/varying names, and define names and their values
shaders.each { |shader|
functions += get_function_names(shader, datatypes_string)
uniforms += get_variable_names(shader, "uniform", datatypes_string)
attributes += get_variable_names(shader, "attribute", datatypes_string)
varyings += get_variable_names(shader, "varying", datatypes_string)
constants += get_variable_names(shader, "const", datatypes_string)
defines += get_defines(shader)
members += get_member_names(shader, datatypes_string);
}
# Build old->new name maps. Uniforms, attributes and struct members are
# part of the host-program interface, so they are only renamed on request.
function_map = gen_map(functions, names, true)
struct_map = gen_map(structs, names, true)
uniform_map = gen_map(uniforms, names, rewriteall)
attribute_map = gen_map(attributes, names, rewriteall)
varyings_map = gen_map(varyings, names, true)
constants_map = gen_map(constants, names, true)
member_map = gen_map_map(members, [*("a".."z"), *("A".."Z"), *("aa".."zz")], rewriteall)
# Pair every language word with a candidate short #define alias.
language_words = language_words.uniq().map { |v|
[v, names.shift()]
}
# Preprocess #defines to prepare them for inlining
preprocess_defines(defines)
shaders.map! { |shader|
# Inline #defines
inline_defines(defines, shader)
shader = group_globals(shader, datatypes_string)
# Rewrite function names
function_map.each { |function|
shader.gsub!(/\b#{function[0]}\b/, function[1])
}
# Rename function arguments and local variables
shader = rename_locals(shader, datatypes_string)
# Rewrite user defined type names
struct_map.each { |struct|
shader.gsub!(/\b#{struct[0]}\b/, struct[1])
}
# Rewrite uniform names
rewrite_map(uniform_map, shader)
# Rewrite attribute names
rewrite_map(attribute_map, shader)
# Rewrite varying names
rewrite_map(varyings_map, shader)
# Rewrite constant names
rewrite_map(constants_map, shader)
# Rewrite struct member names
shader = rename_members(shader, member_map, datatypes_string)
# Rewrite numbers
rewrite_numbers(shader)
# Alias frequent keywords via #define where it saves bytes
shader = add_defines(shader, language_words)
shader = remove_whitespace(shader).gsub("\n", "\\n")
# If the first line of a shader is a pre-processor directive, it will cause an error when concatenating it, so add a new line
if shader[0] == "#"
shader = "\\n" + shader
end
shader
}
# Expose the uniform/attribute renames as a single old => new name hash
# so the host program can look up its variables after minification.
global_map = {}
uniform_map.concat(attribute_map).each { |v|
global_map[v[0]] = v[1]
}
return [shaders, global_map, member_map]
end
# Reads the shader files at +paths+ and minifies them together as one unit.
def minify_files(paths, rewriteall)
minify_sources(paths.map { |path| IO.read(path) }, rewriteall)
end |
module RevertiblePaperTrail
  module Version
    extend ActiveSupport::Concern

    # Undo this version: a "create" is reverted by destroying the item,
    # an "update" or "destroy" by saving the reified pre-change state.
    def revert
      case event
      when "create"
        # Nothing to revert if the item was already destroyed again
        item.destroy if item
      when "update", "destroy"
        reify.save
      end
    end

    # The item as it currently exists, reified from the newest known
    # version when the record itself no longer exists.
    def active_item
      live = item
      return live if live
      newest = self.class.subsequent(self).last || self
      newest.reify
    end

    # The item state this version produced, or nil for a "destroy".
    def current_item
      return nil if event == 'destroy'
      follower = self.next
      follower ? follower.reify : item
    end

    # The item state just before this version happened (nil for "create").
    def previous_item
      case event
      when "update"
        current_item.previous_version
      when "destroy"
        reify
      end
    end

    # All versions of the item this version belongs to.
    def versions
      active_item.versions
    end
  end
end
Fix .active_item and .versions to not include changes for other items.
module RevertiblePaperTrail
  module Version
    extend ActiveSupport::Concern

    # Undo this version: destroy what a "create" made, or save the
    # reified pre-change state for an "update"/"destroy".
    def revert
      case event
      when "create"
        # Already destroyed again? Then there is nothing to do.
        item.destroy if item
      when "update", "destroy"
        reify.save
      end
    end

    # The live item, or the latest version reified when it is gone.
    def active_item
      existing = item
      existing ? existing : versions.last.reify
    end

    # The item state this version produced, or nil for a "destroy".
    def current_item
      return nil if event == 'destroy'
      follower = self.next
      follower ? follower.reify : item
    end

    # The item state just before this version happened (nil for "create").
    def previous_item
      case event
      when "update"
        current_item.previous_version
      when "destroy"
        reify
      end
    end

    # Only this version's own item history, not changes to other items.
    def versions
      sibling_versions
    end
  end
end
|
# -*- coding: utf-8 -*-
require_relative 'phrase_cluster'
class Rgraphum::Graph
  # Start (root) vertices of the graph: those with no incoming edges.
  def start_root_vertices
    vertices.find_all do |vertex|
      vertex.inE.empty?
    end
  end

  # End (sink) vertices of the graph: those with no outgoing edges.
  def end_root_vertices
    vertices.find_all do |vertex|
      vertex.outE.empty?
    end
  end
end
class Rgraphum::Analyzer::MemeTracker
attr_accessor :distance_max_limit
attr_accessor :graph
attr_accessor :clusters
# Sets up the analyzer for the given graph.
def initialize(graph=Rgraphum::Graph.new)
# Maximum recursion depth / default word-shift budget for edit_distance
# and find_shift_distance
@distance_max_limit = 5
self.graph = graph
end
# Word-level edit distance between two phrases, or nil when the phrases
# differ by more than +limit+ words in either direction.
def edit_distance(words_a, words_b, limit=@distance_max_limit)
  left = words_a.dup
  right = words_b.dup
  # Cheap pre-filter: the symmetric difference bounds the distance from below.
  return nil if (left - right | right - left).size > (limit * 2)
  find_shift_distance(left, right)
end
# Recursive shift/substitution distance between two word arrays.
# Gives up (returns nil) once more than @distance_max_limit mismatching
# positions have been consumed along a branch.
def find_shift_distance(words_a, words_b, depth = 0)
  return nil if depth > @distance_max_limit
  return words_b.size if words_a.empty?
  return words_a.size if words_b.empty?

  rest_a = words_a[1..-1]
  rest_b = words_b[1..-1]
  # Matching heads cost nothing: consume both and keep the same depth.
  return find_shift_distance(rest_a, rest_b, depth) if words_a[0] == words_b[0]

  next_depth = depth + 1
  candidates = [
    find_shift_distance(words_a, rest_b, next_depth), # insertion
    find_shift_distance(rest_a, words_b, next_depth), # deletion
    find_shift_distance(rest_a, rest_b, next_depth),  # substitution
  ].compact
  return nil if candidates.empty?
  1 + candidates.min
end
###############################################################
# Clusters phrases: paths from every start-root vertex are grouped by
# shared end vertices, then for each group the community assignment
# with the largest total sigma_in is kept.
#
# Fixes: removed leftover debug `p` statements; dropped unused
# each_with_index indices; renamed the misspelled dead local.
#
# @return [Array] the selected communities across all groups
def phrase_clusters
  new_graph = @graph.dup
  graph_start_root_vertices = start_root_vertices(new_graph)
  graph_end_root_vertices = end_root_vertices(new_graph) # NOTE(review): unused
  end_root_vertex_path_hashs, end_root_vertex_path_hash_keys_array = [], []
  graph_start_root_vertices.each do |graph_start_root_vertex|
    end_root_vertex_path_hash = build_end_root_vertex_path_hash(graph_start_root_vertex)
    end_root_vertex_path_hashs << end_root_vertex_path_hash
    end_root_vertex_path_hash_keys_array << end_root_vertex_path_hash.paths.map { |path| path.end_vertex }
  end
  # NOTE(review): the merged key sets were assigned to a misspelled local
  # ("kyes") and never used; the intent may have been to reassign
  # end_root_vertex_path_hash_keys_array — confirm before changing.
  _merged_end_path_keys = vertex_id_map(end_root_vertex_path_hash_keys_array)
  # sets: {end_path_keys => [start_root_vertices...]}
  sets = {}
  end_root_vertex_path_hashs.each_with_index do |end_root_vertex_path_hash, i|
    end_root_vertex_path_hash_keys_array.each do |end_path_keys|
      unless (end_root_vertex_path_hash.paths.map { |path| path.end_vertex } & end_path_keys).empty?
        sets[end_path_keys] ||= []
        sets[end_path_keys] << graph_start_root_vertices[i]
        break
      end
    end
  end
  sets = sets.map { |end_path_keys, end_path_start_root_vertices|
    [end_path_start_root_vertices, end_path_keys]
  }
  clusters = []
  sets.each do |end_path_start_root_vertices, end_path_keys|
    end_path_start_root_vertices_pt = end_path_start_root_vertices.permutation
    end_path_keys_pt = end_path_keys.repeated_permutation(end_path_start_root_vertices.size)
    # Try every pairing of start vertices with end keys and keep the
    # community set maximizing total sigma_in.
    communities_set = []
    end_path_start_root_vertices_pt.each do |end_path_start_root_vertices_p|
      end_path_keys_pt.each do |end_path_keys_p|
        communities_set << make_communities(end_path_start_root_vertices_p, end_path_keys_p)
      end
    end
    sigma_in_sizes = communities_set.map { |communities| sum_sigma_in(communities) }
    max = sigma_in_sizes.max
    index = sigma_in_sizes.index(max)
    clusters += communities_set[index]
  end
  clusters
end
# Repeatedly merges key arrays that share any element, recursing until
# the collection is pairwise disjoint.
def vertex_id_map(cluster_keys)
  return cluster_keys if cluster_keys.size < 2

  merged = cluster_keys.dup
  cluster_keys.combination(2).each do |left, right|
    next if (left & right).empty?
    merged.delete(left)
    merged.delete(right)
    merged << (left | right)
  end
  # Same size means nothing merged this pass: fixed point reached.
  merged.size == cluster_keys.size ? cluster_keys : vertex_id_map(merged)
end
# Total sigma_in over a collection of communities (0 when empty).
def sum_sigma_in(communities)
  communities.reduce(0) { |total, community| total + community.sigma_in }
end
# Builds Rgraphum communities by pairing the i-th start vertex with the
# i-th end vertex and collecting the vertices on paths between them.
# Clusters reaching the same end vertex are unioned together.
def make_communities(graph_start_root_vertices, graph_end_root_vertices)
  hashed_cluster = {}
  used_vertices = {}
  # Positional pairing of start and end vertices.
  pair = [graph_start_root_vertices, graph_end_root_vertices].transpose
  pair.each do |start_vertex, end_vertex|
    cluster, used_vertices = find_cluster_with_used_vertices(start_vertex, end_vertex, used_vertices)
    if cluster
      if hashed_cluster[end_vertex.id]
        # Union with vertices already collected for this end vertex.
        hashed_cluster[end_vertex.id] = (hashed_cluster[end_vertex.id] | cluster)
      else
        hashed_cluster[end_vertex.id] = cluster
      end
    end
  end
  communities = hashed_cluster.map do |end_vertex_id, vertices|
    Rgraphum::Community.new(vertices: vertices)
  end
  Rgraphum::Communities(communities)
end
# Finds the cluster of vertices on paths between the two vertices,
# starting with no prior used-vertex state; returns only the cluster.
def find_cluster(start_vertex, end_vertex)
  find_cluster_with_used_vertices(start_vertex, end_vertex, {}).first
end
# Depth-first search collecting every vertex lying on some path from
# start_vertex to end_vertex. used_vertices memoizes, per vertex, the
# end vertex it was previously resolved against: a hit with the same
# end yields [] (already counted), a hit with a different end prunes
# the branch (nil).
#
# @return [Array(Array|nil, Hash)] the cluster (or nil) and the
#   updated used_vertices memo
def find_cluster_with_used_vertices(start_vertex, end_vertex, used_vertices)
  # FIXME rename cluster
  # used_vertices = { start_vertex => }
  if used_vertex = used_vertices[start_vertex]
    if used_vertex == end_vertex
      return [[], used_vertices]
    else
      return [nil, used_vertices]
    end
  end
  if start_vertex == end_vertex
    used_vertices[start_vertex] = end_vertex
    return [[start_vertex], used_vertices]
  else
    # Dead end: the search cannot continue from here.
    if start_vertex.out.empty?
      return nil, used_vertices
    end
  end
  cluster = nil
  # Merge clusters found through every outgoing neighbour.
  start_vertex.out.each do |vertex|
    deep_cluster, used_vertices = find_cluster_with_used_vertices(vertex, end_vertex, used_vertices)
    if deep_cluster
      cluster ||= []
      cluster += deep_cluster
    end
  end
  if cluster
    cluster << start_vertex
    used_vertices[start_vertex] = end_vertex
  end
  [cluster, used_vertices]
end
# Builds a PhraseCluster mapping each reachable end-root vertex to the
# path of vertices leading to it, walking forward from start_vertex.
# {end_root_vertex => [vertex,vertex],end_root_vertex => [vertex,vertex]}
def build_end_root_vertex_path_hash(start_vertex, cluster=nil)
  cluster ||= Rgraphum::PhraseCluster.new
  start_vertex.out.each do |vertex|
    next if cluster.have_vertex_in_path?(vertex, start_vertex)
    if !vertex.out.empty? # not reach end_root_vertex, vertex != end_root_vertex
      # remove loop edge
      found = cluster.have_vertex?(vertex) && cluster.have_vertex?(start_vertex)
      next if found
      cluster = build_end_root_vertex_path_hash(vertex, cluster) # recursive call
      # Append start_vertex to every path that ran through this neighbour.
      cluster.each_path do |path|
        if path.include?(vertex) and !path.include?(start_vertex)
          cluster.append_vertex path, start_vertex
        end
      end
    else
      # Neighbour is an end-root: extend its path or open a new one.
      if cluster.have_end_vertex?(vertex)
        path = cluster.find_path(vertex.id)
        cluster.append_vertex path, start_vertex
      else
        cluster.add_path Rgraphum::Path.new(vertex, [vertex, start_vertex])
      end
    end
  end
  cluster
end
# Vertices that have outgoing edges but no incoming edges.
def start_root_vertices(target_graph = @graph)
  target_graph.vertices.select { |vertex| vertex.in.empty? && !vertex.out.empty? }
end
# Vertices that have incoming edges but no outgoing edges.
def end_root_vertices(target_graph = @graph)
  target_graph.vertices.select { |vertex| !vertex.in.empty? && vertex.out.empty? }
end
##################
# Depth-first walk from options[:source_vertex] accumulating visited
# vertices. When :cut is true (the default), an edge whose traversal
# adds no new vertex (i.e. it closes a cycle) is destructively removed
# from the source vertex's edge list.
#
# @return the accumulated vertex collection
def find_path( options )
  options = { :vertices=>Rgraphum::Vertices.new, :cut => true }.merge(options)
  vertices = options[:vertices]
  source_vertex = options[:source_vertex]
  return vertices if vertices.include?(source_vertex)
  return vertices << source_vertex if source_vertex.out.empty? # if end_root_vertex
  path_vertices = source_vertex.out.inject(vertices) do |vertices, vertex|
    size = vertices.size
    vertices = find_path( {source_vertex:vertex,vertices:vertices} ) #
    # No growth means this edge led only to already-visited vertices.
    if vertices.size == size and options[:cut] == true
      edge_to_delete = source_vertex.edges.where(target: vertex).first
      source_vertex.edges.delete(edge_to_delete)
    end
    vertices
  end
  path_vertices << source_vertex
end
# Builds one sub-graph per start-root vertex by walking its paths.
# Operates on a dup of the given graph because find_path cuts edges.
# Debug output is gated behind Rgraphum.verbose?.
def make_path_graph(graph=@graph)
  p "in make path graph" if Rgraphum.verbose?
  graph = graph.dup
  p "find srn" if Rgraphum.verbose?
  graph_start_root_vertices = start_root_vertices(graph)
  p "find path and to_graph" if Rgraphum.verbose?
  graphes = graph_start_root_vertices.map { |vertex| Rgraphum::Vertices.new(find_path(source_vertex:vertex)).to_graph }
end
# Merges all per-start-root path graphs into one new graph, then
# removes duplicate elements by id.
def cut_edges_with_srn(graph = @graph)
  merged = Rgraphum::Graph.new
  path_graphs = make_path_graph(graph)
  merged.vertices = path_graphs.flat_map { |g| g.vertices }
  merged.edges = path_graphs.flat_map { |g| g.edges }
  merged.compact_with(:id)
end
# Increments vertex counts for duplicated word lists.
# NOTE(review): only the first vertex of each combination pair is
# incremented, so n identical vertices end with counts n-1, n-2, ..., 0;
# confirm this is intended before compact_with(:words) dedupes them.
def count_same_words_vertices(graph=@graph)
  graph.vertices.combination(2).each do |vertex_a, vertex_b|
    vertex_a.count = vertex_a.count.to_i + 1 if vertex_a.words == vertex_b.words
  end
end
# Sorts vertices by start time, then connects every pair whose phrases
# are within edit distance; edge weight decays with distance.
# Fix: dropped the unused each_with_index counter.
#
# @return [Array] the graph's edge list
def make_edges(graph = @graph)
  graph.vertices.sort! { |a, b| a.start.to_f <=> b.start.to_f }
  graph.vertices.combination(2).each do |pair|
    # When both endpoints carry timing data, skip non-overlapping pairs.
    if pair[1].start and pair[0].end
      next unless pair[0].within_term(pair[1])
    end
    distance = edit_distance(pair[0].words, pair[1].words)
    next unless distance
    graph.edges << { source: pair[0], target: pair[1], weight: (1.0 / (distance + 1)) }
  end
  graph.edges
end
# Builds the phrase graph: vertices from phrase_array, duplicate-word
# counting, dedupe by words, then edit-distance edges.
# NOTE(review): compact_with(:words, @graph) passes the graph as a
# second argument while cut_edges_with_srn calls compact_with(:id)
# with one — verify the expected arity.
def make_graph(phrase_array)
  @graph = Rgraphum::Graph.new
  @graph.vertices = phrase_array
  self.count_same_words_vertices(@graph)
  @graph.compact_with(:words, @graph)
  self.make_edges(@graph)
  @graph
end
end
Remove debug print statements.
# -*- coding: utf-8 -*-
require_relative 'phrase_cluster'
# Root-vertex helpers for Rgraphum graphs.
class Rgraphum::Graph
  # Vertices with no incoming edges (sources of the graph).
  def start_root_vertices
    vertices.select { |vertex| vertex.inE.empty? }
  end

  # Vertices with no outgoing edges (sinks of the graph).
  def end_root_vertices
    vertices.select { |vertex| vertex.outE.empty? }
  end
end
class Rgraphum::Analyzer::MemeTracker
# Maximum recursion depth / early-reject bound for the edit-distance search.
attr_accessor :distance_max_limit
# The Rgraphum::Graph under analysis.
attr_accessor :graph
# NOTE(review): not assigned anywhere in this class — confirm callers set it.
attr_accessor :clusters
# Builds an analyzer over +graph+ (an empty graph by default) with the
# default edit-distance search depth of 5.
def initialize(graph = Rgraphum::Graph.new)
  @distance_max_limit = 5
  self.graph = graph
end
# Word-level edit distance between two token lists.
#
# Returns nil when the symmetric difference of the token sets already
# exceeds 2*limit (cheap early reject) or when the shift search gives up.
#
# @param words_a [Array] tokens of the first phrase
# @param words_b [Array] tokens of the second phrase
# @param limit   [Integer] early-reject threshold (defaults to @distance_max_limit)
# @return [Integer, nil]
def edit_distance(words_a, words_b, limit = @distance_max_limit)
  # (a - b) | (b - a) is the symmetric difference of the token sets.
  return nil if ((words_a - words_b) | (words_b - words_a)).size > (limit * 2)
  # find_shift_distance only slices its arguments (never mutates them),
  # so the previous defensive dups and the unused local were removed.
  find_shift_distance(words_a, words_b)
end
# Recursive shift/substitution distance between two word arrays.
# Gives up (returns nil) once more than @distance_max_limit mismatching
# positions have been consumed along a branch.
def find_shift_distance(words_a, words_b, depth = 0)
  return nil if depth > @distance_max_limit
  return words_b.size if words_a.empty?
  return words_a.size if words_b.empty?

  rest_a = words_a[1..-1]
  rest_b = words_b[1..-1]
  # Matching heads cost nothing: consume both and keep the same depth.
  return find_shift_distance(rest_a, rest_b, depth) if words_a[0] == words_b[0]

  next_depth = depth + 1
  candidates = [
    find_shift_distance(words_a, rest_b, next_depth), # insertion
    find_shift_distance(rest_a, words_b, next_depth), # deletion
    find_shift_distance(rest_a, rest_b, next_depth),  # substitution
  ].compact
  return nil if candidates.empty?
  1 + candidates.min
end
###############################################################
# Clusters phrases: paths from every start-root vertex are grouped by
# shared end vertices, then for each group the community assignment
# with the largest total sigma_in is kept.
#
# Fixes: dropped unused each_with_index indices; renamed the
# misspelled dead local ("kyes").
#
# @return [Array] the selected communities across all groups
def phrase_clusters
  new_graph = @graph.dup
  graph_start_root_vertices = start_root_vertices(new_graph)
  graph_end_root_vertices = end_root_vertices(new_graph) # NOTE(review): unused
  end_root_vertex_path_hashs, end_root_vertex_path_hash_keys_array = [], []
  graph_start_root_vertices.each do |graph_start_root_vertex|
    end_root_vertex_path_hash = build_end_root_vertex_path_hash(graph_start_root_vertex)
    end_root_vertex_path_hashs << end_root_vertex_path_hash
    end_root_vertex_path_hash_keys_array << end_root_vertex_path_hash.paths.map { |path| path.end_vertex }
  end
  # NOTE(review): the merged key sets were assigned to a misspelled local
  # ("kyes") and never used; the intent may have been to reassign
  # end_root_vertex_path_hash_keys_array — confirm before changing.
  _merged_end_path_keys = vertex_id_map(end_root_vertex_path_hash_keys_array)
  # sets: {end_path_keys => [start_root_vertices...]}
  sets = {}
  end_root_vertex_path_hashs.each_with_index do |end_root_vertex_path_hash, i|
    end_root_vertex_path_hash_keys_array.each do |end_path_keys|
      unless (end_root_vertex_path_hash.paths.map { |path| path.end_vertex } & end_path_keys).empty?
        sets[end_path_keys] ||= []
        sets[end_path_keys] << graph_start_root_vertices[i]
        break
      end
    end
  end
  sets = sets.map { |end_path_keys, end_path_start_root_vertices|
    [end_path_start_root_vertices, end_path_keys]
  }
  clusters = []
  sets.each do |end_path_start_root_vertices, end_path_keys|
    end_path_start_root_vertices_pt = end_path_start_root_vertices.permutation
    end_path_keys_pt = end_path_keys.repeated_permutation(end_path_start_root_vertices.size)
    # Try every pairing of start vertices with end keys and keep the
    # community set maximizing total sigma_in.
    communities_set = []
    end_path_start_root_vertices_pt.each do |end_path_start_root_vertices_p|
      end_path_keys_pt.each do |end_path_keys_p|
        communities_set << make_communities(end_path_start_root_vertices_p, end_path_keys_p)
      end
    end
    sigma_in_sizes = communities_set.map { |communities| sum_sigma_in(communities) }
    max = sigma_in_sizes.max
    index = sigma_in_sizes.index(max)
    clusters += communities_set[index]
  end
  clusters
end
# Repeatedly merges key arrays that share any element, recursing until
# the collection is pairwise disjoint.
def vertex_id_map(cluster_keys)
  return cluster_keys if cluster_keys.size < 2

  merged = cluster_keys.dup
  cluster_keys.combination(2).each do |left, right|
    next if (left & right).empty?
    merged.delete(left)
    merged.delete(right)
    merged << (left | right)
  end
  # Same size means nothing merged this pass: fixed point reached.
  merged.size == cluster_keys.size ? cluster_keys : vertex_id_map(merged)
end
# Total sigma_in over a collection of communities (0 when empty).
def sum_sigma_in(communities)
  communities.reduce(0) { |total, community| total + community.sigma_in }
end
# Builds Rgraphum communities by pairing the i-th start vertex with the
# i-th end vertex and collecting the vertices on paths between them.
# Clusters reaching the same end vertex are unioned together.
def make_communities(graph_start_root_vertices, graph_end_root_vertices)
  hashed_cluster = {}
  used_vertices = {}
  # Positional pairing of start and end vertices.
  pair = [graph_start_root_vertices, graph_end_root_vertices].transpose
  pair.each do |start_vertex, end_vertex|
    cluster, used_vertices = find_cluster_with_used_vertices(start_vertex, end_vertex, used_vertices)
    if cluster
      if hashed_cluster[end_vertex.id]
        # Union with vertices already collected for this end vertex.
        hashed_cluster[end_vertex.id] = (hashed_cluster[end_vertex.id] | cluster)
      else
        hashed_cluster[end_vertex.id] = cluster
      end
    end
  end
  communities = hashed_cluster.map do |end_vertex_id, vertices|
    Rgraphum::Community.new(vertices: vertices)
  end
  Rgraphum::Communities(communities)
end
# Finds the cluster of vertices on paths between the two vertices,
# starting with no prior used-vertex state; returns only the cluster.
def find_cluster(start_vertex, end_vertex)
  find_cluster_with_used_vertices(start_vertex, end_vertex, {}).first
end
# Depth-first search collecting every vertex lying on some path from
# start_vertex to end_vertex. used_vertices memoizes, per vertex, the
# end vertex it was previously resolved against: a hit with the same
# end yields [] (already counted), a hit with a different end prunes
# the branch (nil).
#
# @return [Array(Array|nil, Hash)] the cluster (or nil) and the
#   updated used_vertices memo
def find_cluster_with_used_vertices(start_vertex, end_vertex, used_vertices)
  # FIXME rename cluster
  # used_vertices = { start_vertex => }
  if used_vertex = used_vertices[start_vertex]
    if used_vertex == end_vertex
      return [[], used_vertices]
    else
      return [nil, used_vertices]
    end
  end
  if start_vertex == end_vertex
    used_vertices[start_vertex] = end_vertex
    return [[start_vertex], used_vertices]
  else
    # Dead end: the search cannot continue from here.
    if start_vertex.out.empty?
      return nil, used_vertices
    end
  end
  cluster = nil
  # Merge clusters found through every outgoing neighbour.
  start_vertex.out.each do |vertex|
    deep_cluster, used_vertices = find_cluster_with_used_vertices(vertex, end_vertex, used_vertices)
    if deep_cluster
      cluster ||= []
      cluster += deep_cluster
    end
  end
  if cluster
    cluster << start_vertex
    used_vertices[start_vertex] = end_vertex
  end
  [cluster, used_vertices]
end
# Builds a PhraseCluster mapping each reachable end-root vertex to the
# path of vertices leading to it, walking forward from start_vertex.
# {end_root_vertex => [vertex,vertex],end_root_vertex => [vertex,vertex]}
def build_end_root_vertex_path_hash(start_vertex, cluster=nil)
  cluster ||= Rgraphum::PhraseCluster.new
  start_vertex.out.each do |vertex|
    next if cluster.have_vertex_in_path?(vertex, start_vertex)
    if !vertex.out.empty? # not reach end_root_vertex, vertex != end_root_vertex
      # remove loop edge
      found = cluster.have_vertex?(vertex) && cluster.have_vertex?(start_vertex)
      next if found
      cluster = build_end_root_vertex_path_hash(vertex, cluster) # recursive call
      # Append start_vertex to every path that ran through this neighbour.
      cluster.each_path do |path|
        if path.include?(vertex) and !path.include?(start_vertex)
          cluster.append_vertex path, start_vertex
        end
      end
    else
      # Neighbour is an end-root: extend its path or open a new one.
      if cluster.have_end_vertex?(vertex)
        path = cluster.find_path(vertex.id)
        cluster.append_vertex path, start_vertex
      else
        cluster.add_path Rgraphum::Path.new(vertex, [vertex, start_vertex])
      end
    end
  end
  cluster
end
# Vertices that have outgoing edges but no incoming edges.
def start_root_vertices(target_graph = @graph)
  target_graph.vertices.select { |vertex| vertex.in.empty? && !vertex.out.empty? }
end
# Vertices that have incoming edges but no outgoing edges.
def end_root_vertices(target_graph = @graph)
  target_graph.vertices.select { |vertex| !vertex.in.empty? && vertex.out.empty? }
end
##################
# Depth-first walk from options[:source_vertex] accumulating visited
# vertices. When :cut is true (the default), an edge whose traversal
# adds no new vertex (i.e. it closes a cycle) is destructively removed
# from the source vertex's edge list.
#
# @return the accumulated vertex collection
def find_path( options )
  options = { :vertices=>Rgraphum::Vertices.new, :cut => true }.merge(options)
  vertices = options[:vertices]
  source_vertex = options[:source_vertex]
  return vertices if vertices.include?(source_vertex)
  return vertices << source_vertex if source_vertex.out.empty? # if end_root_vertex
  path_vertices = source_vertex.out.inject(vertices) do |vertices, vertex|
    size = vertices.size
    vertices = find_path( {source_vertex:vertex,vertices:vertices} ) #
    # No growth means this edge led only to already-visited vertices.
    if vertices.size == size and options[:cut] == true
      edge_to_delete = source_vertex.edges.where(target: vertex).first
      source_vertex.edges.delete(edge_to_delete)
    end
    vertices
  end
  path_vertices << source_vertex
end
# Builds one sub-graph per start-root vertex by walking its paths.
# Operates on a dup of the given graph because find_path cuts edges.
# Debug output is gated behind Rgraphum.verbose?.
def make_path_graph(graph=@graph)
  p "in make path graph" if Rgraphum.verbose?
  graph = graph.dup
  p "find srn" if Rgraphum.verbose?
  graph_start_root_vertices = start_root_vertices(graph)
  p "find path and to_graph" if Rgraphum.verbose?
  graphes = graph_start_root_vertices.map { |vertex| Rgraphum::Vertices.new(find_path(source_vertex:vertex)).to_graph }
end
# Merges all per-start-root path graphs into one new graph, then
# removes duplicate elements by id.
def cut_edges_with_srn(graph = @graph)
  merged = Rgraphum::Graph.new
  path_graphs = make_path_graph(graph)
  merged.vertices = path_graphs.flat_map { |g| g.vertices }
  merged.edges = path_graphs.flat_map { |g| g.edges }
  merged.compact_with(:id)
end
# Increments vertex counts for duplicated word lists.
# NOTE(review): only the first vertex of each combination pair is
# incremented, so n identical vertices end with counts n-1, n-2, ..., 0;
# confirm this is intended before compact_with(:words) dedupes them.
def count_same_words_vertices(graph=@graph)
  graph.vertices.combination(2).each do |vertex_a, vertex_b|
    vertex_a.count = vertex_a.count.to_i + 1 if vertex_a.words == vertex_b.words
  end
end
# Sorts vertices by start time, then connects every pair whose phrases
# are within edit distance; edge weight decays with distance.
# Fix: dropped the unused each_with_index counter.
#
# @return [Array] the graph's edge list
def make_edges(graph = @graph)
  graph.vertices.sort! { |a, b| a.start.to_f <=> b.start.to_f }
  graph.vertices.combination(2).each do |pair|
    # When both endpoints carry timing data, skip non-overlapping pairs.
    if pair[1].start and pair[0].end
      next unless pair[0].within_term(pair[1])
    end
    distance = edit_distance(pair[0].words, pair[1].words)
    next unless distance
    graph.edges << { source: pair[0], target: pair[1], weight: (1.0 / (distance + 1)) }
  end
  graph.edges
end
# Builds the phrase graph: vertices from phrase_array, duplicate-word
# counting, dedupe by words, then edit-distance edges.
# NOTE(review): compact_with(:words, @graph) passes the graph as a
# second argument while cut_edges_with_srn calls compact_with(:id)
# with one — verify the expected arity.
def make_graph(phrase_array)
  @graph = Rgraphum::Graph.new
  @graph.vertices = phrase_array
  self.count_same_words_vertices(@graph)
  @graph.compact_with(:words, @graph)
  self.make_edges(@graph)
  @graph
end
end
|
require 'active_support'
require 'active_support/core_ext/object/to_param'
require 'active_model'
module RSpec::ActiveModel::Mocks
class IllegalDataAccessException < StandardError; end
module Mocks
# Instance-level stubs shared by all mocked ActiveModel objects.
module ActiveModelInstanceMethods
  # Stubs `persisted?` to return false and `id` to return nil
  # @return self
  def as_new_record
    RSpec::Mocks.allow_message(self, :persisted?).and_return(false)
    RSpec::Mocks.allow_message(self, :id).and_return(nil)
    self
  end

  # Returns true by default. Override with a stub.
  def persisted?
    true
  end

  # Returns false for names matching <tt>/_before_type_cast$/</tt>,
  # otherwise delegates to super.
  def respond_to?(message, include_private=false)
    message.to_s =~ /_before_type_cast$/ ? false : super
  end
end
# Starting with Rails 4.1, ActiveRecord associations are inversible
# by default. This class represents an association from the mocked
# model's perspective.
#
# @private
class Association
  # Written by Rails when it maintains inverse associations.
  attr_accessor :target, :inversed

  def initialize(association_name)
    @association_name = association_name
  end
end
# Instance-level stubs mixed into mocks of ActiveRecord models.
module ActiveRecordInstanceMethods
  # Stubs `persisted?` to return `false` and `id` to return `nil`.
  def destroy
    RSpec::Mocks.allow_message(self, :persisted?).and_return(false)
    RSpec::Mocks.allow_message(self, :id).and_return(nil)
  end

  # Transforms the key to a method and calls it.
  def [](key)
    send(key)
  end
  # Rails 4.2+ reads attributes through _read_attribute instead of
  # record['id']; alias it so mocking belongs_to associations works.
  alias_method :_read_attribute, :[]

  # Returns the opposite of `persisted?`
  def new_record?
    !persisted?
  end

  # Returns an object representing an association from the mocked
  # model's perspective. For use by Rails internally only.
  def association(association_name)
    @associations ||= Hash.new { |h, k| h[k] = Association.new(k) }
    @associations[association_name]
  end
end
# Creates a test double representing `string_or_model_class` with common
# ActiveModel methods stubbed out. Additional methods may be easily
# stubbed (via add_stubs) if `stubs` is passed. This is most useful for
# impersonating models that don't exist yet.
#
# ActiveModel methods, plus <tt>new_record?</tt>, are
# stubbed out implicitly. <tt>new_record?</tt> returns the inverse of
# <tt>persisted?</tt>, and is present only for compatibility with
# extension frameworks that have yet to update themselves to the
# ActiveModel API (which declares <tt>persisted?</tt>, not
# <tt>new_record?</tt>).
#
# `string_or_model_class` can be any of:
#
# * A String representing a Class that does not exist
# * A String representing a Class that extends ActiveModel::Naming
# * A Class that extends ActiveModel::Naming
def mock_model(string_or_model_class, stubs = {})
  # Resolve a String argument to an existing constant, or define a
  # minimal ActiveModel::Naming class under that name.
  if String === string_or_model_class
    if Object.const_defined?(string_or_model_class)
      model_class = Object.const_get(string_or_model_class)
    else
      model_class = Object.const_set(string_or_model_class, Class.new do
        extend ::ActiveModel::Naming
        def self.primary_key; :id; end
      end)
    end
  else
    model_class = string_or_model_class
  end
  unless model_class.kind_of? ::ActiveModel::Naming
    raise ArgumentError.new <<-EOM
The mock_model method can only accept as its first argument:
* A String representing a Class that does not exist
* A String representing a Class that extends ActiveModel::Naming
* A Class that extends ActiveModel::Naming
It received #{model_class.inspect}
EOM
  end
  # Seed default stubs: a unique id plus common predicate defaults.
  stubs = {:id => next_id}.merge(stubs)
  stubs = {:persisted? => !!stubs[:id],
           :destroyed? => false,
           :marked_for_destruction? => false,
           :valid? => true,
           :blank? => false}.merge(stubs)
  double("#{model_class.name}_#{stubs[:id]}", stubs).tap do |m|
    # Mix the model behavior into the double's singleton class only.
    msingleton = class << m; self; end
    msingleton.class_eval do
      include ActiveModelInstanceMethods
      include ActiveRecordInstanceMethods if defined?(ActiveRecord)
      include ActiveModel::Conversion
      include ActiveModel::Validations
    end
    # When save/update are stubbed to fail, make errors non-empty too.
    if defined?(ActiveRecord)
      [:save, :update_attributes, :update].each do |key|
        if stubs[key] == false
          RSpec::Mocks.allow_message(m.errors, :empty?).and_return(false)
        end
      end
    end
    msingleton.__send__(:define_method, :is_a?) do |other|
      model_class.ancestors.include?(other)
    end unless stubs.has_key?(:is_a?)
    msingleton.__send__(:define_method, :kind_of?) do |other|
      model_class.ancestors.include?(other)
    end unless stubs.has_key?(:kind_of?)
    msingleton.__send__(:define_method, :instance_of?) do |other|
      other == model_class
    end unless stubs.has_key?(:instance_of?)
    # True when the mocked class defines a column of this name.
    msingleton.__send__(:define_method, :__model_class_has_column?) do |method_name|
      model_class.respond_to?(:column_names) && model_class.column_names.include?(method_name.to_s)
    end
    msingleton.__send__(:define_method, :has_attribute?) do |attr_name|
      __model_class_has_column?(attr_name)
    end unless stubs.has_key?(:has_attribute?)
    msingleton.__send__(:define_method, :respond_to?) do |method_name, *args|
      include_private = args.first || false
      __model_class_has_column?(method_name) ? true : super(method_name, include_private)
    end unless stubs.has_key?(:respond_to?)
    # Unstubbed column reads return self (null object) or nil.
    msingleton.__send__(:define_method, :method_missing) do |m, *a, &b|
      respond_to?(m) ? null_object? ? self : nil : super(m, *a, &b)
    end
    msingleton.__send__(:define_method, :class) do
      model_class
    end unless stubs.has_key?(:class)
    mock_param = to_param
    msingleton.__send__(:define_method, :to_s) do
      "#{model_class.name}_#{mock_param}"
    end unless stubs.has_key?(:to_s)
    yield m if block_given?
  end
end
# Extensions applied to every stub_model instance.
module ActiveModelStubExtensions
  # Stubs `persisted` to return false and `id` to return nil
  def as_new_record
    RSpec::Mocks.allow_message(self, :persisted?).and_return(false)
    RSpec::Mocks.allow_message(self, :id).and_return(nil)
    self
  end

  # Returns `true` by default. Override with a stub.
  def persisted?
    true
  end
end
# Extensions applied to stub_model instances of ActiveRecord models.
module ActiveRecordStubExtensions
  # Stubs `id` (or other primary key method) to return nil
  def as_new_record
    self.__send__("#{self.class.primary_key}=", nil)
    super
  end

  # Returns the opposite of `persisted?`.
  def new_record?
    !persisted?
  end

  # Raises an IllegalDataAccessException (stubbed models are not allowed to access the database)
  # @raises IllegalDataAccessException
  def connection
    raise RSpec::ActiveModel::Mocks::IllegalDataAccessException.new("stubbed models are not allowed to access the database")
  end
end
# Creates an instance of `Model` with `to_param` stubbed using a
# generated value that is unique to each object. If `Model` is an
# `ActiveRecord` model, it is prohibited from accessing the database.
#
# For each key in `stubs`, if the model has a matching attribute
# (determined by `respond_to?`) it is simply assigned the submitted values.
# If the model does not have a matching attribute, the key/value pair is
# assigned as a stub return value using RSpec's mocking/stubbing
# framework.
#
# <tt>persisted?</tt> is overridden to return the result of !id.nil?
# This means that by default persisted? will return true. If you want
# the object to behave as a new record, sending it `as_new_record` will
# set the id to nil. You can also explicitly set :id => nil, in which
# case persisted? will return false, but using `as_new_record` makes the
# example a bit more descriptive.
#
# While you can use stub_model in any example (model, view, controller,
# helper), it is especially useful in view examples, which are
# inherently more state-based than interaction-based.
#
# @example
#
# stub_model(Person)
# stub_model(Person).as_new_record
# stub_model(Person, :to_param => 37)
# stub_model(Person) {|person| person.first_name = "David"}
def stub_model(model_class, stubs={})
  model_class.new.tap do |m|
    m.extend ActiveModelStubExtensions
    if defined?(ActiveRecord) && model_class < ActiveRecord::Base
      m.extend ActiveRecordStubExtensions
      # AR models may use a custom primary key; seed it with a unique id.
      primary_key = model_class.primary_key.to_sym
      stubs = {primary_key => next_id}.merge(stubs)
      stubs = {:persisted? => !!stubs[primary_key]}.merge(stubs)
    else
      stubs = {:id => next_id}.merge(stubs)
      stubs = {:persisted? => !!stubs[:id]}.merge(stubs)
    end
    stubs = {:blank? => false}.merge(stubs)
    # Assign real attributes where a writer exists; otherwise stub the
    # reader via RSpec's mocking framework.
    stubs.each do |message, return_value|
      if m.respond_to?("#{message}=")
        m.__send__("#{message}=", return_value)
      else
        RSpec::Mocks.allow_message(m, message).and_return(return_value)
      end
    end
    yield m if block_given?
  end
end
private

# Monotonic counter giving each mock a unique id.
# NOTE(review): @@ class variable is shared by every group including
# this module, which is what makes the ids globally unique.
@@model_id = 1000
def next_id
  @@model_id += 1
end
end
end
RSpec.configuration.include RSpec::ActiveModel::Mocks::Mocks
Fix mocking belongs_to associations in Rails 4.2+
require 'active_support'
require 'active_support/core_ext/object/to_param'
require 'active_model'
module RSpec::ActiveModel::Mocks
class IllegalDataAccessException < StandardError; end
module Mocks
# Instance-level stubs shared by all mocked ActiveModel objects.
module ActiveModelInstanceMethods
  # Stubs `persisted?` to return false and `id` to return nil
  # @return self
  def as_new_record
    RSpec::Mocks.allow_message(self, :persisted?).and_return(false)
    RSpec::Mocks.allow_message(self, :id).and_return(nil)
    self
  end

  # Returns true by default. Override with a stub.
  def persisted?
    true
  end

  # Returns false for names matching <tt>/_before_type_cast$/</tt>,
  # otherwise delegates to super.
  def respond_to?(message, include_private=false)
    message.to_s =~ /_before_type_cast$/ ? false : super
  end
end
# Starting with Rails 4.1, ActiveRecord associations are inversible
# by default. This class represents an association from the mocked
# model's perspective.
#
# @private
class Association
  # Written by Rails when it maintains inverse associations.
  attr_accessor :target, :inversed

  def initialize(association_name)
    @association_name = association_name
  end
end
# Instance-level stubs mixed into mocks of ActiveRecord models.
module ActiveRecordInstanceMethods
  # Stubs `persisted?` to return `false` and `id` to return `nil`.
  def destroy
    RSpec::Mocks.allow_message(self, :persisted?).and_return(false)
    RSpec::Mocks.allow_message(self, :id).and_return(nil)
  end

  # Transforms the key to a method and calls it.
  def [](key)
    send(key)
  end
  # Rails>4.2 uses _read_attribute internally, as an optimized
  # alternative to record['id']
  alias_method :_read_attribute, :[]

  # Returns the opposite of `persisted?`
  def new_record?
    !persisted?
  end

  # Returns an object representing an association from the mocked
  # model's perspective. For use by Rails internally only.
  def association(association_name)
    @associations ||= Hash.new { |h, k| h[k] = Association.new(k) }
    @associations[association_name]
  end
end
# Creates a test double representing `string_or_model_class` with common
# ActiveModel methods stubbed out. Additional methods may be easily
# stubbed (via add_stubs) if `stubs` is passed. This is most useful for
# impersonating models that don't exist yet.
#
# ActiveModel methods, plus <tt>new_record?</tt>, are
# stubbed out implicitly. <tt>new_record?</tt> returns the inverse of
# <tt>persisted?</tt>, and is present only for compatibility with
# extension frameworks that have yet to update themselves to the
# ActiveModel API (which declares <tt>persisted?</tt>, not
# <tt>new_record?</tt>).
#
# `string_or_model_class` can be any of:
#
# * A String representing a Class that does not exist
# * A String representing a Class that extends ActiveModel::Naming
# * A Class that extends ActiveModel::Naming
def mock_model(string_or_model_class, stubs = {})
  # Resolve a String argument to an existing constant, or define a
  # minimal ActiveModel::Naming class under that name.
  if String === string_or_model_class
    if Object.const_defined?(string_or_model_class)
      model_class = Object.const_get(string_or_model_class)
    else
      model_class = Object.const_set(string_or_model_class, Class.new do
        extend ::ActiveModel::Naming
        def self.primary_key; :id; end
      end)
    end
  else
    model_class = string_or_model_class
  end
  unless model_class.kind_of? ::ActiveModel::Naming
    raise ArgumentError.new <<-EOM
The mock_model method can only accept as its first argument:
* A String representing a Class that does not exist
* A String representing a Class that extends ActiveModel::Naming
* A Class that extends ActiveModel::Naming
It received #{model_class.inspect}
EOM
  end
  # Seed default stubs: a unique id plus common predicate defaults.
  stubs = {:id => next_id}.merge(stubs)
  stubs = {:persisted? => !!stubs[:id],
           :destroyed? => false,
           :marked_for_destruction? => false,
           :valid? => true,
           :blank? => false}.merge(stubs)
  double("#{model_class.name}_#{stubs[:id]}", stubs).tap do |m|
    # Mix the model behavior into the double's singleton class only.
    msingleton = class << m; self; end
    msingleton.class_eval do
      include ActiveModelInstanceMethods
      include ActiveRecordInstanceMethods if defined?(ActiveRecord)
      include ActiveModel::Conversion
      include ActiveModel::Validations
    end
    # When save/update are stubbed to fail, make errors non-empty too.
    if defined?(ActiveRecord)
      [:save, :update_attributes, :update].each do |key|
        if stubs[key] == false
          RSpec::Mocks.allow_message(m.errors, :empty?).and_return(false)
        end
      end
    end
    msingleton.__send__(:define_method, :is_a?) do |other|
      model_class.ancestors.include?(other)
    end unless stubs.has_key?(:is_a?)
    msingleton.__send__(:define_method, :kind_of?) do |other|
      model_class.ancestors.include?(other)
    end unless stubs.has_key?(:kind_of?)
    msingleton.__send__(:define_method, :instance_of?) do |other|
      other == model_class
    end unless stubs.has_key?(:instance_of?)
    # True when the mocked class defines a column of this name.
    msingleton.__send__(:define_method, :__model_class_has_column?) do |method_name|
      model_class.respond_to?(:column_names) && model_class.column_names.include?(method_name.to_s)
    end
    msingleton.__send__(:define_method, :has_attribute?) do |attr_name|
      __model_class_has_column?(attr_name)
    end unless stubs.has_key?(:has_attribute?)
    msingleton.__send__(:define_method, :respond_to?) do |method_name, *args|
      include_private = args.first || false
      __model_class_has_column?(method_name) ? true : super(method_name, include_private)
    end unless stubs.has_key?(:respond_to?)
    # Unstubbed column reads return self (null object) or nil.
    msingleton.__send__(:define_method, :method_missing) do |m, *a, &b|
      respond_to?(m) ? null_object? ? self : nil : super(m, *a, &b)
    end
    msingleton.__send__(:define_method, :class) do
      model_class
    end unless stubs.has_key?(:class)
    mock_param = to_param
    msingleton.__send__(:define_method, :to_s) do
      "#{model_class.name}_#{mock_param}"
    end unless stubs.has_key?(:to_s)
    yield m if block_given?
  end
end
# Extensions applied to every stub_model instance.
module ActiveModelStubExtensions
  # Stubs `persisted` to return false and `id` to return nil
  def as_new_record
    RSpec::Mocks.allow_message(self, :persisted?).and_return(false)
    RSpec::Mocks.allow_message(self, :id).and_return(nil)
    self
  end

  # Returns `true` by default. Override with a stub.
  def persisted?
    true
  end
end
# Extensions applied to stub_model instances of ActiveRecord models.
module ActiveRecordStubExtensions
  # Stubs `id` (or other primary key method) to return nil
  def as_new_record
    self.__send__("#{self.class.primary_key}=", nil)
    super
  end

  # Returns the opposite of `persisted?`.
  def new_record?
    !persisted?
  end

  # Raises an IllegalDataAccessException (stubbed models are not allowed to access the database)
  # @raises IllegalDataAccessException
  def connection
    raise RSpec::ActiveModel::Mocks::IllegalDataAccessException.new("stubbed models are not allowed to access the database")
  end
end
# Builds an instance of `model_class` suited to state-based (view/helper)
# specs. The instance receives a generated unique id; if `model_class` is
# an ActiveRecord model it is also prohibited from touching the database.
#
# Each key in `stubs` is applied through a matching attribute writer when
# one exists (checked via `respond_to?`); otherwise it is stubbed as a
# message returning the given value.
#
# `persisted?` defaults to the truthiness of the (possibly caller-supplied)
# id, so passing `:id => nil` makes the object act unsaved — though calling
# `as_new_record` on the result reads more descriptively.
#
# @example
#
#   stub_model(Person)
#   stub_model(Person).as_new_record
#   stub_model(Person, :to_param => 37)
#   stub_model(Person) {|person| person.first_name = "David"}
def stub_model(model_class, stubs={})
  instance = model_class.new
  instance.extend ActiveModelStubExtensions

  if defined?(ActiveRecord) && model_class < ActiveRecord::Base
    instance.extend ActiveRecordStubExtensions
    id_key = model_class.primary_key.to_sym
  else
    id_key = :id
  end

  # Defaults never override caller-supplied stubs; `persisted?` is derived
  # from the id value *after* the caller's stubs are merged in.
  stubs = {id_key => next_id}.merge(stubs)
  stubs = {:persisted? => !!stubs[id_key]}.merge(stubs)
  stubs = {:blank? => false}.merge(stubs)

  stubs.each do |message, return_value|
    writer = "#{message}="
    if instance.respond_to?(writer)
      instance.__send__(writer, return_value)
    else
      RSpec::Mocks.allow_message(instance, message).and_return(return_value)
    end
  end

  yield instance if block_given?
  instance
end
private

# Counter backing #next_id. NOTE(review): a class variable, shared across
# the whole inheritance tree; acceptable here since the ids only need to
# be unique, not scoped.
@@model_id = 1000

# Returns a fresh unique integer id for stubbed models.
def next_id
  @@model_id += 1
end
end
end
# Mix the helpers above (e.g. stub_model) into all RSpec example groups.
RSpec.configuration.include RSpec::ActiveModel::Mocks::Mocks
|
require "ruby_git_hooks"
# Git hook that scans changed files for copyright notices and reports files
# whose notice is missing, lists an outdated year, or names the wrong
# company. Can optionally email the commit authors via Pony.
class CopyrightCheckHook < RubyGitHooks::Hook
  # Captures the notice's final year (cur_year) and company name (company).
  COPYRIGHT_REGEXP = /Copyright\s+\(C\)\s*(?<pre_year>.*)-?(?<cur_year>\d{4})\s+(?<company>.+)\s+all rights reserved\.?/i

  # Only check files with known checkable extensions
  EXTENSIONS = [
    "c", "cpp", "cc", "cp",
    "h", "hp", "hpp",
    "m", "mm",
    "java",
    "bat",
    "sh",
    "ps1",
    "rb",
  ]

  # Recognized constructor option keys.
  OPTIONS = [ "domain", "from", "subject", "via", "via_options", "intro",
              "no_send", "company_check" ]

  Hook = RubyGitHooks::Hook

  # @param options [Hash] see OPTIONS; raises if an unknown key is given.
  def initialize(options = {})
    bad_options = options.keys - OPTIONS
    raise "CopyrightCheckHook created with unrecognized options: " +
          "#{bad_options.inspect}!" if bad_options.size > 0

    @options = options
    @options["domain"] ||= "mydomain.com"
    @options["from"] ||= "Copyright Cop <noreply@#{@options["domain"]}>"
    @options["subject"] ||= "Copyright Your Files, Please!"
    @options["via"] ||= "sendmail"
    @options["via_options"] ||= {}
  end

  # TODO: use Regexp#scan instead of just the first match
  #
  # Checks every changed file with a known extension. Returns true when all
  # notices are fine; otherwise builds a description, optionally mails it to
  # the commit authors, prints it, and returns false so the commit is
  # blocked when installed as a pre-commit or pre-receive hook.
  def check
    no_notice = []
    outdated_notice = []
    outdated_company = []
    cur_year = Time.now.strftime("%Y")

    files_changed.each do |filename|
      extension = (filename.split(".") || [])[-1]
      next unless EXTENSIONS.include?(extension)

      if file_contents[filename] =~ COPYRIGHT_REGEXP
        parsed_cur_year = $~["cur_year"]
        parsed_company = $~["company"]

        unless parsed_cur_year == cur_year
          outdated_notice << filename
        end

        # If there is a "company_check" option, either a string
        # or regexp, make sure that the detected company name
        # matches it.
        if @options["company_check"] &&
           !(parsed_company[@options["company_check"]])
          outdated_company << filename
        end
      else
        no_notice << filename
      end
    end

    bad_num = no_notice.size + outdated_notice.size + outdated_company.size
    return true if bad_num < 1

    desc = build_description(no_notice, outdated_notice, outdated_company)

    # Map author name => email for every commit involved.
    recipients = {}
    self.commits.each do |commit|
      author = Hook.shell!("git log -n 1 --pretty=format:'%aE %aN' #{commit}")
      email, name = author.chomp.split(" ", 2)
      recipients[name] = email
    end

    unless @options["no_send"] || @options["via"] == "no_send"
      require "pony" # wait until we need it

      # NOTE: Pony breaks on Windows so don't use this option in Windows.
      recipients.each do |name, email|
        ret = Pony.mail :to => email,
                        :from => @options["from"],
                        :subject => @options["subject"],
                        :body => desc,
                        :via => @options["via"],
                        :via_options => @options["via_options"]
      end
    end

    puts "Warnings for commit:\n#{desc}"

    # Block commit if installed as a pre-commit or pre-receive hook
    false
  end

  protected

  #
  # Return an appropriate email based on the set of files with
  # problems. If you need a different format, please inherit from
  # CopyrightCheckHook and override this method.
  #
  def build_description(no_notice, outdated_notice, outdated_company)
    # BUGFIX: cur_year was referenced below but only existed as a local in
    # #check, raising NameError whenever outdated_notice was non-empty.
    cur_year = Time.now.strftime("%Y")

    bad_files = no_notice | outdated_notice | outdated_company

    # BUGFIX: dup the intro so repeated calls don't keep appending onto the
    # caller's @options["intro"] string (String#concat mutates in place).
    description = (@options["intro"] || "").dup
    description.concat <<DESCRIPTION
In your commit(s): #{self.commits.join(" ")}
You have outdated, inaccurate or missing copyright notices.
Specifically:
=============
DESCRIPTION
    if outdated_notice.size > 0
      description.concat <<DESCRIPTION
The following files do not list #{cur_year} as the copyright year:
#{outdated_notice.join("\n ")}
-----
DESCRIPTION
    end
    if outdated_company.size > 0
      description.concat <<DESCRIPTION
The following files do not properly list your company as the holder of copyright:
#{outdated_company.join("\n ")}
DESCRIPTION
    end
    # NOTE(review): the "All files with problems" summary is only emitted
    # when no_notice is non-empty — possibly intended to be unconditional;
    # preserved as-is, confirm before changing.
    if no_notice.size > 0
      description.concat <<DESCRIPTION
The following files have no notice or a notice I didn't recognize:
#{no_notice.join("\n ")}
DESCRIPTION
      description.concat <<DESCRIPTION
All files with problems:
#{bad_files.join("\n ")}
DESCRIPTION
    end
    description
  end
end
Change default "via" option to "no_send" until mail works better. VAL-174
require "ruby_git_hooks"

# Git hook that scans changed files for copyright notices and reports files
# whose notice is missing, lists an outdated year, or names the wrong
# company. Mail delivery is off by default (via "no_send").
class CopyrightCheckHook < RubyGitHooks::Hook
  # Captures the notice's final year (cur_year) and company name (company).
  COPYRIGHT_REGEXP = /Copyright\s+\(C\)\s*(?<pre_year>.*)-?(?<cur_year>\d{4})\s+(?<company>.+)\s+all rights reserved\.?/i

  # Only check files with known checkable extensions
  EXTENSIONS = [
    "c", "cpp", "cc", "cp",
    "h", "hp", "hpp",
    "m", "mm",
    "java",
    "bat",
    "sh",
    "ps1",
    "rb",
  ]

  # Recognized constructor option keys.
  OPTIONS = [ "domain", "from", "subject", "via", "via_options", "intro",
              "no_send", "company_check" ]

  Hook = RubyGitHooks::Hook

  # @param options [Hash] see OPTIONS; raises if an unknown key is given.
  def initialize(options = {})
    bad_options = options.keys - OPTIONS
    raise "CopyrightCheckHook created with unrecognized options: " +
          "#{bad_options.inspect}!" if bad_options.size > 0

    @options = options
    @options["domain"] ||= "mydomain.com"
    @options["from"] ||= "Copyright Cop <noreply@#{@options["domain"]}>"
    @options["subject"] ||= "Copyright Your Files, Please!"
    @options["via"] ||= "no_send"
    @options["via_options"] ||= {}
  end

  # TODO: use Regexp#scan instead of just the first match
  #
  # Checks every changed file with a known extension. Returns true when all
  # notices are fine; otherwise builds a description, optionally mails it to
  # the commit authors, prints it, and returns false so the commit is
  # blocked when installed as a pre-commit or pre-receive hook.
  def check
    no_notice = []
    outdated_notice = []
    outdated_company = []
    cur_year = Time.now.strftime("%Y")

    files_changed.each do |filename|
      extension = (filename.split(".") || [])[-1]
      next unless EXTENSIONS.include?(extension)

      if file_contents[filename] =~ COPYRIGHT_REGEXP
        parsed_cur_year = $~["cur_year"]
        parsed_company = $~["company"]

        unless parsed_cur_year == cur_year
          outdated_notice << filename
        end

        # If there is a "company_check" option, either a string
        # or regexp, make sure that the detected company name
        # matches it.
        if @options["company_check"] &&
           !(parsed_company[@options["company_check"]])
          outdated_company << filename
        end
      else
        no_notice << filename
      end
    end

    bad_num = no_notice.size + outdated_notice.size + outdated_company.size
    return true if bad_num < 1

    desc = build_description(no_notice, outdated_notice, outdated_company)

    # Map author name => email for every commit involved.
    recipients = {}
    self.commits.each do |commit|
      author = Hook.shell!("git log -n 1 --pretty=format:\"%aE %aN\" #{commit}")
      email, name = author.chomp.split(" ", 2)
      recipients[name] = email
    end

    unless @options["no_send"] || @options["via"] == "no_send"
      require "pony" # wait until we need it

      # NOTE: Pony breaks on Windows so don't use this option in Windows.
      recipients.each do |name, email|
        ret = Pony.mail :to => email,
                        :from => @options["from"],
                        :subject => @options["subject"],
                        :body => desc,
                        :via => @options["via"],
                        :via_options => @options["via_options"]
      end
    end

    puts "Warnings for commit:\n#{desc}"

    # Block commit if installed as a pre-commit or pre-receive hook
    false
  end

  protected

  #
  # Return an appropriate email based on the set of files with
  # problems. If you need a different format, please inherit from
  # CopyrightCheckHook and override this method.
  #
  def build_description(no_notice, outdated_notice, outdated_company)
    # BUGFIX: cur_year was referenced below but only existed as a local in
    # #check, raising NameError whenever outdated_notice was non-empty.
    cur_year = Time.now.strftime("%Y")

    bad_files = no_notice | outdated_notice | outdated_company

    # BUGFIX: dup the intro so repeated calls don't keep appending onto the
    # caller's @options["intro"] string (String#concat mutates in place).
    description = (@options["intro"] || "").dup
    description.concat <<DESCRIPTION
In your commit(s): #{self.commits.join(" ")}
You have outdated, inaccurate or missing copyright notices.
Specifically:
=============
DESCRIPTION
    if outdated_notice.size > 0
      description.concat <<DESCRIPTION
The following files do not list #{cur_year} as the copyright year:
#{outdated_notice.join("\n ")}
-----
DESCRIPTION
    end
    if outdated_company.size > 0
      description.concat <<DESCRIPTION
The following files do not properly list your company as the holder of copyright:
#{outdated_company.join("\n ")}
DESCRIPTION
    end
    # NOTE(review): the "All files with problems" summary is only emitted
    # when no_notice is non-empty — possibly intended to be unconditional;
    # preserved as-is, confirm before changing.
    if no_notice.size > 0
      description.concat <<DESCRIPTION
The following files have no notice or a notice I didn't recognize:
#{no_notice.join("\n ")}
DESCRIPTION
      description.concat <<DESCRIPTION
All files with problems:
#{bad_files.join("\n ")}
DESCRIPTION
    end
    description
  end
end
|
require 'test_helper'
# Smoke test: asserts that the ActiveRecordDoctor constant is defined and
# is a Module, i.e. the gem loaded correctly.
class ActiveRecordDoctorTest < ActiveSupport::TestCase
  test "truth" do
    assert_kind_of Module, ActiveRecordDoctor
  end
end
Remove active_record_doctor_test.rb
The file was unused.
|
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{bigindex-solr}
  s.version = "1.3.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["openplaces.org"]
  s.date = %q{2009-10-15}
  s.description = %q{Solr is an Apache open-source search server based on Lucene Java. This project aims to integrate Solr into your Rails application to work with Bigindex [http://github.com/openplaces/bigindex]}
  s.email = %q{bigrecord@openplaces.org}
  s.extra_rdoc_files = [
    "README.rdoc"
  ]
  # Everything shipped in the gem, including the bundled Solr/Jetty jars.
  s.files = [
    ".document",
    ".gitignore",
    "MIT-LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "bigindex-solr.gemspec",
    "examples/bigindex.yml",
    "install.rb",
    "lib/bigindex-solr.rb",
    "lib/bigindex-solr/tasks.rb",
    "lib/tasks/bigindex_solr.rake",
    "schema.xml",
    "solr/CHANGES.txt",
    "solr/LICENSE.txt",
    "solr/NOTICE.txt",
    "solr/etc/jetty.xml",
    "solr/etc/webdefault.xml",
    "solr/lib/jetty-6.1.3.jar",
    "solr/lib/jetty-util-6.1.3.jar",
    "solr/lib/jsp-2.1/ant-1.6.5.jar",
    "solr/lib/jsp-2.1/core-3.1.1.jar",
    "solr/lib/jsp-2.1/jsp-2.1.jar",
    "solr/lib/jsp-2.1/jsp-api-2.1.jar",
    "solr/lib/servlet-api-2.5-6.1.3.jar",
    "solr/solr/README.txt",
    "solr/solr/bin/abc",
    "solr/solr/bin/abo",
    "solr/solr/bin/backup",
    "solr/solr/bin/backupcleaner",
    "solr/solr/bin/commit",
    "solr/solr/bin/optimize",
    "solr/solr/bin/readercycle",
    "solr/solr/bin/rsyncd-disable",
    "solr/solr/bin/rsyncd-enable",
    "solr/solr/bin/rsyncd-start",
    "solr/solr/bin/rsyncd-stop",
    "solr/solr/bin/scripts-util",
    "solr/solr/bin/snapcleaner",
    "solr/solr/bin/snapinstaller",
    "solr/solr/bin/snappuller",
    "solr/solr/bin/snappuller-disable",
    "solr/solr/bin/snappuller-enable",
    "solr/solr/bin/snapshooter",
    "solr/solr/conf/admin-extra.html",
    "solr/solr/conf/elevate.xml",
    "solr/solr/conf/protwords.txt",
    "solr/solr/conf/schema.xml",
    "solr/solr/conf/scripts.conf",
    "solr/solr/conf/solrconfig.xml",
    "solr/solr/conf/spellings.txt",
    "solr/solr/conf/stopwords.txt",
    "solr/solr/conf/synonyms.txt",
    "solr/solr/conf/xslt/example.xsl",
    "solr/solr/conf/xslt/example_atom.xsl",
    "solr/solr/conf/xslt/example_rss.xsl",
    "solr/solr/conf/xslt/luke.xsl",
    "solr/start.jar",
    "solr/webapps/solr.war",
    "spec/bigindex-solr_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://bigrecord.org}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.5}
  s.summary = %q{Bootstraps the Solr search server into your Rails application for use with Bigindex. Gem version is based on the Solr version packaged.}
  s.test_files = [
    "spec/bigindex-solr_spec.rb",
    "spec/spec_helper.rb"
  ]

  # NOTE(review): Gem::RubyGemsVersion is deprecated in modern RubyGems;
  # regenerate this file with a current jeweler/rubygems before reuse.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, [">= 0"])
    else
      s.add_dependency(%q<rspec>, [">= 0"])
    end
  else
    s.add_dependency(%q<rspec>, [">= 0"])
  end
end
Updated Gemspec for version 1.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{bigindex-solr}
s.version = "1.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["openplaces.org"]
s.date = %q{2009-10-15}
s.description = %q{Solr is an Apache open-source search server based on Lucene Java. This project aims to integrate Solr into your Rails application to work with Bigindex [http://github.com/openplaces/bigindex]}
s.email = %q{bigrecord@openplaces.org}
s.extra_rdoc_files = [
"README.rdoc"
]
s.files = [
".gitignore",
"MIT-LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"bigindex-solr.gemspec",
"examples/bigindex.yml",
"install.rb",
"lib/bigindex-solr.rb",
"lib/bigindex-solr/tasks.rb",
"lib/tasks/bigindex_solr.rake",
"schema.xml",
"solr/CHANGES.txt",
"solr/LICENSE.txt",
"solr/NOTICE.txt",
"solr/etc/jetty.xml",
"solr/etc/webdefault.xml",
"solr/lib/jetty-6.1.3.jar",
"solr/lib/jetty-util-6.1.3.jar",
"solr/lib/jsp-2.1/ant-1.6.5.jar",
"solr/lib/jsp-2.1/core-3.1.1.jar",
"solr/lib/jsp-2.1/jsp-2.1.jar",
"solr/lib/jsp-2.1/jsp-api-2.1.jar",
"solr/lib/servlet-api-2.5-6.1.3.jar",
"solr/solr/README.txt",
"solr/solr/bin/abc",
"solr/solr/bin/abo",
"solr/solr/bin/backup",
"solr/solr/bin/backupcleaner",
"solr/solr/bin/commit",
"solr/solr/bin/optimize",
"solr/solr/bin/readercycle",
"solr/solr/bin/rsyncd-disable",
"solr/solr/bin/rsyncd-enable",
"solr/solr/bin/rsyncd-start",
"solr/solr/bin/rsyncd-stop",
"solr/solr/bin/scripts-util",
"solr/solr/bin/snapcleaner",
"solr/solr/bin/snapinstaller",
"solr/solr/bin/snappuller",
"solr/solr/bin/snappuller-disable",
"solr/solr/bin/snappuller-enable",
"solr/solr/bin/snapshooter",
"solr/solr/conf/admin-extra.html",
"solr/solr/conf/elevate.xml",
"solr/solr/conf/protwords.txt",
"solr/solr/conf/schema.xml",
"solr/solr/conf/scripts.conf",
"solr/solr/conf/solrconfig.xml",
"solr/solr/conf/spellings.txt",
"solr/solr/conf/stopwords.txt",
"solr/solr/conf/synonyms.txt",
"solr/solr/conf/xslt/example.xsl",
"solr/solr/conf/xslt/example_atom.xsl",
"solr/solr/conf/xslt/example_rss.xsl",
"solr/solr/conf/xslt/luke.xsl",
"solr/start.jar",
"solr/webapps/solr.war",
"spec/bigindex-solr_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://bigrecord.org}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.5}
s.summary = %q{Bootstraps the Solr search server into your Rails application for use with Bigindex. Gem version is based on the Solr version packaged.}
s.test_files = [
"spec/bigindex-solr_spec.rb",
"spec/spec_helper.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 0"])
else
s.add_dependency(%q<rspec>, [">= 0"])
end
else
s.add_dependency(%q<rspec>, [">= 0"])
end
end
|
module Scanny
  module Checks
    module SSL
      # Flags code that disables SSL certificate verification, either by
      # using OpenSSL::SSL::VERIFY_NONE or by assigning nil to a
      # connection's ca_file/ca_path.
      class VerifyCheck < Check
        def pattern
          [
            pattern_ssl_verify_none,
            pattern_ca_file
          ].join("|")
        end

        def check(node)
          issue :high, warning_message, :cwe => [296, 297, 298, 299, 300, 599]
        end

        private

        def warning_message
          # BUGFIX: added the missing space between "can" and "lead"
          # (previously rendered as "canlead").
          "Disable certificate verification can " +
            "lead to connect to an unauthorized server"
        end

        # OpenSSL::SSL::VERIFY_NONE
        def pattern_ssl_verify_none
          <<-EOT
            ScopedConstant<
              name = :VERIFY_NONE,
              parent = ScopedConstant<
                name = :SSL,
                parent = ConstantAccess<name = :OpenSSL>
              >
            >
          EOT
        end

        # ssl_context.ca_file = nil
        #
        # BUGFIX: an AttributeAssignment node carries the *writer* method
        # name (trailing '='), so the pattern must match :ca_path=/:ca_file=
        # — the old :ca_path | :ca_file never matched anything.
        def pattern_ca_file
          <<-EOT
            AttributeAssignment<
              arguments = ActualArguments<
                array = [
                  NilLiteral
                ]
              >,
              name = :ca_path= | :ca_file=
            >
          EOT
        end
      end
    end
  end
end
AttributeAssignment: use the writer method name (with a trailing '=') in the pattern
module Scanny
  module Checks
    module SSL
      # Reports code that turns off SSL certificate verification: either a
      # use of OpenSSL::SSL::VERIFY_NONE, or assigning nil to ca_file/ca_path.
      class VerifyCheck < Check
        # Alternation of the two sub-patterns below.
        def pattern
          "#{pattern_ssl_verify_none}|#{pattern_ca_file}"
        end

        # Every node matched by #pattern is a high-severity issue.
        def check(node)
          issue :high, warning_message, :cwe => [296, 297, 298, 299, 300, 599]
        end

        private

        def warning_message
          "Disable certificate verification can lead to connect to an unauthorized server"
        end

        # Matches OpenSSL::SSL::VERIFY_NONE
        def pattern_ssl_verify_none
          <<-PATTERN
            ScopedConstant<
              name = :VERIFY_NONE,
              parent = ScopedConstant<
                name = :SSL,
                parent = ConstantAccess<name = :OpenSSL>
              >
            >
          PATTERN
        end

        # Matches e.g. ssl_context.ca_file = nil
        def pattern_ca_file
          <<-PATTERN
            AttributeAssignment<
              arguments = ActualArguments<
                array = [
                  NilLiteral
                ]
              >,
              name = :ca_path= | :ca_file=
            >
          PATTERN
        end
      end
    end
  end
end
module Seek
  module Data
    # Extracts compound information (compound ids, row attributes, SMILES
    # strings) from uploaded spreadsheet DataFiles. Results are held in
    # Rails.cache, keyed on the newest DataFile's cache_key plus the
    # requesting user's cache_key, so caches invalidate on any data change.
    class CompoundsExtraction
      # compounds hash
      # Merges the per-file compound hashes of every DataFile into one hash.
      # On key collisions the two attribute hashes are merged.
      def self.get_compounds_hash user=User.current_user
        Rails.cache.fetch("#{DataFile.order("updated_at desc").first.cache_key}-#{user.try(:cache_key)}-compounds-hash-all") do
          compounds_hash = {}
          DataFile.all.each do |df|
            compounds_hash.merge!(get_compounds_hash_per_file(df, user)) { |compound_id, attr1, attr2| Hash(attr1).merge(Hash(attr2)) }
          end
          compounds_hash
        end
      end

      # Builds {standardized_compound_id => {"<data_file_id>" => attrs}} for
      # one file by scanning every sheet whose header row mentions "compound".
      # Attribute values (other than the compound id itself) are replaced
      # with "hidden" when the user may not download the file.
      def self.get_compounds_hash_per_file(data_file, user=User.current_user)
        Rails.cache.fetch("#{data_file.cache_key}-#{user.try(:cache_key)}-compounds-hash-per-file") do
          compounds_hash = {}
          if data_file.spreadsheet
            begin
              # Sheets whose first (header) row contains a "compound" column.
              compound_id_sheets = data_file.spreadsheet.sheets.select { |sh| sh.actual_rows.sort_by(&:index)[0].actual_cells.detect { |cell| cell.value.match(/compound/i) } }
            rescue NoMethodError, NameError => e
              # Malformed/empty spreadsheets: treat as having no sheets.
              compound_id_sheets = nil
            end
            if !compound_id_sheets.blank?
              compound_id_sheets.each do |sheet|
                header_cells = sheet.actual_rows.sort_by(&:index)[0].actual_cells.reject { |cell| cell.value.empty? }
                compound_attributes = []
                header_hash = {}
                header_cells.each do |head_cell|
                  header_hash[head_cell.column] = head_cell.value
                end
                compound_id_cell = header_cells.detect { |cell| cell.value.match(/compound/i) }
                compound_id_column = compound_id_cell.column
                # cell content df.spreadsheet.sheets.first.rows[1].cells[1].value
                # Data rows (index > 1) whose compound-id cell looks like a compound name.
                sheet.actual_rows.select { |row| row.index > 1 && Seek::Data::DataMatch.compound_name?(row.actual_cells.detect { |cell| cell.column == compound_id_column }.try(:value)) }.each do |row|
                  row_hash = {}
                  row.actual_cells.each do |cell|
                    attr_name = header_hash[cell.column]
                    attr_value = !data_file.can_download?(user) && cell.column != compound_id_column ? "hidden" : cell.value
                    row_hash[attr_name] = attr_value if !attr_name.blank?
                  end
                  compound_attributes << row_hash
                end
                # get hash
                grouped_attributes_by_compound_id = compound_attributes.group_by { |attr| attr[compound_id_cell.value] }
                grouped_attributes_by_compound_id.each do |id, attr|
                  standardized_compound_id = Seek::Data::DataMatch.standardize_compound_name(id)
                  compounds_hash[standardized_compound_id] = {"#{data_file.id}" => attr.first}#attr.first.select { |attr_name, attr_value| report_attributes?(attr_name) }
                end
              end
            end
          end
          compounds_hash
        end
      end

      # Merges the per-file compound-id => SMILES hashes of every DataFile,
      # preferring the first non-blank, non-"hidden" value, sorted by id.
      def self.get_compound_id_smiles_hash user=User.current_user
        Rails.cache.fetch("#{DataFile.order('updated_at desc').first.cache_key}-#{user.try(:cache_key)}-all-compound-id-smile-hash") do
          id_smiles_hash = {}
          DataFile.all.each do |df|
            id_smiles_hash.merge!(get_compound_id_smiles_hash_per_file(df, user)) { |key, v1, v2| [v1, v2].detect { |v| !v.blank? && v != "hidden" } || v1 }
          end
          #sort by key
          id_smiles_hash.sort_by { |k, v| k.to_s }.to_h
        end
      end

      # Builds {standardized_compound_id => smiles} for one file by pairing
      # the "compound" and "smile" columns via the file's spreadsheet XML.
      # NOTE(review): reads columns across the whole document rather than
      # per sheet — see the sheet-iterating variant in the later revision.
      def self.get_compound_id_smiles_hash_per_file data_file, user=User.current_user
        Rails.cache.fetch("#{data_file.cache_key}-#{user.try(:cache_key)}-compound-id-smile-hash") do
          id_smiles_hash = {}
          # temporarily: only extractable Excel spreadsheets are handled
          if data_file.content_blob.is_extractable_spreadsheet?
            xml = data_file.spreadsheet_xml
            doc = LibXML::XML::Parser.string(xml).parse
            doc.root.namespaces.default_prefix="ss"
            #doc.find("//ss:sheet")
            #if s["hidden"] == "false" && s["very_hidden"] == "false"
            compound_id_cells = get_column_cells doc, "compound"
            smiles_cells = get_column_cells doc, "smile"
            compound_id_cells.each do |id_cell|
              row_index = id_cell.attributes["row"]
              # SMILES cell on the same row as the compound id cell.
              smile = smiles_cells.detect { |cell| cell.attributes["row"] == row_index }.try(:content)
              if id_cell && Seek::Data::DataMatch.compound_name?(id_cell.content) && !smile.blank?
                standardized_compound_id = Seek::Data::DataMatch.standardize_compound_name(id_cell.content)
                smile_or_hidden = data_file.can_download?(user) ? smile : "hidden"
                #do not override if it already exists in the database
                unless id_smiles_hash.key?(standardized_compound_id)
                  id_smiles_hash[standardized_compound_id] = smile_or_hidden
                end
              end
            end
          end
          id_smiles_hash
        end
      end

      # Deletes every cache entry written by the methods above, for every
      # user (and the anonymous nil user).
      def self.clear_cache
        (User.all+[nil]).each do |user|
          Rails.cache.delete("#{DataFile.order("updated_at desc").first.cache_key}-#{user.try(:cache_key)}-compounds-hash-all")
          Rails.cache.delete("#{DataFile.order('updated_at desc').first.cache_key}-#{user.try(:cache_key)}-all-compound-id-smile-hash")
          DataFile.all.each do |df|
            Rails.cache.delete("#{df.cache_key}-#{user.try(:cache_key)}-compounds-hash-per-file")
            Rails.cache.delete("#{df.cache_key}-#{user.try(:cache_key)}-compound-id-smile-hash")
          end
        end
      end

      private

      # Body cells of the first visible-sheet column whose header matches
      # column_name (case-insensitive). NOTE(review): `private` has no
      # effect on `def self.` singleton methods — this is effectively public.
      def self.get_column_cells doc, column_name
        head_cells = doc.find("//ss:sheet[@hidden='false' and @very_hidden='false']/ss:rows/ss:row/ss:cell").find_all { |cell| cell.content.gsub(/\s+/, " ").strip.match(/#{column_name}/i) }
        body_cells = []
        unless head_cells.blank?
          head_cell = head_cells[0]
          head_col = head_cell.attributes["column"]
          body_cells = doc.find("//ss:sheet[@hidden='false' and @very_hidden='false']/ss:rows/ss:row/ss:cell[@column=#{head_col} and @row != 1]").find_all { |cell| !cell.content.blank? }
        end
        body_cells
      end

      # def self.get_column_cells_from_sheet s, column_name
      #   head_cells = s.find("./ss:rows/ss:row/ss:cell").find_all { |cell| cell.content.gsub(/\s+/, " ").strip.match(/#{column_name}/i) }
      #   body_cells = []
      #   unless head_cells.blank?
      #     head_cell = head_cells[0]
      #     head_col = head_cell.attributes["column"]
      #     body_cells = s.find("./ss:rows/ss:row/ss:cell[@column=#{head_col} and @row != 1]").find_all { |cell| !cell.content.blank? }
      #   end
      #   body_cells
      # end
    end
  end
end
Additional changes for the SMILES bug: iterate over all sheets in each file when reading the compound/SMILES columns
module Seek
  module Data
    # Extracts compound information (compound ids, row attributes, SMILES
    # strings) from uploaded spreadsheet DataFiles. Results are held in
    # Rails.cache, keyed on the newest DataFile's cache_key plus the
    # requesting user's cache_key, so caches invalidate on any data change.
    class CompoundsExtraction
      # compounds hash
      # Merges the per-file compound hashes of every DataFile into one hash.
      # On key collisions the two attribute hashes are merged.
      def self.get_compounds_hash user=User.current_user
        Rails.cache.fetch("#{DataFile.order("updated_at desc").first.cache_key}-#{user.try(:cache_key)}-compounds-hash-all") do
          compounds_hash = {}
          DataFile.all.each do |df|
            compounds_hash.merge!(get_compounds_hash_per_file(df, user)) { |compound_id, attr1, attr2| Hash(attr1).merge(Hash(attr2)) }
          end
          compounds_hash
        end
      end

      # Builds {standardized_compound_id => {"<data_file_id>" => attrs}} for
      # one file by scanning every sheet whose header row mentions "compound".
      # Attribute values (other than the compound id itself) are replaced
      # with "hidden" when the user may not download the file.
      def self.get_compounds_hash_per_file(data_file, user=User.current_user)
        Rails.cache.fetch("#{data_file.cache_key}-#{user.try(:cache_key)}-compounds-hash-per-file") do
          compounds_hash = {}
          if data_file.spreadsheet
            begin
              # Sheets whose first (header) row contains a "compound" column.
              compound_id_sheets = data_file.spreadsheet.sheets.select { |sh| sh.actual_rows.sort_by(&:index)[0].actual_cells.detect { |cell| cell.value.match(/compound/i) } }
            rescue NoMethodError, NameError => e
              # Malformed/empty spreadsheets: treat as having no sheets.
              compound_id_sheets = nil
            end
            if !compound_id_sheets.blank?
              compound_id_sheets.each do |sheet|
                header_cells = sheet.actual_rows.sort_by(&:index)[0].actual_cells.reject { |cell| cell.value.empty? }
                compound_attributes = []
                header_hash = {}
                header_cells.each do |head_cell|
                  header_hash[head_cell.column] = head_cell.value
                end
                compound_id_cell = header_cells.detect { |cell| cell.value.match(/compound/i) }
                compound_id_column = compound_id_cell.column
                # cell content df.spreadsheet.sheets.first.rows[1].cells[1].value
                # Data rows (index > 1) whose compound-id cell looks like a compound name.
                sheet.actual_rows.select { |row| row.index > 1 && Seek::Data::DataMatch.compound_name?(row.actual_cells.detect { |cell| cell.column == compound_id_column }.try(:value)) }.each do |row|
                  row_hash = {}
                  row.actual_cells.each do |cell|
                    attr_name = header_hash[cell.column]
                    attr_value = !data_file.can_download?(user) && cell.column != compound_id_column ? "hidden" : cell.value
                    row_hash[attr_name] = attr_value if !attr_name.blank?
                  end
                  compound_attributes << row_hash
                end
                # get hash
                grouped_attributes_by_compound_id = compound_attributes.group_by { |attr| attr[compound_id_cell.value] }
                grouped_attributes_by_compound_id.each do |id, attr|
                  standardized_compound_id = Seek::Data::DataMatch.standardize_compound_name(id)
                  compounds_hash[standardized_compound_id] = {"#{data_file.id}" => attr.first}#attr.first.select { |attr_name, attr_value| report_attributes?(attr_name) }
                end
              end
            end
          end
          compounds_hash
        end
      end

      # Merges the per-file compound-id => SMILES hashes of every DataFile,
      # preferring the first non-blank, non-"hidden" value, sorted by id.
      def self.get_compound_id_smiles_hash user=User.current_user
        Rails.cache.fetch("#{DataFile.order('updated_at desc').first.cache_key}-#{user.try(:cache_key)}-all-compound-id-smile-hash") do
          id_smiles_hash = {}
          DataFile.all.each do |df|
            id_smiles_hash.merge!(get_compound_id_smiles_hash_per_file(df, user)) { |key, v1, v2| [v1, v2].detect { |v| !v.blank? && v != "hidden" } || v1 }
          end
          #sort by key
          id_smiles_hash.sort_by { |k, v| k.to_s }.to_h
        end
      end

      # Builds {standardized_compound_id => smiles} for one file, iterating
      # over each visible sheet and pairing its "compound" and "smile"
      # columns row by row via the file's spreadsheet XML.
      def self.get_compound_id_smiles_hash_per_file data_file, user=User.current_user
        Rails.cache.fetch("#{data_file.cache_key}-#{user.try(:cache_key)}-compound-id-smile-hash") do
          id_smiles_hash = {}
          # temporarily: only extractable Excel spreadsheets are handled
          if data_file.content_blob.is_extractable_spreadsheet?
            xml = data_file.spreadsheet_xml
            doc = LibXML::XML::Parser.string(xml).parse
            doc.root.namespaces.default_prefix="ss"
            doc.find("//ss:sheet").each do |s|
              if s["hidden"] == "false" && s["very_hidden"] == "false"
                compound_id_cells = get_column_cells_from_sheet s, "compound" #use s instead of doc + separate function
                smiles_cells = get_column_cells_from_sheet s, "smile"
                compound_id_cells.each do |id_cell|
                  row_index = id_cell.attributes["row"]
                  # SMILES cell on the same row as the compound id cell.
                  smile = smiles_cells.detect { |cell| cell.attributes["row"] == row_index }.try(:content)
                  if id_cell && Seek::Data::DataMatch.compound_name?(id_cell.content) && !smile.blank?
                    standardized_compound_id = Seek::Data::DataMatch.standardize_compound_name(id_cell.content)
                    smile_or_hidden = data_file.can_download?(user) ? smile : "hidden"
                    # do not override if it already exists in the database
                    unless id_smiles_hash.key?(standardized_compound_id)
                      id_smiles_hash[standardized_compound_id] = smile_or_hidden
                    end
                  end
                end #compound cells
              end #if hidden
            end #of sheet
          end #of extractable spreadsheet
          id_smiles_hash
        end #cache fetch do
      end

      # Deletes every cache entry written by the methods above, for every
      # user (and the anonymous nil user).
      def self.clear_cache
        (User.all+[nil]).each do |user|
          Rails.cache.delete("#{DataFile.order("updated_at desc").first.cache_key}-#{user.try(:cache_key)}-compounds-hash-all")
          Rails.cache.delete("#{DataFile.order('updated_at desc').first.cache_key}-#{user.try(:cache_key)}-all-compound-id-smile-hash")
          DataFile.all.each do |df|
            Rails.cache.delete("#{df.cache_key}-#{user.try(:cache_key)}-compounds-hash-per-file")
            Rails.cache.delete("#{df.cache_key}-#{user.try(:cache_key)}-compound-id-smile-hash")
          end
        end
      end

      private

      # def self.get_column_cells doc, column_name
      #   head_cells = doc.find("//ss:sheet[@hidden='false' and @very_hidden='false']/ss:rows/ss:row/ss:cell").find_all { |cell| cell.content.gsub(/\s+/, " ").strip.match(/#{column_name}/i) }
      #   body_cells = []
      #   unless head_cells.blank?
      #     head_cell = head_cells[0]
      #     head_col = head_cell.attributes["column"]
      #     body_cells = doc.find("//ss:sheet[@hidden='false' and @very_hidden='false']/ss:rows/ss:row/ss:cell[@column=#{head_col} and @row != 1]").find_all { |cell| !cell.content.blank? }
      #   end
      #   body_cells
      # end

      # Body cells of one sheet's column whose header matches column_name
      # (case-insensitive). NOTE(review): `private` has no effect on
      # `def self.` singleton methods — this is effectively public.
      def self.get_column_cells_from_sheet s, column_name
        head_cells = s.find("./ss:rows/ss:row/ss:cell").find_all { |cell| cell.content.gsub(/\s+/, " ").strip.match(/#{column_name}/i) }
        body_cells = []
        unless head_cells.blank?
          head_cell = head_cells[0]
          head_col = head_cell.attributes["column"]
          body_cells = s.find("./ss:rows/ss:row/ss:cell[@column=#{head_col} and @row != 1]").find_all { |cell| !cell.content.blank? }
        end
        body_cells
      end
    end
  end
end
module Seek
  module Publishing
    # Former ISA publishing controller mix-in. The entire implementation is
    # disabled via =begin/=end below. NOTE(review): the accompanying change
    # log says this file is being removed — confirm nothing still requires
    # it before deleting.
    module IsaPublishing
=begin
def self.included(base)
base.before_filter :set_asset, :only=>[:isa_publishing_preview,:isa_publish]
end
def isa_publishing_preview
asset_type_name = @template.text_for_resource @asset
respond_to do |format|
format.html { render :template=>"assets/publishing/isa_publishing_preview",:locals=>{:asset_type_name=>asset_type_name} }
end
end
def isa_publish
if request.post?
items_for_publishing = resolve_publish_params params[:publish]
items_for_publishing = items_for_publishing.select{|i| !i.is_published?}
@notified_items = items_for_publishing.select{|i| !i.can_manage?}
publish_authorized_items = (items_for_publishing - @notified_items).select(&:publish_authorized?)
@published_items = publish_authorized_items.select(&:can_publish?)
@waiting_for_publish_items = publish_authorized_items - @published_items
if Seek::Config.email_enabled && !@notified_items.empty?
deliver_publishing_notifications @notified_items
end
@published_items.each do |item|
item.publish!
ResourcePublishLog.add_publish_log ResourcePublishLog::PUBLISHED, item
end
@waiting_for_publish_items.each do |item|
ResourcePublishLog.add_publish_log ResourcePublishLog::WAITING_FOR_APPROVAL, item
deliver_request_publish_approval item
end
respond_to do |format|
flash.now[:notice]="Publishing complete"
format.html { render :template=>"assets/publishing/isa_published" }
end
else
redirect_to @asset
end
end
def set_asset
@asset = self.controller_name.classify.constantize.find_by_id(params[:id])
end
private
def deliver_publishing_notifications items_for_notification
owners_items={}
items_for_notification.each do |item|
item.managers.each do |person|
owners_items[person]||=[]
owners_items[person] << item
end
end
owners_items.keys.each do |owner|
begin
Mailer.deliver_request_publishing User.current_user.person,owner,owners_items[owner],base_host
rescue Exception => e
Rails.logger.error("Error sending notification email to the owner #{owner.name} - #{e.message}")
end
end
end
=end
    end
  end
end
Remove isa_publishing.rb
|
# -*- encoding: utf-8 -*-
module SendGrid4r::REST
  #
  # SendGrid Web API v3 EmailActivity
  #
  module EmailActivity
    include Request

    #
    # SendGrid Web API v3 Event - AggregatedBy
    #
    module Event
      BOUNCES = 'bounces'
      CLICKS = 'clicks'
      DEFERRED = 'deferred'
      DELIVERED = 'delivered'
      DROPS = 'drops'
      GROUP_UNSUBSCRIBE = 'group_unsubscribe'
      GROUP_RESUBSCRIBE = 'group_resubscribe'
      OPENS = 'opens'
      PROCESSED = 'processed'
      PARSEAPI = 'parseapi'
      SPAM_REPORTS = 'spam_reports'
      UNSUBSCRIBE = 'unsubscribes'
    end

    # One row of email activity. :unique_arguments appended for API parity;
    # a backward-compatible Struct extension (new trailing member).
    Activity = Struct.new(
      :email, :event, :created, :category, :smtp_id, :asm_group_id,
      :msg_id, :ip, :url, :reason, :unique_arguments
    )

    def self.url
      "#{BASE_URL}/email_activity"
    end

    # Maps an array of response hashes to Activity structs (nil passthrough).
    def self.create_activities(resp)
      return resp if resp.nil?
      resp.map { |activity| EmailActivity.create_activity(activity) }
    end

    # Converts one response hash into an Activity; the epoch 'created'
    # value becomes a Time.
    def self.create_activity(resp)
      return resp if resp.nil?
      Activity.new(
        resp['email'],
        resp['event'],
        resp['created'].nil? ? nil : Time.at(resp['created']),
        resp['category'],
        resp['smtp_id'],
        resp['asm_group_id'],
        resp['msg_id'],
        resp['ip'],
        resp['url'],
        resp['reason'],
        resp['unique_arguments'] # BUGFIX: was dropped from the response
      )
    end

    # GET /email_activity with the given optional filters; times are sent
    # as epoch integers.
    def get_email_activities(
      email: nil, events: nil, exclude_events: nil, start_time: nil,
      end_time: nil, &block
    )
      params = {}
      params['email'] = email unless email.nil?
      params['events'] = events unless events.nil?
      params['exclude_events'] = exclude_events unless exclude_events.nil?
      params['start_time'] = start_time.to_i unless start_time.nil?
      params['end_time'] = end_time.to_i unless end_time.nil?
      resp = get(@auth, EmailActivity.url, params, &block)
      finish(resp, @raw_resp) { |r| EmailActivity.create_activities(r) }
    end
  end
end
Add unique_arguments to EmailActivity response
# -*- encoding: utf-8 -*-

module SendGrid4r::REST
  #
  # SendGrid Web API v3 EmailActivity
  #
  module EmailActivity
    include Request

    #
    # SendGrid Web API v3 Event - AggregatedBy
    #
    module Event
      BOUNCES = 'bounces'
      CLICKS = 'clicks'
      DEFERRED = 'deferred'
      DELIVERED = 'delivered'
      DROPS = 'drops'
      GROUP_UNSUBSCRIBE = 'group_unsubscribe'
      GROUP_RESUBSCRIBE = 'group_resubscribe'
      OPENS = 'opens'
      PROCESSED = 'processed'
      PARSEAPI = 'parseapi'
      SPAM_REPORTS = 'spam_reports'
      UNSUBSCRIBE = 'unsubscribes'
    end

    # A single parsed email-activity record returned by the API.
    Activity = Struct.new(
      :email, :event, :created, :category, :smtp_id, :asm_group_id,
      :msg_id, :ip, :url, :reason, :unique_arguments
    )

    # Endpoint for the email activity resource.
    def self.url
      "#{BASE_URL}/email_activity"
    end

    # Maps an array of response hashes to Activity structs; nil passes through.
    def self.create_activities(resp)
      return resp if resp.nil?
      resp.map { |entry| create_activity(entry) }
    end

    # Converts one response hash into an Activity. The epoch seconds in
    # 'created' are converted to a Time; everything else is copied verbatim.
    def self.create_activity(resp)
      return resp if resp.nil?
      created_at = resp['created'].nil? ? nil : Time.at(resp['created'])
      Activity.new(
        resp['email'], resp['event'], created_at, resp['category'],
        resp['smtp_id'], resp['asm_group_id'], resp['msg_id'],
        resp['ip'], resp['url'], resp['reason'], resp['unique_arguments']
      )
    end

    # Fetches email activity, applying only the filters that were supplied.
    # Time filters are sent to the API as epoch integers.
    def get_email_activities(
      email: nil, events: nil, exclude_events: nil, start_time: nil,
      end_time: nil, &block
    )
      params = {
        'email' => email,
        'events' => events,
        'exclude_events' => exclude_events,
        'start_time' => start_time.nil? ? nil : start_time.to_i,
        'end_time' => end_time.nil? ? nil : end_time.to_i
      }.reject { |_key, value| value.nil? }
      resp = get(@auth, EmailActivity.url, params, &block)
      finish(resp, @raw_resp) { |r| EmailActivity.create_activities(r) }
    end
  end
end
module Sequel
  module Plugins
    # The BooleanReaders plugin allows for the creation of attribute? methods
    # for boolean columns, which provide a nicer API. By default, the accessors
    # are created for all columns of type :boolean. However, you can provide a
    # block to the plugin to change the criteria used to determine if a
    # column is boolean:
    #
    #   Sequel::Model.plugin(:boolean_readers){|c| db_schema[c][:db_type] =~ /\Atinyint/}
    #
    # This may be useful if you are using MySQL and have some tinyint columns
    # that represent booleans and others that represent integers. You can turn
    # the convert_tinyint_to_bool setting off and use the attribute methods for
    # the integer value and the attribute? methods for the boolean value.
    module BooleanReaders
      # Default proc for determining if given column is a boolean, which
      # just checks that the :type is boolean.  Guards against columns that
      # have no db_schema entry (db_schema[c] returns nil for them), so such
      # columns are simply not treated as boolean instead of raising
      # NoMethodError on nil.
      DEFAULT_BOOLEAN_ATTRIBUTE_PROC = lambda{|c| s = db_schema[c] and s[:type] == :boolean}

      # Add the boolean_attribute? class method to the model, and create
      # attribute? boolean reader methods for the class's columns if the class has a dataset.
      def self.configure(model, &block)
        model.meta_def(:boolean_attribute?, &(block || DEFAULT_BOOLEAN_ATTRIBUTE_PROC))
        model.instance_eval{send(:create_boolean_readers) if @dataset}
      end

      module ClassMethods
        # Create boolean readers for the class using the columns from the new dataset.
        def set_dataset(*args)
          super
          create_boolean_readers
          self
        end

        private

        # Add an attribute? method for the column to a module included in the class.
        def create_boolean_reader(column)
          overridable_methods_module.module_eval do
            define_method("#{column}?"){model.db.typecast_value(:boolean, send(column))}
          end
        end

        # Add attribute? methods for all of the boolean attributes for this model.
        # Skips columns that already have a ? method defined.
        def create_boolean_readers
          im = instance_methods.collect{|x| x.to_s}
          cs = columns rescue return
          cs.each{|c| create_boolean_reader(c) if boolean_attribute?(c) && !im.include?("#{c}?")}
        end
      end
    end
  end
end
Minor fix to boolean readers plugin to handle columns that are not in the db_schema
module Sequel
  module Plugins
    # The BooleanReaders plugin allows for the creation of attribute? methods
    # for boolean columns, which provide a nicer API. By default, the accessors
    # are created for all columns of type :boolean. However, you can provide a
    # block to the plugin to change the criteria used to determine if a
    # column is boolean:
    #
    #   Sequel::Model.plugin(:boolean_readers){|c| db_schema[c][:db_type] =~ /\Atinyint/}
    #
    # This may be useful if you are using MySQL and have some tinyint columns
    # that represent booleans and others that represent integers. You can turn
    # the convert_tinyint_to_bool setting off and use the attribute methods for
    # the integer value and the attribute? methods for the boolean value.
    module BooleanReaders
      # Default proc for determining if given column is a boolean, which
      # just checks that the :type is boolean.  The intermediate assignment
      # guards against columns with no db_schema entry: db_schema[c] is nil
      # for them, and `nil and ...` short-circuits to nil (not boolean)
      # instead of raising NoMethodError.
      DEFAULT_BOOLEAN_ATTRIBUTE_PROC = lambda{|c| s = db_schema[c] and s[:type] == :boolean}

      # Add the boolean_attribute? class method to the model, and create
      # attribute? boolean reader methods for the class's columns if the class has a dataset.
      def self.configure(model, &block)
        model.meta_def(:boolean_attribute?, &(block || DEFAULT_BOOLEAN_ATTRIBUTE_PROC))
        model.instance_eval{send(:create_boolean_readers) if @dataset}
      end

      module ClassMethods
        # Create boolean readers for the class using the columns from the new dataset.
        def set_dataset(*args)
          super
          create_boolean_readers
          self
        end

        private

        # Add an attribute? method for the column to a module included in the class.
        def create_boolean_reader(column)
          overridable_methods_module.module_eval do
            define_method("#{column}?"){model.db.typecast_value(:boolean, send(column))}
          end
        end

        # Add attribute? methods for all of the boolean attributes for this model.
        # `columns` may raise when no DB connection/schema is available; in that
        # case silently skip reader creation (rescue-modifier return).
        def create_boolean_readers
          im = instance_methods.collect{|x| x.to_s}
          cs = columns rescue return
          cs.each{|c| create_boolean_reader(c) if boolean_attribute?(c) && !im.include?("#{c}?")}
        end
      end
    end
  end
end
|
module SerializableAttributes
  # Schema describes the serialized fields stored in one blob column of an
  # ActiveRecord model, and wires reader/writer/dirty-tracking methods onto
  # that model via metaprogramming in #initialize and #field.
  class Schema
    class << self
      attr_writer :default_formatter

      # Formatter used when none is passed to #initialize; handles
      # encoding/decoding the blob (JSON via ActiveSupport by default).
      def default_formatter
        @default_formatter ||= SerializableAttributes::Format::ActiveSupportJson
      end
    end

    # The formatter instance used by #encode / #decode.
    attr_accessor :formatter
    attr_reader :model, :field, :fields

    # Names (Strings) of all declared serialized fields; empty when none.
    def all_column_names
      fields ? fields.keys : []
    end

    # Encodes a hash of serialized attributes into blob form.  Each value
    # belonging to a declared field is first converted by that field's type,
    # then the whole hash is handed to the formatter.  The input hash is
    # dup'ed so the caller's copy is not mutated.
    def encode(body)
      body = body.dup
      body.each do |key, value|
        if field = fields[key]
          body[key] = field.encode(value)
        end
      end
      formatter.encode(body)
    end

    # Decodes blob data into a hash of serialized attributes.
    #
    # data          - the raw blob (formatter input).
    # is_new_record - when true and the blob is blank, seeds the hash with
    #                 each field's default value.
    #
    # The returned hash has a default block that lazily fills in a declared
    # field's type default on first access of a missing key.  Keys not
    # declared in the schema are dropped.
    def decode(data, is_new_record = false)
      decoded = formatter.decode(data)
      hash = ::Hash.new do |h, key|
        if type = fields[key]
          h[key] = type ? type.default : nil
        end
      end
      decoded.each do |k, v|
        next unless include?(k)
        type = fields[k]
        hash[k] = type ? type.parse(v) : v
      end
      if decoded.blank? && is_new_record
        fields.each do |key, type|
          hash[key] = type.default if type.default
        end
      end
      hash
    end

    # True when key (Symbol or String) names a declared serialized field.
    def include?(key)
      @fields.include?(key.to_s)
    end

    # Initializes a new Schema. See `ModelMethods#serialize_attributes`.
    #
    # model   - The ActiveRecord class.
    # field   - The String name of the ActiveRecord attribute that holds
    #           data.
    # options - Optional Hash:
    #           :blob      - The String name of the actual DB field. Defaults to
    #                        "raw_#{field}"
    #           :formatter - The module that handles encoding and decoding the
    #                        data. The default is set in
    #                        `Schema#default_formatter`.
    def initialize(model, field, options)
      @model, @field, @fields = model, field, {}
      @blob_field = options.delete(:blob) || "raw_#{@field}"
      @formatter = options.delete(:formatter) || self.class.default_formatter
      # Locals captured by the define_method closures below.
      blob_field = @blob_field
      data_field = @field
      meta_model = class << @model; self; end
      changed_ivar = "#{data_field}_changed"
      # Expose this schema on the model class as #{field}_schema.
      meta_model.send(:attr_accessor, "#{data_field}_schema")
      @model.send("#{data_field}_schema=", self)
      # Reloading a record must drop the memoized decoded hash.
      @model.class_eval do
        def reload(options = nil)
          reset_serialized_data
          super
        end
      end
      # Class-level attribute_names: real columns plus serialized fields.
      meta_model.send(:define_method, :attribute_names) do
        column_names + send("#{data_field}_schema").all_column_names
      end
      # Clears the memoized decoded hash so it is re-read from the blob.
      @model.send(:define_method, :reset_serialized_data) do
        instance_variable_set("@#{data_field}", nil)
      end
      # Instance-level attribute_names: hide the raw blob column, include
      # the currently loaded serialized keys, sorted as strings.
      @model.send(:define_method, :attribute_names) do
        (super() + send(data_field).keys - [blob_field]).
          map! { |s| s.to_s }.sort!
      end
      # Route reads of serialized fields to the decoded hash; everything
      # else falls through to ActiveRecord.
      # NOTE(review): this calls a method literally named `data`, which only
      # resolves when the serialized field itself is :data — presumably
      # send(data_field) was intended for other field names. TODO confirm.
      @model.send(:define_method, :read_attribute) do |attribute_name|
        schema = self.class.send("#{data_field}_schema")
        if schema.include?(attribute_name)
          data[attribute_name.to_s]
        else
          super(attribute_name)
        end
      end
      # Lazily decodes the blob on first access and memoizes the hash;
      # pending change tracking is reset when a fresh decode happens.
      @model.send(:define_method, data_field) do
        instance_variable_get("@#{data_field}") || begin
          instance_variable_get("@#{changed_ivar}").clear if send("#{changed_ivar}?")
          schema = self.class.send("#{data_field}_schema")
          hash = schema.decode(send(blob_field), new_record?)
          instance_variable_set("@#{data_field}", hash)
          hash
        end
      end
      # Writes one serialized field: records the previous value for dirty
      # tracking (first write only), parses via the field type, and deletes
      # the key when the parsed value is nil.
      @model.send(:define_method, :write_serialized_field) do |name, value|
        raw_data = send(data_field) # load fields if needed
        name_str = name.to_s
        schema = self.class.send("#{data_field}_schema")
        type = schema.fields[name_str]
        changed_fields = send(changed_ivar)
        instance_variable_get("@#{changed_ivar}")[name_str] = raw_data[name_str] unless changed_fields.include?(name_str)
        parsed_value = type ? type.parse(value) : value
        if parsed_value.nil?
          raw_data.delete(name_str)
        else
          raw_data[name_str] = parsed_value
        end
        parsed_value
      end
      # #{field}_changed: names of serialized fields written since load.
      @model.send(:define_method, changed_ivar) do
        hash = instance_variable_get("@#{changed_ivar}") || instance_variable_set("@#{changed_ivar}", {})
        hash.keys
      end
      # #{field}_changed?: true when any serialized field was written.
      @model.send(:define_method, "#{changed_ivar}?") do
        !send(changed_ivar).empty?
      end
      # Persist: re-encode the (possibly modified) hash into the blob column.
      @model.before_save do |r|
        schema = r.class.send("#{data_field}_schema")
        r.send("#{blob_field}=", schema.encode(r.send(data_field)))
      end
    end

    # Adds the accessors for a serialized field on this model. Also sets up
    # the encoders and decoders.
    #
    # type_name - The Symbol matching a valid type.
    # *names    - One or more Symbol field names.
    # options   - Optional Hash to be sent to the initialized Type.
    #             :default - Sets the default value.
    #
    # Returns nothing.
    def field(type_name, *names)
      options = names.extract_options!
      data_field = @field
      changed_ivar = "#{data_field}_changed"
      type = SerializableAttributes.types[type_name].new(options)
      names.each do |name|
        name_str = name.to_s
        @fields[name_str] = type
        # Reader: value straight from the decoded hash.
        @model.send(:define_method, name) do
          send(data_field)[name_str]
        end
        # Boolean fields additionally get a predicate alias.
        # (Boolean here resolves within the SerializableAttributes
        # namespace — presumably the plugin's Boolean type; confirm.)
        if type.is_a? Boolean
          @model.send :alias_method, "#{name}?", name
        end
        @model.send(:define_method, "#{name}=") do |value|
          write_serialized_field name_str, value
        end
        @model.send(:define_method, "#{name}_changed?") do
          send(changed_ivar).include?(name_str)
        end
        # Raw (pre-typecast) representation, mirroring ActiveRecord's API.
        @model.send(:define_method, "#{name}_before_type_cast") do
          value = send(name)
          value = type.encode(value) if type
          value.to_s
        end
        # [old, new] pair when changed, nil otherwise (ActiveModel style).
        @model.send(:define_method, "#{name}_change") do
          if send("#{name}_changed?")
            [instance_variable_get("@#{changed_ivar}")[name_str], send(data_field)[name_str]]
          else
            nil
          end
        end
      end
    end
  end
end
#attributes should work with ActiveRecord >= 3.1
module SerializableAttributes
  # Schema describes the serialized fields stored in one blob column of an
  # ActiveRecord model, and wires reader/writer/dirty-tracking methods onto
  # that model via metaprogramming in #initialize and #field.
  class Schema
    class << self
      attr_writer :default_formatter

      # Formatter used when none is passed to #initialize; handles
      # encoding/decoding the blob (JSON via ActiveSupport by default).
      def default_formatter
        @default_formatter ||= SerializableAttributes::Format::ActiveSupportJson
      end
    end

    # The formatter instance used by #encode / #decode.
    attr_accessor :formatter
    attr_reader :model, :field, :fields

    # Names (Strings) of all declared serialized fields; empty when none.
    def all_column_names
      fields ? fields.keys : []
    end

    # Encodes a hash of serialized attributes into blob form.  Each value
    # belonging to a declared field is first converted by that field's type,
    # then the whole hash is handed to the formatter.  The input hash is
    # dup'ed so the caller's copy is not mutated.
    def encode(body)
      body = body.dup
      body.each do |key, value|
        if field = fields[key]
          body[key] = field.encode(value)
        end
      end
      formatter.encode(body)
    end

    # Decodes blob data into a hash of serialized attributes.
    #
    # data          - the raw blob (formatter input).
    # is_new_record - when true and the blob is blank, seeds the hash with
    #                 each field's default value.
    #
    # The returned hash has a default block that lazily fills in a declared
    # field's type default on first access of a missing key.  Keys not
    # declared in the schema are dropped.
    def decode(data, is_new_record = false)
      decoded = formatter.decode(data)
      hash = ::Hash.new do |h, key|
        if type = fields[key]
          h[key] = type ? type.default : nil
        end
      end
      decoded.each do |k, v|
        next unless include?(k)
        type = fields[k]
        hash[k] = type ? type.parse(v) : v
      end
      if decoded.blank? && is_new_record
        fields.each do |key, type|
          hash[key] = type.default if type.default
        end
      end
      hash
    end

    # True when key (Symbol or String) names a declared serialized field.
    def include?(key)
      @fields.include?(key.to_s)
    end

    # Initializes a new Schema. See `ModelMethods#serialize_attributes`.
    #
    # model   - The ActiveRecord class.
    # field   - The String name of the ActiveRecord attribute that holds
    #           data.
    # options - Optional Hash:
    #           :blob      - The String name of the actual DB field. Defaults to
    #                        "raw_#{field}"
    #           :formatter - The module that handles encoding and decoding the
    #                        data. The default is set in
    #                        `Schema#default_formatter`.
    def initialize(model, field, options)
      @model, @field, @fields = model, field, {}
      @blob_field = options.delete(:blob) || "raw_#{@field}"
      @formatter = options.delete(:formatter) || self.class.default_formatter
      # Locals captured by the define_method closures below.
      blob_field = @blob_field
      data_field = @field
      meta_model = class << @model; self; end
      changed_ivar = "#{data_field}_changed"
      # Expose this schema on the model class as #{field}_schema.
      meta_model.send(:attr_accessor, "#{data_field}_schema")
      @model.send("#{data_field}_schema=", self)
      # Reloading a record must drop the memoized decoded hash.
      @model.class_eval do
        def reload(options = nil)
          reset_serialized_data
          super
        end
      end
      # Class-level attribute_names: real columns plus serialized fields.
      meta_model.send(:define_method, :attribute_names) do
        column_names + send("#{data_field}_schema").all_column_names
      end
      # Clears the memoized decoded hash so it is re-read from the blob.
      @model.send(:define_method, :reset_serialized_data) do
        instance_variable_set("@#{data_field}", nil)
      end
      # Instance-level attribute_names: hide the raw blob column, include
      # the currently loaded serialized keys, sorted as strings.
      @model.send(:define_method, :attribute_names) do
        (super() + send(data_field).keys - [blob_field]).
          map! { |s| s.to_s }.sort!
      end
      # Route reads of serialized fields to the decoded hash; everything
      # else falls through to ActiveRecord.
      # NOTE(review): this calls a method literally named `data`, which only
      # resolves when the serialized field itself is :data — presumably
      # send(data_field) was intended for other field names. TODO confirm.
      @model.send(:define_method, :read_attribute) do |attribute_name|
        schema = self.class.send("#{data_field}_schema")
        if schema.include?(attribute_name)
          data[attribute_name.to_s]
        else
          super(attribute_name)
        end
      end
      # ActiveRecord >= 3.1 builds #attributes itself, so merge the
      # serialized fields into its result.
      # FIX: a bare `super` inside a define_method block raises RuntimeError
      # ("implicit argument passing of super from method defined by
      # define_method() is not supported") at every call; the zero-argument
      # form must be spelled explicitly as super(), as done for
      # attribute_names above.
      # NOTE(review): the lexicographic version comparison is fine for the
      # 3.x/4.x series but would misorder e.g. '10.0' — confirm acceptable.
      if ActiveRecord::VERSION::STRING >= '3.1'
        @model.send(:define_method, :attributes) do
          super().merge(send(data_field))
        end
      end
      # Lazily decodes the blob on first access and memoizes the hash;
      # pending change tracking is reset when a fresh decode happens.
      @model.send(:define_method, data_field) do
        instance_variable_get("@#{data_field}") || begin
          instance_variable_get("@#{changed_ivar}").clear if send("#{changed_ivar}?")
          schema = self.class.send("#{data_field}_schema")
          hash = schema.decode(send(blob_field), new_record?)
          instance_variable_set("@#{data_field}", hash)
          hash
        end
      end
      # Writes one serialized field: records the previous value for dirty
      # tracking (first write only), parses via the field type, and deletes
      # the key when the parsed value is nil.
      @model.send(:define_method, :write_serialized_field) do |name, value|
        raw_data = send(data_field) # load fields if needed
        name_str = name.to_s
        schema = self.class.send("#{data_field}_schema")
        type = schema.fields[name_str]
        changed_fields = send(changed_ivar)
        instance_variable_get("@#{changed_ivar}")[name_str] = raw_data[name_str] unless changed_fields.include?(name_str)
        parsed_value = type ? type.parse(value) : value
        if parsed_value.nil?
          raw_data.delete(name_str)
        else
          raw_data[name_str] = parsed_value
        end
        parsed_value
      end
      # #{field}_changed: names of serialized fields written since load.
      @model.send(:define_method, changed_ivar) do
        hash = instance_variable_get("@#{changed_ivar}") || instance_variable_set("@#{changed_ivar}", {})
        hash.keys
      end
      # #{field}_changed?: true when any serialized field was written.
      @model.send(:define_method, "#{changed_ivar}?") do
        !send(changed_ivar).empty?
      end
      # Persist: re-encode the (possibly modified) hash into the blob column.
      @model.before_save do |r|
        schema = r.class.send("#{data_field}_schema")
        r.send("#{blob_field}=", schema.encode(r.send(data_field)))
      end
    end

    # Adds the accessors for a serialized field on this model. Also sets up
    # the encoders and decoders.
    #
    # type_name - The Symbol matching a valid type.
    # *names    - One or more Symbol field names.
    # options   - Optional Hash to be sent to the initialized Type.
    #             :default - Sets the default value.
    #
    # Returns nothing.
    def field(type_name, *names)
      options = names.extract_options!
      data_field = @field
      changed_ivar = "#{data_field}_changed"
      type = SerializableAttributes.types[type_name].new(options)
      names.each do |name|
        name_str = name.to_s
        @fields[name_str] = type
        # Reader: value straight from the decoded hash.
        @model.send(:define_method, name) do
          send(data_field)[name_str]
        end
        # Boolean fields additionally get a predicate alias.
        # (Boolean here resolves within the SerializableAttributes
        # namespace — presumably the plugin's Boolean type; confirm.)
        if type.is_a? Boolean
          @model.send :alias_method, "#{name}?", name
        end
        @model.send(:define_method, "#{name}=") do |value|
          write_serialized_field name_str, value
        end
        @model.send(:define_method, "#{name}_changed?") do
          send(changed_ivar).include?(name_str)
        end
        # Raw (pre-typecast) representation, mirroring ActiveRecord's API.
        @model.send(:define_method, "#{name}_before_type_cast") do
          value = send(name)
          value = type.encode(value) if type
          value.to_s
        end
        # [old, new] pair when changed, nil otherwise (ActiveModel style).
        @model.send(:define_method, "#{name}_change") do
          if send("#{name}_changed?")
            [instance_variable_get("@#{changed_ivar}")[name_str], send(data_field)[name_str]]
          else
            nil
          end
        end
      end
    end
  end
end
|
# Core configuration options of the Spider framework, registered through the
# config_option DSL: config_option(name, description, options_hash).
# Option values may be post-processed via :process and defaulted via
# :default (a Proc/lambda is evaluated lazily).
module Spider
  config_option('runmode', "production, test, devel", :default => 'devel', :choices => ['production', 'test', 'devel'],
    :action => Proc.new{ |option| Spider.runmode = option unless Spider.runmode || $SPIDER_RUNMODE}
  )
  # Storage
  # config_option('storage.type', '')
  # config_option('storage.url', '')
  # Web server
  config_option 'http.server', _("The internal server to use"), {
    :default => 'mongrel'
  }
  config_option 'webserver.show_traces', _("Whether to show the stack trace on error"), {
    :default => Proc.new{ ['test', 'devel'].include?(Spider.config.get('runmode')) ? true : false }
  }
  config_option 'webserver.reload_sources', _("Reload application and spider sources on each request"), {
    :default => Proc.new{ Spider.config.get('runmode') == 'devel' ? true : false }
  }
  config_option 'webserver.port', _("Port to use for the http server"), :default => 8080
  config_option 'webserver.force_threads', _("Force threading on non-threaded adapters"),
    :default => Proc.new{ RUBY_VERSION_PARTS[1] == '8' ? true : false }
  # Client
  config_option 'client.text_editor', _("The text editor installed on the client")
  # Templates
  config_option 'template.cache.disable', _("Refresh template cache every time"), { :default => false }
  config_option 'template.cache.reload_on_restart', _("Refresh template cache when server restarts"), { :default => true }
  config_option 'template.cache.no_check', _("Never recompile templates"), { :default => true }
  config_option 'template.cache.check_files', _("Check on every request if templates are changed"), { :default => true }
  #config_option 'template.safe', _("Run templates in safe mode"), { :default => false }
  # Model
  config_option 'storage.db.pool.size', _("How many connections to open to a db"), :default => 5
  config_option 'storage.db.replace_debug_vars', _("Replace bound variables in debug sql"), {
    :default => Proc.new{ Spider.config.get('runmode') == 'devel' ? true : false }
  }
  config_option 'storages', _("A list of named storages"), :type => :conf
  config_option 'storages.x.url', _("Connection url to the storage"), :type => String, :required => true
  config_option 'storages.x.encoding', _("Encoding the DB uses"), :type => String
  config_option 'debugger.start', _("Start the debugger")
  config_option 'profiling.enable', _("Enable on-request profiling")
  config_option 'request.mutex', _("Respond to requests sequentially"), :default => false
  config_option 'locale', _("The locale to use") do |val|
    Spider.locale = Locale.new(val)
  end
  config_option 'i18n.rails_path', _("Path where rails-style locales are found"), :default => lambda{ Spider.paths[:root]+'/locales' }
  config_option 'i18n.default_locale', _("Fallback locale"), :default => 'en'
  config_option 'runner.sleep', _("Sleep time for the periodic runner"), :default => 10
  # Sessions
  config_option 'session.store', _("Where to store the session"), :default => 'file', :choices => ['memory', 'file', 'memcached']
  config_option('session.life', _("Lifetime in seconds of the sessions"), :default => 3600, :type => Fixnum)
  config_option('session.purge_check', _("Number of seconds to wait before session purge check"), :default => 10, :type => Fixnum)
  config_option 'session.file.path', _("The folder where to store file sessions"), :default => lambda{ return Spider.paths[:var]+'/sessions' }
  config_option 'shared_store.type', _("Which shared store to use"), :default => 'memory'
  config_option 'http.nonce_life', _("Life in seconds of HTTP Digest Authentication nonces"), :type => Fixnum, :default => 60
  # TODO: implement in webrick/others, check if has a performance gain
  config_option 'http.auto_headers', _("Automatically send headers on first output (breaks the debugger)"), :default => true
  config_option 'http.seize_stdout', _("Redirect standard output to the browser"), :default => false
  config_option 'http.proxy_mapping', _("If the request is proxyied, the urls used to reach spider, with the corresponding paths called by the proxy"),
    :type => Hash
  config_option 'http.charset', _("The charset to use for http requests"), :default => 'UTF-8'
  # Logging
  config_option 'debug.console.level', _("Level of debug output to console"), :default => :INFO,
    :process => lambda{ |opt| opt && opt != 'false' ? opt.to_s.upcase.to_sym : false }
  config_option 'log.errors', _("Log errors to file"), :default => true
  # FIX: the configured value may already be a Symbol (e.g. set from code or a
  # re-processed config); convert with to_s first before upcasing, exactly as
  # the debug.console.level processor above does.
  config_option 'log.debug.level', _("Log level to use for debug file (false for no debug)"), :default => false,
    :choices => [false, :DEBUG, :INFO],
    :process => lambda{ |opt| opt && opt != 'false' ? opt.to_s.upcase.to_sym : false }
  config_option 'log.rotate.age', _("Number of old log files to keep, OR frequency of rotation (daily, weekly or monthly)"), :default => 'daily'
  config_option 'log.rotate.size', _("Maximum logfile size (only applies when log.rotate.age is a number)"), :default => 1048576
  # :process => lambda{ |v| v.upcase }
  # Organizations
  config_option 'orgs', _("A list of organizations"), :type => :conf
  config_option 'orgs.x.name', _("Descriptive name of the organization")
  config_option 'orgs.x.country_code', _("Country code of the organization")
  config_option 'orgs.x.state', _("State of the organization")
  config_option 'orgs.x.city', _("Name of the city")
  config_option 'orgs.x.common_name', _("Common name (e.g. domain) of the organization")
  config_option 'orgs.x.email', _("Main e-mail address of the organization")
  config_option 'orgs.x.auto_from_email', _("Email address used as 'From' for automatic e-mails"),
    :default => lambda{ |name| Spider.conf.get("orgs.#{name}.email") }
  config_option 'orgs.x.organizational_unit', _("Organizational Unit (e.g. department)")
  config_option 'orgs.x.pub_key', _("Path to the public key (defaults to config/certs/org_name/public.pem)"),
    :default => lambda{ |name| Spider.paths[:certs]+'/'+name+'/public.pem'}
  config_option 'orgs.x.cert', _("Path to the certificate (defaults to config/certs/org_name/cert.pem)"),
    :default => lambda{ |name| Spider.paths[:certs]+'/'+name+'/cert.pem'}
  config_option 'orgs.x.private_key', _("Path to the private key (defaults to config/certs/org_name/private/key.pem)"),
    :default => lambda{ |name| Spider.paths[:certs]+'/'+name+'/private/key.pem'}
  # Italian aliases for the organization options.
  conf_alias 'it_IT' => {
    'orgs' => 'organizzazioni',
    'name' => 'nome',
    'country_code' => 'codice_nazione',
    'city' => 'comune',
    'state' => 'provincia',
    'organizational_unit' => 'unita_organizzativa'
  }
  config_option 'site.admin.name', _("Name of the site administrator")
  config_option 'site.admin.email', _("Email of the site administrator")
  config_option 'devel.trace.extended', _("Use ruby-debug to provide extended traces"), :default => true
  config_option 'devel.trace.show_locals', _("Show locals in debug traces"), :default => true
  config_option 'devel.trace.show_instance_variables', _("Show locals in debug traces"), :default => true
end
Fix error when log.debug.level is already a symbol
# Core configuration options of the Spider framework, registered through the
# config_option DSL: config_option(name, description, options_hash).
# Option values may be post-processed via :process and defaulted via
# :default (a Proc/lambda is evaluated lazily).
module Spider
  config_option('runmode', "production, test, devel", :default => 'devel', :choices => ['production', 'test', 'devel'],
    :action => Proc.new{ |option| Spider.runmode = option unless Spider.runmode || $SPIDER_RUNMODE}
  )
  # Storage
  # config_option('storage.type', '')
  # config_option('storage.url', '')
  # Web server
  config_option 'http.server', _("The internal server to use"), {
    :default => 'mongrel'
  }
  config_option 'webserver.show_traces', _("Whether to show the stack trace on error"), {
    :default => Proc.new{ ['test', 'devel'].include?(Spider.config.get('runmode')) ? true : false }
  }
  config_option 'webserver.reload_sources', _("Reload application and spider sources on each request"), {
    :default => Proc.new{ Spider.config.get('runmode') == 'devel' ? true : false }
  }
  config_option 'webserver.port', _("Port to use for the http server"), :default => 8080
  config_option 'webserver.force_threads', _("Force threading on non-threaded adapters"),
    :default => Proc.new{ RUBY_VERSION_PARTS[1] == '8' ? true : false }
  # Client
  config_option 'client.text_editor', _("The text editor installed on the client")
  # Templates
  config_option 'template.cache.disable', _("Refresh template cache every time"), { :default => false }
  config_option 'template.cache.reload_on_restart', _("Refresh template cache when server restarts"), { :default => true }
  config_option 'template.cache.no_check', _("Never recompile templates"), { :default => true }
  config_option 'template.cache.check_files', _("Check on every request if templates are changed"), { :default => true }
  #config_option 'template.safe', _("Run templates in safe mode"), { :default => false }
  # Model
  config_option 'storage.db.pool.size', _("How many connections to open to a db"), :default => 5
  config_option 'storage.db.replace_debug_vars', _("Replace bound variables in debug sql"), {
    :default => Proc.new{ Spider.config.get('runmode') == 'devel' ? true : false }
  }
  config_option 'storages', _("A list of named storages"), :type => :conf
  config_option 'storages.x.url', _("Connection url to the storage"), :type => String, :required => true
  config_option 'storages.x.encoding', _("Encoding the DB uses"), :type => String
  config_option 'debugger.start', _("Start the debugger")
  config_option 'profiling.enable', _("Enable on-request profiling")
  config_option 'request.mutex', _("Respond to requests sequentially"), :default => false
  config_option 'locale', _("The locale to use") do |val|
    Spider.locale = Locale.new(val)
  end
  config_option 'i18n.rails_path', _("Path where rails-style locales are found"), :default => lambda{ Spider.paths[:root]+'/locales' }
  config_option 'i18n.default_locale', _("Fallback locale"), :default => 'en'
  config_option 'runner.sleep', _("Sleep time for the periodic runner"), :default => 10
  # Sessions
  config_option 'session.store', _("Where to store the session"), :default => 'file', :choices => ['memory', 'file', 'memcached']
  config_option('session.life', _("Lifetime in seconds of the sessions"), :default => 3600, :type => Fixnum)
  config_option('session.purge_check', _("Number of seconds to wait before session purge check"), :default => 10, :type => Fixnum)
  config_option 'session.file.path', _("The folder where to store file sessions"), :default => lambda{ return Spider.paths[:var]+'/sessions' }
  config_option 'shared_store.type', _("Which shared store to use"), :default => 'memory'
  config_option 'http.nonce_life', _("Life in seconds of HTTP Digest Authentication nonces"), :type => Fixnum, :default => 60
  # TODO: implement in webrick/others, check if has a performance gain
  config_option 'http.auto_headers', _("Automatically send headers on first output (breaks the debugger)"), :default => true
  config_option 'http.seize_stdout', _("Redirect standard output to the browser"), :default => false
  config_option 'http.proxy_mapping', _("If the request is proxyied, the urls used to reach spider, with the corresponding paths called by the proxy"),
    :type => Hash
  config_option 'http.charset', _("The charset to use for http requests"), :default => 'UTF-8'
  # Logging: level values are normalized to upper-case Symbols; to_s first
  # so an already-Symbol value is handled as well as a String.
  config_option 'debug.console.level', _("Level of debug output to console"), :default => :INFO,
    :process => lambda{ |opt| opt && opt != 'false' ? opt.to_s.upcase.to_sym : false }
  config_option 'log.errors', _("Log errors to file"), :default => true
  config_option 'log.debug.level', _("Log level to use for debug file (false for no debug)"), :default => false,
    :choices => [false, :DEBUG, :INFO],
    :process => lambda{ |opt| opt && opt != 'false' ? opt.to_s.upcase.to_sym : false }
  config_option 'log.rotate.age', _("Number of old log files to keep, OR frequency of rotation (daily, weekly or monthly)"), :default => 'daily'
  config_option 'log.rotate.size', _("Maximum logfile size (only applies when log.rotate.age is a number)"), :default => 1048576
  # :process => lambda{ |v| v.upcase }
  # Organizations
  config_option 'orgs', _("A list of organizations"), :type => :conf
  config_option 'orgs.x.name', _("Descriptive name of the organization")
  config_option 'orgs.x.country_code', _("Country code of the organization")
  config_option 'orgs.x.state', _("State of the organization")
  config_option 'orgs.x.city', _("Name of the city")
  config_option 'orgs.x.common_name', _("Common name (e.g. domain) of the organization")
  config_option 'orgs.x.email', _("Main e-mail address of the organization")
  config_option 'orgs.x.auto_from_email', _("Email address used as 'From' for automatic e-mails"),
    :default => lambda{ |name| Spider.conf.get("orgs.#{name}.email") }
  config_option 'orgs.x.organizational_unit', _("Organizational Unit (e.g. department)")
  config_option 'orgs.x.pub_key', _("Path to the public key (defaults to config/certs/org_name/public.pem)"),
    :default => lambda{ |name| Spider.paths[:certs]+'/'+name+'/public.pem'}
  config_option 'orgs.x.cert', _("Path to the certificate (defaults to config/certs/org_name/cert.pem)"),
    :default => lambda{ |name| Spider.paths[:certs]+'/'+name+'/cert.pem'}
  config_option 'orgs.x.private_key', _("Path to the private key (defaults to config/certs/org_name/private/key.pem)"),
    :default => lambda{ |name| Spider.paths[:certs]+'/'+name+'/private/key.pem'}
  # Italian aliases for the organization options.
  conf_alias 'it_IT' => {
    'orgs' => 'organizzazioni',
    'name' => 'nome',
    'country_code' => 'codice_nazione',
    'city' => 'comune',
    'state' => 'provincia',
    'organizational_unit' => 'unita_organizzativa'
  }
  config_option 'site.admin.name', _("Name of the site administrator")
  config_option 'site.admin.email', _("Email of the site administrator")
  config_option 'devel.trace.extended', _("Use ruby-debug to provide extended traces"), :default => true
  config_option 'devel.trace.show_locals', _("Show locals in debug traces"), :default => true
  config_option 'devel.trace.show_instance_variables', _("Show locals in debug traces"), :default => true
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'captain_hoog/version'

# Gem specification for captain_hoog, a plugin based git pre-hook runner.
Gem::Specification.new do |spec|
  spec.name          = "captain_hoog"
  spec.version       = CaptainHoog::VERSION
  spec.authors       = ["Daniel Schmidt"]
  spec.email         = ["daniel.schmidt@adytonsystems.com"]
  spec.summary       = %q{ Plugin based git-pre hook.}
  spec.homepage      = ""
  spec.license       = "MIT"
  spec.post_install_message = %Q{
Thanks for installing the Pre-Git whatever hooker!
If you don't have already, please install the hook:
githoog install --type <GIT_HOOK_TYPE> --plugins_dir <PATH_TO_PLUGINS> \
--project_dir <PATH_TO_PROJECT>
}
  # Declare the minimum supported Ruby so RubyGems refuses installation on
  # older interpreters (consistent with the later revision of this gemspec).
  spec.required_ruby_version = '>= 2.0'
  # Package every file tracked by git; executables come from bin/.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f)}
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "colorize", '~> 0.7', '>= 0.7.3'
  spec.add_runtime_dependency "thor", '~> 0.19', '>= 0.19.1'
  spec.add_runtime_dependency "terminal-table", '~> 1.4.5'

  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "cucumber"
  spec.add_development_dependency "aruba"
end
Add minimum required Ruby version 2.0 to the gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'captain_hoog/version'

# Gem specification for captain_hoog, a plugin based git pre-hook runner.
Gem::Specification.new do |spec|
  spec.name          = "captain_hoog"
  spec.version       = CaptainHoog::VERSION
  spec.authors       = ["Daniel Schmidt"]
  spec.email         = ["daniel.schmidt@adytonsystems.com"]
  spec.summary       = %q{ Plugin based git-pre hook.}
  spec.homepage      = ""
  spec.license       = "MIT"
  spec.post_install_message = %Q{
Thanks for installing the Pre-Git whatever hooker!
If you don't have already, please install the hook:
githoog install --type <GIT_HOOK_TYPE> --plugins_dir <PATH_TO_PLUGINS> \
--project_dir <PATH_TO_PROJECT>
}
  # Minimum supported interpreter; RubyGems refuses older Rubies.
  spec.required_ruby_version = '>= 2.0'
  # Package every file tracked by git; executables come from bin/.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f)}
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "colorize", '~> 0.7', '>= 0.7.3'
  spec.add_runtime_dependency "thor", '~> 0.19', '>= 0.19.1'
  spec.add_runtime_dependency "terminal-table", '~> 1.4.5'

  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "cucumber"
  spec.add_development_dependency "aruba"
end
|
require 'spiderfw/controller/controller_io'
require 'spiderfw/controller/request'
require 'spiderfw/controller/response'
require 'spiderfw/controller/scene'
require 'spiderfw/controller/controller_exceptions'
require 'spiderfw/controller/first_responder'
require 'spiderfw/controller/controller_mixin'
require 'spiderfw/controller/mixins/visual'
require 'spiderfw/controller/mixins/http_mixin'
require 'spiderfw/controller/mixins/static_content'
require 'spiderfw/controller/helpers/widget_helper'
require 'spiderfw/utils/annotations'
module Spider
class Controller
include App::AppClass
include Dispatcher
include Logger
include ControllerMixins
include Helpers
include Annotations
class << self
# Name of the action dispatched when the request path does not specify one.
# @return [String]
def default_action
  'index'
end
# @return [String] Path to this controller's templates
# @return [String, nil] Path to this controller's templates, or nil when
#   the controller does not belong to an app
def template_path
  owner = self.app
  owner ? File.join(owner.path, '/views') : nil
end
# @return [String] Path to this controller's layouts
# @return [String, nil] Path to this controller's layouts, or nil when
#   the controller does not belong to an app
def layout_path
  self.app ? File.join(self.app.path, '/views') : nil
end
# Registers a hook method to run ahead of the controller's #before,
# whenever the action matches the given conditions.
# Example:
#   before(/^list_/, :before_lists)
# calls :before_lists for any action starting with 'list_'.
# @param [String|Regexp|Proc|Symbol|Array] conditions checked against the action
# @param [Symbol] method the method to call when the conditions match
# @param [Hash] params may contain :unless => true to invert the conditions
# @return [void]
def before(conditions, method, params={})
  @dispatch_methods ||= {}
  (@dispatch_methods[:before] ||= []).push([conditions, method, params])
end
# Like {Controller.before}, but the hook runs only when the conditions do
# NOT match: :unless => true is forced onto params before registering.
# @param [String|Regexp|Proc|Symbol|Array] condition checked against the action
# @param [Symbol] method the method to call
# @param [Hash] params extra options; :unless is overwritten with true
# @return [void]
def before_unless(condition, method, params={})
  params[:unless] = true
  @dispatch_methods ||= {}
  (@dispatch_methods[:before] ||= []) << [condition, method, params]
end
# @return [Array] the hooks registered via {Controller.before}, or [] when none
def before_methods
  (@dispatch_methods && @dispatch_methods[:before]) || []
end
# Registers methods as controller actions (methods that can be dispatched to).
#
# Usually not called directly; the __.action annotation or a format
# annotation (__.html, __.xml, __.json, __.text) registers the method.
# @param [*Symbol] methods the methods to register
# @return [Array, nil] all registered controller actions (nil when none)
def controller_actions(*methods)
  unless methods.empty?
    @controller_actions ||= []
    @controller_actions += methods
  end
  @controller_actions
end
# Registers a single controller action together with its annotation params.
# @param [Symbol] method
# @param [Hash] params annotation parameters stored per method
def controller_action(method, params)
  (@controller_actions ||= []) << method
  (@controller_action_params ||= {})[method] = params
end
# @return [bool] true if the method is a controller action
def controller_action?(method)
return false unless self.method_defined?(method)
return true if default_action && method == default_action.to_sym
if @controller_actions
res = @controller_actions.include?(method)
if (!res)
Spider.logger.info("Method #{method} is not a controller action for #{self}")
end
return res
else
return true
end
end
# Finds a resource in the context of the controller's app
# See {Spider.find_resource}
# @param [Symbol] resource_type
# @param [String] path
# @param [String] cur_path Current path: if set, will be used to resolve relative paths
# @return [Resource]
def find_resource(type, name, cur_path=nil)
Spider.find_resource(type, name, cur_path, self)
end
# Returns the path of a resource, or nil if none is found
# See {Controller.find_resource}
# @param [Symbol] resource_type
# @param [String] path
# @param [String] cur_path Current path: if set, will be used to resolve relative paths
# @return [Resource]
def find_resource_path(type, name, cur_path=nil)
res = Spider.find_resource(type, name, cur_path, self)
return res ? res.path : nil
end
# @param [String] action Additional action to get path for
# @return [String] The canonical URL path for this controller
def route_path(action=nil)
u = @default_route || ''
u += "/#{action}" if action
if @default_dispatcher && @default_dispatcher != self
u = @default_dispatcher.route_path(u)
elsif self.app
u = self.app.route_path(u)
end
u
end
# Returns the full URL for the Controller
# The Controller's implementation returns the route_path.
#
# However, the HTTPMixin will override this method to return a full http url;
# other mixins can override the method in different ways.
# @param [String] action Additional action to get path for
# @return [String] The canonical URL for this controller
def url(action=nil)
route_path(action)
end
alias :route_url :url
end
define_annotation(:action) { |k, m, params| k.controller_action(m, params) }
# @return [Spider::Request]
attr_reader :request
# @return [Spider::Response]
attr_reader :response
# @return [Symbol] The method currently set to be executed, if any
attr_reader :executed_method
# @return [Scene]
attr_reader :scene
# @return [String] Action used to reach this controller in the dispatch chain
attr_accessor :dispatch_action
# @return [bool] True if the controller is the target of the current action
attr_accessor :is_target
# Constructor. Note: you can use the {Controller#init} method for custom
# initialization, instead of overrideing this method
# @param [Spider::Request] request
# @param [Spider::Response] response
# @param [scene]
def initialize(request, response, scene=nil)
@request = request
@response = response
@scene = scene || get_scene
@dispatch_path = ''
@is_target = true
init
end
# Override this for controller initialization
# @return [void]
def init
end
# @return [String]
def inspect
self.class.to_s
end
# @return [String] the actual action path used to reach this Controller,
#   normalized to a single leading slash and no trailing slash
def request_path
  path = @dispatch_action || ''
  if @dispatch_previous
    prefix = @dispatch_previous.call_path
    path = "#{prefix}/#{path}" unless prefix.empty?
  end
  ('/' + path).gsub(/\/+/, '/').sub(/\/$/, '')
end
alias :call_path :request_path
# Returns the method to call on the controller given an action, and the arguments
# that should be passed to it.
# An action "name:rest" yields just the method; "name/rest" yields the method
# plus ["rest"] as additional arguments; anything else is taken verbatim.
# @param [String] action
# @return [Array, nil] [method_symbol, additional_arguments_or_nil]
def get_action_method(action)
method = nil
additional_arguments = nil
if (action =~ /^([^:]+)(:.+)$/)
method = $1
elsif (action =~ /^([^\/]+)\/(.+)$/) # methods followed by a slash
method = $1
additional_arguments = [$2]
else
method = action
end
# Strip one trailing slash (blank? comes from Spider's core extensions, not stdlib)
method = method[0..-2] if !method.blank? && method[-1].chr == '/'
# Drop any ".format" suffix, e.g. "list.json" -> "list"
method, rest = method.split('.', 2) if method
# Fall back to the class default ('index') when nothing usable remains
method = self.class.default_action if !method || method.empty?
return nil if method.empty?
return [method.to_sym, additional_arguments]
end
# Returns true if this controller is the final target for the current action, that is, if it does not
# dispatch to any route
# @return [bool] True if the controller is the final target
def action_target?
!@dispatch_next[@call_path] || @dispatch_next[@call_path].dest == self \
|| @dispatch_next[@call_path].dest == self.class
end
# @return [bool] false if the target of the call is a widget, true otherwise
def is_target?
@is_target
end
# The main controller's execution method. The Controller will dispatch
# to another controller if a route is set; otherwise, it will call the
# method that should be executed according to action.
#
# This method can be overridden in subclasses, but remember to call super,
# or the dispatch chain will stop!
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
# @raise [NotFound] when no executed method could be resolved for the action
def execute(action='', *arguments)
return if @__done
debug("Controller #{self} executing #{action} with arguments #{arguments}")
catch(:done) do
if can_dispatch?(:execute, action)
d_next = dispatch_next(action)
#run_chain(:execute, action, *arguments)
# shortcut route to self
return do_dispatch(:execute, action) if d_next.dest != self
arguments = d_next.params
end
# d_next is nil here when the dispatch branch above was not entered
if d_next && d_next.dest == self
set_executed_method(d_next.action)
end
if @executed_method
meth = self.method(@executed_method)
args = arguments + @executed_method_arguments
@controller_action = args[0]
# Trim or pad the argument list to the target method's arity so actions
# with fewer declared parameters still receive a valid call.
arity = meth.arity
unless arity == -1
arity = (-arity + 1) if arity < 0
args = arity == 0 ? [] : args[0..(arity-1)]
args = [nil] if meth.arity == 1 && args.empty?
end
Spider.logger.info("Executing: #{self.class.name}##{@executed_method}.#{@request.format}")
# NOTE(review): this local looks unused; presumably a stack-frame marker
# used by the framework's error reporting — confirm before removing.
spider_main_controller_send = true
send(@executed_method, *args)
else
raise NotFound.new(action)
end
end
end
# Helper method, that calls and propagates #before
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def call_before(action='', *arguments)
return if respond_to?(:serving_static?) && self.serving_static?
@call_path = action
before(action, *arguments)
catch(:done) do
#debug("#{self} before")
d_next = dispatch_next(action)
unless d_next && d_next.obj == self
do_dispatch(:call_before, action, *arguments)
end
end
end
# This method can be implemented by Controllers, and will be called
# on the controller chain before the execute method.
#
# This method is usually reserved for preprocessing that does not
# output to the browser, to allow other controllers in chain to set response
# headers.
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def before(action='', *arguments)
end
# Helper method, that calls and propagates #after
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def call_after(action='', *arguments)
return if respond_to?(:serving_static?) && self.serving_static?
after(action, *arguments)
catch(:done) do
d_next = dispatch_next(action)
unless d_next && d_next.obj == self
do_dispatch(:call_after, action, *arguments)
end
end
end
# This method can be implemented by Controllers, and will be called
# on the controller chain after the execute method.
#
# If the webserver supports it, this method will be called after the response
# has been returned to the browser; so, it's suitable for post processing.
# If you aren't using a threaded web server, though, keep in mind that the
# process won't be available to service other requests.
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def after(action='', *arguments)
end
# @return [bool] True when the chain is finished and should stop dispatching.
def done?
  @__done
end
# Marks the whole chain as finished and unwinds to the nearest catch(:done).
# @return [void]
def done
  self.done = true
  throw :done
end
# Writes the finished flag here and propagates it back up the dispatch chain.
# @param [bool] val
# @return [void]
def done=(val)
  @__done = val
  @dispatch_previous.done = val if @dispatch_previous
end
# Checks if an action responds to given route conditions. Is called by
# {Dispatcher#do_dispatch}.
# The default implementation calls Controller.check_action, which in turn is mixed in
# from {Dispatcher::ClassMethods#check_action}
# @param [String] action
# @param [Array] c An array of route conditions
# @return [bool]
def check_action(action, c)
self.class.check_action(action, c)
end
# Returns a new Scene instance for use in the controller.
# @param [Hash] scene Hash to construct the scene from
# @return [Scene]
def get_scene(scene=nil)
scene = Scene.new(scene) if scene.class == Hash
scene ||= Scene.new
return scene
end
# Populates the scene with request and controller metadata used by templates.
# @param [Scene] scene
# @return [Scene] the same scene, mutated in place
def prepare_scene(scene)
# A trailing slash is treated as a request for that directory's index page.
req_path = @request.path
req_path += 'index' if !req_path.blank? && req_path[-1].chr == '/'
scene.request = {
:path => @request.path,
:page_path => req_path
}
scene.controller = {
:request_path => request_path,
:class => self.class
}
scene.content = {}
return scene
end
# See {Controller.controller_action?}
# @return [bool] True if the method is a controller action for the class
def controller_action?(method)
self.class.controller_action?(method)
end
protected
# Instantiates an object dispatched by a route
# @param [Route]
# @return [Controller]
def dispatched_object(route)
klass = route.dest
if klass.class != Class
if klass == self # route to self
set_executed_method(route.action)
end
return klass
elsif klass == self.class
self.set_action(route.action)
return self
end
obj = klass.new(@request, @response, @scene)
obj.dispatch_action = route.matched || ''
# FIXME: this is not clean
obj.set_action(route.action)
# obj.dispatch_path = @dispatch_path + route.path
return obj
end
# Given an action, sets the executed method unless it can be dispatched
# @param [String] action
# @return [Symbol|nil] The executed method, if it was set, or nil
def set_action(action)
@executed_method = nil
@executed_method_arguments = nil
if !can_dispatch?(:execute, action)
return set_executed_method(action)
end
nil
end
# Given an action, sets executed_method and executed_method_arguments
# @param [String] action
# @return [Symbol] The executed_method
def set_executed_method(action)
method, additional_arguments = get_action_method(action)
if method && controller_action?(method)
@executed_method = method.to_sym
@executed_method_arguments = additional_arguments || []
end
return @executed_method
end
# Hook for subclasses to intercept exceptions raised during dispatch.
# The default implementation simply re-raises.
# @param [Exception] exc
# @return [void]
def try_rescue(exc)
  raise exc
end
private
# Overrides {Dispatcher#get_route}, assigning an action to nil routes when the
# first path segment resolves to a method this controller responds to.
# @return [Route, nil] the superclass route, possibly with .action filled in
def get_route(*args)
route = super
return route unless route
action = route.path.split('/').first
# For a blank action, get_action_method falls back to the default action;
# the !action.blank? guard below keeps that case from being assigned.
action_method, action_params = get_action_method(action)
if route.nil_route && !action.blank? && self.respond_to?(action_method)
route.action = action
end
route
end
end
end
require 'spiderfw/widget/widget'
require 'spiderfw/tag/tag'
Added :route_path to scene.controller
require 'spiderfw/controller/controller_io'
require 'spiderfw/controller/request'
require 'spiderfw/controller/response'
require 'spiderfw/controller/scene'
require 'spiderfw/controller/controller_exceptions'
require 'spiderfw/controller/first_responder'
require 'spiderfw/controller/controller_mixin'
require 'spiderfw/controller/mixins/visual'
require 'spiderfw/controller/mixins/http_mixin'
require 'spiderfw/controller/mixins/static_content'
require 'spiderfw/controller/helpers/widget_helper'
require 'spiderfw/utils/annotations'
module Spider
class Controller
include App::AppClass
include Dispatcher
include Logger
include ControllerMixins
include Helpers
include Annotations
class << self
def default_action
'index'
end
# @return [String] Path to this controller's templates
def template_path
return nil unless self.app
return File.join(self.app.path, '/views')
end
# @return [String] Path to this controller's layouts
def layout_path
return nil unless self.app
return File.join(self.app.path, '/views')
end
# Defines a method that will be called before the controller's before,
# if the action matches the given conditions.
# Example:
# before(/^list_/, :before_lists)
# will call the method before_lists if the action starts with 'list_'
# @param [String|Regexp|Proc|Symbol|Array] conditions what will be checked against the action
# @param [Symbol] method The method to be called if the conditions match.
# @param [Hash] params may contain :unless => true: in this case,
# the conditions will be inverted, that is, the method will
# be executed unless the conditions match.
# @return [void]
def before(conditions, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
@dispatch_methods[:before] << [conditions, method, params]
end
# Like {Controller.before}, but calls the method unless the conditions match
# @param [String|Regexp|Proc|Symbol|Array] conditions what will be checked against the action
# @param [Symbol] method The method to be called if the conditions match.
# @param [Hash] params may contain :unless => true: in this case,
# the conditions will be inverted, that is, the method will
# be executed unless the conditions match.
# @return [void]
def before_unless(condition, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
params[:unless] = true
@dispatch_methods[:before] << [condition, method, params]
end
# @return [Array] An array of methods defined with {Controller.before}
def before_methods
@dispatch_methods && @dispatch_methods[:before] ? @dispatch_methods[:before] : []
end
# Registers a list of methods as controller actions, that is, methods that can
# be dispatched to.
#
# This method is not usually called directly; using the __.action annotation,
# or one of the format annotations (__.html, __.xml, __.json, __.text), will
# make a method a controller action.
# @param [*Symbol] A list of methods
# @return [Array] All defined controller actions
def controller_actions(*methods)
if (methods.length > 0)
@controller_actions ||= []
@controller_actions += methods
end
@controller_actions
end
def controller_action(method, params)
@controller_actions ||= []
@controller_actions << method
@controller_action_params ||= {}
@controller_action_params[method] = params
end
# @return [bool] true if the method is a controller action
def controller_action?(method)
return false unless self.method_defined?(method)
return true if default_action && method == default_action.to_sym
if @controller_actions
res = @controller_actions.include?(method)
if (!res)
Spider.logger.info("Method #{method} is not a controller action for #{self}")
end
return res
else
return true
end
end
# Finds a resource in the context of the controller's app
# See {Spider.find_resource}
# @param [Symbol] resource_type
# @param [String] path
# @param [String] cur_path Current path: if set, will be used to resolve relative paths
# @return [Resource]
def find_resource(type, name, cur_path=nil)
Spider.find_resource(type, name, cur_path, self)
end
# Returns the path of a resource, or nil if none is found
# See {Controller.find_resource}
# @param [Symbol] resource_type
# @param [String] path
# @param [String] cur_path Current path: if set, will be used to resolve relative paths
# @return [Resource]
def find_resource_path(type, name, cur_path=nil)
res = Spider.find_resource(type, name, cur_path, self)
return res ? res.path : nil
end
# @param [String] action Additional action to get path for
# @return [String] The canonical URL path for this controller
def route_path(action=nil)
u = @default_route || ''
u += "/#{action}" if action
if @default_dispatcher && @default_dispatcher != self
u = @default_dispatcher.route_path(u)
elsif self.app
u = self.app.route_path(u)
end
u
end
# Returns the full URL for the Controller
# The Controller's implementation returns the route_path.
#
# However, the HTTPMixin will override this method to return a full http url;
# other mixins can override the method in different ways.
# @param [String] action Additional action to get path for
# @return [String] The canonical URL for this controller
def url(action=nil)
route_path(action)
end
alias :route_url :url
end
define_annotation(:action) { |k, m, params| k.controller_action(m, params) }
# @return [Spider::Request]
attr_reader :request
# @return [Spider::Response]
attr_reader :response
# @return [Symbol] The method currently set to be executed, if any
attr_reader :executed_method
# @return [Scene]
attr_reader :scene
# @return [String] Action used to reach this controller in the dispatch chain
attr_accessor :dispatch_action
# @return [bool] True if the controller is the target of the current action
attr_accessor :is_target
# Constructor. Note: you can use the {Controller#init} method for custom
# initialization, instead of overrideing this method
# @param [Spider::Request] request
# @param [Spider::Response] response
# @param [scene]
def initialize(request, response, scene=nil)
@request = request
@response = response
@scene = scene || get_scene
@dispatch_path = ''
@is_target = true
init
end
# Override this for controller initialization
# @return [void]
def init
end
# @return [String]
def inspect
self.class.to_s
end
# @return [String] The actual action path used to reach this Controller
def request_path
act = @dispatch_action || ''
if (@dispatch_previous)
prev = @dispatch_previous.call_path
act = prev+'/'+act unless prev.empty?
end
return ('/'+act).gsub(/\/+/, '/').sub(/\/$/, '')
end
alias :call_path :request_path
# Returns the method to call on the controller given an action, and the arguments
# that should be passed to it.
# @param [String] action
# @return [Array] A two elements array, containing the method, and additional arguments
def get_action_method(action)
method = nil
additional_arguments = nil
if (action =~ /^([^:]+)(:.+)$/)
method = $1
elsif (action =~ /^([^\/]+)\/(.+)$/) # methods followed by a slash
method = $1
additional_arguments = [$2]
else
method = action
end
method = method[0..-2] if !method.blank? && method[-1].chr == '/'
method, rest = method.split('.', 2) if method
method = self.class.default_action if !method || method.empty?
return nil if method.empty?
return [method.to_sym, additional_arguments]
end
# Returns true if this controller is the final target for the current action, that is, if it does not
# dispatch to any route
# @return [bool] True if the controller is the final target
def action_target?
!@dispatch_next[@call_path] || @dispatch_next[@call_path].dest == self \
|| @dispatch_next[@call_path].dest == self.class
end
# @return [bool] false if the target of the call is a widget, true otherwise
def is_target?
@is_target
end
# The main controller's execution method. The Controller will dispatch
# to another controller if a route is set; otherwise, it will call the
# method that should be executed according to action.
#
# This method can be overridden in subclasses, but remember to call super,
# or the dispatch chain will stop!
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def execute(action='', *arguments)
return if @__done
debug("Controller #{self} executing #{action} with arguments #{arguments}")
catch(:done) do
if can_dispatch?(:execute, action)
d_next = dispatch_next(action)
#run_chain(:execute, action, *arguments)
# shortcut route to self
return do_dispatch(:execute, action) if d_next.dest != self
arguments = d_next.params
end
if d_next && d_next.dest == self
set_executed_method(d_next.action)
end
if @executed_method
meth = self.method(@executed_method)
args = arguments + @executed_method_arguments
arity = meth.arity
unless arity == -1
arity = (-arity + 1) if arity < 0
args = arity == 0 ? [] : args[0..(arity-1)]
args = [nil] if meth.arity == 1 && args.empty?
end
Spider.logger.info("Executing: #{self.class.name}##{@executed_method}.#{@request.format}")
spider_main_controller_send = true
send(@executed_method, *args)
else
raise NotFound.new(action)
end
end
end
# Helper method, that calls and propagates #before
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def call_before(action='', *arguments)
return if respond_to?(:serving_static?) && self.serving_static?
@call_path = action
before(action, *arguments)
catch(:done) do
#debug("#{self} before")
d_next = dispatch_next(action)
unless d_next && d_next.obj == self
do_dispatch(:call_before, action, *arguments)
end
end
end
# This method can be implemented by Controllers, and will be called
# on the controller chain before the execute method.
#
# This method is usually reserved for preprocessing that does not
# output to the browser, to allow other controllers in chain to set response
# headers.
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def before(action='', *arguments)
end
# Helper method, that calls and propagates #after
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def call_after(action='', *arguments)
return if respond_to?(:serving_static?) && self.serving_static?
after(action, *arguments)
catch(:done) do
d_next = dispatch_next(action)
unless d_next && d_next.obj == self
do_dispatch(:call_after, action, *arguments)
end
end
end
# This method can be implemented by Controllers, and will be called
# on the controller chain after the execute method.
#
# If the webserver supports it, this method will be called after the response
# has been returned to the browser; so, it's suitable for post processing.
# If you aren't using a threaded web server, though, keep in mind that the
# process won't be available to service other requests.
# @param [String] action The current action
# @param [*Object] arguments Additional action arguments
def after(action='', *arguments)
end
# @return [bool] True if the controller is done, and should not continue dispatching.
def done?
@__done
end
# Stops the execution of the controller chain
# @return [void]
def done
self.done = true
throw :done
end
# Sets the controller chain's "done" state
# @param [bool] val
# @return [void]
def done=(val)
@__done = val
@dispatch_previous.done = val if @dispatch_previous
end
# Checks if an action responds to given route conditions. Is called by
# {Dispatcher#do_dispatch}.
# The default implementation calls Controller.check_action, which in turn is mixed in
# from {Dispatcher::ClassMethods#check_action}
# @param [String] action
# @param [Array] c An array of route conditions
# @return [bool]
def check_action(action, c)
self.class.check_action(action, c)
end
# Returns a new Scene instance for use in the controller.
# @param [Hash] scene Hash to construct the scene from
# @return [Scene]
def get_scene(scene=nil)
scene = Scene.new(scene) if scene.class == Hash
scene ||= Scene.new
return scene
end
# Populates the scene with request and controller metadata used by templates.
# @param [Scene] scene
# @return [Scene] the same scene, mutated in place
def prepare_scene(scene)
# A trailing slash is treated as a request for that directory's index page.
req_path = @request.path
req_path += 'index' if !req_path.blank? && req_path[-1].chr == '/'
scene.request = {
:path => @request.path,
:page_path => req_path
}
# :route_path exposes the controller's canonical URL path to templates.
scene.controller = {
:request_path => request_path,
:route_path => route_path,
:class => self.class
}
scene.content = {}
return scene
end
# See {Controller.controller_action?}
# @return [bool] True if the method is a controller action for the class
def controller_action?(method)
self.class.controller_action?(method)
end
protected
# Instantiates an object dispatched by a route
# @param [Route]
# @return [Controller]
def dispatched_object(route)
klass = route.dest
if klass.class != Class
if klass == self # route to self
set_executed_method(route.action)
end
return klass
elsif klass == self.class
self.set_action(route.action)
return self
end
obj = klass.new(@request, @response, @scene)
obj.dispatch_action = route.matched || ''
# FIXME: this is not clean
obj.set_action(route.action)
# obj.dispatch_path = @dispatch_path + route.path
return obj
end
# Given an action, sets the executed method unless it can be dispatched
# @param [String] action
# @return [Symbol|nil] The executed method, if it was set, or nil
def set_action(action)
@executed_method = nil
@executed_method_arguments = nil
if !can_dispatch?(:execute, action)
return set_executed_method(action)
end
nil
end
# Given an action, sets executed_method and executed_method_arguments
# @param [String] action
# @return [Symbol] The executed_method
def set_executed_method(action)
method, additional_arguments = get_action_method(action)
if method && controller_action?(method)
@executed_method = method.to_sym
@executed_method_arguments = additional_arguments || []
end
return @executed_method
end
# This method can be overrided by subclasses, to provide custom handling of
# exceptions
# @param [Exception]
# @return [void]
def try_rescue(exc)
raise exc
end
private
# Overrides {Dispatcher#get_route}, setting the action for nil routes
# @param [String] path
def get_route(*args)
route = super
return route unless route
action = route.path.split('/').first
action_method, action_params = get_action_method(action)
if route.nil_route && !action.blank? && self.respond_to?(action_method)
route.action = action
end
route
end
end
end
require 'spiderfw/widget/widget'
require 'spiderfw/tag/tag'
|
require 'spiderfw/controller/controller_io'
require 'spiderfw/controller/request'
require 'spiderfw/controller/response'
require 'spiderfw/controller/scene'
require 'spiderfw/controller/controller_exceptions'
require 'spiderfw/controller/first_responder'
require 'spiderfw/controller/mixins/visual'
require 'spiderfw/controller/mixins/http_mixin'
require 'spiderfw/controller/mixins/static_content'
require 'spiderfw/controller/helpers/widget_helper'
require 'spiderfw/utils/annotations'
module Spider
class Controller
include Dispatcher
include Logger
include ControllerMixins
include Helpers
include Annotations
class << self
# Lazily-initialized per-class options registry.
# @return [Hash]
def options
  @options ||= {}
end
# Stores a single option value.
# BUG FIX: previously wrote to `self.option[k]` — a recursive call to this
# very method with the wrong arity (ArgumentError) — instead of the
# `options` hash.
# @param [Object] k option key
# @param [Object] v option value
def option(k, v)
  options[k] = v
end
def default_action
'index'
end
# @return [Module, nil] the Spider app owning this controller: the parent
#   module when it includes Spider::App, nil otherwise.
# NOTE(review): while @app is nil/false this re-evaluates parent_module on
# every call; the memoization only sticks once a truthy app is found.
def app
return @app if @app
@app ||= self.parent_module
@app = nil unless self.parent_module.include?(Spider::App)
return @app
end
def template_path
return nil unless self.app
return self.app.path+'/views'
end
def layout_path
return nil unless self.app
return self.app.path+'/views'
end
def before(conditions, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
@dispatch_methods[:before] << [conditions, method, params]
end
def before_methods
@dispatch_methods && @dispatch_methods[:before] ? @dispatch_methods[:before] : []
end
def before_unless(condition, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
params[:unless] = true
@dispatch_methods[:before] << [condition, method, params]
end
def controller_actions(*methods)
if (methods.length > 0)
@controller_actions ||= []
@controller_actions += methods
end
@controller_actions
end
def controller_action?(method)
return false unless self.method_defined?(method)
return true if default_action && method == default_action.to_sym
if @controller_actions
res = @controller_actions.include?(method)
if (!res)
Spider.logger.info("Method #{method} is not a controller action for #{self}")
end
return res
else
return true
end
end
def find_resource(type, name, cur_path=nil)
Spider.find_resource(type, name, cur_path, self)
end
def find_resource_path(type, name, cur_path=nil)
res = Spider.find_resource(type, name, cur_path, self)
return res ? res.path : nil
end
end
define_annotation(:action) { |k, m| k.controller_actions(m) }
attr_reader :request, :response, :executed_method, :scene
attr_accessor :dispatch_action
def initialize(request, response, scene=nil)
@request = request
@response = response
@scene = scene || get_scene
@dispatch_path = ''
init
#@parent = parent
end
# Override this for controller initialization
def init
end
def inspect
self.class.to_s
end
def call_path
act = @dispatch_action || ''
if (@dispatch_previous)
prev = @dispatch_previous.call_path
act = prev+'/'+act unless prev.empty?
end
return ('/'+act).gsub(/\/+/, '/').sub(/\/$/, '')
end
def request_path
call_path
end
# Returns the method to call for an action, plus any extra arguments.
# "name:rest" yields just the method; "name/rest" yields the method and
# ["rest"]; anything else is used verbatim, defaulting to the class default.
# @param [String] action
# @return [Array, nil] [method_symbol, additional_arguments_or_nil]
def get_action_method(action)
method = nil
additional_arguments = nil
# method = action.empty? ? self.class.default_action : action
# method = method.split('/', 2)[0]
if (action =~ /^([^:]+)(:.+)$/)
method = $1
elsif (action =~ /^([^\/]+)\/(.+)$/) # methods followed by a slash
method = $1
additional_arguments = [$2]
else
method = action
end
method = self.class.default_action if !method || method.empty?
return nil if method.empty?
return [method.to_sym, additional_arguments]
end
# Returns true if this controller is the final target for the current action, that is, if it does not
# dispatch to any route
def action_target?
!@dispatch_next[@call_path] || @dispatch_next[@call_path].dest == self
end
def execute(action='', *arguments)
return if @done
# return if self.is_a?(Spider::Widget) # FIXME: this is obviously wrong. Widgets must override the behaviour
# # somewhere else, or probably just not inherit controller.
debug("Controller #{self} executing #{action} with arguments #{arguments}")
@call_path = action
# before(action, *arguments)
# do_dispatch(:before, action, *arguments)
catch(:done) do
if (can_dispatch?(:execute, action))
d_next = dispatch_next(action)
#run_chain(:execute, action, *arguments)
if d_next.dest != self # otherwise, shortcut route to self
return do_dispatch(:execute, action)
else
arguments = d_next.params
end
# after(action, *arguments)
end
if (@executed_method)
meth = self.method(@executed_method)
args = arguments + @executed_method_arguments
@controller_action = args[0]
arity = meth.arity
arity = (-arity + 1) if arity < 0
args = arity == 0 ? [] : args[0..(arity-1)]
args = [nil] if meth.arity == 1 && args.empty?
send(@executed_method, *args)
else
raise NotFound.new(action)
end
end
end
def before(action='', *arguments)
catch(:done) do
debug("#{self} before")
do_dispatch(:before, action, *arguments)
end
end
def after(action='', *arguments)
catch(:done) do
do_dispatch(:after, action, *arguments)
end
# begin
# run_chain(:after)
# #dispatch(:after, action, params)
# rescue => exc
# try_rescue(exc)
# end
end
# Flags the chain as finished and unwinds to the nearest catch(:done).
def done
  self.done = true
  throw :done
end
# Writes the finished flag here and on every previous dispatcher in the chain.
def done=(val)
  @done = val
  @dispatch_previous.done = val if @dispatch_previous
end
def check_action(action, c)
self.class.check_action(action, c)
end
def get_scene(scene=nil)
scene = Scene.new(scene) if scene.class == Hash
scene ||= Scene.new
# debugger
# scene.extend(SceneMethods)
return scene
end
def prepare_scene(scene)
scene.request = {
:path => @request.path
}
scene.controller = {
:request_path => request_path,
}
scene.content = {}
return scene
end
protected
def dispatched_object(route)
klass = route.dest
if klass.class != Class
if (klass == self) # route to self
@executed_method = route.action
@executed_method_arguments = []
end
return klass
end
obj = klass.new(@request, @response, @scene)
obj.dispatch_action = route.matched || ''
# FIXME: this is not clean
set_dispatched_object_attributes(obj, route.action)
if (route.options[:do])
obj.instance_eval &route.options[:do]
end
# obj.dispatch_path = @dispatch_path + route.path
return obj
end
def set_dispatched_object_attributes(obj, action)
obj.instance_eval do
@executed_method = nil
@executed_method_arguments = nil
if (!can_dispatch?(:execute, action))
method, additional_arguments = get_action_method(action)
if (method && self.class.controller_action?(method))
@executed_method = method.to_sym
@executed_method_arguments = additional_arguments || []
end
end
end
end
def try_rescue(exc)
raise exc
end
private
def pass
action = @call_path
return false unless can_dispatch?(:execute, action)
debug("CAN DISPATCH #{action}")
do_dispatch(:execute, action)
return true
end
module SceneMethods
end
end
end
require 'spiderfw/widget/widget'
require 'spiderfw/tag/tag'
Renamed @done to @__done
require 'spiderfw/controller/controller_io'
require 'spiderfw/controller/request'
require 'spiderfw/controller/response'
require 'spiderfw/controller/scene'
require 'spiderfw/controller/controller_exceptions'
require 'spiderfw/controller/first_responder'
require 'spiderfw/controller/mixins/visual'
require 'spiderfw/controller/mixins/http_mixin'
require 'spiderfw/controller/mixins/static_content'
require 'spiderfw/controller/helpers/widget_helper'
require 'spiderfw/utils/annotations'
module Spider
class Controller
include Dispatcher
include Logger
include ControllerMixins
include Helpers
include Annotations
class << self
def options
@options ||= {}
end
def option(k, v)
self.option[k] = v
end
def default_action
'index'
end
def app
return @app if @app
@app ||= self.parent_module
@app = nil unless self.parent_module.include?(Spider::App)
return @app
end
def template_path
return nil unless self.app
return self.app.path+'/views'
end
def layout_path
return nil unless self.app
return self.app.path+'/views'
end
def before(conditions, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
@dispatch_methods[:before] << [conditions, method, params]
end
def before_methods
@dispatch_methods && @dispatch_methods[:before] ? @dispatch_methods[:before] : []
end
def before_unless(condition, method, params={})
@dispatch_methods ||= {}
@dispatch_methods[:before] ||= []
params[:unless] = true
@dispatch_methods[:before] << [condition, method, params]
end
def controller_actions(*methods)
if (methods.length > 0)
@controller_actions ||= []
@controller_actions += methods
end
@controller_actions
end
def controller_action?(method)
return false unless self.method_defined?(method)
return true if default_action && method == default_action.to_sym
if @controller_actions
res = @controller_actions.include?(method)
if (!res)
Spider.logger.info("Method #{method} is not a controller action for #{self}")
end
return res
else
return true
end
end
def find_resource(type, name, cur_path=nil)
Spider.find_resource(type, name, cur_path, self)
end
def find_resource_path(type, name, cur_path=nil)
res = Spider.find_resource(type, name, cur_path, self)
return res ? res.path : nil
end
end
define_annotation(:action) { |k, m| k.controller_actions(m) }
attr_reader :request, :response, :executed_method, :scene
attr_accessor :dispatch_action
def initialize(request, response, scene=nil)
@request = request
@response = response
@scene = scene || get_scene
@dispatch_path = ''
init
#@parent = parent
end
# Override this for controller initialization
def init
end
def inspect
self.class.to_s
end
def call_path
act = @dispatch_action || ''
if (@dispatch_previous)
prev = @dispatch_previous.call_path
act = prev+'/'+act unless prev.empty?
end
return ('/'+act).gsub(/\/+/, '/').sub(/\/$/, '')
end
def request_path
call_path
end
def get_action_method(action)
method = nil
additional_arguments = nil
# method = action.empty? ? self.class.default_action : action
# method = method.split('/', 2)[0]
if (action =~ /^([^:]+)(:.+)$/)
method = $1
elsif (action =~ /^([^\/]+)\/(.+)$/) # methods followed by a slash
method = $1
additional_arguments = [$2]
else
method = action
end
method = self.class.default_action if !method || method.empty?
return nil if method.empty?
return [method.to_sym, additional_arguments]
end
# Returns true if this controller is the final target for the current action, that is, if it does not
# dispatch to any route
def action_target?
!@dispatch_next[@call_path] || @dispatch_next[@call_path].dest == self
end
def execute(action='', *arguments)
return if @__done
# return if self.is_a?(Spider::Widget) # FIXME: this is obviously wrong. Widgets must override the behaviour
# # somewhere else, or probably just not inherit controller.
debug("Controller #{self} executing #{action} with arguments #{arguments}")
@call_path = action
# before(action, *arguments)
# do_dispatch(:before, action, *arguments)
catch(:done) do
if (can_dispatch?(:execute, action))
d_next = dispatch_next(action)
#run_chain(:execute, action, *arguments)
if d_next.dest != self # otherwise, shortcut route to self
return do_dispatch(:execute, action)
else
arguments = d_next.params
end
# after(action, *arguments)
end
if (@executed_method)
meth = self.method(@executed_method)
args = arguments + @executed_method_arguments
@controller_action = args[0]
arity = meth.arity
arity = (-arity + 1) if arity < 0
args = arity == 0 ? [] : args[0..(arity-1)]
args = [nil] if meth.arity == 1 && args.empty?
send(@executed_method, *args)
else
raise NotFound.new(action)
end
end
end
def before(action='', *arguments)
catch(:done) do
debug("#{self} before")
do_dispatch(:before, action, *arguments)
end
end
def after(action='', *arguments)
catch(:done) do
do_dispatch(:after, action, *arguments)
end
# begin
# run_chain(:after)
# #dispatch(:after, action, params)
# rescue => exc
# try_rescue(exc)
# end
end
def done?
@__done
end
def done
self.done = true
throw :done
end
def done=(val)
@__done = val
@dispatch_previous.done = val if @dispatch_previous
end
def check_action(action, c)
self.class.check_action(action, c)
end
def get_scene(scene=nil)
scene = Scene.new(scene) if scene.class == Hash
scene ||= Scene.new
# debugger
# scene.extend(SceneMethods)
return scene
end
def prepare_scene(scene)
scene.request = {
:path => @request.path
}
scene.controller = {
:request_path => request_path,
}
scene.content = {}
return scene
end
protected
def dispatched_object(route)
klass = route.dest
if klass.class != Class
if (klass == self) # route to self
@executed_method = route.action
@executed_method_arguments = []
end
return klass
end
obj = klass.new(@request, @response, @scene)
obj.dispatch_action = route.matched || ''
# FIXME: this is not clean
set_dispatched_object_attributes(obj, route.action)
if (route.options[:do])
obj.instance_eval &route.options[:do]
end
# obj.dispatch_path = @dispatch_path + route.path
return obj
end
def set_dispatched_object_attributes(obj, action)
obj.instance_eval do
@executed_method = nil
@executed_method_arguments = nil
if (!can_dispatch?(:execute, action))
method, additional_arguments = get_action_method(action)
if (method && self.class.controller_action?(method))
@executed_method = method.to_sym
@executed_method_arguments = additional_arguments || []
end
end
end
end
def try_rescue(exc)
raise exc
end
private
def pass
action = @call_path
return false unless can_dispatch?(:execute, action)
debug("CAN DISPATCH #{action}")
do_dispatch(:execute, action)
return true
end
module SceneMethods
end
end
end
require 'spiderfw/widget/widget'
require 'spiderfw/tag/tag'
|
require 'spiderfw/utils/hash_comparison'
module Spider; module Model
# The IdentityMapper, when in use, will hold a reference to each object; the mapper will coordinate
# with it to ensure that each object of the same model with the same primary keys will point to the same
# Ruby object.
# This may or may not be what you need: the IdentityMapper can be set globally by assigning an instance
# to #Spider::Model.identity_mapper=, or for a block of code by passing a block to the initializer.
class IdentityMapper
# If passed a block, will activate the IdentityMapper, yield, and then deactivate it.
def initialize(&proc)
@objects = {}
@pks = {}
if (proc)
prev_im = Spider::Model.identity_mapper
Spider::Model.identity_mapper = self
yield self
Spider::Model.identity_mapper = prev_im
end
end
# Get an instance of model with given values. Values must contain all of model's primary keys.
# If an object with the same primary keys is found, it will be used; otherwise, a new instance will be
# created.
# In any case, the given values will be set on the object, before it is returned.
#---
# FIXME: refactor avoiding set_loaded
def get(model, values=nil, set_loaded=false)
if !values && model.is_a?(BaseModel)
curr = has?(model)
return curr ? curr : put(model)
end
@objects[model] ||= {}
pks = {}
has_pks = false
model.primary_keys.each do |k|
# dereference integrated primary keys
v = (k.integrated? && values[k.integrated_from.name]) ?
values[k.integrated_from.name].get(k.integrated_from_element) :
values[k.name]
has_pks = true if v
pks[k.name] = model.prepare_value(k, v)
end
raise IdentityMapperException, "Can't get without all primary keys" unless has_pks
pks.extend(HashComparison)
current = @objects[model][pks]
obj = nil
if current
obj = current
else
obj = model.new(pks)
#@objects[model][pks] = obj
end
# obj = (@objects[model][pks] ||= model.new(pks))
pks.each{ |k, v| obj.element_loaded(k) }
values.reject{|k,v| model.elements[k].primary_key? }.each do |k, v|
v = get(v) if v.is_a?(BaseModel)
if set_loaded
obj.set_loaded_value(k, v)
else
obj.set(k, v)
end
end
# Spider::Logger.debug("RETURNING #{obj.class} #{obj.object_id}")
return obj
end
# Puts an object into the identity mapper.
# If check is true, it will first check if the object exists, and if found merge it with the given obj;
# if check is false, if a object with the same primary keys exists it will be overwritten.
def put(obj, check=false, fail_if_exists=false)
return nil unless obj
if (obj.is_a?(QuerySet))
obj.each_current_index{ |i| obj[i] = put(obj[i], check) }
return obj
else
raise IdentityMapperException, "Can't get without all primary keys" unless obj.primary_keys_set?
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks.extend(HashComparison)
@objects[obj.class] ||= {}
if (check && (existent = @objects[obj.class][pks]) && existent.object_id != obj.object_id)
# debugger if fail_if_exists
raise IdentityMapperException, "A different instance of the same object already exists in the identity mapper" if fail_if_exists
existent.merge!(obj)
return existent
else
@objects[obj.class][pks] = obj
@pks[obj.object_id] = pks
traverse(obj)
uow = Spider::Model.unit_of_work
uow.add(obj) if uow
return obj
end
end
end
def traverse(obj, check=false, fail_if_exists=false)
obj.class.elements_array.each do |el|
next unless obj.element_has_value?(el)
next unless el.model?
subs = obj.get(el)
subs = [subs] unless subs.is_a?(Enumerable)
subs.each do |sub|
put(sub, check, fail_if_exists) if sub && sub.primary_keys_set? && has?(sub).object_id != sub.object_id
end
end
end
def put!(obj)
put(obj, true, true)
end
def has?(obj)
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks.extend(HashComparison)
@objects[obj.class] && @objects[obj.class][pks]
end
def delete(klass, obj_id)
pks = @pks[obj_id]
return unless pks && @objects[klass]
@objects[klass].delete(pks)
@pks.delete(obj_id)
end
def reset
@objects = {}
@pks = {}
end
end
class IdentityMapperException < RuntimeError
end
end; end
Better error message for IdentityMapper
require 'spiderfw/utils/hash_comparison'
module Spider; module Model
# The IdentityMapper, when in use, will hold a reference to each object; the mapper will coordinate
# with it to ensure that each object of the same model with the same primary keys will point to the same
# Ruby object.
# This may or may not be what you need: the IdentityMapper can be set globally by assigning an instance
# to #Spider::Model.identity_mapper=, or for a block of code by passing a block to the initializer.
class IdentityMapper
# If passed a block, will activate the IdentityMapper, yield, and then deactivate it.
def initialize(&proc)
@objects = {}
@pks = {}
if (proc)
prev_im = Spider::Model.identity_mapper
Spider::Model.identity_mapper = self
yield self
Spider::Model.identity_mapper = prev_im
end
end
# Get an instance of model with given values. Values must contain all of model's primary keys.
# If an object with the same primary keys is found, it will be used; otherwise, a new instance will be
# created.
# In any case, the given values will be set on the object, before it is returned.
#---
# FIXME: refactor avoiding set_loaded
def get(model, values=nil, set_loaded=false)
if !values && model.is_a?(BaseModel)
curr = has?(model)
return curr ? curr : put(model)
end
@objects[model] ||= {}
pks = {}
has_pks = false
model.primary_keys.each do |k|
# dereference integrated primary keys
v = (k.integrated? && values[k.integrated_from.name]) ?
values[k.integrated_from.name].get(k.integrated_from_element) :
values[k.name]
has_pks = true if v
pks[k.name] = model.prepare_value(k, v)
end
unless has_pks
raise IdentityMapperException, "Can't get #{model} from IdentityMapper without all primary keys, #{values.inspect} given"
end
pks.extend(HashComparison)
current = @objects[model][pks]
obj = nil
if current
obj = current
else
obj = model.new(pks)
#@objects[model][pks] = obj
end
# obj = (@objects[model][pks] ||= model.new(pks))
pks.each{ |k, v| obj.element_loaded(k) }
values.reject{|k,v| model.elements[k].primary_key? }.each do |k, v|
v = get(v) if v.is_a?(BaseModel)
if set_loaded
obj.set_loaded_value(k, v)
else
obj.set(k, v)
end
end
# Spider::Logger.debug("RETURNING #{obj.class} #{obj.object_id}")
return obj
end
# Puts an object into the identity mapper.
# If check is true, it will first check if the object exists, and if found merge it with the given obj;
# if check is false, if a object with the same primary keys exists it will be overwritten.
def put(obj, check=false, fail_if_exists=false)
return nil unless obj
if (obj.is_a?(QuerySet))
obj.each_current_index{ |i| obj[i] = put(obj[i], check) }
return obj
else
raise IdentityMapperException, "Can't get without all primary keys" unless obj.primary_keys_set?
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks.extend(HashComparison)
@objects[obj.class] ||= {}
if (check && (existent = @objects[obj.class][pks]) && existent.object_id != obj.object_id)
# debugger if fail_if_exists
raise IdentityMapperException, "A different instance of the same object already exists in the identity mapper" if fail_if_exists
existent.merge!(obj)
return existent
else
@objects[obj.class][pks] = obj
@pks[obj.object_id] = pks
traverse(obj)
uow = Spider::Model.unit_of_work
uow.add(obj) if uow
return obj
end
end
end
def traverse(obj, check=false, fail_if_exists=false)
obj.class.elements_array.each do |el|
next unless obj.element_has_value?(el)
next unless el.model?
subs = obj.get(el)
subs = [subs] unless subs.is_a?(Enumerable)
subs.each do |sub|
put(sub, check, fail_if_exists) if sub && sub.primary_keys_set? && has?(sub).object_id != sub.object_id
end
end
end
def put!(obj)
put(obj, true, true)
end
def has?(obj)
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks.extend(HashComparison)
@objects[obj.class] && @objects[obj.class][pks]
end
def delete(klass, obj_id)
pks = @pks[obj_id]
return unless pks && @objects[klass]
@objects[klass].delete(pks)
@pks.delete(obj_id)
end
def reset
@objects = {}
@pks = {}
end
end
class IdentityMapperException < RuntimeError
end
end; end |
require 'spiderfw/utils/hash_comparison'
module Spider; module Model
# The IdentityMapper, when in use, will hold a reference to each object; the mapper will coordinate
# with it to ensure that each object of the same model with the same primary keys will point to the same
# Ruby object.
# This may or may not be what you need: the IdentityMapper can be set globally by assigning an instance
# to #Spider::Model.identity_mapper=, or for a block of code by passing a block to the initializer.
class IdentityMapper
# If passed a block, will activate the IdentityMapper, yield, and then deactivate it.
def initialize(&proc)
@objects = {}
@pks = {}
if (proc)
prev_im = Spider::Model.identity_mapper
Spider::Model.identity_mapper = self
begin
yield self
ensure
Spider::Model.identity_mapper = prev_im
end
end
end
# Get an instance of model with given values. Values must contain all of model's primary keys.
# If an object with the same primary keys is found, it will be used; otherwise, a new instance will be
# created.
# In any case, the given values will be set on the object, before it is returned.
#---
# FIXME: refactor avoiding set_loaded
def get(model, values=nil, set_loaded=false)
if !values && model.is_a?(BaseModel)
curr = has?(model)
return curr ? curr : put(model)
end
@objects[model] ||= {}
pks = {}
has_pks = false
model.primary_keys.each do |k|
# dereference integrated primary keys
v = (k.integrated? && values[k.integrated_from.name]) ?
values[k.integrated_from.name].get(k.integrated_from_element) :
values[k.name]
has_pks = true if v
pks[k.name] = model.prepare_value(k, v)
end
orig_pks = pks.clone
normalize_pks(model, pks)
unless has_pks
raise IdentityMapperException, "Can't get #{model} from IdentityMapper without all primary keys, #{values.inspect} given"
end
pks.extend(HashComparison)
current = @objects[model][pks]
obj = nil
if current
obj = current
else
# Spider.logger.debug("GETTING NEW #{model} FROM #{pks.inspect}")
obj = model.new(orig_pks)
#@objects[model][pks] = obj
end
# obj = (@objects[model][pks] ||= model.new(pks))
pks.each{ |k, v| obj.element_loaded(k) }
values.reject{|k,v| model.elements[k].primary_key? }.each do |k, v|
v = get(v) if v.is_a?(BaseModel)
if set_loaded
obj.set_loaded_value(k, v)
else
obj.set(k, v)
end
end
# Spider::Logger.debug("RETURNING #{obj.class} #{obj.object_id}")
return obj
end
# Puts an object into the identity mapper.
# If check is true, it will first check if the object exists, and if found merge it with the given obj;
# if check is false, if a object with the same primary keys exists it will be overwritten.
def put(obj, check=false, fail_if_exists=false)
return nil unless obj
return obj if obj._no_identity_mapper
if (obj.is_a?(QuerySet))
obj.each_current_index{ |i| obj[i] = put(obj[i], check) }
return obj
else
return obj if @pks[obj.object_id]
raise IdentityMapperException, "Can't get without all primary keys" unless obj.primary_keys_set?
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks = normalize_pks(obj.class, pks)
pks.extend(HashComparison)
@objects[obj.class] ||= {}
if (check && (existent = @objects[obj.class][pks]) && existent.object_id != obj.object_id)
#debugger if fail_if_exists
raise IdentityMapperException, "A different instance of the same object #{obj.inspect} already exists in the identity mapper" if fail_if_exists
existent.merge!(obj)
return existent
else
@objects[obj.class][pks] = obj
@pks[obj.object_id] = pks
traverse(obj)
uow = Spider::Model.unit_of_work
uow.add(obj) if uow && !uow.running?
return obj
end
end
end
def traverse(obj, check=false, fail_if_exists=false)
obj.class.elements_array.each do |el|
next unless obj.element_has_value?(el)
next unless el.model?
subs = obj.get(el)
subs = [subs] unless subs.is_a?(Enumerable)
subs.each do |sub|
put(sub, check, fail_if_exists) if sub && sub.primary_keys_set? && has?(sub).object_id != sub.object_id
end
end
end
def put!(obj)
put(obj, true, true)
end
def has?(obj)
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks = normalize_pks(obj.class, pks)
pks.extend(HashComparison)
@objects[obj.class] && @objects[obj.class][pks]
end
def delete(klass, obj_id)
pks = @pks[obj_id]
return unless pks && @objects[klass]
@objects[klass].delete(pks)
@pks.delete(obj_id)
end
def reset
@objects = {}
@pks = {}
end
def normalize_pks(model, keys)
model_pks = model.primary_keys.map{ |k| k.name }
model_pks.each do |k|
if keys[k] && keys[k].is_a?(BaseModel)
keys[k] = keys[k].class.primary_keys.length > 1 ? keys[k].primary_keys : keys[k].primary_keys[0]
keys[k] = keys[k].first if model.elements[k].type.primary_keys.length && keys[k].is_a?(Array)
end
end
keys.keys.each do |k|
keys.delete(k) unless model_pks.include?(k)
end
keys
end
end
class IdentityMapperException < RuntimeError
end
end; end
Avoid loading object values when raising IdentityMapperException
require 'spiderfw/utils/hash_comparison'
module Spider; module Model
# The IdentityMapper, when in use, will hold a reference to each object; the mapper will coordinate
# with it to ensure that each object of the same model with the same primary keys will point to the same
# Ruby object.
# This may or may not be what you need: the IdentityMapper can be set globally by assigning an instance
# to #Spider::Model.identity_mapper=, or for a block of code by passing a block to the initializer.
class IdentityMapper
# If passed a block, will activate the IdentityMapper, yield, and then deactivate it.
def initialize(&proc)
@objects = {}
@pks = {}
if (proc)
prev_im = Spider::Model.identity_mapper
Spider::Model.identity_mapper = self
begin
yield self
ensure
Spider::Model.identity_mapper = prev_im
end
end
end
# Get an instance of model with given values. Values must contain all of model's primary keys.
# If an object with the same primary keys is found, it will be used; otherwise, a new instance will be
# created.
# In any case, the given values will be set on the object, before it is returned.
#---
# FIXME: refactor avoiding set_loaded
def get(model, values=nil, set_loaded=false)
if !values && model.is_a?(BaseModel)
curr = has?(model)
return curr ? curr : put(model)
end
@objects[model] ||= {}
pks = {}
has_pks = false
model.primary_keys.each do |k|
# dereference integrated primary keys
v = (k.integrated? && values[k.integrated_from.name]) ?
values[k.integrated_from.name].get(k.integrated_from_element) :
values[k.name]
has_pks = true if v
pks[k.name] = model.prepare_value(k, v)
end
orig_pks = pks.clone
normalize_pks(model, pks)
unless has_pks
raise IdentityMapperException, "Can't get #{model} from IdentityMapper without all primary keys, #{values.inspect} given"
end
pks.extend(HashComparison)
current = @objects[model][pks]
obj = nil
if current
obj = current
else
# Spider.logger.debug("GETTING NEW #{model} FROM #{pks.inspect}")
obj = model.new(orig_pks)
#@objects[model][pks] = obj
end
# obj = (@objects[model][pks] ||= model.new(pks))
pks.each{ |k, v| obj.element_loaded(k) }
values.reject{|k,v| model.elements[k].primary_key? }.each do |k, v|
v = get(v) if v.is_a?(BaseModel)
if set_loaded
obj.set_loaded_value(k, v)
else
obj.set(k, v)
end
end
# Spider::Logger.debug("RETURNING #{obj.class} #{obj.object_id}")
return obj
end
# Puts an object into the identity mapper.
# If check is true, it will first check if the object exists, and if found merge it with the given obj;
# if check is false, if a object with the same primary keys exists it will be overwritten.
def put(obj, check=false, fail_if_exists=false)
return nil unless obj
return obj if obj._no_identity_mapper
if (obj.is_a?(QuerySet))
obj.each_current_index{ |i| obj[i] = put(obj[i], check) }
return obj
else
return obj if @pks[obj.object_id]
raise IdentityMapperException, "Can't get without all primary keys" unless obj.primary_keys_set?
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks = normalize_pks(obj.class, pks)
pks.extend(HashComparison)
@objects[obj.class] ||= {}
if (check && (existent = @objects[obj.class][pks]) && existent.object_id != obj.object_id)
if fail_if_exists
#debugger
raise IdentityMapperException, "A different instance of the same object #{obj.class}(#{obj.primary_keys.inspect}) already exists in the identity mapper"
end
existent.merge!(obj)
return existent
else
@objects[obj.class][pks] = obj
@pks[obj.object_id] = pks
traverse(obj)
uow = Spider::Model.unit_of_work
uow.add(obj) if uow && !uow.running?
return obj
end
end
end
def traverse(obj, check=false, fail_if_exists=false)
obj.class.elements_array.each do |el|
next unless obj.element_has_value?(el)
next unless el.model?
subs = obj.get(el)
subs = [subs] unless subs.is_a?(Enumerable)
subs.each do |sub|
put(sub, check, fail_if_exists) if sub && sub.primary_keys_set? && has?(sub).object_id != sub.object_id
end
end
end
def put!(obj)
put(obj, true, true)
end
def has?(obj)
pks = {}
obj.class.primary_keys.each{ |key| pks[key.name] = obj.get(key) }
pks = normalize_pks(obj.class, pks)
pks.extend(HashComparison)
@objects[obj.class] && @objects[obj.class][pks]
end
def delete(klass, obj_id)
pks = @pks[obj_id]
return unless pks && @objects[klass]
@objects[klass].delete(pks)
@pks.delete(obj_id)
end
def reset
@objects = {}
@pks = {}
end
def normalize_pks(model, keys)
model_pks = model.primary_keys.map{ |k| k.name }
model_pks.each do |k|
if keys[k] && keys[k].is_a?(BaseModel)
keys[k] = keys[k].class.primary_keys.length > 1 ? keys[k].primary_keys : keys[k].primary_keys[0]
keys[k] = keys[k].first if model.elements[k].type.primary_keys.length && keys[k].is_a?(Array)
end
end
keys.keys.each do |k|
keys.delete(k) unless model_pks.include?(k)
end
keys
end
end
class IdentityMapperException < RuntimeError
end
end; end |
module CMDClean
def clean(_argv)
machine_def = check_and_get_stack
do_clean(machine_def)
end
def do_clean(machine_def)
# Note that the ordering here is important - must have killed VMs before
# removing their puppet cert, otherwise we have a race condition
nagios_schedule_downtime(machine_def)
clean_nodes(machine_def)
puppet_clean(machine_def)
end
private
def clean_nodes(machine_def)
computecontroller = Compute::Controller.new
computecontroller.clean(machine_def.to_specs) do
on :success do |vm, msg|
logger(Logger::INFO) { "successfully cleaned #{vm}: #{msg}" }
end
on :failure do |vm, msg|
logger(Logger::ERROR) { "failed to clean #{vm}: #{msg}" }
end
on :unaccounted do |vm|
logger(Logger::WARN) { "VM was unaccounted for: #{vm}" }
end
end
end
def puppet_clean(machine_def)
puppet_certs_to_clean = []
machine_def.accept do |child_machine_def|
if child_machine_def.respond_to?(:mgmt_fqdn)
if child_machine_def.needs_signing?
puppet_certs_to_clean << child_machine_def.mgmt_fqdn
else
logger(Logger::INFO) { "removal of cert not needed for #{child_machine_def.mgmt_fqdn}" }
end
end
end
include Support::MCollectivePuppet
ca_clean(puppet_certs_to_clean) do
on :success do |machine|
logger(Logger::INFO) { "successfully removed cert for #{machine}" }
end
on :failed do |machine|
logger(Logger::WARN) { "failed to remove cert for #{machine}" }
end
end
end
end
tomd: add newline at end of file
module CMDClean
def clean(_argv)
machine_def = check_and_get_stack
do_clean(machine_def)
end
def do_clean(machine_def)
# Note that the ordering here is important - must have killed VMs before
# removing their puppet cert, otherwise we have a race condition
nagios_schedule_downtime(machine_def)
clean_nodes(machine_def)
puppet_clean(machine_def)
end
private
def clean_nodes(machine_def)
computecontroller = Compute::Controller.new
computecontroller.clean(machine_def.to_specs) do
on :success do |vm, msg|
logger(Logger::INFO) { "successfully cleaned #{vm}: #{msg}" }
end
on :failure do |vm, msg|
logger(Logger::ERROR) { "failed to clean #{vm}: #{msg}" }
end
on :unaccounted do |vm|
logger(Logger::WARN) { "VM was unaccounted for: #{vm}" }
end
end
end
def puppet_clean(machine_def)
puppet_certs_to_clean = []
machine_def.accept do |child_machine_def|
if child_machine_def.respond_to?(:mgmt_fqdn)
if child_machine_def.needs_signing?
puppet_certs_to_clean << child_machine_def.mgmt_fqdn
else
logger(Logger::INFO) { "removal of cert not needed for #{child_machine_def.mgmt_fqdn}" }
end
end
end
include Support::MCollectivePuppet
ca_clean(puppet_certs_to_clean) do
on :success do |machine|
logger(Logger::INFO) { "successfully removed cert for #{machine}" }
end
on :failed do |machine|
logger(Logger::WARN) { "failed to remove cert for #{machine}" }
end
end
end
end
|
module SteppingStone
module Model
class Instruction
attr_reader :name, :arguments, :metadata
def initialize(name, arguments, metadata={})
@name, @arguments, @metadata = name, arguments, metadata
end
def event
@name
end
def to_a
[@name, @arguments]
end
end
class TestCase
include Enumerable
attr_reader :name, :instructions
attr_accessor :tags
# uri: TestCase identifier. URI fragments are allowed.
# instructions: list of instructions to be sent to the SUT
#
def initialize(name, *instructions)
@name = name
@instructions = instructions.map { |i| Instruction.new(:map, i) }
@instructions.unshift(Instruction.new(:setup, [@name]))
@instructions.push(Instruction.new(:teardown, [@name]))
@tags = []
end
def each(&blk)
instructions.each(&blk)
end
def empty?
instructions.empty?
end
def metadata
{}
end
end
end
end
Pull out TestCase instruction build into method
module SteppingStone
module Model
class Instruction
attr_reader :name, :arguments, :metadata
def initialize(name, arguments, metadata={})
@name, @arguments, @metadata = name, arguments, metadata
end
def event
@name
end
def to_a
[@name, @arguments]
end
end
class TestCase
include Enumerable
attr_reader :name, :instructions
attr_accessor :tags
# uri: TestCase identifier. URI fragments are allowed.
# instructions: list of instructions to be sent to the SUT
#
def initialize(name, *instructions)
@name = name
@instructions = build_instructions(instructions)
@tags = []
end
def each(&blk)
instructions.each(&blk)
end
def empty?
instructions.empty?
end
def metadata
{}
end
private
def build_instructions(instructions)
[
Instruction.new(:setup, [@name]),
instructions.map { |i| Instruction.new(:map, i) },
Instruction.new(:teardown, [@name]),
].flatten
end
end
end
end
|
require 'surveyor/common'

module Surveyor
  module Models
    # Mixin with the shared behaviour of the Answer model: attribute
    # defaults, display/CSS helpers, per-locale translation lookup and
    # client-side (jquery.validate) rule generation.
    module AnswerMethods
      extend ActiveSupport::Concern
      include ActiveModel::Validations
      include MustacheContext
      include ActiveModel::ForbiddenAttributesProtection

      included do
        # Associations
        belongs_to :question
        has_many :responses
        has_many :validations, :dependent => :destroy
        # Mass-assignment guard only on Rails versions that still ship
        # ActiveModel::MassAssignmentSecurity.
        attr_accessible *PermittedParams.new.answer_attributes if defined? ActiveModel::MassAssignmentSecurity
        # Validations
        validates_presence_of :text
      end

      # Instance Methods

      # Apply defaults as soon as the record is instantiated.
      def initialize(*args)
        super(*args)
        default_args
      end

      # Fill in sensible defaults for any attribute still blank.
      def default_args
        self.is_exclusive ||= false
        self.display_type ||= "default"
        self.response_class ||= "answer"
        self.short_text ||= text
        self.data_export_identifier ||= Surveyor::Common.normalize(text)
        self.api_id ||= Surveyor::Common.generate_api_id
      end

      # Normalize the display type to a string; nil stays nil.
      def display_type=(val)
        write_attribute(:display_type, val.nil? ? nil : val.to_s)
      end

      # CSS classes for rendering: "exclusive" flag plus any custom class.
      def css_class
        [(is_exclusive ? "exclusive" : nil), custom_class].compact.join(" ")
      end

      # Label text for this answer; hidden labels render as "".
      # +position+ selects the :pre/:post half of a "left|right" label.
      def text_for(position = nil, context = nil, locale = nil)
        return "" if display_type == "hidden_label"
        imaged(split(in_context(translation(locale)[:text], context), position))
      end

      def help_text_for(context = nil, locale = nil)
        in_context(translation(locale)[:help_text], context)
      end

      def default_value_for(context = nil, locale = nil)
        in_context(translation(locale)[:default_value], context)
      end

      # Split a "left|right" label: :pre returns the left half, :post the
      # right half, anything else the whole text. Always returns a String.
      def split(text, position=nil)
        case position
        when :pre
          text.split("|",2)[0]
        when :post
          text.split("|",2)[1]
        else
          text
        end.to_s
      end

      # Attribute hash for this answer in the given locale; untranslated
      # attributes act as the fallback when no translation entry exists.
      def translation(locale)
        {:text => self.text, :help_text => self.help_text, :default_value => self.default_value}.with_indifferent_access.merge(
          (self.question.translation(locale)[:answers] || {})[self.reference_identifier] || {}
        )
      end

      def data_rules
        #create data rules for validations, see 'lib/assets/javascripts/surveyor/jquery.validate.js:887'
        case response_class
        when 'integer' then integer_conditions
        when 'text', 'string' then text_conditions
        end
      end

      # Map integer validation conditions onto jquery.validate rule names.
      # NOTE(review): the "<" and ">" branches use +1/-1 in the opposite
      # direction one would expect for strict bounds — confirm against the
      # validation-condition semantics before relying on them.
      def integer_conditions
        rules = {}
        validations.map{ |v| v.validation_conditions }.flatten.each do |condition|
          case condition.operator
          when "<=" then rules.merge!({ 'rule-max' => condition.integer_value })
          when "<" then rules.merge!({ 'rule-max' => ( condition.integer_value + 1 ) })
          when ">" then rules.merge!({ 'rule-min' => ( condition.integer_value - 1 ) })
          when ">=" then rules.merge!({ 'rule-min' => condition.integer_value })
          when "==" then rules.merge!({ 'rule-equalto' => condition.integer_value })
          end
        end
        rules
      end

      # Map regexp validation conditions onto a jquery.validate pattern rule.
      def text_conditions
        rules = {}
        validations.map{ |v| v.validation_conditions }.flatten.each do |condition|
          case condition.operator
          when "=~" then rules.merge!({ 'rule-pattern' => condition.regexp })
          end
        end
        rules
      end

      def image_type?
        self.display_type == "image" && text.present?
      end

      private

      # For image answers, render +text+ as an <img> tag (prefixed by
      # short_text when it differs); otherwise pass the text through.
      # Either way the result goes through span_wrapper.
      def imaged(text)
        image_txt = if image_type?
          image = ActionController::Base.helpers.image_tag(text)
          short_text != text ? ( short_text.to_s + image ) : image
        else
          text
        end
        span_wrapper image_txt
      end

      # NOTE(review): %(one any) is the single String "one any", so this is a
      # substring test, not an array membership test. It happens to behave
      # correctly for the picks "one"/"any"/"none", but %w(one any) was
      # probably intended.
      def span_wrapper text
        %(one any).include?( question.pick ) ? "<span>#{text}</span>" : text
      end
    end
  end
end
Ignore the span wrapper when the question uses a custom renderer
require 'surveyor/common'

module Surveyor
  module Models
    # Mixin with the shared behaviour of the Answer model: attribute
    # defaults, display/CSS helpers, per-locale translation lookup and
    # client-side (jquery.validate) rule generation.
    module AnswerMethods
      extend ActiveSupport::Concern
      include ActiveModel::Validations
      include MustacheContext
      include ActiveModel::ForbiddenAttributesProtection

      included do
        # Associations
        belongs_to :question
        has_many :responses
        has_many :validations, :dependent => :destroy
        # Mass-assignment guard only on Rails versions that still ship
        # ActiveModel::MassAssignmentSecurity.
        attr_accessible *PermittedParams.new.answer_attributes if defined? ActiveModel::MassAssignmentSecurity
        # Validations
        validates_presence_of :text
      end

      # Instance Methods

      # Apply defaults as soon as the record is instantiated.
      def initialize(*args)
        super(*args)
        default_args
      end

      # Fill in sensible defaults for any attribute still blank.
      def default_args
        self.is_exclusive ||= false
        self.display_type ||= "default"
        self.response_class ||= "answer"
        self.short_text ||= text
        self.data_export_identifier ||= Surveyor::Common.normalize(text)
        self.api_id ||= Surveyor::Common.generate_api_id
      end

      # Normalize the display type to a string; nil stays nil.
      def display_type=(val)
        write_attribute(:display_type, val.nil? ? nil : val.to_s)
      end

      # CSS classes for rendering: "exclusive" flag plus any custom class.
      def css_class
        [(is_exclusive ? "exclusive" : nil), custom_class].compact.join(" ")
      end

      # Label text for this answer; hidden labels render as "".
      # +position+ selects the :pre/:post half of a "left|right" label.
      def text_for(position = nil, context = nil, locale = nil)
        return "" if display_type == "hidden_label"
        imaged(split(in_context(translation(locale)[:text], context), position))
      end

      def help_text_for(context = nil, locale = nil)
        in_context(translation(locale)[:help_text], context)
      end

      def default_value_for(context = nil, locale = nil)
        in_context(translation(locale)[:default_value], context)
      end

      # Split a "left|right" label: :pre returns the left half, :post the
      # right half, anything else the whole text. Always returns a String.
      def split(text, position=nil)
        case position
        when :pre
          text.split("|",2)[0]
        when :post
          text.split("|",2)[1]
        else
          text
        end.to_s
      end

      # Attribute hash for this answer in the given locale; untranslated
      # attributes act as the fallback when no translation entry exists.
      def translation(locale)
        {:text => self.text, :help_text => self.help_text, :default_value => self.default_value}.with_indifferent_access.merge(
          (self.question.translation(locale)[:answers] || {})[self.reference_identifier] || {}
        )
      end

      def data_rules
        #create data rules for validations, see 'lib/assets/javascripts/surveyor/jquery.validate.js:887'
        case response_class
        when 'integer' then integer_conditions
        when 'text', 'string' then text_conditions
        end
      end

      # Map integer validation conditions onto jquery.validate rule names.
      # NOTE(review): the "<" and ">" branches use +1/-1 in the opposite
      # direction one would expect for strict bounds — confirm against the
      # validation-condition semantics before relying on them.
      def integer_conditions
        rules = {}
        validations.map{ |v| v.validation_conditions }.flatten.each do |condition|
          case condition.operator
          when "<=" then rules.merge!({ 'rule-max' => condition.integer_value })
          when "<" then rules.merge!({ 'rule-max' => ( condition.integer_value + 1 ) })
          when ">" then rules.merge!({ 'rule-min' => ( condition.integer_value - 1 ) })
          when ">=" then rules.merge!({ 'rule-min' => condition.integer_value })
          when "==" then rules.merge!({ 'rule-equalto' => condition.integer_value })
          end
        end
        rules
      end

      # Map regexp validation conditions onto a jquery.validate pattern rule.
      def text_conditions
        rules = {}
        validations.map{ |v| v.validation_conditions }.flatten.each do |condition|
          case condition.operator
          when "=~" then rules.merge!({ 'rule-pattern' => condition.regexp })
          end
        end
        rules
      end

      def image_type?
        display_type == "image" && text.present?
      end

      private

      # For image answers, render +text+ as an <img> tag (prefixed by
      # short_text when it differs); otherwise pass the text through.
      # Either way the result goes through span_wrapper.
      def imaged(text)
        image_txt = if image_type?
          image = ActionController::Base.helpers.image_tag(text)
          short_text != text ? ( short_text.to_s + image ) : image
        else
          text
        end
        span_wrapper image_txt
      end

      # Wrap in a <span> only for the default renderer on pick-one/any
      # questions; custom renderers receive the raw markup untouched.
      def span_wrapper text
        ( custom_renderer.blank? && pick_one_or_any_question? ) ? "<span>#{text}</span>" : text
      end

      # NOTE(review): %(one any) is the single String "one any" (substring
      # check), not the array %w(one any); it behaves correctly for the
      # picks "one"/"any"/"none" but is fragile for other values.
      def pick_one_or_any_question?
        %(one any).include?( question.pick )
      end
    end
  end
end
|
# effective_csv_importer 1.0
# Creates one rake task per importer model, as well as a `rake csv:import:all` task.
#
# Usage:
#   Put your importer in /lib/csv_importers/posts.rb
#   Put your csv data in /lib/csv_importers/data/posts.csv
#   Both filenames should be pluralized
#
#   rake csv:import:posts (one task created per model)
#   rake csv:import:all
#   rake csv:import:scaffold
#   rake csv:import:scaffold[users]
#   rake csv:export
namespace :csv do
  namespace :import do
    # Create a rake task to import each csv file
    Dir['lib/csv_importers/*.rb'].each do |file|
      importer = file.sub('lib/csv_importers/', '').sub('_importer.rb', '')
      csv_file = "lib/csv_importers/data/#{importer}.csv"

      # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
      next unless File.exist?(csv_file)

      # rake csv:import:foo
      desc "Import #{importer} from #{csv_file}"
      task importer => :environment do
        require "#{Rails.application.root}/#{file}"

        klass = "CsvImporters::#{importer.classify.pluralize}Importer".safe_constantize
        raise "unable to constantize CsvImporters::#{importer.classify.pluralize}Importer for #{file}" unless klass

        klass.new.import!
      end
    end

    # rake csv:import:all
    desc 'Import all from /lib/csv_importers/*.rb'
    task :all => :environment do
      Dir['lib/csv_importers/*.rb'].each do |file|
        importer = file.sub('lib/csv_importers/', '').sub('_importer.rb', '')
        csv_file = "lib/csv_importers/data/#{importer}.csv"
        next unless File.exist?(csv_file)
        Rake::Task["csv:import:#{importer}"].invoke
      end
    end

    # rake csv:scaffold
    # rake csv:scaffold[users]
    desc 'Scaffold an Effective::CSVImporter for each /lib/csv_importers/data/*.csv file'
    task :scaffold, [:file_name] => :environment do |_t, args|
      args.with_defaults(file_name: 'all')
      require 'csv'

      generator = ERB.new(File.read(File.dirname(__FILE__) + '/../generators/effective_developer/csv_importer.rb.erb'))

      # `letters`, `klass` and `columns` are consumed by the ERB template
      # through the binding passed to generator.result — keep them in scope.
      letters = ('A'..'AT').to_a

      Dir['lib/csv_importers/data/*.csv'].each do |file|
        csv_file = file.split('/').last.gsub('.csv', '')

        # Skip files not requested (default 'all' scaffolds everything).
        requested = Array(args.file_name)
        next unless requested == ['all'] || requested.include?(csv_file)

        klass = csv_file.classify.pluralize
        columns = CSV.open(file, 'r') { |csv| csv.first }

        # Render before opening the output handle so the block variable no
        # longer shadows the outer `file` loop variable.
        code = generator.result(binding)

        File.open("#{Rails.root}/lib/csv_importers/#{csv_file}_importer.rb", 'w') do |io|
          io.write code
        end
      end
    end
  end

  # rake csv:export
  desc 'Export all database tables to /tmp/csv_exports/*.csv'
  task :export => :environment do
    require 'csv'

    path = Rails.root.to_s + '/tmp/csv_exports/'
    FileUtils.mkdir_p(path) unless File.directory?(path)

    tables = ActiveRecord::Base.connection.tables - ['schema_migrations']

    tables.each do |table|
      records = ActiveRecord::Base.connection.exec_query("SELECT * FROM #{table} ORDER BY id")

      CSV.open(path + "#{table}.csv", 'wb') do |csv|
        csv << records.columns
        records.rows.each { |row| csv << row }
      end
    end

    # Report the number of tables actually exported, not the raw count
    # (the original message also counted the excluded tables).
    puts "Successfully csv exported #{tables.length} tables to #{path}"
  end
end
Rails 5 tweak to the importer: exclude the ar_internal_metadata table from CSV export
# effective_csv_importer 1.0
# Creates one rake task per importer model, as well as a `rake csv:import:all` task.
#
# Usage:
#   Put your importer in /lib/csv_importers/posts.rb
#   Put your csv data in /lib/csv_importers/data/posts.csv
#   Both filenames should be pluralized
#
#   rake csv:import:posts (one task created per model)
#   rake csv:import:all
#   rake csv:import:scaffold
#   rake csv:import:scaffold[users]
#   rake csv:export
namespace :csv do
  namespace :import do
    # Create a rake task to import each csv file
    Dir['lib/csv_importers/*.rb'].each do |file|
      importer = file.sub('lib/csv_importers/', '').sub('_importer.rb', '')
      csv_file = "lib/csv_importers/data/#{importer}.csv"

      # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
      next unless File.exist?(csv_file)

      # rake csv:import:foo
      desc "Import #{importer} from #{csv_file}"
      task importer => :environment do
        require "#{Rails.application.root}/#{file}"

        klass = "CsvImporters::#{importer.classify.pluralize}Importer".safe_constantize
        raise "unable to constantize CsvImporters::#{importer.classify.pluralize}Importer for #{file}" unless klass

        klass.new.import!
      end
    end

    # rake csv:import:all
    desc 'Import all from /lib/csv_importers/*.rb'
    task :all => :environment do
      Dir['lib/csv_importers/*.rb'].each do |file|
        importer = file.sub('lib/csv_importers/', '').sub('_importer.rb', '')
        csv_file = "lib/csv_importers/data/#{importer}.csv"
        next unless File.exist?(csv_file)
        Rake::Task["csv:import:#{importer}"].invoke
      end
    end

    # rake csv:scaffold
    # rake csv:scaffold[users]
    desc 'Scaffold an Effective::CSVImporter for each /lib/csv_importers/data/*.csv file'
    task :scaffold, [:file_name] => :environment do |_t, args|
      args.with_defaults(file_name: 'all')
      require 'csv'

      generator = ERB.new(File.read(File.dirname(__FILE__) + '/../generators/effective_developer/csv_importer.rb.erb'))

      # `letters`, `klass` and `columns` are consumed by the ERB template
      # through the binding passed to generator.result — keep them in scope.
      letters = ('A'..'AT').to_a

      Dir['lib/csv_importers/data/*.csv'].each do |file|
        csv_file = file.split('/').last.gsub('.csv', '')

        # Skip files not requested (default 'all' scaffolds everything).
        requested = Array(args.file_name)
        next unless requested == ['all'] || requested.include?(csv_file)

        klass = csv_file.classify.pluralize
        columns = CSV.open(file, 'r') { |csv| csv.first }

        # Render before opening the output handle so the block variable no
        # longer shadows the outer `file` loop variable.
        code = generator.result(binding)

        File.open("#{Rails.root}/lib/csv_importers/#{csv_file}_importer.rb", 'w') do |io|
          io.write code
        end
      end
    end
  end

  # rake csv:export
  desc 'Export all database tables to /tmp/csv_exports/*.csv'
  task :export => :environment do
    require 'csv'

    path = Rails.root.to_s + '/tmp/csv_exports/'
    FileUtils.mkdir_p(path) unless File.directory?(path)

    # Skip Rails' bookkeeping tables (ar_internal_metadata exists on Rails 5+).
    tables = ActiveRecord::Base.connection.tables - ['schema_migrations', 'ar_internal_metadata']

    tables.each do |table|
      records = ActiveRecord::Base.connection.exec_query("SELECT * FROM #{table} ORDER BY id")

      CSV.open(path + "#{table}.csv", 'wb') do |csv|
        csv << records.columns
        records.rows.each { |row| csv << row }
      end
    end

    # Report the number of tables actually exported, not the raw count
    # (the original message also counted the excluded tables).
    puts "Successfully csv exported #{tables.length} tables to #{path}"
  end
end
|
require 'helper'

# Integration tests for Slim's HTML structure rendering: tags, doctypes,
# shortcuts, attribute delimiters, text blocks, comments, multiline
# attributes and block expansion.
#
# NOTE(review): the leading indentation inside the %q{} Slim templates
# appears to have been stripped in this copy of the file. Slim is
# whitespace-sensitive, so these templates need their original indentation
# restored before they can run; the code is therefore kept byte-identical
# here rather than reformatted.
class TestSlimHtmlStructure < TestSlim
def test_simple_render
# Keep the trailing space behind "body "!
source = %q{
html
head
title Simple Test Title
body
p Hello World, meet Slim.
}
assert_html '<html><head><title>Simple Test Title</title></head><body><p>Hello World, meet Slim.</p></body></html>', source
end
def test_relaxed_indentation_of_first_line
source = %q{
p
.content
}
assert_html "<p><div class=\"content\"></div></p>", source
end
def test_html_tag_with_text_and_empty_line
source = %q{
p Hello
p World
}
assert_html "<p>Hello</p><p>World</p>", source
end
def test_html_namespaces
source = %q{
html:body
html:p html:id="test" Text
}
assert_html '<html:body><html:p html:id="test">Text</html:p></html:body>', source
end
def test_doctype
source = %q{
doctype 1.1
html
}
assert_html '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"><html></html>', source, format: :xhtml
end
def test_doctype_new_syntax
source = %q{
doctype 5
html
}
assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml
end
def test_doctype_new_syntax_html5
source = %q{
doctype html
html
}
assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml
end
def test_render_with_shortcut_attributes
source = %q{
h1#title This is my title
#notice.hello.world
= hello_world
}
assert_html '<h1 id="title">This is my title</h1><div class="hello world" id="notice">Hello World from @env</div>', source
end
def test_render_with_overwritten_default_tag
source = %q{
#notice.hello.world
= hello_world
}
assert_html '<section class="hello world" id="notice">Hello World from @env</section>', source, default_tag: 'section'
end
def test_render_with_custom_shortcut
source = %q{
#notice.hello.world@test
= hello_world
@abc
= hello_world
}
assert_html '<div class="hello world" id="notice" role="test">Hello World from @env</div><section role="abc">Hello World from @env</section>', source, shortcut: {'#' => {attr: 'id'}, '.' => {attr: 'class'}, '@' => {tag: 'section', attr: 'role'}}
end
def test_render_with_custom_array_shortcut
source = %q{
#user@.admin Daniel
}
assert_html '<div class="admin" id="user" role="admin">Daniel</div>', source, shortcut: {'#' => {attr: 'id'}, '.' => {attr: 'class'}, '@' => {attr: 'role'}, '@.' => {attr: ['class', 'role']}}
end
def test_render_with_custom_shortcut_and_additional_attrs
source = %q{
^items
== "[{'title':'item0'},{'title':'item1'},{'title':'item2'},{'title':'item3'},{'title':'item4'}]"
}
assert_html '<script data-binding="items" type="application/json">[{\'title\':\'item0\'},{\'title\':\'item1\'},{\'title\':\'item2\'},{\'title\':\'item3\'},{\'title\':\'item4\'}]</script>',
source, shortcut: {'^' => {tag: 'script', attr: 'data-binding', additional_attrs: { type: "application/json" }}}
end
def test_render_with_text_block
source = %q{
p
|
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
}
assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p>', source
end
def test_render_with_text_block_with_subsequent_markup
source = %q{
p
|
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
p Some more markup
}
assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p><p>Some more markup</p>', source
end
def test_render_with_text_block_with_trailing_whitespace
source = %q{
' this is
a link to
a href="link" page
}
assert_html "this is\na link to <a href=\"link\">page</a>", source
end
def test_nested_text
source = %q{
p
|
This is line one.
This is line two.
This is line three.
This is line four.
p This is a new paragraph.
}
assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.</p><p>This is a new paragraph.</p>", source
end
def test_nested_text_with_nested_html_one_same_line
source = %q{
p
| This is line one.
This is line two.
span.bold This is a bold line in the paragraph.
| This is more content.
}
assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source
end
def test_nested_text_with_nested_html_one_same_line2
source = %q{
p
|This is line one.
This is line two.
span.bold This is a bold line in the paragraph.
| This is more content.
}
assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source
end
def test_nested_text_with_nested_html
source = %q{
p
|
This is line one.
This is line two.
This is line three.
This is line four.
span.bold This is a bold line in the paragraph.
| This is more content.
}
assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source
end
def test_simple_paragraph_with_padding
source = %q{
p There will be 3 spaces in front of this line.
}
assert_html '<p> There will be 3 spaces in front of this line.</p>', source
end
def test_paragraph_with_nested_text
source = %q{
p This is line one.
This is line two.
}
assert_html "<p>This is line one.\n This is line two.</p>", source
end
def test_paragraph_with_padded_nested_text
source = %q{
p This is line one.
This is line two.
}
assert_html "<p> This is line one.\n This is line two.</p>", source
end
def test_paragraph_with_attributes_and_nested_text
source = %q{
p#test class="paragraph" This is line one.
This is line two.
}
assert_html "<p class=\"paragraph\" id=\"test\">This is line one.\nThis is line two.</p>", source
end
def test_relaxed_text_indentation
source = %q{
p
| text block
text
line3
}
assert_html "<p>text block\ntext\n line3</p>", source
end
def test_output_code_with_leading_spaces
source = %q{
p= hello_world
p = hello_world
p = hello_world
}
assert_html '<p>Hello World from @env</p><p>Hello World from @env</p><p>Hello World from @env</p>', source
end
def test_single_quoted_attributes
source = %q{
p class='underscored_class_name' = output_number
}
assert_html '<p class="underscored_class_name">1337</p>', source
end
def test_nonstandard_attributes
source = %q{
p id="dashed-id" class="underscored_class_name" = output_number
}
assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source
end
def test_nonstandard_shortcut_attributes
source = %q{
p#dashed-id.underscored_class_name = output_number
}
assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source
end
def test_dashed_attributes
source = %q{
p data-info="Illudium Q-36" = output_number
}
assert_html '<p data-info="Illudium Q-36">1337</p>', source
end
def test_dashed_attributes_with_shortcuts
source = %q{
p#marvin.martian data-info="Illudium Q-36" = output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_parens_around_attributes
source = %q{
p(id="marvin" class="martian" data-info="Illudium Q-36") = output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_square_brackets_around_attributes
source = %q{
p[id="marvin" class="martian" data-info="Illudium Q-36"] = output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_parens_around_attributes_with_equal_sign_snug_to_right_paren
source = %q{
p(id="marvin" class="martian" data-info="Illudium Q-36")= output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_default_attr_delims_option
source = %q{
p<id="marvin" class="martian" data-info="Illudium Q-36">= output_number
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str
end
end
def test_custom_attr_delims_option
source = %q{
p { foo="bar" }
}
assert_html '<p foo="bar"></p>', source
assert_html '<p foo="bar"></p>', source, attr_list_delims: {'{' => '}'}
assert_html '<p>{ foo="bar" }</p>', source, attr_list_delims: {'(' => ')', '[' => ']'}
end
def test_closed_tag
source = %q{
closed/
}
assert_html '<closed />', source, format: :xhtml
end
def test_custom_attr_list_delims_option
source = %q{
p { foo="bar" x=(1+1) }
p < x=(1+1) > Hello
}
assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source
assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source, attr_list_delims: {'{' => '}'}
assert_html '<p>{ foo="bar" x=(1+1) }</p><p x="2">Hello</p>', source, attr_list_delims: {'<' => '>'}, code_attr_delims: { '(' => ')' }
end
# NOTE(review): duplicate definition — test_closed_tag is already defined
# above with an identical body, so this redefinition silently replaces it
# and only one of the two ever runs. One of them should be removed or
# renamed.
def test_closed_tag
source = %q{
closed/
}
assert_html '<closed />', source, format: :xhtml
end
# NOTE(review): "attributs" in the next three method names is a typo for
# "attributes".
def test_attributs_with_parens_and_spaces
source = %q{label{ for='filter' }= hello_world}
assert_html '<label for="filter">Hello World from @env</label>', source
end
def test_attributs_with_parens_and_spaces2
source = %q{label{ for='filter' } = hello_world}
assert_html '<label for="filter">Hello World from @env</label>', source
end
def test_attributs_with_multiple_spaces
source = %q{label for='filter' class="test" = hello_world}
assert_html '<label class="test" for="filter">Hello World from @env</label>', source
end
def test_closed_tag_with_attributes
source = %q{
closed id="test" /
}
assert_html '<closed id="test" />', source, format: :xhtml
end
def test_closed_tag_with_attributes_and_parens
source = %q{
closed(id="test")/
}
assert_html '<closed id="test" />', source, format: :xhtml
end
def test_render_with_html_comments
source = %q{
p Hello
/! This is a comment
Another comment
p World
}
assert_html "<p>Hello</p><!--This is a comment\n\nAnother comment--><p>World</p>", source
end
def test_render_with_html_conditional_and_tag
source = %q{
/[ if IE ]
p Get a better browser.
}
assert_html "<!--[if IE]><p>Get a better browser.</p><![endif]-->", source
end
def test_render_with_html_conditional_and_method_output
source = %q{
/[ if IE ]
= message 'hello'
}
assert_html "<!--[if IE]>hello<![endif]-->", source
end
def test_multiline_attributes_with_method
source = %q{
p<id="marvin"
class="martian"
data-info="Illudium Q-36"> = output_number
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str
end
end
def test_multiline_attributes_with_text_on_same_line
source = %q{
p<id="marvin"
class="martian"
data-info="Illudium Q-36"> THE space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str
end
end
def test_multiline_attributes_with_nested_text
source = %q{
p<id="marvin"
class="martian"
data-info="Illudium Q-36">
| THE space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str
end
end
def test_multiline_attributes_with_dynamic_attr
source = %q{
p<id=id_helper
class="martian"
data-info="Illudium Q-36">
| THE space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="notice">THE space modulator</p>', str
end
end
def test_multiline_attributes_with_nested_tag
source = %q{
p<id=id_helper
class="martian"
data-info="Illudium Q-36">
span.emphasis THE
| space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="notice"><span class="emphasis">THE</span> space modulator</p>', str
end
end
def test_multiline_attributes_with_nested_text_and_extra_indentation
source = %q{
li< id="myid"
class="myclass"
data-info="myinfo">
a href="link" My Link
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<li class="myclass" data-info="myinfo" id="myid"><a href="link">My Link</a></li>', str
end
end
def test_block_expansion_support
source = %q{
ul
li.first: a href='a' foo
li: a href='b' bar
li.last: a href='c' baz
}
assert_html %{<ul><li class=\"first\"><a href=\"a\">foo</a></li><li><a href=\"b\">bar</a></li><li class=\"last\"><a href=\"c\">baz</a></li></ul>}, source
end
def test_block_expansion_class_attributes
source = %q{
.a: .b: #c d
}
assert_html %{<div class="a"><div class="b"><div id="c">d</div></div></div>}, source
end
def test_block_expansion_nesting
source = %q{
html: body: .content
| Text
}
assert_html %{<html><body><div class=\"content\">Text</div></body></html>}, source
end
def test_eval_attributes_once
source = %q{
input[value=succ_x]
input[value=succ_x]
}
assert_html %{<input value="1" /><input value="2" />}, source
end
def test_html_line_indicator
source = %q{
<html>
head
meta name="keywords" content=hello_world
- if true
<p>#{hello_world}</p>
span = hello_world
</html>
}
assert_html '<html><head><meta content="Hello World from @env" name="keywords" /></head><p>Hello World from @env</p><span>Hello World from @env</span></html>', source
end
end
Fix test case for issue #82 (custom attribute list delimiters / closed tags)
require 'helper'
class TestSlimHtmlStructure < TestSlim
def test_simple_render
# Keep the trailing space behind "body "!
source = %q{
html
head
title Simple Test Title
body
p Hello World, meet Slim.
}
assert_html '<html><head><title>Simple Test Title</title></head><body><p>Hello World, meet Slim.</p></body></html>', source
end
def test_relaxed_indentation_of_first_line
source = %q{
p
.content
}
assert_html "<p><div class=\"content\"></div></p>", source
end
def test_html_tag_with_text_and_empty_line
source = %q{
p Hello
p World
}
assert_html "<p>Hello</p><p>World</p>", source
end
def test_html_namespaces
source = %q{
html:body
html:p html:id="test" Text
}
assert_html '<html:body><html:p html:id="test">Text</html:p></html:body>', source
end
def test_doctype
source = %q{
doctype 1.1
html
}
assert_html '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"><html></html>', source, format: :xhtml
end
def test_doctype_new_syntax
source = %q{
doctype 5
html
}
assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml
end
def test_doctype_new_syntax_html5
source = %q{
doctype html
html
}
assert_html '<!DOCTYPE html><html></html>', source, format: :xhtml
end
def test_render_with_shortcut_attributes
source = %q{
h1#title This is my title
#notice.hello.world
= hello_world
}
assert_html '<h1 id="title">This is my title</h1><div class="hello world" id="notice">Hello World from @env</div>', source
end
def test_render_with_overwritten_default_tag
source = %q{
#notice.hello.world
= hello_world
}
assert_html '<section class="hello world" id="notice">Hello World from @env</section>', source, default_tag: 'section'
end
def test_render_with_custom_shortcut
source = %q{
#notice.hello.world@test
= hello_world
@abc
= hello_world
}
assert_html '<div class="hello world" id="notice" role="test">Hello World from @env</div><section role="abc">Hello World from @env</section>', source, shortcut: {'#' => {attr: 'id'}, '.' => {attr: 'class'}, '@' => {tag: 'section', attr: 'role'}}
end
def test_render_with_custom_array_shortcut
source = %q{
#user@.admin Daniel
}
assert_html '<div class="admin" id="user" role="admin">Daniel</div>', source, shortcut: {'#' => {attr: 'id'}, '.' => {attr: 'class'}, '@' => {attr: 'role'}, '@.' => {attr: ['class', 'role']}}
end
def test_render_with_custom_shortcut_and_additional_attrs
source = %q{
^items
== "[{'title':'item0'},{'title':'item1'},{'title':'item2'},{'title':'item3'},{'title':'item4'}]"
}
assert_html '<script data-binding="items" type="application/json">[{\'title\':\'item0\'},{\'title\':\'item1\'},{\'title\':\'item2\'},{\'title\':\'item3\'},{\'title\':\'item4\'}]</script>',
source, shortcut: {'^' => {tag: 'script', attr: 'data-binding', additional_attrs: { type: "application/json" }}}
end
def test_render_with_text_block
source = %q{
p
|
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
}
assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p>', source
end
def test_render_with_text_block_with_subsequent_markup
source = %q{
p
|
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
p Some more markup
}
assert_html '<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p><p>Some more markup</p>', source
end
def test_render_with_text_block_with_trailing_whitespace
source = %q{
' this is
a link to
a href="link" page
}
assert_html "this is\na link to <a href=\"link\">page</a>", source
end
def test_nested_text
source = %q{
p
|
This is line one.
This is line two.
This is line three.
This is line four.
p This is a new paragraph.
}
assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.</p><p>This is a new paragraph.</p>", source
end
def test_nested_text_with_nested_html_one_same_line
source = %q{
p
| This is line one.
This is line two.
span.bold This is a bold line in the paragraph.
| This is more content.
}
assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source
end
def test_nested_text_with_nested_html_one_same_line2
source = %q{
p
|This is line one.
This is line two.
span.bold This is a bold line in the paragraph.
| This is more content.
}
assert_html "<p>This is line one.\n This is line two.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source
end
def test_nested_text_with_nested_html
source = %q{
p
|
This is line one.
This is line two.
This is line three.
This is line four.
span.bold This is a bold line in the paragraph.
| This is more content.
}
assert_html "<p>This is line one.\n This is line two.\n This is line three.\n This is line four.<span class=\"bold\">This is a bold line in the paragraph.</span> This is more content.</p>", source
end
def test_simple_paragraph_with_padding
source = %q{
p There will be 3 spaces in front of this line.
}
assert_html '<p> There will be 3 spaces in front of this line.</p>', source
end
def test_paragraph_with_nested_text
source = %q{
p This is line one.
This is line two.
}
assert_html "<p>This is line one.\n This is line two.</p>", source
end
def test_paragraph_with_padded_nested_text
source = %q{
p This is line one.
This is line two.
}
assert_html "<p> This is line one.\n This is line two.</p>", source
end
def test_paragraph_with_attributes_and_nested_text
source = %q{
p#test class="paragraph" This is line one.
This is line two.
}
assert_html "<p class=\"paragraph\" id=\"test\">This is line one.\nThis is line two.</p>", source
end
def test_relaxed_text_indentation
source = %q{
p
| text block
text
line3
}
assert_html "<p>text block\ntext\n line3</p>", source
end
def test_output_code_with_leading_spaces
source = %q{
p= hello_world
p = hello_world
p = hello_world
}
assert_html '<p>Hello World from @env</p><p>Hello World from @env</p><p>Hello World from @env</p>', source
end
def test_single_quoted_attributes
source = %q{
p class='underscored_class_name' = output_number
}
assert_html '<p class="underscored_class_name">1337</p>', source
end
def test_nonstandard_attributes
source = %q{
p id="dashed-id" class="underscored_class_name" = output_number
}
assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source
end
def test_nonstandard_shortcut_attributes
source = %q{
p#dashed-id.underscored_class_name = output_number
}
assert_html '<p class="underscored_class_name" id="dashed-id">1337</p>', source
end
def test_dashed_attributes
source = %q{
p data-info="Illudium Q-36" = output_number
}
assert_html '<p data-info="Illudium Q-36">1337</p>', source
end
def test_dashed_attributes_with_shortcuts
source = %q{
p#marvin.martian data-info="Illudium Q-36" = output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_parens_around_attributes
source = %q{
p(id="marvin" class="martian" data-info="Illudium Q-36") = output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_square_brackets_around_attributes
source = %q{
p[id="marvin" class="martian" data-info="Illudium Q-36"] = output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_parens_around_attributes_with_equal_sign_snug_to_right_paren
source = %q{
p(id="marvin" class="martian" data-info="Illudium Q-36")= output_number
}
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', source
end
def test_default_attr_delims_option
source = %q{
p<id="marvin" class="martian" data-info="Illudium Q-36">= output_number
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str
end
end
def test_custom_attr_delims_option
source = %q{
p { foo="bar" }
}
assert_html '<p foo="bar"></p>', source
assert_html '<p foo="bar"></p>', source, attr_list_delims: {'{' => '}'}
assert_html '<p>{ foo="bar" }</p>', source, attr_list_delims: {'(' => ')', '[' => ']'}
end
def test_closed_tag
source = %q{
closed/
}
assert_html '<closed />', source, format: :xhtml
end
def test_custom_attr_list_delims_option
source = %q{
p { foo="bar" x=(1+1) }
p < x=(1+1) > Hello
}
assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source
assert_html '<p foo="bar" x="2"></p><p>< x=(1+1) > Hello</p>', source, attr_list_delims: {'{' => '}'}
assert_html '<p>{ foo="bar" x=(1+1) }</p><p x="2">Hello</p>', source, attr_list_delims: {'<' => '>'}, code_attr_delims: { '(' => ')' }
end
# NOTE(review): duplicate of test_closed_tag defined above — this second
# definition silently replaces the first; Ruby emits a method-redefinition
# warning. Rename or delete one of the two.
def test_closed_tag
source = %q{
closed/
}
assert_html '<closed />', source, format: :xhtml
end
# Braces with inner padding, `=` snug to the closing brace.
# NOTE(review): "attributs" is a typo for "attributes" (also in the two
# methods below); harmless but worth fixing in a dedicated rename commit.
def test_attributs_with_parens_and_spaces
source = %q{label{ for='filter' }= hello_world}
assert_html '<label for="filter">Hello World from @env</label>', source
end
# Same as above, but with a space between the closing brace and `=`.
def test_attributs_with_parens_and_spaces2
source = %q{label{ for='filter' } = hello_world}
assert_html '<label for="filter">Hello World from @env</label>', source
end
# Undelimited attribute list separated by spaces, followed by output code.
def test_attributs_with_multiple_spaces
source = %q{label for='filter' class="test" = hello_world}
assert_html '<label class="test" for="filter">Hello World from @env</label>', source
end
# Self-closing tag with an undelimited attribute list.
def test_closed_tag_with_attributes
source = %q{
closed id="test" /
}
assert_html '<closed id="test" />', source, format: :xhtml
end
# Self-closing tag with a parenthesized attribute list.
def test_closed_tag_with_attributes_and_parens
source = %q{
closed(id="test")/
}
assert_html '<closed id="test" />', source, format: :xhtml
end
# `/!` emits an HTML comment; continuation lines are folded into it.
def test_render_with_html_comments
source = %q{
p Hello
/! This is a comment
Another comment
p World
}
assert_html "<p>Hello</p><!--This is a comment\n\nAnother comment--><p>World</p>", source
end
# `/[ if IE ]` wraps nested content in an IE conditional comment.
def test_render_with_html_conditional_and_tag
source = %q{
/[ if IE ]
p Get a better browser.
}
assert_html "<!--[if IE]><p>Get a better browser.</p><![endif]-->", source
end
# Conditional comment wrapping ruby output (`=`) rather than a tag.
def test_render_with_html_conditional_and_method_output
source = %q{
/[ if IE ]
= message 'hello'
}
assert_html "<!--[if IE]>hello<![endif]-->", source
end
# Attribute list spanning several lines, followed by output code,
# for every default delimiter pair.
def test_multiline_attributes_with_method
source = %q{
p<id="marvin"
class="martian"
data-info="Illudium Q-36"> = output_number
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">1337</p>', str
end
end
# Multiline attribute list with literal text after the closing delimiter.
def test_multiline_attributes_with_text_on_same_line
source = %q{
p<id="marvin"
class="martian"
data-info="Illudium Q-36"> THE space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str
end
end
# Multiline attribute list with the text nested on the following line (`|`).
def test_multiline_attributes_with_nested_text
source = %q{
p<id="marvin"
class="martian"
data-info="Illudium Q-36">
| THE space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="marvin">THE space modulator</p>', str
end
end
# Multiline attribute list where id comes from a helper method
# (id_helper evaluates to "notice" in this suite's environment).
def test_multiline_attributes_with_dynamic_attr
source = %q{
p<id=id_helper
class="martian"
data-info="Illudium Q-36">
| THE space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="notice">THE space modulator</p>', str
end
end
# Multiline attribute list followed by a nested child tag plus text.
def test_multiline_attributes_with_nested_tag
source = %q{
p<id=id_helper
class="martian"
data-info="Illudium Q-36">
span.emphasis THE
| space modulator
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<p class="martian" data-info="Illudium Q-36" id="notice"><span class="emphasis">THE</span> space modulator</p>', str
end
end
# Continuation lines of the attribute list are indented deeper than the tag.
def test_multiline_attributes_with_nested_text_and_extra_indentation
source = %q{
li< id="myid"
class="myclass"
data-info="myinfo">
a href="link" My Link
}
Slim::Parser.options[:attr_list_delims].each do |k,v|
str = source.sub('<',k).sub('>',v)
assert_html '<li class="myclass" data-info="myinfo" id="myid"><a href="link">My Link</a></li>', str
end
end
# `tag: child` block-expansion shorthand inside a list.
def test_block_expansion_support
source = %q{
ul
li.first: a href='a' foo
li: a href='b' bar
li.last: a href='c' baz
}
assert_html %{<ul><li class=\"first\"><a href=\"a\">foo</a></li><li><a href=\"b\">bar</a></li><li class=\"last\"><a href=\"c\">baz</a></li></ul>}, source
end
# Block expansion chaining class/id shortcuts only (no tag names).
def test_block_expansion_class_attributes
source = %q{
.a: .b: #c d
}
assert_html %{<div class="a"><div class="b"><div id="c">d</div></div></div>}, source
end
# Block expansion may still take an indented child block.
def test_block_expansion_nesting
source = %q{
html: body: .content
| Text
}
assert_html %{<html><body><div class=\"content\">Text</div></body></html>}, source
end
# Attribute code is evaluated exactly once per occurrence: succ_x yields
# 1 then 2, proving no double evaluation per tag.
def test_eval_attributes_once
source = %q{
input[value=succ_x]
input[value=succ_x]
}
assert_html %{<input value="1" /><input value="2" />}, source
end
# Lines starting with '<' pass through as raw HTML; #{} interpolation and
# nested slim still work inside/around them.
def test_html_line_indicator
source = %q{
<html>
head
meta name="keywords" content=hello_world
- if true
<p>#{hello_world}</p>
span = hello_world
</html>
}
assert_html '<html><head><meta content="Hello World from @env" name="keywords" /></head><p>Hello World from @env</p><span>Hello World from @env</span></html>', source
end
end
|
# Mixin that stores a URL and eagerly fetches its response on construction.
# NOTE(review): defining #initialize in a mixin replaces the includer's
# constructor — confirm that is intended for every including class.
module TrySailBlogNotification
module HTTP
def initialize(url)
@url = url
# Fetched once, at construction time.
@response = get_response
end
attr_reader :url, :response
private
# Stub: currently returns nil. Presumably meant to perform the HTTP
# request — TODO confirm against callers before relying on #response.
def get_response
end
end
end
Require the net/http, net/https, and uri standard libraries.
require 'net/http'
require 'net/https'
require 'uri'
# Mixin that stores a URL and eagerly fetches its response on construction.
# Relies on net/http, net/https and uri being required by the file header.
module TrySailBlogNotification
module HTTP
def initialize(url)
@url = url
# Fetched once, at construction time.
@response = get_response
end
attr_reader :url, :response
private
# Stub: currently returns nil. Presumably meant to perform the HTTP
# request — TODO confirm against callers before relying on #response.
def get_response
end
end
end
require 'much-timeout'
require 'concurrent'
require 'tumugi'
require 'tumugi/error'
# Route concurrent-ruby's internal logging through the stdlib Logger at DEBUG.
Concurrent.use_stdlib_logger(Logger::DEBUG)
module Tumugi
  module Executor
    # Runs a task DAG on a fixed-size thread pool, honoring per-task timeouts
    # and the task state machine (start/complete/pend/fail/skip transitions).
    class LocalExecutor
      # dag::        topologically sortable task graph; the last task in
      #              topological order is treated as the "main" task.
      # logger::     defaults to the shared Tumugi::Logger instance.
      # worker_num:: thread pool size (min == max, so the pool is fixed).
      def initialize(dag, logger=nil, worker_num: 1)
        @dag = dag
        @main_task = dag.tsort.last
        @logger = logger || Tumugi::Logger.instance
        @options = { worker_num: worker_num }
        @mutex = Mutex.new
      end

      # Executes every task in the DAG; returns true iff all tasks succeeded.
      def execute
        pool = Concurrent::ThreadPoolExecutor.new(
          min_threads: @options[:worker_num],
          max_threads: @options[:worker_num]
        )
        setup_task_queue(@dag)
        loop do
          task = dequeue_task
          break if task.nil?
          Concurrent::Future.execute(executor: pool) do
            if !task.runnable?(Time.now)
              # FIX: demoted from `info` to `debug` — a waiting task is
              # re-queued and re-checked many times, which flooded the info
              # log with unusable "not_runnable" lines.
              debug { "not_runnable: #{task.id}" }
              enqueue_task(task)
            else
              begin
                info "start: #{task.id}"
                task.trigger!(:start)
                MuchTimeout.optional_timeout(task_timeout(task), Tumugi::TimeoutError) do
                  task.run
                end
                task.trigger!(:complete)
                info "#{task.state}: #{task.id}"
              rescue => e
                handle_error(task, e)
              end
            end
          end
        end
        pool.shutdown
        pool.wait_for_termination
        @dag.tsort.all? { |t| t.success? }
      end

      private

      # Effective timeout for a task; 0 means "no timeout" (legacy behavior).
      def task_timeout(task)
        timeout = task.timeout || Tumugi.config.timeout
        timeout = nil if !timeout.nil? && timeout == 0 # for backward compatibility
        timeout
      end

      # Seeds the work queue with every task in topological order.
      def setup_task_queue(dag)
        @queue = []
        dag.tsort.each { |t| enqueue_task(t) }
        @queue
      end

      # Pops the next runnable task. Polls (0.1s sleep) while the queue is
      # empty until the main task finishes; skips tasks that are already
      # completed or whose prerequisite tasks failed.
      def dequeue_task
        loop do
          task = @mutex.synchronize {
            debug { "queue: #{@queue.map(&:id)}" }
            @queue.shift
          }
          if task.nil?
            if @main_task.finished?
              break nil
            else
              sleep(0.1)
            end
          else
            debug { "dequeue: #{task.id}" }
            if task.requires_failed?
              task.trigger!(:requires_fail)
              info "#{task.state}: #{task.id} has failed requires task"
            elsif task.completed?
              task.trigger!(:skip)
              info "#{task.state}: #{task.id} is already completed"
            else
              break task
            end
          end
        end
      end

      def enqueue_task(task)
        debug { "enqueue: #{task.id}" }
        @mutex.synchronize { @queue.push(task) }
      end

      # Re-queues the task while retries remain; otherwise marks it failed.
      def handle_error(task, err)
        if task.retry
          task.trigger!(:pend)
          @logger.error "#{err.class}: '#{err.message}' - #{task.tries} tries and wait #{task.retry_interval} seconds until the next try."
          enqueue_task(task)
        else
          task.trigger!(:fail)
          @logger.error "#{err.class}: '#{err.message}' - #{task.tries} tries and reached max retry count, so task #{task.id} failed."
          info "#{task.state}: #{task.id}"
          @logger.error "#{err.message}"
          @logger.debug { err.backtrace.join("\n") }
        end
      end

      # Log helpers that tag every message with the executing thread id.
      def info(message)
        @logger.info "#{message}, thread: #{Thread.current.object_id}"
      end

      def debug(&block)
        @logger.debug { "#{block.call}, thread: #{Thread.current.object_id}" }
      end
    end
  end
end
Suppress unusable info log
require 'much-timeout'
require 'concurrent'
require 'tumugi'
require 'tumugi/error'
module Tumugi
module Executor
# Runs a task DAG on a fixed-size thread pool, honoring per-task timeouts
# and the task state machine (start/complete/pend/fail/skip transitions).
class LocalExecutor
# dag: topologically sortable task graph; its last task in topological
# order is treated as the "main" task. worker_num fixes the pool size.
def initialize(dag, logger=nil, worker_num: 1)
@dag = dag
@main_task = dag.tsort.last
@logger = logger || Tumugi::Logger.instance
@options = { worker_num: worker_num }
@mutex = Mutex.new
end
# Executes every task in the DAG; returns true iff all tasks succeeded.
def execute
pool = Concurrent::ThreadPoolExecutor.new(
min_threads: @options[:worker_num],
max_threads: @options[:worker_num]
)
setup_task_queue(@dag)
loop do
task = dequeue_task
break if task.nil?
Concurrent::Future.execute(executor: pool) do
if !task.runnable?(Time.now)
# debug (not info): a waiting task is re-queued/re-checked many times.
debug { "not_runnable: #{task.id}" }
enqueue_task(task)
else
begin
info "start: #{task.id}"
task.trigger!(:start)
MuchTimeout.optional_timeout(task_timeout(task), Tumugi::TimeoutError) do
task.run
end
task.trigger!(:complete)
info "#{task.state}: #{task.id}"
rescue => e
handle_error(task, e)
end
end
end
end
pool.shutdown
pool.wait_for_termination
@dag.tsort.all? { |t| t.success? }
end
private
# Effective timeout for a task; 0 means "no timeout" (legacy behavior).
def task_timeout(task)
timeout = task.timeout || Tumugi.config.timeout
timeout = nil if !timeout.nil? && timeout == 0 # for backward compatibility
timeout
end
# Seeds the work queue with every task in topological order.
def setup_task_queue(dag)
@queue = []
dag.tsort.each { |t| enqueue_task(t) }
@queue
end
# Pops the next runnable task. Polls (0.1s sleep) while the queue is empty
# until the main task finishes; skips completed tasks and tasks whose
# prerequisite tasks failed.
def dequeue_task
loop do
task = @mutex.synchronize {
debug { "queue: #{@queue.map(&:id)}" }
@queue.shift
}
if task.nil?
if @main_task.finished?
break nil
else
sleep(0.1)
end
else
debug { "dequeue: #{task.id}" }
if task.requires_failed?
task.trigger!(:requires_fail)
info "#{task.state}: #{task.id} has failed requires task"
elsif task.completed?
task.trigger!(:skip)
info "#{task.state}: #{task.id} is already completed"
else
break task
end
end
end
end
def enqueue_task(task)
debug { "enqueue: #{task.id}" }
@mutex.synchronize { @queue.push(task) }
end
# Re-queues the task while retries remain; otherwise marks it failed.
def handle_error(task, err)
if task.retry
task.trigger!(:pend)
@logger.error "#{err.class}: '#{err.message}' - #{task.tries} tries and wait #{task.retry_interval} seconds until the next try."
enqueue_task(task)
else
task.trigger!(:fail)
@logger.error "#{err.class}: '#{err.message}' - #{task.tries} tries and reached max retry count, so task #{task.id} failed."
info "#{task.state}: #{task.id}"
@logger.error "#{err.message}"
@logger.debug { err.backtrace.join("\n") }
end
end
# Log helpers that tag every message with the executing thread id.
def info(message)
@logger.info "#{message}, thread: #{Thread.current.object_id}"
end
def debug(&block)
@logger.debug { "#{block.call}, thread: #{Thread.current.object_id}" }
end
end
end
end
|
# The PathRenderer is a simple way to render a string of the contents of a view
# at the passed in path.
require 'volt/page/bindings/view_binding/view_lookup_for_path'
require 'volt/page/bindings/view_binding/controller_handler'
require 'volt/page/string_template_renderer'
module Volt
# Renders the view at a given template path to an HTML string, outside the
# DOM. Intended for server-side/string rendering (e.g. emails).
class PathStringRenderer
attr_reader :html
# path:             template path to render
# attrs:            attributes exposed to the controller via a SubContext
# page:             defaults to the global $page
# render_from_path: lookup origin; defaults to the main body section
def initialize(path, attrs=nil, page=nil, render_from_path=nil)
# use the global page if one is not passed in
page ||= $page
# where to do the path lookup from
render_from_path ||= "main/main/main/body"
# Make path into a full path
@view_lookup = Volt::ViewLookupForPath.new(page, render_from_path)
# NOTE(review): full_path may come back nil for an unknown path — that
# case is not handled here and will fail later in StringTemplateRenderer.
full_path, controller_path = @view_lookup.path_for_template(path, nil)
controller_class, action = ControllerHandler.get_controller_and_action(controller_path)
controller = controller_class.new(SubContext.new(attrs, nil, true))
renderer = StringTemplateRenderer.new(page, controller, full_path)
@html = renderer.html
# remove when done
renderer.remove
end
end
end
Make PathStringRenderer use the attrs hash as the model, so you can access properties as models in the view.
# The PathRenderer is a simple way to render a string of the contents of a view
# at the passed in path.
require 'volt/page/bindings/view_binding/view_lookup_for_path'
require 'volt/page/bindings/view_binding/controller_handler'
require 'volt/page/string_template_renderer'
module Volt
  # Raised when a template path cannot be resolved to a view.
  # FIX: inherits from StandardError (was Exception) so it is caught by a
  # bare `rescue`, per Ruby convention; existing `rescue ViewLookupException`
  # callers are unaffected.
  class ViewLookupException < StandardError; end

  # Renders the view at a given template path to an HTML string, outside the
  # DOM. The attrs hash becomes the controller's model (wrapped in a
  # SubContext), so view bindings can read its keys like model properties.
  class PathStringRenderer
    attr_reader :html

    # path:             template path to render
    # attrs:            attributes exposed to the view as the model
    # page:             defaults to the global $page
    # render_from_path: lookup origin; defaults to the main body section
    #
    # Raises ViewLookupException when the path does not resolve to a view.
    def initialize(path, attrs=nil, page=nil, render_from_path=nil)
      # use the global page if one is not passed in
      page ||= $page

      # where to do the path lookup from
      render_from_path ||= "main/main/main/body"

      # Make path into a full path
      @view_lookup = Volt::ViewLookupForPath.new(page, render_from_path)
      full_path, controller_path = @view_lookup.path_for_template(path, nil)

      # Fail loudly instead of crashing later inside the renderer.
      if full_path.nil?
        raise ViewLookupException, "Unable to find view at `#{path}`"
      end

      controller_class, action = ControllerHandler.get_controller_and_action(controller_path)

      # Assign the attrs as the controller's model (rather than passing them
      # as constructor args), so the view can bind against them.
      controller = controller_class.new
      controller.model = SubContext.new(attrs, nil, true)

      renderer = StringTemplateRenderer.new(page, controller, full_path)
      @html = renderer.html

      # remove when done
      renderer.remove
    end
  end
end
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "axlsx_rails/version"

# Gem metadata and dependency declarations for caxlsx_rails.
Gem::Specification.new do |spec|
  spec.name        = "caxlsx_rails"
  spec.version     = AxlsxRails::VERSION
  spec.authors     = ["Noel Peden"]
  spec.email       = ["noel@peden.biz"]
  spec.homepage    = "https://github.com/caxlsx/caxlsx_rails"
  spec.summary     = "A simple rails plugin to provide an xlsx renderer using the caxlsx gem."
  spec.description = "Caxlsx_Rails provides an Caxlsx renderer so you can move all your spreadsheet code from your controller into view files. Partials are supported so you can organize any code into reusable chunks (e.g. cover sheets, common styling, etc.) You can use it with acts_as_caxlsx, placing the to_xlsx call in a view and adding ':package => xlsx_package' to the parameter list. Now you can keep your controllers thin!"

  # Ship app/config/db/lib plus top-level capitalized files, minus the Guardfile.
  spec.files      = Dir["{app,config,db,lib}/**/*"] + Dir['[A-Z]*'] - ['Guardfile']
  spec.test_files = Dir["spec/**/*"] + ['Guardfile']

  spec.add_dependency "actionpack", ">= 3.1"
  spec.add_dependency "caxlsx", ">= 3.0"

  # Development-only tooling (no version constraints).
  %w[bundler rake rspec-rails guard-rspec capybara roo rubyzip
     growl rb-fsevent coveralls pry pry-nav].each do |dev_gem|
    spec.add_development_dependency dev_gem
  end
end
Do not publish spec files to rubygems
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "axlsx_rails/version"

# Gem metadata and dependency declarations for caxlsx_rails.
Gem::Specification.new do |spec|
  spec.name        = "caxlsx_rails"
  spec.version     = AxlsxRails::VERSION
  spec.authors     = ["Noel Peden"]
  spec.email       = ["noel@peden.biz"]
  spec.homepage    = "https://github.com/caxlsx/caxlsx_rails"
  spec.summary     = "A simple rails plugin to provide an xlsx renderer using the caxlsx gem."
  spec.description = "Caxlsx_Rails provides an Caxlsx renderer so you can move all your spreadsheet code from your controller into view files. Partials are supported so you can organize any code into reusable chunks (e.g. cover sheets, common styling, etc.) You can use it with acts_as_caxlsx, placing the to_xlsx call in a view and adding ':package => xlsx_package' to the parameter list. Now you can keep your controllers thin!"

  # Ship app/config/db/lib plus top-level capitalized files; keep specs and
  # the Guardfile out of the published package.
  spec.files      = Dir["{app,config,db,lib}/**/*"] + Dir['[A-Z]*'] - Dir["spec/**/*"] - ['Guardfile']
  spec.test_files = Dir["spec/**/*"] + ['Guardfile']

  spec.add_dependency "actionpack", ">= 3.1"
  spec.add_dependency "caxlsx", ">= 3.0"

  # Development-only tooling (no version constraints).
  %w[bundler rake rspec-rails guard-rspec capybara roo rubyzip
     growl rb-fsevent coveralls pry pry-nav].each do |dev_gem|
    spec.add_development_dependency dev_gem
  end
end
|
# encoding: utf-8
module Watir
  class Table < HTMLElement
    #
    # The table as a 2D Array of strings with the text of each cell.
    #
    # @return [Array<Array<String>>]
    #
    def to_a
      assert_exists
      trs.map do |table_row|
        table_row.tds.map(&:text)
      end
    end

    #
    # Get the n'th row of this table.
    #
    # @return Watir::TableRow
    #
    def [](idx)
      row(:index, idx)
    end
  end # Table
end # Watir
Table#to_a now takes <th> cells into account.
# encoding: utf-8
module Watir
  class Table < HTMLElement
    #
    # The table as a 2D Array of strings with the text of each cell
    # (both <td> and <th> cells are included).
    #
    # @return [Array<Array<String>>]
    #
    def to_a
      assert_exists
      trs.map do |table_row|
        cells = table_row.wd.find_elements(:xpath, ".//td | .//th")
        cells.map(&:text)
      end
    end

    #
    # Get the n'th row of this table.
    #
    # @return Watir::TableRow
    #
    def [](idx)
      row(:index, idx)
    end
  end # Table
end # Watir
|
# WebMock adapter for curb: wraps Curl::Easy so requests hit registered
# stubs instead of the network, while recording the HTTP verb and body
# needed to build a request signature.
if defined?(Curl)
module Curl
class Easy
def http_with_webmock(method)
@webmock_method = method
curb_or_webmock do
http_without_webmock(method)
end
end
alias_method :http_without_webmock, :http
alias_method :http, :http_with_webmock
# Generate verb-specific wrappers (http_get etc.) that record the verb.
%w[ get head delete ].each do |verb|
define_method "http_#{verb}_with_webmock" do
@webmock_method = verb
curb_or_webmock do
send( "http_#{verb}_without_webmock" )
end
end
alias_method "http_#{verb}_without_webmock", "http_#{verb}"
alias_method "http_#{verb}", "http_#{verb}_with_webmock"
end
def http_put_with_webmock data
@webmock_method = :put
@put_data = data
curb_or_webmock do
http_put_without_webmock(data)
end
end
alias_method :http_put_without_webmock, :http_put
alias_method :http_put, :http_put_with_webmock
def http_post_with_webmock data
@webmock_method = :post
@post_body = data
curb_or_webmock do
http_post_without_webmock(data)
end
end
alias_method :http_post_without_webmock, :http_post
alias_method :http_post, :http_post_with_webmock
# Core dispatch: serve a registered stub, fall through to the real
# request when net connect is allowed, or raise.
def curb_or_webmock
request_signature = build_request_signature
WebMock::RequestRegistry.instance.requested_signatures.put(request_signature)
if WebMock.registered_request?(request_signature)
webmock_response = WebMock.response_for_request(request_signature)
build_curb_response(webmock_response)
WebMock::CallbackRegistry.invoke_callbacks(
{:lib => :curb}, request_signature, webmock_response)
invoke_curb_callbacks
true
elsif WebMock.net_connect_allowed?(request_signature.uri)
res = yield
if WebMock::CallbackRegistry.any_callbacks?
webmock_response = build_webmock_response
WebMock::CallbackRegistry.invoke_callbacks(
{:lib => :curb, :real_request => true}, request_signature,
webmock_response)
end
res
else
raise WebMock::NetConnectNotAllowedError.new(request_signature)
end
end
def perform_with_webmock
@webmock_method ||= :get
curb_or_webmock do
perform_without_webmock
end
end
alias :perform_without_webmock :perform
alias :perform :perform_with_webmock
# Builds the WebMock signature from the recorded verb, URL, auth and body.
def build_request_signature
method = @webmock_method.to_s.downcase.to_sym
uri = WebMock::Util::URI.heuristic_parse(self.url)
# NOTE(review): gsub is given a String here, so "[^:]//" is matched
# literally, not as a regex — presumably %r{([^:])//} was intended to
# collapse duplicate slashes; confirm before changing.
uri.path = uri.normalized_path.gsub("[^:]//","/")
uri.user = self.username
uri.password = self.password
request_body = case method
when :post
self.post_body || @post_body
when :put
@put_data
else
nil
end
request_signature = WebMock::RequestSignature.new(
method,
uri.to_s,
:body => request_body,
:headers => self.headers
)
request_signature
end
# Writer wrappers: record the implied verb/body alongside curb's own state.
def put_data_with_webmock= data
@webmock_method = :put
@put_data = data
self.put_data_without_webmock = data
end
alias_method :put_data_without_webmock=, :put_data=
alias_method :put_data=, :put_data_with_webmock=
def post_body_with_webmock= data
@webmock_method = :post
self.post_body_without_webmock = data
end
alias_method :post_body_without_webmock=, :post_body=
alias_method :post_body=, :post_body_with_webmock=
def delete_with_webmock= value
@webmock_method = :delete if value
self.delete_without_webmock = value
end
alias_method :delete_without_webmock=, :delete=
alias_method :delete=, :delete_with_webmock=
def head_with_webmock= value
@webmock_method = :head if value
self.head_without_webmock = value
end
alias_method :head_without_webmock=, :head=
alias_method :head=, :head_with_webmock=
# Copies a stubbed WebMock response into curb's body/status/header fields.
def build_curb_response(webmock_response)
raise Curl::Err::TimeoutError if webmock_response.should_timeout
webmock_response.raise_error_if_any
@body_str = webmock_response.body
@response_code = webmock_response.status[0]
@header_str = "HTTP/1.1 #{webmock_response.status[0]} #{webmock_response.status[1]}\r\n"
if webmock_response.headers
@header_str << webmock_response.headers.map do |k,v|
"#{k}: #{v.is_a?(Array) ? v.join(", ") : v}"
end.join("\r\n")
end
end
# Readers prefer the stubbed value, falling back to curb's own.
def body_str_with_webmock
@body_str || body_str_without_webmock
end
alias :body_str_without_webmock :body_str
alias :body_str :body_str_with_webmock
def response_code_with_webmock
@response_code || response_code_without_webmock
end
alias :response_code_without_webmock :response_code
alias :response_code :response_code_with_webmock
def header_str_with_webmock
@header_str || header_str_without_webmock
end
alias :header_str_without_webmock :header_str
alias :header_str :header_str_with_webmock
# Callback registration wrappers: keep a reference to each handler so
# invoke_curb_callbacks can fire them for stubbed responses.
def on_success_with_webmock &block
@on_success = block
on_success_without_webmock &block
end
alias :on_success_without_webmock :on_success
alias :on_success :on_success_with_webmock
def on_failure_with_webmock &block
@on_failure = block
on_failure_without_webmock &block
end
alias :on_failure_without_webmock :on_failure
alias :on_failure :on_failure_with_webmock
def on_header_with_webmock &block
@on_header = block
on_header_without_webmock &block
end
alias :on_header_without_webmock :on_header
alias :on_header :on_header_with_webmock
def on_body_with_webmock &block
@on_body = block
on_body_without_webmock &block
end
alias :on_body_without_webmock :on_body
alias :on_body :on_body_with_webmock
def on_complete_with_webmock &block
@on_complete = block
on_complete_without_webmock &block
end
alias :on_complete_without_webmock :on_complete
alias :on_complete :on_complete_with_webmock
def on_progress_with_webmock &block
@on_progress = block
on_progress_without_webmock &block
end
alias :on_progress_without_webmock :on_progress
alias :on_progress :on_progress_with_webmock
# Fires the recorded callbacks in curb-like order for a stubbed response.
# NOTE(review): here on_complete fires after on_success/on_failure; a later
# revision of this adapter moves it before them — confirm which order
# matches real curb before relying on it.
def invoke_curb_callbacks
@on_progress.call(0.0,1.0,0.0,1.0) if @on_progress
@on_header.call(self.header_str) if @on_header
@on_body.call(self.body_str) if @on_body
case response_code
when 200..299
@on_success.call(self) if @on_success
when 500..599
@on_failure.call(self, self.response_code) if @on_failure
end
@on_complete.call(self) if @on_complete
end
# Converts a real (passed-through) response into a WebMock::Response for
# the callback registry.
def build_webmock_response
status, headers = WebmockHelper.parse_header_string(self.header_str)
webmock_response = WebMock::Response.new
webmock_response.status = [self.response_code, status]
webmock_response.body = self.body_str
webmock_response.headers = headers
webmock_response
end
module WebmockHelper
# Borrowed from Patron:
# http://github.com/toland/patron/blob/master/lib/patron/response.rb
# Splits a raw header blob into [status message, headers hash]; repeated
# header names are collected into arrays.
def self.parse_header_string(header_string)
status, headers = nil, {}
header_string.split(/\r\n/).each do |header|
if header =~ %r|^HTTP/1.[01] \d\d\d (.*)|
status = $1
else
parts = header.split(':', 2)
unless parts.empty?
parts[1].strip! unless parts[1].nil?
if headers.has_key?(parts[0])
headers[parts[0]] = [headers[parts[0]]] unless headers[parts[0]].kind_of? Array
headers[parts[0]] << parts[1]
else
headers[parts[0]] = parts[1]
end
end
end
end
return status, headers
end
end
end
end
end
DRY and correct on_complete's callback order.
# WebMock adapter for curb: wraps Curl::Easy so requests hit registered
# stubs instead of the network, while recording the HTTP verb and body
# needed to build a request signature.
if defined?(Curl)
module Curl
class Easy
def http_with_webmock(method)
@webmock_method = method
curb_or_webmock do
http_without_webmock(method)
end
end
alias_method :http_without_webmock, :http
alias_method :http, :http_with_webmock
# Generate verb-specific wrappers (http_get etc.) that record the verb.
%w[ get head delete ].each do |verb|
define_method "http_#{verb}_with_webmock" do
@webmock_method = verb
curb_or_webmock do
send( "http_#{verb}_without_webmock" )
end
end
alias_method "http_#{verb}_without_webmock", "http_#{verb}"
alias_method "http_#{verb}", "http_#{verb}_with_webmock"
end
def http_put_with_webmock data
@webmock_method = :put
@put_data = data
curb_or_webmock do
http_put_without_webmock(data)
end
end
alias_method :http_put_without_webmock, :http_put
alias_method :http_put, :http_put_with_webmock
def http_post_with_webmock data
@webmock_method = :post
@post_body = data
curb_or_webmock do
http_post_without_webmock(data)
end
end
alias_method :http_post_without_webmock, :http_post
alias_method :http_post, :http_post_with_webmock
# Core dispatch: serve a registered stub, fall through to the real
# request when net connect is allowed, or raise.
def curb_or_webmock
request_signature = build_request_signature
WebMock::RequestRegistry.instance.requested_signatures.put(request_signature)
if WebMock.registered_request?(request_signature)
webmock_response = WebMock.response_for_request(request_signature)
build_curb_response(webmock_response)
WebMock::CallbackRegistry.invoke_callbacks(
{:lib => :curb}, request_signature, webmock_response)
invoke_curb_callbacks
true
elsif WebMock.net_connect_allowed?(request_signature.uri)
res = yield
if WebMock::CallbackRegistry.any_callbacks?
webmock_response = build_webmock_response
WebMock::CallbackRegistry.invoke_callbacks(
{:lib => :curb, :real_request => true}, request_signature,
webmock_response)
end
res
else
raise WebMock::NetConnectNotAllowedError.new(request_signature)
end
end
def perform_with_webmock
@webmock_method ||= :get
curb_or_webmock do
perform_without_webmock
end
end
alias :perform_without_webmock :perform
alias :perform :perform_with_webmock
# Builds the WebMock signature from the recorded verb, URL, auth and body.
def build_request_signature
method = @webmock_method.to_s.downcase.to_sym
uri = WebMock::Util::URI.heuristic_parse(self.url)
# NOTE(review): gsub is given a String here, so "[^:]//" is matched
# literally, not as a regex — presumably %r{([^:])//} was intended to
# collapse duplicate slashes; confirm before changing.
uri.path = uri.normalized_path.gsub("[^:]//","/")
uri.user = self.username
uri.password = self.password
request_body = case method
when :post
self.post_body || @post_body
when :put
@put_data
else
nil
end
request_signature = WebMock::RequestSignature.new(
method,
uri.to_s,
:body => request_body,
:headers => self.headers
)
request_signature
end
# Writer wrappers: record the implied verb/body alongside curb's own state.
def put_data_with_webmock= data
@webmock_method = :put
@put_data = data
self.put_data_without_webmock = data
end
alias_method :put_data_without_webmock=, :put_data=
alias_method :put_data=, :put_data_with_webmock=
def post_body_with_webmock= data
@webmock_method = :post
self.post_body_without_webmock = data
end
alias_method :post_body_without_webmock=, :post_body=
alias_method :post_body=, :post_body_with_webmock=
def delete_with_webmock= value
@webmock_method = :delete if value
self.delete_without_webmock = value
end
alias_method :delete_without_webmock=, :delete=
alias_method :delete=, :delete_with_webmock=
def head_with_webmock= value
@webmock_method = :head if value
self.head_without_webmock = value
end
alias_method :head_without_webmock=, :head=
alias_method :head=, :head_with_webmock=
# Copies a stubbed WebMock response into curb's body/status/header fields.
def build_curb_response(webmock_response)
raise Curl::Err::TimeoutError if webmock_response.should_timeout
webmock_response.raise_error_if_any
@body_str = webmock_response.body
@response_code = webmock_response.status[0]
@header_str = "HTTP/1.1 #{webmock_response.status[0]} #{webmock_response.status[1]}\r\n"
if webmock_response.headers
@header_str << webmock_response.headers.map do |k,v|
"#{k}: #{v.is_a?(Array) ? v.join(", ") : v}"
end.join("\r\n")
end
end
# Readers prefer the stubbed value, falling back to curb's own.
def body_str_with_webmock
@body_str || body_str_without_webmock
end
alias :body_str_without_webmock :body_str
alias :body_str :body_str_with_webmock
def response_code_with_webmock
@response_code || response_code_without_webmock
end
alias :response_code_without_webmock :response_code
alias :response_code :response_code_with_webmock
def header_str_with_webmock
@header_str || header_str_without_webmock
end
alias :header_str_without_webmock :header_str
alias :header_str :header_str_with_webmock
# DRY callback registration: each on_<name> wrapper stores the handler in
# @on_<name> so invoke_curb_callbacks can fire it for stubbed responses.
%w[ success failure header body complete progress ].each do |callback|
define_method "on_#{callback}_with_webmock" do |&block|
instance_variable_set( "@on_#{callback}", block )
send( "on_#{callback}_without_webmock", &block )
end
alias_method "on_#{callback}_without_webmock", "on_#{callback}"
alias_method "on_#{callback}", "on_#{callback}_with_webmock"
end
# Fires the recorded callbacks for a stubbed response; on_complete runs
# before the success/failure dispatch in this revision.
def invoke_curb_callbacks
@on_progress.call(0.0,1.0,0.0,1.0) if @on_progress
@on_header.call(self.header_str) if @on_header
@on_body.call(self.body_str) if @on_body
@on_complete.call(self) if @on_complete
case response_code
when 200..299
@on_success.call(self) if @on_success
when 500..599
@on_failure.call(self, self.response_code) if @on_failure
end
end
# Converts a real (passed-through) response into a WebMock::Response for
# the callback registry.
def build_webmock_response
status, headers = WebmockHelper.parse_header_string(self.header_str)
webmock_response = WebMock::Response.new
webmock_response.status = [self.response_code, status]
webmock_response.body = self.body_str
webmock_response.headers = headers
webmock_response
end
module WebmockHelper
# Borrowed from Patron:
# http://github.com/toland/patron/blob/master/lib/patron/response.rb
# Splits a raw header blob into [status message, headers hash]; repeated
# header names are collected into arrays.
def self.parse_header_string(header_string)
status, headers = nil, {}
header_string.split(/\r\n/).each do |header|
if header =~ %r|^HTTP/1.[01] \d\d\d (.*)|
status = $1
else
parts = header.split(':', 2)
unless parts.empty?
parts[1].strip! unless parts[1].nil?
if headers.has_key?(parts[0])
headers[parts[0]] = [headers[parts[0]]] unless headers[parts[0]].kind_of? Array
headers[parts[0]] << parts[1]
else
headers[parts[0]] = parts[1]
end
end
end
end
return status, headers
end
end
end
end
end
|
module YahooGeminiClient
  # Base class for enumerable API collections. Subclasses supply #all (used
  # by #each) and must override the private #base_uri.
  class Collection
    attr_reader :client

    include Enumerable

    def initialize(opts={})
      @client = opts[:client]
    end

    # Delegates request-header construction to the owning client.
    def api_request_headers
      client.api_request_headers
    end

    # Yields each member, memoizing the full list on first enumeration.
    def each
      @members ||= all
      @members.each { |member| yield member }
    end

    private

    # Performs an authenticated GET and returns the parsed :response payload;
    # raises with the response body on failure.
    def get(uri)
      response = HTTParty.get(uri, headers: api_request_headers)
      if response.success?
        JSON.parse(response.body).with_indifferent_access[:response]
      else
        # TODO testme
        # FIX: corrected "Reponse" -> "Response" in the error message.
        raise "Response Unsuccessful: #{response.body}"
      end
    end

    def member_uri(ids)
      GenerateMemberURI.execute(ids)
    end

    # Subclass responsibility.
    # FIX: corrected "overriden" -> "overridden" in the error message.
    def base_uri
      raise 'This must be overridden'
    end
  end
end
Make error message more accurate and descriptive.
module YahooGeminiClient
  # Base class for enumerable API collections. Subclasses supply #all (used
  # by #each) and must override the private #base_uri.
  class Collection
    attr_reader :client

    include Enumerable

    def initialize(opts={})
      @client = opts[:client]
    end

    # Delegates request-header construction to the owning client.
    def api_request_headers
      client.api_request_headers
    end

    # Yields each member, memoizing the full list on first enumeration.
    def each
      @members ||= all
      @members.each { |member| yield member }
    end

    private

    # Performs an authenticated GET and returns the parsed :response payload;
    # raises with the response body on failure.
    def get(uri)
      response = HTTParty.get(uri, headers: api_request_headers)
      if response.success?
        JSON.parse(response.body).with_indifferent_access[:response]
      else
        # TODO testme
        raise "GET Request Unsuccessful. Response: #{response.body}"
      end
    end

    def member_uri(ids)
      GenerateMemberURI.execute(ids)
    end

    # Subclass responsibility.
    # FIX: corrected "overriden" -> "overridden" in the error message.
    def base_uri
      raise 'This must be overridden'
    end
  end
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)

require "gri/version"

# Gem metadata for GRI.
Gem::Specification.new do |spec|
  spec.name        = "gri"
  spec.version     = GRI::VERSION
  spec.authors     = ["maebashi"]
  spec.homepage    = ""
  spec.summary     = %q{GRI}
  spec.description = %q{GRI}

  # Everything tracked by git, except the tmp/ scratch directory.
  spec.files       = `git ls-files`.split("\n").reject { |path| /^tmp/ =~ path }
  spec.test_files  = `git ls-files -- {test,spec,features}/*`.split("\n")
  spec.executables = `git ls-files -- bin/*`.split("\n").map { |path| File.basename(path) }
  spec.require_paths = ["lib"]

  # specify any dependencies here; for example:
  spec.add_runtime_dependency "rack"
end
Add msgpack as a development dependency.
$:.push File.expand_path("../lib", __FILE__)

require "gri/version"

# Gem metadata for GRI.
Gem::Specification.new do |spec|
  spec.name        = "gri"
  spec.version     = GRI::VERSION
  spec.authors     = ["maebashi"]
  spec.homepage    = ""
  spec.summary     = %q{GRI}
  spec.description = %q{GRI}

  # Everything tracked by git, except the tmp/ scratch directory.
  spec.files       = `git ls-files`.split("\n").reject { |path| /^tmp/ =~ path }
  spec.test_files  = `git ls-files -- {test,spec,features}/*`.split("\n")
  spec.executables = `git ls-files -- bin/*`.split("\n").map { |path| File.basename(path) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "msgpack"
  spec.add_runtime_dependency "rack"
end
|
require File.expand_path('../boot', __FILE__)

require 'rails'

# Load only the railties this dummy application actually exercises.
['action_controller', 'action_view', 'sprockets'].each do |component|
  require "#{component}/railtie"
end

require 'jquery-rails'
require 'honoka-rails'
require 'uglifier'
require 'sass-rails'
require 'coffee-rails'

module Dummy
  # Minimal host application used to exercise the asset pipeline in tests.
  class Application < Rails::Application
    config.assets.enabled = true if config.assets.respond_to?(:enabled)
    config.assets.precompile += %w[application.css application.js]

    config.to_prepare do
      next unless ENV['VERBOSE']
      STDERR.puts "Loaded Rails #{Rails::VERSION::STRING}, Sprockets #{Sprockets::VERSION}",
                  "Asset paths: #{Rails.application.config.assets.paths}"
    end
  end
end
Fix coding style.
require File.expand_path('../boot', __FILE__)

require 'rails'

# Load only the railties this dummy application actually exercises.
%w[action_controller action_view sprockets].each { |component| require "#{component}/railtie" }

require 'jquery-rails'
require 'honoka-rails'
require 'uglifier'
require 'sass-rails'
require 'coffee-rails'

module Dummy
  # Minimal host application used to exercise the asset pipeline in tests.
  class Application < Rails::Application
    config.assets.enabled = true if config.assets.respond_to?(:enabled)
    config.assets.precompile += %w[application.css application.js]

    config.to_prepare do
      next unless ENV['VERBOSE']
      STDERR.puts "Loaded Rails #{Rails::VERSION::STRING}, Sprockets #{Sprockets::VERSION}",
                  "Asset paths: #{Rails.application.config.assets.paths}"
    end
  end
end
|
#! /usr/bin/env ruby
#
# <script name>
#
# DESCRIPTION:
# Get time series values from Graphite and create events based on values
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: array_stats
#
# USAGE:
# #YELLOW
#
# NOTES:
#
# LICENSE:
# Copyright 2012 Ulf Mansson @ Recorded Future
# Modifications by Chris Jansen to support wildcard targets
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'socket'
require 'array_stats'
class Graphite < Sensu::Plugin::Check::CLI
option :host,
short: '-h HOST',
long: '--host HOST',
description: 'Graphite host to connect to, include port',
required: true
option :target,
description: 'The graphite metric name. Could be a comma separated list of metric names.',
short: '-t TARGET',
long: '--target TARGET',
required: true
option :complex_target,
description: 'Allows complex targets which contain functions. Disables splitting on comma.',
short: '-x',
long: '--complex_target',
default: false
option :period,
description: 'The period back in time to extract from Graphite and compare with. Use 24hours,2days etc, same format as in Graphite',
short: '-p PERIOD',
long: '--period PERIOD',
default: '2hours'
option :updated_since,
description: 'The graphite value should have been updated within UPDATED_SINCE seconds, default to 600 seconds',
short: '-u UPDATED_SINCE',
long: '--updated_since UPDATED_SINCE',
default: 600
option :acceptable_diff_percentage,
description: 'The acceptable diff from max values in percentage, used in check_function_increasing',
short: '-d ACCEPTABLE_DIFF_PERCENTAGE',
long: '--acceptable_diff_percentage ACCEPTABLE_DIFF_PERCENTAGE',
default: 0
# FIXME(review): this option is named :check_function_increasing with short
# flag -i, but the long flag reads --check_function_decreasing; one of the two
# looks like a typo — confirm which spelling users rely on before renaming.
option :check_function_increasing,
description: 'Check that value is increasing or equal over time (use acceptable_diff_percentage if it should allow to be lower)',
short: '-i',
long: '--check_function_decreasing',
default: false,
boolean: true
option :greater_than,
description: 'Change whether value is greater than or less than check',
short: '-g',
long: '--greater_than',
default: false
option :check_last,
description: 'Check that the last value in GRAPHITE is greater/less than VALUE',
short: '-l VALUE',
long: '--last VALUE',
default: nil
option :ignore_nulls,
description: 'Do not error on null values, used in check_function_increasing',
short: '-n',
long: '--ignore_nulls',
default: false,
boolean: true
option :concat_output,
description: 'Include warning messages in output even if overall status is critical',
short: '-c',
long: '--concat_output',
default: false,
boolean: true
option :short_output,
description: 'Report only the highest status per series in output',
short: '-s',
long: '--short_output',
default: false,
boolean: true
option :check_average,
description: 'MAX_VALUE should be greater than the average of Graphite values from PERIOD',
short: '-a MAX_VALUE',
long: '--average_value MAX_VALUE'
# FIXME(review): short flag -d is already used by :acceptable_diff_percentage
# above; the two option definitions conflict — pick a distinct short flag.
option :data_points,
description: 'Number of data points to include in average check (smooths out spikes)',
short: '-d VALUE',
long: '--data_points VALUE',
default: 1
option :check_average_percent,
description: 'MAX_VALUE% should be greater than the average of Graphite values from PERIOD',
short: '-b MAX_VALUE',
long: '--average_percent_value MAX_VALUE'
option :percentile,
description: 'Percentile value, should be used in conjunction with percentile_value, defaults to 90',
long: '--percentile PERCENTILE',
default: 90
option :check_percentile,
description: 'Values should not be greater than the VALUE of Graphite values from PERIOD',
long: '--percentile_value VALUE'
option :http_user,
description: 'Basic HTTP authentication user',
short: '-U USER',
long: '--http-user USER',
default: nil
# NOTE(review): the long flag's value placeholder reads USER although the
# value is a password; cosmetic only, but worth fixing with the next change.
option :http_password,
description: 'Basic HTTP authentication password',
short: '-P PASSWORD',
long: '--http-password USER',
default: nil
# Set up the per-run cache of Graphite responses, keyed by target name.
def initialize
super
@graphite_cache = {}
end
# Return the cached series for +target+ that were fetched for the current
# @period, or nil on a cache miss (callers treat nil as "fetch again").
# NOTE(review): this reader deliberately shares its name with the
# @graphite_cache ivar initialised in #initialize.
def graphite_cache(target = nil)
# #YELLOW
if @graphite_cache.key?(target)
graphite_value = @graphite_cache[target].select { |value| value[:period] == @period }
graphite_value if graphite_value.size > 0
end
end
# Build the Graphite render-endpoint URI from the configured host,
# optionally scoped to a specific +target+ query parameter.
def graphite_url(target = nil)
  endpoint = "#{config[:host]}/render/"
  endpoint = "http://#{endpoint}" unless endpoint.start_with?('http')
  endpoint += "?target=#{target}" if target
  URI.parse(endpoint)
end
# Parse a comma-separated threshold list ("10,20,30") into a severity map.
# Positions correspond to warning, error and fatal, in that order; severities
# without a corresponding value are omitted from the result.
def get_levels(config_param)
  thresholds = config_param.split(',')
  %w(warning error fatal).each_with_index.with_object({}) do |(severity, idx), levels|
    levels[severity] = thresholds[idx] if thresholds[idx]
  end
end
# Fetch datapoints for +target+ over the current @period by POSTing to the
# Graphite render endpoint, caching the parsed result per target.
# Returns an array of { target:, period:, datapoints: } hashes, or nil when
# Graphite returned no data (callers treat nil as "no values").
def get_graphite_values(target)
cache_value = graphite_cache target
return cache_value if cache_value
params = {
target: target,
from: "-#{@period}",
format: 'json'
}
req = Net::HTTP::Post.new(graphite_url.path)
# If the basic http authentication credentials have been provided, then use them
if !config[:http_user].nil? && !config[:http_password].nil?
req.basic_auth(config[:http_user], config[:http_password])
end
req.set_form_data(params)
resp = Net::HTTP.new(graphite_url.host, graphite_url.port).start { |http| http.request(req) }
data = JSON.parse(resp.body)
@graphite_cache[target] = []
if data.size > 0
data.each { |d| @graphite_cache[target] << { target: d['target'], period: @period, datapoints: d['datapoints'] } }
graphite_cache target
end
end
# Will give max values for [0..-2]
# Map each series name under +target+ to its maximum datapoint value,
# excluding the final (most recent) sample; see #get_max_value.
def max_graphite_value(target)
max_values = {}
values = get_graphite_values target
if values
values.each do |val|
max = get_max_value(val[:datapoints])
max_values[val[:target]] = max
end
end
max_values
end
# Largest datapoint value in the series, ignoring the final sample
# (Graphite's newest bucket may still be accumulating). Nil datapoint values
# count as 0. Returns nil when +values+ is nil or has fewer than two samples.
def get_max_value(values)
  return unless values
  samples = values.map { |pair| pair[0] ? pair[0] : 0 }
  samples[0..-2].max
end
# Map each series name under +target+ to its most recent +count+ non-nil
# [value, timestamp] datapoints (see #get_last_metric).
def last_graphite_metric(target, count = 1)
last_values = {}
values = get_graphite_values target
if values
values.each do |val|
last = get_last_metric(val[:datapoints], count)
last_values[val[:target]] = last
end
end
last_values
end
# Walk the datapoint list from newest to oldest, collecting up to +count+
# [value, timestamp] entries whose value is non-nil. Returns nil when
# +values+ is nil.
# NOTE(review): when every value is nil the index decrements past zero and
# Ruby's negative indexing wraps to the end of the array before the nil guard
# finally fires — verify this edge case is acceptable.
def get_last_metric(values, count = 1)
if values
ret = []
values_size = values.size
count = values_size if count > values_size
while count > 0
values_size -= 1
break if values[values_size].nil?
count -= 1 if values[values_size][0]
ret.push(values[values_size]) if values[values_size][0]
end
ret
end
end
# Map each series name to the mean of its last +count+ non-nil datapoint
# values. Array#mean comes from the array_stats gem required at the top.
def last_graphite_value(target, count = 1)
last_metrics = last_graphite_metric(target, count)
last_values = {}
if last_metrics
last_metrics.each do |target_name, metrics|
last_values[target_name] = metrics.map { |metric| metric[0] }.mean
end
end
last_values
end
# Return one warning string per series whose latest sample is older than
# +time+ (i.e. the series has not been updated within +updated_since+ s).
# NOTE(review): +value+ here is the array of [value, timestamp] pairs
# returned by get_last_metric, so value[1] indexes the second pair rather
# than a timestamp — confirm this against real Graphite data.
def been_updated_since(target, time, updated_since)
last_time_stamp = last_graphite_metric target
warnings = []
if last_time_stamp
last_time_stamp.each do |target_name, value|
last_time_stamp_bool = value[1] > time.to_i ? true : false
warnings << "The metric #{target_name} has not been updated in #{updated_since} seconds" unless last_time_stamp_bool
end
end
warnings
end
# Direction word used when composing alert messages, driven by the
# --greater_than flag.
def greater_less
  config[:greater_than] ? 'greater' : 'less'
end
# Verify that each series under +target+ is non-decreasing: the latest value
# must not sit more than --acceptable_diff_percentage percent below the
# period maximum. Unless --ignore_nulls is set, also warn about series not
# updated within --updated_since seconds.
#
# Returns [warnings, critical_errors, fatals] arrays of message strings.
#
# Fixes: the critical message now names the failing series (target_name)
# instead of the whole target query, and "Could not found" -> "Could not find".
def check_increasing(target)
  updated_since = config[:updated_since].to_i
  time_to_be_updated_since = Time.now - updated_since
  critical_errors = []
  warnings = []
  max_gv = max_graphite_value target
  last_gv = last_graphite_value target
  if last_gv.is_a?(Hash) && max_gv.is_a?(Hash)
    last_gv.each do |target_name, last|
      max = max_gv[target_name]
      next unless last && max
      # Allow the last value to sit acceptable_diff_percentage below the max.
      if max > last * (1 + config[:acceptable_diff_percentage].to_f / 100)
        critical_errors << "The metric #{target_name} with last value #{last} is less than max value #{max} during #{config[:period]} period"
      end
    end
  else
    warnings << "Could not find any value in Graphite for metric #{target}, see #{graphite_url(target)}"
  end
  unless config[:ignore_nulls]
    warnings.concat(been_updated_since(target, time_to_be_updated_since, updated_since))
  end
  [warnings, critical_errors, []]
end
# Compare, per series, the mean of the last +data_points+ values — expressed
# as a fraction of the series' period average — against the warning/error/
# fatal thresholds in +max_values+. Comparison direction follows
# --greater_than. Returns [warnings, criticals, fatals] message arrays.
def check_average_percent(target, max_values, data_points = 1)
values = get_graphite_values target
last_values = last_graphite_value(target, data_points)
return [[], [], []] unless values
warnings = []
criticals = []
fatal = []
values.each do |data|
target = data[:target]
values_pair = data[:datapoints]
# Keep only datapoints with a non-nil value, then extract the values.
values_array = values_pair.select(&:first).map { |v| v.first unless v.first.nil? }
# #YELLOW
avg_value = values_array.reduce { |sum, el| sum + el if el }.to_f / values_array.size # rubocop:disable SingleLineBlockParams
last_value = last_values[target]
percent = last_value / avg_value unless last_value.nil? || avg_value.nil?
# #YELLOW
# Most severe level is evaluated first; --short_output stops after the
# first level that triggers.
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? percent : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : percent
if !percent.nil? && var1 > var2 && (values_array.size > 0 || !config[:ignore_nulls])
text = "The last value of metric #{target} is #{percent}% #{greater_less} than allowed #{max_value}% of the average value #{avg_value}"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
[warnings, criticals, fatal]
end
# Compare each series' period average against the warning/error/fatal
# thresholds in +max_values+; comparison direction follows --greater_than.
# Returns [warnings, criticals, fatals] message arrays.
def check_average(target, max_values)
values = get_graphite_values target
return [[], [], []] unless values
warnings = []
criticals = []
fatal = []
values.each do |data|
target = data[:target]
values_pair = data[:datapoints]
# Keep only datapoints with a non-nil value, then extract the values.
values_array = values_pair.select(&:first).map { |v| v.first unless v.first.nil? }
# #YELLOW
avg_value = values_array.reduce { |sum, el| sum + el if el }.to_f / values_array.size # rubocop:disable SingleLineBlockParams
# YELLOW
# Most severe level first; --short_output stops after the first match.
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? avg_value : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : avg_value
if var1 > var2 && (values_array.size > 0 || !config[:ignore_nulls])
text = "The average value of metric #{target} is #{avg_value} that is #{greater_less} than allowed average of #{max_value}"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
[warnings, criticals, fatal]
end
# Compare, per series, the mean of the last +data_points+ values against the
# series' +percentile+-th percentile (Array#percentile from array_stats),
# using the warning/error/fatal thresholds in +max_values+.
# Returns [warnings, criticals, fatals] message arrays.
def check_percentile(target, max_values, percentile, data_points = 1)
values = get_graphite_values target
last_values = last_graphite_value(target, data_points)
return [[], [], []] unless values
warnings = []
criticals = []
fatal = []
values.each do |data|
target = data[:target]
values_pair = data[:datapoints]
# Keep only datapoints with a non-nil value, then extract the values.
values_array = values_pair.select(&:first).map { |v| v.first unless v.first.nil? }
percentile_value = values_array.percentile(percentile)
last_value = last_values[target]
percent = last_value / percentile_value unless last_value.nil? || percentile_value.nil?
# #YELLOW
# Most severe level first; --short_output stops after the first match.
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? percent : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : percent
if !percentile_value.nil? && var1 > var2
# NOTE(review): this string literal embeds a raw newline plus source
# indentation in the emitted message — likely unintended; confirm
# before reformatting.
text = "The percentile value of metric #{target} (#{last_value}) is #{greater_less} than the
#{percentile}th percentile (#{percentile_value}) by more than #{max_value}%"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
[warnings, criticals, fatal]
end
# Compare each series' most recent datapoint against the warning/error/fatal
# thresholds in +max_values+; direction follows --greater_than.
# NOTE(review): +last+ is the array of [value, timestamp] pairs returned by
# get_last_metric, so last.first is a pair rather than a number; verify the
# numeric comparison below against real data before relying on this check.
def check_last(target, max_values)
last_targets = last_graphite_metric target
return [[], [], []] unless last_targets
warnings = []
criticals = []
fatal = []
# #YELLOW
last_targets.each do |target_name, last|
last_value = last.first
unless last_value.nil?
# #YELLOW
# Most severe level first; --short_output stops after the first match.
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? last_value : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : last_value
if var1 > var2
text = "The metric #{target_name} is #{last_value} that is #{greater_less} than max allowed #{max_value}"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
end
[warnings, criticals, fatal]
end
# Entry point called by the Sensu plugin framework: run every enabled check
# against each configured target, then report the most severe status
# collected (fatal/critical > warning > ok).
def run # rubocop:disable Metrics/AbcSize
# --complex_target disables comma-splitting so function calls survive intact.
targets = config[:complex_target] ? [config[:target]] : config[:target].split(',')
@period = config[:period]
critical_errors = []
warnings = []
fatals = []
# #YELLOW
targets.each do |target|
if config[:check_function_increasing]
inc_warnings, inc_critical, inc_fatal = check_increasing target
warnings += inc_warnings
critical_errors += inc_critical
fatals += inc_fatal
end
if config[:check_last]
max_values = get_levels config[:check_last]
lt_warnings, lt_critical, lt_fatal = check_last(target, max_values)
warnings += lt_warnings
critical_errors += lt_critical
fatals += lt_fatal
end
if config[:check_average]
max_values = get_levels config[:check_average]
avg_warnings, avg_critical, avg_fatal = check_average(target, max_values)
warnings += avg_warnings
critical_errors += avg_critical
fatals += avg_fatal
end
if config[:check_average_percent]
max_values = get_levels config[:check_average_percent]
avg_warnings, avg_critical, avg_fatal = check_average_percent(target, max_values, config[:data_points].to_i)
warnings += avg_warnings
critical_errors += avg_critical
fatals += avg_fatal
end
if config[:check_percentile]
max_values = get_levels config[:check_percentile]
pct_warnings, pct_critical, pct_fatal = check_percentile(target, max_values, config[:percentile].to_i, config[:data_points].to_i)
warnings += pct_warnings
critical_errors += pct_critical
fatals += pct_fatal
end
end
fatals_string = fatals.size > 0 ? fatals.join("\n") : ''
criticals_string = critical_errors.size > 0 ? critical_errors.join("\n") : ''
warnings_string = warnings.size > 0 ? warnings.join("\n") : ''
# With --concat_output, lower-severity messages are appended onto the most
# severe message so nothing is hidden when the check goes critical.
if config[:concat_output]
fatals_string = fatals_string + "\n" + criticals_string if critical_errors.size > 0
fatals_string = fatals_string + "\nGraphite WARNING: " + warnings_string if warnings.size > 0
criticals_string = criticals_string + "\nGraphite WARNING: " + warnings_string if warnings.size > 0
critical fatals_string if fatals.size > 0
critical criticals_string if critical_errors.size > 0
warning warnings_string if warnings.size > 0
else
critical fatals_string if fatals.size > 0
critical criticals_string if critical_errors.size > 0
warning warnings_string if warnings.size > 0
end
ok
end
end
Change cop name in rubocop:disable notation
#! /usr/bin/env ruby
#
# <script name>
#
# DESCRIPTION:
# Get time series values from Graphite and create events based on values
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: array_stats
#
# USAGE:
# #YELLOW
#
# NOTES:
#
# LICENSE:
# Copyright 2012 Ulf Mansson @ Recorded Future
# Modifications by Chris Jansen to support wildcard targets
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'socket'
require 'array_stats'
class Graphite < Sensu::Plugin::Check::CLI
option :host,
short: '-h HOST',
long: '--host HOST',
description: 'Graphite host to connect to, include port',
required: true
option :target,
description: 'The graphite metric name. Could be a comma separated list of metric names.',
short: '-t TARGET',
long: '--target TARGET',
required: true
option :complex_target,
description: 'Allows complex targets which contain functions. Disables splitting on comma.',
short: '-x',
long: '--complex_target',
default: false
option :period,
description: 'The period back in time to extract from Graphite and compare with. Use 24hours,2days etc, same format as in Graphite',
short: '-p PERIOD',
long: '--period PERIOD',
default: '2hours'
option :updated_since,
description: 'The graphite value should have been updated within UPDATED_SINCE seconds, default to 600 seconds',
short: '-u UPDATED_SINCE',
long: '--updated_since UPDATED_SINCE',
default: 600
option :acceptable_diff_percentage,
description: 'The acceptable diff from max values in percentage, used in check_function_increasing',
short: '-d ACCEPTABLE_DIFF_PERCENTAGE',
long: '--acceptable_diff_percentage ACCEPTABLE_DIFF_PERCENTAGE',
default: 0
# FIXME(review): this option is named :check_function_increasing with short
# flag -i, but the long flag reads --check_function_decreasing; one of the two
# looks like a typo — confirm which spelling users rely on before renaming.
option :check_function_increasing,
description: 'Check that value is increasing or equal over time (use acceptable_diff_percentage if it should allow to be lower)',
short: '-i',
long: '--check_function_decreasing',
default: false,
boolean: true
option :greater_than,
description: 'Change whether value is greater than or less than check',
short: '-g',
long: '--greater_than',
default: false
option :check_last,
description: 'Check that the last value in GRAPHITE is greater/less than VALUE',
short: '-l VALUE',
long: '--last VALUE',
default: nil
option :ignore_nulls,
description: 'Do not error on null values, used in check_function_increasing',
short: '-n',
long: '--ignore_nulls',
default: false,
boolean: true
option :concat_output,
description: 'Include warning messages in output even if overall status is critical',
short: '-c',
long: '--concat_output',
default: false,
boolean: true
option :short_output,
description: 'Report only the highest status per series in output',
short: '-s',
long: '--short_output',
default: false,
boolean: true
option :check_average,
description: 'MAX_VALUE should be greater than the average of Graphite values from PERIOD',
short: '-a MAX_VALUE',
long: '--average_value MAX_VALUE'
# FIXME(review): short flag -d is already used by :acceptable_diff_percentage
# above; the two option definitions conflict — pick a distinct short flag.
option :data_points,
description: 'Number of data points to include in average check (smooths out spikes)',
short: '-d VALUE',
long: '--data_points VALUE',
default: 1
option :check_average_percent,
description: 'MAX_VALUE% should be greater than the average of Graphite values from PERIOD',
short: '-b MAX_VALUE',
long: '--average_percent_value MAX_VALUE'
option :percentile,
description: 'Percentile value, should be used in conjunction with percentile_value, defaults to 90',
long: '--percentile PERCENTILE',
default: 90
option :check_percentile,
description: 'Values should not be greater than the VALUE of Graphite values from PERIOD',
long: '--percentile_value VALUE'
option :http_user,
description: 'Basic HTTP authentication user',
short: '-U USER',
long: '--http-user USER',
default: nil
option :http_password,
description: 'Basic HTTP authentication password',
short: '-P PASSWORD',
long: '--http-password USER',
default: nil
def initialize
super
@graphite_cache = {}
end
def graphite_cache(target = nil)
# #YELLOW
if @graphite_cache.key?(target)
graphite_value = @graphite_cache[target].select { |value| value[:period] == @period }
graphite_value if graphite_value.size > 0
end
end
# Build the Graphite render-endpoint URI from the configured host,
# optionally scoped to a specific +target+ query parameter.
def graphite_url(target = nil)
  endpoint = "#{config[:host]}/render/"
  endpoint = "http://#{endpoint}" unless endpoint.start_with?('http')
  endpoint += "?target=#{target}" if target
  URI.parse(endpoint)
end
# Parse a comma-separated threshold list ("10,20,30") into a severity map.
# Positions correspond to warning, error and fatal, in that order; severities
# without a corresponding value are omitted from the result.
def get_levels(config_param)
  thresholds = config_param.split(',')
  %w(warning error fatal).each_with_index.with_object({}) do |(severity, idx), levels|
    levels[severity] = thresholds[idx] if thresholds[idx]
  end
end
def get_graphite_values(target)
cache_value = graphite_cache target
return cache_value if cache_value
params = {
target: target,
from: "-#{@period}",
format: 'json'
}
req = Net::HTTP::Post.new(graphite_url.path)
# If the basic http authentication credentials have been provided, then use them
if !config[:http_user].nil? && !config[:http_password].nil?
req.basic_auth(config[:http_user], config[:http_password])
end
req.set_form_data(params)
resp = Net::HTTP.new(graphite_url.host, graphite_url.port).start { |http| http.request(req) }
data = JSON.parse(resp.body)
@graphite_cache[target] = []
if data.size > 0
data.each { |d| @graphite_cache[target] << { target: d['target'], period: @period, datapoints: d['datapoints'] } }
graphite_cache target
end
end
# Will give max values for [0..-2]
def max_graphite_value(target)
max_values = {}
values = get_graphite_values target
if values
values.each do |val|
max = get_max_value(val[:datapoints])
max_values[val[:target]] = max
end
end
max_values
end
# Largest datapoint value in the series, ignoring the final sample
# (Graphite's newest bucket may still be accumulating). Nil datapoint values
# count as 0. Returns nil when +values+ is nil or has fewer than two samples.
def get_max_value(values)
  return unless values
  samples = values.map { |pair| pair[0] ? pair[0] : 0 }
  samples[0..-2].max
end
def last_graphite_metric(target, count = 1)
last_values = {}
values = get_graphite_values target
if values
values.each do |val|
last = get_last_metric(val[:datapoints], count)
last_values[val[:target]] = last
end
end
last_values
end
def get_last_metric(values, count = 1)
if values
ret = []
values_size = values.size
count = values_size if count > values_size
while count > 0
values_size -= 1
break if values[values_size].nil?
count -= 1 if values[values_size][0]
ret.push(values[values_size]) if values[values_size][0]
end
ret
end
end
def last_graphite_value(target, count = 1)
last_metrics = last_graphite_metric(target, count)
last_values = {}
if last_metrics
last_metrics.each do |target_name, metrics|
last_values[target_name] = metrics.map { |metric| metric[0] }.mean
end
end
last_values
end
def been_updated_since(target, time, updated_since)
last_time_stamp = last_graphite_metric target
warnings = []
if last_time_stamp
last_time_stamp.each do |target_name, value|
last_time_stamp_bool = value[1] > time.to_i ? true : false
warnings << "The metric #{target_name} has not been updated in #{updated_since} seconds" unless last_time_stamp_bool
end
end
warnings
end
# Direction word used when composing alert messages, driven by the
# --greater_than flag.
def greater_less
  config[:greater_than] ? 'greater' : 'less'
end
# Verify that each series under +target+ is non-decreasing: the latest value
# must not sit more than --acceptable_diff_percentage percent below the
# period maximum. Unless --ignore_nulls is set, also warn about series not
# updated within --updated_since seconds.
#
# Returns [warnings, critical_errors, fatals] arrays of message strings.
#
# Fixes: the critical message now names the failing series (target_name)
# instead of the whole target query, and "Could not found" -> "Could not find".
def check_increasing(target)
  updated_since = config[:updated_since].to_i
  time_to_be_updated_since = Time.now - updated_since
  critical_errors = []
  warnings = []
  max_gv = max_graphite_value target
  last_gv = last_graphite_value target
  if last_gv.is_a?(Hash) && max_gv.is_a?(Hash)
    last_gv.each do |target_name, last|
      max = max_gv[target_name]
      next unless last && max
      # Allow the last value to sit acceptable_diff_percentage below the max.
      if max > last * (1 + config[:acceptable_diff_percentage].to_f / 100)
        critical_errors << "The metric #{target_name} with last value #{last} is less than max value #{max} during #{config[:period]} period"
      end
    end
  else
    warnings << "Could not find any value in Graphite for metric #{target}, see #{graphite_url(target)}"
  end
  unless config[:ignore_nulls]
    warnings.concat(been_updated_since(target, time_to_be_updated_since, updated_since))
  end
  [warnings, critical_errors, []]
end
def check_average_percent(target, max_values, data_points = 1)
values = get_graphite_values target
last_values = last_graphite_value(target, data_points)
return [[], [], []] unless values
warnings = []
criticals = []
fatal = []
values.each do |data|
target = data[:target]
values_pair = data[:datapoints]
values_array = values_pair.select(&:first).map { |v| v.first unless v.first.nil? }
# #YELLOW
avg_value = values_array.reduce { |sum, el| sum + el if el }.to_f / values_array.size # rubocop:disable SingleLineBlockParams
last_value = last_values[target]
percent = last_value / avg_value unless last_value.nil? || avg_value.nil?
# #YELLOW
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? percent : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : percent
if !percent.nil? && var1 > var2 && (values_array.size > 0 || !config[:ignore_nulls])
text = "The last value of metric #{target} is #{percent}% #{greater_less} than allowed #{max_value}% of the average value #{avg_value}"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
[warnings, criticals, fatal]
end
def check_average(target, max_values)
values = get_graphite_values target
return [[], [], []] unless values
warnings = []
criticals = []
fatal = []
values.each do |data|
target = data[:target]
values_pair = data[:datapoints]
values_array = values_pair.select(&:first).map { |v| v.first unless v.first.nil? }
# #YELLOW
avg_value = values_array.reduce { |sum, el| sum + el if el }.to_f / values_array.size # rubocop:disable SingleLineBlockParams
# YELLOW
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? avg_value : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : avg_value
if var1 > var2 && (values_array.size > 0 || !config[:ignore_nulls])
text = "The average value of metric #{target} is #{avg_value} that is #{greater_less} than allowed average of #{max_value}"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
[warnings, criticals, fatal]
end
def check_percentile(target, max_values, percentile, data_points = 1)
values = get_graphite_values target
last_values = last_graphite_value(target, data_points)
return [[], [], []] unless values
warnings = []
criticals = []
fatal = []
values.each do |data|
target = data[:target]
values_pair = data[:datapoints]
values_array = values_pair.select(&:first).map { |v| v.first unless v.first.nil? }
percentile_value = values_array.percentile(percentile)
last_value = last_values[target]
percent = last_value / percentile_value unless last_value.nil? || percentile_value.nil?
# #YELLOW
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? percent : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : percent
if !percentile_value.nil? && var1 > var2
text = "The percentile value of metric #{target} (#{last_value}) is #{greater_less} than the
#{percentile}th percentile (#{percentile_value}) by more than #{max_value}%"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
[warnings, criticals, fatal]
end
def check_last(target, max_values)
last_targets = last_graphite_metric target
return [[], [], []] unless last_targets
warnings = []
criticals = []
fatal = []
# #YELLOW
last_targets.each do |target_name, last|
last_value = last.first
unless last_value.nil?
# #YELLOW
%w(fatal error warning).each do |type|
next unless max_values.key?(type)
max_value = max_values[type]
var1 = config[:greater_than] ? last_value : max_value.to_f
var2 = config[:greater_than] ? max_value.to_f : last_value
if var1 > var2
text = "The metric #{target_name} is #{last_value} that is #{greater_less} than max allowed #{max_value}"
case type
when 'warning'
warnings << text
when 'error'
criticals << text
when 'fatal'
fatal << text
else
fail "Unknown type #{type}"
end
break if config[:short_output]
end
end
end
end
[warnings, criticals, fatal]
end
def run # rubocop:disable AbcSize
targets = config[:complex_target] ? [config[:target]] : config[:target].split(',')
@period = config[:period]
critical_errors = []
warnings = []
fatals = []
# #YELLOW
targets.each do |target|
if config[:check_function_increasing]
inc_warnings, inc_critical, inc_fatal = check_increasing target
warnings += inc_warnings
critical_errors += inc_critical
fatals += inc_fatal
end
if config[:check_last]
max_values = get_levels config[:check_last]
lt_warnings, lt_critical, lt_fatal = check_last(target, max_values)
warnings += lt_warnings
critical_errors += lt_critical
fatals += lt_fatal
end
if config[:check_average]
max_values = get_levels config[:check_average]
avg_warnings, avg_critical, avg_fatal = check_average(target, max_values)
warnings += avg_warnings
critical_errors += avg_critical
fatals += avg_fatal
end
if config[:check_average_percent]
max_values = get_levels config[:check_average_percent]
avg_warnings, avg_critical, avg_fatal = check_average_percent(target, max_values, config[:data_points].to_i)
warnings += avg_warnings
critical_errors += avg_critical
fatals += avg_fatal
end
if config[:check_percentile]
max_values = get_levels config[:check_percentile]
pct_warnings, pct_critical, pct_fatal = check_percentile(target, max_values, config[:percentile].to_i, config[:data_points].to_i)
warnings += pct_warnings
critical_errors += pct_critical
fatals += pct_fatal
end
end
fatals_string = fatals.size > 0 ? fatals.join("\n") : ''
criticals_string = critical_errors.size > 0 ? critical_errors.join("\n") : ''
warnings_string = warnings.size > 0 ? warnings.join("\n") : ''
if config[:concat_output]
fatals_string = fatals_string + "\n" + criticals_string if critical_errors.size > 0
fatals_string = fatals_string + "\nGraphite WARNING: " + warnings_string if warnings.size > 0
criticals_string = criticals_string + "\nGraphite WARNING: " + warnings_string if warnings.size > 0
critical fatals_string if fatals.size > 0
critical criticals_string if critical_errors.size > 0
warning warnings_string if warnings.size > 0
else
critical fatals_string if fatals.size > 0
critical criticals_string if critical_errors.size > 0
warning warnings_string if warnings.size > 0
end
ok
end
end
|
#!/usr/bin/env ruby
require 'bfs_brute_force'
require 'set'
# Puzzle:
#
# Seperate green and red books. Can only
# move books in pairs.
#
# Inital layout:
#
# +---+---+---+---+---+---+---+---+---+---+
# | R | G | R | G | R | G | R | G | | |
# +---+---+---+---+---+---+---+---+---+---+
# 0 1 2 3 4 5 6 7 8 9
# Puzzle state: ten shelf slots holding :R/:G books plus two blanks (:_).
# Books may only be moved in adjacent pairs into the two empty slots.
#
# Fix: use the +already_seen+ set passed to #next_states so duplicate
# layouts are pruned instead of being re-enqueued on every BFS level
# (the parameter was previously ignored, and `require 'set'` was unused).
class BooksState < BfsBruteForce::State
  def initialize(books = nil)
    @books = books || [
      :R, :G,
      :R, :G,
      :R, :G,
      :R, :G,
      :_, :_
    ]
  end

  # Goal: reds on the left, blanks in the middle, greens on the right.
  def solved?
    @books == [
      :R, :R,
      :R, :R,
      :_, :_,
      :G, :G,
      :G, :G
    ]
  end

  # Yield every state reachable by sliding one adjacent pair of books into
  # the empty slots. Layouts already recorded in +already_seen+ are skipped
  # (Set#add? returns nil for duplicates), pruning the BFS frontier.
  def next_states(already_seen)
    empty_start = @books.index :_
    (@books.length - 1).times.each do |index|
      # Moving the pair that overlaps the gap is either a no-op or invalid.
      next if index == empty_start - 1 || index == empty_start
      book1, book2 = @books[index, 2]
      new_books = @books.dup
      new_books[empty_start] = book1
      new_books[empty_start + 1] = book2
      new_books[index] = :_
      new_books[index + 1] = :_
      if already_seen.add?(new_books)
        new_state = BooksState.new new_books
        yield "Move #{index}.5 to #{empty_start}.5\n#{self}\n#{new_state}", new_state
      end
    end
  end

  # ASCII diagram of the shelf; blanks render as spaces.
  def to_s
    fmt = %q{
+---+---+---+---+---+---+---+---+---+---+
| %s | %s | %s | %s | %s | %s | %s | %s | %s | %s |
+---+---+---+---+---+---+---+---+---+---+
0 1 2 3 4 5 6 7 8 9
}
    fmt % @books.map { |s| s == :_ ? ' ' : s }
  end
end
solver = BfsBruteForce::Solver.new
solver.solve BooksState.new
Skip previously evaluated states
#!/usr/bin/env ruby
require 'bfs_brute_force'
# Puzzle:
#
# Seperate green and red books. Can only
# move books in pairs.
#
# Inital layout:
#
# +---+---+---+---+---+---+---+---+---+---+
# | R | G | R | G | R | G | R | G | | |
# +---+---+---+---+---+---+---+---+---+---+
# 0 1 2 3 4 5 6 7 8 9
# Puzzle state: ten shelf slots holding :R/:G books plus two blanks (:_).
# Books may only be moved in adjacent pairs into the two empty slots.
class BooksState < BfsBruteForce::State
def initialize(books = nil)
@books = books || [
:R, :G,
:R, :G,
:R, :G,
:R, :G,
:_, :_
]
end
# Goal: reds on the left, blanks in the middle, greens on the right.
def solved?
@books == [
:R, :R,
:R, :R,
:_, :_,
:G, :G,
:G, :G
]
end
# Yield every state reachable by sliding one adjacent pair of books into
# the empty slots. Layouts already recorded in +already_seen+ are skipped
# (relies on already_seen#add? returning nil for duplicates, e.g. a Set),
# pruning the BFS frontier.
def next_states(already_seen)
empty_start = @books.index :_
(@books.length - 1).times.each do |index|
# Moving the pair that overlaps the gap is either a no-op or invalid.
next if index == empty_start - 1 or index == empty_start
book1, book2 = @books[index, 2]
new_books = @books.dup
new_books[empty_start] = book1
new_books[empty_start + 1] = book2
new_books[index] = :_
new_books[index + 1] = :_
if already_seen.add?(new_books)
new_state = BooksState.new new_books
yield "Move #{index}.5 to #{empty_start}.5\n#{self}\n#{new_state}", new_state
end
end
end
# ASCII diagram of the shelf; blanks render as spaces.
def to_s
fmt = %q{
+---+---+---+---+---+---+---+---+---+---+
| %s | %s | %s | %s | %s | %s | %s | %s | %s | %s |
+---+---+---+---+---+---+---+---+---+---+
0 1 2 3 4 5 6 7 8 9
}
fmt % @books.map {|s| s == :_ ? ' ' : s}
end
end
# Run the breadth-first search from the initial shelf layout; the solver
# prints the sequence of moves leading to the solved state.
solver = BfsBruteForce::Solver.new
solver.solve BooksState.new
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'moodle/api/version'

# Gem specification for moodle-api, a wrapper around Moodle's external
# web service API.
Gem::Specification.new do |spec|
  spec.name          = "moodle-api"
  spec.version       = Moodle::Api::VERSION
  spec.authors       = ["Ryan-Neal Mes"]
  spec.email         = ["ryan.mes@gmail.com"]
  spec.summary       = %q{Moodle web service API wrapper.}
  spec.description   = %q{Wraps Moodle API and exposes web services that have been made external.}
  spec.homepage      = "https://github.com/get-smarter/moodle"
  spec.license       = "MIT"

  # Allow pushing releases to the public RubyGems index. The generator's
  # "TODO" placeholder previously left here is not a valid host and would
  # reject pushes to any server.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "https://rubygems.org"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end

  # Package everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.9"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'guard-rspec'
  spec.add_development_dependency 'pry'
  spec.add_development_dependency 'vcr'

  spec.add_dependency 'typhoeus'
end
Update moodle-api.gemspec
Enabled pushing to rubygems
# coding: utf-8
# Gem specification for moodle-api, a wrapper around Moodle's external
# web service API.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'moodle/api/version'

Gem::Specification.new do |spec|
  spec.name          = 'moodle-api'
  spec.version       = Moodle::Api::VERSION
  spec.authors       = ['Ryan-Neal Mes']
  spec.email         = ['ryan.mes@gmail.com']
  spec.summary       = 'Moodle web service API wrapper.'
  spec.description   = 'Wraps Moodle API and exposes web services that have been made external.'
  spec.homepage      = 'https://github.com/get-smarter/moodle'
  spec.license       = 'MIT'

  # Releases are pushed to the public RubyGems index; 'allowed_push_host'
  # is only honored by RubyGems 2.0+, so refuse to build without it.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = 'https://rubygems.org'
  else
    raise 'RubyGems 2.0 or newer is required to protect against public gem pushes.'
  end

  # Package everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler', '~> 1.9'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'guard-rspec'
  spec.add_development_dependency 'pry'
  spec.add_development_dependency 'vcr'

  spec.add_dependency 'typhoeus'
end
|
require 'test_helper'

# Integration specs for the rake tasks Seedbank generates from the dummy
# application's db/seeds tree.
describe 'Seedbank rake.task' do
  describe "seeds with dependency" do
    subject { Rake.application.tasks_in_scope %w[db seed] }

    it "creates all the seed tasks" do
      seeds = %w(db:seed:circular1 db:seed:circular2 db:seed:common db:seed:dependency db:seed:dependency2
        db:seed:dependent db:seed:dependent_on_nested db:seed:dependent_on_several db:seed:development
        db:seed:development:users db:seed:no_block db:seed:original)
      subject.map(&:to_s).must_equal seeds
    end
  end

  describe "common seeds in the root directory" do
    Dir[File.expand_path('../../../dummy/db/seeds/*.seeds.rb', __FILE__)].each do |seed_file|
      seed = File.basename(seed_file, '.seeds.rb')

      describe seed do
        subject { Rake.application.lookup(seed, %w[db seed]) }

        it "is dependent on db:abort_if_pending_migrations" do
          subject.prerequisites.must_equal %w[db:abort_if_pending_migrations]
        end
      end
    end
  end

  describe "db:seed:common" do
    subject { Rake::Task['db:seed:common'] }

    it "is dependent on the common seeds and db:seed:original" do
      # Sort the glob: Dir's ordering is filesystem-dependent, so an
      # unsorted expectation makes this test flaky across platforms.
      prerequisite_seeds = Dir[File.expand_path('../../../dummy/db/seeds/*.seeds.rb', __FILE__)].sort.map do |seed_file|
        ['db', 'seed', File.basename(seed_file, '.seeds.rb')].join(':')
      end.unshift('db:seed:original')
      subject.prerequisites.must_equal prerequisite_seeds
    end
  end

  describe "db:seed:original" do
    subject { Rake::Task['db:seed:original'] }

    it "has no dependencies" do
      subject.prerequisites.must_be_empty
    end

    describe "when seeds are reloaded" do
      before do
        Dummy::Application.load_tasks
      end

      it "still has no dependencies" do
        subject.prerequisites.must_be_empty
      end
    end
  end

  describe "environment seeds" do
    Dir[File.expand_path('../../../dummy/db/seeds', __FILE__) + '/*/'].each do |environment_directory|
      environment = File.basename(environment_directory)

      describe "seeds in the #{environment} environment" do
        Dir[File.expand_path("../../../dummy/db/seeds/#{environment}/*.seeds.rb", __FILE__)].each do |seed_file|
          seed = File.basename(seed_file, '.seeds.rb')

          describe seed do
            subject { Rake.application.lookup(seed, ['db', 'seed', environment]) }

            it "is dependent on db:abort_if_pending_migrations" do
              subject.prerequisites.must_equal %w[db:abort_if_pending_migrations]
            end
          end
        end
      end

      describe "db:seed:#{environment}" do
        subject { Rake.application.lookup(environment, %w[db seed]) }

        it "is dependent on the seeds in the environment directory" do
          # Sorted for the same reason as db:seed:common above.
          prerequisite_seeds = Dir[File.expand_path("../../../dummy/db/seeds/#{environment}/*.seeds.rb", __FILE__)].sort.map do |seed_file|
            ['db', 'seed', environment, File.basename(seed_file, '.seeds.rb')].join(':')
          end.unshift('db:seed:common')
          subject.prerequisites.must_equal prerequisite_seeds
        end
      end
    end
  end

  describe "db:seed task" do
    subject { Rake::Task['db:seed'] }

    describe "when no environment seeds are defined" do
      it "is dependent on db:seed:common" do
        subject.prerequisites.must_equal %w[db:seed:common]
      end
    end

    describe "when environment seeds are defined" do
      it "is dependent on db:seed:common" do
        flexmock(Rails).should_receive(:env).and_return('development').once
        Rake.application.clear
        Dummy::Application.load_tasks
        subject.prerequisites.must_equal %w[db:seed:common db:seed:development]
      end
    end
  end
end
Fixes test broken by fixing #23.
Signed-off-by: James McCarthy <474ba67bdb289c6263b36dfd8a7bed6c85b04943@thisishatch.co.uk>
require 'test_helper'

# Integration specs for the rake tasks Seedbank generates from the dummy
# application's db/seeds tree.
describe 'Seedbank rake.task' do
  describe "seeds with dependency" do
    subject { Rake.application.tasks_in_scope %w[db seed] }

    it "creates all the seed tasks" do
      seeds = %w(db:seed:circular1 db:seed:circular2 db:seed:common db:seed:dependency db:seed:dependency2
        db:seed:dependent db:seed:dependent_on_nested db:seed:dependent_on_several db:seed:development
        db:seed:development:users db:seed:no_block db:seed:original)
      subject.map(&:to_s).must_equal seeds
    end
  end

  describe "common seeds in the root directory" do
    # One example group per *.seeds.rb file found in the dummy app.
    Dir[File.expand_path('../../../dummy/db/seeds/*.seeds.rb', __FILE__)].each do |seed_file|
      seed = File.basename(seed_file, '.seeds.rb')

      describe seed do
        subject { Rake.application.lookup(seed, %w[db seed]) }

        it "is dependent on db:abort_if_pending_migrations" do
          subject.prerequisites.must_equal %w[db:abort_if_pending_migrations]
        end
      end
    end
  end

  describe "db:seed:common" do
    subject { Rake::Task['db:seed:common'] }

    it "is dependent on the common seeds and db:seed:original" do
      # Sorted: Dir glob ordering is filesystem-dependent.
      prerequisite_seeds = Dir[File.expand_path('../../../dummy/db/seeds/*.seeds.rb', __FILE__)].sort.map do |seed_file|
        ['db', 'seed', File.basename(seed_file, '.seeds.rb')].join(':')
      end.unshift('db:seed:original')
      subject.prerequisites.must_equal prerequisite_seeds
    end
  end

  describe "db:seed:original" do
    subject { Rake::Task['db:seed:original'] }

    it "has no dependencies" do
      subject.prerequisites.must_be_empty
    end

    describe "when seeds are reloaded" do
      before do
        Dummy::Application.load_tasks
      end

      it "still has no dependencies" do
        subject.prerequisites.must_be_empty
      end
    end
  end

  describe "environment seeds" do
    # Each subdirectory of db/seeds is an environment (e.g. development).
    Dir[File.expand_path('../../../dummy/db/seeds', __FILE__) + '/*/'].each do |environment_directory|
      environment = File.basename(environment_directory)

      describe "seeds in the #{environment} environment" do
        Dir[File.expand_path("../../../dummy/db/seeds/#{environment}/*.seeds.rb", __FILE__)].each do |seed_file|
          seed = File.basename(seed_file, '.seeds.rb')

          describe seed do
            subject { Rake.application.lookup(seed, ['db', 'seed', environment]) }

            it "is dependent on db:abort_if_pending_migrations" do
              subject.prerequisites.must_equal %w[db:abort_if_pending_migrations]
            end
          end
        end
      end

      describe "db:seed:#{environment}" do
        subject { Rake.application.lookup(environment, %w[db seed]) }

        it "is dependent on the seeds in the environment directory" do
          # Sorted: Dir glob ordering is filesystem-dependent.
          prerequisite_seeds = Dir[File.expand_path("../../../dummy/db/seeds/#{environment}/*.seeds.rb", __FILE__)].sort.map do |seed_file|
            ['db', 'seed', environment, File.basename(seed_file, '.seeds.rb')].join(':')
          end.unshift('db:seed:common')
          subject.prerequisites.must_equal prerequisite_seeds
        end
      end
    end
  end

  describe "db:seed task" do
    subject { Rake::Task['db:seed'] }

    describe "when no environment seeds are defined" do
      it "is dependent on db:seed:common" do
        subject.prerequisites.must_equal %w[db:seed:common]
      end
    end

    describe "when environment seeds are defined" do
      it "is dependent on db:seed:common" do
        # Stub Rails.env so the development seeds are picked up on reload.
        flexmock(Rails).should_receive(:env).and_return('development').once
        Rake.application.clear
        Dummy::Application.load_tasks
        subject.prerequisites.must_equal %w[db:seed:common db:seed:development]
      end
    end
  end
end
|
#------------------------------------------------------------------------
# (The MIT License)
#
# Copyright (c) 2008-2011 Rhomobile, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# http://rhomobile.com
#------------------------------------------------------------------------
require File.dirname(__FILE__) + '/androidcommon.rb'
require File.dirname(__FILE__) + '/android_tools.rb'
require File.dirname(__FILE__) + '/manifest_generator.rb'
require 'pathname'
require 'tempfile'
# When true, build against the stlport copy bundled with Rhodes instead of
# the NDK's. NOTE(review): config:android reassigns this constant to true
# when the NDK lacks stlport, which emits an "already initialized constant"
# warning — consider a global variable instead.
USE_OWN_STLPORT = false
#USE_TRACES = # see androidcommon.h
# Returns the market version name for a numeric Android API level
# (as listed by `android list targets`). Thin delegator to AndroidTools.
def get_market_version(apilevel)
  AndroidTools.get_market_version(apilevel)
end
# Returns the numeric Android API level for a market version name.
# Thin delegator to AndroidTools (inverse of get_market_version).
def get_api_level(version)
  AndroidTools.get_api_level(version)
end
# Base Java package of the Rhodes runtime classes.
JAVA_PACKAGE_NAME = 'com.rhomobile.rhodes'

# Default Android API level used when the application does not request one.
# For the complete list of API levels and their mapping to market names
# (such as "Android-1.5") see the output of "android list targets".
ANDROID_SDK_LEVEL = 4

# Maps each Rhodes capability name to what it needs in the manifest:
# a single permission string, an array of permissions, a proc that edits
# the manifest document directly, nil (nothing to add), or a mix of these.
ANDROID_PERMISSIONS = {
  'audio' => ['RECORD_AUDIO', 'MODIFY_AUDIO_SETTINGS'],
  'camera' => 'CAMERA',
  'gps' => ['ACCESS_FINE_LOCATION', 'ACCESS_COARSE_LOCATION'],
  'network_state' => 'ACCESS_NETWORK_STATE',
  'phone' => ['CALL_PHONE', 'READ_PHONE_STATE'],
  'pim' => ['READ_CONTACTS', 'WRITE_CONTACTS', 'GET_ACCOUNTS'],
  'record_audio' => 'RECORD_AUDIO',
  'vibrate' => 'VIBRATE',
  'bluetooth' => ['BLUETOOTH_ADMIN', 'BLUETOOTH'],
  'calendar' => ['READ_CALENDAR', 'WRITE_CALENDAR'],
  'sdcard' => 'WRITE_EXTERNAL_STORAGE',
  'push' => proc do |manifest| add_push(manifest) end,
  'motorola' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest| add_motosol_sdk(manifest) end],
  'motoroladev' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest| add_motosol_sdk(manifest) end],
  'webkit_browser' => nil,
  'shared_runtime' => nil,
  'motorola_browser' => nil
}

# Capabilities injected into every build regardless of application config.
ANDROID_CAPS_ALWAYS_ENABLED = ['network_state']
# Injects the Android C2DM push-messaging entries into the manifest:
# the app-private C2D_MESSAGE permission (declared and used), the Google
# RECEIVE permission, and a PushReceiver registered for the RECEIVE and
# REGISTRATION intents inside every <application> element.
def add_push(manifest)
  c2d_permission = "#{$app_package_name}.permission.C2D_MESSAGE"

  # Declare the app-private permission with signature protection.
  perm = REXML::Element.new('permission')
  perm.add_attribute('android:name', c2d_permission)
  perm.add_attribute('android:protectionLevel', 'signature')
  manifest.add perm

  # Request both the private permission and Google's RECEIVE permission.
  [c2d_permission, 'com.google.android.c2dm.permission.RECEIVE'].each do |name|
    uses = REXML::Element.new('uses-permission')
    uses.add_attribute('android:name', name)
    manifest.add uses
  end

  # Receiver guarded by Google's SEND permission, with one intent-filter
  # per C2DM intent (message delivery and registration).
  receiver = REXML::Element.new('receiver')
  receiver.add_attribute('android:name', "#{JAVA_PACKAGE_NAME}.PushReceiver")
  receiver.add_attribute('android:permission', 'com.google.android.c2dm.permission.SEND')

  ['com.google.android.c2dm.intent.RECEIVE',
   'com.google.android.c2dm.intent.REGISTRATION'].each do |intent|
    action = REXML::Element.new('action')
    action.add_attribute('android:name', intent)
    category = REXML::Element.new('category')
    category.add_attribute('android:name', $app_package_name)
    filter = REXML::Element.new('intent-filter')
    filter.add_element(action)
    filter.add_element(category)
    receiver.add_element(filter)
  end

  manifest.elements.each('application') do |app|
    app.add receiver
  end
end
# Declares the optional Motorola Solutions scanner and MSR libraries
# (<uses-library android:required="false">) inside every <application>.
def add_motosol_sdk(manifest)
  libs = ['com.motorolasolutions.scanner', 'com.motorolasolutions.emdk.msr'].map do |name|
    entry = REXML::Element.new 'uses-library'
    entry.add_attribute 'android:name', name
    entry.add_attribute 'android:required', 'false'
    entry
  end
  manifest.elements.each('application') do |app|
    libs.each { |entry| app.add entry }
  end
end
# Rebuilds the application's Android res/ tree from the stock Rhodes
# resources, substituting the app's launcher icon and display name, and
# regenerates AndroidR.java for the application's package.
# Relies on globals set by config:android: $appres, $rhores, $app_path,
# $rho_android_r, $app_android_r, $app_package_name.
def set_app_name_android(newname)
  puts "set_app_name"
  $stdout.flush

  # Start from a clean copy of the stock Rhodes resources.
  rm_rf $appres
  cp_r $rhores, $appres

  # Swap in the application's own launcher icon.
  iconappname = File.join($app_path, "icon", "icon.png")
  iconresname = File.join($appres, "drawable", "icon.png")
  rm_f iconresname
  cp iconappname, iconresname

  # Patch app_name in the copied strings.xml with the display name.
  rhostrings = File.join($rhores, "values", "strings.xml")
  appstrings = File.join($appres, "values", "strings.xml")
  doc = REXML::Document.new(File.new(rhostrings))
  doc.elements["resources/string[@name='app_name']"].text = newname
  File.open(appstrings, "w") { |f| doc.write f }

  # Rewrite the generated AndroidR.java to import the application's R class
  # instead of the stock com.rhomobile.* one.
  buf = File.new($rho_android_r,"r").read.gsub(/^\s*import com\.rhomobile\..*\.R;\s*$/,"\nimport #{$app_package_name}.R;\n")
  File.open($app_android_r,"w") { |f| f.write(buf) }
end
# Runs aapt to generate R.java for the application package from the
# manifest, resources and assets prepared in $tmpdir, writing the result
# into $app_rjava_dir. Raises if aapt exits with a non-zero status.
def generate_rjava
  manifest = $appmanifest
  resource = $appres
  assets = Jake.get_absolute File.join($tmpdir, 'assets')
  # NOTE(review): nativelibs is computed here but never used.
  nativelibs = Jake.get_absolute(File.join($androidpath, "Rhodes", "libs"))
  #rjava = Jake.get_absolute(File.join($androidpath, "Rhodes", "gen", "com", "rhomobile", "rhodes"))
  args = ["package", "-f", "-M", manifest, "-S", resource, "-A", assets, "-I", $androidjar, "-J", $app_rjava_dir]
  Jake.run($aapt, args)
  unless $?.success?
    raise "Error in AAPT"
  end
end
# True when +arg+ is one of the strings accepted as "enabled" in
# rhobuild.yml / build.yml configuration values.
def get_boolean(arg)
  %w[true yes enabled enable 1].include?(arg)
end
namespace "config" do
# Records the active build platform so shared config tasks know an
# Android build is being produced.
task :set_android_platform do
  $current_platform = "android"
end
# Master Android configuration task: resolves SDK/NDK locations, signing
# settings, capabilities, package naming and all derived build paths into
# the global variables consumed by the build:android:* tasks.
task :android => :set_android_platform do
  Rake::Task["config:common"].invoke

  $java = $config["env"]["paths"]["java"]

  # Optional Motorola NEON root; application config overrides rhobuild.yml.
  $neon_root = nil
  $neon_root = $config["env"]["paths"]["neon"] unless $config["env"]["paths"].nil?
  if !($app_config["paths"].nil? or $app_config["paths"]["neon"].nil?)
    $neon_root = $app_config["paths"]["neon"]
  end

  # SDK and NDK locations are mandatory.
  $androidsdkpath = $config["env"]["paths"]["android"]
  unless File.exists? $androidsdkpath
    puts "Missing or invalid 'android' section in rhobuild.yml: '#{$androidsdkpath}'"
    exit 1
  end
  $androidndkpath = $config["env"]["paths"]["android-ndk"]
  unless File.exists? $androidndkpath
    puts "Missing or invalid 'android-ndk' section in rhobuild.yml: '#{$androidndkpath}'"
    exit 1
  end

  # The Google toolchain cannot cope with spaces in the NDK path.
  errfmt = "WARNING!!! Path to Android %s contain spaces! It will not work because of the Google toolchain restrictions. Move it to another location and reconfigure rhodes."
  if $androidndkpath =~ /\s/
    puts(errfmt % "NDK")
    exit 1
  end

  # Requested SDK level range: app config wins, then rhobuild.yml, then default.
  $min_sdk_level = $app_config["android"]["minSDK"] unless $app_config["android"].nil?
  $min_sdk_level = $config["android"]["minSDK"] if $min_sdk_level.nil? and not $config["android"].nil?
  $min_sdk_level = $min_sdk_level.to_i unless $min_sdk_level.nil?
  $min_sdk_level = ANDROID_SDK_LEVEL if $min_sdk_level.nil?
  $max_sdk_level = $app_config["android"]["maxSDK"] unless $app_config["android"].nil?

  $androidplatform = AndroidTools.fill_api_levels $androidsdkpath
  if $androidplatform == nil
    puts "No Android platform found at SDK path: '#{$androidsdkpath}'"
    exit 1
  end

  android_api_levels = AndroidTools.get_installed_api_levels
  android_api_levels.sort!
  $found_api_level = android_api_levels.last

  # Google Maps API key and geo-mapping switches.
  $gapikey = $app_config["android"]["apikey"] unless $app_config["android"].nil?
  $gapikey = $config["android"]["apikey"] if $gapikey.nil? and not $config["android"].nil?
  $gapikey = '' unless $gapikey.is_a? String
  $gapikey = nil if $gapikey.empty?

  $android_orientation = $app_config["android"]["orientation"] unless $app_config["android"].nil?

  $use_geomapping = $app_config["android"]["mapping"] unless $app_config["android"].nil?
  $use_geomapping = $config["android"]["mapping"] if $use_geomapping.nil? and not $config["android"].nil?
  $use_geomapping = 'false' if $use_geomapping.nil?
  $use_geomapping = get_boolean($use_geomapping.to_s)

  $use_google_addon_api = false
  $use_google_addon_api = true if $use_geomapping
  #Additionally $use_google_addon_api set to true if PUSH capability is enabled
  puts "Use Google addon API: #{$use_google_addon_api}" if USE_TRACES

  $uri_scheme = $app_config["android"]["URIScheme"] unless $app_config["android"].nil?
  $uri_scheme = "http" if $uri_scheme.nil?
  $uri_host = $app_config["android"]["URIHost"] unless $app_config["android"].nil?

  # Release/debug switch for the native libraries: a missing "debug" key or
  # a value of 0 selects a release build.
  # FIX(review): was `!$app_config["debug"].to_i`, which is always false in
  # Ruby (every Integer is truthy), so the mere presence of a "debug" key
  # forced a debug build regardless of its value.
  if $app_config["debug"].nil?
    $build_release = true
  else
    $build_release = $app_config["debug"].to_i == 0
  end

  # Derived directory layout and generated-file locations.
  $androidpath = Jake.get_absolute $config["build"]["androidpath"]
  $bindir = File.join($app_path, "bin")
  $rhobindir = File.join($androidpath, "bin")
  $builddir = File.join($androidpath, "build")
  $shareddir = File.join($androidpath, "..", "shared")
  $srcdir = File.join($bindir, "RhoBundle")
  $targetdir = File.join($bindir,'target','android')
  $excludelib = ['**/builtinME.rb','**/ServeME.rb','**/dateME.rb','**/rationalME.rb']
  $tmpdir = File.join($bindir, "tmp")
  #$rhomanifest = File.join $androidpath, "Rhodes", "AndroidManifest.xml"
  $rhomanifesterb = File.join $androidpath, "Rhodes", "AndroidManifest.xml.erb"
  $appmanifest = File.join $tmpdir, "AndroidManifest.xml"
  $rhores = File.join $androidpath, "Rhodes", "res"
  $appres = File.join $tmpdir, "res"
  $appincdir = File.join $tmpdir, "include"
  $rho_android_r = File.join $androidpath, "Rhodes", "src", "com", "rhomobile", "rhodes", "AndroidR.java"
  $app_android_r = File.join $tmpdir, "AndroidR.java"
  $app_rjava_dir = File.join $tmpdir
  $app_native_libs_java = File.join $tmpdir, "NativeLibraries.java"
  $app_capabilities_java = File.join $tmpdir, "Capabilities.java"
  $app_push_java = File.join $tmpdir, "Push.java"
  $app_startup_listeners_java = File.join $tmpdir, "RhodesStartupListeners.java"

  if RUBY_PLATFORM =~ /(win|w)32$/
    $bat_ext = ".bat"
    $exe_ext = ".exe"
    $path_separator = ";"
    # Add PATH to cygwin1.dll
    ENV['CYGWIN'] = 'nodosfilewarning'
    if $path_cygwin_modified.nil?
      ENV['PATH'] = Jake.get_absolute("res/build-tools") + ";" + ENV['PATH']
      # FIX(review): was a local `path_cygwin_modified = true`, which never
      # set the global guard, so PATH was prepended again on every pass.
      $path_cygwin_modified = true
    end
  else
    #XXX make these absolute
    $bat_ext = ""
    $exe_ext = ""
    $path_separator = ":"
    # TODO: add ruby executable for Linux
  end

  puts "+++ Looking for platform..." if USE_TRACES

  # Fall back to the platform matching minSDK if fill_api_levels found none.
  if $androidplatform.nil?
    ajar = File.join($androidsdkpath, 'platforms', 'android-' + $min_sdk_level.to_s, 'android.jar')
    if USE_TRACES
      puts "Using target path: "+ ajar
    end
    $androidplatform = 'android-' + $min_sdk_level.to_s if File.file?(ajar)
  end

  if $androidplatform.nil?
    puts "+++ No required platform (API level >= #{$min_sdk_level}) found, can't proceed"
    puts "+++ Looks like you have no installed required Android platform package."
    puts "+++ To solve that, please strictly follow instructions from http://wiki.rhomobile.com/index.php/BuildingRhodes#Prerequisites_5"
    exit 1
  else
    puts "+++ Platform found: #{$androidplatform}" if USE_TRACES
  end
  $stdout.flush

  # SDK tool locations; newer SDKs moved several tools into platform-tools/.
  $dx = File.join( $androidsdkpath, "platforms", $androidplatform, "tools", "dx" + $bat_ext )
  $dx = File.join( $androidsdkpath, "platform-tools", "dx" + $bat_ext ) unless File.exists? $dx
  $aapt = File.join( $androidsdkpath, "platforms", $androidplatform, "tools", "aapt" + $exe_ext )
  $aapt = File.join( $androidsdkpath, "platform-tools", "aapt" + $exe_ext ) unless File.exists? $aapt
  $apkbuilder = File.join( $androidsdkpath, "tools", "apkbuilder" + $bat_ext )
  $androidbin = File.join( $androidsdkpath, "tools", "android" + $bat_ext )
  $adb = File.join( $androidsdkpath, "tools", "adb" + $exe_ext )
  $adb = File.join( $androidsdkpath, "platform-tools", "adb" + $exe_ext ) unless File.exists? $adb
  $zipalign = File.join( $androidsdkpath, "tools", "zipalign" + $exe_ext )
  $androidjar = File.join($androidsdkpath, "platforms", $androidplatform, "android.jar")
  $dxjar = File.join( $androidsdkpath, "platforms", $androidplatform, "tools", "lib", "dx.jar")
  $dxjar = File.join( $androidsdkpath, "platform-tools", "lib", "dx.jar") unless File.exists? $dxjar
  $keytool = File.join( $java, "keytool" + $exe_ext )
  $jarsigner = File.join( $java, "jarsigner" + $exe_ext )
  $jarbin = File.join( $java, "jar" + $exe_ext )

  # Code-signing settings; the app's production section wins, falling back
  # to rhobuild.yml and finally the shared development keystore.
  $keystore = nil
  $keystore = $app_config["android"]["production"]["certificate"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
  $keystore = $config["android"]["production"]["certificate"] if $keystore.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
  $keystore = File.expand_path($keystore, $app_path) unless $keystore.nil?
  $keystore = File.expand_path(File.join(ENV['HOME'], ".rhomobile", "keystore")) if $keystore.nil?
  $storepass = nil
  $storepass = $app_config["android"]["production"]["password"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
  $storepass = $config["android"]["production"]["password"] if $storepass.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
  $storepass = "81719ef3a881469d96debda3112854eb" if $storepass.nil?
  $keypass = $storepass
  $storealias = nil
  $storealias = $app_config["android"]["production"]["alias"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
  $storealias = $config["android"]["production"]["alias"] if $storealias.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
  $storealias = "rhomobile.keystore" if $storealias.nil?

  # Normalize capabilities: force the always-on set and drop non-strings.
  $app_config["capabilities"] += ANDROID_CAPS_ALWAYS_ENABLED
  $app_config["capabilities"].map! { |cap| cap.is_a?(String) ? cap : nil }.delete_if { |cap| cap.nil? }
  $use_google_addon_api = true unless $app_config["capabilities"].index("push").nil?

  # Application name, vendor and derived Java package name.
  $appname = $app_config["name"]
  $appname = "Rhodes" if $appname.nil?
  $vendor = $app_config["vendor"]
  if $vendor.nil?
    if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
      $vendor = 'rhomobile'
    else
      $vendor = 'motorolasolutions'
    end
  end
  $vendor = $vendor.gsub(/^[^A-Za-z]/, '_').gsub(/[^A-Za-z0-9]/, '_').gsub(/_+/, '_').downcase
  $app_package_name = $app_config["android"] ? $app_config["android"]["package_name"] : nil
  $app_package_name = "com.#{$vendor}." + $appname.downcase.gsub(/[^A-Za-z_0-9]/, '') unless $app_package_name
  # Java package segments must not start with a digit.
  $app_package_name.gsub!(/\.[\d]/, "._")
  puts "$vendor = #{$vendor}"
  puts "$app_package_name = #{$app_package_name}"

  if $uri_host.nil?
    if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
      $uri_host = 'rhomobile.com'
    else
      $uri_host = 'motorolasolutions.com'
    end
    $uri_path_prefix = "/#{$app_package_name}"
  end

  unless $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
    $use_motosol_api = true
    $use_motosol_api_classpath = true unless $app_config['capabilities'].index('motoroladev').nil?
    raise 'Cannot use Motorola SDK addon and Google SDK addon together!' if $use_google_addon_api
  end

  $applog_path = nil
  $applog_file = $app_config["applog"]
  if !$applog_file.nil?
    $applog_path = File.join( $app_path, $applog_file )
  end

  if $min_sdk_level > $found_api_level
    raise "Latest installed Android platform '#{$androidplatform}' does not meet minSdk '#{$min_sdk_level}' requirement"
  end

  # Look for Motorola SDK addon
  if $use_motosol_api_classpath
    puts "Looking for Motorola API SDK add-on..." if USE_TRACES
    motosol_jars = ['com.motorolasolutions.scanner']
    motosol_jars << 'com.motorolasolutions.emdk.msr'
    $motosol_classpath = AndroidTools::get_addon_classpath(motosol_jars)
  end

  # Detect Google API add-on path
  if $use_google_addon_api
    puts "Looking for Google API SDK add-on..." if USE_TRACES
    google_jars = ['com.google.android.maps']
    $google_classpath = AndroidTools::get_addon_classpath(google_jars, $found_api_level)
  end

  setup_ndk($androidndkpath, $found_api_level)

  $std_includes = File.join $androidndkpath, "sources", "cxx-stl", "stlport", "stlport"
  unless File.directory? $std_includes
    $stlport_includes = File.join $shareddir, "stlport", "stlport"
    # NOTE(review): reassigns the USE_OWN_STLPORT constant defined at the top
    # of this file; Ruby emits an "already initialized constant" warning.
    USE_OWN_STLPORT = true
  end

  # Native static libraries produced per configuration.
  $native_libs = ["sqlite", "curl", "stlport", "ruby", "json", "rhocommon", "rhodb", "rholog", "rhosync", "rhomain"]

  if $build_release
    $confdir = "release"
  else
    $confdir = "debug"
  end
  $app_builddir = File.join($bindir,'target','android',$confdir)

  $objdir = {}
  $libname = {}
  $native_libs.each do |x|
    $objdir[x] = File.join($tmpdir,x)
    $libname[x] = File.join($app_builddir,x,"lib#{x}.a")
  end

  # Push (C2DM) sender account and notification mode.
  $push_sender = nil
  $push_sender = $config["android"]["push"]["sender"] if !$config["android"].nil? and !$config["android"]["push"].nil?
  $push_sender = $app_config["android"]["push"]["sender"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
  $push_sender = "support@rhomobile.com" if $push_sender.nil?
  $push_notifications = nil
  $push_notifications = $app_config["android"]["push"]["notifications"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
  $push_notifications = "none" if $push_notifications.nil?

  mkdir_p $bindir if not File.exists? $bindir
  mkdir_p $rhobindir if not File.exists? $rhobindir
  mkdir_p $targetdir if not File.exists? $targetdir
  mkdir_p $srcdir if not File.exists? $srcdir
end #task 'config:android'
namespace 'android' do
# Scans each configured extension path for every extension named in the
# application config, reads its optional ext.yml, and records how to build
# it (script vs. rakefile vs. prebuilt binaries), plus any manifest edits,
# resource add-ons, extra Java sources and extra native libraries, into the
# $ext_android_* globals consumed by build:android:extensions.
task :extensions => ['config:android', 'build:bundle:noxruby'] do
  $ext_android_rhodes_activity_listener = []
  $ext_android_additional_sources = {}
  $ext_android_additional_lib = []
  $ext_android_build_scripts = {}
  $ext_android_manifest_changes = {}
  $ext_android_adds = {}

  $app_config["extensions"].each do |ext|
    puts "#{ext} is processing..."
    $app_config["extpaths"].each do |p|
      extpath = File.join(p, ext, 'ext')
      puts "Checking extpath: #{extpath}"
      if File.exists? extpath and File.directory? extpath
        puts "#{extpath} is configuring..."
        extyml = File.join(p, ext,"ext.yml")
        if File.file? extyml
          puts "#{extyml} is processing..."
          extconf = Jake.config(File.open(extyml))
          extconf_android = extconf['android']
          # Build type defaults to 'build' (shell script) unless ext.yml
          # says 'prebuilt' or 'rakefile'.
          exttype = 'build'
          exttype = extconf_android['exttype'] if extconf_android and extconf_android['exttype']
          addspath = File.join($app_builddir,'extensions',ext,'adds')
          # For prebuilt extensions, locate the single 'android' directory
          # holding the shipped binaries.
          prebuiltpath = nil
          if exttype == 'prebuilt'
            prebuiltpath = Dir.glob(File.join(extpath, '**', 'android'))
            if prebuiltpath.count == 1
              prebuiltpath = prebuiltpath.first
            else
              raise "android:exttype is 'prebuilt' but prebuilt path is not found #{prebuiltpath.inspect}"
            end
          end
          # Optional RhodesActivity lifecycle listener class.
          android_listener = extconf["android_rhodes_activity_listener"]
          android_listener = extconf_android['rhodes_listener'] if android_listener.nil? and extconf_android
          $ext_android_rhodes_activity_listener << android_listener unless android_listener.nil?
          # Manifest edits: explicit list from ext.yml, or discovered inside
          # a prebuilt extension's adds/ directory (.rb hook, .erb templates,
          # or a plain AndroidManifest.xml as last resort).
          manifest_changes = extconf["android_manifest_changes"]
          manifest_changes = extconf_android['manifest_changes'] if manifest_changes.nil? and extconf_android
          if manifest_changes
            manifest_changes = [manifest_changes] unless manifest_changes.is_a? Array
            manifest_changes.map! { |path| File.join(p,ext,path) }
          else
            if prebuiltpath
              manifest_changes = []
              path = File.join(prebuiltpath,'adds','AndroidManifest.rb')
              manifest_changes << path if File.file? path
              templates = Dir.glob File.join(prebuiltpath,'adds','*.erb')
              manifest_changes += templates
              if templates.empty?
                path = File.join(prebuiltpath,'adds','AndroidManifest.xml')
                manifest_changes << path if File.file? path
              end
            end
          end
          if manifest_changes
            $ext_android_manifest_changes[ext] = manifest_changes
          end
          # Resource add-ons: explicit directory from ext.yml, or the
          # prebuilt extension's adds/ directory when present.
          resource_addons = extconf["android_resources_addons"]
          resource_addons = extconf_android['adds'] if resource_addons.nil? and extconf_android
          if resource_addons
            resource_addons = File.join(p, ext, resource_addons)
          else
            if prebuiltpath
              resource_addons = File.join(prebuiltpath,'adds')
              resource_addons = nil unless File.directory? resource_addons
            end
          end
          if resource_addons
            $ext_android_adds[ext] = resource_addons
          end
          # Extra Java sources compiled into the application.
          additional_sources = extconf["android_additional_sources_list"]
          additional_sources = extconf_android['source_list'] if additional_sources.nil? and extconf_android
          unless additional_sources.nil?
            ext_sources_list = File.join(p, ext, additional_sources)
            if File.exists? ext_sources_list
              $ext_android_additional_sources[File.join(p, ext)] = ext_sources_list
            else
              raise "Extension java source list is missed: #{ext_sources_list}"
            end
          else
            puts "No additional java sources for '#{ext}'"
          end
          # There is no 'additional_libs' param in the android section;
          # libraries go into the android adds folder instead.
          android_additional_lib = extconf["android_additional_lib"]
          if android_additional_lib != nil
            android_additional_lib.each do |lib|
              $ext_android_additional_lib << File.join(p, ext, lib)
            end
          end
          # Copy a prebuilt extension's static libs, jars and shared objects
          # into the build tree ('noautoload' .so files kept separate).
          if prebuiltpath
            targetpath = File.join $app_builddir,'extensions',ext
            libaddspath = File.join addspath,'lib','armeabi'
            mkdir_p targetpath
            Dir.glob(File.join(prebuiltpath,'lib*.a')).each do |lib|
              cp lib, targetpath
            end
            Dir.glob(File.join(prebuiltpath,'*.jar')).each do |lib|
              cp lib, targetpath
            end
            Dir.glob(File.join(prebuiltpath,'**','lib*.so')).each do |lib|
              next if lib =~ /adds/
              if lib =~ /noautoload/
                mkdir_p File.join(libaddspath,'noautoload')
                cp lib, File.join(libaddspath,'noautoload')
              else
                mkdir_p libaddspath
                cp lib, libaddspath
              end
            end
          end
          puts "#{extyml} is processed"
        end
        # NOTE(review): when ext.yml is absent, exttype is nil here and the
        # extension falls through to the build-script branch below.
        if exttype == 'rakefile'
          rakedir = Dir.glob File.join(extpath,'**','android')
          $ext_android_build_scripts[ext] = [rakedir.first, 'rake']
        else
          build_script = File.join(extpath, 'build' + $bat_ext)
          if File.exists? build_script
            if RUBY_PLATFORM =~ /(win|w)32$/
              $ext_android_build_scripts[ext] = [extpath, 'build.bat']
            else
              $ext_android_build_scripts[ext] = [extpath, File.join('.', 'build' + $bat_ext)]
            end
          end
        end
        puts "#{extpath} is configured"
        # Stop at the first matching extpath so two extensions with the
        # same name are never built.
        break
      end # exists?
    end # $app_config["extpaths"].each
  end # $app_config["extensions"].each

  puts "Extensions' java source lists: #{$ext_android_additional_sources.inspect}"
end #task :extensions
# Emulator configuration: resolves the emulator binary, the emulator
# platform version and the installed SDK targets (filtered to Google or
# Motorola add-on targets when those APIs are in use).
task :emulator=>"config:android" do
  # adb flag selecting the emulator instance rather than a device.
  $device_flag = "-e"

  $emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
  $emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
  if RUBY_PLATFORM =~ /(win|w)32$/
    $emulator = #"cmd /c " +
      File.join( $androidsdkpath, "tools", "emulator.exe" )
  else
    $emulator = File.join( $androidsdkpath, "tools", "emulator" )
  end
  $emuversion = AndroidTools.get_market_version($min_sdk_level) if $emuversion.nil?
  if $emuversion.nil?
    # NOTE(review): $emuversion is nil here, so the interpolation below
    # always prints an empty version in the message.
    raise "Wrong Android emulator version: #{$emuversion}. Android SDK target API is not installed"
  end

  # Detect android targets
  # Parse `android list targets`, mapping API level -> target id. When an
  # add-on API is required, only matching add-on targets are recorded.
  $androidtargets = {}
  id = nil
  `"#{$androidbin}" list targets`.split(/\n/).each do |line|
    line.chomp!
    if line =~ /^id:\s+([0-9]+)/
      id = $1
      if $use_google_addon_api
        if line =~ /Google Inc\.:Google APIs:([0-9]+)/
          apilevel = $1
          $androidtargets[apilevel.to_i] = id.to_i
        end
      else
        if $use_motosol_api
          if line =~ /MotorolaSolutions\s+Inc\.:MotorolaSolution\s+Value\s+Add\s+APIs.*:([0-9]+)/
            apilevel = $1
            $androidtargets[apilevel.to_i] = id.to_i
          end
        end
      end
    end
    # Plain platform targets report their API level on a separate line.
    unless $use_google_addon_api or $use_motosol_api
      if line =~ /^\s+API\s+level:\s+([0-9]+)$/
        apilevel = $1
        $androidtargets[apilevel.to_i] = id.to_i
      end
    end
  end

  if USE_TRACES
    puts "Android emulator version: #{$emuversion}"
    puts "Android targets:"
    puts $androidtargets.inspect
  end

  $emuversion = $emuversion.to_s

  # Optional named AVD to run instead of an auto-created one.
  $appavdname = $app_config["android"]["emulator"] if $app_config["android"] != nil && $app_config["android"].length > 0
  $appavdname = $config["android"]["emulator"] if $appavdname.nil? and !$config["android"].nil? and $config["android"].length > 0
end # task 'config:android:emulator'
# Device configuration: select the adb flag targeting a physical device.
task :device=>"config:android" do
  $device_flag = "-d"
end
end #namespace 'config:android'
end
namespace "build" do
namespace "android" do
desc "Build RhoBundle for android"
# Assemble the app bundle under $tmpdir/assets: copies apps/db/lib, writes
# a content-hash marker and app-name marker, and generates the rho.dat map.
task :rhobundle => ["config:android", :extensions] do
Rake::Task["build:bundle:noxruby"].invoke
#assets = File.join(Jake.get_absolute($androidpath), "Rhodes", "assets")
assets = File.join $tmpdir, 'assets'
rm_rf assets
mkdir_p assets
hash = nil
["apps", "db", "lib"].each do |d|
cp_r File.join($srcdir, d), assets, :preserve => true
# Calculate hash of directories
hash = get_dir_hash(File.join($srcdir, d), hash)
end
File.open(File.join($srcdir, "hash"), "w") { |f| f.write(hash.hexdigest) }
File.open(File.join($srcdir, "name"), "w") { |f| f.write($appname) }
Jake.build_file_map($srcdir, "rho.dat")
# Second copy pass picks up the freshly generated hash/name/rho.dat files.
["apps", "db", "lib", "hash", "name", "rho.dat"].each do |d|
cp_r File.join($srcdir, d), assets, :preserve => true
end
end
desc "Build RhoBundle for Eclipse project"
# Mirror the built assets into the Eclipse Rhodes project directory.
task :eclipsebundle => "build:android:rhobundle" do
assets = File.join $tmpdir, 'assets'
eclipse_assets = File.join(Jake.get_absolute($androidpath), "Rhodes", "assets")
rm_rf eclipse_assets
cp_r assets, eclipse_assets, :preserve => true
end
desc 'Building native extensions'
# Run each registered native-extension build script, then collect the
# AndroidManifest patch files and extra resource directories the
# extensions contribute into $app_builddir/extensions/<ext>/adds.
#
# Fixes vs. previous revision: corrected the typo in the task description
# ("extensioons") and flattened the nested extname dispatch to if/elsif.
task :extensions => ["config:android:extensions", :genconfig] do
  Rake::Task["build:bundle:noxruby"].invoke

  # Environment contract consumed by extension build scripts.
  ENV['RHO_PLATFORM'] = 'android'
  ENV["RHO_APP_DIR"] = $app_path
  ENV["ANDROID_NDK"] = $androidndkpath
  ENV["ANDROID_API_LEVEL"] = $found_api_level.to_s
  ENV["RHO_ROOT"] = $startdir
  ENV["BUILD_DIR"] ||= $startdir + "/platform/android/build"
  ENV["RHO_INC"] = $appincdir
  ENV["RHO_RES"] = $appres
  ENV["RHO_ANDROID_TMP_DIR"] = $tmpdir
  ENV["NEON_ROOT"] = $neon_root unless $neon_root.nil?
  ENV["CONFIG_XML"] = $config_xml unless $config_xml.nil?

  $ext_android_build_scripts.each do |ext, builddata|
    # builddata is [script_dir, script_name]; per-extension output dirs.
    ENV["TARGET_TEMP_DIR"] = File.join($app_builddir,'extensions',ext)
    ENV['TEMP_FILES_DIR'] = File.join($tmpdir,ext)
    mkdir_p ENV["TARGET_TEMP_DIR"] unless File.directory? ENV["TARGET_TEMP_DIR"]
    mkdir_p ENV["TEMP_FILES_DIR"] unless File.directory? ENV["TEMP_FILES_DIR"]
    puts "Executing extension build script: #{ext}"
    if RUBY_PLATFORM =~ /(win|w)32$/ || (builddata[1] == 'rake')
      Jake.run(builddata[1], [], builddata[0])
    else
      currentdir = Dir.pwd()
      Dir.chdir builddata[0]
      sh %{$SHELL #{builddata[1]}}
      Dir.chdir currentdir
    end
    raise "Cannot build #{builddata[0]}" unless $?.success?
    puts "Extension build script finished"
  end

  # Copy AndroidManifest patch files contributed by each extension.
  $ext_android_manifest_changes.each do |ext, manifest_changes|
    addspath = File.join($app_builddir,'extensions',ext,'adds')
    mkdir_p addspath
    manifest_changes.each do |path|
      if File.extname(path) == '.xml'
        cp path, File.join(addspath,'AndroidManifest.xml')
      elsif File.extname(path) == '.rb'
        cp path, File.join(addspath,'AndroidManifest.rb')
      elsif File.extname(path) == '.erb'
        cp path, addspath
      else
        raise "Wrong AndroidManifest patch file: #{path}"
      end
    end
  end

  # Copy extra resource directories an extension supplies.
  $ext_android_adds.each do |ext, path|
    addspath = File.join($app_builddir,'extensions',ext,'adds')
    mkdir_p addspath
    Dir.glob(File.join(path,'*')).each do |add|
      cp_r add, addspath if File.directory? add
    end
  end
end #task :extensions
# Compile the bundled SQLite sources into the static library configured in
# $objdir["sqlite"] / $libname["sqlite"].
task :libsqlite => "config:android" do
srcdir = File.join($shareddir, "sqlite")
objdir = $objdir["sqlite"]
libname = $libname["sqlite"]
mkdir_p objdir
mkdir_p File.dirname(libname)
cc_build 'libsqlite', objdir, ["-I\"#{srcdir}\"", "-I\"#{$shareddir}\""] or exit 1
# Archive every compiled object file into the .a library.
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile bundled libcurl sources into a static library. The recipe below
# documents how curl_config.h was originally produced for Android.
task :libcurl => "config:android" do
# Steps to get curl_config.h from fresh libcurl sources:
#export PATH=<ndkroot>/build/prebuilt/linux-x86/arm-eabi-4.2.1/bin:$PATH
#export CC=arm-eabi-gcc
#export CPP=arm-eabi-cpp
#export CFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
#export CPPFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
#./configure --without-ssl --without-ca-bundle --without-ca-path --without-libssh2 --without-libidn --disable-ldap --disable-ldaps --host=arm-eabi
srcdir = File.join $shareddir, "curl", "lib"
objdir = $objdir["curl"]
libname = $libname["curl"]
mkdir_p objdir
mkdir_p File.dirname(libname)
args = []
args << "-DHAVE_CONFIG_H"
args << "-I\"#{srcdir}/../include\""
args << "-I\"#{srcdir}\""
args << "-I\"#{$shareddir}\""
cc_build 'libcurl', objdir, args or exit 1
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the embedded Ruby interpreter sources into a static library.
# Warning suppressions match the upstream ruby sources.
task :libruby => "config:android" do
srcdir = File.join $shareddir, "ruby"
objdir = $objdir["ruby"]
libname = $libname["ruby"]
mkdir_p objdir
mkdir_p File.dirname(libname)
args = []
args << "-Wno-uninitialized"
args << "-Wno-missing-field-initializers"
args << "-I\"#{srcdir}/include\""
args << "-I\"#{srcdir}/android\""
args << "-I\"#{srcdir}/generated\""
args << "-I\"#{srcdir}\""
args << "-I\"#{srcdir}/..\""
args << "-I\"#{srcdir}/../sqlite\""
# Optional STL configuration (shared with the other lib* tasks).
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
cc_build 'libruby', objdir, args or exit 1
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the bundled JSON parser sources into the static library configured
# in $objdir["json"] / $libname["json"].
task :libjson => "config:android" do
  json_src = File.join($shareddir, "json")
  obj_dir = $objdir["json"]
  lib_path = $libname["json"]
  mkdir_p obj_dir
  mkdir_p File.dirname(lib_path)

  # Compiler flags: json sources, shared root, plus optional STL settings.
  flags = ["-I\"#{json_src}\"", "-I\"#{json_src}/..\""]
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  cc_build('libjson', obj_dir, flags) or exit 1
  objects = Dir.glob(obj_dir + "/**/*.o").collect { |o| '"' + o + '"' }
  cc_ar('"' + lib_path + '"', objects) or exit 1
end
# Compile the bundled STLport sources into a static library; a no-op
# unless the build is configured to use its own STLport.
task :libstlport => "config:android" do
if USE_OWN_STLPORT
objdir = $objdir["stlport"]
libname = $libname["stlport"]
mkdir_p objdir
mkdir_p File.dirname(libname)
args = []
args << "-I\"#{$stlport_includes}\""
args << "-DTARGET_OS=android"
args << "-DOSNAME=android"
args << "-DCOMPILER_NAME=gcc"
args << "-DBUILD_OSNAME=android"
args << "-D_REENTRANT"
args << "-D__NEW__"
args << "-ffunction-sections"
args << "-fdata-sections"
args << "-fno-rtti"
args << "-fno-exceptions"
cc_build 'libstlport', objdir, args or exit 1
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
end
# Compile the logging subsystem sources into the static library configured
# in $objdir["rholog"] / $libname["rholog"].
task :librholog => "config:android" do
  log_src = File.join($shareddir, "logging")
  obj_dir = $objdir["rholog"]
  lib_path = $libname["rholog"]
  [obj_dir, File.dirname(lib_path)].each { |d| mkdir_p d }

  # Include path is the shared root (parent of the logging dir), plus
  # optional STL configuration.
  cflags = ["-I\"#{log_src}/..\""]
  cflags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cflags << "-D__NEW__"
    cflags << "-I\"#{$stlport_includes}\""
  end

  cc_build('librholog', obj_dir, cflags) or exit 1
  objs = Dir.glob(obj_dir + "/**/*.o").collect { |o| '"' + o + '"' }
  cc_ar('"' + lib_path + '"', objs) or exit 1
end
# Compile the rhodes "main" sources (shared root) into the static library
# configured in $objdir["rhomain"] / $libname["rhomain"].
task :librhomain => "config:android" do
  main_src = $shareddir
  obj_dir = $objdir["rhomain"]
  lib_path = $libname["rhomain"]
  [obj_dir, File.dirname(lib_path)].each { |d| mkdir_p d }

  cflags = ["-I\"#{main_src}\""]
  cflags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cflags << "-D__NEW__"
    cflags << "-I\"#{$stlport_includes}\""
  end

  cc_build('librhomain', obj_dir, cflags) or exit 1
  objs = Dir.glob(obj_dir + "/**/*.o").collect { |o| '"' + o + '"' }
  cc_ar('"' + lib_path + '"', objs) or exit 1
end
# Compile common shared sources (net, utils, ...) into a static library.
task :librhocommon => "config:android" do
objdir = $objdir["rhocommon"]
libname = $libname["rhocommon"]
mkdir_p objdir
mkdir_p File.dirname(libname)
args = []
args << "-I\"#{$shareddir}\""
args << "-I\"#{$shareddir}/curl/include\""
args << "-I\"#{$shareddir}/ruby/include\""
args << "-I\"#{$shareddir}/ruby/android\""
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
cc_build 'librhocommon', objdir, args or exit 1
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the database layer (on top of the bundled SQLite) into a
# static library.
task :librhodb => "config:android" do
srcdir = File.join $shareddir, "db"
objdir = $objdir["rhodb"]
libname = $libname["rhodb"]
mkdir_p objdir
mkdir_p File.dirname(libname)
args = []
args << "-I\"#{srcdir}\""
args << "-I\"#{srcdir}/..\""
args << "-I\"#{srcdir}/../sqlite\""
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
cc_build 'librhodb', objdir, args or exit 1
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the sync engine sources into a static library.
task :librhosync => "config:android" do
srcdir = File.join $shareddir, "sync"
objdir = $objdir["rhosync"]
libname = $libname["rhosync"]
mkdir_p objdir
mkdir_p File.dirname(libname)
args = []
args << "-I\"#{srcdir}\""
args << "-I\"#{srcdir}/..\""
args << "-I\"#{srcdir}/../sqlite\""
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
cc_build 'librhosync', objdir, args or exit 1
cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
task :libs => [:libsqlite, :libcurl, :libruby, :libjson, :libstlport, :librhodb, :librhocommon, :librhomain, :librhosync, :librholog]
# Generate build-time configuration sources into $appincdir:
#   genconfig.h      - Google API key define + RHO_CAP_*_ENABLED flags
#   rhocaps.inc      - RHO_DEFINE_CAP(...) list for native code
#   Capabilities.java, Push.java - generated Java configuration classes
# The .h/.inc files are rewritten only when their settings actually change.
task :genconfig => "config:android" do
mkdir_p $appincdir unless File.directory? $appincdir
# Generate genconfig.h
genconfig_h = File.join($appincdir, 'genconfig.h')
gapi_already_enabled = false
caps_already_enabled = {}
#ANDROID_PERMISSIONS.keys.each do |k|
# caps_already_enabled[k] = false
#end
# Parse the existing genconfig.h (if any) to learn its current settings.
if File.file? genconfig_h
File.open(genconfig_h, 'r') do |f|
while line = f.gets
if line =~ /^\s*#\s*define\s+RHO_GOOGLE_API_KEY\s+"[^"]*"\s*$/
gapi_already_enabled = true
else
ANDROID_PERMISSIONS.keys.each do |k|
if line =~ /^\s*#\s*define\s+RHO_CAP_#{k.upcase}_ENABLED\s+(.*)\s*$/
value = $1.strip
if value == 'true'
caps_already_enabled[k] = true
elsif value == 'false'
caps_already_enabled[k] = false
else
raise "Unknown value for the RHO_CAP_#{k.upcase}_ENABLED: #{value}"
end
end
end
end
end
end
end
# Regenerate when the file is missing or any recorded setting changed.
regenerate = false
regenerate = true unless File.file? genconfig_h
regenerate = $use_geomapping != gapi_already_enabled unless regenerate
caps_enabled = {}
ANDROID_PERMISSIONS.keys.each do |k|
caps_enabled[k] = $app_config["capabilities"].index(k) != nil
regenerate = true if caps_already_enabled[k].nil? or caps_enabled[k] != caps_already_enabled[k]
end
puts caps_enabled.inspect
if regenerate
puts "Need to regenerate genconfig.h"
$stdout.flush
File.open(genconfig_h, 'w') do |f|
f.puts "#ifndef RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
f.puts "#define RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
f.puts ""
f.puts "#define RHO_GOOGLE_API_KEY \"#{$gapikey}\"" if $use_geomapping and !$gapikey.nil?
caps_enabled.each do |k,v|
f.puts "#define RHO_CAP_#{k.upcase}_ENABLED #{v ? "true" : "false"}"
end
f.puts ""
f.puts "#endif /* RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F */"
end
else
puts "No need to regenerate genconfig.h"
$stdout.flush
end
# Generate rhocaps.inc
rhocaps_inc = File.join($appincdir, 'rhocaps.inc')
caps_already_defined = []
if File.exists? rhocaps_inc
File.open(rhocaps_inc, 'r') do |f|
while line = f.gets
next unless line =~ /^\s*RHO_DEFINE_CAP\s*\(\s*([A-Z_]*)\s*\)\s*\s*$/
caps_already_defined << $1.downcase
end
end
end
# Rewrite rhocaps.inc only when the capability set differs.
if caps_already_defined.sort.uniq != ANDROID_PERMISSIONS.keys.sort.uniq
puts "Need to regenerate rhocaps.inc"
$stdout.flush
File.open(rhocaps_inc, 'w') do |f|
ANDROID_PERMISSIONS.keys.sort.each do |k|
f.puts "RHO_DEFINE_CAP(#{k.upcase})"
end
end
else
puts "No need to regenerate rhocaps.inc"
$stdout.flush
end
# Generate Capabilities.java
File.open($app_capabilities_java, "w") do |f|
f.puts "package #{JAVA_PACKAGE_NAME};"
f.puts "public class Capabilities {"
ANDROID_PERMISSIONS.keys.sort.each do |k|
val = 'false'
val = 'true' if caps_enabled[k]
f.puts " public static final boolean #{k.upcase}_ENABLED = #{val};"
end
f.puts "}"
end
# Generate Push.java
File.open($app_push_java, "w") do |f|
f.puts "package #{JAVA_PACKAGE_NAME};"
f.puts "public class Push {"
f.puts " public static final String SENDER = \"#{$push_sender}\";"
if $push_notifications.nil?
f.puts " public static final String PUSH_NOTIFICATIONS = \"none\";"
else
f.puts " public static final String PUSH_NOTIFICATIONS = \"#{$push_notifications}\";"
end
f.puts "};"
end
end
# Generate NativeLibraries.java, which loads every built native .so
# (paths containing "noautoload" are skipped), finishing with librhodes.
task :gen_java_ext => "config:android" do
File.open($app_native_libs_java, "w") do |f|
f.puts "package #{JAVA_PACKAGE_NAME};"
f.puts "public class NativeLibraries {"
f.puts " public static void load() {"
f.puts " // Load native .so libraries"
Dir.glob($app_builddir + "/**/lib*.so").reverse.each do |lib|
next if lib =~ /noautoload/
# "libfoo.so" -> "foo" for System.loadLibrary.
libname = File.basename(lib).gsub(/^lib/, '').gsub(/\.so$/, '')
f.puts " System.loadLibrary(\"#{libname}\");"
end
f.puts " // Load native implementation of rhodes"
f.puts " System.loadLibrary(\"rhodes\");"
f.puts " }"
f.puts "};"
end
end
task :gensources => [:genconfig, :gen_java_ext]
# Compile the JNI sources and link librhodes.so against every static
# library built by the lib* tasks plus extension-built .a libraries.
task :librhodes => [:libs, :gensources] do
srcdir = File.join $androidpath, "Rhodes", "jni", "src"
libdir = File.join $app_builddir,'librhodes','lib','armeabi'
objdir = File.join $tmpdir,'librhodes'
libname = File.join libdir,'librhodes.so'
mkdir_p libdir
mkdir_p objdir
# add licence lib to build
lic_dst = File.join $app_builddir,'librhodes','libMotorolaLicence.a'
lic_src = $startdir + "/res/libs/motorolalicence/android/libMotorolaLicence.a"
rm_f lic_dst
cp lic_src, lic_dst
args = []
args << "-I\"#{$appincdir}\""
args << "-I\"#{srcdir}/../include\""
args << "-I\"#{srcdir}/../include/rhodes/details\""
args << "-I\"#{$shareddir}\""
args << "-I\"#{$shareddir}/common\""
args << "-I\"#{$shareddir}/sqlite\""
args << "-I\"#{$shareddir}/curl/include\""
args << "-I\"#{$shareddir}/ruby/include\""
args << "-I\"#{$shareddir}/ruby/android\""
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__SGI_STL_INTERNAL_PAIR_H" if USE_OWN_STLPORT
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
cc_build 'librhodes', objdir, args or exit 1
# Every static library built by the lib* tasks is a link dependency.
deps = []
$libname.each do |k,v|
deps << v
end
args = []
args << "-L\"#{$rhobindir}/#{$confdir}\""
args << "-L\"#{libdir}\""
rlibs = []
rlibs << "log"
rlibs << "dl"
rlibs << "z"
rlibs.map! { |x| "-l#{x}" }
elibs = []
extlibs = Dir.glob($app_builddir + "/**/lib*.a")# + Dir.glob($app_builddir + "/**/lib*.so")
extlibs.each do |lib|
args << "-L\"#{File.dirname(lib)}\""
end
stub = []
extlibs.reverse.each do |f|
lparam = "-l" + File.basename(f).gsub(/^lib/,"").gsub(/\.(a|so)$/,"")
elibs << lparam
# Workaround for GNU ld: this way we have specified one lib multiple times
# command line so ld's dependency mechanism will find required functions
# independently of its position in command line
stub.each do |s|
args << s
end
stub << lparam
end
# NOTE(review): elibs is appended twice — apparently part of the
# repeated-library workaround above; confirm before "simplifying".
args += elibs
args += elibs
args += rlibs
#mkdir_p File.dirname(libname) unless File.directory? File.dirname(libname)
cc_link libname, Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'}, args, deps or exit 1
# Copy into the Eclipse project and strip that copy to reduce size.
destdir = File.join($androidpath, "Rhodes", "libs", "armeabi")
mkdir_p destdir unless File.exists? destdir
cp_r libname, destdir
cc_run($stripbin, ['"'+File.join(destdir, File.basename(libname))+'"'])
end
# Generate AndroidManifest.xml for the application.
#
# Builds a numeric versionCode (major*10000 + minor*100 + patch) from the
# app's "version" setting, collects android.permission entries from the
# declared capabilities, renders the manifest template, then merges in the
# manifest fragments contributed by native extensions
# (adds/AndroidManifest.{xml,rb} and adds/*.erb).
#
# Fix vs. previous revision: the "patch file not found" message referenced
# an undefined local `m` (would raise NameError); it now uses ext_manifest.
task :manifest => ["config:android", :extensions] do
  # Parse "N", "N.N" or "N.N.N" into major/minor/patch components.
  version = {'major' => 0, 'minor' => 0, 'patch' => 0}
  if $app_config["version"]
    if $app_config["version"] =~ /^(\d+)$/
      version["major"] = $1.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
      version["patch"] = $3.to_i
    end
  end
  version = version["major"]*10000 + version["minor"]*100 + version["patch"]

  # Baseline permissions plus those mapped from app capabilities.
  usesPermissions = ['android.permission.INTERNET', 'android.permission.PERSISTENT_ACTIVITY', 'android.permission.WAKE_LOCK']
  $app_config["capabilities"].each do |cap|
    cap = ANDROID_PERMISSIONS[cap]
    next if cap.nil?
    cap = [cap] unless cap.is_a? Array
    cap.each do |cap_item|
      if cap_item.is_a? Proc
        # Proc-valued capability entries are not permission names; skip.
        next
      end
      if cap_item.is_a? String
        usesPermissions << "android.permission.#{cap_item}"
        next
      end
    end
  end
  usesPermissions.uniq!

  hidden = get_boolean($app_config['hidden_app'])

  generator = ManifestGenerator.new JAVA_PACKAGE_NAME, $app_package_name, hidden, usesPermissions
  generator.versionName = $app_config["version"]
  generator.versionCode = version
  generator.installLocation = 'auto'
  generator.minSdkVer = $min_sdk_level
  generator.maxSdkVer = $max_sdk_level
  generator.usesLibraries['com.google.android.maps'] = true if $use_google_addon_api
  generator.addGooglePush(File.join($androidpath,'Rhodes','PushReceiver.erb')) if $app_config["capabilities"].index 'push'
  generator.addUriParams $uri_scheme, $uri_host, $uri_path_prefix

  # Extension-provided generator scripts and templates.
  Dir.glob(File.join($app_builddir,'extensions','*','adds','AndroidManifest.rb')).each do |extscript|
    puts "Evaluating #{extscript}"
    eval(File.new(extscript).read)
  end
  Dir.glob(File.join($app_builddir,'extensions','*','adds','Manifest*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.manifestManifestAdds << exttemplate
  end
  Dir.glob(File.join($app_builddir,'extensions','*','adds','Application*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.applicationManifestAdds << exttemplate
  end

  manifest = generator.render $rhomanifesterb
  File.open($appmanifest, "w") { |f| f.write manifest }

  #######################################################
  # Deprecated stuff below: merge raw AndroidManifest.xml fragments from
  # extensions into the freshly rendered manifest via REXML.
  app_f = File.new($appmanifest)
  manifest_orig_doc = REXML::Document.new(app_f)
  app_f.close
  dst_manifest = manifest_orig_doc.elements["manifest"]
  dst_application = manifest_orig_doc.elements["manifest/application"]
  dst_main_activity = nil
  puts '$$$ try to found MainActivity'
  dst_application.elements.each("activity") do |a|
    puts '$$$ activity with attr = '+a.attribute('name','android').to_s
    if a.attribute('name','android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
      puts ' $$$ FOUND !'
      dst_main_activity = a
    end
  end
  Dir.glob(File.join($app_builddir,'extensions','*','adds','AndroidManifest.xml')).each do |ext_manifest|
    if File.exists? ext_manifest
      puts 'AndroidManifest.xml['+ext_manifest+'] from native extension found !'
      manifest_ext_doc = REXML::Document.new(File.new(ext_manifest))
      src_manifest = manifest_ext_doc.elements["manifest"]
      src_application = manifest_ext_doc.elements["manifest/application"]
      if src_application != nil
        puts 'Extension Manifest process application item :'
        src_application.elements.each do |e|
          puts '$$$ process element with attr = '+e.attribute('name','android').to_s
          # Children of the extension's RhodesActivity element get grafted
          # into the main activity; everything else goes into <application>.
          if e.attribute('name','android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
            e.elements.each do |sube|
              puts ' add item to MainActivity['+sube.xpath+']'
              dst_main_activity.add sube
            end
          else
            puts ' add item ['+e.xpath+']'
            dst_application.add e
          end
        end
      end
      puts 'Extension Manifest process root <manifest> item :'
      src_manifest.elements.each do |e|
        p = e.xpath
        if p != '/manifest/application'
          dst_e = manifest_orig_doc.elements[p]
          if dst_e != nil
            # uses-sdk may appear only once; replace the original.
            if p == '/manifest/uses-sdk'
              puts ' found and delete original item ['+p+']'
              manifest_orig_doc.elements.delete p
            end
          end
          puts ' and new item ['+p+']'
          dst_manifest.add e
        end
      end
    else
      # BUGFIX: was `m` (undefined local) — use the loop variable.
      puts 'AndroidManifest change file ['+ext_manifest+'] from native extension not found !'
    end
  end
  puts 'delete original manifest'
  File.delete($appmanifest)
  updated_f = File.open($appmanifest, "w")
  manifest_orig_doc.write updated_f, 2
  updated_f.close
  puts 'Manifest updated by extension is saved!'
end
# Apply the application display name to the generated Android resources.
task :resources => [:rhobundle, :extensions] do
set_app_name_android($appname)
end
#desc "Build Rhodes for android"
# Compile the Rhodes Java layer: generates the startup-listener source,
# copies extension resources, duplicates R.java under the framework
# package, builds the java source list, compiles it and packs Rhodes.jar.
task :rhodes => [:rhobundle, :librhodes, :manifest, :resources] do
rm_rf $tmpdir + "/Rhodes"
mkdir_p $tmpdir + "/Rhodes"
# RhodesActivity Listeners
f = StringIO.new("", "w+")
f.puts '// WARNING! THIS FILE IS GENERATED AUTOMATICALLY! DO NOT EDIT IT MANUALLY!'
f.puts 'package com.rhomobile.rhodes.extmanager;'
f.puts ''
f.puts 'class RhodesStartupListeners {'
f.puts ''
f.puts ' public static final String[] ourRunnableList = { ""'
$ext_android_rhodes_activity_listener.each do |a|
f.puts ' ,"'+a+'"'
end
f.puts ' };'
f.puts '}'
Jake.modify_file_if_content_changed($app_startup_listeners_java, f)
puts 'EXT: add additional files to project before build'
Dir.glob(File.join($app_builddir,'extensions','*','adds','*')).each do |res|
if File.directory?(res) && (res != '.') && (res != '..')
puts "add resources from extension [#{res}] to [#{$tmpdir}]"
cp_r res, $tmpdir
end
end
unless $config_xml.nil?
rawres_path = File.join($tmpdir, 'res', 'raw')
mkdir_p rawres_path unless File.exist? rawres_path
cp $config_xml, File.join(rawres_path,'config.xml')
end
generate_rjava
# Duplicate R.java under package com.rhomobile.rhodes so framework code can
# reference app resources; fields are made non-final in the copy.
mkdir_p File.join($app_rjava_dir, "R") if not File.exists? File.join($app_rjava_dir, "R")
buf = File.new(File.join($app_rjava_dir, "R.java"),"r").read.gsub(/^\s*package\s*#{$app_package_name};\s*$/,"\npackage com.rhomobile.rhodes;\n")
buf.gsub!(/public\s*static\s*final\s*int/, "public static int")
File.open(File.join($app_rjava_dir, "R", "R.java"),"w") { |f| f.write(buf) }
# Build the java source list; map-view sources are excluded when
# geomapping is disabled.
srclist = File.join($builddir, "RhodesSRC_build.files")
newsrclist = File.join($tmpdir, "RhodesSRC_build.files")
lines = []
File.open(srclist, "r") do |f|
while line = f.gets
line.chomp!
next if line =~ /\/AndroidR\.java\s*$/
if !$use_geomapping
next if line == "platform/android/Rhodes/src/com/rhomobile/rhodes/mapview/GoogleMapView.java"
next if line == "platform/android/Rhodes/src/com/rhomobile/rhodes/mapview/AnnotationsOverlay.java"
next if line == "platform/android/Rhodes/src/com/rhomobile/rhodes/mapview/CalloutOverlay.java"
end
lines << line
end
end
lines << "\"" +File.join($app_rjava_dir, "R.java")+"\""
lines << "\"" +File.join($app_rjava_dir, "R", "R.java")+"\""
lines << "\"" +$app_android_r+"\""
lines << "\"" +$app_native_libs_java+"\""
lines << "\"" +$app_capabilities_java+"\""
lines << "\"" +$app_push_java+"\""
lines << "\"" +$app_startup_listeners_java+"\""
File.open(newsrclist, "w") { |f| f.write lines.join("\n") }
srclist = newsrclist
classpath = $androidjar
classpath += $path_separator + $google_classpath if $google_classpath
classpath += $path_separator + $motosol_classpath if $motosol_classpath
classpath += $path_separator + File.join($tmpdir, 'Rhodes')
javafilelists = [srclist]
extlist = File.join $app_builddir, "ext_build.files"
if File.exists? extlist
puts "#{extlist} is found! THere are addditional java files"
javafilelists << extlist
end
java_compile(File.join($tmpdir, 'Rhodes'), classpath, javafilelists)
# Pack the compiled classes into Rhodes.jar.
files = []
Dir.glob(File.join($tmpdir, "Rhodes", "*")).each do |f|
relpath = Pathname.new(f).relative_path_from(Pathname.new(File.join($tmpdir, "Rhodes"))).to_s
files << relpath
end
unless files.empty?
jar = File.join($app_builddir,'librhodes','Rhodes.jar')
args = ["cf", jar]
args += files
Jake.run($jarbin, args, File.join($tmpdir, "Rhodes"))
unless $?.success?
raise "Error creating #{jar}"
end
$android_jars = [jar]
end
end
# Compile per-extension additional java sources (collected during
# config:android:extensions) and pack each extension into its own jar,
# appended to $android_jars.
task :extensions_java => [:rhodes, :extensions] do
puts 'Compile additional java files:'
classpath = $androidjar
classpath += $path_separator + $google_classpath if $google_classpath
classpath += $path_separator + $motosol_classpath if $motosol_classpath
classpath += $path_separator + File.join($tmpdir, 'Rhodes')
Dir.glob(File.join($app_builddir,'**','*.jar')).each do |jar|
classpath += $path_separator + jar
end
$ext_android_additional_sources.each do |extpath, list|
ext = File.basename(extpath)
puts "Compiling '#{ext}' extension java sources: #{list}"
# The list file holds paths relative to the extension root; write an
# absolute-path source list for the java compiler.
srclist = Tempfile.new "#{ext}SRC_build"
# NOTE(review): 'lines' is never used in this loop.
lines = []
File.open(list, "r") do |f|
while line = f.gets
line.chomp!
srclist.write "\"#{File.join(extpath, line)}\"\n"
#srclist.write "#{line}\n"
end
end
srclist.close
mkdir_p File.join($tmpdir, ext)
#puts '$$$$$$$$$$$$$$$$$$ START'
#currentdir = Dir.pwd()
#Dir.chdir extpath
java_compile(File.join($tmpdir, ext), classpath, [srclist.path])
#Dir.chdir currentdir
#puts '$$$$$$$$$$$$$$$$$$ FINISH'
extjar = File.join $app_builddir,'extensions',ext,ext + '.jar'
args = ["cf", extjar, '.']
Jake.run($jarbin, args, File.join($tmpdir, ext))
unless $?.success?
raise "Error creating #{extjar}"
end
$android_jars << extjar
end
end
# Placeholder: extension "adds" processing happens in the prerequisite.
task :extensions_adds => "config:android:extensions" do
end
# Build a *full* upgrade bundle zip from the current RhoBundle.
task :upgrade_package => :rhobundle do
#puts '$$$$$$$$$$$$$$$$$$'
#puts 'targetdir = '+$targetdir.to_s
#puts 'bindir = '+$bindir.to_s
android_targetdir = $targetdir #File.join($targetdir, 'android')
mkdir_p android_targetdir if not File.exists? android_targetdir
zip_file_path = File.join(android_targetdir, 'upgrade_bundle.zip')
Jake.build_file_map(File.join($srcdir, "apps"), "rhofilelist.txt")
Jake.zip_upgrade_bundle($bindir, zip_file_path)
end
# Build a *partial* upgrade bundle: only files listed in
# <app>/upgrade_package_add_files.txt are kept; files listed in
# upgrade_package_remove_files.txt are recorded for deletion on the device.
# Produces <target>/upgrade_bundle_partial.zip.
#
# Fix vs. previous revision: list lines were trimmed with String#chop,
# which leaves a stray "\r" on CRLF files and truncates a final line that
# lacks a newline; String#chomp handles both correctly.
task :upgrade_package_partial => ["build:android:rhobundle"] do
  add_list_full_name = File.join($app_path, 'upgrade_package_add_files.txt')
  remove_list_full_name = File.join($app_path, 'upgrade_package_remove_files.txt')

  src_folder = File.join($bindir, 'RhoBundle')
  src_folder = File.join(src_folder, 'apps')

  # Stage a full copy of the bundle, then prune it down to the add-list.
  tmp_folder = $bindir + '_tmp_partial'
  rm_rf tmp_folder if File.exists? tmp_folder
  mkdir_p tmp_folder

  dst_tmp_folder = File.join(tmp_folder, 'RhoBundle')
  mkdir_p dst_tmp_folder
  # copy all
  cp_r src_folder, dst_tmp_folder
  dst_tmp_folder = File.join(dst_tmp_folder, 'apps')
  mkdir_p dst_tmp_folder

  # Read the "add" list, translating source names to their compiled
  # counterparts (.rb -> .iseq, .erb -> _erb.iseq).
  add_files = []
  if File.exists? add_list_full_name
    File.open(add_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chomp
        add_files << fixed_path
        puts '### ['+fixed_path+']'
      end
    end
  end

  # Read the "remove" list with the same name translation.
  remove_files = []
  if File.exists? remove_list_full_name
    File.open(remove_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chomp
        remove_files << fixed_path
        #puts '### ['+fixed_path+']'
      end
    end
  end

  # Drop every staged file that is not in the add-list.
  psize = dst_tmp_folder.size+1
  Dir.glob(File.join(dst_tmp_folder, '**/*')).sort.each do |f|
    relpath = f[psize..-1]
    if File.file?(f)
      if not add_files.include?(relpath)
        rm_rf f
      end
    end
  end

  Jake.build_file_map( dst_tmp_folder, "upgrade_package_add_files.txt" )

  # Ship the remove-list inside the bundle so the device can delete files.
  if File.exists? remove_list_full_name
    File.open(File.join(dst_tmp_folder, 'upgrade_package_remove_files.txt'), "w") do |f|
      remove_files.each do |j|
        f.puts "#{j}"
      end
    end
  end

  mkdir_p $targetdir if not File.exists? $targetdir
  zip_file_path = File.join($targetdir, "upgrade_bundle_partial.zip")
  Jake.zip_upgrade_bundle( tmp_folder, zip_file_path)
  rm_rf tmp_folder
end
#desc "build all"
# Aggregate task: full Android build (bundle, native lib + java, ext java).
task :all => [:rhobundle, :rhodes, :extensions_java]
end
end
namespace "package" do
# Package the application: dex every built jar into classes.dex, create
# the aapt resource package (rhodes.ap_), then append underscore-prefixed
# assets (aapt drops them) and the stripped native .so libraries via jar.
#
# Fix vs. previous revision: the failure messages for the jar-utility
# steps claimed "Error running AAPT", which misdirected debugging; they
# now name the tool that actually failed.
task :android => "build:android:all" do
  puts "Running dx utility"
  args = []
  args << "-Xmx1024m"
  args << "-jar"
  args << $dxjar
  args << "--dex"
  args << "--output=#{$bindir}/classes.dex"
  Dir.glob(File.join($app_builddir,'**','*.jar')).each do |jar|
    args << jar
  end
  Jake.run(File.join($java, 'java'+$exe_ext), args)
  unless $?.success?
    raise "Error running DX utility"
  end

  manifest = $appmanifest
  resource = $appres
  assets = File.join($tmpdir, 'assets')
  resourcepkg = $bindir + "/rhodes.ap_"

  puts "Packaging Assets and Jars"
  # App name was already applied during "build:android:all".
  args = ["package", "-f", "-M", manifest, "-S", resource, "-A", assets, "-I", $androidjar, "-F", resourcepkg]
  Jake.run($aapt, args)
  unless $?.success?
    raise "Error running AAPT (1)"
  end

  # Workaround: manually add files starting with '_' because aapt silently
  # ignores such files when creating the package.
  Dir.glob(File.join($tmpdir, "assets/**/*")).each do |f|
    next unless File.basename(f) =~ /^_/
    relpath = Pathname.new(f).relative_path_from(Pathname.new($tmpdir)).to_s
    puts "Add #{relpath} to #{resourcepkg}..."
    args = ["uf", resourcepkg, relpath]
    Jake.run($jarbin, args, $tmpdir)
    unless $?.success?
      raise "Error running jar utility while adding #{relpath} (2)"
    end
  end

  puts "Packaging Native Libs"
  # Collect librhodes.so plus all extension .so libraries.
  #rm_rf File.join($tmpdir, "lib")
  mkdir_p File.join($tmpdir, "lib/armeabi")
  cp_r File.join($app_builddir,'librhodes','lib','armeabi','librhodes.so'), File.join($tmpdir, "lib/armeabi")
  Dir.glob($app_builddir + "/**/lib*.so").each do |lib|
    cp_r lib, File.join($tmpdir, "lib/armeabi")
  end
  $ext_android_additional_lib.each do |lib|
    cp_r lib, File.join($tmpdir, "lib/armeabi")
  end

  args = ["uf", resourcepkg]
  # Strip them all to decrease size.
  Dir.glob($tmpdir + "/lib/armeabi/lib*.so").each do |lib|
    cc_run($stripbin, ['"'+lib+'"'])
    args << "lib/armeabi/#{File.basename(lib)}"
  end
  Jake.run($jarbin, args, $tmpdir)
  err = $?
  #rm_rf $tmpdir + "/lib"
  unless err.success?
    raise "Error running jar utility while adding native libraries (3)"
  end
end
end
namespace "device" do
namespace "android" do
desc "Build debug self signed for device"
# Build a debug-key-signed APK, zipalign it to <target>/<app>-debug.apk
# and record the package name in app_info.txt next to it.
task :debug => "package:android" do
dexfile = $bindir + "/classes.dex"
simple_apkfile = $targetdir + "/" + $appname + "-tmp.apk"
final_apkfile = $targetdir + "/" + $appname + "-debug.apk"
resourcepkg = $bindir + "/rhodes.ap_"
# Last arg true => sign with the debug key.
apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, true
puts "Align Debug APK file"
args = []
args << "-f"
args << "-v"
args << "4"
args << simple_apkfile
args << final_apkfile
out = Jake.run2($zipalign, args, :hide_output => true)
puts out if USE_TRACES
unless $?.success?
puts "Error running zipalign"
exit 1
end
#remove temporary files
rm_rf simple_apkfile
File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
f.puts $app_package_name
end
end
# Install (reinstall, -r) the debug APK onto a connected device via adb.
task :install => :debug do
apkfile = $targetdir + "/" + $appname + "-debug.apk"
Jake.run $adb, ['-d', 'wait-for-device']
puts "Install APK file"
Jake.run($adb, ["-d", "install", "-r", apkfile])
unless $?.success?
puts "Error installing APK file"
exit 1
end
puts "Install complete"
end
desc "Build production signed for device"
# Build a production APK: generates a private keystore on first use,
# signs the APK with jarsigner, zipaligns it to <target>/<app>_signed.apk
# and records the package name in app_info.txt.
task :production => "package:android" do
dexfile = $bindir + "/classes.dex"
simple_apkfile = $targetdir + "/" + $appname + "_tmp.apk"
final_apkfile = $targetdir + "/" + $appname + "_signed.apk"
signed_apkfile = $targetdir + "/" + $appname + "_tmp_signed.apk"
resourcepkg = $bindir + "/rhodes.ap_"
# Last arg false => do not sign with the debug key; jarsigner signs below.
apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, false
if not File.exists? $keystore
puts "Generating private keystore..."
mkdir_p File.dirname($keystore) unless File.directory? File.dirname($keystore)
args = []
args << "-genkey"
args << "-alias"
args << $storealias
args << "-keyalg"
args << "RSA"
args << "-validity"
args << "20000"
args << "-keystore"
args << $keystore
args << "-storepass"
args << $storepass
args << "-keypass"
args << $keypass
Jake.run($keytool, args)
unless $?.success?
puts "Error generating keystore file"
exit 1
end
end
puts "Signing APK file"
args = []
args << "-sigalg"
args << "MD5withRSA"
args << "-digestalg"
args << "SHA1"
args << "-verbose"
args << "-keystore"
args << $keystore
args << "-storepass"
args << $storepass
args << "-signedjar"
args << signed_apkfile
args << simple_apkfile
args << $storealias
Jake.run($jarsigner, args)
unless $?.success?
puts "Error running jarsigner"
exit 1
end
puts "Align APK file"
args = []
args << "-f"
args << "-v"
args << "4"
args << '"' + signed_apkfile + '"'
args << '"' + final_apkfile + '"'
Jake.run($zipalign, args)
unless $?.success?
puts "Error running zipalign"
exit 1
end
#remove temporary files
rm_rf simple_apkfile
rm_rf signed_apkfile
File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
f.puts $app_package_name
end
end
# Fetch the application log from a connected device (true => device).
task :getlog => "config:android" do
AndroidTools.get_app_log($appname, true) or exit 1
end
end
end
namespace "emulator" do
namespace "android" do
# Fetch the application log from the emulator (false => emulator).
task :getlog => "config:android" do
AndroidTools.get_app_log($appname, false) or exit 1
end
end
end
namespace "android" do
# Print the path of the application log file on the build host.
task :get_log => "config:android" do
puts "log_file=" + $applog_path
end
end
namespace "run" do
namespace "android" do
# Runs a spec suite on the device ('-d') or emulator ('-e' — selected via
# $device_flag): cleans logs, (re)installs the app, tails its log file and
# feeds each line to Jake.process_spec_output until the suite reports
# completion, then aggregates results and tears everything down.
task :spec => ["device:android:debug"] do
if $device_flag == '-e'
Rake::Task["config:android:emulator"].invoke
else
Rake::Task["config:android:device"].invoke
end
log_name = $app_path + '/RhoLogSpec.txt'
File.delete(log_name) if File.exist?(log_name)
AndroidTools.logclear($device_flag)
# Boot a headless emulator when targeting '-e'.
run_emulator( :hidden => true ) if $device_flag == '-e'
do_uninstall($device_flag)
# Failsafe to prevent eternal hangs
Thread.new {
sleep 2000
if $device_flag == '-e'
AndroidTools.kill_adb_and_emulator
else
AndroidTools.kill_adb
end
}
load_app_and_run($device_flag)
AndroidTools.logcat($device_flag, log_name)
Jake.before_run_spec
start = Time.now
puts "waiting for application"
# Poll up to ~60 s for the app process to appear.
for i in 0..60
if AndroidTools.application_running($device_flag, $app_package_name)
break
else
sleep(1)
end
end
puts "waiting for log: " + log_name
# Poll up to ~120 s for logcat to create the spec log file.
for i in 0..120
if !File.exist?(log_name)
sleep(1)
else
break
end
end
if !File.exist?(log_name)
puts "Can not read log file: " + log_name
exit(1)
end
puts "start read log"
io = File.new(log_name, 'r:UTF-8')
end_spec = false
# Tail the log: process_spec_output returns false once the suite is done.
while !end_spec do
io.each do |line|
#puts line
# valid_encoding? only exists on 1.9+ strings; skip undecodable lines there.
if line.class.method_defined? "valid_encoding?"
end_spec = !Jake.process_spec_output(line) if line.valid_encoding?
else
end_spec = !Jake.process_spec_output(line)
end
break if end_spec
end
# Stop tailing if the app died before reporting completion.
break unless AndroidTools.application_running($device_flag, $app_package_name)
sleep(5) unless end_spec
end
io.close
Jake.process_spec_results(start)
# stop app
if $device_flag == '-e'
AndroidTools.kill_adb_and_emulator
else
do_uninstall($device_flag)
AndroidTools.kill_adb
end
$stdout.flush
end
# Bare spec targets default to the emulator variants.
task :phone_spec => "phone_spec:emulator"
task :framework_spec => "framework_spec:emulator"
# phone_spec on a real device (-d) or emulator (-e).  Exit status is the
# failed-spec count, or 1 when no specs ran at all.  When
# $dont_exit_on_failure is set (by :allspecs) the task returns instead of
# exiting so results can be aggregated.
namespace "phone_spec" do
  task :device do
    $device_flag = '-d'
    Jake.run_spec_app('android', 'phone_spec')
    next if $dont_exit_on_failure
    exit 1 if $total.to_i == 0
    exit $failed.to_i
  end

  task :emulator do
    $device_flag = '-e'
    Jake.run_spec_app('android', 'phone_spec')
    next if $dont_exit_on_failure
    exit 1 if $total.to_i == 0
    exit $failed.to_i
  end
end
# framework_spec on a real device (-d) or emulator (-e).  Same exit-code
# contract as phone_spec: failed count, 1 when nothing ran, suppressed
# entirely under $dont_exit_on_failure.
namespace "framework_spec" do
  task :device do
    $device_flag = '-d'
    Jake.run_spec_app('android', 'framework_spec')
    next if $dont_exit_on_failure
    exit 1 if $total.to_i == 0
    exit $failed.to_i
  end

  task :emulator do
    $device_flag = '-e'
    Jake.run_spec_app('android', 'framework_spec')
    next if $dont_exit_on_failure
    exit 1 if $total.to_i == 0
    exit $failed.to_i
  end
end
# Runs phone_spec and framework_spec back to back, aggregates their
# results, writes a combined faillog.txt when anything failed, and exits
# with the aggregate failed-spec count (1 when no specs ran at all).
task :allspecs do
  # Make the individual spec tasks return instead of exiting mid-run.
  $dont_exit_on_failure = true
  Rake::Task['run:android:phone_spec'].invoke
  Rake::Task['run:android:framework_spec'].invoke
  if $failed.to_i > 0
    failure_output = ""
    ['phone_spec', 'framework_spec'].each do |spec|
      faillog = app_expanded_path(spec) + "/faillog.txt"
      # File.read closes the handle; File.open(...).read leaked it.
      failure_output += "#{spec} failures:\n\n" + File.read(faillog) if File.exist?(faillog)
    end
    chdir basedir
    # String#each (per-line iteration) was removed in Ruby 1.9; writing the
    # whole string is equivalent and version-safe.
    File.open("faillog.txt", "w") { |io| io << failure_output }
  end
  puts "Agg Total: #{$total}"
  puts "Agg Passed: #{$passed}"
  puts "Agg Failed: #{$failed}"
  exit 1 if $total.to_i == 0
  exit $failed.to_i
end
# run:android:emulator — boot (or reuse) the emulator, then install and
# launch the debug APK built by device:android:debug.
task :emulator=>['config:android:emulator', 'device:android:debug'] do
run_emulator
load_app_and_run
end
desc "Run application on RhoSimulator"
# Resolves the emulator OS version (app config wins over global rhobuild
# config), writes it into $rhosim_config and delegates to run:rhosimulator.
task :rhosimulator => ["config:set_android_platform","config:common"] do
$emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
$emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
$rhosim_config = "platform='android'\r\n"
$rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
Rake::Task["run:rhosimulator"].invoke
end
# Same as :rhosimulator but launches the simulator under the debugger.
task :rhosimulator_debug => ["config:set_android_platform","config:common"] do
$emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
$emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
$rhosim_config = "platform='android'\r\n"
$rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
Rake::Task["run:rhosimulator_debug"].invoke
end
# task :get_info => "config:android" do
# $androidtargets.each do |level|
# puts "#{get_market_version(level[0])}"
# end
#
# emu_version = $emuversion
#
# puts ""
# cur_name = ""
#
# `"#{$androidbin}" list avd`.split(/\n/).each do |line|
# line.each_line do |item|
# ar = item.split(':')
# ar[0].strip!
# if ar[0] == "Name"
# cur_name = ar[1].strip!
# puts "#{cur_name}"
# end
#
# if $appavdname && cur_name == $appavdname && (ar[0] == "Target" || ar.length == 1)
#
# text = ar[0] == "Target" ? ar[1] : ar[0]
#
# nAnd = text.index("Android")
# if nAnd
# nAnd = text.index(" ", nAnd)
# nAnd1 = text.index("-", nAnd+1)
# nAnd1 = text.index(" ", nAnd+1) unless nAnd1
# emu_version = text[nAnd+1, nAnd1-nAnd-1]
# end
# end
# end
# end
#
# puts ""
#
# puts "#{emu_version}"
# puts "#{$appavdname}"
#
# end
# Ensures an emulator is up: starts adb + logcat capture, creates the AVD
# on first use, boots the emulator (headless when options[:hidden]) and
# polls `adb shell ps` until Android has finished booting, restarting the
# adb server periodically to break hangs.  Gives up (exit 1) after 600 s.
#
# options[:hidden] - boolean, pass -no-window to the emulator.
def run_emulator(options = {})
apkfile = Jake.get_absolute $targetdir + "/" + $appname + "-debug.apk"
#AndroidTools.kill_adb
Jake.run($adb, ['start-server'], nil, true)
rm_f $applog_path if !$applog_path.nil?
AndroidTools.logcat_process()
running = AndroidTools.is_emulator_running
if !running
# Derive an AVD name from the emulator version and enabled add-on APIs,
# unless the app config names one explicitly ($appavdname).
$avdname = "rhoAndroid" + $emuversion.gsub(/[^0-9]/, "")
$avdname += "google" if $use_google_addon_api
$avdname += "motosol" if $use_motosol_api
$avdtarget = $androidtargets[get_api_level($emuversion)]
raise "Unable to run Android emulator. No appropriate target API for SDK version: #{$emuversion}" unless $avdtarget
if $appavdname != nil
$avdname = $appavdname
end
# Create the AVD only if its directory does not exist yet ("echo no"
# answers the custom-hardware-profile prompt).
createavd = "\"#{$androidbin}\" create avd --name #{$avdname} --target #{$avdtarget} --sdcard 128M "
system("echo no | #{createavd}") unless File.directory?( File.join(ENV['HOME'], ".android", "avd", "#{$avdname}.avd" ) )
# Start the emulator, check on it every 5 seconds until it's running
cmd = "\"#{$emulator}\" -cpu-delay 0"
cmd << " -no-window" if options[:hidden]
cmd << " -avd #{$avdname}"
Thread.new { system(cmd) }
puts "Waiting for emulator..."
res = 'error'
while res =~ /error/ do
sleep 5
res = Jake.run $adb, ['-e', 'wait-for-device']
puts res
end
puts "Waiting up to 600 seconds for emulator..."
startedWaiting = Time.now
adbRestarts = 1
while (Time.now - startedWaiting < 600 )
sleep 5
now = Time.now
started = false
booted = true
# Boot is complete when the boot animation is gone and the core
# Android process (android.process.acore) is up.
Jake.run2 $adb, ["-e", "shell", "ps"], :system => false, :hideerrors => false do |line|
#puts line
booted = false if line =~ /bootanimation/
started = true if line =~ /android\.process\.acore/
true
end
#puts "started: #{started}, booted: #{booted}"
unless started and booted
printf("%.2fs: ",(now - startedWaiting))
# Restart the adb server every 180*n seconds to prevent eternal waiting.
if (now - startedWaiting) > (180 * adbRestarts)
puts "Appears hung, restarting adb server"
AndroidTools.kill_adb
Jake.run($adb, ['start-server'], nil, true)
adbRestarts += 1
rm_f $applog_path if !$applog_path.nil?
AndroidTools.logcat_process()
else
puts "Still waiting..."
end
else
puts "Success"
puts "Device is ready after " + (Time.now - startedWaiting).to_s + " seconds"
break
end
end
if !AndroidTools.is_emulator_running
puts "Emulator still isn't up and running, giving up"
exit 1
end
else
puts "Emulator is up and running"
end
$stdout.flush
end
# Installs the debug APK via `adb install -r` and launches it.  Retries up
# to 20 times (1 s apart) because install fails while the emulator is
# still booting; success is detected by "Success" in adb's output, which
# is streamed to stdout character by character as it arrives.
#
# device_flag - '-e' (emulator, default) or '-d' (device).
def load_app_and_run(device_flag = '-e')
puts "Loading package"
apkfile = Jake.get_absolute $targetdir + "/" + $appname + "-debug.apk"
count = 0
done = false
while count < 20
f = Jake.run2($adb, [device_flag, "install", "-r", apkfile], {:nowait => true})
theoutput = ""
# Echo adb's output live while also capturing it for the Success check.
while c = f.getc
$stdout.putc c
$stdout.flush
theoutput << c
end
f.close
if theoutput.to_s.match(/Success/)
done = true
break
end
puts "Failed to load (possibly because emulator not done launching)- retrying"
$stdout.flush
sleep 1
count += 1
end
puts "Loading complete, starting application.." if done
AndroidTools.run_application(device_flag) if done
end
desc "build and install on device"
# Installs on an attached device (dependency), then launches the app and
# starts capturing its logcat output.
task :device => "device:android:install" do
puts "Starting application..."
AndroidTools.run_application("-d")
puts "Application was started"
AndroidTools.logcat_process("-d")
puts "Starting log process ..."
end
end
desc "build and launch emulator"
# run:android is an alias for run:android:emulator.
task :android => "run:android:emulator" do
end
end
namespace "uninstall" do
# Uninstalls $app_package_name via `adb uninstall`, retrying up to 21
# attempts (5 s apart) while adb reports neither "Success" nor "Failure".
# Exits the process on adb errors or when retries are exhausted.
#
# flag - '-e' (emulator) or '-d' (device).
def do_uninstall(flag)
  adb_args = [flag, "uninstall", $app_package_name]
  (0..20).each do |attempt|
    result = Jake.run($adb, adb_args)
    unless $?.success?
      puts "Error uninstalling application"
      exit 1
    end
    if result.include?("Success")
      puts "Application uninstalled successfully"
      break
    elsif result.include?("Failure")
      # Nothing to remove — the app was not installed.
      puts "Application is not installed on the device"
      break
    else
      # Ambiguous output: retry, giving up for good on the last attempt.
      puts "Error uninstalling application"
      exit 1 if attempt == 20
    end
    sleep(5)
  end
end
namespace "android" do
# uninstall:android:emulator — remove the app from a running emulator.
task :emulator => "config:android" do
unless AndroidTools.is_emulator_running
puts "WARNING!!! Emulator is not up and running"
exit 1
end
do_uninstall('-e')
end
desc "uninstall from device"
# uninstall:android:device — remove the app from an attached device.
task :device => "config:android" do
unless AndroidTools.is_device_running
puts "WARNING!!! Device is not connected"
exit 1
end
do_uninstall('-d')
end
end
desc "uninstall from emulator"
# uninstall:android is an alias for uninstall:android:emulator.
task :android => "uninstall:android:emulator" do
end
end
namespace "clean" do
desc "Clean Android"
task :android => "clean:android:all"
namespace "android" do
# Removes every Android build product: target dir, per-app build dir,
# loose files in bin/, extracted bundle sources and temporaries.
task :files => "config:android" do
rm_rf $targetdir
rm_rf $app_builddir
Dir.glob( File.join( $bindir, "*.*" ) ) { |f| rm f, :force => true }
rm_rf $srcdir
rm_rf $tmpdir
end
task :all => :files
end
end
# Android: rake run:android:get_log to meet common agreement
#------------------------------------------------------------------------
# (The MIT License)
#
# Copyright (c) 2008-2011 Rhomobile, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# http://rhomobile.com
#------------------------------------------------------------------------
require File.dirname(__FILE__) + '/androidcommon.rb'
require File.dirname(__FILE__) + '/android_tools.rb'
require File.dirname(__FILE__) + '/manifest_generator.rb'
require 'pathname'
require 'tempfile'
# Whether to build against the STLport copy shipped with Rhodes instead of
# the NDK's.  NOTE(review): config:android reassigns this constant when the
# NDK lacks a bundled STLport, which emits an already-initialized warning.
USE_OWN_STLPORT = false
#USE_TRACES = # see androidcommon.h
# Maps an Android API level to its market version string, delegating to
# AndroidTools (e.g. level 4 -> "1.6"; exact mapping lives in android_tools).
def get_market_version(apilevel)
AndroidTools.get_market_version(apilevel)
end
# Inverse of get_market_version: market version string -> API level,
# delegating to AndroidTools.
def get_api_level(version)
AndroidTools.get_api_level(version)
end
# Java package of the Rhodes runtime classes referenced from the manifest.
JAVA_PACKAGE_NAME = 'com.rhomobile.rhodes'
# This is the place where the default Android platform is specified.
# For the complete list of Android API levels and their mapping to
# market names (such as "Android-1.5" etc.) see the output of the
# command "android list targets"
ANDROID_SDK_LEVEL = 4
# Maps a build.yml capability name to the Android permission(s) it needs.
# A value may be a single permission, a list, nil (no permission), or a
# proc that edits the manifest directly (e.g. push adds a receiver too).
ANDROID_PERMISSIONS = {
'audio' => ['RECORD_AUDIO', 'MODIFY_AUDIO_SETTINGS'],
'camera' => 'CAMERA',
'gps' => ['ACCESS_FINE_LOCATION', 'ACCESS_COARSE_LOCATION'],
'network_state' => 'ACCESS_NETWORK_STATE',
'phone' => ['CALL_PHONE', 'READ_PHONE_STATE'],
'pim' => ['READ_CONTACTS', 'WRITE_CONTACTS', 'GET_ACCOUNTS'],
'record_audio' => 'RECORD_AUDIO',
'vibrate' => 'VIBRATE',
'bluetooth' => ['BLUETOOTH_ADMIN', 'BLUETOOTH'],
'calendar' => ['READ_CALENDAR', 'WRITE_CALENDAR'],
'sdcard' => 'WRITE_EXTERNAL_STORAGE',
'push' => proc do |manifest| add_push(manifest) end,
'motorola' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest| add_motosol_sdk(manifest) end],
'motoroladev' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest| add_motosol_sdk(manifest) end],
'webkit_browser' => nil,
'shared_runtime' => nil,
'motorola_browser' => nil
}
# Capabilities force-added to every application's capability list.
ANDROID_CAPS_ALWAYS_ENABLED = ['network_state']
# Injects legacy C2DM push support into the AndroidManifest document:
# declares the app's own C2D_MESSAGE permission, the two uses-permission
# entries, and a PushReceiver with RECEIVE/REGISTRATION intent filters
# inside every <application> element.
#
# manifest - the REXML <manifest> root element.
def add_push(manifest)
  c2dm_perm = "#{$app_package_name}.permission.C2D_MESSAGE"

  perm = REXML::Element.new('permission')
  perm.add_attribute('android:name', c2dm_perm)
  perm.add_attribute('android:protectionLevel', 'signature')
  manifest.add perm

  [c2dm_perm, "com.google.android.c2dm.permission.RECEIVE"].each do |perm_name|
    uses = REXML::Element.new('uses-permission')
    uses.add_attribute('android:name', perm_name)
    manifest.add uses
  end

  receiver = REXML::Element.new('receiver')
  receiver.add_attribute('android:name', "#{JAVA_PACKAGE_NAME}.PushReceiver")
  receiver.add_attribute('android:permission', "com.google.android.c2dm.permission.SEND")

  # One intent-filter per C2DM intent, each scoped to our package name.
  ["com.google.android.c2dm.intent.RECEIVE",
   "com.google.android.c2dm.intent.REGISTRATION"].each do |intent_name|
    action = REXML::Element.new('action')
    action.add_attribute('android:name', intent_name)
    category = REXML::Element.new('category')
    category.add_attribute('android:name', $app_package_name)
    filter = REXML::Element.new('intent-filter')
    filter.add_element(action)
    filter.add_element(category)
    receiver.add_element(filter)
  end

  manifest.elements.each('application') { |app| app.add receiver }
end
# Declares the optional Motorola Solutions add-on libraries (scanner and
# MSR) as <uses-library android:required="false"> inside every
# <application> element of the manifest.
#
# manifest - the REXML <manifest> root element.
def add_motosol_sdk(manifest)
  addon_libs = ['com.motorolasolutions.scanner', 'com.motorolasolutions.emdk.msr'].map do |lib_name|
    lib = REXML::Element.new 'uses-library'
    lib.add_attribute 'android:name', lib_name
    lib.add_attribute 'android:required', 'false'
    lib
  end
  manifest.elements.each('application') do |app|
    addon_libs.each { |lib| app.add lib }
  end
end
# Rebuilds the app's res/ directory from the Rhodes template: copies the
# template resources, overlays the app icon, patches app_name in
# strings.xml to +newname+, and rewrites AndroidR.java so it imports the
# generated R class from $app_package_name instead of com.rhomobile.*.
def set_app_name_android(newname)
puts "set_app_name"
$stdout.flush
rm_rf $appres
cp_r $rhores, $appres
iconappname = File.join($app_path, "icon", "icon.png")
iconresname = File.join($appres, "drawable", "icon.png")
rm_f iconresname
cp iconappname, iconresname
rhostrings = File.join($rhores, "values", "strings.xml")
appstrings = File.join($appres, "values", "strings.xml")
doc = REXML::Document.new(File.new(rhostrings))
doc.elements["resources/string[@name='app_name']"].text = newname
File.open(appstrings, "w") { |f| doc.write f }
# Redirect the R import to the application's own package.
buf = File.new($rho_android_r,"r").read.gsub(/^\s*import com\.rhomobile\..*\.R;\s*$/,"\nimport #{$app_package_name}.R;\n")
File.open($app_android_r,"w") { |f| f.write(buf) }
end
# Runs aapt to generate R.java from the app's manifest, resources and
# assets into $app_rjava_dir; raises if aapt exits non-zero.
def generate_rjava
manifest = $appmanifest
resource = $appres
assets = Jake.get_absolute File.join($tmpdir, 'assets')
# NOTE(review): nativelibs is computed but never used below.
nativelibs = Jake.get_absolute(File.join($androidpath, "Rhodes", "libs"))
#rjava = Jake.get_absolute(File.join($androidpath, "Rhodes", "gen", "com", "rhomobile", "rhodes"))
args = ["package", "-f", "-M", manifest, "-S", resource, "-A", assets, "-I", $androidjar, "-J", $app_rjava_dir]
Jake.run($aapt, args)
unless $?.success?
raise "Error in AAPT"
end
end
# True when +arg+ is one of the accepted string spellings of an enabled
# flag ('true', 'yes', 'enabled', 'enable', '1'); false for anything else.
def get_boolean(arg)
  %w(true yes enabled enable 1).include?(arg)
end
namespace "config" do
# Pins the build system's current platform to android before the shared
# config:common task runs.
task :set_android_platform do
$current_platform = "android"
end
# Central Android configuration task: resolves SDK/NDK paths, API levels,
# signing settings, tool binaries (aapt/dx/adb/...), package/vendor names,
# capabilities, add-on classpaths and all build directories into globals
# consumed by every other android task.
task :android => :set_android_platform do
Rake::Task["config:common"].invoke
$java = $config["env"]["paths"]["java"]
$neon_root = nil
$neon_root = $config["env"]["paths"]["neon"] unless $config["env"]["paths"].nil?
if !($app_config["paths"].nil? or $app_config["paths"]["neon"].nil?)
$neon_root = $app_config["paths"]["neon"]
end
$androidsdkpath = $config["env"]["paths"]["android"]
unless File.exists? $androidsdkpath
puts "Missing or invalid 'android' section in rhobuild.yml: '#{$androidsdkpath}'"
exit 1
end
$androidndkpath = $config["env"]["paths"]["android-ndk"]
unless File.exists? $androidndkpath
puts "Missing or invalid 'android-ndk' section in rhobuild.yml: '#{$androidndkpath}'"
exit 1
end
errfmt = "WARNING!!! Path to Android %s contain spaces! It will not work because of the Google toolchain restrictions. Move it to another location and reconfigure rhodes."
if $androidndkpath =~ /\s/
puts(errfmt % "NDK")
exit 1
end
# Minimum SDK level: app config > rhobuild config > built-in default.
$min_sdk_level = $app_config["android"]["minSDK"] unless $app_config["android"].nil?
$min_sdk_level = $config["android"]["minSDK"] if $min_sdk_level.nil? and not $config["android"].nil?
$min_sdk_level = $min_sdk_level.to_i unless $min_sdk_level.nil?
$min_sdk_level = ANDROID_SDK_LEVEL if $min_sdk_level.nil?
$max_sdk_level = $app_config["android"]["maxSDK"] unless $app_config["android"].nil?
$androidplatform = AndroidTools.fill_api_levels $androidsdkpath
if $androidplatform == nil
puts "No Android platform found at SDK path: '#{$androidsdkpath}'"
exit 1
end
android_api_levels = AndroidTools.get_installed_api_levels
android_api_levels.sort!
$found_api_level = android_api_levels.last
$gapikey = $app_config["android"]["apikey"] unless $app_config["android"].nil?
$gapikey = $config["android"]["apikey"] if $gapikey.nil? and not $config["android"].nil?
$gapikey = '' unless $gapikey.is_a? String
$gapikey = nil if $gapikey.empty?
$android_orientation = $app_config["android"]["orientation"] unless $app_config["android"].nil?
$use_geomapping = $app_config["android"]["mapping"] unless $app_config["android"].nil?
$use_geomapping = $config["android"]["mapping"] if $use_geomapping.nil? and not $config["android"].nil?
$use_geomapping = 'false' if $use_geomapping.nil?
$use_geomapping = get_boolean($use_geomapping.to_s)
$use_google_addon_api = false
$use_google_addon_api = true if $use_geomapping
#Additionally $use_google_addon_api set to true if PUSH capability is enabled
puts "Use Google addon API: #{$use_google_addon_api}" if USE_TRACES
$uri_scheme = $app_config["android"]["URIScheme"] unless $app_config["android"].nil?
$uri_scheme = "http" if $uri_scheme.nil?
$uri_host = $app_config["android"]["URIHost"] unless $app_config["android"].nil?
# Here is switch between release/debug configuration used for
# building native libraries
if $app_config["debug"].nil?
$build_release = true
else
# FIX: was `!$app_config["debug"].to_i`, which is always false in Ruby
# (0 is truthy), so even debug: 0 forced a debug build.
$build_release = $app_config["debug"].to_i == 0
end
$androidpath = Jake.get_absolute $config["build"]["androidpath"]
$bindir = File.join($app_path, "bin")
$rhobindir = File.join($androidpath, "bin")
$builddir = File.join($androidpath, "build")
$shareddir = File.join($androidpath, "..", "shared")
$srcdir = File.join($bindir, "RhoBundle")
$targetdir = File.join($bindir,'target','android')
$excludelib = ['**/builtinME.rb','**/ServeME.rb','**/dateME.rb','**/rationalME.rb']
$tmpdir = File.join($bindir, "tmp")
#$rhomanifest = File.join $androidpath, "Rhodes", "AndroidManifest.xml"
$rhomanifesterb = File.join $androidpath, "Rhodes", "AndroidManifest.xml.erb"
$appmanifest = File.join $tmpdir, "AndroidManifest.xml"
$rhores = File.join $androidpath, "Rhodes", "res"
$appres = File.join $tmpdir, "res"
$appincdir = File.join $tmpdir, "include"
$rho_android_r = File.join $androidpath, "Rhodes", "src", "com", "rhomobile", "rhodes", "AndroidR.java"
$app_android_r = File.join $tmpdir, "AndroidR.java"
$app_rjava_dir = File.join $tmpdir
$app_native_libs_java = File.join $tmpdir, "NativeLibraries.java"
$app_capabilities_java = File.join $tmpdir, "Capabilities.java"
$app_push_java = File.join $tmpdir, "Push.java"
$app_startup_listeners_java = File.join $tmpdir, "RhodesStartupListeners.java"
if RUBY_PLATFORM =~ /(win|w)32$/
$bat_ext = ".bat"
$exe_ext = ".exe"
$path_separator = ";"
# Add PATH to cygwin1.dll
ENV['CYGWIN'] = 'nodosfilewarning'
if $path_cygwin_modified.nil?
ENV['PATH'] = Jake.get_absolute("res/build-tools") + ";" + ENV['PATH']
# FIX: was a local `path_cygwin_modified`, so the guard above never
# tripped and PATH was re-prepended on every configure pass.
$path_cygwin_modified = true
end
else
#XXX make these absolute
$bat_ext = ""
$exe_ext = ""
$path_separator = ":"
# TODO: add ruby executable for Linux
end
puts "+++ Looking for platform..." if USE_TRACES
# Fall back to the minSDK platform dir if fill_api_levels found nothing.
if $androidplatform.nil?
ajar = File.join($androidsdkpath, 'platforms', 'android-' + $min_sdk_level.to_s, 'android.jar')
if USE_TRACES
puts "Using target path: "+ ajar
end
$androidplatform = 'android-' + $min_sdk_level.to_s if File.file?(ajar)
end
if $androidplatform.nil?
puts "+++ No required platform (API level >= #{$min_sdk_level}) found, can't proceed"
puts "+++ Looks like you have no installed required Android platform package."
puts "+++ To solve that, please strictly follow instructions from http://wiki.rhomobile.com/index.php/BuildingRhodes#Prerequisites_5"
exit 1
else
puts "+++ Platform found: #{$androidplatform}" if USE_TRACES
end
$stdout.flush
# SDK tools moved between platforms/<p>/tools and platform-tools across
# SDK releases — probe both locations for each binary.
$dx = File.join( $androidsdkpath, "platforms", $androidplatform, "tools", "dx" + $bat_ext )
$dx = File.join( $androidsdkpath, "platform-tools", "dx" + $bat_ext ) unless File.exists? $dx
$aapt = File.join( $androidsdkpath, "platforms", $androidplatform, "tools", "aapt" + $exe_ext )
$aapt = File.join( $androidsdkpath, "platform-tools", "aapt" + $exe_ext ) unless File.exists? $aapt
$apkbuilder = File.join( $androidsdkpath, "tools", "apkbuilder" + $bat_ext )
$androidbin = File.join( $androidsdkpath, "tools", "android" + $bat_ext )
$adb = File.join( $androidsdkpath, "tools", "adb" + $exe_ext )
$adb = File.join( $androidsdkpath, "platform-tools", "adb" + $exe_ext ) unless File.exists? $adb
$zipalign = File.join( $androidsdkpath, "tools", "zipalign" + $exe_ext )
$androidjar = File.join($androidsdkpath, "platforms", $androidplatform, "android.jar")
$dxjar = File.join( $androidsdkpath, "platforms", $androidplatform, "tools", "lib", "dx.jar")
$dxjar = File.join( $androidsdkpath, "platform-tools", "lib", "dx.jar") unless File.exists? $dxjar
$keytool = File.join( $java, "keytool" + $exe_ext )
$jarsigner = File.join( $java, "jarsigner" + $exe_ext )
$jarbin = File.join( $java, "jar" + $exe_ext )
# Signing configuration: app config > rhobuild config > shared defaults.
$keystore = nil
$keystore = $app_config["android"]["production"]["certificate"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
$keystore = $config["android"]["production"]["certificate"] if $keystore.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
$keystore = File.expand_path($keystore, $app_path) unless $keystore.nil?
$keystore = File.expand_path(File.join(ENV['HOME'], ".rhomobile", "keystore")) if $keystore.nil?
$storepass = nil
$storepass = $app_config["android"]["production"]["password"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
$storepass = $config["android"]["production"]["password"] if $storepass.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
$storepass = "81719ef3a881469d96debda3112854eb" if $storepass.nil?
$keypass = $storepass
$storealias = nil
$storealias = $app_config["android"]["production"]["alias"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
$storealias = $config["android"]["production"]["alias"] if $storealias.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
$storealias = "rhomobile.keystore" if $storealias.nil?
$app_config["capabilities"] += ANDROID_CAPS_ALWAYS_ENABLED
$app_config["capabilities"].map! { |cap| cap.is_a?(String) ? cap : nil }.delete_if { |cap| cap.nil? }
$use_google_addon_api = true unless $app_config["capabilities"].index("push").nil?
$appname = $app_config["name"]
$appname = "Rhodes" if $appname.nil?
$vendor = $app_config["vendor"]
if $vendor.nil?
if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
$vendor = 'rhomobile'
else
$vendor = 'motorolasolutions'
end
end
# Sanitize vendor into a valid Java package segment.
$vendor = $vendor.gsub(/^[^A-Za-z]/, '_').gsub(/[^A-Za-z0-9]/, '_').gsub(/_+/, '_').downcase
$app_package_name = $app_config["android"] ? $app_config["android"]["package_name"] : nil
$app_package_name = "com.#{$vendor}." + $appname.downcase.gsub(/[^A-Za-z_0-9]/, '') unless $app_package_name
# Java package segments must not start with a digit.
$app_package_name.gsub!(/\.[\d]/, "._")
puts "$vendor = #{$vendor}"
puts "$app_package_name = #{$app_package_name}"
if $uri_host.nil?
if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
$uri_host = 'rhomobile.com'
else
$uri_host = 'motorolasolutions.com'
end
$uri_path_prefix = "/#{$app_package_name}"
end
unless $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
$use_motosol_api = true
$use_motosol_api_classpath = true unless $app_config['capabilities'].index('motoroladev').nil?
raise 'Cannot use Motorola SDK addon and Google SDK addon together!' if $use_google_addon_api
end
$applog_path = nil
$applog_file = $app_config["applog"]
if !$applog_file.nil?
$applog_path = File.join( $app_path, $applog_file )
end
if $min_sdk_level > $found_api_level
raise "Latest installed Android platform '#{$androidplatform}' does not meet minSdk '#{$min_sdk_level}' requirement"
end
# Look for Motorola SDK addon
if $use_motosol_api_classpath
puts "Looking for Motorola API SDK add-on..." if USE_TRACES
motosol_jars = ['com.motorolasolutions.scanner']
motosol_jars << 'com.motorolasolutions.emdk.msr'
$motosol_classpath = AndroidTools::get_addon_classpath(motosol_jars)
end
# Detect Google API add-on path
if $use_google_addon_api
puts "Looking for Google API SDK add-on..." if USE_TRACES
google_jars = ['com.google.android.maps']
$google_classpath = AndroidTools::get_addon_classpath(google_jars, $found_api_level)
end
setup_ndk($androidndkpath, $found_api_level)
$std_includes = File.join $androidndkpath, "sources", "cxx-stl", "stlport", "stlport"
unless File.directory? $std_includes
$stlport_includes = File.join $shareddir, "stlport", "stlport"
# NOTE(review): reassigning a constant — emits an "already initialized"
# warning; kept for compatibility with code that reads USE_OWN_STLPORT.
USE_OWN_STLPORT = true
end
$native_libs = ["sqlite", "curl", "stlport", "ruby", "json", "rhocommon", "rhodb", "rholog", "rhosync", "rhomain"]
if $build_release
$confdir = "release"
else
$confdir = "debug"
end
$app_builddir = File.join($bindir,'target','android',$confdir)
$objdir = {}
$libname = {}
$native_libs.each do |x|
$objdir[x] = File.join($tmpdir,x)
$libname[x] = File.join($app_builddir,x,"lib#{x}.a")
end
$push_sender = nil
$push_sender = $config["android"]["push"]["sender"] if !$config["android"].nil? and !$config["android"]["push"].nil?
$push_sender = $app_config["android"]["push"]["sender"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
$push_sender = "support@rhomobile.com" if $push_sender.nil?
$push_notifications = nil
$push_notifications = $app_config["android"]["push"]["notifications"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
$push_notifications = "none" if $push_notifications.nil?
mkdir_p $bindir if not File.exists? $bindir
mkdir_p $rhobindir if not File.exists? $rhobindir
mkdir_p $targetdir if not File.exists? $targetdir
mkdir_p $srcdir if not File.exists? $srcdir
end #task 'config:android'
namespace 'android' do
# Scans every configured extension (build.yml "extensions") across the
# extension search paths, reads each ext/ext.yml and records what the
# build needs: activity listeners, manifest changes, resource adds,
# additional java sources/libs, build scripts; prebuilt extensions get
# their .a/.jar/.so artifacts copied into the app build dir.
task :extensions => ['config:android', 'build:bundle:noxruby'] do
$ext_android_rhodes_activity_listener = []
$ext_android_additional_sources = {}
$ext_android_additional_lib = []
$ext_android_build_scripts = {}
$ext_android_manifest_changes = {}
$ext_android_adds = {}
$app_config["extensions"].each do |ext|
puts "#{ext} is processing..."
$app_config["extpaths"].each do |p|
extpath = File.join(p, ext, 'ext')
puts "Checking extpath: #{extpath}"
if File.exists? extpath and File.directory? extpath
puts "#{extpath} is configuring..."
extyml = File.join(p, ext,"ext.yml")
if File.file? extyml
puts "#{extyml} is processing..."
extconf = Jake.config(File.open(extyml))
extconf_android = extconf['android']
# exttype: 'build' (default), 'prebuilt' or 'rakefile'.
exttype = 'build'
exttype = extconf_android['exttype'] if extconf_android and extconf_android['exttype']
addspath = File.join($app_builddir,'extensions',ext,'adds')
prebuiltpath = nil
if exttype == 'prebuilt'
# A prebuilt extension must contain exactly one */android directory.
prebuiltpath = Dir.glob(File.join(extpath, '**', 'android'))
if prebuiltpath.count == 1
prebuiltpath = prebuiltpath.first
else
raise "android:exttype is 'prebuilt' but prebuilt path is not found #{prebuiltpath.inspect}"
end
end
# Legacy top-level keys are honored before the android: section.
android_listener = extconf["android_rhodes_activity_listener"]
android_listener = extconf_android['rhodes_listener'] if android_listener.nil? and extconf_android
$ext_android_rhodes_activity_listener << android_listener unless android_listener.nil?
manifest_changes = extconf["android_manifest_changes"]
manifest_changes = extconf_android['manifest_changes'] if manifest_changes.nil? and extconf_android
if manifest_changes
manifest_changes = [manifest_changes] unless manifest_changes.is_a? Array
manifest_changes.map! { |path| File.join(p,ext,path) }
else
# No explicit changes: autodetect from the prebuilt adds/ folder
# (AndroidManifest.rb, *.erb templates, or a raw AndroidManifest.xml).
if prebuiltpath
manifest_changes = []
path = File.join(prebuiltpath,'adds','AndroidManifest.rb')
manifest_changes << path if File.file? path
templates = Dir.glob File.join(prebuiltpath,'adds','*.erb')
manifest_changes += templates
if templates.empty?
path = File.join(prebuiltpath,'adds','AndroidManifest.xml')
manifest_changes << path if File.file? path
end
end
end
if manifest_changes
$ext_android_manifest_changes[ext] = manifest_changes
end
resource_addons = extconf["android_resources_addons"]
resource_addons = extconf_android['adds'] if resource_addons.nil? and extconf_android
if resource_addons
resource_addons = File.join(p, ext, resource_addons)
else
if prebuiltpath
resource_addons = File.join(prebuiltpath,'adds')
resource_addons = nil unless File.directory? resource_addons
end
end
if resource_addons
$ext_android_adds[ext] = resource_addons
end
additional_sources = extconf["android_additional_sources_list"]
additional_sources = extconf_android['source_list'] if additional_sources.nil? and extconf_android
unless additional_sources.nil?
ext_sources_list = File.join(p, ext, additional_sources)
if File.exists? ext_sources_list
$ext_android_additional_sources[File.join(p, ext)] = ext_sources_list
else
raise "Extension java source list is missed: #{ext_sources_list}"
end
else
puts "No additional java sources for '#{ext}'"
end
# there is no 'additional_libs' param in android section moreover
# place libraries into android adds folder
android_additional_lib = extconf["android_additional_lib"]
if android_additional_lib != nil
android_additional_lib.each do |lib|
$ext_android_additional_lib << File.join(p, ext, lib)
end
end
if prebuiltpath
# Copy prebuilt artifacts: static libs and jars into the extension
# build dir; shared libs into adds/lib/armeabi (noautoload ones into
# a noautoload/ subfolder so they are not loaded at startup).
targetpath = File.join $app_builddir,'extensions',ext
libaddspath = File.join addspath,'lib','armeabi'
mkdir_p targetpath
Dir.glob(File.join(prebuiltpath,'lib*.a')).each do |lib|
cp lib, targetpath
end
Dir.glob(File.join(prebuiltpath,'*.jar')).each do |lib|
cp lib, targetpath
end
Dir.glob(File.join(prebuiltpath,'**','lib*.so')).each do |lib|
next if lib =~ /adds/
if lib =~ /noautoload/
mkdir_p File.join(libaddspath,'noautoload')
cp lib, File.join(libaddspath,'noautoload')
else
mkdir_p libaddspath
cp lib, libaddspath
end
end
end
puts "#{extyml} is processed"
end
# Register the extension's build entry point: its own rakefile, or a
# build/build.bat script in the ext directory.
if exttype == 'rakefile'
rakedir = Dir.glob File.join(extpath,'**','android')
$ext_android_build_scripts[ext] = [rakedir.first, 'rake']
else
build_script = File.join(extpath, 'build' + $bat_ext)
if File.exists? build_script
if RUBY_PLATFORM =~ /(win|w)32$/
$ext_android_build_scripts[ext] = [extpath, 'build.bat']
else
$ext_android_build_scripts[ext] = [extpath, File.join('.', 'build' + $bat_ext)]
end
end
end
puts "#{extpath} is configured"
# to prevent to build 2 extensions with same name
break
end # exists?
end # $app_config["extpaths"].each
end # $app_config["extensions"].each
puts "Extensions' java source lists: #{$ext_android_additional_sources.inspect}"
end #task :extensions
# Emulator-specific configuration: selects the emulator binary and OS
# version, then parses `android list targets` into $androidtargets
# (API level -> target id), restricted to the Google or Motorola add-on
# targets when the corresponding add-on API is in use.
task :emulator=>"config:android" do
$device_flag = "-e"
$emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
$emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
if RUBY_PLATFORM =~ /(win|w)32$/
$emulator = #"cmd /c " +
File.join( $androidsdkpath, "tools", "emulator.exe" )
else
$emulator = File.join( $androidsdkpath, "tools", "emulator" )
end
# No explicit version configured: derive it from the minimum SDK level.
$emuversion = AndroidTools.get_market_version($min_sdk_level) if $emuversion.nil?
if $emuversion.nil?
raise "Wrong Android emulator version: #{$emuversion}. Android SDK target API is not installed"
end
# Detect android targets
$androidtargets = {}
id = nil
`"#{$androidbin}" list targets`.split(/\n/).each do |line|
line.chomp!
if line =~ /^id:\s+([0-9]+)/
id = $1
if $use_google_addon_api
if line =~ /Google Inc\.:Google APIs:([0-9]+)/
apilevel = $1
$androidtargets[apilevel.to_i] = id.to_i
end
else
if $use_motosol_api
if line =~ /MotorolaSolutions\s+Inc\.:MotorolaSolution\s+Value\s+Add\s+APIs.*:([0-9]+)/
apilevel = $1
$androidtargets[apilevel.to_i] = id.to_i
end
end
end
end
# Plain platform targets expose the level on a separate "API level:" line.
unless $use_google_addon_api or $use_motosol_api
if line =~ /^\s+API\s+level:\s+([0-9]+)$/
apilevel = $1
$androidtargets[apilevel.to_i] = id.to_i
end
end
end
if USE_TRACES
puts "Android emulator version: #{$emuversion}"
puts "Android targets:"
puts $androidtargets.inspect
end
$emuversion = $emuversion.to_s
# Optional named AVD to use instead of the auto-generated one.
$appavdname = $app_config["android"]["emulator"] if $app_config["android"] != nil && $app_config["android"].length > 0
$appavdname = $config["android"]["emulator"] if $appavdname.nil? and !$config["android"].nil? and $config["android"].length > 0
end # task 'config:android:emulator'
# Device-specific configuration: point subsequent adb calls at hardware.
task :device=>"config:android" do
$device_flag = "-d"
end
end #namespace 'config:android'
end
namespace "build" do
namespace "android" do
desc "Build RhoBundle for android"
# Assemble the RhoBundle (apps/db/lib plus hash/name/rho.dat metadata)
# into $tmpdir/assets for packaging into the APK.
task :rhobundle => ["config:android", :extensions] do
  Rake::Task["build:bundle:noxruby"].invoke
  #assets = File.join(Jake.get_absolute($androidpath), "Rhodes", "assets")
  assets = File.join $tmpdir, 'assets'
  rm_rf assets
  mkdir_p assets
  hash = nil
  ["apps", "db", "lib"].each do |d|
    cp_r File.join($srcdir, d), assets, :preserve => true
    # Calculate hash of directories
    hash = get_dir_hash(File.join($srcdir, d), hash)
  end
  # Persist the bundle hash and app name so the device can detect bundle changes.
  File.open(File.join($srcdir, "hash"), "w") { |f| f.write(hash.hexdigest) }
  File.open(File.join($srcdir, "name"), "w") { |f| f.write($appname) }
  Jake.build_file_map($srcdir, "rho.dat")
  # NOTE(review): apps/db/lib are copied a second time here — presumably to
  # pick up files touched after build_file_map; confirm before simplifying.
  ["apps", "db", "lib", "hash", "name", "rho.dat"].each do |d|
    cp_r File.join($srcdir, d), assets, :preserve => true
  end
end
desc "Build RhoBundle for Eclipse project"
# Mirror the freshly built bundle assets into the Eclipse project tree.
task :eclipsebundle => "build:android:rhobundle" do
  assets = File.join $tmpdir, 'assets'
  eclipse_assets = File.join(Jake.get_absolute($androidpath), "Rhodes", "assets")
  rm_rf eclipse_assets
  cp_r assets, eclipse_assets, :preserve => true
end
# Build native extensions: runs each extension's build script with a prepared
# environment, then collects AndroidManifest patches and extra resource "adds"
# under $app_builddir/extensions/<ext>/adds.
desc 'Building native extensions'
task :extensions => ["config:android:extensions", :genconfig] do
  Rake::Task["build:bundle:noxruby"].invoke
  # Environment consumed by the per-extension build scripts.
  ENV['RHO_PLATFORM'] = 'android'
  ENV["RHO_APP_DIR"] = $app_path
  ENV["ANDROID_NDK"] = $androidndkpath
  ENV["ANDROID_API_LEVEL"] = $found_api_level.to_s
  ENV["RHO_ROOT"] = $startdir
  ENV["BUILD_DIR"] ||= $startdir + "/platform/android/build"
  ENV["RHO_INC"] = $appincdir
  ENV["RHO_RES"] = $appres
  ENV["RHO_ANDROID_TMP_DIR"] = $tmpdir
  ENV["NEON_ROOT"] = $neon_root unless $neon_root.nil?
  ENV["CONFIG_XML"] = $config_xml unless $config_xml.nil?
  $ext_android_build_scripts.each do |ext, builddata|
    ENV["TARGET_TEMP_DIR"] = File.join($app_builddir, 'extensions', ext)
    ENV['TEMP_FILES_DIR'] = File.join($tmpdir, ext)
    mkdir_p ENV["TARGET_TEMP_DIR"] unless File.directory? ENV["TARGET_TEMP_DIR"]
    mkdir_p ENV["TEMP_FILES_DIR"] unless File.directory? ENV["TEMP_FILES_DIR"]
    puts "Executing extension build script: #{ext}"
    # Rake-based scripts (and all scripts on Windows) run via Jake in-process;
    # other scripts run through $SHELL from the extension's directory.
    if RUBY_PLATFORM =~ /(win|w)32$/ || (builddata[1] == 'rake')
      Jake.run(builddata[1], [], builddata[0])
    else
      currentdir = Dir.pwd()
      Dir.chdir builddata[0]
      sh %{$SHELL #{builddata[1]}}
      Dir.chdir currentdir
    end
    raise "Cannot build #{builddata[0]}" unless $?.success?
    puts "Extension build script finished"
  end
  # Collect AndroidManifest patch files declared by extensions.
  $ext_android_manifest_changes.each do |ext, manifest_changes|
    addspath = File.join($app_builddir, 'extensions', ext, 'adds')
    mkdir_p addspath
    manifest_changes.each do |path|
      # Dispatch on patch type: .xml is merged later by :manifest,
      # .rb is evaluated, .erb templates are appended as-is.
      case File.extname(path)
      when '.xml'
        cp path, File.join(addspath, 'AndroidManifest.xml')
      when '.rb'
        cp path, File.join(addspath, 'AndroidManifest.rb')
      when '.erb'
        cp path, addspath
      else
        raise "Wrong AndroidManifest patch file: #{path}"
      end
    end
  end
  # Copy additional resource directories ("adds") shipped with extensions.
  $ext_android_adds.each do |ext, path|
    addspath = File.join($app_builddir, 'extensions', ext, 'adds')
    mkdir_p addspath
    Dir.glob(File.join(path, '*')).each do |add|
      cp_r add, addspath if File.directory? add
    end
  end
end #task :extensions
# Compile the bundled SQLite sources into a static library.
task :libsqlite => "config:android" do
  srcdir = File.join($shareddir, "sqlite")
  objdir = $objdir["sqlite"]
  libname = $libname["sqlite"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  cc_build 'libsqlite', objdir, ["-I\"#{srcdir}\"", "-I\"#{$shareddir}\""] or exit 1
  # Archive all produced object files into the static library.
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the bundled libcurl sources into a static library.
task :libcurl => "config:android" do
  # Steps to get curl_config.h from fresh libcurl sources:
  #export PATH=<ndkroot>/build/prebuilt/linux-x86/arm-eabi-4.2.1/bin:$PATH
  #export CC=arm-eabi-gcc
  #export CPP=arm-eabi-cpp
  #export CFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
  #export CPPFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
  #./configure --without-ssl --without-ca-bundle --without-ca-path --without-libssh2 --without-libidn --disable-ldap --disable-ldaps --host=arm-eabi
  srcdir = File.join $shareddir, "curl", "lib"
  objdir = $objdir["curl"]
  libname = $libname["curl"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  args = []
  # Use the pre-generated curl_config.h (see recipe above).
  args << "-DHAVE_CONFIG_H"
  args << "-I\"#{srcdir}/../include\""
  args << "-I\"#{srcdir}\""
  args << "-I\"#{$shareddir}\""
  cc_build 'libcurl', objdir, args or exit 1
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the bundled Ruby VM sources into a static library.
task :libruby => "config:android" do
  srcdir = File.join $shareddir, "ruby"
  objdir = $objdir["ruby"]
  libname = $libname["ruby"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  args = []
  # Silence known-noisy warnings in the third-party Ruby sources.
  args << "-Wno-uninitialized"
  args << "-Wno-missing-field-initializers"
  args << "-I\"#{srcdir}/include\""
  args << "-I\"#{srcdir}/android\""
  args << "-I\"#{srcdir}/generated\""
  args << "-I\"#{srcdir}\""
  args << "-I\"#{srcdir}/..\""
  args << "-I\"#{srcdir}/../sqlite\""
  args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  args << "-D__NEW__" if USE_OWN_STLPORT
  args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
  cc_build 'libruby', objdir, args or exit 1
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Build the bundled JSON parser as a static library.
task :libjson => "config:android" do
  json_src = File.join($shareddir, 'json')
  json_obj = $objdir['json']
  json_lib = $libname['json']
  mkdir_p json_obj
  mkdir_p File.dirname(json_lib)
  # Compiler flags: json sources, shared root, then optional STL includes.
  cflags = ["-I\"#{json_src}\"", "-I\"#{json_src}/..\""]
  cflags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cflags << "-D__NEW__"
    cflags << "-I\"#{$stlport_includes}\""
  end
  cc_build('libjson', json_obj, cflags) or exit 1
  objects = Dir.glob(json_obj + '/**/*.o').map { |o| '"' + o + '"' }
  cc_ar('"' + json_lib + '"', objects) or exit 1
end
# Build the bundled STLport C++ standard library — only when USE_OWN_STLPORT
# is set; otherwise this task is a no-op.
task :libstlport => "config:android" do
  if USE_OWN_STLPORT
    objdir = $objdir["stlport"]
    libname = $libname["stlport"]
    mkdir_p objdir
    mkdir_p File.dirname(libname)
    args = []
    args << "-I\"#{$stlport_includes}\""
    args << "-DTARGET_OS=android"
    args << "-DOSNAME=android"
    args << "-DCOMPILER_NAME=gcc"
    args << "-DBUILD_OSNAME=android"
    args << "-D_REENTRANT"
    args << "-D__NEW__"
    # Dead-code friendly codegen: keeps the final .so small after GC-sections.
    args << "-ffunction-sections"
    args << "-fdata-sections"
    args << "-fno-rtti"
    args << "-fno-exceptions"
    cc_build 'libstlport', objdir, args or exit 1
    cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
  end
end
# Build the shared logging subsystem as a static library.
task :librholog => "config:android" do
  log_src = File.join($shareddir, 'logging')
  log_obj = $objdir['rholog']
  log_lib = $libname['rholog']
  mkdir_p log_obj
  mkdir_p File.dirname(log_lib)
  # Compiler flags: shared root, then optional STL includes.
  cflags = ["-I\"#{log_src}/..\""]
  cflags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cflags << "-D__NEW__"
    cflags << "-I\"#{$stlport_includes}\""
  end
  cc_build('librholog', log_obj, cflags) or exit 1
  objects = Dir.glob(log_obj + '/**/*.o').map { |o| '"' + o + '"' }
  cc_ar('"' + log_lib + '"', objects) or exit 1
end
# Compile the Rhodes application main sources into a static library.
task :librhomain => "config:android" do
  srcdir = $shareddir
  objdir = $objdir["rhomain"]
  libname = $libname["rhomain"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  args = []
  args << "-I\"#{srcdir}\""
  args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  args << "-D__NEW__" if USE_OWN_STLPORT
  args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
  cc_build 'librhomain', objdir, args or exit 1
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the shared "common" sources into a static library.
task :librhocommon => "config:android" do
  objdir = $objdir["rhocommon"]
  libname = $libname["rhocommon"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  args = []
  args << "-I\"#{$shareddir}\""
  args << "-I\"#{$shareddir}/curl/include\""
  args << "-I\"#{$shareddir}/ruby/include\""
  args << "-I\"#{$shareddir}/ruby/android\""
  args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  args << "-D__NEW__" if USE_OWN_STLPORT
  args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
  cc_build 'librhocommon', objdir, args or exit 1
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the shared database layer into a static library.
task :librhodb => "config:android" do
  srcdir = File.join $shareddir, "db"
  objdir = $objdir["rhodb"]
  libname = $libname["rhodb"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  args = []
  args << "-I\"#{srcdir}\""
  args << "-I\"#{srcdir}/..\""
  args << "-I\"#{srcdir}/../sqlite\""
  args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  args << "-D__NEW__" if USE_OWN_STLPORT
  args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
  cc_build 'librhodb', objdir, args or exit 1
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Compile the shared sync engine into a static library.
task :librhosync => "config:android" do
  srcdir = File.join $shareddir, "sync"
  objdir = $objdir["rhosync"]
  libname = $libname["rhosync"]
  mkdir_p objdir
  mkdir_p File.dirname(libname)
  args = []
  args << "-I\"#{srcdir}\""
  args << "-I\"#{srcdir}/..\""
  args << "-I\"#{srcdir}/../sqlite\""
  args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  args << "-D__NEW__" if USE_OWN_STLPORT
  args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
  cc_build 'librhosync', objdir, args or exit 1
  cc_ar ('"'+(libname)+'"'), Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'} or exit 1
end
# Aggregate task: build every static library required to link librhodes.so.
task :libs => [:libsqlite, :libcurl, :libruby, :libjson, :libstlport, :librhodb, :librhocommon, :librhomain, :librhosync, :librholog]
# Generate derived configuration sources: genconfig.h (C defines for the
# Google API key and enabled capabilities), rhocaps.inc (capability macro
# list), Capabilities.java and Push.java. genconfig.h and rhocaps.inc are
# only rewritten when their content would change, to avoid needless rebuilds.
task :genconfig => "config:android" do
  mkdir_p $appincdir unless File.directory? $appincdir
  # Generate genconfig.h
  genconfig_h = File.join($appincdir, 'genconfig.h')
  gapi_already_enabled = false
  caps_already_enabled = {}
  #ANDROID_PERMISSIONS.keys.each do |k|
  # caps_already_enabled[k] = false
  #end
  # Parse the existing genconfig.h (if any) to learn its current settings.
  if File.file? genconfig_h
    File.open(genconfig_h, 'r') do |f|
      while line = f.gets
        if line =~ /^\s*#\s*define\s+RHO_GOOGLE_API_KEY\s+"[^"]*"\s*$/
          gapi_already_enabled = true
        else
          ANDROID_PERMISSIONS.keys.each do |k|
            if line =~ /^\s*#\s*define\s+RHO_CAP_#{k.upcase}_ENABLED\s+(.*)\s*$/
              value = $1.strip
              if value == 'true'
                caps_already_enabled[k] = true
              elsif value == 'false'
                caps_already_enabled[k] = false
              else
                raise "Unknown value for the RHO_CAP_#{k.upcase}_ENABLED: #{value}"
              end
            end
          end
        end
      end
    end
  end
  # Regenerate only when the file is missing or any setting changed.
  regenerate = false
  regenerate = true unless File.file? genconfig_h
  regenerate = $use_geomapping != gapi_already_enabled unless regenerate
  caps_enabled = {}
  ANDROID_PERMISSIONS.keys.each do |k|
    caps_enabled[k] = $app_config["capabilities"].index(k) != nil
    regenerate = true if caps_already_enabled[k].nil? or caps_enabled[k] != caps_already_enabled[k]
  end
  puts caps_enabled.inspect
  if regenerate
    puts "Need to regenerate genconfig.h"
    $stdout.flush
    File.open(genconfig_h, 'w') do |f|
      f.puts "#ifndef RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
      f.puts "#define RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
      f.puts ""
      f.puts "#define RHO_GOOGLE_API_KEY \"#{$gapikey}\"" if $use_geomapping and !$gapikey.nil?
      caps_enabled.each do |k,v|
        f.puts "#define RHO_CAP_#{k.upcase}_ENABLED #{v ? "true" : "false"}"
      end
      f.puts ""
      f.puts "#endif /* RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F */"
    end
  else
    puts "No need to regenerate genconfig.h"
    $stdout.flush
  end
  # Generate rhocaps.inc
  rhocaps_inc = File.join($appincdir, 'rhocaps.inc')
  caps_already_defined = []
  if File.exists? rhocaps_inc
    File.open(rhocaps_inc, 'r') do |f|
      while line = f.gets
        next unless line =~ /^\s*RHO_DEFINE_CAP\s*\(\s*([A-Z_]*)\s*\)\s*\s*$/
        caps_already_defined << $1.downcase
      end
    end
  end
  # Rewrite rhocaps.inc only when the capability set differs.
  if caps_already_defined.sort.uniq != ANDROID_PERMISSIONS.keys.sort.uniq
    puts "Need to regenerate rhocaps.inc"
    $stdout.flush
    File.open(rhocaps_inc, 'w') do |f|
      ANDROID_PERMISSIONS.keys.sort.each do |k|
        f.puts "RHO_DEFINE_CAP(#{k.upcase})"
      end
    end
  else
    puts "No need to regenerate rhocaps.inc"
    $stdout.flush
  end
  # Generate Capabilities.java
  File.open($app_capabilities_java, "w") do |f|
    f.puts "package #{JAVA_PACKAGE_NAME};"
    f.puts "public class Capabilities {"
    ANDROID_PERMISSIONS.keys.sort.each do |k|
      val = 'false'
      val = 'true' if caps_enabled[k]
      f.puts " public static final boolean #{k.upcase}_ENABLED = #{val};"
    end
    f.puts "}"
  end
  # Generate Push.java
  File.open($app_push_java, "w") do |f|
    f.puts "package #{JAVA_PACKAGE_NAME};"
    f.puts "public class Push {"
    f.puts " public static final String SENDER = \"#{$push_sender}\";"
    if $push_notifications.nil?
      f.puts " public static final String PUSH_NOTIFICATIONS = \"none\";"
    else
      f.puts " public static final String PUSH_NOTIFICATIONS = \"#{$push_notifications}\";"
    end
    f.puts "};"
  end
end
# Generate NativeLibraries.java, which loads every built .so at startup
# with the core "rhodes" library loaded last.
task :gen_java_ext => "config:android" do
  File.open($app_native_libs_java, "w") do |f|
    f.puts "package #{JAVA_PACKAGE_NAME};"
    f.puts "public class NativeLibraries {"
    f.puts " public static void load() {"
    f.puts " // Load native .so libraries"
    Dir.glob($app_builddir + "/**/lib*.so").reverse.each do |lib|
      # Libraries marked 'noautoload' are skipped here and loaded on demand.
      next if lib =~ /noautoload/
      libname = File.basename(lib).gsub(/^lib/, '').gsub(/\.so$/, '')
      f.puts " System.loadLibrary(\"#{libname}\");"
    end
    f.puts " // Load native implementation of rhodes"
    f.puts " System.loadLibrary(\"rhodes\");"
    f.puts " }"
    f.puts "};"
  end
end
# Aggregate task: generate all derived sources (genconfig.h and Java stubs).
task :gensources => [:genconfig, :gen_java_ext]
# Compile and link librhodes.so — the native core of the application — from
# the JNI sources plus every static library built by :libs.
task :librhodes => [:libs, :gensources] do
  srcdir = File.join $androidpath, "Rhodes", "jni", "src"
  libdir = File.join $app_builddir,'librhodes','lib','armeabi'
  objdir = File.join $tmpdir,'librhodes'
  libname = File.join libdir,'librhodes.so'
  mkdir_p libdir
  mkdir_p objdir
  # add licence lib to build
  lic_dst = File.join $app_builddir,'librhodes','libMotorolaLicence.a'
  lic_src = $startdir + "/res/libs/motorolalicence/android/libMotorolaLicence.a"
  rm_f lic_dst
  cp lic_src, lic_dst
  args = []
  args << "-I\"#{$appincdir}\""
  args << "-I\"#{srcdir}/../include\""
  args << "-I\"#{srcdir}/../include/rhodes/details\""
  args << "-I\"#{$shareddir}\""
  args << "-I\"#{$shareddir}/common\""
  args << "-I\"#{$shareddir}/sqlite\""
  args << "-I\"#{$shareddir}/curl/include\""
  args << "-I\"#{$shareddir}/ruby/include\""
  args << "-I\"#{$shareddir}/ruby/android\""
  args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  args << "-D__SGI_STL_INTERNAL_PAIR_H" if USE_OWN_STLPORT
  args << "-D__NEW__" if USE_OWN_STLPORT
  args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
  cc_build 'librhodes', objdir, args or exit 1
  # Every library built by :libs becomes a link dependency.
  deps = []
  $libname.each do |k,v|
    deps << v
  end
  args = []
  args << "-L\"#{$rhobindir}/#{$confdir}\""
  args << "-L\"#{libdir}\""
  rlibs = []
  rlibs << "log"
  rlibs << "dl"
  rlibs << "z"
  rlibs.map! { |x| "-l#{x}" }
  elibs = []
  extlibs = Dir.glob($app_builddir + "/**/lib*.a")# + Dir.glob($app_builddir + "/**/lib*.so")
  extlibs.each do |lib|
    args << "-L\"#{File.dirname(lib)}\""
  end
  stub = []
  extlibs.reverse.each do |f|
    lparam = "-l" + File.basename(f).gsub(/^lib/,"").gsub(/\.(a|so)$/,"")
    elibs << lparam
    # Workaround for GNU ld: this way we have specified one lib multiple times
    # command line so ld's dependency mechanism will find required functions
    # independently of its position in command line
    stub.each do |s|
      args << s
    end
    stub << lparam
  end
  args += elibs
  args += elibs
  args += rlibs
  #mkdir_p File.dirname(libname) unless File.directory? File.dirname(libname)
  cc_link libname, Dir.glob(objdir + "/**/*.o").collect{|x| '"'+x+'"'}, args, deps or exit 1
  # Copy into the Eclipse project and strip debug symbols from that copy.
  destdir = File.join($androidpath, "Rhodes", "libs", "armeabi")
  mkdir_p destdir unless File.exists? destdir
  cp_r libname, destdir
  cc_run($stripbin, ['"'+File.join(destdir, File.basename(libname))+'"'])
end
# Generate AndroidManifest.xml: compute versionCode from the app version,
# collect the required permissions, render the manifest template, then merge
# in raw AndroidManifest.xml fragments contributed by native extensions.
task :manifest => ["config:android", :extensions] do
  version = {'major' => 0, 'minor' => 0, 'patch' => 0}
  if $app_config["version"]
    if $app_config["version"] =~ /^(\d+)$/
      version["major"] = $1.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
      version["patch"] = $3.to_i
    end
  end
  # Encode x.y.z as a single integer versionCode: x*10000 + y*100 + z.
  version = version["major"]*10000 + version["minor"]*100 + version["patch"]
  usesPermissions = ['android.permission.INTERNET', 'android.permission.PERSISTENT_ACTIVITY', 'android.permission.WAKE_LOCK']
  $app_config["capabilities"].each do |cap|
    cap = ANDROID_PERMISSIONS[cap]
    next if cap.nil?
    cap = [cap] unless cap.is_a? Array
    cap.each do |cap_item|
      if cap_item.is_a? Proc
        #caps_proc << cap_item
        next
      end
      if cap_item.is_a? String
        usesPermissions << "android.permission.#{cap_item}"
        next
      end
    end
  end
  usesPermissions.uniq!
  hidden = get_boolean($app_config['hidden_app'])
  generator = ManifestGenerator.new JAVA_PACKAGE_NAME, $app_package_name, hidden, usesPermissions
  generator.versionName = $app_config["version"]
  generator.versionCode = version
  generator.installLocation = 'auto'
  generator.minSdkVer = $min_sdk_level
  generator.maxSdkVer = $max_sdk_level
  generator.usesLibraries['com.google.android.maps'] = true if $use_google_addon_api
  generator.addGooglePush(File.join($androidpath,'Rhodes','PushReceiver.erb')) if $app_config["capabilities"].index 'push'
  generator.addUriParams $uri_scheme, $uri_host, $uri_path_prefix
  # AndroidManifest.rb patches are evaluated in this context; *.erb templates
  # are appended to the generator.
  Dir.glob(File.join($app_builddir,'extensions','*','adds','AndroidManifest.rb')).each do |extscript|
    puts "Evaluating #{extscript}"
    eval(File.new(extscript).read)
  end
  Dir.glob(File.join($app_builddir,'extensions','*','adds','Manifest*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.manifestManifestAdds << exttemplate
  end
  Dir.glob(File.join($app_builddir,'extensions','*','adds','Application*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.applicationManifestAdds << exttemplate
  end
  manifest = generator.render $rhomanifesterb
  File.open($appmanifest, "w") { |f| f.write manifest }
  #######################################################
  # Deprecated staff below: merge raw AndroidManifest.xml fragments from
  # extensions into the freshly rendered manifest via REXML.
  app_f = File.new($appmanifest)
  manifest_orig_doc = REXML::Document.new(app_f)
  app_f.close
  dst_manifest = manifest_orig_doc.elements["manifest"]
  dst_application = manifest_orig_doc.elements["manifest/application"]
  dst_main_activity = nil
  puts '$$$ try to found MainActivity'
  dst_application.elements.each("activity") do |a|
    puts '$$$ activity with attr = '+a.attribute('name','android').to_s
    if a.attribute('name','android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
      puts ' $$$ FOUND !'
      dst_main_activity = a
    end
  end
  Dir.glob(File.join($app_builddir,'extensions','*','adds','AndroidManifest.xml')).each do |ext_manifest|
    if File.exists? ext_manifest
      puts 'AndroidManifest.xml['+ext_manifest+'] from native extension found !'
      manifest_ext_doc = REXML::Document.new(File.new(ext_manifest))
      src_manifest = manifest_ext_doc.elements["manifest"]
      src_application = manifest_ext_doc.elements["manifest/application"]
      if src_application != nil
        puts 'Extension Manifest process application item :'
        src_application.elements.each do |e|
          puts '$$$ process element with attr = '+e.attribute('name','android').to_s
          # Children of a RhodesActivity element are grafted into the main
          # activity; everything else goes under <application>.
          if e.attribute('name','android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
            e.elements.each do |sube|
              puts ' add item to MainActivity['+sube.xpath+']'
              dst_main_activity.add sube
            end
          else
            puts ' add item ['+e.xpath+']'
            dst_application.add e
          end
        end
      end
      puts 'Extension Manifest process root <manifest> item :'
      src_manifest.elements.each do |e|
        p = e.xpath
        if p != '/manifest/application'
          dst_e = manifest_orig_doc.elements[p]
          if dst_e != nil
            # A <uses-sdk> from an extension replaces the original one.
            if p == '/manifest/uses-sdk'
              puts ' found and delete original item ['+p+']'
              manifest_orig_doc.elements.delete p
            end
          end
          puts ' and new item ['+p+']'
          dst_manifest.add e
        end
      end
    else
      # BUGFIX: the original referenced an undefined local `m` here, raising
      # NameError whenever this branch was taken; use ext_manifest instead.
      puts 'AndroidManifest change file ['+ext_manifest+'] from native extension not found !'
    end
  end
  puts 'delete original manifest'
  File.delete($appmanifest)
  updated_f = File.open($appmanifest, "w")
  manifest_orig_doc.write updated_f, 2
  updated_f.close
  #rm tappmanifest
  puts 'Manifest updated by extension is saved!'
end
# Apply the configured application name to the Android resources.
task :resources => [:rhobundle, :extensions] do
  set_app_name_android($appname)
end
#desc "Build Rhodes for android"
# Compile the Rhodes Java sources: generates the startup-listener class and
# the duplicated R.java, copies extension resources, javac-compiles all Java
# sources and jars them into Rhodes.jar.
task :rhodes => [:rhobundle, :librhodes, :manifest, :resources] do
  rm_rf $tmpdir + "/Rhodes"
  mkdir_p $tmpdir + "/Rhodes"
  # RhodesActivity Listeners
  f = StringIO.new("", "w+")
  f.puts '// WARNING! THIS FILE IS GENERATED AUTOMATICALLY! DO NOT EDIT IT MANUALLY!'
  f.puts 'package com.rhomobile.rhodes.extmanager;'
  f.puts ''
  f.puts 'class RhodesStartupListeners {'
  f.puts ''
  f.puts ' public static final String[] ourRunnableList = { ""'
  $ext_android_rhodes_activity_listener.each do |a|
    f.puts ' ,"'+a+'"'
  end
  f.puts ' };'
  f.puts '}'
  Jake.modify_file_if_content_changed($app_startup_listeners_java, f)
  puts 'EXT: add additional files to project before build'
  Dir.glob(File.join($app_builddir,'extensions','*','adds','*')).each do |res|
    if File.directory?(res) && (res != '.') && (res != '..')
      puts "add resources from extension [#{res}] to [#{$tmpdir}]"
      cp_r res, $tmpdir
    end
  end
  unless $config_xml.nil?
    rawres_path = File.join($tmpdir, 'res', 'raw')
    mkdir_p rawres_path unless File.exist? rawres_path
    cp $config_xml, File.join(rawres_path,'config.xml')
  end
  generate_rjava
  # Duplicate R.java under the com.rhomobile.rhodes package so framework code
  # can reference resources regardless of the application package name.
  mkdir_p File.join($app_rjava_dir, "R") if not File.exists? File.join($app_rjava_dir, "R")
  buf = File.new(File.join($app_rjava_dir, "R.java"),"r").read.gsub(/^\s*package\s*#{$app_package_name};\s*$/,"\npackage com.rhomobile.rhodes;\n")
  buf.gsub!(/public\s*static\s*final\s*int/, "public static int")
  File.open(File.join($app_rjava_dir, "R", "R.java"),"w") { |f| f.write(buf) }
  # Build the javac source list, skipping generated AndroidR.java and the
  # Google Maps classes when geomapping is disabled.
  srclist = File.join($builddir, "RhodesSRC_build.files")
  newsrclist = File.join($tmpdir, "RhodesSRC_build.files")
  lines = []
  File.open(srclist, "r") do |f|
    while line = f.gets
      line.chomp!
      next if line =~ /\/AndroidR\.java\s*$/
      if !$use_geomapping
        next if line == "platform/android/Rhodes/src/com/rhomobile/rhodes/mapview/GoogleMapView.java"
        next if line == "platform/android/Rhodes/src/com/rhomobile/rhodes/mapview/AnnotationsOverlay.java"
        next if line == "platform/android/Rhodes/src/com/rhomobile/rhodes/mapview/CalloutOverlay.java"
      end
      lines << line
    end
  end
  lines << "\"" +File.join($app_rjava_dir, "R.java")+"\""
  lines << "\"" +File.join($app_rjava_dir, "R", "R.java")+"\""
  lines << "\"" +$app_android_r+"\""
  lines << "\"" +$app_native_libs_java+"\""
  lines << "\"" +$app_capabilities_java+"\""
  lines << "\"" +$app_push_java+"\""
  lines << "\"" +$app_startup_listeners_java+"\""
  File.open(newsrclist, "w") { |f| f.write lines.join("\n") }
  srclist = newsrclist
  classpath = $androidjar
  classpath += $path_separator + $google_classpath if $google_classpath
  classpath += $path_separator + $motosol_classpath if $motosol_classpath
  classpath += $path_separator + File.join($tmpdir, 'Rhodes')
  javafilelists = [srclist]
  extlist = File.join $app_builddir, "ext_build.files"
  if File.exists? extlist
    puts "#{extlist} is found! THere are addditional java files"
    javafilelists << extlist
  end
  java_compile(File.join($tmpdir, 'Rhodes'), classpath, javafilelists)
  # Jar up the compiled classes into Rhodes.jar.
  files = []
  Dir.glob(File.join($tmpdir, "Rhodes", "*")).each do |f|
    relpath = Pathname.new(f).relative_path_from(Pathname.new(File.join($tmpdir, "Rhodes"))).to_s
    files << relpath
  end
  unless files.empty?
    jar = File.join($app_builddir,'librhodes','Rhodes.jar')
    args = ["cf", jar]
    args += files
    Jake.run($jarbin, args, File.join($tmpdir, "Rhodes"))
    unless $?.success?
      raise "Error creating #{jar}"
    end
    $android_jars = [jar]
  end
end
# Compile additional Java sources contributed by extensions, producing one
# <ext>.jar per extension appended to $android_jars.
task :extensions_java => [:rhodes, :extensions] do
  puts 'Compile additional java files:'
  classpath = $androidjar
  classpath += $path_separator + $google_classpath if $google_classpath
  classpath += $path_separator + $motosol_classpath if $motosol_classpath
  classpath += $path_separator + File.join($tmpdir, 'Rhodes')
  Dir.glob(File.join($app_builddir,'**','*.jar')).each do |jar|
    classpath += $path_separator + jar
  end
  $ext_android_additional_sources.each do |extpath, list|
    ext = File.basename(extpath)
    puts "Compiling '#{ext}' extension java sources: #{list}"
    # Build a quoted, absolute source-list file for javac.
    srclist = Tempfile.new "#{ext}SRC_build"
    lines = []
    File.open(list, "r") do |f|
      while line = f.gets
        line.chomp!
        srclist.write "\"#{File.join(extpath, line)}\"\n"
        #srclist.write "#{line}\n"
      end
    end
    srclist.close
    mkdir_p File.join($tmpdir, ext)
    #puts '$$$$$$$$$$$$$$$$$$ START'
    #currentdir = Dir.pwd()
    #Dir.chdir extpath
    java_compile(File.join($tmpdir, ext), classpath, [srclist.path])
    #Dir.chdir currentdir
    #puts '$$$$$$$$$$$$$$$$$$ FINISH'
    extjar = File.join $app_builddir,'extensions',ext,ext + '.jar'
    args = ["cf", extjar, '.']
    Jake.run($jarbin, args, File.join($tmpdir, ext))
    unless $?.success?
      raise "Error creating #{extjar}"
    end
    $android_jars << extjar
  end
end
# Placeholder task: extension "adds" handling lives in config:android:extensions.
task :extensions_adds => "config:android:extensions" do
end
# Build a full upgrade bundle (upgrade_bundle.zip) from the current RhoBundle.
task :upgrade_package => :rhobundle do
  #puts '$$$$$$$$$$$$$$$$$$'
  #puts 'targetdir = '+$targetdir.to_s
  #puts 'bindir = '+$bindir.to_s
  android_targetdir = $targetdir #File.join($targetdir, 'android')
  mkdir_p android_targetdir if not File.exists? android_targetdir
  zip_file_path = File.join(android_targetdir, 'upgrade_bundle.zip')
  Jake.build_file_map(File.join($srcdir, "apps"), "rhofilelist.txt")
  Jake.zip_upgrade_bundle($bindir, zip_file_path)
end
# Build a partial upgrade bundle (upgrade_bundle_partial.zip) containing only
# the files listed in upgrade_package_add_files.txt, plus a removal list from
# upgrade_package_remove_files.txt, both read from the app directory.
task :upgrade_package_partial => ["build:android:rhobundle"] do
  #puts '$$$$$$$$$$$$$$$$$$'
  #puts 'targetdir = '+$targetdir.to_s
  #puts 'bindir = '+$bindir.to_s
  # process partial update
  add_list_full_name = File.join($app_path, 'upgrade_package_add_files.txt')
  remove_list_full_name = File.join($app_path, 'upgrade_package_remove_files.txt')
  src_folder = File.join($bindir, 'RhoBundle')
  src_folder = File.join(src_folder, 'apps')
  tmp_folder = $bindir + '_tmp_partial'
  rm_rf tmp_folder if File.exists? tmp_folder
  mkdir_p tmp_folder
  dst_tmp_folder = File.join(tmp_folder, 'RhoBundle')
  mkdir_p dst_tmp_folder
  # copy all
  cp_r src_folder, dst_tmp_folder
  dst_tmp_folder = File.join(dst_tmp_folder, 'apps')
  mkdir_p dst_tmp_folder
  # Read the "add" list, mapping source names to their compiled names
  # (.rb -> .iseq, .erb -> _erb.iseq); chop drops the trailing newline.
  add_files = []
  if File.exists? add_list_full_name
    File.open(add_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chop
        add_files << fixed_path
        puts '### ['+fixed_path+']'
      end
    end
  end
  remove_files = []
  if File.exists? remove_list_full_name
    File.open(remove_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chop
        remove_files << fixed_path
        #puts '### ['+fixed_path+']'
      end
    end
  end
  # Delete everything not on the "add" list from the temporary bundle copy.
  psize = dst_tmp_folder.size+1
  Dir.glob(File.join(dst_tmp_folder, '**/*')).sort.each do |f|
    relpath = f[psize..-1]
    if File.file?(f)
      #puts '$$$ ['+relpath+']'
      if not add_files.include?(relpath)
        rm_rf f
      end
    end
  end
  Jake.build_file_map( dst_tmp_folder, "upgrade_package_add_files.txt" )
  #if File.exists? add_list_full_name
  # File.open(File.join(dst_tmp_folder, 'upgrade_package_add_files.txt'), "w") do |f|
  # add_files.each do |j|
  # f.puts "#{j}\tfile\t0\t0"
  # end
  # end
  #end
  if File.exists? remove_list_full_name
    File.open(File.join(dst_tmp_folder, 'upgrade_package_remove_files.txt'), "w") do |f|
      remove_files.each do |j|
        f.puts "#{j}"
        #f.puts "#{j}\tfile\t0\t0"
      end
    end
  end
  mkdir_p $targetdir if not File.exists? $targetdir
  zip_file_path = File.join($targetdir, "upgrade_bundle_partial.zip")
  Jake.zip_upgrade_bundle( tmp_folder, zip_file_path)
  rm_rf tmp_folder
end
#desc "build all"
# Aggregate task: full Android build (bundle, native lib + Java, extension jars).
task :all => [:rhobundle, :rhodes, :extensions_java]
end
end
namespace "package" do
# Package the application: dex all jars into classes.dex, then assemble
# rhodes.ap_ with the manifest, resources, assets and stripped native libs.
task :android => "build:android:all" do
  puts "Running dx utility"
  args = []
  args << "-Xmx1024m"
  args << "-jar"
  args << $dxjar
  args << "--dex"
  args << "--output=#{$bindir}/classes.dex"
  Dir.glob(File.join($app_builddir,'**','*.jar')).each do |jar|
    args << jar
  end
  Jake.run(File.join($java, 'java'+$exe_ext), args)
  unless $?.success?
    raise "Error running DX utility"
  end
  manifest = $appmanifest
  resource = $appres
  assets = File.join($tmpdir, 'assets')
  resourcepkg = $bindir + "/rhodes.ap_"
  puts "Packaging Assets and Jars"
  # app name was already applied during "build:android:all"
  #set_app_name_android($appname)
  args = ["package", "-f", "-M", manifest, "-S", resource, "-A", assets, "-I", $androidjar, "-F", resourcepkg]
  Jake.run($aapt, args)
  unless $?.success?
    raise "Error running AAPT (1)"
  end
  # Workaround: manually add files starting with '_' because aapt silently ignore such files when creating package
  Dir.glob(File.join($tmpdir, "assets/**/*")).each do |f|
    next unless File.basename(f) =~ /^_/
    relpath = Pathname.new(f).relative_path_from(Pathname.new($tmpdir)).to_s
    puts "Add #{relpath} to #{resourcepkg}..."
    args = ["uf", resourcepkg, relpath]
    Jake.run($jarbin, args, $tmpdir)
    unless $?.success?
      # BUGFIX: this step runs the jar utility, not aapt — report it correctly.
      raise "Error running jar utility while adding #{relpath}"
    end
  end
  puts "Packaging Native Libs"
  # Add native librhodes.so
  #rm_rf File.join($tmpdir, "lib")
  mkdir_p File.join($tmpdir, "lib/armeabi")
  cp_r File.join($app_builddir,'librhodes','lib','armeabi','librhodes.so'), File.join($tmpdir, "lib/armeabi")
  # Add extensions .so libraries
  Dir.glob($app_builddir + "/**/lib*.so").each do |lib|
    cp_r lib, File.join($tmpdir, "lib/armeabi")
  end
  $ext_android_additional_lib.each do |lib|
    cp_r lib, File.join($tmpdir, "lib/armeabi")
  end
  args = ["uf", resourcepkg]
  # Strip them all to decrease size
  Dir.glob($tmpdir + "/lib/armeabi/lib*.so").each do |lib|
    cc_run($stripbin, ['"'+lib+'"'])
    args << "lib/armeabi/#{File.basename(lib)}"
  end
  Jake.run($jarbin, args, $tmpdir)
  err = $?
  #rm_rf $tmpdir + "/lib"
  unless err.success?
    # BUGFIX: this step also runs the jar utility, not aapt.
    raise "Error running jar utility while adding native libraries"
  end
end
end
namespace "device" do
namespace "android" do
desc "Build debug self signed for device"
# Build a debug-signed APK, zipalign it, and record the package name in
# app_info.txt next to the final APK.
task :debug => "package:android" do
  dexfile = $bindir + "/classes.dex"
  simple_apkfile = $targetdir + "/" + $appname + "-tmp.apk"
  final_apkfile = $targetdir + "/" + $appname + "-debug.apk"
  resourcepkg = $bindir + "/rhodes.ap_"
  # Last arg true = sign with the debug key.
  apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, true
  puts "Align Debug APK file"
  args = []
  args << "-f"
  args << "-v"
  args << "4"
  args << simple_apkfile
  args << final_apkfile
  out = Jake.run2($zipalign, args, :hide_output => true)
  puts out if USE_TRACES
  unless $?.success?
    puts "Error running zipalign"
    exit 1
  end
  #remove temporary files
  rm_rf simple_apkfile
  File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
    f.puts $app_package_name
  end
end
# Install the debug APK on an attached device via adb (waits for the device first).
task :install => :debug do
  apkfile = $targetdir + "/" + $appname + "-debug.apk"
  Jake.run $adb, ['-d', 'wait-for-device']
  puts "Install APK file"
  Jake.run($adb, ["-d", "install", "-r", apkfile])
  unless $?.success?
    puts "Error installing APK file"
    exit 1
  end
  puts "Install complete"
end
desc "Build production signed for device"
# Build a production APK: generate a private keystore on first use, sign the
# APK with jarsigner, zipalign it, and write app_info.txt next to the result.
task :production => "package:android" do
  dexfile = $bindir + "/classes.dex"
  simple_apkfile = $targetdir + "/" + $appname + "_tmp.apk"
  final_apkfile = $targetdir + "/" + $appname + "_signed.apk"
  signed_apkfile = $targetdir + "/" + $appname + "_tmp_signed.apk"
  resourcepkg = $bindir + "/rhodes.ap_"
  # Last arg false = do not debug-sign; we sign explicitly below.
  apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, false
  if not File.exists? $keystore
    puts "Generating private keystore..."
    mkdir_p File.dirname($keystore) unless File.directory? File.dirname($keystore)
    args = []
    args << "-genkey"
    args << "-alias"
    args << $storealias
    args << "-keyalg"
    args << "RSA"
    args << "-validity"
    args << "20000"
    args << "-keystore"
    args << $keystore
    args << "-storepass"
    args << $storepass
    args << "-keypass"
    args << $keypass
    Jake.run($keytool, args)
    unless $?.success?
      puts "Error generating keystore file"
      exit 1
    end
  end
  puts "Signing APK file"
  args = []
  args << "-sigalg"
  args << "MD5withRSA"
  args << "-digestalg"
  args << "SHA1"
  args << "-verbose"
  args << "-keystore"
  args << $keystore
  args << "-storepass"
  args << $storepass
  args << "-signedjar"
  args << signed_apkfile
  args << simple_apkfile
  args << $storealias
  Jake.run($jarsigner, args)
  unless $?.success?
    puts "Error running jarsigner"
    exit 1
  end
  puts "Align APK file"
  args = []
  args << "-f"
  args << "-v"
  args << "4"
  args << '"' + signed_apkfile + '"'
  args << '"' + final_apkfile + '"'
  Jake.run($zipalign, args)
  unless $?.success?
    puts "Error running zipalign"
    exit 1
  end
  #remove temporary files
  rm_rf simple_apkfile
  rm_rf signed_apkfile
  File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
    f.puts $app_package_name
  end
end
#task :getlog => "config:android" do
# AndroidTools.get_app_log($appname, true) or exit 1
#end
end
end
#namespace "emulator" do
# namespace "android" do
# task :getlog => "config:android" do
# AndroidTools.get_app_log($appname, false) or exit 1
# end
# end
#end
namespace "run" do
namespace "android" do
# Print the configured application log path ($applog_path is set by
# config:android from the app's "applog" setting).
task :get_log => "config:android" do
  puts "log_file=" + $applog_path
end
# Run a spec suite on device ('-d') or emulator ('-e'), tailing the RhoLogSpec
# log file and feeding each line through Jake.process_spec_output until the
# suite reports completion or the app dies. $device_flag is set by the
# phone_spec/framework_spec wrapper tasks before this is invoked.
task :spec => ["device:android:debug"] do
  if $device_flag == '-e'
    Rake::Task["config:android:emulator"].invoke
  else
    Rake::Task["config:android:device"].invoke
  end
  log_name = $app_path + '/RhoLogSpec.txt'
  File.delete(log_name) if File.exist?(log_name)
  AndroidTools.logclear($device_flag)
  run_emulator( :hidden => true ) if $device_flag == '-e'
  do_uninstall($device_flag)
  # Failsafe to prevent eternal hangs
  Thread.new {
    sleep 2000
    if $device_flag == '-e'
      AndroidTools.kill_adb_and_emulator
    else
      AndroidTools.kill_adb
    end
  }
  load_app_and_run($device_flag)
  AndroidTools.logcat($device_flag, log_name)
  Jake.before_run_spec
  start = Time.now
  # Poll up to ~60s for the application process to appear.
  puts "waiting for application"
  for i in 0..60
    if AndroidTools.application_running($device_flag, $app_package_name)
      break
    else
      sleep(1)
    end
  end
  # Poll up to ~120s for the log file to be created by logcat.
  puts "waiting for log: " + log_name
  for i in 0..120
    if !File.exist?(log_name)
      sleep(1)
    else
      break
    end
  end
  if !File.exist?(log_name)
    puts "Can not read log file: " + log_name
    exit(1)
  end
  puts "start read log"
  io = File.new(log_name, 'r:UTF-8')
  end_spec = false
  # Tail the log: each io.each pass consumes newly appended lines; keep
  # looping until the spec output signals completion or the app exits.
  while !end_spec do
    io.each do |line|
      #puts line
      # valid_encoding? only exists on 1.9+ strings; guard for 1.8 compat.
      if line.class.method_defined? "valid_encoding?"
        end_spec = !Jake.process_spec_output(line) if line.valid_encoding?
      else
        end_spec = !Jake.process_spec_output(line)
      end
      break if end_spec
    end
    break unless AndroidTools.application_running($device_flag, $app_package_name)
    sleep(5) unless end_spec
  end
  io.close
  Jake.process_spec_results(start)
  # stop app
  if $device_flag == '-e'
    AndroidTools.kill_adb_and_emulator
  else
    do_uninstall($device_flag)
    AndroidTools.kill_adb
  end
  $stdout.flush
end
# Default spec targets run against the emulator.
task :phone_spec => "phone_spec:emulator"
task :framework_spec => "framework_spec:emulator"
# The four device/emulator spec tasks are identical apart from the suite
# name and the adb target flag, so generate them from a table.
%w[phone_spec framework_spec].each do |suite|
  namespace suite do
    { :device => '-d', :emulator => '-e' }.each do |target, adb_flag|
      task target do
        $device_flag = adb_flag
        Jake.run_spec_app('android', suite)
        # Exit with the failure count (non-zero fails CI); a zero total
        # means nothing ran, which is also treated as failure.
        unless $dont_exit_on_failure
          exit 1 if $total.to_i == 0
          exit $failed.to_i
        end
      end
    end
  end
end
# Run both spec suites, aggregate their failure logs into ./faillog.txt,
# print aggregate counters and exit with the total failure count.
task :allspecs do
  $dont_exit_on_failure = true
  Rake::Task['run:android:phone_spec'].invoke
  Rake::Task['run:android:framework_spec'].invoke
  if $failed.to_i > 0
    failure_output = ""
    phone_faillog = app_expanded_path('phone_spec') + "/faillog.txt"
    framework_faillog = app_expanded_path('framework_spec') + "/faillog.txt"
    # File.read closes the handle; File.open(...).read leaked it.
    failure_output += "phone_spec failures:\n\n" + File.read(phone_faillog) if File.exist?(phone_faillog)
    failure_output += "framework_spec failures:\n\n" + File.read(framework_faillog) if File.exist?(framework_faillog)
    chdir basedir
    # String#each was removed in Ruby 1.9; write the buffer in one shot.
    File.open("faillog.txt", "w") { |io| io << failure_output }
  end
  puts "Agg Total: #{$total}"
  puts "Agg Passed: #{$passed}"
  puts "Agg Failed: #{$failed}"
  exit 1 if $total.to_i == 0
  exit $failed.to_i
end
# Build the debug APK, boot (or reuse) the emulator, install and launch.
task :emulator=>['config:android:emulator', 'device:android:debug'] do
  run_emulator
  load_app_and_run
end
desc "Run application on RhoSimulator"
# Both rhosimulator tasks prime $rhosim_config with the Android platform and
# (if configured) emulator OS version, then delegate to the generic
# run:rhosimulator / run:rhosimulator_debug tasks.
task :rhosimulator => ["config:set_android_platform","config:common"] do
  # App-level "android/version" wins over rhobuild.yml's setting.
  $emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
  $emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
  $rhosim_config = "platform='android'\r\n"
  $rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
  Rake::Task["run:rhosimulator"].invoke
end
task :rhosimulator_debug => ["config:set_android_platform","config:common"] do
  $emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
  $emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
  $rhosim_config = "platform='android'\r\n"
  $rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
  Rake::Task["run:rhosimulator_debug"].invoke
end
# task :get_info => "config:android" do
# $androidtargets.each do |level|
# puts "#{get_market_version(level[0])}"
# end
#
# emu_version = $emuversion
#
# puts ""
# cur_name = ""
#
# `"#{$androidbin}" list avd`.split(/\n/).each do |line|
# line.each_line do |item|
# ar = item.split(':')
# ar[0].strip!
# if ar[0] == "Name"
# cur_name = ar[1].strip!
# puts "#{cur_name}"
# end
#
# if $appavdname && cur_name == $appavdname && (ar[0] == "Target" || ar.length == 1)
#
# text = ar[0] == "Target" ? ar[1] : ar[0]
#
# nAnd = text.index("Android")
# if nAnd
# nAnd = text.index(" ", nAnd)
# nAnd1 = text.index("-", nAnd+1)
# nAnd1 = text.index(" ", nAnd+1) unless nAnd1
# emu_version = text[nAnd+1, nAnd1-nAnd-1]
# end
# end
# end
# end
#
# puts ""
#
# puts "#{emu_version}"
# puts "#{$appavdname}"
#
# end
# Ensure an Android emulator is up and fully booted, creating the AVD on
# first use. Blocks until adb reports a booted device or a 600s deadline.
#
# options:
#   :hidden - when truthy, start the emulator with -no-window (headless).
#
# Relies on globals set by config:android ($emuversion, $androidtargets,
# $androidbin, $emulator, $adb, $applog_path, $appavdname, ...).
def run_emulator(options = {})
  # NOTE(review): apkfile is computed but never used in this method.
  apkfile = Jake.get_absolute $targetdir + "/" + $appname + "-debug.apk"
  #AndroidTools.kill_adb
  Jake.run($adb, ['start-server'], nil, true)
  rm_f $applog_path if !$applog_path.nil?
  AndroidTools.logcat_process()
  running = AndroidTools.is_emulator_running
  if !running
    # Derive the AVD name from the emulator version plus add-on suffixes,
    # unless the app explicitly names one via $appavdname.
    $avdname = "rhoAndroid" + $emuversion.gsub(/[^0-9]/, "")
    $avdname += "google" if $use_google_addon_api
    $avdname += "motosol" if $use_motosol_api
    $avdtarget = $androidtargets[get_api_level($emuversion)]
    raise "Unable to run Android emulator. No appropriate target API for SDK version: #{$emuversion}" unless $avdtarget
    if $appavdname != nil
      $avdname = $appavdname
    end
    # "echo no" answers the "create custom hardware profile?" prompt.
    createavd = "\"#{$androidbin}\" create avd --name #{$avdname} --target #{$avdtarget} --sdcard 128M "
    system("echo no | #{createavd}") unless File.directory?( File.join(ENV['HOME'], ".android", "avd", "#{$avdname}.avd" ) )
    # Start the emulator, check on it every 5 seconds until it's running
    cmd = "\"#{$emulator}\" -cpu-delay 0"
    cmd << " -no-window" if options[:hidden]
    cmd << " -avd #{$avdname}"
    Thread.new { system(cmd) }
    puts "Waiting for emulator..."
    # adb wait-for-device returns "error..." output until the device node
    # exists; loop until it stops erroring.
    res = 'error'
    while res =~ /error/ do
      sleep 5
      res = Jake.run $adb, ['-e', 'wait-for-device']
      puts res
    end
    # Device node exists; now wait for full boot: the boot animation process
    # must be gone and android.process.acore must be running.
    puts "Waiting up to 600 seconds for emulator..."
    startedWaiting = Time.now
    adbRestarts = 1
    while (Time.now - startedWaiting < 600 )
      sleep 5
      now = Time.now
      started = false
      booted = true
      Jake.run2 $adb, ["-e", "shell", "ps"], :system => false, :hideerrors => false do |line|
        #puts line
        booted = false if line =~ /bootanimation/
        started = true if line =~ /android\.process\.acore/
        true
      end
      #puts "started: #{started}, booted: #{booted}"
      unless started and booted
        printf("%.2fs: ",(now - startedWaiting))
        # NOTE(review): comment says 60 seconds but the threshold is
        # 180 * adbRestarts — confirm which is intended.
        if (now - startedWaiting) > (180 * adbRestarts)
          # Restart the adb server every 60 seconds to prevent eternal waiting
          puts "Appears hung, restarting adb server"
          AndroidTools.kill_adb
          Jake.run($adb, ['start-server'], nil, true)
          adbRestarts += 1
          rm_f $applog_path if !$applog_path.nil?
          AndroidTools.logcat_process()
        else
          puts "Still waiting..."
        end
      else
        puts "Success"
        puts "Device is ready after " + (Time.now - startedWaiting).to_s + " seconds"
        break
      end
    end
    if !AndroidTools.is_emulator_running
      puts "Emulator still isn't up and running, giving up"
      exit 1
    end
  else
    puts "Emulator is up and running"
  end
  $stdout.flush
end
# Install the debug APK via `adb install -r` and launch the application.
# Retries the install up to 20 times (once per second) because the emulator
# may still be booting when first attempted.
#
# device_flag: '-e' to target the emulator (default) or '-d' for a device.
def load_app_and_run(device_flag = '-e')
  puts "Loading package"
  apk_path = Jake.get_absolute "#{$targetdir}/#{$appname}-debug.apk"
  installed = false
  20.times do
    pipe = Jake.run2($adb, [device_flag, "install", "-r", apk_path], :nowait => true)
    # Echo adb's output character-by-character while capturing it so we can
    # detect the "Success" marker.
    captured = ""
    while (ch = pipe.getc)
      $stdout.putc ch
      $stdout.flush
      captured << ch
    end
    pipe.close
    if captured.to_s =~ /Success/
      installed = true
      break
    end
    puts "Failed to load (possibly because emulator not done launching)- retrying"
    $stdout.flush
    sleep 1
  end
  if installed
    puts "Loading complete, starting application.."
    AndroidTools.run_application(device_flag)
  end
end
desc "build and install on device"
# Install on a physical device, launch, and start tailing logcat.
task :device => "device:android:install" do
  puts "Starting application..."
  AndroidTools.run_application("-d")
  puts "Application was started"
  AndroidTools.logcat_process("-d")
  puts "Starting log process ..."
end
end
desc "build and launch emulator"
# Convenience alias: run:android == run:android:emulator.
task :android => "run:android:emulator" do
end
end
namespace "uninstall" do
# Uninstall the application package via `adb uninstall`, retrying every
# five seconds (up to 21 attempts) while adb gives an inconclusive answer.
# Exits the process on adb failure or when all attempts are exhausted.
#
# flag: '-e' for emulator or '-d' for device.
def do_uninstall(flag)
  uninstall_args = [flag, "uninstall", $app_package_name]
  (0..20).each do |attempt|
    output = Jake.run($adb, uninstall_args)
    unless $?.success?
      puts "Error uninstalling application"
      exit 1
    end
    if output.include?("Success")
      puts "Application uninstalled successfully"
      break
    elsif output.include?("Failure")
      # "Failure" here means the package was not installed — not an error.
      puts "Application is not installed on the device"
      break
    else
      puts "Error uninstalling application"
      exit 1 if attempt == 20
    end
    sleep(5)
  end
end
namespace "android" do
  # Uninstall from a running emulator; fails fast if none is up.
  task :emulator => "config:android" do
    unless AndroidTools.is_emulator_running
      puts "WARNING!!! Emulator is not up and running"
      exit 1
    end
    do_uninstall('-e')
  end
  desc "uninstall from device"
  # Uninstall from a connected device; fails fast if none is connected.
  task :device => "config:android" do
    unless AndroidTools.is_device_running
      puts "WARNING!!! Device is not connected"
      exit 1
    end
    do_uninstall('-d')
  end
end
desc "uninstall from emulator"
# Convenience alias: uninstall:android == uninstall:android:emulator.
task :android => "uninstall:android:emulator" do
end
end
namespace "clean" do
  desc "Clean Android"
  task :android => "clean:android:all"
  namespace "android" do
    # Remove all generated Android build output: target dir, build dir,
    # loose files in bin/, generated sources and the tmp staging area.
    task :files => "config:android" do
      rm_rf $targetdir
      rm_rf $app_builddir
      Dir.glob( File.join( $bindir, "*.*" ) ) { |f| rm f, :force => true }
      rm_rf $srcdir
      rm_rf $tmpdir
    end
    task :all => :files
  end
end
|
#------------------------------------------------------------------------
# (The MIT License)
#
# Copyright (c) 2008-2011 Rhomobile, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# http://rhomobile.com
#------------------------------------------------------------------------
require File.dirname(__FILE__) + '/androidcommon.rb'
require File.dirname(__FILE__) + '/android_tools.rb'
require File.dirname(__FILE__) + '/manifest_generator.rb'
require File.dirname(__FILE__) + '/eclipse_project_generator.rb'
require 'pathname'
require 'tempfile'
# Whether to build against the copy of STLport shipped with Rhodes instead of
# the NDK's. NOTE(review): config:android reassigns this constant at runtime
# when the NDK lacks stlport, which triggers an "already initialized
# constant" warning — confirm this is intentional.
USE_OWN_STLPORT = false
#USE_TRACES = # see androidcommon.rb
# Thin delegation: map an Android API level to its market version string
# (e.g. 4 -> "1.6") via AndroidTools.
def get_market_version(apilevel)
  AndroidTools.get_market_version(apilevel)
end
# Thin delegation: map a market version string back to its API level
# via AndroidTools.
def get_api_level(version)
  AndroidTools.get_api_level(version)
end
# Base Java package of the Rhodes runtime classes.
JAVA_PACKAGE_NAME = 'com.rhomobile.rhodes'
# Here is the place where the default android platform is specified.
# For the complete list of android API levels and their mapping to
# market names (such as "Android-1.5" etc) see output of
# command "android list targets"
ANDROID_SDK_LEVEL = 4
# Maps Rhodes capability names to the Android manifest permission(s) they
# require. A value of nil means the capability needs no permission entry;
# a Proc in the list is invoked with the manifest document to apply extra
# edits (e.g. the Motorola Solutions add-on libraries).
ANDROID_PERMISSIONS = {
  'audio' => ['RECORD_AUDIO', 'MODIFY_AUDIO_SETTINGS'],
  'camera' => 'CAMERA',
  'gps' => ['ACCESS_FINE_LOCATION', 'ACCESS_COARSE_LOCATION'],
  'network_state' => 'ACCESS_NETWORK_STATE',
  'phone' => ['CALL_PHONE', 'READ_PHONE_STATE'],
  'pim' => ['READ_CONTACTS', 'WRITE_CONTACTS', 'GET_ACCOUNTS'],
  'record_audio' => 'RECORD_AUDIO',
  'vibrate' => 'VIBRATE',
  'bluetooth' => ['BLUETOOTH_ADMIN', 'BLUETOOTH'],
  'calendar' => ['READ_CALENDAR', 'WRITE_CALENDAR'],
  'sdcard' => 'WRITE_EXTERNAL_STORAGE',
  'push' => nil,
  'motorola' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest|
    add_motosol_sdk(manifest)
  end],
  'motoroladev' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest|
    add_motosol_sdk(manifest)
  end],
  'webkit_browser' => nil,
  'shared_runtime' => nil,
  'motorola_browser' => nil,
  'hardware_acceleration' => nil
}
# Capabilities that are always added to the app's capability list.
ANDROID_CAPS_ALWAYS_ENABLED = ['network_state']
# Register the Motorola Solutions SDK add-on libraries (scanner and MSR)
# as optional <uses-library> entries under every <application> element of
# the given REXML manifest node.
def add_motosol_sdk(manifest)
  addon_libs = ['com.motorolasolutions.scanner', 'com.motorolasolutions.emdk.msr'].map do |lib_name|
    entry = REXML::Element.new 'uses-library'
    entry.add_attribute 'android:name', lib_name
    entry.add_attribute 'android:required', 'false'
    entry
  end
  manifest.elements.each('application') do |app|
    addon_libs.each { |entry| app.add entry }
  end
end
# Copy the application's icon.png over the stock icon in every density
# bucket that exists under the generated res/ directory ($appres).
# Buckets that are not present are skipped (their stale icon is still
# removed defensively via rm_f, which tolerates missing paths).
def set_app_icon_android
  source_icon = File.join($app_path, "icon", "icon.png")
  %w[drawable drawable-hdpi drawable-mdpi drawable-ldpi].each do |bucket|
    bucket_dir = File.join($appres, bucket)
    target_icon = File.join(bucket_dir, "icon.png")
    rm_f target_icon
    cp source_icon, target_icon if File.exist? bucket_dir
  end
end
# Rebuild the application res/ directory ($appres) from the stock Rhodes
# resources ($rhores), then write the given application name into the
# app_name <string> resource of the copied strings.xml.
def set_app_name_android(newname)
  puts "set_app_name"
  $stdout.flush
  rm_rf $appres
  cp_r $rhores, $appres
  source_strings = File.join($rhores, "values", "strings.xml")
  target_strings = File.join($appres, "values", "strings.xml")
  # Parse the pristine Rhodes strings.xml and patch the app_name entry.
  strings_doc = REXML::Document.new(File.new(source_strings))
  strings_doc.elements["resources/string[@name='app_name']"].text = newname
  File.open(target_strings, "w") { |f| strings_doc.write f }
end
# Interpret the common textual spellings of "true" used in build configs.
# Returns true only for the exact strings below; anything else is false.
def get_boolean(arg)
  %w[true yes enabled enable 1].include?(arg)
end
namespace 'project' do
  namespace 'android' do
    # Generate an Eclipse project (.project, .classpath, project.properties,
    # AndroidManifest.xml, external tool builders) for the application under
    # <app>/project/android, using the ERB templates shipped with Rhodes.
    task :eclipse => ['config:android', 'config:android:extensions','build:android:manifest'] do
      #options = [ 'create', 'project',
      # '--path', $projectpath,
      # '--target', $androidtargets[$found_api_level][:id],
      # '--package', $app_package_name,
      # '--activity', 'RhodesActivity'
      #]
      #Jake.run($androidbin, options)
      # Template sources and generated-file destinations.
      project_template_path = File.join 'res','generators','templates','project','android'
      project_erb_path = File.join project_template_path,'project.erb'
      classpath_erb_path = File.join project_template_path,'classpath.erb'
      project_prop_erb_path = File.join project_template_path,'project.properties.erb'
      manifest_path = File.join $tmpdir,'AndroidManifest.xml'
      project_path = File.join $app_path,'project','android'
      project_file_path = File.join project_path,'.project'
      classpath_file_path = File.join project_path,'.classpath'
      project_prop_file_path = File.join project_path,'project.properties'
      manifest_file_path = File.join project_path,'AndroidManifest.xml'
      rhodes_path = File.absolute_path '.'
      generator = EclipseProjectGenerator.new $appname, $app_path, rhodes_path, $androidtargets[$found_api_level][:name]
      # Extension paths outside the Rhodes tree are linked into the project
      # as Eclipse virtual folders.
      $app_config["extpaths"].each do |extpath|
        next if extpath.start_with? rhodes_path
        generator.addVirtualFolder extpath
      end
      # Collect java source roots contributed by extensions: each listed
      # source file under .../src/ adds its src/ root to the classpath once.
      $ext_android_additional_sources.each do |extpath, list|
        classpaths = []
        ext = File.basename(extpath)
        puts "Adding '#{ext}' extension java sources: #{list}"
        File.open(list, "r") do |f|
          while line = f.gets
            line.chomp!
            src = File.join(extpath, line)
            if src =~ /(.*\/src\/).*/
              src = $1
              unless classpaths.index(src)
                puts "Add classpath: #{src}"
                classpaths << src
              end
            end
          end
        end
        generator.addExtension(ext, classpaths) unless classpaths.empty?
      end
      # Render the templates and lay down the project files; helper files
      # are only copied on first generation so user edits survive.
      mkdir_p project_path
      project_buf = generator.render project_erb_path
      File.open(project_file_path, "w") { |f| f.write project_buf }
      classpath_buf = generator.render classpath_erb_path
      File.open(classpath_file_path, "w") { |f| f.write classpath_buf }
      project_prop_buf = generator.render project_prop_erb_path
      File.open(project_prop_file_path, "w") { |f| f.write project_prop_buf }
      cp_r File.join(project_template_path,'externalToolBuilders'), File.join(project_path,'.externalToolBuilders') unless File.exists? File.join(project_path,'.externalToolBuilders')
      cp File.join(project_template_path,'gensources.xml'), project_path unless File.exists? File.join(project_path,'gensources.xml')
      cp File.join(project_template_path,'eclipsebundle.xml'), project_path unless File.exists? File.join(project_path,'eclipsebundle.xml')
      cp manifest_path, project_path
    end
  end
end
namespace "config" do
# Record "android" as the active build platform for config:common and friends.
task :set_android_platform do
  $current_platform = "android"
end
# Master Android configuration task: resolves SDK/NDK paths, tool binaries,
# signing settings, package naming, SDK add-ons, installed API targets and
# every directory-layout global the rest of the Android tasks depend on.
task :android => :set_android_platform do
  Rake::Task["config:common"].invoke
  # --- Toolchain locations from rhobuild.yml / build.yml --------------------
  $java = $config["env"]["paths"]["java"]
  $neon_root = nil
  $neon_root = $config["env"]["paths"]["neon"] unless $config["env"]["paths"].nil?
  if !($app_config["paths"].nil? or $app_config["paths"]["neon"].nil?)
    $neon_root = $app_config["paths"]["neon"]
  end
  $androidsdkpath = $config["env"]["paths"]["android"]
  unless File.exists? $androidsdkpath
    puts "Missing or invalid 'android' section in rhobuild.yml: '#{$androidsdkpath}'"
    exit 1
  end
  $androidndkpath = $config["env"]["paths"]["android-ndk"]
  unless File.exists? $androidndkpath
    puts "Missing or invalid 'android-ndk' section in rhobuild.yml: '#{$androidndkpath}'"
    exit 1
  end
  # The NDK toolchain cannot cope with spaces in its own path.
  errfmt = "WARNING!!! Path to Android %s contain spaces! It will not work because of the Google toolchain restrictions. Move it to another location and reconfigure rhodes."
  if $androidndkpath =~ /\s/
    puts(errfmt % "NDK")
    exit 1
  end
  # --- SDK level bounds: app config wins over global config over default ----
  $min_sdk_level = $app_config["android"]["minSDK"] unless $app_config["android"].nil?
  $min_sdk_level = $config["android"]["minSDK"] if $min_sdk_level.nil? and not $config["android"].nil?
  $min_sdk_level = $min_sdk_level.to_i unless $min_sdk_level.nil?
  $min_sdk_level = ANDROID_SDK_LEVEL if $min_sdk_level.nil?
  $max_sdk_level = $app_config["android"]["maxSDK"] unless $app_config["android"].nil?
  $androidplatform = AndroidTools.fill_api_levels $androidsdkpath
  if $androidplatform == nil
    puts "No Android platform found at SDK path: '#{$androidsdkpath}'"
    exit 1
  end
  # Build against the newest installed API level.
  android_api_levels = AndroidTools.get_installed_api_levels
  android_api_levels.sort!
  $found_api_level = android_api_levels.last
  # --- Google Maps API key / mapping capability -----------------------------
  $gapikey = $app_config["android"]["apikey"] unless $app_config["android"].nil?
  $gapikey = $config["android"]["apikey"] if $gapikey.nil? and not $config["android"].nil?
  $gapikey = '' unless $gapikey.is_a? String
  $gapikey = nil if $gapikey.empty?
  $android_orientation = $app_config["android"]["orientation"] unless $app_config["android"].nil?
  $use_geomapping = $app_config["android"]["mapping"] unless $app_config["android"].nil?
  $use_geomapping = $config["android"]["mapping"] if $use_geomapping.nil? and not $config["android"].nil?
  $use_geomapping = 'false' if $use_geomapping.nil?
  $use_geomapping = get_boolean($use_geomapping.to_s)
  $use_google_addon_api = false
  $use_google_addon_api = true if $use_geomapping
  #Additionally $use_google_addon_api set to true if PUSH capability is enabled
  # Optional RhoElements config.xml override (app-relative path).
  $config_xml = $app_config["android"]["rhoelements"]["config"] if $app_config["android"]["rhoelements"] if $app_config["android"]
  if $config_xml
    $config_xml = File.expand_path $config_xml, $app_path
    puts "Custom config.xml path: #{$config_xml}"
  end
  puts "Use Google addon API: #{$use_google_addon_api}" if USE_TRACES
  # --- URI scheme/host for app links ----------------------------------------
  $uri_scheme = $app_config["android"]["URIScheme"] unless $app_config["android"].nil?
  $uri_scheme = "http" if $uri_scheme.nil?
  $uri_host = $app_config["android"]["URIHost"] unless $app_config["android"].nil?
  # Here is switch between release/debug configuration used for
  # building native libraries
  if $app_config["debug"].nil?
    $build_release = true
  else
    # NOTE(review): !Integer is always false in Ruby (0 is truthy), so any
    # present "debug" value forces a debug build — confirm that is intended.
    $build_release = !$app_config["debug"].to_i
  end
  # --- Directory layout -----------------------------------------------------
  $androidpath = Jake.get_absolute $config["build"]["androidpath"]
  $bindir = File.join($app_path, "bin")
  $rhobindir = File.join($androidpath, "bin")
  $builddir = File.join($androidpath, "build")
  $shareddir = File.join($androidpath, "..", "shared")
  $coreapidir = File.join($androidpath, "..", "..", "lib", "commonAPI", "coreapi", "ext", "shared")
  $commonapidir = File.join($androidpath, "..", "..", "lib", "commonAPI")
  $targetdir = File.join($bindir, 'target', 'android')
  $projectpath = File.join($app_path, 'project', 'android')
  $excludelib = ['**/builtinME.rb', '**/ServeME.rb', '**/dateME.rb', '**/rationalME.rb']
  $tmpdir = File.join($bindir, "tmp")
  $srcdir = File.join $tmpdir,'assets' #File.join($bindir, "RhoBundle")
  #$rhomanifest = File.join $androidpath, "Rhodes", "AndroidManifest.xml"
  $rhomanifesterb = File.join $androidpath, "Rhodes", "AndroidManifest.xml.erb"
  $appmanifest = File.join $tmpdir, "AndroidManifest.xml"
  $rhores = File.join $androidpath, 'Rhodes','res'
  $appres = File.join $tmpdir,'res'
  $appassets = $srcdir
  $applibs = File.join $tmpdir,'lib','armeabi'
  $appincdir = File.join $tmpdir, "include"
  $rho_java_gen_dir = File.join $tmpdir,'gen','com','rhomobile','rhodes'
  #$rho_android_r = File.join $androidpath, 'Rhodes','src','com','rhomobile','rhodes','AndroidR.java'
  #$app_android_r = File.join $rho_java_gen_dir,'AndroidR.java'
  $app_rjava_dir = $rho_java_gen_dir
  $app_native_libs_java = File.join $rho_java_gen_dir,'NativeLibraries.java'
  $app_capabilities_java = File.join $rho_java_gen_dir,'Capabilities.java'
  $app_push_java = File.join $rho_java_gen_dir,'Push.java'
  $app_startup_listeners_java = File.join $rho_java_gen_dir,'extmanager','RhodesStartupListeners.java'
  # --- Host-platform specifics ----------------------------------------------
  if RUBY_PLATFORM =~ /(win|w)32$/
    $bat_ext = ".bat"
    $exe_ext = ".exe"
    $path_separator = ";"
    # Add PATH to cygwin1.dll
    ENV['CYGWIN'] = 'nodosfilewarning'
    if $path_cygwin_modified.nil?
      ENV['PATH'] = Jake.get_absolute("res/build-tools") + ";" + ENV['PATH']
      # NOTE(review): this assigns a LOCAL variable while the guard reads the
      # GLOBAL $path_cygwin_modified, so PATH is re-prepended on every run —
      # probably meant to be $path_cygwin_modified = true.
      path_cygwin_modified = true
    end
  else
    #XXX make these absolute
    $bat_ext = ""
    $exe_ext = ""
    $path_separator = ":"
    # TODO: add ruby executable for Linux
  end
  # --- SDK tool binaries: prefer the newest build-tools/ directory ----------
  build_tools_path = nil
  if File.exist?(File.join($androidsdkpath, "build-tools"))
    build_tools_path = []
    Dir.foreach(File.join($androidsdkpath, "build-tools")) do |entry|
      next if entry == '.' or entry == '..'
      build_tools_path << entry
    end
    build_tools_path.sort!
    build_tools_path = build_tools_path.last
  end
  if build_tools_path
    puts "Using Android SDK build-tools: #{build_tools_path}"
    build_tools_path = File.join $androidsdkpath,'build-tools',build_tools_path
    #puts "build-tools path: #{build_tools_path}"
    #$dx = File.join(build_tools_path,"dx" + $bat_ext)
    $dxjar = File.join(build_tools_path,'lib','dx.jar')
    $aapt = File.join(build_tools_path, "aapt#{$exe_ext}")
  else
    # Older SDK layouts keep dx/aapt under platforms/ or platform-tools/.
    #$dx = File.join($androidsdkpath, "platforms", $androidplatform, "tools", "dx" + $bat_ext)
    #$dx = File.join($androidsdkpath, "platform-tools", "dx" + $bat_ext) unless File.exists? $dx
    $dxjar = File.join($androidsdkpath, "platforms", $androidplatform, "tools", "lib", "dx.jar")
    $dxjar = File.join($androidsdkpath, "platform-tools", "lib", "dx.jar") unless File.exists? $dxjar
    $aapt = File.join($androidsdkpath, "platforms", $androidplatform, "tools", "aapt" + $exe_ext)
    $aapt = File.join($androidsdkpath, "platform-tools", "aapt" + $exe_ext) unless File.exists? $aapt
  end
  $androidbin = File.join($androidsdkpath, "tools", "android" + $bat_ext)
  $adb = File.join($androidsdkpath, "tools", "adb" + $exe_ext)
  $adb = File.join($androidsdkpath, "platform-tools", "adb" + $exe_ext) unless File.exists? $adb
  $zipalign = File.join($androidsdkpath, "tools", "zipalign" + $exe_ext)
  $androidjar = File.join($androidsdkpath, "platforms", $androidplatform, "android.jar")
  $sdklibjar = File.join($androidsdkpath, 'tools', 'lib', 'sdklib.jar')
  $keytool = File.join($java, "keytool" + $exe_ext)
  $jarsigner = File.join($java, "jarsigner" + $exe_ext)
  $jarbin = File.join($java, "jar" + $exe_ext)
  # --- Signing: app config -> global config -> generated default keystore ---
  $keystore = nil
  $keystore = $app_config["android"]["production"]["certificate"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
  $keystore = $config["android"]["production"]["certificate"] if $keystore.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
  $keystore = File.expand_path($keystore, $app_path) unless $keystore.nil?
  $keystore = File.expand_path(File.join(ENV['HOME'], ".rhomobile", "keystore")) if $keystore.nil?
  $storepass = nil
  $storepass = $app_config["android"]["production"]["password"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
  $storepass = $config["android"]["production"]["password"] if $storepass.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
  $storepass = "81719ef3a881469d96debda3112854eb" if $storepass.nil?
  $keypass = $storepass
  $storealias = nil
  $storealias = $app_config["android"]["production"]["alias"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
  $storealias = $config["android"]["production"]["alias"] if $storealias.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
  $storealias = "rhomobile.keystore" if $storealias.nil?
  # --- Capabilities, vendor and package name --------------------------------
  $app_config["capabilities"] += ANDROID_CAPS_ALWAYS_ENABLED
  $app_config["capabilities"].map! { |cap| cap.is_a?(String) ? cap : nil }.delete_if { |cap| cap.nil? }
  $use_google_addon_api = true unless $app_config["capabilities"].index("push").nil?
  $appname = $app_config["name"]
  $appname = "Rhodes" if $appname.nil?
  $vendor = $app_config["vendor"]
  if $vendor.nil?
    if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
      $vendor = 'rhomobile'
    else
      $vendor = 'motorolasolutions'
    end
  end
  # Sanitize vendor into a valid Java package segment.
  $vendor = $vendor.gsub(/^[^A-Za-z]/, '_').gsub(/[^A-Za-z0-9]/, '_').gsub(/_+/, '_').downcase
  $app_package_name = $app_config["android"] ? $app_config["android"]["package_name"] : nil
  $app_package_name = "com.#{$vendor}." + $appname.downcase.gsub(/[^A-Za-z_0-9]/, '') unless $app_package_name
  # Package segments must not start with a digit; the digit is replaced by '_'.
  $app_package_name.gsub!(/\.[\d]/, "._")
  puts "$vendor = #{$vendor}"
  puts "$app_package_name = #{$app_package_name}"
  if $uri_host.nil?
    if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
      $uri_host = 'rhomobile.com'
    else
      $uri_host = 'motorolasolutions.com'
    end
    $uri_path_prefix = "/#{$app_package_name}"
  end
  # Motorola and Google SDK add-ons are mutually exclusive.
  unless $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
    $use_motosol_api = true
    $use_motosol_api_classpath = true unless $app_config['capabilities'].index('motoroladev').nil?
    raise 'Cannot use Motorola SDK addon and Google SDK addon together!' if $use_google_addon_api
  end
  $no_compression = $app_config['android']['no_compression'] if $app_config['android']
  $applog_path = nil
  $applog_file = $app_config["applog"]
  if !$applog_file.nil?
    $applog_path = File.join($app_path, $applog_file)
  end
  if $min_sdk_level > $found_api_level
    raise "Latest installed Android platform '#{$androidplatform}' does not meet minSdk '#{$min_sdk_level}' requirement"
  end
  # Look for Motorola SDK addon
  if $use_motosol_api_classpath
    puts "Looking for Motorola API SDK add-on..." if USE_TRACES
    motosol_jars = ['com.motorolasolutions.scanner', 'com.motorolasolutions.msr']
    $motosol_classpath = AndroidTools::get_addon_classpath(motosol_jars)
  end
  # Detect Google API add-on path
  if $use_google_addon_api
    puts "Looking for Google API SDK add-on..." if USE_TRACES
    google_jars = ['com.google.android.maps']
    $google_classpath = AndroidTools::get_addon_classpath(google_jars, $found_api_level)
  end
  # --- NDK / STL setup ------------------------------------------------------
  setup_ndk($androidndkpath, $found_api_level)
  $std_includes = File.join $androidndkpath, "sources", "cxx-stl", "stlport", "stlport"
  unless File.directory? $std_includes
    # NDK has no stlport: fall back to the copy bundled with Rhodes.
    # NOTE(review): reassigning a constant emits a runtime warning.
    $stlport_includes = File.join $shareddir, "stlport", "stlport"
    USE_OWN_STLPORT = true
  end
  $native_libs = ["sqlite", "curl", "stlport", "ruby", "json", "rhocommon", "rhodb", "rholog", "rhosync", "rhomain"]
  if $build_release
    $confdir = "release"
  else
    $confdir = "debug"
  end
  $app_builddir = File.join($bindir, 'target', 'android', $confdir)
  $objdir = {}
  $libname = {}
  $native_libs.each do |x|
    $objdir[x] = File.join($tmpdir, x)
    $libname[x] = File.join($app_builddir, x, "lib#{x}.a")
  end
  # --- Push notification settings -------------------------------------------
  $push_sender = nil
  $push_sender = $config["android"]["push"]["sender"] if !$config["android"].nil? and !$config["android"]["push"].nil?
  $push_sender = $app_config["android"]["push"]["sender"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
  $push_sender = "support@rhomobile.com" if $push_sender.nil?
  $push_notifications = nil
  $push_notifications = $app_config["android"]["push"]["notifications"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
  $push_notifications = "none" if $push_notifications.nil?
  $push_notifications = $push_notifications
  # Detect android targets
  # Parse `android list targets` into $androidtargets: api level -> {:id,
  # :name, :abis}. With an add-on enabled, only matching add-on targets count.
  $androidtargets = {}
  id = nil
  apilevel = nil
  target_name = nil
  `"#{$androidbin}" list targets`.split(/\n/).each do |line|
    line.chomp!
    if line =~ /^id:\s+([0-9]+)\s+or\s+\"(.*)\"/
      id = $1
      target_name = $2
      if $use_google_addon_api
        if line =~ /Google Inc\.:Google APIs:([0-9]+)/
          apilevel = $1.to_i
          $androidtargets[apilevel] = {:id => id.to_i, :name => target_name}
        end
      else
        if $use_motosol_api
          if line =~ /MotorolaSolutions\s+Inc\.:MotorolaSolution\s+Value\s+Add\s+APIs.*:([0-9]+)/
            apilevel = $1.to_i
            $androidtargets[apilevel] = {:id => id.to_i, :name => target_name}
          end
        end
      end
    end
    # NOTE(review): `unless a and b` is true when EITHER add-on flag is off;
    # confirm whether `unless a or b` (plain targets only when no add-on at
    # all) was intended here.
    unless $use_google_addon_api and $use_motosol_api
      if line =~ /^\s+API\s+level:\s+([0-9]+)$/
        apilevel = $1.to_i
        $androidtargets[apilevel] = {:id => id.to_i, :name => target_name}
      end
    end
    # Attach the ABI list to the target entry it belongs to.
    if apilevel && $androidtargets[apilevel][:id] == id.to_i
      if line =~ /^\s+ABIs\s*:\s+(.*)/
        $androidtargets[apilevel][:abis] = []
        $1.split(/,\s*/).each do |abi|
          $androidtargets[apilevel][:abis] << abi
        end
        puts $androidtargets[apilevel][:abis].inspect if USE_TRACES
      end
    end
  end
  if USE_TRACES
    puts "Android targets:"
    puts $androidtargets.inspect
  end
  # Make sure the standard output directories exist.
  mkdir_p $bindir if not File.exists? $bindir
  mkdir_p $rhobindir if not File.exists? $rhobindir
  mkdir_p $targetdir if not File.exists? $targetdir
  mkdir_p $srcdir if not File.exists? $srcdir
end #task 'config:android'
namespace 'android' do
# 'config:android:app_config' task is invoked directly by common Rakefile
# just after build config has been read and before processing extensions
# Adjust the app's extension list based on declared capabilities:
# the push capability requires the gcm-push extension, and native_browser
# is incompatible with the rhoelements extension.
task :app_config do
  if $app_config['capabilities'].index('push')
    $app_config['extensions'] << 'gcm-push' unless $app_config['extensions'].index('gcm-push')
  end
  if $app_config['capabilities'].index('native_browser')
    $app_config['extensions'].delete('rhoelements')
  end
  # Name of the bundle file-map generated for the Android build.
  $file_map_name = "rho.dat"
end
# Discover and configure every native extension listed in the app config.
# For each extension the FIRST matching '<extpath>/<ext>/ext' directory wins
# (note the `break` at the bottom of the extpaths loop). If an ext.yml is
# present it is parsed and used to populate the $ext_android_* registries
# consumed by later build tasks:
#   $ext_android_rhodes_activity_listener - Java listener class names
#   $ext_android_additional_sources      - ext dir => java source-list file
#   $ext_android_additional_lib          - extra native libs to package
#   $ext_android_build_scripts           - ext name => [dir, build command]
#   $ext_android_manifest_changes        - ext name => manifest patch files
#   $ext_android_adds                    - ext name => resource adds dir
#   $ext_android_library_deps            - java package => library path
task :extensions => ['config:android', 'build:bundle:noxruby'] do
$ext_android_rhodes_activity_listener = []
$ext_android_additional_sources = {}
$ext_android_additional_lib = []
$ext_android_build_scripts = {}
$ext_android_manifest_changes = {}
$ext_android_adds = {}
$ext_android_library_deps = {}
$app_config["extensions"].each do |ext|
puts "#{ext} is processing..."
$app_config["extpaths"].each do |p|
extpath = File.join(p, ext, 'ext')
puts "Checking extpath: #{extpath}"
if File.exists? extpath and File.directory? extpath
puts "#{extpath} is configuring..."
extyml = File.join(p, ext, "ext.yml")
if File.file? extyml
puts "#{extyml} is processing..."
extconf = Jake.config(File.open(extyml))
extconf_android = extconf['android']
# exttype defaults to 'build'; 'prebuilt' and 'rakefile' change handling below.
exttype = 'build'
exttype = extconf_android['exttype'] if extconf_android and extconf_android['exttype']
addspath = File.join($app_builddir, 'extensions', ext, 'adds')
prebuiltpath = nil
if exttype == 'prebuilt'
# A prebuilt extension must contain exactly one '**/android' directory.
prebuiltpath = Dir.glob(File.join(extpath, '**', 'android'))
if prebuiltpath.count == 1
prebuiltpath = prebuiltpath.first
else
raise "android:exttype is 'prebuilt' but prebuilt path is not found #{prebuiltpath.inspect}"
end
end
# Top-level key takes precedence; android-section key is the fallback.
android_listener = extconf["android_rhodes_activity_listener"]
android_listener = extconf_android['rhodes_listener'] if android_listener.nil? and extconf_android
$ext_android_rhodes_activity_listener << android_listener unless android_listener.nil?
manifest_changes = extconf["android_manifest_changes"]
manifest_changes = extconf_android['manifest_changes'] if manifest_changes.nil? and extconf_android
if manifest_changes
manifest_changes = [manifest_changes] unless manifest_changes.is_a? Array
manifest_changes.map! { |path| File.join(p, ext, path) }
else
# No explicit manifest changes: for prebuilt extensions, pick up any
# AndroidManifest.rb / *.erb / AndroidManifest.xml shipped under 'adds'.
# The raw XML is only used when no .erb templates exist.
if prebuiltpath
manifest_changes = []
path = File.join(prebuiltpath, 'adds', 'AndroidManifest.rb')
manifest_changes << path if File.file? path
templates = Dir.glob File.join(prebuiltpath, 'adds', '*.erb')
manifest_changes += templates
if templates.empty?
path = File.join(prebuiltpath, 'adds', 'AndroidManifest.xml')
manifest_changes << path if File.file? path
end
end
end
if manifest_changes
$ext_android_manifest_changes[ext] = manifest_changes
end
resource_addons = extconf["android_resources_addons"]
resource_addons = extconf_android['adds'] if resource_addons.nil? and extconf_android
if resource_addons
resource_addons = File.join(p, ext, resource_addons)
else
if prebuiltpath
resource_addons = File.join(prebuiltpath, 'adds')
resource_addons = nil unless File.directory? resource_addons
end
end
if resource_addons
$ext_android_adds[ext] = resource_addons
end
# Android library projects this extension depends on (paths relative to the SDK).
library_deps = extconf_android['library_deps'] if extconf_android
if library_deps
if library_deps.is_a? Array
library_deps.each do |dep|
deppath = File.join($androidsdkpath, dep)
$ext_android_library_deps[AndroidTools.read_manifest_package(deppath)] = deppath
end
end
end
additional_sources = extconf["android_additional_sources_list"]
additional_sources = extconf_android['source_list'] if additional_sources.nil? and extconf_android
unless additional_sources.nil?
ext_sources_list = File.join(p, ext, additional_sources)
if File.exists? ext_sources_list
$ext_android_additional_sources[File.join(p, ext)] = ext_sources_list
else
raise "Extension java source list is missed: #{ext_sources_list}"
end
else
puts "No additional java sources for '#{ext}'"
end
# there is no 'additional_libs' param in android section moreover
# place libraries into android adds folder
android_additional_lib = extconf["android_additional_lib"]
if android_additional_lib != nil
android_additional_lib.each do |lib|
$ext_android_additional_lib << File.join(p, ext, lib)
end
end
if prebuiltpath
# Copy prebuilt artifacts into the app build dir: static libs and jars go
# to the extension root; shared libs go under adds/lib/armeabi, with
# 'noautoload' libs kept in a separate subfolder (skipped by the
# generated NativeLibraries.load()).
targetpath = File.join $app_builddir, 'extensions', ext
libaddspath = File.join addspath, 'lib', 'armeabi'
mkdir_p targetpath
Dir.glob(File.join(prebuiltpath, 'lib*.a')).each do |lib|
cp lib, targetpath
end
Dir.glob(File.join(prebuiltpath, '*.jar')).each do |lib|
cp lib, targetpath
end
Dir.glob(File.join(prebuiltpath, '**', 'lib*.so')).each do |lib|
next if lib =~ /adds/
if lib =~ /noautoload/
mkdir_p File.join(libaddspath, 'noautoload')
cp lib, File.join(libaddspath, 'noautoload')
else
mkdir_p libaddspath
cp lib, libaddspath
end
end
end
puts "#{extyml} is processed"
end
# Register how to build this extension: a rake-driven build uses the first
# '**/android' dir; otherwise a 'build' script in extpath (if present).
# NOTE(review): when ext.yml is absent, exttype is nil here, so the else
# branch is taken — presumably intentional.
if exttype == 'rakefile'
rakedir = Dir.glob File.join(extpath, '**', 'android')
$ext_android_build_scripts[ext] = [rakedir.first, 'rake']
else
build_script = File.join(extpath, 'build' + $bat_ext)
if File.exists? build_script
if RUBY_PLATFORM =~ /(win|w)32$/
$ext_android_build_scripts[ext] = [extpath, 'build.bat']
else
$ext_android_build_scripts[ext] = [extpath, File.join('.', 'build' + $bat_ext)]
end
end
end
puts "#{extpath} is configured"
# to prevent to build 2 extensions with same name
break
end # exists?
end # $app_config["extpaths"].each
end # $app_config["extensions"].each
puts "Extensions' java source lists: #{$ext_android_additional_sources.inspect}"
end #task :extensions
# Resolve the emulator binary path ($emulator), the emulator platform
# version ($emuversion) and the AVD name ($appavdname) from app/global config.
task :emulator => "config:android" do
  # Version: app config wins, then global config, then the SDK market version.
  android_conf = $app_config["android"]
  $emuversion = android_conf["version"] unless android_conf.nil?
  $emuversion = $config["android"]["version"] if $emuversion.nil? && !$config["android"].nil?

  # Pick the emulator executable for the host platform.
  exe_name = RUBY_PLATFORM =~ /(win|w)32$/ ? "emulator.exe" : "emulator"
  $emulator = File.join($androidsdkpath, "tools", exe_name)

  $emuversion = AndroidTools.get_market_version($min_sdk_level) if $emuversion.nil?
  if $emuversion.nil?
    raise "Wrong Android emulator version: #{$emuversion}. Android SDK target API is not installed"
  end

  puts "Android emulator version: #{$emuversion}" if USE_TRACES
  $emuversion = $emuversion.to_s

  # AVD name: app config first, then global config.
  $appavdname = android_conf["emulator"] if android_conf != nil && android_conf.length > 0
  $appavdname = $config["android"]["emulator"] if $appavdname.nil? && !$config["android"].nil? && $config["android"].length > 0
end # task 'config:android:emulator'
# Placeholder: device builds need no configuration beyond 'config:android'.
task :device => "config:android" do
end
end #namespace 'config:android'
end
namespace "build" do
namespace "android" do
desc "Build RhoBundle for android"
task :rhobundle => ["config:android", :extensions] do
  $srcdir = $appassets
  Rake::Task["build:bundle:noxruby"].invoke

  # Fold the digests of the bundle directories into one running hash object.
  digest = %w[apps db lib].inject(nil) do |acc, dir|
    get_dir_hash(File.join($srcdir, dir), acc)
  end

  # Persist the bundle fingerprint and application name next to the assets.
  File.open(File.join($srcdir, "hash"), "w") { |f| f.write(digest.hexdigest) }
  File.open(File.join($srcdir, "name"), "w") { |f| f.write($appname) }
end
desc "Build RhoBundle for Eclipse project"
task :eclipsebundle => "build:android:rhobundle" do
  # Mirror the freshly built bundle into the Eclipse Rhodes project's assets.
  assets_dst = File.join(Jake.get_absolute($androidpath), "Rhodes", "assets")
  rm_rf assets_dst
  cp_r $appassets, assets_dst, :preserve => true
end
desc 'Building native extensions'
# Run every registered extension build script (collected by
# config:android:extensions) with the build environment exported via ENV,
# then copy each extension's manifest patches and resource 'adds' into the
# app build directory.
task :extensions => ["config:android:extensions", :genconfig] do
Rake::Task["build:bundle:noxruby"].invoke
# Environment contract consumed by the per-extension build scripts.
ENV['RHO_PLATFORM'] = 'android'
ENV["RHO_APP_DIR"] = $app_path
ENV["ANDROID_SDK"] = $androidsdkpath
ENV["ANDROID_NDK"] = $androidndkpath
ENV["ANDROID_API_LEVEL"] = $found_api_level.to_s
ENV["RHO_ROOT"] = $startdir
ENV["BUILD_DIR"] ||= $startdir + "/platform/android/build"
ENV["RHO_INC"] = $appincdir
ENV["RHO_RES"] = $appres
ENV["RHO_ANDROID_TMP_DIR"] = $tmpdir
ENV["NEON_ROOT"] = $neon_root unless $neon_root.nil?
ENV["CONFIG_XML"] = $config_xml unless $config_xml.nil?
$ext_android_build_scripts.each do |ext, builddata|
#ext = File.basename(File.dirname(extpath))
# Per-extension output and scratch directories, also exported via ENV.
ENV["TARGET_TEMP_DIR"] = File.join($app_builddir, 'extensions', ext)
ENV['TEMP_FILES_DIR'] = File.join($tmpdir, ext)
mkdir_p ENV["TARGET_TEMP_DIR"] unless File.directory? ENV["TARGET_TEMP_DIR"]
mkdir_p ENV["TEMP_FILES_DIR"] unless File.directory? ENV["TEMP_FILES_DIR"]
puts "Executing extension build script: #{ext}"
# builddata is [directory, command]; rake builds and Windows builds go
# through Jake.run, everything else through the login shell.
if RUBY_PLATFORM =~ /(win|w)32$/ || (builddata[1] == 'rake')
Jake.run(builddata[1], [], builddata[0])
else
currentdir = Dir.pwd()
Dir.chdir builddata[0]
sh %{$SHELL #{builddata[1]}}
Dir.chdir currentdir
end
raise "Cannot build #{builddata[0]}" unless $?.success?
puts "Extension build script finished"
end
# Stage manifest patch files under <ext>/adds with canonical names:
# .xml -> AndroidManifest.xml, .rb -> AndroidManifest.rb, .erb copied as-is.
$ext_android_manifest_changes.each do |ext, manifest_changes|
addspath = File.join($app_builddir, 'extensions', ext, 'adds')
mkdir_p addspath
manifest_changes.each do |path|
if File.extname(path) == '.xml'
cp path, File.join(addspath, 'AndroidManifest.xml')
else
if File.extname(path) == '.rb'
cp path, File.join(addspath, 'AndroidManifest.rb')
else
if File.extname(path) == '.erb'
cp path, addspath
else
raise "Wrong AndroidManifest patch file: #{path}"
end
end
end
end
end
# Copy extension resource 'adds' directories (only subdirectories).
$ext_android_adds.each do |ext, path|
addspath = File.join($app_builddir, 'extensions', ext, 'adds')
mkdir_p addspath
Dir.glob(File.join(path, '*')).each do |add|
cp_r add, addspath if File.directory? add
end
end
#$ext_android_library_deps.each do |package, path|
# res = File.join path, 'res'
# assets = File.join path, 'assets'
# addspath = File.join($app_builddir, 'extensions', package, 'adds')
# mkdir_p addspath
# cp_r res, addspath if File.directory? res
# cp_r assets, addspath if File.directory? assets
#end
end #task :extensions
# Compile the bundled sqlite sources into a static library.
task :libsqlite => "config:android" do
  src_root  = File.join($shareddir, "sqlite")
  obj_root  = $objdir["sqlite"]
  lib_path  = $libname["sqlite"]
  file_list = File.join($builddir, 'libsqlite_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = []
  cc_flags << "-I\"#{src_root}\""
  cc_flags << "-I\"#{$shareddir}\""

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile the bundled libcurl sources into a static library.
task :libcurl => "config:android" do
  # Steps to get curl_config.h from fresh libcurl sources:
  #   export PATH=<ndkroot>/build/prebuilt/linux-x86/arm-eabi-4.2.1/bin:$PATH
  #   export CC=arm-eabi-gcc
  #   export CPP=arm-eabi-cpp
  #   export CFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
  #   export CPPFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
  #   ./configure --without-ssl --without-ca-bundle --without-ca-path --without-libssh2 --without-libidn --disable-ldap --disable-ldaps --host=arm-eabi
  src_root  = File.join $shareddir, "curl", "lib"
  obj_root  = $objdir["curl"]
  lib_path  = $libname["curl"]
  file_list = File.join($builddir, 'libcurl_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = ["-DHAVE_CONFIG_H"]
  ["#{src_root}/../include", src_root, $shareddir].each do |inc|
    cc_flags << "-I\"#{inc}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile the embedded Ruby interpreter into a static library.
task :libruby => "config:android" do
  src_root  = File.join $shareddir, "ruby"
  obj_root  = $objdir["ruby"]
  lib_path  = $libname["ruby"]
  file_list = File.join($builddir, 'libruby_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  # Silence known-noisy warnings in the upstream ruby sources.
  cc_flags = %w[-Wno-uninitialized -Wno-missing-field-initializers -Wno-shadow]
  ["#{src_root}/include", "#{src_root}/android", "#{src_root}/generated",
   src_root, "#{src_root}/..", "#{src_root}/../sqlite"].each do |inc|
    cc_flags << "-I\"#{inc}\""
  end
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile the shared JSON parser into a static library.
task :libjson => "config:android" do
  src_root  = File.join $shareddir, "json"
  obj_root  = $objdir["json"]
  lib_path  = $libname["json"]
  file_list = File.join($builddir, 'libjson_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = []
  cc_flags << "-I\"#{src_root}\""
  cc_flags << "-I\"#{src_root}/..\""
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Build our own STLport as a static library — only when USE_OWN_STLPORT is
# set; otherwise the task is a no-op.
task :libstlport => "config:android" do
  if USE_OWN_STLPORT
    obj_root  = $objdir["stlport"]
    lib_path  = $libname["stlport"]
    file_list = File.join($builddir, 'libstlport_build.files')

    mkdir_p obj_root
    mkdir_p File.dirname(lib_path)

    cc_flags = ["-I\"#{$stlport_includes}\""]
    cc_flags += %w[
      -DTARGET_OS=android -DOSNAME=android -DCOMPILER_NAME=gcc
      -DBUILD_OSNAME=android -D_REENTRANT -D__NEW__
      -ffunction-sections -fdata-sections -fno-rtti -fno-exceptions
    ]

    srcs = get_sources(file_list)
    objs = get_objects(srcs, obj_root)
    cc_build(srcs, obj_root, cc_flags) or exit 1
    cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
  end
end
# Compile the shared logging subsystem into a static library.
task :librholog => "config:android" do
  src_root  = File.join $shareddir, "logging"
  obj_root  = $objdir["rholog"]
  lib_path  = $libname["rholog"]
  file_list = File.join($builddir, 'librholog_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = ["-I\"#{src_root}/..\""]
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile the Rhodes core runtime into a static library.
task :librhomain => "config:android" do
  src_root  = $shareddir
  obj_root  = $objdir["rhomain"]
  lib_path  = $libname["rhomain"]
  file_list = File.join($builddir, 'librhomain_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = []
  cc_flags << "-I\"#{src_root}\""
  cc_flags << "-I\"#{$commonapidir}\""
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile common shared code (networking/utility layer) into a static library.
task :librhocommon => "config:android" do
  obj_root  = $objdir["rhocommon"]
  lib_path  = $libname["rhocommon"]
  file_list = File.join($builddir, 'librhocommon_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = []
  [$shareddir, "#{$shareddir}/curl/include", "#{$shareddir}/ruby/include",
   "#{$shareddir}/ruby/android"].each do |inc|
    cc_flags << "-I\"#{inc}\""
  end
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile the database layer (built on sqlite) into a static library.
task :librhodb => "config:android" do
  src_root  = File.join $shareddir, "db"
  obj_root  = $objdir["rhodb"]
  lib_path  = $libname["rhodb"]
  file_list = File.join($builddir, 'librhodb_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = []
  [src_root, "#{src_root}/..", "#{src_root}/../sqlite"].each do |inc|
    cc_flags << "-I\"#{inc}\""
  end
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Compile the sync engine into a static library.
task :librhosync => "config:android" do
  src_root  = File.join $shareddir, "sync"
  obj_root  = $objdir["rhosync"]
  lib_path  = $libname["rhosync"]
  file_list = File.join($builddir, 'librhosync_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(lib_path)

  cc_flags = []
  [src_root, "#{src_root}/..", "#{src_root}/../sqlite"].each do |inc|
    cc_flags << "-I\"#{inc}\""
  end
  cc_flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    cc_flags << "-D__NEW__"
    cc_flags << "-I\"#{$stlport_includes}\""
  end

  srcs = get_sources(file_list)
  objs = get_objects(srcs, obj_root)
  cc_build(srcs, obj_root, cc_flags) or exit 1
  cc_ar('"' + lib_path + '"', objs.map { |o| '"' + o + '"' }) or exit 1
end
# Aggregate target: build every static library that librhodes links against.
task :libs => [:libsqlite, :libcurl, :libruby, :libjson, :libstlport, :librhodb, :librhocommon, :librhomain, :librhosync, :librholog]
# Generate (or skip regenerating) genconfig.h in $appincdir. The existing
# header is parsed first so the file is only rewritten when the Google API
# key setting or any capability flag actually changed — this avoids
# needless native rebuilds triggered by a touched header.
task :genconfig => "config:android" do
mkdir_p $appincdir unless File.directory? $appincdir
# Generate genconfig.h
genconfig_h = File.join($appincdir, 'genconfig.h')
gapi_already_enabled = false
caps_already_enabled = {}
#ANDROID_PERMISSIONS.keys.each do |k|
# caps_already_enabled[k] = false
#end
# Scan the current header for the API key define and for every
# RHO_CAP_<NAME>_ENABLED define, recording their current values.
if File.file? genconfig_h
File.open(genconfig_h, 'r') do |f|
while line = f.gets
if line =~ /^\s*#\s*define\s+RHO_GOOGLE_API_KEY\s+"[^"]*"\s*$/
gapi_already_enabled = true
else
ANDROID_PERMISSIONS.keys.each do |k|
if line =~ /^\s*#\s*define\s+RHO_CAP_#{k.upcase}_ENABLED\s+(.*)\s*$/
value = $1.strip
if value == 'true'
caps_already_enabled[k] = true
elsif value == 'false'
caps_already_enabled[k] = false
else
raise "Unknown value for the RHO_CAP_#{k.upcase}_ENABLED: #{value}"
end
end
end
end
end
end
end
# Regenerate when the header is missing, the geomapping setting flipped,
# or any capability flag differs from what the header currently says.
regenerate = false
regenerate = true unless File.file? genconfig_h
regenerate = $use_geomapping != gapi_already_enabled unless regenerate
caps_enabled = {}
ANDROID_PERMISSIONS.keys.each do |k|
caps_enabled[k] = $app_config["capabilities"].index(k) != nil
regenerate = true if caps_already_enabled[k].nil? or caps_enabled[k] != caps_already_enabled[k]
end
puts caps_enabled.inspect
if regenerate
puts "Need to regenerate genconfig.h"
$stdout.flush
File.open(genconfig_h, 'w') do |f|
f.puts "#ifndef RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
f.puts "#define RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
f.puts ""
f.puts "#define RHO_GOOGLE_API_KEY \"#{$gapikey}\"" if $gapikey
caps_enabled.each do |k, v|
f.puts "#define RHO_CAP_#{k.upcase}_ENABLED #{v ? "true" : "false"}"
end
f.puts ""
f.puts "#endif /* RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F */"
end
else
puts "No need to regenerate genconfig.h"
$stdout.flush
end
# Generate rhocaps.inc
#rhocaps_inc = File.join($appincdir, 'rhocaps.inc')
#caps_already_defined = []
#if File.exists? rhocaps_inc
# File.open(rhocaps_inc, 'r') do |f|
# while line = f.gets
# next unless line =~ /^\s*RHO_DEFINE_CAP\s*\(\s*([A-Z_]*)\s*\)\s*\s*$/
# caps_already_defined << $1.downcase
# end
# end
#end
#
#if caps_already_defined.sort.uniq != ANDROID_PERMISSIONS.keys.sort.uniq
# puts "Need to regenerate rhocaps.inc"
# $stdout.flush
# File.open(rhocaps_inc, 'w') do |f|
# ANDROID_PERMISSIONS.keys.sort.each do |k|
# f.puts "RHO_DEFINE_CAP(#{k.upcase})"
# end
# end
#else
# puts "No need to regenerate rhocaps.inc"
# $stdout.flush
#end
end
# Compile and link librhodes.so: build the JNI sources, then link against
# every previously built static library plus all extension libraries, and
# finally install a stripped copy into the Rhodes Eclipse project.
task :librhodes => [:libs, :extensions, :genconfig] do
srcdir = File.join $androidpath, "Rhodes", "jni", "src"
libdir = File.join $app_builddir, 'librhodes', 'lib', 'armeabi'
objdir = File.join $tmpdir, 'librhodes'
libname = File.join libdir, 'librhodes.so'
sourcelist = File.join($builddir, 'librhodes_build.files')
mkdir_p libdir
mkdir_p objdir
# add licence lib to build
lic_dst = File.join $app_builddir, 'librhodes', 'libMotorolaLicence.a'
lic_src = $startdir + "/res/libs/motorolalicence/android/libMotorolaLicence.a"
rm_f lic_dst
cp lic_src, lic_dst
# Compiler include paths for the JNI sources.
args = []
args << "-I\"#{$appincdir}\""
args << "-I\"#{srcdir}/../include\""
args << "-I\"#{srcdir}/../include/rhodes/details\""
args << "-I\"#{$shareddir}\""
args << "-I\"#{$shareddir}/common\""
args << "-I\"#{$shareddir}/api_generator\""
args << "-I\"#{$shareddir}/sqlite\""
args << "-I\"#{$shareddir}/curl/include\""
args << "-I\"#{$shareddir}/ruby/include\""
args << "-I\"#{$shareddir}/ruby/android\""
args << "-I\"#{$coreapidir}\""
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__SGI_STL_INTERNAL_PAIR_H" if USE_OWN_STLPORT
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
sources = get_sources sourcelist
cc_build sources, objdir, args or exit 1
# Linking: dependencies are all core static libs ($libname values).
deps = []
$libname.each do |k, v|
deps << v
end
# 'args' is reused from here on as the linker argument list.
args = []
args << "-L\"#{$rhobindir}/#{$confdir}\""
args << "-L\"#{libdir}\""
rlibs = []
rlibs << "log"
rlibs << "dl"
rlibs << "z"
rlibs.map! { |x| "-l#{x}" }
elibs = []
extlibs = Dir.glob($app_builddir + "/**/lib*.a") # + Dir.glob($app_builddir + "/**/lib*.so")
extlibs.each do |lib|
args << "-L\"#{File.dirname(lib)}\""
end
stub = []
extlibs.reverse.each do |f|
lparam = "-l" + File.basename(f).gsub(/^lib/, "").gsub(/\.(a|so)$/, "")
elibs << lparam
# Workaround for GNU ld: this way we have specified one lib multiple times
# command line so ld's dependency mechanism will find required functions
# independently of its position in command line
stub.each do |s|
args << s
end
stub << lparam
end
# The repeated -l lists below are deliberate (see the GNU ld note above).
args += elibs
args += elibs
args += rlibs
objects = get_objects sources, objdir
#mkdir_p File.dirname(libname) unless File.directory? File.dirname(libname)
cc_link libname, objects.collect { |x| '"'+x+'"' }, args, deps+extlibs or exit 1
# Install a stripped copy into the Eclipse project's libs folder.
destdir = File.join($androidpath, "Rhodes", "libs", "armeabi")
mkdir_p destdir unless File.exists? destdir
cp_r libname, destdir
cc_run($stripbin, ['"'+File.join(destdir, File.basename(libname))+'"'])
end
# Generate AndroidManifest.xml from the app configuration, then merge in any
# raw AndroidManifest.xml fragments contributed by native extensions.
#
# Fix: the "not found" message at the bottom referenced an undefined local
# variable `m` (a NameError whenever that branch was reached); it now uses
# the actual glob result `ext_manifest`.
task :manifest => ["config:android", :extensions] do
  # Parse app version "major[.minor[.patch[.build]]]" into numeric parts.
  version = {'major' => 0, 'minor' => 0, 'patch' => 0, "build" => 0}
  if $app_config["version"]
    if $app_config["version"] =~ /^(\d+)$/
      version["major"] = $1.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
      version["patch"] = $3.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
      version["patch"] = $3.to_i
      version["build"] = $4.to_i
    else
      raise "Version number must be numeric and in one of these formats: major, major.minor, major.minor.patch, or major.minor.patch.build."
    end
  end
  # Collapse the parts into a single android:versionCode integer.
  version = version["major"]*1000000 + version["minor"]*10000 + version["patch"]*100 + version["build"]

  # Map configured capabilities onto android.permission.* entries.
  usesPermissions = ['android.permission.INTERNET', 'android.permission.PERSISTENT_ACTIVITY', 'android.permission.WAKE_LOCK']
  $app_config["capabilities"].each do |cap|
    cap = ANDROID_PERMISSIONS[cap]
    next if cap.nil?
    cap = [cap] unless cap.is_a? Array
    cap.each do |cap_item|
      if cap_item.is_a? Proc
        # Proc-valued capability entries are not permission names; skip.
        next
      end
      if cap_item.is_a? String
        usesPermissions << "android.permission.#{cap_item}"
        next
      end
    end
  end
  usesPermissions.uniq!

  hidden = get_boolean($app_config['hidden_app'])

  # Configure the manifest generator from global build settings.
  generator = ManifestGenerator.new JAVA_PACKAGE_NAME, $app_package_name, hidden, usesPermissions
  generator.versionName = $app_config["version"]
  generator.versionCode = version
  generator.installLocation = 'auto'
  generator.minSdkVer = $min_sdk_level
  generator.maxSdkVer = $max_sdk_level
  generator.screenOrientation = $android_orientation unless $android_orientation.nil?
  generator.hardwareAcceleration = true if $app_config["capabilities"].index('hardware_acceleration')
  generator.apikey = $gapikey if $gapikey
  generator.addUriParams $uri_scheme, $uri_host, $uri_path_prefix

  # Extensions may patch the manifest via Ruby scripts and ERB templates.
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.rb')).each do |extscript|
    puts "Evaluating #{extscript}"
    eval(File.new(extscript).read)
  end
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Manifest*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.manifestManifestAdds << exttemplate
  end
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Application*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.applicationManifestAdds << exttemplate
  end

  manifest = generator.render $rhomanifesterb
  File.open($appmanifest, "w") { |f| f.write manifest }

  #######################################################
  # Deprecated staff below: merge raw AndroidManifest.xml fragments from
  # extensions into the freshly generated manifest using REXML.
  app_f = File.new($appmanifest)
  manifest_orig_doc = REXML::Document.new(app_f)
  app_f.close
  dst_manifest = manifest_orig_doc.elements["manifest"]
  dst_application = manifest_orig_doc.elements["manifest/application"]
  dst_main_activity = nil
  puts '$$$ try to found MainActivity'
  dst_application.elements.each("activity") do |a|
    puts '$$$ activity with attr = '+a.attribute('name', 'android').to_s
    if a.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
      puts ' $$$ FOUND !'
      dst_main_activity = a
    end
  end
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.xml')).each do |ext_manifest|
    if File.exists? ext_manifest
      puts 'AndroidManifest.xml['+ext_manifest+'] from native extension found !'
      manifest_ext_doc = REXML::Document.new(File.new(ext_manifest))
      src_manifest = manifest_ext_doc.elements["manifest"]
      src_application = manifest_ext_doc.elements["manifest/application"]
      if src_application != nil
        puts 'Extension Manifest process application item :'
        src_application.elements.each do |e|
          puts '$$$ process element with attr = '+e.attribute('name', 'android').to_s
          if e.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
            # Children of the extension's RhodesActivity element are merged
            # into the app's main activity element.
            e.elements.each do |sube|
              puts ' add item to MainActivity['+sube.xpath+']'
              dst_main_activity.add sube
            end
          else
            puts ' add item ['+e.xpath+']'
            dst_application.add e
          end
        end
      end
      puts 'Extension Manifest process root <manifest> item :'
      src_manifest.elements.each do |e|
        p = e.xpath
        if p != '/manifest/application'
          dst_e = manifest_orig_doc.elements[p]
          if dst_e != nil
            if p == '/manifest/uses-sdk'
              # An extension's uses-sdk replaces the generated one.
              puts ' found and delete original item ['+p+']'
              manifest_orig_doc.elements.delete p
            end
          end
          puts ' and new item ['+p+']'
          dst_manifest.add e
        end
      end
    else
      # Was: ...['+m+']... — `m` was never defined (NameError).
      puts 'AndroidManifest change file ['+ext_manifest+'] from native extension not found !'
    end
  end
  puts 'delete original manifest'
  File.delete($appmanifest)
  updated_f = File.open($appmanifest, "w")
  manifest_orig_doc.write updated_f, 2
  updated_f.close
  puts 'Manifest updated by extension is saved!'
end
# Stage resources into the project tree: extension adds, app icon, optional
# custom config.xml, and every native .so library.
task :resources => [:rhobundle, :extensions, :librhodes] do
  set_app_name_android($appname)

  # Pull extension 'adds' subdirectories into the temp project tree.
  puts 'EXT: add additional files to project before build'
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', '*')).each do |entry|
    next unless File.directory?(entry) && entry != '.' && entry != '..'
    puts "add resources from extension [#{entry}] to [#{$tmpdir}]"
    cp_r entry, $tmpdir
  end

  #copy icon after extension resources in case it overwrites them (like rhoelementsext...)
  set_app_icon_android

  if $config_xml
    puts "Copying custom config.xml"
    raw_res_dir = File.join($tmpdir, 'res', 'raw')
    mkdir_p raw_res_dir unless File.exist? raw_res_dir
    cp $config_xml, File.join(raw_res_dir, 'config.xml')
  end

  # Collect every native .so (core build output + extension extras).
  mkdir_p File.join($applibs)
  native_libs = Dir.glob($app_builddir + "/**/lib*.so") + $ext_android_additional_lib
  native_libs.each { |lib| cp_r lib, $applibs }
end
# Replace the Eclipse project's res/assets/libs with the freshly built ones.
task :fulleclipsebundle => [:resources, :librhodes] do
  res_dst    = File.join $projectpath, 'res'
  assets_dst = File.join $projectpath, 'assets'
  libs_dst   = File.join $projectpath, 'libs'

  [res_dst, assets_dst, libs_dst].each { |stale| rm_rf stale }

  mkdir_p libs_dst
  cp_r $appres, $projectpath
  cp_r $appassets, $projectpath
  cp_r $applibs, libs_dst
end
# Generate Capabilities.java: one boolean constant per known Android
# permission key, true when the app config requests that capability.
task :gencapabilitiesjava => "config:android" do
  mkdir_p File.dirname $app_capabilities_java

  buf = StringIO.new("", "w+")
  buf.puts "package #{JAVA_PACKAGE_NAME};"
  buf.puts "public class Capabilities {"
  ANDROID_PERMISSIONS.keys.sort.each do |cap|
    enabled = $app_config["capabilities"].index(cap) != nil ? 'true' : 'false'
    buf.puts " public static final boolean #{cap.upcase}_ENABLED = #{enabled};"
  end
  buf.puts "}"

  # Only rewrite the file when the generated content actually changed.
  Jake.modify_file_if_content_changed($app_capabilities_java, buf)
end
# Generate Push.java holding the push sender id and notification mode.
task :genpushjava => "config:android" do
  mkdir_p File.dirname $app_push_java

  buf = StringIO.new("", "w+")
  buf.puts "package #{JAVA_PACKAGE_NAME};"
  buf.puts "public class Push {"
  buf.puts " public static final String SENDER = \"#{$push_sender}\";"
  notifications = $push_notifications.nil? ? "none" : $push_notifications
  buf.puts " public static final String PUSH_NOTIFICATIONS = \"#{notifications}\";"
  buf.puts "};"

  # Only rewrite the file when the generated content actually changed.
  Jake.modify_file_if_content_changed($app_push_java, buf)
end
# Generate NativeLibraries.java, whose load() calls System.loadLibrary for
# every built .so (in reverse glob order), skipping 'noautoload' libraries.
task :genloadlibsjava => "config:android" do
  mkdir_p File.dirname $app_native_libs_java

  buf = StringIO.new("", "w+")
  buf.puts "package #{JAVA_PACKAGE_NAME};"
  buf.puts "public class NativeLibraries {"
  buf.puts " public static void load() {"
  buf.puts " // Load native .so libraries"
  Dir.glob($app_builddir + "/**/lib*.so").reverse.each do |so|
    next if so =~ /noautoload/
    # "libfoo.so" -> "foo" for System.loadLibrary.
    name = File.basename(so).gsub(/^lib/, '').gsub(/\.so$/, '')
    buf.puts " System.loadLibrary(\"#{name}\");"
  end
  buf.puts " }"
  buf.puts "};"

  Jake.modify_file_if_content_changed($app_native_libs_java, buf)
end
# Generate RhodesStartupListeners.java listing every extension-provided
# activity listener class collected by config:android:extensions.
task :genrholisteners => ['config:android:extensions', 'config:android'] do
  mkdir_p File.dirname $app_startup_listeners_java

  buf = StringIO.new("", "w+")
  buf.puts '// WARNING! THIS FILE IS GENERATED AUTOMATICALLY! DO NOT EDIT IT MANUALLY!'
  buf.puts 'package com.rhomobile.rhodes.extmanager;'
  buf.puts ''
  buf.puts 'class RhodesStartupListeners {'
  buf.puts ''
  buf.puts ' public static final String[] ourRunnableList = { ""'
  $ext_android_rhodes_activity_listener.each do |listener|
    buf.puts ' ,"' + listener + '"'
  end
  buf.puts ' };'
  buf.puts '}'

  Jake.modify_file_if_content_changed($app_startup_listeners_java, buf)
end
# Run aapt to produce R.java for the app package, then emit re-packaged
# copies for com.rhomobile.rhodes and for every dependent library package.
task :genrjava => [:manifest, :resources] do
  mkdir_p $app_rjava_dir
  puts "Generate initial R.java at #{$app_rjava_dir} >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
  aapt_args = ["package", "-f", "-M", $appmanifest, "-S", $appres, "-A", $appassets, "-I", $androidjar, "-J", $app_rjava_dir]
  Jake.run($aapt, aapt_args)
  raise 'Error in AAPT: R.java' unless $?.success?

  # Matches the app package declaration line in the generated R.java.
  package_re = /^\s*package\s*#{$app_package_name};\s*$/
  rjava_path = File.join($app_rjava_dir, "R.java")

  # Copy of R.java re-packaged under com.rhomobile.rhodes.
  r_subdir = File.join($app_rjava_dir, "R")
  mkdir_p r_subdir if not File.exists? r_subdir
  content = File.new(rjava_path, "r").read.gsub(package_re, "\npackage com.rhomobile.rhodes;\n")
  File.open(File.join(r_subdir, "R.java"), "w") { |f| f.write(content) }

  # Same trick for each dependent Android library package.
  $ext_android_library_deps.each do |package, path|
    gen_dir = File.join $tmpdir, 'gen', package.split('.')
    mkdir_p gen_dir
    content = File.new(rjava_path, "r").read.gsub(package_re, "\npackage #{package};\n")
    File.open(File.join(gen_dir, 'R.java'), 'w') { |f| f.write(content) }
  end
end
# Generate R.java for the Eclipse build and force every generated java file
# into the com.rhomobile.rhodes package.
task :genreclipse => [:manifest, :resources] do
  mkdir_p $app_rjava_dir
  aapt_args = ["package", "-f", "-M", $appmanifest, "-S", $appres, "-A", $appassets, "-I", $androidjar, "-J", $app_rjava_dir]
  Jake.run($aapt, aapt_args)
  raise 'Error in AAPT: R.java' unless $?.success?

  Dir.glob(File.join $app_rjava_dir, '*.java') do |java_file|
    content = File.new(java_file, 'r').read.gsub(/package .*$/, 'package com.rhomobile.rhodes;')
    File.open(java_file, 'w') { |f| f.write content }
  end
end
# Aggregate code generators: the Eclipse flavor uses :genreclipse for R.java,
# the regular build uses :genrjava; both share the other generator tasks.
task :gensourceseclipse => [:genloadlibsjava, :genpushjava, :gencapabilitiesjava, :genrholisteners, :genreclipse]
task :gensourcesjava => [:genloadlibsjava, :genpushjava, :gencapabilitiesjava, :genrholisteners, :genrjava]
#desc "Build Rhodes for android"
# Compile the Rhodes Java sources (plus generated sources and any extension
# java file lists) into $tmpdir/Rhodes, then pack the classes into
# librhodes/Rhodes.jar and record it in $android_jars.
task :rhodes => [:rhobundle, :librhodes, :manifest, :resources, :gensourcesjava] do
rm_rf $tmpdir + "/Rhodes"
mkdir_p $tmpdir + "/Rhodes"
# Build the javac source list: the static list minus AndroidR.java, plus
# every generated R.java under $tmpdir/gen.
srclist = File.join($builddir, "RhodesSRC_build.files")
newsrclist = File.join($tmpdir, "RhodesSRC_build.files")
lines = []
File.open(srclist, "r") do |f|
while line = f.gets
line.chomp!
next if line =~ /\/AndroidR\.java\s*$/
lines << line
end
end
Dir.glob(File.join($tmpdir,'gen','**','*.java')) do |filepath|
lines << "\"#{filepath}\""
end
#lines << "\"" +File.join($app_rjava_dir, "R.java")+"\""
#lines << "\"" +File.join($app_rjava_dir, "R", "R.java")+"\""
#lines << "\"" +$app_native_libs_java+"\""
#lines << "\"" +$app_capabilities_java+"\""
#lines << "\"" +$app_push_java+"\""
#lines << "\"" +$app_startup_listeners_java+"\""
File.open(newsrclist, "w") { |f| f.write lines.join("\n") }
srclist = newsrclist
# Classpath: android.jar plus optional Google/Motorola jars and the output dir.
classpath = $androidjar
classpath += $path_separator + $google_classpath if $google_classpath
classpath += $path_separator + $motosol_classpath if $motosol_classpath
classpath += $path_separator + File.join($tmpdir, 'Rhodes')
javafilelists = [srclist]
extlist = File.join $app_builddir, "ext_build.files"
if File.exists? extlist
puts "#{extlist} is found! THere are addditional java files"
javafilelists << extlist
end
java_compile(File.join($tmpdir, 'Rhodes'), classpath, javafilelists)
# Jar up the compiled classes (paths relative to the Rhodes output dir).
files = []
Dir.glob(File.join($tmpdir, "Rhodes", "*")).each do |f|
relpath = Pathname.new(f).relative_path_from(Pathname.new(File.join($tmpdir, "Rhodes"))).to_s
files << relpath
end
unless files.empty?
jar = File.join($app_builddir, 'librhodes', 'Rhodes.jar')
args = ["cf", jar]
args += files
Jake.run($jarbin, args, File.join($tmpdir, "Rhodes"))
unless $?.success?
raise "Error creating #{jar}"
end
$android_jars = [jar]
end
end
# Compile java sources contributed by each extension against the full
# classpath (android.jar, Rhodes classes and all jars built so far) and pack
# every extension into its own jar, appended to $android_jars.
task :extensions_java => [:rhodes, :extensions] do
  puts 'Compile additional java files:'
  classpath = $androidjar
  classpath += $path_separator + $google_classpath if $google_classpath
  classpath += $path_separator + $motosol_classpath if $motosol_classpath
  classpath += $path_separator + File.join($tmpdir, 'Rhodes')
  Dir.glob(File.join($app_builddir, '**', '*.jar')).each do |jar|
    classpath += $path_separator + jar
  end
  $ext_android_additional_sources.each do |extpath, list|
    ext = File.basename(extpath)
    puts "Compiling '#{ext}' extension java sources: #{list}"
    # Build a quoted, absolute-path source list file for the java compiler
    srclist = Tempfile.new "#{ext}SRC_build"
    lines = []
    File.open(list, "r") do |f|
      while line = f.gets
        line.chomp!
        srclist.write "\"#{File.join(extpath, line)}\"\n"
        #srclist.write "#{line}\n"
      end
    end
    srclist.close
    mkdir_p File.join($tmpdir, ext)
    java_compile(File.join($tmpdir, ext), classpath, [srclist.path])
    extjar = File.join $app_builddir, 'extensions', ext, ext + '.jar'
    args = ["cf", extjar, '.']
    Jake.run($jarbin, args, File.join($tmpdir, ext))
    unless $?.success?
      raise "Error creating #{extjar}"
    end
    $android_jars << extjar
    # Later extensions may reference classes from earlier ones
    classpath += $path_separator + extjar
  end
end
# Build a full upgrade bundle: regenerate the bundle file map and zip the
# bin dir into upgrade_bundle.zip inside the target dir.
task :upgrade_package => :rhobundle do
  #puts '$$$$$$$$$$$$$$$$$$'
  #puts 'targetdir = '+$targetdir.to_s
  #puts 'bindir = '+$bindir.to_s
  android_targetdir = $targetdir #File.join($targetdir, 'android')
  mkdir_p android_targetdir if not File.exists? android_targetdir
  zip_file_path = File.join(android_targetdir, 'upgrade_bundle.zip')
  Jake.build_file_map(File.join($srcdir, "apps"), "rhofilelist.txt")
  Jake.zip_upgrade_bundle($bindir, zip_file_path)
end
# Build a partial upgrade bundle: copy the full bundle into a temp dir, keep
# only files named in <app>/upgrade_package_add_files.txt, record removals
# from upgrade_package_remove_files.txt, then zip the remainder as
# upgrade_bundle_partial.zip in the target dir.
task :upgrade_package_partial => ["build:android:rhobundle"] do
  #puts '$$$$$$$$$$$$$$$$$$'
  #puts 'targetdir = '+$targetdir.to_s
  #puts 'bindir = '+$bindir.to_s
  # process partial update
  add_list_full_name = File.join($app_path, 'upgrade_package_add_files.txt')
  remove_list_full_name = File.join($app_path, 'upgrade_package_remove_files.txt')

  src_folder = File.join($bindir, 'RhoBundle')
  src_folder = File.join(src_folder, 'apps')

  tmp_folder = $bindir + '_tmp_partial'
  rm_rf tmp_folder if File.exists? tmp_folder
  mkdir_p tmp_folder

  dst_tmp_folder = File.join(tmp_folder, 'RhoBundle')
  mkdir_p dst_tmp_folder
  # copy all
  cp_r src_folder, dst_tmp_folder

  dst_tmp_folder = File.join(dst_tmp_folder, 'apps')
  mkdir_p dst_tmp_folder

  # Read the add list, mapping source names to compiled bundle names
  # (.rb -> .iseq, .erb -> _erb.iseq). Note: chomp (not chop) so a final
  # line without a trailing newline does not lose its last character.
  add_files = []
  if File.exists? add_list_full_name
    File.open(add_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chomp
        add_files << fixed_path
        puts '### ['+fixed_path+']'
      end
    end
  end

  remove_files = []
  if File.exists? remove_list_full_name
    File.open(remove_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chomp
        remove_files << fixed_path
        #puts '### ['+fixed_path+']'
      end
    end
  end

  # Delete every copied file that is not on the add list
  psize = dst_tmp_folder.size+1
  Dir.glob(File.join(dst_tmp_folder, '**/*')).sort.each do |f|
    relpath = f[psize..-1]
    if File.file?(f)
      #puts '$$$ ['+relpath+']'
      if not add_files.include?(relpath)
        rm_rf f
      end
    end
  end

  Jake.build_file_map(dst_tmp_folder, "upgrade_package_add_files.txt")
  #if File.exists? add_list_full_name
  #  File.open(File.join(dst_tmp_folder, 'upgrade_package_add_files.txt'), "w") do |f|
  #    add_files.each do |j|
  #      f.puts "#{j}\tfile\t0\t0"
  #    end
  #  end
  #end

  # Ship the remove list inside the bundle so the device can delete files
  if File.exists? remove_list_full_name
    File.open(File.join(dst_tmp_folder, 'upgrade_package_remove_files.txt'), "w") do |f|
      remove_files.each do |j|
        f.puts "#{j}"
        #f.puts "#{j}\tfile\t0\t0"
      end
    end
  end

  mkdir_p $targetdir if not File.exists? $targetdir
  zip_file_path = File.join($targetdir, "upgrade_bundle_partial.zip")
  Jake.zip_upgrade_bundle(tmp_folder, zip_file_path)
  rm_rf tmp_folder
end
#desc "build all"
# Full Android build: app bundle, core Rhodes jar and extension jars.
task :all => [:rhobundle, :rhodes, :extensions_java]
end
end
namespace "package" do
  # Dex all built jars into classes.dex, then package assets, resources and
  # stripped native libraries into the android resource package (rhodes.ap_).
  task :android => "build:android:all" do
    puts "Running dx utility"
    args = []
    args << "-Xmx1024m"
    args << "-jar"
    args << $dxjar
    args << "--dex"
    args << "--output=#{$bindir}/classes.dex"
    Dir.glob(File.join($app_builddir, '**', '*.jar')).each do |jar|
      args << jar
    end
    Jake.run(File.join($java, 'java'+$exe_ext), args)
    unless $?.success?
      raise "Error running DX utility"
    end
    resourcepkg = $bindir + "/rhodes.ap_"
    puts "Packaging Assets and Jars"
    # this task already called during build "build:android:all"
    #set_app_name_android($appname)
    args = ["package", "-f", "-M", $appmanifest, "-S", $appres, "-A", $appassets, "-I", $androidjar, "-F", resourcepkg]
    # pass -0 <ext> for each extension that must stay uncompressed in the apk
    if $no_compression
      $no_compression.each do |ext|
        args << '-0'
        args << ext
      end
    end
    Jake.run($aapt, args)
    unless $?.success?
      raise "Error running AAPT (1)"
    end
    # Workaround: manually add files starting with '_' because aapt silently ignore such files when creating package
    Dir.glob(File.join($appassets, "**/*")).each do |f|
      next unless File.basename(f) =~ /^_/
      relpath = Pathname.new(f).relative_path_from(Pathname.new($tmpdir)).to_s
      puts "Add #{relpath} to #{resourcepkg}..."
      args = ["uf", resourcepkg, relpath]
      Jake.run($jarbin, args, $tmpdir)
      unless $?.success?
        raise "Error packaging assets"
      end
    end
    puts "Packaging Native Libs"
    # strip each .so, then add it under lib/armeabi inside the package
    args = ["uf", resourcepkg]
    Dir.glob(File.join($applibs, "lib*.so")).each do |lib|
      cc_run($stripbin, ['"'+lib+'"'])
      args << "lib/armeabi/#{File.basename(lib)}"
    end
    Jake.run($jarbin, args, $tmpdir)
    unless $?.success?
      raise "Error packaging native libraries"
    end
  end
end
namespace "device" do
  namespace "android" do
    desc "Build debug self signed for device"
    # Build a debug APK, zipalign it, and write the package name into
    # app_info.txt next to the final artifact.
    task :debug => "package:android" do
      dexfile = $bindir + "/classes.dex"
      simple_apkfile = $targetdir + "/" + $appname + "-tmp.apk"
      final_apkfile = $targetdir + "/" + $appname + "-debug.apk"
      resourcepkg = $bindir + "/rhodes.ap_"
      # last flag presumably selects debug signing — the production task below
      # passes false and signs manually; confirm against apk_build
      apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, true
      puts "Align Debug APK file"
      args = []
      args << "-f"
      args << "-v"
      args << "4"
      args << simple_apkfile
      args << final_apkfile
      out = Jake.run2($zipalign, args, :hide_output => true)
      puts out if USE_TRACES
      unless $?.success?
        puts "Error running zipalign"
        exit 1
      end
      #remove temporary files
      rm_rf simple_apkfile
      File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
        f.puts $app_package_name
      end
    end
    # Install (or reinstall, -r) the freshly built debug APK on a device.
    task :install => :debug do
      apkfile = $targetdir + "/" + $appname + "-debug.apk"
      Jake.run $adb, ['-d', 'wait-for-device']
      puts "Install APK file"
      Jake.run($adb, ["-d", "install", "-r", apkfile])
      unless $?.success?
        raise "Error installing APK file"
      end
      puts "Install complete"
    end
    desc "Build production signed for device"
    # Build a release APK: generate a private keystore on first use, sign the
    # package with jarsigner, then zipalign it into the final _signed.apk.
    task :production => "package:android" do
      dexfile = $bindir + "/classes.dex"
      simple_apkfile = $targetdir + "/" + $appname + "_tmp.apk"
      final_apkfile = $targetdir + "/" + $appname + "_signed.apk"
      signed_apkfile = $targetdir + "/" + $appname + "_tmp_signed.apk"
      resourcepkg = $bindir + "/rhodes.ap_"
      apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, false
      # Create a self-signed keystore if none is configured/present
      if not File.exists? $keystore
        puts "Generating private keystore..."
        mkdir_p File.dirname($keystore) unless File.directory? File.dirname($keystore)
        args = []
        args << "-genkey"
        args << "-alias"
        args << $storealias
        args << "-keyalg"
        args << "RSA"
        args << "-validity"
        args << "20000"
        args << "-keystore"
        args << $keystore
        args << "-storepass"
        args << $storepass
        args << "-keypass"
        args << $keypass
        Jake.run($keytool, args)
        unless $?.success?
          puts "Error generating keystore file"
          exit 1
        end
      end
      puts "Signing APK file"
      args = []
      args << "-sigalg"
      args << "MD5withRSA"
      args << "-digestalg"
      args << "SHA1"
      args << "-verbose"
      args << "-keystore"
      args << $keystore
      args << "-storepass"
      args << $storepass
      args << "-signedjar"
      args << signed_apkfile
      args << simple_apkfile
      args << $storealias
      Jake.run($jarsigner, args)
      unless $?.success?
        puts "Error running jarsigner"
        exit 1
      end
      puts "Align APK file"
      args = []
      args << "-f"
      args << "-v"
      args << "4"
      args << '"' + signed_apkfile + '"'
      args << '"' + final_apkfile + '"'
      Jake.run($zipalign, args)
      unless $?.success?
        puts "Error running zipalign"
        exit 1
      end
      #remove temporary files
      rm_rf simple_apkfile
      rm_rf signed_apkfile
      File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
        f.puts $app_package_name
      end
    end
    #task :getlog => "config:android" do
    #  AndroidTools.get_app_log($appname, true) or exit 1
    #end
  end
end
#namespace "emulator" do
# namespace "android" do
# task :getlog => "config:android" do
# AndroidTools.get_app_log($appname, false) or exit 1
# end
# end
#end
# Build, deploy and run the application spec suite on an emulator ('-e') or
# attached device ('-d'), streaming logcat into RhoLogSpec.txt and feeding
# each line to the spec-output parser. uninstall_app controls whether the app
# is removed afterwards (nil defaults to true). Exits the process on failure.
def run_as_spec(device_flag, uninstall_app)
  Rake::Task["device:android:debug"].invoke
  if device_flag == '-e'
    Rake::Task["config:android:emulator"].invoke
  else
    Rake::Task["config:android:device"].invoke
  end

  log_name = $app_path + '/RhoLogSpec.txt'
  File.delete(log_name) if File.exist?(log_name)

  AndroidTools.logclear(device_flag)
  AndroidTools.run_emulator(:hidden => true) if device_flag == '-e'
  do_uninstall(device_flag)

  # Failsafe to prevent eternal hangs: after ~33 minutes tear everything down
  Thread.new {
    sleep 2000
    if device_flag == '-e'
      AndroidTools.kill_adb_and_emulator
    else
      AndroidTools.kill_adb_logcat device_flag, log_name
    end
  }

  apkfile = File.expand_path(File.join $targetdir, $appname + "-debug.apk")
  AndroidTools.load_app_and_run(device_flag, apkfile, $app_package_name)
  AndroidTools.logcat(device_flag, log_name)

  Jake.before_run_spec
  start = Time.now

  # Give the application up to a minute to come up
  puts "Waiting for application ..."
  for i in 0..60
    if AndroidTools.application_running(device_flag, $app_package_name)
      break
    else
      sleep(1)
    end
  end

  # Wait up to two minutes for logcat to create the log file
  puts "Waiting for log file: #{log_name}"
  for i in 0..120
    if !File.exist?(log_name)
      sleep(1)
    else
      break
    end
  end

  if !File.exist?(log_name)
    puts "Cannot read log file: " + log_name
    exit(1)
  end

  # Tail the log: parse each line until the parser signals completion or the
  # application is no longer running. Skips lines with invalid encoding when
  # the String class supports valid_encoding? (Ruby 1.9+).
  puts "Start reading log ..."
  io = File.new(log_name, 'r:UTF-8')
  end_spec = false
  while !end_spec do
    io.each do |line|
      if line.class.method_defined? "valid_encoding?"
        end_spec = !Jake.process_spec_output(line) if line.valid_encoding?
      else
        end_spec = !Jake.process_spec_output(line)
      end
      break if end_spec
    end
    break unless AndroidTools.application_running(device_flag, $app_package_name)
    sleep(5) unless end_spec
  end
  io.close

  puts "Processing spec results ..."
  Jake.process_spec_results(start)

  # stop app
  uninstall_app = true if uninstall_app.nil? # by default uninstall spec app
  do_uninstall(device_flag) if uninstall_app

  if device_flag == '-e'
    AndroidTools.kill_adb_and_emulator
  else
    AndroidTools.kill_adb_logcat(device_flag, log_name)
  end

  $stdout.flush
end
namespace "run" do
  namespace "android" do
    # Block forever when remote debugging is enabled, keeping the rake
    # process (and its logcat child) alive.
    def sleepRubyProcess
      if $remote_debug == true
        while 1
          sleep 1
        end
      end
    end
    namespace "emulator" do
      # Run the spec suite on the emulator; optional arg controls uninstall.
      task :spec, :uninstall_app do |t, args|
        Jake.decorate_spec { run_as_spec('-e', args.uninstall_app) }
      end
    end
    namespace "device" do
      # Run the spec suite on an attached device.
      task :spec, :uninstall_app do |t, args|
        Jake.decorate_spec { run_as_spec('-d', args.uninstall_app) }
      end
    end
    task :spec => "run:android:emulator:spec" do
    end
    task :get_log => "config:android" do
      puts "log_file=" + $applog_path
    end
    # Build, start the emulator, install and launch the app, then tail logcat.
    task :emulator => ['config:android:emulator', 'device:android:debug'] do
      AndroidTools.kill_adb_logcat('-e')
      AndroidTools.run_emulator
      apkfile = File.expand_path(File.join $targetdir, $appname + "-debug.apk")
      AndroidTools.load_app_and_run('-e', apkfile, $app_package_name)
      AndroidTools.logcat_process('-e')
      sleepRubyProcess
    end
    desc "Run application on RhoSimulator"
    task :rhosimulator => ["config:set_android_platform", "config:common"] do
      # emulator version: app config takes precedence over global config
      $emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
      $emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
      $rhosim_config = "platform='android'\r\n"
      $rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
      Rake::Task["run:rhosimulator"].invoke
    end
    task :rhosimulator_debug => ["config:set_android_platform", "config:common"] do
      $emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
      $emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
      $rhosim_config = "platform='android'\r\n"
      $rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
      Rake::Task["run:rhosimulator_debug"].invoke
    end
    desc "build and install on device"
    task :device => "device:android:debug" do
      AndroidTools.kill_adb_logcat('-d')
      apkfile = File.join $targetdir, $appname + "-debug.apk"
      AndroidTools.load_app_and_run('-d', apkfile, $app_package_name)
      AndroidTools.logcat_process('-d')
      sleepRubyProcess
    end
  end
  desc "build and launch emulator"
  task :android => "run:android:emulator" do
  end
end
namespace "uninstall" do
  # Uninstall the app package via adb, retrying up to 21 times (adb can be
  # transiently busy). adb reporting "Failure" means the app was simply not
  # installed; any other non-"Success" output is retried, exiting on the
  # final attempt.
  def do_uninstall(flag)
    args = []
    args << flag
    args << "uninstall"
    args << $app_package_name
    for i in 0..20
      result = Jake.run($adb, args)
      unless $?.success?
        puts "Error uninstalling application"
        exit 1
      end
      if result.include?("Success")
        puts "Application uninstalled successfully"
        break
      else
        if result.include?("Failure")
          puts "Application is not installed on the device"
          break
        else
          puts "Error uninstalling application"
          exit 1 if i == 20
        end
      end
      sleep(5)
    end
  end
  namespace "android" do
    task :emulator => "config:android" do
      unless AndroidTools.is_emulator_running
        puts "WARNING!!! Emulator is not up and running"
        exit 1
      end
      do_uninstall('-e')
    end
    desc "uninstall from device"
    task :device => "config:android" do
      unless AndroidTools.is_device_running
        puts "WARNING!!! Device is not connected"
        exit 1
      end
      do_uninstall('-d')
    end
  end
  desc "uninstall from emulator"
  task :android => "uninstall:android:emulator" do
  end
end
namespace "clean" do
  desc "Clean Android"
  task :android => ["clean:android:all", "clean:common"]
  namespace "android" do
    # Remove all android build products: target, app build dir, bin files,
    # generated sources and the tmp dir.
    task :files => "config:android" do
      rm_rf $targetdir
      rm_rf $app_builddir
      Dir.glob(File.join($bindir, "*.*")) { |f| rm f, :force => true }
      rm_rf $srcdir
      rm_rf $tmpdir
    end
    task :all => :files
  end
end
namespace :stop do
  namespace :android do
    namespace :debug do
      # Kill every running ruby process (stops the rake run that is tailing
      # logcat). Platform-specific: taskkill on Windows, killall elsewhere.
      def killRuby
        if RUBY_PLATFORM =~ /windows|cygwin|mingw/
          # Windows
          `taskkill /F /IM ruby.exe`
        else
          `killall -9 ruby`
        end
      end
      task :emulator do #=> "stop:android:emulator"do
        AndroidTools.kill_adb_logcat('-e')
        killRuby
      end
      task :device do #=> "stop:android:device" do
        AndroidTools.kill_adb_logcat('-d')
        killRuby
      end
    end #end of debug
    task :emulator do
      AndroidTools.kill_adb_and_emulator
    end
    # Stop on device: uninstall the app and stop the logcat reader.
    task :device do
      device_flag = '-d'
      do_uninstall(device_flag)
      log_name = $app_path + '/RhoLogSpec.txt'
      AndroidTools.kill_adb_logcat device_flag, log_name
    end
  end
end
# NOTE(review): stray changelog line (merge residue) preserved as a comment —
# it is not valid Ruby: Added 'wipe user data' to android spec emulator
#------------------------------------------------------------------------
# (The MIT License)
#
# Copyright (c) 2008-2011 Rhomobile, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# http://rhomobile.com
#------------------------------------------------------------------------
require File.dirname(__FILE__) + '/androidcommon.rb'
require File.dirname(__FILE__) + '/android_tools.rb'
require File.dirname(__FILE__) + '/manifest_generator.rb'
require File.dirname(__FILE__) + '/eclipse_project_generator.rb'
require 'pathname'
require 'tempfile'
# Build against our own bundled STLport instead of the NDK's (disabled).
USE_OWN_STLPORT = false
#USE_TRACES = # see androidcommon.rb

# Thin wrapper delegating to AndroidTools.get_market_version.
def get_market_version(apilevel)
  AndroidTools.get_market_version(apilevel)
end

# Thin wrapper delegating to AndroidTools.get_api_level.
def get_api_level(version)
  AndroidTools.get_api_level(version)
end
# Java package all generated rhodes sources are rewritten into.
JAVA_PACKAGE_NAME = 'com.rhomobile.rhodes'

# Here is place were android platform should be specified.
# For complete list of android API levels and its mapping to
# market names (such as "Android-1.5" etc) see output of
# command "android list targets"
ANDROID_SDK_LEVEL = 4

# Maps rhodes capability names to the android manifest permission(s) they
# require. Values may be a single permission string, an array of strings,
# nil (no permission needed), or include a proc that is handed the manifest
# document for additional edits (see add_motosol_sdk).
ANDROID_PERMISSIONS = {
  'audio' => ['RECORD_AUDIO', 'MODIFY_AUDIO_SETTINGS'],
  'camera' => 'CAMERA',
  'gps' => ['ACCESS_FINE_LOCATION', 'ACCESS_COARSE_LOCATION'],
  'network_state' => 'ACCESS_NETWORK_STATE',
  'phone' => ['CALL_PHONE', 'READ_PHONE_STATE'],
  'pim' => ['READ_CONTACTS', 'WRITE_CONTACTS', 'GET_ACCOUNTS'],
  'record_audio' => 'RECORD_AUDIO',
  'vibrate' => 'VIBRATE',
  'bluetooth' => ['BLUETOOTH_ADMIN', 'BLUETOOTH'],
  'calendar' => ['READ_CALENDAR', 'WRITE_CALENDAR'],
  'sdcard' => 'WRITE_EXTERNAL_STORAGE',
  'push' => nil,
  'motorola' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest|
    add_motosol_sdk(manifest)
  end],
  'motoroladev' => ['SYSTEM_ALERT_WINDOW', 'BROADCAST_STICKY', proc do |manifest|
    add_motosol_sdk(manifest)
  end],
  'webkit_browser' => nil,
  'shared_runtime' => nil,
  'motorola_browser' => nil,
  'hardware_acceleration' => nil
}

# Capabilities that are always enabled regardless of build.yml.
ANDROID_CAPS_ALWAYS_ENABLED = ['network_state']
# Add the Motorola Solutions EMDK libraries (barcode scanner and MSR) to
# every <application> element of the given REXML manifest element, as
# optional (android:required="false") <uses-library> entries.
def add_motosol_sdk(manifest)
  optional_libs = ['com.motorolasolutions.scanner', 'com.motorolasolutions.emdk.msr'].map do |libname|
    entry = REXML::Element.new 'uses-library'
    entry.add_attribute 'android:name', libname
    entry.add_attribute 'android:required', 'false'
    entry
  end
  manifest.elements.each('application') do |application|
    optional_libs.each { |entry| application.add entry }
  end
end
# Copy the application's icon (<app>/icon/icon.png) over the default icon in
# every drawable density directory that exists under the generated res tree.
def set_app_icon_android
  iconappname = File.join($app_path, "icon", "icon.png")
  ['drawable', 'drawable-hdpi', 'drawable-mdpi', 'drawable-ldpi'].each do |dpi|
    drawable = File.join($appres, dpi)
    iconresname = File.join(drawable, "icon.png")
    rm_f iconresname
    # only copy into density dirs that actually exist
    cp iconappname, iconresname if File.exist? drawable
  end
end
# Rebuild the app res tree from the stock rhodes resources and set the
# 'app_name' string resource in strings.xml to newname.
def set_app_name_android(newname)
  puts "set_app_name"
  $stdout.flush
  rm_rf $appres
  cp_r $rhores, $appres
  rhostrings = File.join($rhores, "values", "strings.xml")
  appstrings = File.join($appres, "values", "strings.xml")
  # patch the template strings.xml and write the result into the app res tree
  doc = REXML::Document.new(File.new(rhostrings))
  doc.elements["resources/string[@name='app_name']"].text = newname
  File.open(appstrings, "w") { |f| doc.write f }
end
# True when the argument is one of the accepted "enabled" spellings used for
# build option values ('true', 'yes', 'enabled', 'enable', '1'); false for
# anything else.
def get_boolean(arg)
  %w[true yes enabled enable 1].include?(arg)
end
namespace 'project' do
  namespace 'android' do
    # Generate an Eclipse project for the application under
    # <app>/project/android: .project, .classpath, project.properties,
    # builder files and the generated AndroidManifest.xml, with linked
    # folders and classpaths for extension java sources.
    task :eclipse => ['config:android', 'config:android:extensions','build:android:manifest'] do
      #options = [ 'create', 'project',
      # '--path', $projectpath,
      # '--target', $androidtargets[$found_api_level][:id],
      # '--package', $app_package_name,
      # '--activity', 'RhodesActivity'
      #]
      #Jake.run($androidbin, options)
      project_template_path = File.join 'res','generators','templates','project','android'
      project_erb_path = File.join project_template_path,'project.erb'
      classpath_erb_path = File.join project_template_path,'classpath.erb'
      project_prop_erb_path = File.join project_template_path,'project.properties.erb'
      manifest_path = File.join $tmpdir,'AndroidManifest.xml'
      project_path = File.join $app_path,'project','android'
      project_file_path = File.join project_path,'.project'
      classpath_file_path = File.join project_path,'.classpath'
      project_prop_file_path = File.join project_path,'project.properties'
      manifest_file_path = File.join project_path,'AndroidManifest.xml'
      rhodes_path = File.absolute_path '.'
      generator = EclipseProjectGenerator.new $appname, $app_path, rhodes_path, $androidtargets[$found_api_level][:name]
      # extension paths outside the rhodes tree become linked (virtual) folders
      $app_config["extpaths"].each do |extpath|
        next if extpath.start_with? rhodes_path
        generator.addVirtualFolder extpath
      end
      # derive unique ".../src/" classpath roots from each extension's
      # java source file list
      $ext_android_additional_sources.each do |extpath, list|
        classpaths = []
        ext = File.basename(extpath)
        puts "Adding '#{ext}' extension java sources: #{list}"
        File.open(list, "r") do |f|
          while line = f.gets
            line.chomp!
            src = File.join(extpath, line)
            if src =~ /(.*\/src\/).*/
              src = $1
              unless classpaths.index(src)
                puts "Add classpath: #{src}"
                classpaths << src
              end
            end
          end
        end
        generator.addExtension(ext, classpaths) unless classpaths.empty?
      end
      mkdir_p project_path
      project_buf = generator.render project_erb_path
      File.open(project_file_path, "w") { |f| f.write project_buf }
      classpath_buf = generator.render classpath_erb_path
      File.open(classpath_file_path, "w") { |f| f.write classpath_buf }
      project_prop_buf = generator.render project_prop_erb_path
      File.open(project_prop_file_path, "w") { |f| f.write project_prop_buf }
      # copy template support files only if not already present in the project
      cp_r File.join(project_template_path,'externalToolBuilders'), File.join(project_path,'.externalToolBuilders') unless File.exists? File.join(project_path,'.externalToolBuilders')
      cp File.join(project_template_path,'gensources.xml'), project_path unless File.exists? File.join(project_path,'gensources.xml')
      cp File.join(project_template_path,'eclipsebundle.xml'), project_path unless File.exists? File.join(project_path,'eclipsebundle.xml')
      cp manifest_path, project_path
    end
  end
end
namespace "config" do
task :set_android_platform do
$current_platform = "android"
end
task :android => :set_android_platform do
Rake::Task["config:common"].invoke
$java = $config["env"]["paths"]["java"]
$neon_root = nil
$neon_root = $config["env"]["paths"]["neon"] unless $config["env"]["paths"].nil?
if !($app_config["paths"].nil? or $app_config["paths"]["neon"].nil?)
$neon_root = $app_config["paths"]["neon"]
end
$androidsdkpath = $config["env"]["paths"]["android"]
unless File.exists? $androidsdkpath
puts "Missing or invalid 'android' section in rhobuild.yml: '#{$androidsdkpath}'"
exit 1
end
$androidndkpath = $config["env"]["paths"]["android-ndk"]
unless File.exists? $androidndkpath
puts "Missing or invalid 'android-ndk' section in rhobuild.yml: '#{$androidndkpath}'"
exit 1
end
errfmt = "WARNING!!! Path to Android %s contain spaces! It will not work because of the Google toolchain restrictions. Move it to another location and reconfigure rhodes."
if $androidndkpath =~ /\s/
puts(errfmt % "NDK")
exit 1
end
$min_sdk_level = $app_config["android"]["minSDK"] unless $app_config["android"].nil?
$min_sdk_level = $config["android"]["minSDK"] if $min_sdk_level.nil? and not $config["android"].nil?
$min_sdk_level = $min_sdk_level.to_i unless $min_sdk_level.nil?
$min_sdk_level = ANDROID_SDK_LEVEL if $min_sdk_level.nil?
$max_sdk_level = $app_config["android"]["maxSDK"] unless $app_config["android"].nil?
$androidplatform = AndroidTools.fill_api_levels $androidsdkpath
if $androidplatform == nil
puts "No Android platform found at SDK path: '#{$androidsdkpath}'"
exit 1
end
android_api_levels = AndroidTools.get_installed_api_levels
android_api_levels.sort!
$found_api_level = android_api_levels.last
$gapikey = $app_config["android"]["apikey"] unless $app_config["android"].nil?
$gapikey = $config["android"]["apikey"] if $gapikey.nil? and not $config["android"].nil?
$gapikey = '' unless $gapikey.is_a? String
$gapikey = nil if $gapikey.empty?
$android_orientation = $app_config["android"]["orientation"] unless $app_config["android"].nil?
$use_geomapping = $app_config["android"]["mapping"] unless $app_config["android"].nil?
$use_geomapping = $config["android"]["mapping"] if $use_geomapping.nil? and not $config["android"].nil?
$use_geomapping = 'false' if $use_geomapping.nil?
$use_geomapping = get_boolean($use_geomapping.to_s)
$use_google_addon_api = false
$use_google_addon_api = true if $use_geomapping
#Additionally $use_google_addon_api set to true if PUSH capability is enabled
$config_xml = $app_config["android"]["rhoelements"]["config"] if $app_config["android"]["rhoelements"] if $app_config["android"]
if $config_xml
$config_xml = File.expand_path $config_xml, $app_path
puts "Custom config.xml path: #{$config_xml}"
end
puts "Use Google addon API: #{$use_google_addon_api}" if USE_TRACES
$uri_scheme = $app_config["android"]["URIScheme"] unless $app_config["android"].nil?
$uri_scheme = "http" if $uri_scheme.nil?
$uri_host = $app_config["android"]["URIHost"] unless $app_config["android"].nil?
# Here is switch between release/debug configuration used for
# building native libraries
if $app_config["debug"].nil?
$build_release = true
else
$build_release = !$app_config["debug"].to_i
end
$androidpath = Jake.get_absolute $config["build"]["androidpath"]
$bindir = File.join($app_path, "bin")
$rhobindir = File.join($androidpath, "bin")
$builddir = File.join($androidpath, "build")
$shareddir = File.join($androidpath, "..", "shared")
$coreapidir = File.join($androidpath, "..", "..", "lib", "commonAPI", "coreapi", "ext", "shared")
$commonapidir = File.join($androidpath, "..", "..", "lib", "commonAPI")
$targetdir = File.join($bindir, 'target', 'android')
$projectpath = File.join($app_path, 'project', 'android')
$excludelib = ['**/builtinME.rb', '**/ServeME.rb', '**/dateME.rb', '**/rationalME.rb']
$tmpdir = File.join($bindir, "tmp")
$srcdir = File.join $tmpdir,'assets' #File.join($bindir, "RhoBundle")
#$rhomanifest = File.join $androidpath, "Rhodes", "AndroidManifest.xml"
$rhomanifesterb = File.join $androidpath, "Rhodes", "AndroidManifest.xml.erb"
$appmanifest = File.join $tmpdir, "AndroidManifest.xml"
$rhores = File.join $androidpath, 'Rhodes','res'
$appres = File.join $tmpdir,'res'
$appassets = $srcdir
$applibs = File.join $tmpdir,'lib','armeabi'
$appincdir = File.join $tmpdir, "include"
$rho_java_gen_dir = File.join $tmpdir,'gen','com','rhomobile','rhodes'
#$rho_android_r = File.join $androidpath, 'Rhodes','src','com','rhomobile','rhodes','AndroidR.java'
#$app_android_r = File.join $rho_java_gen_dir,'AndroidR.java'
$app_rjava_dir = $rho_java_gen_dir
$app_native_libs_java = File.join $rho_java_gen_dir,'NativeLibraries.java'
$app_capabilities_java = File.join $rho_java_gen_dir,'Capabilities.java'
$app_push_java = File.join $rho_java_gen_dir,'Push.java'
$app_startup_listeners_java = File.join $rho_java_gen_dir,'extmanager','RhodesStartupListeners.java'
if RUBY_PLATFORM =~ /(win|w)32$/
$bat_ext = ".bat"
$exe_ext = ".exe"
$path_separator = ";"
# Add PATH to cygwin1.dll
ENV['CYGWIN'] = 'nodosfilewarning'
if $path_cygwin_modified.nil?
ENV['PATH'] = Jake.get_absolute("res/build-tools") + ";" + ENV['PATH']
path_cygwin_modified = true
end
else
#XXX make these absolute
$bat_ext = ""
$exe_ext = ""
$path_separator = ":"
# TODO: add ruby executable for Linux
end
# Locate the newest installed SDK build-tools revision (if any) and derive
# dx.jar / aapt paths from it; otherwise fall back to the legacy per-platform
# and platform-tools locations.
build_tools_path = nil
if File.exist?(File.join($androidsdkpath, "build-tools"))
  revisions = []
  Dir.foreach(File.join($androidsdkpath, "build-tools")) do |entry|
    next if entry == '.' or entry == '..'
    revisions << entry
  end
  # BUGFIX: pick the latest revision by NUMERIC version components. The
  # original lexicographic sort chose e.g. "9.0.0" over "10.0.0".
  build_tools_path = revisions.max_by { |rev| rev.split('.').map(&:to_i) }
end
if build_tools_path
  puts "Using Android SDK build-tools: #{build_tools_path}"
  build_tools_path = File.join $androidsdkpath,'build-tools',build_tools_path
  #puts "build-tools path: #{build_tools_path}"
  #$dx = File.join(build_tools_path,"dx" + $bat_ext)
  $dxjar = File.join(build_tools_path,'lib','dx.jar')
  $aapt = File.join(build_tools_path, "aapt#{$exe_ext}")
else
  #$dx = File.join($androidsdkpath, "platforms", $androidplatform, "tools", "dx" + $bat_ext)
  #$dx = File.join($androidsdkpath, "platform-tools", "dx" + $bat_ext) unless File.exists? $dx
  # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
  $dxjar = File.join($androidsdkpath, "platforms", $androidplatform, "tools", "lib", "dx.jar")
  $dxjar = File.join($androidsdkpath, "platform-tools", "lib", "dx.jar") unless File.exist? $dxjar
  $aapt = File.join($androidsdkpath, "platforms", $androidplatform, "tools", "aapt" + $exe_ext)
  $aapt = File.join($androidsdkpath, "platform-tools", "aapt" + $exe_ext) unless File.exist? $aapt
end
# --- SDK / JDK tool locations ----------------------------------------------
$androidbin = File.join($androidsdkpath, "tools", "android" + $bat_ext)
$adb = File.join($androidsdkpath, "tools", "adb" + $exe_ext)
$adb = File.join($androidsdkpath, "platform-tools", "adb" + $exe_ext) unless File.exists? $adb
$zipalign = File.join($androidsdkpath, "tools", "zipalign" + $exe_ext)
$androidjar = File.join($androidsdkpath, "platforms", $androidplatform, "android.jar")
$sdklibjar = File.join($androidsdkpath, 'tools', 'lib', 'sdklib.jar')
$keytool = File.join($java, "keytool" + $exe_ext)
$jarsigner = File.join($java, "jarsigner" + $exe_ext)
$jarbin = File.join($java, "jar" + $exe_ext)
# --- APK signing configuration ---------------------------------------------
# Precedence: app build.yml -> user rhobuild.yml -> built-in defaults
# (~/.rhomobile/keystore with the well-known development password/alias).
$keystore = nil
$keystore = $app_config["android"]["production"]["certificate"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
$keystore = $config["android"]["production"]["certificate"] if $keystore.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
$keystore = File.expand_path($keystore, $app_path) unless $keystore.nil?
$keystore = File.expand_path(File.join(ENV['HOME'], ".rhomobile", "keystore")) if $keystore.nil?
$storepass = nil
$storepass = $app_config["android"]["production"]["password"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
$storepass = $config["android"]["production"]["password"] if $storepass.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
$storepass = "81719ef3a881469d96debda3112854eb" if $storepass.nil?
$keypass = $storepass
$storealias = nil
$storealias = $app_config["android"]["production"]["alias"] if !$app_config["android"].nil? and !$app_config["android"]["production"].nil?
$storealias = $config["android"]["production"]["alias"] if $storealias.nil? and !$config["android"].nil? and !$config["android"]["production"].nil?
$storealias = "rhomobile.keystore" if $storealias.nil?
# --- Capabilities ----------------------------------------------------------
# Merge in the always-on capabilities (constant defined elsewhere in this
# file), then drop any non-string entries.
$app_config["capabilities"] += ANDROID_CAPS_ALWAYS_ENABLED
$app_config["capabilities"].map! { |cap| cap.is_a?(String) ? cap : nil }.delete_if { |cap| cap.nil? }
# The 'push' capability requires the Google APIs SDK add-on.
$use_google_addon_api = true unless $app_config["capabilities"].index("push").nil?
# --- Application identity (name, vendor, Java package) ---------------------
$appname = $app_config["name"]
$appname = "Rhodes" if $appname.nil?
$vendor = $app_config["vendor"]
if $vendor.nil?
if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
$vendor = 'rhomobile'
else
$vendor = 'motorolasolutions'
end
end
# Sanitize vendor into a lowercase identifier usable in a Java package name.
$vendor = $vendor.gsub(/^[^A-Za-z]/, '_').gsub(/[^A-Za-z0-9]/, '_').gsub(/_+/, '_').downcase
$app_package_name = $app_config["android"] ? $app_config["android"]["package_name"] : nil
$app_package_name = "com.#{$vendor}." + $appname.downcase.gsub(/[^A-Za-z_0-9]/, '') unless $app_package_name
# Java package segments cannot start with a digit; '.<digit>' becomes '._'.
# NOTE(review): the digit itself is dropped by this substitution - confirm
# that is intended rather than '._<digit>'.
$app_package_name.gsub!(/\.[\d]/, "._")
puts "$vendor = #{$vendor}"
puts "$app_package_name = #{$app_package_name}"
# --- Default URI scheme host/path for app links ----------------------------
if $uri_host.nil?
if $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
$uri_host = 'rhomobile.com'
else
$uri_host = 'motorolasolutions.com'
end
$uri_path_prefix = "/#{$app_package_name}"
end
# Motorola add-on flags; Motorola and Google add-ons are mutually exclusive.
unless $app_config['capabilities'].index('motorola').nil? and $app_config['capabilities'].index('motoroladev').nil?
$use_motosol_api = true
$use_motosol_api_classpath = true unless $app_config['capabilities'].index('motoroladev').nil?
raise 'Cannot use Motorola SDK addon and Google SDK addon together!' if $use_google_addon_api
end
$no_compression = $app_config['android']['no_compression'] if $app_config['android']
# Optional application log file, resolved relative to the app directory.
$applog_path = nil
$applog_file = $app_config["applog"]
if !$applog_file.nil?
$applog_path = File.join($app_path, $applog_file)
end
# Fail early when the newest installed platform is older than the app's
# declared minimum SDK level.
if $min_sdk_level > $found_api_level
raise "Latest installed Android platform '#{$androidplatform}' does not meet minSdk '#{$min_sdk_level}' requirement"
end
# Look for Motorola SDK addon
if $use_motosol_api_classpath
puts "Looking for Motorola API SDK add-on..." if USE_TRACES
motosol_jars = ['com.motorolasolutions.scanner', 'com.motorolasolutions.msr']
$motosol_classpath = AndroidTools::get_addon_classpath(motosol_jars)
end
# Detect Google API add-on path
if $use_google_addon_api
puts "Looking for Google API SDK add-on..." if USE_TRACES
google_jars = ['com.google.android.maps']
$google_classpath = AndroidTools::get_addon_classpath(google_jars, $found_api_level)
end
# --- NDK setup and native library layout -----------------------------------
setup_ndk($androidndkpath, $found_api_level)
$std_includes = File.join $androidndkpath, "sources", "cxx-stl", "stlport", "stlport"
unless File.directory? $std_includes
  # The NDK does not ship STLport - build and use the copy bundled with Rhodes.
  $stlport_includes = File.join $shareddir, "stlport", "stlport"
  USE_OWN_STLPORT = true
end
# Static libraries composing the native runtime; per-library object dirs and
# archive paths are derived below and consumed by the build:android:lib* tasks.
$native_libs = ["sqlite", "curl", "stlport", "ruby", "json", "rhocommon", "rhodb", "rholog", "rhosync", "rhomain"]
if $build_release
  $confdir = "release"
else
  $confdir = "debug"
end
$app_builddir = File.join($bindir, 'target', 'android', $confdir)
$objdir = {}
$libname = {}
$native_libs.each do |x|
  $objdir[x] = File.join($tmpdir, x)
  $libname[x] = File.join($app_builddir, x, "lib#{x}.a")
end
# --- Push configuration ----------------------------------------------------
# Sender: app build.yml overrides user config, then a default address.
$push_sender = nil
$push_sender = $config["android"]["push"]["sender"] if !$config["android"].nil? and !$config["android"]["push"].nil?
$push_sender = $app_config["android"]["push"]["sender"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
$push_sender = "support@rhomobile.com" if $push_sender.nil?
# Notification handling defaults to "none" when the app does not configure it.
# (A no-op self-assignment of $push_notifications was removed here.)
$push_notifications = nil
$push_notifications = $app_config["android"]["push"]["notifications"] if !$app_config["android"].nil? and !$app_config["android"]["push"].nil?
$push_notifications = "none" if $push_notifications.nil?
# Detect installed Android targets by parsing `android list targets` output.
# $androidtargets maps API level => {:id, :name, :abis}.
$androidtargets = {}
id = nil
apilevel = nil
target_name = nil
`"#{$androidbin}" list targets`.split(/\n/).each do |line|
line.chomp!
# Each target entry starts with: id: <N> or "<name>"
if line =~ /^id:\s+([0-9]+)\s+or\s+\"(.*)\"/
id = $1
target_name = $2
if $use_google_addon_api
# Only record Google-APIs targets when the Google add-on is required.
if line =~ /Google Inc\.:Google APIs:([0-9]+)/
apilevel = $1.to_i
$androidtargets[apilevel] = {:id => id.to_i, :name => target_name}
end
else
if $use_motosol_api
if line =~ /MotorolaSolutions\s+Inc\.:MotorolaSolution\s+Value\s+Add\s+APIs.*:([0-9]+)/
apilevel = $1.to_i
$androidtargets[apilevel] = {:id => id.to_i, :name => target_name}
end
end
end
end
# NOTE(review): this condition is true whenever at most one of the two add-on
# flags is set, so the generic branch below also records plain targets (and
# can overwrite add-on entries for the same API level). It looks like
# `unless ... or ...` was intended - confirm before changing.
unless $use_google_addon_api and $use_motosol_api
if line =~ /^\s+API\s+level:\s+([0-9]+)$/
apilevel = $1.to_i
$androidtargets[apilevel] = {:id => id.to_i, :name => target_name}
end
end
# Attach the ABI list to the most recently recorded target.
# NOTE(review): if apilevel is set but no entry was recorded for it (possible
# in the add-on branches above), $androidtargets[apilevel] is nil and this
# would raise NoMethodError - confirm whether that can occur in practice.
if apilevel && $androidtargets[apilevel][:id] == id.to_i
if line =~ /^\s+ABIs\s*:\s+(.*)/
$androidtargets[apilevel][:abis] = []
$1.split(/,\s*/).each do |abi|
$androidtargets[apilevel][:abis] << abi
end
puts $androidtargets[apilevel][:abis].inspect if USE_TRACES
end
end
end
if USE_TRACES
puts "Android targets:"
puts $androidtargets.inspect
end
# Ensure the build output directories exist. File.exists? is deprecated
# (removed in Ruby 3.2) - use File.exist?. The guard is technically redundant
# (mkdir_p tolerates existing dirs) but kept for parity with the original.
mkdir_p $bindir unless File.exist? $bindir
mkdir_p $rhobindir unless File.exist? $rhobindir
mkdir_p $targetdir unless File.exist? $targetdir
mkdir_p $srcdir unless File.exist? $srcdir
end #task 'config:android'
namespace 'android' do
# 'config:android:app_config' task is invoked directly by common Rakefile
# just after build config has been read and before processing extensions
task :app_config do
  # Adjust the extension list based on declared capabilities, before the
  # common Rakefile starts processing extensions.
  caps = $app_config['capabilities']
  exts = $app_config['extensions']
  # The 'push' capability is implemented by the gcm-push extension; add once.
  exts << 'gcm-push' if caps.index('push') && exts.index('gcm-push').nil?
  # A native-browser build cannot use the rhoelements web engine.
  exts.delete('rhoelements') if caps.index('native_browser')
  $file_map_name = "rho.dat"
end
# Scan every configured extension for its Android build inputs: build scripts,
# AndroidManifest patches, resource 'adds', extra Java sources, extra native
# libs and prebuilt binaries. Results are collected into the $ext_android_*
# globals consumed by the build:android:* tasks.
task :extensions => ['config:android', 'build:bundle:noxruby'] do
$ext_android_rhodes_activity_listener = []
$ext_android_additional_sources = {}
$ext_android_additional_lib = []
$ext_android_build_scripts = {}
$ext_android_manifest_changes = {}
$ext_android_adds = {}
$ext_android_library_deps = {}
$app_config["extensions"].each do |ext|
puts "#{ext} is processing..."
# Search each extension path; only the FIRST match is used (see break below).
$app_config["extpaths"].each do |p|
extpath = File.join(p, ext, 'ext')
puts "Checking extpath: #{extpath}"
if File.exists? extpath and File.directory? extpath
puts "#{extpath} is configuring..."
extyml = File.join(p, ext, "ext.yml")
if File.file? extyml
puts "#{extyml} is processing..."
extconf = Jake.config(File.open(extyml))
extconf_android = extconf['android']
# exttype: 'build' (default), 'prebuilt' (no compile step) or 'rakefile'.
exttype = 'build'
exttype = extconf_android['exttype'] if extconf_android and extconf_android['exttype']
addspath = File.join($app_builddir, 'extensions', ext, 'adds')
prebuiltpath = nil
if exttype == 'prebuilt'
# A prebuilt extension must contain exactly one .../android directory.
prebuiltpath = Dir.glob(File.join(extpath, '**', 'android'))
if prebuiltpath.count == 1
prebuiltpath = prebuiltpath.first
else
raise "android:exttype is 'prebuilt' but prebuilt path is not found #{prebuiltpath.inspect}"
end
end
# Optional RhodesActivity lifecycle listener class supplied by the extension.
android_listener = extconf["android_rhodes_activity_listener"]
android_listener = extconf_android['rhodes_listener'] if android_listener.nil? and extconf_android
$ext_android_rhodes_activity_listener << android_listener unless android_listener.nil?
# AndroidManifest patches: explicit config wins; otherwise discover
# AndroidManifest.rb / *.erb / AndroidManifest.xml under prebuilt adds/.
manifest_changes = extconf["android_manifest_changes"]
manifest_changes = extconf_android['manifest_changes'] if manifest_changes.nil? and extconf_android
if manifest_changes
manifest_changes = [manifest_changes] unless manifest_changes.is_a? Array
manifest_changes.map! { |path| File.join(p, ext, path) }
else
if prebuiltpath
manifest_changes = []
path = File.join(prebuiltpath, 'adds', 'AndroidManifest.rb')
manifest_changes << path if File.file? path
templates = Dir.glob File.join(prebuiltpath, 'adds', '*.erb')
manifest_changes += templates
if templates.empty?
path = File.join(prebuiltpath, 'adds', 'AndroidManifest.xml')
manifest_changes << path if File.file? path
end
end
end
if manifest_changes
$ext_android_manifest_changes[ext] = manifest_changes
end
# Resource additions merged into the app's res/ at build time.
resource_addons = extconf["android_resources_addons"]
resource_addons = extconf_android['adds'] if resource_addons.nil? and extconf_android
if resource_addons
resource_addons = File.join(p, ext, resource_addons)
else
if prebuiltpath
resource_addons = File.join(prebuiltpath, 'adds')
resource_addons = nil unless File.directory? resource_addons
end
end
if resource_addons
$ext_android_adds[ext] = resource_addons
end
# Android library projects this extension depends on, keyed by their
# manifest package name (paths are relative to the SDK root).
library_deps = extconf_android['library_deps'] if extconf_android
if library_deps
if library_deps.is_a? Array
library_deps.each do |dep|
deppath = File.join($androidsdkpath, dep)
$ext_android_library_deps[AndroidTools.read_manifest_package(deppath)] = deppath
end
end
end
# Extra Java sources, declared via a source-list file in the extension.
additional_sources = extconf["android_additional_sources_list"]
additional_sources = extconf_android['source_list'] if additional_sources.nil? and extconf_android
unless additional_sources.nil?
ext_sources_list = File.join(p, ext, additional_sources)
if File.exists? ext_sources_list
$ext_android_additional_sources[File.join(p, ext)] = ext_sources_list
else
raise "Extension java source list is missed: #{ext_sources_list}"
end
else
puts "No additional java sources for '#{ext}'"
end
# there is no 'additional_libs' param in android section moreover
# place libraries into android adds folder
android_additional_lib = extconf["android_additional_lib"]
if android_additional_lib != nil
android_additional_lib.each do |lib|
$ext_android_additional_lib << File.join(p, ext, lib)
end
end
# Copy prebuilt artifacts (static libs, jars, shared objects) into the
# extension's build dir; *.so under noautoload/ are not auto-loaded.
if prebuiltpath
targetpath = File.join $app_builddir, 'extensions', ext
libaddspath = File.join addspath, 'lib', 'armeabi'
mkdir_p targetpath
Dir.glob(File.join(prebuiltpath, 'lib*.a')).each do |lib|
cp lib, targetpath
end
Dir.glob(File.join(prebuiltpath, '*.jar')).each do |lib|
cp lib, targetpath
end
Dir.glob(File.join(prebuiltpath, '**', 'lib*.so')).each do |lib|
next if lib =~ /adds/
if lib =~ /noautoload/
mkdir_p File.join(libaddspath, 'noautoload')
cp lib, File.join(libaddspath, 'noautoload')
else
mkdir_p libaddspath
cp lib, libaddspath
end
end
end
puts "#{extyml} is processed"
end
# Record how to build this extension: a rake-driven build or a build(.bat)
# script in the ext/ directory.
# NOTE(review): exttype is nil here when no ext.yml exists, so the 'rakefile'
# branch is only reachable for extensions with an ext.yml - confirm intended.
if exttype == 'rakefile'
rakedir = Dir.glob File.join(extpath, '**', 'android')
$ext_android_build_scripts[ext] = [rakedir.first, 'rake']
else
build_script = File.join(extpath, 'build' + $bat_ext)
if File.exists? build_script
if RUBY_PLATFORM =~ /(win|w)32$/
$ext_android_build_scripts[ext] = [extpath, 'build.bat']
else
$ext_android_build_scripts[ext] = [extpath, File.join('.', 'build' + $bat_ext)]
end
end
end
puts "#{extpath} is configured"
# Stop at the first matching extpath so the same-named extension is not
# configured (and later built) twice.
break
end # exists?
end # $app_config["extpaths"].each
end # $app_config["extensions"].each
puts "Extensions' java source lists: #{$ext_android_additional_sources.inspect}"
end #task :extensions
# Resolve the emulator binary, the emulator's Android version and the AVD
# name, from app config, user config, or the minimum SDK level.
task :emulator => "config:android" do
$emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
$emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
if RUBY_PLATFORM =~ /(win|w)32$/
# Note: the trailing comment keeps this a two-line assignment expression.
$emulator = #"cmd /c " +
File.join($androidsdkpath, "tools", "emulator.exe")
else
$emulator = File.join($androidsdkpath, "tools", "emulator")
end
$emuversion = AndroidTools.get_market_version($min_sdk_level) if $emuversion.nil?
if $emuversion.nil?
# NOTE(review): $emuversion is nil here, so the interpolation below always
# renders an empty version in the message - consider reporting
# $min_sdk_level instead.
raise "Wrong Android emulator version: #{$emuversion}. Android SDK target API is not installed"
end
if USE_TRACES
puts "Android emulator version: #{$emuversion}"
end
$emuversion = $emuversion.to_s
$appavdname = $app_config["android"]["emulator"] if $app_config["android"] != nil && $app_config["android"].length > 0
$appavdname = $config["android"]["emulator"] if $appavdname.nil? and !$config["android"].nil? and $config["android"].length > 0
end # task 'config:android:emulator'
# Placeholder: device builds currently need nothing beyond base config:android.
task :device => "config:android" do
end
end #namespace 'config:android'
end
namespace "build" do
namespace "android" do
desc "Build RhoBundle for android"
task :rhobundle => ["config:android", :extensions] do
  $srcdir = $appassets
  Rake::Task["build:bundle:noxruby"].invoke
  # Fingerprint the bundle: fold the three payload directories into a single
  # incremental digest, then record it alongside the application name.
  digest = nil
  %w(apps db lib).each do |subdir|
    digest = get_dir_hash(File.join($srcdir, subdir), digest)
  end
  File.open(File.join($srcdir, "hash"), "w") { |f| f.write(digest.hexdigest) }
  File.open(File.join($srcdir, "name"), "w") { |f| f.write($appname) }
end
desc "Build RhoBundle for Eclipse project"
task :eclipsebundle => "build:android:rhobundle" do
  # Replace the Eclipse Rhodes project's assets with the just-built bundle.
  dest = File.join(Jake.get_absolute($androidpath), "Rhodes", "assets")
  rm_rf dest
  cp_r $appassets, dest, :preserve => true
end
desc 'Building native extensions'
# Run each extension's native build script with the build environment exposed
# via ENV, then install manifest patches and resource adds into the
# per-extension 'adds' directories.
task :extensions => ["config:android:extensions", :genconfig] do
Rake::Task["build:bundle:noxruby"].invoke
# Environment contract consumed by extension build scripts.
ENV['RHO_PLATFORM'] = 'android'
ENV["RHO_APP_DIR"] = $app_path
ENV["ANDROID_SDK"] = $androidsdkpath
ENV["ANDROID_NDK"] = $androidndkpath
ENV["ANDROID_API_LEVEL"] = $found_api_level.to_s
ENV["RHO_ROOT"] = $startdir
ENV["BUILD_DIR"] ||= $startdir + "/platform/android/build"
ENV["RHO_INC"] = $appincdir
ENV["RHO_RES"] = $appres
ENV["RHO_ANDROID_TMP_DIR"] = $tmpdir
ENV["NEON_ROOT"] = $neon_root unless $neon_root.nil?
ENV["CONFIG_XML"] = $config_xml unless $config_xml.nil?
$ext_android_build_scripts.each do |ext, builddata|
#ext = File.basename(File.dirname(extpath))
# Per-extension output and scratch directories, passed to the script.
ENV["TARGET_TEMP_DIR"] = File.join($app_builddir, 'extensions', ext)
ENV['TEMP_FILES_DIR'] = File.join($tmpdir, ext)
mkdir_p ENV["TARGET_TEMP_DIR"] unless File.directory? ENV["TARGET_TEMP_DIR"]
mkdir_p ENV["TEMP_FILES_DIR"] unless File.directory? ENV["TEMP_FILES_DIR"]
puts "Executing extension build script: #{ext}"
# builddata is [directory, command]; 'rake' builds run through Jake, shell
# scripts run via $SHELL from within their directory.
if RUBY_PLATFORM =~ /(win|w)32$/ || (builddata[1] == 'rake')
Jake.run(builddata[1], [], builddata[0])
else
currentdir = Dir.pwd()
Dir.chdir builddata[0]
sh %{$SHELL #{builddata[1]}}
Dir.chdir currentdir
end
# NOTE(review): $? reflects the last child process; after Jake.run this may
# not be the build script's status - confirm Jake.run sets $? as expected.
raise "Cannot build #{builddata[0]}" unless $?.success?
puts "Extension build script finished"
end
# Install AndroidManifest patches (.xml copied as AndroidManifest.xml,
# .rb as AndroidManifest.rb, .erb templates copied verbatim).
$ext_android_manifest_changes.each do |ext, manifest_changes|
addspath = File.join($app_builddir, 'extensions', ext, 'adds')
mkdir_p addspath
manifest_changes.each do |path|
if File.extname(path) == '.xml'
cp path, File.join(addspath, 'AndroidManifest.xml')
else
if File.extname(path) == '.rb'
cp path, File.join(addspath, 'AndroidManifest.rb')
else
if File.extname(path) == '.erb'
cp path, addspath
else
raise "Wrong AndroidManifest patch file: #{path}"
end
end
end
end
end
# Copy each extension's resource-add subdirectories into its adds dir.
$ext_android_adds.each do |ext, path|
addspath = File.join($app_builddir, 'extensions', ext, 'adds')
mkdir_p addspath
Dir.glob(File.join(path, '*')).each do |add|
cp_r add, addspath if File.directory? add
end
end
#$ext_android_library_deps.each do |package, path|
#  res = File.join path, 'res'
#  assets = File.join path, 'assets'
#  addspath = File.join($app_builddir, 'extensions', package, 'adds')
#  mkdir_p addspath
#  cp_r res, addspath if File.directory? res
#  cp_r assets, addspath if File.directory? assets
#end
end #task :extensions
task :libsqlite => "config:android" do
  # Compile the bundled SQLite sources into a static archive.
  src_root  = File.join($shareddir, "sqlite")
  obj_root  = $objdir["sqlite"]
  archive   = $libname["sqlite"]
  file_list = File.join($builddir, 'libsqlite_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-I\"#{src_root}\"", "-I\"#{$shareddir}\""]

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :libcurl => "config:android" do
  # Compile the bundled libcurl sources into a static archive.
  #
  # Steps to get curl_config.h from fresh libcurl sources:
  #   export PATH=<ndkroot>/build/prebuilt/linux-x86/arm-eabi-4.2.1/bin:$PATH
  #   export CC=arm-eabi-gcc
  #   export CPP=arm-eabi-cpp
  #   export CFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
  #   export CPPFLAGS="--sysroot <ndkroot>/build/platforms/android-3/arch-arm -fPIC -mandroid -DANDROID -DOS_ANDROID"
  #   ./configure --without-ssl --without-ca-bundle --without-ca-path --without-libssh2 --without-libidn --disable-ldap --disable-ldaps --host=arm-eabi
  src_root  = File.join($shareddir, "curl", "lib")
  obj_root  = $objdir["curl"]
  archive   = $libname["curl"]
  file_list = File.join($builddir, 'libcurl_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-DHAVE_CONFIG_H"]
  flags << "-I\"#{src_root}/../include\""
  flags << "-I\"#{src_root}\""
  flags << "-I\"#{$shareddir}\""

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :libruby => "config:android" do
  # Compile the embedded Ruby interpreter into a static archive.
  src_root  = File.join($shareddir, "ruby")
  obj_root  = $objdir["ruby"]
  archive   = $libname["ruby"]
  file_list = File.join($builddir, 'libruby_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags  = %w(-Wno-uninitialized -Wno-missing-field-initializers -Wno-shadow)
  flags << "-I\"#{src_root}/include\""
  flags << "-I\"#{src_root}/android\""
  flags << "-I\"#{src_root}/generated\""
  flags << "-I\"#{src_root}\""
  flags << "-I\"#{src_root}/..\""
  flags << "-I\"#{src_root}/../sqlite\""
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :libjson => "config:android" do
  # Compile the bundled JSON parser into a static archive.
  src_root  = File.join($shareddir, "json")
  obj_root  = $objdir["json"]
  archive   = $libname["json"]
  file_list = File.join($builddir, 'libjson_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-I\"#{src_root}\"", "-I\"#{src_root}/..\""]
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :libstlport => "config:android" do
  # Build Rhodes' own STLport copy only when the NDK does not ship one
  # (USE_OWN_STLPORT is defined during config:android in that case).
  if USE_OWN_STLPORT
    obj_root  = $objdir["stlport"]
    archive   = $libname["stlport"]
    file_list = File.join($builddir, 'libstlport_build.files')

    mkdir_p obj_root
    mkdir_p File.dirname(archive)

    flags  = ["-I\"#{$stlport_includes}\""]
    flags += %w(-DTARGET_OS=android -DOSNAME=android -DCOMPILER_NAME=gcc -DBUILD_OSNAME=android)
    flags += %w(-D_REENTRANT -D__NEW__)
    flags += %w(-ffunction-sections -fdata-sections -fno-rtti -fno-exceptions)

    src_files = get_sources(file_list)
    obj_files = get_objects(src_files, obj_root)

    cc_build(src_files, obj_root, flags) or exit 1
    cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
  end
end
task :librholog => "config:android" do
  # Compile the shared logging sources into a static archive.
  src_root  = File.join($shareddir, "logging")
  obj_root  = $objdir["rholog"]
  archive   = $libname["rholog"]
  file_list = File.join($builddir, 'librholog_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-I\"#{src_root}/..\""]
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :librhomain => "config:android" do
  # Compile the shared main sources into a static archive.
  src_root  = $shareddir
  obj_root  = $objdir["rhomain"]
  archive   = $libname["rhomain"]
  file_list = File.join($builddir, 'librhomain_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-I\"#{src_root}\"", "-I\"#{$commonapidir}\""]
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :librhocommon => "config:android" do
  # Compile the rhocommon support sources into a static archive.
  obj_root  = $objdir["rhocommon"]
  archive   = $libname["rhocommon"]
  file_list = File.join($builddir, 'librhocommon_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags  = ["-I\"#{$shareddir}\""]
  flags << "-I\"#{$shareddir}/curl/include\""
  flags << "-I\"#{$shareddir}/ruby/include\""
  flags << "-I\"#{$shareddir}/ruby/android\""
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :librhodb => "config:android" do
  # Compile the shared db-layer sources into a static archive.
  src_root  = File.join($shareddir, "db")
  obj_root  = $objdir["rhodb"]
  archive   = $libname["rhodb"]
  file_list = File.join($builddir, 'librhodb_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-I\"#{src_root}\"", "-I\"#{src_root}/..\"", "-I\"#{src_root}/../sqlite\""]
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :librhosync => "config:android" do
  # Compile the shared sync sources into a static archive.
  src_root  = File.join($shareddir, "sync")
  obj_root  = $objdir["rhosync"]
  archive   = $libname["rhosync"]
  file_list = File.join($builddir, 'librhosync_build.files')

  mkdir_p obj_root
  mkdir_p File.dirname(archive)

  flags = ["-I\"#{src_root}\"", "-I\"#{src_root}/..\"", "-I\"#{src_root}/../sqlite\""]
  flags << "-I\"#{$std_includes}\"" unless $std_includes.nil?
  if USE_OWN_STLPORT
    flags << "-D__NEW__"
    flags << "-I\"#{$stlport_includes}\""
  end

  src_files = get_sources(file_list)
  obj_files = get_objects(src_files, obj_root)

  cc_build(src_files, obj_root, flags) or exit 1
  cc_ar("\"#{archive}\"", obj_files.map { |o| "\"#{o}\"" }) or exit 1
end
task :libs => [:libsqlite, :libcurl, :libruby, :libjson, :libstlport, :librhodb, :librhocommon, :librhomain, :librhosync, :librholog]
# Generate include/genconfig.h (Google API key + per-capability enable flags).
# The existing file is parsed first so it is only rewritten when its content
# would actually change, keeping native rebuilds incremental.
task :genconfig => "config:android" do
mkdir_p $appincdir unless File.directory? $appincdir
# Generate genconfig.h
genconfig_h = File.join($appincdir, 'genconfig.h')
gapi_already_enabled = false
caps_already_enabled = {}
#ANDROID_PERMISSIONS.keys.each do |k|
#  caps_already_enabled[k] = false
#end
# Parse the current genconfig.h (if present) to learn what it declares.
if File.file? genconfig_h
File.open(genconfig_h, 'r') do |f|
while line = f.gets
if line =~ /^\s*#\s*define\s+RHO_GOOGLE_API_KEY\s+"[^"]*"\s*$/
gapi_already_enabled = true
else
ANDROID_PERMISSIONS.keys.each do |k|
if line =~ /^\s*#\s*define\s+RHO_CAP_#{k.upcase}_ENABLED\s+(.*)\s*$/
value = $1.strip
if value == 'true'
caps_already_enabled[k] = true
elsif value == 'false'
caps_already_enabled[k] = false
else
raise "Unknown value for the RHO_CAP_#{k.upcase}_ENABLED: #{value}"
end
end
end
end
end
end
end
# Regenerate when the file is missing, the Google-API flag flipped, or any
# capability flag changed.
# NOTE(review): the staleness check keys off $use_geomapping, but the write
# below emits RHO_GOOGLE_API_KEY based on $gapikey - confirm the two flags
# always agree.
regenerate = false
regenerate = true unless File.file? genconfig_h
regenerate = $use_geomapping != gapi_already_enabled unless regenerate
caps_enabled = {}
ANDROID_PERMISSIONS.keys.each do |k|
caps_enabled[k] = $app_config["capabilities"].index(k) != nil
regenerate = true if caps_already_enabled[k].nil? or caps_enabled[k] != caps_already_enabled[k]
end
puts caps_enabled.inspect
if regenerate
puts "Need to regenerate genconfig.h"
$stdout.flush
File.open(genconfig_h, 'w') do |f|
f.puts "#ifndef RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
f.puts "#define RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F"
f.puts ""
f.puts "#define RHO_GOOGLE_API_KEY \"#{$gapikey}\"" if $gapikey
caps_enabled.each do |k, v|
f.puts "#define RHO_CAP_#{k.upcase}_ENABLED #{v ? "true" : "false"}"
end
f.puts ""
f.puts "#endif /* RHO_GENCONFIG_H_411BFA4742CF4F2AAA3F6B411ED7514F */"
end
else
puts "No need to regenerate genconfig.h"
$stdout.flush
end
# Generate rhocaps.inc
#rhocaps_inc = File.join($appincdir, 'rhocaps.inc')
#caps_already_defined = []
#if File.exists? rhocaps_inc
#  File.open(rhocaps_inc, 'r') do |f|
#    while line = f.gets
#      next unless line =~ /^\s*RHO_DEFINE_CAP\s*\(\s*([A-Z_]*)\s*\)\s*\s*$/
#      caps_already_defined << $1.downcase
#    end
#  end
#end
#
#if caps_already_defined.sort.uniq != ANDROID_PERMISSIONS.keys.sort.uniq
#  puts "Need to regenerate rhocaps.inc"
#  $stdout.flush
#  File.open(rhocaps_inc, 'w') do |f|
#    ANDROID_PERMISSIONS.keys.sort.each do |k|
#      f.puts "RHO_DEFINE_CAP(#{k.upcase})"
#    end
#  end
#else
#  puts "No need to regenerate rhocaps.inc"
#  $stdout.flush
#end
end
# Compile and link librhodes.so from the JNI sources, all the static runtime
# libraries built by :libs and every extension's static libraries, then strip
# a copy into the Rhodes project's libs/armeabi.
task :librhodes => [:libs, :extensions, :genconfig] do
srcdir = File.join $androidpath, "Rhodes", "jni", "src"
libdir = File.join $app_builddir, 'librhodes', 'lib', 'armeabi'
objdir = File.join $tmpdir, 'librhodes'
libname = File.join libdir, 'librhodes.so'
sourcelist = File.join($builddir, 'librhodes_build.files')
mkdir_p libdir
mkdir_p objdir
# add licence lib to build
lic_dst = File.join $app_builddir, 'librhodes', 'libMotorolaLicence.a'
lic_src = $startdir + "/res/libs/motorolalicence/android/libMotorolaLicence.a"
rm_f lic_dst
cp lic_src, lic_dst
# Compiler include paths for the JNI sources.
args = []
args << "-I\"#{$appincdir}\""
args << "-I\"#{srcdir}/../include\""
args << "-I\"#{srcdir}/../include/rhodes/details\""
args << "-I\"#{$shareddir}\""
args << "-I\"#{$shareddir}/common\""
args << "-I\"#{$shareddir}/api_generator\""
args << "-I\"#{$shareddir}/sqlite\""
args << "-I\"#{$shareddir}/curl/include\""
args << "-I\"#{$shareddir}/ruby/include\""
args << "-I\"#{$shareddir}/ruby/android\""
args << "-I\"#{$coreapidir}\""
args << "-I\"#{$std_includes}\"" unless $std_includes.nil?
args << "-D__SGI_STL_INTERNAL_PAIR_H" if USE_OWN_STLPORT
args << "-D__NEW__" if USE_OWN_STLPORT
args << "-I\"#{$stlport_includes}\"" if USE_OWN_STLPORT
sources = get_sources sourcelist
cc_build sources, objdir, args or exit 1
# Link-time dependency list: every runtime archive built by :libs.
deps = []
$libname.each do |k, v|
deps << v
end
# Rebuild args as LINKER arguments from here on.
args = []
args << "-L\"#{$rhobindir}/#{$confdir}\""
args << "-L\"#{libdir}\""
rlibs = []
rlibs << "log"
rlibs << "dl"
rlibs << "z"
rlibs.map! { |x| "-l#{x}" }
elibs = []
extlibs = Dir.glob($app_builddir + "/**/lib*.a") # + Dir.glob($app_builddir + "/**/lib*.so")
extlibs.each do |lib|
args << "-L\"#{File.dirname(lib)}\""
end
stub = []
extlibs.reverse.each do |f|
lparam = "-l" + File.basename(f).gsub(/^lib/, "").gsub(/\.(a|so)$/, "")
elibs << lparam
# Workaround for GNU ld: this way we have specified one lib multiple times
# command line so ld's dependency mechanism will find required functions
# independently of its position in command line
stub.each do |s|
args << s
end
stub << lparam
end
# elibs is appended twice on purpose - part of the same single-pass-linker
# workaround described above.
args += elibs
args += elibs
args += rlibs
objects = get_objects sources, objdir
#mkdir_p File.dirname(libname) unless File.directory? File.dirname(libname)
cc_link libname, objects.collect { |x| '"'+x+'"' }, args, deps+extlibs or exit 1
# Copy the linked library into the Rhodes project and strip symbols there.
destdir = File.join($androidpath, "Rhodes", "libs", "armeabi")
mkdir_p destdir unless File.exists? destdir
cp_r libname, destdir
cc_run($stripbin, ['"'+File.join(destdir, File.basename(libname))+'"'])
end
# Generate AndroidManifest.xml: parse the app version into a numeric
# versionCode, collect android.permission.* entries from the app's declared
# capabilities, render the manifest from the ERB template, then merge legacy
# per-extension AndroidManifest.xml fragments into it with REXML.
task :manifest => ["config:android", :extensions] do
  # Parse "major[.minor[.patch[.build]]]" into its numeric components.
  version = {'major' => 0, 'minor' => 0, 'patch' => 0, "build" => 0}
  if $app_config["version"]
    if $app_config["version"] =~ /^(\d+)$/
      version["major"] = $1.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
      version["patch"] = $3.to_i
    elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/
      version["major"] = $1.to_i
      version["minor"] = $2.to_i
      version["patch"] = $3.to_i
      version["build"] = $4.to_i
    else
      raise "Version number must be numeric and in one of these formats: major, major.minor, major.minor.patch, or major.minor.patch.build."
    end
  end
  # Encode the four parts into a single integer android:versionCode.
  version = version["major"]*1000000 + version["minor"]*10000 + version["patch"]*100 + version["build"]

  # Baseline permissions plus everything mapped from the app's capabilities.
  usesPermissions = ['android.permission.INTERNET', 'android.permission.PERSISTENT_ACTIVITY', 'android.permission.WAKE_LOCK']
  $app_config["capabilities"].each do |cap|
    cap = ANDROID_PERMISSIONS[cap]
    next if cap.nil?
    cap = [cap] unless cap.is_a? Array
    cap.each do |cap_item|
      if cap_item.is_a? Proc
        # Proc-valued capability entries are not permission names; skip them.
        next
      end
      if cap_item.is_a? String
        usesPermissions << "android.permission.#{cap_item}"
        next
      end
    end
  end
  usesPermissions.uniq!

  # Configure the manifest generator and let extensions customize it.
  hidden = get_boolean($app_config['hidden_app'])
  generator = ManifestGenerator.new JAVA_PACKAGE_NAME, $app_package_name, hidden, usesPermissions
  generator.versionName = $app_config["version"]
  generator.versionCode = version
  generator.installLocation = 'auto'
  generator.minSdkVer = $min_sdk_level
  generator.maxSdkVer = $max_sdk_level
  generator.screenOrientation = $android_orientation unless $android_orientation.nil?
  generator.hardwareAcceleration = true if $app_config["capabilities"].index('hardware_acceleration')
  generator.apikey = $gapikey if $gapikey
  generator.addUriParams $uri_scheme, $uri_host, $uri_path_prefix
  # Extension hook scripts may tweak `generator` directly via eval.
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.rb')).each do |extscript|
    puts "Evaluating #{extscript}"
    eval(File.new(extscript).read)
  end
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Manifest*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.manifestManifestAdds << exttemplate
  end
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Application*.erb')).each do |exttemplate|
    puts "Adding template #{exttemplate}"
    generator.applicationManifestAdds << exttemplate
  end
  manifest = generator.render $rhomanifesterb
  File.open($appmanifest, "w") { |f| f.write manifest }

  #######################################################
  # Deprecated stuff below: merge legacy AndroidManifest.xml fragments
  # shipped by native extensions into the freshly generated manifest.
  app_f = File.new($appmanifest)
  manifest_orig_doc = REXML::Document.new(app_f)
  app_f.close
  dst_manifest = manifest_orig_doc.elements["manifest"]
  dst_application = manifest_orig_doc.elements["manifest/application"]
  dst_main_activity = nil
  puts '$$$ try to found MainActivity'
  dst_application.elements.each("activity") do |a|
    puts '$$$ activity with attr = '+a.attribute('name', 'android').to_s
    if a.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
      puts ' $$$ FOUND !'
      dst_main_activity = a
    end
  end
  Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.xml')).each do |ext_manifest|
    if File.exists? ext_manifest
      puts 'AndroidManifest.xml['+ext_manifest+'] from native extension found !'
      manifest_ext_doc = REXML::Document.new(File.new(ext_manifest))
      src_manifest = manifest_ext_doc.elements["manifest"]
      src_application = manifest_ext_doc.elements["manifest/application"]
      if src_application != nil
        puts 'Extension Manifest process application item :'
        src_application.elements.each do |e|
          puts '$$$ process element with attr = '+e.attribute('name', 'android').to_s
          # Children of RhodesActivity are grafted into the main activity;
          # everything else goes straight under <application>.
          if e.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
            e.elements.each do |sube|
              puts ' add item to MainActivity['+sube.xpath+']'
              dst_main_activity.add sube
            end
          else
            puts ' add item ['+e.xpath+']'
            dst_application.add e
          end
        end
      end
      puts 'Extension Manifest process root <manifest> item :'
      src_manifest.elements.each do |e|
        p = e.xpath
        if p != '/manifest/application'
          dst_e = manifest_orig_doc.elements[p]
          if dst_e != nil
            # <uses-sdk> from an extension replaces the generated one.
            if p == '/manifest/uses-sdk'
              puts ' found and delete original item ['+p+']'
              manifest_orig_doc.elements.delete p
            end
          end
          puts ' and new item ['+p+']'
          dst_manifest.add e
        end
      end
    else
      # BUGFIX: this branch referenced the undefined local `m`, raising
      # NameError whenever a glob hit disappeared before the existence check.
      puts 'AndroidManifest change file ['+ext_manifest+'] from native extension not found !'
    end
  end
  puts 'delete original manifest'
  File.delete($appmanifest)
  updated_f = File.open($appmanifest, "w")
  manifest_orig_doc.write updated_f, 2
  updated_f.close
  puts 'Manifest updated by extension is saved!'
end
# Collect application resources for packaging: extension "adds" directories,
# the app icon, an optional custom config.xml, and every built native .so.
task :resources => [:rhobundle, :extensions, :librhodes] do
set_app_name_android($appname)
puts 'EXT: add additional files to project before build'
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', '*')).each do |res|
if File.directory?(res) && (res != '.') && (res != '..')
puts "add resources from extension [#{res}] to [#{$tmpdir}]"
cp_r res, $tmpdir
end
end
#copy icon after extension resources in case it overwrites them (like rhoelementsext...)
set_app_icon_android
# Custom config.xml (if configured) is bundled as res/raw/config.xml.
if $config_xml
puts "Copying custom config.xml"
rawres_path = File.join($tmpdir, 'res', 'raw')
mkdir_p rawres_path unless File.exist? rawres_path
cp $config_xml, File.join(rawres_path, 'config.xml')
end
# Gather every native library (build output + extension extras) into $applibs.
mkdir_p File.join($applibs)
# Add .so libraries
Dir.glob($app_builddir + "/**/lib*.so").each do |lib|
cp_r lib, $applibs
end
$ext_android_additional_lib.each do |lib|
cp_r lib, $applibs
end
# Dir.glob($tmpdir + "/lib/armeabi/lib*.so").each do |lib|
# cc_run($stripbin, ['"'+lib+'"'])
# end
end
# Refresh the Eclipse project's res/, assets/ and libs/ folders from the
# freshly built application resources.
task :fulleclipsebundle => [:resources, :librhodes] do
  res_dir    = File.join($projectpath, 'res')
  assets_dir = File.join($projectpath, 'assets')
  libs_dir   = File.join($projectpath, 'libs')

  # Wipe stale copies before re-populating.
  [res_dir, assets_dir, libs_dir].each { |stale| rm_rf stale }

  mkdir_p libs_dir
  cp_r $appres, $projectpath
  cp_r $appassets, $projectpath
  cp_r $applibs, libs_dir
end
# Emit Capabilities.java: one boolean constant per known Android permission,
# true when the app's capabilities list mentions it. The file on disk is only
# rewritten when its content actually changed.
task :gencapabilitiesjava => "config:android" do
  mkdir_p File.dirname($app_capabilities_java)
  buf = StringIO.new("", "w+")
  buf.puts "package #{JAVA_PACKAGE_NAME};"
  buf.puts "public class Capabilities {"
  ANDROID_PERMISSIONS.keys.sort.each do |capability|
    enabled = $app_config["capabilities"].index(capability).nil? ? 'false' : 'true'
    buf.puts " public static final boolean #{capability.upcase}_ENABLED = #{enabled};"
  end
  buf.puts "}"
  Jake.modify_file_if_content_changed($app_capabilities_java, buf)
end
# Emit Push.java carrying the push sender id and the notification mode
# ("none" when no notifications are configured).
task :genpushjava => "config:android" do
  mkdir_p File.dirname($app_push_java)
  out = StringIO.new("", "w+")
  out.puts "package #{JAVA_PACKAGE_NAME};"
  out.puts "public class Push {"
  out.puts " public static final String SENDER = \"#{$push_sender}\";"
  mode = $push_notifications.nil? ? 'none' : $push_notifications
  out.puts " public static final String PUSH_NOTIFICATIONS = \"#{mode}\";"
  out.puts "};"
  Jake.modify_file_if_content_changed($app_push_java, out)
end
# Emit NativeLibraries.java whose load() calls System.loadLibrary() for every
# bundled .so (libraries whose path contains "noautoload" are skipped).
task :genloadlibsjava => "config:android" do
  mkdir_p File.dirname($app_native_libs_java)
  src = StringIO.new("", "w+")
  src.puts "package #{JAVA_PACKAGE_NAME};"
  src.puts "public class NativeLibraries {"
  src.puts " public static void load() {"
  src.puts " // Load native .so libraries"
  Dir.glob($app_builddir + "/**/lib*.so").reverse.each do |solib|
    next if solib =~ /noautoload/
    # "libfoo.so" -> "foo", the name System.loadLibrary expects.
    name = File.basename(solib).gsub(/^lib/, '').gsub(/\.so$/, '')
    src.puts " System.loadLibrary(\"#{name}\");"
  end
  src.puts " }"
  src.puts "};"
  Jake.modify_file_if_content_changed($app_native_libs_java, src)
end
# Emit RhodesStartupListeners.java listing the RhodesActivity listener class
# names contributed by extensions.
task :genrholisteners => ['config:android:extensions', 'config:android'] do
  mkdir_p File.dirname($app_startup_listeners_java)
  gen = StringIO.new("", "w+")
  gen.puts '// WARNING! THIS FILE IS GENERATED AUTOMATICALLY! DO NOT EDIT IT MANUALLY!'
  gen.puts 'package com.rhomobile.rhodes.extmanager;'
  gen.puts ''
  gen.puts 'class RhodesStartupListeners {'
  gen.puts ''
  # Leading "" entry keeps the array non-empty and the comma syntax uniform.
  gen.puts ' public static final String[] ourRunnableList = { ""'
  $ext_android_rhodes_activity_listener.each do |listener|
    gen.puts ' ,"' + listener + '"'
  end
  gen.puts ' };'
  gen.puts '}'
  Jake.modify_file_if_content_changed($app_startup_listeners_java, gen)
end
# Generate R.java via aapt for the app package, then clone it (with only the
# package declaration rewritten) into com.rhomobile.rhodes and into every
# Android library package declared by extensions.
task :genrjava => [:manifest, :resources] do
mkdir_p $app_rjava_dir
puts "Generate initial R.java at #{$app_rjava_dir} >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
args = ["package", "-f", "-M", $appmanifest, "-S", $appres, "-A", $appassets, "-I", $androidjar, "-J", $app_rjava_dir]
Jake.run($aapt, args)
raise 'Error in AAPT: R.java' unless $?.success?
#buf = File.new($rho_android_r, "r").read.gsub(/^\s*import com\.rhomobile\..*\.R;\s*$/, "\nimport #{$app_package_name}.R;\n")
#File.open($app_android_r, "w") { |f| f.write(buf) }
mkdir_p File.join($app_rjava_dir, "R") if not File.exists? File.join($app_rjava_dir, "R")
# Copy of R.java re-packaged as com.rhomobile.rhodes, stored under R/.
buf = File.new(File.join($app_rjava_dir, "R.java"), "r").read.gsub(/^\s*package\s*#{$app_package_name};\s*$/, "\npackage com.rhomobile.rhodes;\n")
#buf.gsub!(/public\s*static\s*final\s*int/, "public static int")
File.open(File.join($app_rjava_dir, "R", "R.java"), "w") { |f| f.write(buf) }
# One more re-packaged copy per extension library dependency, placed where
# task :rhodes picks generated sources up ($tmpdir/gen/...).
$ext_android_library_deps.each do |package, path|
r_dir = File.join $tmpdir, 'gen', package.split('.')
mkdir_p r_dir
buf = File.new(File.join($app_rjava_dir, 'R.java'), "r").read.gsub(/^\s*package\s*#{$app_package_name};\s*$/, "\npackage #{package};\n")
File.open(File.join(r_dir,'R.java'), 'w') { |f| f.write(buf) }
end
end
# Eclipse flavour of R.java generation: run aapt, then rewrite the package
# declaration of every generated .java to com.rhomobile.rhodes in place.
task :genreclipse => [:manifest, :resources] do
mkdir_p $app_rjava_dir
args = ["package", "-f", "-M", $appmanifest, "-S", $appres, "-A", $appassets, "-I", $androidjar, "-J", $app_rjava_dir]
Jake.run($aapt, args)
raise 'Error in AAPT: R.java' unless $?.success?
Dir.glob(File.join $app_rjava_dir, '*.java') do |java|
buf = File.new(java, 'r').read.gsub(/package .*$/, 'package com.rhomobile.rhodes;')
File.open(java, 'w') { |f| f.write buf }
end
#buf = File.new($rho_android_r, "r").read.gsub(/^\s*import com\.rhomobile\..*\.R;\s*$/, "\nimport #{$app_package_name}.R;\n")
#File.open($app_android_r, "w") { |f| f.write(buf) }
#mkdir_p File.join($app_rjava_dir, "R") if not File.exists? File.join($app_rjava_dir, "R")
#buf = File.new(File.join($app_rjava_dir, "R.java"), "r").read.gsub(/^\s*package\s*#{$app_package_name};\s*$/, "\npackage com.rhomobile.rhodes.R;\n")
#"{b}"uf.gsub!(/public\s*static\s*final\s*int/, "public static int")
#File.open(File.join($app_rjava_dir, "R", "R.java"), "w") { |f| f.write(buf) }
end
# Aggregators for generated Java sources: the Eclipse variant uses genreclipse,
# the command-line build uses genrjava; both share the other generators.
task :gensourceseclipse => [:genloadlibsjava, :genpushjava, :gencapabilitiesjava, :genrholisteners, :genreclipse]
task :gensourcesjava => [:genloadlibsjava, :genpushjava, :gencapabilitiesjava, :genrholisteners, :genrjava]
#desc "Build Rhodes for android"
# Compile the Rhodes Java sources (minus AndroidR.java, plus everything
# generated under $tmpdir/gen) and archive the class files into Rhodes.jar.
task :rhodes => [:rhobundle, :librhodes, :manifest, :resources, :gensourcesjava] do
rm_rf $tmpdir + "/Rhodes"
mkdir_p $tmpdir + "/Rhodes"
# Filter the prebuilt source list: AndroidR.java is excluded (replaced by the
# generated R.java copies added below).
srclist = File.join($builddir, "RhodesSRC_build.files")
newsrclist = File.join($tmpdir, "RhodesSRC_build.files")
lines = []
File.open(srclist, "r") do |f|
while line = f.gets
line.chomp!
next if line =~ /\/AndroidR\.java\s*$/
lines << line
end
end
# Append the generated sources (R.java clones etc.) from earlier tasks.
Dir.glob(File.join($tmpdir,'gen','**','*.java')) do |filepath|
lines << "\"#{filepath}\""
end
#lines << "\"" +File.join($app_rjava_dir, "R.java")+"\""
#lines << "\"" +File.join($app_rjava_dir, "R", "R.java")+"\""
#lines << "\"" +$app_native_libs_java+"\""
#lines << "\"" +$app_capabilities_java+"\""
#lines << "\"" +$app_push_java+"\""
#lines << "\"" +$app_startup_listeners_java+"\""
File.open(newsrclist, "w") { |f| f.write lines.join("\n") }
srclist = newsrclist
# Compile against android.jar plus optional Google/Motorola add-ons.
classpath = $androidjar
classpath += $path_separator + $google_classpath if $google_classpath
classpath += $path_separator + $motosol_classpath if $motosol_classpath
classpath += $path_separator + File.join($tmpdir, 'Rhodes')
javafilelists = [srclist]
extlist = File.join $app_builddir, "ext_build.files"
if File.exists? extlist
puts "#{extlist} is found! THere are addditional java files"
javafilelists << extlist
end
java_compile(File.join($tmpdir, 'Rhodes'), classpath, javafilelists)
# Jar up the compiled classes (paths relative to the compile output dir).
files = []
Dir.glob(File.join($tmpdir, "Rhodes", "*")).each do |f|
relpath = Pathname.new(f).relative_path_from(Pathname.new(File.join($tmpdir, "Rhodes"))).to_s
files << relpath
end
unless files.empty?
jar = File.join($app_builddir, 'librhodes', 'Rhodes.jar')
args = ["cf", jar]
args += files
Jake.run($jarbin, args, File.join($tmpdir, "Rhodes"))
unless $?.success?
raise "Error creating #{jar}"
end
# Seed the jar list consumed by :extensions_java and the dx packaging step.
$android_jars = [jar]
end
end
# Compile each extension's additional Java sources into its own jar.
# Each freshly built extension jar is appended to the classpath, so later
# extensions can compile against earlier ones (order-dependent).
task :extensions_java => [:rhodes, :extensions] do
puts 'Compile additional java files:'
classpath = $androidjar
classpath += $path_separator + $google_classpath if $google_classpath
classpath += $path_separator + $motosol_classpath if $motosol_classpath
classpath += $path_separator + File.join($tmpdir, 'Rhodes')
Dir.glob(File.join($app_builddir, '**', '*.jar')).each do |jar|
classpath += $path_separator + jar
end
$ext_android_additional_sources.each do |extpath, list|
ext = File.basename(extpath)
puts "Compiling '#{ext}' extension java sources: #{list}"
# The list file holds paths relative to the extension root; rewrite them
# as quoted absolute paths for javac's @file argument.
srclist = Tempfile.new "#{ext}SRC_build"
lines = []
File.open(list, "r") do |f|
while line = f.gets
line.chomp!
srclist.write "\"#{File.join(extpath, line)}\"\n"
#srclist.write "#{line}\n"
end
end
srclist.close
mkdir_p File.join($tmpdir, ext)
java_compile(File.join($tmpdir, ext), classpath, [srclist.path])
extjar = File.join $app_builddir, 'extensions', ext, ext + '.jar'
args = ["cf", extjar, '.']
Jake.run($jarbin, args, File.join($tmpdir, ext))
unless $?.success?
raise "Error creating #{extjar}"
end
$android_jars << extjar
classpath += $path_separator + extjar
end
end
# Pack the freshly built bundle into upgrade_bundle.zip under the target dir,
# regenerating rhofilelist.txt first.
task :upgrade_package => :rhobundle do
  target = $targetdir
  mkdir_p target unless File.exists? target
  Jake.build_file_map(File.join($srcdir, "apps"), "rhofilelist.txt")
  Jake.zip_upgrade_bundle($bindir, File.join(target, 'upgrade_bundle.zip'))
end
# Build a partial upgrade bundle containing only the files listed in
# upgrade_package_add_files.txt, plus a removal list taken from
# upgrade_package_remove_files.txt, zipped as upgrade_bundle_partial.zip.
task :upgrade_package_partial => ["build:android:rhobundle"] do
  add_list_full_name = File.join($app_path, 'upgrade_package_add_files.txt')
  remove_list_full_name = File.join($app_path, 'upgrade_package_remove_files.txt')
  src_folder = File.join($bindir, 'RhoBundle')
  src_folder = File.join(src_folder, 'apps')
  tmp_folder = $bindir + '_tmp_partial'
  rm_rf tmp_folder if File.exists? tmp_folder
  mkdir_p tmp_folder
  dst_tmp_folder = File.join(tmp_folder, 'RhoBundle')
  mkdir_p dst_tmp_folder
  # Copy the whole bundle first; files not on the add list are pruned below.
  cp_r src_folder, dst_tmp_folder
  dst_tmp_folder = File.join(dst_tmp_folder, 'apps')
  mkdir_p dst_tmp_folder
  # Translate source names to their compiled forms (.rb -> .iseq,
  # .erb -> _erb.iseq), as that is what the bundle actually contains.
  # BUGFIX: use chomp instead of chop — chop removes the final character even
  # when the last line has no trailing newline, corrupting that entry.
  add_files = []
  if File.exists? add_list_full_name
    File.open(add_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chomp
        add_files << fixed_path
        puts '### ['+fixed_path+']'
      end
    end
  end
  remove_files = []
  if File.exists? remove_list_full_name
    File.open(remove_list_full_name, "r") do |f|
      while line = f.gets
        fixed_path = line.gsub('.rb', '.iseq').gsub('.erb', '_erb.iseq').chomp
        remove_files << fixed_path
        #puts '### ['+fixed_path+']'
      end
    end
  end
  # Prune every copied file whose path (relative to the apps dir) is not on
  # the add list.
  psize = dst_tmp_folder.size+1
  Dir.glob(File.join(dst_tmp_folder, '**/*')).sort.each do |f|
    relpath = f[psize..-1]
    if File.file?(f)
      if not add_files.include?(relpath)
        rm_rf f
      end
    end
  end
  Jake.build_file_map(dst_tmp_folder, "upgrade_package_add_files.txt")
  # Ship the removal list inside the bundle so the device deletes those files.
  if File.exists? remove_list_full_name
    File.open(File.join(dst_tmp_folder, 'upgrade_package_remove_files.txt'), "w") do |f|
      remove_files.each do |j|
        f.puts "#{j}"
      end
    end
  end
  mkdir_p $targetdir if not File.exists? $targetdir
  zip_file_path = File.join($targetdir, "upgrade_bundle_partial.zip")
  Jake.zip_upgrade_bundle(tmp_folder, zip_file_path)
  rm_rf tmp_folder
end
#desc "build all"
# Umbrella task: build the bundle, compile Rhodes Java sources, then the
# per-extension jars.
task :all => [:rhobundle, :rhodes, :extensions_java]
end
end
# Package the Android app: dex all jars, then build rhodes.ap_ with aapt and
# append assets that aapt skipped plus the stripped native libraries.
namespace "package" do
task :android => "build:android:all" do
puts "Running dx utility"
# Convert every built jar to Dalvik bytecode (classes.dex).
args = []
args << "-Xmx1024m"
args << "-jar"
args << $dxjar
args << "--dex"
args << "--output=#{$bindir}/classes.dex"
Dir.glob(File.join($app_builddir, '**', '*.jar')).each do |jar|
args << jar
end
Jake.run(File.join($java, 'java'+$exe_ext), args)
unless $?.success?
raise "Error running DX utility"
end
resourcepkg = $bindir + "/rhodes.ap_"
puts "Packaging Assets and Jars"
# this task already caaled during build "build:android:all"
#set_app_name_android($appname)
args = ["package", "-f", "-M", $appmanifest, "-S", $appres, "-A", $appassets, "-I", $androidjar, "-F", resourcepkg]
# Extensions listed in $no_compression are stored uncompressed (-0 ext).
if $no_compression
$no_compression.each do |ext|
args << '-0'
args << ext
end
end
Jake.run($aapt, args)
unless $?.success?
raise "Error running AAPT (1)"
end
# Workaround: manually add files starting with '_' because aapt silently ignore such files when creating package
Dir.glob(File.join($appassets, "**/*")).each do |f|
next unless File.basename(f) =~ /^_/
relpath = Pathname.new(f).relative_path_from(Pathname.new($tmpdir)).to_s
puts "Add #{relpath} to #{resourcepkg}..."
args = ["uf", resourcepkg, relpath]
Jake.run($jarbin, args, $tmpdir)
unless $?.success?
raise "Error packaging assets"
end
end
puts "Packaging Native Libs"
# Strip each .so, then add them under lib/armeabi/ inside the package.
args = ["uf", resourcepkg]
Dir.glob(File.join($applibs, "lib*.so")).each do |lib|
cc_run($stripbin, ['"'+lib+'"'])
args << "lib/armeabi/#{File.basename(lib)}"
end
Jake.run($jarbin, args, $tmpdir)
unless $?.success?
raise "Error packaging native libraries"
end
end
end
# Device-facing build tasks: debug (self-signed) and production (signed with
# the configured keystore) APKs, plus installation over adb.
namespace "device" do
namespace "android" do
desc "Build debug self signed for device"
task :debug => "package:android" do
dexfile = $bindir + "/classes.dex"
simple_apkfile = $targetdir + "/" + $appname + "-tmp.apk"
final_apkfile = $targetdir + "/" + $appname + "-debug.apk"
resourcepkg = $bindir + "/rhodes.ap_"
# Last arg true => sign with the debug key.
apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, true
puts "Align Debug APK file"
args = []
args << "-f"
args << "-v"
args << "4"
args << simple_apkfile
args << final_apkfile
out = Jake.run2($zipalign, args, :hide_output => true)
puts out if USE_TRACES
unless $?.success?
puts "Error running zipalign"
exit 1
end
#remove temporary files
rm_rf simple_apkfile
# app_info.txt records the package name next to the APK for tooling.
File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
f.puts $app_package_name
end
end
task :install => :debug do
apkfile = $targetdir + "/" + $appname + "-debug.apk"
Jake.run $adb, ['-d', 'wait-for-device']
puts "Install APK file"
Jake.run($adb, ["-d", "install", "-r", apkfile])
unless $?.success?
raise "Error installing APK file"
end
puts "Install complete"
end
desc "Build production signed for device"
task :production => "package:android" do
dexfile = $bindir + "/classes.dex"
simple_apkfile = $targetdir + "/" + $appname + "_tmp.apk"
final_apkfile = $targetdir + "/" + $appname + "_signed.apk"
signed_apkfile = $targetdir + "/" + $appname + "_tmp_signed.apk"
resourcepkg = $bindir + "/rhodes.ap_"
# Last arg false => leave unsigned; jarsigner signs it below.
apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, false
# Generate a keystore on first use so production builds work out of the box.
if not File.exists? $keystore
puts "Generating private keystore..."
mkdir_p File.dirname($keystore) unless File.directory? File.dirname($keystore)
args = []
args << "-genkey"
args << "-alias"
args << $storealias
args << "-keyalg"
args << "RSA"
args << "-validity"
args << "20000"
args << "-keystore"
args << $keystore
args << "-storepass"
args << $storepass
args << "-keypass"
args << $keypass
Jake.run($keytool, args)
unless $?.success?
puts "Error generating keystore file"
exit 1
end
end
puts "Signing APK file"
args = []
args << "-sigalg"
args << "MD5withRSA"
args << "-digestalg"
args << "SHA1"
args << "-verbose"
args << "-keystore"
args << $keystore
args << "-storepass"
args << $storepass
args << "-signedjar"
args << signed_apkfile
args << simple_apkfile
args << $storealias
Jake.run($jarsigner, args)
unless $?.success?
puts "Error running jarsigner"
exit 1
end
# zipalign must run AFTER signing, otherwise signing breaks the alignment.
puts "Align APK file"
args = []
args << "-f"
args << "-v"
args << "4"
args << '"' + signed_apkfile + '"'
args << '"' + final_apkfile + '"'
Jake.run($zipalign, args)
unless $?.success?
puts "Error running zipalign"
exit 1
end
#remove temporary files
rm_rf simple_apkfile
rm_rf signed_apkfile
File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
f.puts $app_package_name
end
end
#task :getlog => "config:android" do
# AndroidTools.get_app_log($appname, true) or exit 1
#end
end
end
#namespace "emulator" do
# namespace "android" do
# task :getlog => "config:android" do
# AndroidTools.get_app_log($appname, false) or exit 1
# end
# end
#end
# Build and deploy the spec app, then tail its logcat output and feed each
# line to the spec-result parser until the specs finish or the app dies.
# device_flag: '-e' for emulator, '-d' for a physical device.
# uninstall_app: whether to uninstall the spec app afterwards (nil => true).
def run_as_spec(device_flag, uninstall_app)
Rake::Task["device:android:debug"].invoke
if device_flag == '-e'
Rake::Task["config:android:emulator"].invoke
else
Rake::Task["config:android:device"].invoke
end
log_name = $app_path + '/RhoLogSpec.txt'
File.delete(log_name) if File.exist?(log_name)
AndroidTools.logclear(device_flag)
# Start emulator with options: hidden window display and wipe user data
AndroidTools.run_emulator(:hidden => true, :wipe => true) if device_flag == '-e'
do_uninstall(device_flag)
# Failsafe to prevent eternal hangs
# Watchdog: after ~33 minutes tear down adb/emulator/logcat unconditionally.
Thread.new {
sleep 2000
if device_flag == '-e'
AndroidTools.kill_adb_and_emulator
else
AndroidTools.kill_adb_logcat device_flag, log_name
end
}
apkfile = File.expand_path(File.join $targetdir, $appname + "-debug.apk")
AndroidTools.load_app_and_run(device_flag, apkfile, $app_package_name)
AndroidTools.logcat(device_flag, log_name)
Jake.before_run_spec
start = Time.now
# Poll up to ~60s for the application process to appear.
puts "Waiting for application ..."
for i in 0..60
if AndroidTools.application_running(device_flag, $app_package_name)
break
else
sleep(1)
end
end
# Poll up to ~120s for logcat to create the log file.
puts "Waiting for log file: #{log_name}"
for i in 0..120
if !File.exist?(log_name)
sleep(1)
else
break
end
end
if !File.exist?(log_name)
puts "Cannot read log file: " + log_name
exit(1)
end
# Stream the growing log; process_spec_output returns false when specs end.
puts "Start reading log ..."
io = File.new(log_name, 'r:UTF-8')
end_spec = false
while !end_spec do
io.each do |line|
# valid_encoding? guards against binary junk in logcat output where the
# String API supports it (Ruby 1.9+).
if line.class.method_defined? "valid_encoding?"
end_spec = !Jake.process_spec_output(line) if line.valid_encoding?
else
end_spec = !Jake.process_spec_output(line)
end
break if end_spec
end
# Stop if the app died; otherwise wait for more log lines to arrive.
break unless AndroidTools.application_running(device_flag, $app_package_name)
sleep(5) unless end_spec
end
io.close
puts "Processing spec results ..."
Jake.process_spec_results(start)
# stop app
# NOTE(review): uninstall_app arrives from rake task args and may be a
# String rather than a boolean; only nil is treated as "use the default".
uninstall_app = true if uninstall_app.nil? # by default uninstall spec app
do_uninstall(device_flag) if uninstall_app
if device_flag == '-e'
AndroidTools.kill_adb_and_emulator
else
AndroidTools.kill_adb_logcat(device_flag, log_name)
end
$stdout.flush
end
# Run tasks: launch the app or its spec suite on emulator/device/RhoSimulator.
namespace "run" do
namespace "android" do
# Block the rake process forever when remote debugging is enabled, so the
# spawned logcat/adb children keep running.
def sleepRubyProcess
if $remote_debug == true
while 1
sleep 1
end
end
end
namespace "emulator" do
task :spec, :uninstall_app do |t, args|
Jake.decorate_spec { run_as_spec('-e', args.uninstall_app) }
end
end
namespace "device" do
task :spec, :uninstall_app do |t, args|
Jake.decorate_spec { run_as_spec('-d', args.uninstall_app) }
end
end
# Default spec target is the emulator.
task :spec => "run:android:emulator:spec" do
end
task :get_log => "config:android" do
puts "log_file=" + $applog_path
end
# Build, boot the emulator, install and launch the app, then stream logcat.
task :emulator => ['config:android:emulator', 'device:android:debug'] do
AndroidTools.kill_adb_logcat('-e')
AndroidTools.run_emulator
apkfile = File.expand_path(File.join $targetdir, $appname + "-debug.apk")
AndroidTools.load_app_and_run('-e', apkfile, $app_package_name)
AndroidTools.logcat_process('-e')
sleepRubyProcess
end
desc "Run application on RhoSimulator"
task :rhosimulator => ["config:set_android_platform", "config:common"] do
# App-level android version wins over the global config value.
$emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
$emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
$rhosim_config = "platform='android'\r\n"
$rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
Rake::Task["run:rhosimulator"].invoke
end
task :rhosimulator_debug => ["config:set_android_platform", "config:common"] do
$emuversion = $app_config["android"]["version"] unless $app_config["android"].nil?
$emuversion = $config["android"]["version"] if $emuversion.nil? and !$config["android"].nil?
$rhosim_config = "platform='android'\r\n"
$rhosim_config += "os_version='#{$emuversion}'\r\n" if $emuversion
Rake::Task["run:rhosimulator_debug"].invoke
end
desc "build and install on device"
task :device => "device:android:debug" do
AndroidTools.kill_adb_logcat('-d')
apkfile = File.join $targetdir, $appname + "-debug.apk"
AndroidTools.load_app_and_run('-d', apkfile, $app_package_name)
AndroidTools.logcat_process('-d')
sleepRubyProcess
end
end
desc "build and launch emulator"
task :android => "run:android:emulator" do
end
end
# Uninstall the app from emulator or device via adb, retrying on flaky output.
namespace "uninstall" do
# Run `adb <flag> uninstall <package>` up to 21 times. "Success" and
# "Failure" (not installed) both end the loop; any other output retries
# after 5s and aborts on the final attempt.
def do_uninstall(flag)
args = []
args << flag
args << "uninstall"
args << $app_package_name
for i in 0..20
result = Jake.run($adb, args)
unless $?.success?
puts "Error uninstalling application"
exit 1
end
if result.include?("Success")
puts "Application uninstalled successfully"
break
else
if result.include?("Failure")
puts "Application is not installed on the device"
break
else
puts "Error uninstalling application"
exit 1 if i == 20
end
end
sleep(5)
end
end
namespace "android" do
task :emulator => "config:android" do
unless AndroidTools.is_emulator_running
puts "WARNING!!! Emulator is not up and running"
exit 1
end
do_uninstall('-e')
end
desc "uninstall from device"
task :device => "config:android" do
unless AndroidTools.is_device_running
puts "WARNING!!! Device is not connected"
exit 1
end
do_uninstall('-d')
end
end
desc "uninstall from emulator"
task :android => "uninstall:android:emulator" do
end
end
# Cleanup tasks: remove every artifact the Android build produced.
namespace "clean" do
  desc "Clean Android"
  task :android => ["clean:android:all", "clean:common"]

  namespace "android" do
    # Delete target/build/source/tmp dirs and loose files in the bin dir.
    task :files => "config:android" do
      rm_rf $targetdir
      rm_rf $app_builddir
      Dir.glob(File.join($bindir, "*.*")) do |artifact|
        rm artifact, :force => true
      end
      rm_rf $srcdir
      rm_rf $tmpdir
    end

    task :all => :files
  end
end
# Stop tasks: shut down the running app/emulator and associated helpers.
namespace :stop do
  namespace :android do
    namespace :debug do
      # Kill every ruby process (debug helper processes spawned by the run
      # tasks).
      # BUGFIX: RUBY_PLATFORM on Windows MRI is "mswin"/"mingw"/"bccwin" —
      # the old /windows|cygwin|mingw/ pattern never matched mswin builds,
      # so they fell through to `killall`, which does not exist on Windows.
      def killRuby
        if RUBY_PLATFORM =~ /mswin|mingw|cygwin|bccwin|windows/
          # Windows
          `taskkill /F /IM ruby.exe`
        else
          `killall -9 ruby`
        end
      end
      task :emulator do #=> "stop:android:emulator"do
        AndroidTools.kill_adb_logcat('-e')
        killRuby
      end
      task :device do #=> "stop:android:device" do
        AndroidTools.kill_adb_logcat('-d')
        killRuby
      end
    end #end of debug
    task :emulator do
      AndroidTools.kill_adb_and_emulator
    end
    task :device do
      device_flag = '-d'
      do_uninstall(device_flag)
      log_name = $app_path + '/RhoLogSpec.txt'
      AndroidTools.kill_adb_logcat device_flag, log_name
    end
  end
end
|
Include test coverage for handling and raising exceptions.
require 'test_helper'
# Error-handling specs for Locotimezone.locotime.
class LocotimezoneErrorsTest < Minitest::Test
  describe 'testing error handling' do
    # BUGFIX: the original `assert true, result[:geo].empty?` asserted the
    # literal `true` (always passes) and used the predicate only as the
    # failure message — these tests could never fail. assert_empty actually
    # checks the result.
    it 'must be empty if bad request' do
      result = Locotimezone.locotime address: ''
      assert_empty result[:geo]
      assert_empty result[:timezone]
    end

    it 'must be empty if no location is found' do
      result = Locotimezone.locotime address: '%'
      assert_empty result[:geo]
      assert_empty result[:timezone]
    end

    it 'raises argument error if no address is given' do
      assert_raises(ArgumentError) { Locotimezone.locotime }
    end

    it 'raises argument error if no location is given when timezone only' do
      assert_raises ArgumentError do
        Locotimezone.locotime timezone_only: true
      end
    end
  end
end
|
# Symbian build configuration: populate the globals (tool paths, SDK
# prefixes, build directories) consumed by the build:symbian tasks.
namespace "config" do
  task :set_sym_platform do
    $current_platform = "symbian" unless $current_platform
  end

  task :symbian => [:set_sym_platform, "config:common"] do
    $rubypath = "res/build-tools/RhoRuby.exe"
    $zippath = "res/build-tools/7za.exe"
    $appname = $app_config["name"].nil? ? "Rhodes" : $app_config["name"]
    $bindir = "#{$app_path}/bin"
    $rhobundledir = "#{$app_path}/RhoBundle"
    $srcdir = "#{$bindir}/RhoBundle"
    $targetdir = "#{$bindir}/target/sym"
    $tmpdir = "#{$bindir}/tmp"
    qt_sdk = $config["env"]["paths"]["qtsymbian-sdk"]
    $jom = "#{qt_sdk}/QtCreator/bin/jom.exe"
    $sdkprefix = "#{qt_sdk}/Symbian/SDKs/Symbian1Qt473"
    $sdkprefix_emu = "#{qt_sdk}/Simulator/Qt/msvc2005"
    $epocroot = "/QtSDK/Symbian/SDKs/Symbian1Qt473/"
    $qmake = "#{$sdkprefix}/bin/qmake.exe"
    $make = "#{$sdkprefix}/epoc32/tools/make.exe"
    $qmake_emu = "#{$sdkprefix_emu}/bin/qmake.exe"
    $symbiandir = "#{qt_sdk}/Symbian"
    # Ruby sources excluded from the Symbian bundle.
    $excludelib = ['**/builtinME.rb', '**/ServeME.rb', '**/dateME.rb', '**/rationalME.rb']
    $msvs = $config["env"]["paths"]["msvs2005"]
  end
end
# Symbian build tasks: extension builds, bundle assembly, and the native
# device (GCCE) and emulator (MSVC) builds driven by qmake/make/jom.
namespace "build" do
namespace "symbian" do
# Run each extension's build.bat with the environment it expects.
task :extensions => "config:symbian" do
$app_config["extensions"].each do |ext|
$app_config["extpaths"].each do |p|
extpath = File.join(p, ext, 'ext')
next unless File.exists? File.join(extpath, "build.bat")
ENV['RHO_PLATFORM'] = 'symbian'
ENV['PWD'] = $startdir
ENV['RHO_ROOT'] = ENV['PWD']
ENV['TARGET_TEMP_DIR'] = File.join(ENV['PWD'], "platform", "symbian", "bin", $sdk, "rhodes", "Release")
ENV['TEMP_FILES_DIR'] = File.join(ENV['PWD'], "platform", "symbian", "bin", $sdk, "extensions", ext)
ENV['VCBUILD'] = $vcbuild
ENV['SDK'] = $sdk
puts Jake.run("build.bat", [], extpath)
# Stop at the first extpath containing a build.bat for this extension.
break
end
end
end
desc "Build symbian rhobundle"
# Rebuild the bundle and copy apps/db/lib plus the app icon into the
# Symbian project tree.
task :rhobundle => ["config:symbian"] do
Rake::Task["build:bundle:noxruby"].execute
rm_r $startdir + "/"+$config["build"]["symbianpath"]+"/rhodes/apps"
rm_r $startdir + "/"+$config["build"]["symbianpath"]+"/rhodes/db"
rm_r $startdir + "/"+$config["build"]["symbianpath"]+"/rhodes/lib"
cp_r $srcdir + "/apps", $startdir + "/"+$config["build"]["symbianpath"]+"/rhodes"
cp_r $srcdir + "/db", $startdir + "/"+$config["build"]["symbianpath"]+"/rhodes"
cp_r $srcdir + "/lib", $startdir + "/"+$config["build"]["symbianpath"]+"/rhodes"
cp $app_path + "/icon/icon.svg", $config["build"]["symbianpath"]+"/rhodes/rhodes.svg"
end
# Native device build: qmake for symbian-abld, make release-gcce, make sis,
# then move the resulting .sis into the target dir.
task :rhodesdev => ["config:symbian"] do
ENV['EPOCROOT'] = $epocroot
chdir $config["build"]["symbianpath"]
ENV['PATH'] = $sdkprefix+"/epoc32/tools;"+$symbiandir+"/tools/perl/bin;C:/Windows/system32;"+$sdkprefix+"/epoc32/gcc/bin;"+$symbiandir+"/tools/gcce4/bin;"+$sdkprefix+"/bin;"+$sdkprefix+"/mkspecs/default;"+$symbiandir+"/tools/gcce4/arm-none-symbianelf/bin;"
ENV['GCCPATH']= $symbiandir+"/tools/gcce4"
ENV['DEFALT_MKSPEC_PATH']=$sdkprefix+"/mkspecs/default"
ENV['QMAKE_PATH']=$sdkprefix+"/bin"
args = ['rhodes.pro', '-r','-spec', 'symbian-abld', "\"CONFIG+=release\"", '-after', "\"OBJECTS_DIR=obj\"", "\"MOC_DIR=moc\"", "\"UI_DIR=ui\"", "\"RCC_DIR=rcc\"", '-after', "\"OBJECTS_DIR=obj\"",
"\"MOC_DIR=moc\"", "\"UI_DIR=ui\"", "\"RCC_DIR=rcc\""]
puts "\nThe following step may take several minutes or more to complete depending on your processor speed\n\n"
puts Jake.run($qmake,args)
unless $? == 0
puts "Error building"
exit 1
end
args = ['release-gcce', '-w']
puts Jake.run($make,args)
unless $? == 0
puts "Error building"
exit 1
end
args = ['sis']
puts Jake.run($make,args)
unless $? == 0
puts "Error building"
exit 1
end
mkdir_p $targetdir if not File.exists? $targetdir
rm $targetdir+"/"+$appname+".sis" if File.exists? $targetdir+"/"+$appname+".sis"
mv $startdir + "/"+$config["build"]["symbianpath"] + "/rhodes/rhodes.sis", $targetdir+"/"+$appname+".sis"
chdir $startdir
end
# Win32 emulator build: qmake for win32-msvc2005, build with jom, then run
# rhodes.exe from the out-of-source build dir.
task :rhodesemu => ["config:symbian"] do
chdir $config["build"]["symbianpath"]
ENV['DEFALT_MKSPEC_PATH']=$sdkprefix_emu+"/mkspecs/default"
ENV['QTDIR']=$sdkprefix_emu+"/bin"
ENV['QMAKESPEC']=$sdkprefix_emu+"/mkspecs/default"
ENV['INCLUDE'] = $msvs+"/VC/include;"+$msvs+"/VC/PlatformSDK/Include;"+$msvs+"/VC/atlmfc/include"
ENV['LIB'] = $msvs+"/VC/lib;"+$msvs+"/VC/PlatformSDK/Lib;"+$msvs+"/VC/atlmfc/lib"
ENV['PATH'] = $sdkprefix_emu+"/bin;"+$msvs+"/VC/bin;C:/Windows/system32;"+$msvs+"/Common7/IDE"
chdir "../"
mkdir "rhodes-symbian-emulator-build" if not File.exists? "rhodes-symbian-emulator-build"
chdir "rhodes-symbian-emulator-build"
args = ['../symbian/rhodes_win32.pro', '-r','-spec', 'win32-msvc2005', "\"CONFIG+=release\""]
puts "\nThe following step may take several minutes or more to complete depending on your processor speed\n\n"
puts Jake.run( $qmake_emu,args)
unless $? == 0
puts "Error building"
exit 1
end
args = ['-nologo', '-j', '2', '-f', 'Makefile']
puts Jake.run($jom,args)
unless $? == 0
puts "Error building"
exit 1
end
# Refresh apps/db/lib next to rhodes.exe before launching it.
rm_r "apps" if File.exists? "apps"
rm_r "db" if File.exists? "db"
rm_r "lib" if File.exists? "lib"
mv "rhodes/apps", pwd
mv "rhodes/db", pwd
mv "rhodes/lib", pwd
puts Jake.run("rhodes.exe",[])
end
end
end
namespace "clean" do
  # qmake distclean in the Symbian source tree, plus removal of the
  # emulator build directory and the packaged-.sis target directory.
  task :rhodes => ["config:symbian"] do
    ENV['EPOCROOT'] = $epocroot
    chdir $config["build"]["symbianpath"]
    ENV['PATH'] = $sdkprefix+"/epoc32/tools;"+$symbiandir+"/tools/perl/bin;C:/Windows/system32;"+$sdkprefix+"/epoc32/gcc/bin;"+$symbiandir+"/tools/gcce4/bin;"+$sdkprefix+"/bin;"+$sdkprefix+"/mkspecs/default;"+$symbiandir+"/tools/gcce4/arm-none-symbianelf/bin;"
    ENV['GCCPATH']= $symbiandir+"/tools/gcce4"
    ENV['DEFALT_MKSPEC_PATH']=$sdkprefix+"/mkspecs/default"
    ENV['QMAKE_PATH']=$sdkprefix+"/bin"
    args = ['distclean']
    puts Jake.run($make,args)
    # NOTE(review): the failure check below is commented out, so distclean
    # errors are silently ignored -- confirm that is intentional.
    #unless $? == 0
    #puts "Error cleaning"
    # exit 1
    #end
    rm_rf "../rhodes-symbian-emulator-build"
    rm_rf $targetdir
  end

  task :symbian => "clean:rhodes" do
  end
end

namespace "device" do
  namespace "symbian" do
    desc "Build production for device"
    # Bundle the app, then build and package the device binary.
    task :production => ["config:symbian","build:symbian:rhobundle","build:symbian:rhodesdev"] do
    end
  end
end

namespace "run" do
  # Bundle the app, then build and launch the emulator binary.
  task :symbian => ["config:symbian","build:symbian:rhobundle","build:symbian:rhodesemu"] do
  end
end
Fixed small bugs in the rake script.
namespace "config" do
  # Default the current build platform to Symbian when none was chosen yet.
  task :set_sym_platform do
    $current_platform = "symbian" unless $current_platform
  end

  # Populate the globals consumed by the build/clean/run Symbian tasks:
  # build-tool paths (qmake, make, jom), device and simulator SDK prefixes,
  # and the output directories under the app's bin/ folder.
  task :symbian => [:set_sym_platform, "config:common"] do
    $rubypath = "res/build-tools/RhoRuby.exe"
    $zippath = "res/build-tools/7za.exe"
    $appname = $app_config["name"].nil? ? "Rhodes" : $app_config["name"]
    $bindir = $app_path + "/bin"
    $rhobundledir = $app_path + "/RhoBundle"
    $srcdir = $bindir + "/RhoBundle"
    $targetdir = $bindir + "/target/sym"
    $tmpdir = $bindir +"/tmp"
    $jom = $config["env"]["paths"]["qtsymbian-sdk"] + "/QtCreator/bin/jom.exe"
    $sdkprefix = $config["env"]["paths"]["qtsymbian-sdk"]+"/Symbian/SDKs/Symbian1Qt473"
    $sdkprefix_emu = $config["env"]["paths"]["qtsymbian-sdk"]+"/Simulator/Qt/msvc2005"
    # NOTE(review): hard-coded path; it ignores the configured
    # qtsymbian-sdk location used for $sdkprefix above -- confirm intended.
    $epocroot = "/QtSDK/Symbian/SDKs/Symbian1Qt473/"
    $qmake = $sdkprefix+"/bin/qmake.exe"
    $make = $sdkprefix+"/epoc32/tools/make.exe"
    $qmake_emu = $sdkprefix_emu+"/bin/qmake.exe"
    $symbiandir = $config["env"]["paths"]["qtsymbian-sdk"]+"/Symbian"
    # Ruby stdlib files excluded from the bundle.
    $excludelib = ['**/builtinME.rb','**/ServeME.rb','**/dateME.rb','**/rationalME.rb']
    $msvs = $config["env"]["paths"]["msvs2005"]
  end
end
namespace "build" do
namespace "symbian" do
# Build every configured extension that ships a build.bat, exporting the
# environment variables the extension build scripts expect.
task :extensions => "config:symbian" do
  $app_config["extensions"].each do |ext|
    $app_config["extpaths"].each do |p|
      extpath = File.join(p, ext, 'ext')
      next unless File.exists? File.join(extpath, "build.bat")
      ENV['RHO_PLATFORM'] = 'symbian'
      ENV['PWD'] = $startdir
      ENV['RHO_ROOT'] = ENV['PWD']
      ENV['TARGET_TEMP_DIR'] = File.join(ENV['PWD'], "platform", "symbian", "bin", $sdk, "rhodes", "Release")
      ENV['TEMP_FILES_DIR'] = File.join(ENV['PWD'], "platform", "symbian", "bin", $sdk, "extensions", ext)
      # NOTE(review): $sdk and $vcbuild are not set anywhere in the symbian
      # config of this script -- looks copied from the win32 build; confirm
      # they are defined before relying on them.
      ENV['VCBUILD'] = $vcbuild
      ENV['SDK'] = $sdk
      puts Jake.run("build.bat", [], extpath)
      # Only the first extension path containing build.bat is built.
      break
    end
  end
end
desc "Build symbian rhobundle"
# Build the Rhodes app bundle and copy apps/db/lib (and the app icon, if
# present) into the Symbian source tree.
task :rhobundle => ["config:symbian"] do
  Rake::Task["build:bundle:noxruby"].execute
  dest = $startdir + "/" + $config["build"]["symbianpath"] + "/rhodes"
  # BUGFIX: rm_r raises when the directory is missing (e.g. on a clean
  # first build); guard the removals like the rest of this script does.
  ["apps", "db", "lib"].each do |dir|
    rm_r dest + "/" + dir if File.exists?(dest + "/" + dir)
  end
  cp_r $srcdir + "/apps", dest
  cp_r $srcdir + "/db", dest
  cp_r $srcdir + "/lib", dest
  cp $app_path + "/icon/icon.svg", $config["build"]["symbianpath"]+"/rhodes/rhodes.svg" if File.exists? $app_path + "/icon/icon.svg"
end
# Build the Rhodes device binary (GCCE release) and package it as a .sis
# installer under $targetdir.
task :rhodesdev => ["config:symbian"] do
  ENV['EPOCROOT'] = $epocroot
  chdir $config["build"]["symbianpath"]
  ENV['PATH'] = $sdkprefix+"/epoc32/tools;"+$symbiandir+"/tools/perl/bin;C:/Windows/system32;"+$sdkprefix+"/epoc32/gcc/bin;"+$symbiandir+"/tools/gcce4/bin;"+$sdkprefix+"/bin;"+$sdkprefix+"/mkspecs/default;"+$symbiandir+"/tools/gcce4/arm-none-symbianelf/bin;"
  ENV['GCCPATH']= $symbiandir+"/tools/gcce4"
  ENV['DEFALT_MKSPEC_PATH']=$sdkprefix+"/mkspecs/default"
  ENV['QMAKE_PATH']=$sdkprefix+"/bin"
  # FIX: the '-after' group (OBJECTS_DIR/MOC_DIR/UI_DIR/RCC_DIR) was passed
  # to qmake twice via an obvious copy/paste duplication; once is enough.
  args = ['rhodes.pro', '-r', '-spec', 'symbian-abld', "\"CONFIG+=release\"",
          '-after', "\"OBJECTS_DIR=obj\"", "\"MOC_DIR=moc\"", "\"UI_DIR=ui\"", "\"RCC_DIR=rcc\""]
  puts "\nThe following step may take several minutes or more to complete depending on your processor speed\n\n"
  puts Jake.run($qmake,args)
  unless $? == 0
    puts "Error building"
    exit 1
  end
  # Compile for device (GCCE, release).
  args = ['release-gcce', '-w']
  puts Jake.run($make,args)
  unless $? == 0
    puts "Error building"
    exit 1
  end
  # Package the .sis installer.
  args = ['sis']
  puts Jake.run($make,args)
  unless $? == 0
    puts "Error building"
    exit 1
  end
  mkdir_p $targetdir if not File.exists? $targetdir
  rm $targetdir+"/"+$appname+".sis" if File.exists? $targetdir+"/"+$appname+".sis"
  mv $startdir + "/"+$config["build"]["symbianpath"] + "/rhodes/rhodes.sis", $targetdir+"/"+$appname+".sis"
  chdir $startdir
end
# Build Rhodes for the Symbian emulator (win32 / MSVC 2005 toolchain) and
# launch the resulting executable.
task :rhodesemu => ["config:symbian"] do
  chdir $config["build"]["symbianpath"]
  # Point qmake at the Simulator SDK and the MSVC 2005 toolchain.
  ENV['DEFALT_MKSPEC_PATH']=$sdkprefix_emu+"/mkspecs/default"
  ENV['QTDIR']=$sdkprefix_emu+"/bin"
  ENV['QMAKESPEC']=$sdkprefix_emu+"/mkspecs/default"
  ENV['INCLUDE'] = $msvs+"/VC/include;"+$msvs+"/VC/PlatformSDK/Include;"+$msvs+"/VC/atlmfc/include"
  ENV['LIB'] = $msvs+"/VC/lib;"+$msvs+"/VC/PlatformSDK/Lib;"+$msvs+"/VC/atlmfc/lib"
  ENV['PATH'] = $sdkprefix_emu+"/bin;"+$msvs+"/VC/bin;C:/Windows/system32;"+$msvs+"/Common7/IDE"
  # Out-of-source build directory beside the symbian source tree.
  chdir "../"
  mkdir "rhodes-symbian-emulator-build" if not File.exists? "rhodes-symbian-emulator-build"
  chdir "rhodes-symbian-emulator-build"
  args = ['../symbian/rhodes_win32.pro', '-r','-spec', 'win32-msvc2005', "\"CONFIG+=release\""]
  puts "\nThe following step may take several minutes or more to complete depending on your processor speed\n\n"
  puts Jake.run( $qmake_emu,args)
  unless $? == 0
    puts "Error building"
    exit 1
  end
  # Parallel compile with jom (MSVC-compatible make).
  args = ['-nologo', '-j', '2', '-f', 'Makefile']
  puts Jake.run($jom,args)
  unless $? == 0
    puts "Error building"
    exit 1
  end
  # Put the bundle and the freshly built executable side by side, then run it.
  rm_r "apps" if File.exists? "apps"
  rm_r "db" if File.exists? "db"
  rm_r "lib" if File.exists? "lib"
  mv "rhodes/apps", pwd
  mv "rhodes/db", pwd
  mv "rhodes/lib", pwd
  cp "rhodes/release/rhodes.exe", pwd if File.exists? "rhodes/release/rhodes.exe"
  puts Jake.run("rhodes.exe",[])
end
end
end
namespace "clean" do
  # qmake distclean in the Symbian source tree, plus removal of the
  # emulator build directory and the packaged-.sis target directory.
  task :rhodes => ["config:symbian"] do
    ENV['EPOCROOT'] = $epocroot
    chdir $config["build"]["symbianpath"]
    ENV['PATH'] = $sdkprefix+"/epoc32/tools;"+$symbiandir+"/tools/perl/bin;C:/Windows/system32;"+$sdkprefix+"/epoc32/gcc/bin;"+$symbiandir+"/tools/gcce4/bin;"+$sdkprefix+"/bin;"+$sdkprefix+"/mkspecs/default;"+$symbiandir+"/tools/gcce4/arm-none-symbianelf/bin;"
    ENV['GCCPATH']= $symbiandir+"/tools/gcce4"
    ENV['DEFALT_MKSPEC_PATH']=$sdkprefix+"/mkspecs/default"
    ENV['QMAKE_PATH']=$sdkprefix+"/bin"
    args = ['distclean']
    puts Jake.run($make,args)
    # NOTE(review): the failure check below is commented out, so distclean
    # errors are silently ignored -- confirm that is intentional.
    #unless $? == 0
    #puts "Error cleaning"
    # exit 1
    #end
    rm_rf "../rhodes-symbian-emulator-build"
    rm_rf $targetdir
  end

  task :symbian => "clean:rhodes" do
  end
end

namespace "device" do
  namespace "symbian" do
    desc "Build production for device"
    # Bundle the app, then build and package the device binary.
    task :production => ["config:symbian","build:symbian:rhobundle","build:symbian:rhodesdev"] do
    end
  end
end

namespace "run" do
  # Bundle the app, then build and launch the emulator binary.
  task :symbian => ["config:symbian","build:symbian:rhobundle","build:symbian:rhodesemu"] do
  end
end
|
require "application_system_test_case"
# System tests for the notes list on the outage "show" page: display order,
# adding, editing and deleting notes, and per-author permissions (only the
# author of a note sees its Edit/Delete links).
class OutagesShowTest < ApplicationSystemTestCase # rubocop:disable Metrics/ClassLength, Metrics/LineLength
  test "show two notes default order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    notes = all("li.note")
    assert_equal 2, notes.size
    # Default order is newest first: Note A (1 hour old) before Note B (1 day old).
    within(notes[0]) do
      assert_text "Note A"
      assert_text "1 hour ago"
      assert_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
    within(notes[1]) do
      assert_text "Note B"
      assert_text "1 day ago"
      assert_no_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
  end

  test "show two notes ascending order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    click_link "Oldest First"
    # TODO: I see no other way than to wait for some time here.
    sleep 2
    notes = all("li.note")
    within(notes[0]) do
      assert_text "Note B"
      assert_text "1 day ago"
      assert_no_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
    within(notes[1]) do
      assert_text "Note A"
      assert_text "1 hour ago"
      assert_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
    # Toggle back to the default order and re-verify.
    click_link "Newest First"
    # TODO: I see no other way than to wait for some time here.
    sleep 2
    assert_note_a(0)
    assert_note_b(1)
  end

  test "add a note default order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    fill_in "New Note", with: "Note C."
    assert_difference "Note.count" do
      click_button "Save Note"
      assert_text "Note C."
    end
    # Newest first: the new note appears at the top.
    assert_note_b(2)
    assert_note_a(1)
    assert_note_c(0)
  end

  test "add a note ascending order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    click_link "Oldest First"
    # TODO: I see no other way than to wait for some time here.
    sleep 2
    fill_in "New Note", with: "Note C."
    assert_difference "Note.count" do
      click_button "Save Note"
      assert_text "Note C."
    end
    # Oldest first: the new note appears at the bottom.
    assert_note_b(0)
    assert_note_a(1)
    assert_note_c(2)
  end

  test "edit a note" do
    # Note B belongs to :edit_ci_outages, so that user gets the Edit link.
    sign_in_for_system_tests(users(:edit_ci_outages))
    visit outage_url(@outage)
    assert_no_difference "Note.count" do
      within(all("li.note")[1]) { click_link "Edit" }
      fill_in "Edit Note", with: "Note B Prime"
      click_button "Update Note"
    end
    assert_selector("li.note", count: 2)
    assert_note_b_prime(1)
  end

  test "delete a note" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    assert_selector("li.note", count: 2)
    assert_difference "Note.count", -1 do
      within(all("li.note")[0]) { click_link "Delete" }
      assert_selector("li.note", count: 1)
    end
    assert_selector("li.note", count: 1)
    assert_note_b(0)
  end

  # Creates the two fixture notes every test relies on: Note A (1 hour old,
  # by :basic) and Note B (1 day old, by :edit_ci_outages).
  def setup
    @outage = Outage.find_by(account: accounts(:company_a), name: "Outage A")
    @outage.notes.create([
      {
        note: "Note A",
        user: users(:basic),
        created_at: Time.zone.now - 1.hour
      },
      {
        note: "Note B",
        user: users(:edit_ci_outages),
        created_at: Time.zone.now - 1.day
      }
    ])
    assert @outage.save, "Save of notes failed #{@outage.errors.full_messages}"
  end

  # These also test that only the original author can edit or delete notes.
  def assert_note_a(index)
    within(all("li.note")[index]) do
      assert_text "Note A"
      assert_text "1 hour ago"
      assert_link "Edit"
      assert_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
  end

  def assert_note_b(index)
    within(all("li.note")[index]) do
      assert_text "Note B"
      assert_text "1 day ago"
      assert_no_link "Edit"
      assert_no_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
  end

  # This one is also for the other user.
  def assert_note_b_prime(index)
    within(all("li.note")[index]) do
      assert_text "Note B Prime"
      assert_text "1 day ago"
      assert_link "Edit"
      assert_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
  end

  def assert_note_c(index)
    within(all("li.note")[index]) do
      assert_text "Note C"
      assert_text "less than 5 seconds ago"
      assert_link "Edit"
      assert_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
  end
end
Add some new skipped tests and TODOs.
require "application_system_test_case"
# System tests for the notes list on the outage "show" page: display order,
# adding, editing and deleting notes, and per-author permissions (only the
# author of a note sees its Edit/Delete links).
class OutagesShowTest < ApplicationSystemTestCase # rubocop:disable Metrics/ClassLength, Metrics/LineLength
  test "show two notes default order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    notes = all("li.note")
    assert_equal 2, notes.size
    # Default order is newest first: Note A (1 hour old) before Note B (1 day old).
    within(notes[0]) do
      assert_text "Note A"
      assert_text "1 hour ago"
      assert_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
    within(notes[1]) do
      assert_text "Note B"
      assert_text "1 day ago"
      assert_no_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
  end

  test "show two notes ascending order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    click_link "Oldest First"
    # TODO: I see no other way than to wait for some time here.
    sleep 2
    notes = all("li.note")
    within(notes[0]) do
      assert_text "Note B"
      assert_text "1 day ago"
      assert_no_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
    within(notes[1]) do
      assert_text "Note A"
      assert_text "1 hour ago"
      assert_link "Edit"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
    # Toggle back to the default order and re-verify.
    click_link "Newest First"
    # TODO: I see no other way than to wait for some time here.
    sleep 2
    assert_note_a(0)
    assert_note_b(1)
  end

  test "add a note default order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    fill_in "New Note", with: "Note C."
    assert_difference "Note.count" do
      click_button "Save Note"
      assert_text "Note C."
    end
    # Newest first: the new note appears at the top.
    assert_note_b(2)
    assert_note_a(1)
    assert_note_c(0)
    # TODO: Put this in, make it fail, then make it pass:
    within("form#new_note") { assert_no_text "Note C." }
  end

  test "add a note ascending order" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    click_link "Oldest First"
    # TODO: I see no other way than to wait for some time here.
    sleep 2
    fill_in "New Note", with: "Note C."
    assert_difference "Note.count" do
      click_button "Save Note"
      assert_text "Note C."
    end
    # Oldest first: the new note appears at the bottom.
    assert_note_b(0)
    assert_note_a(1)
    assert_note_c(2)
  end

  test "edit a note" do
    # Note B belongs to :edit_ci_outages, so that user gets the Edit link.
    sign_in_for_system_tests(users(:edit_ci_outages))
    visit outage_url(@outage)
    assert_no_difference "Note.count" do
      within(all("li.note")[1]) { click_link "Edit" }
      fill_in "Edit Note", with: "Note B Prime"
      click_button "Update Note"
    end
    assert_selector("li.note", count: 2)
    assert_note_b_prime(1)
  end

  test "delete a note" do
    sign_in_for_system_tests(users(:basic))
    visit outage_url(@outage)
    assert_selector("li.note", count: 2)
    assert_difference "Note.count", -1 do
      within(all("li.note")[0]) { click_link "Delete" }
      assert_selector("li.note", count: 1)
    end
    assert_selector("li.note", count: 1)
    assert_note_b(0)
  end

  test "note save failure" do
    # TODO: Add this test. Show the message and don't change pages.
    skip "Failed to save note"
  end

  test "note destroy failure" do
    # TODO: Add this test. Show the message and don't change pages.
    skip "Failed to destroy note"
  end

  # Creates the two fixture notes every test relies on: Note A (1 hour old,
  # by :basic) and Note B (1 day old, by :edit_ci_outages).
  def setup
    @outage = Outage.find_by(account: accounts(:company_a), name: "Outage A")
    @outage.notes.create([
      {
        note: "Note A",
        user: users(:basic),
        created_at: Time.zone.now - 1.hour
      },
      {
        note: "Note B",
        user: users(:edit_ci_outages),
        created_at: Time.zone.now - 1.day
      }
    ])
    assert @outage.save, "Save of notes failed #{@outage.errors.full_messages}"
  end

  # These also test that only the original author can edit or delete notes.
  def assert_note_a(index)
    within(all("li.note")[index]) do
      assert_text "Note A"
      assert_text "1 hour ago"
      assert_link "Edit"
      assert_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
  end

  def assert_note_b(index)
    within(all("li.note")[index]) do
      assert_text "Note B"
      assert_text "1 day ago"
      assert_no_link "Edit"
      assert_no_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
  end

  # This one is also for the other user.
  def assert_note_b_prime(index)
    within(all("li.note")[index]) do
      assert_text "Note B Prime"
      assert_text "1 day ago"
      assert_link "Edit"
      assert_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Can Edit CIs/Outages"
    end
  end

  def assert_note_c(index)
    within(all("li.note")[index]) do
      assert_text "Note C"
      assert_text "less than 5 seconds ago"
      assert_link "Edit"
      assert_link "Delete"
      # TODO: Make a link to user profile show.
      assert_text "Basic"
    end
  end
end
|
# All the methods in this file should not be called from the outside world
# See the DSL files in concerns/test_botable/ for how to call these tests
module WizardTest

  protected

  # Drives a multi-step wizard: signs in, then repeatedly fills and submits
  # the current step's form (up to 51 steps), stopping once to_path is
  # reached or no submit button remains. The optional block runs before
  # each step is filled in. Relies on host-provided helpers: user,
  # from_path, to_path, fill_form, submit_form, assert_page_normal,
  # save_test_bot_screenshot and the optional within_form scope.
  def test_bot_wizard_test(&block)
    sign_in(user) and visit(from_path) # visit only runs when sign_in returns truthy
    0.upto(50) do |index| # Can only test wizards 51 steps long
      assert_page_normal
      yield if block_given?
      if defined?(within_form)
        within(within_form) { fill_form; submit_form; }
      else
        fill_form
        submit_form
      end
      if to_path.present?
        # Keep going till we hit a certain to_path
        break if page.current_path == to_path
      else
        # Keep going till there's no more submit buttons
        break if all("input[type='submit']").blank?
      end
    end
    save_test_bot_screenshot
    assert_current_path(to_path) if to_path.present?
  end
end
Add assert_no_assigns_errors to the wizard test.
# All the methods in this file should not be called from the outside world
# See the DSL files in concerns/test_botable/ for how to call these tests
module WizardTest

  protected

  # Drives a multi-step wizard: signs in, then repeatedly fills and submits
  # the current step's form (up to 51 steps), asserting no assigns errors
  # after each submit, stopping once to_path is reached or no submit button
  # remains. The optional block runs before each step is filled in.
  # Relies on host-provided helpers: user, from_path, to_path, fill_form,
  # submit_form, assert_page_normal, assert_no_assigns_errors,
  # save_test_bot_screenshot and the optional within_form scope.
  # FIX: dropped the unused &block capture (yield/block_given? suffice),
  # renamed the unused loop variable, and replaced the `and` control-flow
  # idiom with an explicit guard (same short-circuit semantics).
  def test_bot_wizard_test
    visit(from_path) if sign_in(user) # only navigate when sign-in succeeded
    0.upto(50) do |_index| # Can only test wizards 51 steps long
      assert_page_normal
      yield if block_given?
      if defined?(within_form)
        within(within_form) do
          fill_form
          submit_form
        end
      else
        fill_form
        submit_form
      end
      assert_no_assigns_errors
      if to_path.present?
        # Keep going till we hit a certain to_path
        break if page.current_path == to_path
      else
        # Keep going till there's no more submit buttons
        break if all("input[type='submit']").blank?
      end
    end
    save_test_bot_screenshot
    assert_current_path(to_path) if to_path.present?
  end
end
|
require "helper"
# TODO: these three requires must also be added to the deps.rip file
require "rumbster"
require "message_observers"
require "sinatra/ditties/mailer"
begin
require "redgreen"
rescue LoadError
end
require "integrity/notifier/test"
require "integrity/notifier/email"
# Exercises the Integrity e-mail notifier against a real in-process SMTP
# server (Rumbster) listening on localhost.
class IntegrityEmailTest < Test::Unit::TestCase
  include Integrity::Notifier::Test

  MAIL_SERVER_PORT = 10_000

  # Name of the notifier under test, used by the shared Notifier::Test helpers.
  def notifier
    "Email"
  end

  # Boots the SMTP test server, attaches an observer that records delivered
  # messages, and prepares the database.
  def setup
    Net::SMTP.disable_tls
    @server = Rumbster.new(MAIL_SERVER_PORT)
    @mail_observer = MailMessageObserver.new
    @server.add_observer(@mail_observer)
    @server.start
    setup_database
  end

  # Fixture helpers; only +build+ is used by the tests below.
  def commit(status=:successful)
    Integrity::Commit.gen(status)
  end

  def build(status=:successful)
    Integrity::Build.gen(status)
  end

  def teardown
    @server.stop
  end

  # The notifier's config form must expose every SMTP/sendmail option.
  def test_configuration_form
    assert form_have_tag?("h3", :content => "SMTP Server Configuration")
    assert provides_option?("to", "foo@example.org")
    assert provides_option?("from", "bar@example.org")
    assert provides_option?("host", "foobarhost.biz")
    assert provides_option?("user", "foobaruser")
    assert provides_option?("pass", "secret")
    assert provides_option?("auth", "plain")
    assert provides_option?("pass", "secret")
    assert provides_option?("domain", "localhost")
    assert provides_option?("sendmail", "/usr/sbin/sendmail")
  end

  # Delivers through the live test server and inspects the captured message.
  def test_it_sends_email_notification
    config = { "host" => "127.0.0.1",
               "port" => MAIL_SERVER_PORT,
               "to" => "you@example.org",
               "from" => "me@example.org" }
    successful = build(:successful)
    failed = build(:failed)
    Integrity::Notifier::Email.new(successful, config.dup).deliver!
    Integrity::Notifier::Email.new(failed, config).deliver!
    assert_equal "net_smtp", Sinatra::Mailer.delivery_method
    mail = @mail_observer.messages.first
    assert_equal ["you@example.org"], mail.destinations
    assert_equal ["me@example.org"], mail.from
    assert mail.subject.include?("successful")
    assert mail.body.include?(successful.commit.committed_at.to_s)
    assert mail.body.include?(successful.commit.author.name)
    assert mail.body.include?(successful.output)
  end

  # Supplying a "sendmail" option must switch the mailer to sendmail delivery.
  def test_it_configures_email_notification_with_sendmail
    sendmail_path = "/usr/sbin/sendmail"
    config = { "sendmail" => sendmail_path,
               "to" => "sendmail@example.org",
               "from" => "me@example.org" }
    successful = build(:successful)
    Integrity::Notifier::Email.new(successful, config)
    assert_equal :sendmail, Sinatra::Mailer.delivery_method
    assert_equal sendmail_path, Sinatra::Mailer.config[:sendmail_path]
  end
end
Remove the email notifier unit test
|
# frozen_string_literal: true
$LOAD_PATH.unshift File.expand_path('lib', __dir__)
require 'rubocop/rspec/version'
# Gem specification for rubocop-rspec.
Gem::Specification.new do |spec|
  spec.name = 'rubocop-rspec'
  spec.summary = 'Code style checking for RSpec files'
  spec.description = <<-DESCRIPTION
    Code style checking for RSpec files.
    A plugin for the RuboCop code style enforcing & linting tool.
  DESCRIPTION
  spec.homepage = 'https://github.com/rubocop-hq/rubocop-rspec'
  spec.authors = ['John Backus', 'Ian MacLeod', 'Nils Gemeinhardt']
  spec.email = [
    'johncbackus@gmail.com',
    'ian@nevir.net',
    'git@nilsgemeinhardt.de'
  ]
  spec.licenses = ['MIT']

  spec.version = RuboCop::RSpec::Version::STRING
  spec.platform = Gem::Platform::RUBY
  spec.required_ruby_version = '>= 2.4.0'

  spec.require_paths = ['lib']
  spec.files = Dir[
    'lib/**/*',
    'config/default.yml',
    '*.md'
  ]
  spec.extra_rdoc_files = ['MIT-LICENSE.md', 'README.md']
  spec.metadata = {
    'changelog_uri' => 'https://github.com/rubocop-hq/rubocop-rspec/blob/master/CHANGELOG.md',
    'documentation_uri' => 'https://rubocop-rspec.readthedocs.io/'
  }

  # FIX: cap RuboCop below 1.0 -- RuboCop 1.0 may introduce breaking API
  # changes (https://github.com/rubocop-hq/rubocop/issues/8278).
  spec.add_runtime_dependency 'rubocop', '>= 0.87.0', '< 1.0'

  spec.add_development_dependency 'rack'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec', '>= 3.4'
  spec.add_development_dependency 'rubocop-performance', '~> 1.7'
  # Workaround for cc-test-reporter with SimpleCov 0.18.
  # Stop upgrading SimpleCov until the following issue will be resolved.
  # https://github.com/codeclimate/test-reporter/issues/418
  spec.add_development_dependency 'simplecov', '< 0.18'
  spec.add_development_dependency 'yard'
end
Limit RuboCop dependency to be < 1.0
RuboCop 1.0 might come with breaking API changes https://github.com/rubocop-hq/rubocop/issues/8278
Limit pre-2.0 RuboCop RSpec versions to RuboCop >= 0.87, < 1.0
# frozen_string_literal: true
$LOAD_PATH.unshift File.expand_path('lib', __dir__)
require 'rubocop/rspec/version'
# Gem specification for rubocop-rspec.
Gem::Specification.new do |spec|
  spec.name = 'rubocop-rspec'
  spec.summary = 'Code style checking for RSpec files'
  spec.description = <<-DESCRIPTION
    Code style checking for RSpec files.
    A plugin for the RuboCop code style enforcing & linting tool.
  DESCRIPTION
  spec.homepage = 'https://github.com/rubocop-hq/rubocop-rspec'
  spec.authors = ['John Backus', 'Ian MacLeod', 'Nils Gemeinhardt']
  spec.email = [
    'johncbackus@gmail.com',
    'ian@nevir.net',
    'git@nilsgemeinhardt.de'
  ]
  spec.licenses = ['MIT']
  spec.version = RuboCop::RSpec::Version::STRING
  spec.platform = Gem::Platform::RUBY
  spec.required_ruby_version = '>= 2.4.0'
  spec.require_paths = ['lib']
  spec.files = Dir[
    'lib/**/*',
    'config/default.yml',
    '*.md'
  ]
  spec.extra_rdoc_files = ['MIT-LICENSE.md', 'README.md']
  spec.metadata = {
    'changelog_uri' => 'https://github.com/rubocop-hq/rubocop-rspec/blob/master/CHANGELOG.md',
    'documentation_uri' => 'https://rubocop-rspec.readthedocs.io/'
  }
  # '~> 0.87' means >= 0.87, < 1.0: keeps RuboCop below the potentially
  # breaking 1.0 release.
  spec.add_runtime_dependency 'rubocop', '~> 0.87'
  spec.add_development_dependency 'rack'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec', '>= 3.4'
  spec.add_development_dependency 'rubocop-performance', '~> 1.7'
  # Workaround for cc-test-reporter with SimpleCov 0.18.
  # Stop upgrading SimpleCov until the following issue will be resolved.
  # https://github.com/codeclimate/test-reporter/issues/418
  spec.add_development_dependency 'simplecov', '< 0.18'
  spec.add_development_dependency 'yard'
end
|
# Gem specification for ruby-saml-mod, an early fork of onelogin/ruby-saml.
Gem::Specification.new do |s|
  s.name          = 'ruby-saml-mod'
  s.version       = '0.1.20'
  s.date          = '2013-02-15'
  s.authors       = ['OneLogin LLC', 'Bracken', 'Zach', 'Cody', 'Jeremy', 'Paul']
  s.summary       = 'Ruby library for SAML service providers'
  s.description   = 'This is an early fork from https://github.com/onelogin/ruby-saml - I plan to "rebase" these changes ontop of their current version eventually. '
  s.homepage      = 'http://github.com/bracken/ruby-saml'
  s.require_paths = ['lib']

  s.extra_rdoc_files = ['LICENSE']
  # Shipped files are listed explicitly rather than globbed.
  s.files = %w[
    LICENSE
    README
    lib/onelogin/saml.rb
    lib/onelogin/saml/auth_request.rb
    lib/onelogin/saml/authn_contexts.rb
    lib/onelogin/saml/log_out_request.rb
    lib/onelogin/saml/logout_response.rb
    lib/onelogin/saml/meta_data.rb
    lib/onelogin/saml/name_identifiers.rb
    lib/onelogin/saml/response.rb
    lib/onelogin/saml/settings.rb
    lib/onelogin/saml/status_codes.rb
    lib/xml_sec.rb
    ruby-saml-mod.gemspec
  ]

  s.add_dependency 'libxml-ruby', '>= 2.3.0'
  s.add_dependency 'ffi'
end
Bump the gem version to 0.1.21.
# Gem specification for ruby-saml-mod, an early fork of onelogin/ruby-saml.
Gem::Specification.new do |s|
  s.name = %q{ruby-saml-mod}
  s.version = "0.1.21"
  s.authors = ["OneLogin LLC", "Bracken", "Zach", "Cody", "Jeremy", "Paul"]
  s.date = %q{2013-03-07}
  s.extra_rdoc_files = [
    "LICENSE"
  ]
  # Shipped files are listed explicitly rather than globbed.
  s.files = [
    "LICENSE",
    "README",
    "lib/onelogin/saml.rb",
    "lib/onelogin/saml/auth_request.rb",
    "lib/onelogin/saml/authn_contexts.rb",
    "lib/onelogin/saml/log_out_request.rb",
    "lib/onelogin/saml/logout_response.rb",
    "lib/onelogin/saml/meta_data.rb",
    "lib/onelogin/saml/name_identifiers.rb",
    "lib/onelogin/saml/response.rb",
    "lib/onelogin/saml/settings.rb",
    "lib/onelogin/saml/status_codes.rb",
    "lib/xml_sec.rb",
    "ruby-saml-mod.gemspec"
  ]
  s.add_dependency('libxml-ruby', '>= 2.3.0')
  s.add_dependency('ffi')
  s.homepage = %q{http://github.com/bracken/ruby-saml}
  s.require_paths = ["lib"]
  s.summary = %q{Ruby library for SAML service providers}
  s.description = %q{This is an early fork from https://github.com/onelogin/ruby-saml - I plan to "rebase" these changes ontop of their current version eventually. }
end
|
require 'test_helper'
class ModifierTest < Test::Unit::TestCase
attr_reader :page_class_with_compound_key, :page_class_with_standard_key
# Builds two throwaway model classes: one whose _id defaults to a fixed
# compound (ordered hash) key and one with the standard generated _id.
def setup
  compound_key = BSON::OrderedHash['n', 42, 'i', BSON::ObjectId.new]
  @page_class_with_compound_key = Doc do
    # NOTE(review): the lambda closes over a single value, so every document
    # of this class defaults to the same compound _id -- confirm intended.
    key :_id, :default => lambda { compound_key }
    key :title, String
    key :day_count, Integer, :default => 0
    key :week_count, Integer, :default => 0
    key :month_count, Integer, :default => 0
    key :tags, Array
  end
  @page_class_with_standard_key = Doc do
    key :title, String
    key :day_count, Integer, :default => 0
    key :week_count, Integer, :default => 0
    key :month_count, Integer, :default => 0
    key :tags, Array
  end
end
# Fetches the raw document for +page+ straight from the collection and
# verifies the three counter fields against the expected values.
def assert_page_counts(page, day_count, week_count, month_count)
  doc = page.collection.find_one({:_id => page.id})
  assert doc, "Could not find document with _id => #{page.id.inspect}"
  expectations = {
    'day_count'   => day_count,
    'week_count'  => week_count,
    'month_count' => month_count
  }
  expectations.each do |field, expected|
    doc.fetch(field).should == expected
  end
end
# Verifies that none of +keys+ remain on the raw document for +page+.
# FIX: the document was re-fetched from the collection once per key; the
# lookup is loop-invariant, so fetch it once.
def assert_keys_removed(page, *keys)
  return if keys.empty? # original was a no-op without keys; keep that
  doc = page.class.collection.find_one({:_id => page.id})
  assert doc, "Could not find document with _id => #{page.id.inspect}"
  keys.each do |key|
    doc.keys.should_not include(key)
  end
end
context "ClassMethods" do
# Shared setup: the ClassMethods examples below use the standard-key model.
setup do
  @page_class = page_class_with_standard_key
end
# $unset: removing keys from documents selected by criteria or by id.
context "unset" do
  setup do
    @page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
    @page2 = @page_class.create(:title => 'Home')
  end

  should "work with criteria and keys" do
    @page_class.unset({:title => 'Home'}, :title, :tags)
    assert_keys_removed @page, :title, :tags
    assert_keys_removed @page2, :title, :tags
  end

  should "work with ids and keys" do
    @page_class.unset(@page.id, @page2.id, :title, :tags)
    assert_keys_removed @page, :title, :tags
    assert_keys_removed @page2, :title, :tags
  end

  # The trailing options hash is forwarded to the driver's update call.
  context "additional options (upsert & safe)" do
    should "be able to pass upsert option" do
      new_key_value = DateTime.now.to_s
      @page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true})
      @page_class.count(:title => new_key_value).should == 1
      @page_class.first(:title => new_key_value).tags.should == []
    end

    should "be able to pass safe option" do
      @page_class.create(:title => "Better Be Safe than Sorry")
      # Expect safe/multi to reach the underlying Mongo collection update.
      Mongo::Collection.any_instance.expects(:update).with(
        {:title => "Better Be Safe than Sorry"},
        {'$unset' => {:tags => 1}},
        {:safe => true, :multi => true}
      )
      @page_class.unset({:title => "Better Be Safe than Sorry"}, :tags, {:safe => true})
    end

    should "be able to pass both safe and upsert options" do
      new_key_value = DateTime.now.to_s
      @page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true, :safe => true})
      @page_class.count(:title => new_key_value).should == 1
      @page_class.first(:title => new_key_value).tags.should == []
    end
  end
end
# $inc with positive amounts, by criteria or by id(s).
context "increment" do
  setup do
    @page = @page_class.create(:title => 'Home')
    @page2 = @page_class.create(:title => 'Home')
  end

  should "work with criteria and modifier hashes" do
    @page_class.increment({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
    assert_page_counts @page, 1, 2, 3
    assert_page_counts @page2, 1, 2, 3
  end

  should "work with ids and modifier hash" do
    @page_class.increment(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
    assert_page_counts @page, 1, 2, 3
    assert_page_counts @page2, 1, 2, 3
  end

  should "work with ids given as strings" do
    @page_class.increment(@page.id.to_s, @page2.id.to_s, :day_count => 1, :week_count => 2, :month_count => 3)
    assert_page_counts @page, 1, 2, 3
    assert_page_counts @page2, 1, 2, 3
  end
end

# Decrement: amounts are applied as decrements regardless of their sign.
context "decrement" do
  setup do
    @page = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
    @page2 = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
  end

  should "work with criteria and modifier hashes" do
    @page_class.decrement({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
    assert_page_counts @page, 0, 0, 0
    assert_page_counts @page2, 0, 0, 0
  end

  should "work with ids and modifier hash" do
    @page_class.decrement(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
    assert_page_counts @page, 0, 0, 0
    assert_page_counts @page2, 0, 0, 0
  end

  should "decrement with positive or negative numbers" do
    @page_class.decrement(@page.id, @page2.id, :day_count => -1, :week_count => 2, :month_count => -3)
    assert_page_counts @page, 0, 0, 0
    assert_page_counts @page2, 0, 0, 0
  end

  should "work with ids given as strings" do
    @page_class.decrement(@page.id.to_s, @page2.id.to_s, :day_count => -1, :week_count => 2, :month_count => -3)
    assert_page_counts @page, 0, 0, 0
    assert_page_counts @page2, 0, 0, 0
  end
end
# $set: overwriting values by criteria or by id(s), with typecasting of
# defined keys and pass-through of undefined ones.
context "set" do
  setup do
    @page = @page_class.create(:title => 'Home')
    @page2 = @page_class.create(:title => 'Home')
  end

  should "work with criteria and modifier hashes" do
    @page_class.set({:title => 'Home'}, :title => 'Home Revised')
    @page.reload
    @page.title.should == 'Home Revised'
    @page2.reload
    @page2.title.should == 'Home Revised'
  end

  should "work with ids and modifier hash" do
    @page_class.set(@page.id, @page2.id, :title => 'Home Revised')
    @page.reload
    @page.title.should == 'Home Revised'
    @page2.reload
    @page2.title.should == 'Home Revised'
  end

  should "typecast values before querying" do
    @page_class.key :tags, Set
    assert_nothing_raised do
      @page_class.set(@page.id, :tags => ['foo', 'bar'].to_set)
      @page.reload
      @page.tags.should == Set.new(['foo', 'bar'])
    end
  end

  # A Set is not BSON-serializable, so an undefined key gets no typecast
  # and the raw value is rejected by the driver.
  should "not typecast keys that are not defined in document" do
    assert_raises(BSON::InvalidDocument) do
      @page_class.set(@page.id, :colors => ['red', 'green'].to_set)
    end
  end

  should "set keys that are not defined in document" do
    @page_class.set(@page.id, :colors => %w[red green])
    @page.reload
    @page[:colors].should == %w[red green]
  end

  # The trailing options hash is forwarded to the driver's update call.
  context "additional options (upsert & safe)" do
    should "be able to pass upsert option" do
      new_key_value = DateTime.now.to_s
      @page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
      @page_class.count(:title => new_key_value).should == 1
      @page_class.first(:title => new_key_value).day_count.should == 1
    end

    should "be able to pass safe option" do
      @page_class.create(:title => "Better Be Safe than Sorry")
      # Expect safe/multi to reach the underlying Mongo collection update.
      Mongo::Collection.any_instance.expects(:update).with(
        {:title => "Better Be Safe than Sorry"},
        {'$set' => {:title => "I like safety."}},
        {:safe => true, :multi => true}
      )
      @page_class.set({:title => "Better Be Safe than Sorry"}, {:title => "I like safety."}, {:safe => true})
    end

    should "be able to pass both safe and upsert options" do
      new_key_value = DateTime.now.to_s
      @page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
      @page_class.count(:title => new_key_value).should == 1
      @page_class.first(:title => new_key_value).day_count.should == 1
    end
  end
end
# $push: appending a single value to an array key.
context "push" do
  setup do
    @page = @page_class.create(:title => 'Home')
    @page2 = @page_class.create(:title => 'Home')
  end

  should "work with criteria and modifier hashes" do
    @page_class.push({:title => 'Home'}, :tags => 'foo')
    @page.reload
    @page.tags.should == %w(foo)
    @page2.reload
    @page2.tags.should == %w(foo)
  end

  should "work with ids and modifier hash" do
    @page_class.push(@page.id, @page2.id, :tags => 'foo')
    @page.reload
    @page.tags.should == %w(foo)
    @page2.reload
    @page2.tags.should == %w(foo)
  end
end

# $pushAll: appending several values at once.
context "push_all" do
  setup do
    @page = @page_class.create(:title => 'Home')
    @page2 = @page_class.create(:title => 'Home')
    @tags = %w(foo bar)
  end

  should "work with criteria and modifier hashes" do
    @page_class.push_all({:title => 'Home'}, :tags => @tags)
    @page.reload
    @page.tags.should == @tags
    @page2.reload
    @page2.tags.should == @tags
  end

  should "work with ids and modifier hash" do
    @page_class.push_all(@page.id, @page2.id, :tags => @tags)
    @page.reload
    @page.tags.should == @tags
    @page2.reload
    @page2.tags.should == @tags
  end
end

# $pull: removing a single value from an array key.
context "pull" do
  setup do
    @page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
    @page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar))
  end

  should "work with criteria and modifier hashes" do
    @page_class.pull({:title => 'Home'}, :tags => 'foo')
    @page.reload
    @page.tags.should == %w(bar)
    @page2.reload
    @page2.tags.should == %w(bar)
  end

  should "be able to pull with ids and modifier hash" do
    @page_class.pull(@page.id, @page2.id, :tags => 'foo')
    @page.reload
    @page.tags.should == %w(bar)
    @page2.reload
    @page2.tags.should == %w(bar)
  end
end

# $pullAll: removing several values at once.
context "pull_all" do
  setup do
    @page = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
    @page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
  end

  should "work with criteria and modifier hashes" do
    @page_class.pull_all({:title => 'Home'}, :tags => %w(foo bar))
    @page.reload
    @page.tags.should == %w(baz)
    @page2.reload
    @page2.tags.should == %w(baz)
  end

  should "work with ids and modifier hash" do
    @page_class.pull_all(@page.id, @page2.id, :tags => %w(foo bar))
    @page.reload
    @page.tags.should == %w(baz)
    @page2.reload
    @page2.tags.should == %w(baz)
  end
end
context "add_to_set" do
setup do
@page = @page_class.create(:title => 'Home', :tags => 'foo')
@page2 = @page_class.create(:title => 'Home')
end
should "be able to add to set with criteria and modifier hash" do
@page_class.add_to_set({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "be able to add to set with ids and modifier hash" do
@page_class.add_to_set(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
context "push_uniq" do
setup do
@page = @page_class.create(:title => 'Home', :tags => 'foo')
@page2 = @page_class.create(:title => 'Home')
end
should "be able to push uniq with criteria and modifier hash" do
@page_class.push_uniq({:title => 'Home'}, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
should "be able to push uniq with ids and modifier hash" do
@page_class.push_uniq(@page.id, @page2.id, :tags => 'foo')
@page.reload
@page.tags.should == %w(foo)
@page2.reload
@page2.tags.should == %w(foo)
end
end
# Class-level $pop: 1 pops from the tail, -1 pops from the head.
context "pop" do
  setup do
    @page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
  end

  # NOTE(review): description fixed — it previously read
  # "remove the last element the array" (missing "of").
  should "be able to remove the last element of the array" do
    @page_class.pop(@page.id, :tags => 1)
    @page.reload
    @page.tags.should == %w(foo)
  end

  should "be able to remove the first element of the array" do
    @page_class.pop(@page.id, :tags => -1)
    @page.reload
    @page.tags.should == %w(bar)
  end
end
# Option pass-through for class-level increment ($inc).
context "additional options (upsert & safe)" do
  should "be able to pass upsert option" do
    # Timestamp title guarantees no existing match, so the upsert must
    # create the document with the incremented counter.
    new_key_value = DateTime.now.to_s
    @page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
    @page_class.count(:title => new_key_value).should == 1
    @page_class.first(:title => new_key_value).day_count.should == 1
  end

  should "be able to pass safe option" do
    @page_class.create(:title => "Better Be Safe than Sorry")
    # We are trying to increment a key of type string here which should fail
    # — with :safe => true the server error surfaces as an exception.
    assert_raises(Mongo::OperationFailure) do
      @page_class.increment({:title => "Better Be Safe than Sorry"}, {:title => 1}, {:safe => true})
    end
  end

  should "be able to pass both safe and upsert options" do
    new_key_value = DateTime.now.to_s
    @page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
    @page_class.count(:title => new_key_value).should == 1
    @page_class.first(:title => new_key_value).day_count.should == 1
  end
end
end
# The same modifier operations exercised through instance methods, run
# once per id scheme: standard ObjectId _id and compound
# (BSON::OrderedHash) _id.
context "instance methods" do
  {
    :page_class_with_standard_key => "with standard key",
    :page_class_with_compound_key => "with compound key",
  }.each do |page_class, description|
    context description do
      setup do
        # Resolve the attr_reader for this id scheme.
        @page_class = send(page_class)
      end

      should "be able to unset with keys" do
        page = @page_class.create(:title => 'Foo', :tags => %w(foo))
        page.unset(:title, :tags)
        assert_keys_removed page, :title, :tags
      end

      should "be able to increment with modifier hashes" do
        page = @page_class.create
        page.increment(:day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts page, 1, 2, 3
      end

      should "be able to decrement with modifier hashes" do
        page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
        page.decrement(:day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts page, 0, 0, 0
      end

      # Signs are normalized: decrement always subtracts the magnitude.
      should "always decrement when decrement is called whether number is positive or negative" do
        page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
        page.decrement(:day_count => -1, :week_count => 2, :month_count => -3)
        assert_page_counts page, 0, 0, 0
      end

      should "be able to set with modifier hashes" do
        page = @page_class.create(:title => 'Home')
        page.set(:title => 'Home Revised')
        page.reload
        page.title.should == 'Home Revised'
      end

      should "be able to push with modifier hashes" do
        page = @page_class.create
        page.push(:tags => 'foo')
        page.reload
        page.tags.should == %w(foo)
      end

      should "be able to push_all with modifier hashes" do
        page = @page_class.create
        page.push_all(:tags => %w(foo bar))
        page.reload
        page.tags.should == %w(foo bar)
      end

      should "be able to pull with criteria and modifier hashes" do
        page = @page_class.create(:tags => %w(foo bar))
        page.pull(:tags => 'foo')
        page.reload
        page.tags.should == %w(bar)
      end

      should "be able to pull_all with criteria and modifier hashes" do
        page = @page_class.create(:tags => %w(foo bar baz))
        page.pull_all(:tags => %w(foo bar))
        page.reload
        page.tags.should == %w(baz)
      end

      # One document already has the tag, the other does not; both must
      # end up with exactly one copy.
      should "be able to add_to_set with criteria and modifier hash" do
        page = @page_class.create(:tags => 'foo')
        page2 = @page_class.create
        page.add_to_set(:tags => 'foo')
        page2.add_to_set(:tags => 'foo')
        page.reload
        page.tags.should == %w(foo)
        page2.reload
        page2.tags.should == %w(foo)
      end

      should "be able to push uniq with criteria and modifier hash" do
        page = @page_class.create(:tags => 'foo')
        page2 = @page_class.create
        page.push_uniq(:tags => 'foo')
        page2.push_uniq(:tags => 'foo')
        page.reload
        page.tags.should == %w(foo)
        page2.reload
        page2.tags.should == %w(foo)
      end

      should "be able to pop with modifier hashes" do
        page = @page_class.create(:tags => %w(foo bar))
        page.pop(:tags => 1)
        page.reload
        page.tags.should == %w(foo)
      end

      should "be able to pass upsert option" do
        page = @page_class.create(:title => "Upsert Page")
        page.increment({:new_count => 1}, {:upsert => true})
        page.reload
        page.new_count.should == 1
      end

      should "be able to pass safe option" do
        page = @page_class.create(:title => "Safe Page")
        # We are trying to increment a key of type string here which should fail
        assert_raises(Mongo::OperationFailure) do
          page.increment({:title => 1}, {:safe => true})
        end
      end

      should "be able to pass upsert and safe options" do
        page = @page_class.create(:title => "Upsert and Safe Page")
        page.increment({:another_count => 1}, {:upsert => true, :safe => true})
        page.reload
        page.another_count.should == 1
      end
    end
  end
end
end
Correctly define _id as a BSON ordered hash.
The modifier tests were failing because in #attributes, _id was being converted to a hash with indifferent access instead of staying a BSON ordered hash.
require 'test_helper'
# Exercises MongoMapper's atomic modifier API ($inc/$dec/$set/$unset/
# $push/$pushAll/$pull/$pullAll/$addToSet/$pop) at both class and
# instance level, against documents keyed by a standard ObjectId and by
# a compound BSON::OrderedHash _id (the regression this file guards:
# _id must stay a BSON ordered hash, not become an indifferent-access
# hash).
class ModifierTest < Test::Unit::TestCase
  attr_reader :page_class_with_compound_key, :page_class_with_standard_key

  def setup
    # Single fixed compound key reused by every compound-key document;
    # BSON::OrderedHash keeps field order stable on the wire.
    compound_key = BSON::OrderedHash['n', 42, 'i', BSON::ObjectId.new]

    @page_class_with_compound_key = Doc do
      key :_id, BSON::OrderedHash, :default => lambda { compound_key }
      key :title, String
      key :day_count, Integer, :default => 0
      key :week_count, Integer, :default => 0
      key :month_count, Integer, :default => 0
      key :tags, Array
    end

    @page_class_with_standard_key = Doc do
      key :title, String
      key :day_count, Integer, :default => 0
      key :week_count, Integer, :default => 0
      key :month_count, Integer, :default => 0
      key :tags, Array
    end
  end

  # Reads the raw document through the driver (bypassing MongoMapper's
  # typecasting) and checks the three counters.
  def assert_page_counts(page, day_count, week_count, month_count)
    doc = page.collection.find_one({:_id => page.id})
    assert doc, "Could not find document with _id => #{page.id.inspect}"
    doc.fetch('day_count').should == day_count
    doc.fetch('week_count').should == week_count
    doc.fetch('month_count').should == month_count
  end

  # Checks that $unset really removed each key from the raw document.
  def assert_keys_removed(page, *keys)
    keys.each do |key|
      doc = page.class.collection.find_one({:_id => page.id})
      assert doc, "Could not find document with _id => #{page.id.inspect}"
      doc.keys.should_not include(key)
    end
  end

  context "ClassMethods" do
    setup do
      @page_class = page_class_with_standard_key
    end

    context "unset" do
      setup do
        @page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
        @page2 = @page_class.create(:title => 'Home')
      end

      should "work with criteria and keys" do
        @page_class.unset({:title => 'Home'}, :title, :tags)
        assert_keys_removed @page, :title, :tags
        assert_keys_removed @page2, :title, :tags
      end

      should "work with ids and keys" do
        @page_class.unset(@page.id, @page2.id, :title, :tags)
        assert_keys_removed @page, :title, :tags
        assert_keys_removed @page2, :title, :tags
      end

      context "additional options (upsert & safe)" do
        should "be able to pass upsert option" do
          # Timestamp title guarantees no match, so the upsert must
          # create the document.
          new_key_value = DateTime.now.to_s
          @page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true})
          @page_class.count(:title => new_key_value).should == 1
          @page_class.first(:title => new_key_value).tags.should == []
        end

        should "be able to pass safe option" do
          @page_class.create(:title => "Better Be Safe than Sorry")
          # Mock the driver call to prove :safe => true is forwarded
          # along with the implicit :multi => true.
          Mongo::Collection.any_instance.expects(:update).with(
            {:title => "Better Be Safe than Sorry"},
            {'$unset' => {:tags => 1}},
            {:safe => true, :multi => true}
          )
          @page_class.unset({:title => "Better Be Safe than Sorry"}, :tags, {:safe => true})
        end

        should "be able to pass both safe and upsert options" do
          new_key_value = DateTime.now.to_s
          @page_class.unset({:title => new_key_value, :tags => %w(foo bar)}, :tags, {:upsert => true, :safe => true})
          @page_class.count(:title => new_key_value).should == 1
          @page_class.first(:title => new_key_value).tags.should == []
        end
      end
    end

    context "increment" do
      setup do
        @page = @page_class.create(:title => 'Home')
        @page2 = @page_class.create(:title => 'Home')
      end

      should "work with criteria and modifier hashes" do
        @page_class.increment({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts @page, 1, 2, 3
        assert_page_counts @page2, 1, 2, 3
      end

      should "work with ids and modifier hash" do
        @page_class.increment(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts @page, 1, 2, 3
        assert_page_counts @page2, 1, 2, 3
      end

      should "work with ids given as strings" do
        @page_class.increment(@page.id.to_s, @page2.id.to_s, :day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts @page, 1, 2, 3
        assert_page_counts @page2, 1, 2, 3
      end
    end

    context "decrement" do
      setup do
        @page = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
        @page2 = @page_class.create(:title => 'Home', :day_count => 1, :week_count => 2, :month_count => 3)
      end

      should "work with criteria and modifier hashes" do
        @page_class.decrement({:title => 'Home'}, :day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts @page, 0, 0, 0
        assert_page_counts @page2, 0, 0, 0
      end

      should "work with ids and modifier hash" do
        @page_class.decrement(@page.id, @page2.id, :day_count => 1, :week_count => 2, :month_count => 3)
        assert_page_counts @page, 0, 0, 0
        assert_page_counts @page2, 0, 0, 0
      end

      # Signs are normalized: decrement always subtracts the magnitude.
      should "decrement with positive or negative numbers" do
        @page_class.decrement(@page.id, @page2.id, :day_count => -1, :week_count => 2, :month_count => -3)
        assert_page_counts @page, 0, 0, 0
        assert_page_counts @page2, 0, 0, 0
      end

      should "work with ids given as strings" do
        @page_class.decrement(@page.id.to_s, @page2.id.to_s, :day_count => -1, :week_count => 2, :month_count => -3)
        assert_page_counts @page, 0, 0, 0
        assert_page_counts @page2, 0, 0, 0
      end
    end

    context "set" do
      setup do
        @page = @page_class.create(:title => 'Home')
        @page2 = @page_class.create(:title => 'Home')
      end

      should "work with criteria and modifier hashes" do
        @page_class.set({:title => 'Home'}, :title => 'Home Revised')
        @page.reload
        @page.title.should == 'Home Revised'
        @page2.reload
        @page2.title.should == 'Home Revised'
      end

      should "work with ids and modifier hash" do
        @page_class.set(@page.id, @page2.id, :title => 'Home Revised')
        @page.reload
        @page.title.should == 'Home Revised'
        @page2.reload
        @page2.title.should == 'Home Revised'
      end

      # Values are typecast to the declared key type before the update
      # is sent, so a Set value must not blow up BSON serialization.
      should "typecast values before querying" do
        @page_class.key :tags, Set
        assert_nothing_raised do
          @page_class.set(@page.id, :tags => ['foo', 'bar'].to_set)
          @page.reload
          @page.tags.should == Set.new(['foo', 'bar'])
        end
      end

      # Undeclared keys get no typecasting, so the raw Set reaches the
      # driver and BSON rejects it.
      should "not typecast keys that are not defined in document" do
        assert_raises(BSON::InvalidDocument) do
          @page_class.set(@page.id, :colors => ['red', 'green'].to_set)
        end
      end

      should "set keys that are not defined in document" do
        @page_class.set(@page.id, :colors => %w[red green])
        @page.reload
        @page[:colors].should == %w[red green]
      end

      context "additional options (upsert & safe)" do
        should "be able to pass upsert option" do
          new_key_value = DateTime.now.to_s
          @page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
          @page_class.count(:title => new_key_value).should == 1
          @page_class.first(:title => new_key_value).day_count.should == 1
        end

        should "be able to pass safe option" do
          @page_class.create(:title => "Better Be Safe than Sorry")
          Mongo::Collection.any_instance.expects(:update).with(
            {:title => "Better Be Safe than Sorry"},
            {'$set' => {:title => "I like safety."}},
            {:safe => true, :multi => true}
          )
          @page_class.set({:title => "Better Be Safe than Sorry"}, {:title => "I like safety."}, {:safe => true})
        end

        should "be able to pass both safe and upsert options" do
          new_key_value = DateTime.now.to_s
          @page_class.set({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
          @page_class.count(:title => new_key_value).should == 1
          @page_class.first(:title => new_key_value).day_count.should == 1
        end
      end
    end

    context "push" do
      setup do
        @page = @page_class.create(:title => 'Home')
        @page2 = @page_class.create(:title => 'Home')
      end

      should "work with criteria and modifier hashes" do
        @page_class.push({:title => 'Home'}, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(foo)
        @page2.reload
        @page2.tags.should == %w(foo)
      end

      should "work with ids and modifier hash" do
        @page_class.push(@page.id, @page2.id, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(foo)
        @page2.reload
        @page2.tags.should == %w(foo)
      end
    end

    context "push_all" do
      setup do
        @page = @page_class.create(:title => 'Home')
        @page2 = @page_class.create(:title => 'Home')
        @tags = %w(foo bar)
      end

      should "work with criteria and modifier hashes" do
        @page_class.push_all({:title => 'Home'}, :tags => @tags)
        @page.reload
        @page.tags.should == @tags
        @page2.reload
        @page2.tags.should == @tags
      end

      should "work with ids and modifier hash" do
        @page_class.push_all(@page.id, @page2.id, :tags => @tags)
        @page.reload
        @page.tags.should == @tags
        @page2.reload
        @page2.tags.should == @tags
      end
    end

    context "pull" do
      setup do
        @page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
        @page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar))
      end

      should "work with criteria and modifier hashes" do
        @page_class.pull({:title => 'Home'}, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(bar)
        @page2.reload
        @page2.tags.should == %w(bar)
      end

      should "be able to pull with ids and modifier hash" do
        @page_class.pull(@page.id, @page2.id, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(bar)
        @page2.reload
        @page2.tags.should == %w(bar)
      end
    end

    context "pull_all" do
      setup do
        @page = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
        @page2 = @page_class.create(:title => 'Home', :tags => %w(foo bar baz))
      end

      should "work with criteria and modifier hashes" do
        @page_class.pull_all({:title => 'Home'}, :tags => %w(foo bar))
        @page.reload
        @page.tags.should == %w(baz)
        @page2.reload
        @page2.tags.should == %w(baz)
      end

      should "work with ids and modifier hash" do
        @page_class.pull_all(@page.id, @page2.id, :tags => %w(foo bar))
        @page.reload
        @page.tags.should == %w(baz)
        @page2.reload
        @page2.tags.should == %w(baz)
      end
    end

    context "add_to_set" do
      setup do
        # @page already carries the tag, @page2 does not; $addToSet must
        # leave exactly one copy either way.
        @page = @page_class.create(:title => 'Home', :tags => 'foo')
        @page2 = @page_class.create(:title => 'Home')
      end

      should "be able to add to set with criteria and modifier hash" do
        @page_class.add_to_set({:title => 'Home'}, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(foo)
        @page2.reload
        @page2.tags.should == %w(foo)
      end

      should "be able to add to set with ids and modifier hash" do
        @page_class.add_to_set(@page.id, @page2.id, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(foo)
        @page2.reload
        @page2.tags.should == %w(foo)
      end
    end

    context "push_uniq" do
      setup do
        @page = @page_class.create(:title => 'Home', :tags => 'foo')
        @page2 = @page_class.create(:title => 'Home')
      end

      should "be able to push uniq with criteria and modifier hash" do
        @page_class.push_uniq({:title => 'Home'}, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(foo)
        @page2.reload
        @page2.tags.should == %w(foo)
      end

      should "be able to push uniq with ids and modifier hash" do
        @page_class.push_uniq(@page.id, @page2.id, :tags => 'foo')
        @page.reload
        @page.tags.should == %w(foo)
        @page2.reload
        @page2.tags.should == %w(foo)
      end
    end

    context "pop" do
      setup do
        @page = @page_class.create(:title => 'Home', :tags => %w(foo bar))
      end

      # NOTE(review): description fixed — it previously read
      # "remove the last element the array" (missing "of").
      should "be able to remove the last element of the array" do
        @page_class.pop(@page.id, :tags => 1)
        @page.reload
        @page.tags.should == %w(foo)
      end

      should "be able to remove the first element of the array" do
        @page_class.pop(@page.id, :tags => -1)
        @page.reload
        @page.tags.should == %w(bar)
      end
    end

    # Option pass-through for class-level increment ($inc).
    context "additional options (upsert & safe)" do
      should "be able to pass upsert option" do
        new_key_value = DateTime.now.to_s
        @page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true})
        @page_class.count(:title => new_key_value).should == 1
        @page_class.first(:title => new_key_value).day_count.should == 1
      end

      should "be able to pass safe option" do
        @page_class.create(:title => "Better Be Safe than Sorry")
        # We are trying to increment a key of type string here which should fail
        assert_raises(Mongo::OperationFailure) do
          @page_class.increment({:title => "Better Be Safe than Sorry"}, {:title => 1}, {:safe => true})
        end
      end

      should "be able to pass both safe and upsert options" do
        new_key_value = DateTime.now.to_s
        @page_class.increment({:title => new_key_value}, {:day_count => 1}, {:upsert => true, :safe => true})
        @page_class.count(:title => new_key_value).should == 1
        @page_class.first(:title => new_key_value).day_count.should == 1
      end
    end
  end

  # The same modifier operations exercised through instance methods, run
  # once per id scheme (standard ObjectId and compound ordered-hash _id).
  context "instance methods" do
    {
      :page_class_with_standard_key => "with standard key",
      :page_class_with_compound_key => "with compound key",
    }.each do |page_class, description|
      context description do
        setup do
          @page_class = send(page_class)
        end

        should "be able to unset with keys" do
          page = @page_class.create(:title => 'Foo', :tags => %w(foo))
          page.unset(:title, :tags)
          assert_keys_removed page, :title, :tags
        end

        should "be able to increment with modifier hashes" do
          page = @page_class.create
          page.increment(:day_count => 1, :week_count => 2, :month_count => 3)
          assert_page_counts page, 1, 2, 3
        end

        should "be able to decrement with modifier hashes" do
          page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
          page.decrement(:day_count => 1, :week_count => 2, :month_count => 3)
          assert_page_counts page, 0, 0, 0
        end

        should "always decrement when decrement is called whether number is positive or negative" do
          page = @page_class.create(:day_count => 1, :week_count => 2, :month_count => 3)
          page.decrement(:day_count => -1, :week_count => 2, :month_count => -3)
          assert_page_counts page, 0, 0, 0
        end

        should "be able to set with modifier hashes" do
          page = @page_class.create(:title => 'Home')
          page.set(:title => 'Home Revised')
          page.reload
          page.title.should == 'Home Revised'
        end

        should "be able to push with modifier hashes" do
          page = @page_class.create
          page.push(:tags => 'foo')
          page.reload
          page.tags.should == %w(foo)
        end

        should "be able to push_all with modifier hashes" do
          page = @page_class.create
          page.push_all(:tags => %w(foo bar))
          page.reload
          page.tags.should == %w(foo bar)
        end

        should "be able to pull with criteria and modifier hashes" do
          page = @page_class.create(:tags => %w(foo bar))
          page.pull(:tags => 'foo')
          page.reload
          page.tags.should == %w(bar)
        end

        should "be able to pull_all with criteria and modifier hashes" do
          page = @page_class.create(:tags => %w(foo bar baz))
          page.pull_all(:tags => %w(foo bar))
          page.reload
          page.tags.should == %w(baz)
        end

        should "be able to add_to_set with criteria and modifier hash" do
          page = @page_class.create(:tags => 'foo')
          page2 = @page_class.create
          page.add_to_set(:tags => 'foo')
          page2.add_to_set(:tags => 'foo')
          page.reload
          page.tags.should == %w(foo)
          page2.reload
          page2.tags.should == %w(foo)
        end

        should "be able to push uniq with criteria and modifier hash" do
          page = @page_class.create(:tags => 'foo')
          page2 = @page_class.create
          page.push_uniq(:tags => 'foo')
          page2.push_uniq(:tags => 'foo')
          page.reload
          page.tags.should == %w(foo)
          page2.reload
          page2.tags.should == %w(foo)
        end

        should "be able to pop with modifier hashes" do
          page = @page_class.create(:tags => %w(foo bar))
          page.pop(:tags => 1)
          page.reload
          page.tags.should == %w(foo)
        end

        should "be able to pass upsert option" do
          page = @page_class.create(:title => "Upsert Page")
          page.increment({:new_count => 1}, {:upsert => true})
          page.reload
          page.new_count.should == 1
        end

        should "be able to pass safe option" do
          page = @page_class.create(:title => "Safe Page")
          # We are trying to increment a key of type string here which should fail
          assert_raises(Mongo::OperationFailure) do
            page.increment({:title => 1}, {:safe => true})
          end
        end

        should "be able to pass upsert and safe options" do
          page = @page_class.create(:title => "Upsert and Safe Page")
          page.increment({:another_count => 1}, {:upsert => true, :safe => true})
          page.reload
          page.another_count.should == 1
        end
      end
    end
  end
end
|
require_relative "../test_helper"
# Unit tests for ProductsBlock: listing an enterprise's products, the
# "View all products" footer, default random selection of 4 products,
# explicit product_ids, and image rendering states.
class ProductsBlockTest < ActiveSupport::TestCase
  def setup
    @block = ProductsBlock.new
    @product_category = fast_create(ProductCategory, :name => 'Products')
  end
  attr_reader :block

  # NOTE(review): description fixed — it previously read the
  # ungrammatical 'be inherit from block'.
  should 'inherit from block' do
    assert_kind_of Block, block
  end

  should 'provide default title' do
    assert_not_equal Block.new.default_title, ProductsBlock.new.default_title
  end

  should 'provide default description' do
    assert_not_equal Block.description, ProductsBlock.description
  end

  should "list owner products" do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)

    block.expects(:products).returns(enterprise.products)
    content = block.content

    assert_tag_in_string content, :content => 'Products'
    assert_tag_in_string content, :tag => 'li', :attributes => { :class => 'product' }, :descendant => { :tag => 'a', :content => /product one/ }
    assert_tag_in_string content, :tag => 'li', :attributes => { :class => 'product' }, :descendant => { :tag => 'a', :content => /product two/ }
  end

  should 'point to all products in footer' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)

    block.stubs(:owner).returns(enterprise)
    footer = block.footer

    assert_tag_in_string footer, :tag => 'a', :attributes => { :href => /\/catalog\/testenterprise$/ }, :content => 'View all products'
  end

  should 'list 4 random products by default' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product four', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product five', :product_category => @product_category)

    block.stubs(:owner).returns(enterprise)
    assert_equal 4, block.products.size
  end

  should 'list all products if less than 4 by default' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)

    block.stubs(:owner).returns(enterprise)
    assert_equal 3, block.products.size
  end

  should 'be able to set product_ids and have them listed' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    p1 = create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    p2 = create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
    p3 = create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
    p4 = create(Product, :enterprise => enterprise, :name => 'product four', :product_category => @product_category)
    p5 = create(Product, :enterprise => enterprise, :name => 'product five', :product_category => @product_category)

    block.stubs(:owner).returns(enterprise)
    block.product_ids = [p1, p3, p5].map(&:id)
    assert_equivalent [p1, p3, p5], block.products
  end

  should 'save product_ids' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    p1 = create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    p2 = create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)

    block = ProductsBlock.new
    enterprise.boxes.first.blocks << block
    block.product_ids = [p1.id, p2.id]
    block.save!

    assert_equal [p1.id, p2.id], ProductsBlock.find(block.id).product_ids
  end

  should 'accept strings in product_ids but store integers' do
    block = ProductsBlock.new
    block.product_ids = [ '1', '2']
    assert_equal [1, 2], block.product_ids
  end

  should 'not repeat products' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    p1 = create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    p2 = create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
    p3 = create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
    p4 = create(Product, :enterprise => enterprise, :name => 'product four', :product_category => @product_category)

    block = ProductsBlock.new
    enterprise.boxes.first.blocks << block
    block.save!

    4.times do # to keep a minimal chance of false positive, its random after all
      assert_equivalent [p1, p2, p3, p4], block.products
    end
  end

  should 'generate footer when enterprise has own hostname' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    # One statement per line (was semicolon-chained on a single line).
    enterprise.domains << Domain.new(:name => 'sometest.com')
    enterprise.save!
    create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)

    block.stubs(:owner).returns(enterprise)
    footer = block.footer

    assert_tag_in_string footer, :tag => 'a', :attributes => { :href => /\/catalog\/testenterprise$/ }, :content => 'View all products'
  end

  should 'display the default minor image if thumbnails were not processed' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product', :product_category => @product_category, :image_builder => { :uploaded_data => fixture_file_upload('/files/rails.png', 'image/png')})

    block.expects(:products).returns(enterprise.products)
    content = block.content

    assert_tag_in_string content, :tag => 'a', :attributes => { :style => /image-loading-minor.png/ }
  end

  should 'display the thumbnail image if thumbnails were processed' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product', :product_category => @product_category, :image_builder => { :uploaded_data => fixture_file_upload('/files/rails.png', 'image/png')})

    # Run the queued jobs so the thumbnail variants actually exist.
    process_delayed_job_queue
    block.expects(:products).returns(enterprise.products.reload)
    content = block.content

    assert_tag_in_string content, :tag => 'a', :attributes => { :style => /rails_minor.png/ }
  end
end
Fix ProductsBlock unit tests and view
Since the content method was removed, the tests now have to be turned
into view tests that use the BoxesHelper method and properly stub the
other helpers.
require_relative "../test_helper"
class ProductsBlockTest < ActiveSupport::TestCase
# Shared fixtures: a fresh block plus the product category every
# product below is created under.
def setup
  @block = ProductsBlock.new
  @product_category = fast_create(ProductCategory, :name => 'Products')
end
attr_reader :block

# NOTE(review): description reads 'be inherit from block' —
# ungrammatical; consider renaming to 'inherit from block'.
should 'be inherit from block' do
  assert_kind_of Block, block
end

should 'provide default title' do
  assert_not_equal Block.new.default_title, ProductsBlock.new.default_title
end

should 'provide default description' do
  assert_not_equal Block.description, ProductsBlock.description
end

should 'point to all products in footer' do
  enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
  create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
  create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
  block.stubs(:owner).returns(enterprise)
  footer = block.footer
  assert_tag_in_string footer, :tag => 'a', :attributes => { :href => /\/catalog\/testenterprise$/ }, :content => 'View all products'
end

should 'list 4 random products by default' do
  enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
  create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
  create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
  create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
  create(Product, :enterprise => enterprise, :name => 'product four', :product_category => @product_category)
  create(Product, :enterprise => enterprise, :name => 'product five', :product_category => @product_category)
  block.stubs(:owner).returns(enterprise)
  assert_equal 4, block.products.size
end
should 'list all products if less than 4 by default' do
enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
block.stubs(:owner).returns(enterprise)
assert_equal 3, block.products.size
end
should 'be able to set product_ids and have them listed' do
enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
p1 = create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
p2 = create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
p3 = create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
p4 = create(Product, :enterprise => enterprise, :name => 'product four', :product_category => @product_category)
p5 = create(Product, :enterprise => enterprise, :name => 'product five', :product_category => @product_category)
block.stubs(:owner).returns(enterprise)
block.product_ids = [p1, p3, p5].map(&:id)
assert_equivalent [p1, p3, p5], block.products
end
should 'save product_ids' do
enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
p1 = create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
p2 = create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
block = ProductsBlock.new
enterprise.boxes.first.blocks << block
block.product_ids = [p1.id, p2.id]
block.save!
assert_equal [p1.id, p2.id], ProductsBlock.find(block.id).product_ids
end
should 'accept strings in product_ids but store integers' do
block = ProductsBlock.new
block.product_ids = [ '1', '2']
assert_equal [1, 2], block.product_ids
end
should 'not repeat products' do
enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
p1 = create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
p2 = create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
p3 = create(Product, :enterprise => enterprise, :name => 'product three', :product_category => @product_category)
p4 = create(Product, :enterprise => enterprise, :name => 'product four', :product_category => @product_category)
block = ProductsBlock.new
enterprise.boxes.first.blocks << block
block.save!
4.times do # to keep a minimal chance of false positive, its random after all
assert_equivalent [p1, p2, p3, p4], block.products
end
end
should 'generate footer when enterprise has own hostname' do
enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
enterprise.domains << Domain.new(:name => 'sometest.com'); enterprise.save!
create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
block.stubs(:owner).returns(enterprise)
footer = block.footer
assert_tag_in_string footer, :tag => 'a', :attributes => { :href => /\/catalog\/testenterprise$/ }, :content => 'View all products'
end
end
require 'boxes_helper'
require 'block_helper'

# View tests for ProductsBlock rendering: uses BoxesHelper#render_block_content
# in place of the removed Block#content method.
class ProductsBlockViewTest < ActionView::TestCase
  include BoxesHelper
  # Block partials call helpers from BlockHelper; mix them into the view
  # instances used for rendering.
  ActionView::Base.send :include, BlockHelper

  def setup
    @block = ProductsBlock.new
    @product_category = fast_create(ProductCategory, :name => 'Products')
  end

  attr_reader :block

  should "list owner products" do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product one', :product_category => @product_category)
    create(Product, :enterprise => enterprise, :name => 'product two', :product_category => @product_category)
    block.expects(:products).returns(enterprise.products)
    content = render_block_content(block)
    assert_tag_in_string content, :content => 'Products'
    # Each product is rendered as a linked list item.
    assert_tag_in_string content, :tag => 'li', :attributes => { :class => 'product' }, :descendant => { :tag => 'a', :content => /product one/ }
    assert_tag_in_string content, :tag => 'li', :attributes => { :class => 'product' }, :descendant => { :tag => 'a', :content => /product two/ }
  end

  should 'display the default minor image if thumbnails were not processed' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product', :product_category => @product_category, :image_builder => { :uploaded_data => fixture_file_upload('/files/rails.png', 'image/png')})
    block.expects(:products).returns(enterprise.products)
    # block_title is stubbed out so the assertions only concern product markup.
    ActionView::Base.any_instance.stubs(:block_title).returns("")
    content = render_block_content(block)
    # Before the delayed job runs, the loading placeholder is shown.
    assert_tag_in_string content, :tag => 'a', :attributes => { :style => /image-loading-minor.png/ }
  end

  should 'display the thumbnail image if thumbnails were processed' do
    enterprise = create(Enterprise, :name => 'testenterprise', :identifier => 'testenterprise')
    create(Product, :enterprise => enterprise, :name => 'product', :product_category => @product_category, :image_builder => { :uploaded_data => fixture_file_upload('/files/rails.png', 'image/png')})
    # Run the background jobs so thumbnails exist before rendering.
    process_delayed_job_queue
    block.expects(:products).returns(enterprise.products.reload)
    ActionView::Base.any_instance.stubs(:block_title).returns("")
    content = render_block_content(block)
    assert_tag_in_string content, :tag => 'a', :attributes => { :style => /rails_minor.png/ }
  end
end
|
# This is very largely based on @qrush's M, but there are many modifications.

# we need to load all dependencies up front, because bundler will
# remove us from the load path soon.
require "rubygems"
require "zeus/m/test_collection"
require "zeus/m/test_method"

# the Gemfile may specify a version of method_source, but we also want to require it here.
# To avoid possible "you've activated X; gemfile specifies Y" errors, we actually scan
# Gemfile.lock for a specific version, and require exactly that version if present.
gemfile_lock = ROOT_PATH + "/Gemfile.lock"
# FIX: File.exists? is a long-deprecated alias removed in Ruby 3.2 — use File.exist?.
if File.exist?(gemfile_lock)
  # FIX: reuse the gemfile_lock path computed above instead of rebuilding the
  # identical string a second time.
  version = File.read(gemfile_lock).
    scan(/\bmethod_source\s*\(([\d\.]+)\)/).flatten[0]
  gem "method_source", version if version
end
require 'method_source'
module Zeus
  #`m` stands for metal, which is a better test/unit test runner that can run
  #tests by line number.
  #
  #[](http://travis-ci.org/qrush/m)
  #
  #
  #
  #<sub>[Rush at the Bristol Colston Hall May 1979](http://www.flickr.com/photos/8507625@N02/3468299995/)</sub>
  ### Install
  #
  ### Usage
  #
  #Basically, I was sick of using the `-n` flag to grab one test to run. Instead, I
  #prefer how RSpec's test runner allows tests to be run by line number.
  #
  #Given this file:
  #
  # $ cat -n test/example_test.rb
  # 1 require 'test/unit'
  # 2
  # 3 class ExampleTest < Test::Unit::TestCase
  # 4 def test_apple
  # 5 assert_equal 1, 1
  # 6 end
  # 7
  # 8 def test_banana
  # 9 assert_equal 1, 1
  # 10 end
  # 11 end
  #
  #You can run a test by line number, using format `m TEST_FILE:LINE_NUMBER_OF_TEST`:
  #
  # $ m test/example_test.rb:4
  # Run options: -n /test_apple/
  #
  # # Running tests:
  #
  # .
  #
  # Finished tests in 0.000525s, 1904.7619 tests/s, 1904.7619 assertions/s.
  #
  # 1 tests, 1 assertions, 0 failures, 0 errors, 0 skips
  #
  #Hit the wrong line number? No problem, `m` helps you out:
  #
  # $ m test/example_test.rb:2
  # No tests found on line 2. Valid tests to run:
  #
  # test_apple: m test/examples/test_unit_example_test.rb:4
  # test_banana: m test/examples/test_unit_example_test.rb:8
  #
  #Want to run the whole test? Just leave off the line number.
  #
  # $ m test/example_test.rb
  # Run options:
  #
  # # Running tests:
  #
  # ..
  #
  # Finished tests in 0.001293s, 1546.7904 tests/s, 3093.5808 assertions/s.
  #
  # 1 tests, 2 assertions, 0 failures, 0 errors, 0 skips
  #
  #### Supports
  #
  #`m` works with a few Ruby test frameworks:
  #
  #* `Test::Unit`
  #* `ActiveSupport::TestCase`
  #* `MiniTest::Unit::TestCase`
  #
  ### License
  #
  #This gem is MIT licensed, please see `LICENSE` for more information.

  ### M, your metal test runner
  # Maybe this gem should have a longer name? Metal?
  module M
    VERSION = "1.2.1" unless defined?(VERSION)

    # Accept arguments coming from bin/m and run tests.
    def self.run(argv)
      Runner.new(argv).run
    end

    ### Runner is in charge of running your tests.
    # Instead of slamming all of this junk in an `M` class, it's here instead.
    class Runner
      # argv: the raw command-line arguments from bin/m.
      def initialize(argv)
        @argv = argv
      end

      # There's two steps to running our tests:
      # 1. Parsing the given input for the tests we need to find (or groups of tests)
      # 2. Run those tests we found that match what you wanted
      def run
        parse
        execute
      end

      private

      def parse
        # With no arguments,
        if @argv.empty?
          # Just shell out to `rake test`.
          require 'rake'
          Rake::Task['test'].invoke
          exit
        else
          parse_options! @argv
          # Parse out ARGV, it should be coming in in a format like `test/test_file.rb:9`
          _, line = @argv.first.split(':')
          # An explicit --line flag wins: parse_options! runs first and ||=
          # keeps any value it already set.
          @line ||= line.nil? ? nil : line.to_i

          @files = []
          @argv.each do |arg|
            add_file(arg)
          end
        end
      end

      # Expand one CLI argument — a `file[:line]` spec, a glob, or a directory —
      # into concrete test file paths appended to @files.  Aborts if nothing
      # matches.
      def add_file(arg)
        file = arg.split(':').first
        if Dir.exist?(file)
          files = Dir.glob("#{file}/**/*test*.rb")
          @files.concat(files)
        else
          files = Dir.glob(file)
          files == [] and abort "Couldn't find test file '#{file}'!"
          @files.concat(files)
        end
      end

      def parse_options!(argv)
        require 'optparse'
        OptionParser.new do |opts|
          opts.banner = 'Options:'
          opts.version = M::VERSION

          opts.on '-h', '--help', 'Display this help.' do
            puts "Usage: m [OPTIONS] [FILES]\n\n", opts
            exit
          end

          opts.on '--version', 'Display the version.' do
            puts "m #{M::VERSION}"
            exit
          end

          opts.on '-l', '--line LINE', Integer, 'Line number for file.' do |line|
            @line = line
          end

          opts.on '-n', '--name NAME', String, 'Name or pattern for test methods to run.' do |name|
            # A /.../-delimited NAME is treated as a regexp; anything else is a
            # literal test name.
            if name[0] == "/" && name[-1] == "/"
              @test_name = Regexp.new(name[1..-2])
            else
              @test_name = name
            end
          end

          opts.parse! argv
        end
      end

      def execute
        generate_tests_to_run
        test_arguments = build_test_arguments

        # directly run the tests from here and exit with the status of the tests passing or failing
        case framework
        when :minitest
          exit MiniTest::Unit.runner.run test_arguments
        when :testunit1, :testunit2
          exit Test::Unit::AutoRunner.run(false, nil, test_arguments)
        else
          not_supported
        end
      end

      def generate_tests_to_run
        # Locate tests to run that may be inside of this line. There could be more than one!
        all_tests = tests
        if @line
          @tests_to_run = all_tests.within(@line)
        end
      end

      # Build the "-n <pattern>" argument vector for the underlying runner,
      # or [] to run everything.
      def build_test_arguments
        if @line
          abort_with_no_test_found_by_line_number if @tests_to_run.empty?

          # assemble the regexp to run these tests,
          test_names = @tests_to_run.map(&:name).join('|')

          # set up the args needed for the runner
          ["-n", "/(#{test_names})/"]
        elsif user_specified_name?
          abort_with_no_test_found_by_name unless tests.contains?(@test_name)

          test_names = test_name_to_s

          ["-n", test_names]
        else
          []
        end
      end

      def abort_with_no_test_found_by_line_number
        abort_with_valid_tests_msg "No tests found on line #{@line}. "
      end

      def abort_with_no_test_found_by_name
        abort_with_valid_tests_msg "No test name matches '#{test_name_to_s}'. "
      end

      def abort_with_valid_tests_msg message=""
        message << "Valid tests to run:\n\n"
        # For every test ordered by line number,
        # spit out the test name and line number where it starts,
        tests.by_line_number do |test|
          message << "#{sprintf("%0#{tests.column_size}s", test.name)}: zeus test #{@files[0]}:#{test.start_line}\n"
        end
        # fail like a good unix process should.
        abort message
      end

      # Render @test_name for display: regexps get their /.../ delimiters back.
      def test_name_to_s
        @test_name.is_a?(Regexp)? "/#{@test_name.source}/" : @test_name
      end

      def user_specified_name?
        !@test_name.nil?
      end

      # Detect the loaded test framework (memoized).
      # NOTE(review): this relies on `defined?`, so it reflects whatever the
      # loaded test files required — it must only be consulted after the files
      # are loaded (as `suites` does).
      def framework
        @framework ||= begin
          if defined?(MiniTest)
            :minitest
          elsif defined?(Test)
            if Test::Unit::TestCase.respond_to?(:test_suites)
              :testunit2
            else
              :testunit1
            end
          end
        end
      end

      # Finds all test suites in this test file, with test methods included.
      def suites
        # Since we're not using `ruby -Itest -Ilib` to run the tests, we need to add this directory to the `LOAD_PATH`
        $:.unshift "./test", "./lib"

        if framework == :testunit1
          # Monkey-patch Test::Unit 1 so it reports its suites and methods the
          # way Test::Unit 2 / MiniTest already do.
          Test::Unit::TestCase.class_eval {
            @@test_suites = {}
            def self.inherited(klass)
              @@test_suites[klass] = true
            end
            def self.test_suites
              @@test_suites.keys
            end
            def self.test_methods
              public_instance_methods(true).grep(/^test/).map(&:to_s)
            end
          }
        end

        begin
          # Fire up the Ruby files. Let's hope they actually have tests.
          @files.each { |f| load f }
        rescue LoadError => e
          # Fail with a happier error message instead of spitting out a backtrace from this gem
          abort "Failed loading test file:\n#{e.message}"
        end

        # Figure out what test framework we're using
        case framework
        when :minitest
          suites = MiniTest::Unit::TestCase.test_suites
        when :testunit1, :testunit2
          suites = Test::Unit::TestCase.test_suites
        else
          not_supported
        end

        # Use some janky internal APIs to group test methods by test suite.
        # (The block-local `suites` deliberately shadows the array above.)
        suites.inject({}) do |suites, suite_class|
          # End up with a hash of suite class name to an array of test methods, so we can later find them and ignore empty test suites
          suites[suite_class] = suite_class.test_methods if suite_class.test_methods.size > 0
          suites
        end
      end

      # Shoves tests together in our custom container and collection classes.
      # Memoize it since it's unnecessary to do this more than one for a given file.
      def tests
        @tests ||= begin
          # With each suite and array of tests,
          # and with each test method present in this test file,
          # shove a new test method into this collection.
          suites.inject(TestCollection.new) do |collection, (suite_class, test_methods)|
            test_methods.each do |test_method|
              # Only pay the source-location cost when a single file was given
              # with a line number (the only case that needs locations).
              find_locations = (@files.size == 1 && @line)
              collection << TestMethod.create(suite_class, test_method, find_locations)
            end
            collection
          end
        end
      end

      # Fail loudly if this isn't supported
      def not_supported
        abort "This test framework is not supported! Please open up an issue at https://github.com/qrush/m !"
      end
    end
  end
end
Fix constant naming issue
# This is very largely based on @qrush's M, but there are many modifications.

# we need to load all dependencies up front, because bundler will
# remove us from the load path soon.
require "rubygems"
require "zeus/m/test_collection"
require "zeus/m/test_method"

# the Gemfile may specify a version of method_source, but we also want to require it here.
# To avoid possible "you've activated X; gemfile specifies Y" errors, we actually scan
# Gemfile.lock for a specific version, and require exactly that version if present.
gemfile_lock = ROOT_PATH + "/Gemfile.lock"
# FIX: File.exists? is a long-deprecated alias removed in Ruby 3.2 — use File.exist?.
if File.exist?(gemfile_lock)
  # FIX: reuse the gemfile_lock path computed above instead of rebuilding the
  # identical string a second time.
  version = File.read(gemfile_lock).
    scan(/\bmethod_source\s*\(([\d\.]+)\)/).flatten[0]
  gem "method_source", version if version
end
require 'method_source'
module Zeus
  #`m` stands for metal, which is a better test/unit test runner that can run
  #tests by line number.
  #
  #[](http://travis-ci.org/qrush/m)
  #
  #
  #
  #<sub>[Rush at the Bristol Colston Hall May 1979](http://www.flickr.com/photos/8507625@N02/3468299995/)</sub>
  ### Install
  #
  ### Usage
  #
  #Basically, I was sick of using the `-n` flag to grab one test to run. Instead, I
  #prefer how RSpec's test runner allows tests to be run by line number.
  #
  #Given this file:
  #
  # $ cat -n test/example_test.rb
  # 1 require 'test/unit'
  # 2
  # 3 class ExampleTest < Test::Unit::TestCase
  # 4 def test_apple
  # 5 assert_equal 1, 1
  # 6 end
  # 7
  # 8 def test_banana
  # 9 assert_equal 1, 1
  # 10 end
  # 11 end
  #
  #You can run a test by line number, using format `m TEST_FILE:LINE_NUMBER_OF_TEST`:
  #
  # $ m test/example_test.rb:4
  # Run options: -n /test_apple/
  #
  # # Running tests:
  #
  # .
  #
  # Finished tests in 0.000525s, 1904.7619 tests/s, 1904.7619 assertions/s.
  #
  # 1 tests, 1 assertions, 0 failures, 0 errors, 0 skips
  #
  #Hit the wrong line number? No problem, `m` helps you out:
  #
  # $ m test/example_test.rb:2
  # No tests found on line 2. Valid tests to run:
  #
  # test_apple: m test/examples/test_unit_example_test.rb:4
  # test_banana: m test/examples/test_unit_example_test.rb:8
  #
  #Want to run the whole test? Just leave off the line number.
  #
  # $ m test/example_test.rb
  # Run options:
  #
  # # Running tests:
  #
  # ..
  #
  # Finished tests in 0.001293s, 1546.7904 tests/s, 3093.5808 assertions/s.
  #
  # 1 tests, 2 assertions, 0 failures, 0 errors, 0 skips
  #
  #### Supports
  #
  #`m` works with a few Ruby test frameworks:
  #
  #* `Test::Unit`
  #* `ActiveSupport::TestCase`
  #* `MiniTest::Unit::TestCase`
  #
  ### License
  #
  #This gem is MIT licensed, please see `LICENSE` for more information.

  ### M, your metal test runner
  # Maybe this gem should have a longer name? Metal?
  module M
    # The explicit M:: prefix pins the constant to Zeus::M — presumably to
    # avoid clashing with another VERSION constant in scope (see the
    # "Fix constant naming issue" change) — TODO confirm against git history.
    M::VERSION = "1.2.1" unless defined?(M::VERSION)

    # Accept arguments coming from bin/m and run tests.
    def self.run(argv)
      Runner.new(argv).run
    end

    ### Runner is in charge of running your tests.
    # Instead of slamming all of this junk in an `M` class, it's here instead.
    class Runner
      # argv: the raw command-line arguments from bin/m.
      def initialize(argv)
        @argv = argv
      end

      # There's two steps to running our tests:
      # 1. Parsing the given input for the tests we need to find (or groups of tests)
      # 2. Run those tests we found that match what you wanted
      def run
        parse
        execute
      end

      private

      def parse
        # With no arguments,
        if @argv.empty?
          # Just shell out to `rake test`.
          require 'rake'
          Rake::Task['test'].invoke
          exit
        else
          parse_options! @argv
          # Parse out ARGV, it should be coming in in a format like `test/test_file.rb:9`
          _, line = @argv.first.split(':')
          # An explicit --line flag wins: parse_options! runs first and ||=
          # keeps any value it already set.
          @line ||= line.nil? ? nil : line.to_i

          @files = []
          @argv.each do |arg|
            add_file(arg)
          end
        end
      end

      # Expand one CLI argument — a `file[:line]` spec, a glob, or a directory —
      # into concrete test file paths appended to @files.  Aborts if nothing
      # matches.
      def add_file(arg)
        file = arg.split(':').first
        if Dir.exist?(file)
          files = Dir.glob("#{file}/**/*test*.rb")
          @files.concat(files)
        else
          files = Dir.glob(file)
          files == [] and abort "Couldn't find test file '#{file}'!"
          @files.concat(files)
        end
      end

      def parse_options!(argv)
        require 'optparse'
        OptionParser.new do |opts|
          opts.banner = 'Options:'
          opts.version = M::VERSION

          opts.on '-h', '--help', 'Display this help.' do
            puts "Usage: m [OPTIONS] [FILES]\n\n", opts
            exit
          end

          opts.on '--version', 'Display the version.' do
            puts "m #{M::VERSION}"
            exit
          end

          opts.on '-l', '--line LINE', Integer, 'Line number for file.' do |line|
            @line = line
          end

          opts.on '-n', '--name NAME', String, 'Name or pattern for test methods to run.' do |name|
            # A /.../-delimited NAME is treated as a regexp; anything else is a
            # literal test name.
            if name[0] == "/" && name[-1] == "/"
              @test_name = Regexp.new(name[1..-2])
            else
              @test_name = name
            end
          end

          opts.parse! argv
        end
      end

      def execute
        generate_tests_to_run
        test_arguments = build_test_arguments

        # directly run the tests from here and exit with the status of the tests passing or failing
        case framework
        when :minitest
          exit MiniTest::Unit.runner.run test_arguments
        when :testunit1, :testunit2
          exit Test::Unit::AutoRunner.run(false, nil, test_arguments)
        else
          not_supported
        end
      end

      def generate_tests_to_run
        # Locate tests to run that may be inside of this line. There could be more than one!
        all_tests = tests
        if @line
          @tests_to_run = all_tests.within(@line)
        end
      end

      # Build the "-n <pattern>" argument vector for the underlying runner,
      # or [] to run everything.
      def build_test_arguments
        if @line
          abort_with_no_test_found_by_line_number if @tests_to_run.empty?

          # assemble the regexp to run these tests,
          test_names = @tests_to_run.map(&:name).join('|')

          # set up the args needed for the runner
          ["-n", "/(#{test_names})/"]
        elsif user_specified_name?
          abort_with_no_test_found_by_name unless tests.contains?(@test_name)

          test_names = test_name_to_s

          ["-n", test_names]
        else
          []
        end
      end

      def abort_with_no_test_found_by_line_number
        abort_with_valid_tests_msg "No tests found on line #{@line}. "
      end

      def abort_with_no_test_found_by_name
        abort_with_valid_tests_msg "No test name matches '#{test_name_to_s}'. "
      end

      def abort_with_valid_tests_msg message=""
        message << "Valid tests to run:\n\n"
        # For every test ordered by line number,
        # spit out the test name and line number where it starts,
        tests.by_line_number do |test|
          message << "#{sprintf("%0#{tests.column_size}s", test.name)}: zeus test #{@files[0]}:#{test.start_line}\n"
        end
        # fail like a good unix process should.
        abort message
      end

      # Render @test_name for display: regexps get their /.../ delimiters back.
      def test_name_to_s
        @test_name.is_a?(Regexp)? "/#{@test_name.source}/" : @test_name
      end

      def user_specified_name?
        !@test_name.nil?
      end

      # Detect the loaded test framework (memoized).
      # NOTE(review): this relies on `defined?`, so it reflects whatever the
      # loaded test files required — it must only be consulted after the files
      # are loaded (as `suites` does).
      def framework
        @framework ||= begin
          if defined?(MiniTest)
            :minitest
          elsif defined?(Test)
            if Test::Unit::TestCase.respond_to?(:test_suites)
              :testunit2
            else
              :testunit1
            end
          end
        end
      end

      # Finds all test suites in this test file, with test methods included.
      def suites
        # Since we're not using `ruby -Itest -Ilib` to run the tests, we need to add this directory to the `LOAD_PATH`
        $:.unshift "./test", "./lib"

        if framework == :testunit1
          # Monkey-patch Test::Unit 1 so it reports its suites and methods the
          # way Test::Unit 2 / MiniTest already do.
          Test::Unit::TestCase.class_eval {
            @@test_suites = {}
            def self.inherited(klass)
              @@test_suites[klass] = true
            end
            def self.test_suites
              @@test_suites.keys
            end
            def self.test_methods
              public_instance_methods(true).grep(/^test/).map(&:to_s)
            end
          }
        end

        begin
          # Fire up the Ruby files. Let's hope they actually have tests.
          @files.each { |f| load f }
        rescue LoadError => e
          # Fail with a happier error message instead of spitting out a backtrace from this gem
          abort "Failed loading test file:\n#{e.message}"
        end

        # Figure out what test framework we're using
        case framework
        when :minitest
          suites = MiniTest::Unit::TestCase.test_suites
        when :testunit1, :testunit2
          suites = Test::Unit::TestCase.test_suites
        else
          not_supported
        end

        # Use some janky internal APIs to group test methods by test suite.
        # (The block-local `suites` deliberately shadows the array above.)
        suites.inject({}) do |suites, suite_class|
          # End up with a hash of suite class name to an array of test methods, so we can later find them and ignore empty test suites
          suites[suite_class] = suite_class.test_methods if suite_class.test_methods.size > 0
          suites
        end
      end

      # Shoves tests together in our custom container and collection classes.
      # Memoize it since it's unnecessary to do this more than one for a given file.
      def tests
        @tests ||= begin
          # With each suite and array of tests,
          # and with each test method present in this test file,
          # shove a new test method into this collection.
          suites.inject(TestCollection.new) do |collection, (suite_class, test_methods)|
            test_methods.each do |test_method|
              # Only pay the source-location cost when a single file was given
              # with a line number (the only case that needs locations).
              find_locations = (@files.size == 1 && @line)
              collection << TestMethod.create(suite_class, test_method, find_locations)
            end
            collection
          end
        end
      end

      # Fail loudly if this isn't supported
      def not_supported
        abort "This test framework is not supported! Please open up an issue at https://github.com/qrush/m !"
      end
    end
  end
end
|
Adding a test file for the faux text name library.
Signed-off-by: Kurtis Rainbolt-Greene <49ac34a1129a4d58178d6ca0b3dffac6aa6249a7@gmail.com>
require 'minitest/autorun'
require 'faux'

# Tests for the Faux name generators: return types, character sets, and
# option handling for first/middle/last/full names.
#
# FIX: all regexp arguments to `match` are now parenthesized — the bare form
# `assert foo.match /re/` triggers Ruby's "ambiguous first argument" warning.
class TestFauxTextName < MiniTest::Unit::TestCase
  def setup
  end

  def test_that_first_name_is_a_string
    assert Faux(:first_name).is_a? String
  end

  def test_that_first_name_isnt_empty
    refute Faux(:first_name).empty?
  end

  def test_that_first_name_has_valid_characters
    assert Faux(:first_name).match(/[a-zA-Z]+/i)
    refute Faux(:first_name).match(/[^a-zA-Z]+/i)
  end

  def test_that_middle_name_is_a_string
    assert Faux(:middle_name).is_a? String
  end

  def test_that_middle_name_isnt_empty
    refute Faux(:middle_name).empty?
  end

  def test_that_middle_name_has_valid_characters
    # Middle names may be abbreviated, so a '.' is also allowed.
    assert Faux(:middle_name).match(/[a-zA-Z\.]+/i)
    refute Faux(:middle_name).match(/[^a-zA-Z\.]+/i)
  end

  def test_that_middle_name_takes_full_option
    name = Faux(:middle_name, :full_middle_name => true)
    # A full (unabbreviated) middle name: longer than an initial, no dot.
    assert name.size > 2
    refute name.match(/\./i)
  end

  def test_that_last_name_is_a_string
    assert Faux(:last_name).is_a? String
  end

  def test_that_last_name_isnt_empty
    refute Faux(:last_name).empty?
  end

  def test_that_last_name_has_valid_characters
    assert Faux(:last_name).match(/[a-zA-Z]+/i)
    refute Faux(:last_name).match(/[^a-zA-Z]+/i)
  end

  def test_that_full_name_is_a_string
    assert Faux(:full_name).is_a? String
  end

  def test_that_full_name_isnt_empty
    refute Faux(:full_name).empty?
  end

  def test_that_full_name_takes_first_name_opt
    name = Faux(:full_name, :first_name => "Fred")
    assert_equal "Fred", name.split(' ').first
  end

  def test_that_full_name_takes_last_name_opt
    name = Faux(:full_name, :last_name => "Wilson")
    assert_equal "Wilson", name.split(' ').last
  end

  def test_that_full_name_takes_middle_name_opt
    name = Faux(:full_name, :middle_name => "Eve")
    assert_equal "Eve", name.split(' ')[1]
  end

  def test_that_full_name_takes_full_middle_name_opt
    name = Faux(:full_name, :full_middle_name => true)
    assert name.size > 2
    refute name.match(/\./i)
  end
end
|
require_relative '../../app.rb'
require_relative '../helpers/test_helper.rb'

# Integration tests for the /test/* maintenance interface used to reset the
# app into a known state for acceptance testing.
class TestInterfaceTest < MiniTest::Test
  include Rack::Test::Methods
  include Capybara::DSL

  def test_creates_new_user_called_test_user
    visit '/test/reset'
    assert User.exists?(user_name: "test_user"), "new test user not created at test reset"
  end

  def test_if_test_user_exists_delete_all_existing_followers
    visit '/test/reset'
    test_user = User.find_by_user_name("test_user")
    # FIX: failure message typo "folowers" -> "followers".
    assert !Follow.exists?(user_id: test_user.id), "followers of test user were not deleted"
  end

  # FIX: this method was previously also named
  # test_if_test_user_exists_delete_all_existing_followers — a duplicate
  # definition that silently overrode the Follow test above, so the follower
  # cleanup was never actually verified.
  def test_if_test_user_exists_delete_all_existing_tweets
    visit '/test/reset'
    test_user = User.find_by_user_name("test_user")
    assert !Tweet.exists?(user_id: test_user.id), "tweets of test user were not deleted"
  end

  def test_reset_redirects_to_test_user_timeline
    visit '/test/reset'
    assert page.has_content?('test_user'), "test reset failed"
  end
end
Complete the test-interface tests: add database cleanup in setup, fix the duplicated test name, and cover the seed/tweets/follow endpoints.
require_relative '../../app.rb'
require_relative '../helpers/test_helper.rb'

# Integration tests for the /test/* maintenance endpoints (reset, seed,
# tweets, follow) used to put the app into a known state for acceptance tests.
class TestInterfaceTest < MiniTest::Test
  include Rack::Test::Methods
  include Capybara::DSL

  # Start every test from a clean database.
  def setup
    User.destroy_all
    Tweet.destroy_all
    Follow.destroy_all
  end

  def test_creates_new_user_called_test_user
    visit '/test/reset'
    assert User.exists?(user_name: "test_user"), "new test user not created at test reset"
  end

  def test_if_test_user_exists_delete_all_existing_followers
    visit '/test/reset'
    test_user = User.find_by_user_name("test_user")
    # FIX: failure message typo "folowers" -> "followers".
    assert !Follow.exists?(user_id: test_user.id), "followers of test user were not deleted"
  end

  def test_if_test_user_exists_delete_all_existing_tweets
    visit '/test/reset'
    test_user = User.find_by_user_name("test_user")
    assert !Tweet.exists?(user_id: test_user.id), "tweets of test user were not deleted"
  end

  def test_reset_redirects_to_test_user_timeline
    visit '/test/reset'
    assert page.has_content?('test_user'), "test reset redirection failed"
  end

  def test_test_seed_creates_n_fake_users
    initial_users_count = User.all.size
    visit '/test/seed/12'
    # FIX: dropped the `msg = "..."` pattern — it created a useless local
    # variable; the message string is simply passed as the third argument.
    assert_equal(initial_users_count + 12, User.all.size, "seed doesn't create n fake users")
  end

  def test_generate_n_tweets
    test_user = create_test_user
    initial_tweets_count = test_user.tweets.count
    visit '/test/tweets/12'
    assert_equal(initial_tweets_count + 12, test_user.tweets.count, "doesn't create n tweets for test user")
  end

  def test_test_n_followers_for_test_user
    create_test_user
    10.times { Fabricate(:user) }
    test_user = User.find_by_user_name("test_user")
    initial_followers_count = test_user.followers.count
    visit '/test/follow/7'
    assert_equal(initial_followers_count + 7, test_user.followers.count, "doesn't make n users follow test user")
  end

  private

  # DRY: the canonical test-user attributes were duplicated inline in two
  # tests; create the user once here and return the persisted record.
  def create_test_user
    User.create(user_name: "test_user",
                name: "test user",
                password: "test123",
                email: "testuser@test.com")
    User.find_by_user_name("test_user")
  end
end
require 'test_helper'

# Unit tests for Wdpa::Attribute.standardise, the typed coercion helper for
# imported WDPA attribute values.
class TestWdpaAttribute < ActiveSupport::TestCase
  test '.standardise converts boolean-like values to boolean' do
    assert_equal false, Wdpa::Attribute.standardise('0', as: :boolean),
      "Expected '0' to be converted to false"
    assert_equal true, Wdpa::Attribute.standardise('1', as: :boolean),
      "Expected '1' to be converted to true"
    # Arbitrary non-"1" strings are falsy, even truthy-looking words.
    assert_equal false, Wdpa::Attribute.standardise('trust', as: :boolean),
      "Expected 'trust' to be converted to false"
  end

  test '.standardise converts integer-like values to integers' do
    assert_equal 1, Wdpa::Attribute.standardise('1', as: :integer),
      "Expected '1' to be converted to 1"
    # FIX: a non-numeric string coerces to 0 (String#to_i semantics), not 1;
    # the expected value and the message both previously claimed 1.
    assert_equal 0, Wdpa::Attribute.standardise('abc', as: :integer),
      "Expected 'abc' to be converted to 0"
  end

  test '.standardise converts string-like values to strings' do
    assert_equal 'abc', Wdpa::Attribute.standardise('abc', as: :string),
      "Expected 'abc' to remain as 'abc'"
    assert_equal '1234', Wdpa::Attribute.standardise(1234, as: :string),
      "Expected 1234 to be converted to '1234'"
  end

  test '.standardise converts float-like values to floats' do
    assert_equal 1.43, Wdpa::Attribute.standardise('1.43', as: :float),
      "Expected '1.43' to be converted to 1.43"
    assert_equal 1.0, Wdpa::Attribute.standardise(1, as: :float),
      "Expected 1 to be converted to 1.0"
    assert_equal 0.0, Wdpa::Attribute.standardise('abc', as: :float),
      "Expected 'abc' to be converted to 0.0"
  end

  test ".standardise raises an error if the specified converter doesn't exist" do
    assert_raises NotImplementedError, "No conversion exists for type 'blue'" do
      Wdpa::Attribute.standardise('carebear', as: :blue)
    end
  end
end
Corrects expected value for integer conversion test
require 'test_helper'

# Unit tests for Wdpa::Attribute.standardise, the typed coercion helper for
# imported WDPA attribute values.
class TestWdpaAttribute < ActiveSupport::TestCase
  test '.standardise converts boolean-like values to boolean' do
    assert_equal false, Wdpa::Attribute.standardise('0', as: :boolean),
      "Expected '0' to be converted to false"
    assert_equal true, Wdpa::Attribute.standardise('1', as: :boolean),
      "Expected '1' to be converted to true"
    # Arbitrary non-"1" strings are falsy, even truthy-looking words.
    assert_equal false, Wdpa::Attribute.standardise('trust', as: :boolean),
      "Expected 'trust' to be converted to false"
  end

  test '.standardise converts integer-like values to integers' do
    assert_equal 1, Wdpa::Attribute.standardise('1', as: :integer),
      "Expected '1' to be converted to 1"
    # FIX: the failure message still said "converted to 1" after the expected
    # value was corrected to 0 — a misleading message on failure.
    assert_equal 0, Wdpa::Attribute.standardise('abc', as: :integer),
      "Expected 'abc' to be converted to 0"
  end

  test '.standardise converts string-like values to strings' do
    assert_equal 'abc', Wdpa::Attribute.standardise('abc', as: :string),
      "Expected 'abc' to remain as 'abc'"
    assert_equal '1234', Wdpa::Attribute.standardise(1234, as: :string),
      "Expected 1234 to be converted to '1234'"
  end

  test '.standardise converts float-like values to floats' do
    assert_equal 1.43, Wdpa::Attribute.standardise('1.43', as: :float),
      "Expected '1.43' to be converted to 1.43"
    assert_equal 1.0, Wdpa::Attribute.standardise(1, as: :float),
      "Expected 1 to be converted to 1.0"
    assert_equal 0.0, Wdpa::Attribute.standardise('abc', as: :float),
      "Expected 'abc' to be converted to 0.0"
  end

  test ".standardise raises an error if the specified converter doesn't exist" do
    assert_raises NotImplementedError, "No conversion exists for type 'blue'" do
      Wdpa::Attribute.standardise('carebear', as: :blue)
    end
  end
end
|
Adding some basic tests
require 'minitest/autorun'
require 'helper'
# Smoke tests for the AuthN::Activation library: verifies the harness runs
# and that requiring 'helper' loads the AuthN::Activation constant.
class TestAuthNActivation < MiniTest::Unit::TestCase
  # Sanity check that the minitest runner itself is wired up.
  def test_that_things_work
    assert true
  end

  # The constant should be defined once the helper has required the gem.
  # NOTE(review): MiniTest::Unit::TestCase is the legacy minitest 4 API;
  # newer minitest uses Minitest::Test — confirm the pinned minitest version.
  def test_that_AuthN_is_defined
    assert(defined?(AuthN::Activation))
  end
end
|
require_relative '../../test_helper'
# Specs for RedStack::Session authentication against a stubbed OpenStack
# Keystone v2.0 endpoint (stub_openstack: true avoids real network calls).
describe "RedStack::Session" do
  before do
    @os = RedStack::Session.new(host: 'http://devstack:5000', api_version: 'v2.0', stub_openstack: true)
  end

  it "authenticates against the backend" do
    @os.authenticate username: 'validuser', password: '123qwe'
    @os.authenticated?.must_equal true
    # A successful login must populate the access payload.
    @os.access.wont_be_nil
  end

  it "handles invalid usernames" do
    @os.authenticate username: 'invaliduser', password: '123qwe'
    @os.authenticated?.must_equal false
    @os.access.must_be_nil
  end
end
Check for class of Session#access
require_relative '../../test_helper'
# Specs for RedStack::Session against a stubbed Keystone v2.0 backend.
describe "RedStack::Session" do
  before do
    @os = RedStack::Session.new(host: 'http://devstack:5000', api_version: 'v2.0', stub_openstack: true)
  end

  it "authenticates against the backend" do
    @os.authenticate username: 'validuser', password: '123qwe'
    @os.authenticated?.must_equal true
    @os.access.wont_be_nil
    # The access payload must be the typed wrapper, not a raw hash.
    @os.access.must_be_instance_of RedStack::Data::Access
  end

  it "handles invalid usernames" do
    @os.authenticate username: 'invaliduser', password: '123qwe'
    @os.authenticated?.must_equal false
    @os.access.must_be_nil
  end
end |
# frozen_string_literal: true
require("test_helper")
# Model tests for ExternalSite: creation validations (presence of name and
# project, name length, uniqueness) and project-based membership.
class ExternalSiteTest < UnitTestCase
  def test_create_valid
    site = ExternalSite.create!(
      name: "GenBank",
      project: Project.first
    )
    assert_not_nil(site)
    assert_empty(site.errors)
  end

  # Both name and project are required.
  def test_create_missing_attributes
    site = ExternalSite.create
    assert_not_empty(site.errors[:name])
    assert_not_empty(site.errors[:project])
  end

  def test_create_name_too_long
    site = ExternalSite.create(name: "x" * 1000)
    assert_not_empty(site.errors[:name])
  end

  # Per fixtures, only mary owns external sites; rolf and dick have none.
  def test_user_external_sites
    marys_sites = ExternalSite.all.sort_by(&:id)
    assert_obj_list_equal([], rolf.external_sites)
    assert_obj_list_equal([], dick.external_sites)
    assert_obj_list_equal(marys_sites, mary.external_sites.sort_by(&:id))
  end

  def test_member
    site = external_sites(:mycoportal)
    assert_false(site.member?(rolf))
    assert_false(site.member?(dick))
    assert_true(site.member?(mary))
  end

  # A duplicate name is rejected; a distinct name under the same project is fine.
  def test_uniqueness
    site1 = ExternalSite.first
    site2 = ExternalSite.create(
      name: site1.name,
      project: site1.project
    )
    assert_not_empty(site2.errors)
    site3 = ExternalSite.create(
      name: site1.name + " two",
      project: site1.project
    )
    assert_empty(site3.errors)
  end
end
Add test for case_sensitive uniqueness
# frozen_string_literal: true
require("test_helper")
# Model tests for ExternalSite: creation validations (presence of name and
# project, name length, case-insensitive uniqueness) and membership.
class ExternalSiteTest < UnitTestCase
  def test_create_valid
    site = ExternalSite.create!(
      name: "GenBank",
      project: Project.first
    )
    assert_not_nil(site)
    assert_empty(site.errors)
    # create! signals a failed case-insensitive uniqueness validation by
    # raising ActiveRecord::RecordInvalid. Passing only a string to
    # assert_raises would set the failure *message* and silently default
    # the expected class to StandardError, so name the class explicitly.
    assert_raises(ActiveRecord::RecordInvalid) do
      ExternalSite.create!(
        name: "genbank",
        project: Project.first
      )
    end
  end

  # Both name and project are required.
  def test_create_missing_attributes
    site = ExternalSite.create
    assert_not_empty(site.errors[:name])
    assert_not_empty(site.errors[:project])
  end

  def test_create_name_too_long
    site = ExternalSite.create(name: "x" * 1000)
    assert_not_empty(site.errors[:name])
  end

  # Per fixtures, only mary owns external sites; rolf and dick have none.
  def test_user_external_sites
    marys_sites = ExternalSite.all.sort_by(&:id)
    assert_obj_list_equal([], rolf.external_sites)
    assert_obj_list_equal([], dick.external_sites)
    assert_obj_list_equal(marys_sites, mary.external_sites.sort_by(&:id))
  end

  def test_member
    site = external_sites(:mycoportal)
    assert_false(site.member?(rolf))
    assert_false(site.member?(dick))
    assert_true(site.member?(mary))
  end

  # A duplicate name is rejected; a distinct name under the same project is fine.
  def test_uniqueness
    site1 = ExternalSite.first
    site2 = ExternalSite.create(
      name: site1.name,
      project: site1.project
    )
    assert_not_empty(site2.errors)
    site3 = ExternalSite.create(
      name: site1.name + " two",
      project: site1.project
    )
    assert_empty(site3.errors)
  end
end
|
require "test_helper"
# Policy tests for UserPolicy: only admins may list/view/edit/update users,
# only system admins may destroy them, and permitted_attributes restricts
# which fields and role grants each admin level may submit.
# Fixtures: :system_admin (global admin), :one_admin (admin of catalog one),
# :two (regular user); Guest.new models an unauthenticated visitor.
class UserPolicyTest < ActiveSupport::TestCase
  test "#index? allows only admins" do
    assert(policy(users(:system_admin)).index?)
    assert(policy(users(:one_admin)).index?)
    refute(policy(users(:two)).index?)
    refute(policy(Guest.new).index?)
  end

  test "#show? allows only admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).show?)
    assert(policy(users(:one_admin), record).show?)
    refute(policy(users(:two), record).show?)
    refute(policy(Guest.new, record).show?)
  end

  test "#edit? allows only admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).edit?)
    assert(policy(users(:one_admin), record).edit?)
    refute(policy(users(:two), record).edit?)
    refute(policy(Guest.new, record).edit?)
  end

  test "#update? allows only admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).update?)
    assert(policy(users(:one_admin), record).update?)
    refute(policy(users(:two), record).update?)
    refute(policy(Guest.new, record).update?)
  end

  test "#destroy allows only system admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).destroy?)
    refute(policy(users(:one_admin), record).destroy?)
    refute(policy(users(:two), record).destroy?)
    refute(policy(Guest.new, record).destroy?)
  end

  test "Scope shows all for admins, none for others" do
    assert_equal(User.all.to_a, policy_scoped_records(users(:system_admin)))
    assert_equal(User.all.to_a, policy_scoped_records(users(:one_admin)))
    assert_empty(policy_scoped_records(users(:two)))
    assert_empty(policy_scoped_records(Guest.new))
  end

  test "#permitted_attributes allows email for system admins only" do
    user = users(:one)
    params = params_to_grant("admin", user)
    refute_nil(permit(:system_admin, user, params)[:email])
    assert_nil(permit(:one_admin, user, params)[:email])
  end

  test "#permitted_attributes prevents granting admin role unless sys admin" do
    user = users(:one)
    params = params_to_grant("admin", user)
    assert_empty(
      permit(:one_admin, user, params)[:catalog_permissions_attributes]
    )
    assert_equal(
      params[:user][:catalog_permissions_attributes],
      permit(:system_admin, user, params)[:catalog_permissions_attributes]
    )
  end

  test "#permitted_attributes prevents catalog admin from granting role in non-administered catalog" do
    user = users(:two)
    params = params_to_grant("editor", user)
    assert_empty(
      permit(:one_admin, user, params)[:catalog_permissions_attributes]
    )
  end

  private

  # Builds a UserPolicy for +user+ acting on +record+ (nil for index checks).
  def policy(user, record=nil)
    UserPolicy.new(user, record)
  end

  # Resolves the policy scope over all users as seen by +user+.
  def policy_scoped_records(user)
    UserPolicy::Scope.new(user, User).resolve.to_a
  end

  # Applies the named fixture user's policy permit to the :user params subtree.
  def permit(user_fixture, record, params)
    policy(users(user_fixture), record).permit(params[:user])
  end

  # Mock params attempting an email change plus granting +role+ on the
  # user's first catalog permission.
  # NOTE(review): nested attributes are supplied as an Array here; Rails
  # form submissions normally nest them as an index-keyed hash — confirm
  # which shape UserPolicy#permit expects.
  def params_to_grant(role, user)
    perm = user.catalog_permissions.first
    ActionController::Parameters.new(
      :user => {
        :email => "changing-email@example.com",
        :catalog_permissions_attributes => [
          {
            :id => perm.id,
            :catalog_id => perm.catalog_id,
            :role => role
          }
        ]
      })
  end
end
Update mock params to correctly nest attributes
require "test_helper"
# Policy tests for UserPolicy: only admins may list/view/edit/update users,
# only system admins may destroy them, and permitted_attributes restricts
# which fields and role grants each admin level may submit.
# Fixtures: :system_admin (global admin), :one_admin (admin of catalog one),
# :two (regular user); Guest.new models an unauthenticated visitor.
class UserPolicyTest < ActiveSupport::TestCase
  test "#index? allows only admins" do
    assert(policy(users(:system_admin)).index?)
    assert(policy(users(:one_admin)).index?)
    refute(policy(users(:two)).index?)
    refute(policy(Guest.new).index?)
  end

  test "#show? allows only admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).show?)
    assert(policy(users(:one_admin), record).show?)
    refute(policy(users(:two), record).show?)
    refute(policy(Guest.new, record).show?)
  end

  test "#edit? allows only admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).edit?)
    assert(policy(users(:one_admin), record).edit?)
    refute(policy(users(:two), record).edit?)
    refute(policy(Guest.new, record).edit?)
  end

  test "#update? allows only admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).update?)
    assert(policy(users(:one_admin), record).update?)
    refute(policy(users(:two), record).update?)
    refute(policy(Guest.new, record).update?)
  end

  test "#destroy allows only system admins" do
    record = users(:one)
    assert(policy(users(:system_admin), record).destroy?)
    refute(policy(users(:one_admin), record).destroy?)
    refute(policy(users(:two), record).destroy?)
    refute(policy(Guest.new, record).destroy?)
  end

  test "Scope shows all for admins, none for others" do
    assert_equal(User.all.to_a, policy_scoped_records(users(:system_admin)))
    assert_equal(User.all.to_a, policy_scoped_records(users(:one_admin)))
    assert_empty(policy_scoped_records(users(:two)))
    assert_empty(policy_scoped_records(Guest.new))
  end

  test "#permitted_attributes allows email for system admins only" do
    user = users(:one)
    params = params_to_grant("admin", user)
    refute_nil(permit(:system_admin, user, params)[:email])
    assert_nil(permit(:one_admin, user, params)[:email])
  end

  test "#permitted_attributes prevents granting admin role unless sys admin" do
    user = users(:one)
    params = params_to_grant("admin", user)
    assert_empty(
      permit(:one_admin, user, params)[:catalog_permissions_attributes]
    )
    assert_equal(
      params[:user][:catalog_permissions_attributes],
      permit(:system_admin, user, params)[:catalog_permissions_attributes]
    )
  end

  test "#permitted_attributes prevents catalog admin from granting role in non-administered catalog" do
    user = users(:two)
    params = params_to_grant("editor", user)
    assert_empty(
      permit(:one_admin, user, params)[:catalog_permissions_attributes]
    )
  end

  private

  # Builds a UserPolicy for +user+ acting on +record+ (nil for index checks).
  def policy(user, record=nil)
    UserPolicy.new(user, record)
  end

  # Resolves the policy scope over all users as seen by +user+.
  def policy_scoped_records(user)
    UserPolicy::Scope.new(user, User).resolve.to_a
  end

  # Applies the named fixture user's policy permit to the :user params subtree.
  def permit(user_fixture, record, params)
    policy(users(user_fixture), record).permit(params[:user])
  end

  # Mock params attempting an email change plus granting +role+ on the
  # user's first catalog permission. Nested attributes use the index-keyed
  # hash shape ("0" => {...}) that Rails form submissions produce.
  def params_to_grant(role, user)
    perm = user.catalog_permissions.first
    ActionController::Parameters.new(
      :user => {
        :email => "changing-email@example.com",
        :catalog_permissions_attributes => {
          "0" => {
            :id => perm.id,
            :catalog_id => perm.catalog_id,
            :role => role
          }
        }
      })
  end
end
|
# Ensure the GOPATH workspace directory exists.
directory node[:gopath]

# Install the Go toolchain by unpacking the official binary tarball. The
# tarball extracts as "<dir>/go", which is then moved to node[:goroot].
execute "download & unarchive" do
  url = 'https://dl.google.com/go/go1.14.linux-amd64.tar.gz'
  dir = File.dirname(node[:goroot])
  command "curl -L #{url} | tar -xz -C #{dir} && mv #{dir}/go #{node[:goroot]}"
  # NOTE(review): this guard only checks that *a* go binary exists, so
  # bumping the URL version will not reinstall over an older toolchain —
  # confirm that is intended.
  not_if "test -f #{node[:goroot]}/bin/go"
end

# Go >= 1.5 bootstraps from a Go 1.4 toolchain when building from source.
execute "for building go" do
  go14 = "#{node[:home]}/go1.4"
  url = 'http://golang.org/dl/go1.4.linux-amd64.tar.gz'
  dir = File.dirname(go14)
  command "curl -L #{url} | tar -xz -C #{dir} && mv #{dir}/go #{go14}"
  not_if "test -f #{go14}/bin/go"
end

# Interactive filtering tool, commonly paired with ghq.
package 'peco'

# Fetch Go tools/libraries into GOPATH (legacy GOPATH-mode `go get`).
[
  'github.com/x-motemen/ghq',
  'github.com/golang/protobuf/proto',
  'github.com/golang/protobuf/protoc-gen-go',
].each do |pkg|
  execute "go get #{pkg}" do
    command "GOROOT=#{node[:goroot]} GOPATH=#{node[:gopath]} #{node[:goroot]}/bin/go get #{pkg}"
    not_if "test -d #{node[:gopath]}/src/#{pkg}"
  end
end
Update 003_golang.rb: bump Go from 1.14 to 1.15
# Ensure the GOPATH workspace directory exists.
directory node[:gopath]

# Install the Go toolchain by unpacking the official binary tarball. The
# tarball extracts as "<dir>/go", which is then moved to node[:goroot].
execute "download & unarchive" do
  url = 'https://dl.google.com/go/go1.15.linux-amd64.tar.gz'
  dir = File.dirname(node[:goroot])
  command "curl -L #{url} | tar -xz -C #{dir} && mv #{dir}/go #{node[:goroot]}"
  # NOTE(review): this guard only checks that *a* go binary exists, so the
  # 1.14 -> 1.15 bump above will not reinstall over an existing older
  # toolchain — confirm that is intended.
  not_if "test -f #{node[:goroot]}/bin/go"
end

# Go >= 1.5 bootstraps from a Go 1.4 toolchain when building from source.
execute "for building go" do
  go14 = "#{node[:home]}/go1.4"
  url = 'http://golang.org/dl/go1.4.linux-amd64.tar.gz'
  dir = File.dirname(go14)
  command "curl -L #{url} | tar -xz -C #{dir} && mv #{dir}/go #{go14}"
  not_if "test -f #{go14}/bin/go"
end

# Interactive filtering tool, commonly paired with ghq.
package 'peco'

# Fetch Go tools/libraries into GOPATH (legacy GOPATH-mode `go get`).
[
  'github.com/x-motemen/ghq',
  'github.com/golang/protobuf/proto',
  'github.com/golang/protobuf/protoc-gen-go',
].each do |pkg|
  execute "go get #{pkg}" do
    command "GOROOT=#{node[:goroot]} GOPATH=#{node[:gopath]} #{node[:goroot]}/bin/go get #{pkg}"
    not_if "test -d #{node[:gopath]}/src/#{pkg}"
  end
end
|
$:<< "./lib" # uncomment this to run against a Git clone instead of an installed gem
require "paytrace"
require "paytrace/debug"

# change this as needed to reflect the username, password, and test host you're testing against
PayTrace::Debug.configure_test("demo123", "demo123", "stage.paytrace.com")

# Integration script: adjusts the captured amount of an existing transaction,
# tracing the request/response for debugging.
PayTrace::Debug.trace do
  # BUG FIX: the opening brace of this hash literal was missing, which made
  # the whole file a syntax error.
  params = {
    # this must be a valid transaction ID for the credentials supplied
    transaction_id: 1131,
    amount: 19.87
  }
  PayTrace::Transaction::adjust_amount(params)
end
Fix integration script.
$:<< "./lib" # uncomment this to run against a Git clone instead of an installed gem
require "paytrace"
require "paytrace/debug"

# change this as needed to reflect the username, password, and test host you're testing against
PayTrace::Debug.configure_test("demo123", "demo123", "stage.paytrace.com")

# Integration script: adjusts the captured amount of an existing transaction,
# tracing the request/response for debugging.
PayTrace::Debug.trace do
  params = {
    # this must be a valid transaction ID for the credentials supplied
    transaction_id: 1131,
    amount: 4.01
  }
  PayTrace::Transaction::adjust_amount(params)
end |
module Acapi
  module Railties
    # Railtie that switches the in-process AMQP event publisher on or off at
    # boot, based on the host app's `config.acapi.publish_amqp_events` flag.
    class LocalAmqpPublisher < Rails::Railtie
      initializer "local_amqp_publisher_railtie.configure_rails_initialization" do |app|
        # TODO: Configure local event publishing client
        # NOTE(review): this raises NoMethodError when the host app never
        # defines `config.acapi` — confirm all consuming apps set it.
        publish_enabled = app.config.acapi.publish_amqp_events
        # A blank setting is logged, then treated as "disabled" below.
        if publish_enabled.blank?
          warn_settings_not_specified
        end
        if publish_enabled
          boot_local_publisher
        else
          disable_local_publisher
        end
      end

      # Logs (info level) that publishing is disabled by omission.
      def self.warn_settings_not_specified
        Rails.logger.info "No setting specified for 'acapi.publish_amqp_events' - disabling publishing of events to local AMQP instance'"
      end

      # Starts the local AMQP publisher singleton.
      def self.boot_local_publisher
        ::Acapi::LocalAmqpPublisher.boot!
      end

      # Switches the local AMQP publisher off.
      def self.disable_local_publisher
        ::Acapi::LocalAmqpPublisher.disable!
      end
    end
  end
end
Updating configuration lookup.
module Acapi
  module Railties
    # Railtie that switches the in-process AMQP event publisher on or off at
    # boot, based on the host app's `config.acapi.publish_amqp_events` flag.
    class LocalAmqpPublisher < Rails::Railtie
      initializer "local_amqp_publisher_railtie.configure_rails_initialization" do |app|
        # TODO: Configure local event publishing client
        publish_enabled = lookup_publisher_configuration(app)
        # A missing/blank setting is logged, then treated as "disabled" below.
        if publish_enabled.blank?
          warn_settings_not_specified
        end
        if publish_enabled
          boot_local_publisher
        else
          disable_local_publisher
        end
      end

      # Safely digs the publish flag out of the app config, returning nil when
      # `config.acapi` or its `publish_amqp_events` accessor is absent, so apps
      # without any acapi configuration fall through to "disabled".
      def self.lookup_publisher_configuration(app)
        r_config = app.config
        return nil unless r_config.respond_to?(:acapi)
        acapi_config = r_config.acapi
        return nil unless acapi_config.respond_to?(:publish_amqp_events)
        acapi_config.publish_amqp_events
      end

      # Logs (info level) that publishing is disabled by omission.
      def self.warn_settings_not_specified
        Rails.logger.info "No setting specified for 'acapi.publish_amqp_events' - disabling publishing of events to local AMQP instance'"
      end

      # Starts the local AMQP publisher singleton.
      def self.boot_local_publisher
        ::Acapi::LocalAmqpPublisher.boot!
      end

      # Switches the local AMQP publisher off.
      def self.disable_local_publisher
        ::Acapi::LocalAmqpPublisher.disable!
      end
    end
  end
end
|
# An ACME authorization resource: the set of challenges the CA offers for
# proving control of a domain, plus the authorization's status and expiry.
class Acme::Client::Resources::Authorization
  HTTP01 = Acme::Client::Resources::Challenges::HTTP01
  DNS01 = Acme::Client::Resources::Challenges::DNS01
  TLSSNI01 = Acme::Client::Resources::Challenges::TLSSNI01

  attr_reader :domain, :status, :expires, :http01, :dns01, :tls_sni01

  # client:   API client handed to each challenge for follow-up requests.
  # response: HTTP response whose body is the parsed authorization JSON.
  def initialize(client, response)
    @client = client
    assign_challenges(response.body['challenges'])
    assign_attributes(response.body)
  end

  private

  # Wraps each recognised challenge type in its typed class; unknown
  # types are deliberately ignored.
  def assign_challenges(challenges)
    challenges.each do |attributes|
      case attributes.fetch('type')
      when 'http-01' then @http01 = HTTP01.new(@client, attributes)
      when 'dns-01' then @dns01 = DNS01.new(@client, attributes)
      when 'tls-sni-01' then @tls_sni01 = TLSSNI01.new(@client, attributes)
      # else no-op
      end
    end
  end

  # Copies scalar fields off the response body; 'expires' is optional.
  # NOTE(review): Time.parse needs `require 'time'` — confirm it is loaded
  # elsewhere in the gem; it also accepts looser formats than RFC 3339.
  def assign_attributes(body)
    @expires = Time.parse(body['expires']) if body.has_key? 'expires'
    @domain = body['identifier']['value']
    @status = body['status']
  end
end
Use ISO8601 format for time parsing
# An ACME authorization: exposes the challenges offered by the CA for a
# domain together with the authorization's status and expiry time.
class Acme::Client::Resources::Authorization
  HTTP01 = Acme::Client::Resources::Challenges::HTTP01
  DNS01 = Acme::Client::Resources::Challenges::DNS01
  TLSSNI01 = Acme::Client::Resources::Challenges::TLSSNI01

  attr_reader :domain, :status, :expires, :http01, :dns01, :tls_sni01

  # client:   API client handed to each challenge for follow-up requests.
  # response: HTTP response whose body is the parsed authorization JSON.
  def initialize(client, response)
    @client = client
    payload = response.body
    assign_challenges(payload['challenges'])
    assign_attributes(payload)
  end

  private

  # Wrap each recognised challenge description in its typed class; entries
  # with an unknown 'type' are skipped on purpose.
  def assign_challenges(challenges)
    challenges.each do |challenge|
      kind = challenge.fetch('type')
      if kind == 'http-01'
        @http01 = HTTP01.new(@client, challenge)
      elsif kind == 'dns-01'
        @dns01 = DNS01.new(@client, challenge)
      elsif kind == 'tls-sni-01'
        @tls_sni01 = TLSSNI01.new(@client, challenge)
      end
    end
  end

  # Copy the scalar authorization fields; 'expires' (RFC 3339) is optional.
  def assign_attributes(body)
    @expires = Time.iso8601(body['expires']) if body.key?('expires')
    @domain = body['identifier']['value']
    @status = body['status']
  end
end
|
require 'open3'
require 'acmesmith/post_issueing_hooks/base'
module Acmesmith
  module PostIssueingHooks
    # Hook that runs a user-configured command after certificate issueing,
    # exposing the common name through the COMMON_NAME env var.
    class Shell < Base
      # NOTE(review): these error classes are never raised in this file and
      # look copy-pasted from a DNS-based hook — confirm they can be dropped.
      class HostedZoneNotFound < StandardError; end
      class AmbiguousHostedZones < StandardError; end

      # common_name:    certificate CN exported to the command's environment.
      # command:        command line to run.
      # ignore_failure: when true, a failing command only warns instead of raising.
      def initialize(common_name:, command:, ignore_failure:false)
        @common_name = common_name
        @command = command
        @ignore_failure = ignore_failure
      end

      def execute
        puts "=> Executing Post Issueing Hook for #{@common_name} in #{self.class.name}"
        puts "=> ENV: COMMON_NAME=#{@common_name}"
        puts "=> Running: #{@command}"
        # The appended ";" forces system() down the shell-interpretation path
        # even for commands containing no shell metacharacters.
        status = system({"COMMON_NAME" => @common_name}, "#{@command};")
        unless status
          if @ignore_failure
            $stderr.puts "WARNING, command failed"
          else
            raise "FATAL, command failed"
          end
        end
      end
    end
  end
end
post_issueing_hooks/shell: Don't force shell use
require 'open3'
require 'acmesmith/post_issueing_hooks/base'
module Acmesmith
  module PostIssueingHooks
    # Hook that runs a user-configured command after certificate issueing,
    # exposing the common name through the COMMON_NAME env var. The command
    # string is passed to system() unchanged, so commands without shell
    # metacharacters are exec'd directly instead of through a shell.
    class Shell < Base
      # NOTE(review): these error classes are never raised in this file and
      # look copy-pasted from a DNS-based hook — confirm they can be dropped.
      class HostedZoneNotFound < StandardError; end
      class AmbiguousHostedZones < StandardError; end

      # common_name:    certificate CN exported to the command's environment.
      # command:        command line to run.
      # ignore_failure: when true, a failing command only warns instead of raising.
      def initialize(common_name:, command:, ignore_failure: false)
        @common_name = common_name
        @command = command
        @ignore_failure = ignore_failure
      end

      def execute
        puts "=> Executing Post Issueing Hook for #{@common_name} in #{self.class.name}"
        puts "=> ENV: COMMON_NAME=#{@common_name}"
        puts "=> Running: #{@command}"
        # Use the ivar directly: this class defines no `common_name` reader
        # (the bare method call relied on one existing, presumably on Base —
        # unverified here), and the rest of the method already reads
        # @common_name.
        status = system({"COMMON_NAME" => @common_name}, @command)
        unless status
          if @ignore_failure
            $stderr.puts "WARNING, command failed"
          else
            raise "FATAL, command failed"
          end
        end
      end
    end
  end
end
|
require 'action_dispatch/http/request'
require 'active_support/core_ext/uri'
require 'active_support/core_ext/array/extract_options'
require 'rack/utils'
require 'action_controller/metal/exceptions'
module ActionDispatch
module Routing
class Redirect # :nodoc:
  attr_reader :status, :block

  def initialize(status, block)
    @status = status
    @block = block
  end

  # Rack entry point: computes the target URI for this request and returns
  # an HTML redirect response [status, headers, body].
  def call(env)
    req = Request.new(env)

    # If any of the path parameters has an invalid encoding then
    # raise since it's likely to trigger errors further on.
    req.symbolized_path_parameters.each do |key, value|
      # BUG FIX: non-string parameter values (e.g. symbols supplied by
      # route defaults) don't respond to valid_encoding?; skip them
      # instead of raising NoMethodError.
      next unless value.respond_to?(:valid_encoding?)
      unless value.valid_encoding?
        raise ActionController::BadRequest, "Invalid parameter: #{key} => #{value}"
      end
    end

    uri = URI.parse(path(req.symbolized_path_parameters, req))

    # Relative targets are anchored under the mount point (SCRIPT_NAME).
    unless uri.host
      if relative_path?(uri.path)
        uri.path = "#{req.script_name}/#{uri.path}"
      elsif uri.path.empty?
        uri.path = req.script_name.empty? ? "/" : req.script_name
      end
    end

    # Fill in any URL pieces the target did not specify from the request.
    uri.scheme ||= req.scheme
    uri.host ||= req.host
    uri.port ||= req.port unless req.standard_port?

    body = %(<html><body>You are being <a href="#{ERB::Util.h(uri.to_s)}">redirected</a>.</body></html>)

    headers = {
      'Location' => uri.to_s,
      'Content-Type' => 'text/html',
      'Content-Length' => body.length.to_s
    }

    [ status, headers, [body] ]
  end

  # Default target computation: delegate to the user-supplied block.
  def path(params, request)
    block.call params, request
  end

  def inspect
    "redirect(#{status})"
  end

  private

  # A path is relative when present, non-empty and not starting with '/'.
  def relative_path?(path)
    path && !path.empty? && path[0] != '/'
  end

  # Query-string escaping for interpolated params.
  def escape(params)
    Hash[params.map{ |k,v| [k, Rack::Utils.escape(v)] }]
  end

  # Fragment escaping for interpolated params.
  def escape_fragment(params)
    Hash[params.map{ |k,v| [k, Journey::Router::Utils.escape_fragment(v)] }]
  end

  # Path-segment escaping for interpolated params.
  def escape_path(params)
    Hash[params.map{ |k,v| [k, Journey::Router::Utils.escape_path(v)] }]
  end
end
class PathRedirect < Redirect
  # Splits a target string into optional path, query ("?...") and
  # fragment ("#...") sections.
  URL_PARTS = /\A([^?]+)?(\?[^#]+)?(#.+)?\z/

  # Interpolates %{param} placeholders into the redirect target, escaping
  # each URL section with the escaper appropriate to that section.
  def path(params, request)
    parts = block.match(URL_PARTS)
    if parts
      raw_path, raw_query, raw_fragment = parts[1], parts[2], parts[3]

      out_path     = interpolation_required?(raw_path, params)     ? raw_path % escape_path(params)         : raw_path
      out_query    = interpolation_required?(raw_query, params)    ? raw_query % escape(params)             : raw_query
      out_fragment = interpolation_required?(raw_fragment, params) ? raw_fragment % escape_fragment(params) : raw_fragment

      "#{out_path}#{out_query}#{out_fragment}"
    else
      interpolation_required?(block, params) ? block % escape(params) : block
    end
  end

  def inspect
    "redirect(#{status}, #{block})"
  end

  private

  # Interpolation applies only when params exist and the section contains
  # a %{name} placeholder.
  def interpolation_required?(string, params)
    !params.empty? && string && string.match(/%\{\w*\}/)
  end
end
class OptionRedirect < Redirect # :nodoc:
  # For this variant the "block" slot stores an options hash.
  alias :options :block

  # Rebuilds a full URL from the incoming request's pieces, overridden by
  # the user-supplied options; supports %{param} interpolation in :path.
  def path(params, request)
    url_options = {
      :protocol => request.protocol,
      :host     => request.host,
      :port     => request.optional_port,
      :path     => request.path,
      :params   => request.query_parameters
    }.merge! options

    if !params.empty? && url_options[:path].match(/%\{\w*\}/)
      url_options[:path] = (url_options[:path] % escape_path(params))
    end

    # Same-host redirects keep the mount point (SCRIPT_NAME) prefix.
    unless options[:host] || options[:domain]
      if relative_path?(url_options[:path])
        url_options[:path] = "/#{url_options[:path]}"
        url_options[:script_name] = request.script_name
      elsif url_options[:path].empty?
        url_options[:path] = request.script_name.empty? ? "/" : ""
        url_options[:script_name] = request.script_name
      end
    end

    ActionDispatch::Http::URL.url_for url_options
  end

  def inspect
    "redirect(#{status}, #{options.map{ |k,v| "#{k}: #{v}" }.join(', ')})"
  end
end
module Redirection
# Redirect any path to another path:
#
# get "/stories" => redirect("/posts")
#
# You can also use interpolation in the supplied redirect argument:
#
# get 'docs/:article', to: redirect('/wiki/%{article}')
#
# Note that if you return a path without a leading slash then the url is prefixed with the
# current SCRIPT_NAME environment variable. This is typically '/' but may be different in
# a mounted engine or where the application is deployed to a subdirectory of a website.
#
# Alternatively you can use one of the other syntaxes:
#
# The block version of redirect allows for the easy encapsulation of any logic associated with
# the redirect in question. Either the params and request are supplied as arguments, or just
# params, depending of how many arguments your block accepts. A string is required as a
# return value.
#
# get 'jokes/:number', to: redirect { |params, request|
# path = (params[:number].to_i.even? ? "wheres-the-beef" : "i-love-lamp")
# "http://#{request.host_with_port}/#{path}"
# }
#
# Note that the +do end+ syntax for the redirect block wouldn't work, as Ruby would pass
# the block to +get+ instead of +redirect+. Use <tt>{ ... }</tt> instead.
#
# The options version of redirect allows you to supply only the parts of the url which need
# to change, it also supports interpolation of the path similar to the first example.
#
# get 'stores/:name', to: redirect(subdomain: 'stores', path: '/%{name}')
# get 'stores/:name(*all)', to: redirect(subdomain: 'stores', path: '/%{name}%{all}')
#
# Finally, an object which responds to call can be supplied to redirect, allowing you to reuse
# common redirect routes. The call method must accept two arguments, params and request, and return
# a string.
#
# get 'accounts/:name' => redirect(SubdomainRedirector.new('api'))
#
# Builds a Rack endpoint for the routing DSL. Accepts a string path, an
# options hash, a block, or any object responding to #call; the :status
# option (default 301) selects the HTTP status of the redirect.
def redirect(*args, &block)
  options = args.extract_options!
  status  = options.delete(:status) || 301
  target  = args.shift

  if options.any?
    OptionRedirect.new(status, options)
  elsif target.is_a?(String)
    PathRedirect.new(status, target)
  else
    # A callable positional argument takes precedence over a trailing block.
    handler = target.respond_to?(:call) ? target : block
    raise ArgumentError, "redirection argument not supported" unless handler
    Redirect.new(status, handler)
  end
end
end
end
end
Fix Redirect#call: skip path parameters that don't respond to valid_encoding?, so redirects (e.g. for 404 pages) no longer raise NoMethodError.
require 'action_dispatch/http/request'
require 'active_support/core_ext/uri'
require 'active_support/core_ext/array/extract_options'
require 'rack/utils'
require 'action_controller/metal/exceptions'
module ActionDispatch
module Routing
class Redirect # :nodoc:
  attr_reader :status, :block

  def initialize(status, block)
    @status = status
    @block = block
  end

  # Rack entry point: computes the target URI for this request and returns
  # an HTML redirect response [status, headers, body].
  def call(env)
    req = Request.new(env)

    # If any of the path parameters has an invalid encoding then
    # raise since it's likely to trigger errors further on.
    req.symbolized_path_parameters.each do |key, value|
      # Skip values (e.g. symbols from route defaults) that cannot be
      # encoding-checked, instead of raising NoMethodError.
      next unless value.respond_to?(:valid_encoding?)
      unless value.valid_encoding?
        raise ActionController::BadRequest, "Invalid parameter: #{key} => #{value}"
      end
    end

    uri = URI.parse(path(req.symbolized_path_parameters, req))

    # Relative targets are anchored under the mount point (SCRIPT_NAME).
    unless uri.host
      if relative_path?(uri.path)
        uri.path = "#{req.script_name}/#{uri.path}"
      elsif uri.path.empty?
        uri.path = req.script_name.empty? ? "/" : req.script_name
      end
    end

    # Fill in any URL pieces the target did not specify from the request.
    uri.scheme ||= req.scheme
    uri.host ||= req.host
    uri.port ||= req.port unless req.standard_port?

    body = %(<html><body>You are being <a href="#{ERB::Util.h(uri.to_s)}">redirected</a>.</body></html>)

    headers = {
      'Location' => uri.to_s,
      'Content-Type' => 'text/html',
      'Content-Length' => body.length.to_s
    }

    [ status, headers, [body] ]
  end

  # Default target computation: delegate to the user-supplied block.
  def path(params, request)
    block.call params, request
  end

  def inspect
    "redirect(#{status})"
  end

  private

  # A path is relative when present, non-empty and not starting with '/'.
  def relative_path?(path)
    path && !path.empty? && path[0] != '/'
  end

  # Query-string escaping for interpolated params.
  def escape(params)
    Hash[params.map{ |k,v| [k, Rack::Utils.escape(v)] }]
  end

  # Fragment escaping for interpolated params.
  def escape_fragment(params)
    Hash[params.map{ |k,v| [k, Journey::Router::Utils.escape_fragment(v)] }]
  end

  # Path-segment escaping for interpolated params.
  def escape_path(params)
    Hash[params.map{ |k,v| [k, Journey::Router::Utils.escape_path(v)] }]
  end
end
class PathRedirect < Redirect
  # Splits a target string into optional path, query ("?...") and
  # fragment ("#...") sections.
  URL_PARTS = /\A([^?]+)?(\?[^#]+)?(#.+)?\z/

  # Interpolates %{param} placeholders into the redirect target, escaping
  # each URL section with the escaper appropriate to that section.
  def path(params, request)
    if block.match(URL_PARTS)
      path = interpolation_required?($1, params) ? $1 % escape_path(params) : $1
      query = interpolation_required?($2, params) ? $2 % escape(params) : $2
      fragment = interpolation_required?($3, params) ? $3 % escape_fragment(params) : $3

      "#{path}#{query}#{fragment}"
    else
      interpolation_required?(block, params) ? block % escape(params) : block
    end
  end

  def inspect
    "redirect(#{status}, #{block})"
  end

  private

  # Interpolation applies only when params exist and the section contains
  # a %{name} placeholder.
  def interpolation_required?(string, params)
    !params.empty? && string && string.match(/%\{\w*\}/)
  end
end
class OptionRedirect < Redirect # :nodoc:
  # For this variant the "block" slot stores an options hash.
  alias :options :block

  # Rebuilds a full URL from the incoming request's pieces, overridden by
  # the user-supplied options; supports %{param} interpolation in :path.
  def path(params, request)
    url_options = {
      :protocol => request.protocol,
      :host     => request.host,
      :port     => request.optional_port,
      :path     => request.path,
      :params   => request.query_parameters
    }.merge! options

    if !params.empty? && url_options[:path].match(/%\{\w*\}/)
      url_options[:path] = (url_options[:path] % escape_path(params))
    end

    # Same-host redirects keep the mount point (SCRIPT_NAME) prefix.
    unless options[:host] || options[:domain]
      if relative_path?(url_options[:path])
        url_options[:path] = "/#{url_options[:path]}"
        url_options[:script_name] = request.script_name
      elsif url_options[:path].empty?
        url_options[:path] = request.script_name.empty? ? "/" : ""
        url_options[:script_name] = request.script_name
      end
    end

    ActionDispatch::Http::URL.url_for url_options
  end

  def inspect
    "redirect(#{status}, #{options.map{ |k,v| "#{k}: #{v}" }.join(', ')})"
  end
end
module Redirection
# Redirect any path to another path:
#
# get "/stories" => redirect("/posts")
#
# You can also use interpolation in the supplied redirect argument:
#
# get 'docs/:article', to: redirect('/wiki/%{article}')
#
# Note that if you return a path without a leading slash then the url is prefixed with the
# current SCRIPT_NAME environment variable. This is typically '/' but may be different in
# a mounted engine or where the application is deployed to a subdirectory of a website.
#
# Alternatively you can use one of the other syntaxes:
#
# The block version of redirect allows for the easy encapsulation of any logic associated with
# the redirect in question. Either the params and request are supplied as arguments, or just
# params, depending of how many arguments your block accepts. A string is required as a
# return value.
#
# get 'jokes/:number', to: redirect { |params, request|
# path = (params[:number].to_i.even? ? "wheres-the-beef" : "i-love-lamp")
# "http://#{request.host_with_port}/#{path}"
# }
#
# Note that the +do end+ syntax for the redirect block wouldn't work, as Ruby would pass
# the block to +get+ instead of +redirect+. Use <tt>{ ... }</tt> instead.
#
# The options version of redirect allows you to supply only the parts of the url which need
# to change, it also supports interpolation of the path similar to the first example.
#
# get 'stores/:name', to: redirect(subdomain: 'stores', path: '/%{name}')
# get 'stores/:name(*all)', to: redirect(subdomain: 'stores', path: '/%{name}%{all}')
#
# Finally, an object which responds to call can be supplied to redirect, allowing you to reuse
# common redirect routes. The call method must accept two arguments, params and request, and return
# a string.
#
# get 'accounts/:name' => redirect(SubdomainRedirector.new('api'))
#
# Builds a Rack endpoint for the routing DSL. Accepts a string path, an
# options hash, a block, or any object responding to #call; the :status
# option (default 301) selects the HTTP status of the redirect.
def redirect(*args, &block)
  options = args.extract_options!
  status  = options.delete(:status) || 301
  path    = args.shift

  return OptionRedirect.new(status, options) if options.any?
  return PathRedirect.new(status, path) if String === path

  # A callable positional argument takes precedence over a trailing block.
  block = path if path.respond_to? :call
  raise ArgumentError, "redirection argument not supported" unless block
  Redirect.new status, block
end
end
end
end
|
ActiveAdmin.register Asset do
  # Grid index: each thumbnail links through to the asset's admin page.
  index as: :grid do |asset|
    link_to(image_tag(asset.storage.thumb("100x100#").url), admin_asset_path(asset))
  end

  form do |f|
    f.inputs do
      f.input :storage, as: :dragonfly, input_html: { components: [:preview, :upload, :url, :remove ] }
    end
    f.buttons
  end

  # Show page: dimensions plus the image rendered at several scales.
  show do
    attributes_table do
      row('Dimensions') do
        "#{asset.storage.width}px x #{asset.storage.height}px"
      end
      row('Thumbnail') do
        image_tag(asset.thumb_url)
      end
      row('25%') do
        image_tag(asset.percentage_thumb_url(0.25))
      end
      row('50%') do
        image_tag(asset.percentage_thumb_url(0.5))
      end
      row('75%') do
        image_tag(asset.percentage_thumb_url(0.75))
      end
      row('Full Image') do
        image_tag(asset.storage.url)
      end
    end
  end

  controller do
    # Handles three upload styles: Rack::RawUpload ('file'), raw-body
    # uploaders posting 'qqfile', and the standard ActiveAdmin form.
    def create
      # If an app is using Rack::RawUpload, it can just use
      # params['file'] and not worry with original_filename parsing.
      if params['file']
        @asset = Asset.new
        @asset.storage = params['file']
        # NOTE(review): save! raises on failure, so the else branch below is
        # unreachable; save (without bang) would make it meaningful.
        if @asset.save!
          render json: { success: true }.to_json
        else
          render nothing: true, status: 500 and return
        end
      elsif params['qqfile']
        @asset = Asset.new
        io = request.env['rack.input']
        # throw io
        # def io.original_filename=(name) @original_filename = name; end
        # def io.original_filename() @original_filename; end
        # io.original_filename = params['qqfile']
        # BUG FIX: Ruby's reflection method is respond_to?, not responds_to?;
        # the misspelling raised NoMethodError on every qqfile upload.
        @asset.storage = Dragonfly::TempObject.new(io.respond_to?(:string) ? io.string : io.read)
        if @asset.save!
          render json: { success: true }.to_json
        else
          render nothing: true, status: 500 and return
        end
      else
        create!
      end
    end
  end
end
Fix typo in previous commit.
ActiveAdmin.register Asset do
  # Grid index: each thumbnail links through to the asset's admin page.
  index as: :grid do |asset|
    link_to(image_tag(asset.storage.thumb("100x100#").url), admin_asset_path(asset))
  end

  form do |f|
    f.inputs do
      f.input :storage, as: :dragonfly, input_html: { components: [:preview, :upload, :url, :remove ] }
    end
    f.buttons
  end

  # Show page: dimensions plus the image rendered at several scales.
  show do
    attributes_table do
      row('Dimensions') do
        "#{asset.storage.width}px x #{asset.storage.height}px"
      end
      row('Thumbnail') do
        image_tag(asset.thumb_url)
      end
      row('25%') do
        image_tag(asset.percentage_thumb_url(0.25))
      end
      row('50%') do
        image_tag(asset.percentage_thumb_url(0.5))
      end
      row('75%') do
        image_tag(asset.percentage_thumb_url(0.75))
      end
      row('Full Image') do
        image_tag(asset.storage.url)
      end
    end
  end

  controller do
    # Handles three upload styles: Rack::RawUpload ('file'), raw-body
    # uploaders posting 'qqfile', and the standard ActiveAdmin form.
    def create
      # If an app is using Rack::RawUpload, it can just use
      # params['file'] and not worry with original_filename parsing.
      if params['file']
        @asset = Asset.new
        @asset.storage = params['file']
        # NOTE(review): save! raises on failure, so the else branch below is
        # unreachable; save (without bang) would make it meaningful.
        if @asset.save!
          render json: { success: true }.to_json
        else
          render nothing: true, status: 500 and return
        end
      elsif params['qqfile']
        @asset = Asset.new
        io = request.env['rack.input']
        # throw io
        # def io.original_filename=(name) @original_filename = name; end
        # def io.original_filename() @original_filename; end
        # io.original_filename = params['qqfile']
        # StringIO bodies expose #string; other rack.input objects are read.
        @asset.storage = Dragonfly::TempObject.new(io.respond_to?(:string) ? io.string : io.read)
        if @asset.save!
          render json: { success: true }.to_json
        else
          render nothing: true, status: 500 and return
        end
      else
        create!
      end
    end
  end
end
|
require 'time'
require 'date'
require 'active_merchant/billing/expiry_date'
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
# A +CreditCard+ object represents a physical credit card, and is capable of validating the various
# data associated with these.
#
# At the moment, the following credit card types are supported:
#
# * Visa
# * MasterCard
# * Discover
# * American Express
# * Diner's Club
# * JCB
# * Switch
# * Solo
# * Dankort
# * Maestro
# * Forbrugsforeningen
# * Laser
#
# For testing purposes, use the 'bogus' credit card type. This skips the vast majority of
# validations, allowing you to focus on your core concerns until you're ready to be more concerned
# with the details of particular credit cards or your gateway.
#
# == Testing With CreditCard
# Often when testing we don't care about the particulars of a given card type. When using the 'test'
# mode in your {Gateway}, there are six different valid card numbers: 1, 2, 3, 'success', 'fail',
# and 'error'.
#
# For details, see {CreditCardMethods::ClassMethods#valid_number?}
#
# == Example Usage
# cc = CreditCard.new(
# :first_name => 'Steve',
# :last_name => 'Smith',
# :month => '9',
# :year => '2010',
# :type => 'visa',
# :number => '4242424242424242'
# )
#
# cc.valid? # => true
# cc.display_number # => XXXX-XXXX-XXXX-4242
#
class CreditCard
include CreditCardMethods
include Validateable
# Class-level switch: when true (the default), #validate requires a
# verification value to be present (see #validate_verification_value).
cattr_accessor :require_verification_value
self.require_verification_value = true
# Returns or sets the credit card number.
#
# @return [String]
attr_accessor :number
# Returns or sets the expiry month for the card.
#
# @return [Integer]
attr_accessor :month
# Returns or sets the expiry year for the card.
#
# @return [Integer]
attr_accessor :year
# Returns or sets the credit card type.
#
# Valid card types are
#
# * +'visa'+
# * +'master'+
# * +'discover'+
# * +'american_express'+
# * +'diners_club'+
# * +'jcb'+
# * +'switch'+
# * +'solo'+
# * +'dankort'+
# * +'maestro'+
# * +'forbrugsforeningen'+
# * +'laser'+
#
# Or, if you wish to test your implementation, +'bogus'+.
#
# @return (String) the credit card type
attr_accessor :type
# Returns or sets the first name of the card holder.
#
# @return [String]
attr_accessor :first_name
# Returns or sets the last name of the card holder.
#
# @return [String]
attr_accessor :last_name
# Required for Switch / Solo cards
attr_accessor :start_month, :start_year, :issue_number
# Returns or sets the card verification value.
#
# This attribute is optional but recommended. The verification value is
# a {card security code}[http://en.wikipedia.org/wiki/Card_security_code]. If provided,
# the gateway will attempt to validate the value.
#
# @return [String] the verification value
attr_accessor :verification_value
# Read-only alias: #brand returns the same value as #type.
# NOTE(review): no brand= writer is defined by this alias.
alias_method :brand, :type
# Provides proxy access to an expiry date object
#
# @return [ExpiryDate]
def expiry_date
ExpiryDate.new(@month, @year)
end
# Returns whether the credit card has expired.
#
# @return +true+ if the card has expired, +false+ otherwise
def expired?
expiry_date.expired?
end
# Returns whether either the +first_name+ or the +last_name+ attributes has been set.
def name?
first_name? || last_name?
end
# Returns whether the +first_name+ attribute has been set.
def first_name?
@first_name.present?
end
# Returns whether the +last_name+ attribute has been set.
def last_name?
@last_name.present?
end
# Returns the full name of the card holder.
#
# @return [String] the full name of the card holder
def name
[@first_name, @last_name].compact.join(' ')
end
# Splits a full name on whitespace: the final word becomes +last_name+,
# everything before it becomes +first_name+.
def name=(full_name)
names = full_name.split
self.last_name = names.pop
self.first_name = names.join(" ")
end
def verification_value?
!@verification_value.blank?
end
# Returns a display-friendly version of the card number.
#
# All but the last 4 numbers are replaced with an "X", and hyphens are
# inserted in order to improve legibility.
#
# @example
# credit_card = CreditCard.new(:number => "2132542376824338")
# credit_card.display_number # "XXXX-XXXX-XXXX-4338"
#
# @return [String] a display-friendly version of the card number
def display_number
self.class.mask(number)
end
def last_digits
self.class.last_digits(number)
end
# Validates the credit card details.
#
# Any validation errors are added to the {#errors} attribute.
# Note: essential attributes (name and expiry) are validated even for the
# 'bogus' type; the remaining checks are skipped for it.
def validate
validate_essential_attributes
# Bogus card is pretty much for testing purposes. Lets just skip these extra tests if its used
return if type == 'bogus'
validate_card_type
validate_card_number
validate_verification_value
validate_switch_or_solo_attributes
end
def self.requires_verification_value?
require_verification_value
end
private
# Normalizes attributes before validation: coerces dates to integers,
# strips non-digits from the number, lowercases the type, and infers the
# type from the number when it is blank.
def before_validate #:nodoc:
self.month = month.to_i
self.year = year.to_i
self.start_month = start_month.to_i unless start_month.nil?
self.start_year = start_year.to_i unless start_year.nil?
self.number = number.to_s.gsub(/[^\d]/, "")
self.type.downcase! if type.respond_to?(:downcase)
self.type = self.class.type?(number) if type.blank?
end
# Number must be present and pass the format/checksum check; the
# number-vs-type cross-check runs only when neither field already has errors.
def validate_card_number #:nodoc:
if number.blank?
errors.add :number, "is required"
elsif !CreditCard.valid_number?(number)
errors.add :number, "is not a valid credit card number"
end
unless errors.on(:number) || errors.on(:type)
errors.add :type, "is not the correct card type" unless CreditCard.matching_type?(number, type)
end
end
def validate_card_type #:nodoc:
errors.add :type, "is required" if type.blank? && number.present?
errors.add :type, "is invalid" unless type.blank? || CreditCard.card_companies.keys.include?(type)
end
def validate_essential_attributes #:nodoc:
errors.add :first_name, "cannot be empty" if @first_name.blank?
errors.add :last_name, "cannot be empty" if @last_name.blank?
if @month.to_i.zero? || @year.to_i.zero?
errors.add :month, "is required" if @month.to_i.zero?
errors.add :year, "is required" if @year.to_i.zero?
else
errors.add :month, "is not a valid month" unless valid_month?(@month)
errors.add :year, "expired" if expired?
errors.add :year, "is not a valid year" unless expired? || valid_expiry_year?(@year)
end
end
# Switch/Solo cards need either a complete valid start date (month AND
# year) or a valid issue number. `&&` binds tighter than `||`, which
# encodes exactly that either/or rule.
def validate_switch_or_solo_attributes #:nodoc:
if %w[switch solo].include?(type)
unless valid_month?(@start_month) && valid_start_year?(@start_year) || valid_issue_number?(@issue_number)
errors.add :start_month, "is invalid" unless valid_month?(@start_month)
errors.add :start_year, "is invalid" unless valid_start_year?(@start_year)
errors.add :issue_number, "cannot be empty" unless valid_issue_number?(@issue_number)
end
end
end
def validate_verification_value #:nodoc:
if CreditCard.requires_verification_value?
errors.add :verification_value, "is required" unless verification_value?
end
end
end
end
end
Added an additional attribute accessor, card_id, to hold the MES-provided token id
require 'time'
require 'date'
require 'active_merchant/billing/expiry_date'
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
# A +CreditCard+ object represents a physical credit card, and is capable of validating the various
# data associated with these.
#
# At the moment, the following credit card types are supported:
#
# * Visa
# * MasterCard
# * Discover
# * American Express
# * Diner's Club
# * JCB
# * Switch
# * Solo
# * Dankort
# * Maestro
# * Forbrugsforeningen
# * Laser
#
# For testing purposes, use the 'bogus' credit card type. This skips the vast majority of
# validations, allowing you to focus on your core concerns until you're ready to be more concerned
# with the details of particular credit cards or your gateway.
#
# == Testing With CreditCard
# Often when testing we don't care about the particulars of a given card type. When using the 'test'
# mode in your {Gateway}, there are six different valid card numbers: 1, 2, 3, 'success', 'fail',
# and 'error'.
#
# For details, see {CreditCardMethods::ClassMethods#valid_number?}
#
# == Example Usage
# cc = CreditCard.new(
# :first_name => 'Steve',
# :last_name => 'Smith',
# :month => '9',
# :year => '2010',
# :type => 'visa',
# :number => '4242424242424242'
# )
#
# cc.valid? # => true
# cc.display_number # => XXXX-XXXX-XXXX-4242
#
class CreditCard
include CreditCardMethods
include Validateable
# Class-level switch: when true (the default), #validate requires a
# verification value to be present (see #validate_verification_value).
cattr_accessor :require_verification_value
self.require_verification_value = true
# Returns or sets the credit card number.
#
# @return [String]
attr_accessor :number
# Returns or sets the expiry month for the card.
#
# @return [Integer]
attr_accessor :month
# Returns or sets the expiry year for the card.
#
# @return [Integer]
attr_accessor :year
# Returns or sets the credit card type.
#
# Valid card types are
#
# * +'visa'+
# * +'master'+
# * +'discover'+
# * +'american_express'+
# * +'diners_club'+
# * +'jcb'+
# * +'switch'+
# * +'solo'+
# * +'dankort'+
# * +'maestro'+
# * +'forbrugsforeningen'+
# * +'laser'+
#
# Or, if you wish to test your implementation, +'bogus'+.
#
# @return (String) the credit card type
attr_accessor :type
# Returns or sets the first name of the card holder.
#
# @return [String]
attr_accessor :first_name
# Returns or sets the last name of the card holder.
#
# @return [String]
attr_accessor :last_name
# Required for Switch / Solo cards
attr_accessor :start_month, :start_year, :issue_number
# Returns or sets the card verification value.
#
# This attribute is optional but recommended. The verification value is
# a {card security code}[http://en.wikipedia.org/wiki/Card_security_code]. If provided,
# the gateway will attempt to validate the value.
#
# @return [String] the verification value
attr_accessor :verification_value
# Because MES ONLY stores card numbers when implementing their
# card store feature, additional data such as expiration date,
# name, and address are required to clear most transactions with
# a stored credit card. Because of that, the easiest way to
# package a card id token from MES along with that additional data
# for submission to the MES gateway is to package it along with
# the ActiveMerchant::Billing::CreditCard. Therefore, create an
# attribute accessor to hold the card id token
attr_accessor :card_id
# Read-only alias: #brand returns the same value as #type.
# NOTE(review): no brand= writer is defined by this alias.
alias_method :brand, :type
# Provides proxy access to an expiry date object
#
# @return [ExpiryDate]
def expiry_date
ExpiryDate.new(@month, @year)
end
# Returns whether the credit card has expired.
#
# @return +true+ if the card has expired, +false+ otherwise
def expired?
expiry_date.expired?
end
# Returns whether either the +first_name+ or the +last_name+ attributes has been set.
def name?
first_name? || last_name?
end
# Returns whether the +first_name+ attribute has been set.
def first_name?
@first_name.present?
end
# Returns whether the +last_name+ attribute has been set.
def last_name?
@last_name.present?
end
# Returns the full name of the card holder.
#
# @return [String] the full name of the card holder
def name
[@first_name, @last_name].compact.join(' ')
end
# Splits a full name on whitespace: the final word becomes +last_name+,
# everything before it becomes +first_name+.
def name=(full_name)
names = full_name.split
self.last_name = names.pop
self.first_name = names.join(" ")
end
def verification_value?
!@verification_value.blank?
end
# Returns a display-friendly version of the card number.
#
# All but the last 4 numbers are replaced with an "X", and hyphens are
# inserted in order to improve legibility.
#
# @example
# credit_card = CreditCard.new(:number => "2132542376824338")
# credit_card.display_number # "XXXX-XXXX-XXXX-4338"
#
# @return [String] a display-friendly version of the card number
def display_number
self.class.mask(number)
end
def last_digits
self.class.last_digits(number)
end
# Validates the credit card details.
#
# Any validation errors are added to the {#errors} attribute.
# Note: essential attributes (name and expiry) are validated even for the
# 'bogus' type; the remaining checks are skipped for it.
def validate
validate_essential_attributes
# Bogus card is pretty much for testing purposes. Lets just skip these extra tests if its used
return if type == 'bogus'
validate_card_type
validate_card_number
validate_verification_value
validate_switch_or_solo_attributes
end
def self.requires_verification_value?
require_verification_value
end
private
# Normalizes attributes before validation: coerces dates to integers,
# strips non-digits from the number, lowercases the type, and infers the
# type from the number when it is blank.
def before_validate #:nodoc:
self.month = month.to_i
self.year = year.to_i
self.start_month = start_month.to_i unless start_month.nil?
self.start_year = start_year.to_i unless start_year.nil?
self.number = number.to_s.gsub(/[^\d]/, "")
self.type.downcase! if type.respond_to?(:downcase)
self.type = self.class.type?(number) if type.blank?
end
# Number must be present and pass the format/checksum check; the
# number-vs-type cross-check runs only when neither field already has errors.
def validate_card_number #:nodoc:
if number.blank?
errors.add :number, "is required"
elsif !CreditCard.valid_number?(number)
errors.add :number, "is not a valid credit card number"
end
unless errors.on(:number) || errors.on(:type)
errors.add :type, "is not the correct card type" unless CreditCard.matching_type?(number, type)
end
end
def validate_card_type #:nodoc:
errors.add :type, "is required" if type.blank? && number.present?
errors.add :type, "is invalid" unless type.blank? || CreditCard.card_companies.keys.include?(type)
end
def validate_essential_attributes #:nodoc:
errors.add :first_name, "cannot be empty" if @first_name.blank?
errors.add :last_name, "cannot be empty" if @last_name.blank?
if @month.to_i.zero? || @year.to_i.zero?
errors.add :month, "is required" if @month.to_i.zero?
errors.add :year, "is required" if @year.to_i.zero?
else
errors.add :month, "is not a valid month" unless valid_month?(@month)
errors.add :year, "expired" if expired?
errors.add :year, "is not a valid year" unless expired? || valid_expiry_year?(@year)
end
end
# Switch/Solo cards need either a complete valid start date (month AND
# year) or a valid issue number. `&&` binds tighter than `||`, which
# encodes exactly that either/or rule.
def validate_switch_or_solo_attributes #:nodoc:
if %w[switch solo].include?(type)
unless valid_month?(@start_month) && valid_start_year?(@start_year) || valid_issue_number?(@issue_number)
errors.add :start_month, "is invalid" unless valid_month?(@start_month)
errors.add :start_year, "is invalid" unless valid_start_year?(@start_year)
errors.add :issue_number, "cannot be empty" unless valid_issue_number?(@issue_number)
end
end
end
def validate_verification_value #:nodoc:
if CreditCard.requires_verification_value?
errors.add :verification_value, "is required" unless verification_value?
end
end
end
end
end
|
# This file is part of Mconf-Web, a web application that provides access
# to the Mconf webconferencing system. Copyright (C) 2010-2012 Mconf
#
# This file is licensed under the Affero General Public License version
# 3 or later. See the LICENSE file.
module ActiveResource
  module Formats
    # Minimal ActiveResource format for endpoints that speak raw HTML.
    # Encoding always produces an empty body; decoding wraps the raw
    # response in a one-key hash so callers can read it via :html.
    module HtmlFormat
      extend self

      # File extension appended to resource URLs.
      def extension
        'html'
      end

      # MIME type advertised in request headers.
      def mime_type
        'text/html'
      end

      # HTML is never serialized from attribute hashes; always empty.
      def encode(_hash, _options = nil)
        ''
      end

      # Wrap the raw HTML body so it can be accessed as a hash value.
      def decode(html)
        { :html => html }
      end
    end
  end
end
Remove old unused class
refs #1398
|
module ActiveScaffold::Config
# ActiveScaffold action configuration for creating many records in one step.
class BatchCreate < ActiveScaffold::Config::Form
self.crud_type = :create
# Copies class-level defaults onto this instance; reuses the create
# action's multipart setting when the create action is enabled.
def initialize(*args)
super
@multipart = @core.create.multipart? if @core.actions.include? :create
@process_mode = self.class.process_mode
@list_mode_enabled = self.class.list_mode_enabled
@run_in_transaction = self.class.run_in_transaction
@layout = self.class.layout
end
# global level configuration
# --------------------------
# the ActionLink for this action
def self.link
@@link
end
def self.link=(val)
@@link = val
end
# NOTE(review): @@link is a class variable, shared across the inheritance
# tree -- reassigning it anywhere affects every subclass.
@@link = ActiveScaffold::DataStructures::ActionLink.new('batch_new', :label => :create, :type => :collection, :security_method => :batch_create_authorized?, :ignore_method => :batch_create_ignore?)
# configures where the plugin itself is located. there is no instance version of this.
cattr_accessor :plugin_directory
@@plugin_directory = File.expand_path(__FILE__).match(%{(^.*)/lib/active_scaffold/config/batch_create.rb})[1]
# configures how batch create should be processed
# :create => standard activerecord create including validations
cattr_accessor :process_mode
@@process_mode = :create
# you may update all records in list view or all marked records
# you might disable list mode with this switch if you think it is
# too "dangerous"
cattr_accessor :list_mode_enabled
@@list_mode_enabled = true
# run all create statements in a transaction, so no record is created
# if someone fails
cattr_accessor :run_in_transaction
@@run_in_transaction = true
# layout for create multiple records
cattr_accessor :layout
@@layout = :vertical
# instance-level configuration
# ----------------------------
# see class accessor
attr_accessor :process_mode
attr_accessor :list_mode_enabled
# you may use create_batch to create a record for each record
# of a belong_to association (reverse must be has_many)
# eg. player belongs to team
# you may batch create a player records for a list of teams
attr_accessor :default_batch_by_column
# run all create statements in a transaction, so no record is created
# if someone fails
attr_accessor :run_in_transaction
# layout for create multiple records
attr_accessor :layout
# Link group: 'collection.group' when batching by a column, otherwise the
# plain 'collection' group. An explicitly set @action_group wins.
def action_group
@action_group || (default_batch_by_column ? 'collection.group' : 'collection')
end
# the label= method already exists in the Form base class
# Returns a pluralized, translated label for the action.
def label(model = nil)
model ||= @core.label(:count => 2)
@label ? as_(@label) : as_(:create_model, :model => model)
end
end
end
use collection.batch group, as the update and destroy actions do
module ActiveScaffold::Config
# ActiveScaffold action configuration for creating many records in one step.
class BatchCreate < ActiveScaffold::Config::Form
self.crud_type = :create
# Copies class-level defaults onto this instance; reuses the create
# action's multipart setting when the create action is enabled.
def initialize(*args)
super
@multipart = @core.create.multipart? if @core.actions.include? :create
@process_mode = self.class.process_mode
@list_mode_enabled = self.class.list_mode_enabled
@run_in_transaction = self.class.run_in_transaction
@layout = self.class.layout
end
# global level configuration
# --------------------------
# the ActionLink for this action
def self.link
@@link
end
def self.link=(val)
@@link = val
end
# NOTE(review): @@link is a class variable, shared across the inheritance
# tree -- reassigning it anywhere affects every subclass.
@@link = ActiveScaffold::DataStructures::ActionLink.new('batch_new', :label => :create, :type => :collection, :security_method => :batch_create_authorized?, :ignore_method => :batch_create_ignore?)
# configures where the plugin itself is located. there is no instance version of this.
cattr_accessor :plugin_directory
@@plugin_directory = File.expand_path(__FILE__).match(%{(^.*)/lib/active_scaffold/config/batch_create.rb})[1]
# configures how batch create should be processed
# :create => standard activerecord create including validations
cattr_accessor :process_mode
@@process_mode = :create
# you may update all records in list view or all marked records
# you might disable list mode with this switch if you think it is
# too "dangerous"
cattr_accessor :list_mode_enabled
@@list_mode_enabled = true
# run all create statements in a transaction, so no record is created
# if someone fails
cattr_accessor :run_in_transaction
@@run_in_transaction = true
# layout for create multiple records
cattr_accessor :layout
@@layout = :vertical
# instance-level configuration
# ----------------------------
# see class accessor
attr_accessor :process_mode
attr_accessor :list_mode_enabled
# you may use create_batch to create a record for each record
# of a belong_to association (reverse must be has_many)
# eg. player belongs to team
# you may batch create a player records for a list of teams
attr_accessor :default_batch_by_column
# run all create statements in a transaction, so no record is created
# if someone fails
attr_accessor :run_in_transaction
# layout for create multiple records
attr_accessor :layout
# Link group: 'collection.batch' when batching by a column (matching the
# update/destroy batch actions), otherwise the plain 'collection' group.
def action_group
@action_group || (default_batch_by_column ? 'collection.batch' : 'collection')
end
# the label= method already exists in the Form base class
# Returns a pluralized, translated label for the action.
def label(model = nil)
model ||= @core.label(:count => 2)
@label ? as_(@label) : as_(:create_model, :model => model)
end
end
end
|
require "fileutils"
require "pathname"
# Stores blobs as plain files on the local filesystem, sharded into
# two-level folders derived from the first four characters of the key.
class ActiveStorage::Service::DiskService < ActiveStorage::Service
  # Streaming buffer size in bytes (64 KiB) so large blobs are never
  # held in memory whole. (Was a repeated magic number.)
  CHUNK_SIZE = 65_536

  attr_reader :root

  # root: base directory under which all blob files live.
  def initialize(root:)
    @root = root
  end

  # Copies +io+ to disk under +key+, creating parent folders as needed.
  def upload(key, io)
    File.open(make_path_for(key), "wb") do |file|
      while chunk = io.read(CHUNK_SIZE)
        file.write(chunk)
      end
    end
  end

  # Yields the blob in CHUNK_SIZE pieces when a block is given,
  # otherwise returns the entire file contents as one string.
  def download(key)
    if block_given?
      File.open(path_for(key)) do |file|
        while data = file.read(CHUNK_SIZE)
          yield data
        end
      end
    else
      File.open path_for(key), &:read
    end
  end

  # Deletes the blob file. A missing file is not an error.
  # Fixed: the previous modifier `rescue Errno::ENOENT` swallowed EVERY
  # StandardError (EACCES, EISDIR, ...) and merely returned the
  # Errno::ENOENT class; now only "file not found" is ignored.
  def delete(key)
    File.delete path_for(key)
  rescue Errno::ENOENT
    # Ignore files already deleted
  end

  def exist?(key)
    File.exist? path_for(key)
  end

  # Returns a signed URL (expiring after +expires_in+) for serving the blob.
  def url(key, expires_in:, disposition:, filename:)
    verified_key_with_expiration = ActiveStorage::VerifiedKeyWithExpiration.encode(key, expires_in: expires_in)
    if defined?(Rails) && defined?(Rails.application)
      Rails.application.routes.url_helpers.rails_disk_blob_path(verified_key_with_expiration, disposition: disposition)
    else
      "/rails/blobs/#{verified_key_with_expiration}?disposition=#{disposition}"
    end
  end

  def byte_size(key)
    File.size path_for(key)
  end

  # MD5 hex digest of the blob on disk, computed without loading it fully.
  def checksum(key)
    Digest::MD5.file(path_for(key)).hexdigest
  end

  private
    # Absolute path of the file for +key+, e.g. root/ab/cd/abcd1234....
    def path_for(key)
      File.join root, folder_for(key), key
    end

    # Two-level shard folder from the key's first four characters.
    def folder_for(key)
      [ key[0..1], key[2..3] ].join("/")
    end

    # Like path_for, but ensures the parent directories exist.
    def make_path_for(key)
      path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
    end
end
Convert magic number to constant
require "fileutils"
require "pathname"
# Stores blobs as plain files on the local filesystem, sharded into
# two-level folders derived from the first four characters of the key.
class ActiveStorage::Service::DiskService < ActiveStorage::Service
  # Streaming buffer size in bytes (64 KiB) so large blobs are never
  # held in memory whole.
  CHUNK_SIZE = 65536

  attr_reader :root

  # root: base directory under which all blob files live.
  def initialize(root:)
    @root = root
  end

  # Copies +io+ to disk under +key+, creating parent folders as needed.
  def upload(key, io)
    File.open(make_path_for(key), "wb") do |file|
      while chunk = io.read(CHUNK_SIZE)
        file.write(chunk)
      end
    end
  end

  # Yields the blob in CHUNK_SIZE pieces when a block is given,
  # otherwise returns the entire file contents as one string.
  def download(key)
    if block_given?
      File.open(path_for(key)) do |file|
        while data = file.read(CHUNK_SIZE)
          yield data
        end
      end
    else
      File.open path_for(key), &:read
    end
  end

  # Deletes the blob file. A missing file is not an error.
  # Fixed: the previous modifier `rescue Errno::ENOENT` swallowed EVERY
  # StandardError (EACCES, EISDIR, ...) and merely returned the
  # Errno::ENOENT class; now only "file not found" is ignored.
  def delete(key)
    File.delete path_for(key)
  rescue Errno::ENOENT
    # Ignore files already deleted
  end

  def exist?(key)
    File.exist? path_for(key)
  end

  # Returns a signed URL (expiring after +expires_in+) for serving the blob.
  def url(key, expires_in:, disposition:, filename:)
    verified_key_with_expiration = ActiveStorage::VerifiedKeyWithExpiration.encode(key, expires_in: expires_in)
    if defined?(Rails) && defined?(Rails.application)
      Rails.application.routes.url_helpers.rails_disk_blob_path(verified_key_with_expiration, disposition: disposition)
    else
      "/rails/blobs/#{verified_key_with_expiration}?disposition=#{disposition}"
    end
  end

  def byte_size(key)
    File.size path_for(key)
  end

  # MD5 hex digest of the blob on disk, computed without loading it fully.
  def checksum(key)
    Digest::MD5.file(path_for(key)).hexdigest
  end

  private
    # Absolute path of the file for +key+, e.g. root/ab/cd/abcd1234....
    def path_for(key)
      File.join root, folder_for(key), key
    end

    # Two-level shard folder from the key's first four characters.
    def folder_for(key)
      [ key[0..1], key[2..3] ].join("/")
    end

    # Like path_for, but ensures the parent directories exist.
    def make_path_for(key)
      path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
    end
end
|
# -*- encoding: utf-8 -*-
# Gem specification for jewelry_portfolio 0.3.0 (auto-generated style).
Gem::Specification.new do |s|
s.name = %q{jewelry_portfolio}
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Eloy Duran"]
s.date = %q{2009-03-05}
s.description = %q{Imagine writing an erb template once and use rake portfolio:release to generate and push the index.html for your GitHub pages. If that sounds good to you, you're in luck. Because that's exactly what this gem does.}
s.email = %q{eloy.de.enige@gmail.com}
s.extra_rdoc_files = ["README.rdoc", "LICENSE"]
s.files = ["README.rdoc", "VERSION.yml", "lib/jewelry_portfolio", "lib/jewelry_portfolio/repo.rb", "lib/jewelry_portfolio/repos_index.rb", "lib/jewelry_portfolio/tasks.rb", "lib/jewelry_portfolio/template", "lib/jewelry_portfolio/template/feed.rb", "lib/jewelry_portfolio/template/html.rb", "lib/jewelry_portfolio/template.rb", "lib/jewelry_portfolio.rb", "test/fixtures", "test/fixtures/alloy.github.com", "test/fixtures/alloy.github.com/feed.rb", "test/fixtures/alloy.github.com/feed.xml", "test/fixtures/alloy.github.com/index.erb", "test/fixtures/alloy.github.com/index.html", "test/fixtures/alloy.github.com/repos.yml", "test/fixtures/alloy.github.com.tgz", "test/fixtures/dr-nic-magic-awesome.gemspec_", "test/fixtures/dr-nic-magic-awesome.html", "test/fixtures/dr-nic-magic-awesome_repo.yml", "test/fixtures/feed_with_defaults.rb", "test/fixtures/feed_with_defaults.xml", "test/fixtures/feed_with_options.rb", "test/fixtures/feed_with_options.xml", "test/fixtures/index.erb", "test/fixtures/index.html", "test/fixtures/microgem.gemspec_", "test/fixtures/repos.yml", "test/jewelry_portfolio_test.rb", "test/repo_test.rb", "test/repos_index_test.rb", "test/tasks_test.rb", "test/template_test.rb", "test/test_helper.rb", "test/tmp", "test/tmp/alloy.github.com.git", "test/tmp/alloy.github.com.git/feed.rb", "test/tmp/alloy.github.com.git/feed.xml", "test/tmp/alloy.github.com.git/index.erb", "test/tmp/alloy.github.com.git/index.html", "test/tmp/alloy.github.com.git/repos.yml", "LICENSE"]
# NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems.
s.has_rdoc = true
s.homepage = %q{http://github.com/alloy/repo_page_san}
s.rdoc_options = ["--inline-source", "--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.1}
s.summary = %q{A template renderer, and rake tasks, for lazy developers who would like to showcase their jewelry portfolio (libraries) on their GitHub pages.}
# Declare dependencies, falling back to the pre-1.2 RubyGems API
# (add_dependency) when add_runtime_dependency is unavailable.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<builder>, [">= 0"])
s.add_runtime_dependency(%q<schacon-git>, [">= 0"])
else
s.add_dependency(%q<builder>, [">= 0"])
s.add_dependency(%q<schacon-git>, [">= 0"])
end
else
s.add_dependency(%q<builder>, [">= 0"])
s.add_dependency(%q<schacon-git>, [">= 0"])
end
end
Regenerated gemspec for version 0.3.1
# -*- encoding: utf-8 -*-
# Gem specification for jewelry_portfolio 0.3.1 (auto-generated style).
Gem::Specification.new do |s|
s.name = %q{jewelry_portfolio}
s.version = "0.3.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Eloy Duran"]
s.date = %q{2009-03-05}
s.description = %q{Imagine writing an erb template once and use rake portfolio:release to generate and push the index.html for your GitHub pages. If that sounds good to you, you're in luck. Because that's exactly what this gem does.}
s.email = %q{eloy.de.enige@gmail.com}
s.extra_rdoc_files = ["README.rdoc", "LICENSE"]
s.files = ["README.rdoc", "VERSION.yml", "lib/jewelry_portfolio", "lib/jewelry_portfolio/repo.rb", "lib/jewelry_portfolio/repos_index.rb", "lib/jewelry_portfolio/tasks.rb", "lib/jewelry_portfolio/template", "lib/jewelry_portfolio/template/feed.rb", "lib/jewelry_portfolio/template/html.rb", "lib/jewelry_portfolio/template.rb", "lib/jewelry_portfolio.rb", "test/fixtures", "test/fixtures/alloy.github.com", "test/fixtures/alloy.github.com/feed.rb", "test/fixtures/alloy.github.com/feed.xml", "test/fixtures/alloy.github.com/index.erb", "test/fixtures/alloy.github.com/index.html", "test/fixtures/alloy.github.com/repos.yml", "test/fixtures/alloy.github.com.tgz", "test/fixtures/dr-nic-magic-awesome.gemspec_", "test/fixtures/dr-nic-magic-awesome.html", "test/fixtures/dr-nic-magic-awesome_repo.yml", "test/fixtures/feed_with_defaults.rb", "test/fixtures/feed_with_defaults.xml", "test/fixtures/feed_with_options.rb", "test/fixtures/feed_with_options.xml", "test/fixtures/index.erb", "test/fixtures/index.html", "test/fixtures/microgem.gemspec_", "test/fixtures/repos.yml", "test/jewelry_portfolio_test.rb", "test/repo_test.rb", "test/repos_index_test.rb", "test/tasks_test.rb", "test/template_test.rb", "test/test_helper.rb", "test/tmp", "test/tmp/alloy.github.com.git", "test/tmp/alloy.github.com.git/feed.rb", "test/tmp/alloy.github.com.git/feed.xml", "test/tmp/alloy.github.com.git/index.erb", "test/tmp/alloy.github.com.git/index.html", "test/tmp/alloy.github.com.git/repos.yml", "LICENSE"]
# NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems.
s.has_rdoc = true
s.homepage = %q{http://github.com/alloy/repo_page_san}
s.rdoc_options = ["--inline-source", "--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.1}
s.summary = %q{A template renderer, and rake tasks, for lazy developers who would like to showcase their jewelry portfolio (libraries) on their GitHub pages.}
# Declare dependencies, falling back to the pre-1.2 RubyGems API
# (add_dependency) when add_runtime_dependency is unavailable.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<builder>, [">= 0"])
s.add_runtime_dependency(%q<schacon-git>, [">= 0"])
else
s.add_dependency(%q<builder>, [">= 0"])
s.add_dependency(%q<schacon-git>, [">= 0"])
end
else
s.add_dependency(%q<builder>, [">= 0"])
s.add_dependency(%q<schacon-git>, [">= 0"])
end
end
|
module ActsAsSimpleTranslatable
  # Gem release version string.
  VERSION = "0.1.0"
end
Updated version
module ActsAsSimpleTranslatable
  # Gem release version string.
  VERSION = "0.1.1"
end
|
require 'gserver'
module Adhearsion
  module VoIP
    module Asterisk
      module AGI
        # TCP server which accepts AGI connections from Asterisk and hands
        # each one to the Adhearsion dial plan for processing.
        class Server
          # GServer-backed implementation: one thread per AGI connection,
          # with no limit on concurrent connections.
          class RubyServer < GServer
            def initialize(port, host)
              super(port, host, (1.0/0.0)) # (1.0/0.0) == Infinity
            end

            # GServer hook invoked when a client socket disconnects.
            # NOTE(review): @call is never assigned in this class (serve only
            # uses a local variable), so the cancel branch appears to be dead
            # code -- confirm before relying on it.
            def disconnecting(port)
              @call.deliver_message :cancel if !@call.nil?
              super(port)
            end

            # Serves one AGI connection: builds a call from the socket's AGI
            # headers, fires lifecycle events, runs the dial plan, and maps
            # the various meta-call exceptions onto events and hangups.
            def serve(io)
              begin
                call = Adhearsion.receive_call_from(io)
              rescue EOFError
                # We didn't get the initial headers we were expecting
                return
              end
              Events.trigger_immediately([:asterisk, :before_call], call)
              ahn_log.agi.debug "Handling call with variables #{call.variables.inspect}"
              return DialPlan::ConfirmationManager.handle(call) if DialPlan::ConfirmationManager.confirmation_call?(call)
              # This is what happens 99.9% of the time.
              DialPlan::Manager.handle call
            rescue Hangup
              ahn_log.agi "HANGUP event for call with uniqueid #{call.variables[:uniqueid].inspect} and channel #{call.variables[:channel].inspect}"
              Events.trigger_immediately([:asterisk, :after_call], call)
              call.hangup!
            rescue DialPlan::Manager::NoContextError => e
              ahn_log.agi e.message
              call.hangup!
            rescue FailedExtensionCallException => failed_call
              begin
                # Fixed doubled word ("Executing Executing") in this log message.
                ahn_log.agi "Received \"failed\" meta-call with :failed_reason => #{failed_call.call.failed_reason.inspect}. Executing /asterisk/failed_call event callbacks."
                Events.trigger [:asterisk, :failed_call], failed_call.call
                call.hangup!
              rescue => e
                ahn_log.agi.error e
              end
            rescue HungupExtensionCallException => hungup_call
              begin
                ahn_log.agi "Received \"h\" meta-call. Executing /asterisk/hungup_call event callbacks."
                Events.trigger [:asterisk, :hungup_call], hungup_call.call
                call.hangup!
              rescue => e
                ahn_log.agi.error e
              end
            rescue UselessCallException
              ahn_log.agi "Ignoring meta-AGI request"
              call.hangup!
            # TBD: (may have more hooks than what Jay has defined in hooks.rb)
            rescue SyntaxError, StandardError => e
              Events.trigger(['exception'], e)
            ensure
              Adhearsion.remove_inactive_call call rescue nil
            end
          end

          # Defaults applied when options are omitted from #initialize.
          DEFAULT_OPTIONS = { :server_class => RubyServer, :port => 4573, :host => "0.0.0.0" } unless defined? DEFAULT_OPTIONS

          attr_reader :host, :port, :server_class, :server

          def initialize(options = {})
            options = DEFAULT_OPTIONS.merge options
            @host, @port, @server_class = options.values_at(:host, :port, :server_class)
            @server = server_class.new(port, host)
          end

          def start
            server.audit = true
            server.start
          end

          # Waits for in-flight connections to drain, then stops. Calling a
          # second time while draining stops immediately.
          def graceful_shutdown
            if @shutting_down
              server.stop
              return
            end
            @shutting_down = true
            while server.connections > 0
              sleep 0.2
            end
            server.stop
          end

          def shutdown
            server.shutdown
          end

          def stop
            server.stop
          end

          def join
            server.join
          end
        end
      end
    end
  end
end
Have the AGI server log to the AGI logger rather than direct to stdout
require 'gserver'
module Adhearsion
  module VoIP
    module Asterisk
      module AGI
        # TCP server which accepts AGI connections from Asterisk and hands
        # each one to the Adhearsion dial plan for processing.
        class Server
          # GServer-backed implementation: one thread per AGI connection,
          # with no limit on concurrent connections. GServer's log/error
          # hooks are overridden so audit output goes to the AGI logger.
          class RubyServer < GServer
            def initialize(port, host)
              super(port, host, (1.0/0.0)) # (1.0/0.0) == Infinity
            end

            # GServer hook invoked when a client socket disconnects.
            # NOTE(review): @call is never assigned in this class (serve only
            # uses a local variable), so the cancel branch appears to be dead
            # code -- confirm before relying on it.
            def disconnecting(port)
              @call.deliver_message :cancel if !@call.nil?
              super(port)
            end

            # Serves one AGI connection: builds a call from the socket's AGI
            # headers, fires lifecycle events, runs the dial plan, and maps
            # the various meta-call exceptions onto events and hangups.
            def serve(io)
              begin
                call = Adhearsion.receive_call_from(io)
              rescue EOFError
                # We didn't get the initial headers we were expecting
                return
              end
              Events.trigger_immediately([:asterisk, :before_call], call)
              ahn_log.agi.debug "Handling call with variables #{call.variables.inspect}"
              return DialPlan::ConfirmationManager.handle(call) if DialPlan::ConfirmationManager.confirmation_call?(call)
              # This is what happens 99.9% of the time.
              DialPlan::Manager.handle call
            rescue Hangup
              ahn_log.agi "HANGUP event for call with uniqueid #{call.variables[:uniqueid].inspect} and channel #{call.variables[:channel].inspect}"
              Events.trigger_immediately([:asterisk, :after_call], call)
              call.hangup!
            rescue DialPlan::Manager::NoContextError => e
              ahn_log.agi e.message
              call.hangup!
            rescue FailedExtensionCallException => failed_call
              begin
                # Fixed doubled word ("Executing Executing") in this log message.
                ahn_log.agi "Received \"failed\" meta-call with :failed_reason => #{failed_call.call.failed_reason.inspect}. Executing /asterisk/failed_call event callbacks."
                Events.trigger [:asterisk, :failed_call], failed_call.call
                call.hangup!
              rescue => e
                ahn_log.agi.error e
              end
            rescue HungupExtensionCallException => hungup_call
              begin
                ahn_log.agi "Received \"h\" meta-call. Executing /asterisk/hungup_call event callbacks."
                Events.trigger [:asterisk, :hungup_call], hungup_call.call
                call.hangup!
              rescue => e
                ahn_log.agi.error e
              end
            rescue UselessCallException
              ahn_log.agi "Ignoring meta-AGI request"
              call.hangup!
            # TBD: (may have more hooks than what Jay has defined in hooks.rb)
            rescue SyntaxError, StandardError => e
              Events.trigger(['exception'], e)
            ensure
              Adhearsion.remove_inactive_call call rescue nil
            end

            # GServer audit-log hook: route connection messages to the AGI
            # logger instead of stdout.
            def log(msg)
              ahn_log.agi msg
            end

            # GServer error hook: log exception backtraces via the AGI logger.
            def error(detail)
              ahn_log.agi.error detail.backtrace.join("\n")
            end
          end

          # Defaults applied when options are omitted from #initialize.
          DEFAULT_OPTIONS = { :server_class => RubyServer, :port => 4573, :host => "0.0.0.0" } unless defined? DEFAULT_OPTIONS

          attr_reader :host, :port, :server_class, :server

          def initialize(options = {})
            options = DEFAULT_OPTIONS.merge options
            @host, @port, @server_class = options.values_at(:host, :port, :server_class)
            @server = server_class.new(port, host)
          end

          def start
            server.audit = true
            server.start
          end

          # Waits for in-flight connections to drain, then stops. Calling a
          # second time while draining stops immediately.
          def graceful_shutdown
            if @shutting_down
              server.stop
              return
            end
            @shutting_down = true
            while server.connections > 0
              sleep 0.2
            end
            server.stop
          end

          def shutdown
            server.shutdown
          end

          def stop
            server.stop
          end

          def join
            server.join
          end
        end
      end
    end
  end
end
|
Add active_document test
## -------------------------------------------------------------------
##
## Copyright (c) "2014" Dmitri Zagidulin and Basho Technologies, Inc.
##
## This file is provided to you under the Apache License,
## Version 2.0 (the "License"); you may not use this file
## except in compliance with the License. You may obtain
## a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing,
## software distributed under the License is distributed on an
## "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
## KIND, either express or implied. See the License for the
## specific language governing permissions and limitations
## under the License.
##
## -------------------------------------------------------------------
require 'test_helper'

# Behavioural checks for Riagent::ActiveDocument.
# The concrete model under test lives in test/models/user.rb.
describe "a Riagent::ActiveDocument" do
  # A fresh ActiveDocument instance per example.
  let(:doc) { User.new }

  it "extends Riagent::Document" do
    doc.must_be_kind_of Riagent::Document
  end

  it "should know its collection name" do
    # The collection name feeds ActiveModel::Conversion compatibility.
    User.collection_name.must_equal 'users'
  end

  it "uses its collection name to help form URLs" do
    doc.key = 'test-user-123'
    doc.to_partial_path.must_equal 'users/test-user-123'
  end
end
require 'test_helper'
require 'seek/upload_handling/data_upload'
require 'seek/upload_handling/examine_url'

# Unit tests for the Seek upload-handling mixins (DataUpload / ExamineUrl).
# The mixins are included directly into the test case, with the controller
# methods they rely on (params, flash, controller_name) mocked out below.
class UploadHandingTest < ActiveSupport::TestCase
  include Seek::UploadHandling::DataUpload
  include Seek::UploadHandling::ExamineUrl

  test 'valid scheme?' do
    # 'file' is the only scheme on the blacklist.
    assert_equal %w(file).sort, Seek::UploadHandling::ContentInspection::INVALID_SCHEMES.sort
    assert valid_scheme?('http://bbc.co.uk')
    assert valid_scheme?('https://bbc.co.uk')
    assert valid_scheme?('ftp://bbc.co.uk')
    assert valid_scheme?('ssh://bbc.co.uk')
    refute valid_scheme?('file:///secret/documents.txt')
  end

  test 'content_blob_params' do
    @params = { content_blobs: [{ fish: 1, soup: 2 }], data_file: { title: 'george' } }
    assert_equal([{ fish: 1, soup: 2 }], content_blob_params)
  end

  test 'default to http if missing' do
    # A scheme-less URL gets http:// prefixed in place.
    params = { data_url: 'fish.com/path?query=yes' }
    default_to_http_if_missing(params)
    assert_equal('http://fish.com/path?query=yes', params[:data_url])
    # An explicit scheme is left untouched.
    params[:data_url] = 'https://fish.com/path?query=yes'
    default_to_http_if_missing(params)
    assert_equal('https://fish.com/path?query=yes', params[:data_url])
    # nil stays nil.
    params[:data_url] = nil
    default_to_http_if_missing(params)
    assert_nil(params[:data_url])
    # A bare word with no path separators is not treated as a host.
    params[:data_url] = 'sdfhksdlfsdkfh'
    default_to_http_if_missing(params)
    assert_equal('sdfhksdlfsdkfh', params[:data_url])
  end

  test 'asset params' do
    # asset_params picks the sub-hash matching the (mocked) controller name.
    @params = ActionController::Parameters.new({ content_blob: { fish: 1, soup: 2 },
                                                 data_file: { title: 'george' },
                                                 sop: { title: 'mary' } })
    assert_equal 'george', asset_params[:title]
    assert_equal 1, asset_params.keys.length
    @controller_name = 'sops'
    assert_equal 'mary', asset_params[:title]
    assert_equal 1, asset_params.keys.length
  end

  test 'check url response code' do
    stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200, body: '', headers: { content_type: 'text/html', content_length: '555' })
    stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
    stub_request(:head, 'http://server-error.com').to_return(status: 500, body: '', headers: {})
    stub_request(:head, 'http://forbidden.com').to_return(status: 403, body: '', headers: {})
    stub_request(:head, 'http://unauthorized.com').to_return(status: 401, body: '', headers: {})
    stub_request(:head, 'http://methodnotallowed.com').to_return(status: 405, body: '', headers: {})
    assert_equal 200, check_url_response_code('http://bbc.co.uk')
    assert_equal 404, check_url_response_code('http://not-there.com')
    assert_equal 500, check_url_response_code('http://server-error.com')
    assert_equal 403, check_url_response_code('http://forbidden.com')
    assert_equal 401, check_url_response_code('http://unauthorized.com')
    assert_equal 405, check_url_response_code('http://methodnotallowed.com')
    # redirection will be followed
    stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
    stub_request(:head, 'http://moved2.com').to_return(status: 302, body: '', headers: { location: 'http://forbidden.com' })
    assert_equal 200, check_url_response_code('http://moved.com')
    assert_equal 403, check_url_response_code('http://moved2.com')
  end

  test 'fetch url headers' do
    stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200,
                                                       body: '',
                                                       headers: { content_type: 'text/html', content_length: '555' })
    headers = fetch_url_headers('http://bbc.co.uk')
    assert_equal 'text/html', headers[:content_type]
    # content_length comes back as an Integer under :file_size.
    assert_equal 555, headers[:file_size]
    stub_request(:head, 'http://somewhere.org/excel.xls').to_return(status: 200,
                                                                    body: '',
                                                                    headers: { content_type: 'application/vnd.ms-excel', content_length: '1111' })
    headers = fetch_url_headers('http://somewhere.org/excel.xls')
    assert_equal 'application/vnd.ms-excel', headers[:content_type]
    assert_equal 1111, headers[:file_size]
    stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
    assert_equal 404, fetch_url_headers('http://not-there.com')[:code]
    # follows redirection
    stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
    headers = fetch_url_headers('http://moved.com')
    assert_equal 'text/html', headers[:content_type]
    assert_equal 555, headers[:file_size]
  end

  test 'content type from filename' do
    # nil filename falls back to text/html (treated as a webpage).
    assert_equal 'text/html', content_type_from_filename(nil)
    # FIXME: , MERGENOTE - .xml gives an incorrect mime type of sbml+xml due to the ordering
    checks = [
      { f: 'test.jpg', t: 'image/jpeg' },
      { f: 'test.JPG', t: 'image/jpeg' },
      { f: 'test.png', t: 'image/png' },
      { f: 'test.PNG', t: 'image/png' },
      { f: 'test.jpeg', t: 'image/jpeg' },
      { f: 'test.JPEG', t: 'image/jpeg' },
      { f: 'test.xls', t: 'application/excel' },
      { f: 'test.doc', t: 'application/msword' },
      { f: 'test.xlsx', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { f: 'test.docx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
      { f: 'test.XLs', t: 'application/excel' },
      { f: 'test.Doc', t: 'application/msword' },
      { f: 'test.XLSX', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { f: 'test.dOCx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
      { f: 'unknown.xxx', t: 'application/octet-stream' },
      { f: nil, t: 'text/html' }
    ]
    checks.each do |check|
      assert_equal check[:t], content_type_from_filename(check[:f]), "Expected #{check[:t]} for #{check[:f]}"
    end
  end

  test 'content is webpage?' do
    assert content_is_webpage?('text/html')
    # Charset suffixes are tolerated.
    assert content_is_webpage?('text/html; charset=UTF-8')
    refute content_is_webpage?('application/zip')
    refute content_is_webpage?(nil)
  end

  test 'valid uri?' do
    assert valid_uri?('http://fish.com')
    assert valid_uri?('http://fish.com')
    # Trailing whitespace is tolerated.
    assert valid_uri?('http://fish.com ')
    assert valid_uri?('http://fish.com/fish.txt')
    assert valid_uri?('http://fish.com/fish.txt ')
    refute valid_uri?('x dd s')
    refute valid_uri?(nil)
  end

  test 'determine_filename_from_disposition' do
    # Both quoted and bare filename= forms are handled.
    assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename="_form.html.erb"')
    assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename=_form.html.erb')
    assert_equal '_form.html.erb', determine_filename_from_disposition('attachment; filename="_form.html.erb"')
    assert_nil determine_filename_from_disposition(nil)
    assert_nil determine_filename_from_disposition('')
  end

  test 'determine filename from url' do
    assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt')
    assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt ')
    # Percent-encoded path segments do not confuse basename extraction.
    assert_equal 'jenny.txt', determine_filename_from_url('http://place.com/here/he%20/jenny.txt')
    assert_nil determine_filename_from_url('http://place.com')
    assert_nil determine_filename_from_url('http://place.com/')
    assert_nil determine_filename_from_url('')
    assert_nil determine_filename_from_url('sdfsdf')
    assert_nil determine_filename_from_url(nil)
  end

  test 'check for data or url' do
    # True when either :data or :data_url is non-empty.
    refute check_for_data_or_url(data: '', data_url: '')
    assert check_for_data_or_url(data: 'hhhh')
    assert check_for_data_or_url(data_url: 'hhhh')
    refute check_for_data_or_url(data: [], data_url: [])
    assert check_for_data_or_url(data: ['hhhh'])
    assert check_for_data_or_url(data_url: ['hhhh'])
  end

  test 'retained content blob ids' do
    @params = { retained_content_blob_ids: [1, 2] }
    assert_equal [1, 2], retained_content_blob_ids
    # Missing or unrelated keys yield an empty list, never nil.
    @params = {}
    assert_equal [], retained_content_blob_ids
    @params = { content_blobs: nil }
    assert_equal [], retained_content_blob_ids
    @params = { retained_content_blob_ids: [1, 2, 3] }
    assert_equal [1, 2, 3], retained_content_blob_ids
  end

  test 'model image present?' do
    file_with_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('fish')
    )
    @params = { model_image: { image_file: file_with_content }, content_blob: {}, model: { title: 'fish' } }
    assert model_image_present?
    # An empty model_image hash does not count as present.
    @params = { model_image: {}, content_blob: {}, model: { title: 'fish' } }
    refute model_image_present?
    @params = { content_blob: {}, model: { title: 'fish' } }
    refute model_image_present?
  end

  test 'check for data if present' do
    file_with_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('fish')
    )
    empty_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('')
    )
    # A zero-byte upload fails the check; absent data is fine when a URL exists.
    assert check_for_empty_data_if_present(data: '', data_url: 'http://fish')
    assert check_for_empty_data_if_present(data: file_with_content, data_url: '')
    assert check_for_empty_data_if_present(data: file_with_content, data_url: [])
    refute check_for_empty_data_if_present(data: empty_content, data_url: '')
    refute check_for_empty_data_if_present(data: empty_content, data_url: [])
    refute check_for_empty_data_if_present(data: empty_content)
    # Same rules when the data is wrapped in an array; one empty file taints the lot.
    assert check_for_empty_data_if_present(data: [], data_url: 'http://fish')
    assert check_for_empty_data_if_present(data: [file_with_content], data_url: '')
    assert check_for_empty_data_if_present(data: [file_with_content], data_url: [])
    refute check_for_empty_data_if_present(data: [empty_content], data_url: '')
    refute check_for_empty_data_if_present(data: [empty_content], data_url: [])
    refute check_for_empty_data_if_present(data: [empty_content])
    refute check_for_empty_data_if_present(data: [empty_content, file_with_content])
  end

  # allows some methods to be tested that rely on flash.now[:error]
  def flash
    ActionDispatch::Flash::FlashHash.new
  end

  # mock out the params method, set @params for the desired params for the test
  attr_reader :params

  # mocks out the controller name, defaults to data_files, but can be changed by setting @controller_name
  def controller_name
    @controller_name || 'data_files'
  end

  private

  # Thin wrappers so the tests exercise the real HTTP handler (stubbed by webmock).
  def fetch_url_headers(url)
    Seek::DownloadHandling::HTTPHandler.new(url).info
  end

  def check_url_response_code(url)
    Seek::DownloadHandling::HTTPHandler.new(url, fallback_to_get: false).info[:code]
  end
end
Test fix - Params is not just a hash
require 'test_helper'
require 'seek/upload_handling/data_upload'
require 'seek/upload_handling/examine_url'

# Unit tests for the Seek upload-handling mixins (DataUpload / ExamineUrl).
# The mixins are included directly into the test case, with the controller
# methods they rely on (params, flash, controller_name) mocked out below.
class UploadHandingTest < ActiveSupport::TestCase
  include Seek::UploadHandling::DataUpload
  include Seek::UploadHandling::ExamineUrl

  test 'valid scheme?' do
    # 'file' is the only scheme on the blacklist.
    assert_equal %w(file).sort, Seek::UploadHandling::ContentInspection::INVALID_SCHEMES.sort
    assert valid_scheme?('http://bbc.co.uk')
    assert valid_scheme?('https://bbc.co.uk')
    assert valid_scheme?('ftp://bbc.co.uk')
    assert valid_scheme?('ssh://bbc.co.uk')
    refute valid_scheme?('file:///secret/documents.txt')
  end

  test 'content_blob_params' do
    # Real requests deliver ActionController::Parameters, not a plain Hash.
    @params = ActionController::Parameters.new({ content_blobs: [{ fish: 1, soup: 2 }],
                                                 data_file: { title: 'george' } })
    assert_equal 1, content_blob_params.length
    assert_equal 1, content_blob_params.first[:fish]
    assert_equal 2, content_blob_params.first[:soup]
  end

  test 'default to http if missing' do
    # A scheme-less URL gets http:// prefixed in place.
    params = { data_url: 'fish.com/path?query=yes' }
    default_to_http_if_missing(params)
    assert_equal('http://fish.com/path?query=yes', params[:data_url])
    # An explicit scheme is left untouched.
    params[:data_url] = 'https://fish.com/path?query=yes'
    default_to_http_if_missing(params)
    assert_equal('https://fish.com/path?query=yes', params[:data_url])
    # nil stays nil.
    params[:data_url] = nil
    default_to_http_if_missing(params)
    assert_nil(params[:data_url])
    # A bare word with no path separators is not treated as a host.
    params[:data_url] = 'sdfhksdlfsdkfh'
    default_to_http_if_missing(params)
    assert_equal('sdfhksdlfsdkfh', params[:data_url])
  end

  test 'asset params' do
    # asset_params picks the sub-hash matching the (mocked) controller name.
    @params = ActionController::Parameters.new({ content_blob: { fish: 1, soup: 2 },
                                                 data_file: { title: 'george' },
                                                 sop: { title: 'mary' } })
    assert_equal 'george', asset_params[:title]
    assert_equal 1, asset_params.keys.length
    @controller_name = 'sops'
    assert_equal 'mary', asset_params[:title]
    assert_equal 1, asset_params.keys.length
  end

  test 'check url response code' do
    stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200, body: '', headers: { content_type: 'text/html', content_length: '555' })
    stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
    stub_request(:head, 'http://server-error.com').to_return(status: 500, body: '', headers: {})
    stub_request(:head, 'http://forbidden.com').to_return(status: 403, body: '', headers: {})
    stub_request(:head, 'http://unauthorized.com').to_return(status: 401, body: '', headers: {})
    stub_request(:head, 'http://methodnotallowed.com').to_return(status: 405, body: '', headers: {})
    assert_equal 200, check_url_response_code('http://bbc.co.uk')
    assert_equal 404, check_url_response_code('http://not-there.com')
    assert_equal 500, check_url_response_code('http://server-error.com')
    assert_equal 403, check_url_response_code('http://forbidden.com')
    assert_equal 401, check_url_response_code('http://unauthorized.com')
    assert_equal 405, check_url_response_code('http://methodnotallowed.com')
    # redirection will be followed
    stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
    stub_request(:head, 'http://moved2.com').to_return(status: 302, body: '', headers: { location: 'http://forbidden.com' })
    assert_equal 200, check_url_response_code('http://moved.com')
    assert_equal 403, check_url_response_code('http://moved2.com')
  end

  test 'fetch url headers' do
    stub_request(:head, 'http://bbc.co.uk/').to_return(status: 200,
                                                       body: '',
                                                       headers: { content_type: 'text/html', content_length: '555' })
    headers = fetch_url_headers('http://bbc.co.uk')
    assert_equal 'text/html', headers[:content_type]
    # content_length comes back as an Integer under :file_size.
    assert_equal 555, headers[:file_size]
    stub_request(:head, 'http://somewhere.org/excel.xls').to_return(status: 200,
                                                                    body: '',
                                                                    headers: { content_type: 'application/vnd.ms-excel', content_length: '1111' })
    headers = fetch_url_headers('http://somewhere.org/excel.xls')
    assert_equal 'application/vnd.ms-excel', headers[:content_type]
    assert_equal 1111, headers[:file_size]
    stub_request(:head, 'http://not-there.com').to_return(status: 404, body: '', headers: {})
    assert_equal 404, fetch_url_headers('http://not-there.com')[:code]
    # follows redirection
    stub_request(:head, 'http://moved.com').to_return(status: 301, body: '', headers: { location: 'http://bbc.co.uk' })
    headers = fetch_url_headers('http://moved.com')
    assert_equal 'text/html', headers[:content_type]
    assert_equal 555, headers[:file_size]
  end

  test 'content type from filename' do
    # nil filename falls back to text/html (treated as a webpage).
    assert_equal 'text/html', content_type_from_filename(nil)
    # FIXME: , MERGENOTE - .xml gives an incorrect mime type of sbml+xml due to the ordering
    checks = [
      { f: 'test.jpg', t: 'image/jpeg' },
      { f: 'test.JPG', t: 'image/jpeg' },
      { f: 'test.png', t: 'image/png' },
      { f: 'test.PNG', t: 'image/png' },
      { f: 'test.jpeg', t: 'image/jpeg' },
      { f: 'test.JPEG', t: 'image/jpeg' },
      { f: 'test.xls', t: 'application/excel' },
      { f: 'test.doc', t: 'application/msword' },
      { f: 'test.xlsx', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { f: 'test.docx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
      { f: 'test.XLs', t: 'application/excel' },
      { f: 'test.Doc', t: 'application/msword' },
      { f: 'test.XLSX', t: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { f: 'test.dOCx', t: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' },
      { f: 'unknown.xxx', t: 'application/octet-stream' },
      { f: nil, t: 'text/html' }
    ]
    checks.each do |check|
      assert_equal check[:t], content_type_from_filename(check[:f]), "Expected #{check[:t]} for #{check[:f]}"
    end
  end

  test 'content is webpage?' do
    assert content_is_webpage?('text/html')
    # Charset suffixes are tolerated.
    assert content_is_webpage?('text/html; charset=UTF-8')
    refute content_is_webpage?('application/zip')
    refute content_is_webpage?(nil)
  end

  test 'valid uri?' do
    assert valid_uri?('http://fish.com')
    assert valid_uri?('http://fish.com')
    # Trailing whitespace is tolerated.
    assert valid_uri?('http://fish.com ')
    assert valid_uri?('http://fish.com/fish.txt')
    assert valid_uri?('http://fish.com/fish.txt ')
    refute valid_uri?('x dd s')
    refute valid_uri?(nil)
  end

  test 'determine_filename_from_disposition' do
    # Both quoted and bare filename= forms are handled.
    assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename="_form.html.erb"')
    assert_equal '_form.html.erb', determine_filename_from_disposition('inline; filename=_form.html.erb')
    assert_equal '_form.html.erb', determine_filename_from_disposition('attachment; filename="_form.html.erb"')
    assert_nil determine_filename_from_disposition(nil)
    assert_nil determine_filename_from_disposition('')
  end

  test 'determine filename from url' do
    assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt')
    assert_equal 'fred.txt', determine_filename_from_url('http://place.com/fred.txt ')
    # Percent-encoded path segments do not confuse basename extraction.
    assert_equal 'jenny.txt', determine_filename_from_url('http://place.com/here/he%20/jenny.txt')
    assert_nil determine_filename_from_url('http://place.com')
    assert_nil determine_filename_from_url('http://place.com/')
    assert_nil determine_filename_from_url('')
    assert_nil determine_filename_from_url('sdfsdf')
    assert_nil determine_filename_from_url(nil)
  end

  test 'check for data or url' do
    # True when either :data or :data_url is non-empty.
    refute check_for_data_or_url(data: '', data_url: '')
    assert check_for_data_or_url(data: 'hhhh')
    assert check_for_data_or_url(data_url: 'hhhh')
    refute check_for_data_or_url(data: [], data_url: [])
    assert check_for_data_or_url(data: ['hhhh'])
    assert check_for_data_or_url(data_url: ['hhhh'])
  end

  test 'retained content blob ids' do
    @params = { retained_content_blob_ids: [1, 2] }
    assert_equal [1, 2], retained_content_blob_ids
    # Missing or unrelated keys yield an empty list, never nil.
    @params = {}
    assert_equal [], retained_content_blob_ids
    @params = { content_blobs: nil }
    assert_equal [], retained_content_blob_ids
    @params = { retained_content_blob_ids: [1, 2, 3] }
    assert_equal [1, 2, 3], retained_content_blob_ids
  end

  test 'model image present?' do
    file_with_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('fish')
    )
    @params = { model_image: { image_file: file_with_content }, content_blob: {}, model: { title: 'fish' } }
    assert model_image_present?
    # An empty model_image hash does not count as present.
    @params = { model_image: {}, content_blob: {}, model: { title: 'fish' } }
    refute model_image_present?
    @params = { content_blob: {}, model: { title: 'fish' } }
    refute model_image_present?
  end

  test 'check for data if present' do
    file_with_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('fish')
    )
    empty_content = ActionDispatch::Http::UploadedFile.new(
      filename: 'file',
      content_type: 'text/plain',
      tempfile: StringIO.new('')
    )
    # A zero-byte upload fails the check; absent data is fine when a URL exists.
    assert check_for_empty_data_if_present(data: '', data_url: 'http://fish')
    assert check_for_empty_data_if_present(data: file_with_content, data_url: '')
    assert check_for_empty_data_if_present(data: file_with_content, data_url: [])
    refute check_for_empty_data_if_present(data: empty_content, data_url: '')
    refute check_for_empty_data_if_present(data: empty_content, data_url: [])
    refute check_for_empty_data_if_present(data: empty_content)
    # Same rules when the data is wrapped in an array; one empty file taints the lot.
    assert check_for_empty_data_if_present(data: [], data_url: 'http://fish')
    assert check_for_empty_data_if_present(data: [file_with_content], data_url: '')
    assert check_for_empty_data_if_present(data: [file_with_content], data_url: [])
    refute check_for_empty_data_if_present(data: [empty_content], data_url: '')
    refute check_for_empty_data_if_present(data: [empty_content], data_url: [])
    refute check_for_empty_data_if_present(data: [empty_content])
    refute check_for_empty_data_if_present(data: [empty_content, file_with_content])
  end

  # allows some methods to be tested that rely on flash.now[:error]
  def flash
    ActionDispatch::Flash::FlashHash.new
  end

  # mock out the params method, set @params for the desired params for the test
  attr_reader :params

  # mocks out the controller name, defaults to data_files, but can be changed by setting @controller_name
  def controller_name
    @controller_name || 'data_files'
  end

  private

  # Thin wrappers so the tests exercise the real HTTP handler (stubbed by webmock).
  def fetch_url_headers(url)
    Seek::DownloadHandling::HTTPHandler.new(url).info
  end

  def check_url_response_code(url)
    Seek::DownloadHandling::HTTPHandler.new(url, fallback_to_get: false).info[:code]
  end
end
|
require File.dirname(__FILE__) + '/../test_helper'

# Unit tests for ValidationInfo: presence validation of the methodology
# description, typed organization association, and HTML escaping of
# free-text fields.
class ValidationInfoTest < ActiveSupport::TestCase
  should 'validate the presence of validation methodology description' do
    info = ValidationInfo.new
    info.valid?
    # FIX: ActiveModel::Errors#invalid?(attr) is deprecated/removed in
    # Rails 3; errors[attr].any? is the supported way to check whether an
    # attribute has validation errors.
    assert info.errors[:validation_methodology].any?
    info.validation_methodology = 'lalala'
    info.valid?
    assert !info.errors[:validation_methodology].any?
  end

  should 'refer to and validate the presence of an organization' do
    info = ValidationInfo.new
    # Assigning a non-Organization must be rejected by the association.
    assert_raise ActiveRecord::AssociationTypeMismatch do
      info.organization = 1
    end
    assert_nothing_raised do
      info.organization = Organization.new
    end
  end

  should 'escape malformed html tags' do
    info = ValidationInfo.new
    info.validation_methodology = "<h1 Malformed >> html >< tag"
    info.restrictions = "<h1 Malformed >> html >< tag"
    info.valid?
    # After validation no raw angle brackets may survive in either field.
    assert_no_match /[<>]/, info.validation_methodology
    assert_no_match /[<>]/, info.restrictions
  end
end
rails3: Fix ValidationInfoTest unit test. Change deprecated call for method invalid?(attr) in ActiveModel::Errors class by [:attr].any?
require File.dirname(__FILE__) + '/../test_helper'

# Unit tests for ValidationInfo: presence validation of the methodology
# description, typed organization association, and HTML escaping of
# free-text fields.
class ValidationInfoTest < ActiveSupport::TestCase
  should 'validate the presence of validation methodology description' do
    info = ValidationInfo.new
    info.valid?
    # errors[attr].any? replaces the deprecated Errors#invalid?(attr).
    assert info.errors[:validation_methodology].any?
    info.validation_methodology = 'lalala'
    info.valid?
    assert !info.errors[:validation_methodology].any?
  end

  should 'refer to and validate the presence of an organization' do
    info = ValidationInfo.new
    # Assigning a non-Organization must be rejected by the association.
    assert_raise ActiveRecord::AssociationTypeMismatch do
      info.organization = 1
    end
    assert_nothing_raised do
      info.organization = Organization.new
    end
  end

  should 'escape malformed html tags' do
    info = ValidationInfo.new
    info.validation_methodology = "<h1 Malformed >> html >< tag"
    info.restrictions = "<h1 Malformed >> html >< tag"
    info.valid?
    # After validation no raw angle brackets may survive in either field.
    assert_no_match /[<>]/, info.validation_methodology
    assert_no_match /[<>]/, info.restrictions
  end
end
|
require 'asciidoctor-diagram/util/diagram'
require 'net/https'

module Asciidoctor
  module Diagram
    # Fetches rendered diagram images from the Cacoo web service.
    module CacooGenerator
      # Downloads the PNG rendering of the Cacoo diagram whose id is +c+
      # (leading/trailing whitespace stripped). Reads the API key from the
      # CACOO_API_KEY environment variable.
      def self.cacoo(c)
        apiKey = ENV['CACOO_API_KEY']
        diagramId = c.strip
        # NOTE: See API document at https://cacoo.com/lang/en/api and
        # https://cacoo.com/lang/en/api_image
        url = "/api/v1/diagrams/#{diagramId}.png?apiKey=#{apiKey}"
        connection = Net::HTTP.new('cacoo.com', 443)
        connection.use_ssl = true
        connection.start do
          response = connection.get(url)
          raise "Cacoo response status code was #{response.code}" if response.code != '200'
          response.body
        end
      end
    end

    # Register the :png image format backed by the generator above.
    define_processors('Cacoo') do
      register_format(:png, :image) do |c|
        CacooGenerator.cacoo(c)
      end
    end
  end
end
Replace Cacoo implementation with https://gist.github.com/pepijnve/2d4059db5d827462b5cd
Thanks to Pepijn Van Eeckhoudt!
require 'json'
require 'net/https'
require 'time'

require_relative '../api/diagram'
module Asciidoctor
module Diagram
# @private
# Helpers for talking to the Cacoo REST API, plus the DiagramSource
# implementation consumed by the block macro processor defined below.
# NOTE(review): this module uses Net::HTTP but the file's requires do not
# include 'net/https' — it relies on the constant being loaded elsewhere;
# confirm.
module Cacoo
  # Returns the API metadata hash for one diagram. The full diagram list
  # is fetched once and memoised in the module-level @diagrams cache
  # (keyed by 'diagramId'); later calls hit the cache without an HTTP trip.
  def self.get_diagram_metadata(diagram_id, api_key)
    # NOTE: See API document at https://cacoo.com/lang/en/api and
    # https://cacoo.com/lang/en/api_image
    unless @diagrams
      diagrams = JSON.parse(get("/api/v1/diagrams.json?apiKey=#{api_key}"))
      @diagrams = diagrams['result'].each_with_object({}) { |d, h| h[d['diagramId']] = d }
    end
    @diagrams[diagram_id]
  end

  # Fetches the rendered PNG bytes for a single diagram.
  def self.get_diagram(diagram_id, api_key)
    # NOTE: See API document at https://cacoo.com/lang/en/api and
    # https://cacoo.com/lang/en/api_image
    get("/api/v1/diagrams/#{diagram_id}.png?apiKey=#{api_key}")
  end

  # GETs a path from cacoo.com over HTTPS and returns the response body;
  # raises unless the response status is exactly 200.
  def self.get(url)
    https = Net::HTTP.new('cacoo.com', 443)
    https.use_ssl = true
    https.start do
      response = https.get(url)
      raise "Cacoo response status code was #{response.code}" if response.code != '200'
      response.body
    end
  end

  # DiagramSource wrapping a Cacoo diagram id and API key.
  class Source < API::DiagramSource
    attr_accessor :diagram_id
    attr_accessor :api_key

    def initialize(diagram_id, api_key)
      @diagram_id = diagram_id
      @api_key = api_key
    end

    # Base name used for the generated image file.
    def image_name
      "cacoo-#{diagram_id}"
    end

    # The "source code" of a Cacoo diagram is just its id.
    def code
      diagram_id
    end

    # NOTE(review): returns true when the remote 'updated' timestamp is
    # OLDER than the cached image's — which reads inverted for a
    # "regenerate?" predicate; confirm against API::DiagramSource's contract.
    def should_process?(image_file, image_metadata)
      Time.rfc2822(cacoo_metadata['updated']) < Time.rfc2822(image_metadata['updated'])
    end

    # Metadata persisted next to the generated image for the check above.
    def create_image_metadata
      { 'updated' => cacoo_metadata['updated'] }
    end

    def cacoo_metadata
      Cacoo.get_diagram_metadata(diagram_id, api_key)
    end
  end
end
# Block macro processor wiring Cacoo diagrams into Asciidoctor documents.
class CacooBlockMacroProcessor < API::DiagramBlockMacroProcessor
  register_format(:png, :image) do |parent, source|
    # FIX: was Cacoo.cacoo(...), a method that does not exist on the Cacoo
    # module; get_diagram is the PNG-fetching helper it defines.
    Cacoo.get_diagram(source.code, source.api_key)
  end

  # Builds a Cacoo::Source from the macro target (the diagram id).
  # Raises when no API key is available from either the environment or
  # the document attributes.
  def create_source(parent, target, attributes)
    # FIX: document.attributes is a Hash and must be indexed with [],
    # not called like a method (attributes('…') raises NoMethodError).
    api_key = ENV['CACOO_API_KEY'] || parent.document.attributes['cacoo_api_key']
    raise "Please specify your Cacoo API key using the CACOO_API_KEY environment variable or cacoo_api_key document attribute" unless api_key
    Cacoo::Source.new(target.strip, api_key)
  end
end
end
end |
require 'digest/sha1'

module Awestruct
  module Extensions
    # Awestruct extension that decorates every site page with helpers for
    # embedding IntenseDebate comment threads and comment-count links.
    class IntenseDebate
      # Extension hook: mixes IntenseDebatable into each page of the site.
      def execute(site)
        site.pages.each{|p| p.extend IntenseDebatable }
      end

      module IntenseDebatable
        # Script/span snippet embedding the full comment thread.
        # Uses the page's explicit post_id when set, otherwise a SHA1 of
        # the page URL.
        def intense_debate_comments()
          post_id = self.post_id ? self.post_id : Digest::SHA1.hexdigest( self.url )
          html = %Q(<script>\n)
          html += %Q( var idcomments_acct='#{site.intense_debate_acct}';\n)
          html += %Q( var idcomments_post_id='#{post_id}';\n )
          html += %Q( var idcomments_post_url='#{site.intense_debate_base_url || site.base_url}#{self.url}';\n)
          html += %Q(</script>\n)
          html += %Q(<span id="IDCommentsPostTitle" style="display:none"></span>\n)
          html += %Q(<script type='text/javascript' src='http://www.intensedebate.com/js/genericCommentWrapperV2.js'></script>\n)
          html
        end

        # Script snippet embedding only the comment-count link; the post
        # URL is built absolute from the site's base URL.
        def intense_debate_comments_link()
          post_id = self.post_id ? self.post_id : Digest::SHA1.hexdigest( self.url )
          html = %Q(<script>\n)
          html += %Q( var idcomments_acct='#{site.intense_debate_acct}';\n)
          html += %Q( var idcomments_post_id='#{post_id}';\n )
          html += %Q( var idcomments_post_url='#{site.intense_debate_base_url || site.base_url}#{self.url}';\n)
          html += %Q(</script>\n)
          html += %Q(<script type='text/javascript' src='http://www.intensedebate.com/js/genericLinkWrapperV2.js'></script>\n)
          html
        end
      end
    end
  end
end
Relative link is sufficient for comment links.
require 'digest/sha1'

module Awestruct
  module Extensions
    # Awestruct extension that decorates every site page with helpers for
    # embedding IntenseDebate comment threads and comment-count links.
    class IntenseDebate
      # Extension hook: mix IntenseDebatable into each page of the site.
      def execute(site)
        site.pages.each { |page| page.extend IntenseDebatable }
      end

      module IntenseDebatable
        # Script/span snippet embedding the full comment thread. The post
        # id falls back to a SHA1 digest of the page URL when not set.
        def intense_debate_comments()
          post_id = self.post_id || Digest::SHA1.hexdigest(self.url)
          markup = %Q(<script>\n)
          markup << %Q( var idcomments_acct='#{site.intense_debate_acct}';\n)
          markup << %Q( var idcomments_post_id='#{post_id}';\n )
          markup << %Q( var idcomments_post_url='#{site.intense_debate_base_url || site.base_url}#{self.url}';\n)
          markup << %Q(</script>\n)
          markup << %Q(<span id="IDCommentsPostTitle" style="display:none"></span>\n)
          markup << %Q(<script type='text/javascript' src='http://www.intensedebate.com/js/genericCommentWrapperV2.js'></script>\n)
          markup
        end

        # Script snippet embedding only the comment-count link; the page's
        # relative URL is sufficient here.
        def intense_debate_comments_link()
          post_id = self.post_id || Digest::SHA1.hexdigest(self.url)
          markup = %Q(<script>\n)
          markup << %Q( var idcomments_acct='#{site.intense_debate_acct}';\n)
          markup << %Q( var idcomments_post_id='#{post_id}';\n )
          markup << %Q( var idcomments_post_url='#{self.url}';\n)
          markup << %Q(</script>\n)
          markup << %Q(<script type='text/javascript' src='http://www.intensedebate.com/js/genericLinkWrapperV2.js'></script>\n)
          markup
        end
      end
    end
  end
end
module CancanStrongParameters
module Controller
HASH_DEFAULTS = ['id', '_destroy', '_delete']
module ClassMethods
# Use this with CanCan's load_resource to permit a set of params before
# it tries to build or update a resource with them.
#
# Usage:
# class BooksController < ApplicationController
# load_resource :book
# permit_params book: [:title, :isbn]
# end
#
# Or:
# class BooksController < ApplicationController
# load_resource
# permit_params :title, :isbn
# end
#
# the second form should work in the simple case where you don't have to
# supply a resource name for #load_resource
#
def permit_params *keys
filter_strong_params :permit, [:create, :update], keys
end
# Like permit_params, but only applies to create action
#
def permit_params_on_create *keys
filter_strong_params :permit, :create, keys
end
# Like permit_params, but only applies to update action
#
def permit_params_on_update *keys
filter_strong_params :permit, :update, keys
end
# Like permit_params, but marks the params required
#
def require_params *keys
filter_strong_params :require, [:create, :update], keys
end
# Like require_params, but only applies to create action
#
def require_params_on_create *keys
filter_strong_params :require, :create, keys
end
# Like require_params, but only applies to update action
#
def require_params_on_update *keys
filter_strong_params :require, :update, keys
end
# Does a permit! at every level of the params to let everything through
#
def permit_all_params options = {}
prepend_before_filter options.reverse_merge(:only => [:create, :update]) do
self.params.deep_permit!
end
end
def filter_strong_params method, actions, keys # :nodoc:
hash = keys.extract_options!
keys.flatten!
# Handle attributes if permitted attributes are given for nested models
if (hash.present? && keys.present?) || (hash.select{|k,v| v.is_a?(Array)} == hash)
defaults = CancanStrongParameters::Controller::HASH_DEFAULTS
# @todo We have to stringify everything for 1.8.7 due to a bug in `strong_parameters`.
# More at https://github.com/rails/strong_parameters/pull/51
hash = hash.attributized.stringified
prepend_before_filter :only => actions do
resource_name = self.class.resource_name
# @todo We have to stringify everything for 1.8.7 due to a bug in `strong_parameters`.
# More at https://github.com/rails/strong_parameters/pull/51
parameters = keys.flatten.map! {|k| k.to_s } + defaults
parameters << ActionController::Parameters.new(hash)
# original: parameters = keys.flatten + defaults
# parameters << hash
return warn("Not updating - no parameters key present for #{resource_name}") unless params[resource_name]
self.params[resource_name] = params[resource_name].standardized.send method, *parameters
end
elsif hash.present?
prepend_before_filter :only => actions do
self.params.merge! params.send(method, hash)
end
else
prepend_before_filter :only => actions do
resource_name = self.class.resource_name
if params.has_key?(resource_name)
self.params[resource_name] = params[resource_name].send method, *keys.stringified
else
self.params = params.send method, *keys.stringified
end
end
end
end
def resource_name(name_to_set=nil)
unless name_to_set.present?
@resource_name ||= self.to_s.sub("Controller", "").underscore.split('/').last.singularize
else
@resource_name = name_to_set
end
end
end
def self.included(base)
base.extend(ClassMethods)
end
# Errors
# Logs +msg+ at warn level via the Rails logger.
# No-ops (returns nil) when Rails or its logger is unavailable.
def warn msg
  # defined?() guards against a NameError when Rails is not loaded; the
  # previous bare `Rails and Rails.logger` would raise instead of no-op'ing.
  return unless defined?(Rails) && Rails.logger
  Rails.logger.warn(msg)
end
end
end
class Hash
  # Converts keys with hash values -- e.g. posts: {} -- to posts_attributes for nested forms.
  #
  # Also, Allows rails specific values like _destroy or _delete.
  #
  # NOTE: You must enable `allow_destroy: true` in your call to `accepts_nested_attributes_for` anyway,
  # so this is secure to whitelist here.
  def attributized
    defaults = CancanStrongParameters::Controller::HASH_DEFAULTS
    Hash.new.tap do |h|
      self.each do |k,v|
        # Renames each key to "<key>_attributes" and appends the default
        # whitelist entries ('id', '_destroy', '_delete') to its value.
        # NOTE(review): assumes every value responds to #attributized and to
        # #+ with an Array (i.e. values are Arrays of attribute names); a
        # nested Hash value would raise here -- verify against callers.
        h[:"#{k}_attributes"] = self[k].attributized + defaults
      end
    end
  end
  # Converts keyed nested_forms (like task_attributes: {"0" => {}}) to normal params arrays.
  def to_parameter_array
    return self if self.empty?
    # NOTE(review): low-precedence and/or makes this parse as
    # ((String && "new_"-prefixed) or is_i?) or is_hex?; a non-String first
    # key (e.g. a Symbol) would hit the String-only helpers is_i?/is_hex?
    # and raise -- confirm keys are always Strings here.
    return self unless (k = self.keys.first).is_a?(String) and k[0..3] == "new_" or k.is_i? or k.is_hex?
    Array.new.tap do |a|
      self.each do |k,v|
        # Discard the synthetic index keys; keep the standardized values.
        a << v.standardized
      end
    end
  end
end
class Array
  # Returns a copy of the array in which every Hash element has been
  # converted with Hash#attributized; all other elements pass through as-is.
  def attributized
    map { |element| element.is_a?(Hash) ? element.attributized : element }
  end
end
class ActiveSupport::HashWithIndifferentAccess
  # Takes params that are passed in for nested_forms (like the example below) and cleans them up.
  #
  # post: {
  #   comments_attributes: {
  #     "0" => {},
  #     "1" => {},
  #     "new_23023032" => {}
  #   }
  # }
  #
  # Returns an ActionController::Parameters copy in which every Hash value
  # has been run through Hash#to_parameter_array (turning keyed nested-form
  # hashes into plain arrays); non-Hash values are copied verbatim.
  def standardized
    ActionController::Parameters.new.tap do |h|
      self.each do |k,v|
        h[k] = v.is_a?(Hash) ? v.to_parameter_array : v
      end
    end
  end
end
class String
  # True when the entire string is an optionally signed decimal integer.
  # Anchored with \A/\z: ^/$ only match per line, so a multi-line string
  # whose first line looked like an integer previously slipped through.
  def is_i?
    !!(self =~ /\A[-+]?[0-9]+\z/)
  end

  # True when the entire string consists of lowercase hexadecimal digits.
  def is_hex?
    !!(self =~ /\A[0-9a-f]+\z/)
  end
end
# @todo Can be remove when new version of `strong_parameters` (>=0.1.5) is released.
class Hash
  # Returns a copy of the hash with String keys; Symbol values become
  # Strings and nested Hash/Array values are converted recursively
  # (compatibility shim for `strong_parameters` on Ruby 1.8.7).
  def stringified
    Hash.new.tap do |h|
      each do |key, value|
        value = case value
                when Symbol
                  value.to_s
                when Hash
                  value.indifferent
                when Array
                  value.stringified
                else
                  # BUGFIX: without this branch the case expression returned
                  # nil, silently discarding String/numeric/other values.
                  value
                end
        h[key.to_s] = value
      end
    end
  end
end
class Array
  # Returns a copy of the array where every Hash element is recursively
  # stringified (and made indifferent) and every other element is
  # converted to a String.
  def stringified
    map do |element|
      element.is_a?(Hash) ? element.stringified.indifferent : element.to_s
    end
  end
end
Patch: the permit_params family of macros now applies to all controller actions by default.
module CancanStrongParameters
module Controller
HASH_DEFAULTS = ['id', '_destroy', '_delete']
module ClassMethods
# Use this with CanCan's load_resource to permit a set of params before
# it tries to build or update a resource with them.
#
# Usage:
# class BooksController < ApplicationController
# load_resource :book
# permit_params book: [:title, :isbn]
# end
#
# Or:
# class BooksController < ApplicationController
# load_resource
# permit_params :title, :isbn
# end
#
# the second form should work in the simple case where you don't have to
# supply a resource name for #load_resource
#
# Permits the given keys on every controller action
# (delegates to filter_strong_params with the :all pseudo-action).
def permit_params *keys
  filter_strong_params :permit, [:all], keys
end
# Like permit_params, but only applies to create action
#
# Like permit_params, but the generated filter runs only on :create.
def permit_params_on_create *keys
  filter_strong_params :permit, [:create], keys
end
# Like permit_params, but only applies to update action
#
# Like permit_params, but the generated filter runs only on :update.
def permit_params_on_update *keys
  filter_strong_params :permit, [:update], keys
end
# Like permit_params, but marks the params required
#
# Like permit_params, but sends :require (raising when keys are missing);
# runs on the :create and :update actions.
def require_params *keys
  filter_strong_params :require, [:create, :update], keys
end
# Like require_params, but only applies to create action
#
# Like require_params, but the generated filter runs only on :create.
def require_params_on_create *keys
  filter_strong_params :require, [:create], keys
end
# Like require_params, but only applies to update action
#
# Like require_params, but the generated filter runs only on :update.
def require_params_on_update *keys
  filter_strong_params :require, [:update], keys
end
# Does a permit! at every level of the params to let everything through
#
# Does a permit! at every level of the params to let everything through.
#
# Runs on every action by default; pass filter options (e.g. :only => [...])
# to restrict it. BUGFIX: +options+ was previously accepted but silently
# ignored, so callers' :only/:except restrictions had no effect.
def permit_all_params options = {}
  prepend_before_filter(options) do
    self.params.deep_permit!
  end
end
# Shared implementation behind the permit_params / require_params macros.
#
# method  - :permit or :require (sent to the params object).
# actions - actions to run on; [:all] means every action (no :only option).
# keys    - flat attribute names, optionally ending in a Hash of
#           nested-model attribute whitelists.
def filter_strong_params method, actions, keys # :nodoc:
  # Get hash from end of array
  hash = keys.extract_options!
  keys.flatten!
  # Filter_options is passed to our before filter, e.g. sets when they run
  filter_options = actions == [:all] ? {} : { :only => actions }
  # Handle attributes if permitted attributes are given for nested models
  if (hash.present? && keys.present?) || (hash.select{|k,v| v.is_a?(Array)} == hash)
    defaults = CancanStrongParameters::Controller::HASH_DEFAULTS
    # @todo We have to stringify everything for 1.8.7 due to a bug in `strong_parameters`.
    # More at https://github.com/rails/strong_parameters/pull/51
    hash = hash.attributized.stringified
    prepend_before_filter(filter_options) do
      resource_name = self.class.resource_name
      # @todo We have to stringify everything for 1.8.7 due to a bug in `strong_parameters`.
      # More at https://github.com/rails/strong_parameters/pull/51
      parameters = keys.flatten.map! {|k| k.to_s } + defaults
      parameters << ActionController::Parameters.new(hash)
      # original: parameters = keys.flatten + defaults
      # parameters << hash
      # NOTE(review): `return` inside a filter block may raise LocalJumpError
      # at request time -- consider `next`; verify under the target Rails version.
      return warn("Not updating - no parameters key present for #{resource_name}") unless params[resource_name]
      self.params[resource_name] = params[resource_name].standardized.send method, *parameters
    end
  elsif hash.present?
    # Only a nested Hash was given: filter the whole params and merge back.
    prepend_before_filter(filter_options) do
      self.params.merge! params.send(method, hash)
    end
  else
    # Flat keys only: filter the resource's sub-hash when present,
    # otherwise the top-level params.
    prepend_before_filter(filter_options) do
      resource_name = self.class.resource_name
      if params.has_key?(resource_name)
        self.params[resource_name] = params[resource_name].send method, *keys.stringified
      else
        self.params = params.send method, *keys.stringified
      end
    end
  end
end
# Reads (or sets) the resource name used to look up params for this controller.
#
# With no argument, derives the name from the controller class name
# (e.g. Admin::BooksController => "book") and memoizes it.
# With an argument, overrides the derived name.
def resource_name(name_to_set=nil)
  # if/else instead of unless/else -- never use unless with an else branch.
  if name_to_set.present?
    @resource_name = name_to_set
  else
    @resource_name ||= self.to_s.sub("Controller", "").underscore.split('/').last.singularize
  end
end
end
# Hook invoked when this module is included in a controller;
# mixes the class-level macros (permit_params etc.) into the host class.
def self.included(base)
  base.extend(ClassMethods)
end
# Errors
# Logs +msg+ at warn level via the Rails logger.
# No-ops (returns nil) when Rails or its logger is unavailable.
def warn msg
  # defined?() guards against a NameError when Rails is not loaded; the
  # previous bare `Rails and Rails.logger` would raise instead of no-op'ing.
  return unless defined?(Rails) && Rails.logger
  Rails.logger.warn(msg)
end
end
end
class Hash
  # Converts keys with hash values -- e.g. posts: {} -- to posts_attributes for nested forms.
  #
  # Also, Allows rails specific values like _destroy or _delete.
  #
  # NOTE: You must enable `allow_destroy: true` in your call to `accepts_nested_attributes_for` anyway,
  # so this is secure to whitelist here.
  def attributized
    defaults = CancanStrongParameters::Controller::HASH_DEFAULTS
    Hash.new.tap do |h|
      self.each do |k,v|
        # Renames each key to "<key>_attributes" and appends the default
        # whitelist entries ('id', '_destroy', '_delete') to its value.
        # NOTE(review): assumes every value responds to #attributized and to
        # #+ with an Array (i.e. values are Arrays of attribute names); a
        # nested Hash value would raise here -- verify against callers.
        h[:"#{k}_attributes"] = self[k].attributized + defaults
      end
    end
  end
  # Converts keyed nested_forms (like task_attributes: {"0" => {}}) to normal params arrays.
  def to_parameter_array
    return self if self.empty?
    # NOTE(review): low-precedence and/or makes this parse as
    # ((String && "new_"-prefixed) or is_i?) or is_hex?; a non-String first
    # key (e.g. a Symbol) would hit the String-only helpers is_i?/is_hex?
    # and raise -- confirm keys are always Strings here.
    return self unless (k = self.keys.first).is_a?(String) and k[0..3] == "new_" or k.is_i? or k.is_hex?
    Array.new.tap do |a|
      self.each do |k,v|
        # Discard the synthetic index keys; keep the standardized values.
        a << v.standardized
      end
    end
  end
end
class Array
  # Returns a copy of the array in which every Hash element has been
  # converted with Hash#attributized; all other elements pass through as-is.
  def attributized
    map { |element| element.is_a?(Hash) ? element.attributized : element }
  end
end
class ActiveSupport::HashWithIndifferentAccess
  # Takes params that are passed in for nested_forms (like the example below) and cleans them up.
  #
  # post: {
  #   comments_attributes: {
  #     "0" => {},
  #     "1" => {},
  #     "new_23023032" => {}
  #   }
  # }
  #
  # Returns an ActionController::Parameters copy in which every Hash value
  # has been run through Hash#to_parameter_array (turning keyed nested-form
  # hashes into plain arrays); non-Hash values are copied verbatim.
  def standardized
    ActionController::Parameters.new.tap do |h|
      self.each do |k,v|
        h[k] = v.is_a?(Hash) ? v.to_parameter_array : v
      end
    end
  end
end
class String
  # True when the entire string is an optionally signed decimal integer.
  # Anchored with \A/\z: ^/$ only match per line, so a multi-line string
  # whose first line looked like an integer previously slipped through.
  def is_i?
    !!(self =~ /\A[-+]?[0-9]+\z/)
  end

  # True when the entire string consists of lowercase hexadecimal digits.
  def is_hex?
    !!(self =~ /\A[0-9a-f]+\z/)
  end
end
# @todo Can be remove when new version of `strong_parameters` (>=0.1.5) is released.
class Hash
  # Returns a copy of the hash with String keys; Symbol values become
  # Strings and nested Hash/Array values are converted recursively
  # (compatibility shim for `strong_parameters` on Ruby 1.8.7).
  def stringified
    Hash.new.tap do |h|
      each do |key, value|
        value = case value
                when Symbol
                  value.to_s
                when Hash
                  value.indifferent
                when Array
                  value.stringified
                else
                  # BUGFIX: without this branch the case expression returned
                  # nil, silently discarding String/numeric/other values.
                  value
                end
        h[key.to_s] = value
      end
    end
  end
end
class Array
  # Returns a copy of the array where every Hash element is recursively
  # stringified (and made indifferent) and every other element is
  # converted to a String.
  def stringified
    map do |element|
      element.is_a?(Hash) ? element.stringified.indifferent : element.to_s
    end
  end
end
# Author:: Jeff Moody (<jmoody@datapipe.com>), Takashi Kanai (<anikundesu@gmail.com>)
# Copyright:: Copyright (c) 2012 Datapipe, Copyright (c) 2012 IDC Frontier Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/bootstrap'
Chef::Knife::Bootstrap.load_deps
require 'socket'
require 'net/ssh/multi'
require 'chef/json_compat'
require 'chef/knife/cloudstack_base'
class Chef
class Knife
class CloudstackServerCreate < Knife
include Knife::CloudstackBase
banner "knife cloudstack server create -s SERVICEID -t TEMPLATEID -z ZONEID (options)"
option :cloudstack_serviceid,
:short => "-s SERVICEID",
:long => "--serviceid SERVICEID",
:description => "The CloudStack service offering ID."
option :cloudstack_templateid,
:short => "-t TEMPLATEID",
:long => "--templateid TEMPLATEID",
:description => "The CloudStack template ID for the server."
option :cloudstack_zoneid,
:short => "-z ZONEID",
:long => "--zoneid ZONE",
:description => "The CloudStack zone ID for the server."
option :cloudstack_networkids,
:short => "-w NETWORKIDS",
:long => "--networkids NETWORKIDS",
:description => "Comma separated list of CloudStack network IDs.",
:proc => lambda { |n| n.split(/[\s,]+/) },
:default => []
option :cloudstack_groupids,
:short => "-g SECURITYGROUPIDS",
:long => "--groupids SECURITYGROUPIDS",
:description => "Comma separated list of CloudStack Security Group IDs.",
:proc => lambda { |n| n.split(/[\s,]+/) },
:default => []
option :cloudstack_groupnames,
:short => "-G SECURITYGROUPNAMES",
:long => "--groupnames SECURITYGROUPNAMES",
:description => "Comma separated list of CloudStack Security Group names. Each group name must be encapuslated in quotes if it contains whitespace.",
:proc => lambda { |n| n.split(/[\s,]+/) },
:default => []
option :distro,
:short => "-d DISTRO",
:long => "--distro DISTRO",
:description => "Bootstrap a distro using a template; default is 'ubuntu10.04-gems'",
:proc => Proc.new { |d| Chef::Config[:knife][:distro] = d },
:default => "ubuntu10.04-gems"
option :template_file,
:long => "--template-file TEMPLATE",
:description => "Full path to location of template to use",
:proc => Proc.new { |t| Chef::Config[:knife][:template_file] = t },
:default => false
option :run_list,
:short => "-r RUN_LIST",
:long => "--run-list RUN_LIST",
:description => "Comma separated list of roles/recipes to apply",
:proc => lambda { |o| o.split(/[\s,]+/) },
:default => []
option :ssh_user,
:short => "-x USERNAME",
:long => "--ssh-user USERNAME",
:description => "The ssh username",
:default => 'root'
option :ssh_password,
:short => "-P PASSWORD",
:long => "--ssh-password PASSWORD",
:description => "The ssh password"
option :identity_file,
:short => "-i PRIVATE_KEY_FILE",
:long => "--identity-file PRIVATE_KEY_FILE",
:description => "The Private key file for authenticating SSH session. --keypair option is also needed."
option :server_name,
:short => "-N NAME",
:long => "--server-name NAME",
:description => "The server name"
option :host_name,
:short => "-H NAME",
:long => "--hostname NAME",
:description => "The hostname"
option :keypair,
:short => "-k KEYPAIR",
:long => "--keypair KEYPAIR",
:description => "The CloudStack Key Pair to use for SSH key authentication."
option :diskoffering,
:short => "-D DISKOFFERINGID",
:long => "--diskoffering DISKOFFERINGID",
:description => "Specifies either the Disk Offering ID for the ROOT disk for an ISO template, or a DATA disk."
# Builds a configured Chef::Knife::Bootstrap instance targeting +host+.
#
# Copies the run list, SSH credentials, distro/template and environment
# settings from this command's config. Nothing is executed here; the
# caller invokes #run on the returned object.
# NOTE(review): the SSH password is written to the debug log below --
# consider redacting.
def bootstrap_for_node(host, user, password)
  Chef::Log.debug("Bootstrap host: #{host}")
  Chef::Log.debug("Bootstrap user: #{user}")
  Chef::Log.debug("Bootstrap pass: #{password}")
  bootstrap = Chef::Knife::Bootstrap.new
  bootstrap.name_args = host
  bootstrap.config[:run_list] = config[:run_list]
  bootstrap.config[:ssh_user] = user
  bootstrap.config[:ssh_password] = password
  bootstrap.config[:identity_file] = locate_config_value(:identity_file)
  bootstrap.config[:chef_node_name] = config[:server_name] if config[:server_name]
  bootstrap.config[:prerelease] = config[:prerelease]
  bootstrap.config[:bootstrap_version] = locate_config_value(:bootstrap_version)
  bootstrap.config[:distro] = locate_config_value(:distro)
  # sudo is always enabled for the bootstrap run.
  bootstrap.config[:use_sudo] = true
  bootstrap.config[:template_file] = locate_config_value(:template_file)
  bootstrap.config[:environment] = config[:environment]
  # may be needed for vpc_mode
  bootstrap.config[:no_host_key_verify] = config[:no_host_key_verify]
  bootstrap
end
# Probes TCP port 22 on +hostname+ to see whether sshd is accepting
# connections yet.
#
# Yields once and returns true when the port becomes readable within 5
# seconds; returns false otherwise. Transient network errors are swallowed
# and reported as false, with short sleeps for connection-refused /
# host-unreachable and a longer back-off when the network is unreachable.
def tcp_test_ssh(hostname)
  tcp_socket = TCPSocket.new(hostname, 22)
  readable = IO.select([tcp_socket], nil, nil, 5)
  if readable
    # Reading the banner confirms we reached a live SSH daemon.
    Chef::Log.debug("\nsshd accepting connections on #{hostname}, banner is #{tcp_socket.gets}\n")
    yield
    true
  else
    false
  end
rescue Errno::ETIMEDOUT
  false
rescue Errno::EPERM
  false
rescue Errno::ECONNREFUSED
  sleep 2
  false
rescue Errno::EHOSTUNREACH
  sleep 2
  false
rescue Errno::ENETUNREACH
  sleep 30
  false
ensure
  # Always release the socket, even on error paths.
  tcp_socket && tcp_socket.close
end
# Entry point: deploys the CloudStack VM, polls the async deploy job,
# then waits for sshd and bootstraps the new server as a Chef node.
def run
  $stdout.sync = true
  # Assemble deployVirtualMachine parameters from CLI/knife config.
  options = {}
  options['zoneid'] = locate_config_value(:cloudstack_zoneid)
  options['templateid'] = locate_config_value(:cloudstack_templateid)
  if locate_config_value(:cloudstack_serviceid) != nil
    options['serviceofferingid'] = locate_config_value(:cloudstack_serviceid)
  end
  if locate_config_value(:server_name) != nil
    options['displayname'] = locate_config_value(:server_name)
  end
  if locate_config_value(:host_name) != nil
    options['name'] = locate_config_value(:host_name)
  end
  network_ids = []
  if locate_config_value(:cloudstack_networkids) != []
    cs_networkids = locate_config_value(:cloudstack_networkids)
    cs_networkids.each do |id|
      network_ids.push(id)
    end
    options['networkids'] = network_ids
  end
  # Security group IDs take precedence over group names.
  security_groups = []
  if locate_config_value(:cloudstack_groupids) != []
    cs_groupids = locate_config_value(:cloudstack_groupids)
    cs_groupids.each do |id|
      security_groups.push(id)
    end
    options['securitygroupids'] = security_groups
  elsif locate_config_value(:cloudstack_groupnames) != []
    cs_groupnames = locate_config_value(:cloudstack_groupnames)
    cs_groupnames.each do |name|
      security_groups.push(name)
    end
    options['securitygroupnames'] = security_groups
  end
  if locate_config_value(:keypair) != nil
    options['keypair'] = locate_config_value(:keypair)
  end
  if locate_config_value(:diskoffering) != nil
    options['diskofferingid'] = locate_config_value(:diskoffering)
  end
  Chef::Log.debug("Options: #{options} \n")
  # Kick off the async deployment and poll the job every 15s until done.
  server = connection.deploy_virtual_machine(options)
  jobid = server['deployvirtualmachineresponse'].fetch('jobid')
  server_start = connection.query_async_job_result('jobid'=>jobid)
  Chef::Log.debug("Job ID: #{jobid} \n")
  print "#{ui.color("Waiting for server", :magenta)}"
  while server_start['queryasyncjobresultresponse'].fetch('jobstatus') == 0
    print "#{ui.color(".", :magenta)}"
    sleep(15)
    server_start = connection.query_async_job_result('jobid'=>jobid)
    Chef::Log.debug("Server_Start: #{server_start} \n")
  end
  puts "\n\n"
  # jobstatus 2 == job failed
  if server_start['queryasyncjobresultresponse'].fetch('jobstatus') == 2
    errortext = server_start['queryasyncjobresultresponse'].fetch('jobresult').fetch('errortext')
    puts "#{ui.color("ERROR! Job failed with #{errortext}", :red)}"
  end
  # jobstatus 1 == job succeeded
  if server_start['queryasyncjobresultresponse'].fetch('jobstatus') == 1
    Chef::Log.debug("Job ID: #{jobid} \n")
    Chef::Log.debug("Options: #{options} \n")
    server_start = connection.query_async_job_result('jobid'=>jobid)
    Chef::Log.debug("Server_Start: #{server_start} \n")
    server_info = server_start['queryasyncjobresultresponse']['jobresult']['virtualmachine']
    server_name = server_info['displayname']
    # BUGFIX: the instance identifier is returned under the 'name' key of
    # the virtualmachine job result; 'hostname' is not the field populated
    # here, so "Instance ID" was displayed incorrectly.
    server_id = server_info['name']
    server_serviceoffering = server_info['serviceofferingname']
    server_template = server_info['templatename']
    # Prefer the generated password returned by CloudStack, if any.
    if server_info['password'] != nil
      ssh_password = server_info['password']
    else
      ssh_password = locate_config_value(:ssh_password)
    end
    ssh_user = locate_config_value(:ssh_user)
    public_ip = nil
    if server_info['nic'].size > 0
      public_ip = server_info['nic'].first['ipaddress']
    end
    puts "\n\n"
    puts "#{ui.color("Name", :cyan)}: #{server_name}"
    puts "#{ui.color("Public IP", :cyan)}: #{public_ip}"
    puts "#{ui.color("Username", :cyan)}: #{ssh_user}"
    puts "#{ui.color("Password", :cyan)}: #{ssh_password}"
    print "\n#{ui.color("Waiting for sshd", :magenta)}"
    print("#{ui.color(".", :magenta)}") until tcp_test_ssh(public_ip) { sleep @initial_sleep_delay ||= 10; puts("done") }
    bootstrap_for_node(public_ip, ssh_user, ssh_password).run
    puts "\n"
    puts "#{ui.color("Instance Name", :green)}: #{server_name}"
    puts "#{ui.color("Instance ID", :green)}: #{server_id}"
    puts "#{ui.color("Service Offering", :green)}: #{server_serviceoffering}"
    puts "#{ui.color("Template", :green)}: #{server_template}"
    puts "#{ui.color("Public IP Address", :green)}: #{public_ip}"
    puts "#{ui.color("User", :green)}: #{ssh_user}"
    puts "#{ui.color("Password", :green)}: #{ssh_password}"
    puts "#{ui.color("Environment", :green)}: #{config[:environment] || '_default'}"
    puts "#{ui.color("Run List", :green)}: #{config[:run_list].join(', ')}"
  end
end
end
end
end
Fixed the server-name display bug: the instance ID is now read from the virtual machine's 'name' field instead of 'hostname'.
Also added a time delay so the cloud-set-guest-* scripts can complete
before knife tries to connect; previously knife would intermittently
open the SSH session before those scripts finished and would fail to
bootstrap the node.
# Author:: Jeff Moody (<jmoody@datapipe.com>), Takashi Kanai (<anikundesu@gmail.com>)
# Copyright:: Copyright (c) 2012 Datapipe, Copyright (c) 2012 IDC Frontier Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife/bootstrap'
Chef::Knife::Bootstrap.load_deps
require 'socket'
require 'net/ssh/multi'
require 'chef/json_compat'
require 'chef/knife/cloudstack_base'
class Chef
class Knife
class CloudstackServerCreate < Knife
include Knife::CloudstackBase
banner "knife cloudstack server create -s SERVICEID -t TEMPLATEID -z ZONEID (options)"
option :cloudstack_serviceid,
:short => "-s SERVICEID",
:long => "--serviceid SERVICEID",
:description => "The CloudStack service offering ID."
option :cloudstack_templateid,
:short => "-t TEMPLATEID",
:long => "--templateid TEMPLATEID",
:description => "The CloudStack template ID for the server."
option :cloudstack_zoneid,
:short => "-z ZONEID",
:long => "--zoneid ZONE",
:description => "The CloudStack zone ID for the server."
option :cloudstack_networkids,
:short => "-w NETWORKIDS",
:long => "--networkids NETWORKIDS",
:description => "Comma separated list of CloudStack network IDs.",
:proc => lambda { |n| n.split(/[\s,]+/) },
:default => []
option :cloudstack_groupids,
:short => "-g SECURITYGROUPIDS",
:long => "--groupids SECURITYGROUPIDS",
:description => "Comma separated list of CloudStack Security Group IDs.",
:proc => lambda { |n| n.split(/[\s,]+/) },
:default => []
option :cloudstack_groupnames,
:short => "-G SECURITYGROUPNAMES",
:long => "--groupnames SECURITYGROUPNAMES",
:description => "Comma separated list of CloudStack Security Group names. Each group name must be encapuslated in quotes if it contains whitespace.",
:proc => lambda { |n| n.split(/[\s,]+/) },
:default => []
option :distro,
:short => "-d DISTRO",
:long => "--distro DISTRO",
:description => "Bootstrap a distro using a template; default is 'ubuntu10.04-gems'",
:proc => Proc.new { |d| Chef::Config[:knife][:distro] = d },
:default => "ubuntu10.04-gems"
option :template_file,
:long => "--template-file TEMPLATE",
:description => "Full path to location of template to use",
:proc => Proc.new { |t| Chef::Config[:knife][:template_file] = t },
:default => false
option :run_list,
:short => "-r RUN_LIST",
:long => "--run-list RUN_LIST",
:description => "Comma separated list of roles/recipes to apply",
:proc => lambda { |o| o.split(/[\s,]+/) },
:default => []
option :ssh_user,
:short => "-x USERNAME",
:long => "--ssh-user USERNAME",
:description => "The ssh username",
:default => 'root'
option :ssh_password,
:short => "-P PASSWORD",
:long => "--ssh-password PASSWORD",
:description => "The ssh password"
option :identity_file,
:short => "-i PRIVATE_KEY_FILE",
:long => "--identity-file PRIVATE_KEY_FILE",
:description => "The Private key file for authenticating SSH session. --keypair option is also needed."
option :server_name,
:short => "-N NAME",
:long => "--server-name NAME",
:description => "The server name"
option :host_name,
:short => "-H NAME",
:long => "--hostname NAME",
:description => "The hostname"
option :keypair,
:short => "-k KEYPAIR",
:long => "--keypair KEYPAIR",
:description => "The CloudStack Key Pair to use for SSH key authentication."
option :diskoffering,
:short => "-D DISKOFFERINGID",
:long => "--diskoffering DISKOFFERINGID",
:description => "Specifies either the Disk Offering ID for the ROOT disk for an ISO template, or a DATA disk."
# Builds a configured Chef::Knife::Bootstrap instance targeting +host+.
#
# Copies the run list, SSH credentials, distro/template and environment
# settings from this command's config. Nothing is executed here; the
# caller invokes #run on the returned object.
# NOTE(review): the SSH password is written to the debug log below --
# consider redacting.
def bootstrap_for_node(host, user, password)
  Chef::Log.debug("Bootstrap host: #{host}")
  Chef::Log.debug("Bootstrap user: #{user}")
  Chef::Log.debug("Bootstrap pass: #{password}")
  bootstrap = Chef::Knife::Bootstrap.new
  bootstrap.name_args = host
  bootstrap.config[:run_list] = config[:run_list]
  bootstrap.config[:ssh_user] = user
  bootstrap.config[:ssh_password] = password
  bootstrap.config[:identity_file] = locate_config_value(:identity_file)
  bootstrap.config[:chef_node_name] = config[:server_name] if config[:server_name]
  bootstrap.config[:prerelease] = config[:prerelease]
  bootstrap.config[:bootstrap_version] = locate_config_value(:bootstrap_version)
  bootstrap.config[:distro] = locate_config_value(:distro)
  # sudo is always enabled for the bootstrap run.
  bootstrap.config[:use_sudo] = true
  bootstrap.config[:template_file] = locate_config_value(:template_file)
  bootstrap.config[:environment] = config[:environment]
  # may be needed for vpc_mode
  bootstrap.config[:no_host_key_verify] = config[:no_host_key_verify]
  bootstrap
end
# Probes TCP port 22 on +hostname+ to see whether sshd is accepting
# connections yet, printing a progress dot per attempt.
#
# Yields once and returns true when the port becomes readable within 5
# seconds; returns false otherwise. Transient network errors are swallowed
# and reported as false, with short sleeps for connection-refused /
# host-unreachable and a longer back-off when the network is unreachable.
def tcp_test_ssh(hostname)
  # NOTE(review): the caller's wait loop also prints a dot per attempt,
  # so progress dots are printed twice per retry -- confirm intended.
  print("#{ui.color(".", :magenta)}")
  tcp_socket = TCPSocket.new(hostname, 22)
  readable = IO.select([tcp_socket], nil, nil, 5)
  if readable
    # Reading the banner confirms we reached a live SSH daemon.
    Chef::Log.debug("\nsshd accepting connections on #{hostname}, banner is #{tcp_socket.gets}\n")
    yield
    true
  else
    false
  end
rescue Errno::ETIMEDOUT
  false
rescue Errno::EPERM
  false
rescue Errno::ECONNREFUSED
  sleep 2
  false
rescue Errno::EHOSTUNREACH
  sleep 2
  false
rescue Errno::ENETUNREACH
  sleep 30
  false
ensure
  # Always release the socket, even on error paths.
  tcp_socket && tcp_socket.close
end
# Entry point: deploys the CloudStack VM, polls the async deploy job,
# then waits for sshd and bootstraps the new server as a Chef node.
def run
  $stdout.sync = true
  # Assemble deployVirtualMachine parameters from CLI/knife config.
  options = {}
  options['zoneid'] = locate_config_value(:cloudstack_zoneid)
  options['templateid'] = locate_config_value(:cloudstack_templateid)
  if locate_config_value(:cloudstack_serviceid) != nil
    options['serviceofferingid'] = locate_config_value(:cloudstack_serviceid)
  end
  if locate_config_value(:server_name) != nil
    options['displayname'] = locate_config_value(:server_name)
  end
  if locate_config_value(:host_name) != nil
    options['name'] = locate_config_value(:host_name)
  end
  network_ids = []
  if locate_config_value(:cloudstack_networkids) != []
    cs_networkids = locate_config_value(:cloudstack_networkids)
    cs_networkids.each do |id|
      network_ids.push(id)
    end
    options['networkids'] = network_ids
  end
  # Security group IDs take precedence over group names.
  security_groups = []
  if locate_config_value(:cloudstack_groupids) != []
    cs_groupids = locate_config_value(:cloudstack_groupids)
    cs_groupids.each do |id|
      security_groups.push(id)
    end
    options['securitygroupids'] = security_groups
  elsif locate_config_value(:cloudstack_groupnames) != []
    cs_groupnames = locate_config_value(:cloudstack_groupnames)
    cs_groupnames.each do |name|
      security_groups.push(name)
    end
    options['securitygroupnames'] = security_groups
  end
  if locate_config_value(:keypair) != nil
    options['keypair'] = locate_config_value(:keypair)
  end
  if locate_config_value(:diskoffering) != nil
    options['diskofferingid'] = locate_config_value(:diskoffering)
  end
  Chef::Log.debug("Options: #{options} \n")
  # Kick off the async deployment and poll the job every 15s until done.
  server = connection.deploy_virtual_machine(options)
  jobid = server['deployvirtualmachineresponse'].fetch('jobid')
  server_start = connection.query_async_job_result('jobid'=>jobid)
  Chef::Log.debug("Job ID: #{jobid} \n")
  print "#{ui.color("Waiting for server", :magenta)}"
  while server_start['queryasyncjobresultresponse'].fetch('jobstatus') == 0
    print "#{ui.color(".", :magenta)}"
    sleep(15)
    server_start = connection.query_async_job_result('jobid'=>jobid)
    Chef::Log.debug("Server_Start: #{server_start} \n")
  end
  puts "\n\n"
  # jobstatus 2 == job failed
  if server_start['queryasyncjobresultresponse'].fetch('jobstatus') == 2
    errortext = server_start['queryasyncjobresultresponse'].fetch('jobresult').fetch('errortext')
    puts "#{ui.color("ERROR! Job failed with #{errortext}", :red)}"
  end
  # jobstatus 1 == job succeeded
  if server_start['queryasyncjobresultresponse'].fetch('jobstatus') == 1
    Chef::Log.debug("Job ID: #{jobid} \n")
    Chef::Log.debug("Options: #{options} \n")
    server_start = connection.query_async_job_result('jobid'=>jobid)
    Chef::Log.debug("Server_Start: #{server_start} \n")
    server_info = server_start['queryasyncjobresultresponse']['jobresult']['virtualmachine']
    server_name = server_info['displayname']
    server_id = server_info['name']
    server_serviceoffering = server_info['serviceofferingname']
    server_template = server_info['templatename']
    # Prefer the generated password returned by CloudStack, if any.
    if server_info['password'] != nil
      ssh_password = server_info['password']
    else
      ssh_password = locate_config_value(:ssh_password)
    end
    ssh_user = locate_config_value(:ssh_user)
    public_ip = nil
    if server_info['nic'].size > 0
      public_ip = server_info['nic'].first['ipaddress']
    end
    puts "\n\n"
    puts "#{ui.color("Name", :cyan)}: #{server_name}"
    puts "#{ui.color("Public IP", :cyan)}: #{public_ip}"
    puts "#{ui.color("Username", :cyan)}: #{ssh_user}"
    puts "#{ui.color("Password", :cyan)}: #{ssh_password}"
    print "\n#{ui.color("Waiting for sshd", :magenta)}"
    print("#{ui.color(".", :magenta)}") until tcp_test_ssh(public_ip) { sleep @initial_sleep_delay ||= 10; puts("done") }
    # Give the cloud-set-guest-* scripts time to finish syncing the
    # password/SSH keys before bootstrapping over SSH.
    puts("#{ui.color("Waiting for password/keys to sync.", :magenta)}")
    sleep 15
    bootstrap_for_node(public_ip, ssh_user, ssh_password).run
    Chef::Log.debug("#{server_info}")
    puts "\n"
    puts "#{ui.color("Instance Name", :green)}: #{server_name}"
    puts "#{ui.color("Instance ID", :green)}: #{server_id}"
    puts "#{ui.color("Service Offering", :green)}: #{server_serviceoffering}"
    puts "#{ui.color("Template", :green)}: #{server_template}"
    puts "#{ui.color("Public IP Address", :green)}: #{public_ip}"
    puts "#{ui.color("User", :green)}: #{ssh_user}"
    puts "#{ui.color("Password", :green)}: #{ssh_password}"
    puts "#{ui.color("Environment", :green)}: #{config[:environment] || '_default'}"
    puts "#{ui.color("Run List", :green)}: #{config[:run_list].join(', ')}"
  end
end
end
end
end
|
# Author:: Adam Jacob (<adam@chef.io>)
# Copyright:: Copyright 2008-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/provider/package/yum/python_helper"
require "chef/provider/package"
require "singleton"
class Chef
  class Provider
    class Package
      class Yum < Chef::Provider::Package
        # Cache for our installed and available packages, pulled in from yum-dump.py
        class YumCache
          include Singleton

          # All of the legacy cache-invalidation entry points below are kept
          # for API compatibility; each one simply restarts the python helper
          # process, which rebuilds its package state from scratch.
          def refresh
            python_helper.restart
          end

          def reload
            python_helper.restart
          end

          def reload_installed
            python_helper.restart
          end

          def reload_provides
            python_helper.restart
          end

          def reset
            python_helper.restart
          end

          def reset_installed
            python_helper.restart
          end

          # Returns "version.arch" for the best available candidate of +name+.
          def available_version(name)
            p = python_helper.package_query(:whatavailable, name)
            "#{p.version}.#{p.arch}"
          end

          # Returns "version.arch" for the installed package +name+.
          def installed_version(name)
            p = python_helper.package_query(:whatinstalled, name)
            "#{p.version}.#{p.arch}"
          end

          private

          # Lazily grabs the singleton python helper subprocess wrapper.
          def python_helper
            @python_helper ||= PythonHelper.instance
          end
        end # YumCache
      end
    end
  end
end
Add package_available? and version_available?
Signed-off-by: Lamont Granquist <0ab8dc438f73addc98d9ad5925ec8f2b97991703@scriptkiddie.org>
# Author:: Adam Jacob (<adam@chef.io>)
# Copyright:: Copyright 2008-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/provider/package/yum/python_helper"
require "chef/provider/package"
require "singleton"
class Chef
  class Provider
    class Package
      class Yum < Chef::Provider::Package
        # Cache for our installed and available packages, pulled in from yum-dump.py
        class YumCache
          include Singleton

          # All of the legacy cache-invalidation entry points below are kept
          # for API compatibility; each one simply restarts the python helper
          # process, which rebuilds its package state from scratch.
          def refresh
            python_helper.restart
          end

          def reload
            python_helper.restart
          end

          def reload_installed
            python_helper.restart
          end

          def reload_provides
            python_helper.restart
          end

          def reset
            python_helper.restart
          end

          def reset_installed
            python_helper.restart
          end

          # Returns "version.arch" for the best available candidate of +name+.
          def available_version(name)
            p = python_helper.package_query(:whatavailable, name)
            "#{p.version}.#{p.arch}"
          end

          # Returns "version.arch" for the installed package +name+.
          def installed_version(name)
            p = python_helper.package_query(:whatinstalled, name)
            "#{p.version}.#{p.arch}"
          end

          # True when any version of +package_name+ is available.
          # BUGFIX: previously queried the undefined identifier `name`
          # instead of the +package_name+ parameter, raising NameError
          # whenever it was called.
          def package_available?(package_name)
            p = python_helper.package_query(:whatavailable, package_name)
            !p.version.nil?
          end

          # True when the given +version+ (and optional +arch+) of
          # +package_name+ is available.
          # BUGFIX: same undefined-identifier defect as package_available?.
          def version_available?(package_name, version, arch = nil)
            p = python_helper.package_query(:whatavailable, package_name, version, arch)
            !p.version.nil?
          end

          private

          # Lazily grabs the singleton python helper subprocess wrapper.
          def python_helper
            @python_helper ||= PythonHelper.instance
          end
        end # YumCache
      end
    end
  end
end
|
require 'chef/mixin/shell_out'
require 'chef/provisioning/driver'
require 'chef/provisioning/convergence_strategy/install_cached'
require 'chef/provisioning/convergence_strategy/install_sh'
require 'chef/provisioning/convergence_strategy/no_converge'
require 'chef/provisioning/transport/ssh'
require 'chef/provisioning/machine/windows_machine'
require 'chef/provisioning/machine/unix_machine'
require 'chef/provisioning/machine_spec'
require 'chef/provider/aws_key_pair'
require 'chef/resource/aws_key_pair'
require 'chef/provisioning/aws_driver/version'
require 'chef/provisioning/aws_driver/credentials'
require 'yaml'
require 'aws-sdk-v1'
class Chef
module Provisioning
module AWSDriver
# Provisions machines using the AWS SDK
class Driver < Chef::Provisioning::Driver
include Chef::Mixin::ShellOut
attr_reader :region
# URL scheme:
# aws:account_id:region
# TODO: migration path from fog:AWS - parse that URL
# canonical URL calls realpath on <path>
# Factory used by the chef-provisioning driver registry: builds a Driver
# for the given (already canonicalized) driver URL and Chef config.
def self.from_url(driver_url, config)
  Driver.new(driver_url, config)
end
# Sets up the driver: resolves the default AWS credential profile, stores
# its region, and configures the AWS SDK (v1) with its keys and region.
#
# NOTE(review): AWS.config mutates process-global SDK state, so two driver
# instances with different credentials would clobber each other -- verify.
def initialize(driver_url, config)
  super
  credentials = aws_credentials.default
  @region = credentials[:region]
  # TODO: fix credentials here
  AWS.config(:access_key_id => credentials[:aws_access_key_id],
             :secret_access_key => credentials[:aws_secret_access_key],
             :region => credentials[:region])
end
# Reduces a driver URL to its canonical "aws:<scheme>" form, returning
# the [canonical_url, config] pair expected by chef-provisioning.
#
# NOTE(review): only the first colon-separated segment survives, so
# "aws:123:us-east-1" canonicalizes to "aws:aws", dropping the account id
# and region -- confirm this is intentional.
def self.canonicalize_url(driver_url, config)
  scheme = driver_url.split(":").first
  [ "aws:#{scheme}", config ]
end
# Load balancer methods
# Create the ELB named by lb_spec when it does not exist; otherwise bring
# its availability zones, listeners and registered instances in line with
# lb_options / machine_specs.
#
# @param action_handler reports and performs actions
# @param lb_spec spec whose +name+ identifies the ELB
# @param lb_options [Hash] :security_group_name (default 'default'),
#   :security_group_id, :availability_zones, :listeners
# @param machine_specs [Array] machines whose instances should be registered
def allocate_load_balancer(action_handler, lb_spec, lb_options, machine_specs)
  security_group_name = lb_options[:security_group_name] || 'default'
  security_group_id = lb_options[:security_group_id]
  # Prefer an explicit group id; otherwise look the group up by name.
  security_group = if security_group_id.nil?
    ec2.security_groups.filter('group-name', security_group_name).first
  else
    ec2.security_groups[security_group_id]
  end
  availability_zones = lb_options[:availability_zones]
  listeners = lb_options[:listeners]
  actual_elb = load_balancer_for(lb_spec)
  if !actual_elb.exists?
    perform_action = proc { |desc, &block| action_handler.perform_action(desc, &block) }
    updates = [ "Create load balancer #{lb_spec.name} in #{@region}" ]
    updates << " enable availability zones #{availability_zones.join(', ')}" if availability_zones && availability_zones.size > 0
    updates << " with listeners #{listeners.join(', ')}" if listeners && listeners.size > 0
    updates << " with security group #{security_group.name}" if security_group
    action_handler.perform_action updates do
      actual_elb = elb.load_balancers.create(lb_spec.name,
        availability_zones: availability_zones,
        listeners: listeners,
        security_groups: [security_group])
      lb_spec.location = {
        'driver_url' => driver_url,
        'driver_version' => Chef::Provisioning::AWSDriver::VERSION,
        'allocated_at' => Time.now.utc.to_s,
      }
    end
  else
    # Header gets printed the first time we make an update; the proc then
    # replaces itself with one that omits the header.
    perform_action = proc do |desc, &block|
      perform_action = proc { |d, &b| action_handler.perform_action(d, &b) }
      action_handler.perform_action [ "Update load balancer #{lb_spec.name} in #{@region}", desc ].flatten, &block
    end
    # Update availability zones
    enable_zones = (availability_zones || []).dup
    disable_zones = []
    actual_elb.availability_zones.each do |availability_zone|
      if !enable_zones.delete(availability_zone.name)
        disable_zones << availability_zone.name
      end
    end
    if enable_zones.size > 0
      perform_action.call(" enable availability zones #{enable_zones.join(', ')}") do
        actual_elb.availability_zones.enable(*enable_zones)
      end
    end
    if disable_zones.size > 0
      perform_action.call(" disable availability zones #{disable_zones.join(', ')}") do
        actual_elb.availability_zones.disable(*disable_zones)
      end
    end
    # Update listeners.
    # FIX: the original proc referenced `listener` before it was in scope and
    # invoked the local proc `perform_action` with method-call syntax (which
    # raises NoMethodError); take the listener as an argument and use #call.
    perform_listener_action = proc do |listener, desc, &block|
      perform_action.call([ " update listener #{listener.port}", desc ].flatten, &block)
    end
    add_listeners = {}
    listeners.each { |l| add_listeners[l[:port]] = l } if listeners
    actual_elb.listeners.each do |listener|
      desired_listener = add_listeners.delete(listener.port)
      if desired_listener
        # FIX: removed the stray trailing apostrophes from these messages.
        if listener.protocol != desired_listener[:protocol]
          perform_listener_action.call(listener, " update protocol from #{listener.protocol.inspect} to #{desired_listener[:protocol].inspect}") do
            listener.protocol = desired_listener[:protocol]
          end
        end
        if listener.instance_port != desired_listener[:instance_port]
          perform_listener_action.call(listener, " update instance port from #{listener.instance_port.inspect} to #{desired_listener[:instance_port].inspect}") do
            listener.instance_port = desired_listener[:instance_port]
          end
        end
        if listener.instance_protocol != desired_listener[:instance_protocol]
          perform_listener_action.call(listener, " update instance protocol from #{listener.instance_protocol.inspect} to #{desired_listener[:instance_protocol].inspect}") do
            listener.instance_protocol = desired_listener[:instance_protocol]
          end
        end
        if listener.server_certificate != desired_listener[:server_certificate]
          perform_listener_action.call(listener, " update server certificate from #{listener.server_certificate} to #{desired_listener[:server_certificate]}") do
            listener.server_certificate = desired_listener[:server_certificate]
          end
        end
      else
        perform_action.call(" remove listener #{listener.port}") do
          listener.delete
        end
      end
    end
    # FIX: add_listeners is a Hash keyed by port; iterating it with #each
    # yields [port, options] pairs, so `listener[:port]` (and creating the
    # pair as a listener) would have failed. Iterate the values.
    add_listeners.each_value do |listener|
      updates = [ " add listener #{listener[:port]}" ]
      updates << " set protocol to #{listener[:protocol].inspect}"
      updates << " set instance port to #{listener[:instance_port].inspect}"
      updates << " set instance protocol to #{listener[:instance_protocol].inspect}"
      updates << " set server certificate to #{listener[:server_certificate]}" if listener[:server_certificate]
      perform_action.call(updates) do
        actual_elb.listeners.create(listener)
      end
    end
  end
  # Update instance list
  actual_instance_ids = Set.new(actual_elb.instances.map { |i| i.instance_id })
  instances_to_add = machine_specs.select { |s| !actual_instance_ids.include?(s.location['instance_id']) }
  instance_ids_to_remove = actual_instance_ids - machine_specs.map { |s| s.location['instance_id'] }
  if instances_to_add.size > 0
    perform_action.call(" add machines #{instances_to_add.map { |s| s.name }.join(', ')}") do
      instance_ids_to_add = instances_to_add.map { |s| s.location['instance_id'] }
      Chef::Log.debug("Adding instances #{instance_ids_to_add.join(', ')} to load balancer #{actual_elb.name} in region #{@region}")
      actual_elb.instances.add(instance_ids_to_add)
    end
  end
  if instance_ids_to_remove.size > 0
    perform_action.call(" remove instances #{instance_ids_to_remove}") do
      actual_elb.instances.remove(instance_ids_to_remove)
    end
  end
end
# No-op: an allocated ELB is immediately usable.
def ready_load_balancer(action_handler, lb_spec, lb_options, machine_specs)
end
# Delete the ELB backing lb_spec (when one exists) and then remove the
# spec itself from storage. Safe to call with a nil spec.
def destroy_load_balancer(action_handler, lb_spec, lb_options)
  return if lb_spec.nil?
  actual_elb = load_balancer_for(lb_spec)
  if actual_elb && actual_elb.exists?
    # Remove ELB from AWS
    action_handler.perform_action "Deleting EC2 ELB #{lb_spec.id}" do
      actual_elb.delete
    end
  end
  # Remove LB spec from databag
  lb_spec.delete(action_handler)
end
# Image methods
# No-op: image support is not implemented for this driver yet.
def allocate_image(action_handler, image_spec, image_options, machine_spec)
end
# No-op: image support is not implemented for this driver yet.
def ready_image(action_handler, image_spec, image_options)
end
# No-op: image support is not implemented for this driver yet.
def destroy_image(action_handler, image_spec, image_options)
end
# Machine methods
# Create an EC2 instance for machine_spec unless a live one already
# exists; records driver/instance metadata in machine_spec.location.
def allocate_machine(action_handler, machine_spec, machine_options)
  actual_instance = instance_for(machine_spec)
  if actual_instance.nil? || !actual_instance.exists? || actual_instance.status == :terminated
    image_id = machine_options[:image_id] || default_ami_for_region(@region)
    bootstrap_options = machine_options[:bootstrap_options] || {}
    bootstrap_options[:image_id] = image_id
    if !bootstrap_options[:key_name]
      Chef::Log.debug('No key specified, generating a default one...')
      bootstrap_options[:key_name] = default_aws_keypair(action_handler, machine_spec)
    end
    Chef::Log.debug "AWS Bootstrap options: #{bootstrap_options.inspect}"
    action_handler.perform_action "Create #{machine_spec.name} with AMI #{image_id} in #{@region}" do
      Chef::Log.debug "Creating instance with bootstrap options #{bootstrap_options}"
      instance = ec2.instances.create(bootstrap_options)
      # Make sure the instance is ready to be tagged
      sleep 5 while instance.status == :pending
      # TODO add other tags identifying user / node url (same as fog)
      instance.tags['Name'] = machine_spec.name
      machine_spec.location = {
        'driver_url' => driver_url,
        'driver_version' => Chef::Provisioning::AWSDriver::VERSION,
        'allocated_at' => Time.now.utc.to_s,
        'host_node' => action_handler.host_node,
        # FIX: record the AMI actually used; the original stored
        # machine_options[:image_id], which is nil when the default AMI
        # for the region was chosen.
        'image_id' => image_id,
        'instance_id' => instance.id
      }
    end
  end
end
# Bring the machine's EC2 instance to a running, connectable state and
# return a Machine object wrapping it.
# @raise [RuntimeError] when no instance is associated with the spec
def ready_machine(action_handler, machine_spec, machine_options)
  instance = instance_for(machine_spec)
  if instance.nil?
    raise "Machine #{machine_spec.name} does not have an instance associated with it, or instance does not exist."
  end
  if instance.status != :running
    # wait out a transient :stopping state before deciding what to do
    wait_until(action_handler, machine_spec, instance) { instance.status != :stopping }
    if instance.status == :stopped
      action_handler.perform_action "Start #{machine_spec.name} (#{machine_spec.location['instance_id']}) in #{@region} ..." do
        instance.start
      end
    end
  end
  wait_until_ready(action_handler, machine_spec, instance)
  wait_for_transport(action_handler, machine_spec, machine_options)
  machine_for(machine_spec, machine_options, instance)
end
# Terminate the machine's EC2 instance (if any) and clean up its
# convergence artifacts (client, keys, etc.).
def destroy_machine(action_handler, machine_spec, machine_options)
  instance = instance_for(machine_spec)
  if instance && instance.exists?
    # TODO do we need to wait_until(action_handler, machine_spec, instance) { instance.status != :shutting_down } ?
    action_handler.perform_action "Terminate #{machine_spec.name} (#{machine_spec.location['instance_id']}) in #{@region} ..." do
      instance.terminate
      machine_spec.location = nil
    end
  else
    # FIX: guard against a nil location, which the original dereferenced
    # unconditionally when building the warning.
    instance_id = machine_spec.location ? machine_spec.location['instance_id'] : nil
    Chef::Log.warn "Instance #{instance_id} doesn't exist for #{machine_spec.name}"
  end
  strategy = convergence_strategy_for(machine_spec, machine_options)
  strategy.cleanup_convergence(action_handler, machine_spec)
end
private
# For creating things like AWS keypairs exclusively
@@chef_default_lock = Mutex.new
# Wrap the spec's EC2 instance in a Machine object — Windows or Unix
# flavour depending on the stored 'is_windows' flag.
# @raise [RuntimeError] when no instance exists for the spec
def machine_for(machine_spec, machine_options, instance = nil)
  instance ||= instance_for(machine_spec)
  if !instance
    raise "Instance for node #{machine_spec.name} has not been created!"
  end
  if machine_spec.location['is_windows']
    Chef::Provisioning::Machine::WindowsMachine.new(machine_spec, transport_for(machine_spec, machine_options, instance), convergence_strategy_for(machine_spec, machine_options))
  else
    Chef::Provisioning::Machine::UnixMachine.new(machine_spec, transport_for(machine_spec, machine_options, instance), convergence_strategy_for(machine_spec, machine_options))
  end
end
# No-op: starting a machine from a base image is not implemented yet.
def start_machine(action_handler, machine_spec, machine_options, base_image_name)
end
# Memoized EC2 API client.
def ec2
  @ec2 ||= AWS.ec2
end
# Memoized ELB API client.
def elb
  @elb ||= AWS::ELB.new
end
# Fallback SSH username when neither the machine spec's location nor the
# machine options specify one.
def default_ssh_username
  'ubuntu'
end
# Ensure the key pair named in bootstrap_options[:key_name] exists in EC2
# and return it; returns nil when no :key_name is given.
def keypair_for(bootstrap_options)
  if bootstrap_options[:key_name]
    keypair_name = bootstrap_options[:key_name]
    actual_key_pair = ec2.key_pairs[keypair_name]
    if !actual_key_pair.exists?
      ec2.key_pairs.create(keypair_name)
    end
    actual_key_pair
  end
end
# Look up the ELB recorded under lb_spec.name; nil when the spec has no
# name.
def load_balancer_for(lb_spec)
  return nil unless lb_spec.name
  elb.load_balancers[lb_spec.name]
end
def instance_for(machine_spec)
if machine_spec.location && machine_spec.location['instance_id']
ec2.instances[machine_spec.location['instance_id']]
else
nil
end
end
# Transport used to talk to the machine; currently SSH only.
def transport_for(machine_spec, machine_options, instance)
  # TODO winrm
  create_ssh_transport(machine_spec, machine_options, instance)
end
# No-op placeholder; returns nil.
def compute_options
end
# Resolve and memoize the credentials source: an explicit
# :aws_credentials driver option wins, then an INI config file, then a
# CSV file, then the standard default lookup.
# Note: the file-path options are deleted from driver_options on first use.
def aws_credentials
  # Grab the list of possible credentials
  @aws_credentials ||= if driver_options[:aws_credentials]
    driver_options[:aws_credentials]
  else
    credentials = Credentials.new
    if driver_options[:aws_config_file]
      credentials.load_ini(driver_options.delete(:aws_config_file))
    elsif driver_options[:aws_csv_file]
      credentials.load_csv(driver_options.delete(:aws_csv_file))
    else
      credentials.load_default
    end
    credentials
  end
end
# Default AMI id for the given AWS region.
# @param region [String] AWS region name, e.g. 'us-east-1'
# @return [String] AMI id
# @raise [RuntimeError] for regions with no default AMI configured
def default_ami_for_region(region)
  Chef::Log.debug("Choosing default AMI for region '#{region}'")
  ami = {
    'ap-northeast-1' => 'ami-c786dcc6',
    'ap-southeast-1' => 'ami-eefca7bc',
    'ap-southeast-2' => 'ami-996706a3',
    'eu-west-1'      => 'ami-4ab46b3d',
    'eu-central-1'   => 'ami-7c3c0a61',
    'sa-east-1'      => 'ami-6770d87a',
    'us-east-1'      => 'ami-d2ff23ba',
    'us-west-1'      => 'ami-73717d36',
    'us-west-2'      => 'ami-f1ce8bc1'
  }[region]
  # FIX: name the offending region instead of a bare 'Unsupported region!'
  raise "Unsupported region: #{region.inspect}" unless ami
  ami
end
# Build an SSH transport for the given instance: resolves the username,
# sudo prefix, target IP (private when requested or when no public IP
# exists) and gateway.
# NOTE(review): the sudo check reads machine_spec.location[:sudo] with a
# Symbol key while every other location access uses String keys — confirm
# which form callers actually store.
def create_ssh_transport(machine_spec, machine_options, instance)
  ssh_options = ssh_options_for(machine_spec, machine_options, instance)
  username = machine_spec.location['ssh_username'] || machine_options[:ssh_username] || default_ssh_username
  if machine_options.has_key?(:ssh_username) && machine_options[:ssh_username] != machine_spec.location['ssh_username']
    Chef::Log.warn("Server #{machine_spec.name} was created with SSH username #{machine_spec.location['ssh_username']} and machine_options specifies username #{machine_options[:ssh_username]}. Using #{machine_spec.location['ssh_username']}. Please edit the node and change the chef_provisioning.location.ssh_username attribute if you want to change it.")
  end
  options = {}
  if machine_spec.location[:sudo] || (!machine_spec.location.has_key?(:sudo) && username != 'root')
    options[:prefix] = 'sudo '
  end
  remote_host = nil
  if machine_spec.location['use_private_ip_for_ssh']
    remote_host = instance.private_ip_address
  elsif !instance.public_ip_address
    Chef::Log.warn("Server #{machine_spec.name} has no public ip address. Using private ip '#{instance.private_ip_address}'. Set driver option 'use_private_ip_for_ssh' => true if this will always be the case ...")
    remote_host = instance.private_ip_address
  elsif instance.public_ip_address
    remote_host = instance.public_ip_address
  else
    # NOTE(review): unreachable — the two branches above already cover both
    # truthy and falsy public_ip_address.
    raise "Server #{instance.id} has no private or public IP address!"
  end
  #Enable pty by default
  options[:ssh_pty_enable] = true
  options[:ssh_gateway] = machine_spec.location['ssh_gateway'] if machine_spec.location.has_key?('ssh_gateway')
  Chef::Provisioning::Transport::SSH.new(remote_host, username, ssh_options, options, config)
end
# Build the SSH options hash for connecting to instance.
# Key resolution order: instance private_key, instance key_name,
# machine_spec location key_name, bootstrap :key_path, bootstrap :key_name.
# @raise [RuntimeError] when no usable key can be found
def ssh_options_for(machine_spec, machine_options, instance)
  result = {
    # TODO create a user known hosts file
    # :user_known_hosts_file => vagrant_ssh_config['UserKnownHostsFile'],
    # :paranoid => true,
    :auth_methods => [ 'publickey' ],
    :keys_only => true,
    :host_key_alias => "#{instance.id}.AWS"
  }.merge(machine_options[:ssh_options] || {})
  if instance.respond_to?(:private_key) && instance.private_key
    result[:key_data] = [ instance.private_key ]
  elsif instance.respond_to?(:key_name) && instance.key_name
    key = get_private_key(instance.key_name)
    unless key
      raise "Server has key name '#{instance.key_name}', but the corresponding private key was not found locally. Check if the key is in Chef::Config.private_key_paths: #{Chef::Config.private_key_paths.join(', ')}"
    end
    result[:key_data] = [ key ]
  elsif machine_spec.location['key_name']
    key = get_private_key(machine_spec.location['key_name'])
    unless key
      raise "Server was created with key name '#{machine_spec.location['key_name']}', but the corresponding private key was not found locally. Check if the key is in Chef::Config.private_key_paths: #{Chef::Config.private_key_paths.join(', ')}"
    end
    result[:key_data] = [ key ]
  elsif machine_options[:bootstrap_options] && machine_options[:bootstrap_options][:key_path]
    result[:key_data] = [ IO.read(machine_options[:bootstrap_options][:key_path]) ]
  elsif machine_options[:bootstrap_options] && machine_options[:bootstrap_options][:key_name]
    result[:key_data] = [ get_private_key(machine_options[:bootstrap_options][:key_name]) ]
  else
    # TODO make a way to suggest other keys to try ...
    raise "No key found to connect to #{machine_spec.name} (#{machine_spec.location.inspect})!"
  end
  result
end
# Pick how Chef gets installed and run on the machine.
# FIXME(review): the Windows branch references
# Chef::Provisioning::ConvergenceStrategy::InstallMsi, but
# 'chef/provisioning/convergence_strategy/install_msi' is not required at
# the top of this file — that branch would raise NameError. Confirm the
# require exists elsewhere before relying on Windows support.
def convergence_strategy_for(machine_spec, machine_options)
  # Tell Ohai that this is an EC2 instance so that it runs the EC2 plugin
  machine_options[:convergence_options] ||= {}
  machine_options[:convergence_options][:ohai_hints] = { 'ec2' => ''}
  # Defaults
  if !machine_spec.location
    return Chef::Provisioning::ConvergenceStrategy::NoConverge.new(machine_options[:convergence_options], config)
  end
  if machine_spec.location['is_windows']
    Chef::Provisioning::ConvergenceStrategy::InstallMsi.new(machine_options[:convergence_options], config)
  elsif machine_options[:cached_installer] == true
    Chef::Provisioning::ConvergenceStrategy::InstallCached.new(machine_options[:convergence_options], config)
  else
    Chef::Provisioning::ConvergenceStrategy::InstallSh.new(machine_options[:convergence_options], config)
  end
end
# Block until the instance reports status :running (bounded by
# wait_until's internal timeout).
# NOTE(review): if instance is nil, the block still closes over nil while
# wait_until looks up its own copy — confirm callers always pass an
# instance here.
def wait_until_ready(action_handler, machine_spec, instance=nil)
  wait_until(action_handler, machine_spec, instance) { instance.status == :running }
end
# Poll until the block returns truthy for the instance, sleeping
# sleep_time seconds between checks and giving up after max_wait_time
# seconds. Progress is reported through action_handler.
def wait_until(action_handler, machine_spec, instance=nil, &block)
  instance ||= instance_for(machine_spec)
  time_elapsed = 0
  sleep_time = 10
  max_wait_time = 120
  if !yield(instance)
    if action_handler.should_perform_actions
      action_handler.report_progress "waiting for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be ready ..."
      # FIX: bound the loop by max_wait_time instead of a duplicated
      # literal 120.
      while time_elapsed < max_wait_time && !yield(instance)
        action_handler.report_progress "been waiting #{time_elapsed}/#{max_wait_time} -- sleeping #{sleep_time} seconds for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be ready ..."
        sleep(sleep_time)
        time_elapsed += sleep_time
      end
      action_handler.report_progress "#{machine_spec.name} is now ready"
    end
  end
end
# Block until the machine's transport (SSH) accepts connections, polling
# every sleep_time seconds up to max_wait_time seconds.
def wait_for_transport(action_handler, machine_spec, machine_options)
  instance = instance_for(machine_spec)
  time_elapsed = 0
  sleep_time = 10
  max_wait_time = 120
  transport = transport_for(machine_spec, machine_options, instance)
  unless transport.available?
    if action_handler.should_perform_actions
      action_handler.report_progress "waiting for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be connectable (transport up and running) ..."
      # FIX: bound the loop by max_wait_time instead of a duplicated
      # literal 120.
      while time_elapsed < max_wait_time && !transport.available?
        action_handler.report_progress "been waiting #{time_elapsed}/#{max_wait_time} -- sleeping #{sleep_time} seconds for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be connectable ..."
        sleep(sleep_time)
        time_elapsed += sleep_time
      end
      action_handler.report_progress "#{machine_spec.name} is now connectable"
    end
  end
end
def default_aws_keypair_name(machine_spec)
if machine_spec.location &&
Gem::Version.new(machine_spec.location['driver_version']) < Gem::Version.new('0.10')
'metal_default'
else
'chef_default'
end
end
# Ensure the shared default AWS key pair exists (via the aws_key_pair
# resource) and return its name. Serialized through a class-level mutex so
# parallel machine creation does not race on key creation.
def default_aws_keypair(action_handler, machine_spec)
  driver = self
  default_key_name = default_aws_keypair_name(machine_spec)
  _region = region
  updated = @@chef_default_lock.synchronize do
    Provisioning.inline_resource(action_handler) do
      aws_key_pair default_key_name do
        driver driver
        allow_overwrite true
        region_name _region
      end
    end
  end
  # Only warn the first time
  default_warning = 'Using default key, which is not shared between machines! It is recommended to create an AWS key pair with the fog_key_pair resource, and set :bootstrap_options => { :key_name => <key name> }'
  Chef::Log.warn(default_warning) if updated
  default_key_name
end
end
end
end
end
machine_batch now works with the AWS provisioner.
require 'chef/mixin/shell_out'
require 'chef/provisioning/driver'
require 'chef/provisioning/convergence_strategy/install_cached'
require 'chef/provisioning/convergence_strategy/install_sh'
require 'chef/provisioning/convergence_strategy/no_converge'
require 'chef/provisioning/transport/ssh'
require 'chef/provisioning/machine/windows_machine'
require 'chef/provisioning/machine/unix_machine'
require 'chef/provisioning/machine_spec'
require 'chef/provider/aws_key_pair'
require 'chef/resource/aws_key_pair'
require 'chef/provisioning/aws_driver/version'
require 'chef/provisioning/aws_driver/credentials'
require 'yaml'
require 'aws-sdk-v1'
class Chef
module Provisioning
module AWSDriver
# Provisions machines using the AWS SDK
class Driver < Chef::Provisioning::Driver
include Chef::Mixin::ShellOut
attr_reader :region
# URL scheme:
# aws:account_id:region
# TODO: migration path from fog:AWS - parse that URL
# canonical URL calls realpath on <path>
def self.from_url(driver_url, config)
Driver.new(driver_url, config)
end
def initialize(driver_url, config)
super
credentials = aws_credentials.default
@region = credentials[:region]
# TODO: fix credentials here
AWS.config(:access_key_id => credentials[:aws_access_key_id],
:secret_access_key => credentials[:aws_secret_access_key],
:region => credentials[:region])
end
def self.canonicalize_url(driver_url, config)
url = driver_url.split(":")[0]
[ "aws:#{url}", config ]
end
# Load balancer methods
# Create the ELB named by lb_spec when it does not exist; otherwise bring
# its availability zones, listeners and registered instances in line with
# lb_options / machine_specs.
#
# @param action_handler reports and performs actions
# @param lb_spec spec whose +name+ identifies the ELB
# @param lb_options [Hash] :security_group_name (default 'default'),
#   :security_group_id, :availability_zones, :listeners
# @param machine_specs [Array] machines whose instances should be registered
def allocate_load_balancer(action_handler, lb_spec, lb_options, machine_specs)
  security_group_name = lb_options[:security_group_name] || 'default'
  security_group_id = lb_options[:security_group_id]
  # Prefer an explicit group id; otherwise look the group up by name.
  security_group = if security_group_id.nil?
    ec2.security_groups.filter('group-name', security_group_name).first
  else
    ec2.security_groups[security_group_id]
  end
  availability_zones = lb_options[:availability_zones]
  listeners = lb_options[:listeners]
  actual_elb = load_balancer_for(lb_spec)
  if !actual_elb.exists?
    perform_action = proc { |desc, &block| action_handler.perform_action(desc, &block) }
    updates = [ "Create load balancer #{lb_spec.name} in #{@region}" ]
    updates << " enable availability zones #{availability_zones.join(', ')}" if availability_zones && availability_zones.size > 0
    updates << " with listeners #{listeners.join(', ')}" if listeners && listeners.size > 0
    updates << " with security group #{security_group.name}" if security_group
    action_handler.perform_action updates do
      actual_elb = elb.load_balancers.create(lb_spec.name,
        availability_zones: availability_zones,
        listeners: listeners,
        security_groups: [security_group])
      lb_spec.location = {
        'driver_url' => driver_url,
        'driver_version' => Chef::Provisioning::AWSDriver::VERSION,
        'allocated_at' => Time.now.utc.to_s,
      }
    end
  else
    # Header gets printed the first time we make an update; the proc then
    # replaces itself with one that omits the header.
    perform_action = proc do |desc, &block|
      perform_action = proc { |d, &b| action_handler.perform_action(d, &b) }
      action_handler.perform_action [ "Update load balancer #{lb_spec.name} in #{@region}", desc ].flatten, &block
    end
    # Update availability zones
    enable_zones = (availability_zones || []).dup
    disable_zones = []
    actual_elb.availability_zones.each do |availability_zone|
      if !enable_zones.delete(availability_zone.name)
        disable_zones << availability_zone.name
      end
    end
    if enable_zones.size > 0
      perform_action.call(" enable availability zones #{enable_zones.join(', ')}") do
        actual_elb.availability_zones.enable(*enable_zones)
      end
    end
    if disable_zones.size > 0
      perform_action.call(" disable availability zones #{disable_zones.join(', ')}") do
        actual_elb.availability_zones.disable(*disable_zones)
      end
    end
    # Update listeners.
    # FIX: the original proc referenced `listener` before it was in scope and
    # invoked the local proc `perform_action` with method-call syntax (which
    # raises NoMethodError); take the listener as an argument and use #call.
    perform_listener_action = proc do |listener, desc, &block|
      perform_action.call([ " update listener #{listener.port}", desc ].flatten, &block)
    end
    add_listeners = {}
    listeners.each { |l| add_listeners[l[:port]] = l } if listeners
    actual_elb.listeners.each do |listener|
      desired_listener = add_listeners.delete(listener.port)
      if desired_listener
        # FIX: removed the stray trailing apostrophes from these messages.
        if listener.protocol != desired_listener[:protocol]
          perform_listener_action.call(listener, " update protocol from #{listener.protocol.inspect} to #{desired_listener[:protocol].inspect}") do
            listener.protocol = desired_listener[:protocol]
          end
        end
        if listener.instance_port != desired_listener[:instance_port]
          perform_listener_action.call(listener, " update instance port from #{listener.instance_port.inspect} to #{desired_listener[:instance_port].inspect}") do
            listener.instance_port = desired_listener[:instance_port]
          end
        end
        if listener.instance_protocol != desired_listener[:instance_protocol]
          perform_listener_action.call(listener, " update instance protocol from #{listener.instance_protocol.inspect} to #{desired_listener[:instance_protocol].inspect}") do
            listener.instance_protocol = desired_listener[:instance_protocol]
          end
        end
        if listener.server_certificate != desired_listener[:server_certificate]
          perform_listener_action.call(listener, " update server certificate from #{listener.server_certificate} to #{desired_listener[:server_certificate]}") do
            listener.server_certificate = desired_listener[:server_certificate]
          end
        end
      else
        perform_action.call(" remove listener #{listener.port}") do
          listener.delete
        end
      end
    end
    # FIX: add_listeners is a Hash keyed by port; iterating it with #each
    # yields [port, options] pairs, so `listener[:port]` (and creating the
    # pair as a listener) would have failed. Iterate the values.
    add_listeners.each_value do |listener|
      updates = [ " add listener #{listener[:port]}" ]
      updates << " set protocol to #{listener[:protocol].inspect}"
      updates << " set instance port to #{listener[:instance_port].inspect}"
      updates << " set instance protocol to #{listener[:instance_protocol].inspect}"
      updates << " set server certificate to #{listener[:server_certificate]}" if listener[:server_certificate]
      perform_action.call(updates) do
        actual_elb.listeners.create(listener)
      end
    end
  end
  # Update instance list
  actual_instance_ids = Set.new(actual_elb.instances.map { |i| i.instance_id })
  instances_to_add = machine_specs.select { |s| !actual_instance_ids.include?(s.location['instance_id']) }
  instance_ids_to_remove = actual_instance_ids - machine_specs.map { |s| s.location['instance_id'] }
  if instances_to_add.size > 0
    perform_action.call(" add machines #{instances_to_add.map { |s| s.name }.join(', ')}") do
      instance_ids_to_add = instances_to_add.map { |s| s.location['instance_id'] }
      Chef::Log.debug("Adding instances #{instance_ids_to_add.join(', ')} to load balancer #{actual_elb.name} in region #{@region}")
      actual_elb.instances.add(instance_ids_to_add)
    end
  end
  if instance_ids_to_remove.size > 0
    perform_action.call(" remove instances #{instance_ids_to_remove}") do
      actual_elb.instances.remove(instance_ids_to_remove)
    end
  end
end
def ready_load_balancer(action_handler, lb_spec, lb_options, machine_specs)
end
def destroy_load_balancer(action_handler, lb_spec, lb_options)
return if lb_spec == nil
actual_elb = load_balancer_for(lb_spec)
if actual_elb && actual_elb.exists?
# Remove ELB from AWS
action_handler.perform_action "Deleting EC2 ELB #{lb_spec.id}" do
actual_elb.delete
end
end
# Remove LB spec from databag
lb_spec.delete(action_handler)
end
# Image methods
def allocate_image(action_handler, image_spec, image_options, machine_spec)
end
def ready_image(action_handler, image_spec, image_options)
end
def destroy_image(action_handler, image_spec, image_options)
end
# Machine methods
# Create an EC2 instance for machine_spec unless a live one already
# exists; records driver/instance metadata in machine_spec.location.
def allocate_machine(action_handler, machine_spec, machine_options)
  actual_instance = instance_for(machine_spec)
  if actual_instance.nil? || !actual_instance.exists? || actual_instance.status == :terminated
    image_id = machine_options[:image_id] || default_ami_for_region(@region)
    bootstrap_options = machine_options[:bootstrap_options] || {}
    bootstrap_options[:image_id] = image_id
    if !bootstrap_options[:key_name]
      Chef::Log.debug('No key specified, generating a default one...')
      bootstrap_options[:key_name] = default_aws_keypair(action_handler, machine_spec)
    end
    Chef::Log.debug "AWS Bootstrap options: #{bootstrap_options.inspect}"
    action_handler.perform_action "Create #{machine_spec.name} with AMI #{image_id} in #{@region}" do
      Chef::Log.debug "Creating instance with bootstrap options #{bootstrap_options}"
      instance = ec2.instances.create(bootstrap_options)
      # Make sure the instance is ready to be tagged
      sleep 5 while instance.status == :pending
      # TODO add other tags identifying user / node url (same as fog)
      instance.tags['Name'] = machine_spec.name
      machine_spec.location = {
        'driver_url' => driver_url,
        'driver_version' => Chef::Provisioning::AWSDriver::VERSION,
        'allocated_at' => Time.now.utc.to_s,
        'host_node' => action_handler.host_node,
        # FIX: record the AMI actually used; the original stored
        # machine_options[:image_id], which is nil when the default AMI
        # for the region was chosen.
        'image_id' => image_id,
        'instance_id' => instance.id
      }
    end
  end
end
# Batch allocation entry point used by machine_batch; yields each spec as
# its server is created and returns the list of specs.
# NOTE(review): create_servers is not defined anywhere in this class as
# shown — confirm it exists (superclass or mixin) before relying on this
# method.
def allocate_machines(action_handler, specs_and_options, parallelizer)
  #Chef::Log.warn("#{specs_and_options}")
  create_servers(action_handler, specs_and_options, parallelizer) do |machine_spec, server|
    #Chef::Log.warn("#{machine_spec}")
    yield machine_spec
  end
  specs_and_options.keys
end
def ready_machine(action_handler, machine_spec, machine_options)
instance = instance_for(machine_spec)
if instance.nil?
raise "Machine #{machine_spec.name} does not have an instance associated with it, or instance does not exist."
end
if instance.status != :running
wait_until(action_handler, machine_spec, instance) { instance.status != :stopping }
if instance.status == :stopped
action_handler.perform_action "Start #{machine_spec.name} (#{machine_spec.location['instance_id']}) in #{@region} ..." do
instance.start
end
end
end
wait_until_ready(action_handler, machine_spec, instance)
wait_for_transport(action_handler, machine_spec, machine_options)
machine_for(machine_spec, machine_options, instance)
end
# Terminate the machine's EC2 instance (if any) and clean up its
# convergence artifacts (client, keys, etc.).
def destroy_machine(action_handler, machine_spec, machine_options)
  instance = instance_for(machine_spec)
  if instance && instance.exists?
    # TODO do we need to wait_until(action_handler, machine_spec, instance) { instance.status != :shutting_down } ?
    action_handler.perform_action "Terminate #{machine_spec.name} (#{machine_spec.location['instance_id']}) in #{@region} ..." do
      instance.terminate
      machine_spec.location = nil
    end
  else
    # FIX: guard against a nil location, which the original dereferenced
    # unconditionally when building the warning.
    instance_id = machine_spec.location ? machine_spec.location['instance_id'] : nil
    Chef::Log.warn "Instance #{instance_id} doesn't exist for #{machine_spec.name}"
  end
  strategy = convergence_strategy_for(machine_spec, machine_options)
  strategy.cleanup_convergence(action_handler, machine_spec)
end
private
# For creating things like AWS keypairs exclusively
@@chef_default_lock = Mutex.new
def machine_for(machine_spec, machine_options, instance = nil)
instance ||= instance_for(machine_spec)
if !instance
raise "Instance for node #{machine_spec.name} has not been created!"
end
if machine_spec.location['is_windows']
Chef::Provisioning::Machine::WindowsMachine.new(machine_spec, transport_for(machine_spec, machine_options, instance), convergence_strategy_for(machine_spec, machine_options))
else
Chef::Provisioning::Machine::UnixMachine.new(machine_spec, transport_for(machine_spec, machine_options, instance), convergence_strategy_for(machine_spec, machine_options))
end
end
def start_machine(action_handler, machine_spec, machine_options, base_image_name)
end
def ec2
@ec2 ||= AWS.ec2
end
def elb
@elb ||= AWS::ELB.new
end
def default_ssh_username
'ubuntu'
end
def keypair_for(bootstrap_options)
if bootstrap_options[:key_name]
keypair_name = bootstrap_options[:key_name]
actual_key_pair = ec2.key_pairs[keypair_name]
if !actual_key_pair.exists?
ec2.key_pairs.create(keypair_name)
end
actual_key_pair
end
end
def load_balancer_for(lb_spec)
if lb_spec.name
elb.load_balancers[lb_spec.name]
else
nil
end
end
def instance_for(machine_spec)
if machine_spec.location && machine_spec.location['instance_id']
ec2.instances[machine_spec.location['instance_id']]
else
nil
end
end
def instances_for(machine_specs)
result = {}
machine_specs.each do |machine_spec|
if machine_spec.location && machine_spec.location['instance_id']
if machine_spec.location['driver_url'] != driver_url
raise "Switching a machine's driver from #{machine_spec.location['driver_url']} to #{driver_url} is not currently supported! Use machine :destroy and then re-create the machine on the new driver."
end
#returns nil if not found
result[machine_spec] = ec2.instances[machine_spec.location['instance_id']]
end
end
result
end
def transport_for(machine_spec, machine_options, instance)
# TODO winrm
create_ssh_transport(machine_spec, machine_options, instance)
end
def compute_options
end
def aws_credentials
# Grab the list of possible credentials
@aws_credentials ||= if driver_options[:aws_credentials]
driver_options[:aws_credentials]
else
credentials = Credentials.new
if driver_options[:aws_config_file]
credentials.load_ini(driver_options.delete(:aws_config_file))
elsif driver_options[:aws_csv_file]
credentials.load_csv(driver_options.delete(:aws_csv_file))
else
credentials.load_default
end
credentials
end
end
# Default AMI id for the given AWS region.
# @param region [String] AWS region name, e.g. 'us-east-1'
# @return [String] AMI id
# @raise [RuntimeError] for regions with no default AMI configured
def default_ami_for_region(region)
  Chef::Log.debug("Choosing default AMI for region '#{region}'")
  ami = {
    'ap-northeast-1' => 'ami-c786dcc6',
    'ap-southeast-1' => 'ami-eefca7bc',
    'ap-southeast-2' => 'ami-996706a3',
    'eu-west-1'      => 'ami-4ab46b3d',
    'eu-central-1'   => 'ami-7c3c0a61',
    'sa-east-1'      => 'ami-6770d87a',
    'us-east-1'      => 'ami-d2ff23ba',
    'us-west-1'      => 'ami-73717d36',
    'us-west-2'      => 'ami-f1ce8bc1'
  }[region]
  # FIX: name the offending region instead of a bare 'Unsupported region!'
  raise "Unsupported region: #{region.inspect}" unless ami
  ami
end
# Builds the SSH transport used to reach the given EC2 instance.
# Username priority: spec location, then machine_options, then the driver
# default. A sudo prefix is added unless the user is root (or the spec says
# otherwise).
#
# Fixed: the address-selection chain previously had an unreachable
# `else raise` (the `!public` / `public` tests were exhaustive), so an
# instance with neither address silently produced remote_host = nil. The
# raise now actually fires in that case.
def create_ssh_transport(machine_spec, machine_options, instance)
  ssh_options = ssh_options_for(machine_spec, machine_options, instance)
  username = machine_spec.location['ssh_username'] || machine_options[:ssh_username] || default_ssh_username
  if machine_options.has_key?(:ssh_username) && machine_options[:ssh_username] != machine_spec.location['ssh_username']
    Chef::Log.warn("Server #{machine_spec.name} was created with SSH username #{machine_spec.location['ssh_username']} and machine_options specifies username #{machine_options[:ssh_username]}. Using #{machine_spec.location['ssh_username']}. Please edit the node and change the chef_provisioning.location.ssh_username attribute if you want to change it.")
  end
  options = {}
  if machine_spec.location[:sudo] || (!machine_spec.location.has_key?(:sudo) && username != 'root')
    options[:prefix] = 'sudo '
  end

  # Pick the address to connect to.
  remote_host =
    if machine_spec.location['use_private_ip_for_ssh']
      instance.private_ip_address
    elsif instance.public_ip_address
      instance.public_ip_address
    elsif instance.private_ip_address
      Chef::Log.warn("Server #{machine_spec.name} has no public ip address. Using private ip '#{instance.private_ip_address}'. Set driver option 'use_private_ip_for_ssh' => true if this will always be the case ...")
      instance.private_ip_address
    else
      raise "Server #{instance.id} has no private or public IP address!"
    end

  #Enable pty by default
  options[:ssh_pty_enable] = true
  options[:ssh_gateway] = machine_spec.location['ssh_gateway'] if machine_spec.location.has_key?('ssh_gateway')

  Chef::Provisioning::Transport::SSH.new(remote_host, username, ssh_options, options, config)
end
# Assembles the Net::SSH options hash for connecting to the instance.
#
# Key material is resolved in priority order:
#   1. the instance's own private_key attribute,
#   2. the private key matching the instance's EC2 key pair name,
#   3. the key name recorded in machine_spec.location,
#   4. bootstrap_options :key_path, then :key_name.
# Raises when none of these yields a key. Anything the caller puts in
# machine_options[:ssh_options] overrides the computed defaults (merge).
def ssh_options_for(machine_spec, machine_options, instance)
  result = {
    # TODO create a user known hosts file
    #      :user_known_hosts_file => vagrant_ssh_config['UserKnownHostsFile'],
    #      :paranoid => true,
    :auth_methods => [ 'publickey' ],
    :keys_only => true,
    # Alias keeps known_hosts entries stable across IP changes.
    :host_key_alias => "#{instance.id}.AWS"
  }.merge(machine_options[:ssh_options] || {})
  if instance.respond_to?(:private_key) && instance.private_key
    result[:key_data] = [ instance.private_key ]
  elsif instance.respond_to?(:key_name) && instance.key_name
    key = get_private_key(instance.key_name)
    unless key
      raise "Server has key name '#{instance.key_name}', but the corresponding private key was not found locally.  Check if the key is in Chef::Config.private_key_paths: #{Chef::Config.private_key_paths.join(', ')}"
    end
    result[:key_data] = [ key ]
  elsif machine_spec.location['key_name']
    key = get_private_key(machine_spec.location['key_name'])
    unless key
      raise "Server was created with key name '#{machine_spec.location['key_name']}', but the corresponding private key was not found locally.  Check if the key is in Chef::Config.private_key_paths: #{Chef::Config.private_key_paths.join(', ')}"
    end
    result[:key_data] = [ key ]
  elsif machine_options[:bootstrap_options] && machine_options[:bootstrap_options][:key_path]
    result[:key_data] = [ IO.read(machine_options[:bootstrap_options][:key_path]) ]
  elsif machine_options[:bootstrap_options] && machine_options[:bootstrap_options][:key_name]
    # NOTE(review): unlike the branches above, a nil key here is not
    # detected until the SSH handshake fails — confirm get_private_key
    # cannot return nil for this path.
    result[:key_data] = [ get_private_key(machine_options[:bootstrap_options][:key_name]) ]
  else
    # TODO make a way to suggest other keys to try ...
    raise "No key found to connect to #{machine_spec.name} (#{machine_spec.location.inspect})!"
  end
  result
end
# Chooses the convergence strategy for a machine: NoConverge when the spec
# has no location yet, InstallMsi on Windows, InstallCached when a cached
# installer was requested, InstallSh otherwise.
def convergence_strategy_for(machine_spec, machine_options)
  # Tell Ohai that this is an EC2 instance so that it runs the EC2 plugin
  convergence_options = (machine_options[:convergence_options] ||= {})
  convergence_options[:ohai_hints] = { 'ec2' => ''}

  # Defaults
  unless machine_spec.location
    return Chef::Provisioning::ConvergenceStrategy::NoConverge.new(convergence_options, config)
  end

  strategy_class =
    if machine_spec.location['is_windows']
      Chef::Provisioning::ConvergenceStrategy::InstallMsi
    elsif machine_options[:cached_installer] == true
      Chef::Provisioning::ConvergenceStrategy::InstallCached
    else
      Chef::Provisioning::ConvergenceStrategy::InstallSh
    end
  strategy_class.new(convergence_options, config)
end
# Blocks until the EC2 instance reports status :running.
#
# Fixed: the readiness block previously closed over the (possibly nil)
# `instance` argument instead of the instance wait_until yields, so calling
# this without an explicit instance raised NoMethodError on nil.
def wait_until_ready(action_handler, machine_spec, instance=nil)
  wait_until(action_handler, machine_spec, instance) { |i| i.status == :running }
end
# Polls the given block (yielding the instance) until it returns truthy or
# max_wait_time seconds have elapsed, sleeping sleep_time between polls.
# instance defaults to the spec's instance. Progress goes through
# action_handler; note the loop gives up silently after the timeout rather
# than raising.
#
# Fixed: the loop condition compared against a literal 120, shadowing the
# max_wait_time variable defined just above it.
def wait_until(action_handler, machine_spec, instance=nil, &block)
  instance ||= instance_for(machine_spec)
  time_elapsed = 0
  sleep_time = 10
  max_wait_time = 120
  if !yield(instance)
    if action_handler.should_perform_actions
      action_handler.report_progress "waiting for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be ready ..."
      while time_elapsed < max_wait_time && !yield(instance)
        action_handler.report_progress "been waiting #{time_elapsed}/#{max_wait_time} -- sleeping #{sleep_time} seconds for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be ready ..."
        sleep(sleep_time)
        time_elapsed += sleep_time
      end
      action_handler.report_progress "#{machine_spec.name} is now ready"
    end
  end
end
# Waits until the machine's transport (e.g. SSH) accepts connections,
# polling every sleep_time seconds for at most max_wait_time seconds.
# Gives up silently after the timeout rather than raising.
#
# Fixed: the loop condition compared against a literal 120, shadowing the
# max_wait_time variable defined just above it.
def wait_for_transport(action_handler, machine_spec, machine_options)
  instance = instance_for(machine_spec)
  time_elapsed = 0
  sleep_time = 10
  max_wait_time = 120
  transport = transport_for(machine_spec, machine_options, instance)
  unless transport.available?
    if action_handler.should_perform_actions
      action_handler.report_progress "waiting for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be connectable (transport up and running) ..."
      while time_elapsed < max_wait_time && !transport.available?
        action_handler.report_progress "been waiting #{time_elapsed}/#{max_wait_time} -- sleeping #{sleep_time} seconds for #{machine_spec.name} (#{instance.id} on #{driver_url}) to be connectable ..."
        sleep(sleep_time)
        time_elapsed += sleep_time
      end
      action_handler.report_progress "#{machine_spec.name} is now connectable"
    end
  end
end
def default_aws_keypair_name(machine_spec)
if machine_spec.location &&
Gem::Version.new(machine_spec.location['driver_version']) < Gem::Version.new('0.10')
'metal_default'
else
'chef_default'
end
end
# Ensures the shared fallback AWS key pair exists and returns its name.
# Creation is serialized through the class-level @@chef_default_lock
# (defined elsewhere in this class) so parallel machine builds don't race.
def default_aws_keypair(action_handler, machine_spec)
  # Capture self/region in locals so the inline resource block (which runs
  # with a different self) can still reference them.
  driver = self
  default_key_name = default_aws_keypair_name(machine_spec)
  _region = region
  updated = @@chef_default_lock.synchronize do
    # Declare the key pair idempotently via an inline Chef resource run.
    Provisioning.inline_resource(action_handler) do
      aws_key_pair default_key_name do
        driver driver
        allow_overwrite true
        region_name _region
      end
    end
  end

  # Only warn the first time
  default_warning = 'Using default key, which is not shared between machines!  It is recommended to create an AWS key pair with the fog_key_pair resource, and set :bootstrap_options => { :key_name => <key name> }'
  Chef::Log.warn(default_warning) if updated
  default_key_name
end
# Creates (or re-creates) EC2 instances for the given machine specs.
#
# specs_and_options maps machine_spec => machine_options. Specs whose
# instance already exists (and is not terminated) are yielded immediately;
# the rest are grouped by identical bootstrap_options so each group can be
# launched as one parallel batch. Yields (machine_spec, instance) for every
# machine as it becomes available.
def create_servers(action_handler, specs_and_options, parallelizer, &block)
  specs_and_servers = instances_for(specs_and_options.keys)

  by_bootstrap_options = {}
  specs_and_options.each do |machine_spec, machine_options|
    actual_instance = specs_and_servers[machine_spec]
    if actual_instance
      if actual_instance.status == :terminated
        Chef::Log.warn "Machine #{machine_spec.name} (#{actual_instance.id}) is terminated. Recreating ..."
      else
        # Instance is alive: report it and skip creation.
        yield machine_spec, actual_instance if block_given?
        next
      end
    elsif machine_spec.location
      Chef::Log.warn "Machine #{machine_spec.name} (#{machine_spec.location['instance_id']} on #{driver_url}) no longer exists.  Recreating ..."
    end

    bootstrap_options = machine_options[:bootstrap_options] || {}
    by_bootstrap_options[bootstrap_options] ||= []
    by_bootstrap_options[bootstrap_options] << machine_spec
  end

  # Create the servers in parallel
  parallelizer.parallelize(by_bootstrap_options) do |bootstrap_options, machine_specs|
    machine_description = if machine_specs.size == 1
      "machine #{machine_specs.first.name}"
    else
      "machines #{machine_specs.map { |s| s.name }.join(", ")}"
    end
    description = [ "creating #{machine_description} on #{driver_url}" ]
    bootstrap_options.each_pair { |key,value| description << "  #{key}: #{value.inspect}" }
    action_handler.report_progress description
    if action_handler.should_perform_actions
      # Actually create the servers
      create_many_instances(machine_specs.size, bootstrap_options, parallelizer) do |instance|
        # Assign each one to a machine spec
        # NOTE(review): this pop runs from multiple parallelizer threads —
        # presumably Array#pop is safe enough under MRI's GVL, but confirm.
        machine_spec = machine_specs.pop
        machine_options = specs_and_options[machine_spec]
        machine_spec.location = {
          'driver_url' => driver_url,
          'driver_version' => Chef::Provisioning::AWSDriver::VERSION,
          'allocated_at' => Time.now.utc.to_s,
          'host_node' => action_handler.host_node,
          'image_id' => bootstrap_options[:image_id],
          'instance_id' => instance.id
        }
        instance.tags['Name'] = machine_spec.name
        machine_spec.location['key_name'] = bootstrap_options[:key_name] if bootstrap_options[:key_name]
        # Carry connection-related options over into the spec's location.
        %w(is_windows ssh_username sudo use_private_ip_for_ssh ssh_gateway).each do |key|
          machine_spec.location[key] = machine_options[key.to_sym] if machine_options[key.to_sym]
        end
        action_handler.performed_action "machine #{machine_spec.name} created as #{instance.id} on #{driver_url}"

        yield machine_spec, instance if block_given?
      end

      if machine_specs.size > 0
        raise "Not all machines were created by create_servers"
      end
    end
  end.to_a
end
# Launches num_servers EC2 instances in parallel, yielding each created
# instance to the optional block and returning them all as an array.
def create_many_instances(num_servers, bootstrap_options, parallelizer)
  parallelizer.parallelize(1.upto(num_servers)) do |_server_index|
    # Deep-copy the options so threads never share mutable state, and give
    # each thread its own ec2 handle: using the singleton ec2 variable
    # creates a threading issue.
    isolated_options = Marshal.load(Marshal.dump(bootstrap_options))
    created = AWS.ec2.instances.create(isolated_options)
    yield created if block_given?
    created
  end.to_a
end
end
end
end
end
|
require 'chef_zero/chef_data/acl_path'
module ChefZero
module ChefData
#
# The DefaultCreator creates default values when you ask for them.
# - It relies on created and deleted being called when things get
# created and deleted, so that it knows the owners of said objects
# and knows to eliminate default values on delete.
# - get, list and exists? get data.
#
# The DefaultCreator creates default values when you ask for them.
# - It relies on created and deleted being called when things get
#   created and deleted, so that it knows the owners of said objects
#   and knows to eliminate default values on delete.
# - get, list and exists? get data.
#
# Fixes in this revision:
# - deleted() previously computed `result = exists?(path)` and then always
#   returned false, discarding it; it now returns whether the path existed.
# - get_org_acl_default had 'containers/cookbooks' listed in two `when`
#   branches; the second occurrence was dead (first match wins) and is gone.
class DefaultCreator
  # data: backing data store; single_org: the sole org name (or falsy for
  # multi-org mode); osc_compat: Open Source Chef 11 compatibility;
  # superusers: overrides DEFAULT_SUPERUSERS when given.
  def initialize(data, single_org, osc_compat, superusers = nil)
    @data = data
    @single_org = single_org
    @osc_compat = osc_compat
    @superusers = superusers || DEFAULT_SUPERUSERS
    clear
  end

  attr_reader :data
  attr_reader :single_org
  attr_reader :osc_compat
  attr_reader :creators
  attr_reader :deleted

  PERMISSIONS = %w(create read update delete grant)
  DEFAULT_SUPERUSERS = %w(pivotal)

  # Resets creator/deletion tracking; the root path is owned by superusers.
  def clear
    @creators = { [] => @superusers }
    @deleted = {}
  end

  # Records a deletion and returns whether the path existed beforehand.
  def deleted(path)
    # acl deletes mean nothing, they are entirely subservient to their
    # parent object
    if path[0] == 'acls' || (path[0] == 'organizations' && path[2] == 'acls')
      false
    else
      result = exists?(path)
      @deleted[path] = true
      result
    end
  end

  # True if the path or any of its ancestors has been marked deleted.
  def deleted?(path)
    1.upto(path.size) do |index|
      return true if @deleted[path[0..-index]]
    end
    false
  end

  # Records `creator` as the owner of path; when create_parents is true,
  # walks up and claims every ancestor that has no recorded creator yet.
  def created(path, creator, create_parents)
    while !@creators[path]
      @creators[path] = [ creator ]
      @deleted.delete(path) if @deleted[path]
      # Only do this once if create_parents is false
      break if !create_parents || path.size == 0
      path = path[0..-2]
    end
  end

  # Owners of the root path.
  def superusers
    @creators[[]]
  end

  # Returns the default value for path, or nil if none applies.
  def get(path)
    return nil if deleted?(path)

    result = case path[0]
    when 'acls'
      # /acls/*
      object_path = AclPath.get_object_path(path)
      if data_exists?(object_path)
        default_acl(path)
      end

    when 'containers'
      if path.size == 2 && exists?(path)
        {}
      end

    when 'users'
      if path.size == 2 && data.exists?(path)
        # User is empty user
        {}
      end

    when 'organizations'
      if path.size >= 2
        # /organizations/*/**
        if data.exists_dir?(path[0..1])
          get_org_default(path)
        end
      end
    end
    result
  end

  # Lists the default children of path, or nil when none apply.
  def list(path)
    return nil if deleted?(path)

    if path.size == 0
      return %w(containers users organizations acls)
    end

    case path[0]
    when 'acls'
      if path.size == 1
        [ 'root' ] + (data.list(path + [ 'containers' ]) - [ 'organizations' ])
      else
        data.list(AclPath.get_object_path(path))
      end

    when 'containers'
      [ 'containers', 'users', 'organizations' ]

    when 'users'
      superusers

    when 'organizations'
      if path.size == 1
        single_org ? [ single_org ] : []
      elsif path.size >= 2 && data.exists_dir?(path[0..1])
        list_org_default(path)
      end
    end
  end

  # True if path exists by default (its parent's default listing names it).
  def exists?(path)
    return true if path.size == 0
    parent_list = list(path[0..-2])
    parent_list && parent_list.include?(path[-1])
  end

  protected

  # Skeleton layout of a freshly created organization.
  DEFAULT_ORG_SPINE = {
    'clients' => {},
    'cookbooks' => {},
    'data' => {},
    'environments' => %w(_default),
    'file_store' => {
      'checksums' => {}
    },
    'nodes' => {},
    'roles' => {},
    'sandboxes' => {},
    'users' => {},
    'org' => {},
    'containers' => %w(clients containers cookbooks data environments groups nodes roles sandboxes),
    'groups' => %w(admins billing-admins clients users),
    'association_requests' => {}
  }

  # Default listing underneath /organizations/ORG/**.
  def list_org_default(path)
    if path.size >= 3 && path[2] == 'acls'
      if path.size == 3
        # /organizations/ORG/acls
        return [ 'root' ] + data.list(path[0..1] + [ 'containers' ])
      elsif path.size == 4
        # /organizations/ORG/acls/TYPE
        return data.list(path[0..1] + [ path[3] ])
      else
        return nil
      end
    end

    # Walk the spine, honoring deletions along the way.
    value = DEFAULT_ORG_SPINE
    2.upto(path.size-1) do |index|
      value = nil if @deleted[path[0..index]]
      break if !value
      value = value[path[index]]
    end
    result = if value.is_a?(Hash)
      value.keys
    elsif value
      value
    end

    if path.size == 3
      if path[2] == 'clients'
        result << "#{path[1]}-validator"
        if osc_compat
          result << "#{path[1]}-webui"
        end
      elsif path[2] == 'users'
        if osc_compat
          result << 'admin'
        end
      end
    end
    result
  end

  # Default value underneath /organizations/ORG/**.
  def get_org_default(path)
    if path[2] == 'acls'
      get_org_acl_default(path)

    elsif path.size >= 4
      # Org members default to an empty user record.
      if !osc_compat && path[2] == 'users'
        if @creators[path[0..1]] && @creators[path[0..1]].include?(path[3])
          return {}
        end
      end
      if path[2] == 'containers' && path.size == 4
        if exists?(path)
          return {}
        else
          return nil
        end
      end

      # /organizations/(*)/clients/\1-validator
      # /organizations/*/environments/_default
      # /organizations/*/groups/{admins,billing-admins,clients,users}
      case path[2..-1].join('/')
      when "clients/#{path[1]}-validator"
        { 'validator' => 'true' }

      when "clients/#{path[1]}-webui", "users/admin"
        if osc_compat
          { 'admin' => 'true' }
        end

      when "environments/_default"
        { "description" => "The default Chef environment" }

      when "groups/admins"
        # Admins = admin users + admin clients + whoever created the org.
        admins = data.list(path[0..1] + [ 'users' ]).select do |name|
          user = JSON.parse(data.get(path[0..1] + [ 'users', name ]), :create_additions => false)
          user['admin']
        end
        admins += data.list(path[0..1] + [ 'clients' ]).select do |name|
          client = JSON.parse(data.get(path[0..1] + [ 'clients', name ]), :create_additions => false)
          client['admin']
        end
        admins += @creators[path[0..1]] if @creators[path[0..1]]
        { 'actors' => admins.uniq }

      when "groups/billing-admins"
        {}

      when "groups/clients"
        { 'clients' => data.list(path[0..1] + [ 'clients' ]) }

      when "groups/users"
        users = data.list(path[0..1] + [ 'users' ])
        users |= @creators[path[0..1]] if @creators[path[0..1]]
        { 'users' => users }

      when "org"
        {}
      end
    end
  end

  # Default ACL for /organizations/ORG/acls/**.
  def get_org_acl_default(path)
    object_path = AclPath.get_object_path(path)

    # The actual things containers correspond to don't have to exist, as long as the container does
    return nil if object_path[2] != 'containers' && !data_exists?(object_path)
    basic_acl =
    case path[3..-1].join('/')
    when 'root', 'containers/containers', 'containers/groups'
      {
        'create' => { 'groups' => %w(admins) },
        'read' => { 'groups' => %w(admins users) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/cookbooks', 'containers/environments', 'containers/roles'
      {
        'create' => { 'groups' => %w(admins users) },
        'read' => { 'groups' => %w(admins users clients) },
        'update' => { 'groups' => %w(admins users) },
        'delete' => { 'groups' => %w(admins users) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/data'
      {
        'create' => { 'groups' => %w(admins users clients) },
        'read' => { 'groups' => %w(admins users clients) },
        'update' => { 'groups' => %w(admins users clients) },
        'delete' => { 'groups' => %w(admins users clients) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/nodes'
      {
        'create' => { 'groups' => %w(admins users clients) },
        'read' => { 'groups' => %w(admins users clients) },
        'update' => { 'groups' => %w(admins users) },
        'delete' => { 'groups' => %w(admins users) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/clients'
      {
        'create' => { 'groups' => %w(admins) },
        'read' => { 'groups' => %w(admins users) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins users) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/sandboxes'
      {
        'create' => { 'groups' => %w(admins users) },
        'read' => { 'groups' => %w(admins) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'groups/admins', 'groups/clients', 'groups/users'
      {
        'create' => { 'groups' => %w(admins) },
        'read' => { 'groups' => %w(admins) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'groups/billing-admins'
      {
        'create' => { 'groups' => %w() },
        'read' => { 'groups' => %w(billing-admins) },
        'update' => { 'groups' => %w(billing-admins) },
        'delete' => { 'groups' => %w() },
        'grant' => { 'groups' => %w() },
      }
    else
      {}
    end
    default_acl(path, basic_acl)
  end

  # Computes the owner actors for an ACL path.
  def get_owners(acl_path)
    owners = []

    path = AclPath.get_object_path(acl_path)
    if path

      # Non-validator clients own themselves.
      if path.size == 4 && path[0] == 'organizations' && path[2] == 'clients'
        begin
          client = JSON.parse(data.get(path), :create_additions => false)
          if !client['validator']
            owners |= [ path[3] ]
          end
        rescue
          owners |= [ path[3] ]
        end

        # Add creators as owners (except any validator clients).
        if @creators[path]
          @creators[path].each do |creator|
            begin
              client = JSON.parse(data.get(path[0..2] + [ creator ]), :create_additions => false)
              next if client['validator']
            rescue
            end
            owners |= [ creator ]
          end
        end
      else
        owners |= @creators[path] if @creators[path]
      end

      #ANGRY
      # Non-default containers do not get superusers added to them,
      # because reasons.
      unless path.size == 4 && path[0] == 'organizations' && path[2] == 'containers' && !exists?(path)
        owners |= superusers
      end
    end

    owners.uniq
  end

  # Fills in missing actors/groups for every permission in acl.
  def default_acl(acl_path, acl={})
    owners = nil
    container_acl = nil
    PERMISSIONS.each do |perm|
      acl[perm] ||= {}
      acl[perm]['actors'] ||= begin
        owners ||= get_owners(acl_path)
      end
      acl[perm]['groups'] ||= begin
        # When we create containers, we don't merge groups (not sure why).
        if acl_path[0] == 'organizations' && acl_path[3] == 'containers'
          []
        else
          container_acl ||= get_container_acl(acl_path) || {}
          (container_acl[perm] ? container_acl[perm]['groups'] : []) || []
        end
      end
    end
    acl
  end

  # ACL of the container the object lives in, or nil for top-level objects.
  def get_container_acl(acl_path)
    parent_path = AclPath.parent_acl_data_path(acl_path)
    if parent_path
      JSON.parse(data.get(parent_path), :create_additions => false)
    else
      nil
    end
  end

  # exists?/exists_dir? dispatch based on whether path names a directory.
  def data_exists?(path)
    if is_dir?(path)
      data.exists_dir?(path)
    else
      data.exists?(path)
    end
  end

  # Whether a path denotes a directory in the data store layout.
  def is_dir?(path)
    case path.size
    when 0, 1
      return true
    when 2
      return path[0] == 'organizations' || (path[0] == 'acls' && path[1] != 'root')
    when 3
      # If it has a container, it is a directory.
      return path[0] == 'organizations' &&
        (path[2] == 'acls' || data.exists?(path[0..1] + [ 'containers', path[2] ]))
    when 4
      return path[0] == 'organizations' && (
             (path[2] == 'acls' && path[1] != 'root') ||
             %w(cookbooks data).include?(path[2]))
    else
      return false
    end
  end
end
end
end
Register created owners even on deleted defaults
require 'chef_zero/chef_data/acl_path'
module ChefZero
module ChefData
#
# The DefaultCreator creates default values when you ask for them.
# - It relies on created and deleted being called when things get
# created and deleted, so that it knows the owners of said objects
# and knows to eliminate default values on delete.
# - get, list and exists? get data.
#
# Later revision of DefaultCreator ("Register created owners even on
# deleted defaults"): created() now also reclaims paths whose ancestors
# were deleted, so re-creating an object after deleting its parent still
# registers the creator as owner.
class DefaultCreator
  # data: backing data store; single_org: sole org name (or falsy for
  # multi-org); osc_compat: Open Source Chef 11 compatibility mode;
  # superusers: overrides DEFAULT_SUPERUSERS when given.
  def initialize(data, single_org, osc_compat, superusers = nil)
    @data = data
    @single_org = single_org
    @osc_compat = osc_compat
    @superusers = superusers || DEFAULT_SUPERUSERS
    clear
  end

  attr_reader :data
  attr_reader :single_org
  attr_reader :osc_compat
  attr_reader :creators
  attr_reader :deleted

  PERMISSIONS = %w(create read update delete grant)
  DEFAULT_SUPERUSERS = %w(pivotal)

  # Resets creator/deletion tracking; the root path is owned by superusers.
  def clear
    @creators = { [] => @superusers }
    @deleted = {}
  end

  # Records a deletion.
  # NOTE(review): the trailing `false` makes this method always return
  # false, discarding the computed `result` — presumably `result` was meant
  # to be returned for non-ACL paths; confirm against callers.
  def deleted(path)
    # acl deletes mean nothing, they are entirely subservient to their
    # parent object
    unless path[0] == 'acls' || (path[0] == 'organizations' && path[2] == 'acls')
      result = exists?(path)
      @deleted[path] = true
      result
    end
    false
  end

  # True if the path or any of its ancestors has been marked deleted.
  def deleted?(path)
    1.upto(path.size) do |index|
      return true if @deleted[path[0..-index]]
    end
    false
  end

  # Records `creator` as owner of path, reclaiming any deleted ancestors;
  # with create_parents it also claims ancestors with no creator yet.
  def created(path, creator, create_parents)
    # If a parent has been deleted, we will need to clear that.
    deleted_index = nil
    0.upto(path.size-1) do |index|
      deleted_index = index if @deleted[path[0..index]]
    end

    # Walk up the tree, setting the creator on anything that doesn't exist
    # (anything that is either deleted or was never created)
    while (deleted_index && path.size > deleted_index) || !@creators[path]
      @creators[path] = [ creator ]
      @deleted.delete(path)
      # Only do this once if create_parents is false
      break if !create_parents || path.size == 0
      path = path[0..-2]
    end
  end

  # Owners of the root path.
  def superusers
    @creators[[]]
  end

  # Returns the default value for path, or nil if none applies.
  def get(path)
    return nil if deleted?(path)

    result = case path[0]
    when 'acls'
      # /acls/*
      object_path = AclPath.get_object_path(path)
      if data_exists?(object_path)
        default_acl(path)
      end

    when 'containers'
      if path.size == 2 && exists?(path)
        {}
      end

    when 'users'
      if path.size == 2 && data.exists?(path)
        # User is empty user
        {}
      end

    when 'organizations'
      if path.size >= 2
        # /organizations/*/**
        if data.exists_dir?(path[0..1])
          get_org_default(path)
        end
      end
    end
    result
  end

  # Lists the default children of path, or nil when none apply.
  def list(path)
    return nil if deleted?(path)

    if path.size == 0
      return %w(containers users organizations acls)
    end

    case path[0]
    when 'acls'
      if path.size == 1
        [ 'root' ] + (data.list(path + [ 'containers' ]) - [ 'organizations' ])
      else
        data.list(AclPath.get_object_path(path))
      end

    when 'containers'
      [ 'containers', 'users', 'organizations' ]

    when 'users'
      superusers

    when 'organizations'
      if path.size == 1
        single_org ? [ single_org ] : []
      elsif path.size >= 2 && data.exists_dir?(path[0..1])
        list_org_default(path)
      end
    end
  end

  # True if path exists by default (its parent's default listing names it).
  def exists?(path)
    return true if path.size == 0
    parent_list = list(path[0..-2])
    parent_list && parent_list.include?(path[-1])
  end

  protected

  # Skeleton layout of a freshly created organization.
  DEFAULT_ORG_SPINE = {
    'clients' => {},
    'cookbooks' => {},
    'data' => {},
    'environments' => %w(_default),
    'file_store' => {
      'checksums' => {}
    },
    'nodes' => {},
    'roles' => {},
    'sandboxes' => {},
    'users' => {},
    'org' => {},
    'containers' => %w(clients containers cookbooks data environments groups nodes roles sandboxes),
    'groups' => %w(admins billing-admins clients users),
    'association_requests' => {}
  }

  # Default listing underneath /organizations/ORG/**.
  def list_org_default(path)
    if path.size >= 3 && path[2] == 'acls'
      if path.size == 3
        # /organizations/ORG/acls
        return [ 'root' ] + data.list(path[0..1] + [ 'containers' ])
      elsif path.size == 4
        # /organizations/ORG/acls/TYPE
        return data.list(path[0..1] + [ path[3] ])
      else
        return nil
      end
    end

    # Walk the spine, honoring deletions along the way.
    value = DEFAULT_ORG_SPINE
    2.upto(path.size-1) do |index|
      value = nil if @deleted[path[0..index]]
      break if !value
      value = value[path[index]]
    end
    result = if value.is_a?(Hash)
      value.keys
    elsif value
      value
    end

    if path.size == 3
      if path[2] == 'clients'
        result << "#{path[1]}-validator"
        if osc_compat
          result << "#{path[1]}-webui"
        end
      elsif path[2] == 'users'
        if osc_compat
          result << 'admin'
        end
      end
    end
    result
  end

  # Default value underneath /organizations/ORG/**.
  def get_org_default(path)
    if path[2] == 'acls'
      get_org_acl_default(path)

    elsif path.size >= 4
      # Org members default to an empty user record.
      if !osc_compat && path[2] == 'users'
        if @creators[path[0..1]] && @creators[path[0..1]].include?(path[3])
          return {}
        end
      end
      if path[2] == 'containers' && path.size == 4
        if exists?(path)
          return {}
        else
          return nil
        end
      end

      # /organizations/(*)/clients/\1-validator
      # /organizations/*/environments/_default
      # /organizations/*/groups/{admins,billing-admins,clients,users}
      case path[2..-1].join('/')
      when "clients/#{path[1]}-validator"
        { 'validator' => 'true' }

      when "clients/#{path[1]}-webui", "users/admin"
        if osc_compat
          { 'admin' => 'true' }
        end

      when "environments/_default"
        { "description" => "The default Chef environment" }

      when "groups/admins"
        # Admins = admin users + admin clients + whoever created the org.
        admins = data.list(path[0..1] + [ 'users' ]).select do |name|
          user = JSON.parse(data.get(path[0..1] + [ 'users', name ]), :create_additions => false)
          user['admin']
        end
        admins += data.list(path[0..1] + [ 'clients' ]).select do |name|
          client = JSON.parse(data.get(path[0..1] + [ 'clients', name ]), :create_additions => false)
          client['admin']
        end
        admins += @creators[path[0..1]] if @creators[path[0..1]]
        { 'actors' => admins.uniq }

      when "groups/billing-admins"
        {}

      when "groups/clients"
        { 'clients' => data.list(path[0..1] + [ 'clients' ]) }

      when "groups/users"
        users = data.list(path[0..1] + [ 'users' ])
        users |= @creators[path[0..1]] if @creators[path[0..1]]
        { 'users' => users }

      when "org"
        {}
      end
    end
  end

  # Default ACL for /organizations/ORG/acls/**.
  def get_org_acl_default(path)
    object_path = AclPath.get_object_path(path)

    # The actual things containers correspond to don't have to exist, as long as the container does
    return nil if object_path[2] != 'containers' && !data_exists?(object_path)
    basic_acl =
    case path[3..-1].join('/')
    when 'root', 'containers/containers', 'containers/groups'
      {
        'create' => { 'groups' => %w(admins) },
        'read' => { 'groups' => %w(admins users) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/cookbooks', 'containers/environments', 'containers/roles'
      {
        'create' => { 'groups' => %w(admins users) },
        'read' => { 'groups' => %w(admins users clients) },
        'update' => { 'groups' => %w(admins users) },
        'delete' => { 'groups' => %w(admins users) },
        'grant' => { 'groups' => %w(admins) },
      }
    # NOTE(review): 'containers/cookbooks' below is dead — it already
    # matched the branch above (case takes the first match).
    when 'containers/cookbooks', 'containers/data'
      {
        'create' => { 'groups' => %w(admins users clients) },
        'read' => { 'groups' => %w(admins users clients) },
        'update' => { 'groups' => %w(admins users clients) },
        'delete' => { 'groups' => %w(admins users clients) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/nodes'
      {
        'create' => { 'groups' => %w(admins users clients) },
        'read' => { 'groups' => %w(admins users clients) },
        'update' => { 'groups' => %w(admins users) },
        'delete' => { 'groups' => %w(admins users) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/clients'
      {
        'create' => { 'groups' => %w(admins) },
        'read' => { 'groups' => %w(admins users) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins users) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'containers/sandboxes'
      {
        'create' => { 'groups' => %w(admins users) },
        'read' => { 'groups' => %w(admins) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'groups/admins', 'groups/clients', 'groups/users'
      {
        'create' => { 'groups' => %w(admins) },
        'read' => { 'groups' => %w(admins) },
        'update' => { 'groups' => %w(admins) },
        'delete' => { 'groups' => %w(admins) },
        'grant' => { 'groups' => %w(admins) },
      }
    when 'groups/billing-admins'
      {
        'create' => { 'groups' => %w() },
        'read' => { 'groups' => %w(billing-admins) },
        'update' => { 'groups' => %w(billing-admins) },
        'delete' => { 'groups' => %w() },
        'grant' => { 'groups' => %w() },
      }
    else
      {}
    end
    default_acl(path, basic_acl)
  end

  # Computes the owner actors for an ACL path.
  def get_owners(acl_path)
    owners = []

    path = AclPath.get_object_path(acl_path)
    if path

      # Non-validator clients own themselves.
      if path.size == 4 && path[0] == 'organizations' && path[2] == 'clients'
        begin
          client = JSON.parse(data.get(path), :create_additions => false)
          if !client['validator']
            owners |= [ path[3] ]
          end
        rescue
          owners |= [ path[3] ]
        end

        # Add creators as owners (except any validator clients).
        if @creators[path]
          @creators[path].each do |creator|
            begin
              client = JSON.parse(data.get(path[0..2] + [ creator ]), :create_additions => false)
              next if client['validator']
            rescue
            end
            owners |= [ creator ]
          end
        end
      else
        owners |= @creators[path] if @creators[path]
      end

      #ANGRY
      # Non-default containers do not get superusers added to them,
      # because reasons.
      unless path.size == 4 && path[0] == 'organizations' && path[2] == 'containers' && !exists?(path)
        owners |= superusers
      end
    end

    owners.uniq
  end

  # Fills in missing actors/groups for every permission in acl.
  def default_acl(acl_path, acl={})
    owners = nil
    container_acl = nil
    PERMISSIONS.each do |perm|
      acl[perm] ||= {}
      acl[perm]['actors'] ||= begin
        owners ||= get_owners(acl_path)
      end
      acl[perm]['groups'] ||= begin
        # When we create containers, we don't merge groups (not sure why).
        if acl_path[0] == 'organizations' && acl_path[3] == 'containers'
          []
        else
          container_acl ||= get_container_acl(acl_path) || {}
          (container_acl[perm] ? container_acl[perm]['groups'] : []) || []
        end
      end
    end
    acl
  end

  # ACL of the container the object lives in, or nil for top-level objects.
  def get_container_acl(acl_path)
    parent_path = AclPath.parent_acl_data_path(acl_path)
    if parent_path
      JSON.parse(data.get(parent_path), :create_additions => false)
    else
      nil
    end
  end

  # exists?/exists_dir? dispatch based on whether path names a directory.
  def data_exists?(path)
    if is_dir?(path)
      data.exists_dir?(path)
    else
      data.exists?(path)
    end
  end

  # Whether a path denotes a directory in the data store layout.
  def is_dir?(path)
    case path.size
    when 0, 1
      return true
    when 2
      return path[0] == 'organizations' || (path[0] == 'acls' && path[1] != 'root')
    when 3
      # If it has a container, it is a directory.
      return path[0] == 'organizations' &&
        (path[2] == 'acls' || data.exists?(path[0..1] + [ 'containers', path[2] ]))
    when 4
      return path[0] == 'organizations' && (
             (path[2] == 'acls' && path[1] != 'root') ||
             %w(cookbooks data).include?(path[2]))
    else
      return false
    end
  end
end
end
end
|
require 'chef_zero/data_store/interface_v2'
module ChefZero
module DataStore
class DefaultFacade < ChefZero::DataStore::InterfaceV2
# Wraps a real data store, overlaying lazily generated default data
# (org scaffolding, default ACLs, the pivotal superuser).
def initialize(real_store, osc_compat)
  @real_store = real_store   # backing data store all reads/writes go to
  @osc_compat = osc_compat   # Open Source Chef 11 compatibility mode
  clear
end

# Backing store and compatibility flag for the overlay.
attr_reader :real_store
attr_reader :osc_compat
# Looks up the default value registered at path (optionally the key `name`
# underneath it). Proc defaults are invoked with (self, path). When no
# default is registered, ACL paths still yield '{}' as long as the object
# the ACL protects exists, because an object's default ACL lives and dies
# with the object.
#
# Changed: the `for part in path` loop leaked its loop variable into the
# method scope; replaced with the idiomatic each.
def default(path, name=nil)
  value = @defaults
  path.each do |part|
    break if !value
    value = value[part]
  end
  value = value[name] if value && name
  if value.is_a?(Proc)
    return value.call(self, path)
  else
    if value.nil?
      # ACLs are a special case: defaults for them exist as long as the
      # underlying object does
      if (path[0] == 'acls' || (path[0] == 'organizations' && path[2] == 'acls')) &&
         target_object_exists?(path)
        return '{}'
      end
    end
    return value
  end
end
# True-ish if the object an ACL path protects exists (real or default).
# acl_path is ['organizations', ORG, 'acls', TYPE, ...] or ['acls', NAME].
def target_object_exists?(acl_path)
  if acl_path[0] == 'organizations'
    org_path = acl_path[0..1]
    object_part = acl_path[3..-1]
    if object_part == [ 'organization' ]
      # The org-wide ACL exists iff the org directory itself does.
      exists_dir?(org_path)
    else
      path = org_path + object_part
      # cookbooks/data entries are directories (they hold versions/items),
      # so they need the directory existence check.
      if object_part.size == 2 && %w(cookbooks data).include?(object_part[0])
        exists_dir?(path)
      else
        exists?(path)
      end
    end
  elsif acl_path[0] == 'acls'
    # Top-level ACL: strip the 'acls' prefix to get the object path.
    exists?(acl_path[1..-1])
  end
end
# Removes the default registered at path. Returns true if a default was
# actually present and removed, false otherwise.
#
# Changed: the `for part in path[0..-2]` loop leaked its loop variable into
# the method scope; replaced with the idiomatic each.
def delete_default(path)
  value = @defaults
  path[0..-2].each do |part|
    break if !value
    value = value[part]
  end
  if value
    !!value.delete(path[-1])
  else
    false
  end
end
# Resets the overlay (and the real store, when it supports clearing) to the
# pristine top-level layout. Outside OSC compatibility mode the pivotal
# superuser is seeded as a default.
def clear
  real_store.clear if real_store.respond_to?(:clear)
  @defaults = {
    'organizations' => {},
    'acls' => {}
  }
  @defaults['users'] = { 'pivotal' => '{}' } unless osc_compat
end
# Creates a directory under path, retrying with :recursive when the parent
# only exists as a default. Creating anything under /organizations lazily
# registers that org's default data.
def create_dir(path, name, *options)
  if default(path, name) && !options.include?(:recursive)
    raise DataAlreadyExistsError.new(path + [name])
  end
  begin
    real_store.create_dir(path, name, *options)
  rescue DataNotFoundError
    # Parent exists only as a default: materialize it in the real store.
    if default(path)
      real_store.create_dir(path, name, :recursive, *options)
    else
      raise
    end
  end

  # If the org hasn't been created, create its defaults
  if path.size > 0 && path[0] == 'organizations'
    # The requestor rides along as a trailing options hash.
    options_hash = options.last
    requestor = options_hash.is_a?(Hash) ? options_hash[:requestor] : nil
    if path.size == 1
      @defaults['organizations'][name] ||= org_defaults(name, requestor)
    else
      # NOTE(review): `org_default` (singular) is not defined in the visible
      # part of this class while `org_defaults` is, and the parallel branch
      # in create() calls org_defaults — presumably a typo; confirm.
      @defaults['organizations'][path[1]] ||= org_default(path[1], requestor)
    end
  end
end
# Creates a data item under path, retrying with :create_dir when the parent
# only exists as a default. Creating anything under /organizations lazily
# registers that org's default data.
#
# Fixed: the defaults registration used `options[:requestor]`, but
# `options` is the splat Array here, so indexing it with a Symbol raised
# TypeError; the requestor extracted from the trailing options hash is
# used instead (matching create_dir).
def create(path, name, data, *options)
  if default(path, name) && !options.include?(:create_dir)
    raise DataAlreadyExistsError.new(path + [name])
  end
  begin
    real_store.create(path, name, data, *options)
  rescue DataNotFoundError
    # Parent exists only as a default: materialize it in the real store.
    if default(path)
      real_store.create(path, name, data, :create_dir, *options)
    else
      raise
    end
  end

  # If the org hasn't been created, create its defaults
  if path.size > 0 && path[0] == 'organizations'
    # The requestor rides along as a trailing options hash.
    options_hash = options.last
    requestor = options_hash.is_a?(Hash) ? options_hash[:requestor] : nil
    if path.size == 1
      @defaults['organizations'][name] ||= org_defaults(name, requestor)
    else
      @defaults['organizations'][path[1]] ||= org_defaults(path[1], requestor)
    end
  end
end
# Fetches path from the real store; when it is missing there, serves the
# registered default instead. Re-raises DataNotFoundError when neither
# the store nor the defaults have the path.
def get(path, request=nil)
  real_store.get(path, request)
rescue DataNotFoundError
  fallback = default(path)
  raise unless fallback
  fallback
end
# Writes data at path. A write that lands where only a default exists
# creates the real entry (and any missing parents) instead of failing;
# otherwise DataNotFoundError propagates.
def set(path, data, *options)
  real_store.set(path, data, *options)
rescue DataNotFoundError
  raise unless default(path)
  real_store.set(path, data, :create, :create_dir, *options)
end
# Deletes path from both the default overlay and the real store. Removing
# an entry that existed only as a default counts as success; otherwise a
# missing path re-raises DataNotFoundError.
def delete(path)
  had_default = delete_default(path)
  begin
    real_store.delete(path)
  rescue DataNotFoundError
    raise unless had_default
  end
end
# Directory variant of delete: clears the default subtree, then the real
# one; only re-raises when neither held anything.
def delete_dir(path, *options)
  had_default = delete_default(path)
  begin
    real_store.delete_dir(path, *options)
  rescue DataNotFoundError
    raise unless had_default
  end
end
# Lists the children of path, merging the real store's entries with the
# names of any registered defaults. When the real store has no such path,
# the default names alone are returned (or DataNotFoundError re-raised if
# there are none either).
def list(path)
  defaults = default(path)
  default_names = defaults ? defaults.keys : defaults
  begin
    stored = real_store.list(path)
    default_names ? (stored + default_names).uniq : stored
  rescue DataNotFoundError
    raise unless default_names
    default_names
  end
end
# True-ish if path exists in the real store or has a registered default.
# NOTE(review): falls through to default(path), so the return value may be
# the default object itself rather than a boolean — callers should rely on
# truthiness only.
def exists?(path)
  real_store.exists?(path) || default(path)
end

# Directory variant of exists?; the same truthiness caveat applies.
def exists_dir?(path)
  real_store.exists_dir?(path) || default(path)
end
# Builds the full default data tree for a freshly created organization
# +name+: validator client, _default environment, standard containers,
# groups (admins/billing-admins/clients/users) and container ACLs.
# Group values are Procs (see admins_group etc.) evaluated lazily by
# #default. +requestor+ is granted CRUD+grant on the containers
# container and create on the clients container; in non-OSC mode the
# requestor is also added as an org user.
def org_defaults(name, requestor)
result = {
'clients' => {
"#{name}-validator" => '{ "validator": true }'
},
'cookbooks' => {},
'data' => {},
'environments' => {
'_default' => '{ "description": "The default Chef environment" }'
},
'file_store' => {
'checksums' => {}
},
'nodes' => {},
'roles' => {},
'sandboxes' => {},
'users' => {},
'org' => '{}',
'containers' => {
'clients' => '{}',
'containers' => '{}',
'cookbooks' => '{}',
'data' => '{}',
'environments' => '{}',
'groups' => '{}',
'nodes' => '{}',
'roles' => '{}',
'sandboxes' => '{}'
},
# Group membership documents are computed lazily so they reflect the
# org's users/clients at read time.
'groups' => {
'admins' => admins_group,
'billing-admins' => '{}',
'clients' => clients_group,
'users' => users_group,
},
'acls' => {
'clients' => {},
'containers' => {
'cookbooks' => '{
"create": { "groups": [ "admins", "users" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'environments' => '{
"create": { "groups": [ "admins", "users" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'roles' => '{
"create": { "groups": [ "admins", "users" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'data' => '{
"create": { "groups": [ "admins", "users", "clients" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users", "clients" ] },
"delete": { "groups": [ "admins", "users", "clients" ] }
}',
'nodes' => '{
"create": { "groups": [ "admins", "users", "clients" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'clients' => client_container_acls(requestor),
'groups' => '{
"read": { "groups": [ "admins", "users" ] }
}',
'containers' => %'{
"create": { "actors": [ "#{requestor}" ] },
"read": { "actors": [ "#{requestor}" ], "groups": [ "admins", "users" ] },
"update": { "actors": [ "#{requestor}" ] },
"delete": { "actors": [ "#{requestor}" ] },
"grant": { "actors": [ "#{requestor}" ] }
}',
'sandboxes' => '{
"create": { "groups": [ "admins", "users" ] }
}'
},
'cookbooks' => {},
'data' => {},
'environments' => {},
'groups' => {
'billing-admins' => '{
"create": { "groups": [ ] },
"read": { "groups": [ "billing-admins" ] },
"update": { "groups": [ "billing-admins" ] },
"delete": { "groups": [ ] },
"grant": { "groups": [ ] }
}',
},
'nodes' => {},
'roles' => {},
'organizations' => '{
"read": { "groups": [ "admins", "users" ] }
}',
'sandboxes' => {}
},
'association_requests' => {},
}
# Open Source Chef compatibility seeds an admin user and webui client;
# otherwise the requestor becomes an org member.
if osc_compat
result['users']['admin'] = '{ "admin": "true" }'
result['clients']["#{name}-webui"] = '{ "admin": true }'
else
result['users'][requestor] = '{}'
end
result
end
# Lazy default for an org's "admins" group: the JSON document listing
# every user and client in the org whose record carries a truthy
# "admin" flag (users first, then clients).
def admins_group
  proc do |data, path|
    org = path[0..1]
    actors = %w(users clients).flat_map do |kind|
      data.list(org + [kind]).select do |entry|
        record = JSON.parse(data.get(org + [kind, entry]), :create_additions => false)
        record['admin']
      end
    end
    JSON.pretty_generate({ 'actors' => actors })
  end
end
# Lazy default for the "clients" group: JSON listing every client name
# registered in the organization.
def clients_group
  proc do |data, path|
    JSON.pretty_generate({ 'clients' => data.list(path[0..1] + ['clients']) })
  end
end
# Lazy default for the "users" group: JSON listing every user name
# registered in the organization.
def users_group
  proc do |data, path|
    JSON.pretty_generate({ 'users' => data.list(path[0..1] + ['users']) })
  end
end
# Lazy default ACL for the clients container: the requestor plus every
# validator client may create; admins and users may read and delete.
def client_container_acls(requestor)
  proc do |data, path|
    clients_path = path[0..1] + ['clients']
    validators = data.list(clients_path).select do |client_name|
      JSON.parse(data.get(clients_path + [client_name]), :create_additions => false)['validator']
    end
    acl = {
      'create' => { 'actors' => [requestor] + validators },
      'read' => { 'groups' => ['admins', 'users'] },
      'delete' => { 'groups' => ['admins', 'users'] }
    }
    JSON.pretty_generate(acl)
  end
end
end
end
end
Make org creator automatically an admin
require 'chef_zero/data_store/interface_v2'
module ChefZero
module DataStore
class DefaultFacade < ChefZero::DataStore::InterfaceV2
# Wraps +real_store+ so lookups fall back to canonical Chef defaults.
# +osc_compat+ toggles Open Source Chef compatibility behavior.
# Calls #clear to reset the store and seed the base default table.
def initialize(real_store, osc_compat)
@real_store = real_store
@osc_compat = osc_compat
clear
end
attr_reader :real_store
attr_reader :osc_compat
# Looks up the default value registered at +path+ (and optional +name+).
# Proc defaults are evaluated lazily against this facade. A nil result
# for an ACL path still yields '{}' as long as the ACL's target object
# exists, because ACL defaults live and die with their object.
def default(path, name=nil)
  value = path.reduce(@defaults) { |node, part| node && node[part] }
  value = value[name] if value && name
  return value.call(self, path) if value.is_a?(Proc)
  if value.nil?
    acl_path = path[0] == 'acls' || (path[0] == 'organizations' && path[2] == 'acls')
    return '{}' if acl_path && target_object_exists?(path)
  end
  value
end
def target_object_exists?(acl_path)
if acl_path[0] == 'organizations'
org_path = acl_path[0..1]
object_part = acl_path[3..-1]
if object_part == [ 'organization' ]
exists_dir?(org_path)
else
path = org_path + object_part
if object_part.size == 2 && %w(cookbooks data).include?(object_part[0])
exists_dir?(path)
else
exists?(path)
end
end
elsif acl_path[0] == 'acls'
exists?(acl_path[1..-1])
end
end
# Removes the default registered at +path+, returning true only when an
# entry was actually present and removed.
def delete_default(path)
  parent = path[0..-2].reduce(@defaults) { |node, part| node && node[part] }
  parent ? !!parent.delete(path[-1]) : false
end
# Resets the facade: wipes the wrapped store (when it supports #clear)
# and restores the base default table. Non-OSC mode seeds the pivotal
# superuser.
def clear
  real_store.clear if real_store.respond_to?(:clear)
  @defaults = { 'organizations' => {}, 'acls' => {} }
  @defaults['users'] = { 'pivotal' => '{}' } unless osc_compat
end
# Creates directory +name+ under +path+ in the real store. Raises
# DataAlreadyExistsError when a default already shadows the name
# (unless :recursive was requested). Missing parents are created when
# a default exists for +path+. Creating anything under 'organizations'
# also materializes that organization's default data.
def create_dir(path, name, *options)
  if default(path, name) && !options.include?(:recursive)
    raise DataAlreadyExistsError.new(path + [name])
  end
  begin
    real_store.create_dir(path, name, *options)
  rescue DataNotFoundError
    if default(path)
      real_store.create_dir(path, name, :recursive, *options)
    else
      raise
    end
  end
  # If the org hasn't been created, create its defaults
  if path.size > 0 && path[0] == 'organizations'
    options_hash = options.last
    requestor = options_hash.is_a?(Hash) ? options_hash[:requestor] : nil
    if path.size == 1
      @defaults['organizations'][name] ||= org_defaults(name, requestor)
    else
      # BUG FIX: the method is named org_defaults; org_default raised
      # NoMethodError whenever a subdirectory of an unseen org was created.
      @defaults['organizations'][path[1]] ||= org_defaults(path[1], requestor)
    end
  end
end
# Creates +name+ under +path+ in the real store. Raises
# DataAlreadyExistsError when a default entry already shadows the name
# (unless :create_dir was requested). Missing parent directories are
# created when a default exists for +path+. Creating anything under
# 'organizations' also materializes that organization's default data.
def create(path, name, data, *options)
  if default(path, name) && !options.include?(:create_dir)
    raise DataAlreadyExistsError.new(path + [name])
  end
  begin
    real_store.create(path, name, data, *options)
  rescue DataNotFoundError
    if default(path)
      real_store.create(path, name, data, :create_dir, *options)
    else
      raise
    end
  end
  # If the org hasn't been created, create its defaults
  if path.size > 0 && path[0] == 'organizations'
    options_hash = options.last
    requestor = options_hash.is_a?(Hash) ? options_hash[:requestor] : nil
    if path.size == 1
      # BUG FIX: options is an Array, so options[:requestor] raised
      # TypeError; use the requestor extracted from the options hash above.
      @defaults['organizations'][name] ||= org_defaults(name, requestor)
    else
      @defaults['organizations'][path[1]] ||= org_defaults(path[1], requestor)
    end
  end
end
# Fetches +path+ from the real store; a miss falls back to the default
# value, and a bare raise re-raises the original DataNotFoundError when
# no default is registered either.
def get(path, request=nil)
  real_store.get(path, request)
rescue DataNotFoundError
  default(path) || raise
end
# Stores +data+ at +path+. When the entry is missing but a default
# exists for it, the write is retried with :create and :create_dir so
# the entry and its parents come into existence.
def set(path, data, *options)
  real_store.set(path, data, *options)
rescue DataNotFoundError
  if default(path)
    real_store.set(path, data, :create, :create_dir, *options)
  else
    raise
  end
end
# Removes +path+ from the default table and the real store; a missing
# real-store entry is tolerated when a default was removed.
def delete(path)
  had_default = delete_default(path)
  real_store.delete(path)
rescue DataNotFoundError
  raise unless had_default
end
# Directory variant of #delete: removes the default subtree and the
# real directory; only raises when neither existed.
def delete_dir(path, *options)
  had_default = delete_default(path)
  real_store.delete_dir(path, *options)
rescue DataNotFoundError
  raise unless had_default
end
# Children of +path+: the union of real-store entries and default keys.
# When the real store has no such directory the default keys alone are
# returned, or the error is re-raised if there are none.
def list(path)
  default_keys = default(path)
  default_keys &&= default_keys.keys
  begin
    listed = real_store.list(path)
    default_keys ? (listed + default_keys).uniq : listed
  rescue DataNotFoundError
    raise unless default_keys
    default_keys
  end
end
# Truthy when +path+ exists in the real store or has a default value.
def exists?(path)
  found = real_store.exists?(path)
  found ? found : default(path)
end
# Truthy when +path+ is a directory in the real store or has a default.
def exists_dir?(path)
  found = real_store.exists_dir?(path)
  found ? found : default(path)
end
# Builds the full default data tree for a freshly created organization
# +name+: validator client, _default environment, standard containers,
# groups and container ACLs. Group values are Procs (see admins_group
# etc.) evaluated lazily by #default; the org-creating +requestor+ is
# folded into the admins and users groups, granted CRUD+grant on the
# containers container, and (in non-OSC mode) added as an org user.
def org_defaults(name, requestor)
result = {
'clients' => {
"#{name}-validator" => '{ "validator": true }'
},
'cookbooks' => {},
'data' => {},
'environments' => {
'_default' => '{ "description": "The default Chef environment" }'
},
'file_store' => {
'checksums' => {}
},
'nodes' => {},
'roles' => {},
'sandboxes' => {},
'users' => {},
'org' => '{}',
'containers' => {
'clients' => '{}',
'containers' => '{}',
'cookbooks' => '{}',
'data' => '{}',
'environments' => '{}',
'groups' => '{}',
'nodes' => '{}',
'roles' => '{}',
'sandboxes' => '{}'
},
# Group membership documents are computed lazily so they reflect the
# org's users/clients at read time; the requestor is always a member
# of admins and users.
'groups' => {
'admins' => admins_group(requestor),
'billing-admins' => '{}',
'clients' => clients_group,
'users' => users_group(requestor),
},
'acls' => {
'clients' => {},
'containers' => {
'cookbooks' => '{
"create": { "groups": [ "admins", "users" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'environments' => '{
"create": { "groups": [ "admins", "users" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'roles' => '{
"create": { "groups": [ "admins", "users" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'data' => '{
"create": { "groups": [ "admins", "users", "clients" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users", "clients" ] },
"delete": { "groups": [ "admins", "users", "clients" ] }
}',
'nodes' => '{
"create": { "groups": [ "admins", "users", "clients" ] },
"read": { "groups": [ "admins", "users", "clients" ] },
"update": { "groups": [ "admins", "users" ] },
"delete": { "groups": [ "admins", "users" ] }
}',
'clients' => client_container_acls(requestor),
'groups' => '{
"read": { "groups": [ "admins", "users" ] }
}',
'containers' => %'{
"create": { "actors": [ "#{requestor}" ] },
"read": { "actors": [ "#{requestor}" ], "groups": [ "admins", "users" ] },
"update": { "actors": [ "#{requestor}" ] },
"delete": { "actors": [ "#{requestor}" ] },
"grant": { "actors": [ "#{requestor}" ] }
}',
'sandboxes' => '{
"create": { "groups": [ "admins", "users" ] }
}'
},
'cookbooks' => {},
'data' => {},
'environments' => {},
'groups' => {
'billing-admins' => '{
"create": { "groups": [ ] },
"read": { "groups": [ "billing-admins" ] },
"update": { "groups": [ "billing-admins" ] },
"delete": { "groups": [ ] },
"grant": { "groups": [ ] }
}',
},
'nodes' => {},
'roles' => {},
'organizations' => '{
"read": { "groups": [ "admins", "users" ] }
}',
'sandboxes' => {}
},
'association_requests' => {},
}
# Open Source Chef compatibility seeds an admin user and webui client;
# otherwise the requestor becomes an org member.
if osc_compat
result['users']['admin'] = '{ "admin": "true" }'
result['clients']["#{name}-webui"] = '{ "admin": true }'
else
result['users'][requestor] = '{}'
end
result
end
# Lazy default for an org's "admins" group: the requestor plus every
# user and client in the org whose record carries a truthy "admin"
# flag (users first, then clients), deduplicated.
def admins_group(requestor)
  proc do |data, path|
    org = path[0..1]
    flagged = %w(users clients).flat_map do |kind|
      data.list(org + [kind]).select do |entry|
        JSON.parse(data.get(org + [kind, entry]), :create_additions => false)['admin']
      end
    end
    JSON.pretty_generate({ 'actors' => ([requestor] + flagged).uniq })
  end
end
# Lazy default for the "clients" group: JSON document naming every
# client registered in the organization.
def clients_group
  proc do |data, path|
    members = data.list(path[0..1] + ['clients'])
    JSON.pretty_generate({ 'clients' => members })
  end
end
# Lazy default for the "users" group: the org creator plus every user
# in the organization, deduplicated.
def users_group(requestor)
  proc do |data, path|
    members = ([requestor] + data.list(path[0..1] + ['users'])).uniq
    JSON.pretty_generate({ 'users' => members })
  end
end
# Lazy default ACL for the clients container: the requestor and every
# validator client may create; admins and users may read and delete.
def client_container_acls(requestor)
  proc do |data, path|
    clients_path = path[0..1] + ['clients']
    validators = data.list(clients_path).select do |client_name|
      JSON.parse(data.get(clients_path + [client_name]), :create_additions => false)['validator']
    end
    JSON.pretty_generate(
      'create' => { 'actors' => [requestor] + validators },
      'read' => { 'groups' => ['admins', 'users'] },
      'delete' => { 'groups' => ['admins', 'users'] }
    )
  end
end
end
end
end
|
module Clever
module APIOperations
# Handles paginated requests.
# TODO: use rel links
# TODO: build functionality elsewhere
# Iterates numbered result pages until the server-reported total is
# reached.
class PageList
  def initialize(uri, filters = {})
    @uri = uri
    @filters = filters
  end

  # Yields one Page per request, advancing via the paging metadata
  # (current / total) returned with each page.
  def each
    current_page = 0
    total_pages = 1
    until current_page >= total_pages
      page = Page.new(@uri, @filters.merge(page: current_page + 1))
      yield page
      current_page = page.paging[:current]
      total_pages = page.paging[:total]
    end
  end
end
end
end
Use hypermedia links to handle pagination.
module Clever
module APIOperations
# Handles paginated requests.
# TODO: use rel links
# TODO: build functionality elsewhere
# Iterates result pages by following hypermedia links.
class PageList
  def initialize(uri, filters = {})
    @uri = uri
    @filters = filters
  end

  # Yields pages starting at @uri, following each page's :next link
  # until a page is empty or no :next link is present.
  # NOTE(review): @filters is stored but not applied to the initial
  # request here - confirm whether that is intentional.
  def each
    page = Page.new(@uri)
    loop do
      break if page.first.nil?
      yield page
      break unless page.links.key?(:next)
      page = Page.new(page.links[:next])
    end
  end
end
end
end
|
#------------------------------------------------------------------------
# (The MIT License)
#
# Copyright (c) 2008-2013 Rhomobile, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# http://rhomobile.com
#------------------------------------------------------------------------
if Rho::System.platform == 'APPLE' || Rho::System.platform == 'ANDROID' || Rho::System.platform == 'WP8'
# Deprecated camera API shim forwarding to the Camera extension's
# takePicture/choosePicture implementations.
class Camera
  # Takes a picture and reports the result to +callback_url+.
  # Options may use string or symbol keys (legacy callers used both).
  def self.take_picture(callback_url, options = {})
    options = process_hash_params(options)
    # BUG FIX: the previous two-step assignment clobbered the 'back'
    # default with nil when no camera_type option was supplied, because
    # the if/elsif expression evaluated to nil.
    cam_type = if options['camera_type'] != nil
                 options['camera_type']
               elsif options[:camera_type] != nil
                 options[:camera_type]
               else
                 'back'
               end
    cam_type = 'back' if cam_type == 'main'
    # Non-iOS platforms need an explicit target file in the blob folder.
    if Rho::System.platform != 'APPLE'
      unless options['fileName']
        options['fileName'] = File.join(Rho::RhoApplication.get_blob_folder(), Rho::RhoConfig.generate_id.to_s)
      end
    end
    # Pick the camera matching the requested type, falling back to the
    # first enumerated camera.
    cams = Camera.enumerate
    if cams != nil
      if cams.size > 0
        selected_cam = cams[0]
        cams.each do |cam|
          if cam.cameraType == cam_type
            selected_cam = cam
          end
        end
        selected_cam.takePicture(options, callback_url)
      end
    end
  end

  # Lets the user pick an existing picture; result goes to +callback_url+.
  def self.choose_picture(callback_url, options = {})
    options[:deprecated] = true
    if Rho::System.platform != 'APPLE'
      unless options['fileName']
        options['fileName'] = File.join(Rho::RhoApplication.get_blob_folder(), Rho::RhoConfig.generate_id.to_s)
      end
    end
    Camera.choosePicture(options, callback_url)
  end

  # Returns { "max_resolution" => { "width" => ..., "height" => ... } }
  # for the requested camera type, or nil when no such camera exists.
  def self.get_camera_info(cam_type='main')
    cam_type = 'back' if cam_type == 'main'
    cams = Camera.enumerate
    if cams != nil
      cams.each do |cam|
        if cam.cameraType == cam_type
          info = { "max_resolution" => { "width" => cam.maxWidth.to_s, "height" => cam.maxHeight.to_s } }
          return info
        end
      end
    end
    return nil
  end
end
private
# Normalizes legacy (string- or symbol-keyed) camera options into the
# symbol-keyed form the camera extension expects. Mutates and returns
# +options+. Defaults: :colorModel 'rgb', :imageFormat 'jpg',
# :flashMode 'off'; width/height stay nil when not requested.
#
# BUG FIX: each option previously used `x = default; x = if ... end`,
# so when no legacy key was present the if-expression evaluated to nil
# and clobbered the documented default. Assignments are now conditional
# so the defaults survive.
def process_hash_params(options = {})
  options[:deprecated] = true
  # color_model: 'RGB' -> 'rgb', 'Grayscale' -> 'grayscale'; unknown
  # values intentionally map to nil as before.
  colorModel = 'rgb'
  if options['color_model'] != nil
    colorModel = if options['color_model'] == 'RGB'
                   'rgb'
                 elsif options['color_model'] == 'Grayscale'
                   'grayscale'
                 end
  elsif options[:color_model] != nil
    colorModel = if options[:color_model] == 'RGB'
                   'rgb'
                 elsif options[:color_model] == 'Grayscale'
                   'grayscale'
                 end
  end
  options[:colorModel] = colorModel
  # format -> imageFormat, passed through verbatim.
  imageFormat = 'jpg'
  if options['format'] != nil
    imageFormat = options['format']
  elsif options[:format] != nil
    imageFormat = options[:format]
  end
  options[:imageFormat] = imageFormat
  # flash_mode -> flashMode; 'red-eye' is renamed to 'redEye'.
  flashMode = 'off'
  if options['flash_mode'] != nil
    flashMode = options['flash_mode'] == 'red-eye' ? "redEye" : options['flash_mode']
  elsif options[:flash_mode] != nil
    flashMode = options[:flash_mode] == 'red-eye' ? "redEye" : options[:flash_mode]
  end
  options[:flashMode] = flashMode
  # desired_width / desired_height -> imageWidth / imageHeight (nil when absent).
  imageWidth = if options['desired_width'] != nil
                 options['desired_width']
               elsif options[:desired_width] != nil
                 options[:desired_width]
               end
  options[:imageWidth] = imageWidth
  imageHeight = if options['desired_height'] != nil
                  options['desired_height']
                elsif options[:desired_height] != nil
                  options[:desired_height]
                end
  options[:imageHeight] = imageHeight
  options
end
end
Fix for JIRA ID MC-113715:
When take_picture was called without any properties in RE2.2, a fileName property incorrectly appeared in the property map.
#------------------------------------------------------------------------
# (The MIT License)
#
# Copyright (c) 2008-2013 Rhomobile, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# http://rhomobile.com
#------------------------------------------------------------------------
if Rho::System.platform == 'APPLE' || Rho::System.platform == 'ANDROID' || Rho::System.platform == 'WP8'
# Deprecated camera API shim forwarding to the Camera extension's
# takePicture/choosePicture implementations.
class Camera
  # Takes a picture and reports the result to +callback_url+.
  # Options may use string or symbol keys (legacy callers used both).
  def self.take_picture(callback_url, options = {})
    options = process_hash_params(options)
    # BUG FIX: the previous two-step assignment clobbered the 'back'
    # default with nil when no camera_type option was supplied, because
    # the if/elsif expression evaluated to nil.
    cam_type = if options['camera_type'] != nil
                 options['camera_type']
               elsif options[:camera_type] != nil
                 options[:camera_type]
               else
                 'back'
               end
    cam_type = 'back' if cam_type == 'main'
    # MC-113715: Android must not receive an auto-generated fileName;
    # only the remaining non-iOS platforms need one.
    if Rho::System.platform != 'APPLE' && Rho::System.platform != 'ANDROID'
      unless options['fileName']
        options['fileName'] = File.join(Rho::RhoApplication.get_blob_folder(), Rho::RhoConfig.generate_id.to_s)
      end
    end
    # Pick the camera matching the requested type, falling back to the
    # first enumerated camera.
    cams = Camera.enumerate
    if cams != nil
      if cams.size > 0
        selected_cam = cams[0]
        cams.each do |cam|
          if cam.cameraType == cam_type
            selected_cam = cam
          end
        end
        selected_cam.takePicture(options, callback_url)
      end
    end
  end

  # Lets the user pick an existing picture; result goes to +callback_url+.
  def self.choose_picture(callback_url, options = {})
    options[:deprecated] = true
    if Rho::System.platform != 'APPLE'
      unless options['fileName']
        options['fileName'] = File.join(Rho::RhoApplication.get_blob_folder(), Rho::RhoConfig.generate_id.to_s)
      end
    end
    Camera.choosePicture(options, callback_url)
  end

  # Returns { "max_resolution" => { "width" => ..., "height" => ... } }
  # for the requested camera type, or nil when no such camera exists.
  def self.get_camera_info(cam_type='main')
    cam_type = 'back' if cam_type == 'main'
    cams = Camera.enumerate
    if cams != nil
      cams.each do |cam|
        if cam.cameraType == cam_type
          info = { "max_resolution" => { "width" => cam.maxWidth.to_s, "height" => cam.maxHeight.to_s } }
          return info
        end
      end
    end
    return nil
  end
end
private
# Normalizes legacy (string- or symbol-keyed) camera options into the
# symbol-keyed form the camera extension expects. Mutates and returns
# +options+. Defaults: :colorModel 'rgb', :imageFormat 'jpg',
# :flashMode 'off'; width/height stay nil when not requested.
#
# BUG FIX: each option previously used `x = default; x = if ... end`,
# so when no legacy key was present the if-expression evaluated to nil
# and clobbered the documented default. Assignments are now conditional
# so the defaults survive.
def process_hash_params(options = {})
  options[:deprecated] = true
  # color_model: 'RGB' -> 'rgb', 'Grayscale' -> 'grayscale'; unknown
  # values intentionally map to nil as before.
  colorModel = 'rgb'
  if options['color_model'] != nil
    colorModel = if options['color_model'] == 'RGB'
                   'rgb'
                 elsif options['color_model'] == 'Grayscale'
                   'grayscale'
                 end
  elsif options[:color_model] != nil
    colorModel = if options[:color_model] == 'RGB'
                   'rgb'
                 elsif options[:color_model] == 'Grayscale'
                   'grayscale'
                 end
  end
  options[:colorModel] = colorModel
  # format -> imageFormat, passed through verbatim.
  imageFormat = 'jpg'
  if options['format'] != nil
    imageFormat = options['format']
  elsif options[:format] != nil
    imageFormat = options[:format]
  end
  options[:imageFormat] = imageFormat
  # flash_mode -> flashMode; 'red-eye' is renamed to 'redEye'.
  flashMode = 'off'
  if options['flash_mode'] != nil
    flashMode = options['flash_mode'] == 'red-eye' ? "redEye" : options['flash_mode']
  elsif options[:flash_mode] != nil
    flashMode = options[:flash_mode] == 'red-eye' ? "redEye" : options[:flash_mode]
  end
  options[:flashMode] = flashMode
  # desired_width / desired_height -> imageWidth / imageHeight (nil when absent).
  imageWidth = if options['desired_width'] != nil
                 options['desired_width']
               elsif options[:desired_width] != nil
                 options[:desired_width]
               end
  options[:imageWidth] = imageWidth
  imageHeight = if options['desired_height'] != nil
                  options['desired_height']
                elsif options[:desired_height] != nil
                  options[:desired_height]
                end
  options[:imageHeight] = imageHeight
  options
end
end
|
# Minimal stand-ins for ActiveRecord reflection/column objects used by
# the form-builder tests.
Association = Struct.new(:klass, :name, :macro, :options)
Column = Struct.new(:name, :type, :limit) do
  # Returns +true+ if the column is either of type integer, float or decimal.
  def number?
    [:integer, :float, :decimal].include?(type)
  end
end
# Fake model with canned .all behaviours so tests can exercise finder
# options (conditions/order/include/joins) deterministically.
Company = Struct.new(:id, :name) do
  extend ActiveModel::Naming
  include ActiveModel::Conversion

  def self.all(options={})
    records = (1..3).map { |i| Company.new(i, "Company #{i}") }
    return [records.first] if options[:conditions].present?
    return [records.last] if options[:order].present?
    return records[0..1] if options[:include].present?
    return records[1..2] if options[:joins].present?
    records
  end

  def self.merge_conditions(a, b)
    (a || {}).merge(b || {})
  end

  def persisted?
    true
  end
end
# Fake has_many target; ignores any finder options.
class Tag < Company
  def self.all(options={})
    Array.new(3) { |i| Tag.new(i + 1, "Tag #{i + 1}") }
  end
end
TagGroup = Struct.new(:id, :name, :tags)
# Plain-Ruby model double exposing just enough of the ActiveModel /
# ActiveRecord surface (columns, associations, errors, i18n names) for
# the form-builder tests.
class User
  extend ActiveModel::Naming
  include ActiveModel::Conversion
  attr_accessor :id, :name, :company, :company_id, :time_zone, :active, :age,
  :description, :created_at, :updated_at, :credit_limit, :password, :url,
  :delivery_time, :born_at, :special_company_id, :country, :tags, :tag_ids,
  :avatar, :home_picture, :email, :status, :residence_country, :phone_number,
  :post_count, :lock_version, :amount, :attempts, :action, :credit_card, :gender

  # Mass-assigns any of the accessors above from +options+.
  def initialize(options={})
    @new_record = false
    options.each do |key, value|
      send("#{key}=", value)
    end if options
  end

  # Flags the instance as unsaved so persisted? returns false.
  def new_record!
    @new_record = true
  end

  def persisted?
    !@new_record
  end

  # Nested-attribute writer stubs; accept and discard anything.
  def company_attributes=(*)
  end

  def tags_attributes=(*)
  end

  # Returns a fake Column with a canned type/limit per attribute, or a
  # Column with nil type for unknown attributes.
  def column_for_attribute(attribute)
    column_type, limit = case attribute.to_sym
    when :name, :status, :password then [:string, 100]
    when :description then [:text, 200]
    when :age then :integer
    when :credit_limit then [:decimal, 15]
    when :active then :boolean
    when :born_at then :date
    when :delivery_time then :time
    when :created_at then :datetime
    when :updated_at then :timestamp
    when :lock_version then :integer
    when :home_picture then :string
    when :amount then :integer
    when :attempts then :integer
    when :action then :string
    when :credit_card then :string
    end
    Column.new(attribute, column_type, limit)
  end

  # Canned human names so label/translation lookups are observable.
  def self.human_attribute_name(attribute)
    case attribute
    when 'name'
      'Super User Name!'
    when 'description'
      'User Description!'
    when 'company'
      'Company Human Name!'
    else
      attribute.humanize
    end
  end

  # Fake reflection API returning Association stubs for known names,
  # nil otherwise.
  def self.reflect_on_association(association)
    case association
    when :company
      Association.new(Company, association, :belongs_to, {})
    when :tags
      Association.new(Tag, association, :has_many, {})
    when :first_company
      Association.new(Company, association, :has_one, {})
    when :special_company
      Association.new(Company, association, :belongs_to, { conditions: { id: 1 } })
    end
  end

  # Pre-populated errors hash; unknown keys default to an empty array.
  def errors
    @errors ||= begin
      hash = Hash.new { |h,k| h[k] = [] }
      hash.merge!(
        name: ["can't be blank"],
        description: ["must be longer than 15 characters"],
        age: ["is not a number", "must be greater than 18"],
        company: ["company must be present"],
        company_id: ["must be valid"]
      )
    end
  end

  def self.readonly_attributes
    ["credit_card"]
  end
end
# User variant exercising the full validation DSL: presence (with
# if/unless/on options), numericality against literals and method-based
# bounds, and length constraints.
class ValidatingUser < User
  include ActiveModel::Validations
  validates :name, presence: true
  validates :company, presence: true
  validates :age, presence: true, if: Proc.new { |user| user.name }
  validates :amount, presence: true, unless: Proc.new { |user| user.age }
  validates :action, presence: true, on: :create
  validates :credit_limit, presence: true, on: :save
  validates :phone_number, presence: true, on: :update
  validates_numericality_of :age,
  greater_than_or_equal_to: 18,
  less_than_or_equal_to: 99,
  only_integer: true
  # Bounds below are symbols/methods so tests cover dynamic limits.
  validates_numericality_of :amount,
  greater_than: :min_amount,
  less_than: :max_amount,
  only_integer: true
  validates_numericality_of :attempts,
  greater_than_or_equal_to: :min_attempts,
  less_than_or_equal_to: :max_attempts,
  only_integer: true
  validates_length_of :name, maximum: 25
  validates_length_of :description, maximum: 50
  validates_length_of :action, maximum: 10, tokenizer: lambda { |str| str.scan(/\w+/) }
  validates_length_of :home_picture, is: 12

  # Fixed limits referenced by the numericality validators above.
  def min_amount
    10
  end

  def max_amount
    100
  end

  def min_attempts
    1
  end

  def max_attempts
    100
  end
end
# User variant exercising proc-based numericality bounds and format
# validations (with/without, literal and proc-built regexps).
class OtherValidatingUser < User
  include ActiveModel::Validations
  validates_numericality_of :age,
  greater_than: 17,
  less_than: 100,
  only_integer: true
  validates_numericality_of :amount,
  greater_than: Proc.new { |user| user.age },
  less_than: Proc.new { |user| user.age + 100 },
  only_integer: true
  validates_numericality_of :attempts,
  greater_than_or_equal_to: Proc.new { |user| user.age },
  less_than_or_equal_to: Proc.new { |user| user.age + 100 },
  only_integer: true
  validates_format_of :country, with: /\w+/
  validates_format_of :name, with: Proc.new { /\w+/ }
  validates_format_of :description, without: /\d+/
end
# Hash subclass posing as a model, for testing form objects that are
# not ActiveRecord-backed. Never persisted; fixed display name.
class HashBackedAuthor < Hash
  extend ActiveModel::Naming
  include ActiveModel::Conversion

  def persisted?
    false
  end

  def name
    'hash backed author'
  end
end
Remove dead stuff from test models
# Minimal stand-ins for ActiveRecord reflection/column objects used by
# the form-builder tests.
Association = Struct.new(:klass, :name, :macro, :options)
Column = Struct.new(:name, :type, :limit) do
  # Returns +true+ if the column is either of type integer, float or decimal.
  def number?
    %i[integer float decimal].include?(type)
  end
end
# Fake model with canned .all behaviour so tests can exercise
# :conditions and :order finder options deterministically.
Company = Struct.new(:id, :name) do
  extend ActiveModel::Naming
  include ActiveModel::Conversion

  def self.all(options={})
    records = (1..3).map { |i| Company.new(i, "Company #{i}") }
    return [records.first] if options[:conditions]
    return [records.last] if options[:order]
    records
  end

  def persisted?
    true
  end
end
# Fake has_many target; ignores any finder options.
class Tag < Company
  def self.all(options={})
    Array.new(3) { |i| Tag.new(i + 1, "Tag #{i + 1}") }
  end
end
TagGroup = Struct.new(:id, :name, :tags)
# Plain-Ruby model double exposing just enough of the ActiveModel /
# ActiveRecord surface (columns, associations, errors, i18n names) for
# the form-builder tests.
class User
  extend ActiveModel::Naming
  include ActiveModel::Conversion
  attr_accessor :id, :name, :company, :company_id, :time_zone, :active, :age,
  :description, :created_at, :updated_at, :credit_limit, :password, :url,
  :delivery_time, :born_at, :special_company_id, :country, :tags, :tag_ids,
  :avatar, :home_picture, :email, :status, :residence_country, :phone_number,
  :post_count, :lock_version, :amount, :attempts, :action, :credit_card, :gender

  # Mass-assigns any of the accessors above from +options+.
  def initialize(options={})
    @new_record = false
    options.each do |key, value|
      send("#{key}=", value)
    end if options
  end

  # Flags the instance as unsaved so persisted? returns false.
  def new_record!
    @new_record = true
  end

  def persisted?
    !@new_record
  end

  # Nested-attribute writer stubs; accept and discard anything.
  def company_attributes=(*)
  end

  def tags_attributes=(*)
  end

  # Returns a fake Column with a canned type/limit per attribute, or a
  # Column with nil type for unknown attributes.
  def column_for_attribute(attribute)
    column_type, limit = case attribute.to_sym
    when :name, :status, :password then [:string, 100]
    when :description then [:text, 200]
    when :age then :integer
    when :credit_limit then [:decimal, 15]
    when :active then :boolean
    when :born_at then :date
    when :delivery_time then :time
    when :created_at then :datetime
    when :updated_at then :timestamp
    when :lock_version then :integer
    when :home_picture then :string
    when :amount then :integer
    when :attempts then :integer
    when :action then :string
    when :credit_card then :string
    end
    Column.new(attribute, column_type, limit)
  end

  # Canned human names so label/translation lookups are observable.
  def self.human_attribute_name(attribute)
    case attribute
    when 'name'
      'Super User Name!'
    when 'description'
      'User Description!'
    when 'company'
      'Company Human Name!'
    else
      attribute.humanize
    end
  end

  # Fake reflection API returning Association stubs for known names,
  # nil otherwise.
  def self.reflect_on_association(association)
    case association
    when :company
      Association.new(Company, association, :belongs_to, {})
    when :tags
      Association.new(Tag, association, :has_many, {})
    when :first_company
      Association.new(Company, association, :has_one, {})
    when :special_company
      Association.new(Company, association, :belongs_to, { conditions: { id: 1 } })
    end
  end

  # Pre-populated errors hash; unknown keys default to an empty array.
  def errors
    @errors ||= begin
      hash = Hash.new { |h,k| h[k] = [] }
      hash.merge!(
        name: ["can't be blank"],
        description: ["must be longer than 15 characters"],
        age: ["is not a number", "must be greater than 18"],
        company: ["company must be present"],
        company_id: ["must be valid"]
      )
    end
  end

  def self.readonly_attributes
    ["credit_card"]
  end
end
# User variant exercising the full validation DSL: presence (with
# if/unless/on options), numericality against literals and method-based
# bounds, and length constraints.
class ValidatingUser < User
  include ActiveModel::Validations
  validates :name, presence: true
  validates :company, presence: true
  validates :age, presence: true, if: Proc.new { |user| user.name }
  validates :amount, presence: true, unless: Proc.new { |user| user.age }
  validates :action, presence: true, on: :create
  validates :credit_limit, presence: true, on: :save
  validates :phone_number, presence: true, on: :update
  validates_numericality_of :age,
  greater_than_or_equal_to: 18,
  less_than_or_equal_to: 99,
  only_integer: true
  # Bounds below are symbols/methods so tests cover dynamic limits.
  validates_numericality_of :amount,
  greater_than: :min_amount,
  less_than: :max_amount,
  only_integer: true
  validates_numericality_of :attempts,
  greater_than_or_equal_to: :min_attempts,
  less_than_or_equal_to: :max_attempts,
  only_integer: true
  validates_length_of :name, maximum: 25
  validates_length_of :description, maximum: 50
  validates_length_of :action, maximum: 10, tokenizer: lambda { |str| str.scan(/\w+/) }
  validates_length_of :home_picture, is: 12

  # Fixed limits referenced by the numericality validators above.
  def min_amount
    10
  end

  def max_amount
    100
  end

  def min_attempts
    1
  end

  def max_attempts
    100
  end
end
# User variant exercising proc-based numericality bounds and format
# validations (with/without, literal and proc-built regexps).
class OtherValidatingUser < User
  include ActiveModel::Validations
  validates_numericality_of :age,
  greater_than: 17,
  less_than: 100,
  only_integer: true
  validates_numericality_of :amount,
  greater_than: Proc.new { |user| user.age },
  less_than: Proc.new { |user| user.age + 100 },
  only_integer: true
  validates_numericality_of :attempts,
  greater_than_or_equal_to: Proc.new { |user| user.age },
  less_than_or_equal_to: Proc.new { |user| user.age + 100 },
  only_integer: true
  validates_format_of :country, with: /\w+/
  validates_format_of :name, with: Proc.new { /\w+/ }
  validates_format_of :description, without: /\d+/
end
# Hash subclass posing as a model, for testing form objects that are
# not ActiveRecord-backed. Never persisted; fixed display name.
class HashBackedAuthor < Hash
  extend ActiveModel::Naming
  include ActiveModel::Conversion

  def persisted?
    false
  end

  def name
    'hash backed author'
  end
end
|
require 'data_kitten/utils/guessable_lookup.rb'
module DataKitten
module PublishingFormats
module CKAN
@@metadata = nil
private
# Detects whether +instance+'s URI points at a CKAN-hosted dataset.
# Side effects: caches the dataset id in @@id and the package metadata
# (extended with GuessableLookup) in @@metadata. Returns falsy when any
# lookup step fails (the trailing bare rescue deliberately swallows all
# StandardErrors and yields false).
def self.supported?(instance)
uri = instance.uri
package = uri.path.split("/").last
# If the package is a UUID - it's more than likely to be a CKAN ID
if package.match(/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/)
@@id = package
else
# NOTE(review): the inline `rescue ""` makes the v3 API probe
# best-effort; an empty result falls back to the v2 API below.
results = RestClient.get "#{uri.scheme}://#{uri.host}/api/3/action/package_show", {:params => {:id => package}} rescue ""
if results == ""
results = RestClient.get "#{uri.scheme}://#{uri.host}/api/2/rest/dataset/#{package}"
end
result = JSON.parse results
# v3 responses nest the record under "result"; v2 responses are flat.
@@id = result["result"]["id"] rescue result["id"]
end
@@metadata = JSON.parse RestClient.get "#{uri.scheme}://#{uri.host}/api/rest/package/#{@@id}"
@@metadata.extend(GuessableLookup)
rescue
false
end
public
# The publishing format for the dataset.
# @return [Symbol] +:ckan+
# @see Dataset#publishing_format
def publishing_format
:ckan
end
# The human-readable title of the dataset.
#
# @see Dataset#data_title
def data_title
metadata["title"] rescue nil
end
# A brief description of the dataset
#
# @see Dataset#description
def description
metadata["notes"] || metadata["description"]
rescue
nil
end
def identifier
metadata["name"] rescue nil
end
# Keywords for the dataset
#
# @see Dataset#keywords
def keywords
keywords = []
metadata["tags"].each do |tag|
keywords << tag
end
return keywords
rescue
[]
end
# A list of publishers.
#
# @see Dataset#publishers
def publishers
id = metadata['organization']['id'] || metadata['groups'][0]
fetch_publisher(id)
rescue
[]
end
def maintainers
extract_agent('maintainer', 'maintainer_email')
end
def contributors
extract_agent('author', 'author_email')
end
# A list of licenses.
#
# @see Dataset#licenses
def licenses
extras = metadata["extras"] || {}
id = metadata["license_id"]
uri = metadata["license_url"] || extras["licence_url"]
name = metadata["license_title"] || extras["licence_url_title"]
if [id, uri, name].any?
[License.new(:id => id, :uri => uri, :name => name)]
else
[]
end
end
# A list of distributions, referred to as +resources+ by Datapackage.
#
# @see Dataset#distributions
def distributions
distributions = []
metadata["resources"].each do |resource|
distribution = {
:title => resource["description"],
:accessURL => resource["url"],
:format => resource["format"]
}
distributions << Distribution.new(self, ckan_resource: distribution)
end
return distributions
rescue
nil
end
# How frequently the data is updated.
#
# @see Dataset#update_frequency
def update_frequency
metadata["extras"]["update_frequency"] ||
metadata["extras"]["frequency-of-update"] ||
metadata["extras"]["accrualPeriodicity"] ||
metadata["extras"]["accrual_periodicity"]
rescue
nil
end
# Date the dataset was released
#
# @see Dataset#issued
def issued
Date.parse metadata["metadata_created"] rescue nil
end
# Date the dataset was modified
#
# @see Dataset#modified
def modified
Date.parse metadata["metadata_modified"] rescue nil
end
# The temporal coverage of the dataset
#
# @see Dataset#temporal
def temporal
start_date = Date.parse metadata["extras"]["temporal_coverage-from"] rescue nil
end_date = Date.parse metadata["extras"]["temporal_coverage-to"] rescue nil
Temporal.new(:start => start_date, :end => end_date)
end
private
def metadata
@@metadata
end
def select_extras(group, key)
extra = group["extras"][key] rescue ""
if extra == ""
extra = group['result']['extras'].select {|e| e["key"] == key }.first['value'] rescue ""
end
extra
end
def fetch_publisher(id)
uri = parsed_uri
[
"#{uri.scheme}://#{uri.host}/api/rest/group/#{id}",
"#{uri.scheme}://#{uri.host}/api/3/action/group_show?id=#{id}",
"#{uri.scheme}://#{uri.host}/api/3/action/organization_show?id=#{id}"
].each do |uri|
begin
@group = JSON.parse RestClient.get uri
break
rescue RestClient::ResourceNotFound
nil
end
end
[
Agent.new(
:name => @group["display_name"] || @group["result"]["title"],
:homepage => select_extras(@group, "website-url"),
:mbox => select_extras(@group, "contact-email")
)
]
end
def parsed_uri
URI(self.uri)
end
def extract_agent(name_field, email_field)
name = metadata[name_field]
email = metadata[email_field]
if [name, email].any?
[Agent.new(name: name, mbox: email)]
else
[]
end
end
end
end
end
Integrate GuessableLookup
require 'data_kitten/utils/guessable_lookup.rb'
module DataKitten
  module PublishingFormats
    # Publishing-format support for CKAN catalogues. This revision routes
    # all metadata access through GuessableLookup#lookup, which (presumably)
    # tolerates missing keys and key-name variants — TODO confirm against
    # GuessableLookup's implementation.
    module CKAN
      @@metadata = nil

      private

      # Returns truthy (the extended metadata hash) when +instance.uri+
      # points at a CKAN dataset; false on any fetch/parse failure.
      def self.supported?(instance)
        uri = instance.uri
        package = uri.path.split("/").last
        # If the package is a UUID - it's more than likely to be a CKAN ID
        if package.match(/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/)
          @@id = package
        else
          # Try the v3 action API first, fall back to the v2 REST API.
          results = RestClient.get "#{uri.scheme}://#{uri.host}/api/3/action/package_show", {:params => {:id => package}} rescue ""
          if results == ""
            results = RestClient.get "#{uri.scheme}://#{uri.host}/api/2/rest/dataset/#{package}"
          end
          result = JSON.parse results
          # v3 wraps the payload in "result"; v2 returns it directly.
          @@id = result["result"]["id"] rescue result["id"]
        end
        @@metadata = JSON.parse RestClient.get "#{uri.scheme}://#{uri.host}/api/rest/package/#{@@id}"
        @@metadata.extend(GuessableLookup)
      rescue
        false
      end

      public

      # The publishing format for the dataset.
      # @return [Symbol] +:ckan+
      # @see Dataset#publishing_format
      def publishing_format
        :ckan
      end

      # The human-readable title of the dataset.
      #
      # @see Dataset#data_title
      def data_title
        metadata.lookup("title")
      end

      # A brief description of the dataset
      #
      # @see Dataset#description
      def description
        metadata.lookup("notes") || metadata.lookup("description")
      rescue
        nil
      end

      # The machine-readable name (slug) of the dataset.
      def identifier
        metadata.lookup("name")
      end

      # Keywords for the dataset
      #
      # @see Dataset#keywords
      def keywords
        keywords = []
        metadata.lookup("tags").each do |tag|
          keywords << tag
        end
        return keywords
      rescue
        []
      end

      # A list of publishers.
      #
      # @see Dataset#publishers
      def publishers
        # lookup returns nil for missing paths, so the || fallback to the
        # first group actually triggers when 'organization' is absent.
        id = metadata.lookup('organization', 'id') || metadata.lookup('groups', 0)
        fetch_publisher(id)
      rescue
        []
      end

      # Maintainer contact, when present in the metadata.
      def maintainers
        extract_agent('maintainer', 'maintainer_email')
      end

      # Author contact, when present in the metadata.
      def contributors
        extract_agent('author', 'author_email')
      end

      # A list of licenses.
      #
      # @see Dataset#licenses
      def licenses
        id = metadata.lookup("license_id")
        uri = metadata.lookup("license_url") || metadata.lookup("extras", "licence_url")
        name = metadata.lookup("license_title") || metadata.lookup("extras", "licence_url_title")
        if [id, uri, name].any?
          [License.new(:id => id, :uri => uri, :name => name)]
        else
          []
        end
      end

      # A list of distributions, referred to as +resources+ by Datapackage.
      #
      # @see Dataset#distributions
      def distributions
        distributions = []
        metadata.lookup("resources").each do |resource|
          distribution = {
            :title => resource["description"],
            :accessURL => resource["url"],
            :format => resource["format"]
          }
          distributions << Distribution.new(self, ckan_resource: distribution)
        end
        return distributions
      rescue
        nil
      end

      # How frequently the data is updated.
      #
      # NOTE(review): the "accrualPeriodicity" spelling checked by the
      # pre-lookup version is not listed here — presumably GuessableLookup
      # matches that variant of "accrual_periodicity"; verify.
      #
      # @see Dataset#update_frequency
      def update_frequency
        metadata.lookup("extras", "update_frequency") ||
        metadata.lookup("extras", "frequency-of-update") ||
        metadata.lookup("extras", "accrual_periodicity")
      rescue
        nil
      end

      # Date the dataset was released
      #
      # NOTE(review): no rescue here — Date.parse raises when the field is
      # missing or malformed; confirm callers expect that.
      #
      # @see Dataset#issued
      def issued
        Date.parse metadata.lookup("metadata_created")
      end

      # Date the dataset was modified
      #
      # NOTE(review): same as #issued — raises on missing/malformed input.
      #
      # @see Dataset#modified
      def modified
        Date.parse metadata.lookup("metadata_modified")
      end

      # The temporal coverage of the dataset
      #
      # @see Dataset#temporal
      def temporal
        start_date = Date.parse metadata.lookup("extras", "temporal_coverage-from") rescue nil
        end_date = Date.parse metadata.lookup("extras", "temporal_coverage-to") rescue nil
        Temporal.new(:start => start_date, :end => end_date)
      end

      private

      # Cached package metadata fetched by .supported?.
      def metadata
        @@metadata
      end

      # Looks up +key+ in a group's extras, handling both the flat v2
      # shape and the v3 key/value list shape; "" when absent.
      def select_extras(group, key)
        extra = group["extras"][key] rescue ""
        if extra == ""
          extra = group['result']['extras'].select {|e| e["key"] == key }.first['value'] rescue ""
        end
        extra
      end

      # Fetches the publisher group/organization record by trying the known
      # CKAN endpoints in order. NOTE(review): the block variable shadows
      # the outer `uri`, and @group stays nil (raising below, rescued by
      # #publishers) when every endpoint 404s.
      def fetch_publisher(id)
        uri = parsed_uri
        [
          "#{uri.scheme}://#{uri.host}/api/rest/group/#{id}",
          "#{uri.scheme}://#{uri.host}/api/3/action/group_show?id=#{id}",
          "#{uri.scheme}://#{uri.host}/api/3/action/organization_show?id=#{id}"
        ].each do |uri|
          begin
            @group = JSON.parse RestClient.get uri
            break
          rescue RestClient::ResourceNotFound
            nil
          end
        end
        [
          Agent.new(
            :name => @group["display_name"] || @group["result"]["title"],
            :homepage => select_extras(@group, "website-url"),
            :mbox => select_extras(@group, "contact-email")
          )
        ]
      end

      # URI of the dataset page, parsed once per call.
      def parsed_uri
        URI(self.uri)
      end

      # Builds a single-agent list from a name/email metadata pair,
      # or [] when both fields are missing.
      def extract_agent(name_field, email_field)
        name = metadata.lookup(name_field)
        email = metadata.lookup(email_field)
        if [name, email].any?
          [Agent.new(name: name, mbox: email)]
        else
          []
        end
      end
    end
  end
end
|
require 'database_cleaner/generic/base'
require 'active_record'
require 'erb'
module DatabaseCleaner
  # ActiveRecord adapter: resolves per-database connection settings from
  # the app's database.yml and supplies the connection class strategies
  # operate on.
  module ActiveRecord
    # Cleaning strategies this adapter supports.
    def self.available_strategies
      %w[truncation transaction deletion]
    end

    def self.config_file_location=(path)
      @config_file_location = path
    end

    # Defaults to the Rails-conventional path under the app root.
    def self.config_file_location
      @config_file_location ||= "#{DatabaseCleaner.app_root}/config/database.yml"
    end

    module Base
      include ::DatabaseCleaner::Generic::Base

      attr_accessor :connection_hash

      # Selecting a database immediately loads its connection settings.
      def db=(desired_db)
        @db = desired_db
        load_config
      end

      def db
        @db || super
      end

      # Reads the (ERB-processed) YAML config and stores the entry for the
      # selected database; no-op for :default or when no config file exists.
      def load_config
        if self.db != :default && File.file?(ActiveRecord.config_file_location)
          connection_details = YAML::load(ERB.new(IO.read(ActiveRecord.config_file_location)).result)
          @connection_hash = connection_details[self.db.to_s]
        end
      end

      # Builds an anonymous connection class; extracted so tests can stub it.
      def create_connection_klass
        Class.new(::ActiveRecord::Base)
      end

      # The ActiveRecord class whose connection the strategy should use.
      #
      # Fixed: previously this always established a brand-new anonymous
      # connection, so the transaction strategy wrapped a different
      # connection than the one the app's models use when cleaning
      # multiple databases. Prefer an existing model class already bound
      # to the target database.
      def connection_klass
        return ::ActiveRecord::Base unless connection_hash
        if ::ActiveRecord::Base.respond_to?(:descendants)
          database_name = connection_hash["database"]
          models = ::ActiveRecord::Base.descendants
          klass = models.detect { |m| m.connection_pool.spec.config[:database] == database_name }
          return klass if klass
        end
        klass = create_connection_klass
        klass.send :establish_connection, connection_hash
        klass
      end
    end
  end
end
Transaction strategy works with ActiveRecord and multiple databases
require 'database_cleaner/generic/base'
require 'active_record'
require 'erb'
module DatabaseCleaner
  # ActiveRecord adapter: resolves per-database connection settings from
  # the app's database.yml and supplies the connection class strategies
  # operate on.
  module ActiveRecord
    # Cleaning strategies this adapter supports.
    def self.available_strategies
      %w[truncation transaction deletion]
    end

    def self.config_file_location=(path)
      @config_file_location = path
    end

    # Defaults to the Rails-conventional path under the app root.
    def self.config_file_location
      @config_file_location ||= "#{DatabaseCleaner.app_root}/config/database.yml"
    end

    module Base
      include ::DatabaseCleaner::Generic::Base

      attr_accessor :connection_hash

      # Selecting a database immediately loads its connection settings.
      def db=(desired_db)
        @db = desired_db
        load_config
      end

      def db
        @db || super
      end

      # Reads the (ERB-processed) YAML config and stores the entry for the
      # selected database; no-op for :default or when no config file exists.
      def load_config
        return if self.db == :default
        return unless File.file?(ActiveRecord.config_file_location)
        raw_yaml = ERB.new(IO.read(ActiveRecord.config_file_location)).result
        connection_details = YAML::load(raw_yaml)
        @connection_hash = connection_details[self.db.to_s]
      end

      # Builds an anonymous connection class; extracted so tests can stub it.
      def create_connection_klass
        Class.new(::ActiveRecord::Base)
      end

      # The ActiveRecord class whose connection the strategy should use:
      # prefer a model already connected to the target database (so e.g.
      # transactions wrap the connection the app actually uses), otherwise
      # establish a fresh anonymous connection.
      def connection_klass
        return ::ActiveRecord::Base unless connection_hash
        if ::ActiveRecord::Base.respond_to?(:descendants)
          database_name = connection_hash["database"]
          bound_model = ::ActiveRecord::Base.descendants.detect do |model|
            model.connection_pool.spec.config[:database] == database_name
          end
          return bound_model if bound_model
        end
        anonymous = create_connection_klass
        anonymous.send :establish_connection, connection_hash
        anonymous
      end
    end
  end
end
|
module DatabaseCleaner
  module Generic
    # Shared option handling for truncation-based cleaning strategies.
    # Concrete adapters implement #clean and #tables_to_truncate.
    module Truncation
      # @param opts [Hash] accepts only :only and :except (table-name lists)
      # @raise [ArgumentError] on unknown options, or when both :only and
      #   :except are given at once
      def initialize(opts={})
        if !opts.empty? && !(opts.keys - [:only, :except]).empty?
          raise ArgumentError, "The only valid options are :only and :except. You specified #{opts.keys.join(',')}."
        end
        if opts.has_key?(:only) && opts.has_key?(:except)
          # Fixed message typo: previously read ":only or :either".
          raise ArgumentError, "You may only specify either :only or :except. Doing both doesn't really make sense does it?"
        end

        @only = opts[:only]
        @tables_to_exclude = (opts[:except] || []).dup
        # Never truncate the migration bookkeeping table, if the adapter has one.
        @tables_to_exclude << migration_storage_name if migration_storage_name
      end

      # Included for compatibility; do nothing if the strategy needs no setup.
      def start
      end

      # Concrete adapters must implement the actual truncation.
      def clean
        raise NotImplementedError
      end

      private

      # Adapters return the list of tables eligible for truncation.
      def tables_to_truncate
        raise NotImplementedError
      end

      # overwrite in subclasses
      # default implementation given because migration storage need not be present
      def migration_storage_name
        nil
      end
    end
  end
end
Fix typo in Truncation options error message
module DatabaseCleaner
  module Generic
    # Shared option handling for truncation-based cleaning strategies;
    # concrete adapters supply #clean and #tables_to_truncate.
    module Truncation
      # Accepts only :only and :except (mutually exclusive table-name lists).
      def initialize(opts={})
        unknown_keys = opts.keys - [:only, :except]
        unless unknown_keys.empty?
          raise ArgumentError, "The only valid options are :only and :except. You specified #{opts.keys.join(',')}."
        end
        if opts.has_key?(:only) && opts.has_key?(:except)
          raise ArgumentError, "You may only specify either :only or :except. Doing both doesn't really make sense does it?"
        end

        @only = opts[:only]
        excluded = (opts[:except] || []).dup
        # Protect the migration bookkeeping table, if the adapter has one.
        excluded << migration_storage_name if migration_storage_name
        @tables_to_exclude = excluded
      end

      # No-op kept for API compatibility with strategies that need setup.
      def start
      end

      # Concrete adapters must implement the actual truncation.
      def clean
        raise NotImplementedError
      end

      private

      # Adapters return the list of tables eligible for truncation.
      def tables_to_truncate
        raise NotImplementedError
      end

      # Default: no migration storage table to protect; adapters override.
      def migration_storage_name
        nil
      end
    end
  end
end
|
class Datatrans::Web::Transaction
  # Wraps the callback parameters of a Datatrans web authorize request and
  # exposes typed readers over them. Each reader returns nil when the
  # params hash is absent (the `rescue nil` guards).
  class AuthorizeResponse
    attr_accessor :params

    def initialize(params)
      @params = params
    end

    # True iff the gateway authorized the transaction and no error fields
    # were returned.
    # @raise [Datatrans::InvalidSignatureError] when sign2 does not match
    def successful?
      raise Datatrans::InvalidSignatureError unless valid_signature?
      response_code == '01' && response_message == 'Authorized' && !errors_occurred?
    end

    # Verifies the response signature (sign2) against our own computation.
    def valid_signature?
      return true if errors_occurred? # no sign2 sent on error
      sign(Datatrans.merchant_id, params[:amount], params[:currency], params[:uppTransactionId]) == params[:sign2]
    end

    def response_code
      params[:responseCode] rescue nil
    end

    def response_message
      params[:responseMessage] rescue nil
    end

    def transaction_id
      params[:uppTransactionId] rescue nil
    end

    def reference_number
      params[:refno] rescue nil
    end

    def authorization_code
      params[:authorizationCode] rescue nil
    end

    def masked_cc
      params[:maskedCC] rescue nil
    end

    # Added: card expiry year as sent back by the gateway (expy param).
    def exp_year
      params[:expy] rescue nil
    end

    # Added: card expiry month as sent back by the gateway (expm param).
    def exp_month
      params[:expm] rescue nil
    end

    def creditcard_alias
      params[:aliasCC] rescue nil
    end

    def error_code
      params[:errorCode] rescue nil
    end

    def error_message
      params[:errorMessage] rescue nil
    end

    def error_detail
      params[:errorDetail] rescue nil
    end

    private

    # Any populated error field means the gateway flagged a failure.
    def errors_occurred?
      error_code || error_message || error_detail
    end

    include Datatrans::Common
  end
end
added exp_month and exp_year getters to web authorize request
class Datatrans::Web::Transaction
  # Wraps the callback parameters of a Datatrans web authorize request.
  # Simple readers are generated table-driven below; each yields nil when
  # the params hash is absent.
  class AuthorizeResponse
    include Datatrans::Common

    attr_accessor :params

    def initialize(params)
      @params = params
    end

    # True iff the gateway authorized the transaction and no error fields
    # were returned.
    # @raise [Datatrans::InvalidSignatureError] when sign2 does not match
    def successful?
      raise Datatrans::InvalidSignatureError unless valid_signature?
      authorized = response_code == '01' && response_message == 'Authorized'
      authorized && !errors_occurred?
    end

    # Verifies the response signature (sign2) against our own computation.
    def valid_signature?
      return true if errors_occurred? # no sign2 sent on error
      sign(Datatrans.merchant_id, params[:amount], params[:currency], params[:uppTransactionId]) == params[:sign2]
    end

    # reader method      => callback param key
    {
      response_code:      :responseCode,
      response_message:   :responseMessage,
      transaction_id:     :uppTransactionId,
      reference_number:   :refno,
      authorization_code: :authorizationCode,
      masked_cc:          :maskedCC,
      exp_year:           :expy,
      exp_month:          :expm,
      creditcard_alias:   :aliasCC,
      error_code:         :errorCode,
      error_message:      :errorMessage,
      error_detail:       :errorDetail
    }.each do |reader, key|
      define_method(reader) { params[key] rescue nil }
    end

    private

    # Any populated error field means the gateway flagged a failure.
    def errors_occurred?
      error_code || error_message || error_detail
    end
  end
end
require 'sequel/no_core_ext' # to avoid sequel ~> 3.0 colliding with ActiveRecord
require 'multi_json'
module Dynflow
  module PersistenceAdapters
    Sequel.extension :migration

    # Sequel-backed persistence adapter: stores execution plans, actions,
    # steps, envelopes, delayed plans and coordinator records in SQL
    # tables, serializing the payload as JSON in a `data` column and
    # mirroring selected keys into real columns for filtering/ordering.
    class Sequel < Abstract
      include Algebrick::TypeCheck
      include Algebrick::Matching

      MAX_RETRIES = 10
      RETRY_DELAY = 1

      attr_reader :db

      def pagination?
        true
      end

      def filtering_by
        META_DATA.fetch :execution_plan
      end

      def ordering_by
        META_DATA.fetch :execution_plan
      end

      # Keys copied out of the serialized value into SQL columns per table.
      META_DATA = { execution_plan: %w(state result started_at ended_at real_time execution_time),
                    action: %w(caller_execution_plan_id caller_action_id),
                    step: %w(state started_at ended_at real_time execution_time action_id progress_done progress_weight),
                    envelope: %w(receiver_id),
                    coordinator_record: %w(id owner_id class),
                    delayed: %w(execution_plan_uuid start_at start_before args_serializer)}

      # @param config [Hash, String] Sequel connection config; the optional
      #   :additional_responsibilities key toggles the coordinator/connector
      #   features.
      def initialize(config)
        config = config.dup
        @additional_responsibilities = { coordinator: true, connector: true }
        if config.is_a?(Hash) && config.key?(:additional_responsibilities)
          @additional_responsibilities.merge!(config.delete(:additional_responsibilities))
        end
        @db = initialize_db config
        migrate_db
      end

      def transaction(&block)
        db.transaction(&block)
      end

      def find_execution_plans(options = {})
        options[:order_by] ||= :started_at
        data_set = filter(:execution_plan,
                          order(:execution_plan,
                                paginate(table(:execution_plan), options),
                                options),
                          options[:filters])
        # Fixed: materialize with `all` before mapping — Sequel's
        # Dataset#map is not thread-safe, which could corrupt results when
        # many callers query concurrently (all sibling queries here already
        # use `.all.map`).
        data_set.all.map { |record| load_data(record) }
      end

      # Deletes matching plans plus their dependent rows, in batches inside
      # transactions; returns the number of plans removed.
      def delete_execution_plans(filters, batch_size = 1000)
        count = 0
        filter(:execution_plan, table(:execution_plan), filters).each_slice(batch_size) do |plans|
          uuids = plans.map { |p| p.fetch(:uuid) }
          @db.transaction do
            table(:delayed).where(execution_plan_uuid: uuids).delete
            table(:step).where(execution_plan_uuid: uuids).delete
            table(:action).where(execution_plan_uuid: uuids).delete
            count += table(:execution_plan).where(uuid: uuids).delete
          end
        end
        return count
      end

      def load_execution_plan(execution_plan_id)
        load :execution_plan, uuid: execution_plan_id
      end

      def save_execution_plan(execution_plan_id, value)
        save :execution_plan, { uuid: execution_plan_id }, value
      end

      def delete_delayed_plans(filters, batch_size = 1000)
        count = 0
        filter(:delayed, table(:delayed), filters).each_slice(batch_size) do |plans|
          uuids = plans.map { |p| p.fetch(:execution_plan_uuid) }
          @db.transaction do
            count += table(:delayed).where(execution_plan_uuid: uuids).delete
          end
        end
        count
      end

      # Delayed plans whose start window has opened (or already closed).
      def find_past_delayed_plans(time)
        table(:delayed)
          .where('start_at <= ? OR (start_before IS NOT NULL AND start_before <= ?)', time, time)
          .order_by(:start_at)
          .all
          .map { |plan| load_data(plan) }
      end

      def load_delayed_plan(execution_plan_id)
        load :delayed, execution_plan_uuid: execution_plan_id
      rescue KeyError
        # Missing delayed plan is a normal condition, not an error.
        return nil
      end

      def save_delayed_plan(execution_plan_id, value)
        save :delayed, { execution_plan_uuid: execution_plan_id }, value
      end

      def load_step(execution_plan_id, step_id)
        load :step, execution_plan_uuid: execution_plan_id, id: step_id
      end

      def save_step(execution_plan_id, step_id, value)
        save :step, { execution_plan_uuid: execution_plan_id, id: step_id }, value
      end

      def load_action(execution_plan_id, action_id)
        load :action, execution_plan_uuid: execution_plan_id, id: action_id
      end

      def save_action(execution_plan_id, action_id, value)
        save :action, { execution_plan_uuid: execution_plan_id, id: action_id }, value
      end

      # Guard used by the envelope methods below.
      def connector_feature!
        unless @additional_responsibilities[:connector]
          raise "The sequel persistence adapter connector feature used but not enabled in additional_features"
        end
      end

      def save_envelope(data)
        connector_feature!
        save :envelope, {}, data
      end

      # Atomically reads and removes all envelopes addressed to a receiver.
      def pull_envelopes(receiver_id)
        connector_feature!
        db.transaction do
          data_set = table(:envelope).where(receiver_id: receiver_id).to_a
          envelopes = data_set.map { |record| load_data(record) }
          table(:envelope).where(id: data_set.map { |d| d[:id] }).delete
          return envelopes
        end
      end

      def push_envelope(envelope)
        connector_feature!
        table(:envelope).insert(prepare_record(:envelope, envelope))
      end

      # Guard used by the coordinator-record methods below.
      def coordinator_feature!
        unless @additional_responsibilities[:coordinator]
          raise "The sequel persistence adapter coordinator feature used but not enabled in additional_features"
        end
      end

      def insert_coordinator_record(value)
        coordinator_feature!
        save :coordinator_record, {}, value
      end

      def update_coordinator_record(class_name, record_id, value)
        coordinator_feature!
        save :coordinator_record, {class: class_name, :id => record_id}, value
      end

      def delete_coordinator_record(class_name, record_id)
        coordinator_feature!
        table(:coordinator_record).where(class: class_name, id: record_id).delete
      end

      def find_coordinator_records(options)
        coordinator_feature!
        options = options.dup
        filters = (options[:filters] || {}).dup
        exclude_owner_id = filters.delete(:exclude_owner_id)
        data_set = filter(:coordinator_record, table(:coordinator_record), filters)
        if exclude_owner_id
          data_set = data_set.exclude(:owner_id => exclude_owner_id)
        end
        data_set.all.map { |record| load_data(record) }
      end

      # Debug/testing dump of the main tables.
      def to_hash
        { execution_plans: table(:execution_plan).all.to_a,
          steps: table(:step).all.to_a,
          actions: table(:action).all.to_a,
          envelopes: table(:envelope).all.to_a }
      end

      private

      TABLES = { execution_plan: :dynflow_execution_plans,
                 action: :dynflow_actions,
                 step: :dynflow_steps,
                 envelope: :dynflow_envelopes,
                 coordinator_record: :dynflow_coordinator_records,
                 delayed: :dynflow_delayed_plans }

      def table(which)
        db[TABLES.fetch(which)]
      end

      def initialize_db(db_path)
        ::Sequel.connect db_path
      end

      def self.migrations_path
        File.expand_path('../sequel_migrations', __FILE__)
      end

      def migrate_db
        ::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
      end

      # Builds the row hash: JSON payload in :data (when the table has it)
      # plus the extracted metadata columns; symbols become strings.
      def prepare_record(table_name, value, base = {})
        record = base.dup
        if table(table_name).columns.include?(:data)
          record[:data] = dump_data(value)
        end
        record.merge! extract_metadata(table_name, value)
        record.each { |k, v| record[k] = v.to_s if v.is_a? Symbol }
        record
      end

      # Upsert (or delete when value is nil) keyed by +condition+.
      def save(what, condition, value)
        table           = table(what)
        existing_record = with_retry { table.first condition } unless condition.empty?
        if value
          record = prepare_record(what, value, (existing_record || condition))
          if existing_record
            with_retry { table.where(condition).update(record) }
          else
            with_retry { table.insert record }
          end
        else
          existing_record and with_retry { table.where(condition).delete }
        end
        value
      end

      # Loads one record by condition; KeyError when absent.
      def load(what, condition)
        table = table(what)
        if (record = with_retry { table.first(Utils.symbolize_keys(condition)) } )
          load_data(record)
        else
          raise KeyError, "searching: #{what} by: #{condition.inspect}"
        end
      end

      def load_data(record)
        Utils.indifferent_hash(MultiJson.load(record[:data]))
      end

      def delete(what, condition)
        table(what).where(Utils.symbolize_keys(condition)).delete
      end

      def extract_metadata(what, value)
        meta_keys = META_DATA.fetch(what)
        value     = Utils.indifferent_hash(value)
        meta_keys.inject({}) { |h, k| h.update k.to_sym => value[k] }
      end

      def dump_data(value)
        MultiJson.dump Type!(value, Hash)
      end

      def paginate(data_set, options)
        page = Integer(options[:page]) if options[:page]
        per_page = Integer(options[:per_page]) if options[:per_page]
        if page
          raise ArgumentError, "page specified without per_page attribute" unless per_page
          data_set.limit per_page, per_page * page
        else
          data_set
        end
      end

      def order(what, data_set, options)
        order_by = (options[:order_by]).to_s
        return data_set if order_by.empty?
        unless META_DATA.fetch(what).include? order_by
          raise ArgumentError, "unknown column #{order_by.inspect}"
        end
        order_by = order_by.to_sym
        data_set.order_by options[:desc] ? ::Sequel.desc(order_by) : order_by
      end

      # Applies a filter hash, allowing the execution-plan table to also be
      # filtered by caller action (joined through the actions table).
      def filter(what, data_set, filters)
        Type! filters, NilClass, Hash
        return data_set if filters.nil?
        unknown = filters.keys.map(&:to_s) - META_DATA.fetch(what)
        if what == :execution_plan
          unknown -= %w[uuid caller_execution_plan_id caller_action_id]
          if filters.key?('caller_action_id') && !filters.key?('caller_execution_plan_id')
            raise ArgumentError, "caller_action_id given but caller_execution_plan_id missing"
          end
          if filters.key?('caller_execution_plan_id')
            data_set = data_set.join_table(:inner, TABLES[:action], :execution_plan_uuid => :uuid).
                select_all(TABLES[:execution_plan]).distinct
          end
        end
        unless unknown.empty?
          # Fixed typo in message: "unkown" -> "unknown".
          raise ArgumentError, "unknown columns: #{unknown.inspect}"
        end
        data_set.where Utils.symbolize_keys(filters)
      end

      # Retries transient DB errors up to MAX_RETRIES with a fixed delay.
      # Unique-constraint violations are never retried (they are decisions,
      # not transient failures).
      def with_retry
        attempts = 0
        begin
          yield
        rescue ::Sequel::UniqueConstraintViolation => e
          raise e
        rescue Exception => e
          attempts += 1
          log(:error, e)
          if attempts > MAX_RETRIES
            log(:error, "The number of MAX_RETRIES exceeded")
            raise Errors::PersistenceError.delegate(e)
          else
            log(:error, "Persistence retry no. #{attempts}")
            sleep RETRY_DELAY
            retry
          end
        end
      end
    end
  end
end
Don't use sequel map method (#198)
Sequel's map method on dataset is not thread safe, which might
occasionally lead to stack consistency issues. We already addressed
similar case in 1510982b3d2b641ee4248cf6aa0ad9bd5392f828.
While it seemed it was the only place it might cause troubles,
similar thing could happen with auto-execute functionality,
when many tasks were pending. I've converted the rest of the map
calls that return their results outside of the adapter to
make sure they are thread safe.
require 'sequel/no_core_ext' # to avoid sequel ~> 3.0 colliding with ActiveRecord
require 'multi_json'
module Dynflow
  module PersistenceAdapters
    Sequel.extension :migration

    # Sequel-backed persistence adapter: stores execution plans, actions,
    # steps, envelopes, delayed plans and coordinator records in SQL
    # tables, serializing the payload as JSON in a `data` column and
    # mirroring selected keys into real columns for filtering/ordering.
    class Sequel < Abstract
      include Algebrick::TypeCheck
      include Algebrick::Matching

      MAX_RETRIES = 10
      RETRY_DELAY = 1

      attr_reader :db

      def pagination?
        true
      end

      def filtering_by
        META_DATA.fetch :execution_plan
      end

      def ordering_by
        META_DATA.fetch :execution_plan
      end

      # Keys copied out of the serialized value into SQL columns per table.
      META_DATA = { execution_plan: %w(state result started_at ended_at real_time execution_time),
                    action: %w(caller_execution_plan_id caller_action_id),
                    step: %w(state started_at ended_at real_time execution_time action_id progress_done progress_weight),
                    envelope: %w(receiver_id),
                    coordinator_record: %w(id owner_id class),
                    delayed: %w(execution_plan_uuid start_at start_before args_serializer)}

      # config: Sequel connection config; the optional
      # :additional_responsibilities key toggles coordinator/connector features.
      def initialize(config)
        config = config.dup
        @additional_responsibilities = { coordinator: true, connector: true }
        if config.is_a?(Hash) && config.key?(:additional_responsibilities)
          @additional_responsibilities.merge!(config.delete(:additional_responsibilities))
        end
        @db = initialize_db config
        migrate_db
      end

      def transaction(&block)
        db.transaction(&block)
      end

      def find_execution_plans(options = {})
        options[:order_by] ||= :started_at
        data_set = filter(:execution_plan,
                          order(:execution_plan,
                                paginate(table(:execution_plan), options),
                                options),
                          options[:filters])
        # `.all` materializes the rows first: Sequel's Dataset#map is not
        # thread-safe, so mapping a materialized Array keeps concurrent
        # queries consistent.
        data_set.all.map { |record| load_data(record) }
      end

      # Deletes matching plans plus their dependent rows, in batches inside
      # transactions; returns the number of plans removed.
      def delete_execution_plans(filters, batch_size = 1000)
        count = 0
        filter(:execution_plan, table(:execution_plan), filters).each_slice(batch_size) do |plans|
          uuids = plans.map { |p| p.fetch(:uuid) }
          @db.transaction do
            table(:delayed).where(execution_plan_uuid: uuids).delete
            table(:step).where(execution_plan_uuid: uuids).delete
            table(:action).where(execution_plan_uuid: uuids).delete
            count += table(:execution_plan).where(uuid: uuids).delete
          end
        end
        return count
      end

      def load_execution_plan(execution_plan_id)
        load :execution_plan, uuid: execution_plan_id
      end

      def save_execution_plan(execution_plan_id, value)
        save :execution_plan, { uuid: execution_plan_id }, value
      end

      def delete_delayed_plans(filters, batch_size = 1000)
        count = 0
        filter(:delayed, table(:delayed), filters).each_slice(batch_size) do |plans|
          uuids = plans.map { |p| p.fetch(:execution_plan_uuid) }
          @db.transaction do
            count += table(:delayed).where(execution_plan_uuid: uuids).delete
          end
        end
        count
      end

      # Delayed plans whose start window has opened (or already closed).
      def find_past_delayed_plans(time)
        table(:delayed)
          .where('start_at <= ? OR (start_before IS NOT NULL AND start_before <= ?)', time, time)
          .order_by(:start_at)
          .all
          .map { |plan| load_data(plan) }
      end

      def load_delayed_plan(execution_plan_id)
        load :delayed, execution_plan_uuid: execution_plan_id
      rescue KeyError
        # A missing delayed plan is a normal condition, not an error.
        return nil
      end

      def save_delayed_plan(execution_plan_id, value)
        save :delayed, { execution_plan_uuid: execution_plan_id }, value
      end

      def load_step(execution_plan_id, step_id)
        load :step, execution_plan_uuid: execution_plan_id, id: step_id
      end

      def save_step(execution_plan_id, step_id, value)
        save :step, { execution_plan_uuid: execution_plan_id, id: step_id }, value
      end

      def load_action(execution_plan_id, action_id)
        load :action, execution_plan_uuid: execution_plan_id, id: action_id
      end

      def save_action(execution_plan_id, action_id, value)
        save :action, { execution_plan_uuid: execution_plan_id, id: action_id }, value
      end

      # Guard used by the envelope methods below.
      def connector_feature!
        unless @additional_responsibilities[:connector]
          raise "The sequel persistence adapter connector feature used but not enabled in additional_features"
        end
      end

      def save_envelope(data)
        connector_feature!
        save :envelope, {}, data
      end

      # Atomically reads and removes all envelopes addressed to a receiver.
      def pull_envelopes(receiver_id)
        connector_feature!
        db.transaction do
          data_set = table(:envelope).where(receiver_id: receiver_id).to_a
          envelopes = data_set.map { |record| load_data(record) }
          table(:envelope).where(id: data_set.map { |d| d[:id] }).delete
          return envelopes
        end
      end

      def push_envelope(envelope)
        connector_feature!
        table(:envelope).insert(prepare_record(:envelope, envelope))
      end

      # Guard used by the coordinator-record methods below.
      def coordinator_feature!
        unless @additional_responsibilities[:coordinator]
          raise "The sequel persistence adapter coordinator feature used but not enabled in additional_features"
        end
      end

      def insert_coordinator_record(value)
        coordinator_feature!
        save :coordinator_record, {}, value
      end

      def update_coordinator_record(class_name, record_id, value)
        coordinator_feature!
        save :coordinator_record, {class: class_name, :id => record_id}, value
      end

      def delete_coordinator_record(class_name, record_id)
        coordinator_feature!
        table(:coordinator_record).where(class: class_name, id: record_id).delete
      end

      def find_coordinator_records(options)
        coordinator_feature!
        options = options.dup
        filters = (options[:filters] || {}).dup
        # :exclude_owner_id is a pseudo-filter handled here, not a column.
        exclude_owner_id = filters.delete(:exclude_owner_id)
        data_set = filter(:coordinator_record, table(:coordinator_record), filters)
        if exclude_owner_id
          data_set = data_set.exclude(:owner_id => exclude_owner_id)
        end
        data_set.all.map { |record| load_data(record) }
      end

      # Debug/testing dump of the main tables.
      def to_hash
        { execution_plans: table(:execution_plan).all.to_a,
          steps: table(:step).all.to_a,
          actions: table(:action).all.to_a,
          envelopes: table(:envelope).all.to_a }
      end

      private

      TABLES = { execution_plan: :dynflow_execution_plans,
                 action: :dynflow_actions,
                 step: :dynflow_steps,
                 envelope: :dynflow_envelopes,
                 coordinator_record: :dynflow_coordinator_records,
                 delayed: :dynflow_delayed_plans }

      def table(which)
        db[TABLES.fetch(which)]
      end

      def initialize_db(db_path)
        ::Sequel.connect db_path
      end

      def self.migrations_path
        File.expand_path('../sequel_migrations', __FILE__)
      end

      def migrate_db
        ::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
      end

      # Builds the row hash: JSON payload in :data (when the table has it)
      # plus the extracted metadata columns; symbols become strings.
      def prepare_record(table_name, value, base = {})
        record = base.dup
        if table(table_name).columns.include?(:data)
          record[:data] = dump_data(value)
        end
        record.merge! extract_metadata(table_name, value)
        record.each { |k, v| record[k] = v.to_s if v.is_a? Symbol }
        record
      end

      # Upsert (or delete when value is nil) keyed by +condition+.
      def save(what, condition, value)
        table           = table(what)
        existing_record = with_retry { table.first condition } unless condition.empty?
        if value
          record = prepare_record(what, value, (existing_record || condition))
          if existing_record
            with_retry { table.where(condition).update(record) }
          else
            with_retry { table.insert record }
          end
        else
          existing_record and with_retry { table.where(condition).delete }
        end
        value
      end

      # Loads one record by condition; raises KeyError when absent.
      def load(what, condition)
        table = table(what)
        if (record = with_retry { table.first(Utils.symbolize_keys(condition)) } )
          load_data(record)
        else
          raise KeyError, "searching: #{what} by: #{condition.inspect}"
        end
      end

      def load_data(record)
        Utils.indifferent_hash(MultiJson.load(record[:data]))
      end

      def delete(what, condition)
        table(what).where(Utils.symbolize_keys(condition)).delete
      end

      def extract_metadata(what, value)
        meta_keys = META_DATA.fetch(what)
        value     = Utils.indifferent_hash(value)
        meta_keys.inject({}) { |h, k| h.update k.to_sym => value[k] }
      end

      def dump_data(value)
        MultiJson.dump Type!(value, Hash)
      end

      def paginate(data_set, options)
        page = Integer(options[:page]) if options[:page]
        per_page = Integer(options[:per_page]) if options[:per_page]
        if page
          raise ArgumentError, "page specified without per_page attribute" unless per_page
          data_set.limit per_page, per_page * page
        else
          data_set
        end
      end

      # Orders by a whitelisted metadata column; no-op when unset.
      def order(what, data_set, options)
        order_by = (options[:order_by]).to_s
        return data_set if order_by.empty?
        unless META_DATA.fetch(what).include? order_by
          raise ArgumentError, "unknown column #{order_by.inspect}"
        end
        order_by = order_by.to_sym
        data_set.order_by options[:desc] ? ::Sequel.desc(order_by) : order_by
      end

      # Applies a filter hash, allowing the execution-plan table to also be
      # filtered by caller action (joined through the actions table).
      def filter(what, data_set, filters)
        Type! filters, NilClass, Hash
        return data_set if filters.nil?
        unknown = filters.keys.map(&:to_s) - META_DATA.fetch(what)
        if what == :execution_plan
          unknown -= %w[uuid caller_execution_plan_id caller_action_id]
          if filters.key?('caller_action_id') && !filters.key?('caller_execution_plan_id')
            raise ArgumentError, "caller_action_id given but caller_execution_plan_id missing"
          end
          if filters.key?('caller_execution_plan_id')
            data_set = data_set.join_table(:inner, TABLES[:action], :execution_plan_uuid => :uuid).
                select_all(TABLES[:execution_plan]).distinct
          end
        end
        unless unknown.empty?
          # NOTE(review): "unkown" is a typo in this user-visible message.
          raise ArgumentError, "unkown columns: #{unknown.inspect}"
        end
        data_set.where Utils.symbolize_keys(filters)
      end

      # Retries transient DB errors up to MAX_RETRIES with a fixed delay;
      # unique-constraint violations are re-raised immediately.
      # NOTE(review): rescuing Exception (not StandardError) retries even
      # signals/system exits — presumably deliberate for persistence
      # robustness, but worth confirming.
      def with_retry
        attempts = 0
        begin
          yield
        rescue ::Sequel::UniqueConstraintViolation => e
          raise e
        rescue Exception => e
          attempts += 1
          log(:error, e)
          if attempts > MAX_RETRIES
            log(:error, "The number of MAX_RETRIES exceeded")
            raise Errors::PersistenceError.delegate(e)
          else
            log(:error, "Persistence retry no. #{attempts}")
            sleep RETRY_DELAY
            retry
          end
        end
      end
    end
  end
end
|
require 'zlib'
module Embulk
module Output
class Vertica < OutputPlugin
class OutputThreadPool
def initialize(task, schema, size)
@task = task
@size = size
@schema = schema
@converters = ValueConverterFactory.create_converters(schema, task['default_timezone'], task['column_options'])
@output_threads = size.times.map { OutputThread.new(task) }
@current_index = 0
end
def enqueue(page)
json_page = []
page.each do |record|
json_page << to_json(record)
end
@mutex.synchronize do
@output_threads[@current_index].enqueue(json_page)
@current_index = (@current_index + 1) % @size
end
end
def start
@mutex = Mutex.new
@size.times.map {|i| @output_threads[i].start }
end
def commit
task_reports = @size.times.map {|i| @output_threads[i].commit }
end
def to_json(record)
if @task['json_payload']
record.first
else
Hash[*(@schema.names.zip(record).map do |column_name, value|
[column_name, @converters[column_name].call(value)]
end.flatten!(1))].to_json
end
end
end
class OutputThread
def initialize(task)
@task = task
@queue = SizedQueue.new(1)
@num_input_rows = 0
@num_output_rows = 0
@num_rejected_rows = 0
@outer_thread = Thread.current
@thread_active = false
@progress_log_timer = Time.now
@previous_num_input_rows = 0
case task['compress']
when 'GZIP'
@write_proc = self.method(:write_gzip)
else
@write_proc = self.method(:write_uncompressed)
end
end
def enqueue(json_page)
if @thread_active and @thread.alive?
Embulk.logger.trace { "embulk-output-vertica: enqueue" }
@queue.push(json_page)
else
Embulk.logger.info { "embulk-output-vertica: thread is dead, but still trying to enqueue" }
raise RuntimeError, "embulk-output-vertica: thread is died, but still trying to enqueue"
end
end
def write_gzip(io, page, &block)
buf = Zlib::Deflate.new
write_buf(buf, page, &block)
write_io(io, buf.finish)
end
def write_uncompressed(io, page, &block)
buf = ''
write_buf(buf, page, &block)
write_io(io, buf)
end
PIPE_BUF = 4096
def write_io(io, str)
str = str.force_encoding('ASCII-8BIT')
i = 0
# split str not to be blocked (max size of pipe buf is 64k bytes on Linux, Mac at default)
while substr = str[i, PIPE_BUF]
io.write(substr)
i += PIPE_BUF
end
end
def write_buf(buf, json_page, &block)
json_page.each do |record|
yield(record) if block_given?
Embulk.logger.trace { "embulk-output-vertica: record #{record}" }
buf << record << "\n"
@num_input_rows += 1
end
now = Time.now
if @progress_log_timer < now - 10 # once in 10 seconds
speed = ((@num_input_rows - @previous_num_input_rows) / (now - @progress_log_timer).to_f).round(1)
@progress_log_timer = now
@previous_num_input_rows = @num_input_rows
Embulk.logger.info { "embulk-output-vertica: num_input_rows #{num_format(@num_input_rows)} (#{num_format(speed)} rows/sec)" }
end
end
def num_format(number)
number.to_s.gsub(/(\d)(?=(\d{3})+(?!\d))/, '\1,')
end
def run
Embulk.logger.debug { "embulk-output-vertica: thread started" }
Vertica.connect(@task) do |jv|
begin
last_record = nil
num_output_rows, rejects = copy(jv, copy_sql) do |stdin|
while json_page = @queue.pop
if json_page == 'finish'
Embulk.logger.debug { "embulk-output-vertica: popped finish" }
break
end
Embulk.logger.trace { "embulk-output-vertica: dequeued" }
@write_proc.call(stdin, json_page) do |record|
last_record = record
end
end
end
Embulk.logger.debug { "embulk-output-vertica: thread finished" }
num_rejected_rows = rejects.size
@num_output_rows += num_output_rows
@num_rejected_rows += num_rejected_rows
Embulk.logger.info { "embulk-output-vertica: COMMIT!" }
jv.commit
Embulk.logger.debug { "embulk-output-vertica: COMMITTED!" }
rescue java.sql.SQLDataException => e
if @task['reject_on_materialized_type_error'] and e.message =~ /Rejected by user-defined parser/
Embulk.logger.warn "embulk-output-vertica: ROLLBACK! some of column types and values types do not fit #{last_record}"
else
Embulk.logger.warn "embulk-output-vertica: ROLLBACK!"
end
Embulk.logger.info { "embulk-output-vertica: last_record: #{last_record}" }
jv.rollback
raise e # die transaction
rescue => e
Embulk.logger.warn "embulk-output-vertica: ROLLBACK!"
jv.rollback
raise e
end
end
rescue => e
@thread_active = false # not to be enqueued any more
while @queue.size > 0
@queue.pop # dequeue all because some might be still trying @queue.push and get blocked, need to release
end
@outer_thread.raise e.class.new("#{e.message}\n #{e.backtrace.join("\n ")}")
end
def start
@thread = Thread.new(&method(:run))
@thread_active = true
end
def commit
@thread_active = false
if @thread.alive?
Embulk.logger.debug { "embulk-output-vertica: push finish" }
@queue.push('finish')
Thread.pass
@thread.join
else
raise RuntimeError, "embulk-output-vertica: thread died accidently"
end
task_report = {
'num_input_rows' => @num_input_rows,
'num_output_rows' => @num_output_rows,
'num_rejected_rows' => @num_rejected_rows,
}
end
# private
def copy(conn, sql, &block)
Embulk.logger.debug "embulk-output-vertica: #{sql}"
results, rejects = conn.copy(sql, &block)
end
def copy_sql
@copy_sql ||= "COPY #{quoted_schema}.#{quoted_temp_table} FROM STDIN#{compress}#{fjsonparser}#{copy_mode}#{abort_on_error} NO COMMIT"
end
def quoted_schema
::Jvertica.quote_identifier(@task['schema'])
end
def quoted_table
::Jvertica.quote_identifier(@task['table'])
end
def quoted_temp_table
::Jvertica.quote_identifier(@task['temp_table'])
end
def compress
" #{@task['compress']}"
end
def copy_mode
" #{@task['copy_mode']}"
end
def abort_on_error
@task['abort_on_error'] ? ' ABORT ON ERROR' : ''
end
def fjsonparser
" PARSER fjsonparser(#{reject_on_materialized_type_error})"
end
def reject_on_materialized_type_error
@task['reject_on_materialized_type_error'] ? 'reject_on_materialized_type_error=true' : ''
end
end
end
end
end
Fix getting stuck when no input is coming
require 'zlib'
module Embulk
module Output
class Vertica < OutputPlugin
class OutputThreadPool
def initialize(task, schema, size)
@task = task
@size = size
@schema = schema
@converters = ValueConverterFactory.create_converters(schema, task['default_timezone'], task['column_options'])
@output_threads = size.times.map { OutputThread.new(task) }
@current_index = 0
end
def enqueue(page)
json_page = []
page.each do |record|
json_page << to_json(record)
end
@mutex.synchronize do
@output_threads[@current_index].enqueue(json_page)
@current_index = (@current_index + 1) % @size
end
end
def start
@mutex = Mutex.new
@size.times.map {|i| @output_threads[i].start }
end
def commit
task_reports = @size.times.map {|i| @output_threads[i].commit }
end
def to_json(record)
if @task['json_payload']
record.first
else
Hash[*(@schema.names.zip(record).map do |column_name, value|
[column_name, @converters[column_name].call(value)]
end.flatten!(1))].to_json
end
end
end
class OutputThread
def initialize(task)
@task = task
@queue = SizedQueue.new(1)
@num_input_rows = 0
@num_output_rows = 0
@num_rejected_rows = 0
@outer_thread = Thread.current
@thread_active = false
@progress_log_timer = Time.now
@previous_num_input_rows = 0
case task['compress']
when 'GZIP'
@write_proc = self.method(:write_gzip)
else
@write_proc = self.method(:write_uncompressed)
end
end
def enqueue(json_page)
if @thread_active and @thread.alive?
Embulk.logger.trace { "embulk-output-vertica: enqueue" }
@queue.push(json_page)
else
Embulk.logger.info { "embulk-output-vertica: thread is dead, but still trying to enqueue" }
raise RuntimeError, "embulk-output-vertica: thread is died, but still trying to enqueue"
end
end
def write_gzip(io, page, &block)
buf = Zlib::Deflate.new
write_buf(buf, page, &block)
write_io(io, buf.finish)
end
def write_uncompressed(io, page, &block)
buf = ''
write_buf(buf, page, &block)
write_io(io, buf)
end
PIPE_BUF = 4096
def write_io(io, str)
str = str.force_encoding('ASCII-8BIT')
i = 0
# split str not to be blocked (max size of pipe buf is 64k bytes on Linux, Mac at default)
while substr = str[i, PIPE_BUF]
io.write(substr)
i += PIPE_BUF
end
end
def write_buf(buf, json_page, &block)
json_page.each do |record|
yield(record) if block_given?
Embulk.logger.trace { "embulk-output-vertica: record #{record}" }
buf << record << "\n"
@num_input_rows += 1
end
now = Time.now
if @progress_log_timer < now - 10 # once in 10 seconds
speed = ((@num_input_rows - @previous_num_input_rows) / (now - @progress_log_timer).to_f).round(1)
@progress_log_timer = now
@previous_num_input_rows = @num_input_rows
Embulk.logger.info { "embulk-output-vertica: num_input_rows #{num_format(@num_input_rows)} (#{num_format(speed)} rows/sec)" }
end
end
def num_format(number)
number.to_s.gsub(/(\d)(?=(\d{3})+(?!\d))/, '\1,')
end
# @return [Array] dequeued json_page
# @return [String] 'finish' is dequeued to finish
def dequeue
json_page = @queue.pop
Embulk.logger.trace { "embulk-output-vertica: dequeued" }
Embulk.logger.debug { "embulk-output-vertica: dequeued finish" } if json_page == 'finish'
json_page
end
def copy(jv, sql, &block)
Embulk.logger.debug "embulk-output-vertica: #{sql}"
num_output_rows = 0; rejected_row_nums = []; last_record = nil
json_page = dequeue
return [num_output_rows, rejected_row_nums, last_record] if json_page == 'finish'
num_output_rows, rejected_row_nums = jv.copy(sql) do |stdin, stream|
@write_proc.call(stdin, json_page) {|record| last_record = record }
while true
json_page = dequeue
break if json_page == 'finish'
@write_proc.call(stdin, json_page) {|record| last_record = record }
end
end
@num_output_rows += num_output_rows
@num_rejected_rows += rejected_row_nums.size
Embulk.logger.info { "embulk-output-vertica: COMMIT!" }
jv.commit
Embulk.logger.debug { "embulk-output-vertica: COMMITTED!" }
if rejected_row_nums.size > 0
Embulk.logger.debug { "embulk-output-vertica: rejected_row_nums: #{rejected_row_nums}" }
end
[num_output_rows, rejected_row_nums, last_record]
end
def run
Embulk.logger.debug { "embulk-output-vertica: thread started" }
Vertica.connect(@task) do |jv|
begin
num_output_rows, rejected_row_nums, last_record = copy(jv, copy_sql)
Embulk.logger.debug { "embulk-output-vertica: thread finished" }
rescue java.sql.SQLDataException => e
if @task['reject_on_materialized_type_error'] and e.message =~ /Rejected by user-defined parser/
Embulk.logger.warn "embulk-output-vertica: ROLLBACK! some of column types and values types do not fit #{rejected_row_nums}"
else
Embulk.logger.warn "embulk-output-vertica: ROLLBACK! #{rejected_row_nums}"
end
Embulk.logger.info { "embulk-output-vertica: last_record: #{last_record}" }
jv.rollback
raise e # die transaction
rescue => e
Embulk.logger.warn "embulk-output-vertica: ROLLBACK! #{e.class} #{e.message}"
jv.rollback
raise e
end
end
rescue => e
@thread_active = false # not to be enqueued any more
while @queue.size > 0
@queue.pop # dequeue all because some might be still trying @queue.push and get blocked, need to release
end
@outer_thread.raise e.class.new("#{e.message}\n #{e.backtrace.join("\n ")}")
end
def start
@thread = Thread.new(&method(:run))
@thread_active = true
end
def commit
@thread_active = false
if @thread.alive?
Embulk.logger.debug { "embulk-output-vertica: push finish" }
@queue.push('finish')
Thread.pass
@thread.join
else
raise RuntimeError, "embulk-output-vertica: thread died accidently"
end
task_report = {
'num_input_rows' => @num_input_rows,
'num_output_rows' => @num_output_rows,
'num_rejected_rows' => @num_rejected_rows,
}
end
# private
def copy_sql
@copy_sql ||= "COPY #{quoted_schema}.#{quoted_temp_table} FROM STDIN#{compress}#{fjsonparser}#{copy_mode}#{abort_on_error} NO COMMIT"
end
def quoted_schema
::Jvertica.quote_identifier(@task['schema'])
end
def quoted_table
::Jvertica.quote_identifier(@task['table'])
end
def quoted_temp_table
::Jvertica.quote_identifier(@task['temp_table'])
end
def compress
" #{@task['compress']}"
end
def copy_mode
" #{@task['copy_mode']}"
end
def abort_on_error
@task['abort_on_error'] ? ' ABORT ON ERROR' : ''
end
def fjsonparser
" PARSER fjsonparser(#{reject_on_materialized_type_error})"
end
def reject_on_materialized_type_error
@task['reject_on_materialized_type_error'] ? 'reject_on_materialized_type_error=true' : ''
end
end
end
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.